BA / Mbed OS BaBoRo1
Embed: (wiki syntax)

« Back to documentation index

__init__.py — source file listing (line numbers shown below)

__init__.py

00001 try:
00002     from urllib2 import urlopen, URLError
00003 except ImportError:
00004     from urllib.request import urlopen, URLError
00005 from bs4 import BeautifulSoup
00006 from os.path import join, dirname, basename
00007 from os import makedirs
00008 from errno import EEXIST
00009 from threading import Thread
00010 try:
00011     from Queue import Queue
00012 except ImportError:
00013     from queue import Queue
00014 from re import compile, sub
00015 from sys import stderr, stdout
00016 from itertools import takewhile
00017 import argparse
00018 from json import dump, load
00019 from zipfile import ZipFile
00020 from tempfile import gettempdir
00021 import warnings
00022 from distutils.version import LooseVersion
00023 
00024 from tools.flash_algo import PackFlashAlgo
00025 
00026 warnings.filterwarnings("ignore")
00027 
00028 from fuzzywuzzy import process
00029 
# URL of Keil's master index of all known PDSC (pack description) files.
RootPackURL = "http://www.keil.com/pack/index.idx"

# Generated index and alias JSON files live next to this module on disk.
LocalPackDir = dirname(__file__)
LocalPackIndex = join(LocalPackDir, "index.json")
LocalPackAliases = join(LocalPackDir, "aliases.json")
00035 
00036 
# Matches an optional scheme name followed by "://" (e.g. "http://").
# Raw string avoids the invalid-escape DeprecationWarning for "\w".
protocol_matcher = compile(r"\w*://")

def strip_protocol(url):
    """Return *url* with any leading "scheme://" prefix removed.

    :param url: The URL to strip; coerced to str first.
    :type url: str
    :rtype: str
    """
    return protocol_matcher.sub("", str(url))
00040 
00041 def largest_version(content) :
00042     return sorted([t['version'] for t in content.package.releases('release')],
00043                   reverse=True, key=lambda v: LooseVersion(v))[0]
00044 
00045 def do_queue(Class, function, interable) :
00046     q = Queue()
00047     threads = [Class(q, function) for each in range(20)]
00048     for each in threads :
00049         each.setDaemon(True)
00050         each.start()
00051     for thing in interable :
00052         q.put(thing)
00053     q.join()
00054 
class Reader(Thread):
    """Worker thread that pulls items off a queue and applies a callback.

    Runs forever; users mark it as a daemon so it dies with the process.

    :param queue: The Queue instance to consume from.
    :param func: Callable invoked once per dequeued item.
    """

    def __init__(self, queue, func):
        Thread.__init__(self)
        self.queue = queue
        self.func = func

    def run(self):
        # Loop forever; task_done() lets Queue.join() in the producer return
        # once every queued item has been processed.
        while True:
            item = self.queue.get()
            self.func(item)
            self.queue.task_done()
00065 
00066 
class Cache():
    """ The Cache object is the only relevant API object at the moment

    Constructing the Cache object does not imply any caching.
    A user of the API must explicitly call caching functions.

    :param silent: A boolean that, when True, significantly reduces the printing of this Object
    :type silent: bool
    :param no_timeouts: A boolean that, when True, disables the default connection timeout and low speed timeout for downloading things.
    :type no_timeouts: bool
    """
    def __init__(self, silent, no_timeouts):
        self.silent = silent              # suppress most progress printing
        self.counter = 0                  # items processed so far (progress display)
        self.total = 1                    # total items expected (progress display)
        self._index = {}                  # device name -> device dict (lazy; see `index`)
        self._aliases = {}                # board name -> device name (lazy; see `aliases`)
        self.urls = None                  # memoized list of PDSC URLs (see get_urls)
        self.no_timeouts = no_timeouts    # NOTE(review): stored but never read in this file — confirm
        self.data_path = gettempdir()     # root directory of the on-disk cache
00087 
00088     def display_counter (self, message) :
00089         stdout.write("{} {}/{}\r".format(message, self.counter , self.total ))
00090         stdout.flush()
00091 
    def cache_file(self, url):
        """Low level interface to caching a single file.

        The file is stored under self.data_path at a path derived from the
        URL with its protocol stripped.

        :param url: The URL to cache.
        :type url: str
        :rtype: None
        """
        if not self.silent:
            print("Caching {}...".format(url))
        dest = join(self.data_path, strip_protocol(url))
        try:
            makedirs(dirname(dest))
        except OSError as exc:
            # An already-existing directory is fine; re-raise anything else.
            if exc.errno == EEXIST:
                pass
            else:
                raise
        try:
            with open(dest, "wb+") as fd:
                fd.write(urlopen(url).read())
        except URLError as e:
            # Best effort: report the failure but keep the work queue moving.
            stderr.write(e.reason)
        # Count the attempt (success or failure) toward the progress display.
        self.counter += 1
        self.display_counter("Caching Files")
00113 
    def pdsc_to_pack(self, url):
        """Find the URL of the specified pack file described by a PDSC.

        The PDSC is assumed to be cached and is looked up in the cache by its URL.

        :param url: The url used to look up the PDSC.
        :type url: str
        :return: The url of the PACK file.
        :rtype: str
        """
        content = self.pdsc_from_cache(url)
        # The pack lives at <package url>/<vendor>.<name>.<largest version>.pack
        new_url = content.package.url.get_text()
        if not new_url.endswith("/"):
            new_url = new_url + "/"
        return (new_url + content.package.vendor.get_text() + "." +
                content.package.find('name').get_text() + "." +
                largest_version(content) + ".pack")
00131 
    def cache_pdsc_and_pack(self, url):
        """Cache a PDSC file and the PACK file it references.

        :param url: The URL of the PDSC file.
        :type url: str
        :rtype: None
        """
        self.cache_file(url)
        try:
            self.cache_file(self.pdsc_to_pack(url))
        except AttributeError:
            # The parsed file lacked the expected package/url/vendor structure.
            stderr.write("[ ERROR ] {} does not appear to be a conforming .pdsc file\n".format(url))
            # Still advance the counter: the pack download was counted in `total`.
            self.counter += 1
00139 
    def get_urls(self):
        """Extract the URLs of all known PDSC files.

        Will pull the index from the internet if it is not cached.
        The result is memoized in self.urls.

        :return: A list of all PDSC URLs
        :rtype: [str]
        """
        if not self.urls:
            try:
                root_data = self.pdsc_from_cache(RootPackURL)
            except IOError:
                root_data = self.cache_and_parse(RootPackURL)
            # Each <pdsc> element carries a base url plus a file name.
            self.urls = ["/".join([pdsc.get('url').strip("/"),
                                   pdsc.get('name').strip("/")])
                         for pdsc in root_data.find_all("pdsc")]
        return self.urls
00155 
    def _get_sectors(self, device):
        """Extract sector sizes from the device's FLM flash algorithms.

        Will return None if there is no algorithm or the PDSC URL is
        formatted incorrectly.

        :param device: A device dict containing 'pack_file' and 'algorithm'.
        :return: A list of tuples of sector start and size, sorted by start.
        :rtype: [list]
        """
        try:
            pack = self.pack_from_cache(device)
            ret = []
            for filename in device['algorithm'].keys():
                try:
                    flm = pack.open(filename)
                    flash_alg = PackFlashAlgo(flm.read())
                    # Sector offsets are relative to the algorithm's flash start.
                    sectors = [(flash_alg.flash_start + offset, size)
                               for offset, size in flash_alg.sector_sizes]
                    ret.extend(sectors)
                except Exception:
                    # Best effort: skip algorithm files that fail to parse.
                    pass
            ret.sort(key=lambda sector: sector[0])
            return ret
        except Exception:
            # Missing pack / malformed device entry: signal "unknown" to caller.
            return None
00180 
00181     def _extract_dict(self, device, filename, pack) :
00182         to_ret = dict(pdsc_file=filename, pack_file=pack)
00183         try : to_ret["memory"] = dict([(m["id"], dict(start=m["start"],
00184                                                       size=m["size"]))
00185                                        for m in device("memory")])
00186         except (KeyError, TypeError, IndexError) as e:
00187             try : to_ret["memory"] = dict([(m["name"], dict(start=m["start"],
00188                                                           size=m["size"]))
00189                                            for m in device("memory")])
00190             except (KeyError, TypeError, IndexError) as e : pass
00191         try: algorithms = device("algorithm")
00192         except:
00193             try: algorithms = device.parent("algorithm")
00194             except: pass
00195         else:
00196             if not algorithms:
00197                 try: algorithms = device.parent("algorithm")
00198                 except: pass
00199         try : to_ret["algorithm"] = dict([(algo.get("name").replace('\\','/'),
00200                                            dict(start=algo["start"],
00201                                                 size=algo["size"],
00202                                                 ramstart=algo.get("ramstart",None),
00203                                                 ramsize=algo.get("ramsize",None),
00204                                                 default=algo.get("default",1)))
00205                                        for algo in algorithms])
00206         except (KeyError, TypeError, IndexError) as e: pass
00207         try: to_ret["debug"] = device.parent.parent.debug["svd"]
00208         except (KeyError, TypeError, IndexError) as e : pass
00209         try: to_ret["debug"] = device.parent.debug["svd"]
00210         except (KeyError, TypeError, IndexError) as e : pass
00211         try: to_ret["debug"] = device.debug["svd"]
00212         except (KeyError, TypeError, IndexError) as e : pass
00213 
00214         to_ret["compile"] = {}
00215         try: compile_l1 = device.parent("compile")
00216         except (KeyError, TypeError, IndexError) as e : compile_l1 = []
00217         try: compile_l2 = device.parent.parent("compile")
00218         except (KeyError, TypeError, IndexError) as e : compile_l2 = []
00219         compile = compile_l2 + compile_l1
00220         for c in compile:
00221             try: to_ret["compile"]["header"] = c["header"]
00222             except (KeyError, TypeError, IndexError) as e : pass
00223             try: to_ret["compile"]["define"] =  c["define"]
00224             except (KeyError, TypeError, IndexError) as e : pass
00225 
00226         try: to_ret["core"] = device.parent.processor['dcore']
00227         except (KeyError, TypeError, IndexError) as e : pass
00228         try: to_ret["core"] = device.parent.parent.processor['dcore']
00229         except (KeyError, TypeError, IndexError) as e : pass
00230 
00231         to_ret["processor"] = {}
00232         try: proc_l1 = device("processor")
00233         except (KeyError, TypeError, IndexError) as e: proc_l1 = []
00234         try: proc_l2 = device.parent("processor")
00235         except (KeyError, TypeError, IndexError) as e: proc_l2 = []
00236         try: proc_l3 = device.parent.parent("processor")
00237         except (KeyError, TypeError, IndexError) as e: proc_l3 = []
00238         proc = proc_l3 + proc_l2 + proc_l1
00239         for p in proc:
00240             try: to_ret["processor"]["fpu"] = p['dfpu']
00241             except (KeyError, TypeError, IndexError) as e: pass
00242             try: to_ret["processor"]["endianness"] = p['dendian']
00243             except (KeyError, TypeError, IndexError) as e: pass
00244             try: to_ret["processor"]["clock"] = p['dclock']
00245             except (KeyError, TypeError, IndexError) as e: pass
00246 
00247         try: to_ret["vendor"] = device.parent['dvendor']
00248         except (KeyError, TypeError, IndexError) as e: pass
00249         try: to_ret["vendor"] = device.parent.parent['dvendor']
00250         except (KeyError, TypeError, IndexError) as e: pass
00251 
00252         if not to_ret["processor"]:
00253             del to_ret["processor"]
00254 
00255         if not to_ret["compile"]:
00256             del to_ret["compile"]
00257 
00258         to_ret['debug-interface'] = []
00259         to_ret['sectors'] = self._get_sectors (to_ret)
00260 
00261         return to_ret
00262 
    def _generate_index_helper(self, d):
        """Worker body: index every <device> found in one cached PDSC file.

        :param d: The URL of the PDSC file to process.
        :type d: str
        """
        try:
            pack = self.pdsc_to_pack(d)
            # Key each device by its 'dname' attribute.
            self._index.update(dict([(dev['dname'],
                                      self._extract_dict(dev, d, pack))
                                     for dev in
                                     (self.pdsc_from_cache(d)("device"))]))
        except AttributeError as e:
            # The file did not have the structure a PDSC is expected to have.
            stderr.write("[ ERROR ] file {}\n".format(d))
            print(e)
        self.counter += 1
        self.display_counter("Generating Index")
00273 
00274     def _generate_aliases_helper(self, d) :
00275         try :
00276             mydict = []
00277             for dev in self.pdsc_from_cache (d)("board"):
00278                 try :
00279                     mydict.append((dev['name'], dev.mounteddevice['dname']))
00280                 except (KeyError, TypeError, IndexError) as e:
00281                     pass
00282             self._aliases .update(dict(mydict))
00283         except (AttributeError, TypeError) as e :
00284             pass
00285         self.counter  += 1
00286         self.display_counter ("Scanning for Aliases")
00287 
00288     def get_flash_algorthim_binary (self, device_name, all=False) :
00289         """Retrieve the flash algorithm file for a particular part.
00290 
00291         Assumes that both the PDSC and the PACK file associated with that part are in the cache.
00292 
00293         :param device_name: The exact name of a device
00294         :param all: Return an iterator of all flash algos for this device
00295         :type device_name: str
00296         :return: A file-like object that, when read, is the ELF file that describes the flashing algorithm
00297         :return: A file-like object that, when read, is the ELF file that describes the flashing algorithm.
00298                  When "all" is set to True then an iterator for file-like objects is returned
00299         :rtype: ZipExtFile or ZipExtFile iterator if all is True
00300         """
00301         device = self.index [device_name]
00302         pack = self.pack_from_cache (device)
00303         algo_itr = (pack.open(path) for path in device['algorithm'].keys())
00304         return algo_itr if all else algo_itr.next()
00305 
    def get_svd_file(self, device_name):
        """Retrieve the CMSIS SVD file for a particular part.

        Assumes that both the PDSC and the PACK file associated with that
        part are in the cache.

        :param device_name: The exact name of a device
        :type device_name: str
        :return: A file-like object that, when read, is the SVD file that
                 describes the device's registers.
        :rtype: ZipExtFile
        """
        device = self.index[device_name]
        pack = self.pack_from_cache(device)
        return pack.open(device['debug'])
00319 
00320     def generate_index(self) :
00321         self._index  = {}
00322         self.counter  = 0
00323         do_queue(Reader, self._generate_index_helper , self.get_urls ())
00324         with open(LocalPackIndex, "wb+") as out:
00325             self._index ["version"] = "0.1.0"
00326             dump(self._index , out)
00327         stdout.write("\n")
00328 
00329     def generate_aliases(self) :
00330         self._aliases  = {}
00331         self.counter  = 0
00332         do_queue(Reader, self._generate_aliases_helper , self.get_urls ())
00333         with open(LocalPackAliases, "wb+") as out:
00334             dump(self._aliases , out)
00335         stdout.write("\n")
00336 
00337     def find_device(self, match) :
00338         choices = process.extract(match, self.index .keys(), limit=len(self.index ))
00339         choices = sorted([(v, k) for k, v in choices], reverse=True)
00340         if choices : choices = list(takewhile(lambda t: t[0] == choices[0][0], choices))
00341         return [(v, self.index [v]) for k,v in choices]
00342 
00343     def dump_index_to_file(self, file) :
00344         with open(file, "wb+") as out:
00345             dump(self.index , out)
00346 
    @property
    def index(self):
        """An index of most of the important data in all cached PDSC files.

        Lazily loaded from LocalPackIndex on first access and memoized in
        self._index.

        :Example:

        >>> from ArmPackManager import Cache
        >>> a = Cache()
        >>> a.index["LPC1768"]
        {u'algorithm': {u'RAMsize': u'0x0FE0',
                u'RAMstart': u'0x10000000',
                u'name': u'Flash/LPC_IAP_512.FLM',
                u'size': u'0x80000',
                u'start': u'0x00000000'},
         u'compile': [u'Device/Include/LPC17xx.h', u'LPC175x_6x'],
         u'debug': u'SVD/LPC176x5x.svd',
         u'pdsc_file': u'http://www.keil.com/pack/Keil.LPC1700_DFP.pdsc',
         u'memory': {u'IRAM1': {u'size': u'0x8000', u'start': u'0x10000000'},
                     u'IRAM2': {u'size': u'0x8000', u'start': u'0x2007C000'},
                     u'IROM1': {u'size': u'0x80000', u'start': u'0x00000000'}}}


        """
        if not self._index:
            with open(LocalPackIndex) as i:
                self._index = load(i)
        return self._index
    @property
    def aliases(self):
        """A mapping from board names to the device names mounted on them.

        Lazily loaded from LocalPackAliases on first access and memoized in
        self._aliases.  (The previous docstring here was copy-pasted from
        `index` and described the wrong property.)

        :Example:

        >>> from ArmPackManager import Cache
        >>> a = Cache()
        >>> a.aliases["<board name>"]  # -> the mounted device's 'dname'
        """
        if not self._aliases:
            with open(LocalPackAliases) as i:
                self._aliases = load(i)
        return self._aliases
00401 
    def cache_everything(self):
        """Cache every PACK and PDSC file known.

        Generates an index afterwards.

        .. note:: This process may use 4GB of drive space and take upwards of 10 minutes to complete.
        """
        self.cache_pack_list(self.get_urls())
        self.generate_index()
        self.generate_aliases()
00412 
    def cache_descriptors(self):
        """Cache every PDSC file known (descriptors only, no PACKs).

        Generates an index afterwards.

        .. note:: This process may use 11MB of drive space and take upwards of 1 minute.
        """
        self.cache_descriptor_list(self.get_urls())
        self.generate_index()
        self.generate_aliases()
00423 
    def cache_descriptor_list(self, list):
        """Cache a list of PDSC files.

        :param list: URLs of PDSC files to cache.  (Parameter name shadows
            the builtin; kept for interface compatibility.)
        :type list: [str]
        """
        # One download per URL drives the progress display.
        self.total = len(list)
        self.display_counter("Caching Files")
        do_queue(Reader, self.cache_file, list)
        stdout.write("\n")
00434 
    def cache_pack_list(self, list):
        """Cache a list of PACK files, referenced by their PDSC URL

        :param list: URLs of PDSC files to cache.  (Parameter name shadows
            the builtin; kept for interface compatibility.)
        :type list: [str]
        """
        # Two downloads per URL (the PDSC and its PACK), hence len * 2.
        self.total = len(list) * 2
        self.display_counter("Caching Files")
        do_queue(Reader, self.cache_pdsc_and_pack, list)
        stdout.write("\n")
00445 
    def pdsc_from_cache(self, url):
        """Low level interface for extracting a PDSC file from the cache.

        Assumes that the file specified is a PDSC file and is in the cache.

        :param url: The URL of a PDSC file.
        :type url: str
        :return: A parsed representation of the PDSC file.
        :rtype: BeautifulSoup
        """
        dest = join(self.data_path, strip_protocol(url))
        # NOTE(review): "html.parser" lowercases tag names, which the rest of
        # this module relies on when querying e.g. "pdsc"/"device" tags.
        with open(dest, "r") as fd:
            return BeautifulSoup(fd, "html.parser")
00459 
    def pack_from_cache(self, device):
        """Low level interface for extracting a PACK file from the cache.

        Assumes that the PACK file referenced by the device is in the cache.

        :param device: A device dict containing a 'pack_file' URL entry.
        :type device: dict
        :return: A parsed representation of the PACK file.
        :rtype: ZipFile
        """
        return ZipFile(join(self.data_path,
                            strip_protocol(device['pack_file'])))
00472 
00473     def gen_dict_from_cache() :
00474         pdsc_files = pdsc_from_cache(RootPackUrl)
00475 
    def cache_and_parse(self, url):
        """A low level shortcut that Caches and Parses a PDSC file.

        :param url: The URL of the PDSC file.
        :type url: str
        :return: A parsed representation of the PDSC file.
        :rtype: BeautifulSoup
        """
        self.cache_file(url)
        return self.pdsc_from_cache(url)