the other jimmy / mbed-sdk-tools

Fork of mbed-sdk-tools by mbed official

__init__.py

from urllib2 import urlopen, URLError
from bs4 import BeautifulSoup
from os.path import join, dirname, basename
from os import makedirs
from errno import EEXIST
from threading import Thread
from Queue import Queue
from re import compile, sub
from sys import stderr, stdout
from fuzzywuzzy import process
from itertools import takewhile
import argparse
from json import dump, load
from zipfile import ZipFile
from tempfile import gettempdir

RootPackURL = "http://www.keil.com/pack/index.idx"

LocalPackDir = dirname(__file__)
LocalPackIndex = join(LocalPackDir, "index.json")
LocalPackAliases = join(LocalPackDir, "aliases.json")


# Matches the protocol prefix of a URL, e.g. "http://" or "https://"
protocol_matcher = compile(r"\w*://")
def strip_protocol(url) :
    """Remove the protocol prefix from a URL, leaving a path-friendly name."""
    return protocol_matcher.sub("", str(url))

def largest_version(content) :
    """Return the latest release version listed in a parsed PDSC (versions are compared as strings)."""
    return sorted([t['version'] for t in content.package.releases('release')], reverse=True)[0]

def do_queue(Class, function, iterable) :
    """Apply function to every item of iterable using 20 worker threads of type Class."""
    q = Queue()
    threads = [Class(q, function) for each in range(20)]
    for each in threads :
        each.setDaemon(True)
        each.start()
    for thing in iterable :
        q.put(thing)
    q.join()

class Reader (Thread) :
    """Worker thread that repeatedly applies func to items taken from queue."""
    def __init__(self, queue, func) :
        Thread.__init__(self)
        self.queue = queue
        self.func = func
    def run(self) :
        while True :
            url = self.queue.get()
            self.func(url)
            self.queue.task_done()


class Cache (object) :
    """The Cache object is the only relevant API object at the moment.

    Constructing a Cache object does not cache anything;
    a user of the API must explicitly call the caching functions.

    :param silent: When True, significantly reduces the amount of output this object prints
    :type silent: bool
    :param no_timeouts: When True, disables the default connection timeout and low-speed timeout for downloads
    :type no_timeouts: bool
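
    :Example: (an illustrative sketch, not a doctest; it assumes network access and
        enough free space in the system temporary directory)

    >>> from ArmPackManager import Cache
    >>> cache = Cache(True, True)   # silent, with download timeouts disabled
    >>> cache.cache_descriptors()   # explicitly cache every known PDSC file
    >>> device = cache.index["LPC1768"]   # the generated index can now be queried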
    """
    def __init__ (self, silent, no_timeouts) :
        self.silent = silent
        self.counter = 0
        self.total = 1
        self._index = {}
        self._aliases = {}
        self.urls = None
        self.no_timeouts = no_timeouts
        self.data_path = gettempdir()

    def display_counter (self, message) :
        stdout.write("{} {}/{}\r".format(message, self.counter, self.total))
        stdout.flush()

    def cache_file (self, url) :
        """Low level interface to caching a single file.

        :param url: The URL to cache.
        :type url: str
        :rtype: None
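
        :Example: (an illustrative sketch, not a doctest; it assumes network access and
            uses the PDSC URL that also appears in the index example below)

        >>> from ArmPackManager import Cache
        >>> cache = Cache(True, True)
        >>> cache.cache_file("http://www.keil.com/pack/Keil.LPC1700_DFP.pdsc")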
        """
        if not self.silent : print("Caching {}...".format(url))
        dest = join(self.data_path, strip_protocol(url))
        try :
            makedirs(dirname(dest))
        except OSError as exc :
            if exc.errno == EEXIST : pass
            else : raise
        try:
            with open(dest, "wb+") as fd :
                fd.write(urlopen(url).read())
        except URLError as e:
            stderr.write("{}\n".format(e.reason))
        self.counter += 1
        self.display_counter("Caching Files")

    def pdsc_to_pack (self, url) :
        """Find the URL of the PACK file described by a PDSC.

        The PDSC is assumed to be cached and is looked up in the cache by its URL.

        :param url: The URL used to look up the PDSC.
        :type url: str
        :return: The URL of the PACK file.
        :rtype: str
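
        :Example: (an illustrative sketch, not a doctest; it assumes the PDSC file has
            already been cached, for instance with cache_file)

        >>> from ArmPackManager import Cache
        >>> cache = Cache(True, True)
        >>> cache.cache_file("http://www.keil.com/pack/Keil.LPC1700_DFP.pdsc")
        >>> pack_url = cache.pdsc_to_pack("http://www.keil.com/pack/Keil.LPC1700_DFP.pdsc")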
        """
        content = self.pdsc_from_cache(url)
        new_url = content.package.url.get_text()
        if not new_url.endswith("/") :
            new_url = new_url + "/"
        return (new_url + content.package.vendor.get_text() + "." +
                content.package.find('name').get_text() + "." +
                largest_version(content) + ".pack")

    def cache_pdsc_and_pack (self, url) :
        """Cache a PDSC file and the PACK file it describes."""
        self.cache_file(url)
        try :
            self.cache_file(self.pdsc_to_pack(url))
        except AttributeError :
            stderr.write("[ ERROR ] {} does not appear to be a conforming .pdsc file\n".format(url))
            self.counter += 1

    def get_urls (self):
        """Extract the URLs of all known PDSC files.

        Will pull the root index from the internet if it is not cached.

        :return: A list of all PDSC URLs
        :rtype: [str]
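
        :Example: (an illustrative sketch, not a doctest; the first call may download
            the root index from RootPackURL)

        >>> from ArmPackManager import Cache
        >>> cache = Cache(True, True)
        >>> urls = cache.get_urls()
        >>> first = urls[0]   # a full URL to a single PDSC file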
        """
        if not self.urls :
            try : root_data = self.pdsc_from_cache(RootPackURL)
            except IOError : root_data = self.cache_and_parse(RootPackURL)
            self.urls = ["/".join([pdsc.get('url').strip("/"),
                                   pdsc.get('name').strip("/")])
                         for pdsc in root_data.find_all("pdsc")]
        return self.urls

    def _extract_dict(self, device, filename, pack) :
        """Build the index entry for a single parsed <device> element."""
        to_ret = dict(pdsc_file=filename, pack_file=pack)
        try : to_ret["memory"] = dict([(m["id"], dict(start=m["start"],
                                                      size=m["size"]))
                                       for m in device("memory")])
        except (KeyError, TypeError, IndexError) as e : pass
        try: algorithms = device("algorithm")
        except:
            try: algorithms = device.parent("algorithm")
            except: pass
        else:
            if not algorithms:
                try: algorithms = device.parent("algorithm")
                except: pass
        try : to_ret["algorithm"] = dict([(algo.get("name").replace('\\','/'),
                                           dict(start=algo["start"],
                                                size=algo["size"],
                                                ramstart=algo.get("ramstart",None),
                                                ramsize=algo.get("ramsize",None),
                                                default=algo.get("default",1)))
                                          for algo in algorithms])
        except (KeyError, TypeError, IndexError) as e: pass
        try: to_ret["debug"] = device.parent.parent.debug["svd"]
        except (KeyError, TypeError, IndexError) as e : pass
        try: to_ret["debug"] = device.parent.debug["svd"]
        except (KeyError, TypeError, IndexError) as e : pass
        try: to_ret["debug"] = device.debug["svd"]
        except (KeyError, TypeError, IndexError) as e : pass

        to_ret["compile"] = {}
        try: compile_l1 = device.parent("compile")
        except (KeyError, TypeError, IndexError) as e : compile_l1 = []
        try: compile_l2 = device.parent.parent("compile")
        except (KeyError, TypeError, IndexError) as e : compile_l2 = []
        compile = compile_l2 + compile_l1
        for c in compile:
            try: to_ret["compile"]["header"] = c["header"]
            except (KeyError, TypeError, IndexError) as e : pass
            try: to_ret["compile"]["define"] = c["define"]
            except (KeyError, TypeError, IndexError) as e : pass

        try: to_ret["core"] = device.parent.processor['dcore']
        except (KeyError, TypeError, IndexError) as e : pass
        try: to_ret["core"] = device.parent.parent.processor['dcore']
        except (KeyError, TypeError, IndexError) as e : pass

        to_ret["processor"] = {}
        try: proc_l1 = device("processor")
        except (KeyError, TypeError, IndexError) as e: proc_l1 = []
        try: proc_l2 = device.parent("processor")
        except (KeyError, TypeError, IndexError) as e: proc_l2 = []
        try: proc_l3 = device.parent.parent("processor")
        except (KeyError, TypeError, IndexError) as e: proc_l3 = []
        proc = proc_l3 + proc_l2 + proc_l1
        for p in proc:
            try: to_ret["processor"]["fpu"] = p['dfpu']
            except (KeyError, TypeError, IndexError) as e: pass
            try: to_ret["processor"]["endianness"] = p['dendian']
            except (KeyError, TypeError, IndexError) as e: pass
            try: to_ret["processor"]["clock"] = p['dclock']
            except (KeyError, TypeError, IndexError) as e: pass

        try: to_ret["vendor"] = device.parent['dvendor']
        except (KeyError, TypeError, IndexError) as e: pass
        try: to_ret["vendor"] = device.parent.parent['dvendor']
        except (KeyError, TypeError, IndexError) as e: pass

        if not to_ret["processor"]:
            del to_ret["processor"]

        if not to_ret["compile"]:
            del to_ret["compile"]

        to_ret['debug-interface'] = []

        return to_ret

    def _generate_index_helper(self, d) :
        try :
            pack = self.pdsc_to_pack(d)
            self._index.update(dict([(dev['dname'], self._extract_dict(dev, d, pack)) for dev in
                                     (self.pdsc_from_cache(d)("device"))]))
        except AttributeError as e :
            stderr.write("[ ERROR ] file {}\n".format(d))
            print(e)
        self.counter += 1
        self.display_counter("Generating Index")

    def _generate_aliases_helper(self, d) :
        try :
            mydict = []
            for dev in self.pdsc_from_cache(d)("board"):
                try :
                    mydict.append((dev['name'], dev.mounteddevice['dname']))
                except (KeyError, TypeError, IndexError) as e:
                    pass
            self._aliases.update(dict(mydict))
        except (AttributeError, TypeError) as e :
            pass
        self.counter += 1
        self.display_counter("Scanning for Aliases")

    def get_flash_algorthim_binary (self, device_name) :
        """Retrieve the flash algorithm file for a particular part.

        Assumes that both the PDSC and the PACK file associated with that part are in the cache.

        :param device_name: The exact name of a device
        :type device_name: str
        :return: A file-like object that, when read, is the ELF file that describes the flashing algorithm
        :rtype: ZipExtFile
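
        :Example: (an illustrative sketch, not a doctest; it assumes the LPC1768 entry
            shown in the index example below and that its PACK file is already cached)

        >>> from ArmPackManager import Cache
        >>> cache = Cache(True, True)
        >>> flm = cache.get_flash_algorthim_binary("LPC1768")
        >>> blob = flm.read()   # raw contents of the flash algorithm (an ELF file)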
        """
        device = self.index[device_name]
        pack = self.pack_from_cache(device)
        return pack.open(device['algorithm']['file'])

    def get_svd_file (self, device_name) :
        """Retrieve the SVD file for a particular part.

        Assumes that both the PDSC and the PACK file associated with that part are in the cache.

        :param device_name: The exact name of a device
        :type device_name: str
        :return: A file-like object that, when read, is the SVD file describing the part
        :rtype: ZipExtFile
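
        :Example: (an illustrative sketch, not a doctest; it assumes the LPC1768 entry
            shown in the index example below and that its PACK file is already cached)

        >>> from ArmPackManager import Cache
        >>> cache = Cache(True, True)
        >>> svd = cache.get_svd_file("LPC1768")
        >>> xml = svd.read()   # contents of the SVD description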
        """
        device = self.index[device_name]
        pack = self.pack_from_cache(device)
        return pack.open(device['debug'])

    def generate_index(self) :
        """Generate the device index from all cached PDSC files and write it to LocalPackIndex."""
        self._index = {}
        self.counter = 0
        do_queue(Reader, self._generate_index_helper, self.get_urls())
        with open(LocalPackIndex, "wb+") as out:
            self._index["version"] = "0.1.0"
            dump(self._index, out)
        stdout.write("\n")

    def generate_aliases(self) :
        """Generate the board-name aliases from all cached PDSC files and write them to LocalPackAliases."""
        self._aliases = {}
        self.counter = 0
        do_queue(Reader, self._generate_aliases_helper, self.get_urls())
        with open(LocalPackAliases, "wb+") as out:
            dump(self._aliases, out)
        stdout.write("\n")

    def find_device(self, match) :
        """Return (device name, index entry) pairs for the closest fuzzy matches to match."""
        choices = process.extract(match, self.index.keys(), limit=len(self.index))
        choices = sorted([(v, k) for k, v in choices], reverse=True)
        if choices : choices = list(takewhile(lambda t: t[0] == choices[0][0], choices))
        return [(v, self.index[v]) for k, v in choices]

    def dump_index_to_file(self, file) :
        """Write the current device index to file as JSON."""
        with open(file, "wb+") as out:
            dump(self.index, out)

    @property
    def index (self) :
        """An index of most of the important data in all cached PDSC files.

        :Example:

        >>> from ArmPackManager import Cache
        >>> a = Cache(True, True)
        >>> a.index["LPC1768"]
        {u'algorithm': {u'RAMsize': u'0x0FE0',
                u'RAMstart': u'0x10000000',
                u'name': u'Flash/LPC_IAP_512.FLM',
                u'size': u'0x80000',
                u'start': u'0x00000000'},
         u'compile': [u'Device/Include/LPC17xx.h', u'LPC175x_6x'],
         u'debug': u'SVD/LPC176x5x.svd',
         u'pdsc_file': u'http://www.keil.com/pack/Keil.LPC1700_DFP.pdsc',
         u'memory': {u'IRAM1': {u'size': u'0x8000', u'start': u'0x10000000'},
                     u'IRAM2': {u'size': u'0x8000', u'start': u'0x2007C000'},
                     u'IROM1': {u'size': u'0x80000', u'start': u'0x00000000'}}}


        """
        if not self._index :
            with open(LocalPackIndex) as i :
                self._index = load(i)
        return self._index
    @property
    def aliases (self) :
        """A mapping from board names to the names of the devices mounted on them,
        built from the board elements of all cached PDSC files.
        """
        if not self._aliases :
            with open(LocalPackAliases) as i :
                self._aliases = load(i)
        return self._aliases

    def cache_everything (self) :
        """Cache every PACK and PDSC file known.

        Generates an index afterwards.

        .. note:: This process may use 4GB of drive space and take upwards of 10 minutes to complete.
        """
        self.cache_pack_list(self.get_urls())
        self.generate_index()
        self.generate_aliases()

    def cache_descriptors (self) :
        """Cache every PDSC file known.

        Generates an index afterwards.

        .. note:: This process may use 11MB of drive space and take upwards of 1 minute.
        """
        self.cache_descriptor_list(self.get_urls())
        self.generate_index()
        self.generate_aliases()

    def cache_descriptor_list (self, list) :
        """Cache a list of PDSC files.

        :param list: URLs of PDSC files to cache.
        :type list: [str]
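
        :Example: (an illustrative sketch, not a doctest; it caches only the first ten
            known PDSC files rather than every descriptor)

        >>> from ArmPackManager import Cache
        >>> cache = Cache(True, True)
        >>> cache.cache_descriptor_list(cache.get_urls()[:10])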
        """
        self.total = len(list)
        self.display_counter("Caching Files")
        do_queue(Reader, self.cache_file, list)
        stdout.write("\n")

    def cache_pack_list (self, list) :
        """Cache a list of PACK files, referenced by their PDSC URLs.

        :param list: URLs of PDSC files to cache.
        :type list: [str]
        """
        self.total = len(list) * 2
        self.display_counter("Caching Files")
        do_queue(Reader, self.cache_pdsc_and_pack, list)
        stdout.write("\n")

    def pdsc_from_cache (self, url) :
        """Low level interface for extracting a PDSC file from the cache.

        Assumes that the file specified is a PDSC file and is in the cache.

        :param url: The URL of a PDSC file.
        :type url: str
        :return: A parsed representation of the PDSC file.
        :rtype: BeautifulSoup
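
        :Example: (an illustrative sketch, not a doctest; it assumes the PDSC file was
            cached earlier, for instance with cache_file)

        >>> from ArmPackManager import Cache
        >>> cache = Cache(True, True)
        >>> parsed = cache.pdsc_from_cache("http://www.keil.com/pack/Keil.LPC1700_DFP.pdsc")
        >>> name = parsed.package.find('name').get_text()   # name of the pack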
        """
        dest = join(self.data_path, strip_protocol(url))
        with open(dest, "r") as fd :
            return BeautifulSoup(fd, "html.parser")

    def pack_from_cache (self, device) :
        """Low level interface for extracting a PACK file from the cache.

        Assumes that the PACK file referenced by the device's index entry is in the cache.

        :param device: The index entry of a device, containing a 'pack_file' URL.
        :type device: dict
        :return: A parsed representation of the PACK file.
        :rtype: ZipFile
        """
        return ZipFile(join(self.data_path,
                            strip_protocol(device['pack_file'])))

    def gen_dict_from_cache(self) :
        # Parses the root index from the cache; the result is currently unused.
        pdsc_files = self.pdsc_from_cache(RootPackURL)

    def cache_and_parse (self, url) :
        """A low level shortcut that caches and parses a PDSC file.

        :param url: The URL of the PDSC file.
        :type url: str
        :return: A parsed representation of the PDSC file.
        :rtype: BeautifulSoup
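
        :Example: (an illustrative sketch, not a doctest; it downloads and parses the
            root package index)

        >>> from ArmPackManager import Cache, RootPackURL
        >>> cache = Cache(True, True)
        >>> root = cache.cache_and_parse(RootPackURL)
        >>> pdsc_tags = root.find_all("pdsc")   # one tag per known PDSC file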
        """
        self.cache_file(url)
        return self.pdsc_from_cache(url)