Rizky Ardi Maulana / mbed-os
Embed: (wiki syntax)

« Back to documentation index

Show/hide line numbers __init__.py Source File

__init__.py

00001 from pycurl import Curl
00002 from bs4 import BeautifulSoup
00003 from os.path import join, dirname, basename
00004 from os import makedirs
00005 from errno import EEXIST
00006 from threading import Thread
00007 from Queue import Queue
00008 from re import compile, sub
00009 from sys import stderr, stdout
00010 from fuzzywuzzy import process
00011 from itertools import takewhile
00012 import argparse
00013 from json import dump, load
00014 from zipfile import ZipFile
00015 from tempfile import gettempdir
00016 
# URL of the Keil master index that lists every known PDSC (pack description) file.
RootPackURL = "http://www.keil.com/pack/index.idx"

# Files shipped alongside this module: a pre-generated device index and a
# board-name -> device-name alias table (written by generate_index/generate_aliases).
LocalPackDir = dirname(__file__)
LocalPackIndex = join(LocalPackDir, "index.json")
LocalPackAliases = join(LocalPackDir, "aliases.json")
00022 
00023 
# Matches a leading URL scheme such as "http://" or "ftp://".
# Raw string: "\w" in a plain literal is an invalid escape sequence
# (DeprecationWarning since 3.6, SyntaxError on current Python).
protocol_matcher = compile(r"\w*://")
def strip_protocol(url) :
    """Remove the protocol/scheme prefix (e.g. "http://") from a URL.

    :param url: The URL to strip; coerced to str first.
    :type url: str
    :return: The URL without its scheme, suitable for use as a cache path.
    :rtype: str
    """
    return protocol_matcher.sub("", str(url))
00027 
def largest_version(content) :
    """Return the lexicographically largest release version in a parsed PDSC.

    :param content: A parsed PDSC file (BeautifulSoup document).
    :return: The greatest "version" attribute among the <release> elements.
    :rtype: str
    """
    # Descending sort + first element (rather than max()) so that an empty
    # release list raises IndexError exactly as callers expect.
    versions = sorted((release['version']
                       for release in content.package.releases('release')),
                      reverse=True)
    return versions[0]
00030 
def do_queue(Class, function, interable) :
    """Process every item of an iterable with a pool of 20 worker threads.

    :param Class: A Thread subclass constructed as ``Class(queue, function)``
        (e.g. Reader or Cacher) whose run() drains the queue forever.
    :param function: The callback each worker applies to every queued item.
    :param interable: The items to enqueue. (Parameter name kept, typo and
        all, so existing keyword callers are not broken.)
    :rtype: None
    """
    q = Queue()
    threads = [Class(q, function) for _ in range(20)]
    for worker in threads :
        # Daemon threads: workers loop forever, so they must not keep the
        # interpreter alive after q.join() returns. (`daemon` attribute
        # replaces the deprecated setDaemon().)
        worker.daemon = True
        worker.start()
    for thing in interable :
        q.put(thing)
    q.join()
00040 
class Reader (Thread) :
    """Worker thread that applies a callback to every item pulled from a queue.

    Intended to be run as a daemon: run() never returns.
    """
    def __init__(self, queue, func) :
        Thread.__init__(self)
        self.queue = queue
        self.func = func

    def run(self) :
        # Drain forever; task_done() lets queue.join() callers unblock.
        while True :
            item = self.queue.get()
            self.func(item)
            self.queue.task_done()
00051 
class Cacher (Thread) :
    """Worker thread that downloads every URL pulled from a queue.

    Each worker owns one Curl handle, created once and reused for all of its
    downloads, configured to follow HTTP redirects. The callback is invoked
    as ``func(curl, url)``. Intended to be run as a daemon: run() never returns.
    """
    def __init__(self, queue, func) :
        Thread.__init__(self)
        self.queue = queue
        self.func = func
        # One handle per worker, shared across every URL this thread handles.
        self.curl = Curl()
        self.curl.setopt(self.curl.FOLLOWLOCATION, True)

    def run(self) :
        while True :
            target = self.queue.get()
            self.func(self.curl, target)
            self.queue.task_done()
00064 
00065 
class Cache () :
    """ The Cache object is the only relevant API object at the moment

    Constructing the Cache object does not imply any caching.
    A user of the API must explicitly call caching functions.

    :param silent: A boolean that, when True, significantly reduces the printing of this Object
    :type silent: bool
    :param no_timeouts: A boolean that, when True, disables the default connection timeout and low speed timeout for downloading things.
    :type no_timeouts: bool
    """
    def __init__(self, silent, no_timeouts) :
        self.silent = silent
        self.counter = 0             # items processed so far, for the progress display
        self.total = 1               # expected item count, for the progress display
        self._index = {}             # device index; lazily loaded via the `index` property
        self._aliases = {}           # board aliases; lazily loaded via the `aliases` property
        self.urls = None             # memoized list of PDSC URLs; see get_urls()
        self.no_timeouts = no_timeouts
        self.data_path = gettempdir()  # root directory for every cached download

    def display_counter(self, message) :
        """Overwrite the current terminal line with a progress counter."""
        stdout.write("{} {}/{}\r".format(message, self.counter, self.total))
        stdout.flush()

    def cache_file(self, curl, url) :
        """Low level interface to caching a single file.

        :param curl: The user is responsible for providing a curl.Curl object as the curl parameter.
        :type curl: curl.Curl
        :param url: The URL to cache.
        :type url: str
        :rtype: None
        """
        if not self.silent : print("Caching {}...".format(url))
        dest = join(self.data_path, strip_protocol(url))
        try :
            makedirs(dirname(dest))
        except OSError as exc :
            # An already-existing directory is expected on repeat runs;
            # anything else (permissions, disk full, ...) is a real error.
            if exc.errno != EEXIST :
                raise
        with open(dest, "wb+") as fd :
            curl.setopt(curl.URL, url)
            curl.setopt(curl.FOLLOWLOCATION, True)
            curl.setopt(curl.WRITEDATA, fd)
            if not self.no_timeouts :
                # Give up quickly on dead or glacial mirrors
                # (below 50 KiB/s for 2 seconds).
                curl.setopt(curl.CONNECTTIMEOUT, 2)
                curl.setopt(curl.LOW_SPEED_LIMIT, 50 * 1024)
                curl.setopt(curl.LOW_SPEED_TIME, 2)
            try :
                curl.perform()
            except Exception as e :
                # Best effort: report the failure but keep the queue draining.
                stderr.write("[ ERROR ] file {} did not download {}\n".format(url, str(e)))
        self.counter += 1
        self.display_counter("Caching Files")

    def pdsc_to_pack(self, url) :
        """Find the URL of the specified pack file described by a PDSC.

        The PDSC is assumed to be cached and is looked up in the cache by its URL.

        :param url: The url used to look up the PDSC.
        :type url: str
        :return: The url of the PACK file.
        :rtype: str
        """
        content = self.pdsc_from_cache(url)
        new_url = content.package.url.get_text()
        if not new_url.endswith("/") :
            new_url = new_url + "/"
        # PACK file name convention: <vendor>.<name>.<version>.pack
        return (new_url + content.package.vendor.get_text() + "." +
                content.package.find('name').get_text() + "." +
                largest_version(content) + ".pack")

    def cache_pdsc_and_pack(self, curl, url) :
        """Cache a PDSC file and then the PACK file it describes.

        :param curl: A curl.Curl handle to download with.
        :param url: The URL of the PDSC file.
        :type url: str
        :rtype: None
        """
        self.cache_file(curl, url)
        try :
            self.cache_file(curl, self.pdsc_to_pack(url))
        except AttributeError :
            # The PDSC parsed, but lacked the elements pdsc_to_pack needs.
            stderr.write("[ ERROR ] {} does not appear to be a conforming .pdsc file\n".format(url))
            # Still count it so the two-per-URL progress total stays accurate.
            self.counter += 1

    def get_urls(self):
        """Extract the URLs of all know PDSC files.

        Will pull the index from the internet if it is not cached.

        :return: A list of all PDSC URLs
        :rtype: [str]
        """
        if not self.urls :
            try : root_data = self.pdsc_from_cache(RootPackURL)
            except IOError : root_data = self.cache_and_parse(RootPackURL)
            self.urls = ["/".join([pdsc.get('url').strip("/"),
                                   pdsc.get('name').strip("/")])
                         for pdsc in root_data.find_all("pdsc")]
        return self.urls

    def _extract_dict(self, device, filename, pack) :
        """Flatten one <device> element of a PDSC into a plain dict for the index.

        Lookups are wrapped in try/except because PDSC files are inconsistent:
        any field may be missing, and attributes may live on the device, its
        parent or its grandparent element.

        :param device: The parsed <device> element (BeautifulSoup tag).
        :param filename: URL of the PDSC file the device came from.
        :param pack: URL of the corresponding PACK file.
        :return: A dict describing the device (memory, algorithms, debug, ...).
        :rtype: dict
        """
        to_ret = dict(pdsc_file=filename, pack_file=pack)
        try : to_ret["memory"] = dict([(m["id"], dict(start=m["start"],
                                                      size=m["size"]))
                                       for m in device("memory")])
        except (KeyError, TypeError, IndexError) as e : pass
        # Flash algorithms may be declared on the device itself or inherited
        # from the parent element. Pre-initialize so a double failure cannot
        # leave `algorithms` unbound (was a latent NameError below).
        algorithms = []
        try: algorithms = device("algorithm")
        except Exception:
            try: algorithms = device.parent("algorithm")
            except Exception: pass
        else:
            if not algorithms:
                try: algorithms = device.parent("algorithm")
                except Exception: pass
        try : to_ret["algorithm"] = dict([(algo.get("name").replace('\\','/'),
                                           dict(start=algo["start"],
                                                size=algo["size"],
                                                ramstart=algo.get("ramstart",None),
                                                ramsize=algo.get("ramsize",None),
                                                default=algo.get("default",1)))
                                       for algo in algorithms])
        except (KeyError, TypeError, IndexError) as e: pass
        # Later assignments overwrite earlier ones, so the device's own
        # <debug svd=...> wins over its parent's and grandparent's.
        try: to_ret["debug"] = device.parent.parent.debug["svd"]
        except (KeyError, TypeError, IndexError) as e : pass
        try: to_ret["debug"] = device.parent.debug["svd"]
        except (KeyError, TypeError, IndexError) as e : pass
        try: to_ret["debug"] = device.debug["svd"]
        except (KeyError, TypeError, IndexError) as e : pass

        to_ret["compile"] = {}
        try: compile_l1 = device.parent("compile")
        except (KeyError, TypeError, IndexError) as e : compile_l1 = []
        try: compile_l2 = device.parent.parent("compile")
        except (KeyError, TypeError, IndexError) as e : compile_l2 = []
        # Renamed from `compile` so the local no longer shadows re.compile
        # imported at module level.
        compile_elements = compile_l2 + compile_l1
        for c in compile_elements:
            try: to_ret["compile"]["header"] = c["header"]
            except (KeyError, TypeError, IndexError) as e : pass
            try: to_ret["compile"]["define"] =  c["define"]
            except (KeyError, TypeError, IndexError) as e : pass

        # Core name: parent first, grandparent overrides if present.
        try: to_ret["core"] = device.parent.processor['dcore']
        except (KeyError, TypeError, IndexError) as e : pass
        try: to_ret["core"] = device.parent.parent.processor['dcore']
        except (KeyError, TypeError, IndexError) as e : pass

        to_ret["processor"] = {}
        try: proc_l1 = device("processor")
        except (KeyError, TypeError, IndexError) as e: proc_l1 = []
        try: proc_l2 = device.parent("processor")
        except (KeyError, TypeError, IndexError) as e: proc_l2 = []
        try: proc_l3 = device.parent.parent("processor")
        except (KeyError, TypeError, IndexError) as e: proc_l3 = []
        proc = proc_l3 + proc_l2 + proc_l1
        for p in proc:
            try: to_ret["processor"]["fpu"] = p['dfpu']
            except (KeyError, TypeError, IndexError) as e: pass
            try: to_ret["processor"]["endianness"] = p['dendian']
            except (KeyError, TypeError, IndexError) as e: pass
            try: to_ret["processor"]["clock"] = p['dclock']
            except (KeyError, TypeError, IndexError) as e: pass

        try: to_ret["vendor"] = device.parent['dvendor']
        except (KeyError, TypeError, IndexError) as e: pass
        try: to_ret["vendor"] = device.parent.parent['dvendor']
        except (KeyError, TypeError, IndexError) as e: pass

        # Drop sections that ended up empty so the index stays compact.
        if not to_ret["processor"]:
            del to_ret["processor"]

        if not to_ret["compile"]:
            del to_ret["compile"]

        to_ret['debug-interface'] = []

        return to_ret

    def _generate_index_helper(self, d) :
        """Index every <device> of one cached PDSC file into self._index.

        :param d: The URL of the PDSC file (used as the cache key).
        :type d: str
        """
        try :
            pack = self.pdsc_to_pack(d)
            self._index.update(dict([(dev['dname'], self._extract_dict(dev, d, pack)) for dev in
                                    (self.pdsc_from_cache(d)("device"))]))
        except AttributeError as e :
            stderr.write("[ ERROR ] file {}\n".format(d))
            print(e)
        self.counter += 1
        self.display_counter("Generating Index")

    def _generate_aliases_helper(self, d) :
        """Record board-name -> mounted-device aliases of one cached PDSC file.

        :param d: The URL of the PDSC file (used as the cache key).
        :type d: str
        """
        try :
            mydict = []
            for dev in self.pdsc_from_cache(d)("board"):
                try :
                    mydict.append((dev['name'], dev.mounteddevice['dname']))
                except (KeyError, TypeError, IndexError) as e:
                    # Boards without a mounted device are simply skipped.
                    pass
            self._aliases.update(dict(mydict))
        except (AttributeError, TypeError) as e :
            pass
        self.counter += 1
        self.display_counter("Scanning for Aliases")

    def get_flash_algorthim_binary(self, device_name) :
        """Retrieve the flash algorithm file for a particular part.

        Assumes that both the PDSC and the PACK file associated with that part are in the cache.

        :param device_name: The exact name of a device
        :type device_name: str
        :return: A file-like object that, when read, is the ELF file that describes the flashing algorithm
        :rtype: ZipExtFile
        """
        # Was a NameError: referenced an undefined global `device` instead of
        # looking the device up in the index.
        device = self.index[device_name]
        pack = self.pack_from_cache(device)
        return pack.open(device['algorithm']['file'])

    def get_svd_file(self, device_name) :
        """Retrieve the SVD (debug description) file for a particular part.

        Assumes that both the PDSC and the PACK file associated with that part are in the cache.

        :param device_name: The exact name of a device
        :type device_name: str
        :return: A file-like object that, when read, is the SVD file for the device
        :rtype: ZipExtFile
        """
        # Was a NameError: referenced an undefined global `device` instead of
        # looking the device up in the index.
        device = self.index[device_name]
        pack = self.pack_from_cache(device)
        return pack.open(device['debug'])

    def generate_index(self) :
        """Regenerate the device index from every cached PDSC file and save it."""
        self._index = {}
        self.counter = 0
        do_queue(Reader, self._generate_index_helper, self.get_urls())
        # NOTE(review): binary mode matches Python 2 json.dump usage; under
        # Python 3 this would need text mode ("w").
        with open(LocalPackIndex, "wb+") as out:
            self._index["version"] = "0.1.0"
            dump(self._index, out)
        stdout.write("\n")

    def generate_aliases(self) :
        """Regenerate the board alias table from every cached PDSC file and save it."""
        self._aliases = {}
        self.counter = 0
        do_queue(Reader, self._generate_aliases_helper, self.get_urls())
        with open(LocalPackAliases, "wb+") as out:
            dump(self._aliases, out)
        stdout.write("\n")

    def find_device(self, match) :
        """Fuzzy-search the index for device names matching a string.

        :param match: The (possibly partial) device name to search for.
        :type match: str
        :return: All (name, index-record) pairs tied for the best match score.
        :rtype: [(str, dict)]
        """
        choices = process.extract(match, self.index.keys(), limit=len(self.index))
        choices = sorted([(v, k) for k, v in choices], reverse=True)
        # Keep only the top-scoring ties.
        if choices : choices = list(takewhile(lambda t: t[0] == choices[0][0], choices))
        return [(v, self.index[v]) for k,v in choices]

    def dump_index_to_file(self, file) :
        """Write the current device index as JSON to the given path."""
        with open(file, "wb+") as out:
            dump(self.index, out)

    @property
    def index (self) :
        """An index of most of the important data in all cached PDSC files.

        :Example:

        >>> from ArmPackManager import Cache
        >>> a = Cache()
        >>> a.index["LPC1768"]
        {u'algorithm': {u'RAMsize': u'0x0FE0',
                u'RAMstart': u'0x10000000',
                u'name': u'Flash/LPC_IAP_512.FLM',
                u'size': u'0x80000',
                u'start': u'0x00000000'},
         u'compile': [u'Device/Include/LPC17xx.h', u'LPC175x_6x'],
         u'debug': u'SVD/LPC176x5x.svd',
         u'pdsc_file': u'http://www.keil.com/pack/Keil.LPC1700_DFP.pdsc',
         u'memory': {u'IRAM1': {u'size': u'0x8000', u'start': u'0x10000000'},
                     u'IRAM2': {u'size': u'0x8000', u'start': u'0x2007C000'},
                     u'IROM1': {u'size': u'0x80000', u'start': u'0x00000000'}}}


        """
        if not self._index :
            with open(LocalPackIndex) as i :
                self._index = load(i)
        return self._index

    @property
    def aliases (self) :
        """A mapping from board names to the names of their mounted devices.

        Lazily loaded on first access from the same file generate_aliases()
        writes. (Previously read from gettempdir()/aliases.json, a location
        nothing in this module ever writes; now consistent with the
        index/LocalPackIndex pair.)

        :Example:

        >>> from ArmPackManager import Cache
        >>> a = Cache()
        >>> a.aliases["mbed LPC1768"]  # -> the mounted device name

        """
        if not self._aliases :
            with open(LocalPackAliases) as i :
                self._aliases = load(i)
        return self._aliases

    def cache_everything(self) :
        """Cache every PACK and PDSC file known.

        Generates an index afterwards.

        .. note:: This process may use 4GB of drive space and take upwards of 10 minutes to complete.
        """
        self.cache_pack_list(self.get_urls())
        self.generate_index()
        self.generate_aliases()

    def cache_descriptors(self) :
        """Cache every PDSC file known.

        Generates an index afterwards.

        .. note:: This process may use 11MB of drive space and take upwards of 1 minute.
        """
        self.cache_descriptor_list(self.get_urls())
        self.generate_index()
        self.generate_aliases()

    def cache_descriptor_list(self, list) :
        """Cache a list of PDSC files.

        :param list: URLs of PDSC files to cache. (Parameter name shadows the
            builtin but is kept for backward compatibility.)
        :type list: [str]
        """
        self.total = len(list)
        self.display_counter("Caching Files")
        do_queue(Cacher, self.cache_file, list)
        stdout.write("\n")

    def cache_pack_list(self, list) :
        """Cache a list of PACK files, referenced by their PDSC URL

        :param list: URLs of PDSC files to cache. (Parameter name shadows the
            builtin but is kept for backward compatibility.)
        :type list: [str]
        """
        # Two downloads per URL: the PDSC itself and the PACK it describes.
        self.total = len(list) * 2
        self.display_counter("Caching Files")
        do_queue(Cacher, self.cache_pdsc_and_pack, list)
        stdout.write("\n")

    def pdsc_from_cache(self, url) :
        """Low level inteface for extracting a PDSC file from the cache.

        Assumes that the file specified is a PDSC file and is in the cache.

        :param url: The URL of a PDSC file.
        :type url: str
        :return: A parsed representation of the PDSC file.
        :rtype: BeautifulSoup
        """
        dest = join(self.data_path, strip_protocol(url))
        with open(dest, "r") as fd :
            return BeautifulSoup(fd, "html.parser")

    def pack_from_cache(self, url) :
        """Low level inteface for extracting a PACK file from the cache.

        Assumes that the file specified is a PACK file and is in the cache.

        :param url: The index record of a device: a dict whose 'pack_file'
            entry is the URL of the PACK file. (Parameter name kept for
            backward compatibility; the body previously referenced an
            undefined global `device` instead of this argument.)
        :type url: dict
        :return: A parsed representation of the PACK file.
        :rtype: ZipFile
        """
        return ZipFile(join(self.data_path,
                            strip_protocol(url['pack_file'])))

    def gen_dict_from_cache(self) :
        """Parse the cached root index. (Unfinished; kept for API compatibility.)

        Previously defined without ``self`` and referencing the misspelled
        ``RootPackUrl`` and an unqualified ``pdsc_from_cache``, so it could
        never run; it now parses the cached root index and discards the result,
        matching the original's (lack of) return value.
        """
        pdsc_files = self.pdsc_from_cache(RootPackURL)

    def cache_and_parse(self, url) :
        """A low level shortcut that Caches and Parses a PDSC file.

        :param url: The URL of the PDSC file.
        :type url: str
        :return: A parsed representation of the PDSC file.
        :rtype: BeautifulSoup
        """
        self.cache_file(Curl(), url)
        return self.pdsc_from_cache(url)
00458