Greg Steiert / pegasus_dev

Dependents:   blinky_max32630fthr

Embed: (wiki syntax)

« Back to documentation index

Show/hide line numbers __init__.py Source File

__init__.py

00001 from urllib2 import urlopen, URLError
00002 from bs4 import BeautifulSoup
00003 from os.path import join, dirname, basename
00004 from os import makedirs
00005 from errno import EEXIST
00006 from threading import Thread
00007 from Queue import Queue
00008 from re import compile, sub
00009 from sys import stderr, stdout
00010 from fuzzywuzzy import process
00011 from itertools import takewhile
00012 import argparse
00013 from json import dump, load
00014 from zipfile import ZipFile
00015 from tempfile import gettempdir
00016 
# URL of Keil's master index listing every published CMSIS-Pack PDSC file.
RootPackURL = "http://www.keil.com/pack/index.idx"

# The generated device index and board-alias JSON files are stored
# alongside this package on disk.
LocalPackDir = dirname(__file__)
LocalPackIndex = join(LocalPackDir, "index.json")
LocalPackAliases = join(LocalPackDir, "aliases.json")
00022 
00023 
# Matches a URL scheme prefix such as "http://" or "ftp://".
# Raw string avoids the invalid "\w" escape warning of the old "\w*://".
protocol_matcher = compile(r"\w*://")

def strip_protocol(url):
    """Remove the protocol (scheme) prefix from a URL.

    :param url: The URL to strip; coerced to str first.
    :return: The URL without its leading ``scheme://`` part (unchanged if
        no scheme is present).
    :rtype: str
    """
    return protocol_matcher.sub("", str(url))
00027 
def largest_version(content):
    """Return the largest release version declared in a parsed PDSC file.

    Version components are compared numerically where possible, so
    "1.10.0" correctly beats "1.9.0".  The previous implementation used a
    plain string sort, which orders multi-digit components incorrectly
    ("9.0.0" would beat "10.0.0").

    :param content: A parsed PDSC file (BeautifulSoup-style object with a
        ``package.releases`` element).
    :return: The largest version string among the <release> elements.
    :rtype: str
    """
    def version_key(version):
        # Each dotted component becomes a tuple; numeric parts compare as
        # ints, non-numeric parts fall back to string comparison.
        key = []
        for part in version.split("."):
            try:
                key.append((0, int(part), ""))
            except ValueError:
                key.append((1, 0, part))
        return key
    versions = [release['version'] for release in content.package.releases('release')]
    return max(versions, key=version_key)
00030 
def do_queue(Class, function, interable):
    """Process every element of *interable* on a pool of 20 worker threads.

    Blocks until the work queue has been fully drained.

    :param Class: A Thread subclass whose constructor takes (queue, function).
    :param function: Callable each worker applies to one queued item.
    :param interable: Items to enqueue.  (The parameter name is a historical
        typo for "iterable", kept for interface compatibility.)
    """
    work = Queue()
    workers = [Class(work, function) for _ in range(20)]
    for worker in workers:
        # Daemon threads die with the main program once the queue is drained.
        worker.setDaemon(True)
        worker.start()
    for item in interable:
        work.put(item)
    work.join()
00040 
class Reader(Thread):
    """Worker thread that consumes items from a queue forever.

    Each item pulled from the queue is passed to the supplied callable and
    acknowledged with task_done(), so a producer blocked in Queue.join()
    unblocks once everything has been processed.  Instances are expected to
    be daemonised by the caller (see do_queue).
    """
    def __init__(self, queue, func):
        Thread.__init__(self)
        self.queue = queue
        self.func = func

    def run(self):
        # Infinite loop: the thread only dies with the process.
        while True:
            item = self.queue.get()
            self.func(item)
            self.queue.task_done()
00051 
00052 
class Cache(object):
    """ The Cache object is the only relevant API object at the moment

    Constructing the Cache object does not imply any caching.
    A user of the API must explicitly call caching functions.

    :param silent: A boolean that, when True, significantly reduces the printing of this Object
    :type silent: bool
    :param no_timeouts: A boolean that, when True, disables the default connection timeout and low speed timeout for downloading things.
    :type no_timeouts: bool
    """
    def __init__(self, silent, no_timeouts):
        self.silent = silent
        # Progress counters consumed by display_counter().
        self.counter = 0
        self.total = 1
        self._index = {}
        self._aliases = {}
        self.urls = None
        self.no_timeouts = no_timeouts
        # All cached files are stored under the system temporary directory.
        self.data_path = gettempdir()

    def display_counter(self, message):
        """Write a single carriage-return terminated progress line."""
        stdout.write("{} {}/{}\r".format(message, self.counter, self.total))
        stdout.flush()

    def cache_file(self, url):
        """Low level interface to caching a single file.

        The file is written beneath ``self.data_path`` at a path derived
        from the URL with its protocol stripped.

        :param url: The URL to cache.
        :type url: str
        :rtype: None
        """
        if not self.silent:
            print("Caching {}...".format(url))
        dest = join(self.data_path, strip_protocol(url))
        try:
            makedirs(dirname(dest))
        except OSError as exc:
            # An already-existing directory is fine; anything else is not.
            if exc.errno != EEXIST:
                raise
        try:
            with open(dest, "wb+") as fd:
                fd.write(urlopen(url).read())
        except URLError as e:
            # e.reason may be an exception object rather than a str.
            stderr.write(str(e.reason))
        self.counter += 1
        self.display_counter("Caching Files")

    def pdsc_to_pack(self, url):
        """Find the URL of the specified pack file described by a PDSC.

        The PDSC is assumed to be cached and is looked up in the cache by its URL.

        :param url: The url used to look up the PDSC.
        :type url: str
        :return: The url of the PACK file.
        :rtype: str
        """
        content = self.pdsc_from_cache(url)
        new_url = content.package.url.get_text()
        if not new_url.endswith("/"):
            new_url = new_url + "/"
        return (new_url + content.package.vendor.get_text() + "." +
                content.package.find('name').get_text() + "." +
                largest_version(content) + ".pack")

    def cache_pdsc_and_pack(self, url):
        """Cache a PDSC file and the PACK file it describes.

        NOTE(review): the previous signature was ``(self, curl, url)``, but
        cache_file() accepts no curl argument and the only caller (via
        do_queue/Reader) supplies a single url, so the curl parameter was a
        defect and has been removed.

        :param url: The URL of the PDSC file.
        :type url: str
        """
        self.cache_file(url)
        try:
            self.cache_file(self.pdsc_to_pack(url))
        except AttributeError:
            stderr.write("[ ERROR ] {} does not appear to be a conforming .pdsc file\n".format(url))
            # Count the skipped pack download so the progress totals line up.
            self.counter += 1

    def get_urls(self):
        """Extract the URLs of all known PDSC files.

        Will pull the index from the internet if it is not cached.

        :return: A list of all PDSC URLs
        :rtype: [str]
        """
        if not self.urls:
            try:
                root_data = self.pdsc_from_cache(RootPackURL)
            except IOError:
                root_data = self.cache_and_parse(RootPackURL)
            self.urls = ["/".join([pdsc.get('url').strip("/"),
                                   pdsc.get('name').strip("/")])
                         for pdsc in root_data.find_all("pdsc")]
        return self.urls

    def _extract_dict(self, device, filename, pack):
        """Build an index entry for one <device> element of a PDSC file.

        :param device: A parsed <device> element.
        :param filename: URL of the PDSC file the device came from.
        :param pack: URL of the corresponding PACK file.
        :return: A dict of memory regions, flash algorithms, debug/SVD file,
            compile headers/defines, processor info and vendor.
        :rtype: dict
        """
        to_ret = dict(pdsc_file=filename, pack_file=pack)
        try:
            to_ret["memory"] = dict([(m["id"], dict(start=m["start"],
                                                    size=m["size"]))
                                     for m in device("memory")])
        except (KeyError, TypeError, IndexError):
            pass
        # Flash algorithms may be declared on the device or inherited from
        # the parent (family) element.  Default to [] so the lookup below
        # can never hit an unbound local (the old code could leave
        # `algorithms` undefined when both lookups failed).
        algorithms = []
        try:
            algorithms = device("algorithm")
        except (KeyError, TypeError, IndexError, AttributeError):
            pass
        if not algorithms:
            try:
                algorithms = device.parent("algorithm")
            except (KeyError, TypeError, IndexError, AttributeError):
                pass
        try:
            to_ret["algorithm"] = dict([(algo.get("name").replace('\\', '/'),
                                         dict(start=algo["start"],
                                              size=algo["size"],
                                              ramstart=algo.get("ramstart", None),
                                              ramsize=algo.get("ramsize", None),
                                              default=algo.get("default", 1)))
                                        for algo in algorithms])
        except (KeyError, TypeError, IndexError):
            pass
        # SVD file: later assignments win, so the most specific declaration
        # (device) overrides the family-level ones.
        try:
            to_ret["debug"] = device.parent.parent.debug["svd"]
        except (KeyError, TypeError, IndexError):
            pass
        try:
            to_ret["debug"] = device.parent.debug["svd"]
        except (KeyError, TypeError, IndexError):
            pass
        try:
            to_ret["debug"] = device.debug["svd"]
        except (KeyError, TypeError, IndexError):
            pass

        to_ret["compile"] = {}
        try:
            compile_l1 = device.parent("compile")
        except (KeyError, TypeError, IndexError):
            compile_l1 = []
        try:
            compile_l2 = device.parent.parent("compile")
        except (KeyError, TypeError, IndexError):
            compile_l2 = []
        # Renamed from `compile` to avoid shadowing the builtin.
        compile_tags = compile_l2 + compile_l1
        for c in compile_tags:
            try:
                to_ret["compile"]["header"] = c["header"]
            except (KeyError, TypeError, IndexError):
                pass
            try:
                to_ret["compile"]["define"] = c["define"]
            except (KeyError, TypeError, IndexError):
                pass

        try:
            to_ret["core"] = device.parent.processor['dcore']
        except (KeyError, TypeError, IndexError):
            pass
        try:
            to_ret["core"] = device.parent.parent.processor['dcore']
        except (KeyError, TypeError, IndexError):
            pass

        to_ret["processor"] = {}
        try:
            proc_l1 = device("processor")
        except (KeyError, TypeError, IndexError):
            proc_l1 = []
        try:
            proc_l2 = device.parent("processor")
        except (KeyError, TypeError, IndexError):
            proc_l2 = []
        try:
            proc_l3 = device.parent.parent("processor")
        except (KeyError, TypeError, IndexError):
            proc_l3 = []
        proc = proc_l3 + proc_l2 + proc_l1
        for p in proc:
            try:
                to_ret["processor"]["fpu"] = p['dfpu']
            except (KeyError, TypeError, IndexError):
                pass
            try:
                to_ret["processor"]["endianness"] = p['dendian']
            except (KeyError, TypeError, IndexError):
                pass
            try:
                to_ret["processor"]["clock"] = p['dclock']
            except (KeyError, TypeError, IndexError):
                pass

        try:
            to_ret["vendor"] = device.parent['dvendor']
        except (KeyError, TypeError, IndexError):
            pass
        try:
            to_ret["vendor"] = device.parent.parent['dvendor']
        except (KeyError, TypeError, IndexError):
            pass

        # Drop empty sections rather than serialising empty dicts.
        if not to_ret["processor"]:
            del to_ret["processor"]

        if not to_ret["compile"]:
            del to_ret["compile"]

        to_ret['debug-interface'] = []

        return to_ret

    def _generate_index_helper(self, d):
        """Index every <device> found in the PDSC cached at url *d*."""
        try:
            pack = self.pdsc_to_pack(d)
            self._index.update(dict([(dev['dname'], self._extract_dict(dev, d, pack))
                                     for dev in self.pdsc_from_cache(d)("device")]))
        except AttributeError as e:
            stderr.write("[ ERROR ] file {}\n".format(d))
            print(e)
        self.counter += 1
        self.display_counter("Generating Index")

    def _generate_aliases_helper(self, d):
        """Collect board-name -> mounted-device-name pairs from one PDSC."""
        try:
            board_map = []
            for dev in self.pdsc_from_cache(d)("board"):
                try:
                    board_map.append((dev['name'], dev.mounteddevice['dname']))
                except (KeyError, TypeError, IndexError):
                    pass
            self._aliases.update(dict(board_map))
        except (AttributeError, TypeError):
            pass
        self.counter += 1
        self.display_counter("Scanning for Aliases")

    def get_flash_algorthim_binary(self, device_name):
        """Retrieve the flash algorithm file for a particular part.

        Assumes that both the PDSC and the PACK file associated with that part are in the cache.

        NOTE(review): the misspelling "algorthim" in the method name is kept
        for backwards compatibility with existing callers.

        :param device_name: The exact name of a device
        :type device_name: str
        :return: A file-like object that, when read, is the ELF file that describes the flashing algorithm
        :rtype: ZipExtFile
        """
        # The old body referenced an undefined name `device`; look the
        # device up in the index first.
        device = self.index[device_name]
        pack = self.pack_from_cache(device)
        return pack.open(device['algorithm']['file'])

    def get_svd_file(self, device_name):
        """Retrieve the SVD file for a particular part.

        Assumes that both the PDSC and the PACK file associated with that part are in the cache.

        :param device_name: The exact name of a device
        :type device_name: str
        :return: A file-like object that, when read, is the SVD file for the part
        :rtype: ZipExtFile
        """
        # Same undefined-name fix as get_flash_algorthim_binary.
        device = self.index[device_name]
        pack = self.pack_from_cache(device)
        return pack.open(device['debug'])

    def generate_index(self):
        """Build the device index from all cached PDSC files and persist it
        to LocalPackIndex as JSON."""
        self._index = {}
        self.counter = 0
        do_queue(Reader, self._generate_index_helper, self.get_urls())
        with open(LocalPackIndex, "wb+") as out:
            self._index["version"] = "0.1.0"
            dump(self._index, out)
        stdout.write("\n")

    def generate_aliases(self):
        """Build the board-alias map from all cached PDSC files and persist
        it to LocalPackAliases as JSON."""
        self._aliases = {}
        self.counter = 0
        do_queue(Reader, self._generate_aliases_helper, self.get_urls())
        with open(LocalPackAliases, "wb+") as out:
            dump(self._aliases, out)
        stdout.write("\n")

    def find_device(self, match):
        """Fuzzy-search the index for device names resembling *match*.

        :param match: The (possibly partial) device name to search for.
        :return: A list of (device_name, index_entry) pairs, all tied for
            the best fuzzy-match score.
        """
        choices = process.extract(match, self.index.keys(), limit=len(self.index))
        choices = sorted([(v, k) for k, v in choices], reverse=True)
        if choices:
            # Keep only the entries sharing the top score.
            choices = list(takewhile(lambda t: t[0] == choices[0][0], choices))
        return [(v, self.index[v]) for k, v in choices]

    def dump_index_to_file(self, file):
        """Serialise the current device index to *file* as JSON.

        (The parameter name shadows a builtin but is kept for keyword
        compatibility.)
        """
        with open(file, "wb+") as out:
            dump(self.index, out)

    @property
    def index(self):
        """An index of most of the important data in all cached PDSC files.

        Lazily loaded from the index.json shipped with this package.

        :Example:

        >>> from ArmPackManager import Cache
        >>> a = Cache(True, True)
        >>> a.index["LPC1768"]
        {u'algorithm': {u'RAMsize': u'0x0FE0',
                u'RAMstart': u'0x10000000',
                u'name': u'Flash/LPC_IAP_512.FLM',
                u'size': u'0x80000',
                u'start': u'0x00000000'},
         u'compile': [u'Device/Include/LPC17xx.h', u'LPC175x_6x'],
         u'debug': u'SVD/LPC176x5x.svd',
         u'pdsc_file': u'http://www.keil.com/pack/Keil.LPC1700_DFP.pdsc',
         u'memory': {u'IRAM1': {u'size': u'0x8000', u'start': u'0x10000000'},
                     u'IRAM2': {u'size': u'0x8000', u'start': u'0x2007C000'},
                     u'IROM1': {u'size': u'0x80000', u'start': u'0x00000000'}}}
        """
        if not self._index:
            with open(LocalPackIndex) as i:
                self._index = load(i)
        return self._index

    @property
    def aliases(self):
        """A mapping of board names to the device names mounted on them.

        Lazily loaded from the aliases.json generated by generate_aliases().
        (The old docstring was a wrong copy of the index example, and the
        old body read from the temp dir, which generate_aliases() never
        writes to; it now reads LocalPackAliases for consistency.)
        """
        if not self._aliases:
            with open(LocalPackAliases) as i:
                self._aliases = load(i)
        return self._aliases

    def cache_everything(self):
        """Cache every PACK and PDSC file known.

        Generates an index afterwards.

        .. note:: This process may use 4GB of drive space and take upwards of 10 minutes to complete.
        """
        self.cache_pack_list(self.get_urls())
        self.generate_index()
        self.generate_aliases()

    def cache_descriptors(self):
        """Cache every PDSC file known.

        Generates an index afterwards.

        .. note:: This process may use 11MB of drive space and take upwards of 1 minute.
        """
        self.cache_descriptor_list(self.get_urls())
        self.generate_index()
        self.generate_aliases()

    def cache_descriptor_list(self, list):
        """Cache a list of PDSC files.

        :param list: URLs of PDSC files to cache.
        :type list: [str]
        """
        self.total = len(list)
        self.display_counter("Caching Files")
        do_queue(Reader, self.cache_file, list)
        stdout.write("\n")

    def cache_pack_list(self, list):
        """Cache a list of PACK files, referenced by their PDSC URL

        :param list: URLs of PDSC files to cache.
        :type list: [str]
        """
        # Each URL implies two downloads: the PDSC and its PACK.
        self.total = len(list) * 2
        self.display_counter("Caching Files")
        do_queue(Reader, self.cache_pdsc_and_pack, list)
        stdout.write("\n")

    def pdsc_from_cache(self, url):
        """Low level interface for extracting a PDSC file from the cache.

        Assumes that the file specified is a PDSC file and is in the cache.

        :param url: The URL of a PDSC file.
        :type url: str
        :return: A parsed representation of the PDSC file.
        :rtype: BeautifulSoup
        """
        dest = join(self.data_path, strip_protocol(url))
        with open(dest, "r") as fd:
            return BeautifulSoup(fd, "html.parser")

    def pack_from_cache(self, device):
        """Low level interface for extracting a PACK file from the cache.

        Assumes that the file specified is a PACK file and is in the cache.

        :param device: An index entry for a device; its 'pack_file' URL
            locates the PACK in the cache.  (The old parameter was misnamed
            "url" and the body referenced an undefined name `device`.)
        :return: A parsed representation of the PACK file.
        :rtype: ZipFile
        """
        return ZipFile(join(self.data_path,
                            strip_protocol(device['pack_file'])))

    def gen_dict_from_cache(self):
        """Parse and return the cached root pack index.

        (The old version lacked `self` and referenced the undefined names
        RootPackUrl and pdsc_from_cache; still looks incomplete — TODO.)
        """
        return self.pdsc_from_cache(RootPackURL)

    def cache_and_parse(self, url):
        """A low level shortcut that Caches and Parses a PDSC file.

        :param url: The URL of the PDSC file.
        :type url: str
        :return: A parsed representation of the PDSC file.
        :rtype: BeautifulSoup
        """
        # The old body passed a Curl() object that is undefined here and
        # that cache_file() does not accept.
        self.cache_file(url)
        return self.pdsc_from_cache(url)
00438