init

Dependencies:   mbed

Embed: (wiki syntax)

« Back to documentation index

Show/hide line numbers __init__.py Source File

__init__.py

try:
    from urllib2 import urlopen, URLError
except ImportError:
    # Python 3: urlopen moved to urllib.request, but URLError lives in
    # urllib.error -- importing it from urllib.request raises ImportError.
    from urllib.request import urlopen
    from urllib.error import URLError
from bs4 import BeautifulSoup
from os.path import join, dirname, basename
from os import makedirs
from errno import EEXIST
from threading import Thread
try:
    from Queue import Queue
except ImportError:
    from queue import Queue
from re import compile, sub
from sys import stderr, stdout
from itertools import takewhile
import argparse
from json import dump, load
from zipfile import ZipFile
from tempfile import gettempdir
import warnings
from distutils.version import LooseVersion

from tools.flash_algo import PackFlashAlgo

warnings.filterwarnings("ignore")

# fuzzywuzzy emits warnings at import time, so it must stay below the
# filterwarnings("ignore") call above.
from fuzzywuzzy import process
00029 
00030 RootPackURL = "http://www.keil.com/pack/index.idx"
00031 
00032 LocalPackDir = dirname(__file__)
00033 LocalPackIndex = join(LocalPackDir, "index.json")
00034 LocalPackAliases = join(LocalPackDir, "aliases.json")
00035 
00036 
# Matches an optional leading URL scheme such as "http://" or "ftp://".
# Raw string so that \w is a regex escape, not a (deprecated) string escape.
protocol_matcher = compile(r"\w*://")
def strip_protocol(url):
    """Return *url* with any leading protocol (e.g. ``http://``) removed.

    :param url: the URL to strip; coerced to str first
    :type url: str
    :rtype: str
    """
    return protocol_matcher.sub("", str(url))
00040 
def largest_version(content):
    """Return the largest release version recorded in a parsed PDSC.

    Versions are compared as ``LooseVersion`` objects so that, e.g.,
    ``1.10.0`` ranks above ``1.2.0``. Uses ``max`` with a key instead of
    sorting the entire release list just to take the first element.

    :param content: a parsed PDSC file (BeautifulSoup)
    :return: the largest version string
    :rtype: str
    """
    return max((t['version'] for t in content.package.releases('release')),
               key=LooseVersion)
00044 
def do_queue(Class, function, interable):
    """Apply *function* to every item of *interable* using 20 worker threads.

    :param Class: a Thread subclass constructed as ``Class(queue, function)``
                  whose ``run`` consumes items from the queue forever
    :param function: callable invoked once per work item
    :param interable: iterable of work items (parameter name kept, typo and
                      all, for backwards compatibility with keyword callers)
    """
    q = Queue()
    threads = [Class(q, function) for _ in range(20)]
    for each in threads:
        # Daemon threads: the workers loop forever, so they must not block
        # interpreter exit. (`daemon` attribute replaces deprecated setDaemon.)
        each.daemon = True
        each.start()
    for thing in interable:
        q.put(thing)
    q.join()
00054 
class Reader(Thread):
    """Worker thread that applies a callable to each item pulled off a queue.

    The loop never terminates; instances are intended to be started as
    daemon threads (see ``do_queue``) and discarded at interpreter exit.
    """

    def __init__(self, queue, func):
        Thread.__init__(self)
        self.queue = queue
        self.func = func

    def run(self):
        # Block for the next work item, process it, then mark it done so
        # that queue.join() in the producer can eventually return.
        while True:
            item = self.queue.get()
            self.func(item)
            self.queue.task_done()
00065 
00066 
class Cache(object):
    """The Cache object is the only relevant API object at the moment

    Constructing the Cache object does not imply any caching.
    A user of the API must explicitly call caching functions.

    :param silent: A boolean that, when True, significantly reduces the printing of this Object
    :type silent: bool
    :param no_timeouts: A boolean that, when True, disables the default connection timeout and low speed timeout for downloading things.
    :type no_timeouts: bool
    """

    def __init__(self, silent, no_timeouts):
        self.silent = silent
        self.counter = 0      # operations completed so far (progress display)
        self.total = 1        # operations expected in total (progress display)
        self._index = {}      # device name -> extracted device information
        self._aliases = {}    # board name -> mounted device name
        self.urls = None      # memoized list of all known PDSC URLs
        self.no_timeouts = no_timeouts
        self.data_path = gettempdir()  # root directory of the on-disk cache

    def display_counter(self, message):
        """Print a one-line progress message of the form ``<message> N/M``."""
        stdout.write("{} {}/{}\r".format(message, self.counter, self.total))
        stdout.flush()

    def cache_file(self, url):
        """Low level interface to caching a single file.

        :param url: The URL to cache.
        :type url: str
        :rtype: None
        """
        if not self.silent:
            print("Caching {}...".format(url))
        dest = join(self.data_path, strip_protocol(url))
        try:
            makedirs(dirname(dest))
        except OSError as exc:
            # The destination directory already existing is expected.
            if exc.errno != EEXIST:
                raise
        try:
            with open(dest, "wb+") as fd:
                fd.write(urlopen(url).read())
        except URLError as e:
            # e.reason may be an exception instance rather than a string;
            # format it and newline-terminate so messages don't run together.
            stderr.write("{}\n".format(e.reason))
        self.counter += 1
        self.display_counter("Caching Files")

    def pdsc_to_pack(self, url):
        """Find the URL of the specified pack file described by a PDSC.

        The PDSC is assumed to be cached and is looked up in the cache by its URL.

        :param url: The url used to look up the PDSC.
        :type url: str
        :return: The url of the PACK file.
        :rtype: str
        """
        content = self.pdsc_from_cache(url)
        new_url = content.package.url.get_text()
        if not new_url.endswith("/"):
            new_url = new_url + "/"
        return (new_url + content.package.vendor.get_text() + "." +
                content.package.find('name').get_text() + "." +
                largest_version(content) + ".pack")

    def cache_pdsc_and_pack(self, url):
        """Cache a PDSC file and then the PACK file it describes."""
        self.cache_file(url)
        try:
            self.cache_file(self.pdsc_to_pack(url))
        except AttributeError:
            stderr.write("[ ERROR ] {} does not appear to be a conforming .pdsc file\n".format(url))
            # Count the skipped PACK download so the progress total still adds up.
            self.counter += 1

    def get_urls(self):
        """Extract the URLs of all know PDSC files.

        Will pull the index from the internet if it is not cached.

        :return: A list of all PDSC URLs
        :rtype: [str]
        """
        if not self.urls:
            try:
                root_data = self.pdsc_from_cache(RootPackURL)
            except IOError:
                root_data = self.cache_and_parse(RootPackURL)
            self.urls = ["/".join([pdsc.get('url').strip("/"),
                                   pdsc.get('name').strip("/")])
                         for pdsc in root_data.find_all("pdsc")]
        return self.urls

    def _get_sectors(self, device):
        """Extract sector sizes from the device's FLM flash algorithm.

        Returns None when the device has no algorithm or its PACK file
        cannot be read from the cache.

        :return: A list of tuples of sector start and size, or None
        :rtype: [list] or None
        """
        try:
            pack = self.pack_from_cache(device)
            algo_itr = (pack.open(path) for path in device['algorithm'].keys())
            # next() builtin works on Python 2 and 3; the previous
            # algo_itr.next() method call is Python 2 only.
            algo_bin = next(algo_itr)
            flm_file = algo_bin.read()
            return PackFlashAlgo(flm_file).sector_sizes
        except Exception:
            # Deliberately best-effort: any failure means "no sector info".
            return None

    def _extract_dict(self, device, filename, pack):
        """Build the index entry for one ``<device>`` element of a parsed PDSC.

        Each attribute is looked up best-effort at the device level and,
        failing that, at the parent (family/subfamily) levels; anything
        that cannot be found is simply omitted from the returned dict.

        :param device: a parsed ``<device>`` element (BeautifulSoup tag)
        :param filename: URL of the PDSC this device came from
        :param pack: URL of the PACK file for this device
        :rtype: dict
        """
        to_ret = dict(pdsc_file=filename, pack_file=pack)
        try:
            to_ret["memory"] = {m["id"]: dict(start=m["start"],
                                              size=m["size"])
                                for m in device("memory")}
        except (KeyError, TypeError, IndexError):
            # Some PDSC files key memory regions by "name" rather than "id".
            try:
                to_ret["memory"] = {m["name"]: dict(start=m["start"],
                                                    size=m["size"])
                                    for m in device("memory")}
            except (KeyError, TypeError, IndexError):
                pass
        # Pre-initialize so a failure in both lookups below cannot leave
        # `algorithms` unbound (the original could raise NameError here).
        algorithms = []
        try:
            algorithms = device("algorithm")
        except Exception:
            try:
                algorithms = device.parent("algorithm")
            except Exception:
                pass
        else:
            if not algorithms:
                try:
                    algorithms = device.parent("algorithm")
                except Exception:
                    pass
        try:
            to_ret["algorithm"] = {algo.get("name").replace('\\', '/'):
                                   dict(start=algo["start"],
                                        size=algo["size"],
                                        ramstart=algo.get("ramstart", None),
                                        ramsize=algo.get("ramsize", None),
                                        default=algo.get("default", 1))
                                   for algo in algorithms}
        except (KeyError, TypeError, IndexError):
            pass
        # Assign least-specific first so the most specific <debug> element
        # (the device's own) wins.
        try:
            to_ret["debug"] = device.parent.parent.debug["svd"]
        except (KeyError, TypeError, IndexError):
            pass
        try:
            to_ret["debug"] = device.parent.debug["svd"]
        except (KeyError, TypeError, IndexError):
            pass
        try:
            to_ret["debug"] = device.debug["svd"]
        except (KeyError, TypeError, IndexError):
            pass

        to_ret["compile"] = {}
        try:
            compile_l1 = device.parent("compile")
        except (KeyError, TypeError, IndexError):
            compile_l1 = []
        try:
            compile_l2 = device.parent.parent("compile")
        except (KeyError, TypeError, IndexError):
            compile_l2 = []
        # Renamed from `compile` to avoid shadowing the builtin (and the
        # `compile` imported from `re` at module level).
        compile_elements = compile_l2 + compile_l1
        for c in compile_elements:
            try:
                to_ret["compile"]["header"] = c["header"]
            except (KeyError, TypeError, IndexError):
                pass
            try:
                to_ret["compile"]["define"] = c["define"]
            except (KeyError, TypeError, IndexError):
                pass

        # NOTE: original assignment order kept -- the grandparent's dcore,
        # when present, overrides the parent's.
        try:
            to_ret["core"] = device.parent.processor['dcore']
        except (KeyError, TypeError, IndexError):
            pass
        try:
            to_ret["core"] = device.parent.parent.processor['dcore']
        except (KeyError, TypeError, IndexError):
            pass

        to_ret["processor"] = {}
        try:
            proc_l1 = device("processor")
        except (KeyError, TypeError, IndexError):
            proc_l1 = []
        try:
            proc_l2 = device.parent("processor")
        except (KeyError, TypeError, IndexError):
            proc_l2 = []
        try:
            proc_l3 = device.parent.parent("processor")
        except (KeyError, TypeError, IndexError):
            proc_l3 = []
        # Least specific first, so device-level values override family ones.
        proc = proc_l3 + proc_l2 + proc_l1
        for p in proc:
            try:
                to_ret["processor"]["fpu"] = p['dfpu']
            except (KeyError, TypeError, IndexError):
                pass
            try:
                to_ret["processor"]["endianness"] = p['dendian']
            except (KeyError, TypeError, IndexError):
                pass
            try:
                to_ret["processor"]["clock"] = p['dclock']
            except (KeyError, TypeError, IndexError):
                pass

        try:
            to_ret["vendor"] = device.parent['dvendor']
        except (KeyError, TypeError, IndexError):
            pass
        try:
            to_ret["vendor"] = device.parent.parent['dvendor']
        except (KeyError, TypeError, IndexError):
            pass

        # Drop empty sub-dicts rather than publish meaningless keys.
        if not to_ret["processor"]:
            del to_ret["processor"]
        if not to_ret["compile"]:
            del to_ret["compile"]

        to_ret['debug-interface'] = []
        to_ret['sectors'] = self._get_sectors(to_ret)

        return to_ret

    def _generate_index_helper(self, d):
        """Parse one cached PDSC and merge its devices into the index."""
        try:
            pack = self.pdsc_to_pack(d)
            self._index.update({dev['dname']: self._extract_dict(dev, d, pack)
                                for dev in self.pdsc_from_cache(d)("device")})
        except AttributeError as e:
            stderr.write("[ ERROR ] file {}\n".format(d))
            print(e)
        self.counter += 1
        self.display_counter("Generating Index")

    def _generate_aliases_helper(self, d):
        """Parse one cached PDSC and merge its board-name aliases."""
        try:
            mydict = []
            for dev in self.pdsc_from_cache(d)("board"):
                try:
                    mydict.append((dev['name'], dev.mounteddevice['dname']))
                except (KeyError, TypeError, IndexError):
                    pass
            self._aliases.update(dict(mydict))
        except (AttributeError, TypeError):
            pass
        self.counter += 1
        self.display_counter("Scanning for Aliases")

    def get_flash_algorthim_binary(self, device_name, all=False):
        """Retrieve the flash algorithm file for a particular part.

        Assumes that both the PDSC and the PACK file associated with that part are in the cache.

        .. note:: The method name's spelling ("algorthim") is kept as-is
           for backwards compatibility with existing callers.

        :param device_name: The exact name of a device
        :param all: Return an iterator of all flash algos for this device
        :type device_name: str
        :return: A file-like object that, when read, is the ELF file that describes the flashing algorithm.
                 When "all" is set to True then an iterator for file-like objects is returned
        :rtype: ZipExtFile or ZipExtFile iterator if all is True
        """
        device = self.index[device_name]
        pack = self.pack_from_cache(device)
        algo_itr = (pack.open(path) for path in device['algorithm'].keys())
        # next() builtin for Python 2/3 compatibility (.next() is Py2 only).
        return algo_itr if all else next(algo_itr)

    def get_svd_file(self, device_name):
        """Retrieve the SVD file for a particular part.

        Assumes that both the PDSC and the PACK file associated with that part are in the cache.

        :param device_name: The exact name of a device
        :type device_name: str
        :return: A file-like object that, when read, is the SVD file for the device
        :rtype: ZipExtFile
        """
        device = self.index[device_name]
        pack = self.pack_from_cache(device)
        return pack.open(device['debug'])

    def generate_index(self):
        """Regenerate the device index from all cached PDSC files and save it."""
        self._index = {}
        self.counter = 0
        do_queue(Reader, self._generate_index_helper, self.get_urls())
        # Text mode: json.dump writes str, which a "wb+" file rejects on
        # Python 3.
        with open(LocalPackIndex, "w") as out:
            self._index["version"] = "0.1.0"
            dump(self._index, out)
        stdout.write("\n")

    def generate_aliases(self):
        """Regenerate the board alias table from all cached PDSC files and save it."""
        self._aliases = {}
        self.counter = 0
        do_queue(Reader, self._generate_aliases_helper, self.get_urls())
        # Text mode for json.dump (see generate_index).
        with open(LocalPackAliases, "w") as out:
            dump(self._aliases, out)
        stdout.write("\n")

    def find_device(self, match):
        """Fuzzily search the index for *match*; return all top-scoring hits.

        :param match: approximate device name
        :return: list of (device_name, device_dict) pairs tied for best score
        """
        choices = process.extract(match, self.index.keys(), limit=len(self.index))
        choices = sorted([(v, k) for k, v in choices], reverse=True)
        if choices:
            # Keep only the entries tied with the best score.
            choices = list(takewhile(lambda t: t[0] == choices[0][0], choices))
        return [(v, self.index[v]) for k, v in choices]

    def dump_index_to_file(self, file):
        """Serialize the current index as JSON to the named file."""
        # Text mode for json.dump (see generate_index).
        with open(file, "w") as out:
            dump(self.index, out)

    @property
    def index(self):
        """An index of most of the important data in all cached PDSC files.

        Loaded lazily from the local index file on first access.

        :Example:

        >>> from ArmPackManager import Cache
        >>> a = Cache()
        >>> a.index["LPC1768"]
        {u'algorithm': {u'RAMsize': u'0x0FE0',
                u'RAMstart': u'0x10000000',
                u'name': u'Flash/LPC_IAP_512.FLM',
                u'size': u'0x80000',
                u'start': u'0x00000000'},
         u'compile': [u'Device/Include/LPC17xx.h', u'LPC175x_6x'],
         u'debug': u'SVD/LPC176x5x.svd',
         u'pdsc_file': u'http://www.keil.com/pack/Keil.LPC1700_DFP.pdsc',
         u'memory': {u'IRAM1': {u'size': u'0x8000', u'start': u'0x10000000'},
                     u'IRAM2': {u'size': u'0x8000', u'start': u'0x2007C000'},
                     u'IROM1': {u'size': u'0x80000', u'start': u'0x00000000'}}}


        """
        if not self._index:
            with open(LocalPackIndex) as i:
                self._index = load(i)
        return self._index

    @property
    def aliases(self):
        """A mapping from board names to the device names mounted on them.

        Loaded lazily from the local aliases file on first access. The keys
        come from PDSC ``<board name=...>`` elements and the values from
        their ``<mounteddevice dname=...>`` children (see
        ``_generate_aliases_helper``).
        """
        if not self._aliases:
            with open(LocalPackAliases) as i:
                self._aliases = load(i)
        return self._aliases

    def cache_everything(self):
        """Cache every PACK and PDSC file known.

        Generates an index afterwards.

        .. note:: This process may use 4GB of drive space and take upwards of 10 minutes to complete.
        """
        self.cache_pack_list(self.get_urls())
        self.generate_index()
        self.generate_aliases()

    def cache_descriptors(self):
        """Cache every PDSC file known.

        Generates an index afterwards.

        .. note:: This process may use 11MB of drive space and take upwards of 1 minute.
        """
        self.cache_descriptor_list(self.get_urls())
        self.generate_index()
        self.generate_aliases()

    def cache_descriptor_list(self, list):
        """Cache a list of PDSC files.

        :param list: URLs of PDSC files to cache.
        :type list: [str]
        """
        self.total = len(list)
        self.display_counter("Caching Files")
        do_queue(Reader, self.cache_file, list)
        stdout.write("\n")

    def cache_pack_list(self, list):
        """Cache a list of PACK files, referenced by their PDSC URL

        :param list: URLs of PDSC files to cache.
        :type list: [str]
        """
        # Each URL produces two downloads: the PDSC and then its PACK.
        self.total = len(list) * 2
        self.display_counter("Caching Files")
        do_queue(Reader, self.cache_pdsc_and_pack, list)
        stdout.write("\n")

    def pdsc_from_cache(self, url):
        """Low level interface for extracting a PDSC file from the cache.

        Assumes that the file specified is a PDSC file and is in the cache.

        :param url: The URL of a PDSC file.
        :type url: str
        :return: A parsed representation of the PDSC file.
        :rtype: BeautifulSoup
        """
        dest = join(self.data_path, strip_protocol(url))
        with open(dest, "r") as fd:
            return BeautifulSoup(fd, "html.parser")

    def pack_from_cache(self, device):
        """Low level interface for extracting a PACK file from the cache.

        Assumes that the PACK file referenced by the device entry is cached.

        :param device: an index entry containing a ``pack_file`` URL
        :type device: dict
        :return: A parsed representation of the PACK file.
        :rtype: ZipFile
        """
        return ZipFile(join(self.data_path,
                            strip_protocol(device['pack_file'])))

    def gen_dict_from_cache(self):
        """Parse the cached root index PDSC and return it.

        Repaired: the original lacked ``self``, referenced an undefined
        ``RootPackUrl`` (case typo), and called ``pdsc_from_cache`` without
        ``self``, so it always raised.
        """
        return self.pdsc_from_cache(RootPackURL)

    def cache_and_parse(self, url):
        """A low level shortcut that Caches and Parses a PDSC file.

        :param url: The URL of the PDSC file.
        :type url: str
        :return: A parsed representation of the PDSC file.
        :rtype: BeautifulSoup
        """
        self.cache_file(url)
        return self.pdsc_from_cache(url)