Marco Zecchini
/
Example_RTOS
Rtos API example
Embed:
(wiki syntax)
Show/hide line numbers
__init__.py
"""A cached interface to the ARM CMSIS-Pack device index.

The only public API object is :class:`Cache`; the module-level functions and
the :class:`Reader` thread are internal helpers.

.. note:: This module targets Python 2 (``urllib2``, ``Queue``).
"""
from urllib2 import urlopen, URLError
from bs4 import BeautifulSoup
from os.path import join, dirname, basename
from os import makedirs
from errno import EEXIST
from threading import Thread
from Queue import Queue
from re import compile, sub
from sys import stderr, stdout
from itertools import takewhile
import argparse
from json import dump, load
from zipfile import ZipFile
from tempfile import gettempdir
import warnings
from distutils.version import LooseVersion

warnings.filterwarnings("ignore")

from fuzzywuzzy import process

RootPackURL = "http://www.keil.com/pack/index.idx"

LocalPackDir = dirname(__file__)
LocalPackIndex = join(LocalPackDir, "index.json")
LocalPackAliases = join(LocalPackDir, "aliases.json")


# Matches a URL scheme prefix such as "http://".  Raw string: "\w" is a
# regex escape, and a plain string literal would rely on Python not
# interpreting the backslash (a DeprecationWarning on modern Pythons).
protocol_matcher = compile(r"\w*://")


def strip_protocol(url):
    """Drop the scheme ("http://", "ftp://", ...) from *url* so the rest can
    be used as a relative path inside the local cache directory.

    :param url: The URL to strip.
    :type url: str
    :rtype: str
    """
    return protocol_matcher.sub("", str(url))


def largest_version(content):
    """Return the largest release version advertised by a parsed PDSC file.

    :param content: A BeautifulSoup-parsed PDSC document.
    :rtype: str
    """
    # max() keyed on LooseVersion is equivalent to sorting descending and
    # taking the first element, without building the intermediate list.
    return max((t['version'] for t in content.package.releases('release')),
               key=LooseVersion)


def do_queue(Class, function, iterable):
    """Fan *iterable* out to 20 worker threads and block until all items
    have been processed.

    :param Class: The worker Thread subclass to instantiate (e.g. Reader).
    :param function: The callable each worker applies to a queue item.
    :param iterable: The work items to enqueue.  (Was misspelled
        "interable"; positional use only, so the rename is safe.)
    :rtype: None
    """
    q = Queue()
    threads = [Class(q, function) for _ in range(20)]
    for thread in threads:
        # Daemon threads let the process exit even though each worker loops
        # forever; q.join() below is the real synchronisation point.
        # (attribute assignment replaces the deprecated setDaemon()).
        thread.daemon = True
        thread.start()
    for thing in iterable:
        q.put(thing)
    q.join()


class Reader (Thread):
    """Worker thread that applies *func* to every item pulled from *queue*."""

    def __init__(self, queue, func):
        Thread.__init__(self)
        self.queue = queue
        self.func = func

    def run(self):
        # Runs forever; the thread is started as a daemon, so it dies with
        # the process once do_queue()'s q.join() has returned.
        while True:
            url = self.queue.get()
            self.func(url)
            self.queue.task_done()


class Cache ():
    """ The Cache object is the only relevant API object at the moment

    Constructing the Cache object does not imply any caching.
    A user of the API must explicitly call caching functions.

    :param silent: A boolean that, when True, significantly reduces the printing of this Object
    :type silent: bool
    :param no_timeouts: A boolean that, when True, disables the default connection timeout and low speed timeout for downloading things.
    :type no_timeouts: bool
    """
    def __init__(self, silent, no_timeouts):
        self.silent = silent
        self.counter = 0          # progress counter shared by worker threads
        self.total = 1            # denominator for the progress display
        self._index = {}          # lazy backing store for the `index` property
        self._aliases = {}        # lazy backing store for the `aliases` property
        self.urls = None          # memoised result of get_urls()
        self.no_timeouts = no_timeouts
        self.data_path = gettempdir()

    def display_counter(self, message):
        """Render a one-line, carriage-return progress indicator."""
        stdout.write("{} {}/{}\r".format(message, self.counter, self.total))
        stdout.flush()

    def cache_file(self, url):
        """Low level interface to caching a single file.

        :param url: The URL to cache.
        :type url: str
        :rtype: None
        """
        if not self.silent:
            print("Caching {}...".format(url))
        dest = join(self.data_path, strip_protocol(url))
        try:
            makedirs(dirname(dest))
        except OSError as exc:
            # The cache directory already existing is the normal case.
            if exc.errno != EEXIST:
                raise
        try:
            with open(dest, "wb+") as fd:
                fd.write(urlopen(url).read())
        except URLError as e:
            # e.reason may be an exception instance, not a str; stringify so
            # stderr.write() cannot itself raise TypeError.
            stderr.write(str(e.reason))
        self.counter += 1
        self.display_counter("Caching Files")

    def pdsc_to_pack(self, url):
        """Find the URL of the specified pack file described by a PDSC.

        The PDSC is assumed to be cached and is looked up in the cache by its URL.

        :param url: The url used to look up the PDSC.
        :type url: str
        :return: The url of the PACK file.
        :rtype: str
        """
        content = self.pdsc_from_cache(url)
        new_url = content.package.url.get_text()
        if not new_url.endswith("/"):
            new_url = new_url + "/"
        return (new_url + content.package.vendor.get_text() + "." +
                content.package.find('name').get_text() + "." +
                largest_version(content) + ".pack")

    def cache_pdsc_and_pack(self, url):
        """Cache a PDSC file and the PACK file it points at (best effort)."""
        self.cache_file(url)
        try:
            self.cache_file(self.pdsc_to_pack(url))
        except AttributeError:
            stderr.write("[ ERROR ] {} does not appear to be a conforming .pdsc file\n".format(url))
            self.counter += 1

    def get_urls(self):
        """Extract the URLs of all know PDSC files.

        Will pull the index from the internet if it is not cached.

        :return: A list of all PDSC URLs
        :rtype: [str]
        """
        if not self.urls:
            try:
                root_data = self.pdsc_from_cache(RootPackURL)
            except IOError:
                root_data = self.cache_and_parse(RootPackURL)
            self.urls = ["/".join([pdsc.get('url').strip("/"),
                                   pdsc.get('name').strip("/")])
                         for pdsc in root_data.find_all("pdsc")]
        return self.urls

    def _extract_dict(self, device, filename, pack):
        """Flatten one ``<device>`` element of a PDSC into a plain dict.

        PDSC files are inconsistent, so every attribute is probed at several
        levels of the element tree and silently skipped when absent; later
        (more specific) lookups overwrite earlier ones.

        :param device: A BeautifulSoup ``<device>`` tag.
        :param filename: URL of the PDSC file the device came from.
        :param pack: URL of the corresponding PACK file.
        :rtype: dict
        """
        to_ret = dict(pdsc_file=filename, pack_file=pack)
        try:
            to_ret["memory"] = dict([(m["id"], dict(start=m["start"],
                                                    size=m["size"]))
                                     for m in device("memory")])
        except (KeyError, TypeError, IndexError):
            pass
        # Fix: "algorithms" was previously left unbound when both lookups
        # failed, raising NameError below instead of skipping cleanly.  The
        # former bare excepts are narrowed: a missing parent manifests as
        # TypeError (None is not callable) or AttributeError.
        algorithms = []
        try:
            algorithms = device("algorithm")
        except (TypeError, AttributeError):
            pass
        if not algorithms:
            try:
                algorithms = device.parent("algorithm")
            except (TypeError, AttributeError):
                pass
        try:
            to_ret["algorithm"] = dict([(algo.get("name").replace('\\', '/'),
                                         dict(start=algo["start"],
                                              size=algo["size"],
                                              ramstart=algo.get("ramstart", None),
                                              ramsize=algo.get("ramsize", None),
                                              default=algo.get("default", 1)))
                                        for algo in algorithms])
        except (KeyError, TypeError, IndexError):
            pass
        # SVD file: probe grandparent, parent, then the device itself, so the
        # most specific declaration wins.
        try:
            to_ret["debug"] = device.parent.parent.debug["svd"]
        except (KeyError, TypeError, IndexError):
            pass
        try:
            to_ret["debug"] = device.parent.debug["svd"]
        except (KeyError, TypeError, IndexError):
            pass
        try:
            to_ret["debug"] = device.debug["svd"]
        except (KeyError, TypeError, IndexError):
            pass

        to_ret["compile"] = {}
        try:
            compile_l1 = device.parent("compile")
        except (KeyError, TypeError, IndexError):
            compile_l1 = []
        try:
            compile_l2 = device.parent.parent("compile")
        except (KeyError, TypeError, IndexError):
            compile_l2 = []
        # Renamed from "compile", which shadowed the imported re.compile.
        compile_tags = compile_l2 + compile_l1
        for c in compile_tags:
            try:
                to_ret["compile"]["header"] = c["header"]
            except (KeyError, TypeError, IndexError):
                pass
            try:
                to_ret["compile"]["define"] = c["define"]
            except (KeyError, TypeError, IndexError):
                pass

        try:
            to_ret["core"] = device.parent.processor['dcore']
        except (KeyError, TypeError, IndexError):
            pass
        try:
            to_ret["core"] = device.parent.parent.processor['dcore']
        except (KeyError, TypeError, IndexError):
            pass

        to_ret["processor"] = {}
        try:
            proc_l1 = device("processor")
        except (KeyError, TypeError, IndexError):
            proc_l1 = []
        try:
            proc_l2 = device.parent("processor")
        except (KeyError, TypeError, IndexError):
            proc_l2 = []
        try:
            proc_l3 = device.parent.parent("processor")
        except (KeyError, TypeError, IndexError):
            proc_l3 = []
        proc = proc_l3 + proc_l2 + proc_l1
        for p in proc:
            try:
                to_ret["processor"]["fpu"] = p['dfpu']
            except (KeyError, TypeError, IndexError):
                pass
            try:
                to_ret["processor"]["endianness"] = p['dendian']
            except (KeyError, TypeError, IndexError):
                pass
            try:
                to_ret["processor"]["clock"] = p['dclock']
            except (KeyError, TypeError, IndexError):
                pass

        try:
            to_ret["vendor"] = device.parent['dvendor']
        except (KeyError, TypeError, IndexError):
            pass
        try:
            to_ret["vendor"] = device.parent.parent['dvendor']
        except (KeyError, TypeError, IndexError):
            pass

        # Drop empty sections rather than emitting "{}" into the index.
        if not to_ret["processor"]:
            del to_ret["processor"]
        if not to_ret["compile"]:
            del to_ret["compile"]

        to_ret['debug-interface'] = []

        return to_ret

    def _generate_index_helper(self, d):
        """Worker body: parse one cached PDSC and merge its devices into
        ``self._index``.  Runs on Reader threads via do_queue()."""
        try:
            pack = self.pdsc_to_pack(d)
            self._index.update(dict([(dev['dname'],
                                      self._extract_dict(dev, d, pack))
                                     for dev in
                                     (self.pdsc_from_cache(d)("device"))]))
        except AttributeError as e:
            stderr.write("[ ERROR ] file {}\n".format(d))
            print(e)
        self.counter += 1
        self.display_counter("Generating Index")

    def _generate_aliases_helper(self, d):
        """Worker body: collect board-name -> device-name pairs from one
        cached PDSC into ``self._aliases``."""
        try:
            mydict = []
            for dev in self.pdsc_from_cache(d)("board"):
                try:
                    mydict.append((dev['name'], dev.mounteddevice['dname']))
                except (KeyError, TypeError, IndexError):
                    pass
            self._aliases.update(dict(mydict))
        except (AttributeError, TypeError):
            pass
        self.counter += 1
        self.display_counter("Scanning for Aliases")

    def get_flash_algorthim_binary(self, device_name, all=False):
        """Retrieve the flash algorithm file for a particular part.

        Assumes that both the PDSC and the PACK file associated with that part are in the cache.

        :param device_name: The exact name of a device
        :param all: Return an iterator of all flash algos for this device
        :type device_name: str
        :return: A file-like object that, when read, is the ELF file that describes the flashing algorithm.
                 When "all" is set to True then an iterator for file-like objects is returned
        :rtype: ZipExtFile or ZipExtFile iterator if all is True
        """
        device = self.index[device_name]
        pack = self.pack_from_cache(device)
        algo_itr = (pack.open(path) for path in device['algorithm'].keys())
        # next() builtin instead of the Python-2-only .next() method.
        return algo_itr if all else next(algo_itr)

    def get_svd_file(self, device_name):
        """Retrieve the SVD (debug description) file for a particular part.

        Assumes that both the PDSC and the PACK file associated with that part are in the cache.

        :param device_name: The exact name of a device
        :type device_name: str
        :return: A file-like object that, when read, is the SVD file
        :rtype: ZipExtFile
        """
        device = self.index[device_name]
        pack = self.pack_from_cache(device)
        return pack.open(device['debug'])

    def generate_index(self):
        """Rebuild the device index from every known PDSC and persist it to
        ``LocalPackIndex`` as JSON."""
        self._index = {}
        self.counter = 0
        do_queue(Reader, self._generate_index_helper, self.get_urls())
        with open(LocalPackIndex, "wb+") as out:
            self._index["version"] = "0.1.0"
            dump(self._index, out)
        stdout.write("\n")

    def generate_aliases(self):
        """Rebuild the board-name alias table from every known PDSC and
        persist it to ``LocalPackAliases`` as JSON."""
        self._aliases = {}
        self.counter = 0
        do_queue(Reader, self._generate_aliases_helper, self.get_urls())
        with open(LocalPackAliases, "wb+") as out:
            dump(self._aliases, out)
        stdout.write("\n")

    def find_device(self, match):
        """Fuzzy-search the index for device names matching *match*.

        :param match: Search string.
        :type match: str
        :return: All equally-best matches as (name, index-entry) pairs.
        :rtype: [(str, dict)]
        """
        choices = process.extract(match, self.index.keys(),
                                  limit=len(self.index))
        choices = sorted([(v, k) for k, v in choices], reverse=True)
        # Keep only the run of results sharing the top score.
        if choices:
            choices = list(takewhile(lambda t: t[0] == choices[0][0],
                                     choices))
        return [(v, self.index[v]) for k, v in choices]

    def dump_index_to_file(self, file):
        """Write the current index to *file* as JSON."""
        with open(file, "wb+") as out:
            dump(self.index, out)

    @property
    def index(self):
        """An index of most of the important data in all cached PDSC files.

        :Example:

        >>> from ArmPackManager import Cache
        >>> a = Cache()
        >>> a.index["LPC1768"]
        {u'algorithm': {u'RAMsize': u'0x0FE0',
                u'RAMstart': u'0x10000000',
                u'name': u'Flash/LPC_IAP_512.FLM',
                u'size': u'0x80000',
                u'start': u'0x00000000'},
         u'compile': [u'Device/Include/LPC17xx.h', u'LPC175x_6x'],
         u'debug': u'SVD/LPC176x5x.svd',
         u'pdsc_file': u'http://www.keil.com/pack/Keil.LPC1700_DFP.pdsc',
         u'memory': {u'IRAM1': {u'size': u'0x8000', u'start': u'0x10000000'},
                     u'IRAM2': {u'size': u'0x8000', u'start': u'0x2007C000'},
                     u'IROM1': {u'size': u'0x80000', u'start': u'0x00000000'}}}

        """
        # Lazily loaded from the on-disk JSON the first time it is read.
        if not self._index:
            with open(LocalPackIndex) as i:
                self._index = load(i)
        return self._index

    @property
    def aliases(self):
        """A mapping from board names to mounted-device names, extracted
        from the ``<board>`` elements of all cached PDSC files.

        Lazily loaded from ``LocalPackAliases`` the first time it is read.
        (The previous docstring was a copy-paste of :attr:`index`'s.)

        :rtype: dict
        """
        if not self._aliases:
            with open(LocalPackAliases) as i:
                self._aliases = load(i)
        return self._aliases

    def cache_everything(self):
        """Cache every PACK and PDSC file known.

        Generates an index afterwards.

        .. note:: This process may use 4GB of drive space and take upwards of 10 minutes to complete.
        """
        self.cache_pack_list(self.get_urls())
        self.generate_index()
        self.generate_aliases()

    def cache_descriptors(self):
        """Cache every PDSC file known.

        Generates an index afterwards.

        .. note:: This process may use 11MB of drive space and take upwards of 1 minute.
        """
        self.cache_descriptor_list(self.get_urls())
        self.generate_index()
        self.generate_aliases()

    def cache_descriptor_list(self, list):
        """Cache a list of PDSC files.

        :param list: URLs of PDSC files to cache.
        :type list: [str]
        """
        # NOTE(review): parameter name shadows the builtin "list"; kept for
        # backward compatibility with keyword callers.
        self.total = len(list)
        self.display_counter("Caching Files")
        do_queue(Reader, self.cache_file, list)
        stdout.write("\n")

    def cache_pack_list(self, list):
        """Cache a list of PACK files, referenced by their PDSC URL

        :param list: URLs of PDSC files to cache.
        :type list: [str]
        """
        # Each URL produces two downloads (PDSC + PACK), hence * 2.
        self.total = len(list) * 2
        self.display_counter("Caching Files")
        do_queue(Reader, self.cache_pdsc_and_pack, list)
        stdout.write("\n")

    def pdsc_from_cache(self, url):
        """Low level inteface for extracting a PDSC file from the cache.

        Assumes that the file specified is a PDSC file and is in the cache.

        :param url: The URL of a PDSC file.
        :type url: str
        :return: A parsed representation of the PDSC file.
        :rtype: BeautifulSoup
        """
        dest = join(self.data_path, strip_protocol(url))
        with open(dest, "r") as fd:
            return BeautifulSoup(fd, "html.parser")

    def pack_from_cache(self, device):
        """Low level inteface for extracting a PACK file from the cache.

        Assumes that the file specified is a PACK file and is in the cache.

        :param device: An index entry (as produced by _extract_dict) whose
            'pack_file' URL locates the PACK in the cache.
        :type device: dict
        :return: A parsed representation of the PACK file.
        :rtype: ZipFile
        """
        return ZipFile(join(self.data_path,
                            strip_protocol(device['pack_file'])))

    def gen_dict_from_cache(self):
        # Fix: was defined without "self" and referenced the undefined names
        # "RootPackUrl" and bare "pdsc_from_cache", so any call raised
        # TypeError/NameError.  Appears to be an unfinished stub.
        pdsc_files = self.pdsc_from_cache(RootPackURL)

    def cache_and_parse(self, url):
        """A low level shortcut that Caches and Parses a PDSC file.

        :param url: The URL of the PDSC file.
        :type url: str
        :return: A parsed representation of the PDSC file.
        :rtype: BeautifulSoup
        """
        self.cache_file(url)
        return self.pdsc_from_cache(url)
Generated on Sun Jul 17 2022 08:25:18 by 1.7.2