Search is not available for this dataset
text
stringlengths
75
104k
def blank_tiles(input_word):
    """Split a rack string into regular letters and blank-tile markers.

    Args:
        input_word: the user supplied string to search through

    Returns:
        a tuple of:
            list of non-blank characters from input_word
            integer number of "_" blanks (no points)
            integer number of "?" blanks (points)
    """
    blank_count = input_word.count("_")
    question_count = input_word.count("?")
    letters = [char for char in input_word if char not in ("_", "?")]
    return letters, blank_count, question_count
def word_list(sowpods=False, start="", end=""):
    """Open the word list file and yield words one at a time.

    Args:
        sowpods: a boolean to declare using the sowpods list or TWL (default)
        start: a string of starting characters to find anagrams based on
        end: a string of ending characters to find anagrams based on

    Yields:
        a word at a time out of 178691 words for TWL, 267751 for sowpods.
        Much less if either start or end are used (filtering is applied here)
    """
    location = os.path.join(
        os.path.dirname(os.path.realpath(__file__)),
        "wordlists",
    )
    filename = "sowpods.txt" if sowpods else "twl.txt"
    filepath = os.path.join(location, filename)
    with open(filepath) as wordfile:
        # startswith/endswith are always True for the empty string, so the
        # original four-way branch collapses to this single test. Iterating
        # the file object also avoids readlines() loading it all into memory.
        for line in wordfile:
            word = line.strip()
            if word.startswith(start) and word.endswith(end):
                yield word
def valid_scrabble_word(word):
    """Check if the input word could be played with a full bag of tiles.

    "?" characters are skipped entirely; any letter used beyond the bag's
    supply consumes one of the two blank ("_") tiles instead.

    Returns:
        True or False
    """
    tile_counts = {
        "a": 9, "b": 2, "c": 2, "d": 4, "e": 12, "f": 2, "g": 3,
        "h": 2, "i": 9, "j": 1, "k": 1, "l": 4, "m": 2, "n": 6,
        "o": 8, "p": 2, "q": 1, "r": 6, "s": 4, "t": 6, "u": 4,
        "v": 2, "w": 2, "x": 1, "y": 2, "z": 1, "_": 2,
    }
    for char in word:
        if char == "?":
            continue
        if char not in tile_counts:
            # Not a tile that exists in the bag at all.
            return False
        tile_counts[char] -= 1
        if tile_counts[char] < 0:
            # Out of this letter; burn a blank instead.
            tile_counts["_"] -= 1
            if tile_counts["_"] < 0:
                return False
    return True
def main(args):
    """Search StackOverflow for ``args.query`` and print the best answer's code.

    Prints an apology when no result (or no answer) can be found.

    :param args: argparse Namespace with ``query`` (list of words) and ``tags``
    """
    try:
        # "?" confuses the search backend, so strip it from the query.
        args.query = ' '.join(args.query).replace('?', '')
        so = SOSearch(args.query, args.tags)
        result = so.first_q().best_answer.code
        # "is not None" instead of "!= None" -- identity test for the
        # None singleton is the idiomatic (and safer) comparison.
        if result is not None:
            print(result)
        else:
            print("Sorry I can't find your answer, try adding tags")
    except NoResult:
        # The bound exception was unused; drop the "as e" binding.
        print("Sorry I can't find your answer, try adding tags")
def cli_run():
    """Build the command-line parser and dispatch the parsed args to main()."""
    arg_parser = argparse.ArgumentParser(
        description='Stupidly simple code answers from StackOverflow')
    arg_parser.add_argument('query', help="What's the problem ?",
                            type=str, nargs='+')
    arg_parser.add_argument('-t', '--tags',
                            help='semicolon separated tags -> python;lambda')
    main(arg_parser.parse_args())
def stringReceived(self, string):
    """Handle a JSON AMP dialect request.

    Parses the JSON payload, converts dialect-specific values into the
    correct objects, locates the responder for the requested command,
    runs it, and writes the serialized result (or error) back out.
    """
    request = loads(string)
    identifier = request.pop("_ask")
    name = request.pop("_command")
    command, responder = self._getCommandAndResponder(name)
    self._parseRequestValues(request, command)
    deferred = self._runResponder(responder, request, command, identifier)
    deferred.addCallback(self._writeResponse)
def _getCommandAndResponder(self, commandName): """Gets the command class and matching responder function for the given command name. """ # DISGUSTING IMPLEMENTATION DETAIL EXPLOITING HACK locator = self._remote.boxReceiver.locator responder = locator.locateResponder(commandName) responderFunction = responder.func_closure[1].cell_contents command = responder.func_closure[2].cell_contents return command, responderFunction
def _parseRequestValues(self, request, command):
    """Decode all values in *request* that use a JSON-AMP-dialect-specific
    form, replacing them in place with their deserialized equivalents.
    """
    for key, ampType in command.arguments:
        ampClass = ampType.__class__
        if ampClass is exposed.ExposedResponderLocator:
            request[key] = self._remote
            continue
        decoder = _decoders.get(ampClass)
        if decoder is None:
            continue
        request[key] = decoder(request.get(key), self)
def _runResponder(self, responder, request, command, identifier):
    """Run the responser function. If it succeeds, add the _answer key. If it
    fails with an error known to the command, serialize the error.

    :param responder: the responder callable for this command.
    :param request: dict of already-decoded keyword arguments.
    :param command: the AMP command class (supplies ``allErrors``).
    :param identifier: the ``_ask`` identifier echoed back to the peer.
    :return: a Deferred firing with the response (or error) dict.
    """
    d = defer.maybeDeferred(responder, **request)
    def _addIdentifier(response):
        """Return the response with an ``_answer`` key.
        """
        response["_answer"] = identifier
        return response
    def _serializeFailure(failure):
        """
        If the failure is serializable by this AMP command, serialize it.
        """
        # trap() re-raises failures not in allErrors; for known ones it
        # returns the matching exception class, which keys the error code.
        key = failure.trap(*command.allErrors)
        response = {
            "_error_code": command.allErrors[key],
            "_error_description": str(failure.value),
            "_error": identifier
        }
        return response
    d.addCallbacks(_addIdentifier, _serializeFailure)
    return d
def _writeResponse(self, response):
    """Serialize *response* to JSON and write it to the transport."""
    self.transport.write(dumps(response, default=_default))
def connectionLost(self, reason):
    """
    Tells the box receiver to stop receiving boxes.

    :param reason: a Failure describing why the connection ended.
    """
    self._remote.boxReceiver.stopReceivingBoxes(reason)
    # Let the netstring base class run its own teardown as well.
    return basic.NetstringReceiver.connectionLost(self, reason)
def buildProtocol(self, addr):
    """
    Builds a bridge and associates it with an AMP protocol instance.

    :param addr: the address of the incoming connection.
    :return: a JSONAMPDialectReceiver wrapping the inner factory's protocol.
    """
    proto = self._factory.buildProtocol(addr)
    return JSONAMPDialectReceiver(proto)
def get_bundle(iss, ver_keys, bundle_file):
    """
    Read a signed JWKS bundle from disc, verify the signature and
    instantiate a JWKSBundle instance with the information from the file.

    :param iss: issuer ID for the new bundle.
    :param ver_keys: keys used to verify the bundle signature.
    :param bundle_file: path of the file holding the signed bundle.
    :return: a JWKSBundle instance.
    """
    # Context manager guarantees the file is closed even on read errors.
    with open(bundle_file, 'r') as fp:
        signed_bundle = fp.read()
    # Keep our own reference to the bundle: the original returned the
    # result of upload_signed_bundle(), which does not return the instance,
    # so the function always returned None.
    bundle = JWKSBundle(iss, None)
    bundle.upload_signed_bundle(signed_bundle, ver_keys)
    return bundle
def get_signing_keys(eid, keydef, key_file):
    """
    If the *key_file* file exists then read the keys from there, otherwise
    create the keys and store them in a file with the name *key_file*.

    :param eid: The ID of the entity that the keys belongs to
    :param keydef: What keys to create
    :param key_file: A file name
    :return: A :py:class:`oidcmsg.key_jar.KeyJar` instance
    """
    if os.path.isfile(key_file):
        kj = KeyJar()
        # with-statement closes the handle (the original leaked it).
        with open(key_file, 'r') as fp:
            kj.import_jwks(json.loads(fp.read()), eid)
    else:
        kj = build_keyjar(keydef)[1]
        with open(key_file, 'w') as fp:
            fp.write(json.dumps(kj.export_jwks()))
        # make the new keys known under both names: build_keyjar files them
        # under '', the caller expects them under eid as well
        kj.issuer_keys[eid] = kj.issuer_keys['']
    return kj
def jwks_to_keyjar(jwks, iss=''):
    """
    Convert a JWKS to a KeyJar instance.

    :param jwks: A JWKS, as a dictionary or its JSON serialization
    :param iss: the issuer ID to file the keys under
    :return: A :py:class:`oidcmsg.key_jar.KeyJar` instance
    """
    if not isinstance(jwks, dict):
        try:
            jwks = json.loads(jwks)
        except json.JSONDecodeError:
            raise ValueError('No proper JSON')
    keyjar = KeyJar()
    keyjar.import_jwks(jwks, issuer=iss)
    return keyjar
def create_signed_bundle(self, sign_alg='RS256', iss_list=None):
    """
    Create a signed JWT containing a dictionary with Issuer IDs as keys
    and JWKSs as values. If iss_list is empty then all available issuers
    are included.

    :param sign_alg: Which algorithm to use when signing the JWT
    :param iss_list: A list of issuer IDs who's keys should be included in
        the signed bundle.
    :return: A signed JWT
    """
    payload = {'bundle': self.dict(iss_list)}
    packer = JWT(self.sign_keys, iss=self.iss, sign_alg=sign_alg)
    return packer.pack(payload)
def loads(self, jstr):
    """
    Upload a bundle from an unsigned JSON document.

    :param jstr: A bundle as a dictionary or a JSON document
    :return: this bundle instance (allows chaining)
    """
    _info = jstr if isinstance(jstr, dict) else json.loads(jstr)
    for iss, jwks in _info.items():
        keyjar = KeyJar()
        if isinstance(jwks, dict):
            keyjar.import_jwks(jwks, issuer=iss)
        else:
            keyjar.import_jwks_as_json(jwks, issuer=iss)
        self.bundle[iss] = keyjar
    return self
def dict(self, iss_list=None):
    """
    Return the bundle of keys as a dictionary with the issuer IDs as the
    keys and the key sets serialized as JWKS JSON documents
    (``export_jwks_as_json`` returns JSON strings, not JWKS instances as
    the original docstring claimed).

    :param iss_list: List of Issuer IDs that should be part of the output;
        all issuers are included when None.
    :rtype: Dictionary
    """
    _int = {}
    for iss, kj in self.bundle.items():
        if iss_list is not None and iss not in iss_list:
            continue
        try:
            _int[iss] = kj.export_jwks_as_json(issuer=iss)
        except KeyError:
            # Keys may be filed under the default (empty) issuer name.
            _int[iss] = kj.export_jwks_as_json()
    return _int
def upload_signed_bundle(self, sign_bundle, ver_keys):
    """
    Input is a signed JWT with a JSON document representing the key bundle
    as body. This method verifies the signature and then updates the
    instance bundle with whatever was in the received package. Note, that
    as with dictionary update if an Issuer ID already exists in the
    instance bundle that will be overwritten with the new information.

    :param sign_bundle: A signed JWT
    :param ver_keys: Keys that can be used to verify the JWT signature.
    :return: this bundle instance, so calls can be chained (the original
        returned None, which broke callers that used the return value).
    """
    jwt = verify_signed_bundle(sign_bundle, ver_keys)
    # loads() returns self; propagate it for chaining.
    return self.loads(jwt['bundle'])
def as_keyjar(self):
    """
    Convert a key bundle into a KeyJar instance.

    :return: An :py:class:`oidcmsg.key_jar.KeyJar` instance
    """
    keyjar = KeyJar()
    for iss, bundle_kj in self.bundle.items():
        try:
            keyjar.issuer_keys[iss] = bundle_kj.issuer_keys[iss]
        except KeyError:
            # Fall back to keys filed under the default (empty) issuer.
            keyjar.issuer_keys[iss] = bundle_kj.issuer_keys['']
    return keyjar
def make_shortcut(cmd):
    """Return a function which runs the given cmd.

    ``make_shortcut('ls')`` returns a function which executes
    ``run('ls ' + arguments)``.
    """
    def _(cmd_arguments, *args, **kwargs):
        full_command = "%s %s" % (cmd, cmd_arguments)
        return run(full_command, *args, **kwargs)
    return _
def nova_process(body, message):
    """
    Dispatch a nova notification to the right handler.

    Resolution order: exact event_type match in nova_customer_process,
    then the first matching pattern in nova_customer_process_wildcard,
    otherwise the ternya default process. The message is acknowledged
    afterwards in every case.

    :param body: dict of openstack notification.
    :param message: kombu Message class
    :return:
    """
    event_type = body['event_type']
    handler = nova_customer_process.get(event_type)
    if handler is None:
        handler = default_process
        for pattern, wildcard_handler in nova_customer_process_wildcard.items():
            if pattern.match(event_type):
                handler = wildcard_handler
                break
    handler(body, message)
    message.ack()
def cinder_process(body, message):
    """
    Dispatch a cinder notification to the right handler.

    Resolution order: exact event_type match in cinder_customer_process,
    then the first matching pattern in cinder_customer_process_wildcard,
    otherwise the ternya default process. The message is acknowledged
    afterwards in every case.

    :param body: dict of openstack notification.
    :param message: kombu Message class
    :return:
    """
    event_type = body['event_type']
    handler = cinder_customer_process.get(event_type)
    if handler is None:
        handler = default_process
        for pattern, wildcard_handler in cinder_customer_process_wildcard.items():
            if pattern.match(event_type):
                handler = wildcard_handler
                break
    handler(body, message)
    message.ack()
def neutron_process(body, message):
    """
    Dispatch a neutron notification to the right handler.

    Resolution order: exact event_type match in neutron_customer_process,
    then the first matching pattern in neutron_customer_process_wildcard,
    otherwise the ternya default process. The message is acknowledged
    afterwards in every case.

    :param body: dict of openstack notification.
    :param message: kombu Message class
    :return:
    """
    event_type = body['event_type']
    handler = neutron_customer_process.get(event_type)
    if handler is None:
        handler = default_process
        for pattern, wildcard_handler in neutron_customer_process_wildcard.items():
            if pattern.match(event_type):
                handler = wildcard_handler
                break
    handler(body, message)
    message.ack()
def glance_process(body, message):
    """
    Dispatch a glance notification to the right handler.

    Resolution order: exact event_type match in glance_customer_process,
    then the first matching pattern in glance_customer_process_wildcard,
    otherwise the ternya default process. The message is acknowledged
    afterwards in every case.

    :param body: dict of openstack notification.
    :param message: kombu Message class
    :return:
    """
    event_type = body['event_type']
    handler = glance_customer_process.get(event_type)
    if handler is None:
        handler = default_process
        for pattern, wildcard_handler in glance_customer_process_wildcard.items():
            if pattern.match(event_type):
                handler = wildcard_handler
                break
    handler(body, message)
    message.ack()
def swift_process(body, message):
    """
    Dispatch a swift notification to the right handler.

    Resolution order: exact event_type match in swift_customer_process,
    then the first matching pattern in swift_customer_process_wildcard,
    otherwise the ternya default process. The message is acknowledged
    afterwards in every case.

    :param body: dict of openstack notification.
    :param message: kombu Message class
    :return:
    """
    event_type = body['event_type']
    handler = swift_customer_process.get(event_type)
    if handler is None:
        handler = default_process
        for pattern, wildcard_handler in swift_customer_process_wildcard.items():
            if pattern.match(event_type):
                handler = wildcard_handler
                break
    handler(body, message)
    message.ack()
def keystone_process(body, message):
    """
    Dispatch a keystone notification to the right handler.

    Resolution order: exact event_type match in keystone_customer_process,
    then the first matching pattern in keystone_customer_process_wildcard,
    otherwise the ternya default process. The message is acknowledged
    afterwards in every case.

    :param body: dict of openstack notification.
    :param message: kombu Message class
    :return:
    """
    event_type = body['event_type']
    handler = keystone_customer_process.get(event_type)
    if handler is None:
        handler = default_process
        for pattern, wildcard_handler in keystone_customer_process_wildcard.items():
            if pattern.match(event_type):
                handler = wildcard_handler
                break
    handler(body, message)
    message.ack()
def heat_process(body, message):
    """
    Dispatch a heat notification to the right handler.

    Resolution order: exact event_type match in heat_customer_process,
    then the first matching pattern in heat_customer_process_wildcard,
    otherwise the ternya default process. The message is acknowledged
    afterwards in every case.

    :param body: dict of openstack notification.
    :param message: kombu Message class
    :return:
    """
    event_type = body['event_type']
    handler = heat_customer_process.get(event_type)
    if handler is None:
        handler = default_process
        for pattern, wildcard_handler in heat_customer_process_wildcard.items():
            if pattern.match(event_type):
                handler = wildcard_handler
                break
    handler(body, message)
    message.ack()
def serve(self, server=None):
    """Serve app using wsgiref or provided server.

    Args:
        server (callable): a callable that takes the WSGI app and serves
            it; when None, a wsgiref server on port 8000 is used.
    """
    if server is not None:
        # Custom server: just hand over the app. The original version ran
        # ``server.socket.close()`` in a finally block here, which crashed:
        # ``server`` is a callable, not a server instance with a socket.
        print('Listening on 0.0.0.0:8000')
        server(self)
        return
    from wsgiref.simple_server import make_server
    httpd = make_server('', 8000, self)
    print('Listening on 0.0.0.0:8000')
    try:
        httpd.serve_forever()
    finally:
        # Release the listening socket even when serve_forever() raises
        # (e.g. on KeyboardInterrupt).
        httpd.socket.close()
def pout(msg, log=None):
    """Print 'msg' to stdout, and option 'log' at info level.

    :param msg: the message to print.
    :param log: optional logger; when given, msg is also passed to log.info.
    """
    _print(msg, sys.stdout, log_func=log.info if log else None)
def perr(msg, log=None):
    """Print 'msg' to stderr, and optionally 'log' at error level.

    (Docstring fixed: the original said "info level" but log.error is used.)

    :param msg: the message to print.
    :param log: optional logger; when given, msg is also passed to log.error.
    """
    _print(msg, sys.stderr, log_func=log.error if log else None)
def register(CommandSubClass):
    """A class decorator for Command classes to register in the default set.

    :param CommandSubClass: the Command subclass being registered.
    :raises ValueError: when a command with the same name already exists.
    :returns: CommandSubClass unchanged, so it can be used as a decorator.
    """
    name = CommandSubClass.name()
    if name in Command._all_commands:
        raise ValueError("Command already exists: " + name)
    Command._all_commands[name] = CommandSubClass
    return CommandSubClass
def register(Class, CommandSubClass):
    """A class decorator for Command classes to register.

    Registers the subclass under its name and every alias; raises
    ValueError on a duplicate and returns the subclass unchanged.
    """
    registry = Class._registered_commands[Class]
    for name in [CommandSubClass.name()] + CommandSubClass.aliases():
        if name in registry:
            raise ValueError("Command already exists: " + name)
        registry[name] = CommandSubClass
    return CommandSubClass
def loadCommandMap(Class, subparsers=None, instantiate=True, **cmd_kwargs):
    """Instantiate each registered command to a dict mapping name/alias to
    instance.

    Due to aliases, the returned length may be greater than the number of
    commands, but the unique instance count will match.

    :param Class: the class whose registered commands are loaded.
    :param subparsers: forwarded to each command's constructor.
    :param instantiate: when False, map names to the command classes instead.
    :param cmd_kwargs: extra keyword arguments for each constructor.
    :raises ValueError: when no commands have been registered with Class.
    """
    if not Class._registered_commands:
        raise ValueError("No commands have been registered with {}"
                         .format(Class))
    # Renamed from ``all`` -- don't shadow the builtin.
    command_map = {}
    for Cmd in set(Class._registered_commands[Class].values()):
        cmd = Cmd(subparsers=subparsers, **cmd_kwargs) \
            if instantiate else Cmd
        for name in [Cmd.name()] + Cmd.aliases():
            command_map[name] = cmd
    return command_map
def toString(self, value):
    """
    If all of the constraints are satisfied with the given value, defers
    to the composed AMP argument's ``toString`` method.

    :param value: the value to serialize.
    :raises: whatever _checkConstraints raises for an unsatisfied constraint.
    """
    self._checkConstraints(value)
    return self.baseArgument.toString(value)
def fromString(self, string):
    """
    Converts the string to a value using the composed AMP argument, then
    checks all the constraints against that value.

    :param string: the serialized form to parse.
    :raises: whatever _checkConstraints raises for an unsatisfied constraint.
    """
    value = self.baseArgument.fromString(string)
    self._checkConstraints(value)
    return value
def _updateCompleterDict(completers, cdict, regex=None): """Merges ``cdict`` into ``completers``. In the event that a key in cdict already exists in the completers dict a ValueError is raised iff ``regex`` false'y. If a regex str is provided it and the duplicate key are updated to be unique, and the updated regex is returned. """ for key in cdict: if key in completers and not regex: raise ValueError(f"Duplicate completion key: {key}") if key in completers: uniq = "_".join([key, str(uuid.uuid4()).replace("-", "")]) regex = regex.replace(f"P<{key}>", f"P<{uniq}>") completers[uniq] = cdict[key] else: completers[key] = cdict[key] return regex
def get_completions(self, document, complete_event):
    """Yield Completion objects for every candidate word matching the text
    (or word) before the cursor.

    Matching honours the ``sentence``, ``ignore_case``, ``match_middle``
    and ``WORD`` options; candidates come from ``_words_callable`` when
    set, otherwise from ``self.words``.
    """
    # Get word/text before cursor.
    if self.sentence:
        word_before_cursor = document.text_before_cursor
    else:
        word_before_cursor = document.get_word_before_cursor(WORD=self.WORD)
    if self.ignore_case:
        word_before_cursor = word_before_cursor.lower()

    def word_matches(word):
        """True when the word before the cursor matches *word*."""
        if self.ignore_case:
            word = word.lower()
        if self.match_middle:
            return word_before_cursor in word
        return word.startswith(word_before_cursor)

    # Removed a block of commented-out debug logging that was kept as a
    # no-op triple-quoted string expression.
    words = self._words_callable() if self._words_callable else self.words
    for candidate in words:
        if not word_matches(candidate):
            continue
        display_meta = self.meta_dict.get(candidate, '')
        log.debug(f"MATCH: {candidate}, {-len(word_before_cursor)},"
                  f" meta: {display_meta}")
        yield Completion(self.quote(candidate), -len(word_before_cursor),
                         display_meta=display_meta)
def work(self):
    """
    Start ternya work.

    First, import customer's service modules.
    Second, init openstack mq.
    Third, keep a ternya connection that can auto-reconnect.
    """
    self.init_modules()
    connection = self.init_mq()
    # TernyaConnection owns the reconnect handling for the broker link.
    TernyaConnection(self, connection).connect()
def init_mq(self):
    """Init connection and consumer with openstack mq.

    :return: the underlying broker connection of the created MQ helper.
    """
    mq = self.init_connection()
    self.init_consumer(mq)
    return mq.connection
def init_modules(self):
    """Import customer's service modules, failing fast when no
    configuration has been loaded."""
    if not self.config:
        raise ValueError("please read your config file.")
    log.debug("begin to import customer's service modules.")
    service_modules = ServiceModules(self.config)
    service_modules.import_modules()
    log.debug("end to import customer's service modules.")
def init_nova_consumer(self, mq):
    """
    Init openstack nova mq

    1. Check if enable listening nova notification
    2. Create consumer

    :param mq: class ternya.mq.MQ
    """
    if not self.enable_component_notification(Openstack.Nova):
        log.debug("disable listening nova notification")
        return
    consumer_total = self.config.nova_mq_consumer_count
    for _ in range(consumer_total):
        mq.create_consumer(self.config.nova_mq_exchange,
                           self.config.nova_mq_queue,
                           ProcessFactory.process(Openstack.Nova))
    log.debug("enable listening openstack nova notification.")
def init_cinder_consumer(self, mq):
    """
    Init openstack cinder mq

    1. Check if enable listening cinder notification
    2. Create consumer

    :param mq: class ternya.mq.MQ
    """
    if not self.enable_component_notification(Openstack.Cinder):
        log.debug("disable listening cinder notification")
        return
    consumer_total = self.config.cinder_mq_consumer_count
    for _ in range(consumer_total):
        mq.create_consumer(self.config.cinder_mq_exchange,
                           self.config.cinder_mq_queue,
                           ProcessFactory.process(Openstack.Cinder))
    log.debug("enable listening openstack cinder notification.")
def init_neutron_consumer(self, mq):
    """
    Init openstack neutron mq

    1. Check if enable listening neutron notification
    2. Create consumer

    :param mq: class ternya.mq.MQ
    """
    if not self.enable_component_notification(Openstack.Neutron):
        log.debug("disable listening neutron notification")
        return
    consumer_total = self.config.neutron_mq_consumer_count
    for _ in range(consumer_total):
        mq.create_consumer(self.config.neutron_mq_exchange,
                           self.config.neutron_mq_queue,
                           ProcessFactory.process(Openstack.Neutron))
    log.debug("enable listening openstack neutron notification.")
def init_glance_consumer(self, mq):
    """
    Init openstack glance mq

    1. Check if enable listening glance notification
    2. Create consumer

    :param mq: class ternya.mq.MQ
    """
    if not self.enable_component_notification(Openstack.Glance):
        log.debug("disable listening glance notification")
        return
    consumer_total = self.config.glance_mq_consumer_count
    for _ in range(consumer_total):
        mq.create_consumer(self.config.glance_mq_exchange,
                           self.config.glance_mq_queue,
                           ProcessFactory.process(Openstack.Glance))
    log.debug("enable listening openstack glance notification.")
def init_swift_consumer(self, mq):
    """
    Init openstack swift mq

    1. Check if enable listening swift notification
    2. Create consumer

    :param mq: class ternya.mq.MQ
    """
    if not self.enable_component_notification(Openstack.Swift):
        log.debug("disable listening swift notification")
        return
    consumer_total = self.config.swift_mq_consumer_count
    for _ in range(consumer_total):
        mq.create_consumer(self.config.swift_mq_exchange,
                           self.config.swift_mq_queue,
                           ProcessFactory.process(Openstack.Swift))
    log.debug("enable listening openstack swift notification.")
def init_keystone_consumer(self, mq):
    """
    Init openstack keystone mq (docstring fixed: the original said "swift",
    a copy-paste error)

    1. Check if enable listening keystone notification
    2. Create consumer

    :param mq: class ternya.mq.MQ
    """
    if not self.enable_component_notification(Openstack.Keystone):
        log.debug("disable listening keystone notification")
        return
    for i in range(self.config.keystone_mq_consumer_count):
        mq.create_consumer(self.config.keystone_mq_exchange,
                           self.config.keystone_mq_queue,
                           ProcessFactory.process(Openstack.Keystone))
    log.debug("enable listening openstack keystone notification.")
def init_heat_consumer(self, mq):
    """
    Init openstack heat mq

    1. Check if enable listening heat notification
    2. Create consumer

    :param mq: class ternya.mq.MQ
    """
    if not self.enable_component_notification(Openstack.Heat):
        log.debug("disable listening heat notification")
        return
    consumer_total = self.config.heat_mq_consumer_count
    for _ in range(consumer_total):
        mq.create_consumer(self.config.heat_mq_exchange,
                           self.config.heat_mq_queue,
                           ProcessFactory.process(Openstack.Heat))
    log.debug("enable listening openstack heat notification.")
def enable_component_notification(self, openstack_component):
    """
    Check if customer enable openstack component notification.

    :param openstack_component: Openstack component type.
    :return: the configured listen flag for that component.
    """
    listen_flags = {
        Openstack.Nova: self.config.listen_nova_notification,
        Openstack.Cinder: self.config.listen_cinder_notification,
        Openstack.Neutron: self.config.listen_neutron_notification,
        Openstack.Glance: self.config.listen_glance_notification,
        Openstack.Swift: self.config.listen_swift_notification,
        Openstack.Keystone: self.config.listen_keystone_notification,
        Openstack.Heat: self.config.listen_heat_notification,
    }
    return listen_flags[openstack_component]
def music_info(songid):
    """Get music info from baidu music api.

    :param songid: a song id string, or a list of ids (joined with commas).
    :return: list of dicts with name/singer/lrc_link/song_link/size keys.
    """
    if isinstance(songid, list):
        songid = ','.join(songid)
    payload = {"hq": 1, "songIds": songid}
    response = requests.post(MUSIC_INFO_URL, data=payload)
    music_data = response.json()["data"]
    songs = []
    for song in music_data["songList"]:
        song_link, size = _song_link(song, music_data["xcode"])
        songs.append({
            "name": song["songName"],
            "singer": song["artistName"],
            "lrc_link": song["lrcLink"],
            "song_link": song_link,
            "size": size,
        })
    return songs
def download_music(song, thread_num=4):
    """Download one song with multiple threads and stitch the parts together.

    :param song: dict with at least "name" and "size" keys (see music_info).
    :param thread_num: number of Worker threads; falls back to 1 for files
        whose per-thread share would be <= 1 KiB.
    """
    filename = "{}.mp3".format(song["name"])
    if os.path.exists(filename):
        os.remove(filename)
    # Size of each worker's byte range.
    part = int(song["size"] / thread_num)
    if part <= 1024:
        thread_num = 1
    # Unique id so concurrent downloads don't mix up their part files.
    _id = uuid.uuid4().hex
    logger.info("downloading '{}'...".format(song["name"]))
    threads = []
    for i in range(thread_num):
        if i == thread_num - 1:
            # Last worker gets an open-ended range to the end of the file.
            end = ''
        else:
            end = (i + 1) * part - 1
        thread = Worker((i * part, end), song, _id)
        thread.start()
        threads.append(thread)
    for t in threads:
        t.join()
    fileParts = glob.glob("part-{}-*".format(_id))
    # Parts are named part-<id>-<index>; order by index before joining.
    # NOTE(review): this is a *string* sort, so more than 9 parts would
    # interleave ("10" < "2") -- fine for the default thread_num=4, but
    # verify before raising thread_num.
    fileParts.sort(key=lambda e: e.split('-')[-1])
    logger.info("'{}' combine parts...".format(song["name"]))
    with open(filename, "ab") as f:
        for part in fileParts:
            with open(part, "rb") as d:
                shutil.copyfileobj(d, f)
            os.remove(part)
    logger.info("'{}' finished".format(song["name"]))
def execute(self, globals_=None, _locals=None):
    """
    Execute a code object

    The inputs and behavior of this function should match those of
    eval_ and exec_.

    .. _eval: https://docs.python.org/3/library/functions.html?highlight=eval#eval
    .. _exec: https://docs.python.org/3/library/functions.html?highlight=exec#exec

    .. note:: Need to figure out how the internals of this function must
        change for ``eval`` or ``exec``.

    :param code: a python code object
    :param globals_: optional globals dictionary
    :param _locals: optional locals dictionary
    """
    # Mirror eval()/exec(): when no globals are given, fall back to this
    # module's globals(); locals default to the globals mapping.
    if globals_ is None:
        globals_ = globals()
    if _locals is None:
        self._locals = globals_
    else:
        self._locals = _locals
    self.globals_ = globals_
    # Generator code objects must be stepped lazily, one instruction at a
    # time, rather than executed to completion.
    if self.contains_op("YIELD_VALUE"):
        return self.iterate_instructions()
    else:
        return self.execute_instructions()
def load_name(self, name):
    """
    Implementation of the LOAD_NAME operation.

    Looks *name* up in the machine's globals first, then falls back to
    ``__builtins__`` (which may be either a dict or a module).
    """
    try:
        return self.globals_[name]
    except KeyError:
        pass
    builtins_obj = self.globals_['__builtins__']
    if isinstance(builtins_obj, dict):
        return builtins_obj[name]
    return getattr(builtins_obj, name)
def pop(self, n):
    """
    Pop the **n** topmost items from the stack and return them as a
    ``list`` (lowest of the popped items first).
    """
    cut = len(self.__stack) - n
    popped = self.__stack[cut:]
    del self.__stack[cut:]
    return popped
def build_class(self, callable_, args):
    """
    Implement ``builtins.__build_class__``.

    We must wrap all class member functions using
    :py:func:`function_wrapper`. This requires using a :py:class:`Machine`
    to execute the class source code and then recreating the class source
    code using an :py:class:`Assembler`.

    .. note: We might be able to bypass the call to
        ``builtins.__build_class__`` entirely and manually construct a
        class object.
        https://github.com/python/cpython/blob/master/Python/bltinmodule.c

    :param callable_: ``builtins.__build_class__`` itself.
    :param args: the call arguments; args[0] is the class-body function,
        args[1] the class name.
    """
    self._print('build_class')
    self._print(callable_)
    self._print('args=',args)
    # Unwrap args[0] to its code object, whether it is the project's
    # FunctionType wrapper or a plain closure over the real function.
    if isinstance(args[0], FunctionType):
        c = args[0].get_code()
    else:
        c = args[0].__closure__[0].cell_contents.__code__
    # execute the original class source code
    print('execute original class source code')
    machine = MachineClassSource(c, self.verbose)
    l = dict()
    machine.execute(self.globals_, l)
    # construct new code for class source: re-emit the captured class
    # namespace as LOAD_CONST/STORE_NAME pairs so __build_class__ can
    # re-run it as a class body.
    a = Assembler()
    for name, value in l.items():
        a.load_const(value)
        a.store_name(name)
    a.load_const(None)
    a.return_value()
    print('new code for class source')
    dis.dis(a.code())
    #machine = Machine(self.verbose)
    f = types.FunctionType(a.code(), self.globals_, args[1])
    args = (f, *args[1:])
    self.call_callbacks('CALL_FUNCTION', callable_, *args)
    return callable_(*args)
def call_function(self, c, i):
    """
    Implement the CALL_FUNCTION_ operation.

    .. _CALL_FUNCTION: https://docs.python.org/3/library/dis.html#opcode-CALL_FUNCTION

    :param c: the code object being executed.
    :param i: the instruction; ``i.arg`` is the positional argument count.
    """
    # Stack layout: [..., callable, arg0 .. arg(i.arg - 1)].
    callable_ = self.__stack[-1-i.arg]
    args = tuple(self.__stack[len(self.__stack) - i.arg:])
    self._print('call function')
    self._print('\tfunction ', callable_)
    self._print('\ti.arg ', i.arg)
    self._print('\targs ', args)
    self.call_callbacks('CALL_FUNCTION', callable_, *args)
    # Wrapped functions and a couple of builtins get special handling so
    # the virtual machine stays in control of nested execution.
    if isinstance(callable_, FunctionType):
        ret = callable_(*args)
    elif callable_ is builtins.__build_class__:
        ret = self.build_class(callable_, args)
    elif callable_ is builtins.globals:
        ret = self.builtins_globals()
    else:
        ret = callable_(*args)
    # Pop the callable plus its arguments, then push the return value.
    self.pop(1 + i.arg)
    self.__stack.append(ret)
def dump(filename, dbname, username=None, password=None, host=None,
         port=None, tempdir='/tmp', mysqldump_path='mysqldump'):
    """Perform a mysqldump backup.

    Create a database dump for the given database.

    :param filename: name of the dump file, created inside *tempdir*.
    :param dbname: name of the database to dump.
    :param username/password/host/port: optional connection parameters.
    :param tempdir: directory the dump file is written to.
    :param mysqldump_path: path of the mysqldump binary.
    :returns: statuscode and shelloutput (whatever ``sh`` returns).
    """
    # Reuse the computed path (the original joined tempdir/filename twice
    # and left this variable unused).
    filepath = os.path.join(tempdir, filename)
    cmd = mysqldump_path
    cmd += ' --result-file=' + filepath
    if username:
        cmd += ' --user=%s' % username
    if host:
        cmd += ' --host=%s' % host
    if port:
        cmd += ' --port=%s' % port
    if password:
        # SECURITY: passing the password on the command line exposes it to
        # other local users via the process list; prefer a defaults file
        # (--defaults-extra-file) when hardening this.
        cmd += ' --password=%s' % password
    cmd += ' ' + dbname
    ## run mysqldump
    return sh(cmd)
def _connection(username=None, password=None, host=None, port=None):
    "returns a connected cursor to the database-server."
    options = {}
    if username:
        options['user'] = username
    if password:
        options['passwd'] = password
    if host:
        options['host'] = host
    if port:
        options['port'] = port
    connection = MySQLdb.connect(**options)
    connection.autocommit(True)
    return connection
def render_ditaa(self, code, options, prefix='ditaa'):
    """Render ditaa code into a PNG output file.

    :param code: the ditaa diagram source text.
    :param options: extra command-line options appended to ditaa_args.
    :param prefix: prefix for the generated input/output file names.
    :returns: ``(relative_fn, full_fn)`` of the PNG; the cached file when
        it already exists; ``(None, None)`` when ditaa cannot be run.
    :raises DitaaError: when ditaa exits with a non-zero status.
    """
    # Hash source + configuration so the output file name is stable for
    # identical input and changes whenever anything relevant changes.
    hashkey = code.encode('utf-8') + str(options) + \
        str(self.builder.config.ditaa) + \
        str(self.builder.config.ditaa_args)
    infname = '%s-%s.%s' % (prefix, sha(hashkey).hexdigest(), "ditaa")
    outfname = '%s-%s.%s' % (prefix, sha(hashkey).hexdigest(), "png")
    inrelfn = posixpath.join(self.builder.imgpath, infname)
    infullfn = path.join(self.builder.outdir, '_images', infname)
    outrelfn = posixpath.join(self.builder.imgpath, outfname)
    outfullfn = path.join(self.builder.outdir, '_images', outfname)
    # Cached result from a previous build: skip rendering entirely.
    if path.isfile(outfullfn):
        return outrelfn, outfullfn
    ensuredir(path.dirname(outfullfn))
    # ditaa expects UTF-8 by default
    if isinstance(code, unicode):
        code = code.encode('utf-8')
    ditaa_args = [self.builder.config.ditaa]
    ditaa_args.extend(self.builder.config.ditaa_args)
    ditaa_args.extend(options)
    ditaa_args.extend( [infullfn] )
    ditaa_args.extend( [outfullfn] )
    f = open(infullfn, 'w')
    f.write(code)
    f.close()
    try:
        # NOTE(review): warn() is given the argv list, not a string --
        # presumably intended as debug output; verify the intent.
        self.builder.warn(ditaa_args)
        p = Popen(ditaa_args, stdout=PIPE, stdin=PIPE, stderr=PIPE)
    except OSError, err:
        if err.errno != ENOENT:   # No such file or directory
            raise
        self.builder.warn('ditaa command %r cannot be run (needed for ditaa '
                          'output), check the ditaa setting' %
                          self.builder.config.ditaa)
        self.builder._ditaa_warned_dot = True
        return None, None
    wentWrong = False
    try:
        # Ditaa may close standard input when an error occurs,
        # resulting in a broken pipe on communicate()
        stdout, stderr = p.communicate(code)
    except OSError, err:
        if err.errno != EPIPE:
            raise
        wentWrong = True
    except IOError, err:
        if err.errno != EINVAL:
            raise
        wentWrong = True
    if wentWrong:
        # in this case, read the standard output and standard error streams
        # directly, to get the error message(s)
        stdout, stderr = p.stdout.read(), p.stderr.read()
        p.wait()
    if p.returncode != 0:
        raise DitaaError('ditaa exited with error:\n[stderr]\n%s\n'
                         '[stdout]\n%s' % (stderr, stdout))
    return outrelfn, outfullfn
def _atexit(self):
    """Invoked in the 'finally' block of Application.run."""
    self.log.debug("Application._atexit")
    # Run the registered atexit hook, if any, passing the app instance.
    if self._atexit_func:
        self._atexit_func(self)
def run(self, args_list=None):
    """Run Application.main and exits with the return value.

    :param args_list: optional list of command-line arguments, passed
        through to self._run

    Exit-code policy visible in the code below:
      * KeyboardInterrupt -> exits with retval None
      * SystemExit        -> exits with the original exit code
      * other exceptions  -> prints the traceback, optionally drops into a
        debugger when --debug-pdb was given, and re-raises
    """
    self.log.debug("Application.run: {args_list}".format(**locals()))
    retval = None
    try:
        retval = self._run(args_list=args_list)
    except KeyboardInterrupt:
        self.log.verbose("Interrupted")  # pragma: nocover
    except SystemExit as exit:
        # Preserve an explicit sys.exit() from main rather than
        # treating it as an error.
        self.log.verbose("Exited")
        retval = exit.code
    except Exception:
        print("Uncaught exception", file=sys.stderr)
        traceback.print_exc()
        if "debug_pdb" in self.args and self.args.debug_pdb:
            debugger()
        retval = Application.UNCAUGHT_EXCEPTION_EXIT
        raise
    finally:
        try:
            self._atexit()
        finally:
            # Flush both streams even if the atexit hook itself fails.
            sys.stderr.flush()
            sys.stdout.flush()
            # NOTE(review): sys.exit() inside this finally block replaces
            # the exception being re-raised above -- confirm intended.
            sys.exit(retval)
def cd(path):
    """Context manager that changes to directory `path` and return to
    CWD when exited.
    """
    previous_dir = os.getcwd()
    os.chdir(path)
    try:
        yield
    finally:
        # Always restore the original working directory, even on error.
        os.chdir(previous_dir)
def copytree(src, dst, symlinks=True):
    """
    Recursively copy the directory tree at `src` into `dst`, merging into
    `dst` if it already exists (unlike shutil.copytree, which requires
    that `dst` not exist). Modified from the shutil.copytree docs sample.

    :param src: str path of the source directory
    :param dst: str path of the destination directory (created if missing)
    :param symlinks: bool, if True recreate symlinks instead of copying
        their targets
    :raises shutil.Error: with a list of (src, dst, reason) tuples for
        every entry that could not be copied
    """
    from shutil import copy2, Error, copystat
    names = os.listdir(src)
    if not Path(dst).exists():
        os.makedirs(dst)
    errors = []
    for name in names:
        srcname = os.path.join(src, name)
        dstname = os.path.join(dst, name)
        try:
            if symlinks and os.path.islink(srcname):
                linkto = os.readlink(srcname)
                os.symlink(linkto, dstname)
            elif os.path.isdir(srcname):
                copytree(srcname, dstname, symlinks)
            else:
                copy2(srcname, dstname)
            # XXX What about devices, sockets etc.?
        except OSError as why:
            errors.append((srcname, dstname, str(why)))
        # catch the Error from the recursive copytree so that we can
        # continue with other files
        except Error as err:
            errors.extend(err.args[0])
    try:
        copystat(src, dst)
    except OSError as why:
        # Ignore Windows' inability to copy file access times. On POSIX
        # OSError has no 'winerror' attribute at all, so use getattr --
        # the original's direct `why.winerror` raised AttributeError there.
        if getattr(why, 'winerror', None) is None:
            # append, not extend: extend() flattened the tuple into three
            # separate list entries, unlike the per-file errors above.
            errors.append((src, dst, str(why)))
    if errors:
        raise Error(errors)
def debugger():
    """Drop into the debugger: post_mortem when called while handling an
    exception, set_trace otherwise. ``ipdb`` is preferred over ``pdb``
    if installed.
    """
    active_traceback = sys.exc_info()[2]
    if active_traceback is None:
        _debugger.set_trace()
    else:
        _debugger.post_mortem(active_traceback)
def keys(self):
    """
    Implements the dict.keys() method.

    Refreshes the cache from disc first, then yields each stored key,
    converted with ``key_conv['from']`` when such a converter exists.
    """
    self.sync()
    for raw_key in self.db.keys():
        try:
            yield self.key_conv['from'](raw_key)
        except KeyError:
            # No 'from' converter registered -- yield the key unchanged.
            yield raw_key
def get_mtime(fname):
    """
    Find the time this file was last modified.

    :param fname: File name
    :return: int, last-modified time in nanoseconds since the epoch
    """
    try:
        return os.stat(fname).st_mtime_ns
    except OSError:
        # The file might be right in the middle of being written,
        # so back off briefly and retry once.
        time.sleep(1)
        return os.stat(fname).st_mtime_ns
def is_changed(self, item):
    """
    Find out if this item has been modified since it was last seen.

    :param item: A key
    :return: True/False
    :raises KeyError: if the backing file does not exist
    """
    fname = os.path.join(self.fdir, item)
    if not os.path.isfile(fname):
        logger.error('Could not access {}'.format(fname))
        raise KeyError(item)

    mtime = self.get_mtime(fname)
    if item not in self.fmtime:
        # Never been seen before
        self.fmtime[item] = mtime
        return True
    if mtime > self.fmtime[item]:
        # has changed
        self.fmtime[item] = mtime
        return True
    return False
def sync(self):
    """
    Goes through the directory and builds a local cache based on
    the content of the directory.

    Creates ``self.fdir`` if missing, then for every regular file in it:
    re-reads the file into ``self.db`` when it is new or has changed, and
    records its mtime in ``self.fmtime``.
    """
    if not os.path.isdir(self.fdir):
        os.makedirs(self.fdir)

    for f in os.listdir(self.fdir):
        fname = os.path.join(self.fdir, f)
        if not os.path.isfile(fname):
            # Skip sub-directories and other non-regular entries.
            continue
        if f in self.fmtime:
            # Already tracked: only re-read when is_changed reports a
            # newer mtime (which also updates self.fmtime).
            if self.is_changed(f):
                self.db[f] = self._read_info(fname)
        else:
            # First sighting of this file.
            # NOTE(review): the mtime is captured before parsing --
            # presumably so a write racing with the parse is picked up by
            # the next sync; confirm.
            mtime = self.get_mtime(fname)
            self.db[f] = self._read_info(fname)
            self.fmtime[f] = mtime
def items(self):
    """
    Implements the dict.items() method.

    Refreshes the cache from disc first, then yields (key, value) pairs,
    converting each key with ``key_conv['from']`` when a converter exists.
    """
    self.sync()
    for raw_key, value in self.db.items():
        try:
            yield self.key_conv['from'](raw_key), value
        except KeyError:
            # No 'from' converter registered -- yield the pair unchanged.
            yield raw_key, value
def clear(self):
    """
    Completely resets the database: every entry in the local cache and
    the corresponding file on disc is removed.
    """
    if not os.path.isdir(self.fdir):
        # Nothing stored yet -- just make sure the directory exists.
        os.makedirs(self.fdir, exist_ok=True)
        return
    for name in os.listdir(self.fdir):
        del self[name]
def update(self, ava):
    """
    Implements the dict.update() method: stores every (key, value) pair
    from the mapping *ava* through __setitem__.
    """
    for key, value in ava.items():
        self[key] = value
def chr(x):
    '''
    x-->int | bytes | str (single character)
    Returns-->BYTE (bytes of length 1, not str in python3)

    Behaves like PY2 chr() in PY2 or PY3.

    If x is a str/bytes of length > 1, raises TypeError unless
    SUPPRESS_ERRORS is set (then the first character is used).
    If x is an int outside range(256), raises ValueError unless
    SUPPRESS_ERRORS is set (then x % 256 is used) -- mirroring ord().
    '''
    global _chr
    if isinstance(x, int):
        # latin1 / PY2 chr() only covers 0..255. The original tested
        # x > 256 (letting 256 through) and silently passed large values
        # to _chr when errors were not suppressed, producing the wrong
        # exception later; mirror ord() instead.
        if x >= 256:
            if not SUPPRESS_ERRORS:
                raise ValueError('chr() arg not in range(256)')
            x = x % 256
        return toBytes(_chr(x))
    elif isinstance(x, bytes):
        x = fromBytes(x)
        if len(x) > 1:
            if not SUPPRESS_ERRORS:
                # Message fixed: the check is for length > 1, not > 2.
                raise TypeError('chr() found string of length > 1')
            x = x[0]
        return toBytes(x)
    elif isinstance(x, str):
        if len(x) > 1:
            if not SUPPRESS_ERRORS:
                raise TypeError('chr() found string of length > 1')
            x = x[0]
        return toBytes(x)
    else:
        # %-format the message -- the original passed two args to
        # TypeError, leaving the %s placeholder unformatted.
        raise TypeError('Unknown type passed to chr: %s' % str(type(x)))
def ord(x):
    '''
    x-->int | bytes | str (single character)
    Returns-->int

    Behaves like PY2 ord() in PY2 or PY3.

    If x is a str/bytes of length > 1, the underlying ord raises
    TypeError unless SUPPRESS_ERRORS is set (then the first character
    is used). If x is an int outside range(256), raises ValueError
    unless SUPPRESS_ERRORS is set (then x % 256 is returned).
    '''
    global _ord
    if isinstance(x, int):
        # latin1 byte values only cover 0..255. The original tested
        # x > 256, which let 256 itself through without raising.
        if x >= 256:
            if not SUPPRESS_ERRORS:
                raise ValueError('ord() arg not in range(256)')
        return x % 256
    elif isinstance(x, bytes):
        x = fromBytes(x)
        if len(x) > 1:
            if SUPPRESS_ERRORS:
                x = x[0]
        return _ord(x)
    elif isinstance(x, str):
        if len(x) > 1:
            if SUPPRESS_ERRORS:
                x = x[0]
        return _ord(x)
    else:
        # %-format the message -- the original passed two args to
        # TypeError, leaving the %s placeholder unformatted.
        raise TypeError('Unknown type passed to ord: %s' % str(type(x)))
def hex(x):
    '''
    x-->bytes | bytearray
    Returns-->bytes: hex-encoded
    '''
    # bytearray must be converted to immutable bytes before encoding.
    data = bytes(x) if isinstance(x, bytearray) else x
    return encode(data, 'hex')
def fromBytes(x):
    '''
    x-->unicode string | bytearray | bytes
    Returns-->unicode string, decoded with encoding=latin1
    Any other type (int etc.) is returned unchanged.
    '''
    if isinstance(x, unicode):
        return x
    if isinstance(x, (bytes, bytearray)):
        return decode(bytes(x), DEF_ENCODING)
    return x  # unchanged (int etc)
def toBytes(x):
    '''
    x-->unicode string | bytearray | bytes
    Returns-->bytes
    If x is unicode, it MUST be encodable as latin1; any other type
    (int etc.) is returned unchanged.
    '''
    if isinstance(x, bytes):
        return x
    if isinstance(x, bytearray):
        return bytes(x)
    if isinstance(x, unicode):
        # ASSUMES latin1 encoding - Could raise an exception
        return encode(x, DEF_ENCODING)
    return x  # unchanged (int etc)
def get_rand_int(encoding='latin1', avoid=None):
    '''
    encoding-->str: one of ENCODINGS
    avoid-->collection of int: code points to avoid (unprintable chars etc)
    Returns-->int that can be converted to the requested encoding, is NOT
    in avoid, and is never a UTF-16 surrogate

    :raises ValueError: if encoding is not in ENCODINGS
    '''
    # None instead of a shared mutable [] default; a set gives O(1)
    # membership tests in the re-draw loop.
    banned = set(avoid) if avoid else set()
    UNICODE_LIMIT = 0x10ffff
    # See: https://en.wikipedia.org/wiki/UTF-8#Invalid_code_points
    SURROGATE_RANGE = (0xD800, 0xDFFF)
    if encoding not in ENCODINGS:
        raise ValueError('Unsupported encoding: ' + str(encoding))
    if encoding == 'ascii':
        maxord = 2 ** 7
    elif encoding == 'latin1':
        maxord = 2 ** 8
    elif encoding == 'utf16':
        maxord = 2 ** 16
    else:
        # utf8 / utf32: any code point (capped by UNICODE_LIMIT below)
        maxord = 2 ** 32
    upper = min(maxord, UNICODE_LIMIT)
    rndint = random.randrange(0, upper)
    # Re-draw until the value is neither banned nor a surrogate.
    while (rndint in banned or
           SURROGATE_RANGE[0] <= rndint <= SURROGATE_RANGE[1]):
        rndint = random.randrange(0, upper)
    return rndint
def get_rand_str(encoding='latin1', l=64, avoid=None):
    '''
    encoding-->str: one of ENCODINGS
    l-->int: length of returned str
    avoid-->collection of int: code points to avoid (unprintable chars etc)
    Returns-->unicode str of length l in the requested encoding
    '''
    # None instead of a shared mutable [] default; normalize to a list
    # before forwarding so get_rand_int always receives a container.
    banned = list(avoid) if avoid else []
    ret = unicode('')
    while len(ret) < l:
        ret += unichr(get_rand_int(encoding=encoding, avoid=banned))
    return ret
def get_rand_bytes(encoding='latin1', l=64, avoid=None):
    '''
    encoding-->str: one of ENCODINGS
    l-->int: length of the underlying unicode str
    avoid-->collection of int: code points to avoid (unprintable chars etc)
    Returns-->bytes encoding a random unicode str of the requested encoding
    '''
    # None instead of a shared mutable [] default; normalize to a list
    # before forwarding so get_rand_str always receives a container.
    banned = list(avoid) if avoid else []
    return encode(
        get_rand_str(encoding=encoding, l=l, avoid=banned),
        encoding=encoding
    )
def main():
    '''
    Parse argv for options and arguments, and start schema generation.

    Requires both --function (name of the integrity-check function) and
    --output (path for the generated sql model instances); prints usage
    and exits with status 1 when either is missing.
    '''
    parser = optparse.OptionParser(usage="%prog [options] <model_path> [another_model_path...]",
                                   formatter=optparse.TitledHelpFormatter())
    parser.set_description(__doc__.strip())
    parser.add_option("-f", "--function", dest="function", metavar="NAME",
                      help="append integrity checking actions to functions named NAME (required)",
                      action="store", default=None)
    parser.add_option("-o", "--output", dest='output', metavar="PATH",
                      help="save sql model instances to PATH (required)",
                      action="store", default=None)
    parser.add_option("-v", "--verbosity", dest='verbosity', action="count",
                      help="increase debug logging level", default=2)
    (opts, args) = parser.parse_args()
    if len(args) == 0 or None in [opts.output, opts.function]:
        parser.print_help()
        sys.exit(1)

    # Map the -v count to a logging level (default 2 = INFO).
    levels = {
        0: logging.ERROR,
        1: logging.WARNING,
        2: logging.INFO,
        3: logging.DEBUG,
    }
    logging.basicConfig(level=levels.get(opts.verbosity, logging.DEBUG))

    m = ooaofooa.load_metamodel(args)
    for c_c in m.select_many('C_C'):
        # Look for an existing function with the requested name inside
        # this component; create one (typed boolean) when absent.
        filt = lambda sel: ooaofooa.is_contained_in(sel, c_c) and sel.Name == opts.function
        s_sync = m.select_any('S_SYNC', filt)
        if not s_sync:
            s_sync = m.new('S_SYNC', Name=opts.function)
            pe_pe = m.new('PE_PE')
            s_dt = m.select_any('S_DT', where(Name='boolean'))
            relate(pe_pe, s_sync, 8001)
            relate(s_dt, s_sync, 25)
        generate_actions(m, c_c, s_sync)
    xtuml.persist_instances(m, opts.output)
def scrape(ctx, url):
    """
    Rip the events from a given rss feed, normalize the data and store.

    :param ctx: context object (not referenced in the body below)
    :param url: str URL of the RSS feed to load

    NOTE(review): as written this function extracts fields into locals,
    never stores anything, and ends in ipdb.set_trace() -- it looks like
    work-in-progress scaffolding specific to http://konfery.cz/rss/.
    """
    data = load_feed(url)
    feed = data['feed']
    entries = data['entries']

    # THIS IS SPECIFIC TO
    # http://konfery.cz/rss/
    _type = 'community'
    country = 'Czech Republic'

    # title, title_detail, links, link, published, summary, tags
    # unused: summary_detail, guidislink, published_parsed
    for entry in entries:
        _id = sluggify(entry['id'])
        city = entry['tags'][0]['term']
        landing = entry['link']
        start_time = dt_normalize(entry['published_parsed'], local_tz=True)
        title = entry['title']
        summary = entry['summary']
        link = entry['link']
        # NOTE(review): leftover debugger breakpoint -- must be removed
        # before this can run unattended.
        ipdb.set_trace()
def download_image(self):
    """
    Download the image and return the local path to the image file.

    The file is stored in ``self.cache_directory`` under the final path
    segment of ``self.url`` (the directory is created if needed).

    :return: str local filesystem path of the downloaded image
    """
    split = urlsplit(self.url)
    filename = split.path.split("/")[-1]

    # Ensure the directory to store the image cache exists
    if not os.path.exists(self.cache_directory):
        os.makedirs(self.cache_directory)

    filepath = os.path.join(self.cache_directory, filename)

    # Close the HTTP response explicitly -- the original leaked the
    # connection by never closing the urlopen() handle.
    response = urllib_request.urlopen(self.url)
    try:
        with open(filepath, "wb") as image:
            image.write(response.read())
    finally:
        response.close()

    return filepath
def has_changed(self):
    """
    Method to check if an image has changed since it was last
    downloaded.

    By making a head request, this check can be done quicker that
    downloading and processing the whole file.

    :return: False when the server reports a Last-Modified header equal
        to the one seen last time; True when it differs or when the
        server sends no Last-Modified header at all.

    Side effect: updates ``self.image_last_modified`` with the latest
    Last-Modified value.
    """
    # Turn the GET into a HEAD request so only headers are fetched.
    request = urllib_request.Request(self.url)
    request.get_method = lambda: 'HEAD'
    # NOTE(review): the response is never closed -- consider closing it.
    response = urllib_request.urlopen(request)
    information = response.info()

    if 'Last-Modified' in information:
        last_modified = information['Last-Modified']

        # Return False if the image has not been modified
        if last_modified == self.image_last_modified:
            return False
        self.image_last_modified = last_modified

    # Return True if the image has been modified
    # or if the image has no last-modified header
    return True
async def api_bikes(request):
    """
    Gets stolen bikes within a radius of a given postcode.

    Route parameters:
      * postcode: the postcode to search around, or the literal string
        "random" to pick a random postcode
      * radius: search radius, defaults to 10

    :param request: The aiohttp request.
    :return: JSON response with the serialized bikes stolen within the
        given range from the postcode.
    :raises web.HTTPBadRequest: when radius is not an integer
    :raises web.HTTPInternalServerError: when the caching layer fails
    :raises web.HTTPNotFound: when the postcode does not exist
    """
    postcode: Optional[str] = request.match_info.get('postcode', None)

    try:
        radius = int(request.match_info.get('radius', 10))
    except ValueError:
        raise web.HTTPBadRequest(text="Invalid Radius")

    try:
        # "random" is a magic value that substitutes a random postcode.
        postcode = (await get_postcode_random()) if postcode == "random" else postcode
        bikes = await get_bikes(postcode, radius)
    except CachingError as e:
        raise web.HTTPInternalServerError(text=e.status)
    else:
        if bikes is None:
            raise web.HTTPNotFound(text="Post code does not exist.")
        else:
            return str_json_response([bike.serialize() for bike in bikes])
def fancy_tag_compiler(params, defaults, takes_var_args, takes_var_kwargs,
                       takes_context, name, node_class, parser, token):
    """Parse a template tag token and return a ``node_class`` instance.

    Splits the token into positional args and keyword args, validates
    them against the decorated function's signature (``params``,
    ``defaults``, ``*args``/``**kwargs`` support), honours a trailing
    ``as <var>`` clause, and raises TemplateSyntaxError on any misuse.
    """
    bits = token.split_contents()[1:]
    if takes_context:
        if 'context' in params[:1]:
            params = params[1:]
        else:
            raise TemplateSyntaxError(
                "Any tag function decorated with takes_context=True "
                "must have a first argument of 'context'")

    # Split args and kwargs
    args = []
    kwargs = {}
    kwarg_found = False

    unhandled_params = list(params)
    handled_params = []

    # A trailing "as <name>" stores the tag's output into a context
    # variable instead of rendering it inline.
    if len(bits) > 1 and bits[-2] == 'as':
        output_var = bits[-1]
        if len(set(output_var) - set(ALLOWED_VARIABLE_CHARS)) > 0:
            raise TemplateSyntaxError("%s got output var name with forbidden chars: '%s'" % (name, output_var))
        bits = bits[:-2]
    else:
        output_var = None

    for bit in bits:
        kwarg_match = kwarg_re.match(bit)
        if kwarg_match:
            kw, var = kwarg_match.groups()
            if kw not in params and not takes_var_kwargs:
                raise TemplateSyntaxError("%s got unknown keyword argument '%s'" % (name, kw))
            elif kw in handled_params:
                raise TemplateSyntaxError("%s got multiple values for keyword argument '%s'" % (name, kw))
            else:
                kwargs[str(kw)] = var
                kwarg_found = True
                handled_params.append(kw)
        else:
            # Positional args must all come before any keyword arg.
            if kwarg_found:
                raise TemplateSyntaxError("%s got non-keyword arg after keyword arg" % name)
            else:
                args.append(bit)
                try:
                    handled_params.append(unhandled_params.pop(0))
                except IndexError:
                    if not takes_var_args:
                        raise TemplateSyntaxError("%s got too many arguments" % name)

    # Consider the last n params handled, where n is the number of defaults.
    if defaults is not None:
        unhandled_params = unhandled_params[:-len(defaults)]
    if len(unhandled_params) == 1:
        raise TemplateSyntaxError("%s didn't get a value for argument '%s'" % (name, unhandled_params[0]))
    elif len(unhandled_params) > 1:
        raise TemplateSyntaxError("%s didn't get values for arguments: %s" % (
            name, ', '.join(["'%s'" % p for p in unhandled_params])))

    return node_class(args, kwargs, output_var, takes_context)
def setup_logging(log_filename=None, log_level="DEBUG", str_format=None,
                  date_format=None, log_file_level="DEBUG",
                  log_stdout_level=None, log_restart=False,
                  log_history=False, formatter=None, silence_modules=None,
                  log_filter=None):
    """
    This will setup logging for a single file but can be called more
    than once. Mutates global logging state (root seaborn log level,
    file handlers, stdout/stderr handlers).

    LOG LEVELS are "CRITICAL", "ERROR", "INFO", "DEBUG"

    :param log_filename: str of the file location
    :param log_level: str of the overall logging level for setLevel
    :param str_format: str of the logging format
    :param date_format: str of the date format
    :param log_file_level: str of the log level to use on this file
    :param log_stdout_level: str of the log level to use on stdout
        (None means no stdout logging)
    :param log_restart: bool if True the log file will be deleted first
    :param log_history: bool if True will save another log file in a
        folder called history with the datetime
    :param formatter: logging.Formatter instance to use
    :param silence_modules: list of str of modules to silence
    :param log_filter: logging.Filter instance to add to handlers
    :return: None
    """
    setup_log_level(log_level)
    if log_filename:
        # Delegate file-handler setup (incl. restart/history behavior).
        setup_file_logging(log_filename=log_filename,
                           str_format=str_format,
                           log_history=log_history,
                           formatter=formatter,
                           log_file_level=log_file_level,
                           log_restart=log_restart,
                           date_format=date_format,
                           log_filter=log_filter)
    if log_stdout_level is not None:
        setup_stdout_logging(log_level=log_level,
                             log_stdout_level=log_stdout_level,
                             str_format=str_format,
                             date_format=date_format,
                             formatter=formatter,
                             log_filter=log_filter)
    silence_module_logging(silence_modules)
def setup_stdout_logging(log_level="DEBUG", log_stdout_level="DEBUG",
                         str_format=None, date_format=None, formatter=None,
                         silence_modules=None, log_filter=None):
    """
    This will setup logging for stdout and stderr.

    A non-error handler on stdout (skipped entirely when
    log_stdout_level == 'ERROR') plus an ERROR-level handler on stderr.

    :param formatter: logging.Formatter instance to use
    :param log_level: str of the overall logging level for setLevel
    :param log_stdout_level: str of the logging level of stdout
    :param str_format: str of the logging format
    :param date_format: str of the date format
    :param silence_modules: list of str of modules to exclude from logging
    :param log_filter: logging.Filter instance to add to the stderr handler
    :return: None
    """
    setup_log_level(log_level)
    formatter = formatter or SeabornFormatter(str_format, date_format)
    if log_stdout_level != 'ERROR':
        stdout_handler = logging.StreamHandler(sys.__stdout__)
        # NOTE(review): the stdout handler always gets NoErrorFilter;
        # the caller-supplied log_filter is only applied to stderr --
        # confirm this asymmetry is intentional.
        add_handler(log_stdout_level, stdout_handler, formatter,
                    NoErrorFilter())
    stderr_handler = logging.StreamHandler(sys.__stderr__)
    add_handler("ERROR", stderr_handler, formatter, log_filter)
    silence_module_logging(silence_modules)
def setup_file_logging(log_filename, log_file_level="DEBUG",
                       str_format=None, date_format=None, log_restart=False,
                       log_history=False, formatter=None,
                       silence_modules=None, log_filter=None):
    """
    This will setup logging for a single file but can be called more
    than once.

    LOG LEVELS are "CRITICAL", "ERROR", "INFO", "DEBUG"

    :param log_filename: str of the file location
    :param log_file_level: str of the log level to use on this file
    :param str_format: str of the logging format
    :param date_format: str of the date format
    :param log_restart: bool if True the log file will be deleted first
    :param log_history: bool if True will save another log file in a
        folder called history with the datetime
    :param formatter: logging.Formatter instance to use
    :param log_filter: logging.Filter instance to add to the handler
    :param silence_modules: list of str of modules to silence
    :return: None
    """
    from seaborn_timestamp.timestamp import datetime_to_str
    # Optionally start from a fresh, empty log file.
    if os.path.exists(log_filename) and log_restart:
        os.remove(log_filename)
    add_file_handler(log_file_level, log_filename, str_format=str_format,
                     date_format=date_format, formatter=formatter,
                     log_filter=log_filter)
    if log_history:
        # Timestamped copy under <dir>/history/<basename>_<datetime>.log
        base_name = os.path.basename(log_filename).split('.')[0] + \
                    '_%s' % datetime_to_str(str_format='%Y-%m-%d_%H-%M-%S')
        history_log = os.path.join(os.path.dirname(log_filename),
                                   'history', base_name + '.log')
        # NOTE(review): the history handler is not given the formatter
        # argument, only str_format/date_format -- confirm intentional.
        add_file_handler(log_file_level, history_log,
                         str_format=str_format,
                         date_format=date_format,
                         log_filter=log_filter)
    silence_module_logging(silence_modules)
def add_file_handler(log_file_level, log_filename, str_format=None,
                     date_format=None, formatter=None, log_filter=None):
    """
    Attach a file handler for ``log_filename`` to the seaborn log,
    creating the parent directories as needed.

    :param log_file_level: str of the log level to use on this file
    :param log_filename: str of the file location
    :param str_format: str of the logging format
    :param date_format: str of the date format
    :param formatter: logging.Formatter instance; built from
        str_format/date_format when not supplied
    :param log_filter: logging.Filter instance to add to the handler
    :return: None
    """
    formatter = formatter or SeabornFormatter(str_format=str_format,
                                              date_format=date_format)
    mkdir_for_file(log_filename)
    add_handler(log_file_level, logging.FileHandler(log_filename),
                formatter, log_filter=log_filter)
def add_handler(log_handler_level, handler, formatter=None, log_filter=None):
    """
    Configure *handler* and attach it to the seaborn log.

    :param log_handler_level: str of the level to set on the handler
    :param handler: logging.Handler instance to add
    :param formatter: optional logging.Formatter to apply
    :param log_filter: optional logging.Filter to apply
    :return: None
    """
    handler.setLevel(log_handler_level)
    # Apply each optional decoration only when it was supplied.
    for apply_option, option in ((handler.setFormatter, formatter),
                                 (handler.addFilter, log_filter)):
        if option is not None:
            apply_option(option)
    log.addHandler(handler)
def set_module_log_level(modules=None, log_level=logging.WARNING):
    """
    Set the log level for the given modules' loggers; in general this is
    used to raise the level of (i.e. silence) noisy modules.

    :param modules: str or list of str of module names ex. ['requests']
    :param log_level: int or str of the new log level
        (default logging.WARNING)
    :return: None
    """
    modules = modules or []
    if not isinstance(modules, list):
        modules = [modules]
    for module in modules:
        # BUG FIX: the original hard-coded logging.WARNING here,
        # silently ignoring the log_level argument.
        logging.getLogger(module).setLevel(log_level)
def findCaller(self, stack_info=False):
    """
    Find the stack frame of the caller so that we can note the source
    file name, line number and function name.

    Walks up from the logging module's internal frames until it reaches
    the first frame outside both the stdlib logging source file and this
    module's own source file (``self._srcfile``).

    :param stack_info: bool, when True also format the current stack
    :return: (filename, line number, function name) tuple
    """
    f = logging.currentframe()
    # On some versions of IronPython, currentframe() returns None if
    # IronPython isn't run with -X:Frames.
    if f is not None:
        f = f.f_back
    rv = "(unknown file)", 0, "(unknown function)"
    while hasattr(f, "f_code"):
        co = f.f_code
        filename = os.path.normcase(co.co_filename)
        # Skip frames that belong to the logging machinery itself.
        if filename == logging._srcfile or filename == self._srcfile:
            f = f.f_back
            continue
        rv = (co.co_filename, f.f_lineno, co.co_name)
        if stack_info:
            sio = io.StringIO()
            sio.write('Stack (most recent call last):\n')
            traceback.print_stack(f, file=sio)
            sinfo = sio.getvalue()
            if sinfo[-1] == '\n':
                sinfo = sinfo[:-1]
            sio.close()
            # NOTE(review): sinfo is formatted but never returned --
            # unlike the stdlib findCaller, which returns it as a 4th
            # tuple element. Confirm whether callers expect it.
        break
    return rv
def get_defining_component(pe_pe):
    '''
    Get the C_C (component) in which *pe_pe* is defined, walking up
    through any enclosing packages. Returns None for None input.
    '''
    if pe_pe is None:
        return None

    # Normalize: navigate to the PE_PE element when given a subtype.
    if pe_pe.__class__.__name__ != 'PE_PE':
        pe_pe = xtuml.navigate_one(pe_pe).PE_PE[8001]()

    enclosing_package = xtuml.navigate_one(pe_pe).EP_PKG[8000]()
    if enclosing_package:
        # Defined inside a package: recurse to the package's owner.
        return get_defining_component(enclosing_package)

    return xtuml.navigate_one(pe_pe).C_C[8003]()
def prebuild_action(instance):
    '''
    Transform textual OAL actions of an *instance* to instances in the
    ooaofooa subsystems Value and Body.

    The provided *instance* must be an instance of one of the following
    classes:

    - S_SYNC
    - S_BRG
    - O_TFR
    - O_DBATTR
    - SM_ACT
    - SPR_RO
    - SPR_RS
    - SPR_PO
    - SPR_PS

    :return: the result of walking the parsed OAL tree with the
        prebuilder that matches the instance's metaclass
    '''
    # Each supported metaclass has a dedicated prebuilder walker.
    walker_map = {
        'S_SYNC': FunctionPrebuilder,
        'S_BRG': BridgePrebuilder,
        'O_TFR': OperationPrebuilder,
        'O_DBATTR': DerivedAttributePrebuilder,
        'SM_ACT': TransitionPrebuilder,
        'SPR_RO': RequiredOperationPrebuilder,
        'SPR_RS': RequiredSignalPrebuilder,
        'SPR_PO': ProvidedOperationPrebuilder,
        'SPR_PS': ProvidedSignalPrebuilder
    }
    metaclass = xtuml.get_metaclass(instance)
    walker = walker_map[metaclass.kind](metaclass.metamodel, instance)
    logger.info('processing action %s' % walker.label)
    # walker.visitors.append(xtuml.tools.NodePrintVisitor())  # debug aid
    root = oal.parse(instance.Action_Semantics_internal)
    return walker.accept(root)
def prebuild_model(metamodel):
    '''
    Transform textual OAL actions in a ooaofooa *metamodel* to instances
    in the subsystems Value and Body. Instances of the following classes
    are supported:

    - S_SYNC
    - S_BRG
    - O_TFR
    - O_DBATTR
    - SM_ACT
    - SPR_RO
    - SPR_RS
    - SPR_PO
    - SPR_PS
    '''
    supported_kinds = ('S_SYNC', 'S_BRG', 'O_TFR', 'O_DBATTR', 'SM_ACT',
                       'SPR_RO', 'SPR_RS', 'SPR_PO', 'SPR_PS')
    for kind in supported_kinds:
        for inst in metamodel.select_many(kind):
            # Skip instances without Suc_Pars set -- presumably
            # "successfully parsed"; confirm.
            if inst.Suc_Pars:
                prebuild_action(inst)
def main():
    '''
    Parse command line options and launch the prebuilder.

    Requires at least one model path argument and --output; prints usage
    and exits with status 1 otherwise.
    '''
    parser = optparse.OptionParser(usage="%prog [options] <model_path> [another_model_path..]",
                                   version=xtuml.version.complete_string,
                                   formatter=optparse.TitledHelpFormatter())
    parser.add_option("-v", "--verbosity", dest='verbosity', action="count",
                      help="increase debug logging level", default=1)
    parser.add_option("-o", "--output", dest="output", metavar="PATH",
                      help="set output to PATH", action="store", default=None)
    (opts, args) = parser.parse_args()
    if len(args) == 0 or opts.output is None:
        parser.print_help()
        sys.exit(1)

    # Map the -v count to a logging level (default 1 = WARNING).
    levels = {
        0: logging.ERROR,
        1: logging.WARNING,
        2: logging.INFO,
        3: logging.DEBUG,
    }
    logging.basicConfig(level=levels.get(opts.verbosity, logging.DEBUG))

    m = ooaofooa.load_metamodel(args)
    prebuild_model(m)
    xtuml.persist_instances(m, opts.output)
def find_symbol(self, name=None, kind=None):
    '''
    Find a symbol in the symbol table by name, kind, or both.

    Scopes are searched innermost-first. Within each scope, named
    symbols are checked before the scope's own handle (which can only
    match a kind-only query). Returns None when nothing matches.
    '''
    if name is None and kind is None:
        # With no criteria at all, nothing can match.
        return None
    for scope in reversed(self.stack):
        for sym_name, handle in scope.symbols.items():
            name_ok = name is None or name == sym_name
            kind_ok = kind is None or kind == handle.__class__.__name__
            if name_ok and kind_ok:
                return handle
        if name is None and kind == scope.handle.__class__.__name__:
            return scope.handle
def update_membership(self, contact, group):
    '''
    Add *contact* to *group* unless it is already a member.

    input: gdata ContactEntry and GroupEntry objects

    :return: the (possibly updated) ContactEntry, or None when contact
        is empty/None
    '''
    if not contact:
        log.debug('Not updating membership for EMPTY contact.')
        return None

    _uid = contact.email[0].address
    _gtitle = group.title.text

    # Already a member? Then leave the contact untouched.
    for contact_group in contact.group_membership_info:
        if contact_group.href == group.get_id():
            log.warn(
                ' ... {} already a member of {}.'.format(_uid, _gtitle))
            return contact

    log.debug('Adding {} to group {}'.format(_uid, _gtitle))
    membership = self.api.contacts.data.GroupMembershipInfo(
        href=group.id.text)
    contact.group_membership_info.append(membership)
    # Push the changed membership list back to the server.
    contact = self.api.update(contact)
    return contact
def is_contained_in(pe_pe, root):
    '''
    Determine if a PE_PE is contained within a EP_PKG or a C_C,
    directly or transitively.
    '''
    if not pe_pe:
        return False

    # Normalize subtypes to their PE_PE supertype instance.
    if type(pe_pe).__name__ != 'PE_PE':
        pe_pe = one(pe_pe).PE_PE[8001]()

    ep_pkg = one(pe_pe).EP_PKG[8000]()
    c_c = one(pe_pe).C_C[8003]()

    if root in [ep_pkg, c_c]:
        return True

    # Not a direct parent: climb both possible containers recursively.
    return (is_contained_in(ep_pkg, root) or
            is_contained_in(c_c, root))
def is_global(pe_pe):
    '''
    Check if a PE_PE is globally defined, i.e. not inside a C_C at any
    level of package nesting.
    '''
    # Normalize subtypes to their PE_PE supertype instance.
    if type(pe_pe).__name__ != 'PE_PE':
        pe_pe = one(pe_pe).PE_PE[8001]()

    # Directly inside a component -> not global.
    if one(pe_pe).C_C[8003]():
        return False

    parent = one(pe_pe).EP_PKG[8000].PE_PE[8001]()
    if not parent:
        # No enclosing package left -> reached the model root.
        return True

    return is_global(parent)