_id
stringlengths
2
7
title
stringlengths
1
88
partition
stringclasses
3 values
text
stringlengths
75
19.8k
language
stringclasses
1 value
meta_information
dict
q44000
BuildFile.crossrefs
train
def crossrefs(self):
    """Return the set of nodes referenced here that live outside this build file.

    A node is non-local when its (repo, path) pair differs from this
    build file's own target.
    """
    # TODO: memoize this?
    local = (self.target.repo, self.target.path)
    return {n for n in self.node if (n.repo, n.path) != local}
python
{ "resource": "" }
q44001
BuildFile.local_targets
train
def local_targets(self):
    """Yield each node that is defined in this build file itself.

    A node is local when its (repo, path) matches this build file's target.
    """
    here = (self.target.repo, self.target.path)
    for candidate in self.node:
        if (candidate.repo, candidate.path) == here:
            yield candidate
python
{ "resource": "" }
q44002
JsonBuildFile._parse
train
def _parse(self, stream):
    """Parse a JSON BUILD file read from ``stream`` into this graph.

    Each entry under ``targets`` becomes a node carrying its rule object,
    and each of the rule's composed deps becomes a node plus an edge.

    Args:
        stream: file-like object containing the JSON build data.

    Raises:
        error.ButcherError: if a target is defined more than once.
    """
    builddata = json.load(stream)
    log.debug('This is a JSON build file.')
    if 'targets' not in builddata:
        log.warn('Warning: No targets defined here.')
        return
    for tdata in builddata['targets']:
        # TODO: validate name
        target = address.new(target=tdata.pop('name'),
                             repo=self.target.repo,
                             path=self.target.path)
        # Duplicate target definition? Uh oh.
        if target in self.node and 'target_obj' in self.node[target]:
            raise error.ButcherError(
                'Target is defined more than once: %s', target)
        # Remaining tdata keys become rule keyword arguments.
        rule_obj = targets.new(name=target,
                               ruletype=tdata.pop('type'),
                               **tdata)
        log.debug('New target: %s', target)
        self.add_node(target, {'target_obj': rule_obj})
        # dep could be ":blabla" or "//foo:blabla" or "//foo/bar:blabla"
        for dep in rule_obj.composed_deps() or []:
            d_target = address.new(dep)
            if not d_target.repo:  # ":blabla"
                d_target.repo = self.target.repo
            if d_target.repo == self.target.repo and not d_target.path:
                # Same-repo dep with no path defaults to this file's path.
                d_target.path = self.target.path
            if d_target not in self.nodes():
                self.add_node(d_target)
            log.debug('New dep: %s -> %s', target, d_target)
            self.add_edge(target, d_target)
python
{ "resource": "" }
q44003
Failure.from_exception
train
def from_exception(cls, exception, retain_exc_info=True, cause=None,
                   find_cause=True):
    """Build a failure object that captures ``exception``.

    The exception's type, instance and (when present) ``__traceback__``
    are packed into an exc_info triple and handed to ``from_exc_info``.
    """
    tb = getattr(exception, '__traceback__', None)
    triple = (type(exception), exception, tb)
    return cls.from_exc_info(exc_info=triple,
                             retain_exc_info=retain_exc_info,
                             cause=cause,
                             find_cause=find_cause)
python
{ "resource": "" }
q44004
Failure.validate
train
def validate(cls, data):
    """Validate input data matches expected failure ``dict`` format.

    Raises:
        InvalidFormat: if the JSON-schema check fails, or if any cause in
            the chain has a root exception type outside the per-origin
            ``BASE_EXCEPTIONS`` whitelist.
    """
    try:
        jsonschema.validate(
            data, cls.SCHEMA,
            # See: https://github.com/Julian/jsonschema/issues/148
            types={'array': (list, tuple)})
    except jsonschema.ValidationError as e:
        raise InvalidFormat("Failure data not of the"
                            " expected format: %s" % (e.message))
    else:
        # Ensure that all 'exc_type_names' originate from one of
        # base exceptions, because those are the root exceptions that
        # python mandates/provides and anything else is invalid...
        causes = collections.deque([data])
        while causes:
            cause = causes.popleft()
            try:
                generated_on = cause['generated_on']
                ok_bases = cls.BASE_EXCEPTIONS[generated_on[0]]
            except (KeyError, IndexError):
                # Unknown origin: empty whitelist, so any root type fails.
                ok_bases = []
            root_exc_type = cause['exc_type_names'][-1]
            if root_exc_type not in ok_bases:
                raise InvalidFormat(
                    "Failure data 'exc_type_names' must"
                    " have an initial exception type that is one"
                    " of %s types: '%s' is not one of those"
                    " types" % (ok_bases, root_exc_type))
            # Walk nested causes breadth-first.
            sub_cause = cause.get('cause')
            if sub_cause is not None:
                causes.append(sub_cause)
python
{ "resource": "" }
q44005
Failure.matches
train
def matches(self, other):
    """Checks if another object is equivalent to this object.

    :returns: checks if another object is equivalent to this object
    :rtype: boolean
    """
    if not isinstance(other, Failure):
        return False
    # When both sides captured exc_info, full equality is meaningful;
    # otherwise fall back to the field-by-field comparison.
    if self.exc_info is not None and other.exc_info is not None:
        return self == other
    return self._matches(other)
python
{ "resource": "" }
q44006
Failure.reraise_if_any
train
def reraise_if_any(failures, cause_cls_finder=None):
    """Re-raise the given failures, if there are any.

    An empty list/tuple/iterator is a no-op and returns ``None``. A single
    ``Failure`` is re-raised directly; multiple failures are raised
    together as a :class:`~.WrappedFailure`.
    """
    if isinstance(failures, (list, tuple)):
        collected = failures
    else:
        # Drain generators/iterators so they can be counted.
        collected = list(failures)
    count = len(collected)
    if count == 1:
        collected[0].reraise(cause_cls_finder=cause_cls_finder)
    elif count > 1:
        raise WrappedFailure(collected)
python
{ "resource": "" }
q44007
Failure.check
train
def check(self, *exc_classes):
    """Check if any of ``exc_classes`` caused the failure.

    Arguments may be exception types or fully qualified type-name
    strings. The first argument whose name is among the captured
    exception type names is returned; otherwise ``None``.
    """
    for candidate in exc_classes:
        name = utils.cls_to_cls_name(candidate)
        if name in self._exc_type_names:
            return candidate
    return None
python
{ "resource": "" }
q44008
Failure.pformat
train
def pformat(self, traceback=False):
    """Pretty formats the failure object into a string."""
    out = six.StringIO()
    if self._exc_type_names:
        out.write('Failure: %s: %s' % (self._exc_type_names[0],
                                       self._exception_str))
    else:
        out.write('Failure: %s' % (self._exception_str))
    if traceback:
        tb_text = None
        if self._traceback_str is not None:
            tb_text = self._traceback_str.rstrip()
        out.write(os.linesep)
        if tb_text:
            out.write(tb_text)
        else:
            out.write('Traceback not available.')
    return out.getvalue()
python
{ "resource": "" }
q44009
Failure.iter_causes
train
def iter_causes(self):
    """Yield each cause in the chain, outermost first."""
    node = self._cause
    while node is not None:
        yield node
        node = node._cause
python
{ "resource": "" }
q44010
Failure.from_dict
train
def from_dict(cls, data):
    """Converts this from a dictionary to a object."""
    payload = dict(data)
    nested = payload.get('cause')
    if nested is not None:
        # Causes are stored as nested dicts; rebuild them recursively.
        payload['cause'] = cls.from_dict(nested)
    return cls(**payload)
python
{ "resource": "" }
q44011
Failure.to_dict
train
def to_dict(self, include_args=True, include_kwargs=True):
    """Converts this object to a dictionary.

    :param include_args: include the exception args in the output.
    :param include_kwargs: include the exception kwargs in the output.
    """
    result = {
        'exception_str': self.exception_str,
        'traceback_str': self.traceback_str,
        'exc_type_names': self.exception_type_names,
        'generated_on': self.generated_on,
    }
    result['exc_args'] = self.exception_args if include_args else tuple()
    result['exc_kwargs'] = self.exception_kwargs if include_kwargs else {}
    if self._cause is not None:
        # Serialize the cause chain recursively with the same flags.
        result['cause'] = self._cause.to_dict(
            include_args=include_args, include_kwargs=include_kwargs)
    return result
python
{ "resource": "" }
q44012
explain_feature
train
def explain_feature(featurename):
    '''Print the location of a single feature and its version.

    If the feature is located inside a git repository, this also prints
    the git rev and modified files.
    '''
    import os
    import featuremonkey
    import importlib
    import subprocess

    def guess_version(feature_module):
        # Prefer the conventional __version__, then a get_version() hook.
        if hasattr(feature_module, '__version__'):
            return feature_module.__version__
        if hasattr(feature_module, 'get_version'):
            return feature_module.get_version()
        return ('unable to determine version:'
                ' please add __version__ or get_version()'
                ' to this feature module!')

    def git_rev(module):
        stdout, stderr = subprocess.Popen(
            ["git", "rev-parse", "HEAD"],
            cwd=os.path.dirname(module.__file__),
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            # BUG FIX: decode output; 'x' in bytes raised TypeError on py3.
            universal_newlines=True,
        ).communicate()
        if 'Not a git repo' in stderr:
            return '-'
        return stdout.strip()

    def git_changes(module):
        stdout = subprocess.Popen(
            ["git", "diff", "--name-only"],
            cwd=os.path.dirname(module.__file__),
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            universal_newlines=True,
        ).communicate()[0]
        return stdout.strip() or '-'

    features = featuremonkey.get_features_from_equation_file(
        os.environ['PRODUCT_EQUATION_FILENAME'])
    if featurename not in features:
        print('No feature named ' + featurename)
        return
    print()
    print(featurename)
    print('-' * 60)
    print()
    is_subfeature = '.features.' in featurename
    try:
        feature_module = importlib.import_module(featurename)
    except ImportError:
        # BUG FIX: previously execution fell through and dereferenced the
        # undefined feature_module below (NameError); report and bail out.
        print('Error: unable to import feature "%s"' % featurename)
        return
    print('Location: %s' % os.path.dirname(feature_module.__file__))
    print()
    if is_subfeature:
        print('Version: see parent feature')
        print()
    else:
        print('Version: %s' % str(guess_version(feature_module)))
        print()
        print('git: %s' % git_rev(feature_module))
        print()
        print('git changed: %s'
              % '\n\t\t'.join(git_changes(feature_module).split('\n')))
python
{ "resource": "" }
q44013
explain_features
train
def explain_features():
    '''Print the location of each feature and its version.

    If a feature is located inside a git repository, this also prints
    the git rev and modified files (via the explain_feature task).
    '''
    from ape import tasks
    import featuremonkey
    import os
    equation_file = os.environ['PRODUCT_EQUATION_FILENAME']
    for name in featuremonkey.get_features_from_equation_file(equation_file):
        tasks.explain_feature(name)
python
{ "resource": "" }
q44014
Version.imprint
train
def imprint(self, path=None):
    """Write the determined version, if any, to ``self.version_file``
    or the path passed as an argument.

    :raises ValueError: when no version has been determined yet.
    """
    if self.version is None:
        raise ValueError('Can not write null version to file.')
    target = path or self.version_file
    with open(target, 'w') as handle:
        handle.write(self.version + '\n')
    return self
python
{ "resource": "" }
q44015
Version.from_file
train
def from_file(self, path=None):
    """Populate the version from ``self.version_file`` (or ``path``),
    unless a version was already determined. Returns self for chaining.
    """
    if self._version is not None:
        return self
    self._version = file_version(path or self.version_file)
    return self
python
{ "resource": "" }
q44016
Version.from_git
train
def from_git(self, path=None, prefer_daily=False):
    """Use Git to determine the package version.

    This routine uses the __file__ value of the caller to determine
    which Git repository root to use.

    :param path: NOTE(review): this argument is unconditionally
        overwritten by the caller's ``__file__`` below, so passing it has
        no effect — confirm whether that is intended.
    :param prefer_daily: try the date-based provider before the tag-based
        one when True.
    """
    if self._version is None:
        frame = caller(1)
        # Overrides any caller-supplied ``path`` (see docstring note).
        path = frame.f_globals.get('__file__') or '.'
        providers = ([git_day, git_version]
                     if prefer_daily
                     else [git_version, git_day])
        for provider in providers:
            if self._version is not None:
                break
            try:
                with cd(path):
                    self._version = provider()
            except CalledProcessError:
                # git command failed; try the next provider.
                pass
            except OSError as e:
                # Missing git binary is tolerated; other OS errors are real.
                if e.errno != errno.ENOENT:
                    raise
    return self
python
{ "resource": "" }
q44017
Version.from_pkg
train
def from_pkg(self):
    """Use pkg_resources to determine the installed package version."""
    if self._version is not None:
        return self
    pkg = caller(1).f_globals.get('__package__')
    if pkg is not None:
        self._version = pkg_version(pkg)
    return self
python
{ "resource": "" }
q44018
PeerContact.__load_dump
train
def __load_dump(self, message):
    """
    Calls the hook method to modify the loaded peer description before
    giving it to the directory

    :param message: The received Herald message
    :return: The updated peer description
    """
    dump = message.content
    if self._hook is not None:
        # Call the hook
        try:
            updated_dump = self._hook(message, dump)
            if updated_dump is not None:
                # Use the new description
                dump = updated_dump
        except (TypeError, ValueError) as ex:
            # BUG FIX: self._logger is a Logger object, not a callable;
            # log through error() as the rest of this class does.
            self._logger.error("Invalid description hook: %s", ex)
    return dump
python
{ "resource": "" }
q44019
PeerContact.herald_message
train
def herald_message(self, herald_svc, message):
    """
    Handles a message received by Herald.

    Implements the three-step peer discovery handshake: step 1 registers
    the remote peer and replies with our own dump; step 2 registers the
    dump, tells the remote side to notify its listeners, then notifies
    ours; step 3 fires the notification delayed in step 1.

    :param herald_svc: Herald service
    :param message: Received message
    """
    subject = message.subject
    if subject == SUBJECT_DISCOVERY_STEP_1:
        # Step 1: Register the remote peer and reply with our dump
        try:
            # Delayed registration
            notification = self._directory.register_delayed(
                self.__load_dump(message))
            peer = notification.peer
            if peer is not None:
                # Registration succeeded
                self.__delayed_notifs[peer.uid] = notification
                # Reply with our dump
                herald_svc.reply(
                    message, self._directory.get_local_peer().dump(),
                    SUBJECT_DISCOVERY_STEP_2)
        except ValueError:
            self._logger.error("Error registering a discovered peer")
    elif subject == SUBJECT_DISCOVERY_STEP_2:
        # Step 2: Register the dump, notify local listeners, then let
        # the remote peer notify its listeners
        try:
            # Register the peer
            notification = self._directory.register_delayed(
                self.__load_dump(message))
            if notification.peer is not None:
                # Let the remote peer notify its listeners
                herald_svc.reply(message, None, SUBJECT_DISCOVERY_STEP_3)
                # Now we can notify listeners
                notification.notify()
        except ValueError:
            self._logger.error("Error registering a peer using the "
                               "description it sent")
    elif subject == SUBJECT_DISCOVERY_STEP_3:
        # Step 3: notify local listeners about the remote peer
        try:
            self.__delayed_notifs.pop(message.sender).notify()
        except KeyError:
            # Unknown peer
            pass
    else:
        # Unknown subject
        self._logger.warning("Unknown discovery step: %s", subject)
python
{ "resource": "" }
q44020
MailingListManager.api_url
train
def api_url(self):
    """Returns the api_url or None.

    Lazily resolves the URL from ``settings`` the first time it is
    needed; raises EmailNotEnabledError when it is missing or empty.
    """
    if not self._api_url:
        error_msg = (
            f"Email is enabled but API_URL is not set. "
            f"See settings.{self.api_url_attr}"
        )
        try:
            self._api_url = getattr(settings, self.api_url_attr)
        except AttributeError:
            raise EmailNotEnabledError(error_msg, code="api_url_attribute_error")
        if not self._api_url:
            raise EmailNotEnabledError(error_msg, code="api_url_is_none")
    return self._api_url
python
{ "resource": "" }
q44021
MailingListManager.api_key
train
def api_key(self):
    """Returns the api_key or None.

    Lazily resolves the key from ``settings`` the first time it is
    needed; raises EmailNotEnabledError when it is missing or empty.
    """
    if not self._api_key:
        error_msg = (
            f"Email is enabled but API_KEY is not set. "
            f"See settings.{self.api_key_attr}"
        )
        try:
            self._api_key = getattr(settings, self.api_key_attr)
        except AttributeError:
            raise EmailNotEnabledError(error_msg, code="api_key_attribute_error")
        if not self._api_key:
            raise EmailNotEnabledError(error_msg, code="api_key_is_none")
    return self._api_key
python
{ "resource": "" }
q44022
MailingListManager.subscribe
train
def subscribe(self, user, verbose=None):
    """Returns a response after attempting to subscribe a member
    to the list.

    :raises EmailNotEnabledError: when email is disabled in settings.
    :raises UserEmailError: when the user has no email address.
    """
    if not self.email_enabled:
        raise EmailNotEnabledError("See settings.EMAIL_ENABLED")
    if not user.email:
        raise UserEmailError(f"User {user}'s email address is not defined.")
    payload = {
        "subscribed": True,
        "address": user.email,
        "name": f"{user.first_name} {user.last_name}",
        "description": f'{user.userprofile.job_title or ""}',
        "upsert": "yes",
    }
    response = requests.post(
        f"{self.api_url}/{self.address}/members",
        auth=("api", self.api_key),
        data=payload,
    )
    if verbose:
        sys.stdout.write(
            f"Subscribing {user.email} to {self.address}. "
            f"Got response={response.status_code}.\n"
        )
    return response
python
{ "resource": "" }
q44023
MailingListManager.unsubscribe
train
def unsubscribe(self, user, verbose=None):
    """Returns a response after attempting to unsubscribe a member
    from the list.

    :raises EmailNotEnabledError: when email is disabled in settings.
    """
    if not self.email_enabled:
        raise EmailNotEnabledError("See settings.EMAIL_ENABLED")
    member_url = f"{self.api_url}/{self.address}/members/{user.email}"
    response = requests.put(
        member_url,
        auth=("api", self.api_key),
        data={"subscribed": False},
    )
    if verbose:
        sys.stdout.write(
            f"Unsubscribing {user.email} from {self.address}. "
            f"Got response={response.status_code}.\n"
        )
    return response
python
{ "resource": "" }
q44024
MailingListManager.create
train
def create(self, verbose=None):
    """Returns a response after attempting to create the list.

    :raises EmailNotEnabledError: when email is disabled in settings.
    """
    if not self.email_enabled:
        raise EmailNotEnabledError("See settings.EMAIL_ENABLED")
    payload = {
        "address": self.address,
        "name": self.name,
        "description": self.display_name,
    }
    response = requests.post(self.api_url, auth=("api", self.api_key),
                             data=payload)
    if verbose:
        sys.stdout.write(
            f"Creating mailing list {self.address}. "
            f"Got response={response.status_code}.\n"
        )
    return response
python
{ "resource": "" }
q44025
MailingListManager.delete
train
def delete(self):
    """Returns a response after attempting to delete the list.

    :raises EmailNotEnabledError: when email is disabled in settings.
    """
    if not self.email_enabled:
        raise EmailNotEnabledError("See settings.EMAIL_ENABLED")
    list_url = f"{self.api_url}/{self.address}"
    return requests.delete(list_url, auth=("api", self.api_key))
python
{ "resource": "" }
q44026
MailingListManager.delete_member
train
def delete_member(self, user):
    """Returns a response after attempting to remove a member from
    the list.

    :raises EmailNotEnabledError: when email is disabled in settings.
    """
    if not self.email_enabled:
        raise EmailNotEnabledError("See settings.EMAIL_ENABLED")
    member_url = f"{self.api_url}/{self.address}/members/{user.email}"
    return requests.delete(member_url, auth=("api", self.api_key))
python
{ "resource": "" }
q44027
Rolex._freq_parser
train
def _freq_parser(self, freq): """Parse timedelta. Valid keywords "days", "day", "d", "hours", "hour", "h", "minutes", "minute", "min", "m", "seconds", "second", "sec", "s", "weeks", "week", "w", """ freq = freq.lower().strip() valid_keywords = [ "days", "day", "d", "hours", "hour", "h", "minutes", "minute", "min", "m", "seconds", "second", "sec", "s", "weeks", "week", "w", ] error_message = "'%s' is invalid, use one of %s" % ( freq, valid_keywords) try: # day for surfix in ["days", "day", "d"]: if freq.endswith(surfix): freq = freq.replace(surfix, "") return timedelta(days=int(freq)) # hour for surfix in ["hours", "hour", "h"]: if freq.endswith(surfix): freq = freq.replace(surfix, "") return timedelta(hours=int(freq)) # minute for surfix in ["minutes", "minute", "min", "m"]: if freq.endswith(surfix): freq = freq.replace(surfix, "") return timedelta(minutes=int(freq)) # second for surfix in ["seconds", "second", "sec", "s"]: if freq.endswith(surfix): freq = freq.replace(surfix, "") return timedelta(seconds=int(freq)) # week for surfix in ["weeks", "week", "w"]: if freq.endswith(surfix): freq = freq.replace(surfix, "") return timedelta(days=int(freq) * 7) except: pass raise ValueError(error_message)
python
{ "resource": "" }
q44028
Rolex.weekday_series
train
def weekday_series(self, start, end, weekday, return_date=False):
    """Generate a datetime series with same weekday number.

    ISO weekday number: Mon to Sun = 1 to 7

    Usage::

        >>> start, end = "2014-01-01 06:30:25", "2014-02-01 06:30:25"
        >>> rolex.weekday_series(start, end, weekday=2) # All Tuesday
        [
            datetime(2014, 1, 7, 6, 30, 25),
            datetime(2014, 1, 14, 6, 30, 25),
            datetime(2014, 1, 21, 6, 30, 25),
            datetime(2014, 1, 28, 6, 30, 25),
        ]

    :param weekday: int or list of int (ISO weekday numbers)
    :param return_date: return ``date`` objects instead of ``datetime``.

    Generates the series of timestamps between start and end that fall
    on the given ISO weekday(s).
    """
    start = self.parse_datetime(start)
    end = self.parse_datetime(end)
    if isinstance(weekday, integer_types):
        # Normalize a single weekday to a one-element list.
        weekday = [weekday, ]
    series = list()
    # Walk day by day and keep only matching weekdays.
    for i in self.time_series(
            start, end, freq="1day", return_date=return_date):
        if i.isoweekday() in weekday:
            series.append(i)
    return series
python
{ "resource": "" }
q44029
Rolex._rnd_datetime
train
def _rnd_datetime(self, start, end): """Internal random datetime generator. """ return self.from_utctimestamp( random.randint( int(self.to_utctimestamp(start)), int(self.to_utctimestamp(end)), ) )
python
{ "resource": "" }
q44030
Rolex.add_minutes
train
def add_minutes(self, datetimestr, n):
    """Return the time n minutes after the given time.

    :param datetimestr: a datetime object or a datetime str
    :param n: number of minutes, value can be negative
    """
    moment = self.parse_datetime(datetimestr)
    return moment + timedelta(minutes=n)
python
{ "resource": "" }
q44031
Rolex.add_hours
train
def add_hours(self, datetimestr, n):
    """Return the time n hours after the given time.

    :param datetimestr: a datetime object or a datetime str
    :param n: number of hours, value can be negative
    """
    moment = self.parse_datetime(datetimestr)
    return moment + timedelta(hours=n)
python
{ "resource": "" }
q44032
Rolex.add_weeks
train
def add_weeks(self, datetimestr, n, return_date=False):
    """Return the time n weeks after the given time.

    :param datetimestr: a datetime object or a datetime str
    :param n: number of weeks, value can be negative
    :param return_date: returns a date object instead of datetime
    """
    shifted = self.parse_datetime(datetimestr) + timedelta(days=7 * n)
    return shifted.date() if return_date else shifted
python
{ "resource": "" }
q44033
lint
train
def lint(ctx: click.Context, amend: bool = False, stage: bool = False):
    """
    Runs all linters

    Args:
        ctx: click context
        amend: whether or not to commit results
        stage: whether or not to stage changes
    """
    # Thin CLI wrapper; all work happens in the private _lint helper.
    _lint(ctx, amend, stage)
python
{ "resource": "" }
q44034
_WaitingPost.callback
train
def callback(self, herald_svc, message):
    """
    Tries to call the callback of the post message.
    Any error it raises is logged rather than propagated.

    :param herald_svc: Herald service instance
    :param message: Received answer message
    """
    handler = self.__callback
    if handler is None:
        return
    try:
        # pylint: disable=W0703
        handler(herald_svc, message)
    except Exception as ex:
        _logger.exception("Error calling callback: %s", ex)
python
{ "resource": "" }
q44035
_WaitingPost.errback
train
def errback(self, herald_svc, exception):
    """
    Tries to call the error callback of the post message.
    Any error it raises is logged rather than propagated.

    :param herald_svc: Herald service instance
    :param exception: An exception describing/caused by the error
    """
    handler = self.__errback
    if handler is None:
        return
    try:
        # pylint: disable=W0703
        handler(herald_svc, exception)
    except Exception as ex:
        _logger.exception("Error calling errback: %s", ex)
python
{ "resource": "" }
q44036
zpipe
train
def zpipe(ctx):
    """build inproc pipe for talking to threads

    mimic pipe used in czmq zthread_fork.

    Returns a pair of PAIRs connected via inproc
    """
    endpoint = "inproc://%s" % binascii.hexlify(os.urandom(8))
    left = ctx.socket(zmq.PAIR)
    right = ctx.socket(zmq.PAIR)
    left.linger = 0
    right.linger = 0
    socket_set_hwm(left, 1)
    socket_set_hwm(right, 1)
    left.bind(endpoint)
    right.connect(endpoint)
    return left, right
python
{ "resource": "" }
q44037
get_country
train
def get_country(similar=False, **kwargs):
    """
    Get a country for pycountry.

    With ``similar=True``, does a substring match on country names using
    ``kwargs['name']``; otherwise delegates to ``countries.get(**kwargs)``.
    Returns None when nothing matches.
    """
    found = None
    try:
        if similar:
            wanted = kwargs.get('name', '')
            for candidate in countries:
                if wanted in candidate.name:
                    found = candidate
                    break
        else:
            found = countries.get(**kwargs)
    except Exception as ex:
        msg = ('Country not found in pycountry with params introduced'
               ' - {}'.format(ex))
        logger.error(msg, params=kwargs)
    return found
python
{ "resource": "" }
q44038
get_location
train
def get_location(address=""):
    """
    Retrieve location coordinates from an address introduced.

    Returns a (latitude, longitude) tuple, or None on failure.
    """
    coords = None
    try:
        place = Nominatim().geocode(address)
        coords = (place.latitude, place.longitude)
    except Exception as ex:
        logger.error('Fail get location - {}'.format(ex))
    return coords
python
{ "resource": "" }
q44039
get_address
train
def get_address(coords=None, **kwargs):
    """
    Retrieve an address from a location given in coords format.

    Accepts either ``coords`` (a "lat, lon" string or a 2-item
    list/tuple), or keyword arguments ``latitude``/``longitude`` or
    ``location``. Returns the reverse-geocoded address, or None.
    """
    address = None
    try:
        # BUG FIX: the old condition was `(not coords) and A or B`;
        # because `and` binds tighter than `or`, an explicit ``coords``
        # argument was overwritten whenever 'location' was in kwargs.
        if not coords and (('latitude' in kwargs and 'longitude' in kwargs)
                           or 'location' in kwargs):
            coords = kwargs.get(
                'location',
                (kwargs.get('latitude'), kwargs.get('longitude')))
        # transform coords to a "lat, lon" string
        if isinstance(coords, (list, tuple)) and len(coords) == 2:
            # BUG FIX: was '"{}, {}".join(...)', which produced garbage
            # like '1{}, {}2'; format() builds the intended string.
            coords = "{}, {}".format(*map(str, coords))
        geolocator = Nominatim()
        location = geolocator.reverse(coords)
        address = location.address
    except Exception as ex:
        logger.error('Fail get reverse address - {}'.format(ex))
    return address
python
{ "resource": "" }
q44040
Request.set_documents
train
def set_documents(self, documents, fully_formed=False):
    """ Wrap documents in the correct root tags, add id fields and
    convert them to xml strings.

    Args:
        documents -- If fully_formed is False (default), a dict whose
                keys are document ids and values are an xml string,
                etree.ElementTree, dict representation of an xml
                document (see dict_to_etree()), or None (an empty
                document is created). If fully_formed is True, a list
                of documents or a single document, each already carrying
                its root tag and id field.

    Keyword args:
        fully_formed -- Set to True to skip all parsing overhead when
                documents already contain the right root tags and id
                fields; in that case 'documents' must be a list or a
                single document, never a dict. Default is False.
    """
    def add_id(document, id):
        # Walk (creating as needed) the element path named by
        # document_id_xpath and store the id as the leaf's text.
        def make_id_tag(root, rel_path, max_depth):
            if max_depth < 0:
                raise ParameterError("document_id_xpath too deep!")
            if not rel_path:
                return root
            else:
                child = root.find(rel_path[0])
                if child is None:
                    child = ET.Element(rel_path[0])
                    root.append(child)
                return make_id_tag(child, rel_path[1:], max_depth - 1)
        make_id_tag(document, doc_id_xpath, 10).text = str(id)

    if fully_formed:
        # documents is a list or single document that contains root tags
        # and id fields.
        if not isinstance(documents, list):
            documents = [documents]
    else:
        # documents is a dict with ids as keys and documents as values.
        doc_root_tag = self.connection.document_root_xpath
        # Local scope is faster.
        doc_id_xpath = self.connection.document_id_xpath.split('/')
        # Convert to etrees.
        documents = dict([(id,
                           to_etree((document if document is not None
                                     else query.term('', doc_root_tag)),
                                    doc_root_tag))
                          for id, document in documents.items()])
        # TODO: possibly inefficient
        # If root not the same as given xpath, make new root and append
        # to it.
        for id, document in documents.items():
            if document.tag != doc_root_tag:
                documents[id] = ET.Element(doc_root_tag)
                documents[id].append(document)
                # documents is still the old reference
        # Insert ids in documents and collapse to a list of documents.
        for id, document in documents.items():
            add_id(document, id)
        documents = documents.values()
    # NOTE(review): `map` here is lazy on Python 3 — downstream code
    # appears to expect a list (written for Python 2); confirm.
    self._documents = map(to_raw_xml, documents)
python
{ "resource": "" }
q44041
Request.set_doc_ids
train
def set_doc_ids(self, doc_ids):
    """ Build xml documents from a list of document ids.

    Args:
        doc_ids -- A document id or a list of those.
    """
    ids = doc_ids if isinstance(doc_ids, list) else [doc_ids]
    # Each id maps to None: an empty document is generated downstream.
    self.set_documents({doc_id: None for doc_id in ids})
python
{ "resource": "" }
q44042
Request.add_property
train
def add_property(self, set_property, name, starting_value, tag_name=None):
    """ Install a property named ``name`` on this object's class, backed
    by the ``self._content`` dict and using the given setter.

    Args:
        set_property -- Function that sets given property.
        name -- Name of the attribute this property must simulate. Used
                as key in the content dict by default.
        starting_value -- Starting value of given property.

    Keyword args:
        tag_name -- The tag name stored in the content dict as a key, if
                different from name.
    """
    def del_property(self, tag_name):
        # Deleting a never-set property is a no-op.
        try:
            del self._content[tag_name]
        except KeyError:
            pass

    def get_property(self, tag_name):
        # Unset properties read as None.
        try:
            return self._content[tag_name]
        except KeyError:
            return None

    tag_name = (name if tag_name is None else tag_name)
    fget = lambda self: get_property(self, tag_name)
    fdel = lambda self: del_property(self, tag_name)
    fset = lambda self, value: set_property(value)
    # NOTE(review): the property is installed on the *class*, so every
    # instance gains/overwrites it — confirm that sharing is intended.
    setattr(self.__class__, name, property(fget, fset, fdel))
    set_property(starting_value)
python
{ "resource": "" }
q44043
Request.set_query
train
def set_query(self, value):
    """ Convert a dict form of query into a string if needed and store
    the query string.

    Args:
        value -- A query string (or None), or a dict with query xpaths
                as keys and text or nested query dicts as values.

    Raises:
        TypeError: if value is none of the above.
    """
    if isinstance(value, basestring) or value is None:
        self._content['query'] = value
    elif hasattr(value, 'keys'):
        self._content['query'] = query.terms_from_dict(value)
    else:
        # BUG FIX: the old code did `type(value) + " insted!"`, which
        # raises "unsupported operand type" instead of the intended
        # message; build it with str() instead.
        raise TypeError("Query must be a string or dict. Got: "
                        + str(type(value)) + " insted!")
python
{ "resource": "" }
q44044
Request.get_xml_request
train
def get_xml_request(self):
    """ Make xml request string from stored request information.

    Returns:
        A properly formatted XML request string containing all set
        request fields, wrapped in the connection's envelope.
    """
    def wrap_xml_content(xml_content):
        """ Wrap XML content string in the correct CPS request envelope."""
        fields = ['<?xml version="1.0" encoding="utf-8"?>\n',
                  '<cps:request xmlns:cps="www.clusterpoint.com">\n',
                  '<cps:storage>', self.connection._storage,
                  '</cps:storage>\n']
        if self.timestamp:
            fields += []  # TODO: implement
        if self.request_id:
            fields += ['<cps:request_id>', str(self.request_id),
                       '</cps:request_id>\n']
        if self.connection.reply_charset:
            fields += []  # TODO: implement
        if self.connection.application:
            fields += ['<cps:application>', self.connection.application,
                       '</cps:application>\n']
        fields += ['<cps:command>', self._command, '</cps:command>\n',
                   '<cps:user>', self.connection._user, '</cps:user>\n',
                   '<cps:password>', self.connection._password,
                   '</cps:password>\n',
                   '<cps:account>', self.connection._account,
                   '</cps:account>\n']
        if self.timeout:
            fields += ['<cps:timeout>', str(self.timeout),
                       '</cps:timeout>\n']
        if self.type:
            fields += ['<cps:type>', self.type, '</cps:type>\n']
        if xml_content:
            fields += ['<cps:content>\n', xml_content, '\n</cps:content>\n']
        else:
            # NOTE: += with a plain string extends the list char-by-char;
            # harmless here because everything is joined below.
            fields += '<cps:content/>\n'
        fields += '</cps:request>\n'
        # String concat from list faster than incremental concat.
        xml_request = ''.join(fields)
        return xml_request

    xml_content = []
    if self._documents:
        xml_content += self._documents
    # Nested content: each value is an iterable of (sub_key, sub_value)
    # pairs rendered as child elements; falsy sub-values are skipped.
    for key, value in self._nested_content.items():
        if value:
            xml_content += ['<{0}>'.format(key)] +\
                ['<{0}>{1}</{0}>'.format(sub_key, sub_value)
                 for sub_key, sub_value in value if sub_value] +\
                ['</{0}>'.format(key)]
    # Flat content: scalar values are normalized to one-element lists.
    for key, value in self._content.items():
        if not isinstance(value, list):
            value = [value]
        xml_content += ['<{0}>{1}</{0}>'.format(key, item)
                        for item in value if item]
    xml_content = '\n'.join(xml_content)
    return wrap_xml_content(xml_content)
python
{ "resource": "" }
q44045
Request.send
train
def send(self):
    """ Send an XML string version of content through the connection.

    Returns:
        Response object.
    """
    xml_request = self.get_xml_request()
    if self.connection._debug == 1:
        print(xml_request)
        Debug.warn('-' * 25)
        Debug.warn(self._command)
        Debug.dump("doc: \n", self._documents)
        Debug.dump("cont: \n", self._content)
        Debug.dump("nest cont \n", self._nested_content)
        Debug.dump("Request: \n", xml_request)
    raw_reply = self.connection._send_request(xml_request)
    # TODO: finish the debug output for the parsed response.
    return _handle_response(raw_reply, self._command,
                            self.connection.document_id_xpath)
python
{ "resource": "" }
q44046
format_duration
train
def format_duration(secs):
    """ Format a duration in seconds as minutes and seconds.

    Durations of 60 seconds or less are rendered as plain seconds
    (e.g. '45s', '-30s'); longer ones as 'Xm YYs', keeping the sign
    (e.g. '-1m 30s').
    """
    secs = int(secs)
    if abs(secs) > 60:
        # BUG FIX: the sign was previously tested on `secs` *after* it
        # had been reassigned to the non-negative remainder, so negative
        # durations lost their '-'. Also use // so the minute count is
        # an int on Python 3.
        sign = '-' if secs < 0 else ''
        mins, rem = divmod(abs(secs), 60)
        return '%s%im %02is' % (sign, mins, rem)
    return '%is' % secs
python
{ "resource": "" }
q44047
ClassificationTrainer.learn
train
def learn(self, numEpochs, batchsize):
    """Train the classifier for a given number of epochs, with a given
    batchsize. Shuffles the training data each epoch and feeds
    minibatches to the optimizer.
    """
    for epoch in range(numEpochs):
        print('epoch %d' % epoch)
        order = np.random.permutation(self.trainsize)
        for start in range(0, self.trainsize, batchsize):
            batch_idx = order[start: start + batchsize]
            x = Variable(self.x_train[batch_idx])
            t = Variable(self.y_train[batch_idx])
            self.optimizer.update(self.model, x, t)
python
{ "resource": "" }
q44048
ClassificationTrainer.evaluate
train
def evaluate(self, batchsize):
    """Evaluate how well the classifier is doing.

    Returns a (mean loss, mean accuracy) tuple over the test set,
    computed batch by batch.
    """
    total_loss = 0
    total_acc = 0
    for offset in range(0, self.testsize, batchsize):
        x = Variable(self.x_test[offset: offset + batchsize])
        y = Variable(self.y_test[offset: offset + batchsize])
        loss = self.model(x, y)
        total_loss += loss.data * batchsize
        total_acc += self.model.accuracy.data * batchsize
    return total_loss / self.testsize, total_acc / self.testsize
python
{ "resource": "" }
q44049
ClassificationTrainer.save
train
def save(self, model_filename, optimizer_filename):
    """ Save the state of the model & optimizer to disk """
    # Persist both pieces of training state as HDF5 files.
    for filename, obj in ((model_filename, self.model),
                          (optimizer_filename, self.optimizer)):
        serializers.save_hdf5(filename, obj)
python
{ "resource": "" }
q44050
Classifier.classify
train
def classify(self, phrase_vector):
    """ Run this over an input vector and see the result.

    Wraps the vector in a single-item batch and returns the predictor's
    output for it.
    """
    batch = Variable(np.asarray([phrase_vector]))
    return self.model.predictor(batch).data[0]
python
{ "resource": "" }
q44051
help
train
def help(route): r"""Displays help for the given route. Args: route (str): A route that resolves a member. """ help_text = getRouteHelp(route.split('/') if route else []) if help_text is None: err('Can\'t help :(') else: print '\n%s' % help_text
python
{ "resource": "" }
q44052
Base58Encoder.encode
train
def encode(data: Union[str, bytes]) -> str:
    """Base58-encode *data* and return the result as a string.

    :param data: Bytes or string payload to encode.
    """
    raw = ensure_bytes(data)
    return ensure_str(base58.b58encode(raw))
python
{ "resource": "" }
q44053
KDE.integrate_box
train
def integrate_box(self, low, high, forcequad=False, **kwargs):
    """Integrate the density over ``[low, high]``.

    When adaptive mode is off (and ``forcequad`` is False) this delegates
    to `scipy.stats.gaussian_kde.integrate_box_1d`; otherwise it
    integrates ``self.evaluate`` with `scipy.integrate.quad`.

    Parameters
    ----------
    low : float
        Lower limit of integration.
    high : float
        Upper limit of integration.
    forcequad : bool
        If True, use quad integration even when adaptive mode is off.
    kwargs
        Extra keyword arguments forwarded to `scipy.integrate.quad`.
    """
    use_quad = self.adaptive or forcequad
    if not use_quad:
        return self.gauss_kde.integrate_box_1d(low, high) * self.norm
    return quad(self.evaluate, low, high, **kwargs)[0]
python
{ "resource": "" }
q44054
PlugsMail.validate_context
train
def validate_context(self):
    """Reject duplicate context entries.

    Converting the context tuple to a set removes duplicates; a length
    mismatch between the tuple and the set therefore means duplicates
    exist, which would let template data get switched around.
    """
    if not self.context:
        return
    if len(set(self.context)) != len(self.context):
        LOGGER.error('Cannot have duplicated context objects')
        raise Exception('Cannot have duplicated context objects.')
python
{ "resource": "" }
q44055
PlugsMail.get_instance_of
train
def get_instance_of(self, model_cls):
    """Return the first value in ``self.data`` that is an instance of ``model_cls``.

    Raises:
        Exception: if no matching instance is found.
    """
    for candidate in self.data.values():
        if isinstance(candidate, model_cls):
            return candidate
    LOGGER.error('Context Not Found')
    raise Exception('Context Not Found')
python
{ "resource": "" }
q44056
PlugsMail.get_context
train
def get_context(self):
    """Populate ``self.context_data`` from the models named in ``self.context``.

    ``context`` is optional, but when present it must be a tuple; each
    resolved model instance is stored under the snake_case version of its
    class name.
    """
    if not self.context:
        return
    assert isinstance(self.context, tuple), \
        'Expected a Tuple not {0}'.format(type(self.context))
    for model in self.context:
        model_cls = utils.get_model_class(model)
        key = utils.camel_to_snake(model_cls.__name__)
        self.context_data[key] = self.get_instance_of(model_cls)
python
{ "resource": "" }
q44057
PlugsMail.get_context_data
train
def get_context_data(self):
    """Merge the extra context into the resolved context and return it.

    Equivalent to ``context_data = context + extra_context``; the merged
    dict is stored back on the instance and returned.
    """
    self.get_context()
    self.context_data.update(self.get_extra_context())
    return self.context_data
python
{ "resource": "" }
q44058
PlugsMail.send
train
def send(self, to, language=None, **data):
    """Render the template with the gathered context and send the email.

    Fixes a minor defect: ``msg`` was computed but unused while the exact
    same string was re-formatted inline in the log call.

    Args:
        to: Recipient address(es) passed to ``mail.send``.
        language: Optional language code for the template.
        **data: Objects used to build the template context.
    """
    self.data = data
    self.get_context_data()
    if app_settings['SEND_EMAILS']:
        try:
            if language:
                mail.send(to, template=self.template,
                          context=self.context_data, language=language)
            else:
                mail.send(to, template=self.template,
                          context=self.context_data)
        except EmailTemplate.DoesNotExist:
            # Log (do not re-raise) so a missing template cannot break callers.
            msg = 'Trying to use a non existent email template {0}'.format(self.template)
            LOGGER.error(msg)
python
{ "resource": "" }
q44059
CreateAnAlertAPI.data
train
def data(self):
    """Build the JSON payload for creating a new alert.

    Optional fields that resolve to an empty string are dropped before
    serialisation.

    :return: JSON-encoded alert parameters.
    :rtype: str
    """
    payload = {
        "name": self.name,
        # NOTE(review): attribute is spelled 'queryd' — confirm upstream.
        "query": self.queryd,
        "languages": self.languages,
        "countries": self.countries if self.countries else "",
        "sources": self.sources if self.sources else "",
        "blocked_sites": self.blocked_sites if self.blocked_sites else "",
        "noise_detection": self.noise_detection if self.noise_detection else "",
        "reviews_pages": self.reviews_pages if self.reviews_pages else "",
    }
    # Drop parameters that ended up empty.
    payload = {key: value for key, value in payload.items() if value != ''}
    return json.dumps(payload)
python
{ "resource": "" }
q44060
FetchMentionChildrenAPI.url
train
def url(self):
    """Compose the mention-children endpoint URL from ``self.params``.

    Path placeholders are filled from the params; every remaining
    non-empty param (except auth/path keys) is appended as a query
    placeholder before formatting.

    :return: the fully formatted URL.
    :rtype: str
    """
    end_url = ("/accounts/{account_id}/alerts/{alert_id}/mentions/"
               "{mention_id}/children?")
    path_keys = {"access_token", "account_id", "alert_id"}
    query_params = {k: v for k, v in self.params.items() if k not in path_keys}
    for key, value in query_params.items():
        if value != '':
            end_url += '&' + key + '={' + key + '}'
    return self._base_url + end_url.format(**self.params)
python
{ "resource": "" }
q44061
CurateAMentionAPI.data
train
def data(self):
    """Build the JSON payload for updating (curating) a mention.

    Fixes a defect: the boolean curation flags (favorite/trashed/read)
    were tested for truthiness, so an explicit ``False`` collapsed to ""
    and was dropped — making it impossible to unset a flag. They are now
    included whenever they are not None. Other optional fields keep the
    original empty-string semantics.

    :return: JSON-encoded curation parameters.
    :rtype: str
    """
    payload = {
        # `is not None` so an explicit False still reaches the API.
        "favorite": self.favorite if self.favorite is not None else "",
        "trashed": self.trashed if self.trashed is not None else "",
        "read": self.read if self.read is not None else "",
        "tags": self.tags if self.tags else "",
        "folder": self.folder if self.folder else "",
        "tone": self.tone if self.tone else "",
    }
    # Drop parameters that ended up empty.
    payload = {key: value for key, value in payload.items() if value != ''}
    return json.dumps(payload)
python
{ "resource": "" }
q44062
call
train
def call(command, collect_missing=False, silent=True):
    r"""Calls a task, as if it were called from the command line.

    Args:
        command (str): A route followed by params (shell-style string).
        collect_missing (bool): Collect any missing argument through the
            shell. Defaults to False.

    Returns:
        The return value of the called command.
    """
    executor = _execCommand if silent else execCommand
    return executor(shlex.split(command), collect_missing)
python
{ "resource": "" }
q44063
add
train
def add(TargetGroup, NewMember, Config=None, Args=None):
    r"""Adds a member (task or group) to an existing group.

    Args:
        TargetGroup (Group): The group receiving the new member.
        NewMember (Group / Task): The member to be added.
        Config (dict): The config for the member.
        Args (OrderedDict): ArgConfig when NewMember is a task (optional).
    """
    if isfunction(NewMember):
        Member = Task(NewMember, Args or {}, Config or {})
    else:
        Member = Group(NewMember, Config or {})
    members = TargetGroup.__ec_member__.Members
    members[Member.Config['name']] = Member
    # Register the alias as a second key pointing at the same member.
    alias = Member.Config.get('alias')
    if alias:
        members[alias] = Member
python
{ "resource": "" }
q44064
do_check_pep8
train
def do_check_pep8(files, status):
    """Run flake8 (PEP8 checks) against each supplied file.

    Args:
        files (str): list of files to run flake8 against.
        status (list): accumulated pre-receive check failures.

    Returns:
        The ``status`` list, possibly extended with new failures.
    """
    for file_name in files:
        output = run('flake8', '--max-line-length=120', '{0}'.format(file_name))
        if output:
            status.append("Python PEP8/Flake8: {0}: {1}".format(file_name, output))
    return status
python
{ "resource": "" }
q44065
do_check
train
def do_check(func, files, status):
    """Generic check helper: apply ``func.parse`` to each file's contents.

    Args:
        func: object exposing ``parse(content, file_name)``.
        files (list): file paths to check.
        status (list): accumulated pre-receive check failures.

    Returns:
        The ``status`` list, possibly extended with new failures.
    """
    for file_name in files:
        with open(file_name, 'r') as handle:
            contents = handle.read()
        output = func.parse(contents, file_name)
        if output:
            status.append("{0}: {1}".format(file_name, output))
    return status
python
{ "resource": "" }
q44066
check_for_empty_defaults
train
def check_for_empty_defaults(status):
    """Flag unused ansible-galaxy scaffolding in the current directory.

    ``ansible-galaxy`` creates default directories (vars/handlers/
    defaults/tasks, plus files/ and templates/) containing placeholder
    ``main.yml`` files such as::

        ---
        # defaults file for myrole

    Best practice is to remove any of these that are unused, so this
    check reports empty files/templates directories and untouched
    scaffold ``main.yml`` files.

    Args:
        status (list): accumulated pre-receive check failures.

    Returns:
        The ``status`` list, possibly extended with new failures.
    """
    scaffold_dirs = ('./vars', './handlers', './defaults', './tasks')
    # Matches exactly: "---\n# <something> file for <role>\n"
    placeholder = re.compile(r'^---\n# \S+ file for \S+\n$')
    for dirpath, dirname, filename in os.walk('.'):
        if dirpath in ('./files', './templates') and not any([dirname, filename]):
            status.append("There are no files in the {0} directory. please"
                          " remove directory".format(dirpath))
        if dirpath in scaffold_dirs:
            joined_filename = os.path.join(dirpath, 'main.yml')
            try:
                with open(joined_filename, 'r') as f:
                    if placeholder.match(f.read()):
                        status.append("Empty file, please remove file and "
                                      "directory: {0}".format(joined_filename))
            except IOError:
                # No main.yml here — that can be legitimate.
                pass
    return status
python
{ "resource": "" }
q44067
Revocation.from_inline
train
def from_inline(cls: Type[RevocationType], version: int, currency: str, inline: str) -> RevocationType:
    """Return a Revocation document instance from an inline string.

    Only the pubkey is populated; assign an Identity instance to
    ``self.identity`` before using raw/sign/signed_raw methods.

    :param version: Version number
    :param currency: Name of the currency
    :param inline: Inline document
    :return: A new Revocation instance.
    """
    match = Revocation.re_inline.match(inline)
    if match is None:
        raise MalformedDocumentError("Revokation")
    pubkey, signature = match.group(1), match.group(2)
    return cls(version, currency, pubkey, signature)
python
{ "resource": "" }
q44068
Revocation.from_signed_raw
train
def from_signed_raw(cls: Type[RevocationType], signed_raw: str) -> RevocationType:
    """Return a Revocation document instance from a signed raw string.

    :param signed_raw: raw document file in duniter format
    :return: A new Revocation instance.
    """
    # Fields appear in a fixed order; consume the lines sequentially.
    lines = iter(signed_raw.splitlines(True))
    version = int(Revocation.parse_field("Version", next(lines)))
    Revocation.parse_field("Type", next(lines))
    currency = Revocation.parse_field("Currency", next(lines))
    issuer = Revocation.parse_field("Issuer", next(lines))
    identity_uid = Revocation.parse_field("IdtyUniqueID", next(lines))
    identity_timestamp = Revocation.parse_field("IdtyTimestamp", next(lines))
    identity_signature = Revocation.parse_field("IdtySignature", next(lines))
    signature = Revocation.parse_field("Signature", next(lines))
    identity = Identity(version, currency, issuer, identity_uid,
                        identity_timestamp, identity_signature)
    return cls(version, currency, identity, signature)
python
{ "resource": "" }
q44069
Revocation.extract_self_cert
train
def extract_self_cert(signed_raw: str) -> Identity:
    """Return the self-certified Identity embedded in a signed raw Revocation document.

    :param signed_raw: Signed raw document string
    :return: The embedded Identity instance.
    """
    # Fields appear in a fixed order; consume the lines sequentially.
    lines = iter(signed_raw.splitlines(True))
    version = int(Revocation.parse_field("Version", next(lines)))
    Revocation.parse_field("Type", next(lines))
    currency = Revocation.parse_field("Currency", next(lines))
    issuer = Revocation.parse_field("Issuer", next(lines))
    unique_id = Revocation.parse_field("IdtyUniqueID", next(lines))
    timestamp = Revocation.parse_field("IdtyTimestamp", next(lines))
    signature = Revocation.parse_field("IdtySignature", next(lines))
    return Identity(version, currency, issuer, unique_id, timestamp, signature)
python
{ "resource": "" }
q44070
Revocation.signed_raw
train
def signed_raw(self) -> str:
    """Return the Revocation signed raw document string.

    :raises MalformedDocumentError: if this revocation was created from
        an inline string and therefore has no full Identity attached.
    """
    if not isinstance(self.identity, Identity):
        raise MalformedDocumentError("Can not return full revocation document created from inline")
    # Raw content followed by the signatures, one per line.
    return self.raw() + "\n".join(self.signatures) + "\n"
python
{ "resource": "" }
q44071
clean_text
train
def clean_text(text):
    """Return plain text with markdown syntax removed and whitespace collapsed.

    Robustness fix: falsy input (None, '') is returned unchanged instead
    of being passed to ``re.sub`` (which raised TypeError for None).
    """
    if not text:
        return text
    plain = html2text.html2text(clean_markdown(text))
    return re.sub(r'\s+', ' ', plain).strip()
python
{ "resource": "" }
q44072
clean_markdown
train
def clean_markdown(text):
    """Render markdown to HTML and return only its text content.

    Non-string input is returned unchanged.
    """
    if not isinstance(text, str):
        return text
    html = markdown(text)
    return ''.join(BeautifulSoup(html, 'lxml').findAll(text=True))
python
{ "resource": "" }
q44073
select_regexp_char
train
def select_regexp_char(char):
    """Return the regex fragment matching ``char``.

    Non-str/int values yield ''; punctuation characters are escaped with
    a backslash; letters, digits and ints pass through as-is.
    """
    if not isinstance(char, str) and not isinstance(char, int):
        return ''
    if isinstance(char, str) and not char.isalpha() and not char.isdigit():
        return r"\{}".format(char)
    return '{}'.format(char)
python
{ "resource": "" }
q44074
exclude_chars
train
def exclude_chars(text, exclusion=None):
    """Remove every character listed in ``exclusion`` from ``text``."""
    chars = [] if exclusion is None else exclusion
    # Build an alternation of the (escaped) characters to strip.
    pattern = r"|".join(select_regexp_char(c) for c in chars) or r''
    return re.sub(pattern, '', text)
python
{ "resource": "" }
q44075
strip_accents
train
def strip_accents(text):
    """Return ``text`` with combining accent marks removed.

    The string is NFD-decomposed so accents become separate combining
    marks (category 'Mn'), which are then filtered out.
    """
    decomposed = unicodedata.normalize('NFD', text)
    return ''.join(ch for ch in decomposed
                   if unicodedata.category(ch) != 'Mn')
python
{ "resource": "" }
q44076
normalizer
train
def normalizer(text, exclusion=OPERATIONS_EXCLUSION, lower=True, separate_char='-', **kwargs):
    """Normalise ``text`` into a slug-like string.

    Non-word characters (except those in ``exclusion``) become
    ``separate_char``; accents are stripped, dash runs collapsed, and a
    single leading/trailing separator trimmed.

    Args:
        text: input string.
        exclusion: characters to keep besides word characters.
        lower: lowercase the result when True.
        separate_char: replacement/separator character.
    """
    kept = "".join(exclusion)
    replaced = re.sub(r'[^\w{}]'.format(kept), separate_char, text.strip()) or ''
    no_accents = strip_accents(replaced)
    result = no_accents
    if '_' not in exclusion:
        result = re.sub(r'\_', separate_char, no_accents.strip())
    result = re.sub(r'\-+', separate_char, result.strip())
    # Trim one leading/trailing separator (TODO: refactor with a regexp).
    if result and separate_char and separate_char in result[0]:
        result = result[1:]
    if result and separate_char and separate_char in result[-1]:
        result = result[:-1]
    return result.lower() if lower else result
python
{ "resource": "" }
q44077
normalize_dict
train
def normalize_dict(dictionary, **kwargs):
    """Return ``dictionary`` with every key normalised, recursively.

    Non-dict input is returned unchanged.
    """
    if not isinstance(dictionary, dict):
        return dictionary
    return {normalizer(key, **kwargs): normalize_dict(value, **kwargs)
            for key, value in dictionary.items()}
python
{ "resource": "" }
q44078
pluralize
train
def pluralize(data_type):
    """Return the English plural of ``data_type``.

    Known irregular forms are looked up; everything else just gets 's'.
    """
    irregular = {
        u"address": u"addresses",
        u"company": u"companies",
    }
    return irregular.get(data_type, u"%ss" % data_type)
python
{ "resource": "" }
q44079
remove_properties_containing_None
train
def remove_properties_containing_None(properties_dict):
    """Return a copy of ``properties_dict`` without None-valued keys.

    JSON-schema validation may fail on properties that are set to None
    but typed otherwise, so such entries are dropped entirely.
    """
    return {key: value for key, value in properties_dict.items()
            if value is not None}
python
{ "resource": "" }
q44080
dict_to_object
train
def dict_to_object(d):
    """Recursively convert a dict into a ``CreateSendModel`` attribute object.

    Nested dicts become nested objects; dicts inside sequences are
    converted in place, preserving the sequence type.
    """
    model = type('CreateSendModel', (object,), d)
    sequence_types = (tuple, list, set, frozenset)
    for key, value in d.items():
        if isinstance(value, dict):
            setattr(model, key, dict_to_object(value))
        elif isinstance(value, sequence_types):
            converted = type(value)(
                dict_to_object(item) if isinstance(item, dict) else item
                for item in value)
            setattr(model, key, converted)
        else:
            setattr(model, key, value)
    return model
python
{ "resource": "" }
q44081
veq_samples
train
def veq_samples(R_dist, Prot_dist, N=1e4, alpha=0.23, l0=20, sigl=20):
    """Draw equatorial-velocity samples including differential rotation.

    Latitudes are drawn from N(l0, sigl); rotation periods are scaled by
    ``diff_Prot_factor`` before converting radius/period to km/s.
    """
    lats = stats.norm(l0, sigl).rvs(N)
    periods = Prot_dist.rvs(N) * diff_Prot_factor(lats, alpha)
    # v_eq = 2*pi*R / P, converted from cm/s to km/s.
    return R_dist.rvs(N) * 2 * np.pi * RSUN / (periods * DAY) / 1e5
python
{ "resource": "" }
q44082
cleanup
train
def cleanup():
    """Reset the installation directory.

    Removes ``$CONTAINER_DIR/_lib`` if it exists and recreates it empty.
    """
    lib_dir = os.path.join(os.environ['CONTAINER_DIR'], '_lib')
    if os.path.exists(lib_dir):
        shutil.rmtree(lib_dir)
    os.mkdir(lib_dir)
python
{ "resource": "" }
q44083
create_project_venv
train
def create_project_venv():
    """Create a project-level virtualenv.

    :raises Exception: if the virtualenv directory already exists.
    :return: ``VirtualEnv`` object for the new environment.
    """
    print('... creating project-level virtualenv')
    venv_dir = get_project_venv_dir()
    if os.path.exists(venv_dir):
        raise Exception('ERROR: virtualenv already exists!')
    # Prefer the stdlib venv module on Python 3 unless explicitly overridden.
    use_venv_module = (sys.version_info >= (3, 0)
                       and 'APE_USE_VIRTUALENV' not in os.environ)
    VirtualEnv.create_virtualenv(venv_dir, use_venv_module=use_venv_module)
    print('... virtualenv successfully created')
    return VirtualEnv(venv_dir)
python
{ "resource": "" }
q44084
fetch_pool
train
def fetch_pool(repo_url, branch='master', reuse_existing=False):
    """Fetch a git repository from ``repo_url`` and return a ``FeaturePool``.

    Args:
        repo_url: URL of the repository to clone.
        branch: branch to check out after cloning.
        reuse_existing: allow an already-cloned repository to be reused.

    :raises Exception: on missing git, failed clone/checkout, or an
        existing repository when ``reuse_existing`` is False.
    """
    repo_name = get_repo_name(repo_url)
    lib_dir = get_lib_dir()
    pool_dir = get_pool_dir(repo_name)
    print('... fetching %s ' % repo_name)
    if os.path.exists(pool_dir):
        if not reuse_existing:
            raise Exception('ERROR: repository already exists')
    else:
        try:
            result = call(['git', 'clone', repo_url], cwd=lib_dir)
        except OSError:
            raise Exception('ERROR: You probably dont have git installed: sudo apt-get install git')
        if result != 0:
            raise Exception('ERROR: check your repository url and credentials!')
    try:
        call(['git', 'checkout', branch], cwd=pool_dir)
    except OSError:
        raise Exception('ERROR: cannot switch branches')
    print('... repository successfully cloned')
    return FeaturePool(pool_dir)
python
{ "resource": "" }
q44085
noglobals
train
def noglobals(fn):
    """Return a copy of ``fn`` whose globals contain only ``__builtins__``.

    Functions wrapped this way cannot read module-level globals. Handles
    both Python 2 (``func_*``) and Python 3 (``__*__``) attribute names.
    """
    def _attr(py2_name, py3_name):
        return getattr(fn, py2_name, getattr(fn, py3_name))

    return type(fn)(
        _attr('func_code', '__code__'),
        {'__builtins__': builtins},
        _attr('func_name', '__name__'),
        _attr('func_defaults', '__defaults__'),
        _attr('func_closure', '__closure__'),
    )
python
{ "resource": "" }
q44086
force_list
train
def force_list(element):
    """Wrap ``element`` in a list unless it is already a list or iterator.

    Compatibility fix: ``collections.Iterator`` was removed in Python
    3.10; use ``collections.abc.Iterator`` instead.

    Returns:
        [] for None; the element itself for lists/iterators; otherwise a
        single-element list.
    """
    if element is None:
        return []
    if isinstance(element, (collections.abc.Iterator, list)):
        return element
    return [element]
python
{ "resource": "" }
q44087
flatten
train
def flatten(data, parent_key='', sep='_'):
    """Flatten nested dicts/lists into a single-level OrderedDict.

    Nested keys are joined with ``sep``; list items are keyed by their
    index. Non-container input is returned unchanged.

    Compatibility fix: ``collections.MutableMapping`` was removed in
    Python 3.10; use ``collections.abc.MutableMapping`` instead.
    """
    items = []
    if isinstance(data, list):
        logger.debug('Flattening list {}'.format(data))
        # Lists are flattened by treating indices as string keys.
        list_keys = [str(i) for i in range(0, len(data))]
        items.extend(flatten(dict(zip(list_keys, data)), parent_key, sep=sep).items())
    elif isinstance(data, dict):
        logger.debug('Flattening dict {}'.format(data))
        for key, value in data.items():
            new_key = parent_key + sep + key if parent_key else key
            if isinstance(value, collections.abc.MutableMapping):
                items.extend(flatten(value, new_key, sep=sep).items())
            elif isinstance(value, list):
                list_keys = [str(i) for i in range(0, len(value))]
                items.extend(
                    flatten(dict(zip(list_keys, value)), new_key, sep=sep).items())
            else:
                items.append((new_key, value))
    else:
        logger.debug('Nothing to flatten with {}'.format(data))
        return data
    return collections.OrderedDict(items)
python
{ "resource": "" }
q44088
nested_dict_to_list
train
def nested_dict_to_list(path, dic, exclusion=None):
    """Flatten a nested dict into ``[path, key, value]`` triples.

    Keys containing any of the ``exclusion`` markers (default:
    '__self') are skipped entirely; nested dicts extend the path with
    '<key>/'.
    """
    excluded = ['__self'] if exclusion is None else exclusion
    triples = []
    for key, value in dic.items():
        if any(marker in key for marker in excluded):
            continue
        if isinstance(value, dict):
            # Recursion uses the default exclusion, as before.
            triples.extend(nested_dict_to_list(path + key + "/", value))
        else:
            # NOTE: trimming rebinds `path` for subsequent iterations,
            # mirroring the original behaviour.
            if path.endswith("/"):
                path = path[:-1]
            triples.append([path, key, value])
    return triples
python
{ "resource": "" }
q44089
find_value_in_object
train
def find_value_in_object(attr, obj):
    """Yield every value stored under key ``attr`` anywhere inside ``obj``.

    Recurses through mappings, lists and iterators; list/iterator values
    found under ``attr`` are yielded item by item (flattened one level).

    Compatibility fix: ``collections.Iterator``/``collections.Mapping``
    were removed in Python 3.10; use the ``collections.abc`` forms.
    """
    # Carry on inspecting inside the list/iterator.
    if isinstance(obj, (collections.abc.Iterator, list)):
        for item in obj:
            yield from find_value_in_object(attr, item)
    # Mapping: yield any direct match, then inspect nested values.
    elif isinstance(obj, collections.abc.Mapping):
        if attr in obj:
            if isinstance(obj[attr], (collections.abc.Iterator, list)):
                # Flatten one level to avoid yielding nested lists.
                for item in obj[attr]:
                    yield item
            else:
                yield obj[attr]
        for item in obj.values():
            if item:
                yield from find_value_in_object(attr, item)
python
{ "resource": "" }
q44090
dict2orderedlist
train
def dict2orderedlist(dic, order_list, default='', **kwargs):
    """Return the values of ``dic`` in the order given by ``order_list``.

    Missing values (None) are replaced by ``default``.
    """
    ordered = []
    for key in order_list:
        value = get_element(dic, key, **kwargs)
        ordered.append(default if value is None else value)
    return ordered
python
{ "resource": "" }
q44091
get_dimension
train
def get_dimension(data):
    """Return ``[rows, cols]`` for ``data``, whether it is a list or a dict."""
    if isinstance(data, list):
        return get_dimension_array(data)
    if isinstance(data, dict):
        return get_dimension_dict(data)
    return [0, 0]
python
{ "resource": "" }
q44092
get_dimension_array
train
def get_dimension_array(array):
    """Return ``[rows, cols]`` for a (possibly nested) list.

    For a list of lists the column count is the length of the longest
    row; a flat list counts as a single column.

    Robustness fix: an empty list now returns [0, 0] instead of raising
    ValueError (``max`` of an empty sequence).
    """
    if not array:
        return [0, 0]
    if all(isinstance(el, list) for el in array):
        return [len(array), len(max(array, key=len))]
    return [len(array), 1]
python
{ "resource": "" }
q44093
get_ldict_keys
train
def get_ldict_keys(ldict, flatten_keys=False, **kwargs):
    """Return the unique first-level keys found across a list of dicts.

    When ``flatten_keys`` is True each dict is flattened first, so
    nested keys are reported in their joined form. Non-dict entries are
    ignored.
    """
    keys = []
    for entry in ldict:
        if not isinstance(entry, dict):
            continue
        if flatten_keys:
            entry = flatten(entry, **kwargs)
        keys.extend(entry.keys())
    return list(set(keys))
python
{ "resource": "" }
q44094
get_alldictkeys
train
def get_alldictkeys(ddict, parent=None):
    """Return every key path in ``ddict`` as a list of tuples."""
    trail = [] if parent is None else parent
    if not isinstance(ddict, dict):
        # Leaf reached: the accumulated trail is one complete key path.
        return [tuple(trail)]
    paths = []
    for key, value in ddict.items():
        paths.extend(get_alldictkeys(value, trail + [key]))
    return paths
python
{ "resource": "" }
q44095
clean_dictkeys
train
def clean_dictkeys(ddict, exclusions=None):
    """Strip ``exclusions`` characters from dict keys, recursively.

    A renamed key colliding with an existing key has its value merged
    into a list via ``add_element``. Non-dict input yields {}.
    """
    exclusions = [] if exclusions is None else exclusions
    if not isinstance(ddict, dict):
        return {}
    for key in list(ddict.keys()):
        if any(marker in key for marker in exclusions):
            data = ddict.pop(key)
            clean_key = exclude_chars(key, exclusions)
            if clean_key:
                if clean_key in ddict:
                    # Merge with the existing value instead of overwriting.
                    ddict[clean_key] = force_list(ddict[clean_key])
                    add_element(ddict, clean_key, data)
                else:
                    ddict[clean_key] = data
        # NOTE: for renamed keys ddict.get(key) is now None, so the
        # recursion below only applies to keys kept as-is (original behaviour).
        value = ddict.get(key)
        if isinstance(value, dict):
            ddict[key] = clean_dictkeys(value, exclusions)
        elif isinstance(value, list):
            for row in value:
                if isinstance(row, dict):
                    row = clean_dictkeys(row, exclusions)
    return ddict
python
{ "resource": "" }
q44096
authenticate
train
def authenticate(previous_token=None):
    """Authenticate the client to the server.

    Tries to reuse ``previous_token`` first; otherwise performs the full
    challenge/response flow (begin_auth, sign the token, authenticate).

    Returns:
        A valid session token.

    Raises:
        SystemExit: when every authentication attempt fails.
    """
    # Fast path: an existing session token may still be valid.
    if previous_token is not None:
        headers = server_connection.request("authenticate", {
            'session_token': previous_token,
            'repository': config['repository']})[1]  # Only care about headers
        if headers['status'] == 'ok':
            return previous_token

    # Full flow: request a challenge token, sign it, send it back.
    headers = server_connection.request(
        "begin_auth", {'repository': config['repository']})[1]
    if headers['status'] == 'ok':
        signature = base64.b64encode(pysodium.crypto_sign_detached(
            headers['auth_token'].decode('utf-8'), config['private_key']))
        headers = server_connection.request("authenticate", {
            'auth_token': headers['auth_token'],
            'signature': signature,
            'user': config['user'],
            'repository': config['repository']})[1]
        if headers['status'] == 'ok':
            return headers['session_token']
    raise SystemExit('Authentication failed')
python
{ "resource": "" }
q44097
find_local_changes
train
def find_local_changes():
    """Return the local manifest and the changes since the last run.

    Files matching any configured ignore filter are excluded before the
    comparison.
    """
    manifest = data_store.read_local_manifest()
    previous_files = manifest['files']
    current_files = get_file_list(config['data_dir'])

    def _ignored(entry):
        # True when the file path matches any ignore glob.
        return any(fnmatch.fnmatch(entry['path'], pattern)
                   for pattern in config['ignore_filters'])

    current_files = [entry for entry in current_files if not _ignored(entry)]
    return manifest, find_manifest_changes(current_files, previous_files)
python
{ "resource": "" }
q44098
register_action
train
def register_action(action):
    """Register an action with the CLI parser.

    :param action(BaseAction): a subclass of the BaseAction class
    """
    sub = _subparsers.add_parser(action.meta('cmd'), help=action.meta('help'))
    sub.set_defaults(cmd=action.meta('cmd'))
    # Expose each declared prop as a CLI argument on the subparser.
    for arg in action.props().values():
        sub.add_argument(arg.name, arg.flag, **arg.options)
    _actions[action.meta('cmd')] = action
python
{ "resource": "" }
q44099
run
train
def run(*args, **kwargs):
    """Parse CLI arguments and execute the matching action handler.

    Arguments default to ``sys.argv`` but can be forced explicitly (for
    tests), e.g.::

        parser.run(['build', '--path', '/custom-app-path'])

    ``BaseError`` raised by a handler is caught and its message shown to
    the user instead of propagating.
    """
    cmd = _parser.parse_args(*args, **kwargs)
    if not hasattr(cmd, 'cmd'):
        return _parser.print_help()
    Action = _actions.get(cmd.cmd)
    action = Action()
    try:
        action(**{name: getattr(cmd, name) for name in action.props().keys()})
    except errors.BaseError as e:
        e.print_error()
python
{ "resource": "" }