code
stringlengths
75
104k
docstring
stringlengths
1
46.9k
def _set_weight(self, v, load=False):
    """
    Setter method for weight, mapped from YANG variable
    /routing_system/route_map/content/set/weight (container).

    If this variable is read-only (config: false) in the source YANG file,
    then _set_weight is considered as a private method. Backends looking to
    populate this variable should do so via calling thisObj._set_weight()
    directly.

    YANG Description: BGP weight for routing table
    """
    # Values that carry their own YANG type wrapper are unwrapped first.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Coerce the raw value into the generated `weight` container class;
        # YANGDynClass validates the type and registers the node's paths.
        t = YANGDynClass(v,base=weight.weight, is_container='container', presence=False, yang_name="weight", rest_name="weight", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'BGP weight for routing table', u'cli-full-no': None}}, namespace='urn:brocade.com:mgmt:brocade-ip-policy', defining_module='brocade-ip-policy', yang_type='container', is_config=True)
    except (TypeError, ValueError):
        # Re-raise as ValueError with a structured payload describing the
        # expected generated type (pyangbind convention).
        raise ValueError({
            'error-string': """weight must be of a type compatible with container""",
            'defined-type': "container",
            'generated-type': """YANGDynClass(base=weight.weight, is_container='container', presence=False, yang_name="weight", rest_name="weight", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'BGP weight for routing table', u'cli-full-no': None}}, namespace='urn:brocade.com:mgmt:brocade-ip-policy', defining_module='brocade-ip-policy', yang_type='container', is_config=True)""",
        })
    self.__weight = t
    # Notify the parent object (if it supports change notification).
    if hasattr(self, '_set'):
        self._set()
Setter method for weight, mapped from YANG variable /routing_system/route_map/content/set/weight (container) If this variable is read-only (config: false) in the source YANG file, then _set_weight is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_weight() directly. YANG Description: BGP weight for routing table
def replace(self, key, value, expire=0, noreply=None):
    """
    The memcached "replace" command: store ``value`` under ``key`` only if
    the key already exists.

    Args:
      key: str, see class docs for details.
      value: str, see class docs for details.
      expire: optional int, seconds until expiry; zero (default) means no
          expiry.
      noreply: optional bool, True to skip waiting for the server reply
          (defaults to self.default_noreply).

    Returns:
      True when noreply is True; otherwise True if the value was stored
      and False if it wasn't (the key did not already exist).
    """
    effective_noreply = self.default_noreply if noreply is None else noreply
    results = self._store_cmd(b'replace', {key: value}, expire, effective_noreply)
    return results[key]
The memcached "replace" command. Args: key: str, see class docs for details. value: str, see class docs for details. expire: optional int, number of seconds until the item is expired from the cache, or zero for no expiry (the default). noreply: optional bool, True to not wait for the reply (defaults to self.default_noreply). Returns: If noreply is True, always returns True. Otherwise returns True if the value was stored and False if it wasn't (because the key didn't already exist).
def list_locked(**kwargs):
    '''
    Query the package database for packages which are locked against
    reinstallation, modification or deletion.

    Returns a list of package names with version strings.

    CLI Example:

    .. code-block:: bash

        salt '*' pkg.list_locked

    jail
        List locked packages within the specified jail

        CLI Example:

        .. code-block:: bash

            salt '*' pkg.list_locked jail=<jail name or id>

    chroot
        List locked packages within the specified chroot (ignored if
        ``jail`` is specified)

        CLI Example:

        .. code-block:: bash

            salt '*' pkg.list_locked chroot=/path/to/chroot

    root
        List locked packages within the specified root (ignored if
        ``jail`` is specified)

        CLI Example:

        .. code-block:: bash

            salt '*' pkg.list_locked root=/path/to/chroot
    '''
    # _lockcmd('lock', name=None, ...) lists currently locked package names;
    # append each package's installed version for display.
    locked_names = _lockcmd('lock', name=None, **kwargs)
    return ['{0}-{1}'.format(name, version(name, **kwargs))
            for name in locked_names]
Query the package database for those packages which are locked against reinstallation, modification or deletion. Returns a list of package names with version strings CLI Example: .. code-block:: bash salt '*' pkg.list_locked jail List locked packages within the specified jail CLI Example: .. code-block:: bash salt '*' pkg.list_locked jail=<jail name or id> chroot List locked packages within the specified chroot (ignored if ``jail`` is specified) CLI Example: .. code-block:: bash salt '*' pkg.list_locked chroot=/path/to/chroot root List locked packages within the specified root (ignored if ``jail`` is specified) CLI Example: .. code-block:: bash salt '*' pkg.list_locked root=/path/to/chroot
def _options_protobuf(self, retry_id):
    """Convert the current object to a ``TransactionOptions`` protobuf.

    ``retry_id`` is used when retrying a transaction that failed (e.g. due
    to contention); it should be the "first" transaction that failed when
    multiple retries are needed.

    Args:
        retry_id (Union[bytes, NoneType]): Transaction ID of a transaction
            to be retried.

    Returns:
        Optional[google.cloud.firestore_v1beta1.types.TransactionOptions]:
            The protobuf options if read-only or retrying, else :data:`None`.

    Raises:
        ValueError: If ``retry_id`` is not :data:`None` but the transaction
            is read-only (retries only make sense for read-write).
    """
    if retry_id is None:
        if self._read_only:
            return types.TransactionOptions(
                read_only=types.TransactionOptions.ReadOnly()
            )
        return None
    # A retry ID implies a read-write transaction.
    if self._read_only:
        raise ValueError(_CANT_RETRY_READ_ONLY)
    return types.TransactionOptions(
        read_write=types.TransactionOptions.ReadWrite(
            retry_transaction=retry_id
        )
    )
Convert the current object to protobuf. The ``retry_id`` value is used when retrying a transaction that failed (e.g. due to contention). It is intended to be the "first" transaction that failed (i.e. if multiple retries are needed). Args: retry_id (Union[bytes, NoneType]): Transaction ID of a transaction to be retried. Returns: Optional[google.cloud.firestore_v1beta1.types.TransactionOptions]: The protobuf ``TransactionOptions`` if ``read_only==True`` or if there is a transaction ID to be retried, else :data:`None`. Raises: ValueError: If ``retry_id`` is not :data:`None` but the transaction is read-only.
def git_push():
    """
    Push new version and corresponding tag to origin.

    Commits the version files, creates a ``major.minor.patch`` tag from the
    current package version, and pushes both to origin.

    :return: None
    """
    # Current version string, e.g. "1.2.3".
    new_version = version.__version__
    # Unpack and validate that the version is three dot-separated integers
    # (clearer than the original list(map(lambda ...)) construction, and
    # fails loudly on a malformed version string).
    major, minor, patch = (int(part) for part in new_version.split('.'))
    # Commit new version, create the tag and push both to origin.
    local('git add pynb/version.py version.py')
    local('git commit -m "updated version"')
    local('git tag {}.{}.{}'.format(major, minor, patch))
    local('git push origin --tags')
    local('git push')
Push new version and corresponding tag to origin :return:
def _save_stdin(self, stdin): """ Creates a temporary dir (self.temp_dir) and saves the given input stream to a file within that dir. Returns the path to the file. The dir is removed in the __del__ method. """ self.temp_dir = TemporaryDirectory() file_path = os.path.join(self.temp_dir.name, 'dataset') try: with open(file_path, 'w') as f: for line in stdin: f.write(line) except TypeError: self.temp_dir.cleanup() raise ValueError('Could not read stdin') return file_path
Creates a temporary dir (self.temp_dir) and saves the given input stream to a file within that dir. Returns the path to the file. The dir is removed in the __del__ method.
def pipool(name, ivals):
    """
    Programmatically insert integer data into the SPICE kernel pool.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/pipool_c.html

    :param name: The kernel pool name to associate with values.
    :type name: str
    :param ivals: An array of integers to insert into the pool.
    :type ivals: Array of ints
    """
    # Marshal Python values into the ctypes forms the C API expects.
    name_p = stypes.stringToCharP(name)
    count = ctypes.c_int(len(ivals))
    int_vector = stypes.toIntVector(ivals)
    libspice.pipool_c(name_p, count, int_vector)
This entry point provides toolkit programmers a method for programmatically inserting integer data into the kernel pool. http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/pipool_c.html :param name: The kernel pool name to associate with values. :type name: str :param ivals: An array of integers to insert into the pool. :type ivals: Array of ints
def _partial_extraction_fixed(self, idx, extra_idx=0): """ Private method for a single extraction on a fixed-type tab file """ myarray = np.array([]) with open(self.abspath) as fobj: contents = fobj.readlines()[idx+extra_idx:] for line in contents: try: vals = re.findall(r' *[\w\-\+\.]*', line) temp = np.array([float(val) for val in vals if val not in ('', ' ')]) myarray = np.hstack((myarray, temp)) except ValueError: break return myarray
Private method for a single extraction on a fixed-type tab file
def get_over_current(self, channel):
    '''Read the over-current status of a power channel.

    Returns True when the over-current GPIO reads low (active-low signal).
    Raises ValueError for channels without an over-current GPIO mapping.
    '''
    try:
        gpio_bit = self._ch_map[channel]['GPIOOC']['bit']
    except KeyError:
        raise ValueError('get_over_current() not supported for channel %s' % channel)
    return not self._get_power_gpio_value(gpio_bit)
Reading over current status of power channel
def prepare_destruction(self):
    """Get rid of circular references so the controller can be garbage
    collected."""
    # Drop references to collaborating objects.
    self._tool = None
    self._painter = None
    # Stop observing the selection model before releasing it.
    self.relieve_model(self._selection)
    self._selection = None
    # Clear observer class attributes (name-mangled Observer internals),
    # also see ExtendenController.destroy().
    self._Observer__PROP_TO_METHS.clear()
    self._Observer__METH_TO_PROPS.clear()
    self._Observer__PAT_TO_METHS.clear()
    self._Observer__METH_TO_PAT.clear()
    self._Observer__PAT_METH_TO_KWARGS.clear()
Get rid of circular references
def bundle(self, name: str) -> models.Bundle:
    """Fetch a bundle from the store by its unique name."""
    query = self.Bundle.filter_by(name=name)
    return query.first()
Fetch a bundle from the store.
def descendants(self, node, relations=None, reflexive=False):
    """
    Return all descendants of the specified node.

    The default implementation uses networkx, but some implementations of
    the Ontology class may use a database- or service-backed implementation
    for large graphs.

    Arguments
    ---------
    node : str
        identifier for node in ontology
    relations : list
        relation (object property) IDs used to filter
    reflexive : bool
        if true, include the query node itself in the result

    Returns
    -------
    list[str]
        descendant node IDs
    """
    if reflexive:
        # Non-reflexive query plus the node itself.
        result = self.descendants(node, relations, reflexive=False)
        result.append(node)
        return result
    graph = (self.get_graph() if relations is None
             else self.get_filtered_graph(relations))
    if node not in graph:
        return []
    return list(nx.descendants(graph, node))
Returns all descendants of specified node. The default implementation is to use networkx, but some implementations of the Ontology class may use a database or service backed implementation, for large graphs. Arguments --------- node : str identifier for node in ontology reflexive : bool if true, return query node in graph relations : list relation (object property) IDs used to filter Returns ------- list[str] descendant node IDs
def fetchmany(self, size=None):
    """Fetch many rows from select result set.

    :param size: Number of rows to return (defaults to self.arraysize).
    :returns: list of row records (tuples)
    """
    self._check_closed()
    if not self._executed:
        raise ProgrammingError("Require execute() first")
    if size is None:
        size = self.arraysize

    # First drain rows already buffered locally from a previous fetch.
    result = []
    cnt = 0
    while cnt != size:
        try:
            result.append(next(self._buffer))
            cnt += 1
        except StopIteration:
            break

    if cnt == size or self._received_last_resultset_part:
        # No rows are missing or there are no additional rows
        return result

    # Buffer exhausted but more rows remain on the server: request the
    # shortfall via a FETCHNEXT round trip.
    request = RequestMessage.new(
        self.connection,
        RequestSegment(
            message_types.FETCHNEXT,
            (ResultSetId(self._resultset_id), FetchSize(size - cnt))
        )
    )
    response = self.connection.send_request(request)

    resultset_part = response.segments[0].parts[1]
    # Attribute bit 0 set means this was the last result-set part.
    if resultset_part.attribute & 1:
        self._received_last_resultset_part = True
    result.extend(resultset_part.unpack_rows(self._column_types, self.connection))
    return result
Fetch many rows from select result set. :param size: Number of rows to return. :returns: list of row records (tuples)
def log_listener(log: logging.Logger = None, level=logging.INFO):
    """Progress-monitor listener that logs all updates to the given logger.

    :param log: logger to write to; defaults to the "ProgressMonitor" logger.
    :param level: logging level used for each update (default INFO).
    :return: a ``listen(monitor)`` callable suitable for registration.
    """
    if log is None:
        log = logging.getLogger("ProgressMonitor")

    def listen(monitor):
        # Explicit formatting instead of the fragile `.format(**locals())`
        # idiom, which silently depends on local variable names.
        prefix = "{}: ".format(monitor.name) if monitor.name is not None else ""
        percent = int(monitor.progress * 100)
        log.log(level, "[{}{:3d}%] {}".format(prefix, percent, monitor.message))

    return listen
Progress Monitor listener that logs all updates to the given logger
def _set_ldp_out(self, v, load=False):
    """
    Setter method for ldp_out, mapped from YANG variable
    /mpls_state/ldp/ldp_out (container).

    If this variable is read-only (config: false) in the source YANG file,
    then _set_ldp_out is considered as a private method. Backends looking to
    populate this variable should do so via calling thisObj._set_ldp_out()
    directly.
    """
    # Values that carry their own YANG type wrapper are unwrapped first.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Coerce the raw value into the generated `ldp_out` container class;
        # YANGDynClass validates the type and registers the node's paths.
        t = YANGDynClass(v,base=ldp_out.ldp_out, is_container='container', presence=False, yang_name="ldp-out", rest_name="ldp-out", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'mpls-ldp-out', u'cli-suppress-show-path': None}}, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='container', is_config=False)
    except (TypeError, ValueError):
        # Re-raise as ValueError with a structured payload describing the
        # expected generated type (pyangbind convention).
        raise ValueError({
            'error-string': """ldp_out must be of a type compatible with container""",
            'defined-type': "container",
            'generated-type': """YANGDynClass(base=ldp_out.ldp_out, is_container='container', presence=False, yang_name="ldp-out", rest_name="ldp-out", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'mpls-ldp-out', u'cli-suppress-show-path': None}}, namespace='urn:brocade.com:mgmt:brocade-mpls-operational', defining_module='brocade-mpls-operational', yang_type='container', is_config=False)""",
        })
    self.__ldp_out = t
    # Notify the parent object (if it supports change notification).
    if hasattr(self, '_set'):
        self._set()
Setter method for ldp_out, mapped from YANG variable /mpls_state/ldp/ldp_out (container) If this variable is read-only (config: false) in the source YANG file, then _set_ldp_out is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_ldp_out() directly.
def fillScreen(self, color=None):
    """Fill the matrix with the given RGB color."""
    # Delegate to the matrix helper: paint a full-size rectangle from the
    # origin covering the whole width x height of the display.
    md.fill_rect(self.set, 0, 0, self.width, self.height, color)
Fill the matrix with the given RGB color
def edit(self, id):
    """Edit a prefix.

    GET renders the edit form; POST applies the submitted form fields to
    the prefix, saves it and redirects to the prefix list.
    """
    # find prefix
    c.prefix = Prefix.get(int(id))

    # we got a HTTP POST - edit object
    if request.method == 'POST':

        def _stripped_or_none(key):
            # A blank / whitespace-only form field means "clear this value";
            # factored out of six identical if/else blocks.
            value = request.params[key]
            return None if value.strip() == '' else value

        c.prefix.prefix = request.params['prefix_prefix']
        c.prefix.description = request.params['prefix_description']
        c.prefix.node = _stripped_or_none('prefix_node')
        c.prefix.country = _stripped_or_none('prefix_country')
        c.prefix.comment = _stripped_or_none('prefix_comment')
        c.prefix.order_id = _stripped_or_none('prefix_order_id')
        c.prefix.customer_id = _stripped_or_none('prefix_customer_id')

        if request.params['prefix_vrf'].strip() == '':
            c.prefix.vrf = None
        else:
            # TODO: handle non-existent VRF...
            c.prefix.vrf = VRF.list({'rt': request.params['prefix_vrf']})[0]

        # Checkbox: present in the POST data only when checked.
        c.prefix.monitor = request.params.get('prefix_monitor') is not None
        c.prefix.alarm_priority = request.params['prefix_alarm_priority']

        c.prefix.save()
        redirect(url(controller='prefix', action='list'))

    return render('/prefix_edit.html')
Edit a prefix.
def down_ec2(connection, instance_id, region, log=False):
    """Shut down an existing EC2 instance and wait until it reports
    'stopped'."""
    # Ask EC2 to stop the instance; boto returns the affected instances.
    instance = connection.stop_instances(instance_ids=instance_id)[0]
    # Poll every 10 seconds until the instance reaches the 'stopped' state.
    while True:
        if instance.state == "stopped":
            break
        if log:
            log_yellow("Instance state: %s" % instance.state)
        sleep(10)
        instance.update()
    if log:
        log_green('Instance state: %s' % instance.state)
shutdown of an existing EC2 instance
def route(rule=None, **kwargs):
    """
    This decorator defines a custom route for both classes and methods in
    the view. It behaves the same way as Flask's @app.route.

    on class:
        It takes the following args
            - rule: the root route of the endpoint
            - decorators: a list of decorators to run on each method

    on methods:
        along with the rule, it takes kwargs
            - endpoint
            - defaults
            - ...

    :param rule: route rule string
    :param kwargs: additional routing options
    :return: decorator
    """
    # Keys that only make sense at class level and must not leak into the
    # per-method rule options.
    _restricted_keys = ["extends", "route", "decorators"]
    def decorator(f):
        if inspect.isclass(f):
            # Class decoration: merge route options inherited from `extends`.
            # NOTE(review): `self` is not defined in this function scope —
            # this code appears to have been extracted from a class context;
            # `self.view_key` would raise NameError if this branch runs with
            # `extends` set. Confirm against the original module.
            extends = kwargs.pop("extends", None)
            if extends and hasattr(extends, self.view_key):
                for k, v in getattr(extends, self.view_key).items():
                    kwargs.setdefault(k, v)
            kwargs.setdefault("route", rule)
            kwargs["decorators"] = kwargs.get("decorators", []) + f.decorators
            setattr(f, "_route_extends__", kwargs)
            setattr(f, "base_route", kwargs.get("route"))
            setattr(f, "decorators", kwargs.get("decorators", []))
        else:
            # Method decoration: a rule is mandatory.
            if not rule:
                raise ValueError("'rule' is missing in @route ")
            # Strip class-only keys before caching the rule options.
            for k in _restricted_keys:
                if k in kwargs:
                    del kwargs[k]
            # Put the rule cache on the method itself instead of globally
            if not hasattr(f, '_rule_cache') or f._rule_cache is None:
                f._rule_cache = {f.__name__: [(rule, kwargs)]}
            elif not f.__name__ in f._rule_cache:
                f._rule_cache[f.__name__] = [(rule, kwargs)]
            else:
                f._rule_cache[f.__name__].append((rule, kwargs))
        return f
    return decorator
This decorator defines custom route for both class and methods in the view. It behaves the same way as Flask's @app.route on class: It takes the following args - rule: the root route of the endpoint - decorators: a list of decorators to run on each method on methods: along with the rule, it takes kwargs - endpoint - defaults - ... :param rule: :param kwargs: :return:
def register(func_path, factory=mock.MagicMock):
    # type: (str, Callable) -> Callable
    """
    Register a mock factory for a function path.

    Kwargs:
        func_path: import path to mock (as you would give to `mock.patch`)
        factory: function that returns a mock for the patched func

    Returns:
        (decorator)

    Usage:
        automock.register('path.to.func.to.mock')  # default MagicMock

        automock.register('path.to.func.to.mock', CustomMockFactory)

        @automock.register('path.to.func.to.mock')
        def custom_mock(result):
            return mock.MagicMock(return_value=result)
    """
    # Item assignment mutates the module-level dict in place, so the
    # original `global _factory_map` declaration was unnecessary.
    _factory_map[func_path] = factory

    def decorator(decorated_factory):
        # When used as a decorator, the decorated callable replaces the
        # default/explicit factory registered above.
        _factory_map[func_path] = decorated_factory
        return decorated_factory

    return decorator
Kwargs: func_path: import path to mock (as you would give to `mock.patch`) factory: function that returns a mock for the patched func Returns: (decorator) Usage: automock.register('path.to.func.to.mock') # default MagicMock automock.register('path.to.func.to.mock', CustomMockFactory) @automock.register('path.to.func.to.mock') def custom_mock(result): return mock.MagicMock(return_value=result)
def acquire_account_for(self, host, owner=None):
    """
    Acquire an account for the given host and return it.

    The host is passed to each of the match functions that were given when
    adding the pools; the first pool whose match function returns True
    supplies the account. If no pool matches, the default pool is used.

    :type  host: :class:`Host`
    :param host: The host for which an account is acquired.
    :type  owner: object
    :param owner: An optional descriptor for the owner.
    :rtype:  :class:`Account`
    :return: The account that was acquired.
    """
    matching_pool = next(
        (pool for match, pool in self.pools if match(host) is True),
        None,
    )
    if matching_pool is not None:
        return matching_pool.acquire_account(owner=owner)
    return self.default_pool.acquire_account(owner=owner)
Acquires an account for the given host and returns it. The host is passed to each of the match functions that were passed in when adding the pool. The first pool for which the match function returns True is chosen to assign an account. :type host: :class:`Host` :param host: The host for which an account is acquired. :type owner: object :param owner: An optional descriptor for the owner. :rtype: :class:`Account` :return: The account that was acquired.
def get_webassets_env_from_settings(settings, prefix='webassets'):
    """This function will take all webassets.* parameters, and call the
    ``Environment()`` constructor with kwargs passed in.

    The only two parameters that are not passed as keywords are:

    * base_dir
    * base_url

    which are passed in positionally.

    Read the ``WebAssets`` docs for ``Environment`` for more details.
    """
    # Make a dictionary of the webassets.* elements, stripping the prefix
    # and coercing string values: boolean-ish strings -> bool, and
    # "json:..."-prefixed strings -> parsed JSON (except for `manifest`,
    # whose value may legitimately start with "json:").
    kwargs = {}   # assets settings
    cut_prefix = len(prefix) + 1
    for k in settings:
        if k.startswith(prefix):
            val = settings[k]
            if isinstance(val, six.string_types):
                if val.lower() in auto_booly:
                    val = asbool(val)
                elif val.lower().startswith('json:') and k[cut_prefix:] != 'manifest':
                    val = json.loads(val[5:])
            kwargs[k[cut_prefix:]] = val

    # base_dir and base_url are mandatory and passed positionally below.
    if 'base_dir' not in kwargs:
        raise Exception("You need to provide webassets.base_dir in your configuration")
    if 'base_url' not in kwargs:
        raise Exception("You need to provide webassets.base_url in your configuration")

    asset_dir = kwargs.pop('base_dir')
    asset_url = kwargs.pop('base_url')

    # A ':' suggests a pyramid asset spec (package:path); resolve it to an
    # absolute path when the package can be imported.
    if ':' in asset_dir:
        try:
            resolved_dir = AssetResolver(None).resolve(asset_dir).abspath()
        except ImportError:
            pass
        else:
            # Store the original asset spec to use later
            kwargs['asset_base'] = asset_dir
            asset_dir = resolved_dir

    # Relative URLs with no scheme are made root-relative.
    if not asset_url.startswith('/'):
        if six.moves.urllib.parse.urlparse(asset_url).scheme == '':
            asset_url = '/' + asset_url

    if 'debug' in kwargs:
        kwargs['debug'] = maybebool(kwargs['debug'])

    # `cache` may be a bool or a directory path; create the directory when
    # it is a path that does not yet exist.
    if 'cache' in kwargs:
        cache = kwargs['cache'] = maybebool(kwargs['cache'])
        if cache and isinstance(cache, six.string_types) and not path.isdir(cache):
            makedirs(cache)

    # 'updater' is just passed in...

    if 'auto_build' in kwargs:
        kwargs['auto_build'] = maybebool(kwargs['auto_build'])

    # jst_* settings map to the upper-case keys webassets expects.
    if 'jst_compiler' in kwargs:
        kwargs['JST_COMPILER'] = kwargs.pop('jst_compiler')

    if 'jst_namespace' in kwargs:
        kwargs['JST_NAMESPACE'] = kwargs.pop('jst_namespace')

    if 'manifest' in kwargs:
        kwargs['manifest'] = maybebool(kwargs['manifest'])

    if 'url_expire' in kwargs:
        kwargs['url_expire'] = maybebool(kwargs['url_expire'])

    if 'static_view' in kwargs:
        kwargs['static_view'] = asbool(kwargs['static_view'])
    else:
        kwargs['static_view'] = False

    if 'cache_max_age' in kwargs:
        kwargs['cache_max_age'] = int(kwargs.pop('cache_max_age'))
    else:
        kwargs['cache_max_age'] = None

    if 'load_path' in kwargs:
        # force load_path to be an array and split on whitespace
        if not isinstance(kwargs['load_path'], list):
            kwargs['load_path'] = kwargs['load_path'].split()

    # paths/bundles are handled after the Environment is constructed, so
    # pop them out of the constructor kwargs here.
    paths = kwargs.pop('paths', None)

    if 'bundles' in kwargs:
        if isinstance(kwargs['bundles'], six.string_types):
            kwargs['bundles'] = kwargs['bundles'].split()
    bundles = kwargs.pop('bundles', None)

    assets_env = Environment(asset_dir, asset_url, **kwargs)

    if paths is not None:
        for map_path, map_url in json.loads(paths).items():
            assets_env.append_path(map_path, map_url)

    def yaml_stream(fname, mode):
        # Open a bundle YAML file from disk, or fall back to resolving it
        # through the environment's resolver (e.g. an asset spec).
        if path.exists(fname):
            return open(fname, mode)
        else:
            return assets_env.resolver.resolver.resolve(fname).stream()

    if isinstance(bundles, list):
        # Later files override earlier ones, hence the reversed order.
        fnames = reversed(bundles)
        fin = fileinput.input(fnames, openhook=yaml_stream)
        with closing(fin):
            lines = [text(line).rstrip() for line in fin]
        yamlin = six.StringIO('\n'.join(lines))
        loader = YAMLLoader(yamlin)
        result = loader.load_bundles()
        assets_env.register(result)
    elif isinstance(bundles, dict):
        assets_env.register(bundles)

    return assets_env
This function will take all webassets.* parameters, and call the ``Environment()`` constructor with kwargs passed in. The only two parameters that are not passed as keywords are: * base_dir * base_url which are passed in positionally. Read the ``WebAssets`` docs for ``Environment`` for more details.
def _glimpse_network(self, x_t, l_p):
    """
    Combine the glimpse-sensor output with the focus location.

    Parameters:
        x_t - 28x28 image
        l_p - 2x1 focus vector
    Returns:
        4x12 matrix
    """
    # Flatten the sensor's retina-like output into a vector.
    glimpse_vec = T.flatten(self._refined_glimpse_sensor(x_t, l_p))
    # Separate hidden representations for "what" (glimpse) and "where"
    # (location), merged by a final ReLU layer.
    h_g = self._relu(T.dot(glimpse_vec, self.W_g0))
    h_l = self._relu(T.dot(l_p, self.W_g1))
    return self._relu(T.dot(h_g, self.W_g2_hg) + T.dot(h_l, self.W_g2_hl))
Parameters: x_t - 28x28 image l_p - 2x1 focus vector Returns: 4x12 matrix
def make_name(super_name, default_super_name, sub_name):
    """Helper which makes a `str` name; useful for tf.compat.v1.name_scope.

    Falls back to ``default_super_name`` when ``super_name`` is None, and
    appends ``'_' + sub_name`` when ``sub_name`` is given.
    """
    base = default_super_name if super_name is None else super_name
    return base if sub_name is None else '{}_{}'.format(base, sub_name)
Helper which makes a `str` name; useful for tf.compat.v1.name_scope.
def _request_internal(self, command, **kwargs): """Make request parse response""" args = dict(kwargs) if self.ssid: args['ssid'] = self.ssid method = getattr(self.api, command) response = method(**args) if response and 'status' in response: if response['status'] == 'error': raise SubregError( message=response['error']['errormsg'], major=response['error']['errorcode']['major'], minor=response['error']['errorcode']['minor'] ) if response['status'] == 'ok': return response['data'] if 'data' in response else dict() raise Exception("Invalid status found in SOAP response") raise Exception('Invalid response')
Make request parse response
def within_ipv4_network(self, field, values):
    """
    Add the specified networks to the filter as an inclusion check.

    :param str field: name of field to filter on. Taken from 'Show Filter
        Expression' within SMC.
    :param list values: network definitions, in cidr format, i.e:
        1.1.1.0/24.
    """
    joined_nets = ','.join('ipv4_net("%s")' % net for net in values)
    self.update_filter('{} IN union({})'.format(field, joined_nets))
This filter adds specified networks to a filter to check for inclusion. :param str field: name of field to filter on. Taken from 'Show Filter Expression' within SMC. :param list values: network definitions, in cidr format, i.e: 1.1.1.0/24.
def verify(s, sig, pk, sig_format=SER_COMPACT, pk_format=SER_COMPACT, curve=None):
    """ Verifies that `sig' is a signature of pubkey `pk' for the
        message `s'. """
    # Refuse text input: implicit encoding differences would silently
    # change the signed bytes.
    if isinstance(s, six.text_type):
        raise ValueError("Encode `s` to a bytestring yourself to" +
                         " prevent problems with different default encodings")
    selected_curve = (Curve.by_pk_len(len(pk)) if curve is None
                      else Curve.by_name(curve))
    pubkey = selected_curve.pubkey_from_string(pk, pk_format)
    digest = hashlib.sha512(s).digest()
    return pubkey.verify(digest, sig, sig_format)
Verifies that `sig' is a signature of pubkey `pk' for the message `s'.
def factory(feature):
    """
    Factory to choose a feature extractor.

    :param feature: name of the feature
    :return: Feature extractor function (falls back to ``rgb`` for any
        unrecognised name, matching the original else branch)
    """
    # Dispatch table instead of a long if/elif chain.
    extractors = {
        'hog': hog,
        'deep': deep,
        'gray': gray,
        'lab': lab,
        'luv': luv,
        'hsv': hsv,
        'hls': hls,
    }
    return extractors.get(feature, rgb)
Factory to choose feature extractor :param feature: name of the feature :return: Feature extractor function
def clean(self):
    """
    Run all of the cleaners added by the user.

    Coroutine: awaits (via ``yield from``) completion of every registered
    cleaner callable, run concurrently through ``asyncio.wait``.
    """
    # NOTE(review): the `loop=` keyword to asyncio.wait was deprecated in
    # Python 3.8 and removed in 3.10 — confirm the supported runtime.
    if self.cleaners:
        yield from asyncio.wait([x() for x in self.cleaners], loop=self.loop)
Run all of the cleaners added by the user.
def _pad_block(self, handle): '''Pad the file with 0s to the end of the next block boundary.''' extra = handle.tell() % 512 if extra: handle.write(b'\x00' * (512 - extra))
Pad the file with 0s to the end of the next block boundary.
def _load_properties(self):
    """Load the photo's properties from the Flickr API
    (flickr.photos.getInfo) and cache them on private attributes."""
    # Mark loaded up-front so accessors do not re-trigger the API call.
    self.__loaded = True

    method = 'flickr.photos.getInfo'
    data = _doget(method, photo_id=self.id)

    photo = data.rsp.photo

    # Basic photo metadata.
    self.__secret = photo.secret
    self.__server = photo.server
    self.__isfavorite = photo.isfavorite
    self.__license = photo.license
    self.__rotation = photo.rotation

    # Owner is wrapped in a User object.
    owner = photo.owner
    self.__owner = User(owner.nsid, username=owner.username,\
                        realname=owner.realname,\
                        location=owner.location)

    self.__title = photo.title.text
    self.__description = photo.description.text
    self.__ispublic = photo.visibility.ispublic
    self.__isfriend = photo.visibility.isfriend
    self.__isfamily = photo.visibility.isfamily

    self.__dateposted = photo.dates.posted
    self.__datetaken = photo.dates.taken
    self.__takengranularity = photo.dates.takengranularity

    self.__cancomment = photo.editability.cancomment
    self.__canaddmeta = photo.editability.canaddmeta
    self.__comments = photo.comments.text

    # Permissions are only present for photos the caller owns.
    try:
        self.__permcomment = photo.permissions.permcomment
        self.__permaddmeta = photo.permissions.permaddmeta
    except AttributeError:
        self.__permcomment = None
        self.__permaddmeta = None

    #TODO: Implement Notes?
    # The API returns a single tag as a bare element rather than a list;
    # normalise both shapes into a list of Tag objects.
    if hasattr(photo.tags, "tag"):
        if isinstance(photo.tags.tag, list):
            self.__tags = [Tag(tag.id, User(tag.author), tag.raw, tag.text) \
                           for tag in photo.tags.tag]
        else:
            tag = photo.tags.tag
            self.__tags = [Tag(tag.id, User(tag.author), tag.raw, tag.text)]
Loads the properties from Flickr.
def _get_default_jp2_boxes(self):
    """Create a default set of JP2 boxes.

    Builds signature, file-type, header and codestream boxes, taking image
    geometry from the codestream SIZ segment and guessing a colorspace
    when one is not already declared.
    """
    # Try to create a reasonable default.
    boxes = [JPEG2000SignatureBox(),
             FileTypeBox(),
             JP2HeaderBox(),
             ContiguousCodestreamBox()]

    # Geometry comes from the codestream's SIZ segment.
    siz = self.codestream.segment[1]
    height = siz.ysiz
    width = siz.xsiz
    num_components = len(siz.xrsiz)

    if num_components < 3:
        colorspace = core.GREYSCALE
    elif len(self.box) == 0:
        # Best guess is SRGB
        colorspace = core.SRGB
    else:
        # Take whatever the first jp2 header / color specification says.
        jp2hs = [box for box in self.box if box.box_id == 'jp2h']
        colorspace = jp2hs[0].box[1].colorspace

    boxes[2].box = [ImageHeaderBox(height=height, width=width,
                                   num_components=num_components),
                    ColourSpecificationBox(colorspace=colorspace)]
    return boxes
Create a default set of JP2 boxes.
def fake2db_initiator(self, number_of_rows, **connection_kwargs):
    '''Main handler for the operation: open a connection, run every data
    filler with the requested row count, then close cursor and connection.
    '''
    cursor, conn = self.database_caller_creator(number_of_rows,
                                               **connection_kwargs)
    fillers = (
        self.data_filler_simple_registration,
        self.data_filler_detailed_registration,
        self.data_filler_company,
        self.data_filler_user_agent,
        self.data_filler_customer,
    )
    for filler in fillers:
        filler(number_of_rows, cursor, conn)
    cursor.close()
    conn.close()
Main handler for the operation
def delete_attributes(self, attrs):
    """
    Delete just these attributes, not the whole object.

    :param attrs: Attributes to delete, as a list of string names
    :type attrs: list
    :return: self
    :rtype: :class:`boto.sdb.db.model.Model`
    """
    assert isinstance(attrs, list), "Argument must be a list of names of keys to delete."
    self._manager.domain.delete_attributes(self.id, attrs)
    # Refresh cached state so the removed attributes disappear locally too.
    self.reload()
    return self
Delete just these attributes, not the whole object. :param attrs: Attributes to save, as a list of string names :type attrs: list :return: self :rtype: :class:`boto.sdb.db.model.Model`
def save_results(self, output_dir='.', prefix='', prefix_sep='_',
                 image_list=None):
    """Write out any images generated by the meta-analysis.

    Args:
        output_dir (str): folder to write images to
        prefix (str): all image files will be prepended with this string
        prefix_sep (str): glue between the prefix and rest of filename
        image_list (list): optional list of images to save--e.g.,
            ['pFgA_z', 'pAgF']. If image_list is None (default), will save
            all images.
    """
    # No prefix means no separator either.
    if prefix == '':
        prefix_sep = ''

    if not exists(output_dir):
        makedirs(output_dir)

    logger.debug("Saving results...")
    wanted = self.images.keys() if image_list is None else image_list

    for suffix, img in self.images.items():
        if suffix not in wanted:
            continue
        outpath = join(output_dir, prefix + prefix_sep + suffix + '.nii.gz')
        imageutils.save_img(img, outpath, self.dataset.masker)
Write out any images generated by the meta-analysis. Args: output_dir (str): folder to write images to prefix (str): all image files will be prepended with this string prefix_sep (str): glue between the prefix and rest of filename image_list (list): optional list of images to save--e.g., ['pFgA_z', 'pAgF']. If image_list is None (default), will save all images.
def login(self, account=None, app_account=None, flush=True):
    """Log into the connected host using the best method available.

    If no account is given, fall back to the account used in the last
    call to login(); failing that, the account given to the constructor;
    failing that, a TypeError is raised. The app_account is forwarded to
    :class:`app_authenticate()` and :class:`app_authorize()`; when it is
    not given, the protocol-level account is reused.

    :type account: Account
    :param account: The account for protocol level authentication.
    :type app_account: Account
    :param app_account: The account for app level authentication.
    :type flush: bool
    :param flush: Whether to flush the last prompt from the buffer.
    """
    with self._get_account(account) as acct:
        app_acct = acct if app_account is None else app_account
        self.authenticate(acct, flush=False)
        driver = self.get_driver()
        if driver.supports_auto_authorize():
            self.expect_prompt()
        self.auto_app_authorize(app_acct, flush=flush)
Log into the connected host using the best method available. If an account is not given, default to the account that was used during the last call to login(). If a previous call was not made, use the account that was passed to the constructor. If that also fails, raise a TypeError. The app_account is passed to :class:`app_authenticate()` and :class:`app_authorize()`. If app_account is not given, default to the value of the account argument. :type account: Account :param account: The account for protocol level authentication. :type app_account: Account :param app_account: The account for app level authentication. :type flush: bool :param flush: Whether to flush the last prompt from the buffer.
def _require_homogeneous_roots(self, accept_predicate, reject_predicate):
    """Ensures that there is no ambiguity in the context according to the given predicates.

    If any targets in the context satisfy the accept_predicate, and no targets satisfy the
    reject_predicate, returns the accepted targets.

    If no targets satisfy the accept_predicate, returns None.

    Otherwise throws TaskError.

    :param accept_predicate: unary predicate selecting targets this task can operate on
    :param reject_predicate: unary predicate selecting targets incompatible with this task
    :raises NoActivationsError: when no target roots were specified at all
    :raises IncompatibleActivationsError: when both accepted and rejected targets are present
    """
    if len(self.context.target_roots) == 0:
        raise self.NoActivationsError('No target specified.')

    def resolve(targets):
        # Recursively resolve target aliases.
        for t in targets:
            # NOTE(review): exact type check (not isinstance) -- presumably
            # deliberate so only plain alias Targets are expanded, never
            # subclasses; confirm before changing.
            if type(t) == Target:
                for r in resolve(t.dependencies):
                    yield r
            else:
                yield t

    expanded_roots = list(resolve(self.context.target_roots))
    accepted = list(filter(accept_predicate, expanded_roots))
    rejected = list(filter(reject_predicate, expanded_roots))
    if len(accepted) == 0:
        # no targets were accepted, regardless of rejects
        return None
    elif len(rejected) == 0:
        # we have at least one accepted target, and no rejected targets
        return accepted
    else:
        # both accepted and rejected targets
        # TODO: once https://github.com/pantsbuild/pants/issues/425 lands, we should add
        # language-specific flags that would resolve the ambiguity here
        def render_target(target):
            # One-line human-readable description used in the error message.
            return '{} (a {})'.format(target.address.reference(), target.type_alias)
        raise self.IncompatibleActivationsError('Mutually incompatible targets specified: {} vs {} '
                                               '(and {} others)'
                                               .format(render_target(accepted[0]),
                                                       render_target(rejected[0]),
                                                       len(accepted) + len(rejected) - 2))
Ensures that there is no ambiguity in the context according to the given predicates. If any targets in the context satisfy the accept_predicate, and no targets satisfy the reject_predicate, returns the accepted targets. If no targets satisfy the accept_predicate, returns None. Otherwise throws TaskError.
def create_table(instance, table_id, initial_split_keys=None, column_families=None):
    """Create the given Cloud Bigtable table.

    Raises ``google.api_core.exceptions.AlreadyExists`` if the table exists.

    :type instance: Instance
    :param instance: The Cloud Bigtable instance that owns the table.
    :type table_id: str
    :param table_id: The ID of the table to create in Cloud Bigtable.
    :type initial_split_keys: list
    :param initial_split_keys: (Optional) Row keys (bytes) used to
        initially split the table.
    :type column_families: dict
    :param column_families: (Optional) Map of column_id str to
        :class:`google.cloud.bigtable.column_family.GarbageCollectionRule`.
    """
    table = Table(table_id, instance)
    # Substitute empty defaults lazily so no mutable default argument is shared.
    table.create(initial_split_keys if initial_split_keys is not None else [],
                 column_families if column_families is not None else {})
Creates the specified Cloud Bigtable table. Raises ``google.api_core.exceptions.AlreadyExists`` if the table exists. :type instance: Instance :param instance: The Cloud Bigtable instance that owns the table. :type table_id: str :param table_id: The ID of the table to create in Cloud Bigtable. :type initial_split_keys: list :param initial_split_keys: (Optional) A list of row keys in bytes to use to initially split the table. :type column_families: dict :param column_families: (Optional) A map of columns to create. The key is the column_id str, and the value is a :class:`google.cloud.bigtable.column_family.GarbageCollectionRule`.
def fetch(self, endpoint=None, page=1, **kwargs):
    """Return the results of a MetaSmoke API call.

    Builds the API query string, sends the request, and — when
    ``max_pages`` is greater than 1 and the API reports more pages —
    automatically paginates, merging everything into one result.

    :param endpoint: (string) API end point to call, e.g.
        ``fetch('posts/feedback')``. End points that take ids accept a
        placeholder: ``fetch('posts/{ids}', ids=[1, 2, 3])``. Raises
        ``ValueError`` when no end point is given.
    :param page: (int) page of results to start at (pagination continues
        from here up to ``max_pages``).
    :param kwargs: parameters accepted by the individual endpoint, named
        exactly as in the endpoint documentation. ``from_date``/``to_date``
        may be ``datetime`` objects and are converted to epoch seconds.
    :rtype: (dictionary) wrapper data about the call; all retrieved
        records appear under the ``items`` key.
    :raises SmokeAPIError: on connection failure, on a non-JSON body,
        or when the API returns an error payload.
    """
    if not endpoint:
        raise ValueError('No endpoint provided.')

    self._endpoint = endpoint

    params = {
        "per_page": self.per_page,
        "page": page,
        "key": self._api_key,
    }

    # Replace {ids}-style placeholders, e.g. fetch('posts/{ids}', ids=[1, 2])
    # becomes 'posts/1;2'.
    # BUG FIX: iterate over a snapshot — the original popped keys from
    # kwargs while iterating kwargs.items(), which raises
    # "RuntimeError: dictionary changed size during iteration" on Python 3.
    for k, value in list(kwargs.items()):
        if "{" + k + "}" in endpoint:
            endpoint = endpoint.replace("{" + k + "}",
                                        ';'.join(str(x) for x in value))
            kwargs.pop(k, None)

    # Accept datetime objects for date filters; the API wants epoch seconds.
    for k in ('from_date', 'to_date'):
        if k in kwargs and isinstance(kwargs[k], datetime.datetime):
            kwargs[k] = int(calendar.timegm(kwargs[k].utctimetuple()))

    # If 'ids' survived (caller passed `posts` instead of `posts/{ids}`),
    # append the ids to the end of the endpoint.
    if 'ids' in kwargs:
        endpoint += "/{}".format(';'.join(str(x) for x in kwargs.pop('ids')))

    params.update(kwargs)

    data = []
    run_cnt = 1
    while run_cnt <= self.max_pages:
        run_cnt += 1
        base_url = "{}{}/".format(self._base_url, endpoint)
        try:
            response = requests.get(base_url, params=params, proxies=self.proxy)
        except requests.exceptions.ConnectionError as e:
            raise SmokeAPIError(self._previous_call, str(e), str(e), str(e))

        self._previous_call = response.url
        try:
            response.encoding = 'utf-8-sig'
            response = response.json()
        except ValueError as e:
            raise SmokeAPIError(self._previous_call, str(e), str(e), str(e))

        try:
            code = response["error_code"]
            name = response["error_name"]
            message = response["error_message"]
            raise SmokeAPIError(self._previous_call, code, name, message)
        except KeyError:
            pass  # no error keys present: the call succeeded

        data.append(response)
        # (The original also had `if len(data) < 1: break` here — dead code,
        # since a response was just appended.)
        if 'has_more' in response and response['has_more']:
            params["page"] += 1
        else:
            break

    r = []
    for d in data:
        r.extend(d['items'])

    items = list(chain(r))
    has_more = data[-1]['has_more'] if 'has_more' in data[-1] else False

    result = {'has_more': has_more,
              'page': params['page'],
              'total': len(items),
              'items': items}
    return result
Returns the results of an API call. This is the main work horse of the class. It builds the API query string and sends the request to MetaSmoke. If there are multiple pages of results, and we've configured `max_pages` to be greater than 1, it will automatically paginate through the results and return a single object. Returned data will appear in the `items` key of the resulting dictionary. :param endpoint: (string) The API end point being called. Available endpoints are listed on the official `API Documentation <https://github.com/Charcoal-SE/metasmoke/wiki/API-Documentation>`__. This can be as simple as ``fetch('posts/feedback')``, to call feedback end point If calling an end point that takes additional parameter, such as `id`s pass the ids as a list to the `ids` key: .. code-block:: python fetch('posts/{ids}', ids=[1,2,3]) This will attempt to retrieve the posts for the three listed ids. If no end point is passed, a ``ValueError`` will be raised :param page: (int) The page in the results to start at. By default, it will start on the first page and automatically paginate until the result set reaches ``max_pages``. :param kwargs: Parameters accepted by individual endpoints. These parameters **must** be named the same as described in the endpoint documentation :rtype: (dictionary) A dictionary containing wrapper data regarding the API call and the results of the call in the `items` key. If multiple pages were received, all of the results will appear in the ``items`` tag.
def install(self, opener):
    # type: (Union[Type[Opener], Opener, Callable[[], Opener]]) -> Union[Type[Opener], Opener, Callable[[], Opener]]
    """Install an opener.

    Arguments:
        opener (`Opener`): an `Opener` instance, or a callable that
            returns an opener instance.

    Returns:
        The *original* argument, unchanged, so this method can be used
        as a class decorator (note the corrected return annotation: the
        original type comment said ``None`` even though ``opener`` is
        returned).

    Note:
        May be used as a class decorator. For example::

            registry = Registry()
            @registry.install
            class ArchiveOpener(Opener):
                protocols = ['zip', 'tar']
    """
    # A bare Opener instance is used as-is; a class / factory is called.
    _opener = opener if isinstance(opener, Opener) else opener()
    assert isinstance(_opener, Opener), "Opener instance required"
    assert _opener.protocols, "must list one or more protocols"
    # Register the same instance under every protocol it claims.
    for protocol in _opener.protocols:
        self._protocols[protocol] = _opener
    return opener
Install an opener. Arguments: opener (`Opener`): an `Opener` instance, or a callable that returns an opener instance. Note: May be used as a class decorator. For example:: registry = Registry() @registry.install class ArchiveOpener(Opener): protocols = ['zip', 'tar']
def end_anonymous_session_view(request):
    '''End the anonymous session flag and redirect to the utilities page.

    NOTE(review): the original docstring said "if the user is a superuser",
    but no permission check is visible in this view -- presumably enforced
    by a decorator or middleware elsewhere; confirm before relying on it.
    '''
    request.session['ANONYMOUS_SESSION'] = False
    messages.add_message(request, messages.INFO, MESSAGES['ANONYMOUS_SESSION_ENDED'])
    return HttpResponseRedirect(reverse('utilities'))
End the anonymous session if the user is a superuser.
def get(self, *args, **kwargs):
    """Get the next waiting message from the queue.

    :returns: A :class:`Message` instance, or ``None`` when the queue is
        empty.

    NOTE(review): the qsize()/get() pair is not atomic -- presumably this
    backend is only used single-consumer; confirm.
    """
    if mqueue.qsize():
        body, ctype, cencoding = mqueue.get()
        return self.Message(backend=self, body=body,
                            content_type=ctype,
                            content_encoding=cencoding)
    return None
Get the next waiting message from the queue. :returns: A :class:`Message` instance, or ``None`` if there is no messages waiting.
def mark_all_as_active(self, recipient=None):
    """Mark the current (soft-deleted) queryset as active again.

    Optionally restrict the update to a single recipient first.

    :returns: the number of rows updated (Django ``QuerySet.update``).
    """
    assert_soft_delete()
    qs = self.deleted()
    if recipient:
        qs = qs.filter(recipient=recipient)
    return qs.update(deleted=False)
Mark current queryset as active(un-deleted). Optionally, filter by recipient first.
def adj_nodes_aws(aws_nodes):
    """Normalize AWS-specific attributes on each node, in place.

    Sets the cloud identifiers, stringifies the IP lists, and copies the
    availability zone, instance type, and lifecycle out of the provider
    ``extra`` dict.

    :param aws_nodes: list of libcloud-style node objects
    :return: the same list, mutated in place
    """
    for n in aws_nodes:
        n.cloud = "aws"
        n.cloud_disp = "AWS"
        n.private_ips = ip_to_str(n.private_ips)
        n.public_ips = ip_to_str(n.public_ips)
        extra = n.extra
        n.zone = extra['availability']
        n.size = extra['instance_type']
        n.type = extra['instance_lifecycle']
    return aws_nodes
Adjust details specific to AWS.
def get(key, default=-1):
    """Look up a ``TransType`` member, minting it on the fly if unknown.

    Backport support for original codes: an integer key is resolved by
    value; a name missing from the enum is first registered with the
    given *default* value, then looked up.
    """
    if isinstance(key, int):
        return TransType(key)
    if key in TransType._member_map_:
        return TransType[key]
    # Unknown name: register it so the lookup below succeeds.
    extend_enum(TransType, key, default)
    return TransType[key]
Backport support for original codes.
def main(request, query, hproPk=None, returnMenuOnly=False):
    """Main view: forward *query* to the PlugIt backend and render the result.

    :param request: the Django request
    :param query: the backend action/path to execute
    :param hproPk: primary key of the hosting project (None in standalone mode)
    :param returnMenuOnly: when True, return only the rendered menu fragment
    """
    if settings.PIAPI_STANDALONE:
        global plugIt, baseURI

        # Check if settings are ok
        if settings.PIAPI_ORGAMODE and settings.PIAPI_REALUSERS:
            return gen404(request, baseURI, "Configuration error. PIAPI_ORGAMODE and PIAPI_REALUSERS both set to True !")
        hproject = None
    else:
        (plugIt, baseURI, hproject) = getPlugItObject(hproPk)
        hproject.update_last_access(request.user)

    # Check for SSL Requirements and redirect to ssl if necessary
    if hproject and hproject.plugItRequiresSsl:
        if not request.is_secure():
            secure_url = 'https://{0}{1}'.format(request.get_host(), request.get_full_path())
            return HttpResponsePermanentRedirect(secure_url)

    orgaMode = None
    currentOrga = None
    plugItMenuAction = None
    availableOrga = []

    # If standalone mode, change the current user and orga mode based on parameters
    if settings.PIAPI_STANDALONE:
        if not settings.PIAPI_REALUSERS:
            # Synthesize a user from the session-selected mode.
            currentUserMode = request.session.get('plugit-standalone-usermode', 'ano')
            request.user = generate_user(mode=currentUserMode)
            orgaMode = settings.PIAPI_ORGAMODE
            currentOrga = SimpleOrga()
            currentOrga.name = request.session.get('plugit-standalone-organame', 'EBU')
            currentOrga.pk = request.session.get('plugit-standalone-orgapk', '-1')
            currentOrga.ebu_codops = request.session.get('plugit-standalone-orgacodops', 'zzebu')
        else:
            request.user.ebuio_member = request.user.is_staff
            request.user.ebuio_admin = request.user.is_superuser
            request.user.subscription_labels = _get_subscription_labels(request.user, hproject)
        proxyMode = settings.PIAPI_PROXYMODE
        plugItMenuAction = settings.PIAPI_PLUGITMENUACTION

        # TODO Add STANDALONE Orgas here
        # availableOrga.append((orga, isAdmin))

        # Get meta, if not in proxy mode
        if not proxyMode:
            try:
                meta = plugIt.getMeta(query)
            except Exception as e:
                report_backend_error(request, e, 'meta', hproPk)
                meta = None
            if not meta:
                return gen404(request, baseURI, 'meta')
        else:
            meta = {}
    else:
        # Hosted mode: rights come from the hosting project.
        request.user.ebuio_member = hproject.isMemberRead(request.user)
        request.user.ebuio_admin = hproject.isMemberWrite(request.user)
        request.user.subscription_labels = _get_subscription_labels(request.user, hproject)

        orgaMode = hproject.plugItOrgaMode
        proxyMode = hproject.plugItProxyMode
        plugItMenuAction = hproject.plugItMenuAction

        # Get meta, if not in proxy mode
        if not proxyMode:
            try:
                meta = plugIt.getMeta(query)
            except Exception as e:
                report_backend_error(request, e, 'meta', hproPk)
                meta = None
            if not meta:
                return gen404(request, baseURI, 'meta')
        else:
            meta = {}

    if orgaMode:
        # List available orgas
        if request.user.is_authenticated():
            # If orga limited only output the necessary orgas to which the user has access
            if hproject and hproject.plugItLimitOrgaJoinable:
                # Get List of Plugit Available Orgas first
                projectOrgaIds = hproject.plugItOrgaJoinable.order_by('name').values_list('pk', flat=True)
                for (orga, isAdmin) in request.user.getOrgas(distinct=True):
                    if orga.pk in projectOrgaIds:
                        availableOrga.append((orga, isAdmin))
            else:
                availableOrga = request.user.getOrgas(distinct=True)

        if not availableOrga:
            # TODO HERE TO CHANGE PUBLIC
            if not meta.get('public'):
                # Page is not public, raise 403
                return gen403(request, baseURI, 'no_orga_in_orgamode', hproject)
        else:
            # Build the current orga
            realCurrentOrga = get_current_orga(request, hproject, availableOrga)
            currentOrga = SimpleOrga()
            currentOrga.pk = realCurrentOrga.pk
            currentOrga.name = realCurrentOrga.name
            currentOrga.ebu_codops = realCurrentOrga.ebu_codops

            # Get rights
            request.user.ebuio_orga_member = realCurrentOrga.isMember(request.user)
            request.user.ebuio_orga_admin = realCurrentOrga.isOwner(request.user)

    cacheKey = get_cache_key(request, meta, orgaMode, currentOrga)

    # Check access rights
    error = check_rights_and_access(request, meta, hproject)
    if error:
        return error

    # Check cache
    (cache, menucache, context) = find_in_cache(cacheKey)
    if cache:
        return build_final_response(request, meta, cache, menucache, hproject, proxyMode, context)

    # Build parameters
    getParameters, postParameters, files = build_parameters(request, meta, orgaMode, currentOrga)

    # Bonus headers
    things_to_add = build_extra_headers(request, proxyMode, orgaMode, currentOrga)

    current_session = get_current_session(request, hproPk)

    # Do the action
    try:
        (data, session_to_set, headers_to_set) = plugIt.doAction(query, request.method, getParameters, postParameters, files, things_to_add, proxyMode=proxyMode, session=current_session)
    except Exception as e:
        report_backend_error(request, e, 'meta', hproPk)
        return gen500(request, baseURI)

    update_session(request, session_to_set, hproPk)

    # Handle special case (redirect, etc..)
    spe_cases = handle_special_cases(request, data, baseURI, meta)
    if spe_cases:
        # Propagate backend headers even on special-case responses.
        for header, value in headers_to_set.items():
            spe_cases[header] = value
        return spe_cases

    # Save data for proxyMode
    if proxyMode:
        rendered_data = data
        data = {}
    else:
        rendered_data = None

    # Get template
    (templateContent, templateError) = get_template(request, query, meta, proxyMode)
    if templateError:
        return templateError

    # Build the context
    context = build_context(request, data, hproject, orgaMode, currentOrga, availableOrga)

    # Render the result
    menu = None  # Some page may not have a menu
    (result, menu) = render_data(context, templateContent, proxyMode, rendered_data, plugItMenuAction)

    # Cache the result for future uses if requested
    cache_if_needed(cacheKey, result, menu, context, meta)

    # Return menu only : )
    if returnMenuOnly:
        return menu

    # Return the final response
    final = build_final_response(request, meta, result, menu, hproject, proxyMode, context)
    for header, value in headers_to_set.items():
        final[header] = value
    return final
Main method called for main page
def getObjectsAspecting(self, point, aspList):
    """Return the planets in this list which make any of the given
    aspects to *point*.

    :param point: the object being aspected
    :param aspList: list of aspect angles to consider
    :return: a new :class:`ObjectList` with the matching planets
    """
    matches = [obj for obj in self
               if obj.isPlanet() and aspects.isAspecting(obj, point, aspList)]
    return ObjectList(matches)
Returns a list of objects aspecting a point considering a list of possible aspects.
def citedby_url(self):
    """URL of the Scopus page listing citing papers, or None when the
    record carries no such link."""
    link = self.coredata.find('link[@rel="scopus-citedby"]', ns)
    # find() yields None when the element is absent; treat that as "no URL".
    return None if link is None else link.get('href')
URL to Scopus page listing citing papers.
def default_triple(cls, inner_as_primary=True, inner_as_overcontact=False,
                   starA='starA', starB='starB', starC='starC',
                   inner='inner', outer='outer',
                   contact_envelope='contact_envelope'):
    """Load a bundle with a default triple system.

    Set inner_as_primary based on what hierarchical configuration you want.

    `inner_as_primary = True`:

    starA - starB -- starC

    `inner_as_primary = False`:

    starC -- starA - starB

    This is a constructor, so should be called as:

    >>> b = Bundle.default_triple_primary()

    :parameter bool inner_as_primary: whether the inner-binary should be
        the primary component of the outer-orbit
    :parameter bool inner_as_overcontact: whether the inner binary shares
        a common (overcontact) envelope
    :parameter str starA, starB, starC: component labels for the three stars
    :parameter str inner, outer: component labels for the two orbits
    :parameter str contact_envelope: label for the shared envelope
        (only used when inner_as_overcontact is True)
    :return: instantiated :class:`Bundle` object
    :raises NotImplementedError: unless developer mode is enabled
    """
    if not conf.devel:
        raise NotImplementedError("'default_triple' not officially supported for this release. Enable developer mode to test.")

    b = cls()
    b.add_star(component=starA)
    b.add_star(component=starB)
    b.add_star(component=starC)
    b.add_orbit(component=inner, period=1)
    b.add_orbit(component=outer, period=10)

    if inner_as_overcontact:
        # Overcontact: the envelope joins the two inner stars in the hierarchy.
        b.add_envelope(component=contact_envelope)
        inner_hier = _hierarchy.binaryorbit(b[inner],
                                            b[starA],
                                            b[starB],
                                            b[contact_envelope])
    else:
        inner_hier = _hierarchy.binaryorbit(b[inner], b[starA], b[starB])

    # Place the inner binary either first (primary) or second in the outer orbit.
    if inner_as_primary:
        hierstring = _hierarchy.binaryorbit(b[outer], inner_hier, b[starC])
    else:
        hierstring = _hierarchy.binaryorbit(b[outer], b[starC], inner_hier)
    b.set_hierarchy(hierstring)

    b.add_constraint(constraint.keplers_third_law_hierarchical,
                     outer, inner)

    # TODO: does this constraint need to be rebuilt when things change?
    # (ie in set_hierarchy)

    b.add_compute()

    return b
Load a bundle with a default triple system. Set inner_as_primary based on what hierarchical configuration you want. inner_as_primary = True: starA - starB -- starC inner_as_primary = False: starC -- starA - starB This is a constructor, so should be called as: >>> b = Bundle.default_triple_primary() :parameter bool inner_as_primary: whether the inner-binary should be the primary component of the outer-orbit :return: instantiated :class:`Bundle` object
def cast(current: Any, new: str) -> Any:
    """Try to force a new value into the same type as the current one when
    setting the value for a parameter.

    :param current: current value for the parameter, type varies
    :param new: new value
    :return: new value with same type as current, or the current value if
        there was an error casting
    """
    typ = type(current)
    orig_new = new
    if typ == bool:
        # Accept numeric strings ("0"/"1") first.
        try:
            return bool(int(new))
        except (ValueError, TypeError):
            pass
        try:
            new = new.lower()
            # BUG FIX: the original indexed new[0], which raised an
            # uncaught IndexError for an empty string; startswith() with a
            # tuple handles "" safely and falls through to the error path.
            if new == 'on' or new.startswith(('y', 't')):
                return True
            if new == 'off' or new.startswith(('n', 'f')):
                return False
        except AttributeError:
            pass
    else:
        try:
            return typ(new)
        except (ValueError, TypeError):
            pass
    print("Problem setting parameter (now {}) to {}; incorrect type?".format(current, orig_new))
    return current
Tries to force a new value into the same type as the current when trying to set the value for a parameter. :param current: current value for the parameter, type varies :param new: new value :return: new value with same type as current, or the current value if there was an error casting
def filter_feed(self, updated=False, following=False, folder=False,
                filter_folder="", sort="updated", nid=None):
    """Get a filtered feed.

    Exactly one filter type (updated, following, folder) must be chosen.

    :type nid: str
    :param nid: network ID to pull the feed from; optional override of the
        ``network_id`` given when the class was created
    :type sort: str
    :param sort: how to sort the retrieved feed; only known value is
        "updated"
    :type updated: bool
    :param updated: only posts updated since you last read them
    :type following: bool
    :param following: only posts you are following
    :type folder: bool
    :param folder: only posts in the folder named by ``filter_folder``
    :type filter_folder: str
    :param filter_folder: folder to show posts from; required when
        ``folder`` is set
    """
    assert sum([updated, following, folder]) == 1
    if folder:
        assert filter_folder

    payload = {"sort": sort}
    if updated:
        payload["updated"] = 1
    elif following:
        payload["following"] = 1
    else:
        payload["folder"] = 1
        payload["filter_folder"] = filter_folder

    response = self.request(
        nid=nid,
        method="network.filter_feed",
        data=payload
    )
    return self._handle_error(response, "Could not retrieve filtered feed.")
Get filtered feed Only one filter type (updated, following, folder) is possible. :type nid: str :param nid: This is the ID of the network to get the feed from. This is optional and only to override the existing `network_id` entered when created the class :type sort: str :param sort: How to sort feed that will be retrieved; only current known value is "updated" :type updated: bool :param updated: Set to filter through only posts which have been updated since you last read them :type following: bool :param following: Set to filter through only posts which you are following :type folder: bool :param folder: Set to filter through only posts which are in the provided ``filter_folder`` :type filter_folder: str :param filter_folder: Name of folder to show posts from; required only if ``folder`` is set
def parse_numbering(document, xmlcontent):
    """Parse the numbering part ('numbering.xml') of a docx document.

    Populates ``document.abstruct_numbering`` (abstractNumId ->
    {ilvl: {'numFmt': ...}}) and ``document.numbering``
    (numId -> abstractNumId).
    """
    root = etree.fromstring(xmlcontent)

    document.abstruct_numbering = {}
    document.numbering = {}

    for abstract in root.xpath('.//w:abstractNum', namespaces=NAMESPACES):
        levels = {}
        for lvl in abstract.xpath('./w:lvl', namespaces=NAMESPACES):
            ilvl = int(lvl.attrib[_name('{{{w}}}ilvl')])
            fmt = lvl.find(_name('{{{w}}}numFmt'))
            levels[ilvl] = {'numFmt': fmt.attrib[_name('{{{w}}}val')]}
        document.abstruct_numbering[abstract.attrib[_name('{{{w}}}abstractNumId')]] = levels

    for num in root.xpath('.//w:num', namespaces=NAMESPACES):
        num_id = num.attrib[_name('{{{w}}}numId')]
        abs_ref = num.find(_name('{{{w}}}abstractNumId'))
        # A w:num without an abstractNumId reference is skipped.
        if abs_ref is not None:
            document.numbering[int(num_id)] = abs_ref.attrib[_name('{{{w}}}val')]
Parse numbering document. Numbering is defined in file 'numbering.xml'.
def cancelOrder(self, orderId):
    """Cancel an open order on IB TWS.

    :param orderId: id of the order to cancel
    :return: the same orderId, for convenience
    """
    self.ibConn.cancelOrder(orderId)
    # Refresh the order-id sequence so the next order gets a fresh id.
    self.requestOrderIds()
    return orderId
cancel order on IB TWS
def operation_file(uploader, cmd, filename=''):
    """Dispatch a file operation to the uploader.

    :param uploader: object exposing the file operations
    :param cmd: one of 'list', 'do', 'format', 'remove', 'print'
    :param filename: iterable of paths for the path-based commands
    """
    # BUG FIX: the original mixed a stand-alone `if cmd == 'do':` into the
    # elif chain, so a 'list' command also re-evaluated the 'do' branch and
    # the chain's mutual exclusivity was broken; use one coherent chain.
    if cmd == 'list':
        operation_list(uploader)
    elif cmd == 'do':
        for path in filename:
            uploader.file_do(path)
    elif cmd == 'format':
        uploader.file_format()
    elif cmd == 'remove':
        for path in filename:
            uploader.file_remove(path)
    elif cmd == 'print':
        for path in filename:
            uploader.file_print(path)
File operations
def add_into(self, other): """Return the sum of the two time series accounting for the time stamp. The other vector will be resized and time shifted wiht sub-sample precision before adding. This assumes that one can assume zeros outside of the original vector range. """ # only handle equal sample rate for now. if self.sample_rate != other.sample_rate: raise ValueError('Sample rate must be the same') # Other is disjoint if ((other.start_time > self.end_time) or (self.start_time > other.end_time)): return self.copy() other = other.copy() dt = float((other.start_time - self.start_time) * self.sample_rate) if not dt.is_integer(): diff = (dt - _numpy.floor(dt)) other.resize(len(other) + (len(other) + 1) % 2 + 1) other = other.cyclic_time_shift(diff) ts = self.copy() start = max(other.start_time, self.start_time) end = min(other.end_time, self.end_time) part = ts.time_slice(start, end) part += other.time_slice(start, end) return ts
Return the sum of the two time series accounting for the time stamp. The other vector will be resized and time shifted with sub-sample precision before adding. This assumes that the data are zero outside of the original vector range.
def map_resnum_a_to_resnum_b(resnums, a_aln, b_aln):
    """Map a residue number in a sequence to the corresponding residue number in an aligned sequence.

    Examples:
        >>> map_resnum_a_to_resnum_b([1,2,3], '--ABCDEF', 'XXABCDEF')
        {1: 3, 2: 4, 3: 5}

        >>> map_resnum_a_to_resnum_b(5, '--ABCDEF', 'XXABCDEF')
        {5: 7}

        >>> map_resnum_a_to_resnum_b(5, 'ABCDEF', 'ABCD--')
        {}

        >>> map_resnum_a_to_resnum_b(5, 'ABCDEF--', 'ABCD--GH')
        {}

        >>> map_resnum_a_to_resnum_b([9,10], '--MKCDLHRLE-E', 'VSNEYSFEGYKLD')
        {9: 11, 10: 13}

    Args:
        resnums (int, list): Residue number or numbers in the first aligned sequence
        a_aln (str, Seq, SeqRecord): Aligned sequence string
        b_aln (str, Seq, SeqRecord): Aligned sequence string

    Returns:
        dict: Mapping of residue numbers in the first sequence to the
        corresponding residue numbers in the second (unmappable residues
        are omitted; the original docstring incorrectly said ``int``).

    """
    resnums = ssbio.utils.force_list(resnums)

    aln_df = get_alignment_df(a_aln, b_aln)
    # Rows of the alignment table for the requested positions in sequence A.
    maps = aln_df[aln_df.id_a_pos.isin(resnums)]
    # Keep only positions that actually align to a residue in sequence B.
    able_to_map_to_b = maps[pd.notnull(maps.id_b_pos)]
    successful_map_from_a = able_to_map_to_b.id_a_pos.values.tolist()

    mapping = dict([(int(a), int(b)) for a,b in zip(able_to_map_to_b.id_a_pos, able_to_map_to_b.id_b_pos)])

    cant_map = list(set(resnums).difference(successful_map_from_a))
    if len(cant_map) > 0:
        log.warning('Unable to map residue numbers {} in first sequence to second'.format(cant_map))

    return mapping
Map a residue number in a sequence to the corresponding residue number in an aligned sequence. Examples: >>> map_resnum_a_to_resnum_b([1,2,3], '--ABCDEF', 'XXABCDEF') {1: 3, 2: 4, 3: 5} >>> map_resnum_a_to_resnum_b(5, '--ABCDEF', 'XXABCDEF') {5: 7} >>> map_resnum_a_to_resnum_b(5, 'ABCDEF', 'ABCD--') {} >>> map_resnum_a_to_resnum_b(5, 'ABCDEF--', 'ABCD--GH') {} >>> map_resnum_a_to_resnum_b([9,10], '--MKCDLHRLE-E', 'VSNEYSFEGYKLD') {9: 11, 10: 13} Args: resnums (int, list): Residue number or numbers in the first aligned sequence a_aln (str, Seq, SeqRecord): Aligned sequence string b_aln (str, Seq, SeqRecord): Aligned sequence string Returns: dict: Mapping of residue numbers in the first sequence to the corresponding residue numbers in the second aligned sequence
def gaussian_pdf(std=10.0, mean=0.0):
    """Gaussian PDF for orientation averaging.

    Args:
        std: standard deviation (degrees) of the Gaussian PDF.
        mean: mean (degrees) of the Gaussian PDF; expected in [0, 180).

    Returns:
        pdf(x): a function evaluating the spherical-Jacobian-normalized
        Gaussian PDF at x (degrees), normalized so that its integral over
        [0, 180] equals one.
    """
    norm_const = 1.0

    def pdf(x):
        gauss = np.exp(-0.5 * ((x - mean) / std) ** 2)
        jacobian = np.sin(np.pi / 180.0 * x)
        return norm_const * gauss * jacobian

    # Integrate the un-normalized pdf once, then rescale the closed-over
    # constant so every later call integrates to one over [0, 180].
    norm_const /= quad(pdf, 0.0, 180.0)[0]
    return pdf
Gaussian PDF for orientation averaging. Args: std: The standard deviation in degrees of the Gaussian PDF mean: The mean in degrees of the Gaussian PDF. This should be a number in the interval [0, 180) Returns: pdf(x), a function that returns the value of the spherical Jacobian- normalized Gaussian PDF with the given STD at x (degrees). It is normalized for the interval [0, 180].
def plot_flat(r, c, figsize):
    "Shortcut for `enumerate(subplots.flatten())`"
    _, axes = plt.subplots(r, c, figsize=figsize)
    return enumerate(axes.flatten())
Shortcut for `enumerate(subplots.flatten())`
def bellman_ford(graph, weight, source=0):
    """Single source shortest paths by Bellman-Ford.

    :param graph: directed graph in listlist or listdict format
    :param weight: edge weights (may be negative), in matrix format or the
        same listdict shape as graph
    :param source: index of the source vertex
    :returns: (dist, prec, has_negative_cycle) -- the boolean is True when
        a negative circuit is reachable from the source (circuits of
        length 2 included)
    :complexity: `O(|V|*|E|)`
    """
    n = len(graph)
    dist = [float('inf')] * n
    prec = [None] * n
    dist[source] = 0
    for _ in range(n):
        relaxed = False
        for u in range(n):
            for v in graph[u]:
                candidate = dist[u] + weight[u][v]
                if candidate < dist[v]:
                    dist[v] = candidate
                    prec[v] = u
                    relaxed = True
        if not relaxed:
            # Fixed point reached before n rounds: no negative cycle.
            return dist, prec, False
    # Still relaxing after n full rounds => negative cycle reachable.
    return dist, prec, True
Single source shortest paths by Bellman-Ford :param graph: directed graph in listlist or listdict format :param weight: can be negative. in matrix format or same listdict graph :returns: distance table, precedence table, bool :explanation: bool is True if a negative circuit is reachable from the source, circuits can have length 2. :complexity: `O(|V|*|E|)`
def unhide_dataset(dataset_id,**kwargs):
    """
        Un-hide a previously hidden piece of data so it becomes visible again.

        Only the owner (creator) of the dataset can unhide it.

        :param dataset_id: ID of the dataset to unhide
        :param kwargs: must contain 'user_id', the ID of the requesting user
        :raises HydraError: if the requesting user did not create the dataset
    """
    user_id = kwargs.get('user_id')
    dataset_i = _get_dataset(dataset_id)
    #check that I can unhide the dataset
    if dataset_i.created_by != int(user_id):
        raise HydraError('Permission denied. '
                        'User %s is not the owner of dataset %s'
                        %(user_id, dataset_i.name))
    # 'N' marks the dataset as not hidden; flush pushes the change to the DB
    dataset_i.hidden = 'N'
    db.DBSession.flush()
Unhide a particular piece of data so it is no longer restricted to its owner.

        Only an owner can hide (and unhide) data.
        Data with no owner cannot be hidden.

        The exceptions parameter lists the usernames of those with permission to view the data
        read, write and share indicate whether these users can read, edit and share this data.
def handle_remove_readonly(func, path, exc): """Error handler for shutil.rmtree. Windows source repo folders are read-only by default, so this error handler attempts to set them as writeable and then proceed with deletion. :param function func: The caller function :param str path: The target path for removal :param Exception exc: The raised exception This function will call check :func:`is_readonly_path` before attempting to call :func:`set_write_bit` on the target path and try again. """ # Check for read-only attribute from .compat import ResourceWarning, FileNotFoundError, PermissionError PERM_ERRORS = (errno.EACCES, errno.EPERM, errno.ENOENT) default_warning_message = "Unable to remove file due to permissions restriction: {!r}" # split the initial exception out into its type, exception, and traceback exc_type, exc_exception, exc_tb = exc if is_readonly_path(path): # Apply write permission and call original function set_write_bit(path) try: func(path) except (OSError, IOError, FileNotFoundError, PermissionError) as e: if e.errno == errno.ENOENT: return elif e.errno in PERM_ERRORS: remaining = None if os.path.isdir(path): remaining =_wait_for_files(path) if remaining: warnings.warn(default_warning_message.format(path), ResourceWarning) return raise if exc_exception.errno in PERM_ERRORS: set_write_bit(path) remaining = _wait_for_files(path) try: func(path) except (OSError, IOError, FileNotFoundError, PermissionError) as e: if e.errno in PERM_ERRORS: warnings.warn(default_warning_message.format(path), ResourceWarning) pass elif e.errno == errno.ENOENT: # File already gone pass else: raise else: return elif exc_exception.errno == errno.ENOENT: pass else: raise exc_exception
Error handler for shutil.rmtree. Windows source repo folders are read-only by default, so this error handler attempts to set them as writeable and then proceed with deletion. :param function func: The caller function :param str path: The target path for removal :param Exception exc: The raised exception This function will call check :func:`is_readonly_path` before attempting to call :func:`set_write_bit` on the target path and try again.
def TemplateValidator(value):
    """Try to compile a string into a Django template"""
    try:
        Template(value)
    except Exception as exc:
        # surface the compilation failure as a form/model validation error
        raise ValidationError(
            _("Cannot compile template (%(exception)s)"),
            params={"exception": exc}
        )
Try to compile a string into a Django template
def _set_default(path, dest, name):
    '''
    Set the subvolume as the current default.
    '''
    # look up the numeric id of the named subvolume, then make it default
    info = __salt__['btrfs.subvolume_show'](path)
    subvol_id = info[name]['subvolume id']
    return __salt__['btrfs.subvolume_set_default'](subvol_id, dest)
Set the subvolume as the current default.
def execute(self, command):
    """Primary method to execute ipmitool commands

    :param command: ipmi command to execute, str or list
    e.g.
    > ipmi = ipmitool('consolename.prod', 'secretpass')
    > ipmi.execute('chassis status')
    >
    """
    # Normalize the command into a token list before dispatching
    if isinstance(command, list):
        self.method(command)
    elif isinstance(command, str):
        self.method(command.split())
    else:
        raise TypeError("command should be either a string or list type")

    if self.error:
        raise IPMIError(self.error)
    return self.status
Primary method to execute ipmitool commands :param command: ipmi command to execute, str or list e.g. > ipmi = ipmitool('consolename.prod', 'secretpass') > ipmi.execute('chassis status') >
def get_sql_for_new_models(apps=None, using=DEFAULT_DB_ALIAS):
    """
    Return the SQL needed to create tables (plus pending-reference, custom
    and index SQL) for models whose tables do not yet exist.

    Unashamedly copied and tweaked from django.core.management.commands.syncdb

    :param apps: optional list of app labels to restrict to; all installed
        apps when omitted
    :param using: database alias whose connection is introspected
    :returns: list of SQL statement strings, with "### New Model" markers
        interleaved
    """
    connection = connections[using]

    # Get a list of already installed *models* so that references work right.
    tables = connection.introspection.table_names()
    seen_models = connection.introspection.installed_models(tables)
    created_models = set()
    pending_references = {}

    if apps:
        apps = [models.get_app(a) for a in apps]
    else:
        apps = models.get_apps()

    # Build the manifest of apps and models that are to be synchronized
    all_models = [
        (app.__name__.split('.')[-2], [
            m for m in models.get_models(app, include_auto_created=True)
            if router.allow_syncdb(using, m)
        ])
        for app in apps
    ]

    def model_installed(model):
        # True when neither the model's own table nor its auto-created
        # (e.g. m2m through) table already exists in the database
        opts = model._meta
        converter = connection.introspection.table_name_converter
        db_table_in = (converter(opts.db_table) in tables)
        auto_create_in = (
            opts.auto_created and
            converter(opts.auto_created._meta.db_table) in tables
        )
        return not (db_table_in or auto_create_in)

    manifest = SortedDict(
        (app_name, filter(model_installed, model_list))
        for app_name, model_list in all_models
    )

    statements = []
    sql = None
    for app_name, model_list in manifest.items():
        for model in model_list:
            # Create the model's database table, if it doesn't already exist.
            sql, references = connection.creation.sql_create_model(
                model, no_style(), seen_models
            )
            seen_models.add(model)
            created_models.add(model)
            statements.append("### New Model: %s.%s" % (
                app_name,
                str(model).replace("'>", "").split(".")[-1]
            ))
            # Defer FK constraints to models not created yet; emit them as
            # soon as the referenced model has been seen
            for refto, refs in references.items():
                pending_references.setdefault(refto, []).extend(refs)
                if refto in seen_models:
                    sql.extend(
                        connection.creation.sql_for_pending_references(
                            refto, no_style(), pending_references
                        )
                    )
            sql.extend(
                connection.creation.sql_for_pending_references(
                    model, no_style(), pending_references
                )
            )
            statements.extend(sql)

    # Per-model custom SQL (initial data files etc.)
    custom_sql = None
    for app_name, model_list in manifest.items():
        for model in model_list:
            if model in created_models:
                custom_sql = custom_sql_for_model(
                    model, no_style(), connection
                )
                if custom_sql:
                    statements.extend(custom_sql)

    # Index creation comes last, after tables and custom SQL
    index_sql = None
    for app_name, model_list in manifest.items():
        for model in model_list:
            if model in created_models:
                index_sql = connection.creation.sql_indexes_for_model(
                    model, no_style()
                )
                if index_sql:
                    statements.extend(index_sql)

    return statements
Unashamedly copied and tweaked from django.core.management.commands.syncdb
def read_validate_params(self, request):
    """
    Validate the incoming request.

    :param request: The incoming :class:`oauth2.web.Request`.
    :return: Returns ``True`` if data is valid.
    :raises: :class:`oauth2.error.OAuthInvalidError`
    """
    self.refresh_token = request.post_param("refresh_token")

    if self.refresh_token is None:
        raise OAuthInvalidError(
            error="invalid_request",
            explanation="Missing refresh_token in request body")

    # Authenticate the client before touching the token store
    self.client = self.client_authenticator.by_identifier_secret(request)

    try:
        access_token = self.access_token_store.fetch_by_refresh_token(
            self.refresh_token
        )
    except AccessTokenNotFound:
        raise OAuthInvalidError(error="invalid_request",
                                explanation="Invalid refresh token")

    refresh_token_expires_at = access_token.refresh_expires_at
    self.refresh_grant_type = access_token.grant_type
    # An expiry of 0 means the refresh token never expires; otherwise
    # compare against the current unix time
    if refresh_token_expires_at != 0 and \
       refresh_token_expires_at < int(time.time()):
        raise OAuthInvalidError(error="invalid_request",
                                explanation="Invalid refresh token")

    self.data = access_token.data
    self.user_id = access_token.user_id

    # Requested scopes must match those granted on the original token
    self.scope_handler.parse(request, "body")
    self.scope_handler.compare(access_token.scopes)

    return True
Validate the incoming request. :param request: The incoming :class:`oauth2.web.Request`. :return: Returns ``True`` if data is valid. :raises: :class:`oauth2.error.OAuthInvalidError`
def apply_object(self, config_obj, apply_on=None):
    # type: (object, Optional[Tuple[str, ...]]) -> None
    """Apply additional configuration from any Python object

    This will look for object attributes that exist in the base_config and
    apply their values on the current configuration object
    """
    self._init_flat_pointers()
    try:
        attr_names = vars(config_obj).keys()  # type: Iterable[str]
    except TypeError:
        # object has no __dict__; fall back to dir() minus private names
        attr_names = filter(lambda name: name[0] != '_', dir(config_obj))
    for attr_name in attr_names:
        # build the flattened lookup key, optionally nested under apply_on
        flat_key = (apply_on + (attr_name,)) if apply_on else (attr_name,)
        if flat_key in self._flat_pointers:
            container, orig_key = self._flat_pointers[flat_key]
            container[orig_key] = getattr(config_obj, attr_name)
Apply additional configuration from any Python object This will look for object attributes that exist in the base_config and apply their values on the current configuration object
def list_as_sub(access_token, subscription_id):
    '''List availability sets in a subscription.
    Args:
        access_token (str): A valid Azure authentication token.
        subscription_id (str): Azure subscription id.

    Returns:
        HTTP response. JSON body of the list of availability set properties.
    '''
    # assemble the ARM REST endpoint for availability sets
    endpoint = ''.join([
        get_rm_endpoint(),
        '/subscriptions/', subscription_id,
        '/providers/Microsoft.Compute/availabilitySets',
        '?api-version=', COMP_API,
    ])
    return do_get_next(endpoint, access_token)
List availability sets in a subscription. Args: access_token (str): A valid Azure authentication token. subscription_id (str): Azure subscription id. Returns: HTTP response. JSON body of the list of availability set properties.
def __verify_arguments(self):
    """!
    @brief Checks algorithm's arguments and if some of them is incorrect then exception is thrown.

    """
    amount = len(self.__data)
    # kmax must not exceed the number of data points
    if self.__kmax > amount:
        raise ValueError("K max value '" + str(self.__kmax) + "' is bigger than amount of objects '" +
                         str(amount) + "' in input data.")
    # silhouette scores are undefined for a single cluster
    if self.__kmin <= 1:
        raise ValueError("K min value '" + str(self.__kmin) + "' should be greater than 1 (impossible to provide "
                         "silhouette score for only one cluster).")
! @brief Checks algorithm's arguments and if some of them is incorrect then exception is thrown.
def find_root_thrifts(basedirs, sources, log=None):
    """Finds the root thrift files in the graph formed by sources and their recursive includes.

    :basedirs: A set of thrift source file base directories to look for includes in.
    :sources: Seed thrift files to examine.
    :log: An optional logger.
    """
    # a root is a source that no other source (transitively) includes
    roots = set(sources)
    for thrift_source in sources:
        included = find_includes(basedirs, thrift_source, log=log)
        roots.difference_update(included)
    return roots
Finds the root thrift files in the graph formed by sources and their recursive includes. :basedirs: A set of thrift source file base directories to look for includes in. :sources: Seed thrift files to examine. :log: An optional logger.
def parse_player_info(self, player):
    """
    Parse a PlayerInfo struct. This arrives before a FileInfo message
    """
    if player.ishltv:
        return  # skip the HLTV/SourceTV pseudo-player entry
    self.player_info[player.name] = {
        "user_id": player.userID,
        "guid": player.guid,
        "bot": player.fakeplayer,
    }
Parse a PlayerInfo struct. This arrives before a FileInfo message
def DiffPrimitiveArrays(self, oldObj, newObj):
    """Diff two primitive arrays"""
    if len(oldObj) != len(newObj):
        __Log__.debug('DiffDoArrays: Array lengths do not match %d != %d'
                      % (len(oldObj), len(newObj)))
        return False

    if self._ignoreArrayOrder:
        # Order-insensitive comparison: compare as sets of values
        oldSet = frozenset(oldObj) if oldObj else frozenset()
        newSet = frozenset(newObj) if newObj else frozenset()
        match = (oldSet == newSet)
    else:
        # Order-sensitive comparison, element by element
        match = all(a == b for a, b in zip(oldObj, newObj))

    if not match:
        __Log__.debug(
            'DiffPrimitiveArrays: One of the elements do not match.')
        return False
    return True
Diff two primitive arrays
def reset(self):
    """
    Resets the colors to the default settings.
    """
    scheme = self.dataSet()
    if not scheme:
        # no data set attached yet; fall back to a fresh default scheme
        scheme = XScheme()
    scheme.reset()
Resets the colors to the default settings.
def _get_version():
    '''
    Get the pkgin version
    '''
    version_string = __salt__['cmd.run'](
        [_check_pkgin(), '-v'],
        output_loglevel='trace')
    if version_string is None:
        # Dunno why it would, but...
        return False

    match = VERSION_MATCH.search(version_string)
    if match:
        # return the dotted version as a list of components
        return match.group(1).split('.')
    return False
Get the pkgin version
def vgdisplay(vgname='', quiet=False):
    '''
    Return information about the volume group(s)

    vgname
        volume group name

    quiet
        if the volume group is not present, do not show any error

    CLI Examples:

    .. code-block:: bash

        salt '*' lvm.vgdisplay
        salt '*' lvm.vgdisplay nova-volumes
    '''
    ret = {}
    # -c produces one colon-delimited record per volume group
    cmd = ['vgdisplay', '-c']
    if vgname:
        cmd.append(vgname)
    cmd_ret = __salt__['cmd.run_all'](cmd, python_shell=False,
                                      ignore_retcode=quiet)

    if cmd_ret['retcode'] != 0:
        return {}

    out = cmd_ret['stdout'].splitlines()
    for line in out:
        # fields are positional; see vgdisplay(8) for the -c column order
        comps = line.strip().split(':')
        ret[comps[0]] = {
            'Volume Group Name': comps[0],
            'Volume Group Access': comps[1],
            'Volume Group Status': comps[2],
            'Internal Volume Group Number': comps[3],
            'Maximum Logical Volumes': comps[4],
            'Current Logical Volumes': comps[5],
            'Open Logical Volumes': comps[6],
            'Maximum Logical Volume Size': comps[7],
            'Maximum Physical Volumes': comps[8],
            'Current Physical Volumes': comps[9],
            'Actual Physical Volumes': comps[10],
            'Volume Group Size (kB)': comps[11],
            'Physical Extent Size (kB)': comps[12],
            'Total Physical Extents': comps[13],
            'Allocated Physical Extents': comps[14],
            'Free Physical Extents': comps[15],
            'UUID': comps[16],
        }
    return ret
Return information about the volume group(s) vgname volume group name quiet if the volume group is not present, do not show any error CLI Examples: .. code-block:: bash salt '*' lvm.vgdisplay salt '*' lvm.vgdisplay nova-volumes
def _process_iq_response(self, stanza):
    """Process IQ stanza of type 'response' or 'error'.

    :Parameters:
        - `stanza`: the stanza received
    :Types:
        - `stanza`: `Iq`

    If a matching handler is available pass the stanza to it.  Otherwise
    ignore it if it is "error" or "result" stanza or return
    "feature-not-implemented" error if it is "get" or "set".
    """
    stanza_id = stanza.stanza_id
    from_jid = stanza.from_jid
    if from_jid:
        ufrom = from_jid.as_unicode()
    else:
        ufrom = None
    res_handler = err_handler = None
    # Handlers are keyed by (stanza id, sender JID)
    try:
        res_handler, err_handler = self._iq_response_handlers.pop(
                                                (stanza_id, ufrom))
    except KeyError:
        logger.debug("No response handler for id={0!r} from={1!r}"
                                                .format(stanza_id, ufrom))
        logger.debug(" from_jid: {0!r} peer: {1!r}  me: {2!r}"
                            .format(from_jid, self.peer, self.me))
        # Fall back to a handler registered without a sender JID when the
        # response comes from the peer or from our own JID (bare or full)
        if ( (from_jid == self.peer or from_jid == self.me
                    or self.me and from_jid == self.me.bare()) ):
            try:
                logger.debug("  trying id={0!r} from=None"
                                                        .format(stanza_id))
                res_handler, err_handler = \
                        self._iq_response_handlers.pop(
                                                (stanza_id, None))
            except KeyError:
                pass
    if stanza.stanza_type == "result":
        if res_handler:
            response = res_handler(stanza)
        else:
            # no handler registered; silently ignore the result
            return False
    else:
        if err_handler:
            response = err_handler(stanza)
        else:
            return False
    self._process_handler_result(response)
    return True
Process IQ stanza of type 'response' or 'error'. :Parameters: - `stanza`: the stanza received :Types: - `stanza`: `Iq` If a matching handler is available pass the stanza to it. Otherwise ignore it if it is "error" or "result" stanza or return "feature-not-implemented" error if it is "get" or "set".
def KIC(N, rho, k):
    r"""Kullback information criterion

    .. math:: KIC(k) = log(\rho_k) + 3 \frac{k+1}{N}

    :param N: number of samples
    :param rho: residual variance :math:`\rho_k` (scalar or array)
    :param k: model order
    :returns: the KIC value (same shape as ``rho``)

    :validation: double checked versus octave.
    """
    # numpy's log handles both scalar and array rho
    from numpy import log
    return log(rho) + 3. * (k + 1.) / float(N)
r"""Kullback information criterion .. math:: KIC(k) = log(\rho_k) + 3 \frac{k+1}{N} :validation: double checked versus octave.
def create(cls, fields=None, **fields_kwargs):
    """
    create an instance of cls with the passed in fields and set it into the db

    fields -- dict -- field_name keys, with their respective values
    **fields_kwargs -- dict -- if you would rather pass in fields as name=val, that works also
    """
    # NOTE -- you cannot use hydrate/populate here because populate alters
    # modified fields
    obj = cls(fields, **fields_kwargs)
    obj.save()
    return obj
create an instance of cls with the passed in fields and set it into the db fields -- dict -- field_name keys, with their respective values **fields_kwargs -- dict -- if you would rather pass in fields as name=val, that works also
def _ConvertDictToObject(cls, json_dict):
    """Converts a JSON dict into an object.

    The dictionary of the JSON serialized objects consists of:
    {
        '__type__': 'AttributeContainer'
        '__container_type__': ...
        ...
    }

    Here '__type__' indicates the object base type. In this case
    'AttributeContainer'.

    '__container_type__' indicates the attribute container type.

    The rest of the elements of the dictionary make up the attributes.

    Args:
      json_dict (dict[str, object]): JSON serialized objects.

    Returns:
      AttributeContainer|dict|list|tuple: deserialized object.

    Raises:
      ValueError: if the class type or container type is not supported.
    """
    # Use __type__ to indicate the object class type.
    class_type = json_dict.get('__type__', None)
    if not class_type:
        # Dealing with a regular dict.
        return json_dict

    if class_type == 'bytes':
        # bytes were serialized as quoted-printable text
        return binascii.a2b_qp(json_dict['stream'])

    if class_type == 'tuple':
        return tuple(cls._ConvertListToObject(json_dict['values']))

    if class_type == 'collections.Counter':
        return cls._ConvertDictToCollectionsCounter(json_dict)

    if class_type == 'AttributeContainer':
        # Use __container_type__ to indicate the attribute container type.
        container_type = json_dict.get('__container_type__', None)

    # Since we would like the JSON as flat as possible we handle decoding
    # a path specification.
    elif class_type == 'PathSpec':
        return cls._ConvertDictToPathSpec(json_dict)

    else:
        raise ValueError('Unsupported class type: {0:s}'.format(class_type))

    container_class = (
        containers_manager.AttributeContainersManager.GetAttributeContainer(
            container_type))
    if not container_class:
        raise ValueError('Unsupported container type: {0:s}'.format(
            container_type))

    container_object = container_class()
    supported_attribute_names = container_object.GetAttributeNames()
    for attribute_name, attribute_value in iter(json_dict.items()):
        # Be strict about which attributes to set in non event values.
        if (container_type not in ('event', 'event_data') and
                attribute_name not in supported_attribute_names):

            if attribute_name not in ('__container_type__', '__type__'):
                logger.debug((
                    '[ConvertDictToObject] unsupported attribute name: '
                    '{0:s}.{1:s}').format(container_type, attribute_name))

            continue

        # Recursively deserialize nested dicts and lists
        if isinstance(attribute_value, dict):
            attribute_value = cls._ConvertDictToObject(attribute_value)

        elif isinstance(attribute_value, list):
            attribute_value = cls._ConvertListToObject(attribute_value)

        setattr(container_object, attribute_name, attribute_value)

    return container_object
Converts a JSON dict into an object. The dictionary of the JSON serialized objects consists of: { '__type__': 'AttributeContainer' '__container_type__': ... ... } Here '__type__' indicates the object base type. In this case 'AttributeContainer'. '__container_type__' indicates the attribute container type. The rest of the elements of the dictionary make up the attributes. Args: json_dict (dict[str, object]): JSON serialized objects. Returns: AttributeContainer|dict|list|tuple: deserialized object. Raises: ValueError: if the class type or container type is not supported.
def get_diversity(self, X):
    """compute mean diversity of individual outputs

    Diversity is measured as 1 minus the mean clipped R^2 between the first
    individual's output (row 0 of X) and every other individual's output.
    The result is appended to self.diversity.
    """
    n_outputs = X.shape[0]
    feature_correlations = np.zeros(n_outputs - 1)
    # Compare X[0] against every other row.  The previous loop iterated
    # over arange(1, n_outputs - 1), which left slot 0 permanently at 0.0
    # and never compared against the last row, biasing diversity upward.
    for i in np.arange(1, n_outputs):
        feature_correlations[i - 1] = max(0.0, r2_score(X[0], X[i]))
    self.diversity.append(1 - np.mean(feature_correlations))
compute mean diversity of individual outputs
def get_cds_ranges_for_transcript(self, transcript_id):
    """ obtain the CDS coordinate ranges for a transcript from ensembl """

    headers = {"content-type": "application/json"}

    self.attempt = 0  # reset the retry counter before the REST call
    ext = "/overlap/id/{}?feature=cds".format(transcript_id)
    response = self.ensembl_request(ext, headers)

    # keep only CDS features that belong to the requested transcript
    return [
        (entry["start"], entry["end"])
        for entry in json.loads(response)
        if entry["Parent"] == transcript_id
    ]
obtain the sequence for a transcript from ensembl
def com_google_fonts_check_metadata_canonical_weight_value(font_metadata):
    """METADATA.pb: Check that font weight has a canonical value."""
    weight = font_metadata.weight
    # canonical weights are exactly 100, 200, ..., 900
    is_canonical = (weight % 100) == 0 and 100 <= weight <= 900
    if is_canonical:
        yield PASS, "Font weight has a canonical value."
    else:
        yield FAIL, ("METADATA.pb: The weight is declared"
                     " as {} which is not a"
                     " multiple of 100"
                     " between 100 and 900.").format(weight)
METADATA.pb: Check that font weight has a canonical value.
def align(args):
    """
    %prog align database.fasta read1.fq [read2.fq]

    Wrapper for three modes of BWA - mem (default), aln, bwasw (long reads).
    """
    valid_modes = ("bwasw", "aln", "mem")
    p = OptionParser(align.__doc__)
    p.add_option("--mode", default="mem", choices=valid_modes,
                 help="BWA mode")
    set_align_options(p)
    p.set_sam_options()

    opts, args = p.parse_args(args)
    mode = opts.mode
    nargs = len(args)

    # 2 args: single-end; 3 args: paired-end
    if nargs not in (2, 3):
        sys.exit(not p.print_help())

    tag = "bwa-{0}: ".format(mode)
    c = mem
    if nargs == 2:
        tag += "Single-end alignment"
        if mode == "bwasw":
            c = bwasw
        elif mode == "aln":
            c = samse
    else:
        # bwasw has no paired-end mode in this wrapper
        assert mode != "bwasw", "Cannot use --bwasw with paired-end mode"
        tag += "Paired-end alignment"
        if mode == "aln":
            c = sampe
    logging.debug(tag)

    cmd, samfile = c(args, opts)
    if cmd:
        cmd = output_bam(cmd, samfile)

    bam = opts.bam
    unmapped = opts.unmapped

    sh(cmd)
    if unmapped:
        # extract unmapped reads, then discard the intermediate sam/bam
        dbfile, readfile = args[:2]
        mopts = [samfile, "--unmapped"]
        if not bam:
            mopts += ["--sam"]
        mapped(mopts)
        FileShredder([samfile])

    return samfile, None
%prog align database.fasta read1.fq [read2.fq] Wrapper for three modes of BWA - mem (default), aln, bwasw (long reads).
def tag(self, repository_tag, tags=None):
    """
    Tags image with one or more tags. Raises exception on failure.

    :param repository_tag: "repo" or "repo:tag" string; a tag embedded here
        is also applied.
    :param tags: optional list of tag names; defaults to ['latest'] when no
        tag is supplied here or in repository_tag.
    :raises TypeError: if repository_tag is not a string or tags is not a list
    """
    if not isinstance(repository_tag, six.string_types):
        raise TypeError('repository_tag must be a string')
    # Previously the default was a shared mutable list (tags=[]), which
    # accumulated tags across calls and mutated the caller's list in place.
    if tags is None:
        tags = []
    elif not isinstance(tags, list):
        raise TypeError('tags must be a list.')
    else:
        tags = list(tags)  # copy: never mutate the caller's list
    if ':' in repository_tag:
        repository, tag = repository_tag.split(':')
        tags.append(tag)
    else:
        repository = repository_tag

    if not tags:
        tags.append('latest')

    for tag in tags:
        repo_tag = "{0}:{1}".format(repository, tag)
        if repo_tag not in self.repo_tags:
            logger.info("Tagging Image: {0} Repo Tag: {1}".format(self.identifier, repo_tag))
            self.repo_tags = self.repo_tags + (repo_tag, )
            # always going to force tags until a feature is added to allow users to specify.
            try:
                self.client.tag(self.id, repository, tag)
            except Exception:
                # older docker client versions require an explicit force flag
                self.client.tag(self.id, repository, tag, force=True)
Tags image with one or more tags. Raises exception on failure.
def show_std_icons():
    """ Show all standard Icons """
    app = qapplication()
    dlg = ShowStdIcons(None)
    dlg.show()
    # block until the Qt event loop finishes, then exit with its status
    sys.exit(app.exec_())
Show all standard Icons
def debug(request, message, extra_tags='', fail_silently=False, async_=False,
          **kwargs):
    """Adds a message with the ``DEBUG`` level.

    ``async_`` replaces the former ``async`` parameter, which became a
    reserved keyword in Python 3.7.  The legacy spelling is still honoured
    when passed via ``**kwargs`` (e.g. ``debug(req, msg, **{'async': True})``)
    and positional callers are unaffected.
    """
    # Accept the legacy 'async' keyword for backward compatibility
    async_ = kwargs.pop('async', async_)
    if kwargs:
        raise TypeError(
            'debug() got unexpected keyword arguments: %r' % sorted(kwargs))
    if ASYNC and async_:
        messages.debug(_get_user(request), message)
    else:
        add_message(request, constants.DEBUG, message, extra_tags=extra_tags,
                    fail_silently=fail_silently)
Adds a message with the ``DEBUG`` level.
def _postrun(cls, span, obj, **kwargs): """ Trigger to execute just before closing the span :param opentracing.span.Span span: the SpanContext instance :param Any obj: Object to use as context :param dict kwargs: additional data """ span.set_tag("response.status_code", obj.status_code) span.set_tag( "response.content_lenght", len(getattr(obj, 'content', "")) )
Trigger to execute just before closing the span :param opentracing.span.Span span: the SpanContext instance :param Any obj: Object to use as context :param dict kwargs: additional data
def reports(self):
    """returns a list of reports on the server

    Lazily triggers self.__init() on first access to populate
    self._metrics, then (re)builds a UsageReport wrapper for every
    metric entry.  NOTE(review): _reports is rebuilt on every call even
    when _metrics was already populated -- presumably intentional to
    pick up new reports; confirm against callers.
    """
    if self._metrics is None:
        self.__init()
    self._reports = []
    for r in self._metrics:
        # report names may contain characters that need URL-quoting
        url = self._url + "/%s" % six.moves.urllib.parse.quote_plus(r['reportname'])
        self._reports.append(UsageReport(url=url,
                                         securityHandler=self._securityHandler,
                                         proxy_url=self._proxy_url,
                                         proxy_port=self._proxy_port,
                                         initialize=True))
        del url
    return self._reports
returns a list of reports on the server
def get_manifest_selfLink(self, repo_name, digest=None):
    '''get a selfLink for the manifest, for use by the client get_manifest
       function, along with the parents pull

       Parameters
       ==========
       repo_name: reference to the <username>/<repository>:<tag> to obtain
       digest: a tag or shasum version
    '''
    manifests_url = "%s/%s/manifests" % (self.base, repo_name)
    # fall back to the 'latest' tag when no version is requested
    if digest is None:
        digest = 'latest'
    return "%s/%s" % (manifests_url, digest)
get a selfLink for the manifest, for use by the client get_manifest function, along with the parents pull Parameters ========== repo_name: reference to the <username>/<repository>:<tag> to obtain digest: a tag or shasum version
def _cond_x(self, word, suffix_len): """Return Lovins' condition X. Parameters ---------- word : str Word to check suffix_len : int Suffix length Returns ------- bool True if condition is met """ return word[-suffix_len - 1] in {'i', 'l'} or ( word[-suffix_len - 3 : -suffix_len] == 'u' and word[-suffix_len - 1] == 'e' )
Return Lovins' condition X. Parameters ---------- word : str Word to check suffix_len : int Suffix length Returns ------- bool True if condition is met
def rings_full_data(self): """ Returns a generator for iterating over each ring Yields ------ For each ring, tuple composed by ring ID, list of edges, list of nodes Notes ----- Circuit breakers must be closed to find rings, this is done automatically. """ #close circuit breakers for circ_breaker in self.circuit_breakers(): if not circ_breaker.status == 'closed': circ_breaker.close() logger.info('Circuit breakers were closed in order to find MV ' 'rings') #find True rings (cycles from station through breaker and back to station) for ring_nodes in nx.cycle_basis(self._graph, root=self._station): edges_ring = [] for node in ring_nodes: for edge in self.graph_branches_from_node(node): nodes_in_the_branch = self.graph_nodes_from_branch(edge[1]['branch']) if (nodes_in_the_branch[0] in ring_nodes and nodes_in_the_branch[1] in ring_nodes ): if not edge[1]['branch'] in edges_ring: edges_ring.append(edge[1]['branch']) yield (edges_ring[0].ring,edges_ring,ring_nodes)
Returns a generator for iterating over each ring Yields ------ For each ring, tuple composed by ring ID, list of edges, list of nodes Notes ----- Circuit breakers must be closed to find rings, this is done automatically.
def wait_for_redfish_firmware_update_to_complete(self, redfish_object):
    """Continuously polls for iLO firmware update to complete.

    :param redfish_object: redfish instance
    """
    # single-element lists so the nested closure can mutate previous /
    # current state across polls
    p_state = ['Idle']
    c_state = ['Idle']

    def has_firmware_flash_completed():
        """Checks for completion status of firmware update operation

        The below table shows the conditions for which the firmware update
        will be considered as DONE (be it success or error)::

        +-----------------------------------+-----------------------------+
        |    Previous state                 |   Current state             |
        +===================================+=============================+
        |    Idle                           |   Error, Complete           |
        +-----------------------------------+-----------------------------+
        |    Updating, Verifying,           |   Complete, Error,          |
        |    Uploading, Writing             |   Unknown, Idle             |
        +-----------------------------------+-----------------------------+

        :returns: True upon firmware update completion otherwise False
        """
        curr_state, curr_percent = self.get_firmware_update_progress()
        p_state[0] = c_state[0]
        c_state[0] = curr_state
        if (((p_state[0] in ['Updating', 'Verifying',
                             'Uploading', 'Writing'])
                and (c_state[0] in ['Complete', 'Error',
                                    'Unknown', 'Idle']))
                or (p_state[0] == 'Idle' and (c_state[0] in
                                              ['Complete', 'Error']))):
            return True
        return False

    # poll every 30s until the predicate above reports completion
    common.wait_for_operation_to_complete(
        has_firmware_flash_completed,
        delay_bw_retries=30,
        failover_msg='iLO firmware update has failed.'
    )
    # iLO resets itself after a flash; wait until it is reachable again
    common.wait_for_ilo_after_reset(redfish_object)
Continuously polls for iLO firmware update to complete. :param redfish_object: redfish instance
def subpoint(self):
    """Return the latitude and longitude directly beneath this position.

    Returns a :class:`~skyfield.toposlib.Topos` whose ``longitude``
    and ``latitude`` are those of the point on the Earth's surface
    directly beneath this position, and whose ``elevation`` is the
    height of this position above the Earth's surface.

    :raises ValueError: if this position is not measured from the
        Earth's center (center 399).
    """
    if self.center != 399:  # TODO: should an __init__() check this?
        raise ValueError("you can only ask for the geographic subpoint"
                         " of a position measured from Earth's center")
    t = self.t
    # rotate the ICRF position into the Earth-fixed frame for this time
    xyz_au = einsum('ij...,j...->i...', t.M, self.position.au)
    lat, lon, elevation_m = reverse_terra(xyz_au, t.gast)

    # TODO. Move VectorFunction and Topos into this file, since the
    # three kinds of class work together: Topos is-a VF; VF.at() can
    # return a Geocentric position; and Geocentric.subpoint() should
    # return a Topos.  I'm deferring the refactoring for now, to get
    # this new feature to users more quickly.
    from .toposlib import Topos
    return Topos(latitude=Angle(radians=lat),
                 longitude=Angle(radians=lon),
                 elevation_m=elevation_m)
Return the latitude and longitude directly beneath this position. Returns a :class:`~skyfield.toposlib.Topos` whose ``longitude`` and ``latitude`` are those of the point on the Earth's surface directly beneath this position, and whose ``elevation`` is the height of this position above the Earth's surface.
def breakLines(self, width):
    """
    Returns a broken line structure. There are two cases

    A) For the simple case of a single formatting input fragment the output is
        A fragment specifier with
        - kind = 0
        - fontName, fontSize, leading, textColor
        - lines = A list of lines
            Each line has two items:
            1. unused width in points
            2. word list

    B) When there is more than one input formatting fragment the output is
        A fragment specifier with
        - kind = 1
        - lines = A list of fragments each having fields
            - extraspace (needed for justified)
            - fontSize
            - words=word list
                each word is itself a fragment with various settings

    This structure can be used to easily draw paragraphs with the various
    alignments. You can supply either a single width or a list of widths;
    the latter will have its last item repeated until necessary. A 2-element
    list is useful when there is a different first line indent; a longer
    list could be created to facilitate custom wraps around irregular objects.
    """
    if self.debug:
        print (id(self), "breakLines")

    # Normalise the width argument to a per-line list of maximum widths.
    if not isinstance(width, (tuple, list)):
        maxWidths = [width]
    else:
        maxWidths = width
    lines = []
    lineno = 0
    style = self.style

    #for bullets, work out width and ensure we wrap the right amount onto line one
    _handleBulletWidth(self.bulletText, style, maxWidths)

    maxWidth = maxWidths[0]
    self.height = 0
    # calcBounds: when auto-leading is on, image ascent/descent bounds are
    # computed per fragment via imgVRange instead of plain font metrics.
    autoLeading = getattr(self, 'autoLeading', getattr(style, 'autoLeading', ''))
    calcBounds = autoLeading not in ('', 'off')
    frags = self.frags
    nFrags = len(frags)
    if nFrags == 1 and not hasattr(frags[0], 'cbDefn'):
        # Case A: a single formatting fragment with no callback definition —
        # simple greedy word wrap using one font only.
        f = frags[0]
        fontSize = f.fontSize
        fontName = f.fontName
        ascent, descent = getAscentDescent(fontName, fontSize)
        words = hasattr(f, 'text') and split(f.text, ' ') or f.words
        spaceWidth = stringWidth(' ', fontName, fontSize, self.encoding)
        cLine = []
        currentWidth = -spaceWidth  # hack to get around extra space for word 1
        for word in words:
            #this underscores my feeling that Unicode throughout would be easier!
            wordWidth = stringWidth(word, fontName, fontSize, self.encoding)
            newWidth = currentWidth + spaceWidth + wordWidth
            if newWidth <= maxWidth or not len(cLine):
                # fit one more on this line (a single over-wide word still
                # goes on a line of its own rather than being dropped)
                cLine.append(word)
                currentWidth = newWidth
            else:
                if currentWidth > self.width:
                    self.width = currentWidth
                #end of line
                lines.append((maxWidth - currentWidth, cLine))
                cLine = [word]
                currentWidth = wordWidth
                lineno += 1
                try:
                    maxWidth = maxWidths[lineno]
                except IndexError:
                    maxWidth = maxWidths[-1]  # use the last one

        #deal with any leftovers on the final line
        if cLine != []:
            if currentWidth > self.width:
                self.width = currentWidth
            lines.append((maxWidth - currentWidth, cLine))

        return f.clone(kind=0, lines=lines, ascent=ascent, descent=descent,
                       fontSize=fontSize)

    elif nFrags <= 0:
        # Empty paragraph: return an empty structure styled from the
        # paragraph style's font.
        return ParaLines(kind=0, fontSize=style.fontSize,
                         fontName=style.fontName, textColor=style.textColor,
                         ascent=style.fontSize, descent=-0.2 * style.fontSize,
                         lines=[])
    else:
        # Case B: multiple fragments (mixed fonts/sizes/colors).
        if hasattr(self, 'blPara') and getattr(self, '_splitpara', 0):
            #NB this is an utter hack that awaits the proper information
            #preserving splitting algorithm
            return self.blPara
        n = 0           # word count on the current line
        words = []      # accumulated fragment clones for the current line
        for w in _getFragWords(frags):
            # w is a frag-word: w[0] is its width, w[1:] are (frag, text)
            # pairs making up the word — TODO confirm against _getFragWords.
            f = w[-1][0]
            fontName = f.fontName
            fontSize = f.fontSize
            spaceWidth = stringWidth(' ', fontName, fontSize)

            if not words:
                currentWidth = -spaceWidth  # hack to get around extra space for word 1
                maxSize = fontSize
                maxAscent, minDescent = getAscentDescent(fontName, fontSize)

            wordWidth = w[0]
            f = w[1][0]
            if wordWidth > 0:
                newWidth = currentWidth + spaceWidth + wordWidth
            else:
                newWidth = currentWidth

            #test to see if this frag is a line break. If it is we will only act on it
            #if the current width is non-negative or the previous thing was a deliberate lineBreak
            lineBreak = hasattr(f, 'lineBreak')
            endLine = (newWidth > maxWidth and n > 0) or lineBreak
            if not endLine:
                if lineBreak: continue  #throw it away
                # Normalise the word's text to unicode before accumulating.
                if type(w[1][1]) != six.text_type:
                    nText = six.text_type(w[1][1], 'utf-8')
                else:
                    nText = w[1][1]
                if nText:
                    n += 1
                fontSize = f.fontSize
                if calcBounds:
                    cbDefn = getattr(f, 'cbDefn', None)
                    if getattr(cbDefn, 'width', 0):
                        descent, ascent = imgVRange(cbDefn.height, cbDefn.valign, fontSize)
                    else:
                        ascent, descent = getAscentDescent(f.fontName, fontSize)
                else:
                    ascent, descent = getAscentDescent(f.fontName, fontSize)
                maxSize = max(maxSize, fontSize)
                maxAscent = max(maxAscent, ascent)
                minDescent = min(minDescent, descent)
                if not words:
                    # first word on the line starts a fresh fragment clone
                    g = f.clone()
                    words = [g]
                    g.text = nText
                elif not _sameFrag(g, f):
                    # formatting changed: close off the previous fragment,
                    # inserting a separating space where needed
                    if currentWidth > 0 and ((nText != '' and nText[0] != ' ') or hasattr(f, 'cbDefn')):
                        if hasattr(g, 'cbDefn'):
                            # walk back to the last real-text word and make
                            # sure it ends with a space
                            i = len(words) - 1
                            while i >= 0:
                                wi = words[i]
                                cbDefn = getattr(wi, 'cbDefn', None)
                                if cbDefn:
                                    if not getattr(cbDefn, 'width', 0):
                                        i -= 1
                                        continue
                                if not wi.text.endswith(' '):
                                    wi.text += ' '
                                break
                        else:
                            # pick str vs bytes space to match g.text's type
                            if type(g.text) == type(' '):
                                space = " "
                            else:
                                space = b" "
                            if not g.text.endswith(space):
                                g.text += space
                    g = f.clone()
                    words.append(g)
                    g.text = nText
                else:
                    # same formatting: append to the current fragment clone
                    if type(g.text) is bytes:
                        g.text = g.text.decode("utf8")
                    if type(nText) is bytes:
                        nText = nText.decode("utf8")
                    if nText != '' and nText[0] != ' ':
                        g.text += ' ' + nText

                # any trailing (frag, text) pairs of a multi-frag word
                for i in w[2:]:
                    g = i[0].clone()
                    g.text = i[1]
                    words.append(g)
                    fontSize = g.fontSize
                    if calcBounds:
                        cbDefn = getattr(g, 'cbDefn', None)
                        if getattr(cbDefn, 'width', 0):
                            descent, ascent = imgVRange(cbDefn.height, cbDefn.valign, fontSize)
                        else:
                            ascent, descent = getAscentDescent(g.fontName, fontSize)
                    else:
                        ascent, descent = getAscentDescent(g.fontName, fontSize)
                    maxSize = max(maxSize, fontSize)
                    maxAscent = max(maxAscent, ascent)
                    minDescent = min(minDescent, descent)

                currentWidth = newWidth
            else:  # either it won't fit, or it's a lineBreak tag
                if lineBreak:
                    g = f.clone()
                    words.append(g)

                if currentWidth > self.width:
                    self.width = currentWidth
                #end of line
                lines.append(FragLine(extraSpace=maxWidth - currentWidth,
                                      wordCount=n, lineBreak=lineBreak,
                                      words=words, fontSize=maxSize,
                                      ascent=maxAscent, descent=minDescent))

                #start new line
                lineno += 1
                try:
                    maxWidth = maxWidths[lineno]
                except IndexError:
                    maxWidth = maxWidths[-1]  # use the last one

                if lineBreak:
                    n = 0
                    words = []
                    continue

                # the word that didn't fit starts the new line
                currentWidth = wordWidth
                n = 1
                g = f.clone()
                maxSize = g.fontSize
                if calcBounds:
                    cbDefn = getattr(g, 'cbDefn', None)
                    if getattr(cbDefn, 'width', 0):
                        minDescent, maxAscent = imgVRange(cbDefn.height, cbDefn.valign, maxSize)
                    else:
                        maxAscent, minDescent = getAscentDescent(g.fontName, maxSize)
                else:
                    maxAscent, minDescent = getAscentDescent(g.fontName, maxSize)
                words = [g]
                g.text = w[1][1]

                for i in w[2:]:
                    g = i[0].clone()
                    g.text = i[1]
                    words.append(g)
                    fontSize = g.fontSize
                    if calcBounds:
                        cbDefn = getattr(g, 'cbDefn', None)
                        if getattr(cbDefn, 'width', 0):
                            descent, ascent = imgVRange(cbDefn.height, cbDefn.valign, fontSize)
                        else:
                            ascent, descent = getAscentDescent(g.fontName, fontSize)
                    else:
                        ascent, descent = getAscentDescent(g.fontName, fontSize)
                    maxSize = max(maxSize, fontSize)
                    maxAscent = max(maxAscent, ascent)
                    minDescent = min(minDescent, descent)

        #deal with any leftovers on the final line
        if words != []:
            if currentWidth > self.width:
                self.width = currentWidth
            lines.append(ParaLines(extraSpace=(maxWidth - currentWidth),
                                   wordCount=n, words=words, fontSize=maxSize,
                                   ascent=maxAscent, descent=minDescent))
        return ParaLines(kind=1, lines=lines)

    # NOTE(review): unreachable — every branch above returns.
    return lines
Returns a broken line structure. There are two cases A) For the simple case of a single formatting input fragment the output is A fragment specifier with - kind = 0 - fontName, fontSize, leading, textColor - lines= A list of lines Each line has two items. 1. unused width in points 2. word list B) When there is more than one input formatting fragment the output is A fragment specifier with - kind = 1 - lines= A list of fragments each having fields - extraspace (needed for justified) - fontSize - words=word list each word is itself a fragment with various settings This structure can be used to easily draw paragraphs with the various alignments. You can supply either a single width or a list of widths; the latter will have its last item repeated until necessary. A 2-element list is useful when there is a different first line indent; a longer list could be created to facilitate custom wraps around irregular objects.
def Operate(self, values):
    """Return True if self.Operation matches at least one of the values.

    Each value is tested as ``self.Operation(value, self.right_operand)``;
    values whose comparison raises TypeError or ValueError are skipped.
    """
    for candidate in values:
        try:
            matched = self.Operation(candidate, self.right_operand)
        except (TypeError, ValueError):
            continue
        if matched:
            return True
    return False
Takes a list of values and if at least one matches, returns True.
def read(self):
    """Reads the whole las data (header, vlrs, points, etc.)
    and returns a LasData object
    """
    vlrs = self.read_vlrs()
    self._warn_if_not_at_expected_pos(
        self.header.offset_to_point_data, "end of vlrs", "start of points"
    )
    # Position the stream at the point records regardless of where the
    # VLR parsing left it.
    self.stream.seek(self.start_pos + self.header.offset_to_point_data)
    try:
        points = self._read_points(vlrs)
    except (RuntimeError, errors.LazPerfNotFound) as e:
        # LazPerf decompression failed: decompress the whole stream with
        # laszip instead, re-initialise this reader on the decompressed
        # bytes, and restart the read from scratch.
        logger.error("LazPerf failed to decompress ({}), trying laszip.".format(e))
        self.stream.seek(self.start_pos)
        self.__init__(io.BytesIO(laszip_decompress(self.stream)))
        return self.read()

    if points.point_format.has_waveform_packet:
        self.stream.seek(
            self.start_pos + self.header.start_of_waveform_data_packet_record
        )
        # Exactly one of the internal/external waveform flags must be set;
        # equal flags (both set or both unset) are incoherent.
        if self.header.global_encoding.are_waveform_flag_equal():
            raise errors.PylasError(
                "Incoherent values for internal and external waveform flags, both are {})".format(
                    "set" if self.header.global_encoding.waveform_internal else "unset"
                )
            )
        if self.header.global_encoding.waveform_internal:
            # TODO: Find out what to do with these
            _, _ = self._read_internal_waveform_packet()
        elif self.header.global_encoding.waveform_external:
            logger.info(
                "Waveform data is in an external file, you'll have to load it yourself"
            )
    # Lexicographic string comparison — valid for "1.x" style versions;
    # LAS 1.4 files additionally carry EVLRs after the point data.
    if self.header.version >= "1.4":
        evlrs = self.read_evlrs()
        return las14.LasData(
            header=self.header, vlrs=vlrs, points=points, evlrs=evlrs
        )
    return las12.LasData(header=self.header, vlrs=vlrs, points=points)
Reads the whole las data (header, vlrs, points, etc.) and returns a LasData object
def SaveAvatarToFile(self, Filename, AvatarId=1):
    """Saves user avatar to a file.

    :Parameters:
      Filename : str
        Destination path.
      AvatarId : int
        Avatar Id.
    """
    # Build the Skype API command and issue it as a GET through the owner.
    command = 'USER %s AVATAR %s %s' % (self.Handle, AvatarId,
                                        path2unicode(Filename))
    self._Owner._DoCommand('GET %s' % command, command)
Saves user avatar to a file. :Parameters: Filename : str Destination path. AvatarId : int Avatar Id.
def optimize(image, fmt='jpeg', quality=80):
    """
    Optimize the image if the IMAGE_OPTIMIZATION_CMD is set.
    IMAGE_OPTIMIZATION_CMD must accept piped input and write the optimized
    image to stdout; when it is unset or missing, the image is returned
    unchanged.
    """
    from io import BytesIO

    # No optimizer configured or not installed: nothing to do.
    if not (IMAGE_OPTIMIZATION_CMD and is_tool(IMAGE_OPTIMIZATION_CMD)):
        return image

    encoded = BytesIO()
    image.save(encoded, format=fmt, quality=quality)
    encoded.seek(0)  # If you don't reset the file pointer, the read command returns an empty string
    process = subprocess.Popen(IMAGE_OPTIMIZATION_CMD,
                               stdin=subprocess.PIPE,
                               stdout=subprocess.PIPE)
    output_optim, output_err = process.communicate(encoded.read())
    if not output_optim:
        # Optimizer produced nothing — fall back to the original image.
        logger.debug("No image buffer received from IMAGE_OPTIMIZATION_CMD")
        logger.debug("output_err: {0}".format(output_err))
        return image
    return Image.open(BytesIO(output_optim))
Optimize the image if the IMAGE_OPTIMIZATION_CMD is set. IMAGE_OPTIMIZATION_CMD must accept piped input
async def _on_heartbeat(self, update): """Receive a new heartbeat for a service.""" name = update['service'] if name not in self.services: return with self._state_lock: self.services[name].heartbeat()
Receive a new heartbeat for a service.
def parse_csv(file_path: str,
              entrez_id_header: str,
              log_fold_change_header: str,
              adjusted_p_value_header: str,
              entrez_delimiter: str,
              base_mean_header=None,
              sep: str = ",") -> List[Gene]:
    """Read a csv file on differential expression values as Gene objects.

    The original docstring documented a nonexistent ``config.Params params``
    argument; the actual per-column parameters are documented below.

    :param str file_path: The path to the differential expression file to be parsed.
    :param str entrez_id_header: Name of the column holding the Entrez gene ids.
    :param str log_fold_change_header: Name of the column holding log2 fold changes.
    :param str adjusted_p_value_header: Name of the column holding adjusted p-values.
    :param str entrez_delimiter: Delimiter separating multiple Entrez ids in one cell.
    :param base_mean_header: Optional name of the column holding base-mean values.
    :param str sep: Field separator used in the file (defaults to ",").
    :return list: A list of Gene objects.
    """
    logger.info("In parse_csv()")

    df = pd.read_csv(file_path, sep=sep)

    # Column-name mapping and row-to-Gene conversion is delegated to the
    # shared dataframe handler.
    return handle_dataframe(
        df,
        entrez_id_name=entrez_id_header,
        log2_fold_change_name=log_fold_change_header,
        adjusted_p_value_name=adjusted_p_value_header,
        entrez_delimiter=entrez_delimiter,
        base_mean=base_mean_header,
    )
Read a csv file on differential expression values as Gene objects. :param str file_path: The path to the differential expression file to be parsed. :param str entrez_id_header: Name of the column holding the Entrez gene ids. :param str log_fold_change_header: Name of the column holding log2 fold changes. :param str adjusted_p_value_header: Name of the column holding adjusted p-values. :param str entrez_delimiter: Delimiter separating multiple Entrez ids in one cell. :return list: A list of Gene objects.