code
stringlengths
20
4.93k
docstring
stringlengths
33
1.27k
source
stringclasses
3 values
def _eval_comparison(self, ident: tuple[str, int | slice | None], op: str, value: str | int | tuple[int, ...]) -> bool: name, key = ident if name == 'sys.version_info': if key is None: key = slice(None, None, None) if isinstance(key, int) and (not isinstance(value, int)): ...
Evaluate a comparison and return a bool. Args: ident: A tuple of a dotted name string and an optional __getitem__ key. op: One of the comparison operator strings in cmp_slots.COMPARES. value: The value to be compared against. Returns: The boolean result of the comparison. Raises: ParseError: If the comparison cannot...
github-repos
def _get_two_lines(f): l0 = f.readline() l1 = f.readline() return l0, l1
Get the first and second lines Args: f (filelike): File that is opened for ascii. Returns: bytes
juraj-google-style
def concurrent_exec(func, param_list): with concurrent.futures.ThreadPoolExecutor(max_workers=30) as executor: future_to_params = {executor.submit(func, *p): p for p in param_list} return_vals = [] for future in concurrent.futures.as_completed(future_to_params): params = future_t...
Executes a function with different parameters pseudo-concurrently. This is basically a map function. Each element (should be an iterable) in the param_list is unpacked and passed into the function. Due to Python's GIL, there's no true concurrency. This is suited for IO-bound tasks. Args: func: The function that parfo...
codesearchnet
def set_channel_created(self, channel_link, channel_id):
    """Record progress after creating the channel on Kolibri Studio.

    Args:
        channel_link (str): Link to the uploaded channel.
        channel_id (str): ID of the channel that has been uploaded.

    Returns:
        None
    """
    self.channel_link = channel_link
    self.channel_id = channel_id
    # Publishing continues to the PUBLISH_CHANNEL stage; otherwise we're done.
    if config.PUBLISH:
        next_status = Status.PUBLISH_CHANNEL
    else:
        next_status = Status.DONE
    self.__record_progress(next_status)
set_channel_created: records progress after creating channel on Kolibri Studio Args: channel_link (str): link to uploaded channel channel_id (str): id of channel that has been uploaded Returns: None
juraj-google-style
def validate(cls, mapper_spec): writer_spec = cls.get_params(mapper_spec, allow_old=False) if cls.BUCKET_NAME_PARAM not in writer_spec: raise errors.BadWriterParamsError( "%s is required for Google Cloud Storage" % cls.BUCKET_NAME_PARAM) try: cloudstorage.validate_...
Validate mapper specification. Args: mapper_spec: an instance of model.MapperSpec. Raises: BadWriterParamsError: if the specification is invalid for any reason such as missing the bucket name or providing an invalid bucket name.
juraj-google-style
def get_cqz(self, callsign, timestamp=timestamp_now):
    """Return the CQ Zone of a callsign.

    Args:
        callsign (str): Amateur radio callsign.
        timestamp (datetime, optional): Datetime in UTC (tzinfo=pytz.UTC).

    Returns:
        int: The callsign's CQ Zone.

    Raises:
        KeyError: If no CQ Zone is found for the callsign.
    """
    record = self.get_all(callsign, timestamp)
    return record[const.CQZ]
Returns CQ Zone of a callsign Args: callsign (str): Amateur Radio callsign timestamp (datetime, optional): datetime in UTC (tzinfo=pytz.UTC) Returns: int: containing the callsign's CQ Zone Raises: KeyError: no CQ Zone found for callsign
codesearchnet
def get_best_blockhash(self, id=None, endpoint=None):
    """Get the hash of the highest block.

    Args:
        id (int, optional): ID to use for response tracking.
        endpoint (RPCEndpoint, optional): Endpoint to use for the call.

    Returns:
        JSON object with the result, or the error encountered in the RPC call.
    """
    # Thin delegation to the shared RPC dispatcher.
    return self._call_endpoint(
        GET_BEST_BLOCK_HASH,
        id=id,
        endpoint=endpoint,
    )
Get the hash of the highest block Args: id: (int, optional) id to use for response tracking endpoint: (RPCEndpoint, optional) endpoint to specify to use Returns: json object of the result or the error encountered in the RPC call
juraj-google-style
def targets(self, module):
    """Find the targets for a given module.

    Args:
        module: Name of the module to look up.

    Returns:
        list: A sequence of all of the targets for the specified module.

    Raises:
        BuildError: If the module has no registered targets.
    """
    known = self.module_targets
    if module not in known:
        raise BuildError('Could not find module in targets()', module=module)
    return [self.find(entry, module) for entry in known[module]]
Find the targets for a given module. Returns: list: A sequence of all of the targets for the specified module.
codesearchnet
def debug(text):
    """Log a message to syslog and stderr, prefixed with the caller's location.

    Args:
        text (str): The string object to print.
    """
    # Inspect the caller's frame to build a "<module>.<function>" prefix.
    caller = inspect.currentframe().f_back
    location = '%s.%s' % (caller.f_globals['__name__'], caller.f_code.co_name)
    _LOGGER.debug('%s: %s' % (location, text))
Log a message to syslog and stderr Args: text (str): The string object to print
codesearchnet
def reminders_info(self, *, reminder: str, **kwargs) -> SlackResponse:
    """Get information about a reminder.

    Args:
        reminder (str): The ID of the reminder, e.g. 'Rm12345678'.

    Returns:
        SlackResponse: The API response.
    """
    self._validate_xoxp_token()
    params = dict(kwargs, reminder=reminder)
    return self.api_call('reminders.info', http_verb='GET', params=params)
Gets information about a reminder. Args: reminder (str): The ID of the reminder. e.g. 'Rm12345678'
codesearchnet
def ParseContactRow(self, parser_mediator, query, row, **unused_kwargs): query_hash = hash(query) event_data = TwitterAndroidContactEventData() event_data.query = query event_data.identifier = self._GetRowValue(query_hash, row, '_id') event_data.user_identifier = self._GetRowValue(query_hash, ...
Parses a contact row from the database. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. query (str): query that created the row. row (sqlite3.Row): row resulting from query.
juraj-google-style
def load_text(self, text, tokenizer=None):
    """Load text from which to generate a word frequency list.

    Args:
        text (str): The text to be loaded.
        tokenizer (callable, optional): Function used to tokenize a string;
            when omitted, this object's own ``tokenize`` method is used.
    """
    if tokenizer:
        words = [token.lower() for token in tokenizer(text)]
    else:
        words = self.tokenize(text)
    self._dictionary.update(words)
    self._update_dictionary()
Load text from which to generate a word frequency list Args: text (str): The text to be loaded tokenizer (function): The function to use to tokenize a string
codesearchnet
def configure_vrf(self, vrf_name, commands):
    """Configure the specified VRF using commands.

    Args:
        vrf_name (str): The VRF name to configure.
        commands: The list of commands to configure.

    Returns:
        True if the commands completed successfully.
    """
    # Enter the VRF context first, then apply the caller's commands.
    cmds = make_iterable(commands)
    cmds.insert(0, 'vrf definition %s' % vrf_name)
    return self.configure(cmds)
Configures the specified VRF using commands Args: vrf_name (str): The VRF name to configure commands: The list of commands to configure Returns: True if the commands completed successfully
codesearchnet
def deserialize(proto): _, type_registrations = _REVIVED_TYPE_REGISTRY.get(proto.identifier, (None, None)) if type_registrations is not None: for type_registration in type_registrations: if type_registration.should_load(proto): return (type_registration.from_proto(proto), typ...
Create a trackable object from a SavedUserObject proto. Args: proto: A SavedUserObject to deserialize. Returns: A tuple of (trackable, assignment_fn) where assignment_fn has the same signature as setattr and should be used to add dependencies to `trackable` when they are available.
github-repos
def do_post(endpoint, body, access_token):
    """Do an HTTP POST request and return JSON.

    Args:
        endpoint (str): Azure Resource Manager management endpoint.
        body (str): JSON body of information to post.
        access_token (str): A valid Azure authentication token.

    Returns:
        HTTP response with a JSON body.
    """
    headers = {
        "content-type": "application/json",
        "Authorization": 'Bearer ' + access_token,
        "User-Agent": get_user_agent(),
    }
    return requests.post(endpoint, data=body, headers=headers)
Do an HTTP POST request and return JSON. Args: endpoint (str): Azure Resource Manager management endpoint. body (str): JSON body of information to post. access_token (str): A valid Azure authentication token. Returns: HTTP response. JSON body.
juraj-google-style
def _find_human_readable_labels(synsets, synset_to_human): humans = [] for s in synsets: assert (s in synset_to_human), ('Failed to find: %s' % s) humans.append(synset_to_human[s]) return humans
Build a list of human-readable labels. Args: synsets: list of strings; each string is a unique WordNet ID. synset_to_human: dict of synset to human labels, e.g., 'n02119022' --> 'red fox, Vulpes vulpes' Returns: List of human-readable strings corresponding to each synset.
codesearchnet
def finish(self, exitcode):
    """Clean up. Anything pushed after finish will be dropped.

    Args:
        exitcode: The exitcode of the watched process.
    """
    # Signal the worker thread, then wait for it to drain and stop.
    message = self.Finish(exitcode)
    self._queue.put(message)
    self._thread.join()
Cleans up. Anything pushed after finish will be dropped. Args: exitcode: The exitcode of the watched process.
codesearchnet
def set_current(self, current): self.current = current self.input = current.input self.output = current.output self.cmd = current.task_data['cmd'] if self.cmd and NEXT_CMD_SPLITTER in self.cmd: self.cmd, self.next_cmd = self.cmd.split(NEXT_...
Creates some aliases for attributes of ``current``. Args: current: :attr:`~zengine.engine.WFCurrent` object.
juraj-google-style
def to_cmd_args(mapping): sorted_keys = sorted(mapping.keys()) def arg_name(obj): string = _decode(obj) if string: return u'--%s' % string if len(string) > 1 else u'-%s' % string else: return u'' arg_names = [arg_name(argument) for argument in sorted...
Transform a dictionary in a list of cmd arguments. Example: >>>args = mapping.to_cmd_args({'model_dir': '/opt/ml/model', 'batch_size': 25}) >>> >>>print(args) ['--model_dir', '/opt/ml/model', '--batch_size', 25] Args: mapping (dict[str, object]): A Python mapping. Returns: (list): List of cmd arguments
juraj-google-style
class TimmWrapperImageProcessor(BaseImageProcessor): main_input_name = 'pixel_values' def __init__(self, pretrained_cfg: Dict[str, Any], architecture: Optional[str]=None, **kwargs): requires_backends(self, 'timm') super().__init__(architecture=architecture) self.data_config = timm.data....
Wrapper class for timm models to be used within transformers. Args: pretrained_cfg (`Dict[str, Any]`): The configuration of the pretrained model used to resolve evaluation and training transforms. architecture (`Optional[str]`, *optional*): Name of the architecture of the model.
github-repos
def from_lengths_and_angles(abc: List[float], ang: List[float]):
    """Create a Lattice using unit cell lengths and angles (in degrees).

    Args:
        abc (3x1 array): Lattice parameters, e.g. (4, 4, 5).
        ang (3x1 array): Lattice angles in degrees, e.g., (90, 90, 120).

    Returns:
        A Lattice with the specified lattice parameters.
    """
    # Forward the first three lengths and angles to the full constructor.
    return Lattice.from_parameters(*abc[:3], *ang[:3])
Create a Lattice using unit cell lengths and angles (in degrees). Args: abc (3x1 array): Lattice parameters, e.g. (4, 4, 5). ang (3x1 array): Lattice angles in degrees, e.g., (90,90,120). Returns: A Lattice with the specified lattice parameters.
juraj-google-style
def find_sanitiser_nodes(sanitiser, sanitisers_in_file):
    """Find nodes containing a particular sanitiser.

    Args:
        sanitiser (str): Sanitiser to look for.
        sanitisers_in_file (list[Node]): List of CFG nodes with the sanitiser.

    Yields:
        The CFG node of each entry whose trigger word matches ``sanitiser``.
    """
    for entry in sanitisers_in_file:
        if entry.trigger_word == sanitiser:
            yield entry.cfg_node
Find nodes containing a particular sanitiser. Args: sanitiser(string): sanitiser to look for. sanitisers_in_file(list[Node]): list of CFG nodes with the sanitiser. Returns: Iterable of sanitiser nodes.
codesearchnet
def load(self, data): resp = self.client.api.load_image(data) images = [] for chunk in resp: if 'stream' in chunk: match = re.search( r'(^Loaded image ID: |^Loaded image: )(.+)$', chunk['stream'] ) ...
Load an image that was previously saved using :py:meth:`~docker.models.images.Image.save` (or ``docker save``). Similar to ``docker load``. Args: data (binary): Image data to be loaded. Returns: (list of :py:class:`Image`): The images. Raises: :py:class:`docker.errors.APIError` If the server returns an error.
juraj-google-style
def _parse(self, stream): builddata = json.load(stream) log.debug('This is a JSON build file.') if 'targets' not in builddata: log.warn('Warning: No targets defined here.') return for tdata in builddata['targets']: target = addr...
Parse a JSON BUILD file. Args: builddata: dictionary of buildfile data reponame: name of the repo that it came from path: directory path within the repo
juraj-google-style
def _preprocess_conv3d_input(x, data_format): tf_data_format = 'NDHWC' if data_format == 'channels_first': if not _has_nchw_support(): x = array_ops.transpose(x, (0, 2, 3, 4, 1)) else: tf_data_format = 'NCDHW' return (x, tf_data_format)
Transpose and cast the input before the conv3d. Args: x: input tensor. data_format: string, `"channels_last"` or `"channels_first"`. Returns: A tensor.
github-repos
def get(self):
    """Get the current tracking value.

    Returns:
        The current tracked value; its type depends on the specific
        tracker implementation.

    Raises:
        NotImplementedError: Always; subclasses must override this method.
    """
    raise NotImplementedError()
Get the current tracking value. Returns: The current tracked value, the type of which depends on the specific tracker implementation.
github-repos
def run(self):
    """Perform the actual QChem run.

    Returns:
        subprocess.Popen: Handle used for monitoring the running job.
    """
    # Use a context manager so the parent's copy of the log file handle is
    # closed instead of leaked; the spawned child keeps its own duplicated
    # file descriptor, so it can still write to the log after we close ours.
    with open(self.qclog_file, 'w') as qclog:
        return subprocess.Popen(self.current_command, stdout=qclog)
Perform the actual QChem run. Returns: (subprocess.Popen) Used for monitoring.
codesearchnet
def num_nodes(self, leaves=True, internal=True): if not isinstance(leaves, bool): raise TypeError("leaves must be a bool") if not isinstance(internal, bool): raise TypeError("internal must be a bool") num = 0 for node in self.traverse_preorder(): ...
Compute the total number of selected nodes in this ``Tree`` Args: ``leaves`` (``bool``): ``True`` to include leaves, otherwise ``False`` ``internal`` (``bool``): ``True`` to include internal nodes, otherwise ``False`` Returns: ``int``: The total number of selected nodes in this ``Tree``
juraj-google-style
def plot_dendrogram(ax, obj, show_diameters=True): dnd = Dendrogram(obj, show_diameters=show_diameters) dnd.generate() _render_dendrogram(dnd, ax, 0.0) ax.set_title('Morphology Dendrogram') ax.set_xlabel('micrometers (um)') ax.set_ylabel('micrometers (um)') ax.set_aspect('auto') ax.legen...
Dendrogram of `obj` Args: obj: Neuron or tree (neurom.Neuron, neurom.Tree) show_diameters: boolean. Determines if node diameters will be shown or not.
codesearchnet
def handle_error(self, error, download_request):
    """Check what error occurred and dispatch to an appropriate handler.

    Args:
        error (Exception): The error that has occurred.
        download_request: The request which resulted in the error.
    """
    # Permission errors get dedicated certificate handling; anything else
    # falls through to the general download-error path.
    is_permission_error = getattr(error, 'errno', None) == errno.EACCES
    if is_permission_error:
        self.handle_certificate_problem(str(error))
    else:
        self.handle_general_download_error(str(error), download_request)
Checks what error occured and looks for an appropriate solution. Args: error: Exception The error that has occured. download_request: The request which resulted in the error.
juraj-google-style
def source(self, value=None): if value is not None: try: value = str(value) except ValueError: raise ValueError('value {} need to be of type str ' 'for field `source`'.format(value)) if ',' in value: ...
Corresponds to IDD Field `source` Args: value (str): value for IDD Field `source` if `value` is None it will not be checked against the specification and is assumed to be a missing value Raises: ValueError: if `value` is not a valid value
juraj-google-style
def GetPasswdMap(self, since=None):
    """Return the passwd map from this source.

    Args:
        since: Get data only changed since this timestamp (inclusive),
            or None for all data.

    Returns:
        Instance of passwd.PasswdMap.
    """
    getter = PasswdUpdateGetter()
    return getter.GetUpdates(
        self._GetClient(),
        self.conf['bucket'],
        self.conf['passwd_object'],
        since,
    )
Return the passwd map from this source. Args: since: Get data only changed since this timestamp (inclusive) or None for all data. Returns: instance of passwd.PasswdMap
github-repos
def sample_mgrid(self, mgrid: np.array) -> np.array: mgrid = np.ascontiguousarray(mgrid, np.float32) if (mgrid.shape[0] != self.dimensions): raise ValueError(('mgrid.shape[0] must equal self.dimensions, %r[0] != %r' % (mgrid.shape, self.dimensions))) out = np.ndarray(mgrid.shape[1:], np.float32) ...
Sample a mesh-grid array and return the result. The :any:`sample_ogrid` method performs better as there is a lot of overhead when working with large mesh-grids. Args: mgrid (numpy.ndarray): A mesh-grid array of points to sample. A contiguous array of type `numpy.float32` is preferred. Returns: numpy.ndarray: An arra...
codesearchnet
def angle_3points(p0, p1, p2):
    """Compute the angle in radians between three 3D points.

    Calculated as the angle between p1-p0 and p2-p0.

    Args:
        p0, p1, p2: Indexable objects with indices 0, 1, 2 corresponding
            to 3D cartesian coordinates.

    Returns:
        Angle in radians between (p1-p0) and (p2-p0);
        0.0 if p0 == p1 or p0 == p2.
    """
    u = vector(p1, p0)
    v = vector(p2, p0)
    # atan2(|u x v|, u . v) is numerically stable across the full range.
    sin_part = np.linalg.norm(np.cross(u, v))
    cos_part = np.dot(u, v)
    return math.atan2(sin_part, cos_part)
compute the angle in radians between three 3D points Calculated as the angle between p1-p0 and p2-p0. Args: p0, p1, p2: indexable objects with indices 0, 1, 2 corresponding to 3D cartesian coordinates. Returns: Angle in radians between (p1-p0) and (p2-p0). 0.0 if p0==p1 or p0==p2.
codesearchnet
def print_tree_deps_of(module, all_edges=None): if all_edges is None: all_edges = create_reverse_dependency_tree() tree = get_tree_starting_at(module, all_edges) lines = [(tree[0], tree[0])] for index in range(1, len(tree)): edges = tree[index] start_edges = {edge[0] for edge in ...
Prints the tree of modules depending on a given module. Args: module (`str`): The module that will be the root of the subtree we want. all_eges (`List[Tuple[str, str]]`, *optional*): The list of all edges of the tree. Will be set to `create_reverse_dependency_tree()` if not passed.
github-repos
def trade_day(dt, cal='US'):
    """Latest trading day with respect to the given dt.

    Args:
        dt: Date of reference.
        cal: Trading calendar.

    Returns:
        pd.Timestamp: Last trading day.

    Examples:
        >>> trade_day('2018-12-25').strftime('%Y-%m-%d')
        '2018-12-24'
    """
    from xone import calendar
    ref = pd.Timestamp(dt).date()
    # Look back over a 10-day window so weekends/holidays are covered.
    window = calendar.trading_dates(start=ref - pd.Timedelta('10D'), end=ref, calendar=cal)
    return window[-1]
Latest trading day w.r.t given dt Args: dt: date of reference cal: trading calendar Returns: pd.Timestamp: last trading day Examples: >>> trade_day('2018-12-25').strftime('%Y-%m-%d') '2018-12-24'
juraj-google-style
def set_source_interface(self, name):
    """Assign the NTP source on the node.

    Args:
        name (str): The interface port that specifies the NTP source.

    Returns:
        True if the operation succeeds, otherwise False.
    """
    command = self.command_builder('ntp source', value=name)
    return self.configure(command)
Assign the NTP source on the node Args: name (string): The interface port that specifies the NTP source. Returns: True if the operation succeeds, otherwise False.
codesearchnet
def _inter_manager_operations(self, other, how_to_join, func): reindexed_self, reindexed_other_list, joined_index = self.copartition( 0, other, how_to_join, False ) reindexed_other = reindexed_other_list[0] new_columns = self._join_index_objects( ...
Inter-data operations (e.g. add, sub). Args: other: The other Manager for the operation. how_to_join: The type of join to join to make (e.g. right, outer). Returns: New DataManager with new data and index.
juraj-google-style
def GetArtifactCollectorArgs(flow_args, knowledge_base): args = rdf_artifacts.ClientArtifactCollectorArgs() args.knowledge_base = knowledge_base args.apply_parsers = flow_args.apply_parsers args.ignore_interpolation_errors = flow_args.ignore_interpolation_errors args.max_file_size = flow_args.max_file_siz...
Prepare bundle of artifacts and their dependencies for the client. Args: flow_args: An `ArtifactCollectorFlowArgs` instance. knowledge_base: contains information about the client Returns: rdf value object containing a list of extended artifacts and the knowledge base
juraj-google-style
def combine_columns(columns):
    """Combine ``columns`` into a single string.

    Example:
        >>> combine_columns(['eape', 'xml'])
        'example'

    Args:
        columns (iterable): Ordered columns to combine.

    Returns:
        String of combined columns.
    """
    # Interleave characters column-by-column, dropping the None padding
    # (and any other falsy fill) produced by zip_longest.
    interleaved = itertools.chain.from_iterable(itertools.zip_longest(*columns))
    return ''.join(ch for ch in interleaved if ch)
Combine ``columns`` into a single string. Example: >>> combine_columns(['eape', 'xml']) 'example' Args: columns (iterable): ordered columns to combine Returns: String of combined columns
codesearchnet
def at(self, instant):
    """Iterate (in chronological order) over all events occurring at `instant`.

    Args:
        instant (Arrow object): Point in time to test each event against.

    Yields:
        Every event whose [begin, end] interval contains ``instant``.
    """
    for candidate in self:
        if candidate.begin <= instant <= candidate.end:
            yield candidate
Iterates (in chronological order) over all events that are occurring during `instant`. Args: instant (Arrow object)
juraj-google-style
def list_experiments(self, collection_name):
    """List all experiments that belong to a collection.

    Args:
        collection_name (str): Name of the parent collection.

    Returns:
        list: The experiments in the collection.

    Raises:
        requests.HTTPError: On failure.
    """
    resource = ExperimentResource(
        name='', collection_name=collection_name, coord_frame='foo')
    return self._list_resource(resource)
List all experiments that belong to a collection. Args: collection_name (string): Name of the parent collection. Returns: (list) Raises: requests.HTTPError on failure.
juraj-google-style
def var(x, axis=None, keepdims=False):
    """Variance of a tensor, alongside the specified axis.

    Args:
        x: A tensor or variable.
        axis: An integer, the axis to compute the variance over.
        keepdims: A boolean; when `False` the rank of the tensor is reduced
            by 1, when `True` the reduced dimension is retained with length 1.

    Returns:
        A tensor with the variance of elements of `x`.
    """
    # Booleans cannot be averaged directly; cast to the default float type.
    if x.dtype.base_dtype == dtypes_module.bool:
        x = math_ops.cast(x, floatx())
    return math_ops.reduce_variance(x, axis=axis, keepdims=keepdims)
Variance of a tensor, alongside the specified axis. Args: x: A tensor or variable. axis: An integer, the axis to compute the variance. keepdims: A boolean, whether to keep the dimensions or not. If `keepdims` is `False`, the rank of the tensor is reduced by 1. If `keepdims` is `True`, the reduced dimension is retained...
github-repos
def make_lda_variational(activation, num_topics, layer_sizes): encoder_net = tf.keras.Sequential() for num_hidden_units in layer_sizes: encoder_net.add(tf.keras.layers.Dense(num_hidden_units, activation=activation, kernel_initializer=tf.compat.v1.glorot_normal_initializer())) encoder_net.add(tf.kera...
Creates the variational distribution for LDA. Args: activation: Activation function to use. num_topics: The number of topics. layer_sizes: The number of hidden units per layer in the encoder. Returns: lda_variational: A function that takes a bag-of-words Tensor as input and returns a distribution over topics.
codesearchnet
def get_nested_dmaps(dmap):
    """Recurse a DynamicMap to find DynamicMap inputs.

    Args:
        dmap: DynamicMap to recurse to look for DynamicMap inputs.

    Returns:
        List of unique DynamicMap instances that were found.
    """
    if not isinstance(dmap, DynamicMap):
        return []
    found = [dmap]
    for source in dmap.callback.inputs:
        found += get_nested_dmaps(source)
    # Deduplicate while discarding order, matching set semantics.
    return list(set(found))
Recurses DynamicMap to find DynamicMaps inputs Args: dmap: DynamicMap to recurse to look for DynamicMap inputs Returns: List of DynamicMap instances that were found
codesearchnet
def custom_line_color_map(self, values):
    """Set the custom line color map.

    Args:
        values (list): List of colors.

    Raises:
        TypeError: If ``values`` is not a list.
    """
    if isinstance(values, list):
        self.options["custom_line_color_map"] = values
    else:
        raise TypeError("custom_line_color_map must be a list")
Set the custom line color map. Args: values (list): list of colors. Raises: TypeError: Custom line color map must be a list.
juraj-google-style
def _GetNameFromProduct(self): product = (self.product or '') product = product.split(' ') product_lower_case = [segment.lower() for segment in product] number_of_segments = len(product) if ('windows' in product_lower_case): segment_index = (product_lower_case.index('windows') + 1) i...
Determines the predefined operating system name from the product. Returns: str: operating system name, such as "macOS Mojave" or "Windows XP" or None if the name cannot be determined. This value is used to programmatically link a parser preset to an operating system and therefore must be one of predefined values.
codesearchnet
def _apply_gradients_and_copy(self, opt, raw_grad_list, ps_var_grads): with tf.name_scope('apply_gradients'): var_update_ops = [] for vid, (g, v) in enumerate(ps_var_grads): apply_gradient_op = opt.apply_gradients([(g, v)]) ...
Apply averaged gradients to ps vars, and then copy the updated variables back to each tower. Args: raw_grad_list: Ngpu x Nvar x 2 gradient list from all towers ps_var_grads: Nvar x 2 (grad, ps_var) Returns: list of copy ops
juraj-google-style
def run(self, fetches, feed_dict=None, options=None, run_metadata=None):
    """Run ops in the monitored session.

    This method is completely compatible with the `tf.Session.run()` method.

    Args:
        fetches: Same as `tf.Session.run()`.
        feed_dict: Same as `tf.Session.run()`.
        options: Same as `tf.Session.run()`.
        run_metadata: Same as `tf.Session.run()`.

    Returns:
        Same as `tf.Session.run()`.
    """
    # Pure delegation to the wrapped session.
    return self._sess.run(
        fetches,
        feed_dict=feed_dict,
        options=options,
        run_metadata=run_metadata,
    )
Run ops in the monitored session. This method is completely compatible with the `tf.Session.run()` method. Args: fetches: Same as `tf.Session.run()`. feed_dict: Same as `tf.Session.run()`. options: Same as `tf.Session.run()`. run_metadata: Same as `tf.Session.run()`. Returns: Same as `tf.Session.run()`.
github-repos
def get_incomplete_penetrance_genes(hpo_lines):
    """Get a set with all genes that have incomplete penetrance according to HPO.

    Args:
        hpo_lines (iterable(str)): Lines from the HPO genes file.

    Returns:
        set: HGNC symbols of all genes with incomplete penetrance.
    """
    genes = parse_hpo_genes(hpo_lines)
    return {
        symbol
        for symbol, info in genes.items()
        if info.get('incomplete_penetrance')
    }
Get a set with all genes that have incomplete penetrance according to HPO Args: hpo_lines(iterable(str)) Returns: incomplete_penetrance_genes(set): A set with the hgnc symbols of all genes with incomplete penetrance
juraj-google-style
def add_menu_item(self, command, title):
    """Add a mouse right-click menu item.

    Args:
        command (callable): Function called when the item is activated
            (left mouse click on the title).
        title (str): Label shown in the menu.
    """
    item = Gtk.MenuItem()
    item.set_label(title)
    item.connect('activate', command)
    self.menu.append(item)
    # Re-show so the newly added item becomes visible immediately.
    self.menu.show_all()
Add mouse right click menu item. Args: command (callable): function that will be called after left mouse click on title title (str): label that will be shown in menu
juraj-google-style
def _find_paths_referenced(self) -> Tuple[Optional[str], Collection[str]]:
Finds paths for any elements referenced in this expression. Recursively builds paths by visiting each node in the tree. Returns a tuple of (context, paths) where `context` is an identifier which may be part of a dotted path completed by its parent and `paths` are the dotted paths found so far. Implementations must re...
github-repos
def open(self, path, mime_type='application/octet-stream', compression_type=CompressionTypes.AUTO) -> BinaryIO:
    """Return a read channel for the given file path.

    Args:
        path: String path of the file object to read from the system.
        mime_type: MIME type specifying the type of content in the file.
        compression_type: Type of compression to be used for this object.

    Returns:
        File handle with a close function for the user to use.
    """
    # Read-only binary mode; decompression is driven by compression_type.
    return self._path_open(path, 'rb', mime_type, compression_type)
Returns a read channel for the given file path. Args: path: string path of the file object to be read from the system mime_type: MIME type to specify the type of content in the file object compression_type: Type of compression to be used for this object Returns: file handle with a close function for the user to use
github-repos
def process_input_data(filename, imager, grid_data, grid_norm, grid_weights): ms = oskar.MeasurementSet.open(filename) block_start = 0 num_rows = ms.num_rows num_baselines = ((ms.num_stations * (ms.num_stations - 1)) while (block_start < num_rows): block_size = (num_rows - block_start) ...
Reads visibility data from a Measurement Set. The visibility grid or weights grid is updated accordingly. Visibility data are read from disk in blocks of size num_baselines. Args: filename (str): Name of Measurement Set to open. imager (oskar.Imager): Handle to configured imager. grid_...
codesearchnet
def get_image_features(self, pixel_values: torch.FloatTensor, image_sizes: torch.Tensor, vision_feature_layer: Optional[Union[int, List[int]]]=None, **kwargs): vision_feature_layer = vision_feature_layer if vision_feature_layer is not None else self.config.vision_feature_layer kwargs = {k: v for k, v in kwargs....
Obtains image last hidden states from the vision tower and apply multimodal projection. Args: pixel_values (`torch.FloatTensor]` of shape `(batch_size, channels, height, width)`): The tensors corresponding to the input images. vision_feature_layer (`Union[int, List[int]]`, *optional*): The index of the layer to select...
github-repos
def dump_tree(self, statement=None, indent_level=0): out = u"" indent = u" "*indent_level if statement is None: for root_statement in self.statements: out += self.dump_tree(root_statement, indent_level) else: out += indent + str(stateme...
Dump the AST for this parsed file. Args: statement (SensorGraphStatement): the statement to print if this function is called recursively. indent_level (int): The number of spaces to indent this statement. Used for recursively printing blocks of statements. Returns: str: The AST for this parsed sg file as a nested tre...
juraj-google-style
def screenrecord(self, bit_rate: int = 5000000, time_limit: int = 180, filename: _PATH = '/sdcard/demo.mp4') -> None:
    """Record the display of devices running Android 4.4 (API level 19) and higher.

    Args:
        bit_rate: Higher bit rates improve video quality but produce
            larger movie files.
        time_limit: Maximum recording time in seconds; the maximum value
            is 180 (3 minutes).
        filename: On-device path where the recording is written.
    """
    args = [
        '-s', self.device_sn,
        'shell', 'screenrecord',
        '--bit-rate', str(bit_rate),
        '--time-limit', str(time_limit),
        filename,
    ]
    self._execute(*args)
Recording the display of devices running Android 4.4 (API level 19) and higher. Args: bit_rate:You can increase the bit rate to improve video quality, but doing so results in larger movie files. time_limit: Sets the maximum recording time, in seconds, and the maximum value is 180 (3 minutes).
juraj-google-style
def add(reader, writer, column, start, stop, value):
    """Add a value to one column over a range of rows.

    Args:
        reader: A FileRecordStream object with input data.
        writer: A FileRecordStream object to write output data to.
        column: The column of data to modify.
        start: The first row in the range to modify.
        stop: The last row in the range to modify (inclusive).
        value: The value to add.
    """
    for index, row in enumerate(reader):
        if start <= index <= stop:
            # Coerce the stored field to value's type before adding.
            row[column] = type(value)(row[column]) + value
        writer.appendRecord(row)
Adds a value over a range of rows. Args: reader: A FileRecordStream object with input data. writer: A FileRecordStream object to write output data to. column: The column of data to modify. start: The first row in the range to modify. stop: The last row in the range to modify (inclusive). value: The value to add.
codesearchnet
def _append_commands(dct, module_name, commands): for command in commands: entry_point = '{command}{subcommand} = {module}{callable}'.format(command=command.command, subcommand=(':{}'.format(command.subcommand) if command.subcommand else ''), module=module_name, callable=(':{}'.format(command.callable) if c...
Append entry point strings representing the given Command objects. Args: dct: The dictionary to append with entry point strings. Each key will be a primary command with a value containing a list of entry point strings representing a Command. module_name: The name of the module in which the command object resides. comm...
codesearchnet
def get_last(self, num=10):
    """Return the last `num` HN stories.

    Downloads the newest HN articles and returns them as Item objects.

    Args:
        num (int): Number of most-recent items to fetch.

    Returns:
        list: Item objects for the newest stories.
    """
    newest = self.get_max_item()
    item_ids = range(newest - num + 1, newest + 1)
    urls = [urljoin(self.item_url, f'{item_id}.json') for item_id in item_ids]
    responses = self._run_async(urls=urls)
    # Drop empty responses (deleted/missing items).
    return [Item(response) for response in responses if response]
Returns last `num` of HN stories Downloads all the HN articles and returns them as Item objects Returns: `list` object containing Item objects for the HN stories.
codesearchnet
def read_raster(raster_file): ds = gdal_Open(raster_file) band = ds.GetRasterBand(1) data = band.ReadAsArray() xsize = band.XSize ysize = band.YSize nodata_value = band.GetNoDataValue() geotrans = ds.GetGeoTransform() dttype = band.DataType srs = osr_SpatialReference() srs.Import...
Read raster by GDAL. Args: raster_file: raster file path. Returns: Raster object.
codesearchnet
def list_mapped_classes(): cls_dict = {key: value for key, value in MODULE.rdfclass.__dict__.items() if not isinstance(value, RdfConfigManager) and key not in ['properties'] and hasattr(value, 'es_defs') ...
Returns all the rdfclasses that have and associated elasticsearch mapping Args: None
juraj-google-style
def parse_topology(ml_log, log=None, ml_version='1.3.4BETA', print_output=False): topology = {'manifold': True, 'non_manifold_E': 0, 'non_manifold_V': 0} with open(ml_log) as fread: for line in fread: if ('V:' in line): vert_edge_face = line.replace('V:', ' ').replace('E:', '...
Parse the ml_log file generated by the measure_topology function. Args: ml_log (str): MeshLab log file to parse log (str): filename to log output Returns: dict: dictionary with the following keys: vert_num (int): number of vertices edge_num (int): number of edges face_num (int): number of faces unref_vert_num (int): ...
codesearchnet
def WriteMap(self, map_data=None, force_write=False): if map_data is None: writable_map = self.data else: writable_map = map_data entries_written = self.Write(writable_map) if entries_written is None: self.log.warning('cache write failed, exiting') return 1 if force_w...
Write a map to disk. Args: map_data: optional Map object to overwrite our current data with. force_write: optional flag to indicate verification checks can be ignored. Returns: 0 if successful, 1 if not
github-repos
def seek(self, offset, whence=os.SEEK_SET): self._checkClosed() if whence == os.SEEK_SET: self._position = offset elif whence == os.SEEK_CUR: self._position += offset elif whence == os.SEEK_END: self._position = self._downloader.size + offset else: raise ValueError('W...
Set the stream's current offset. Note if the new offset is out of bound, it is adjusted to either 0 or EOF. Args: offset: seek offset as number. whence: seek mode. Supported modes are os.SEEK_SET (absolute seek), os.SEEK_CUR (seek relative to the current position), and os.SEEK_END (seek relative to the end, offset sh...
github-repos
def api_client(connection, client_class=xbahn.api.Client):
    """Establish an API client for one-way communication with an API Server.

    Arguments:
        connection (xbahn.connection.Connection): Connection used for both
            sending and receiving.

    Keyword Arguments:
        client_class (xbahn.api.Client): If supplied, use this class to
            instantiate the client object. If omitted, xbahn.api.Client
            is used.

    Returns:
        client_class: The client instance wired to the connection.
    """
    link = xbahn.connection.link.Link(receive=connection, send=connection)
    return client_class(link=link)
Establishes an API client for one-way communication connection with an API Server Arguments: - connection (xbahn.connection.Connection) Keyword Arguments: - client_class (xbahn.api.Client): if supplied use this class to initantiate the client object. If omitted will use xbahn.api.Client. Returns: - client_class: cli...
juraj-google-style
def get_max_muO2(self, min_voltage=None, max_voltage=None): data = [] for pair in self._select_in_voltage_range(min_voltage, max_voltage): if pair.muO2_discharge is not None: data.extend([d['chempot'] for d in pair.muO2_discharge]) if pair.muO2_charge is ...
Maximum critical oxygen chemical potential along path. Args: min_voltage: The minimum allowable voltage. max_voltage: The maximum allowable voltage. Returns: Maximum critical oxygen chemical of all compounds along the insertion path (a subset of the path can be chosen by the optional arguments).
juraj-google-style
def from_attrs(cls, desired_attrs=None, except_attrs=None, critical_attrs=None): if isinstance(desired_attrs, roids.OID): desired_attrs = set([desired_attrs]) if isinstance(except_attrs, roids.OID): except_attrs = set([except_attrs]) if isinsta...
Get a generator of mechanisms supporting the specified attributes. See RFC 5587's :func:`indicate_mechs_by_attrs` for more information. Args: desired_attrs ([OID]): Desired attributes except_attrs ([OID]): Except attributes critical_attrs ([OID]): Critical attributes Returns: [Mechanism]: A set of mechanisms having t...
juraj-google-style
def _VerifyValues(self, pool_func, input_sizes, ksize, strides, padding, expected, use_gpu, v2=False, one_dim=False, use_negative_input=False): for data_format, use_gpu_2 in GetTestConfigs(include_nchw_vect_c=True, one_dimensional=one_dim): if use_gpu_2 == use_gpu: self._VerifyOneTest(pool_func,...
Verifies the output values of the pooling function. Args: pool_func: Function to be called, co.MaxPool, co.AvgPool, or the Lua version. input_sizes: Input tensor dimensions. ksize: The kernel size dimensions strides: The stride dimensions padding: Padding type. expected: An array containing the expected operation outp...
github-repos
def barycentric_coords(coords, simplex): coords = np.atleast_2d(coords) t = (np.transpose(simplex[(:(- 1), :)]) - np.transpose(simplex[((- 1), :)])[(:, None)]) all_but_one = np.transpose(np.linalg.solve(t, np.transpose((coords - simplex[(- 1)])))) last_coord = (1 - np.sum(all_but_one, axis=(- 1))[(:, No...
Converts a list of coordinates to barycentric coordinates, given a simplex with d+1 points. Only works for d >= 2. Args: coords: list of n coords to transform, shape should be (n,d) simplex: list of coordinates that form the simplex, shape should be (d+1, d) Returns: a LIST of barycentric coordinates (even if the ori...
codesearchnet
def compute_verdict(self, results): if (results['class'] in self.reject_classes): threshold = self.reject_classes[results['class']] if (float(results['confidence']) >= threshold): logger.debug('<{0}> Suggesting to reject the message based on DSPAM results: user={1[user]}, class={1[class]...
Match results to the configured reject, quarantine and accept classes, and return a verdict based on that. The verdict classes are matched in the order: reject_classes, quarantine_classes, accept_classes. This means that you can configure different verdicts for different confidence results, for instance: reject_classe...
codesearchnet
def get_dns_zone_ids(env='dev', facing='internal'): client = boto3.Session(profile_name=env).client('route53') zones = client.list_hosted_zones_by_name(DNSName='.'.join([env, DOMAIN])) zone_ids = [] for zone in zones['HostedZones']: LOG.debug('Found Hosted Zone: %s', zone) if fac...
Get Route 53 Hosted Zone IDs for _env_. Args: env (str): Deployment environment. facing (str): Type of ELB, external or internal. Returns: list: Hosted Zone IDs for _env_. Only *PrivateZone* when _facing_ is internal.
juraj-google-style
def aggregate_single_gradient_using_copy(grad_and_vars, use_mean, check_inf_nan): grads = [g for g, _ in grad_and_vars] grad = math_ops.add_n(grads) if use_mean and len(grads) > 1: grad = array_ops.multiply(grad, 1.0 / len(grads)) v = grad_and_vars[0][1] if check_inf_nan: has_nan_or_...
Calculate the average gradient for a shared variable across all replicas. Note that this function provides a synchronization point across all replicas. Args: grad_and_vars: A list or tuple of (gradient, variable) tuples. Each (gradient, variable) pair within the outer list represents the gradient of the variable calc...
github-repos
def get_asset_tensors(export_dir, meta_graph_def_to_load, import_scope=None): collection_def = meta_graph_def_to_load.collection_def asset_tensor_dict = {} asset_protos = [] if meta_graph_def_to_load.asset_file_def: asset_protos = meta_graph_def_to_load.asset_file_def elif constants.ASSETS_K...
Gets the asset tensors, if defined in the meta graph def to load. Args: export_dir: Directory where the SavedModel is located. meta_graph_def_to_load: The meta graph def from the SavedModel to be loaded. import_scope: Optional `string` -- if specified, prepend this followed by '/' to all returned asset tensor names. ...
github-repos
def get_dataframe(self, force_computation=False):
    """Return the fetched, preprocessed and transformed DataFrame.

    The result is cached on ``self.df``; subsequent calls return the
    cached frame unless recomputation is forced.

    Args:
        force_computation (bool, optional): Defaults to False. If True,
            recompute the DataFrame on every call instead of reusing the
            cached one.

    Returns:
        pandas.DataFrame: Preprocessed and transformed DataFrame.
    """
    # Recompute when nothing is cached yet or when explicitly requested.
    if self.df is None or force_computation:
        self.df = self.fetch(self.context)
        self.df = self.preprocess(self.df)
        self.transform(self.df)
    return self.df
Preprocesses then transforms the return of fetch(). Args: force_computation (bool, optional) : Defaults to False. If set to True, forces the computation of DataFrame at each call. Returns: pandas.DataFrame: Preprocessed and transformed DataFrame.
juraj-google-style
def _get_block_publisher(self, state_hash): state_view = self._state_view_factory.create_view(state_hash) try: class BatchPublisher: def send(self, transactions): ...
Returns the block publisher based on the consensus module set by the "sawtooth_settings" transaction family. Args: state_hash (str): The current state root hash for reading settings. Raises: InvalidGenesisStateError: if any errors occur getting the BlockPublisher.
juraj-google-style
def sendfrom(self, user_id, dest_address, amount, minconf=1): amount = Decimal(amount).quantize(self.quantum, rounding=ROUND_HALF_EVEN) txhash = self.rpc.call('sendfrom', user_id, dest_address, float(str(amount)), minconf) self.logger.debug(('Send %s %s from %s to %s' % (str(amount), self.coin, str(user_id)...
Send coins from user's account. Args: user_id (str): this user's unique identifier dest_address (str): address which is to receive coins amount (str or Decimal): amount to send (eight decimal points) minconf (int): ensure the account has a valid balance using this many confirmations (default=1) Returns: str: transact...
codesearchnet
def merge(self: 'FetchResponse', other: 'FetchResponse') \
        -> 'FetchResponse':
    """Merge the other FETCH response into a new response, adding any
    fetch attributes that do not already exist in this FETCH response.

    For example::

        * 3 FETCH (UID 119)
        * 3 FETCH (FLAGS (\\Seen))

    Would merge into::

        * 3 FETCH (UID 119 FLAGS (\\Seen))

    Args:
        other: The other response to merge.

    Raises:
        ValueError: If the two responses have different sequence numbers.
    """
    if other.seq != self.seq:
        raise ValueError(other)
    # Attributes already present in this response win; other only adds.
    merged = OrderedDict(self.data)
    merged.update(other.data)
    return FetchResponse(self.seq, [*merged.items()])
Merge the other FETCH response, adding any fetch attributes that do not already exist in this FETCH response. For example:: * 3 FETCH (UID 119) * 3 FETCH (FLAGS (\\Seen)) Would merge into:: * 3 FETCH (UID 119 FLAGS (\\Seen)) Args: other: The other response to merge.
juraj-google-style
def is_ipython_subprocess() -> bool:
    """Check whether we run in a sub-process launched from an `ipython` terminal.

    Returns:
        `False` always in this implementation.
    """
    return False
Check if we are in a sub-process launched from within a `ipython` terminal. Returns: `True` only if we are in ipython terminal (e.g. `ml_python`) and inside a sub-process.
github-repos
def parse_input(self): if self._text: lines = iter(self._text.splitlines()) elif self._file: lines = self._file else: lines = () sample_lines = [] for line in lines: if (len(sample_lines) > 100): break sample_lines.append(line) lines = itertool...
Parse the listings. Returns: iter: An iterable of :class:`.ftp.ls.listing.FileEntry`
codesearchnet
def ScanForStorageMediaImage(self, source_path_spec): try: type_indicators = analyzer.Analyzer.GetStorageMediaImageTypeIndicators( source_path_spec, resolver_context=self._resolver_context) except RuntimeError as exception: raise errors.BackEndError(( 'Unable to process sour...
Scans the path specification for a supported storage media image format. Args: source_path_spec (PathSpec): source path specification. Returns: PathSpec: storage media image path specification or None if no supported storage media image type was found. Raises: BackEndError: if the source cannot be scanned or more th...
juraj-google-style
def new_netting_channel( self, partner: Address, settle_timeout: int, given_block_identifier: BlockSpecification, ) -> ChannelID: checking_block = self.client.get_checking_block() self._new_channel_preconditions( partner=partner, ...
Creates a new channel in the TokenNetwork contract. Args: partner: The peer to open the channel with. settle_timeout: The settle timeout to use for this channel. given_block_identifier: The block identifier of the state change that prompted this proxy action Returns: The ChannelID of the new netting channel.
juraj-google-style
def get_results_as_numpy_array(self, parameter_space, result_parsing_function, runs):
    """Return the results for the desired parameter space as a numpy array.

    Args:
        parameter_space (dict): parameter/list-of-values pairs describing
            the slice of results to extract.
        result_parsing_function (function): user-defined function, taking a
            result dictionary as argument, used to parse each result.
        runs: number of runs to include per parameter combination.

    Returns:
        numpy.ndarray: the parsed results arranged over the parameter space.
    """
    all_results = self.db.get_complete_results()
    space = self.get_space(all_results, {}, parameter_space, runs,
                           result_parsing_function)
    return np.array(space)
Return the results relative to the desired parameter space in the form of a numpy array. Args: parameter_space (dict): dictionary containing parameter/list-of-values pairs. result_parsing_function (function): user-defined function, taking a result dictionary as argument, that can be used to parse the result files and ...
codesearchnet
def default_batch_size(self) -> int:
    """The default batch size to use if there is no other indication.

    Returns:
        int: the fixed fallback batch size (> 0) taken from
        ``OnnxConfig.default_fixed_batch``.
    """
    return OnnxConfig.default_fixed_batch
The default batch size to use if there is no other indication. Returns: Integer > 0
github-repos
def append(self, annotation):
    """Add an annotation, keyed by its id, and flag the node as dirty.

    Args:
        annotation (gkeepapi.node.Annotation): An Annotation object.

    Returns:
        gkeepapi.node.Annotation: The same Annotation that was passed in.
    """
    # Mark the container as modified and register the annotation.
    self._dirty = True
    self._annotations[annotation.id] = annotation
    return annotation
Add an annotation. Args: annotation (gkeepapi.node.Annotation): An Annotation object. Returns: gkeepapi.node.Annotation: The Annotation.
juraj-google-style
def pack_small_tensors(tower_grads, max_bytes=0): assert max_bytes >= 0 orig_grads = [g for g, _ in tower_grads[0]] assert all(g.dtype == tf.float32 for g in orig_grads) sizes = [4 * g.shape.num_elements() for g in orig_grads] print_stats(sizes) small_ranges = [] large_indices = []...
Concatenate gradients together more intelligently. Does binpacking Args: tower_grads: List of lists of (gradient, variable) tuples. max_bytes: Int giving max number of bytes in a tensor that may be considered small.
juraj-google-style
def init_algebra(*, default_hs_cls='LocalSpace'): from qnet.algebra.core.hilbert_space_algebra import LocalSpace from qnet.algebra.core.abstract_quantum_algebra import QuantumExpression default_hs_cls = getattr(importlib.import_module('qnet'), default_hs_cls) if issubclass(default_hs_cls, LocalSpace): ...
Initialize the algebra system Args: default_hs_cls (str): The name of the :class:`.LocalSpace` subclass that should be used when implicitly creating Hilbert spaces, e.g. in :class:`.OperatorSymbol`
codesearchnet
def add_documents(self, docs):
    """Update the token counter from a collection of documents.

    Each document is a list of tokens; every token is normalized through
    ``self.process_token`` before counting.

    Args:
        docs (list): documents to add.
    """
    for tokens in docs:
        self._token_count.update(self.process_token(tok) for tok in tokens)
Update dictionary from a collection of documents. Each document is a list of tokens. Args: docs (list): documents to add.
codesearchnet
def parse_data_types_from_doc_ref(api, doc, namespace_context, ignore_missing_entries=False): output = [] (data_types, routes_by_ns) = parse_data_types_and_routes_from_doc_ref(api, doc, namespace_context, ignore_missing_entries=ignore_missing_entries) for d in data_types: output.append(d) for (n...
Given a documentation string, parse it and return all references to other data types. If there are references to routes, include also the data types of those routes. Args: - api: The API containing this doc ref. - doc: The documentation string to parse. - namespace_context: The namespace name relative to this document...
codesearchnet
def _find_max_beta_token_len():
    """Find the maximum length of a single betacode token.

    Returns:
        The length of the longest key in the betacode map, which
        corresponds to the longest single betacode token; -1 when the map
        is empty (matching the original sentinel).
    """
    return max((len(beta) for beta in _map.BETACODE_MAP), default=-1)
Finds the maximum length of a single betacode token. Returns: The length of the longest key in the betacode map, which corresponds to the longest single betacode token.
codesearchnet
def _validate_alias_name(alias_name):
    """Check if the alias name is valid.

    Args:
        alias_name: The name of the alias to validate.

    Raises:
        CLIError: If the name is empty or does not start with a letter.
    """
    if not alias_name:
        raise CLIError(EMPTY_ALIAS_ERROR)
    starts_with_letter = re.match('^[a-zA-Z]', alias_name) is not None
    if not starts_with_letter:
        raise CLIError(INVALID_STARTING_CHAR_ERROR.format(alias_name[0]))
Check if the alias name is valid. Args: alias_name: The name of the alias to validate.
codesearchnet
def get_objects_from_form(variant_ids, form_fields, object_type): submission_fields = [] if (object_type == 'variant'): submission_fields = CLINVAR_HEADER else: submission_fields = CASEDATA_HEADER submission_objects = [] for variant_id in variant_ids: subm_obj = {} if...
Extract the objects to be saved in the clinvar database collection. object_type param specifies if these objects are variant or casedata objects Args: variant_ids(list): list of database variant ids form_fields(dict): it's the submission form dictionary. Keys have the same names as CLINVAR_HEADER and CASEDATA_HEADER o...
codesearchnet
def piece_to_id(input, model_file=None, model_proto=None, name=None):
    """Converts piece into vocabulary id.

    Args:
        input: An arbitrary tensor of string.
        model_file: The sentencepiece model file path.
        model_proto: The sentencepiece model serialized proto. Either
            `model_file` or `model_proto` must be set.
        name: The name argument that is passed to the op function.

    Returns:
        A tensor of int32 with the vocabulary ids.
    """
    op = _gen_sentencepiece_processor_op.sentencepiece_piece_to_id
    return op(input, model_file=model_file, model_proto=model_proto, name=name)
Converts piece into vocabulary id. Args: input: An arbitrary tensor of string. model_file: The sentencepiece model file path. model_proto: The sentencepiece model serialized proto. Either `model_file` or `model_proto` must be set. name: The name argument that is passed to the op function. Returns: A tensor of int32 wi...
juraj-google-style
def __init__(self, *schedules: List[Union[ScheduleComponent, Tuple[int, ScheduleComponent]]], name: str = None): self._name = name try: timeslots = [] children = [] for sched_pair in schedules: if not isinstan...
Create empty schedule. Args: *schedules: Child Schedules of this parent Schedule. May either be passed as the list of schedules, or a list of (start_time, schedule) pairs name: Name of this schedule Raises: PulseError: If timeslots intercept.
juraj-google-style
def make_client(servers: Sequence[str], *args, **kwargs) -> GMatrixClient: if (len(servers) > 1): sorted_servers = [server_url for (server_url, _) in sort_servers_closest(servers)] log.info('Automatically selecting matrix homeserver based on RTT', sorted_servers=sorted_servers) elif (len(servers...
Given a list of possible servers, chooses the closest available and create a GMatrixClient Params: servers: list of servers urls, with scheme (http or https) Rest of args and kwargs are forwarded to GMatrixClient constructor Returns: GMatrixClient instance for one of the available servers
codesearchnet
def migrate_config(self, current_config, config_to_migrate, always_update, update_defaults):
    """Migrate this option's value from current_config into config_to_migrate.

    Searches current_config for a value under any of the configured
    possible names, then writes it into config_to_migrate according to
    the update flags.

    Args:
        current_config: Source configuration object to search.
        config_to_migrate: Destination configuration object to update.
        always_update: Whether to overwrite an existing value.
        update_defaults: Whether values matching previous defaults are
            replaced with the new default.
    """
    self._update_config(
        config_to_migrate,
        self._search_config_for_possible_names(current_config),
        always_update,
        update_defaults,
    )
Migrate config value in current_config, updating config_to_migrate. Given the current_config object, it will attempt to find a value based on all the names given. If no name could be found, then it will simply set the value to the default. If a value is found and is in the list of previous_defaults, it will either up...
codesearchnet
def sort_imports_in_all_inits(check_only=True): failures = [] for root, _, files in os.walk(PATH_TO_TRANSFORMERS): if '__init__.py' in files: result = sort_imports(os.path.join(root, '__init__.py'), check_only=check_only) if result: failures = [os.path.join(root, ...
Sort the imports defined in the `_import_structure` of all inits in the repo. Args: check_only (`bool`, *optional*, defaults to `True`): Whether or not to just check (and not auto-fix) the init.
github-repos
def __getitem__(self, key): if self._dims is not None: if isinstance(key, slice): return TensorShape(self._dims[key]) elif self._v2_behavior: return self._dims[key] else: return self.dims[key] elif isinstance(key, slice): start = key.start if k...
Returns the value of a dimension or a shape, depending on the key. Args: key: If `key` is an integer, returns the dimension at that index; otherwise if `key` is a slice, returns a TensorShape whose dimensions are those selected by the slice from `self`. Returns: An integer if `key` is an integer, or a `TensorShape` i...
github-repos
def translations(self, **kwargs):
    """Get the translations for a specific movie id.

    Args:
        append_to_response: (optional) Comma separated, any movie method.

    Returns:
        A dict representation of the JSON returned from the API.
    """
    response = self._GET(self._get_id_path('translations'), kwargs)
    # Mirror the API payload onto this object's attributes as well.
    self._set_attrs_to_values(response)
    return response
Get the translations for a specific movie id. Args: append_to_response: (optional) Comma separated, any movie method. Returns: A dict representation of the JSON returned from the API.
codesearchnet
def _is_composite_function(self, func: 'function_pb2.FunctionDef') -> bool:
    """Determine whether a FunctionDef is a composite function.

    Args:
        func: A FunctionDef object.

    Returns:
        True iff `func`'s signature name carries the ``composite_`` prefix.
    """
    name = func.signature.name
    return name.startswith('composite_')
Determine whether a FunctionDef is a composite function. Args: func: A FunctionDef object. Returns: True iff `func` is a composite function.
github-repos