code
stringlengths
20
4.93k
docstring
stringlengths
33
1.27k
source
stringclasses
3 values
def get_clusters_interfaces(clusters, extra_cond=(lambda nic: True)):
    """Return the available network interfaces of each cluster.

    Args:
        clusters: iterable of cluster names to query.
        extra_cond: predicate applied to each NIC to filter the cards
            returned by the API (defaults to accepting everything).

    Returns:
        dict: mapping of cluster name to its list of interfaces.
    """
    result = {}
    for name in clusters:
        cluster_nics = get_cluster_interfaces(name, extra_cond=extra_cond)
        # Mirror setdefault semantics: keep the first value seen if the
        # same cluster name appears more than once in the input.
        if name not in result:
            result[name] = cluster_nics
    return result
Returns for each cluster the available cluster interfaces Args: clusters (str): list of the clusters extra_cond (lambda): extra predicate to filter network card retrieved from the API. E.g lambda nic: not nic['mounted'] will retrieve all the usable network cards that are not mounted by default. Returns: dict of clust...
codesearchnet
def visualize_instance_html(self, exp, label, div_name, exp_object_name, text=True, opacity=True): if not text: return u'' text = (self.indexed_string.raw_string() .encode('utf-8', 'xmlcharrefreplace').decode('utf-8')) text = r...
Adds text with highlighted words to visualization. Args: exp: list of tuples [(id, weight), (id,weight)] label: label id (integer) div_name: name of div object to be used for rendering(in js) exp_object_name: name of js explanation object text: if False, return empty opacity: if True, fade colors according to weight
juraj-google-style
def remove_indirect_links(g, alg='aracne', **kwargs):
    """Apply a deconvolution algorithm to a networkx graph.

    Args:
        g (networkx.Graph): graph to deconvolve.
        alg (str): algorithm to use ('aracne', 'clr' or 'nd').
        kwargs (dict): extra options forwarded to the algorithm.

    Returns:
        networkx.DiGraph: graph with indirect links removed.
    """
    algorithms = {'aracne': aracne, 'nd': network_deconvolution, 'clr': clr}
    deconvolve = algorithms[alg]
    adjacency = np.array(nx.adjacency_matrix(g).todense())
    cleaned = nx.DiGraph(deconvolve(adjacency, **kwargs))
    # Restore the original node labels (deconvolution works on indices).
    relabeling = dict(enumerate(g.nodes()))
    return nx.relabel_nodes(cleaned, relabeling)
Apply deconvolution to a networkx graph. Args: g (networkx.Graph): Graph to apply deconvolution to alg (str): Algorithm to use ('aracne', 'clr', 'nd') kwargs (dict): extra options for algorithms Returns: networkx.Graph: graph with undirected links removed.
codesearchnet
def pack_image_features(self, image_features, image_sizes, vision_feature_select_strategy, image_newline=None): new_image_features = [] feature_lens = [] for image_idx, image_feature in enumerate(image_features): if image_feature.shape[0] > 1: base_image_feature = image_feature[0] ...
Reshape, unpad and then pack each image_feature into a single image_features tensor containing all visual vectors. Args: image_features (`List[torch.Tensor]` of length num_images, each of shape `(num_patches, image_length, embed_dim)`) List of image feature tensor, each contains all the visual feature of all patches. ...
github-repos
def Patch(self, request, global_params=None):
    """Updates a `BuildTrigger` by its project ID and trigger ID.

    This API is experimental.

    Args:
        request: (CloudbuildProjectsTriggersPatchRequest) input message.
        global_params: (StandardQueryParameters, default: None) global
            arguments.

    Returns:
        (BuildTrigger) The response message.
    """
    method_config = self.GetMethodConfig('Patch')
    response = self._RunMethod(
        method_config, request, global_params=global_params)
    return response
Updates a `BuildTrigger` by its project ID and trigger ID. This API is experimental. Args: request: (CloudbuildProjectsTriggersPatchRequest) input message global_params: (StandardQueryParameters, default: None) global arguments Returns: (BuildTrigger) The response message.
github-repos
def get_image_and_mask(self, label, positive_only=True, hide_rest=False, num_features=5, min_weight=0.0): if (label not in self.local_exp): raise KeyError('Label not in explanation') segments = self.segments image = self.image exp = self.local_exp[label] mask = np.zeros(segments.shape, segme...
Init function. Args: label: label to explain positive_only: if True, only take superpixels that contribute to the prediction of the label. Otherwise, use the top num_features superpixels, which can be positive or negative towards the label hide_rest: if True, make the non-explanation part of the return image gray num_...
codesearchnet
def get_special_tokens_mask(self, token_ids_0: List[int], token_ids_1: Optional[List[int]]=None, already_has_special_tokens: bool=False) -> List[int]: if already_has_special_tokens: return super().get_special_tokens_mask(token_ids_0=token_ids_0, token_ids_1=token_ids_1, already_has_special_tokens=True) ...
Retrieve sequence ids from a token list that has no special tokens added. This method is called when adding special tokens using the tokenizer `prepare_for_model` method. Args: token_ids_0 (`List[int]`): List of IDs. token_ids_1 (`List[int]`, *optional*): Optional second list of IDs for sequence pairs. already_has_spe...
github-repos
def sample_observed_state(self, s: pd.Series) -> Dict:
    """Sample an observed state vector (the emission function).

    Args:
        s: Latent state vector.

    Returns:
        Observed state vector: node name -> {indicator name -> sample},
        with each sample drawn from a normal centered on the scaled
        latent value.
    """
    observed = {}
    for node_name, node_data in self.nodes(data=True):
        samples = {}
        for indicator in node_data["indicators"].values():
            samples[indicator.name] = np.random.normal(
                s[node_name] * indicator.mean, indicator.stdev
            )
        observed[node_name] = samples
    return observed
Sample observed state vector. This is the implementation of the emission function. Args: s: Latent state vector. Returns: Observed state vector.
juraj-google-style
def get_mailcap_entry(self, url): for parser in mime_parsers.parsers: if parser.pattern.match(url): try: modified_url, content_type = parser.get_mimetype(url) except Exce...
Search through the mime handlers list and attempt to find the appropriate command to open the provided url with. Will raise a MailcapEntryNotFound exception if no valid command exists. Params: url (text): URL that will be checked Returns: command (text): The string of the command that should be executed in a subproc...
juraj-google-style
def export(self, name=None): with ops.name_scope(name, '%s_Export' % self.name, [self.resource_handle]): exported_keys, exported_values = gen_lookup_ops.lookup_table_export_v2(self.resource_handle, self._key_dtype, self._value_dtype) exported_values.set_shape(exported_keys.get_shape().concatenate(self._...
Returns tensors of all keys and values in the table. Args: name: A name for the operation (optional). Returns: A pair of tensors with the first tensor containing all keys and the second tensors containing all values in the table.
github-repos
def Logger(name, debug=False, facility=None): logger = logging.getLogger(name) logger.handlers = [] logger.addHandler(logging.NullHandler()) logger.propagate = False logger.setLevel(logging.DEBUG) formatter = logging.Formatter(name + ': %(levelname)s %(message)s') if debug: console_handler = ...
Get a logging object with handlers for sending logs to SysLog. Args: name: string, the name of the logger which will be added to log entries. debug: bool, True if debug output should write to the console. facility: int, an encoding of the SysLog handler's facility and priority. Returns: logging object, an object for ...
juraj-google-style
def strip_hidden(key_tuples, visibilities): result = [] for key_tuple in key_tuples: if len(key_tuple) != len(visibilities): raise ValueError( "length of key tuple {} is not equal to length of visibilities {}".format( key_tuple, visibilities ...
Filter each tuple according to visibility. Args: key_tuples: A sequence of tuples of equal length (i.e. rectangular) visibilities: A sequence of booleans equal in length to the tuples contained in key_tuples. Returns: A sequence equal in length to key_tuples where the items are tuples with a length corresponding to t...
juraj-google-style
def find_paths_referenced(self) -> Collection[str]:
    """Find paths for any elements referenced in this expression.

    For example, the expression 'a.b.where(c > d.e).f' yields paths
    {'a', 'a.b', 'a.b.c', 'a.b.d', 'a.b.d.e', 'a.b.f'}.

    Returns:
        A collection of unique paths referenced in the expression.
    """
    _, referenced = self._find_paths_referenced()
    unique_paths = set(referenced)
    return unique_paths
Finds paths for any elements referenced in this expression. For example, given the expression 'a.b.where(c > d.e).f' returns paths {'a', 'a.b', 'a.b.c', 'a.b.d', 'a.b.d.e', 'a.b.f'} Returns: A collections of paths referenced in the expression.
github-repos
def debug_watch_keys(self, node_name, device_name=None): try: device_name = self._infer_device_name(device_name, node_name) except ValueError: return [] if node_name not in self._debug_watches[device_name]: return [] watch_keys = [] for watched_slot in self._debug_watches[dev...
Get all tensor watch keys of given node according to partition graphs. Args: node_name: (`str`) name of the node. device_name: (`str`) name of the device. If there is only one device or if node_name exists on only one device, this argument is optional. Returns: (`list` of `str`) all debug tensor watch keys. Returns a...
github-repos
def _log_score(score):
    """Log the creation of a score.

    Args:
        score (Score): the score model; its points and submission UUID
            are included in the log entry.

    Returns:
        None
    """
    message = "Score of ({}/{}) set for submission {}".format(
        score.points_earned, score.points_possible, score.submission.uuid
    )
    logger.info(message)
Log the creation of a score. Args: score (Score): The score model. Returns: None
juraj-google-style
def instantiate(self, substitutions): param_dict = self.substitute_params(substitutions) pkg, ident = self.name.rsplit(".", 1) pkg = "malcolm.modules.%s" % pkg try: ob = importlib.import_module(pkg) except ImportError as e: raise_with_traceback( ...
Keep recursing down from base using dotted name, then call it with self.params and args Args: substitutions (dict): Substitutions to make to self.param_dict Returns: The found object called with (*args, map_from_d) E.g. if ob is malcolm.parts, and name is "ca.CADoublePart", then the object will be malcolm.parts.ca.C...
juraj-google-style
def nonzero_monies(self):
    """Get the underlying ``Money`` instances that are not zero.

    Returns:
        list: zero or more copied Money instances with non-zero amounts.
    """
    nonzero = []
    for money in self._money_obs:
        if money.amount != 0:
            # Copy so callers cannot mutate the wallet's internal objects.
            nonzero.append(copy.copy(money))
    return nonzero
Get a list of the underlying ``Money`` instances that are not zero Returns: ([Money]): A list of zero or more money instances. Currencies will be unique.
codesearchnet
def has_chosen(state, correct, msgs): if not issubclass(type(correct), int): raise InstructorError( "Inside `has_chosen()`, the argument `correct` should be an integer." ) student_process = state.student_process if not isDefinedInProcess(MC_VAR_NAME, student_process): ...
Test multiple choice exercise. Test for a MultipleChoiceExercise. The correct answer (as an integer) and feedback messages are passed to this function. Args: correct (int): the index of the correct answer (should be an instruction). Starts at 1. msgs (list(str)): a list containing all feedback messages belonging to e...
juraj-google-style
def _initialize_splittable_and_unsplittable_dims(self, default_splittability, exception_dims_iterable=None): default_dims = set() exception_dims = set() if exception_dims_iterable: exception_dims.update(exception_dims_iterable) for t in itertools.chain(self.inputs, self.outputs): for dim...
Initializer for splittable_dims and unsplittable_dims. Helper method to categorize all dimensions in the input/output tensors as either splittable or unsplittable. Args: default_splittability: a string which is either "splittable" or "unsplittable". exception_dims_iterable: an optional iterable of names of dimensions...
codesearchnet
def _source_file_paths_outside_tensorflow_py_library(code_defs, id_to_string): file_ids = set() for code_def in code_defs: for trace in code_def.traces: file_ids.add(trace.file_id) non_tf_files = (id_to_string[file_id] for file_id in file_ids) non_tf_files = (f for f in non_tf_files ...
Extract source file paths outside TensorFlow Python library. Args: code_defs: An iterable of `CodeDef` protos, i.e., an iterable of stack traces. id_to_string: A proto map from integer ids to strings. Returns: An iterable of source file paths outside the TensorFlow Python library.
github-repos
def ParseAccountInformation( self, parser_mediator, query, row, **unused_kwargs): query_hash = hash(query) display_name = self._GetRowValue(query_hash, row, 'given_displayname') fullname = self._GetRowValue(query_hash, row, 'fullname') username = '{0!s} <{1!s}>'.format(fullname, d...
Parses account information. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. query (str): query that created the row. row (sqlite3.Row): row with account information.
juraj-google-style
def __delitem__(self, keyword): status = False if keyword: if not self.case_sensitive: keyword = keyword.lower() current_dict = self.keyword_trie_dict character_trie_list = [] for letter in keyword: if letter in cur...
To remove keyword from the dictionary pass the keyword and the clean name it maps to. Args: keyword : string keyword that you want to remove if it's present Examples: >>> keyword_processor.add_keyword('Big Apple') >>> del keyword_processor['Big Apple']
juraj-google-style
class Poisson(reduction_metrics.MeanMetricWrapper):
    """Computes the Poisson metric between `y_true` and `y_pred`.

    Formula: ``metric = y_pred - y_true * log(y_pred)``.

    Args:
        name: (Optional) string name of the metric instance.
        dtype: (Optional) data type of the metric result.
    """

    def __init__(self, name='poisson', dtype=None):
        # Delegate all state tracking to the mean-metric wrapper,
        # supplying the poisson function as the wrapped metric.
        super().__init__(fn=poisson, name=name, dtype=dtype)

    def get_config(self):
        config = {'name': self.name, 'dtype': self.dtype}
        return config
Computes the Poisson metric between `y_true` and `y_pred`. Formula: ```python metric = y_pred - y_true * log(y_pred) ``` Args: name: (Optional) string name of the metric instance. dtype: (Optional) data type of the metric result. Examples: >>> m = keras.metrics.Poisson() >>> m.update_state([[0, 1], [0, 0]], [[1, 1...
github-repos
def imshow(img, win_name='', wait_time=0):
    """Show an image in an OpenCV window.

    Args:
        img (str or ndarray): the image (or path) to be displayed.
        win_name (str): the window name.
        wait_time (int): value passed to ``cv2.waitKey``.
    """
    image = imread(img)
    cv2.imshow(win_name, image)
    cv2.waitKey(wait_time)
Show an image. Args: img (str or ndarray): The image to be displayed. win_name (str): The window name. wait_time (int): Value of waitKey param.
codesearchnet
class CLIPEncoder(nn.Module): def __init__(self, config: CLIPConfig): super().__init__() self.config = config self.layers = nn.ModuleList([CLIPEncoderLayer(config) for _ in range(config.num_hidden_layers)]) self.gradient_checkpointing = False @can_return_tuple def forward(s...
Transformer encoder consisting of `config.num_hidden_layers` self attention layers. Each layer is a [`CLIPEncoderLayer`]. Args: config: CLIPConfig
github-repos
def _load_config(self, client_secrets_file, client_id, client_secret): if (client_id and client_secret): (self.client_id, self.client_secret) = (client_id, client_secret) return if client_secrets_file: self._load_client_secrets(client_secrets_file) return if ('GOOGLE_OAUTH2_C...
Loads oauth2 configuration in order of priority. Priority: 1. Config passed to the constructor or init_app. 2. Config passed via the GOOGLE_OAUTH2_CLIENT_SECRETS_FILE app config. 3. Config passed via the GOOGLE_OAUTH2_CLIENT_ID and GOOGLE_OAUTH2_CLIENT_SECRET app config. Raises: ValueError if no config could be found...
codesearchnet
def ClaimNotificationsForCollection(cls, token=None, start_time=None, lease_time=200, collection=None): class CollectionFilter(object): def __init__(self, collection): self.collection = collection def FilterRecord(self, notification): if (self.collection is None): ...
Return unclaimed hunt result notifications for collection. Args: token: The security token to perform database operations with. start_time: If set, an RDFDateTime indicating at what point to start claiming notifications. Only notifications with a timestamp after this point will be claimed. lease_time: How long to clai...
codesearchnet
def create_win_salt_restart_task(): cmd = 'cmd' args = '/c ping -n 3 127.0.0.1 && net stop salt-minion && net start salt-minion' return __salt__['task.create_task'](name='restart-salt-minion', user_name='System', force=True, action_type='Execute', cmd=cmd, arguments=args, trigger_type='Once', start_date='19...
Create a task in Windows task scheduler to enable restarting the salt-minion Returns: bool: ``True`` if successful, otherwise ``False`` CLI Example: .. code-block:: bash salt '*' service.create_win_salt_restart_task()
codesearchnet
def init(dvc_dir):
    """Initialize dvc config.

    Args:
        dvc_dir (str): path to the .dvc directory.

    Returns:
        dvc.config.Config: config object for the freshly created file.
    """
    config_file = os.path.join(dvc_dir, Config.CONFIG)
    # Create (or truncate) an empty config file. The context manager
    # guarantees the handle is closed even if an error occurs, unlike
    # the previous open(...).close() one-liner.
    with open(config_file, 'w+'):
        pass
    return Config(dvc_dir)
Initializes dvc config. Args: dvc_dir (str): path to .dvc directory. Returns: dvc.config.Config: config object.
codesearchnet
def build_nccl_all_reduce(input_tensors, red_op, un_op=None): if red_op == math_ops.add: output_tensors = nccl_ops.all_sum(input_tensors) else: raise ValueError('red_op not supported by NCCL all-reduce: ', red_op) if un_op: un_op_wrapped = [] for t in output_tensors: ...
Build a subgraph that does one full all-reduce, using NCCL. Args: input_tensors: list of `tf.Tensor` of same-shape and type values to be reduced. red_op: binary elementwise reduction operator. Must be one of {tf.add} un_op: optional unary elementwise Op to apply to fully-reduce values. Returns: list of `tf.Tensor` of...
github-repos
class PatchTSMixerGatedAttention(nn.Module): def __init__(self, in_size: int, out_size: int): super().__init__() self.attn_layer = nn.Linear(in_size, out_size) self.attn_softmax = nn.Softmax(dim=-1) def forward(self, inputs): attn_weight = self.attn_softmax(self.attn_layer(inpu...
Module that applies gated attention to input data. Args: in_size (`int`): The input size. out_size (`int`): The output size.
github-repos
def validate_format(self, **kwargs): args = dict( dict_type=self._dict, allow_no_value=self._allow_no_value, inline_comment_prefixes=self._inline_comment_prefixes, strict=self._strict, empty_lines_in_values=self._empty_lines_in_values ...
Call ConfigParser to validate config Args: kwargs: are passed to :class:`configparser.ConfigParser`
juraj-google-style
def get_project_details(self, project_id):
    """Get information on a given project.

    Args:
        project_id (str): The UUID of the requested project.

    Returns:
        A dictionary describing the project.

    Raises:
        StorageArgumentException: if ``project_id`` is not a valid UUID.
    """
    if not is_valid_uuid(project_id):
        raise StorageArgumentException(
            'Invalid UUID for project_id: {0}'.format(project_id))
    endpoint = 'project/{}/'.format(project_id)
    request = self._authenticated_request.to_endpoint(endpoint)
    return request.return_body().get()
Get information on a given project Args: project_id (str): The UUID of the requested project. Returns: A dictionary describing the project:: { u'collab_id': 2271, u'created_by': u'303447', u'created_on': u'2017-03-10T12:50:06.077891Z', u'description': u'', u'entity_type': u'project', u'modified_by': u'303447', u'mod...
codesearchnet
def _resource_apply_dense(self, grad, handle):
    """Add ops to apply dense gradients to the variable `handle`.

    Abstract hook: subclasses must override this with a concrete update.

    Args:
        grad: a `Tensor` representing the gradient.
        handle: a `Tensor` of dtype `resource` pointing to the variable
            to be updated.

    Returns:
        An `Operation` which updates the value of the variable.

    Raises:
        NotImplementedError: always, in this base implementation.
    """
    raise NotImplementedError()
Add ops to apply dense gradients to the variable `handle`. Args: grad: a `Tensor` representing the gradient. handle: a `Tensor` of dtype `resource` which points to the variable to be updated. Returns: An `Operation` which updates the value of the variable.
github-repos
def add_output(self, output):
    """Add an output to the Transaction's list of outputs.

    Args:
        output (Output): the output to append.

    Raises:
        TypeError: if ``output`` is not an Output instance.
    """
    if isinstance(output, Output):
        self.outputs.append(output)
    else:
        raise TypeError('`output` must be an Output instance or None')
Adds an output to a Transaction's list of outputs. Args: output (:class:`~bigchaindb.common.transaction. Output`): An Output to be added to the Transaction.
juraj-google-style
def is_bit_mask(enumeration, potential_mask): if (not isinstance(potential_mask, six.integer_types)): return False mask_enumerations = (CryptographicUsageMask, ProtectionStorageMask, StorageStatusMask) if (enumeration not in mask_enumerations): return False mask = 0 for value in [e.v...
A utility function that checks if the provided value is a composite bit mask of enumeration values in the specified enumeration class. Args: enumeration (class): One of the mask enumeration classes found in this file. These include: * Cryptographic Usage Mask * Protection Storage Mask * Storage Status Mask potential_m...
codesearchnet
def filter(self, field_name, operand, value): if (operand not in self._FILTER_OPERANDS): raise ValueError('Operand must be one of {}'.format(', '.join(self._FILTER_OPERANDS))) record_stub = record_factory(self._app) field = record_stub.get_field(field_name) self._raw['filters'].append({'fieldId'...
Adds a filter to report Notes: All filters are currently AND'ed together Args: field_name (str): Target field name to filter on operand (str): Operand used in comparison. See `swimlane.core.search` for options value: Target value used in comparision
codesearchnet
def CreateDataTypeMap(self, definition_name):
    """Create a specific data type map by name.

    Args:
        definition_name (str): name of the data type definition.

    Returns:
        DataTypeMap: data type map, or None if the data type definition
        is not available.
    """
    definition = self._definitions_registry.GetDefinitionByName(
        definition_name)
    if not definition:
        return None
    return DataTypeMapFactory.CreateDataTypeMapByType(definition)
Creates a specific data type map by name. Args: definition_name (str): name of the data type definition. Returns: DataTypeMap: data type map or None if the date type definition is not available.
codesearchnet
def lex_index(n, k, lst):
    """Return the lex index of a combination.

    Args:
        n (int): the total number of options.
        k (int): the number of elements.
        lst (list): the combination, as k distinct ints in [0, n).

    Returns:
        int: the index of the combination in lex order.

    Raises:
        VisualizationError: if the length of the list is not equal to k.
    """
    if len(lst) != k:
        raise VisualizationError("list should have length k")
    # Map each element to its dual value, then sum binomial terms.
    comb = [n - 1 - x for x in lst]
    # math.comb replaces the hand-rolled n_choose_k helper; it returns an
    # exact int (and 0 when the lower index exceeds the upper, as before).
    dualm = sum(math.comb(comb[k - 1 - i], i + 1) for i in range(k))
    return int(dualm)
Return the lex index of a combination.. Args: n (int): the total number of options . k (int): The number of elements. lst (list): list Returns: int: returns int index for lex order Raises: VisualizationError: if length of list is not equal to k
juraj-google-style
def get_site_orbital_dos(self, site, orbital):
    """Get the Dos for a particular orbital of a particular site.

    Args:
        site: Site in Structure associated with CompleteDos.
        orbital: Orbital in the site.

    Returns:
        Dos containing densities for the orbital of the site.
    """
    densities = self.pdos[site][orbital]
    return Dos(self.efermi, self.energies, densities)
Get the Dos for a particular orbital of a particular site. Args: site: Site in Structure associated with CompleteDos. orbital: Orbital in the site. Returns: Dos containing densities for orbital of site.
juraj-google-style
def _init_profile_batch(self, profile_batch): profile_batch_error_message = 'profile_batch must be a non-negative integer or 2-tuple of positive integers. A pair of positive integers signifies a range of batches to profile. Found: {}'.format(profile_batch) if isinstance(profile_batch, str): profile_batc...
Validate profile_batch value and set the range of batches to profile. Sets values of _start_batch and _stop_batch attributes, specifying the start and stop batch to profile. Setting `profile_batch=0` disables profiling. Args: profile_batch: The range of batches to profile. Should be a non-negative integer or a comma s...
github-repos
def get_creator_by_name(name):
    """Get a creator function by name.

    Args:
        name (str): name of the creator function.

    Returns:
        function: the matching creator function.
    """
    creators = {
        'docker(container)': Container.creator,
        'shell': Bash.creator,
        'docker(image)': Image.creator,
        'python': Script.creator,
        'packer': Packer.creator,
        'ansible(simple)': Ansible.creator,
    }
    return creators[name]
Get creator function by name. Args: name (str): name of the creator function. Returns: function: creater function.
juraj-google-style
def email_address(self, address, owner=None, **kwargs):
    """Create an Email Address TI object.

    Args:
        address: the email address value.
        owner: owner of the indicator (optional).
        **kwargs: additional arguments forwarded to EmailAddress.

    Returns:
        EmailAddress: the created TI object.
    """
    ti_object = EmailAddress(self.tcex, address, owner=owner, **kwargs)
    return ti_object
Create the Email Address TI object. Args: owner: address: **kwargs: Return:
juraj-google-style
def camel_to_snake(name):
    """Convert CamelCase to snake_case.

    Args:
        name (string): the name to convert from CamelCase to snake_case.

    Returns:
        string: the converted string.
    """
    # First split off capitalized words, then separate any remaining
    # lowercase/digit-to-uppercase boundaries, and lowercase everything.
    partially_converted = re.sub(r'(.)([A-Z][a-z]+)', r'\1_\2', name)
    return re.sub(r'([a-z0-9])([A-Z])', r'\1_\2', partially_converted).lower()
Converts CamelCase to snake_case. Args: name (string): The name to convert from CamelCase to snake_case. Returns: string: Converted string.
codesearchnet
def __edit_distance_alt(self, words):
    """Compute all strings that are one edit away from the given words,
    using only the letters in the corpus.

    Args:
        words (list): the words for which to calculate the edit distance.

    Returns:
        list: strings one edit away from the (lowercased) input words.
    """
    lowered = [word.lower() for word in words]
    results = []
    for word in lowered:
        results.extend(self.edit_distance_1(word))
    return results
Compute all strings that are 1 edits away from all the words using only the letters in the corpus Args: words (list): The words for which to calculate the edit distance Returns: set: The set of strings that are edit distance two from the \ provided words
codesearchnet
def get_audio_features(self, input_features: torch.FloatTensor, feature_attention_mask: Optional[torch.LongTensor]=None, audio_feature_lengths: Optional[torch.LongTensor]=None): if feature_attention_mask is not None: audio_feature_lengths = torch.sum(feature_attention_mask, dim=1) input_features = i...
Encodes audios into continuous embeddings that can be forwarded to the language model. Args: input_features (`torch.FloatTensor`): The tensors corresponding to the input audios. feature_attention_mask (`torch.LongTensor`, *optional*): Mask to avoid performing attention on padding feature indices. Mask values selected ...
github-repos
def ReadTimestamp(filename): if not os.path.exists(filename): return None try: timestamp_file = open(filename, 'r') timestamp_string = timestamp_file.read().strip() except IOError as e: logging.warning('error opening timestamp file: %s', e) timestamp_string = None ...
Return a timestamp from a file. The timestamp file format is a single line, containing a string in the ISO-8601 format YYYY-MM-DDThh:mm:ssZ (i.e. UTC time). We do not support all ISO-8601 formats for reasons of convenience in the code. Timestamps internal to nss_cache deliberately do not carry milliseconds. Args: f...
github-repos
def get_contact(self, response=None, nir=None, handle=None, retry_count=3, dt_format=None): if (response or (nir == 'krnic')): contact_response = response else: contact_response = self._net.get_http_raw(url=str(NIR_WHOIS[nir]['url']).format(handle), retry_count=retry_count, headers=NIR_WHOIS[nir...
The function for retrieving and parsing NIR whois data based on NIR_WHOIS contact_fields. Args: response (:obj:`str`): Optional response object, this bypasses the lookup. nir (:obj:`str`): The NIR to query ('jpnic' or 'krnic'). Required if response is None. handle (:obj:`str`): For NIRs that have separate contact quer...
codesearchnet
def rmdir(path, dir_fd=None):
    """Remove a directory. Equivalent to "os.rmdir".

    Args:
        path (path-like object): Path or URL.
        dir_fd: directory descriptor; accepted for os.rmdir signature
            compatibility but not supported on cloud storage objects.
    """
    system = get_instance(path)
    directory_path = system.ensure_dir_path(path)
    system.remove(directory_path)
Remove a directory. Equivalent to "os.rmdir". Args: path (path-like object): Path or URL. dir_fd: directory descriptors; see the os.rmdir() description for how it is interpreted. Not supported on cloud storage objects.
codesearchnet
def set_viewbox(self, x, y, w, h):
    """Set the origin and size of the viewbox (a virtual view area).

    Args:
        x (int): x coordinate of the viewbox origin.
        y (int): y coordinate of the viewbox origin.
        w (int): width of the viewbox.
        h (int): height of the viewbox.
    """
    viewbox_value = '%s %s %s %s' % (x, y, w, h)
    self.attributes['viewBox'] = viewbox_value
    # Disable aspect-ratio preservation so the viewbox stretches freely.
    self.attributes['preserveAspectRatio'] = 'none'
Sets the origin and size of the viewbox, describing a virtual view area. Args: x (int): x coordinate of the viewbox origin y (int): y coordinate of the viewbox origin w (int): width of the viewbox h (int): height of the viewbox
codesearchnet
def to_grid_locator(latitude, longitude, precision='square'): if (precision not in ('square', 'subsquare', 'extsquare')): raise ValueError(('Unsupported precision value %r' % precision)) if (not ((- 90) <= latitude <= 90)): raise ValueError(('Invalid latitude value %r' % latitude)) if (not (...
Calculate Maidenhead locator from latitude and longitude. Args: latitude (float): Position's latitude longitude (float): Position's longitude precision (str): Precision with which generate locator string Returns: str: Maidenhead locator for latitude and longitude Raise: ValueError: Invalid precision identifier Value...
codesearchnet
def update_user_attributes(self, user, claims): required_fields = [field.name for field in user._meta.fields if field.blank is False] for field, claim in settings.CLAIM_MAPPING.items(): if hasattr(user, field): if claim in claims: setattr(user, ...
Updates user attributes based on the CLAIM_MAPPING setting. Args: user (django.contrib.auth.models.User): User model instance claims (dict): claims from the access token
juraj-google-style
def is_extension_type(tensor):
    """Returns whether `tensor` is an ExtensionType value.

    Currently implemented as a `CompositeTensor` instance check; per
    tensorflow/community#269 this is expected to change to a dedicated
    extension-type protocol check once ExtensionType is made public.

    Args:
        tensor: An object to test.

    Returns:
        bool: True if `tensor` is a CompositeTensor instance.
    """
    return isinstance(tensor, composite_tensor.CompositeTensor)
Returns whether a tensor is of an ExtensionType. github.com/tensorflow/community/pull/269 Currently it works by checking if `tensor` is a `CompositeTensor` instance, but this will be changed to use an appropriate extensiontype protocol check once ExtensionType is made public. Args: tensor: An object to test Returns:...
github-repos
def iaf_hparams(hidden_size=512, filter_size=4096): hparams = common_hparams.basic_params1() hparams.hidden_size = hidden_size hparams.add_hparam("attention_key_channels", None) hparams.add_hparam("attention_value_channels", None) hparams.add_hparam("num_heads", 4) hparams.add_hparam("attention_dropo...
Create hyperparameters for inverse autoregressive flows. Args: hidden_size: Width of attention layers and neural network output layer. filter_size: Hidden layer width for neural network. Returns: hparams: Hyperparameters with basic presets for inverse autoregressive flows.
juraj-google-style
def setup_logging(verbosity, formats=None): if formats is None: formats = {} log_level = logging.INFO log_format = formats.get("info", INFO_FORMAT) if sys.stdout.isatty(): log_format = formats.get("color", COLOR_FORMAT) if verbosity > 0: log_level = logging.DEBUG ...
Configure a proper logger based on verbosity and optional log formats. Args: verbosity (int): 0, 1, 2 formats (dict): Optional, looks for `info`, `color`, and `debug` keys which may override the associated default log formats.
juraj-google-style
def metadata_extractor(self): if (not hasattr(self, '_local_file')): raise AttributeError('local_file attribute must be set before calling metadata_extractor') if (not hasattr(self, '_metadata_extractor')): if self.local_file.endswith('.whl'): logger.info('Getting metadata from wheel...
Returns an instance of proper MetadataExtractor subclass. Always returns the same instance. Returns: The proper MetadataExtractor subclass according to local file suffix.
codesearchnet
def cmd2(command, shell=False, detatch=False, verbose=False, verbout=None): import shlex if isinstance(command, (list, tuple)): raise ValueError('command tuple not supported yet') args = shlex.split(command, posix=(not WIN32)) if (verbose is True): verbose = 2 if (verbout is None): ...
Trying to clean up cmd Args: command (str): string command shell (bool): if True, process is run in shell detatch (bool): if True, process is run in background verbose (int): verbosity mode verbout (bool): if True, `command` writes to stdout in realtime. defaults to True iff verbose > 0 Returns: dict: info - informat...
codesearchnet
def GetVSSStoreIdentifiers(self, volume_system, volume_identifiers): print_header = True while True: if print_header: self._PrintVSSStoreIdentifiersOverview( volume_system, volume_identifiers) print_header = False self._output_writer.Write('\n') lines = self...
Retrieves VSS store identifiers. This method can be used to prompt the user to provide VSS store identifiers. Args: volume_system (VShadowVolumeSystem): volume system. volume_identifiers (list[str]): volume identifiers including prefix. Returns: list[str]: selected volume identifiers including prefix or None.
juraj-google-style
def gen_public_api(output_dir: str, output_package: str, root_init_template: str, api_version: int, compat_api_versions: Sequence[int], compat_init_templates: Sequence[str], use_lazy_loading: bool, file_prefixes_to_strip: Sequence[str], mapping_files: Sequence[str], packages_to_ignore: Sequence[str], module_prefix: str...
Generates the public API for tensorflow. Args: output_dir: The directory to output the files to. output_package: The package to use for the imports. root_init_template: The template for the root init file. api_version: The version of the API to generate. compat_api_versions: The versions of the compat APIs to generate...
github-repos
def get_info(ads):
    """Get information on a list of AndroidDevice objects.

    Args:
        ads: A list of AndroidDevice objects.

    Returns:
        A list of dicts, each representing info for one device, with all
        'user_added_info' values stringified so the result is yaml
        serializable.
    """
    infos = []
    for ad in ads:
        # Shallow-copy before editing so the device's own info dict is
        # not mutated as a side effect (the original wrote back into it;
        # whether device_info returns a fresh dict is not guaranteed here).
        device_info = dict(ad.device_info)
        device_info['user_added_info'] = {
            k: str(v) for k, v in device_info['user_added_info'].items()
        }
        infos.append(device_info)
    return infos
Get information on a list of AndroidDevice objects. Args: ads: A list of AndroidDevice objects. Returns: A list of dict, each representing info for an AndroidDevice objects. Everything in this dict should be yaml serializable.
github-repos
def spawn_program(self, name, arguments=None, timeout=30, exclusive=False):
    """Spawn a program in the working directory.

    This method allows interaction with the running program through the
    returned RunningProgram object.

    Args:
        name (str): the name of the program to be executed.
        arguments (list): command-line arguments for the program
            (defaults to no arguments).
        timeout (int): the timeout for execution, in seconds.
        exclusive (bool): if True, kill long-running programs first.

    Returns:
        RunningProgram: handle for interacting with the running program.
    """
    # None sentinel fixes the mutable-default-argument pitfall: the old
    # `arguments=[]` default was one shared list across all calls.
    if arguments is None:
        arguments = []
    logger.debug('Spawning program for interaction ...')
    if exclusive:
        kill_longrunning(self.config)
    return RunningProgram(self, name, arguments, timeout)
Spawns a program in the working directory. This method allows the interaction with the running program, based on the returned RunningProgram object. Args: name (str): The name of the program to be executed. arguments (tuple): Command-line arguments for the program. timeout (int): The timeout for execution....
codesearchnet
def IsWalletTransaction(self, tx): for (key, contract) in self._contracts.items(): for output in tx.outputs: if (output.ScriptHash.ToBytes() == contract.ScriptHash.ToBytes()): return True for script in tx.scripts: if script.VerificationScript: ...
Verifies if a transaction belongs to the wallet. Args: tx (TransactionOutput):an instance of type neo.Core.TX.Transaction.TransactionOutput to verify. Returns: bool: True, if transaction belongs to wallet. False, if not.
codesearchnet
def testSaveAndLoadSingleVariable(self, shard_config): strategy = self._create_strategy(shard_config[0]) with strategy.scope(): var = variables_lib.Variable([1.0, 2.0, 3.0, 4.0, 5.0, 6.0]) model_dir = self.get_temp_dir() save.save(var, model_dir) strategy2 = self._create_strategy(shard_confi...
Test saving and loading ShardedVariable with different numbers of shards. Loading tf.Variables into multiple Shards is not yet supported Args: shard_config: The number of shards to use before and after loading. For example, [2, 1] means to create and save the variable with 2 shards and load it into 1 shard (i.e., a r...
github-repos
def _buffer_incomplete_responses(raw_output, buf): if raw_output: if buf: raw_output = b''.join([buf, raw_output]) buf = None if (b'\n' not in raw_output): buf = raw_output raw_output = None elif (not raw_output.endswith(b'\n')): re...
It is possible for some of gdb's output to be read before it completely finished its response. In that case, a partial mi response was read, which cannot be parsed into structured data. We want to ALWAYS parse complete mi records. To do this, we store a buffer of gdb's output if the output did not end in a newline. Ar...
codesearchnet
def activate_vacation(self, endtime: datetime, temperature: float):
    """Activate the vacation mode until the given time.

    Args:
        endtime (datetime): when the vacation mode should automatically
            be disabled.
        temperature (float): the set temperature to hold while the
            vacation mode is active.

    Returns:
        The response of the REST call.
    """
    payload = json.dumps(
        {
            "endtime": endtime.strftime("%Y_%m_%d %H:%M"),
            "temperature": temperature,
        }
    )
    return self._restCall("home/heating/activateVacation", payload)
activates the vacation mode until the given time Args: endtime(datetime): the time when the vacation mode should automatically be disabled temperature(float): the set temperature during the vacation mode
juraj-google-style
def Key(self):
    """Return the unique identifier for this MapEntry object.

    Returns:
        The value of the attribute named by ``self._KEY``, used as an
        index value for a maps.MapEntry instance in a maps.Map.
    """
    key_attribute = self._KEY
    return getattr(self, key_attribute)
Return unique identifier for this MapEntry object. Returns: A str which contains the name of the attribute to be used as an index value for a maps.MapEntry instance in a maps.Map.
github-repos
def WriteBlobsWithUnknownHashes(self, blobs_data):
    """Calculates hash ids and writes contents of given data blobs.

    Args:
        blobs_data: An iterable of bytes.

    Returns:
        A list of rdf_objects.BlobID objects with each blob id corresponding
        to an element in the original blobs_data argument.
    """
    # Materialize once: the input is iterated twice below (list comprehension,
    # then zip), so a one-shot generator would otherwise be exhausted after
    # the first pass and an empty mapping would be written silently.
    blobs_data = list(blobs_data)
    blobs_ids = [rdf_objects.BlobID.FromBlobData(d) for d in blobs_data]
    self.WriteBlobs(dict(zip(blobs_ids, blobs_data)))
    return blobs_ids
Calculates hash ids and writes contents of given data blobs. Args: blobs_data: An iterable of bytes. Returns: A list of rdf_objects.BlobID objects with each blob id corresponding to an element in the original blobs_data argument.
codesearchnet
def encode_structure(nested_structure):
    """Encodes nested structures composed of encodable types into a proto.

    Args:
        nested_structure: Structure to encode.

    Returns:
        Encoded proto.

    Raises:
        NotEncodableError: For values for which there are no encoders.
    """
    encoders = _get_encoders()
    return _map_structure(nested_structure, encoders)
Encodes nested structures composed of encodable types into a proto. Args: nested_structure: Structure to encode. Returns: Encoded proto. Raises: NotEncodableError: For values for which there are no encoders.
github-repos
def has_nrows( state, incorrect_msg="Your query returned a table with {{n_stu}} row{{'s' if n_stu > 1 else ''}} while it should return a table with {{n_sol}} row{{'s' if n_sol > 1 else ''}}.", ): has_result(state) n_stu = len(next(iter(state.student_result.values()))) n_sol = len(ne...
Test whether the student and solution query results have equal numbers of rows. Args: incorrect_msg: If specified, this overrides the automatically generated feedback message in case the number of rows in the student and solution query don't match.
juraj-google-style
def delete(self, dash_id): removed_info = dict(time_modified=r_db.zscore(config.DASH_ID_KEY, dash_id), meta=r_db.hget(config.DASH_META_KEY, dash_id), content=r_db.hget(config.DASH_CONTENT_KEY, dash_id)) r_db.zrem(config.DASH_ID_KEY, dash_id) r_db.hdel(config.DASH_META_KEY, dash_id) r_db.hdel(config.DASH...
Delete a dash meta and content, return updated dash content. Actually, just moves it to a specified place in the database. Args: dash_id: dashboard id. Returns: Redirect to home page.
codesearchnet
def insert_arguments_into_sql_query(compilation_result, arguments):
    """Insert the arguments into the compiled SQL query to form a complete query.

    Args:
        compilation_result: CompilationResult, compilation result from the
            GraphQL compiler.
        arguments: Dict[str, Any], parameter name -> value, for every
            parameter the query expects.

    Returns:
        SQLAlchemy Selectable, an executable SQL query with parameters bound.

    Raises:
        AssertionError: If the compilation result is not in the SQL language.
    """
    language = compilation_result.language
    if language != SQL_LANGUAGE:
        raise AssertionError(
            u'Unexpected query output language: {}'.format(compilation_result))
    return compilation_result.query.params(**arguments)
Insert the arguments into the compiled SQL query to form a complete query. Args: compilation_result: CompilationResult, compilation result from the GraphQL compiler. arguments: Dict[str, Any], parameter name -> value, for every parameter the query expects. Returns: SQLAlchemy Selectable, a executable SQL query with p...
juraj-google-style
def no_company_with_insufficient_companies_house_data(value):
    """Confirm the company number is not one Companies House holds no data on.

    Args:
        value (string): The company number to check.

    Raises:
        django.forms.ValidationError: If the number matches a company type
            with insufficient Companies House data.
    """
    # Normalize once instead of uppercasing on every iteration.
    normalized = value.upper()
    for prefix, name in company_types_with_insufficient_companies_house_data:
        if normalized.startswith(prefix):
            raise ValidationError(MESSAGE_INSUFFICIENT_DATA, params={'name': name})
Confirms that the company number is not for a company that Companies House does not hold information on. Args: value (string): The company number to check. Raises: django.forms.ValidationError
codesearchnet
def universal_transformer_layer(x, hparams, ffn_unit, attention_unit, pad_remover=None): def add_vanilla_transformer_layer(x, num_layers, name): 'Passes the input through num_layers of vanilla transformer layers.\n\n Args:\n x: input\n num_layers: number of layers\n name: string, prefix of l...
Core function applying the universal transformer layer. Args: x: input hparams: model hyper-parameters ffn_unit: feed-forward unit attention_unit: multi-head attention unit pad_remover: to mask out padding in convolutional layers (efficiency). Returns: the output tensor, extra output (can be memory, ponder time, etc...
codesearchnet
def process_request(self, request, credential=None): self._client_identity = [None, None] header = request.request_header self._set_protocol_version(header.protocol_version) max_response_size = None if header.maximum_response_size: max_response_size = header.maximum_response_size.value n...
Process a KMIP request message. This routine is the main driver of the KmipEngine. It breaks apart and processes the request header, handles any message errors that may result, and then passes the set of request batch items on for processing. This routine is thread-safe, allowing multiple client connections to use the...
codesearchnet
def latest_file(path_name, keyword='', ext='', **kwargs) -> str: files = all_files( path_name=path_name, keyword=keyword, ext=ext, full_path=True ) if not files: from xbbg.io import logs logger = logs.get_logger(latest_file, level=kwargs.pop('log', 'warning')) logger.d...
Latest modified file in folder Args: path_name: full path name keyword: keyword to search ext: file extension Returns: str: latest file name
juraj-google-style
def compare(self, value, expectation, regex_expr=False):
    """Compares two values with regular expression matching support.

    Thin wrapper that delegates to the module-level ``compare`` helper.

    Arguments:
        value (mixed): value to compare.
        expectation (mixed): value to match.
        regex_expr (bool, optional): enables string based regex matching.

    Returns:
        bool
    """
    return compare(value, expectation, regex_expr=regex_expr)
Compares two values with regular expression matching support. Arguments: value (mixed): value to compare. expectation (mixed): value to match. regex_expr (bool, optional): enables string based regex matching. Returns: bool
codesearchnet
def _create_inbound_stream(self, config=None): if (config is None): raise ValueError('No stream config to create stream from.') name = self._get_stream_name(config) stream_handlers = self._get_stream_handlers(config, name) stream_input = config.get('input', None) if (stream_input is None): ...
Creates an inbound stream from its config. Params: config: stream configuration as read by ait.config Returns: stream: a Stream Raises: ValueError: if any of the required config values are missing
codesearchnet
def failure_message(description, options): message = "expected to find {}".format(description) if options["count"] is not None: message += " {count} {times}".format( count=options["count"], times=declension("time", "times", options["count"])) elif options["between"] is...
Returns an expectation failure message for the given query description. Args: description (str): A description of the failed query. options (Dict[str, Any]): The query options. Returns: str: A message describing the failure.
juraj-google-style
def to_dataframe(self, start_row=0, max_rows=None): fetcher = self._get_row_fetcher(start_row=start_row, max_rows=max_rows, page_size=self._MAX_PAGE_SIZE) count = 0 page_token = None df_list = [] df = None w...
Exports the table to a Pandas dataframe. Args: start_row: the row of the table at which to start the export (default 0) max_rows: an upper limit on the number of rows to export (default None) Returns: A Pandas dataframe containing the table data.
juraj-google-style
def copy_remote_file(web_file, destination): size = 0 dir_name = os.path.dirname(destination) if not os.path.exists(dir_name): os.makedirs(dir_name) with open(destination, 'wb') as file_: chunk_size = 8 * 1024 for chunk in web_file.iter_content(chunk_size=chunk_size): ...
Ensure the destination path exists, then copy the online resource file to the local destination. Args: :web_file: reference to online file resource to take. :destination: path to store the file.
juraj-google-style
def __init__(self, channel): self.CompleteQuery = channel.unary_unary( "/google.cloud.talent.v4beta1.Completion/CompleteQuery", request_serializer=google_dot_cloud_dot_talent__v4beta1_dot_proto_dot_completion__service__pb2.CompleteQueryRequest.SerializeToString, resp...
Constructor. Args: channel: A grpc.Channel.
juraj-google-style
def _FormatUsername(self, event): username = self._output_mediator.GetUsername(event) return self._FormatField(username)
Formats the username. Args: event (EventObject): event. Returns: str: formatted username field.
juraj-google-style
def __init__(self, channel): self.GetRequiredPlugins = channel.unary_unary( '/pulumirpc.LanguageRuntime/GetRequiredPlugins', request_serializer=language__pb2.GetRequiredPluginsRequest.SerializeToString, response_deserializer=language__pb2.GetRequiredPluginsResponse.FromString, )...
Constructor. Args: channel: A grpc.Channel.
juraj-google-style
def NCHWToNHWC(input_tensor: Union[tensor_lib.Tensor, list[int]]) -> Union[tensor_lib.Tensor, list[int]]: new_axes = {4: [0, 2, 3, 1], 5: [0, 2, 3, 4, 1]} if isinstance(input_tensor, tensor_lib.Tensor): ndims = input_tensor.shape.ndims return array_ops.transpose(input_tensor, new_axes[ndims]) ...
Converts the input from the NCHW format to NHWC. Args: input_tensor: a 4- or 5-D tensor, or an array representing shape Returns: converted tensor or shape array
github-repos
def untar(file_path, extract_folder=None):
    """Simple tar archive extractor.

    Args:
        file_path: path to the tar file to be extracted.
        extract_folder: folder to which the files will be extracted;
            defaults to the parent directory of ``file_path``.
    """
    file_path = Path(file_path)
    if extract_folder is None:
        extract_folder = file_path.parent
    extract_folder = Path(extract_folder)
    # Context manager guarantees the archive is closed even if
    # extraction raises, unlike the manual open/close pairing.
    with tarfile.open(file_path) as tar:
        tar.extractall(extract_folder)
Simple tar archive extractor Args: file_path: path to the tar file to be extracted extract_folder: folder to which the files will be extracted
juraj-google-style
def _get_native_delegate_pointer(self):
    """Returns the native TfLiteDelegate pointer.

    It is not safe to copy this pointer because it needs to be freed.

    Returns:
        TfLiteDelegate *
    """
    return self._delegate_ptr
Returns the native TfLiteDelegate pointer. It is not safe to copy this pointer because it needs to be freed. Returns: TfLiteDelegate *
github-repos
def make_serializable(json):
    """Ensure the dictionary is JSON serializable.

    Keys whose values are not JSON serializable are dropped from the
    returned dictionary; the input is not modified.

    Args:
        json (dict): Dictionary to convert to serializable.

    Returns:
        dict: New dictionary with non JSON serializable values removed.
    """
    return {key: value for key, value in iteritems(json) if is_valid_json(value)}
This function ensures that the dictionary is JSON serializable. If not, keys with non-serializable values are removed from the return value. Args: json (dict): Dictionary to convert to serializable Returns: new_dict (dict): New dictionary with non JSON serializable values removed
juraj-google-style
def resolve_type(self, name: str | pytd_node.Node) -> pytd.Type: if isinstance(name, (pytd.GenericType, pytd.AnythingType)): return name if isinstance(name, pytd.NamedType): name = name.name assert isinstance(name, str), f'Expected str, got {name}' if name == 'nothing': return py...
Return the fully resolved name for an alias. Args: name: The name of the type or alias. Returns: A pytd.NamedType with the fully resolved and qualified name.
github-repos
def _handle_changed_fields(self, old_data): for link in self.get_links(is_set=False): fld_id = un_camel_id(link['field']) if not old_data or old_data.get(fld_id) != self._data[fld_id]: if self._data[fld_id]: linked_mdl = get...
Looks for changed relation fields between new and old data (before/after save). Creates back_link references for updated fields. Args: old_data: Object's data before save.
juraj-google-style
def GetDisplayName(self, file_entry=None): if (file_entry is None): file_entry = self._file_entry if (file_entry is None): raise ValueError('Missing file entry') path_spec = getattr(file_entry, 'path_spec', None) relative_path = path_helper.PathHelper.GetRelativePathForPathSpec(path_spec...
Retrieves the display name for a file entry. Args: file_entry (Optional[dfvfs.FileEntry]): file entry object, where None will return the display name of self._file_entry. Returns: str: human readable string that describes the path to the file entry. Raises: ValueError: if the file entry is missing.
codesearchnet
def _visualize(self, x_label, y_labels, ticks, overlay, draw, annotate, width=6, height=4): for label in y_labels: if (not all((isinstance(x, numbers.Real) for x in self[label]))): raise ValueError("The column '{0}' contains non-numerical values. A plot cannot be drawn for this column.".format(l...
Generic visualization that overlays or separates the draw function. Raises: ValueError: The Table contains non-numerical values in columns other than `column_for_categories`
codesearchnet
def expandEntitiesFromEmail(e): email = {} email['type'] = 'i3visio.email' email['value'] = e email['attributes'] = [] alias = {} alias['type'] = 'i3visio.alias' alias['value'] = e.split('@')[0] alias['attributes'] = [] domain = {} domain['type'] = 'i3visio.domain' domain['va...
Method that receives an email and creates linked entities Args: ----- e: Email to verify. Returns: -------- Three different values: email, alias and domain in a list.
codesearchnet
def ProduceExtractionWarning(self, message, path_spec=None): if not self._storage_writer: raise RuntimeError('Storage writer not set.') if not path_spec and self._file_entry: path_spec = self._file_entry.path_spec parser_chain = self.GetParserChain() warning = warnings.ExtractionWarni...
Produces an extraction warning. Args: message (str): message of the warning. path_spec (Optional[dfvfs.PathSpec]): path specification, where None will use the path specification of current file entry set in the mediator. Raises: RuntimeError: when storage writer is not set.
juraj-google-style
def from_config(cls, config):
    """Creates a quantizer from its config.

    This method is the reverse of `get_config`, capable of instantiating
    the same quantizer from the config dictionary.

    Args:
        config: A Python dictionary, typically the output of `get_config`.

    Returns:
        A quantizer instance.
    """
    return cls(**config)
Creates a quantizer from its config. This method is the reverse of `get_config`, capable of instantiating the same quantizer from the config dictionary. This method is used by Keras `model_to_estimator`, saving and loading models to HDF5 formats, Keras model cloning, some visualization utilities, and exporting models...
github-repos
def onWith(self, evnt, func):
    """A context manager which can be used to add a callback and remove it
    when using a ``with`` statement.

    Args:
        evnt (str): An event name
        func (function): A callback function to receive event tufo
    """
    self.on(evnt, func)
    try:
        yield self
    finally:
        # Always unregister the callback, even if the body raises.
        self.off(evnt, func)
A context manager which can be used to add a callback and remove it when using a ``with`` statement. Args: evnt (str): An event name func (function): A callback function to receive event tufo
juraj-google-style
def invert(self) -> Rigid:
    """Inverts the transformation.

    Returns:
        The inverse transformation.
    """
    inverse_rotation = self._rots.invert()
    rotated_translation = inverse_rotation.apply(self._trans)
    return Rigid(inverse_rotation, -1 * rotated_translation)
Inverts the transformation. Returns: The inverse transformation.
github-repos
def _add_strings_to_commastring(self, field, strings): allstringsadded = True for string in strings: if not self._add_string_to_commastring(field, string): allstringsadded = False return allstringsadded
Add a list of strings to a comma separated list of strings Args: field (str): Field containing comma separated list strings (List[str]): list of strings to add Returns: bool: True if all strings added or False if any already present.
juraj-google-style
def write_record(cls, file_handle, value):
    """Encode a value as a TFRecord.

    Args:
        file_handle: The file to write to.
        value: A bytes object representing content of the record.
    """
    # Record layout: length (u64 LE), masked CRC of length (u32 LE),
    # payload, masked CRC of payload (u32 LE).
    length_bytes = struct.pack(b'<Q', len(value))
    length_crc = struct.pack(b'<I', cls._masked_crc32c(length_bytes))
    payload_crc = struct.pack(b'<I', cls._masked_crc32c(value))
    file_handle.write(length_bytes + length_crc + value + payload_crc)
Encode a value as a TFRecord. Args: file_handle: The file to write to. value: A bytes object representing content of the record.
github-repos
def from_bytes(cls, bt):
    """Init a new object from bytes.

    Args:
        bt (bytes-like object): raw email as bytes-like object

    Returns:
        Instance of MailParser

    Raises:
        MailParserEnvironmentError: when running under Python 2, which
            cannot parse a message from bytes.
    """
    log.debug("Parsing email from bytes")
    if six.PY2:
        raise MailParserEnvironmentError(
            "Parsing from bytes is valid only for Python 3.x version")
    parsed_message = email.message_from_bytes(bt)
    return cls(parsed_message)
Init a new object from bytes. Args: bt (bytes-like object): raw email as bytes-like object Returns: Instance of MailParser
juraj-google-style
def add_router(self, path, router): if (self.strict_router_check and (not isinstance(router, Router))): raise TypeError(('Expected object of type Router, found %r' % type(router))) log.info('{} Adding router {} on path {}', id(self), router, path) self.middleware.add(path=path, func=router, method_m...
Adds a router to the list of routers Args: path (str or regex): The path on which the router binds router (growler.Router): The router which will respond to requests Raises: TypeError: If `strict_router_check` attribute is True and the router is not an instance of growler.Router.
codesearchnet