code
stringlengths
20
4.93k
docstring
stringlengths
33
1.27k
source
stringclasses
3 values
def cancel(self, consumers):
    """Cancel consumers that were previously started with consume.

    For each consumer, its queue is removed from the local registry and a
    cancel request is sent over the (possibly re-established) connection.

    Args:
        consumers (list of fedora_messaging.api.Consumer): The consumers
            to cancel.
    """
    for consumer in consumers:
        del self._consumers[consumer.queue]
        # Wait for an active connection before cancelling on the protocol.
        protocol = yield self.when_connected()
        yield protocol.cancel(consumer)
Cancel consumers that were previously started with consume. Args: consumers (list of fedora_messaging.api.Consumer): The consumers to cancel.
juraj-google-style
def read_float(self, little_endian=True):
    """Read 4 bytes as a float value from the stream.

    Args:
        little_endian (bool): specify the endianness. (Default) Little
            endian.

    Returns:
        float: the value read from the stream.
    """
    prefix = '<' if little_endian else '>'
    return self.unpack('%sf' % prefix, 4)
Read 4 bytes as a float value from the stream. Args: little_endian (bool): specify the endianness. (Default) Little endian. Returns: float: the value read from the stream.
codesearchnet
def _process_new(self, feed_item): lp = self.landing_page_dao.get(feed_item, required=True) feed_item[FieldMap.CAMPAIGN_LANDING_PAGE_ID] = lp['id'] feed_item[FieldMap.CAMPAIGN_LANDING_PAGE_NAME] = lp['name'] return {'advertiserId': feed_item.get(FieldMap.ADVERTISER_ID, None), 'name': feed_item.get(Field...
Creates a new campaign DCM object from a feed item representing a campaign from the Bulkdozer feed. This function simply creates the object to be inserted later by the BaseDAO object. Args: feed_item: Feed item representing the campaign from the Bulkdozer feed. Returns: A campaign object ready to be inserted in DCM ...
github-repos
def update_(self, sct_dict, conf_arg=True):
    """Update values of configuration section with dict.

    Args:
        sct_dict (dict): dict indexed with option names. Undefined options
            are discarded.
        conf_arg (bool): if True, only options that can be set in a config
            file are updated.
    """
    for opt, val in sct_dict.items():
        # Skip options this section does not define; when conf_arg is set,
        # also skip options not settable from a config file.
        if opt in self.def_ and (not conf_arg or self.def_[opt].conf_arg):
            self[opt] = val
Update values of configuration section with dict. Args: sct_dict (dict): dict indexed with option names. Undefined options are discarded. conf_arg (bool): if True, only options that can be set in a config file are updated.
codesearchnet
def destroy_record(client=None, found_record=None, record='', zone_id=''): LOG.debug('Found DNS record: %s', found_record) if (found_record['Name'].strip('.') == record): dns_json = get_template(template_file='destroy/destroy_dns.json.j2', record=json.dumps(found_record)) dns_dict = json.loads(d...
Destroy an individual DNS record. Args: client (botocore.client.Route53): Route 53 boto3 client. found_record (dict): Route 53 record set:: {'Name': 'unicorn.forrest.dev.example.com.', 'ResourceRecords': [{'Value': 'internal-unicornforrest-1777489395.us-east-1.elb.amazonaws.com' }], 'TTL': 60, 'Type': 'CNAME'} recor...
codesearchnet
def read_from(fpath, verbose=None, aslines=False, strict=True, n=None, errors='replace'): if (n is None): n = __READ_TAIL_N__ verbose = _rectify_verb_read(verbose) if verbose: print(('[util_io] * Reading text file: %r ' % util_path.tail(fpath, n=n))) try: if (not util_path.checkp...
r""" Reads text from a file. Automatically returns utf8. Args: fpath (str): file path aslines (bool): if True returns list of lines verbose (bool): verbosity flag Returns: str: text from fpath (this is unicode) Ignore: x = b'''/whaleshark_003_fors\xc3\xb8g.wmv" />\r\n''' ut.writeto('foo.txt', x) y = ut.readfrom('foo...
codesearchnet
def get_event(self, event_key):
    """Get event for the provided event key.

    Args:
        event_key: Event key for which event is to be determined.

    Returns:
        Event corresponding to the provided event key, or None if the key
        is not present in the datafile.
    """
    found = self.event_key_map.get(event_key)
    if not found:
        # Unknown key: log, report through the error handler, and bail out.
        self.logger.error('Event "%s" is not in datafile.' % event_key)
        self.error_handler.handle_error(
            exceptions.InvalidEventException(enums.Errors.INVALID_EVENT_KEY_ERROR))
        return None
    return found
Get event for the provided event key. Args: event_key: Event key for which event is to be determined. Returns: Event corresponding to the provided event key.
juraj-google-style
def machine_op(self, operation): operations = {'feed2start': 1, 'feedone': 2, 'cut': 3 } if operation in operations: self.send('^'+'O'+'P'+chr(operations[operation])) else: raise RuntimeEr...
Perform machine operations Args: operations: which operation you would like Returns: None Raises: RuntimeError: Invalid operation
juraj-google-style
def get_first_model_with_resource_name(cls, resource_name):
    """Get the first model corresponding to a resource_name.

    Args:
        resource_name: the resource name.

    Returns:
        The first matching model, or None when no model matches.
    """
    matches = cls.get_models_with_resource_name(resource_name)
    return matches[0] if matches else None
Get the first model corresponding to a resource_name Args: resource_name: the resource name
juraj-google-style
def config_init(config_file, json_config_obj, config_dirname=None): HOME = os.environ['HOME'] if config_dirname: dir_path = HOME + '/' + config_dirname if not os.path.exists(dir_path): os.mkdir(dir_path) os.chmod(dir_path, 0o755) else: dir_path = HOM...
Summary: Creates local config from JSON seed template Args: :config_file (str): filesystem object containing json dict of config values :json_config_obj (json): data to be written to config_file :config_dirname (str): dir name containing config_file Returns: TYPE: bool, Success | Failure
juraj-google-style
def profile_df(df):
    """Generate a profile of data in a dataframe.

    Args:
        df: the Pandas dataframe.

    Returns:
        An IPython HTML display object wrapping the pandas_profiling
        report. The 'bootstrap' CSS class is renamed to a nonexistent one
        so the report's styling does not clash with the host page's.
    """
    return IPython.core.display.HTML(
        pandas_profiling.ProfileReport(df).html.replace('bootstrap', 'nonexistent'))
Generate a profile of data in a dataframe. Args: df: the Pandas dataframe.
juraj-google-style
class EncodecEncoderOutput(ModelOutput):
    """Args:
        audio_codes (`torch.LongTensor` of shape `(batch_size, nb_chunks, chunk_length)`, *optional*):
            Discrete code embeddings computed using `model.encode`.
        audio_scales (`torch.Tensor` of shape `(batch_size, nb_chunks)`, *optional*):
            Scaling factor for each `audio_codes` input. This is used to
            unscale each chunk of audio at decode time.
    """
    # Discrete code embeddings computed using `model.encode`.
    audio_codes: Optional[torch.LongTensor] = None
    # Per-chunk scaling factors used to unscale the audio.
    audio_scales: Optional[torch.FloatTensor] = None
Args: audio_codes (`torch.LongTensor` of shape `(batch_size, nb_chunks, chunk_length)`, *optional*): Discret code embeddings computed using `model.encode`. audio_scales (`torch.Tensor` of shape `(batch_size, nb_chunks)`, *optional*): Scaling factor for each `audio_codes` input. This is used to unscale each chunk of au...
github-repos
def run_simulations(self, param_list, show_progress=True): if (self.runner is None): raise Exception('No runner was ever specified for this CampaignManager.') if (param_list == []): return desired_params = self.db.get_params() for p in param_list: passed = list(p.keys()) ...
Run several simulations specified by a list of parameter combinations. Note: this function does not verify whether we already have the required simulations in the database - it just runs all the parameter combinations that are specified in the list. Args: param_list (list): list of parameter combinations to execute. ...
codesearchnet
def build_hlo_module(root: testlib_base.HloInstruction, *instructions: testlib_base.HloInstruction, extra_computations: Sequence[testlib_base.HloComputation] | None=None) -> tuple[testlib_base.HloModule, testlib_base.BufferAssignment]: hlo_module = testlib_base.HloModule(root.name()) hlo_module.add_entry_comput...
Builds an HLO module from a root instruction and its dependencies. Args: root: The root instruction of the module. *instructions: The instructions that are dependencies of the root instruction. extra_computations: Any extra computations that should be added to the module. Returns: A tuple containing the HLO module an...
github-repos
def to_pil_image(image: Union[np.ndarray, 'PIL.Image.Image', 'torch.Tensor', 'tf.Tensor', 'jnp.ndarray'], do_rescale: Optional[bool]=None, image_mode: Optional[str]=None, input_data_format: Optional[Union[str, ChannelDimension]]=None) -> 'PIL.Image.Image': requires_backends(to_pil_image, ['vision']) if isinstan...
Converts `image` to a PIL Image. Optionally rescales it and puts the channel dimension back as the last axis if needed. Args: image (`PIL.Image.Image` or `numpy.ndarray` or `torch.Tensor` or `tf.Tensor`): The image to convert to the `PIL.Image` format. do_rescale (`bool`, *optional*): Whether or not to apply the scali...
github-repos
def decode(self, encoded): if self.enforce_reversible: self.enforce_reversible = False if self.encode(self.decode(encoded)) != encoded: raise ValueError('Decoding is not reversible for "%s"' % encoded) self.enforce_reversible = True return en...
Decodes an object. Args: encoded (object): Encoded object. Returns: object: Object decoded.
juraj-google-style
def _set_update(self): try: self._updateStack = False stack_name = self._config.get('environment', {}).get('stack_name', None) response = self._cloudFormation.describe_stacks(StackName=stack_name) stack = response['Stacks'][0] if (stack['StackStatus'] == 'ROLLBACK_COMPLETE'):...
Determine if we are creating a new stack or updating and existing one. The update member is set as you would expect at the end of this query. Args: None Returns: True
codesearchnet
def __init__( self, deconvolution_layer_list, opt_params=None, learning_rate=1e-05, verbose_mode=False ): for deconvolution_layer in deconvolution_layer_list: if isinstance(deconvolution_layer, DeconvolutionLayer) is False: raise T...
Init. Args: deconvolution_layer_list: `list` of `DeconvolutionLayer`. opt_params: is-a `OptParams`. If `None`, this value will be `Adam`. learning_rate: Learning rate. verbose_mode: Verbose mode or not.
juraj-google-style
def indices2nodes(self, indices):
    """Return |Nodes| for these indices.

    Args:
        indices (tuple[int]): The indices in question.

    Returns:
        tuple[Node]: The |Node| objects corresponding to these indices.

    Raises:
        ValueError: If requested indices are not in the subsystem.
    """
    if not set(indices).issubset(self.node_indices):
        raise ValueError(
            "`indices` must be a subset of the Subsystem's indices.")
    return tuple(self._index2node[i] for i in indices)
Return |Nodes| for these indices. Args: indices (tuple[int]): The indices in question. Returns: tuple[Node]: The |Node| objects corresponding to these indices. Raises: ValueError: If requested indices are not in the subsystem.
juraj-google-style
def enable_tracing(self): if (not self.connected): raise HardwareError('Cannot enable tracing if we are not in a connected state') if (self._traces is not None): _clear_queue(self._traces) return self._traces self._traces = queue.Queue() self._loop.run_coroutine(self.adapter.open...
Open the tracing interface and accumulate traces in a queue. This method is safe to call multiple times in a single device connection. There is no way to check if the tracing interface is opened or to close it once it is opened (apart from disconnecting from the device). The first time this method is called, it will ...
codesearchnet
def load_local(self, state, name):
    """Called when a local is loaded onto the stack.

    Uses the name to retrieve the value from the current locals().

    Args:
        state: The current VM state.
        name: Name of the local.

    Returns:
        A tuple of the state and the value (cfg.Variable).

    Raises:
        KeyError: If strict undefined checks are enabled (Python 3.10+)
            and the local has no binding in the current block.
    """
    var = self.block_env.get_local(self.frame.current_block, name)
    # Under strict checks on 3.10+, an unbound local is reported as
    # undefined instead of falling through to the locals dict.
    if self.ctx.options.strict_undefined_checks and self.ctx.python_version >= (3, 10) and (not var):
        raise KeyError()
    return self.load_from(state, self.frame.f_locals, name)
Called when a local is loaded onto the stack. Uses the name to retrieve the value from the current locals(). Args: state: The current VM state. name: Name of the local Returns: A tuple of the state and the value (cfg.Variable) Raises: KeyError: If the name is determined to be undefined
github-repos
def AddDescriptor(self, desc):
    """Adds a Descriptor to the pool, non-recursively.

    If the Descriptor contains nested messages or enums, the caller must
    explicitly register them. This method also registers the
    FileDescriptor associated with the message.

    Args:
        desc: A Descriptor.

    Raises:
        TypeError: If desc is not a descriptor.Descriptor instance.
    """
    if (not isinstance(desc, descriptor.Descriptor)):
        raise TypeError('Expected instance of descriptor.Descriptor.')
    self._descriptors[desc.full_name] = desc
    # Also register the file the message was declared in.
    self._AddFileDescriptor(desc.file)
Adds a Descriptor to the pool, non-recursively. If the Descriptor contains nested messages or enums, the caller must explicitly register them. This method also registers the FileDescriptor associated with the message. Args: desc: A Descriptor.
codesearchnet
def vgg_layer(inputs, nout, kernel_size=3, activation=tf.nn.leaky_relu, padding='SAME', is_training=True, has_batchnorm=False, scope=None): with tf.variable_scope(scope): net = tfl.conv2d(inputs, nout, kernel_size=kernel_size, padding=padding, activation=None, name='conv') if has_batchnorm: ...
A layer of VGG network with batch norm. Args: inputs: image tensor nout: number of output channels kernel_size: size of the kernel activation: activation function padding: padding of the image is_training: whether it is training mode or not has_batchnorm: whether batchnorm is applied or not scope: variable scope of th...
codesearchnet
def put_archive(self, path, data):
    """Insert a file or folder in this container using a tar archive as
    source.

    Args:
        path (str): Path inside the container where the file(s) will be
            extracted. Must exist.
        data (bytes): tar data to be extracted.

    Returns:
        (bool): True if the call succeeds.

    Raises:
        :py:class:`~docker.errors.APIError` If an error occurs.
    """
    return self.client.api.put_archive(self.id, path, data)
Insert a file or folder in this container using a tar archive as source. Args: path (str): Path inside the container where the file(s) will be extracted. Must exist. data (bytes): tar data to be extracted Returns: (bool): True if the call succeeds. Raises: :py:class:`~docker.errors.APIError` If an error occurs.
juraj-google-style
def _set_dacl_inheritance(path, objectType, inheritance=True, copy=True, clear=False): ret = {'result': False, 'comment': '', 'changes': {}} if path: try: sd = win32security.GetNamedSecurityInfo(path, objectType, win32security.DACL_SECURITY_INFORMATION) ...
helper function to set the inheritance Args: path (str): The path to the object objectType (str): The type of object inheritance (bool): True enables inheritance, False disables copy (bool): Copy inherited ACEs to the DACL before disabling inheritance clear (bool): Remove non-inherited ACEs from the DACL
juraj-google-style
def get_index(fn, cols, names, sep): if (not has_index(fn)): return generate_index(fn, cols, names, sep) file_index = read_index(get_index_fn(fn)) if (len((set(names) - (set(file_index.columns) - {'seek'}))) != 0): raise ValueError('{}: missing index columns: reindex'.format(fn)) if ('se...
Restores the index for a given file. Args: fn (str): the name of the file. cols (list): a list containing column to keep (as int). names (list): the name corresponding to the column to keep (as str). sep (str): the field separator. Returns: pandas.DataFrame: the index. If the index doesn't exist for the file, it is ...
codesearchnet
def deploy(target): if (not os.getenv(CIRCLECI_ENV_VAR)): raise EnvironmentError('Must be on CircleCI to run this script') current_branch = os.getenv('CIRCLE_BRANCH') if ((target == 'PROD') and (current_branch != 'master')): raise EnvironmentError('Refusing to deploy to production from branc...
Deploys the package and documentation. Proceeds in the following steps: 1. Ensures proper environment variables are set and checks that we are on Circle CI 2. Tags the repository with the new version 3. Creates a standard distribution and a wheel 4. Updates version.py to have the proper version 5. Commits the ChangeL...
codesearchnet
def __init__(self, date=None, year=None, season=None, day_of_season=None, *args, **kwargs): if year is not None and season is not None and \ day_of_season is not None: date = (datetime.datetime(year=year - 1166, month=1, day=1) + datetime.tim...
Discordian date setup and mangling. Note: year, season and day_of_season are all required if any are used Args: date: optional date object with a timetuple method, or uses today year: optional integer discordian year to create from season: optional integer discodian season to create from day_of_season: optional int d...
juraj-google-style
def get_geno_marker(self, marker, return_index=False): if (self._mode != 'r'): raise UnsupportedOperation("not available in 'w' mode") if (marker not in self._bim.index): raise ValueError('{}: marker not in BIM'.format(marker)) seek_index = self._bim.loc[(marker, 'i')] self.seek(seek_ind...
Gets the genotypes for a given marker. Args: marker (str): The name of the marker. return_index (bool): Wether to return the marker's index or not. Returns: numpy.ndarray: The genotypes of the marker (additive format).
codesearchnet
def make_directory_writable(dirname):
    """Makes directory readable and writable by everybody.

    Files written from inside a Docker container are owned by root with
    restricted permissions; running `chmod -R a+rwx` from a busybox
    container makes them accessible outside the container.

    Args:
        dirname: name of the directory.

    Returns:
        True if operation was successful.
    """
    retval = shell_call(['docker', 'run', '-v', '{0}:/output_dir'.format(dirname),
                         'busybox:1.27.2', 'chmod', '-R', 'a+rwx', '/output_dir'])
    if (not retval):
        logging.error('Failed to change permissions on directory: %s', dirname)
    return retval
Makes directory readable and writable by everybody. Args: dirname: name of the directory Returns: True if operation was successfull If you run something inside Docker container and it writes files, then these files will be written as root user with restricted permissions. So to be able to read/modify these files out...
codesearchnet
def bind_sockets(address, port): ss = netutil.bind_sockets(port=port or 0, address=address) assert len(ss) ports = {s.getsockname()[1] for s in ss} assert len(ports) == 1, "Multiple ports assigned??" actual_port = ports.pop() if port: assert actual_port == port return ss, actual...
Bind a socket to a port on an address. Args: address (str) : An address to bind a port on, e.g. ``"localhost"`` port (int) : A port number to bind. Pass 0 to have the OS automatically choose a free port. This function returns a 2-tuple with the new socket as the first element, and the port that was bound as the sec...
juraj-google-style
def getitem_row_array(self, key): key = list(key) def getitem(df, internal_indices=[]): return df.iloc[internal_indices] result = self.data.apply_func_to_select_indices( 1, getitem, key, keep_remaining=False ) new_inde...
Get row data for target labels. Args: key: Target numeric indices by which to retrieve data. Returns: A new QueryCompiler.
juraj-google-style
def load_file(file_path, credentials=None):
    """Load a file from either local disk or GCS.

    Args:
        file_path: The target file path, which should have the prefix
            'gs://' if it is to be loaded from GCS.
        credentials: Optional credential to be used to load the file from
            GCS.

    Returns:
        A Python file object if loading from local disk, or the object
        returned by `_load_file_from_gcs` for GCS paths.
    """
    # Fix: the GCS-prefix string literal was truncated/unterminated in the
    # original ('gs:); the documented prefix is 'gs://'.
    if file_path.startswith('gs://'):
        return _load_file_from_gcs(file_path, credentials)
    return open(file_path, 'r')
Load a file from either local or gcs. Args: file_path: The target file path, which should have the prefix 'gs://' if to be loaded from gcs. credentials: Optional credential to be used to load the file from gcs. Returns: A python File object if loading file from local or a StringIO object if loading from gcs.
juraj-google-style
def format_filter_value(self, element, value):
    """Calls the specific function to format value, depending on the given
    element.

    Arguments:
        element (string): The element of the VT to be formatted.
        value (dictionary): The element value.

    Returns:
        Returns a formatted value.
    """
    formatter = self.allowed_filter.get(element)
    return formatter(value)
Calls the specific function to format value, depending on the given element. Arguments: element (string): The element of the VT to be formatted. value (dictionary): The element value. Returns: Returns a formatted value.
juraj-google-style
def loads(s, single=False):
    """Deserialize Eds string representations.

    Args:
        s (str): Eds string.
        single (bool): if `True`, only return the first deserialized
            object.

    Returns:
        a generator of Eds objects (unless the *single* option is `True`).
    """
    results = deserialize(s)
    return next(results) if single else results
Deserialize :class:`Eds` string representations Args: s (str): Eds string single (bool): if `True`, only return the first Eds object Returns: a generator of :class:`Eds` objects (unless the *single* option is `True`)
juraj-google-style
def del_node(self, node):
    """Removes a **node object** from the ``DictGraph``.

    The node is first removed from every neighbour mapping it appears in,
    then its own entry is popped from the graph.

    Arguments:
        - node(``object``) **node object** to be removed. Any hashable
          Python ``object``.

    Returns:
        The truthiness of the removed node's own entry (e.g. ``True`` if
        it still mapped to any neighbours).

    Raises:
        ``KeyError``: if the **node object** is not in the ``DictGraph``.
    """
    for neighbours in self.values():
        if node in neighbours:
            neighbours.pop(node)
    return bool(self.pop(node))
Removes a **node object** from the ``DictGraph``. Returns ``True`` if a **node object** has been removed. If the **node object** is not in the ``DictGraph`` raises a ``KeyError``. Arguments: - node(``object``) **node object** to be removed. Any hashable Python ``object``.
juraj-google-style
def _write(self, file_prefix, options=None): if options and options.experimental_enable_async_checkpoint: self._checkpoint_options = options if checkpoint_context.in_preemption_save_context(): if self._async_checkpointer_impl is not None: self._async_checkpointer_impl.syn...
Internal method that implements Checkpoint.write(). Args: file_prefix: A prefix to use for the checkpoint filenames (/path/to/directory/and_a_prefix). options: Optional `tf.train.CheckpointOptions` object. Returns: The full path to the checkpoint (i.e. `file_prefix`).
github-repos
def _query(cls, *args, **kwds): if 'distinct' in kwds: if 'group_by' in kwds: raise TypeError( 'cannot use distinct= and group_by= at the same time') projection = kwds.get('projection') if not projection: raise TypeError( 'cannot use distinct= with...
Create a Query object for this class. Args: distinct: Optional bool, short hand for group_by = projection. *args: Used to apply an initial filter **kwds: are passed to the Query() constructor. Returns: A Query object.
juraj-google-style
def ShlexSplit(string):
    """A wrapper for `shlex.split` that works with unicode objects.

    On Python 2, `shlex.split` cannot handle unicode input directly, so
    the string is round-tripped through UTF-8 bytes.

    Args:
        string: A unicode string to split.

    Returns:
        A list of unicode strings representing parts of the input string.
    """
    precondition.AssertType(string, Text)
    if PY2:
        string = string.encode('utf-8')
    parts = shlex.split(string)
    if PY2:
        # Decode each byte part back to unicode for a consistent return type.
        parts = [part.decode('utf-8') for part in parts]
    return parts
A wrapper for `shlex.split` that works with unicode objects. Args: string: A unicode string to split. Returns: A list of unicode strings representing parts of the input string.
codesearchnet
def __init__(self, autoconnect=True, password=None, db=0, **connection_kwargs): if 'read_callback' in connection_kwargs or \ 'close_callback' in connection_kwargs: raise Exception("read_callback and close_callback are not allowed " ...
Constructor. Args: autoconnect (boolean): True if the client is in autoconnect mode (and in autoreconnection mode) (default True). password (string): the password to authenticate with. db (int): database number. **connection_kwargs: :class:`Connection` object kwargs.
juraj-google-style
def unzip_file(source_file, dest_dir=None, mkdir=False): if (dest_dir is None): (dest_dir, fname) = os.path.split(source_file) elif (not os.path.isdir(dest_dir)): if mkdir: preparedir(dest_dir) else: created = preparedir(dest_dir, False) if (not create...
Unzip a compressed file. Args: source_file: Full path to a valid compressed file (e.g. c:/ladybug/testPts.zip) dest_dir: Target folder to extract to (e.g. c:/ladybug). Default is set to the same directory as the source file. mkdir: Set to True to create the directory if doesn't exist (Default: False)
codesearchnet
def beautify(self, string): if not string: return string string, phrases = self.parse(string) if not phrases: return string if not self.positional and not self.always: raise errors.ArgumentError("Found phrases, but no styles " "were supplied!") return self.stringify(string, phr...
Wraps together all actions needed to beautify a string, i.e. parse the string and then stringify the phrases (replace tags with formatting codes). Arguments: string (str): The string to beautify/parse. Returns: The parsed, stringified and ultimately beautified string. Raises: errors.ArgumentError if phrases were fou...
juraj-google-style
def Validate(self, sections=None, parameters=None): if isinstance(sections, string_types): sections = [sections] if (sections is None): sections = [] if (parameters is None): parameters = [] validation_errors = {} for section in sections: for descriptor in self.type_i...
Validate sections or individual parameters. The GRR configuration file contains several sections, used by different components. Many of these components don't care about other sections. This method allows a component to declare in advance what sections and parameters it cares about, and have these validated. Args: se...
codesearchnet
def _generate_latex_source(circuit, filename=None, scale=0.7, style=None, reverse_bits=False, plot_barriers=True, justify=None): (qregs, cregs, ops) = utils._get_layered_instructions(circuit, reverse_bits=reverse_bits, justify=justify) qcimg = _latex.QCircuitImage(qregs, cregs, ops, scale, style=style, plot_bar...
Convert QuantumCircuit to LaTeX string. Args: circuit (QuantumCircuit): input circuit scale (float): image scaling filename (str): optional filename to write latex style (dict or str): dictionary of style or file name of style file reverse_bits (bool): When set to True reverse the bit order inside registers for the ou...
codesearchnet
def generate_stack_policy_args(stack_policy=None): args = {} if stack_policy: logger.debug("Stack has a stack policy") if stack_policy.url: raise NotImplementedError else: args["St...
Converts a stack policy object into keyword args. Args: stack_policy (:class:`stacker.providers.base.Template`): A template object representing a stack policy. Returns: dict: A dictionary of keyword arguments to be used elsewhere.
juraj-google-style
def _find_channel_index(data_format): for (i, c) in enumerate(data_format): if (c == 'C'): return i raise ValueError('data_format requires a channel dimension. Got: {}'.format(data_format))
Returns the index of the channel dimension. Args: data_format: A string of characters corresponding to Tensor dimensionality. Returns: channel_index: An integer indicating the channel dimension. Raises: ValueError: If no channel dimension was found.
codesearchnet
def format(self, *args, **kwargs):
    """Format the string representation of the editor.

    Args:
        inplace (bool): If True, overwrite editor's contents with
            formatted contents (and return None); otherwise return the
            formatted string.
    """
    inplace = kwargs.pop("inplace", False)
    formatted = str(self).format(*args, **kwargs)
    if not inplace:
        return formatted
    self._lines = formatted.splitlines()
Format the string representation of the editor. Args: inplace (bool): If True, overwrite editor's contents with formatted contents
juraj-google-style
def get_snmp_configuration(self):
    """Gets the SNMP configuration for a logical interconnect.

    Returns:
        dict: SNMP configuration.
    """
    uri = '{}{}'.format(self.data['uri'], self.SNMP_CONFIGURATION_PATH)
    return self._helper.do_get(uri)
Gets the SNMP configuration for a logical interconnect. Returns: dict: SNMP configuration.
codesearchnet
def noisy_moment(self, moment: 'cirq.Moment', system_qubits: Sequence['cirq.Qid']) -> 'cirq.OP_TREE': if (not hasattr(self.noisy_moments, '_not_overridden')): return self.noisy_moments([moment], system_qubits) if (not hasattr(self.noisy_operation, '_not_overridden')): return [self.noisy_operatio...
Adds noise to the operations from a moment. Args: moment: The moment to add noise to. system_qubits: A list of all qubits in the system. Returns: An OP_TREE corresponding to the noisy operations for the moment.
codesearchnet
def assert_no_new_tensors(f: _F) -> _F: def decorator(self: 'TensorFlowTestCase', **kwargs): def _is_tensorflow_object(obj) -> bool: try: return isinstance(obj, (tensor_lib.Tensor, variables.Variable, tensor_shape.Dimension, tensor_shape.TensorShape)) excep...
Decorator for asserting that no new Tensors persist after a test. Mainly useful for checking that code using the Python C API has correctly manipulated reference counts. Clears the caches that it knows about, runs the garbage collector, then checks that there are no Tensor or Tensor-like objects still around. This in...
github-repos
def headers_present(self, headers):
    """Defines a list of headers that must be present in the outgoing
    request in order to satisfy the matcher, no matter what value the
    headers host.

    Header keys are case insensitive.

    Arguments:
        headers (list|tuple): header keys to match.
    """
    # Any value satisfies the match: every key maps to a catch-all pattern.
    wildcard = re.compile('(.*)')
    expectations = {name: wildcard for name in headers}
    self.add_matcher(matcher('HeadersMatcher', expectations))
Defines a list of headers that must be present in the outgoing request in order to satisfy the matcher, no matter what value the headers hosts. Header keys are case insensitive. Arguments: headers (list|tuple): header keys to match. Returns: self: current Mock instance. Example:: (pook.get('server.com/api') .heade...
juraj-google-style
def create_row_token_type_ids_from_sequences(self, query_ids: List[int], table_values: List[TableValue]) -> List[int]:
    """Creates the row token type IDs according to the query token IDs and
    a list of table values.

    Args:
        query_ids (`List[int]`): list of token IDs corresponding to the ID.
        table_values (`List[TableValue]`): list of table values, which are
            named tuples containing the token value, the column ID and the
            row ID of said token.

    Returns:
        `List[int]`: zeros for the query portion (including the two
        surrounding special-token positions) followed by each table
        token's row ID.
    """
    row_ids = [value[2] for value in table_values] if table_values else []
    # One leading special token + query tokens + one separator get type 0.
    return [0] * (len(query_ids) + 2) + row_ids
Creates the row token type IDs according to the query token IDs and a list of table values. Args: query_ids (`List[int]`): list of token IDs corresponding to the ID. table_values (`List[TableValue]`): lift of table values, which are named tuples containing the token value, the column ID and the row ID of said token. ...
github-repos
def from_string(string_data, file_format="xyz"):
    """Uses OpenBabel to read a molecule from a string in all supported
    formats.

    Args:
        string_data: String containing molecule data.
        file_format: String specifying any OpenBabel supported formats.

    Returns:
        BabelMolAdaptor object.
    """
    mols = pb.readstring(str(file_format), str(string_data))
    return BabelMolAdaptor(mols.OBMol)
Uses OpenBabel to read a molecule from a string in all supported formats. Args: string_data: String containing molecule data. file_format: String specifying any OpenBabel supported formats. Returns: BabelMolAdaptor object
juraj-google-style
def detect_builtin_shadowing_definitions(self, contract): result = [] for function in contract.functions: if (function.contract == contract): if self.is_builtin_symbol(function.name): result.append((self.SHADOWING_FUNCTION, function, None)) result += self.detect_b...
Detects if functions, access modifiers, events, state variables, or local variables are named after built-in symbols. Any such definitions are returned in a list. Returns: list of tuple: (type, definition, [local variable parent])
codesearchnet
def _fit(self, col): column = col[self.col_name].replace({np.nan: np.inf}) frequencies = column.groupby(column).count().rename({np.inf: None}).to_dict() start = 0 end = 0 num_vals = len(col) for val in frequencies: prob = frequencies[val] /...
Create a map of the empirical probability for each category. Args: col(pandas.DataFrame): Data to transform.
juraj-google-style
def multi(self, **kwargs):
    """Search the movie, tv show and person collections with a single
    query.

    Args:
        query: CGI escaped string.
        page: (optional) Minimum value of 1. Expected value is an integer.
        language: (optional) ISO 639-1 code.
        include_adult: (optional) Toggle the inclusion of adult titles.
            Expected value is True or False.

    Returns:
        A dict representation of the JSON returned from the API; the
        response fields are also set as attributes on this instance.
    """
    path = self._get_path('multi')
    response = self._GET(path, kwargs)
    # Mirror response fields onto this object for attribute-style access.
    self._set_attrs_to_values(response)
    return response
Search the movie, tv show and person collections with a single query. Args: query: CGI escpaed string. page: (optional) Minimum value of 1. Expected value is an integer. language: (optional) ISO 639-1 code. include_adult: (optional) Toggle the inclusion of adult titles. Expected value is True or False. Returns: A dic...
juraj-google-style
def conv(self, input_tensor: core.Tensor) -> Mapping[str, core.Tensor]: scale = [1.0] * self.out_channel_size offset = [0.5] * self.out_channel_size mean, variance = (scale, offset) out = nn_ops.conv2d(input_tensor, self.filters, strides=strides, dilations=dilations, padding=padding, data_format='NHWC')...
Performs a 2D convolution operation. Args: input_tensor: Input tensor to perform convolution on. Returns: A map of: output key -> output result.
github-repos
def getWhoisInfo(domain): new = [] try: emails = {} emails['type'] = 'i3visio.alias' emails['value'] = str(domain.split('.')[0]) emails['attributes'] = [] new.append(emails) except: pass info = whois.whois(domain) if (info.status == None): rais...
Method that trie to recover the whois info from a domain. Args: ----- domain: The domain to verify. Returns: -------- dict: A dictionary containing the result as an i3visio entity with its `value`, `type` and `attributes`.
codesearchnet
def func(self, w, *args): x0 = args[0] x1 = args[1] n0 = x0.shape[0] n1 = x1.shape[0] n = max(n0, n1) * 10 idx0 = np.random.choice(range(n0), size=n) idx1 = np.random.choice(range(n1), size=n) b0 = np.ones((n0, 1)) b1 ...
Return the costs of the neural network for predictions. Args: w (array of float): weight vectors such that: w[:-h1] -- weights between the input and h layers w[-h1:] -- weights between the h and output layers args: features (args[0]) and target (args[1]) Returns: combined cost of RMSE, L1, and L2 regularization
juraj-google-style
def get_gains_losses(changes): res = {'gains': [], 'losses': []} for change in changes: if change > 0: res['gains'].append(change) else: res['losses'].append(change * -1) logger.debug('Gains: {0}'.format(res['gains'])) logger.debug('Losses: {0}'.format(res['l...
Categorizes changes into gains and losses Args: changes: List of floats of price changes between entries in JSON. Returns: Dict of changes with keys 'gains' and 'losses'. All values are positive.
juraj-google-style
def create_pipeline_stage(self, pipeline_key, name, **kwargs): if not (pipeline_key and name): return requests.codes.bad_request, None uri = '/'.join([ self.api_uri, self.pipelines_suffix, pipeline_key, self.stages_suffix]) kwargs.update({'name':name}) new_box = StreakStage...
Creates a pipeline stage with the provided attributes. Args: name required name string kwargs {..} see StreakStage object for details return (status code, stage dict)
juraj-google-style
def _format_subscripts(self, subscripts, value, limit=10, indent=2): lines = [] subscripts = np.transpose(subscripts) prefix = ' ' * indent if np.ndim(value) == 0: return [prefix + '[0] : ' + str(value)] for subscript in itertools.islice(subscripts, limit): lines.append(prefix + str(...
Generate a summary of ndarray subscripts as a list of str. If limit == N, this method will print up to the first N subscripts on separate lines. A line of ellipses (...) will be appended at the end if the number of subscripts exceeds N. Args: subscripts: The tensor (np.ndarray) subscripts, of the same format as np_wh...
github-repos
def temporal_padding(x, padding=(1, 1)):
    """Pads the middle dimension of a 3D tensor.

    Args:
        x: Tensor or variable.
        padding: Tuple of 2 integers, how many zeros to add at the start
            and end of dim 1.

    Returns:
        A padded 3D tensor.
    """
    assert len(padding) == 2
    left, right = padding
    # Pad only dim 1; dims 0 and 2 are left untouched.
    paddings = [[0, 0], [left, right], [0, 0]]
    return array_ops.pad(x, paddings)
Pads the middle dimension of a 3D tensor. Args: x: Tensor or variable. padding: Tuple of 2 integers, how many zeros to add at the start and end of dim 1. Returns: A padded 3D tensor.
github-repos
def nodes(self, device_name=None): if not self._debug_graphs: raise LookupError('No partition graphs have been loaded.') if device_name is None: nodes = [] for device_name in self._debug_graphs: nodes.extend(self._debug_graphs[device_name].node_inputs.keys()) return n...
Get a list of all nodes from the partition graphs. Args: device_name: (`str`) name of device. If None, all nodes from all available devices will be included. Returns: All nodes' names, as a list of str. Raises: LookupError: If no partition graphs have been loaded. ValueError: If specified node name does not exist.
github-repos
def detect_timezone(): if (sys.platform == 'win32'): tz = _detect_timezone_windows() if (tz is not None): return tz tz = _detect_timezone_environ() if (tz is not None): return tz tz = _detect_timezone_etc_timezone() if (tz is not None): return tz tz = ...
Try and detect the timezone that Python is currently running in. We have a bunch of different methods for trying to figure this out (listed in order they are attempted). * In windows, use win32timezone.TimeZoneInfo.local() * Try TZ environment variable. * Try and find /etc/timezone file (with timezone name). * Try and...
codesearchnet
def convert_mass_to_atomic_fractions(mass_fractions): atomic_fractions = {} for z, mass_fraction in mass_fractions.items(): atomic_fractions[z] = mass_fraction / pyxray.element_atomic_weight(z) total_fraction = sum(atomic_fractions.values()) for z, fraction in atomic_fractions.items(): ...
Converts a mass fraction :class:`dict` to an atomic fraction :class:`dict`. Args: mass_fractions (dict): mass fraction :class:`dict`. The composition is specified by a dictionary. The keys are atomic numbers and the values weight fractions. No wildcard are accepted.
juraj-google-style
def parse_date(date_string, ignoretz=True):
    """Parse a string as a date, returning ``None`` on failure.

    >>> parse_date('2017-08-15T18:24:31')
    datetime.datetime(2017, 8, 15, 18, 24, 31)

    Args:
        date_string (`str`): Date in string format to parse.
        ignoretz (`bool`): If set ``True``, ignore time zones and return a
            naive :class:`datetime` object.

    Returns:
        datetime.datetime or None: The parsed date, or ``None`` if
        ``date_string`` could not be parsed.
    """
    try:
        return parser.parse(date_string, ignoretz=ignoretz)
    except (TypeError, ValueError, OverflowError):
        # dateutil raises ValueError for unparseable strings and
        # OverflowError for out-of-range date components; TypeError covers
        # non-string input. The documented contract is to return None for
        # any parse failure, so all three are swallowed here.
        return None
Parse a string as a date. If the string fails to parse, `None` will be returned instead >>> parse_date('2017-08-15T18:24:31') datetime.datetime(2017, 8, 15, 18, 24, 31) Args: date_string (`str`): Date in string format to parse ignoretz (`bool`): If set ``True``, ignore time zones and return a naive :class:`datetime` ...
codesearchnet
def _ReadTableHeader(self, file_object, table_header_offset): data_type_map = self._GetDataTypeMap('keychain_table_header') table_header, _ = self._ReadStructureFromFileObject( file_object, table_header_offset, data_type_map) return table_header
Reads the table header. Args: file_object (file): file-like object. table_header_offset (int): offset of the tables header relative to the start of the file. Returns: keychain_table_header: table header. Raises: ParseError: if the table header cannot be read.
juraj-google-style
def argmin(input_, key=None): if isinstance(input, dict): return list(input.keys())[argmin(list(input.values()), key=key)] else: if key is None: def _key(item): return item[1] else: def _key(item): ...
Returns the index / key of the item with the smallest value. Args: input_ (dict or list): the sequence or mapping to search. key (callable, optional): function extracting the comparison value from each item. Note: a[argmin(a, key=key)] == min(a, key=key)
juraj-google-style
def from_config(cls, config):
    """Creates a layer from its config.

    This method is the reverse of `get_config`: it instantiates the same
    layer from the config dictionary. It does not handle layer connectivity
    (handled by Network), nor weights (handled by `set_weights`).

    Args:
        cls: The layer class to instantiate.
        config: A Python dictionary, typically the output of `get_config`.

    Returns:
        A layer instance.
    """
    # Every config entry becomes a constructor keyword argument.
    instance = cls(**config)
    return instance
Creates a layer from its config. This method is the reverse of `get_config`, capable of instantiating the same layer from the config dictionary. It does not handle layer connectivity (handled by Network), nor weights (handled by `set_weights`). Args: config: A Python dictionary, typically the output of get_config. R...
github-repos
def bitwise_xor(x, y):
    """Compute the bit-wise XOR of two arrays element-wise.

    Computes the bit-wise XOR of the underlying binary representation of
    the integers in the input arrays. This ufunc implements the C/Python
    operator `^`.

    Args:
        x: Input integer tensor.
        y: Input integer tensor.

    Returns:
        Result tensor.
    """
    # Eager path: dispatch straight to the backend implementation.
    if not any_symbolic_tensors((x, y)):
        return backend.numpy.bitwise_xor(x, y)
    # Symbolic path: build a graph op instead of computing immediately.
    return BitwiseXor().symbolic_call(x, y)
Compute the bit-wise XOR of two arrays element-wise. Computes the bit-wise XOR of the underlying binary representation of the integers in the input arrays. This ufunc implements the C/Python operator `^`. Args: x: Input integer tensor. y: Input integer tensor. Returns: Result tensor.
github-repos
def insert(self, entity_id, property_uri, value): if (not entity_id.startswith('http')): entity_uri = urllib.parse.urljoin(self.base_url, entity_id) else: entity_uri = entity_id if entity_uri.endswith('/'): entity_uri = entity_uri[:(- 1)] if (not entity_id.endswith('fcr:metadata'...
Method inserts a new entity's property in Fedora4 Repository Args: entity_id(string): Unique ID of Fedora object property_uri(string): URI of property value: Value of the property, can be literal or URI reference Returns: boolean: True if successful changed in Fedora, False otherwise
codesearchnet
def setKeepAliveTimeOut(self, iTimeOut): print '%s call setKeepAliveTimeOut' % self.port print iTimeOut try: cmd = WPANCTL_CMD + 'setprop NCP:SleepyPollInterval %s' % str(iTimeOut*1000) print cmd return self.__sendCommand(cmd)[0] != 'Fail' exc...
Set the keep-alive timeout for the device (deprecated); this also sets the SED polling rate. Args: iTimeOut: data poll period for the sleepy end device Returns: True: successfully set the data poll period for the SED False: failed to set the data poll period for the SED
juraj-google-style
def _normalize_angle(angle, range, step): while angle <= range[0]: angle += step while angle >= range[1]: angle -= step return angle
Finds an angle that matches the given one modulo step. Increments and decrements the given value with a given step. Args: range: a 2-tuple of min and max target values. step: tuning step. Returns: Normalized value within a given range.
juraj-google-style
def register_agent(self, host, sweep_id=None, project_name=None): mutation = gql() if project_name is None: project_name = self.settings('project') def no_retry_400(e): if not isinstance(e, requests.HTTPError): return True if...
Register a new agent Args: host (str): hostname persistent (bool): long running or oneoff sweep (str): sweep id project_name: (str): model that contains sweep
juraj-google-style
def _next_file(self): while True: if self._bucket_iter: try: return self._bucket_iter.next().filename except StopIteration: self._bucket_iter = None self._bucket = None if (self._index >= len(self._filenames)): retur...
Find next filename. self._filenames may need to be expanded via listbucket. Returns: None if no more file is left. Filename otherwise.
codesearchnet
def CalculateForecastStats(matched, available, possible=None): if (matched > 0): available_percent = ((float(available) / matched) * 100.0) else: available_percent = 0 if (possible is not None): if (matched > 0): possible_percent = ((possible / float(matched)) * 100.0) ...
Calculate forecast percentage stats. Args: matched: The number of matched impressions. available: The number of available impressions. possible: The optional number of possible impressions. Returns: The percentage of impressions that are available and possible.
codesearchnet
def stats(self):
    """Return statistics calculated over all samples of all utterances in the corpus.

    Returns:
        DataStats: A DataStats object containing statistics over all
        samples in the corpus.
    """
    # Concatenate the per-utterance statistics into a single aggregate.
    return stats.DataStats.concatenate(self.stats_per_utterance().values())
Return statistics calculated overall samples of all utterances in the corpus. Returns: DataStats: A DataStats object containing statistics overall samples in the corpus.
codesearchnet
def safe_date(self, x): t = x[self.col_name] if np.isnan(t): return t elif np.isposinf(t): t = sys.maxsize elif np.isneginf(t): t = -sys.maxsize tmp = time.localtime(float(t) / 1e9) return time.strftime(self.date_format, tm...
Transform x[self.col_name] into a date string. Args: x(dict like / pandas.Series): Row containing data to cast safely. Returns: str
juraj-google-style
def memory_read(self, addr, num_units, zone=None, nbits=None): buf_size = num_units buf = None access = 0 if (nbits is None): buf = (ctypes.c_uint8 * buf_size)() access = 0 elif (nbits == 8): buf = (ctypes.c_uint8 * buf_size)() access = 1 elif (nbits == 16): ...
Reads memory from a target system or specific memory zone. The optional ``zone`` specifies a memory zone to access to read from, e.g. ``IDATA``, ``DDATA``, or ``CODE``. The given number of bits, if provided, must be either ``8``, ``16``, or ``32``. If not provided, always reads ``num_units`` bytes. Args: self (JLin...
codesearchnet
def upload_metric(self, dataset_name, table_name, run_id): expected_file = os.path.join( self._logging_dir, logger.METRIC_LOG_FILE_NAME) with tf.gfile.GFile(expected_file) as f: lines = f.readlines() metrics = [] for line in filter(lambda l: l.strip(), lines): metric = jso...
Upload metric information to Bigquery. Args: dataset_name: string, the name of bigquery dataset where the data will be uploaded. table_name: string, the name of bigquery table under the dataset where the metric data will be uploaded. This is different from the benchmark_run table. run_id: string, a unique ID that will...
juraj-google-style
def storage_volume_attachments(self):
    """Gets the StorageVolumeAttachments API client.

    The client is created lazily on first access and cached for reuse.

    Returns:
        StorageVolumeAttachments: the cached API client.
    """
    if self.__storage_volume_attachments:
        return self.__storage_volume_attachments
    # First access: build the client over the shared connection and cache it.
    self.__storage_volume_attachments = StorageVolumeAttachments(self.__connection)
    return self.__storage_volume_attachments
Gets the StorageVolumeAttachments API client. Returns: StorageVolumeAttachments:
codesearchnet
def market_if_touched(self, accountID, **kwargs):
    """Shortcut to create a MarketIfTouched Order in an Account.

    Args:
        accountID: The ID of the Account.
        kwargs: The arguments to create a MarketIfTouchedOrderRequest.

    Returns:
        v20.response.Response containing the results from submitting the
        request.
    """
    order_request = MarketIfTouchedOrderRequest(**kwargs)
    return self.create(accountID, order=order_request)
Shortcut to create a MarketIfTouched Order in an Account Args: accountID : The ID of the Account kwargs : The arguments to create a MarketIfTouchedOrderRequest Returns: v20.response.Response containing the results from submitting the request
juraj-google-style
def pack_container(in_container, out_file): container_filename = local.path(out_file).basename out_container = local.cwd / "container-out" / container_filename out_dir = out_container.dirname with local.cwd(in_container): tar("cjf", out_container, ".") c_hash = download.update_has...
Pack a container image into a .tar.bz2 archive. Args: in_container (str): Path string to the container image. out_file (str): Output file name.
juraj-google-style
def get_course_details(self, course_id):
    """Query the Enrollment API for the course details of the given course_id.

    Args:
        course_id (str): The string value of the course's unique identifier.

    Returns:
        dict: A dictionary containing details about the course, in an
        enrollment context (allowed modes, etc.). Empty on lookup failure.
    """
    details = {}
    try:
        details = self.client.course(course_id).get()
    except (SlumberBaseException, ConnectionError, Timeout) as exc:
        # Best-effort lookup: log the failure and fall through to the
        # empty default rather than propagating transport errors.
        LOGGER.exception('Failed to retrieve course enrollment details for course [%s] due to: [%s]', course_id, str(exc))
    return details
Query the Enrollment API for the course details of the given course_id. Args: course_id (str): The string value of the course's unique identifier Returns: dict: A dictionary containing details about the course, in an enrollment context (allowed modes, etc.)
codesearchnet
class ClvpProcessor(ProcessorMixin): feature_extractor_class = 'ClvpFeatureExtractor' tokenizer_class = 'ClvpTokenizer' model_input_names = ['input_ids', 'input_features', 'attention_mask'] def __init__(self, feature_extractor, tokenizer): super().__init__(feature_extractor, tokenizer) def...
Constructs a CLVP processor which wraps a CLVP Feature Extractor and a CLVP Tokenizer into a single processor. [`ClvpProcessor`] offers all the functionalities of [`ClvpFeatureExtractor`] and [`ClvpTokenizer`]. See the [`~ClvpProcessor.__call__`], [`~ClvpProcessor.decode`] and [`~ClvpProcessor.batch_decode`] for more ...
github-repos
def append(self, node):
    """Append a statement to the current statement.

    Note that multiple calls to append will result in the last statement
    appended ending up at the bottom.

    Args:
        node: The statement to append.

    Raises:
        ValueError: If the given node is not a statement.
    """
    if not isinstance(node, grammar.STATEMENTS):
        # Name the offending type so validation failures are actionable
        # (the original raised a bare ValueError with no message).
        raise ValueError('Expected a statement node, got %s' % type(node).__name__)
    # Statements accumulate at the end of the pending-append buffer.
    self.to_append[-1].append(node)
Append a statement to the current statement. Note that multiple calls to append will result in the last statement to be appended to end up at the bottom. Args: node: The statement to append. Raises: ValueError: If the given node is not a statement.
codesearchnet
def set_params(self, **params):
    """Set the parameters of the estimator.

    Args:
        bias (array-like): bias of the estimator. Also known as the
            intercept in a linear model.
        weights (array-like): weights of the features. Also known as
            coefficients.
        b_<i> (number): NER bias for column position ``i`` on X,
            e.g. ``b_4=10, b_5=6``.

    Returns:
        self: This estimator, to allow call chaining.
    """
    if 'bias' in params:
        self.intercept_ = params['bias']
    if 'weights' in params:
        self.coef_ = params['weights']
    for key, value in params.items():
        # Keys of the form 'b_<i>' set the NER bias for column i.
        if key.startswith('b_'):
            self.B[int(key[2:])] = value
    return self
Set the parameters of the estimator. Args: bias (array-like) : bias of the estimator. Also known as the intercept in a linear model. weights (array-like) : weights of the features. Also known as coefficients. NER biases (array-like) : NER entities inferring column position on X and bias value. Ex: `b_4=10, b_5=6`. Exam...
codesearchnet
def cidr_check(cidr, return_cidr=True): try: if int(cidr) < 0 or int(cidr) > 32: good_cidr = False else: good_cidr = True if return_cidr: while not good_cidr: print("Sorry the CIDR value %s is not a valid value must be a value of 0 to ...
Function to verify a good CIDR value Args: cidr: CIDR value 0 to 32 return_cidr: Set to True it returns a CIDR value, set to False returns True or False Returns: see return_cidr for return options
juraj-google-style
def _unconstrained_to_raw_svi(unconstrained_parameters): b = tf.math.exp(unconstrained_parameters[..., 1]) rho = 2 * tf.math.sigmoid(unconstrained_parameters[..., 2]) - 1 m = unconstrained_parameters[..., 3] sigma = tf.math.exp(unconstrained_parameters[..., 4]) a = tf.math.exp(unconstrained_paramete...
Converts unconstrained optimizarion parameters to raw SVI ones. Performs the inverse transformation of the internal unconstrained model parameters into the standard raw SVI parameters `a, b, rho, m, sigma`. Args: unconstrained_parameters: A rank 2 real `Tensor` of shape [batch_size, 5], representing SVI model's raw p...
github-repos
def _GetMemberDataTypeMaps(self, data_type_definition, data_type_map_cache): if (not data_type_definition): raise errors.FormatError('Missing data type definition') members = getattr(data_type_definition, 'members', None) if (not members): raise errors.FormatError('Invalid data type definiti...
Retrieves the member data type maps. Args: data_type_definition (DataTypeDefinition): data type definition. data_type_map_cache (dict[str, DataTypeMap]): cached data type maps. Returns: list[DataTypeMap]: member data type maps. Raises: FormatError: if the data type maps cannot be determined from the data type defini...
codesearchnet
def maybe_add_training_arg(original_call, wrapped_call, expects_training_arg, default_training_value): if not expects_training_arg: return (wrapped_call, None) def wrap_with_training_arg(*args, **kwargs): training_arg_index = get_training_arg_index(original_call) training = get...
Decorate call and optionally adds training argument. If a layer expects a training argument, this function ensures that 'training' is present in the layer args or kwonly args, with the default training value. Args: original_call: Original call function. wrapped_call: Wrapped call function. expects_training_arg: Wheth...
github-repos
def Deserialize(self, reader):
    """Deserialize the full object from a binary reader.

    Args:
        reader (neo.IO.BinaryReader): source the fields are read from.
    """
    # Field order matters: the hash-start array precedes the stop hash
    # in the wire format.
    hash_start = reader.ReadSerializableArray('neocore.UInt256.UInt256')
    hash_stop = reader.ReadUInt256()
    self.HashStart = hash_start
    self.HashStop = hash_stop
Deserialize full object. Args: reader (neo.IO.BinaryReader):
juraj-google-style
def _exclude_denylisted_ops(self, node_names):
    """Exclude all nodes whose op types are in _GRAPH_STRUCT_OP_TYPE_DENYLIST.

    Args:
        node_names: An iterable of node or graph element names.

    Returns:
        A list of node names that are not denylisted.
    """
    kept = []
    for node_name in node_names:
        # Strip any graph-element suffix before looking up the op type.
        op_type = self._debug_dump.node_op_type(
            debug_graphs.get_node_name(node_name))
        if op_type not in self._GRAPH_STRUCT_OP_TYPE_DENYLIST:
            kept.append(node_name)
    return kept
Exclude all nodes whose op types are in _GRAPH_STRUCT_OP_TYPE_DENYLIST. Args: node_names: An iterable of node or graph element names. Returns: A list of node names that are not denylisted.
github-repos
def send_message(host, data, timeout=None, properties=None): channel = _get_channel(host, timeout) if (not properties): properties = pika.BasicProperties(content_type='application/json', delivery_mode=2, headers={'UUID': str(uuid.uuid4())}) parameters = settings.get_amqp_settings()[host] channel...
Send message to given `host`. Args: host (str): Specified host: aleph/ftp/whatever available host. data (str): JSON data. timeout (int, default None): How much time wait for connection.
codesearchnet
def get_discovery_doc(self, services, hostname=None):
    """JSON dict description of a protorpc.remote.Service in discovery format.

    Args:
        services: Either a single protorpc.remote.Service or a list of them
            that implements an api/version.
        hostname: string, Hostname of the API, to override the value set on
            the current service. Defaults to None.

    Returns:
        dict, The discovery document as a JSON-serializable dict.
    """
    # Accept a single service by wrapping it; sequences pass through as-is.
    service_list = services if isinstance(services, (tuple, list)) else [services]
    util.check_list_type(service_list, remote._ServiceClass, 'services', allow_none=False)
    return self.__discovery_doc_descriptor(service_list, hostname=hostname)
JSON dict description of a protorpc.remote.Service in discovery format. Args: services: Either a single protorpc.remote.Service or a list of them that implements an api/version. hostname: string, Hostname of the API, to override the value set on the current service. Defaults to None. Returns: dict, The discovery docu...
codesearchnet
def add(self, label):
    """Add a label to the end of the list.

    Args:
        label (Label): The label to add.
    """
    # Keep a back-reference from the label to its owning list.
    label.label_list = self
    start, end = label.start, label.end
    self.label_tree.addi(start, end, label)
Add a label to the end of the list. Args: label (Label): The label to add.
codesearchnet
def ForceRemoveFileObject(self, path_spec):
    """Forces the removal of a file-like object based on a path specification.

    Args:
        path_spec (PathSpec): path specification.

    Returns:
        bool: True if the file-like object was cached.
    """
    lookup_key = path_spec.comparable
    cache_value = self._file_object_cache.GetCacheValue(lookup_key)
    if not cache_value:
        return False
    # Keep closing until every outstanding reference has been released.
    while not cache_value.IsDereferenced():
        cache_value.vfs_object.close()
    return True
Forces the removal of a file-like object based on a path specification. Args: path_spec (PathSpec): path specification. Returns: bool: True if the file-like object was cached.
codesearchnet
def five_crop(img, size): if isinstance(size, numbers.Number): size = (int(size), int(size)) else: assert (len(size) == 2), 'Please provide only two dimensions (h, w) for size.' (w, h) = img.size (crop_h, crop_w) = size if ((crop_w > w) or (crop_h > h)): raise ValueError('Req...
Crop the given PIL Image into four corners and the central crop. .. Note:: This transform returns a tuple of images and there may be a mismatch in the number of inputs and targets your ``Dataset`` returns. Args: size (sequence or int): Desired output size of the crop. If size is an int instead of sequence like (h, w)...
codesearchnet
def run_calibration(self, saved_model_path: str, signature_keys: list[str], tags: set[str], force_graph_mode_calibration: bool, representative_dataset_file_map_serialized: dict[str, bytes]) -> Optional[bool]: dataset_file_map = {} for signature_key, dataset_file_serialized in representative_dataset_file_map_ser...
Runs calibration and adds calibration statistics to exported model. Args: saved_model_path: Path to the SavedModel to run calibration. signature_keys: List of signature keys corresponding to SignatureDefs to run calibration on. tags: A set of tags that identify the MetaGraphDef. force_graph_mode_calibration: If True, ...
github-repos