code
stringlengths
20
4.93k
docstring
stringlengths
33
1.27k
source
stringclasses
3 values
def _create_variable(self, next_creator, **kwargs): if kwargs.pop('per_worker_variable', False): logging.info('Creating per worker variable') return self._create_per_worker_variable(next_creator, **kwargs) var_creator = self._create_var_creator(next_creator, **kwargs) if 'colocate_with' in k...
Implements StrategyExtendedV2._create_variable. Creates a `Variable` or a `ShardedVariable`. A `ShardedVariable` will be created if satisfying all the following criteria: 1. `self._variable_partitioner` results in more than one partition on the first axis. 2. variable's rank is greater than 0. 3. variable is not coloc...
github-repos
def _StartMonitoringProcess(self, process): if (process is None): raise ValueError('Missing process.') pid = process.pid if (pid in self._process_information_per_pid): raise KeyError('Already monitoring process (PID: {0:d}).'.format(pid)) if (pid in self._rpc_clients_per_pid): ra...
Starts monitoring a process. Args: process (MultiProcessBaseProcess): process. Raises: IOError: if the RPC client cannot connect to the server. KeyError: if the process is not registered with the engine or if the process is already being monitored. OSError: if the RPC client cannot connect to the server. ValueError: ...
codesearchnet
def extract_anomalies(y_true, smoothed_errors, window_size, batch_size, error_buffer): if len(y_true) <= batch_size * window_size: raise ValueError("Window size (%s) larger than y_true (len=%s)." % (batch_size, len(y_true))) num_windows = int((len(y_true) - (batch_size * w...
Extracts anomalies from the errors. Args: y_true (): smoothed_errors (): window_size (int): batch_size (int): error_buffer (int): Returns:
juraj-google-style
def call(self, command, *args):
    """Passes an arbitrary command to the coin daemon.

    Args:
        command (str): Command to be sent to the coin daemon; coerced to
            ``str`` before dispatch.
        *args: Positional arguments forwarded unchanged to the daemon.

    Returns:
        Whatever the underlying RPC call returns.
    """
    command_name = str(command)
    return self.rpc.call(command_name, *args)
Passes an arbitrary command to the coin daemon. Args: command (str): command to be sent to the coin daemon
juraj-google-style
def random_int_generator(maxrange):
    """Generates a random integer from 0 to `maxrange`, inclusive.

    Args:
        maxrange (int): The upper range (inclusive) of integers to randomly
            choose from.

    Returns:
        int: The randomly generated integer from :py:func:`random.randint`.

    Raises:
        ArcRestHelperError: If the random draw fails (e.g. invalid
            ``maxrange``), wrapping the traceback details from ``trace()``.

    Examples:
        >>> 0 <= random_int_generator(15) <= 15
        True
    """
    try:
        return random.randint(0, maxrange)
    except Exception:
        # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
        # propagate; the no-op `finally: pass` was removed.
        line, filename, synerror = trace()
        raise ArcRestHelperError({'function': 'random_int_generator',
                                  'line': line,
                                  'filename': filename,
                                  'synerror': synerror})
Generates a random integer from 0 to `maxrange`, inclusive. Args: maxrange (int): The upper range of integers to randomly choose. Returns: int: The randomly generated integer from :py:func:`random.randint`. Examples: >>> arcresthelper.common.random_int_generator(15) 9
codesearchnet
def plot_zt_dop(self, temps='all', output='average', relaxation_time=1e-14): import matplotlib.pyplot as plt if (output == 'average'): zt = self._bz.get_zt(relaxation_time=relaxation_time, output='average') elif (output == 'eigs'): zt = self._bz.get_zt(relaxation_time=relaxation_time, output...
Plot the figure of merit zT in function of doping levels for different temperatures. Args: temps: the default 'all' plots all the temperatures in the analyzer. Specify a list of temperatures if you want to plot only some. output: with 'average' you get an average of the three directions with 'eigs' you get all the thr...
codesearchnet
def handle_unsubscribe(self, request, path): ret = [] if path: name = path[0] child = self.children[name] ret += child.handle_unsubscribe(request, path[1:]) if not child.children and not child.update_requests \ ...
Remove from the notifier list and send a return Args: request (Subscribe): The original subscribe request path (list): The relative path from ourself Returns: list: [(callback, Response)] that need to be called
juraj-google-style
def query_dict_to_string(query):
    """Convert an OrderedDict to a query string.

    Args:
        query (obj): The key value object with query params.

    Returns:
        str: The query string.

    Note:
        Behaves like ``urllib.parse.urlencode`` except that the values are
        not actually URL-encoded.
    """
    return "&".join(key + "=" + value for key, value in query.items())
Convert an OrderedDict to a query string. Args: query (obj): The key value object with query params. Returns: str: The query string. Note: This method does the same as urllib.parse.urlencode except that it doesn't actually encode the values.
juraj-google-style
def from_lasio_curve(cls, curve, depth=None, basis=None, start=None, stop=None, step=0.1524, run=(- 1), null=(- 999.25), service_company=None, date=None): data = curve.data unit = curve.unit if (depth is not None): d = np.diff(depth) if (not np.allclose((d - np.mean(d)), np.zeros_like(d))): ...
Makes a curve object from a lasio curve object and either a depth basis or start and step information. Args: curve (ndarray) depth (ndarray) basis (ndarray) start (float) stop (float) step (float): default: 0.1524 run (int): default: -1 null (float): default: -999.25 service_company (str): Optional. data (str): Option...
codesearchnet
def value_to_pytd_type(self, node, v, seen, view): if isinstance(v, (abstract.Empty, typing_overlay.Never)): return pytd.NothingType() elif isinstance(v, abstract.TYPE_VARIABLE_INSTANCES): return self._type_variable_to_pytd_type(node, v, seen, view) elif isinstance(v, (typing_overlay.TypeVar...
Get a PyTD type representing this object, as seen at a node. Args: node: The node from which we want to observe this object. v: The object. seen: The set of values seen before while computing the type. view: A Variable -> binding map. Returns: A PyTD type.
github-repos
def _unverified_decode(token): token = _helpers.to_bytes(token) if token.count(b'.') != 2: raise ValueError( 'Wrong number of segments in token: {0}'.format(token)) encoded_header, encoded_payload, signature = token.split(b'.') signed_section = encoded_header + b'.' + encoded_...
Decodes a token and does no verification. Args: token (Union[str, bytes]): The encoded JWT. Returns: Tuple[str, str, str, str]: header, payload, signed_section, and signature. Raises: ValueError: if there are an incorrect amount of segments in the token.
juraj-google-style
def find_container_traits(cls_or_string): if utils.is_str(cls_or_string): if (not templates.is_instantiation(cls_or_string)): return None name = templates.name(cls_or_string) if name.startswith('std::'): name = name[len('std::'):] if name.startswith('std::tr1:...
Find the container traits type of a declaration. Args: cls_or_string (str | declarations.declaration_t): a string Returns: declarations.container_traits: a container traits
codesearchnet
def parse_vasprun( self ): self.vasprun_filename = match_filename( 'vasprun.xml' ) if not self.vasprun_filename: raise FileNotFoundError( 'Could not find vasprun.xml or vasprun.xml.gz file' ) try: self.vasprun = Vasprun( self.vasprun_filename, parse_p...
Read in `vasprun.xml` as a pymatgen Vasprun object. Args: None Returns: None. If the vasprun.xml is not well formed this method will catch the ParseError and set self.vasprun = None.
juraj-google-style
def normalize_digits_only(number, keep_non_digits=False): number = unicod(number) number_length = len(number) normalized_digits = U_EMPTY_STRING for ii in range(number_length): d = unicode_digit(number[ii], -1) if d != -1: normalized_digits += unicod(d) elif keep...
Normalizes a string of characters representing a phone number. This converts wide-ascii and arabic-indic numerals to European numerals, and strips punctuation and alpha characters (optional). Arguments: number -- a string representing a phone number keep_non_digits -- whether to keep non-digits Returns the normalize...
juraj-google-style
def sparse_intersection_indices_and_values(x1, x2): ones1 = tf.sparse.map_values(ones_like_int8, x1) ones2 = tf.sparse.map_values(ones_like_int8, x2) intersection_extra_dim = tf.sets.intersection(tf.sparse.expand_dims(ones1, axis=-1), tf.sparse.expand_dims(ones2, axis=-1)) def empty_intersection(): ...
Compute the indices for the intersection of two `tf.SparseTensor`s and modify the values for these indices. Args: x1: the first `tf.SparseTensor`. x2: the second `tf.SparseTensor`. Returns: A tuple containing: - the indices for the intersection - `x1` values for the intersection indices (some values were removed) - `x...
github-repos
def parse(self, filename): with io.open(filename, 'r', encoding='utf-8') as _: lines = _.readlines() all_source_files = set() source_map = {} lineno = 0 root = None index = None cur_level = (- 1) parent_queue = [] for line in lines: try: (level, line) = de...
Parse a sitemap file. Args: filename: str, the path to the sitemap file. Returns: Sitemap: the generated sitemap.
codesearchnet
def port_create_gre(br, port, id, remote): if not 0 <= id < 2**32: return False elif not __salt__['dig.check_ip'](remote): return False elif not bridge_exists(br): return False elif port in port_list(br): cmd = 'ovs-vsctl set interface {0} type=gre options:remote_ip=...
Generic Routing Encapsulation - creates GRE tunnel between endpoints. Args: br: A string - bridge name. port: A string - port name. id: An integer - unsigned 32-bit number, tunnel's key. remote: A string - remote endpoint's IP address. Returns: True on success, else False. .. versionadded:: 2016.3.0 CLI Example: .....
juraj-google-style
def from_proto(context_def, import_scope=None):
    """Returns a `WhileContext` object created from `context_def`.

    Args:
        context_def: A `WhileContextDef` protocol buffer.
        import_scope: Optional `string`. Name scope to add.

    Returns:
        A `WhileContext` Python object.
    """
    while_ctx = WhileContext(context_def=context_def, import_scope=import_scope)
    # Nested contexts must be deserialized while this context is entered.
    while_ctx.Enter()
    for nested in context_def.nested_contexts:
        from_control_flow_context_def(nested, import_scope=import_scope)
    while_ctx.Exit()
    return while_ctx
Returns a `WhileContext` object created from `context_def`. Args: context_def: A `WhileContextDef` protocol buffer. import_scope: Optional `string`. Name scope to add. Returns: A `WhileContext` Python object.
github-repos
def _get_parameter_conversion_entry(parameter_config):
    """Get information needed to convert the given parameter to its API type.

    Args:
        parameter_config: The dictionary containing information specific to
            the parameter in question, retrieved from request.parameters in
            the method config.

    Returns:
        The entry from _PARAM_CONVERSION_MAP for the parameter's type, the
        'enum' entry when the type is unknown but the config declares an
        enum, or None otherwise.
    """
    type_entry = _PARAM_CONVERSION_MAP.get(parameter_config.get('type'))
    if type_entry is not None:
        return type_entry
    if 'enum' in parameter_config:
        return _PARAM_CONVERSION_MAP['enum']
    return None
Get information needed to convert the given parameter to its API type. Args: parameter_config: The dictionary containing information specific to the parameter in question. This is retrieved from request.parameters in the method config. Returns: The entry from _PARAM_CONVERSION_MAP with functions/information needed to...
codesearchnet
def _execute_primitives(self, commands): for p in commands: if (self._scanchain and self._scanchain._debug): print(' Executing', p) p.execute(self)
Run a list of executable primitives on this controller, and distribute the returned data to the associated TDOPromises. Args: commands: A list of Executable Primitives to be run in order.
codesearchnet
def _write_log(self, version_key, meta_data, index_fields): meta_data = meta_data or {} meta_data.update({ 'version_key': version_key, 'timestamp': time.time(), }) obj = log_bucket.new(data=meta_data) obj.add_index('version_key_bin', version_key) ...
Creates a log entry for current object, Args: version_key(str): Version_bucket key from _write_version(). meta_data (dict): JSON serializable meta data for logging of save operation. {'lorem': 'ipsum', 'dolar': 5} index_fields (list): Tuple list for secondary indexing keys in riak (with 'bin' or 'int'). [('lorem','bin'...
juraj-google-style
def add_document(self, key, url, **kwargs): document = self._check_metadata_for_file(key=key, url=url, **kwargs) for dict_key in ( 'description', 'fulltext', 'hidden', 'material', 'original_url', 'url', 'filena...
Adds document to record Args: key (string): document key url (string): document url Keyword Args: description (string): simple description fulltext (bool): mark if this is a full text hidden (bool): is document should be hidden material (string): original_url (string): original url filename (string): current url Retu...
juraj-google-style
def module_help(self, module):
    """Describes the key flags of a module.

    Args:
        module: module|str, the module to describe the key flags for.

    Returns:
        str, describing the key flags of a module.
    """
    lines = []
    self._render_our_module_key_flags(module, lines)
    return '\n'.join(lines)
Describes the key flags of a module. Args: module: module|str, the module to describe the key flags for. Returns: str, describing the key flags of a module.
codesearchnet
def _get_named_attributes(self): for (cls, instance) in zip(self.get_class_attributes(), self._get_instance_attributes()): (attr_name, cls_value) = cls instance_value = instance[1] (yield (attr_name, instance_value, cls_value))
Return generator for attribute's name, instance and class values. Add attribute name to meth:`_get_attributes` for a better debugging message, so user can find the error easier. Returns: generator: Tuple with attribute's name, instance and class values.
codesearchnet
def check_initialized(self): for (name, field) in self.__by_name.items(): value = getattr(self, name) if (value is None): if field.required: raise ValidationError(('Message %s is missing required field %s' % (type(self).__name__, name))) else: try: ...
Check class for initialization status. Check that all required fields are initialized Raises: ValidationError: If message is not initialized.
codesearchnet
def get_cudnn_version(): key = 'cudnn_ver' cmds = cmds_all[PLATFORM.lower()][key] out, err = run_shell_cmd(cmds[0]) if err and FLAGS.debug: print('Error in finding `cudnn.h`:\n %s' % str(err)) if len(out.split(b' ')) > 1: cmd = cmds[0] + ' | ' + cmds[1] out_re, err_re = run_s...
Retrieves the version of cuDNN library detected. Returns: String that is the version of cuDNN library detected. e.g. '7.5.0'
github-repos
def GetSources(self, event): if self.DATA_TYPE != event.data_type: raise errors.WrongFormatter('Unsupported data type: {0:s}.'.format( event.data_type)) file_system_type = getattr(event, 'file_system_type', 'UNKNOWN') timestamp_desc = getattr(event, 'timestamp_desc', 'Time') source...
Determines the the short and long source for an event object. Args: event (EventObject): event. Returns: tuple(str, str): short and long source string. Raises: WrongFormatter: if the event object cannot be formatted by the formatter.
juraj-google-style
def _is_op_stateful(op): if op.type == 'GlobalIterId': return False if op.type == 'UpdateFdoWithGlobalMinibatchStatistics': return False if op.type == 'CollectiveGatherV2' and op.get_attr('is_stateless'): return False if op.type == 'CollectiveAllToAllV2' and op.get_attr('is_state...
Check whether an op is stateful. This helper function handles two special cases to make the stateful analysis consistent with the mlir side effect analysis. 1. GlobalIterIdOp should be stateless. 2. CollectiveGatherV2 with attribute is_stateless to be True should be stateless. Args: op: Operation Returns: Boolean in...
github-repos
def _CreateWindowsPathResolver( self, file_system, mount_point, environment_variables): if environment_variables is None: environment_variables = [] path_resolver = windows_path_resolver.WindowsPathResolver( file_system, mount_point) for environment_variable in environment_variabl...
Create a Windows path resolver and sets the environment variables. Args: file_system (dfvfs.FileSystem): file system. mount_point (dfvfs.PathSpec): mount point path specification. environment_variables (list[EnvironmentVariableArtifact]): environment variables. Returns: dfvfs.WindowsPathResolver: Windows path resolve...
juraj-google-style
def _info_from_string(info_string): try: json_value = json.loads(info_string) except ValueError: raise ValueError(('invalid JSON: %r' % (info_string,))) if (not isinstance(json_value, dict)): raise ValueError(('not a JSON object: %r' % (json_value,))) if (json_value.get('version'...
Parse a `TensorBoardInfo` object from its string representation. Args: info_string: A string representation of a `TensorBoardInfo`, as produced by a previous call to `_info_to_string`. Returns: A `TensorBoardInfo` value. Raises: ValueError: If the provided string is not valid JSON, or if it does not represent a JSON...
codesearchnet
class IntGELU(nn.Module): def __init__(self, quant_mode=True, force_dequant='none'): super().__init__() self.quant_mode = quant_mode if force_dequant in ['nonlinear', 'gelu']: logger.info('Force dequantize gelu') self.quant_mode = False if not self.quant_mode...
Quantized version of `torch.nn.GELU`. Adds quantization-specific arguments on top of `torch.nn.GELU`. Args: quant_mode (`bool`, *optional*, defaults to `False`): Whether or not the layer is quantized. force_dequant (`str`, *optional*, defaults to `"none"`): Force dequantize the layer if either "gelu" or "nonlinear" is...
github-repos
def UpdateNumberOfEvents(self, number_of_consumed_events, number_of_produced_events): consumed_events_delta = 0 if (number_of_consumed_events is not None): if (number_of_consumed_events < self.number_of_consumed_events): raise ValueError('Number of consumed events smaller than previous updat...
Updates the number of events. Args: number_of_consumed_events (int): total number of events consumed by the process. number_of_produced_events (int): total number of events produced by the process. Returns: bool: True if either number of events has increased. Raises: ValueError: if the consumed or produced number of...
codesearchnet
def download_file_maybe_extract(url, directory, filename=None, extension=None, check_files=[]): if (filename is None): filename = _get_filename_from_url(url) filepath = os.path.join(directory, filename) check_files = [os.path.join(directory, f) for f in check_files] if ((len(check_files) > 0) an...
Download the file at ``url`` to ``directory``. Extract to ``directory`` if tar or zip. Args: url (str): Url of file. directory (str): Directory to download to. filename (str, optional): Name of the file to download; Otherwise, a filename is extracted from the url. extension (str, optional): Extension of the file; Othe...
codesearchnet
def wait(self, container, timeout=None, condition=None): url = self._url('/containers/{0}/wait', container) params = {} if (condition is not None): if utils.version_lt(self._version, '1.30'): raise errors.InvalidVersion('wait condition is not supported for API version < 1.30') pa...
Block until a container stops, then return its exit code. Similar to the ``docker wait`` command. Args: container (str or dict): The container to wait on. If a dict, the ``Id`` key is used. timeout (int): Request timeout condition (str): Wait until a container state reaches the given condition, either ``not-running`` ...
codesearchnet
def GetPresetByName(self, name):
    """Retrieves a specific preset definition by name.

    Args:
        name (str): name of the preset; matched case-insensitively.

    Returns:
        ParserPreset: a parser preset or None if not available.
    """
    return self._definitions.get(name.lower(), None)
Retrieves a specific preset definition by name. Args: name (str): name of the preset. Returns: ParserPreset: a parser preset or None if not available.
juraj-google-style
def send_status(status: 'EFBStatus'): global middlewares, master if status is None: return s: 'Optional[EFBStatus]' = status for i in middlewares: s = i.process_status(cast('EFBStatus', s)) if s is None: return status = cast('EFBStatus', s) statu...
Deliver a message to the destination channel. Args: status (EFBStatus): The status
juraj-google-style
def exists(self):
    """Check if a target exists.

    This function is called by :mod:`luigi` to check if a task output
    exists. By default, :mod:`luigi` considers a task as complete if all
    its targets (outputs) exist.

    Returns:
        bool: ``True`` if target exists, ``False`` otherwise.
    """
    session = client.get_client().create_session()
    try:
        return self._base_query(session).count() > 0
    finally:
        # Close the session even when the query raises; the previous
        # version leaked the session on error.
        session.close()
Check if a target exists This function is called by :mod:`luigi` to check if a task output exists. By default, :mod:`luigi` considers a task as complete if all it targets (outputs) exist. Returns: bool: ``True`` if target exists, ``False`` otherwise
codesearchnet
def atol_for_validation(self) -> float:
    """What absolute tolerance value to use during model conversion validation.

    Returns:
        Float absolute tolerance value.
    """
    return 1e-4
What absolute tolerance value to use during model conversion validation. Returns: Float absolute tolerance value.
github-repos
def program_to_text(program): def label(node): return '<%d>%s' % (node.id, node.name) s = io.StringIO() seen = set() for node in cfg_utils.order_nodes(program.cfg_nodes): seen.add(node) s.write(f'{label(node)}\n') s.write(f' From: {', '.join((label(n) for n in node.inco...
Generate a text (CFG nodes + assignments) version of a program. For debugging only. Args: program: An instance of cfg.Program Returns: A string representing all of the data for this program.
github-repos
def group_id(self, resource_id):
    """Update the request URI to include the Group ID for specific group retrieval.

    Args:
        resource_id (string): The group id.
    """
    # NOTE(review): the URI is only rewritten when the resource name is NOT
    # 'group' — this inverted-looking guard is preserved as-is; confirm
    # against callers.
    if self._name == 'group':
        return
    self._request_uri = '{}/{}'.format(self._api_uri, resource_id)
Update the request URI to include the Group ID for specific group retrieval. Args: resource_id (string): The group id.
codesearchnet
def recreate_function(saved_function, concrete_functions): function_spec = _deserialize_function_spec_as_nonmethod(saved_function.function_spec) def restored_function_body(*args, **kwargs): if not saved_function.concrete_functions: raise ValueError('Found zero restored functions fo...
Creates a `Function` from a `SavedFunction`. Args: saved_function: `SavedFunction` proto. concrete_functions: map from function name to `ConcreteFunction`. As a side effect of this function, the `FunctionSpec` from `saved_function` is added to each `ConcreteFunction` in this map. Returns: A `Function`.
github-repos
def uninstall(path, restart=False): cmd = ['wusa.exe', '/uninstall', '/quiet'] kb = os.path.splitext(os.path.basename(path))[0] if os.path.exists(path): cmd.append(path) else: cmd.append('/kb:{0}'.format((kb[2:] if kb.lower().startswith('kb') else kb))) if restart: cmd.append...
Uninstall a specific KB. Args: path (str): The full path to the msu file to uninstall. This can also be just the name of the KB to uninstall restart (bool): ``True`` to force a restart if required by the installation. Adds the ``/forcerestart`` switch to the ``wusa.exe`` command. ``False`` will add the ``/norestart`...
codesearchnet
def __getitem__(cls, args): type_, bound, keyfunc = cls._get_args(args) keyfunc_name = cls._get_fullname(keyfunc) identity = cls._identity BaseClass, MetaClass = cls._get_bases(type_) instantiate = cls._instantiate @six.add_metaclass(MetaClass) ...
Create a new subclass of a type bounded by the arguments. If a callable is passed as the third argument of the slice, it will be used as the comparison function for the boundaries. Args: args: A tuple with two or three parameters: a type, a slice representing the minimum and maximum lengths allowed for values of that...
juraj-google-style
def apply_range_set(self, hist: Hist) -> None:
    """Apply the associated range set to the axis of a given hist.

    Note:
        The min and max values should be bins, not user ranges! For more,
        see the binning explanation in ``apply_func_to_find_bin(...)``.

    Args:
        hist: Histogram to which the axis range restriction should be applied.

    Returns:
        None. The range is set on the axis in place.
    """
    axis = self.axis(hist)
    # Guard against user-range floats being passed where bin callables are
    # expected.
    assert not isinstance(self.min_val, float)
    assert not isinstance(self.max_val, float)
    min_val = self.min_val(axis)
    max_val = self.max_val(axis)
    # Reuse the axis resolved above instead of looking it up a second time.
    axis.SetRange(min_val, max_val)
Apply the associated range set to the axis of a given hist. Note: The min and max values should be bins, not user ranges! For more, see the binning explanation in ``apply_func_to_find_bin(...)``. Args: hist: Histogram to which the axis range restriction should be applied. Returns: None. The range is set on the axis.
codesearchnet
def RunStateMethod(self, method_name, request=None, responses=None, event=None, direct_response=None): client_id = None try: self.context.current_state = method_name if request and responses...
Completes the request by calling the state method. Args: method_name: The name of the state method to call. request: A RequestState protobuf. responses: A list of GrrMessages responding to the request. event: A threading.Event() instance to signal completion of this request. direct_response: A flow.Responses() object ...
juraj-google-style
def add(self, resource, provider_uri_or_id, timeout=-1):
    """Adds a Device Manager under the specified provider.

    Args:
        resource (dict): Object to add.
        provider_uri_or_id: ID or URI of provider.
        timeout: Timeout in seconds. Waits for task completion by default.
            The timeout does not abort the operation in OneView, it just
            stops waiting for its completion.

    Returns:
        dict: Added SAN Manager.
    """
    provider_uri = self._provider_client.build_uri(provider_uri_or_id)
    uri = provider_uri + '/device-managers'
    return self._client.create(resource=resource, uri=uri, timeout=timeout)
Adds a Device Manager under the specified provider. Args: resource (dict): Object to add. provider_uri_or_id: ID or URI of provider. timeout: Timeout in seconds. Wait for task completion by default. The timeout does not abort the operation in OneView, just stop waiting for its completion. Returns: dict: Added SAN Man...
codesearchnet
def get_manual_homology_models(self, input_dict, outdir=None, clean=True, force_rerun=False): if outdir: outdir_set = True else: outdir_set = False counter = 0 for g in tqdm(self.genes): if (g.id not in input_dict): continue if (not outdir_set): ou...
Copy homology models to the GEM-PRO project. Requires an input of a dictionary formatted like so:: { model_gene: { homology_model_id1: { 'model_file': '/path/to/homology/model.pdb', 'file_type': 'pdb' 'additional_info': info_value }, homology_model_id2: { 'model_file': '/path/to/homology/model.pdb' 'file_type': 'pdb'...
codesearchnet
def remove_tag(self, tag):
    """Remove a tag.

    Args:
        tag (str): Tag to remove.

    Returns:
        bool: True if tag removed or False if not.
    """
    tags = self.data.get('tags')
    return self._remove_hdxobject(tags, tag, matchon='name')
Remove a tag Args: tag (str): Tag to remove Returns: bool: True if tag removed or False if not
juraj-google-style
def expectation(self, function):
    """Returns an estimate of the expectation value of the given function.

    Args:
        function: Mapping from a 2D tensor of bitstrings to a possibly
            nested structure whose atomic elements are float tensors with
            the same batch size as the input bitstrings.

    Returns:
        The estimate computed by the underlying `_expectation`
        implementation.
    """
    return self._expectation(function)
Returns an estimate of the expectation value of the given function. Args: function: Mapping from a 2D tensor of bitstrings to a possibly nested structure. The structure must have atomic elements all of which are float tensors with the same batch size as the input bitstrings.
github-repos
def kmeans_pp(data, k, centers=None): genes, cells = data.shape if sparse.issparse(data) and not sparse.isspmatrix_csc(data): data = sparse.csc_matrix(data) num_known_centers = 0 if centers is None: centers = np.zeros((genes, k)) else: num_known_centers = cente...
Generates kmeans++ initial centers. Args: data (array): A 2d array- genes x cells k (int): Number of clusters centers (array, optional): if provided, these are one or more known cluster centers. 2d array of genes x number of centers (<=k). Returns: centers - a genes x k array of cluster means. assignments - a cells x...
juraj-google-style
def channels_replies(self, *, channel: str, thread_ts: str, **kwargs) -> SlackResponse:
    """Retrieve a thread of messages posted to a channel.

    Args:
        channel (str): The channel id. e.g. 'C1234567890'
        thread_ts (str): The timestamp of an existing message with 0 or
            more replies. e.g. '1234567890.123456'
    """
    params = dict(kwargs, channel=channel, thread_ts=thread_ts)
    return self.api_call("channels.replies", http_verb="GET", params=params)
Retrieve a thread of messages posted to a channel Args: channel (str): The channel id. e.g. 'C1234567890' thread_ts (str): The timestamp of an existing message with 0 or more replies. e.g. '1234567890.123456'
juraj-google-style
def CopyAttributesFromSessionCompletion(self, session_completion): if self.identifier != session_completion.identifier: raise ValueError('Session identifier mismatch.') self.aborted = session_completion.aborted if session_completion.analysis_reports_counter: self.analysis_reports_counter ...
Copies attributes from a session completion. Args: session_completion (SessionCompletion): session completion attribute container. Raises: ValueError: if the identifier of the session completion does not match that of the session.
juraj-google-style
def set_maximum(self, q_data, marked, center, bin_lower, foothills): as_bin = [] as_glob = [] marked_so_far = [] will_be_considered_again = False as_bin.append(center) center_data = q_data[center] while (len(as_bin) > 0): p = as_bin.pop((- 1)) if (marked[p] != self.UNMARKED):...
Grow a region at a certain bin level and check if the region has reached the maximum size. Args: q_data: Quantized data array marked: Array marking points that are objects center: Coordinates of the center pixel of the region being grown bin_lower: Intensity level of lower bin being evaluated foothills: List of points...
codesearchnet
def load_bmp(path):
    """Load a surface from a file.

    Args:
        path (str): Path to the BMP file to load.

    Returns:
        Surface: A surface containing the pixels loaded from the file.

    Raises:
        SDLError: If the file cannot be loaded.
    """
    # Bypass Surface.__init__; the pointer is set directly from SDL.
    surface = object.__new__(Surface)
    rwops = lib.SDL_RWFromFile(path, 'rb')
    surface._ptr = check_ptr_err(lib.SDL_LoadBMP_RW(rwops, 1))
    return surface
Load a surface from a file. Args: path (str): Path to the BMP file to load. Returns: Surface: A surface containing the pixels loaded from the file. Raises: SDLError: If the file cannot be loaded.
codesearchnet
def _to_values_def(self, export_scope=None): values_def = control_flow_pb2.ValuesDef() values_def.values.extend([ops.strip_name_scope(v, export_scope) for v in sorted(self._values)]) for k, v in self._external_values.items(): k = ops.strip_name_scope(k, export_scope) values_def.external_valu...
Converts the values to a `ValuesDef` protocol buffer. Args: export_scope: Optional `string`. Name scope to remove. Returns: A `ValuesDef` protocol buffer.
github-repos
def pickle_load(cls, filepath): if os.path.isdir(filepath): for (dirpath, dirnames, filenames) in os.walk(filepath): fnames = [f for f in filenames if (f == cls.PICKLE_FNAME)] if fnames: if (len(fnames) == 1): filepath = os.path.join(dirpath, fname...
Loads the object from a pickle file. Args: filepath: Filename or directory name. It filepath is a directory, we scan the directory tree starting from filepath and we read the first pickle database. Raise RuntimeError if multiple databases are found.
codesearchnet
def _check_self_to_empty(self, stateid): x_term = stateid.rfind('@') y_term = stateid.rfind('A') if y_term > x_term: x_term = y_term ids = stateid[x_term + 1:].split(',') if len(ids) < 2: return 0 if ids[0] == ids[1]: ...
Because of the optimization, the rule for empty states is missing A check takes place live Args: stateid (int): The state identifier Returns: bool: A true or false response
juraj-google-style
def GetArchiveTypeIndicators(cls, path_spec, resolver_context=None): if (cls._archive_remainder_list is None or cls._archive_store is None): specification_store, remainder_list = cls._GetSpecificationStore( definitions.FORMAT_CATEGORY_ARCHIVE) cls._archive_remainder_list = remaind...
Determines if a file contains a supported archive types. Args: path_spec (PathSpec): path specification. resolver_context (Optional[Context]): resolver context, where None represents the built-in context which is not multi process safe. Returns: list[str]: supported format type indicators.
juraj-google-style
def check_url(url):
    """Check if resource at URL is fetchable.

    Works by trying to fetch the URL and checking for a 200 status.

    Args:
        url (str): Url to check.

    Returns:
        tuple: (True/False, response code)
    """
    request = urllib2.Request(url)
    try:
        response = urlopen(request)
    except urllib2.HTTPError as e:
        return (False, e.code)
    return (True, response.code)
Check if resource at URL is fetchable. (by trying to fetch it and checking for 200 status. Args: url (str): Url to check. Returns: Returns a tuple of {True/False, response code}
codesearchnet
def retrieve_info_for_model(model_type, frameworks: Optional[List[str]]=None): if model_type not in auto_module.MODEL_NAMES_MAPPING: raise ValueError(f'{model_type} is not a valid model type.') model_name = auto_module.MODEL_NAMES_MAPPING[model_type] config_class = auto_module.configuration_auto.CON...
Retrieves all the information from a given model_type. Args: model_type (`str`): A valid model type (like "bert" or "gpt2") frameworks (`List[str]`, *optional*): If passed, will only keep the info corresponding to the passed frameworks. Returns: `Dict`: A dictionary with the following keys: - **frameworks** (`List[st...
github-repos
def percent_point(self, U):
    """Given a cdf value, returns a value in original space.

    Args:
        U: `int` or `float` cdf value, strictly inside (0, 1).

    Returns:
        float: value in original space.

    Raises:
        ValueError: if ``U`` is not strictly between 0 and 1.
    """
    self.check_fit()
    if not 0 < U < 1:
        # Message now matches the strict check above (bounds excluded).
        raise ValueError('cdf value must be in (0,1)')
    # `args` must be a tuple; `(U)` is just a parenthesized scalar and only
    # worked because scipy wraps non-tuple args itself.
    return scipy.optimize.brentq(
        self.cumulative_distribution, -1000.0, 1000.0, args=(U,))
Given a cdf value, returns a value in original space. Args: U: `int` or `float` cdf value in [0,1] Returns: float: value in original space
juraj-google-style
def get_surveys(self):
    """Gets all surveys in account.

    Args:
        None

    Returns:
        list: a list of all surveys.
    """
    payload = {'Request': 'getSurveys', 'Format': 'JSON'}
    response = self._session.get(QUALTRICS_URL, params=payload)
    return response.json()['Result']['Surveys']
Gets all surveys in account Args: None Returns: list: a list of all surveys
juraj-google-style
def eval_algorithm(closing, low, high):
    """Evaluates the SO (stochastic oscillator) algorithm.

    Args:
        closing: Float of current closing price.
        low: Float of lowest low closing price throughout some duration.
        high: Float of highest high closing price throughout some duration.

    Returns:
        Float SO between 0 and 100.
    """
    spread = high - low
    offset = 100 * (closing - low)
    # When high == low the divisor is zero; the raw offset is returned, as
    # in the original formulation.
    if spread == 0:
        return offset
    return offset / spread
Evaluates the SO algorithm Args: closing: Float of current closing price. low: Float of lowest low closing price throughout some duration. high: Float of highest high closing price throughout some duration. Returns: Float SO between 0 and 100.
juraj-google-style
def build_from_token_counts(self, token_counts, min_count, num_iterations=4): self._init_alphabet_from_tokens(six.iterkeys(token_counts)) self._init_subtokens_from_list(list(self._alphabet)) if min_count < 1: min_count = 1 ...
Train a SubwordTextTokenizer based on a dictionary of word counts. Args: token_counts: a dictionary of Unicode strings to int. min_count: an integer - discard subtokens with lower counts. num_iterations: an integer; how many iterations of refinement.
juraj-google-style
def register(self, task_json=None, json_filename=None): if ((not task_json) and (not json_filename)): raise Exception("Both task json and filename can't be none.") if (task_json and json_filename): raise Exception("Both task json and filename can't be provided.") if json_filename: ta...
Registers a new GBDX task. Args: task_json (dict): Dictionary representing task definition. json_filename (str): A full path of a file with json representing the task definition. Only one out of task_json and json_filename should be provided. Returns: Response (str).
codesearchnet
def FromDBInstance(db_token): hash_ar = bytearray(binascii.unhexlify(db_token.ContractHash)) hash_ar.reverse() hash = UInt160(data=hash_ar) token = NEP5Token(script=None) token.SetScriptHash(hash) token.name = db_token.Name token.symbol = db_token.Symbol ...
Get a NEP5Token instance from a database token. Args: db_token (neo.Implementations.Wallets.peewee.Models.NEP5Token): Returns: NEP5Token: self.
juraj-google-style
def post_process(self, outputs, target_sizes): logger.warning_once('`post_process` is deprecated and will be removed in v5 of Transformers, please use `post_process_object_detection` instead, with `threshold=0.` for equivalent results.') out_logits, out_bbox = (outputs.logits, outputs.pred_boxes) if len(out...
Converts the raw output of [`YolosForObjectDetection`] into final bounding boxes in (top_left_x, top_left_y, bottom_right_x, bottom_right_y) format. Only supports PyTorch. Args: outputs ([`YolosObjectDetectionOutput`]): Raw outputs of the model. target_sizes (`torch.Tensor` of shape `(batch_size, 2)`): Tensor containi...
github-repos
def dump_table_as_insert_sql(engine: Engine, table_name: str, fileobj: TextIO, wheredict: Dict[(str, Any)]=None, include_ddl: bool=False, multirow: bool=False) -> None: log.info('dump_data_as_insert_sql: table_name={}', table_name) writelines_nl(fileobj, [SEP1, sql_comment('Data for table: {}'.format(table_name...
Reads a table from the database, and writes SQL to replicate the table's data to the output ``fileobj``. Args: engine: SQLAlchemy :class:`Engine` table_name: name of the table fileobj: file-like object to write to wheredict: optional dictionary of ``{column_name: value}`` to use as ``WHERE`` filters include_ddl: if ``...
codesearchnet
def from_dict(cls, d): labels_dict = d['labels_dict'] projections = {} structure = None if isinstance(list(d['bands'].values())[0], dict): eigenvals = {Spin(int(k)): np.array(d['bands'][k]['data']) for k in d['bands']} else: ...
Create from dict. Args: A dict with all data for a band structure object. Returns: A BandStructure object
juraj-google-style
def from_dict(cls, image_processor_dict: dict[str, Any], **kwargs): image_processor_dict = image_processor_dict.copy() return_unused_kwargs = kwargs.pop('return_unused_kwargs', False) if 'size' in kwargs and 'size' in image_processor_dict: image_processor_dict['size'] = kwargs.pop('size') if 'cr...
Instantiates a type of [`~image_processing_utils.ImageProcessingMixin`] from a Python dictionary of parameters. Args: image_processor_dict (`Dict[str, Any]`): Dictionary that will be used to instantiate the image processor object. Such a dictionary can be retrieved from a pretrained checkpoint by leveraging the [`~ima...
github-repos
def document(self, name, file_name, owner=None, **kwargs):
    """Create a Document TI object.

    Args:
        name: document name.
        file_name: name of the attached file.
        owner: optional owner name.
        **kwargs: forwarded unchanged to the Document constructor.

    Returns:
        Document: the newly created TI object.
    """
    doc = Document(self.tcex, name, file_name, owner=owner, **kwargs)
    return doc
Create the Document TI object. Args: owner: name: file_name: **kwargs: Return:
juraj-google-style
def evaluate_forward(distribution, x_data, parameters=None, cache=None): assert (len(x_data) == len(distribution)), ('distribution %s is not of length %d' % (distribution, len(x_data))) assert hasattr(distribution, '_cdf'), 'distribution require the `_cdf` method to function.' cache = (cache if (cache is no...
Evaluate forward Rosenblatt transformation. Args: distribution (Dist): Distribution to evaluate. x_data (numpy.ndarray): Locations for where evaluate forward transformation at. parameters (:py:data:typing.Any): Collection of parameters to override the default ones in the distribution. cache (:py:data:typing.Any): A co...
codesearchnet
def read(self, length=-1):
    """Read from the FIFO.

    Reads up to *length* items of buffered data.  A negative or omitted
    length — or one at least as large as the buffered data — drains the
    FIFO completely.  An empty FIFO yields an empty result.

    Args:
        length: amount of data to read; -1 (default) reads everything.

    Returns:
        The data read from the FIFO.
    """
    if 0 <= length < len(self):
        # Partial read: advance the cursor and trim consumed data.
        end = self.pos + length
        chunk = self.buf[self.pos:end]
        self.pos = end
        self.__discard()
        return chunk
    # Full drain: return everything from the cursor onward and reset.
    chunk = self.buf[self.pos:]
    self.clear()
    return chunk
Reads from the FIFO. Reads as much data as possible from the FIFO up to the specified length. If the length argument is negative or omitted, all data currently available in the FIFO will be read. If there is no data available in the FIFO an empty string is returned. Args: length: The amount of data to read from the FI...
juraj-google-style
def make_sine_surface(dims=DEFAULT_DIMS, offset=0.5, scale=1.0):
    """Make a surface from the 3D sine function.

    Args:
        dims (pair): dimensions of the surface to create.
        offset (float): offset applied to the gradient values.
        scale (float): scale applied to the sine frequency.

    Returns:
        surface: sin() of the norm of the shifted, scaled gradients.
    """
    shifted = (np.array(make_gradients(dims)) - offset) * scale * np.pi
    return np.sin(np.linalg.norm(shifted, axis=0))
Makes a surface from the 3D sine function. Args: dims (pair): the dimensions of the surface to create offset (float): an offset applied to the function scale (float): a scale applied to the sine frequency Returns: surface: A surface.
codesearchnet
def cast(self, dtype: tf.DType) -> 'TensorFluent':
    """Return a TensorFluent for the cast operation with given `dtype`.

    Args:
        dtype: The output's data type.

    Returns:
        A TensorFluent wrapping the cast operation; `self` unchanged
        when the fluent already has the requested dtype.
    """
    if self.dtype == dtype:
        return self
    casted = tf.cast(self.tensor, dtype)
    return TensorFluent(casted, self.scope.as_list(), batch=self.batch)
Returns a TensorFluent for the cast operation with given `dtype`. Args: dtype: The output's data type. Returns: A TensorFluent wrapping the cast operation.
juraj-google-style
def get_keys(data_list, leading_columns=LEADING_COLUMNS): all_keys = set().union(*(list(d.keys()) for d in data_list)) leading_keys = [] for key in leading_columns: if key not in all_keys: continue leading_keys.append(key) all_keys.remove(key) return leading_k...
Gets all possible keys from a list of dicts, sorting by leading_columns first Args: data_list: list of dicts to pull keys from leading_columns: list of keys to put first in the result Returns: list of keys to be included as columns in excel worksheet
juraj-google-style
def _interpretPayload(functioncode, payload): r raise NotImplementedError() output = '' output += 'Modbus payload decoder\n' output += 'Input payload (length {} characters): {!r} \n'.format(len(payload), payload) output += 'Function code: {} (dec).\n'.format(functioncode) if len(payload...
r"""Generate a human readable description of a Modbus payload. Args: * functioncode (int): Function code * payload (str): The payload that should be interpreted. It should be a byte string. Returns: A descriptive string. For example, the payload ``'\x10\x01\x00\x01'`` for functioncode 3 should give something like:: ...
juraj-google-style
def _push(project): repo = project.repo remote_name = project.get('project', 'remote') remote = repo.remote(remote_name) result = _call_remote_push(remote) failures = lfilter(complement(did_git_push_succeed), result) if failures: for push_info in failures: logger.error('Faile...
Push default branch and project template branch to remote With default config (i.e. remote and branch names), equivalent to:: $ git push origin master:master project-template:project-template Raises: ballet.exc.BalletError: Push failed in some way
codesearchnet
def __call__(self, shape, dtype=None, **kwargs): _validate_kwargs(self.__class__.__name__, kwargs) dtype = _get_dtype(dtype) if not dtype.is_numpy_compatible or dtype == dtypes.string: raise ValueError('Expected numeric or boolean dtype, got %s.' % dtype) if _PARTITION_SHAPE in kwargs: s...
Returns a tensor object initialized as specified by the initializer. Args: shape: Shape of the tensor. dtype: Optional dtype of the tensor. Only numeric or boolean dtypes are supported. If not specified, `tf.keras.backend.floatx()` is used, which default to `float32` unless you configured it otherwise (via `tf.keras.b...
github-repos
async def rewind(self, query='1'): if (not (self.state == 'ready')): logger.debug("Trying to rewind from wrong state '{}'".format(self.state)) return if (query == ''): query = '1' try: num = int(query) except TypeError: self.statuslog.error('Rewind argument must b...
The rewind command Args: query (str): The number of items to skip
codesearchnet
def _publish_actor_class_to_key(self, key, actor_class_info): self._worker.redis_client.hmset(key, actor_class_info) self._worker.redis_client.rpush("Exports", key)
Push an actor class definition to Redis. The is factored out as a separate function because it is also called on cached actor class definitions when a worker connects for the first time. Args: key: The key to store the actor class info at. actor_class_info: Information about the actor class.
juraj-google-style
def push_file(self, local_source, remote_dir): remote_dest = remote_dir + '/' + os.path.basename(local_source) try: self.makedirs(remote_dir, exist_ok=True) except IOError as e: logger.exception("Pushing {0} to {1} failed".format(local_source, remote_dir)) ...
Transport a local file to a directory on a remote machine Args: - local_source (string): Path - remote_dir (string): Remote path Returns: - str: Path to copied file on remote machine Raises: - BadScriptPath : if script path on the remote side is bad - BadPermsScriptPath : You do not have perms to make the channel sc...
juraj-google-style
def matches(self, msg_seq: int, msg: MessageInterface) -> bool:
    """Whether all the defined search key criteria match the message.

    Args:
        msg_seq: The message sequence ID.
        msg: The message object.
    """
    for criterion in self.all_criteria:
        if not criterion.matches(msg_seq, msg):
            return False
    return True
The message matches if all the defined search key criteria match. Args: msg_seq: The message sequence ID. msg: The message object.
juraj-google-style
def is_gpu(self):
    """Check if the device associated with this environment is a GPU.

    Returns:
        boolean: True if the OpenCL device type is GPU, False otherwise.
    """
    device_type = self._device.get_info(cl.device_info.TYPE)
    return device_type == cl.device_type.GPU
Check if the device associated with this environment is a GPU. Returns: boolean: True if the device is an GPU, false otherwise.
codesearchnet
def _empty_resource_attributes(self):
    """Empty selected attributes when the resource is removed or absent.

    Args:
        None

    Return:
        None: resets status/header/existence state and the RDF graph.
    """
    self.status_code = 404
    self.headers = {}
    self.exists = False
    # rebuild an empty RDF graph in place of the stale one
    self.rdf = self._build_rdf()
    # exact-type check (not isinstance): presumably only plain
    # NonRDFSource instances carry a binary payload to empty — confirm
    if type(self) == NonRDFSource:
        self.binary.empty()
small method to empty values if resource is removed or absent Args: None Return: None: empties selected resource attributes
juraj-google-style
def get(self, request, *args, **kwargs): context = self.get_context_data(**kwargs) context.update(self.extra_context) context['crumbs'] = self.get_crumbs() context['title'] = self.title context['suit'] = ('suit' in settings.INSTALLED_APPS) if ((context.get('dashboard_grid', None) is None) and se...
Django view get function. Add items of extra_context, crumbs and grid to context. Args: request (): Django's request object. *args (): request args. **kwargs (): request kwargs. Returns: response: render to response with context.
codesearchnet
def normal_var(data, mean): if not isinstance(data, np.ndarray): data = np.array(data) cumm = [0.0] cumm.extend(np.cumsum(np.power(np.abs(data - mean), 2))) def cost(s, t): dist = float(t - s) diff = cumm[t] - cumm[s] return dist * np.log(diff/dist) r...
Creates a segment cost function for a time series with a Normal distribution with changing variance Args: data (:obj:`list` of float): 1D time series data variance (float): variance Returns: function: Function with signature (int, int) -> float where the first arg is the starting index, and the second is the last arg....
juraj-google-style
def dump(self, destination, with_defaults=False): if isinstance(destination, six.string_types): with open(destination, 'w', encoding='utf-8') as f: self._rw.dump_config_to_file(self._config, f, with_defaults=with_defaults) else: self._rw.dump_config_to_fi...
Write configuration values to the specified destination. Args: destination: with_defaults (bool): if ``True``, values of items with no custom values will be included in the output if they have a default value set.
juraj-google-style
def parse(self, text):
    """Tokenize *text* and parse it into a node.

    Args:
        text (str): the text to lex and parse.

    Returns:
        object: a node representing the current rule.
    """
    parser = Parser(self.lex(text))
    return parser.parse()
Parse self.text. Args: text (str): the text to lex Returns: object: a node representing the current rule.
codesearchnet
def run_query(query: str):
    """Run a query and return the result.

    The original signature claimed ``-> None`` but the function always
    returns a value, so the misleading annotation is dropped.

    Args:
        query (str): The query to be executed.

    Returns:
        The filtered records for the parsed query, or an error-message
        string when parsing fails (failures are not re-raised).
    """
    try:
        parsed = parse_query(query)
    except Exception as e:
        # Surface parse failures as a readable message instead of raising.
        return f'ERROR: {type(e).__name__}: {e.__str__()}.'
    return filter_records(convert_to_dataframe(parsed), query)
Run a query and return the result. Args: query (str): The query to be executed. Returns: The filtered records for the query, or an error-message string when parsing fails.
github-repos
def to_weld_type(weld_type, dim):
    """Wrap a Weld type in `dim` levels of WeldVec.

    Args:
        weld_type: the base Weld type to wrap.
        dim (int): number of vector dimensions to apply.

    Returns:
        The nested WeldVec type (the input unchanged when dim == 0).
    """
    # range, not xrange: xrange is Python-2-only and raises NameError
    # on Python 3; iteration behavior is identical on both versions.
    for _ in range(dim):
        weld_type = WeldVec(weld_type)
    return weld_type
Wraps a Weld type in `dim` levels of vector types. Args: weld_type: the base Weld type to wrap. dim (int): number of vector dimensions to apply. Returns: the resulting nested WeldVec type.
juraj-google-style
def get_firmware(self):
    """Get the installed firmware for a logical interconnect.

    Returns:
        dict: LIFirmware.
    """
    uri = self._helper.build_subresource_uri(
        self.data['uri'], subresource_path=self.FIRMWARE_PATH)
    return self._helper.do_get(uri)
Gets the installed firmware for a logical interconnect. Returns: dict: LIFirmware.
codesearchnet
def disaggregate_wind(wind_daily, method='equal', a=None, b=None, t_shift=None): assert (method in ('equal', 'cosine', 'random')), 'Invalid method' wind_eq = melodist.distribute_equally(wind_daily) if (method == 'equal'): wind_disagg = wind_eq elif (method == 'cosine'): assert (None not ...
general function for windspeed disaggregation Args: wind_daily: daily values method: keyword specifying the disaggregation method to be used a: parameter a for the cosine function b: parameter b for the cosine function t_shift: parameter t_shift for the cosine function Returns: Disaggregated hourly values of windspee...
codesearchnet
def _ParseFValue(self, registry_key): registry_value = registry_key.GetValueByName('F') if not registry_value: raise errors.ParseError( 'missing value: "F" in Windows Registry key: {0:s}.'.format( registry_key.name)) f_value_map = self._GetDataTypeMap('f_value') try:...
Parses an F value. Args: registry_key (dfwinreg.WinRegistryKey): Windows Registry key. Returns: f_value: F value stored in the Windows Registry key. Raises: ParseError: if the Windows Registry key does not contain an F value or F value cannot be parsed.
juraj-google-style
def SetLookupHash(self, lookup_hash):
    """Sets the hash to query.

    Args:
        lookup_hash (str): name of the hash attribute to look up.

    Raises:
        ValueError: if the lookup hash is not supported.
    """
    if lookup_hash in self.SUPPORTED_HASHES:
        self.lookup_hash = lookup_hash
        return
    raise ValueError('Unsupported lookup hash: {0!s}'.format(lookup_hash))
Sets the hash to query. Args: lookup_hash (str): name of the hash attribute to look up. Raises: ValueError: if the lookup hash is not supported.
juraj-google-style
def _full_pred_succ_maps(self, pred_map, succ_map, input_circuit, wire_map): full_pred_map = {} full_succ_map = {} for w in input_circuit.input_map: if (w in wire_map): full_pred_map[wire_map[w]] = pred_map[wire_map[w]] full_succ_map[wire_map[w]] = succ_map[wire_map[w]] ...
Map all wires of the input circuit. Map all wires of the input circuit to predecessor and successor nodes in self, keyed on wires in self. Args: pred_map (dict): comes from _make_pred_succ_maps succ_map (dict): comes from _make_pred_succ_maps input_circuit (DAGCircuit): the input circuit wire_map (dict): the map from...
codesearchnet
def __live_receivers(signal):
    """Return the signal handlers still alive for the input `signal`.

    Args:
        signal: A signal name.

    Returns:
        A list of callable receivers for the input signal.
    """
    with __lock:
        # Drop dead weak references first so the comprehension below
        # only dereferences live ones.
        __purge()
        return [ref() for ref in __receivers[signal]]
Return all signal handlers that are currently still alive for the input `signal`. Args: signal: A signal name. Returns: A list of callable receivers for the input signal.
codesearchnet
def push(self, targets, jobs=None, remote=None, show_checksums=False):
    """Push data items in a cloud-agnostic way.

    Args:
        targets (list): list of targets to push to the cloud.
        jobs (int): number of jobs that can run simultaneously.
        remote: optional remote to push to; when None, the remote from
            the core.remote config option is used.
        show_checksums (bool): show checksums instead of file names.
    """
    cloud_remote = self._get_cloud(remote, 'push')
    return self.repo.cache.local.push(
        targets,
        jobs=jobs,
        remote=cloud_remote,
        show_checksums=show_checksums,
    )
Push data items in a cloud-agnostic way. Args: targets (list): list of targets to push to the cloud. jobs (int): number of jobs that can be running simultaneously. remote (dvc.remote.base.RemoteBase): optional remote to push to. By default remote from core.remote config option is used. show_checksums (bool): show chec...
codesearchnet
def load(path, compile=True, options=None): metadata = saved_metadata_pb2.SavedMetadata() meta_graph_def = loader_impl.parse_saved_model(path).meta_graphs[0] object_graph_def = meta_graph_def.object_graph_def path_to_metadata_pb = os.path.join(path, constants.SAVED_METADATA_PATH) if gfile.Exists(pat...
Loads Keras objects from a SavedModel. Any Keras layer or model saved to the SavedModel will be loaded back as Keras objects. Other objects are loaded as regular trackable objects (same as `tf.saved_model.load`). Currently, Keras saving/loading only retains the Keras object's weights, losses, and call function. The ...
github-repos
def remove_foothills(self, q_data, marked, bin_num, bin_lower, centers, foothills): hills = [] for foot in foothills: center = foot[0] hills[:] = foot[1][:] while (len(hills) > 0): pt = hills.pop((- 1)) marked[pt] = self.GLOBBED for (s_index, val) in n...
Mark points determined to be foothills as globbed, so that they are not included in future searches. Also searches neighboring points to foothill points to determine if they should also be considered foothills. Args: q_data: Quantized data marked: Marked bin_num: Current bin being searched bin_lower: Next bin being se...
codesearchnet