code
stringlengths
20
4.93k
docstring
stringlengths
33
1.27k
source
stringclasses
3 values
def _make_signature_checker(api_signature, signature): if not (isinstance(signature, dict) and all((isinstance(k, (str, int)) for k in signature))): raise TypeError('signatures must be dictionaries mapping parameter names to type annotations.') checkers = [] param_names = list(api_signature.paramete...
Builds a PySignatureChecker for the given type signature. Args: api_signature: The `inspect.Signature` of the API whose signature is being checked. signature: Dictionary mapping parameter names to type annotations. Returns: A `PySignatureChecker`.
github-repos
def generate_surface_vectors(self, film_millers, substrate_millers): vector_sets = [] for f in film_millers: film_slab = SlabGenerator(self.film, f, 20, 15, primitive=False).get_slab() film_vectors = reduce_vectors(film_slab.lattice...
Generates the film/substrate slab combinations for a set of given miller indices Args: film_millers(array): all miller indices to generate slabs for film substrate_millers(array): all miller indices to generate slabs for substrate
juraj-google-style
def convert_to_dataframe(ds: xr.Dataset) -> pd.DataFrame:
    """Convert an xarray Dataset to a pandas DataFrame.

    Args:
        ds (xr.Dataset): xarray Dataset to be converted.

    Returns:
        pd.DataFrame: DataFrame containing the data from the Dataset.
    """
    if len(ds.coords):
        # Coordinate-indexed data: let xarray build the frame, then
        # flatten the coordinate index into regular columns.
        return ds.to_dataframe().reset_index()
    # No coordinates: materialize the values via the dict representation
    # and wrap each variable's data in a single-row column.
    raw = ds.compute().to_dict(data='list')
    return pd.DataFrame({name: [entry['data']] for name, entry in raw['data_vars'].items()})
Convert xarray Dataset to pandas DataFrame. Args: ds (xr.Dataset): xarray Dataset to be converted. Returns: pd.DataFrame: Pandas DataFrame containing the data from the xarray Dataset.
github-repos
def RegisterHelper(cls, resolver_helper):
    """Registers a path specification resolver helper.

    Args:
        resolver_helper (ResolverHelper): resolver helper.

    Raises:
        KeyError: if a resolver helper is already set for the helper's
            type indicator.
    """
    type_indicator = resolver_helper.type_indicator
    if type_indicator in cls._resolver_helpers:
        raise KeyError(
            'Resolver helper object already set for type indicator: {0!s}.'.format(
                type_indicator))
    cls._resolver_helpers[type_indicator] = resolver_helper
Registers a path specification resolver helper. Args: resolver_helper (ResolverHelper): resolver helper. Raises: KeyError: if resolver helper object is already set for the corresponding type indicator.
codesearchnet
def get_special_tokens_mask(self, token_ids_0: List[int], token_ids_1: Optional[List[int]]=None, already_has_special_tokens: bool=False) -> List[int]: if already_has_special_tokens: return super().get_special_tokens_mask(token_ids_0=token_ids_0, token_ids_1=token_ids_1, already_has_special_tokens=True) ...
Retrieves sequence ids from a token list that has no special tokens added. This method is called when adding special tokens using the tokenizer `prepare_for_model` methods. Args: token_ids_0 (`List[int]`): List of ids. token_ids_1 (`List[int]`, *optional*): Optional second list of IDs for sequence pairs. already_has_s...
github-repos
def convert_structure_to_signature(structure, arg_names=None, signature_context=None): def encode_arg(arg, path): if isinstance(arg, tensor_lib.Tensor): user_specified_name = None try: user_specified_name = compat.as_str(arg.op.get_attr('_user_specified_name...
Convert a potentially nested structure to a signature. Args: structure: Structure to convert, where top level collection is a list or a tuple. arg_names: Optional list of arguments that has equal number of elements as `structure` and is used for naming corresponding TensorSpecs. signature_context: TraceType InternalTr...
github-repos
def _extract_token_timestamps(self, generate_outputs, alignment_heads, time_precision=0.02, num_frames=None, num_input_ids=None): cross_attentions = [] for i in range(self.config.decoder_layers): cross_attentions.append(torch.cat([x[i] for x in generate_outputs.cross_attentions], dim=2)) weights = t...
Calculates token-level timestamps using the encoder-decoder cross-attentions and dynamic time-warping (DTW) to map each output token to a position in the input audio. If `num_frames` is specified, the encoder-decoder cross-attentions will be cropped before applying DTW. Returns: tensor containing the timestamps in sec...
github-repos
def run(self, input_dir, output_file_path): logging.info('Running defense %s', self.submission_id) tmp_run_dir = self.temp_copy_extracted_submission() output_dir = os.path.dirname(output_file_path) output_filename = os.path.basename(output_file_path) cmd = ['--network=none', '-m=24g'...
Runs defense inside Docker. Args: input_dir: directory with input (adversarial images). output_file_path: path of the output file. Returns: how long it took to run submission in seconds
juraj-google-style
def validate(cls, job_config):
    """Validates relevant parameters.

    This method can validate fields which it deems relevant.

    Args:
        job_config: an instance of map_job.JobConfig.

    Raises:
        errors.BadWriterParamsError: if the output writer class configured
            on the job does not match this writer class.
    """
    expected, actual = cls, job_config.output_writer_cls
    if actual != expected:
        raise errors.BadWriterParamsError(
            "Expect output writer class %r, got %r." % (expected, actual))
Validates relevant parameters. This method can validate fields which it deems relevant. Args: job_config: an instance of map_job.JobConfig. Raises: errors.BadWriterParamsError: required parameters are missing or invalid.
juraj-google-style
def load_info(cat): res = _load_yaml_(f'{PKG_PATH}/markets/{cat}.yml') root = os.environ.get('BBG_ROOT', '').replace('\\', '/') if (not root): return res for (cat, ovrd) in _load_yaml_(f'{root}/markets/{cat}.yml').items(): if isinstance(ovrd, dict): if (cat in res): ...
Load parameters for assets Args: cat: category Returns: dict Examples: >>> import pandas as pd >>> >>> assets = load_info(cat='assets') >>> all(cat in assets for cat in ['Equity', 'Index', 'Curncy', 'Corp']) True >>> os.environ['BBG_PATH'] = '' >>> exch = load_info(cat='exch') >>> pd.Series(exch['EquityUS']).allday ...
codesearchnet
def _bind_length_scalar_handlers(tids, scalar_factory, lns=_NON_ZERO_LENGTH_LNS):
    """Binds a set of scalar handlers for an inclusive range of low-nibble values.

    Args:
        tids (Sequence[int]): The Type IDs to bind to.
        scalar_factory (Callable): The factory for the scalar parsing function.
            This function can itself return a function representing a thunk to
            defer the scalar parsing or a direct value.
        lns (Sequence[int]): The low-nibble values to bind to.
    """
    return _bind_length_handlers(
        tids, partial(_length_scalar_handler, scalar_factory), lns)
Binds a set of scalar handlers for an inclusive range of low-nibble values. Args: tids (Sequence[int]): The Type IDs to bind to. scalar_factory (Callable): The factory for the scalar parsing function. This function can itself return a function representing a thunk to defer the scalar parsing or a direct value. lns (Se...
codesearchnet
def auth_criteria(self): auth = {} for attr in dir(self): if (attr != 'auth_criteria'): attribute = getattr(self, attr) if (isinstance(attribute, Callable) and hasattr(attribute, '_service_auth')): auth[getattr(self, attr)._service_auth] = attribute return aut...
This attribute provides the mapping of services to their auth requirement Returns: (dict) : the mapping from services to their auth requirements.
codesearchnet
def _run_graph_for_calibration(float_model_dir: str, signature_keys: Sequence[str], tags: Collection[str], representative_dataset: rd.RepresentativeDatasetOrMapping, force_graph_mode_calibration: bool) -> None: try: _validate_representative_dataset(representative_dataset, signature_keys) except Exceptio...
Runs the graph for calibration using representative datasets. Args: float_model_dir: Path to the model to calibrate. signature_keys: Sequence of keys identifying SignatureDef containing inputs and outputs. tags: Collection of tags identifying the MetaGraphDef within the SavedModel to analyze. representative_dataset: A...
github-repos
def _validate_cluster_spec(cluster_spec, task_type, task_id): allowed_task_types = ('chief', 'worker', 'evaluator', 'ps', None) cluster_spec = normalize_cluster_spec(cluster_spec) if any((job not in allowed_task_types for job in cluster_spec.jobs)): raise ValueError('Disallowed task type found in cl...
Validates `cluster_spec`. It checks: 1) task type is one of "chief", "worker", "ps", "evaluator", or not provided (None). 2) whether there is such a task type as `task_type` in the `cluster_spec`. The only exception is `evaluator`. In other words, it is still a valid configuration when `task_type` is `evaluator` but i...
github-repos
def get_environment_details(zone, environment):
    """Issues a request to Composer to get the environment details.

    Args:
        zone: GCP zone of the Composer environment.
        environment: name of the Composer environment.

    Returns:
        A parsed result object.

    Raises:
        Exception if there is an error performing the operation.
    """
    context = google.datalab.Context.default()
    path = Api._ENVIRONMENTS_PATH_FORMAT % (context.project_id, zone, environment)
    return google.datalab.utils.Http.request(
        Api._ENDPOINT + path, credentials=context.credentials)
Issues a request to Composer to get the environment details. Args: zone: GCP zone of the composer environment environment: name of the Composer environment Returns: A parsed result object. Raises: Exception if there is an error performing the operation.
codesearchnet
def __matches(s1, s2, ngrams_fn, n=3): ngrams1, ngrams2 = set(ngrams_fn(s1, n=n)), set(ngrams_fn(s2, n=n)) return ngrams1.intersection(ngrams2)
Returns the n-grams that match between two sequences See also: SequenceMatcher.get_matching_blocks Args: s1: a string s2: another string n: an int for the n in n-gram Returns: set: the n-grams present in both sequences
juraj-google-style
def roll_to_business_day(self, date_tensor, roll_convention): if roll_convention == constants.BusinessDayConvention.NONE: return date_tensor rolled_ordinals_table = self._compute_rolled_dates_table(roll_convention) ordinals_with_offset = date_tensor.ordinal() - self._ordinal_offset + 1 rolled_or...
Rolls the given dates to business dates according to given convention. Args: date_tensor: DateTensor of dates to roll from. roll_convention: BusinessDayConvention. Determines how to roll a date that falls on a holiday. Returns: The resulting DateTensor.
github-repos
def __init__(self, flow_obj, parent_runner=None, runner_args=None, token=None): self.token = token or flow_obj.token self.parent_runner = parent_runner if parent_runner is not None: self.queue_manager = parent_runner.queue_manager else: self.queue_manager = q...
Constructor for the Flow Runner. Args: flow_obj: The flow object this runner will run states for. parent_runner: The parent runner of this runner. runner_args: A FlowRunnerArgs() instance containing initial values. If not specified, we use the runner_args from the flow_obj. token: An instance of access_control.ACLToke...
juraj-google-style
def _PrintCheckDependencyStatus( self, dependency, result, status_message, verbose_output=True): if not result or dependency.is_optional: if dependency.is_optional: status_indicator = '[OPTIONAL]' else: status_indicator = '[FAILURE]' print('{0:s}\t{1:s}'.format(status_i...
Prints the check dependency status. Args: dependency (DependencyDefinition): dependency definition. result (bool): True if the Python module is available and conforms to the minimum required version, False otherwise. status_message (str): status message. verbose_output (Optional[bool]): True if output should be verbos...
juraj-google-style
def switch_to_line_in(self, source=None): if source: uid = source.uid else: uid = self.uid self.avTransport.SetAVTransportURI([ ('InstanceID', 0), ('CurrentURI', 'x-rincon-stream:{0}'.format(uid)), ('CurrentURIMetaData', '') ...
Switch the speaker's input to line-in. Args: source (SoCo): The speaker whose line-in should be played. Default is line-in from the speaker itself.
juraj-google-style
def set(self, key, samples, sampling_rate): if (not np.issubdtype(samples.dtype, np.floating)): raise ValueError('Samples are required as np.float32!') if (len(samples.shape) > 1): raise ValueError('Only single channel supported!') self.raise_error_if_not_open() if (key in self._file): ...
Set the samples and sampling-rate for the given key. Existing data will be overwritten. The samples have to have ``np.float32`` datatype and values in the range of -1.0 and 1.0. Args: key (str): A key to store the data for. samples (numpy.ndarray): 1-D array of audio samples (np.float32). sampling_rate (int): The samp...
codesearchnet
def setValues(self, values): if isinstance(values, dict): indices, values = list(zip(*values.items())) indices = Utils.toTupleArray(indices) if any(isinstance(value, basestring) for value in values): values = list(map(str, values)) sel...
Assign the values (string or float) to the parameter instances with the specified indices, equivalent to the AMPL code: .. code-block:: ampl let {i in indices} par[i] := values[i]; Args: values: list, dictionary or :class:`~amplpy.DataFrame` with the indices and the values to be set. Raises: TypeError: If called on...
juraj-google-style
def Print(x, data, message, **kwargs):
    """Call tf.Print.

    Args:
        x: a Tensor.
        data: a list of Tensor.
        message: a string.
        **kwargs: keyword arguments to tf.Print.

    Returns:
        a Tensor which is identical in value to x.
    """
    print_op = PrintOperation(x, data, message, **kwargs)
    return print_op.outputs[0]
Call tf.Print. Args: x: a Tensor. data: a list of Tensor message: a string **kwargs: keyword arguments to tf.Print Returns: a Tensor which is identical in value to x
juraj-google-style
def imrotate(img, angle, center=None, scale=1.0, border_value=0, auto_bound=False): if ((center is not None) and auto_bound): raise ValueError('`auto_bound` conflicts with `center`') (h, w) = img.shape[:2] if (center is None): center = (((w - 1) * 0.5), ((h - 1) * 0.5)) assert isinstance...
Rotate an image. Args: img (ndarray): Image to be rotated. angle (float): Rotation angle in degrees, positive values mean clockwise rotation. center (tuple): Center of the rotation in the source image, by default it is the center of the image. scale (float): Isotropic scale factor. border_value (int): Border value. au...
codesearchnet
def memcache_get(self, key, for_cas=False, namespace=None, use_cache=False, deadline=None): if not isinstance(key, basestring): raise TypeError('key must be a string; received %r' % key) if not isinstance(for_cas, bool): raise TypeError('for_cas must be a bool; received %r' %...
An auto-batching wrapper for memcache.get() or .get_multi(). Args: key: Key to set. This must be a string; no prefix is applied. for_cas: If True, request and store CAS ids on the Context. namespace: Optional namespace. deadline: Optional deadline for the RPC. Returns: A Future (!) whose return value is the value re...
juraj-google-style
def from_func_graph(name: Union[str, bytes], graph: func_graph_module.FuncGraph, attrs: Dict[str, attr_value_pb2.AttrValue], function_type: Optional[function_type_lib.FunctionType]=None, overwrite: bool=False) -> AtomicFunction: if attrs and attributes_lib.IMPLEMENTS in attrs: has_resource_vars = any((inp.d...
Initializes an AtomicFunction from FuncGraph. Args: name: str, the name for the created function. graph: Graph, the graph containing the operations in the function attrs: dict mapping names of attributes to their AttrValue values function_type: known FunctionType to use, otherwise one is derived. overwrite: overwrites...
github-repos
def _create_produce_requests(self, collated): requests = {} for (node_id, batches) in six.iteritems(collated): requests[node_id] = self._produce_request(node_id, self.config['acks'], self.config['request_timeout_ms'], batches) return requests
Transfer the record batches into a list of produce requests on a per-node basis. Arguments: collated: {node_id: [RecordBatch]} Returns: dict: {node_id: ProduceRequest} (version depends on api_version)
codesearchnet
def restore_site_properties(self, site_property='ff_map', filename=None): if (not (self.control_params['filetype'] == 'pdb')): raise ValueError() filename = (filename or self.control_params['output']) bma = BabelMolAdaptor.from_file(filename, 'pdb') pbm = pb.Molecule(bma._obmol) assert (len(...
Restore the site properties for the final packed molecule. Args: site_property (str): filename (str): path to the final packed molecule. Returns: Molecule
codesearchnet
def framebuffer(self, color_attachments=(), depth_attachment=None) -> 'Framebuffer': if type(color_attachments) is Texture or type(color_attachments) is Renderbuffer: color_attachments = (color_attachments,) ca_mglo = tuple(x.mglo for x in color_attachments) da_mglo = None...
A :py:class:`Framebuffer` is a collection of buffers that can be used as the destination for rendering. The buffers for Framebuffer objects reference images from either Textures or Renderbuffers. Args: color_attachments (list): A list of :py:class:`Texture` or :py:class:`Renderbuffer` objects. depth_attachment (Render...
juraj-google-style
def extract_paths(self, paths, ignore_nopath): try: super().extract_paths(paths=paths, ignore_nopath=ignore_nopath) except ExtractPathError as err: LOGGER.debug('%s: failed extracting files: %s', self.vm.name(), err.message) if self._has_guestfs: self.extract_paths_dead(paths...
Extract the given paths from the domain Attempt to extract all files defined in ``paths`` with the method defined in :func:`~lago.plugins.vm.VMProviderPlugin.extract_paths`, if it fails, and `guestfs` is available it will try extracting the files with guestfs. Args: paths(list of tuples): files to extract in `[(src1,...
codesearchnet
def get(self, key): lock.acquire() try: if key not in self: return None current_time = time.time() if self[key].expire > current_time: return self[key].value deletes = [] for k, val in sel...
Get an object from the cache Arguments: key (str): Cache key Returns: Cached object
juraj-google-style
def is_valid_isbn(isbn):
    """Validate given `isbn`.

    Wrapper for :func:`is_isbn10_valid` / :func:`is_isbn13_valid`; the ISBN
    version (10/13) is picked from the input's length.

    Args:
        isbn (str/list): ISBN number as string or list of digits.

    Returns:
        bool: ``True`` if ISBN is valid.
    """
    length = len(isbn)
    if length not in (10, 13):
        return False
    return is_isbn10_valid(isbn) if length == 10 else is_isbn13_valid(isbn)
Validate given `isbn`. Wrapper for :func:`is_isbn10_valid`/ :func:`is_isbn13_valid`. Args: isbn (str/list): ISBN number as string or list of digits. Note: Function doesn't require `isbn` type to be specified (it can be both 10/13 isbn's versions). Returns: bool: ``True`` if ISBN is valid.
juraj-google-style
def __init__(self, submission_id, submissions, storage_bucket): self.submission_id = submission_id self.storage_bucket = storage_bucket self.type = None self.submission = None if submission_id in submissions.attacks: self.type = TYPE_NONTARGETED self.submission = submissions.attacks...
Initializes ExecutableSubmission. Args: submission_id: ID of the submissions submissions: instance of CompetitionSubmissions with all submissions storage_bucket: storage bucket where all submissions are stored Raises: WorkerError: if submission was not found
juraj-google-style
def prune_unused_nodes(meta_graph, signature_def): graph = tf_v1.Graph() with graph.as_default(): tf_v1.train.import_meta_graph(meta_graph, input_map={}, import_scope='') used_node_names = set() for (_, tensor_def) in signature_def.outputs.items(): output_tensor = graph.get_t...
Function to prune unused ops given a signature def. This function does a graph traversal through from all outputs as defined in the signature_def to collect all used nodes. Then, any nodes which are unused can be discarded. This is useful for graph which are executing eagerly or on TPUs. Args: meta_graph: The input/o...
codesearchnet
def albedo(self, value=999.0):
    """Corresponds to IDD Field `albedo`.

    Args:
        value (float): value for IDD Field `albedo`. Missing value: 999.0.
            If `value` is None it will not be checked against the
            specification and is assumed to be a missing value.

    Raises:
        ValueError: if `value` is not a valid value.
    """
    if value is not None:
        try:
            value = float(value)
        except ValueError:
            raise ValueError(
                'value {} need to be of type float for field `albedo`'.format(value))
    # NOTE(review): reconstructed from collapsed source — assumes the
    # assignment sits outside the None-check (standard IDD-field pattern),
    # so None is stored as-is to mark a missing value.
    self._albedo = value
Corresponds to IDD Field `albedo` Args: value (float): value for IDD Field `albedo` Missing value: 999.0 if `value` is None it will not be checked against the specification and is assumed to be a missing value Raises: ValueError: if `value` is not a valid value
codesearchnet
def write_auth(msg_type, profile_name, auth, cfg):
    """Write the settings into the auth portion of the cfg.

    Args:
        msg_type (str): message type to create config entry.
        profile_name (str): name of the profile entry.
        auth (dict): auth parameters.
        cfg (jsonconfig.Config): config instance.
    """
    key = profile_name + '_' + msg_type
    # Collect the auth values in the order declared by CONFIG for this type.
    secrets = [auth[field] for field in CONFIG[msg_type]['auth']]
    cfg.pwd[key] = ' :: '.join(secrets) if len(secrets) > 1 else secrets[0]
Write the settings into the auth portion of the cfg. Args: :msg_type: (str) message type to create config entry. :profile_name: (str) name of the profile entry :auth: (dict) auth parameters :cfg: (jsonconfig.Config) config instance.
codesearchnet
def set_global_step(self, new_global_step, name=None):
    """Sets the global time step of the accumulator.

    The operation logs a warning if we attempt to set to a time step that is
    lower than the accumulator's own time step.

    Args:
        new_global_step: Value of new time step. Can be a variable or a constant.
        name: Optional name for the operation.

    Returns:
        Operation that sets the accumulator's time step.
    """
    step_tensor = math_ops.cast(
        ops.convert_to_tensor(new_global_step), _dtypes.int64)
    return gen_data_flow_ops.accumulator_set_global_step(
        self._accumulator_ref, step_tensor, name=name)
Sets the global time step of the accumulator. The operation logs a warning if we attempt to set to a time step that is lower than the accumulator's own time step. Args: new_global_step: Value of new time step. Can be a variable or a constant name: Optional name for the operation. Returns: Operation that sets the acc...
github-repos
def _trigger(self): self._completed.set() for callback in self._callbacks: callback(self)
Trigger all callbacks registered to this Future. This method is called internally by the batch once the batch completes. Args: message_id (str): The message ID, as a string.
juraj-google-style
def get_corner(self, time):
    """Gets the corner array indices of the STObject at a given time that
    corresponds to the upper left corner of the bounding box.

    Args:
        time: time at which the corner is being extracted.

    Returns:
        Corner index (i, j), or (-1, -1) when time falls outside
        [start_time, end_time].
    """
    if not self.start_time <= time <= self.end_time:
        return (-1, -1)
    offset = time - self.start_time
    return (self.i[offset][0, 0], self.j[offset][0, 0])
Gets the corner array indices of the STObject at a given time that corresponds to the upper left corner of the bounding box for the STObject. Args: time: time at which the corner is being extracted. Returns: corner index.
codesearchnet
def decorate(self, name_or_func): if os.environ.get("SC2_NO_STOPWATCH"): return name_or_func if callable(name_or_func) else lambda func: func def decorator(name, func): @functools.wraps(func) def _stopwatch(*args, **kwargs): with self(name): return func(*args, **kwargs)...
Decorate a function/method to check its timings. To use the function's name: @sw.decorate def func(): pass To name it explicitly: @sw.decorate("name") def random_func_name(): pass Args: name_or_func: the name or the function to decorate. Returns: If a name is passed, returns this as a decorator, otherwise returns t...
juraj-google-style
def _PrintTSKPartitionIdentifiersOverview( self, volume_system, volume_identifiers): header = 'The following partitions were found:\n' self._output_writer.Write(header) column_names = ['Identifier', 'Offset (in bytes)', 'Size (in bytes)'] table_view = views.CLITabularTableView(column_names=c...
Prints an overview of TSK partition identifiers. Args: volume_system (dfvfs.TSKVolumeSystem): volume system. volume_identifiers (list[str]): allowed volume identifiers. Raises: SourceScannerError: if a volume cannot be resolved from the volume identifier.
juraj-google-style
def crop(img, i, j, h, w):
    """Crop the given PIL Image.

    Args:
        img (PIL Image): Image to be cropped.
        i (int): row of the upper left corner.
        j (int): column of the upper left corner.
        h (int): Height of the cropped image.
        w (int): Width of the cropped image.

    Returns:
        PIL Image: Cropped image.

    Raises:
        TypeError: if ``img`` is not a PIL Image.
    """
    if not _is_pil_image(img):
        raise TypeError('img should be PIL Image. Got {}'.format(type(img)))
    # PIL's crop box is (left, upper, right, lower).
    box = (j, i, j + w, i + h)
    return img.crop(box)
Crop the given PIL Image. Args: img (PIL Image): Image to be cropped. i (int): i in (i,j) i.e coordinates of the upper left corner. j (int): j in (i,j) i.e coordinates of the upper left corner. h (int): Height of the cropped image. w (int): Width of the cropped image. Returns: PIL Image: Cropped image.
codesearchnet
def is_fit_to_structure(self, structure, tol=0.01):
    """Tests whether a tensor is invariant with respect to the symmetry
    operations of a particular structure by testing whether the residual
    of the symmetric portion is below a tolerance.

    Args:
        structure (Structure): structure to be fit to.
        tol (float): tolerance for symmetry testing.
    """
    # NOTE(review): the residual is compared signed (no abs), so large
    # negative residuals pass — confirm that is intended.
    residual = self - self.fit_to_structure(structure)
    return (residual < tol).all()
Tests whether a tensor is invariant with respect to the symmetry operations of a particular structure by testing whether the residual of the symmetric portion is below a tolerance Args: structure (Structure): structure to be fit to tol (float): tolerance for symmetry testing
codesearchnet
def toco_convert(input_data, input_tensors, output_tensors, *args, **kwargs):
    """Convert a TensorFlow GraphDef to TFLite.

    This function is deprecated. Please use `tf.lite.TFLiteConverter` API
    instead. Conversion can be customized by providing arguments that are
    forwarded to `build_model_flags` and `build_conversion_flags`.

    Args:
        input_data: Input data (i.e. often `sess.graph_def`).
        input_tensors: Input tensors.
        output_tensors: Output tensors.
        *args: Forwarded to `convert_graphdef`.
        **kwargs: Forwarded to `convert_graphdef`.

    Returns:
        Whatever `convert_graphdef` returns for these arguments.
    """
    # Deprecated thin alias around convert_graphdef.
    return convert_graphdef(
        input_data, input_tensors, output_tensors, *args, **kwargs)
Convert a TensorFlow GraphDef to TFLite. This function is deprecated. Please use `tf.lite.TFLiteConverter` API instead. Conversion can be customized by providing arguments that are forwarded to `build_model_flags` and `build_conversion_flags` (see documentation for details). Args: input_data: Input data (i.e. often `s...
github-repos
def proc_val(key, val): float_keys = ('etot_conv_thr','forc_conv_thr','conv_thr','Hubbard_U','Hubbard_J0','defauss', 'starting_magnetization',) int_keys = ('nstep','iprint','nberrycyc','gdir','nppstr','ibrav','nat','ntyp','nbnd','nr1', 'nr2','nr3','nr1...
Static helper method to convert PWINPUT parameters to proper type, e.g., integers, floats, etc. Args: key: PWINPUT parameter key val: Actual value of PWINPUT parameter.
juraj-google-style
def add_sources_argument(cls, group, allow_filters=True, prefix=None, add_root_paths=False): prefix = (prefix or cls.argument_prefix) group.add_argument(('--%s-sources' % prefix), action='store', nargs='+', dest=('%s_sources' % prefix.replace('-', '_')), help=('%s source files to parse' % prefix)) if allow_...
Subclasses may call this to add sources and source_filters arguments. Args: group: arparse.ArgumentGroup, the extension argument group allow_filters: bool, Whether the extension wishes to expose a source_filters argument. prefix: str, arguments have to be namespaced.
codesearchnet
def _save_and_log_checkpoint(self, actor): actor_id = self._worker.actor_id checkpoint_info = self._worker.actor_checkpoint_info[actor_id] checkpoint_info.num_tasks_since_last_checkpoint += 1 now = int(1000 * time.time()) checkpoint_context = ray.actor.CheckpointContext(...
Save an actor checkpoint if necessary and log any errors. Args: actor: The actor to checkpoint. Returns: The result of the actor's user-defined `save_checkpoint` method.
juraj-google-style
def assign(self, value, use_locking=False, name=None, read_value=True):
    """Assigns a new value to the variable.

    This is essentially a shortcut for `assign(self, value)`.

    Args:
        value: A `Tensor`. The new value for this variable.
        use_locking: If `True`, use locking during the assignment.
        name: The name of the operation to be created.
        read_value: if True, return the assign tensor (which evaluates to the
            new value); otherwise return the underlying op.
    """
    assign_tensor = state_ops.assign(
        self._variable, value, use_locking=use_locking, name=name)
    return assign_tensor if read_value else assign_tensor.op
Assigns a new value to the variable. This is essentially a shortcut for `assign(self, value)`. Args: value: A `Tensor`. The new value for this variable. use_locking: If `True`, use locking during the assignment. name: The name of the operation to be created read_value: if True, will return something which evaluates t...
github-repos
def convert_persistent_value(self, shift, instruction):
    """Return converted `PersistentValueInstruction`.

    Args:
        shift (int): Offset time.
        instruction (PersistentValueInstruction): persistent value instruction.

    Returns:
        dict: Dictionary of required parameters.
    """
    return self._qobj_model(
        name='pv',
        t0=shift + instruction.start_time,
        ch=instruction.channels[0].name,
        val=instruction.command.value)
Return converted `PersistentValueInstruction`. Args: shift(int): Offset time. instruction (PersistentValueInstruction): persistent value instruction. Returns: dict: Dictionary of required parameters.
codesearchnet
def create_chunker(self, chunk_size):
    """Create a chunker performing content-defined chunking (CDC) using
    Rabin Karp's rolling hash scheme with a specific, expected chunk size.

    Args:
        chunk_size (int): (Expected) target chunk size.

    Returns:
        BaseChunker: A chunker object.
    """
    hasher = _rabinkarprh.RabinKarpHash(self.window_size, self._seed)
    # The threshold controls the expected average chunk size.
    hasher.set_threshold(1.0 / chunk_size)
    return RabinKarpCDC._Chunker(hasher)
Create a chunker performing content-defined chunking (CDC) using Rabin Karp's rolling hash scheme with a specific, expected chunk size. Args: chunk_size (int): (Expected) target chunk size. Returns: BaseChunker: A chunker object.
juraj-google-style
def open_model(self, model_path, audit=False):
    """Append an open non-workshared model entry to the journal.

    This instructs Revit to open a non-workshared model.

    Args:
        model_path (str): full path to non-workshared model.
        audit (bool): if True audits the model when opening.
    """
    template = templates.FILE_OPEN_AUDIT if audit else templates.FILE_OPEN
    self._add_entry(template.format(model_path=model_path))
Append a open non-workshared model entry to the journal. This instructs Revit to open a non-workshared model. Args: model_path (str): full path to non-workshared model audit (bool): if True audits the model when opening
juraj-google-style
def _field_to_json(field, row_value): if (row_value is None): return None if (field.mode == 'REPEATED'): return _repeated_field_to_json(field, row_value) if (field.field_type == 'RECORD'): return _record_field_to_json(field.fields, row_value) return _scalar_field_to_json(field, r...
Convert a field into JSON-serializable values. Args: field ( \ :class:`~google.cloud.bigquery.schema.SchemaField`, \ ): The SchemaField to use for type conversion and field name. row_value (Union[ \ Sequence[list], \ any, \ ]): Row data to be inserted. If the SchemaField's mode is REPEATED, assume this is a list. If ...
codesearchnet
def _evaluateTFLiteModelUsingSignatureDef(self, tflite_model, signature_key, inputs):
    """Evaluates the model on the `inputs`.

    Args:
        tflite_model: TensorFlow Lite model.
        signature_key: Signature key.
        inputs: Map from input tensor names in the SignatureDef to tensor
            values.

    Returns:
        Dictionary of outputs. Key is the output name in the SignatureDef
        'signature_key'; value is the output value.
    """
    interpreter = Interpreter(model_content=tflite_model)
    runner = interpreter.get_signature_runner(signature_key)
    return runner(**inputs)
Evaluates the model on the `inputs`. Args: tflite_model: TensorFlow Lite model. signature_key: Signature key. inputs: Map from input tensor names in the SignatureDef to tensor value. Returns: Dictionary of outputs. Key is the output name in the SignatureDef 'signature_key' Value is the output value
github-repos
def generate_typegraph(program: cfg.Program, var_table: dict[int, str], loader: jinja2.BaseLoader) -> str: encoder = typegraph_serializer.TypegraphEncoder() enc_prog = encoder.default(program) return _generate_visualization(template_file=_TYPEGRAPH_TEMPLATE_NAME, loader=loader, program=json.dumps(enc_prog),...
Generate the visualization webpage. Args: program: cfg.Program. The instance of the program to visualize. var_table: dict[int, str]. A mapping of cfg.Variable IDs to names. loader: A jinja2 loader Returns: str. The rendered visualization page.
github-repos
def get_by(self, field, value): if (not field): logger.exception(RESOURCE_CLIENT_INVALID_FIELD) raise ValueError(RESOURCE_CLIENT_INVALID_FIELD) filter = '"{0}=\'{1}\'"'.format(field, value) results = self.get_all(filter=filter) if ('.' not in field): results = [item for item in r...
Get the resource by passing a field and its value. Note: This function uses get_all passing a filter.The search is case-insensitive. Args: field: Field name to filter. value: Value to filter. Returns: dict
codesearchnet
def Serialize(self, writer): super(AccountState, self).Serialize(writer) writer.WriteUInt160(self.ScriptHash) writer.WriteBool(self.IsFrozen) writer.WriteVarInt(len(self.Votes)) for vote in self.Votes: writer.WriteBytes(vote) blen = len(self.Balances...
Serialize full object. Args: writer (neo.IO.BinaryWriter):
juraj-google-style
def default(self, value): if isinstance(value, messages.Enum): return str(value) if (six.PY3 and isinstance(value, bytes)): return value.decode('utf8') if isinstance(value, messages.Message): result = {} for field in value.all_fields(): item = value.get_assigned_v...
Return dictionary instance from a message object. Args: value: Value to get dictionary for. If not encodable, will call superclasses default method.
codesearchnet
def FindNode(self, component_path):
    """Find the node in the component_tree from component_path.

    Args:
        component_path: A list of components which reference a node in the
            component tree. This allows us to resume processing in the tree.

    Returns:
        A node in the component_tree.
    """
    current = self.state.component_tree
    for step in component_path:
        current = current[step]
    return current
Find the node in the component_tree from component_path. Args: component_path: A list of components which reference a node in the component tree. This allows us to resume processing in the tree. Returns: A node in the component_tree.
codesearchnet
def GetTermSize(self):
    """Returns the terminal (x, y) dimensions in characters.

    Returns:
        (x, y): A tuple of the terminal x and y dimensions.
    """
    return self._term_size
Returns the terminal (x, y) dimensions in characters. Returns: (x, y): A tuple of the terminal x and y dimensions.
github-repos
def __init__(self, **kwargs):
    """Initializes a decrypter.

    Args:
        kwargs (dict): keyword arguments depending on the decrypter.

    Raises:
        ValueError: when there are unused keyword arguments.
    """
    if kwargs:
        unused = ', '.join(kwargs)
        raise ValueError('Unused keyword arguments: {0:s}.'.format(unused))
    super(Decrypter, self).__init__()
Initializes a decrypter. Args: kwargs (dict): keyword arguments depending on the decrypter. Raises: ValueError: when there are unused keyword arguments.
juraj-google-style
def __init__(self, xid=None, experimenter=None, exp_type=None, data=b''):
    """Create an ExperimenterHeader with the optional parameters below.

    Args:
        xid (int): xid to be used on the message header.
        experimenter (int): Vendor ID: MSB 0: low-order bytes are IEEE OUI.
            MSB != 0: defined by ONF.
        exp_type (int): Experimenter defined.
        data (bytes): experimenter-defined payload (defaults to empty).
    """
    super().__init__(xid)
    # Remaining fields are experimenter-specific and stored verbatim.
    self.experimenter = experimenter
    self.exp_type = exp_type
    self.data = data
Create a ExperimenterHeader with the optional parameters below. Args: xid (int): xid to be used on the message header. experimenter (int): Vendor ID: MSB 0: low-order bytes are IEEE OUI. MSB != 0: defined by ONF. exp_type (int): Experimenter defined.
juraj-google-style
def to_geojson(self, filename, proj, metadata=None): if (metadata is None): metadata = {} json_obj = {'type': 'FeatureCollection', 'features': [], 'properties': {}} json_obj['properties']['times'] = self.times.tolist() json_obj['properties']['dx'] = self.dx json_obj['properties']['step'] = s...
Output the data in the STObject to a geoJSON file. Args: filename: Name of the file proj: PyProj object for converting the x and y coordinates back to latitude and longitue values. metadata: Metadata describing the object to be included in the top-level properties.
codesearchnet
def convert_datetime_type(obj): if (pd and (obj is pd.NaT)): return np.nan if (pd and isinstance(obj, pd.Period)): return (obj.to_timestamp().value / (10 ** 6.0)) if (pd and isinstance(obj, _pd_timestamp)): return (obj.value / (10 ** 6.0)) elif (pd and isinstance(obj, pd.Timedelt...
Convert any recognized date, time, or datetime value to floating point milliseconds since epoch. Arg: obj (object) : the object to convert Returns: float : milliseconds
codesearchnet
def delete_vmss_vms(access_token, subscription_id, resource_group, vmss_name, vm_ids): endpoint = ''.join([get_rm_endpoint(), '/subscriptions/', subscription_id, '/resourceGroups/', resource_group, '/providers/Microsoft.Compute/virtualMach...
Delete a VM in a VM Scale Set. Args: access_token (str): A valid Azure authentication token. subscription_id (str): Azure subscription id. resource_group (str): Azure resource group name. vmss_name (str): Name of the virtual machine scale set. vm_ids (str): String representation of a JSON list of VM IDs. E.g. '[1,2]'....
juraj-google-style
def _CopyDateTimeFromStringISO8601(self, time_string): if (not time_string): raise ValueError('Invalid time string.') time_string_length = len(time_string) (year, month, day_of_month) = self._CopyDateFromString(time_string) if (time_string_length <= 10): return {'year': year, 'month': mo...
Copies a date and time from an ISO 8601 date and time string. Args: time_string (str): time value formatted as: hh:mm:ss.######[+-]##:## Where # are numeric digits ranging from 0 to 9 and the seconds fraction can be either 3 or 6 digits. The fraction of second and time zone offset are optional. Returns: tuple[int, i...
codesearchnet
def set_record(self, name, record_id, record):
    """Save a record into the cache.

    Args:
        name (string): The name to save the model under.
        record_id (int): The record id.
        record: The model instance to cache.
    """
    # setdefault creates the per-name bucket on first use.
    self._cache.setdefault(name, {})[record_id] = record
Save a record into the cache. Args: name (string): The name to save the model under. record_id (int): The record id. record (:class:`cinder_data.model.CinderModel`): The model
codesearchnet
def sg_prod(tensor, opt):
    r"""Computes the product of elements across an axis of a tensor.

    See `tf.reduce_prod()` in tensorflow.

    Args:
        tensor: A `Tensor` (automatically given by chain).
        opt:
            axis: A tuple/list of integers or an integer. The axis to reduce.
            keep_dims: If true, retains reduced dimensions with length 1.
            name: If provided, replaces the current tensor's name.

    Returns:
        A `Tensor`.
    """
    axis, keep_dims, name = opt.axis, opt.keep_dims, opt.name
    return tf.reduce_prod(tensor, axis=axis, keep_dims=keep_dims, name=name)
r"""Computes the product of elements across axis of a tensor. See `tf.reduce_prod()` in tensorflow. Args: tensor: A `Tensor` (automatically given by chain). opt: axis : A tuple/list of integers or an integer. The axis to reduce. keep_dims: If true, retains reduced dimensions with length 1. name: If provided, replace ...
juraj-google-style
def AddArg(self, argument):
    """Adds a new argument to this expression.

    Args:
        argument (str): argument to add.

    Returns:
        True if the argument is the last argument, False otherwise.

    Raises:
        ParseError: If there are too many arguments.
    """
    self.args.append(argument)
    argument_count = len(self.args)
    if argument_count > self.number_of_args:
        raise errors.ParseError('Too many arguments for this expression.')
    return argument_count == self.number_of_args
Adds a new argument to this expression. Args: argument (str): argument to add. Returns: True if the argument is the last argument, False otherwise. Raises: ParseError: If there are too many arguments.
codesearchnet
def __checkDecisionParameters(self, result, **values): error = [] if not result: error.append('Function parameter (result array) should contain one or more header string!') if not values: error.append('Function parameter (values variables) should contain one or more variable') for header in result: ...
Checker of decision parameters, it will raise ValueError if finds something wrong. Args: result (array of str): See public decision methods **values (array of str): See public decision methods Raise: ValueError: Result array none. ValueError: Values dict none. ValueError: Not find result key in header. ValueError: Re...
juraj-google-style
def send_msg(self, address, args=None):
    """Send multiple args in a single message to a given address.

    Args:
        address (str): OSC Address. A leading '/' is prepended if missing.
        args (list): Arguments to be parsed in VVVV.
    """
    # Fix: the original used a mutable default argument (args=[]), which is
    # shared across calls; use None as the sentinel instead.
    if args is None:
        args = []
    if not address.startswith('/'):
        address = '/{}'.format(address)
    msg = osc_message_builder.OscMessageBuilder(address=address)
    for arg in args:
        msg.add_arg(arg)
    self.conn.send(msg.build())
Send multiple args into a single message to a given address. Args: address (str): OSC Address. args (list): Arguments to be parsed in VVVV.
juraj-google-style
def __init__(self, channel): self.Exchange = channel.unary_unary( '/communicator_objects.UnityToExternal/Exchange', request_serializer=mlagents_dot_envs_dot_communicator__objects_dot_unity__message__pb2.UnityMessage.SerializeToString, response_deserializer=mlagents_dot_envs_dot_communic...
Constructor. Args: channel: A grpc.Channel.
juraj-google-style
def create_pipeline_field(self, pipeline_key, name, field_type, **kwargs):
    """Creates a pipeline field with the provided attributes.

    Args:
        pipeline_key: key specifying the pipeline to add the field to.
        name: required name string.
        field_type: required type string [TEXT_INPUT, DATE or PERSON].
        kwargs: optional additional field attributes.

    Returns:
        tuple: (status code, field dict).
    """
    uri = '/'.join([
        self.api_uri,
        self.pipelines_suffix,
        pipeline_key,
        self.fields_suffix,
    ])
    return self._create_field(uri, name, field_type, **kwargs)
Creates a pipeline field with the provided attributes. Args: pipeline_key specifying the pipeline to add the field to name required name string field_type required type string [TEXT_INPUT, DATE or PERSON] kwargs {} return (status code, field dict)
juraj-google-style
def write(self, data): block_remaining = _BLOCK_SIZE - self.__position % _BLOCK_SIZE if block_remaining < _HEADER_LENGTH: self.__writer.write('\x00' * block_remaining) self.__position += block_remaining block_remaining = _BLOCK_SIZE if block_remaining < len(data) + _HEADER_LE...
Write single record. Args: data: record data to write as string, byte array or byte sequence.
juraj-google-style
def inv_attractor(dx, alpha: float = 300, gamma: int = 2):
    """Inverse attractor: dc = dx / (1 + alpha * dx**gamma).

    Here dx = a - c, where a is the attractor point, c the bin center and
    dc the shift in bin center. This is the default variant according to
    the accompanying paper.

    Args:
        dx (`torch.Tensor`): The difference tensor dx = Ai - Cj, where Ai is
            the attractor point and Cj is the bin center.
        alpha (`float`): Attractor strength.
        gamma (`int`): Attractor exponent.

    Returns:
        `torch.Tensor`: The shifted bin centers.
    """
    denominator = 1 + alpha * dx.pow(gamma)
    return dx.div(denominator)
Inverse attractor: dc = dx / (1 + alpha*dx^gamma), where dx = a - c, a = attractor point, c = bin center, dc = shift in bin center This is the default one according to the accompanying paper. Args: dx (`torch.Tensor`): The difference tensor dx = Ai - Cj, where Ai is the attractor point and Cj is the bin center. alpha ...
github-repos
def convert_bboxes_from_albumentations(bboxes, target_format, rows, cols, check_validity=False):
    """Convert bounding boxes from the albumentations format to `target_format`.

    Args:
        bboxes (list): List of bounding boxes with coordinates in the format
            used by albumentations.
        target_format (str): required format of the output bounding box.
            Should be 'coco' or 'pascal_voc'.
        rows (int): image height.
        cols (int): image width.
        check_validity (bool): whether to check the validity of each box.

    Returns:
        list: the converted bounding boxes.
    """
    converted = []
    for bbox in bboxes:
        converted.append(
            convert_bbox_from_albumentations(bbox, target_format, rows, cols, check_validity))
    return converted
Convert a list of bounding boxes from the format used by albumentations to a format, specified in `target_format`. Args: bboxes (list): List of bounding box with coordinates in the format used by albumentations target_format (str): required format of the output bounding box. Should be 'coco' or 'pascal_voc'. rows (int...
juraj-google-style
def filter_by_analysis_period(self, analysis_period):
    """Filter the Data Collection based on an analysis period.

    Args:
        analysis_period: A Ladybug analysis period.

    Returns:
        A new Data Collection with filtered data.
    """
    filtered = self.filter_by_months(analysis_period.months_int)
    # Stamp the new collection's header with the period it was filtered by.
    filtered.header._analysis_period = analysis_period
    return filtered
Filter the Data Collection based on an analysis period. Args: analysis period: A Ladybug analysis period Return: A new Data Collection with filtered data
juraj-google-style
def is_datafile_valid(datafile):
    """Given a datafile determine if it is valid or not.

    Args:
        datafile: JSON string representing the project.

    Returns:
        Boolean depending upon whether datafile is valid or not.
    """
    # Fix: the original bare `except:` clauses also swallowed SystemExit and
    # KeyboardInterrupt; `except Exception` keeps the permissive behavior
    # without hiding interpreter-exit signals.
    try:
        datafile_json = json.loads(datafile)
    except Exception:
        return False
    try:
        jsonschema.Draft4Validator(constants.JSON_SCHEMA).validate(datafile_json)
    except Exception:
        return False
    return True
Given a datafile determine if it is valid or not. Args: datafile: JSON string representing the project. Returns: Boolean depending upon whether datafile is valid or not.
juraj-google-style
def Open(self, file_object): file_object.seek(0, os.SEEK_SET) signature_data = file_object.read(6) self.file_format = None if (len(signature_data) > 2): if (signature_data[:2] == self._CPIO_SIGNATURE_BINARY_BIG_ENDIAN): self.file_format = 'bin-big-endian' elif (signature_data...
Opens the CPIO archive file. Args: file_object (FileIO): a file-like object. Raises: IOError: if the file format signature is not supported. OSError: if the file format signature is not supported.
codesearchnet
async def _call_rpc(self, header): length, _, cmd, feature, address = struct.unpack("<BBBBB", bytes(header)) rpc_id = (feature << 8) | cmd payload = self.rpc_payload[:length] self._logger.debug("Calling RPC %d:%04X with %s", address, rpc_id, binascii.hexlify(payload)) ...
Call an RPC given a header and possibly a previously sent payload Args: header (bytearray): The RPC header we should call
juraj-google-style
def del_method(self, m):
    """Remove an instance method or function if it exists.

    Args:
        m: The instance method or function to remove.
    """
    if isinstance(m, types.FunctionType) and not iscoroutinefunction(m):
        # Plain (non-coroutine) functions are keyed by identity alone.
        wrkey = ('function', id(m))
    else:
        # Bound methods are keyed by the underlying function and owner id.
        func, owner = get_method_vars(m)
        wrkey = (func, id(owner))
    if wrkey in self:
        del self[wrkey]
Remove an instance method or function if it exists Args: m: The instance method or function to remove
juraj-google-style
def read(self, input_stream, kmip_version=enums.KMIPVersion.KMIP_1_0): super(GetResponsePayload, self).read(input_stream, kmip_version=kmip_version) local_stream = utils.BytearrayStream(input_stream.read(self.length)) if self.is_tag_next(enums.Tags.OBJECT_TYPE, local_stream): self._object_type = pri...
Read the data encoding the Get response payload and decode it into its constituent parts. Args: input_stream (stream): A data stream containing encoded object data, supporting a read method; usually a BytearrayStream object. kmip_version (KMIPVersion): An enumeration defining the KMIP version with which the object wil...
codesearchnet
def CacheFileObject(self, path_spec, file_object):
    """Caches a file-like object based on a path specification.

    Args:
        path_spec (PathSpec): path specification.
        file_object (FileIO): file-like object.
    """
    cache_key = path_spec.comparable
    self._file_object_cache.CacheObject(cache_key, file_object)
Caches a file-like object based on a path specification. Args: path_spec (PathSpec): path specification. file_object (FileIO): file-like object.
juraj-google-style
def _collect_tokens(self, node: dict) -> list:
    """Generate every token reachable from ``node`` in the Trie.

    Args:
        node (dict): The Trie node from which token generation starts.

    Returns:
        list: Tokens spelled out from the given node downward.
    """
    terminator = self._termination_char
    tokens = [terminator] if terminator in node else []
    for edge, child in node.items():
        if edge == terminator:
            continue
        tokens.extend(edge + suffix for suffix in self._collect_tokens(child))
    return tokens
Generates all tokens in the Trie starting from a given node. Args: node (dict): The node in the Trie from which tokens need to be generated. Returns: list: List of tokens generated from the given node.
github-repos
def _get_query_results(self, job_id, retry, project=None, timeout_ms=None, location=None): extra_params = {'maxResults': 0} if (project is None): project = self.project if (timeout_ms is not None): extra_params['timeoutMs'] = timeout_ms if (location is None): location = self.loca...
Get the query results object for a query job. Arguments: job_id (str): Name of the query job. retry (google.api_core.retry.Retry): (Optional) How to retry the RPC. project (str): (Optional) project ID for the query job (defaults to the project of the client). timeout_ms (int): (Optional) number of milliseconds the the...
codesearchnet
def prune_intermediate_layers(node): if not node.get('children'): return layer_blocks = [(i, child) for i, child in enumerate(node['children']) if is_layer_block(child)] if len(layer_blocks) > 2: to_remove = [i for i, _ in layer_blocks[1:-1]] node['children'] = [child for i, child in...
Recursively removes intermediate layers from the tree to improve readability. Keeps at least the first and last layers if many consecutive layers are present. Args: node (`dict`): The root or subnode to prune recursively.
github-repos
def __init__(self, all_reduce_alg='nccl', num_packs=1):
    """Initializes the object.

    Args:
        all_reduce_alg: the all-reduce algorithm to use; currently only
            "nccl" or "hierarchical_copy" are supported.
        num_packs: a non-negative integer giving the number of packs to
            split values into. If zero, no packing is done.
    """
    self._all_reduce_alg = all_reduce_alg
    self._num_packs = num_packs
    # NOTE(review): presumably the fallback strategy when all-reduce cannot
    # be used — confirm against the class's reduce implementation.
    self._simple_cross_replica_ops = ReductionToOneDevice()
    super(AllReduceCrossDeviceOps, self).__init__()
Initializes the object. Args: all_reduce_alg: the all-reduce algorithm to use, currently only "nccl" or "hierarchical_copy" are supported. num_packs: a non-negative integer. The number of packs to split values into. If zero, no packing will be done.
github-repos
def read_named_csv(name, data_path=DATA_PATH, nrows=None, verbose=True): if os.path.isfile(name): try: return read_json(name) except (IOError, UnicodeDecodeError, json.JSONDecodeError): pass try: return read_csv(name, nrows=nrows) except (IOEr...
Convert a dataset in a local file (usually a CSV) into a Pandas DataFrame TODO: should be called read_named_dataset Args: `name` is assumed not to have an extension (like ".csv"); alternative extensions are tried automatically.
juraj-google-style
def get_new_address(self, id=None, endpoint=None):
    """Create a new address via the RPC endpoint.

    Args:
        id: (int, optional) id to use for response tracking.
        endpoint: (RPCEndpoint, optional) endpoint to use.

    Returns:
        json object of the result or the error encountered in the RPC call.
    """
    return self._call_endpoint(GET_NEW_ADDRESS, id=id, endpoint=endpoint)
Create new address Args: id: (int, optional) id to use for response tracking endpoint: (RPCEndpoint, optional) endpoint to specify to use Returns: json object of the result or the error encountered in the RPC call
juraj-google-style
def ListPlugins(logdir):
    """List all the plugins that have registered assets in logdir.

    If the plugins directory does not exist, an empty list is returned.
    This maintains compatibility with old directories that have no plugins
    written.

    Args:
        logdir: A directory that was created by a TensorFlow events writer.

    Returns:
        A list of plugin names, as strings.
    """
    plugins_dir = os.path.join(logdir, _PLUGINS_DIR)
    try:
        entries = tf.io.gfile.listdir(plugins_dir)
    except tf.errors.NotFoundError:
        return []
    plugins = []
    for entry in entries:
        # Entries ending in '/' are directories; otherwise check explicitly.
        if entry.endswith('/') or _IsDirectory(plugins_dir, entry):
            plugins.append(entry.rstrip('/'))
    return plugins
List all the plugins that have registered assets in logdir. If the plugins_dir does not exist, it returns an empty list. This maintains compatibility with old directories that have no plugins written. Args: logdir: A directory that was created by a TensorFlow events writer. Returns: a list of plugin names, as string...
codesearchnet
def resource_import(filename: str, *, module: Optional[epath.PathLike]=None) -> str: path = epath.resource_path(module) if module else _static_path() path = path.joinpath(filename) content = path.read_text() if path.suffix == '.css': return f'<style>{content}</style>' elif path.suffix == '.j...
Returns the `HTML` associated with the resource. Args: filename: Path to the `.css`, `.js` resource module: Python module name from which the filename is relative too.
github-repos
def bytes_to_long(bytesdata: bytes) -> int:
    """Converts an 8-byte sequence to a long integer.

    Args:
        bytesdata: 8 consecutive bytes, as a ``bytes`` object, in
            little-endian format (least significant byte [LSB] first)

    Returns:
        integer

    Raises:
        ValueError: if ``bytesdata`` is not exactly 8 bytes long.
    """
    # Fix: raise instead of assert — asserts are stripped under ``python -O``
    # and would silently accept malformed input.
    if len(bytesdata) != 8:
        raise ValueError('bytesdata must be exactly 8 bytes long')
    # int.from_bytes implements the little-endian byte sum natively.
    return int.from_bytes(bytesdata, 'little')
Converts an 8-byte sequence to a long integer. Args: bytesdata: 8 consecutive bytes, as a ``bytes`` object, in little-endian format (least significant byte [LSB] first) Returns: integer
juraj-google-style
def open_required(func): @functools.wraps(func) def wrapper(self, *args, **kwargs): 'Wrapper function to check that the given ``JLink`` has been\n opened.\n\n Args:\n self (JLink): the ``JLink`` instance\n args: list of arguments to pass to the wrapped fu...
Decorator to specify that the J-Link DLL must be opened, and a J-Link connection must be established. Args: func (function): function being decorated Returns: The wrapper function.
codesearchnet
def _config_session():
    """Configure a TensorFlow session for a particular device.

    Returns:
        tensorflow.Session: a session restricted to GPU "0" with GPU
        memory growth enabled.
    """
    session_config = tf.ConfigProto()
    gpu_options = session_config.gpu_options
    gpu_options.allow_growth = True
    gpu_options.visible_device_list = '0'
    return tf.Session(config=session_config)
Configure session for particular device Returns: tensorflow.Session
codesearchnet
def build_inputs_with_special_tokens(self, token_ids_0: List[int], token_ids_1: Optional[List[int]] = None) -> List[int]:
    """Build model inputs from a sequence or a pair of sequences.

    Appends the separator token after each sequence:

    - single sequence: ``X [SEP]``
    - pair of sequences: ``A [SEP] B [SEP]``

    NOTE(review): despite the BERT-style description elsewhere, no [CLS]
    token is prepended here — confirm the classification token is added by
    the caller.

    Args:
        token_ids_0 (`List[int]`): List of IDs to which the special tokens
            will be added.
        token_ids_1 (`List[int]`, *optional*): Second list of IDs for a
            sequence pair.

    Returns:
        `List[int]`: The input IDs with separator tokens appended.
    """
    sep = [self.sep_token_id]
    if token_ids_1 is None:
        return token_ids_0 + sep
    return token_ids_0 + sep + token_ids_1 + sep
Build model inputs from a sequence or a pair of sequence for sequence classification tasks by concatenating and adding special tokens. A BERT sequence has the following format: - single sequence: `[CLS] X [SEP]` - pair of sequences: `[CLS] A [SEP] B [SEP]` Args: token_ids_0 (`List[int]`): List of IDs to which the spe...
github-repos
def char_decode(self, sequences):
    """Convert lists of char token ids into strings via the char tokenizer.

    Args:
        sequences (`torch.Tensor`): List of tokenized input ids.

    Returns:
        `List[str]`: The char-decoded sentences with spaces stripped.
    """
    decoded = self.char_tokenizer.batch_decode(sequences)
    return [text.replace(' ', '') for text in decoded]
Convert a list of lists of char token ids into a list of strings by calling char tokenizer. Args: sequences (`torch.Tensor`): List of tokenized input ids. Returns: `List[str]`: The list of char decoded sentences.
github-repos
def detect_content_type(self, path=None, payload=None, objectInput=None): if objectInput: message = 'Detection content type with file object is not stable.' log.exception(message) raise TikaAppError(message) f = file_path(path, payload, objectInput) switches = ['-d', f] result = ...
Return the content type of passed file or payload. Args: path (string): Path of file to analyze payload (string): Payload base64 to analyze objectInput (object): file object/standard input to analyze Returns: content type of file (string)
codesearchnet
def update(self, data):
    """Update the current memory record with the given data dict.

    Args:
        data (dict): Data dictionary to update the record attributes with.
    """
    for field, value in data.items():
        setattr(self, field, value)
Update the current memory record with the given data dict. Args: data (dict): Data dictionary to update the record attributes with.
codesearchnet
def __init__(self, content=None, min=0, max=HUGE, name=None): assert 0 <= min <= max <= HUGE, (min, max) if content is not None: content = tuple(map(tuple, content)) assert len(content), repr(content) for alt in content: assert len(alt), repr(alt) self.content = content ...
Initializer. Args: content: optional sequence of subsequences of patterns; if absent, matches one node; if present, each subsequence is an alternative [*] min: optional minimum number of times to match, default 0 max: optional maximum number of times to match, default HUGE name: optional name assigned to this match [...
github-repos
def __init__(self, config=None, start=True): config = config or DispatcherConfig() if config.fault_tolerant_mode and (not config.work_dir): raise ValueError('Cannot enable fault tolerant mode without configuring a work dir. Make sure to set `work_dir` in the `config` object passed to `DispatcherServer`....
Creates a new dispatch server. Args: config: (Optional.) A `tf.data.experimental.service.DispatcherConfig` configuration. If `None`, the dispatcher will use default configuration values. start: (Optional.) Boolean, indicating whether to start the server after creating it. Defaults to True.
github-repos
def _add_bound_method(self, bound_method, identify_observed): inst = bound_method.__self__ method_name = bound_method.__name__ key = self.make_key(bound_method) if key not in self.observers: self.observers[key] = ObserverBoundMethod( inst, method_nam...
Add an bound method as an observer. Args: bound_method: The bound method to add as an observer. identify_observed: See the docstring for add_observer. Returns: True if the bound method is added, otherwise False.
juraj-google-style