code
stringlengths
20
4.93k
docstring
stringlengths
33
1.27k
source
stringclasses
3 values
def alias_tags(tags_list, alias_map):
    """Rewrite every tag in each tag list through ``alias_map``.

    Tags that map to ``None`` are dropped and duplicates are removed.
    Result order within each list is unspecified (set-based dedup, as
    in the original).

    Args:
        tags_list (list): list of tag lists to update.
        alias_map (dict): maps old tag -> new tag; a value of ``None``
            removes the tag. Unknown tags pass through unchanged.

    Returns:
        list: one deduplicated, aliased tag list per input list.
    """
    def _remap(tags):
        remapped = (alias_map.get(tag, tag) for tag in tags)
        return list({tag for tag in remapped if tag is not None})

    return [_remap(tags) for tags in tags_list]
update tags to new values Args: tags_list (list): alias_map (list): list of 2-tuples with regex, value Returns: list: updated tags CommandLine: python -m utool.util_tags alias_tags --show Example: >>> # DISABLE_DOCTEST >>> from utool.util_tags import * # NOQA >>> import utool as ut >>> tags_list = [['t1', 't2'], [...
codesearchnet
def taubin(script, iterations=10, t_lambda=0.5, t_mu=(- 0.53), selected=False): filter_xml = ''.join([' <filter name="Taubin Smooth">\n', ' <Param name="lambda" ', 'value="{}" '.format(t_lambda), 'description="Lambda" ', 'type="RichFloat" ', '/>\n', ' <Param name="mu" ', 'value="{}" '.format(t_mu), 'descript...
The lambda & mu Taubin smoothing, it make two steps of smoothing, forth and back, for each iteration. Based on: Gabriel Taubin "A signal processing approach to fair surface design" Siggraph 1995 Args: script: the FilterScript object or script filename to write the filter to. iterations (int): The number of times that...
codesearchnet
def get_channel_dimension_axis(image: np.ndarray, input_data_format: Optional[Union[ChannelDimension, str]]=None) -> int: if input_data_format is None: input_data_format = infer_channel_dimension_format(image) if input_data_format == ChannelDimension.FIRST: return image.ndim - 3 elif input_d...
Returns the channel dimension axis of the image. Args: image (`np.ndarray`): The image to get the channel dimension axis of. input_data_format (`ChannelDimension` or `str`, *optional*): The channel dimension format of the image. If `None`, will infer the channel dimension from the image. Returns: The channel dimensio...
github-repos
def CopyToDateTimeString(self): if (self._timestamp is None): return None (number_of_days, hours, minutes, seconds) = self._GetTimeValues(int(self._timestamp)) (year, month, day_of_month) = self._GetDateValuesWithEpoch(number_of_days, self._EPOCH) microseconds = int(((self._timestamp % 1) * defi...
Copies the Cocoa timestamp to a date and time string. Returns: str: date and time value formatted as: YYYY-MM-DD hh:mm:ss.###### or None if the timestamp cannot be copied to a date and time string.
codesearchnet
def look_up(self, **keys: Dict[InstanceName, ScalarValue]) -> "ArrayEntry": if not isinstance(self.schema_node, ListNode): raise InstanceValueError(self.json_pointer(), "lookup on non-list") try: for i in range(len(self.value)): en = self.value[i] ...
Return the entry with matching keys. Args: keys: Keys and values specified as keyword arguments. Raises: InstanceValueError: If the receiver's value is not a YANG list. NonexistentInstance: If no entry with matching keys exists.
juraj-google-style
def from_service_account_file(cls, filename, **kwargs):
    """Creates a Credentials instance from a service account json file.

    Args:
        filename (str): The path to the service account json file.
        kwargs: Additional arguments to pass to the constructor.

    Returns:
        The constructed credentials instance.
    """
    required_fields = ['client_email', 'token_uri']
    info, signer = _service_account_info.from_filename(
        filename, require=required_fields)
    return cls._from_signer_and_info(signer, info, **kwargs)
Creates a Credentials instance from a service account json file. Args: filename (str): The path to the service account json file. kwargs: Additional arguments to pass to the constructor. Returns: google.auth.service_account.Credentials: The constructed credentials.
juraj-google-style
def _fdopen(self, *args, **kwargs):
    """Redirector to the open() builtin, backed by the fake filesystem.

    Args:
        *args: Pass-through args; args[0] must be a file descriptor.
        **kwargs: Pass-through kwargs.

    Returns:
        File object corresponding to the file descriptor.

    Raises:
        TypeError: if the file descriptor is not an integer.
    """
    file_des = args[0]
    if not is_int_type(file_des):
        raise TypeError('an integer is required')
    fake_open = FakeFileOpen(self.filesystem)
    return fake_open(*args, **kwargs)
Redirector to open() builtin function. Args: *args: Pass through args. **kwargs: Pass through kwargs. Returns: File object corresponding to file_des. Raises: TypeError: if file descriptor is not an integer.
juraj-google-style
def assertArrayNear(self, farray1, farray2, err, msg=None):
    """Asserts that two float arrays are elementwise near each other.

    Checks that the arrays have equal length and that for each pair of
    elements |f1 - f2| < err; asserts a test failure otherwise.

    Args:
        farray1: a list of float values.
        farray2: a list of float values.
        err: a float value, the allowed absolute difference.
        msg: Optional message to report on failure.
    """
    self.assertEqual(len(farray1), len(farray2), msg=msg)
    for expected, actual in zip(farray1, farray2):
        self.assertNear(float(expected), float(actual), err, msg=msg)
Asserts that two float arrays are near each other. Checks that for all elements of farray1 and farray2 |f1 - f2| < err. Asserts a test failure if not. Args: farray1: a list of float values. farray2: a list of float values. err: a float value. msg: Optional message to report on failure.
github-repos
def set_window_size(self, width, height, window_handle='current'):
    """Sets the width and height of the current window.

    Support: Web(WebView)

    Args:
        width(int): the width in pixels.
        height(int): the height in pixels.
        window_handle(str): Identifier of window_handle,
            default to 'current'.
    """
    payload = {
        'width': int(width),
        'height': int(height),
        'window_handle': window_handle,
    }
    self._execute(Command.SET_WINDOW_SIZE, payload)
Sets the width and height of the current window. Support: Web(WebView) Args: width(int): the width in pixels. height(int): the height in pixels. window_handle(str): Identifier of window_handle, default to 'current'. Returns: WebDriver Object.
juraj-google-style
def matches_alias(self, alias: str) -> bool:
    """Indicates whether the expression will be selected as the given alias.

    Base implementation: conservatively reports no match. Sub-classes
    which can safely determine a match override this.

    Args:
        alias: the alias to test against this expression.

    Returns:
        Always False in this base implementation.
    """
    # Parameters intentionally unused in the base class.
    del self, alias
    return False
Indicates whether the expression will be selected as the given alias. Intended to be over-ridden by sub-classes which can safely implement it. Given an expression and an alias, indicates whether the expression will be SELECT'd as the given alias. For example, an expression like `SELECT a.b` matches the alias 'b', maki...
github-repos
def get_patched_request(requires, patchlist): rules = {'': (True, True, True), '!': (False, False, False), '~': (False, False, True), '^': (True, True, True)} requires = [(Requirement(x) if (not isinstance(x, Requirement)) else x) for x in requires] appended = [] for patch in patchlist: if (patc...
Apply patch args to a request. For example, consider: >>> print get_patched_request(["foo-5", "bah-8.1"], ["foo-6"]) ["foo-6", "bah-8.1"] >>> print get_patched_request(["foo-5", "bah-8.1"], ["^bah"]) ["foo-5"] The following rules apply wrt how normal/conflict/weak patches override (note though that the new request i...
codesearchnet
def __field_to_parameter_type(self, field): variant = field.variant if (variant == messages.Variant.MESSAGE): raise TypeError("A message variant can't be used in a parameter.") custom_variant_map = {messages.Variant.SINT32: 'int32', messages.Variant.SINT64: 'int64', messages.Variant.BOOL: 'boolean',...
Converts the field variant type into a string describing the parameter. Args: field: An instance of a subclass of messages.Field. Returns: A string corresponding to the variant enum of the field, with a few exceptions. In the case of signed ints, the 's' is dropped; for the BOOL variant, 'boolean' is used; and for th...
codesearchnet
def copy_pkg(self, filename, id_=-1):
    """Copy a package to the distribution server.

    Bundle-style packages must be zipped prior to copying.

    Args:
        filename: Full path to file to upload.
        id_: ID of Package object to associate with, or -1 for new
            packages (default).
    """
    self._copy(filename, id_=id_, file_type=PKG_FILE_TYPE)
Copy a package to the distribution server. Bundle-style packages must be zipped prior to copying. Args: filename: Full path to file to upload. id_: ID of Package object to associate with, or -1 for new packages (default).
codesearchnet
def _Enter(tensor, frame_name, is_constant=False, parallel_iterations=10, use_ref=True, use_input_shape=True, name=None): tensor = ops.internal_convert_to_tensor_or_composite(tensor, as_ref=True) if isinstance(tensor, tensor_lib.Tensor): if tensor.dtype._is_ref_dtype and use_ref: result = ge...
Creates or finds a child frame, and makes `tensor` available to it. The unique `frame_name` is used by the `Executor` to identify frames. If `is_constant` is true, `tensor` is a constant in the child frame; otherwise it may be changed in the child frame. At most `parallel_iterations` iterations are run in parallel in ...
github-repos
def get_vep_info(vep_string, vep_header):
    """Build one dict per VEP annotation in a CSQ string.

    Each comma-separated annotation in ``vep_string`` is split on '|'
    and zipped with ``vep_header`` to form a dict.

    Args:
        vep_string (str): A string with the CSQ annotation.
        vep_header (list): A list with the vep header column names.

    Returns:
        list: one dict (header name -> value) per annotation.
    """
    annotations = []
    for raw_annotation in vep_string.split(','):
        values = raw_annotation.split('|')
        annotations.append(dict(zip(vep_header, values)))
    return annotations
Make the vep annotations into a dictionaries A vep dictionary will have the vep column names as keys and the vep annotations as values. The dictionaries are stored in a list Args: vep_string (string): A string with the CSQ annotation vep_header (list): A list with the vep header Return: vep_annotations (list): A lis...
juraj-google-style
def peek_step(self, val: ObjectValue, sn: 'DataNode') -> Tuple[(Value, 'DataNode')]:
    """Return member value addressed by the receiver + its schema node.

    Args:
        val: Current value (object).
        sn: Current schema node.

    Returns:
        Tuple of (member value or None when absent, child schema node).
    """
    child = sn.get_data_child(self.name, self.namespace)
    try:
        member = val[child.iname()]
    except (IndexError, KeyError, TypeError):
        member = None
    return (member, child)
Return member value addressed by the receiver + its schema node. Args: val: Current value (object). sn: Current schema node.
codesearchnet
def get_stats_for_node_def(graph, node, statistic_type) -> Any:
    """Looks up the node's statistics function in the registry and calls it.

    Args:
        graph: Graph object the node belongs to.
        node: NodeDef from a GraphDef.
        statistic_type: string naming the statistic to compute.

    Returns:
        The value produced by the registered statistics function, or an
        empty OpStats object if no function is registered for this op
        type (LookupError).
    """
    registry_key = '{},{}'.format(node.op, statistic_type)
    try:
        # Keep the call inside the try: a LookupError raised by the
        # stats function itself also falls back to an empty result,
        # matching the original behavior.
        stats_func = _stats_registry.lookup(registry_key)
        return stats_func(graph, node)
    except LookupError:
        return OpStats(statistic_type)
Looks up the node's statistics function in the registry and calls it. This function takes a Graph object and a NodeDef from a GraphDef, and if there's an associated statistics method, calls it and returns a result. If no function has been registered for the particular node type, it returns an empty statistics object. ...
github-repos
def add_archive_as_dir(self, zip_file_obj): BalancedDiscStorage._check_interface(zip_file_obj) file_hash = self._get_hash(zip_file_obj) dir_path = self._create_dir_path(file_hash) full_path = os.path.join(dir_path, file_hash) if os.path.exists(full_path): s...
Add archive to the storage and unpack it. Args: zip_file_obj (file): Opened file-like object. Returns: obj: Path where the `zip_file_obj` was unpacked wrapped in \ :class:`.PathAndHash` structure. Raises: ValueError: If there is too many files in .zip archive. \ See :attr:`._max_zipfiles` for details. AssertionError...
juraj-google-style
def fetch_token(self, **kwargs):
    """Completes the Authorization Flow and obtains an access token.

    This is the final step in the OAuth 2.0 Authorization Flow, called
    after the user consents. Delegates to
    :meth:`requests_oauthlib.OAuth2Session.fetch_token`, supplying the
    client configuration's token URI and client secret.

    Args:
        kwargs: Arguments passed through to the underlying
            ``fetch_token`` call.

    Returns:
        Whatever the underlying OAuth2 session ``fetch_token`` returns.
    """
    kwargs.setdefault('client_secret', self.client_config['client_secret'])
    token_uri = self.client_config['token_uri']
    return self.oauth2session.fetch_token(token_uri, **kwargs)
Completes the Authorization Flow and obtains an access token. This is the final step in the OAuth 2.0 Authorization Flow. This is called after the user consents. This method calls :meth:`requests_oauthlib.OAuth2Session.fetch_token` and specifies the client configuration's token URI (usually Google's token server). A...
codesearchnet
def attention_bias_batch(batch_coordinates_q, batch_coordinates_k=None, condition_fn=None): if batch_coordinates_k is None: batch_coordinates_k = batch_coordinates_q def to_float(bc): bc = tf.squeeze(bc, 1) bc = tf.to_float(bc) return bc ...
Generate a mask to prevent the batch to attend to each others. Args: batch_coordinates_q: Int-like Tensor of shape [length_q, 1] containing the coordinates of the batches batch_coordinates_k: Int-like Tensor of shape [length_k, 1] containing the coordinates of the batches. If None, do self-attention. condition_fn: Cal...
juraj-google-style
def hashed(field_name, percent, fields=None, count=0): if (field_name is None): raise Exception('Hash field must be specified') def _hashed_sampling(sql): projection = Sampling._create_projection(fields) sql = ('SELECT %s FROM (%s) WHERE MOD(ABS(FARM_FINGERPRINT(CAST(%s AS STRING))), 10...
Provides a sampling strategy based on hashing and selecting a percentage of data. Args: field_name: the name of the field to hash. percent: the percentage of the resulting hashes to select. fields: an optional list of field names to retrieve. count: optional maximum count of rows to pick. Returns: A sampling function ...
codesearchnet
def parse(self, **global_args): if self.build_file not in ParseContext._parsed: butcher_context = {} for str_to_exec in self._strs_to_exec: ast = compile(str_to_exec, '<string>', 'exec') exec_function(ast, butcher_contex...
Entry point to parsing a BUILD file. Args: **global_args: Variables to include in the parsing environment.
juraj-google-style
def __init__(self, root_path, root_url, site_title, site_desc=None): self.root_path = root_path self.root_url = root_url self.site_title = site_title self.site_desc = site_desc self.cm = russell.content.ContentManager(root_url) self.pages = self.cm.pages self.posts = self.cm.posts self.tags = self....
Constructor. Args: root_path (str): Full path to the directory which contains the posts, pages, templates etc. directories. root_url (str): The root URL of your website. site_title (str): The title of your website. site_desc (str): A subtitle or description of your website.
juraj-google-style
def query(self, expr, **kwargs): columns = self.columns def query_builder(df, **kwargs): df = df.copy() df.index = pandas.RangeIndex(len(df)) df.columns = columns df.query(expr, inplace=True, **kwargs) df.columns = pandas.RangeIndex(len(df.columns)) return df ...
Query columns of the DataManager with a boolean expression. Args: expr: Boolean expression to query the columns with. Returns: DataManager containing the rows where the boolean expression is satisfied.
codesearchnet
def read(self, nodes=None, **kwargs):
    """Load datasets from the necessary reader.

    Args:
        nodes (iterable): DependencyTree Node objects; when None, the
            dependency-tree leaves for all not-yet-loaded wishlist
            datasets are used.
        **kwargs: Keyword arguments to pass to the reader's `load`
            method.

    Returns:
        DatasetDict of loaded datasets.
    """
    if nodes is None:
        missing = self.wishlist - set(self.datasets.keys())
        nodes = self.dep_tree.leaves(nodes=missing)
    return self._read_datasets(nodes, **kwargs)
Load datasets from the necessary reader. Args: nodes (iterable): DependencyTree Node objects **kwargs: Keyword arguments to pass to the reader's `load` method. Returns: DatasetDict of loaded datasets
codesearchnet
def create_resource(self, resource_type=None, uri=None):
    """Convenience method for instantiating a new resource.

    Note: the Resource is instantiated but not yet created on the
    server; resource.create() must still be called.

    Args:
        resource_type: resource type to create, one of NonRDFSource
            (Binary), BasicContainer, DirectContainer or
            IndirectContainer.
        uri (rdflib.term.URIRef, str): uri of resource to create.

    Returns:
        An instance of the requested resource type.

    Raises:
        TypeError: if resource_type is not a supported Resource type.
    """
    supported_types = (
        NonRDFSource,
        Binary,
        BasicContainer,
        DirectContainer,
        IndirectContainer,
    )
    if resource_type in supported_types:
        return resource_type(self, uri)
    raise TypeError(
        'expecting Resource type, such as BasicContainer or NonRDFSource')
Convenience method for creating a new resource Note: A Resource is instantiated, but is not yet created. Still requires resource.create(). Args: uri (rdflib.term.URIRef, str): uri of resource to create resource_type (NonRDFSource (Binary), BasicContainer, DirectContainer, IndirectContainer): resource type to create...
codesearchnet
def month_name_to_number(month, to_int=False):
    """Convert a month name (MMM) to its number (01-12).

    Args:
        month (str): 3-letter string describing the month (e.g. 'Jan').
        to_int (bool): cast the number to int or not.

    Returns:
        str/int: the month's number (between '01' and '12', or 1 and 12
        when ``to_int`` is true), or None when the month name is
        unknown.
    """
    number = {
        'Jan': '01', 'Feb': '02', 'Mar': '03', 'Apr': '04',
        'May': '05', 'Jun': '06', 'Jul': '07', 'Aug': '08',
        'Sep': '09', 'Oct': '10', 'Nov': '11', 'Dec': '12',
    }.get(month)
    # Bug fix: previously an unknown month with to_int=True raised
    # TypeError (int(None)); now None is returned consistently for
    # unknown months regardless of to_int.
    if number is None:
        return None
    return int(number) if to_int else number
Convert a month name (MMM) to its number (01-12). Args: month (str): 3-letters string describing month. to_int (bool): cast number to int or not. Returns: str/int: the month's number (between 01 and 12).
juraj-google-style
def pad(self, images: 'torch.Tensor', size: int) -> 'torch.Tensor': height, width = get_image_size(images, ChannelDimension.FIRST) pad_height = (height pad_width = (width return F.pad(images, (0, 0, pad_width, pad_height), padding_mode='symmetric')
Pad an image to make the height and width divisible by `size`. Args: images (`torch.Tensor`): Images to pad. size (`int`): The size to make the height and width divisible by. Returns: `torch.Tensor`: The padded images.
github-repos
def signature(cert, sig, body): body = six.b(body) sig = base64.decodestring(sig) padder = padding.PKCS1v15() public_key = cert.public_key() try: public_key.verify(sig, body, padder, hashes.SHA1()) return True except InvalidSignature: warnings.warn('Signature verification...
Validate data request signature. See `validate.request` for additional info. Args: cert: cryptography.hazmat.backends.openssl.x509._Certificate. The Amazon signing certificate. sig: str. Signature header value sent by request. body: str. HTTPS request body. Returns: bool: True if valid, False otherwise.
codesearchnet
def get_cost_per_kg(self, comp):
    """Get best estimate of minimum cost/kg based on known data.

    Args:
        comp: Composition as a pymatgen.core.structure.Composition
            (anything accepted by the Composition constructor also
            works).

    Returns:
        float of cost/kg
    """
    if not isinstance(comp, Composition):
        comp = Composition(comp)
    # cost/mol divided by mass per mole (weight in kg times Avogadro).
    mass_kg = comp.weight.to("kg")
    return self.get_cost_per_mol(comp) / (mass_kg * const.N_A)
Get best estimate of minimum cost/kg based on known data Args: comp: Composition as a pymatgen.core.structure.Composition Returns: float of cost/kg
juraj-google-style
def Deserialize(self, reader): super(SpentCoinState, self).Deserialize(reader) self.TransactionHash = reader.ReadUInt256() self.TransactionHeight = reader.ReadUInt32() count = reader.ReadVarInt() items = [0] * count for i in range(0, count): index ...
Deserialize full object. Args: reader (neocore.IO.BinaryReader):
juraj-google-style
def WriteSignedBinaryBlobs(binary_urn, blobs, token=None): if _ShouldUseLegacyDatastore(): aff4.FACTORY.Delete(binary_urn, token=token) with data_store.DB.GetMutationPool() as mutation_pool: with aff4.FACTORY.Create(binary_urn, collects.GRRSignedBlob, mode='w', mutation_pool=mutation_poo...
Saves signed blobs to the datastore. If a signed binary with the given URN already exists, its contents will get overwritten. Args: binary_urn: RDFURN that should serve as a unique identifier for the binary. blobs: An Iterable of signed blobs to write to the datastore. token: ACL token to use with the legacy (non-rel...
codesearchnet
def initialize(self, prefix_name='default', *args, **kwargs): if self.loaded: raise WorkdirError(('Workdir %s already initialized' % self.path)) if (not os.path.exists(self.path)): LOGGER.debug('Creating workdir %s', self.path) os.makedirs(self.path) self.prefixes[prefix_name] = self...
Initializes a workdir by adding a new prefix to the workdir. Args: prefix_name(str): Name of the new prefix to add *args: args to pass along to the prefix constructor *kwargs: kwargs to pass along to the prefix constructor Returns: The newly created prefix Raises: PrefixAlreadyExists: if the prefix name already exis...
codesearchnet
def _get_dtype_from_nested_lists(list_or_tuple): for elem in list_or_tuple: if isinstance(elem, core.Tensor): return elem.dtype.base_dtype elif isinstance(elem, (list, tuple)): maybe_dtype = _get_dtype_from_nested_lists(elem) if maybe_dtype is not None: ...
Returns the dtype of any tensor-like object in `list_or_tuple`, if found. Args: list_or_tuple: A list or tuple representing an object that can be converted to a `tf.Tensor`. Returns: The dtype of any tensor-like object in `list_or_tuple`, or `None` if no such object exists.
github-repos
def make_value_from_datastore(self, value):
    """Convert value from datastore representation.

    Args:
        value: datastore value (a JSON string), or None.

    Returns:
        value to store in the model: None, a plain dict, or an instance
        built via ``self.data_type.from_json``.
    """
    if value is None:
        return None
    decoded = json.loads(value, cls=JsonDecoder)
    if self.data_type == dict:
        return decoded
    return self.data_type.from_json(decoded)
Convert value from datastore representation. Args: value: datastore value. Returns: value to store in the model.
juraj-google-style
def job_stories(self, raw=False, limit=None):
    """Returns list of item ids of latest Job stories.

    Args:
        raw (bool): Flag to indicate whether to transform all objects
            into raw json.
        limit (int): specifies the number of stories to be returned.

    Returns:
        `list` object containing ids of Job stories.
    """
    stories = self._get_stories('jobstories', limit)
    if raw:
        return [story.raw for story in stories]
    return stories
Returns list of item ids of latest Job stories Args: limit (int): specifies the number of stories to be returned. raw (bool): Flag to indicate whether to transform all objects into raw json. Returns: `list` object containing ids of Job stories.
codesearchnet
def register(cls, config_class, model_class, exist_ok=False) -> None: if hasattr(model_class, 'config_class') and model_class.config_class.__name__ != config_class.__name__: raise ValueError(f'The model class you are passing has a `config_class` attribute that is not consistent with the config class you pas...
Register a new model for this class. Args: config_class ([`PretrainedConfig`]): The configuration corresponding to the model to register. model_class ([`PreTrainedModel`]): The model to register.
github-repos
def train_and_maybe_evaluate(hparams): schema = taxi.read_schema(hparams.schema_file) tf_transform_output = tft.TFTransformOutput(hparams.tf_transform_dir) train_input = lambda: model.input_fn(hparams.train_files, tf_transform_output, batch_size=TRAIN_BATCH_SIZE) eval_input = lambda: model.input_fn(hpar...
Run the training and evaluate using the high level API. Args: hparams: Holds hyperparameters used to train the model as name/value pairs. Returns: The estimator that was used for training (and maybe eval)
github-repos
def SetAndLoadTagFile(self, tagging_file_path):
    """Sets the tag file to be used by the plugin and loads its rules.

    Args:
        tagging_file_path (str): path of the tagging file.
    """
    tagging_rules_file = tagging_file.TaggingFile(tagging_file_path)
    self._tagging_rules = tagging_rules_file.GetEventTaggingRules()
Sets the tag file to be used by the plugin. Args: tagging_file_path (str): path of the tagging file.
juraj-google-style
def get_archive(self, path, chunk_size=DEFAULT_DATA_CHUNK_SIZE):
    """Retrieve a file or folder from the container as a tar archive.

    Args:
        path (str): Path to the file or folder to retrieve.
        chunk_size (int): The number of bytes returned by each
            iteration of the generator. If ``None``, data will be
            streamed as it is received. Default: 2 MB.

    Returns:
        Whatever the low-level API's ``get_archive`` returns for this
        container (a stream/stat tuple).
    """
    api = self.client.api
    return api.get_archive(self.id, path, chunk_size)
Retrieve a file or folder from the container in the form of a tar archive. Args: path (str): Path to the file or folder to retrieve chunk_size (int): The number of bytes returned by each iteration of the generator. If ``None``, data will be streamed as it is received. Default: 2 MB Returns: (tuple): First element is ...
codesearchnet
def skip(reason, extras=None):
    """Skip a test.

    Args:
        reason: The reason this test is skipped.
        extras: An optional field for extra information to be included
            in the test result.

    Raises:
        signals.TestSkip: Always raised, to mark the test as skipped.
    """
    raise signals.TestSkip(reason, extras)
Skip a test. Args: reason: The reason this test is skipped. extras: An optional field for extra information to be included in test result. Raises: signals.TestSkip: Mark a test as skipped.
github-repos
def stop(self, timeout_s=None): self._stopping.set() with self._current_phase_thread_lock: phase_thread = self._current_phase_thread if (not phase_thread): return if phase_thread.is_alive(): phase_thread.kill() _LOG.debug('Waiting for cancelled phase to exit: %s',...
Stops execution of the current phase, if any. It will raise a ThreadTerminationError, which will cause the test to stop executing and terminate with an ERROR state. Args: timeout_s: int or None, timeout in seconds to wait for the phase to stop.
codesearchnet
def parse_gene_panel(path, institute='cust000', panel_id='test', panel_type='clinical', date=datetime.now(), version=1.0, display_name=None, genes=None): LOG.info('Parsing gene panel %s', panel_id) gene_panel = {} gene_panel['path'] = path gene_panel['type'] = panel_type gene_panel['date'] = date ...
Parse the panel info and return a gene panel Args: path(str): Path to panel file institute(str): Name of institute that owns the panel panel_id(str): Panel id date(datetime.datetime): Date of creation version(float) full_name(str): Option to have a long name Returns: gene_panel(dict)
codesearchnet
def dot_product(p1, p2, o=(0, 0)):
    """Returns the dot product of the vectors o->p1 and o->p2.

    Args:
        p1, p2: point (x, y)
        o: origin point, defaults to (0, 0)
    """
    u = vector(o, p1)
    w = vector(o, p2)
    return (u[0] * w[0]) + (u[1] * w[1])
Returns dot product Args: p1, p2: point (x, y) o: origin
juraj-google-style
def from_spec(cls, spec: Spec, _run_init: bool=True) -> Union[Self, type[Self]]: if spec.type is None: raise ValueError(f'Spec type not found in {spec}') subspace = _spec_type_to_subspace(spec.type) subclass: type[Self] = _KNOWN_SPECIFIABLE[subspace].get(spec.type, None) if subclass is None: ...
Generate a `Specifiable` subclass object based on a spec. Args: spec: the specification of a `Specifiable` subclass object _run_init: whether to call `__init__` or not for the initial instantiation Returns: Self: the `Specifiable` subclass object
github-repos
def _distributed_apply(self, distribution, grads_and_vars, global_step=None, name=None): reduced_grads = distribution.extended.batch_reduce_to(ds_reduce_util.ReduceOp.SUM, grads_and_vars) var_list = [v for _, v in grads_and_vars] grads_and_vars = zip(reduced_grads, var_list) with ops.init_scope(): ...
A version of `apply_gradients` for cross-replica context. This is a version of `apply_gradients()` for when you are using a `DistributionStrategy` and are in a cross-replica context. If in a replica context, use `apply_gradients()` as normal. Args: distribution: A `DistributionStrategy` object. grads_and_vars: List o...
github-repos
def find_call(self, path, method):
    """Find callable for the specified URL path and HTTP method.

    Args:
        path (:obj:`str`): URL path to match.
        method (:obj:`str`): HTTP method.

    Note:
        A trailing '/' is always assumed in the path.
    """
    normalized = path if path.endswith('/') else path + '/'
    # Drop the empty segment before the leading '/'.
    segments = normalized.split('/')[1:]
    return self._recursive_route_match(self._routes, segments, method, [])
Find callable for the specified URL path and HTTP method. Args: path (:obj:`str`): URL path to match method (:obj:`str`): HTTP method Note: A trailing '/' is always assumed in the path.
codesearchnet
def _GetRowValue(self, query_hash, row, value_name): keys_name_to_index_map = self._keys_per_query.get(query_hash, None) if not keys_name_to_index_map: keys_name_to_index_map = { name: index for index, name in enumerate(row.keys())} self._keys_per_query[query_hash] = keys_name_to_inde...
Retrieves a value from the row. Args: query_hash (int): hash of the query, that uniquely identifies the query that produced the row. row (sqlite3.Row): row. value_name (str): name of the value. Returns: object: value.
juraj-google-style
def setup(self, puller: bool=None, subscriptions: Dict[(str, Any)]={}): if puller: puller = self._zmq.socket(zmq.PULL) (ip, port, host) = self.rslv('rcv') puller.bind('tcp: self.poll(puller) if subscriptions: for publisher in subscriptions: self.add(publisher,...
Sets up this Node with the specified Interfaces before it is run. Args: puller: Indication if a Puller Interface should be created. subscriptions: Collection of the Subscriber Interfaces to be created and their Slots.
codesearchnet
def _take_lease(self, lease, uuid_path, safe=True): if safe: lease_taken_by = self._lease_valid(lease) if lease_taken_by and lease_taken_by != uuid_path: raise LagoSubnetLeaseTakenException( lease.subnet, lease_taken_by ) ...
Persist the given lease to the store and make the prefix in uuid_path its owner Args: lease(lago.subnet_lease.Lease): Object representation of the lease uuid_path (str): Path to the prefix uuid safe (bool): If true (the default), validate the lease isn't taken. Raises: LagoSubnetLeaseException: If safe == True an...
juraj-google-style
def recipe_iam(config, auth_write, role, email):
    """Sets project permissions for an email.

    Args:
        auth_write (authentication): Credentials used for writing data.
        role (string): projects/[project name]/roles/[role name]
        email (string): Email address to grant role to.
    """
    task = {'auth': auth_write, 'role': role, 'email': email}
    iam(config, task)
Sets project permissions for an email. Args: auth_write (authentication) - Credentials used for writing data. role (string) - projects/[project name]/roles/[role name] email (string) - Email address to grant role to.
github-repos
def create_from_binary(cls, ignore_signature_check, binary_view): sig, fx_offset, fx_count, lsn, seq_number, hard_link_count, first_attr_offset, \ usage_flags, entry_len, alloc_len, base_record, next_attr_id, record_n = \ cls._REPR.unpack(binary_view[:cls._REPR.size]) baad ...
Creates a new object MFTHeader from a binary stream. The binary stream can be represented by a byte string, bytearray or a memoryview of the bytearray. Args: binary_view (memoryview of bytearray) - A binary stream with the information of the attribute Returns: MFTHeader: New object using the binary stream as source
juraj-google-style
def get_value(self): try: self.raw_value except (AttributeError, KeyError) as err: self._reraise_if_required(err) default_value = self.default_value if self.transform_default: return self.transform(default_value) return default_value else: value = ...
Return dictionary with values of subsettings. Returns: dict: values of subsettings.
codesearchnet
def make_legacy_input_feature_spec(include_label=True): result = {} if include_label: result['clicked'] = tf.io.FixedLenFeature(shape=[], dtype=tf.int64) for name in _INTEGER_COLUMN_NAMES: result[name] = tf.io.FixedLenFeature(shape=[], dtype=tf.int64, default_value=-1) for name in _CATEG...
Input schema definition. Args: include_label: Indicates whether the label feature should be included. Returns: A `Schema` object.
github-repos
def register(self, type_name: str, cls: Type[Any], override_existing: bool=False) -> None: if type_name in self._type_to_cls_map and (not override_existing): raise KeyError(f'Type {type_name!r} has already been registered with class {self._type_to_cls_map[type_name].__name__}.') self._type_to_cls_map[ty...
Register a ``symbolic.Object`` class with a type name. Args: type_name: String identifier for the class, which will be used as the value of `_type` property when deciding which class to construct object when converting a JSON value to object. cls: Class to register. override_existing: Whether allow to override existin...
github-repos
def __call__(self, input_ids: torch.LongTensor, z_threshold: float=3.0, return_dict: bool=False) -> Union[WatermarkDetectorOutput, np.array]: if input_ids[0, 0] == self.bos_token_id: input_ids = input_ids[:, 1:] if input_ids.shape[-1] - self.processor.context_width < 1: raise ValueError(f'Must h...
Args: input_ids (`torch.LongTensor`): The watermark generated text. It is advised to remove the prompt, which can affect the detection. z_threshold (`Dict`, *optional*, defaults to `3.0`): Changing this threshold will change the sensitivity of the detector. Higher z threshold gives less sensitivity and vice versa for l...
github-repos
def get_profiles(adapter, vcf_file): vcf = get_file_handle(vcf_file) individuals = vcf.samples profiles = {individual: [] for individual in individuals} for profile_variant in adapter.profile_variants(): ref = profile_variant['ref'] alt = profile_variant['alt'] pos = profile_vari...
Given a vcf, get a profile string for each sample in the vcf based on the profile variants in the database Args: adapter(MongoAdapter): Adapter to mongodb vcf_file(str): Path to vcf file Returns: profiles (dict(str)): The profiles (given as strings) for each sample in vcf.
codesearchnet
def _evolve(self, state, qargs=None): if qargs is not None: return SuperOp(self)._evolve(state, qargs) state = self._format_state(state, density_matrix=True) if state.shape[0] != self._input_dim: raise QiskitError( "QuantumChanne...
Evolve a quantum state by the QuantumChannel. Args: state (QuantumState): The input statevector or density matrix. qargs (list): a list of QuantumState subsystem positions to apply the operator on. Returns: DensityMatrix: the output quantum state as a density matrix. Raises: QiskitError: if the operator dimension do...
juraj-google-style
def stop(self, drain_queue_and_join=True): with self.start_stop_lock: if not self.running: return self.running = False if drain_queue_and_join: while True: try: value = self.future_queue.get(block=True, timeout=0.1) ...
Stops running threads and wait for them to exit, if necessary. This method is thread safe and is called from various threads. Note that the `drain_queue_and_join` argument must be set correctly. It is safe to call this method multiple times, extra calls are ignored. Args: drain_queue_and_join: set to True to drain th...
github-repos
def Validate(self, problems, validate_children=True): self.ValidateRouteId(problems) self.ValidateServicePeriod(problems) self.ValidateDirectionId(problems) self.ValidateTripId(problems) self.ValidateShapeIdsExistInShapeList(problems) self.ValidateRouteIdExistsInRouteList(problems) self.Vali...
Validate attributes of this object. Check that this object has all required values set to a valid value without reference to the rest of the schedule. If the _schedule attribute is set then check that references such as route_id and service_id are correct. Args: problems: A ProblemReporter object validate_children: i...
codesearchnet
def experimental_set_type(self, type_proto) -> None: with self.graph._c_graph.get() as c_graph: if type_proto.type_id not in (full_type_pb2.TFT_UNSET, full_type_pb2.TFT_PRODUCT): raise ValueError('error setting the type of ', self.name, ': expected TFT_UNSET or TFT_PRODUCT, got ', type_proto.typ...
Sets the corresponding node's `experimental_type` field. See the description of `NodeDef.experimental_type` for more info. Args: type_proto: A FullTypeDef proto message. The root type_if of this object must be `TFT_PRODUCT`, even for ops which only have a singlre return value.
github-repos
def volatility(self, strike: types.FloatTensor, expiry_dates: Optional[types.DateTensor]=None, expiry_times: Optional[types.FloatTensor]=None, term: Optional[types.Period]=None) -> types.FloatTensor:
    """Returns the interpolated volatility on a specified set of expiries.

    Abstract stub: the body is intentionally empty and concrete
    surface implementations are expected to override it.

    Args:
        strike: The strikes for which the interpolation is desired.
        expiry_dates: Optional expiry dates for which interpolation is
            desired. Presumably callers supply either `expiry_dates` or
            `expiry_times`, not both — TODO confirm with implementers.
        expiry_times: Optional expiry times for which interpolation is
            desired.
        term: Optional period argument — semantics depend on the
            concrete surface; verify against subclasses.

    Returns:
        The interpolated volatilities.
    """
    pass
Returns the interpolated volatility on a specified set of expiries. Args: strike: The strikes for which the interpolation is desired. expiry_dates: Optional input specifying the expiry dates for which interpolation is desired. The user should supply either `expiry_dates` or `expiry_times` for interpolation. expiry_tim...
github-repos
def write_payload(payload=None, objectInput=None): temp = tempfile.mkstemp()[1] log.debug('Write payload in temp file {!r}'.format(temp)) with open(temp, 'wb') as f: if payload: payload = base64.b64decode(payload) elif objectInput: if six.PY3: payload ...
This function writes a base64 payload or file object on disk. Args: payload (string): payload in base64 objectInput (object): file object/standard input to analyze Returns: Path of file
codesearchnet
def adaptive_set(self, reannealing_per=50, thermostat=0.9, t_min=0.001, t_default=1.0):
    """Configure the adaptive simulated-annealing schedule.

    Args:
        reannealing_per: How often the model re-anneals, in cycles.
        thermostat: Cooling factor applied to the temperature.
        t_min: The minimum temperature.
        t_default: The default temperature.
    """
    # Persist the schedule parameters on the instance for later cycles.
    self.__reannealing_per = reannealing_per
    self.__thermostat = thermostat
    self.__t_min = t_min
    self.__t_default = t_default
Init for Adaptive Simulated Annealing. Args: reannealing_per: How often this model re-anneals, in cycles. thermostat: Thermostat (cooling factor). t_min: The minimum temperature. t_default: The default temperature.
juraj-google-style
def from_dict(cls, tx):
    """Transforms a Python dictionary to a Transaction object.

    Args:
        tx (dict): The transaction body to be transformed.

    Returns:
        A Transaction instance built from the dictionary fields.
    """
    # Deserialize the nested input/output dictionaries first.
    parsed_inputs = [Input.from_dict(i) for i in tx['inputs']]
    parsed_outputs = [Output.from_dict(o) for o in tx['outputs']]
    return cls(
        tx['operation'],
        tx['asset'],
        parsed_inputs,
        parsed_outputs,
        tx['metadata'],
        tx['version'],
        hash_id=tx['id'],
    )
Transforms a Python dictionary to a Transaction object. Args: tx_body (dict): The Transaction to be transformed. Returns: :class:`~bigchaindb.common.transaction.Transaction`
juraj-google-style
def writeTable(self, tableName):
    """Write the table corresponding to the specified name.

    Equivalent to the AMPL statement ``write table tableName;``.

    Args:
        tableName: Name of the table to be written.
    """
    def _write():
        return self._impl.writeTable(tableName)
    # Serialize access to the underlying interpreter via the instance lock.
    lock_and_call(_write, self._lock)
Write the table corresponding to the specified name, equivalent to the AMPL statement .. code-block:: ampl write table tableName; Args: tableName: Name of the table to be written.
codesearchnet
def _get_offset_from_gcs(self): headers = {'content-range': 'bytes */*'} (status, response_headers, content) = self._api.put_object(self._path_with_token, headers=headers) errors.check_status(status, [308], self._path, headers, response_headers, content, {'upload_path': self._path_with_token}) val = res...
Get the last offset that has been written to GCS. This is a utility method that does not modify self. Returns: an int of the last offset written to GCS by this upload, inclusive. -1 means nothing has been written.
codesearchnet
def autodiscover(self, message): if message["version"] in self.allowed_versions: logger.debug("<%s> Client version matches server " "version." % message["cuuid"]) response = serialize_data({"method": "OHAI Client", ...
This function simply returns the server version number as a response to the client. Args: message (dict): A dictionary of the autodiscover message from the client. Returns: A JSON string of the "OHAI Client" server response with the server's version number. Examples: >>> response '{"method": "OHAI Client", "version"...
juraj-google-style
def get_image_features(self, pixel_values: torch.FloatTensor):
    """Obtains image features from the vision tower and projects them.

    Args:
        pixel_values: Batch of pixel values fed to the vision tower.

    Returns:
        The multimodal projection of the vision tower's last hidden state.
    """
    vision_hidden_states = self.vision_tower(pixel_values).last_hidden_state
    image_features = self.multi_modal_projector(vision_hidden_states)
    return image_features
Obtains image last hidden states from the vision tower and apply multimodal projection. Args: pixel_values (`torch.FloatTensor]` of shape `(batch_size, channels, height, width)`) Returns: image_features (`torch.Tensor`): Image feature tensor of shape `(num_images, image_length, embed_dim)`).
github-repos
def get_symmetric_wallace_tensor(self, tau):
    """Gets the symmetrized Wallace tensor for yield strength criteria.

    Args:
        tau (3x3 array-like): stress at which to evaluate the Wallace tensor.

    Returns:
        Tensor: the Wallace tensor averaged with its (2, 3, 0, 1) transpose.
    """
    wallace_raw = self.get_wallace_tensor(tau)
    # Symmetrize by averaging the tensor with its axis-swapped transpose.
    symmetrized = 0.5 * (wallace_raw + np.transpose(wallace_raw, [2, 3, 0, 1]))
    return Tensor(symmetrized)
Gets the symmetrized wallace tensor for determining yield strength criteria. Args: tau (3x3 array-like): stress at which to evaluate the wallace tensor.
codesearchnet
class Constant(Initializer): def __init__(self, value=0): self.value = value def __call__(self, shape, dtype=None, **kwargs): del kwargs return constant_op.constant(self.value, dtype=_get_dtype(dtype), shape=shape) def get_config(self): return {'value': self.value...
Initializer that generates tensors with constant values. Also available via the shortcut function `tf.keras.initializers.constant`. Only scalar values are allowed. The constant value provided must be convertible to the dtype requested when calling the initializer. Examples: >>> # Standalone usage: >>> initializer =...
github-repos
def concrete(self, other=None): new_system = self.clone() if other: new_system.applyFeatures(other, missing="other") soft_features = self.getValue(SoftFeatures.SOFT, []) score = 0 for f in sorted(soft_features, key=lambda f: f.soft, reverse=True): ...
Return a copy and its score after applying the other system and soft features. Args: - other(system, optional): system to apply just before soft features. Return(tuple): tuple of the resulting system and its score.
juraj-google-style
def find_custom_args_with_details(file_content: str, custom_args_var_name: str) -> list[dict]: escaped_variable_name = re.escape(custom_args_var_name) regex_pattern = f'^\\s*({escaped_variable_name})\\s*=\\s*(r?\\"\\"\\")(.*?)(\\"\\"\\")' flags = re.MULTILINE | re.DOTALL match = re.search(regex_pattern,...
Find the given custom args variable in the file content and return its content. Args: file_content: The string content of the Python file. custom_args_var_name: The name of the custom args variable.
github-repos
def from_string(rxn_string): (rct_str, prod_str) = rxn_string.split('->') def get_comp_amt(comp_str): return {Composition(m.group(2)): float((m.group(1) or 1)) for m in re.finditer('([\\d\\.]*(?:[eE]-?[\\d\\.]+)?)\\s*([A-Z][\\w\\.\\(\\)]*)', comp_str)} return BalancedReaction(get_comp_amt(rct_str),...
Generates a balanced reaction from a string. The reaction must already be balanced. Args: rxn_string: The reaction string. For example, "4 Li + O2-> 2Li2O" Returns: BalancedReaction
codesearchnet
def list_devices(device_type=None):
    """Return all the available devices based on the device type.

    Note: in a distributed setting, global devices are returned.

    Args:
        device_type: string, one of `"cpu"`, `"gpu"` or `"tpu"`, or `None`
            to let the backend pick its default device type.

    Returns:
        List of devices reported by the distribution backend.
    """
    # Delegate entirely to the distribution backend.
    devices = distribution_lib.list_devices(device_type)
    return devices
Return all the available devices based on the device type. Note: in a distributed setting, global devices are returned. Args: device_type: string, one of `"cpu"`, `"gpu"` or `"tpu"`. Defaults to `"gpu"` or `"tpu"` if available when `device_type` is not provided. Otherwise will return the `"cpu"` devices. Return: Lis...
github-repos
def forward(self, hidden_states: List[torch.Tensor], patch_height, patch_width) -> List[torch.Tensor]: batch_size = hidden_states[0].shape[0] hidden_states = torch.cat(hidden_states, dim=0) cls_token, hidden_states = (hidden_states[:, 0], hidden_states[:, 1:]) total_batch_size, sequence_length, num_chan...
Args: hidden_states (`List[torch.FloatTensor]`, each of shape `(batch_size, sequence_length + 1, hidden_size)`): List of hidden states from the backbone.
github-repos
def parse_case_data(config=None, ped=None, owner=None, vcf_snv=None, vcf_sv=None, vcf_cancer=None, vcf_str=None, peddy_ped=None, peddy_sex=None, peddy_check=None, delivery_report=None, multiqc=None): config_data = (copy.deepcopy(config) or {}) if ('analysis_date' not in config_data): config_data['analys...
Parse all data necessary for loading a case into scout This can be done either by providing a VCF file and other information on the command line. Or all the information can be specified in a config file. Please see Scout documentation for further instructions. Args: config(dict): A yaml formatted config file ped(iter...
codesearchnet
def switch_to_window(self, window_name):
    """Switch focus to the given window.

    Support: Web(WebView)

    Args:
        window_name(str): The window to change focus to.
    """
    payload = {'name': window_name}
    self._execute(Command.SWITCH_TO_WINDOW, payload)
Switch to the given window. Support: Web(WebView) Args: window_name(str): The window to change focus to. Returns: WebDriver Object.
codesearchnet
def get(self, tx_id):
    """API endpoint to get details about a transaction.

    Args:
        tx_id (str): the id of the transaction.

    Returns:
        The transaction serialized as a dict, or a 404 error response
        when no transaction with that id exists.
    """
    pool = current_app.config['bigchain_pool']
    with pool() as bigchain:
        tx = bigchain.get_transaction(tx_id)
    # Preserve original falsy check: missing transaction -> 404.
    if not tx:
        return make_error(404)
    return tx.to_dict()
API endpoint to get details about a transaction. Args: tx_id (str): the id of the transaction. Return: A JSON string containing the data about the transaction.
juraj-google-style
def run_inference(self, batch: Sequence[np.ndarray], engine: TensorRTEngine, inference_args: Optional[dict[str, Any]]=None) -> Iterable[PredictionResult]:
    """Runs inferences on a batch of Tensors via the configured function.

    Args:
        batch: A sequence of np.ndarray inputs representing the batch.
        engine: A TensorRT engine.
        inference_args: Any additional arguments for the inference call.

    Returns:
        An Iterable of predictions produced by ``self.inference_fn``.
    """
    # Delegate to the pluggable inference function.
    extra_args = inference_args
    return self.inference_fn(batch, engine, extra_args)
Runs inferences on a batch of Tensors and returns an Iterable of TensorRT Predictions. Args: batch: A np.ndarray or a np.ndarray that represents a concatenation of multiple arrays as a batch. engine: A TensorRT engine. inference_args: Any additional arguments for an inference that are not applicable to TensorRT. Retu...
github-repos
def Images(self, run, tag):
    """Retrieve the image events associated with a run and tag.

    Args:
        run: A string name of the run for which values are retrieved.
        tag: A string name of the tag for which values are retrieved.

    Returns:
        Whatever the run's accumulator returns for ``Images(tag)``.
    """
    # Resolve the run's accumulator, then ask it for the tag's images.
    return self.GetAccumulator(run).Images(tag)
Retrieve the image events associated with a run and tag. Args: run: A string name of the run for which values are retrieved. tag: A string name of the tag for which values are retrieved. Raises: KeyError: If the run is not found, or the tag is not available for the given run. Returns: An array of `event_accumulator....
juraj-google-style
def join(self, *args, **kwargs):
    """Joins the thread and returns the value it captured.

    Args:
        args: optional positional arguments forwarded to ``Thread.join``.
        kwargs: optional keyword arguments forwarded to ``Thread.join``.

    Returns:
        The value stored in ``self._return`` by the exited thread.
    """
    # Block until the thread finishes, then hand back its captured result.
    super(ThreadReturn, self).join(*args, **kwargs)
    return self._return
Joins the thread. Args: self (ThreadReturn): the ``ThreadReturn`` instance args: optional list of arguments kwargs: optional key-word arguments Returns: The return value of the exited thread.
juraj-google-style
def authenticate(self): basic_auth = request.authorization is_valid = False user = None if basic_auth: (is_valid, user) = self.check_basic_auth(basic_auth.username, basic_auth.password) else: token = request.headers.get('Authorization', None) param_token = request.args.get('a...
Authenticate user by any means and return either true or false. Args: Returns: tuple (is_valid, username): True is valid user, False if not
codesearchnet
def FindFileByName(self, file_name): try: return self._file_descriptors[file_name] except KeyError: pass try: file_proto = self._internal_db.FindFileByName(file_name) except KeyError as error: if self._descriptor_db: file_proto = self._descriptor_db.FindFileBy...
Gets a FileDescriptor by file name. Args: file_name: The path to the file to get a descriptor for. Returns: A FileDescriptor for the named file. Raises: KeyError: if the file cannot be found in the pool.
codesearchnet
def output_vars(self, transitive: bool=False) -> Set[str]: output_vars = set() def list_var_defs(k, v, p): del k, p if isinstance(v, SymbolDefinition): output_vars.add(v.name) if isinstance(v, Function): return pg.TraverseAction.CONTINUE return pg.Travers...
Returns the output context from this instruction. Args: transitive: If True, transitive output context will be included. Returns: A set of output variable names.
github-repos
def ParseFileObject(self, parser_mediator, file_object): file_offset = 0 try: (timestamp, event_data) = self._ReadEntry(parser_mediator, file_object, file_offset) except errors.ParseError as exception: raise errors.UnableToParseFile('Unable to parse first utmp entry with error: {0!s}'.format...
Parses an utmp file-like object. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. file_object (dfvfs.FileIO): a file-like object. Raises: UnableToParseFile: when the file cannot be parsed.
codesearchnet
def _adjusted_script_code(self, script):
    """Length-prepend the spend script for sighash if not already done.

    This will break if there's a redeem script that's just a pushdata;
    that won't happen in practice.

    Args:
        script (bytes): the spend script.

    Returns:
        (bytes): the length-prepended script (if necessary).
    """
    # Already prefixed: first byte equals the length of the remainder.
    if script[0] == len(script) - 1:
        return script
    prefixed = ByteData()
    prefixed += VarInt(len(script))
    prefixed += script
    return prefixed
Checks if the script code passed in to the sighash function is already length-prepended. This will break if there's a redeem script that's just a pushdata; that won't happen in practice. Args: script (bytes): the spend script Returns: (bytes): the length-prepended script (if necessary)
juraj-google-style
def _ParseFileEntry(self, knowledge_base, file_entry): if not file_entry or not file_entry.link: raise errors.PreProcessFail( 'Unable to read: {0:s} with error: not a symbolic link'.format( self.ARTIFACT_DEFINITION_NAME)) _, _, time_zone = file_entry.link.partition('zoneinfo/...
Parses artifact file system data for a preprocessing attribute. Args: knowledge_base (KnowledgeBase): to fill with preprocessing information. file_entry (dfvfs.FileEntry): file entry that contains the artifact value data. Raises: errors.PreProcessFail: if the preprocessing fails.
juraj-google-style
def _ExtractContentFromDataStream( self, mediator, file_entry, data_stream_name): self.processing_status = definitions.STATUS_INDICATOR_EXTRACTING if self._processing_profiler: self._processing_profiler.StartTiming('extracting') self._event_extractor.ParseDataStream( mediator, fil...
Extracts content from a data stream. Args: mediator (ParserMediator): mediates the interactions between parsers and other components, such as storage and abort signals. file_entry (dfvfs.FileEntry): file entry to extract its content. data_stream_name (str): name of the data stream whose content is to be extracted.
juraj-google-style
def affine_coupling(name, x, mid_channels=512, activation='relu', reverse=False, dropout=0.0): with tf.variable_scope(name, reuse=tf.AUTO_REUSE): x_shape = common_layers.shape_list(x) (x1, x2) = tf.split(x, num_or_size_splits=2, axis=(- 1)) z1 = x1 log_scale_and_shift = conv_stack('n...
Reversible affine coupling layer. Args: name: variable scope. x: 4-D Tensor. mid_channels: number of channels in the coupling layer. activation: Can be either "relu" or "gatu". reverse: Forward or reverse operation. dropout: default, 0.0 Returns: output: x shifted and scaled by an affine transformation. objective: log...
codesearchnet
def _Execute(self, http): message = mime_multipart.MIMEMultipart('mixed') setattr(message, '_write_headers', lambda self: None) for key in self.__request_response_handlers: msg = mime_nonmultipart.MIMENonMultipart('application', 'http') msg['Co...
Serialize batch request, send to server, process response. Args: http: A httplib2.Http object to be used to make the request with. Raises: httplib2.HttpLib2Error if a transport error has occured. apiclient.errors.BatchError if the response is the wrong format.
juraj-google-style
def GetResults(self):
    """Retrieves results of the most recent analysis.

    Returns:
        list[AnalyzerResult]: a single-element list with the rule names
        of all matches joined into the attribute value.
    """
    matched_rules = [match.rule for match in self._matches]
    result = analyzer_result.AnalyzerResult()
    result.analyzer_name = self.NAME
    result.attribute_name = self._ATTRIBUTE_NAME
    result.attribute_value = ','.join(matched_rules)
    return [result]
Retrieves results of the most recent analysis. Returns: list[AnalyzerResult]: results.
codesearchnet
def get_proj(prj_code):
    """Helper for handling projection codes that are unknown to pyproj.

    Args:
        prj_code (str): an epsg proj code.

    Returns:
        projection: a pyproj projection.
    """
    # Prefer a custom definition when one is registered for this code.
    if prj_code in CUSTOM_PRJ:
        return pyproj.Proj(CUSTOM_PRJ[prj_code])
    return pyproj.Proj(init=prj_code)
Helper method for handling projection codes that are unknown to pyproj Args: prj_code (str): an epsg proj code Returns: projection: a pyproj projection
codesearchnet
def set_disk_timeout(timeout, power='ac', scheme=None):
    """Set the disk timeout in minutes for the given power scheme.

    Args:
        timeout (int): Minutes before the disk times out.
        power (str): ``ac`` (AC power) or ``dc`` (battery). Default ``ac``.
        scheme (str): The scheme to use; ``None`` uses the current scheme.

    Returns:
        The result of the underlying powercfg update.
    """
    return _set_powercfg_value(
        scheme=scheme,
        sub_group='SUB_DISK',
        setting_guid='DISKIDLE',
        power=power,
        value=timeout,
    )
Set the disk timeout in minutes for the given power scheme Args: timeout (int): The amount of time in minutes before the disk will timeout power (str): Set the value for AC or DC power. Default is ``ac``. Valid options are: - ``ac`` (AC Power) - ``dc`` (Battery) scheme (str): The scheme to use, leave as ``None`` to...
codesearchnet
def _process_new(self, feed_item): if feed_item.get(FieldMap.AD_ACTIVE, None): self._wait_all_creative_activation(feed_item) campaign = self._campaign_dao.get(feed_item, required=True) creative_assignments = [] placement_assignments = [] event_tag_assignments = [] self._process_assignmen...
Creates a new ad DCM object from a feed item representing an ad from the Bulkdozer feed. This function simply creates the object to be inserted later by the BaseDAO object. Args: feed_item: Feed item representing the ad from the Bulkdozer feed. Returns: An ad object ready to be inserted in DCM through the API.
github-repos
def set_window_position(self, x, y, window_handle='current'):
    """Sets the x,y position of the current window.

    Support: Web(WebView)

    Args:
        x(int): the x-coordinate in pixels.
        y(int): the y-coordinate in pixels.
        window_handle(str): Identifier of window_handle,
            default to 'current'.
    """
    params = {
        'x': int(x),
        'y': int(y),
        'window_handle': window_handle,
    }
    self._execute(Command.SET_WINDOW_POSITION, params)
Sets the x,y position of the current window. Support: Web(WebView) Args: x(int): the x-coordinate in pixels. y(int): the y-coordinate in pixels. window_handle(str): Identifier of window_handle, default to 'current'. Returns: WebDriver Object.
codesearchnet
def ping(self, suffix='public_tokens/'):
    """Return the status-code of the API for the given endpoint.

    Args:
        suffix (str): The url endpoint to check
            (default ``'public_tokens/'``).

    Returns:
        int: status code reported by the remote utilities.
    """
    # Resolve the base url from the parent class, then ping the endpoint.
    base_url = super(neuroRemote, self).url()
    return self.remote_utils.ping(base_url, suffix)
Return the status-code of the API (estimated using the public-tokens lookup page). Arguments: suffix (str : 'public_tokens/'): The url endpoint to check Returns: int: status code
juraj-google-style
def add_to_gitignore(line: str): if not line.endswith('\n'): line = f'{line}\n' if GIT_IGNORE.exists(): if line in GIT_IGNORE.read_text(encoding='utf8'): return previous_content = GIT_IGNORE.read_text(encoding='utf8') else: previous_content = '' GIT_IGNOR...
Adds a line to the .gitignore file of the repo Args: line: line to add
juraj-google-style
def normalize(array, min_value=0., max_value=1.):
    """Normalizes the numpy array to the range (min_value, max_value).

    Args:
        array: The numpy array.
        min_value: The min value in the normalized array (default 0).
        max_value: The max value in the normalized array (default 1).

    Returns:
        The array rescaled to lie between (min_value, max_value).
    """
    arr_min = np.min(array)
    # Epsilon guards against division by zero for a constant array.
    span = np.max(array) - arr_min + K.epsilon()
    unit_scaled = (array - arr_min) / span
    return min_value + (max_value - min_value) * unit_scaled
Normalizes the numpy array to (min_value, max_value) Args: array: The numpy array min_value: The min value in normalized array (Default value = 0) max_value: The max value in normalized array (Default value = 1) Returns: The array normalized to range between (min_value, max_value)
juraj-google-style
def data(self, resource_value, return_value=False):
    """Build the custom-metric data request URI.

    POST /v2/customMetrics/{id}|{name}/data

    Args:
        resource_value: Metric id or name inserted into the URI.
        return_value (bool): When True, clear the request entity and ask
            the API to echo the metric value back.
    """
    if return_value:
        # Value-returning requests carry no entity body.
        self._request_entity = None
        self._request.add_payload('returnValue', True)
    self._request_uri = '{}/{}/data'.format(self._request_uri, resource_value)
Alias for metric_name method +--------------+------------------------------------+ | HTTP Method | API Endpoint URI's | +==============+====================================+ | POST | /v2/customMetrics/{id}|{name}/data | +--------------+------------------------------------+ Example ------- Th...
codesearchnet