code
stringlengths
20
4.93k
docstring
stringlengths
33
1.27k
source
stringclasses
3 values
def send(query, address=DEFAULT_ADDRESS, port=DEFAULT_PORT, ttl=DEFAULT_TTL, local_only=False, timeout_s=2): sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) sock.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_TTL, ttl) if local_only: sock.sets...
Sends a query to the given multicast socket and returns responses. Args: query: The string query to send. address: Multicast IP address component of the socket to send to. port: Multicast UDP port component of the socket to send to. ttl: TTL for multicast messages. 1 to keep traffic in-network. timeout_s: Seconds to w...
juraj-google-style
def get_system_time(): now = win32api.GetLocalTime() meridian = 'AM' hours = int(now[4]) if (hours == 12): meridian = 'PM' elif (hours == 0): hours = 12 elif (hours > 12): hours = (hours - 12) meridian = 'PM' return '{0:02d}:{1:02d}:{2:02d} {3}'.format(hours, ...
Get the system time. Returns: str: Returns the system time in HH:MM:SS AM/PM format. CLI Example: .. code-block:: bash salt 'minion-id' system.get_system_time
codesearchnet
def base256_encode(n, minwidth=0): if (n > 0): arr = [] while n: (n, rem) = divmod(n, 256) arr.append(rem) b = bytearray(reversed(arr)) elif (n == 0): b = bytearray(b'\x00') else: raise ValueError('Negative numbers not supported') if ((minw...
Encode the input with base256. Args: n (int): input value. minwidth: minimum return value length. Raises: ValueError: if a negative number is provided. Returns: bytearray:
codesearchnet
async def verify_chain_of_trust(chain): log_path = os.path.join(chain.context.config['task_log_dir'], 'chain_of_trust.log') scriptworker_log = logging.getLogger('scriptworker') with contextual_log_handler(chain.context, path=log_path, log_obj=scriptworker_log, formatter=AuditLogFormatter(fmt=chain.context.c...
Build and verify the chain of trust. Args: chain (ChainOfTrust): the chain we're operating on Raises: CoTError: on failure
codesearchnet
def call(self, inputs): del inputs latent_code = ed.MultivariateNormalDiag(loc=tf.zeros(self.latent_size), sample_shape=1, name='latent_code') state = self.lstm.zero_state(1, dtype=tf.float32) t = 0 productions = [] stack = [self.grammar.start_symbol] while stack: symbol = stack.pop(...
Runs the model forward to generate a sequence of productions. Args: inputs: Unused. Returns: productions: Tensor of shape [1, num_productions, num_production_rules]. Slices along the `num_productions` dimension represent one-hot vectors.
codesearchnet
def _Open(self, path_spec=None, mode='rb'): if ((not self._file_object_set_in_init) and (not path_spec)): raise ValueError('Missing path specification.') if (not self._file_object_set_in_init): if (not path_spec.HasParent()): raise errors.PathSpecError('Unsupported path specification...
Opens the file-like object. Args: path_spec (Optional[PathSpec]): path specification. mode (Optional[str]): file access mode. Raises: AccessError: if the access to open the file was denied. IOError: if the file-like object could not be opened. OSError: if the file-like object could not be opened. PathSpecError: if th...
codesearchnet
def RestrictFeedItemToGeoTarget(client, feed_item, location_id): feed_item_target_service = client.GetService( 'FeedItemTargetService', version='v201809') criterion_target = { 'xsi_type': 'FeedItemCriterionTarget', 'feedId': feed_item['feedId'], 'feedItemId': feed_item['feedItem...
Restrict a feed item to a geo target location. Args: client: An AdWordsClient instance. feed_item: A FeedItem. location_id: The Id of the location to restrict to.
juraj-google-style
def SetHeaders(self, soap_headers, http_headers):
    """Set the SOAP and HTTP headers on the underlying suds client.

    Args:
        soap_headers: A SOAP element for the SOAP headers.
        http_headers: A dictionary for the HTTP headers.
    """
    self.suds_client.set_options(
        soapheaders=soap_headers,
        headers=http_headers,
    )
Set the headers for the underlying client. Args: soap_headers: A SOAP element for the SOAP headers. http_headers: A dictionary for the http headers.
codesearchnet
def get_or_generate_vocabulary(data_dir, tmp_dir, data_prefix, max_page_size_exp, approx_vocab_size=32768, strip=True): num_pages_for_vocab_generation = approx_...
Get or generate the vocabulary. Args: data_dir: a string tmp_dir: a string data_prefix: a string max_page_size_exp: an integer approx_vocab_size: an integer strip: a boolean Returns: a TextEncoder
juraj-google-style
def _PrintProcessingTime(self, processing_status): if (not processing_status): processing_time = '00:00:00' else: processing_time = (time.time() - processing_status.start_time) time_struct = time.gmtime(processing_time) processing_time = time.strftime('%H:%M:%S', time_struct) ...
Prints the processing time. Args: processing_status (ProcessingStatus): processing status.
codesearchnet
def get_config(self): data = self.data if type(self.data).__module__ == np.__name__: data = self.data.tolist() try: json_data = json.dumps(data) except TypeError as e: raise TypeError(f'Data not JSON Serializable: {data}') from e targets = self.targets if type(self.target...
Returns the TimeseriesGenerator configuration as Python dictionary. Returns: A Python dictionary with the TimeseriesGenerator configuration.
github-repos
def sort_imports(file: str, check_only: bool=True): with open(file, encoding='utf-8') as f: code = f.read() if '_import_structure' not in code or 'define_import_structure' in code: return main_blocks = split_code_in_indented_blocks(code, start_prompt='_import_structure = {', end_prompt='if T...
Sort the imports defined in the `_import_structure` of a given init. Args: file (`str`): The path to the init to check/fix. check_only (`bool`, *optional*, defaults to `True`): Whether or not to just check (and not auto-fix) the init.
github-repos
def cast_to_seq(obj, alphabet=IUPAC.extended_protein): if isinstance(obj, Seq): return obj if isinstance(obj, SeqRecord): return obj.seq if isinstance(obj, str): obj = obj.upper() return Seq(obj, alphabet) else: raise ValueError('Must provide a string, Seq, or Seq...
Return a Seq representation of a string or SeqRecord object. Args: obj (str, Seq, SeqRecord): Sequence string or Biopython SeqRecord object alphabet: See Biopython SeqRecord docs Returns: Seq: Seq representation of the sequence
codesearchnet
def get_files(self, retrieve=False):
    """Get pcdm:hasFile URIs for this resource, parsed as resources.

    Args:
        retrieve (bool): if True, issue .refresh() on resource thereby
            confirming existence and retrieving payload.
            NOTE(review): this flag is never read in the body — confirm
            intended behavior.

    Returns:
        list: parsed resource for each pcdm:hasFile URI, or an empty
        list when the resource does not exist or declares no files.
    """
    if not self.exists:
        return []
    pcdm = getattr(self.rdf.triples, 'pcdm', None)
    if pcdm is None or not hasattr(pcdm, 'hasFile'):
        return []
    return [self.repo.parse_uri(uri) for uri in pcdm.hasFile]
get pcdm:hasFile for this resource Args: retrieve (bool): if True, issue .refresh() on resource thereby confirming existence and retrieving payload
juraj-google-style
def add(self, email):
    """Add a collaborator; a no-op when the email is already present.

    Args:
        email (str): Collaborator email address.
    """
    if email in self._collaborators:
        return
    self._collaborators[email] = ShareRequestValue.Add
    self._dirty = True
Add a collaborator. Args: str : Collaborator email address.
codesearchnet
def get_cases(variant_source, case_lines=None, case_type='ped', variant_type='snv', variant_mode='vcf'): individuals = get_individuals( variant_source=variant_source, case_lines=case_lines, case_type=case_type, variant_mode=variant_mode ...
Create a cases and populate it with individuals Args: variant_source (str): Path to vcf files case_lines (Iterable): Ped like lines case_type (str): Format of case lines Returns: case_objs (list(puzzle.models.Case))
juraj-google-style
def slot(self):
    """The slot of this touch event.

    See the kernel's multitouch protocol B documentation for more
    information. If the touch event has no assigned slot, for example
    if it is from a single touch device, this returns -1.

    Raises:
        AttributeError: for TOUCH_FRAME events, which carry no slot.
    """
    if self.type != EventType.TOUCH_FRAME:
        return self._libinput.libinput_event_touch_get_slot(self._handle)
    raise AttributeError(_wrong_prop.format(self.type))
The slot of this touch event. See the kernel's multitouch protocol B documentation for more information. If the touch event has no assigned slot, for example if it is from a single touch device, this property returns -1. For events not of type :attr:`~libinput.constant.EventType.TOUCH_DOWN`, :attr:`~libinput.constan...
codesearchnet
def check_imports(filename: Union[str, os.PathLike]) -> list[str]: imports = get_imports(filename) missing_packages = [] for imp in imports: try: importlib.import_module(imp) except ImportError as exception: logger.warning(f'Encountered exception while importing {imp}...
Check if the current Python environment contains all the libraries that are imported in a file. Will raise if a library is missing. Args: filename (`str` or `os.PathLike`): The module file to check. Returns: `list[str]`: The list of relative imports in the file.
github-repos
def persist_project(project): from benchbuild.utils.schema import Project, Session session = Session() projects = session.query(Project) \ .filter(Project.name == project.name) \ .filter(Project.group_name == project.group) name = project.name desc = project.__doc__ domain ...
Persist this project in the benchbuild database. Args: project: The project we want to persist.
juraj-google-style
def cancel(self, identifier: typing.Any, exc_type: typing.Optional[type]=None) -> bool:
    """Cancel an active coroutine and remove it from the schedule.

    Args:
        identifier (typing.Any): The identifier returned from add.
        exc_type (typing.Optional[type]): The exception type to throw
            into the coroutine on cancel. If not given, no exception is
            thrown; the coroutine is simply no longer processed.

    Returns:
        bool: whether the coroutine was cancelled.

    Raises:
        NotImplementedError: always; subclasses must override.
    """
    raise NotImplementedError()
Cancel an active coroutine and remove it from the schedule. Args: identifier (typing.Any): The identifier returned from add. exc_type (typing.Optional[type]): The exception type to throw into the coroutine on cancel. No exception is thrown if nothing is given. Instead the coroutine is no longer processed. Returns: bo...
codesearchnet
def _get_recursive_dependancies(self, dependencies_map, sourcepath, recursive=True): collected = set([]) collected.update(dependencies_map.get(sourcepath, [])) sequence = collected.copy() walkthrough = [] if recursive: while True: if (not sequence): break ...
Return all dependencies of a source, recursively searching through its dependencies. This is a common method used by ``children`` and ``parents`` methods. Args: dependencies_map (dict): Internal buffer (internal buffers ``_CHILDREN_MAP`` or ``_PARENTS_MAP``) to use for searching. sourcepath (str): Source file path to...
codesearchnet
def _find_relation(self, span_doc: doc, r: List) -> Dict: rule = r[1][0] span_pivot = 0 relation = {} for e_id, element in enumerate(rule): if not span_doc[span_pivot:]: for extra_id, _, in enumerate(rule[e_id:]): relation[e_id+ex...
Get the relations between the each pattern in the spacy rule and the matches Args: span_doc: doc r: List Returns: Dict
juraj-google-style
def eig(tensor, name=None): if tensor.dtype == dtypes.float32 or tensor.dtype == dtypes.complex64: out_dtype = dtypes.complex64 elif tensor.dtype == dtypes.float64 or tensor.dtype == dtypes.complex128: out_dtype = dtypes.complex128 e, v = gen_linalg_ops.eig(tensor, Tout=out_dtype, compute_v=...
Computes the eigen decomposition of a batch of matrices. The eigenvalues and eigenvectors for a non-Hermitian matrix in general are complex. The eigenvectors are not guaranteed to be linearly independent. Computes the eigenvalues and right eigenvectors of the innermost N-by-N matrices in `tensor` such that `tensor[.....
github-repos
def has_datastore(self):
    """Check if the resource has a datastore.

    Returns:
        bool: True when the datastore search succeeds and returns data,
        False otherwise (including read failures, which are logged).
    """
    success, result = self._read_from_hdx(
        'datastore', self.data['id'], 'resource_id',
        self.actions()['datastore_search'])
    if not success:
        logger.debug(result)
        return False
    return bool(result)
Check if the resource has a datastore. Returns: bool: Whether the resource has a datastore or not
codesearchnet
def _find_uninitialized(self): return set((name for (name, prop) in self._properties.iteritems() if (not prop._is_initialized(self))))
Internal helper to find uninitialized properties. Returns: A set of property names.
codesearchnet
def _genBgTerm_fromXX(self, vTot, vCommon, XX, a=None, c=None): vSpecific = (vTot - vCommon) SP.random.seed(0) if (c == None): c = SP.randn(self.P) XX += (0.001 * SP.eye(XX.shape[0])) L = LA.cholesky(XX, lower=True) R = self.genWeights(self.N, self.P) A = self.genTraitEffect() if...
generate background term from SNPs Args: vTot: variance of Yc+Yi vCommon: variance of Yc XX: kinship matrix a: common scales, it can be set for debugging purposes c: independent scales, it can be set for debugging purposes
codesearchnet
def get_data_xlsx(file_name, file_contents=None, on_demand=False):
    """Load a new-format (.xlsx) Excel file.

    Delegates to ``get_data_xls``, which also handles old-format files
    automatically.

    Args:
        file_name: The name of the local file, or the holder for the
            extension type when ``file_contents`` is supplied.
        file_contents: The file-like object holding contents of
            ``file_name``. If left as None, ``file_name`` is read
            directly.
        on_demand: Passed through to ``get_data_xls``.
    """
    return get_data_xls(
        file_name,
        file_contents=file_contents,
        on_demand=on_demand,
    )
Loads the new excel format files. Old format files will automatically get loaded as well. Args: file_name: The name of the local file, or the holder for the extension type when the file_contents are supplied. file_contents: The file-like object holding contents of file_name. If left as None, then file_name is directly...
codesearchnet
def age(self):
    """Return the user's age in whole years, based on their birthday.

    Note: computed as elapsed days / 365, so leap years are ignored.

    Returns:
        int: the age, or None when no birthday is set.
    """
    born = self.birthday
    if not born:
        return None
    today = datetime.today().date()
    return int((today - born).days / 365)
Returns a user's age, based on their birthday. Returns: integer
codesearchnet
def send_location(self, room_id, geo_uri, name, thumb_url=None, thumb_info=None, timestamp=None): content_pack = {'geo_uri': geo_uri, 'msgtype': 'm.location', 'body': name} if thumb_url: content_pack['thumbnail_url'] = thumb_url if thumb_info: content_pack['thumbnail_info'] = thumb_info ...
Send m.location message event Args: room_id (str): The room ID to send the event in. geo_uri (str): The geo uri representing the location. name (str): Description for the location. thumb_url (str): URL to the thumbnail of the location. thumb_info (dict): Metadata about the thumbnail, type ImageInfo. timestamp (int): S...
codesearchnet
def find_and_replace_channel_refs(self, text): match = True pattern = re.compile('< while match: match = pattern.search(text) if match: text = text.replace(match.group(0), (' return text
Find occurrences of Slack channel references and attempt to replace them with just channel names. Args: text (string): The message text Returns: string: The message text with channel references replaced.
codesearchnet
def read(self, size=None):
    """Read a chunk from the rfile buffer and return it.

    Args:
        size (int): amount of data to read; None reads to EOF.

    Returns:
        bytes: chunk from rfile, limited by size if specified.
    """
    chunk = self.rfile.read(size)
    self.bytes_read += len(chunk)
    self._check_length()
    return chunk
Read a chunk from rfile buffer and return it. Args: size (int): amount of data to read Returns: bytes: Chunk from rfile, limited by size if specified.
juraj-google-style
def _load_schema(file_path, name=None):
    """Load a QObj JSON schema, caching it in the module-level _SCHEMAS.

    Args:
        file_path (str): Path to the schema file.
        name (str): Cache key for the schema. Defaults to the file's
            basename without extension.

    Returns:
        dict: the loaded (possibly cached) schema.
    """
    key = name
    if key is None:
        key = os.path.splitext(os.path.basename(file_path))[0]
    if key not in _SCHEMAS:
        with open(file_path, 'r') as schema_file:
            _SCHEMAS[key] = json.load(schema_file)
    return _SCHEMAS[key]
Loads the QObj schema for use in future validations. Caches schema in _SCHEMAS module attribute. Args: file_path(str): Path to schema. name(str): Given name for schema. Defaults to file_path filename without schema. Return: schema(dict): Loaded schema.
juraj-google-style
def __call__(self, utterances_batch: List[str], history_batch: List[List[str]], states_batch: Optional[list] = None) -> Tuple[List[str], List[float]]: responses, confidences = self.model(utterances_batch) if isinstance(confidences[0], list): confidences = ...
It returns the skill inference result. Output is batches of the skill inference results and estimated confidences. Args: utterances_batch: A batch of utterances. history_batch: A batch of list typed histories for each utterance. states_batch: Optional. A batch of arbitrary typed states for each utterance. Returns: B...
juraj-google-style
def write(self, data): ctx = context.get() if len(data) != 2: logging.error("Got bad tuple of length %d (2-tuple expected): %s", len(data), data) try: key = str(data[0]) value = str(data[1]) except TypeError: logging.error("Expecting a tuple, but got %s:...
Write data. Args: data: actual data yielded from handler. Type is writer-specific.
juraj-google-style
def Send(self, message): if not isinstance(message, common_pb2.Message): raise ValueError("Send requires a fleetspeak.Message") if message.destination.service_name == "system": raise ValueError( "Only predefined messages can have destination.service_name == \"system\"") return s...
Send a message through Fleetspeak. Args: message: A message protocol buffer. Returns: Size of the message in bytes. Raises: ValueError: If message is not a common_pb2.Message.
juraj-google-style
def _safe_setattr(obj, name, value): okey = id(obj) if okey in _set_failures or okey in _final_objs: return False import inspect try: if inspect.ismethod(obj): setattr(obj.__func__, name, value) return True else: if isinstance(obj, di...
Safely sets the attribute of the specified object. This includes not setting attributes for final objects and setting __func__ for instancemethod typed objects. Args: obj: object to set an attribute for. name (str): new attribute name. value: new attribute value. Returns: bool: True if the set attribute was successfu...
juraj-google-style
def retrieve_token(self, token):
    """Retrieve details for a specific token.

    Args:
        token: The identifier of the token.

    Returns:
        The API response for the token endpoint.
    """
    url = '{}/tokens/{}'.format(self.client.URL_BASE, token)
    headers = self.client._get_private_headers()
    return self.client._get(url, headers=headers)
Retrieve Token details for a specific Token. Args: token: The identifier of the token. Returns:
codesearchnet
def iter_packages(self, name, range_=None, paths=None):
    """Same as iter_packages in packages.py, but applying this filter.

    Args:
        name (str): Name of the package, eg 'maya'.
        range_ (VersionRange or str): If provided, limits the versions
            returned to those in `range_`.
        paths (list of str, optional): paths to search for packages,
            defaults to `config.packages_path`.

    Yields:
        packages not excluded by this filter.
    """
    for pkg in iter_packages(name, range_, paths):
        if self.excludes(pkg):
            continue
        yield pkg
Same as iter_packages in packages.py, but also applies this filter. Args: name (str): Name of the package, eg 'maya'. range_ (VersionRange or str): If provided, limits the versions returned to those in `range_`. paths (list of str, optional): paths to search for packages, defaults to `config.packages_path`. Returns: ...
codesearchnet
def __init__(self, iterable=None, modify_time=None, update_time=None): if self.__class__ is Map: raise TypeError('Map is an abstract class.') self._data = {} self._index = [] self._last_modification_timestamp = modify_time self._last_update_timestamp = update_time self.log = logging.getL...
Construct a Map object. Args: iterable: A tuple or list that can be iterated over and added to the Map, defaults to None. modify_time: An optional modify time for this Map, defaults to None. defaults to None. update_time: An optional update time for this Map, defaults to None. defaults to None. Raises: TypeError: If ...
github-repos
def _process_datum(self, data, input_reader, ctx, transient_shard_state): if (data is not input_readers.ALLOW_CHECKPOINT): self.slice_context.incr(context.COUNTER_MAPPER_CALLS) handler = transient_shard_state.handler if isinstance(handler, map_job.Mapper): handler(self.slice_cont...
Process a single data piece. Call mapper handler on the data. Args: data: a datum to process. input_reader: input reader. ctx: mapreduce context transient_shard_state: transient shard state. Returns: True if scan should be continued, False if scan should be stopped.
codesearchnet
def check(self, solution):
    """Check that a solution satisfies the constraint.

    Args:
        solution (container): An assignment for the variables in the
            constraint.

    Returns:
        bool: True if the solution satisfies the constraint; otherwise
        False.
    """
    values = [solution[v] for v in self.variables]
    return self.func(*values)
Check that a solution satisfies the constraint. Args: solution (container): An assignment for the variables in the constraint. Returns: bool: True if the solution satisfies the constraint; otherwise False. Examples: This example creates a constraint that :math:`a \\ne b` on binary variables and tests it for two cand...
codesearchnet
def _examples_from_path_handler(self, request): examples_count = int(request.args.get('max_examples')) examples_path = request.args.get('examples_path') sampling_odds = float(request.args.get('sampling_odds')) self.example_class = (tf.train.SequenceExample if request.args.get('sequence_exam...
Returns JSON of the specified examples. Args: request: A request that should contain 'examples_path' and 'max_examples'. Returns: JSON of up to max_examples of the examples in the path.
juraj-google-style
def __resource_descriptor(self, resource_path, methods): descriptor = {} method_map = {} sub_resource_index = collections.defaultdict(list) sub_resource_map = {} resource_path_tokens = resource_path.split('.') for (service, protorpc_meth_info) in methods: method_info = getattr(protorpc_m...
Describes a resource. Args: resource_path: string, the path of the resource (e.g., 'entries.items') methods: list of tuples of type (endpoints.Service, protorpc.remote._RemoteMethodInfo), the methods that serve this resource. Returns: Dictionary describing the resource.
codesearchnet
def _prepare_4d_causal_attention_mask_with_cache_position(attention_mask: torch.Tensor, sequence_length: int, target_length: int, dtype: torch.dtype, cache_position: torch.Tensor, batch_size: int, **kwargs): if attention_mask is not None and attention_mask.dim() == 4: causal_mask = attention_mask else: ...
Creates a causal 4D mask of shape `(batch_size, 1, query_length, key_value_length)` from a 2D mask of shape `(batch_size, key_value_length)`, or if the input `attention_mask` is already 4D, do nothing. Args: attention_mask (`torch.Tensor`): A 2D attention mask of shape `(batch_size, key_value_length)` or a 4D attentio...
github-repos
def load_validator(schema_path, schema): if (os.name == 'nt'): file_prefix = 'file: else: file_prefix = 'file:' resolver = RefResolver((file_prefix + schema_path.replace('\\', '/')), schema) validator = Draft4Validator(schema, resolver=resolver) return validator
Create a JSON schema validator for the given schema. Args: schema_path: The filename of the JSON schema. schema: A Python object representation of the same schema. Returns: An instance of Draft4Validator.
codesearchnet
def unset(entity, *types):
    """Unset the TypedFields on the input `entity`.

    Args:
        entity: A mixbox.Entity object.
        *types: A variable-length list of TypedField subclasses. If not
            provided, defaults to TypedField.
    """
    selected = types or (TypedField,)
    # Snapshot the keys first so deletion does not disturb iteration.
    doomed = [f for f in list(entity._fields.keys())
              if isinstance(f, selected)]
    for field in doomed:
        del entity._fields[field]
Unset the TypedFields on the input `entity`. Args: entity: A mixbox.Entity object. *types: A variable-length list of TypedField subclasses. If not provided, defaults to TypedField.
juraj-google-style
def _os_release_info(self): if os.path.isfile(self.os_release_file): with open(self.os_release_file) as release_file: return self._parse_os_release_content(release_file) return {}
Get the information items from the specified os-release file. Returns: A dictionary containing all information items.
codesearchnet
def to_dict(self): return {'name': self.name, 'id': self.id, 'type': self.type, 'workflow_id': self.workflow_id, 'queue': self.queue, 'start_time': self.start_time, 'arguments': self.arguments, 'acknowledged': self.acknowledged, 'func_name': self.func_name, 'hostname': self.hostname, 'worker_name': self.worker_name...
Return a dictionary of the job stats. Returns: dict: Dictionary of the stats.
codesearchnet
def op_list_to_dict(op_list, convert_variable_to_tensor=True): if not isinstance(op_list, (list, tuple, set)): raise TypeError(f'Variables to save should be passed in a dict or a list. Got {op_list}') op_list = nest.flatten(list(op_list)) op_list = sorted(op_list, key=lambda x: x.name) names_to_...
Create a dictionary of names to operation lists. This method is only used when the variable name matters (e.g. when saving or restoring from a TF1 name-based checkpoint). In TF2, this can be called from `tf.train.Checkpoint.restore` when loading from a name-based checkpoint. Args: op_list: A (nested) list, tuple, or ...
github-repos
def filter_embeddings(embeddings, vocab, dim):
    """Build a vocab-aligned word-embedding matrix.

    Args:
        embeddings (dict): a dictionary of numpy arrays keyed by word.
        vocab (dict): word_index lookup table.
        dim (int): embedding dimensionality.

    Returns:
        numpy array of shape (len(vocab), dim); rows for words missing
        from `embeddings` stay zero. Returns None when `embeddings` is
        not a dict.
    """
    if not isinstance(embeddings, dict):
        return None
    matrix = np.zeros([len(vocab), dim])
    for word, idx in vocab.items():
        if word in embeddings:
            matrix[idx] = embeddings[word]
    return matrix
Loads word vectors in numpy array. Args: embeddings (dict): a dictionary of numpy array. vocab (dict): word_index lookup table. Returns: numpy array: an array of word embeddings.
juraj-google-style
def astype(array, y):
    """A functional form of the `astype` method.

    Args:
        array: The array or number to cast.
        y: An array or number whose dtype the result should share.

    Returns:
        An array or number with the same dtype as `y`.
    """
    # Unwrap autograd tracing nodes to reach the concrete value.
    target = y.value if isinstance(y, autograd.core.Node) else y
    return array.astype(numpy.array(target).dtype)
A functional form of the `astype` method. Args: array: The array or number to cast. y: An array or number, as the input, whose type should be that of array. Returns: An array or number with the same dtype as `y`.
juraj-google-style
def create_token_type_ids_from_sequences(self, token_ids_0: List[int], token_ids_1: Optional[List[int]]=None) -> List[int]: sep = [self.sep_token_id] cls = [self.cls_token_id] if token_ids_1 is None: return len(cls + token_ids_0 + sep) * [0] return len(cls + token_ids_0 + sep + sep + token_ids_1...
Create a mask from the two sequences passed to be used in a sequence-pair classification task. XLM-RoBERTa does not make use of token type ids, therefore a list of zeros is returned. Args: token_ids_0 (`List[int]`): List of IDs. token_ids_1 (`List[int]`, *optional*): Optional second list of IDs for sequence pairs. Re...
github-repos
def log_flush_for_interval(self, log_type, interval):
    """Flush logs for an interval of time.

    Args:
        log_type (str): Only documented type is "policies"; applied by
            default when nothing is passed.
        interval (str): Combination of "Zero", "One", "Two", "Three",
            "Six", and "Day", "Week", "Month", "Year",
            e.g. "Three+Months". Spaces are converted to '+'.
    """
    flush_url = '{}/{}/interval/{}'.format(
        self.url,
        log_type or 'policies',
        interval.replace(' ', '+'),
    )
    self.jss.delete(flush_url)
Flush logs for an interval of time. Args: log_type (str): Only documented type is "policies". This will be applied by default if nothing is passed. interval (str): Combination of "Zero", "One", "Two", "Three", "Six", and "Day", "Week", "Month", "Year". e.g. ("Three+Months") Please note: The documentation for this spec...
codesearchnet
def set_config(self, key, value):
    """Set a persistent config key to a value, stored in the registry.

    Args:
        key (string): The key name.
        value (string): The key value.
    """
    self.kvstore.set('config:' + key, value)
Set a persistent config key to a value, stored in the registry Args: key (string): The key name value (string): The key value
codesearchnet
def __init__(self, ascii_codepage='cp1252', registry_file_reader=None):
    """Initializes the Windows Registry.

    Args:
        ascii_codepage (Optional[str]): ASCII string codepage.
        registry_file_reader (Optional[WinRegistryFileReader]): Windows
            Registry file reader.
    """
    super(WinRegistry, self).__init__()
    # Caches of opened Registry files; per-user files are tracked
    # separately from system-wide ones.
    self._registry_files = {}
    self._user_registry_files = {}
    self._ascii_codepage = ascii_codepage
    self._registry_file_reader = registry_file_reader
Initializes the Windows Registry. Args: ascii_codepage (Optional[str]): ASCII string codepage. registry_file_reader (Optional[WinRegistryFileReader]): Windows Registry file reader.
juraj-google-style
def get_padding_value(padding=None, kernel_size=7, stride=1, dilation=1) -> Tuple[Tuple, bool]: dynamic = False if padding is None: padding = (stride - 1 + dilation * (kernel_size - 1)) return (padding, dynamic) if isinstance(padding, str): padding = padding.lower() if paddi...
Utility function to get the tuple padding value given the kernel_size and padding. Args: padding (Union[`str`, `int`], *optional*): Padding value, can be either `"same"`, `"valid"`. If a different value is provided the default padding from PyTorch is used. kernel_size (`int`, *optional*, defaults to 7): Kernel size of...
github-repos
def imshow_bboxes(img, bboxes, colors='green', top_k=(- 1), thickness=1, show=True, win_name='', wait_time=0, out_file=None): img = imread(img) if isinstance(bboxes, np.ndarray): bboxes = [bboxes] if (not isinstance(colors, list)): colors = [colors for _ in range(len(bboxes))] colors = [...
Draw bboxes on an image. Args: img (str or ndarray): The image to be displayed. bboxes (list or ndarray): A list of ndarray of shape (k, 4). colors (list[str or tuple or Color]): A list of colors. top_k (int): Plot the first k bboxes only if set positive. thickness (int): Thickness of lines. show (bool): Whether to sh...
codesearchnet
def _copy(src, dst, src_is_storage, dst_is_storage): if (src_is_storage and dst_is_storage): system_src = get_instance(src) system_dst = get_instance(dst) if (system_src is system_dst): if (system_src.relpath(src) == system_dst.relpath(dst)): raise same_file_error...
Copies file from source to destination Args: src (str or file-like object): Source file. dst (str or file-like object): Destination file. src_is_storage (bool): Source is storage. dst_is_storage (bool): Destination is storage.
codesearchnet
def _probe_services(self, handle): code = 10240 def event_filter_func(event): if ((event.command_class == 4) and (event.command == 2)): (event_handle,) = unpack('B', event.payload[0:1]) return (event_handle == handle) return False def end_filter_func(event): ...
Probe for all primary services and characteristics in those services Args: handle (int): the connection handle to probe
codesearchnet
def is_valid_callsign(self, callsign, timestamp=timestamp_now):
    """Check if a callsign is valid.

    Args:
        callsign (str): Amateur Radio callsign.
        timestamp (datetime, optional): datetime in UTC
            (tzinfo=pytz.UTC).

    Returns:
        bool: True when the lookup finds data for the callsign, False
        when the lookup raises KeyError or returns no data.
    """
    try:
        if self.get_all(callsign, timestamp):
            return True
    except KeyError:
        return False
    # Previously fell through returning None when get_all() yielded
    # empty data; return an explicit bool to match the documented
    # contract (None was already falsy, so callers are unaffected).
    return False
Checks if a callsign is valid Args: callsign (str): Amateur Radio callsign timestamp (datetime, optional): datetime in UTC (tzinfo=pytz.UTC) Returns: bool: True / False Example: The following checks if "DH1TW" is a valid callsign >>> from pyhamtools import LookupLib, Callinfo >>> my_lookuplib = LookupLib(lookuptype...
codesearchnet
def __init__(self, port=None, max_length=ControllerMaxLen.OFPCML_NO_BUFFER):
    """Create an ActionOutput with the optional parameters below.

    Args:
        port (:class:`Port` or :class:`int`): Output port.
        max_length (int): Max length to send to controller.
    """
    super().__init__(action_type=ActionType.OFPAT_OUTPUT, length=16)
    self.max_length = max_length
    self.port = port
Create a ActionOutput with the optional parameters below. Args: port (:class:`Port` or :class:`int`): Output port. max_length (int): Max length to send to controller.
juraj-google-style
def stack(self, trees: Iterable[Tree[Array['*s']]]) -> Tree[Array['n_trees *s']]:
    """Stack a tree of `Iterable[Array]`. Supports `jax`, `tf`, `np`.

    Args:
        trees: The list of trees to stack.

    Returns:
        Tree of stacked arrays.
    """
    # Delegates per-leaf stacking to the backend's tree map with the
    # module-level _stack helper.
    return self.backend.map(_stack, *trees)
Stack a tree of `Iterable[Array]`. Supports `jax`, `tf`, `np`. Example: ```python etree.stack([ {'a': np.array([1])}, {'a': np.array([2])}, {'a': np.array([3])}, ]) == { 'a': np.array([[1], [2], [3]]) } ``` Args: trees: The list of tree to stack Returns: Tree of arrays.
github-repos
def insert_top(self, node):
    """Insert a statement at the top of the function body.

    Note that multiple calls to `insert_top` result in the statements
    being prepended in that order; this is different behavior from
    `prepend`.

    Args:
        node: The statement to prepend.

    Raises:
        ValueError: If the given node is not a statement.
    """
    if isinstance(node, grammar.STATEMENTS):
        self.to_insert_top.append(node)
    else:
        raise ValueError
Insert statements at the top of the function body. Note that multiple calls to `insert_top` will result in the statements being prepended in that order; this is different behavior from `prepend`. Args: node: The statement to prepend. Raises: ValueError: If the given node is not a statement.
codesearchnet
def get_dirty_items(item_list, flag_list):
    """Return each item in `item_list` whose paired flag is falsy.

    Args:
        item_list (list): items to filter.
        flag_list (list): parallel list of flags, same length.

    Returns:
        list: the dirty (un-flagged) items.
    """
    assert len(item_list) == len(flag_list)
    return [item for item, flag in zip(item_list, flag_list) if not flag]
Returns each item in item_list where not flag in flag_list Args: item_list (list): flag_list (list): Returns: dirty_items
codesearchnet
def complain(distribution_name): try: pkg_resources.get_distribution(distribution_name) warnings.warn('The {pkg} distribution is now obsolete. Please `pip uninstall {pkg}`. In the future, this warning will become an ImportError.'.format(pkg=distribution_name), DeprecationWarning) except pkg_reso...
Issue a warning if `distribution_name` is installed. In a future release, this method will be updated to raise ImportError rather than just send a warning. Args: distribution_name (str): The name of the obsolete distribution.
codesearchnet
def load_panel_app(adapter, panel_id=None, institute='cust000'): base_url = 'https: hgnc_map = adapter.genes_by_alias() if panel_id: panel_ids = [panel_id] if not panel_id: LOG.info("Fetching all panel app panels") data = get_request(base_url.format('list...
Load PanelApp panels into scout database If no panel_id load all PanelApp panels Args: adapter(scout.adapter.MongoAdapter) panel_id(str): The panel app panel id
juraj-google-style
def create_redis_client(redis_address, password=None):
    """Create a Redis client.

    Args:
        redis_address (str): "ip:port" of the Redis server.
        password: Optional password of the Redis server.

    Returns:
        A Redis client.
    """
    host, port = redis_address.split(":")
    return redis.StrictRedis(host=host, port=int(port), password=password)
Create a Redis client. Args: The IP address, port, and password of the Redis server. Returns: A Redis client.
juraj-google-style
def gaussian_pdf(std=10.0, mean=0.0):
    """Gaussian PDF for orientation averaging.

    Args:
        std: The standard deviation in degrees of the Gaussian PDF.
        mean: The mean in degrees of the Gaussian PDF; should lie in
            the interval [0, 180).

    Returns:
        pdf(x): the spherical-Jacobian-normalized Gaussian PDF with the
        given parameters, normalized so its integral over [0, 180] is 1.
    """
    def unnormalized(x):
        return np.exp(-0.5 * ((x - mean) / std) ** 2) * \
            np.sin(np.pi / 180.0 * x)

    # Normalize once over the support [0, 180] degrees.
    scale = 1.0 / quad(unnormalized, 0.0, 180.0)[0]

    def pdf(x):
        return scale * unnormalized(x)

    return pdf
Gaussian PDF for orientation averaging. Args: std: The standard deviation in degrees of the Gaussian PDF mean: The mean in degrees of the Gaussian PDF. This should be a number in the interval [0, 180) Returns: pdf(x), a function that returns the value of the spherical Jacobian- normalized Gaussian PDF with the given...
juraj-google-style
def setup_service(api_name, api_version, credentials=None):
    """Configure a genomics API client.

    Args:
        api_name: Name of the Google API (for example: "genomics").
        api_version: Version of the API (for example: "v2alpha1").
        credentials: Credentials to be used for the gcloud API calls;
            application-default credentials are fetched when falsy.

    Returns:
        A configured Google Genomics API client with appropriate
        credentials.
    """
    if not credentials:
        credentials = (
            oauth2client.client.GoogleCredentials.get_application_default())
    return apiclient.discovery.build(
        api_name, api_version, credentials=credentials)
Configures genomics API client. Args: api_name: Name of the Google API (for example: "genomics") api_version: Version of the API (for example: "v2alpha1") credentials: Credentials to be used for the gcloud API calls. Returns: A configured Google Genomics API client with appropriate credentials.
codesearchnet
def add_institute(self, institute_obj): internal_id = institute_obj['internal_id'] display_name = institute_obj['internal_id'] if self.institute(institute_id=internal_id): raise IntegrityError('Institute {0} already exists in database'.format(display_name)) LOG.info('Adding institute with intern...
Add an institute to the database Args: institute_obj(Institute)
codesearchnet
def reset(self, *args):
    """Reset any of the tokens for this Application.

    Note that you may have to reauthenticate afterwards.

    Usage:
        application.reset('api_token')
        application.reset('api_token', 'totp_secret')

    Args:
        *args (str): one or more of
            ['api_token', 'subscription_token', 'totp_secret'].

    Returns:
        The Application (self), so calls can be chained.
    """
    token_names = list(args)
    # Replace our resource with the one returned by the reset call.
    self.resource = self.resource.reset(token_names)
    return self
Resets any of the tokens for this Application. Note that you may have to reauthenticate afterwards. Usage: application.reset('api_token') application.reset('api_token', 'totp_secret') Args: *args (list of str): one or more of ['api_token', 'subscription_token', 'totp_secret'] Returns: The Application.
codesearchnet
def sg_log(tensor, opt):
    r"""Log-transform a dense tensor.

    See `tf.log()` in TensorFlow.

    Args:
        tensor: A `Tensor` (automatically given by chain).
        opt:
            name: If provided, replace current tensor's name.

    Returns:
        A `Tensor`.
    """
    # Shift by a small epsilon before the log to avoid log(0).
    shifted = tensor + tf.sg_eps
    return tf.log(shifted, name=opt.name)
r"""Log transform a dense tensor See `tf.log()` in tensorflow. Args: tensor: A `Tensor` ( automatically given by chain ) opt: name: If provided, replace current tensor's name. Returns: A `Tensor`.
juraj-google-style
def convert_to_experiment_list(experiments): exp_list = experiments if (experiments is None): exp_list = [] elif isinstance(experiments, Experiment): exp_list = [experiments] elif (type(experiments) is dict): exp_list = [Experiment.from_json(name, spec) for (name, spec) in experi...
Produces a list of Experiment objects. Converts input from dict, single experiment, or list of experiments to list of experiments. If input is None, will return an empty list. Arguments: experiments (Experiment | list | dict): Experiments to run. Returns: List of experiments.
codesearchnet
def translate_file(estimator, subtokenizer, input_file, output_file=None, print_all_translations=True): batch_size = _DECODE_BATCH_SIZE (sorted_inputs, sorted_keys) = _get_sorted_inputs(input_file) num_decode_batches = (((len(sorted_inputs) - 1) def input_generator(): 'Yield encoded strings fr...
Translate lines in file, and save to output file if specified. Args: estimator: tf.Estimator used to generate the translations. subtokenizer: Subtokenizer object for encoding and decoding source and translated lines. input_file: file containing lines to translate output_file: file that stores the generated translation...
codesearchnet
def AddNEP5Token(self, token):
    """Add a NEP-5 compliant token to the wallet.

    Args:
        token (NEP5Token): an instance of type neo.Wallets.NEP5Token.

    Note:
        Logs an error and leaves the wallet unchanged if the token
        already exists in the wallet.
    """
    key = token.ScriptHash.ToBytes()
    if key in self._tokens:
        logger.error('Token already in wallet')
        return
    self._tokens[key] = token
Add a NEP-5 compliant token to the wallet. Args: token (NEP5Token): an instance of type neo.Wallets.NEP5Token. Note: Logs an error if the token already exists in the wallet.
codesearchnet
def list_pop(list_, i, opts): assert isinstance(opts, ListPopOpts) if isinstance(list_, tensor_array_ops.TensorArray): raise ValueError('TensorArray does not support item removal') elif tensor_util.is_tf_type(list_): if list_.dtype == dtypes.variant: return _tf_tensor_list_pop(li...
The list pop function. Note: it is unspecified where list_ will be mutated or not. If list_ is a TensorFlow entity, it will not be typically mutated. If list_ is a plain list, it will be. In general, if the list is mutated then the return value should point to the original entity. Args: list_: An entity that supports...
github-repos
def device_type_from_string(cl_device_type_str):
    """Convert values like ``gpu`` to a pyopencl device type.

    Supported values are: ``accelerator``, ``cpu``, ``custom``, ``gpu``.
    If ``all`` is given, None is returned.

    Args:
        cl_device_type_str (str): The string we want to convert to a
            device type.

    Returns:
        cl.device_type: the pyopencl device type, or None when the name
        is not an attribute of ``cl.device_type``.
    """
    # getattr with a default collapses the hasattr/getattr pair into one lookup.
    return getattr(cl.device_type, cl_device_type_str.upper(), None)
Converts values like ``gpu`` to a pyopencl device type string. Supported values are: ``accelerator``, ``cpu``, ``custom``, ``gpu``. If ``all`` is given, None is returned. Args: cl_device_type_str (str): The string we want to convert to a device type. Returns: cl.device_type: the pyopencl device type.
juraj-google-style
def list_experiments(self, collection_name):
    """List all experiments that belong to a collection.

    Args:
        collection_name (string): Name of the parent collection.

    Returns:
        (list)

    Raises:
        requests.HTTPError on failure.
    """
    # Only the collection matters for listing; name/coord_frame appear to
    # be placeholder values required by the resource constructor.
    experiment = ExperimentResource(
        name='', collection_name=collection_name, coord_frame='foo')
    return self._list_resource(experiment)
List all experiments that belong to a collection. Args: collection_name (string): Name of the parent collection. Returns: (list) Raises: requests.HTTPError on failure.
codesearchnet
def bofh_excuse(how_many=1):
    """Generate random BOFH themed technical excuses!

    Args:
        how_many: Number of excuses to generate. (Default: 1)

    Returns:
        A list of BOFH excuses.
    """
    # The excuse corpus ships alongside this module.
    data_path = os.path.join(os.path.dirname(__file__), 'bofh_excuses.json')
    with open(data_path, 'r') as json_file:
        excuse_data = json.load(json_file)
    count = int(how_many)
    return [generate_random_string(excuse_data) for _ in range(count)]
Generate random BOFH themed technical excuses! Args: how_many: Number of excuses to generate. (Default: 1) Returns: A list of BOFH excuses.
codesearchnet
def get_rng(obj=None):
    """Get a good RNG seeded with time, pid and the object.

    Args:
        obj: some object to use to generate the random seed.

    Returns:
        np.random.RandomState: the RNG.
    """
    if _RNG_SEED is not None:
        # A fixed module-level seed overrides the derived one.
        seed = _RNG_SEED
    else:
        # Mix the object identity, the process id and the current time so
        # that concurrent processes derive distinct seeds.
        timestamp = int(datetime.now().strftime('%Y%m%d%H%M%S%f'))
        seed = (id(obj) + os.getpid() + timestamp) % 4294967295
    return np.random.RandomState(seed)
Get a good RNG seeded with time, pid and the object. Args: obj: some object to use to generate random seed. Returns: np.random.RandomState: the RNG.
codesearchnet
def Create(self, request, global_params=None):
    """Create an association between a GCP project and a GitHub Enterprise server.

    Args:
        request: (CloudbuildProjectsGithubEnterpriseConfigsCreateRequest)
            input message.
        global_params: (StandardQueryParameters, default: None) global
            arguments.

    Returns:
        (Operation) The response message.
    """
    method_config = self.GetMethodConfig('Create')
    return self._RunMethod(
        method_config, request, global_params=global_params)
Create an association between a GCP project and a GitHub Enterprise server. Args: request: (CloudbuildProjectsGithubEnterpriseConfigsCreateRequest) input message global_params: (StandardQueryParameters, default: None) global arguments Returns: (Operation) The response message.
github-repos
def FindClonedClients(token=None): index = client_index.CreateClientIndex(token=token) clients = index.LookupClients(['.']) hw_infos = _GetHWInfos(clients, token=token) clients_with_multiple_serials = [client_id for (client_id, serials) in iteritems(hw_infos) if (len(serials) > 1)] client_list = aff...
A script to find multiple machines reporting the same client_id. This script looks at the hardware serial numbers that a client reported in over time (they get collected with each regular interrogate). We have seen that sometimes those serial numbers change - for example when a disk is put in a new machine - so report...
codesearchnet
def build_from_token_counts(self, token_counts, min_count, num_iterations=4, reserved_tokens=None, max_subtoken_length=None): if (reserved_tokens is None): reserved_tokens = RESERVED_TOKENS else: for (default, proposed) in zip(RESERVED_TOKENS, reserved_tokens): if (default != propose...
Train a SubwordTextEncoder based on a dictionary of word counts. Args: token_counts: a dictionary of Unicode strings to int. min_count: an integer - discard subtokens with lower counts. num_iterations: an integer. how many iterations of refinement. reserved_tokens: List of reserved tokens. The global variable `RESERV...
codesearchnet
def read_from_file(path, file_type='text', exception=ScriptWorkerException): FILE_TYPE_MAP = {'text': 'r', 'binary': 'rb'} if (file_type not in FILE_TYPE_MAP): raise exception('Unknown file_type {} not in {}!'.format(file_type, FILE_TYPE_MAP)) try: with open(path, FILE_TYPE_MAP[file_type]) a...
Read from ``path``. Small helper function to read from ``file``. Args: path (str): the path to read from. file_type (str, optional): the type of file. Currently accepts ``text`` or ``binary``. Defaults to ``text``. exception (Exception, optional): the exception to raise if unable to read from the file. Defaults to `...
codesearchnet
def write_compacted(g): d_nodes = {} d_edges = {} def conv(value): if isinstance(value, basestring): return value.strip('"') else: return value for node in g.nodes(): label = None attrs = [] for (k, v) in sorted(g.node_attributes(node)): ...
Write a graph in our own compacted format. Returns: str.
codesearchnet
def validate_with_tags(self, tags, confidence): result = {'intent_type': self.name} intent_confidence = 0.0 local_tags = tags[:] used_tags = [] for (require_type, attribute_name) in self.requires: (required_tag, canonical_form, confidence) = find_first_tag(local_tags, require_type) i...
Validate whether tags have the required entities for this intent to fire Args: tags(list): Tags and Entities used for validation confidence(float): ? Returns: intent, tags: Returns intent and tags used by the intent; on failure to meet required entities, returns intent with confidence of 0.0 and an empty list for tags.
codesearchnet
def _ParseFileData(self, knowledge_base, file_object): line_reader = line_reader_file.BinaryLineReader(file_object) try: reader = line_reader_file.BinaryDSVReader(line_reader, b':') except csv.Error as exception: raise errors.PreProcessFail( 'Unable to read: {0:s} with error: {1!...
Parses file content (data) for user account preprocessing attributes. Args: knowledge_base (KnowledgeBase): to fill with preprocessing information. file_object (dfvfs.FileIO): file-like object that contains the artifact value data. Raises: errors.PreProcessFail: if the preprocessing fails.
juraj-google-style
def expandvars(text, environ=None): if '$' not in text: return text i = 0 if environ is None: environ = os.environ while True: m = ENV_VAR_REGEX.search(text, i) if not m: break i, j = m.span(0) name = m.group(1) if name.startswit...
Expand shell variables of form $var and ${var}. Unknown variables are left unchanged. Args: text (str): String to expand. environ (dict): Environ dict to use for expansions, defaults to os.environ. Returns: The expanded string.
juraj-google-style
def __init__(self, logger, script_type, default_shell=None):
    """Constructor.

    Args:
        logger: logger object, used to write to SysLog and serial port.
        script_type: string, the type of the script we are running.
        default_shell: string, the default shell to execute the script;
            falls back to /bin/bash when not provided.
    """
    self.logger = logger
    self.script_type = script_type
    # An explicitly supplied shell wins; otherwise use bash.
    if default_shell:
        self.default_shell = default_shell
    else:
        self.default_shell = '/bin/bash'
Constructor. Args: logger: logger object, used to write to SysLog and serial port. script_type: string, the type of the script we are running. default_shell: string, the default shell to execute the script.
juraj-google-style
def addgroup(name, group): if six.PY2: name = _to_unicode(name) group = _to_unicode(group) name = _cmd_quote(name) group = _cmd_quote(group).lstrip('\'').rstrip('\'') user = info(name) if not user: return False if group in user['groups']: return True c...
Add user to a group Args: name (str): The user name to add to the group group (str): The name of the group to which to add the user Returns: bool: True if successful, otherwise False CLI Example: .. code-block:: bash salt '*' user.addgroup jsnuffy 'Power Users'
juraj-google-style
def ListClients(self, request, timeout=None):
    """Provides basic information about Fleetspeak clients.

    Args:
        request: fleetspeak.admin.ListClientsRequest
        timeout: How many seconds to try for.

    Returns:
        fleetspeak.admin.ListClientsResponse

    NOTE(review): the ``timeout`` parameter is not forwarded anywhere in
    this body; the retry loop supplies the per-attempt timeout value it
    passes to the callable — confirm this is intended.
    """
    def _do_list(per_attempt_timeout):
        return self._stub.ListClients(request, timeout=per_attempt_timeout)

    return self._RetryLoop(_do_list)
Provides basic information about Fleetspeak clients. Args: request: fleetspeak.admin.ListClientsRequest timeout: How many seconds to try for. Returns: fleetspeak.admin.ListClientsResponse
codesearchnet
def _CalculateHashesFileEntry( self, file_system, file_entry, parent_full_path, output_writer): full_path = file_system.JoinPath([parent_full_path, file_entry.name]) for data_stream in file_entry.data_streams: hash_value = self._CalculateHashDataStream(file_entry, data_stream.nam...
Recursive calculates hashes starting with the file entry. Args: file_system (dfvfs.FileSystem): file system. file_entry (dfvfs.FileEntry): file entry. parent_full_path (str): full path of the parent file entry. output_writer (StdoutWriter): output writer.
juraj-google-style
def return_handler( self, call_node, function_nodes, saved_function_call_index, first_node ): if any(isinstance(node, YieldNode) for node in function_nodes): rhs_prefix = 'yld_' elif any(isinstance(node, ConnectToExitNode) for...
Handle the return from a function during a function call. Args: call_node(ast.Call) : The node that calls the definition. function_nodes(list[Node]): List of nodes of the function being called. saved_function_call_index(int): Unique number for each call. first_node(EntryOrExitNode or RestoreNode): Used to connect prev...
juraj-google-style
def ExamineEvent(self, mediator, event): self._EnsureRequesterStarted() path_spec = event.pathspec event_identifiers = self._event_identifiers_by_pathspec[path_spec] event_identifier = event.GetIdentifier() event_identifiers.append(event_identifier) if event.data_type not in self.DATA_TY...
Evaluates whether an event contains the right data for a hash lookup. Args: mediator (AnalysisMediator): mediates interactions between analysis plugins and other components, such as storage and dfvfs. event (EventObject): event.
juraj-google-style
def _Fail(self, msg):
    """Fail unconditionally.

    Args:
        msg: string to include in the exception.

    Raises:
        TruthAssertionError: always, by design.
    """
    raise TruthAssertionError(msg)
Fail unconditionally. Args: msg: string to include in the exception. Raises: TruthAssertionError: always, by design.
github-repos
def serialize_to_transport(self, encoding='utf-8', xslt_url=None):
    """Serialize to XML ``bytes`` with prolog.

    Args:
        encoding: str
            Encoding to use for XML doc bytes. Only UTF-8 is supported.
        xslt_url: str
            If specified, add a processing instruction to the XML doc that
            specifies the download location for an XSLT stylesheet.

    Returns:
        bytes: XML holding a DataONEError based type.

    Raises:
        ValueError: If ``encoding`` is not UTF-8.
    """
    # An ``assert`` is stripped under ``python -O``; validate explicitly so
    # the check survives optimized runs.
    if encoding not in ('utf-8', 'UTF-8'):
        raise ValueError(
            'Only UTF-8 encoding is supported, not: {}'.format(encoding))
    dataone_exception_pyxb = self.get_pyxb()
    return d1_common.xml.serialize_for_transport(
        dataone_exception_pyxb, xslt_url=xslt_url)
Serialize to XML ``bytes`` with prolog. Args: encoding: str Encoding to use for XML doc bytes xslt_url: str If specified, add a processing instruction to the XML doc that specifies the download location for an XSLT stylesheet. Returns: bytes: XML holding a DataONEError based type.
codesearchnet
def close(self): if self.reuse: logger.debug("Ipcontroller not shutting down: reuse enabled") return if self.mode == "manual": logger.debug("Ipcontroller not shutting down: Manual mode") return try: pgid = os.getpgid(self.pro...
Terminate the controller process and its child processes. Args: - None
juraj-google-style
def call(self, inputs: List[Any], global_state: pg.geno.AttributeDict, step: int=0) -> List[Any]:
    """Transform `inputs`; subclasses must override this method.

    The `global_state` and `step` arguments are optional in a subclass's
    call signature.

    Args:
        inputs: A list of values as inputs.
        global_state: An `AttributeDict` object as the global state
            container, which is readable/writable during the operation.
        step: An integer step counter (presumably the number of examples
            historically processed — confirm against callers).

    Returns:
        A list of values as outputs.

    Raises:
        NotImplementedError: always, in this base class.
    """
    raise NotImplementedError()
Subclasses should override this method. The `global_state` and `step` are optional for the subclasses' call signature. Args: inputs: A list of values as inputs. global_state: An `AttributeDict` object as the global state container, which is readable/writable during the operation. step: Number of examples historically...
github-repos
def play_human(env):
    """Play the environment using keyboard as a human.

    Args:
        env (gym.Env): the initialized gym environment to play.

    Returns:
        None
    """
    fps = env.metadata['video.frames_per_second']
    try:
        play(env, fps=fps)
    except KeyboardInterrupt:
        # Ctrl-C simply ends the session; it is not an error.
        pass
    env.close()
Play the environment using keyboard as a human. Args: env (gym.Env): the initialized gym environment to play Returns: None
juraj-google-style
def delete(filething):
    """delete(filething)

    Remove tags from a file.

    Arguments:
        filething (filething)

    Raises:
        mutagen.MutagenError
    """
    speex_file = OggSpeex(filething)
    # Rewind so delete() re-reads the stream from the start.
    filething.fileobj.seek(0)
    speex_file.delete(filething)
delete(filething) Arguments: filething (filething) Raises: mutagen.MutagenError Remove tags from a file.
juraj-google-style