code
stringlengths
20
4.93k
docstring
stringlengths
33
1.27k
source
stringclasses
3 values
def labels(self): if (not self.__labels): self.__labels = Labels(self.__connection) return self.__labels
Gets the Labels API client. Returns: Labels:
codesearchnet
def predict(fqdn, result, *argl, **argd): out = None if len(argl) > 0: machine = argl[0] if isclassifier(machine): out = classify_predict(fqdn, result, None, *argl, **argd) elif isregressor(machine): out = regress_predict(fqdn, result, None, *argl, ...
Analyzes the result of a generic predict operation performed by `sklearn`. Args: fqdn (str): full-qualified name of the method that was called. result: result of calling the method with `fqdn`. argl (tuple): positional arguments passed to the method call. argd (dict): keyword arguments passed to the method call.
juraj-google-style
def reqHeadTimeStamp( self, contract: Contract, whatToShow: str, useRTH: bool, formatDate: int = 1) -> datetime.datetime: return self._run( self.reqHeadTimeStampAsync( contract, whatToShow, useRTH, formatDate))
Get the datetime of earliest available historical data for the contract. Args: contract: Contract of interest. useRTH: If True then only show data from within Regular Trading Hours, if False then show all data. formatDate: If set to 2 then the result is returned as a timezone-aware datetime.datetime with UTC timezone.
juraj-google-style
def get_studies_by_regions(dataset, masks, threshold=0.08, remove_overlap=True, studies=None, features=None, regularization='scale'): import nibabel as nib import os try: loaded_masks = [nib.load(os.path.relpath(m)) for m in masks] except OSError: print('Error loading masks. Check the pa...
Set up data for a classification task given a set of masks Given a set of masks, this function retrieves studies associated with each mask at the specified threshold, optionally removes overlap and filters by studies and features, and returns studies by feature matrix (X) and class labels (y) Args: dataset: a Neurosy...
codesearchnet
def _create_dummy_input(func_graph, template_tensor): with func_graph.as_default(): return array_ops.placeholder(template_tensor.dtype, shape=template_tensor.shape)
Creates tensors in func_graph to represent template_tensors. Args: func_graph: FuncGraph. template_tensor: a tensor in the outer graph. Returns: A tensor in func_graph.
github-repos
def transform(self, input_df): _df = input_df.copy(deep=False) for column in self.cat_columns: if column not in _df: raise RuntimeError('Required column {:s} not found'.format(column)) if _df[column].dtype =...
Convert the dataframe to a matrix (numpy ndarray) Args: input_df (dataframe): The dataframe to convert
juraj-google-style
def get_sanger_unevaluated(store, institute_id, user_id): sanger_ordered_by_case = store.sanger_ordered(institute_id, user_id) unevaluated = [] for item in sanger_ordered_by_case: case_id = item['_id'] case_obj = store.case(case_id=case_id) if (not case_obj): continue ...
Get all variants for an institute having Sanger validations ordered but still not evaluated Args: store(scout.adapter.MongoAdapter) institute_id(str) Returns: unevaluated: a list that looks like this: [ {'case1': [varID_1, varID_2, .., varID_n]}, {'case2' : [varID_1, varID_2, .., varID_n]} ], where the keys are case_...
codesearchnet
def GetSecurityDescriptor(self): fwnt_security_descriptor = pyfwnt.security_descriptor() fwnt_security_descriptor.copy_from_byte_stream(self._fsntfs_file_entry.security_descriptor_data) return fwnt_security_descriptor
Retrieves the security descriptor. Returns: pyfwnt.security_descriptor: security descriptor.
codesearchnet
def get_request_data(self, path, action, body=None): body = (body or '') (path_name, path_spec) = self.get_path_spec(path) response = {} if ((path_spec is not None) and (action in path_spec.keys())): for status_code in path_spec[action]['responses'].keys(): resp = path_spec[action]['...
Get the default data and status code of the given path + action request. Args: path: path of the request. action: action of the request(get, post, delete...) body: body sent, used to sent it back for post request. Returns: A tuple with the default response data and status code In case of default status_code, use 0
codesearchnet
def list_vmss_sub(access_token, subscription_id): endpoint = ''.join([get_rm_endpoint(), '/subscriptions/', subscription_id, '/providers/Microsoft.Compute/virtualMachineScaleSets', '?api-version=', COMP_API]) return do_get_next(endpoint, access_token)
List VM Scale Sets in a subscription. Args: access_token (str): A valid Azure authentication token. subscription_id (str): Azure subscription id. Returns: HTTP response. JSON body of VM scale sets.
codesearchnet
def add_applicator(self, table, cols, function): if table not in self.relations: raise ItsdbError('Cannot add applicator; table "{}" is not ' 'defined by the relations file.' .format(table)) if cols is None: rais...
Add an applicator. When reading *table*, rows in *table* will be modified by apply_rows(). Args: table: The table to apply the function to. cols: The columns in *table* to apply the function on. function: The applicator function.
juraj-google-style
def _get_help_for_modules(self, modules, prefix, include_special_flags): output_lines = [] for module in modules: self._render_our_module_flags(module, output_lines, prefix) if include_special_flags: self._render_module_flags('absl.flags', six.itervalues(_helpers.SPECIAL_FLAGS._flags()), out...
Returns the help string for a list of modules. Private to absl.flags package. Args: modules: List[str], a list of modules to get the help string for. prefix: str, a string that is prepended to each generated help line. include_special_flags: bool, whether to include description of SPECIAL_FLAGS, i.e. --flagfile and -...
codesearchnet
def time_estimate(self, duration, **kwargs): path = ('%s/%s/time_estimate' % (self.manager.path, self.get_id())) data = {'duration': duration} return self.manager.gitlab.http_post(path, post_data=data, **kwargs)
Set an estimated time of work for the object. Args: duration (str): Duration in human format (e.g. 3h30) **kwargs: Extra options to send to the server (e.g. sudo) Raises: GitlabAuthenticationError: If authentication is not correct GitlabTimeTrackingError: If the time tracking update cannot be done
codesearchnet
def load_metascenario(self, scenario_list): for scenario in scenario_list: name = scenario.get('name') if (name is None): raise DataError('Scenario in scenario list is missing a name parameter', scenario=scenario) tile_address = scenario.get('tile') args = scenario.get('a...
Load one or more scenarios from a list. Each entry in scenario_list should be a dict containing at least a name key and an optional tile key and args key. If tile is present and its value is not None, the scenario specified will be loaded into the given tile only. Otherwise it will be loaded into the entire device. ...
codesearchnet
def save(self, savefile): with open(str(savefile), 'wb') as f: self.write_to_fp(f) log.debug('Saved to %s', savefile)
Do the TTS API request and write result to file. Args: savefile (string): The path and file name to save the ``mp3`` to. Raises: :class:`gTTSError`: When there's an error with the API request.
codesearchnet
def is_subset(self, other): if isinstance(other, _basebag): for elem, count in self.counts(): if not count <= other.count(elem): return False else: for elem in self: if self.count(elem) > 1 or elem not in other: return False return True
Check that every element in self has a count <= in other. Args: other (Set)
juraj-google-style
def setCTRatio(self, new_ct, password="00000000"): ret = False self.setContext("setCTRatio") try: self.clearCmdMsg() if ((new_ct != CTRatio.Amps_100) and (new_ct != CTRatio.Amps_200) and (new_ct != CTRatio.Amps_400) and (new_ct != CTRatio.Amps...
Serial call to set CT ratio for attached inductive pickup. Args: new_ct (int): A :class:`~ekmmeters.CTRatio` value, a legal amperage setting. password (str): Optional password. Returns: bool: True on completion with ACK.
juraj-google-style
def _force_edge_active_move(self, state: _STATE) -> _STATE: seqs, edges = state unused_edges = edges.copy() for seq in seqs: for i in range(1, len(seq)): unused_edges.remove(self._normalize_edge((seq[i - 1], seq[i]))) edge = self._choose_ra...
Move which forces a random edge to appear on some sequence. This move chooses random edge from the edges which do not belong to any sequence and modifies state in such a way, that this chosen edge appears on some sequence of the search state. Args: state: Search state, not mutated. Returns: New search state with one...
juraj-google-style
def disconnect_sync(self, connection_handle): self.bable.disconnect(connection_handle=connection_handle, sync=True)
Synchronously disconnect from whoever has connected to us Args: connection_handle (int): The handle of the connection we wish to disconnect.
codesearchnet
def update_work_as_completed(self, worker_id, work_id, other_values=None, error=None): client = self._datastore_client try: with client.transaction() as transaction: work_key = client.key(KIND_WORK_TYPE, self._work_type_entity_id, KIND_WORK, work_id) work_entity = client.get(work...
Updates work piece in datastore as completed. Args: worker_id: ID of the worker which did the work work_id: ID of the work which was done other_values: dictionary with additonal values which should be saved with the work piece error: if not None then error occurred during computation of the work piece. In such case wo...
codesearchnet
def check_required_tags_compliance(self, resource): missing_tags = [] notes = [] resource_tags = {tag.key.lower(): tag.value for tag in resource.tags} if (resource.resource_type in self.alert_schedule): target_accounts = self.alert_schedule[resource.resource_type]['scope'] else: targ...
Check whether a resource is compliance Args: resource: A single resource Returns: `(list, list)` A tuple contains missing tags (if there were any) and notes
codesearchnet
def call_replica_local_fn(fn, *args, **kwargs): strategy = None if 'strategy' in kwargs: strategy = kwargs.pop('strategy') elif distribute_lib.has_strategy(): strategy = distribute_lib.get_strategy() is_tpu = backend.is_tpu_strategy(strategy) if not is_tpu and strategy and distribute...
Call a function that uses replica-local variables. This function correctly handles calling `fn` in a cross-replica context. Args: fn: The function to call. *args: Positional arguments to the `fn`. **kwargs: Keyword argument to `fn`. Returns: The result of calling `fn`.
github-repos
def query(self, coords, order=1): out = np.full(len(coords.l.deg), np.nan, dtype='f4') for pole in self.poles: m = (coords.b.deg >= 0) if pole == 'ngp' else (coords.b.deg < 0) if np.any(m): data, w = self._data[pole] x, y = w.wcs_world2p...
Returns the map value at the specified location(s) on the sky. Args: coords (`astropy.coordinates.SkyCoord`): The coordinates to query. order (Optional[int]): Interpolation order to use. Defaults to `1`, for linear interpolation. Returns: A float array containing the map value at every input coordinate. The shape of ...
juraj-google-style
def put(self, key, value): value = self.serializedValue(value) self.child_datastore.put(key, value)
Stores the object `value` named by `key`. Serializes values on the way in, and stores the serialized data into the ``child_datastore``. Args: key: Key naming `value` value: the object to store.
juraj-google-style
async def send(self, metric): message = json.dumps(metric).encode('utf-8') (await self.loop.create_datagram_endpoint((lambda : UDPClientProtocol(message)), remote_addr=(self.ip, self.port)))
Transform metric to JSON bytestring and send to server. Args: metric (dict): Complete metric to send as JSON.
codesearchnet
def execute(api): try: return api.execute() except Exception as exception: now = datetime.now().strftime('%Y-%m-%d %H:%M:%S.%f') _print_error('%s: Exception %s: %s' % (now, type(exception).__name__, str(exception))) raise exception
Executes operation. Args: api: The base API object Returns: A response body object
juraj-google-style
def lf_summary(L, Y=None, lf_names=None, est_accs=None): n, m = L.shape if lf_names is not None: col_names = ["j"] d = {"j": list(range(m))} else: lf_names = list(range(m)) col_names = [] d = {} col_names.extend(["Polarity", "Coverage", "Overlaps", "Con...
Returns a pandas DataFrame with the various per-LF statistics. Args: L: an n x m scipy.sparse matrix where L_{i,j} is the label given by the jth LF to the ith candidate Y: an [n] or [n, 1] np.ndarray of gold labels. If provided, the empirical accuracy for each LF will be calculated
juraj-google-style
def operator(name=None, operators=None, aliases=None, kind=None): def delegator(assertion, subject, expected, *args, **kw): return assertion.test(subject, expected, *args, **kw) def decorator(fn): operator = Operator(fn=fn, aliases=aliases, kind=kind) _name = name if isinstance(nam...
Registers a new operator function in the test engine. Arguments: *args: variadic arguments. **kw: variadic keyword arguments. Returns: function
juraj-google-style
def _copy_fn(fn): if not callable(fn): raise TypeError('fn is not callable: %s' % fn) return types.FunctionType(code=fn.__code__, globals=fn.__globals__, name=fn.__name__, argdefs=fn.__defaults__, closure=fn.__closure__)
Create a deep copy of fn. Args: fn: a callable Returns: A `FunctionType`: a deep copy of fn. Raises: TypeError: if `fn` is not a callable.
github-repos
def write(self, output_stream, kmip_version=enums.KMIPVersion.KMIP_1_0): local_stream = BytearrayStream() if (self._device_serial_number is not None): self._device_serial_number.write(local_stream, kmip_version=kmip_version) if (self._password is not None): self._password.write(local_stream,...
Write the data encoding the DeviceCredential struct to a stream. Args: output_stream (stream): A data stream in which to encode object data, supporting a write method; usually a BytearrayStream object. kmip_version (KMIPVersion): An enumeration defining the KMIP version with which the object will be encoded. Optional,...
codesearchnet
def convert(isbn, code='978'): isbn = _isbn_cleanse(isbn) if (len(isbn) == 10): isbn = (code + isbn[:(- 1)]) return (isbn + calculate_checksum(isbn)) elif isbn.startswith('978'): return (isbn[3:(- 1)] + calculate_checksum(isbn[3:(- 1)])) else: raise IsbnError('Only ISBN-1...
Convert ISBNs between ISBN-10 and ISBN-13. Note: No attempt to hyphenate converted ISBNs is made, because the specification requires that *any* hyphenation must be correct but allows ISBNs without hyphenation. Args: isbn (str): SBN, ISBN-10 or ISBN-13 code (str): EAN Bookland code Returns: ``str``: Converted ISBN-10...
codesearchnet
def _GetFieldByName(message_descriptor, field_name): try: return message_descriptor.fields_by_name[field_name] except KeyError: raise ValueError('Protocol message %s has no "%s" field.' % (message_descriptor.name, field_name))
Returns a field descriptor by field name. Args: message_descriptor: A Descriptor describing all fields in message. field_name: The name of the field to retrieve. Returns: The field descriptor associated with the field name.
juraj-google-style
def write_input(self, output_dir=".", make_dir_if_not_present=True): if make_dir_if_not_present and not os.path.exists(output_dir): os.makedirs(output_dir) feff = self.all_input() feff_input = "\n\n".join(str(feff[k]) for k in ["HEADER", "P...
Writes a set of FEFF input to a directory. Args: output_dir: Directory to output the FEFF input files make_dir_if_not_present: Set to True if you want the directory ( and the whole path) to be created if it is not present.
juraj-google-style
def propose(self): candidates = self._get_candidates() if (candidates is None): return None predictions = self.predict(candidates) idx = self._acquire(predictions) return candidates[idx]
Use the trained model to propose a new pipeline. Returns: int: Index corresponding to pipeline to try in ``dpp_matrix``.
codesearchnet
def stack_and_pad_tensors(batch, padding_index=DEFAULT_PADDING_INDEX, dim=0): lengths = [tensor.shape[0] for tensor in batch] max_len = max(lengths) padded = [pad_tensor(tensor, max_len, padding_index) for tensor in batch] lengths = torch.tensor(lengths) padded = torch.stack(padded, dim=dim).co...
Pad a :class:`list` of ``tensors`` (``batch``) with ``padding_index``. Args: batch (:class:`list` of :class:`torch.Tensor`): Batch of tensors to pad. padding_index (int, optional): Index to pad tensors with. dim (int, optional): Dimension on to which to concatenate the batch of tensors. Returns torch.Tensor, torch.Te...
juraj-google-style
def make_pixel_mask(image: np.ndarray, output_size: Tuple[int, int], input_data_format: Optional[Union[str, ChannelDimension]]=None) -> np.ndarray: input_height, input_width = get_image_size(image, channel_dim=input_data_format) mask = np.zeros(output_size, dtype=np.int64) mask[:input_height, :input_width] ...
Make a pixel mask for the image, where 1 indicates a valid pixel and 0 indicates padding. Args: image (`np.ndarray`): Image to make the pixel mask for. output_size (`Tuple[int, int]`): Output size of the mask.
github-repos
def get_all(cls, include_disabled=True): if (cls == BaseAccount): raise InquisitorError('get_all on BaseAccount is not supported') account_type_id = db.AccountType.find_one(account_type=cls.account_type).account_type_id qry = db.Account.order_by(desc(Account.enabled), Account.account_type_id, Accoun...
Returns a list of all accounts of a given type Args: include_disabled (`bool`): Include disabled accounts. Default: `True` Returns: list of account objects
codesearchnet
def get_mealy_conjecture(self): mma = MealyMachine() for s in self.observation_table.sm_vector: for i in self.alphabet: dst = self.observation_table.equiv_classes[s + i] if dst is None: logging.debug('Conjecture at...
Utilize the observation table to construct a Mealy Machine. The library used for representing the Mealy Machine is the python bindings of the openFST library (pyFST). Args: None Returns: MealyMachine: A mealy machine build based on a closed and consistent observation table.
juraj-google-style
def _central_crop(image, crop_height, crop_width): shape = tf.shape(image) height, width = shape[0], shape[1] mlperf_log.resnet_print(key=mlperf_log.INPUT_CENTRAL_CROP, value=[crop_height, crop_width]) amount_to_be_cropped_h = (height - crop_height) crop_top = amount_to_be_cropp...
Performs central crops of the given image list. Args: image: a 3-D image tensor crop_height: the height of the image following the crop. crop_width: the width of the image following the crop. Returns: 3-D tensor with cropped image.
juraj-google-style
def _request(self, path, key, data, method, key_is_cik, extra_headers={}): if (method == 'GET'): if (len(data) > 0): url = ((path + '?') + data) else: url = path body = None else: url = path body = data headers = {} if key_is_cik: h...
Generically shared HTTP request method. Args: path: The API endpoint to interact with. key: A string for the key used by the device for the API. Either a CIK or token. data: A string for the pre-encoded data to be sent with this request. method: A string denoting the HTTP verb to use for the request (e.g. 'GET', 'POS...
codesearchnet
def to_representation(self, instance): if self.id_only(): return instance.pk pk = getattr(instance, 'pk', None) if not settings.ENABLE_SERIALIZER_OBJECT_CACHE or pk is None: return self._to_representation(instance) else: if pk not in self.obj...
Modified to_representation method. Optionally may cache objects. Arguments: instance: A model instance or data object. Returns: Instance ID if the serializer is meant to represent its ID. Otherwise, a tagged data dict representation.
juraj-google-style
def ssh_reachable(self, tries=None, propagate_fail=True): if not self.running(): return False try: ssh.get_ssh_client( ip_addr=self.ip(), host_name=self.name(), ssh_tries=tries, propagate_fail=propagate_fai...
Check if the VM is reachable with ssh Args: tries(int): Number of tries to try connecting to the host propagate_fail(bool): If set to true, this event will appear in the log and fail the outter stage. Otherwise, it will be discarded. Returns: bool: True if the VM is reachable.
juraj-google-style
def from_celery(cls, name, worker_dict, queues): return WorkerStats( name=name, broker=BrokerStats.from_celery(worker_dict['broker']), pid=worker_dict['pid'], process_pids=worker_dict['pool']['processes'], concurrency=worker_dict['pool']['max-...
Create a WorkerStats object from the dictionary returned by celery. Args: name (str): The name of the worker. worker_dict (dict): The dictionary as returned by celery. queues (list): A list of QueueStats objects that represent the queues this worker is listening on. Returns: WorkerStats: A fully initialized WorkerSta...
juraj-google-style
def nodes_on_wire(self, wire, only_ops=False): current_node = self.input_map.get(wire, None) if not current_node: raise DAGCircuitError('The given wire %s is not present in the circuit' % str(wire)) more_nodes = True while more_nod...
Iterator for nodes that affect a given wire Args: wire (tuple(Register, index)): the wire to be looked at. only_ops (bool): True if only the ops nodes are wanted otherwise all nodes are returned. Yield: DAGNode: the successive ops on the given wire Raises: DAGCircuitError: if the given wire doesn't exist in the DAG
juraj-google-style
def update_batch(self, loss_per_instance): if self.batch_indices is None: raise TensorForceError("Need to call get_batch before each update_batch call.") for index, loss in zip(self.batch_indices, loss_per_instance): new_priority = (np.abs...
Computes priorities according to loss. Args: loss_per_instance:
juraj-google-style
def MakePartialStat(self, fd): is_dir = "Container" in fd.behaviours return { "pathspec": fd.Get(fd.Schema.PATHSPEC, ""), "st_atime": fd.Get(fd.Schema.LAST, 0), "st_blksize": 0, "st_blocks": 0, "st_ctime": 0, "st_dev": 0, "st_gid": 0, "st_in...
Try and give a 'stat' for something not in the data store. Args: fd: The object with no stat. Returns: A dictionary corresponding to what we'll say the 'stat' is for objects which are not actually files, so have no OS level stat.
juraj-google-style
def _compute_linear_scaling_rope_parameters(config: Optional[PretrainedConfig]=None, device: Optional['torch.device']=None, seq_len: Optional[int]=None, **rope_kwargs) -> tuple['torch.Tensor', float]: if config is not None and len(rope_kwargs) > 0: raise ValueError(f'Unexpected arguments: `**rope_kwargs` an...
Computes the inverse frequencies with linear scaling. Credits to the Reddit user /u/kaiokendev Args: config ([`~transformers.PretrainedConfig`]): The model configuration. device (`torch.device`): The device to use for initialization of the inverse frequencies. seq_len (`int`, *optional*): The current sequence length. U...
github-repos
def _wait_all_creative_activation(self, feed_item, timeout=128): for association in feed_item['creative_assignment']: creative = self._creative_dao.get(association, required=True) self._wait_creative_activation(creative['id'], timeout)
Waits for activation of all creatives that should be associated to the feed item that represents an ad. Args: feed_item: Feed item representing an Ad from the Bulkdozer feed. timeout: Optional parameter identifying how long to wait for all creatives to be activated in seconds. Raises: Exception: In case one or more c...
github-repos
def __init__(self, vfs_object): super(ObjectsCacheValue, self).__init__() self._reference_count = 0 self.vfs_object = vfs_object
Initializes the resolver objects cache value object. Args: vfs_object (object): VFS object to cache.
juraj-google-style
def supported_language(lang): try: self.get_collection(lang=lang) return True except LanguageNotSupported as e: return False
Return True if polyglot supports the language. Args: lang (string): Language code.
juraj-google-style
def by_phone(self, phone, cc=None): header, content = self._http_request(self.BASE_URL, phone=phone, cc=cc) return json.loads(content)
Perform a Yelp Phone API Search based on phone number given. Args: phone - Phone number to search by cc - ISO 3166-1 alpha-2 country code. (Optional)
juraj-google-style
def get_dump_sizes_bytes(self, node_name, output_slot, debug_op, device_name=None): device_name = self._infer_device_name(device_name, node_name) watch_key = _get_tensor_watch_key(node_name, output_slot, debug_op) if watch_key not in self._watch_key_to_datum[device_name]: raise WatchKeyDoesNotExistI...
Get the sizes of the dump files for a debug-dumped tensor. Unit of the file size: byte. Args: node_name: (`str`) name of the node that the tensor is produced by. output_slot: (`int`) output slot index of tensor. debug_op: (`str`) name of the debug op. device_name: (`str`) name of the device. If there is only one devi...
github-repos
def lines_from_file(path, as_interned=False, encoding=None): lines = None with io.open(path, encoding=encoding) as f: if as_interned: lines = [sys.intern(line) for line in f.read().splitlines()] else: lines = f.read().splitlines() return lines
Create a list of file lines from a given filepath. Args: path (str): File path as_interned (bool): List of "interned" strings (default False) Returns: strings (list): File line list
codesearchnet
def traverse(self, index=0): if (index < len(self.nodes)): for entity in self.nodes[index]: for next_result in self.traverse(index=(index + 1)): if isinstance(entity, list): (yield (entity + next_result)) else: (yield ([enti...
This is used to produce a list of lists where each each item in that list is a diffrent combination of items from the lists within with every combination of such values. Args: index (int) : the index at witch to start the list. Note this is used only in the function as a processing Returns: list : is every combinatio...
codesearchnet
def molecule(lines): count_line = lines[3] num_atoms = int(count_line[0:3]) num_bonds = int(count_line[3:6]) compound = Compound() compound.graph._node = atoms(lines[4: num_atoms+4]) compound.graph._adj = bonds(lines[num_atoms+4: num_atoms+num_bonds+4], ...
Parse molfile part into molecule object Args: lines (list): lines of molfile part Raises: ValueError: Symbol not defined in periodictable.yaml (Polymer expression not supported yet)
juraj-google-style
def depth_december_average_ground_temperature(self, value=None): if value is not None: try: value = float(value) except ValueError: raise ValueError( 'value {} need to be of type float ' 'for field `depth_de...
Corresponds to IDD Field `depth_december_average_ground_temperature` Args: value (float): value for IDD Field `depth_december_average_ground_temperature` Unit: C if `value` is None it will not be checked against the specification and is assumed to be a missing value Raises: ValueError: if `value` is not a valid value
juraj-google-style
def _checkSetpointValue( setpointvalue, maxvalue ): if maxvalue is None: raise TypeError('The maxvalue (for the setpoint) must not be None!') minimalmodbus._checkNumerical(setpointvalue, minvalue=0, maxvalue=maxvalue, description='setpoint value')
Check that the given setpointvalue is valid. Args: * setpointvalue (numerical): The setpoint value to be checked. Must be positive. * maxvalue (numerical): Upper limit for setpoint value. Must be positive. Raises: TypeError, ValueError
juraj-google-style
def _extract_all_responses(self, resources, api_endpoint, api_name): (all_responses, resources) = self._bulk_cache_lookup(api_name, resources) resource_chunks = self._prepare_resource_chunks(resources) response_chunks = self._request_reports('resource', resource_chunks, api_endpoint) self._extract_respo...
Aux function to extract all the API endpoint responses. Args: resources: list of string hashes. api_endpoint: endpoint path api_name: endpoint name Returns: A dict with the hash as key and the VT report as value.
codesearchnet
def add_individual(self, genotype): logger.debug('Adding genotype {0} to variant {1}'.format(genotype, self['variant_id'])) self['individuals'].append(genotype)
Add the information for a individual This adds a genotype dict to variant['individuals'] Args: genotype (dict): A genotype dictionary
codesearchnet
def conditionally_inline_policies(role_name, sr_entry): service_type = sr_entry['type'] if not (service_type in SERVICE_TYPE_ROLE and "policies" in sr_entry): print_if_verbose("not eligible for policies; service_type: {} is not valid for policies " "or no 'policies' key in service regist...
If 'policies' key lists the filename prefixes of policies to bind to the role, load them from the expected path and inline them onto the role Args: role_name: name of the role to attach the policies to sr_entry: service registry entry
juraj-google-style
def diff_levenshtein(self, diffs): levenshtein = 0 insertions = 0 deletions = 0 for (op, data) in diffs: if op == self.DIFF_INSERT: insertions += len(data) elif op == self.DIFF_DELETE: deletions += len(data) elif op == self.DIFF_EQUAL: levenshtein ...
Compute the Levenshtein distance; the number of inserted, deleted or substituted characters. Args: diffs: Array of diff tuples. Returns: Number of changes.
juraj-google-style
def ne(left: Any, right: Any) -> bool: return not eq(left, right)
Compares if two values are not equal. Use symbolic equality if possible. Example:: @pg.members([ ('x', pg.typing.Any()) ]) class A(pg.Object): def sym_eq(self, right): if super().sym_eq(right): return True return pg.eq(self.x, right) class B: pass assert pg.ne(1, 2) assert pg.ne(A(1), A(2)) # A has override `sym_eq...
github-repos
def add_module(self, module_name, module_ui): m_button = tk.Label(self.module_selection, text=module_name, bg='white', anchor='w') m_button.grid(column=0, row=len(self.module_selection.winfo_children()), padx=0, pady=0, sticky='W E N S') self.module_buttons[module_name] = m_button m_button.bind('<Button...
Adds a module to the list Args: module_name (str): The name of the module module_ui: The function to call to create the module's UI
codesearchnet
def export(self, top=True): out = [] if top: out.append(self._internal_name) out.append(self._to_str(self.comments_2)) return ",".join(out)
Exports object to its string representation. Args: top (bool): if True appends `internal_name` before values. All non list objects should be exported with value top=True, all list objects, that are embedded in as fields inlist objects should be exported with `top`=False Returns: str: The objects string representatio...
juraj-google-style
def convert_placeholder_to_const(input_graph_def, nodes_to_convert=None): input_node_map = {} for node in input_graph_def.node: if node.name not in input_node_map: input_node_map[node.name] = node else: raise ValueError('Duplicate node names detected for ', node.name) ...
Rename the PlaceHolderWithDefault node to constant In a frozen graph, PlaceholderWithDefault nodes can be converted to Constant op nodes with same value. This will help simplify the graph. Args: input_graph_def: A GraphDef containing a model. nodes_to_convert: A list of PlaceholderWithDefault or Placeholder nodes to ...
github-repos
def _ParseRecordExtraField(self, byte_stream, file_offset): extra_field_map = self._GetDataTypeMap('asl_record_extra_field') try: record_extra_field = self._ReadStructureFromByteStream(byte_stream, file_offset, extra_field_map) except (ValueError, errors.ParseError) as exception: raise error...
Parses a record extra field. Args: byte_stream (bytes): byte stream. file_offset (int): offset of the record extra field relative to the start of the file. Returns: asl_record_extra_field: record extra field. Raises: ParseError: if the record extra field cannot be parsed.
codesearchnet
def __init__( self, path: str, query_string: bytes, scheme: str, headers: CIMultiDict, subprotocols: List[str], receive: Callable, send: Callable, accept: Callable, ) -> None: super().__init_...
Create a request object. Arguments: path: The full unquoted path of the request. query_string: The raw bytes for the query string part. scheme: The scheme used for the request. headers: The request headers. subprotocols: The subprotocols requested. receive: Returns an awaitable of the current data accept: Idempotent ...
juraj-google-style
def popn(self, buffer_type, count): buffer_type = str(buffer_type) if buffer_type == u'streaming': chosen_buffer = self.streaming_data else: chosen_buffer = self.storage_data if count > len(chosen_buffer): raise StreamEmptyError("Not enough...
Remove and return the oldest count values from the named buffer Args: buffer_type (str): The buffer to pop from (either u"storage" or u"streaming") count (int): The number of readings to pop Returns: list(IOTileReading): The values popped from the buffer
juraj-google-style
def ValidateCertificateHostname(cert, hostname): hosts = GetValidHostsForCert(cert) boto.log.debug('validating server certificate: hostname=%s, certificate hosts=%s', hostname, hosts) for host in hosts: host_re = host.replace('.', '\\.').replace('*', '[^.]*') if re.search(('^%s$' % (host_re,...
Validates that a given hostname is valid for an SSL certificate. Args: cert: A dictionary representing an SSL certificate. hostname: The hostname to test. Returns: bool: Whether or not the hostname is valid for this certificate.
codesearchnet
def incr(self, key, value, noreply=False): key = self.check_key(key) cmd = (((b'incr ' + key) + b' ') + six.text_type(value).encode('ascii')) if noreply: cmd += b' noreply' cmd += b'\r\n' results = self._misc_cmd([cmd], b'incr', noreply) if noreply: return None if (results[0]...
The memcached "incr" command. Args: key: str, see class docs for details. value: int, the amount by which to increment the value. noreply: optional bool, False to wait for the reply (the default). Returns: If noreply is True, always returns None. Otherwise returns the new value of the key, or None if the key wasn't f...
codesearchnet
def index_all(self, index_name): oks = 0 notoks = 0 for ok, item in streaming_bulk( self.es_client, self._iter_documents(index_name) ): if ok: oks += 1 else: notoks += 1 logging.info( ...
Index all available documents, using streaming_bulk for speed Args: index_name (string): The index
juraj-google-style
def pad_image(self, image: np.ndarray, size: Dict[str, int], data_format: Optional[Union[str, ChannelDimension]]=None, input_data_format: Optional[Union[str, ChannelDimension]]=None) -> np.ndarray: output_height, output_width = (size['height'], size['width']) input_height, input_width = get_image_size(image, ch...
Pad the image to the specified size at the top, bottom, left and right. Args: image (`np.ndarray`): The image to be padded. size (`Dict[str, int]`): The size `{"height": h, "width": w}` to pad the image to. data_format (`str` or `ChannelDimension`, *optional*): The data format of the output image. If unset, the same f...
github-repos
def AsDict(self, dt=True):
    """A dict representation of this Comment instance.

    Uses the same key names as the JSON representation; falsy fields are
    omitted from the result.

    Args:
        dt (bool): Accepted for API compatibility.
            NOTE(review): the visible implementation returns ``posted_at``
            unchanged regardless of this flag — confirm against callers.

    Returns:
        A dict representing this Comment instance.
    """
    result = {}
    for attr in ('body', 'posted_at'):
        value = getattr(self, attr)
        if value:
            result[attr] = value
    if self.user:
        result['user'] = self.user.AsDict()
    return result
A dict representation of this Comment instance. The return value uses the same key names as the JSON representation. Args: dt (bool): If True, return dates as python datetime objects. If False, return dates as ISO strings. Return: A dict representing this Comment instance
juraj-google-style
def sanitize(vpc_config): if (vpc_config is None): return vpc_config elif (type(vpc_config) is not dict): raise ValueError('vpc_config is not a dict: {}'.format(vpc_config)) elif (not vpc_config): raise ValueError('vpc_config is empty') subnets = vpc_config.get(SUBNETS_KEY) i...
Checks that an instance of VpcConfig has the expected keys and values, removes unexpected keys, and raises ValueErrors if any expectations are violated Args: vpc_config (dict): a VpcConfig dict containing 'Subnets' and 'SecurityGroupIds' Returns: A valid VpcConfig dict containing only 'Subnets' and 'SecurityGroupIds'...
codesearchnet
def _get_mutation_to_unknown(self, node: cfg.CFGNode, values: list[_base.BaseValue]) -> list[function.Mutation]: mutations = [] for v in values: if isinstance(v, _instance_base.SimpleValue): for name in v.instance_type_parameters: if name in self._mutated_type_parameters: ...
Mutation for making all type parameters in a list of instances "unknown". This is used if we call a function that has mutable parameters and multiple signatures with unknown parameters. Args: node: The current CFG node. values: A list of instances of BaseValue. Returns: A list of function.Mutation instances.
github-repos
def get_general_case_info(adapter, institute_id=None, slice_query=None): general = {} name_query = slice_query cases = adapter.cases(owner=institute_id, name_query=name_query) phenotype_cases = 0 causative_cases = 0 pinned_cases = 0 cohort_cases = 0 pedigree = { 1: ...
Return general information about cases Args: adapter(adapter.MongoAdapter) institute_id(str) slice_query(str): Query to filter cases to obtain statistics for. Returns: general(dict)
juraj-google-style
def vcf_records(self, format_tags=None, qualified=False): if qualified: sample_names = self.qualified_sample_names else: sample_names = self.sample_names for line in self._file_reader.read_lines(): if line.startswith(" continue ...
Generates parsed VcfRecord objects. Typically called in a for loop to process each vcf record in a VcfReader. VcfReader must be opened in advanced and closed when complete. Skips all headers. Args: qualified: When True, sample names are prefixed with file name Returns: Parsed VcfRecord Raises: StopIteration: when r...
juraj-google-style
def export_node(self, n) -> Dict[(str, Union[(str, List[str])])]: node_dict = {'name': n[0], 'units': _get_units(n[0]), 'dtype': _get_dtype(n[0]), 'arguments': list(self.predecessors(n[0]))} if (not (n[1].get('indicators') is None)): for indicator in n[1]['indicators'].values(): if ('dataset...
Return dict suitable for exporting to JSON. Args: n: A dict representing the data in a networkx AnalysisGraph node. Returns: The node dict with additional fields for name, units, dtype, and arguments.
codesearchnet
def launch_simulation(self, parameter):
    """Run a single simulation via SimulationRunner and return its result.

    Used by ParallelRunner's run_simulations to map simulation execution
    over the parameter list.

    Args:
        parameter (dict): the parameter combination to simulate.
    """
    results = SimulationRunner.run_simulations(self, [parameter],
                                               self.data_folder)
    return next(results)
Launch a single simulation, using SimulationRunner's facilities. This function is used by ParallelRunner's run_simulations to map simulation running over the parameter list. Args: parameter (dict): the parameter combination to simulate.
juraj-google-style
def is_frozen_graph(sess):
    """Determine whether the session's graph is frozen.

    A graph counts as frozen when it contains no variable operations (any
    op whose type starts with 'Variable' or ends with 'VariableOp').

    Args:
        sess: TensorFlow Session.

    Returns:
        Bool: True when no variable ops are present, False otherwise.
    """
    return not any(
        op.type.startswith('Variable') or op.type.endswith('VariableOp')
        for op in sess.graph.get_operations())
Determines if the graph is frozen. Determines if a graph has previously been frozen by checking for any operations of type Variable*. If variables are found, the graph is not frozen. Args: sess: TensorFlow Session. Returns: Bool.
github-repos
def _AssertAtLeast3DImage(image):
    """Assert that `image` is a properly shaped (>= 3-D) image tensor.

    The check is performed statically when the shape is statically known;
    otherwise a control dependency on a dynamic assert op is attached to
    the returned tensor.

    Args:
        image: >= 3-D Tensor of size [*, height, width, depth].

    Returns:
        `image` with the validation control dependencies attached.
    """
    dynamic_checks = _CheckAtLeast3DImage(image, require_static=False)
    return control_flow_ops.with_dependencies(dynamic_checks, image)
Assert that we are working with a properly shaped image. Performs the check statically if possible (i.e. if the shape is statically known). Otherwise adds a control dependency to an assert op that checks the dynamic shape. Args: image: >= 3-D Tensor of size [*, height, width, depth] Raises: ValueError: if image.shap...
github-repos
def compute_bleu(reference_corpus, translation_corpus, max_order=4, use_bp=True): reference_length = 0 translation_length = 0 bp = 1.0 geo_mean = 0 matches_by_order = [0] * max_order possible_matches_by_order = [0] * max_order precisions = [] for (references, translations) in zip(r...
Computes BLEU score of translated segments against one or more references. Args: reference_corpus: list of references for each translation. Each reference should be tokenized into a list of tokens. translation_corpus: list of translations to score. Each translation should be tokenized into a list of tokens. max_order:...
juraj-google-style
def _update_service_current_state(service: ServiceState):
    """Set a service's current state from its target state.

    Called after a service's target state has changed so the recorded
    current state catches up with it.

    Args:
        service (ServiceState): Service state object to update.
    """
    new_state = service.target_state
    LOG.debug("Setting current state from target state for %s", service.id)
    service.update_current_state(new_state)
Update the current state of a service. Updates the current state of services after their target state has changed. Args: service (ServiceState): Service state object to update
juraj-google-style
def save(self, data: dict):
    """Write the provided data to ``self.output_path`` as a JSON document.

    Args:
        data (`dict`): The data to store.
    """
    serialized = json.dumps(data)
    with open(self.output_path, 'w') as output_file:
        output_file.write(serialized)
Save the provided data object in a json file. Args: data (`dict`): The data to store.
github-repos
def init(self, basedir, config, sourcedir, targetdir, cwd='', commit=True): if (not basedir): basedir = '.' (abs_basedir, abs_config, abs_sourcedir, abs_targetdir) = self.expand(basedir, config, sourcedir, targetdir, cwd) self.valid_paths(abs_config, abs_sourcedir, abs_targetdir) if commit: ...
Init project structure and configuration from given arguments Args: basedir (string): Project base directory used to prepend relative paths. If empty or equal to '.', it will be filled with current directory path. config (string): Settings file path. sourcedir (string): Source directory path. targetdir (string): Compi...
codesearchnet
def _ParseFieldsMetadata(self, structure): fields = structure.fields.split(' ') log_line_structure = pyparsing.Empty() if fields[0] == 'date' and fields[1] == 'time': log_line_structure += self.DATE_TIME.setResultsName('date_time') fields = fields[2:] for member in fields: log_l...
Parses the fields metadata and updates the log line definition to match. Args: structure (pyparsing.ParseResults): structure parsed from the log file.
juraj-google-style
def _project_dict(self, **kwargs: Dict[str, Any]) -> Dict[str, Hist]: get_hist_args = copy.deepcopy(kwargs) projection_name_args = copy.deepcopy(kwargs) for key, input_observable in self.observable_to_project_from.items(): output_hist, projection_name, projection_na...
Driver function for projecting and storing a dictionary of observables. Args: kwargs (dict): Additional named args to be passed to projection_name(...) and output_key_name(...) Returns: The projected histograms. The projected histograms are also stored in ``output_observable``.
juraj-google-style
def LateBind(self, target=None):
    """Late binding callback invoked once the target RDFValue class exists.

    Gives this field descriptor the chance to finish initialization after
    the point of definition.

    Args:
        target: The target nested class; must be an RDFProtoStruct subclass.

    Raises:
        TypeError: If the target class is not of the expected type.
    """
    if not issubclass(target, RDFProtoStruct):
        raise TypeError('Field %s expects a protobuf, but target is %s' %
                        (self, target))
    self.late_bound = False
    self.type = target
    self.owner.AddDescriptor(self)
Late binding callback. This method is called on this field descriptor when the target RDFValue class is finally defined. It gives the field descriptor an opportunity to initialize after the point of definition. Args: target: The target nested class. Raises: TypeError: If the target class is not of the expected type.
codesearchnet
def get_full_alias(self, query):
    """Get the full alias given a search query.

    Args:
        query: The alias (or its first word) to search for.

    Returns:
        The full alias (with the placeholders, if any), or '' when no
        section matches.
    """
    sections = self.alias_table.sections()
    if query in sections:
        return query
    for section in sections:
        if section.split()[0] == query:
            return section
    return ''
Get the full alias given a search query. Args: query: The query this function performs searching on. Returns: The full alias (with the placeholders, if any).
codesearchnet
def delete_branch(profile, name):
    """Delete a branch.

    Args:
        profile: A profile generated from ``simplygithub.authentication.profile``,
            carrying the repo to connect to and the token to connect with.
        name: The name of the branch to delete.

    Returns:
        The response of the DELETE request.
    """
    branch_ref = 'heads/' + name
    return refs.delete_ref(profile, branch_ref)
Delete a branch. Args: profile A profile generated from ``simplygithub.authentication.profile``. Such profiles tell this module (i) the ``repo`` to connect to, and (ii) the ``token`` to connect with. name The name of the branch to delete. Returns: The response of the DELETE request.
codesearchnet
def run(self, text):
    """Apply every stored regex substitution to ``text`` in sequence.

    Args:
        text (string): the input text.

    Returns:
        string: the text after all substitutions have been applied.
    """
    result = text
    for pattern in self.regexes:
        result = pattern.sub(self.repl, result)
    return result
Run each regex substitution on ``text``. Args: text (string): the input text. Returns: string: text after all substitutions have been sequentially applied.
juraj-google-style
def get_all_artifacts_per_task_id(chain, upstream_artifacts): all_artifacts_per_task_id = {} for link in chain.links: if (link.task_type in PARENT_TASK_TYPES): add_enumerable_item_to_dict(dict_=all_artifacts_per_task_id, key=link.task_id, item='public/task-graph.json') if (link.task_...
Return every artifact to download, including the Chain Of Trust Artifacts. Args: chain (ChainOfTrust): the chain of trust object upstream_artifacts: the list of upstream artifact definitions Returns: dict: a dict keyed by taskId, each value the sorted list of artifact paths to download for that task
codesearchnet
def serialize_ndarray_b64(o):
    """Serialize a :obj:`numpy.ndarray` with base64-encoded binary data.

    The datatype and shape are stored human-readable, while the raw array
    buffer is base64 encoded.

    Args:
        o (:obj:`numpy.ndarray`): :obj:`ndarray` to be serialized.

    Returns:
        A dictionary that can be passed to :obj:`json.dumps`, with keys
        ``_type``, ``data``, ``dtype`` and ``shape``.
    """
    # b64encode needs a contiguous buffer; copy only when necessary.
    if o.flags['C_CONTIGUOUS']:
        o_data = o.data
    else:
        o_data = np.ascontiguousarray(o).data
    data_b64 = base64.b64encode(o_data)
    return dict(
        _type='np.ndarray',
        data=data_b64.decode('utf-8'),
        # str(o.dtype) instead of the dtype object: np.dtype is not JSON
        # serializable, and the string round-trips via np.dtype(str).
        dtype=str(o.dtype),
        shape=o.shape)
Serializes a :obj:`numpy.ndarray` in a format where the datatype and shape are human-readable, but the array data itself is binary64 encoded. Args: o (:obj:`numpy.ndarray`): :obj:`ndarray` to be serialized. Returns: A dictionary that can be passed to :obj:`json.dumps`.
juraj-google-style
def rsub(self, other, axis="columns", level=None, fill_value=None):
    """Subtract this DataFrame from a DataFrame/Series/scalar (reversed).

    Args:
        other: The object from which this DataFrame is subtracted.
        axis: The axis to apply the subtraction over.
        level: Multilevel index level to subtract over.
        fill_value: The value to fill NaNs with.

    Returns:
        A new DataFrame with the subtraction applied.
    """
    op_kwargs = dict(axis=axis, level=level, fill_value=fill_value)
    return self._binary_op("rsub", other, **op_kwargs)
Subtract this DataFrame from a DataFrame/Series/scalar (reversed subtraction: other - this). Args: other: The object from which this DataFrame is subtracted. axis: The axis to apply the subtraction over. level: Multilevel index level to subtract over. fill_value: The value to fill NaNs with. Returns: A new DataFrame with the subtraction applied.
juraj-google-style
def get_tensor_mtf_dimension_names(self, tensor_name):
    """The Mesh TensorFlow dimensions associated with a tensor.

    Args:
        tensor_name: a string, name of a tensor in the graph.

    Returns:
        a [string], the names of Mesh TensorFlow dimensions; empty when the
        named tensor is not an mtf.Tensor.
    """
    tensor = self._name_to_tensor(tensor_name)
    if not isinstance(tensor, mtf.Tensor):
        return []
    return tensor.shape.dimension_names
The Mesh TensorFlow dimensions associated with a tensor. Args: tensor_name: a string, name of a tensor in the graph. Returns: a [string], the names of Mesh TensorFlow dimensions.
juraj-google-style
def prune_unused_nodes(meta_graph, signature_def): graph = tf_v1.Graph() with graph.as_default(): tf_v1.train.import_meta_graph(meta_graph, input_map={}, import_scope="") used_node_names = set() for _, tensor_def in signature_def.outputs.items(): output_tensor = graph.get_tensor_by_n...
Function to prune unused ops given a signature def. This function does a graph traversal through from all outputs as defined in the signature_def to collect all used nodes. Then, any nodes which are unused can be discarded. This is useful for graph which are executing eagerly or on TPUs. Args: meta_graph: The input/o...
juraj-google-style
def __init__(self, dev_id, address, local_key=None, dev_type=None, connection_timeout=10): self.id = dev_id self.address = address self.local_key = local_key self.local_key = local_key.encode('latin1') self.dev_type = dev_type self.connection_timeout = connection...
Represents a Tuya device. Args: dev_id (str): The device id. address (str): The network address. local_key (str, optional): The encryption key. Defaults to None. dev_type (str, optional): The device type. It will be used as key for lookups in payload_dict. Defaults to None. Attributes: port (int): The port to connect...
juraj-google-style
def NormalizePath(path, sep='/'): if (not path): return sep path = SmartUnicode(path) path_list = path.split(sep) if (path_list[0] in ['.', '..', '']): path_list.pop(0) i = 0 while True: list_len = len(path_list) for i in range(i, len(path_list)): if (...
A sane implementation of os.path.normpath. The standard implementation treats leading / and // as different leading to incorrect normal forms. NOTE: Its ok to use a relative path here (without leading /) but any /../ will still be removed anchoring the path at the top level (e.g. foo/../../../../bar => bar). Args: p...
codesearchnet
def _get_structured_grad_output(outputs, grads, body_grad_graph): result = [] outputs_idx = 3 structured_outputs_idx = 3 for g in grads: if g is None: result.append(None) continue output = body_grad_graph.structured_outputs[structured_outputs_idx] structur...
Returns the values that should be returned from the while grad function. Args: outputs: the raw Tensor outputs of the grad While op. grads: the input gradients to the gradient function. body_grad_graph: _WhileBodyGradFuncGraph. Returns: A list of gradient values. May include Nones.
github-repos
def is_compatible_with(self, spec_or_tensor):
    """Returns True if spec_or_tensor is compatible with this TensorSpec.

    Two tensors are considered compatible if they have the same dtype and
    their shapes are compatible (see `tf.TensorShape.is_compatible_with`).

    Args:
        spec_or_tensor: A tf.TensorSpec or a tf.Tensor.

    Returns:
        True if spec_or_tensor is compatible with self.
    """
    # Early return preserves the original short-circuit on dtype mismatch.
    if not self._dtype.is_compatible_with(spec_or_tensor.dtype):
        return False
    return self._shape.is_compatible_with(spec_or_tensor.shape)
Returns True if spec_or_tensor is compatible with this TensorSpec. Two tensors are considered compatible if they have the same dtype and their shapes are compatible (see `tf.TensorShape.is_compatible_with`). Args: spec_or_tensor: A tf.TensorSpec or a tf.Tensor Returns: True if spec_or_tensor is compatible with self.
juraj-google-style