code
stringlengths
20
4.93k
docstring
stringlengths
33
1.27k
source
stringclasses
3 values
def _credentials_found_in_envars(): return any([os.getenv('PAN_ACCESS_TOKEN'), os.getenv('PAN_CLIENT_ID'), os.getenv('PAN_CLIENT_SECRET'), os.getenv('PAN_REFRESH_TOKEN')])
Check for credentials in envars. Returns: bool: ``True`` if at least one is found, otherwise ``False``.
codesearchnet
def CheckMySQLConnection(db_options):
    """Checks whether a connection can be established to MySQL.

    Args:
        db_options: A dict mapping GRR MySQL config options to their values.

    Returns:
        A boolean indicating whether a connection could be made to a MySQL
        server instance with the given options.
    """
    for tries_left in range(_MYSQL_MAX_RETRIES, (- 1), (- 1)):
        try:
            connection_options = dict(
                host=db_options['Mysql.host'],
                port=db_options['Mysql.port'],
                db=db_options['Mysql.database_name'],
                user=db_options['Mysql.database_username'],
                passwd=db_options['Mysql.database_password'],
                charset='utf8')
            # SSL is considered enabled when a client key path is configured.
            ssl_enabled = ('Mysql.client_key_path' in db_options)
            if ssl_enabled:
                connection_options['ssl'] = {
                    'key': db_options['Mysql.client_key_path'],
                    'cert': db_options['Mysql.client_cert_path'],
                    'ca': db_options['Mysql.ca_cert_path'],
                }
            connection = MySQLdb.connect(**connection_options)
            if ssl_enabled:
                # Verify the server actually negotiated SSL.
                cursor = connection.cursor()
                cursor.execute("SHOW VARIABLES LIKE 'have_ssl'")
                res = cursor.fetchone()
                if ((res[0] == 'have_ssl') and (res[1] == 'YES')):
                    print('SSL enabled successfully.')
                else:
                    print('Unable to establish SSL connection to MySQL.')
                    return False
            return True
        except MySQLdb.OperationalError as mysql_op_error:
            if (len(mysql_op_error.args) < 2):
                # Unexpected exception shape; retry after a pause.
                print(('Unexpected exception type received from MySQL. '
                       '%d attempts left: %s' % (tries_left, mysql_op_error))
                      .replace('MySQL. %d' % tries_left,
                               'MySQL. %d' % tries_left))
                time.sleep(_MYSQL_RETRY_WAIT_SECS)
                continue
            if (mysql_op_error.args[0] == mysql_conn_errors.CONNECTION_ERROR):
                print(('Failed to connect to MySQL. Is it running? '
                       '%d attempts left.' % tries_left))
            elif (mysql_op_error.args[0] == mysql_conn_errors.UNKNOWN_HOST):
                print('Unknown-hostname error encountered while trying to '
                      'connect to MySQL.')
                return False
            elif (mysql_op_error.args[0] == general_mysql_errors.BAD_DB_ERROR):
                # Database doesn't exist yet; the server itself is reachable.
                return True
            elif (mysql_op_error.args[0] in
                  (general_mysql_errors.ACCESS_DENIED_ERROR,
                   general_mysql_errors.DBACCESS_DENIED_ERROR)):
                print(('Permission error encountered while trying to connect '
                       'to MySQL: %s' % mysql_op_error))
                return False
            else:
                print(('Unexpected operational error encountered while trying '
                       'to connect to MySQL. \n%d attempts left: %s'
                       % (tries_left, mysql_op_error)))
        except MySQLdb.Error as mysql_error:
            print(('Unexpected error encountered while trying to connect to '
                   'MySQL. %d attempts left: %s' % (tries_left, mysql_error)))
        time.sleep(_MYSQL_RETRY_WAIT_SECS)
    return False
Checks whether a connection can be established to MySQL. Args: db_options: A dict mapping GRR MySQL config options to their values. Returns: A boolean indicating whether a connection could be made to a MySQL server instance with the given options.
codesearchnet
def ReadDataAtOffset(self, file_offset, size):
    """Reads a byte string from the file-like object at a specific offset.

    Args:
        file_offset (int): file offset to seek to, relative to the start
            of the file.
        size (int): number of bytes to read.

    Returns:
        bytes: data read.

    Raises:
        IOError: if the read failed.
        OSError: if the read failed.
    """
    file_object = self._file_object
    file_object.seek(file_offset, os.SEEK_SET)
    return file_object.read(size)
Reads a byte string from the file-like object at a specific offset. Args: file_offset (int): file offset. size (int): number of bytes to read. Returns: bytes: data read. Raises: IOError: if the read failed. OSError: if the read failed.
juraj-google-style
def _manual_repartition(self, axis, repartition_func, **kwargs):
    """Applies a manual partitioning function along an axis.

    Args:
        axis: The axis to shuffle data along.
        repartition_func: The function used to repartition data.
        **kwargs: Additional arguments forwarded to the prepared method.

    Returns:
        A `BaseFrameManager` object.
    """
    prepared = self._prepare_method(repartition_func, **kwargs)
    return self.data.manual_shuffle(axis, prepared)
This method applies all manual partitioning functions. Args: axis: The axis to shuffle data along. repartition_func: The function used to repartition data. Returns: A `BaseFrameManager` object.
codesearchnet
def __init__(self, solution_size, population_size=20):
    """Create an optimizer that uses random candidate solutions.

    Args:
        solution_size: The number of bits in every solution.
        population_size: The number of solutions in every iteration.
    """
    # Delegate all setup to the base optimizer; this subclass adds no state.
    super(_RandomOptimizer, self).__init__(solution_size, population_size)
Create an object that optimizes a given fitness function with random strings. Args: solution_size: The number of bits in every solution. population_size: The number of solutions in every iteration.
juraj-google-style
def moments_of_masked_time_series(time_series_tensor, broadcast_mask):
    """Compute mean and variance of a time series, ignoring masked entries.

    Args:
        time_series_tensor: float `Tensor` time series of shape
            `concat([batch_shape, [num_timesteps]])`.
        broadcast_mask: bool `Tensor` of the same shape as `time_series`;
            `True` marks entries excluded from the statistics.

    Returns:
        mean: float `Tensor` of shape `batch_shape`.
        variance: float `Tensor` of shape `batch_shape`.
    """
    dtype = time_series_tensor.dtype
    # Count of valid (unmasked) entries along the time axis.
    num_unmasked_entries = tf.cast(
        tf.reduce_sum(
            input_tensor=tf.cast((~ broadcast_mask), tf.int32), axis=(- 1)),
        dtype)
    zeros = tf.zeros_like(time_series_tensor)
    # Zero out masked entries so they contribute nothing to the sums.
    masked_series = tf.where(broadcast_mask, zeros, time_series_tensor)
    mean = (tf.reduce_sum(input_tensor=masked_series, axis=(- 1))
            / num_unmasked_entries)
    squared_deviation = ((time_series_tensor - mean[(..., tf.newaxis)]) ** 2)
    masked_sq_dev = tf.where(broadcast_mask, zeros, squared_deviation)
    variance = (tf.reduce_sum(input_tensor=masked_sq_dev, axis=(- 1))
                / num_unmasked_entries)
    return (mean, variance)
Compute mean and variance, accounting for a mask. Args: time_series_tensor: float `Tensor` time series of shape `concat([batch_shape, [num_timesteps]])`. broadcast_mask: bool `Tensor` of the same shape as `time_series`. Returns: mean: float `Tensor` of shape `batch_shape`. variance: float `Tensor` of shape `batch_shape`.
codesearchnet
def _create_split(last_client_key, next_client_key, query):
    """Create a new query restricted to the key range [last, next).

    Args:
        last_client_key: the previous key. If falsy, assumed to be the
            beginning of the range.
        next_client_key: the next key. If falsy, assumed to be the end.
        query: query to base the split query on.

    Returns:
        A clone of `query` with key-range filters appended, or `query`
        itself when no bounds were given.
    """
    if not (last_client_key or next_client_key):
        return query
    split_query = query.clone()
    range_filters = list(split_query.filters)
    if last_client_key:
        range_filters.append((KEY_PROPERTY_NAME, '>=', last_client_key))
    if next_client_key:
        range_filters.append((KEY_PROPERTY_NAME, '<', next_client_key))
    split_query.filters = range_filters
    return split_query
Create a new {@link Query} given the query and range. Args: last_client_key: the previous key. If null then assumed to be the beginning. next_client_key: the next key. If null then assumed to be the end. query: query to base the split query on. Returns: A split query with fetches entities in the range [last_key, next_client_key)
github-repos
def start_server(self):
    """Starts the snippet server on the remote device.

    Launches the server via an adb instrumentation command, checks the
    protocol version reported by the server, parses the device-side port
    from the server output and stores it in ``self.device_port``.

    Raises:
        errors.ServerStartProtocolError: if the protocol reported by the
            server startup process is unknown, or the serving line cannot
            be parsed.
    """
    persists_shell_cmd = self._get_persisting_command()
    self.log.debug('Snippet server for package %s is using protocol %d.%d', self.package, _PROTOCOL_MAJOR_VERSION, _PROTOCOL_MINOR_VERSION)
    option_str = self._get_instrument_options_str()
    cmd = _LAUNCH_CMD.format(shell_cmd=persists_shell_cmd, user=self._get_user_command_string(), snippet_package=self.package, instrument_options=option_str)
    self._proc = self._run_adb_cmd(cmd)
    # Accumulates raw server stdout lines for error reporting.
    self._server_start_stdout = []
    # First protocol line: version handshake. Only the major version must
    # match; the minor version is informational.
    line = self._read_protocol_line()
    match = re.match('^SNIPPET START, PROTOCOL ([0-9]+) ([0-9]+)$', line)
    if not match or int(match.group(1)) != _PROTOCOL_MAJOR_VERSION:
        raise errors.ServerStartProtocolError(self._device, line)
    # Second protocol line: the device-side port the server listens on.
    line = self._read_protocol_line()
    match = re.match('^SNIPPET SERVING, PORT ([0-9]+)$', line)
    if not match:
        message = _SNIPPET_SERVER_START_ERROR_DEBUG_TIP.format(instrumentation_result=line, server_start_stdout='\n'.join(self._server_start_stdout))
        raise errors.ServerStartProtocolError(self._device, message)
    self.device_port = int(match.group(1))
Starts the server on the remote device. This function starts the snippet server with adb command, checks the protocol version of the server, parses device port from the server output and sets it to self.device_port. Raises: errors.ServerStartProtocolError: if the protocol reported by the server startup process is unknown. errors.ServerStartError: if failed to start the server or process the server output.
github-repos
def __init__(self, name, constants=None):
    """Create a ParsedName instance.

    Args:
        name (Union[str, HumanName]): The name to be parsed (must be
            non-empty and not None).
        constants (:class:`nameparser.config.Constants`): Configuration for
            `HumanName` instantiation. If falsy, the default constants
            stored on :class:`ParsedName` are used.
    """
    constants = constants or ParsedName.constants
    if isinstance(name, HumanName):
        parsed = name
    else:
        parsed = HumanName(name, constants=constants)
    parsed.capitalize()
    self._parsed_name = parsed
Create a ParsedName instance. Args: name (Union[str, HumanName]): The name to be parsed (must be non empty nor None). constants (:class:`nameparser.config.Constants`): Configuration for `HumanName` instantiation. (Can be None, if provided it overwrites the default one generated in :method:`prepare_nameparser_constants`.)
juraj-google-style
def sample(self, bqm, num_reads=10):
    """Give random samples for a binary quadratic model.

    Variable assignments are chosen by coin flip.

    Args:
        bqm (:obj:`.BinaryQuadraticModel`): Binary quadratic model to be
            sampled from.
        num_reads (int, optional, default=10): Number of reads.

    Returns:
        :obj:`.SampleSet`
    """
    values = tuple(bqm.vartype.value)
    pairs = []
    for _ in range(num_reads):
        assignment = {v: choice(values) for v in bqm.linear}
        pairs.append((assignment, bqm.energy(assignment)))
    (samples, energies) = zip(*pairs)
    return SampleSet.from_samples(samples, bqm.vartype, energies)
Give random samples for a binary quadratic model. Variable assignments are chosen by coin flip. Args: bqm (:obj:`.BinaryQuadraticModel`): Binary quadratic model to be sampled from. num_reads (int, optional, default=10): Number of reads. Returns: :obj:`.SampleSet`
codesearchnet
def get_graph_token_from_msi():
    """Get a Microsoft Graph access token using Cloud Shell's MSI_ENDPOINT.

    Notes:
        The auth token returned by this function is not an Azure auth
        token. Use it for querying the Microsoft Graph API. This function
        only works in an Azure cloud shell or virtual machine.

    Returns:
        A Microsoft Graph authentication token string, or None when not
        running in an Azure Cloud Shell environment.
    """
    if (('ACC_CLOUD' in os.environ) and ('MSI_ENDPOINT' in os.environ)):
        endpoint = os.environ['MSI_ENDPOINT']
    else:
        return None
    headers = {'Metadata': 'true'}
    # The original line was corrupted mid-string; the MSI resource for
    # Microsoft Graph tokens is the Graph API root URI.
    body = {'resource': 'https://graph.microsoft.com/'}
    ret = requests.post(endpoint, headers=headers, data=body)
    return ret.json()['access_token']
get a Microsoft Graph access token using Azure Cloud Shell's MSI_ENDPOINT. Notes: The auth token returned by this function is not an Azure auth token. Use it for querying the Microsoft Graph API. This function only works in an Azure cloud shell or virtual machine. Returns: A Microsoft Graph authentication token string.
codesearchnet
def empty(shape, dtype=None, **kwargs):
    """Create an array of given shape and type, without initializing entries.

    Args:
        shape (sequence of ints): 2D shape of the array.
        dtype (data-type, optional): Desired data-type for the array.
        **kwargs (optional): Other arguments of the array (*coords, attrs,
            and name).

    Returns:
        array (decode.array): Decode array without initializing entries.
    """
    uninitialized = np.empty(shape, dtype)
    return dc.array(uninitialized, **kwargs)
Create an array of given shape and type, without initializing entries. Args: shape (sequence of ints): 2D shape of the array. dtype (data-type, optional): Desired data-type for the array. kwargs (optional): Other arguments of the array (*coords, attrs, and name). Returns: array (decode.array): Decode array without initializing entries.
codesearchnet
def year_month_day_to_ordinal(year, month, day):
    """Calculates ordinals Tensor given years, months and dates.

    Uses the standard civil-calendar algorithm (Hinnant's `days_from_civil`)
    on a computational calendar that starts on 1 March, so leap days fall at
    the end of a year.

    Args:
        year: Tensor of int32 type. Elements should be positive.
        month: Tensor of int32 type of same shape as `year`. Elements should
            be in range `[1, 12]`.
        day: Tensor of int32 type of same shape as `year`. Elements should be
            in range `[1, 31]` and represent valid dates together with
            corresponding elements of `month` and `year` Tensors.

    Returns:
        Tensor of int32 type. Each element is the number of days since
        1 Jan 0001 (1 Jan 0001 has `ordinal = 1`).
    """
    with tf.compat.v1.name_scope(None, 'ymd2o', [year, month, day]):
        year = tf.convert_to_tensor(year, tf.int32, name='year')
        month = tf.convert_to_tensor(month, tf.int32, name='month')
        day = tf.convert_to_tensor(day, tf.int32, name='day')
        # Shift to a calendar that starts on 1 March: Jan/Feb belong to the
        # previous computational year, and months are renumbered 0..11
        # starting at March.
        year -= tf.compat.v2.where(month <= 2, 1, 0)
        month += tf.compat.v2.where(month > 2, -3, 9)
        # An era is a 400-year cycle with a fixed number of days.
        era = year // _YEARS_IN_ERA
        year_of_era = year % _YEARS_IN_ERA
        day_of_year = _days_in_year_before_month(month) + day - 1
        # Original code dropped `day_of_year` and the leap-day corrections;
        # restore the standard formula: 365*yoe + yoe/4 - yoe/100 + doy.
        day_of_era = (year_of_era * _DAYS_IN_YEAR + year_of_era // 4 -
                      year_of_era // 100 + day_of_year)
        return era * _DAYS_IN_ERA + day_of_era + _ORDINAL_OF_1_3_0000
Calculates ordinals Tensor given years, months and dates. Args: year: Tensor of int32 type. Elements should be positive. month: Tensor of int32 type of same shape as `year`. Elements should be in range `[1, 12]`. day: Tensor of int32 type of same shape as `year`. Elements should be in range `[1, 31]` and represent valid dates together with corresponding elements of `month` and `year` Tensors. Returns: Tensor of int32 type. Each element is number of days since 1 Jan 0001. 1 Jan 0001 has `ordinal = 1`.
github-repos
def get_sentence(self, offset: int) -> 'BioCSentence | None':
    """Gets the sentence with the specified offset.

    The return annotation is quoted: the original ``BioCSentence or None``
    was evaluated eagerly (raising NameError when the type is not imported)
    and ``X or None`` is just ``X``, which misstates the contract.

    Args:
        offset: sentence offset.

    Returns:
        The sentence whose ``offset`` matches, or ``None`` if no sentence
        has that offset.
    """
    for sentence in self.sentences:
        if sentence.offset == offset:
            return sentence
    return None
Gets sentence with specified offset Args: offset: sentence offset Return: the sentence with specified offset
juraj-google-style
def extract_string_pairs_in_dir(directory, exclude_dirs, special_ui_components_prefix):
    """Extract string pairs from the xib/storyboard files in a directory.

    Args:
        directory (str): The path to the directory.
        exclude_dirs (str): A list of directories to exclude from extraction.
        special_ui_components_prefix (str): If not None, extraction will not
            warn about internationalized UI components with this class prefix.

    Returns:
        list: The extracted string pairs for all IB files in the directory.
    """
    pairs = []
    for ib_file_path in find_files(directory, [".xib", ".storyboard"], exclude_dirs):
        pairs += extract_string_pairs_in_ib_file(ib_file_path, special_ui_components_prefix)
    return pairs
Extract string pairs in the given directory's xib/storyboard files. Args: directory (str): The path to the directory. exclude_dirs (str): A list of directories to exclude from extraction. special_ui_components_prefix (str): If not None, extraction will not warn about internationalized UI components with this class prefix. Returns: list: The extracted string pairs for all IB files in the directory.
juraj-google-style
def remove(self, force=False):
    """Remove the plugin from the server.

    Args:
        force (bool): Remove even if the plugin is enabled. Default: False

    Raises:
        :py:class:`docker.errors.APIError`
            If the server returns an error.
    """
    api = self.client.api
    return api.remove_plugin(self.name, force=force)
Remove the plugin from the server. Args: force (bool): Remove even if the plugin is enabled. Default: False Raises: :py:class:`docker.errors.APIError` If the server returns an error.
juraj-google-style
def add_scope(scope=None, scope_fn=None):
    """Return a decorator which adds a TF name/variable scope to a function.

    The decorated function accepts an additional ``name`` keyword, which
    overrides the scope chosen when the decorator was created.

    Args:
        scope (str): name of the scope. If None, the function name is used.
        scope_fn (fct): Either tf.name_scope or tf.variable_scope

    Returns:
        fct: the add_scope decorator
    """
    def decorator(func):
        @functools.wraps(func)
        def wrapped(*args, **kwargs):
            # Per-call "name" beats the decorator-level scope, which beats
            # the function's own name.
            call_name = kwargs.pop("name", None)
            with scope_fn(call_name or scope or func.__name__):
                return func(*args, **kwargs)
        return wrapped
    return decorator
Return a decorator which add a TF name/variable scope to a function. Note that the function returned by the decorator accept an additional 'name' parameter, which can overwrite the name scope given when the function is created. Args: scope (str): name of the scope. If None, the function name is used. scope_fn (fct): Either tf.name_scope or tf.variable_scope Returns: fct: the add_scope decorator
juraj-google-style
def _process_new(self, feed_item):
    """Creates a new placement group DCM object from a Bulkdozer feed item.

    This function only builds the object; insertion is performed later by
    the BaseDAO object.

    Args:
        feed_item: Feed item representing the placement group from the
            Bulkdozer feed.

    Returns:
        A placement group object ready to be inserted in DCM through the API.
    """
    campaign = self.campaign_dao.get(feed_item, required=True)
    # Write the resolved campaign back into the feed item so later steps
    # see the canonical id/name.
    feed_item[FieldMap.CAMPAIGN_ID] = campaign['id']
    feed_item[FieldMap.CAMPAIGN_NAME] = campaign['name']
    pricing_schedule = {
        'startDate': feed_item.get(FieldMap.PLACEMENT_GROUP_START_DATE, None),
        'endDate': feed_item.get(FieldMap.PLACEMENT_GROUP_END_DATE, None),
        'pricingType': feed_item.get(FieldMap.PLACEMENT_GROUP_PRICING_TYPE, None),
    }
    return {
        'advertiserId': feed_item.get(FieldMap.ADVERTISER_ID, None),
        'campaignId': campaign['id'] if campaign else None,
        'siteId': feed_item.get(FieldMap.SITE_ID, None),
        'name': feed_item.get(FieldMap.PLACEMENT_GROUP_NAME, None),
        'placementGroupType': feed_item.get(FieldMap.PLACEMENT_GROUP_TYPE, None),
        'pricingSchedule': pricing_schedule,
    }
Creates a new placement group DCM object from a feed item representing a placement group from the Bulkdozer feed. This function simply creates the object to be inserted later by the BaseDAO object. Args: feed_item: Feed item representing the placement group from the Bulkdozer feed. Returns: A placement group object ready to be inserted in DCM through the API.
github-repos
def _ParseHeader(self, parser_mediator, structure):
    """Parses an XChat log header line.

    Args:
        parser_mediator (ParserMediator): mediates interactions between
            parsers and other components, such as storage and dfvfs.
        structure (pyparsing.ParseResults): structure of tokens derived
            from a line of a text file.
    """
    (_, month, day, hours, minutes, seconds, year) = structure.date_time
    # Month arrives as a name; map it to its number (0 if unrecognized).
    month = timelib.MONTH_DICT.get(month.lower(), 0)
    time_elements_tuple = (year, month, day, hours, minutes, seconds)
    try:
        date_time = dfdatetime_time_elements.TimeElements(time_elements_tuple=time_elements_tuple)
        date_time.is_local_time = True
    except ValueError:
        parser_mediator.ProduceExtractionWarning('invalid date time value: {0!s}'.format(structure.date_time))
        return
    # Remember the header month so body lines without a year can detect
    # year rollover.
    self._last_month = month
    event_data = XChatLogEventData()
    if (structure.log_action[0] == 'BEGIN'):
        self._xchat_year = year
        event_data.text = 'XChat start logging'
    elif (structure.log_action[0] == 'END'):
        self._xchat_year = None
        event_data.text = 'XChat end logging'
    else:
        logger.debug('Unknown log action: {0:s}.'.format(' '.join(structure.log_action)))
        return
    event = time_events.DateTimeValuesEvent(date_time, definitions.TIME_DESCRIPTION_ADDED, time_zone=parser_mediator.timezone)
    parser_mediator.ProduceEventWithEventData(event, event_data)
Parses a log header. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. structure (pyparsing.ParseResults): structure of tokens derived from a line of a text file.
codesearchnet
def _Open(self, path_spec, mode='rb'):
    """Opens the file system defined by path specification.

    Args:
        path_spec (PathSpec): a path specification.
        mode (Optional[str]): file access mode. The default is 'rb' which
            represents read-only binary.

    Raises:
        AccessError: if the access to open the file was denied.
        IOError: if the file system could not be opened.
        PathSpecError: if the path specification is incorrect.
        ValueError: if the path specification is invalid.
    """
    if not path_spec.HasParent():
        raise errors.PathSpecError('Unsupported path specification without parent.')
    # Make any credentials carried by the path spec available via the
    # global key chain before inspecting the encryption settings.
    resolver.Resolver.key_chain.ExtractCredentialsFromPathSpec(path_spec)
    method = getattr(path_spec, 'encryption_method', None)
    if not method:
        raise errors.PathSpecError('Unsupported path specification without encryption method.')
    self._encryption_method = method
Opens the file system defined by path specification. Args: path_spec (PathSpec): a path specification. mode (Optional[str]): file access mode. The default is 'rb' which represents read-only binary. Raises: AccessError: if the access to open the file was denied. IOError: if the file system could not be opened. PathSpecError: if the path specification is incorrect. ValueError: if the path specification is invalid.
juraj-google-style
def sample(self, signum, frame):
    """Samples the current stack and records the result in self._stats.

    Args:
        signum: Signal that activates handler.
        frame: Frame on top of the stack when signal is handled.
    """
    call_stack = []
    current = frame
    # Walk frames from the signal point down to (but excluding) the base
    # frame of the profiled code.
    while current and current != self.base_frame:
        code = current.f_code
        call_stack.append((code.co_name, code.co_filename, code.co_firstlineno))
        current = current.f_back
    self._stats[tuple(call_stack)] += 1
    # Re-arm the profiling timer for the next sample.
    signal.setitimer(signal.ITIMER_PROF, _SAMPLE_INTERVAL)
Samples current stack and adds result in self._stats. Args: signum: Signal that activates handler. frame: Frame on top of the stack when signal is handled.
juraj-google-style
def wind_speed(self, value=999.0):
    """Corresponds to IDD Field `wind_speed`.

    Args:
        value (float): value for IDD Field `wind_speed`. Unit: m/s.
            Must satisfy 0.0 <= value <= 40.0. Missing value: 999.0.
            If `value` is None it will not be checked against the
            specification and is assumed to be a missing value.

    Raises:
        ValueError: if `value` is not a valid value
    """
    if value is None:
        # None is treated as "missing" and stored unchecked.
        self._wind_speed = None
        return
    try:
        parsed = float(value)
    except ValueError:
        raise ValueError('value {} need to be of type float '
                         'for field `wind_speed`'.format(value))
    if parsed < 0.0:
        raise ValueError('value need to be greater or equal 0.0 '
                         'for field `wind_speed`')
    if parsed > 40.0:
        raise ValueError('value need to be smaller 40.0 '
                         'for field `wind_speed`')
    self._wind_speed = parsed
Corresponds to IDD Field `wind_speed` Args: value (float): value for IDD Field `wind_speed` Unit: m/s value >= 0.0 value <= 40.0 Missing value: 999.0 if `value` is None it will not be checked against the specification and is assumed to be a missing value Raises: ValueError: if `value` is not a valid value
juraj-google-style
def distance(p_a, p_b):
    """Euclidean distance between two points, in degrees.

    Args:
        p_a (:obj:`Point`)
        p_b (:obj:`Point`)

    Returns:
        float: distance, in degrees
    """
    d_lat = p_a.lat - p_b.lat
    d_lon = p_a.lon - p_b.lon
    return sqrt(d_lat ** 2 + d_lon ** 2)
Euclidean distance, between two points Args: p_a (:obj:`Point`) p_b (:obj:`Point`) Returns: float: distance, in degrees
juraj-google-style
def command(self, verb, args=None):
    """Call a command on the server.

    If the user has not authenticated then authentication will be done
    as part of calling the command on the server (on a 480 reply the
    method logs in and retries the command once).

    Args:
        verb: The verb of the command to call.
        args: The arguments of the command as a string (default None).

    Returns:
        A tuple of status code (as an integer) and status message.

    Note:
        You can run raw commands by supplying the full command (including
        args) in the verb, but don't issue more than one command at a time
        by adding newlines to the verb.
    """
    if self.__generating:
        raise NNTPSyncError('Command issued while a generator is active')
    cmd = verb
    if args:
        cmd += (' ' + args)
    cmd += '\r\n'
    self.socket.sendall(cmd)
    try:
        (code, message) = self.status()
    except NNTPTemporaryError as e:
        if (e.code() != 480):
            raise e
        # 480: authentication required. Log in, then retry the original
        # command exactly once via recursion.
        (code, message) = self.command('AUTHINFO USER', self.username)
        if (code == 381):
            (code, message) = self.command('AUTHINFO PASS', self.password)
        if (code != 281):
            raise NNTPReplyError(code, message)
        (code, message) = self.command(verb, args)
    return (code, message)
Call a command on the server. If the user has not authenticated then authentication will be done as part of calling the command on the server. For commands that don't return a status message the status message will default to an empty string. Args: verb: The verb of the command to call. args: The arguments of the command as a string (default None). Returns: A tuple of status code (as an integer) and status message. Note: You can run raw commands by supplying the full command (including args) in the verb. Note: Although it is possible you shouldn't issue more than one command at a time by adding newlines to the verb as it will most likely lead to undesirable results.
codesearchnet
def _reduce_pseudo_inverse(nodes):
    """Performs degree-reduction for a Bezier curve.

    Does so by using the pseudo-inverse of the degree elevation operator
    (which is overdetermined).

    Args:
        nodes (numpy.ndarray): The nodes in the curve.

    Returns:
        numpy.ndarray: The reduced nodes.

    Raises:
        .UnsupportedDegree: If the degree is not 1, 2, 3 or 4.
    """
    (_, num_nodes) = np.shape(nodes)
    # Precomputed reduction operators and denominators per node count.
    operators = {
        2: (_REDUCTION0, _REDUCTION_DENOM0),
        3: (_REDUCTION1, _REDUCTION_DENOM1),
        4: (_REDUCTION2, _REDUCTION_DENOM2),
        5: (_REDUCTION3, _REDUCTION_DENOM3),
    }
    pair = operators.get(num_nodes)
    if pair is None:
        raise _helpers.UnsupportedDegree((num_nodes - 1), supported=(1, 2, 3, 4))
    (reduction, denom) = pair
    result = _helpers.matrix_product(nodes, reduction)
    result /= denom
    return result
Performs degree-reduction for a B |eacute| zier curve. Does so by using the pseudo-inverse of the degree elevation operator (which is overdetermined). .. note:: There is also a Fortran implementation of this function, which will be used if it can be built. Args: nodes (numpy.ndarray): The nodes in the curve. Returns: numpy.ndarray: The reduced nodes. Raises: .UnsupportedDegree: If the degree is not 1, 2, 3 or 4.
codesearchnet
def log10(x):
    """Return the base 10 logarithm of the input tensor, element-wise.

    Args:
        x: Input tensor.

    Returns:
        Output tensor, element-wise base 10 logarithm of `x`.
    """
    # Symbolic inputs are routed through the op object; eager inputs go
    # straight to the backend implementation.
    is_symbolic = any_symbolic_tensors((x,))
    return Log10().symbolic_call(x) if is_symbolic else backend.numpy.log10(x)
Return the base 10 logarithm of the input tensor, element-wise. Args: x: Input tensor. Returns: Output tensor, element-wise base 10 logarithm of `x`.
github-repos
def create_border(self, border_style_type):
    """Create a new MenuBorderStyle instance for the given border style type.

    Args:
        border_style_type (int): an integer value from
            :obj:`MenuBorderStyleType`.

    Returns:
        :obj:`MenuBorderStyle`: a new MenuBorderStyle instance of the
            specified style; unrecognized types fall back to ASCII.
    """
    factories = {
        MenuBorderStyleType.ASCII_BORDER: self.create_ascii_border,
        MenuBorderStyleType.LIGHT_BORDER: self.create_light_border,
        MenuBorderStyleType.HEAVY_BORDER: self.create_heavy_border,
        MenuBorderStyleType.DOUBLE_LINE_BORDER: self.create_doubleline_border,
        MenuBorderStyleType.HEAVY_OUTER_LIGHT_INNER_BORDER: self.create_heavy_outer_light_inner_border,
        MenuBorderStyleType.DOUBLE_LINE_OUTER_LIGHT_INNER_BORDER: self.create_doubleline_outer_light_inner_border,
    }
    factory = factories.get(border_style_type)
    if factory is None:
        self.logger.info('Unrecognized border style type: {}. Defaulting to ASCII.'.format(border_style_type))
        factory = self.create_ascii_border
    return factory()
Create a new MenuBorderStyle instance based on the given border style type. Args: border_style_type (int): an integer value from :obj:`MenuBorderStyleType`. Returns: :obj:`MenuBorderStyle`: a new MenuBorderStyle instance of the specified style.
juraj-google-style
def all_folders(
        path_name, keyword='', has_date=False, date_fmt=DATE_FMT
) -> list:
    """Search all folders with criteria.

    Returned list will be sorted by last modified.

    Args:
        path_name: full path name
        keyword: keyword to search
        has_date: whether has date in file name (default False)
        date_fmt: date format to check for has_date parameter

    Returns:
        list: all folder names fulfilling the criteria
    """
    if not os.path.exists(path=path_name):
        return []
    normalized = path_name.replace('\\', '/')
    if keyword:
        # Glob with the keyword, normalizing separators and skipping
        # temporary entries whose basename starts with '~'.
        candidates = [
            f.replace('\\', '/')
            for f in glob.iglob(f'{normalized}/*{keyword}*')
            if os.path.isdir(f)
            and (f.replace('\\', '/').split('/')[-1][0] != '~')
        ]
    else:
        candidates = [
            f'{normalized}/{entry}'
            for entry in os.listdir(path=normalized)
            if os.path.isdir(f'{normalized}/{entry}') and (entry[0] != '~')
        ]
    folders = sort_by_modified(candidates)
    if has_date:
        folders = filter_by_dates(folders, date_fmt=date_fmt)
    return folders
Search all folders with criteria Returned list will be sorted by last modified Args: path_name: full path name keyword: keyword to search has_date: whether has date in file name (default False) date_fmt: date format to check for has_date parameter Returns: list: all folder names fulfilled criteria
juraj-google-style
def ToByteArray(self):
    """Serialize self and get the byte stream.

    Returns:
        bytes: serialized object.
    """
    stream = StreamManager.GetStream()
    writer = BinaryWriter(stream)
    self.Serialize(writer)
    serialized = stream.ToArray()
    # Return the pooled stream before handing back the bytes.
    StreamManager.ReleaseStream(stream)
    return serialized
Serialize self and get the byte stream. Returns: bytes: serialized object.
codesearchnet
def get(self, key, namespace='default', default=None, as_object=False):
    """Return the value of a key/namespace pair.

    Args:
        key (str): Key to return
        namespace (str): Namespace of the key
        default (:obj:`Any`): Optional default value to return if the key
            was not found
        as_object (bool): If `True` returns the value as a
            :py:obj:`ConfigItem` object instead of its primitive type

    Returns:
        Requested value if found, else default value or `None`
    """
    if namespace in self.__data and key in self.__data[namespace]:
        if as_object:
            # Fetch the backing database record instead of the cached value.
            return db.ConfigItem.find_one(
                ConfigItem.namespace_prefix == namespace,
                ConfigItem.key == key
            )
        return self.__data[namespace][key]
    return default
Return the value of a key/namespace pair Args: key (str): Key to return namespace (str): Namespace of the key default (:obj:`Any`): Optional default value to return, if key was not found as_object (bool): If `True` returns the object as a :py:obj:`ConfigItem` object instead of its primitive type Returns: Requested value if found, else default value or `None`
juraj-google-style
def _GenerateSummary(self): items = [] if self._notices: items.append(('notices: %d' % self._notice_count)) if self._dataset_errors: items.append(('errors: %d' % self._error_count)) if self._dataset_warnings: items.append(('warnings: %d' % self._warning_count)) if items: return ('<p><span class="fail">%s</span></p>' % '<br>'.join(items)) else: return '<p><span class="pass">feeds merged successfully</span></p>'
Generate a summary of the warnings and errors. Returns: The generated HTML as a string.
codesearchnet
def getHostCertPath(self, name):
    """Gets the path to a host certificate.

    Args:
        name (str): The name of the host keypair.

    Examples:
        Get the path to the host certificate for the host "myhost":

            mypath = cdir.getHostCertPath('myhost')

    Returns:
        str: The path if it exists, otherwise None.
    """
    cert_path = s_common.genpath(self.certdir, 'hosts', '%s.crt' % name)
    return cert_path if os.path.isfile(cert_path) else None
Gets the path to a host certificate. Args: name (str): The name of the host keypair. Examples: Get the path to the host certificate for the host "myhost": mypath = cdir.getHostCertPath('myhost') Returns: str: The path if exists.
juraj-google-style
def query_put_bounders(query, partition_column, start, end):
    """Put bounders in the query.

    Args:
        query: SQL query string
        partition_column: partition_column name
        start: lower_bound
        end: upper_bound

    Returns:
        Query with bounders
    """
    bounders = " WHERE TMP_TABLE.{col} >= {lo} AND TMP_TABLE.{col} <= {hi}".format(
        col=partition_column, lo=start, hi=end
    )
    return "SELECT * FROM ({q}) AS TMP_TABLE {w}".format(q=query, w=bounders)
Put bounders in the query Args: query: SQL query string partition_column: partition_column name start: lower_bound end: upper_bound Returns: Query with bounders
juraj-google-style
def stop(save=True):
    """Stops the current profiling session.

    The profiler session will be stopped and profile results can be saved.

    Args:
        save: An optional variable to save the results to TensorBoard.
            Default True.

    Raises:
        UnavailableError: If there is no active profiling session.
    """
    global _profiler
    with _profiler_lock:
        if _profiler is None:
            raise errors.UnavailableError(None, None, 'Cannot export profiling results. No profiler is running.')
        if save:
            try:
                _profiler.export_to_tb()
            except Exception:
                # Clear the session before propagating so a failed export
                # does not leave a half-stopped profiler behind.
                _profiler = None
                raise
        _profiler = None
Stops the current profiling session. The profiler session will be stopped and profile results can be saved. Args: save: An optional variable to save the results to TensorBoard. Default True. Raises: UnavailableError: If there is no active profiling session.
github-repos
def infer_transportation_mode(self, clf, min_time):
    """In-place transportation mode inferring of every segment.

    Args:
        clf: classifier used for the inference.
        min_time: minimum time threshold forwarded to each segment.

    Returns:
        This track, to allow chaining.
    """
    for seg in self.segments:
        seg.infer_transportation_mode(clf, min_time)
    return self
In-place transportation mode inferring of segments Returns: This track
codesearchnet
def _do_export(self, remote_function):
    """Pickle a remote function and export it to redis.

    Args:
        remote_function: the RemoteFunction object.
    """
    if self._worker.load_code_from_local:
        return
    function = remote_function._function
    # Remember whether the function's own name is bound in its globals so
    # it can be restored exactly after pickling.
    function_name_global_valid = (function.__name__ in function.__globals__)
    function_name_global_value = function.__globals__.get(function.__name__)
    # Temporarily rebind the name to the RemoteFunction so recursive calls
    # inside the pickled function go through the remote wrapper.
    if (not is_cython(function)):
        function.__globals__[function.__name__] = remote_function
    try:
        pickled_function = pickle.dumps(function)
    finally:
        # Restore the original global binding (or remove the temporary one).
        if function_name_global_valid:
            function.__globals__[function.__name__] = function_name_global_value
        else:
            del function.__globals__[function.__name__]
    check_oversized_pickle(pickled_function, remote_function._function_name, 'remote function', self._worker)
    key = (((b'RemoteFunction:' + self._worker.task_driver_id.binary()) + b':') + remote_function._function_descriptor.function_id.binary())
    self._worker.redis_client.hmset(key, {'driver_id': self._worker.task_driver_id.binary(), 'function_id': remote_function._function_descriptor.function_id.binary(), 'name': remote_function._function_name, 'module': function.__module__, 'function': pickled_function, 'max_calls': remote_function._max_calls})
    self._worker.redis_client.rpush('Exports', key)
Pickle a remote function and export it to redis. Args: remote_function: the RemoteFunction object.
codesearchnet
def GetProcessedTaskIdentifiers(self):
    """Identifiers for tasks which have been processed.

    Returns:
        list[str]: task identifiers that are processed.

    Raises:
        IOError: if the storage type is not supported or if the temporary
            path for the task storage does not exist.
        OSError: if the storage type is not supported or if the temporary
            path for the task storage does not exist.
    """
    if (self._storage_type != definitions.STORAGE_TYPE_SESSION):
        raise IOError('Unsupported storage type.')
    if (not self._processed_task_storage_path):
        raise IOError('Missing processed task storage path.')
    filenames = os.listdir(self._processed_task_storage_path)
    # Task identifiers are the storage file names without the extension.
    return [name.replace('.plaso', '') for name in filenames]
Identifiers for tasks which have been processed. Returns: list[str]: task identifiers that are processed. Raises: IOError: if the storage type is not supported or if the temporary path for the task storage does not exist. OSError: if the storage type is not supported or if the temporary path for the task storage does not exist.
codesearchnet
def resolve_parameters(val: Any, param_resolver: 'cirq.ParamResolverOrSimilarType') -> Any:
    """Resolves symbol parameters in the effect using the param resolver.

    This function will use the `_resolve_parameters_` magic method of `val`
    to resolve any Symbols with concrete values from the given parameter
    resolver.

    Args:
        val: The object to resolve (e.g. the gate, operation, etc)
        param_resolver: the object to use for resolving all symbols

    Returns:
        a gate or operation of the same type, but with all Symbols replaced
        with floats according to the given ParamResolver. If `val` has no
        `_resolve_parameters_` method or if it returns NotImplemented,
        `val` itself is returned.
    """
    if (not param_resolver):
        return val
    from cirq import ParamResolver
    resolver = ParamResolver(param_resolver)
    if isinstance(val, sympy.Basic):
        return resolver.value_of(val)
    getter = getattr(val, '_resolve_parameters_', None)
    if getter is not None:
        resolved = getter(resolver)
        if resolved is not NotImplemented:
            return resolved
    return val
Resolves symbol parameters in the effect using the param resolver. This function will use the `_resolve_parameters_` magic method of `val` to resolve any Symbols with concrete values from the given parameter resolver. Args: val: The object to resolve (e.g. the gate, operation, etc) param_resolver: the object to use for resolving all symbols Returns: a gate or operation of the same type, but with all Symbols replaced with floats according to the given ParamResolver. If `val` has no `_resolve_parameters_` method or if it returns NotImplemented, `val` itself is returned.
codesearchnet
def _parse_configs(self, config): for config_dict in config: label = config_dict.keys()[0] cfg = config_dict[label] dbpath = cfg['dbpath'] pattern = self._parse_dbpath(dbpath) read_preference = cfg.get('read_preference', 'primary').upper() read_preference = self._get_read_preference(read_preference) cluster_config = {'params': {'host': cfg['host'], 'port': cfg['port'], 'read_preference': read_preference, 'replicaSet': cfg.get('replicaSet')}, 'pattern': pattern, 'label': label} self._clusters.append(cluster_config)
Builds a dict with information to connect to Clusters. Parses the list of configuration dictionaries passed by the user and builds an internal dict (_clusters) that holds information for creating Clients connecting to Clusters and matching database names. Args: config: A list of dictionaries containing connecting and identification information about Clusters. A dict has the following structure: {label: {host, port, read_preference, dbpath}}. Raises: Exception('No configuration provided'): no configuration provided.
codesearchnet
def get_accounts_for_service(cls, service_type):
    """Get a list of accounts for a given music service.

    Args:
        service_type (str): The service_type to use.

    Returns:
        list: A list of `Account` instances.
    """
    accounts = cls.get_accounts().values()
    return [account for account in accounts
            if account.service_type == service_type]
Get a list of accounts for a given music service. Args: service_type (str): The service_type to use. Returns: list: A list of `Account` instances.
codesearchnet
def from_string(contents):
    """Creates GaussianInput from a string.

    Args:
        contents: String representing a Gaussian input file.

    Returns:
        GaussianInput object
    """
    lines = [l.strip() for l in contents.split('\n')]
    # Link0 commands look like "%mem=...", "%chk=...".
    link0_patt = re.compile('^(%.+)\\s*=\\s*(.+)')
    link0_dict = {}
    for (i, l) in enumerate(lines):
        if link0_patt.match(l):
            m = link0_patt.match(l)
            link0_dict[m.group(1).strip('=')] = m.group(2)
    # The route card starts with "#" (optionally #s/#p/#n); the original
    # line was garbled by extraction ("^ route = ''") — restored here.
    route_patt = re.compile(r'^#[sSpPnN]*.*')
    route = ''
    route_index = None
    for (i, l) in enumerate(lines):
        if route_patt.match(l):
            route += (' ' + l)
            route_index = i
        elif (((l == '') or l.isspace()) and route_index):
            # A blank line terminates a (possibly multi-line) route card.
            break
    (functional, basis_set, route_paras, dieze_tag) = read_route_line(route)
    # Title section starts two lines after the route card and runs until a
    # blank line.
    ind = 2
    title = []
    while lines[(route_index + ind)].strip():
        title.append(lines[(route_index + ind)].strip())
        ind += 1
    title = ' '.join(title)
    # Charge and spin multiplicity line follows the blank line after title.
    ind += 1
    toks = re.split('[,\\s]+', lines[(route_index + ind)])
    charge = int(toks[0])
    spin_mult = int(toks[1])
    # Remaining lines: molecule coordinates, then (after a second blank
    # line) optional "key=value" input parameters.
    coord_lines = []
    spaces = 0
    input_paras = {}
    ind += 1
    for i in range((route_index + ind), len(lines)):
        if (lines[i].strip() == ''):
            spaces += 1
        if (spaces >= 2):
            d = lines[i].split('=')
            if (len(d) == 2):
                input_paras[d[0]] = d[1]
        else:
            coord_lines.append(lines[i].strip())
    mol = GaussianInput._parse_coords(coord_lines)
    mol.set_charge_and_spin(charge, spin_mult)
    return GaussianInput(mol, charge=charge, spin_multiplicity=spin_mult, title=title, functional=functional, basis_set=basis_set, route_parameters=route_paras, input_parameters=input_paras, link0_parameters=link0_dict, dieze_tag=dieze_tag)
Creates GaussianInput from a string. Args: contents: String representing an Gaussian input file. Returns: GaussianInput object
codesearchnet
def get_mapreduce_yaml(parse=parse_mapreduce_yaml):
    """Locates mapreduce.yaml, loads and parses its info.

    Args:
        parse: parsing function, overridable for testing.

    Returns:
        MapReduceYaml object.

    Raises:
        errors.BadYamlError: when contents is not a valid mapreduce.yaml
            file.
        errors.MissingYamlError: when the file is missing.
    """
    yaml_path = find_mapreduce_yaml()
    if not yaml_path:
        raise errors.MissingYamlError()
    with open(yaml_path) as yaml_file:
        return parse(yaml_file.read())
Locates mapreduce.yaml, loads and parses its info. Args: parse: Used for testing. Returns: MapReduceYaml object. Raises: errors.BadYamlError: when contents is not a valid mapreduce.yaml file or the file is missing.
codesearchnet
def get_unbound_arg_names(arg_names, arg_binding_keys):
    """Determines which args have no arg binding keys.

    Args:
        arg_names: a sequence of the names of possibly bound args
        arg_binding_keys: a sequence of ArgBindingKey each of whose arg
            names is in arg_names

    Returns:
        a sequence of arg names that is a (possibly empty, possibly
        non-proper) subset of arg_names
    """
    bound = {binding_key._arg_name for binding_key in arg_binding_keys}
    return [name for name in arg_names if name not in bound]
Determines which args have no arg binding keys. Args: arg_names: a sequence of the names of possibly bound args arg_binding_keys: a sequence of ArgBindingKey each of whose arg names is in arg_names Returns: a sequence of arg names that is a (possibly empty, possibly non-proper) subset of arg_names
codesearchnet
def remove(path, dir_fd=None):
    """Remove a file. Equivalent to "os.remove" and "os.unlink".

    Args:
        path (path-like object): Path or URL.
        dir_fd: directory descriptors; see the os.remove() description for
            how it is interpreted. Not supported on cloud storage objects.
    """
    storage = get_instance(path)
    # Locators and paths with a trailing slash are directories, which
    # cannot be removed with this function.
    if storage.is_locator(path) or path[-1] == '/':
        raise is_a_directory_error("Is a directory: '%s'" % path)
    storage.remove(path)
Remove a file. Equivalent to "os.remove" and "os.unlink". Args: path (path-like object): Path or URL. dir_fd: directory descriptors; see the os.remove() description for how it is interpreted. Not supported on cloud storage objects.
juraj-google-style
def color_get_hsv(c: Tuple[int, int, int]) -> Tuple[float, float, float]:
    """Return the (hue, saturation, value) decomposition of a color.

    Args:
        c (Union[Tuple[int, int, int], Sequence[int]]): An (r, g, b)
            sequence or Color instance.

    Returns:
        Tuple[float, float, float]: (hue, saturation, value) as reported
        by libtcod.  Saturation and value are presumably in 0..1 and hue
        presumably in degrees -- confirm against the libtcod docs.
    """
    # Allocate a 3-float C array and let libtcod fill it in place via
    # pointers to its three elements.
    hsv = ffi.new("float [3]")
    lib.TCOD_color_get_HSV(c, hsv, hsv + 1, hsv + 2)
    return hsv[0], hsv[1], hsv[2]
Return the (hue, saturation, value) of a color. Args: c (Union[Tuple[int, int, int], Sequence[int]]): An (r, g, b) sequence or Color instance. Returns: Tuple[float, float, float]: A tuple with (hue, saturation, value) values, from 0 to 1.
juraj-google-style
def times(coro, limit=1, raise_exception=False, return_value=None):
    """Wrap a coroutine function so it executes at most ``limit`` times.

    Once the limit is exhausted the last execution's result is returned,
    or ``return_value`` if one was given, or a RuntimeError is raised if
    ``raise_exception`` is True. Can be used as a decorator.

    Arguments:
        coro (coroutinefunction): coroutine function to wrap.
        limit (int): max number of executions (minimum 1).
        raise_exception (bool): raise an exception once the limit is
            exceeded.
        return_value (mixed): value to return once the limit is exceeded.

    Raises:
        TypeError: if coro is not a coroutine function.
        RuntimeError: on exceeded executions, if raise_exception is True.

    Returns:
        coroutinefunction
    """
    assert_corofunction(coro=coro)
    limit = max(limit, 1)
    # Remember the original limit for the error message.
    times = limit
    result = None

    # NOTE: rewritten from the @asyncio.coroutine/yield-from style
    # (removed in Python 3.11) to a native "async def"; behaviour is
    # otherwise unchanged.
    async def wrapper(*args, **kw):
        nonlocal limit
        nonlocal result
        if limit == 0:
            if raise_exception:
                raise RuntimeError(ExceptionMessage.format(times))
            # Fixed: compare against None so falsy return_value values
            # (0, '', False) are honoured instead of silently ignored.
            if return_value is not None:
                return return_value
            return result
        limit -= 1
        if return_value is not None:
            # No need to memoize the result when a fixed value will be
            # returned after the limit is reached.
            return await coro(*args, **kw)
        result = await coro(*args, **kw)
        return result
    return wrapper
Wraps a given coroutine function to be executed only a certain amount of times. If the execution limit is exceeded, the last execution return value will be returned as result. You can optionally define a custom return value on exceeded via `return_value` param. This function can be used as decorator. arguments: coro (coroutinefunction): coroutine function to wrap. limit (int): max limit of coroutine executions. raise_exception (bool): raise exception if execution times exceeded. return_value (mixed): value to return when execution times exceeded. Raises: TypeError: if coro argument is not a coroutine function. RuntimeError: if max execution excedeed (optional). Returns: coroutinefunction Usage:: async def mul_2(num): return num * 2 timed = paco.times(mul_2, 3) await timed(2) # => 4 await timed(3) # => 6 await timed(4) # => 8 await timed(5) # ignored! # => 8
codesearchnet
def _parse_stop_words_file(self, path):
    """Load stop words from the given file path.

    The language is taken from the file name (text after the last '-').
    Every word in the file is added to that language's stop-word set.
    A missing file is silently ignored.

    Args:
        path: Path to the stop words file.

    Returns:
        True if the file existed and was loaded, False otherwise.
    """
    if not os.path.isfile(path):
        return False
    self._logger.debug('Loading stop words in %s', path)
    language = path.split('-')[-1]
    words = self.__stop_words.setdefault(language, set())
    with codecs.open(path, 'r', 'UTF-8') as stream:
        for line in stream:
            words.add(line.strip())
    return True
Load stop words from the given path. Parse the stop words file, saving each word found in it in a set for the language of the file. This language is obtained from the file name. If the file doesn't exist, the method will have no effect. Args: path: Path to the stop words file. Returns: A boolean indicating whether the file was loaded.
codesearchnet
def preprocess_frame(frame):
    """Preprocess a video frame.

    1. Converts pixel values from [0, 255] to [-0.5, 0.5].
    2. Applies uniform binning correction (adds uniform noise).

    Args:
        frame: 3-D Tensor representing pixels.

    Returns:
        frame: 3-D Tensor with values in [-0.5, 0.5].
    """
    scaled = common_layers.convert_rgb_to_real(frame)
    centered = scaled - 0.5
    corrected, _ = glow_ops.uniform_binning_correction(centered)
    return corrected
Preprocess frame. 1. Converts [0, 255] to [-0.5, 0.5] 2. Adds uniform noise. Args: frame: 3-D Tensor representing pixels. Returns: frame: 3-D Tensor with values in between [-0.5, 0.5]
codesearchnet
def from_tokenizer(cls, tokenizer: 'PreTrainedTokenizerBase', **kwargs):
    """Initialize a `TFBertTokenizer` from an existing `Tokenizer`.

    Each special-token/config option may be supplied explicitly via
    kwargs; otherwise the value is read from ``tokenizer``.

    Args:
        tokenizer (`PreTrainedTokenizerBase`): The tokenizer to use to
            initialize the `TFBertTokenizer`.
    """
    def _take(key, fallback):
        # Explicit kwarg wins; only read the tokenizer attribute lazily
        # when no override was provided.
        value = kwargs.pop(key, None)
        return fallback() if value is None else value

    do_lower_case = _take('do_lower_case', lambda: tokenizer.do_lower_case)
    cls_token_id = _take('cls_token_id', lambda: tokenizer.cls_token_id)
    sep_token_id = _take('sep_token_id', lambda: tokenizer.sep_token_id)
    pad_token_id = _take('pad_token_id', lambda: tokenizer.pad_token_id)
    # Vocabulary ordered by token id.
    ordered = sorted(tokenizer.get_vocab().items(), key=lambda item: item[1])
    vocab_list = [token for token, _ in ordered]
    return cls(vocab_list=vocab_list, do_lower_case=do_lower_case,
               cls_token_id=cls_token_id, sep_token_id=sep_token_id,
               pad_token_id=pad_token_id, **kwargs)
Initialize a `TFBertTokenizer` from an existing `Tokenizer`. Args: tokenizer (`PreTrainedTokenizerBase`): The tokenizer to use to initialize the `TFBertTokenizer`. Examples: ```python from transformers import AutoTokenizer, TFBertTokenizer tokenizer = AutoTokenizer.from_pretrained("google-bert/bert-base-uncased") tf_tokenizer = TFBertTokenizer.from_tokenizer(tokenizer) ```
github-repos
def get_domain_template(distro, libvirt_ver, **kwargs):
    """Get a rendered Jinja2 libvirt domain template.

    Falls back to the base template when no distro-specific template
    exists.

    Args:
        distro(str): domain distro.
        libvirt_ver(int): libvirt version.
        kwargs(dict): args for the template render.

    Returns:
        str: rendered template.
    """
    env = Environment(
        loader=PackageLoader('lago', 'providers/libvirt/templates'),
        trim_blocks=True,
        lstrip_blocks=True,
    )
    wanted = 'dom_template-{0}.xml.j2'.format(distro)
    try:
        template = env.get_template(wanted)
    except TemplateNotFound:
        LOGGER.debug('could not find template %s using default', wanted)
        template = env.get_template('dom_template-base.xml.j2')
    return template.render(libvirt_ver=libvirt_ver, **kwargs)
Get a rendered Jinja2 domain template Args: distro(str): domain distro libvirt_ver(int): libvirt version kwargs(dict): args for template render Returns: str: rendered template
juraj-google-style
def has_all_nonzero_section_lengths(neuron, threshold=0.0):
    """Check that every neurite section has length above ``threshold``.

    Arguments:
        neuron(Neuron): The neuron object to test.
        threshold(float): value above which a section length is
            considered to be non-zero.

    Returns:
        CheckResult whose info lists the ids of offending sections.
    """
    bad_ids = []
    for section in _nf.iter_sections(neuron.neurites):
        if section_length(section.points) <= threshold:
            bad_ids.append(section.id)
    return CheckResult(not bad_ids, bad_ids)
Check presence of neuron sections with length not above threshold Arguments: neuron(Neuron): The neuron object to test threshold(float): value above which a section length is considered to be non-zero Returns: CheckResult with result including list of ids of bad sections
juraj-google-style
def finish(self):
    """Finish transcoding and return the encoded video.

    Returns:
        bytes: everything the subprocess wrote to stdout, or None if no
        subprocess is active (never started, or already finished).

    Raises:
        IOError: if the subprocess exited with a non-zero return code;
        the message contains the command line and its stderr output.
    """
    if (self.proc is None):
        return None
    # Closing stdin signals EOF so the encoder can drain and exit.
    self.proc.stdin.close()
    # The reader threads collect stdout/stderr chunks; wait for both.
    for thread in (self._out_thread, self._err_thread):
        thread.join()
    (out, err) = [b''.join(chunks) for chunks in (self._out_chunks, self._err_chunks)]
    self.proc.stdout.close()
    self.proc.stderr.close()
    # NOTE(review): returncode is only meaningful once the process has
    # exited -- presumably the reader threads finish only at process
    # exit; confirm that the process is reaped before this point.
    if self.proc.returncode:
        err = '\n'.join([' '.join(self.cmd), err.decode('utf8')])
        raise IOError(err)
    del self.proc
    self.proc = None
    return out
Finishes transconding and returns the video. Returns: bytes Raises: IOError: in case of transcoding error.
codesearchnet
def _update_state(self, state_type: str, value: str) -> datetime:
    """Update the state of the given type (current or target).

    Args:
        state_type (str): Type of state to update, 'current' or 'target'.
        value (str): New state value.

    Returns:
        datetime: timestamp of the update (current UTC time).
    """
    timestamp = datetime.utcnow()
    state_field = '{}_state'.format(state_type)
    previous = DB.get_hash_value(self._key, state_field)
    # Queue both writes on a pipeline so state and timestamp are
    # committed together by the single execute() call.
    DB.set_hash_value(self._key, state_field, value, pipeline=True)
    DB.set_hash_value(self._key, '{}_timestamp'.format(state_type),
                      timestamp.isoformat(), pipeline=True)
    DB.execute()
    self.publish('{}_state_updated'.format(state_type),
                 event_data=dict(state=value, old_state=previous))
    return timestamp
Update the state of type specified (current or target). Args: state_type(str): Type of state to update, current or target. value (str): New state value. Returns: timestamp, current time
juraj-google-style
def _ParseArgs(fn_args, fn_defaults, num_required_args, kwargs, remaining_args, metadata):
    """Parses positional and named arguments from the supplied args.

    Modifies kwargs and remaining_args in place, removing entries as
    they are consumed.

    Args:
        fn_args: Names of the args the target function accepts
            (positional and named, excluding varargs/kwargs names).
        fn_defaults: Default values from the function's argspec.
        num_required_args: Number of args without a default value.
        kwargs: Dict of named command line arguments and their values.
        remaining_args: Remaining command line arguments, usable as
            positional arguments.
        metadata: Metadata about the function (from Fire decorators).

    Returns:
        Tuple (parsed_args, kwargs, remaining_args, capacity):
        parsed_args is the list of positional values to call with;
        capacity is True when the call could have accepted more args in
        place of defaults.

    Raises:
        FireError: If a required argument received no value.
    """
    accepts_positional_args = metadata.get(decorators.ACCEPTS_POSITIONAL_ARGS)
    capacity = False
    parsed_args = []
    for index, arg in enumerate(fn_args):
        # A named (kwargs) value for this arg wins over positionals.
        value = kwargs.pop(arg, None)
        if value is not None:
            value = _ParseValue(value, index, arg, metadata)
            parsed_args.append(value)
        elif remaining_args and accepts_positional_args:
            # Otherwise consume the next positional command line arg.
            value = remaining_args.pop(0)
            value = _ParseValue(value, index, arg, metadata)
            parsed_args.append(value)
        elif index < num_required_args:
            raise FireError('The function received no value for the required argument:', arg)
        else:
            # Fall back to the declared default and note that more args
            # could have been accepted here.
            capacity = True
            default_index = index - num_required_args
            parsed_args.append(fn_defaults[default_index])
    # Leftover kwargs are parsed too -- presumably destined for the
    # function's **kwargs; confirm with the caller.
    for key, value in kwargs.items():
        kwargs[key] = _ParseValue(value, None, key, metadata)
    return (parsed_args, kwargs, remaining_args, capacity)
Parses the positional and named arguments from the available supplied args. Modifies kwargs, removing args as they are used. Args: fn_args: A list of argument names that the target function accepts, including positional and named arguments, but not the varargs or kwargs names. fn_defaults: A list of the default values in the function argspec. num_required_args: The number of required arguments from the function's argspec. This is the number of arguments without a default value. kwargs: Dict with named command line arguments and their values. remaining_args: The remaining command line arguments, which may still be used as positional arguments. metadata: Metadata about the function, typically from Fire decorators. Returns: parsed_args: A list of values to be used as positional arguments for calling the target function. kwargs: The input dict kwargs modified with the used kwargs removed. remaining_args: A list of the supplied args that have not been used yet. capacity: Whether the call could have taken args in place of defaults. Raises: FireError: If additional positional arguments are expected, but none are available.
github-repos
def get_sid(principal):
    """Convert a username to a SID, or verify a SID.

    Required for working with the DACL.

    Args:
        principal(str): The principal to look up. Can be a SID string or
            a username. ``None`` maps to the NULL SID.

    Returns:
        PySID Object: the resolved SID.

    Raises:
        CommandExecutionError: if the user/group/SID is invalid.

    Usage:

    .. code-block:: python

        # Get a user's sid
        salt.utils.win_dacl.get_sid('jsnuffy')

        # Verify that the sid is valid
        salt.utils.win_dacl.get_sid('S-1-5-32-544')
    """
    if principal is None:
        principal = 'NULL SID'
    try:
        sid = salt.utils.win_functions.get_sid_from_name(principal)
    except CommandExecutionError:
        # Not a resolvable name; assume it is already a SID string.
        sid = principal
    try:
        sid = win32security.ConvertStringSidToSid(sid)
    except pywintypes.error:
        log.exception('Invalid user/group or sid: %s', principal)
        raise CommandExecutionError(
            'Invalid user/group or sid: {0}'.format(principal))
    except TypeError:
        # Fixed: the bare "raise CommandExecutionError" raised an
        # exception with no message; include the offending principal.
        raise CommandExecutionError(
            'Invalid user/group or sid: {0}'.format(principal))
    return sid
Converts a username to a sid, or verifies a sid. Required for working with the DACL. Args: principal(str): The principal to lookup the sid. Can be a sid or a username. Returns: PySID Object: A sid Usage: .. code-block:: python # Get a user's sid salt.utils.win_dacl.get_sid('jsnuffy') # Verify that the sid is valid salt.utils.win_dacl.get_sid('S-1-5-32-544')
codesearchnet
def manual_to_auto_spmd_partition(tensor, manual_sharding, full_shape, single_dim=-1, unspecified_dims=None):
    """Switch from manual partitioning to automatic SPMD partitioning.

    Converts a shard-shaped tensor (manually partitioned in SPMD-style)
    to a full-shaped tensor to be partitioned automatically by the SPMD
    partitioner.

    Args:
        tensor: A tf.Tensor in shard shape.
        manual_sharding: a serialized string of OpSharding used in the
            manual partitioning.
        full_shape: the shape of tensor before partitioning.
        single_dim: If >= 0, the conversion happens only on this dim in
            subgroups.
        unspecified_dims: An optional list of dimensions unspecified.

    Returns:
        A full-shaped tensor to be partitioned automatically by the SPMD
        partitioner.
    """
    dims = unspecified_dims if unspecified_dims else []
    return tf2xla.spmd_shard_to_full_shape(
        tensor,
        manual_sharding=manual_sharding,
        full_shape=full_shape,
        dim=single_dim,
        unspecified_dims=dims)
Switches from manual partitioning to automatic SPMD partitioning. Converts a shard-shaped tensor (manually partitioned in SPMD-style) to a full-shaped tensor to be partitioned automatically by the SPMD partitioner. Args: tensor: A tf.Tensor in shard shape. manual_sharding: a serialized string of OpSharding to be used in manual partitioning. full_shape: the shape of tensor before partitioning. single_dim: If >= 0, the conversion will happen only on this dim in subgroups. unspecified_dims: An optional list of dimensions unspecified. Returns: A full-shaped tensor to be partitioned automatically by the SPMD partitioner.
github-repos
def userinfo(self, access_token):
    """Return the user information for an Auth0 access token.

    Works only if "openid" was granted as a scope for the access token.

    Args:
        access_token (str): Auth0 access token (obtained during login).

    Returns:
        The user profile.
    """
    # NOTE(review): the URL literal was corrupted in this copy
    # ("url='https:"); reconstructed as the Auth0 /userinfo endpoint on
    # the tenant domain -- confirm against upstream auth0-python.
    return self.get(
        url='https://{}/userinfo'.format(self.domain),
        headers={'Authorization': 'Bearer {}'.format(access_token)}
    )
Returns the user information based on the Auth0 access token. This endpoint will work only if openid was granted as a scope for the access_token. Args: access_token (str): Auth0 access token (obtained during login). Returns: The user profile.
juraj-google-style
def change_numbering(self, new_index=None):
    """Change numbering to a new index.

    Changes the numbering of the index and all dependent numbering (the
    'b', 'a' and 'd' reference columns) to ``new_index``. The caller has
    to make sure that ``new_index`` consists of distinct elements.

    Args:
        new_index (list): If None, the new index is range(len(self)).

    Returns:
        Zmat: Reindexed version of the zmatrix.
    """
    if (new_index is None):
        new_index = range(len(self))
    elif len(new_index) != len(self):
        raise ValueError('len(new_index) has to be the same as len(self)')
    c_table = self.loc[:, ['b', 'a', 'd']]
    # Replace symbolic labels with their integer placeholders so the
    # whole table can be cast to integers for the renumbering.
    c_table = c_table.replace(constants.int_label)
    try:
        c_table = c_table.astype('i8')
    except ValueError:
        raise ValueError('Due to a bug in pandas it is necessary to have '
                         'integer columns')
    c_table = c_table.replace(self.index, new_index)
    # Restore the symbolic labels after renumbering.
    c_table = c_table.replace(
        {v: k for k, v in constants.int_label.items()})
    out = self.copy()
    out.unsafe_loc[:, ['b', 'a', 'd']] = c_table
    out._frame.index = new_index
    return out
Change numbering to a new index. Changes the numbering of index and all dependent numbering (bond_with...) to a new_index. The user has to make sure that the new_index consists of distinct elements. Args: new_index (list): If None the new_index is taken from 1 to the number of atoms. Returns: Zmat: Reindexed version of the zmatrix.
juraj-google-style
def _ParseNumericOption(cls, options, argument_name, default_value=None):
    """Parse a numeric command line argument.

    Args:
        options (argparse.Namespace): parser options.
        argument_name (str): name of the command line argument.
        default_value (Optional[int]): default value of the command line
            argument.

    Returns:
        int: command line argument value, or default_value when not set.

    Raises:
        BadConfigOption: if the value is not an integer.
    """
    value = getattr(options, argument_name, None)
    if value is None:
        return default_value
    if isinstance(value, py2to3.INTEGER_TYPES):
        return value
    raise errors.BadConfigOption(
        'Unsupported option: {0:s} integer type required.'.format(argument_name))
Parses a numeric command line argument. Args: options (argparse.Namespace): parser options. argument_name (str): name of the command line argument. default_value (Optional[int]): default value of the command line argument. Returns: int: command line argument value or the default value if the command line argument is not set Raises: BadConfigOption: if the command line argument value cannot be converted to a Unicode string.
codesearchnet
def minimize_best_n(Members):
    """Order population members by fitness score, best (highest) first.

    Args:
        Members (list): list of PyGenetics Member objects.

    Returns:
        list: Members ordered from highest fitness score to lowest.
    """
    # Sort ascending then reverse in place: this reproduces the exact
    # tie ordering of reversed(sorted(...)).
    ascending = sorted(Members, key=lambda member: member.fitness_score)
    ascending.reverse()
    return ascending
Orders population members from highest fitness to lowest fitness Args: Members (list): list of PyGenetics Member objects Returns: list: ordered list of Members, from highest fitness to lowest fitness
codesearchnet
def cwise(tf_fn, xs, output_dtype=None, grad_function=None, name=None):
    """Component-wise operation with no broadcasting.

    Args:
        tf_fn: a component-wise function taking n tf.Tensor inputs and
            producing a tf.Tensor output.
        xs: n Tensors.
        output_dtype: an optional dtype.
        grad_function: an optional python function.
        name: an optional string.

    Returns:
        a Tensor.
    """
    # A purely element-wise op can be split along every dimension of
    # its first input.
    return slicewise(
        tf_fn, xs,
        output_dtype=output_dtype,
        splittable_dims=xs[0].shape.dims,
        grad_function=grad_function,
        name=name or "cwise")
Component-wise operation with no broadcasting. Args: tf_fn: a component-wise function taking n tf.Tensor inputs and producing a tf.Tensor output xs: n Tensors output_dtype: an optional dtype grad_function: an optional python function name: an optional string Returns: a Tensor
juraj-google-style
def _valid_deleted_file(path):
    """Decide whether *path* is a deleted file we care about.

    Filters the path against unwanted directories (LIST_DIRS) and
    checks whether it is marked as deleted.

    Args:
        path: A string - path to file.

    Returns:
        True if the file is a desired deleted file, else False.
    """
    marked_deleted = (
        path.endswith(' (deleted)')
        or re.compile('\\(path inode=[0-9]+\\)$').search(path) is not None
    )
    if not marked_deleted:
        return False
    # Paths under any of the ignored directories are excluded.
    if re.compile('|'.join(LIST_DIRS)).match(path):
        return False
    return True
Filters file path against unwanted directories and decides whether file is marked as deleted. Returns: True if file is desired deleted file, else False. Args: path: A string - path to file
codesearchnet
def as_str_any(value, encoding='utf-8'):
    """Convert input to `str` type.

    Uses `str(value)`, except for `bytes` typed inputs, which are
    converted using `as_str`.

    Args:
        value: An object that can be converted to `str`.
        encoding: Encoding for `bytes` typed inputs.

    Returns:
        A `str` object.
    """
    if not isinstance(value, bytes):
        return str(value)
    return as_str(value, encoding=encoding)
Converts input to `str` type. Uses `str(value)`, except for `bytes` typed inputs, which are converted using `as_str`. Args: value: A object that can be converted to `str`. encoding: Encoding for `bytes` typed inputs. Returns: A `str` object.
github-repos
def watchpoint_info(self, handle=0, index=(- 1)):
    """Returns information about the specified watchpoint.

    Note:
        Either ``handle`` or ``index`` can be specified. If ``index`` is
        not provided, ``handle`` must be set, and vice-versa. If both
        are provided, a watchpoint matching either is returned.

    Args:
        self (JLink): the ``JLink`` instance.
        handle (int): optional handle of a valid watchpoint.
        index (int): optional index of a watchpoint.

    Returns:
        ``JLinkWatchpointInfo`` describing the watchpoint if found,
        otherwise ``None``.

    Raises:
        JLinkException: on error.
        ValueError: if both handle and index are invalid.
    """
    if ((index < 0) and (handle == 0)):
        raise ValueError('Handle must be provided if index is not set.')
    # First call -- presumably returning the number of existing
    # watchpoints (confirm with the SEGGER SDK docs) -- then each one is
    # queried and matched by handle or unit index.
    wp = structs.JLinkWatchpointInfo()
    res = self._dll.JLINKARM_GetWPInfoEx(index, ctypes.byref(wp))
    if (res < 0):
        raise errors.JLinkException('Failed to get watchpoint info.')
    for i in range(res):
        res = self._dll.JLINKARM_GetWPInfoEx(i, ctypes.byref(wp))
        if (res < 0):
            raise errors.JLinkException('Failed to get watchpoint info.')
        elif ((wp.Handle == handle) or (wp.WPUnit == index)):
            return wp
    return None
Returns information about the specified watchpoint. Note: Either ``handle`` or ``index`` can be specified. If the ``index`` is not provided, the ``handle`` must be set, and vice-versa. If both ``index`` and ``handle`` are provided, the ``index`` overrides the provided ``handle``. Args: self (JLink): the ``JLink`` instance handle (int): optional handle of a valid watchpoint. index (int): optional index of a watchpoint. Returns: An instance of ``JLinkWatchpointInfo`` specifying information about the watchpoint if the watchpoint was found, otherwise ``None``. Raises: JLinkException: on error. ValueError: if both handle and index are invalid.
codesearchnet
def open(path, mime_type='application/octet-stream', compression_type=CompressionTypes.AUTO):
    """Returns a read channel for the given file path.

    Args:
        path: string path of the file object to be written to the system.
        mime_type: MIME type to specify the type of content in the file
            object.
        compression_type: Type of compression to be used for this object.
            See ``CompressionTypes`` for possible values.

    Returns:
        file handle with a ``close`` function for the user to use.
    """
    # Dispatch to the filesystem implementation registered for the
    # path's scheme (local, GCS, HDFS, ...).
    return FileSystems.get_filesystem(path).open(path, mime_type, compression_type)
Returns a read channel for the given file path. Args: path: string path of the file object to be written to the system mime_type: MIME type to specify the type of content in the file object compression_type: Type of compression to be used for this object. See ``CompressionTypes`` for possible values. Returns: file handle with a ``close`` function for the user to use.
github-repos
def remove_entry(self, pathname_name, recursive=True):
    """Removes the specified child file or directory.

    Args:
        pathname_name: Basename of the child object to remove.
        recursive: If True (default), the entries in contained
            directories are deleted first. Used to propagate removal
            errors (e.g. permission problems) from contained entries.

    Raises:
        KeyError: if no child exists by the specified name.
        OSError: if the user lacks permission to delete the file, or
            (Windows only) the file is open.
    """
    pathname_name = self._normalized_entryname(pathname_name)
    entry = self.get_entry(pathname_name)
    if self.filesystem.is_windows_fs:
        # Windows semantics: the entry itself must be writable and not
        # currently open anywhere.
        if ((entry.st_mode & PERM_WRITE) == 0):
            self.filesystem.raise_os_error(errno.EACCES, pathname_name)
        if self.filesystem.has_open_file(entry):
            self.filesystem.raise_os_error(errno.EACCES, pathname_name)
    elif ((not is_root()) and ((self.st_mode & (PERM_WRITE | PERM_EXE)) != (PERM_WRITE | PERM_EXE))):
        # POSIX semantics: removal needs write+execute permission on the
        # containing directory (self), unless running as root.
        self.filesystem.raise_os_error(errno.EACCES, pathname_name)
    if (recursive and isinstance(entry, FakeDirectory)):
        # Empty the directory first so errors from contained entries
        # surface before the directory itself is removed.
        while entry.contents:
            entry.remove_entry(list(entry.contents)[0])
    elif (entry.st_nlink == 1):
        # Last hard link: give the space back to the fake filesystem.
        self.filesystem.change_disk_usage((- entry.size), pathname_name, entry.st_dev)
    self.st_nlink -= 1
    entry.st_nlink -= 1
    assert (entry.st_nlink >= 0)
    del self.contents[pathname_name]
Removes the specified child file or directory. Args: pathname_name: Basename of the child object to remove. recursive: If True (default), the entries in contained directories are deleted first. Used to propagate removal errors (e.g. permission problems) from contained entries. Raises: KeyError: if no child exists by the specified name. OSError: if user lacks permission to delete the file, or (Windows only) the file is open.
codesearchnet
def make_hash(self, task):
    """Create an MD5 hash of the task inputs.

    Uses a serialization library borrowed from ipyparallel. If it fails
    here, all ipp calls are also likely to fail due to failure at
    serialization.

    Args:
        - task (dict): Task dictionary from dfk.tasks.

    Returns:
        - hash (str): A unique hash string.
    """
    parts = []
    # Serialize each relevant task component in a fixed order so the
    # hash is stable for identical inputs.
    for key in ('func_name', 'fn_hash', 'args', 'kwargs', 'env'):
        parts.append(serialize_object(task[key])[0])
    return hashlib.md5(b''.join(parts)).hexdigest()
Create a hash of the task inputs. This uses a serialization library borrowed from ipyparallel. If this fails here, then all ipp calls are also likely to fail due to failure at serialization. Args: - task (dict) : Task dictionary from dfk.tasks Returns: - hash (str) : A unique hash string
juraj-google-style
def _parse_query_modifier(self, modifier, qval, is_escaped):
    """Build the query fragment for the given query modifier.

    Args:
        modifier (str): Type of query: exact, contains, startswith,
            endswith, range, lte, gte, lt or gt.
        qval: Value partition of the query (a 2-tuple for "range").
        is_escaped (bool): Whether qval is already escaped.

    Returns:
        The parsed query value.
    """
    if modifier == 'range':
        if not qval[0]:
            start = '*'
        # Fixed: check datetime before date -- datetime is a subclass
        # of date, so the datetime branch was previously unreachable.
        elif isinstance(qval[0], datetime):
            start = self._handle_datetime(qval[0])
        elif isinstance(qval[0], date):
            start = self._handle_date(qval[0])
        elif not is_escaped:
            start = self._escape_query(qval[0])
        else:
            start = qval[0]
        if not qval[1]:
            end = '*'
        elif isinstance(qval[1], datetime):
            end = self._handle_datetime(qval[1])
        elif isinstance(qval[1], date):
            end = self._handle_date(qval[1])
        elif not is_escaped:
            end = self._escape_query(qval[1])
        else:
            end = qval[1]
        qval = '[%s TO %s]' % (start, end)
    else:
        if not is_escaped and not isinstance(qval, (date, datetime, int, float)):
            qval = self._escape_query(qval)
        if modifier == 'exact':
            qval = qval
        elif modifier == 'contains':
            qval = "*%s*" % qval
        elif modifier == 'startswith':
            qval = "%s*" % qval
        elif modifier == 'endswith':
            # Fixed: previously produced "%s*" (a startswith pattern);
            # a suffix match needs the wildcard in front.
            qval = "*%s" % qval
        elif modifier == 'lte':
            qval = '[* TO %s]' % qval
        elif modifier == 'gte':
            qval = '[%s TO *]' % qval
        elif modifier == 'lt':
            # Exclusive bounds are emulated by adjusting integer values.
            if isinstance(qval, int):
                qval -= 1
            qval = '[* TO %s]' % qval
        elif modifier == 'gt':
            if isinstance(qval, int):
                qval += 1
            qval = '[%s TO *]' % qval
    return qval
Parses query_value according to query_type Args: modifier (str): Type of query. Exact, contains, lte etc. qval: Value partition of the query. Returns: Parsed query_value.
juraj-google-style
def limit(self, count):
    """Create a limited query with this collection as parent.

    See :meth:`~.firestore_v1beta1.query.Query.limit` for more
    information on this method.

    Args:
        count (int): Maximum number of documents to return that match
            the query.

    Returns:
        ~.firestore_v1beta1.query.Query: A limited query.
    """
    return query_mod.Query(self).limit(count)
Create a limited query with this collection as parent. See :meth:`~.firestore_v1beta1.query.Query.limit` for more information on this method. Args: count (int): Maximum number of documents to return that match the query. Returns: ~.firestore_v1beta1.query.Query: A limited query.
juraj-google-style
def _fft(self, x):
    """FFT along the last self.block_depth dimensions of x.

    Args:
        x: `Tensor` with floating or complex `dtype`, in the form
            returned by self._vectorize_then_blockify.

    Returns:
        `Tensor` with `dtype` `complex64`.
    """
    # Pick the fft/fft2d/fft3d op matching the block depth.
    fft_op = _FFT_OP[self.block_depth]
    return fft_op(_to_complex(x))
FFT along the last self.block_depth dimensions of x. Args: x: `Tensor` with floating or complex `dtype`. Should be in the form returned by self._vectorize_then_blockify. Returns: `Tensor` with `dtype` `complex64`.
github-repos
def encode_produce_request(cls, payloads=(), acks=1, timeout=1000):
    """Encode a ProduceRequest struct.

    Arguments:
        payloads: list of ProduceRequestPayload.
        acks: How "acky" you want the request to be:
            1: written to disk by the leader,
            0: immediate response,
            -1: waits for all replicas to be in sync.
        timeout: Maximum time (in ms) the server will wait for replica
            acks. This is _not_ a socket timeout.

    Returns:
        ProduceRequest

    Raises:
        ValueError: if acks is not one of 1, 0, -1.
    """
    if acks not in (1, 0, -1):
        raise ValueError('ProduceRequest acks (%s) must be 1, 0, -1' % acks)
    topics = []
    # Regroup flat payloads as topic -> partition -> encoded messages.
    for topic, topic_payloads in group_by_topic_and_partition(payloads).items():
        topic_msgs = []
        for partition, payload in topic_payloads.items():
            partition_msgs = []
            for msg in payload.messages:
                m = kafka.protocol.message.Message(
                    msg.value, key=msg.key, magic=msg.magic,
                    attributes=msg.attributes
                )
                # The leading 0 is presumably the per-message offset
                # field, ignored by the broker on produce -- confirm
                # against the Kafka protocol spec.
                partition_msgs.append((0, m.encode()))
            topic_msgs.append((partition, MessageSet.encode(partition_msgs, prepend_size=False)))
        topics.append((topic, topic_msgs))
    return kafka.protocol.produce.ProduceRequest[0](
        required_acks=acks,
        timeout=timeout,
        topics=topics
    )
Encode a ProduceRequest struct Arguments: payloads: list of ProduceRequestPayload acks: How "acky" you want the request to be 1: written to disk by the leader 0: immediate response -1: waits for all replicas to be in sync timeout: Maximum time (in ms) the server will wait for replica acks. This is _not_ a socket timeout Returns: ProduceRequest
juraj-google-style
def maybe_copy_file_to_directory(source_filepath, target_directory):
    """Copy a file into a directory if it is not already there.

    Args:
        source_filepath: a string.
        target_directory: a string.

    Returns:
        a string: the target filepath.
    """
    if not tf.gfile.Exists(target_directory):
        tf.logging.info('Creating directory %s' % target_directory)
        os.mkdir(target_directory)
    target_filepath = os.path.join(target_directory,
                                   os.path.basename(source_filepath))
    if tf.gfile.Exists(target_filepath):
        tf.logging.info('Not copying, file already found: %s' % target_filepath)
    else:
        tf.logging.info('Copying %s to %s' % (source_filepath, target_filepath))
        tf.gfile.Copy(source_filepath, target_filepath)
        statinfo = os.stat(target_filepath)
        tf.logging.info('Successfully copied %s, %s bytes.' %
                        (target_filepath, statinfo.st_size))
    return target_filepath
Copy a file to a directory if it is not already there. Returns the target filepath. Args: source_filepath: a string target_directory: a string Returns: a string
codesearchnet
def _dict_mapping_to_pb(mapping, proto_type):
    """Convert a dict to the named trace protobuf.

    Args:
        mapping (dict): A dict that needs to be converted to protobuf.
        proto_type (str): The type of the Protobuf (attribute name on
            trace_pb2).

    Returns:
        An instance of the specified protobuf, populated from mapping.
    """
    pb = getattr(trace_pb2, proto_type)()
    ParseDict(mapping, pb)
    return pb
Convert a dict to protobuf. Args: mapping (dict): A dict that needs to be converted to protobuf. proto_type (str): The type of the Protobuf. Returns: An instance of the specified protobuf.
juraj-google-style
def __init__(self, task_name, dag_name, workflow_name, workflow_id, worker_hostname):
    """Initialize the task context object.

    Args:
        task_name (str): The name of the task.
        dag_name (str): The name of the DAG the task was started from.
        workflow_name (str): The name of the workflow the task was
            started from.
        workflow_id (str): The id of the workflow this task is member of.
        worker_hostname (str): The name of the worker executing this task.
    """
    self.task_name, self.dag_name = task_name, dag_name
    self.workflow_name, self.workflow_id = workflow_name, workflow_id
    self.worker_hostname = worker_hostname
Initialize the task context object. Args: task_name (str): The name of the task. dag_name (str): The name of the DAG the task was started from. workflow_name (str): The name of the workflow the task was started from. workflow_id (str): The id of the workflow this task is member of. worker_hostname (str): The name of the worker executing this task.
juraj-google-style
def inquire_property(name, doc=None):
    """Create a property based on an inquire result.

    The created property calls ``self._inquire`` with the named flag set
    and returns that piece of the result.

    Args:
        name (str): the name of the 'inquire' result information.
        doc (str): optional docstring for the property.

    Returns:
        property: the created property.
    """
    def inquire_property(self):
        if not self._started:
            msg = ("Cannot read {0} from a security context whose "
                   "establishment has not yet been started.")
            # Fixed: the {0} placeholder was never filled in, so the
            # raised message contained a literal "{0}".
            raise AttributeError(msg.format(name))
        return getattr(self._inquire(**{name: True}), name)

    return property(inquire_property, doc=doc)
Creates a property based on an inquire result This method creates a property that calls the :python:`_inquire` method, and return the value of the requested information. Args: name (str): the name of the 'inquire' result information Returns: property: the created property
juraj-google-style
async def _send_email(email_, config, loop=None):
    """Send an email over SMTP with STARTTLS.

    Args:
        email_ (email.MIMEMultipart): The email to send.
        config (defaultdict): Config providing the email section
            (server, port, user, password).
        loop: Optional asyncio event loop; defaults to the current loop.
    """
    # Fixed: the old default "loop=asyncio.get_event_loop()" was
    # evaluated once at import time, permanently capturing whatever loop
    # existed then; resolve the loop lazily at call time instead.
    if loop is None:
        loop = asyncio.get_event_loop()
    smtp_server = get_attribute_from_config(config, EMAIL_SECTION_KEY, SMTP_SERVER_KEY)
    smtp_port = int(get_attribute_from_config(config, EMAIL_SECTION_KEY, SMTP_PORT_KEY))
    user = get_attribute_from_config(config, EMAIL_SECTION_KEY, USER_KEY)
    password = get_attribute_from_config(config, EMAIL_SECTION_KEY, PASSWORD_KEY)
    server = aiosmtplib.SMTP(hostname=smtp_server, port=smtp_port, loop=loop, use_tls=False)
    await server.connect()
    await server.starttls()
    await server.login(user, password)
    await server.send_message(email_)
    await server.quit()
Send an email. Args: email_ (email.MIMEMultipart): The email to send. config (defaultdict): A defaultdict.
juraj-google-style
async def get_participant(self, p_id: int, force_update=False) -> Participant:
    """get a participant by its id

    |methcoro|

    Args:
        p_id: participant id
        force_update (default=False): True to force an update to the
            Challonge API

    Returns:
        Participant: None if not found

    Raises:
        APIException
    """
    participant = self._find_participant(p_id)
    if participant is not None and not force_update:
        return participant
    # Refresh the cached participant list, then look again.
    await self.get_participants()
    return self._find_participant(p_id)
get a participant by its id |methcoro| Args: p_id: participant id force_update (default=False): True to force an update to the Challonge API Returns: Participant: None if not found Raises: APIException
codesearchnet
def __init__(self, scope, parent, name, result, paren=False):
    """Constructor for references.

    Args:
        scope (CodeEntity): The program scope where this object belongs.
        parent (CodeEntity): This object's parent in the program tree.
        name (str): The name of the reference in the program.
        result (str): The return type of the expression in the program.

    Kwargs:
        paren (bool): Whether the reference is enclosed in parentheses.
    """
    CodeExpression.__init__(self, scope, parent, name, result, paren)
    # Both start unresolved -- presumably filled in by later analysis
    # passes (field access owner and resolved program entity); confirm
    # against the resolver code.
    self.field_of = None
    self.reference = None
Constructor for references. Args: scope (CodeEntity): The program scope where this object belongs. parent (CodeEntity): This object's parent in the program tree. name (str): The name of the reference in the program. result (str): The return type of the expression in the program. Kwargs: paren (bool): Whether the reference is enclosed in parentheses.
juraj-google-style
def __init__(self, queue, ev_writer, flush_secs, flush_complete, flush_sentinel, close_sentinel):
    """Creates an _EventLoggerThread.

    Args:
        queue: A CloseableQueue from which to dequeue events.
        ev_writer: An event writer. Used to log brain events for the
            visualizer.
        flush_secs: How often, in seconds, to flush the pending file to
            disk.
        flush_complete: A threading.Event set whenever a flush operation
            requested via `flush_sentinel` has been completed.
        flush_sentinel: A sentinel element in queue that tells this
            thread to flush the writer and mark the current flush
            operation complete.
        close_sentinel: A sentinel element in queue that tells this
            thread to terminate and close the queue.
    """
    threading.Thread.__init__(self, name='EventLoggerThread')
    # Daemon so an unfinished logger never blocks interpreter exit.
    self.daemon = True
    self._queue = queue
    self._ev_writer = ev_writer
    self._flush_secs = flush_secs
    # Epoch seconds before which no time-based flush is due; 0 forces
    # an immediate first flush opportunity.
    self._next_event_flush_time = 0
    self._flush_complete = flush_complete
    self._flush_sentinel = flush_sentinel
    self._close_sentinel = close_sentinel
    # Empty tuple means no failure; presumably set to sys.exc_info() by
    # the worker loop when writing fails -- confirm in run().
    self.failure_exc_info = ()
Creates an _EventLoggerThread. Args: queue: A CloseableQueue from which to dequeue events. The queue will be closed just before the thread exits, whether due to `close_sentinel` or any exception raised in the writing loop. ev_writer: An event writer. Used to log brain events for the visualizer. flush_secs: How often, in seconds, to flush the pending file to disk. flush_complete: A threading.Event that will be set whenever a flush operation requested via `flush_sentinel` has been completed. flush_sentinel: A sentinel element in queue that tells this thread to flush the writer and mark the current flush operation complete. close_sentinel: A sentinel element in queue that tells this thread to terminate and close the queue.
github-repos
def convert(self, obj):
    """Convert a Pie object into a core secret object and vice versa.

    Dispatches on the concrete type of ``obj``: each supported Pie type
    is rebuilt as its core counterpart and each core type as its Pie
    counterpart (keys, certificates, secret data, opaque objects).

    Args:
        obj (various): A Pie or core secret object to convert into the
            opposite object space. Required.

    Returns:
        The converted object.

    Raises:
        TypeError: if the object type is unrecognized or unsupported.
    """
    # The branches are checked pairwise (Pie type, then core type).
    if isinstance(obj, pobjects.SymmetricKey):
        return self._build_core_key(obj, secrets.SymmetricKey)
    elif isinstance(obj, secrets.SymmetricKey):
        return self._build_pie_key(obj, pobjects.SymmetricKey)
    elif isinstance(obj, pobjects.PublicKey):
        return self._build_core_key(obj, secrets.PublicKey)
    elif isinstance(obj, secrets.PublicKey):
        return self._build_pie_key(obj, pobjects.PublicKey)
    elif isinstance(obj, pobjects.PrivateKey):
        return self._build_core_key(obj, secrets.PrivateKey)
    elif isinstance(obj, secrets.PrivateKey):
        return self._build_pie_key(obj, pobjects.PrivateKey)
    elif isinstance(obj, pobjects.Certificate):
        return self._build_core_certificate(obj)
    elif isinstance(obj, secrets.Certificate):
        return self._build_pie_certificate(obj)
    elif isinstance(obj, pobjects.SecretData):
        return self._build_core_secret_data(obj)
    elif isinstance(obj, secrets.SecretData):
        return self._build_pie_secret_data(obj)
    elif isinstance(obj, pobjects.OpaqueObject):
        return self._build_core_opaque_object(obj)
    elif isinstance(obj, secrets.OpaqueObject):
        return self._build_pie_opaque_object(obj)
    else:
        raise TypeError('object type unsupported and cannot be converted')
Convert a Pie object into a core secret object and vice versa. Args: obj (various): A Pie or core secret object to convert into the opposite object space. Required. Raises: TypeError: if the object type is unrecognized or unsupported.
codesearchnet
def __init__(self, proto, *, proto_as_initial_chunk: bool=True, parent_splitter: Optional['ComposableSplitter']=None, fields_in_parent: Optional[util.FieldTypes]=None):
    """Initializes ComposableSplitter.

    Args:
        proto: Proto message to split.
        proto_as_initial_chunk: Whether to initialize chunks with the
            user-provided proto as the initial chunk.
        parent_splitter: The parent `ComposableSplitter` object.
        fields_in_parent: Fields to access `proto` from the parent
            splitter's proto.
    """
    self._proto = proto
    self._parent_splitter = parent_splitter
    self._fields_in_parent = fields_in_parent
    # Presumably flipped once splitting has run -- confirm against the
    # class's build method.
    self._built = False
    self._add_chunk_order = []
    self._fix_chunk_order = False
    if parent_splitter is not None:
        # Child splitters defer chunk bookkeeping to their parent.
        self._chunks = None
        self._chunked_message = None
    elif proto_as_initial_chunk:
        # Seed the chunk list with the whole proto at index 0.
        self._chunks = [self._proto]
        self._chunked_message = chunk_pb2.ChunkedMessage(chunk_index=0)
        self._add_chunk_order.append(id(self._proto))
    else:
        self._chunks = []
        self._chunked_message = chunk_pb2.ChunkedMessage()
Initializes ComposableSplitter. Args: proto: Proto message to split. proto_as_initial_chunk: Whether to initialize chunks with the user-provided proto as the initial chunk. parent_splitter: The parent `ComposableSplitter` object. fields_in_parent: Fields to access `proto` from the parent splitter's proto.
github-repos
def analyze(self, text, tokenizer=str.split):
    """Analyze text and return pretty format.

    Args:
        text: string, the input text.
        tokenizer: Tokenize input sentence. Default tokenizer is
            `str.split`.

    Returns:
        res: dict.
    """
    # Build the tagger lazily on first use and cache it.
    if not self.tagger:
        self.tagger = Tagger(self.model, preprocessor=self.p,
                             tokenizer=tokenizer)
    return self.tagger.analyze(text)
Analyze text and return pretty format. Args: text: string, the input text. tokenizer: Tokenize input sentence. Default tokenizer is `str.split`. Returns: res: dict.
juraj-google-style
def _verify_control_structure(self, device_info, control_info=None):
    """Verify that a control structure is still valid or find one.

    Args:
        device_info: Device descriptor providing ram_start and ram_size,
            used only when no control_info was supplied.
        control_info: A previously found control structure, if any.

    Returns:
        ControlStructure: The verified or discovered control structure.
    """
    if control_info is not None:
        return control_info
    # No structure supplied: scan the device's RAM region for one.
    return self._find_control_structure(device_info.ram_start,
                                        device_info.ram_size)
Verify that a control structure is still valid or find one. Returns: ControlStructure: The verified or discovered control structure.
codesearchnet
def set_hibernate_timeout(timeout, power='ac', scheme=None):
    """Set the hibernate timeout in minutes for the given power scheme.

    Args:
        timeout (int): Minutes of idle time before the computer
            hibernates.
        power (str): Set the value for AC ('ac') or battery ('dc')
            power. Default is ``ac``.
        scheme (str): The scheme to use; ``None`` uses the current one.
            Can be a GUID or an alias (``SCHEME_BALANCED``,
            ``SCHEME_MAX``, ``SCHEME_MIN``).

    Returns:
        bool: ``True`` if successful, otherwise ``False``.

    CLI Example:

    .. code-block:: bash

        # Sets the hibernate timeout to 30 minutes on Battery
        salt '*' powercfg.set_hibernate_timeout 30 power=dc
    """
    return _set_powercfg_value(
        scheme=scheme,
        sub_group='SUB_SLEEP',
        setting_guid='HIBERNATEIDLE',
        power=power,
        value=timeout,
    )
Set the hibernate timeout in minutes for the given power scheme Args: timeout (int): The amount of time in minutes before the computer hibernates power (str): Set the value for AC or DC power. Default is ``ac``. Valid options are: - ``ac`` (AC Power) - ``dc`` (Battery) scheme (str): The scheme to use, leave as ``None`` to use the current. Default is ``None``. This can be the GUID or the Alias for the Scheme. Known Aliases are: - ``SCHEME_BALANCED`` - Balanced - ``SCHEME_MAX`` - Power saver - ``SCHEME_MIN`` - High performance Returns: bool: ``True`` if successful, otherwise ``False`` CLI Example: .. code-block:: bash # Sets the hibernate timeout to 30 minutes on Battery salt '*' powercfg.set_hibernate_timeout 30 power=dc
codesearchnet
def add_url_rule(self, route, endpoint, handler):
    """Add a new url route.

    Args:
        See flask.Flask.add_url_route().
    """
    # Thin delegation to the wrapped Flask application.
    self.app.add_url_rule(route, endpoint, handler)
Add a new url route. Args: See flask.Flask.add_url_route().
juraj-google-style
def flash_attention_mask(batch_size: int, cache_position: torch.Tensor, kv_length: int, kv_offset: int=0, mask_function: Callable=causal_mask_function, attention_mask: Optional[torch.Tensor]=None, **kwargs):
    """Create the attention mask necessary to use FA2.

    FA2 is un-padded by definition, so this returns `None` when the mask
    is fully causal, or the 2D padding mask (sliced to the last
    `kv_length` columns) from which the seq lens will be extracted.

    Args:
        batch_size (`int`): The batch size of the input sequence.
        cache_position (`torch.Tensor`): Indices of the current input
            sequence elements, shape (query_length,).
        kv_length (`int`): Size of the key/value states during the
            attention computation.
        kv_offset (`int`, optional): First position the key/value states
            refer to.
        mask_function (`Callable`): Mask factory describing the mask
            pattern.
        attention_mask (`torch.Tensor`, optional): 2D padding mask of
            shape (batch_size, number_of_seen_tokens+q_length).
    """
    if attention_mask is None:
        return None
    # Only the padding information for the attended window matters.
    sliced = attention_mask[:, -kv_length:]
    # A fully-true mask is equivalent to no mask at all for FA2.
    return None if bool(sliced.all()) else sliced
Create the attention mask necesary to use FA2. Since FA2 is un-padded by definition, here we simply return `None` if the mask is fully causal, or we return the 2D mask which will then be used to extract the seq_lens. We just slice it in case of sliding window. Args: batch_size (`int`): The batch size of the input sequence. cache_position (`torch.Tensor`): A tensor of shape (query_length,) indicating the current indices of the input sequence elements. kv_length (`int`): The size that the key and value states will have during the attention computation. kv_offset (`int`, optional): An optional offset to indicate at which first position the key and values states will refer to. mask_function (`Callable`): The mask factory function describing the mask pattern. attention_mask (`torch.Tensor`, optional): The 2D attention mask corresponding to padded tokens of shape (batch_size, number_of_seen_tokens+q_length)
github-repos
def _format_ase2clusgeo(obj, all_atomtypes=None): totalAN = len(obj) if all_atomtypes is not None: atomtype_set = set(all_atomtypes) else: atomtype_set = set(obj.get_atomic_numbers()) atomtype_lst = np.sort(list(atomtype_set)) n_atoms_per_type_lst = [] pos_lst = [] for atomtype in atomtype_lst: condition = obj.get_atomic_numbers() == atomtype pos_onetype = obj.get_positions()[condition] n_onetype = pos_onetype.shape[0] pos_lst.append(pos_onetype) n_atoms_per_type_lst.append(n_onetype) typeNs = n_atoms_per_type_lst Ntypes = len(n_atoms_per_type_lst) atomtype_lst Apos = np.concatenate(pos_lst).ravel() return Apos, typeNs, Ntypes, atomtype_lst, totalAN
Takes an ase Atoms object and returns numpy arrays and integers which are read by the internal clusgeo. Apos is currently a flattened-out numpy array. Args: obj (ase.Atoms): the atoms object to convert. all_atomtypes (iterable, optional): atomic numbers to use instead of the types present in obj.
juraj-google-style
def MakePmfFromDict(d, name=''):
    """Build a normalized PMF from a value -> probability mapping.

    Args:
        d: dictionary that maps values to probabilities.
        name: optional string name for this PMF.

    Returns:
        Pmf object with its probabilities normalized.
    """
    result = Pmf(d, name)
    result.Normalize()
    return result
Makes a PMF from a map from values to probabilities. Args: d: dictionary that maps values to probabilities name: string name for this PMF Returns: Pmf object
juraj-google-style
def get_size(self):
    """Compute the size of the tree from its bounding rectangle.

    Returns:
        tuple: ``(width, height)`` as ints, derived from the
        (left, top, right, bottom) rectangle.
    """
    left, top, right, bottom = self.get_rectangle()
    return (int(right - left), int(bottom - top))
Get the size of the tree. Returns: tupel: (width, height)
codesearchnet
def _GroupByDevices(self, saveables):
    """Group saveable tensor slices by canonical device name.

    All specs of a single saveable must resolve to one canonical device;
    otherwise saving would be ambiguous.

    Args:
        saveables: A list of BaseSaverBuilder.SaveableObject objects.

    Returns:
        A list of (device_name, [SaveableObject, ...]) tuples sorted by
        ascending device_name.

    Raises:
        ValueError: If the specs of one saveable live on different devices.
    """
    # Idiom fix: defaultdict(list) instead of defaultdict(lambda: []).
    per_device = collections.defaultdict(list)
    for saveable in saveables:
        canonical_device = {pydev.canonical_name(spec.device) for spec in saveable.specs}
        if len(canonical_device) != 1:
            raise ValueError('All tensors of a saveable object must be on the same device: %s' % saveable.name)
        per_device[canonical_device.pop()].append(saveable)
    return sorted(per_device.items(), key=lambda t: t[0])
Group Variable tensor slices per device. TODO(touts): Make sure that all the devices found are on different job/replica/task/cpu|gpu. It would be bad if 2 were on the same device. It can happen if the devices are unspecified. Args: saveables: A list of BaseSaverBuilder.SaveableObject objects. Returns: A list of tuples: (device_name, BaseSaverBuilder.SaveableObject) tuples. The list is sorted by ascending device_name. Raises: ValueError: If the tensors of a saveable are on different devices.
github-repos
def build_pos_args_table(full_alias, args, start_index):
    """Map each placeholder in the alias to its positional argument value.

    Args:
        full_alias: The full alias (including any placeholders).
        args: The arguments that the user typed in the terminal.
        start_index: Index at which positional-argument ingestion begins.

    Returns:
        dict mapping placeholder name -> positional argument value
        (with double quotes escaped).

    Raises:
        CLIError: If fewer positional arguments were supplied than the
            alias has placeholders.
    """
    placeholders = get_placeholders(full_alias, check_duplicates=True)
    expected = len(placeholders)
    supplied = args[start_index:start_index + expected]
    if len(supplied) != expected:
        raise CLIError(INSUFFICIENT_POS_ARG_ERROR.format(full_alias, expected, '' if expected == 1 else 's', len(supplied)))
    # Escape double quotes so values survive later shell/string interpolation.
    escaped = [value.replace('"', '\\"') for value in supplied]
    return dict(zip(placeholders, escaped))
Build a dictionary where the key is placeholder name and the value is the position argument value. Args: full_alias: The full alias (including any placeholders). args: The arguments that the user inputs in the terminal. start_index: The index at which we start ingesting position arguments. Returns: A dictionary with the key beign the name of the placeholder and its value being the respective positional argument.
juraj-google-style
def parse_original_feature_from_example(example, feature_name):
    """Build an `OriginalFeatureList` for the named feature of an example.

    Args:
        example: An example proto.
        feature_name: A string feature name.

    Returns:
        A populated `OriginalFeatureList` (name, value, and oneof kind).
    """
    feature = get_example_features(example)[feature_name]
    kind = feature.WhichOneof('kind')
    value = proto_value_for_feature(example, feature_name)
    return OriginalFeatureList(feature_name, value, kind)
Returns an `OriginalFeatureList` for the specified feature_name. Args: example: An example. feature_name: A string feature name. Returns: A filled in `OriginalFeatureList` object representing the feature.
juraj-google-style
def _send_impression_event(self, experiment, variation, user_id, attributes):
    """Create and dispatch an impression event, then notify listeners.

    Dispatch failures are logged and swallowed (best-effort delivery);
    notification listeners are invoked regardless.

    Args:
        experiment: Experiment for which the impression event is sent.
        variation: Variation picked for the user for the given experiment.
        user_id: ID of the user.
        attributes: Dict of user attribute names/values to record.
    """
    impression_event = self.event_builder.create_impression_event(experiment, variation.id, user_id, attributes)
    self.logger.debug('Dispatching impression event to URL %s with params %s.' % (
        impression_event.url, impression_event.params
    ))
    try:
        self.event_dispatcher.dispatch_event(impression_event)
    except Exception:
        # Was a bare `except:` which also swallowed SystemExit/KeyboardInterrupt;
        # narrowed while keeping the deliberate best-effort behavior.
        self.logger.exception('Unable to dispatch impression event!')
    self.notification_center.send_notifications(enums.NotificationTypes.ACTIVATE, experiment, user_id, attributes, variation, impression_event)
Helper method to send impression event. Args: experiment: Experiment for which impression event is being sent. variation: Variation picked for user for the given experiment. user_id: ID for user. attributes: Dict representing user attributes and values which need to be recorded.
juraj-google-style
def add_dos_dict(self, dos_dict, key_sort_func=None):
    """Add a dictionary of DOSes, optionally sorting the labels first.

    Args:
        dos_dict: dict of {label: Dos}.
        key_sort_func: optional function used to sort the dos_dict keys;
            insertion order is kept when omitted.
    """
    labels = sorted(dos_dict, key=key_sort_func) if key_sort_func else list(dos_dict)
    for label in labels:
        self.add_dos(label, dos_dict[label])
Add a dictionary of doses, with an optional sorting function for the keys. Args: dos_dict: dict of {label: Dos} key_sort_func: function used to sort the dos_dict keys.
juraj-google-style
def do_not_descend_map(self):
    """A map from parents to symbols that should not be descended into.

    The returned map may be edited by callers, but it should not be
    edited once traversal has begun.

    Returns:
        The map marking symbols to not explore.
    """
    return self._do_not_descend_map
A map from parents to symbols that should not be descended into. This map can be edited, but it should not be edited once traversal has begun. Returns: The map marking symbols to not explore.
github-repos
def play_random(env, steps):
    """Play the environment making uniformly random decisions.

    Ctrl-C stops early; the environment is always closed on exit.

    Args:
        env (gym.Env): the initialized gym environment to play.
        steps (int): the number of random steps to take.

    Returns:
        None
    """
    try:
        needs_reset = True
        bar = tqdm(range(steps))
        for _ in bar:
            if needs_reset:
                env.reset()
            action = env.action_space.sample()
            _, reward, needs_reset, info = env.step(action)
            bar.set_postfix(reward=reward, info=info)
            env.render()
    except KeyboardInterrupt:
        pass
    env.close()
Play the environment making uniformly random decisions. Args: env (gym.Env): the initialized gym environment to play steps (int): the number of random steps to take Returns: None
codesearchnet
def NCHWToNHWC(input_tensor):
    """Convert the input from NCHW format to NHWC.

    Args:
        input_tensor: a 4-D tensor, or a 4-element array representing the
            same shape.

    Returns:
        The transposed tensor, or a reordered shape list.
    """
    if isinstance(input_tensor, tensor.Tensor):
        return array_ops.transpose(input_tensor, [0, 2, 3, 1])
    # Plain sequence: reorder N, C, H, W -> N, H, W, C by index.
    return [input_tensor[axis] for axis in (0, 2, 3, 1)]
Convert the input from NCHW format to NHWC. Args: input_tensor: a 4-D tensor, or a 4-element array representing the same. Returns: the converted tensor or a shape array
github-repos
def map_texture_to_surface(texture, surface):
    """Sample surface heights at the (x, y) points of a texture.

    Texture coordinates are in [0, 1]; they are scaled to grid indices
    (with a tiny epsilon so 1.0 maps to the last cell) and clipped into
    range before indexing.

    Args:
        texture (texture): (x, y) coordinate arrays to trace over the surface.
        surface (surface): 2-D height array to sample from.

    Returns:
        Array of surface heights, one per texture point, with the same
        dimensions as the texture's x/y axes.
    """
    tex_x, tex_y = texture
    height, width = surface.shape
    col = np.clip((width * tex_x - 1e-09).astype(np.int32), 0, width - 1)
    row = np.clip((height * tex_y - 1e-09).astype(np.int32), 0, height - 1)
    return surface[row, col]
Returns values on a surface for points on a texture. Args: texture (texture): the texture to trace over the surface surface (surface): the surface to trace along Returns: an array of surface heights for each point in the texture. Line separators (i.e. values that are ``nan`` in the texture) will be ``nan`` in the output, so the output will have the same dimensions as the x/y axes in the input texture.
codesearchnet
def is_closed(self):
    """Check whether the observation table is closed.

    For every row in ``smi_vector``, look for a row in ``sm_vector`` with
    an identical observation-table entry; each match is recorded in
    ``equiv_classes``.

    Returns:
        tuple (bool, str or None): (True, None) if the table is closed,
        otherwise (False, escaping_string) for the first unmatched row.
    """
    for candidate in self.smi_vector:
        match = next(
            (s for s in self.sm_vector
             if self.observation_table[s] == self.observation_table[candidate]),
            None,
        )
        if match is None:
            return False, candidate
        self.equiv_classes[candidate] = match
    return True, None
Check if the observation table is closed. Args: None Returns: tuple (bool, str): True if the observation table is closed and False otherwise. If the table is not closed the escaping string is returned.
juraj-google-style
def read_at(self, d, **kwargs):
    """Read the log at a specific depth or an array of depths.

    Args:
        d (float or array-like): depth(s) to read at.
        **kwargs: passed through to ``_read_at`` (e.g. interpolation,
            index, return_basis).

    Returns:
        float or ndarray: one value per requested depth.
    """
    try:
        return np.array([self._read_at(depth, **kwargs) for depth in d])
    except TypeError:
        # Was a bare `except:` that masked genuine errors from _read_at;
        # a non-iterable d raises TypeError, so treat it as a scalar depth.
        return self._read_at(d, **kwargs)
Read the log at a specific depth or an array of depths. Args: d (float or array-like) interpolation (str) index(bool) return_basis (bool) Returns: float or ndarray.
codesearchnet