code stringlengths 20 4.93k | docstring stringlengths 33 1.27k | source stringclasses 3
values |
|---|---|---|
def get_email_message(self, message_uid, message_type='text/plain'):
self._mail.select('inbox')
result = self._mail.uid('fetch', message_uid, '(RFC822)')
msg = email.message_from_string(result[1][0][1])
try:
for part in msg.walk():
if (part.get_content_type() == message_type):
... | Fetch contents of email.
Args:
message_uid (int): IMAP Message UID number.
Kwargs:
message_type: Can be 'text' or 'html' | codesearchnet |
def config_updated_since(self, sentry_unit, filename, mtime, sleep_time=20, retry_count=30, retry_sleep_time=10):
unit_name = sentry_unit.info['unit_name']
self.log.debug(('Checking that %s updated since %s on %s' % (filename, mtime, unit_name)))
time.sleep(sleep_time)
file_mtime = None
tries = 0
... | Check if file was modified after a given time.
Args:
sentry_unit (sentry): The sentry unit to check the file mtime on
filename (string): The file to check mtime of
mtime (float): The epoch time to check against
sleep_time (int): Initial sleep time (s) before looking for file
retry_sleep_time (int): Time (s) to sleep b... | codesearchnet |
def store_checksums(dataset_name, sizes_checksums):
path = _get_path(dataset_name)
original_data = _get_sizes_checksums(path)
new_data = original_data.copy()
new_data.update(sizes_checksums)
if (original_data == new_data):
return
with tf.io.gfile.GFile(path, 'w') as f:
for (url, ... | Store given checksums and sizes for specific dataset.
Content of file is never disgarded, only updated. This is to ensure that if
process is killed right after first download finishes, checksums registered
during previous runs aren't lost.
It is the responsibility of the caller not to call function multiple times in
... | codesearchnet |
def index_list_for_sort_order(x: List[Any], key: Callable[([Any], Any)]=None, reverse: bool=False) -> List[int]:
def key_with_user_func(idx_val: Tuple[(int, Any)]):
return key(idx_val[1])
if key:
sort_key = key_with_user_func
else:
sort_key = itemgetter(1)
index_value_list = sor... | Returns a list of indexes of ``x``, IF ``x`` WERE TO BE SORTED.
Args:
x: data
key: function to be applied to the data to generate a sort key; this
function is passed as the ``key=`` parameter to :func:`sorted`;
the default is ``itemgetter(1)``
reverse: reverse the sort order?
Returns:
list of integer index values
Ex... | codesearchnet |
def get_substrates(self, material_id, number=50, orient=None):
req = '/materials/{}/substrates?n={}'.format(material_id, number)
if orient:
req += '&orient={}'.format(' '.join(map(str, orient)))
return self._make_request(req) | Get a substrate list for a material id. The list is in order of
increasing elastic energy if a elastic tensor is available for
the material_id. Otherwise the list is in order of increasing
matching area.
Args:
material_id (str): Materials Project material_id, e.g. 'mp-123'.
orient (list) : substrate orientation to loo... | codesearchnet |
def MeasureCosts(self, item):
op_perf_bytes_list, run_time, step_stats_bytes = tf_cluster.TF_MeasureCosts(item.tf_item, self._tf_cluster, self._generate_timeline)
op_perfs = [op_performance_data_pb2.OpPerformance.FromString(op_perf_bytes) for op_perf_bytes in op_perf_bytes_list]
return (op_perfs, run_time, ... | Returns the cost of running the specified item.
Args:
item: The item for which to measure the costs.
Returns: The triplet op_perfs, runtime, step_stats. | github-repos |
def frame(data, window_length, hop_length):
num_samples = data.shape[0]
num_frames = (1 + int(np.floor(((num_samples - window_length) / hop_length))))
shape = ((num_frames, window_length) + data.shape[1:])
strides = (((data.strides[0] * hop_length),) + data.strides)
return np.lib.stride_tricks.as_st... | Convert array into a sequence of successive possibly overlapping frames.
An n-dimensional array of shape (num_samples, ...) is converted into an
(n+1)-D array of shape (num_frames, window_length, ...), where each frame
starts hop_length points after the preceding one.
This is accomplished using stride_tricks, so the ... | codesearchnet |
def anchored_pairs(self, anchor):
pairs = OrderedDict()
for term in self.keys:
score = self.get_pair(anchor, term)
if score:
pairs[term] = score
return utils.sort_dict(pairs) | Get distances between an anchor term and all other terms.
Args:
anchor (str): The anchor term.
Returns:
OrderedDict: The distances, in descending order. | codesearchnet |
def get_parent(self):
if (not isinstance(self.parent, Expression)):
raise FiqlObjectException(('Parent must be of %s not %s' % (Expression, type(self.parent))))
return self.parent | Get the parent ``Expression`` for this object.
Returns:
Expression: The ``Expression`` which contains this object.
Raises:
FiqlObjectException: Parent is ``None``. | codesearchnet |
def __init__(self, text, quiet=False):
self.__text = text
self.reliable = True
self.quiet = quiet
self.detect(text) | Detector of the language used in `text`.
Args:
text (string): unicode string. | juraj-google-style |
def result_type(*dtypes):
if len(dtypes) == 0:
return config.floatx()
for dtype in dtypes:
if dtype in FLOAT8_TYPES:
raise ValueError(f'There is no implicit conversions from float8 dtypes to others. You must cast it internally. Received: {dtypes}')
return _lattice_result_type(*(c... | Returns the type from applying the Keras type promotion rules.
In general, each argument is first parsed by `backend.standardize_dtype`,
and the resulting dtype is determined by the least upper bound of the type
promotion lattice.
Note: This function attempts to match the result of `jnp.result_type`.
Args:
dtypes: I... | github-repos |
def _CheckIsSocket(self, file_entry):
if (definitions.FILE_ENTRY_TYPE_SOCKET not in self._file_entry_types):
return False
return file_entry.IsSocket() | Checks the is_socket find specification.
Args:
file_entry (FileEntry): file entry.
Returns:
bool: True if the file entry matches the find specification, False if not. | codesearchnet |
def UpdateNumberOfWarnings(
self, number_of_consumed_warnings, number_of_produced_warnings):
consumed_warnings_delta = 0
if number_of_consumed_warnings is not None:
if number_of_consumed_warnings < self.number_of_consumed_warnings:
raise ValueError(
'Number of consumed warni... | Updates the number of warnings.
Args:
number_of_consumed_warnings (int): total number of warnings consumed by
the process.
number_of_produced_warnings (int): total number of warnings produced by
the process.
Returns:
bool: True if either number of warnings has increased.
Raises:
ValueError: if the consumed or produc... | juraj-google-style |
def _build_path(self):
if (not self.path):
self.path = '/'
if self.uri_parameters:
self.path = ((self.path + ';') + requote_uri(self.uri_parameters))
if self.query:
self.path = ((self.path + '?') + self.query)
if self.params:
try:
if self.query:
... | Constructs the actual request URL with accompanying query if any.
Returns:
None: But does modify self.path, which contains the final
request path sent to the server. | codesearchnet |
def ask_question(self, field_name, pattern=NAME_PATTERN, is_required=False,
password=False):
input_value = ""
question = ("Insert the field using the pattern below:"
"\n{}\n{}: ".format(pattern[0], field_name))
while not input_value:
... | Ask a question and get the input values.
This method will validade the input values.
Args:
field_name(string): Field name used to ask for input value.
pattern(tuple): Pattern to validate the input value.
is_required(bool): Boolean value if the input value is required.
password(bool): Boolean value to get input passwor... | juraj-google-style |
def __init__(self, value=None):
super(VendorIdentification, self).__init__(
value, Tags.VENDOR_IDENTIFICATION) | Construct a VendorIdentification object.
Args:
value (str): A string describing a KMIP vendor. Optional, defaults
to None. | juraj-google-style |
def GetMetaData(self, request):
if (request.timeout == 0):
raise ValueError("Requests library can't handle timeout of 0")
result = requests.request('GET', request.url, headers=request.headers, timeout=request.timeout)
result.raise_for_status()
if (not result.ok):
raise requests.RequestEx... | Get metadata from local metadata server.
Any failed URL check will fail the whole action since our bios/service
checks may not always correctly identify cloud machines. We don't want to
wait on multiple DNS timeouts.
Args:
request: CloudMetadataRequest object
Returns:
rdf_cloud.CloudMetadataResponse object
Raises:
Va... | codesearchnet |
def create_with_claims(self, claims):
new_kwargs = dict(self._kwargs)
new_kwargs.update(claims)
result = self.__class__(self._service_account_email, self._signer, scopes=self._scopes, private_key_id=self._private_key_id, client_id=self.client_id, user_agent=self._user_agent, **new_kwargs)
result.token_u... | Create credentials that specify additional claims.
Args:
claims: dict, key-value pairs for claims.
Returns:
ServiceAccountCredentials, a copy of the current service account
credentials with updated claims to use when obtaining access
tokens. | codesearchnet |
def _BuildFindSpecsFromRegistrySourceKey(self, key_path):
find_specs = []
for key_path_glob in path_helper.PathHelper.ExpandRecursiveGlobs(
key_path, '\\'):
logger.debug('building find spec from key path glob: {0:s}'.format(
key_path_glob))
key_path_glob_upper = key_path_glob... | Build find specifications from a Windows Registry source type.
Args:
key_path (str): Windows Registry key path defined by the source.
Returns:
list[dfwinreg.FindSpec]: find specifications for the Windows Registry
source type. | juraj-google-style |
def _axis_gather(params, indices, axis):
if axis > 1:
if not isinstance(params, ragged_tensor.RaggedTensor):
params = ragged_tensor.RaggedTensor.from_tensor(params, ragged_rank=1, row_splits_dtype=indices.row_splits.dtype)
return params.with_values(_gather(params.values, indices, axis - ... | Helper that implements ragged gather when axis>0 and batch_dims==0.
Args:
params: The tensor from which to gather values.
indices: The indices of values to gather.
axis: The axis in `params` to gather `indices` from.
Returns:
A potentially ragged tensor. | github-repos |
def _update_repo(repo_config, store, tags_only):
repo_path = store.clone(repo_config['repo'], repo_config['rev'])
cmd_output('git', 'fetch', cwd=repo_path)
tag_cmd = ('git', 'describe', 'origin/master', '--tags')
if tags_only:
tag_cmd += ('--abbrev=0',)
else:
tag_cmd += ('--exa... | Updates a repository to the tip of `master`. If the repository cannot
be updated because a hook that is configured does not exist in `master`,
this raises a RepositoryCannotBeUpdatedError
Args:
repo_config - A config for a repository | juraj-google-style |
def to_string(self, format_, fps=None, **kwargs):
fp = io.StringIO()
self.to_file(fp, format_, fps=fps, **kwargs)
return fp.getvalue() | Get subtitle file as a string.
See :meth:`SSAFile.save()` for full description.
Returns:
str | codesearchnet |
def log_every_n(level, msg, n, *args):
count = _GetNextLogCountPerToken(_GetFileAndLine())
log_if(level, msg, not count % n, *args) | Log 'msg % args' at level 'level' once per 'n' times.
Logs the 1st call, (N+1)st call, (2N+1)st call, etc.
Not threadsafe.
Args:
level: The level at which to log.
msg: The message to be logged.
n: The number of times this should be called before it is logged.
*args: The args to be substituted into the msg. | github-repos |
def verify_controller_module(module):
required_attributes = ('create', 'destroy', 'MOBLY_CONTROLLER_CONFIG_NAME')
for attr in required_attributes:
if not hasattr(module, attr):
raise signals.ControllerError(
'Module %s missing required controller module attribute'
... | Verifies a module object follows the required interface for
controllers.
The interface is explained in the docstring of
`base_test.BaseTestClass.register_controller`.
Args:
module: An object that is a controller module. This is usually
imported with import statements or loaded by importlib.
Raises:
ControllerError: ... | juraj-google-style |
def _validate_ids(self, resource_ids):
for resource_id in resource_ids:
if self._id_regex.fullmatch(resource_id) is None:
LOGGER.debug('Invalid resource id requested: %s', resource_id)
raise _ResponseFailed(self._status.INVALID_ID) | Validates a list of ids, raising a ResponseFailed error if invalid.
Args:
resource_id (list of str): The ids to validate
Raises:
ResponseFailed: The id was invalid, and a status of INVALID_ID
will be sent with the response. | juraj-google-style |
def write(self, output_stream, kmip_version=enums.KMIPVersion.KMIP_1_0):
local_stream = BytearrayStream()
if self._cryptographic_parameters:
self._cryptographic_parameters.write(local_stream, kmip_version=kmip_version)
if self._initialization_vector:
self._initialization_vector.write(local_s... | Write the data encoding the DerivationParameters struct to a stream.
Args:
output_stream (stream): A data stream in which to encode object
data, supporting a write method; usually a BytearrayStream
object.
kmip_version (KMIPVersion): An enumeration defining the KMIP
version with which the object will be encoded. Optio... | codesearchnet |
def __init__(self, channel):
self.Capabilities = channel.unary_unary(
'/gnmi.gNMI/Capabilities',
request_serializer=gnmi__pb2.CapabilityRequest.SerializeToString,
response_deserializer=gnmi__pb2.CapabilityResponse.FromString,
)
self.Get = channel.unary_unary(
'/gnmi.... | Constructor.
Args:
channel: A grpc.Channel. | juraj-google-style |
def __init__(self, sql, module=None):
self._sql = sql
self._module = module | Initializes the SqlStatement.
Args:
sql: a string containing a SQL query with optional variable references.
module: if defined in a %%sql cell, the parent SqlModule object for the SqlStatement. | juraj-google-style |
def predict_dataset(self, x, **kwargs):
printout = kwargs.get("printout", None)
pred = []
res = []
x.columns = ["A", "B"]
for idx, row in x.iterrows():
a = scale(row['A'].reshape((len(row['A']), 1)))
b = scale(row['B'].reshape((len(row['B']), 1)))... | Generic dataset prediction function.
Runs the score independently on all pairs.
Args:
x (pandas.DataFrame): a CEPC format Dataframe.
kwargs (dict): additional arguments for the algorithms
Returns:
pandas.DataFrame: a Dataframe with the predictions. | juraj-google-style |
def inspect_network(self, net_id, verbose=None, scope=None):
params = {}
if verbose is not None:
if version_lt(self._version, '1.28'):
raise InvalidVersion('verbose was introduced in API 1.28')
params['verbose'] = verbose
if scope is not None:
... | Get detailed information about a network.
Args:
net_id (str): ID of network
verbose (bool): Show the service details across the cluster in
swarm mode.
scope (str): Filter the network by scope (``swarm``, ``global``
or ``local``). | juraj-google-style |
def from_string(species_string: str):
m = re.search(r"([A-Z][a-z]*)([0-9.]*)([+\-]*)(.*)", species_string)
if m:
sym = m.group(1)
if m.group(2) == "" and m.group(3) == "":
oxi = 0
else:
oxi = 1 if m.group(2) == "" else float(m.... | Returns a Dummy from a string representation.
Args:
species_string (str): A string representation of a dummy
species, e.g., "X2+", "X3+".
Returns:
A DummySpecie object.
Raises:
ValueError if species_string cannot be intepreted. | juraj-google-style |
def path_compute(
p: tcod.path.AStar, ox: int, oy: int, dx: int, dy: int
) -> bool:
return bool(lib.TCOD_path_compute(p._path_c, ox, oy, dx, dy)) | Find a path from (ox, oy) to (dx, dy). Return True if path is found.
Args:
p (AStar): An AStar instance.
ox (int): Starting x position.
oy (int): Starting y position.
dx (int): Destination x position.
dy (int): Destination y position.
Returns:
bool: True if a valid path was found. Otherwise False. | juraj-google-style |
def _example_from_complex_def(self, prop_spec):
if ('schema' not in prop_spec):
return [{}]
elif ('type' not in prop_spec['schema']):
definition_name = self.get_definition_name_from_ref(prop_spec['schema']['$ref'])
if self.build_one_definition_example(definition_name):
return... | Get an example from a property specification.
In case there is no "type" key in the root of the dictionary.
Args:
prop_spec: property specification you want an example of.
Returns:
An example. | codesearchnet |
def as_dict(self):
out = {}
for prop in self:
propval = getattr(self, prop)
if hasattr(propval, 'for_json'):
out[prop] = propval.for_json()
elif isinstance(propval, list):
out[prop] = [getattr(x, 'for_json', (lambda : x))() for x in propval]
elif isinstanc... | Return a dictionary containing the current values
of the object.
Returns:
(dict): The object represented as a dictionary | codesearchnet |
def list_documents(self, page_size=None):
(parent, _) = self._parent_info()
iterator = self._client._firestore_api.list_documents(parent, self.id, page_size=page_size, show_missing=True, metadata=self._client._rpc_metadata)
iterator.collection = self
iterator.item_to_value = _item_to_document_ref
re... | List all subdocuments of the current collection.
Args:
page_size (Optional[int]]): The maximum number of documents
in each page of results from this request. Non-positive values
are ignored. Defaults to a sensible value set by the API.
Returns:
Sequence[~.firestore_v1beta1.collection.DocumentReference]:
iterator of s... | codesearchnet |
def __init__(self, manager, obj_cls, _list):
self.manager = manager
self._obj_cls = obj_cls
self._list = _list | Creates an objects list from a GitlabList.
You should not create objects of this type, but use managers list()
methods instead.
Args:
manager: the RESTManager to attach to the objects
obj_cls: the class of the created objects
_list: the GitlabList holding the data | juraj-google-style |
def get_by_resource(self, resource_uri):
uri = self.URI + self.RESOURCES_PATH + '/' + resource_uri
return self._client.get(id_or_uri=uri) | Gets all the labels for the specified resource
Args:
resource_uri: The resource URI
Returns:
dict: Resource Labels | juraj-google-style |
def minimum(station_code):
temp = None
fin = None
try:
fin = open(('%s/%s' % (env.WEATHER_DATA_PATH, _basename(station_code, 'ddy'))))
except IOError:
logger.info('File not found')
download_extract(_eere_url(station_code))
fin = open(('%s/%s' % (env.WEATHER_DATA_PATH, _ba... | Extreme Minimum Design Temperature for a location.
Degrees in Celcius
Args:
station_code (str): Weather Station Code
Returns:
float degrees Celcius | codesearchnet |
def _url(self, url=None, parameters=None):
uri = (url or self._settings['url'])
if (url and self._settings['base_url']):
uri = ('%s/%s' % (self._settings['base_url'], url))
uri += '.json'
if parameters:
uri += ('?%s' % urllib.urlencode(parameters))
return uri | Build destination URL.
Kwargs:
url (str): Destination URL
parameters (dict): Additional GET parameters to append to the URL
Returns:
str. URL | codesearchnet |
def patchify_image(self, image: 'torch.Tensor', patch_size: Optional[Dict[str, int]]=None) -> 'torch.Tensor':
requires_backends(self, ['torch'])
patch_size = patch_size if patch_size is not None else self.patch_size
patch_height, patch_width = (patch_size['height'], patch_size['width'])
batch_size, chan... | Convert an image into a tensor of patches.
Args:
image (`torch.Tensor`):
Image to convert. Shape: [batch, channels, height, width]
patch_size (`Dict[str, int]`, *optional*, defaults to `self.patch_size`):
Dictionary in the format `{"height": int, "width": int}` specifying the size of the patches. | github-repos |
def create_template(self, s, provider_name=None):
if (provider_name is None):
provider_name = self.supported_providers[0]
return template_exception_handler((lambda : self.get_provider(provider_name).create_template(s)), self.error_context) | Creates a template from the given string based on the specified provider or the provider with
highest precedence.
Args:
s: The string to convert to a template.
provider_name: The name of the provider to use to create the template. | codesearchnet |
def ParseFileObject(self, parser_mediator, file_object):
evt_file = pyevt.file()
evt_file.set_ascii_codepage(parser_mediator.codepage)
try:
evt_file.open_file_object(file_object)
except IOError as exception:
parser_mediator.ProduceExtractionWarning(
'unable to open file with ... | Parses a Windows EventLog (EVT) file-like object.
Args:
parser_mediator (ParserMediator): mediates interactions between parsers
and other components, such as storage and dfvfs.
file_object (dfvfs.FileIO): a file-like object. | juraj-google-style |
async def ban_user(channel, user):
data = datatools.get_data()
server_id = channel.server.id
try:
await client.ban(user)
except discord.errors.Forbidden:
await client.send_typing(channel)
embed = ui_embed.error(channel, "Ban Error", "I do not have the permissions to ban th... | Bans a user from a server
Args:
channel: The channel to send the warning message in
user: The user to give the warning to | juraj-google-style |
def add_prefix(self, name, *args, **kwargs):
if os.path.exists(self.join(name)):
raise LagoPrefixAlreadyExistsError(name, self.path)
self.prefixes[name] = self.prefix_class(
self.join(name), *args, **kwargs
)
self.prefixes[name].initialize()
if s... | Adds a new prefix to the workdir.
Args:
name(str): Name of the new prefix to add
*args: args to pass along to the prefix constructor
*kwargs: kwargs to pass along to the prefix constructor
Returns:
The newly created prefix
Raises:
LagoPrefixAlreadyExistsError: if prefix name already exists in the
workdir | juraj-google-style |
def collective_dr_squared( self ):
return sum( np.square( sum( [ atom.dr for atom in self.atoms ] ) ) ) | Squared sum of total displacements for these atoms.
Args:
None
Returns:
(Float): The square of the summed total displacements for these atoms. | juraj-google-style |
def dedent(self, node, dirty=True):
if (node.id not in self._subitems):
return
del self._subitems[node.id]
node.super_list_item_id = None
node.parent_item = None
if dirty:
node.touch(True) | Dedent an item. Does nothing if the target is not indented under this item.
Args:
node (gkeepapi.node.ListItem): Item to dedent.
dirty (bool): Whether this node should be marked dirty. | codesearchnet |
def _get_data(filenames):
if filenames:
data = ""
for filename in filenames:
with open(filename, "rb") as f:
data += f.read()
else:
data = sys.stdin.read()
return data | Read data from file(s) or STDIN.
Args:
filenames (list): List of files to read to get data. If empty or
None, read from STDIN. | juraj-google-style |
def fit(self, train_events, test_events, n_epoch=1):
for e in train_events:
self.__validate(e)
self.rec.users[e.user.index]['known_items'].add(e.item.index)
self.item_buffer.append(e.item.index)
for e in test_events:
self.__vali... | Train a model using the first 30% positive events to avoid cold-start.
Evaluation of this batch training is done by using the next 20% positive events.
After the batch SGD training, the models are incrementally updated by using the 20% test events.
Args:
train_events (list of Event): Positive training events (0-30%).... | juraj-google-style |
def create_position_ids_from_input_ids(input_ids: tf.Tensor, padding_idx: int, past_key_values_length: Optional[int]=0) -> tf.Tensor:
mask = tf.cast(tf.math.not_equal(input_ids, padding_idx), dtype=tf.int32)
incremental_indices = (tf.math.cumsum(mask, axis=1) + past_key_values_length) * mask
return tf.cast(... | Replace non-padding symbols with their position numbers. Position numbers begin at padding_idx+1. Padding
symbols are ignored. This is modified from fairseq's `utils.make_positions`.
Args:
x: tf.Tensor x:
Returns: tf.Tensor | github-repos |
def _FormatMessages(self, format_string, short_format_string, event_values):
message_string = self._FormatMessage(format_string, event_values)
if short_format_string:
short_message_string = self._FormatMessage(short_format_string, event_values)
else:
short_message_string = message_string
... | Determines the formatted message strings.
Args:
format_string (str): message format string.
short_format_string (str): short message format string.
event_values (dict[str, object]): event values.
Returns:
tuple(str, str): formatted message string and short message string. | codesearchnet |
def object_download(self, bucket, key, start_offset=0, byte_count=None):
args = {'alt': 'media'}
headers = {}
if start_offset > 0 or byte_count is not None:
header = 'bytes=%d-' % start_offset
if byte_count is not None:
header += '%d' % byte_count
headers['Range'] = header
... | Reads the contents of an object as text.
Args:
bucket: the name of the bucket containing the object.
key: the key of the object to be read.
start_offset: the start offset of bytes to read.
byte_count: the number of bytes to read. If None, it reads to the end.
Returns:
The text content within the object.
Raises:
Except... | juraj-google-style |
def dayname(year, month, day):
legal_date(year, month, day)
yearday = (((month - 1) * 28) + day)
if isleap(((year + YEAR_EPOCH) - 1)):
dname = data.day_names_leap[(yearday - 1)]
else:
dname = data.day_names[(yearday - 1)]
return (MONTHS[(month - 1)], dname) | Give the name of the month and day for a given date.
Returns:
tuple month_name, day_name | codesearchnet |
def path_get_origin(p: tcod.path.AStar) -> Tuple[int, int]:
x = ffi.new("int *")
y = ffi.new("int *")
lib.TCOD_path_get_origin(p._path_c, x, y)
return x[0], y[0] | Get the current origin position.
This point moves when :any:`path_walk` returns the next x,y step.
Args:
p (AStar): An AStar instance.
Returns:
Tuple[int, int]: An (x, y) point. | juraj-google-style |
def get_storage_account(access_token, subscription_id, rgname, account_name):
endpoint = ''.join([get_rm_endpoint(), '/subscriptions/', subscription_id, '/resourcegroups/', rgname, '/providers/Microsoft.Storage/storageAccounts/', account_name, '?api-version=', STORAGE_API])
return do_get(endpoint, access_token) | Get the properties for the named storage account.
Args:
access_token (str): A valid Azure authentication token.
subscription_id (str): Azure subscription id.
rgname (str): Azure resource group name.
account_name (str): Name of the new storage account.
Returns:
HTTP response. JSON body of storage account properties. | codesearchnet |
def info(self, user_id):
resp = self._rtm_client.get('v1/user.info?user_id={}'.format(user_id))
if resp.is_fail():
raise RTMServiceError('Failed to get user information', resp)
return resp.data['result'] | Gets user information by user id
Args:
user_id(int): the id of user
Returns:
User
Throws:
RTMServiceError when request failed | codesearchnet |
def safe_logit(p: Union[float, int]) -> Optional[float]:
r
if p > 1 or p < 0:
return None
if p == 1:
return float("inf")
if p == 0:
return float("-inf")
return math.log(p / (1 - p)) | r"""
Returns the logit (log odds) of its input probability
.. math::
\alpha = logit(p) = log(x / (1 - x))
Args:
p: :math:`p`
Returns:
:math:`\alpha`, or ``None`` if ``x`` is not in the range [0, 1]. | juraj-google-style |
def __init__(self, identifier=None, session_identifier=None):
super(TaskStart, self).__init__()
self.identifier = identifier
self.session_identifier = session_identifier
self.timestamp = None | Initializes a task start attribute container.
Args:
identifier (Optional[str]): unique identifier of the task.
The identifier should match that of the corresponding
task completion information.
session_identifier (Optional[str]): identifier of the session the task
is part of. | juraj-google-style |
def get_strip_metadata(self, catID):
self.logger.debug('Retrieving strip catalog metadata')
url = ('%(base_url)s/record/%(catID)s?includeRelationships=false' % {'base_url': self.base_url, 'catID': catID})
r = self.gbdx_connection.get(url)
if (r.status_code == 200):
return r.json()['properties']
... | Retrieves the strip catalog metadata given a cat ID.
Args:
catID (str): The source catalog ID from the platform catalog.
Returns:
metadata (dict): A metadata dictionary .
TODO: have this return a class object with interesting information exposed. | codesearchnet |
def convert_to_tensor_or_sparse_tensor(value, dtype=None, name=None):
if dtype is not None:
dtype = dtypes.as_dtype(dtype)
if isinstance(value, SparseTensorValue):
value = SparseTensor.from_value(value)
if isinstance(value, SparseTensor):
if dtype and (not dtype.is_compatible_with(va... | Converts value to a `SparseTensor` or `Tensor`.
Args:
value: A `SparseTensor`, `SparseTensorValue`, or an object whose type has a
registered `Tensor` conversion function.
dtype: Optional element type for the returned tensor. If missing, the type
is inferred from the type of `value`.
name: Optional name to use if a new... | github-repos |
def seat_slot(self):
if (self.type == EventType.TOUCH_FRAME):
raise AttributeError(_wrong_prop.format(self.type))
return self._libinput.libinput_event_touch_get_seat_slot(self._handle) | The seat slot of the touch event.
A seat slot is a non-negative seat wide unique identifier of an active
touch point.
Events from single touch devices will be represented as one individual
touch point per device.
For events not of type :attr:`~libinput.constant.EventType.TOUCH_DOWN`,
:attr:`~libinput.constant.EventT... | codesearchnet |
def reboot(self, target_mode=None, timeout_ms=None):
return self._simple_command('reboot', arg=target_mode,
timeout_ms=timeout_ms) | Reboots the device.
Args:
target_mode: Normal reboot when unspecified (or None). Can specify
other target modes, such as 'recovery' or 'bootloader'.
timeout_ms: Optional timeout in milliseconds to wait for a response.
Returns:
Usually the empty string. Depends on the bootloader and the target_mode. | juraj-google-style |
def export_mt_variants(variants, sample_id):
document_lines = []
for variant in variants:
line = []
position = variant.get('position')
change = '>'.join([variant.get('reference'), variant.get('alternative')])
line.append(position)
line.append(change)
line.append((... | Export mitochondrial variants for a case to create a MT excel report
Args:
variants(list): all MT variants for a case, sorted by position
sample_id(str) : the id of a sample within the case
Returns:
document_lines(list): list of lines to include in the document | codesearchnet |
def _get_connection(self, cluster):
if ('connection' not in cluster):
cluster['connection'] = self._connection_class(socketTimeoutMS=self._network_timeout, w=1, j=self.j, **cluster['params'])
return cluster['connection'] | Return a connection to a Cluster.
Return a MongoClient or a MongoReplicaSetClient for the given Cluster.
This is done in a lazy manner (if there is already a Client connected to
the Cluster, it is returned and no other Client is created).
Args:
cluster: A dict containing information about a cluster.
Returns:
A Mongo... | codesearchnet |
def graph_structure(self, x1x2):
with argscope([tf.layers.conv2d], activation=lambda x: tf.nn.leaky_relu(x, 0.1),
padding='valid', strides=2, kernel_size=3,
data_format='channels_first'), \
argscope([tf.layers.conv2d_transpose], padding='same', ac... | Architecture of FlowNetCorr in Figure 2 of FlowNet 1.0.
Args:
x: 2CHW. | juraj-google-style |
def get_flat_tensors_for_gradients(xs):
return nest.flatten([_get_tensors_for_gradient(x) for x in xs]) | Returns a flat list of Tensors that should be differentiated for `xs`.
Args:
xs: A list of `Tensor`s or `CompositeTensor`s.
Returns:
A flat list of `Tensor`s constructed from `xs`, where `Tensor` values are
left as-is, and `CompositeTensor`s are replaced with
`_get_tensors_for_gradient(x)`. | github-repos |
def _parse_directory(self):
if self._parser.has_option('storage', 'directory'):
directory = self._parser.get('storage', 'directory')
if (directory == CUSTOM_APPS_DIR):
raise ConfigError('{} cannot be used as a storage directory.'.format(CUSTOM_APPS_DIR))
else:
directory = MAC... | Parse the storage directory in the config.
Returns:
str | codesearchnet |
def _head_object(s3_conn, bucket, key):
try:
return s3_conn.head_object(Bucket=bucket, Key=key)
except botocore.exceptions.ClientError as e:
if (e.response['Error']['Code'] == '404'):
return None
else:
raise | Retrieve information about an object in S3 if it exists.
Args:
s3_conn (botocore.client.S3): S3 connection to use for operations.
bucket (str): name of the bucket containing the key.
key (str): name of the key to lookup.
Returns:
dict: S3 object information, or None if the object does not exist.
See the AWS documenta... | codesearchnet |
def delete_nsg(access_token, subscription_id, resource_group, nsg_name):
endpoint = ''.join([get_rm_endpoint(),
'/subscriptions/', subscription_id,
'/resourceGroups/', resource_group,
'/providers/Microsoft.Network/networkSecurityGroups/', ... | Delete network security group.
Args:
access_token (str): A valid Azure authentication token.
subscription_id (str): Azure subscription id.
resource_group (str): Azure resource group name.
nsg_name (str): Name of the NSG.
Returns:
HTTP response. | juraj-google-style |
def check_for_replay(name, names_to_seq_id, msg, config, context=None):
prev_seq_id = names_to_seq_id.get(name, None)
cur_seq_id = msg.get('seq_id', None)
if ((prev_seq_id is None) or (cur_seq_id is None)):
return [msg]
if (cur_seq_id <= prev_seq_id):
return []
if ((cur_seq_id == (pr... | Check to see if messages need to be replayed.
Args:
name (str): The consumer's name.
names_to_seq_id (dict): A dictionary that maps names to the last seen sequence ID.
msg (dict): The latest message that has arrived.
config (dict): A configuration dictionary. This dictionary should contain, at a
minimum, two keys. The... | codesearchnet |
def organize_models(self, outdir, force_rerun=False):
uniprot_to_swissmodel = defaultdict(list)
for u, models in self.all_models.items():
for m in models:
original_filename = '{}_{}_{}_{}'.format(m['from'], m['to'], m['template'], m['coordinate_id'])
... | Organize and rename SWISS-MODEL models to a single folder with a name containing template information.
Args:
outdir (str): New directory to copy renamed models to
force_rerun (bool): If models should be copied again even if they already exist
Returns:
dict: Dictionary of lists, UniProt IDs as the keys and new file pa... | juraj-google-style |
def build_model(self, token_encoder_model, sentence_encoder_model, trainable_embeddings=True, output_activation='softmax'):
if (not isinstance(token_encoder_model, SequenceEncoderBase)):
raise ValueError('`token_encoder_model` should be an instance of `{}`'.format(SequenceEncoderBase))
if (not isinstanc... | Builds a model that first encodes all words within sentences using `token_encoder_model`, followed by
`sentence_encoder_model`.
Args:
token_encoder_model: An instance of `SequenceEncoderBase` for encoding tokens within sentences. This model
will be applied across all sentences to create a sentence encoding.
sentence_e... | codesearchnet |
def RegisterHasher(cls, hasher_class):
    """Registers a hasher class.

    Hasher classes are identified by their lower-case name.

    Args:
        hasher_class (type): class object of the hasher.

    Raises:
        KeyError: if a hasher class is already set for the corresponding name.
    """
    name = hasher_class.NAME.lower()
    if name in cls._hasher_classes:
        raise KeyError('hasher class already set for name: {0:s}.'.format(hasher_class.NAME))
    cls._hasher_classes[name] = hasher_class
The hasher classes are identified based on their lower case name.
Args:
hasher_class (type): class object of the hasher.
Raises:
KeyError: if hasher class is already set for the corresponding name. | codesearchnet |
def Execute(self, message):
self.message = message
if message:
self.require_fastpoll = message.require_fastpoll
args = None
try:
if self.message.args_rdf_name:
if not self.in_rdfvalue:
raise RuntimeError("Did not expect arguments, got %s." %
... | This function parses the RDFValue from the server.
The Run method will be called with the specified RDFValue.
Args:
message: The GrrMessage that we are called to process.
Returns:
Upon return a callback will be called on the server to register
the end of the function and pass back exceptions.
Raises:
RuntimeErro... | juraj-google-style |
def get_dsub_version():
filename = os.path.join(os.path.dirname(__file__), 'dsub/_dsub_version.py')
with open(filename, 'r') as versionfile:
for line in versionfile:
if line.startswith('DSUB_VERSION ='):
version = line.partition('=')[2]
return version.strip().... | Get the dsub version out of the _dsub_version.py source file.
Setup.py should not import dsub version from dsub directly since ambiguity in
import order could lead to an old version of dsub setting the version number.
Parsing the file directly is simpler than using import tools (whose interface
varies between python 2... | codesearchnet |
def variable_product_dict(variabledict: dict[str, cfg.Variable], limit: int=DEEP_VARIABLE_LIMIT):
    """Take the Cartesian product of variables in the values of a dict.

    The product is taken using the dict keys as the indices into the input
    and output dicts, e.g.::

        variable_product_dict({"x": Variable(a, b), "y": Variable(c, d)})
        == [{"x": a, "y": c}, {"x": a, "y": d},
            {"x": b, "y": c}, {"x": b, "y": d}]

    Args:
        variabledict: A dict mapping names to Variables.
        limit: Complexity budget for the expansion.

    Returns:
        A list of dicts, one per combination of variable bindings.
    """
    ceiling = ComplexityLimit(limit)
    combinations = _variable_product_items(variabledict.items(), ceiling)
    return list(map(dict, combinations))
This Cartesian product is taken using the dict keys as the indices into the
input and output dicts. So:
variable_product_dict({"x": Variable(a, b), "y": Variable(c, d)})
==
[{"x": a, "y": c}, {"x": a, "y": d}, {"x": b, "y": c}, {"x": b, "y": d}]
This is ... | github-repos |
def __init__(self, resolver_context):
    """Initializes a SQLite blob file system.

    Args:
        resolver_context (Context): resolver context.
    """
    super(SQLiteBlobFileSystem, self).__init__(resolver_context)
    # Both start empty; they are assigned elsewhere (not visible here).
    self._number_of_rows = None
    self._file_object = None
Args:
resolver_context (Context): resolver context. | juraj-google-style |
def list_storage_accounts_rg(access_token, subscription_id, rgname):
    """List the storage accounts in the specified resource group.

    Args:
        access_token (str): A valid Azure authentication token.
        subscription_id (str): Azure subscription id.
        rgname (str): Azure resource group name.

    Returns:
        HTTP response. JSON body list of storage accounts.
    """
    endpoint = (
        '{}/subscriptions/{}/resourcegroups/{}'
        '/providers/Microsoft.Storage/storageAccounts'
        '?api-version={}'
    ).format(get_rm_endpoint(), subscription_id, rgname, STORAGE_API)
    return do_get(endpoint, access_token)
Args:
access_token (str): A valid Azure authentication token.
subscription_id (str): Azure subscription id.
rgname (str): Azure resource group name.
Returns:
HTTP response. JSON body list of storage accounts. | codesearchnet |
def encode(self, sequence):
    """Encodes a ``sequence`` into a tensor of token indices.

    Unknown tokens map to ``self.unknown_index``; an EOS index is appended
    when ``self.append_eos`` is set.

    Args:
        sequence (str): String ``sequence`` to encode.

    Returns:
        torch.Tensor: Encoding of the ``sequence``.
    """
    tokens = self.tokenize(super().encode(sequence))
    indices = [self.stoi.get(token, self.unknown_index) for token in tokens]
    if self.append_eos:
        indices.append(self.eos_index)
    return torch.tensor(indices)
Args:
sequence (str): String ``sequence`` to encode.
Returns:
torch.Tensor: Encoding of the ``sequence``. | codesearchnet |
def get_object_id_from_graph(access_token=None):
if access_token is None:
access_token = get_graph_token_from_msi()
endpoint = 'https:
headers = {'Authorization': 'Bearer ' + access_token, 'Host': GRAPH_RESOURCE_HOST}
ret = requests.get(endpoint, headers=headers)
return ret.json()['id'... | Return the object ID for the Graph user who owns the access token.
Args:
access_token (str): A Microsoft Graph access token. (Not an Azure access token.)
If not provided, attempt to get it from MSI_ENDPOINT.
Returns:
An object ID string for a user or service principal. | juraj-google-style |
def verify(self):
if any(((not i) for i in (self.chat_uid, self.module_id))):
raise ValueError('Chat data is incomplete.')
if (not isinstance(self.chat_type, ChatType)):
raise ValueError('Invalid chat type.')
if (self.chat_type == ChatType.Group):
if any((((not isinstance(i, EFBChat)... | Verify the completeness of the data.
Raises:
ValueError: When this chat is invalid. | codesearchnet |
def create_input_data_based_on_hw_requirement(num_chip, max_unique_ids_per_partition, per_sc_vocab_size, per_sc_sample_count, num_minibatches_per_physical_sparse_core):
num_sc_per_chip = 4
num_physical_replica = num_chip * num_sc_per_chip
col_ids = []
row_ids = []
gains = []
smallest_num_divisio... | Create the coo tensor based on hardware requirements.
Args:
num_chip: number of chips in the tpu system.
max_unique_ids_per_partition: max unique ids per physical replica
per_sc_vocab_size: per sc shard of table size.
per_sc_sample_count: per sc sample count.
num_minibatches_per_physical_sparse_core: per sc minibatch ... | github-repos |
def execute(self, try_limit=5, try_interval=0.5, timeout=30):
    """Execute this query, retrying based on the supplied parameters.

    Keyword Args:
        try_limit (int): The number of times to retry the query.
        try_interval (float): The number of seconds to wait between tries.
        timeout (float): The maximum number of seconds to spend retrying.

    Returns:
        The transformed results of the query.
    """
    description = u'Executing {!r}'.format(self)
    promise = Promise(
        no_error(self._execute), description,
        try_limit=try_limit, try_interval=try_interval, timeout=timeout)
    return promise.fulfill()
Keyword Args:
try_limit (int): The number of times to retry the query.
try_interval (float): The number of seconds to wait between each try (float).
timeout (float): The maximum number of seconds to spend retrying (float).
Returns:
The transformed results... | codesearchnet |
class MaskFormerSwinPatchMerging(nn.Module):
def __init__(self, input_resolution: Tuple[int], dim: int, norm_layer: nn.Module=nn.LayerNorm) -> None:
super().__init__()
self.input_resolution = input_resolution
self.dim = dim
self.reduction = nn.Linear(4 * dim, 2 * dim, bias=False)
... | Patch Merging Layer.
Args:
input_resolution (`Tuple[int]`):
Resolution of input feature.
dim (`int`):
Number of input channels.
norm_layer (`nn.Module`, *optional*, defaults to `nn.LayerNorm`):
Normalization layer class. | github-repos |
def set_parent(self, node):
    """Attach this node to its parent.

    Args:
        node: Parent node, or ``None`` to detach the node from its
            previous parent.

    Note:
        The depth is kept in sync: 0 when detached, parent depth + 1
        otherwise.
    """
    self._parent = node
    self._depth = 0 if node is None else node.get_depth() + 1
Args:
node: Parent node.
Note:
``node`` can be ``None``. In that case, the node is detached from its previous parent. | juraj-google-style |
def _deserialize(cls, serialization):
return cls(*serialization) | Reconstructs a TypeSpec from a value returned by `serialize`.
Args:
serialization: A value returned by _serialize. In some contexts,
`namedtuple`s in `serialization` may not have the identical type that
was returned by `_serialize` (but its type will still be a `namedtuple`
type with the same type name and field name... | github-repos |
def set_invite_only(self, invite_only):
    """Set how the room can be joined.

    Args:
        invite_only (bool): If True, users will have to be invited to join
            the room. If False, anyone who knows the room link can join.

    Returns:
        True if successful, False if not.
    """
    rule = "invite" if invite_only else "public"
    try:
        self.client.api.set_join_rule(self.room_id, rule)
    except MatrixRequestError:
        return False
    self.invite_only = invite_only
    return True
Args:
invite_only(bool): If True, users will have to be invited to join
the room. If False, anyone who knows the room link can join.
Returns:
True if successful, False if not | juraj-google-style |
def _GetInstanceAndProjectAttributes(self, metadata_dict):
metadata_dict = (metadata_dict or {})
try:
instance_data = metadata_dict['instance']['attributes']
except KeyError:
instance_data = {}
self.logger.warning('Instance attributes were not found.')
try:
project_data =... | Get dictionaries for instance and project attributes.
Args:
metadata_dict: json, the deserialized contents of the metadata server.
Returns:
tuple, two dictionaries for instance and project attributes. | codesearchnet |
def write_temporary_file(content, prefix='', suffix=''):
    """Generate a temporary file with the given content.

    Args:
        content (str): file content (usually a script, Dockerfile, playbook
            or config file).
        prefix (str): the filename starts with this prefix (default: none).
        suffix (str): the filename ends with this suffix (default: none).

    Returns:
        str: name of the temporary file.

    Note:
        The caller is responsible for removing the file when done with it.
    """
    # Context manager guarantees the handle is closed even if writing fails
    # (the original leaked the handle on error); delete=False keeps the
    # file on disk after the handle is closed.
    with tempfile.NamedTemporaryFile(prefix=prefix, suffix=suffix,
                                     mode='w+t', delete=False) as temp:
        temp.writelines(content)
    return temp.name
Args:
content (str): file content (usually a script, Dockerfile, playbook or config file)
prefix (str): the filename starts with this prefix (default: no prefix)
suffix (str): the filename ends with this suffix (default: no suffix)
Returns:
str: name of the temporary file
No... | juraj-google-style |
def Exponential(cls,
mean: 'TensorFluent',
batch_size: Optional[int] = None) -> Tuple[Distribution, 'TensorFluent']:
rate = 1 / mean.tensor
dist = tf.distributions.Exponential(rate)
batch = mean.batch
if not batch and batch_size is not None:
t... | Returns a TensorFluent for the Exponential sampling op with given mean parameter.
Args:
mean: The mean parameter of the Exponential distribution.
batch_size: The size of the batch (optional).
Returns:
The Exponential distribution and a TensorFluent sample drawn from the distribution. | juraj-google-style |
def GetAnalyzerInstances(cls, analyzer_names):
    """Retrieves instances for all the specified analyzers.

    Args:
        analyzer_names (list[str]): names of the analyzers to retrieve.

    Returns:
        list[BaseAnalyzer]: analyzer instances.
    """
    # Set gives O(1) membership tests instead of scanning the name list for
    # every registered analyzer; the redundant iter() wrapper is dropped.
    wanted_names = set(analyzer_names)
    return [
        analyzer_class()
        for analyzer_name, analyzer_class in cls.GetAnalyzers()
        if analyzer_name in wanted_names]
Args:
analyzer_names (list[str]): names of the analyzers to retrieve.
Returns:
list[BaseAnalyzer]: analyzer instances. | juraj-google-style |
def str_to_v1_str(xml_str):
    """Convert an API v2 XML doc to a v1 XML doc.

    Removes elements that are only valid for v2 and changes the namespace
    to v1. If the doc is already v1, it is returned unchanged.

    Args:
        xml_str (str): API v2 XML doc, e.g. ``SystemMetadata v2``.

    Returns:
        str: API v1 XML doc, e.g. ``SystemMetadata v1``.
    """
    if str_is_v1(xml_str):
        return xml_str
    tree = str_to_etree(xml_str)
    strip_v2_elements(tree)
    etree_replace_namespace(tree, d1_common.types.dataoneTypes_v1.Namespace)
    return etree_to_str(tree)
Removes elements that are only valid for v2 and changes namespace to v1.
If doc is already v1, it is returned unchanged.
Args:
xml_str : str
API v2 XML doc. E.g.: ``SystemMetadata v2``.
Returns:
str : API v1 XML doc. E.g.: ``SystemMetadata v1``. | juraj-google-style |
def method(cache_name, key_prefix=None):
def decorator(func):
if ((func.__name__ in ['cause_repertoire', 'effect_repertoire']) and (not config.CACHE_REPERTOIRES)):
return func
@wraps(func)
def wrapper(obj, *args, **kwargs):
cache = getattr(obj, cache_name)
... | Caching decorator for object-level method caches.
Cache key generation is delegated to the cache.
Args:
cache_name (str): The name of the (already-instantiated) cache
on the decorated object which should be used to store results
of this method.
*key_prefix: A constant to use as part of the cache key in addition
to th... | codesearchnet |
def inflate_plugin(self, identifier, definition=None, cls=None):
    """Inflate a plugin from its identifier, definition and class.

    Args:
        identifier (str): the plugin identifier.
        definition (dict): the kwargs to instantiate the plugin with.
        cls (str): "provider", "checker", or None.

    Returns:
        Provider/Checker: instance of the plugin.
    """
    plugin_class = self.get_plugin(identifier, cls)
    kwargs = definition or {}
    return plugin_class(**kwargs)
Args:
identifier (str): the plugin identifier.
definition (dict): the kwargs to instantiate the plugin with.
cls (str): "provider", "checker", or None.
Returns:
Provider/Checker: instance of plugin. | juraj-google-style |
def __init__(self, params=None):
super().__init__()
if params:
if (params.get("MAGMOM") and isinstance(params["MAGMOM"][0], (int, float))) \
and (params.get("LSORBIT") or params.get("LNONCOLLINEAR")):
val = []
... | Creates an Incar object.
Args:
params (dict): A set of input parameters as a dictionary. | juraj-google-style |
def copy(self):
    """Copy the current Request object instance for side-effects purposes.

    Returns:
        pook.Request: shallow copy of the current Request instance, with
        its headers copied so mutations do not leak back.
    """
    clone = type(self)()
    clone.__dict__ = self.__dict__.copy()
    clone._headers = self.headers.copy()
    return clone
Returns:
pook.Request: copy of the current Request instance. | codesearchnet |
def __neg__(self: EventSetOrNode) -> EventSetOrNode:
    """Negates an [`EventSet`][temporian.EventSet] element-wise.

    Equivalent to multiplying every feature value by ``-1``.

    Returns:
        Negated EventSet.
    """
    # NOTE(review): import kept local as in the original — presumably to
    # avoid an import cycle; confirm before hoisting to module level.
    from temporian.core.operators.scalar import multiply_scalar as _multiply_scalar
    return _multiply_scalar(input=self, value=-1)
Example:
```python
>>> a = tp.event_set(
... timestamps=[1, 2],
... features={"M": [1, -5], "N": [-1.0, 5.5]},
... )
>>> -a
indexes: ...
'M': [-1 5]
'N': [ 1. -5.5]
...
```
Returns:
Negated EventSet. | github-repos |
def create_resource(self, function, args=None, kwargs=None):
    """Asynchronously creates a per-worker resource represented by a `RemoteValue`.

    Args:
      function: the resource function to be run remotely. It should be a
        `tf.function`, a concrete function or a Python function.
      args: positional arguments to be passed to the function.
      kwargs: keyword arguments to be passed to the function.

    Returns:
      Whatever `_register_and_schedule_resource_closure` returns for the
      scheduled closure (per the surrounding docs, a `RemoteValue`).
    """
    # Wrap the call in a closure bound to the cluster's cancellation manager
    # so it can be scheduled (and cancelled) like other remote work.
    closure = ResourceClosure(function, self._cluster.resource_cancellation_mgr, args=args, kwargs=kwargs)
    return self._register_and_schedule_resource_closure(closure)
Args:
function: the resource function to be run remotely. It should be a
`tf.function`, a concrete function or a Python function.
args: positional arguments to be passed to the function.
kwargs: keyword arguments to be passed to the function.... | github-repos |
def update(self, **kwargs):
    """Updates the matching objects with the specified field values.

    Note:
        Post/pre save hooks and signals will NOT be triggered. Unlike RDBMS
        systems, this makes an individual save call per object, so it is a
        convenience utility rather than a performance enhancement.

    Keyword Args:
        **kwargs: Fields with their new values, forwarded unchanged to each
            model's ``_load_data``.

    Returns:
        int: Number of objects updated.
    """
    # NOTE(review): the original assigned kwargs.get('simple_update', True)
    # to an unused local; it is dropped here since kwargs (including any
    # 'simple_update' key) is forwarded to _load_data unchanged.
    no_of_updates = 0
    for model in self:
        no_of_updates += 1
        model._load_data(kwargs)
        model.save(internal=True)
    return no_of_updates
Note:
Post/pre save hooks and signals will NOT triggered.
Unlike RDBMS systems, this method makes individual save calls
to backend DB store. So this is exists as more of a comfortable
utility method and not a performance enhancement.
Keyword Args:
\*\*kwargs: Fields... | codesearchnet |
def __init__(self, campfire, id):
    """Initialize the room and load its data.

    Args:
        campfire (:class:`Campfire`): Campfire instance.
        id: Room ID passed to ``_load``. (NOTE(review): the original
            docstring labeled this parameter "password"; the code treats it
            as the room id. Also shadows the ``id`` builtin.)
    """
    super(Room, self).__init__(campfire)
    self._load(id)
Args:
campfire (:class:`Campfire`): Campfire instance
password (str): Room ID | juraj-google-style |
def get_edge_by_index(self, source_index: int, target_index: int) -> Optional[Edge]:
    """Returns the edge connecting the nodes with the given indices, if any.

    Edges are stored under a single ``(source, target)`` key, so both
    orientations are probed.

    Arguments:
        source_index (int): The index of one of the endpoints of the edge.
        target_index (int): The index of the other endpoint of the edge.

    Returns:
        The edge connecting the nodes with the specified indices,
        or ``None`` if no such edge exists.
    """
    for key in ((source_index, target_index), (target_index, source_index)):
        edge = self._edges.get(key)
        if edge is not None:
            return edge
    return None
Arguments:
source_index (int): The index of one of the endpoints of queried edge.
target_index (int): The index of the other endpoint of the queried edge.
Returns:
The edge connecting the nodes with the specified indices
or `None... | juraj-google-style |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.