code
stringlengths
20
4.93k
docstring
stringlengths
33
1.27k
source
stringclasses
3 values
def patch(self, payload, append_to_arrays=True): if (not isinstance(payload, dict)): raise ValueError("The 'payload' parameter must be provided a dictionary object.") payload = self.__class__.set_id_in_fkeys(payload) if append_to_arrays: for key in payload: val = payload[key] ...
Patches current record and udpates the current instance's 'attrs' attribute to reflect the new changes. Args: payload - hash. This will be JSON-formatted prior to sending the request. Returns: `dict`. The JSON formatted response. Raises: `requests.exceptions.HTTPError`: The status code is not ok.
codesearchnet
def write(self, ostream, kmip_version=enums.KMIPVersion.KMIP_1_0): tstream = BytearrayStream() self.application_namespace.write(tstream, kmip_version=kmip_version) self.application_data.write(tstream, kmip_version=kmip_version) self.length = tstream.length() super(Appl...
Write the data encoding the ApplicationSpecificInformation object to a stream. Args: ostream (Stream): A data stream in which to encode object data, supporting a write method; usually a BytearrayStream object. kmip_version (KMIPVersion): An enumeration defining the KMIP version with which the object will be encoded. O...
juraj-google-style
def add_annotation(self, state_or_vector, text, **kwargs): if (isinstance(state_or_vector, (list, np.ndarray, tuple)) and (len(state_or_vector) == 3)): vec = state_or_vector else: raise Exception(('Position needs to be specified by a qubit ' + 'state or a 3D vector.')) self.annotations.appen...
Add a text or LaTeX annotation to Bloch sphere, parametrized by a qubit state or a vector. Args: state_or_vector (array_like): Position for the annotation. Qobj of a qubit or a vector of 3 elements. text (str): Annotation text. You can use LaTeX, but remember to use raw string e.g. r"$\\langle x \\rangle$" or escape b...
codesearchnet
def put(self, resource_json: Dict[str, Any], parent_bundle: Optional[Dict[str, Any]]=None) -> None: if parent_bundle is None: self.resources_by_uri[resource_json['url']] = resource_json else: self.resources_by_uri[resource_json['url']] = parent_bundle
Puts the given resource into this collection. Adds the resource represented by `resource_json` found inside `parent_bundle` into this collection for subsequent lookup via the Get method. `parent_bundle` may be None if `resource_json` is not located inside a bundle. Args: resource_json: The JSON object representing th...
github-repos
def extract_images(self, f): print('Extracting', f.name) with gzip.GzipFile(fileobj=f) as bytestream: magic = self._read32(bytestream) if magic != 2051: raise ValueError('Invalid magic number %d in MNIST image file: %s' % ...
Extract the images into a 4D uint8 numpy array [index, y, x, depth]. Args: f: A file object that can be passed into a gzip reader. Returns: data: A 4D unit8 numpy array [index, y, x, depth]. Raises: ValueError: If the bytestream does not start with 2051.
juraj-google-style
def get_stream_action_type(stream_arn): stream_type_map = {'kinesis': awacs.kinesis.Action, 'dynamodb': awacs.dynamodb.Action} stream_type = stream_arn.split(':')[2] try: return stream_type_map[stream_type] except KeyError: raise ValueError(("Invalid stream type '%s' in arn '%s'" % (stre...
Returns the awacs Action for a stream type given an arn Args: stream_arn (str): The Arn of the stream. Returns: :class:`awacs.aws.Action`: The appropriate stream type awacs Action class Raises: ValueError: If the stream type doesn't match kinesis or dynamodb.
codesearchnet
def __fill_buffer(self, size=0): read_size = min(max(size, self.__buffer_size), MAX_BLOB_FETCH_SIZE) self.__buffer = fetch_data(self.__blob_key, self.__position, ((self.__position + read_size) - 1)) self.__buffer_position = 0 self.__eof = (len(self.__buffer) < read_size)
Fills the internal buffer. Args: size: Number of bytes to read. Will be clamped to [self.__buffer_size, MAX_BLOB_FETCH_SIZE].
codesearchnet
def _peek(self, chars=1): line = self._socket.recv(chars, socket.MSG_PEEK) logger.debug('Server sent (peek): ' + line.rstrip()) return line
Peek at the data in the server response. Peeking should only be done when the response can be predicted. Make sure that the socket will not block by requesting too much data from it while peeking. Args: chars -- the number of characters to peek.
juraj-google-style
def subgroup_tile(cls, tile_assignment, subgroup_modes): if not isinstance(tile_assignment, _np.ndarray): raise TypeError('SubgroupTile assignment must be of type np.ndarray') if not isinstance(subgroup_modes, list): raise TypeError('subgroup_modes in subgroup manual must be of type list') i...
Returns a subgroup manual sharding attribute. This is similar to tile(), but tile_assignment has one or more dimension than the tensor, and subgroup_modes define the sharding types in the last dimensions of tile_assignment. Args: tile_assignment: An np.ndarray describing the topology of the tiling and which device wi...
github-repos
def all_tokens(self, delimiter=' '): tokens = set() for label in self: tokens = tokens.union(set(label.tokenized(delimiter=delimiter))) return tokens
Return a list of all tokens occurring in the label-list. Args: delimiter (str): The delimiter used to split labels into tokens (see :meth:`audiomate.annotations.Label.tokenized`). Returns: :class:`set`: A set of distinct tokens.
codesearchnet
def initialize(self): return self._initializer
Initialize underlying iterators. Returns: A list of any initializer ops that should be run.
github-repos
def residual_block_v1(x, filters, kernel_size=3, stride=1, conv_shortcut=True, name=None): if backend.image_data_format() == 'channels_last': bn_axis = 3 else: bn_axis = 1 if conv_shortcut: shortcut = layers.Conv2D(4 * filters, 1, strides=stride, name=name + '_0_conv')(x) sho...
A residual block for ResNet*_v1. Args: x: Input tensor. filters: No of filters in the bottleneck layer. kernel_size: Kernel size of the bottleneck layer. Defaults to `3`. stride: Stride of the first layer. Defaults to `1`. conv_shortcut: Use convolution shortcut if `True`, otherwise use identity shortcut. Defaults to ...
github-repos
def reorder_resources(self, resource_ids, hxl_update=True): dataset_id = self.data.get('id') if (not dataset_id): raise HDXError('Dataset has no id! It must be read, created or updated first.') data = {'id': dataset_id, 'order': resource_ids} self._write_to_hdx('reorder', data, 'package_id') ...
Reorder resources in dataset according to provided list. If only some resource ids are supplied then these are assumed to be first and the other resources will stay in their original order. Args: resource_ids (List[str]): List of resource ids hxl_update (bool): Whether to call package_hxl_update. Defaults to True. Re...
codesearchnet
def merge_and_fit(self, segment): self.points = sort_segment_points(self.points, segment.points) return self
Merges another segment with this one, ordering the points based on a distance heuristic Args: segment (:obj:`Segment`): Segment to merge with Returns: :obj:`Segment`: self
juraj-google-style
def remove_item(self, item): for idx, _item in enumerate(self.items): if item == _item: del self.items[idx] return True return False
Remove the specified item from the menu. Args: item (MenuItem): the item to be removed. Returns: bool: True if the item was removed; False otherwise.
juraj-google-style
def __expand_meta_datas(meta_datas, meta_datas_expanded): if isinstance(meta_datas, dict): meta_datas_expanded.append(meta_datas) elif isinstance(meta_datas, list): for meta_data in meta_datas: __expand_meta_datas(meta_data, meta_datas_expanded)
expand meta_datas to one level Args: meta_datas (dict/list): maybe in nested format Returns: list: expanded list in one level Examples: >>> meta_datas = [ [ dict1, dict2 ], dict3 ] >>> meta_datas_expanded = [] >>> __expand_meta_datas(meta_datas, meta_datas_expanded) >>> print(meta_datas_expanded) [dict1, dict2, dict...
codesearchnet
def UpdateChainAndProcess(self, parser_mediator, registry_key, **kwargs): parser_mediator.AppendToParserChain(self) try: self.Process(parser_mediator, registry_key, **kwargs) finally: parser_mediator.PopFromParserChain()
Updates the parser chain and processes a Windows Registry key or value. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. registry_key (dfwinreg.WinRegistryKey): Windows Registry key. Raises: ValueError: If the Windows Registry key is not se...
codesearchnet
def draw(self, filename, color=True): verify_dependencies(['pgv']) if not hasattr(self, '_results'): raise RuntimeError("Graph cannot be drawn before it is executed. " "Try calling run() first.") g = pgv.AGraph(directed=True) g.node_at...
Render a plot of the graph via pygraphviz. Args: filename (str): Path to save the generated image to. color (bool): If True, will color graph nodes based on their type, otherwise will draw a black-and-white graph.
juraj-google-style
def update_script(self, script_body): uri = "{}/script".format(self.data['uri']) return self._helper.update(script_body, uri=uri)
Updates the configuration script of the enclosure-group with the specified URI. Args: id_or_uri: Resource id or resource uri. script_body: Configuration script. Returns: dict: Updated enclosure group.
juraj-google-style
def map(self, map_fn: Callable[..., _Tout], *trees: Tree[_Tin], is_leaf: Optional[LeafFn]=None) -> Tree[_Tout]: return self.backend.map(map_fn, *trees, is_leaf=is_leaf)
Same as `tree.map_structure`. Args: map_fn: Worker function *trees: Nested input to pass to the `map_fn` is_leaf: Don't recurse into leaf if `is_leaf(node)` is `True` Returns: The nested structure after `map_fn` has been applied.
github-repos
def fit_effective_mass(distances, energies, parabolic=True): if parabolic: fit = np.polyfit(distances, energies, 2) c = (2 * fit[0]) else: def f(x, alpha, d): top = (np.sqrt(((((4 * alpha) * d) * (x ** 2)) + 1)) - 1) bot = (2 * alpha) return (top / bo...
Fit the effective masses using either a parabolic or nonparabolic fit. Args: distances (:obj:`numpy.ndarray`): The x-distances between k-points in reciprocal Angstroms, normalised to the band extrema. energies (:obj:`numpy.ndarray`): The band eigenvalues normalised to the eigenvalue of the band extrema. parabolic (:ob...
codesearchnet
def _GetAttributeContainerByIndex(self, container_type, index): sequence_number = (index + 1) query = 'SELECT _data FROM {0:s} WHERE rowid = {1:d}'.format(container_type, sequence_number) try: self._cursor.execute(query) except sqlite3.OperationalError as exception: raise IOError('Unable...
Retrieves a specific attribute container. Args: container_type (str): attribute container type. index (int): attribute container index. Returns: AttributeContainer: attribute container or None if not available. Raises: IOError: when there is an error querying the storage file. OSError: when there is an error queryin...
codesearchnet
def _runDecodeProtoTests(self, fields, case_sizes, batch_shape, batch, message_type, message_format, sanitize, force_disordered=False): if force_disordered: assert not sanitize extra_fields = test_example_pb2.ExtraFields() extra_fields.string_value = 'IGNORE ME' extra_fields.bool_val...
Run decode tests on a batch of messages. Args: fields: list of test_example_pb2.FieldSpec (types and expected values) case_sizes: expected sizes array batch_shape: the shape of the input tensor of serialized messages batch: list of serialized messages message_type: descriptor name for messages message_format: format o...
github-repos
def ReadManyFromPath(filepath): with io.open(filepath, mode="r", encoding="utf-8") as filedesc: return ReadManyFromFile(filedesc)
Reads a Python object stored in a specified YAML file. Args: filepath: A filepath to the YAML file. Returns: A Python data structure corresponding to the YAML in the given file.
juraj-google-style
def console_print(con: tcod.console.Console, x: int, y: int, fmt: str) -> None: lib.TCOD_console_printf(_console(con), x, y, _fmt(fmt))
Print a color formatted string on a console. Args: con (Console): Any Console instance. x (int): Character x position from the left. y (int): Character y position from the top. fmt (AnyStr): A unicode or bytes string optionaly using color codes. .. deprecated:: 8.5 Use :any:`Console.print_` instead.
juraj-google-style
def set_clbit(self, clbit, element): self.clbit_layer[self.cregs.index(clbit)] = element
Sets the clbit to the element Args: clbit (cbit): Element of self.cregs. element (DrawElement): Element to set in the clbit
juraj-google-style
def auto_shard_dataset(dataset, num_shards, index, num_replicas_in_sync=None): if isinstance(dataset, distribute_types.DistributedDatasetInterface): return dataset.auto_shard(num_shards, index) if dataset.options().experimental_distribute.auto_shard_policy != AutoShardPolicy.OFF: if num_replicas...
Shard the input pipeline by sharding the underlying list of files. Args: dataset: A `tf.data.Dataset` instance, typically the result of a bunch of dataset transformations. num_shards: A `tf.int64` scalar `tf.Tensor`, representing the number of shards operating in parallel. Same usage as in `tf.data.Dataset.shard`. ind...
github-repos
def _maybe_download_corpora(tmp_dir, dataset_split): cnn_filename = "cnn_stories.tgz" cnn_finalpath = os.path.join(tmp_dir, "cnn/stories/") dailymail_filename = "dailymail_stories.tgz" dailymail_finalpath = os.path.join(tmp_dir, "dailymail/stories/") if not tf.gfile.Exists(cnn_finalpath): cnn_file = ge...
Download corpora if necessary and unzip them. Args: tmp_dir: directory containing dataset. dataset_split: whether we're in train/dev/test mode. Returns: List of all files generated and path to file containing train/dev/test split info.
juraj-google-style
def AddAnalysisReport(self, analysis_report): self._RaiseIfNotWritable() analysis_report = self._PrepareAttributeContainer(analysis_report) self.analysis_reports.append(analysis_report)
Adds an analysis report. Args: analysis_report (AnalysisReport): analysis report. Raises: IOError: when the storage writer is closed. OSError: when the storage writer is closed.
juraj-google-style
def CompileFilter(self, filter_expression): filter_parser = pfilter.BaseParser(filter_expression).Parse() matcher = filter_parser.Compile(pfilter.PlasoAttributeFilterImplementation) self._filter_expression = filter_expression self._matcher = matcher
Compiles the filter expression. The filter expression contains an object filter expression. Args: filter_expression (str): filter expression. Raises: ParseError: if the filter expression cannot be parsed.
codesearchnet
def history(self, image): res = self._get(self._url('/images/{0}/history', image)) return self._result(res, True)
Show the history of an image. Args: image (str): The image to show history for Returns: (str): The history of the image Raises: :py:class:`docker.errors.APIError` If the server returns an error.
codesearchnet
def run(self, data, max_epochs=1): self.state = State(dataloader=data, epoch=0, max_epochs=max_epochs, metrics={}) try: self._logger.info("Engine run starting with max_epochs={}.".format(max_epochs)) start_time = time.time() self._fire_event(Events.STARTED)...
Runs the process_function over the passed data. Args: data (Iterable): Collection of batches allowing repeated iteration (e.g., list or `DataLoader`). max_epochs (int, optional): max epochs to run for (default: 1). Returns: State: output state.
juraj-google-style
def union_of_bboxes(height, width, bboxes, erosion_rate=0.0, to_int=False): x1, y1 = width, height x2, y2 = 0, 0 for b in bboxes: w, h = b[2] - b[0], b[3] - b[1] lim_x1, lim_y1 = b[0] + erosion_rate * w, b[1] + erosion_rate * h lim_x2, lim_y2 = b[2] - erosion_rate * w, b[3] - er...
Calculate union of bounding boxes. Args: height (float): Height of image or space. width (float): Width of image or space. bboxes (list): List like bounding boxes. Format is `[x_min, y_min, x_max, y_max]`. erosion_rate (float): How much each bounding box can be shrinked, useful for erosive cropping. Set this in range ...
juraj-google-style
def register_proto_function(collection_name, proto_type=None, to_proto=None, from_proto=None) -> None: if to_proto and (not callable(to_proto)): raise TypeError('to_proto must be callable.') if from_proto and (not callable(from_proto)): raise TypeError('from_proto must be callable.') _proto_...
Registers `to_proto` and `from_proto` functions for collection_name. `to_proto` function converts a Python object to the corresponding protocol buffer, and returns the protocol buffer. `from_proto` function converts protocol buffer into a Python object, and returns the object.. Args: collection_name: Name of the col...
github-repos
def _check_root_tag(self, root): supported = self.supported_tags() if (root.tag in supported): return error = 'Document root element ({0}) not one of ({1})' raise UnsupportedRootElementError(message=error.format(root.tag, supported), expected=supported, found=root.tag)
Check that the XML element tree has a supported root element. Args: root (etree.Element) Raises: UnsupportedRootElementError
codesearchnet
def get_client(self, name): mech = self.get(name) return mech if isinstance(mech, ClientMechanism) else None
Like :meth:`.get`, but only mechanisms inheriting :class:`ClientMechanism` will be returned. Args: name: The SASL mechanism name. Returns: The mechanism object or ``None``
juraj-google-style
def check_mailfy(self, query, kwargs={}): data = self.launchQueryForMode(query=query, mode="mailfy") if self._somethingFound(data, mode="mailfy"): return data return None
Verifying a mailfy query in this platform. This might be redefined in any class inheriting from Platform. The only condition is that any of this should return a dictionary as defined. Args: ----- query: The element to be searched. kwargs: Dictionary with extra parameters. Just in case. Return: ------- Returns the co...
juraj-google-style
def _run(broker, graph=None, root=None, context=None, inventory=None): if (not root): context = (context or HostContext) broker[context] = context() return dr.run(graph, broker=broker) if os.path.isdir(root): return process_dir(broker, root, graph, context, inventory=inventory) ...
run is a general interface that is meant for stand alone scripts to use when executing insights components. Args: root (str): None will causes a host collection in which command and file specs are run. A directory or archive path will cause collection from the directory or archive, and only file type specs or those th...
codesearchnet
def get_params_and_defaults(param_list, db): return [[p, d] for p, d in db.get_all_values_of_all_params().items()]
Deduce [parameter, default] pairs from simulations available in the db. Args: param_list (list): List of parameters to query for. db (DatabaseManager): Database where to query for defaults.
juraj-google-style
def _dedup_strings(device_strs): new_device_strs = [] for device_str, vals in itertools.groupby(device_strs): num = len(list(vals)) if num == 1: new_device_strs.append(device_str) else: new_device_strs.append('%s (x%d)' % (device_str, num)) return new_device_s...
Groups together consecutive identical strings. For example, given: ['GPU 1', 'GPU 2', 'GPU 2', 'GPU 3', 'GPU 3', 'GPU 3'] This function returns: ['GPU 1', 'GPU 2 (x2)', 'GPU 3 (x3)'] Args: device_strs: A list of strings, each representing a device. Returns: A copy of the input, but identical consecutive strings are ...
github-repos
def add_skip_connection(self, u, v, connection_type): if connection_type not in [self.CONCAT_CONNECT, self.ADD_CONNECT]: raise ValueError( "connection_type should be NetworkDescriptor.CONCAT_CONNECT " "or NetworkDescriptor.ADD_CONNECT." ) ...
Add a skip-connection to the descriptor. Args: u: Number of convolutional layers before the starting point. v: Number of convolutional layers before the ending point. connection_type: Must be either CONCAT_CONNECT or ADD_CONNECT.
juraj-google-style
def _send(self, line): if not line.endswith('\r\n'): if line.endswith('\n'): logger.debug('Fixing bare LF before sending data to socket') line = line[0:-1] + '\r\n' else: logger.debug( 'Fixing missing CRLF befor...
Write a line of data to the server. Args: line -- A single line of data to write to the socket.
juraj-google-style
def get_paths(self, key): final_paths = [] if key in self.__cli: paths = self.__cli[key] or [] from_conf = False else: paths = self.__config.get(key) or [] from_conf = True for path in flatten_list(paths): final_path ...
Same as `ConfigParser.get_path` for a list of paths. Args: key: str, the key to lookup the paths with Returns: list: The paths.
juraj-google-style
class PerceiverMultimodalPostprocessor(nn.Module): def __init__(self, modalities: Mapping[str, PostprocessorType], input_is_dict: bool=False): super().__init__() self.modalities = nn.ModuleDict(modalities) self.input_is_dict = input_is_dict def forward(self, inputs: torch.Tensor, pos: ...
Multimodal postprocessing for Perceiver. Can be used to combine modality-specific postprocessors into a single postprocessor. Args: modalities (`Mapping[str, PostprocessorType]`): Dictionary mapping modality name to postprocessor class for that modality. input_is_dict (`bool`, *optional*, defaults to `False`): If True...
github-repos
def __init__(self, filename, events=None): self.filename = os.path.abspath(filename) self.stat = os.stat(self.filename) self.start_datetime, self.end_datetime = None, None self._events = [] self._events_by_baseclass = collections.defaultdict(list) if events is ...
List of ABINIT events. Args: filename: Name of the file events: List of Event objects
juraj-google-style
def auto_forward(auto=True): global __auto_forward_state prev = __auto_forward_state __auto_forward_state = auto yield __auto_forward_state = prev
Context for dynamic graph execution mode. Args: auto (bool): Whether forward computation is executed during a computation graph construction. Returns: bool
juraj-google-style
def get_value(self, tau): tau = np.asarray(tau) (alpha_real, beta_real, alpha_complex_real, alpha_complex_imag, beta_complex_real, beta_complex_imag) = self.coefficients k = get_kernel_value(alpha_real, beta_real, alpha_complex_real, alpha_complex_imag, beta_complex_real, beta_complex_imag, tau.flatten()) ...
Compute the value of the term for an array of lags Args: tau (array[...]): An array of lags where the term should be evaluated. Returns: The value of the term for each ``tau``. This will have the same shape as ``tau``.
codesearchnet
def _ParseHeader(self, format_type, value_data): data_type_map_name = self._HEADER_DATA_TYPE_MAP_NAMES.get(format_type, None) if (not data_type_map_name): raise errors.ParseError('Unsupported format type: {0:d}'.format(format_type)) data_type_map = self._GetDataTypeMap(data_type_map_name) try: ...
Parses the header. Args: format_type (int): format type. value_data (bytes): value data. Returns: AppCompatCacheHeader: header. Raises: ParseError: if the value data could not be parsed.
codesearchnet
def seek(self, offset: int, whence: Literal[0, 1, 2]=0) -> int:
Changes the current position of the file. Args: offset: Offset from the position to a reference point. whence: The reference point, with 0 meaning the beginning of the file, 1 meaning the current position, or 2 meaning the end of the file. Returns: The position from the beginning of the file.
github-repos
def clear_signature_defs(tflite_model): model = tflite_model if not isinstance(tflite_model, bytearray): model = bytearray(tflite_model) return signature_def_util.ClearSignatureDefs(model)
Clears SignatureDefs from the Metadata of a TfLite flatbuffer buffer. Args: tflite_model: TFLite model buffer to remove signature_defs. Returns: buffer: A TFLite model binary identical to model buffer with no SignatureDef metadata. Raises: ValueError: tflite_model buffer does not contain a valid TFLite model.
github-repos
def restore(self, state): self.storage.restore(state.get('storage')) dump_walker = state.get('dump_walker') if dump_walker is not None: dump_walker = self.storage.restore_walker(dump_walker) self.dump_walker = dump_walker self.next_id = state.get('next_id'...
Restore the state of this subsystem from a prior call to dump(). Calling restore must be properly sequenced with calls to other subsystems that include stream walkers so that their walkers are properly restored. Args: state (dict): The results of a prior call to dump().
juraj-google-style
def delete_user_role(self, user, role): self.project_service.set_auth(self._token_project) self.project_service.delete_user_role(user, role)
Remove role from given user. Args: user (string): User name. role (string): Role to remove. Raises: requests.HTTPError on failure.
codesearchnet
def _DefaultValueConstructorForField(field): if _IsMapField(field): return _GetInitializeDefaultForMap(field) if (field.label == _FieldDescriptor.LABEL_REPEATED): if (field.has_default_value and (field.default_value != [])): raise ValueError(('Repeated field default value not empty l...
Returns a function which returns a default value for a field. Args: field: FieldDescriptor object for this field. The returned function has one argument: message: Message instance containing this field, or a weakref proxy of same. That function in turn returns a default value for this field. The default value may r...
codesearchnet
def _DocPackageFromTop(self, packages, showprivate=False, showinh=False): appIndex = '' if not isinstance(packages, list): packages = [packages] if os.path.exists('content'): shutil.rmtree('content') os.makedirs('content') appIndex += r % ('API ...
Generates all of the documentation for given packages and appends new tocrees to the index. All documentation pages will be under the set relative path. Args: packages (list(module)): A package or list of packages that contain submodules to document showprivate (bool): A flag for whether or not to display private memb...
juraj-google-style
def _make_tensor_trace_fun(self, tensor_name, tensor_trace_order): def _print_tensor(tensor_name, num_elements, tensor, output_tensor): if self._parameters.is_brief_mode(): if tensor_name not in tensor_trace_order.tensorname_to_cache_idx: raise ValueError('Tensor %s wit...
Makes the tensor tracing function called by outside compilation. Args: tensor_name: name of the tensor being traced. tensor_trace_order: TensorTraceOrder object holding tensorname to id map. Returns: A function to be passed as the first argument to outside compilation. Raises: RuntimeError: If the trace mode is inval...
github-repos
def delete_metadata(self, resource, keys): self.metadata_service.set_auth(self._token_metadata) self.metadata_service.delete(resource, keys)
Deletes the given key-value pairs associated with the given resource. Will attempt to delete all key-value pairs even if some fail. Args: resource (intern.resource.boss.BossResource) keys (list) Raises: HTTPErrorList on failure.
codesearchnet
def returns_collection(return_type: FhirPathDataType) -> bool: return return_type and return_type.returns_collection()
Indicates if return_type will evaluate to a collection. Args: return_type: The data type to describe. Returns: True in the following circumstances - `return_type` represents an element with cardinality greater than one. - `return_type` represents an element with a cardinality less than or equal to one, but that eleme...
github-repos
def create(cls, endpoint_name, json_body, original_response): if endpoint_name == "property/value_report": return ValueReportResponse(endpoint_name, json_body, original_response) if endpoint_name == "property/rental_report": return RentalReportResponse(endpoint_name, j...
Factory for creating the correct type of Response based on the data. Args: endpoint_name (str) - The endpoint of the request, such as "property/value" json_body - The response body in json format. original_response (response object) - server response returned from an http request.
juraj-google-style
def podcasts(self, *, device_id=None): if (device_id is None): device_id = self.device_id podcast_list = [] for chunk in self.podcasts_iter(device_id=device_id, page_size=49995): podcast_list.extend(chunk) return podcast_list
Get a listing of subsribed podcast series. Paramaters: device_id (str, Optional): A mobile device ID. Default: Use ``device_id`` of the :class:`MobileClient` instance. Returns: list: Podcast series dict.
codesearchnet
def index_file(self, f, overwrite=False): if isinstance(f, six.string_types): f = self.layout.get_file(f) if ((f.path in self.file_index) and (not overwrite)): return if ('suffix' not in f.entities): return md = self._get_metadata(f.path) for (md_key, md_val) in md.items(): ...
Index metadata for the specified file. Args: f (BIDSFile, str): A BIDSFile or path to an indexed file. overwrite (bool): If True, forces reindexing of the file even if an entry already exists.
codesearchnet
def GetEventFormatter(self, event): data_type = getattr(event, 'data_type', None) if (not data_type): return None return formatters_manager.FormattersManager.GetFormatterObject(event.data_type)
Retrieves the event formatter for a specific event type. Args: event (EventObject): event. Returns: EventFormatter: event formatter or None.
codesearchnet
def getCard(self, name): cards = self.projectCards for card in cards: if card.name.upper() == name.upper(): return card return None
Retrieve card object for given card name. Args: name (str): Name of card to be retrieved. Returns: :class:`.ProjectCard` or None: Project card object. Will return None if the card is not available.
juraj-google-style
def add_payload(self, key, val, append=False): if append: self._payload.setdefault(key, []).append(val) else: self._payload[key] = val
Add a key value pair to payload for this request. Args: key (str): The payload key. val (str): The payload value. append (bool, default:False): Indicate whether the value should be appended or overwritten.
juraj-google-style
def assert_broadcastable(weights, values): with ops.name_scope(None, 'assert_broadcastable', (weights, values)) as scope: with ops.name_scope(None, 'weights', (weights,)) as weights_scope: weights = ops.convert_to_tensor(weights, name=weights_scope) weights_shape = array_ops.shape(we...
Asserts `weights` can be broadcast to `values`. In `tf.losses` and `tf.metrics`, we support limited weight broadcasting. We let weights be either scalar, or the same rank as the target values, with each dimension either 1, or the same as the corresponding values dimension. Args: weights: `Tensor` of weights. values: ...
github-repos
def _get_parser_call_method(self, parser_to_method): def inner_call(args=None, instance=None): 'Allows to call the method invoked from the command line or\n provided argument.\n\n Args:\n args: list of arguments to parse, defaults to command line\n argume...
Return the parser special method 'call' that handles sub-command calling. Args: parser_to_method: mapping of the parser registered name to the method it is linked to
codesearchnet
def forward(self, x): embeddings = self.embedding_convPxP(x).flatten(2) embeddings = nn.functional.pad(embeddings, (1, 0)) embeddings = embeddings.permute(0, 2, 1) batch_size, sequence_length, embedding_dim = embeddings.shape embeddings = embeddings + self.positional_encoding_1d(batch_size, sequence...
Forward pass Args: x (torch.Tensor - NCHW): Input feature tensor Returns: torch.Tensor - Transformer output embeddings of shape (batch_size, sequence_length, embedding_dim)
github-repos
def _parent_info(self): parent_doc = self.parent if (parent_doc is None): parent_path = _helpers.DOCUMENT_PATH_DELIMITER.join((self._client._database_string, 'documents')) else: parent_path = parent_doc._document_path expected_prefix = _helpers.DOCUMENT_PATH_DELIMITER.join((parent_path, ...
Get fully-qualified parent path and prefix for this collection. Returns: Tuple[str, str]: Pair of * the fully-qualified (with database and project) path to the parent of this collection (will either be the database path or a document path). * the prefix to a document in this collection.
codesearchnet
def build(X_df=None, y_df=None): if (X_df is None): (X_df, _) = load_data() if (y_df is None): (_, y_df) = load_data() features = get_contrib_features() mapper_X = ballet.feature.make_mapper(features) X = mapper_X.fit_transform(X_df) encoder_y = get_target_encoder() y = encod...
Build features and target Args: X_df (DataFrame): raw variables y_df (DataFrame): raw target Returns: dict with keys X_df, features, mapper_X, X, y_df, encoder_y, y
codesearchnet
def matmul_without_tf32(a, b, *args, **kwargs): if config.tensor_float_32_execution_enabled() and a.dtype == 'float32': a = math_ops.cast(a, 'float64') b = math_ops.cast(b, 'float64') ret = math_ops.matmul(a, b, *args, **kwargs) return math_ops.cast(ret, a.dtype) elif config.tens...
Run matmul but cast float32 inputs to float64 if TensorFloat-32 is enabled. This effectively runs matmul without TensorFloat-32. It should only be used in tests when verifying some other op or functions works correctly, e.g. to test `tf.linalg.sqrtm` by matrix multiplying the output of the op by itself. In such cases,...
github-repos
def output_sector_csv(self, csv_path, file_dict_key, out_path): csv_file = (csv_path + '{0}_{1}_{2}_{3}.csv'.format(file_dict_key, self.ensemble_name, self.member, self.run_date.strftime(self.date_format))) if exists(csv_file): csv_data = pd.read_csv(csv_file) if (self.inds is None): ...
Segment forecast tracks to only output data contained within a region in the CONUS, as defined by the mapfile. Args: csv_path(str): Path to the full CONUS csv file. file_dict_key(str): Dictionary key for the csv files, currently either 'track_step' or 'track_total' out_path (str): Path to output new segmented csv files...
codesearchnet
def slice_element_urls(element_definition: ElementDefinition) -> List[str]: result: List[str] = [] if proto_utils.field_is_set(element_definition, 'type'): type_refs: List[StructureDefinition] = proto_utils.get_value_at_field(element_definition, 'type') profile_lists = [cast(Any, t).profile for ...
Returns the list of profile urls for the given slice element. Args: element_definition: The `ElementDefinition` whose profile urls we are retrieving. Returns: A list of strings representing the element's profile urls.
github-repos
def _validate_alias_command_level(alias, command): alias_collision_table = AliasManager.build_collision_table([alias]) if (not alias_collision_table): return command_collision_table = AliasManager.build_collision_table([command]) alias_collision_levels = alias_collision_table.get(alias.split()[0...
Make sure that if the alias is a reserved command, the command that the alias points to in the command tree does not conflict in levels. e.g. 'dns' -> 'network dns' is valid because dns is a level 2 command and network dns starts at level 1. However, 'list' -> 'show' is not valid because list and show are both reserve...
codesearchnet
def from_versions(cls, versions): range = cls(None) range.bounds = [] for version in dedup(sorted(versions)): lower = _LowerBound(version, True) upper = _UpperBound(version, True) bound = _Bound(lower, upper) range.bounds.append(bound) ...
Create a range from a list of versions. This method creates a range that contains only the given versions and no other. Typically the range looks like (for eg) "==3|==4|==5.1". Args: versions: List of Version objects. Returns: `VersionRange` object.
juraj-google-style
def _check_sensor_platform_consistency(self, sensor): ref_sensor = SENSORS.get(self.platform, None) if ref_sensor and not sensor == ref_sensor: logger.error('Sensor-Platform mismatch: {} is not a payload ' 'of {}. Did you choose the correct reader?' ...
Make sure sensor and platform are consistent Args: sensor (str) : Sensor name from YAML dataset definition Raises: ValueError if they don't match
juraj-google-style
def compute(self, batch_values, accumulator=None):
    """Compute a step in this computation, returning a new accumulator.

    Abstract in this base class: subclasses implement the actual
    combining logic (this stub does nothing and returns None).

    Args:
        batch_values: The batch of values for this step.
        accumulator: Optional accumulator carrying state from previous
            steps; when provided, its data is combined with this batch.

    Returns:
        A new accumulator reflecting this step (None in this stub).
    """
    pass
Compute a step in this computation, returning a new accumulator. This method computes a step of the computation described by this Combiner. If an accumulator is passed, the data in that accumulator is also used; so compute(batch_values) results in f(batch_values), while compute(batch_values, accumulator) results in me...
github-repos
def pretty_print_config_to_json(self, configs):
    """Render a directory list document for services as a JSON string.

    Args:
        configs: Either a single dict or a list of dicts containing the
            service configurations to list.

    Returns:
        string: The directory list document as pretty-printed JSON with
        sorted keys.
    """
    directory_doc = self.get_directory_list_doc(configs)
    return json.dumps(
        directory_doc,
        sort_keys=True,
        indent=2,
        separators=(',', ': '),
    )
JSON string description of a protorpc.remote.Service in a discovery doc. Args: configs: Either a single dict or a list of dicts containing the service configurations to list. Returns: string, The directory list document as a JSON string.
juraj-google-style
def isclose(x1, x2, rtol=1e-05, atol=1e-08, equal_nan=False):
    """Return whether two tensors are element-wise almost equal.

    Args:
        x1: First input tensor.
        x2: Second input tensor.
        rtol: Relative tolerance.
        atol: Absolute tolerance.
        equal_nan: If `True`, element-wise NaNs are considered equal.

    Returns:
        Output boolean tensor.
    """
    inputs = (x1, x2)
    if not any_symbolic_tensors(inputs):
        return backend.numpy.isclose(x1, x2, rtol, atol, equal_nan)
    # Symbolic path: defer to the op's symbolic_call.
    op = Isclose(equal_nan=equal_nan)
    return op.symbolic_call(x1, x2, rtol, atol)
Return whether two tensors are element-wise almost equal. Args: x1: First input tensor. x2: Second input tensor. rtol: Relative tolerance. atol: Absolute tolerance. equal_nan: If `True`, element-wise NaNs are considered equal. Returns: Output boolean tensor.
github-repos
def _CompareFwdConv2D(self, tensor_in_sizes, filter_in_sizes, conv_strides, padding): x1 = np.random.rand(*tensor_in_sizes).astype(np.float32) x2 = np.random.rand(*filter_in_sizes).astype(np.float32) with self.cached_session(use_gpu=False): t1 = constant_op.constant(x1, shape=tensor_in_sizes) ...
Verifies that DeepConv2D and Conv2D produce the same values. Args: tensor_in_sizes: Input tensor dimensions in [batch, input_rows, input_cols, input_depth]. filter_in_sizes: Filter tensor dimensions in [kernel_rows, kernel_cols, input_depth, output_depth]. conv_strides: [row_stride, col_stride] for the convolution; pa...
github-repos
def trigger(self, event_name, *args, **kwargs):
    """Trigger an event on this context.

    Args:
        event_name (string): Name of the event to fire. Positional and
            keyword arguments are forwarded to each handler.

    Returns:
        bubbler.Event: The event instance after all handlers have run.
    """
    event = Event(event_name, self)
    event.trigger(*args, **kwargs)
    return event
Trigger an event on this context. Params: event_name (string): Event name to trigger Args and kwargs are passed to each handler - see the bubbler.Event class for more information. Returns: bubbler.Event: Event instance after execution of all handlers
juraj-google-style
def jsonRender(self, def_buf): try: ret_dict = SerialBlock() ret_dict[Field.Meter_Address] = self.getMeterAddress() for fld in def_buf: compare_fld = fld.upper() if ((not ('RESERVED' in compare_fld)) and (not ('CRC' in compare_fld))): ret_dict[str(fld)...
Translate the passed serial block into string only JSON. Args: def_buf (SerialBlock): Any :class:`~ekmmeters.SerialBlock` object. Returns: str: JSON rendering of meter record.
codesearchnet
def get_ethernet_networks(self):
    """Get the ethernet networks associated with this uplink set.

    Returns:
        list: Associated ethernet network resources; empty when no
        'networkUris' entry is present.
    """
    uris = self.data.get('networkUris')
    if not uris:
        return []
    return [self._ethernet_networks.get_by_uri(uri) for uri in uris]
Gets a list of associated ethernet networks of an uplink set. Args: id_or_uri: Can be either the uplink set id or the uplink set uri. Returns: list: Associated ethernet networks.
juraj-google-style
def ndtr(x, name='ndtr'):
    """Normal distribution function.

    Returns the area under the Gaussian probability density function,
    integrated from minus infinity to x.

    Args:
        x: `Tensor` of type `float32` or `float64`.
        name: Python string, name for the op scope.

    Returns:
        `Tensor` with the cumulative normal values for `x`.

    Raises:
        TypeError: If `x` is not float32 or float64.
    """
    with tf.name_scope(name):
        x = tf.convert_to_tensor(value=x, name='x')
        numpy_dtype = dtype_util.as_numpy_dtype(x.dtype)
        if numpy_dtype not in [np.float32, np.float64]:
            raise TypeError(
                'x.dtype=%s is not handled, see docstring for supported types.'
                % x.dtype)
        return _ndtr(x)
Normal distribution function. Returns the area under the Gaussian probability density function, integrated from minus infinity to x: ``` 1 / x ndtr(x) = ---------- | exp(-0.5 t**2) dt sqrt(2 pi) /-inf = 0.5 (1 + erf(x / sqrt(2))) = 0.5 erfc(x / sqrt(2)) ``` Args: x: `Tensor` of type `float32`, `float64`...
codesearchnet
def convert_item_to_command_line_arg(self, action, key, value): args = [] if action is None: command_line_key = \ self.get_command_line_key_for_unknown_config_file_setting(key) else: command_line_key = action.option_strings[-1] ...
Converts a config file or env var key + value to a list of commandline args to append to the commandline. Args: action: The argparse Action object for this setting, or None if this config file setting doesn't correspond to any defined configargparse arg. key: string (config file key or env var name) value: parsed valu...
juraj-google-style
def run(self, text):
    """Apply every registered pre-processor to ``text`` in order.

    Args:
        text (string): The input text.

    Returns:
        string: The text after all substitutions have been applied
        sequentially.
    """
    result = text
    for processor in self.pre_processors:
        result = processor.run(result)
    return result
Run each substitution on ``text``. Args: text (string): the input text. Returns: string: text after all substitutions have been sequentially applied.
juraj-google-style
def right_margin(self, margin):
    """Specify the right margin.

    Sends the printer escape sequence ESC 'Q' <margin>.

    Args:
        margin: The right margin, in character widths; must be in the
            range 1-255 and less than the media's width.

    Raises:
        RuntimeError: If ``margin`` is outside the range 1-255.
    """
    # Guard clause with a chained comparison; the original error text
    # referenced a nonexistent 'rightMargin' function name.
    if not 1 <= margin <= 255:
        raise RuntimeError('Invalid margin parameter in function right_margin')
    self.send(chr(27) + 'Q' + chr(margin))
Specify the right margin. Args: margin: The right margin, in character width, must be less than the media's width. Returns: None Raises: RuntimeError: Invalid margin parameter
juraj-google-style
def updateAccount(self, subject, person, vendorSpecific=None):
    """Update an account; see updateAccountResponse().

    Args:
        subject: Subject of the account to update.
        person: New person details for the account.
        vendorSpecific: Optional vendor-specific parameters.

    Returns:
        The boolean result parsed from the service response.
    """
    raw_response = self.updateAccountResponse(subject, person, vendorSpecific)
    return self._read_boolean_response(raw_response)
See Also: updateAccountResponse() Args: subject: person: vendorSpecific: Returns:
juraj-google-style
def get_box_field(self, box_key, field_key=None):
    """Get one field (or all fields) in a box.

    Args:
        box_key: Key for the pipeline box.
        field_key: Key for a single field; when None, all fields are
            requested.

    Returns:
        Status code and field dict (or list thereof) from the GET
        request.
    """
    # NOTE(review): the original flags this endpoint as unimplemented
    # before issuing the request; behavior preserved.
    self._raise_unimplemented_error()
    parts = [self.api_uri, self.boxes_suffix, box_key, self.fields_suffix]
    if field_key:
        parts.append(field_key)
    return self._req('get', '/'.join(parts))
Gets one/all field in a box Args: box_key key for pipeline field_key key for field (default: None i.e. ALL) returns status code, field dict or list thereof
juraj-google-style
def _compute_enlarge_labels(self, locator, base_index): base_index_type = type(base_index) locator_as_index = base_index_type(locator) nan_labels = locator_as_index.difference(base_index) common_labels = locator_as_index.intersection(base_index) if (len(common_labels) == 0): raise KeyError('...
Helper for _enlarge_axis, compute common labels and extra labels. Returns: nan_labels: The labels needs to be added
codesearchnet
def get(self, url): self._driver.get(url) if self.bot_diary: self.bot_diary.add_auto_entry( "I went on", target=url, take_screenshot=True ) if BROME_CONFIG['proxy_driver']['intercept_javascript_error']: ...
Navigate to a specific url This specific implementation inject a javascript script to intercept the javascript error Configurable with the "proxy_driver:intercept_javascript_error" config Args: url (str): the url to navigate to Returns: bool
juraj-google-style
def delete(self, id, **kwargs):
    """Delete an object on the server.

    Args:
        id: ID of the object to delete; None deletes at the base path.
        **kwargs: Extra options forwarded to the server request
            (e.g. sudo).

    Raises:
        GitlabAuthenticationError: If authentication is not correct.
        GitlabDeleteError: If the server cannot perform the request.
    """
    if id is None:
        target_path = self.path
    else:
        # Path-encode '/' in string IDs so they stay a single segment.
        obj_id = id if isinstance(id, int) else id.replace('/', '%2F')
        target_path = '%s/%s' % (self.path, obj_id)
    self.gitlab.http_delete(target_path, **kwargs)
Delete an object on the server. Args: id: ID of the object to delete **kwargs: Extra options to send to the server (e.g. sudo) Raises: GitlabAuthenticationError: If authentication is not correct GitlabDeleteError: If the server cannot perform the request
juraj-google-style
def add_property_orders(query_proto, *orders):
    """Add ordering constraints to the given datastore.Query proto.

    Each order is a property name string; a '-' prefix selects
    descending order, otherwise ascending is used.

    Args:
        query_proto: datastore.Query proto message, mutated in place.
        *orders: Property name strings, optionally prefixed with '-'.
    """
    for spec in orders:
        order_proto = query_proto.order.add()
        if spec[0] == '-':
            order_proto.direction = query_pb2.PropertyOrder.DESCENDING
            spec = spec[1:]
        else:
            order_proto.direction = query_pb2.PropertyOrder.ASCENDING
        order_proto.property.name = spec
Add ordering constraint for the given datastore.Query proto message. Args: query_proto: datastore.Query proto message. orders: list of property name strings; defaults to ascending order, set descending if prefixed by '-'. Usage: >>> add_property_orders(query_proto, 'foo') # sort by foo asc >>> add_property_orders...
juraj-google-style
def _send_notification(self, handle, value): value_len = len(value) value = bytes(value) payload = struct.pack("<BHB%ds" % value_len, 0xFF, handle, value_len, value) response = self._send_command(2, 5, payload) result, = unpack("<H", response.payload) if resul...
Send a notification to all connected clients on a characteristic Args: handle (int): The handle we wish to notify on value (bytearray): The value we wish to send
juraj-google-style
def egress(self, envelope, http_headers, operation, binding_options): if self._logger.isEnabledFor(logging.INFO): service_name = operation.binding.wsdl.services.keys()[0] self._logger.info(_REQUEST_LOG_LINE, service_name, operation.name, binding_options['address']) if s...
Overrides the egress function ror request logging. Args: envelope: An Element with the SOAP request data. http_headers: A dict of the current http headers. operation: The SoapOperation instance. binding_options: An options dict for the SOAP binding. Returns: A tuple of the envelope and headers.
juraj-google-style
def produce(self, **kwargs):
    """Call the primitive function, or the produce method of the primitive.

    Keyword arguments are merged over the stored produce params and
    passed to the primitive. Function primitives additionally receive
    the stored hyperparameters.

    Args:
        **kwargs: Keyword arguments forwarded to the primitive call.

    Returns:
        Whatever the underlying primitive call returns.
    """
    call_args = dict(self._produce_params)
    call_args.update(kwargs)
    if self._class:
        produce_fn = getattr(self.instance, self.produce_method)
        return produce_fn(**call_args)
    call_args.update(self._hyperparameters)
    return self.primitive(**call_args)
Call the primitive function, or the predict method of the primitive. The given keyword arguments will be passed directly to the primitive, if it is a simple function, or to the `produce` method of the primitive instance specified in the JSON annotation, if it is a class. If any of the arguments expected by the fit me...
codesearchnet
def sort_dependencies(self, image, dependencies=None): if dependencies is None: dependencies = OrderedDict() if image in dependencies: return requires = self.ymldefs[image].get('requires', []) for dep in requires: self.sort_dependencies(d...
Topologically sort the docker commands by their requirements Note: Circular "requires" dependencies are assumed to have already been checked in get_external_base_image, they are not checked here Args: image (str): process this docker image's dependencies dependencies (OrderedDict): running cache of sorted dependencie...
juraj-google-style
def Feed(self, size=512):
    """Read from the file-like object and feed the data into the lexer.

    Args:
        size: Optional number of bytes to read from the file-like
            object (default 512).

    Returns:
        The number of bytes actually read and fed.
    """
    chunk = self.file_object.read(size)
    Lexer.Feed(self, chunk)
    return len(chunk)
Feed data into the buffer. Args: size: optional data size to read form the file-like object.
codesearchnet
def get_group_by_name(self, group_name: str) -> typing.Optional['Group']:
    """Get a group from its name.

    Args:
        group_name: Name of the group to find.

    Returns:
        The matching Group, or None when no group has that name.
    """
    VALID_STR.validate(group_name, 'get_group_by_name')
    return next(
        (grp for grp in self.groups if grp.group_name == group_name),
        None,
    )
Gets a group from its name Args: group_name: Returns: Group
codesearchnet
def euler_angles_1q(unitary_matrix): if (unitary_matrix.shape != (2, 2)): raise QiskitError('euler_angles_1q: expected 2x2 matrix') phase = (la.det(unitary_matrix) ** ((- 1.0) / 2.0)) U = (phase * unitary_matrix) if (abs(U[(0, 0)]) > _CUTOFF_PRECISION): theta = (2 * math.acos(abs(U[(0, 0...
Compute Euler angles for a single-qubit gate. Find angles (theta, phi, lambda) such that unitary_matrix = phase * Rz(phi) * Ry(theta) * Rz(lambda) Args: unitary_matrix (ndarray): 2x2 unitary matrix Returns: tuple: (theta, phi, lambda) Euler angles of SU(2) Raises: QiskitError: if unitary_matrix not 2x2, or failure
codesearchnet
def getPaddingNum(chars): match = PRINTF_SYNTAX_PADDING_RE.match(chars) if match: return int(match.group(1)) try: return sum([PAD_MAP[char] for char in chars]) except KeyError: msg = "Detected an unsupported padding character: \"{}\"." ...
Given a supported group of padding characters, return the amount of padding. Args: chars (str): a supported group of padding characters Returns: int: Raises: ValueError: if unsupported padding character is detected
juraj-google-style
def __init__(self, table, info):
    """Initializes a TableMetadata instance.

    Args:
        table: The Table object this metadata belongs to.
        info: The BigQuery information about this table as a Python
            dictionary.
    """
    self._table = table
    self._info = info
Initializes a TableMetadata instance. Args: table: the Table object this belongs to. info: The BigQuery information about this table as a Python dictionary.
juraj-google-style