code
stringlengths
20
4.93k
docstring
stringlengths
33
1.27k
source
stringclasses
3 values
def result(self):
    """Block until the job completes, then return its result.

    Returns:
        The result recorded for the job.

    Raises:
        Exception: re-raises any fatal error the job recorded.
    """
    self.wait()
    error = self._fatal_error
    if error:
        raise error
    return self._result
Get the result for a job. This will block if the job is incomplete. Returns: The result for the Job. Raises: An exception if the Job resulted in an exception.
codesearchnet
def _GeneratePathString(self, mediator, pathspec, hashes): display_name = mediator.GetDisplayNameForPathSpec(pathspec) path_string = '{0:s}:'.format(display_name) for (hash_name, hash_value) in sorted(hashes.items()): path_string = '{0:s} {1:s}={2:s}'.format(path_string, hash_name, hash_value) r...
Generates a string containing a pathspec and its hashes. Args: mediator (AnalysisMediator): mediates interactions between analysis plugins and other components, such as storage and dfvfs. pathspec (dfvfs.Pathspec): the path specification) to generate a string for. hashes (dict[str, str]): mapping of hash attribute nam...
codesearchnet
def describe_enum(enum_definition): enum_descriptor = EnumDescriptor() enum_descriptor.name = enum_definition.definition_name().split('.')[(- 1)] values = [] for number in enum_definition.numbers(): value = enum_definition.lookup_by_number(number) values.append(describe_enum_value(value)...
Build descriptor for Enum class. Args: enum_definition: Enum class to provide descriptor for. Returns: Initialized EnumDescriptor instance describing the Enum class.
codesearchnet
def import_mapping(connection_id, mapping): url = os.path.join(settings.HEROKU_CONNECT_API_ENDPOINT, 'connections', connection_id, 'actions', 'import') response = requests.post( url=url, json=mapping, headers=_get_authorization_headers() ) response.ra...
Import Heroku Connection mapping for given connection. Args: connection_id (str): Heroku Connection connection ID. mapping (dict): Heroku Connect mapping. Raises: requests.HTTPError: If an error occurs uploading the mapping. ValueError: If the mapping is not JSON serializable.
juraj-google-style
def GetFeedMapping(client, feed, placeholder_type): feed_mapping_service = client.GetService('FeedMappingService', 'v201809') attribute_mappings = {} more_pages = True selector = {'fields': ['FeedMappingId', 'AttributeFieldMappings'], 'predicates': [{'field': 'FeedId', 'operator': 'EQUALS', 'values': [f...
Gets the Feed Mapping for a given Feed. Args: client: an AdWordsClient instance. feed: the Feed we are retrieving the Feed Mapping for. placeholder_type: the Placeholder Type we are looking for. Returns: A dictionary containing the Feed Mapping.
codesearchnet
def _calculate_hash(files, root): file_hash = hashlib.md5() for fname in sorted(files): f = os.path.join(root, fname) file_hash.update((fname + "\0").encode()) with open(f, "rb") as fd: for chunk in iter(lambda: fd.read(4096), ""): if not chunk: ...
Returns a hash of all of the given files at the given root. Args: files (list[str]): file names to include in the hash calculation, relative to ``root``. root (str): base directory to analyze files in. Returns: str: A hash of the hashes of the given files.
juraj-google-style
def _init_boto3_clients(self): try: profile = self._config.get('environment', {}).get('profile') region = self._config.get('environment', {}).get('region') if profile: self._b3Sess = boto3.session.Session(profile_name=profile) else: ...
The utility requires boto3 clients for CloudFormation and S3. Here is where we make them. Args: None Returns: Good or Bad; True or False
juraj-google-style
def from_json(cls, data): optional_keys = ('city', 'state', 'country', 'latitude', 'longitude', 'time_zone', 'elevation', 'station_id', 'source') for key in optional_keys: if (key not in data): data[key] = None return cls(data['city'], data['state'], data['country'], data['latitude'], da...
Create a location from a dictionary. Args: data: { "city": "-", "latitude": 0, "longitude": 0, "time_zone": 0, "elevation": 0}
codesearchnet
def _fdopen_ver2(self, file_des, mode='r', bufsize=None):
    """Return an open file object connected to a file descriptor.

    Args:
        file_des: integer file descriptor for the requested file object.
        mode: file flags; checked against the mode of the requested file.
        bufsize: ignored, present only for signature compatibility with
            the builtin ``os.fdopen``.

    Raises:
        TypeError: if ``file_des`` is not an integer.
        OSError: if opening the descriptor fails in the fake filesystem.
    """
    if not is_int_type(file_des):
        raise TypeError('an integer is required')
    try:
        return FakeFileOpen(self.filesystem).call(file_des, mode=mode)
    except IOError as exc:
        # Translate the IOError into the fake filesystem's OS error.
        self.filesystem.raise_os_error(exc.errno, exc.filename)
Returns an open file object connected to the file descriptor file_des. Args: file_des: An integer file descriptor for the file object requested. mode: Additional file flags. Currently checks to see if the mode matches the mode of the requested file object. bufsize: ignored. (Used for signature compliance with __builti...
codesearchnet
def AddServiceDescriptor(self, service_desc):
    """Register a ServiceDescriptor in this pool, keyed by full name.

    Args:
        service_desc: a ``descriptor.ServiceDescriptor`` instance.

    Raises:
        TypeError: if ``service_desc`` is not a ServiceDescriptor.
    """
    if not isinstance(service_desc, descriptor.ServiceDescriptor):
        raise TypeError('Expected instance of descriptor.ServiceDescriptor.')
    self._service_descriptors[service_desc.full_name] = service_desc
Adds a ServiceDescriptor to the pool. Args: service_desc: A ServiceDescriptor.
juraj-google-style
def __resource_descriptor(self, resource_path, methods): descriptor = {} method_map = {} sub_resource_index = collections.defaultdict(list) sub_resource_map = {} resource_path_tokens = resource_path.split('.') for service, protorpc_meth_info in methods: method_info = getattr(protorpc...
Describes a resource. Args: resource_path: string, the path of the resource (e.g., 'entries.items') methods: list of tuples of type (endpoints.Service, protorpc.remote._RemoteMethodInfo), the methods that serve this resource. Returns: Dictionary describing the resource.
juraj-google-style
def PrepareMatches(self, file_system): if self._location is not None: self._location_segments = self._SplitPath( self._location, file_system.PATH_SEPARATOR) elif self._location_regex is not None: path_separator = file_system.PATH_SEPARATOR if path_separator == '\\': ...
Prepare find specification for matching. Args: file_system (FileSystem): file system.
juraj-google-style
def _ParseAndValidateRecord(self, parser_mediator, text_file_object): try: title = text_file_object.readline(size=self._MAXIMUM_LINE_SIZE) url = text_file_object.readline(size=self._MAXIMUM_LINE_SIZE) timestamp = text_file_object.readline(size=self._MAXIMUM_LINE_SIZE) popularity_index =...
Parses and validates an Opera global history record. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. text_file_object (dfvfs.TextFile): text file. Returns: bool: True if the record was successfully parsed.
juraj-google-style
def cleanup(context):
    """Remove and recreate the scriptworker working directories.

    Deletes ``work_dir``, ``artifact_dir`` and ``task_log_dir`` between
    task runs, then recreates each of them empty.

    Args:
        context (scriptworker.context.Context): the scriptworker context.
    """
    for key in ('work_dir', 'artifact_dir', 'task_log_dir'):
        directory = context.config[key]
        if os.path.exists(directory):
            log.debug('rm({})'.format(directory))
            rm(directory)
        makedirs(directory)
Clean up the work_dir and artifact_dir between task runs, then recreate. Args: context (scriptworker.context.Context): the scriptworker context.
codesearchnet
def vr60baro(msg):
    """Decode vertical rate from the barometric measurement (BDS 6,0).

    Note: this value may be very noisy.

    Args:
        msg (str): 28-character hexadecimal message (BDS60) string.

    Returns:
        int: vertical rate in feet/minute, or None if the status bit
        indicates the field is unavailable.
    """
    bits = hex2bin(data(msg))
    # Status bit: field not available.
    if bits[34] == '0':
        return None
    sign = int(bits[35])
    value = bin2int(bits[36:45])
    # All-zero / all-one magnitudes encode a zero rate.
    if value in (0, 511):
        return 0
    # The sign bit selects the negative half of the two's-complement range.
    if sign:
        value -= 512
    # LSB is 32 ft/min.
    return value * 32
Vertical rate from barometric measurement, this value may be very noisy. Args: msg (String): 28 bytes hexadecimal message (BDS60) string Returns: int: vertical rate in feet/minutes
juraj-google-style
def memory_zones(self):
    """Return all memory zones supported by the current target.

    Some targets support multiple memory zones; this lists them so they
    can be used with the memory-zone routing functions.

    Returns:
        list: the supported zones as ``JLinkMemoryZone`` structures; an
        empty list when the target has none.

    Raises:
        JLinkException: if the DLL query fails.
    """
    count = self.num_memory_zones()
    if count == 0:
        return []
    zones = (structs.JLinkMemoryZone * count)()
    status = self._dll.JLINK_GetMemZones(zones, count)
    if status < 0:
        raise errors.JLinkException(status)
    return list(zones)
Gets all memory zones supported by the current target. Some targets support multiple memory zones. This function provides the ability to get a list of all the memory zones to facilate using the memory zone routing functions. Args: self (JLink): the ``JLink`` instance Returns: A list of all the memory zones as ``JLi...
juraj-google-style
def create_ltp_package(aleph_record, book_id, ebook_fn, data, url, urn_nbn=None): (root_dir, orig_dir, meta_dir) = _create_package_hierarchy(book_id=book_id) original_fn = os.path.join(orig_dir, fn_composers.original_fn(book_id, ebook_fn)) with open(original_fn, 'wb') as f: f.write(data) metadat...
Create LTP package as it is specified in specification v1.0 as I understand it. Args: aleph_record (str): XML containing full aleph record. book_id (str): UUID of the book. ebook_fn (str): Original filename of the ebook. data (str/bytes): Ebook's content. url (str): URL of the publication used when the URL can't be fo...
codesearchnet
def identify(text): filtered_text = set(list(text)).intersection(ALL_CHARS) if (len(filtered_text) is 0): return None if filtered_text.issubset(SHARED_CHARS): return EITHER if filtered_text.issubset(TRAD_CHARS): return TRAD if filtered_text.issubset(SIMP_CHARS): retur...
Identify whether a string is simplified or traditional Chinese. Returns: None: if there are no recognized Chinese characters. EITHER: if the test is inconclusive. TRAD: if the text is traditional. SIMP: if the text is simplified. BOTH: the text has characters recognized as being solely traditional and other characters ...
codesearchnet
def test_noninlined_funcdef(self, mode): self._maybe_skip(mode) with ops.device(_get_device(mode)): random_seed.set_random_seed(0) x = _input([8, 8]) y = _matmul_act(x) y = _example_noninlined_funcdef(y) optimizer = gradient_descent.GradientDescentOptimizer(learning_rate=...
Test graph with non-inlined function subgraph. This requires the grappler pass to handle an OpDef that only appears in the graph's function registry instead of the global op registry. Args: mode: Either 'cuda' or 'mkl'.
github-repos
def run_function_on_all_workers(self, function, run_on_other_drivers=False): if (self.mode is None): self.cached_functions_to_run.append(function) else: pickled_function = pickle.dumps(function) function_to_run_id = hashlib.sha1(pickled_function).digest() key = (b'FunctionsToRun:...
Run arbitrary code on all of the workers. This function will first be run on the driver, and then it will be exported to all of the workers to be run. It will also be run on any new workers that register later. If ray.init has not been called yet, then cache the function and export it later. Args: function (Callable)...
codesearchnet
def scrape(self, url): if isinstance(url, str) is False: raise TypeError("The type of url must be str.") if self.readable_web_pdf is not None and self.readable_web_pdf.is_pdf_url(url) is True: web_data = self.readable_web_pdf.url_to_text(url) else: w...
Execute Web-Scraping. The target dom objects are in self.__dom_object_list. Args: url: Web site url. Returns: The result. this is a string. @TODO(chimera0): check URLs format.
juraj-google-style
def _normalize_field_name(self, field_name) -> str:
    """Normalize a field name into a plain string.

    A field may be referenced as a HStore key via a ``(name, key)``
    tuple; in that case only the field-name part is kept.

    Args:
        field_name: the field name to normalize (str or tuple).

    Returns:
        The normalized field name.
    """
    if isinstance(field_name, tuple):
        return field_name[0]
    return field_name
Normalizes a field name into a string by extracting the field name if it was specified as a reference to a HStore key (as a tuple). Arguments: field_name: The field name to normalize. Returns: The normalized field name.
juraj-google-style
def model_fn(self, x: core.Tensor) -> Mapping[str, core.Tensor]: if math_ops.reduce_sum(x) > 10.0: out = math_ops.matmul(x, self.filters_0) out = nn_ops.bias_add(out, self.bias_0) return {'output': out} out = math_ops.matmul(x, self.filters_1) out = nn_ops.bias_add(out, self.bias_1) ...
Runs the input tensor to a branched operations. The graph is branched by a condition whether the sum of elements of `x` is greater than 10. Args: x: Input tensor. Returns: A map of: output key -> output result.
github-repos
def sample_from_likelihood(self, n_timesteps=10): self.latent_state_sequences = lmap( lambda A: ltake( n_timesteps, iterate( lambda s: pd.Series(A @ s.values, index=s.index), self.s0 ), ), self.tran...
Sample a collection of observed state sequences from the likelihood model given a collection of transition matrices. Args: n_timesteps: The number of timesteps for the sequences.
juraj-google-style
def __init__(self, request, async, callback=None, callbacks=dict(), root_object=None): self._uses_authentication = True self._has_timeouted = False self._ignore_request_idle = False self._xhr_timeout = 3000 self._response = None self._error_message = No...
Initializes a new connection for a given request NURESTConnection object is in charge of the HTTP call. It relies on request library Args: request: the NURESTRequest to send callback: the method that will be fired after sending callbacks: a dictionary of user callbacks. Should contain local and remote callbacks
juraj-google-style
def get_sendback(self, uuid, key):
    """Build a callback that sends progress data back to the caller.

    Args:
        uuid (str): UUID of the received message.
        key (str): routing key.

    Returns:
        A function taking a single ``data`` argument; it serializes the
        data and sends it as a response on the original route.
    """
    def callback(data):
        serialized = serializers.serialize(data)
        self.sendResponse(serialized, uuid, key)
    return callback
Return function for sending progress messages back to original caller. Args: uuid (str): UUID of the received message. key (str): Routing key. Returns: fn reference: Reference to function which takes only one data \ argument.
juraj-google-style
def get_capture_handler_config_by_name(self, name): handler_confs = [] for (address, stream_capturer) in self._stream_capturers.iteritems(): handler_data = stream_capturer[0].dump_handler_config_data() for h in handler_data: if (h['handler']['name'] == name): handler_...
Return data for handlers of a given name. Args: name: Name of the capture handler(s) to return config data for. Returns: Dictionary dump from the named capture handler as given by the :func:`SocketStreamCapturer.dump_handler_config_data` method.
codesearchnet
def __init__(self, file_handle):
    """Initialize a JsonRowWriter.

    Args:
        file_handle (io.IOBase): writable output stream.

    Raises:
        ValueError: if ``file_handle`` is not writable.
    """
    if not file_handle.writable():
        raise ValueError('Output stream must be writable')
    self._file_handle = file_handle
    # Rows are encoded as one JSON dict per row.
    self._coder = RowAsDictJsonCoder()
Initialize an JsonRowWriter. Args: file_handle (io.IOBase): Output stream to write to.
github-repos
def precision(truth, recommend, k=None):
    """Compute Precision@k for a recommendation list.

    Args:
        truth (numpy 1d array): set of ground-truth samples.
        recommend (numpy 1d array): ordered recommended samples.
        k (int): cut-off; top-k items of ``recommend`` are evaluated.
            Defaults to the full recommendation list.

    Returns:
        float: Precision@k. Defined as 1.0 when both inputs are empty,
        and 0.0 when nothing is recommended but truth is non-empty.
    """
    if len(recommend) == 0:
        return 1.0 if len(truth) == 0 else 0.0
    if k is None:
        k = len(recommend)
    hits = count_true_positive(truth, recommend[:k])
    return hits / float(k)
Precision@k. Args: truth (numpy 1d array): Set of truth samples. recommend (numpy 1d array): Ordered set of recommended samples. k (int): Top-k items in `recommend` will be recommended. Returns: float: Precision@k.
juraj-google-style
def _validate_isvalid_orcid(self, isvalid_orcid, field, value): if isvalid_orcid and 'ORCID' in value: try: res = search_orcid(value['ORCID']) except ConnectionError: warn('network not available, ORCID not validated.') return ...
Checks for valid ORCID if given. Args: isvalid_orcid (`bool`): flag from schema indicating ORCID to be checked. field (`str`): 'author' value (`dict`): dictionary of author metadata. The rule's arguments are validated against this schema: {'isvalid_orcid': {'type': 'bool'}, 'field': {'type': 'str'}, 'value': {'type':...
juraj-google-style
def direct_normal_illuminance(self, value=999999.0): if (value is not None): try: value = float(value) except ValueError: raise ValueError('value {} need to be of type float for field `direct_normal_illuminance`'.format(value)) if (value < 0.0): raise Valu...
Corresponds to IDD Field `direct_normal_illuminance` will be missing if >= 999900 Args: value (float): value for IDD Field `direct_normal_illuminance` Unit: lux value >= 0.0 Missing value: 999999.0 if `value` is None it will not be checked against the specification and is assumed to be a missing value Raises: ValueEr...
codesearchnet
def run(self, xml, **kwargs): kwargs['output'] = self.__graph__() if isinstance(xml, str): try: self.source = etree.XML(xml) except ValueError: try: self.source = etree.XML(xml.encode()) except: raise ValueError('Cannot run erro...
Method takes either an etree.ElementTree or raw XML text as the first argument. Args: xml(etree.ElementTree or text
codesearchnet
def db_insert_record(self, table_name, columns):
    """Insert a single record with NULL values into a DB table.

    Args:
        table_name (str): the name of the table.
        columns (list): column names for the insert statement.
    """
    placeholders = ','.join('?' * len(columns))
    sql = 'INSERT INTO {} ({}) VALUES ({})'.format(
        table_name, ', '.join(columns), placeholders)
    cursor = self.db_conn.cursor()
    # All values are bound as None; callers update the row afterwards.
    cursor.execute(sql, [None] * len(columns))
Insert records into DB. Args: table_name (str): The name of the table. columns (list): List of columns for insert statement.
juraj-google-style
def __init__(self, message=None, host=None):
    """Initialize the GeneralError object.

    Args:
        message (str): custom message for the exception; ``None`` keeps
            the class docstring as the default description.
        host (str): optional host used to prefix the message string.
    """
    self.message = message
    # Coerce to str so arbitrary host objects are safe to format later.
    self.hostname = str(host) if host else None
Initialize the GeneralError object. Args: message (str): Custom message to be passed to the exceptions. Defaults to *None*. If *None* then the general class *__doc__* is used. host (str): Custom string which can be used to enhance the exception message by adding the "`host`: " prefix to the message string. Defaults to...
juraj-google-style
async def getTempCortex(mods=None):
    """Yield a proxy to a Cortex backed by a temporary directory.

    The cortex and the temporary directory are torn down on exit.

    Args:
        mods (list): modules to load into the cortex.

    Yields:
        Proxy to the cortex.
    """
    with s_common.getTempDir() as dirn:
        async with await Cortex.anit(dirn) as core:
            for mod in mods or ():
                await core.loadCoreModule(mod)
            async with core.getLocalProxy() as prox:
                yield prox
Get a proxy to a cortex backed by a temporary directory. Args: mods (list): A list of modules which are loaded into the cortex. Notes: The cortex and temporary directory are torn down on exit. This should only be called from synchronous code. Returns: Proxy to the cortex.
juraj-google-style
def RegisterDecoder(cls, decoder):
    """Register a decoder class for its encoding method.

    Args:
        decoder (type): decoder class exposing ``ENCODING_METHOD``.

    Raises:
        KeyError: if a decoder for the encoding method is already set.
    """
    method = decoder.ENCODING_METHOD.lower()
    if method in cls._decoders:
        raise KeyError(
            'Decoder for encoding method: {0:s} already set.'.format(
                decoder.ENCODING_METHOD))
    cls._decoders[method] = decoder
Registers a decoder for a specific encoding method. Args: decoder (type): decoder class. Raises: KeyError: if the corresponding decoder is already set.
codesearchnet
def _add_parameters(self, parameter_map, parameter_list):
    """Populate a parameter map, resolving any $ref objects encountered.

    Args:
        parameter_map: dict mapping parameter names to parameter objects.
        parameter_list: list of parameter objects or reference objects.
    """
    for param in parameter_list:
        ref = param.get('$ref')
        if ref:
            # Resolve the reference by its trailing path component.
            param = self.specification['parameters'].get(ref.split('/')[-1])
        parameter_map[param['name']] = param
Populates the given parameter map with the list of parameters provided, resolving any reference objects encountered. Args: parameter_map: mapping from parameter names to parameter objects parameter_list: list of either parameter objects or reference objects
codesearchnet
def get_ccc_handle_from_uuid(self, uuid): if uuid in self.uuid_cccds: return self.uuid_cccds[uuid].handle char = self.get_characteristic_from_uuid(uuid) if char is None: return None ccc = char.get_descriptor_by_uuid(UUID_GATT_CCC) if cc...
Utility function to retrieve the client characteristic configuration descriptor handle for a given characteristic. Args: uuid (str): a string containing the hex-encoded UUID Returns: None if an error occurs, otherwise an integer handle.
juraj-google-style
def _GetElementDataTypeDefinition(self, data_type_definition): if not data_type_definition: raise errors.FormatError('Missing data type definition') element_data_type_definition = getattr( data_type_definition, 'element_data_type_definition', None) if not element_data_type_definition: ...
Retrieves the element data type definition. Args: data_type_definition (DataTypeDefinition): data type definition. Returns: DataTypeDefinition: element data type definition. Raises: FormatError: if the element data type cannot be determined from the data type definition.
juraj-google-style
def Lease(self, request, global_params=None):
    """Lease a dataflow WorkItem to run.

    Args:
        request: (DataflowProjectsLocationsJobsWorkItemsLeaseRequest)
            input message.
        global_params: (StandardQueryParameters, default: None) global
            arguments.

    Returns:
        (LeaseWorkItemResponse) The response message.
    """
    method_config = self.GetMethodConfig('Lease')
    return self._RunMethod(
        method_config, request, global_params=global_params)
Leases a dataflow WorkItem to run. Args: request: (DataflowProjectsLocationsJobsWorkItemsLeaseRequest) input message global_params: (StandardQueryParameters, default: None) global arguments Returns: (LeaseWorkItemResponse) The response message.
github-repos
def get_pending_computer_name():
    """Get a pending computer name.

    Returns:
        str: the pending name if a rename is awaiting a reboot.
        None: if the pending name matches the current name (no change).
        False: if no pending name could be read from the registry.
    """
    current = get_computer_name()
    pending = __utils__['reg.read_value'](
        'HKLM',
        'SYSTEM\\CurrentControlSet\\Services\\Tcpip\\Parameters',
        'NV Hostname')['vdata']
    if not pending:
        return False
    return pending if pending != current else None
Get a pending computer name. If the computer name has been changed, and the change is pending a system reboot, this function will return the pending computer name. Otherwise, ``None`` will be returned. If there was an error retrieving the pending computer name, ``False`` will be returned, and an error message will be l...
codesearchnet
def configure_and_build(self, show_progress=True, optimized=True, skip_configuration=False): if not skip_configuration: configuration_command = ['python', 'waf', 'configure', '--enable-examples', '--disable-gtk', '--...
Configure and build the ns-3 code. Args: show_progress (bool): whether or not to display a progress bar during compilation. optimized (bool): whether to use an optimized build. If False, use a standard ./waf configure. skip_configuration (bool): whether to skip the configuration step, and only perform compilation.
juraj-google-style
def get_security_group_id(name='', env='', region=''): vpc_id = get_vpc_id(env, region) LOG.info('Find %s sg in %s [%s] in %s', name, env, region, vpc_id) url = '{0}/securityGroups/{1}/{2}/{3}?vpcId={4}'.format(API_URL, env, region, name, vpc_id) response = requests.get(url, verify=GATE_CA_BUNDLE, cert=...
Get a security group ID. Args: name (str): Security Group name to find. env (str): Deployment environment to search. region (str): AWS Region to search. Returns: str: ID of Security Group, e.g. sg-xxxx. Raises: AssertionError: Call to Gate API was not successful. SpinnakerSecurityGroupError: Security Group _name_ wa...
codesearchnet
def run_bottleneck_on_image(sess, image_data, image_data_tensor, decoded_image_tensor, resized_input_tensor, bottleneck_tensor): resized_input_values = sess.run(decoded_image_tensor, {image_data_tensor: image_data}) bottleneck_values = sess.run(bottleneck_tensor, {resized_input_tensor: resized_input_values}) ...
Runs inference on an image to extract the 'bottleneck' summary layer. Args: sess: Current active TensorFlow Session. image_data: String of raw JPEG data. image_data_tensor: Input data layer in the graph. decoded_image_tensor: Output of initial image resizing and preprocessing. resized_input_tensor: The input node of t...
codesearchnet
def supervised_to_dict(dataset, text2self):
    """Turn a supervised dataset into a feature-dictionary dataset.

    If ``text2self`` the feature dictionary contains only a "targets"
    key; otherwise it contains "inputs" and "targets" keys.

    Args:
        dataset: a tf.data.Dataset of (inputs, targets) pairs.
        text2self: a boolean.

    Returns:
        a tf.data.Dataset of feature dictionaries.
    """
    def to_features(inputs, targets):
        if text2self:
            return {'targets': targets}
        return {'inputs': inputs, 'targets': targets}
    return dataset.map(
        to_features, num_parallel_calls=tf.data.experimental.AUTOTUNE)
Turns a supervised dataset into a dataset with a feature dictionary. if text2self, then the features dictionary contains a "targets" key. else, the features dictionary contains "inputs" and "targets" keys. Args: dataset: a tf.data.Dataset text2self: a boolean Returns: a tf.data.Dataset
codesearchnet
def process_equities(equities: List[str], mask: types.IntTensor=None) -> Tuple[List[str], List[int]]: equity_list = cashflow_streams.to_list(equities) if mask is not None: return (equity_list, mask) mask, mask_map, num_unique_equities = cashflow_streams.create_mask(equity_list) equity_types = [m...
Extracts unique equities and computes an integer mask. #### Example ```python process_equities(["GOOG", "MSFT", "GOOG", "GOOG"]) # Returns (['GOOG', 'MSFT'], [0, 1, 0, 0]) ``` Args: equities: A list of equity names. mask: An optional integer mask for the sorted equity sequence. If supplied, becomes a no-op. Returns...
github-repos
def mark_typed_object(self, name, type_object): if (not hasattr(type_object, 'dump')): raise ArgumentError(('The passed type object %s is missing required method: dump()' % type_object)) if (not hasattr(type_object, 'Restore')): raise ArgumentError(('The passed type object %s is missing required...
Mark a property as containing a serializable object. This convenience method allows you to avoid having to call ``mark_complex()`` whenever you need to serialize a complex object. This method requires that property ``name`` be a single class that contains a dump() method and a Restore() class method where type_object....
codesearchnet
def update_hash(src_file):
    """Recompute and persist the hash for the given file or directory.

    The hash is written next to ``src_file`` in a ``<name>.hash`` file.

    Args:
        src_file: the file (or directory) to hash.

    Returns:
        The newly computed hash value.
    """
    hash_file = local.path(src_file) + '.hash'
    with open(hash_file, 'w') as handle:
        new_hash = get_hash_of_dirs(src_file)
        handle.write(str(new_hash))
    return new_hash
Update the hash for the given file. Args: src_file: The file (or directory) whose hash should be recomputed and persisted.
codesearchnet
def _stream_data(self, chunk=None): self._stream_sm_running = True if (chunk is None): chunk = self._next_streaming_chunk(20) if ((chunk is None) or (len(chunk) == 0)): self._stream_sm_running = False return try: self._send_notification(StreamingChar.value_handle, chunk) ...
Stream reports to the ble client in 20 byte chunks Args: chunk (bytearray): A chunk that should be sent instead of requesting a new chunk from the pending reports.
codesearchnet
def get_commits(self, since_sha=None): assert self.tempdir cmd = ['git', 'log', '--first-parent', '--reverse', COMMIT_FORMAT] if since_sha: commits = [self.get_commit(since_sha)] cmd.append('{}..HEAD'.format(since_sha)) else: commits = [] cmd.append('HEAD') output = c...
Returns a list of Commit objects. Args: since_sha - (optional) A sha to search from
codesearchnet
def normalize(model: typing.Dict[str, typing.Any]) -> typing.Dict[str, typing.Dict[str, int]]: is_old_format = all([isinstance(v, int) for v in model.values()]) if is_old_format: output = {} sorted_items = sorted(model.items(), key=lambda x: x[0]) groups = itertools.groupby(sorted_items,...
Updates a model to the latest format. Does nothing if it's updated already. Args: model: A model. Returns: An updated model.
github-repos
def add(self, decorations): added = 0 if isinstance(decorations, list): not_repeated = (set(decorations) - set(self._decorations)) self._decorations.extend(list(not_repeated)) added = len(not_repeated) elif (decorations not in self._decorations): self._decorations.append(deco...
Add text decorations on a CodeEditor instance. Don't add duplicated decorations, and order decorations according draw_order and the size of the selection. Args: decorations (sourcecode.api.TextDecoration) (could be a list) Returns: int: Amount of decorations added.
codesearchnet
def user_has_access(self, user): if (ROLE_ADMIN in user.roles): return True if self.enabled: if (not self.required_roles): return True for role in self.required_roles: if (role in user.roles): return True return False
Check if a user has access to view information for the account Args: user (:obj:`User`): User object to check Returns: True if user has access to the account, else false
codesearchnet
def create_dir(path):
    """Create a directory (including parents) if it does not exist.

    Args:
        path: the path of the directory to create.
    """
    full_path = abs_path(path)
    if not os.path.exists(full_path):
        # exist_ok closes the race where another process creates the
        # directory between the existence check and makedirs; the
        # original try/except-EEXIST dance is no longer needed.
        os.makedirs(full_path, exist_ok=True)
Creates a directory if it does not exist already. Args: path: The path of the directory to create.
github-repos
def prepare_subprocess_cmd(subprocess_cmd): help_cmd = subprocess_cmd + ['--helpfull'] help_output = subprocess.run(help_cmd, stdout=subprocess.PIPE).stdout help_output = help_output.decode('ascii') if 'python' in subprocess_cmd[0]: valid_flags = parse_helpfull_output(help_output) else:...
Prepares a subprocess command by running --helpfull and masking flags. Args: subprocess_cmd: List[str], what would be passed into subprocess.call() i.e. ['python', 'train.py', '--flagfile=flags'] Returns: ['python', 'train.py', '--train_flag=blah', '--more_flags']
juraj-google-style
def obtain(self, dest): url, rev_options = self.get_url_rev_options(self.url) if not os.path.exists(dest): self.fetch_new(dest, url, rev_options) return rev_display = rev_options.to_display() if self.is_repository_directory(dest): e...
Install or update in editable mode the package represented by this VersionControl object. Args: dest: the repository directory in which to install or update.
juraj-google-style
def compute(self, x): q_learning = copy(self.__greedy_q_learning) q_learning.epsilon_greedy_rate = x[0] q_learning.alpha_value = x[1] q_learning.gamma_value = x[2] if self.__init_state_key is not None: q_learning.learn(state_key=self.__init_state_key, limit=i...
Compute cost. Args: x: `np.ndarray` of explanatory variables. Returns: cost
juraj-google-style
def Write(self, output_writer): if self._title: output_writer.Write(' if not self._columns: self._columns = ['' for _ in range(0, self._number_of_columns)] output_writer.Write(' | '.join(self._columns)) output_writer.Write('\n') output_writer.Write(' | '.join(['---' for _ in self...
Writes the table to the output writer. Args: output_writer (OutputWriter): output writer.
juraj-google-style
def word_matches(s1, s2, n=3):
    """Find word-level n-grams shared by two strings.

    Args:
        s1: a string.
        s2: another string.
        n: an int for the n in n-gram.

    Returns:
        set: the word n-grams found in both strings.
    """
    # Delegate to the generic matcher with the word-level tokenizer.
    return __matches(s1, s2, word_ngrams, n=n)
Word-level n-grams that match between two strings Args: s1: a string s2: another string n: an int for the n in n-gram Returns: set: the n-grams found in both strings
codesearchnet
def get_video_features(self, pixel_values: torch.FloatTensor, qformer_input_ids: torch.LongTensor, qformer_attention_mask: Optional[torch.LongTensor]=None, interpolate_pos_encoding: Optional[bool]=False, return_dict: Optional[bool]=False): batch_size, frames, channel, height, width = pixel_values.shape pixel_va...
Encodes images into continuous embeddings that can be forwarded to the language model. Args: pixel_values (`torch.FloatTensor` of shape `(batch_size, num_channels, image_size, image_size)`): The tensors corresponding to the input images.
github-repos
def symm_reduce(self, coords_set, threshold=1e-6): surf_sg = SpacegroupAnalyzer(self.slab, 0.1) symm_ops = surf_sg.get_symmetry_operations() unique_coords = [] coords_set = [self.slab.lattice.get_fractional_coords(coords) for coords in coords_set] ...
Reduces the set of adsorbate sites by finding removing symmetrically equivalent duplicates Args: coords_set: coordinate set in cartesian coordinates threshold: tolerance for distance equivalence, used as input to in_coord_list_pbc for dupl. checking
juraj-google-style
def from_file(cls, filename):
    """Read a Fiesta input from a file.

    Currently tested to work with files generated from this class itself.

    Args:
        filename: path of the file to parse (may be compressed; opened
            via ``zopen``).

    Returns:
        FiestaInput object.
    """
    with zopen(filename) as handle:
        contents = handle.read()
    return cls.from_string(contents)
Read an Fiesta input from a file. Currently tested to work with files generated from this class itself. Args: filename: Filename to parse. Returns: FiestaInput object
juraj-google-style
def _ReadMemberHeader(self, file_object): file_offset = file_object.get_offset() member_header = self._ReadStructure(file_object, file_offset, self._MEMBER_HEADER_SIZE, self._MEMBER_HEADER, 'member header') if (member_header.signature != self._GZIP_SIGNATURE): raise errors.FileFormatError('Unsupport...
Reads a member header. Args: file_object (FileIO): file-like object to read from. Raises: FileFormatError: if the member header cannot be read.
codesearchnet
def to_barrier_key(cls, barrier_index_key):
    """Convert a _BarrierIndex key to the matching _BarrierRecord key.

    Args:
        barrier_index_key: db.Key for a _BarrierIndex entity.

    Returns:
        db.Key for the corresponding _BarrierRecord entity.
    """
    index_path = barrier_index_key.to_path()
    # The last four path components identify the barrier; the third is
    # the _BarrierIndex kind, replaced here by the _BarrierRecord kind.
    pipeline_kind, dependent_pipeline_id, _, purpose = index_path[-4:]
    record_path = (
        pipeline_kind, dependent_pipeline_id, _BarrierRecord.kind(), purpose)
    return db.Key.from_path(*record_path)
Converts a _BarrierIndex key to a _BarrierRecord key. Args: barrier_index_key: db.Key for a _BarrierIndex entity. Returns: db.Key for the corresponding _BarrierRecord entity.
codesearchnet
def compose_tree_path(tree, issn=False):
    """Compose an absolute path for the given tree.

    Args:
        tree (obj): Tree instance.
        issn (bool, default False): compose the path using the ISSN.

    Returns:
        str: absolute path of the tree, without server address/protocol.
    """
    if issn:
        return join('/', ISSN_DOWNLOAD_KEY, basename(tree.issn))
    # Keep literal slashes in the quoted path readable.
    return join(
        '/',
        PATH_DOWNLOAD_KEY,
        quote_plus(tree.path).replace("%2F", "/"),
    )
Compose absolute path for given `tree`. Args: tree (obj): :class:`.Tree` instance. issn (bool, default False): Compose URL using ISSN. Returns: str: Absolute path of the tree, without server's address and protocol.
juraj-google-style
def proto_refactor_files(dest_dir, namespace, namespace_path): for (dn, dns, fns) in os.walk(dest_dir): for fn in fns: fn = os.path.join(dn, fn) if fnmatch.fnmatch(fn, '*.proto'): data = proto_refactor(fn, namespace, namespace_path) with open(fn, 'w') ...
This method runs the refactoring on all the Protobuf files in the Dropsonde repo. Args: dest_dir (str): directory where the Protobuf files lives. namespace (str): the desired package name (i.e. "dropsonde.py2") namespace_path (str): the desired path corresponding to the package name (i.e. "dropsonde/py2")
codesearchnet
def Add(self, file_desc_proto): proto_name = file_desc_proto.name if (proto_name not in self._file_desc_protos_by_file): self._file_desc_protos_by_file[proto_name] = file_desc_proto elif (self._file_desc_protos_by_file[proto_name] != file_desc_proto): raise DescriptorDatabaseConflictingDefin...
Adds the FileDescriptorProto and its types to this database. Args: file_desc_proto: The FileDescriptorProto to add. Raises: DescriptorDatabaseConflictingDefinitionError: if an attempt is made to add a proto with the same name but a different definition than an existing proto in the database.
codesearchnet
def SendSourceFiles(self, request, context):
    """Base handler for SendSourceFiles calls.

    The base implementation ignores the incoming request. Override in a
    concrete server implementation if the source files need processing.

    Args:
        request: A `DebuggedSourceFiles` proto, containing the path, content,
            size and last-modified timestamp of source files.
        context: The gRPC call context.

    Returns:
        An empty `EventReply` proto.
    """
    del request, context  # Unused by the base implementation.
    return debug_service_pb2.EventReply()
Base implementation of the handling of SendSourceFiles calls. The base implementation does nothing with the incoming request. Override in an implementation of the server if necessary. Args: request: A `DebuggedSourceFiles` proto, containing the path, content, size and last-modified timestamp of source files. context:...
github-repos
def _abort_workflow(pb: ProcessingBlock, workflow_stage_dict: dict, docker: DockerSwarmClient): _abort_flag = False if _abort_flag: for workflow_stage in pb.workflow_stages: for service_id, _ in \ workflow_stage_dict[workflow_stage.id]['servi...
Abort the workflow. TODO(BMo): This function currently does nothing as the abort flag is hardcoded to False! This function is used by `execute_processing_block`. Args: pb (ProcessingBlock): Configuration database Processing block object. workflow_stage_dict (dict): Workflow stage metadata dictionary. docker (DockerC...
juraj-google-style
def CreateCampaignWithBiddingStrategy(client, bidding_strategy_id, budget_id): campaign_service = client.GetService('CampaignService', version='v201809') campaign = {'name': ('Interplanetary Cruise operation = {'operator': 'ADD', 'operand': campaign} response = campaign_service.mutate([operation]) ...
Create a Campaign with a Shared Bidding Strategy. Args: client: AdWordsClient the client to run the example with. bidding_strategy_id: string the bidding strategy ID to use. budget_id: string the shared budget ID to use. Returns: dict An object representing a campaign.
codesearchnet
def snapshot(self, name):
    """Create a snapshot of the volume.

    Args:
        name: string - a human-readable name for the snapshot
    """
    endpoint = "volumes/%s/snapshots/" % self.id
    return self.get_data(endpoint, type=POST, params={"name": name})
Create a snapshot of the volume. Args: name: string - a human-readable name for the snapshot
juraj-google-style
def get_dos(self, partial_dos=False, npts_mu=10000, T=None): spin = (self.data.spin if isinstance(self.data.spin, int) else 1) (energies, densities, vvdos, cdos) = BL.BTPDOS(self.eband, self.vvband, npts=npts_mu) if (T is not None): densities = BL.smoothen_DOS(energies, densities, T) tdos = Dos(...
Return a Dos object interpolating bands Args: partial_dos: if True, projections will be interpolated as well and partial doses will be return. Projections must be available in the loader. npts_mu: number of energy points of the Dos T: parameter used to smooth the Dos
codesearchnet
def __wizard(rho, epsilon=None): if epsilon is None: epsilon = 0. dim = len(rho) rho_wizard = np.zeros([dim, dim]) v, w = np.linalg.eigh(rho) for j in range(dim): if v[j] < epsilon: tmp = v[j] v[j] = 0. x = 0. for...
Returns the nearest positive semidefinite operator to an operator. This method is based on reference [1]. It constrains positivity by setting negative eigenvalues to zero and rescaling the positive eigenvalues. Args: rho (array_like): the input operator. epsilon(float or None): threshold (>=0) for truncating small ei...
juraj-google-style
def tensordot(x1, x2, axes=2):
    """Compute the tensor dot product of `x1` and `x2` along `axes`.

    Args:
        x1: First tensor.
        x2: Second tensor.
        axes: Either an integer N (sum over the last N axes of `x1` and the
            first N axes of `x2`; corresponding sizes must match), or a pair
            of axis sequences, the first applying to `x1` and the second
            to `x2`.

    Returns:
        The tensor dot product of the inputs.
    """
    # Eager path: defer straight to the backend implementation.
    if not any_symbolic_tensors((x1, x2)):
        return backend.numpy.tensordot(x1, x2, axes=axes)
    return Tensordot(axes=axes).symbolic_call(x1, x2)
Compute the tensor dot product along specified axes. Args: x1: First tensor. x2: Second tensor. axes: - If an integer, N, sum over the last N axes of `x1` and the first N axes of `x2` in order. The sizes of the corresponding axes must match. - Or, a list of axes to be summed over, first sequence applying to `x1`, seco...
github-repos
def has_checked_field(self, locator, **kwargs):
    """Check whether the page or current node has a currently checked radio
    button or checkbox with the given label, value, or id.

    Args:
        locator (str): The label, name, or id of a checked field.
        **kwargs: Arbitrary keyword arguments for :class:`SelectorQuery`.

    Returns:
        bool: Whether it exists.
    """
    options = dict(kwargs, checked=True)
    return self.has_selector("field", locator, **options)
Checks if the page or current node has a radio button or checkbox with the given label, value, or id, that is currently checked. Args: locator (str): The label, name, or id of a checked field. **kwargs: Arbitrary keyword arguments for :class:`SelectorQuery`. Returns: bool: Whether it exists.
juraj-google-style
def read(self, size=None):
    """Read a byte string from the file-like object at the current offset.

    Reads `size` bytes, or all remaining data when no size is given.

    Args:
        size (Optional[int]): number of bytes to read, where None means all
            remaining data.

    Returns:
        bytes: data read.

    Raises:
        IOError: if the file-like object has not been opened.
    """
    if self._is_open:
        return self._vshadow_store.read(size)
    raise IOError('Not opened.')
Reads a byte string from the file-like object at the current offset. The function will read a byte string of the specified size or all of the remaining data if no size was specified. Args: size (Optional[int]): number of bytes to read, where None is all remaining data. Returns: bytes: data read. Raises: IOError: if...
juraj-google-style
def _ConvertBool(value, require_str): if require_str: if value == 'true': return True elif value == 'false': return False else: raise ParseError('Expected "true" or "false", not {0}.'.format(value)) if not isinstance(value, bool): raise ParseError('Expected true or false withou...
Convert a boolean value. Args: value: A scalar value to convert. require_str: If True, value must be a str. Returns: The bool parsed. Raises: ParseError: If a boolean value couldn't be consumed.
juraj-google-style
def LockedWrite(self, cache_data): if isinstance(cache_data, six.text_type): cache_data = cache_data.encode(encoding=self._encoding) with self._thread_lock: if (not self._EnsureFileExists()): return False with self._process_lock_getter() as acquired_plock: if (not...
Acquire an interprocess lock and write a string. This method safely acquires the locks then writes a string to the cache file. If the string is written successfully the function will return True, if the write fails for any reason it will return False. Args: cache_data: string or bytes to write. Returns: bool: succes...
codesearchnet
def spec_filled(self, pos_args, kw_args):
    """Check whether enough arguments are available to call this function.

    Args:
        pos_args (list): All of the positional values we have.
        kw_args (dict): All of the keyword arguments we have.

    Returns:
        bool: True if we have a filled spec, False otherwise.
    """
    # Arguments with defaults sit at the tail of arg_names; only the rest
    # are required.
    num_defaults = len(self.arg_defaults)
    required = self.arg_names[:-num_defaults] if num_defaults else self.arg_names

    missing = sum(1 for name in required if name not in kw_args)
    return missing <= len(pos_args)
Check if we have enough arguments to call this function. Args: pos_args (list): A list of all the positional values we have. kw_args (dict): A dict of all of the keyword args we have. Returns: bool: True if we have a filled spec, False otherwise.
codesearchnet
def dismiss_prompt(self, text=None, wait=None):
    """Execute the wrapped code, dismissing a prompt.

    Args:
        text (str | RegexObject, optional): Text to match against the text in
            the modal.
        wait (int | float, optional): Maximum time to wait for the modal to
            appear after executing the wrapped code.

    Raises:
        ModalNotFound: If a modal dialog hasn't been found.
    """
    modal = self.driver.dismiss_modal("prompt", text=text, wait=wait)
    with modal:
        yield
Execute the wrapped code, dismissing a prompt. Args: text (str | RegexObject, optional): Text to match against the text in the modal. wait (int | float, optional): Maximum time to wait for the modal to appear after executing the wrapped code. Raises: ModalNotFound: If a modal dialog hasn't been found.
juraj-google-style
def generate_argument_parser(cls, tree, actions={}): (cur_as, cur_subas) = tree parser = devassistant_argparse.ArgumentParser(argument_default=argparse.SUPPRESS, usage=argparse.SUPPRESS, add_help=False) cls.add_default_arguments_to(parser) for arg in cur_as.args: arg.add_argument_to(parser) ...
Generates argument parser for given assistant tree and actions. Args: tree: assistant tree as returned by devassistant.assistant_base.AssistantBase.get_subassistant_tree actions: dict mapping actions (devassistant.actions.Action subclasses) to their subaction dicts Returns: instance of devassistant_argparse.ArgumentPa...
codesearchnet
def ChunkedDecoderLayer(feature_depth, feedforward_depth, num_heads, dropout, chunk_selector, mode): return layers.Serial(layers.Residual(layers.Map(layers.LayerNorm()), layers.ChunkedCausalMultiHeadedAttention(feature_depth, num_heads=num_heads, dropout=dropout, chunk_selector=chunk_selector, mode=mode), layers.Ma...
Transformer decoder layer operating on chunks. Args: feature_depth: int: depth of embedding feedforward_depth: int: depth of feed-forward layer num_heads: int: number of attention heads dropout: float: dropout rate (how much to drop out) chunk_selector: a function from chunk number to list of chunks to attend. mode: ...
codesearchnet
def get_gui_hint(self, hint): if hint == 'type': if self.kwargs.get('action') == 'store_true' or self.kwargs.get('nargs') == 0: return 'bool' elif self.kwargs.get('action') == 'store_const': return 'const' ...
Returns the value for specified gui hint (or a sensible default value, if this argument doesn't specify the hint). Args: hint: name of the hint to get value for Returns: value of the hint specified in yaml or a sensible default
juraj-google-style
def _combine_eq_sets(eq_sets, operations): UNIT = np.eye(3) def all_equivalent_atoms_of_i(i, eq_sets, ops): 'WORKS INPLACE on operations\n ' visited = set([i]) tmp_eq_sets = {j: (eq_sets[j] - visited) for j in eq_sets[i]} while tmp_eq_sets: new_tmp_eq_sets...
Combines the dicts of _get_equivalent_atom_dicts into one Args: eq_sets (dict) operations (dict) Returns: dict: The returned dictionary has two possible keys: ``eq_sets``: A dictionary of indices mapping to sets of indices, each key maps to indices of all equivalent atoms. The keys are guaranteed to be not equivalen...
codesearchnet
def frame(self, locator=None, *args, **kwargs):
    """Execute the wrapped code within the given iframe.

    May not be supported by all drivers.

    Args:
        locator (str | Element, optional): The name/id of the frame or the
            frame's element. Defaults to the only frame in the document.
    """
    target = self._find_frame(locator, *args, **kwargs)
    self.switch_to_frame(target)
    try:
        yield
    finally:
        # Always return to the parent frame, even if the body raised.
        self.switch_to_frame("parent")
Execute the wrapped code within the given iframe using the given frame or frame name/id. May not be supported by all drivers. Args: locator (str | Element, optional): The name/id of the frame or the frame's element. Defaults to the only frame in the document.
juraj-google-style
def stop(self, **kwargs):
    """Stop the container; similar to the ``docker stop`` command.

    Args:
        timeout (int): Timeout in seconds to wait for the container to stop
            before sending a ``SIGKILL``. Default: 10

    Raises:
        :py:class:`docker.errors.APIError`
            If the server returns an error.
    """
    api = self.client.api
    return api.stop(self.id, **kwargs)
Stops a container. Similar to the ``docker stop`` command. Args: timeout (int): Timeout in seconds to wait for the container to stop before sending a ``SIGKILL``. Default: 10 Raises: :py:class:`docker.errors.APIError` If the server returns an error.
juraj-google-style
def _get_other_names(self, line): m = re.search(self.compound_regex['other_names'][0], line, re.IGNORECASE) if m: self.other_names.append(m.group(1).strip())
Parse and extract any other names that might be recorded for the compound Args: line (str): line of the msp file
juraj-google-style
def AddExtensionDescriptor(self, extension): if (not (isinstance(extension, descriptor.FieldDescriptor) and extension.is_extension)): raise TypeError('Expected an extension descriptor.') if (extension.extension_scope is None): self._toplevel_extensions[extension.full_name] = extension try: ...
Adds a FieldDescriptor describing an extension to the pool. Args: extension: A FieldDescriptor. Raises: AssertionError: when another extension with the same number extends the same message. TypeError: when the specified extension is not a descriptor.FieldDescriptor.
codesearchnet
def send_notice(self, room_id, text_content, timestamp=None):
    """Perform PUT /rooms/$room_id/send/m.room.message with the m.notice msgtype.

    Args:
        room_id (str): The room ID to send the event in.
        text_content (str): The m.notice body to send.
        timestamp (int): Set origin_server_ts (for application services only).
    """
    content = {"msgtype": "m.notice", "body": text_content}
    return self.send_message_event(room_id, "m.room.message", content,
                                   timestamp=timestamp)
Perform PUT /rooms/$room_id/send/m.room.message with m.notice msgtype Args: room_id (str): The room ID to send the event in. text_content (str): The m.notice body to send. timestamp (int): Set origin_server_ts (For application services only)
juraj-google-style
def imag(input, name=None): with ops.name_scope(name, 'Imag', [input]) as name: input = ops.convert_to_tensor(input, name='input') if input.dtype.is_complex: return gen_math_ops.imag(input, Tout=input.dtype.real_dtype, name=name) else: return array_ops.zeros_like(inpu...
Returns the imaginary part of a complex (or real) tensor. Given a tensor `input`, this operation returns a tensor of type `float` that is the imaginary part of each element in `input` considered as a complex number. If `input` is real, a tensor of all zeros is returned. For example: ```python x = tf.constant([-2.25 ...
github-repos
def readCmd(cls, cmd):
    """Run a command and return its decoded standard output.

    Args:
        cmd: string command line to execute.

    Returns:
        str: what the command echoed to stdout.
    """
    argv = shlex.split(cmd)
    process = subprocess.Popen(argv, stdout=subprocess.PIPE)
    stdout_data, _ = process.communicate(input=None)
    return stdout_data.decode()
run command and return the str format stdout Args: cmd: string Returns: str: what the command's echo
codesearchnet
def register_sub_command(self, sub_command, additional_ids=None):
    """Register a command as a subcommand.

    The subcommand's ``CommandDesc.command`` string is used as its primary
    id; every entry of ``additional_ids`` is registered as an alias for it.

    Args:
        sub_command (CommandBase): Subcommand to register.
        additional_ids (List[str]): Optional list of additional ids.
            Can be empty or None.
    """
    # The original default of [] was a mutable default argument, shared
    # across every call of this method; normalize None -> [] instead.
    if additional_ids is None:
        additional_ids = []

    self.__register_sub_command(sub_command, sub_command.command_desc().command)
    self.__additional_ids.update(additional_ids)
    for extra_id in additional_ids:  # renamed from `id`, which shadows the builtin
        self.__register_sub_command(sub_command, extra_id)
Register a command as a subcommand. It will have it's CommandDesc.command string used as id. Additional ids can be provided. Args: sub_command (CommandBase): Subcommand to register. additional_ids (List[str]): List of additional ids. Can be empty.
juraj-google-style
def conv_block(x, growth_rate, name): bn_axis = 3 if backend.image_data_format() == 'channels_last' else 1 x1 = layers.BatchNormalization(axis=bn_axis, epsilon=1.001e-05, name=name + '_0_bn')(x) x1 = layers.Activation('relu', name=name + '_0_relu')(x1) x1 = layers.Conv2D(4 * growth_rate, 1, use_bias=Fal...
A building block for a dense block. Args: x: input tensor. growth_rate: float, growth rate at dense layers. name: string, block label. Returns: Output tensor for the block.
github-repos
def is45(msg): if allzeros(msg): return False d = hex2bin(data(msg)) if wrongstatus(d, 1, 2, 3): return False if wrongstatus(d, 4, 5, 6): return False if wrongstatus(d, 7, 8, 9): return False if wrongstatus(d, 10, 11, 12): return False if wrongstatus(d...
Check if a message is likely to be BDS code 4,5. Meteorological hazard report Args: msg (String): 28 bytes hexadecimal message string Returns: bool: True or False
codesearchnet
def __fill_buffer(self, size=0):
    """Fill the internal buffer from the underlying blob.

    Args:
        size: Number of bytes to read. Will be clamped to
            [self.__buffer_size, MAX_BLOB_FETCH_SIZE].
    """
    read_size = max(size, self.__buffer_size)
    read_size = min(read_size, MAX_BLOB_FETCH_SIZE)

    start = self.__position
    self.__buffer = fetch_data(self.__blob_key, start, start + read_size - 1)
    self.__buffer_position = 0
    # Receiving fewer bytes than requested means the blob is exhausted.
    self.__eof = len(self.__buffer) < read_size
Fills the internal buffer. Args: size: Number of bytes to read. Will be clamped to [self.__buffer_size, MAX_BLOB_FETCH_SIZE].
juraj-google-style
def fleet_id_to_slug(did):
    """Convert a fleet id into a correctly formatted fleet slug.

    Args:
        did (long): A fleet id.
        did (string): A slug in the form of XXXX, XXXX-XXXX-XXXX,
            g--XXXX, g--XXXX-XXXX-XXXX.

    Returns:
        str: The fleet slug in the g--XXXX-XXXX-XXXX format.

    Raises:
        ArgumentError: if the ID is out of range or not a valid slug string.
    """
    try:
        slug = IOTileFleetSlug(did)
    except ValueError:
        raise ArgumentError('Unable to recognize {} as a fleet id'.format(did))

    return str(slug)
Converts a fleet id into a correct fleet slug. Args: did (long) : A fleet id did (string) : A fleet slug in the form of XXXX, XXXX-XXXX-XXXX, g--XXXX, g--XXXX-XXXX-XXXX Returns: str: The fleet slug in the g--XXXX-XXXX-XXXX format Raises: ArgumentError: if the ID is not in the [1, 16**12] range, or if not a valid stri...
codesearchnet
def _should_invoke_v2_op(): if not _ops.executing_eagerly_outside_functions(): return False if not _summary_ops_v2.has_default_writer(): warnings.warn('Cannot activate TF2 compatibility support for TF1 summary ops: default summary writer not found.') return False if _get_step_for_v2(...
Check if v2 op can be invoked. When calling TF1 summary op in eager mode, if the following conditions are met, v2 op will be invoked: - The outermost context is eager mode. - A default TF2 summary writer is present. - A step is set for the writer (using `tf.summary.SummaryWriter.as_default`, `tf.summary.experimental.s...
github-repos
def anm_score(self, x, y):
    """Compute the fitness score of the ANM model in the x->y direction.

    Args:
        x (numpy.ndarray): Variable seen as cause.
        y (numpy.ndarray): Variable seen as effect.

    Returns:
        float: ANM fit score (independence of the regression residuals
            from the cause).
    """
    regressor = GaussianProcessRegressor().fit(x, y)
    prediction = regressor.predict(x)
    return normalized_hsic(prediction - y, x)
Compute the fitness score of the ANM model in the x->y direction. Args: x (numpy.ndarray): Variable seen as cause y (numpy.ndarray): Variable seen as effect Returns: float: ANM fit score
juraj-google-style
def _SkipFieldValue(tokenizer): if tokenizer.TryConsumeByteString(): while tokenizer.TryConsumeByteString(): pass return if (not tokenizer.TryConsumeIdentifier() and not tokenizer.TryConsumeInt64() and not tokenizer.TryConsumeUint64() and not tokenizer.TryConsumeFloat()): ...
Skips over a field value. Args: tokenizer: A tokenizer to parse the field name and values. Raises: ParseError: In case an invalid field value is found.
juraj-google-style
def _matrix_conv(self, m1, m2): n = m1[0, 0, 0].shape.as_list()[0] if n != m2[0, 0, 0].shape.as_list()[0]: raise ValueError(f'The entries in matrices m1 and m2 must have the same dimensions. Received m1[0, 0, 0].shape={m1[0, 0, 0].shape} and m2[0, 0, 0].shape={m2[0, 0, 0].shape}.') k = int(np.cbrt(l...
Matrix convolution. Args: m1: is a k x k x k dictionary, each element is a n x n matrix. m2: is a l x l x l dictionary, each element is a n x n matrix. Returns: (k + l - 1) x (k + l - 1) x (k + l - 1) dictionary each element is a n x n matrix. Raises: ValueError: if the entries of m1 and m2 are of different dimensio...
github-repos