code
stringlengths
20
4.93k
docstring
stringlengths
33
1.27k
source
stringclasses
3 values
def sget_steptime(self, cycle, step, dataset_number=None): dataset_number = self._validate_dataset_number(dataset_number) if (dataset_number is None): self._report_empty_dataset() return cycle_index_header = self.headers_normal.cycle_index_txt step_time_header = self.headers_normal.step_...
Returns step time for cycle, step. Convenience function; same as issuing dfdata[(dfdata[cycle_index_header] == cycle) & (dfdata[step_index_header] == step)][step_time_header] Args: cycle: cycle number step: step number dataset_number: the dataset number (automatic selection if None) Returns: pandas.Series or None if...
codesearchnet
def execute_command(self, command: str, read: bool=False) -> None: if self.debug: print(command) return 'SIMULATING VALUE' if read else True else: print('\nCOMMAND\n', command, '\n' + '-' * 40) try: cmd = subprocess.run(command, shell=True, capture_output=read, text=T...
Helper function that either executes or prints each command. Args: command - a command line command, typically a gcloud command. read - if True, the command's output is passed back to the caller. Returns: Bool - if command value is not required, simply indicate success or failure. String - if read is specified, the com...
github-repos
def _parse_vars(self, tokens):
    """Parse ``key=value`` tokens into a dictionary.

    Parsing stops at the first token that starts with ``#`` (a comment),
    mirroring shell-style trailing comments.

    NOTE(review): the source line was garbled (``startswith('``) — the ``#``
    marker was most likely stripped by the dataset export; reconstructed here.

    Args:
        tokens: iterable of strings such as ``['dtap=prod', 'comment=some comment']``.

    Returns:
        dict: mapping of stripped keys to stripped values, e.g.
        ``{'dtap': 'prod', 'comment': 'some comment'}``.
    """
    key_values = {}
    for token in tokens:
        if token.startswith('#'):
            break
        k, v = token.split('=', 1)
        key_values[k.strip()] = v.strip()
    return key_values
Given an iterable of tokens, returns variables and their values as a dictionary. For example: ['dtap=prod', 'comment=some comment'] Returns: {'dtap': 'prod', 'comment': 'some comment'}
codesearchnet
def pot_string_from_file(filename='feff.inp'): with zopen(filename, "rt") as f_object: f = f_object.readlines() ln = -1 pot_str = ["POTENTIALS\n"] pot_tag = -1 pot_data = 0 pot_data_over = 1 sep_line_pattern = [re.comp...
Reads Potential parameters from a feff.inp or FEFFPOT file. The lines are arranged as follows: ipot Z element lmax1 lmax2 stoichiometry spinph Args: filename: file name containing potential data. Returns: FEFFPOT string.
juraj-google-style
def patch_toText(self, patches):
    """Take a list of patches and return a textual representation.

    Args:
        patches: Array of Patch objects.

    Returns:
        Text representation of patches.
    """
    return ''.join(str(patch) for patch in patches)
Take a list of patches and return a textual representation. Args: patches: Array of Patch objects. Returns: Text representation of patches.
codesearchnet
def list_attributes(self, name):
    """Look up the attributes of a list.

    Args:
        name (str): The name of the list.

    Returns:
        dict: attributes of the list (a one-element list response is
        unwrapped to its single element).
    """
    response = self.client.service.getListAttributes(name, self.proxy_id)
    is_singleton = isinstance(response, list) and len(response) == 1
    return response[0] if is_singleton else response
Look up the attributes of a list. Args: name (str): The name of the list Returns: dict: attributes of the list
juraj-google-style
def create_token(self, token_name, project_name, dataset_name, is_public): url = self.url() + '/nd/resource/dataset/{}'.format( dataset_name) + '/project/{}'.format(project_name) + \ '/token/{}/'...
Creates a token with the given parameters. Arguments: project_name (str): Project name dataset_name (str): Dataset name project is based on token_name (str): Token name is_public (int): 1 is public. 0 is not public Returns: bool: True if project created, false if not created.
juraj-google-style
def Scan(self, scan_context, auto_recurse=True, scan_path_spec=None): if not scan_context: raise ValueError('Invalid scan context.') scan_context.updated = False if scan_path_spec: scan_node = scan_context.GetScanNode(scan_path_spec) else: scan_node = scan_context.GetUnscannedS...
Scans for supported formats. Args: scan_context (SourceScannerContext): source scanner context. auto_recurse (Optional[bool]): True if the scan should automatically recurse as far as possible. scan_path_spec (Optional[PathSpec]): path specification to indicate where the source scanner should continue scanning, where N...
juraj-google-style
def update_nanopubstore_start_dt(url: str, start_dt: str): hostname = urllib.parse.urlsplit(url)[1] start_dates_doc = state_mgmt.get(start_dates_doc_key) if not start_dates_doc: start_dates_doc = { "_key": start_dates_doc_key, "start_dates": [{"nanopubstore": hostname,...
Add nanopubstore start_dt to belapi.state_mgmt collection Args: url: url of nanopubstore start_dt: datetime of last query against nanopubstore for new ID's
juraj-google-style
def __init__(self, seed_fn, desc=None):
    """Configure the `Query`.

    Args:
        seed_fn (callable): Callable with no arguments that produces a
            list of values.

    Keyword Args:
        desc (str): A description of the query, used in log messages.
            Defaults to the name of the seed function.
    """
    self.seed_fn = seed_fn
    self.transforms = []
    self.desc_stack = []
    if desc is not None:
        self.desc = desc
    else:
        self.desc = u'Query({})'.format(getattr(seed_fn, '__name__', ''))
Configure the `Query`. Args: seed_fn (callable): Callable with no arguments that produces a list of values. Keyword Args: desc (str): A description of the query, used in log messages. If not provided, defaults to the name of the seed function. Returns: Query
juraj-google-style
def add_triple(self, p, o, auto_refresh=True):
    """Add a triple to self.rdf.graph, using self.uri as the subject.

    Args:
        p (rdflib.term.URIRef): predicate
        o: object
        auto_refresh (bool): whether or not to update object-like
            self.rdf.triples

    Returns:
        None: adds triple to self.rdf.graph
    """
    triple = (self.uri, p, self._handle_object(o))
    self.rdf.graph.add(triple)
    self._handle_triple_refresh(auto_refresh)
add triple by providing p,o, assumes s = subject Args: p (rdflib.term.URIRef): predicate o (): object auto_refresh (bool): whether or not to update object-like self.rdf.triples Returns: None: adds triple to self.rdf.graph
juraj-google-style
def get_associated_resource(self, task): if (not task): raise HPOneViewUnknownType(MSG_INVALID_TASK) if ((task['category'] != 'tasks') and (task['category'] != 'backups')): raise HPOneViewUnknownType(MSG_UNKNOWN_OBJECT_TYPE) if (task['type'] == 'TaskResourceV2'): resource_uri = task[...
Retrieve a resource associated with a task. Args: task: task dict Returns: tuple: task (updated), the entity found (dict)
codesearchnet
def plot_vec(axis, step, var): xmesh, ymesh, vec1, vec2 = get_meshes_vec(step, var) dipz = step.geom.nztot if conf.field.shift: vec1 = np.roll(vec1, conf.field.shift, axis=0) vec2 = np.roll(vec2, conf.field.shift, axis=0) if step.geom.spherical or conf.plot.ratio is None: d...
Plot vector field. Args: axis (:class:`matplotlib.axes.Axes`): the axis handler of an existing matplotlib figure where the vector field should be plotted. step (:class:`~stagpy.stagyydata._Step`): a step of a StagyyData instance. var (str): the vector field name.
juraj-google-style
def set_session(self, headers=None): if (headers is None): headers = {'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/48.0.2564.116 Safari/537.36'} elif (not isinstance(headers, dict)): raise TypeError('"headers" must be a dict object'...
Init session with default or custom headers Args: headers: A dict of headers (default None, thus using the default header to init the session)
codesearchnet
def StringEscape(self, string, match, **_):
    """Escape backslashes found inside a string quote.

    Backslashes followed by anything other than ['"rnbt] are simply
    included in the string verbatim.

    Args:
        string: The string that matched.
        match: The match object (match.group(1) is the escaped code).
    """
    precondition.AssertType(string, Text)
    is_known_escape = match.group(1) in "'\"rnbt"
    addition = compatibility.UnescapeString(string) if is_known_escape else string
    self.string += addition
Escape backslashes found inside a string quote. Backslashes followed by anything other than ['"rnbt] will just be included in the string. Args: string: The string that matched. match: The match object (m.group(1) is the escaped code)
juraj-google-style
def _get_bundles_by_type(self, type): bundles = {} bundle_definitions = self.config.get(type) if bundle_definitions is None: return bundles for bundle_name, paths in bundle_definitions.items(): bundle_files = [] for path ...
Get a dictionary of bundles for requested type. Args: type: 'javascript' or 'css'
juraj-google-style
def __init__(self, streaming_buffer, writer_spec=None): self._streaming_buffer = streaming_buffer self._no_dup = False if writer_spec: self._no_dup = writer_spec.get(self._NO_DUPLICATE, False) if self._no_dup: self._seg_index = int(streaming_buffer.name.rsplit("-", ...
Initialize a GoogleCloudStorageOutputWriter instance. Args: streaming_buffer: an instance of writable buffer from cloudstorage_api. writer_spec: the specification for the writer.
juraj-google-style
def __init__(self, specification_store, signature_identifiers):
    """Initializes a signature-based file entry filter.

    Args:
        specification_store (FormatSpecificationStore): a specification store.
        signature_identifiers (list[str]): signature identifiers.
    """
    super(SignaturesFileEntryFilter, self).__init__()
    self._signature_identifiers = []
    # The original assigned self._file_scanner = None and then immediately
    # overwrote it; a single assignment is sufficient.
    self._file_scanner = self._GetScanner(
        specification_store, signature_identifiers)
Initializes a signature-based file entry filter. Args: specification_store (FormatSpecificationStore): a specification store. signature_identifiers (list[str]): signature identifiers.
juraj-google-style
def calculate_sun_from_hoy(self, hoy, is_solar_time=False):
    """Get Sun data for an hour of the year.

    Args:
        hoy: Hour of the year.
        is_solar_time: A boolean to indicate if the input hour is solar time
            (Default: False).

    Returns:
        A sun object for this particular time.
    """
    # Local name `dt` avoids shadowing the common `datetime` name.
    dt = DateTime.from_hoy(hoy, self.is_leap_year)
    return self.calculate_sun_from_date_time(dt, is_solar_time)
Get Sun data for an hour of the year. Args: datetime: Ladybug datetime is_solar_time: A boolean to indicate if the input hour is solar time (Default: False). Returns: A sun object for this particular time
juraj-google-style
def preemphasis(signal, shift=1, cof=0.98):
    """Apply pre-emphasis filtering to the signal.

    Args:
        signal (array): The input signal.
        shift (int): The shift step.
        cof (float): The pre-emphasis coefficient; 0 means no filtering.

    Returns:
        array: The pre-emphasized signal.
    """
    return signal - cof * np.roll(signal, shift)
preemphasising on the signal. Args: signal (array): The input signal. shift (int): The shift step. cof (float): The preemphasising coefficient. 0 equals to no filtering. Returns: array: The pre-emphasized signal.
juraj-google-style
def __init__(self, operator, left, right):
    """Construct an expression that connects two expressions with an operator.

    Args:
        operator: unicode, specifying where the field was declared.
        left: Expression on the left side of the binary operator.
        right: Expression on the right side of the binary operator.

    Returns:
        new BinaryComposition object
    """
    super(BinaryComposition, self).__init__(operator, left, right)
    self.operator, self.left, self.right = operator, left, right
    self.validate()
Construct an expression that connects two expressions with an operator. Args: operator: unicode, specifying where the field was declared left: Expression on the left side of the binary operator right: Expression on the right side of the binary operator Returns: new BinaryComposition object
juraj-google-style
async def update(self, service_id: str, version: str, *, image: str=None, rollback: bool=False) -> bool: if ((image is None) and (rollback is False)): raise ValueError('You need to specify an image.') inspect_service = (await self.inspect(service_id)) spec = inspect_service['Spec'] if (image is ...
Update a service. If rollback is True image will be ignored. Args: service_id: ID or name of the service. version: Version of the service that you want to update. rollback: Rollback the service to the previous service spec. Returns: True if successful.
codesearchnet
def _generate_G_points(self, kpoint): gpoints = [] for i in range(2 * self._nbmax[2] + 1): i3 = i - 2 * self._nbmax[2] - 1 if i > self._nbmax[2] else i for j in range(2 * self._nbmax[1] + 1): j2 = j - 2 * self._nbmax[1] - 1 if j > self._nbmax[1] else j ...
Helper function to generate G-points based on nbmax. This function iterates over possible G-point values and determines if the energy is less than G_{cut}. Valid values are appended to the output array. This function should not be called outside of initialization. Args: kpoint (np.array): the array containing the cur...
juraj-google-style
def Normalize(self, fraction=1.0): if self.log: raise ValueError("Pmf is under a log transform") total = self.Total() if total == 0.0: raise ValueError('total probability is zero.') logging.warning('Normalize: total probability is zero.') ...
Normalizes this PMF so the sum of all probs is fraction. Args: fraction: what the total should be after normalization Returns: the total probability before normalizing
juraj-google-style
def make_pixel_mask(image: 'torch.Tensor', output_size: Tuple[int, int]) -> 'torch.Tensor':
    """Make a pixel mask for the image, where 1 indicates a valid pixel and 0
    indicates padding.

    Args:
        image (`torch.Tensor`): Batched image tensor whose last two dims are
            (height, width) and whose first dim is the batch size.
        output_size (`Tuple[int, int]`): Output (height, width) of the mask.

    Returns:
        `torch.Tensor` of shape (batch_size, *output_size), dtype long.
    """
    input_height, input_width = image.shape[-2:]
    batch_size = image.size(0)
    mask = torch.zeros((batch_size, *output_size), dtype=torch.long)
    # Fix: index the spatial dims explicitly. The original wrote
    # mask[:input_height, :input_width] = 1, which slices the BATCH dim by
    # input_height and the height dim by input_width, producing a wrong mask
    # whenever batch_size != input_height.
    mask[:, :input_height, :input_width] = 1
    return mask
Make a pixel mask for the image, where 1 indicates a valid pixel and 0 indicates padding. Args: image (`np.ndarray`): Image to make the pixel mask for. output_size (`Tuple[int, int]`): Output size of the mask.
github-repos
def get_audience(self, audience_id): audience = self.audience_id_map.get(audience_id) if audience: return audience self.logger.error('Audience ID "%s" is not in datafile.' % audience_id) self.error_handler.handle_error(exceptions.InvalidAudienceException((enums.Errors.INVALID_AUDIENCE_ERROR...
Get audience object for the provided audience ID. Args: audience_id: ID of the audience. Returns: Dict representing the audience.
juraj-google-style
def get(self, branch='master', filename=''):
    """Retrieve _filename_ from GitLab.

    Args:
        branch (str): Git Branch to find file.
        filename (str): Name of file to retrieve relative to root of Git
            repository, or _runway_dir_ if specified.

    Returns:
        str: Contents of file.
    """
    if self.runway_dir:
        return self.local_file(filename=filename)
    return self.remote_file(branch=branch, filename=filename)
Retrieve _filename_ from GitLab. Args: branch (str): Git Branch to find file. filename (str): Name of file to retrieve relative to root of Git repository, or _runway_dir_ if specified. Returns: str: Contents of file.
juraj-google-style
def load_strain(self, strain_id, strain_genome_file):
    """Load a strain as a new GEM-PRO by its ID and associated genome file.

    The new strain is stored in the ``strains`` attribute.

    Args:
        strain_id (str): Strain ID.
        strain_genome_file (str): Path to strain genome file.
    """
    new_strain = GEMPRO(gem_name=strain_id,
                        genome_path=strain_genome_file,
                        write_protein_fasta_files=False)
    self.strains.append(new_strain)
    return self.strains.get_by_id(strain_id)
Load a strain as a new GEM-PRO by its ID and associated genome file. Stored in the ``strains`` attribute. Args: strain_id (str): Strain ID strain_genome_file (str): Path to strain genome file
codesearchnet
def SignFile(self, in_filename, out_filename=None): if out_filename is None: out_filename = "%s.signed" % in_filename args = [ "-certs", self.cert, "-key", self.key, "-n", self.application, "-t", "http: "-in", in_filename, "-out", out_filename ] try: output_log...
Sign a file using osslsigncode. Args: in_filename: file to read from out_filename: file to output to, if none we output to the same filename as the input with a .signed suffix. Returns: output filename string Raises: pexpect.ExceptionPexpect: if the expect invocation of osslsigncode fails. SigningError: for signing f...
juraj-google-style
def save_checkpoint(model, filename, optimizer=None, meta=None): if (meta is None): meta = {} elif (not isinstance(meta, dict)): raise TypeError('meta must be a dict or None, but got {}'.format(type(meta))) meta.update(mmcv_version=mmcv.__version__, time=time.asctime()) mmcv.mkdir_or_exi...
Save checkpoint to file. The checkpoint will have 3 fields: ``meta``, ``state_dict`` and ``optimizer``. By default ``meta`` will contain version and time info. Args: model (Module): Module whose params are to be saved. filename (str): Checkpoint filename. optimizer (:obj:`Optimizer`, optional): Optimizer to be saved....
codesearchnet
def _check_sensor_platform_consistency(self, sensor):
    """Log an error if sensor and platform are inconsistent.

    NOTE(review): despite the dataset docstring claiming a ValueError, this
    implementation only logs the mismatch — confirm intended behavior.

    Args:
        sensor (str): Sensor name from YAML dataset definition.
    """
    ref_sensor = SENSORS.get(self.platform)
    if ref_sensor and sensor != ref_sensor:
        logger.error('Sensor-Platform mismatch: {} is not a payload '
                     'of {}. Did you choose the correct reader?'
                     .format(sensor, self.platform))
Make sure sensor and platform are consistent Args: sensor (str) : Sensor name from YAML dataset definition Raises: ValueError if they don't match
codesearchnet
def read_config(config_filepath, logger=logging.getLogger('ProsperCommon')): config_parser = configparser.ConfigParser(interpolation=ExtendedInterpolation(), allow_no_value=True, delimiters='=', inline_comment_prefixes=' logger.debug('config_filepath=%s', config_filepath) with open(config_filepath, 'r') as ...
fetch and parse config file Args: config_filepath (str): path to config file. abspath > relpath logger (:obj:`logging.Logger`): logger to catch error msgs
codesearchnet
def Check(self, error, filename, linenum): if Match('T(EST|est)', self.current_function): base_trigger = self._TEST_TRIGGER else: base_trigger = self._NORMAL_TRIGGER trigger = (base_trigger * (2 ** _VerboseLevel())) if (self.lines_in_function > trigger): error_level = int(math.lo...
Report if too many lines in function body. Args: error: The function to call with any errors found. filename: The name of the current file. linenum: The number of the line to check.
codesearchnet
def instance_default(self, obj):
    """Get the default value that will be used for a specific instance.

    Args:
        obj (HasProps): The instance to get the default value for.

    Returns:
        object
    """
    themed = obj.themed_values()
    return self.property.themed_default(obj.__class__, self.name, themed)
Get the default value that will be used for a specific instance. Args: obj (HasProps) : The instance to get the default value for. Returns: object
juraj-google-style
def exists(self, path: str) -> bool:
    """Check if the provided path exists on the FileSystem.

    Args:
        path: string path that needs to be checked.

    Returns:
        boolean flag indicating if path exists.

    Raises:
        NotImplementedError: always; concrete FileSystems must override.
    """
    raise NotImplementedError
Check if the provided path exists on the FileSystem. Args: path: string path that needs to be checked. Returns: boolean flag indicating if path exists
github-repos
def _eligible_features_from_example_handler(self, request):
    """Return a JSON list describing each eligible feature in the examples.

    Args:
        request: A request for features.

    Returns:
        An http response with a JSON object per feature. Numeric features
        are represented as {name: observedMin: observedMax:}; categorical
        features as {name: samples:[]}.
    """
    scanned = self.examples[0:NUM_EXAMPLES_TO_SCAN]
    features_list = inference_utils.get_eligible_features(scanned, NUM_MUTANTS)
    return http_util.Respond(request, features_list, 'application/json')
Returns a list of JSON objects for each feature in the example. Args: request: A request for features. Returns: A list with a JSON object for each feature. Numeric features are represented as {name: observedMin: observedMax:}. Categorical features are represented as {name: samples:[]}.
codesearchnet
def plot_stacked_hist(self, key="wall_time", nmax=5, ax=None, **kwargs): ax, fig, plt = get_ax_fig_plt(ax=ax) mpi_rank = "0" timers = self.timers(mpi_rank=mpi_rank) n = len(timers) names, values = [], [] rest = np.zeros(n) for idx, sname in enumerate(s...
Plot stacked histogram of the different timers. Args: key: Keyword used to extract data from the timers. Only the first `nmax` sections with largest value are shown. nmax: Maximum number of sections to show. Other entries are grouped together in the `others` section. ax: matplotlib :class:`Axes` or None if a new figure ...
juraj-google-style
def array(self):
    """Get all resources and return the result as an array.

    Returns:
        array of str: Array of resources.
    """
    resource = self.type.RESOURCE
    url = '{}/{}'.format(__endpoint__, resource)
    return RestClient.get(url, self.params)[resource]
Get all resources and return the result as an array Returns: array of str: Array of resources
codesearchnet
def createCategoryFilter(self, retina_name, filter_name, body, ): resourcePath = '/classify/create_category_filter' method = 'POST' queryParams = {} headerParams = {'Accept': 'Application/json', 'Content-Type': 'application/json'} postData = None queryParams['...
get filter for classifier Args: filter_name, str: A unique name for the filter. (required) body, FilterTrainingObject: The list of positive and negative (optional) example items. (required) retina_name, str: The retina name (required) Returns: CategoryFilter
juraj-google-style
class AlignVisionBlock(nn.Module): def __init__(self, config: AlignVisionConfig, in_dim: int, out_dim: int, stride: int, expand_ratio: int, kernel_size: int, drop_rate: float, id_skip: bool, adjust_padding: bool): super().__init__() self.expand_ratio = expand_ratio self.expand = True if sel...
This corresponds to the block module of original the EfficientNet vision encoder implementation. Args: config ([`AlignVisionConfig`]): Model configuration class. in_dim (`int`): Number of input channels. out_dim (`int`): Number of output channels. stride (`int`): Stride size to be used in convolution layers. expand_ra...
github-repos
def trace_buffer_capacity(self):
    """Retrieve the trace buffer's current capacity.

    Returns:
        The current capacity of the trace buffer. This is not necessarily
        the maximum possible size the buffer could be configured with.

    Raises:
        JLinkException: if querying the DLL fails.
    """
    capacity = ctypes.c_uint32(0)
    command = enums.JLinkTraceCommand.GET_CONF_CAPACITY
    status = self._dll.JLINKARM_TRACE_Control(command, ctypes.byref(capacity))
    if status == 1:
        raise errors.JLinkException('Failed to get trace buffer size.')
    return capacity.value
Retrieves the trace buffer's current capacity. Args: self (JLink): the ``JLink`` instance. Returns: The current capacity of the trace buffer. This is not necessarily the maximum possible size the buffer could be configured with.
codesearchnet
def filter(self, filter_fn=None, desc=None, **kwargs): if ((filter_fn is not None) and kwargs): raise TypeError('Must supply either a filter_fn or attribute filter parameters to filter(), but not both.') if ((filter_fn is None) and (not kwargs)): raise TypeError('Must supply one of filter_fn or ...
Return a copy of this query, with some values removed. Example usages: .. code:: python # Returns a query that matches even numbers q.filter(filter_fn=lambda x: x % 2) # Returns a query that matches elements with el.description == "foo" q.filter(description="foo") Keyword Args: filter_fn (callable): If specified, ...
codesearchnet
def format_ascii(sensor_graph): cmdfile = CommandFile('Sensor Graph', '1.0') cmdfile.add('set_online', False) cmdfile.add('clear') cmdfile.add('reset') for node in sensor_graph.dump_nodes(): cmdfile.add('add_node', node) for streamer in sensor_graph.streamers: other = 255 ...
Format this sensor graph as a loadable ascii file format. This includes commands to reset and clear previously stored sensor graphs. NB. This format does not include any required configuration variables that were specified in this sensor graph, so you should also output tha information separately in, e.g. the config ...
codesearchnet
def _VerifyValues(self, tensor_in_sizes, filter_in_sizes, stride, padding, data_type, use_gpu, grouped_conv=False, data_format='NHWC', dilations=None, tolerance=None): input_size = 1 filter_size = 1 for s in tensor_in_sizes: input_size *= s for s in filter_in_sizes: filter_size *= s ...
Verifies the output values of the convolution function. Args: tensor_in_sizes: Input tensor dimensions in [batch, input_rows, input_cols, input_depth]. filter_in_sizes: Filter tensor dimensions in [filter_rows, filter_cols, input_depth, depth_multiplier]. stride: Stride. padding: Padding type. data_type: The data type...
github-repos
def install_event_handlers(self, categories=None, handlers=None): if categories is not None and handlers is not None: raise ValueError("categories and handlers are mutually exclusive!") from .events import get_event_handler_classes if categories: raise NotImplem...
Install the `EventHandlers for this `Node`. If no argument is provided the default list of handlers is installed. Args: categories: List of categories to install e.g. base + can_change_physics handlers: explicit list of :class:`EventHandler` instances. This is the most flexible way to install handlers. .. note:: cat...
juraj-google-style
def remove_bond(self, idx1, idx2):
    """Remove a bond from an openbabel molecule.

    Args:
        idx1: The atom index of one of the atoms participating in the bond.
        idx2: The atom index of the other atom participating in the bond.
    """
    for obbond in ob.OBMolBondIter(self._obmol):
        endpoints = {obbond.GetBeginAtomIdx(), obbond.GetEndAtomIdx()}
        if endpoints == {idx1, idx2}:
            self._obmol.DeleteBond(obbond)
Remove a bond from an openbabel molecule Args: idx1: The atom index of one of the atoms participating the in bond idx2: The atom index of the other atom participating in the bond
codesearchnet
def convert_videos_to_summaries(input_videos, output_videos, target_videos, tag, decode_hparams, display_ground_truth=False): fps = decode_hparams.frames_per_second border_percent = decode_hparams.border_percent max_outputs = decode_hparams.max_di...
Converts input, output and target videos into video summaries. Args: input_videos: 5-D NumPy array, (NTHWC) conditioning frames. output_videos: 5-D NumPy array, (NTHWC) model predictions. target_videos: 5-D NumPy array, (NTHWC) target frames. tag: tf summary tag. decode_hparams: HParams. display_ground_truth: Whether ...
juraj-google-style
def generate_json_schema(cls, schema, context=DEFAULT_DICT):
    """Generate a JSON Schema from a Marshmallow schema.

    Args:
        schema (marshmallow.Schema|str): The Marshmallow schema, or the
            Python path to one, to create the JSON schema for.
        context: context passed to the dumper instance.

    Returns:
        The dumped JSON schema data.
    """
    resolved = cls._get_schema(schema)
    return cls(context=context).dump(resolved).data
Generate a JSON Schema from a Marshmallow schema. Args: schema (marshmallow.Schema|str): The Marshmallow schema, or the Python path to one, to create the JSON schema for. Keyword Args: file_pointer (file, optional): The path or pointer to the file to write this schema to. If not provided, the schema will be dumped to...
codesearchnet
def _clean_url(url):
    """Produce a canonical Bokeh server URL.

    Args:
        url (str): A URL to clean, or "default". If "default" then the
            ``DEFAULT_SERVER_HTTP_URL`` will be used.

    Returns:
        str
    """
    resolved = DEFAULT_SERVER_HTTP_URL if url == 'default' else url
    if resolved.startswith('ws'):
        raise ValueError('url should be the http or https URL for the server, not the websocket URL')
    return resolved.rstrip('/')
Produce a canonical Bokeh server URL. Args: url (str) A URL to clean, or "default". If "default" then the ``BOKEH_SERVER_HTTP_URL`` will be returned. Returns: str
codesearchnet
def encode(self, tf_graph_predictions):
    """Encode the graph json prediction into csv.

    Args:
        tf_graph_predictions: python dict.

    Returns:
        csv string.
    """
    values = [str(tf_graph_predictions[col]) for col in self._header]
    return ','.join(values)
Encodes the graph json prediction into csv. Args: tf_graph_predictions: python dict. Returns: csv string.
juraj-google-style
def lookup_value(self, api_name, key):
    """Look up a cached value for an API call.

    Args:
        api_name: a string name of the API. Keys and values are segmented
            by api_name.
        key: a string key for the specific call.

    Returns:
        The cached value, or None if absent.
    """
    try:
        api_cache = self._cache[api_name]
    except KeyError:
        return None
    return api_cache.get(key, None)
Look up the cached value of an API call. Args: api_name: a string name of the API. Keys and values are segmented by api_name. key: a string key for the specific call. Returns: the cached value, or None if absent.
juraj-google-style
def make_triple(sub, pred, obj):
    """Join a subject, predicate and object with single spaces.

    Args:
        sub: Subject.
        pred: Predicate.
        obj: Object.

    Returns:
        str: "<sub> <pred> <obj> ."
    """
    return '{} {} {} .'.format(sub, pred, obj)
Takes a subject predicate and object and joins them with a space in between Args: sub -- Subject pred -- Predicate obj -- Object Returns str
juraj-google-style
def build_inputs_with_special_tokens(self, token_ids_0: List[int], token_ids_1: Optional[List[int]]=None) -> List[int]: if token_ids_1 is None: return [self.cls_token_id] + token_ids_0 + [self.sep_token_id] cls = [self.cls_token_id] sep = [self.sep_token_id] return cls + token_ids_0 + sep + toke...
Build model inputs from a sequence or a pair of sequence for sequence classification tasks by concatenating and adding special tokens. A ConvBERT sequence has the following format: - single sequence: `[CLS] X [SEP]` - pair of sequences: `[CLS] A [SEP] B [SEP]` Args: token_ids_0 (`List[int]`): List of IDs to which the...
github-repos
def to_sql(self, view: views.View, limit: Optional[int]=None) -> str: encoder = _spark_interpreter.SparkSqlInterpreter(value_set_codes_table='VALUESET_VIEW') dataset = f'{self._fhir_dataset}' sql_generator = runner_utils.RunnerSqlGenerator(view, encoder, dataset, self._snake_case_resource_tables) sql_st...
Returns the SQL used to run the given view in Spark. Args: view: the view used to generate the SQL. limit: optional limit to attach to the generated SQL. Returns: The SQL used to run the given view.
github-repos
def copy_workspace(self, uri, new_name):
    """Copy the current workspace.

    Args:
        uri (dict): the uri of the workspace being copied; needs 'did'
            and 'wvm' keys.
        new_name (str): the new name of the copied workspace.

    Returns:
        requests.Response: Onshape response data.
    """
    payload = {'isPublic': True, 'newName': new_name}
    endpoint = '/api/documents/{}/workspaces/{}/copy'.format(uri['did'], uri['wvm'])
    return self._api.request('post', endpoint, body=payload)
Copy the current workspace. Args: - uri (dict): the uri of the workspace being copied. Needs to have a did and wid key. - new_name (str): the new name of the copied workspace. Returns: - requests.Response: Onshape response data
codesearchnet
def compile_keywords(keywords): mdt = [] cz_keywords = [] en_keywords = [] for keyword in keywords: keyword = keyword_to_info(keyword.encode('utf-8')) if (not keyword): continue cz_keywords.append({'uid': keyword['uid'], 'zahlavi': keyword['zahlavi'], 'zdroj': 'czenas...
Translate `keywords` to full keyword records as they are used in Aleph. Returns tuple with three lists, each of which is later used in different part of the MRC/MARC record. Args: keywords (list): List of keyword strings. Returns: tuple: (mdt_list, cz_keyword_list, en_keyword_list)
codesearchnet
def GetVShadowStoreByPathSpec(self, path_spec):
    """Retrieve a VSS store for a path specification.

    Args:
        path_spec (PathSpec): path specification.

    Returns:
        pyvshadow.store: a VSS store or None if not available.
    """
    index = vshadow.VShadowPathSpecGetStoreIndex(path_spec)
    return None if index is None else self._vshadow_volume.get_store(index)
Retrieves a VSS store for a path specification. Args: path_spec (PathSpec): path specification. Returns: pyvshadow.store: a VSS store or None if not available.
codesearchnet
def add_string_parameters(self, string):
    """Add the given string parameter(s) to the internal list.

    Args:
        string (list of str or str): A string or list of strings to add to
            the parameters.
    """
    if isinstance(string, list):
        for item in string:
            self.add_string_parameters(item)
        return
    self._parameters.append('{ "value": "' + string + '" }')
Add given string parameters to the internal list. Args: string (list of str or str): A string or list of strings to add to the parameters.
codesearchnet
def file_digest(source): hash_sha256 = hashlib.sha256() should_close = False if isinstance(source, six.string_types): should_close = True source = open(source, 'rb') for chunk in iter((lambda : source.read(_BUFFER_SIZE)), b''): hash_sha256.update(chunk) if should_close: ...
Calculates SHA256 digest of a file. Args: source: either a file-like object or a path to file
codesearchnet
def _new_open_bin(self, remaining_rect): factories_to_delete = set() new_bin = None for key, binfac in self._empty_bins.items(): a_rectangle_fits = False for _, rect in remaining_rect.items(): if binfac.fits_inside(rec...
Extract the next bin where at least one of the rectangles in rem Arguments: remaining_rect (dict): rectangles not placed yet Returns: PackingAlgorithm: Initialized empty packing bin. None: No bin big enough for the rectangle was found
juraj-google-style
def find_element(driver, elem_path, by=CSS, timeout=TIMEOUT, poll_frequency=0.5):
    """Find and return an element once located.

    Waits up to ``timeout`` seconds for the element to appear; raises a
    TimeoutException if it never does.

    Args:
        driver: A selenium webdriver or element.
        elem_path (str): Locator string for the element.
        by: Locator strategy (defaults to CSS).
        timeout: Seconds to wait before giving up.
        poll_frequency: Seconds between polls.
    """
    waiter = WebDriverWait(driver, timeout, poll_frequency)
    locator = (by, elem_path)
    return waiter.until(EC.presence_of_element_located(locator))
Find and return an element once located find_element locates an element on the page, waiting for up to timeout seconds. The element, when located, is returned. If not located, a TimeoutException is raised. Args: driver (selenium webdriver or element): A driver or element elem_path (str): String used to located the el...
codesearchnet
def create_issues_report(self, timeout=-1):
    """Create an unexpected zoning report for a SAN.

    Args:
        timeout: Timeout in seconds. Waits for task completion by default.
            The timeout does not abort the operation in OneView; it just
            stops waiting for its completion.

    Returns:
        list: A list of FCIssueResponse dict.
    """
    report_uri = '{}/issues/'.format(self.data['uri'])
    return self._helper.create_report(report_uri, timeout)
Creates an unexpected zoning report for a SAN. Args: timeout: Timeout in seconds. Wait for task completion by default. The timeout does not abort the operation in OneView, just stops waiting for its completion. Returns: list: A list of FCIssueResponse dict.
codesearchnet
def save_features(self, train_features, test_features, feature_names, feature_list_id):
    """Save train/test features to disk, along with their metadata.

    Args:
        train_features: A NumPy array of features for the training set.
        test_features: A NumPy array of features for the test set.
        feature_names: A list containing the names of the feature columns.
        feature_list_id: The name for this feature list.
    """
    self.save_feature_names(feature_names, feature_list_id)
    for features, split in ((train_features, 'train'), (test_features, 'test')):
        self.save_feature_list(features, split, feature_list_id)
Save features for the training and test sets to disk, along with their metadata. Args: train_features: A NumPy array of features for the training set. test_features: A NumPy array of features for the test set. feature_names: A list containing the names of the feature columns. feature_list_id: The name for this feature...
juraj-google-style
def channel_interpolate(layer1, n_channel1, layer2, n_channel2): def inner(T): batch_n = T(layer1).get_shape().as_list()[0] arr1 = T(layer1)[(..., n_channel1)] arr2 = T(layer2)[(..., n_channel2)] weights = (np.arange(batch_n) / float((batch_n - 1))) S = 0 for n in ra...
Interpolate between layer1, n_channel1 and layer2, n_channel2. Optimize for a convex combination of layer1, n_channel1 and layer2, n_channel2, transitioning across the batch. Args: layer1: layer to optimize 100% at batch=0. n_channel1: neuron index to optimize 100% at batch=0. layer2: layer to optimize 100% at batch=...
codesearchnet
def add_cohp_dict(self, cohp_dict, key_sort_func=None):
    """Add a dictionary of COHPs, optionally sorting the keys.

    Args:
        cohp_dict: dict of the form {label: Cohp}.
        key_sort_func: function used to sort the cohp_dict keys.
    """
    if key_sort_func is None:
        labels = cohp_dict.keys()
    else:
        labels = sorted(cohp_dict, key=key_sort_func)
    for label in labels:
        self.add_cohp(label, cohp_dict[label])
Adds a dictionary of COHPs with an optional sorting function for the keys. Args: cohp_dict: dict of the form {label: Cohp} key_sort_func: function used to sort the cohp_dict keys.
codesearchnet
def returns(desc=None, printer=None, data=True):
    """Specify how the return value of the decorated function is handled.

    Args:
        desc (str): A deprecated description of the return value.
        printer (callable): A callable that can format this return value.
        data (bool): Deprecated; must remain True.
    """
    if data is False:
        raise ArgumentError("Specifying non data return type in returns is no longer supported")

    def _decorator(func):
        annotated(func)
        func.custom_returnvalue(printer, desc)
        return func

    return _decorator
Specify how the return value of this function should be handled. Args: desc (str): A deprecated description of the return value printer (callable): A callable function that can format this return value data (bool): A deprecated parameter for specifying that this function returns data.
juraj-google-style
def recursive_copy(source, destination):
    """Copy a directory tree, silently doing nothing if source is missing.

    A wrapper around distutils.dir_util.copy_tree that won't throw when the
    source directory does not exist.

    Args:
        source (str): source path.
        destination (str): destination path.
    """
    if not os.path.isdir(source):
        return
    copy_tree(source, destination)
A wrapper around distutils.dir_util.copy_tree but won't throw any exception when the source directory does not exist. Args: source (str): source path destination (str): destination path
juraj-google-style
def update_ports(self, ports, id_or_uri, timeout=-1):
    """Update the interconnect ports.

    Args:
        ports (list): Ports to update.
        id_or_uri: Either the interconnect id or the interconnect uri.
        timeout: Timeout in seconds. Waits for task completion by default;
            the timeout does not abort the operation in OneView, it just
            stops waiting for its completion.

    Returns:
        dict: The updated resource.
    """
    payload = merge_default_values(ports, {'type': 'port'})
    endpoint = self._client.build_uri(id_or_uri) + "/update-ports"
    return self._client.update(payload, endpoint, timeout)
Updates the interconnect ports. Args: id_or_uri: Can be either the interconnect id or the interconnect uri. ports (list): Ports to update. timeout: Timeout in seconds. Wait for task completion by default. The timeout does not abort the operation in OneView; it just stops waiting for its completion. Returns: dict: The...
juraj-google-style
def google_maps_geoloc_link(data): if isinstance(data, str): lat_lon = ip_geoloc(data) if (lat_lon is None): return '' (lat, lon) = lat_lon else: (lat, lon) = data loc = ('%s,%s' % (lat, lon)) return ('https:
Get a link to google maps pointing on this IP's geolocation. Args: data (str/tuple): IP address or (latitude, longitude). Returns: str: a link to google maps pointing on this IP's geolocation.
codesearchnet
def put_content(self, url, content): cache_path = self._url_to_path(url) try: dir = os.path.dirname(cache_path) os.makedirs(dir) except OSError as e: if e.errno != errno.EEXIST: raise Error('Failed to create cache directories...
Stores the content of a resource into the disk cache. Args: url: The url of the resource content: The content of the resource Raises: CacheError: If the content cannot be put in cache
juraj-google-style
def renew(self, requested_timeout=None): if self._has_been_unsubscribed: raise SoCoException('Cannot renew subscription once unsubscribed') if (not self.is_subscribed): raise SoCoException('Cannot renew subscription before subscribing') if (self.time_left == 0): raise SoCoException('...
Renew the event subscription. You should not try to renew a subscription which has been unsubscribed, or once it has expired. Args: requested_timeout (int, optional): The period for which a renewal request should be made. If None (the default), use the timeout requested on subscription.
codesearchnet
def depth(script, iterations=3, viewpoint=(0, 0, 0), selected=False): filter_xml = ''.join([' <filter name="Depth Smooth">\n', ' <Param name="stepSmoothNum" ', 'value="{:d}" '.format(iterations), 'description="Smoothing steps" ', 'type="RichInt" ', '/>\n', ' <Param name="viewPoint" ', 'x="{}" '.format(viewpo...
A laplacian smooth that is constrained to move vertices only along the view direction. Args: script: the FilterScript object or script filename to write the filter to. iterations (int): The number of times that the whole algorithm (normal smoothing + vertex fitting) is iterated. viewpoint (vector tuple or list): The p...
codesearchnet
def format_search_results(self, search_results):
    """Format search results into printable entries.

    Args:
        search_results (list of `ResourceSearchResult`): Search results
            to format.

    Returns:
        List of 2-tuple: text and color to print in, flattened across all
        results.
    """
    # Each result may expand to several lines; flatten them in order.
    return [
        line
        for result in search_results
        for line in self._format_search_result(result)
    ]
Format search results. Args: search_results (list of `ResourceSearchResult`): Search results to format. Returns: List of 2-tuple: Text and color to print in.
codesearchnet
def set_message(self, title, msg, typ, url=None):
    """Send a notification message to this user.

    Args:
        title: Message title.
        msg: Message text.
        typ: Message type.
        url: Additional URL, if any.

    Returns:
        The value returned by ``self.user.send_notification`` (the
        message ID).
    """
    notification = dict(title=title, message=msg, typ=typ, url=url)
    return self.user.send_notification(**notification)
Sets user notification message. Args: title: Msg. title msg: Msg. text typ: Msg. type url: Additional URL (if exists) Returns: Message ID.
juraj-google-style
def _AssertValidators(self, validators): for validator in sorted(validators, key=(lambda validator: validator.insertion_index)): try: validator.verify(self) except exceptions.ValidationError as e: message = validator.print_flags_with_values(self) raise exceptions....
Assert if all validators in the list are satisfied. Asserts validators in the order they were created. Args: validators: Iterable(validators.Validator), validators to be verified Raises: AttributeError: if validators work with a non-existing flag. IllegalFlagValueError: if validation fails for at least one validator
codesearchnet
def validate_to_schema(nanopub, schema) -> Tuple[(bool, List[Tuple[(str, str)]])]: v = jsonschema.Draft4Validator(schema) messages = [] errors = sorted(v.iter_errors(nanopub), key=(lambda e: e.path)) for error in errors: for suberror in sorted(error.context, key=(lambda e: e.schema_path)): ...
Validate nanopub against jsonschema for nanopub Args: nanopub (Mapping[str, Any]): nanopub dict schema (Mapping[str, Any]): nanopub schema Returns: Tuple[bool, List[str]]: bool: Is valid? Yes = True, No = False List[Tuple[str, str]]: Validation issues, empty if valid, tuple is ('Error|Warning', msg) e.g. [('ERROR', ...
codesearchnet
def all_sum(tensors):
    """Return a list of tensors with the all-reduce sum across `tensors`.

    The computation is done with an all-reduce operation, so if only some
    of the returned tensors are evaluated then the computation will hang.

    Args:
        tensors: The input tensors across which to sum; must be assigned
            to GPU devices.

    Returns:
        List of tensors, each holding the all-reduced sum.
    """
    reduction = 'sum'
    return _apply_all_reduce(reduction, tensors)
Returns a list of tensors with the all-reduce sum across `tensors`. The computation is done with an all-reduce operation, so if only some of the returned tensors are evaluated then the computation will hang. Args: tensors: The input tensors across which to sum; must be assigned to GPU devices. Returns: List of tenso...
github-repos
def on_predict_begin(self, logs=None):
    """Called at the beginning of prediction.

    Subclasses should override for any actions to run.

    Args:
        logs: Dict. Currently no data is passed to this argument for this
            method but that may change in the future.
    """
Called at the beginning of prediction. Subclasses should override for any actions to run. Args: logs: Dict. Currently no data is passed to this argument for this method but that may change in the future.
github-repos
def optimize_for_inference(input_graph_def: graph_pb2.GraphDef, input_node_names: Sequence[str], output_node_names: Sequence[str], placeholder_type_enum: int, toco_compatible: bool=False, placeholder_to_const_names=None) -> graph_pb2.GraphDef: ensure_graph_is_valid(input_graph_def) optimized_graph_def = input_g...
Applies a series of inference optimizations on the input graph. Args: input_graph_def: A GraphDef containing a training model. input_node_names: A list of names of the nodes that are fed inputs during inference. output_node_names: A list of names of the nodes that produce the final results. placeholder_type_enum: The ...
github-repos
def _normalize_mlengine_job_id(job_id): match = re.search('\\d|\\{{2}', job_id) if (match and (match.start() == 0)): job = 'z_{}'.format(job_id) else: job = job_id tracker = 0 cleansed_job_id = '' for m in re.finditer('\\{{2}.+?\\}{2}', job): cleansed_job_id += re.sub('[^...
Replaces invalid MLEngine job_id characters with '_'. This also adds a leading 'z' in case job_id starts with an invalid character. Args: job_id: A job_id str that may have invalid characters. Returns: A valid job_id representation.
codesearchnet
def get_array_from_hist2D(hist: Hist, set_zero_to_NaN: bool=True, return_bin_edges: bool=False) -> Tuple[(np.ndarray, np.ndarray, np.ndarray)]: shape = (hist.GetYaxis().GetNbins(), hist.GetXaxis().GetNbins()) hist_array = np.array([hist.GetBinContent(x) for x in range(1, hist.GetNcells()) if ((not hist.IsBinUnd...
Extract x, y, and bin values from a 2D ROOT histogram. Converts the histogram into a numpy array, and suitably processes it for a surface plot by removing 0s (which can cause problems when taking logs), and returning a set of (x, y) mesh values utilziing either the bin edges or bin centers. Note: This is a different ...
codesearchnet
def __init__(self, metagraph, ignore_colocation=True, ignore_user_placement=False): self._metagraph = metagraph self._item_graph = meta_graph_pb2.MetaGraphDef() self._item_graph.CopyFrom(metagraph) self._ignore_colocation = ignore_colocation self._ignore_user_placement = ignore_user_placement se...
Creates an Item. Args: metagraph: a TensorFlow metagraph. ignore_colocation: if set, the tool will ignore all the colocation constraints generated by TensorFlow. ignore_user_placement: if set, all the placement annotations annotated in the metagraph will be ignored. Raises: ValueError: the metagraph is incomplete or i...
github-repos
def register(self, token, regexp):
    """Register a token class together with its compiled pattern.

    Args:
        token (Token): the token class to register.
        regexp (str): the regular expression for that token.
    """
    compiled = re.compile(regexp)
    self._tokens.append((token, compiled))
Register a token. Args: token (Token): the token class to register regexp (str): the regexp for that token
juraj-google-style
def __init__(self, value=None, length=0):
    """Create a Char with the optional parameters below.

    Args:
        value: The character to be built.
        length (int): Character size.
    """
    super().__init__(value)
    self.length = length
    # Pack format: network byte order, fixed-width byte string of `length`.
    self._fmt = f'!{self.length}s'
Create a Char with the optional parameters below. Args: value: The character to be built. length (int): Character size.
juraj-google-style
def parse_vhdl_file(fname):
    """Parse a named VHDL file.

    Args:
        fname (str): Name of file to parse.

    Returns:
        Parsed objects, as produced by :func:`parse_vhdl`.
    """
    with open(fname, 'rt') as fh:
        return parse_vhdl(fh.read())
Parse a named VHDL file Args: fname(str): Name of file to parse Returns: Parsed objects.
juraj-google-style
def heightmap_rain_erosion(hm: np.ndarray, nbDrops: int, erosionCoef: float, sedimentationCoef: float, rnd: Optional[tcod.random.Random]=None) -> None:
    """Simulate the effect of rain drops on the terrain, causing erosion.

    ``nbDrops`` should be at least hm.size.

    Args:
        hm (numpy.ndarray): A numpy.ndarray formatted for heightmap
            functions; modified in place by the C call.
        nbDrops (int): Number of rain drops to simulate.
        erosionCoef (float): Amount of ground eroded on the drop's path.
        sedimentationCoef (float): Amount of ground deposited when the
            drop stops.
        rnd (Optional[tcod.random.Random]): Optional random number
            generator; the library default is used when omitted.
    """
    # Fall back to the library's internal RNG when none is supplied.
    rng_handle = rnd.random_c if rnd else ffi.NULL
    lib.TCOD_heightmap_rain_erosion(
        _heightmap_cdata(hm),
        nbDrops,
        erosionCoef,
        sedimentationCoef,
        rng_handle,
    )
Simulate the effect of rain drops on the terrain, resulting in erosion. ``nbDrops`` should be at least hm.size. Args: hm (numpy.ndarray): A numpy.ndarray formatted for heightmap functions. nbDrops (int): Number of rain drops to simulate. erosionCoef (float): Amount of ground eroded on the drop's path. sedimentationCo...
codesearchnet
def read_html_file(data_dir, fileroot, encoding=None): fname = os.path.join(data_dir, RAW_HTML_DIRNAME, (fileroot + RAW_HTML_EXT)) encodings = ((encoding,) if encoding else ('utf-8', 'iso-8859-1')) for encoding in encodings: try: with io.open(fname, mode='rt', encoding=encoding) as f: ...
Read the HTML file corresponding to identifier ``fileroot`` in the raw HTML directory below the root ``data_dir``. Args: data_dir (str) fileroot (str) encoding (str) Returns: str
codesearchnet
def begin_scan(self, callback=None, interval=DEF_SCAN_INTERVAL, window=DEF_SCAN_WINDOW): logger.debug('configuring scan parameters') self.api.ble_cmd_gap_set_scan_parameters(interval, window, 1) self._set_state(self._STATE_CONFIGURE_SCAN) self.api.ble_cmd_gap_discover(1) self._wait_for_state(self._S...
Begins a BLE scan and returns immediately. Using this method you can begin a BLE scan and leave the dongle in scanning mode in the background. It will remain in scanning mode until you call the :meth:`end_scan` method or the :meth:`reset` method. Args: callback (callbable): a callback that will be called for each new...
codesearchnet
def split(input_file, file_1, file_2, no_in_first_file): with open(input_file) as f: feat_collection = geojson.load(f) features = feat_collection['features'] feat_collection_1 = geojson.FeatureCollection(features[0:no_in_first_file]) feat_collection_2 = geojson.FeatureCollection(feat...
Split a geojson into two separate files. Args: input_file (str): Input filename. file_1 (str): Output file name 1. file_2 (str): Output file name 2. no_in_first_file (int): Number of features in input_file to go to file_1; the rest go to file_2.
juraj-google-style
def find_backend(line: str) -> Optional[str]:
    """Find one (or multiple) backend in a code line of the init.

    Args:
        line (`str`): A code line of the main init.

    Returns:
        Optional[`str`]: None when the line is not a backend test;
        otherwise the detected backend name(s), sorted and joined on
        `_and_` when several are present.
    """
    if _re_test_backend.search(line) is None:
        return None
    # Each findall match is a tuple; the backend name is its first group.
    found = sorted(match[0] for match in _re_backend.findall(line))
    return '_and_'.join(found)
Find one (or multiple) backend in a code line of the init. Args: line (`str`): A code line of the main init. Returns: Optional[`str`]: If one (or several) backend is found, returns it. In the case of multiple backends (the line contains `if is_xxx_available() and `is_yyy_available()`) returns all backends joined on `...
github-repos
def dump(config):
    """Dump a stacker Config object as yaml.

    Args:
        config (:class:`Config`): the stacker Config object.

    Returns:
        bytes: the utf-8 encoded yaml-formatted stacker Config.
    """
    primitive = config.to_primitive()
    return yaml.safe_dump(
        primitive,
        default_flow_style=False,
        encoding='utf-8',
        allow_unicode=True,
    )
Dumps a stacker Config object as yaml. Args: config (:class:`Config`): the stacker Config object. Returns: str: the yaml formatted stacker Config.
juraj-google-style
def read_nmr_efg_tensor(self): header_pattern = 'Electric field gradients \\(V/A\\^2\\)\\n-*\\n ion\\s+V_xx\\s+V_yy\\s+V_zz\\s+V_xy\\s+V_xz\\s+V_yz\\n-*\\n' row_pattern = '\\d+\\s+([-\\d\\.]+)\\s+([-\\d\\.]+)\\s+([-\\d\\.]+)\\s+([-\\d\\.]+)\\s+([-\\d\\.]+)\\s+([-\\d\\.]+)' footer_pattern = '-*\\n' data ...
Parses the NMR Electric Field Gradient Raw Tensors Returns: A list of Electric Field Gradient Tensors in the order of Atoms from OUTCAR
codesearchnet
def copy(self, **override_parameters_kwargs):
    """Create a copy of the distribution with overridden parameters.

    Note: the copy may continue to depend on the original initialization
    arguments.

    Args:
        **override_parameters_kwargs: String/value dictionary of
            initialization arguments to override with new values.

    Returns:
        distribution: A new instance of `type(self)` initialized from the
        union of `self.parameters` and `override_parameters_kwargs`, with
        overrides winning.
    """
    merged = dict(self.parameters)
    merged.update(override_parameters_kwargs)
    return type(self)(**merged)
Creates a deep copy of the distribution. Note: the copy distribution may continue to depend on the original initialization arguments. Args: **override_parameters_kwargs: String/value dictionary of initialization arguments to override with new values. Returns: distribution: A new instance of `type(self)` initialized ...
github-repos
def with_stack(cls, stack, severity, message, **kwargs): stack = _dedup_opcodes(stack) if stack else None opcode = stack[-1].current_opcode if stack else None if opcode is None: return cls(severity, message, **kwargs) else: return cls(severity, message, filename=opcode.code.filename, lin...
Return an error using a stack for position information. Args: stack: A list of state.Frame or state.SimpleFrame objects. severity: The error level (error or warning), an integer. message: The error message string. **kwargs: Additional keyword args to pass onto the class ctor. Returns: An Error object.
github-repos
def exp(x):
    """Element-wise exponential.

    Args:
        x: Tensor or variable.

    Returns:
        A tensor holding the element-wise exponential of `x`.
    """
    result = math_ops.exp(x)
    return result
Element-wise exponential. Args: x: Tensor or variable. Returns: A tensor.
github-repos
def _GetDateTime(self, filetime):
    """Retrieve the date and time from a FILETIME timestamp.

    Args:
        filetime (int): FILETIME timestamp; 0 means "not set".

    Returns:
        dfdatetime.DateTimeValues: date and time, or a semantic
        "Not set" value when the timestamp is 0.
    """
    if filetime != 0:
        return dfdatetime_filetime.Filetime(timestamp=filetime)
    return dfdatetime_semantic_time.SemanticTime('Not set')
Retrieves the date and time from a FILETIME timestamp. Args: filetime (int): FILETIME timestamp. Returns: dfdatetime.DateTimeValues: date and time.
juraj-google-style
def __init__(self, config_file=None, config_header=None):
    """Constructor.

    Args:
        config_file: string, the location of the config file; falls back
            to the module-level CONFIG default when not given.
        config_header: string, the message to write at the top of the
            config.
    """
    self.config_header = config_header
    self.config_file = config_file or CONFIG
    # Parse the config file immediately so self.config is ready for use.
    self.config = parser.Parser()
    self.config.read(self.config_file)
Constructor. Args: config_file: string, the location of the config file. config_header: string, the message to write at the top of the config.
juraj-google-style
def GetEstimatedYear(self): if self._preferred_year: return self._preferred_year if self._knowledge_base.year: return self._knowledge_base.year year = self._GetEarliestYearFromFileEntry() if (not year): year = self._GetLatestYearFromFileEntry() if (not year): year = t...
Retrieves an estimate of the year. This function determines the year in the following manner: * see if the user provided a preferred year; * see if knowledge base defines a year e.g. derived from preprocessing; * determine the year based on the file entry metadata; * default to the current year; Returns: int: estimat...
codesearchnet
def divide_to_patches(image: np.array, patch_size: int, input_data_format) -> List[np.array]: patches = [] height, width = get_image_size(image, channel_dim=input_data_format) for i in range(0, height, patch_size): for j in range(0, width, patch_size): if input_data_format == ChannelDime...
Divides an image into patches of a specified size. Args: image (`np.array`): The input image. patch_size (`int`): The size of each patch. input_data_format (`ChannelDimension` or `str`): The channel dimension format of the input image. Returns: list: A list of np.array representing the patches.
github-repos
def _GetActualMessage(self):
    """Return the "message" portion of an exception.

    Many Python 2 exceptions have a "message" attribute, so return that
    directly in Python 2. That attribute never exists in Python 3, so
    return the first argument passed to the exception instance (or the
    empty string when there are no arguments).

    Returns:
        String
    """
    if six.PY2:
        return self._actual.message
    args = self._actual.args
    return args[0] if args else ''
Returns the "message" portion of an exception. Many Python 2 exceptions have a "message" attribute, so return that directly in Python 2. However, this attribute is never present in Python 3, so return the first argument passed to the exception instance as the message. Returns: String
github-repos