Columns:
  code: string, lengths 4 to 4.48k
  docstring: string, lengths 1 to 6.45k
  _id: string, length 24
def get_init(self):
    return self.graph.get_graph_parameter("init")
Getter. :return: The initialized global variables of the graph.
625941bc21bff66bcd684825
def delete_zcs_image(session, zcs_image_id, return_type=None, **kwargs):
    verify_zcs_image_id(zcs_image_id)
    path = '/api/images/{0}.json'.format(zcs_image_id)
    return session.delete_api(path=path, return_type=return_type, **kwargs)
Deletes a Zadara Container Services (ZCS) image. There must not be any spawned containers using this image. This action is irreversible. :type session: zadarapy.session.Session :param session: A valid zadarapy.session.Session object. Required. :type zcs_image_id: str :param zcs_image_id: The ZCS image 'name' value as returned by get_all_zcs_images. For example: 'img-00000001'. Required. :type return_type: str :param return_type: If this is set to the string 'json', this function will return a JSON string. Otherwise, it will return a Python dictionary. Optional (will return a Python dictionary by default). :rtype: dict, str :returns: A dictionary or JSON data set as a string depending on return_type parameter.
625941bc10dbd63aa1bd2a7c
def get_name(self, number, text=""):
    content = super().get_content(number)
    return content.name
Get the name to represent the content, here name of the content
625941bcbd1bec0571d90508
def ColorShade(hexcode, factor=0.6):
    r, g, b = tuple(int(hexcode.lstrip("#")[i:i+2], 16) * factor for i in (0, 2, 4))
    newhex = "#{0:02x}{1:02x}{2:02x}".format(int(r), int(g), int(b)).upper()
    return newhex
Creates a shade as determined by <factor>
625941bc8e7ae83300e4ae9c
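A quick usage sketch for ColorShade above; the input color is illustrative:

    shaded = ColorShade("#FF0000")  # darken pure red by the default factor 0.6
    print(shaded)                   # '#990000' (0xFF * 0.6 = 153 = 0x99)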
@plugin.interval(24 * 60 * 60)
def _clean_cache(bot):
    if bot.memory['safety_cache_lock'].acquire(False):
        LOGGER.info('Starting safety cache cleanup...')
        try:
            cutoff = time.time() - (7 * 24 * 60 * 60)
            old_keys = []
            for key, data in bot.memory['safety_cache'].items():
                if data['fetched'] <= cutoff:
                    old_keys.append(key)
            for key in old_keys:
                bot.memory['safety_cache'].pop(key, None)
            overage = len(bot.memory['safety_cache']) - cache_limit
            if overage > 0:
                # use data['fetched'] consistently (the original mixed in data.fetched)
                extra_keys = sorted(
                    (data['fetched'], key)
                    for (key, data) in bot.memory['safety_cache'].items())[:overage]
                for (_, key) in extra_keys:
                    bot.memory['safety_cache'].pop(key, None)
        finally:
            bot.memory['safety_cache_lock'].release()
        LOGGER.info('Safety cache cleanup finished.')
    else:
        LOGGER.info(
            'Skipping safety cache cleanup: Cache is locked, '
            'cleanup already running.')
Cleans up old entries in URL safety cache.
625941bc3346ee7daa2b2c3a
def populate_hostid_cache(self):
    temp_dict = {}
    data = self.zapi.host.get(output=['hostid', 'host'])
    for host in data:
        temp_dict[host['hostid']] = host['host']
    return temp_dict
DESCRIPTION: Populate hostid cache
625941bcaad79263cf39090d
def _set_topleft(el, pos):
    left, top = pos
    x_shift = 0
    y_shift = 0
    if left is not None:
        x_shift = left - el.get_storer_rect().left
    if top is not None:
        y_shift = top - el.get_storer_rect().top
    el.move((x_shift, y_shift))
Set element <el>'s topleft using its storer_rect as reference.
625941bc4428ac0f6e5ba6c2
def close(self):
    if not self.is_closed:
        while not self.is_closed:
            for window in self._windows:
                self._terminate(window.handle)
Closes SAP GUI windows. :return: None
625941bc91f36d47f21ac3c0
def deploy():
    clone_repo_if_needed()
    with cd(env.project_root):
        run_with_failure('git pull --all', 'Updating all repos failed.')
        run_with_failure('git checkout %s' % env.branch,
                         'Updating %s branch failed.' % env.branch)
        run_with_failure('git pull', 'Git pull failed.')
        run("git submodule init")
        run("git submodule update")
        if env.just_cloned:
            create_virtualenv()
        run_with_failure(env.activate + "pip install -r requirements.txt",
                         'Installing requirements failed.')
        run_with_failure(env.activate + "python manage.py syncdb --migrate --noinput",
                         "Syncdb failed.")
        run_with_failure(env.activate + "python manage.py collectstatic --noinput",
                         "Collectstatic failed.")
        run("touch jogaora_app/wsgi.py")
Updates the server and restarts it
625941bc3eb6a72ae02ec3a5
def synch(self):
    self.logger('INFO', 'Begin synching ' + str(self.localdir) + ' <=> ' + self.bucketname)
    self.validate_local_folder()
    if not self.TEMP_DB_DIR.exists():
        self.logger('DEBUG', 'Create temporary folder')
        self.TEMP_DB_DIR.mkdir()
    temp_db = self.bucketname + '-' + now().format('YYYY-MM-DD-HH-mm-ss')
    self.logger('DEBUG', 'Create database file tmp/' + temp_db)
    with dbm.open(str(self.TEMP_DB_DIR / temp_db), 'c') as remote_file_db:
        self.logger('INFO', 'Check for download')
        self.download_remote_files(remote_file_db)
        self.logger('INFO', 'Check for upload')
        self.upload_local_files(remote_file_db)
        self.logger('INFO', 'Bucket and local folder are synched!')
    if self.purge:
        self.logger('DEBUG', 'removing temporary database files...')
        for file in self.TEMP_DB_DIR.iterdir():
            file.unlink()
    self.logger('DEBUG', 'Cleaning up completed')
This overrides the original synch() and rebuilds an *online* version of the code :return:
625941bc9b70327d1c4e0ca4
def StartInterrogationHunt(self):
    flow_name = compatibility.GetName(flows_discovery.Interrogate)
    flow_args = flows_discovery.InterrogateArgs(lightweight=False)
    description = "Interrogate run by cron to keep host info fresh."
    hunt_id = hunt.CreateAndStartHunt(
        flow_name,
        flow_args,
        self.username,
        client_limit=0,
        client_rate=config.CONFIG["Cron.interrogate_client_rate"],
        crash_limit=config.CONFIG["Cron.interrogate_crash_limit"],
        description=description,
        duration=config.CONFIG["Cron.interrogate_duration"],
        output_plugins=self.GetOutputPlugins())
    self.Log("Started hunt %s.", hunt_id)
Starts an interrogation hunt on all available clients.
625941bc3c8af77a43ae366e
def shift_fields(expr, shift):
    return Shifter()(expr, shift=shift)
Returns an expression with all :class:`Field`\ s shifted by ``shift``--i.e., with ``shift`` added elementwise to each :class:`Field`'s ``offset`` attribute. :arg expr: The expression(s) to be mapped. :arg shift: A :class:`tuple`. .. versionadded:: 2020.1
625941bc7d43ff24873a2b6e
def reaction_stats_strs(self):
    reaction_stats_strs = []
    for reactant_pair_str, c_j, reactant_pair in sorted(
            (sorted([repr(d) for d in k]), v, k)
            for k, v in self.possible_hybridization_reactions.items()):
        d1, d2 = tuple(reactant_pair)
        domain1, domain2 = reactant_pair_str
        is_forming = d1.partner is None
        is_intra = (d1.strand.complex is not None
                    and d1.strand.complex == d2.strand.complex) or (d1.strand == d2.strand)
        reaction_attr = self.reaction_attrs[reactant_pair]
        stat = (
            "%18s %9s %s %18s" % (domain1,
                                  "hybrid" if is_forming else "de-hyb",
                                  "intra" if is_intra else "inter",
                                  domain2),
            ((": %0.03e x 1 x 1 = %03e" if reaction_attr.is_forming
              else ": %0.03e x 1 = %03e")
             % (c_j, self.hybridization_propensity_functions[reactant_pair]))
        )
        reaction_stats_strs.append("".join(stat))
    for reactant_pair_str, c_j, reactant_pair in sorted(
            (sorted([repr(d) for d in k]), v, k)
            for k, v in self.possible_stacking_reactions.items()):
        d1, d2 = tuple(reactant_pair)
        domain1, domain2 = reactant_pair_str
        reaction_attr = self.reaction_attrs[reactant_pair]
        reaction_desc = REACTION_NAMES[reaction_attr.is_forming][reaction_attr.reaction_type]
        stat = ("%42s %9s %s %42s" % (domain1,
                                      reaction_desc[:8],
                                      "intra" if reaction_attr.is_intra else "inter",
                                      domain2),
                ((": %0.03e x 1 x 1 = %03e" if reaction_attr.is_forming
                  else ": %0.03e x 1 = %03e")
                 % (c_j, self.stacking_propensity_functions[reactant_pair])))
        reaction_stats_strs.append("".join(stat))
    return reaction_stats_strs
Generate a string with reaction stats.
625941bc85dfad0860c3ad2a
def dev_parsed(**kwargs):
    result = {}
    for line in _dev(**kwargs).split('\n'):
        match = _PHY_RE.match(line)
        if match:
            phy = match.group('phy').replace('#', '')
            result[phy] = {'interfaces': []}
        else:
            match = _INTERFACE_RE.match(line)
            if match:
                result[phy]['interfaces'].append(match.group('interface'))
    return result
Parse the results of 'iw dev'. Args: **kwargs: Passed to the underlying subprocess call. Returns: A dict of the form: {'phyX': {'interfaces': ['interfaceN', ...]}}
625941bcd7e4931a7ee9dded
def test_viewing_a_single_question(self):
    response = self.client.get(
        '/api/v1/questions/1',
        data=json.dumps(self.questions),
        content_type='application/json')
    self.assertEqual(response.status_code, 200)
Test user can view a single question.
625941bc24f1403a92600a3a
def show_table(table):
    labels = [
        'ID',
        'Full ID',
        'Item name',
        'Manufacturer',
        'Purchase date',
        'Durability',
    ]
    ui.print_table(table, labels)
Display a table Args: table: list of lists to be displayed. Returns: None
625941bcd486a94d0b98e016
def __setitem__(self, index, value):
    list.__setitem__(self, index, protocol.base_rep[value])
Replaces a single Token of the Message with another Token. >>> m = NOT(GOF) >>> m[2] = DRW; print m NOT ( DRW ) >>> m[-1] = 42; print m NOT ( DRW 42 >>> m[3] IntegerToken(42) >>> m[-2] = [YES, KET] #doctest: +IGNORE_EXCEPTION_DETAIL Traceback (most recent call last): ... TypeError: list objects are unhashable
625941bc9c8ee82313fbb645
def Build(sess, document_source, FLAGS):
    task_context = FLAGS["task_context"]
    arg_prefix = FLAGS["arg_prefix"]
    num_actions = FLAGS["num_actions"]
    feature_sizes = FLAGS["feature_sizes"]
    domain_sizes = FLAGS["domain_sizes"]
    embedding_dims = FLAGS["embedding_dims"]
    hidden_layer_sizes = map(int, FLAGS["hidden_layer_sizes"].split(','))
    beam_size = FLAGS["beam_size"]
    max_steps = FLAGS["max_steps"]
    batch_size = FLAGS["batch_size"]
    corpus_name = FLAGS["input"]
    slim_model = FLAGS["slim_model"]
    model_path = FLAGS["model_path"]
    parser = structured_graph_builder.StructuredGraphBuilder(
        num_actions,
        feature_sizes,
        domain_sizes,
        embedding_dims,
        hidden_layer_sizes,
        gate_gradients=True,
        arg_prefix=arg_prefix,
        beam_size=beam_size,
        max_steps=max_steps)
    parser.AddEvaluation(task_context,
                         batch_size,
                         corpus_name=corpus_name,
                         evaluation_max_steps=max_steps,
                         document_source=document_source)
    parser.AddSaver(slim_model)
    sess.run(parser.inits.values())
    parser.saver.restore(sess, model_path)
    return parser.evaluation['documents']
Builds a sub-network, which will be either the tagger or the parser Args: sess: tensorflow session to use document_source: the input of serialized document objects to process Flags: (taken from FLAGS argument) num_actions: number of possible golden actions feature_sizes: size of each feature vector domain_sizes: number of possible feature ids in each feature vector embedding_dims: embedding dimension for each feature group hidden_layer_sizes: Comma separated list of hidden layer sizes. arg_prefix: Prefix for context parameters. beam_size: Number of slots for beam parsing. max_steps: Max number of steps to take. task_context: Path to a task context with inputs and parameters for feature extractors. input: Name of the context input to read data from. graph_builder: 'greedy' or 'structured' batch_size: Number of sentences to process in parallel. slim_model: Whether to expect only averaged variables. model_path: Path to model parameters. Return: returns the tensor which will contain the serialized document objects.
625941bc1f037a2d8b9460d0
def _expected_blocks(self, block_types=None, get_parent=False):
    blocks = {}

    def add_block(xblock):
        children = xblock.get_children()
        if block_types is None or xblock.category in block_types:
            parent = None
            if get_parent:
                item = xblock.get_parent()
                parent = unicode(item.location) if item is not None else None
            blocks[unicode(xblock.location)] = {
                u'id': unicode(xblock.location),
                u'type': xblock.category,
                u'display_name': xblock.display_name,
                u'format': xblock.format,
                u'graded': xblock.graded,
                u'parent': parent,
                u'children': [unicode(child.location) for child in children]
            }
        for child in children:
            add_block(child)

    course = self.store.get_course(self.course.id, depth=None)
    add_block(course)
    return blocks
Construct expected blocks. Arguments: block_types (list): List of required block types. Possible values include sequential, vertical, html, problem, video, and discussion. The type can also be the name of a custom type of block used for the course. get_parent (bool): If True then add child's parent location else parent is set to None Returns: dict: Information about required block types.
625941bcd58c6744b4257b31
def has_constraint_violation(A, vr, epsilon=1E-8):
    lhs = 1 - A[0, 1:].sum()
    rhs = dot(vr[:, 1:], A[1:, 0])
    rhs = -1 * rhs.min()
    if abs(lhs - rhs) > epsilon:
        return True
    else:
        return False
Check for constraint violations in transformation matrix. Parameters ---------- A : ndarray The transformation matrix. vr : ndarray The right eigenvectors. epsilon : float, optional Tolerance of constraint violation. Returns ------- truth : bool Whether or not the violation exists Notes ------- Checks constraints using Eqn 4.25 in [1]. References ---------- .. [1] Deuflhard P, Weber, M., "Robust perron cluster analysis in conformation dynamics," Linear Algebra Appl., vol 398 pp 161-184 2005.
625941bc63f4b57ef0000ff1
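A toy check of has_constraint_violation; the 2x2 inputs below are made up for illustration and are not from the PCCA reference:

    import numpy as np
    from numpy import dot  # the function above calls dot() unqualified

    A = np.array([[0.2, 0.8], [0.5, 0.5]])
    vr = np.array([[1.0, 0.3], [1.0, -0.7]])
    # lhs = 1 - 0.8 = 0.2; rhs = -min([0.15, -0.35]) = 0.35; |0.2 - 0.35| > 1e-8
    print(has_constraint_violation(A, vr))  # True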
def _default_platform():
    if os.environ.get('PIGLIT_PLATFORM'):
        return os.environ.get('PIGLIT_PLATFORM')
    else:
        try:
            plat = core.PIGLIT_CONFIG.get('core', 'platform')
            if plat not in core.PLATFORMS:
                print('Platform is not valid\n'
                      'valid platforms are: {}'.format(core.PLATFORMS),
                      file=sys.stderr)
                sys.exit(1)
            return plat
        except (ConfigParser.NoOptionError, ConfigParser.NoSectionError):
            return 'mixed_glx_egl'
Logic to determine the default platform to use This assumes that the platform can only be set on Linux, it probably works on BSD. This is only relevant if piglit is built with waffle support. When waffle support lands for Windows and if it ever happens for OSX, this will need to be extended. On Linux this will try in order, 1) An option provided via the -p/--platform option (this is handled in argparse, not in this function) 2) PIGLIT_PLATFORM from the environment 3) [core]:platform from the config file 4) mixed_glx_egl
625941bc1f5feb6acb0c4a25
def has_uncommitted_files():
    uncommitted_files = subprocess.check_output(GIT_IS_DIRTY_CMD.split(' '))
    return bool(len(uncommitted_files))
Returns true if the repo contains modified files that are uncommitted. Ignores untracked files.
625941bc94891a1f4081b979
def check_connectivity(
        self, resource_group_name, network_watcher_name, source, destination,
        custom_headers=None, raw=False, polling=True, **operation_config):
    raw_result = self._check_connectivity_initial(
        resource_group_name=resource_group_name,
        network_watcher_name=network_watcher_name,
        source=source,
        destination=destination,
        custom_headers=custom_headers,
        raw=True,
        **operation_config
    )

    def get_long_running_output(response):
        deserialized = self._deserialize('ConnectivityInformation', response)
        if raw:
            client_raw_response = ClientRawResponse(deserialized, response)
            return client_raw_response
        return deserialized

    lro_delay = operation_config.get(
        'long_running_operation_timeout',
        self.config.long_running_operation_timeout)
    if polling is True:
        polling_method = ARMPolling(lro_delay, **operation_config)
    elif polling is False:
        polling_method = NoPolling()
    else:
        polling_method = polling
    return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
Verifies the possibility of establishing a direct TCP connection from a virtual machine to a given endpoint including another VM or an arbitrary remote server. :param resource_group_name: The name of the network watcher resource group. :type resource_group_name: str :param network_watcher_name: The name of the network watcher resource. :type network_watcher_name: str :param source: :type source: ~azure.mgmt.network.v2018_01_01.models.ConnectivitySource :param destination: :type destination: ~azure.mgmt.network.v2018_01_01.models.ConnectivityDestination :param dict custom_headers: headers that will be added to the request :param bool raw: The poller return type is ClientRawResponse, the direct response alongside the deserialized response :param polling: True for ARMPolling, False for no polling, or a polling object for personal polling strategy :return: An instance of LROPoller that returns ConnectivityInformation or ClientRawResponse<ConnectivityInformation> if raw==True :rtype: ~msrestazure.azure_operation.AzureOperationPoller[~azure.mgmt.network.v2018_01_01.models.ConnectivityInformation] or ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[~azure.mgmt.network.v2018_01_01.models.ConnectivityInformation]] :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
625941bc76d4e153a657ea01
def throttle(self, last_io=None):
    if last_io is not None:
        self._last_io_datetime = last_io
    reference = datetime.now()
    if self._last_io_datetime is None:
        self._last_io_datetime = reference
        return
    if self.request_rate_per_sec <= 0.0:
        return
    elapsed = (reference - self._last_io_datetime).total_seconds()
    if elapsed < self.request_rate_per_sec:
        self.logger.debug('Throttling for {}s...'.format(
            self.request_rate_per_sec - elapsed))
        sleep(self.request_rate_per_sec - elapsed)
    self._last_io_datetime = reference
    return
A common throttle control
625941bc5166f23b2e1a502a
def _delete_file(file_name: str):
    os.remove(file_name)
Removes the image from the local file system.
625941bc91af0d3eaac9b8e6
def set_source_colorvalue(self, color, lighten=0):
    self.set_source_rgba(
        color.red / 255. + lighten,
        color.green / 255. + lighten,
        color.blue / 255. + lighten,
        color.alpha)
Set the source pattern from a ``cssutils.ColorValue`` object.
625941bccad5886f8bd26eb3
def shortenLine(self, content, max_length, **kwargs):
    return shorten_line(content, max_length, **kwargs)
Return the supplied *line* shortened to *max_length* characters. The *\*\*kwargs* are keyword arguments passed to the `shorten_line` function.
625941bc67a9b606de4a7d8d
def check_self_colision(snake):
    head = snake[0]
    if snake.count(head) > 1:
        return True
    else:
        return False
Check whether the snake intersects itself, i.e. the head coordinate also appears in the body.
625941bc5fcc89381b1e158e
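A minimal sketch of check_self_colision in use; the snake is a hypothetical list of (x, y) segments with the head first:

    snake = [(2, 2), (2, 3), (3, 3), (2, 2)]      # head coordinate reappears in the body
    print(check_self_colision(snake))              # True
    print(check_self_colision([(0, 0), (0, 1)]))   # False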
def load_json(filename):
    with open(filename, 'r') as fhnd:
        return json.load(fhnd)
Load contents of given JSON file and return them as data structures.
625941bc38b623060ff0acc0
def get_normalized_args():
    normalized_args = {}
    args = request.args
    for key, value in list(args.items()):
        normalized_args[key.lower()] = value
    return normalized_args
Function converting the TJS request's parameter names to lowercase strings :return: dict mapping lowercased parameter names to their values
625941bc45492302aab5e191
def main():
    cd_to_script_path()
    output_folder = get_output_folder()
    clear_the_cache = process_command_line_params()
    if clear_the_cache:
        clear_cache()
    load_cache()
    module_initialisations()
    copy_resources(output_folder)
    render_all_pages(output_folder)
What it says: the main function
625941bc7047854f462a12dd
def create_imageDB(filenamestr, saveURI, echo=False):
    imagedb.connect2db(saveURI, echo)
    files = glob.glob(filenamestr)
    images = []
    for filepath in files:
        run, camcol, filter, field = filepath2frameId(filepath)
        images.append(imagedb.Image(run, camcol, filter, field, filepath))
    with imagedb.session_scope() as session:
        session.add_all(images)
        session.commit()
Finds all paths to matching files given by filenamestr, extracts their frame identifiers and stores them in a database given by saveURI. Examples -------- Filenamestr can contain wildcards, e.g.: >>> create_imageDB("/path/to/dir_containing_subdirs/*/*.png", "sqlite:///foo.db") will find all /path/to/dir/subdirs/frame-run-camcol-filter-frame.png styled filenames and add their frame identifiers and paths to foo DB. Parameters ----------- filenamestr : str wildcarded string that will be used to match all desired image files saveURI : str URI containing type and location of the images database
625941bc0fa83653e4656e8d
def prepare_csv_read(data, field_names, *args, **kwargs):
    if hasattr(data, "readlines") or isinstance(data, list):
        pass
    elif isinstance(data, basestring):
        data = open(data)
    else:
        raise TypeError("Unable to handle data of type `%s'" % type(data))
    return csv.DictReader(data, field_names, *args, **kwargs)
Prepare various input types for CSV parsing >>> list(prepare_csv_read(open("real_file.csv"), ... ("type", "bool", "string"))) [{'bool': 'true', 'type': 'file', 'string': 'test'}] >>> test_list = ['James,Rowe', 'ell,caro'] >>> list(prepare_csv_read(test_list, ("first", "last"))) [{'last': 'Rowe', 'first': 'James'}, {'last': 'caro', 'first': 'ell'}] :type data: ``file`` like object, ``list``, ``str`` :param data: Data to read :type field_names: ``tuple`` of ``str`` :param field_names: Ordered names to assign to fields :rtype: `csv.DictReader` :return: CSV reader suitable for parsing :raise TypeError: Invalid value for data
625941bc9f2886367277a761
def read_all(self, connection):
    connection.setblocking(0)
    data = self._read(connection, self.rec_buf_size)
    while len(data) > 0:
        self._read_buffer += data
        data = self._read(connection, self.rec_buf_size)
    self._flush_read()
    return self._chunk
Read from a connection until the bytestream dries up. Sets the connection to non-blocking mode, and then repeatedly reads from it until a socket.error is raised, which happens when the buffer is empty. this is a bit fragile, in the sense that part of a message might be delayed, in which case it'll return immediately without seeing the rest. Does not close the connection. Returns: bytes object.
625941bc287bf620b61d393e
def __smbpasswd(self, username, password):
    if domaincontroller_enabled():
        return 0
    command = '/usr/local/bin/smbpasswd -D 0 -s -a "%s"' % (username)
    smbpasswd = self._pipeopen(command)
    smbpasswd.communicate("%s\n%s\n" % (password, password))
    return smbpasswd.returncode == 0
Add the user ``username'' to samba using ``password'' as the current password Returns: True whether the user has been successfully added and False otherwise
625941bcd53ae8145f87a146
def correct_offset_in_file(file_path, lat_field, lng_field, out_fields):
    temp_file = tempfile.NamedTemporaryFile(delete=False)
    with open(file_path, 'rb') as csv_file:
        csv_reader = csv.DictReader(remove_bom_from_first(csv_file))
        csv_reader.next()
        corrected_rows = correct_china_offset(csv_reader, lat_field, lng_field)
        csv_writer = csv.DictWriter(temp_file, out_fields)
        csv_writer.writeheader()
        csv_writer.writerows(corrected_rows)
        temp_file.close()
    return temp_file.name
Generates a temp file with corrected coordinates for the given file_path. Params: :file_path: Path to the file whose coordinates should be corrected. :lat_field: Name of the field in which latitude coordinates are found. :lng_field: Name of the field in which longitude coordinates are found. :out_fields: Names (in order) of the fields which should be written in the output. Returns: Path to generated temp file.
625941bc4e696a04525c931d
def london():
    print("\nYou made it to London, and you've almost completed your bucketlist.")
    print("You are still being chased by the police and have to leave Europe.")
    print("Luckily the last item of your bucketlist is to stay at a beach bungalow in Thailand.")
    print("In that way you could easily flee Europe and still complete your bucketlist.")
    input("\nPress enter to go to the Port of London...")
    print(LONDON_INFO)
    print(ASCII_LONDON)
    gameChoice()
Sixth room of the game. The player is now in London. Still being chased by the police for what the player has done in Berlin, the player now has to come up with a plan to flee Europe and still complete his/her bucketlist.
625941bcff9c53063f47c0c6
def call_at_to(start_datetime, end_datetime, f, *args, **kwargs):
    delta = start_datetime - datetime.datetime.now()
    seconds_ahead = delta.days * SECONDS_IN_DAY + delta.seconds + delta.microseconds / US_IN_SECOND
    delta = end_datetime - datetime.datetime.now()
    seconds_before_end = delta.days * SECONDS_IN_DAY + delta.seconds + delta.microseconds / US_IN_SECOND
    delta = end_datetime - start_datetime
    seconds_duration = delta.days * SECONDS_IN_DAY + delta.seconds + delta.microseconds / US_IN_SECOND
    if seconds_ahead > 0 and seconds_duration > FIVE_MINUTES:
        log.msg("Will call in %d seconds" % (seconds_ahead))
        return reactor.callLater(seconds_ahead, f, *args, **kwargs)
    elif seconds_before_end > FIVE_MINUTES:
        log.msg("Obs in progress, starting now!")
        return reactor.callLater(1, f, *args, **kwargs)
    else:
        log.msg("Not scheduling; Obs in the past or too short")
    return None
Run f(*args, **kwargs) at datetime.
625941bc50812a4eaa59c1f6
def filter_counter(self, counter, min=2, max=100000000):
    records_filterd = {}
    counter_all_records = 0
    for item in counter:
        counter_all_records += 1
        if max > counter[item] >= min:
            records_filterd[item] = counter[item]
    self.stat['user_record_events'] = counter_all_records
    self.stat['records_filtered'] = len(records_filterd)
    return records_filterd
Filter the counted records. Returns: Dict of the records whose counts fall within [min, max).
625941bcfff4ab517eb2f30b
def exercise_complex(show_result=True):
    if show_result is False:
        return
    c_abs = 1.2
    c_angle = 20
    c_angle_rad = np.deg2rad(c_angle)
    a = c_abs * np.cos(c_angle_rad)
    b = c_abs * np.sin(c_angle_rad)
    c = a + b*1j
    c_conj = np.conj(c)
    c_inv = 1 / c
    generate_figure(figsize=(5, 2.5), xlim=[-0.25, 1.75], ylim=[-0.5, 0.5])
    v1 = plot_vector(c, color='k')
    v2 = plot_vector(c_conj, color='b')
    v3 = plot_vector(c_inv, color='r')
    plt.legend([v1, v2, v3], ['$c$', r'$\overline{c}$', '$c^{-1}$'])

    def rotate_complex(c, r):
        c_angle_rad = np.angle(c) - np.deg2rad(r)
        c_abs = np.abs(c)
        a = c_abs * np.cos(c_angle_rad)
        b = c_abs * np.sin(c_angle_rad)
        c_rot = a + b*1j
        return c_rot

    c = 1 + 0.5*1j
    generate_figure(figsize=(5, 2.5), xlim=[-0.25, 1.75], ylim=[-0.25, 0.75])
    v1 = plot_vector(c, color='k')
    v2 = plot_vector(rotate_complex(c, 10), color='b')
    v3 = plot_vector(rotate_complex(c, 20), color='g')
    v4 = plot_vector(rotate_complex(c, 30), color='r')
    plt.legend([v1, v2, v3, v4], ['$c$', '$r=10$', '$r=20$', '$r=30$'])
Exercise 1: Rotate Complex Number Notebook: PCP_06_complex.ipynb Args: show_result: Show result (Default value = True)
625941bc55399d3f05588584
def draw_module_names(ax, vertical=False, color='white'):
    if cfg.geometry == '9M':
        if vertical is True:
            y = 952
            for j in range(3):
                x = 256
                for i in range(6):
                    t_str = 'T#{:02d}'.format(cfg.Eiger9M.T[i+j*6])
                    ax.text(x, y, t_str, color=color,
                            horizontalalignment='center', weight='normal')
                    x += 512
                y += 1024
        else:
            x = 512
            for j in range(3):
                y = 3000
                for i in range(6):
                    t_str = 'T#{:02d}'.format(cfg.Eiger9M.T[i+j*6])
                    ax.text(x, y, t_str, color=color,
                            horizontalalignment='center', weight='normal')
                    y -= 512
                x += 1024
    else:
        raise NotImplementedError('Only for 9M currently')
Write out the names of the modules in the 9M image. Names are fetched from sls_detector.config.Eiger9M Parameters ---------- ax: mpl.axes axes to draw the names on vertical: bool, optional Defaults to False, set to True if the image is rotated color: str Textcolor
625941bc711fe17d82542243
def get_tracking_work_size(self):
    return sum([len(ca) - 1 if len(ca) > 0 else 0
                for ca in self.channel_accumulator.values()])
:return: the total tracking work size, summed over all channel accumulators.
625941bc046cf37aa974cc1b
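The sum above counts len(ca) - 1 per non-empty accumulator; a standalone sketch with a plain dict standing in for self.channel_accumulator:

    channel_accumulator = {'ch0': [1, 2, 3, 4], 'ch1': [7], 'ch2': []}
    work = sum(len(ca) - 1 if len(ca) > 0 else 0
               for ca in channel_accumulator.values())
    print(work)  # 3  (= (4-1) + (1-1) + 0)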
def test_case(self, instance, node):
    if 'end' in node.attribute.values:
        return node.attribute.name
    else:
        example_value = instance.get_value(node.attribute)
        for child in node.children:
            if example_value == child[0]:
                return self.test_case(instance=instance, node=child[1])
    return 'unknown'
Recursively pass a given instance through the decision tree and return the answer :param instance: (Example) the information to classify :param node: (Node) current node :return: (str) the trained classification for the given decision tree
625941bc9b70327d1c4e0ca5
def get_average_pln_for_day(self, currency: Currency, date: datetime.date) -> Optional[Decimal]:
    url = QuotatorNBP._make_url(currency.value, date)
    return self._read_average_pln(url)
Implements QuotesProviderProtocol
625941bc7b180e01f3dc46d6
@login_required(login_url='login')
def my_likes(request):
    posts = TablePost.objects.filter(table_like_set__like_from=request.user,
                                     table_like_set__like=True)
    for post in posts:
        post.is_liked = True
    context = {
        'posts': posts,
    }
    return render(request, 'house/my_likes.html.django', context)
The 'my_likes' view displays all posts we liked on the website.
625941bca4f1c619b28aff11
def git_check_no_modified_files():
    command = [
        'git', 'submodule', 'foreach', '--recursive',
        'git', 'diff', '--quiet', 'HEAD']
    subprocess.check_call(command)
    command = [
        'git', 'diff', '--quiet', 'HEAD']
    subprocess.check_call(command)
Make sure that there are no modified files.
625941bc009cb60464c63285
def get_all(context, session):
    query = session.query(models.MetadefResourceType)
    resource_types = query.all()
    resource_types_list = []
    for rt in resource_types:
        resource_types_list.append(rt.as_dict())
    return resource_types_list
Get a list of all resource types
625941bc796e427e537b0494
def process_compare_data(params):
    for cur_date in rrule(MONTHLY, dtstart=params['ini_date'], until=params['end_date']):
        next_date = cur_date + relativedelta(
            months=1) - relativedelta(minutes=1)
        logging.info('Querying: cur_date: %s; next_date: %s',
                     cur_date.date().isoformat(),
                     next_date.date().isoformat())
        if params['query_gen']:
            for gen_type in params['dessem_sagic_name']:
                pparams = list()
                for d_name, item in params['dessem_sagic_name'][gen_type][
                        'by_cepelname'].items():
                    s_name = list(set(item['ons_sagic']))
                    if (params['compare_plants']
                            and d_name not in params['compare_plants']):
                        continue
                    pparams.append((params, cur_date, next_date,
                                    GEN_TYPE[gen_type], d_name, s_name))
                results = Parallel(n_jobs=10, verbose=10, backend="threading")(
                    map(delayed(query_compare_data), pparams))
                if not all(results):
                    logging.warning('Not all parallel jobs were successful!')
        if params['query_cmo']:
            pparams = list()
            for subsis in ['se', 'ne', 'n', 's']:
                pparams.append((params, cur_date, next_date, 'cmo', subsis, ['cmo']))
            results = Parallel(n_jobs=10, verbose=10, backend="threading")(
                map(delayed(query_compare_data), pparams))
Process data for the comparison between DESSEM and SAGIC
625941bccc40096d61595823
def shape_mask_shift(shape_mask, mean_shift):
    vshift = int(np.ceil(mean_shift[0]))
    hshift = int(np.ceil(mean_shift[1]))
    if vshift > 0:
        shape_mask = np.delete(shape_mask, np.s_[-abs(vshift):], axis=0)
        shape_mask = np.insert(shape_mask, np.s_[0:abs(vshift)], False, axis=0)
    elif vshift < 0:
        shape_mask = np.delete(shape_mask, np.s_[0:abs(vshift)], axis=0)
        shape_mask = np.insert(shape_mask, np.s_[-abs(vshift):], False, axis=0)
    if hshift > 0:
        shape_mask = np.delete(shape_mask, np.s_[-abs(hshift):], axis=1)
        shape_mask = np.insert(shape_mask, np.s_[0:abs(hshift)], False, axis=1)
    elif hshift < 0:
        shape_mask = np.delete(shape_mask, np.s_[0:abs(hshift)], axis=1)
        shape_mask = np.insert(shape_mask, np.s_[-abs(hshift):], False, axis=1)
    return shape_mask
Shifts the before-and-after images so that old particles will be masked and not counted in the new image
625941bcf548e778e58cd44e
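A small numpy sketch of shape_mask_shift; a positive vertical shift drops the bottom rows and pads the top with False, so mask content moves down:

    import numpy as np

    mask = np.zeros((4, 4), dtype=bool)
    mask[1, 1] = True
    shifted = shape_mask_shift(mask, (1.0, 0.0))  # shift one row down
    print(np.argwhere(shifted))                   # [[2 1]]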
def to_dict(self):
    if self.failure:
        failure = self.failure.to_dict()
    else:
        failure = None
    return {
        'failure': failure,
        'meta': self.meta,
        'name': self.name,
        'results': self.results,
        'state': self.state,
        'version': self.version,
        'intention': self.intention,
        'uuid': self.uuid,
    }
Translates the internal state of this object to a ``dict``. :returns: this atom detail in ``dict`` form
625941bc15fb5d323cde09dd
def remove_comments(self):
    result = []
    for token in self.lexems:
        if token.tag == 'COMMENT':
            pass
        else:
            result.append(token)
    return result
Removes the comments from the token list --- Args: None Return: list of tokens with COMMENT tokens removed
625941bc50485f2cf553cc6a
def densenet121(expandSize, pretrained=False, **kwargs):
    model = DenseNet(expandSize, num_init_features=64, growth_rate=32,
                     block_config=(6, 12, 24, 16), **kwargs)
    if pretrained:
        model.load_state_dict(model_zoo.load_url(model_urls['densenet121']))
    return model
Densenet-121 model from `"Densely Connected Convolutional Networks" <https://arxiv.org/pdf/1608.06993.pdf>` Args: pretrained (bool): If True, returns a model pre-trained on ImageNet
625941bc63d6d428bbe443c1
def getSystem(self, block="AUTO"):
    if block is True:
        self.wait()
    elif block == "AUTO" and self._is_blocked:
        self.wait()
    if self.isError():
        _warnings.warn("The process exited with an error!")
    restart = "%s/%s.crd" % (self._work_dir, self._name)
    if _os.path.isfile(restart):
        if "is_lambda1" in self._property_map:
            is_lambda1 = True
        else:
            is_lambda1 = False
        new_system = _System(_SireIO.MoleculeParser.read(
            [restart, self._top_file], self._property_map))
        old_system = self._system.copy()
        sire_system, mapping = _SireIO.updateCoordinatesAndVelocities(
            old_system._sire_object,
            new_system._sire_object,
            self._mapping,
            is_lambda1,
            self._property_map,
            self._property_map)
        old_system._sire_object = sire_system
        self._mapping = mapping
        if "space" in new_system._sire_object.propertyKeys():
            box = new_system._sire_object.property("space")
            old_system._sire_object.setProperty(
                self._property_map.get("space", "space"), box)
        return old_system
    else:
        return None
Get the latest molecular system. Parameters ---------- block : bool Whether to block until the process has finished running. Returns ------- system : :class:`System <BioSimSpace._SireWrappers.System>` The latest molecular system.
625941bc91af0d3eaac9b8e7
def next(self):
    self.current_position += self.current_batch_size
    if self.no_batch_left():
        return None
    if self.current_position + self.minibatch_size <= self.total():
        self.current_batch_size = self.minibatch_size
    else:
        self.current_batch_size = self.total() - self.current_position
    self.current_batch_indices = self.indices[
        self.current_position:self.current_position + self.current_batch_size]
    self.current_input_length = max(
        self.data['clips'][0, ind, 1] for ind in self.current_batch_indices)
    self.current_output_length = max(
        self.data['clips'][1, ind, 1] for ind in self.current_batch_indices)
Move to the next batch.
625941bc97e22403b379ce6a
def truez(p, t, w):
    r0m = 287.04
    g = 9.80616
    ratio = 0.621971
    arg = (r0m/g) * (t/p) * (1.0 + (1.0/ratio - 1.0) * w/1000.0)
    deltap = num.diff(p)
    deltap = num.append(deltap, num.zeros(1))
    arg = arg * deltap
    z = -arg.cumsum()
    return z
z = truez(p,t,w) compute true citation altitudes using the hydrostatic equation inputs: p pressure in mbar t air temperature in K w water vapor mixing ratio in g/kg
625941bc851cf427c661a3e4
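An illustrative call to truez; the three-level profile below is made up, and `num` in the function body is assumed to be a numpy alias:

    import numpy as num

    p = num.array([1000.0, 900.0, 800.0])  # pressure, mbar
    t = num.array([288.0, 282.0, 276.0])   # temperature, K
    w = num.array([8.0, 6.0, 4.0])         # water vapor mixing ratio, g/kg
    z = truez(p, t, w)                     # altitudes in m, increasing with height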
def _setup_annotation_colors(params):
    raw = params['raw']
    segment_colors = params.get('segment_colors', dict())
    if raw.annotations is not None:
        ann_order = raw.annotations.onset.argsort(axis=0)
        descriptions = raw.annotations.description[ann_order]
    else:
        descriptions = list()
    color_keys = np.union1d(descriptions, params['added_label'])
    color_cycle = cycle(np.delete(COLORS, 2))
    for _ in np.intersect1d(list(color_keys), list(segment_colors.keys())):
        next(color_cycle)
    for idx, key in enumerate(color_keys):
        if key in segment_colors:
            continue
        elif key.lower().startswith('bad') or key.lower().startswith('edge'):
            segment_colors[key] = 'red'
        else:
            segment_colors[key] = next(color_cycle)
    params['segment_colors'] = segment_colors
Set up colors for annotations.
625941bcd8ef3951e324340f
def set_vibration(self, left=0, right=0, vibration_time=1):
    self.left_vibration, self.right_vibration = min(int(left), 255), min(int(right), 255)
    self.vibration_time = min(int(vibration_time), 200)
Set the level of vibration on the corresponding oculus touch for the specified number of iterations/time. The level of vibration of each controller is between 0 and 255.
625941bc7d43ff24873a2b6f
def list_upcoming_birthdays(self):
    status = True
    query = 'select INDI, NAME, BIRT from indi where DEAT == "NA"'
    thirty_days = relativedelta(days=30)
    people = self.tool.query_info(query)
    for person in people:
        if self.tool.dates_within(str(self.tool.today.year) + person[2][-6:],
                                  str(self.tool.today + thirty_days), 30, 'days'):
            status = False
            print("US38: {} {}'s birthday will occur in the next 30 days on {}."
                  .format(person[0], person[1], person[2]))
    return status
Author Youhao US38 List all living people in a GEDCOM file whose birthdays occur in the next 30 days :return:bool
625941bc45492302aab5e192
def compare_password(self, password):
    encodedpassword = password.encode('utf-8')
    return bcrypt.hashpw(encodedpassword, self.get('password')) == self.get('password')
' PURPOSE ' Given a password, compare it to the saved password. ' PARAMETERS ' <str password> ' RETURNS ' <bool is_same> True if passwords match, False if not.
625941bcd4950a0f3b08c223
def build_latex():
    global args
    latex = args.build_type + 'latex' if args.build_type != 'dvi' else 'latex'
    if in_path(latex):
        aux_fname = '%s.aux' % args.base_name
        aux_hash = hash_file(aux_fname)
        for i in range(5):
            if system('%s -interaction=batchmode %s >/dev/null' % (latex, args.outf_name), i+1):
                print('=== Error in build:')
                system("grep -A15 -m1 '^!' %s.log" % args.base_name, i+1)
                sys.exit(1)
            new_aux_hash = hash_file(aux_fname)
            redo_because_of_bibtex = (i == 0 and build_bibtex(aux_fname))
            if new_aux_hash == aux_hash and not redo_because_of_bibtex:
                print('=== No change in %s; build finished' % aux_fname)
                break
            else:
                aux_hash = new_aux_hash
    else:
        print('*** Error: "%s" not found in PATH, skipping build.' % latex, file=args.errf)
        print('*** Use "-o %s" instead, and run latex on that one yourself.' % args.outf_name,
              file=args.errf)
Run *latex/bibtex until .aux file no longer changes (max 5 times).
625941bc16aa5153ce36234a
def permute(listPerm):
    np = []
    for i in range(0, len(listPerm)):
        for p in range(i, len(listPerm)):
            if i != p:
                np.append((listPerm[i], listPerm[p]))
    return np
Return all distinct unordered pairs of elements of a list
625941bc92d797404e30405b
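Worked output for permute; each unordered pair appears exactly once:

    print(permute([1, 2, 3]))
    # [(1, 2), (1, 3), (2, 3)]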
def __init__(self, pooling_type, n_dim, *args, **kwargs):
    super().__init__()
    pool_cls = getattr(torch.nn, "%sPool%dd" % (pooling_type, n_dim))
    self.pool = pool_cls(*args, **kwargs)
Parameters ---------- pooling_type : str Type of Pooling, case sensitive. Supported values are * ``Max`` * ``Avg`` * ``AdaptiveAvg`` * ``AdaptiveMax`` n_dim : int number of dimensions *args : positional arguments of the chosen pooling class **kwargs : keyword arguments of the chosen pooling class
625941bcf8510a7c17cf95cc
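A usage sketch for the pooling wrapper above; the class name NDPooling is hypothetical, since the dataset row only shows __init__:

    import torch

    pool = NDPooling("Max", 2, kernel_size=2)  # resolves to torch.nn.MaxPool2d(kernel_size=2)
    x = torch.randn(1, 3, 8, 8)
    y = pool.pool(x)                           # shape (1, 3, 4, 4)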
@csrf_exempt
@api_view(['GET'])
@authentication_classes((TokenAuthentication, BasicAuthentication, SessionAuthentication))
@permission_classes((IsAuthenticated,))
def do_working_dir_dir_json(request, uid):
    return directory_json(request, "work", uid)
get: List directory content as JSON (working area) List directory content as JSON
625941bc66656f66f7cbc07c
def get_num_outputs(self):
    if self.hide_acc_from_interface:
        return 2 * self.n_task_dims
    else:
        return 3 * self.n_task_dims
Get number of environment outputs. Returns ---------- n : int number of environment outputs
625941bc07d97122c4178757
def create_formatter(axis_naming, axis_title, column_list):
    return html.Details(
        id='{}-formatting'.format(axis_naming),
        children=[
            html.Summary(
                '{} Formatting'.format(axis_title),
                style={
                    'font-size': '14px',
                    'color': '#444',
                    'padding': '0px 0px 0px 5px'
                }
            ),
            html.Div(
                [
                    html.Div(
                        Dropdown(
                            placeholder='Axis scaling',
                            id='{}-type'.format(axis_naming),
                            options=[
                                {'label': i, 'value': i}
                                for i in ['Linear', 'Log Scale', 'Ln()',
                                          'Log10()', 'e^()', '10^()']
                            ],
                            value='Linear'
                        ),
                        style={
                            'width': '49%',
                            'display': 'inline-block'
                        }
                    ),
                    html.Div(
                        Dropdown(
                            placeholder='Axis orientation',
                            id='{}-orientation'.format(axis_naming),
                            options=[
                                {'label': i, 'value': i}
                                for i in ['increasing', 'decreasing']
                            ],
                            value='increasing',
                        ),
                        style={
                            'float': 'right',
                            'width': '49%',
                            'display': 'inline-block',
                        }
                    ),
                    html.Div(
                        [
                            html.Div(
                                Dropdown(
                                    placeholder='Operator',
                                    id='{}-operator'.format(axis_naming),
                                    options=[
                                        {'label': i, 'value': i}
                                        for i in ['+', '-', '/', 'x']
                                    ],
                                    value=''
                                ),
                                style={
                                    'width': '70px',
                                    'display': 'table-cell',
                                    'vertical-align': 'top'
                                }
                            ),
                            html.Div(
                                Dropdown(
                                    placeholder='Column',
                                    id='{}-combined-column'.format(axis_naming),
                                    options=[
                                        {'label': i, 'value': i}
                                        for i in column_list
                                    ],
                                    value=''
                                ),
                                style={
                                    'display': 'table-cell',
                                    'width': 'auto',
                                    'vertical-align': 'top',
                                    'padding': '0px 0px 0px 5px'
                                }
                            )
                        ],
                        style={
                            'display': 'table',
                            'width': '100%'
                        }
                    )
                ],
                style={
                    'padding': '0px 0px 0px 15px',
                }
            )
        ],
        style={
            'display': 'none'
        }
    )
Return collapsable formatting div
625941bc4d74a7450ccd4094
def do_health_checks(self, list_of_ips):
    threads = []
    results = []
    for count, ip in enumerate(list_of_ips):
        thread = threading.Thread(
            target=self._do_tcp_check,
            name="%s:%s" % (self.thread_name, ip),
            args=(ip, results))
        thread.start()
        threads.append(thread)
    for thread in threads:
        thread.join()
    return results, []
Perform a health check on a list of IP addresses. Each check (we use a TCP connection attempt) is run in its own thread. Gather up the results and return the list of those addresses that failed the test and the list of questionable IPs. TODO: Currently, this starts a thread for every single address we want to check. That's probably not a good idea if we have thousands of addresses. Therefore, we should implement some batching for large sets.
625941bccc40096d61595824
def _set_color(self, ida_color=None, qt_color=None):
    if ida_color is not None:
        r = ida_color & 255
        g = (ida_color >> 8) & 255
        b = (ida_color >> 16) & 255
    if qt_color is not None:
        r = (qt_color >> 16) & 255
        g = (qt_color >> 8) & 255
        b = qt_color & 255
    ida_color = r | g << 8 | b << 16
    qt_color = r << 16 | g << 8 | b
    css = "QPushButton {background-color: #%06x; color: #%06x;}"
    self._color_button.setStyleSheet(css % (qt_color, qt_color))
    self._color = ida_color
Sets the color of the user color button.
625941bc596a89723608999c
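A worked example of the byte swaps in _set_color: IDA packs colors as 0xBBGGRR while Qt uses 0xRRGGBB:

    ida_color = 0xCC8844          # b=0xCC, g=0x88, r=0x44
    r = ida_color & 255           # 0x44
    g = (ida_color >> 8) & 255    # 0x88
    b = (ida_color >> 16) & 255   # 0xCC
    qt_color = r << 16 | g << 8 | b
    print(hex(qt_color))          # 0x4488cc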
def remove_percona_repository(host, repo_file):
    with host.sudo("root"):
        cmd = "sudo rm -f {}".format(repo_file)
        result = host.run(cmd)
        assert result.rc == 0, result.stderr
Delete repository file
625941bc9f2886367277a762
def get_ids_and_tickers(conn):
    cur = conn.cursor()
    cur.execute('SELECT id, ticker FROM symbol')
    rows = cur.fetchall()
    cur.close()
    return [(row[0], row[1]) for row in rows]
Retrieves list of ids and corresponding ticker for all symbols in the symbol table. Returns: list: [(id, ticker) for every ticker found in the database]
625941bc187af65679ca4fef
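A minimal sketch of get_ids_and_tickers against an in-memory SQLite database; the symbol schema here is assumed from the query:

    import sqlite3

    conn = sqlite3.connect(':memory:')
    conn.execute('CREATE TABLE symbol (id INTEGER PRIMARY KEY, ticker TEXT)')
    conn.executemany('INSERT INTO symbol (ticker) VALUES (?)', [('AAPL',), ('GOOG',)])
    print(get_ids_and_tickers(conn))  # [(1, 'AAPL'), (2, 'GOOG')]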
def run(self):
    def wrapper():
        self.logger.info('Waiting for messages...')
        self._channel.start_consuming()
    return wrapper
Start the AMQP consumer.
625941bcc4546d3d9de72903
def generate_sql_verification_data(sql_results_instance):
    results_string = ""
    all_rows = sql_results_instance["ResultSet"]["Rows"]
    for row_index in range(1, len(all_rows)):
        results_string += f"{generate_sql_verification_data_row(all_rows[row_index])}"
        if (row_index + 1) < len(all_rows):
            results_string += "\n"
    return results_string
Generates the verification text for a given SQL result. Keyword arguments: sql_results_instance -- the sql results returned from athena
625941bc293b9510aa2c316a
def __init__(self, text="", elements=None, normal_params=None,
             press_params=None, value=False, namestyle=None,
             type_="checkbox", check_img=None):
    namestyle = style.STYLE_INSERTER_NAME if namestyle is None else namestyle
    super(Checker, self).__init__("", elements, normal_params, press_params)
    if value:
        self._checked = value
    else:
        self._checked = False
    self._name_element = self._get_name_element(text, namestyle)
    self.add_elements(list([self._name_element]))
    self._type = type_
    painter = self._gen_painter()
    self.set_painter(painter, False)
    self._check_img = self._get_check_img(check_img)
    params = {"size": style.CHECK_SIZE,
              "color": style.COLOR_HOVER_CHECK,
              "pressed": True}
    if self._type == "checkbox":
        painter_class = painterstyle.CHECKBOX_PAINTER
    elif self._type == "radio":
        painter_class = painterstyle.RADIO_PAINTER
    self.normal_params.polite_set(
        "params hover",
        {"painter": painter_class, "params": params})
    self.normal_params.polite_set("typ hover", "redraw")
Checkable check or radio box. <text>: text before the box. <value>: True for checked, False for not checked. <type_>: can be either 'checkbox' or 'radio'. <check_img>: if not None, define the image used for the box.
625941bc56ac1b37e62640a7
def getTagName(self):
    return self._tagName
Return the tag name for this Element. Parameters: self: This object. Return value: String containing the tag name for this Element. Description: Return the tag name for this Element as a string.
625941bc56b00c62f0f14529
def get_dateformat(self):
    return _DATE_FORMAT_RE
Returns the dateformat.
625941bc4f6381625f11490f
def test_remove_cart():
    cafe.cart = {'pop-tarts': 1}
    cafe.remove_cart('pop-tarts')
    assert 'pop-tarts' not in cafe.cart
Tests that if one of an item is in the cart, it is completely removed
625941bc1f5feb6acb0c4a26
def _passes_reverse_restriction():
    if test.get_function_object() in ParallelProcessor.__REVERSE_PARALLEL_RESTRICTIONS:
        reverse_tests = ParallelProcessor.__REVERSE_PARALLEL_RESTRICTIONS[test.get_function_object()]
        for class_object, suite_mapping in list(ParallelProcessor.__PARALLELS.items()):
            for test_mapping in suite_mapping["tests"]:
                if test_mapping["test"].get_function_object() in reverse_tests:
                    if test_mapping["thread"].is_alive():
                        return False
    return True
If current suite is part of parallel restriction in another suite which is currently active, can't run it. :return: BOOLEAN
625941bc8a349b6b435e8045
def setup_model(**config):
    engine = engine_from_config(config)
    session = scoped_session(sessionmaker(bind=engine, extension=ZopeTransactionExtension()))
    init_model(session)
    create_tables()
sets up collection model according to configuration
625941bccc0a2c11143dcd62
def rgetLambda(self):
    return _core.CGPSumCache_rgetLambda(self)
rgetLambda(CGPSumCache self) -> MatrixXd & Parameters: self: limix::CGPSumCache *
625941bc627d3e7fe0d68d20
def addPlot(self, tag, _type, colum, _db=None):
    if colum not in self.to_draw:
        self.to_draw[colum] = defaultdict(list)
    if _db:
        self.to_draw[colum][tag].append([_db, _type,
                                         _db.getX(self.plotint),
                                         _db.getRecord(tag, _type, self.plotint)])
    else:
        self.to_draw[colum][tag].append([self.main_db, _type,
                                         self.main_db.getX(self.plotint),
                                         self.main_db.getRecord(tag, _type, self.plotint)])
tag = Reptag , _type = incell|outcell|other , _db = instance of DataManager
625941bc4527f215b584c32d
def _build_children(self):
    if self._page:
        children = {'page': self._page}
        children.update(self._build_menus())
    else:
        children = {}
    return children
Build the children for the layout based on the users permissions
625941bc4f6381625f114910
def __eq__(self, other):
    if not isinstance(other, PowerFormFormDataRecipient):
        return False
    return self.to_dict() == other.to_dict()
Returns true if both objects are equal
625941bc97e22403b379ce6b
def connect(self, db):
    counter = 0
    while not self.connected and not counter > 20:
        self.connected = False
        try:
            self.dbase = self.pool[db]
            self.connected = True
        except:
            l.log_exception('MyMongo.connect')
            counter += 1  # count failed attempts so the 20-attempt limit can take effect
            sleep(0.2)
connect to mongodb
625941bc10dbd63aa1bd2a7e
def getIcon(self): <NEW_LINE> <INDENT> return ":/icons/Draft_Array"
Return the icon shown in the tree view.
625941bcd99f1b3c44c67468
def rename(self, old_name, new_name, add_postfix_if_exists=True): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self._conn.rename(old_name, new_name) <NEW_LINE> <DEDENT> except ftplib.error_perm as e: <NEW_LINE> <INDENT> if 'existant' in str(e): <NEW_LINE> <INDENT> new_name = new_name.split('.') <NEW_LINE> new_name = "-new.".join(new_name) <NEW_LINE> self.rename(old_name, new_name) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise
rename / move a file
625941bc8a43f66fc4b53f3b
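A minimal standalone sketch of the "-new." postfix logic in the rename record above (the function name here is hypothetical):

def add_new_postfix(filename):
    # "report.pdf" -> ["report", "pdf"] -> "report-new.pdf"
    # note: every dot gets the postfix, so "a.b.c" becomes "a-new.b-new.c"
    return "-new.".join(filename.split('.'))

assert add_new_postfix("report.pdf") == "report-new.pdf"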
def max_stock_profit(trading_info): <NEW_LINE> <INDENT> best_profit = None <NEW_LINE> for price in range(len(trading_info)-1): <NEW_LINE> <INDENT> potential = - trading_info[price] + max(trading_info[price+1:]) <NEW_LINE> if best_profit == None or potential > best_profit: <NEW_LINE> <INDENT> best_profit = potential <NEW_LINE> <DEDENT> <DEDENT> return best_profit
Given a list of trading values, where each index is minutes past opening time and each value is the stock price, determine the best time to buy and sell. Must sell after buying.
625941bc3317a56b86939b3b
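A quick usage check for max_stock_profit above; note the scan is O(n^2) because max() rescans the tail for every buy minute:

prices = [10, 7, 5, 8, 11, 9]
# best trade: buy at 5 (minute 2), sell at 11 (minute 4)
assert max_stock_profit(prices) == 6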
def mergeKLists(self, lists): <NEW_LINE> <INDENT> if len(lists) == 0 or (lists[0] is None and len(lists) <= 1): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> n = len(lists) <NEW_LINE> dummy = ListNode() <NEW_LINE> tuplelist = [(lists[i].val, i, lists[i]) for i in range(n) if lists[i]] <NEW_LINE> tie = n <NEW_LINE> if tuplelist: <NEW_LINE> <INDENT> heapq.heapify(tuplelist) <NEW_LINE> val, _, heapnode = heapq.heappop(tuplelist) <NEW_LINE> node = ListNode(heapnode.val) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> if heapnode.next: <NEW_LINE> <INDENT> heapnode = heapnode.next <NEW_LINE> heapq.heappush(tuplelist, (heapnode.val, tie, heapnode)) <NEW_LINE> tie += 1 <NEW_LINE> <DEDENT> dummy.next = node <NEW_LINE> while tuplelist: <NEW_LINE> <INDENT> val, _, heapnode = heapq.heappop(tuplelist) <NEW_LINE> node.next = ListNode(val) <NEW_LINE> node = node.next <NEW_LINE> if heapnode.next: <NEW_LINE> <INDENT> heapnode = heapnode.next <NEW_LINE> heapq.heappush(tuplelist, (heapnode.val, tie, heapnode)) <NEW_LINE> tie += 1 <NEW_LINE> <DEDENT> <DEDENT> return dummy.next
:type lists: List[ListNode] :rtype: ListNode Merge k sorted linked lists and return the merged sorted list. Analyze and describe the complexity of the algorithm.
625941bc925a0f43d2549d46
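A usage sketch for mergeKLists above, assuming it sits on a LeetCode-style Solution class with heapq imported; the ListNode class and build helper are assumptions. The unique tie index pushed with each heap entry keeps Python 3 from ever comparing two ListNode objects when values collide:

class ListNode:
    def __init__(self, val=0, next=None):
        self.val = val
        self.next = next

def build(values):
    # build a linked list from a Python list
    head = None
    for v in reversed(values):
        head = ListNode(v, head)
    return head

lists = [build([1, 4, 5]), build([1, 3, 4]), build([2, 6])]
node = Solution().mergeKLists(lists)
out = []
while node:
    out.append(node.val)
    node = node.next
print(out)  # [1, 1, 2, 3, 4, 4, 5, 6]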
def usart_set_parity(): <NEW_LINE> <INDENT> pass
__NATIVE__ PmReturn_t retval = PM_RET_OK; pPmObj_t p0, p1; uint32_t usart; uint32_t parity; /* If wrong number of args, raise TypeError */ if (NATIVE_GET_NUM_ARGS() != 2) { PM_RAISE_WITH_INFO(retval, PM_RET_EX_TYPE, "incorrect number of arguments"); return retval; } p0 = NATIVE_GET_LOCAL(0); /* If arg is not an int, raise TypeError */ if (OBJ_GET_TYPE(p0) != OBJ_TYPE_INT) { PM_RAISE_WITH_INFO(retval, PM_RET_EX_TYPE, "expected int"); return retval; } usart = ((pPmInt_t)p0)->val; p1 = NATIVE_GET_LOCAL(1); /* If arg is not an int, raise TypeError */ if (OBJ_GET_TYPE(p1) != OBJ_TYPE_INT) { PM_RAISE_WITH_INFO(retval, PM_RET_EX_TYPE, "expected int"); return retval; } parity = ((pPmInt_t)p1)->val; usart_set_parity(usart, parity); NATIVE_SET_TOS(PM_NONE); return retval;
625941bcd6c5a10208143f1a
def warning(self, message): <NEW_LINE> <INDENT> self.messageLevel = WARNING <NEW_LINE> self.logger.warning(message)
Log a message with level warning.
625941bc3346ee7daa2b2c3c
def __add_encryptednumber(self, other: '__class__') -> '__class__': <NEW_LINE> <INDENT> if self.public_key != other.public_key: <NEW_LINE> <INDENT> raise ValueError("add two numbers have different public key!") <NEW_LINE> <DEDENT> x, y = self.__align_exponent(self, other) <NEW_LINE> encryptednumber = self.__raw_add(x.ciphertext( False), y.ciphertext(False), x.exponent) <NEW_LINE> return encryptednumber
return PaillierEncryptedNumber: z = E(x) + E(y)
625941bc66656f66f7cbc07d
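The addition above rests on the Paillier homomorphism E(x) * E(y) mod n^2 = E(x + y). A sketch with the python-paillier (phe) package, whose EncryptedNumber this record's class resembles — the dependency is an assumption, not the record's own library:

from phe import paillier

public_key, private_key = paillier.generate_paillier_keypair()
a = public_key.encrypt(3.5)
b = public_key.encrypt(1.5)
# __add__ aligns exponents and multiplies ciphertexts, mirroring the
# __align_exponent / __raw_add steps in the record above
print(private_key.decrypt(a + b))  # 5.0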
def is_legal(self, x, y): <NEW_LINE> <INDENT> return (0 <= x < self.xsize) and (0 <= y < self.ysize)
Returns true if the x,y coordinates are legal for this board.
625941bc7d847024c06be18c
def restart_server(): <NEW_LINE> <INDENT> run('cd %(path)s/releases/current; %(path)s/releases/current/site-restart' % env) <NEW_LINE> sudo('/etc/init.d/nginx restart')
Restart the web server
625941bc21a7993f00bc7bbe
def lookup1(required, provided, name=u'', default=None): <NEW_LINE> <INDENT> pass
Lookup a value using a single required interface A value is looked up based on a single required specification, a provided interface, and a name, which must be text.
625941bcd7e4931a7ee9ddef
def _get_root_dn(self): <NEW_LINE> <INDENT> return desktop.conf.LDAP.BASE_DN.get()
Returns the configured base DN (DC=desktop,DC=local).
625941bc656771135c3eb73f
@task(publish_linux) <NEW_LINE> def install_linux(c): <NEW_LINE> <INDENT> c.run('cp bin/Release/net6.0/linux-x64/publish/jpackages /home/itang/.local/bin/jpackages')
install for linux
625941bc379a373c97cfaa1d
def get_text_field_value(self): <NEW_LINE> <INDENT> return self.text_field_web_element.get_attribute("value")
Returns the value of the text field.
625941bc2eb69b55b151c77e
def newton_method(x_train, y_train, max_epoch=10000, epsilon=1e-4, _lambda=0.0): <NEW_LINE> <INDENT> order = np.shape(x_train)[1] - 1 <NEW_LINE> w = np.random.normal(0, 0.01, size=order + 1) <NEW_LINE> w = np.mat(w).T <NEW_LINE> for epoch in range(max_epoch): <NEW_LINE> <INDENT> loss0 = get_loss(w, x_train, y_train, _lambda) <NEW_LINE> w = w - np.dot((np.dot(x_train.T, x_train) + _lambda * np.eye(order + 1)).I, np.dot(x_train.T, np.dot(x_train, w) - y_train) + _lambda * w) <NEW_LINE> loss1 = get_loss(w, x_train, y_train, _lambda) <NEW_LINE> if math.fabs(loss1 - loss0) < epsilon: <NEW_LINE> <INDENT> return w <NEW_LINE> <DEDENT> <DEDENT> return w
Solve with Newton's method. :param x_train: training-set x, a matrix whose rows have the form [1, x, x^2, ..., x^n] :param y_train: training-set y, a column vector :param max_epoch: maximum number of iterations; return if min_loss has not been reached by then :param epsilon: precision threshold :param _lambda: lambda coefficient of the regularization term; no regularization by default :return: the solved parameter w, a column vector
625941bc1f037a2d8b9460d2
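get_loss is referenced above but not shown; a plausible definition (regularized half-SSE, an assumption) plus a polynomial fit on noisy data:

import math
import numpy as np

def get_loss(w, x_train, y_train, _lambda):
    # assumed objective: 0.5 * ||Xw - y||^2 + 0.5 * lambda * ||w||^2
    err = np.dot(x_train, w) - y_train
    return float(0.5 * err.T * err + 0.5 * _lambda * w.T * w)

order = 3
x = np.linspace(0, 1, 20)
y = np.mat(np.sin(2 * np.pi * x) + np.random.normal(0, 0.1, 20)).T
X = np.mat(np.vander(x, order + 1, increasing=True))  # rows [1, x, x^2, x^3]
w = newton_method(X, y, _lambda=1e-3)
print(w.T)

Since this objective is quadratic, a single Newton step already lands on the closed-form ridge solution, so the loop exits on its first convergence check.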
def detect(self, img, degree=DEFAULT_DEGREE, debug=False, min_size=150): <NEW_LINE> <INDENT> image_list = self.rotate_image(img, degree) <NEW_LINE> detected_pols = [] <NEW_LINE> for image, rotation_matrix in image_list: <NEW_LINE> <INDENT> detected_rectangles, w = self.hog.detectMultiScale(image, winStride=(8, 8), padding=(32, 32), scale=1.05) <NEW_LINE> if debug: <NEW_LINE> <INDENT> self.draw_detections(image, detected_rectangles) <NEW_LINE> cv2.imshow("test", image) <NEW_LINE> cv2.waitKey(0) <NEW_LINE> <DEDENT> inv_mat = cv2.invertAffineTransform(rotation_matrix) <NEW_LINE> for x, y, w, h in detected_rectangles: <NEW_LINE> <INDENT> if w < min_size: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> p1 = inv_mat.dot(np.array([x, y, 1])).tolist() <NEW_LINE> p2 = inv_mat.dot(np.array([x + w, y, 1])).tolist() <NEW_LINE> p3 = inv_mat.dot(np.array([x + w, y + h, 1])).tolist() <NEW_LINE> p4 = inv_mat.dot(np.array([x, y + h, 1])).tolist() <NEW_LINE> polygon = [p1, p2, p3, p4] <NEW_LINE> detected_pols.append(polygon) <NEW_LINE> <DEDENT> <DEDENT> return detected_pols
Detect people in the image. :param debug: show each rotated image and wait for a key press to continue :param img: source image :param degree: delta angle between successive rotations :param min_size: minimum detection width in pixels; smaller boxes are discarded
625941bcd58c6744b4257b33
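Under the hood this is OpenCV's stock HOG people detector; a standalone sketch without the rotation machinery (the image path is hypothetical):

import cv2

hog = cv2.HOGDescriptor()
hog.setSVMDetector(cv2.HOGDescriptor_getDefaultPeopleDetector())
img = cv2.imread("street.jpg")  # hypothetical input image
rects, weights = hog.detectMultiScale(img, winStride=(8, 8), padding=(32, 32), scale=1.05)
for (x, y, w, h) in rects:
    cv2.rectangle(img, (x, y), (x + w, y + h), (0, 255, 0), 2)
cv2.imwrite("street_detected.jpg", img)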
def handleKey(self, key): <NEW_LINE> <INDENT> return False
Handler for user input. This returns true if the key press was consumed, false otherwise. Arguments: key - keycode for the key pressed
625941bc6e29344779a624e8
def verifyStatistic(statistic): <NEW_LINE> <INDENT> return statistic
If statistic is already a statistic object, return it unchanged; otherwise resolve it to a statistic.
625941bcb5575c28eb68ded1
def set_option(self, name, value): <NEW_LINE> <INDENT> if name not in self.settable: <NEW_LINE> <INDENT> raise SCons.Errors.UserError("This option is not settable from a SConscript file: %s"%name) <NEW_LINE> <DEDENT> if name == 'num_jobs': <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> value = int(value) <NEW_LINE> if value < 1: <NEW_LINE> <INDENT> raise ValueError <NEW_LINE> <DEDENT> <DEDENT> except ValueError: <NEW_LINE> <INDENT> raise SCons.Errors.UserError("A positive integer is required: %s"%repr(value)) <NEW_LINE> <DEDENT> <DEDENT> elif name == 'max_drift': <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> value = int(value) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> raise SCons.Errors.UserError("An integer is required: %s"%repr(value)) <NEW_LINE> <DEDENT> <DEDENT> elif name == 'duplicate': <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> value = str(value) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> raise SCons.Errors.UserError("A string is required: %s"%repr(value)) <NEW_LINE> <DEDENT> if value not in SCons.Node.FS.Valid_Duplicates: <NEW_LINE> <INDENT> raise SCons.Errors.UserError("Not a valid duplication style: %s" % value) <NEW_LINE> <DEDENT> SCons.Node.FS.set_duplicate(value) <NEW_LINE> <DEDENT> elif name == 'diskcheck': <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> value = diskcheck_convert(value) <NEW_LINE> <DEDENT> except ValueError as v: <NEW_LINE> <INDENT> raise SCons.Errors.UserError("Not a valid diskcheck value: %s"%v) <NEW_LINE> <DEDENT> if 'diskcheck' not in self.__dict__: <NEW_LINE> <INDENT> SCons.Node.FS.set_diskcheck(value) <NEW_LINE> <DEDENT> <DEDENT> elif name == 'stack_size': <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> value = int(value) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> raise SCons.Errors.UserError("An integer is required: %s"%repr(value)) <NEW_LINE> <DEDENT> <DEDENT> elif name == 'md5_chunksize': <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> value = int(value) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> raise SCons.Errors.UserError("An integer is required: %s"%repr(value)) <NEW_LINE> <DEDENT> <DEDENT> elif name == 'warn': <NEW_LINE> <INDENT> if SCons.Util.is_String(value): <NEW_LINE> <INDENT> value = [value] <NEW_LINE> <DEDENT> value = self.__SConscript_settings__.get(name, []) + value <NEW_LINE> SCons.Warnings.process_warn_strings(value) <NEW_LINE> <DEDENT> elif name == 'no_progress': <NEW_LINE> <INDENT> SCons.Script.Main.progress_display.set_mode(False) <NEW_LINE> <DEDENT> self.__SConscript_settings__[name] = value
Sets an option from an SConscript file.
625941bcd4950a0f3b08c224
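From an SConscript file these guarded settings are reached through SCons's public SetOption wrapper, for example:

# in an SConscript
SetOption('num_jobs', 4)             # rejected unless a positive integer
SetOption('duplicate', 'soft-copy')  # must be in SCons.Node.FS.Valid_Duplicates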