code
stringlengths 4
4.48k
| docstring
stringlengths 1
6.45k
| _id
stringlengths 24
24
|
|---|---|---|
def __init__(self):
    """Initialize the index database configuration.

    If the user's local config file (``self.fpath``) does not exist yet,
    read the config bundled inside the package instead, so users do not
    have to edit the packaged ``pds_indices_db.toml`` to get started.
    """
    if not self.fpath.exists():
        # First run: fall back to the config shipped with the package.
        with resource_path("planetarypy.pdstools.data", self.fname) as p:
            self.config = self.read_from_file(p)
    else:
        # A local copy exists; prefer the user's version.
        self.config = self.read_from_file()
|
Initialize index database.
Will copy the package's version to user's home folder at init,
so that user doesn't need to edit file in package to add new indices.
Adding new index URLs to the package's config file pds_indices_db.toml
is highly encouraged via pull request.
|
625941bb4527f215b584c31f
|
def one_hot(inputs, num_classes):
    """One-hot encode `inputs` with a fixed number of classes.

    Only rank-1 or rank-2 inputs with fully static shapes are supported.
    Lookup-table approach from:
    http://stackoverflow.com/questions/35226198/is-this-one-hot-encoding-in-tensorflow-fast-or-flawed-for-any-reason
    """
    inshape = inputs.get_shape().as_list()
    assert len(inshape) <= 2
    # Every dimension must be statically known to build the output shape.
    for shcomp in inshape:
        assert shcomp is not None
    input_vec = tf.reshape(inputs, (-1, 1))
    # The identity matrix doubles as an embedding table: row i == one-hot(i).
    table = tf.constant(np.identity(num_classes, dtype=np.float32))
    embeddings = tf.nn.embedding_lookup(table, tf.cast(input_vec, tf.int32))
    outshape = inshape + [num_classes, ]
    output = tf.reshape(embeddings, outshape)
    return output
|
One hot encoding with fixed number of classes.
# noqa: E501
See also: http://stackoverflow.com/questions/35226198/is-this-one-hot-encoding-in-tensorflow-fast-or-flawed-for-any-reason
|
625941bba219f33f34628838
|
def knight(p1, p2):
    """Return the least number of knight moves from square `p1` to `p2`.

    Breadth-first search over the 8x8 board.  Positions are given in
    algebraic notation (file letter a-h, rank digit 1-8).

    Improvements over the original: self-contained (no external board /
    cell-conversion helpers), and visited squares are tracked in a set
    instead of a mixed str/int board, giving O(1) membership tests.

    :param p1: (str) | Start position
    :param p2: (str) | Target position
    :return: (int) | Minimal number of moves to reach the target square

    >>> knight('a1', 'c1')
    2
    >>> knight('a1', 'f7')
    5
    >>> knight('b5', 'a3')
    1
    """
    def _to_indexes(cell):
        # 'a3' -> (0, 2): file letter -> column, rank digit -> row.
        return ord(cell[0]) - ord('a'), int(cell[1]) - 1

    if p1 == p2:
        return 0
    start = _to_indexes(p1)
    target = _to_indexes(p2)
    moves = ((-1, -2), (-2, -1), (-2, 1), (-1, 2),
             (1, 2), (2, 1), (2, -1), (1, -2))
    visited = {start}
    frontier = [start]
    step = 0
    # BFS: each pass over `frontier` explores all squares one move further.
    while frontier:
        step += 1
        next_frontier = []
        for cx, cy in frontier:
            for dx, dy in moves:
                nx, ny = cx + dx, cy + dy
                if not (0 <= nx <= 7 and 0 <= ny <= 7):
                    continue  # off the board
                if (nx, ny) == target:
                    return step
                if (nx, ny) not in visited:
                    visited.add((nx, ny))
                    next_frontier.append((nx, ny))
        frontier = next_frontier
|
BFS (breadth-first search) algorithm for finding least number of moves from p1 to p2
Given two different positions on a chess board, find the least number of moves it would take a knight to get from
one to the other. The positions will be passed as two arguments in algebraic notation. For example,
knight("a3", "b5") should return 1.
The knight is not allowed to move off the board. The board is 8x8.
For information on knight moves, see https://en.wikipedia.org/wiki/Knight_%28chess%29
For information on algebraic notation, see https://en.wikipedia.org/wiki/Algebraic_notation_%28chess%29
:param p1: (str) | Start position
:param p2: (str) | Target position
:return: (int) | Minimal amount of steps to reach destination cell
>>> knight('a1', 'c1')
2
>>> knight('a1', 'f1')
3
>>> knight('a1', 'f7')
5
>>> knight('f7', 'a1')
5
>>> knight('b3', 'f7')
4
>>> knight('b5', 'a3')
1
|
625941bb187af65679ca4fe2
|
def make_race_tree(self, frame, height):
    """Create a Treeview widget of races with `height` visible rows.

    :param frame: Parent widget for the tree and its scrollbar.
    :param height: Number of rows shown at once.
    :return: (race_tree, scroll_bar) -- the scrollbar is wired to the tree
        but not placed; the caller is expected to grid/pack both widgets.
    """
    # 'timestamp' is stored in the row but hidden (not in displaycolumns).
    race_tree = ttk.Treeview(frame, columns=('id', 'date', 'goal', 'numentrants', 'timestamp'), displaycolumns=('id', 'date', 'goal', 'numentrants'), height=height)
    # Clicking a heading sorts by that column; numeric columns use the
    # integer sort helper so "10" sorts after "9".
    race_tree.heading('id', text="Race ID", command=lambda: self.sort_tree_column_int(race_tree, 'id', False))
    race_tree.heading('date', text='Date Recorded', command=lambda: self.sort_tree_column(race_tree, 'date', False))
    race_tree.heading('goal', text='Goal', command=lambda: self.sort_tree_column(race_tree, 'goal', False))
    race_tree.heading('numentrants', text= "Entrants", command=lambda: self.sort_tree_column_int(race_tree, 'numentrants', False))
    race_tree.column('#0', width=10)
    race_tree.column('id', width=100, anchor=E)
    race_tree.column('date', width=150, anchor=E)
    race_tree.column('goal', width=150, anchor=E)
    race_tree.column('numentrants', width=75, anchor=E)
    scroll_bar = ttk.Scrollbar(frame, orient=VERTICAL, command=race_tree.yview)
    race_tree.configure(yscrollcommand=scroll_bar.set)
    return race_tree, scroll_bar
|
Creates a treeview widget of races of specified height
Returns tree and x/y scroll bars
|
625941bbcc40096d61595816
|
def create_checks_set(number, status):
    """Create `number` Check rows for testing and return them as a queryset.

    Only the check with id == 1 receives `status`; all others get 'OK'.

    :param number: int - number of checks to create
    :param status: str - status for the first check (id == 1)
    :return: Queryset of the checks belonging to service 2.
    """
    # Create checks with descending ids, matching the original countdown.
    for check_id in range(number, 0, -1):
        Check.objects.create(
            id=check_id,
            name='TestCheck',
            status=status if check_id == 1 else 'OK',
            plugin=NagPlugin.objects.get(id=10),
            target_port=3000,
            run_freq=10,
            service=Service.objects.get(id=2)
        )
    return Check.objects.filter(service=2)
|
Helper function for creating a set of checks for further testing
:param number: int - number of checks
:param status: str - checks status
:return: Queryset of checks.
|
625941bba79ad161976cc009
|
def get_plugin_error(plugin_name):
    """Return None when the plugin loads cleanly.

    Otherwise return a ``sys.exc_info()``-style tuple describing the
    failure: either the one recorded on a fake placeholder plugin, or the
    live ImportError raised while importing.
    """
    try:
        module = import_plugin(plugin_name)
        # A "fake" plugin is a placeholder created after a failed load; it
        # carries the original exception info.
        if getattr(module, "is_fake_plugin", None):
            return module.exc_info
    except ImportError:
        return sys.exc_info()
    return None
|
Return None if plugin is loaded without error, else
return a tuple of exception information
|
625941bb4c3428357757c1ee
|
def delete_n_tasks(app_id):
    """Invalidate the memoized n_tasks(app_id) cache entry so the next
    call recomputes it."""
    delete_memoized(n_tasks, app_id)
|
Reset n_tasks value in cache
|
625941bb7d847024c06be17d
|
def test_parse_generate_missing_client_id_pattern(self):
    """Test CLI.parse_command_args().

    'generate' subcommand with no client id pattern argument must exit.
    """
    # BUG FIX: ("generate") is just the string "generate" -- parenthesised,
    # not a tuple.  The trailing comma makes it a one-element argument list.
    args = ("generate",)
    with self.assertRaises(SystemExit):
        # Silence argparse's usage/error output during the expected failure.
        with CoreCLITests.RedirectStdStreams(
                stdout=self.devnull, stderr=self.devnull):
            cut = CLI()
            cut.parse_command_args(args)
|
Test CLI.parse_command_args().
'generate' subcommand with no client id pattern argument.
|
625941bb94891a1f4081b96c
|
@manager.command
def test(coverage=False):
    """Run the unit tests, optionally under coverage.

    When `coverage` is requested and coverage is not yet active, the
    process re-execs itself with the coverage flag set in the environment.
    """
    # BUG FIX: the guard checked 'MENU_COVERAGE' while the re-exec set
    # 'FLASKY_COVERAGE', so the child never saw the flag and re-exec'd
    # forever.  Both now use 'FLASKY_COVERAGE'.
    # NOTE(review): if the module-level COV setup keys off MENU_COVERAGE,
    # align that variable name too -- confirm at file top.
    if coverage and not os.environ.get('FLASKY_COVERAGE'):
        import sys
        os.environ['FLASKY_COVERAGE'] = '1'
        os.execvp(sys.executable, [sys.executable] + sys.argv)
    import unittest
    tests = unittest.TestLoader().discover('tests')
    unittest.TextTestRunner(verbosity=2).run(tests)
    if COV:
        COV.stop()
        COV.save()
        print('Coverage Summary:')
        COV.report()
        basedir = os.path.abspath(os.path.dirname(__file__))
        covdir = os.path.join(basedir, 'tmp/coverage')
        COV.html_report(directory=covdir)
        print('HTML version: file://%s/index.html' % covdir)
        COV.erase()
|
Run the unit tests
|
625941bb0c0af96317bb80ad
|
def patch_lifecycle_rules( self, buckets=None, references=None, lifecycle=None, bucket_ids=None, bucket_names=None, ids=None, names=None, confirm_date=None, async_req=False, _return_http_data_only=False, _preload_content=True, _request_timeout=None, ):
    """Modify an existing lifecycle rule by name or id.

    If `ids` is specified, `bucket_names` or `bucket_ids` is also required.

    Args:
        buckets: list of FixedReference; overrides bucket_ids/bucket_names.
        references: list of FixedReference; overrides ids/names.
        lifecycle (LifecycleRulePatch): the patch payload (required).
        bucket_ids / bucket_names: mutually exclusive bucket selectors.
        ids / names: mutually exclusive rule selectors.
        confirm_date (bool): confirm `keep_current_version_until` date.
        async_req / _return_http_data_only / _preload_content /
        _request_timeout: standard generated-client transport options.

    Returns:
        ValidResponse on success, ErrorResponse on failure.

    Raises:
        PureError, ValueError, TypeError per the generated client contract.
    """
    kwargs = dict( lifecycle=lifecycle, bucket_ids=bucket_ids, bucket_names=bucket_names, ids=ids, names=names, confirm_date=confirm_date, async_req=async_req, _return_http_data_only=_return_http_data_only, _preload_content=_preload_content, _request_timeout=_request_timeout, )
    # Drop parameters the caller left unset so they are not sent to the API.
    kwargs = {k: v for k, v in kwargs.items() if v is not None}
    endpoint = self._lifecycle_rules_api.api21_lifecycle_rules_patch_with_http_info
    # Reference objects, when given, override the id/name keyword arguments.
    _process_references(buckets, ['bucket_ids', 'bucket_names'], kwargs)
    _process_references(references, ['ids', 'names'], kwargs)
    return self._call_api(endpoint, kwargs)
|
Modify an existing lifecycle rule by name or id. If `ids` is specified,
`bucket_names` or `bucket_ids` is also required.
Args:
buckets (list[FixedReference], optional):
A list of buckets to query for. Overrides bucket_ids and bucket_names keyword arguments.
references (list[FixedReference], optional):
A list of references to query for. Overrides ids and names keyword arguments.
lifecycle (LifecycleRulePatch, required):
bucket_ids (list[str], optional):
A list of bucket IDs. If after filtering, there is not at least one resource
that matches each of the elements of `bucket_ids`, then an error is returned.
This cannot be provided together with the `bucket_names` query parameter. This
can be provided with the `ids` query parameter but not with `names`.
bucket_names (list[str], optional):
A list of bucket names. If there is not at least one resource that matches each
of the elements of `bucket_names`, then an error is returned. This cannot be
provided together with the `bucket_ids` query parameter. This can be provided
with the `ids` query parameter but not with `names`.
ids (list[str], optional):
A list of resource IDs. If after filtering, there is not at least one resource
that matches each of the elements of `ids`, then an error is returned. This
cannot be provided together with the `name` or `names` query parameters.
names (list[str], optional):
A list of resource names. If there is not at least one resource that matches
each of the elements of `names`, then an error is returned.
confirm_date (bool, optional):
If set to `true`, then confirm the date of `keep_current_version_until` is
correct.
async_req (bool, optional):
Request runs in separate thread and method returns
multiprocessing.pool.ApplyResult.
_return_http_data_only (bool, optional):
Returns only data field.
_preload_content (bool, optional):
Response is converted into objects.
_request_timeout (int, optional):
Total request timeout in seconds.
Returns:
ValidResponse: If the call was successful.
ErrorResponse: If the call was not successful.
Raises:
PureError: If calling the API fails.
ValueError: If a parameter is of an invalid type.
TypeError: If invalid or missing parameters are used.
|
625941bb435de62698dfdb17
|
def getCommand():
    """Prompt the user for a command number and return it as an int.

    Robustness fix: the original crashed with ValueError on non-numeric
    input; now the user is re-prompted until a valid integer is entered.

    :return: (int) The command number entered by the user.
    """
    while True:
        try:
            return int(input("Command number: "))
        except ValueError:
            print("Please enter a whole number.")
|
:return: Gets a user command and returns it
|
625941bb796e427e537b0487
|
def round(self, stamp):
    """Round a timestamp down to this interval.

    E.g. MINUTE removes the seconds (sets them to 0), while MINUTE5 also
    rounds the minutes down in chunks of 5.

    :param stamp: The stamp to round down.
    :return: The rounded stamp.
    """
    if self == self.SECOND:
        return stamp.replace(microsecond=0)
    # Table-driven equivalents of the original if/elif chain: each entry
    # maps an interval to its chunk size within the minute / hour field.
    minute_chunk = {
        self.MINUTE: 1, self.MINUTE5: 5, self.MINUTE10: 10,
        self.MINUTE15: 15, self.MINUTE20: 20, self.MINUTE30: 30,
    }
    hour_chunk = {
        self.HOUR: 1, self.HOUR2: 2, self.HOUR3: 3, self.HOUR4: 4,
        self.HOUR6: 6, self.HOUR8: 8, self.HOUR12: 12,
    }
    if self in minute_chunk:
        n = minute_chunk[self]
        return stamp.replace(microsecond=0, second=0,
                             minute=(stamp.minute // n) * n)
    if self in hour_chunk:
        n = hour_chunk[self]
        return stamp.replace(microsecond=0, second=0, minute=0,
                             hour=(stamp.hour // n) * n)
    if self == self.DAY:
        return stamp.replace(microsecond=0, second=0, minute=0, hour=0)
    raise AssertionError("An unexpected interval type (perhaps there's a bug or pending work here) tried "
                         "to round a timestamp")
|
Rounds a timestamp down to the relevant interval. E.g. the MINUTE interval will round down,
removing the seconds (setting them to 0), while the MINUTE5 will, also, round down the minutes
in chunks of 5.
:param stamp: The stamp to round down.
:return: The rounded stamp.
|
625941bbd99f1b3c44c6745a
|
def if_false_raise(predicate, exception_lambda):
    """Raise the exception returned by `exception_lambda` if `predicate`
    is truthy.

    NOTE(review): the name says "if false" but the code raises when the
    predicate is *true*.  Either the name or the condition is wrong --
    confirm against the call sites before changing either, since flipping
    the condition would invert behaviour for every caller.
    """
    if predicate:
        raise exception_lambda()
|
Raises the exception returned by the exception_lambda if predicate is true.
|
625941bbadb09d7d5db6c656
|
def get_ca_definition(self):
    """Get the custom attribute definition for this row's object.

    Returns None for unsaved objects (no id yet).  Looks in the block-level
    cache first, then falls back to a direct DB query on (definition_id,
    title).
    """
    if self.row_converter.obj.id is None:
        return None
    cad = models.CustomAttributeDefinition
    cache = self.row_converter.block_converter.get_ca_definitions_cache()
    definition = cache.get((self.row_converter.obj.id, self.display_name))
    if not definition:
        # NOTE(review): the fallback result is not written back into the
        # cache, so repeated misses re-query the DB -- confirm intentional.
        definition = cad.query.filter(and_(
            cad.definition_id == self.row_converter.obj.id,
            cad.title == self.display_name
        )).first()
    return definition
|
Get custom attribute definition for a specific object.
|
625941bb4f88993c3716bf30
|
def num_mutations(self):
    """Count mutated positions: those containing more than one distinct
    residue type."""
    mutated = 0
    for position in self.residues:
        # A position with a single residue type has a set of size 1.
        if len(set(position)) != 1:
            mutated += 1
    return mutated
|
Get number of mutations (positions with more than one type of residue)
|
625941bb8a349b6b435e8038
|
def classify_outer_product_args_pattern(args):
    """Classify the pattern of grid scan arguments in the list `args`.

    Checks the argument list for consistency, in particular the locations
    of movable objects (motors).  Should be used together with
    `chunk_outer_product_args`.

    Parameters
    ----------
    args : iterable
        The list of grid scan arguments (two supported patterns).

    Returns
    -------
    pattern : OuterProductArgsPattern
        Detected pattern.

    Raises
    ------
    ValueError
        If the pattern can not be identified or the list is inconsistent.
    """
    args = list(args)
    pattern = None

    def _verify_motor_locations(args, pattern):
        # True iff Movable objects sit exactly at the positions the given
        # pattern prescribes, and nowhere else.
        if pattern == OuterProductArgsPattern.PATTERN_1:
            # PATTERN_1: repeating 4-element groups -> motors at 0, 4, 8, ...
            pos_movable = list(range(0, len(args), 4))
        elif pattern == OuterProductArgsPattern.PATTERN_2:
            # PATTERN_2: one 4-element group, then 5-element groups.
            pos_movable = [0] + list(range(4, len(args), 5))
        else:
            raise ValueError(f"Unknown pattern '{pattern}'")
        for n, element in enumerate(args):
            flag = isinstance(element, Movable)
            if n in pos_movable:
                flag = not flag  # expected a motor here
            if flag:
                return False
        return True

    # Candidate patterns by length: divisible by 4, or 4 + multiple of 5.
    div_4, div_5 = not(len(args) % 4), (len(args) > 4) and not((len(args) - 4) % 5)
    if not div_4 and not div_5:
        raise ValueError(f"Wrong number of elements in 'args': len(args) = {len(args)}")
    args_valid = False
    if div_4 and not div_5:
        pattern = OuterProductArgsPattern.PATTERN_1
        args_valid = _verify_motor_locations(args, pattern)
    elif not div_4 and div_5:
        pattern = OuterProductArgsPattern.PATTERN_2
        args_valid = _verify_motor_locations(args, pattern)
    else:
        # Length fits both patterns; decide by where the motors actually are.
        for p in OuterProductArgsPattern:
            if _verify_motor_locations(args, p):
                pattern = p
                args_valid = True
                break
    if not args_valid:
        raise ValueError(f"Incorrect order of elements in the argument list 'args': "
                         f"some of the movable objects (motors) are out of place "
                         f"(args = {args})")
    return pattern
|
Classifies the pattern of grid scan arguments in the list `args`.
Checks the argument list for consistency, in particular checks
to location of movable objects (motors) in the list.
Should be used together with the function `chunk_outer_product_args`.
Parameters
----------
args: iterable
The list of grid scan arguments. Two pattern of arguments
are supported. See the description of the identical parameter
for the `chunk_outer_product_args`.
Returns
-------
pattern: OuterProductArgsPattern
Detected pattern
Raises
------
ValueError is raised if the pattern can not be identified or the list
is inconsistent.
|
625941bb8e7ae83300e4ae90
|
def G1DListMutatorRealRange(genome, **args):
    """Simple real-range mutator for G1DList (Python 2 code: uses xrange).

    Accepts the *rangemin* and *rangemax* genome parameters, both optional.
    Returns the number of mutations performed.
    """
    if args["pmut"] <= 0.0: return 0
    listSize = len(genome)
    # Expected number of mutations for this genome.
    mutations = args["pmut"] * (listSize)
    if mutations < 1.0:
        # Fewer than one expected: flip a biased coin per gene instead.
        mutations = 0
        for it in xrange(listSize):
            if Util.randomFlipCoin(args["pmut"]):
                genome[it] = rand_uniform(genome.getParam("rangemin", Consts.CDefRangeMin), genome.getParam("rangemax", Consts.CDefRangeMax))
                mutations += 1
    else:
        # One or more expected: mutate that many randomly chosen genes
        # (the same gene may be picked more than once).
        for it in xrange(int(round(mutations))):
            which_gene = rand_randint(0, listSize-1)
            genome[which_gene] = rand_uniform(genome.getParam("rangemin", Consts.CDefRangeMin), genome.getParam("rangemax", Consts.CDefRangeMax))
    return mutations
|
Simple real range mutator for G1DList
Accepts the *rangemin* and *rangemax* genome parameters, both optional.
|
625941bbeab8aa0e5d26da23
|
def call_action(self,topmenuaction,mainaction=True):
    """Call the main/alternate action registered for the currently
    selected (memorized) menu item.

    :param topmenuaction: True when the action targets the top-menu entry
        itself rather than an item/folder inside it.
    :param mainaction: True -> main action, False -> alternate action.

    NOTE(review): when ltopmenuentryname == '', `_action_func` is never
    assigned, so the final `if _action_func != None` raises
    UnboundLocalError -- confirm this path cannot occur in practice.
    """
    # Resolve the current selection in each of the four columns ('' if empty).
    if len(self.columns[0]) != 0:
        ltopmenu=self.columns[0][self.columns_idx[0]]
    else:
        ltopmenu = ''
    if len(self.columns[1]) != 0:
        ltopmenuentryname=self.columns[1][self.columns_idx[1]]
    else:
        ltopmenuentryname = ''
    if len(self.columns[2]) != 0:
        lmenuitem_or_foldername=self.columns[2][self.columns_idx[2]]
    else:
        lmenuitem_or_foldername = ''
    if len(self.columns[3]) != 0:
        lfolderitemname=self.columns[3][self.columns_idx[3]]
    else:
        lfolderitemname = ''
    # Registered actions are keyed by the concatenation of all four names.
    laction_name = ltopmenu + ltopmenuentryname + lmenuitem_or_foldername + lfolderitemname
    if ltopmenuentryname != '':
        # A global action, when set, overrides the per-item lookup.
        if mainaction:
            if self._global_main_action != None:
                _action_func = self._global_main_action
            else:
                _action_func = self._get_action(laction_name,'main')
        else:
            if self._global_alternate_action != None:
                _action_func = self._global_alternate_action
            else:
                _action_func = self._get_action(laction_name,'alternate')
    if _action_func != None :
        if topmenuaction:
            # Top-menu-entry action: no item/folder arguments.
            if ltopmenuentryname != '':
                _action_func(ltopmenu,ltopmenuentryname,self.get_topmenuentry_from_name(ltopmenu,ltopmenuentryname),'','',None)
        else:
            # Item action: argument layout differs for plain items vs folders.
            if not (self.is_a_folder(ltopmenu,ltopmenuentryname,lmenuitem_or_foldername)):
                _action_func(ltopmenu,ltopmenuentryname,self.get_topmenuentry_from_name(ltopmenu,ltopmenuentryname),'',lmenuitem_or_foldername,self.get_menuitem_from_name(ltopmenu,ltopmenuentryname,lmenuitem_or_foldername))
            else:
                _action_func(ltopmenu,ltopmenuentryname,self.get_topmenuentry_from_name(ltopmenu,ltopmenuentryname),lmenuitem_or_foldername,lfolderitemname,self.get_menuitem_in_folder_from_name(ltopmenu,ltopmenuentryname,lmenuitem_or_foldername,lfolderitemname))
|
used to call main/alternate action on selected item that is memorized
|
625941bb3346ee7daa2b2c2e
|
@app.route('/add_player', methods=('GET', 'POST'))
def add_player():
    """Create a new player from the submitted form.

    On POST with a valid first name, the player is inserted and the user
    is redirected to the index; otherwise the form is re-rendered with an
    error message in the template context.
    """
    # BUG FIX: `error` was assigned but never used, so a missing name
    # silently re-rendered the form.  The message is now passed to the
    # template (templates that ignore the extra variable are unaffected).
    error = None
    if request.method == 'POST':
        first_name = request.form['first_name']
        last_name = request.form['last_name']
        squad_number = request.form['squad_number']
        if not first_name:
            error = 'Name is required'
        else:
            db = get_db()
            # id is None so SQLite auto-assigns the primary key.
            db.execute(
                'INSERT INTO players (id, first_name, second_name,squad_number)'
                ' VALUES (?, ?, ?,?)',
                (None, first_name, last_name, squad_number)
            )
            db.commit()
            return redirect(url_for('index'))
    return render_template('add_player.html', error=error)
|
Create a new player from the submitted form data.
|
625941bb925a0f43d2549d38
|
def StringToArray(str_X):
    """Parse a whitespace-separated grid of integers into a 2D numpy array.

    Takes the text given by the problem (rows separated by newlines,
    numbers separated by spaces) and returns a 2D integer array.

    BUG FIX: the previous implementation built a Python literal by string
    surgery and eval()'d it, stripping "0d" digit pairs to avoid octal
    literals.  That was unsafe (eval) and corrupted any number containing
    a zero preceded by a digit (e.g. "100" became "10").  int() parses
    leading zeros correctly and needs no eval.
    """
    return np.array([[int(token) for token in line.split()]
                     for line in str_X.strip().splitlines()])
|
This function takes the text given by the problem and returns a
2D list which is more amenable to the manipulation we will be doing.
|
625941bb4428ac0f6e5ba6b6
|
def fetch_url(url, timeout=10, retry=False, max_attempts=3):
    """Fetch a URL, optionally retrying on failure.

    :param url: The URL to fetch (a string).
    :param timeout: Max seconds per attempt before aborting (default 10).
    :param retry: Whether to retry on failure (default False).
    :param max_attempts: Max attempts when retrying (default 3).
    :returns: The response body (a byte string).
    :raises: NotFoundError (404), InvalidResponseError (non-200),
        stopit.TimeoutException (timeout), or the last attempt's exception.
    """
    timer = Timer()
    logger.debug("Fetching %s ..", url)
    for i in range(1, max_attempts + 1):
        try:
            # The whole attempt (connect + read) is bounded by `timeout`.
            with SignalTimeout(timeout, swallow_exc=False):
                response = urlopen(url)
                status_code = response.getcode()
                if status_code != 200:
                    exc_type = (NotFoundError if status_code == 404 else InvalidResponseError)
                    raise exc_type("URL returned unexpected status code %s! (%s)" % (status_code, url))
                response_body = response.read()
                logger.debug("Took %s to fetch %s.", timer, url)
                return response_body
        except (NotFoundError, TimeoutException):
            # 404s and timeouts are never retried: retrying won't help.
            raise
        except Exception as e:
            if retry and i < max_attempts:
                logger.warning("Failed to fetch %s, retrying (%i/%i, error was: %s)", url, i, max_attempts, e)
            else:
                raise
|
Fetch a URL, optionally retrying on failure.
:param url: The URL to fetch (a string).
:param timeout: The maximum time in seconds that's allowed to pass before
the request is aborted (a number, defaults to 10 seconds).
:param retry: Whether to retry on failure (defaults to :data:`False`).
:param max_attempts: The maximum number of attempts when retrying is
enabled (an integer, defaults to three).
:returns: The response body (a byte string).
:raises: Any of the following exceptions can be raised:
- :exc:`NotFoundError` when the URL returns a 404 status code.
- :exc:`InvalidResponseError` when the URL returns a status code
that isn't 200.
- `stopit.TimeoutException`_ when the request takes longer
than `timeout` seconds (refer to the linked documentation for
details).
- Any exception raised by Python's standard library in the last
attempt (assuming all attempts raise an exception).
.. _stopit.TimeoutException: https://pypi.org/project/stopit/#exception
|
625941bb56b00c62f0f1451c
|
def cpta (self,widget,):
    """Open the capital menu (shows window4)."""
    self.window4.show_all()
|
Opens the capital menu.
|
625941bbfff4ab517eb2f2fe
|
def configure(cfg, size, add_swap=True):
    """Read INI-like configuration from `cfg` and return all defined
    composites.

    `size` is the overall frame size that all proportional (floating
    point) coordinates are related to.  When `add_swap` is True, swapped
    variants of the composites are added as well.
    """
    result = dict()
    for key, value in cfg:
        # Every entry must be of the form "name.attribute".
        if '.' not in key:
            raise RuntimeError("syntax error in composite config '{}' "
                               "(must be: 'name.attribute')"
                               .format(key))
        comp_name, attribute = key.lower().rsplit('.', 1)
        # First mention of a composite creates it; the index is its order.
        if comp_name not in result:
            result[comp_name] = Composite(len(result), comp_name)
        try:
            result[comp_name].config(attribute, value, size)
        except RuntimeError as err:
            raise RuntimeError(
                "syntax error in composite config value at '{}':\n{}"
                .format(comp_name, err))
    if add_swap:
        add_swapped_targets(result)
    return result
|
read INI like configuration from <cfg> and return all the defined
composites. <size> is the overall frame size which all proportional
(floating point) coordinates are related to.
|
625941bb5f7d997b87174960
|
def insert(self, key, value):
    """Insert a new element into the tree at its sorted position.

    Raises ValueError when the key already exists.
    """
    # Iterative walk instead of recursion: descend until an empty child
    # slot is found on the correct side, then attach a new node there.
    node = self
    while True:
        if key < node.key:
            if node.left:
                node = node.left
            else:
                node.left = Tree(key, value)
                return
        elif key > node.key:
            if node.right:
                node = node.right
            else:
                node.right = Tree(key, value)
                return
        else:
            raise ValueError("Attempt to insert duplicate value")
|
Insert a new element into the tree in the correct position.
|
625941bb26068e7796caeb9e
|
def calculate_score(cards):
    """Take a list of cards and calculate the total blackjack score.

    Returns 0 for a natural blackjack (two cards totalling 21).  Aces are
    held as 11 and demoted to 1 while the hand busts; like the original,
    the demotion mutates `cards` in place (11 -> 1).
    """
    if sum(cards) == 21 and len(cards) == 2:
        return 0
    # BUG FIX: the original demoted at most one ace, so hands with several
    # aces (e.g. [11, 11, 10]) stayed busted at 32 instead of scoring 12.
    while 11 in cards and sum(cards) > 21:
        cards.remove(11)
        cards.append(1)
    return sum(cards)
|
Take a list of cards and calculate the total
|
625941bb4e4d5625662d42a1
|
def _get_colored_segmentation_image(img, seg, colors, n_classes, do_augment=False, augment_name='aug_all', custom_aug=None):
    """Return (img, seg_img) where seg_img colors each class with its RGB
    value.

    Only channel 0 of `seg` carries class ids.  When augmentation is
    enabled it is applied jointly to image and mask so they stay aligned;
    note that `seg` is modified in place in that case.
    """
    seg_img = np.zeros_like(seg)
    if do_augment:
        if custom_aug is not None:
            img, seg[:, :, 0] = custom_augment_seg(img, seg[:, :, 0], augmentation_function=custom_aug)
        else:
            img, seg[:, :, 0] = augment_seg(img, seg[:, :, 0], augmentation_name=augment_name)
    # Paint each class id with its color, one channel at a time.
    for c in range(n_classes):
        seg_img[:, :, 0] += ((seg[:, :, 0] == c) * (colors[c][0])).astype('uint8')
        seg_img[:, :, 1] += ((seg[:, :, 0] == c) * (colors[c][1])).astype('uint8')
        seg_img[:, :, 2] += ((seg[:, :, 0] == c) * (colors[c][2])).astype('uint8')
    return img, seg_img
|
Return a colored segmented image
|
625941bb63f4b57ef0000fe5
|
def gnc_customer_get_type():
    """SWIG wrapper: gnc_customer_get_type() -> GType."""
    return _gnucash_core_c.gnc_customer_get_type()
|
gnc_customer_get_type() -> GType
|
625941bb24f1403a92600a2e
|
def build_database(s3_bucket):
    """Create the `aws_logs` database in Athena if it does not exist.

    Query results are written, SSE-S3 encrypted, to s3://<s3_bucket>/tables.

    Improvement: the start_query_execution response was previously assigned
    to an unused local; it is now returned so callers can poll the query
    execution id (callers that ignore the return value are unaffected).
    """
    athena = SESSION.client('athena')
    output = 's3://{s3_bucket}/tables'.format(s3_bucket=s3_bucket)
    config = {
        'OutputLocation': output,
        'EncryptionConfiguration': {
            'EncryptionOption': 'SSE_S3'
        }
    }
    return athena.start_query_execution(
        QueryString="create database if not exists aws_logs;",
        ResultConfiguration=config
    )
|
Build the logs database in Athena
|
625941bb21a7993f00bc7bb0
|
def convert_to_tvm_func(pyfunc):
    """Convert a python function to a TVM packed function.

    Parameters
    ----------
    pyfunc : python function
        The python function to be converted.

    Returns
    -------
    tvmfunc : tvm.nd.Function
        The converted tvm function.
    """
    local_pyfunc = pyfunc

    def cfun(args, type_codes, num_args, ret, _):
        # C callback bridging TVM's calling convention to Python.
        num_args = num_args.value if isinstance(num_args, ctypes.c_int) else num_args
        pyargs = (C_TO_PY_ARG_SWITCH[type_codes[i]](args[i]) for i in range(num_args))
        try:
            rv = local_pyfunc(*pyargs)
        except Exception:
            # Exceptions must not cross the C boundary: record the message
            # on the TVM side and signal failure with -1.
            msg = traceback.format_exc()
            msg = py2cerror(msg)
            _LIB.TVMAPISetLastError(c_str(msg))
            return -1
        if rv is not None:
            if isinstance(rv, tuple):
                raise ValueError("PackedFunction can only support one return value")
            temp_args = []
            values, tcodes, _ = _make_tvm_args((rv,), temp_args)
            if not isinstance(ret, TVMRetValueHandle):
                ret = TVMRetValueHandle(ret)
            if _LIB.TVMCFuncSetReturn(ret, values, tcodes, ctypes.c_int(1)) != 0:
                raise get_last_ffi_error()
            # Keep temp_args/rv referenced until TVMCFuncSetReturn is done.
            _ = temp_args
            _ = rv
        return 0

    handle = PackedFuncHandle()
    f = TVMPackedCFunc(cfun)
    # Manually incref the callback object so it outlives this frame; the
    # TVM_FREE_PYOBJ finalizer releases it when the TVM function dies.
    pyobj = ctypes.py_object(f)
    ctypes.pythonapi.Py_IncRef(pyobj)
    if _LIB.TVMFuncCreateFromCFunc(f, pyobj, TVM_FREE_PYOBJ, ctypes.byref(handle)) != 0:
        raise get_last_ffi_error()
    return _make_packed_func(handle, False)
|
Convert a python function to TVM function
Parameters
----------
pyfunc : python function
The python function to be converted.
Returns
-------
tvmfunc: tvm.nd.Function
The converted tvm function.
|
625941bbd18da76e23532398
|
def q21():
    """Generate a random 'scaling and aliquots' question (category A6).

    A sample of mass_a mg is taken from a specimen of mass_b g; given the
    nanomoles found in the sample, the student computes total moles in the
    whole specimen.
    """
    mass_b=random.randint(1,20)*50
    mass_a=random.randint(1,20)*10
    count_a = random.randint(30,300)
    # Scale nanomoles in the sample up to the whole specimen:
    # factor = (mass_b g -> mg) / mass_a mg.
    count_b = mass_b * count_a * 1000 /mass_a
    qtext='If a sample of mass $%s mg$ taken from a specimen of total mass $%s g$ contains %s nanomoles of a bioactive compound, how many moles are present in the whole specimen?'%( mass_a, mass_b, count_a)
    # exponent(count_b) - 9 converts nanomoles to moles in the answer.
    qanswer='%s x 10e%s moles'%(round(mantissa(count_b),3), exponent(count_b)-9)
    qcat='A6 Scaling and Aliquots'
    return {'title':qcat, 'question':qtext,'answers':qanswer}
|
scaling and aliquots
|
625941bb460517430c394052
|
def insert(conn, key, value, sync=False):
    """Store `value` under `key` in the LevelDB handle `conn`.

    Args:
        conn: the leveldb dir pointer.
        key: key to store under (stringified before encoding).
        value: value to store (stringified before encoding).
        sync (bool): whether to use synchronous writes.
    """
    encoding = config['encoding']
    encoded_key = bytes(str(key), encoding)
    encoded_value = bytes(str(value), encoding)
    conn.put(encoded_key, encoded_value, sync=sync)
|
Insert the value with the special key.
Args:
conn: the leveldb dir pointer.
value:
key:
sync(bool) – whether to use synchronous writes.
Returns:
|
625941bba05bb46b383ec6e9
|
def loading_animation():
    """Display a two-frame loading animation until the machine is ready.

    Blocks the caller, alternating the two frames every 0.3 s while
    `m.ready` is False.
    """
    # Build both frames once, up front: base image + "Loading..." caption
    # + version string, grown to the display footprint.
    im1 = helpers.add_text_to_image(Image.open("../assets/loading_1.png").convert("1"), "Loading...", ASSET_UBUNTU_MONO_ME)
    im2 = helpers.add_text_to_image(Image.open("../assets/loading_2.png").convert("1"), "Loading...", ASSET_UBUNTU_MONO_ME)
    im1 = helpers.add_text_to_image(im1, "PiBike v" + VERSION, ASSET_UBUNTU_MONO_SM)
    im2 = helpers.add_text_to_image(im2, "PiBike v" + VERSION, ASSET_UBUNTU_MONO_SM)
    im1 = helpers.grow_to_display_footprint(im1, background=0)
    im2 = helpers.grow_to_display_footprint(im2, background=0)
    while not m.ready:
        m.disp.show_image(im1)
        time.sleep(0.3)
        m.disp.show_image(im2)
        time.sleep(0.3)
|
Displays a loading animation
|
625941bb7047854f462a12d1
|
def __init__(self, player_id: PlayerId, player_configuration: PlayerConfiguration, held_socket: socket.SocketType):
    """Construct a ProxyPlayer that communicates over a socket.

    :param player_id: The id of the Player
    :param player_configuration: This Player's configuration/state
    :param held_socket: The socket this Player communicates through
    """
    AbsExternalPlayer.__init__(self, player_id, player_configuration)
    MessageSocket.__init__(self, held_socket)
|
Construct a ProxyPlayer with its corresponding PlayerState
:param player_id: The id of the Player
:param player_configuration: This Silly Player's state
:param held_socket: The socket this Player communicates through
|
625941bb67a9b606de4a7d81
|
def _log(self, time_elapsed: float) -> None: <NEW_LINE> <INDENT> time_text = f"{time_elapsed:{self.fmt}} seconds" <NEW_LINE> if self.text: <NEW_LINE> <INDENT> self.logger(self.text.format(time_text))
|
Do the actual logging of elapsed time
Args:
The time elapsed in seconds.
|
625941bb76d4e153a657e9f5
|
def message_ports_in(self):
    """SWIG wrapper: message_ports_in(vco_c_sptr self) -> swig_int_ptr."""
    return _blocks_swig5.vco_c_sptr_message_ports_in(self)
|
message_ports_in(vco_c_sptr self) -> swig_int_ptr
|
625941bb2eb69b55b151c770
|
def gesture_set_high_g(): <NEW_LINE> <INDENT> lis_int1(0x2a, 0x30, 0x00)
|
Configure accelerometer to detect high-g condition and set interrupt
|
625941bb711fe17d82542236
|
def timer_handler(): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> usb.control.set_feature(dev, FEATURE_ENDPOINT_HALT, ep_to_halt) <NEW_LINE> if (usb.control.get_status(dev, ep_to_halt) != 1): <NEW_LINE> <INDENT> raise RuntimeError('Invalid endpoint status after halt operation') <NEW_LINE> <DEDENT> <DEDENT> except Exception as err: <NEW_LINE> <INDENT> ctrl_error.set() <NEW_LINE> log('Endpoint {:#04x} halt failed ({!r}).'.format(ep_to_halt.bEndpointAddress, err)) <NEW_LINE> <DEDENT> time.sleep(POST_HALT_DELAY)
|
Halt an endpoint using a USB control request.
|
625941bb0fa83653e4656e81
|
def write_html_template(self, ctx, file_name, template_dict): <NEW_LINE> <INDENT> path = os.path.join(os.path.dirname(ctx.endpoint_file), file_name) <NEW_LINE> if not os.path.exists(path): <NEW_LINE> <INDENT> ctx.response.set_status(404) <NEW_LINE> ctx.response.out.write('unable to find file: %s' % path) <NEW_LINE> return <NEW_LINE> <DEDENT> ctx.response.out.write(template.render(path, template_dict))
|
Writes a templated html to the output stream.
Args:
ctx - The request context.
file_name - The name of the file. The path will be determined by the directory of the endpoint module file.
template_dict - A dictionary with template variables
|
625941bbbe8e80087fb20b0d
|
def read_transactions(currencies, reader): <NEW_LINE> <INDENT> for index, row in enumerate(reader): <NEW_LINE> <INDENT> if index == 0: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> sold_currency = row[TransColumns.SOLD_CURRENCY].lower() <NEW_LINE> transaction = { DataColumns.REFNUM: row[TransColumns.REFNUM], DataColumns.DATE: datetime.strptime( row[TransColumns.DATE], '%Y/%m/%d'), DataColumns.AMOUNT: "-" + row[TransColumns.SOLD_AMOUNT], DataColumns.RATIO: row[TransColumns.RATIO], DataColumns.BOUGHT_AMOUNT: row[TransColumns.BOUGHT_AMOUNT], DataColumns.BOUGHT_CURRENCY: row[TransColumns.BOUGHT_CURRENCY], DataColumns.STATUS: row[TransColumns.STATUS], } <NEW_LINE> for attr in ["PAYEE", "VARIABLE_SYMBOL", "TYPE"]: <NEW_LINE> <INDENT> if row[TransColumns.__members__[attr]]: <NEW_LINE> <INDENT> transaction[DataColumns.__members__[attr]] = row[ TransColumns.__members__[attr]] <NEW_LINE> <DEDENT> <DEDENT> currencies[sold_currency].append(transaction) <NEW_LINE> bought_currency = row[TransColumns.BOUGHT_CURRENCY].lower() <NEW_LINE> transaction = { DataColumns.REFNUM: row[TransColumns.REFNUM], DataColumns.DATE: datetime.strptime( row[TransColumns.DATE], '%Y/%m/%d'), DataColumns.AMOUNT: row[TransColumns.BOUGHT_AMOUNT], DataColumns.SOLD_CURRENCY: row[TransColumns.SOLD_CURRENCY], DataColumns.RATIO: row[TransColumns.RATIO], DataColumns.STATUS: row[TransColumns.STATUS], } <NEW_LINE> for attr in ["PAYEE", "VARIABLE_SYMBOL", "TYPE"]: <NEW_LINE> <INDENT> if row[TransColumns.__members__[attr]]: <NEW_LINE> <INDENT> transaction[DataColumns.__members__[attr]] = row[ TransColumns.__members__[attr]] <NEW_LINE> <DEDENT> <DEDENT> currencies[bought_currency].append(transaction)
|
Creates internal transactions for the given transactions input.
Parameters
----------
currencies : dict(str -> list of dictionaries)
The dictionary will be updated for new transactions.
reader : CSV reader for transactions file.
|
625941bb31939e2706e4cd33
|
def do_vds_command(request, do_json=True, data=None): <NEW_LINE> <INDENT> response = None <NEW_LINE> try: <NEW_LINE> <INDENT> response = urllib2.urlopen(request, data) <NEW_LINE> if response is not None and hasattr(response, 'getcode'): <NEW_LINE> <INDENT> if response.getcode() == 202: <NEW_LINE> <INDENT> info = json.loads(response.read()) <NEW_LINE> url = info['task-url'] <NEW_LINE> return do_retry(url) <NEW_LINE> <DEDENT> if response.getcode() == 204: <NEW_LINE> <INDENT> return '' <NEW_LINE> <DEDENT> <DEDENT> try: <NEW_LINE> <INDENT> if do_json: <NEW_LINE> <INDENT> return json.loads(response.read()) <NEW_LINE> <DEDENT> return response.read() <NEW_LINE> <DEDENT> except (ValueError, TypeError, StopIteration, AttributeError) as ex: <NEW_LINE> <INDENT> raise <NEW_LINE> <DEDENT> <DEDENT> except (urllib2.HTTPError, urllib2.URLError) as ex: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> if not hasattr(ex, 'read'): <NEW_LINE> <INDENT> msg = ex.message <NEW_LINE> raise <NEW_LINE> <DEDENT> msg = ex.read() <NEW_LINE> msg = json.loads(msg) <NEW_LINE> if 'detail' in msg: <NEW_LINE> <INDENT> msg = msg['detail'] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if 'message' in msg: <NEW_LINE> <INDENT> msg = msg['message'] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> msg = str(ex) <NEW_LINE> <DEDENT> <DEDENT> ex.message = msg <NEW_LINE> raise <NEW_LINE> <DEDENT> except (ValueError, TypeError, StopIteration, KeyError): <NEW_LINE> <INDENT> if len(ex.message) == 0: <NEW_LINE> <INDENT> ex.message = 'Exception, no details available' <NEW_LINE> <DEDENT> raise ex <NEW_LINE> <DEDENT> <DEDENT> finally: <NEW_LINE> <INDENT> if response is not None and hasattr(response, 'close'): <NEW_LINE> <INDENT> response.close() <NEW_LINE> <DEDENT> <DEDENT> raise
|
issue urllib2 request, returns far side dict
|
625941bb94891a1f4081b96d
|
def rotate1(self, image, name, angle=45): <NEW_LINE> <INDENT> for i in range(1,8): <NEW_LINE> <INDENT> m = rotate(image, angle*i, resize=True) <NEW_LINE> print('rotated shape :') <NEW_LINE> print(m.shape) <NEW_LINE> if self.is_save: <NEW_LINE> <INDENT> io.imsave(self.root+str(angle*i)+'_'+name,m) <NEW_LINE> <DEDENT> yield m
|
background remove after rotate
|
625941bb21bff66bcd68481a
|
def solve_linear_system_LU(matrix, syms): <NEW_LINE> <INDENT> if matrix.rows != matrix.cols - 1: <NEW_LINE> <INDENT> raise ValueError("Rows should be equal to columns - 1") <NEW_LINE> <DEDENT> A = matrix[:matrix.rows, :matrix.rows] <NEW_LINE> b = matrix[:, matrix.cols - 1:] <NEW_LINE> soln = A.LUsolve(b) <NEW_LINE> solutions = {} <NEW_LINE> for i in range(soln.rows): <NEW_LINE> <INDENT> solutions[syms[i]] = soln[i, 0] <NEW_LINE> <DEDENT> return solutions
|
Solves the augmented matrix system using ``LUsolve`` and returns a
dictionary in which solutions are keyed to the symbols of *syms* as ordered.
Explanation
===========
The matrix must be invertible.
Examples
========
>>> from sympy import Matrix, solve_linear_system_LU
>>> from sympy.abc import x, y, z
>>> solve_linear_system_LU(Matrix([
... [1, 2, 0, 1],
... [3, 2, 2, 1],
... [2, 0, 0, 1]]), [x, y, z])
{x: 1/2, y: 1/4, z: -1/2}
See Also
========
LUsolve
|
625941bbf9cc0f698b1404c7
|
def set_volumes_for_nzo(self): <NEW_LINE> <INDENT> none_counter = 0 <NEW_LINE> found_counter = 0 <NEW_LINE> for nzf in self.nzo.files + self.nzo.finished_files: <NEW_LINE> <INDENT> nzf.setname, nzf.vol = analyze_rar_filename(nzf.filename) <NEW_LINE> if nzf.setname: <NEW_LINE> <INDENT> found_counter += 1 <NEW_LINE> if nzf.setname not in self.total_volumes: <NEW_LINE> <INDENT> self.total_volumes[nzf.setname] = 0 <NEW_LINE> <DEDENT> self.total_volumes[nzf.setname] = max(self.total_volumes[nzf.setname], nzf.vol) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> none_counter += 1 <NEW_LINE> <DEDENT> <DEDENT> if none_counter > found_counter: <NEW_LINE> <INDENT> self.total_volumes = {}
|
Loop over all files to detect the names
|
625941bb38b623060ff0acb4
|
def _download_cifar10(local_path): <NEW_LINE> <INDENT> dataset_path = os.path.join(local_path, 'cifar10') <NEW_LINE> if not os.path.exists(dataset_path): <NEW_LINE> <INDENT> os.makedirs(dataset_path) <NEW_LINE> <DEDENT> print("************** Downloading the Cifar10 dataset **************") <NEW_LINE> remote_url = "http://www.cs.toronto.edu/~kriz/cifar-10-binary.tar.gz" <NEW_LINE> file_name = os.path.join(dataset_path, remote_url.split('/')[-1]) <NEW_LINE> if not os.path.exists(file_name.replace('.gz', '')): <NEW_LINE> <INDENT> _fetch_and_unzip(remote_url, file_name) <NEW_LINE> <DEDENT> return os.path.join(dataset_path, 'cifar-10-batches-bin')
|
Download the dataset from http://www.cs.toronto.edu/~kriz/cifar.html.
|
625941bbb830903b967e97da
|
def __getitem__(self,idx): <NEW_LINE> <INDENT> return self.hframe._group_to_series(idx)
|
For HFrame.loc[idx] access.
|
625941bbff9c53063f47c0ba
|
def symmetric_distance_matrix( self, threshold: float, search_type: str = "tanimoto", a: float = 0, b: float = 0, n_workers: int = 4, ) -> sparse.csr.csr_matrix: <NEW_LINE> <INDENT> if search_type == "tversky": <NEW_LINE> <INDENT> if a != b: <NEW_LINE> <INDENT> raise Exception("tversky with a != b is asymmetric") <NEW_LINE> <DEDENT> search_func = TverskySearch <NEW_LINE> args = (threshold, a, b) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> search_func = TanimotoSearch <NEW_LINE> args = (threshold,) <NEW_LINE> <DEDENT> from tqdm import tqdm <NEW_LINE> idxs = np.arange(self.fps.shape[0], dtype=np.uint32) <NEW_LINE> np.random.shuffle(idxs) <NEW_LINE> def run(idx): <NEW_LINE> <INDENT> np_query = self.fps[idx] <NEW_LINE> bounds = get_bounds_range( np_query, threshold, a, b, self.popcnt_bins, search_type ) <NEW_LINE> sym_bounds = (max(idx + 1, bounds[0]), bounds[1]) <NEW_LINE> return search_func(np_query, self.fps, *args, *sym_bounds) <NEW_LINE> <DEDENT> rows = [] <NEW_LINE> cols = [] <NEW_LINE> data = [] <NEW_LINE> if n_workers == 1: <NEW_LINE> <INDENT> for idx in tqdm(idxs, total=idxs.shape[0]): <NEW_LINE> <INDENT> np_res = run(idx) <NEW_LINE> for r in np_res: <NEW_LINE> <INDENT> rows.append(idx) <NEW_LINE> cols.append(r["idx"]) <NEW_LINE> data.append(r["coeff"]) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> with cf.ThreadPoolExecutor(max_workers=n_workers) as executor: <NEW_LINE> <INDENT> future_to_idx = {executor.submit(run, idx,): idx for idx in idxs} <NEW_LINE> for future in tqdm(cf.as_completed(future_to_idx), total=idxs.shape[0]): <NEW_LINE> <INDENT> idx = future_to_idx[future] <NEW_LINE> np_res = future.result() <NEW_LINE> for r in np_res: <NEW_LINE> <INDENT> rows.append(idx) <NEW_LINE> cols.append(r["idx"]) <NEW_LINE> data.append(r["coeff"]) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> csr_matrix = sparse.csr_matrix( (data + data, (rows + cols, cols + rows)), shape=(self.fps.shape[0], self.fps.shape[0]), ) <NEW_LINE> csr_matrix.data = 1 - 
csr_matrix.data <NEW_LINE> return csr_matrix
|
Computes the Tanimoto similarity matrix of the set.
Parameters
----------
threshold : float
Similarity threshold.
search_type : str
Type of search.
a : float
alpha in Tversky search.
b : float
beta in Tversky search.
n_workers : int
Number of threads to use.
Returns
-------
results : numpy array
Similarity results.
|
625941bb498bea3a759b9975
|
def create(self, user=None, name=None, plan=None, coupon=None, token=None): <NEW_LINE> <INDENT> if token is None: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> if coupon: <NEW_LINE> <INDENT> self.coupon = coupon.upper() <NEW_LINE> <DEDENT> customer = PaymentCustomer.create(token=token, email=user.email, plan=plan, coupon=self.coupon) <NEW_LINE> user.payment_id = customer.id <NEW_LINE> user.name = name <NEW_LINE> user.previous_plan = plan <NEW_LINE> user.coins = add_subscription_coins(user.coins, Subscription.get_plan_by_id( user.previous_plan), Subscription.get_plan_by_id(plan), user.cancelled_subscription_on) <NEW_LINE> user.cancelled_subscription_on = None <NEW_LINE> self.user_id = user.id <NEW_LINE> self.plan = plan <NEW_LINE> if coupon: <NEW_LINE> <INDENT> coupon = Coupon.query.filter(Coupon.code == self.coupon).first() <NEW_LINE> coupon.redeem() <NEW_LINE> <DEDENT> credit_card = CreditCard(user_id=user.id, **CreditCard.extract_card_params(customer)) <NEW_LINE> db.session.add(user) <NEW_LINE> db.session.add(credit_card) <NEW_LINE> db.session.add(self) <NEW_LINE> db.session.commit() <NEW_LINE> return True
|
Create a recurring subscription.
:param user: User to apply the subscription to
:type user: User instance
:param name: User's billing name
:type name: str
:param plan: Plan identifier
:type plan: str
:param coupon: Coupon code to apply
:type coupon: str
:param token: Token returned by JavaScript
:type token: str
:return: bool
|
625941bbab23a570cc250045
|
def linear_fitting(): <NEW_LINE> <INDENT> x = np.array([1, 2, 3, 4, 5]) <NEW_LINE> y = np.array([4, 4.5, 6, 8, 8.5]) <NEW_LINE> a, b = least_square_algorithm(x, y, 1) <NEW_LINE> f = lambda x: a + b * x <NEW_LINE> print(f"The linear fitting's square error is {calculate_square_error(f(x), y)}") <NEW_LINE> draw_curve(x, y, f, 'linear_fitting.png')
|
Fitting experiment 1
|
625941bb377c676e9127206f
|
def get_theme_config(self) -> Tuple[str, Dict]: <NEW_LINE> <INDENT> theme_name = getattr(self.config, "revealjs_theme", "sphinx_revealjs") <NEW_LINE> theme_options = getattr(self.config, "revealjs_theme_options", {}) <NEW_LINE> config = raw_json(theme_options.get("revealjs_config", "")) <NEW_LINE> theme_options["revealjs_config"] = config <NEW_LINE> return theme_name, theme_options
|
Find and return configuration about theme (name and option params).
Find theme and merge options.
|
625941bb8e7ae83300e4ae91
|
def file_filter(self, full_path, base_paths): <NEW_LINE> <INDENT> matched_base_path = '' <NEW_LINE> for base_path in base_paths: <NEW_LINE> <INDENT> if full_path.startswith(base_path) and len(base_path) > len(matched_base_path): <NEW_LINE> <INDENT> matched_base_path = base_path <NEW_LINE> <DEDENT> <DEDENT> relative_path = full_path[len(matched_base_path):] <NEW_LINE> if os.path.isdir(full_path): <NEW_LINE> <INDENT> path = relative_path <NEW_LINE> file_name = None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> path, file_name = os.path.split(relative_path) <NEW_LINE> <DEDENT> path, folder_name = os.path.split(path) <NEW_LINE> import_config = dict(config['import']) <NEW_LINE> if 'singletons' not in import_config or not import_config[ 'singletons']: <NEW_LINE> <INDENT> while len(folder_name) > 0: <NEW_LINE> <INDENT> matched = self.folder_name_album_regex.match( folder_name) is not None <NEW_LINE> matched = not matched if self.invert_folder_album_result else matched <NEW_LINE> if not matched: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> path, folder_name = os.path.split(path) <NEW_LINE> <DEDENT> matched = self.file_name_album_regex.match( file_name) is not None <NEW_LINE> matched = not matched if self.invert_file_album_result else matched <NEW_LINE> if not matched: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> while len(folder_name) > 0: <NEW_LINE> <INDENT> matched = self.folder_name_singleton_regex.match( folder_name) is not None <NEW_LINE> matched = not matched if self.invert_folder_singleton_result else matched <NEW_LINE> if not matched: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> path, folder_name = os.path.split(path) <NEW_LINE> <DEDENT> matched = self.file_name_singleton_regex.match( file_name) is not None <NEW_LINE> matched = not matched if self.invert_file_singleton_result else matched <NEW_LINE> if not matched: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return True
|
Checks if the configured regular expressions allow the import of the
file given in full_path.
|
625941bb31939e2706e4cd34
|
def render(self): <NEW_LINE> <INDENT> size = (self.width,self.height) <NEW_LINE> img = Image.new("RGB", size ) <NEW_LINE> for layer in self._layers: <NEW_LINE> <INDENT> img = layer.render( img ) or img <NEW_LINE> <DEDENT> self._image = img <NEW_LINE> return self._image
|
Render this CAPTCHA, returning a PIL image
|
625941bb92d797404e30404e
|
def localize(self): <NEW_LINE> <INDENT> return f""
|
All attributes xStats when all cards are on the team
|
625941bb3d592f4c4ed1cf43
|
def __onDel(self): <NEW_LINE> <INDENT> if self.__currentItem is not None: <NEW_LINE> <INDENT> self.bpointsList.deleteBreak()
|
Triggered when a breakpoint should be deleted
|
625941bb8e05c05ec3eea237
|
def __init__(self, fp, codec, sync=False): <NEW_LINE> <INDENT> RecordIO.Stream.__init__(self, fp, codec) <NEW_LINE> if 'w' not in self._fp.mode and 'a' not in self._fp.mode and '+' not in self._fp.mode: <NEW_LINE> <INDENT> raise RecordIO.InvalidFileHandle( 'Filehandle supplied to RecordWriter does not appear to be writeable!') <NEW_LINE> <DEDENT> self.set_sync(sync)
|
Initialize a Writer from the file pointer fp.
If sync=True is supplied, then all mutations are fsynced after write, otherwise
standard filesystem buffering is employed.
|
625941bba17c0f6771cbdf18
|
def get_main_dir(): <NEW_LINE> <INDENT> if main_is_frozen(): <NEW_LINE> <INDENT> return os.path.dirname(sys.executable) <NEW_LINE> <DEDENT> return os.path.dirname(sys.argv[0])
|
Returns the path to the currently executing script or exe.
|
625941bbbf627c535bc1309b
|
def add(self, synchronous=True, timeout=None, **kwargs): <NEW_LINE> <INDENT> kwargs = kwargs.copy() <NEW_LINE> if 'data' not in kwargs: <NEW_LINE> <INDENT> kwargs['data'] = dict() <NEW_LINE> <DEDENT> if 'component_ids' not in kwargs['data']: <NEW_LINE> <INDENT> kwargs['data']['components'] = [_payload(self.get_fields(), self.get_values())] <NEW_LINE> <DEDENT> kwargs.update(self._server_config.get_client_kwargs()) <NEW_LINE> response = client.put(self.path('add'), **kwargs) <NEW_LINE> return _handle_response(response, self._server_config, synchronous, timeout)
|
Add provided Content View Component.
:param synchronous: What should happen if the server returns an HTTP
202 (accepted) status code? Wait for the task to complete if
``True``. Immediately return the server's response otherwise.
:param timeout: Maximum number of seconds to wait until timing out.
Defaults to ``nailgun.entity_mixins.TASK_TIMEOUT``.
:param kwargs: Arguments to pass to requests.
:returns: The server's response, with all JSON decoded.
:raises: ``requests.exceptions.HTTPError`` If the server responds with
an HTTP 4XX or 5XX message.
|
625941bb96565a6dacc8f599
|
def ensure_exists(self): <NEW_LINE> <INDENT> req, trans = self.client._request_and_transport() <NEW_LINE> if not self.id and self.subscriber: <NEW_LINE> <INDENT> subscription_data = {'subscriber': self.subscriber, 'ttl': self.ttl, 'options': self.options } <NEW_LINE> subscription = core.subscription_create(trans, req, self.queue_name, subscription_data) <NEW_LINE> if subscription and 'subscription_id' in subscription: <NEW_LINE> <INDENT> self.id = subscription['subscription_id'] <NEW_LINE> <DEDENT> <DEDENT> if self.id: <NEW_LINE> <INDENT> sub = core.subscription_get(trans, req, self.queue_name, self.id) <NEW_LINE> self.subscriber = sub.get('subscriber') <NEW_LINE> self.ttl = sub.get('ttl') <NEW_LINE> self.options = sub.get('options') <NEW_LINE> self.age = sub.get('age') <NEW_LINE> self.confirmed = sub.get('confirmed')
|
Ensures subscription exists
This method is not race safe, the subscription could've been deleted
right after it was called.
|
625941bbcc40096d61595817
|
def which_to_validate(): <NEW_LINE> <INDENT> global LogDAG <NEW_LINE> n = config['links'] <NEW_LINE> arr = LogDAG[(-1*n):] <NEW_LINE> ids = [] <NEW_LINE> for i in arr: <NEW_LINE> <INDENT> ids.append(i['blockid']) <NEW_LINE> <DEDENT> return ids
|
This function should contain a nice algorithm that specifies
which blocks it will link to, based on some way of assigning
weight to blocks.
Also, it should verify that it not links to blocks from its own hostname.
However, we just take the last n blocks for now.
|
625941bb15fb5d323cde09d0
|
def p_statement_declr(p): <NEW_LINE> <INDENT> names[p[2]] = 0
|
statement : declaration NAME SEMICOLON
|
625941bb63d6d428bbe443b5
|
def checkOneInstanceGucValueByShowing(instance): <NEW_LINE> <INDENT> key = g_opts.gucStr.split(':')[0].strip() <NEW_LINE> value = g_opts.gucStr.split(':')[1].strip().split(",") <NEW_LINE> g_logger.debug( "Check if the value of guc {0} is {1}. " "Instance data dir is: {2}".format(key, value, instance.datadir)) <NEW_LINE> sql = "show %s;" % key <NEW_LINE> g_logger.debug("Command to check value is: %s" % sql) <NEW_LINE> retryTimes = 300 <NEW_LINE> for _ in range(retryTimes): <NEW_LINE> <INDENT> (status, output) = ClusterCommand.execSQLCommand( sql, g_opts.user, "", instance.port, "postgres", "-m", IsInplaceUpgrade=True) <NEW_LINE> g_logger.debug("SQL [{0}] perform output: {1}".format(sql, output)) <NEW_LINE> if status == 0 and output != "": <NEW_LINE> <INDENT> g_logger.debug("Output is: %s" % output) <NEW_LINE> checkValue = output.strip() <NEW_LINE> if str(checkValue) in value: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> raise Exception(ErrorCode.GAUSS_521["GAUSS_52102"] % key + " expect value %s" % (str(value)))
|
check dn guc value by "show guc" in database in every node
:param instance:
:return:
|
625941bbfb3f5b602dac3556
|
def TSP(t,m): <NEW_LINE> <INDENT> Cx = [] <NEW_LINE> Cy = [] <NEW_LINE> for i in range(0,m): <NEW_LINE> <INDENT> Cx.append(random.randint(0,1000)) <NEW_LINE> Cy.append(random.randint(0,1000)) <NEW_LINE> <DEDENT> T = t <NEW_LINE> T0 = T <NEW_LINE> i = 1 <NEW_LINE> d = [] <NEW_LINE> time = [] <NEW_LINE> for k in range(0,40000): <NEW_LINE> <INDENT> T = T0 / (1 + 0.0000001 * (np.square(k))) <NEW_LINE> d_old = dst_fun(Cx,Cy) <NEW_LINE> a = b = 1 <NEW_LINE> while (a == b): <NEW_LINE> <INDENT> a = random.randint(0,m - 1) <NEW_LINE> b = random.randint(0,m - 1) <NEW_LINE> <DEDENT> Cx[a],Cx[b] = Cx[b],Cx[a] <NEW_LINE> Cy[a],Cy[b] = Cy[b],Cy[a] <NEW_LINE> d_new = dst_fun(Cx,Cy) <NEW_LINE> alpha = np.minimum(1,np.exp(-((d_new - d_old) / T))) <NEW_LINE> acc = random.random() <NEW_LINE> if acc <= alpha: <NEW_LINE> <INDENT> d.append(d_new) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> d.append(d_old) <NEW_LINE> Cx[a],Cx[b] = Cx[b],Cx[a] <NEW_LINE> Cy[a],Cy[b] = Cy[b],Cy[a] <NEW_LINE> <DEDENT> <DEDENT> for k in range(0,len(d)): <NEW_LINE> <INDENT> time.append(k) <NEW_LINE> <DEDENT> plt.plot(time,d) <NEW_LINE> plt.title('The convegence path') <NEW_LINE> plt.xlabel('time') <NEW_LINE> plt.ylabel('total path length') <NEW_LINE> plt.show() <NEW_LINE> plt.plot(Cx,Cy,marker = 'o',mec = 'r',mfc = 'r') <NEW_LINE> plt.title('Finally chosen route') <NEW_LINE> plt.xlabel('x position') <NEW_LINE> plt.ylabel('y position') <NEW_LINE> plt.show() <NEW_LINE> print(d[len(d) - 1])
|
Traveling Salesman Problem using Simulated Annealing Problem
|
625941bbfbf16365ca6f6083
|
def __array__(self, dtype=None): <NEW_LINE> <INDENT> ret = take_1d(self.categories.values, self._codes) <NEW_LINE> if dtype and not is_dtype_equal(dtype, self.categories.dtype): <NEW_LINE> <INDENT> return np.asarray(ret, dtype) <NEW_LINE> <DEDENT> if is_extension_array_dtype(ret): <NEW_LINE> <INDENT> ret = np.asarray(ret) <NEW_LINE> <DEDENT> return ret
|
The numpy array interface.
Returns
-------
values : numpy array
A numpy array of either the specified dtype or,
if dtype==None (default), the same dtype as
categorical.categories.dtype
|
625941bb0383005118ecf4aa
|
def add_scene(self, scene): <NEW_LINE> <INDENT> self.scenes.append(scene) <NEW_LINE> if self.active_scene == None: <NEW_LINE> <INDENT> self.active_scene = self.scenes[-1]
|
Adds an empty scene
|
625941bb57b8e32f52483364
|
def guess_virtualenv(source_file, venv_root): <NEW_LINE> <INDENT> full_path = os.path.abspath(source_file) <NEW_LINE> dir_components = os.path.dirname(full_path).split(os.sep) <NEW_LINE> virtualenv_base = os.path.expanduser(venv_root) <NEW_LINE> used_components = [os.sep] <NEW_LINE> for component in dir_components: <NEW_LINE> <INDENT> if not component: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> used_components.append(component) <NEW_LINE> virtualenv_path = os.path.join(virtualenv_base, component) <NEW_LINE> if os.path.exists(virtualenv_path): <NEW_LINE> <INDENT> return os.path.join(*used_components), virtualenv_path <NEW_LINE> <DEDENT> <DEDENT> return None, None
|
Return the paths to the project root and the virtualenv that
corresponds to this source file, if any.
The virtualenv name must match the name of one of the containing
directories.
|
625941bb26068e7796caeb9f
|
def __init__(self, layer): <NEW_LINE> <INDENT> self.layer = layer <NEW_LINE> self.batch = layer.batch <NEW_LINE> self.hidden_shape = layer.hidden_shape <NEW_LINE> self.num_channels = layer.num_channels <NEW_LINE> self.input_shape = layer.input_shape <NEW_LINE> self.num_bases = layer.num_bases <NEW_LINE> x = torch.tensor(layer.init_bias) <NEW_LINE> self.bias = x.repeat( self.num_bases ) <NEW_LINE> self.block_shape = self.layer.block_shape <NEW_LINE> win_shape = layer.btmup_window_shape <NEW_LINE> self.Wb = 0.001 * torch.randn(self.num_bases, self.num_channels, win_shape[0], win_shape[1]) <NEW_LINE> self.Wb_inc = torch.zeros((self.num_bases, self.num_channels, win_shape[0], win_shape[1])) <NEW_LINE> self.Wt = 0 <NEW_LINE> self.pos_states = torch.zeros((self.num_bases,self.hidden_shape[0],self.hidden_shape[1])) <NEW_LINE> self.pos_probs = torch.zeros((self.num_bases,self.hidden_shape[0],self.hidden_shape[1])) <NEW_LINE> self.bias_inc = torch.zeros(self.num_bases) <NEW_LINE> self.pooling_units = torch.zeros((self.batch,self.num_bases,self.layer.output_shape[0],self.layer.output_shape[1])) <NEW_LINE> self.pooling_probs = torch.zeros((self.batch, self.num_bases,self.layer.output_shape[0],self.layer.output_shape[1])) <NEW_LINE> self.pos_activation = 0 <NEW_LINE> self.neg_activation = 0 <NEW_LINE> if self.layer.use_cuda: <NEW_LINE> <INDENT> self.Wb = self.Wb.cuda() <NEW_LINE> self.Wb_inc = self.Wb_inc.cuda() <NEW_LINE> self.pos_states = self.pos_states.cuda() <NEW_LINE> self.pos_probs = self.pos_probs.cuda() <NEW_LINE> self.pooling_units = self.pooling_units.cuda() <NEW_LINE> self.pooling_probs = self.pooling_probs.cuda()
|
Constructor
Input:
layer -- the layer to which the base belongs to
|
625941bb1d351010ab8559e2
|
def msg2usernames(msg, **config): <NEW_LINE> <INDENT> if not _cache.is_configured: <NEW_LINE> <INDENT> _cache.configure(**config['fmn.rules.cache']) <NEW_LINE> <DEDENT> key = "|".join(['usernames', msg['msg_id']]).encode('utf-8') <NEW_LINE> creator = lambda: fedmsg.meta.msg2usernames(msg, **config) <NEW_LINE> return _cache.get_or_create(key, creator)
|
Return cached fedmsg.meta.msg2usernames(...)
|
625941bbe1aae11d1e749b7a
|
def get_var(name): <NEW_LINE> <INDENT> frame = inspect.currentframe() <NEW_LINE> path = frame.f_code.co_filename <NEW_LINE> index = path.rindex(os.path.sep) <NEW_LINE> path = path[:index] <NEW_LINE> frame = frame.f_back.f_back <NEW_LINE> code = frame.f_code <NEW_LINE> try: <NEW_LINE> <INDENT> while code.co_name != 'wrapper' or code.co_filename[:index] != path: <NEW_LINE> <INDENT> frame = frame.f_back <NEW_LINE> code = frame.f_code <NEW_LINE> <DEDENT> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> report(RuntimeError, 'Validation functions must be decorated.', prepend='Invalid validation call: ') <NEW_LINE> <DEDENT> if not isinstance(name, (list, tuple)): <NEW_LINE> <INDENT> name = (name,) <NEW_LINE> <DEDENT> lookups = name[1:] <NEW_LINE> name = name[0] <NEW_LINE> try: <NEW_LINE> <INDENT> var = frame.f_back.f_locals[name] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> report(NameError, 'must refer to an argument of {!r}.', frame.f_back.f_code.co_name, var_name='name', var_value=name, prepend='Invalid validation call: ') <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> for i, lookup in enumerate(lookups): <NEW_LINE> <INDENT> var = var[lookup] <NEW_LINE> <DEDENT> <DEDENT> except LookupError: <NEW_LINE> <INDENT> report(LookupError, 'must have the key or index, {!r}.', lookup, var_name=[name] + list(lookups[:i])) <NEW_LINE> <DEDENT> return var
|
Retrieve the value of a variable through call stack inspection.
`name` must refer to a variable in the parent scope of the function or
method decorated by `magni.utils.validation.decorate_validation` which is
closest to the top of the call stack. If `name` is a string then there must
be a variable of that name in that scope. If `name` is a set-like object
then there must be a variable having the first value in that set-like
object as name. The remaining values are used as keys/indices on the
variable to obtain the variable to be validated. For example, the `name`
('name', 0, 'key') refers to the variable "name[0]['key']".
Parameters
----------
name : None
The name of the variable to be retrieved.
Returns
-------
var : None
The value of the retrieved variable.
Notes
-----
The present function searches the call stack from top to bottom until it
finds a function named 'wrapper' defined in this file. That is, until it
finds a decorated validation function. The present function then looks up
the variable indicated by `name` in the parent scope of that decorated
validation function.
|
625941bb24f1403a92600a2f
|
def prepare_execution(self, exe): <NEW_LINE> <INDENT> self._excution = exe <NEW_LINE> for pcontract in self.pcontracts: <NEW_LINE> <INDENT> self.get_data(pcontract) <NEW_LINE> <DEDENT> self._init_main_data(self._main_pcontract)
|
数据加载,关键数据变量初始化, 设置执行器。
Args:
exe (ExecuteUnit): 执行器。
|
625941bbdd821e528d63b071
|
def run(self): <NEW_LINE> <INDENT> self.task.start() <NEW_LINE> tstart = time.time() <NEW_LINE> while self.task.isAlive(): <NEW_LINE> <INDENT> if self.task.status: <NEW_LINE> <INDENT> gobject.idle_add(self.status.set_text, self.task.status) <NEW_LINE> <DEDENT> seconds_elapsed = time.time() - tstart <NEW_LINE> if self.task.denom == 0: <NEW_LINE> <INDENT> text = '{} - '.format(self.task.numer)+ format_time(seconds_elapsed)+' Elapsed' <NEW_LINE> gobject.idle_add(self.progressbar.pulse) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> fract = float(self.task.numer)/self.task.denom <NEW_LINE> text = '{} / {} - '.format(self.task.numer, self.task.denom)+ self.ETA(fract, seconds_elapsed)+' Remaining' <NEW_LINE> gobject.idle_add(self.progressbar.set_fraction, fract) <NEW_LINE> <DEDENT> gobject.idle_add(self.progressbar.set_text, text) <NEW_LINE> time.sleep(0.1) <NEW_LINE> <DEDENT> self.quit()
|
Run method, this is the code that runs while thread is alive.
|
625941bb596a897236089990
|
def draw_ground(ground,curve,hor_line): <NEW_LINE> <INDENT> ground.begin_fill() <NEW_LINE> ground.speed(11) <NEW_LINE> ground.hideturtle() <NEW_LINE> ground.color(124,252,0) <NEW_LINE> ground.penup() <NEW_LINE> ground.goto(-350,hor_line) <NEW_LINE> ground.lt(370) <NEW_LINE> ground.pendown() <NEW_LINE> ground.pensize(10) <NEW_LINE> ground.circle(-(curve),360,curve//10) <NEW_LINE> ground.end_fill()
|
Draws ground in the color of grass.
:param ground: Is the turtle that is going to be used.
:param curve: The curvature of the horizon line, the more it is the less curved it is.
:param hor_line: The vertical locus of the horizon.
:return:
|
625941bb4f6381625f114903
|
def checkTemplate(fName, options): <NEW_LINE> <INDENT> start, end = getIndex(fName, options) <NEW_LINE> if start == None or end == None: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> exp = None <NEW_LINE> if options.allow: <NEW_LINE> <INDENT> exp = re.compile(options.allow) <NEW_LINE> <DEDENT> handler = open(fName, 'r') <NEW_LINE> lines = handler.readlines() <NEW_LINE> add = 0 <NEW_LINE> for index, line in enumerate(lines): <NEW_LINE> <INDENT> if exp and checkRe(exp, line) and index < start: <NEW_LINE> <INDENT> add += 1 <NEW_LINE> <DEDENT> elif options.blank and line == os.linesep and (index < start or index > end): <NEW_LINE> <INDENT> add += 1 <NEW_LINE> <DEDENT> <DEDENT> if len(lines) - 1 == end - start + add: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return False
|
checks if the file
is a proper template or not
file should only contain a single signature
if allow option is set, allows allowed line
extra lines are allowed before or after signature
|
625941bb0a50d4780f666d55
|
def getQualifiedURL(uri=None): <NEW_LINE> <INDENT> schema, stdport = (('http', '80'), ('https', '443'))[isSSL()] <NEW_LINE> host = os.environ.get('HTTP_HOST', '') <NEW_LINE> if not host: <NEW_LINE> <INDENT> host = os.environ.get('SERVER_NAME', 'localhost') <NEW_LINE> port = os.environ.get('SERVER_PORT', '80') <NEW_LINE> if port != stdport: host = host + ":" + port <NEW_LINE> <DEDENT> result = "%s://%s" % (schema, host) <NEW_LINE> if uri: result = result + uri <NEW_LINE> return result
|
Return a full URL starting with schema, servername, and port.
Specifying uri causes it to be appended to the server root URL
(uri must then start with a slash).
|
625941bb3539df3088e2e211
|
def test_get_http_proxy_osx(self): <NEW_LINE> <INDENT> proxy.__grains__['os'] = 'Darwin' <NEW_LINE> mock = MagicMock(return_value='Enabled: Yes\nServer: 192.168.0.1\nPort: 3128\nAuthenticated Proxy Enabled: 0') <NEW_LINE> expected = { 'enabled': True, 'server': '192.168.0.1', 'port': '3128' } <NEW_LINE> with patch.dict(proxy.__salt__, {'cmd.run': mock}): <NEW_LINE> <INDENT> out = proxy.get_http_proxy() <NEW_LINE> mock.assert_called_once_with('networksetup -getwebproxy Ethernet') <NEW_LINE> self.assertEqual(expected, out)
|
Test to make sure that we correctly get the current proxy info
on OSX
|
625941bb8a43f66fc4b53f2e
|
def prepare_data_for_extraction(self, billboard_data, data_type): <NEW_LINE> <INDENT> for chart_name in billboard_data: <NEW_LINE> <INDENT> for key, data in billboard_data[chart_name].items(): <NEW_LINE> <INDENT> getattr(self.ranking_data, data_type)[key] = {BILLBOARD_KEY: data, YOUTUBE_KEY: {}}
|
Use the data extracted from billboard as a starting point to create the generic dict
that will save all of the extracted data on the songs and albums using the billboard data
and the service name as keys
:param billboard_data: the data extracted from billboard
:param data_type: the type of the data (songs, albums etc...)
|
625941bbcc40096d61595818
|
def empty_buckets(self): <NEW_LINE> <INDENT> amount_empty = 0 <NEW_LINE> for bucket in self._buckets: <NEW_LINE> <INDENT> if bucket.head is None: <NEW_LINE> <INDENT> amount_empty += 1 <NEW_LINE> <DEDENT> <DEDENT> return amount_empty
|
Returns:
The number of empty buckets in the table
|
625941bb94891a1f4081b96e
|
def above(self, elevation): <NEW_LINE> <INDENT> return self.prune(lambda ts: self.at(ts)['elevation'] >= elevation)
|
Function to return portion of transit above a certain elevation.
|
625941bb23e79379d52ee42d
|
def matrice_fila(a): <NEW_LINE> <INDENT> initial = 0 <NEW_LINE> nr = a.count('\n') <NEW_LINE> lista = [] <NEW_LINE> for i in range(nr): <NEW_LINE> <INDENT> final = a.find('\n', initial) <NEW_LINE> lin = a[initial:final] <NEW_LINE> lin_lista = lin.split(',') <NEW_LINE> lista.append(lin_lista) <NEW_LINE> initial = final + 1 <NEW_LINE> <DEDENT> L = [] <NEW_LINE> M = [] <NEW_LINE> for item in lista: <NEW_LINE> <INDENT> if str(item[9]).isupper(): <NEW_LINE> <INDENT> if M != []: <NEW_LINE> <INDENT> L.append(M) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> M = [] <NEW_LINE> M.append(item) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> M.append(item) <NEW_LINE> <DEDENT> <DEDENT> M.append(item) <NEW_LINE> L.append(M) <NEW_LINE> return L
|
Transforma un text intr-o lista de linii - MATRICE.
|
625941bb8a349b6b435e803a
|
def test_progress_requires_validate(view_client, test_wizard_view): <NEW_LINE> <INDENT> view = test_wizard_view.as_view() <NEW_LINE> data = { 'test_wizard_view-current_step': '0', '0-number': '1', } <NEW_LINE> response = view_client.post(view, data) <NEW_LINE> assert response.status_code == 200 <NEW_LINE> data = { 'test_wizard_view-current_step': '1', '1-choice': 'test', } <NEW_LINE> response = view_client.post(view, data) <NEW_LINE> assert response.status_code == 200 <NEW_LINE> data = { 'wizard_store_and_goto_step': '1', 'test_wizard_view-current_step': '2', } <NEW_LINE> response = view_client.post(view, data) <NEW_LINE> assert response.status_code == 200 <NEW_LINE> resp_view = response.context_data['view'] <NEW_LINE> assert resp_view.progress == [ ('0', resp_view.get_form_list()['0'], 'reachable_with_validate'), ('1', resp_view.get_form_list()['1'], 'active'), ('2', resp_view.get_form_list()['2'], 'reachable_with_validate'), ]
|
Check if `reachable_with_validate` if the user navigates back
|
625941bbe1aae11d1e749b7b
|
def inherits_from(obj, a_class): <NEW_LINE> <INDENT> if type(obj) != a_class: <NEW_LINE> <INDENT> return issubclass(type(obj), a_class) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False
|
inherits_from - This function will check if object class is an instance
of a class that inherited (directly or indirectly) from a specified class
Args:
obj - This is the object that we're comparing to see if instance
a_class - This is the specific class we're checking to see if inherited
from
Return:
The checking of the type of obj will return True, if not Return False
|
625941bb5fc7496912cc384b
|
def st_min(data, d_var, data_out, mydate): <NEW_LINE> <INDENT> tempos=[te for te in data_out[d_var].keys() if 'min' in te] <NEW_LINE> data_min=np.amin(data, axis=0) <NEW_LINE> for tempo in tempos: <NEW_LINE> <INDENT> if 'min_per_d' == tempo: <NEW_LINE> <INDENT> my_k=mydate.strftime('%m%d') <NEW_LINE> <DEDENT> if 'min_per_m'==tempo: <NEW_LINE> <INDENT> my_k=mydate.strftime('%m') <NEW_LINE> <DEDENT> if 'min_per_y'==tempo: <NEW_LINE> <INDENT> my_k=mydate.strftime('%Y') <NEW_LINE> <DEDENT> if 'min_per_h'==tempo: <NEW_LINE> <INDENT> for my_h in range(24): <NEW_LINE> <INDENT> my_k=mydate.strftime('%m')+"{:02}".format(my_h) <NEW_LINE> np.fmin(data_out[d_var]['min_per_h'][my_k], data[my_h], out=data_out[d_var]['min_per_h'][my_k], ) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> np.fmin(data_out[d_var][tempo][my_k], data_min, out=data_out[d_var][tempo][my_k], )
|
Calcula mínimos para cada intervalo, para todas las temporalidades
data: arreglo de datos a procesar
d_var:variable
data_out: diccionario de salida
mydate: fecha de los datos
|
625941bb4428ac0f6e5ba6b8
|
def get_muxed_solar_signal(): <NEW_LINE> <INDENT> results = [] <NEW_LINE> multiplexer = cantools.database.can.Signal( name='SOLAR_SLAVE_INDEX', start=0, length=16, is_multiplexer=True ) <NEW_LINE> results.append(multiplexer) <NEW_LINE> for i in range(NUM_SOLAR_SLAVE_MODULES): <NEW_LINE> <INDENT> voltage = cantools.database.can.Signal( name='MODULE_VOLTAGE_{0:03d}'.format(i), start=16, length=16, multiplexer_ids=[i], multiplexer_signal=results[0], is_float=False, decimal=None ) <NEW_LINE> current = cantools.database.can.Signal( name='MODULE_CURRENT_{0:03d}'.format(i), start=32, length=16, byte_order='little_endian', multiplexer_ids=[i], multiplexer_signal=results[0], is_float=False, decimal=None ) <NEW_LINE> temperature = cantools.database.can.Signal( name='MODULE_TEMP_{0:03d}'.format(i), start=48, length=16, byte_order='little_endian', multiplexer_ids=[i], multiplexer_signal=results[0], is_float=False, decimal=None ) <NEW_LINE> results.append(voltage) <NEW_LINE> results.append(current) <NEW_LINE> results.append(temperature) <NEW_LINE> <DEDENT> return results
|
Get the MUXed signals for the Solar Sense Data message.
|
625941bb925a0f43d2549d3b
|
def check_tw_name(name): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> client = twitter.Api() <NEW_LINE> user = client.GetUserTimeline(name) <NEW_LINE> <DEDENT> except Twitter.TwitterError: <NEW_LINE> <INDENT> pass
|
Checks Twitter API for a legit username. Throws a friendly error if not found
|
625941bb090684286d50eba8
|
@udf <NEW_LINE> def _ibis_sqlite_regex_replace(string, pattern, replacement): <NEW_LINE> <INDENT> return re.sub(pattern, replacement, string)
|
Replace occurences of `pattern` in `string` with `replacement`.
Parameters
----------
string : str
pattern : str
replacement : str
Returns
-------
result : str
|
625941bb0a50d4780f666d56
|
def test_future_question(self): <NEW_LINE> <INDENT> future_question = create_question(question_text='Future question.', days=5,choice1="Yes") <NEW_LINE> url = reverse('polls:detail', args=(future_question.id,)) <NEW_LINE> response = self.client.get(url) <NEW_LINE> self.assertEqual(response.status_code, 404)
|
The detail view of a question with a pub_date in the future
returns a 404 not found.
|
625941bb8e05c05ec3eea238
|
def matchLine(path, line_number, text): <NEW_LINE> <INDENT> datafile = open(path) <NEW_LINE> line_file = datafile.readline() <NEW_LINE> line_file = line_file.rstrip() <NEW_LINE> line_no = 1 <NEW_LINE> while line_file != "": <NEW_LINE> <INDENT> if line_no == line_number: <NEW_LINE> <INDENT> if line_file == text: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> line_no = line_no+1 <NEW_LINE> line_file = datafile.readline() <NEW_LINE> line_file = line_file.rstrip()
|
path = used for defining the file to be checked
line_number = used to identify the line that will be checked
text = string containing the text to match
|
625941bbdc8b845886cb53fb
|
def get_parse_dates(file): <NEW_LINE> <INDENT> parse_dates = [ 'click_time', 'attributed_time' ] <NEW_LINE> if 'test' in file: <NEW_LINE> <INDENT> parse_dates.remove('attributed_time') <NEW_LINE> <DEDENT> return parse_dates
|
Return a list of columns for which dates have to be parsed.
|
625941bb0383005118ecf4ab
|
def clean_up_object(new): <NEW_LINE> <INDENT> if not new.get_parent() == parent: <NEW_LINE> <INDENT> new.set_parent(parent) <NEW_LINE> <DEDENT> cmds.reorder(str(new), f=True) <NEW_LINE> cmds.reorder(str(new), r=outliner_index) <NEW_LINE> new.transform.set_pivot(pivot) <NEW_LINE> new.attr['rotate'] = list(transforms.rotate) <NEW_LINE> new.attr['scale'] = list(transforms.scale) <NEW_LINE> return cmds.rename(str(new), name.split('|')[-1])
|
Cleans up after `polyUnite` / `polySeparate`
|
625941bbec188e330fd5a66c
|
def test_post_calls_process_location(self): <NEW_LINE> <INDENT> self.set_state('requested_location') <NEW_LINE> self.post({'RecordingUrl': 'test'}) <NEW_LINE> self.assertState('processed_location')
|
calls process_location on a ClientCall POST
|
625941bb7d43ff24873a2b64
|
def test_get_user(self): <NEW_LINE> <INDENT> resp = self.client.get('/api/v1.0/user/2') <NEW_LINE> j = json.loads(resp.get_data(as_text=True)) <NEW_LINE> self.assertTrue(j['username'] == 'Admin')
|
测试获取指定用户信息
|
625941bb3eb6a72ae02ec39b
|
def create_drl_transform(ebs): <NEW_LINE> <INDENT> n_el = ebs.shape[0] <NEW_LINE> mtx_drl = nm.tile(nm.eye(24, dtype=nm.float64), (n_el, 1, 1)) <NEW_LINE> for ii in range(4): <NEW_LINE> <INDENT> nh = ebs[:, ii, -1, :] <NEW_LINE> mtx = nm.c_[nm.c_[1.0 - nh[:, 0]**2, -nh[:, 0]*nh[:, 1], -nh[:, 0]*nh[:, 2]], nm.c_[-nh[:, 0]*nh[:, 1], 1.0 - nh[:, 1]**2, -nh[:, 1]*nh[:, 2]], nh].reshape(n_el, 3, 3) <NEW_LINE> mtx_drl[:, 12+ii:12+ii+9:4, 12+ii:12+ii+9:4] = mtx <NEW_LINE> <DEDENT> return mtx_drl
|
Create the transformation matrix for locking of the drilling rotations.
|
625941bbb5575c28eb68dec5
|
def clear(self, canvas): <NEW_LINE> <INDENT> childWidgets = [c for c in canvas.children() if c.isWidgetType()] <NEW_LINE> for child in childWidgets: <NEW_LINE> <INDENT> child.setParent(None) <NEW_LINE> <DEDENT> canvas.setMinimumSize(600, 400)
|
Clears the canvas of its contents so that it is blank.
|
625941bb24f1403a92600a30
|
def update(self, i, val): <NEW_LINE> <INDENT> aggregated_diff = val - self.nums[i] <NEW_LINE> self.nums[i] = val <NEW_LINE> if i in self.index_to_diff: <NEW_LINE> <INDENT> aggregated_diff = self.index_to_diff[i] + aggregated_diff <NEW_LINE> if aggregated_diff == 0: <NEW_LINE> <INDENT> del self.index_to_diff[i] <NEW_LINE> return <NEW_LINE> <DEDENT> <DEDENT> self.index_to_diff[i] = aggregated_diff
|
:type i: int
:type val: int
:rtype: void
|
625941bb167d2b6e31218a5d
|
def itb_bayesian_fatality_rates(): <NEW_LINE> <INDENT> fatality_rate = { 2: 0.0, 3: 0.0, 4: 0.0, 5: 0.0, 6: 3.41733122522e-05, 7: 0.000387804494226, 8: 0.001851451786, 9: 0.00787294191661, 10: 0.0314512157378, } <NEW_LINE> return fatality_rate
|
ITB fatality model based on a Bayesian approach.
This model was developed by Institut Teknologi Bandung (ITB) and
implemented by Dr. Hyeuk Ryu, Geoscience Australia.
Reference:
An Empirical Fatality Model for Indonesia Based on a Bayesian Approach
by W. Sengara, M. Suarjana, M.A. Yulman, H. Ghasemi, and H. Ryu
submitted for Journal of the Geological Society
:returns: Fatality rates as medians
It comes worden_berngamma_log_fat_rate_inasafe_10.csv in InaSAFE 3.
:rtype: dict
|
625941bb26238365f5f0ed31
|
def read_video_timestamps(filename): <NEW_LINE> <INDENT> _check_av_available() <NEW_LINE> container = av.open(filename, metadata_errors='ignore') <NEW_LINE> video_frames = [] <NEW_LINE> video_fps = None <NEW_LINE> if container.streams.video: <NEW_LINE> <INDENT> if _can_read_timestamps_from_packets(container): <NEW_LINE> <INDENT> video_frames = [x for x in container.demux(video=0) if x.pts is not None] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> video_frames = _read_from_stream(container, 0, float("inf"), container.streams.video[0], {'video': 0}) <NEW_LINE> <DEDENT> video_fps = float(container.streams.video[0].average_rate) <NEW_LINE> <DEDENT> container.close() <NEW_LINE> return [x.pts for x in video_frames], video_fps
|
List the video frames timestamps.
Note that the function decodes the whole video frame-by-frame.
Parameters
----------
filename : str
path to the video file
Returns
-------
pts : List[int]
presentation timestamps for each one of the frames in the video.
video_fps : int
the frame rate for the video
|
625941bbd58c6744b4257b28
|
def test_user_authenticated(self): <NEW_LINE> <INDENT> self.client.logout() <NEW_LINE> self.client.login(username='usuario', password='123456') <NEW_LINE> response = self.client.get(reverse('login'), follow = True) <NEW_LINE> self.assertEqual(response.status_code, 200) <NEW_LINE> self.assertRedirects(response, reverse('profile')) <NEW_LINE> self.assertTemplateUsed(response, 'account/profile.html') <NEW_LINE> self.assertEqual(type(response.context['form']), UpdateForm)
|
Testa a pagina de login para usuario ja autenticado, o usuario deve ser redirecionado para a view profile.
|
625941bb099cdd3c635f0b23
|
def json_dump( obj, fp, **kwargs ): <NEW_LINE> <INDENT> json.dump(convert_to_dict(obj), fp, **kwargs)
|
Force use of unicode.
|
625941bb99fddb7c1c9de259
|
def square(x): <NEW_LINE> <INDENT> return tf.square(x)
|
Element-wise square.
# Arguments
x: Tensor or variable.
# Returns
A tensor.
|
625941bb44b2445a33931f66
|
def finish(self): <NEW_LINE> <INDENT> self.log.info("Cleanup for %s", self.name) <NEW_LINE> os._exit(0)
|
Clean up code when process exits
|
625941bb21bff66bcd68481c
|
def test_stop(self): <NEW_LINE> <INDENT> pass
|
Test case for stop
Stop specified input in all nodes
|
625941bb23849d37ff7b2f59
|
def get_queryable_dept(self): <NEW_LINE> <INDENT> return self.get_random_value(DEPT)
|
Returns a valid dept to be queried
|
625941bbadb09d7d5db6c659
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.