code
stringlengths 4
4.48k
| docstring
stringlengths 1
6.45k
| _id
stringlengths 24
24
|
|---|---|---|
def draw(self, x, y, dc, styler): <NEW_LINE> <INDENT> device = self.device <NEW_LINE> for j1, j2, x1, y1, child in self.iter_boxes(0, x, y): <NEW_LINE> <INDENT> r = Rect(x1, y1, x1+child.width, y1+child.height) <NEW_LINE> if device.intersects(dc, r): <NEW_LINE> <INDENT> child.draw(x1, y1, dc, styler)
|
Draws box and all child boxes at origin (x, y).
|
625941bc71ff763f4b54955d
|
def has_attribute(self, key): <NEW_LINE> <INDENT> return self.get_attribute(key) is not None
|
True if self.attributes includes key.
|
625941bc3317a56b86939b3f
|
def checkcohrand(sts,sta): <NEW_LINE> <INDENT> Cp=[] <NEW_LINE> stsi=sts.copy() <NEW_LINE> for k in range(0,100): <NEW_LINE> <INDENT> for tr in stsi: <NEW_LINE> <INDENT> tr.data=np.random.randn(tr.data.size) <NEW_LINE> <DEDENT> stsi.filter('bandpass',freqmin=2,freqmax=30.) <NEW_LINE> vl = checkcoh(stsi,sta) <NEW_LINE> Cp.append(vl['Cpstat'][0]) <NEW_LINE> <DEDENT> Cp = np.array(Cp) <NEW_LINE> perr=vl['stds'][0] <NEW_LINE> return Cp,perr
|
:param sts: data
:param sta: templates
|
625941bc3eb6a72ae02ec3ab
|
def sign_body(body, apikey="12345678"): <NEW_LINE> <INDENT> a = ["".join(i) for i in body.items() if i[1] and i[0] != "sign"] <NEW_LINE> strA = "".join(sorted(a)) <NEW_LINE> strsigntemp = strA + apikey <NEW_LINE> def jiamimd5(src): <NEW_LINE> <INDENT> m = hashlib.md5() <NEW_LINE> m.update(src.encode('utf-8')) <NEW_LINE> return m.hexdigest() <NEW_LINE> <DEDENT> sign = jiamimd5(strsigntemp.lower()) <NEW_LINE> return sign
|
请求body sign签名
|
625941bc9b70327d1c4e0caa
|
def render_html_form(action_url, selections1, selections2, time_group='days', select1=None, select2=None): <NEW_LINE> <INDENT> return get_lookup().get_template('form_data.mako').render( selections1=selections1, selections2=selections2, time_group=time_group, select1=select1, select2=select2, action_url=action_url )
|
Render a HTML form that can be used to query the data in bitmapist.
:param :action_url The action URL of the <form> element. The form will always to a GET request.
:param :selections1 A list of selections that the user can filter by, example `[ ('Are Active', 'active'), ]`
:param :selections2 A list of selections that the user can filter by, example `[ ('Played song', 'song:play'), ]`
:param :time_group What data should be clustred by, can be `days`, `weeks` or `months`
:param :select1 What is the current selected filter (first)
:param :select2 What is the current selected filter (second)
|
625941bc85dfad0860c3ad30
|
def parse_sensor_list (sensors, convert_reading, convert_number, sensor_type): <NEW_LINE> <INDENT> status_key = sensor_type + "_Status" <NEW_LINE> number_key = sensor_type + "_Number" <NEW_LINE> reading_key = sensor_type + "_Reading" <NEW_LINE> parsed = [None] * len (sensors) <NEW_LINE> for i, sensor in sensors.items (): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> idx = int (i) - 1 <NEW_LINE> status = sensor.pop (status_key, None) <NEW_LINE> if (status and (status != "ns")): <NEW_LINE> <INDENT> sensor["Health"] = enums.Health (status, convert = True) <NEW_LINE> <DEDENT> if (convert_number and (number_key in sensor)): <NEW_LINE> <INDENT> sensor[number_key] = int (sensor[number_key][:-1], 16) <NEW_LINE> <DEDENT> if (reading_key in sensor): <NEW_LINE> <INDENT> reading = sensor[reading_key] <NEW_LINE> if (reading == "Disabled"): <NEW_LINE> <INDENT> sensor["State"] = enums.State ("Disabled") <NEW_LINE> del sensor[reading_key] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> sensor["State"] = enums.State ("Enabled") <NEW_LINE> if ((not reading) or (reading == "No Reading")): <NEW_LINE> <INDENT> del sensor[reading_key] <NEW_LINE> <DEDENT> elif (convert_reading): <NEW_LINE> <INDENT> reading = view_helper.extract_number (reading) <NEW_LINE> if (reading): <NEW_LINE> <INDENT> sensor[reading_key] = reading <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> del sensor[reading_key] <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> parsed[idx] = sensor <NEW_LINE> del sensors[i] <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> return filter (None, parsed)
|
Parse the dictionary list of sensor data and convert it into an array used by the REST template.
:param sensors: The dictionary list of sensor data to parse. As the sensor data is parsed, the
information will be removed from this dictionary.
:param convert_reading: A flag indicating if the sensor reading should be converted to a numeber
or if the raw string should be retained.
:param convert_number: A flag indicating if the sensor number string should be converted to an
integer or if the raw string should be retained.
:param sensor_type: The type of sensor information being parsed.
:return An array containing the parsed sensor data.
|
625941bcc4546d3d9de72908
|
def test_get_artists_fail(self): <NEW_LINE> <INDENT> artists = self.handle.get_artists(None) <NEW_LINE> self.assertIsNone(artists)
|
Try to get artists when no tablatures has been added.
|
625941bcfbf16365ca6f6094
|
def dispatch(self, *args, **kwargs): <NEW_LINE> <INDENT> self.object = self.get_object() <NEW_LINE> if getattr(perms, self.object.__class__.__name__.lower())(self.request.user, getattr(self, 'permission', 'view'), self.object): <NEW_LINE> <INDENT> return super(ProblemMixin, self).dispatch(*args, **kwargs) <NEW_LINE> <DEDENT> raise PermissionDenied
|
Problem permission check.
|
625941bc16aa5153ce36234f
|
@app.route('/add', methods=['GET', 'POST']) <NEW_LINE> def add_item(): <NEW_LINE> <INDENT> if logged_in_user() is None: <NEW_LINE> <INDENT> return redirect(url_for('login')) <NEW_LINE> <DEDENT> if request.method == 'POST': <NEW_LINE> <INDENT> title = request.form['item_name'] <NEW_LINE> description = request.form['description'] <NEW_LINE> category = Category.query.get(int(request.form['category'])) <NEW_LINE> a = Item(title, description, category, logged_in_user()) <NEW_LINE> db.session.add(a) <NEW_LINE> db.session.commit() <NEW_LINE> return redirect(HOME_URI) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> categories = Category.query.all() <NEW_LINE> return render_template('add_item_form.html', categories=categories)
|
Shows the 'add item' form and processes/saves submissions.
:return: A Flask view
|
625941bc7d847024c06be190
|
def __init__(self, *args): <NEW_LINE> <INDENT> _itkVectorContainerPython.vectoritkPointD3_swiginit(self,_itkVectorContainerPython.new_vectoritkPointD3(*args))
|
__init__(self) -> vectoritkPointD3
__init__(self, vectoritkPointD3 arg0) -> vectoritkPointD3
__init__(self, size_type size) -> vectoritkPointD3
__init__(self, size_type size, value_type value) -> vectoritkPointD3
|
625941bc091ae35668666e3b
|
def set_station(self, station, signal): <NEW_LINE> <INDENT> self.station_status[station] = signal
|
Sets station [0,..., 7] to True or False (On | Off) in memory.
Use set_shift_register_values() to activate GPIO
|
625941bc9c8ee82313fbb64b
|
def error(update:Update, context: CallbackContext): <NEW_LINE> <INDENT> logger.warning('Update "%s" caused error "%s"', update, context.error) <NEW_LINE> with open('errorLog.txt','a') as outfile: <NEW_LINE> <INDENT> outfile.write('Update: '+str(update)+' caused error '+str(context.error)+ '\n')
|
Log Errors caused by Updates.
|
625941bc6fb2d068a760ef71
|
def regen_keys(): <NEW_LINE> <INDENT> for fn_ in os.listdir(__opts__['pki_dir']): <NEW_LINE> <INDENT> path = os.path.join(__opts__['pki_dir'], fn_) <NEW_LINE> try: <NEW_LINE> <INDENT> os.remove(path) <NEW_LINE> <DEDENT> except os.error: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> with salt.transport.client.ReqChannel.factory(__opts__) as channel: <NEW_LINE> <INDENT> log.debug('Recreating channel to force key regen')
|
Used to regenerate the minion keys.
CLI Example:
.. code-block:: bash
salt '*' saltutil.regen_keys
|
625941bc24f1403a92600a40
|
def calculate_test_score_only(student_answers, model_answers): <NEW_LINE> <INDENT> correct_answers = 0 <NEW_LINE> for question in student_answers['question_id']: <NEW_LINE> <INDENT> student_answer_to_question = student_answers[student_answers['question_id'] == question] <NEW_LINE> model_answers_to_question = model_answers[model_answers['question_id'] == question] <NEW_LINE> if not model_answers_to_question.empty: <NEW_LINE> <INDENT> if student_answer_to_question['answer_id'].iloc[0] in model_answers_to_question['id'].unique(): <NEW_LINE> <INDENT> correct_answers += 1 <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return correct_answers/student_answers.shape[0]
|
TODO
:param student_answers:
:param model_answers:
:return:
|
625941bcb5575c28eb68ded5
|
def _wait_for_env_operation_finish(self, eb_client, env_name, original_request_id, pending_status, expected_health, operation_name, action_name, wait_timeout, poll_delay, include_deleted = 'false', initial_delay = 0, ): <NEW_LINE> <INDENT> if pending_status is None and expected_health is None: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> prompt.action(BaseOpMessage.WaitForEnv.format(env_name, action_name)) <NEW_LINE> prompt.info(BaseOpMessage.UserCanInterrupt) <NEW_LINE> _time.sleep(initial_delay) <NEW_LINE> polling_start_time = _time.time() <NEW_LINE> event_start_time = None if original_request_id is not None else misc.unixtime_to_utc(_time.time()) <NEW_LINE> while _time.time() - polling_start_time < wait_timeout: <NEW_LINE> <INDENT> env_response = eb_client.describe_environments(environment_names = env_name, include_deleted = include_deleted) <NEW_LINE> if len(env_response.result) < 1: <NEW_LINE> <INDENT> raise EnvironmentNotExistError(BaseOpMessage.EnvNotExist.format(env_name)) <NEW_LINE> <DEDENT> if pending_status is None: <NEW_LINE> <INDENT> if expected_health is not None and env_response.result[0].health.lower() == expected_health.lower(): <NEW_LINE> <INDENT> break; <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> if env_response.result[0].status.lower() != pending_status.lower(): <NEW_LINE> <INDENT> if expected_health is None: <NEW_LINE> <INDENT> break; <NEW_LINE> <DEDENT> elif env_response.result[0].health.lower() == expected_health.lower(): <NEW_LINE> <INDENT> break; <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> log.info('Received response for DescribeEnvironemnts call.') <NEW_LINE> self._log_api_result(operation_name, 'DescribeEnvironments', env_response.result) <NEW_LINE> log.info('Retrieving events for Environment "{0}" after UTC time {1}.'. 
format(env_name, event_start_time)) <NEW_LINE> event_response = eb_client.describe_events(None, env_name, request_id = original_request_id, start_time = event_start_time) <NEW_LINE> self._log_api_result(operation_name, 'DescribeEvents', event_response.result) <NEW_LINE> if len(event_response.result) > 0: <NEW_LINE> <INDENT> event_response.result.reverse() <NEW_LINE> for event in event_response.result: <NEW_LINE> <INDENT> log.info('{0}\t{1}\t{2}'.format (event.event_date, event.severity, event.message)) <NEW_LINE> prompt.plain('{0}\t{1}\t{2}'.format (event.event_date, event.severity, event.message)) <NEW_LINE> event_start_time = misc.unixtime_to_utc(event.event_date_raw + 0.001) <NEW_LINE> <DEDENT> <DEDENT> _time.sleep(poll_delay) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> log.error('Breach timeout threshold of waiting environment {0}.'. format(action_name)) <NEW_LINE> <DEDENT> return env_response.result
|
Loop polling environment status while it is in specified pending_status
and/or health state, until status changes and/or health state meet expectation,
or reach wait_timeout threshold. While polling retrieve events related to
specified request_id or all recent events if not specified.
|
625941bcd164cc6175782c24
|
def test_cleanme(self): <NEW_LINE> <INDENT> temp_dir = self.mkdir(self.tmpdir, 'temp') <NEW_LINE> bin_dir = self.mkdir(self.tmpdir, 'bin') <NEW_LINE> source_dir = self.mkdir(self.tmpdir, 'source') <NEW_LINE> build_rules_py = os.path.join(self.tmpdir, 'build_rules.py') <NEW_LINE> save_text_file(build_rules_py, [ _IMPORT_BURGER, _DEF_CLEAN, '\tburger.clean_directories(working_directory, ("temp", "bin"))', _RETURN_ZERO] ) <NEW_LINE> self.assertTrue(os.path.isfile(build_rules_py)) <NEW_LINE> makeprojects.clean(self.tmpdir) <NEW_LINE> self.assertFalse(os.path.isdir(temp_dir)) <NEW_LINE> self.assertFalse(os.path.isdir(bin_dir)) <NEW_LINE> self.assertTrue(os.path.isdir(source_dir))
|
Test to see if cleanme loads build_rules.py.
|
625941bcdc8b845886cb540b
|
def get_log_conditional_probability_gradient(self, node, value=None): <NEW_LINE> <INDENT> node = self.get_node(node) <NEW_LINE> gradient = node.zeros() <NEW_LINE> print_debug(" summing over dependencies: ") <NEW_LINE> for dependence in self.get_node_dependencies(node): <NEW_LINE> <INDENT> print_debug(" - dependence: %s" % name(dependence)) <NEW_LINE> if not dependence.has_log_conditional_probability_gradient_node(node): <NEW_LINE> <INDENT> raise ( "Dependence '%s' does not have a method to compute the gradient of the log probability of the conditional probability of '%s' ( -> '%s'). " % ( dependence.get_name(), name(node), dependence.get_variable_name_from_node(node))) <NEW_LINE> <DEDENT> <DEDENT> for dependence in self.get_node_dependencies(node): <NEW_LINE> <INDENT> gradient += (dependence.get_log_conditional_probability_gradient_node(node, value)).reshape(gradient.shape) <NEW_LINE> <DEDENT> return gradient
|
Returns the gradient of the log of the conditional probability of the given node.
|
625941bc627d3e7fe0d68d25
|
@contextlib.contextmanager <NEW_LINE> def no_internet(verbose=False): <NEW_LINE> <INDENT> already_disabled = INTERNET_OFF <NEW_LINE> turn_off_internet(verbose=verbose) <NEW_LINE> try: <NEW_LINE> <INDENT> yield <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> if not already_disabled: <NEW_LINE> <INDENT> turn_on_internet(verbose=verbose)
|
Context manager to temporarily disable internet access (if not already
disabled). If it was already disabled before entering the context manager
(i.e. `turn_off_internet` was called previously) then this is a no-op and
leaves internet access disabled until a manual call to `turn_on_internet`.
|
625941bc5166f23b2e1a5030
|
def make_sigma(S_R, S_Rprime): <NEW_LINE> <INDENT> sigma_redundant = [] <NEW_LINE> for b1 in S_R: <NEW_LINE> <INDENT> for b2 in S_Rprime: <NEW_LINE> <INDENT> if b1[0] in b2: <NEW_LINE> <INDENT> sigma_redundant.append((b1, b2)) <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> sigma = remove_redundant_items(sigma_redundant) <NEW_LINE> sigma.sort() <NEW_LINE> return sigma
|
Given quotient set of refining relation and quotient set of relation that is refined, make mapping sigma between them
|
625941bc7b25080760e39332
|
def get_sig_coverage(af_ip, af_api_key, sample_data, hash_counters): <NEW_LINE> <INDENT> print('Searching Autofocus for current signature coverage...') <NEW_LINE> search_values = {"apiKey": af_api_key, "coverage": 'true', "sections": ["coverage"], } <NEW_LINE> headers = {"Content-Type": "application/json"} <NEW_LINE> hashvalue = sample_data['sha256hash'] <NEW_LINE> search_url = f'https://{af_ip}/api/v1.0/sample/{hashvalue}/analysis' <NEW_LINE> try: <NEW_LINE> <INDENT> search = requests.post(search_url, headers=headers, data=json.dumps(search_values)) <NEW_LINE> search.raise_for_status() <NEW_LINE> <DEDENT> except requests.exceptions.HTTPError: <NEW_LINE> <INDENT> print(search) <NEW_LINE> print(search.text) <NEW_LINE> print('\nCorrect errors and rerun the application\n') <NEW_LINE> sys.exit() <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> print(e) <NEW_LINE> <DEDENT> results_analysis = {} <NEW_LINE> results_analysis = json.loads(search.text) <NEW_LINE> sample_data['dns_sig'] = results_analysis['coverage']['dns_sig'] <NEW_LINE> sample_data['wf_av_sig'] = results_analysis['coverage']['wf_av_sig'] <NEW_LINE> sample_data['fileurl_sig'] = results_analysis['coverage']['fileurl_sig'] <NEW_LINE> if sample_data['verdict'] == 'malware': <NEW_LINE> <INDENT> sig_search = json.dumps(sample_data) <NEW_LINE> if sig_search.find('true') != -1: <NEW_LINE> <INDENT> hash_counters['mal_active_sig'] += 1 <NEW_LINE> <DEDENT> elif sig_search.find('true') == -1 and sig_search.find('false') != -1: <NEW_LINE> <INDENT> hash_counters['mal_inactive_sig'] += 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> hash_counters['mal_no_sig'] += 1 <NEW_LINE> <DEDENT> <DEDENT> print(f"get_sig_coverage() returns {sample_data}") <NEW_LINE> return sample_data, hash_counters
|
for sample hits, second query to find signature coverage in sample analysis
|
625941bc30c21e258bdfa372
|
def create_constraint_str(self, param_prefix=""): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return "{0:f} < {1} < {2:f}".format(self.width[0], param_prefix + "Width", self.width[2]) <NEW_LINE> <DEDENT> except TypeError: <NEW_LINE> <INDENT> return ""
|
Returns a constraints string for the Fit algorithm
:param param_prefix: An optional prefix for the parameter name
|
625941bcab23a570cc250057
|
def Bar(xs, ys, **options): <NEW_LINE> <INDENT> options = _UnderrideColor(options) <NEW_LINE> options = _Underride(options, linewidth=0, alpha=0.6) <NEW_LINE> pyplot.bar(xs, ys, **options)
|
Plots a line.
:param xs: sequence of x value
:param ys: sequence of y value
:param options: keyword args passed to pyplot.bar
|
625941bc99cbb53fe6792abe
|
def test_06_degressive_limit(self): <NEW_LINE> <INDENT> asset = self.asset_model.create({ 'name': 'test asset', 'profile_id': self.ref('account_asset_management.' 'account_asset_profile_car_5Y'), 'purchase_value': 1000, 'salvage_value': 100, 'date_start': time.strftime('%Y-07-07'), 'method_time': 'year', 'method': 'degr-limit', 'method_progress_factor': 0.40, 'method_number': 5, 'method_period': 'year', 'prorata': False, }) <NEW_LINE> asset.compute_depreciation_board() <NEW_LINE> asset.refresh() <NEW_LINE> self.assertEqual(len(asset.depreciation_line_ids), 6) <NEW_LINE> self.assertAlmostEqual(asset.depreciation_line_ids[1].amount, 400.00, places=2) <NEW_LINE> self.assertAlmostEqual(asset.depreciation_line_ids[2].amount, 240.00, places=2) <NEW_LINE> self.assertAlmostEqual(asset.depreciation_line_ids[3].amount, 144.00, places=2) <NEW_LINE> self.assertAlmostEqual(asset.depreciation_line_ids[4].amount, 86.40, places=2) <NEW_LINE> self.assertAlmostEqual(asset.depreciation_line_ids[5].amount, 29.60, places=2)
|
Degressive with annual depreciation.
|
625941bcff9c53063f47c0cd
|
def list_clusters(self) -> List[license_proto.ClusterStatus]: <NEW_LINE> <INDENT> return self._req(Service.LICENSE, "ListClusters").clusters
|
List clusters registered with the license service.
Returns
-------
List[license_proto.ClusterStatus]
A list of protobuf objects that return info on a cluster.
|
625941bc3c8af77a43ae3675
|
def get_activation(name_list): <NEW_LINE> <INDENT> activation = [] <NEW_LINE> for n_ in name_list: <NEW_LINE> <INDENT> if n_ is None: <NEW_LINE> <INDENT> activation.append(None) <NEW_LINE> <DEDENT> elif n_ == 'relu': <NEW_LINE> <INDENT> activation.append(tf.nn.relu) <NEW_LINE> <DEDENT> elif n_ == 'leaky_relu': <NEW_LINE> <INDENT> activation.append(tf.nn.leaky_relu) <NEW_LINE> <DEDENT> elif n_ == 'sigmoid': <NEW_LINE> <INDENT> activation.append(tf.sigmoid) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print("Unrecognized activation class: {}".format(n_)) <NEW_LINE> exit(1) <NEW_LINE> <DEDENT> <DEDENT> return activation
|
Converts list of activation names into tf activation function
Args:
name_list: list of string format names for the activation function
Returns:
activation: list of tf activation functions
|
625941bcc432627299f04b1b
|
def setUp(self): <NEW_LINE> <INDENT> super().setUp() <NEW_LINE> submission = sub_api.create_submission(STUDENT_ITEM, ANSWER) <NEW_LINE> training_api.on_start(submission['uuid']) <NEW_LINE> self.submission_uuid = submission['uuid']
|
Create a submission.
|
625941bc0fa83653e4656e94
|
def icc(graph, message): <NEW_LINE> <INDENT> m = len(message) <NEW_LINE> if graph.num < m: <NEW_LINE> <INDENT> sys.exit() <NEW_LINE> <DEDENT> g = gc(Graph(graph.num, graph.edge)) <NEW_LINE> for i in range(m, g.num): <NEW_LINE> <INDENT> message.append(0) <NEW_LINE> <DEDENT> for i in range(g.num): <NEW_LINE> <INDENT> if message[i] == 1: <NEW_LINE> <INDENT> nodes = g.get_neighbor_nodes(i) <NEW_LINE> for j in range(1, max(g.vertex) + 2): <NEW_LINE> <INDENT> if j == g.vertex[i]: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> flag = True <NEW_LINE> for k in nodes: <NEW_LINE> <INDENT> if j == g.vertex[k] and (k < i or message[k] == 0): <NEW_LINE> <INDENT> flag = False <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> if flag: <NEW_LINE> <INDENT> g.vertex[i] = j <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> return g
|
ICC Algorithm.
|
625941bca17c0f6771cbdf2a
|
def is_in_place(place): <NEW_LINE> <INDENT> return is_link(Is_in) & link_predicate(lambda link: link.where == place)
|
Return a link predicate that selects Is_in nodes for a given place.
|
625941bcad47b63b2c509e57
|
def Stop(self): <NEW_LINE> <INDENT> if self._running: <NEW_LINE> <INDENT> self.after_cancel(self._timer) <NEW_LINE> self._elapsedtime = time.time() - self._start <NEW_LINE> self._setTime(self._elapsedtime) <NEW_LINE> self._running = 0
|
Stop the timer, ignore if stopped.
|
625941bcde87d2750b85fc66
|
@singledispatch <NEW_LINE> def stationize(df, aggr='prom'): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> df = df.xs(aggr, 1, 'aggr') <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> stations = df.columns.get_level_values('station') <NEW_LINE> if len(stations.get_duplicates()) > 0: <NEW_LINE> <INDENT> df.columns = df.columns.get_level_values('sensor_code') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> df.columns = stations <NEW_LINE> <DEDENT> return df
|
Return a copy of a DataFrame with only station codes as column labels. If the resulting set of column lables is not unique (more than one sensor for the same variable at the same station), the returned copy has the ``sensor_code`` as column labels.
:param df: Input DataFrame with :class:`pandas.MultiIndex` in columns. If :obj:`str`, interpret it as the key to load from the ``.h5`` file specified as :attr:`data.CEAZAMet.station_data`.
:type df: :class:`~pandas.DataFrame` (or :obj:`str`)
:param aggr: If the input DataFrame has several ``aggr`` levels (e.g. ``prom``, ``min``, ``max``), return this one.
:type aggr: :obj:`str`
:returns: DataFrame with simple column index (containing station labels, or sensor codes in case the station index in not unique).
:rtype: :class:`~pandas.DataFrame`
|
625941bc377c676e91272082
|
def find_test_class(self, test_file_content): <NEW_LINE> <INDENT> match_classes = [ (m.group(1), m.end()) for m in re.finditer(r'\s?class\s+(\w+)\s?\(', test_file_content)] <NEW_LINE> if match_classes: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return [ (c, p) for (c, p) in match_classes if "Test" in c or "test" in c][-1] <NEW_LINE> <DEDENT> except IndexError: <NEW_LINE> <INDENT> return match_classes[-1]
|
Try to find the test class, return None if can't be found
|
625941bcbf627c535bc130ad
|
def test_invalid_checksum(self): <NEW_LINE> <INDENT> for test in INVALID_CHECKSUM: <NEW_LINE> <INDENT> self.assertRaises(ValueError, b32decode, test)
|
Test validation (failure) of invalid checksums.
|
625941bc01c39578d7e74d13
|
def find_checked_path(): <NEW_LINE> <INDENT> checked_path_cmd = "find " + os.path.join(ASCEND_ROOT_PATH, "ascenddk") + " -maxdepth 1 -mindepth 1 -type d -print" <NEW_LINE> ret = util.execute(checked_path_cmd, print_output_flag=True) <NEW_LINE> if ret[0] is False: <NEW_LINE> <INDENT> return False, [] <NEW_LINE> <DEDENT> found_path = [] <NEW_LINE> for each_path in ret[1]: <NEW_LINE> <INDENT> if each_path not in GLOBAL_IGNORE_PATH: <NEW_LINE> <INDENT> found_path.append(each_path) <NEW_LINE> <DEDENT> <DEDENT> return True, found_path
|
find static check base path
|
625941bca4f1c619b28aff17
|
def set_lights_rgb(hass, lights, rgb): <NEW_LINE> <INDENT> for light in lights: <NEW_LINE> <INDENT> if is_on(hass, light): <NEW_LINE> <INDENT> turn_on(hass, light, rgb_color=rgb, transition=30)
|
Set color of array of lights.
|
625941bcb7558d58953c4df1
|
def DescribeComputeEnvCreateInfo(self, request): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> params = request._serialize() <NEW_LINE> body = self.call("DescribeComputeEnvCreateInfo", params) <NEW_LINE> response = json.loads(body) <NEW_LINE> if "Error" not in response["Response"]: <NEW_LINE> <INDENT> model = models.DescribeComputeEnvCreateInfoResponse() <NEW_LINE> model._deserialize(response["Response"]) <NEW_LINE> return model <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> code = response["Response"]["Error"]["Code"] <NEW_LINE> message = response["Response"]["Error"]["Message"] <NEW_LINE> reqid = response["Response"]["RequestId"] <NEW_LINE> raise TencentCloudSDKException(code, message, reqid) <NEW_LINE> <DEDENT> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> if isinstance(e, TencentCloudSDKException): <NEW_LINE> <INDENT> raise <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise TencentCloudSDKException(e.message, e.message)
|
Views compute environment creation information.
:param request: Request instance for DescribeComputeEnvCreateInfo.
:type request: :class:`tencentcloud.batch.v20170312.models.DescribeComputeEnvCreateInfoRequest`
:rtype: :class:`tencentcloud.batch.v20170312.models.DescribeComputeEnvCreateInfoResponse`
|
625941bc50485f2cf553cc70
|
def check(path): <NEW_LINE> <INDENT> error_message = '`%s` does not appear to be a template repo.' <NEW_LINE> cwd = os.getcwd() <NEW_LINE> template_attr = path <NEW_LINE> if template_attr: <NEW_LINE> <INDENT> if not os.path.isdir(os.path.join(cwd, '.git')): <NEW_LINE> <INDENT> sys.exit(error_message % cwd) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> with open(os.path.join(cwd, template_attr)): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> except IOError: <NEW_LINE> <INDENT> sys.exit(error_message % cwd) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> if not os.path.isdir(os.path.join(cwd, '.git')): <NEW_LINE> <INDENT> sys.exit(error_message % cwd) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> with open(template_attr): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> except IOError: <NEW_LINE> <INDENT> sys.exit(error_message % cwd) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> if not cwd.endswith('/'): <NEW_LINE> <INDENT> cwd += '/' <NEW_LINE> <DEDENT> return cwd
|
Determine if a command that expects to execute inside a template
repo can continue. Checks for a .git directory and subsequently for
a template.yaml file either in the working directory or the
directory specified by the --template argument. If check passes,
returns path to template repo.
|
625941bc4a966d76dd550ee4
|
def check_set_attr(self, attr): <NEW_LINE> <INDENT> self.try_set_attr(attr) <NEW_LINE> return self.is_set_attr(attr)
|
Run try_set_attr() and return the result of is_set_attr(), i.e. True
or False. Most important shortcut method.
Examples
--------
::
def get_foo(self):
if self.check_set_attr('bar'):
return self.bar * 2
else:
return None
which is the same as ::
def get_foo(self):
self.try_set_attr('bar):
if self.is_set_attr('bar'):
return self.bar * 2
else:
return None
|
625941bc3d592f4c4ed1cf53
|
def textInteractionFlags(self): <NEW_LINE> <INDENT> pass
|
QGraphicsTextItem.textInteractionFlags() -> Qt.TextInteractionFlags
|
625941bc0383005118ecf4bc
|
def get_release_date(self): <NEW_LINE> <INDENT> return information.RELEASE_DATE
|
Retrieves the current base (plugin manager) release date.
@rtype: String
@return: The current base (plugin manager) release date.
|
625941bc097d151d1a222d33
|
def toDFA(self): <NEW_LINE> <INDENT> s0 = self.start <NEW_LINE> e_s0 = tuple(self.epsilon_closure(s0)) <NEW_LINE> stack = [e_s0] <NEW_LINE> tranistion = {} <NEW_LINE> alphabet = self.alphabet <NEW_LINE> label = [e_s0] <NEW_LINE> while len(stack) != 0: <NEW_LINE> <INDENT> s = stack.pop() <NEW_LINE> label_s = label.index(s) if s in label else len(label) <NEW_LINE> s_table = tranistion.get(label_s,{}) <NEW_LINE> for char in alphabet: <NEW_LINE> <INDENT> next_s = self.moveto(s,char) <NEW_LINE> if not next_s: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> next_s_epsilon_closure = tuple(self.epsilon_closure(next_s)) <NEW_LINE> if next_s_epsilon_closure not in label: <NEW_LINE> <INDENT> stack.append(next_s_epsilon_closure) <NEW_LINE> label.append(next_s_epsilon_closure) <NEW_LINE> <DEDENT> s_table[char] = [label.index(next_s_epsilon_closure)] <NEW_LINE> <DEDENT> tranistion[label_s] = s_table <NEW_LINE> <DEDENT> finish = [ label.index(i) for i in label if any([ j in i for j in self.finish ]) ] <NEW_LINE> return NFA(alphabet,list(range(len(label))),finish,tranistion)
|
子集构造算法
|
625941bc97e22403b379ce70
|
def test_05_run(self): <NEW_LINE> <INDENT> output = self.p.run() <NEW_LINE> self.assertEqual(output, u'<span class="b1" style="font-weight: bold">Bold</span><span class="b2 c" style="color: red; font-weight: bold">Bold Red</span>')
|
Test 'run' method
|
625941bc15baa723493c3e4b
|
def stackSize(self): <NEW_LINE> <INDENT> return 0
|
QThread.stackSize() -> int
|
625941bc7d43ff24873a2b75
|
def inorderTraversal(self, root): <NEW_LINE> <INDENT> self.res = [] <NEW_LINE> self.fun(root) <NEW_LINE> return self.res
|
:type root: TreeNode
:rtype: List[int]
|
625941bc92d797404e304061
|
def get_hashpipe_key_value_str_ensured(key, instance=0, interval_sec=0.01, re_get=5): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> consistent = True <NEW_LINE> ret = get_hashpipe_key_value(key, instance=instance) <NEW_LINE> val = ret.decode().strip() <NEW_LINE> for i in range(re_get-1): <NEW_LINE> <INDENT> ret = get_hashpipe_key_value(key, instance=instance) <NEW_LINE> consistent = consistent and (val == ret.decode().strip()) <NEW_LINE> time.sleep(interval_sec) <NEW_LINE> <DEDENT> if not consistent: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return val <NEW_LINE> <DEDENT> <DEDENT> except: <NEW_LINE> <INDENT> return False
|
Returns the value of a key in the hashpipe's status, parsed as a string.
Calls get_hashpipe_key_value 5 times.
Parameters
----------
key: str
The key to get the value of
instance: int
The enumeration of the hashpipe instance whose status is consulted
Returns
-------
str/bytearray/None: The value of the key
|
625941bc8da39b475bd64e48
|
def CalculateNNSfrac(wt): <NEW_LINE> <INDENT> nts = ['A', 'C', 'T', 'G'] <NEW_LINE> nt_counts = {} <NEW_LINE> for nt in nts: <NEW_LINE> <INDENT> nt_counts[nt] = wt.count(nt) <NEW_LINE> <DEDENT> AorT = ( nt_counts['A'] + nt_counts['T'] ) / len(wt) <NEW_LINE> CorG = ( nt_counts['C'] + nt_counts['G'] ) / len(wt) <NEW_LINE> expected_frac = [] <NEW_LINE> AorTpossiblemuts = [2, 12, 18] <NEW_LINE> CorGpossiblemuts = [7, 15, 9] <NEW_LINE> for x in range(3): <NEW_LINE> <INDENT> expected_frac.append(AorTpossiblemuts[x] / sum(AorTpossiblemuts) * AorT + CorGpossiblemuts[x] / sum(CorGpossiblemuts) * CorG) <NEW_LINE> <DEDENT> return expected_frac
|
Calculates the expected fraction of mutations when NNS mutagenesis is used.
wt is the nucleotide sequence of the mutagenized gene.
|
625941bcbe383301e01b5364
|
def GetBackgroundBrush(self, dc): <NEW_LINE> <INDENT> if wx.Platform == '__WXMAC__' : <NEW_LINE> <INDENT> return wx.TRANSPARENT_BRUSH <NEW_LINE> <DEDENT> bkgrd = self.GetBackgroundColour() <NEW_LINE> with wx4c.set_brush_style(wx.BRUSHSTYLE_SOLID) as bstyle: <NEW_LINE> <INDENT> brush = wx.Brush(bkgrd, bstyle) <NEW_LINE> <DEDENT> my_attr = self.GetDefaultAttributes() <NEW_LINE> p_attr = self.GetParent().GetDefaultAttributes() <NEW_LINE> my_def = bkgrd == my_attr.colBg <NEW_LINE> p_def = self.GetParent().GetBackgroundColour() == p_attr.colBg <NEW_LINE> if my_def and not p_def: <NEW_LINE> <INDENT> bkgrd = self.GetParent().GetBackgroundColour() <NEW_LINE> with wx4c.set_brush_style(wx.BRUSHSTYLE_SOLID) as bstyle: <NEW_LINE> <INDENT> brush = wx.Brush(bkgrd, bstyle) <NEW_LINE> <DEDENT> <DEDENT> return brush
|
Get the brush for drawing the background of the button
@return: wx.Brush
@note: used internally when on gtk
|
625941bc596a8972360899a1
|
def pc_nproduced_var(self): <NEW_LINE> <INDENT> return _blocks_swig5.threshold_ff_sptr_pc_nproduced_var(self)
|
pc_nproduced_var(threshold_ff_sptr self) -> float
|
625941bc30bbd722463cbc9b
|
def mutate(self, record): <NEW_LINE> <INDENT> mutator = self._generator.choice( a=self._mutators, p=self._weights, ) <NEW_LINE> return mutator.mutate(record)
|
Return a mutant of `record`.
Parameters
----------
record : :class:`.MoleculeRecord`
The molecule to be mutated.
Returns
-------
:class:`.MutationRecord`
A record of the mutation. The exact subclass of
:class:`.MutationRecord` depends on which mutator was
used.
None : :class:`NoneType`
If `record` cannot be mutated.
|
625941bc73bcbd0ca4b2bf55
|
def calcSaleProfit(self): <NEW_LINE> <INDENT> if self.saleDate != None: <NEW_LINE> <INDENT> result = (self.salePrice - self.purchasePrice) * self.volume <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result = None <NEW_LINE> <DEDENT> return result
|
Calculates the sale profit based on the sale value minus the acquisition value of the asset
Return :
- (float) profit of the sale of the asset. If the asset has not been sold the result is -1
|
625941bc60cbc95b062c6421
|
def perform_create(self, serializer): <NEW_LINE> <INDENT> serializer.validated_data['user'] = self.request.user <NEW_LINE> try: <NEW_LINE> <INDENT> source = Source.objects.get( id=serializer.validated_data['source_id'], organisation=self.request.organisation) <NEW_LINE> <DEDENT> except Source.DoesNotExist: <NEW_LINE> <INDENT> raise Http404 <NEW_LINE> <DEDENT> serializer.validated_data['user'] = self.request.user <NEW_LINE> serializer.validated_data['source'] = source <NEW_LINE> try: <NEW_LINE> <INDENT> return super().perform_create(serializer) <NEW_LINE> <DEDENT> except IntegrityError: <NEW_LINE> <INDENT> return None
|
Set current user.
|
625941bc32920d7e50b280a5
|
def combine_output(out, sep=''): <NEW_LINE> <INDENT> output = sep.join((decode(out[0]), decode(out[1]))) <NEW_LINE> return ANSI_COLOR_RE.sub('', output)
|
Return stdout and/or stderr combined into a string, stripped of ANSI colors.
|
625941bc4e696a04525c9324
|
def importx(self): <NEW_LINE> <INDENT> filenames = tkFileDialog.askopenfilenames() <NEW_LINE> self.filepath = filenames.lstrip('{').rstrip('}').split('} {')
|
Get input file paths
|
625941bc099cdd3c635f0b34
|
def load_obj(objname): <NEW_LINE> <INDENT> if not os.path.exists('tmp/%s.pickle' % objname): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> obj = None <NEW_LINE> with open('tmp/%s.pickle' % objname, 'rb') as f: <NEW_LINE> <INDENT> obj = pickle.load(f) <NEW_LINE> <DEDENT> return obj
|
Load python object from pickle file.
Args:
objname: name of the object
Returns:
Loaded object, None if no such object.
|
625941bcf7d966606f6a9ed9
|
def changelist_view(self,request): <NEW_LINE> <INDENT> param_dict=QueryDict(mutable=True) <NEW_LINE> if request.GET: <NEW_LINE> <INDENT> param_dict['_changlistfilter']=request.GET.urlencode() <NEW_LINE> <DEDENT> base_add_url = reverse("{2}:{0}_{1}_add".format(self.app_label, self.model_name, self.site.namespace)) <NEW_LINE> add_url="{0}?{1}".format(base_add_url,param_dict.urlencode()) <NEW_LINE> self.request=request <NEW_LINE> condition={} <NEW_LINE> from extraapp.utils.my_page import PageInfo <NEW_LINE> all_count=self.model_class.objects.filter(**condition).count() <NEW_LINE> base_page_url=self.changelist_param_url() <NEW_LINE> page_param_dict=copy.deepcopy(request.GET) <NEW_LINE> page_param_dict._mutable=True <NEW_LINE> page_param_dict["page"]=1 <NEW_LINE> page_obj=PageInfo(request.GET.get("page"),all_count,base_page_url,page_param_dict) <NEW_LINE> result_list=self.model_class.objects.filter(**condition)[page_obj.start:page_obj.end] <NEW_LINE> action_list=[] <NEW_LINE> for item in self.action_list: <NEW_LINE> <INDENT> tpl={'name':item.__name__,'text':item.text} <NEW_LINE> action_list.append(tpl) <NEW_LINE> <DEDENT> if request.method=="POST": <NEW_LINE> <INDENT> func_name_str=request.POST.get('action') <NEW_LINE> ret=getattr(self,func_name_str)(request) <NEW_LINE> action_page_url=self.changelist_param_url() <NEW_LINE> if ret: <NEW_LINE> <INDENT> action_page_url=self.changelist_param_url(request.GET) <NEW_LINE> <DEDENT> return redirect(action_page_url) <NEW_LINE> <DEDENT> from extraapp.utils.filter_code import FilterList <NEW_LINE> filter_list=[] <NEW_LINE> for option in self.filter_list: <NEW_LINE> <INDENT> if option.is_func: <NEW_LINE> <INDENT> data_list=option.field_or_func(self,option,request) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> from django.db.models import ForeignKey,ManyToManyField <NEW_LINE> field=self.model_class._meta.get_field(option.field_or_func) <NEW_LINE> if isinstance(field,ForeignKey): <NEW_LINE> <INDENT> 
data_list=FilterList(option,field.rel.model.objects.all(),request) <NEW_LINE> <DEDENT> elif isinstance(field,ManyToManyField): <NEW_LINE> <INDENT> data_list = FilterList(option,field.rel.model.objects.all(), request) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> data_list = FilterList(option,field.model.objects.all(), request) <NEW_LINE> <DEDENT> <DEDENT> filter_list.append(data_list) <NEW_LINE> <DEDENT> context={ 'result_list':result_list, 'list_display':self.list_display, 'BaseExtraAdmin_obj':self, 'add_url':add_url, 'page_str':page_obj.pager(), 'action_list':action_list, 'filter_list':filter_list } <NEW_LINE> return render(request, 'exapp/change_list.html', context )
|
查看列表
:param request:
:return:
|
625941bcd268445f265b4d46
|
def create(self): <NEW_LINE> <INDENT> adscan.fs.makedirs(self.dirname) <NEW_LINE> for i in xrange(0, self.browser_space_size): <NEW_LINE> <INDENT> dirname = '%s/%d' % (self.dirname, i) <NEW_LINE> adscan.fs.makedirs(dirname)
|
Create the browser workspace.
|
625941bc462c4b4f79d1d5a8
|
def isWordGuessed(secretWord, lettersGuessed): <NEW_LINE> <INDENT> if len(set(secretWord).intersection(set(lettersGuessed))) == len(set(secretWord)): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False
|
secretWord: string, the random word the user is trying to guess. This is selected on line 9.
lettersGuessed: list of letters that have been guessed so far.
returns: boolean, True only if all the letters of secretWord are in lettersGuessed;
False otherwise
|
625941bca219f33f34628847
|
def search_from_start_to_end(self): <NEW_LINE> <INDENT> epoch = 0 <NEW_LINE> points = [self.start_url] <NEW_LINE> results = [] <NEW_LINE> [self.point_list.append(i) for i in points] <NEW_LINE> while self.end_url not in results: <NEW_LINE> <INDENT> print("Epoh", epoch) <NEW_LINE> loop = asyncio.new_event_loop() <NEW_LINE> results = loop.run_until_complete(self.next_epoh(range(self.epohs[epoch],len(self.point_list)))) <NEW_LINE> for url, urls in results: <NEW_LINE> <INDENT> self.graph[url] = [] <NEW_LINE> urls = set(urls) <NEW_LINE> for child_url in urls: <NEW_LINE> <INDENT> if not 'https://' in child_url: <NEW_LINE> <INDENT> child_url = ''.join(('https://en.wikipedia.org',child_url)) <NEW_LINE> <DEDENT> if child_url not in self.point_list: <NEW_LINE> <INDENT> self.point_list.append(child_url) <NEW_LINE> self.graph[url].append(len(self.point_list) - 1) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.graph[url].append(self.point_list.index(child_url)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> epoch += 1 <NEW_LINE> self.epohs[epoch] = len(points) <NEW_LINE> loop.close() <NEW_LINE> <DEDENT> visited = set() <NEW_LINE> self.deep_search(visited, self.graph, self.start_url) <NEW_LINE> print(self.answer)
|
Starts an epoch from the first url and searches in all directions.
Processing all available, not-yet-visited urls constitutes one epoch.
|
625941bc4527f215b584c332
|
def _get_all_items_annotated(self): <NEW_LINE> <INDENT> items = [] <NEW_LINE> def collect(x, parent_collapsed): <NEW_LINE> <INDENT> visible = x.visible and not parent_collapsed <NEW_LINE> items.append((visible, x)) <NEW_LINE> for i in x.children: <NEW_LINE> <INDENT> if i: <NEW_LINE> <INDENT> collect(i, parent_collapsed or x.collapsed) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> for x in self.children: <NEW_LINE> <INDENT> collect(x, False) <NEW_LINE> <DEDENT> return items
|
Get a flat list of all TreeItem instances in this Tree,
including visibility information due to collapsed parents.
|
625941bc56ac1b37e62640ad
|
@app.route('/profile/<userid>') <NEW_LINE> def profile_userid(userid): <NEW_LINE> <INDENT> user = FormData.query.filter_by(userid=userid).first() <NEW_LINE> return render_template('profile_user.html', user=user)
|
Render the website's profile/<userid> page.
|
625941bc85dfad0860c3ad31
|
def kresli(neuspesny_pokus): <NEW_LINE> <INDENT> if neuspesny_pokus == 0: <NEW_LINE> <INDENT> with open('pokus0.txt', encoding='utf-8') as subor: <NEW_LINE> <INDENT> obsah = subor.read() <NEW_LINE> return obsah <NEW_LINE> <DEDENT> <DEDENT> elif neuspesny_pokus == 1: <NEW_LINE> <INDENT> with open('pokus1.txt', encoding='utf-8') as subor: <NEW_LINE> <INDENT> obsah = subor.read() <NEW_LINE> return obsah <NEW_LINE> <DEDENT> <DEDENT> elif neuspesny_pokus == 2: <NEW_LINE> <INDENT> with open('pokus2.txt', encoding='utf-8') as subor: <NEW_LINE> <INDENT> obsah = subor.read() <NEW_LINE> return obsah <NEW_LINE> <DEDENT> <DEDENT> elif neuspesny_pokus == 3: <NEW_LINE> <INDENT> with open('pokus3.txt', encoding='utf-8') as subor: <NEW_LINE> <INDENT> obsah = subor.read() <NEW_LINE> return obsah <NEW_LINE> <DEDENT> <DEDENT> elif neuspesny_pokus == 4: <NEW_LINE> <INDENT> with open('pokus4.txt', encoding='utf-8') as subor: <NEW_LINE> <INDENT> obsah = subor.read() <NEW_LINE> return obsah <NEW_LINE> <DEDENT> <DEDENT> elif neuspesny_pokus == 5: <NEW_LINE> <INDENT> with open('pokus5.txt', encoding='utf-8') as subor: <NEW_LINE> <INDENT> obsah = subor.read() <NEW_LINE> return obsah <NEW_LINE> <DEDENT> <DEDENT> elif neuspesny_pokus == 6: <NEW_LINE> <INDENT> with open('pokus6.txt', encoding='utf-8') as subor: <NEW_LINE> <INDENT> obsah = subor.read() <NEW_LINE> return obsah <NEW_LINE> <DEDENT> <DEDENT> elif neuspesny_pokus == 7: <NEW_LINE> <INDENT> with open('pokus7.txt', encoding='utf-8') as subor: <NEW_LINE> <INDENT> obsah = subor.read() <NEW_LINE> return obsah <NEW_LINE> <DEDENT> <DEDENT> elif neuspesny_pokus == 8: <NEW_LINE> <INDENT> with open('pokus8.txt', encoding='utf-8') as subor: <NEW_LINE> <INDENT> obsah = subor.read() <NEW_LINE> return obsah <NEW_LINE> <DEDENT> <DEDENT> elif neuspesny_pokus == 9: <NEW_LINE> <INDENT> with open('pokus9.txt', encoding='utf-8') as subor: <NEW_LINE> <INDENT> obsah = subor.read() <NEW_LINE> return obsah
|
Kresli sibenicu
|
625941bccc0a2c11143dcd68
|
def convert_item(self, item): <NEW_LINE> <INDENT> command, label, type, container = self.read_item(item) <NEW_LINE> if container: <NEW_LINE> <INDENT> command, label, type, container = self.read_item(item) <NEW_LINE> c = Container(label, "") <NEW_LINE> children = c.children <NEW_LINE> for i in range(container.getCount()): <NEW_LINE> <INDENT> _child = self.convert_item(container.getByIndex(i)) <NEW_LINE> if _child: <NEW_LINE> <INDENT> children.append(_child) <NEW_LINE> <DEDENT> <DEDENT> return c <NEW_LINE> <DEDENT> elif type == 0: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return Item(label, "", self.convert_command(command)) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> return Separator()
|
Convert entry to bookmarks item in new format.
|
625941bca8ecb033257d2fae
|
def meas_profile_scatterfig(df, sitename, var, ylabel, ylimit=[-155,0]): <NEW_LINE> <INDENT> measdict = dtool.measurement_h_v_dict(df.columns, var) <NEW_LINE> nplots = len(measdict.keys()) <NEW_LINE> fig, ax = plt.subplots(1, nplots, figsize=(7, 5), sharey=True) <NEW_LINE> if nplots==1: ax = [ax] <NEW_LINE> fig.canvas.set_window_title(sitename + ' ' + var + ' profile') <NEW_LINE> for i, pnum in enumerate(sorted(measdict.keys())): <NEW_LINE> <INDENT> for d in measdict[pnum]: <NEW_LINE> <INDENT> colname = pnum + '_' + d + '_Avg' <NEW_LINE> ax[i].plot(df[colname],np.tile(-int(d), [len(df), 1]), marker='o', ls='None', label=str(d)+'cm' ) <NEW_LINE> <DEDENT> ax[i].set_title('Profile ' + pnum) <NEW_LINE> ax[i].set_ylim(ylimit) <NEW_LINE> ax[i].set_xlabel(ylabel) <NEW_LINE> if i==0: <NEW_LINE> <INDENT> ax[i].set_ylabel('Depth (cm)') <NEW_LINE> <DEDENT> <DEDENT> return fig
|
Make a scatterplot for sensors in a measurement profile
|
625941bc23849d37ff7b2f69
|
def _GetCodegenFromFlags(args): <NEW_LINE> <INDENT> discovery_doc = _GetDiscoveryDocFromFlags(args) <NEW_LINE> names = util.Names( args.strip_prefix, args.experimental_name_convention, args.experimental_capitalize_enums) <NEW_LINE> if args.client_json: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> with open(args.client_json) as client_json: <NEW_LINE> <INDENT> f = json.loads(client_json.read()) <NEW_LINE> web = f.get('installed', f.get('web', {})) <NEW_LINE> client_id = web.get('client_id') <NEW_LINE> client_secret = web.get('client_secret') <NEW_LINE> <DEDENT> <DEDENT> except IOError: <NEW_LINE> <INDENT> raise exceptions.NotFoundError( 'Failed to open client json file: %s' % args.client_json) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> client_id = args.client_id <NEW_LINE> client_secret = args.client_secret <NEW_LINE> <DEDENT> if not client_id: <NEW_LINE> <INDENT> logging.warning('No client ID supplied') <NEW_LINE> client_id = '' <NEW_LINE> <DEDENT> if not client_secret: <NEW_LINE> <INDENT> logging.warning('No client secret supplied') <NEW_LINE> client_secret = '' <NEW_LINE> <DEDENT> client_info = util.ClientInfo.Create( discovery_doc, args.scope, client_id, client_secret, args.user_agent, names, args.api_key) <NEW_LINE> outdir = os.path.expanduser(args.outdir) or client_info.default_directory <NEW_LINE> if os.path.exists(outdir) and not args.overwrite: <NEW_LINE> <INDENT> raise exceptions.ConfigurationValueError( 'Output directory exists, pass --overwrite to replace ' 'the existing files.') <NEW_LINE> <DEDENT> if not os.path.exists(outdir): <NEW_LINE> <INDENT> os.makedirs(outdir) <NEW_LINE> <DEDENT> return gen_client_lib.DescriptorGenerator( discovery_doc, client_info, names, args.root_package, outdir, base_package=args.base_package, protorpc_package=args.protorpc_package, init_wildcards_file=(args.init_file == 'wildcards'), use_proto2=args.experimental_proto2_output, unelidable_request_methods=args.unelidable_request_methods, 
apitools_version=args.apitools_version)
|
Create a codegen object from flags.
|
625941bc5fdd1c0f98dc010a
|
def calc_grav(self): <NEW_LINE> <INDENT> self.change_y += self.y_velocity_multiplier <NEW_LINE> if self.rect.y >= SCREEN_HEIGHT - self.rect.height and self.change_y >= 0: <NEW_LINE> <INDENT> self.change_y = 0 <NEW_LINE> self.change_x = 0 <NEW_LINE> self.rect.y = SCREEN_HEIGHT - self.rect.height <NEW_LINE> self.on_ground = True
|
Calculate effect of gravity.
|
625941bc3cc13d1c6d3c7255
|
def test_code_formatting(): <NEW_LINE> <INDENT> if flake8.__version__ <= "2": <NEW_LINE> <INDENT> msg = ("Module was designed to be tested with flake8 >= 2.0. " "Please update.") <NEW_LINE> warnings.warn(msg) <NEW_LINE> <DEDENT> test_dir = os.path.dirname(os.path.abspath(inspect.getfile( inspect.currentframe()))) <NEW_LINE> root_dir = os.path.dirname(os.path.dirname(test_dir)) <NEW_LINE> if not os.path.exists(os.path.join(root_dir, "setup.py")): <NEW_LINE> <INDENT> msg = "Could not find project root." <NEW_LINE> raise Exception(msg) <NEW_LINE> <DEDENT> files = [] <NEW_LINE> for dirpath, _, filenames in os.walk(root_dir): <NEW_LINE> <INDENT> filenames = [_i for _i in filenames if os.path.splitext(_i)[-1] == os.path.extsep + "py"] <NEW_LINE> if not filenames: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> for py_file in filenames: <NEW_LINE> <INDENT> full_path = os.path.join(dirpath, py_file) <NEW_LINE> files.append(full_path) <NEW_LINE> <DEDENT> <DEDENT> flake8_style = flake8.engine.get_style_guide( parse_argv=False, config_file=flake8.main.DEFAULT_CONFIG) <NEW_LINE> report = flake8_style.check_files(files) <NEW_LINE> assert report.get_count() == 0
|
Tests the formatting and other things with flake8.
|
625941bc97e22403b379ce71
|
def newcomments(self): <NEW_LINE> <INDENT> return self.make_request('newcomments')
|
Retrieve Newly Submitted Comments
|
625941bcbaa26c4b54cb0ffb
|
def __centerOnScreen (self): <NEW_LINE> <INDENT> resolution = QtGui.QDesktopWidget().screenGeometry() <NEW_LINE> self.move((resolution.width() / 2) - (self.frameSize().width() / 2), (resolution.height() / 2) - (self.frameSize().height() / 2))
|
Centers the window on the screen.
|
625941bcab23a570cc250058
|
def main(): <NEW_LINE> <INDENT> import sys <NEW_LINE> n = int(sys.argv[1]) <NEW_LINE> ans = is_prime(n) <NEW_LINE> if ans: <NEW_LINE> <INDENT> print("{0} is Prime Number".format(n)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print("{0} is Composite Number".format(n))
|
Main function
|
625941bc5f7d997b87174973
|
def is_in_dict(phrases, word_freq): <NEW_LINE> <INDENT> return set(phrase for phrase in phrases if phrase in word_freq)
|
Judge whether the phrases in the phrases list are present in the given dictionary word_freq
|
625941bc91f36d47f21ac3c8
|
def to_array(x, y, z, resize_hist=True, **kwargs): <NEW_LINE> <INDENT> binstat_pars = inspect.signature(binned_statistic_2d).parameters.values() <NEW_LINE> binstat_adict = {par.name:par.default for par in binstat_pars} <NEW_LINE> binstat_adict.update({ 'x': x, 'y': y, 'values': z, 'range': [[0, xsize], [0, ysize]] }) <NEW_LINE> binstat_adict.update({k:v for k, v in kwargs.items() if k in binstat_adict.keys()}) <NEW_LINE> binned = binned_statistic_2d(**binstat_adict) <NEW_LINE> hist = binned.statistic.T <NEW_LINE> if resize_hist: <NEW_LINE> <INDENT> resize_pars = inspect.signature(resize).parameters.values() <NEW_LINE> resize_adict = {par.name:par.default for par in resize_pars} <NEW_LINE> resize_adict.update({'image': hist, 'output_shape': (ysize, xsize)}) <NEW_LINE> resize_adict.update({k:v for k, v in kwargs.items() if k in resize_adict.keys()}) <NEW_LINE> return resize(**resize_adict) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return hist
|
given a set of points, create a 2d array of desired dimensions
by first binning it and then resizing.
|
625941bc21a7993f00bc7bc3
|
def compile(self, seed=42): <NEW_LINE> <INDENT> ops.reset_default_graph() <NEW_LINE> self._log_params() <NEW_LINE> with tf.device("/gpu:0"): <NEW_LINE> <INDENT> logger.info("Building graph...") <NEW_LINE> tf.set_random_seed(seed) <NEW_LINE> self.global_step = tf.get_variable(name="global_step", shape=[], dtype='int32', initializer=tf.constant_initializer(0), trainable=False) <NEW_LINE> self._create_placeholders() <NEW_LINE> self._setup_graph_def() <NEW_LINE> self._add_scalar_summary(self.loss) <NEW_LINE> if self.eval_metric is not None: <NEW_LINE> <INDENT> self._add_scalar_summary(self.eval_metric) <NEW_LINE> <DEDENT> <DEDENT> self._is_graph_build = True
|
Build the graph, ostensibly by setting up the placeholders and then
creating/compiling the forward pass.
Note: Clear the previous tensorflow graph definitions
:param seed: int, optional (default=0)
The graph-level seed to use when building the graph.
|
625941bcadb09d7d5db6c66a
|
def noOfNodesUsed (self, noOfNodesMax) : <NEW_LINE> <INDENT> if not self.isMalleable or self.scaleCurveFlag == 0 : <NEW_LINE> <INDENT> return noOfNodesMax <NEW_LINE> <DEDENT> elif self.scaleCurveFlag == 1 : <NEW_LINE> <INDENT> noOfNodesOptimal = noOfNodesMax <NEW_LINE> while noOfNodesOptimal >= 2 and self.scaleCurveArray [noOfNodesOptimal - 2] >= self.scaleCurveArray [noOfNodesOptimal - 1] : <NEW_LINE> <INDENT> noOfNodesOptimal -= 1 <NEW_LINE> <DEDENT> return noOfNodesOptimal
|
Function that returns the no. of nodes used if N nodes are available.
The no. of nodes used depends on the scheduling algorithm.
If application is not malleable, it will return noOfNodesMax.
|
625941bcd10714528d5ffbb9
|
def list_nodes_full(call=None): <NEW_LINE> <INDENT> if call == "action": <NEW_LINE> <INDENT> raise SaltCloudSystemExit( "list_nodes_full must be called with -f or --function" ) <NEW_LINE> <DEDENT> items = query(method="servers") <NEW_LINE> ret = {} <NEW_LINE> for node in items["servers"]: <NEW_LINE> <INDENT> ret[node["name"]] = {} <NEW_LINE> for item in node: <NEW_LINE> <INDENT> value = node[item] <NEW_LINE> ret[node["name"]][item] = value <NEW_LINE> <DEDENT> <DEDENT> return ret
|
Return a list of the BareMetal servers that are on the provider.
|
625941bc66673b3332b91f69
|
@receiver(post_migrate, sender=Gymnasium) <NEW_LINE> def create_new_group(sender, **kwargs): <NEW_LINE> <INDENT> groups_permissions = { "gymnasium_staff": [ 'Can add gymnasium', 'Can change gymnasium', 'Can delete gymnasium', ] } <NEW_LINE> for key, value in groups_permissions.items(): <NEW_LINE> <INDENT> group, created = Group.objects.get_or_create(name=key) <NEW_LINE> if created: <NEW_LINE> <INDENT> for perm_name in value: <NEW_LINE> <INDENT> perm = Permission.objects.get(name='Can add gymn') <NEW_LINE> group.permissions.add(perm)
|
Create new group to handle the gymnasium.
|
625941bc23e79379d52ee43f
|
def _newline_after(func): <NEW_LINE> <INDENT> @wraps(func) <NEW_LINE> def decorated(self, *args, **kwargs): <NEW_LINE> <INDENT> func(self, *args, **kwargs) <NEW_LINE> self._newline() <NEW_LINE> <DEDENT> return decorated
|
Decorator to append a newline after the function
|
625941bc3c8af77a43ae3676
|
def test_custom_reserved_names(self): <NEW_LINE> <INDENT> custom_reserved_names = ['foo', 'bar', 'eggs', 'spam'] <NEW_LINE> class CustomReservedNamesForm(forms.RegistrationForm): <NEW_LINE> <INDENT> reserved_names = custom_reserved_names <NEW_LINE> <DEDENT> for reserved_name in custom_reserved_names: <NEW_LINE> <INDENT> data = self.valid_data.copy() <NEW_LINE> data[self.user_model.USERNAME_FIELD] = reserved_name <NEW_LINE> form = CustomReservedNamesForm(data=data) <NEW_LINE> self.assertFalse(form.is_valid()) <NEW_LINE> self.assertTrue( text_type(validators.RESERVED_NAME) in form.errors[self.user_model.USERNAME_FIELD] )
|
Reserved names can be overridden by an attribute.
|
625941bc9b70327d1c4e0cac
|
def _check(self): <NEW_LINE> <INDENT> if not os.path.isdir(self.deps_directory): <NEW_LINE> <INDENT> log.debug('Initializing the vmcloak-deps repository.') <NEW_LINE> self.init() <NEW_LINE> return False <NEW_LINE> <DEDENT> return True
|
Checks whether the dependency repository has been initialized.
|
625941bcd486a94d0b98e01e
|
def select2_meta_factory(model, meta_fields=None, widgets=None, attrs=None): <NEW_LINE> <INDENT> widgets = widgets or {} <NEW_LINE> meta_fields = meta_fields or {} <NEW_LINE> for field in model._meta.fields: <NEW_LINE> <INDENT> if isinstance(field, ForeignKey) or field.choices: <NEW_LINE> <INDENT> widgets.update({field.name: Select2(select2attrs=attrs)}) <NEW_LINE> <DEDENT> <DEDENT> for field in model._meta.many_to_many: <NEW_LINE> <INDENT> widgets.update({field.name: Select2Multiple(select2attrs=attrs)}) <NEW_LINE> <DEDENT> meta_fields.update({'model': model, 'widgets': widgets}) <NEW_LINE> meta = type('Meta', (object,), meta_fields) <NEW_LINE> return meta
|
Returns `Meta` class with Select2-enabled widgets for fields
with choices (e.g. ForeignKey, CharField, etc) for use with
ModelForm.
Attrs argument is select2 widget attributes (width, for example).
|
625941bcd18da76e235323ac
|
def save(self, *args, **kwargs): <NEW_LINE> <INDENT> if not self.id and not self.added_on: <NEW_LINE> <INDENT> self.added_on = datetime.datetime.today() <NEW_LINE> <DEDENT> return super(Movie, self).save(*args, **kwargs)
|
Update timestamps.
|
625941bc6e29344779a624ed
|
def integrate_RK4(self): <NEW_LINE> <INDENT> func=self.dR <NEW_LINE> t=self.t; t.append(self.t_start) <NEW_LINE> w=self.R; w.append(array([self.R_start, self.dR_start])) <NEW_LINE> self.Pg.append(self.calculate_Pgas(self.R_start)) <NEW_LINE> while t[-1]<self.t_start+self.t_run: <NEW_LINE> <INDENT> if w[-1][0]<0.6*self.R0: <NEW_LINE> <INDENT> if w[-1][0]<0.06*self.R0: <NEW_LINE> <INDENT> t.append(t[-1]+0.02*self.dt_fine) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> t.append(t[-1]+self.dt_fine) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> t.append(t[-1]+self.dt_coarse) <NEW_LINE> <DEDENT> h=t[-1]-t[-2] <NEW_LINE> k1=func(w[-1],t[-2]) <NEW_LINE> k2=func(w[-1]+0.5*h*k1,t[-2]+0.5*h) <NEW_LINE> k3=func(w[-1]+0.5*h*k2,t[-2]+0.5*h) <NEW_LINE> k4=func(w[-1]+h*k3,t[-1]) <NEW_LINE> w.append(w[-1]+(h*k1+2*h*k2+2*h*k3+h*k4)/6.) <NEW_LINE> self.Pg.append(self.calculate_Pgas(w[-1][0]))
|
fourth order Runge-Kutta scheme for time-integration
|
625941bccc0a2c11143dcd69
|
def get_restaurants(self, tab): <NEW_LINE> <INDENT> data = self.http_get(RestUrl.restaurants(tab)) <NEW_LINE> return get_restaurants(tab, data)
|
:type tab: meican.models.Tab
:rtype: list[meican.models.Restaurant]
|
625941bcb5575c28eb68ded7
|
def get_credentials(self): <NEW_LINE> <INDENT> credential_path = 'annette/data/gmail-credentials.json' <NEW_LINE> store = Storage(credential_path) <NEW_LINE> credentials = store.get() <NEW_LINE> flags = argparse.ArgumentParser(parents=[tools.argparser]).parse_args() <NEW_LINE> if not credentials or credentials.invalid: <NEW_LINE> <INDENT> flow = client.flow_from_clientsecrets(self.client_secret_file, self.scopes) <NEW_LINE> flow.user_agent = self.application_name <NEW_LINE> credentials = tools.run_flow(flow, store, flags) <NEW_LINE> _utils.logger.debug('Storing credentials to ' + credential_path) <NEW_LINE> <DEDENT> http = credentials.authorize(httplib2.Http()) <NEW_LINE> service = discovery.build('gmail', 'v1', http=http, cache_discovery=False) <NEW_LINE> return service
|
Gets user credentials from storage.
If the credentials are not found or are invalid, the OAuth2 flow is completed to obtain
new credentials.
:return: Gmail Service object
|
625941bc63d6d428bbe443c8
|
def test_multipleOrigins(self): <NEW_LINE> <INDENT> origin = Origin() <NEW_LINE> origin.public_id = 'smi:ch.ethz.sed/origin/37465' <NEW_LINE> origin.time = UTCDateTime(0) <NEW_LINE> origin.latitude = 12 <NEW_LINE> origin.latitude_errors.confidence_level = 95 <NEW_LINE> origin.longitude = 42 <NEW_LINE> origin.depth_type = 'from location' <NEW_LINE> self.assertEquals(origin.latitude, 12) <NEW_LINE> self.assertEquals(origin.latitude_errors.confidence_level, 95) <NEW_LINE> self.assertEquals(origin.latitude_errors.uncertainty, None) <NEW_LINE> self.assertEquals(origin.longitude, 42) <NEW_LINE> origin2 = Origin() <NEW_LINE> origin2.latitude = 13.4 <NEW_LINE> self.assertEquals(origin2.depth_type, None) <NEW_LINE> self.assertEquals(origin2.resource_id, None) <NEW_LINE> self.assertEquals(origin2.latitude, 13.4) <NEW_LINE> self.assertEquals(origin2.latitude_errors.confidence_level, None) <NEW_LINE> self.assertEquals(origin2.longitude, None)
|
Parameters of multiple origins should not interfere with each other.
|
625941bc1f5feb6acb0c4a2d
|
def test_reading_alignments_cath3(self): <NEW_LINE> <INDENT> path = "Stockholm/cath3.sth" <NEW_LINE> alignments = stockholm.AlignmentIterator(path) <NEW_LINE> alignment = next(alignments) <NEW_LINE> self.assertRaises(StopIteration, next, alignments) <NEW_LINE> self.check_alignment_cath3(alignment) <NEW_LINE> stream = StringIO() <NEW_LINE> writer = stockholm.AlignmentWriter(stream) <NEW_LINE> alignments = [alignment] <NEW_LINE> n = writer.write_file(alignments, mincount=1, maxcount=1) <NEW_LINE> self.assertEqual(n, 1) <NEW_LINE> stream.seek(0) <NEW_LINE> alignments = stockholm.AlignmentIterator(stream) <NEW_LINE> alignment = next(alignments) <NEW_LINE> stream.close() <NEW_LINE> self.check_alignment_cath3(alignment)
|
Test parsing CATH record 1.10.275.10/FF/000026.
|
625941bca05bb46b383ec6fd
|
def search(self, subject=None, predicate=None, object=None, gid=None, **kwargs): <NEW_LINE> <INDENT> q = super(TripleManager, self).get_query_set() <NEW_LINE> if subject: <NEW_LINE> <INDENT> if isinstance(subject, models.Model): <NEW_LINE> <INDENT> q = q.filter(_subject_id=subject.id, _subject_type=ContentType.objects.get_for_model(type(subject))) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> q = q.filter(_subject_text=str(subject)) <NEW_LINE> <DEDENT> <DEDENT> if predicate: <NEW_LINE> <INDENT> if isinstance(predicate, models.Model): <NEW_LINE> <INDENT> q = q.filter(_predicate_id=predicate.id, _predicate_type=ContentType.objects.get_for_model(type(predicate))) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> q = q.filter(_predicate_text=str(predicate)) <NEW_LINE> <DEDENT> <DEDENT> if object: <NEW_LINE> <INDENT> if isinstance(object, models.Model): <NEW_LINE> <INDENT> q = q.filter(_object_id=object.id, _object_type=ContentType.objects.get_for_model(type(object))) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> q = q.filter(_object_text=str(object)) <NEW_LINE> <DEDENT> <DEDENT> if gid: <NEW_LINE> <INDENT> if not isinstance(gid, GraphId): <NEW_LINE> <INDENT> q = q.filter(gid__value=str(gid)) <NEW_LINE> <DEDENT> elif isinstance(gid, GraphId): <NEW_LINE> <INDENT> q = q.filter(gid=gid) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> assert Exception, "Unknown graph ID value: %s" % gid <NEW_LINE> <DEDENT> <DEDENT> return q
|
Queries fact objects matching the given criteria.
|
625941bc91af0d3eaac9b8ee
|
def testCombined(self): <NEW_LINE> <INDENT> for phase in (constants.HOOKS_PHASE_PRE, constants.HOOKS_PHASE_POST): <NEW_LINE> <INDENT> expect = [] <NEW_LINE> for fbase, ecode, rs in [("00succ", 0, HKR_SUCCESS), ("10fail", 1, HKR_FAIL), ("20inv.", 0, HKR_SKIP), ]: <NEW_LINE> <INDENT> fname = "%s/%s" % (self.ph_dirs[phase], fbase) <NEW_LINE> f = open(fname, "w") <NEW_LINE> f.write("#!/bin/sh\nexit %d\n" % ecode) <NEW_LINE> f.close() <NEW_LINE> self.torm.append((fname, False)) <NEW_LINE> os.chmod(fname, 0o700) <NEW_LINE> expect.append((self._rname(fname), rs, "")) <NEW_LINE> <DEDENT> self.assertEqual(self.hr.RunHooks(self.hpath, phase, {}), expect)
|
Test success, failure and skip all in one test
|
625941bc442bda511e8be2f6
|
def __init__(self, engine_name, cwd=None): <NEW_LINE> <INDENT> super(Engine, self).__init__(engine_name) <NEW_LINE> self._engine_name = engine_name <NEW_LINE> self._cwd = cwd
|
Returns an ENVI Py Engine object based on the engine_name.
:param engine_name: A String specifying the name of the requested engine.
:return: None
|
625941bccad5886f8bd26ebb
|
def _get_query_string(self): <NEW_LINE> <INDENT> query_args = {'Query':self.search_terms, '$top':self.top, '$skip':self.skip, '$format':self.frmt, 'Adult':self.adult, 'Market':self.market} <NEW_LINE> return '?'+urllib.urlencode(query_args)
|
Return URL safe query string
|
625941bca79ad161976cc01e
|
def run(self, exposure, referenceLines, detectorMap, pfsConfig, fiberTraces=None) -> Struct: <NEW_LINE> <INDENT> if self.config.doSubtractContinuum: <NEW_LINE> <INDENT> if fiberTraces is None: <NEW_LINE> <INDENT> raise RuntimeError("No fiberTraces provided for continuum subtraction") <NEW_LINE> <DEDENT> with self.continuum.subtractionContext(exposure.maskedImage, fiberTraces, detectorMap, referenceLines): <NEW_LINE> <INDENT> phot = self.photometerLines(exposure, referenceLines, detectorMap, pfsConfig, fiberTraces) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> phot = self.photometerLines(exposure, referenceLines, detectorMap, pfsConfig, fiberTraces) <NEW_LINE> <DEDENT> if fiberTraces is not None: <NEW_LINE> <INDENT> self.correctFluxNormalizations(phot.lines, fiberTraces) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.log.warn("Not normalizing measured line fluxes") <NEW_LINE> <DEDENT> self.log.info("Photometered %d lines", len(phot.lines)) <NEW_LINE> return phot
|
Photometer lines on an arc
We perform a simultaneous fit of PSFs to each of the lines.
This method optionally performs continuum subtraction before handing
off to the ``photometerLines`` method to do the actual photometry.
Parameters
----------
exposure : `lsst.afw.image.Exposure`
Arc exposure on which to centroid lines.
referenceLines : `pfs.drp.stella.ReferenceLineSet`
List of reference lines.
detectorMap : `pfs.drp.stella.DetectorMap`
Mapping between fiberId,wavelength and x,y.
pfsConfig : `pfs.datamodel.PfsConfig`, optional
Top-end configuration, for specifying good fibers.
fiberTraces : `pfs.drp.stella.FiberTraceSet`, optional
Position and profile of fiber traces. Required for continuum
subtraction and/or flux normalisation.
Returns
-------
lines : `pfs.drp.stella.ArcLineSet`
Centroided lines.
apCorr : `pfs.drp.stella.FocalPlaneFunction`
Aperture correction.
|
625941bc45492302aab5e199
|
def write_stderr(s): <NEW_LINE> <INDENT> sys.stderr.write(s) <NEW_LINE> sys.stderr.flush()
|
Write and flush immediately to stderr, as per Supervisor requirement.
|
625941bcbe8e80087fb20b20
|
def initcurvature(img): <NEW_LINE> <INDENT> global runningcur <NEW_LINE> global smoothcurvature <NEW_LINE> warped = roadPerspectiveTransFormation(pfile_cb, img, hood_pixels=0) <NEW_LINE> lrlanes = lane_detection_pipeline(warped, pfile_cb, kernels = 5,hood_pixels=0) <NEW_LINE> lrlanes = remove_noise(lrlanes, threshold = 0.08) <NEW_LINE> lxlane, lylane, rxlane, rylane = detect_lanes(lrlanes, slabs) <NEW_LINE> lfitx, lfity = fitlane(lrlanes, lylane, lxlane, poly, num_pts) <NEW_LINE> rfitx, rfity = fitlane(lrlanes, rylane, rxlane, poly, num_pts) <NEW_LINE> lcurvature = lanecurvature1(lrlanes, lfity, lfitx, poly) <NEW_LINE> rcurvature = lanecurvature1(lrlanes, rfity, rfitx, poly) <NEW_LINE> avgcurvature = (lcurvature+rcurvature)/2.0 <NEW_LINE> runningcur = np.ones(window_width)*avgcurvature <NEW_LINE> smoothcurvature = np.ones(2)*avgcurvature <NEW_LINE> vposition = ( img.shape[1]/2 - (lfitx[-1]+rfitx[-1])/2)*xm_per_pix <NEW_LINE> return runningcur, smoothcurvature
|
Determine curvature in video frame at time t = 0
:param img: image at time t = 0 in video stream
:return:
List of running average values
|
625941bc287bf620b61d3946
|
def test_orbeon_search_read_with_unknown_ERP_fieds(self): <NEW_LINE> <INDENT> domain = [('id', '=', self.runner_form_c_erp_fields_v1.id)] <NEW_LINE> rec = self.runner_model.orbeon_search_read_data(domain, ['xml']) <NEW_LINE> root = self.assertXmlDocument(rec['xml']) <NEW_LINE> unknown_erp_field = runner_xml_parser.xml_parser_erp_fields.UNKNOWN_ERP_FIELD <NEW_LINE> self.assertXpathsOnlyOne(root, ['//ERP.unknown_field']) <NEW_LINE> self.assertXpathValues(root, './/ERP.unknown_field/text()', [(unknown_erp_field)]) <NEW_LINE> self.assertXpathsOnlyOne(root, ['//ERP.unknown_field_id.name']) <NEW_LINE> self.assertXpathValues(root, './/ERP.unknown_field_id.name/text()', [(unknown_erp_field)]) <NEW_LINE> self.assertXpathsOnlyOne(root, ['//ERP.company_id.unknown_field']) <NEW_LINE> self.assertXpathValues(root, './/ERP.company_id.unknown_field/text()', [(unknown_erp_field)]) <NEW_LINE> self.assertXpathsOnlyOne(root, ['//ERP.company_id.unknown_field_id.name']) <NEW_LINE> self.assertXpathValues(root, './/ERP.company_id.unknown_field_id.name/text()', [(unknown_erp_field)])
|
Test reading a runner form with unknown ERP-fields (model-object).
|
625941bc31939e2706e4cd47
|
def __init__(self, tag=None, value=None, last=False): <NEW_LINE> <INDENT> super(Event, self).__setattr__('_Event__event', { 'tag': tag, 'value': value, 'last': last})
|
Construct an immutable event from a tag and a value.
TODO: Could we also allow initialization from a dict?
@param tag
@param value
@param last: true if this is the last event in a signal
|
625941bc8e05c05ec3eea24b
|
def test_as_array2d(self): <NEW_LINE> <INDENT> test_arrays = self._test_arrays('BaseArrayDouble2d') <NEW_LINE> original_arrays = self._test_arrays('BaseArrayDouble2d') <NEW_LINE> for test_array, original_array in zip(test_arrays, original_arrays): <NEW_LINE> <INDENT> self.assertEqual(test_array.sum(), test.test_as_array2d(test_array)) <NEW_LINE> if hasattr(test_array, "toarray"): <NEW_LINE> <INDENT> self.compare_arrays(test_array, original_array) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.compare_arrays(test_array, np.zeros_like(original_array))
|
...Test behavior of as_array2d method
|
625941bc92d797404e304062
|
def __init__(self, mdp, discount = 0.9, iterations = 100): <NEW_LINE> <INDENT> self.mdp = mdp <NEW_LINE> self.discount = discount <NEW_LINE> self.iterations = iterations <NEW_LINE> self.values = util.Counter() <NEW_LINE> states = self.mdp.getStates() <NEW_LINE> for i in range(iterations): <NEW_LINE> <INDENT> temp = util.Counter() <NEW_LINE> for state in states: <NEW_LINE> <INDENT> best = float("-inf") <NEW_LINE> actions = mdp.getPossibleActions(state) <NEW_LINE> for action in actions: <NEW_LINE> <INDENT> transitions = self.mdp.getTransitionStatesAndProbs(state, action) <NEW_LINE> sumTransitions = 0 <NEW_LINE> for transition in transitions: <NEW_LINE> <INDENT> reward = self.mdp.getReward(state, action, transition[0]) <NEW_LINE> sumTransitions += transition[1]*(reward + discount*self.values[transition[0]]) <NEW_LINE> <DEDENT> best = max(best, sumTransitions) <NEW_LINE> <DEDENT> if best != float("-inf"): <NEW_LINE> <INDENT> temp[state] = best <NEW_LINE> <DEDENT> <DEDENT> for state in states: <NEW_LINE> <INDENT> self.values[state] = temp[state]
|
Your value iteration agent should take an mdp on
construction, run the indicated number of iterations
and then act according to the resulting policy.
Some useful mdp methods you will use:
mdp.getStates()
mdp.getPossibleActions(state)
mdp.getTransitionStatesAndProbs(state, action)
mdp.getReward(state, action, nextState)
mdp.isTerminal(state)
|
625941bc8c0ade5d55d3e898
|
@blueprint.route('/json', methods=['POST']) <NEW_LINE> @blueprint.route('', methods=['POST'], strict_slashes=False) <NEW_LINE> @utils.auth.requires_login(redirect=False) <NEW_LINE> def create(): <NEW_LINE> <INDENT> form = GenericImageDatasetForm() <NEW_LINE> fill_form_if_cloned(form) <NEW_LINE> if not form.validate_on_submit(): <NEW_LINE> <INDENT> if request_wants_json(): <NEW_LINE> <INDENT> return flask.jsonify({'errors': form.errors}), 400 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return flask.render_template('datasets/images/generic/new.html', form=form), 400 <NEW_LINE> <DEDENT> <DEDENT> job = None <NEW_LINE> try: <NEW_LINE> <INDENT> job = GenericImageDatasetJob( username=utils.auth.get_username(), name=form.dataset_name.data, group=form.group_name.data, mean_file=form.prebuilt_mean_file.data.strip(), ) <NEW_LINE> if form.method.data == 'prebuilt': <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError('method not supported') <NEW_LINE> <DEDENT> force_same_shape = form.force_same_shape.data <NEW_LINE> job.tasks.append( tasks.AnalyzeDbTask( job_dir=job.dir(), database=form.prebuilt_train_images.data, purpose=form.prebuilt_train_images.label.text, force_same_shape=force_same_shape, ) ) <NEW_LINE> if form.prebuilt_train_labels.data: <NEW_LINE> <INDENT> job.tasks.append( tasks.AnalyzeDbTask( job_dir=job.dir(), database=form.prebuilt_train_labels.data, purpose=form.prebuilt_train_labels.label.text, force_same_shape=force_same_shape, ) ) <NEW_LINE> <DEDENT> if form.prebuilt_val_images.data: <NEW_LINE> <INDENT> job.tasks.append( tasks.AnalyzeDbTask( job_dir=job.dir(), database=form.prebuilt_val_images.data, purpose=form.prebuilt_val_images.label.text, force_same_shape=force_same_shape, ) ) <NEW_LINE> if form.prebuilt_val_labels.data: <NEW_LINE> <INDENT> job.tasks.append( tasks.AnalyzeDbTask( job_dir=job.dir(), database=form.prebuilt_val_labels.data, purpose=form.prebuilt_val_labels.label.text, force_same_shape=force_same_shape, ) ) 
<NEW_LINE> <DEDENT> <DEDENT> save_form_to_job(job, form) <NEW_LINE> scheduler.add_job(job) <NEW_LINE> if request_wants_json(): <NEW_LINE> <INDENT> return flask.jsonify(job.json_dict()) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return flask.redirect(flask.url_for('digits.dataset.views.show', job_id=job.id())) <NEW_LINE> <DEDENT> <DEDENT> except: <NEW_LINE> <INDENT> if job: <NEW_LINE> <INDENT> scheduler.delete_job(job) <NEW_LINE> <DEDENT> raise
|
Creates a new GenericImageDatasetJob
Returns JSON when requested: {job_id,name,status} or {errors:[]}
|
625941bc30dc7b7665901843
|
def get_default_val(self): <NEW_LINE> <INDENT> val = self.default <NEW_LINE> while callable(val): <NEW_LINE> <INDENT> val = val() <NEW_LINE> <DEDENT> return val
|
Helper to expand default value (support callables)
|
625941bca05bb46b383ec6fe
|
def _get(self, object='user', path=None, params=None): <NEW_LINE> <INDENT> if params is None: <NEW_LINE> <INDENT> params = {} <NEW_LINE> <DEDENT> result = self.client.get(object=object, path=path, params=params) <NEW_LINE> return result
|
GET requests for the User object.
|
625941bc31939e2706e4cd48
|
def psetex(self, key, milliseconds, value): <NEW_LINE> <INDENT> return self._execute( [b'PSETEX', key, ascii(milliseconds), value], b'OK')
|
:meth:`~tredis.RedisClient.psetex` works exactly like
:meth:`~tredis.RedisClient.psetex` with the sole difference that the
expire time is specified in milliseconds instead of seconds.
.. versionadded:: 0.2.0
.. note:: **Time complexity**: ``O(1)``
:param key: The key to set
:type key: :class:`str`, :class:`bytes`
:param int milliseconds: Number of milliseconds for TTL
:param value: The value to set
:type value: :class:`str`, :class:`bytes`
:rtype: bool
:raises: :exc:`~tredis.exceptions.RedisError`
|
625941bc2c8b7c6e89b3569c
|
def get_msg(bot): <NEW_LINE> <INDENT> response = bot.method("messages.get", {"count": 1}) <NEW_LINE> if response["items"][0] and response["items"][0]["read_state"] == 0: <NEW_LINE> <INDENT> print(response) <NEW_LINE> bot.method("messages.markAsRead", {"message_ids": response["items"][0]["id"]}) <NEW_LINE> return response
|
получает обновления сообщний
|
625941bc566aa707497f4451
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.