Columns:
  code       string, lengths 4 to 4.48k
  docstring  string, lengths 1 to 6.45k
  _id        string, length 24
def on_run_start(self, request): <NEW_LINE> <INDENT> self._update_run_calls_state(request.run_call_count, request.fetches, request.feed_dict) <NEW_LINE> if self._run_till_filter_pass: <NEW_LINE> <INDENT> return framework.OnRunStartResponse(framework.OnRunStartAction.DEBUG_RUN, self._get_run_debug_urls()) <NEW_LINE> <DEDENT> run_start_cli = curses_ui.CursesUI() <NEW_LINE> run_start_cli.register_command_handler( "run", self._on_run_start_run_handler, self._on_run_start_parsers["run"].format_help(), prefix_aliases=["r"]) <NEW_LINE> run_start_cli.register_command_handler( "invoke_stepper", self._on_run_start_step_handler, self._on_run_start_parsers["invoke_stepper"].format_help(), prefix_aliases=["s"]) <NEW_LINE> if isinstance(request.fetches, list) or isinstance(request.fetches, tuple): <NEW_LINE> <INDENT> fetch_lines = [fetch.name for fetch in request.fetches] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> fetch_lines = [repr(request.fetches)] <NEW_LINE> <DEDENT> if not request.feed_dict: <NEW_LINE> <INDENT> feed_dict_lines = ["(Empty)"] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> feed_dict_lines = [] <NEW_LINE> for feed_key in request.feed_dict: <NEW_LINE> <INDENT> feed_dict_lines.append(feed_key.name) <NEW_LINE> <DEDENT> <DEDENT> help_intro = [ "======================================", "About to enter Session run() call #%d:" % request.run_call_count, "", "Fetch(es):" ] <NEW_LINE> help_intro.extend([" " + line for line in fetch_lines]) <NEW_LINE> help_intro.extend(["", "Feed dict(s):"]) <NEW_LINE> help_intro.extend([" " + line for line in feed_dict_lines]) <NEW_LINE> help_intro.extend([ "======================================", "", "Select one of the following commands to proceed ---->", " run:", " Execute the run() call with the debug tensor-watching", " run -n:", " Execute the run() call without the debug tensor-watching", " run -f <filter_name>:", " Keep executing run() calls until a dumped tensor passes ", " a given, registered filter emerge. Registered filter(s):" ]) <NEW_LINE> if self._tensor_filters: <NEW_LINE> <INDENT> filter_names = [] <NEW_LINE> for filter_name in self._tensor_filters: <NEW_LINE> <INDENT> filter_names.append(filter_name) <NEW_LINE> help_intro.append(" * " + filter_name) <NEW_LINE> <DEDENT> run_start_cli.register_tab_comp_context(["run", "r"], filter_names) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> help_intro.append(" (None)") <NEW_LINE> <DEDENT> help_intro.extend(["", "For more details, see help below:" "",]) <NEW_LINE> run_start_cli.set_help_intro(help_intro) <NEW_LINE> title = "run-start: " + self._run_description <NEW_LINE> response = run_start_cli.run_ui( init_command="help", title=title, title_color="yellow") <NEW_LINE> if response == debugger_cli_common.EXPLICIT_USER_EXIT: <NEW_LINE> <INDENT> print( "Note: user exited from debugger CLI: sys.exit(1) called.", file=sys.stderr) <NEW_LINE> sys.exit(1) <NEW_LINE> <DEDENT> return response
Overrides the on-run-start callback. Invokes the CLI to let the user choose what action to take: run / run --no_debug / step. Args: request: An instance of OnSessionInitRequest. Returns: An instance of OnSessionInitResponse. Raises: RuntimeError: If the user chooses to prematurely exit the debugger.
625941bb30bbd722463cbc74
def stdev(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return math.sqrt(sum([(x[0] - self._avg)**2 for x in self._history])/(self._count - 1)) <NEW_LINE> <DEDENT> except ZeroDivisionError: <NEW_LINE> <INDENT> return 0.0
Returns the standard deviation of the recorded measurements. @rtype: float @return: standard deviation
625941bb24f1403a92600a1a
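A minimal standalone sketch of the same sample standard deviation (note the count - 1 denominator), cross-checked against the standard library; the sample values here are made up for illustration.

import math
import statistics

samples = [2.0, 4.0, 4.0, 5.0, 7.0]
avg = sum(samples) / len(samples)

# sample standard deviation, matching the (count - 1) denominator used above
manual = math.sqrt(sum((x - avg) ** 2 for x in samples) / (len(samples) - 1))

assert math.isclose(manual, statistics.stdev(samples))
print(manual)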
def disconnect_by_key(self, obj, name, key): <NEW_LINE> <INDENT> signals = setdefaultattr(obj, self._signal_attr, {}) <NEW_LINE> handlers = signals.get(name, []) <NEW_LINE> handlers[:] = [h for h in handlers if h[0] is not key]
:param obj: the object to disconnect the signal from :type obj: object :param name: the signal to disconnect, typically a string :type name: signal name :param key: the key for this signal handler, as returned by connect_signal(). :type key: Key This function will remove a callback from the list connected to a signal with connect_signal(). The key passed should be the value returned by connect_signal(). If the callback is not connected or already disconnected, this function will simply do nothing.
625941bb26238365f5f0ed1b
def extractDay(self, input): <NEW_LINE> <INDENT> day = self.extractDay(input) <NEW_LINE> if day: <NEW_LINE> <INDENT> return day[0] <NEW_LINE> <DEDENT> return None
Returns the first time-related date found in the input string, or None if not found.
625941bb6fb2d068a760ef4b
def get_queryset(self): <NEW_LINE> <INDENT> if not self.request.user.is_authenticated: <NEW_LINE> <INDENT> raise Http404 <NEW_LINE> <DEDENT> qs = Course.objects.filter( certifying_organisation=self.certifying_organisation) <NEW_LINE> return qs
Get the queryset for this view. :returns: Course queryset filtered by Organisation :rtype: QuerySet :raises: Http404
625941bb090684286d50eb92
def test_delitem(self): <NEW_LINE> <INDENT> del self.hit[0] <NEW_LINE> self.assertEqual(2, len(self.hit)) <NEW_LINE> self.assertEqual([hsp112, hsp113], self.hit.hsps)
Test Hit.__delitem__.
625941bbbe7bc26dc91cd4b6
def test_grant_to_super_user(self): <NEW_LINE> <INDENT> hosts = Host.objects.user_filter(self.admin) <NEW_LINE> self.assertEqual(hosts.count(), Host.objects.count(), "Superuser can't access to all hosts")
Superuser has access to every host.
625941bbc432627299f04af5
def build_dynamic_library(exprs, preferred_signature=None): <NEW_LINE> <INDENT> exprs_logic = parse_exprs_if_str(exprs) <NEW_LINE> signature, exprs = combine_signatures_or_rename_preds( exprs_logic, preferred_signature) <NEW_LINE> signature = remove_reserved_predicates(signature) <NEW_LINE> return signature, exprs
Create a dynamic library with types of objects that appear in coq formulae. Optionally, it may receive partially specified signatures for objects using the format by NLTK (e.g. {'_john' : e, '_mary' : e, '_love' : <e,<e,t>>}).
625941bb7b25080760e3930c
def set_acceleration(self, channel, acceleration): <NEW_LINE> <INDENT> logger.info("set acceleration CHAN({0}) = {1}".format(channel, acceleration)) <NEW_LINE> outStr = bytearray([_SET_ACCELERATION_CMD, channel, self.low_bits(acceleration), self.high_bits(acceleration)]) <NEW_LINE> return self.write(outStr)
Sets the acceleration limit of the servo channel @brief This command limits the acceleration of a servo channel’s output. The acceleration limit is a value from 0 to 255 in units of (0.25 microseconds)/(10 ms)/(80 ms), except in special cases. A value of 0 corresponds to no acceleration limit. An acceleration limit causes the speed of a servo to slowly ramp up until it reaches the maximum speed, then to ramp down again as position approaches target, resulting in a relatively smooth motion from one point to another. With acceleration and speed limits, only a few target settings are required to make natural-looking motions that would otherwise be quite complicated to produce. At the minimum acceleration setting of 1, the servo output takes about 3 seconds to move smoothly from a target of 1 ms to a target of 2 ms. The acceleration setting has no effect on channels configured as inputs or digital outputs. @param[in] channel The channel of which to set the speed @param[in] acceleration The acc. limit of the channels output in units of (0.25 microseconds)/(10 ms)/(80 ms) @return True if the acceleration was set successfully, False otherwise
625941bbd6c5a10208143ef9
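The low_bits/high_bits helpers are not shown in the row above; the sketch below is an assumption, not the source implementation, splitting a 14-bit value into two 7-bit bytes, which is a common convention for this style of servo-controller command. The command byte is likewise hypothetical.

SET_ACCELERATION_CMD = 0x89  # hypothetical command byte, for illustration only

def low_bits(value):
    # lower 7 bits of a 14-bit value (assumed convention)
    return value & 0x7F

def high_bits(value):
    # upper 7 bits of a 14-bit value (assumed convention)
    return (value >> 7) & 0x7F

acceleration = 200
packet = bytearray([SET_ACCELERATION_CMD, 3, low_bits(acceleration), high_bits(acceleration)])
print(list(packet))  # [137, 3, 72, 1]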
def extrair_atributos(xml): <NEW_LINE> <INDENT> return dict(ER_ATRIB_VALOR.findall(xml))
Extract all attribute-value pairs from an XML document
625941bbbe383301e01b533d
def getTopic(): <NEW_LINE> <INDENT> pass
get the graduation topic
625941bb21bff66bcd684806
def installed(name, recurse=False, force=False): <NEW_LINE> <INDENT> ret = {'name': name, 'result': True, 'changes': {}, 'comment': ''} <NEW_LINE> if name not in __salt__['win_servermanager.list_installed'](): <NEW_LINE> <INDENT> ret['changes'] = {'feature': '{0} will be installed recurse={1}'.format(name, recurse)} <NEW_LINE> <DEDENT> elif force and recurse: <NEW_LINE> <INDENT> ret['changes'] = {'feature': 'already installed but might install sub-features'.format(name)} <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> ret['comment'] = 'The feature {0} is already installed'.format(name) <NEW_LINE> return ret <NEW_LINE> <DEDENT> if __opts__['test']: <NEW_LINE> <INDENT> ret['result'] = None <NEW_LINE> return ret <NEW_LINE> <DEDENT> ret['changes'] = {'feature': __salt__['win_servermanager.install'](name, recurse)} <NEW_LINE> ret['result'] = ret['changes']['feature']['Success'] == 'True' <NEW_LINE> if not ret['result']: <NEW_LINE> <INDENT> ret['comment'] = 'failed to install the feature: {0}'.format(ret['changes']['feature']['ExitCode']) <NEW_LINE> <DEDENT> return ret
Install the windows feature name: short name of the feature (the right column in win_servermanager.list_available) recurse: install all sub-features as well force: if the feature is installed but one of its sub-features is not installed, set this to True to force the installation of the sub-features Note: Some features require a reboot after (un)installation; until the server is restarted, other features can not be installed!
625941bbd18da76e23532384
def test_gdal_translate_get_one_band(self): <NEW_LINE> <INDENT> generated_image = terre_image_processing.gdal_translate_get_one_band(self.terre_image_layer.source_file, 1, self.working_dir) <NEW_LINE> baseline = os.path.join(self.data_dir_baseline, "taredji_extract_b1.tif") <NEW_LINE> self.assertTrue(self.checkResult(generated_image, baseline))
Test the gdal function to get a specific band Returns:
625941bb56b00c62f0f14508
def test_index_flavors_basic(self): <NEW_LINE> <INDENT> flavors = self._index_flavors() <NEW_LINE> for flavor in flavors: <NEW_LINE> <INDENT> self._assert_flavor_entity_basic(flavor)
List all flavors
625941bb21a7993f00bc7b9c
def analyze_single(self, target, amount = 5, output = None, extra_svm_feats = []): <NEW_LINE> <INDENT> if self.model is None: <NEW_LINE> <INDENT> raise Exception('Model not loaded.') <NEW_LINE> <DEDENT> if isinstance(target,str): <NEW_LINE> <INDENT> text = target <NEW_LINE> true_y = None <NEW_LINE> result = predict_single_text(text, self.model, extra_svm_feats = extra_svm_feats) <NEW_LINE> decvals = result.decvals <NEW_LINE> <DEDENT> elif isinstance(target,TextInstance): <NEW_LINE> <INDENT> if target.text is None: <NEW_LINE> <INDENT> raise Exception('Please load texts first.') <NEW_LINE> <DEDENT> text, extra_svm_feats, true_y = target.text, target.extra_svm_feats, target.true_y <NEW_LINE> decvals = target.decvals <NEW_LINE> <DEDENT> if isinstance(output, str): <NEW_LINE> <INDENT> output = open(output, 'w') <NEW_LINE> <DEDENT> features, weights, labels = self.model.get_weight(text, extra_svm_feats = extra_svm_feats) <NEW_LINE> nr_labels = len(labels) <NEW_LINE> nr_feats = len(features) <NEW_LINE> if not features or not weights: <NEW_LINE> <INDENT> raise Exception('Invalid instance.') <NEW_LINE> <DEDENT> features = [' '.join(feature) for feature in features] <NEW_LINE> features += ['**decval**'] <NEW_LINE> weights_table = [[0]*nr_labels]*(nr_feats+1) <NEW_LINE> sorted_idx = sorted(xrange(nr_labels), key=lambda i:decvals[i], reverse=True) <NEW_LINE> labels = [labels[idx] for idx in sorted_idx] <NEW_LINE> for feat in xrange(nr_feats): <NEW_LINE> <INDENT> formatter = lambda idx: '{0:.3e}'.format(weights[feat][idx]) <NEW_LINE> weights_table[feat] = [formatter(idx) for idx in sorted_idx] <NEW_LINE> <DEDENT> weights_table[-1] = ['{0:.3e}'.format(decvals[idx]) for idx in sorted_idx] <NEW_LINE> if amount != 0: <NEW_LINE> <INDENT> labels = labels[:amount] <NEW_LINE> <DEDENT> draw_table(features, labels, weights_table, output) <NEW_LINE> if true_y is not None: <NEW_LINE> <INDENT> print('True label: {0}'.format(true_y))
Return the analysis result of a query. Results will be stored to disk if output is specified.
625941bbe1aae11d1e749b66
def correlation_spectrum(x1, x2, Fs=2 * np.pi, norm=False): <NEW_LINE> <INDENT> x1 = x1 - np.mean(x1) <NEW_LINE> x2 = x2 - np.mean(x2) <NEW_LINE> x1_f = fftpack.fft(x1) <NEW_LINE> x2_f = fftpack.fft(x2) <NEW_LINE> D = np.sqrt(np.sum(x1 ** 2) * np.sum(x2 ** 2)) <NEW_LINE> n = x1.shape[0] <NEW_LINE> ccn = ((np.real(x1_f) * np.real(x2_f) + np.imag(x1_f) * np.imag(x2_f)) / (D * n)) <NEW_LINE> if norm: <NEW_LINE> <INDENT> ccn = ccn / np.sum(ccn) * 2 <NEW_LINE> <DEDENT> f = utils.get_freqs(Fs, n) <NEW_LINE> return f, ccn[0:(n / 2 + 1)]
Calculate the spectral decomposition of the correlation. Parameters ---------- x1,x2: ndarray Two arrays to be correlated. Same dimensions Fs: float, optional Sampling rate in Hz. If provided, an array of frequencies will be returned.Defaults to 2 norm: bool, optional When this is true, the spectrum is normalized to sum to 1 Returns ------- f: ndarray ndarray with the frequencies ccn: ndarray The spectral decomposition of the correlation Notes ----- This method is described in full in: D Cordes, V M Haughton, K Arfanakis, G J Wendt, P A Turski, C H Moritz, M A Quigley, M E Meyerand (2000). Mapping functionally related regions of brain with functional connectivity MR imaging. AJNR American journal of neuroradiology 21:1636-44
625941bb97e22403b379ce4a
def print_result(result): <NEW_LINE> <INDENT> print("Total score: %s" % result["score"]) <NEW_LINE> for module_data in result["results"]: <NEW_LINE> <INDENT> print("Module %s score: %s" % (module_data["module"], module_data["score"])) <NEW_LINE> print("Module %s logfile: %s" % (module_data["module"], module_data["logfile"]))
Prints results well formatted.
625941bb462c4b4f79d1d582
def _guess_quote_and_delimiter( data, delimiters): <NEW_LINE> <INDENT> matches = [] <NEW_LINE> for restr in ( '(?P<delim>[^\w\n"\'])(?P<space> ?)(?P<quote>["\']).*?(?P=quote)(?P=delim)', '(?:^|\n)(?P<quote>["\']).*?(?P=quote)(?P<delim>[^\w\n"\'])(?P<space> ?)', '(?P<delim>[^\w\n"\'])(?P<space> ?)(?P<quote>["\']).*?(?P=quote)(?:$|\n|\r\n)', '(?:^|\n)(?P<quote>["\']).*?(?P=quote)(?:$|\n)'): <NEW_LINE> <INDENT> regexp = re.compile(restr, re.DOTALL | re.MULTILINE) <NEW_LINE> m=regexp.findall(data) <NEW_LINE> if len(m)>0: <NEW_LINE> <INDENT> matches.append((m, regexp)) <NEW_LINE> <DEDENT> <DEDENT> if not matches: <NEW_LINE> <INDENT> return ('', False, None, 0) <NEW_LINE> <DEDENT> quotes = {} <NEW_LINE> delims = {} <NEW_LINE> spaces = 0 <NEW_LINE> for ml, regexp in matches: <NEW_LINE> <INDENT> for m in ml: <NEW_LINE> <INDENT> n = regexp.groupindex['quote'] - 1 <NEW_LINE> key = m[n] <NEW_LINE> if key: <NEW_LINE> <INDENT> quotes[key] = quotes.get(key, 0) + 1 <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> n = regexp.groupindex['delim'] - 1 <NEW_LINE> key = m[n] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> if key and (delimiters is None or key in delimiters): <NEW_LINE> <INDENT> delims[key] = delims.get(key, 0) + 1 <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> n = regexp.groupindex['space'] - 1 <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> if m[n]: <NEW_LINE> <INDENT> spaces += 1 <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> quotechar = reduce(lambda a, b, quotes = quotes: (quotes[a] > quotes[b]) and a or b, list(quotes.keys())) <NEW_LINE> if delims: <NEW_LINE> <INDENT> delim = reduce(lambda a, b, delims = delims: (delims[a] > delims[b]) and a or b, list(delims.keys())) <NEW_LINE> skipinitialspace = delims[delim] == spaces <NEW_LINE> if delim == '\n': <NEW_LINE> <INDENT> delim = '' <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> delim = '' <NEW_LINE> skipinitialspace = 0 <NEW_LINE> <DEDENT> dq_regexp = re.compile( r"((%(delim)s)|^)\W*%(quote)s[^%(delim)s\n]*%(quote)s[^%(delim)s\n]*%(quote)s\W*((%(delim)s)|$)" % {'delim':re.escape(delim), 'quote':quotechar}, re.MULTILINE) <NEW_LINE> if dq_regexp.search(data): <NEW_LINE> <INDENT> doublequote = True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> doublequote = False <NEW_LINE> <DEDENT> return (quotechar, doublequote, delim, skipinitialspace)
Looks for text enclosed between two identical quotes (the probable quotechar) which are preceded and followed by the same character (the probable delimiter). For example: ,'some text', The quote with the most wins, same with the delimiter. If there is no quotechar the delimiter can't be determined this way.
625941bb50485f2cf553cc4a
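This row is the quote/delimiter heuristic from the standard library's csv.Sniffer; rather than calling the private helper, the same behaviour is reachable through the public API, as in this small sketch (sample data made up).

import csv

sample = "'apple', 'banana', 'cherry'\n'dog', 'egg', 'fish'\n"

# Sniffer.sniff() applies the quote/delimiter guessing shown above under the hood;
# the returned Dialect also carries doublequote and skipinitialspace
dialect = csv.Sniffer().sniff(sample)
print(dialect.delimiter)  # ','
print(dialect.quotechar)  # "'"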
def update_power(self): <NEW_LINE> <INDENT> self.Sbus = self.makeSbus(self.baseMVA, self.bus, self.gen, self.active_generators, self.Cg) <NEW_LINE> self.some_power_changed = False
compute complex bus power injections [generation - load]
625941bb1d351010ab8559cf
def config_set(cwd=None, setting_name=None, setting_value=None, user=None, is_global=False): <NEW_LINE> <INDENT> if setting_name is None or setting_value is None: <NEW_LINE> <INDENT> raise TypeError <NEW_LINE> <DEDENT> if cwd is None and not is_global: <NEW_LINE> <INDENT> raise SaltInvocationError('Either `is_global` must be set to True or ' 'you must provide `cwd`') <NEW_LINE> <DEDENT> scope = '--local' <NEW_LINE> if is_global: <NEW_LINE> <INDENT> scope = '--global' <NEW_LINE> <DEDENT> _check_git() <NEW_LINE> return _git_run('git config {0} {1} {2}'.format(scope, setting_name, setting_value), cwd=cwd, runas=user)
Set a key in the git configuration file (.git/config) of the repository or globally. setting_name The name of the configuration key to set setting_value The (new) value to set cwd : None Optional path to the Git repository .. versionchanged:: Helium Made ``cwd`` optional user : None Run git as a user other than what the minion runs as is_global : False Set to True to use the '--global' flag with 'git config' CLI Example: .. code-block:: bash salt '*' git.config_set user.email me@example.com /path/to/repo
625941bb07f4c71912b11339
def store_other_info(self, rights_statement): <NEW_LINE> <INDENT> other_info = models.RightsStatementOtherRightsInformation() <NEW_LINE> other_info.rightsstatement = rights_statement <NEW_LINE> other_info.otherrightsbasis = self.column_value("basis").lower().capitalize() <NEW_LINE> other_info.otherrightsapplicablestartdate = self.column_value("start_date") <NEW_LINE> end_date = self.column_value("end_date") <NEW_LINE> if end_date and end_date.lower() == "open": <NEW_LINE> <INDENT> other_info.otherrightsenddateopen = True <NEW_LINE> <DEDENT> elif end_date: <NEW_LINE> <INDENT> other_info.otherrightsapplicableenddate = end_date <NEW_LINE> <DEDENT> other_info.save() <NEW_LINE> self.store_doc_id( models.RightsStatementOtherRightsDocumentationIdentifier, other_info, "rightsstatementotherrights", "otherrightsdocumentationidentifiertype", "otherrightsdocumentationidentifiervalue", "otherrightsdocumentationidentifierrole", ) <NEW_LINE> if self.column_value("note"): <NEW_LINE> <INDENT> note = models.RightsStatementOtherRightsInformationNote() <NEW_LINE> note.rightsstatementotherrights = other_info <NEW_LINE> note.otherrightsnote = self.column_value("note") <NEW_LINE> note.save()
Store "other" basis column values in the database.
625941bb23e79379d52ee419
def isPalindrome(palindrome, eq=lambda x,y: x==y): <NEW_LINE> <INDENT> length = len(palindrome) <NEW_LINE> for i in range(length/2): <NEW_LINE> <INDENT> if not eq(palindrome[i], palindrome[length-(i+1)]): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> return True
Returns whether a variable is a palindrome or not using a passed-in equals function.
625941bb3cc13d1c6d3c7235
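A Python 3 variant of the same two-pointer check (the row above relies on Python 2's integer division in length/2); the function name and examples here are illustrative only.

def is_palindrome(seq, eq=lambda x, y: x == y):
    # same two-pointer comparison as above, with integer division for Python 3
    n = len(seq)
    return all(eq(seq[i], seq[n - 1 - i]) for i in range(n // 2))

print(is_palindrome("racecar"))                                           # True
print(is_palindrome("Racecar", eq=lambda a, b: a.lower() == b.lower()))   # True
print(is_palindrome([1, 2, 3]))                                           # False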
def columnCount(self, index): <NEW_LINE> <INDENT> return int()
int Akonadi.MessageThreaderProxyModel.columnCount(QModelIndex index)
625941bb8a43f66fc4b53f1b
def print_human_info(**info): <NEW_LINE> <INDENT> output_str = '' <NEW_LINE> for key, value in info.items(): <NEW_LINE> <INDENT> output_str += f'{key.capitalize()}: {value}. ' <NEW_LINE> <DEDENT> print(output_str)
Prints the first name, last name, year of birth, city of residence, email, and phone number
625941bbcdde0d52a9e52ee1
def getCoverageMetaInfo(self, Id, Chrom): <NEW_LINE> <INDENT> resourcePath = '/coverage/{Id}/{Chrom}/meta' <NEW_LINE> method = 'GET' <NEW_LINE> queryParams = {} <NEW_LINE> headerParams = {} <NEW_LINE> resourcePath = resourcePath.replace('{Chrom}', Chrom) <NEW_LINE> resourcePath = resourcePath.replace('{Id}', Id) <NEW_LINE> return self.__singleRequest__(CoverageMetaResponse.CoverageMetaResponse, resourcePath, method, queryParams, headerParams,verbose=0)
Returns metadata about coverage of a chromosome. Note that HrefCoverage must be available for the provided BAM file :param Id: the Id of a Bam file :param Chrom: chromosome name :returns: a CoverageMetaData instance
625941bba17c0f6771cbdf05
def check_sn(self, data): <NEW_LINE> <INDENT> check_sum = data[-2:] <NEW_LINE> check = self.gen_check(data[2:-2]) <NEW_LINE> return check == check_sum
Verify the checksum return protocol_version, cmd
625941bbe64d504609d746f2
def get_Color(self): <NEW_LINE> <INDENT> return super(IMultiLayerFillSymbol, self).get_Color()
Method IFillSymbol.get_Color (from IFillSymbol) OUTPUT Color : IColor**
625941bb73bcbd0ca4b2bf2f
def p_struct_member_declaration(self, p): <NEW_LINE> <INDENT> result = self._symbol_table.get_symbol(p[2]) <NEW_LINE> struct = result.symbol <NEW_LINE> p[0] = ast.struct.Member() <NEW_LINE> p[0].set_line_number(p.lineno(3)) <NEW_LINE> p[0].set_file_name(self._filename) <NEW_LINE> p[0].set_type(struct) <NEW_LINE> p[0].set_name(p[3]) <NEW_LINE> self._symbol_table.add_symbol(p[0])
member-declaration : KW_STRUCT ID ID EOS
625941bb3317a56b86939b1a
def test_chi_transpose_random(self): <NEW_LINE> <INDENT> mats = [self.rand_matrix(4, 4) for _ in range(4)] <NEW_LINE> chans = [Chi(Operator(mat)) for mat in mats] <NEW_LINE> self._compare_transpose_to_operator(chans, mats)
Test transpose of Chi matrices is correct.
625941bb45492302aab5e172
def check_files(user_input): <NEW_LINE> <INDENT> file_ext = ['*.fit*', '*.FIT*'] <NEW_LINE> data_files = [] <NEW_LINE> working_dir = os.path.exists(user_input) <NEW_LINE> if working_dir: <NEW_LINE> <INDENT> os.chdir(user_input) <NEW_LINE> if glob.glob('*.gz*'): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> sp.check_call('gunzip *.gz', shell=True) <NEW_LINE> <DEDENT> except sp.CalledProcessError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> for files in file_ext: <NEW_LINE> <INDENT> data_files += glob.glob(files) <NEW_LINE> <DEDENT> if not data_files: <NEW_LINE> <INDENT> print('Input directory contains no .fit(s) or .FIT(S) files. Please enter another directory.') <NEW_LINE> return False <NEW_LINE> <DEDENT> print(data_files) <NEW_LINE> return data_files <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print('Invalid directory. Please try again.') <NEW_LINE> return False
Checks to make sure given directory is a valid directory. If directory exists, returns list of .fit or .fits files.
625941bbbde94217f3682cad
def authenticate(self, content): <NEW_LINE> <INDENT> if content.get('nick') == 'admin': <NEW_LINE> <INDENT> self.set_admin() <NEW_LINE> <DEDENT> return True
Check if current user is admin
625941bb5e10d32532c5ede0
def obj_type_dec(self, obj_type): <NEW_LINE> <INDENT> self.obj_type_count[obj_type] -= 1 <NEW_LINE> if self.obj_type_count[obj_type] == 0: <NEW_LINE> <INDENT> self.cluster_ids.add(obj_type) <NEW_LINE> self.obj_types.remove(obj_type) <NEW_LINE> del self.obj_type_count[obj_type] <NEW_LINE> del self.obj_type_mem[obj_type]
Decrement the number of objects with given type. Remove from set of object types if no objects belong to type.
625941bb50485f2cf553cc4b
def compress(s): <NEW_LINE> <INDENT> x = lister(s) <NEW_LINE> y = map(printer,x) <NEW_LINE> return combineStrings(y)
compresses a sequence of binary digits into a condensed version
625941bb63b5f9789fde6f97
def wait_for_statistic_of_metric(self, meter_name, query=None, period=None): <NEW_LINE> <INDENT> def check_status(): <NEW_LINE> <INDENT> stat_state_resp = self.ceilometer_client.statistics.list( meter_name, q=query, period=period) <NEW_LINE> if len(stat_state_resp) > 0: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> LOG.debug("Waiting for while metrics will available.") <NEW_LINE> <DEDENT> if not fuel_health.test.call_until_true(check_status, 600, 10): <NEW_LINE> <INDENT> self.fail("Timed out waiting to become alarm") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self.ceilometer_client.statistics.list(meter_name, q=query, period=period)
The method is a customization of test.status_timeout().
625941bbb830903b967e97c8
def test_add_second_task_x(client, jmb): <NEW_LINE> <INDENT> global session, tasks_db <NEW_LINE> jmb.add_page("tasks", TasksPage) <NEW_LINE> session = dict(user=User("admin"), wipdbs=dict()) <NEW_LINE> tasks_db = {1: Task(1, "Task 1", "First task")} <NEW_LINE> r = client.post( "/tasks/tasks/add", data=json.dumps( dict( components=[ dict(execName="/tasks", state=dict()), dict(execName="/tasks/page_title", state=dict(title="Add task")), dict( execName="/tasks/tasks", state=dict(mode="add", parent_task_id=None, wip_id=None), ), dict( execName="/tasks/tasks/add", state=dict( form=dict(title="", description=None, error=None), parent_task_id=None, wip_id=None, task_id=None, ), ), dict( execName="/tasks/tasks/add/subtasks", state=dict(mode="list"), ), ], commands=[ dict( type="init", componentExecName="/tasks/tasks/add", initParams=dict( form=dict( title="Task 2", description="Second task", error=None ), parent_task_id=None, wip_id=None, task_id=None, ), mergeExistingParams=True, ), dict( type="call", componentExecName="/tasks/tasks/add", actionName="save", args=list(), kwargs=dict(), ), ], ) ), headers={"x-jembe": True}, ) <NEW_LINE> assert session == dict(user=User(username="admin"), wipdbs={}) <NEW_LINE> assert tasks_db == { 1: Task(1, "Task 1", "First task"), 2: Task(2, "Task 2", "Second task"), } <NEW_LINE> assert r.status_code == 200 <NEW_LINE> json_response = json.loads(r.data) <NEW_LINE> assert len(json_response) == 4 <NEW_LINE> assert len(session["wipdbs"]) == 0 <NEW_LINE> assert json_response[0]["execName"] == "/tasks/page_title" <NEW_LINE> assert json_response[0]["state"] == dict(title="View Task 2") <NEW_LINE> assert json_response[0]["dom"] == ("""<title>View Task 2</title>""") <NEW_LINE> assert json_response[1]["execName"] == "/tasks/tasks" <NEW_LINE> assert json_response[1]["state"] == dict( mode="view", wip_id=None, parent_task_id=None ) <NEW_LINE> assert json_response[1]["dom"] == ( """<template jmb-placeholder="/tasks/tasks/view"></template>""" ) <NEW_LINE> assert json_response[2]["execName"] == "/tasks/tasks/view" <NEW_LINE> assert json_response[2]["state"] == dict(task_id=2, wip_id=None) <NEW_LINE> assert json_response[2]["dom"] == ( """<h1><a href="#" jmb-on:click="$jmb.component('..').display()">Back</a> Task 2</h1>""" """<div>Second task</div>""" "<h2>Sub tasks</h2>" """<div><template jmb-placeholder="/tasks/tasks/view/subtasks"></template></div>""" ) <NEW_LINE> assert json_response[3]["execName"] == "/tasks/tasks/view/subtasks" <NEW_LINE> assert json_response[3]["state"] == dict(mode="list") <NEW_LINE> assert json_response[3]["dom"] == ( "<div><table><tr><th>Task</th><th>Actions</th></tr></table></div>" )
Calling save without a properly created/set wipdb_id and task_id
625941bb96565a6dacc8f587
def setZeroes(self, matrix: List[List[int]]) -> None: <NEW_LINE> <INDENT> m = len(matrix) <NEW_LINE> n = len(matrix[0]) <NEW_LINE> columns = [] <NEW_LINE> rows = [] <NEW_LINE> for r in range(0, m): <NEW_LINE> <INDENT> c = [i for i, e in enumerate(matrix[r]) if e == 0] <NEW_LINE> if c: <NEW_LINE> <INDENT> rows.append(r) <NEW_LINE> columns.extend(c) <NEW_LINE> matrix[r] = [0] * n <NEW_LINE> <DEDENT> <DEDENT> columns = set(columns) <NEW_LINE> rows = set(rows) <NEW_LINE> rows_to_visit = [x for x in range(0, m) if x not in rows] <NEW_LINE> for c in columns: <NEW_LINE> <INDENT> for r in rows_to_visit: <NEW_LINE> <INDENT> matrix[r][c] = 0
Do not return anything, modify matrix in-place instead.
625941bb45492302aab5e173
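A simplified standalone sketch of the same in-place behaviour as the setZeroes method above (not the exact code): rows and columns that contain a 0 are cleared.

def set_zeroes(matrix):
    # remember which rows/columns contain a zero, then clear them in place
    m, n = len(matrix), len(matrix[0])
    zero_rows = {r for r in range(m) if 0 in matrix[r]}
    zero_cols = {c for c in range(n) if any(matrix[r][c] == 0 for r in range(m))}
    for r in range(m):
        for c in range(n):
            if r in zero_rows or c in zero_cols:
                matrix[r][c] = 0

grid = [[1, 1, 1],
        [1, 0, 1],
        [1, 1, 1]]
set_zeroes(grid)
print(grid)  # [[1, 0, 1], [0, 0, 0], [1, 0, 1]]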
def extract(url): <NEW_LINE> <INDENT> html_str = get_html(url) <NEW_LINE> if html_str == None: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> article_temp = extract_text_by_block(html_str) <NEW_LINE> try: <NEW_LINE> <INDENT> article = extract_text_by_tag(html_str, article_temp) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> article = article_temp <NEW_LINE> <DEDENT> return article
Extract the main text :param url: web page link :return: the main body text
625941bbfff4ab517eb2f2ec
def firstBadVersion(self, n): <NEW_LINE> <INDENT> lo, md, hi = 0, 0, n <NEW_LINE> while lo <= hi: <NEW_LINE> <INDENT> md = (lo + hi) // 2 <NEW_LINE> if isBadVersion(md): <NEW_LINE> <INDENT> hi = md - 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> lo = md + 1 <NEW_LINE> <DEDENT> <DEDENT> return lo
:type n: int :rtype: int
625941bb287bf620b61d3920
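The binary search above depends on an externally provided isBadVersion API; a self-contained sketch with a mocked predicate (the cutoff value is made up) shows the boundary it converges on.

FIRST_BAD = 4  # made-up cutoff for the mock API

def isBadVersion(version):
    return version >= FIRST_BAD

def first_bad_version(n):
    # same lo/hi binary search as above: finds the first version where the predicate flips
    lo, hi = 0, n
    while lo <= hi:
        md = (lo + hi) // 2
        if isBadVersion(md):
            hi = md - 1
        else:
            lo = md + 1
    return lo

print(first_bad_version(10))  # 4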
def has_add_permission(self, request, obj=None): <NEW_LINE> <INDENT> return False
Adding stacks manually is not supported.
625941bbab23a570cc250032
def submit_new_login(self, email, realname, password1, password2, *args, **kw): <NEW_LINE> <INDENT> if self.user_mgr.create_user(email, realname, password1, password2): <NEW_LINE> <INDENT> self.user_mgr.create_new_session(email) <NEW_LINE> raise RedirectException(DASHBOARD_URL) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise Exception("Unknown error.")
Creates a new login.
625941bb7d847024c06be16c
def run(self, params: dict): <NEW_LINE> <INDENT> for func in self.__func_array: <NEW_LINE> <INDENT> if not isinstance(func, str): <NEW_LINE> <INDENT> raise ValueError('__func_array value must str') <NEW_LINE> <DEDENT> if not hasattr(self, func): <NEW_LINE> <INDENT> raise AttributeError('"%s" object has not attribute "%s"' % (self.__class__.__name__, func)) <NEW_LINE> <DEDENT> obj = getattr(self, func) <NEW_LINE> if not callable(obj): <NEW_LINE> <INDENT> raise RuntimeError(obj, 'is not callable') <NEW_LINE> <DEDENT> result = obj(params) <NEW_LINE> if result: <NEW_LINE> <INDENT> return result
Entry-point method, designated as the run method. If a subclass replaces the entry-point method, it must assign the new method's name to the __run variable. :param params: a dict :return: the result of running the whole instance
625941bbd164cc6175782c00
def combine(label, accesskey, accesskey_marker=DEFAULT_ACCESSKEY_MARKER): <NEW_LINE> <INDENT> assert isinstance(label, unicode) <NEW_LINE> assert isinstance(accesskey, unicode) <NEW_LINE> if len(accesskey) == 0: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> searchpos = 0 <NEW_LINE> accesskeypos = -1 <NEW_LINE> in_entity = False <NEW_LINE> accesskeyaltcasepos = -1 <NEW_LINE> while (accesskeypos < 0) and searchpos < len(label): <NEW_LINE> <INDENT> searchchar = label[searchpos] <NEW_LINE> if searchchar == '&': <NEW_LINE> <INDENT> in_entity = True <NEW_LINE> <DEDENT> elif searchchar == ';': <NEW_LINE> <INDENT> in_entity = False <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if not in_entity: <NEW_LINE> <INDENT> if searchchar == accesskey.upper(): <NEW_LINE> <INDENT> accesskeypos = searchpos <NEW_LINE> <DEDENT> if searchchar == accesskey.lower(): <NEW_LINE> <INDENT> if accesskeyaltcasepos == -1: <NEW_LINE> <INDENT> accesskeyaltcasepos = searchpos <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> searchpos += 1 <NEW_LINE> <DEDENT> if accesskeypos == -1: <NEW_LINE> <INDENT> accesskeypos = accesskeyaltcasepos <NEW_LINE> <DEDENT> if accesskeypos >= 0: <NEW_LINE> <INDENT> return label[:accesskeypos] + accesskey_marker + label[accesskeypos:] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return None
Combine a label and an accesskey to form a label+accesskey string We place an accesskey marker before the accesskey in the label and this creates a string with the two combined e.g. "File" + "F" = "&File" :type label: unicode :param label: a label :type accesskey: unicode char :param accesskey: The accesskey :rtype: unicode or None :return: label+accesskey string or None if uncombineable
625941bb63f4b57ef0000fd3
def test_roundtripNestedValue(self): <NEW_LINE> <INDENT> d = self.proxy.callRemote('defer', {'a': self.value}) <NEW_LINE> d.addCallback(self.assertEqual, {'a': self.value}) <NEW_LINE> return d
A C{dict} which contains C{self.value} can be round-tripped over an XMLRPC method call/response.
625941bb5f7d997b8717494e
def makeLoggingPacket(self, name=None): <NEW_LINE> <INDENT> p = self._makePacket() <NEW_LINE> return LoggingPacket(p, name)
Create a direct ethernet server request packet with tracing
625941bbcc0a2c11143dcd48
def test_task_data_flow(self): <NEW_LINE> <INDENT> params = {"func": "pow", "inputs": ["arg", "power", "modulo"], "stored_data_varname": "data"} <NEW_LINE> spec = {"arg": 2, "power": 3, "modulo": None} <NEW_LINE> action = PyTask(**params).run_task(spec) <NEW_LINE> self.assertEqual(action.stored_data["data"], 8) <NEW_LINE> params["outputs"] = ["result"] <NEW_LINE> action = PyTask(**params).run_task(spec) <NEW_LINE> self.assertEqual(action.stored_data["data"], 8) <NEW_LINE> self.assertEqual(action.update_spec["result"], 8) <NEW_LINE> params["chunk_number"] = 0 <NEW_LINE> action = PyTask(**params).run_task(spec) <NEW_LINE> self.assertEqual(action.stored_data["data"], 8) <NEW_LINE> self.assertEqual(action.mod_spec[0]["_push"]["result"], 8) <NEW_LINE> params["args"] = [2, 3] <NEW_LINE> params["inputs"] = ["modulo"] <NEW_LINE> spec = {"modulo": 3} <NEW_LINE> action = PyTask(**params).run_task(spec) <NEW_LINE> self.assertEqual(action.stored_data["data"], 2) <NEW_LINE> self.assertEqual(action.mod_spec[0]["_push"]["result"], 2) <NEW_LINE> params["func"] = afunc.__module__ + "." + afunc.__name__ <NEW_LINE> params["args"] = [3, 3] <NEW_LINE> params["inputs"] = ["array"] <NEW_LINE> spec = {"array": [1, 2]} <NEW_LINE> action = PyTask(**params).run_task(spec) <NEW_LINE> self.assertEqual(action.stored_data["data"], [1, 2]) <NEW_LINE> self.assertEqual(action.mod_spec[0]["_push"]["result"], 1) <NEW_LINE> self.assertEqual(action.mod_spec[1]["_push"]["result"], 2) <NEW_LINE> del params["chunk_number"] <NEW_LINE> action = PyTask(**params).run_task(spec) <NEW_LINE> self.assertEqual(action.update_spec["result"][0], 1) <NEW_LINE> self.assertEqual(action.update_spec["result"][1], 2) <NEW_LINE> params["outputs"] = ["first", "second"] <NEW_LINE> action = PyTask(**params).run_task(spec) <NEW_LINE> self.assertEqual(action.update_spec["first"], 1) <NEW_LINE> self.assertEqual(action.update_spec["second"], 2)
test dataflow parameters: inputs, outputs and chunk_number
625941bb7047854f462a12bf
def getTheClassificaton(listofFour): <NEW_LINE> <INDENT> maxvalue = max(listofFour) <NEW_LINE> if listofFour[0] == maxvalue: <NEW_LINE> <INDENT> return 'financial_disclosure' <NEW_LINE> <DEDENT> elif listofFour[1] == maxvalue: <NEW_LINE> <INDENT> return 'crm' <NEW_LINE> <DEDENT> elif listofFour[2] == maxvalue: <NEW_LINE> <INDENT> return 'marketing_and_sales' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return 'jobs'
Input the list of 4 scores and output the corresponding class.
625941bbd6c5a10208143efa
def index(request): <NEW_LINE> <INDENT> return render(request, 'SNP_Feature_View/index.html')
Home page.
625941bbb5575c28eb68deb1
def _get_change_tree(self): <NEW_LINE> <INDENT> if self.parent and self.parent.parent: <NEW_LINE> <INDENT> headers_to_change = list(self.parent.parent.get_descendants()) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> headers_to_change = list(Header.objects.filter(type=self.type)) <NEW_LINE> <DEDENT> accounts_to_change = [account for header in headers_to_change for account in list(header.account_set.all())[-1:]] <NEW_LINE> return headers_to_change + accounts_to_change
Get extra :class:`Headers<Header>` and :class:`Accounts<Account>`. A change in a :class:`Header` may cause changes in the number of Headers up to it's grandfather. We only save one :class:`Account` under each :class:`Header` because each :class:`Account` will save it's siblings. :returns: Additional instances to save. :rtype: list of :class:`Headers<Header>` and :class:`Accounts<Account>`
625941bbfbf16365ca6f6070
def __init__(self, k): <NEW_LINE> <INDENT> self.lst = [0] * k <NEW_LINE> self.k = k <NEW_LINE> self.front = 0 <NEW_LINE> self.back = 0 <NEW_LINE> self.size = 0
Initialize your data structure here. Set the size of the queue to be k. :type k: int
625941bb92d797404e30403c
def leadColor(*args, **kwargs): <NEW_LINE> <INDENT> pass
leadColor() -> MColor Returns the color for lead objects.
625941bb1f037a2d8b9460b1
def generate_date_filter(date_param_name, date_start=None, date_end=None): <NEW_LINE> <INDENT> search_param = dict() <NEW_LINE> if date_start is not None: <NEW_LINE> <INDENT> search_param[date_param_name + '__gte'] = date_start <NEW_LINE> <DEDENT> if date_end is not None: <NEW_LINE> <INDENT> search_param[date_param_name + '__lt'] = date_end + timedelta(days=1) <NEW_LINE> <DEDENT> return search_param
Generate a date filter
625941bb56ac1b37e6264088
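The helper above builds Django-style __gte/__lt lookup keys; this sketch shows the dict it would produce for a hypothetical created_at field and why a day is added so the end date is covered inclusively.

from datetime import date, timedelta

date_start = date(2024, 1, 1)
date_end = date(2024, 1, 31)

# equivalent of generate_date_filter("created_at", date_start, date_end)
search_param = {
    "created_at__gte": date_start,
    "created_at__lt": date_end + timedelta(days=1),  # half-open upper bound keeps the whole end day
}
print(search_param)
# e.g. Model.objects.filter(**search_param) would then cover 2024-01-01 through 2024-01-31 inclusive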
def _parse_color_str(self, color_str): <NEW_LINE> <INDENT> rgb = [int(x) for x in color_str.split(',')] <NEW_LINE> return QColor(rgb[0], rgb[1], rgb[2])
change str to int and set it as a color config :param color_str: str from config file :return: QColor object
625941bbbe8e80087fb20afa
def coinChange(self, coins, amount): <NEW_LINE> <INDENT> dp = [amount + 1 for x in range(amount + 1)] <NEW_LINE> dp[0] = 0 <NEW_LINE> for i in range(1, amount + 1): <NEW_LINE> <INDENT> for coin in coins: <NEW_LINE> <INDENT> if i - coin >= 0: <NEW_LINE> <INDENT> dp[i] = min(dp[i], 1 + dp[i - coin]) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return dp[-1] if dp[-1] != amount + 1 else -1
:type coins: List[int] :type amount: int :rtype: int
625941bb1f037a2d8b9460b2
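A standalone sketch of the same bottom-up DP (the original is written as a Solution method); dp[i] ends up holding the minimum number of coins summing to i.

def coin_change(coins, amount):
    # dp[i] = fewest coins that sum to i; amount + 1 acts as "infinity"
    dp = [amount + 1] * (amount + 1)
    dp[0] = 0
    for i in range(1, amount + 1):
        for coin in coins:
            if i - coin >= 0:
                dp[i] = min(dp[i], 1 + dp[i - coin])
    return dp[-1] if dp[-1] != amount + 1 else -1

print(coin_change([1, 2, 5], 11))  # 3  (5 + 5 + 1)
print(coin_change([2], 3))         # -1 (no combination reaches 3)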
def flatten( self: Self_DecayChain, stable_particles: Iterable[Union[Dict[str, int], List[str], str]] = (), ) -> Self_DecayChain: <NEW_LINE> <INDENT> vis_bf = self.bf <NEW_LINE> fs = DaughtersDict(self.decays[self.mother].daughters) <NEW_LINE> if stable_particles: <NEW_LINE> <INDENT> keys = [k for k in self.decays.keys() if k not in stable_particles] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> keys = [k for k in self.decays.keys()] <NEW_LINE> <DEDENT> keys.insert(0, keys.pop(keys.index(self.mother))) <NEW_LINE> further_to_replace = True <NEW_LINE> while further_to_replace: <NEW_LINE> <INDENT> for k in keys: <NEW_LINE> <INDENT> if k in fs: <NEW_LINE> <INDENT> n_k = fs[k] <NEW_LINE> vis_bf *= self.decays[k].bf ** n_k <NEW_LINE> for _ in range(n_k): <NEW_LINE> <INDENT> fs += self.decays[k].daughters <NEW_LINE> <DEDENT> fs[k] -= n_k <NEW_LINE> <DEDENT> <DEDENT> further_to_replace = any(fs[_k] > 0 for _k in keys) <NEW_LINE> <DEDENT> return self.__class__( self.mother, {self.mother: DecayMode(vis_bf, fs, **self.top_level_decay().metadata)}, )
Flatten the decay chain replacing all intermediate, decaying particles, with their final states. Parameters ---------- stable_particles: iterable, optional, default=() If provided, ignores the sub-decays of the listed particles, considering them as stable. Note ---- After flattening the only `DecayMode` metadata kept is that of the top-level decay, i.e. that of the mother particle (nothing else would make sense). Examples -------- >>> dm1 = DecayMode(0.0124, 'K_S0 pi0', model='PHSP') >>> dm2 = DecayMode(0.692, 'pi+ pi-') >>> dm3 = DecayMode(0.98823, 'gamma gamma') >>> dc = DecayChain('D0', {'D0':dm1, 'K_S0':dm2, 'pi0':dm3}) >>> >>> dc.flatten() <DecayChain: D0 -> gamma gamma pi+ pi- (0 sub-decays), BF=0.008479803984> >>> dc.flatten().to_dict() {'D0': [{'bf': 0.008479803984, 'fs': ['gamma', 'gamma', 'pi+', 'pi-'], 'model': 'PHSP', 'model_params': ''}]} >>> dc.flatten(stable_particles=('K_S0', 'pi0')).decays {'D0': <DecayMode: daughters=K_S0 pi0, BF=0.0124>}
625941bb046cf37aa974cbfd
def delete(self, app, version, target='master', artifact=None): <NEW_LINE> <INDENT> if not artifact: <NEW_LINE> <INDENT> artifact = u'%s.tar.gz' % app <NEW_LINE> <DEDENT> artifact_path = os.path.join(app, target, artifact) <NEW_LINE> versions = self.list_versions(app, target, artifact) <NEW_LINE> if version and version not in versions: <NEW_LINE> <INDENT> raise ValueError('Non-existent version: %s', version) <NEW_LINE> <DEDENT> elif not version and not versions: <NEW_LINE> <INDENT> raise ValueError('No versions present in the repository') <NEW_LINE> <DEDENT> latest_path = os.path.join(artifact_path, 'latest') <NEW_LINE> latest = versions[-1] <NEW_LINE> if latest == version: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> versions.remove(version) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> if versions: <NEW_LINE> <INDENT> self.store.put(latest_path, '%s\n' % versions[-1]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.store.delete(latest_path) <NEW_LINE> <DEDENT> <DEDENT> path = os.path.join(artifact_path, version) <NEW_LINE> self.store.delete(path, metadata=True)
Delete an object from the repository.
625941bbcad5886f8bd26e95
def test_repeat_orbit_calls_asym_multi_day_0_UT_long_time_gap(self): <NEW_LINE> <INDENT> self.stime += dt.timedelta(days=334) <NEW_LINE> self.testInst.load(date=self.stime) <NEW_LINE> self.testInst.orbits.next() <NEW_LINE> control = self.testInst.copy() <NEW_LINE> for j in range(20): <NEW_LINE> <INDENT> self.testInst.orbits.next() <NEW_LINE> <DEDENT> for j in range(20): <NEW_LINE> <INDENT> self.testInst.orbits.prev() <NEW_LINE> <DEDENT> assert all(control.data == self.testInst.data)
Test successful orbit calls for many different days with a long gap
625941bb099cdd3c635f0b0f
def __init__(self): <NEW_LINE> <INDENT> super(Cursor, self).__init__(image=games.load_image("Sprites/cursor.png"), x=games.mouse.x, y=games.mouse.y) <NEW_LINE> self.mouseClicked = False <NEW_LINE> self.mouseCounter = 0 <NEW_LINE> self.gunShotSound = games.load_sound("Sounds/shot.wav")
Cursor Initializer
625941bb63d6d428bbe443a2
def unregister(self, handler): <NEW_LINE> <INDENT> if not self.is_running_handlers: <NEW_LINE> <INDENT> self.handlers.remove(handler) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.to_remove.add(handler) <NEW_LINE> <DEDENT> return handler
Removes a handler so that it doesn't receive future messages. >>> dispatch.unregister(handler)
625941bb6e29344779a624c8
def bc_get_links(driver, in_link): <NEW_LINE> <INDENT> driver.get(in_link) <NEW_LINE> time.sleep(7) <NEW_LINE> html = driver.page_source <NEW_LINE> soup = BeautifulSoup(html, "html5lib") <NEW_LINE> recipe_links = [] <NEW_LINE> for i in range(5): <NEW_LINE> <INDENT> body = soup.find('main', {"class":"content"}) <NEW_LINE> recipe_links += [a['href'] for a in body.find_all('a', {"class":"entry-title-link"}, href=True)] <NEW_LINE> print(recipe_links) <NEW_LINE> next = soup.find('a', text='Next Page »') <NEW_LINE> if next: <NEW_LINE> <INDENT> driver.get(next['href']) <NEW_LINE> time.sleep(7) <NEW_LINE> html = driver.page_source <NEW_LINE> soup = BeautifulSoup(html, "html5lib") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> <DEDENT> return recipe_links
Inputs link and returns list of recipe links to scrape for BC
625941bb3c8af77a43ae3650
def median_absolute_deviation(timeseries): <NEW_LINE> <INDENT> series = pandas.Series([x[1] for x in timeseries]) <NEW_LINE> median = series.median() <NEW_LINE> demedianed = np.abs(series - median) <NEW_LINE> median_deviation = demedianed.median() <NEW_LINE> if median_deviation == 0: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> test_statistic = demedianed.iget(-1) / median_deviation <NEW_LINE> print('median:'+str(median)) <NEW_LINE> print('median_deviation:'+str(median_deviation)) <NEW_LINE> print('demedianed.iget(-1):'+str(demedianed.iget(-1))) <NEW_LINE> print('test_statistic:'+str(test_statistic)) <NEW_LINE> if test_statistic > 6: <NEW_LINE> <INDENT> return True
A timeseries is anomalous if the deviation of its latest datapoint with respect to the median is X times larger than the median of deviations.
625941bb8e7ae83300e4ae7f
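The row above relies on the long-deprecated Series.iget; an equivalent, self-contained sketch of the same median-absolute-deviation test using plain numpy (the threshold of 6 is kept from the original, the sample data is made up).

import numpy as np

values = np.array([10.0, 11.0, 9.5, 10.2, 10.8, 9.9, 25.0])  # last point is the latest datapoint

median = np.median(values)
deviations = np.abs(values - median)
median_deviation = np.median(deviations)

# anomalous if the latest deviation is more than 6x the median of deviations
if median_deviation == 0:
    anomalous = False
else:
    anomalous = deviations[-1] / median_deviation > 6
print(anomalous)  # True for this toy series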
def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, SharedDataEntrySchema): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__
Returns true if both objects are equal
625941bbbf627c535bc13089
def replaceInputSeriesName(seriesname): <NEW_LINE> <INDENT> for pat, replacement in Config['input_series_replacements'].items(): <NEW_LINE> <INDENT> if re.match(pat, seriesname, re.IGNORECASE|re.UNICODE): <NEW_LINE> <INDENT> return replacement <NEW_LINE> <DEDENT> <DEDENT> return seriesname
allow specified replacements of series names in cases where default filenames match the wrong series, e.g. missing year gives wrong answer, or vice versa This helps the TVDB query get the right match.
625941bb71ff763f4b549542
def gen_simulated_frb(NFREQ=1536, NTIME=2**10, sim=True, fluence=1.0, spec_ind=0.0, width=0.0005, dm=0, background_noise=None, delta_t=0.00008192, plot_burst=False, freq=(1520., 1220.), FREQ_REF=1400., scintillate=False, scat_tau_ref=0.0, disp_ind=2., conv_dmsmear=False): <NEW_LINE> <INDENT> plot_burst = False <NEW_LINE> t_ref = 0. <NEW_LINE> if len(freq) < 3: <NEW_LINE> <INDENT> freq=np.linspace(freq[0], freq[1], NFREQ) <NEW_LINE> <DEDENT> if background_noise is None: <NEW_LINE> <INDENT> data = np.random.normal(50, 1, NTIME*NFREQ).reshape(NFREQ, NTIME) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> data = background_noise <NEW_LINE> <DEDENT> if sim is False: <NEW_LINE> <INDENT> return data, [] <NEW_LINE> <DEDENT> ES = EventSimulator(dm=dm, fluence=fluence, width=width, spec_ind=spec_ind) <NEW_LINE> E = Event(t_ref, FREQ_REF, dm, fluence, width, spec_ind, disp_ind, scat_tau_ref) <NEW_LINE> E.add_to_data(delta_t, freq, data, scintillate=scintillate, conv_dmsmear=conv_dmsmear) <NEW_LINE> if plot_burst: <NEW_LINE> <INDENT> subplot(211) <NEW_LINE> imshow(data.reshape(-1, NTIME), aspect='auto', interpolation='nearest', vmin=0, vmax=10) <NEW_LINE> subplot(313) <NEW_LINE> plot(data.reshape(-1, ntime).mean(0)) <NEW_LINE> <DEDENT> return data, [dm, fluence, E.width_max, spec_ind, disp_ind, scat_tau_ref]
Simulate fast radio bursts using the EventSimulator class. Parameters ---------- NFREQ : np.int number of frequencies for simulated array NTIME : np.int number of times for simulated array sim : bool whether or not to simulate FRB or just create noise array spec_ind : tuple range of spectral index width : tuple range of widths in seconds (atm assumed dt=0.0016) scat_tau_ref : tuple scattering timescale at ref freq (seconds) background_noise : if None, simulates white noise. Otherwise should be an array (NFREQ, NTIME) plot_burst : bool generates a plot of the simulated burst conv_dmsmear : bool if True, convolve Gaussian pulse with boxcar to imitate DM-smearing Returns ------- data : np.array data array (NFREQ, NTIME) parameters : tuple [dm, fluence, width, spec_ind, disp_ind, scat_factor]
625941bb2eb69b55b151c75e
def __coerce_type(self, key, value): <NEW_LINE> <INDENT> (default, typ, restrictions) = self._map[key] <NEW_LINE> if value is None: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> if typ == "choice": <NEW_LINE> <INDENT> assert value in restrictions, "Invalid choice %s for %s. Valid choices include: %s" % (value, key, ", ".join(restrictions)) <NEW_LINE> return value <NEW_LINE> <DEDENT> elif typ in [ "string", "int", "currency", "float", "list", "boolean" ]: <NEW_LINE> <INDENT> return self.__coerce_basic_type(key, value, typ, restrictions) <NEW_LINE> <DEDENT> elif typ == "list": <NEW_LINE> <INDENT> return self.__coerce_list(key, value, typ, restrictions) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if type(value) == type({}): <NEW_LINE> <INDENT> return typ().from_datastruct(value) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return value
For validating set functions and also for deserialization, this method ensures the arguments are of the right type according to the map. string accepts only strings int accepts ints or strings list accepts lists of objects or basic types, container type is required choice accepts only certain basic values booleans accepts bools or attempts casting to bools $className accepts an object of a hash to initialize the class More types can be added later.
625941bbbe7bc26dc91cd4b8
def get_all(self, **kwargs): <NEW_LINE> <INDENT> kwarguments = {} <NEW_LINE> if 'language' in kwargs: <NEW_LINE> <INDENT> kwarguments['language'] = kwargs['language'] <NEW_LINE> <DEDENT> return [{'id': p.get_vocabulary_id(), 'concepts': p.get_all(**kwarguments)} for p in self.providers.values()]
Get all concepts from all providers. .. code-block:: python # get all concepts in all providers. registry.get_all() # get all concepts in all providers. # If possible, display the results with a Dutch label. registry.get_all(language='nl') :param string language: Optional. If present, it should be a :term:`language-tag`. This language-tag is passed on to the underlying providers and used when selecting the label to display for each concept. :returns: a list of :class:`dict`. Each dict has two keys: id and concepts.
625941bb50812a4eaa59c1d8
def update(self, **kwargs): <NEW_LINE> <INDENT> for k, v in kwargs.iteritems(): <NEW_LINE> <INDENT> if k in self.attributes: <NEW_LINE> <INDENT> setattr(self, k, v) <NEW_LINE> <DEDENT> elif k == 'childs': <NEW_LINE> <INDENT> for n in sorted(kwargs['childs']): <NEW_LINE> <INDENT> newChild = self.newChild(**kwargs['childs'][n]) <NEW_LINE> self.addChild(newChild) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> self.log.warning("!!! Unrecognized attribute: %s. Skipp !!!" % k)
Update class data with given attributes :param kwargs: child data (key must start with self.__attrPrefix__) :type kwargs: dict
625941bb7b180e01f3dc46b7
def all_exercises(self): <NEW_LINE> <INDENT> admin = Profile.objects.get(user__username='davidr') <NEW_LINE> return Exercise.objects.filter(Q(created_by=self) | Q(created_by=admin))
Returns all default and user created exercises.
625941bb3eb6a72ae02ec388
def customMouseRelease(self, event): <NEW_LINE> <INDENT> pass
Summary Args: event (TYPE): Description Returns: TYPE: Description
625941bb2eb69b55b151c75f
def test_search(): <NEW_LINE> <INDENT> root: Tk = tk.Tk() <NEW_LINE> app = JSONTreeFrame(root, json_path="../dat/list.json") <NEW_LINE> try: <NEW_LINE> <INDENT> x = app.find("fuzzy") <NEW_LINE> <DEDENT> except AttributeError as err: <NEW_LINE> <INDENT> assert False <NEW_LINE> <DEDENT> assert True
search should not break in case of numeric fields
625941bb379a373c97cfa9fe
def test_del_wrong_user(self): <NEW_LINE> <INDENT> self.client.login(username='test_user2', password='test_password2') <NEW_LINE> self.client.get('/delete_team/1/', follow=True) <NEW_LINE> self.assertTemplateUsed('scheduler/access_denied.html')
attempt to delete a team without being team admin
625941bb23e79379d52ee41a
def login(number,pwd): <NEW_LINE> <INDENT> login_url = 'http://222.206.65.12/reader/redr_verify.php' <NEW_LINE> data = { 'number':number, 'passwd':pwd, 'returnUrl':'', 'select':'cert_no', } <NEW_LINE> data = urllib.urlencode(data) <NEW_LINE> req = urllib2.Request(url=login_url, data=data) <NEW_LINE> login_ret = opener.open(req).read() <NEW_LINE> if login_ret.find('密码错误') > 0: <NEW_LINE> <INDENT> raise errors.PageError('对不起,密码错误,请查实!!') <NEW_LINE> <DEDENT> elif login_ret.find('您尚未完成身份认证') > 0: <NEW_LINE> <INDENT> raise errors.PageError('您尚未完成图书馆身份认证!') <NEW_LINE> <DEDENT> elif login_ret.find("证件信息") > 0: <NEW_LINE> <INDENT> return True
Log in
625941bbd6c5a10208143efb
def isDone(*args, **kwargs): <NEW_LINE> <INDENT> pass
isDone() -> Bool Indicates whether or not all nodes or plugs have been iterated over in accordance with the direction, traversal, level and filter. If a valid filter is set, the iterator only visits those nodes that match the filter.
625941bb8e71fb1e9831d660
def document(self): <NEW_LINE> <INDENT> self.prepare_document() <NEW_LINE> self._required_attrs(['template', 'name']) <NEW_LINE> doc = [] <NEW_LINE> header_text = "{} FAWS Template".format(self.resource_name()) <NEW_LINE> doc.append(header_text + "\n" + ('=' * len(header_text))) <NEW_LINE> doc.append(self.template.description) <NEW_LINE> doc.append('### Parameters') <NEW_LINE> for name, param in self.template.parameters.items(): <NEW_LINE> <INDENT> doc.append('\n#### ' + name) <NEW_LINE> for prop, value in param.properties.items(): <NEW_LINE> <INDENT> doc.append("- {}: `{}`".format(prop, value)) <NEW_LINE> <DEDENT> <DEDENT> doc.append('\n### Outputs') <NEW_LINE> for name in self.template.outputs.keys(): <NEW_LINE> <INDENT> doc.append("- `{}`".format(name)) <NEW_LINE> <DEDENT> return "\n".join(doc)
Returns documentation for the template
625941bb21bff66bcd684808
def main(global_config, **settings): <NEW_LINE> <INDENT> config = Configurator(settings=settings) <NEW_LINE> config.include('pyramid_chameleon') <NEW_LINE> config.add_static_view('static', 'static', cache_max_age=3600) <NEW_LINE> config.add_route('home', '/') <NEW_LINE> config.add_route('add', '/add') <NEW_LINE> config.add_route('update', '/update') <NEW_LINE> config.add_route('remove', '/remove') <NEW_LINE> config.scan() <NEW_LINE> return config.make_wsgi_app()
This function returns a Pyramid WSGI application.
625941bb1b99ca400220a964
def iterate_transaction(self, start_date, end_date, callback): <NEW_LINE> <INDENT> sql = ('SELECT i.name,t.instrument,t.type,t.price,t.shares,t.fee,t.date FROM [transaction] t, instrument i ' 'WHERE t.instrument = i.rowid AND date >=? AND date<=? ORDER BY date') <NEW_LINE> epoch1 = int(timegm(start_date.timetuple())) <NEW_LINE> epoch2 = int(timegm(end_date.timetuple())) <NEW_LINE> for f in self.exec(sql, (epoch1, epoch2)): <NEW_LINE> <INDENT> callback(f['instrument'], f['name'], f['type'], f['price'], f['shares'], f['fee'], f['date'])
iterate stock transactions, callback signature: callback(instrument id,instrument name,transaction type,price, shares,fee, date)
625941bb21a7993f00bc7b9e
def get(self): <NEW_LINE> <INDENT> context = { "title": "Geographic BLS Sources", "sources": [], } <NEW_LINE> query = "SELECT DISTINCT source FROM states_series" <NEW_LINE> for row in db.session.execute(query): <NEW_LINE> <INDENT> context["sources"].append({ "name": row[0], "url": self.get_detail_url(row[0]), }) <NEW_LINE> <DEDENT> return context
Returns all the distinct sources and their urls
625941bb8e05c05ec3eea225
def __init__(self, loginbutton_properties, credential_label_properties, loginscreen_properties, *args, **kwargs): <NEW_LINE> <INDENT> self.size = Window.size <NEW_LINE> self.login_screen = LoginScreen(loginbutton_properties=loginbutton_properties, credential_label_properties=credential_label_properties, **loginscreen_properties) <NEW_LINE> super(LoginManager, self).__init__(id="LoginManager", transition=FadeTransition(), **kwargs) <NEW_LINE> self.logged_user = None <NEW_LINE> self.setup_screens()
This widget is supposed to be root of application!
625941bbd10714528d5ffb93
def make_html_plot(figure): <NEW_LINE> <INDENT> script, div = bokeh.embed.components(figure, wrap_script=False) <NEW_LINE> script = "<script>;var _runBokehPlot = function() {\n" + script + "\n};\n</script>" <NEW_LINE> return script + "\n" + div
Wrap the bokeh figure into an embeddable HTML element, containing a function that can be called to draw the plot.
625941bb1d351010ab8559d0
def main() -> None: <NEW_LINE> <INDENT> data = aocd.get_data(year=2021, day=25) <NEW_LINE> seafloor = read_map(data) <NEW_LINE> print(f"Part 1: {moves_until_still(seafloor)}")
Calculate and output the solutions based on the real puzzle input.
625941bb9b70327d1c4e0c87
def process_response(self, request, response): <NEW_LINE> <INDENT> if ( hasattr(request, 'resolver_match') and hasattr(request.resolver_match, 'namespaces') and isinstance(request.resolver_match.namespaces, list) and 'api' in request.resolver_match.namespaces ): <NEW_LINE> <INDENT> add_never_cache_headers(response) <NEW_LINE> <DEDENT> return response
Args: https://docs.djangoproject.com/en/1.10/topics/http/middleware/ Returns: HttpResponse
625941bbb7558d58953c4dce
def log_in_interface_graphics(self, usn ="", pw="")->str: <NEW_LINE> <INDENT> items = ["Username: ", "Password: "] <NEW_LINE> input_list = [usn, "*"*len(pw)] <NEW_LINE> data_header = "LOGIN DETAILS" <NEW_LINE> login = self.IFmethods.creation_interface(items, input_list, data_header) <NEW_LINE> return self.defaultheader+self.login_acc_default+login
Is to be triggered every time a new input is made in the login menu
625941bb1d351010ab8559d1
def getnames(self): <NEW_LINE> <INDENT> return [am.filename for am in self.getmembers()]
Return a list of the (file)names of all the members in the archive in the order they are in the archive.
625941bb50485f2cf553cc4c
def push(self, item): <NEW_LINE> <INDENT> self._size += 1 <NEW_LINE> return self.stack.append(item)
pushes new item to stack
625941bb7d43ff24873a2b51
def pop(self): <NEW_LINE> <INDENT> return _osgAnimation.vectorVec2Keyframe_pop(self)
pop(vectorVec2Keyframe self) -> Vec2Keyframe
625941bb5fdd1c0f98dc00e5
def get_logger(name: str, file_name_path: str = 'yang.log'): <NEW_LINE> <INDENT> exists = False <NEW_LINE> if os.path.isfile(file_name_path): <NEW_LINE> <INDENT> exists = True <NEW_LINE> <DEDENT> FORMAT = '%(asctime)-15s %(levelname)-8s %(filename)s %(name)5s => %(message)s - %(lineno)d' <NEW_LINE> DATEFMT = '%Y-%m-%d %H:%M:%S' <NEW_LINE> logging.basicConfig(datefmt=DATEFMT, format=FORMAT, filename=file_name_path, level=logging.DEBUG) <NEW_LINE> logger = logging.getLogger(name) <NEW_LINE> if not exists: <NEW_LINE> <INDENT> os.chmod(file_name_path, 0o664 | stat.S_ISGID) <NEW_LINE> <DEDENT> return logger
Create a formatted logger with the specified name and store logs at the path defined by the 'file_name_path' argument. Arguments: :param name (str) Name of the logger. :param file_name_path (str) Filename and path where to save logs. :return a logger with the specified name.
625941bb0c0af96317bb809d
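A short usage sketch for get_logger above; the logger name and log path are hypothetical, not values from the source.

    logger = get_logger('backend', '/tmp/yang.log')   # hypothetical name and path
    logger.info('repository pull finished')
    logger.error('failed to parse module %s', 'ietf-interfaces')
    # The chmod only runs when the call creates the log file; later calls that
    # reuse an existing file leave its permissions untouched.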
def test_SMCP(self): <NEW_LINE> <INDENT> check_instruction("SMCP $C15, 2047($SP)", "fffe07ff") <NEW_LINE> check_instruction("SMCP $C15, -1($SP)", "fffeffff") <NEW_LINE> check_instruction("SMCP $C4, 17362($9)", "f49e43d2") <NEW_LINE> check_instruction("SMCP $C3, 6490($4)", "f34e195a") <NEW_LINE> check_instruction("SMCP $C2, -11232($10)", "f2aed420") <NEW_LINE> check_instruction("SMCP $C6, 201($7)", "f67e00c9") <NEW_LINE> check_instruction("SMCP $C3, -25912($6)", "f36e9ac8") <NEW_LINE> check_instruction("SMCP $C9, -25215($7)", "f97e9d81") <NEW_LINE> check_instruction("SMCP $C0, -26294($7)", "f07e994a") <NEW_LINE> check_instruction("SMCP $C3, 32566($11)", "f3be7f36")
Test the SMCP instruction
625941bb3539df3088e2e1ff
def _getTemplateFromFile( self, name ): <NEW_LINE> <INDENT> filename = self._config.render_template(name, self._variables ) <NEW_LINE> content = None <NEW_LINE> if os.path.isfile(filename): <NEW_LINE> <INDENT> with open( filename, 'r', encoding="utf-8") as myfile: <NEW_LINE> <INDENT> content = myfile.read() <NEW_LINE> <DEDENT> <DEDENT> if not content: <NEW_LINE> <INDENT> content = "<!-- missing:'{}' -->".format( filename ) <NEW_LINE> <DEDENT> return content
Load template from file Parameters ---------- name: str template filename Returns ------- content : str Template file content, or a placeholder comment if the file is missing
625941bb7c178a314d6ef30d
def __init__(self, parent=None, useOpenGL=True): <NEW_LINE> <INDENT> self.closed = False <NEW_LINE> QtGui.QGraphicsView.__init__(self, parent) <NEW_LINE> if 'linux' in sys.platform: <NEW_LINE> <INDENT> useOpenGL = False <NEW_LINE> <DEDENT> self.useOpenGL(useOpenGL) <NEW_LINE> palette = QtGui.QPalette() <NEW_LINE> brush = QtGui.QBrush(QtGui.QColor(0,0,0)) <NEW_LINE> brush.setStyle(QtCore.Qt.SolidPattern) <NEW_LINE> palette.setBrush(QtGui.QPalette.Active,QtGui.QPalette.Base,brush) <NEW_LINE> brush = QtGui.QBrush(QtGui.QColor(0,0,0)) <NEW_LINE> brush.setStyle(QtCore.Qt.SolidPattern) <NEW_LINE> palette.setBrush(QtGui.QPalette.Inactive,QtGui.QPalette.Base,brush) <NEW_LINE> brush = QtGui.QBrush(QtGui.QColor(244,244,244)) <NEW_LINE> brush.setStyle(QtCore.Qt.SolidPattern) <NEW_LINE> palette.setBrush(QtGui.QPalette.Disabled,QtGui.QPalette.Base,brush) <NEW_LINE> self.setPalette(palette) <NEW_LINE> self.setFocusPolicy(QtCore.Qt.StrongFocus) <NEW_LINE> self.setFrameShape(QtGui.QFrame.NoFrame) <NEW_LINE> self.setVerticalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOff) <NEW_LINE> self.setHorizontalScrollBarPolicy(QtCore.Qt.ScrollBarAlwaysOff) <NEW_LINE> self.setTransformationAnchor(QtGui.QGraphicsView.NoAnchor) <NEW_LINE> self.setResizeAnchor(QtGui.QGraphicsView.AnchorViewCenter) <NEW_LINE> self.setViewportUpdateMode(QtGui.QGraphicsView.SmartViewportUpdate) <NEW_LINE> self.setSceneRect(QtCore.QRectF(-1e10, -1e10, 2e10, 2e10)) <NEW_LINE> self.lockedViewports = [] <NEW_LINE> self.lastMousePos = None <NEW_LINE> self.aspectLocked = False <NEW_LINE> self.yInverted = True <NEW_LINE> self.range = QtCore.QRectF(0, 0, 1, 1) <NEW_LINE> self.autoPixelRange = True <NEW_LINE> self.currentItem = None <NEW_LINE> self.clearMouse() <NEW_LINE> self.updateMatrix() <NEW_LINE> self.sceneObj = QtGui.QGraphicsScene() <NEW_LINE> self.setScene(self.sceneObj) <NEW_LINE> self.centralWidget = None <NEW_LINE> self.setCentralItem(QtGui.QGraphicsWidget()) <NEW_LINE> self.centralLayout = QtGui.QGraphicsGridLayout() <NEW_LINE> self.centralWidget.setLayout(self.centralLayout) <NEW_LINE> self.mouseEnabled = False <NEW_LINE> self.scaleCenter = False <NEW_LINE> self.clickAccepted = False
Re-implementation of QGraphicsView that removes scrollbars and allows unambiguous control of the viewed coordinate range. Also automatically creates a QGraphicsScene and a central QGraphicsWidget that is automatically scaled to the full view geometry. By default, the view coordinate system matches the widget's pixel coordinates and automatically updates when the view is resized. This can be overridden by setting autoPixelRange=False. The exact visible range can be set with setRange(). The view can be panned using the middle mouse button and scaled using the right mouse button if enabled via enableMouse().
625941bb4a966d76dd550ec0
def set_path(self, name, path, **kwargs): <NEW_LINE> <INDENT> self.__getattr__(name).path=self._fix_path(path) <NEW_LINE> self.__dict__[name].set(**kwargs) <NEW_LINE> return(self)
Adds a new path attribute with the given name. name: name of the attribute. path: path string. kwargs: passed to Info.set().
625941bb090684286d50eb95
def cmdline_params(self, file1_name, file2_name): <NEW_LINE> <INDENT> parameters = [] <NEW_LINE> pm_dict = self.get_dict() <NEW_LINE> for k in pm_dict.keys(): <NEW_LINE> <INDENT> if pm_dict[k]: <NEW_LINE> <INDENT> parameters += ['--' + k] <NEW_LINE> <DEDENT> <DEDENT> parameters += [file1_name, file2_name] <NEW_LINE> return [str(p) for p in parameters]
Synthesize command line parameters. e.g. ['--ignore-case', 'filename1', 'filename2'] :param file1_name: Name of first file :type file1_name: str :param file2_name: Name of second file :type file2_name: str
625941bb55399d3f05588567
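A self-contained restatement of the flag-building logic in cmdline_params above, with get_dict() replaced by an explicit dict argument; the flag names are illustrative.

    def diff_flags(pm_dict, file1_name, file2_name):
        # mirrors cmdline_params: keep '--<key>' for truthy values, then append the files
        parameters = ['--' + k for k, v in pm_dict.items() if v]
        parameters += [file1_name, file2_name]
        return [str(p) for p in parameters]

    print(diff_flags({'ignore-case': True, 'brief': False}, 'old.txt', 'new.txt'))
    # ['--ignore-case', 'old.txt', 'new.txt']  -- falsy flags are dropped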
def predict_cumulative_hazard_function(self, X): <NEW_LINE> <INDENT> risk_score = numpy.exp(self.predict(X)) <NEW_LINE> n_samples = risk_score.shape[0] <NEW_LINE> funcs = numpy.empty(n_samples, dtype=numpy.object_) <NEW_LINE> for i in range(n_samples): <NEW_LINE> <INDENT> funcs[i] = StepFunction(x=self.cum_baseline_hazard_.x, y=self.cum_baseline_hazard_.y, a=risk_score[i]) <NEW_LINE> <DEDENT> return funcs
Predict cumulative hazard function. The cumulative hazard function for an individual with feature vector :math:`x` is defined as .. math:: H(t \mid x) = \exp(x^\top \beta) H_0(t) , where :math:`H_0(t)` is the baseline hazard function, estimated by Breslow's estimator. Parameters ---------- X : array-like, shape = (n_samples, n_features) Data matrix. Returns ------- cum_hazard : ndarray, shape = (n_samples,) Predicted cumulative hazard functions.
625941bb4c3428357757c1de
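A small numeric sketch of the formula quoted in the docstring above, H(t | x) = exp(x^T beta) * H_0(t), using a plain numpy step lookup instead of the model's StepFunction helper; the coefficients and baseline values are made up for illustration.

    import numpy as np

    beta = np.array([0.5, -0.25])                 # hypothetical fitted coefficients
    base_t = np.array([1.0, 2.0, 5.0, 10.0])      # event times of the baseline estimator
    base_H = np.array([0.05, 0.12, 0.30, 0.55])   # hypothetical Breslow baseline H0(t)

    X = np.array([[1.0, 2.0],
                  [0.0, 0.5]])                    # two individuals
    risk = np.exp(X @ beta)                       # exp(x^T beta), as in the predict() call

    def cum_hazard(t, r):
        # right-continuous step lookup of H0(t), scaled by the risk score
        idx = np.searchsorted(base_t, t, side='right') - 1
        return r * base_H[idx] if idx >= 0 else 0.0

    print([cum_hazard(4.0, r) for r in risk])     # each individual's H(4 | x)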
def ListGroups(opts, args): <NEW_LINE> <INDENT> desired_fields = ParseFields(opts.output, _LIST_DEF_FIELDS) <NEW_LINE> fmtoverride = { "node_list": (",".join, False), "pinst_list": (",".join, False), "ndparams": (_FmtDict, False), } <NEW_LINE> cl = GetClient(query=True) <NEW_LINE> return GenericList(constants.QR_GROUP, desired_fields, args, None, opts.separator, not opts.no_headers, format_override=fmtoverride, verbose=opts.verbose, force_filter=opts.force_filter, cl=cl)
List node groups and their properties. @param opts: the command line options selected by the user @type args: list @param args: groups to list, or empty for all @rtype: int @return: the desired exit code
625941bb8e71fb1e9831d661
def get_capability(capability: Capability) -> bool: <NEW_LINE> <INDENT> return capability in get_capabilities()
Returns the status for a specific capability.
625941bb97e22403b379ce4d
def _plot3d_options(self, options=None): <NEW_LINE> <INDENT> if options is None: <NEW_LINE> <INDENT> options = self.options() <NEW_LINE> <DEDENT> options_3d = {} <NEW_LINE> if 'rgbcolor' in options: <NEW_LINE> <INDENT> options_3d['rgbcolor'] = options['rgbcolor'] <NEW_LINE> del options['rgbcolor'] <NEW_LINE> <DEDENT> if 'alpha' in options: <NEW_LINE> <INDENT> options_3d['opacity'] = options['alpha'] <NEW_LINE> del options['alpha'] <NEW_LINE> <DEDENT> for o in ('legend_color', 'legend_label', 'zorder'): <NEW_LINE> <INDENT> if o in options: <NEW_LINE> <INDENT> del options[o] <NEW_LINE> <DEDENT> <DEDENT> if len(options) != 0: <NEW_LINE> <INDENT> raise NotImplementedError("Unknown plot3d equivalent for {}".format( ", ".join(options.keys()))) <NEW_LINE> <DEDENT> return options_3d
Translate 2D plot options into 3D plot options. EXAMPLES:: sage: P = line([(-1,-2), (3,5)], alpha=.5, thickness=4) sage: p = P[0]; p Line defined by 2 points sage: q=p.plot3d() sage: q.thickness 4 sage: q.texture.opacity 0.500000000000000
625941bb7cff6e4e81117839
def normalize_data(raw_data): <NEW_LINE> <INDENT> data = [0, 0, 0] <NEW_LINE> data[0] = (raw_data[0] << 4) + (raw_data[1] >> 4) <NEW_LINE> data[1] = (raw_data[2] << 4) + (raw_data[3] >> 4) <NEW_LINE> data[2] = (raw_data[4] << 4) + (raw_data[5] >> 4) <NEW_LINE> for i in range(3): <NEW_LINE> <INDENT> if data[i] >> 11 == 1: <NEW_LINE> <INDENT> data[i] -= 2**12 <NEW_LINE> <DEDENT> <DEDENT> return data
Converts raw accelerometer data to normalized values.
625941bb0a50d4780f666d43
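A worked example for the 12-bit two's-complement unpacking above; the six raw bytes are invented to show one positive and two negative axis values, and the call assumes normalize_data is in scope.

    raw = [0x01, 0xF0, 0xFF, 0x00, 0x80, 0x00]   # invented register dump, 2 bytes per axis
    print(normalize_data(raw))                    # -> [31, -16, -2048]
    # axis 0: (0x01 << 4) + (0xF0 >> 4) = 31              top bit clear, stays positive
    # axis 1: (0xFF << 4) + (0x00 >> 4) = 4080 -> -16     top bit set, subtract 2**12
    # axis 2: (0x80 << 4)               = 2048 -> -2048   most negative 12-bit value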
def example_integrand(xarr, weight=None): <NEW_LINE> <INDENT> n_dim = xarr.shape[-1] <NEW_LINE> a = tf.constant(0.1, dtype=DTYPE) <NEW_LINE> n100 = tf.cast(100 * n_dim, dtype=DTYPE) <NEW_LINE> pref = tf.pow(1.0 / a / np.sqrt(np.pi), n_dim) <NEW_LINE> coef = tf.reduce_sum(tf.range(n100 + 1)) <NEW_LINE> coef += tf.reduce_sum(tf.square((xarr - 1.0 / 2.0) / a), axis=1) <NEW_LINE> coef -= (n100 + 1) * n100 / 2.0 <NEW_LINE> return pref * tf.exp(-coef)
Example function that integrates to 1
625941bb1b99ca400220a965
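In the entry above, tf.reduce_sum(tf.range(n100 + 1)) and (n100 + 1) * n100 / 2.0 cancel (up to round-off), so the integrand reduces to a product of Gaussians centred at 0.5 with width a = 0.1; a rough numpy Monte Carlo check of that reduced form (not of the TensorFlow code itself) is sketched below.

    import numpy as np

    def reduced_integrand(x, a=0.1):
        # product over dimensions of (1 / (a * sqrt(pi))) * exp(-((x_i - 0.5) / a)**2)
        return np.prod(np.exp(-(((x - 0.5) / a) ** 2)) / (a * np.sqrt(np.pi)), axis=1)

    rng = np.random.default_rng(0)
    samples = rng.random((1_000_000, 4))          # 4 dimensions, uniform on the unit hypercube
    print(reduced_integrand(samples).mean())      # ~1.0 up to Monte Carlo noise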
def line_counts_as_uncovered(line: str, is_from_cover_annotation_file: bool) -> bool: <NEW_LINE> <INDENT> if is_from_cover_annotation_file: <NEW_LINE> <INDENT> if not line.startswith('! '): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> content = line[2:] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> content = line <NEW_LINE> <DEDENT> content = content.strip() <NEW_LINE> if '#' in content: <NEW_LINE> <INDENT> content = content[:content.index('#')].strip() <NEW_LINE> <DEDENT> if any(re.search(pat, content) for pat in IGNORED_LINE_PATTERNS): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return (is_from_cover_annotation_file or line_content_counts_as_uncovered_manual(content))
Args: line: The line of code (including coverage annotation). is_from_cover_annotation_file: Whether this line has been annotated. Returns: Does the line count as uncovered?
625941bbd8ef3951e32433f1
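The entry above depends on two module-level names that are not part of this record (IGNORED_LINE_PATTERNS and line_content_counts_as_uncovered_manual); the sketch below only mirrors its annotation- and comment-stripping step, leaving the pattern checks out.

    def strip_annotation(line, is_from_cover_annotation_file):
        # mirrors the pre-processing in line_counts_as_uncovered; returns None
        # for lines an annotated cover file marks as covered (no '! ' prefix)
        if is_from_cover_annotation_file:
            if not line.startswith('! '):
                return None
            line = line[2:]
        content = line.strip()
        if '#' in content:
            content = content[:content.index('#')].strip()
        return content

    print(strip_annotation('! x = compute()  # TODO', True))   # -> 'x = compute()'
    print(strip_annotation('  y = 1', True))                   # -> None (treated as covered)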
def itemset(self, *args): <NEW_LINE> <INDENT> item = args[-1] <NEW_LINE> args = args[:-1] <NEW_LINE> self[args] = item
Set Python scalar into array
625941bb656771135c3eb725
def GetLoadConstructDataString(ode_name): <NEW_LINE> <INDENT> save_load_data_string = (translator.GetBlockCommentDefinition(0, "De-serialize constructor parameters and initialise a " + ode_name + ".", True) + "template<class Archive>\n" + "inline void load_construct_data(\n" + translator.AddTabs(1) + "Archive & ar, " + ode_name + " * t, const unsigned int file_version)\n" + "{\n" + translator.AddTabs(1) + "std::vector<double> state_variables;\n" + translator.AddTabs(1) + "ar & state_variables;\n" + translator.AddTabs(1) + "\n" + translator.GetCommentDefinition(1, "Invoke inplace constructor to initialise instance", True) + translator.AddTabs(1) + "::new(t)" + ode_name + "(state_variables);\n" + "}\n") <NEW_LINE> return save_load_data_string
Get the string describing the de-serialization of the constructor and initialisation of the ODE system.
625941bba8370b7717052754
def Puff_model(x, y, z, current_time, leak, atm, time, wind, angle): <NEW_LINE> <INDENT> X, Y, Z = np.meshgrid(x,y,z) <NEW_LINE> H = leak.height <NEW_LINE> Q = leak.size <NEW_LINE> Ffactor = leak.factors <NEW_LINE> u = wind <NEW_LINE> theta = angle <NEW_LINE> X2 = X*np.cos(theta) + Y*np.sin(theta) <NEW_LINE> Y2 = -X*np.sin(theta) + Y*np.cos(theta) <NEW_LINE> X2[X2<0]=0 <NEW_LINE> conc = np.zeros([len(x), len(y), len(z)]) <NEW_LINE> f2 = np.zeros([len(x), len(y), len(z)]) <NEW_LINE> f3 = np.zeros([len(x), len(y), len(z)]) <NEW_LINE> time_int = np.zeros([len(x), len(y), len(z)]) <NEW_LINE> if np.mod(current_time,time.Windstep)!=0: <NEW_LINE> <INDENT> times = np.mod(current_time,time.Windstep) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> times = time.Windstep <NEW_LINE> <DEDENT> sigmay = atm.k*X2/(1+X2/atm.a)**atm.p <NEW_LINE> sigmaz = atm.l*X2/(1+X2/atm.a)**atm.q <NEW_LINE> Zm = H + 1.6*Ffactor**(1/3)*X2**(2/3)/u <NEW_LINE> alpha = Q/(2*np.pi*sigmay*sigmaz)**1.5 <NEW_LINE> alpha[alpha==np.inf]=0 <NEW_LINE> f1a = np.exp(-Y2**2/(2*sigmay**2)) <NEW_LINE> f1a[np.isnan(f1a)]=0 <NEW_LINE> f2 = np.exp(-(Z-Zm)**2/(2*sigmaz**2)) <NEW_LINE> f3 = np.exp(-(Z+Zm)**2/(2*sigmaz**2)) <NEW_LINE> c1 = 2*sigmay*sigmaz; <NEW_LINE> pp, qq, rr = X2.shape <NEW_LINE> time_int = np.array([integrate.quad(lambda t: np.exp(-(X2[i,j,k]-u*t)**2/c1[i,j,k]),0,times) for i in range(0,pp) for j in range(0,qq) for k in range(0,rr)]) <NEW_LINE> conc_int = np.reshape(time_int[:,0],X2.shape) <NEW_LINE> conc = alpha*f1a*conc_int*(f2+f3) <NEW_LINE> cppm = conc*1e6/656 <NEW_LINE> return cppm
Puff model that calculates the spatial concentration of a given leak at each timestep Inputs: x, y, z: 1-D arrays of spatial coordinates where concentration is calculated current_time: current time-step in the simulation leak: Object describing the leak (height, size, factors) atm: Object containing atmospheric parameters like stability class time: Object containing various time parameters (Windstep, totaltime, etc.) wind: wind speed at time current_time angle: wind direction at time current_time Outputs: cppm: preliminary spatial concentration map (ppm) at time current_time
625941bb293b9510aa2c314c
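For orientation, a compact single-point form of the textbook Gaussian puff equation that the entry above builds on (ground reflection comes from the two vertical exponentials); this is the standard instantaneous form, not the author's time-integrated variant, and every number below is illustrative.

    import numpy as np

    def puff_concentration(x, y, z, t, Q, u, H, sx, sy, sz):
        # instantaneous Gaussian puff with ground reflection (textbook form)
        norm = Q / ((2 * np.pi) ** 1.5 * sx * sy * sz)
        along = np.exp(-((x - u * t) ** 2) / (2 * sx ** 2))
        cross = np.exp(-(y ** 2) / (2 * sy ** 2))
        vert = np.exp(-((z - H) ** 2) / (2 * sz ** 2)) + np.exp(-((z + H) ** 2) / (2 * sz ** 2))
        return norm * along * cross * vert

    # puff released at the origin, sampled 60 s later, 100 m downwind at nose height
    print(puff_concentration(x=100, y=0, z=1.5, t=60, Q=1.0, u=2.0, H=2.0, sx=10, sy=10, sz=5))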