code
stringlengths
4
4.48k
docstring
stringlengths
1
6.45k
_id
stringlengths
24
24
class DSF_MaxValue(DescriptiveStatisticFn): <NEW_LINE> <INDENT> def __call__(self, d): <NEW_LINE> <INDENT> p = self.value_scale[1] * (d.max_value_bin() + self.value_scale[0]) <NEW_LINE> s = self.selectivity_scale[1] * (self.selectivity(d)+self.selectivity_scale[0]) <NEW_LINE> return {"": Pref(preference=p, selectivity=s)}
Return the peak value of the given distribution
62598fb6a8370b77170f04ce
class Decision(object): <NEW_LINE> <INDENT> def __call__(self, simulation, source): <NEW_LINE> <INDENT> raise NotImplementedError
The base decision class. A decision is a functor that performs selective and structured agent computation during a simulation step.
62598fb6be8e80087fbbf158
class SizeProbe(ProbeModule): <NEW_LINE> <INDENT> def __init__(self, key, echo=True, out=None): <NEW_LINE> <INDENT> ProbeModule.__init__(self, key) <NEW_LINE> self.echo = print if echo is True else echo <NEW_LINE> self.out = out <NEW_LINE> <DEDENT> def do_probe(self, x): <NEW_LINE> <INDENT> size_info = x.shape if hasattr(x, 'shape') else None <NEW_LINE> if self.echo: <NEW_LINE> <INDENT> self.echo('{}: {}'.format(self.key, size_info)) <NEW_LINE> <DEDENT> if self.out is not None: <NEW_LINE> <INDENT> self.out[self.key] = size_info
Inspect the size of upstream data.
62598fb6cc0a2c111447b0ff
class Trie: <NEW_LINE> <INDENT> def __init__(self, c): <NEW_LINE> <INDENT> self.children = [None] * 26 <NEW_LINE> self.char = c <NEW_LINE> self.s_node = [None] * 26 <NEW_LINE> self.is_end_of_word = False <NEW_LINE> self.name_or_surname = [] <NEW_LINE> <DEDENT> def add_child(self, ch, index): <NEW_LINE> <INDENT> if not ch.isalpha(): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> self.children[index] = Trie(ch) <NEW_LINE> return self.children[index] <NEW_LINE> <DEDENT> def insert(self, key): <NEW_LINE> <INDENT> if not key: <NEW_LINE> <INDENT> self.is_end_of_word = True <NEW_LINE> return self <NEW_LINE> <DEDENT> index_to_insert = char_to_index(key[0]) <NEW_LINE> if not self.children[index_to_insert]: <NEW_LINE> <INDENT> child = self.add_child(key[0], index_to_insert) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> child = self.children[index_to_insert] <NEW_LINE> <DEDENT> return child.insert(key[1:]) <NEW_LINE> <DEDENT> def get_closure_words(self, prefix, node, is_first_name): <NEW_LINE> <INDENT> if not node: <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> name = prefix + node.char <NEW_LINE> if node.is_end_of_word: <NEW_LINE> <INDENT> childs = filter(lambda x: x, node.children) <NEW_LINE> v = get_first_name_last_name_combinations(node, name, is_first_name) <NEW_LINE> for o in childs: <NEW_LINE> <INDENT> v.extend(self.get_closure_words(name, o, is_first_name)) <NEW_LINE> <DEDENT> return v <NEW_LINE> <DEDENT> v = [] <NEW_LINE> filtered_childs = filter(lambda x: x, node.children) <NEW_LINE> for o in filtered_childs: <NEW_LINE> <INDENT> v.extend(self.get_closure_words(name, o, is_first_name)) <NEW_LINE> <DEDENT> return v <NEW_LINE> <DEDENT> def search(self, key): <NEW_LINE> <INDENT> if not key: <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> index_to_insert = char_to_index(key[0]) <NEW_LINE> if not self.children[index_to_insert]: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> child = self.children[index_to_insert] <NEW_LINE> return child.search(key[1:])
Trie to make searching faster
62598fb6f548e778e596b694
class DiscreteEnv(Env): <NEW_LINE> <INDENT> def __init__(self, nS, nA, P, isd): <NEW_LINE> <INDENT> self.action_space = spaces.Discrete(nA) <NEW_LINE> self.observation_space = spaces.Discrete(nS) <NEW_LINE> self.nA = nA <NEW_LINE> self.P = P <NEW_LINE> self.isd = isd <NEW_LINE> self.lastaction=None <NEW_LINE> <DEDENT> @property <NEW_LINE> def nS(self): <NEW_LINE> <INDENT> return self.observation_space.n <NEW_LINE> <DEDENT> def _reset(self): <NEW_LINE> <INDENT> self.s = categorical_sample(self.isd) <NEW_LINE> return self.s <NEW_LINE> <DEDENT> def _step(self, a): <NEW_LINE> <INDENT> transitions = self.P[self.s][a] <NEW_LINE> i = categorical_sample([t[0] for t in transitions]) <NEW_LINE> p, s, r, d= transitions[i] <NEW_LINE> self.s = s <NEW_LINE> self.lastaction=a <NEW_LINE> return (s, r, d, {"prob" : p})
Has the following members - nS: number of states - nA: number of actions - P: transitions (*) - isd: initial state distribution (**) (*) dictionary dict of dicts of lists, where P[s][a] == [(probability, nextstate, reward, done), ...] (**) list or array of length nS
62598fb663d6d428bbee289e
class A(AutotoolsPackage): <NEW_LINE> <INDENT> homepage = "http://www.example.com" <NEW_LINE> url = "http://www.example.com/a-1.0.tar.gz" <NEW_LINE> version('1.0', '0123456789abcdef0123456789abcdef') <NEW_LINE> version('2.0', '2.0_a_hash') <NEW_LINE> variant( 'foo', values=('bar', 'baz', 'fee'), default='bar', description='', multi=True ) <NEW_LINE> variant( 'foobar', values=('bar', 'baz', 'fee'), default='bar', description='', multi=False ) <NEW_LINE> variant('bvv', default=True, description='The good old BV variant') <NEW_LINE> depends_on('b', when='foobar=bar') <NEW_LINE> def with_or_without_fee(self, activated): <NEW_LINE> <INDENT> if not activated: <NEW_LINE> <INDENT> return '--no-fee' <NEW_LINE> <DEDENT> return '--fee-all-the-time' <NEW_LINE> <DEDENT> def autoreconf(self, spec, prefix): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def configure(self, spec, prefix): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def build(self, spec, prefix): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def install(self, spec, prefix): <NEW_LINE> <INDENT> pass
Simple package with one optional dependency
62598fb6091ae35668704d0f
class DaggingClassifier(BaseDagging, ClassifierMixin): <NEW_LINE> <INDENT> def __init__(self, base_estimator=None, n_estimators=10, random_state=None): <NEW_LINE> <INDENT> super(DaggingClassifier, self).__init__( base_estimator=base_estimator, n_estimators=n_estimators, random_state=random_state, ) <NEW_LINE> <DEDENT> def predict(self, X): <NEW_LINE> <INDENT> check_is_fitted(self, "estimators_") <NEW_LINE> try: <NEW_LINE> <INDENT> preds = self._compute_soft_voting(X) <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> preds = self._compute_hard_voting(X) <NEW_LINE> <DEDENT> return self.le_.inverse_transform(preds) <NEW_LINE> <DEDENT> def _compute_hard_voting(self, X): <NEW_LINE> <INDENT> return np.argmax(self.predict_proba(X), axis=1) <NEW_LINE> <DEDENT> def _compute_soft_voting(self, X): <NEW_LINE> <INDENT> predictions = self._predict(X) <NEW_LINE> ret = np.apply_along_axis( lambda x: np.argmax(np.bincount(x)), axis=1, arr=predictions ) <NEW_LINE> return ret <NEW_LINE> <DEDENT> def _predict(self, X): <NEW_LINE> <INDENT> return np.asarray([clf.predict(X) for clf in self.estimators_]).T <NEW_LINE> <DEDENT> def _collect_probas(self, X): <NEW_LINE> <INDENT> return np.asarray([clf.predict_proba(X) for clf in self.estimators_]) <NEW_LINE> <DEDENT> def _predict_proba(self, X): <NEW_LINE> <INDENT> check_is_fitted(self, "estimators_") <NEW_LINE> avg = np.average(self._collect_probas(X), axis=0) <NEW_LINE> return avg <NEW_LINE> <DEDENT> @property <NEW_LINE> def predict_proba(self): <NEW_LINE> <INDENT> return self._predict_proba <NEW_LINE> <DEDENT> def _validate_estimator(self): <NEW_LINE> <INDENT> super(DaggingClassifier, self)._validate_estimator( default=DecisionTreeClassifier() )
A Dagging classifier. This meta classifier creates a number of disjoint, stratified folds out of the data and feeds each chunk of data to a copy of the supplied base classifier. Predictions are made via hard or soft voting. Useful for base classifiers that are quadratic or worse in time behavior, regarding number of instances in the training data. Parameters ---------- base_estimator : object or None, optional (default=None) The base estimator to fit on random subsets of the dataset. If None, then the base estimator is a decision tree. n_estimators : int, optional (default=3) The number of base estimators in the ensemble. random_state : int, RandomState instance or None, optional (default=None) If int, random_state is the seed used by the random number generator; If RandomState instance, random_state is the random number generator; If None, the random number generator is the RandomState instance used by `np.random`. Attributes ---------- base_estimator_ : estimator The base estimator from which the ensemble is grown. estimators_ : list of estimators The collection of fitted base estimators. n_estimators_: int The actual used estimators voting_ : string The method used for voting among classifiers. References ---------- .. [1] Ting, K. M., Witten, I. H.: Stacking Bagged and Dagged Models. In: Fourteenth international Conference on Machine Learning, San Francisco, CA, 367-375, 1997
62598fb6097d151d1a2c1120
class TestEmailTools(BaseCase): <NEW_LINE> <INDENT> def test_email_split(self): <NEW_LINE> <INDENT> cases = [ ("John <12345@gmail.com>", ['12345@gmail.com']), ("d@x; 1@2", ['d@x', '1@2']), ("'(ss)' <123@gmail.com>, 'foo' <foo@bar>", ['123@gmail.com', 'foo@bar']), ('"john@gmail.com"<johnny@gmail.com>', ['johnny@gmail.com']), ('"<jg>" <johnny@gmail.com>', ['johnny@gmail.com']), ] <NEW_LINE> for text, expected in cases: <NEW_LINE> <INDENT> self.assertEqual(email_split(text), expected, 'email_split is broken') <NEW_LINE> <DEDENT> <DEDENT> def test_decode_smtp_header_email(self): <NEW_LINE> <INDENT> cases = [ ('Joe Doe <joe@ex.com>', 'Joe Doe <joe@ex.com>'), ('Joe <joe@ex.com>, Mike <mike@ex.com>', 'Joe <joe@ex.com>, Mike <mike@ex.com>'), ('"Doe, Joe" <joe@ex.com>', '"Doe, Joe" <joe@ex.com>'), ('"Doe, Joe" <joe@ex.com>, "Foo, Mike" <mike@ex.com>', '"Doe, Joe" <joe@ex.com>, "Foo, Mike" <mike@ex.com>'), ("=?utf-8?b?Sm/DqQ==?= <joe@ex.com>", '"Joé" <joe@ex.com>'), ("=?utf-8?b?Sm/DqQ==?= <joe@ex.com>, =?utf-8?b?RsO2w7YsIE1pa2U=?= <mike@ex.com>", '"Joé" <joe@ex.com>, "Föö, Mike" <mike@ex.com>'), ('=?utf-8?b?RG/DqSwg?= =?US-ASCII?Q?Joe?= <joe@ex.com>', '"Doé, ""Joe" <joe@ex.com>'), ('=?utf-8?b?VHLDqXZvciAiQmFuYW5hIiBEdW1vdWxpbg==?= <tbd@ex.com>', '"Trévor \\"Banana\\" Dumoulin" <tbd@ex.com>'), ] <NEW_LINE> for test, truth in cases: <NEW_LINE> <INDENT> self.assertEqual(decode_smtp_header(test, quoted=True), truth)
Test some of our generic utility functions for emails
62598fb667a9b606de5460c0
class HealthView(FlaskView): <NEW_LINE> <INDENT> route_base = '/api/1/inf/gateway/healthcheck' <NEW_LINE> trailing_slash = False <NEW_LINE> def get(self): <NEW_LINE> <INDENT> stime = time() <NEW_LINE> version = pkg_resources.get_distribution('vlab-gateway-api').version <NEW_LINE> return ujson.dumps({'latency' : time() - stime, 'version' : version}), 200
Logic for checking service health
62598fb692d797404e388bdc
class MyQtEnumComboBoxPlugin(QPyDesignerCustomWidgetPlugin): <NEW_LINE> <INDENT> def __init__(self, parent=None): <NEW_LINE> <INDENT> super(MyQtEnumComboBoxPlugin, self).__init__(parent) <NEW_LINE> self.initialized = False <NEW_LINE> <DEDENT> def initialize(self, core): <NEW_LINE> <INDENT> if self.initialized: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> self.initialized = True <NEW_LINE> <DEDENT> def isInitialized(self): <NEW_LINE> <INDENT> return self.initialized <NEW_LINE> <DEDENT> def createWidget(self, parent): <NEW_LINE> <INDENT> return MyQtEnumComboBox(parent) <NEW_LINE> <DEDENT> def name(self): <NEW_LINE> <INDENT> return "MyQtEnumComboBox" <NEW_LINE> <DEDENT> def group(self): <NEW_LINE> <INDENT> return "My PyQt Widgets" <NEW_LINE> <DEDENT> def icon(self): <NEW_LINE> <INDENT> return QIcon(_logo_pixmap) <NEW_LINE> <DEDENT> def toolTip(self): <NEW_LINE> <INDENT> return "" <NEW_LINE> <DEDENT> def whatsThis(self): <NEW_LINE> <INDENT> return "" <NEW_LINE> <DEDENT> def isContainer(self): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> def domXml(self): <NEW_LINE> <INDENT> return '<widget class="MyQtEnumComboBox" name="analogClock">\n' ' <property name="toolTip">\n' ' <string>The current time</string>\n' ' </property>\n' ' <property name="whatsThis">\n' ' <string>The analog clock widget displays the current ' 'time.</string>\n' ' </property>\n' '</widget>\n' <NEW_LINE> <DEDENT> def includeFile(self): <NEW_LINE> <INDENT> return "MyQtEnumComboBox"
MyPyQtEnumComboBoxPlugin(QPyDesignerCustomWidgetPlugin) Provides a Python custom plugin for Qt Designer by implementing the QDesignerCustomWidgetPlugin via a PyQt-specific custom plugin class.
62598fb65fcc89381b2661c5
class SessionState(dict): <NEW_LINE> <INDENT> def __init__(self, shell, prompt_template, speed, aliases=None, envvars=None, test_mode=False, commentecho=False): <NEW_LINE> <INDENT> aliases = aliases or [] <NEW_LINE> envvars = envvars or [] <NEW_LINE> dict.__init__(self, shell=shell, prompt_template=prompt_template, speed=speed, aliases=aliases, envvars=envvars, test_mode=test_mode, commentecho=commentecho) <NEW_LINE> <DEDENT> def add_alias(self, alias): <NEW_LINE> <INDENT> self['aliases'].append(alias) <NEW_LINE> <DEDENT> def add_envvar(self, envvar): <NEW_LINE> <INDENT> self['envvars'].append(envvar) <NEW_LINE> <DEDENT> def set_speed(self, speed): <NEW_LINE> <INDENT> self['speed'] = int(speed) <NEW_LINE> <DEDENT> def set_template(self, template): <NEW_LINE> <INDENT> self['prompt_template'] = template <NEW_LINE> <DEDENT> def set_shell(self, shell): <NEW_LINE> <INDENT> self['shell'] = shell <NEW_LINE> <DEDENT> def _remove_var(self, key, variable): <NEW_LINE> <INDENT> for each in self[key]: <NEW_LINE> <INDENT> value, cmd = each.split('=') <NEW_LINE> if variable == value.strip(): <NEW_LINE> <INDENT> self[key].remove(each) <NEW_LINE> return True <NEW_LINE> <DEDENT> <DEDENT> return None <NEW_LINE> <DEDENT> def remove_alias(self, alias): <NEW_LINE> <INDENT> return self._remove_var('aliases', alias) <NEW_LINE> <DEDENT> def remove_envvar(self, envvar): <NEW_LINE> <INDENT> return self._remove_var('envvars', envvar) <NEW_LINE> <DEDENT> def commentecho(self, doit=None): <NEW_LINE> <INDENT> if doit is not None: <NEW_LINE> <INDENT> doit = doit.lower() <NEW_LINE> self['commentecho'] = (doit == 'true' or doit == 'yes' or doit == '1') <NEW_LINE> <DEDENT> return self['commentecho']
Stores information about a fake terminal session.
62598fb621bff66bcd722d5a
class LoggingError(Exception): <NEW_LINE> <INDENT> pass
This exception is for various errors that occur in the astropy logger, typically when activating or deactivating logger-related features.
62598fb6e5267d203ee6b9f0
class inidseccdcc(models.Model): <NEW_LINE> <INDENT> cobert = u'Cobertura del documento de caracterización cultural' <NEW_LINE> variab = u'Variables culturales caracterizadas' <NEW_LINE> cobert_help = u'Regional, municipal, otros' <NEW_LINE> caracter = models.ForeignKey('inidseccdet', verbose_name = u'Caracterización cultural') <NEW_LINE> cobertur = models.CharField(cobert, max_length = 250, help_text = cobert_help) <NEW_LINE> variable = models.CharField(variab, max_length = 300) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> verbose_name = u'13.5.1.1 Cobertura del documento' <NEW_LINE> verbose_name_plural = u'13.5.1.1 Cobertura del documento'
Socioeconómico y Cultural Caracterización Cultural Cobertura del documento de caracterización cultural
62598fb676e4537e8c3ef699
class artistSongs(object): <NEW_LINE> <INDENT> artist = "" <NEW_LINE> songs = [] <NEW_LINE> def __init__(self, artist, songs): <NEW_LINE> <INDENT> self.artist = artist <NEW_LINE> self.songs = songs <NEW_LINE> <DEDENT> def findArtist(self, artistString): <NEW_LINE> <INDENT> queryString = "https://api.genius.com/search" <NEW_LINE> parameters = {'q': artistString} <NEW_LINE> artistResult = transportation.getJsonResult( transportation, queryString, parameters) <NEW_LINE> return artistResult <NEW_LINE> <DEDENT> def getSongObjectsForArtist(self, artistId): <NEW_LINE> <INDENT> page = 1 <NEW_LINE> responseArray = [] <NEW_LINE> songs = None <NEW_LINE> queryString = "https://api.genius.com/artists/" + str(artistId) + "/songs" <NEW_LINE> while page != None and page <= numberOfPagesToGet: <NEW_LINE> <INDENT> parameters = {'per_page': numberOfSongsPerPage, 'page': str(page)} <NEW_LINE> songsCallResponse = transportation.getJsonResult( transportation, queryString, parameters) <NEW_LINE> songs = songsCallResponse['response']['songs'] <NEW_LINE> for song in songs: <NEW_LINE> <INDENT> if song["primary_artist"]["id"] == artistId: <NEW_LINE> <INDENT> responseArray.append(song) <NEW_LINE> <DEDENT> <DEDENT> nextPage = songsCallResponse['response']['next_page'] <NEW_LINE> page = nextPage <NEW_LINE> <DEDENT> return responseArray <NEW_LINE> <DEDENT> def getSongLyrics(self, songUrl): <NEW_LINE> <INDENT> queryString = songUrl <NEW_LINE> pageResult = transportation.getPageResult( transportation, queryString) <NEW_LINE> lyrics = pageResult.find("div", class_="lyrics").get_text() <NEW_LINE> return lyrics
This is the artist class used to house songs... maybe
62598fb644b2445a339b69ec
class Patient(AuditModelBase): <NEW_LINE> <INDENT> MALE = 'M' <NEW_LINE> FEMALE = 'F' <NEW_LINE> SEX_CHOICES = ( ('', ''), (MALE, _('Male')), (FEMALE, _('Female')), ) <NEW_LINE> name = models.CharField(max_length=255) <NEW_LINE> sex = models.CharField(max_length=1, choices=SEX_CHOICES, blank=True, default='') <NEW_LINE> birth_date = models.DateField(blank=True, null=True) <NEW_LINE> death_date = models.DateField(blank=True, null=True) <NEW_LINE> location = models.CharField(max_length=512, blank=True, default='') <NEW_LINE> def __unicode__(self): <NEW_LINE> <INDENT> return self.name
Storage model for basic patient information.
62598fb6be383301e02538ed
class DecodeHook(session_run_hook.SessionRunHook): <NEW_LINE> <INDENT> def __init__(self, source, target, output_dir, output_filename="decode.out", every_n_iter=2500): <NEW_LINE> <INDENT> self.source = source <NEW_LINE> self.target = target <NEW_LINE> self.output_dir = output_dir <NEW_LINE> self.output_filename = output_filename <NEW_LINE> self.every_n_iter = every_n_iter <NEW_LINE> <DEDENT> def begin(self): <NEW_LINE> <INDENT> self._iter_count = 0 <NEW_LINE> <DEDENT> def after_run(self, run_context, run_values): <NEW_LINE> <INDENT> sess = run_context.session <NEW_LINE> step = self._iter_count <NEW_LINE> if self._should_trigger_for_step(step): <NEW_LINE> <INDENT> decoded = self._decode(sess, step) <NEW_LINE> self.print_decoded(decoded, step) <NEW_LINE> <DEDENT> self._iter_count += 1 <NEW_LINE> <DEDENT> def print_decoded(self, decoded, step): <NEW_LINE> <INDENT> formatted = ["src: %s\nref: %s\nhyp: %s\n" % (src, ref, hyp) for src, ref, hyp in zip(self.source, self.target, decoded)] <NEW_LINE> report = "[decode] step=%d \n" % step <NEW_LINE> report += "\n".join(formatted) <NEW_LINE> logging.info(report) <NEW_LINE> <DEDENT> def _should_trigger_for_step(self, step): <NEW_LINE> <INDENT> return (step % self.every_n_iter) == 0 <NEW_LINE> <DEDENT> def _decode(self, session, step): <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> def _save(self, session, step): <NEW_LINE> <INDENT> save_path = "%s/%s.%d" % (self.output_dir, self.output_filename, step) <NEW_LINE> logging.info("Saving checkpoints for %d into %s.", step, save_path)
Prints decoded sentences every N local steps, or at end. params: source: array of source sentences target: array of target sentences output_dir: output_filename: every_n_iter: decode sentences every N interations
62598fb63d592f4c4edbafb3
class NeuralNetwork(NN.NeuralNetwork): <NEW_LINE> <INDENT> def __init__(self,layers,activeFn = 'sigmoid'): <NEW_LINE> <INDENT> NN.NeuralNetwork.__init__(self,layers,activeFn) <NEW_LINE> <DEDENT> def setActivationFn(self,activeFn): <NEW_LINE> <INDENT> return super().setActivationFn(activeFn) <NEW_LINE> <DEDENT> def getLayers(self): <NEW_LINE> <INDENT> return super().getLayers() <NEW_LINE> <DEDENT> def storeWeights(self,filename,comment = 'No Comment'): <NEW_LINE> <INDENT> return super().storeWeights(filename,comment) <NEW_LINE> <DEDENT> def loadWeights(self,filename): <NEW_LINE> <INDENT> return super().loadWeights(filename) <NEW_LINE> <DEDENT> def trainRandom(self,X,Y,learning_rate=1.0,intervals = 100): <NEW_LINE> <INDENT> return super().trainRandom(X,Y,learning_rate,intervals) <NEW_LINE> <DEDENT> def trainSequential(X,Y,learning_rate=1.0,intervals = 100): <NEW_LINE> <INDENT> return super().trainSequential(X,Y,learning_rate,intervals) <NEW_LINE> <DEDENT> def trainWithPlots(self,X,Y,learning_rate = 1.0,intervals = 100,way='max'): <NEW_LINE> <INDENT> return super().trainWithPlots(X,Y,learning_rate,intervals,way) <NEW_LINE> <DEDENT> def trainSample(self,x,y,learning_rate=1.0): <NEW_LINE> <INDENT> return super().trainSample(x,y,learning_rate) <NEW_LINE> <DEDENT> def trainTestSample(self,x,y,learning_rate=1.0,way='max'): <NEW_LINE> <INDENT> return super().trainTestSample(x,y,learning_rate,way) <NEW_LINE> <DEDENT> def compareProb(self,prob,y,way='max'): <NEW_LINE> <INDENT> return super().compareProb(prob,y,way) <NEW_LINE> <DEDENT> def forwardProp(self,x): <NEW_LINE> <INDENT> return super().forwardProp(x) <NEW_LINE> <DEDENT> def backProp(self,a,y,learning_rate): <NEW_LINE> <INDENT> return super().backProp(a,y,learning_rate) <NEW_LINE> <DEDENT> def testBatch(self,X,Y,verbose = False,way='max'): <NEW_LINE> <INDENT> return super().testBatch(X,Y,verbose,way) <NEW_LINE> <DEDENT> def testSample(self,x,y,way='max'): <NEW_LINE> <INDENT> return super().testSample(x,y,way) 
<NEW_LINE> <DEDENT> def predictProb(self,x): <NEW_LINE> <INDENT> return super().predictProb(x) <NEW_LINE> <DEDENT> def lossFunction(self,x,y): <NEW_LINE> <INDENT> return super().lossFunction(x,y) <NEW_LINE> <DEDENT> def printWeights(self): <NEW_LINE> <INDENT> return super().printWeights() <NEW_LINE> <DEDENT> def printWeight(self,i): <NEW_LINE> <INDENT> return super().printWeight(i)
General Purpose Neural Network activation_dict = {'sigmoid': [sigmoid,sigmoidDerivative], 'tanh': [tanh,tanhDerivative], 'arctan': [arctan,arctanDerivative], 'sin': [sin,sinDerivative], 'gaussian': [gaussian,gaussianDerivative], 'softplus': [softplus,softplusDerivative], }
62598fb6097d151d1a2c1122
class TransformedRV(S.TensorVariable): <NEW_LINE> <INDENT> def __init__(self, type=None, owner=None, index=None, name=None, distribution=None, model=None, transform=None, total_size=None): <NEW_LINE> <INDENT> if type is None: <NEW_LINE> <INDENT> type = distribution.type <NEW_LINE> <DEDENT> super(TransformedRV, self).__init__(type, owner, index, name) <NEW_LINE> self.transformation = transform <NEW_LINE> if distribution is not None: <NEW_LINE> <INDENT> self.model = model <NEW_LINE> self.distribution = distribution <NEW_LINE> transformed_name = get_transformed_name(name, transform) <NEW_LINE> self.transformed = model.Var( transformed_name, transform.apply(distribution), total_size=total_size) <NEW_LINE> normalRV = transform.backward(self.transformed) <NEW_LINE> self.tag.test_value = normalRV.tag.test_value <NEW_LINE> self.scaling = _get_scaling(total_size, self.shape, self.ndim) <NEW_LINE> incorporate_methods(source=distribution, destination=self, methods=['random'], wrapper=InstanceMethod) <NEW_LINE> <DEDENT> <DEDENT> def _repr_latex_(self, name=None, dist=None): <NEW_LINE> <INDENT> if self.distribution is None: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> if name is None: <NEW_LINE> <INDENT> name = self.name <NEW_LINE> <DEDENT> if dist is None: <NEW_LINE> <INDENT> dist = self.distribution <NEW_LINE> <DEDENT> return self.distribution._repr_latex_(name=name, dist=dist) <NEW_LINE> <DEDENT> __latex__ = _repr_latex_ <NEW_LINE> @property <NEW_LINE> def init_value(self): <NEW_LINE> <INDENT> return self.tag.test_value
Parameters ---------- type : theano type (optional) owner : theano owner (optional) name : str distribution : Distribution model : Model total_size : scalar Tensor (optional) needed for upscaling logp
62598fb656ac1b37e63022dd
class VolMorphology(ExternalLib): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> ExternalLib.Init(self, 'VolMorphology') <NEW_LINE> <DEDENT> def SetupParameters(self, Image, Structure, Gray=None): <NEW_LINE> <INDENT> p = list(Image.shape[::-1]) <NEW_LINE> p.extend([Structure.shape[0], self.npt, bool(Gray)]) <NEW_LINE> self.Params = np.array(p, dtype=np.int32) <NEW_LINE> self.Kernel = Structure.astype(np.uint16, copy=True) <NEW_LINE> self.Result = Image.astype(np.uint16, copy=True) <NEW_LINE> self.AddParameters(self.Result, self.Kernel, self.Params) <NEW_LINE> <DEDENT> def Dilate(self, Image, Structure, Gray=None): <NEW_LINE> <INDENT> self.SetupParameters(Image, Structure, Gray) <NEW_LINE> self.Execute(self.lib.p_morph_dilate) <NEW_LINE> return self.Result <NEW_LINE> <DEDENT> def Erode(self, Image, Structure, Gray=None): <NEW_LINE> <INDENT> self.SetupParameters(Image, Structure, Gray) <NEW_LINE> self.Execute(self.lib.p_morph_erode) <NEW_LINE> return self.Result <NEW_LINE> <DEDENT> def Open(self, Image, Structure, Gray=None): <NEW_LINE> <INDENT> self.SetupParameters(Image, Structure, Gray) <NEW_LINE> self.Execute(self.lib.p_morph_open) <NEW_LINE> return self.Result <NEW_LINE> <DEDENT> def Close(self, Image, Structure, Gray=None): <NEW_LINE> <INDENT> self.SetupParameters(Image, Structure, Gray) <NEW_LINE> self.Execute(self.lib.p_morph_close) <NEW_LINE> return self.Result <NEW_LINE> <DEDENT> def TopHat(self, Image, Structure, Gray=None): <NEW_LINE> <INDENT> return Image - self.Open(Image, Structure, Gray) <NEW_LINE> <DEDENT> instance = None <NEW_LINE> @classmethod <NEW_LINE> def Create(clz): <NEW_LINE> <INDENT> if clz.instance == None: <NEW_LINE> <INDENT> clz.instance = VolMorphology() <NEW_LINE> <DEDENT> return clz.instance
This is a wrapper class for VolMorphology.{dll,so,dylib} written in C/C++
62598fb67cff6e4e811b5b12
class TupleField(Field): <NEW_LINE> <INDENT> def __init__(self, fields, name=None, default=None): <NEW_LINE> <INDENT> Field.__init__(self, name=name, default=default or (None, ) * len(fields)) <NEW_LINE> res = [] <NEW_LINE> for field in fields: <NEW_LINE> <INDENT> if type(field) is type: <NEW_LINE> <INDENT> if issubclass(field, Field): <NEW_LINE> <INDENT> field = field() <NEW_LINE> <DEDENT> elif issubclass(field, Mapping): <NEW_LINE> <INDENT> field = DictField(field) <NEW_LINE> <DEDENT> <DEDENT> res.append(field) <NEW_LINE> <DEDENT> self.fields = tuple(res) <NEW_LINE> <DEDENT> def _to_python(self, value): <NEW_LINE> <INDENT> return tuple([self.fields[i]._to_python(m) for i, m in enumerate(value)]) <NEW_LINE> <DEDENT> def _to_json(self, value): <NEW_LINE> <INDENT> assert len(self.fields) == len(value) <NEW_LINE> return [self.fields[i]._to_json(m) for i, m in enumerate(value)]
Field type for tuple of other fields, with possibly different types. >>> from couchdb import Server >>> server = Server('http://localhost:5984/') >>> db = server.create('python-tests') >>> class Post(Document): ... title = TextField() ... content = TextField() ... pubdate = DateTimeField(default=datetime.now) ... comments = ListField(TupleField(( ... TextField(), ... TextField(), ... DateTimeField() ... ))) >>> post = Post(title='Foo bar') >>> post.comments.append(('myself', 'Bla bla', ... datetime.now())) >>> len(post.comments) 1 >>> post.store(db) #doctest: +ELLIPSIS <Post ...> >>> post = Post.load(db, post.id) >>> comment = post.comments[0] >>> comment[0] u'myself' >>> comment[1] u'Bla bla' >>> comment[2] #doctest: +ELLIPSIS datetime.datetime(...) >>> del server['python-tests']
62598fb6f548e778e596b697
class _DBRow(object): <NEW_LINE> <INDENT> def __init__(self, parent_schema, table, pk, value_dict): <NEW_LINE> <INDENT> self._schema = parent_schema <NEW_LINE> self._up = table <NEW_LINE> self._pk = pk <NEW_LINE> self._columns = dict() <NEW_LINE> for i, val in value_dict.iteritems(): <NEW_LINE> <INDENT> self._columns[i] = _DBValue(self._schema, self, i, val) <NEW_LINE> <DEDENT> <DEDENT> def __getattr__(self, column_name): return self._columns[column_name] <NEW_LINE> def __repr__(self): return '_DBRow({0._schema!r}, {0._pk!r}, {0._columns!r})'.format(self) <NEW_LINE> def find_refs(self, table_name, column_names): <NEW_LINE> <INDENT> column_names = _tupleize_cols(column_names) <NEW_LINE> table = self._schema[table_name] <NEW_LINE> return table.find_rows(column_names, self._pk_value) <NEW_LINE> <DEDENT> def column_values(self, column_names): <NEW_LINE> <INDENT> column_names = _tupleize_cols(column_names) <NEW_LINE> return tuple([self._columns[name].value for name in column_names]) <NEW_LINE> <DEDENT> @property <NEW_LINE> def _pk_value(self): <NEW_LINE> <INDENT> return self.column_values(self._pk)
Contains dict of _DBValue. Public methods of this class, belong to the interface of the module, but class it self should be instantiated only by `_DBTable`.
62598fb6fff4ab517ebcd8da
class Trellis: <NEW_LINE> <INDENT> trell = [] <NEW_LINE> def __init__(self, hmm, observations): <NEW_LINE> <INDENT> self.tracker = [] <NEW_LINE> temp = {} <NEW_LINE> for label in hmm.labels: <NEW_LINE> <INDENT> temp[label] = [0,None] <NEW_LINE> <DEDENT> for observation in observations: <NEW_LINE> <INDENT> self.tracker.append([observation, copy.deepcopy(temp)]) <NEW_LINE> <DEDENT> self._fill_in(hmm) <NEW_LINE> <DEDENT> def _fill_in(self, hmm): <NEW_LINE> <INDENT> for i in range(len(self.tracker)): <NEW_LINE> <INDENT> for hidden_state in hmm.labels: <NEW_LINE> <INDENT> observation = self.tracker[i][0] <NEW_LINE> if i == 0: <NEW_LINE> <INDENT> p_xz = hmm.emit(hidden_state, observation, is_first=True) <NEW_LINE> self.tracker[i][1][hidden_state][0] = p_xz <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> max = None <NEW_LINE> guess = None <NEW_LINE> transition_prob = None <NEW_LINE> for other_state in hmm.labels: <NEW_LINE> <INDENT> p_zz = hmm.transition(hidden_state, other_state) <NEW_LINE> mu_zk_1 = self.tracker[i-1][1][other_state][0] <NEW_LINE> transition_prob = mu_zk_1 * p_zz <NEW_LINE> if max == None or transition_prob > max: <NEW_LINE> <INDENT> max = transition_prob <NEW_LINE> guess = other_state <NEW_LINE> <DEDENT> print(max, guess) <NEW_LINE> <DEDENT> p_xz = hmm.emit(hidden_state, observation) <NEW_LINE> max *= p_xz <NEW_LINE> self.tracker[i][1][hidden_state][0] = max <NEW_LINE> self.tracker[i][1][hidden_state][1] = guess <NEW_LINE> <DEDENT> <DEDENT> for entry in self.tracker: <NEW_LINE> <INDENT> print(entry) <NEW_LINE> <DEDENT> print('===') <NEW_LINE> <DEDENT> <DEDENT> def return_max(self): <NEW_LINE> <INDENT> hidden_states = [] <NEW_LINE> hidden = None <NEW_LINE> for i in range(len(self.tracker)-1,-1,-1): <NEW_LINE> <INDENT> if hidden == None: <NEW_LINE> <INDENT> max = None <NEW_LINE> guess = None <NEW_LINE> for k in self.tracker[i][1]: <NEW_LINE> <INDENT> if max == None or self.tracker[i][1][k][0] > max: <NEW_LINE> <INDENT> max = self.tracker[i][1][k][0] 
<NEW_LINE> token = self.tracker[i][1][k][1] <NEW_LINE> guess = k <NEW_LINE> <DEDENT> <DEDENT> hidden_states.append(guess) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> hidden_states.append(hidden) <NEW_LINE> hidden = self.tracker[i][1][hidden][1] <NEW_LINE> <DEDENT> <DEDENT> hidden_states.reverse() <NEW_LINE> return hidden_states
As taken from https://stackoverflow.com/a/9730066
62598fb62c8b7c6e89bd38b7
class RelatedLocalRoleAdapter(object): <NEW_LINE> <INDENT> implements(ILocalRoleProvider) <NEW_LINE> def __init__(self, context): <NEW_LINE> <INDENT> self.context = context <NEW_LINE> <DEDENT> def getRoles(self, principal): <NEW_LINE> <INDENT> if not self.related_roles.get(principal, []): <NEW_LINE> <INDENT> return () <NEW_LINE> <DEDENT> return tuple(self.related_roles.get(principal)) <NEW_LINE> <DEDENT> def getAllRoles(self): <NEW_LINE> <INDENT> for principal, roles in self.related_roles.items(): <NEW_LINE> <INDENT> yield (principal, tuple(roles)) <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def related_roles(self): <NEW_LINE> <INDENT> return get_all_related_roles(self.context)
borg.localrole adapter to set related localroles following annotation
62598fb6d268445f26639bfd
class ConcatenateDataset(DatasetV2): <NEW_LINE> <INDENT> def __init__(self, input_dataset, dataset_to_concatenate, name=None): <NEW_LINE> <INDENT> self._input_dataset = input_dataset <NEW_LINE> self._dataset_to_concatenate = dataset_to_concatenate <NEW_LINE> try: <NEW_LINE> <INDENT> self._structure = tf_nest.map_structure( lambda ts1, ts2: ts1.most_specific_compatible_type(ts2), input_dataset.element_spec, dataset_to_concatenate.element_spec) <NEW_LINE> <DEDENT> except (TypeError, ValueError) as e: <NEW_LINE> <INDENT> raise TypeError( f"Incompatible dataset elements:\n" f" {input_dataset.element_spec} vs. " f" {dataset_to_concatenate.element_spec}") from e <NEW_LINE> <DEDENT> self._input_datasets = [input_dataset, dataset_to_concatenate] <NEW_LINE> self._metadata = dataset_metadata_pb2.Metadata() <NEW_LINE> if name: <NEW_LINE> <INDENT> self._metadata.name = _validate_and_encode(name) <NEW_LINE> <DEDENT> kwargs = self._flat_structure <NEW_LINE> if name or compat.forward_compatible(2021, 9, 30): <NEW_LINE> <INDENT> kwargs["metadata"] = self._metadata.SerializeToString() <NEW_LINE> <DEDENT> variant_tensor = gen_dataset_ops.concatenate_dataset( input_dataset._variant_tensor, dataset_to_concatenate._variant_tensor, **kwargs) <NEW_LINE> super(ConcatenateDataset, self).__init__(variant_tensor) <NEW_LINE> <DEDENT> def _inputs(self): <NEW_LINE> <INDENT> return self._input_datasets <NEW_LINE> <DEDENT> @property <NEW_LINE> def element_spec(self): <NEW_LINE> <INDENT> return self._structure
A `Dataset` that concatenates its input with given dataset.
62598fb67b25080760ed75a5
class VoltageMapVoltages(object): <NEW_LINE> <INDENT> openapi_types = { 'voltage': 'float', 'pmt_index': 'int' } <NEW_LINE> attribute_map = { 'voltage': 'voltage', 'pmt_index': 'pmt_index' } <NEW_LINE> def __init__(self, voltage=None, pmt_index=None, local_vars_configuration=None): <NEW_LINE> <INDENT> if local_vars_configuration is None: <NEW_LINE> <INDENT> local_vars_configuration = Configuration() <NEW_LINE> <DEDENT> self.local_vars_configuration = local_vars_configuration <NEW_LINE> self._voltage = None <NEW_LINE> self._pmt_index = None <NEW_LINE> self.discriminator = None <NEW_LINE> if voltage is not None: <NEW_LINE> <INDENT> self.voltage = voltage <NEW_LINE> <DEDENT> if pmt_index is not None: <NEW_LINE> <INDENT> self.pmt_index = pmt_index <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def voltage(self): <NEW_LINE> <INDENT> return self._voltage <NEW_LINE> <DEDENT> @voltage.setter <NEW_LINE> def voltage(self, voltage): <NEW_LINE> <INDENT> self._voltage = voltage <NEW_LINE> <DEDENT> @property <NEW_LINE> def pmt_index(self): <NEW_LINE> <INDENT> return self._pmt_index <NEW_LINE> <DEDENT> @pmt_index.setter <NEW_LINE> def pmt_index(self, pmt_index): <NEW_LINE> <INDENT> self._pmt_index = pmt_index <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in six.iteritems(self.openapi_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): 
<NEW_LINE> <INDENT> return pprint.pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, VoltageMapVoltages): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.to_dict() == other.to_dict() <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, VoltageMapVoltages): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return self.to_dict() != other.to_dict()
NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech Do not edit the class manually.
62598fb64f6381625f19953a
@BlockchainInstance.inject <NEW_LINE> class Amount(GrapheneAmount): <NEW_LINE> <INDENT> def define_classes(self): <NEW_LINE> <INDENT> from graphenecommon.price import Price <NEW_LINE> self.asset_class = Asset <NEW_LINE> self.price_class = Price
This class deals with Amounts of any asset to simplify dealing with the tuple:: (amount, asset) :param list args: Allows to deal with different representations of an amount :param float amount: Let's create an instance with a specific amount :param str asset: Let's you create an instance with a specific asset (symbol) :param peerplays.peerplays.peerplays blockchain_instance: peerplays instance :returns: All data required to represent an Amount/Asset :rtype: dict :raises ValueError: if the data provided is not recognized .. code-block:: python from peerplays.amount import Amount from peerplays.asset import Asset a = Amount("1 USD") b = Amount(1, "USD") c = Amount("20", Asset("USD")) a + b a * 2 a += b a /= 2.0 Way to obtain a proper instance: * ``args`` can be a string, e.g.: "1 USD" * ``args`` can be a dictionary containing ``amount`` and ``asset_id`` * ``args`` can be a dictionary containing ``amount`` and ``asset`` * ``args`` can be a list of a ``float`` and ``str`` (symbol) * ``args`` can be a list of a ``float`` and a :class:`peerplays.asset.Asset` * ``amount`` and ``asset`` are defined manually An instance is a dictionary and comes with the following keys: * ``amount`` (float) * ``symbol`` (str) * ``asset`` (instance of :class:`peerplays.asset.Asset`) Instances of this class can be used in regular mathematical expressions (``+-*/%``) such as: .. code-block:: python Amount("1 USD") * 2 Amount("15 GOLD") + Amount("0.5 GOLD")
62598fb699fddb7c1ca62e64
class EFC(object): <NEW_LINE> <INDENT> def __init__(self, years=[2017]): <NEW_LINE> <INDENT> self.years = years <NEW_LINE> self.df = pd.DataFrame() <NEW_LINE> <DEDENT> def extract(self): <NEW_LINE> <INDENT> init_df = pd.DataFrame({'pypeds_init': [True]}) <NEW_LINE> for year in self.years: <NEW_LINE> <INDENT> year_info = get_efc(year) <NEW_LINE> year_fpath = zip_parser(url=year_info['url'], survey=year_info['survey']) <NEW_LINE> tmp_df = read_survey(year_fpath) <NEW_LINE> tmp_df.columns = tmp_df.columns.str.lower() <NEW_LINE> tmp_df.columns = tmp_df.columns.str.strip() <NEW_LINE> tmp_df['survey_year'] = int(year) <NEW_LINE> tmp_df['fall_year'] = int(year) <NEW_LINE> init_df = init_df.append(tmp_df, ignore_index=True, sort=False) <NEW_LINE> <DEDENT> pd.options.mode.chained_assignment = None <NEW_LINE> init_df = init_df.loc[init_df.pypeds_init != True,] <NEW_LINE> init_df.drop(columns=['pypeds_init'], inplace=True) <NEW_LINE> self.df = self.df.append(init_df, ignore_index=True) <NEW_LINE> <DEDENT> def load(self): <NEW_LINE> <INDENT> return (self.df) <NEW_LINE> <DEDENT> def transform(self, state=None, line=None, cols=None, regions=None): <NEW_LINE> <INDENT> tmpdf = self.df <NEW_LINE> if state is not None: <NEW_LINE> <INDENT> assert isinstance(state, list), 'state must a list' <NEW_LINE> if len(state) > 0: <NEW_LINE> <INDENT> tmp = tmpdf <NEW_LINE> tmp_f = tmp.loc[tmp.efcstate.isin(state)] <NEW_LINE> tmpdf = tmp_f <NEW_LINE> <DEDENT> <DEDENT> if line is not None: <NEW_LINE> <INDENT> assert isinstance(line, list), 'line must a list' <NEW_LINE> if len(line) > 0: <NEW_LINE> <INDENT> tmp = tmpdf <NEW_LINE> tmp_f = tmp.loc[tmp.line.isin(line)] <NEW_LINE> tmpdf = tmp_f <NEW_LINE> <DEDENT> <DEDENT> if cols is not None: <NEW_LINE> <INDENT> assert isinstance(cols, list), 'cols must be a list' <NEW_LINE> if len(cols) > 0: <NEW_LINE> <INDENT> tmp = tmpdf <NEW_LINE> tmp_f = tmp >> select(cols) <NEW_LINE> tmpdf = tmp_f <NEW_LINE> <DEDENT> <DEDENT> if regions: <NEW_LINE> <INDENT> r = 
datasets.region_xwalk() <NEW_LINE> r = r >> select(['ipeds_code','name','ipeds_region', 'postal code']) <NEW_LINE> r = r.rename(columns={"ipeds_code": "line", "ipeds_region":"res_region", "postal code":"res_zip", "name":"res_name"}) <NEW_LINE> r['line'] = r['line'].astype('float64') <NEW_LINE> tmp = tmpdf <NEW_LINE> tmp_f = pd.merge(left=tmp, right=r, on="line", how="left") <NEW_LINE> tmpdf = tmp_f <NEW_LINE> <DEDENT> self.df = tmpdf
Residence and migration of first-time freshman from the Fall Enrollment survey.
62598fb67047854f4633f4ce
class get_hostpool_info_result: <NEW_LINE> <INDENT> thrift_spec = ( (0, TType.STRING, 'success', None, None, ), ) <NEW_LINE> def __init__(self, success=None,): <NEW_LINE> <INDENT> self.success = success <NEW_LINE> <DEDENT> def read(self, iprot): <NEW_LINE> <INDENT> if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None: <NEW_LINE> <INDENT> fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec)) <NEW_LINE> return <NEW_LINE> <DEDENT> iprot.readStructBegin() <NEW_LINE> while True: <NEW_LINE> <INDENT> (fname, ftype, fid) = iprot.readFieldBegin() <NEW_LINE> if ftype == TType.STOP: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> if fid == 0: <NEW_LINE> <INDENT> if ftype == TType.STRING: <NEW_LINE> <INDENT> self.success = iprot.readString(); <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> iprot.readFieldEnd() <NEW_LINE> <DEDENT> iprot.readStructEnd() <NEW_LINE> <DEDENT> def write(self, oprot): <NEW_LINE> <INDENT> if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None: <NEW_LINE> <INDENT> oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))) <NEW_LINE> return <NEW_LINE> <DEDENT> oprot.writeStructBegin('get_hostpool_info_result') <NEW_LINE> if self.success is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('success', TType.STRING, 0) <NEW_LINE> oprot.writeString(self.success) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> oprot.writeFieldStop() <NEW_LINE> oprot.writeStructEnd() <NEW_LINE> <DEDENT> def validate(self): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> L = ['%s=%r' % (key, value) for key, value in self.__dict__.iteritems()] <NEW_LINE> return '%s(%s)' % 
(self.__class__.__name__, ', '.join(L)) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not (self == other)
Attributes: - success
62598fb69f288636728188b7
class CloseOpenBankPaymentOrderResult(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.OutOrderId = None <NEW_LINE> self.ChannelOrderId = None <NEW_LINE> self.OrderStatus = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.OutOrderId = params.get("OutOrderId") <NEW_LINE> self.ChannelOrderId = params.get("ChannelOrderId") <NEW_LINE> self.OrderStatus = params.get("OrderStatus") <NEW_LINE> memeber_set = set(params.keys()) <NEW_LINE> for name, value in vars(self).items(): <NEW_LINE> <INDENT> if name in memeber_set: <NEW_LINE> <INDENT> memeber_set.remove(name) <NEW_LINE> <DEDENT> <DEDENT> if len(memeber_set) > 0: <NEW_LINE> <INDENT> warnings.warn("%s fileds are useless." % ",".join(memeber_set))
云企付-关单响应
62598fb697e22403b383aff9
class MetricCalculator(): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.accuracy = 0 <NEW_LINE> self.loss_accumulated = 0 <NEW_LINE> self.average_loss = 0 <NEW_LINE> self.updated_cnt = 0 <NEW_LINE> self.predicted_labels_holder = [] <NEW_LINE> self.actual_labels_holder = [] <NEW_LINE> <DEDENT> def update(self, outputs, labels, loss): <NEW_LINE> <INDENT> self.updated_cnt += 1 <NEW_LINE> predicted_labels = outputs.max(1)[1] <NEW_LINE> self.predicted_labels_holder.append(predicted_labels) <NEW_LINE> self.actual_labels_holder.append(labels) <NEW_LINE> self.loss_accumulated += loss <NEW_LINE> <DEDENT> def calculate_metric(self): <NEW_LINE> <INDENT> predicted_labels = torch.cat(self.predicted_labels_holder).cpu().numpy() <NEW_LINE> actual_labels = torch.cat(self.actual_labels_holder).cpu().numpy() <NEW_LINE> self.accuracy = accuracy_score(actual_labels, predicted_labels) <NEW_LINE> self.average_loss = self.loss_accumulated / self.updated_cnt <NEW_LINE> <DEDENT> def reset(self): <NEW_LINE> <INDENT> self.accuracy = 0 <NEW_LINE> self.loss_accumulated = 0 <NEW_LINE> self.average_loss = 0 <NEW_LINE> self.updated_cnt = 0 <NEW_LINE> <DEDENT> def export(self): <NEW_LINE> <INDENT> return { 'loss': self.average_loss, 'accuracy': self.accuracy, }
loss와 accuracy를 기록하기 위한 도구입니다.
62598fb655399d3f05626608
class VisibleOnHomepageExtension(ContentExtension): <NEW_LINE> <INDENT> is_visible_on_homepage = meta_property() <NEW_LINE> def extend_form(self, form_class, request): <NEW_LINE> <INDENT> if self.parent_id is None: <NEW_LINE> <INDENT> return form_class <NEW_LINE> <DEDENT> class VisibleOnHomepageForm(form_class): <NEW_LINE> <INDENT> is_visible_on_homepage = BooleanField( label=_("Visible on homepage"), fieldset=_("Visibility")) <NEW_LINE> <DEDENT> return VisibleOnHomepageForm
Extends any class that has a meta dictionary field with the ability to a boolean indicating if the page should be shown on the homepage or not.
62598fb64a966d76dd5eefcb
class MorphFaceRequest(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.Images = None <NEW_LINE> self.Urls = None <NEW_LINE> self.GradientInfos = None <NEW_LINE> self.Fps = None <NEW_LINE> self.OutputType = None <NEW_LINE> self.OutputWidth = None <NEW_LINE> self.OutputHeight = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.Images = params.get("Images") <NEW_LINE> self.Urls = params.get("Urls") <NEW_LINE> if params.get("GradientInfos") is not None: <NEW_LINE> <INDENT> self.GradientInfos = [] <NEW_LINE> for item in params.get("GradientInfos"): <NEW_LINE> <INDENT> obj = GradientInfo() <NEW_LINE> obj._deserialize(item) <NEW_LINE> self.GradientInfos.append(obj) <NEW_LINE> <DEDENT> <DEDENT> self.Fps = params.get("Fps") <NEW_LINE> self.OutputType = params.get("OutputType") <NEW_LINE> self.OutputWidth = params.get("OutputWidth") <NEW_LINE> self.OutputHeight = params.get("OutputHeight")
MorphFace请求参数结构体
62598fb644b2445a339b69ed
class NativeTypeError(TypeError): <NEW_LINE> <INDENT> pass
Type is unable to be constructed from a serialized value.
62598fb64527f215b58e9fca
class MtimeFileWatcher(object): <NEW_LINE> <INDENT> def __init__(self, directory): <NEW_LINE> <INDENT> self._directory = directory <NEW_LINE> self._quit_event = threading.Event() <NEW_LINE> self._filename_to_mtime = None <NEW_LINE> self._has_changes = False <NEW_LINE> self._has_changes_lock = threading.Lock() <NEW_LINE> self._watcher_thread = threading.Thread(target=self._watch_changes) <NEW_LINE> self._watcher_thread.daemon = True <NEW_LINE> <DEDENT> def start(self): <NEW_LINE> <INDENT> self._watcher_thread.start() <NEW_LINE> <DEDENT> def quit(self): <NEW_LINE> <INDENT> self._quit_event.set() <NEW_LINE> <DEDENT> def has_changes(self): <NEW_LINE> <INDENT> with self._has_changes_lock: <NEW_LINE> <INDENT> has_changes = self._has_changes <NEW_LINE> self._has_changes = False <NEW_LINE> <DEDENT> return has_changes <NEW_LINE> <DEDENT> def _watch_changes(self): <NEW_LINE> <INDENT> while not self._quit_event.wait(1): <NEW_LINE> <INDENT> self._check_for_changes() <NEW_LINE> <DEDENT> <DEDENT> def _check_for_changes(self): <NEW_LINE> <INDENT> if self._has_changed_paths(): <NEW_LINE> <INDENT> with self._has_changes_lock: <NEW_LINE> <INDENT> self._has_changes = True <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def _has_changed_paths(self): <NEW_LINE> <INDENT> self._filename_to_mtime, old_filename_to_mtime = ( self._generate_filename_to_mtime(), self._filename_to_mtime) <NEW_LINE> return (old_filename_to_mtime is not None and self._filename_to_mtime != old_filename_to_mtime) <NEW_LINE> <DEDENT> def _generate_filename_to_mtime(self): <NEW_LINE> <INDENT> filename_to_mtime = {} <NEW_LINE> num_files = 0 <NEW_LINE> for dirname, dirnames, filenames in os.walk(self._directory, followlinks=True): <NEW_LINE> <INDENT> watcher_common.remove_ignored_dirs(dirnames) <NEW_LINE> filenames = [f for f in filenames if not watcher_common.ignore_file(f)] <NEW_LINE> for filename in filenames + dirnames: <NEW_LINE> <INDENT> if num_files == 10000: <NEW_LINE> <INDENT> warnings.warn( 'There are too many files 
in your application for ' 'changes in all of them to be monitored. You may have to ' 'restart the development server to see some changes to your ' 'files.') <NEW_LINE> return filename_to_mtime <NEW_LINE> <DEDENT> num_files += 1 <NEW_LINE> path = os.path.join(dirname, filename) <NEW_LINE> try: <NEW_LINE> <INDENT> mtime = os.path.getmtime(path) <NEW_LINE> <DEDENT> except (IOError, OSError): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> filename_to_mtime[path] = mtime <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return filename_to_mtime
Monitors a directory tree for changes using mtime polling.
62598fb6f548e778e596b698
class StripController: <NEW_LINE> <INDENT> def __init__(self, host, port, beatProcessor, valProcessor): <NEW_LINE> <INDENT> self.strip = Adafruit_NeoPixel(LED_COUNT, LED_PIN, LED_FREQ_HZ, LED_DMA, LED_INVERT, LED_BRIGHTNESS) <NEW_LINE> self.s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) <NEW_LINE> self.recent_vals = [] <NEW_LINE> self.max_vol = 1 <NEW_LINE> self.floor_vol = 0 <NEW_LINE> self.host = host <NEW_LINE> self.port = port <NEW_LINE> self.beatProcessor = beatProcessor <NEW_LINE> self.valProcessor = valProcessor <NEW_LINE> self.NORMALIZE_SAMPLES = 1000 <NEW_LINE> <DEDENT> def normalize(self, val): <NEW_LINE> <INDENT> if len(self.recent_vals) == self.NORMALIZE_SAMPLES: <NEW_LINE> <INDENT> self.recent_vals = self.recent_vals[1:] <NEW_LINE> <DEDENT> self.recent_vals.append(val) <NEW_LINE> self.floor_vol = 0.5*np.median(self.recent_vals) <NEW_LINE> self.max_vol = 2.5*np.mean(self.recent_vals) - self.floor_vol <NEW_LINE> return max((val - self.floor_vol)/self.max_vol, 0.01) <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> self.strip.begin() <NEW_LINE> self.s.bind((self.host, self.port)) <NEW_LINE> self.s.listen(1) <NEW_LINE> conn, addr = self.s.accept() <NEW_LINE> print('Connected by', addr) <NEW_LINE> try: <NEW_LINE> <INDENT> c = 0 <NEW_LINE> while 1: <NEW_LINE> <INDENT> data = conn.recv(1024) <NEW_LINE> if not data: <NEW_LINE> <INDENT> c+=1 <NEW_LINE> if c == 50: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> continue <NEW_LINE> <DEDENT> data = data.split() <NEW_LINE> try: <NEW_LINE> <INDENT> val = self.normalize(float(data[-1])) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> if data[0] == "beat": <NEW_LINE> <INDENT> self.beatProcessor.process(self.strip, val, -1) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.valProcessor.process(self.strip, val, -1) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> except KeyboardInterrupt: <NEW_LINE> <INDENT> conn.close()
Controller for neopixel things
62598fb601c39578d7f12e6e
class ModeratorRequestHandler(LoggedInRequestHandler): <NEW_LINE> <INDENT> def get(self): <NEW_LINE> <INDENT> super(ModeratorRequestHandler,self).get() <NEW_LINE> if not self.user_info.moderator: <NEW_LINE> <INDENT> raise BasicRequestHandlerException(403, 'Not moderator') <NEW_LINE> <DEDENT> <DEDENT> def post(self): <NEW_LINE> <INDENT> super(ModeratorRequestHandler,self).post() <NEW_LINE> if not self.user_info.moderator: <NEW_LINE> <INDENT> raise BasicRequestHandlerException(403, 'Not moderator')
Обработчик запроса, проверяющий право модерирования
62598fb67cff6e4e811b5b14
class PXEAndIPMINativeDriver(base.BaseDriver): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.power = ipminative.NativeIPMIPower() <NEW_LINE> self.deploy = pxe.PXEDeploy() <NEW_LINE> self.rescue = self.deploy <NEW_LINE> self.vendor = pxe.VendorPassthru()
PXE + Native IPMI driver. This driver implements the `core` functionality, combining :class:`ironic.drivers.modules.ipminative.NativeIPMIPower` for power on/off and reboot with :class:`ironic.driver.modules.pxe.PXE` for image deployment. Implementations are in those respective classes; this class is merely the glue between them.
62598fb67b180e01f3e490cb
class MediaCategory(models.Model): <NEW_LINE> <INDENT> MEDIA_CATEGORY_CHOICES = ( ('Action & Adventure', 'Action & Adventure'), ('Ads & Promotional', 'Ads & Promotional'), ('Anime & Animation', 'Anime & Animation'), ('Art & Experimental', 'Art & Experimental'), ('Business', 'Business'), ('Children & Family', 'Children & Family'), ('Comedy', 'Comedy'), ('Dance', 'Dance'), ('Documentary', 'Documentary'), ('Drama', 'Drama'), ('Educational', 'Educational'), ('Faith & Spirituality', 'Faith & Spirituality'), ('Health & Fitness', 'Health & Fitness'), ('Foreign', 'Foreign'), ('Gaming', 'Gaming'), ('Gay & Lesbian', 'Gay & Lesbian'), ('Home Video', 'Home Video'), ('Horror', 'Horror'), ('Independent', 'Independent'), ('Mature & Adult', 'Mature & Adult'), ('Movie (feature)', 'Movie (feature)'), ('Movie (short)', 'Movie (short)'), ('Movie Trailer', 'Movie Trailer'), ('Music & Musical', 'Music & Musical'), ('Nature', 'Nature'), ('News', 'News'), ('Political', 'Political'), ('Religious', 'Religious'), ('Romance', 'Romance'), ('Independent', 'Independent'), ('Sci-Fi & Fantasy', 'Sci-Fi & Fantasy'), ('Science & Technology', 'Science & Technology'), ('Special Interest', 'Special Interest'), ('Sports', 'Sports'), ('Stock Footage', 'Stock Footage'), ('Thriller', 'Thriller'), ('Travel', 'Travel'), ('TV Show', 'TV Show'), ('Western', 'Western'), ) <NEW_LINE> name = models.CharField(max_length=50, choices=MEDIA_CATEGORY_CHOICES) <NEW_LINE> slug = models.SlugField(blank=True, unique=False, help_text='A <a href="http://docs.djangoproject.com/en/dev/ref/models/fields/#slugfield">slug</a> is a URL-friendly nickname. For example, a slug for "Games & Hobbies" is "games-hobbies".') <NEW_LINE> class Meta: <NEW_LINE> <INDENT> ordering = ['slug'] <NEW_LINE> verbose_name = 'category (Media RSS)' <NEW_LINE> verbose_name_plural = 'categories (Media RSS)' <NEW_LINE> <DEDENT> def __unicode__(self): <NEW_LINE> <INDENT> return u'%s' % (self.name)
Category model for Media RSS
62598fb657b8e32f52508197
class test_popup_wizard(TransactionCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> super(test_popup_wizard, self).setUp() <NEW_LINE> self.env.ref('core.jd').credit_limit = 100000 <NEW_LINE> self.order = self.env.ref('sell.sell_order_2') <NEW_LINE> self.order.sell_order_done() <NEW_LINE> self.delivery = self.env['sell.delivery'].search( [('order_id', '=', self.order.id)]) <NEW_LINE> self.hd_stock = self.env.ref('warehouse.hd_stock') <NEW_LINE> self.warehouse_inventory = self.env.ref('warehouse.warehouse_inventory')
发货单缺货向导
62598fb6adb09d7d5dc0a682
class Suggestion(models.Model): <NEW_LINE> <INDENT> STATE_NEW = 0 <NEW_LINE> STATE_IN_PROGRESS = 1 <NEW_LINE> STATE_COMPLETED = 2 <NEW_LINE> STATE_REJECTED = 3 <NEW_LINE> STATE_SPAM = 4 <NEW_LINE> RESOLVED_STATES = (STATE_COMPLETED, STATE_REJECTED) <NEW_LINE> OPEN_STATES = (STATE_NEW, STATE_IN_PROGRESS) <NEW_LINE> STATE_CHOICES = ( (STATE_NEW, _(u'New')), (STATE_IN_PROGRESS, _(u'In progress')), (STATE_COMPLETED, _(u'Completed')), (STATE_REJECTED, _(u'Rejected')), (STATE_SPAM, _(u'Spam')), ) <NEW_LINE> state = models.IntegerField(choices=STATE_CHOICES, default=STATE_NEW) <NEW_LINE> name = models.CharField( max_length=128, help_text=_(u'Name of video/collection of videos')) <NEW_LINE> url = models.URLField( max_length=255, help_text=_(u'Link to video/collection of videos')) <NEW_LINE> comment = models.TextField( blank=True, help_text=_(u'Additional information, urls, etc (optional)')) <NEW_LINE> whiteboard = models.CharField( max_length=255, blank=True, default=u'', help_text=_(u'Editor notes for this suggestion.')) <NEW_LINE> resolution = models.CharField( max_length=128, blank=True, default=u'', help_text=_(u'Describe how this suggestion was resolved.')) <NEW_LINE> submitted = models.DateTimeField(auto_now_add=True) <NEW_LINE> resolved = models.DateTimeField(blank=True, null=True) <NEW_LINE> is_reviewed = models.BooleanField(default=False) <NEW_LINE> def __unicode__(self): <NEW_LINE> <INDENT> return self.name <NEW_LINE> <DEDENT> class Meta(object): <NEW_LINE> <INDENT> verbose_name = _(u'suggestion') <NEW_LINE> verbose_name_plural = _(u'suggestions') <NEW_LINE> <DEDENT> def save(self, *args, **kwargs): <NEW_LINE> <INDENT> if self.state in Suggestion.RESOLVED_STATES: <NEW_LINE> <INDENT> self.resolved = datetime.now() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.resolved = None <NEW_LINE> <DEDENT> super(Suggestion, self).save(*args, **kwargs)
Represents a suggestion for videos to be added to the site.
62598fb64527f215b58e9fcb
class GoogleShopping(): <NEW_LINE> <INDENT> def __init__( self, data ): <NEW_LINE> <INDENT> self.soup = BeautifulSoup( data, 'lxml' ) <NEW_LINE> <DEDENT> def parse( self ): <NEW_LINE> <INDENT> shpu = self.soup.findAll( "a", "_po" ) <NEW_LINE> durl = self.soup.findAll( "h3", "r" ) <NEW_LINE> links = [ x['href'] for x in shpu ] + [ x.findAll('a')[0]['href'] for x in durl ] <NEW_LINE> return links <NEW_LINE> <DEDENT> def numOfProds( self ): <NEW_LINE> <INDENT> prods = self.soup.findAll( 'div', 'pag-n-to-n-txt' ) <NEW_LINE> rg = 'of ([0-9]+)' <NEW_LINE> num = int(re.findAll( rg, prods[0].text )[0]) <NEW_LINE> <DEDENT> def getNextPage( self ): <NEW_LINE> <INDENT> pgBottom = self.soup.findAll( 'div', "goog-inline-block jfk-button jfk-button-standard jfk-button-narrow jfk-button-collapse-left" ) <NEW_LINE> return pgBottom[0]['href'] <NEW_LINE> <DEDENT> def genUrls( self ): <NEW_LINE> <INDENT> nprods = self.numOfProds() <NEW_LINE> if nprods > 25: <NEW_LINE> <INDENT> npage = self.getNextPage() <NEW_LINE> npages = int(nprods/25) <NEW_LINE> startPages = [ 'start:' + str(25*(i+1)) for i in range(npages) ] <NEW_LINE> return [ 'http://www.google.com' + re.sub( 'start:25', x, npage ) for x in startPages ] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return []
Shopping Search Results Parser
62598fb63346ee7daa3376c2
class CardsContext(CardsCollection): <NEW_LINE> <INDENT> def __init__(self, token, client): <NEW_LINE> <INDENT> super(CardsContext, self).__init__(client) <NEW_LINE> self.token = token <NEW_LINE> self.transitions = self.Transitions(self.token, Collection(client, CardTransitionResponse)) <NEW_LINE> <DEDENT> class Transitions(object): <NEW_LINE> <INDENT> _endpoint = 'cardtransitions' <NEW_LINE> def __init__(self, token, collection): <NEW_LINE> <INDENT> self.token = token <NEW_LINE> self.collection = collection <NEW_LINE> <DEDENT> def page(self, count=5, start_index=0, params=None): <NEW_LINE> <INDENT> return self.collection.page(count=count, start_index=start_index, query_params=params, endpoint=self._endpoint + '/card/{}'.format(self.token)) <NEW_LINE> <DEDENT> def stream(self, params=None): <NEW_LINE> <INDENT> return self.collection.stream(query_params=params, endpoint=self._endpoint + '/card/{}'.format(self.token)) <NEW_LINE> <DEDENT> def list(self, params=None, limit=None): <NEW_LINE> <INDENT> return self.collection.list(query_params=params, endpoint=self._endpoint + '/card/{}'.format(self.token), limit=limit) <NEW_LINE> <DEDENT> def create(self, data): <NEW_LINE> <INDENT> return self.collection.create(data, endpoint=self._endpoint) <NEW_LINE> <DEDENT> def find(self, transition_token): <NEW_LINE> <INDENT> return self.collection.find(endpoint=self._endpoint + '/{}'.format(transition_token)) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return '<Marqeta.resources.cards.CardsContext.Transitions>' <NEW_LINE> <DEDENT> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return '<Marqeta.resources.cards.CardsContext>'
class to specify sub endpoints for cards
62598fb62c8b7c6e89bd38b9
class MockPlayer(Player): <NEW_LINE> <INDENT> def __init__(self, player, move): <NEW_LINE> <INDENT> Player.__init__(self) <NEW_LINE> self.history = copy.deepcopy(player.history) <NEW_LINE> self.cooperations = player.cooperations <NEW_LINE> self.defections = player.defections <NEW_LINE> self.move = move <NEW_LINE> <DEDENT> def strategy(self, opponent): <NEW_LINE> <INDENT> return self.move
Creates a mock player that enforces a particular next move for a given player.
62598fb63317a56b869be5c7
class IAR_phi(Base): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.Data = ['magnitude', 'time', 'error'] <NEW_LINE> <DEDENT> def IAR_phi_kalman(self,x,t,y,yerr,standarized=True,c=0.5): <NEW_LINE> <INDENT> n=len(y) <NEW_LINE> Sighat=np.zeros(shape=(1,1)) <NEW_LINE> Sighat[0,0]=1 <NEW_LINE> if standarized == False: <NEW_LINE> <INDENT> Sighat=np.var(y)*Sighat <NEW_LINE> <DEDENT> xhat=np.zeros(shape=(1,n)) <NEW_LINE> delta=np.diff(t) <NEW_LINE> Q=Sighat <NEW_LINE> phi=x <NEW_LINE> F=np.zeros(shape=(1,1)) <NEW_LINE> G=np.zeros(shape=(1,1)) <NEW_LINE> G[0,0]=1 <NEW_LINE> sum_Lambda=0 <NEW_LINE> sum_error=0 <NEW_LINE> if np.isnan(phi) == True: <NEW_LINE> <INDENT> phi=1.1 <NEW_LINE> <DEDENT> if abs(phi) < 1: <NEW_LINE> <INDENT> for i in range(n-1): <NEW_LINE> <INDENT> Lambda=np.dot(np.dot(G,Sighat),G.transpose())+yerr[i+1]**2 <NEW_LINE> if (Lambda <= 0) or (np.isnan(Lambda) == True): <NEW_LINE> <INDENT> sum_Lambda=n*1e10 <NEW_LINE> break <NEW_LINE> <DEDENT> phi2=phi**delta[i] <NEW_LINE> F[0,0]=phi2 <NEW_LINE> phi2=1-phi**(delta[i]*2) <NEW_LINE> Qt=phi2*Q <NEW_LINE> sum_Lambda=sum_Lambda+np.log(Lambda) <NEW_LINE> Theta=np.dot(np.dot(F,Sighat),G.transpose()) <NEW_LINE> sum_error= sum_error + (y[i]-np.dot(G,xhat[0:1,i]))**2/Lambda <NEW_LINE> xhat[0:1,i+1]=np.dot(F,xhat[0:1,i])+np.dot(np.dot(Theta,np.linalg.inv(Lambda)),(y[i]-np.dot(G,xhat[0:1,i]))) <NEW_LINE> Sighat=np.dot(np.dot(F,Sighat),F.transpose()) + Qt - np.dot(np.dot(Theta,np.linalg.inv(Lambda)),Theta.transpose()) <NEW_LINE> <DEDENT> yhat=np.dot(G,xhat) <NEW_LINE> out=(sum_Lambda + sum_error)/n <NEW_LINE> if np.isnan(sum_Lambda) == True: <NEW_LINE> <INDENT> out=1e10 <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> out=1e10 <NEW_LINE> <DEDENT> return out <NEW_LINE> <DEDENT> def fit(self, data): <NEW_LINE> <INDENT> magnitude = data[0] <NEW_LINE> time = data[1] <NEW_LINE> error = data[2] <NEW_LINE> if np.sum(error)==0: <NEW_LINE> <INDENT> error=np.zeros(len(magnitude)) <NEW_LINE> <DEDENT> ynorm = 
(magnitude-np.mean(magnitude))/np.sqrt(np.var(magnitude,ddof=1)) <NEW_LINE> deltanorm = error/np.sqrt(np.var(magnitude,ddof=1)) <NEW_LINE> out=sp.optimize.minimize_scalar(self.IAR_phi_kalman,args=(time,ynorm,deltanorm),bounds=(0,1),method="bounded",options={'xatol': 1e-12, 'maxiter': 50000}) <NEW_LINE> phi = out.x <NEW_LINE> try: phi = phi[0][0] <NEW_LINE> except: phi = phi <NEW_LINE> return phi
functions to compute an IAR model with Kalman filter. Author: Felipe Elorrieta.
62598fb6377c676e912f6de9
class ConfigServerGitProperty(msrest.serialization.Model): <NEW_LINE> <INDENT> _validation = { 'uri': {'required': True}, } <NEW_LINE> _attribute_map = { 'repositories': {'key': 'repositories', 'type': '[GitPatternRepository]'}, 'uri': {'key': 'uri', 'type': 'str'}, 'label': {'key': 'label', 'type': 'str'}, 'search_paths': {'key': 'searchPaths', 'type': '[str]'}, 'username': {'key': 'username', 'type': 'str'}, 'password': {'key': 'password', 'type': 'str'}, 'host_key': {'key': 'hostKey', 'type': 'str'}, 'host_key_algorithm': {'key': 'hostKeyAlgorithm', 'type': 'str'}, 'private_key': {'key': 'privateKey', 'type': 'str'}, 'strict_host_key_checking': {'key': 'strictHostKeyChecking', 'type': 'bool'}, } <NEW_LINE> def __init__( self, *, uri: str, repositories: Optional[List["GitPatternRepository"]] = None, label: Optional[str] = None, search_paths: Optional[List[str]] = None, username: Optional[str] = None, password: Optional[str] = None, host_key: Optional[str] = None, host_key_algorithm: Optional[str] = None, private_key: Optional[str] = None, strict_host_key_checking: Optional[bool] = None, **kwargs ): <NEW_LINE> <INDENT> super(ConfigServerGitProperty, self).__init__(**kwargs) <NEW_LINE> self.repositories = repositories <NEW_LINE> self.uri = uri <NEW_LINE> self.label = label <NEW_LINE> self.search_paths = search_paths <NEW_LINE> self.username = username <NEW_LINE> self.password = password <NEW_LINE> self.host_key = host_key <NEW_LINE> self.host_key_algorithm = host_key_algorithm <NEW_LINE> self.private_key = private_key <NEW_LINE> self.strict_host_key_checking = strict_host_key_checking
Property of git. All required parameters must be populated in order to send to Azure. :ivar repositories: Repositories of git. :vartype repositories: list[~azure.mgmt.appplatform.v2021_06_01_preview.models.GitPatternRepository] :ivar uri: Required. URI of the repository. :vartype uri: str :ivar label: Label of the repository. :vartype label: str :ivar search_paths: Searching path of the repository. :vartype search_paths: list[str] :ivar username: Username of git repository basic auth. :vartype username: str :ivar password: Password of git repository basic auth. :vartype password: str :ivar host_key: Public sshKey of git repository. :vartype host_key: str :ivar host_key_algorithm: SshKey algorithm of git repository. :vartype host_key_algorithm: str :ivar private_key: Private sshKey algorithm of git repository. :vartype private_key: str :ivar strict_host_key_checking: Strict host key checking or not. :vartype strict_host_key_checking: bool
62598fb6dc8b845886d536ad
class stmt(AST, commonloc): <NEW_LINE> <INDENT> pass
Base class for statement nodes.
62598fb62ae34c7f260ab1d2
class TestCheckFileSize(unittest.TestCase): <NEW_LINE> <INDENT> def testBasic(self): <NEW_LINE> <INDENT> results = check_file_size(size=1) <NEW_LINE> self.assertTrue(results)
Test that the size function returns something.
62598fb6aad79263cf42e8ca
class RepresentativeViewSet(viewsets.ReadOnlyModelViewSet): <NEW_LINE> <INDENT> serializer_class = serializers.ShortRepresentativeSerializer <NEW_LINE> queryset = models.Representative.objects.all()
Listado y vista en detalle de los representantes de la Universidad
62598fb65166f23b2e2434d1
class Meta: <NEW_LINE> <INDENT> model = models.OrderItem <NEW_LINE> fields = ('id', 'order', 'item', 'quantity', 'price', 'user', 'created', 'updated') <NEW_LINE> read_only_fields = ('order', 'price', 'user')
Metaclass definition
62598fb6d486a94d0ba2c0c8
class WangKejun(TSFEDL_BaseModule): <NEW_LINE> <INDENT> def __init__(self, in_features: int, top_module: Optional[nn.Module] = WangKejun_Classifier(256, 5), loss: Callable[[torch.Tensor, torch.Tensor], torch.Tensor] = nn.CrossEntropyLoss(), metrics: Dict[str, Callable[[torch.Tensor, torch.Tensor], torch.Tensor]] = None, optimizer: torch.optim.Optimizer = torch.optim.Adam, **kwargs ): <NEW_LINE> <INDENT> super(WangKejun, self).__init__(in_features, top_module, loss, metrics, optimizer, **kwargs) <NEW_LINE> self.convolutions = nn.Sequential( nn.Conv1d(in_channels=in_features, out_channels=64, kernel_size=3, stride=1, bias=True, padding="same"), nn.ELU(), nn.BatchNorm1d(num_features=64), nn.Conv1d(in_channels=64, out_channels=64, kernel_size=3, stride=1, bias=True, padding="same"), nn.ELU(), nn.MaxPool1d(kernel_size=2, stride=2), nn.Conv1d(in_channels=64, out_channels=128, kernel_size=3, stride=1, bias=True, padding="same"), nn.ELU(), nn.BatchNorm1d(num_features=128), nn.Conv1d(in_channels=128, out_channels=128, kernel_size=3, stride=1, bias=True, padding="same"), nn.ELU(), nn.MaxPool1d(kernel_size=2, stride=2), nn.Conv1d(in_channels=128, out_channels=256, kernel_size=3, stride=1, bias=True, padding="same"), nn.ELU(), nn.BatchNorm1d(num_features=256), nn.Conv1d(in_channels=256, out_channels=256, kernel_size=3, stride=1, bias=True, padding="same"), nn.ELU(), nn.MaxPool1d(kernel_size=2, stride=2) ) <NEW_LINE> self.lstm = nn.LSTM(input_size=256, hidden_size=256, batch_first=True) <NEW_LINE> <DEDENT> def forward(self, x): <NEW_LINE> <INDENT> x = self.convolutions(x) <NEW_LINE> x = flip_indices_for_conv_to_lstm(x) <NEW_LINE> x, _ = self.lstm(x) <NEW_LINE> if self.classifier is not None: <NEW_LINE> <INDENT> x = self.classifier(x) <NEW_LINE> <DEDENT> return x
CNN 1D-LSTM Parameters ---------- in_features: int Number of features of the input tensors top_module: nn.Module, defaults=WangKejun_Classifier(256, 5) The optional nn.Module to be used as additional top layers. loss: Callable[[torch.Tensor, torch.Tensor], torch.Tensor] The loss function to use. It should accept two Tensors as inputs (predictions, targets) and return a Tensor with the loss. metrics: Dict[str, Callable[[torch.Tensor, torch.Tensor], torch.Tensor]] Dictionary with the name of the metric and a function to compute the metric from two tensors, prediction and true labels. optimizer: torch.optim.Optimizer The pyTorch Optimizer to use. Note that this must be only the class type and not an instance of the class!! **kwargs: dict A dictionary with the parameters of the optimizer. Returns ------- `LightningModule` A pyTorch Lightning Module instance. References ---------- Wang, Kejun, Xiaoxia Qi, and Hongda Liu. "Photovoltaic power forecasting based LSTM-Convolutional Network." Energy 189 (2019): 116225.
62598fb6d486a94d0ba2c0c9
class MedianMetric(BaseMetric): <NEW_LINE> <INDENT> def run(self, dataSlice, slicePoint=None): <NEW_LINE> <INDENT> return np.median(dataSlice[self.colname])
Calculate the median of a simData column slice.
62598fb67b180e01f3e490cc
class Avmu_Exception(Exception): <NEW_LINE> <INDENT> pass
Base exception class that all library exceptions inherit from. This can be used to easily catch all exceptions that are specifically thrown by the ``avmu`` library.
62598fb656ac1b37e63022e2
class TeamSupport(Document): <NEW_LINE> <INDENT> collection = DB.team_support <NEW_LINE> @classmethod <NEW_LINE> def get_team_support(cls, battle_id, team_name): <NEW_LINE> <INDENT> team_support = cls.collection.find_one( {'battle_id': ObjectId(battle_id), 'team_name': team_name}) <NEW_LINE> return cls(team_support) if team_support else None
站队支持
62598fb64f6381625f19953c
class UpdateTypeAlreadyQueued(DMSError): <NEW_LINE> <INDENT> def __init__(self, name, update_type): <NEW_LINE> <INDENT> message = ("Zone '%s' - Update type of '%s' already queued" % (name, update_type)) <NEW_LINE> super().__init__(message) <NEW_LINE> self.data['name'] = name <NEW_LINE> self.data['update_type'] = update_type <NEW_LINE> self.jsonrpc_error = -86
An update of the given type is already queued for the zone * JSONRPC Error: -86 * JSONRPC data keys: * 'name' - domain name * 'update_type' - update type
62598fb667a9b606de5460c7
class SectionListView(ListView): <NEW_LINE> <INDENT> u <NEW_LINE> model = Employees <NEW_LINE> paginate_by = 10 <NEW_LINE> groups = ['А-Г', 'Д-З', 'И-М', 'Н-Р', 'С-Ф', 'Х-Ш','Щ-Я'] <NEW_LINE> def get_context_data(self, **kwargs): <NEW_LINE> <INDENT> context = super(SectionListView, self).get_context_data(**kwargs) <NEW_LINE> context['groups'] = self.groups <NEW_LINE> return context <NEW_LINE> <DEDENT> def get_queryset(self, *args, **kwargs): <NEW_LINE> <INDENT> request = self.request.GET <NEW_LINE> group_num = request.get('group_num', None) <NEW_LINE> is_active = request.get('is_active', None) <NEW_LINE> depart = request.get('depart', None) <NEW_LINE> is_active_condition = Q() <NEW_LINE> group_num_condition = Q() <NEW_LINE> depart_condition = Q() <NEW_LINE> if group_num and hash(group_num) != hash(u'None'): <NEW_LINE> <INDENT> group_num_condition = Q( last_name__regex=r'^[{}].+'.format(self.groups[int(group_num)]) ) <NEW_LINE> <DEDENT> if is_active and hash(is_active) != hash(u'None'): <NEW_LINE> <INDENT> if hash(is_active) == hash(u'True'): <NEW_LINE> <INDENT> is_active_condition = Q( date_of_dismissal__gt=datetime.date.today() ) <NEW_LINE> <DEDENT> elif hash(is_active) == hash(u'False'): <NEW_LINE> <INDENT> is_active_condition = Q( date_of_dismissal__lte=datetime.date.today() ) <NEW_LINE> <DEDENT> <DEDENT> if depart and hash(depart) != hash(u'None'): <NEW_LINE> <INDENT> depart_condition = Q(department__name__exact=depart) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> return Employees.objects.filter( is_active_condition, group_num_condition, depart_condition ) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> return Employees.objects.all() <NEW_LINE> <DEDENT> def get(self, request, *args, **kwargs): <NEW_LINE> <INDENT> return super(SectionListView, self).get(request, *args, **kwargs)
Список работников
62598fb6796e427e5384e88b
class URLException(BaseException): <NEW_LINE> <INDENT> pass
Raise this when a URL is illegal
62598fb660cbc95b06364435
class SettingsInvalid(BadRequest): <NEW_LINE> <INDENT> ID = "SETTINGS_INVALID" <NEW_LINE> MESSAGE = __doc__
Invalid settings were provided
62598fb64428ac0f6e658619
class GameMaxPlayersExceeded(Exception): <NEW_LINE> <INDENT> pass
MaxPlayersExceeded - exception
62598fb6956e5f7376df56f9
class UFOsToGlyphsRT(unittest.TestCase): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def add_tests(cls, testable): <NEW_LINE> <INDENT> pass
Test the whole chain from a collection of UFOs to .glyphs and back
62598fb630bbd722464699f5
class FakeArticle: <NEW_LINE> <INDENT> def __init__(self, settings, metadata, title, url): <NEW_LINE> <INDENT> self.settings = settings <NEW_LINE> self.metadata = metadata <NEW_LINE> self.title = title <NEW_LINE> self.url = url <NEW_LINE> <DEDENT> def set_custom_data(self, data): <NEW_LINE> <INDENT> for key, value in data.items(): <NEW_LINE> <INDENT> setattr(self, key, value)
Mock Pelican Article object.
62598fb671ff763f4b5e786e
class Sequence(task.Task): <NEW_LINE> <INDENT> def __init__(self, config): <NEW_LINE> <INDENT> self._tasks = config['tasks'] <NEW_LINE> self._waiting = False <NEW_LINE> self._index = 0 <NEW_LINE> <DEDENT> def __call__(self): <NEW_LINE> <INDENT> if not self._waiting and self._index < len(self._tasks): <NEW_LINE> <INDENT> self._current = api.new_task(self._tasks[self._index]) <NEW_LINE> self._waiting = True <NEW_LINE> self._index += 1 <NEW_LINE> <DEDENT> if api.status_task(self._current)['state'] == api.States.STOPPED: <NEW_LINE> <INDENT> self._waiting = False <NEW_LINE> <DEDENT> <DEDENT> def stop(self): <NEW_LINE> <INDENT> if not self._waiting and self._index >= len(self._tasks): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> def status(self): <NEW_LINE> <INDENT> index = self._index <NEW_LINE> return 'Most recently triggered task {} which has a task type of {}.'.format(index, self._tasks[index - 1]['type']) <NEW_LINE> <DEDENT> def cleanup(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def parameters(cls): <NEW_LINE> <INDENT> required = {'tasks': '[task]| A list of tasks. Must contain at least two tasks.'} <NEW_LINE> optional = {} <NEW_LINE> return {'required': required, 'optional': optional} <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def validate(cls, config): <NEW_LINE> <INDENT> config = api.check_config(config, cls.parameters(), {}) <NEW_LINE> if len(config['tasks']) < 2: <NEW_LINE> <INDENT> raise ValueError('tasks: {} Must contain at least two tasks.'.format(config['tasks'])) <NEW_LINE> <DEDENT> return config
Executes nested tasks in sequence. This task is only stopped after the last nested task has stopped.
62598fb6b7558d5895463725
class NinjaAnt(Ant): <NEW_LINE> <INDENT> name = 'Ninja' <NEW_LINE> food_cost = 6 <NEW_LINE> damage = 1 <NEW_LINE> def __init__(self, armor=1): <NEW_LINE> <INDENT> Insect.__init__(self, armor) <NEW_LINE> <DEDENT> blocks_path = False <NEW_LINE> implemented = True <NEW_LINE> def action(self, colony): <NEW_LINE> <INDENT> for bee in self.place.bees[:]: <NEW_LINE> <INDENT> bee.reduce_armor(self.damage)
NinjaAnt does not block the path and damages all bees in its place.
62598fb6be8e80087fbbf160
class GenericAssociation(BaseAudit): <NEW_LINE> <INDENT> ordering = models.IntegerField(default=1) <NEW_LINE> source_type = models.ForeignKey(ContentType) <NEW_LINE> source_id = models.PositiveIntegerField() <NEW_LINE> source_object = generic.GenericForeignKey('source_type', 'source_id') <NEW_LINE> entity_type = models.ForeignKey( ContentType, related_name="%(app_label)s_%(class)s_related") <NEW_LINE> entity_id = models.PositiveIntegerField() <NEW_LINE> entity_object = generic.GenericForeignKey('entity_type', 'entity_id') <NEW_LINE> def to_dict(self): <NEW_LINE> <INDENT> return { 'username': self.id, 'ordering': self.ordering } <NEW_LINE> <DEDENT> def can_view(self, user, access_key=None): <NEW_LINE> <INDENT> return ( self.source_object.can_view( user=user, access_key=access_key) or self.source_object.project.can_view( user=user, access_key=access_key)) <NEW_LINE> <DEDENT> def can_edit(self, user): <NEW_LINE> <INDENT> return ( self.source_object.can_edit(user=user) or self.source_object.project.can_edit(user=user) ) <NEW_LINE> <DEDENT> def __unicode__(self): <NEW_LINE> <INDENT> return '{0}. {1} --> {2}'.format( self.source_id, self.source_type, self.entity_type) <NEW_LINE> <DEDENT> def _reorder_siblings_on_delete(self): <NEW_LINE> <INDENT> from localground.apps.site.models import Audio <NEW_LINE> sibling_models = self.source_object.get_media_siblings( self.entity_type.id) <NEW_LINE> current_index = sibling_models.index(self) <NEW_LINE> sibling_models.pop(current_index) <NEW_LINE> counter = 1 <NEW_LINE> for model in sibling_models: <NEW_LINE> <INDENT> model.ordering = counter <NEW_LINE> model.save() <NEW_LINE> counter += 1 <NEW_LINE> <DEDENT> <DEDENT> def delete(self, **kwargs): <NEW_LINE> <INDENT> self._reorder_siblings_on_delete() <NEW_LINE> super(GenericAssociation, self).delete(**kwargs) <NEW_LINE> <DEDENT> class Meta: <NEW_LINE> <INDENT> app_label = 'site' <NEW_LINE> unique_together = ( 'source_type', 'source_id', 'entity_type', 'entity_id')
http://weispeaks.wordpress.com/2009/11/04/overcoming-limitations-in-django-using-generic-foreign-keys/ Uses the contenttypes framework to create one big "meta-association table" between media elements (photos, audio files, mapimages, etc.) and groups. See the reference above for more information about the contenttypes framework.
62598fb68a43f66fc4bf2272
class UsersListCreateView(MethodSerializerView, generics.ListCreateAPIView): <NEW_LINE> <INDENT> queryset = models.Order.objects.all() <NEW_LINE> method_serializer_classes = { ('GET',): ser.OrderListSerializer, ('POST'): ser.OrderCreateSerializer }
API: /users Method: GET/POST
62598fb67b180e01f3e490cd
class TestUSPSBase(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> trytond.tests.test_tryton.install_module('shipping_usps') <NEW_LINE> self.Address = POOL.get('party.address') <NEW_LINE> self.USPSConfiguration = POOL.get('usps.configuration') <NEW_LINE> self.CarrierConfig = POOL.get('carrier.configuration') <NEW_LINE> self.Party = POOL.get('party.party') <NEW_LINE> self.PartyContact = POOL.get('party.contact_mechanism') <NEW_LINE> self.Country = POOL.get('country.country') <NEW_LINE> self.CountrySubdivision = POOL.get('country.subdivision') <NEW_LINE> self.Currency = POOL.get('currency.currency') <NEW_LINE> self.Company = POOL.get('company.company') <NEW_LINE> self.User = POOL.get('res.user') <NEW_LINE> assert 'USPS_USERNAME' in os.environ, "USPS_USERNAME not given. Hint:Use export USPS_USERNAME=<username>" <NEW_LINE> assert 'USPS_PASSWORD' in os.environ, "USPS_PASSWORD not given. Hint:Use export USPS_PASSWORD=<password>" <NEW_LINE> <DEDENT> def setup_defaults(self): <NEW_LINE> <INDENT> self.currency, = self.Currency.create([{ 'name': 'United Stated Dollar', 'code': 'USD', 'symbol': 'USD', }]) <NEW_LINE> self.Currency.create([{ 'name': 'Indian Rupee', 'code': 'INR', 'symbol': 'INR', }]) <NEW_LINE> country_us, = self.Country.create([{ 'name': 'United States', 'code': 'US', }]) <NEW_LINE> subdivision_florida, = self.CountrySubdivision.create([{ 'name': 'Florida', 'code': 'US-FL', 'country': country_us.id, 'type': 'state' }]) <NEW_LINE> subdivision_california, = self.CountrySubdivision.create([{ 'name': 'California', 'code': 'US-CA', 'country': country_us.id, 'type': 'state' }]) <NEW_LINE> with Transaction().set_context(company=None): <NEW_LINE> <INDENT> company_party, = self.Party.create([{ 'name': 'Test Party', 'vat_number': '123456', 'addresses': [('create', [{ 'name': 'Amine Khechfe', 'street': '247 High Street', 'zip': '94301-1041', 'city': 'Palo Alto', 'country': country_us.id, 'subdivision': subdivision_california.id, }])] }]) 
<NEW_LINE> <DEDENT> self.USPSConfiguration.create([{ 'username': os.environ['USPS_USERNAME'], 'password': os.environ['USPS_PASSWORD'], 'is_test': True, }]) <NEW_LINE> self.CarrierConfig.create([{ 'default_validation_provider': 'usps', }]) <NEW_LINE> self.company, = self.Company.create([{ 'party': company_party.id, 'currency': self.currency.id, }]) <NEW_LINE> self.PartyContact.create([{ 'type': 'phone', 'value': '8005551212', 'party': self.company.party.id }]) <NEW_LINE> self.User.write( [self.User(USER)], { 'main_company': self.company.id, 'company': self.company.id, } ) <NEW_LINE> self.address_val = self.USPSConfiguration(1).get_api_instance_of( 'address_val' ) <NEW_LINE> self.city_state_lookup = self.USPSConfiguration(1).get_api_instance_of( 'city_state_lookup' ) <NEW_LINE> CONTEXT.update(self.User.get_preferences(context_only=True))
Test USPS Integration
62598fb6d268445f26639c00
class SignAnnounceWidget(QWidget): <NEW_LINE> <INDENT> def __init__(self, parent): <NEW_LINE> <INDENT> super(SignAnnounceWidget, self).__init__(parent) <NEW_LINE> self.dialog = parent <NEW_LINE> self.manager = parent.manager <NEW_LINE> self.status_edit = QLineEdit() <NEW_LINE> self.status_edit.setReadOnly(True) <NEW_LINE> self.alias_edit = QLineEdit() <NEW_LINE> self.collateral_edit = PrevOutWidget() <NEW_LINE> self.delegate_edit = QLineEdit() <NEW_LINE> self.delegate_edit.setFont(QFont(util.MONOSPACE_FONT)) <NEW_LINE> for i in [self.alias_edit, self.collateral_edit, self.delegate_edit]: <NEW_LINE> <INDENT> i.setReadOnly(True) <NEW_LINE> <DEDENT> self.mapper = QDataWidgetMapper() <NEW_LINE> self.mapper.setSubmitPolicy(QDataWidgetMapper.ManualSubmit) <NEW_LINE> self.mapper.setModel(self.dialog.masternodes_widget.proxy_model) <NEW_LINE> model = self.dialog.masternodes_widget.model <NEW_LINE> self.mapper.addMapping(self.alias_edit, model.ALIAS) <NEW_LINE> self.mapper.addMapping(self.collateral_edit, model.VIN, 'string') <NEW_LINE> self.mapper.addMapping(self.delegate_edit, model.DELEGATE) <NEW_LINE> self.sign_button = QPushButton(_('Activate Masternode')) <NEW_LINE> self.sign_button.setEnabled(False) <NEW_LINE> self.sign_button.clicked.connect(self.sign_announce) <NEW_LINE> status_box = QHBoxLayout() <NEW_LINE> status_box.setContentsMargins(0, 0, 0, 0) <NEW_LINE> status_box.addWidget(QLabel(_('Status:'))) <NEW_LINE> status_box.addWidget(self.status_edit, stretch=1) <NEW_LINE> vbox = QVBoxLayout() <NEW_LINE> vbox.addLayout(status_box) <NEW_LINE> form = QFormLayout() <NEW_LINE> form.addRow(_('Alias:'), self.alias_edit) <NEW_LINE> form.addRow(_('Collateral MUE Output:'), self.collateral_edit) <NEW_LINE> form.addRow(_('Masternode Private Key:'), self.delegate_edit) <NEW_LINE> vbox.addLayout(form) <NEW_LINE> vbox.addLayout(util.Buttons(self.sign_button)) <NEW_LINE> self.setLayout(vbox) <NEW_LINE> <DEDENT> def set_mapper_index(self, row): <NEW_LINE> <INDENT> 
self.status_edit.clear() <NEW_LINE> self.status_edit.setStyleSheet(util.BLACK_FG) <NEW_LINE> self.mapper.setCurrentIndex(row) <NEW_LINE> mn = self.dialog.masternodes_widget.masternode_for_row(row) <NEW_LINE> status_text = '%s can be activated' % mn.alias <NEW_LINE> can_sign = True <NEW_LINE> try: <NEW_LINE> <INDENT> self.manager.check_can_sign_masternode(mn.alias) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> status_text = str(e) <NEW_LINE> can_sign = False <NEW_LINE> <DEDENT> self.status_edit.setText(_(status_text)) <NEW_LINE> self.sign_button.setEnabled(can_sign) <NEW_LINE> <DEDENT> def sign_announce(self): <NEW_LINE> <INDENT> self.mapper.submit() <NEW_LINE> self.dialog.sign_announce(str(self.alias_edit.text()))
Widget that displays information about signing a Masternode Announce.
62598fb60fa83653e46f4fd9
class CloudServerActionCreatedSchema(BaseEventSchema): <NEW_LINE> <INDENT> cloudserveraction_id = fields.String()
Schema for the CloudServerActionCreated events.
62598fb623849d37ff8511ab
class transaction(object): <NEW_LINE> <INDENT> def __init__(self, TXInputs, TXOutputs): <NEW_LINE> <INDENT> self.TXInputs = TXInputs <NEW_LINE> self.TXOutputs = TXOutputs <NEW_LINE> self.ID = hashlib.sha256(pickle.dumps( self)).hexdigest().encode("utf-8") <NEW_LINE> <DEDENT> def isCoinbase(self): <NEW_LINE> <INDENT> return len(self.TXInputs) == 1 and bytes(0) == self.TXInputs[0]['TXid'] and -1 == self.TXInputs[0]['Vout']
交易类,实例化就是一个UTXO交易,有id,Vin,Vout
62598fb663b5f9789fe85266
@ddt.ddt <NEW_LINE> @unittest.skip <NEW_LINE> class FindAssetTest(unittest.TestCase): <NEW_LINE> <INDENT> perf_test = True <NEW_LINE> def setUp(self): <NEW_LINE> <INDENT> super(FindAssetTest, self).setUp() <NEW_LINE> self.export_dir = mkdtemp() <NEW_LINE> self.addCleanup(rmtree, self.export_dir, ignore_errors=True) <NEW_LINE> <DEDENT> @ddt.data(*itertools.product( MODULESTORE_SETUPS, ASSET_AMOUNT_PER_TEST, )) <NEW_LINE> @ddt.unpack <NEW_LINE> def test_generate_find_timings(self, source_ms, num_assets): <NEW_LINE> <INDENT> if CodeBlockTimer is None: <NEW_LINE> <INDENT> raise SkipTest("CodeBlockTimer undefined.") <NEW_LINE> <DEDENT> desc = "FindAssetTest:{}:{}".format( SHORT_NAME_MAP[source_ms], num_assets, ) <NEW_LINE> with CodeBlockTimer(desc): <NEW_LINE> <INDENT> with CodeBlockTimer("fake_assets"): <NEW_LINE> <INDENT> make_asset_xml(num_assets, ASSET_XML_PATH) <NEW_LINE> validate_xml(ASSET_XSD_PATH, ASSET_XML_PATH) <NEW_LINE> <DEDENT> with MongoContentstoreBuilder().build() as source_content: <NEW_LINE> <INDENT> with source_ms.build(source_content) as source_store: <NEW_LINE> <INDENT> source_course_key = source_store.make_course_key('a', 'course', 'course') <NEW_LINE> asset_key = source_course_key.make_asset_key( AssetMetadata.GENERAL_ASSET_TYPE, 'silly_cat_picture.gif' ) <NEW_LINE> with CodeBlockTimer("initial_import"): <NEW_LINE> <INDENT> import_from_xml( source_store, 'test_user', TEST_DATA_ROOT, course_dirs=TEST_COURSE, static_content_store=source_content, target_course_id=source_course_key, create_course_if_not_present=True, raise_on_failure=True, ) <NEW_LINE> <DEDENT> with CodeBlockTimer("find_nonexistent_asset"): <NEW_LINE> <INDENT> __ = source_store.find_asset_metadata(asset_key) <NEW_LINE> <DEDENT> for sort in ALL_SORTS: <NEW_LINE> <INDENT> with CodeBlockTimer("get_asset_list:{}-{}".format( sort[0], 'asc' if sort[1] == ModuleStoreEnum.SortOrder.ascending else 'desc' )): <NEW_LINE> <INDENT> start_middle = num_assets / 2 <NEW_LINE> __ = 
source_store.get_all_asset_metadata( source_course_key, 'asset', start=0, sort=sort, maxresults=50 ) <NEW_LINE> __ = source_store.get_all_asset_metadata( source_course_key, 'asset', start=start_middle, sort=sort, maxresults=50 )
This class exists to time asset finding in different modulestore classes with different amounts of asset metadata.
62598fb64428ac0f6e65861b
class FieldGroupSizeError(Exception): <NEW_LINE> <INDENT> def __init__(self, field, index, alignment): <NEW_LINE> <INDENT> message = ( f"{field.__class__.__name__}: Field alignment size " f"'{field.alignment.byte_size}' does not match field group size " f"'{alignment.byte_size}' at index ({index.byte}, {index.bit}).") <NEW_LINE> super().__init__(message)
Raised if the alignment size of a field does not match its field group.
62598fb6627d3e7fe0e06faa
class RomanAPI(ButlerInterface): <NEW_LINE> <INDENT> ROMAN = 1 <NEW_LINE> @classmethod <NEW_LINE> def get_uid(cls) -> UUID: <NEW_LINE> <INDENT> return ROMAN_INTERFACE_UID
Roman Service Request Codes.
62598fb65fcc89381b2661c9
class _VirtualNonCollection(DAVNonCollection): <NEW_LINE> <INDENT> def __init__(self, path, environ): <NEW_LINE> <INDENT> DAVNonCollection.__init__(self, path, environ) <NEW_LINE> <DEDENT> def getContentLength(self): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> def getContentType(self): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> def getCreationDate(self): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> def getDisplayName(self): <NEW_LINE> <INDENT> return self.name <NEW_LINE> <DEDENT> def getDisplayInfo(self): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def getEtag(self): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> def getLastModified(self): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> def supportRanges(self): <NEW_LINE> <INDENT> return False
Abstract base class for all non-collection resources.
62598fb630bbd722464699f6
class Area: <NEW_LINE> <INDENT> __metaclass__ = abc.ABCMeta <NEW_LINE> def __init__(self, pile_type: Type[Pile], num_piles: int): <NEW_LINE> <INDENT> self._pile_type = pile_type <NEW_LINE> self._num_piles = num_piles <NEW_LINE> self._piles: List[Type[Pile]] = [pile_type(i) for i in range(num_piles)] <NEW_LINE> <DEDENT> def reset(self): <NEW_LINE> <INDENT> self._piles = [self._pile_type(i) for i in range(self._num_piles)] <NEW_LINE> <DEDENT> def add_card_force(self, a_card: card.Card, pos: int): <NEW_LINE> <INDENT> self._piles[pos].add(a_card) <NEW_LINE> <DEDENT> def add_card(self, a_card: card.Card, pos: Optional[int] = None) -> int: <NEW_LINE> <INDENT> piles = range(self._num_piles) if pos is None else [pos] <NEW_LINE> for pile in piles: <NEW_LINE> <INDENT> if self._piles[pile].valid(a_card): <NEW_LINE> <INDENT> self._piles[pile].add(a_card) <NEW_LINE> return pile <NEW_LINE> <DEDENT> <DEDENT> return -1 <NEW_LINE> <DEDENT> def remove(self, pile: int) -> None: <NEW_LINE> <INDENT> self._piles[pile].remove() <NEW_LINE> <DEDENT> def top_card(self, pile: int) -> Union[None, card.Card]: <NEW_LINE> <INDENT> return self._piles[pile].top_card <NEW_LINE> <DEDENT> @property <NEW_LINE> def isstart(self) -> bool: <NEW_LINE> <INDENT> for pile in self._piles: <NEW_LINE> <INDENT> if not pile.isstart: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> return True <NEW_LINE> <DEDENT> @property <NEW_LINE> def piles(self) -> List[List[card.Card]]: <NEW_LINE> <INDENT> return [pile.pile for pile in self._piles]
Provides an area abstraction.
62598fb6f9cc0f698b1c5349
class DevelopmentConfig(BaseConfig): <NEW_LINE> <INDENT> DEBUG = True <NEW_LINE> BCRYPT_LOG_ROUNDS = 4 <NEW_LINE> WTF_CSRF_ENABLED = False <NEW_LINE> SQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(basedir, 'db.sqlite') <NEW_LINE> DEBUG_TB_ENABLED = True
Development configuration.
62598fb62ae34c7f260ab1d6
class TwistedHttpConnectionAdapter(object): <NEW_LINE> <INDENT> def __init__(self, request): <NEW_LINE> <INDENT> self.request = request <NEW_LINE> <DEDENT> def write(self, data): <NEW_LINE> <INDENT> self.request.transport.reactor.callFromThread(self._write, data) <NEW_LINE> <DEDENT> def close(self): <NEW_LINE> <INDENT> self.request.transport.reactor.callFromThread(self._close) <NEW_LINE> <DEDENT> def _write(self, data): <NEW_LINE> <INDENT> if not self.request.finished: <NEW_LINE> <INDENT> self.request.write(data) <NEW_LINE> <DEDENT> <DEDENT> def _close(self): <NEW_LINE> <INDENT> if not self.request.finished: <NEW_LINE> <INDENT> self.request.setResponseCode(200) <NEW_LINE> self.request.finish()
Twisted HTTP connection adapter
62598fb6851cf427c66b83b0
class HandShake(object): <NEW_LINE> <INDENT> def __init__(self, way, shakeway): <NEW_LINE> <INDENT> self._waitedconn = {} <NEW_LINE> self._way = way <NEW_LINE> self._shakeway = shakeway <NEW_LINE> <DEDENT> def __call__(self, conn, eventloop): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> while b'\n' not in self._waitedconn.setdefault(conn, b""): <NEW_LINE> <INDENT> content = conn.recv(2048) <NEW_LINE> self._waitedconn[conn] = self._waitedconn[conn] + content <NEW_LINE> <DEDENT> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> logging.error("unknown failure:{}".format(e)) <NEW_LINE> conn.close() <NEW_LINE> raise <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> peername, way = self._waitedconn[conn].decode().strip().split(":") <NEW_LINE> try: <NEW_LINE> <INDENT> self._shakeway[self._way, way](eventloop, conn, peername) <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> logging.error( "KeyError failure:selfway-{}:way-{}".format(self._way, way)) <NEW_LINE> conn.close() <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> self._waitedconn.pop(conn, None)
Waiting for the other side's choice of connection.
62598fb67c178a314d78d598
class DataUpdate(DataUpdate): <NEW_LINE> <INDENT> resource = 'users' <NEW_LINE> def forwards(self, mongodb_collection, mongodb_database): <NEW_LINE> <INDENT> for user in mongodb_collection.find({}): <NEW_LINE> <INDENT> stages = get_resource_service(self.resource).get_invisible_stages_ids(user.get(config.ID_FIELD)) <NEW_LINE> print(mongodb_collection.update({'_id': user.get(config.ID_FIELD)}, {'$set': { 'invisible_stages': stages }})) <NEW_LINE> <DEDENT> <DEDENT> def backwards(self, mongodb_collection, mongodb_database): <NEW_LINE> <INDENT> print(mongodb_collection.update({}, {'$unset': {'invisible_stages': []}}, upsert=False, multi=True))
Updates the user collection with invisible stages. Refer to https://dev.sourcefabric.org/browse/SD-5077 for more information
62598fb6bf627c535bcb159b
class YapcapPacket(BitStructure): <NEW_LINE> <INDENT> _formats = {} <NEW_LINE> def __init__(self, data, base_cls): <NEW_LINE> <INDENT> BitStructure.__init__(self, self.__class__.__name__) <NEW_LINE> self.data = data <NEW_LINE> self.base_cls = base_cls <NEW_LINE> self.protocols = [] <NEW_LINE> <DEDENT> def decode(self): <NEW_LINE> <INDENT> self.base_cls.decode(self) <NEW_LINE> <DEDENT> def add_field(self, data, name, bits, format_cls=None, force_assign=False): <NEW_LINE> <INDENT> if format_cls: <NEW_LINE> <INDENT> self._formats[name] = format_cls <NEW_LINE> <DEDENT> field = BitField(name, bits) <NEW_LINE> self.append(field) <NEW_LINE> num_bits = 1 <NEW_LINE> if num_bits % 8 == 0 or force_assign: <NEW_LINE> <INDENT> self.set_bytes(data[0:self.size()]) <NEW_LINE> data = data[self.size():] <NEW_LINE> <DEDENT> return data <NEW_LINE> <DEDENT> def add_custom_field(self, data, field, format_cls=None, force_assign=False): <NEW_LINE> <INDENT> if format_cls: <NEW_LINE> <INDENT> self._formats[name] = format_cls <NEW_LINE> <DEDENT> field.set_bytes(data[0:field.size()]) <NEW_LINE> data = data[field.size():] <NEW_LINE> self.append(field) <NEW_LINE> return data <NEW_LINE> <DEDENT> def has_field(self, key): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self[key] <NEW_LINE> return True <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> def get_field(self, key): <NEW_LINE> <INDENT> for field in self.fields(): <NEW_LINE> <INDENT> if field.name() == key: <NEW_LINE> <INDENT> return field <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def __getattr__(self, key): <NEW_LINE> <INDENT> if key in self._formats and self.has_field(key): <NEW_LINE> <INDENT> return self._formats[key].output(self.get_field(key)) <NEW_LINE> <DEDENT> elif self.has_field(key): <NEW_LINE> <INDENT> return self[key] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return None
Yapcap Packet Class
62598fb63d592f4c4edbafbb
class Collection(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self._all_values = {} <NEW_LINE> <DEDENT> def add(self, dimensions, value): <NEW_LINE> <INDENT> name = json.dumps(dimensions) <NEW_LINE> if name in self._all_values: <NEW_LINE> <INDENT> raise DuplicateValueException( "Counter {} is already defined".format(name)) <NEW_LINE> <DEDENT> self._all_values[name] = value <NEW_LINE> <DEDENT> def start(self, basepath): <NEW_LINE> <INDENT> values = list(self._all_values.items()) <NEW_LINE> del self._all_values <NEW_LINE> get_size = lambda pair: pair[1]._get_size() <NEW_LINE> counter_order = lambda pair: (pair[1]._get_size(), pair[0]) <NEW_LINE> values.sort(key=counter_order) <NEW_LINE> offset = 0 <NEW_LINE> scheme = [] <NEW_LINE> offsets = {} <NEW_LINE> for size, pairs in groupby(values, key=get_size): <NEW_LINE> <INDENT> if size & (size-1) == 0: <NEW_LINE> <INDENT> if offset % size: <NEW_LINE> <INDENT> pad = size - offset % size <NEW_LINE> offset += pad <NEW_LINE> scheme.append('pad {}'.format(pad)) <NEW_LINE> <DEDENT> <DEDENT> elif size % 8 == 0: <NEW_LINE> <INDENT> if offset % 8: <NEW_LINE> <INDENT> pad = size - offset % 8 <NEW_LINE> offset += pad <NEW_LINE> scheme.append('pad {}'.format(pad)) <NEW_LINE> <DEDENT> <DEDENT> for name, value in pairs: <NEW_LINE> <INDENT> offsets[value] = offset <NEW_LINE> offset += size <NEW_LINE> _, typ = value._get_type() <NEW_LINE> scheme.append(typ + ': ' + name) <NEW_LINE> <DEDENT> <DEDENT> size = offset <NEW_LINE> path = basepath + '.values' <NEW_LINE> tmppath = basepath + '.tmp' <NEW_LINE> metapath = basepath + '.meta' <NEW_LINE> if os.path.exists(metapath): <NEW_LINE> <INDENT> os.unlink(metapath) <NEW_LINE> <DEDENT> if os.path.exists(path): <NEW_LINE> <INDENT> os.unlink(path) <NEW_LINE> <DEDENT> if os.path.exists(tmppath): <NEW_LINE> <INDENT> os.unlink(tmppath) <NEW_LINE> <DEDENT> with open(tmppath, 'w+b') as f: <NEW_LINE> <INDENT> f.write(b'\x00' * offset) <NEW_LINE> f.flush() <NEW_LINE> mem = 
memoryview(mmap.mmap(f.fileno(), offset)) <NEW_LINE> <DEDENT> os.rename(tmppath, path) <NEW_LINE> with open(tmppath, 'wt') as f: <NEW_LINE> <INDENT> f.write('\n'.join(scheme)) <NEW_LINE> <DEDENT> os.rename(tmppath, metapath) <NEW_LINE> for value, offset in offsets.items(): <NEW_LINE> <INDENT> vtype, _ = value._get_type() <NEW_LINE> size = value._get_size() <NEW_LINE> value._memoryview = mem[offset:offset+size].cast(vtype) <NEW_LINE> <DEDENT> return ActiveCollection(basepath)
A collection of statistics parameters It's just a singleton, which is hold in this module, but we use it as a regular class for unittests
62598fb67d847024c075c4b5
class TlsValidationContextSdsTrust(AWSProperty): <NEW_LINE> <INDENT> props: PropsDictType = { "SecretName": (str, True), }
`TlsValidationContextSdsTrust <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-appmesh-virtualnode-tlsvalidationcontextsdstrust.html>`__
62598fb656b00c62f0fb29b3
class PokemonScreen(foreground_object.ForegroundObject): <NEW_LINE> <INDENT> def __init__(self, screen, pokemonMenuNode, party, startPoke): <NEW_LINE> <INDENT> self.screen = screen <NEW_LINE> self.menuNode = pokemonMenuNode <NEW_LINE> self.party = party <NEW_LINE> self.currentPoke = startPoke <NEW_LINE> self.currentPage = 0 <NEW_LINE> self.loadPokemon() <NEW_LINE> self.loadPage() <NEW_LINE> self.busy = True <NEW_LINE> <DEDENT> def loadPokemon(self): <NEW_LINE> <INDENT> self.poke = self.party[self.currentPoke] <NEW_LINE> speciesNode = self.poke.speciesNode <NEW_LINE> mainNode = data.getChild(self.menuNode, "main") <NEW_LINE> fFont = font.Font(os.path.join(settings.path, "data", data.getAttr(mainNode, "font", data.D_STRING))) <NEW_LINE> size = ((self.screen.get_width()/2)-(OBJECTBUFFER*2), (self.screen.get_height()/2)-(OBJECTBUFFER*2)) <NEW_LINE> fn = os.path.join(settings.path, "data", data.getAttr(mainNode, "box", data.D_STRING)) <NEW_LINE> self.mainBox = box.Box(size, fn).convert(self.screen) <NEW_LINE> fFont.writeText("Lv%i" % self.poke.level, self.mainBox, (BORDER, BORDER)) <NEW_LINE> fFont.writeText(self.poke.getName(), self.mainBox, ((size[0]-fFont.calcWidth(self.poke.getName()))/2, BORDER)) <NEW_LINE> self.battler = self.poke.getBattler() <NEW_LINE> self.mainBox.blit(self.battler, ((size[0]-self.battler.get_width())/2, size[1]-self.battler.get_height()-BORDER)) <NEW_LINE> <DEDENT> def loadPage(self): <NEW_LINE> <INDENT> if self.currentPage == 0: <NEW_LINE> <INDENT> self.page = InfoPage(self.screen, data.getChild(self.menuNode, "info"), self.poke) <NEW_LINE> <DEDENT> elif self.currentPage == 1: <NEW_LINE> <INDENT> self.page = SkillsPage(self.screen, data.getChild(self.menuNode, "skills"), self.poke) <NEW_LINE> <DEDENT> elif self.currentPage == 2: <NEW_LINE> <INDENT> self.page = MovesPage(self.screen, data.getChild(self.menuNode, "moves"), self.poke) <NEW_LINE> <DEDENT> <DEDENT> def inputButton(self, button): <NEW_LINE> <INDENT> if button == game_input.BT_B: 
<NEW_LINE> <INDENT> self.busy = False <NEW_LINE> <DEDENT> elif button == game_input.BT_LEFT: <NEW_LINE> <INDENT> if self.currentPage > 0: <NEW_LINE> <INDENT> self.currentPage -= 1 <NEW_LINE> self.loadPage() <NEW_LINE> <DEDENT> <DEDENT> elif button == game_input.BT_RIGHT: <NEW_LINE> <INDENT> if self.currentPage < 2: <NEW_LINE> <INDENT> self.currentPage += 1 <NEW_LINE> self.loadPage() <NEW_LINE> <DEDENT> <DEDENT> elif button == game_input.BT_UP: <NEW_LINE> <INDENT> if self.currentPoke > 0: <NEW_LINE> <INDENT> self.currentPoke -= 1 <NEW_LINE> self.loadPokemon() <NEW_LINE> self.loadPage() <NEW_LINE> <DEDENT> <DEDENT> elif button == game_input.BT_DOWN: <NEW_LINE> <INDENT> if self.currentPoke < len(self.party)-1: <NEW_LINE> <INDENT> self.currentPoke += 1 <NEW_LINE> self.loadPokemon() <NEW_LINE> self.loadPage() <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def draw(self): <NEW_LINE> <INDENT> self.screen.fill((255,200,150)) <NEW_LINE> self.page.draw() <NEW_LINE> self.screen.blit(self.mainBox, (OBJECTBUFFER,OBJECTBUFFER))
The pokemon summary screen object.
62598fb6d7e4931a7ef3c18f
class ResourceEnvironment: <NEW_LINE> <INDENT> def __init__( self, resource_configs, common_configs, dependency_database, *, reverse_order=False): <NEW_LINE> <INDENT> self._resource_configs = resource_configs <NEW_LINE> self._common_configs = common_configs <NEW_LINE> self._database = dependency_database <NEW_LINE> self._reverse_order = reverse_order <NEW_LINE> <DEDENT> def update(self, resource_id): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> config = self._resource_configs[resource_id] <NEW_LINE> <DEDENT> except LookupError: <NEW_LINE> <INDENT> raise DoxhooksLookupError( resource_id, self._resource_configs, "`resource_configs`") <NEW_LINE> <DEDENT> resource = config.make(id=resource_id, **self._common_configs) <NEW_LINE> resource.update() <NEW_LINE> <DEDENT> @property <NEW_LINE> def _resource_ids(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> resource_ids = tuple(self._resource_configs.keys()) <NEW_LINE> <DEDENT> except (AttributeError, TypeError): <NEW_LINE> <INDENT> resource_ids = range(len(self._resource_configs)) <NEW_LINE> <DEDENT> return reversed(resource_ids) if self._reverse_order else resource_ids <NEW_LINE> <DEDENT> def update_all(self): <NEW_LINE> <INDENT> for resource_id in self._resource_ids: <NEW_LINE> <INDENT> self.update(resource_id) <NEW_LINE> <DEDENT> <DEDENT> def update_dependents(self, input_path, *, input_root=None): <NEW_LINE> <INDENT> if input_root is None: <NEW_LINE> <INDENT> path = input_path <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> path = os.path.relpath(input_path, input_root) <NEW_LINE> <DEDENT> path = normalise_path(path) <NEW_LINE> dependent_ids = self._database.retrieve_products(path) <NEW_LINE> if not dependent_ids: <NEW_LINE> <INDENT> console.info("No dependency data for {!r}.".format(path)) <NEW_LINE> return <NEW_LINE> <DEDENT> update_ids = [] <NEW_LINE> for id_ in self._resource_ids: <NEW_LINE> <INDENT> if id_ in dependent_ids: <NEW_LINE> <INDENT> update_ids.append(id_) <NEW_LINE> dependent_ids.remove(id_) <NEW_LINE> 
<DEDENT> <DEDENT> if dependent_ids: <NEW_LINE> <INDENT> raise DoxhooksLookupError( ", ".join(dependent_ids), self._resource_configs, "`resource_configs`") <NEW_LINE> <DEDENT> resource_count = len(update_ids) <NEW_LINE> plural = "" if resource_count == 1 else "s" <NEW_LINE> console.info( "Found {} resource{} dependent on {!r}." .format(resource_count, plural, path)) <NEW_LINE> for resource_id in update_ids: <NEW_LINE> <INDENT> self.update(resource_id)
An environment in which information resources are updated. Class Interface --------------- update Update a resource configured in this environment. update_all Update all resources configured in this environment. update_dependents Update all resources that depend on a given input file.
62598fb663d6d428bbee28a8
@pytest.mark.usefixtures('clean_artifacts', 'create_environment', 'get_mkv') <NEW_LINE> class TestGoldenPath: <NEW_LINE> <INDENT> def test_pre_proc(self): <NEW_LINE> <INDENT> self.mkv.pre_process() <NEW_LINE> out = pathlib.Path('tests/processing/0_analyze/orig_Stage 0 Test Good.mkv') <NEW_LINE> assert self.mkv.video.copy_count == 1 <NEW_LINE> assert self.mkv.video.copy_indices == [0] <NEW_LINE> assert self.mkv.audio.copy_count == 1 <NEW_LINE> assert self.mkv.audio.copy_indices == [1] <NEW_LINE> assert self.mkv.subs.copy_count == 0 <NEW_LINE> assert not self.mkv.intervene['needed'] <NEW_LINE> assert out.exists() <NEW_LINE> <DEDENT> def test_set_command(self): <NEW_LINE> <INDENT> self.mkv._set_command() <NEW_LINE> expected = [ 'ffmpeg', '-hide_banner', '-i', 'tests\\processing\\0_analyze\\orig_Stage 0 Test Good.mkv', '-map', '0:0', '-map', '0:1', '-map_metadata', '0', '-metadata', 'title=Stage 0 Test Good', '-metadata:s:v:0', 'title=h264 Remux', '-metadata:s:a:0', 'title=DTS-HD MA 7.1', '-c', 'copy', 'tests\\processing\\0_analyze\\Stage 0 Test Good.mkv' ] <NEW_LINE> assert len(self.mkv.cmd_list) == 1 <NEW_LINE> assert self.mkv.cmd_list[0] == expected <NEW_LINE> <DEDENT> def test_execute(self): <NEW_LINE> <INDENT> out = pathlib.Path('tests/processing/0_analyze/Stage 0 Test Good.mkv') <NEW_LINE> self.mkv.run_commands() <NEW_LINE> assert out.exists() <NEW_LINE> <DEDENT> def test_post_proc(self): <NEW_LINE> <INDENT> archive = pathlib.Path('tests/processing/_archive/orig_Stage 0 Test Good.mkv') <NEW_LINE> out = pathlib.Path('tests/processing/1_remux/Stage 0 Test Good.mkv') <NEW_LINE> self.mkv.post_process() <NEW_LINE> assert archive.exists() <NEW_LINE> assert out.exists()
The MKV used in this test class has the following streams: Input #0, matroska,webm, from 'Stage 0 Test Good.mkv': Metadata: title : Stage 0 Test Good creation_time : 2017-05-29T06:23:24.000000Z ENCODER : Lavf57.83.100 Duration: 00:00:05.26, start: 0.000000, bitrate: 14704 kb/s Stream #0:0(eng): Video: h264 (High), yuv420p(progressive), 1920x1080 [SAR 1:1 DAR 16:9], 23.98 fps, 23.98 tbr, 1k tbn, 47.95 tbc (default) Metadata: title : h264 Remux DURATION : 00:00:05.255000000 Stream #0:1(eng): Audio: dts (DTS-HD MA), 48000 Hz, 7.1, s16p (default) Metadata: title : DTS-HD MA 7.1 DURATION : 00:00:05.008000000 Stream #0:2(eng): Subtitle: hdmv_pgs_subtitle (default) Metadata: title : English-PGS DURATION : 00:00:00.000000000 Expected behavior: - MKV should complete each step of stage_0 with no errors and no user intervention
62598fb67b25080760ed75ad
class GCSRecordInputReader(GCSInputReader): <NEW_LINE> <INDENT> def __getstate__(self): <NEW_LINE> <INDENT> result = self.__dict__.copy() <NEW_LINE> if "_record_reader" in result: <NEW_LINE> <INDENT> result.pop("_record_reader") <NEW_LINE> <DEDENT> return result <NEW_LINE> <DEDENT> def __next__(self): <NEW_LINE> <INDENT> while True: <NEW_LINE> <INDENT> if not hasattr(self, "_cur_handle") or self._cur_handle is None: <NEW_LINE> <INDENT> self._cur_handle = next(super(GCSRecordInputReader, self)) <NEW_LINE> <DEDENT> if not hasattr(self, "_record_reader") or self._record_reader is None: <NEW_LINE> <INDENT> self._record_reader = records.RecordsReader(self._cur_handle) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> start_time = time.time() <NEW_LINE> content = self._record_reader.read() <NEW_LINE> self._slice_ctx.incr(self.COUNTER_IO_READ_BYTE, len(content)) <NEW_LINE> self._slice_ctx.incr(self.COUNTER_IO_READ_MSEC, int(time.time() - start_time) * 1000) <NEW_LINE> return content <NEW_LINE> <DEDENT> except EOFError: <NEW_LINE> <INDENT> self._cur_handle = None <NEW_LINE> self._record_reader = None
Read data from a Google Cloud Storage file using LevelDB format. See the GCSInputReader for additional configuration options.
62598fb67b180e01f3e490ce
class TlacASCII(TlacFile): <NEW_LINE> <INDENT> def __init__(self, filename): <NEW_LINE> <INDENT> TlacFile.__init__(self, filename) <NEW_LINE> self.filename = filename <NEW_LINE> self.rawdat = [] <NEW_LINE> self.dat = {} <NEW_LINE> self.icollst = [] <NEW_LINE> self.set_icollst() <NEW_LINE> <DEDENT> def __getitem__(self, key): <NEW_LINE> <INDENT> if(not key in self.dat): <NEW_LINE> <INDENT> self.load(key) <NEW_LINE> <DEDENT> return self.dat[key] <NEW_LINE> <DEDENT> def keys(self): <NEW_LINE> <INDENT> return self.icollst <NEW_LINE> <DEDENT> def load(self, keys): <NEW_LINE> <INDENT> if(not isinstance(keys, list)): <NEW_LINE> <INDENT> keys = [keys ] <NEW_LINE> <DEDENT> icols = self.keys_to_icols(keys) <NEW_LINE> if(len(icols) == len(self.icollst)): <NEW_LINE> <INDENT> icols = None <NEW_LINE> <DEDENT> self.rawdat.append(np.loadtxt(self.filename, usecols = icols)) <NEW_LINE> for i,k in enumerate(keys): <NEW_LINE> <INDENT> if(k in self.dat): <NEW_LINE> <INDENT> raise Exception("Key " + k + " already loaded") <NEW_LINE> <DEDENT> if(len(keys) == 1): <NEW_LINE> <INDENT> self.dat[k] = self.rawdat[-1] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.dat[k] = self.rawdat[-1][:,i] <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def load_all(self): <NEW_LINE> <INDENT> self.load(self.icollst) <NEW_LINE> <DEDENT> def keys_to_icols(self, keys): <NEW_LINE> <INDENT> return [ self.icollst.index(k) for k in keys ] <NEW_LINE> <DEDENT> def set_icollst(self): <NEW_LINE> <INDENT> f = open(self.filename, "r") <NEW_LINE> l = f.readline() <NEW_LINE> f.close() <NEW_LINE> if(l[:1] != "#"): <NEW_LINE> <INDENT> raise Exception("Expected first line commented in " + self.filename) <NEW_LINE> <DEDENT> l = l[1:].strip() <NEW_LINE> cols = l.split(" ") <NEW_LINE> r = [] <NEW_LINE> for i, ccol in enumerate(cols): <NEW_LINE> <INDENT> name, rest = ccol.split("(") <NEW_LINE> name = name.strip() <NEW_LINE> rest = rest[:-1].split("-") <NEW_LINE> if(len(rest) > 1): <NEW_LINE> <INDENT> a = int(rest[0]) <NEW_LINE> b = 
int(rest[1]) <NEW_LINE> for j in range(0, b - a + 1): <NEW_LINE> <INDENT> r.append(name + str(j)) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> r.append(name) <NEW_LINE> <DEDENT> <DEDENT> self.icollst = r
For reading tlac *dat and *uvdat files.
62598fb666656f66f7d5a4eb
class MediasTestCase(OrigamiTestCase): <NEW_LINE> <INDENT> def test_get_medias_list(self): <NEW_LINE> <INDENT> result = self.simulate_get("/medias", headers={"Authorization": "Bearer " + self.token}).json <NEW_LINE> target = [ { "id": 1, "media_type": "audio", "media_name": "audio1.mp3", "url": None }, { "id": 2, "media_type": "video", "media_name": "video1.mp4", "url": None }, { "id": 3, "media_type": "text", "media_name": "text1.txt", "url": None }, { "id": 4, "media_type": "image", "media_name": "image1.jpg", "url": None }, { "id": 5, "media_type": "audio", "media_name": "audio2.mp3", "url": None }, { "id": 6, "media_type": "audio", "media_name": "audio3.mp3", "url": None }, { "id": 7, "media_type": "image", "media_name": "image2.jpg", "url": None } ] <NEW_LINE> self.assertEqual(target, result) <NEW_LINE> <DEDENT> def test_get_media_types(self): <NEW_LINE> <INDENT> result = self.simulate_get("/medias/image", headers={"Authorization": "Bearer " + self.token}).json <NEW_LINE> target = [ { "id": 4, "media_type": "image", "media_name": "image1.jpg", "url": None }, { "id": 7, "media_type": "image", "media_name": "image2.jpg", "url": None } ] <NEW_LINE> self.assertEqual(target, result) <NEW_LINE> <DEDENT> def test_get_media(self): <NEW_LINE> <INDENT> result = self.simulate_get("/medias/audio/1", headers={"Authorization": "Bearer " + self.token}).json <NEW_LINE> target = { "id": 1, "media_type": "audio", "media_name": "audio1.mp3", "url": None } <NEW_LINE> self.assertEqual(target, result) <NEW_LINE> <DEDENT> def test_get_media_nonexistent(self): <NEW_LINE> <INDENT> result = self.simulate_get("/media/audio/5", headers={"Authorization": "Bearer " + self.token}).status <NEW_LINE> target = falcon.HTTP_404 <NEW_LINE> self.assertEqual(result, target)
Class for testing medias.
62598fb656ac1b37e63022e6
class TagDetail(generics.RetrieveAPIView): <NEW_LINE> <INDENT> queryset = Tag.objects.all() <NEW_LINE> serializer_class = TagSerializer
Retrieve a tag. No need to update or destroy a tag. The system will clean up unused tags periodically. TODO
62598fb62c8b7c6e89bd38bf
class StatementStructure: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.input_origin = None <NEW_LINE> self.segments_full = [] <NEW_LINE> self.segments_imp = [] <NEW_LINE> self.segments_graph = [] <NEW_LINE> self.segments_standard = [] <NEW_LINE> self.result = None <NEW_LINE> self.result_desc = None
descrip the nl structure after parse
62598fb6796e427e5384e88f
class Dropout3d(_DropoutNd): <NEW_LINE> <INDENT> def forward(self, input): <NEW_LINE> <INDENT> return F.dropout3d(input, self.p, self.training, self.inplace)
Randomly zeroes whole channels of the input tensor. The channels to zero are randomized on every forward call. Usually the input comes from :class:`nn.Conv3d` modules. As described in the paper `Efficient Object Localization Using Convolutional Networks`_ , if adjacent pixels within feature maps are strongly correlated (as is normally the case in early convolution layers) then i.i.d. dropout will not regularize the activations and will otherwise just result in an effective learning rate decrease. In this case, :func:`nn.Dropout3d` will help promote independence between feature maps and should be used instead. Args: p (float, optional): probability of an element to be zeroed. inplace (bool, optional): If set to ``True``, will do this operation in-place Shape: - Input: :math:`(N, C, D, H, W)` - Output: :math:`(N, C, D, H, W)` (same shape as input) Examples:: >>> m = nn.Dropout3d(p=0.2) >>> input = torch.randn(20, 16, 4, 32, 32) >>> output = m(input) .. _Efficient Object Localization Using Convolutional Networks: http://arxiv.org/abs/1411.4280
62598fb697e22403b383b000
class EnabledButtonTests(TavastiaTestCase): <NEW_LINE> <INDENT> test_qml_file = "%s/%s.qml" % (os.path.dirname(os.path.realpath(__file__)),"EnabledButtonTests") <NEW_LINE> def test_can_select_button(self): <NEW_LINE> <INDENT> btn = self.app.select_single('Button') <NEW_LINE> self.assertThat(btn, Not(Is(None))) <NEW_LINE> <DEDENT> def test_clicked_signal_emitted(self): <NEW_LINE> <INDENT> btn = self.app.select_single('Button') <NEW_LINE> signal = btn.watch_signal('clicked(QVariant)') <NEW_LINE> self.mouse.move_to_object(btn) <NEW_LINE> self.mouse.click() <NEW_LINE> self.assertThat(signal.was_emitted, Equals(True)) <NEW_LINE> self.assertThat(signal.num_emissions, Equals(1)) <NEW_LINE> <DEDENT> def test_entered_signal_emitted(self): <NEW_LINE> <INDENT> btn = self.app.select_single('Button') <NEW_LINE> signal = btn.watch_signal('hoveredChanged()') <NEW_LINE> self.mouse.move_to_object(btn) <NEW_LINE> self.assertThat(signal.was_emitted, Equals(True)) <NEW_LINE> self.assertThat(signal.num_emissions, Equals(1)) <NEW_LINE> self.assertThat(btn.hovered, Eventually(Equals(True))) <NEW_LINE> <DEDENT> def test_exited_signal_emitted(self): <NEW_LINE> <INDENT> btn = self.app.select_single('Button') <NEW_LINE> self.mouse.move_to_object(btn) <NEW_LINE> signal = btn.watch_signal('hoveredChanged()') <NEW_LINE> self.mouse.move(0,0) <NEW_LINE> self.assertThat(signal.was_emitted, Equals(True)) <NEW_LINE> self.assertThat(signal.num_emissions, Equals(1)) <NEW_LINE> self.assertThat(btn.hovered, Eventually(Equals(False))) <NEW_LINE> <DEDENT> def test_can_press_button(self): <NEW_LINE> <INDENT> btn = self.app.select_single('Button') <NEW_LINE> self.mouse.move_to_object(btn) <NEW_LINE> self.mouse.press() <NEW_LINE> self.addCleanup(self.mouse.release) <NEW_LINE> self.assertThat(btn.pressed, Eventually(Equals(True)))
Tests for an enabled Button component.
62598fb6167d2b6e312b706e
class VtableEntry(object): <NEW_LINE> <INDENT> def __init__(self, offset, names, value, is_undefined): <NEW_LINE> <INDENT> self.offset = offset <NEW_LINE> self.names = names <NEW_LINE> self.value = value <NEW_LINE> self.is_undefined = is_undefined <NEW_LINE> <DEDENT> def __lt__(self, other): <NEW_LINE> <INDENT> return self.offset < other.offset
This class contains an entry in Vtable. The names attribute constains all the possible symbol names for this entry due to symbol aliasing. Attributes: offset: Offset with respect to vtable. names: A list of possible symbol names of the entry. value: Value of the entry. is_undefined: If entry has a symbol, whether symbol is undefined or not.
62598fb671ff763f4b5e7872
class WorkflowsIndividualApi(object): <NEW_LINE> <INDENT> def __init__(self, api_client=None): <NEW_LINE> <INDENT> config = Configuration() <NEW_LINE> if api_client: <NEW_LINE> <INDENT> self.api_client = api_client <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if not config.api_client: <NEW_LINE> <INDENT> config.api_client = ApiClient() <NEW_LINE> <DEDENT> self.api_client = config.api_client <NEW_LINE> <DEDENT> <DEDENT> def workflows_update_workflow_status_post(self, workflow_reference_id, body, **kwargs): <NEW_LINE> <INDENT> kwargs['_return_http_data_only'] = True <NEW_LINE> if kwargs.get('callback'): <NEW_LINE> <INDENT> return self.workflows_update_workflow_status_post_with_http_info(workflow_reference_id, body, **kwargs) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> (data) = self.workflows_update_workflow_status_post_with_http_info(workflow_reference_id, body, **kwargs) <NEW_LINE> return data <NEW_LINE> <DEDENT> <DEDENT> def workflows_update_workflow_status_post_with_http_info(self, workflow_reference_id, body, **kwargs): <NEW_LINE> <INDENT> all_params = ['workflow_reference_id', 'body'] <NEW_LINE> all_params.append('callback') <NEW_LINE> all_params.append('_return_http_data_only') <NEW_LINE> all_params.append('_preload_content') <NEW_LINE> all_params.append('_request_timeout') <NEW_LINE> params = locals() <NEW_LINE> for key, val in iteritems(params['kwargs']): <NEW_LINE> <INDENT> if key not in all_params: <NEW_LINE> <INDENT> raise TypeError( "Got an unexpected keyword argument '%s'" " to method workflows_update_workflow_status_post" % key ) <NEW_LINE> <DEDENT> params[key] = val <NEW_LINE> <DEDENT> del params['kwargs'] <NEW_LINE> if ('workflow_reference_id' not in params) or (params['workflow_reference_id'] is None): <NEW_LINE> <INDENT> raise ValueError("Missing the required parameter `workflow_reference_id` when calling `workflows_update_workflow_status_post`") <NEW_LINE> <DEDENT> if ('body' not in params) or (params['body'] is None): <NEW_LINE> <INDENT> raise 
ValueError("Missing the required parameter `body` when calling `workflows_update_workflow_status_post`") <NEW_LINE> <DEDENT> collection_formats = {} <NEW_LINE> path_params = {} <NEW_LINE> query_params = [] <NEW_LINE> if 'workflow_reference_id' in params: <NEW_LINE> <INDENT> query_params.append(('workflowReferenceId', params['workflow_reference_id'])) <NEW_LINE> <DEDENT> header_params = {} <NEW_LINE> form_params = [] <NEW_LINE> local_var_files = {} <NEW_LINE> body_params = None <NEW_LINE> if 'body' in params: <NEW_LINE> <INDENT> body_params = params['body'] <NEW_LINE> <DEDENT> header_params['Accept'] = self.api_client. select_header_accept(['application/json']) <NEW_LINE> header_params['Content-Type'] = self.api_client. select_header_content_type(['application/json']) <NEW_LINE> auth_settings = ['OAuth2'] <NEW_LINE> return self.api_client.call_api('/workflows/update-workflow-status', 'POST', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='Workflow1', auth_settings=auth_settings, callback=params.get('callback'), _return_http_data_only=params.get('_return_http_data_only'), _preload_content=params.get('_preload_content', True), _request_timeout=params.get('_request_timeout'), collection_formats=collection_formats)
NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually. Ref: https://github.com/swagger-api/swagger-codegen
62598fb68a349b6b43686337
class BlastIDIndex(object): <NEW_LINE> <INDENT> def __init__(self, seqDB): <NEW_LINE> <INDENT> self.seqDB = seqDB <NEW_LINE> self.seqInfoDict = BlastIDInfoDict(self) <NEW_LINE> <DEDENT> id_delimiter='|' <NEW_LINE> def unpack_id(self, id): <NEW_LINE> <INDENT> return id.split(self.id_delimiter) <NEW_LINE> <DEDENT> def index_unpacked_ids(self, unpack_f=None): <NEW_LINE> <INDENT> if unpack_f is None: <NEW_LINE> <INDENT> unpack_f=self.unpack_id <NEW_LINE> <DEDENT> t={} <NEW_LINE> for id in self.seqDB: <NEW_LINE> <INDENT> for s in unpack_f(id): <NEW_LINE> <INDENT> if s == id: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> s=s.upper() <NEW_LINE> try: <NEW_LINE> <INDENT> if t[s]!=id and t[s] is not None: <NEW_LINE> <INDENT> t[s]=None <NEW_LINE> <DEDENT> <DEDENT> except KeyError: <NEW_LINE> <INDENT> t[s]=id <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> for id in t.itervalues(): <NEW_LINE> <INDENT> if id is not None: <NEW_LINE> <INDENT> self._unpacked_dict=t <NEW_LINE> return <NEW_LINE> <DEDENT> <DEDENT> self._unpacked_dict={} <NEW_LINE> <DEDENT> def get_real_id(self, bogusID, unpack_f=None): <NEW_LINE> <INDENT> if unpack_f is None: <NEW_LINE> <INDENT> unpack_f = self.unpack_id <NEW_LINE> <DEDENT> if not hasattr(self, '_unpacked_dict'): <NEW_LINE> <INDENT> self.index_unpacked_ids(unpack_f) <NEW_LINE> <DEDENT> for s in unpack_f(bogusID): <NEW_LINE> <INDENT> s = s.upper() <NEW_LINE> try: <NEW_LINE> <INDENT> id = self._unpacked_dict[s] <NEW_LINE> if id is not None: <NEW_LINE> <INDENT> return id <NEW_LINE> <DEDENT> <DEDENT> except KeyError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> raise KeyError("no key '%s' in database %s" % (bogusID, repr(self.seqDB))) <NEW_LINE> <DEDENT> def __getitem__(self, seqID): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self.seqDB[seqID] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> return self.seqDB[self.get_real_id(seqID)] <NEW_LINE> <DEDENT> <DEDENT> def __contains__(self, seqID): <NEW_LINE> <INDENT> try: <NEW_LINE> 
<INDENT> self.seqInfoDict[seqID] <NEW_LINE> return True <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> return False
This class acts as a wrapper around a regular seqDB, and handles the mangled IDs returned by BLAST to translate them to the correct ID. Since NCBI treats FASTA ID as a blob into which they like to stuff many fields... and then NCBI BLAST mangles those IDs when it reports hits, so they no longer match the true ID... we are forced into contortions to rescue the true ID from mangled IDs. Our workaround strategy: since NCBI packs the FASTA ID with multiple IDs (GI, GB, RefSeq ID etc.), we can use any of these identifiers that are found in a mangled ID, by storing a mapping of these sub-identifiers to the true FASTA ID.
62598fb6956e5f7376df56fb
class ListChainService(ListCommand): <NEW_LINE> <INDENT> resource = 'chain_service' <NEW_LINE> log = logging.getLogger(__name__ + '.ListChainService') <NEW_LINE> list_columns = ['id', 'chain_id', 'service_id', 'sequence_number'] <NEW_LINE> pagination_support = True <NEW_LINE> sorting_support = True <NEW_LINE> def add_known_arguments(self, parser): <NEW_LINE> <INDENT> parser.add_argument( '--chain_id', help='Chain ID')
List ChainServices that belong to a given tenant.
62598fb6bf627c535bcb159d
class ProblemPage(PageObject): <NEW_LINE> <INDENT> url = None <NEW_LINE> def is_browser_on_page(self): <NEW_LINE> <INDENT> return self.q(css='.xblock-student_view').present <NEW_LINE> <DEDENT> @property <NEW_LINE> def problem_name(self): <NEW_LINE> <INDENT> return self.q(css='.problem-header').text[0]
View of problem page.
62598fb65fdd1c0f98e5e08a
class DimensionRegistryTest(TestCase): <NEW_LINE> <INDENT> def test_registry_contains_dimension(self): <NEW_LINE> <INDENT> time = registry.get_dimension('time') <NEW_LINE> self.assertIsNotNone(time) <NEW_LINE> self.assertIsInstance(time, models.TimeDimension) <NEW_LINE> <DEDENT> def test_registry_size(self): <NEW_LINE> <INDENT> self.assertEquals(len(registry.get_dimension_ids()), 25) <NEW_LINE> <DEDENT> def test_registry_rejects_unknown_keys(self): <NEW_LINE> <INDENT> with self.assertRaises(KeyError): <NEW_LINE> <INDENT> registry.get_dimension('made_up_dimension_key')
Test the dimension registry
62598fb6d486a94d0ba2c0ce
class Random: <NEW_LINE> <INDENT> def __init__(self,seed=5555): <NEW_LINE> <INDENT> self.seed = seed <NEW_LINE> self.m_v = np.uint64(4101842887655102017) <NEW_LINE> self.m_w = np.uint64(1) <NEW_LINE> self.m_u = np.uint64(1) <NEW_LINE> self.m_u = np.uint(self.seed) ^ self.m_v <NEW_LINE> self.int64() <NEW_LINE> self.m_v = self.m_u <NEW_LINE> self.int64() <NEW_LINE> self.m_w = self.m_v <NEW_LINE> self.int64() <NEW_LINE> <DEDENT> def int64(self): <NEW_LINE> <INDENT> self.m_u = np.uint64(self.m_u * 2862933555777941757) + np.uint64(7046029254386353087) <NEW_LINE> self.m_v ^= self.m_v >> np.uint64(17) <NEW_LINE> self.m_v ^= self.m_v << np.uint64(31) <NEW_LINE> self.m_v ^= self.m_v >> np.uint64(8) <NEW_LINE> self.m_w = np.uint64(np.uint64(4294957665)*(self.m_w & np.uint64(0xffffffff))) + np.uint64((self.m_w >> np.uint64(32))) <NEW_LINE> x = np.uint64(self.m_u ^ (self.m_u << np.uint64(21))) <NEW_LINE> x ^= x >> np.uint64(35) <NEW_LINE> x ^= x << np.uint64(4) <NEW_LINE> with np.errstate(over='ignore'): <NEW_LINE> <INDENT> return (x + self.m_v)^self.m_w
A random number generator class
62598fb6aad79263cf42e8cf
class FlaskGroup(AppGroup): <NEW_LINE> <INDENT> def __init__(self, add_default_commands=True, create_app=None, **extra): <NEW_LINE> <INDENT> AppGroup.__init__(self, **extra) <NEW_LINE> self.create_app = create_app <NEW_LINE> if add_default_commands: <NEW_LINE> <INDENT> self.add_command(run_command) <NEW_LINE> self.add_command(shell_command) <NEW_LINE> <DEDENT> self._loaded_plugin_commands = False <NEW_LINE> <DEDENT> def _load_plugin_commands(self): <NEW_LINE> <INDENT> if self._loaded_plugin_commands: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> import pkg_resources <NEW_LINE> <DEDENT> except ImportError: <NEW_LINE> <INDENT> self._loaded_plugin_commands = True <NEW_LINE> return <NEW_LINE> <DEDENT> for ep in pkg_resources.iter_entry_points('flask.commands'): <NEW_LINE> <INDENT> self.add_command(ep.load(), ep.name) <NEW_LINE> <DEDENT> self._loaded_plugin_commands = True <NEW_LINE> <DEDENT> def get_command(self, ctx, name): <NEW_LINE> <INDENT> self._load_plugin_commands() <NEW_LINE> rv = AppGroup.get_command(self, ctx, name) <NEW_LINE> if rv is not None: <NEW_LINE> <INDENT> return rv <NEW_LINE> <DEDENT> info = ctx.ensure_object(ScriptInfo) <NEW_LINE> try: <NEW_LINE> <INDENT> rv = info.load_app().cli.get_command(ctx, name) <NEW_LINE> if rv is not None: <NEW_LINE> <INDENT> return rv <NEW_LINE> <DEDENT> <DEDENT> except NoAppException: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> def list_commands(self, ctx): <NEW_LINE> <INDENT> self._load_plugin_commands() <NEW_LINE> rv = set(click.Group.list_commands(self, ctx)) <NEW_LINE> info = ctx.ensure_object(ScriptInfo) <NEW_LINE> try: <NEW_LINE> <INDENT> rv.update(info.load_app().cli.list_commands(ctx)) <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> return sorted(rv) <NEW_LINE> <DEDENT> def main(self, *args, **kwargs): <NEW_LINE> <INDENT> obj = kwargs.get('obj') <NEW_LINE> if obj is None: <NEW_LINE> <INDENT> obj = ScriptInfo(create_app=self.create_app) 
<NEW_LINE> <DEDENT> kwargs['obj'] = obj <NEW_LINE> kwargs.setdefault('auto_envvar_prefix', 'FLASK') <NEW_LINE> return AppGroup.main(self, *args, **kwargs)
Special subclass of the :class:`AppGroup` group that supports loading more commands from the configured Flask app. Normally a developer does not have to interface with this class but there are some very advanced use cases for which it makes sense to create an instance of this. For information as of why this is useful see :ref:`custom-scripts`. :param add_default_commands: if this is True then the default run and shell commands wil be added. :param create_app: an optional callback that is passed the script info and returns the loaded app.
62598fb63d592f4c4edbafbd
class VERSE_SCENE_panel(bpy.types.Panel): <NEW_LINE> <INDENT> bl_space_type = 'PROPERTIES' <NEW_LINE> bl_region_type = 'WINDOW' <NEW_LINE> bl_context = 'scene' <NEW_LINE> bl_label = 'Verse Scenes' <NEW_LINE> bl_description = 'Panel with Verse scenes shared at Verse server' <NEW_LINE> @classmethod <NEW_LINE> def poll(cls, context): <NEW_LINE> <INDENT> wm = context.window_manager <NEW_LINE> if wm.verse_connected is True: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> def draw(self, context): <NEW_LINE> <INDENT> scene = context.scene <NEW_LINE> layout = self.layout <NEW_LINE> row = layout.row() <NEW_LINE> row.template_list('VERSE_SCENE_UL_slot', 'verse_scenes_widget_id', scene, 'verse_scenes', scene, 'cur_verse_scene_index', rows=3) <NEW_LINE> col = row.column(align=True) <NEW_LINE> col.menu('scene.verse_scene_menu', icon='DOWNARROW_HLT', text="")
GUI of Verse scene shared at Verse server
62598fb6be8e80087fbbf164
class Page(object): <NEW_LINE> <INDENT> qtb3_url = 'http://118.178.112.3:8006' <NEW_LINE> def __init__(self,driver,base_url = qtb3_url,parent =None): <NEW_LINE> <INDENT> self.driver = driver <NEW_LINE> self.base_url = base_url <NEW_LINE> self.timeout = 30 <NEW_LINE> self.parent = parent <NEW_LINE> <DEDENT> def _openPage(self, url): <NEW_LINE> <INDENT> url = self.base_url + url <NEW_LINE> self.driver.get(url) <NEW_LINE> b=self.driver.current_url <NEW_LINE> c=self.base_url + self.url <NEW_LINE> assert self.onPage(), 'Did not land on %s' % url <NEW_LINE> <DEDENT> def openPage(self): <NEW_LINE> <INDENT> self._openPage(self.url) <NEW_LINE> <DEDENT> def onPage(self): <NEW_LINE> <INDENT> return self.driver.current_url == self.base_url + self.url
'页面基础类,用于所有页面的继承
62598fb6d7e4931a7ef3c191