code stringlengths 4 4.48k | docstring stringlengths 1 6.45k | _id stringlengths 24 24 |
|---|---|---|
class NTRansition(SCPINode, SCPIQuery): <NEW_LINE> <INDENT> __slots__ = () <NEW_LINE> _cmd = "NTRansition" <NEW_LINE> args = [] | STATus:DEVice:SINTegrity:AMPLitude:NTRansition
Arguments: | 62598fa899fddb7c1ca62d7e |
class h6(html_tag): <NEW_LINE> <INDENT> pass | Represents the sixth-highest ranking heading. | 62598fa8a79ad16197769f8f |
class ContentImageInline(admin.TabularInline): <NEW_LINE> <INDENT> model = ContentImage <NEW_LINE> extra = 1 | Use admin's "inline" feature to allow editing from the parent model page. | 62598fa83cc13d1c6d465697 |
class ContentProcessorMakoTest(TestCase): <NEW_LINE> <INDENT> def test_template(self): <NEW_LINE> <INDENT> item = MagicMock() <NEW_LINE> item.node.name = "localhost" <NEW_LINE> item.attributes = { 'context': { 'number': "47", }, 'encoding': "latin-1", } <NEW_LINE> item._template_content = "Hi fröm ${number}@${node.name}!" <NEW_LINE> self.assertEqual( files.content_processor_mako(item), "Hi fröm 47@localhost!".encode("latin-1"), ) | Tests bundlewrap.items.files.content_processor_mako. | 62598fa863b5f9789fe85091 |
class ScriptNW(NodeWriter): <NEW_LINE> <INDENT> def start(self, node): <NEW_LINE> <INDENT> if 'type' in node and 'math/tex' in node['type']: <NEW_LINE> <INDENT> self.write('<%s' % node.name) <NEW_LINE> att = ' '.join(['%s="%s"' % (k, v) for k, v in node.items()]) <NEW_LINE> if att != '': <NEW_LINE> <INDENT> self.write(' %s' % att) <NEW_LINE> <DEDENT> self.write('>') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.writer['__default__'].start(node) <NEW_LINE> <DEDENT> <DEDENT> def data(self, node): <NEW_LINE> <INDENT> self.writer['__default__'].data(node) <NEW_LINE> <DEDENT> def end(self, node): <NEW_LINE> <INDENT> if 'type' in node and 'math/tex' in node['type']: <NEW_LINE> <INDENT> self.write('</%s>' % node.name) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.writer['__default__'].end(node) | Handles some special types of scripts. | 62598fa88e7ae83300ee8fcd |
class BoolTest(object): <NEW_LINE> <INDENT> def __init__(self, b=True, s="true",): <NEW_LINE> <INDENT> self.b = b <NEW_LINE> self.s = s <NEW_LINE> <DEDENT> def read(self, iprot): <NEW_LINE> <INDENT> if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: <NEW_LINE> <INDENT> iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) <NEW_LINE> return <NEW_LINE> <DEDENT> iprot.readStructBegin() <NEW_LINE> while True: <NEW_LINE> <INDENT> (fname, ftype, fid) = iprot.readFieldBegin() <NEW_LINE> if ftype == TType.STOP: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> if fid == 1: <NEW_LINE> <INDENT> if ftype == TType.BOOL: <NEW_LINE> <INDENT> self.b = iprot.readBool() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> elif fid == 2: <NEW_LINE> <INDENT> if ftype == TType.STRING: <NEW_LINE> <INDENT> self.s = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> iprot.readFieldEnd() <NEW_LINE> <DEDENT> iprot.readStructEnd() <NEW_LINE> <DEDENT> def write(self, oprot): <NEW_LINE> <INDENT> if oprot._fast_encode is not None and self.thrift_spec is not None: <NEW_LINE> <INDENT> oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) <NEW_LINE> return <NEW_LINE> <DEDENT> oprot.writeStructBegin('BoolTest') <NEW_LINE> if self.b is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('b', TType.BOOL, 1) <NEW_LINE> oprot.writeBool(self.b) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> if self.s is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('s', TType.STRING, 2) <NEW_LINE> oprot.writeString(self.s.encode('utf-8') if sys.version_info[0] == 2 else self.s) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> oprot.writeFieldStop() <NEW_LINE> 
oprot.writeStructEnd() <NEW_LINE> <DEDENT> def validate(self): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] <NEW_LINE> return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not (self == other) | Attributes:
- b
- s | 62598fa8d486a94d0ba2bef9 |
class NewRule(Model): <NEW_LINE> <INDENT> name = StringType(required=True) <NEW_LINE> expr = StringType(required=True) <NEW_LINE> position = IntType() <NEW_LINE> stop_condition = BooleanType(default=False) <NEW_LINE> tags = ListType(StringType) <NEW_LINE> class Options: <NEW_LINE> <INDENT> serialize_when_none = False | New filter rule. | 62598fa88da39b475be0310e |
class strategy: <NEW_LINE> <INDENT> def __init__(self, pop_size, genome_size, srate, mrate): <NEW_LINE> <INDENT> self.n = pop_size <NEW_LINE> self._max_l = genome_size <NEW_LINE> self.srate = srate <NEW_LINE> self.mrate = mrate <NEW_LINE> <DEDENT> def generate_from_lengths (self, ls): <NEW_LINE> <INDENT> return np.stack( [random_padded_individual(l,self._max_l) for l in ls]) <NEW_LINE> <DEDENT> def cut_matrix(self, _n): <NEW_LINE> <INDENT> return np.stack( [rpad(np.ones(l),self._max_l) for l in np.random.randint(MIN_L,self._max_l,size=_n)]) <NEW_LINE> <DEDENT> def init(self): <NEW_LINE> <INDENT> individual_lengths = np.random.randint(MIN_L, self._max_l, size=self.n) <NEW_LINE> individual_fitnesses = np.full(self.n,np.nan) <NEW_LINE> individual_genotypes = self.generate_from_lengths(individual_lengths) <NEW_LINE> return individual_lengths, individual_fitnesses, individual_genotypes <NEW_LINE> <DEDENT> def _x(self, a,b): <NEW_LINE> <INDENT> c = np.concatenate([a,b],axis=1) <NEW_LINE> def f(row): <NEW_LINE> <INDENT> r = row[~np.isnan(row)] <NEW_LINE> return rpad(r, self._max_l) if len(r) < self._max_l else r[:self._max_l] <NEW_LINE> <DEDENT> return np.apply_along_axis(f, 1, c) <NEW_LINE> <DEDENT> def X(self,pool): <NEW_LINE> <INDENT> chromosomes = pool * self.cut_matrix(len(pool)) <NEW_LINE> split = int(len(pool) / 2) <NEW_LINE> return np.concatenate([self._x(chromosomes[split:], chromosomes[:split]), self._x(chromosomes[:split], chromosomes[split:])]) <NEW_LINE> <DEDENT> def selection(self,individuals): <NEW_LINE> <INDENT> K = int(self.srate*self.n) <NEW_LINE> mates = 2*int(self.n-K) <NEW_LINE> replacement = self.n-mates <NEW_LINE> scores = individuals[:,1] <NEW_LINE> scores = scores - scores.max() <NEW_LINE> scores = scores / scores.sum() <NEW_LINE> selected = np.random.choice(range(self.n),mates+replacement,p=scores) <NEW_LINE> return selected[:mates], selected[mates:] <NEW_LINE> <DEDENT> def mutations(self, rate): <NEW_LINE> <INDENT> mar = 
np.zeros(self.n*self._max_l) <NEW_LINE> idx = np.random.randint(self.n*self._max_l, size=int(self.n*self._max_l*self.mrate)) <NEW_LINE> mar[idx]= np.random.randint(CODON_MAX, size=len(idx)) <NEW_LINE> return mar.reshape((self.n,self._max_l)) <NEW_LINE> <DEDENT> def select_and_mutate(self, meta, genes): <NEW_LINE> <INDENT> pool_idx,replace_idx = self.selection(meta) <NEW_LINE> return np.concatenate([genes[replace_idx], self.X(genes[pool_idx])]) + self.mutations(self.mrate) | This is a vanilla strategy used by sparkge. In principle this would be extended or changed for another bundle of functions. | 62598fa8be8e80087fbbef8e |
class Bullet(Projectile): <NEW_LINE> <INDENT> def expiryCheck(self): <NEW_LINE> <INDENT> if Vector(self.pos).y > self.parent.height: <NEW_LINE> <INDENT> self.expired = True | A kind of projectile that moves vertically upward and kills aliens
| 62598fa85166f23b2e243303 |
class TenantLib: <NEW_LINE> <INDENT> def __init__(self, tenant_config_loader: TenantConfigLoader, private_key_path: str, skills: List[Skill]) -> None: <NEW_LINE> <INDENT> self._tenant_config_loader = tenant_config_loader <NEW_LINE> self._tenants: Dict[int, Tenant] = {} <NEW_LINE> self._private_key_path = private_key_path <NEW_LINE> self._skills = skills <NEW_LINE> <DEDENT> def load_tenant(self, tenant_id: int) -> Tenant: <NEW_LINE> <INDENT> if tenant_id not in self._tenants: <NEW_LINE> <INDENT> tenant_config = self._tenant_config_loader.load(tenant_id) <NEW_LINE> loader = HandlerLoader( tenant_config=tenant_config, private_key_path=self._private_key_path, skills=self._skills, ) <NEW_LINE> self._tenants[tenant_id] = Tenant(handlers=loader.load()) <NEW_LINE> <DEDENT> return self._tenants[tenant_id] | Creates and holds tenants registry.
Tenants are identified by chat user ID.
Each tenant has a set of handlers parameterized with tenant config. | 62598fa88e7ae83300ee8fce |
class _QueuePutDataManager(ObjectDataManager): <NEW_LINE> <INDENT> def __init__(self, queue, method, args=()): <NEW_LINE> <INDENT> super(_QueuePutDataManager, self).__init__(target=queue, call=method, args=args) <NEW_LINE> <DEDENT> def tpc_vote(self, tx): <NEW_LINE> <INDENT> if self.target.full(): <NEW_LINE> <INDENT> raise QFull() | A data manager that checks if the queue is full before putting.
Overrides :meth:`tpc_vote` for efficiency. | 62598fa8236d856c2adc93d2 |
class TestGetMove(unittest.TestCase): <NEW_LINE> <INDENT> def test_exit_on_q_entry(self): <NEW_LINE> <INDENT> with mock.patch('builtins.input', return_value='q'): <NEW_LINE> <INDENT> self.assertRaises(SystemExit, minesweep.get_move) <NEW_LINE> <DEDENT> <DEDENT> def test_return_is_tuple(self): <NEW_LINE> <INDENT> with mock.patch('builtins.input', return_value='9 8'): <NEW_LINE> <INDENT> move = minesweep.get_move() <NEW_LINE> <DEDENT> self.assertEqual(type(move), tuple) <NEW_LINE> <DEDENT> def test_return_tuple_contains_two_values(self): <NEW_LINE> <INDENT> with mock.patch('builtins.input', return_value='9 8'): <NEW_LINE> <INDENT> move = minesweep.get_move() <NEW_LINE> <DEDENT> self.assertEqual(len(move), 2) <NEW_LINE> <DEDENT> def test_return_values_are_integers(self): <NEW_LINE> <INDENT> with mock.patch('builtins.input', return_value='9 8'): <NEW_LINE> <INDENT> move = minesweep.get_move() <NEW_LINE> <DEDENT> self.assertTrue(type(move[0]) == int and type(move[1]) == int) <NEW_LINE> <DEDENT> def test_return_values(self): <NEW_LINE> <INDENT> with mock.patch('builtins.input', return_value='9 8'): <NEW_LINE> <INDENT> move = minesweep.get_move() <NEW_LINE> <DEDENT> self.assertEqual(move, (8,7)) | test function get_move | 62598fa8435de62698e9bd22 |
class DebugPanel(object): <NEW_LINE> <INDENT> has_content = False <NEW_LINE> context = {} <NEW_LINE> def __init__(self, context={}): <NEW_LINE> <INDENT> self.context.update(context) <NEW_LINE> self.slug = slugify(self.name) <NEW_LINE> <DEDENT> def dom_id(self): <NEW_LINE> <INDENT> return 'djDebug%sPanel' % (self.name.replace(' ', '')) <NEW_LINE> <DEDENT> def nav_title(self): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def nav_subtitle(self): <NEW_LINE> <INDENT> return '' <NEW_LINE> <DEDENT> def title(self): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def url(self): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def content(self): <NEW_LINE> <INDENT> if self.has_content: <NEW_LINE> <INDENT> context = self.context.copy() <NEW_LINE> context.update(self.get_stats()) <NEW_LINE> return render_to_string(self.template, context) <NEW_LINE> <DEDENT> <DEDENT> def record_stats(self, stats): <NEW_LINE> <INDENT> toolbar = DebugToolbarMiddleware.get_current() <NEW_LINE> panel_stats = toolbar.stats.get(self.slug) <NEW_LINE> if panel_stats: <NEW_LINE> <INDENT> panel_stats.update(stats) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> toolbar.stats[self.slug] = stats <NEW_LINE> <DEDENT> <DEDENT> def get_stats(self): <NEW_LINE> <INDENT> toolbar = DebugToolbarMiddleware.get_current() <NEW_LINE> return toolbar.stats.get(self.slug, {}) <NEW_LINE> <DEDENT> def process_request(self, request): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def process_view(self, request, view_func, view_args, view_kwargs): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def process_response(self, request, response): <NEW_LINE> <INDENT> pass | Base class for debug panels. | 62598fa897e22403b383ae38 |
class Deployment: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.commands() <NEW_LINE> <DEDENT> def commands(self): <NEW_LINE> <INDENT> op_con_list = {} <NEW_LINE> data = db_con_list(print_op=False) <NEW_LINE> connections_orignal = data.get('connections') <NEW_LINE> connections = {k:{x:y for x,y in v.items() if x not in ['password', 'edit_date']} for k, v in connections_orignal.items()} <NEW_LINE> for k, v in connections.items(): <NEW_LINE> <INDENT> op_con_list.update({len(op_con_list): {k: v}}) <NEW_LINE> <DEDENT> conection_id = None <NEW_LINE> for k, v in connections_orignal.items(): <NEW_LINE> <INDENT> print(k, v) <NEW_LINE> if v.get('deploy') is True: <NEW_LINE> <INDENT> sys.stdout.write(f"\nExisting deploy connection found. Connection id {k}\n") <NEW_LINE> conection_id = k <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> if conection_id is None: <NEW_LINE> <INDENT> sys.stdout.write("\nChoose connection where to deploy framework\n") <NEW_LINE> con_list = create_option_list(op_con_list, f'Select connection you want to use\n{lines}') <NEW_LINE> write_line(txt=con_list) <NEW_LINE> while True: <NEW_LINE> <INDENT> option = input_handler() <NEW_LINE> if option not in op_con_list.keys(): <NEW_LINE> <INDENT> write_line(6) <NEW_LINE> continue <NEW_LINE> <DEDENT> break <NEW_LINE> <DEDENT> conection_id = list(op_con_list.get(option).keys())[0] <NEW_LINE> <DEDENT> deploy_stat = Deploy(conection_id).status() <NEW_LINE> if deploy_stat == 400: <NEW_LINE> <INDENT> sys.stdout.write(f"\nDeployment failed") <NEW_LINE> exit() <NEW_LINE> <DEDENT> conn = connections_orignal.get(conection_id) <NEW_LINE> conn.update({'deploy': True}) <NEW_LINE> with open(database_config, 'wb') as fw: <NEW_LINE> <INDENT> pickle.dump(data, fw) | class for deploying crawler framework | 62598fa8f548e778e596b4d0 |
class RedditorListingMixin(BaseListingMixin, GildedListingMixin): <NEW_LINE> <INDENT> @cachedproperty <NEW_LINE> def comments(self) -> SubListing: <NEW_LINE> <INDENT> return SubListing(self._reddit, self._path, "comments") <NEW_LINE> <DEDENT> @cachedproperty <NEW_LINE> def submissions(self) -> SubListing: <NEW_LINE> <INDENT> return SubListing(self._reddit, self._path, "submitted") <NEW_LINE> <DEDENT> def downvoted( self, **generator_kwargs: Union[str, int, Dict[str, str]] ) -> Iterator[Any]: <NEW_LINE> <INDENT> return ListingGenerator( self._reddit, urljoin(self._path, "downvoted"), **generator_kwargs ) <NEW_LINE> <DEDENT> def gildings( self, **generator_kwargs: Union[str, int, Dict[str, str]] ) -> Iterator[Any]: <NEW_LINE> <INDENT> return ListingGenerator( self._reddit, urljoin(self._path, "gilded/given"), **generator_kwargs ) <NEW_LINE> <DEDENT> def hidden( self, **generator_kwargs: Union[str, int, Dict[str, str]] ) -> Iterator[Any]: <NEW_LINE> <INDENT> return ListingGenerator( self._reddit, urljoin(self._path, "hidden"), **generator_kwargs ) <NEW_LINE> <DEDENT> def saved( self, **generator_kwargs: Union[str, int, Dict[str, str]] ) -> Iterator[Any]: <NEW_LINE> <INDENT> return ListingGenerator( self._reddit, urljoin(self._path, "saved"), **generator_kwargs ) <NEW_LINE> <DEDENT> def upvoted( self, **generator_kwargs: Union[str, int, Dict[str, str]] ) -> Iterator[Any]: <NEW_LINE> <INDENT> return ListingGenerator( self._reddit, urljoin(self._path, "upvoted"), **generator_kwargs ) | Adds additional methods pertaining to Redditor instances. | 62598fa876e4537e8c3ef4d9 |
class EmailEMLForm(forms.Form): <NEW_LINE> <INDENT> error_css_class = 'error' <NEW_LINE> required_css_class = 'required' <NEW_LINE> source = forms.ChoiceField(required=True, widget=forms.Select(attrs={'class': 'no_clear'}), label=form_consts.Email.SOURCE) <NEW_LINE> source_method = forms.CharField(required=False, widget=forms.TextInput, label=form_consts.Email.SOURCE_METHOD) <NEW_LINE> source_reference = forms.CharField(required=False, widget=forms.TextInput(attrs={'size': '90'}), label=form_consts.Email.SOURCE_REFERENCE) <NEW_LINE> campaign = forms.ChoiceField(required=False, widget=forms.Select) <NEW_LINE> campaign_confidence = forms.ChoiceField(required=False, widget=forms.Select) <NEW_LINE> filedata = forms.FileField(required=True) <NEW_LINE> def __init__(self, username, *args, **kwargs): <NEW_LINE> <INDENT> super(EmailEMLForm, self).__init__(*args, **kwargs) <NEW_LINE> self.fields['source'].choices = [(c.name, c.name) for c in get_source_names(True, True, username)] <NEW_LINE> self.fields['source'].initial = get_user_organization(username) <NEW_LINE> self.fields['campaign'].choices = [("","")] <NEW_LINE> self.fields['campaign'].choices += [(c.name, c.name ) for c in get_item_names(Campaign, True)] <NEW_LINE> self.fields['campaign_confidence'].choices = [("", ""), ("low", "low"), ("medium", "medium"), ("high", "high")] <NEW_LINE> add_bucketlist_to_form(self) <NEW_LINE> add_ticket_to_form(self) | Django form for uploading an EML email. | 62598fa855399d3f05626450 |
class _UsmUserOwnPrivKeyChange_Type(KeyChange): <NEW_LINE> <INDENT> defaultHexValue = "" | Custom type usmUserOwnPrivKeyChange based on KeyChange | 62598fa84a966d76dd5eee0e |
class Card(models.Model): <NEW_LINE> <INDENT> number = models.CharField(max_length=16, validators=[MinLengthValidator(16)], null=False) <NEW_LINE> pin = models.CharField(max_length=4, validators=[MinLengthValidator(4)], null=False) <NEW_LINE> cvv = models.CharField(max_length=3, validators=[MinLengthValidator(3)], null=False) <NEW_LINE> blocked = models.BooleanField(default=False) <NEW_LINE> cash = models.IntegerField(default=0, null=False) <NEW_LINE> person = models.ForeignKey( Person, on_delete=models.CASCADE, null=True ) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return 'Card {}'.format(self.number) <NEW_LINE> <DEDENT> class Meta: <NEW_LINE> <INDENT> ordering = ['cash'] | Store card data with number, pin, cvv, cash
Related to Person model | 62598fa86aa9bd52df0d4df5 |
class Scorer(): <NEW_LINE> <INDENT> def __init__(self, scoring_params): <NEW_LINE> <INDENT> self.available_methods=ScoringMethods() <NEW_LINE> self.scoring_params = scoring_params <NEW_LINE> self.default_sorting = 'association_score' <NEW_LINE> <DEDENT> def score(self, evs, stringency, datatypes, sortby=None ): <NEW_LINE> <INDENT> targets = {} <NEW_LINE> diseases = {} <NEW_LINE> disease_with_data = set() <NEW_LINE> if sortby is None: <NEW_LINE> <INDENT> sortby = self.default_sorting <NEW_LINE> <DEDENT> counter = 0 <NEW_LINE> for es_result in evs: <NEW_LINE> <INDENT> counter+=1 <NEW_LINE> ev = es_result['_source'] <NEW_LINE> ev_score = ev['harmonic-sum'] <NEW_LINE> ev_counts = ev['evidence_count'] <NEW_LINE> '''target data''' <NEW_LINE> target = ev['target']['id'] <NEW_LINE> if target not in targets: <NEW_LINE> <INDENT> targets[target] = Score(type = Score.TARGET, key = target, name = "") <NEW_LINE> <DEDENT> targets[target].add_precomputed_score(ev_score, datatypes, ev_counts) <NEW_LINE> disease = ev['disease']['id'] <NEW_LINE> if disease != "cttv_root": <NEW_LINE> <INDENT> if ev['is_direct']: <NEW_LINE> <INDENT> disease_with_data.add(disease) <NEW_LINE> <DEDENT> diseases[disease] = Score(type = Score.DISEASE, key = disease, name = "") <NEW_LINE> diseases[disease].add_precomputed_score(ev_score, datatypes, ev_counts) <NEW_LINE> <DEDENT> <DEDENT> parametrized_targets = self.apply_scoring_params(targets, stringency) <NEW_LINE> parametrized_diseases = self.apply_scoring_params(diseases, stringency) <NEW_LINE> sorted_targets = sorted(parametrized_targets.values(),key=lambda v: v.scores[sortby][sortby], reverse=True) <NEW_LINE> sorted_diseases = sorted(parametrized_diseases.values(),key=lambda v: v.scores[sortby][sortby], reverse=True) <NEW_LINE> for i,score in enumerate(sorted_targets): <NEW_LINE> <INDENT> sorted_targets[i] = score.finalise() <NEW_LINE> <DEDENT> for i,score in enumerate(sorted_diseases): <NEW_LINE> <INDENT> sorted_diseases[i]=score.finalise() <NEW_LINE> 
<DEDENT> return sorted_targets, sorted_diseases, counter, list(disease_with_data) <NEW_LINE> <DEDENT> def apply_scoring_params(self, score_values, stringency): <NEW_LINE> <INDENT> def recurse(d, score_name, stringency): <NEW_LINE> <INDENT> if isinstance(d, dict): <NEW_LINE> <INDENT> for k,v in d.items(): <NEW_LINE> <INDENT> if (k == score_name) and (isinstance(v, float) or isinstance(v, int)): <NEW_LINE> <INDENT> d[k]=v/stringency <NEW_LINE> <DEDENT> elif isinstance(v, dict): <NEW_LINE> <INDENT> recurse(v, score_name, stringency) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return d <NEW_LINE> <DEDENT> for k,v in score_values.items(): <NEW_LINE> <INDENT> for score_name in score_values[k].scores: <NEW_LINE> <INDENT> score_values[k].scores[score_name] = recurse(score_values[k].scores[score_name], score_name, stringency) <NEW_LINE> <DEDENT> <DEDENT> return score_values | Calculate scores for a list of evidencestring documents | 62598fa84f88993c371f04a0 |
class PlayerGeneralSplits(_PlayerDashboard): <NEW_LINE> <INDENT> _endpoint = 'playerdashboardbygeneralsplits' <NEW_LINE> def location(self): <NEW_LINE> <INDENT> return _api_scrape(self.json, 1) <NEW_LINE> <DEDENT> def win_losses(self): <NEW_LINE> <INDENT> return _api_scrape(self.json, 2) <NEW_LINE> <DEDENT> def pre_post_all_star(self): <NEW_LINE> <INDENT> return _api_scrape(self.json, 3) <NEW_LINE> <DEDENT> def starting_position(self): <NEW_LINE> <INDENT> return _api_scrape(self.json, 4) <NEW_LINE> <DEDENT> def days_rest(self): <NEW_LINE> <INDENT> return _api_scrape(self.json, 5) | Contains stats pertaining to location, wins and losses, pre/post all star
break, starting position, and numbers of days rest
Args:
:player_id: ID of the player to look up
:team_id: ID of the team to look up
:measure_type: Specifies type of measure to use (Base, Advanced, etc.)
:per_mode: Mode to measure statistics (Totals, PerGame, Per36, etc.)
:plus_minus: Whether or not to consider plus minus (Y or N)
:pace_adjust: Whether or not to pace adjust stats (Y or N)
:rank: Whether or not to consider rank (Y or N)
:league_id: ID for the league to look in (Default is 00)
:season: Season given to look up
:season_type: Season type to consider (Regular / Playoffs)
:po_round: Playoff round
:outcome: Filter out by wins or losses
:location: Filter out by home or away
:month: Specify month to filter by
:season_segment: Filter by pre/post all star break
:date_from: Filter out games before a specific date
:date_to: Filter out games after a specific date
:opponent_team_id: Opponent team ID to look up
:vs_conference: Filter by conference
:vs_division: Filter by division
:game_segment: Filter by half / overtime
:period: Filter by quarter / specific overtime
:shot_clock_range: Filter statistics by range in shot clock
:last_n_games: Filter by number of games specified in N
Attributes:
:json: Contains the full json dump to play around with | 62598fa821bff66bcd722b92 |
class UserSettings(FieldsetForm): <NEW_LINE> <INDENT> def __init__(self, address_choices, *args, **kwargs): <NEW_LINE> <INDENT> super(UserSettings, self).__init__(*args, **kwargs) <NEW_LINE> self.fields['address'] = forms.ChoiceField( choices=(address_choices), widget=forms.Select(), error_messages={'required': _("Please choose an address.")}, required=True, label=_('Default email address')) <NEW_LINE> <DEDENT> id = forms.IntegerField( label=_('ID'), initial=9, widget=forms.HiddenInput(), required=False, error_messages={ 'invalid': _('Please provide an integer ID.')}) <NEW_LINE> mailing_list = forms.CharField( label=_('Mailing list'), widget=forms.HiddenInput(), required=False) <NEW_LINE> display_name = forms.CharField( label=_('Display name'), required=False) <NEW_LINE> preferred_language = forms.ChoiceField( label=_('Default/Preferred language'), widget=forms.Select(), error_messages={ 'required': _("Please choose a language.")}, required=False, choices=( ("", _("Please choose")), ("English (USA)", "English (USA)"))) <NEW_LINE> password = forms.CharField( label=_('Change password'), widget=forms.PasswordInput, required=False, error_messages={'required': _('Please enter your password.'), 'invalid': _('Please enter a valid password.')}) <NEW_LINE> conf_password = forms.CharField( label=_('Confirm password'), widget=forms.PasswordInput, required=False, error_messages={'required': _('Please enter your password.'), 'invalid': _('Please enter a valid password.')}) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> layout = [["User settings", "display_name", "password", "conf_password", "preferred_language", "address"]] | Form handling the user settings.
| 62598fa8cc0a2c111447af3c |
class Node: <NEW_LINE> <INDENT> def __init__(self, val, isLeaf, topLeft, topRight, bottomLeft, bottomRight): <NEW_LINE> <INDENT> self.val = val <NEW_LINE> self.isLeaf = isLeaf <NEW_LINE> self.topLeft = topLeft <NEW_LINE> self.topRight = topRight <NEW_LINE> self.bottomLeft = bottomLeft <NEW_LINE> self.bottomRight = bottomRight | Definition for a QuadTree node. | 62598fa830bbd7224646990e |
class GenericPointWiseLossEvaluator(AbstractEvaluator): <NEW_LINE> <INDENT> def __init__(self, loss_wrapper, label='loss', score_format='%f', batch_target_key='target'): <NEW_LINE> <INDENT> super(GenericPointWiseLossEvaluator, self).__init__() <NEW_LINE> self._loss_wrapper = loss_wrapper <NEW_LINE> self._label = label <NEW_LINE> self._score_format = score_format <NEW_LINE> self._batch_target_key = batch_target_key <NEW_LINE> self.reset() <NEW_LINE> <DEDENT> def reset(self): <NEW_LINE> <INDENT> self._loss = 0 <NEW_LINE> self._examples_nb = 0 <NEW_LINE> <DEDENT> def step(self, output, batch, last_activation=None): <NEW_LINE> <INDENT> current_loss = self._loss_wrapper.calculate_loss(output, batch, None, last_activation).item() <NEW_LINE> self._loss += current_loss * batch[self._batch_target_key].shape[0] <NEW_LINE> self._examples_nb += batch[self._batch_target_key].shape[0] <NEW_LINE> <DEDENT> def calculate(self): <NEW_LINE> <INDENT> return GenericEvaluatorResults( self._loss / self._examples_nb, self._label, self._score_format, is_max_better=False ) | Adapter that uses an object of a class derived from AbstractLossWrapper to calculate the loss during evaluation. | 62598fa801c39578d7f12cac |
class ItemCSVPipeline(object): <NEW_LINE> <INDENT> file_dict = {} <NEW_LINE> exporter_dict = {} <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> self.item_list = calaccess_crawler.get_items() <NEW_LINE> self.file_dir = os.environ.get( 'SCRAPY_EXPORT_DIR', os.path.dirname(__file__) ) <NEW_LINE> for item_klass in self.item_list: <NEW_LINE> <INDENT> file_name = "{}.csv".format(item_klass.__name__) <NEW_LINE> file_path = os.path.join(self.file_dir, file_name) <NEW_LINE> file_obj = open(file_path, 'wb') <NEW_LINE> self.file_dict[item_klass.__name__] = file_obj <NEW_LINE> exporter = CsvItemExporter(file_obj) <NEW_LINE> self.exporter_dict[item_klass.__name__] = exporter <NEW_LINE> <DEDENT> [e.start_exporting() for e in self.exporter_dict.values()] <NEW_LINE> <DEDENT> def spider_closed(self, spider): <NEW_LINE> <INDENT> [e.finish_exporting() for e in self.exporter_dict.values()] <NEW_LINE> [f.close() for f in self.file_dict.values()] <NEW_LINE> <DEDENT> def process_item(self, item, spider): <NEW_LINE> <INDENT> exporter = self.exporter_dict[type(item).__name__] <NEW_LINE> exporter.export_item(item) <NEW_LINE> return item | Exports each item's records as a separate CSV file. | 62598fa8498bea3a75a57a4a |
class ConsoleLogStatusHandler(AbstractStatusTimetableHandler): <NEW_LINE> <INDENT> def add_timetable_error(self, e: Exception) -> None: <NEW_LINE> <INDENT> if isinstance(e, TimetableExistError): <NEW_LINE> <INDENT> print(f'Расписание {e.timetable} уже существует.') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print(f'Ошибка добавления расписания: {e}') <NEW_LINE> <DEDENT> <DEDENT> def add_timetable_ok(self, timetable: 'uaviak_parser.structures.timetable') -> None: <NEW_LINE> <INDENT> print(f'Расписание {timetable} успешно добавлено') | Логирует добавление расписания в консоль.
| 62598fa83cc13d1c6d465699 |
class GetAccountArg(bb.Struct): <NEW_LINE> <INDENT> __slots__ = [ '_account_id_value', '_account_id_present', ] <NEW_LINE> _has_required_fields = True <NEW_LINE> def __init__(self, account_id=None): <NEW_LINE> <INDENT> self._account_id_value = None <NEW_LINE> self._account_id_present = False <NEW_LINE> if account_id is not None: <NEW_LINE> <INDENT> self.account_id = account_id <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def account_id(self): <NEW_LINE> <INDENT> if self._account_id_present: <NEW_LINE> <INDENT> return self._account_id_value <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise AttributeError("missing required field 'account_id'") <NEW_LINE> <DEDENT> <DEDENT> @account_id.setter <NEW_LINE> def account_id(self, val): <NEW_LINE> <INDENT> val = self._account_id_validator.validate(val) <NEW_LINE> self._account_id_value = val <NEW_LINE> self._account_id_present = True <NEW_LINE> <DEDENT> @account_id.deleter <NEW_LINE> def account_id(self): <NEW_LINE> <INDENT> self._account_id_value = None <NEW_LINE> self._account_id_present = False <NEW_LINE> <DEDENT> def _process_custom_annotations(self, annotation_type, processor): <NEW_LINE> <INDENT> super(GetAccountArg, self)._process_custom_annotations(annotation_type, processor) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return 'GetAccountArg(account_id={!r})'.format( self._account_id_value, ) | :ivar users.GetAccountArg.account_id: A user's account identifier. | 62598fa8462c4b4f79dbb93a |
class Posterize(Operation): <NEW_LINE> <INDENT> def __init__(self, prob, magnitude): <NEW_LINE> <INDENT> super(Posterize, self).__init__(prob, magnitude) <NEW_LINE> <DEDENT> def __call__(self, image): <NEW_LINE> <INDENT> if random.uniform(0, 1) > self.prob: <NEW_LINE> <INDENT> return image <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> magnitude_range = np.linspace(4, 8, 10) <NEW_LINE> bits = int(round(magnitude_range[self.magnitude])) <NEW_LINE> return ImageOps.posterize(image, bits) | Reduce the number of bits for each pixel magnitude bits. | 62598fa8baa26c4b54d4f1de |
class B(A): <NEW_LINE> <INDENT> pass | def chapo(self):
print("this is last function from last class class object") | 62598fa83317a56b869be4e1 |
class Register(BaseRequest): <NEW_LINE> <INDENT> def handler_function(self): <NEW_LINE> <INDENT> args = self.get_request_data() <NEW_LINE> username = args.get('username', None) <NEW_LINE> password1 = args.get('password1', None) <NEW_LINE> password2 = args.get('password2', None) <NEW_LINE> key = args.get('registerKey', None) <NEW_LINE> if key != 'szx2018': <NEW_LINE> <INDENT> return self.response_failure(username + u'验证码错误') <NEW_LINE> <DEDENT> if password1 != password2: <NEW_LINE> <INDENT> return self.response_failure(username + u'两次密码不一致') <NEW_LINE> <DEDENT> if re.findall(' ', username): <NEW_LINE> <INDENT> return self.response_failure(u'账号不能有空格符号') <NEW_LINE> <DEDENT> if re.findall(' ', password1): <NEW_LINE> <INDENT> return self.response_failure(u'密码不能有空格符号') <NEW_LINE> <DEDENT> user = DBOps.getOneDoc(DBCollonfig.users, {'username': username}) <NEW_LINE> if user: <NEW_LINE> <INDENT> return self.response_failure(username + u'用户已存在') <NEW_LINE> <DEDENT> userId = self.createUser(username, password1) <NEW_LINE> LogDBOps.writeLog(userId, LogDBConfig.doRegister) <NEW_LINE> LogDBOps.writeLog(userId, LogDBConfig.doLogin) <NEW_LINE> self.response_success() <NEW_LINE> <DEDENT> def createUser(self, username, password): <NEW_LINE> <INDENT> userNum = DBOps.getDocNum(DBCollonfig.users) <NEW_LINE> userId = DBCollonfig.startNum + userNum + 1 <NEW_LINE> now = datetime.now().strftime('%Y-%m-%d %H:%M:%S') <NEW_LINE> user = { '_id': userId, 'username': username, 'password': Encrypt.password_encrypt(password), 'permissions': {p: 0 for p in UserConfig.permissions}, 'createTime': now, 'createTimeStamp': TimeUtil.time_conversion(now, 1), 'lastLogin': now, 'company': '', 'tel': '', 'email': '', 'qq': '' } <NEW_LINE> DBOps.insertDoc(DBCollonfig.users, user) <NEW_LINE> self.result['result'] = { 'userObj': AccountUtil.resUserData(user), 'token': Authentication.generateToken(userId) } <NEW_LINE> return userId | 注册 | 62598fa83d592f4c4edbadfb |
class LocaleMiddleware(object): <NEW_LINE> <INDENT> response_redirect_class = HttpResponseRedirect <NEW_LINE> def process_request(self, request): <NEW_LINE> <INDENT> language = translation.get_language_from_request( request, check_path=self.is_language_prefix_patterns_used) <NEW_LINE> translation.activate(language) <NEW_LINE> request.LANGUAGE_CODE = translation.get_language() <NEW_LINE> <DEDENT> def process_response(self, request, response): <NEW_LINE> <INDENT> language = translation.get_language() <NEW_LINE> language_from_path = translation.get_language_from_path(request.path_info) <NEW_LINE> if (response.status_code == 404 and not language_from_path and self.is_language_prefix_patterns_used): <NEW_LINE> <INDENT> urlconf = getattr(request, 'urlconf', None) <NEW_LINE> language_path = '/%s%s' % (language, request.path_info) <NEW_LINE> path_valid = is_valid_path(language_path, urlconf) <NEW_LINE> if (not path_valid and settings.APPEND_SLASH and not language_path.endswith('/')): <NEW_LINE> <INDENT> path_valid = is_valid_path("%s/" % language_path, urlconf) <NEW_LINE> <DEDENT> if path_valid: <NEW_LINE> <INDENT> script_prefix = get_script_prefix() <NEW_LINE> language_url = "%s://%s%s" % ( request.scheme, request.get_host(), request.get_full_path().replace( script_prefix, '%s%s/' % (script_prefix, language), 1 ) ) <NEW_LINE> return self.response_redirect_class(language_url) <NEW_LINE> <DEDENT> <DEDENT> if not (self.is_language_prefix_patterns_used and language_from_path): <NEW_LINE> <INDENT> patch_vary_headers(response, ('Accept-Language',)) <NEW_LINE> <DEDENT> if 'Content-Language' not in response: <NEW_LINE> <INDENT> response['Content-Language'] = language <NEW_LINE> <DEDENT> return response <NEW_LINE> <DEDENT> @cached_property <NEW_LINE> def is_language_prefix_patterns_used(self): <NEW_LINE> <INDENT> for url_pattern in get_resolver(None).url_patterns: <NEW_LINE> <INDENT> if isinstance(url_pattern, LocaleRegexURLResolver): <NEW_LINE> <INDENT> return True <NEW_LINE> 
<DEDENT> <DEDENT> return False | This is a very simple middleware that parses a request
and decides what translation object to install in the current
thread context. This allows pages to be dynamically
translated to the language the user desires (if the language
is available, of course). | 62598fa88da39b475be03110 |
class SourcePrms: <NEW_LINE> <INDENT> def __init__(self, posn): <NEW_LINE> <INDENT> self.initialPosn = Vec3(posn) <NEW_LINE> <DEDENT> def getInitialPosn(self): <NEW_LINE> <INDENT> return self.initialPosn <NEW_LINE> <DEDENT> def getPosn(self, t): <NEW_LINE> <INDENT> raise NotImplementedException() | Base class for point-source distubance which generates elastic wave. | 62598fa87d43ff2487427399 |
class DeleteBruteAttacksRequest(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.Ids = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.Ids = params.get("Ids") <NEW_LINE> memeber_set = set(params.keys()) <NEW_LINE> for name, value in vars(self).items(): <NEW_LINE> <INDENT> if name in memeber_set: <NEW_LINE> <INDENT> memeber_set.remove(name) <NEW_LINE> <DEDENT> <DEDENT> if len(memeber_set) > 0: <NEW_LINE> <INDENT> warnings.warn("%s fileds are useless." % ",".join(memeber_set)) | DeleteBruteAttacks请求参数结构体
| 62598fa824f1403a9268584a |
class CacheInvalidationTestCase(KALiteTestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> super(CacheInvalidationTestCase, self).setUp() <NEW_LINE> post_save.disconnect(receiver=updates.invalidate_on_video_update, sender=VideoFile) <NEW_LINE> VideoFile.objects.create(youtube_id="blah1", flagged_for_download=False, percent_complete=100) <NEW_LINE> VideoFile.objects.create(youtube_id="blah2", flagged_for_download=False, percent_complete=100) <NEW_LINE> post_save.connect(receiver=updates.invalidate_on_video_update, sender=VideoFile) <NEW_LINE> <DEDENT> @patch('kalite.caching.invalidate_all_caches') <NEW_LINE> def test_cache_invaldation_occurs_exactly_once(self, mock_func): <NEW_LINE> <INDENT> call_command("videoscan") <NEW_LINE> actual = mock_func.call_count <NEW_LINE> self.assertEqual(actual, 1, "The call count should be exactly 1. Actual count: {actual}".format(actual=actual)) | Test that cache invalidation only happens ONCE per videoscan, even if multiple deletions are made.
See issue 3621. | 62598fa8236d856c2adc93d3 |
class UUIDUser(AbstractUser): <NEW_LINE> <INDENT> id = models.UUIDField(default=uuid.uuid4, primary_key=True) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> app_label = 'auth' | A user with a UUID as primary key | 62598fa82c8b7c6e89bd36f3 |
@final <NEW_LINE> class BooleanConditionVisitor(BaseNodeVisitor): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs) -> None: <NEW_LINE> <INDENT> super().__init__(*args, **kwargs) <NEW_LINE> self._same_nodes: List[ast.BoolOp] = [] <NEW_LINE> self._isinstance_calls: List[ast.BoolOp] = [] <NEW_LINE> <DEDENT> def visit_BoolOp(self, node: ast.BoolOp) -> None: <NEW_LINE> <INDENT> self._check_same_elements(node) <NEW_LINE> self._check_isinstance_calls(node) <NEW_LINE> self.generic_visit(node) <NEW_LINE> <DEDENT> def _get_all_names( self, node: ast.BoolOp, ) -> List[str]: <NEW_LINE> <INDENT> self._same_nodes.append(node) <NEW_LINE> names = [] <NEW_LINE> for operand in node.values: <NEW_LINE> <INDENT> if isinstance(operand, ast.BoolOp): <NEW_LINE> <INDENT> names.extend(self._get_all_names(operand)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> names.append(source.node_to_string(operand)) <NEW_LINE> <DEDENT> <DEDENT> return names <NEW_LINE> <DEDENT> def _check_same_elements(self, node: ast.BoolOp) -> None: <NEW_LINE> <INDENT> if node in self._same_nodes: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> operands = self._get_all_names(node) <NEW_LINE> if len(set(operands)) != len(operands): <NEW_LINE> <INDENT> self.add_violation(SameElementsInConditionViolation(node)) <NEW_LINE> <DEDENT> <DEDENT> def _check_isinstance_calls(self, node: ast.BoolOp) -> None: <NEW_LINE> <INDENT> if not isinstance(node.op, ast.Or): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> for var_name in _duplicated_isinstance_call(node): <NEW_LINE> <INDENT> self.add_violation( UnmergedIsinstanceCallsViolation(node, text=var_name), ) | Ensures that boolean conditions are correct. | 62598fa87047854f4633f307 |
class LabelCacheTests(unittest.TestCase): <NEW_LINE> <INDENT> cache = LabelCache() <NEW_LINE> def setUp(self): <NEW_LINE> <INDENT> self.cache.empty() <NEW_LINE> <DEDENT> def test_write(self): <NEW_LINE> <INDENT> self.cache.write('sort', 20) <NEW_LINE> self.cache.write('L1', 16) <NEW_LINE> self.assertTrue('sort' in self.cache.cache) <NEW_LINE> self.assertTrue('L1' in self.cache.cache) <NEW_LINE> self.assertEqual(20, self.cache.cache['sort']) <NEW_LINE> self.assertEqual(16, self.cache.cache['L1']) <NEW_LINE> <DEDENT> def test_write_conflict(self): <NEW_LINE> <INDENT> self.cache.write('sort', 20) <NEW_LINE> self.assertRaises(RuntimeError, self.cache.write, *['sort', 50]) <NEW_LINE> <DEDENT> def test_miss(self): <NEW_LINE> <INDENT> hit, index = self.cache.query('sort') <NEW_LINE> self.assertFalse(hit) <NEW_LINE> self.assertEqual(0, index) <NEW_LINE> <DEDENT> def test_hit(self): <NEW_LINE> <INDENT> self.cache.write('sort', 20) <NEW_LINE> self.cache.write('L1', 16) <NEW_LINE> hit, index = self.cache.query('sort') <NEW_LINE> self.assertTrue(hit) <NEW_LINE> self.assertEqual(20, index) <NEW_LINE> hit, index = self.cache.query('L1') <NEW_LINE> self.assertTrue(hit) <NEW_LINE> self.assertEqual(16, index) <NEW_LINE> <DEDENT> def test_data(self): <NEW_LINE> <INDENT> c1 = LabelCache() <NEW_LINE> c1.write('sort', 20) <NEW_LINE> c2 = LabelCache() <NEW_LINE> hit, index = c2.query('sort') <NEW_LINE> self.assertTrue(hit) <NEW_LINE> self.assertEqual(20, index) <NEW_LINE> c2.write('L1', 50) <NEW_LINE> hit, index = c1.query('L1') <NEW_LINE> self.assertTrue(hit) <NEW_LINE> self.assertEqual(50, index) <NEW_LINE> self.assertDictEqual(c1.cache, c2.cache) <NEW_LINE> self.assertDictEqual(c1.cache, self.cache.cache) | Tests basic functionality of the label cache. | 62598fa8f9cc0f698b1c525f |
class TelnetController: <NEW_LINE> <INDENT> def __init__(self, host_name, user_name, password, prompt): <NEW_LINE> <INDENT> self.host_name = host_name <NEW_LINE> self.user_name = user_name <NEW_LINE> self.password = password <NEW_LINE> self.prompt = prompt <NEW_LINE> self.tn = None <NEW_LINE> <DEDENT> def login(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.tn = telnetlib.Telnet(self.host_name) <NEW_LINE> self.tn.read_until('login: ') <NEW_LINE> self.tn.write(self.user_name + '\n') <NEW_LINE> if self.password: <NEW_LINE> <INDENT> self.tn.read_until('assword: ') <NEW_LINE> self.tn.write(self.password + '\n') <NEW_LINE> <DEDENT> self.tn.read_until(self.prompt) <NEW_LINE> return self <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> raise ConnectionError('Could not establish connection at "%s".' % self.host_name) <NEW_LINE> <DEDENT> <DEDENT> def run_command(self, command): <NEW_LINE> <INDENT> self.tn.write(command + '\n') <NEW_LINE> response = self.tn.read_until(self.prompt) <NEW_LINE> return self.__strip_output(command, response) <NEW_LINE> <DEDENT> def logout(self): <NEW_LINE> <INDENT> self.tn.close() <NEW_LINE> <DEDENT> def run_atomic_command(self, command): <NEW_LINE> <INDENT> self.login() <NEW_LINE> command_output = self.run_command(command) <NEW_LINE> self.logout() <NEW_LINE> return command_output <NEW_LINE> <DEDENT> def __strip_output(self, command, response): <NEW_LINE> <INDENT> lines = response.splitlines() <NEW_LINE> if command in lines[0]: <NEW_LINE> <INDENT> lines.pop(0) <NEW_LINE> <DEDENT> lines.pop() <NEW_LINE> lines = [item + '\n' for item in lines] <NEW_LINE> return ''.join(lines) | Connect to remote host with TELNET and issue commands.
@ivar host_name: Host name or IP address
@ivar user_name: User name
@ivar password: Password
@ivar prompt: Command prompt (or partial string matching the end of the prompt)
@ivar tn: Instance of a telnetlib.Telnet object | 62598fa844b2445a339b6906 |
class BaseChannel(object): <NEW_LINE> <INDENT> cache_timeout = 60*60*4 <NEW_LINE> short_name = None <NEW_LINE> long_name = None <NEW_LINE> icon_path = None <NEW_LINE> description = None <NEW_LINE> root_url = '' <NEW_LINE> swf_url = None <NEW_LINE> is_abstract = True <NEW_LINE> status = STATUS_GOOD <NEW_LINE> default_action = 'browse' <NEW_LINE> __metaclass__ = ChannelMetaClass <NEW_LINE> def __init__(self, plugin, **kwargs): <NEW_LINE> <INDENT> self.plugin = plugin <NEW_LINE> self.args = kwargs <NEW_LINE> self.cache_timeout = int(self.plugin.get_setting('default_cache_timeout')) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def get_channel_entry_info(self): <NEW_LINE> <INDENT> info = { 'Title': self.long_name, 'Thumb': self.icon_path, 'action': self.default_action, 'remote_url': None, 'channel': self.short_name, 'use_rtmp': 0, } <NEW_LINE> return info <NEW_LINE> <DEDENT> def action_browse(self): <NEW_LINE> <INDENT> rurl = self.get_url(self.args['remote_url']) <NEW_LINE> self.plugin.add_list_item({'Title': 'Hi!'}) <NEW_LINE> self.plugin.end_list() <NEW_LINE> <DEDENT> def get_url(self, url=None): <NEW_LINE> <INDENT> if url is None: <NEW_LINE> <INDENT> url = self.get_root_url() <NEW_LINE> <DEDENT> return "%s%s" % (self.base_url, url) <NEW_LINE> <DEDENT> def get_root_url(self): <NEW_LINE> <INDENT> return self.root_url <NEW_LINE> <DEDENT> def __call__(self): <NEW_LINE> <INDENT> action = self.args.get('action', 'browse') <NEW_LINE> if not hasattr(self, 'action_%s' % (action,)): <NEW_LINE> <INDENT> raise ChannelException("No Such Action: %s" % (action,)) <NEW_LINE> <DEDENT> action_method = getattr(self, 'action_%s' % (action, )) <NEW_LINE> return action_method() | The Base of all Channel classes. | 62598fa8b7558d589546355d |
class TMB_Panel: <NEW_LINE> <INDENT> bl_space_type = 'PROPERTIES' <NEW_LINE> bl_region_type = 'WINDOW' <NEW_LINE> bl_options = {'DEFAULT_CLOSED'} <NEW_LINE> bl_context = "render" <NEW_LINE> bl_category = 'True Motion Blur add-on' <NEW_LINE> COMPAT_ENGINES = {'BLENDER_EEVEE', 'BLENDER_WORKBENCH'} <NEW_LINE> @classmethod <NEW_LINE> def poll(cls, context): <NEW_LINE> <INDENT> return (context.engine in cls.COMPAT_ENGINES) | Not an Operator class, doesn't need to be registered as Operator | 62598fa885dfad0860cbfa0b |
class Domain(models.Model): <NEW_LINE> <INDENT> TYPES = [ ('MASTER', 'MASTER'), ('SLAVE', 'SLAVE'), ('NATIVE', 'NATIVE'), ] <NEW_LINE> id = models.IntegerField(primary_key=True) <NEW_LINE> name = models.CharField(max_length=255, null=False) <NEW_LINE> type = models.CharField(max_length=6, null=False, choices=TYPES) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> managed = False <NEW_LINE> db_table = 'domains' | PowerDNS Domain
Model to access domain in the PowerDNS db.
See https://doc.powerdns.com/authoritative/backends/generic-postgresql.html#default-schema | 62598fa86aa9bd52df0d4df7 |
class ClassAutoBalancedLoader(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def loadNextBatch(self, maxinstances = None): <NEW_LINE> <INDENT> pass | Future Implement Queue | 62598fa816aa5153ce400430 |
class DenseOpticalFlow(IOpticalFlow): <NEW_LINE> <INDENT> def set1stFrame(self, frame): <NEW_LINE> <INDENT> self.prev = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY) <NEW_LINE> <DEDENT> def apply(self, frame): <NEW_LINE> <INDENT> next = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY) <NEW_LINE> flow = cv2.calcOpticalFlowFarneback(self.prev, next, None, 0.5, 3, 15, 3, 5, 1.2, 0) <NEW_LINE> result = self.makeResult(frame, next, flow) <NEW_LINE> self.prev = next <NEW_LINE> return result <NEW_LINE> <DEDENT> def makeResult(self, disp, grayFrame, flow): <NEW_LINE> <INDENT> return frame.copy() | Abstract class for DenseOpticalFlow expressions | 62598fa863d6d428bbee26e0 |
class ConnScan(common.AbstractScanCommand): <NEW_LINE> <INDENT> scanners = [PoolScanConn] <NEW_LINE> meta_info = dict( author = 'Brendan Dolan-Gavitt', copyright = 'Copyright (c) 2007,2008 Brendan Dolan-Gavitt', contact = 'bdolangavitt@wesleyan.edu', license = 'GNU General Public License 2.0', url = 'http://moyix.blogspot.com/', os = 'WIN_32_XP_SP2', version = '1.0', ) <NEW_LINE> @staticmethod <NEW_LINE> def is_valid_profile(profile): <NEW_LINE> <INDENT> return (profile.metadata.get('os', 'unknown') == 'windows' and profile.metadata.get('major', 0) == 5) <NEW_LINE> <DEDENT> def render_text(self, outfd, data): <NEW_LINE> <INDENT> self.table_header(outfd, [(self.offset_column(), "[addrpad]"), ("Local Address", "25"), ("Remote Address", "25"), ("Pid", "") ]) <NEW_LINE> for tcp_obj in data: <NEW_LINE> <INDENT> local = "{0}:{1}".format(tcp_obj.LocalIpAddress, tcp_obj.LocalPort) <NEW_LINE> remote = "{0}:{1}".format(tcp_obj.RemoteIpAddress, tcp_obj.RemotePort) <NEW_LINE> self.table_row(outfd, tcp_obj.obj_offset, local, remote, tcp_obj.Pid) <NEW_LINE> <DEDENT> <DEDENT> def unified_output(self, data): <NEW_LINE> <INDENT> return TreeGrid([("Offset(P)", Address), ("LocalAddress", str), ("RemoteAddress", str), ("PID", int)], self.generator(data)) <NEW_LINE> <DEDENT> def generator(self, data): <NEW_LINE> <INDENT> for conn in data: <NEW_LINE> <INDENT> local = "{0}:{1}".format(conn.LocalIpAddress, conn.LocalPort) <NEW_LINE> remote = "{0}:{1}".format(conn.RemoteIpAddress, conn.RemotePort) <NEW_LINE> yield (0, [Address(conn.obj_offset), str(local), str(remote), int(conn.Pid)]) | Pool scanner for tcp connections | 62598fa81f5feb6acb162b4f |
class Min(Aggregation): <NEW_LINE> <INDENT> def __init__(self, column_name): <NEW_LINE> <INDENT> self._column_name = column_name <NEW_LINE> <DEDENT> def get_aggregate_data_type(self, table): <NEW_LINE> <INDENT> column = table.columns[self._column_name] <NEW_LINE> if isinstance(column.data_type, (Date, DateTime, Number, TimeDelta)): <NEW_LINE> <INDENT> return column.data_type <NEW_LINE> <DEDENT> <DEDENT> def validate(self, table): <NEW_LINE> <INDENT> column = table.columns[self._column_name] <NEW_LINE> if not isinstance(column.data_type, (Date, DateTime, Number, TimeDelta)): <NEW_LINE> <INDENT> raise DataTypeError('Min can only be applied to columns containing DateTime, Date or Number data.') <NEW_LINE> <DEDENT> <DEDENT> def run(self, table): <NEW_LINE> <INDENT> column = table.columns[self._column_name] <NEW_LINE> data = column.values_without_nulls() <NEW_LINE> if data: <NEW_LINE> <INDENT> return min(data) | Find the minimum value in a column.
This aggregation can be applied to columns containing :class:`.Date`,
:class:`.DateTime`, or :class:`.Number` data.
:param column_name:
The name of the column to be searched. | 62598fa821bff66bcd722b94 |
class Solution: <NEW_LINE> <INDENT> def lowestCommonAncestorII(self, root, A, B): <NEW_LINE> <INDENT> check_none_list = [root, A, B] <NEW_LINE> if any(map(lambda x: x is None, check_none_list)): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> visited = set() <NEW_LINE> visited.add(root) <NEW_LINE> A_ptr = A <NEW_LINE> while A_ptr is not None and A_ptr != root: <NEW_LINE> <INDENT> visited.add(A_ptr) <NEW_LINE> A_ptr = A_ptr.parent <NEW_LINE> <DEDENT> B_ptr = B <NEW_LINE> while B_ptr is not None and B_ptr != root: <NEW_LINE> <INDENT> if B_ptr in visited: <NEW_LINE> <INDENT> return B_ptr <NEW_LINE> <DEDENT> B_ptr = B_ptr.parent <NEW_LINE> <DEDENT> return root | @param root: The root of the tree
@param A and B: Two node in the tree
@return: The lowest common ancestor of A and B | 62598fa867a9b606de545efa |
class RatbagdDevice(_RatbagdDBus): <NEW_LINE> <INDENT> CAP_NONE = 0 <NEW_LINE> CAP_QUERY_CONFIGURATION = 1 <NEW_LINE> CAP_RESOLUTION = 100 <NEW_LINE> CAP_SWITCHABLE_RESOLUTION = 101 <NEW_LINE> CAP_PROFILE = 200 <NEW_LINE> CAP_SWITCHABLE_PROFILE = 201 <NEW_LINE> CAP_DISABLE_PROFILE = 202 <NEW_LINE> CAP_DEFAULT_PROFILE = 203 <NEW_LINE> CAP_BUTTON = 300 <NEW_LINE> CAP_BUTTON_KEY = 301 <NEW_LINE> CAP_BUTTON_MACROS = 302 <NEW_LINE> CAP_LED = 400 <NEW_LINE> __gsignals__ = { "active-profile-changed": (GObject.SignalFlags.RUN_FIRST, None, (GObject.TYPE_PYOBJECT,)), } <NEW_LINE> def __init__(self, object_path): <NEW_LINE> <INDENT> _RatbagdDBus.__init__(self, "Device", object_path) <NEW_LINE> result = self._get_dbus_property("Profiles") or [] <NEW_LINE> self._profiles = [RatbagdProfile(objpath) for objpath in result] <NEW_LINE> for profile in self._profiles: <NEW_LINE> <INDENT> profile.connect("notify::is-active", self._on_active_profile_changed) <NEW_LINE> <DEDENT> <DEDENT> def _on_active_profile_changed(self, profile, pspec): <NEW_LINE> <INDENT> if profile.is_active: <NEW_LINE> <INDENT> self.emit("active-profile-changed", self._profiles[profile.index]) <NEW_LINE> <DEDENT> <DEDENT> @GObject.Property <NEW_LINE> def id(self): <NEW_LINE> <INDENT> return self._get_dbus_property("Id") <NEW_LINE> <DEDENT> @GObject.Property <NEW_LINE> def capabilities(self): <NEW_LINE> <INDENT> return self._get_dbus_property("Capabilities") or [] <NEW_LINE> <DEDENT> @GObject.Property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return self._get_dbus_property("Name") <NEW_LINE> <DEDENT> @GObject.Property <NEW_LINE> def profiles(self): <NEW_LINE> <INDENT> return self._profiles <NEW_LINE> <DEDENT> @GObject.Property <NEW_LINE> def active_profile(self): <NEW_LINE> <INDENT> for profile in self._profiles: <NEW_LINE> <INDENT> if profile.is_active: <NEW_LINE> <INDENT> return profile <NEW_LINE> <DEDENT> <DEDENT> print("No active profile. 
Please report this bug to the libratbag developers", file=sys.stderr) <NEW_LINE> return self._profiles[0] <NEW_LINE> <DEDENT> def get_svg(self, theme): <NEW_LINE> <INDENT> return self._dbus_call("GetSvg", "s", theme) <NEW_LINE> <DEDENT> def commit(self, callback): <NEW_LINE> <INDENT> self._dbus_call_async("Commit", "", callback=callback) <NEW_LINE> for profile in self._profiles: <NEW_LINE> <INDENT> if profile.dirty: <NEW_LINE> <INDENT> profile._dirty = False <NEW_LINE> profile.notify("dirty") | Represents a ratbagd device. | 62598fa866673b3332c302f9 |
class bitmap_8bpp(bitmap): <NEW_LINE> <INDENT> def __init__(self, width, height): <NEW_LINE> <INDENT> bitmap.__init__(self, 8, width, height) <NEW_LINE> self.palette = [ 0x00FF00FF, 0x00000000, 0x00FF0000, 0x0000FF00, 0x000000FF, 0x00FFFFFF] <NEW_LINE> self.INDEX_MAGENTA = 0 <NEW_LINE> self.INDEX_BLACK = 1 <NEW_LINE> self.INDEX_RED = 2 <NEW_LINE> self.INDEX_GREEN = 3 <NEW_LINE> self.INDEX_BLUE = 4 <NEW_LINE> self.INDEX_WHITE = 5 <NEW_LINE> self.TRANSPARENT_PIXEL = -2 <NEW_LINE> <DEDENT> def create_pixeldata(self): <NEW_LINE> <INDENT> red_width = self.width / 3 <NEW_LINE> green_width = self.width / 3 <NEW_LINE> blue_width = self.width - (red_width + green_width) <NEW_LINE> pad_width_in_bytes = self.get_scanline_padding_bits() / 8 <NEW_LINE> raster = [] <NEW_LINE> for i in range(0, self.height): <NEW_LINE> <INDENT> row = [] <NEW_LINE> row += [chr(self.INDEX_RED)] * red_width <NEW_LINE> row += [chr(self.INDEX_GREEN)] * green_width <NEW_LINE> row += [chr(self.INDEX_BLUE)] * blue_width <NEW_LINE> row += [chr(self.INDEX_MAGENTA)] * pad_width_in_bytes <NEW_LINE> raster.append(row) <NEW_LINE> <DEDENT> self.draw_double_border( raster, chr(self.INDEX_BLACK), chr(self.INDEX_WHITE)) <NEW_LINE> self.apply_top_left_logo( raster, chr(self.INDEX_BLACK), chr(self.INDEX_WHITE)) <NEW_LINE> pixeldata = '' <NEW_LINE> for row in raster: <NEW_LINE> <INDENT> pixeldata += string.join(row, '') <NEW_LINE> <DEDENT> return pixeldata | An uncompressed RGB bitmap that has 8 bits per pixel. | 62598fa832920d7e50bc5f84 |
class RHConfAddPaperReviewManager( RHConfModifReviewingBase ): <NEW_LINE> <INDENT> def _process( self ): <NEW_LINE> <INDENT> params = self._getRequestParams() <NEW_LINE> if "selectedPrincipals" in params: <NEW_LINE> <INDENT> ph = user.PrincipalHolder() <NEW_LINE> for id in self._normaliseListParam( params["selectedPrincipals"] ): <NEW_LINE> <INDENT> if id is not None and id != '': <NEW_LINE> <INDENT> self._target.getConfPaperReview().addPaperReviewManager( ph.getById( id ) ) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> self._redirect( urlHandlers.UHConfModifReviewingControl.getURL( self._target ) ) | Only conference managers can add paper review managers,
so this class inherits from RHConferenceModifBase | 62598fa87047854f4633f308 |
class Device(object): <NEW_LINE> <INDENT> def __init__(self, hostname): <NEW_LINE> <INDENT> self.hostname = hostname <NEW_LINE> self.SSHClient = paramiko.SSHClient() <NEW_LINE> self.RemoteShell = None <NEW_LINE> self.username = "" <NEW_LINE> self.password = "" <NEW_LINE> <DEDENT> def connect(self, username, password): <NEW_LINE> <INDENT> self.username = username <NEW_LINE> self.password = password <NEW_LINE> try: <NEW_LINE> <INDENT> self.SSHClient.set_missing_host_key_policy( paramiko.AutoAddPolicy()) <NEW_LINE> self.SSHClient.connect( self.hostname, username=username, password=password, allow_agent=False, look_for_keys=False) <NEW_LINE> self.RemoteShell = self.SSHClient.invoke_shell() <NEW_LINE> return True <NEW_LINE> <DEDENT> except (ParamikoExcept.SSHException, SocketError) as e: <NEW_LINE> <INDENT> if _debug: <NEW_LINE> <INDENT> sys.stderr.write( "Device.connect to '%s' caused Exception %s" % (self.hostname, e)) <NEW_LINE> <DEDENT> return False <NEW_LINE> <DEDENT> <DEDENT> def run(self, commands, outfile): <NEW_LINE> <INDENT> cmd = "" <NEW_LINE> try: <NEW_LINE> <INDENT> while self.RemoteShell.recv_ready(): <NEW_LINE> <INDENT> data = self.RemoteShell.recv(_bufferSize).decode("utf-8") <NEW_LINE> outfile.write(data) <NEW_LINE> <DEDENT> for cmd in commands: <NEW_LINE> <INDENT> if self.RemoteShell.send(cmd + "\n") == 0: <NEW_LINE> <INDENT> raise IOError <NEW_LINE> <DEDENT> time.sleep(2) <NEW_LINE> while self.RemoteShell.recv_ready(): <NEW_LINE> <INDENT> data = self.RemoteShell.recv(_bufferSize).decode("utf-8") <NEW_LINE> outfile.write(data) <NEW_LINE> <DEDENT> <DEDENT> return True <NEW_LINE> <DEDENT> except (ParamikoExcept.SSHException, SocketError, UnicodeDecodeError) as e: <NEW_LINE> <INDENT> if _debug: <NEW_LINE> <INDENT> sys.stderr.write( "Device.run: Command '%s' caused exception %s" % (cmd, e)) <NEW_LINE> <DEDENT> return False <NEW_LINE> <DEDENT> <DEDENT> def superuser(self, password): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> def login(self): 
<NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> def logout(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.RemoteShell.close() <NEW_LINE> self.SSHClient.close() <NEW_LINE> return True <NEW_LINE> <DEDENT> except (ParamikoExcept.SSHException, SocketError) as e: <NEW_LINE> <INDENT> if _debug: <NEW_LINE> <INDENT> sys.stderr.write("Device.logout: Exception %s" % e) | Represents a remote controlled device connected with SSH.
The Device class is a generic that interacts witht the Activity module.
Any specific device must inherit from this Device class and
implement the methods login() and super().
Source:
http://jessenoller.com/blog/2009/02/05/ssh-programming-with-paramiko-completely-different,
http://stackoverflow.com/questions/25101619/reading-output-of-top-command-using-paramiko
Example:
aDevice = Device('localhost', type = Devices.DeviceCisco)
aDevice.connect()
aDevice.super() #if needed
aDevice.run({"","",""},outfile)
aDevice.logout() | 62598fa8498bea3a75a57a4c |
class AarTopology(Topo): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> Topo.__init__(self) <NEW_LINE> hosts = [ self.addHost('h%s' % (n + 1)) for n in range(6) ] <NEW_LINE> switches = [ self.addSwitch('s%s' % (n + 1)) for n in range(6) ] <NEW_LINE> for i in range(3): <NEW_LINE> <INDENT> self.addLink( hosts[i], switches[0], bw=100, delay="2ms", max_queue_size=1000, use_htb=True ) <NEW_LINE> <DEDENT> for i in range(3, 6): <NEW_LINE> <INDENT> self.addLink( hosts[i], switches[5], bw=100, delay="2ms", max_queue_size=1000, use_htb=True ) <NEW_LINE> <DEDENT> self.addLink( switches[0], switches[1], bw=100, delay="2ms", max_queue_size=1000, use_htb=True ) <NEW_LINE> self.addLink( switches[1], switches[2], bw=100, delay="10ms", max_queue_size=1000, use_htb=True ) <NEW_LINE> self.addLink( switches[1], switches[4], bw=10, delay="2ms", max_queue_size=1000, use_htb=True ) <NEW_LINE> self.addLink( switches[2], switches[3], bw=100, delay="10ms", max_queue_size=1000, use_htb=True ) <NEW_LINE> self.addLink( switches[3], switches[4], bw=10, delay="2ms", max_queue_size=1000, use_htb=True ) <NEW_LINE> self.addLink( switches[3], switches[5], bw=100, delay="2ms", max_queue_size=1000, use_htb=True ) | Application aware routing custom toplogy | 62598fa899fddb7c1ca62d80 |
class PluginManager(object): <NEW_LINE> <INDENT> def __init__(): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> __instance = None <NEW_LINE> __lock = threading.Lock() <NEW_LINE> __plugin_list = [] <NEW_LINE> @staticmethod <NEW_LINE> def getManager(): <NEW_LINE> <INDENT> PluginManager.__lock.acquire() <NEW_LINE> if not PluginManager.__instance: <NEW_LINE> <INDENT> PluginManager.__instance = super(PluginManager, PluginManager).__new__(PluginManager) <NEW_LINE> super(PluginManager, PluginManager).__init__(PluginManager.__instance) <NEW_LINE> <DEDENT> PluginManager.__lock.release() <NEW_LINE> return PluginManager.__instance <NEW_LINE> <DEDENT> def register(self, plugin): <NEW_LINE> <INDENT> self.__plugin_list.append(plugin) <NEW_LINE> logging.debug('[%s] registers to PluginManager', plugin.name) <NEW_LINE> <DEDENT> def loadPlugin(self, plugin): <NEW_LINE> <INDENT> self.register(plugin) <NEW_LINE> plugin.load() <NEW_LINE> <DEDENT> def reload(self, name): <NEW_LINE> <INDENT> p = self.getPlugin(name) <NEW_LINE> if p: <NEW_LINE> <INDENT> p.unload() <NEW_LINE> p.load() <NEW_LINE> <DEDENT> <DEDENT> def unload(self, name): <NEW_LINE> <INDENT> p = self.getPlugin(name) <NEW_LINE> if p: <NEW_LINE> <INDENT> self.__plugin_list.remove(p) <NEW_LINE> p.unload() <NEW_LINE> <DEDENT> <DEDENT> def getPlugin(self, name): <NEW_LINE> <INDENT> for p in self.__plugin_list: <NEW_LINE> <INDENT> if name == p.name: <NEW_LINE> <INDENT> return p <NEW_LINE> <DEDENT> <DEDENT> return None <NEW_LINE> <DEDENT> def getAllPlugins(self): <NEW_LINE> <INDENT> return self.__plugin_list <NEW_LINE> <DEDENT> def loadPluginFromPath(self, path): <NEW_LINE> <INDENT> if os.path.isfile(path): <NEW_LINE> <INDENT> imp.load_source('', path) <NEW_LINE> logging.debug('[%s] Loading plugin from path %s', 'PluginManager', path) <NEW_LINE> <DEDENT> <DEDENT> def loadAllPlugins(self): <NEW_LINE> <INDENT> p_paths = ConfManager.getManager().getPluginPaths() <NEW_LINE> pluginList = ConfManager.getManager().getValue('common', 
'plugins') <NEW_LINE> for p in p_paths: <NEW_LINE> <INDENT> logging.debug('[%s]: loadAllPlugins from %s', 'PluginManager', p) <NEW_LINE> try: <NEW_LINE> <INDENT> dirs = os.listdir(p) <NEW_LINE> for d in dirs: <NEW_LINE> <INDENT> tmp = '%s%s%s' % (p, os.sep, d) <NEW_LINE> if os.path.isdir(tmp): <NEW_LINE> <INDENT> module_name = 'UniFileSync.plugins.%s' % d <NEW_LINE> module_path = '%s%s%sPlugin.py' % (tmp, os.sep, d) <NEW_LINE> imp.load_source('', module_path) <NEW_LINE> pl = {'name': d, 'path': module_path} <NEW_LINE> if pl not in pluginList: <NEW_LINE> <INDENT> pluginList.append(pl) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> except OSError as exc: <NEW_LINE> <INDENT> logging.error('loadAllPlugins listdir error %d', OSError.errno) <NEW_LINE> <DEDENT> <DEDENT> ConfManager.getManager().setValue('common', 'plugins', pluginList) <NEW_LINE> logging.debug('[%s]: loadAllPlugins and save %s into configuration', 'PluginManager', pluginList) <NEW_LINE> ConfManager.getManager().save() <NEW_LINE> <DEDENT> def unloadAllPlugins(self): <NEW_LINE> <INDENT> for p in self.__plugin_list: <NEW_LINE> <INDENT> p.unload() <NEW_LINE> <DEDENT> <DEDENT> def debug(self): <NEW_LINE> <INDENT> for p in self.__plugin_list: <NEW_LINE> <INDENT> logging.debug('load %s ...' % p.name) | Plugin Manager | 62598fa88e7ae83300ee8fd1 |
class UnitRegistry(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.units = set() <NEW_LINE> self.units_for_dimensions = collections.defaultdict(list) <NEW_LINE> <DEDENT> def add(self, u): <NEW_LINE> <INDENT> if u in self.units: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> self.units.add(u) <NEW_LINE> self.units_for_dimensions[u.dim].append(u) <NEW_LINE> <DEDENT> def remove(self, u): <NEW_LINE> <INDENT> self.units.remove(u) <NEW_LINE> dim = u.dim <NEW_LINE> self.units_for_dimensions[dim].remove(u) <NEW_LINE> <DEDENT> def __getitem__(self, x): <NEW_LINE> <INDENT> matching = self.units_for_dimensions.get(x.dim, []) <NEW_LINE> if len(matching) == 0: <NEW_LINE> <INDENT> raise KeyError("Unit not found in registry.") <NEW_LINE> <DEDENT> matching_values = np.array(matching, copy=False) <NEW_LINE> x_flat = np.array(x, copy=False).flatten() <NEW_LINE> floatreps = np.tile(np.abs(x_flat), (len(matching), 1)).T / matching_values <NEW_LINE> floatreps[floatreps == 0] = np.nan <NEW_LINE> if np.all(np.isnan(floatreps)): <NEW_LINE> <INDENT> return matching[0] <NEW_LINE> <DEDENT> deviations = np.nansum((np.log10(floatreps) - 1)**2, axis=0) <NEW_LINE> return matching[deviations.argmin()] | Stores known units for printing in best units.
All a user needs to do is to use the `register_new_unit`
function.
Default registries:
The units module defines three registries, the standard units,
user units, and additional units. Finding best units is done
by first checking standard, then user, then additional. New
user units are added by using the `register_new_unit` function.
Standard units includes all the basic non-compound unit names
built in to the module, including volt, amp, etc. Additional
units defines some compound units like newton metre (Nm) etc.
Methods
-------
add
__getitem__ | 62598fa87cff6e4e811b5959 |
class SinglePost(Handler): <NEW_LINE> <INDENT> def get(self, blog_id): <NEW_LINE> <INDENT> blog = Blog.get_by_id(int(blog_id)) <NEW_LINE> self.render("single-post.html", blog = blog) | renders a page with a single blog post based on the id of the blog post in the data store 'Blog' | 62598fa80c0af96317c562b2 |
class WordFile(models.Model): <NEW_LINE> <INDENT> words = models.FileField() <NEW_LINE> def save(self, request): <NEW_LINE> <INDENT> task_id = handle_uploaded_file(request.FILES['words']) <NEW_LINE> return HttpResponseRedirect('main?task_id=' + task_id) | Корпус с разметкой (файлы *.conll) | 62598fa84a966d76dd5eee12 |
class SubCommentFormTest(TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.subcomment = { 'nickname': 'testClient', 'email': 'test@example.com', 'content': 'my subcomment for test' } <NEW_LINE> <DEDENT> def test_attrs_cannot_empty(self): <NEW_LINE> <INDENT> f = SubCommentForm({}) <NEW_LINE> self.assertFalse(f.is_valid()) <NEW_LINE> self.assertTrue(f['nickname'].errors) <NEW_LINE> self.assertTrue(f['email'].errors) <NEW_LINE> self.assertTrue(f['content'].errors) <NEW_LINE> <DEDENT> def test_nickname_restrict_character(self): <NEW_LINE> <INDENT> f = SubCommentForm(self.subcomment) <NEW_LINE> self.assertTrue(f.is_valid()) <NEW_LINE> self.subcomment['nickname'] = '我的test' <NEW_LINE> f = SubCommentForm(self.subcomment) <NEW_LINE> self.assertTrue(f.is_valid()) <NEW_LINE> self.subcomment['nickname'] = '$' <NEW_LINE> f = SubCommentForm(self.subcomment) <NEW_LINE> self.assertFalse(f.is_valid()) <NEW_LINE> self.subcomment['nickname'] = '空格 space' <NEW_LINE> f = SubCommentForm(self.subcomment) <NEW_LINE> self.assertFalse(f.is_valid()) <NEW_LINE> <DEDENT> def test_email_pattern(self): <NEW_LINE> <INDENT> self.subcomment['email'] = 'test' <NEW_LINE> f = SubCommentForm(self.subcomment) <NEW_LINE> self.assertFalse(f.is_valid()) <NEW_LINE> self.subcomment['email'] = 'test@example' <NEW_LINE> f = SubCommentForm(self.subcomment) <NEW_LINE> self.assertFalse(f.is_valid()) <NEW_LINE> self.subcomment['email'] = 'test@example.' 
<NEW_LINE> f = SubCommentForm(self.subcomment) <NEW_LINE> self.assertFalse(f.is_valid()) <NEW_LINE> self.subcomment['email'] = 'test@example.com.cn' <NEW_LINE> f = SubCommentForm(self.subcomment) <NEW_LINE> self.assertTrue(f.is_valid()) <NEW_LINE> <DEDENT> def test_content_limit_1000(self): <NEW_LINE> <INDENT> self.subcomment['content'] = '表单测试' * 250 <NEW_LINE> f = SubCommentForm(self.subcomment) <NEW_LINE> self.assertTrue(f.is_valid()) <NEW_LINE> self.subcomment['content'] = '表单测试' * 251 <NEW_LINE> f = SubCommentForm(self.subcomment) <NEW_LINE> self.assertFalse(f.is_valid()) | 测试回复表单 | 62598fa856ac1b37e630211c |
class SingletonRule(ConstantRule): <NEW_LINE> <INDENT> def __init__(self, fun): <NEW_LINE> <INDENT> ConstantRule.__init__(self) <NEW_LINE> self._fun = fun <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "Singleton" <NEW_LINE> <DEDENT> def fun(self, x): <NEW_LINE> <INDENT> return self._fun(x) <NEW_LINE> <DEDENT> def valuation(self): <NEW_LINE> <INDENT> return 1 <NEW_LINE> <DEDENT> def count(self, n): <NEW_LINE> <INDENT> if (n == 1): <NEW_LINE> <INDENT> return 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> <DEDENT> def list(self, l): <NEW_LINE> <INDENT> if len(l) != 1: <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return [self.fun(l[0])] | Représente un ensemble composé d'un objet unique de taille 1 | 62598fa830dc7b766599f77d |
class RedisCacheManager(CacheManager, SimpleItem): <NEW_LINE> <INDENT> security = ClassSecurityInfo() <NEW_LINE> security.setPermissionDefault('Change cache managers', ('Manager', )) <NEW_LINE> manage_options = ( {'label': 'Properties', 'action': 'manage_main'}, ) + CacheManager.manage_options + SimpleItem.manage_options <NEW_LINE> meta_type = 'Redis Cache Manager' <NEW_LINE> def __init__(self, ob_id): <NEW_LINE> <INDENT> self.id = ob_id <NEW_LINE> self.title = '' <NEW_LINE> self._settings = { 'request_vars': ('AUTHENTICATED_USER', ), } <NEW_LINE> self._resetCacheId() <NEW_LINE> <DEDENT> def getId(self): <NEW_LINE> <INDENT> return self.id <NEW_LINE> <DEDENT> security.declarePrivate('_remove_data') <NEW_LINE> def _remove_data(self): <NEW_LINE> <INDENT> caches.pop(self.__cacheid, None) <NEW_LINE> <DEDENT> security.declarePrivate('_resetCacheId') <NEW_LINE> def _resetCacheId(self): <NEW_LINE> <INDENT> self.__cacheid = '%s_%f' % (id(self), time.time()) <NEW_LINE> <DEDENT> ZCacheManager_getCache__roles__ = () <NEW_LINE> def ZCacheManager_getCache(self): <NEW_LINE> <INDENT> cacheid = self.__cacheid <NEW_LINE> try: <NEW_LINE> <INDENT> return caches[cacheid] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> cache = RedisCache() <NEW_LINE> cache.initSettings(self._settings) <NEW_LINE> caches[cacheid] = cache <NEW_LINE> return cache <NEW_LINE> <DEDENT> <DEDENT> security.declareProtected(view_management_screens, 'getSettings') <NEW_LINE> def getSettings(self): <NEW_LINE> <INDENT> res = self._settings.copy() <NEW_LINE> return res <NEW_LINE> <DEDENT> security.declareProtected('Change cache managers', 'manage_editProps') <NEW_LINE> def manage_editProps(self, title, settings=None, REQUEST=None): <NEW_LINE> <INDENT> if settings is None: <NEW_LINE> <INDENT> settings = REQUEST <NEW_LINE> <DEDENT> self.title = str(title) <NEW_LINE> request_vars = list(settings['request_vars']) <NEW_LINE> request_vars.sort() <NEW_LINE> self._settings = { 'request_vars': tuple(request_vars), } 
<NEW_LINE> cache = self.ZCacheManager_getCache() <NEW_LINE> cache.initSettings(self._settings) <NEW_LINE> if REQUEST is not None: <NEW_LINE> <INDENT> return self.manage_main( self, REQUEST, manage_tabs_message='Properties changed.') <NEW_LINE> <DEDENT> <DEDENT> security.declareProtected(view_management_screens, 'manage_main') <NEW_LINE> manage_main = DTMLFile('dtml/propsRCM', globals()) | Manage a RedisCache, which stores rendered data in redis.
This is intended to be used as a low-level cache for
expensive Python code, not for objects published
under their own URLs such as web pages.
RedisCacheManager *can* be used to cache complete publishable
pages, such as DTMLMethods/Documents and Page Templates,
but this is not advised: such objects typically do not attempt
to cache important out-of-band data such as 3xx HTTP responses,
and the client would get an erroneous 200 response.
Such objects should instead be cached with an
AcceleratedHTTPCacheManager and/or downstream
caching. | 62598fa84e4d562566372354 |
class BackwardsCompatibleHTTPClientTests(HTTPClientTests): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> super(BackwardsCompatibleHTTPClientTests, self).setUp() <NEW_LINE> config_lib.CONFIG.Set("Network.api", 2) <NEW_LINE> <DEDENT> def testCachedRSAOperations(self): <NEW_LINE> <INDENT> self.SendToServer() <NEW_LINE> self.client_communicator.RunOnce() <NEW_LINE> self.CheckClientQueue() <NEW_LINE> metric_value = stats.STATS.GetMetricValue("grr_rsa_operations") <NEW_LINE> self.assert_(metric_value > 0) <NEW_LINE> for _ in range(5): <NEW_LINE> <INDENT> self.SendToServer() <NEW_LINE> self.client_communicator.RunOnce() <NEW_LINE> self.CheckClientQueue() <NEW_LINE> <DEDENT> self.assertEqual(stats.STATS.GetMetricValue("grr_rsa_operations"), metric_value + 10) | Test that we can talk using the old protocol still (version 2). | 62598fa8f7d966606f747f14 |
@inherit_doc <NEW_LINE> class GeneralJavaMLWritable(JavaMLWritable): <NEW_LINE> <INDENT> def write(self) -> GeneralJavaMLWriter: <NEW_LINE> <INDENT> return GeneralJavaMLWriter(self) | (Private) Mixin for ML instances that provide :py:class:`GeneralJavaMLWriter`. | 62598fa86e29344779b0058c |
class IgnoreQueriesProcessor(BaseProcessor): <NEW_LINE> <INDENT> def __init__(self, ignore_queries): <NEW_LINE> <INDENT> super(IgnoreQueriesProcessor, self).__init__() <NEW_LINE> self.ignore_queries = set(ignore_queries) <NEW_LINE> self.inputs = ['query'] <NEW_LINE> self.outputs = [] <NEW_LINE> <DEDENT> def process(self, row): <NEW_LINE> <INDENT> if row.get('query') in self.ignore_queries: <NEW_LINE> <INDENT> raise SkipRowException <NEW_LINE> <DEDENT> return tuple() | Reject some specific queries | 62598fa8bd1bec0571e1505b |
class RecipeMalts(models.Model): <NEW_LINE> <INDENT> malt_brand = models.CharField(max_length=120) <NEW_LINE> malt_type = models.CharField(max_length=120) <NEW_LINE> malt_extract = models.BooleanField(default=True) <NEW_LINE> dry_malt = models.BooleanField(default=False) <NEW_LINE> recipe = models.ForeignKey('Recipe', related_name='recipe_malts', null=True) <NEW_LINE> amount_by_weight = models.FloatField() <NEW_LINE> status = models.BooleanField(default=True) | Hold descriptors for all the malts used in a recipe | 62598fa816aa5153ce400432 |
class account_tax_template(osv.osv): <NEW_LINE> <INDENT> _inherit = 'account.tax.template' <NEW_LINE> _columns = { 'tax_discount': fields.boolean('Discount this Tax in Prince', help="Mark it for (ICMS, PIS e etc.)."), 'base_reduction': fields.float('Redution', required=True, digits=0, help="Um percentual decimal em % entre 0-1."), 'amount_mva': fields.float('MVA Percent', required=True, digits=0, help="Um percentual decimal em % entre 0-1."), } <NEW_LINE> _defaults = TAX_DEFAULTS | Add fields used to define some brazilian taxes | 62598fa863d6d428bbee26e2 |
class Address(dict): <NEW_LINE> <INDENT> @property <NEW_LINE> def location(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> lat, lng = self['metadata']['latitude'], self['metadata']['longitude'] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> if not lat or not lng: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> return lat, lng <NEW_LINE> <DEDENT> @property <NEW_LINE> def confirmed(self): <NEW_LINE> <INDENT> valid = ['Y', 'S', 'D'] <NEW_LINE> match_code = self.get('analysis', {}).get('dpv_match_code', '') <NEW_LINE> return match_code in valid <NEW_LINE> <DEDENT> @property <NEW_LINE> def id(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self['input_id'] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def index(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self['input_index'] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> return None | Class for handling a single address response | 62598fa863b5f9789fe85095 |
class META(type): <NEW_LINE> <INDENT> def __add__(kls, other_class): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __lshift__(kls, my_mixin): <NEW_LINE> <INDENT> die_if_not_mixin(my_mixin) <NEW_LINE> name = dynamic_name() <NEW_LINE> bases = tuple([my_mixin, kls]) <NEW_LINE> return type(name, bases, {}) <NEW_LINE> <DEDENT> def __rshift__(kls, my_mixin): <NEW_LINE> <INDENT> die_if_not_mixin(my_mixin) <NEW_LINE> name = dynamic_name() <NEW_LINE> bases = tuple([kls, my_mixin]) <NEW_LINE> return type(name, bases, {}) <NEW_LINE> <DEDENT> def _get_subclass(kls, name=None, dct={}, bases=tuple(), **kargs): <NEW_LINE> <INDENT> import new <NEW_LINE> import copy <NEW_LINE> dct = copy.copy(dct) <NEW_LINE> dct.update(kargs) <NEW_LINE> if hasattr(kls, '_subclass_hooks'): <NEW_LINE> <INDENT> name,dct = kls._subclass_hooks(name=name, **dct) <NEW_LINE> <DEDENT> name = name or "DynamicSubclassOf{K}_{U}".format(K=kls.__name__, U=str(uuid.uuid1()).split('-')[-2]) <NEW_LINE> return new.classobj(name, (kls,)+bases, dct) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def enumerate_hooks(mcls): <NEW_LINE> <INDENT> matches = [x for x in dir(mcls) if getattr(getattr(mcls, x, None),'metaclass_hook', False)] <NEW_LINE> return dict( [ [match, getattr(mcls, match)] for match in matches ] ) <NEW_LINE> <DEDENT> def __new__(mcls, name, bases, dct): <NEW_LINE> <INDENT> class_obj = type.__new__(mcls, name, bases, dct) <NEW_LINE> hooks = getattr(mcls, 'hooks', []) <NEW_LINE> if not hooks: <NEW_LINE> <INDENT> hooks = mcls.enumerate_hooks(mcls) <NEW_LINE> <DEDENT> for hook in hooks.values(): <NEW_LINE> <INDENT> hook(mcls, name, bases, dct, class_obj) <NEW_LINE> <DEDENT> return class_obj | the most generic metaclass..
sorry but to avoid MRO issues, this should be the main one used,
and everything should subclass it. | 62598fa84f88993c371f04a2 |
class SimpleGraph(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.graph = {} <NEW_LINE> <DEDENT> def neighbors(self, vertex): <NEW_LINE> <INDENT> return self.graph[vertex] | Simple adjacency matrix graph class | 62598fa8aad79263cf42e705 |
class CustViewer(ida_kernwin.simplecustviewer_t): <NEW_LINE> <INDENT> def __init__(self, ea): <NEW_LINE> <INDENT> ida_kernwin.simplecustviewer_t.__init__(self) <NEW_LINE> self.ea = ea <NEW_LINE> <DEDENT> def jump_in_disassembly(self): <NEW_LINE> <INDENT> ea = self.ea <NEW_LINE> if not ea or not ida_bytes.is_loaded(ea): <NEW_LINE> <INDENT> FELogger.warn("地址错误") <NEW_LINE> return <NEW_LINE> <DEDENT> widget = self.find_disass_view() <NEW_LINE> if not widget: <NEW_LINE> <INDENT> FELogger.warn("无法找到反汇编窗口") <NEW_LINE> return <NEW_LINE> <DEDENT> self.jumpto_in_view(widget, ea) <NEW_LINE> <DEDENT> def jump_in_new_window(self): <NEW_LINE> <INDENT> ea = self.ea <NEW_LINE> if not ea or not ida_bytes.is_loaded(ea): <NEW_LINE> <INDENT> FELogger.warn("地址错误") <NEW_LINE> return <NEW_LINE> <DEDENT> window_name = "D-0x%x" % ea <NEW_LINE> widget = ida_kernwin.open_disasm_window(window_name) <NEW_LINE> if widget: <NEW_LINE> <INDENT> self.jumpto_in_view(widget, ea) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> FELogger.warn("创建新窗口失败") <NEW_LINE> <DEDENT> <DEDENT> def jump_in_hex(self): <NEW_LINE> <INDENT> ea = self.ea <NEW_LINE> if not ea or not ida_bytes.is_loaded(ea): <NEW_LINE> <INDENT> FELogger.warn("地址错误") <NEW_LINE> return <NEW_LINE> <DEDENT> widget = self.find_hex_view() <NEW_LINE> if not widget: <NEW_LINE> <INDENT> FELogger.warn("无法找到十六进制窗口") <NEW_LINE> return <NEW_LINE> <DEDENT> self.jumpto_in_view(widget, ea) <NEW_LINE> <DEDENT> def find_disass_view(self): <NEW_LINE> <INDENT> for c in map(chr, range(65, 75)): <NEW_LINE> <INDENT> widget = ida_kernwin.find_widget('IDA View-%s' % c) <NEW_LINE> if widget: <NEW_LINE> <INDENT> return widget <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> <DEDENT> return None <NEW_LINE> <DEDENT> def find_hex_view(self): <NEW_LINE> <INDENT> for i in range(1, 10): <NEW_LINE> <INDENT> widget = ida_kernwin.find_widget('Hex View-%d' % i) <NEW_LINE> if widget: <NEW_LINE> <INDENT> return widget <NEW_LINE> <DEDENT> else: 
<NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> <DEDENT> return None <NEW_LINE> <DEDENT> def jumpto_in_view(self, view, ea): <NEW_LINE> <INDENT> ida_kernwin.activate_widget(view, True) <NEW_LINE> return ida_kernwin.jumpto(ea) | 分析结果窗口显示器 | 62598fa8009cb60464d0144f |
class Table(MultiRowSelection): <NEW_LINE> <INDENT> def __init__(self, table_name, db_expr=None, allow_outdated=None): <NEW_LINE> <INDENT> ReadQuery.__init__(self, internal.Table(self)) <NEW_LINE> self.table_name = table_name <NEW_LINE> self.db_expr = db_expr <NEW_LINE> self.allow_outdated = allow_outdated <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> if self.db_expr is not None: <NEW_LINE> <INDENT> return "<Table %r, db=%r>" % (self.table_name, self.db_expr.db_name) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return "<Table %r>" % self.table_name <NEW_LINE> <DEDENT> <DEDENT> def insert(self, docs, upsert=False): <NEW_LINE> <INDENT> if isinstance(docs, dict): <NEW_LINE> <INDENT> return WriteQuery(internal.Insert(self, [docs], upsert)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return WriteQuery(internal.Insert(self, docs, upsert)) <NEW_LINE> <DEDENT> <DEDENT> def get(self, key, attr_name = "id"): <NEW_LINE> <INDENT> return RowSelection(internal.Get(self, key, attr_name)) <NEW_LINE> <DEDENT> def _write_ref_ast(self, parent, opts): <NEW_LINE> <INDENT> if self.db_expr: <NEW_LINE> <INDENT> parent.db_name = self.db_expr.db_name <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> parent.db_name = net.last_connection().db_name <NEW_LINE> <DEDENT> if self.allow_outdated is None: <NEW_LINE> <INDENT> if not 'allow_outdated' in opts or opts['allow_outdated'] is None or opts['allow_outdated'] is False: <NEW_LINE> <INDENT> parent.use_outdated = False <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> parent.use_outdated = True <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> parent.use_outdated = self.allow_outdated <NEW_LINE> <DEDENT> parent.table_name = self.table_name | A ReQL expression that encodes a RethinkDB table. Most data
manipulation operations (such as inserting, selecting, and
updating data) can be chained off of this object. | 62598fa8627d3e7fe0e06ddd |
class Interface2App(App): <NEW_LINE> <INDENT> def build(self): <NEW_LINE> <INDENT> screen = AllImage() <NEW_LINE> screen.name = 'interface2' <NEW_LINE> return screen | The kivy App that runs the main root. All we do is build a AllImage
widget into the root. | 62598fa81b99ca400228f4c8 |
class ColumnShifterCTX_P(object): <NEW_LINE> <INDENT> def __init__(self, dict_db): <NEW_LINE> <INDENT> self.db = dict_db <NEW_LINE> <DEDENT> def define(self, columns, shifts, ignore_unknown=True): <NEW_LINE> <INDENT> db_set = set(self.db.keys()) <NEW_LINE> if not(set(columns) <= db_set): <NEW_LINE> <INDENT> if ignore_unknown: <NEW_LINE> <INDENT> self.columns = list(set(columns).intersection(db_set)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> unknown_columns = set(columns) - db_set <NEW_LINE> raise ValueError('Unknown columns {:}'.format(unknown_columns)) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> self.columns = columns <NEW_LINE> <DEDENT> shift_types = [isinstance(i, int) for i in shifts] <NEW_LINE> if not all(shift_types): <NEW_LINE> <INDENT> invalid_types = [shift[i] for i in shift_types if not shift_types[i]] <NEW_LINE> raise ValueError('Int type violation: {:}'.format(invalid_types)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.shifts = sorted(shifts) <NEW_LINE> <DEDENT> if not '(V*)' in set(self.db['ARG'].values()): <NEW_LINE> <INDENT> raise ValueError('(V*) not in ARG') <NEW_LINE> <DEDENT> self.mapper = OrderedDict( {(i, col): '{:}_CTX_P{:+d}'.format(col, i) for col in columns for i in sorted(shifts)}) <NEW_LINE> return self <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> if not ( self.columns or self.shifts or self.mapper): <NEW_LINE> <INDENT> raise Exception('Columns to be shifted are undefined run column_shifter.define') <NEW_LINE> <DEDENT> self.dict_shifted = {col: OrderedDict({}) for _, col in self.mapper.items()} <NEW_LINE> times = [] <NEW_LINE> predicate_d = _predicatedict(self.db) <NEW_LINE> for time, proposition in self.db['P'].items(): <NEW_LINE> <INDENT> predicate_time = predicate_d[proposition] <NEW_LINE> for col in self.columns: <NEW_LINE> <INDENT> for s in self.shifts: <NEW_LINE> <INDENT> new_col = self.mapper[(s, col)] <NEW_LINE> if (predicate_time + s in self.db['P']) and (self.db['P'][predicate_time + s] == 
proposition): <NEW_LINE> <INDENT> self.dict_shifted[new_col][time] = self.db[col][predicate_time + s] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.dict_shifted[new_col][time] = None <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> return self.dict_shifted | Grabs columns around predicate and shifts it
Usage:
See below (main) | 62598fa8e5267d203ee6b83c |
class UserDlRequest(Model): <NEW_LINE> <INDENT> user_id = f.Integer(nullable=False, index=True) <NEW_LINE> dlrequest_id = f.ForeignKey(DlRequest.id) <NEW_LINE> created = f.DateTime(with_timezone=True, nullable=False, default=datetime.utcnow) <NEW_LINE> unique = ( ('user_id', 'dlrequest_id'), ) | Users vs Download Requests. | 62598fa8498bea3a75a57a4e |
class MonitorPMA(Command): <NEW_LINE> <INDENT> log = logging.getLogger(__name__) <NEW_LINE> def take_action(self, parsed_args): <NEW_LINE> <INDENT> self.log.info('monitorPMA service starting...') <NEW_LINE> startService(CONF.hades_exchange, 'hades-monitorPMA', CONF.hades_monitorPMA_topic, 'pike', CONF.hades_monitorPMA_manager) | A command that start monitorPMA service. | 62598fa8a79ad16197769f95 |
class CustomDBAdapter(DBAdapter): <NEW_LINE> <INDENT> def __init__(self, config): <NEW_LINE> <INDENT> super(CustomDBAdapter, self).__init__(config) <NEW_LINE> <DEDENT> def connect(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def _open(self, filename, *args, **kwargs): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def _save(self, id, data): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def _replace_if_equals(self, id, sample, data): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def _get(self, id): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def _list_dir(self, path): <NEW_LINE> <INDENT> raise error.NotSupportedOperation <NEW_LINE> <DEDENT> def _glob(self, path, pattern): <NEW_LINE> <INDENT> raise error.NotSupportedOperation <NEW_LINE> <DEDENT> def _path_exists(self, path): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def _on_prepare(self): <NEW_LINE> <INDENT> pass | Для создания нового класса-коннектора к БД, реализуйте следующие методы или
пометьте их raise error.NotSupportedOperation | 62598fa8a219f33f346c6748 |
class MultiModalEncoder(snt.AbstractModule): <NEW_LINE> <INDENT> def __init__(self, encoder_list, mlp_dim, latent_dim, dropout=False, keep_prob=0.5, is_training=True, swap_out_mlp_relu_for_elu=False, name='multi_modal_encoder'): <NEW_LINE> <INDENT> super(MultiModalEncoder, self).__init__(name=name) <NEW_LINE> self.encoder_list = encoder_list <NEW_LINE> self.mlp_dim = mlp_dim <NEW_LINE> self.latent_dim = latent_dim <NEW_LINE> self.dropout = dropout <NEW_LINE> self.keep_prob = keep_prob <NEW_LINE> self.is_training = is_training <NEW_LINE> self.swap_out_mlp_relu_for_elu = swap_out_mlp_relu_for_elu <NEW_LINE> self.mlp_activation_fn = tf.nn.relu <NEW_LINE> if self.swap_out_mlp_relu_for_elu: <NEW_LINE> <INDENT> self.mlp_activation_fn = tf.nn.elu <NEW_LINE> <DEDENT> <DEDENT> def _build(self, modalities, sentinel=None, v=None): <NEW_LINE> <INDENT> raise NotImplementedError | Abstract class for encoding multiple modalities together. | 62598fa899fddb7c1ca62d81 |
class AugMix(object): <NEW_LINE> <INDENT> def __init__(self, prob=0.5, aug_prob_coeff=0.1, mixture_width=3, mixture_depth=1, aug_severity=1): <NEW_LINE> <INDENT> self.prob = prob <NEW_LINE> self.aug_prob_coeff = aug_prob_coeff <NEW_LINE> self.mixture_width = mixture_width <NEW_LINE> self.mixture_depth = mixture_depth <NEW_LINE> self.aug_severity = aug_severity <NEW_LINE> self.augmentations = augmentations <NEW_LINE> <DEDENT> def __call__(self, image): <NEW_LINE> <INDENT> if random.random() > self.prob: <NEW_LINE> <INDENT> return np.asarray(image).copy() <NEW_LINE> <DEDENT> ws = np.float32( np.random.dirichlet([self.aug_prob_coeff] * self.mixture_width)) <NEW_LINE> m = np.float32(np.random.beta(self.aug_prob_coeff, self.aug_prob_coeff)) <NEW_LINE> mix = np.zeros([image.size[1], image.size[0], 3]) <NEW_LINE> for i in range(self.mixture_width): <NEW_LINE> <INDENT> image_aug = image.copy() <NEW_LINE> depth = self.mixture_depth if self.mixture_depth > 0 else np.random.randint(1, 4) <NEW_LINE> for _ in range(depth): <NEW_LINE> <INDENT> op = np.random.choice(self.augmentations) <NEW_LINE> image_aug = op(image_aug, self.aug_severity) <NEW_LINE> <DEDENT> mix += ws[i] * np.asarray(image_aug) <NEW_LINE> <DEDENT> mixed = (1 - m) * image + m * mix <NEW_LINE> return mixed.astype(np.uint8) | Perform AugMix augmentation and compute mixture.
| 62598fa85166f23b2e243309 |
class Authentication(TypedDict): <NEW_LINE> <INDENT> clientCert: ClientCert | Authentication
Attributes:
----------
clientCert: :py:class:`ClientCert` | 62598fa8097d151d1a2c0f5a |
class Task(DBObject): <NEW_LINE> <INDENT> def __init__(self, task_id=[], task_time = [], scaled_model_id=[], hand_id=[], task_type_id=[], task_outcome_id=[], comment = [], parameters = []): <NEW_LINE> <INDENT> DBObject.__init__(self, task_id = task_id, task_time = task_time, scaled_model_id = scaled_model_id, hand_id = hand_id, task_type_id = task_type_id, task_outcome_id = task_outcome_id, comment = comment, parameters = parameters) | @brief Class that encapsulates the database representation of the Tasks in the database. It
derives from DBOject. | 62598fa8379a373c97d98f43 |
class CSSProductions(object): <NEW_LINE> <INDENT> EOF = True | most attributes are set later | 62598fa876e4537e8c3ef4de |
class ExposePIDDevice(microscope.abc.Device): <NEW_LINE> <INDENT> def _do_shutdown(self) -> None: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def get_pid(self) -> int: <NEW_LINE> <INDENT> return os.getpid() | Test device for testing the device server keep alive. | 62598fa8851cf427c66b81fa |
class GCSQL(CloudDB): <NEW_LINE> <INDENT> def exec_sql(self, sql): <NEW_LINE> <INDENT> print('Executed SQL in Google Cloud SQL') | GC / DB
| 62598fa83d592f4c4edbadff |
class Account: <NEW_LINE> <INDENT> def __init__(self, index, name, answered, attempts): <NEW_LINE> <INDENT> self.index = index <NEW_LINE> self.name = name <NEW_LINE> self.answered = answered <NEW_LINE> self.attempts = attempts <NEW_LINE> <DEDENT> def addAnswered(self, type): <NEW_LINE> <INDENT> with open(file_path, 'r') as file: <NEW_LINE> <INDENT> data = file.readlines() <NEW_LINE> <DEDENT> number1 = int(data[self.index + 6]) + 1 <NEW_LINE> self.answered[4] = number1 <NEW_LINE> number2 = int(data[self.index + 2 + type]) + 1 <NEW_LINE> self.answered[type] = number2 <NEW_LINE> data[self.index + 6] = str(number1) + "\n" <NEW_LINE> data[self.index + 2 + type] = str(number2) + "\n" <NEW_LINE> with open(file_path, 'w') as file: <NEW_LINE> <INDENT> file.writelines(data) <NEW_LINE> <DEDENT> <DEDENT> def addAttempts(self, type): <NEW_LINE> <INDENT> with open(file_path, 'r') as file: <NEW_LINE> <INDENT> data = file.readlines() <NEW_LINE> <DEDENT> number1 = int(data[self.index + 11]) + 1 <NEW_LINE> self.attempts[4] = number1 <NEW_LINE> number2 = int(data[self.index + 7 + type]) + 1 <NEW_LINE> self.attempts[type] = number2 <NEW_LINE> data[self.index + 11] = str(number1) + "\n" <NEW_LINE> data[self.index + 7 + type] = str(number2) + "\n" <NEW_LINE> with open(file_path, 'w') as file: <NEW_LINE> <INDENT> file.writelines(data) <NEW_LINE> <DEDENT> <DEDENT> def reset(self): <NEW_LINE> <INDENT> with open(file_path, 'r') as file: <NEW_LINE> <INDENT> data = file.readlines() <NEW_LINE> <DEDENT> for i in range (2, 12): <NEW_LINE> <INDENT> data[self.index + i] = '0\n' <NEW_LINE> <DEDENT> self.answered = ['0', '0', '0', '0', '0'] <NEW_LINE> self.attempts = ['0', '0', '0', '0', '0'] <NEW_LINE> with open(file_path, 'w') as file: <NEW_LINE> <INDENT> file.writelines(data) | An account that stores the user's progress
Attributes
----------
answered : int list
The total number of completed problems for each problem type
attempts : int list
The total number of attempts for each problem type
Methods
-------
addAnswered(type : str) -> None
Increases the number of answered problems for a specific problem type by one
addAttempts(type : str) -> None
Increases the number of attempted problems for a specific problem type by one
reset(None) -> None
Resets the users stats | 62598fa824f1403a9268584c |
class Square: <NEW_LINE> <INDENT> def __init__(self, size=0, position=(0, 0)): <NEW_LINE> <INDENT> self.size = size <NEW_LINE> self.position = position <NEW_LINE> <DEDENT> @property <NEW_LINE> def size(self): <NEW_LINE> <INDENT> return (self.__size) <NEW_LINE> <DEDENT> @size.setter <NEW_LINE> def size(self, value): <NEW_LINE> <INDENT> if type(value) != int: <NEW_LINE> <INDENT> raise TypeError("size must be an integer") <NEW_LINE> <DEDENT> if value < 0: <NEW_LINE> <INDENT> raise ValueError("size must be >= 0") <NEW_LINE> <DEDENT> self.__size = value <NEW_LINE> <DEDENT> @property <NEW_LINE> def position(self): <NEW_LINE> <INDENT> return (self.__position) <NEW_LINE> <DEDENT> @position.setter <NEW_LINE> def position(self, value): <NEW_LINE> <INDENT> if type(value) != tuple or len(value) != 2: <NEW_LINE> <INDENT> raise TypeError("position must be a tuple of 2 positive integers") <NEW_LINE> <DEDENT> if type(value[0]) != int or type(value[1]) != int: <NEW_LINE> <INDENT> raise TypeError("position must be a tuple of 2 positive integers") <NEW_LINE> <DEDENT> if value[0] < 0 or value[1] < 0: <NEW_LINE> <INDENT> raise TypeError("position must be a tuple of 2 positive integers") <NEW_LINE> <DEDENT> self.__position = value <NEW_LINE> <DEDENT> def area(self): <NEW_LINE> <INDENT> return (self.__size ** 2) <NEW_LINE> <DEDENT> def my_print(self): <NEW_LINE> <INDENT> if self.size == 0: <NEW_LINE> <INDENT> print("") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> for i in range(self.position[1]): <NEW_LINE> <INDENT> print("") <NEW_LINE> <DEDENT> for i in range(self.size): <NEW_LINE> <INDENT> for j in range(self.position[0]): <NEW_LINE> <INDENT> print(" ", end="") <NEW_LINE> <DEDENT> for j in range(self.size): <NEW_LINE> <INDENT> print("#", end="") <NEW_LINE> <DEDENT> print("") | Defines a Square | 62598fa87047854f4633f30b |
class Rectangle(Shape): <NEW_LINE> <INDENT> def __init__(self, pos_x=0, pos_y=0, width=0, height=0): <NEW_LINE> <INDENT> self.position = Vector2() <NEW_LINE> self.position.x = pos_x <NEW_LINE> self.position.y = pos_y <NEW_LINE> self.size = Vector2() <NEW_LINE> self.size.x = width <NEW_LINE> self.size.y = height <NEW_LINE> <DEDENT> @property <NEW_LINE> def pos_x(self): <NEW_LINE> <INDENT> return self.position.x <NEW_LINE> <DEDENT> @pos_x.setter <NEW_LINE> def pos_x(self, value): <NEW_LINE> <INDENT> self.position.x = value <NEW_LINE> <DEDENT> @property <NEW_LINE> def pos_y(self): <NEW_LINE> <INDENT> return self.position.y <NEW_LINE> <DEDENT> @pos_y.setter <NEW_LINE> def pos_y(self, value): <NEW_LINE> <INDENT> self.position.y = value <NEW_LINE> <DEDENT> @property <NEW_LINE> def width(self): <NEW_LINE> <INDENT> return self.size.x <NEW_LINE> <DEDENT> @width.setter <NEW_LINE> def width(self, value): <NEW_LINE> <INDENT> self.size.x = value <NEW_LINE> <DEDENT> @property <NEW_LINE> def height(self): <NEW_LINE> <INDENT> return self.size.y <NEW_LINE> <DEDENT> @height.setter <NEW_LINE> def height(self, value): <NEW_LINE> <INDENT> self.size.y = value <NEW_LINE> <DEDENT> @property <NEW_LINE> def center(self): <NEW_LINE> <INDENT> return self.position + self.size * .5 <NEW_LINE> <DEDENT> @center.setter <NEW_LINE> def center(self, pos_x: Real, pos_y: Real): <NEW_LINE> <INDENT> self.position.set(pos_x - .5 * self.width, pos_y - .5 * self.height) <NEW_LINE> <DEDENT> def move(self, pos_x: Real=0, pos_y: Real=0): <NEW_LINE> <INDENT> self.position += (pos_x, pos_y) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return '<{}: ({}, {}, {}, {})>'.format( self.__class__.__name__, *chain(self.position, self.size)) | Any quadrilateral with four right angles | 62598fa845492302aabfc402 |
class GoogleAnalyticsDeletePreviousDataUploadsOperator(BaseOperator):
    """Delete all previous data uploads for a GA custom data source,
    keeping the Data Set quota under control.

    Every upload currently registered against the custom data source is
    listed and then deleted in a single request, so only uploads made
    after this operator runs remain.
    """

    # Only the impersonation chain supports Jinja templating.
    template_fields = ("impersonation_chain",)

    def __init__(
        self,
        account_id: str,
        web_property_id: str,
        custom_data_source_id: str,
        gcp_conn_id: str = "google_cloud_default",
        delegate_to: Optional[str] = None,
        api_version: str = "v3",
        impersonation_chain: Optional[Union[str, Sequence[str]]] = None,
        **kwargs,
    ) -> None:
        super().__init__(**kwargs)
        self.account_id = account_id
        self.web_property_id = web_property_id
        self.custom_data_source_id = custom_data_source_id
        self.gcp_conn_id = gcp_conn_id
        self.delegate_to = delegate_to
        self.api_version = api_version
        self.impersonation_chain = impersonation_chain

    def execute(self, context) -> None:
        """List every existing upload and delete them in one request."""
        ga_hook = GoogleAnalyticsHook(
            gcp_conn_id=self.gcp_conn_id,
            delegate_to=self.delegate_to,
            api_version=self.api_version,
            impersonation_chain=self.impersonation_chain,
        )
        uploads = ga_hook.list_uploads(
            account_id=self.account_id,
            web_property_id=self.web_property_id,
            custom_data_source_id=self.custom_data_source_id,
        )
        # Collect every upload id and delete them all in a single call.
        cids = [upload["id"] for upload in uploads]
        delete_request_body = {"customDataImportUids": cids}
        ga_hook.delete_upload_data(
            self.account_id,
            self.web_property_id,
            self.custom_data_source_id,
            delete_request_body,
        )
:param account_id: The GA account Id (long) to which the data upload belongs.
:type account_id: str
:param web_property_id: The web property UA-string associated with the upload.
:type web_property_id: str
:param custom_data_source_id: The id to which the data import belongs.
:type custom_data_source_id: str
:param gcp_conn_id: The connection ID to use when fetching connection info.
:type gcp_conn_id: str
:param delegate_to: The account to impersonate using domain-wide delegation of authority,
if any. For this to work, the service account making the request must have
domain-wide delegation enabled.
:type delegate_to: str
:param api_version: The version of the api that will be requested for example 'v3'.
:type api_version: str
:param impersonation_chain: Optional service account to impersonate using short-term
credentials, or chained list of accounts required to get the access_token
of the last account in the list, which will be impersonated in the request.
If set as a string, the account must grant the originating account
the Service Account Token Creator IAM role.
If set as a sequence, the identities from the list must grant
Service Account Token Creator IAM role to the directly preceding identity, with first
account from the list granting this role to the originating account (templated).
:type impersonation_chain: Union[str, Sequence[str]] | 62598fa84e4d562566372356 |
class GetSamplesTest(test.TestCase):
    """Test the private helper ``_get_samples``: exactly one of ``z``
    (pre-drawn samples) or ``n`` (number of samples) must be supplied."""

    def test_raises_if_both_z_and_n_are_none(self):
        """Supplying neither z nor n raises ValueError ('exactly one')."""
        with self.test_session():
            dist = normal_lib.Normal(loc=0., scale=1.)
            z = None
            n = None
            seed = None
            with self.assertRaisesRegexp(ValueError, 'exactly one'):
                _get_samples(dist, z, n, seed)

    def test_raises_if_both_z_and_n_are_not_none(self):
        """Supplying both z and n raises ValueError ('exactly one')."""
        with self.test_session():
            dist = normal_lib.Normal(loc=0., scale=1.)
            z = dist.sample(seed=42)
            n = 1
            seed = None
            with self.assertRaisesRegexp(ValueError, 'exactly one'):
                _get_samples(dist, z, n, seed)

    def test_returns_n_samples_if_n_provided(self):
        """When only n is given, n fresh samples are drawn from dist."""
        with self.test_session():
            dist = normal_lib.Normal(loc=0., scale=1.)
            z = None
            n = 10
            seed = None
            z = _get_samples(dist, z, n, seed)
            self.assertEqual((10,), z.get_shape())

    def test_returns_z_if_z_provided(self):
        """When z is given, it is passed through unchanged."""
        with self.test_session():
            dist = normal_lib.Normal(loc=0., scale=1.)
            z = dist.sample(10, seed=42)
            n = None
            seed = None
            z = _get_samples(dist, z, n, seed)
            self.assertEqual((10,), z.get_shape())
class ParallelSafetyWarning(RuntimeWarning):
    """Warning category for when an operation in a prange might not have
    parallel semantics."""
    pass
might not have parallel semantics. | 62598fa830dc7b766599f77f |
class Program(BaseRegister):
    """Wrapper register for managing Ecobee thermostat programs.

    Supports listing the non-vacation events stored on the thermostat,
    resuming scheduled program(s), or pushing a new program definition.
    """

    def __init__(self, thermostat_identifier):
        """:param thermostat_identifier: id of the thermostat this
        register belongs to."""
        program_description = "List or resume non-vacation programs stored on Ecobee thermostat"
        super(Program, self).__init__("byte", False, "Programs", "",
                                      description=program_description)
        self.thermostat_id = thermostat_identifier
        self.readable = True
        self.python_type = str

    def set_state(self, program, access_token, resume_all=False):
        """Resume scheduled program(s) or push *program* to the thermostat.

        :param program: dict describing an Ecobee program; empty/non-dict
            values trigger a "resumeProgram" request instead.
        :param access_token: OAuth token used to authorize the request.
        :param resume_all: when resuming, resume every stacked event.
        """
        params = {"format": "json"}
        # NOTE(review): an empty dict does NOT take this branch because
        # isinstance() is checked first, so {} is sent as a program body —
        # confirm whether `or` was the intended operator here.
        if not isinstance(program, dict) and not len(program):
            if not resume_all:
                _log.warning("No program specified, resuming next event on Ecobee event stack. To learn how to create "
                             "an Ecobee program, Visit "
                             "https://www.ecobee.com/home/developer/api/examples/ex11.shtml for more information")
            else:
                _log.info("No program specified and resume all is set to true, resuming all stored programs.")
            _log.debug("Resuming scheduled Ecobee program(s)")
            function_body = {
                "functions": [
                    {
                        "type": "resumeProgram",
                        "params": {
                            "resumeAll": resume_all
                        }
                    }
                ]
            }
            headers, body = populate_selection_objects(access_token, "thermostats",
                                                       self.thermostat_id, function_body)
        else:
            # A concrete program dict is pushed via the "registered" selection.
            program_body = {
                "thermostat": {
                    "program": program
                }
            }
            headers, body = populate_selection_objects(access_token, "registered",
                                                       self.thermostat_id, program_body)
        make_ecobee_request("POST", THERMOSTAT_URL, headers=headers, params=params, json=body)

    def get_state(self, ecobee_data):
        """Return this thermostat's non-vacation events from cached data.

        :param ecobee_data: cached Ecobee API payload with "thermostatList".
        :raises ValueError: if there is no cached data, or the thermostat
            or its events cannot be found in it.
        """
        if not ecobee_data:
            raise ValueError("No Ecobee data from cache available during point scrape.")
        for thermostat in ecobee_data.get("thermostatList"):
            if int(thermostat.get("identifier")) == self.thermostat_id:
                events_data = thermostat.get("events")
                if not isinstance(events_data, list):
                    raise ValueError(f"Point name {self.point_name} could not be found in latest Ecobee data")
                # Vacation events are managed by a separate register.
                return [event for event in events_data if event.get("type") != "vacation"]
        raise ValueError(f"Point {self.point_name} not available in Ecobee data.")
class ArchiveView(BaseCartView):
    """Archive Action implementation that uses slc.outdated to mark the
    cart's items as outdated, skipping items the user may not toggle."""

    def __call__(self):
        request = self.request
        pm = api.portal.get_tool('portal_membership')
        user = pm.getAuthenticatedMember()
        handled = []  # titles of successfully archived items
        for obj in self.items:
            # Per-object permission check; warn and skip on failure.
            if not user.has_permission("slc: Toggle outdated", obj):
                api.portal.show_message(
                    message="Could not archive object '{0}': Permission "
                    "denied".format(obj.Title()),
                    request=request,
                    type="warning")
                continue
            # Flip the slc.outdated flag via the object's browser view,
            # then reindex so catalog queries see the new state.
            outdated_view = obj.restrictedTraverse("object_toggle_outdated")
            outdated_view.outdated = True
            obj.reindexObject()
            handled.append(obj.Title())
        titles = ', '.join(sorted(handled))
        message = _(
            "The following items have been archived: ${titles}",
            mapping={'titles': titles}
        )
        api.portal.show_message(
            message=message,
            request=request,
            type="success",
        )
        return self.index()
outdated. | 62598fa87d847024c075c2f6 |
class OneWire(Entity):
    """Representation of a one-wire sensor backed by a sysfs device file."""

    def __init__(self, name, device_file, sensor_type):
        """Store the display name, device file path and unit for the sensor."""
        self._name = '{0} {1}'.format(name, sensor_type.capitalize())
        self._device_file = device_file
        self._unit_of_measurement = SENSOR_TYPES[sensor_type][1]
        self._state = None

    def _read_value_raw(self):
        """Read and return all lines of the sensor's device file."""
        with open(self._device_file, 'r') as handle:
            return handle.readlines()

    @property
    def name(self):
        """Display name of the sensor."""
        return self._name

    @property
    def state(self):
        """Current sensor reading."""
        return self._state

    @property
    def unit_of_measurement(self):
        """Unit the state is expressed in."""
        return self._unit_of_measurement
class read_write(object):
    """Persist the list of already-downloaded log entries.

    The list is stored as JSON in a hidden ``.download`` file next to this
    module; reads trim the oldest entry once more than 20 are stored.
    """

    def __init__(self):
        # Hidden state file living alongside this module.
        self._logfilename = os.path.join(
            os.path.dirname(os.path.abspath(__file__)), ".download")

    @property
    def read(self):
        """Return the stored log list (empty list if the file is missing),
        dropping the oldest entry when the list has grown past 20."""
        try:
            # BUG FIX: the file holds JSON text, so open it in text mode
            # with an explicit encoding instead of 'rb'.
            with open(self._logfilename, 'r', encoding='utf-8') as f:
                logfile = json.load(f)
        except IOError:
            # First run / file missing: start with an empty history.
            return []
        if len(logfile) > 20:
            logfile.pop(0)
        return logfile

    def write(self, logfile):
        """Serialize *logfile* to the state file as JSON.

        BUG FIX: the original opened the file in 'wb' and wrote the str
        produced by json.dumps(), which raises TypeError on Python 3;
        write text to a text-mode file instead.
        """
        with open(self._logfilename, 'w', encoding='utf-8') as f:
            json.dump(logfile, f)
class Client(object):
    """A minimal HTTP client for the Yammer REST API."""

    def __init__(self, access_token=None, base_url=None, proxies=None):
        """
        :param access_token: OAuth bearer token; requests go out without
            an Authorization header when omitted.
        :param base_url: API root; falls back to DEFAULT_BASE_URL.
        :param proxies: optional proxy mapping passed through to requests.
        """
        self._access_token = access_token
        self._base_url = base_url or DEFAULT_BASE_URL
        self._proxies = proxies

    def get(self, path, **kwargs):
        """GET *path*; kwargs are sent as query parameters."""
        return self._request("get", path, **kwargs)

    def post(self, path, **kwargs):
        """POST to *path*; kwargs are sent as query parameters."""
        return self._request("post", path, **kwargs)

    def put(self, path, **kwargs):
        """PUT to *path*; kwargs are sent as query parameters."""
        return self._request("put", path, **kwargs)

    def delete(self, path, **kwargs):
        """DELETE *path*; kwargs are sent as query parameters."""
        return self._request("delete", path, **kwargs)

    def request(self, method, path, **kwargs):
        # NOTE(review): unlike _request(), this treats *path* as a full
        # URL (no base/".json" suffix) and returns the raw requests
        # response without parsing — confirm this asymmetry is intentional.
        return requests.request(
            method=method,
            url=path,
            headers=self._build_headers(),
            proxies=self._proxies,
            params=kwargs,
        )

    def _request(self, method, path, **kwargs):
        """Perform an API request against the configured base URL and
        parse the response (raising on error statuses)."""
        # Copy kwargs before popping so the caller's dict is untouched
        # when files are supplied.
        if 'files' in kwargs:
            kwargs = kwargs.copy()
        files = kwargs.pop('files', None)
        response = requests.request(
            method=method,
            url=self._build_url(path),
            headers=self._build_headers(),
            proxies=self._proxies,
            params=kwargs,
            files=files,
        )
        return self._parse_response(response)

    def _build_url(self, path):
        """Append *path* to the base URL with the API's ".json" suffix."""
        return self._base_url + path + ".json"

    def _build_headers(self):
        """Bearer Authorization header, or no headers when anonymous."""
        if self._access_token:
            return {
                "Authorization": "Bearer %s" % self._access_token,
            }
        else:
            return {}

    def _parse_response(self, response):
        """Return a parsed value for 2xx responses, raise otherwise."""
        if 200 <= response.status_code < 300:
            return self._value_for_response(response)
        else:
            raise self._exception_for_response(response)

    def _value_for_response(self, response):
        """Parse a non-empty body into a GenericModel; True for empty."""
        if response.text.strip():
            return GenericModel.from_json(response.text)
        else:
            return True

    def _exception_for_response(self, response):
        """Map an error response to the matching exception type."""
        if response.status_code == 404:
            return NotFoundError(response.reason)
        elif response.status_code == 400 and "OAuthException" in response.text:
            return InvalidAccessTokenError(response.reason)
        elif response.status_code == 401:
            return UnauthorizedError(response.reason)
        elif response.status_code == 429:
            return RateLimitExceededError(response.reason)
        else:
            return ResponseError("%d error: %s" % (
                response.status_code,
                response.reason,
            ))
class BlogCLITests(base_v1.BlogClientTestBase):
    """Test suite checking CLI commands that work with blogs."""

    @classmethod
    def setUpClass(cls):
        super(BlogCLITests, cls).setUpClass()

    def test_blog_create_delete(self):
        """A created blog appears in the listing and disappears on delete."""
        wb = self.blog_admin(
            'blog-create', params=self.wb_def)
        wb_name = self.get_value_of_field(wb, "Name")
        self.assertTableStruct(wb, ['Field', 'Value'])
        wbs = self.blog_admin('blog-list')
        self.assertIn(wb_name, [blog['Name'] for blog in wbs])
        # FIX: removed an exact duplicate of the list/assertIn pair above.
        self.blog_admin('blog-delete', params=wb_name)
        wbs = self.blog_admin('blog-list')
        self.assertNotIn(wb_name, [blog['Name'] for blog in wbs])

    def test_blog_update(self):
        """Updating with identical content is a no-op; new tags change it."""
        wb = self.blog_create(self.wb_def)
        wb_name = self.get_value_of_field(wb, "Name")
        init_update_at = self.get_value_of_field(wb, "Updated at")
        tags = self.get_value_of_field(wb, 'Tags')
        self.assertNotIn('tag', tags)
        # Same definition: name and timestamp must be unchanged.
        wb = self.blog_admin(
            'blog-update', params=self.wb_def)
        update_at = self.get_value_of_field(wb, "Updated at")
        name = self.get_value_of_field(wb, 'Name')
        tags = self.get_value_of_field(wb, 'Tags')
        self.assertEqual(wb_name, name)
        self.assertNotIn('tag', tags)
        self.assertEqual(init_update_at, update_at)
        # Definition with tags: tags appear and the timestamp moves.
        wb = self.blog_admin(
            'blog-update', params=self.wb_with_tags_def)
        self.assertTableStruct(wb, ['Field', 'Value'])
        update_at = self.get_value_of_field(wb, "Updated at")
        name = self.get_value_of_field(wb, 'Name')
        tags = self.get_value_of_field(wb, 'Tags')
        self.assertEqual(wb_name, name)
        self.assertIn('tag', tags)
        self.assertNotEqual(init_update_at, update_at)

    def test_blog_get(self):
        """Fetching a blog returns the same name and tags as creation."""
        created = self.blog_create(self.wb_with_tags_def)
        wb_name = self.get_value_of_field(created, "Name")
        # FIX: this suite tests blogs, but the original called
        # self.mistral_admin('workbook-get', ...) — a leftover from the
        # mistral workbook tests it was adapted from.
        fetched = self.blog_admin('blog-get', params=wb_name)
        created_wb_name = self.get_value_of_field(created, 'Name')
        fetched_wb_name = self.get_value_of_field(fetched, 'Name')
        self.assertEqual(created_wb_name, fetched_wb_name)
        created_wb_tag = self.get_value_of_field(created, 'Tags')
        fetched_wb_tag = self.get_value_of_field(fetched, 'Tags')
        self.assertEqual(created_wb_tag, fetched_wb_tag)

    def test_blog_get_definition(self):
        """The stored definition of an existing blog can be retrieved."""
        # FIX: was self.workbook_create — sibling tests use blog_create.
        wb = self.blog_create(self.wb_def)
        wb_name = self.get_value_of_field(wb, "Name")
        definition = self.blog_admin(
            'blog-get-definition', params=wb_name)
        self.assertNotIn('404 Not Found', definition)
class F26(FeatureExtractor):
    """Time-domain feature over a finite signal sequence."""

    @numba.jit(cache=True)
    def run(self, series):
        """Return series[-1] - series[-20], rounded to 2 decimal places.

        NOTE(review): the accompanying description mentions an impulse
        factor (max / mean-absolute); the code computes a 20-step
        difference — confirm which is intended.
        """
        delta = series[-1] - series[-20]
        return round(delta, 2)
| 62598fa826068e7796d4c88c |
class RuleGroupUser(ModelSQL):
    """Many-to-many relation between record rule groups and users."""
    __name__ = 'ir.rule.group-res.user'
    rule_group = fields.Many2One('ir.rule.group', 'Rule Group',
        ondelete='CASCADE', select=True, required=True)
    user = fields.Many2One('res.user', 'User',
        ondelete='CASCADE', select=True, required=True)

    @classmethod
    def __register__(cls, module_name):
        """Migrate legacy table/column names before the standard setup."""
        TableHandler = backend.get('TableHandler')
        # Migration: rename the old relation table and its id sequence.
        TableHandler.table_rename('user_rule_group_rel', cls._table)
        TableHandler.sequence_rename('user_rule_group_rel_id_seq',
            cls._table + '_id_seq')
        table = TableHandler(cls, module_name)
        # Migration: align legacy column names with the field names.
        table.column_rename('rule_group_id', 'rule_group')
        table.column_rename('user_id', 'user')
        super(RuleGroupUser, cls).__register__(module_name)
class tmpFolder(object):
    """Context manager that creates a uniquely named temporary folder.

    On entry the folder (named after a fresh UUID under *path*) is created
    and its absolute path returned; on exit it is removed if still present.
    """

    __slots__ = ('_folder',)

    def __init__(self, path='./'):
        """:param path: parent directory for the temporary folder."""
        self._folder = abspath(join(path, str(uuid4())))

    def __enter__(self):
        mkdir(self._folder)
        return self._folder

    def __exit__(self, *exc_info):
        # Tolerate the folder having been removed inside the block.
        if exists(self._folder):
            rmtree(self._folder)
Create a tmp folder.
USAGE
from tmpFile import tmpFolder
with tmpFolder() as tmpFolderPath:
...
# Folder: "tmpFolderPath" will be deleted automatically (if exist)
ARGUMENT
* path = './', str
The path of the tmp folder. | 62598fa810dbd63aa1c70ae5 |
class TestNetworkAdvancedInterVMConnectivity(
        manager.AdvancedNetworkScenarioTest):
    """Check ssh/ping connectivity between two VMs on different networks
    joined by a common router, hopping through an access-point VM."""

    @classmethod
    def resource_setup(cls):
        """Build the two-network / one-router topology once per class."""
        super(TestNetworkAdvancedInterVMConnectivity, cls).resource_setup()
        cls.builder = TestNetworkAdvancedInterVMConnectivity(builder=True)
        cls.servers_and_keys = cls.builder.setup_topology(
            os.path.abspath('{0}scenario_advanced_inter_vmcon.yaml'.format(
                SCPATH)))

    @test.attr(type='smoke')
    @test.services('compute', 'network')
    def test_network_advanced_inter_vmssh(self):
        """SSH and ping from VM1 to VM2 through the access point."""
        # The last topology entry is the access point with a floating IP.
        ap_details = self.servers_and_keys[-1]
        hops = [(ap_details['FIP'].floating_ip_address,
                 ap_details['keypair']['private_key'])]
        vm1_server = self.servers_and_keys[0]['server']
        vm2_server = self.servers_and_keys[1]['server']
        vm1_pk = self.servers_and_keys[0]['keypair']['private_key']
        # BUG FIX: vm2's private key was taken from entry 0 (vm1's
        # keypair); it must come from entry 1, vm2's own topology entry.
        vm2_pk = self.servers_and_keys[1]['keypair']['private_key']
        vm1 = (vm1_server['addresses'].values()[0][0]['addr'], vm1_pk)
        vm2 = (vm2_server['addresses'].values()[0][0]['addr'], vm2_pk)
        nhops = hops + [vm1]
        LOG.info("testing ssh between {0} and {1}".format(
            vm1[0], vm2[0]))
        self._ssh_through_gateway(nhops, vm2)
        LOG.info("testing ping between {0} and {1}".format(
            vm1[0], vm2[0]))
        self._ping_through_gateway(nhops, vm2)
        LOG.info("test finished, tearing down now ....")
VMs with "default" security groups
on different networks connected by a common
router should be able to talk to each other
Pre-requisites:
1 tenant
2 network
1 router
2 VMs
Steps:
1. create two networks with subnets
2. create a router
3. connect a router with both subnets
4. launch one VM for each network
5. verify that VMs can ping and ssh each other
Expected results:
Ping should work.
SSH should work. | 62598fa84428ac0f6e658455 |
class Commands(object):
    """Holds command callbacks for a simple dAmn Viper bot.

    A command name ``foo`` is dispatched to the method ``cmd_foo``;
    unrecognised names fall back to the no-op ``unknown_cmd``.
    """

    def handle(self, cmd, data, client):
        """Look up and invoke the callback for *cmd*."""
        callback = getattr(self, 'cmd_{0}'.format(cmd), self.unknown_cmd)
        callback(data, client)

    def unknown_cmd(self, data, client):
        """Fallback for unknown commands; deliberately does nothing."""
        pass

    def cmd_about(self, data, dAmn):
        """Reply with a short description of the bot."""
        dAmn.say(data('ns'), data('user')+': Basic dAmn Viper bot by photofroggy.')

    def cmd_quit(self, data, dAmn):
        """Shut the bot down; only the configured admin may do this."""
        if data('user').lower() == dAmn._admin:
            dAmn.say(data('ns'), data('user')+': Closing down!')
            dAmn.flag.quitting = True
            dAmn.disconnect()

    def cmd_refresh(self, data, dAmn):
        """Drop and re-establish the connection; admin only."""
        if data('user').lower() == dAmn._admin:
            dAmn.say(data('ns'), data('user')+': Refreshing connection!')
            dAmn.flag.disconnecting = True
            dAmn.disconnect()
class StockLSTM(object):
    """LSTM regression model predicting a 1-D sequence of real numbers
    (here daily stock adjusted returns normalized by running fixed-length
    standard deviation), regularized with dropout as in Zaremba et al.
    2015 (http://arxiv.org/pdf/1409.2329v5.pdf).

    NOTE(review): written against a pre-1.0 TensorFlow API
    (tf.nn.rnn.rnn, tf.split(1, ...), tf.concat(1, ...)).
    """

    def __init__(self, is_training, config):
        self.batch_size = batch_size = config.batch_size
        self.num_steps = num_steps = config.num_steps
        size = config.hidden_size
        # Placeholders for the scalar input sequence and its targets.
        self._input_data = tf.placeholder(tf.float32, [batch_size, num_steps])
        self._targets = tf.placeholder(tf.float32, [batch_size, num_steps])
        lstm_cell = tf.nn.rnn_cell.BasicLSTMCell(size, forget_bias=0.0)
        # Dropout on cell outputs only while training (Zaremba-style).
        if is_training and config.keep_prob < 1:
            lstm_cell = tf.nn.rnn_cell.DropoutWrapper(lstm_cell,
                output_keep_prob=config.keep_prob)
        cell = tf.nn.rnn_cell.MultiRNNCell([lstm_cell] * config.num_layers)
        self._initial_state = cell.zero_state(batch_size, tf.float32)
        # Linear embedding of each scalar input into the hidden size.
        iw = tf.get_variable("input_w", [1, size])
        ib = tf.get_variable("input_b", [size])
        inputs = [tf.nn.xw_plus_b(i_, iw, ib)
                  for i_ in tf.split(1, num_steps, self._input_data)]
        if is_training and config.keep_prob < 1:
            inputs = [tf.nn.dropout(input_, config.keep_prob)
                      for input_ in inputs]
        # Unrolled RNN over the embedded time steps.
        outputs, states = tf.nn.rnn.rnn(cell, inputs,
            initial_state=self._initial_state)
        rnn_output = tf.reshape(tf.concat(1, outputs), [-1, size])
        # Project hidden states back to one scalar prediction per step.
        self._output = output = tf.nn.xw_plus_b(rnn_output,
            tf.get_variable("out_w", [size, 1]),
            tf.get_variable("out_b", [1]))
        # Mean squared error against the flattened targets.
        self._cost = cost = tf.reduce_mean(tf.square(output -
            tf.reshape(self._targets, [-1])))
        self._final_state = states[-1]
        if not is_training:
            return
        # Training-only ops: Adam with global-norm gradient clipping.
        self._lr = tf.Variable(0.0, trainable=False)
        tvars = tf.trainable_variables()
        grads, _ = tf.clip_by_global_norm(tf.gradients(cost, tvars),
            config.max_grad_norm)
        optimizer = tf.train.AdamOptimizer(self.lr)
        self._train_op = optimizer.apply_gradients(zip(grads, tvars))

    def assign_lr(self, session, lr_value):
        """Set the learning-rate variable inside *session*."""
        session.run(tf.assign(self.lr, lr_value))

    @property
    def input_data(self):
        return self._input_data

    @property
    def targets(self):
        return self._targets

    @property
    def initial_state(self):
        return self._initial_state

    @property
    def cost(self):
        return self._cost

    @property
    def output(self):
        return self._output

    @property
    def final_state(self):
        return self._final_state

    @property
    def lr(self):
        return self._lr

    @property
    def train_op(self):
        return self._train_op
returns normalized by running fixed-length standard deviation) using an LSTM.
It is regularized using the method in [Zaremba et al 2015]
http://arxiv.org/pdf/1409.2329v5.pdf | 62598fa8097d151d1a2c0f5c |
class VersionTests(TestCase):
    """Tests for version information exposed by
    L{OpenSSL.SSL.SSLeay_version} and
    L{OpenSSL.SSL.OPENSSL_VERSION_NUMBER}.
    """

    def test_OPENSSL_VERSION_NUMBER(self):
        """The version number is exposed as an int."""
        self.assertTrue(isinstance(OPENSSL_VERSION_NUMBER, int))

    def test_SSLeay_version(self):
        """Each version-info type yields bytes, and all five are distinct."""
        info_types = [SSLEAY_VERSION, SSLEAY_CFLAGS, SSLEAY_BUILT_ON,
                      SSLEAY_PLATFORM, SSLEAY_DIR]
        seen = {}
        for info_type in info_types:
            result = SSLeay_version(info_type)
            self.assertTrue(isinstance(result, bytes))
            seen[result] = info_type
        # Five distinct strings => no two types returned the same value.
        self.assertEqual(len(seen), 5)
L{OpenSSL.SSL.SSLeay_version} and
L{OpenSSL.SSL.OPENSSL_VERSION_NUMBER}. | 62598fa83617ad0b5ee06087 |
class Region(object):
    """Representation of a "region" in a simulation.

    Attributes:
        b: the beginning of the region
        e: the end of the region
        w: the "weight" assigned to the region (scaled by region length
           when ``coupled`` is True)
        c: whether the weight is coupled to region length
        l: integer label used to tag mutations arising in this region
    """

    def __init__(self, beg, end, weight, coupled=True, label=0):
        # Validate in the same order as before: all infinity checks,
        # then all NaN checks, then the weight sign.
        fields = (("beg", beg), ("end", end), ("weight", weight))
        for name, value in fields:
            if math.isinf(value):
                raise ValueError("fwdpy11.Region: %s not finite" % name)
        for name, value in fields:
            if math.isnan(value):
                raise ValueError("fwdpy11.Region: %s not a number" % name)
        if weight < 0.0:
            raise ValueError("fwdpy11.Region: weight < 0.0")
        self.b = float(beg)
        self.e = float(end)
        self.w = float(weight)
        self.c = coupled
        self.l = label
        # When coupled, the effective weight scales with region length.
        if self.c is True:
            self.w = (self.e - self.b) * self.w

    def __repr__(self):
        return ('regions.Region(beg=%s,end=%s,weight=%s,coupled=%s,label=%s)'
                % (self.b, self.e, self.w, self.c, self.l))
Attributes:
b: the beginning of the region
e: the end of the region
w: the "weight" assigned to the region
l: A label assigned to the region.
Labels must be integers, and can be used to
'tag' mutations arising in different regions.
See :func:`evolve_regions` for how this class may be used to
parameterize a simulation.
This class is extended by:
* :class:`fwdpy11.Sregion` | 62598fa87cff6e4e811b595d |
@method_decorator(login_required(login_url=reverse_lazy('login')),
                  name='dispatch')
class RamblerBaseCreateView(RamblerBaseFormView):
    """Base view for creating objects of a specific model.

    Anonymous users are redirected to the 'login' URL; all behavior is
    inherited from RamblerBaseFormView.
    """
    pass
to have as attributes:
- url_route
- model: models.Model
- template_form
Requires the user to be logged in to perform creation. | 62598fa84e4d562566372358
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.