code stringlengths 4 4.48k | docstring stringlengths 1 6.45k | _id stringlengths 24 24 |
|---|---|---|
class Issuer(NameID): <NEW_LINE> <INDENT> format = base.Attribute(types.String) | Represents the issuer of a SAML assertion or protocol
message [saml-core § 2.2.5]. | 62598faefff4ab517ebcd7e7 |
class Board(models.Model): <NEW_LINE> <INDENT> manufacturer = models.CharField('制造商', max_length=128) <NEW_LINE> model = models.CharField('样式', max_length=64) <NEW_LINE> sn = models.CharField('序列号',max_length=64) <NEW_LINE> server_obj = models.ForeignKey('Server', related_name='board',on_delete=models.CASCADE) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> verbose_name_plural = "主板表" <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.manufacturer | 主板表 | 62598faecc40096d6161a1db |
class SnippetInstance(EditableTextObject): <NEW_LINE> <INDENT> def __init__(self, snippet, parent, initial_text, start, end, visual_content, last_re, globals): <NEW_LINE> <INDENT> if start is None: <NEW_LINE> <INDENT> start = Position(0, 0) <NEW_LINE> <DEDENT> if end is None: <NEW_LINE> <INDENT> end = Position(0, 0) <NEW_LINE> <DEDENT> self.snippet = snippet <NEW_LINE> self._cts = 0 <NEW_LINE> self.locals = {'match': last_re} <NEW_LINE> self.globals = globals <NEW_LINE> self.visual_content = visual_content <NEW_LINE> EditableTextObject.__init__(self, parent, start, end, initial_text) <NEW_LINE> <DEDENT> def replace_initial_text(self): <NEW_LINE> <INDENT> def _place_initial_text(obj): <NEW_LINE> <INDENT> obj.overwrite() <NEW_LINE> if isinstance(obj, EditableTextObject): <NEW_LINE> <INDENT> for child in obj._children: <NEW_LINE> <INDENT> _place_initial_text(child) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> _place_initial_text(self) <NEW_LINE> <DEDENT> def replay_user_edits(self, cmds): <NEW_LINE> <INDENT> for cmd in cmds: <NEW_LINE> <INDENT> self._do_edit(cmd) <NEW_LINE> <DEDENT> <DEDENT> def update_textobjects(self): <NEW_LINE> <INDENT> vc = _VimCursor(self) <NEW_LINE> done = set() <NEW_LINE> not_done = set() <NEW_LINE> def _find_recursive(obj): <NEW_LINE> <INDENT> if isinstance(obj, EditableTextObject): <NEW_LINE> <INDENT> for child in obj._children: <NEW_LINE> <INDENT> _find_recursive(child) <NEW_LINE> <DEDENT> <DEDENT> not_done.add(obj) <NEW_LINE> <DEDENT> _find_recursive(self) <NEW_LINE> counter = 10 <NEW_LINE> while (done != not_done) and counter: <NEW_LINE> <INDENT> for obj in sorted(not_done - done): <NEW_LINE> <INDENT> if obj._update(done): <NEW_LINE> <INDENT> done.add(obj) <NEW_LINE> <DEDENT> <DEDENT> counter -= 1 <NEW_LINE> <DEDENT> if not counter: <NEW_LINE> <INDENT> raise RuntimeError( 'The snippets content did not converge: Check for Cyclic ' 'dependencies or random strings in your snippet. 
You can use ' "'if not snip.c' to make sure to only expand random output " 'once.') <NEW_LINE> <DEDENT> vc.to_vim() <NEW_LINE> self._del_child(vc) <NEW_LINE> <DEDENT> def select_next_tab(self, backwards=False): <NEW_LINE> <INDENT> if self._cts is None: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> if backwards: <NEW_LINE> <INDENT> cts_bf = self._cts <NEW_LINE> res = self._get_prev_tab(self._cts) <NEW_LINE> if res is None: <NEW_LINE> <INDENT> self._cts = cts_bf <NEW_LINE> return self._tabstops.get(self._cts, None) <NEW_LINE> <DEDENT> self._cts, ts = res <NEW_LINE> return ts <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> res = self._get_next_tab(self._cts) <NEW_LINE> if res is None: <NEW_LINE> <INDENT> self._cts = None <NEW_LINE> return self._tabstops.get(0, None) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._cts, ts = res <NEW_LINE> return ts <NEW_LINE> <DEDENT> <DEDENT> return self._tabstops[self._cts] <NEW_LINE> <DEDENT> def _get_tabstop(self, requester, no): <NEW_LINE> <INDENT> cached_parent = self._parent <NEW_LINE> self._parent = None <NEW_LINE> rv = EditableTextObject._get_tabstop(self, requester, no) <NEW_LINE> self._parent = cached_parent <NEW_LINE> return rv | See module docstring. | 62598faed486a94d0ba2bfd1 |
class TextFeature(): <NEW_LINE> <INDENT> def __init__(self, input_ids, attention_mask, token_type_ids, label, input_len): <NEW_LINE> <INDENT> self.input_ids = input_ids <NEW_LINE> self.attention_mask = attention_mask <NEW_LINE> self.token_type_ids = token_type_ids <NEW_LINE> self.input_len = input_len <NEW_LINE> self.label = label <NEW_LINE> <DEDENT> def tensorfy(self, unsqueeze = True): <NEW_LINE> <INDENT> if isinstance(self.input_ids, list): <NEW_LINE> <INDENT> self.input_ids = torch.LongTensor(self.input_ids) <NEW_LINE> self.attention_mask = torch.LongTensor(self.attention_mask) <NEW_LINE> self.token_type_ids = torch.LongTensor(self.token_type_ids) <NEW_LINE> self.label = torch.LongTensor([self.label]) <NEW_LINE> if unsqueeze: <NEW_LINE> <INDENT> self.input_ids = self.input_ids.unsqueeze(0) <NEW_LINE> self.attention_mask = self.attention_mask.unsqueeze(0) <NEW_LINE> self.token_type_ids= self.token_type_ids.unsqueeze(0) <NEW_LINE> self.label = self.label.unsqueeze(0) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return str(self.to_json_string()) <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> output = copy.deepcopy(self.__dict__) <NEW_LINE> return output <NEW_LINE> <DEDENT> def to_json_string(self): <NEW_LINE> <INDENT> return json.dumps(self.to_dict(), indent=2, sort_keys=True) + "\n" | A single set of features of text.
Args:
input_ids: Indices of input sequence tokens in the vocabulary.
attention_mask: Mask to avoid performing attention on padding token indices.
Mask values selected in ``[0, 1]``:
Usually ``1`` for tokens that are NOT MASKED, ``0`` for MASKED (padded) tokens.
token_type_ids: Segment token indices to indicate first and second portions of the inputs.
label: Label corresponding to the input | 62598fae5166f23b2e2433db |
class Symbol: <NEW_LINE> <INDENT> def __init__(self, c: str, color: tuple[int, int, int] | None = (0, 0, 0), isGeneric: bool = False, ) -> None: <NEW_LINE> <INDENT> if len(c) > 3: <NEW_LINE> <INDENT> raise error(f"Symbol needs to be 3 chars or shorter, not \"{c}\" with length {len(c)}") <NEW_LINE> <DEDENT> self.c = c <NEW_LINE> self.isGeneric = isGeneric <NEW_LINE> self.__name__ = c <NEW_LINE> self.color = color <NEW_LINE> <DEDENT> def __eq__(self, o: object) -> bool: <NEW_LINE> <INDENT> return isinstance(o, Symbol) and self.c == o.c <NEW_LINE> <DEDENT> def __hash__(self) -> int: <NEW_LINE> <INDENT> return hash(self.c) <NEW_LINE> <DEDENT> def __str__(self) -> str: <NEW_LINE> <INDENT> return self.c <NEW_LINE> <DEDENT> def __repr__(self) -> str: <NEW_LINE> <INDENT> return self.c <NEW_LINE> <DEDENT> def __format__(self, format_spec: str) -> str: <NEW_LINE> <INDENT> return format(str(self), format_spec) | The symbols automata read.
Symbols are identified by the attribute c, which is 3 or fewer chars long.
When rendered in a CA color will be displayed instead of c.
Generic Symbols are used as variables in transition functions. | 62598faef9cc0f698b1c52ca |
class ConfigurableMeta(type): <NEW_LINE> <INDENT> def __new__(cls, name, bases, attrs): <NEW_LINE> <INDENT> if bases: <NEW_LINE> <INDENT> class_params = attrs.get("_parameters", []) <NEW_LINE> class_param_groups = list(attrs.get("_param_groups", [])) <NEW_LINE> for base in bases: <NEW_LINE> <INDENT> if "_parameters" in vars(base): <NEW_LINE> <INDENT> class_params = [*class_params, *deepcopy(base._parameters)] <NEW_LINE> <DEDENT> if "_param_groups" in vars(base): <NEW_LINE> <INDENT> class_param_groups = [*deepcopy(base._param_groups), *class_param_groups] <NEW_LINE> <DEDENT> <DEDENT> attrs["_parameters"] = class_params <NEW_LINE> attrs["_param_groups"] = [group for group in class_param_groups if group["key"] is not None] <NEW_LINE> attrs["_param_groups"].append({ "key": None, "name": "Other settings", "icon": "settings", "description": "Here are some unclassified settings. Even if they don't belong to any group, they might still be important. They may be here just because the developer was too lazy to categorize them or forgot to do so. <b>If you are the developer</b> and it's the first case, <b>shame on you<b>." }) <NEW_LINE> for f_name, f in attrs.items(): <NEW_LINE> <INDENT> if callable(f) and (not f_name.startswith("__")) and (not isinstance(f, (staticmethod, classmethod))): <NEW_LINE> <INDENT> attrs[f_name] = _populate_with_settings(f, [param["key"] for param in class_params]) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> new_cls = super().__new__(cls, name, bases, attrs) <NEW_LINE> new_cls._create_update_maps() <NEW_LINE> if bases: <NEW_LINE> <INDENT> def update_settings(self, *args, **kwargs): <NEW_LINE> <INDENT> return self._update_settings(*args, **kwargs) <NEW_LINE> <DEDENT> update_settings.__doc__ = f"Updates the settings of this plot.\n\nDocs for {new_cls.__name__}:\n\n{get_configurable_docstring(new_cls)}" <NEW_LINE> new_cls.update_settings = update_settings <NEW_LINE> <DEDENT> return new_cls | Metaclass used to build the Configurable class and its childs.
This is used mainly for two reasons, and they both affect only subclasses of Configurable
not Configurable itself.:
- Make the class functions able to access settings through their arguments
(see the `_populate_with_settings` function in this same file)
- Set documentation to the `update_settings` method that is specific to the particular class
so that the user can check what each parameter does exactly. | 62598faea8370b77170f03de |
class VendorResource(CommonResource): <NEW_LINE> <INDENT> productundertests = ReverseManyIncludeURIField( 'oilserver.api.resources.ProductUnderTestResource', 'productundertests') <NEW_LINE> class Meta(CommonMeta): <NEW_LINE> <INDENT> queryset = models.Vendor.objects.all() <NEW_LINE> filtering = { 'uuid': ('exact',), 'name': ('exact',), 'productundertests': ALL_WITH_RELATIONS, } | API Resource for 'Vendor' model. | 62598faed58c6744b42dc2d8 |
class Xdmf(PetscComponent, ModuleXdmf): <NEW_LINE> <INDENT> import pyre.inventory <NEW_LINE> filename = pyre.inventory.str("filename", default="output.h5") <NEW_LINE> filename.meta['tip'] = "Name of HDF5 file." <NEW_LINE> def __init__(self, name="xdmf"): <NEW_LINE> <INDENT> PetscComponent.__init__(self, name, facility="xdmf") <NEW_LINE> self._loggingPrefix = "Xdmf " <NEW_LINE> ModuleXdmf.__init__(self) <NEW_LINE> return <NEW_LINE> <DEDENT> def _configure(self): <NEW_LINE> <INDENT> PetscComponent._configure(self) <NEW_LINE> self.filename = self.inventory.filename <NEW_LINE> return | Python class for Xdmf metadata file associated with an HDF5 file.
Properties
@li filename Name of HDF5 file.
Facilities
@li None | 62598fae7c178a314d78d49f |
class PowerNorm(Normalize): <NEW_LINE> <INDENT> def __init__(self, gamma, vmin=None, vmax=None, clip=False): <NEW_LINE> <INDENT> Normalize.__init__(self, vmin, vmax, clip) <NEW_LINE> self.gamma = gamma <NEW_LINE> <DEDENT> def __call__(self, value, clip=None): <NEW_LINE> <INDENT> if clip is None: <NEW_LINE> <INDENT> clip = self.clip <NEW_LINE> <DEDENT> result, is_scalar = self.process_value(value) <NEW_LINE> self.autoscale_None(result) <NEW_LINE> gamma = self.gamma <NEW_LINE> vmin, vmax = self.vmin, self.vmax <NEW_LINE> if vmin > vmax: <NEW_LINE> <INDENT> raise ValueError("minvalue must be less than or equal to maxvalue") <NEW_LINE> <DEDENT> elif vmin == vmax: <NEW_LINE> <INDENT> result.fill(0) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> res_mask = result.data < 0 <NEW_LINE> if clip: <NEW_LINE> <INDENT> mask = np.ma.getmask(result) <NEW_LINE> result = np.ma.array(np.clip(result.filled(vmax), vmin, vmax), mask=mask) <NEW_LINE> <DEDENT> resdat = result.data <NEW_LINE> resdat -= vmin <NEW_LINE> np.power(resdat, gamma, resdat) <NEW_LINE> resdat /= (vmax - vmin) ** gamma <NEW_LINE> result = np.ma.array(resdat, mask=result.mask, copy=False) <NEW_LINE> result[res_mask] = 0 <NEW_LINE> <DEDENT> if is_scalar: <NEW_LINE> <INDENT> result = result[0] <NEW_LINE> <DEDENT> return result <NEW_LINE> <DEDENT> def inverse(self, value): <NEW_LINE> <INDENT> if not self.scaled(): <NEW_LINE> <INDENT> raise ValueError("Not invertible until scaled") <NEW_LINE> <DEDENT> gamma = self.gamma <NEW_LINE> vmin, vmax = self.vmin, self.vmax <NEW_LINE> if cbook.iterable(value): <NEW_LINE> <INDENT> val = np.ma.asarray(value) <NEW_LINE> return np.ma.power(val, 1. / gamma) * (vmax - vmin) + vmin <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return pow(value, 1. 
/ gamma) * (vmax - vmin) + vmin <NEW_LINE> <DEDENT> <DEDENT> def autoscale(self, A): <NEW_LINE> <INDENT> self.vmin = np.ma.min(A) <NEW_LINE> if self.vmin < 0: <NEW_LINE> <INDENT> self.vmin = 0 <NEW_LINE> warnings.warn("Power-law scaling on negative values is " "ill-defined, clamping to 0.") <NEW_LINE> <DEDENT> self.vmax = np.ma.max(A) <NEW_LINE> <DEDENT> def autoscale_None(self, A): <NEW_LINE> <INDENT> if self.vmin is None and np.size(A) > 0: <NEW_LINE> <INDENT> self.vmin = np.ma.min(A) <NEW_LINE> if self.vmin < 0: <NEW_LINE> <INDENT> self.vmin = 0 <NEW_LINE> warnings.warn("Power-law scaling on negative values is " "ill-defined, clamping to 0.") <NEW_LINE> <DEDENT> <DEDENT> if self.vmax is None and np.size(A) > 0: <NEW_LINE> <INDENT> self.vmax = np.ma.max(A) | Normalize a given value to the ``[0, 1]`` interval with a power-law
scaling. This will clip any negative data points to 0. | 62598faee5267d203ee6b90c |
class FBObjectPoseOptionsFlag (Enumeration): <NEW_LINE> <INDENT> kFBObjectPoseOptionsNoFlag=property(doc=" ") <NEW_LINE> kFBObjectPoseOptionsTranslationX=property(doc=" ") <NEW_LINE> kFBObjectPoseOptionsTranslationY=property(doc=" ") <NEW_LINE> kFBObjectPoseOptionsTranslationZ=property(doc=" ") <NEW_LINE> kFBObjectPoseOptionsRotation=property(doc=" ") <NEW_LINE> kFBObjectPoseOptionsScaling=property(doc=" ") <NEW_LINE> pass | ObjectPoseOptions flags.
| 62598faeff9c53063f51a650 |
class Pairing(GitEntry): <NEW_LINE> <INDENT> required_fields = [u'Player 1', u'Player 2', u'Group'] <NEW_LINE> def __init__(self, repo, params=None, index=None, *args, **kwargs): <NEW_LINE> <INDENT> self.repo = repo <NEW_LINE> if params: <NEW_LINE> <INDENT> for field in Pairing.required_fields: <NEW_LINE> <INDENT> if field not in params: <NEW_LINE> <INDENT> raise PairingModError( 'Required field {0} missing.'.format(field)) <NEW_LINE> <DEDENT> <DEDENT> self.pairing_index = _generate_filename(params) <NEW_LINE> <DEDENT> elif index is not None: <NEW_LINE> <INDENT> self.pairing_index = index <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError('One of the keyword arguments params ' 'or index is required.') <NEW_LINE> <DEDENT> super(Pairing, self).__init__(repo, params=params, git_table=PairingList.path, filename=self.pairing_index, *args, **kwargs) <NEW_LINE> <DEDENT> def __getitem__(self, key): <NEW_LINE> <INDENT> if key == u'Player 1' or key == u'Player 2': <NEW_LINE> <INDENT> index = super(Pairing, self).__getitem__(key) <NEW_LINE> return Player(self.repo, index=index) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return super(Pairing, self).__getitem__(key) <NEW_LINE> <DEDENT> <DEDENT> def __delitem__(self, key): <NEW_LINE> <INDENT> super(Pairing, self).__delitem__(key) <NEW_LINE> <DEDENT> def serialize(self, data): <NEW_LINE> <INDENT> data = data.copy() <NEW_LINE> data[u'Player 1'] = data[u'Player 1'].getindex() <NEW_LINE> data[u'Player 2'] = data[u'Player 2'].getindex() <NEW_LINE> return data <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return self.pairing_index == other.pairing_index <NEW_LINE> <DEDENT> def __hash__(self): <NEW_LINE> <INDENT> return hash(self.pairing_index) | Class to manage a pairing between two teams. | 62598fae2c8b7c6e89bd37c8 |
class InPlay(object): <NEW_LINE> <INDENT> def __init__(self, tetriminos, width, height): <NEW_LINE> <INDENT> self.tetrimino = tetriminos[randrange(len(tetriminos))] <NEW_LINE> self.rotation = randrange(1) <NEW_LINE> self.x = width/2 <NEW_LINE> self.y = height <NEW_LINE> <DEDENT> def rotateClock(self): <NEW_LINE> <INDENT> self.rotation = (self.rotation + 1) & 3 <NEW_LINE> <DEDENT> def rotateAnti(self): <NEW_LINE> <INDENT> self.rotation = (self.rotation - 1) & 3 <NEW_LINE> <DEDENT> def nudge(self): <NEW_LINE> <INDENT> if self.x > self.targetX: <NEW_LINE> <INDENT> self.x -= 1 <NEW_LINE> <DEDENT> elif self.x < self.targetX: <NEW_LINE> <INDENT> self.x += 1 <NEW_LINE> <DEDENT> <DEDENT> def drop(self, direction): <NEW_LINE> <INDENT> self.y += direction <NEW_LINE> return self.y <NEW_LINE> <DEDENT> def modePlace(self, well, x, y, color): <NEW_LINE> <INDENT> well[x][y] = color <NEW_LINE> return True <NEW_LINE> <DEDENT> def modeIsPlacementValid(self, well, x, y, color): <NEW_LINE> <INDENT> if y < 0 or well[x][y] is not None: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return True <NEW_LINE> <DEDENT> def modeErase(self, well, x, y, color): <NEW_LINE> <INDENT> well[x][y] = None <NEW_LINE> return True <NEW_LINE> <DEDENT> def _getHydrated(self): <NEW_LINE> <INDENT> return self.tetrimino.hydrate(self.rotation) <NEW_LINE> <DEDENT> def operation(self, action, well): <NEW_LINE> <INDENT> hydrated = self._getHydrated() <NEW_LINE> for y in range(len(hydrated[0])): <NEW_LINE> <INDENT> for x in range(len(hydrated)): <NEW_LINE> <INDENT> color = hydrated[x][y] <NEW_LINE> if color is not None: <NEW_LINE> <INDENT> if action(well, self.x+x, self.y-y, color) is False: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> return True <NEW_LINE> <DEDENT> def _tryX(self, x, piece, wall): <NEW_LINE> <INDENT> for w in range(len(piece)): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> return randrange(8), True <NEW_LINE> <DEDENT> def xForBestFit(self, well): <NEW_LINE> 
<INDENT> hydrated = self._getHydrated() <NEW_LINE> pieceProfile = self.tetrimino.profile(hydrated) <NEW_LINE> wellProfile = well.profile() <NEW_LINE> candidate = { True: {"y": well.height, "x": None}, False: {"y": well.height, "x": None}, } <NEW_LINE> for x in range(well.width-len(pieceProfile)): <NEW_LINE> <INDENT> y, perfect = self._tryX(x, pieceProfile, wellProfile) <NEW_LINE> if candidate[perfect]["y"] > y: <NEW_LINE> <INDENT> candidate[perfect] = {"y": y, "x": x} <NEW_LINE> <DEDENT> <DEDENT> if candidate[True]["x"] is not None: <NEW_LINE> <INDENT> return candidate[True]["x"] <NEW_LINE> <DEDENT> return candidate[False]["x"] | This is the piece in play, and it takes direction from the game | 62598faecc0a2c111447b014 |
@actions.register('snapshot') <NEW_LINE> class Snapshot(BaseAction): <NEW_LINE> <INDENT> schema = type_schema('snapshot') <NEW_LINE> permissions = ('rds:CreateDBSnapshot',) <NEW_LINE> def process(self, dbs): <NEW_LINE> <INDENT> with self.executor_factory(max_workers=3) as w: <NEW_LINE> <INDENT> futures = [] <NEW_LINE> for db in dbs: <NEW_LINE> <INDENT> futures.append(w.submit( self.process_rds_snapshot, db)) <NEW_LINE> <DEDENT> for f in as_completed(futures): <NEW_LINE> <INDENT> if f.exception(): <NEW_LINE> <INDENT> self.log.error( "Exception creating rds snapshot \n %s", f.exception()) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return dbs <NEW_LINE> <DEDENT> def process_rds_snapshot(self, resource): <NEW_LINE> <INDENT> if not _db_instance_eligible_for_backup(resource): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> c = local_session(self.manager.session_factory).client('rds') <NEW_LINE> c.create_db_snapshot( DBSnapshotIdentifier=snapshot_identifier( self.data.get('snapshot-prefix', 'Backup'), resource['DBInstanceIdentifier']), DBInstanceIdentifier=resource['DBInstanceIdentifier']) | Creates a manual snapshot of a RDS instance
:example:
.. code-block: yaml
policies:
- name: rds-snapshot
resource: rds
actions:
- snapshot | 62598fae4e4d562566372429 |
class UnBindingPolicyObjectRequest(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.Module = None <NEW_LINE> self.GroupId = None <NEW_LINE> self.UniqueId = None <NEW_LINE> self.InstanceGroupId = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.Module = params.get("Module") <NEW_LINE> self.GroupId = params.get("GroupId") <NEW_LINE> self.UniqueId = params.get("UniqueId") <NEW_LINE> self.InstanceGroupId = params.get("InstanceGroupId") | UnBindingPolicyObject request structure.
| 62598fae167d2b6e312b6f75 |
class PredictForm2(Form): <NEW_LINE> <INDENT> review = fields.TextAreaField('Review:', validators=[Required()]) <NEW_LINE> submit = fields.SubmitField('Submit') | Fields for Predict | 62598faef548e778e596b5a7 |
class itkMaskNegatedImageFilterIUC2IUS2IUC2(itkMaskNegatedImageFilterIUC2IUS2IUC2_Superclass): <NEW_LINE> <INDENT> thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag') <NEW_LINE> def __init__(self, *args, **kwargs): raise AttributeError("No constructor defined") <NEW_LINE> __repr__ = _swig_repr <NEW_LINE> MaskEqualityComparableCheck = _itkMaskNegatedImageFilterPython.itkMaskNegatedImageFilterIUC2IUS2IUC2_MaskEqualityComparableCheck <NEW_LINE> InputConvertibleToOutputCheck = _itkMaskNegatedImageFilterPython.itkMaskNegatedImageFilterIUC2IUS2IUC2_InputConvertibleToOutputCheck <NEW_LINE> def __New_orig__(): <NEW_LINE> <INDENT> return _itkMaskNegatedImageFilterPython.itkMaskNegatedImageFilterIUC2IUS2IUC2___New_orig__() <NEW_LINE> <DEDENT> __New_orig__ = staticmethod(__New_orig__) <NEW_LINE> def SetOutsideValue(self, *args): <NEW_LINE> <INDENT> return _itkMaskNegatedImageFilterPython.itkMaskNegatedImageFilterIUC2IUS2IUC2_SetOutsideValue(self, *args) <NEW_LINE> <DEDENT> def GetOutsideValue(self): <NEW_LINE> <INDENT> return _itkMaskNegatedImageFilterPython.itkMaskNegatedImageFilterIUC2IUS2IUC2_GetOutsideValue(self) <NEW_LINE> <DEDENT> __swig_destroy__ = _itkMaskNegatedImageFilterPython.delete_itkMaskNegatedImageFilterIUC2IUS2IUC2 <NEW_LINE> def cast(*args): <NEW_LINE> <INDENT> return _itkMaskNegatedImageFilterPython.itkMaskNegatedImageFilterIUC2IUS2IUC2_cast(*args) <NEW_LINE> <DEDENT> cast = staticmethod(cast) <NEW_LINE> def GetPointer(self): <NEW_LINE> <INDENT> return _itkMaskNegatedImageFilterPython.itkMaskNegatedImageFilterIUC2IUS2IUC2_GetPointer(self) <NEW_LINE> <DEDENT> def New(*args, **kargs): <NEW_LINE> <INDENT> obj = itkMaskNegatedImageFilterIUC2IUS2IUC2.__New_orig__() <NEW_LINE> import itkTemplate <NEW_LINE> itkTemplate.New(obj, *args, **kargs) <NEW_LINE> return obj <NEW_LINE> <DEDENT> New = staticmethod(New) | Proxy of C++ itkMaskNegatedImageFilterIUC2IUS2IUC2 class | 62598fae7b25080760ed74b3 |
class WikiPageIterator(WikiPageIteratorAll): <NEW_LINE> <INDENT> def next(self): <NEW_LINE> <INDENT> if self.current > self.high: <NEW_LINE> <INDENT> raise StopIteration <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> good_doc_status = False <NEW_LINE> while not good_doc_status: <NEW_LINE> <INDENT> self.current += 1 <NEW_LINE> if self.current==self.high: <NEW_LINE> <INDENT> raise StopIteration <NEW_LINE> <DEDENT> doc = self.cursor.next() <NEW_LINE> if 'title' not in doc.keys(): <NEW_LINE> <INDENT> doc['title'] = doc['dict']['title'] <NEW_LINE> <DEDENT> if 'revision' not in doc.keys(): <NEW_LINE> <INDENT> doc['revision'] = doc['dict'].pop('revision') <NEW_LINE> <DEDENT> if not badpage(doc): <NEW_LINE> <INDENT> good_doc = doc <NEW_LINE> good_doc_status = True <NEW_LINE> <DEDENT> <DEDENT> return good_doc | Performs same function as 'WikiPageIteratorAll', but
filters out pages deemed to be not of interest. | 62598fae4428ac0f6e658528 |
class Database(DependencyDescriptor): <NEW_LINE> <INDENT> scope = 'operation' <NEW_LINE> def __init__(self, config): <NEW_LINE> <INDENT> self.config = config <NEW_LINE> <DEDENT> @property <NEW_LINE> def key(self): <NEW_LINE> <INDENT> return (self.config,) <NEW_LINE> <DEDENT> def instanciate(self, op): <NEW_LINE> <INDENT> name = op.env.get_config(self.config) <NEW_LINE> if name is None: <NEW_LINE> <INDENT> raise RuntimeError('Unknown config "%s"' % self.config) <NEW_LINE> <DEDENT> backend = op.env.get_config('databases', name, 'backend') <NEW_LINE> if not backend: <NEW_LINE> <INDENT> raise RuntimeError('No configuration for db "%s"' % name) <NEW_LINE> <DEDENT> return backends[backend](op, name) | A database dependency descriptor gives access to a specific database
backend instanciated for the current operation. | 62598faed7e4931a7ef3c099 |
class MetricModel: <NEW_LINE> <INDENT> def __init__(self, modelversion: 'ModelVersion', threshold: float, comparator: ThresholdCmpOp) -> 'MetricModel': <NEW_LINE> <INDENT> self.modelversion = modelversion <NEW_LINE> self.threshold = threshold <NEW_LINE> self.comparator = comparator | Model having extra metric fields | 62598faed486a94d0ba2bfd2 |
class TestSamConverter(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self._backend = backend.FileSystemBackend("tests/data") <NEW_LINE> self._client = client.LocalClient(self._backend) <NEW_LINE> <DEDENT> def verifySamRecordsEqual(self, sourceReads, convertedReads): <NEW_LINE> <INDENT> self.assertEqual(len(sourceReads), len(convertedReads)) <NEW_LINE> for source, converted in zip(sourceReads, convertedReads): <NEW_LINE> <INDENT> self.assertEqual(source.query_sequence, converted.query_sequence) <NEW_LINE> <DEDENT> <DEDENT> def verifyFullConversion(self, readGroupSet, readGroup, reference): <NEW_LINE> <INDENT> with tempfile.NamedTemporaryFile() as fileHandle: <NEW_LINE> <INDENT> converter = converters.SamConverter( self._client, readGroup.getId(), reference.getId(), outputFileName=fileHandle.name) <NEW_LINE> converter.convert() <NEW_LINE> samFile = pysam.AlignmentFile(fileHandle.name, "r") <NEW_LINE> try: <NEW_LINE> <INDENT> convertedReads = list(samFile.fetch()) <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> samFile.close() <NEW_LINE> <DEDENT> samFile = pysam.AlignmentFile(readGroupSet.getSamFilePath(), "rb") <NEW_LINE> try: <NEW_LINE> <INDENT> sourceReads = [] <NEW_LINE> referenceName = reference.getName().encode() <NEW_LINE> readGroupName = readGroup.getLocalId().encode() <NEW_LINE> for readAlignment in samFile.fetch(referenceName): <NEW_LINE> <INDENT> tags = dict(readAlignment.tags) <NEW_LINE> if 'RG' in tags and tags['RG'] == readGroupName: <NEW_LINE> <INDENT> sourceReads.append(readAlignment) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> finally: <NEW_LINE> <INDENT> samFile.close() <NEW_LINE> <DEDENT> self.verifySamRecordsEqual(sourceReads, convertedReads) <NEW_LINE> <DEDENT> <DEDENT> def testSamConversion(self): <NEW_LINE> <INDENT> dataset = self._backend.getDatasetByIndex(0) <NEW_LINE> readGroupSet = dataset.getReadGroupSetByIndex(0) <NEW_LINE> referenceSet = readGroupSet.getReferenceSet() <NEW_LINE> for reference in 
referenceSet.getReferences(): <NEW_LINE> <INDENT> for readGroup in readGroupSet.getReadGroups(): <NEW_LINE> <INDENT> self.verifyFullConversion(readGroupSet, readGroup, reference) | Tests for the GA4GH reads API -> SAM conversion. | 62598fae57b8e32f5250811d |
class AWSAccountAlias(models.Model): <NEW_LINE> <INDENT> account_id = models.CharField(max_length=50, null=False, unique=True) <NEW_LINE> account_alias = models.CharField(max_length=63, null=True) | The alias table for AWS accounts. | 62598fae498bea3a75a57b23 |
class TestUserInfoToUser(TestCase): <NEW_LINE> <INDENT> def test_empty(self): <NEW_LINE> <INDENT> with self.assertRaises(ValueError): <NEW_LINE> <INDENT> userInfo_to_user(None) <NEW_LINE> <DEDENT> <DEDENT> def test_is_user_info(self): <NEW_LINE> <INDENT> userInfo = MagicMock() <NEW_LINE> userInfo.user = 'This is not a user' <NEW_LINE> r = userInfo_to_user(userInfo) <NEW_LINE> self.assertEqual(userInfo.user, r) <NEW_LINE> <DEDENT> def test_is_user(self): <NEW_LINE> <INDENT> user = MagicMock() <NEW_LINE> del(user.user) <NEW_LINE> r = userInfo_to_user(user) <NEW_LINE> self.assertEqual(user, r) | Test the ``userInfo_to_user`` function | 62598fae851cf427c66b82bf |
class Disk(_messages.Message): <NEW_LINE> <INDENT> class TypeValueValuesEnum(_messages.Enum): <NEW_LINE> <INDENT> TYPE_UNSPECIFIED = 0 <NEW_LINE> PERSISTENT_HDD = 1 <NEW_LINE> PERSISTENT_SSD = 2 <NEW_LINE> LOCAL_SSD = 3 <NEW_LINE> <DEDENT> autoDelete = _messages.BooleanField(1) <NEW_LINE> mountPoint = _messages.StringField(2) <NEW_LINE> name = _messages.StringField(3) <NEW_LINE> readOnly = _messages.BooleanField(4) <NEW_LINE> sizeGb = _messages.IntegerField(5, variant=_messages.Variant.INT32) <NEW_LINE> source = _messages.StringField(6) <NEW_LINE> type = _messages.EnumField('TypeValueValuesEnum', 7) | A Google Compute Engine disk resource specification.
Enums:
TypeValueValuesEnum: Required. The type of the disk to create.
Fields:
autoDelete: Specifies whether or not to delete the disk when the pipeline
completes. This field is applicable only for newly created disks. See ht
tps://cloud.google.com/compute/docs/reference/latest/instances#resource
for more details. By default, `autoDelete` is `false`. `autoDelete` will
be enabled if set to `true` at create time or run time.
mountPoint: Required at create time and cannot be overridden at run time.
Specifies the path in the docker container where files on this disk
should be located. For example, if `mountPoint` is `/mnt/disk`, and the
parameter has `localPath` `inputs/file.txt`, the docker container can
access the data at `/mnt/disk/inputs/file.txt`.
name: Required. The name of the disk that can be used in the pipeline
parameters. Must be 1 - 63 characters. The name "boot" is reserved for
system use.
readOnly: Specifies how a sourced-base persistent disk will be mounted.
See https://cloud.google.com/compute/docs/disks/persistent-
disks#use_multi_instances for more details. Can only be set at create
time.
sizeGb: The size of the disk. Defaults to 500 (GB). This field is not
applicable for local SSD.
source: The full or partial URL of the persistent disk to attach. See
https://cloud.google.com/compute/docs/reference/latest/instances#resourc
e and https://cloud.google.com/compute/docs/disks/persistent-
disks#snapshots for more details.
type: Required. The type of the disk to create. | 62598fae7d847024c075c3c7 |
class dataFrameCursor(): <NEW_LINE> <INDENT> def __init__(self, dataframes, table_names ): <NEW_LINE> <INDENT> self.tables = {} <NEW_LINE> for df, name in zip(dataframes,table_names): <NEW_LINE> <INDENT> self.tables[name] = df <NEW_LINE> <DEDENT> <DEDENT> def dictionary_iterator(self, table_name): <NEW_LINE> <INDENT> for index, row in self.tables[table_name].iterrows(): <NEW_LINE> <INDENT> yield row <NEW_LINE> <DEDENT> <DEDENT> def execute(self, sql, table): <NEW_LINE> <INDENT> assert(sql=='''select * from ?''') <NEW_LINE> self.next_table = table <NEW_LINE> <DEDENT> def fetchall(self): <NEW_LINE> <INDENT> return self.dictionary_iterator(self.next_table) | wrapper for dataframe to use same API as DictCursor for databases | 62598fae66656f66f7d5a3f4 |
class DBSessionTask(celery.Task): <NEW_LINE> <INDENT> def after_return(self, status, retval, task_id, args, kwargs, einfo): <NEW_LINE> <INDENT> db_session.remove() | A Celery Task that ensures that the connection the the
database is closed when the task is done
The db_session is scoped, therefore thread-local | 62598faebe8e80087fbbf068 |
class PluginLoader(object):
    """PluginLoader loads plugins from the best source.

    It searches for plugins by iterating through the combined list of play
    basedirs, configured paths, and the installed package directory.  The
    first match is used.
    """

    def __init__(self, class_name, package, config, subdir, aliases=None):
        # class_name: attribute looked up in a loaded module ('' selects
        #     suffix-less, module-style plugins).
        # package: dotted package whose install directory is also searched.
        # config: os.pathsep-separated list of extra configured paths.
        # subdir: subdirectory appended to each basedir.
        # aliases: optional mapping of alternate plugin names.
        self.class_name = class_name
        self.package = package
        self.config = config
        self.subdir = subdir
        # Bug fix: the default used to be a shared mutable dict ({}), so
        # mutating one loader's aliases leaked into every other instance.
        self.aliases = {} if aliases is None else aliases
        self._module_cache = {}
        self._extra_dirs = []

    def _get_package_path(self):
        """Locate (and cache) the on-disk directory of self.package."""
        if not self.package:
            return []
        if not hasattr(self, 'package_path'):
            m = __import__(self.package)
            parts = self.package.split('.')[1:]
            self.package_path = os.path.join(os.path.dirname(m.__file__), *parts)
        return [self.package_path]

    def _get_paths(self):
        """Return the ordered list of directories to search."""
        ret = []
        # Bundled library dir first, then explicitly added dirs.
        ret += ['%s/library/' % os.path.dirname(os.path.dirname(__file__))]
        ret += self._extra_dirs
        for basedir in _basedirs:
            fullpath = os.path.join(basedir, self.subdir)
            if fullpath not in ret:
                ret.append(fullpath)
        ret += self.config.split(os.pathsep)
        ret += self._get_package_path()
        return ret

    def add_directory(self, directory):
        """Append an extra search directory (ignored if None)."""
        if directory is not None:
            self._extra_dirs.append(directory)

    def print_paths(self):
        """Return the search paths as a deduplicated os.pathsep-joined string."""
        ret = []
        for i in self._get_paths():
            if i not in ret:
                ret.append(i)
        return os.pathsep.join(ret)

    def find_plugin(self, name):
        """Return the full path of the first matching plugin file, or None."""
        # Class-style plugins live in .py files; module-style have no suffix.
        suffix = ".py"
        if not self.class_name:
            suffix = ""
        for i in self._get_paths():
            path = os.path.join(i, "%s%s" % (name, suffix))
            if os.path.exists(path):
                return path
        return None

    def has_plugin(self, name):
        """Whether a plugin with this name can be found on the search paths."""
        return self.find_plugin(name) is not None

    __contains__ = has_plugin

    def get(self, name, *args, **kwargs):
        """Load (with caching) and instantiate the named plugin class.

        Returns None when no plugin file is found.
        """
        if name in self.aliases:
            name = self.aliases[name]
        path = self.find_plugin(name)
        if path is None:
            return None
        if path not in self._module_cache:
            self._module_cache[path] = imp.load_source('.'.join([self.package, name]), path)
        return getattr(self._module_cache[path], self.class_name)(*args, **kwargs)

    def all(self, *args, **kwargs):
        """Yield an instance of every plugin found on the search paths."""
        for i in self._get_paths():
            for path in glob.glob(os.path.join(i, "*.py")):
                name, ext = os.path.splitext(os.path.basename(path))
                if name.startswith("_"):
                    # Underscore-prefixed modules are private helpers, not plugins.
                    continue
                if path not in self._module_cache:
                    self._module_cache[path] = imp.load_source('.'.join([self.package, name]), path)
                yield getattr(self._module_cache[path], self.class_name)(*args, **kwargs)
class NumDiff(Func):
    """Implements |.fprime| method by numerical differentiation."""

    # Default step size handed to dfridr (Ridders-style extrapolated
    # finite differences — presumably; verify against dfridr's docs).
    __h = 0.001

    def __init__(self, f=None, h=0.001):
        # f: callable to differentiate; if omitted, a subclass is expected
        #    to provide self.f.
        # h: initial finite-difference step size.
        if f is not None:
            self.f = f
        self.__h = h

    def d(self, x, dx):
        """Directional derivative of f at x along the (unnormalized) dx."""
        x = asarray(x)
        dx = asarray(dx)
        d = linalg.norm(dx)
        if d > 0.0:
            # Differentiate along the unit direction dx/d, then rescale below.
            fs = lambda s: self.f(x + (s/d) * dx)
        else:
            # Zero-length direction: derivative is 0; avoid dividing by zero.
            fs = lambda s: self.f(x + s * dx)
        fsprime, err = dfridr(fs, 0.0, h=self.__h)
        return fsprime * d

    def fprime(self, x):
        """Numerical derivative of f at x.

        For a float x this returns a scalar derivative; otherwise x is
        treated as an array and a full Jacobian-like array of shape
        f(x).shape + x.shape is built column by column.
        """
        f = lambda x: asarray(self.f(x))
        if type(x) == type(1.0):
            # Scalar argument: plain one-dimensional derivative.
            dfdx, err = dfridr(f, x, h=self.__h)
            # NOTE(review): hard assert on the extrapolation error estimate;
            # 1e-12 relative tolerance looks very tight — confirm intent.
            assert err <= abs(dfdx) * 1.e-12
            return dfdx
        else:
            x = asarray(x)
            xshape = x.shape
            xsize = x.size
            # Evaluate once to learn the output shape.
            fx = asarray(f(x))
            fshape = fx.shape
            # Work on a flat view so each component can be perturbed by index.
            xwork = array(x)
            xview = xwork.reshape(-1)
            def func(y, n):
                # Perturb component n by y, evaluate, then restore it.
                old = xview[n]
                xview[n] = old + y
                fx = f(xwork)
                xview[n] = old
                return fx
            fwork = empty(fshape + xshape)
            fview = fwork.reshape(fshape + (xsize,))
            for n in range(xsize):
                # Partial derivative with respect to component n.
                g = lambda y: func(y, n)
                gprime, err = dfridr(g, 0.0, h=self.__h)
                fview[..., n] = gprime
            return fwork
class StaticCompositor(object):
    """A singleton page compositor.

    Handles static pages that have no automated relationship between
    each other.
    """

    def __init__(self, dryrun, verbose):
        self.dryrun = dryrun
        self.verbose = verbose
        self.source_dir = ''
        self.timestamp_helper = TimestampCompositorHelper(dryrun, verbose)
        # Maps absolute source path -> template used to render it.
        self.pages = {}

    def composite(self, filename, template):
        """Register a page for later rendering via write()."""
        if self.dryrun:
            print('StaticCompositor: ({fn})'.format(fn=filename))
            return
        # Lazily capture the working directory on the first real composite.
        if not self.source_dir:
            self.source_dir = os.getcwd()
        source_path = self.source_dir + '/' + filename
        self.pages[source_path] = template
        self.timestamp_helper.composite(filename)

    def write(self):
        """Render every registered page whose timestamp requires it."""
        if self.dryrun:
            return

        def static_render(dest_filename, source_filename):
            # Render the page's template with its spec values, then write it out.
            values = read_page_spec(source_filename)
            rendered = self.pages[source_filename].render(values)
            with open(dest_filename, 'w') as page_fp:
                page_fp.write(rendered)

        self.timestamp_helper.write(static_render)
class FadeToplevel(tk.Toplevel):
    """A toplevel widget with the ability to fade in.

    The window starts fully transparent; call fade_in() to ramp opacity up.
    """

    def __init__(self, *args, **kwargs):
        tk.Toplevel.__init__(self, *args, **kwargs)
        # Start invisible; fade_in() raises alpha step by step.
        self.attributes("-alpha", 0.0)

    def fade_in(self):
        """Increase opacity by 0.01 and re-schedule until fully opaque."""
        current = min(self.attributes("-alpha") + .01, 1.0)
        self.attributes("-alpha", current)
        if current < 1.0:
            # One step every 10 ms until alpha reaches 1.0.
            self.after(10, self.fade_in)
class PathParser():
    """路径字符串解析 (path-string parsing helpers).

    Splits bucket-style paths ("bucket/dir/sub/file") into their bucket,
    directory, and filename components, and builds breadcrumb trails.
    """

    def __init__(self, filepath, *args, **kwargs):
        # Non-string inputs degrade to the empty path.
        self._path = filepath if isinstance(filepath, str) else ''

    def get_path_and_filename(self):
        """Return (directory part, filename) of the stored path."""
        if not self._path:
            return ('', '')
        head, _, tail = self._path.rpartition('/')
        return (head, tail)

    def get_bucket_path_and_filename(self):
        """Return (bucket, inner path, filename)."""
        bucket_path, filename = self.get_path_and_filename()
        if not bucket_path:
            return ('', '', filename)
        bucket_name, _, inner = bucket_path.partition('/')
        return (bucket_name, inner, filename)

    def get_bucket_and_dirpath(self):
        """Return (bucket, everything after the bucket), slashes stripped."""
        stripped = self._path.strip('/')
        if not stripped:
            return ('', '')
        bucket_name, _, dirpath = stripped.partition('/')
        return (bucket_name, dirpath)

    def get_bucket_path_and_dirname(self):
        """Return (bucket, parent path, last directory component)."""
        bucket_name, dirpath = self.get_bucket_and_dirpath()
        if not dirpath:
            return (bucket_name, '', '')
        parent, _, dirname = dirpath.rpartition('/')
        return (bucket_name, parent, dirname)

    def get_path_breadcrumb(self, path=None):
        """Return [[component, cumulative-path], ...] for each component."""
        target = self._path if path is None else path
        if target == '':
            return []
        parts = target.strip('/').split('/')
        return [[part, '/'.join(parts[:i + 1])] for i, part in enumerate(parts)]
class AttrDict(dict):
    """A dictionary allowing to retrieve values by class attribute syntax.

    Additionally supports `to_yaml()` / `to_json()` serialization and `md5`
    hashing of the sorted (key, value) contents.
    """

    @staticmethod
    def _convert_fun_(x):
        """Map a callable to its __name__; pass any other value through.

        Bug fix: previously defined without @staticmethod, so calling it on
        an instance silently bound the instance as ``x``.
        """
        if hasattr(x, "__call__"):
            return x.__name__
        else:
            return x

    def __init__(self, *args, **kwargs):
        super(AttrDict, self).__init__(*args, **kwargs)
        # Alias the attribute namespace to the mapping itself so that
        # d.key and d['key'] are interchangeable.
        self.__dict__ = self

    @classmethod
    def load(cls, filename):
        """Build an AttrDict from a YAML file.

        Bug fix: ``yaml.load(fh)`` without a Loader is rejected by
        PyYAML >= 6 (and warns since 5.1); use safe_load.  An empty file
        yields None, which is mapped to an empty dict instead of crashing
        on ``**None``.
        """
        obj = cls.__new__(cls)
        with open(filename) as fh:
            kwargs = yaml.safe_load(fh) or {}
            super(AttrDict, obj).__init__(**kwargs)
            obj.__dict__ = obj
            return obj

    def to_yaml(self, filename, use_pyaml=True):
        """Serialize to YAML.

        Prefers pyaml's prettier output; falls back to plain PyYAML when
        pyaml cannot represent a contained value.
        """
        with open(filename, "w+") as outfh:
            if use_pyaml:
                try:
                    pyaml.dump(self.__dict__, outfh)
                except yaml.representer.RepresenterError:
                    yaml.dump(self.__dict__, outfh, default_flow_style=False)
            else:
                yaml.dump(self.__dict__, outfh, default_flow_style=False)

    def to_json(self, filename):
        """Serialize the contents to a JSON file."""
        with open(filename, "w+") as outfh:
            json.dump(self.__dict__, outfh)

    @property
    def md5(self):
        """Hex MD5 digest of the sorted (key, value) representation.

        Sorting the keys makes the digest independent of insertion order.
        """
        txt = str([(kk, self.__dict__[kk]) for kk in sorted(self.__dict__.keys())])
        return md5(txt.encode()).hexdigest()
class ETF(Asset, IndexConstituentProvider):
    """ETF Asset.

    ETF which tracks an evolving portfolio of securities, and can be traded
    on exchange.
    """

    def __init__(self, id_: str, asset_class: AssetClass, name: str, exchange=None):
        # Initialise both bases explicitly (cooperative super() is not used
        # here); both receive the same identifier.
        Asset.__init__(self, id_, asset_class, name, exchange)
        IndexConstituentProvider.__init__(self, id_)

    def get_type(self) -> AssetType:
        """Return the asset type discriminator (always AssetType.ETF)."""
        return AssetType.ETF
class ContestWebServer(WebService):
    """Service that runs the web server serving the contestants."""

    def __init__(self, shard, contest_id=None):
        # Tornado/WSGI application parameters; the cookie secret comes from
        # the global CMS configuration.
        parameters = {
            "static_files": [("cms.server", "static"),
                             ("cms.server.contest", "static")],
            "cookie_secret": hex_to_bin(config.secret_key),
            "debug": config.tornado_debug,
            "is_proxy_used": config.is_proxy_used,
            "num_proxies_used": config.num_proxies_used,
            "xsrf_cookies": True,
        }
        try:
            # Per-shard listen address/port; missing entries mean the
            # configuration lists fewer shards than requested.
            listen_address = config.contest_listen_address[shard]
            listen_port = config.contest_listen_port[shard]
        except IndexError:
            raise ConfigError("Wrong shard number for %s, or missing "
                              "address/port configuration. Please check "
                              "contest_listen_address and contest_listen_port "
                              "in cms.conf." % __name__)
        self.contest_id = contest_id
        if self.contest_id is None:
            # Multi-contest mode: every handler is re-prefixed with a
            # contest-name capture group, plus a contest-list root page.
            HANDLERS.append((r"", MainHandler))
            handlers = [(r'/', ContestListHandler)]
            for h in HANDLERS:
                handlers.append((r'/([^/]+)' + h[0],) + h[1:])
        else:
            # Single-contest mode: handlers are served at the root.
            HANDLERS.append((r"/", MainHandler))
            handlers = HANDLERS
        super().__init__(
            listen_port, handlers, parameters, shard=shard,
            listen_address=listen_address)
        # Serve statement (STL) files directly, with aggressive caching.
        self.wsgi_app = SharedDataMiddleware(
            self.wsgi_app, {"/stl": config.stl_path},
            cache=True, cache_timeout=SECONDS_IN_A_YEAR,
            fallback_mimetype="application/octet-stream")
        self.jinja2_environment = CWS_ENVIRONMENT
        # Pending notifications, keyed by username; see add_notification().
        self.notifications = {}
        self.translations = get_translations()
        self.evaluation_service = self.connect_to(
            ServiceCoord("EvaluationService", 0))
        self.scoring_service = self.connect_to(
            ServiceCoord("ScoringService", 0))
        # ProxyService is only mandatory when rankings are configured.
        ranking_enabled = len(config.rankings) > 0
        self.proxy_service = self.connect_to(
            ServiceCoord("ProxyService", 0),
            must_be_present=ranking_enabled)
        # PrintingService is only mandatory when a printer is configured.
        printing_enabled = config.printer is not None
        self.printing_service = self.connect_to(
            ServiceCoord("PrintingService", 0),
            must_be_present=printing_enabled)

    def add_notification(self, username, timestamp, subject, text, level):
        """Queue a notification to be delivered to the given user."""
        if username not in self.notifications:
            self.notifications[username] = []
        self.notifications[username].append((timestamp, subject, text, level))
class AyxPlugin:
    """Implements the plugin interface methods, to be utilized by the Alteryx
    engine to communicate with this plugin.

    Prefixed with "pi", the Alteryx engine will expect the below five
    interface methods to be defined.
    """

    def __init__(self, n_tool_id: int, alteryx_engine: object, output_anchor_mgr: object):
        # Engine-supplied identifiers and handles for this tool instance.
        self.n_tool_id = n_tool_id
        self.alteryx_engine = alteryx_engine
        self.output_anchor_mgr = output_anchor_mgr
        self.is_initialized = True
        self.output_anchor = None
        # Text written into each generated output column.
        self.output_text = ['InfoLab']
        # Number of output columns; populated from the tool XML in pi_init.
        self.n_columns = None

    def pi_init(self, str_xml: str):
        """Resolve the outgoing anchor and read NColumns from the tool XML."""
        self.output_anchor = self.output_anchor_mgr.get_output_anchor('Output')
        self.n_columns = int(Et.fromstring(str_xml).find('NColumns').text) if 'NColumns' in str_xml else None
        pass

    def pi_add_incoming_connection(self, str_type: str, str_name: str) -> object:
        # This object itself handles the incoming record stream.
        return self

    def pi_add_outgoing_connection(self, str_name: str) -> bool:
        # Accept all outgoing connections.
        return True

    def pi_push_all_records(self, n_record_limit: int) -> bool:
        """Build the output schema and push one record downstream."""
        record_info_out = self.build_record_info_out()
        self.output_anchor.init(record_info_out)
        record_creator = record_info_out.construct_record_creator()
        # Fill every output field with the (repeated) output text.
        for field in enumerate(self.output_text*self.n_columns):
            record_info_out[field[0]].set_from_string(record_creator, field[1])
        out_record = record_creator.finalize_record()
        self.output_anchor.push_record(out_record, False)
        self.alteryx_engine.output_message(self.n_tool_id, Sdk.EngineMessageType.info, self.xmsg("One Record"))
        self.output_anchor.close()
        return True

    def pi_close(self, b_has_errors: bool):
        # Verify the output anchor was closed properly.
        self.output_anchor.assert_close()

    def build_record_info_out(self):
        """Create the outgoing RecordInfo with n_columns string(254) fields."""
        record_info_out = Sdk.RecordInfo(self.alteryx_engine)
        for i in range(self.n_columns):
            record_info_out.add_field('NewText_'+str(i), Sdk.FieldType.string, 254)
        return record_info_out

    def display_error_msg(self, msg_string: str):
        """Report an error to the engine and mark the tool uninitialized."""
        self.is_initialized = False
        self.alteryx_engine.output_message(self.n_tool_id, Sdk.EngineMessageType.error, self.xmsg(msg_string))

    def xmsg(self, msg_string: str):
        # Hook for message translation; currently a pass-through.
        return msg_string
class DistributiveMagmasAndAdditiveMagmas(CategoryWithAxiom):
    r"""
    The category of sets `(S,+,*)` with `*` distributing on `+`.

    This is similar to a ring, but `+` and `*` are only required to be
    (additive) magmas.

    EXAMPLES::

        sage: from sage.categories.distributive_magmas_and_additive_magmas import DistributiveMagmasAndAdditiveMagmas
        sage: C = DistributiveMagmasAndAdditiveMagmas(); C
        Category of distributive magmas and additive magmas
        sage: C.super_categories()
        [Category of magmas and additive magmas]

    TESTS::

        sage: from sage.categories.magmas_and_additive_magmas import MagmasAndAdditiveMagmas
        sage: C is MagmasAndAdditiveMagmas().Distributive()
        True
        sage: C is (Magmas() & AdditiveMagmas()).Distributive()
        True
        sage: TestSuite(C).run()
    """

    # Nested CategoryWithAxiom classes encode the axiom lattice: each level
    # adds one axiom on top of the enclosing category.
    class AdditiveAssociative(CategoryWithAxiom):
        class AdditiveCommutative(CategoryWithAxiom):
            class AdditiveUnital(CategoryWithAxiom):
                class Associative(CategoryWithAxiom):
                    # Lazily imported to avoid import cycles at startup:
                    # adding AdditiveInverse yields Rngs, adding Unital
                    # yields Semirings.
                    AdditiveInverse = LazyImport('sage.categories.rngs', 'Rngs', at_startup=True)
                    Unital = LazyImport('sage.categories.semirings', 'Semirings', at_startup=True)

    class ParentMethods:
        def _test_distributivity(self, **options):
            """Test that `*` distributes over `+` on sampled element triples."""
            tester = self._tester(**options)
            S = tester.some_elements()
            from sage.misc.misc import some_tuples
            for x, y, z in some_tuples(tester.some_elements(), 3, tester._max_runs):
                # Left and right distributivity.
                tester.assertTrue(x * (y + z) == (x * y) + (x * z))
                tester.assertTrue((x + y) * z == (x * z) + (y * z))

    class CartesianProducts(CartesianProductsCategory):
        def extra_super_categories(self):
            """Cartesian products of such structures are again such structures."""
            return [DistributiveMagmasAndAdditiveMagmas()]
class ApplicationHandler(BaseHandler):
    """处理application表的handle (CRUD handlers for the application table)."""

    def get(self, application_id=''):
        """List all applications, or fetch one by id."""
        if application_id == '':
            result = application.db_get_application_list()
        else:
            result = application.db_get_application_id(application_id)
        self.write(json.dumps(result))

    def post(self, *args, **kwargs):
        """Insert a batch of applications from the JSON request body."""
        records = json.loads(self.request.body.decode('utf-8'))
        # Validate every record first, then check for duplicates, before
        # inserting anything.
        for record in records:
            if 'name' not in record.keys():
                raise HTTPError(400, reason="KeyError", log_message="key not exist")
        for record in records:
            if application.db_get_id_application(record['name']):
                raise HTTPError(400, reason="InsertDataError", log_message="data exist")
        inserted = application.db_insert_application(records)
        payload = [tool.language2dict(item) for item in inserted]
        self.write(json.dumps(payload))

    def delete(self, application_id=None):
        """Delete one application; echoes the record as it was before deletion."""
        if application_id is None:
            raise HTTPError(405)
        existing = application.db_get_application_id(application_id)
        if not existing:
            raise HTTPError(400, reason="FoundDataError", log_message="not found data")
        application.db_delete_application(application_id)
        self.write(json.dumps(existing))

    def put(self, application_id=None):
        """Update one application's name/description."""
        if application_id is None:
            raise HTTPError(405)
        body = json.loads(self.request.body.decode('utf-8'))
        if 'name' not in body.keys():
            raise HTTPError(400, reason="KeyError", log_message="key not exist")
        if not application.db_get_application_id(application_id):
            raise HTTPError(400, reason="FoundDataError", log_message="not found data")
        updated = application.db_update_application(
            application_id, body['name'], body['description'])
        self.write(json.dumps(tool.language2dict(updated)))

    def write_error(self, status_code, **kwargs):
        """Render HTTPErrors as a structured JSON error body."""
        exc_info = kwargs.get("exc_info", None)
        exc = exc_info[1]
        if isinstance(exc, HTTPError):
            self.write({
                "RequestId": 'a',
                "Error": {
                    "Code": exc.reason,
                    "Message": exc.log_message,
                    "Type": "Response",
                },
            })
            self.finish()

    def patch(self, *args, **kwargs):
        # PATCH is not supported for this resource.
        raise HTTPError(405)
class Any(Content):
    """Represents an (xsd) <any/> node."""

    def __init__(self, schema, root):
        Content.__init__(self, schema, root)
        # Occurrence bounds, as strings, defaulted per the XSD spec.
        self.min = root.get('minOccurs', default='0')
        self.max = root.get('maxOccurs', default='1')

    def get_child(self, name):
        """Synthesize a single optional <any/> child for the given name."""
        clone = self.root.clone()
        clone.set('minOccurs', '0')
        clone.set('maxOccurs', '1')
        clone.set('note', 'synthesized (any) child')
        return (Any(self.schema, clone), [])

    def get_attribute(self, name):
        """Synthesize an <any/> attribute for the given name."""
        clone = self.root.clone()
        clone.set('note', 'synthesized (any) attribute')
        return (Any(self.schema, clone), [])

    def any(self):
        """This node matches anything."""
        return True

    def unbounded(self):
        """Whether more than one occurrence is allowed."""
        if self.max.isdigit():
            return int(self.max) > 1
        return self.max == 'unbounded'
class PythonParameters(CallbackServerParameters):
    """Wrapper class that contains all parameters that can be passed to
    configure a `ClientServer`.
    """

    def __init__(
            self, address=DEFAULT_ADDRESS, port=DEFAULT_PYTHON_PROXY_PORT,
            daemonize=False, daemonize_connections=False, eager_load=True,
            ssl_context=None, auto_gc=False,
            accept_timeout=DEFAULT_ACCEPT_TIMEOUT_PLACEHOLDER,
            read_timeout=None,):
        # Forward everything except auto_gc to the py4j base class;
        # auto_gc (run gc.collect on message exchange) is specific to
        # this wrapper.
        super(PythonParameters, self).__init__(
            address, port, daemonize, daemonize_connections, eager_load,
            ssl_context, accept_timeout, read_timeout)
        self.auto_gc = auto_gc
class Tnode(object):
    """Abstract class for Root, Vdev and Dev tree nodes."""

    # Class-level defaults; instances override most of these in __init__.
    name = None
    children = None
    parent = None
    type = None
    status = None

    def __init__(self, name, doc, **kwargs):
        self._doc = doc
        self.name = name
        self.children = []
        self.status = kwargs.pop('status', None)
        # Read/write/checksum error counters.
        self.read = kwargs.pop('read', 0)
        self.write = kwargs.pop('write', 0)
        self.cksum = kwargs.pop('cksum', 0)

    def find_by_name(self, name):
        """Return the direct child with the given name, or None."""
        return next((child for child in self.children if child.name == name), None)

    def find_not_online(self):
        """Return all leaf descendants whose status is not ONLINE/AVAIL."""
        if not self.children and self.status not in ('ONLINE', 'AVAIL'):
            return [self]
        unavailable = []
        for child in self.children:
            unavailable.extend(child.find_not_online())
        return unavailable

    def append(self, tnode):
        raise NotImplementedError

    @staticmethod
    def pprint(node, level=0):
        """Print the subtree rooted at node, indented by depth."""
        print(' ' * level + node.name)
        for child in node.children:
            node.pprint(child, level + 1)

    def __iter__(self):
        # Iterate over a snapshot so callers may mutate children mid-loop.
        yield from list(self.children)

    def validate(self):
        raise NotImplementedError

    def dump(self):
        raise NotImplementedError
class WeightedPooling1D(_Pooling1D):
    """Weighted pooling for temporal data with Softmax & learned temperature
    parameter.

    # Input shape
        3D tensor with shape: `(samples, steps, features)`.

    # Output shape
        3D tensor with shape: `(samples, downsampled_steps, features)`.

    # Arguments
        pool_length: factor by which to downscale. 2 will halve the input.
        stride: integer or None. Stride value; must equal pool_length.
        init: initializer for the per-feature temperature (default "one").
        border_mode: 'valid' or 'same'.
            Note: 'same' will only work with TensorFlow for the time being.
    """

    def __init__(self, pool_length=2, stride=None, border_mode='valid', init="one", **kwargs):
        # Bug fix: super() previously named AveragePooling1D, so this class's
        # MRO was bypassed (or a NameError was raised).
        super(WeightedPooling1D, self).__init__(pool_length, stride, border_mode, **kwargs)
        self.init = initializations.get(init)
        # Bug fix: 'pool_lengths' was an undefined name (typo for pool_length).
        assert stride == pool_length, 'for weighted pooling, the pool stride must equal to the pool width'

    def build(self):
        # One learnable temperature per feature channel (input_shape[1]).
        self.tau = self.init((self.input_shape[1],))
        self.trainable_weights = [self.tau]

    def _pooling_function(self, inputs, pool_size, strides, border_mode, dim_ordering):
        # Pool along the last axis ('th' ordering) or axis 2 ('tf' ordering).
        pool_axis = -1
        if dim_ordering == "tf":
            pool_axis = 2
        # Softmax over the pooling window, sharpened by the learned
        # temperature tau, then a weighted average of the inputs.
        t_denominator = K.sum(K.exp(inputs / self.tau[None, :, None]), axis=pool_axis)
        t_softmax = K.exp(inputs / self.tau[None, :, None]) / t_denominator[:, :, None]
        t_weighted_average = K.sum(t_softmax * inputs, axis=pool_axis)
        return t_weighted_average[:, :, None]
class IntelligentGameRound(models.Model):
    """One round of an intelligent game.  Only for Team Match."""

    _name = "og.igame.round"
    _description = "iGame Round"
    _rec_name = 'number'
    _order = 'number'

    igame_id = fields.Many2one('og.igame', 'Game')
    # Display name mirrors the parent game's name.
    name = fields.Char('Name', related='igame_id.name')
    number = fields.Integer('Number')
    # Alias of 'number' kept for backward compatibility with existing views.
    round = fields.Integer('Number', related='number')
    start_time = fields.Datetime('Start Time', required=True, DATETIME_FORMAT="%Y-%m-%d %H:%M:%S", help="Start Time")
    over_time = fields.Datetime('Over Time', required=True, DATETIME_FORMAT="%Y-%m-%d %H:%M:%S", help="Over Time")
    deal_ids = fields.Many2many('og.deal', string='Deals')
    team_line_ids = fields.One2many('og.igame.team.line', 'round_id')
    # Tables in use this round, derived from the team lines' matches.
    table_ids = fields.Many2many('og.table', compute='_compute_table')

    @api.multi
    def _compute_table(self):
        """Collect the open-room and close-room tables of all matches."""
        for rec in self:
            matchs = rec.team_line_ids.mapped('match_id')
            open = matchs.mapped('open_table_id')
            close = matchs.mapped('close_table_id')
            # Recordset union of both rooms' tables.
            rec.table_ids = open | close
class LabelWidget(Widget):
    """Widget for LabelField: renders a plain label instead of an input."""

    template_name = 'dynamic_forms/widgets/label.html'

    def get_context(self, name, value, attrs):
        ctx = super().get_context(name, value, attrs)
        widget_attrs = ctx['widget']['attrs']
        # A label is not a form control, so strip Bootstrap's input class.
        if 'class' in widget_attrs:
            widget_attrs['class'] = widget_attrs['class'].replace('form-control', '')
        return ctx
class NotJSONifiableError(DokomoError):
    """The jsonify function encountered a strange object."""
    pass
class EndpointsAuthTest(test_case.TestCase):
    """Tests for auth.endpoints_support.initialize_request_auth function."""

    def setUp(self):
        super(EndpointsAuthTest, self).setUp()
        # Silence expected error logging during these tests.
        self.mock(endpoints_support.logging, 'error', lambda *_args: None)

    def call(self, remote_address, email, headers=None):
        """Run request-auth initialization as if the request came from the
        given address/email and return the resulting identity string."""
        class User(object):
            def email(self):
                return email
        # Make endpoints report the given user (anonymous when email is None).
        self.mock(
            endpoints_support.endpoints, 'get_current_user',
            lambda: User() if email else None)
        api.reset_local_state()
        endpoints_support.initialize_request_auth(remote_address, headers or {})
        return api.get_current_identity().to_bytes()

    def test_ip_whitelist_bot(self):
        # Requests from a 'bots' whitelisted IP become bot identities.
        model.bootstrap_ip_whitelist('bots', ['192.168.1.100/32'])
        self.assertEqual('bot:192.168.1.100', self.call('192.168.1.100', None))
        self.assertEqual('anonymous:anonymous', self.call('127.0.0.1', None))

    def test_ip_whitelist_whitelisted(self):
        # A user assigned to a whitelist is accepted from a listed address.
        model.bootstrap_ip_whitelist('whitelist', ['192.168.1.100/32'])
        model.bootstrap_ip_whitelist_assignment(
            model.Identity(model.IDENTITY_USER, 'a@example.com'), 'whitelist')
        self.assertEqual(
            'user:a@example.com', self.call('192.168.1.100', 'a@example.com'))

    def test_ip_whitelist_not_whitelisted(self):
        # A user assigned to a whitelist must come from a listed address.
        model.bootstrap_ip_whitelist('whitelist', ['192.168.1.100/32'])
        model.bootstrap_ip_whitelist_assignment(
            model.Identity(model.IDENTITY_USER, 'a@example.com'), 'whitelist')
        with self.assertRaises(api.AuthorizationError):
            self.call('127.0.0.1', 'a@example.com')

    def test_ip_whitelist_not_used(self):
        # Users not assigned to any whitelist are unaffected by it.
        model.bootstrap_ip_whitelist('whitelist', ['192.168.1.100/32'])
        model.bootstrap_ip_whitelist_assignment(
            model.Identity(model.IDENTITY_USER, 'a@example.com'), 'whitelist')
        self.assertEqual(
            'user:another_user@example.com',
            self.call('127.0.0.1', 'another_user@example.com'))

    def test_get_current_identity_ip(self):
        # The caller's IP is captured alongside the identity.
        self.call('1.2.3.4', 'user@example.com')
        self.assertEqual(
            ipaddr.ip_from_string('1.2.3.4'), api.get_current_identity_ip())

    def test_get_current_identity_host(self):
        # A valid host token header attaches the host name to the identity.
        tok = host_token.create_host_token('host-name.domain')
        self.call('127.0.0.1', 'user@example.com', headers={'X-Host-Token-V1': tok})
        self.assertEqual('host-name.domain', api.get_current_identity_host())
class AvailableEntitlementsCache(CacheManager): <NEW_LINE> <INDENT> BETA = 2.0 <NEW_LINE> LBOUND = 5.0 <NEW_LINE> UBOUND = 10.0 <NEW_LINE> CACHE_FILE = "/var/lib/rhsm/cache/available_entitlements.json" <NEW_LINE> def __init__(self, available_entitlements=None): <NEW_LINE> <INDENT> self.available_entitlements = available_entitlements or {} <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> return self.available_entitlements <NEW_LINE> <DEDENT> def timeout(self): <NEW_LINE> <INDENT> uep = inj.require(inj.CP_PROVIDER).get_consumer_auth_cp() <NEW_LINE> if uep.conn.smoothed_rt is not None: <NEW_LINE> <INDENT> smoothed_rt = uep.conn.smoothed_rt <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> smoothed_rt = 0.0 <NEW_LINE> <DEDENT> return min(self.UBOUND, max(self.LBOUND, self.BETA * smoothed_rt)) <NEW_LINE> <DEDENT> def get_not_obsolete_data(self, identity, filter_options): <NEW_LINE> <INDENT> data = self.read_cache_only() <NEW_LINE> available_pools = {} <NEW_LINE> if data is not None: <NEW_LINE> <INDENT> if identity.uuid in data: <NEW_LINE> <INDENT> cached_data = data[identity.uuid] <NEW_LINE> if cached_data['filter_options'] == filter_options: <NEW_LINE> <INDENT> log.debug('timeout: %s, current time: %s' % (cached_data['timeout'], time.time())) <NEW_LINE> if cached_data['timeout'] > time.time(): <NEW_LINE> <INDENT> log.debug('Using cached list of available entitlements') <NEW_LINE> available_pools = cached_data['pools'] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> log.debug('Cache of available entitlements timed-out') <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> log.debug('Cache of available entitlements does not contain given filter options') <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return available_pools <NEW_LINE> <DEDENT> def _load_data(self, open_file): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.available_entitlements = json.loads(open_file.read()) or {} <NEW_LINE> return self.available_entitlements <NEW_LINE> <DEDENT> except IOError as err: 
<NEW_LINE> <INDENT> log.error("Unable to read cache: %s" % self.CACHE_FILE) <NEW_LINE> log.exception(err) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> pass | Cache of available entitlements | 62598faebaa26c4b54d4f2b9 |
class Post(models.Model): <NEW_LINE> <INDENT> class Meta(): <NEW_LINE> <INDENT> db_table = 'post' <NEW_LINE> <DEDENT> author = models.ForeignKey('auth.User') <NEW_LINE> title = models.CharField(max_length=200) <NEW_LINE> text = models.TextField() <NEW_LINE> created_date = models.DateTimeField(default=timezone.now) <NEW_LINE> published_date = models.DateTimeField(blank=True, null=True) <NEW_LINE> def publish(self): <NEW_LINE> <INDENT> self.published_date = timezone.now() <NEW_LINE> self.save() <NEW_LINE> <DEDENT> def approved_comments(self): <NEW_LINE> <INDENT> return self.comments.filter(approved_comment=True) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.title | Класс создания модели для БД | 62598faebd1bec0571e150c6 |
class StockComment(models.Model): <NEW_LINE> <INDENT> name = models.CharField(max_length=20) <NEW_LINE> comment = models.TextField() <NEW_LINE> permit = models.ForeignKey(Permit,on_delete=models.CASCADE) <NEW_LINE> department = models.ForeignKey(Department,on_delete=models.CASCADE) <NEW_LINE> default = models.BooleanField( help_text="This comment will appear in all new reviews") <NEW_LINE> acknowledge = models.BooleanField(default=True, help_text="Applicant must acknowledge comment") <NEW_LINE> respond = models.BooleanField(default=False, help_text="Applicant must provide a written response") <NEW_LINE> last_modified = models.DateField(auto_now=True) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return self.name | Frequently used comments for reviewers. | 62598fae7d847024c075c3c8 |
class MockGymEnvironment(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.observation_space = 'observation_space' <NEW_LINE> self.action_space = 'action_space' <NEW_LINE> self.reward_range = 'reward_range' <NEW_LINE> self.metadata = 'metadata' <NEW_LINE> <DEDENT> def reset(self): <NEW_LINE> <INDENT> return 'reset' <NEW_LINE> <DEDENT> def step(self, unused_action): <NEW_LINE> <INDENT> return 'obs', 'rew', 'game_over', 'info' | Mock environment for testing. | 62598faeb7558d5895463630 |
class TestMessageFromString(unittest.TestCase): <NEW_LINE> <INDENT> def test(self): <NEW_LINE> <INDENT> m = email.mime.text.MIMEText(u'This is some text', 'plain', 'utf-8') <NEW_LINE> m['Subject'] = 'test' <NEW_LINE> m['From'] = 'me' <NEW_LINE> m['To'] = 'Nobody' <NEW_LINE> message = utils.decrypted_message_from_string(m.as_string()) <NEW_LINE> self.assertEqual(message.get_payload(), 'This is some text') | Tests for decrypted_message_from_string.
Because the implementation is that this is a wrapper around
decrypted_message_from_file, it's not important to have a large swath of
tests, just enough to show that things are being passed correctly. | 62598fae009cb60464d01526 |
class TwitterTimeline(object): <NEW_LINE> <INDENT> def __init__(self, consumer_key, consumer_secret, access_token_key, access_token_secret): <NEW_LINE> <INDENT> self.consumer_key = consumer_key <NEW_LINE> self.consumer_secret = consumer_secret <NEW_LINE> self.access_token_key = access_token_key <NEW_LINE> self.access_token_secret = access_token_secret <NEW_LINE> <DEDENT> def get_api(self) -> TwitterAPI: <NEW_LINE> <INDENT> if self.consumer_key != None and self.consumer_secret != None and self.access_token_key != None and self.access_token_secret != None: <NEW_LINE> <INDENT> return TwitterAPI(self.consumer_key, self.consumer_secret, self.access_token_key, self.access_token_secret) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError("Required enviroment variables not set!") <NEW_LINE> <DEDENT> <DEDENT> def get_timeline(self, count: int = 20, since: int = None, max: int = None) -> TwitterPager: <NEW_LINE> <INDENT> api = self.get_api() <NEW_LINE> if api != None: <NEW_LINE> <INDENT> pager = TwitterPager(api, 'statuses/home_timeline', self.query_params(count, since, max)) <NEW_LINE> return pager <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError("API is not set!") <NEW_LINE> <DEDENT> <DEDENT> def query_params(self, count: int, since: int, max: int): <NEW_LINE> <INDENT> params = {} <NEW_LINE> params['count'] = count <NEW_LINE> params['tweet_mode'] = 'extended' <NEW_LINE> if since != None: <NEW_LINE> <INDENT> params['since_id'] = since <NEW_LINE> <DEDENT> if max != None: <NEW_LINE> <INDENT> params['max_id'] = max <NEW_LINE> <DEDENT> return params | docstring | 62598fae851cf427c66b82c2 |
class PersistentTemporaryFile(object): <NEW_LINE> <INDENT> _file = None <NEW_LINE> def __init__(self, suffix="", prefix="", dir=None, mode='w+b'): <NEW_LINE> <INDENT> if prefix == None: <NEW_LINE> <INDENT> prefix = "" <NEW_LINE> <DEDENT> if dir is None: <NEW_LINE> <INDENT> dir = base_dir() <NEW_LINE> <DEDENT> fd, name = _make_file(suffix, prefix, dir) <NEW_LINE> self._file = os.fdopen(fd, mode) <NEW_LINE> self._name = name <NEW_LINE> self._fd = fd <NEW_LINE> atexit.register(cleanup, name) <NEW_LINE> <DEDENT> def __getattr__(self, name): <NEW_LINE> <INDENT> if name == 'name': <NEW_LINE> <INDENT> return self.__dict__['_name'] <NEW_LINE> <DEDENT> return getattr(self.__dict__['_file'], name) <NEW_LINE> <DEDENT> def __enter__(self): <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> def __exit__(self, *args): <NEW_LINE> <INDENT> self.close() <NEW_LINE> <DEDENT> def __del__(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.close() <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> pass | A file-like object that is a temporary file that is available even after being closed on
all platforms. It is automatically deleted on normal program termination. | 62598fae8a43f66fc4bf2180 |
class ContactUsForm(FlaskForm): <NEW_LINE> <INDENT> name = TextField('Name', [Required(message='Name missing')]) <NEW_LINE> email = TextField('Email Address', [Email(), Required(message='Email address missing')]) <NEW_LINE> phone = IntegerField('Phone Number') <NEW_LINE> subject = TextField('Subject', [Required(message='Subject missing'), Length(max=255)]) <NEW_LINE> message = TextField('Message', [Required(message='message missing')]) | Form for sending messages
| 62598faefff4ab517ebcd7eb |
class WandererClass(PlayerClass): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super().__init__(*args, **kwargs) <NEW_LINE> self.Name = "Wanderer" | Default player class with nothing special. | 62598faed58c6744b42dc2da |
class SimpleFeatWriter(object): <NEW_LINE> <INDENT> def __init__(self, pathForOutput, maxRanking, include_verification=False): <NEW_LINE> <INDENT> self.path = pathForOutput <NEW_LINE> self.maxRanking = maxRanking <NEW_LINE> self.use_verification = include_verification <NEW_LINE> <DEDENT> def writeFeatures(self, seqObject): <NEW_LINE> <INDENT> file2write = open(self.path+seqObject.id+".features", mode="w") <NEW_LINE> for feature in seqObject.features: <NEW_LINE> <INDENT> if "ranking" in feature.qualifiers and feature.qualifiers["ranking"] > self.maxRanking: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> start = feature.location.start <NEW_LINE> end = feature.location.end <NEW_LINE> evalue = "N/A" <NEW_LINE> ranking = "N/A" <NEW_LINE> region = "N/A" <NEW_LINE> note = "N/A" <NEW_LINE> subtype = "N/A" <NEW_LINE> if "evalue" in feature.qualifiers: <NEW_LINE> <INDENT> evalue = feature.qualifiers["evalue"] <NEW_LINE> <DEDENT> if "ranking" in feature.qualifiers: <NEW_LINE> <INDENT> ranking = feature.qualifiers["ranking"] <NEW_LINE> <DEDENT> if "region" in feature.qualifiers: <NEW_LINE> <INDENT> region = feature.qualifiers["region"] <NEW_LINE> <DEDENT> if "note" in feature.qualifiers: <NEW_LINE> <INDENT> note = feature.qualifiers["note"] <NEW_LINE> <DEDENT> if "subtype" in feature.qualifiers: <NEW_LINE> <INDENT> subtype = feature.qualifiers["subtype"] <NEW_LINE> <DEDENT> score = feature.qualifiers["score"] <NEW_LINE> name = feature.qualifiers["name"] <NEW_LINE> type = feature.type <NEW_LINE> if self.use_verification: <NEW_LINE> <INDENT> verification = "N/A" <NEW_LINE> if Domain_Verifier.get_qualifier_key() in feature.qualifiers: <NEW_LINE> <INDENT> verification = str(feature.qualifiers[Domain_Verifier.get_qualifier_key()]) <NEW_LINE> <DEDENT> file2write.write("\t".join([str(start), str(end), str(evalue), str(score), str(ranking), str(region), type, subtype, name, note, str(verification)])) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> 
file2write.write("\t".join([str(start), str(end), str(evalue), str(score), str(ranking), str(region), type, subtype, name, note])) <NEW_LINE> <DEDENT> file2write.write("\n") <NEW_LINE> <DEDENT> file2write.close() | This class receives a seqRecord object and write a simple, one line per feature. Include verification should
only be used when the new annotation scheme is used. | 62598fae097d151d1a2c1030 |
class TestGui(object): <NEW_LINE> <INDENT> def test_init(self): <NEW_LINE> <INDENT> display = gui.Gui() <NEW_LINE> assert display <NEW_LINE> <DEDENT> def test_cell_listener(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_next_step_button_listener(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_start_stop_button_listener(self): <NEW_LINE> <INDENT> pass | Tests the Gui object. | 62598faeac7a0e7691f7250f |
class AccountsViewsTest(TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.user1 = User.objects.create(**user_data_1) <NEW_LINE> self.user1.set_password('XGEyPfoMRNYTo7A#yWLnKEht') <NEW_LINE> self.user1.save() <NEW_LINE> self.client = Client() <NEW_LINE> <DEDENT> def login(self): <NEW_LINE> <INDENT> self.client.login(email='testuser1@test.com', password='XGEyPfoMRNYTo7A#yWLnKEht') <NEW_LINE> <DEDENT> def test_index_view(self): <NEW_LINE> <INDENT> response = self.client.get('/') <NEW_LINE> self.assertEqual(response.status_code, 200) <NEW_LINE> self.assertTemplateUsed(response, 'index.html') <NEW_LINE> <DEDENT> def test_sign_up_view(self): <NEW_LINE> <INDENT> response = self.client.get('/accounts/signup/') <NEW_LINE> self.assertEqual(response.status_code, 200) <NEW_LINE> self.assertTemplateUsed(response, 'accounts/signup.html') <NEW_LINE> <DEDENT> def test_sign_up_post_view(self): <NEW_LINE> <INDENT> response = self.client.post('/accounts/signup/', sign_up_data) <NEW_LINE> self.assertRedirects(response, '/accounts/login/') <NEW_LINE> <DEDENT> def test_sign_out_view(self): <NEW_LINE> <INDENT> self.login() <NEW_LINE> response = self.client.post('/accounts/signout/') <NEW_LINE> self.assertRedirects(response, '/') <NEW_LINE> <DEDENT> def test_profile_view(self): <NEW_LINE> <INDENT> self.login() <NEW_LINE> user = User.objects.get(email='testuser1@test.com') <NEW_LINE> response = self.client.get(reverse('accounts:profile', kwargs={'pk': user.id})) <NEW_LINE> self.assertEqual(response.status_code, 200) <NEW_LINE> self.assertContains(response, '<title>Profile | Test User 1</title') <NEW_LINE> <DEDENT> def test_edit_profile_view(self): <NEW_LINE> <INDENT> self.login() <NEW_LINE> user = User.objects.get(email='testuser1@test.com') <NEW_LINE> response = self.client.get(reverse('accounts:edit', kwargs={'pk': user.id})) <NEW_LINE> self.assertContains( response, '<title>Edit Profile | Test User 1</title>') <NEW_LINE> response = self.client.post( 
reverse('accounts:edit', kwargs={'pk': user.id}), edit_profile_data) <NEW_LINE> response = self.client.get( reverse('accounts:profile', kwargs={'pk': user.id})) <NEW_LINE> self.assertContains( response, 'test user 1, your profile was successfully updated') | test of our views | 62598faecc40096d6161a1dd |
class Endpoint: <NEW_LINE> <INDENT> __uri__ = '/' <NEW_LINE> logger = logging.getLogger(__name__) <NEW_LINE> def __init__(self, server, allowed_origin=None): <NEW_LINE> <INDENT> self.server = server <NEW_LINE> self._allowed_methods = set([ mthd.upper() for mthd in HTTP_METHODS if asyncio.iscoroutinefunction(getattr(self, mthd, None)) ]) <NEW_LINE> self._allowed_origin = allowed_origin <NEW_LINE> <DEDENT> @property <NEW_LINE> def uri(self): <NEW_LINE> <INDENT> return self.__uri__ <NEW_LINE> <DEDENT> @property <NEW_LINE> def allowed_methods(self): <NEW_LINE> <INDENT> return list(self._allowed_methods) <NEW_LINE> <DEDENT> async def options(self, _, *args, **kwargs): <NEW_LINE> <INDENT> if self._allowed_origin: <NEW_LINE> <INDENT> methods = ','.join(self._allowed_methods) <NEW_LINE> return response.text( '', status=200, headers={ 'Access-Control-Allow-Origin': self._allowed_origin, 'Access-Control-Allow-Headers': '*', 'Access-Control-Allow-Methods': methods, }) <NEW_LINE> <DEDENT> return response.json({'error': 'Method not allowed'}, status=405) <NEW_LINE> <DEDENT> async def handle_request(self, request, *args, **kwargs): <NEW_LINE> <INDENT> session = self.server.db_session <NEW_LINE> result = None <NEW_LINE> try: <NEW_LINE> <INDENT> result = await getattr(self, request.method.lower())( session, request, *args, **kwargs) <NEW_LINE> <DEDENT> except APIError as err: <NEW_LINE> <INDENT> logger.exception( 'An error occurred during the handling of a %s ' 'request to %s', request.method, self.__class__.__name__) <NEW_LINE> result = response.json({'error': err.message}, status=err.status) <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> logger.exception( 'An error occurred during the handling of a %s ' 'request to %s', request.method, self.__class__.__name__) <NEW_LINE> result = response.json( { 'error': 'Internal server error' }, status=500) <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> if self._allowed_origin and not hasattr( result.headers, 
'Access-Control-Allow-Origin'): <NEW_LINE> <INDENT> result.headers[ 'Access-Control-Allow-Origin'] = self._allowed_origin <NEW_LINE> <DEDENT> session.close() <NEW_LINE> <DEDENT> return result | Represents an endpoint to which requests can be made in order to manage
a REST resource. | 62598fae4f6381625f1994c2 |
class RobertaEmbedderModule(HuggingfaceTransformersEmbedderModule): <NEW_LINE> <INDENT> def __init__(self, args): <NEW_LINE> <INDENT> super(RobertaEmbedderModule, self).__init__(args) <NEW_LINE> self.model = transformers.RobertaModel.from_pretrained( args.input_module, cache_dir=self.cache_dir, output_hidden_states=True ) <NEW_LINE> self.max_pos = self.model.config.max_position_embeddings <NEW_LINE> self.tokenizer = transformers.RobertaTokenizer.from_pretrained( args.input_module, cache_dir=self.cache_dir ) <NEW_LINE> self._sep_id = self.tokenizer.convert_tokens_to_ids("</s>") <NEW_LINE> self._cls_id = self.tokenizer.convert_tokens_to_ids("<s>") <NEW_LINE> self._pad_id = self.tokenizer.convert_tokens_to_ids("<pad>") <NEW_LINE> self._unk_id = self.tokenizer.convert_tokens_to_ids("<unk>") <NEW_LINE> self.parameter_setup(args) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def apply_boundary_tokens(s1, s2=None, get_offset=False): <NEW_LINE> <INDENT> if s2: <NEW_LINE> <INDENT> s = ["<s>"] + s1 + ["</s>", "</s>"] + s2 + ["</s>"] <NEW_LINE> if get_offset: <NEW_LINE> <INDENT> return s, 1, len(s1) + 3 <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> s = ["<s>"] + s1 + ["</s>"] <NEW_LINE> if get_offset: <NEW_LINE> <INDENT> return s, 1 <NEW_LINE> <DEDENT> <DEDENT> return s <NEW_LINE> <DEDENT> def forward(self, sent: Dict[str, torch.LongTensor], task_name: str = "") -> torch.FloatTensor: <NEW_LINE> <INDENT> ids, input_mask = self.correct_sent_indexing(sent) <NEW_LINE> hidden_states, lex_seq = [], None <NEW_LINE> if self.output_mode not in ["none", "top"]: <NEW_LINE> <INDENT> lex_seq = self.model.embeddings.word_embeddings(ids) <NEW_LINE> lex_seq = self.model.embeddings.LayerNorm(lex_seq) <NEW_LINE> <DEDENT> if self.output_mode != "only": <NEW_LINE> <INDENT> _, output_pooled_vec, hidden_states = self.model(ids, attention_mask=input_mask) <NEW_LINE> <DEDENT> return self.prepare_output(lex_seq, hidden_states, input_mask) <NEW_LINE> <DEDENT> def get_pretrained_lm_head(self): 
<NEW_LINE> <INDENT> model_with_lm_head = transformers.RobertaForMaskedLM.from_pretrained( self.input_module, cache_dir=self.cache_dir ) <NEW_LINE> lm_head = model_with_lm_head.lm_head <NEW_LINE> lm_head.predictions.decoder.weight = self.model.embeddings.word_embeddings.weight <NEW_LINE> return nn.Sequential(lm_head, nn.LogSoftmax(dim=-1)) | Wrapper for RoBERTa module to fit into jiant APIs.
Check HuggingfaceTransformersEmbedderModule for function definitions | 62598fae1b99ca400228f533 |
class FileCreateAction(Action): <NEW_LINE> <INDENT> def __init__(self, *_, **__): <NEW_LINE> <INDENT> self.path = "" <NEW_LINE> <DEDENT> def execute(self, action, path, data, binary=False): <NEW_LINE> <INDENT> with open(path, "wb" if binary else "w") as handle: <NEW_LINE> <INDENT> self.path = path <NEW_LINE> handle.write(data) <NEW_LINE> <DEDENT> <DEDENT> def commit(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def revert(self): <NEW_LINE> <INDENT> if self.path: <NEW_LINE> <INDENT> os.remove(self.path) | Create a file and write data | 62598fae63d6d428bbee27b2 |
class GoogleAppEngineOAuth2(GoogleOAuth2): <NEW_LINE> <INDENT> AUTH_BACKEND = GoogleAppEngineOAuth2Backend <NEW_LINE> SETTINGS_KEY_NAME = 'GOOGLE_APPENGINE_CLIENT_ID' <NEW_LINE> SETTINGS_SECRET_NAME = 'GOOGLE_APPENGINE_CLIENT_SECRET' <NEW_LINE> def user_data(self, access_token, *args, **kwargs): <NEW_LINE> <INDENT> return google_appengine_userinfo_v2(GOOGLE_APPENGINE_PROFILE_V2, access_token) | Google App Engine OAuth2 authentication backend | 62598fae2c8b7c6e89bd37cc |
class AuthRequired(Exception): <NEW_LINE> <INDENT> def __init__(self, msg = ""): <NEW_LINE> <INDENT> self.msg = msg <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "Authorization Required: " + self.msg | Raised on 401 - Authentication Required error. Service requires authentication, pass user credentials in CLAMClient constructor. | 62598fae56ac1b37e63021f2 |
class AntennaList(Obit.AntennaList): <NEW_LINE> <INDENT> def __init__(self, name, inUV, subA, err): <NEW_LINE> <INDENT> super(AntennaList, self).__init__() <NEW_LINE> Obit.CreateAntennaList(self.this, name, inUV.me, subA, err.me) <NEW_LINE> <DEDENT> def __del__(self, DeleteAntennaList=_Obit.DeleteAntennaList): <NEW_LINE> <INDENT> if _Obit!=None: <NEW_LINE> <INDENT> DeleteAntennaList(self.this) <NEW_LINE> <DEDENT> <DEDENT> def __setattr__(self,name,value): <NEW_LINE> <INDENT> if name == "me" : <NEW_LINE> <INDENT> Obit.AntennaList_Set_me(self.this,value) <NEW_LINE> return <NEW_LINE> <DEDENT> self.__dict__[name] = value <NEW_LINE> <DEDENT> def __getattr__(self,name): <NEW_LINE> <INDENT> if name == "me" : <NEW_LINE> <INDENT> return Obit.AntennaList_Get_me(self.this) <NEW_LINE> <DEDENT> if name=="JDRef": <NEW_LINE> <INDENT> return Obit.AntennaListGetRefJD(self.me); <NEW_LINE> <DEDENT> if name=="ArrName": <NEW_LINE> <INDENT> return Obit.AntennaListGetArrName(self.me); <NEW_LINE> <DEDENT> raise AttributeError(str(name)) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "<C AntennaList instance>" <NEW_LINE> <DEDENT> def Elev(self, ant, time, Source): <NEW_LINE> <INDENT> val = Obit.AntennaListGetElev (self.me, ant, time, Source.me) <NEW_LINE> if val==FArray.fblank: <NEW_LINE> <INDENT> raise RuntimeError("Invalid antenna") <NEW_LINE> <DEDENT> return val <NEW_LINE> <DEDENT> def Az(self, ant, time, Source): <NEW_LINE> <INDENT> val = Obit.AntennaListGetAz (self.me, ant, time, Source.me) <NEW_LINE> if val==FArray.fblank: <NEW_LINE> <INDENT> raise RuntimeError("Invalid antenna") <NEW_LINE> <DEDENT> return val <NEW_LINE> <DEDENT> def ParAng(self, ant, time, Source): <NEW_LINE> <INDENT> val = Obit.AntennaListGetParAng (self.me, ant, time, Source.me) <NEW_LINE> if val==FArray.fblank: <NEW_LINE> <INDENT> raise RuntimeError("Invalid antenna") <NEW_LINE> <DEDENT> return val | Python Obit AntennaList class
| 62598fae3d592f4c4edbaec9 |
class TestSuccessfulCMSBuild_case3(PathTestCase): <NEW_LINE> <INDENT> _testCasePath = Path('tests/case3') <NEW_LINE> @classmethod <NEW_LINE> def setUpClass(cls): <NEW_LINE> <INDENT> cls._touch_file( cls._testCasePath.joinpath( 'intermediate', 'Debug', 'project3.tlog', 'project3.lastbuildstate', ), ) <NEW_LINE> time.sleep(1.2) <NEW_LINE> cls._touch_file(cls._testCasePath.joinpath('file3.c')) <NEW_LINE> time.sleep(1.2) <NEW_LINE> cls._touch_file( cls._testCasePath.joinpath( 'intermediate', 'Release', 'project3.tlog', 'project3.lastbuildstate', ), ) <NEW_LINE> <DEDENT> def test_main_file_is_newer(self): <NEW_LINE> <INDENT> self.assertEqual( 1, main( argv=[ str(self._testCasePath.joinpath('file3.c')), '--buildtype', 'Debug', ], ), ) <NEW_LINE> <DEDENT> def test_main_build_is_newer(self): <NEW_LINE> <INDENT> self.assertEqual( 0, main( argv=[ str(self._testCasePath.joinpath('file3.c')), '--buildtype', 'Release', ], ), ) <NEW_LINE> <DEDENT> def test_main_multiple_build_types(self): <NEW_LINE> <INDENT> self.assertEqual( 1, main( argv=[ str(self._testCasePath.joinpath('file3.c')), '--buildtype', 'Release', '--buildtype', 'Debug', ], ), ) | Testcase for testing the successful C MSBuild, where this
testcase uses the file in the tests\case3\ subdirectory which
reflects an successful build for the 'Release' and the
'Debug' buildtype.
However the lastbuildstate of the Release is more recent then
the file. The lastbuildstate of the Debug is older then the file. | 62598fae4527f215b58e9edd |
class UndirectedGraphCochain(GraphCochain, UndirectedGraphVector): <NEW_LINE> <INDENT> def bracket(self, other): <NEW_LINE> <INDENT> return sum((sum(self.homogeneous_part(v,e).insertion(i, other) for i in range(v)) for (v,e) in self.gradings()), self._parent.zero()) + sum(((1 if e % 2 == 1 and f % 2 == 1 else -1)*sum(other.homogeneous_part(v,e).insertion(i, self.homogeneous_part(w,f)) for i in range(v)) for (v,e) in other.gradings() for (w,f) in self.gradings()), self._parent.zero()) <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def _indices_and_coefficients(self, bi_grading): <NEW_LINE> <INDENT> pass | Cochain of an :class:`UndirectedGraphComplex_`. | 62598fae4e4d56256637242d |
class KeyMaterialFactory: <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def create(cipher: BlockCipher, seed=None): <NEW_LINE> <INDENT> key = KeyMaterialFactory.create_key(cipher, seed) <NEW_LINE> nonce = KeyMaterialFactory.create_nonce(cipher, seed) <NEW_LINE> return key, nonce <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def create_key(cipher: BlockCipher, seed=None) -> int: <NEW_LINE> <INDENT> if seed is None: <NEW_LINE> <INDENT> return secrets.randbits(cipher.key_size) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> random.seed(seed) <NEW_LINE> return random.getrandbits(cipher.key_size) <NEW_LINE> <DEDENT> <DEDENT> @staticmethod <NEW_LINE> def create_nonce(cipher: BlockCipher, seed=None) -> int: <NEW_LINE> <INDENT> if seed is None: <NEW_LINE> <INDENT> return secrets.randbits(cipher.block_size) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> random.seed(seed) <NEW_LINE> return random.getrandbits(cipher.block_size) | Factory for random numbers.
| 62598fae44b2445a339b6974 |
class AlreadyDoneHelper: <NEW_LINE> <INDENT> @retry(stop_max_attempt_number=6, wait_fixed=3000) <NEW_LINE> def __init__(self, logger=None): <NEW_LINE> <INDENT> query = AlreadyDoneModel.delete().where((time.time() - AlreadyDoneModel.timestamp) > 604800) <NEW_LINE> num = query.execute() <NEW_LINE> if num: <NEW_LINE> <INDENT> if logger is not None: <NEW_LINE> <INDENT> logger.info("AlreadyDoneHelper: cleaned up %s ids." % str(num)) <NEW_LINE> <DEDENT> print("AlreadyDoneHelper: cleaned up %s ids." % str(num)) <NEW_LINE> <DEDENT> <DEDENT> @staticmethod <NEW_LINE> def add(thing_id, subreddit): <NEW_LINE> <INDENT> while True: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> AlreadyDoneModel.create(thing_id=thing_id, timestamp=time.time(), subreddit=subreddit) <NEW_LINE> break <NEW_LINE> <DEDENT> except (OperationalError, InterfaceError): <NEW_LINE> <INDENT> print("Failed to write") <NEW_LINE> time.sleep(1) | Utility class for easy management of Reddit items or posts that have already been checked | 62598fae7cff6e4e811b5a34 |
class HighwayEncoder(object): <NEW_LINE> <INDENT> def __init__(self, num_layers, keep_prob, l2_lambda=3e-7): <NEW_LINE> <INDENT> self.num_layers = num_layers <NEW_LINE> self.keep_prob = keep_prob <NEW_LINE> self.l2_lambda = l2_lambda <NEW_LINE> <DEDENT> def build_graph(self, inputs, scope="HighwayEncoder", reuse=None): <NEW_LINE> <INDENT> with tf.variable_scope(scope, reuse=reuse): <NEW_LINE> <INDENT> outputs = inputs <NEW_LINE> for l in range(self.num_layers): <NEW_LINE> <INDENT> with tf.variable_scope("Layer{}".format(l+1), reuse=reuse): <NEW_LINE> <INDENT> vec_size = inputs.get_shape().as_list()[-1] <NEW_LINE> h = std_conv(outputs, vec_size, activation_fn=tf.nn.relu, scope="NonLinearTransform", reuse=reuse) <NEW_LINE> t = std_conv(outputs, vec_size, activation_fn=tf.nn.sigmoid, scope="TransformGate", reuse=reuse) <NEW_LINE> outputs = h * t + outputs * (1. - t) <NEW_LINE> outputs = tf.nn.dropout(outputs, self.keep_prob) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return outputs | Encode an input sequence using a highway network.
Based on the paper "Highway Networks" by Srivastava et al.
(https://arxiv.org/pdf/1505.00387.pdf). | 62598fae0c0af96317c5638a |
class IdNumber(object): <NEW_LINE> <INDENT> openapi_types = { 'type': 'str', 'value': 'str', 'state_code': 'str' } <NEW_LINE> attribute_map = { 'type': 'type', 'value': 'value', 'state_code': 'state_code' } <NEW_LINE> def __init__(self, type=None, value=None, state_code=None): <NEW_LINE> <INDENT> self._type = None <NEW_LINE> self._value = None <NEW_LINE> self._state_code = None <NEW_LINE> self.discriminator = None <NEW_LINE> if type is not None: <NEW_LINE> <INDENT> self.type = type <NEW_LINE> <DEDENT> if value is not None: <NEW_LINE> <INDENT> self.value = value <NEW_LINE> <DEDENT> if state_code is not None: <NEW_LINE> <INDENT> self.state_code = state_code <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def type(self): <NEW_LINE> <INDENT> return self._type <NEW_LINE> <DEDENT> @type.setter <NEW_LINE> def type(self, type): <NEW_LINE> <INDENT> self._type = type <NEW_LINE> <DEDENT> @property <NEW_LINE> def value(self): <NEW_LINE> <INDENT> return self._value <NEW_LINE> <DEDENT> @value.setter <NEW_LINE> def value(self, value): <NEW_LINE> <INDENT> self._value = value <NEW_LINE> <DEDENT> @property <NEW_LINE> def state_code(self): <NEW_LINE> <INDENT> return self._state_code <NEW_LINE> <DEDENT> @state_code.setter <NEW_LINE> def state_code(self, state_code): <NEW_LINE> <INDENT> self._state_code = state_code <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in six.iteritems(self.openapi_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> 
result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pprint.pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, IdNumber): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other | NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually. | 62598fae3346ee7daa33764b |
class BatchNormalization(Layer): <NEW_LINE> <INDENT> def __init__(self, momentum=0.99): <NEW_LINE> <INDENT> self.momentum = momentum <NEW_LINE> self.trainable = True <NEW_LINE> self.eps = 0.01 <NEW_LINE> self.running_mean = None <NEW_LINE> self.running_var = None <NEW_LINE> <DEDENT> def initialize(self, optimizer): <NEW_LINE> <INDENT> self.gamma = np.ones(self.input_shape) <NEW_LINE> self.beta = np.zeros(self.input_shape) <NEW_LINE> self.gamma_opt = copy.copy(optimizer) <NEW_LINE> self.beta_opt = copy.copy(optimizer) <NEW_LINE> <DEDENT> def parameters(self): <NEW_LINE> <INDENT> return np.prod(self.gamma.shape) + np.prod(self.beta.shape) <NEW_LINE> <DEDENT> def forward_pass(self, X, training=True): <NEW_LINE> <INDENT> if self.running_mean is None: <NEW_LINE> <INDENT> self.running_mean = np.mean(X, axis=0) <NEW_LINE> self.running_var = np.var(X, axis=0) <NEW_LINE> <DEDENT> if training and self.trainable: <NEW_LINE> <INDENT> mean = np.mean(X, axis=0) <NEW_LINE> var = np.var(X, axis=0) <NEW_LINE> self.running_mean = self.momentum * self.running_mean + (1 - self.momentum) * mean <NEW_LINE> self.running_var = self.momentum * self.running_var + (1 - self.momentum) * var <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> mean = self.running_mean <NEW_LINE> var = self.running_var <NEW_LINE> <DEDENT> self.X_centered = X - mean <NEW_LINE> self.stddev_inv = 1 / np.sqrt(var + self.eps) <NEW_LINE> X_norm = self.X_centered * self.stddev_inv <NEW_LINE> output = self.gamma * X_norm + self.beta <NEW_LINE> return output <NEW_LINE> <DEDENT> def backward_pass(self, accum_grad): <NEW_LINE> <INDENT> gamma = self.gamma <NEW_LINE> if self.trainable: <NEW_LINE> <INDENT> X_norm = self.X_centered * self.stddev_inv <NEW_LINE> grad_gamma = np.sum(accum_grad * X_norm, axis=0) <NEW_LINE> grad_beta = np.sum(accum_grad, axis=0) <NEW_LINE> self.gamma = self.gamma_opt.update(self.gamma, grad_gamma) <NEW_LINE> self.beta = self.beta_opt.update(self.beta, grad_beta) <NEW_LINE> <DEDENT> batch_size = 
accum_grad.shape[0] <NEW_LINE> accum_grad = (1 / batch_size) * gamma * self.stddev_inv * ( batch_size * accum_grad - np.sum(accum_grad, axis=0) - self.X_centered * self.stddev_inv ** 2 * np.sum(accum_grad * self.X_centered, axis=0) ) <NEW_LINE> return accum_grad <NEW_LINE> <DEDENT> def output_shape(self): <NEW_LINE> <INDENT> return self.input_shape | Batch normalization. | 62598faef7d966606f747fed |
class KMSBase(AWSResourceBase): <NEW_LINE> <INDENT> def __init__(self, ctx_node, resource_id=None, client=None, logger=None): <NEW_LINE> <INDENT> AWSResourceBase.__init__( self, client or Boto3Connection(ctx_node).client('kms'), resource_id=resource_id, logger=logger) <NEW_LINE> <DEDENT> @property <NEW_LINE> def properties(self): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> @property <NEW_LINE> def status(self): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def create(self, params): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def delete(self, params=None): <NEW_LINE> <INDENT> raise NotImplementedError() | AWS KMS base interface | 62598faebe8e80087fbbf06c |
class BR(RegisterBranch): <NEW_LINE> <INDENT> BITV = "0001 0000 0000 AAAA" <NEW_LINE> @staticmethod <NEW_LINE> def should(vm): <NEW_LINE> <INDENT> return True | BR(label)
Jump unconditionally to the given label.
Run `doc branch` for a detailed explanation of branching instructions. | 62598faea8370b77170f03e3 |
class CallUnits(unittest.TestCase): <NEW_LINE> <INDENT> def testCase010(self): <NEW_LINE> <INDENT> self.maxDiff = None <NEW_LINE> resX = [ tdata + '/b/c/c.pl', tdata + '/b/c/c.pm', tdata + '/b/c/c.pod', tdata + '/b/c/c.py', ] <NEW_LINE> from filesysobjects import V3K <NEW_LINE> if os.path.exists(tdata + '/b/c/c.pyc'): <NEW_LINE> <INDENT> resX.append(tdata + '/b/c/c.pyc') <NEW_LINE> <DEDENT> if RTE & RTE_WIN32: <NEW_LINE> <INDENT> arg = tdata + '/b/*/[cd][^"""\\\\"""]*[.][p][^"""\\\\"""]*' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> arg = tdata + '/b/*/[cd][^"""//"""]*[.][p][^"""//"""]*' <NEW_LINE> <DEDENT> resX = sorted(resX) <NEW_LINE> arg = filesysobjects.apppaths.normapppathx(arg) <NEW_LINE> res = filesysobjects.pathtools.expandpath(arg, wildcards=W_RE) <NEW_LINE> res = sorted(res) <NEW_LINE> res = [ filesysobjects.apppaths.normapppathx(x, tpf='posix') for x in res] <NEW_LINE> resX = [ filesysobjects.apppaths.normapppathx(x, tpf='posix') for x in resX] <NEW_LINE> self.assertEqual(sorted(res), sorted(resX)) | Sets the specific data array and required parameters for test case.
| 62598fae60cbc95b06364357 |
class User(TimestampMixin, models.Model): <NEW_LINE> <INDENT> uuid = models.UUIDField(default=uuid.uuid4) <NEW_LINE> first_name = models.CharField(max_length=100, blank=True, null=True) <NEW_LINE> last_name = models.CharField(max_length=100, blank=True, null=True) <NEW_LINE> nick = models.CharField(max_length=200, blank=True, null=True) <NEW_LINE> email = models.EmailField() <NEW_LINE> is_round_1_reviewer = models.BooleanField(default=True) <NEW_LINE> is_round_2_reviewer = models.BooleanField(default=False) <NEW_LINE> organizer = models.BooleanField(default=False) <NEW_LINE> invitation_sent = models.BooleanField(default=False) <NEW_LINE> disabled_at = models.DateTimeField(blank=True, null=True) <NEW_LINE> def invite(self): <NEW_LINE> <INDENT> if not settings.REVIEWER_MAIL_ENABLED: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> context = { "user": self, "link": "{}{}".format( settings.BASE_URL, reverse("rating:rate", args=[self.uuid]) ) } <NEW_LINE> subject = render_to_string( "rating/email/invite.subject", context ).strip() <NEW_LINE> message = render_to_string("rating/email/invite.message", context) <NEW_LINE> send_mail( subject, message, settings.FROM_MAIL, [self.email], fail_silently=False ) <NEW_LINE> self.invitation_sent = True <NEW_LINE> self.save() <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.nick <NEW_LINE> <DEDENT> def avg(self): <NEW_LINE> <INDENT> count = self.rated.count() <NEW_LINE> if count == 0: <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> return sum(rating.rating for rating in self.rated.all()) / count <NEW_LINE> <DEDENT> def avg2(self): <NEW_LINE> <INDENT> count = self.rated2.count() <NEW_LINE> if count == 0: <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> return sum(rating.rating for rating in self.rated2.all()) / count <NEW_LINE> <DEDENT> def standard_deviation(self): <NEW_LINE> <INDENT> count = self.rated.count() <NEW_LINE> if count == 0: <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> ratings = self.rated.all() 
<NEW_LINE> mean = sum(rating.rating for rating in ratings) / count <NEW_LINE> sm = sum((mean-rating.rating)**2 for rating in ratings) <NEW_LINE> return math.sqrt(sm/count) <NEW_LINE> <DEDENT> def standard_deviation2(self): <NEW_LINE> <INDENT> count = self.rated2.count() <NEW_LINE> if count == 0: <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> ratings = self.rated2.all() <NEW_LINE> mean = sum(rating.rating for rating in ratings) / count <NEW_LINE> sm = sum((mean-rating.rating)**2 for rating in ratings) <NEW_LINE> return math.sqrt(sm/count) | Users allowed to rate applications. | 62598fae66656f66f7d5a3f8 |
class MultiEpochSampler(torch.utils.data.Sampler): <NEW_LINE> <INDENT> def __init__(self, data_source, num_epochs, start_itr=0, batch_size=128): <NEW_LINE> <INDENT> self.data_source = data_source <NEW_LINE> self.num_samples = len(self.data_source) <NEW_LINE> self.num_epochs = num_epochs <NEW_LINE> self.start_itr = start_itr <NEW_LINE> self.batch_size = batch_size <NEW_LINE> if not isinstance(self.num_samples, int) or self.num_samples <= 0: <NEW_LINE> <INDENT> raise ValueError( "num_samples should be a positive integeral " "value, but got num_samples={}".format( self.num_samples)) <NEW_LINE> <DEDENT> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> n = len(self.data_source) <NEW_LINE> num_epochs = int(np.ceil((n * self.num_epochs - (self.start_itr * self.batch_size)) / float(n))) <NEW_LINE> out = [torch.randperm(n) for epoch in range(self.num_epochs)][-num_epochs:] <NEW_LINE> out[0] = out[0][(self.start_itr * self.batch_size % n):] <NEW_LINE> output = torch.cat(out).tolist() <NEW_LINE> xm.master_print('Length dataset output is %d' % len(output)) <NEW_LINE> return iter(output) <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return len(self.data_source) * self.num_epochs - self.start_itr * self.batch_size | Samples elements randomly over multiple epochs
Arguments:
data_source (Dataset): dataset to sample from
num_epochs (int) : Number of times to loop over the dataset
start_itr (int) : which iteration to begin from | 62598faea219f33f346c681e |
class Element(metaclass=InheritDocstrings): <NEW_LINE> <INDENT> _element_name = '' <NEW_LINE> _attr_list = [] <NEW_LINE> def _add_unknown_tag(self, iterator, tag, data, config, pos): <NEW_LINE> <INDENT> warn_or_raise(W10, W10, tag, config, pos) <NEW_LINE> <DEDENT> def _ignore_add(self, iterator, tag, data, config, pos): <NEW_LINE> <INDENT> warn_unknown_attrs(tag, data.keys(), config, pos) <NEW_LINE> <DEDENT> def _add_definitions(self, iterator, tag, data, config, pos): <NEW_LINE> <INDENT> if config.get('version_1_1_or_later'): <NEW_LINE> <INDENT> warn_or_raise(W22, W22, (), config, pos) <NEW_LINE> <DEDENT> warn_unknown_attrs(tag, data.keys(), config, pos) <NEW_LINE> <DEDENT> def parse(self, iterator, config): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def to_xml(self, w, **kwargs): <NEW_LINE> <INDENT> raise NotImplementedError() | A base class for all classes that represent XML elements in the
VOTABLE file. | 62598fae32920d7e50bc605c |
class CaperBridge(object): <NEW_LINE> <INDENT> def __init__(self, caper_map, genome_db, reads_db): <NEW_LINE> <INDENT> self.map = caper_map <NEW_LINE> self.genome_db = genome_db <NEW_LINE> self.reads_db = reads_db <NEW_LINE> <DEDENT> def __getitem__(self, ival): <NEW_LINE> <INDENT> seq_id, start, stop = ival.id, ival.start, ival.stop <NEW_LINE> return CaperSlice(self, ival, self.map.get_slice(seq_id, start, stop)) | Mimic a pygr.cnestedlist.NLMSA with a caper database. | 62598fae4f6381625f1994c3 |
class ManagerUtilsManager(ManagerUtilsMixin, Manager): <NEW_LINE> <INDENT> pass | A class that can be used as a manager. It already inherits the Django Manager class and adds
the mixin. | 62598fae0c0af96317c5638b |
class AircraftViewSet(viewsets.ModelViewSet): <NEW_LINE> <INDENT> serializer_class = s.AircraftSerializer <NEW_LINE> queryset = am.Aircraft.objects.all() <NEW_LINE> permission_classes = (p.IsReadOnlyOrAdmin,) <NEW_LINE> filter_backends = (filters.SearchFilter,) <NEW_LINE> search_fields = ('name',) | CRUD for Aircraft. | 62598fae4c3428357761a2c1 |
class DOMHTMLNewsParser(DOMParserBase): <NEW_LINE> <INDENT> _defGetRefs = True <NEW_LINE> extractors = [ Extractor( label='news', path="//h2", attrs=Attribute( key='news', multi=True, path={ 'title': "./text()", 'fromdate': "../following-sibling::p[1]/small//text()", 'body': "../following-sibling::p[2]//text()", 'link': "../..//a[text()='Permalink']/@href", 'fulllink': "../..//a[starts-with(text(), 'See full article at')]/@href" }, postprocess=lambda x: { 'title': x.get('title').strip(), 'date': x.get('fromdate').split('|')[0].strip(), 'from': x.get('fromdate').split('|')[1].replace('From ', '').strip(), 'body': (x.get('body') or '').strip(), 'link': _normalize_href(x.get('link')), 'full article link': _normalize_href(x.get('fulllink')) } ) ) ] <NEW_LINE> preprocessors = [ (re.compile('(<a name=[^>]+><h2>)', re.I), r'<div class="_imdbpy">\1'), (re.compile('(<hr/>)', re.I), r'</div>\1'), (re.compile('<p></p>', re.I), r'') ] <NEW_LINE> def postprocess_data(self, data): <NEW_LINE> <INDENT> if 'news' not in data: <NEW_LINE> <INDENT> return {} <NEW_LINE> <DEDENT> for news in data['news']: <NEW_LINE> <INDENT> if 'full article link' in news: <NEW_LINE> <INDENT> if news['full article link'] is None: <NEW_LINE> <INDENT> del news['full article link'] <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return data | Parser for the "news" page of a given movie or person.
The page should be provided as a string, as taken from
the akas.imdb.com server. The final result will be a
dictionary, with a key for every relevant section.
Example:
nwparser = DOMHTMLNewsParser()
result = nwparser.parse(news_html_string) | 62598fae26068e7796d4c95e |
class _IterRTPSubsectionLines: <NEW_LINE> <INDENT> def __init__(self, parent): <NEW_LINE> <INDENT> self.parent = parent <NEW_LINE> self.lines = parent.lines <NEW_LINE> self.running = True <NEW_LINE> <DEDENT> def __next__(self): <NEW_LINE> <INDENT> if not self.running: <NEW_LINE> <INDENT> raise StopIteration <NEW_LINE> <DEDENT> line = next(self.lines) <NEW_LINE> if line.strip().startswith('['): <NEW_LINE> <INDENT> self.parent.buffer.append(line) <NEW_LINE> self.running = False <NEW_LINE> raise StopIteration <NEW_LINE> <DEDENT> return line <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> def flush(self): <NEW_LINE> <INDENT> for _ in self: <NEW_LINE> <INDENT> pass | Iterate over the lines of an RTP file within a subsection. | 62598fae9c8ee82313040176 |
class GroupFolder(Comparer): <NEW_LINE> <INDENT> def __init__( self, id=None, mount_point=None, groups=None, quota=None, size=None, **kwargs ): <NEW_LINE> <INDENT> self.id = id <NEW_LINE> self.mount_point = mount_point <NEW_LINE> self.groups = [x for x in groups] if groups else [] <NEW_LINE> self.quota = quota <NEW_LINE> self.size = size <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> out = '<GroupFolder> ({id}) "{mp}" quota: {q}, size: {s}\n'.format( mp=self.mount_point, id=self.id, q=human_size(self.quota) if self.quota else self.quota, s=human_size(self.size) if self.size else self.size, ) <NEW_LINE> for it in self.groups: <NEW_LINE> <INDENT> out += " {grp}\n".format(grp=it.__str__()) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> out = out[:-1] <NEW_LINE> <DEDENT> return out | GroupFolder class | 62598fae44b2445a339b6975 |
class Action_Energy_Dataset(Dataset): <NEW_LINE> <INDENT> def __init__(self, file_path, split = 'train'): <NEW_LINE> <INDENT> if(split not in ['train','valid','dummy_test']): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> if(not os.path.exists(file_path)): <NEW_LINE> <INDENT> raise ValueError('File not found at "{0}".'.format(file_path)) <NEW_LINE> <DEDENT> self.df = pd.read_csv(file_path) <NEW_LINE> self.data = torch.tensor(self.df['Point'].values).float() <NEW_LINE> self.target = torch.tensor(self.df['Energy'].values).float() <NEW_LINE> if(split == 'train'): <NEW_LINE> <INDENT> num_train_samples = math.floor(0.8*self.target.shape[0]) <NEW_LINE> self.data = self.data[0:num_train_samples] <NEW_LINE> self.target = self.target[0:num_train_samples] <NEW_LINE> <DEDENT> elif(split == 'valid'): <NEW_LINE> <INDENT> num_valid_samples = math.floor(0.2*self.target.shape[0]) <NEW_LINE> self.data = self.data[0:num_valid_samples] <NEW_LINE> self.target = self.target[0:num_valid_samples] <NEW_LINE> <DEDENT> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return math.floor(self.target.shape[0] / 10) <NEW_LINE> <DEDENT> def __getitem__(self, idx): <NEW_LINE> <INDENT> start_idx = idx * 10 <NEW_LINE> end_idx = start_idx + 10 <NEW_LINE> x = self.data[start_idx:end_idx] <NEW_LINE> y = self.target[start_idx : end_idx] <NEW_LINE> return x, y | This is a torch dataset class for the new_action_energy_data directory in the repo.
Note the new_action_energy_data directory contains a reformatted version of the data
from before in order to make it easier to parse. | 62598fae7b25080760ed74b9 |
class ChapelLexer(RegexLexer): <NEW_LINE> <INDENT> name = 'Chapel' <NEW_LINE> filenames = ['*.chpl'] <NEW_LINE> aliases = ['chapel', 'chpl'] <NEW_LINE> tokens = { 'root': [ (r'\n', Text), (r'\s+', Text), (r'\\\n', Text), (r'//(.*?)\n', Comment.Single), (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline), (r'(config|const|in|inout|out|param|ref|type|var)\b', Keyword.Declaration), (r'(false|nil|true)\b', Keyword.Constant), (r'(bool|complex|imag|int|opaque|range|real|string|uint)\b', Keyword.Type), (words(( 'align', 'atomic', 'begin', 'break', 'by', 'cobegin', 'coforall', 'continue', 'delete', 'dmapped', 'do', 'domain', 'else', 'enum', 'export', 'extern', 'for', 'forall', 'if', 'index', 'inline', 'iter', 'label', 'lambda', 'let', 'local', 'new', 'noinit', 'on', 'otherwise', 'pragma', 'private', 'public', 'reduce', 'return', 'scan', 'select', 'serial', 'single', 'sparse', 'subdomain', 'sync', 'then', 'use', 'when', 'where', 'while', 'with', 'yield', 'zip'), suffix=r'\b'), Keyword), (r'(proc)((?:\s|\\\s)+)', bygroups(Keyword, Text), 'procname'), (r'(class|module|record|union)(\s+)', bygroups(Keyword, Text), 'classname'), (r'\d+i', Number), (r'\d+\.\d*([Ee][-+]\d+)?i', Number), (r'\.\d+([Ee][-+]\d+)?i', Number), (r'\d+[Ee][-+]\d+i', Number), (r'(\d*\.\d+)([eE][+-]?[0-9]+)?i?', Number.Float), (r'\d+[eE][+-]?[0-9]+i?', Number.Float), (r'0[bB][01]+', Number.Bin), (r'0[xX][0-9a-fA-F]+', Number.Hex), (r'0[oO][0-7]+', Number.Oct), (r'[0-9]+', Number.Integer), (r'["\'](\\\\|\\"|[^"\'])*["\']', String), (r'(=|\+=|-=|\*=|/=|\*\*=|%=|&=|\|=|\^=|&&=|\|\|=|<<=|>>=|' r'<=>|<~>|\.\.|by|#|\.\.\.|' r'&&|\|\||!|&|\||\^|~|<<|>>|' r'==|!=|<=|>=|<|>|' r'[+\-*/%]|\*\*)', Operator), (r'[:;,.?()\[\]{}]', Punctuation), (r'[a-zA-Z_][\w$]*', Name.Other), ], 'classname': [ (r'[a-zA-Z_][\w$]*', Name.Class, '#pop'), ], 'procname': [ (r'[a-zA-Z_][\w$]*', Name.Function, '#pop'), ], } | For `Chapel <http://chapel.cray.com/>`_ source.
.. versionadded:: 2.0 | 62598fae1f5feb6acb162c28 |
class Achievement(models.Model): <NEW_LINE> <INDENT> creation_date = models.DateTimeField(verbose_name="Date d'obtention du badge", auto_now_add=True) <NEW_LINE> user = models.ForeignKey(get_user_model(), verbose_name="Utilisateur", on_delete=models.CASCADE) <NEW_LINE> badge = models.CharField(verbose_name="Badge obtenu", max_length=50, choices=BADGES) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> verbose_name = "Succès" <NEW_LINE> verbose_name_plural = "Succès" <NEW_LINE> constraints = [ models.UniqueConstraint(fields=["user", "badge"], name="user_badge_unique") ] <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "%s:%s" % (self.user, self.badge) | Represent a use achievement. | 62598fae76e4537e8c3ef5b7 |
class WildcardNotCoveredNSEC(WildcardNotCovered): <NEW_LINE> <INDENT> _abstract = False <NEW_LINE> references = ['RFC 4035, Sec. 3.1.3.2'] <NEW_LINE> nsec_type = 'NSEC' | >>> e = WildcardNotCoveredNSEC(wildcard='*.foo.baz.')
>>> e.description
'No NSEC RR covers the wildcard (*.foo.baz.).' | 62598faed486a94d0ba2bfd8 |
class Job(IdEqualityMixin): <NEW_LINE> <INDENT> def __init__(self, jid, server): <NEW_LINE> <INDENT> self.id = jid <NEW_LINE> self.server = server <NEW_LINE> <DEDENT> def info(self): <NEW_LINE> <INDENT> return self.server.call('get', '/job/' + self.id) <NEW_LINE> <DEDENT> def stop(self): <NEW_LINE> <INDENT> return self.server.call('get', '/job/%s/stop' % self.id) <NEW_LINE> <DEDENT> def abort(self): <NEW_LINE> <INDENT> return self.server.call('get', '/job/%s/abort' % self.id) | Representation of a running Nutch job, use JobClient to get a list of running jobs or to create one | 62598faed486a94d0ba2bfd9 |
class ChangeModelPlugin(Plugin): <NEW_LINE> <INDENT> def activate(self): <NEW_LINE> <INDENT> global Backups <NEW_LINE> self.log.info("Replace Backups") <NEW_LINE> Backups = MyBackups | A basic plugin implementation. | 62598fae851cf427c66b82c5 |
class signalonce(object): <NEW_LINE> <INDENT> __messages = [] <NEW_LINE> def __init__(self, f): <NEW_LINE> <INDENT> self.__f = f <NEW_LINE> self.__f.reset = signalonce.reset <NEW_LINE> <DEDENT> def __call__(self, messageIdString, *args, **kwargs): <NEW_LINE> <INDENT> if not messageIdString in self.__messages: <NEW_LINE> <INDENT> self.__messages.append(messageIdString) <NEW_LINE> return self.__f(messageIdString, *args, **kwargs) <NEW_LINE> <DEDENT> <DEDENT> @staticmethod <NEW_LINE> def reset(messageIdString=None): <NEW_LINE> <INDENT> if messageIdString is None: <NEW_LINE> <INDENT> signalonce.__messages = [] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> signalonce.__messages.remove(messageIdString) | A decorator class that records the messages sent by the sendSignal function.
The decorated function's first argument is expected to be a message Id string. | 62598faebe8e80087fbbf06e |
class ISODateTimeConverter(FancyValidator): <NEW_LINE> <INDENT> datetime_module = None <NEW_LINE> messages = dict( invalidFormat=_('The must enter your date & time in the format YYYY-MM-DDTHH:MM:SS'),) <NEW_LINE> def _convert_to_python(self, value, state): <NEW_LINE> <INDENT> dt_mod = import_datetime(self.datetime_module) <NEW_LINE> datetime_class = dt_mod.datetime <NEW_LINE> try: <NEW_LINE> <INDENT> datetime = datetime_class.strptime(value, r"%Y-%m-%dT%H:%M:%S") <NEW_LINE> return datetime <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> raise Invalid(self.message('invalidFormat', state), value, state) <NEW_LINE> <DEDENT> <DEDENT> def _convert_from_python(self, value, state): <NEW_LINE> <INDENT> return value.isoformat("T") | Converts fields which contain both date and time, in the
ISO 8601 standard format YYYY-MM-DDTHH:MM:SS.
Stores in a datetime.datetime object.
Examples::
>>> tim = ISODateTimeConverter()
>>> tim.to_python('2012-06-25T05:30:25')
datetime.datetime(2012, 6, 25, 5, 30, 25)
>>> tim.to_python('1999-12-01T12:00:00')
datetime.datetime(1999, 12, 1, 12, 0)
>>> tim.to_python('2012-06-25 05:30:25')
Traceback (most recent call last):
...
Invalid: The must enter your date & time in the format YYYY-MM-DDTHH:MM:SS | 62598fae21bff66bcd722c71 |
class Neoplasm(_CaseInsensitiveEnum): <NEW_LINE> <INDENT> primary = 'Primary' <NEW_LINE> metastatic = 'Metastatic' <NEW_LINE> unknown = 'Unknown' | Type of malignant neoplasm detected | 62598fae7c178a314d78d4a6 |
class RegistroD309(Registro): <NEW_LINE> <INDENT> campos = [ CampoFixo(1, 'REG', 'D309'), Campo(2, 'NUM_PROC', obrigatorio=True), Campo(3, 'IND_PROC', obrigatorio=True), ] <NEW_LINE> nivel = 4 | Processo Referenciado | 62598fae4e4d562566372430 |
class KillRequestManager(models.Manager): <NEW_LINE> <INDENT> def killable(self): <NEW_LINE> <INDENT> qs = self.get_query_set() <NEW_LINE> return qs.filter( schedule_dts__lte=timezone.now(), enqueue_dts__isnull=True, execute_dts__isnull=True, run__pid__isnull=False, run__start_dts__isnull=False, run__return_dts__isnull=True, ) | Custom manager for the KillRequest model. | 62598fae99fddb7c1ca62dee |
class QueryArrayWidget(BaseCSVWidget, forms.TextInput): <NEW_LINE> <INDENT> def value_from_datadict(self, data, files, name): <NEW_LINE> <INDENT> if not isinstance(data, MultiValueDict): <NEW_LINE> <INDENT> for key, value in data.items(): <NEW_LINE> <INDENT> if isinstance(value, string_types): <NEW_LINE> <INDENT> data[key] = [x.strip() for x in value.rstrip(',').split(',') if x] <NEW_LINE> <DEDENT> <DEDENT> data = MultiValueDict(data) <NEW_LINE> <DEDENT> values_list = data.getlist(name, data.getlist('%s[]' % name)) or [] <NEW_LINE> if len(values_list) > 0: <NEW_LINE> <INDENT> ret = [x for x in values_list if x] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> ret = [] <NEW_LINE> <DEDENT> return list(set(ret)) | Enables request query array notation that might be consumed by MultipleChoiceFilter
1. Values can be provided as csv string: ?foo=bar,baz
2. Values can be provided as query array: ?foo[]=bar&foo[]=baz
3. Values can be provided as query array: ?foo=bar&foo=baz
Note: Duplicate and empty values are skipped from results | 62598faefff4ab517ebcd7ef |
class Screen: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.message = '' <NEW_LINE> self.dict_ref = {} <NEW_LINE> self.list_item = [] <NEW_LINE> self.choice = 0 <NEW_LINE> self.dbf = DbFetcher() <NEW_LINE> self.dbw = DbWriter() <NEW_LINE> <DEDENT> def fill_references(self, m_query): <NEW_LINE> <INDENT> dcount = 1 <NEW_LINE> for record in m_query: <NEW_LINE> <INDENT> self.dict_ref[str(dcount)] = record.id <NEW_LINE> self.list_item.append(format_for_screen(dcount, record.reference)) <NEW_LINE> dcount += 1 <NEW_LINE> <DEDENT> <DEDENT> def message_initialize(self, file): <NEW_LINE> <INDENT> record = {'items': self.list_item} <NEW_LINE> jinja_env = jinja2.Environment(loader=jinja2.FileSystemLoader('.')) <NEW_LINE> template = jinja_env.get_template('scr/' + file) <NEW_LINE> return template.render(record) <NEW_LINE> <DEDENT> def message_display(self): <NEW_LINE> <INDENT> clear_screen() <NEW_LINE> response = self.response() <NEW_LINE> if response is None: <NEW_LINE> <INDENT> return self.message_display() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return response <NEW_LINE> <DEDENT> <DEDENT> def response(self): <NEW_LINE> <INDENT> resp = input(self.message + mess0) <NEW_LINE> if resp == '0': <NEW_LINE> <INDENT> exit_script() <NEW_LINE> <DEDENT> if str(resp) not in self.dict_ref: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> resp = self.dict_ref[str(resp)] <NEW_LINE> return int(resp) | Mother Class for displaying.
Class variables :
---------------
message : string message that will appear in terminal
dict_ref : dict to link order of appearance to item's id
list_item : list for formatted items for screen
dbf : initialize class for database's fetching
dbw : initialize class for database's writing
Operation :
--------- - Fill references after fetching database if necessary
- Initialize message for terminal
- display message in terminal templating text file(s)
- wait for input response
- verify response's validity (if in link_ref)
- daugther classes manage next screen regarding response | 62598fae10dbd63aa1c70bbd |
class BackendInfo(object): <NEW_LINE> <INDENT> @service_method <NEW_LINE> def get_time_zone(self): <NEW_LINE> <INDENT> return str(settings.TIME_ZONE) <NEW_LINE> <DEDENT> @service_method <NEW_LINE> def get_current_timestamp(self): <NEW_LINE> <INDENT> return datetime.datetime.now(local_timezone).isoformat() <NEW_LINE> <DEDENT> @service_method <NEW_LINE> def get_revision(self): <NEW_LINE> <INDENT> if not hasattr(self, '_revision'): <NEW_LINE> <INDENT> self._revision = 'exported' <NEW_LINE> try: <NEW_LINE> <INDENT> self._revision = subprocess.Popen(["svnversion", "-n"], cwd=settings.PROJECT_ROOT, stdout=subprocess.PIPE).communicate()[0].strip() <NEW_LINE> <DEDENT> except OSError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> if self._revision == 'exported': <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self._revision = file(os.path.join(settings.PROJECT_ROOT, 'svn_revision'), 'rb').read().strip() <NEW_LINE> <DEDENT> except IOError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return self._revision | This class provides information about the backend software revision and
date/time settings. Implements #280 and #1850. | 62598fae2c8b7c6e89bd37cf |
class ChromeColeta(ColetarRegistros): <NEW_LINE> <INDENT> def __init__(self, url, usuario, senha): <NEW_LINE> <INDENT> options = ChromeOptions() <NEW_LINE> preferecias = { "download.default_directory": DIRETORIO, "safebrowsing.enabled": "false", } <NEW_LINE> options.add_experimental_option("prefs", preferecias) <NEW_LINE> options.add_argument("--ignore-certificate-errors") <NEW_LINE> options.add_argument("--start-maximized") <NEW_LINE> browser = Remote( desired_capabilities=DesiredCapabilities.CHROME, options=options, ) <NEW_LINE> super().__init__(url, usuario, senha, browser) | Classe que usa o webdrive do Chrome, defenindo as configurações necessarias, responsavel por coletar os registros.
Utilizando os metodos da Classe pai ColetarRegistros. | 62598faeac7a0e7691f72513 |
class wmoving(gr.sync_block): <NEW_LINE> <INDENT> def __init__(self, alpha=0.5, samples=False): <NEW_LINE> <INDENT> if samples: <NEW_LINE> <INDENT> self.set_samples(samples) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.set_alpha(alpha) <NEW_LINE> <DEDENT> self._first = True <NEW_LINE> gr.sync_block.__init__(self, "wmoving_average", ["float32"], ["float32"]) <NEW_LINE> <DEDENT> def set_alpha(self,alpha): <NEW_LINE> <INDENT> self._alpha = numpy.float128(alpha) <NEW_LINE> self._beta = (1 - alpha) <NEW_LINE> <DEDENT> def set_samples(self,samples): <NEW_LINE> <INDENT> self.set_alpha( numpy.float128(2) / (1 + samples) ) <NEW_LINE> <DEDENT> def work(self, input_items, output_items): <NEW_LINE> <INDENT> p = 0 <NEW_LINE> if self._first and len(input_items[0]): <NEW_LINE> <INDENT> self._avg = input_items[0][p] <NEW_LINE> output_items[0][p] = self._avg <NEW_LINE> p = 1 <NEW_LINE> self._first = False; <NEW_LINE> <DEDENT> while p < len(input_items[0]): <NEW_LINE> <INDENT> self._avg = self._alpha * input_items[0][p] + self._beta * self._avg <NEW_LINE> output_items[0][p] = self._avg <NEW_LINE> p = p + 1 <NEW_LINE> <DEDENT> if os.getenv("DEBUG_WMA"): <NEW_LINE> <INDENT> os.write(2, "alpha=%f; avg=%f\n" % (self._alpha, self._avg)) <NEW_LINE> <DEDENT> return p | weighted moving average | 62598faefff4ab517ebcd7f0 |
class Column: <NEW_LINE> <INDENT> @class_common_init(re_column_url) <NEW_LINE> def __init__(self, url, name=None, follower_num=None, post_num=None, session=None): <NEW_LINE> <INDENT> self._in_name = re_column_url.match(url).group(1) <NEW_LINE> self.url = url <NEW_LINE> self._session = session <NEW_LINE> self._name = name <NEW_LINE> self._follower_num = follower_num <NEW_LINE> self._post_num = post_num <NEW_LINE> <DEDENT> def _make_soup(self): <NEW_LINE> <INDENT> if self.soup is None: <NEW_LINE> <INDENT> origin_host = self._session.headers.get('Host') <NEW_LINE> self._session.headers.update(Host='zhuanlan.zhihu.com') <NEW_LINE> res = self._session.get(Column_Data.format(self._in_name)) <NEW_LINE> self._session.headers.update(Host=origin_host) <NEW_LINE> self.soup = res.json() <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> @check_soup('_name') <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return self.soup['name'] <NEW_LINE> <DEDENT> @property <NEW_LINE> @check_soup('_follower_num') <NEW_LINE> def follower_num(self): <NEW_LINE> <INDENT> return int(self.soup['followersCount']) <NEW_LINE> <DEDENT> @property <NEW_LINE> @check_soup('_post_num') <NEW_LINE> def post_num(self): <NEW_LINE> <INDENT> return int(self.soup['postsCount']) <NEW_LINE> <DEDENT> @property <NEW_LINE> def posts(self): <NEW_LINE> <INDENT> origin_host = self._session.headers.get('Host') <NEW_LINE> for offset in range(0, (self.post_num - 1) // 10 + 1): <NEW_LINE> <INDENT> self._session.headers.update(Host='zhuanlan.zhihu.com') <NEW_LINE> res = self._session.get( Column_Posts_Data.format(self._in_name, offset * 10)) <NEW_LINE> soup = res.json() <NEW_LINE> self._session.headers.update(Host=origin_host) <NEW_LINE> for post in soup: <NEW_LINE> <INDENT> yield self._parse_post_data(post) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def _parse_post_data(self, post): <NEW_LINE> <INDENT> from .author import Author <NEW_LINE> from .post import Post <NEW_LINE> url = Column_Url + post['url'] <NEW_LINE> template = 
post['author']['avatar']['template'] <NEW_LINE> photo_id = post['author']['avatar']['id'] <NEW_LINE> photo_url = template.format(id=photo_id, size='r') <NEW_LINE> author = Author(post['author']['profileUrl'], post['author']['name'], post['author']['bio'], photo_url=photo_url, session=self._session) <NEW_LINE> title = post['title'] <NEW_LINE> upvote_num = post['likesCount'] <NEW_LINE> comment_num = post['commentsCount'] <NEW_LINE> print(url) <NEW_LINE> return Post(url, self, author, title, upvote_num, comment_num, session=self._session) | 专栏类,请使用``ZhihuClient.column``方法构造对象. | 62598faecc40096d6161a1df |
class RecollectWasteException(Exception): <NEW_LINE> <INDENT> pass | Recollect Waste error. | 62598faf63d6d428bbee27b6 |
class Camera(object): <NEW_LINE> <INDENT> def __init__(self, focalpoint=None, distance=None, azimuth=None, elevation=None, roll=None): <NEW_LINE> <INDENT> self.focalpoint = focalpoint or 'auto' <NEW_LINE> self.distance = distance or 'auto' <NEW_LINE> self.azimuth = azimuth or 0 <NEW_LINE> self.elevation = elevation or 0 <NEW_LINE> self.roll = roll or 0 <NEW_LINE> self._set_view() <NEW_LINE> if self.distance == 'auto': <NEW_LINE> <INDENT> _, _, self.distance, self.focalpoint = mlab.view() <NEW_LINE> <DEDENT> <DEDENT> def _set_view(self): <NEW_LINE> <INDENT> mlab.view(azimuth=self.azimuth, elevation=self.elevation, distance=self.distance, focalpoint=self.focalpoint, roll=self.roll) <NEW_LINE> <DEDENT> def parameters(self): <NEW_LINE> <INDENT> self.azimuth, self.elevation, self.distance, self.focalpoint = mlab.view() <NEW_LINE> self.roll = mlab.roll() <NEW_LINE> return {'focalpoint' : self.focalpoint, 'distance' : self.distance, 'azimuth' : self.azimuth, 'elevation' : self.elevation, 'roll' : self.roll} <NEW_LINE> <DEDENT> def update(self, focalpoint=None, distance=None, azimuth=None, elevation=None, roll=None): <NEW_LINE> <INDENT> self.focalpoint = focalpoint or self.focalpoint <NEW_LINE> self.distance = self.distance + (distance or 0) <NEW_LINE> self.azimuth = self.azimuth + (azimuth or 0) <NEW_LINE> self.elevation = self.elevation + (elevation or 0) <NEW_LINE> self.roll = self.roll + (roll or 0) <NEW_LINE> self._set_view() | Class that wraps some of the mlab functions that manipulate the camera.
A camera instance is automatically created by the animation. | 62598fafadb09d7d5dc0a595 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.