code stringlengths 4 4.48k | docstring stringlengths 1 6.45k | _id stringlengths 24 24 |
|---|---|---|
class TestStudentFromIdentifier(TestCase): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def setUpClass(cls): <NEW_LINE> <INDENT> super(TestStudentFromIdentifier, cls).setUpClass() <NEW_LINE> cls.valid_student = UserFactory.create(username='baz@touchstone') <NEW_LINE> cls.student_conflicting_email = UserFactory.create(email='foo@touchstone.com') <NEW_LINE> cls.student_conflicting_username = UserFactory.create(username='foo@touchstone.com') <NEW_LINE> <DEDENT> def test_valid_student_id(self): <NEW_LINE> <INDENT> assert self.valid_student == tools.get_student_from_identifier(self.valid_student.username) <NEW_LINE> <DEDENT> def test_valid_student_email(self): <NEW_LINE> <INDENT> assert self.valid_student == tools.get_student_from_identifier(self.valid_student.email) <NEW_LINE> <DEDENT> def test_student_username_has_conflict_with_others_email(self): <NEW_LINE> <INDENT> with self.assertRaises(MultipleObjectsReturned): <NEW_LINE> <INDENT> tools.get_student_from_identifier(self.student_conflicting_username.username) <NEW_LINE> <DEDENT> assert self.student_conflicting_username == tools.get_student_from_identifier( self.student_conflicting_username.email ) <NEW_LINE> <DEDENT> def test_student_email_has_conflict_with_others_username(self): <NEW_LINE> <INDENT> with self.assertRaises(MultipleObjectsReturned): <NEW_LINE> <INDENT> tools.get_student_from_identifier(self.student_conflicting_email.email) <NEW_LINE> <DEDENT> assert self.student_conflicting_email == tools.get_student_from_identifier( self.student_conflicting_email.username ) <NEW_LINE> <DEDENT> def test_invalid_student_id(self): <NEW_LINE> <INDENT> with self.assertRaises(User.DoesNotExist): <NEW_LINE> <INDENT> assert tools.get_student_from_identifier("invalid") | Test get_student_from_identifier() | 62598fb4d268445f26639be2 |
class RobotServiceException(Exception): <NEW_LINE> <INDENT> pass | General exception used by the agent. | 62598fb4bf627c535bcb155f |
class Button(Control): <NEW_LINE> <INDENT> def __init__(self, message, position, size = (0,0), color = (0,0,0), triggerFunc = None, menuRef = None, bgColor = None, fontSize = None): <NEW_LINE> <INDENT> self.message = message <NEW_LINE> self.menuRef = menuRef <NEW_LINE> self.color = color <NEW_LINE> self.bgColor = bgColor <NEW_LINE> self.triggerFunc = triggerFunc <NEW_LINE> self.hovered = False <NEW_LINE> self.counter = 0 <NEW_LINE> self.counterSpeed = .1 <NEW_LINE> self.clicked = False <NEW_LINE> if fontSize is None: <NEW_LINE> <INDENT> fontSize = size[1] <NEW_LINE> <DEDENT> self.fontSize = fontSize <NEW_LINE> self.font = pygame.font.Font("font/FiraCode-Retina.ttf", fontSize) <NEW_LINE> Control.__init__(self, position, size, menuRef) <NEW_LINE> <DEDENT> def update(self, mposition, mclicked): <NEW_LINE> <INDENT> if mposition[0] > self.position[0] and mposition[0] < self.position[0] + self.size[0] and mposition[1] > self.position[1] and mposition[1] < self.position[1] + self.size[1]: <NEW_LINE> <INDENT> if self.hovered: <NEW_LINE> <INDENT> if self.counter >= 3.5 or self.counter <= -0.5: <NEW_LINE> <INDENT> self.counterSpeed = -self.counterSpeed <NEW_LINE> <DEDENT> self.counter += self.counterSpeed <NEW_LINE> if mclicked: <NEW_LINE> <INDENT> self.clicked = True <NEW_LINE> <DEDENT> elif self.clicked: <NEW_LINE> <INDENT> self.clicked = False <NEW_LINE> if self.triggerFunc is not None: <NEW_LINE> <INDENT> self.triggerFunc() <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> self.hovered = True <NEW_LINE> return True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.hovered = False <NEW_LINE> self.counter = False <NEW_LINE> <DEDENT> if not mclicked: <NEW_LINE> <INDENT> self.clicked = False <NEW_LINE> <DEDENT> return False <NEW_LINE> <DEDENT> def render(self): <NEW_LINE> <INDENT> if self.bgColor is not None: <NEW_LINE> <INDENT> pygame.draw.rect(self.menuRef.appRef.draw_surface,self.bgColor,(self.position[0],self.position[1],self.size[0],self.size[1])) <NEW_LINE> <DEDENT> midbcol = 
self.color[0]*0.4, self.color[1]*0.4, self.color[2]*0.4 <NEW_LINE> midcol = self.color[0]*0.6, self.color[1]*0.6, self.color[2]*0.6 <NEW_LINE> midfcol = self.color[0]*0.8, self.color[1]*0.8, self.color[2]*0.8 <NEW_LINE> cols = [self.color, midfcol, midcol, midbcol] <NEW_LINE> self.menuRef.appRef.draw_surface.blit(self.font.render(self.message, True, cols[int(self.counter)%4]), (self.position[0]+self.fontSize/9,self.position[1]-self.fontSize/9)) | button class derived from control | 62598fb416aa5153ce4005c1 |
class P4ConvP4(SplitGConv2D): <NEW_LINE> <INDENT> @property <NEW_LINE> def input_stabilizer_size(self): <NEW_LINE> <INDENT> return 4 <NEW_LINE> <DEDENT> @property <NEW_LINE> def output_stabilizer_size(self): <NEW_LINE> <INDENT> return 4 <NEW_LINE> <DEDENT> @property <NEW_LINE> def transformation_indices(self): <NEW_LINE> <INDENT> return idx.make_c4_p4_indices(ksize=self.kernel_size) | P4 to P4 group convolution layer.
This layer creates a convolution kernel that is convolved
(actually cross-correlated) with the layer input to produce a tensor of
outputs. If `use_bias` is True, a bias vector is created and added to the outputs.
Finally, if `activation` is not `None`, it is applied to the outputs as well.
Arguments:
filters: Integer, the dimensionality of the output space (i.e. the number of filters in the convolution).
kernel_size: An integer specifying the length of the convolution window.
strides: An integer or tuple/list of n integers, specifying the stride length of the convolution.
padding: One of `"valid"` or `"same"` (case-insensitive).
data_format: A string, one of `channels_last` (default) or `channels_first`.
The ordering of the dimensions in the inputs.
`channels_last` corresponds to inputs with shape
`(batch, ..., channels)` while `channels_first` corresponds to
inputs with shape `(batch, channels, ...)`.
activation: Activation function. Set it to None to maintain a linear activation.
use_bias: Boolean, whether the layer uses a bias.
kernel_initializer: An initializer for the convolution kernel.
bias_initializer: An initializer for the bias vector. If None, the default initializer will be used.
kernel_regularizer: Optional regularizer for the convolution kernel.
bias_regularizer: Optional regularizer for the bias vector.
activity_regularizer: Optional regularizer function for the output.
kernel_constraint: Optional projection function to be applied to the
kernel after being updated by an `Optimizer` (e.g. used to implement
norm constraints or value constraints for layer weights). The function
must take as input the unprojected variable and must return the
projected variable (which must have the same shape). Constraints are
not safe to use when doing asynchronous distributed training.
bias_constraint: Optional projection function to be applied to the bias after being updated by an `Optimizer`.
trainable: Boolean, if `True` also add variables to the graph collection
`GraphKeys.TRAINABLE_VARIABLES` (see `tf.Variable`).
name: A string, the name of the layer. | 62598fb43d592f4c4edbaf7e |
class ComponentTests(ossie.utils.testing.ScaComponentTestCase): <NEW_LINE> <INDENT> def testScaBasicBehavior(self): <NEW_LINE> <INDENT> execparams = self.getPropertySet(kinds=("execparam",), modes=("readwrite", "writeonly"), includeNil=False) <NEW_LINE> execparams = dict([(x.id, any.from_any(x.value)) for x in execparams]) <NEW_LINE> self.launch(execparams) <NEW_LINE> self.assertNotEqual(self.comp, None) <NEW_LINE> self.assertEqual(self.comp.ref._non_existent(), False) <NEW_LINE> self.assertEqual(self.comp.ref._is_a("IDL:CF/Resource:1.0"), True) <NEW_LINE> expectedProps = [] <NEW_LINE> expectedProps.extend(self.getPropertySet(kinds=("configure", "execparam"), modes=("readwrite", "readonly"), includeNil=True)) <NEW_LINE> expectedProps.extend(self.getPropertySet(kinds=("allocate",), action="external", includeNil=True)) <NEW_LINE> props = self.comp.query([]) <NEW_LINE> props = dict((x.id, any.from_any(x.value)) for x in props) <NEW_LINE> for expectedProp in expectedProps: <NEW_LINE> <INDENT> self.assertEquals(props.has_key(expectedProp.id), True) <NEW_LINE> <DEDENT> for port in self.scd.get_componentfeatures().get_ports().get_uses(): <NEW_LINE> <INDENT> port_obj = self.comp.getPort(str(port.get_usesname())) <NEW_LINE> self.assertNotEqual(port_obj, None) <NEW_LINE> self.assertEqual(port_obj._non_existent(), False) <NEW_LINE> self.assertEqual(port_obj._is_a("IDL:CF/Port:1.0"), True) <NEW_LINE> <DEDENT> for port in self.scd.get_componentfeatures().get_ports().get_provides(): <NEW_LINE> <INDENT> port_obj = self.comp.getPort(str(port.get_providesname())) <NEW_LINE> self.assertNotEqual(port_obj, None) <NEW_LINE> self.assertEqual(port_obj._non_existent(), False) <NEW_LINE> self.assertEqual(port_obj._is_a(port.get_repid()), True) <NEW_LINE> <DEDENT> self.comp.start() <NEW_LINE> self.comp.stop() <NEW_LINE> self.comp.releaseObject() | Test for all component implementations in throttle_ii | 62598fb455399d3f056265d2 |
class AnswerComment(models.Model): <NEW_LINE> <INDENT> content = models.TextField(verbose_name=_('Content')) <NEW_LINE> commenter = models.ForeignKey(User, verbose_name=_('Commenter')) <NEW_LINE> create_time = models.DateTimeField(verbose_name=_('Create Time'), auto_now_add=True) <NEW_LINE> class Meta(object): <NEW_LINE> <INDENT> app_label = 'athena' <NEW_LINE> verbose_name = _('Answer Comment') <NEW_LINE> verbose_name_plural = _('Answer Comments') | Answer comment model
| 62598fb44f6381625f19951f |
class MaxCorrectionsError(CustodianError): <NEW_LINE> <INDENT> def __init__(self, message, raises, max_errors): <NEW_LINE> <INDENT> super().__init__(message, raises) <NEW_LINE> self.max_errors = max_errors | Error raised when the maximum allowed number of errors is reached | 62598fb4f548e778e596b662 |
class PipProvider(AbstractProvider): <NEW_LINE> <INDENT> def __init__( self, factory, constraints, ignore_dependencies, upgrade_strategy, user_requested, ): <NEW_LINE> <INDENT> self._factory = factory <NEW_LINE> self._constraints = constraints <NEW_LINE> self._ignore_dependencies = ignore_dependencies <NEW_LINE> self._upgrade_strategy = upgrade_strategy <NEW_LINE> self._user_requested = user_requested <NEW_LINE> <DEDENT> def identify(self, dependency): <NEW_LINE> <INDENT> return dependency.name <NEW_LINE> <DEDENT> def get_preference( self, resolution, candidates, information ): <NEW_LINE> <INDENT> def _get_restrictive_rating(requirements): <NEW_LINE> <INDENT> lookups = (r.get_candidate_lookup() for r in requirements) <NEW_LINE> cands, ireqs = zip(*lookups) <NEW_LINE> if any(cand is not None for cand in cands): <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> spec_sets = (ireq.specifier for ireq in ireqs if ireq) <NEW_LINE> operators = [ specifier.operator for spec_set in spec_sets for specifier in spec_set ] <NEW_LINE> if any(op in ("==", "===") for op in operators): <NEW_LINE> <INDENT> return 1 <NEW_LINE> <DEDENT> if operators: <NEW_LINE> <INDENT> return 2 <NEW_LINE> <DEDENT> return 3 <NEW_LINE> <DEDENT> restrictive = _get_restrictive_rating(req for req, _ in information) <NEW_LINE> transitive = all(parent is not None for _, parent in information) <NEW_LINE> key = next(iter(candidates)).name if candidates else "" <NEW_LINE> delay_this = (key == "setuptools") <NEW_LINE> return (delay_this, restrictive, transitive, key) <NEW_LINE> <DEDENT> def find_matches(self, requirements): <NEW_LINE> <INDENT> if not requirements: <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> name = requirements[0].project_name <NEW_LINE> def _eligible_for_upgrade(name): <NEW_LINE> <INDENT> if self._upgrade_strategy == "eager": <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> elif self._upgrade_strategy == "only-if-needed": <NEW_LINE> <INDENT> return (name in self._user_requested) 
<NEW_LINE> <DEDENT> return False <NEW_LINE> <DEDENT> return self._factory.find_candidates( requirements, constraint=self._constraints.get(name, Constraint.empty()), prefers_installed=(not _eligible_for_upgrade(name)), ) <NEW_LINE> <DEDENT> def is_satisfied_by(self, requirement, candidate): <NEW_LINE> <INDENT> return requirement.is_satisfied_by(candidate) <NEW_LINE> <DEDENT> def get_dependencies(self, candidate): <NEW_LINE> <INDENT> with_requires = not self._ignore_dependencies <NEW_LINE> return [ r for r in candidate.iter_dependencies(with_requires) if r is not None ] | Pip's provider implementation for resolvelib.
:params constraints: A mapping of constraints specified by the user. Keys
are canonicalized project names.
:params ignore_dependencies: Whether the user specified ``--no-deps``.
:params upgrade_strategy: The user-specified upgrade strategy.
:params user_requested: A set of canonicalized package names that the user
supplied for pip to install/upgrade. | 62598fb45fc7496912d482db |
class LabelledPath(object): <NEW_LINE> <INDENT> labels = dict() <NEW_LINE> def __init__(self, label, path): <NEW_LINE> <INDENT> assert isinstance(label, str) and len(label) == 1 <NEW_LINE> assert label not in LabelledPath.labels <NEW_LINE> assert isinstance(path, Path) <NEW_LINE> self.label = label <NEW_LINE> self.path = path <NEW_LINE> LabelledPath.labels[self.label] = self.path | This class represents a path labelled with a unique single-letter label.
Parameters
----------
label : str
A single-letter label.
path : Path
The labelled-path.
Attributes
----------
labels : dict of (str, Path)
A mapping between labels, and paths.
label : str
A single-letter label.
path : Path
The labelled-path. | 62598fb4cc40096d6161a238 |
class ShortThrower(ThrowerAnt): <NEW_LINE> <INDENT> name = "ShortThrower" <NEW_LINE> implemented = True <NEW_LINE> food_cost = 2 <NEW_LINE> min_range = 0 <NEW_LINE> max_range = 3 | Thrower Ant with a range of 0 - 3 places | 62598fb4a219f33f346c68c4 |
class Optimization(metaclass=abc.ABCMeta): <NEW_LINE> <INDENT> supported_constraints = [] <NEW_LINE> def __init__(self, opt_method): <NEW_LINE> <INDENT> self._opt_method = opt_method <NEW_LINE> self._maxiter = DEFAULT_MAXITER <NEW_LINE> self._eps = DEFAULT_EPS <NEW_LINE> self._acc = DEFAULT_ACC <NEW_LINE> <DEDENT> @property <NEW_LINE> def maxiter(self): <NEW_LINE> <INDENT> return self._maxiter <NEW_LINE> <DEDENT> @maxiter.setter <NEW_LINE> def maxiter(self, val): <NEW_LINE> <INDENT> self._maxiter = val <NEW_LINE> <DEDENT> @property <NEW_LINE> def eps(self): <NEW_LINE> <INDENT> return self._eps <NEW_LINE> <DEDENT> @eps.setter <NEW_LINE> def eps(self, val): <NEW_LINE> <INDENT> self._eps = val <NEW_LINE> <DEDENT> @property <NEW_LINE> def acc(self): <NEW_LINE> <INDENT> return self._acc <NEW_LINE> <DEDENT> @acc.setter <NEW_LINE> def acc(self, val): <NEW_LINE> <INDENT> self._acc = val <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> fmt = f"{self.__class__.__name__}()" <NEW_LINE> return fmt <NEW_LINE> <DEDENT> @property <NEW_LINE> def opt_method(self): <NEW_LINE> <INDENT> return self._opt_method <NEW_LINE> <DEDENT> @abc.abstractmethod <NEW_LINE> def __call__(self): <NEW_LINE> <INDENT> raise NotImplementedError("Subclasses should implement this method") | Base class for optimizers.
Parameters
----------
opt_method : callable
Implements optimization method
Notes
-----
The base Optimizer does not support any constraints by default; individual
optimizers should explicitly set this list to the specific constraints
it supports. | 62598fb4a8370b77170f049b |
class Reader(object): <NEW_LINE> <INDENT> def __init__(self, filename): <NEW_LINE> <INDENT> self.filename = filename <NEW_LINE> return self.process() <NEW_LINE> <DEDENT> def process(self): <NEW_LINE> <INDENT> pass | Generic Reader class for processing input. Should be subclassed instead of used directly. | 62598fb4236d856c2adc949e |
class Saver(PostProcessorBaseClass): <NEW_LINE> <INDENT> def __init__(self, spec: LoaderSpec) -> None: <NEW_LINE> <INDENT> super().__init__(spec) <NEW_LINE> self._time_list = [] <NEW_LINE> self._first_compute = True <NEW_LINE> if df.MPI.rank(df.MPI.comm_world) == 0: <NEW_LINE> <INDENT> self._casedir.mkdir(parents=True, exist_ok=self._spec.overwrite_casedir) <NEW_LINE> <DEDENT> df.MPI.barrier(df.MPI.comm_world) <NEW_LINE> <DEDENT> def store_mesh( self, mesh: dolfin.Mesh, cell_domains: dolfin.MeshFunction = None, facet_domains: dolfin.MeshFunction = None ) -> None: <NEW_LINE> <INDENT> with dolfin.XDMFFile(mesh.mpi_comm(), str(self._casedir / "mesh.xdmf")) as meshfile: <NEW_LINE> <INDENT> meshfile.write(mesh) <NEW_LINE> <DEDENT> if cell_domains is not None: <NEW_LINE> <INDENT> with df.XDMFFile(mesh.mpi_comm(), str(self._casedir / "cell_function.xdmf")) as cf_file: <NEW_LINE> <INDENT> cf_file.write(mesh) <NEW_LINE> cf_file.write(cell_domains) <NEW_LINE> <DEDENT> <DEDENT> if facet_domains is not None: <NEW_LINE> <INDENT> with df.XDMFFile(mesh.mpi_comm(), str(self._casedir / "facet_function.xdmf")) as ff_file: <NEW_LINE> <INDENT> ff_file.write(mesh) <NEW_LINE> ff_file.write(facet_domains) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def add_field(self, field: Field) -> None: <NEW_LINE> <INDENT> msg = "A field with name {name} already exists.".format(name=field.name) <NEW_LINE> assert field.name not in self._fields, msg <NEW_LINE> field.path = self._casedir <NEW_LINE> self._fields[field.name] = field <NEW_LINE> <DEDENT> def update( self, time: float, timestep: Union[int, dolfin.Constant], data_dict: Dict[str, dolfin.Function] ) -> None: <NEW_LINE> <INDENT> self._time_list.append(float(time)) <NEW_LINE> for name, data in data_dict.items(): <NEW_LINE> <INDENT> self._fields[name].update(timestep, time, data) <NEW_LINE> <DEDENT> filename = self._casedir / Path("times.txt") <NEW_LINE> with open(filename, "a") as of_handle: <NEW_LINE> <INDENT> of_handle.write("{} 
{}\n".format(timestep, float(time))) <NEW_LINE> <DEDENT> <DEDENT> def update_this_timestep(self, *, field_names: Iterable[str], timestep: int, time: float) -> bool: <NEW_LINE> <INDENT> return any([self._fields[name].save_this_timestep(timestep, time) for name in field_names]) <NEW_LINE> <DEDENT> def store_initial_condition(self, data_dict) -> None: <NEW_LINE> <INDENT> time = 0.0 <NEW_LINE> timestep = 0 <NEW_LINE> self._time_list.append(time) <NEW_LINE> for name, data in data_dict.items(): <NEW_LINE> <INDENT> self._fields[name].update(timestep, time, data) <NEW_LINE> <DEDENT> with (self._casedir / Path("times.txt")).open("a") as of_handle: <NEW_LINE> <INDENT> of_handle.write("{} {}\n".format(timestep, float(time))) <NEW_LINE> <DEDENT> <DEDENT> def close(self) -> None: <NEW_LINE> <INDENT> for _, field in self._fields.items(): <NEW_LINE> <INDENT> field.close() | Class for saving stuff. | 62598fb460cbc95b06364403 |
class Review(object): <NEW_LINE> <INDENT> def __init__(self, reviewerID, productID, helpful, reviewText, overall,category): <NEW_LINE> <INDENT> self.reviewerID = reviewerID <NEW_LINE> self.productID = productID <NEW_LINE> self.helpful = helpful <NEW_LINE> self.reviewText = reviewText <NEW_LINE> self.overall = overall <NEW_LINE> self.category = category | reviewerID - ID of the reviewer, e.g. A2SUAM1J3GNN3B
asin - ID of the product, e.g. 0000013714
reviewerName - name of the reviewer
helpful - helpfulness rating of the review, e.g. 2/3
reviewText - text of the review
overall - rating of the product
summary - summary of the review
unixReviewTime - time of the review (unix time)
reviewTime - time of the review (raw) | 62598fb456ac1b37e63022a9 |
class PositionalTensorArgs: <NEW_LINE> <INDENT> def __init__(self, args): <NEW_LINE> <INDENT> self.args = args <NEW_LINE> self.tf_args = [(i,a) for i,a in enumerate(args) if is_tensor(a)] <NEW_LINE> <DEDENT> @property <NEW_LINE> def tensor_args(self): <NEW_LINE> <INDENT> return [a for i,a in self.tf_args] <NEW_LINE> <DEDENT> def mix_args(self, tensor_values): <NEW_LINE> <INDENT> args = list(self.args) <NEW_LINE> for i, (j, _) in enumerate(self.tf_args): <NEW_LINE> <INDENT> args[j] = tensor_values[i] <NEW_LINE> <DEDENT> return args | Handle tensor arguments. | 62598fb423849d37ff851172 |
class Conmat(CommandLine): <NEW_LINE> <INDENT> _cmd = 'conmat' <NEW_LINE> input_spec = ConmatInputSpec <NEW_LINE> output_spec = ConmatOutputSpec <NEW_LINE> def _list_outputs(self): <NEW_LINE> <INDENT> outputs = self.output_spec().get() <NEW_LINE> output_root = self._gen_outputroot() <NEW_LINE> outputs['conmat_sc'] = os.path.abspath(output_root + "sc.csv") <NEW_LINE> outputs['conmat_ts'] = os.path.abspath(output_root + "ts.csv") <NEW_LINE> return outputs <NEW_LINE> <DEDENT> def _gen_outfilename(self): <NEW_LINE> <INDENT> return self._gen_outputroot() <NEW_LINE> <DEDENT> def _gen_outputroot(self): <NEW_LINE> <INDENT> output_root = self.inputs.output_root <NEW_LINE> if not isdefined(output_root): <NEW_LINE> <INDENT> output_root = self._gen_filename('output_root') <NEW_LINE> <DEDENT> return output_root <NEW_LINE> <DEDENT> def _gen_filename(self, name): <NEW_LINE> <INDENT> if name == 'output_root': <NEW_LINE> <INDENT> _, filename, _ = split_filename(self.inputs.in_file) <NEW_LINE> filename = filename + "_" <NEW_LINE> <DEDENT> return filename | Creates a connectivity matrix using a 3D label image (the target image)
and a set of streamlines. The connectivity matrix records how many stream-
lines connect each pair of targets, and optionally the mean tractwise
statistic (eg tract-averaged FA, or length).
The output is a comma separated variable file or files. The first row of
the output matrix is label names. Label names may be defined by the user,
otherwise they are assigned based on label intensity.
Starting from the seed point, we move along the streamline until we find
a point in a labeled region. This is done in both directions from the seed
point. Streamlines are counted if they connect two target regions, one on
either side of the seed point. Only the labeled region closest to the seed
is counted, for example if the input contains two streamlines: ::
1: A-----B------SEED---C
2: A--------SEED-----------
then the output would be ::
A,B,C
0,0,0
0,0,1
0,1,0
There are zero connections to A because in streamline 1, the connection
to B is closer to the seed than the connection to A, and in streamline 2
there is no region reached in the other direction.
The connected target regions can have the same label, as long as the seed
point is outside of the labeled region and both ends connect to the same
label (which may be in different locations). Therefore this is allowed: ::
A------SEED-------A
Such fibers will add to the diagonal elements of the matrix. To remove
these entries, run procstreamlines with -endpointfile before running conmat.
If the seed point is inside a labled region, it counts as one end of the
connection. So ::
----[SEED inside A]---------B
counts as a connection between A and B, while ::
C----[SEED inside A]---------B
counts as a connection between A and C, because C is closer to the seed point.
In all cases, distance to the seed point is defined along the streamline path.
Example 1
---------
To create a standard connectivity matrix based on streamline counts.
>>> import nipype.interfaces.camino as cam
>>> conmat = cam.Conmat()
>>> conmat.inputs.in_file = 'tracts.Bdouble'
>>> conmat.inputs.target_file = 'atlas.nii.gz'
>>> conmat.run() # doctest: +SKIP
Example 1
---------
To create a standard connectivity matrix and mean tractwise FA statistics.
>>> import nipype.interfaces.camino as cam
>>> conmat = cam.Conmat()
>>> conmat.inputs.in_file = 'tracts.Bdouble'
>>> conmat.inputs.target_file = 'atlas.nii.gz'
>>> conmat.inputs.scalar_file = 'fa.nii.gz'
>>> conmat.tract_stat = 'mean'
>>> conmat.run() # doctest: +SKIP | 62598fb4baa26c4b54d4f376 |
class Sorting(Enum): <NEW_LINE> <INDENT> ASCENDING = True <NEW_LINE> DESCENDING = False | Util class, to choose what type of sorting should be used. | 62598fb4097d151d1a2c10ee |
class TransactionsList(APIView): <NEW_LINE> <INDENT> renderer_classes = [TemplateHTMLRenderer] <NEW_LINE> template_name = 'transaction.html' <NEW_LINE> def get(self, request, *args, **kwargs): <NEW_LINE> <INDENT> serializer = TransactionSerializer(data=request.data) <NEW_LINE> if serializer.is_valid(): <NEW_LINE> <INDENT> serializer.save() <NEW_LINE> transaction = Transactions(**serializer.validated_data) <NEW_LINE> transaction.save() <NEW_LINE> if Transactions.objects.filter(added_to_block = False).count() >= 2: <NEW_LINE> <INDENT> trans = Transactions.objects.filter(added_to_block = False) <NEW_LINE> t = [] <NEW_LINE> for item in trans: <NEW_LINE> <INDENT> data1 = item.__dict__ <NEW_LINE> data1.pop("_state",0) <NEW_LINE> data1.pop("added_to_block",0) <NEW_LINE> t.append(data1) <NEW_LINE> <DEDENT> trans.update(added_to_block=True) <NEW_LINE> block_obj = Block.mine_block("first",t,) <NEW_LINE> block_obj.save() <NEW_LINE> headers = self.get_success_headers(serializer.data) <NEW_LINE> <DEDENT> <DEDENT> return Response(serializer.data, status=status.HTTP_201_CREATED, headers=headers) | Create a new transaction. | 62598fb456ac1b37e63022aa |
class KMeans(Clustering): <NEW_LINE> <INDENT> def __init__(self, n_clusters=2, n_runs=10): <NEW_LINE> <INDENT> self.n_clusters = n_clusters <NEW_LINE> self.n_runs = n_runs <NEW_LINE> self.distortion = 0 <NEW_LINE> self.centroids = [] <NEW_LINE> self.clusters = [] <NEW_LINE> self._X = None <NEW_LINE> <DEDENT> def _calc_distortion(self): <NEW_LINE> <INDENT> m = self._X.shape[0] <NEW_LINE> self.distortion = 1/m * sum( linalg.norm(self._X[i, :] - self.centroids[self.clusters[i]])**2 for i in range(m) ) <NEW_LINE> return self.distortion <NEW_LINE> <DEDENT> def _init_random_centroids(self): <NEW_LINE> <INDENT> self.centroids = self._X[np.random.choice(list(range(self._X.shape[0])), size=self.n_clusters), :] <NEW_LINE> <DEDENT> def _move_centroids(self): <NEW_LINE> <INDENT> for k in range(self.n_clusters): <NEW_LINE> <INDENT> if k in self.clusters: <NEW_LINE> <INDENT> centroid = np.mean(self._X[self.clusters == k, :], axis=0) <NEW_LINE> self.centroids[k] = centroid <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.n_clusters-=1 <NEW_LINE> self.centroids = self.centroids[:self.n_clusters] <NEW_LINE> self.clusters-=1 <NEW_LINE> k-=1 <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def _closest_centroid(self, x): <NEW_LINE> <INDENT> closest_centroid = 0 <NEW_LINE> distance = 10^9 <NEW_LINE> for i in range(self.n_clusters): <NEW_LINE> <INDENT> current_distance = linalg.norm(x - self.centroids[i]) <NEW_LINE> if current_distance < distance: <NEW_LINE> <INDENT> closest_centroid = i <NEW_LINE> distance = current_distance <NEW_LINE> <DEDENT> <DEDENT> return closest_centroid <NEW_LINE> <DEDENT> def _assign_clusters(self): <NEW_LINE> <INDENT> self.clusters = np.array([self._closest_centroid(x) for x in self._X]) <NEW_LINE> <DEDENT> def fit(self, X): <NEW_LINE> <INDENT> self._X = super().cluster(X) <NEW_LINE> candidates = [] <NEW_LINE> for _ in range(self.n_runs): <NEW_LINE> <INDENT> self._init_random_centroids() <NEW_LINE> while True: <NEW_LINE> <INDENT> prev_clusters = self.clusters 
<NEW_LINE> self._assign_clusters() <NEW_LINE> self._move_centroids() <NEW_LINE> if np.all(prev_clusters == self.clusters): <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> <DEDENT> self._calc_distortion() <NEW_LINE> candidates.append((self.distortion, self.centroids, self.clusters)) <NEW_LINE> <DEDENT> candidates.sort(key=lambda x: x[0]) <NEW_LINE> self.distortion = candidates[0][0] <NEW_LINE> self.centroids = candidates[0][1] <NEW_LINE> self.clusters = candidates[0][2] <NEW_LINE> return self <NEW_LINE> <DEDENT> def cluster(self, X): <NEW_LINE> <INDENT> X = super().cluster(X) <NEW_LINE> return np.array([self._closest_centroid(x) for x in X]) | K-Means Clustering algorithm
Parameters:
-----------
n_clusters : integer, optional
n_runs : integer, how many times to run the algorithm, optional | 62598fb460cbc95b06364404 |
class ISODatastore(Updateable): <NEW_LINE> <INDENT> def __init__(self, provider=None): <NEW_LINE> <INDENT> self.provider = provider <NEW_LINE> <DEDENT> def _form_mapping(self, create=None, **kwargs): <NEW_LINE> <INDENT> return {'provider': kwargs.get('provider')} <NEW_LINE> <DEDENT> def _submit(self, cancel, submit_button): <NEW_LINE> <INDENT> if cancel: <NEW_LINE> <INDENT> sel.click(form_buttons.cancel) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> sel.click(submit_button) <NEW_LINE> flash.assert_no_errors() <NEW_LINE> <DEDENT> <DEDENT> def create(self, cancel=False, refresh=True): <NEW_LINE> <INDENT> sel.force_navigate('infrastructure_iso_datastore_new') <NEW_LINE> fill(iso_properties_form, self._form_mapping(True, **self.__dict__)) <NEW_LINE> self._submit(cancel, form_buttons.add) <NEW_LINE> flash.assert_message_match('ISO Datastore "{}" was added'.format(self.provider)) <NEW_LINE> if refresh: <NEW_LINE> <INDENT> self.refresh() <NEW_LINE> <DEDENT> <DEDENT> def exists(self): <NEW_LINE> <INDENT> sel.force_navigate('infrastructure_iso_datastores') <NEW_LINE> try: <NEW_LINE> <INDENT> iso_tree(self.provider) <NEW_LINE> return True <NEW_LINE> <DEDENT> except CandidateNotFound: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> def delete(self, cancel=True): <NEW_LINE> <INDENT> sel.force_navigate('infrastructure_iso_datastore', context=self) <NEW_LINE> cfg_btn('Remove this ISO Datastore from the VMDB', invokes_alert=True) <NEW_LINE> sel.handle_alert(cancel=cancel) <NEW_LINE> flash.assert_message_match('ISO Datastore "{}": Delete successful'.format(self.provider)) <NEW_LINE> <DEDENT> def refresh(self, wait=True): <NEW_LINE> <INDENT> sel.force_navigate('infrastructure_iso_datastore', context=self) <NEW_LINE> last_time = iso_details_page.infoblock.text('Basic Information', 'Last Refreshed On') <NEW_LINE> cfg_btn('Refresh Relationships', invokes_alert=True) <NEW_LINE> sel.handle_alert() <NEW_LINE> flash.assert_message_match( 'ISO Datastore "{}": Refresh 
Relationships successfully initiated' .format(self.provider)) <NEW_LINE> if wait: <NEW_LINE> <INDENT> wait_for(lambda lt: lt != pxe_details_page.infoblock.text ('Basic Information', 'Last Refreshed On'), func_args=[last_time], fail_func=sel.refresh, num_sec=120) <NEW_LINE> <DEDENT> <DEDENT> def set_iso_image_type(self, image_name, image_type): <NEW_LINE> <INDENT> sel.force_navigate('infrastructure_iso_datastores') <NEW_LINE> iso_tree(self.provider, 'ISO Images', image_name) <NEW_LINE> cfg_btn('Edit this ISO Image') <NEW_LINE> fill(iso_image_type_form, {'image_type': image_type}, action=form_buttons.save) | Model of a PXE Server object in CFME
Args:
provider: Provider name. | 62598fb4f548e778e596b663 |
class ReferenceCounter(BoundVariableTracker): <NEW_LINE> <INDENT> def __init__(self, name, value): <NEW_LINE> <INDENT> super().__init__(name, value) <NEW_LINE> self.count = 0 <NEW_LINE> <DEDENT> def update(self, reference=None): <NEW_LINE> <INDENT> del reference <NEW_LINE> self.count += 1 <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return 'Instance count: {}; value: {}; name: {}.'.format( self.count, self.value, self.name) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return str(self) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if self is other: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> if not isinstance(other, ReferenceCounter): <NEW_LINE> <INDENT> return NotImplemented <NEW_LINE> <DEDENT> if not super().__eq__(other): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.count == other.count | Data container to track number References to a variable in an AST.
Attributes:
name: The string name representing the variable whose binding is represented
by an instance of `ReferenceCounter`.
value: The value bound to `name`. Can be an instance of
`building_blocks.ComputationBuildingBlock` or None if this binding is
simply a placeholder, e.g. in a Lambda.
count: An integer tracking how many times the variable an instance of
`ReferenceCounter` represents is referenced in a TFF AST. | 62598fb4cc0a2c111447b0d2 |
@register_node <NEW_LINE> class Function(Node): <NEW_LINE> <INDENT> def __init__(self, graph, name, inputs=[], config={}): <NEW_LINE> <INDENT> super(Function, self).__init__(graph, name, inputs=inputs, config=config) <NEW_LINE> self.computes_gradient = False <NEW_LINE> <DEDENT> def setup_defaults(self): <NEW_LINE> <INDENT> super(Function, self).setup_defaults() <NEW_LINE> self.conf_default("expression", None) <NEW_LINE> self.conf_default("output_shape", None) <NEW_LINE> <DEDENT> def alloc(self): <NEW_LINE> <INDENT> if len(self.inputs) != 1: <NEW_LINE> <INDENT> raise AssertionError("Function nodes need exactly one input.") <NEW_LINE> <DEDENT> input_shape = self.inputs[0].output_shape <NEW_LINE> if self.conf("output_shape") is None: <NEW_LINE> <INDENT> self.output_shape = input_shape <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.output_shape = self.conf("output_shape") <NEW_LINE> <DEDENT> <DEDENT> def forward(self): <NEW_LINE> <INDENT> inp = self.inputs[0].expression <NEW_LINE> lambda_func = self.conf("expression") <NEW_LINE> if lambda_func is None: <NEW_LINE> <INDENT> raise AssertionError("Function Nodes need to have the 'expression' config set!") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if not hasattr(lambda_func, '__call__'): <NEW_LINE> <INDENT> raise AssertionError("'expression' config parameter has to be callable!") <NEW_LINE> <DEDENT> self.expression = lambda_func(inp) | Define an arbitrary function inside the computational graph | 62598fb4d486a94d0ba2c091 |
class ExecutionRecursionDecorator(object):
    """Catches recursions of executions.

    Designed like a singleton: only one instance should exist, and all the
    bookkeeping state (counters and function stacks) lives on the class.
    """

    def __init__(self, func):
        self.func = func
        self.reset()

    def __call__(self, execution, evaluate_generator=False):
        debug.dbg('Execution recursions: %s' % execution, self.recursion_level,
                  self.execution_count, len(self.execution_funcs))
        if self.check_recursion(execution, evaluate_generator):
            result = []
        else:
            result = self.func(execution, evaluate_generator)
        self.cleanup()
        return result

    @classmethod
    def cleanup(cls):
        # Pop the frame pushed by check_recursion, if any.
        if cls.parent_execution_funcs:
            cls.parent_execution_funcs.pop()
            cls.recursion_level -= 1

    @classmethod
    def check_recursion(cls, execution, evaluate_generator):
        """Return True when this execution should be cut off as a recursion."""
        in_par_execution_funcs = execution.base in cls.parent_execution_funcs
        in_execution_funcs = execution.base in cls.execution_funcs
        cls.recursion_level += 1
        cls.execution_count += 1
        cls.execution_funcs.add(execution.base)
        cls.parent_execution_funcs.append(execution.base)

        if cls.execution_count > settings.max_executions:
            return True

        # Generators/arrays are cheap to execute and never limited.
        if isinstance(execution.base, (evaluate.Generator, evaluate.Array)):
            return False
        module = execution.get_parent_until()
        if evaluate_generator or module == builtin.Builtin.scope:
            return False

        if in_par_execution_funcs:
            if cls.recursion_level > settings.max_function_recursion_level:
                return True
        if in_execution_funcs and \
                len(cls.execution_funcs) > settings.max_until_execution_unique:
            return True
        if cls.execution_count > settings.max_executions_without_builtins:
            return True
        return False

    @classmethod
    def reset(cls):
        cls.recursion_level = 0
        cls.parent_execution_funcs = []
        cls.execution_funcs = set()
        cls.execution_count = 0
class TeamMembershipType(bb.Union):
    """Tagged union of team membership kinds.

    Only one of the ``is_*`` methods returns True.

    :ivar full: user uses a license and has full access to team resources
        such as the shared quota.
    :ivar limited: user has no access to the shared quota and team admins
        have restricted administrative control.
    """

    _catch_all = None
    # Attribute values are replaced by the bb.Union machinery.
    full = None
    limited = None

    def is_full(self):
        """Check whether this union's tag is ``full``."""
        return self._tag == 'full'

    def is_limited(self):
        """Check whether this union's tag is ``limited``."""
        return self._tag == 'limited'

    def _process_custom_annotations(self, annotation_type, processor):
        super(TeamMembershipType, self)._process_custom_annotations(
            annotation_type, processor)

    def __repr__(self):
        return 'TeamMembershipType(%r, %r)' % (self._tag, self._value)
class ResmokeGenTaskService:
    """A service to generate split resmoke suites."""

    @inject.autoparams()
    def __init__(self, gen_task_options: GenTaskOptions) -> None:
        self.gen_task_options = gen_task_options

    def generate_tasks(self, generated_suite: GeneratedSuite,
                       params: ResmokeGenTaskParams) -> Set[Task]:
        """Build one evergreen task per sub-suite, plus the ``_misc`` catch-all."""
        tasks = {
            self._create_sub_task(sub_suite, generated_suite, params)
            for sub_suite in generated_suite.sub_suites
        }
        if self.gen_task_options.create_misc_suite:
            # The misc suite picks up tests not assigned to any sub-suite;
            # it has no runtime history, hence no timeout estimate.
            misc_suite_name = f"{os.path.basename(generated_suite.suite_name)}_misc"
            misc_task_name = f"{generated_suite.task_name}_misc_{generated_suite.build_variant}"
            tasks.add(
                self._generate_task(misc_suite_name, misc_task_name,
                                    TimeoutEstimate.no_timeouts(), params,
                                    generated_suite))
        return tasks

    def _create_sub_task(self, sub_suite: SubSuite, suite: GeneratedSuite,
                         params: ResmokeGenTaskParams) -> Task:
        """Create the evergreen task for a single generated sub-suite."""
        sub_task_name = taskname.name_generated_task(
            suite.task_name, sub_suite.index, len(suite), suite.build_variant)
        return self._generate_task(
            sub_suite.name(len(suite)), sub_task_name,
            sub_suite.get_timeout_estimate(), params, suite)

    def _generate_task(self, sub_suite_name: str, sub_task_name: str,
                       timeout_est: TimeoutEstimate,
                       params: ResmokeGenTaskParams,
                       suite: GeneratedSuite) -> Task:
        """Assemble the shrub Task (commands + dependencies) for a sub-suite."""
        LOGGER.debug("Generating task", sub_suite=sub_suite_name)
        target_suite_file = self.gen_task_options.suite_location(sub_suite_name)
        run_tests_vars = self._get_run_tests_vars(
            target_suite_file, suite.suite_name, params, suite.build_variant)
        use_multiversion = params.use_multiversion
        timeout_cmd = timeout_est.generate_timeout_cmd(
            self.gen_task_options.is_patch, params.repeat_suites,
            self.gen_task_options.use_default_timeouts)
        commands = resmoke_commands("run generated tests", run_tests_vars,
                                    timeout_cmd, use_multiversion)
        return Task(sub_task_name, commands, self._get_dependencies())

    @staticmethod
    def _get_run_tests_vars(suite_file: str, suite_name: str,
                            params: ResmokeGenTaskParams,
                            build_variant: str) -> Dict[str, Any]:
        """Build the expansion variables passed to the run-tests function."""
        variables = {
            "resmoke_args": params.generate_resmoke_args(
                suite_file, suite_name, build_variant),
            "gen_task_config_location": params.config_location,
        }
        if params.resmoke_jobs_max:
            variables["resmoke_jobs_max"] = params.resmoke_jobs_max
        if params.use_multiversion:
            variables["task_path_suffix"] = params.use_multiversion
        return variables

    @staticmethod
    def _get_dependencies() -> Set[TaskDependency]:
        """Return the dependencies every generated task shares."""
        return {TaskDependency("archive_dist_test_debug")}
class Python27(Python, Dependency): <NEW_LINE> <INDENT> name = "python27" <NEW_LINE> recommended = False | Backwards compatibility. | 62598fb4adb09d7d5dc0a64c |
class Pitchfork(Publication): <NEW_LINE> <INDENT> title = 'Pitchfork: Best New Albums' <NEW_LINE> url = "http://pitchfork.com/reviews/best/albums/" <NEW_LINE> rank = 3 <NEW_LINE> medium = Album() <NEW_LINE> @Publication.catch_scraper_exceptions <NEW_LINE> def scrape(self): <NEW_LINE> <INDENT> reviews = self.html.find_all(class_='info') <NEW_LINE> for r in reviews[:15]: <NEW_LINE> <INDENT> self.recommendations.append((r.h1.contents[0], r.h2.contents[0])) | The Pitchfork Music publication class | 62598fb4fff4ab517ebcd8a6 |
class MateriaDaTurmaDetailView(LoginRequiredMixin, DetailView): <NEW_LINE> <INDENT> model = MateriaDaTurma <NEW_LINE> context_object_name = 'materia' | View de detalhes sobre a materia | 62598fb47047854f4633f49a |
class UtgReplayPolicy(InputPolicy): <NEW_LINE> <INDENT> def __init__(self, device, app, replay_output): <NEW_LINE> <INDENT> super(UtgReplayPolicy, self).__init__(device, app) <NEW_LINE> self.logger = logging.getLogger(self.__class__.__name__) <NEW_LINE> self.replay_output = replay_output <NEW_LINE> import os <NEW_LINE> event_dir = os.path.join(replay_output, "events") <NEW_LINE> self.event_paths = sorted([os.path.join(event_dir, x) for x in next(os.walk(event_dir))[2] if x.endswith(".json")]) <NEW_LINE> self.event_idx = 2 <NEW_LINE> self.num_replay_tries = 0 <NEW_LINE> <DEDENT> def generate_event(self): <NEW_LINE> <INDENT> import time <NEW_LINE> while self.event_idx < len(self.event_paths) and self.num_replay_tries < MAX_REPLY_TRIES: <NEW_LINE> <INDENT> self.num_replay_tries += 1 <NEW_LINE> current_state = self.device.get_current_state() <NEW_LINE> if current_state is None: <NEW_LINE> <INDENT> time.sleep(5) <NEW_LINE> self.num_replay_tries = 0 <NEW_LINE> return KeyEvent(name="BACK") <NEW_LINE> <DEDENT> curr_event_idx = self.event_idx <NEW_LINE> while curr_event_idx < len(self.event_paths): <NEW_LINE> <INDENT> event_path = self.event_paths[curr_event_idx] <NEW_LINE> with open(event_path, "r") as f: <NEW_LINE> <INDENT> curr_event_idx += 1 <NEW_LINE> try: <NEW_LINE> <INDENT> event_dict = json.load(f) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> self.logger.info("Loading %s failed" % event_path) <NEW_LINE> continue <NEW_LINE> <DEDENT> if event_dict["start_state"] != current_state.state_str: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> self.logger.info("Replaying %s" % event_path) <NEW_LINE> self.event_idx = curr_event_idx <NEW_LINE> self.num_replay_tries = 0 <NEW_LINE> return InputEvent.from_dict(event_dict["event"]) <NEW_LINE> <DEDENT> <DEDENT> time.sleep(5) <NEW_LINE> <DEDENT> raise InputInterruptedException("No more record can be replayed.") | Replay DroidBot output generated by UTG policy | 62598fb43d592f4c4edbaf80 |
class Character: <NEW_LINE> <INDENT> def __init__(self, char, position, clock) -> None: <NEW_LINE> <INDENT> self.char = char <NEW_LINE> self.position = position <NEW_LINE> self.clock = clock <NEW_LINE> <DEDENT> @property <NEW_LINE> def author(self) -> int: <NEW_LINE> <INDENT> return self.position.sites[-1] <NEW_LINE> <DEDENT> def __lt__(self, other) -> bool: <NEW_LINE> <INDENT> return self.position < other.position | Represents a character in CRDT document. | 62598fb492d797404e388bc3 |
@public <NEW_LINE> @implementer(IRule) <NEW_LINE> class SuspiciousHeader: <NEW_LINE> <INDENT> name = 'suspicious-header' <NEW_LINE> description = _('Catch messages with suspicious headers.') <NEW_LINE> record = True <NEW_LINE> def check(self, mlist, msg, msgdata): <NEW_LINE> <INDENT> return (mlist.bounce_matching_headers and has_matching_bounce_header(mlist, msg)) | The historical 'suspicious header' rule. | 62598fb44e4d5625663724e3 |
class Worker(BaseAnt): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> BaseAnt.__init__(self) <NEW_LINE> self.role = 'Worker' | A worker in the ant colony. | 62598fb455399d3f056265d4 |
class OpCodeMapper(object): <NEW_LINE> <INDENT> def __init__(self, data): <NEW_LINE> <INDENT> self.code_to_name = {} <NEW_LINE> for idx, d in enumerate(data["operator_codes"]): <NEW_LINE> <INDENT> self.code_to_name[idx] = d["builtin_code"] <NEW_LINE> <DEDENT> <DEDENT> def __call__(self, x): <NEW_LINE> <INDENT> if x not in self.code_to_name: <NEW_LINE> <INDENT> s = "<UNKNOWN>" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> s = self.code_to_name[x] <NEW_LINE> <DEDENT> return "%s (opcode=%d)" % (s, x) | Maps an opcode index to an op name. | 62598fb4be8e80087fbbf127 |
class SatTableWithoutHeaders(Table):
    """Table widget for application tables that have no header row.

    Due to the logic of the ``Table`` widget we must explicitly provide
    custom headers.  Since the table's content and structure are unknown in
    advance, each column is named dynamically: 'column0', 'column1', ...

    Example html representation::

        <table>
            <tbody>
                <tr>
                    <td>Name</td>
                    <td>my_host</td>
                    <td>my_new_host</td>
                </tr>
                <tr>
                    <td>Architecture</td>
                    <td>x32</td>
                    <td>x64</td>
                </tr>
            </tbody>
        </table>

    Locator example::

        //table[@id='audit_table']
    """

    ROWS = './tbody/tr'
    COLUMNS = './tbody/tr[1]/td'
    ROW_AT_INDEX = './tbody/tr[{0}]'
    HEADER_IN_ROWS = None
    HEADERS = None

    @property
    def _is_header_in_body(self):
        # There is no header row embedded in the body.
        return False

    @cached_property
    def headers(self):
        """Synthesize one 'columnN' name per cell in the first row."""
        cells = self.browser.elements(self.COLUMNS, parent=self)
        return tuple(f'column{index}' for index in range(len(cells)))
class itkIntensityWindowingImageFilterID3IUC3_Superclass(itkInPlaceImageFilterAPython.itkInPlaceImageFilterID3IUC3): <NEW_LINE> <INDENT> thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag') <NEW_LINE> def __init__(self, *args, **kwargs): raise AttributeError("No constructor defined") <NEW_LINE> __repr__ = _swig_repr <NEW_LINE> def __New_orig__(): <NEW_LINE> <INDENT> return _itkIntensityWindowingImageFilterPython.itkIntensityWindowingImageFilterID3IUC3_Superclass___New_orig__() <NEW_LINE> <DEDENT> __New_orig__ = staticmethod(__New_orig__) <NEW_LINE> def GetFunctor(self, *args): <NEW_LINE> <INDENT> return _itkIntensityWindowingImageFilterPython.itkIntensityWindowingImageFilterID3IUC3_Superclass_GetFunctor(self, *args) <NEW_LINE> <DEDENT> def SetFunctor(self, *args): <NEW_LINE> <INDENT> return _itkIntensityWindowingImageFilterPython.itkIntensityWindowingImageFilterID3IUC3_Superclass_SetFunctor(self, *args) <NEW_LINE> <DEDENT> __swig_destroy__ = _itkIntensityWindowingImageFilterPython.delete_itkIntensityWindowingImageFilterID3IUC3_Superclass <NEW_LINE> def cast(*args): <NEW_LINE> <INDENT> return _itkIntensityWindowingImageFilterPython.itkIntensityWindowingImageFilterID3IUC3_Superclass_cast(*args) <NEW_LINE> <DEDENT> cast = staticmethod(cast) <NEW_LINE> def GetPointer(self): <NEW_LINE> <INDENT> return _itkIntensityWindowingImageFilterPython.itkIntensityWindowingImageFilterID3IUC3_Superclass_GetPointer(self) <NEW_LINE> <DEDENT> def New(*args, **kargs): <NEW_LINE> <INDENT> obj = itkIntensityWindowingImageFilterID3IUC3_Superclass.__New_orig__() <NEW_LINE> import itkTemplate <NEW_LINE> itkTemplate.New(obj, *args, **kargs) <NEW_LINE> return obj <NEW_LINE> <DEDENT> New = staticmethod(New) | Proxy of C++ itkIntensityWindowingImageFilterID3IUC3_Superclass class | 62598fb4bd1bec0571e15122 |
class PythonLexer(Qsci.QsciLexerPython):
    """A custom Python lexer which highlights extra identifiers.

    Keyword-set 2 is overridden to return Python builtins/exceptions so the
    editor highlights them in addition to the language keywords.
    """

    # Joined with ' ' so adjacent entries can never fuse together.  The
    # original implicit string concatenation dropped separators on some
    # lines, producing bogus tokens such as 'filterfloat', 'inputint' and
    # 'sortedstaticmethod'; 'EOFErr' was a truncation of 'EOFError'.
    py_kwds = ' '.join((
        'ArithmeticError AssertionError AttributeError BaseException',
        'BufferError BytesWarning DeprecationWarning EOFError Ellipsis',
        'EnvironmentError Exception False FloatingPointError FutureWarning',
        'GeneratorExit IOError ImportError ImportWarning IndentationError',
        'IndexError KeyError KeyboardInterrupt LookupError MemoryError',
        'NameError None NotImplemented NotImplementedError OSError',
        'OverflowError PendingDeprecationWarning ReferenceError RuntimeError',
        'RuntimeWarning StandardError StopIteration SyntaxError SyntaxWarning',
        'SystemError SystemExit TabError True TypeError UnboundLocalError',
        'UnicodeDecodeError UnicodeEncodeError UnicodeError',
        'UnicodeTranslateError UnicodeWarning UserWarning ValueError Warning',
        'WindowsError ZeroDivisionError abs all any apply basestring bin bool',
        'buffer bytearray bytes callable chr classmethod cmp coerce compile',
        'complex delattr dict dir divmod enumerate eval execfile file filter',
        'float format frozenset getattr globals hasattr hash help hex id input',
        'int intern isinstance issubclass iter len list locals long map max',
        'memoryview min next object oct open ord pow print property range',
        'raw_input reduce reload repr reversed round set setattr slice sorted',
        'staticmethod str sum super tuple type unichr unicode vars xrange zip',
    ))

    def __init__(self, *args):
        super(PythonLexer, self).__init__(*args)
        # Older QScintilla builds lack this setter; skip silently if absent.
        setter = getattr(self, 'setHighlightSubidentifiers', None)
        if setter is not None:
            setter(False)

    def keywords(self, kwset):
        """Return the extra builtin keywords for set 2; defer otherwise."""
        if kwset == 2:
            return self.py_kwds
        return super(PythonLexer, self).keywords(kwset)
class GoawayFrame(SpdyFrame): <NEW_LINE> <INDENT> def __init__(self, last_stream_id, reason): <NEW_LINE> <INDENT> SpdyFrame.__init__(self, FrameTypes.GOAWAY, Flags.FLAG_NONE) <NEW_LINE> self.last_stream_id = last_stream_id <NEW_LINE> self.reason = reason <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return SpdyFrame.__str__(self) + ' LSID=%d %s]' % ( self.last_stream_id, GoawayReasons.str[self.reason]) <NEW_LINE> <DEDENT> def serialize(self, context): <NEW_LINE> <INDENT> data = struct.pack("!II", STREAM_MASK & self.last_stream_id, self.reason) <NEW_LINE> return self._serialize_control_frame(self.type, self.flags, data) | A SPDY GOAWAY frame. | 62598fb41f5feb6acb162cde |
class Solution: <NEW_LINE> <INDENT> @timeit <NEW_LINE> def isSubPath(self, head: ListNode, root: TreeNode) -> bool: <NEW_LINE> <INDENT> def dfs1(p, q): <NEW_LINE> <INDENT> if not p: return True <NEW_LINE> if not q: return False <NEW_LINE> if p.val != q.val: return False <NEW_LINE> return dfs1(p.next, q.left) or dfs1(p.next, q.right) <NEW_LINE> <DEDENT> def dfs(p, q): <NEW_LINE> <INDENT> if not p: return True <NEW_LINE> if not q: return False <NEW_LINE> if dfs1(p, q): return True <NEW_LINE> return dfs(p, q.left) or dfs(p, q.right) <NEW_LINE> <DEDENT> return dfs(head, root) | [5346. 二叉树中的列表](https://leetcode-cn.com/problems/linked-list-in-binary-tree/) | 62598fb4851cf427c66b8377 |
class ListProfiles(APIView): <NEW_LINE> <INDENT> permission_classes = (permissions.IsAuthenticated,) <NEW_LINE> def get(self, request, *args, **kwargs): <NEW_LINE> <INDENT> articles = Article.objects.distinct("author").all() <NEW_LINE> authors = Profile.objects.filter( user_id__in=[article.author.username for article in articles] ) <NEW_LINE> serializer = ProfileSerializer(authors, many=True) <NEW_LINE> return Response(data={"authors":serializer.data}, status=status.HTTP_200_OK) | Class to list author profiles | 62598fb456b00c62f0fb2978 |
class tvdb_resourcenotfound(tvdb_exception):
    """Raised when a resource cannot be found on thetvdb.com."""
class PsuWrapper:
    """Wraps multiple power supplies so channels can be addressed by an
    easily remappable logical name.

    Args:
        dmgr: device manager providing ``get(name)``.
        devices: names of the underlying power-supply devices.
        mappings: dict mapping a logical channel name to a
            ``(device name, channel)`` tuple.
    """

    def __init__(self, dmgr, devices, mappings):
        self.core = dmgr.get("core")
        self.devices = {name: dmgr.get(name) for name in devices}
        self.mappings = mappings

    def set_voltage_limit(self, logicalChannel, value):
        device, channel = self._get_dev_channel(logicalChannel)
        device.set_voltage_limit(value, channel=channel)

    def set_current_limit(self, logicalChannel, value):
        device, channel = self._get_dev_channel(logicalChannel)
        device.set_current_limit(value, channel=channel)

    def set_output_enable(self, logicalChannel, value):
        device, channel = self._get_dev_channel(logicalChannel)
        device.set_output_enable(value, channel=channel)

    def _get_dev_channel(self, logicalChannel):
        # Resolve a logical channel name to its (device object, channel) pair.
        try:
            device_name, channel = self.mappings[logicalChannel]
        except KeyError:
            raise UnknownLogicalChannel
        try:
            device = self.devices[device_name]
        except KeyError:
            raise UnknownDeviceName
        return device, channel
class VideoSearchView(LoginRequiredMixin, FormView): <NEW_LINE> <INDENT> template_name = 'video_search_results.html' <NEW_LINE> form_class = YouTubeVideoSearchForm <NEW_LINE> def get(self, request, pk): <NEW_LINE> <INDENT> return HttpResponseNotAllowed('post') <NEW_LINE> <DEDENT> def get_context_data(self, form, **kwargs): <NEW_LINE> <INDENT> context = super(VideoSearchView, self).get_context_data(**kwargs) <NEW_LINE> project = get_object_or_404( Project, pk=self.kwargs['pk'], owner=self.request.user) <NEW_LINE> results = youtube.Client().search(form.cleaned_data.get('keywords')) <NEW_LINE> existing_videos = [v.youtube_id for v in Video.objects.filter( project__pk=project.pk)] <NEW_LINE> context['project'] = project <NEW_LINE> for i, r in enumerate(results): <NEW_LINE> <INDENT> if r['id'] in existing_videos: <NEW_LINE> <INDENT> results.pop(i) <NEW_LINE> <DEDENT> <DEDENT> context['formset'] = YouTubeVideoFormSet(initial=[{ 'youtube_id': result['id'], 'published': datetime.datetime.strptime( result['snippet']['publishedAt'], "%Y-%m-%dT%H:%M:%S.%fZ"), 'name': result['snippet']['title'], 'description': result['snippet']['description'], 'thumbnail_default': result['snippet']['thumbnails']['default']['url'], 'thumbnail_medium': result['snippet']['thumbnails']['medium']['url'], 'thumbnail_high': result['snippet']['thumbnails']['high']['url'], 'likes': result['statistics'].get('likeCount', 0), 'dislikes': result['statistics'].get('dislikeCount', 0), 'comment_count': result['statistics'].get('commentCount', 0), } for result in results ]) <NEW_LINE> return context <NEW_LINE> <DEDENT> def form_valid(self, form): <NEW_LINE> <INDENT> return self.render_to_response(self.get_context_data(form)) | Allows users to search youtube for videos to add to their project | 62598fb4baa26c4b54d4f378 |
class BadDomainError(OutputFilterError): <NEW_LINE> <INDENT> pass | An error to throw when a domain is invalid. | 62598fb4442bda511e95c519 |
class FolderMoveObjects(BaseSubstitution):
    """'Move folder objects' substitution for asynchronous move operations."""

    category = u'AsyncMove'
    description = u'Move folder objects'

    def safe_call(self):
        # Fall back to '' when the wrapper lacks the attribute.
        return getattr(self.wrapper, 'folder_move_objects', '')
class UserRoles(Enum):
    """Closed list of accepted user roles in the Isogeo API.

    :Example:

        >>> # parse members and values
        >>> print("{0:<30} {1:>20}".format("Enum", "Value"))
        >>> for role in UserRoles:
        >>>     print("{0:<30} {1:>20}".format(role, role.value))
        Enum                                          Value
        UserRoles.admin                               admin
        UserRoles.writer                             writer
        ...

        >>> # check whether a value is accepted (case sensitive)
        >>> "admin" in UserRoles.__members__
        True
        >>> "Author" in UserRoles.__members__
        False

    See: https://docs.python.org/3/library/enum.html
    """

    admin = "admin"
    writer = "writer"
    reader = "reader"
class MatchNdTargetAddrIDL(object):
    """Thrift-generated struct: ND Target Address match.

    Attributes:
      sense: match sense (i32).
      targetAddr: target address as a list of bytes.
    """

    thrift_spec = (
        None,
        (1, TType.I32, 'sense', None, None),
        (2, TType.LIST, 'targetAddr', (TType.BYTE, None), None),
    )

    def __init__(self, sense=None, targetAddr=None):
        self.sense = sense
        self.targetAddr = targetAddr

    def read(self, iprot):
        # Fast path: decode with the C accelerator when available.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated \
                and isinstance(iprot.trans, TTransport.CReadableTransport) \
                and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans,
                                     (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.I32:
                    self.sense = iprot.readI32()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.LIST:
                    self.targetAddr = []
                    (_etype115, _size112) = iprot.readListBegin()
                    for _i116 in xrange(_size112):
                        _elem117 = iprot.readByte()
                        self.targetAddr.append(_elem117)
                    iprot.readListEnd()
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Fast path: encode with the C accelerator when available.
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated \
                and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(
                self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('MatchNdTargetAddrIDL')
        if self.sense != None:
            oprot.writeFieldBegin('sense', TType.I32, 1)
            oprot.writeI32(self.sense)
            oprot.writeFieldEnd()
        if self.targetAddr != None:
            oprot.writeFieldBegin('targetAddr', TType.LIST, 2)
            oprot.writeListBegin(TType.BYTE, len(self.targetAddr))
            for iter118 in self.targetAddr:
                oprot.writeByte(iter118)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        pass

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for (key, value) in self.__dict__.iteritems()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and \
            self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not self == other
class Attention(nn.Module):
    """Luong-style attention (https://arxiv.org/pdf/1508.04025.pdf).

    PyTorch implementation inspired by
    https://github.com/marumalo/pytorch-seq2seq/blob/master/model.py

    Only the 'dot' score is implemented; 'general' and 'concat' are accepted
    by the constructor but raise NotImplementedError in forward().
    """

    def __init__(self, hidden_dim: int, method: str):
        super(Attention, self).__init__()
        # Fix: validate with an explicit raise instead of `assert`, which is
        # stripped under `python -O`.
        if method not in {'dot', 'general', 'concat'}:
            raise ValueError('method should either be dot, general or concat')
        self.method = method
        self.hidden_dim = hidden_dim

    def dot(self, decoder_hidden: Tensor, encoder_outputs: Tensor) -> Tensor:
        """Dot score: sum over the hidden dimension -> (batch, seq_len)."""
        return torch.sum(decoder_hidden * encoder_outputs, dim=2)

    def forward(self, decoder_hidden: Tensor, encoder_outputs: Tensor) -> Tensor:
        """Return attention weights, softmax-normalized over the sequence dim."""
        if self.method == 'dot':
            attn_energies = self.dot(decoder_hidden, encoder_outputs)
        else:
            # Fix: NotImplementedError (a subclass of Exception, so callers
            # catching Exception still work) instead of a bare Exception.
            raise NotImplementedError(
                "attention method %r is not implemented" % self.method)
        return F.softmax(attn_energies, dim=1)
class B2PmxeSolidifyDelete(bpy.types.Operator): <NEW_LINE> <INDENT> bl_idname = "b2pmxem.delete_solidify" <NEW_LINE> bl_label = "Delete Solidify Edge" <NEW_LINE> bl_options = {'REGISTER', 'UNDO'} <NEW_LINE> @classmethod <NEW_LINE> def poll(cls, context): <NEW_LINE> <INDENT> obj = context.active_object <NEW_LINE> return (obj and obj.type == 'MESH') <NEW_LINE> <DEDENT> def execute(self, context): <NEW_LINE> <INDENT> for obj in context.selected_objects: <NEW_LINE> <INDENT> if obj.type != 'MESH': <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> mod = obj.modifiers.get(GV.SolidfyName) <NEW_LINE> if mod is not None: <NEW_LINE> <INDENT> obj.modifiers.remove(mod) <NEW_LINE> <DEDENT> for index, mat in enumerate(obj.data.materials): <NEW_LINE> <INDENT> if mat is None: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> if mat.name.startswith(GV.SolidfyName): <NEW_LINE> <INDENT> obj.data.materials.pop(index) <NEW_LINE> if mat.users == 0: <NEW_LINE> <INDENT> bpy.data.materials.remove(mat) <NEW_LINE> <DEDENT> break <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return {"FINISHED"} | Delete Solidify Edge of selected objects | 62598fb460cbc95b06364406 |
class cmd_connect(Command): <NEW_LINE> <INDENT> connected = True <NEW_LINE> arguments = [ Argument('device', help = 'The device to connect to, can include an optional [username@].'), ] <NEW_LINE> options = [ Option('search-all', 'a', take_argument = False, help = 'Search all attributes and networks (default is to only search devices names).'), Option('console', '0', take_argument = False, help = 'Attach to console for rdp connections.'), Option('regexp', 'r', take_argument = False, help = 'Use regexp search, slower (automatically used with -a).'), ] <NEW_LINE> def run(self, device, search_all = False, console = False, regexp = False): <NEW_LINE> <INDENT> if search_all: <NEW_LINE> <INDENT> regexp = True <NEW_LINE> <DEDENT> cmdconnect.cmd_connect(self.object_store, device, search_all, not regexp, console, self.cm.config) <NEW_LINE> return 0 | SSH/RDP connection to a device. | 62598fb4cc0a2c111447b0d4 |
class GCommitArrow(GConnectionLine):
    """Graphics item representing the connection between GCommitNodes.

    A GCommitArrow originates from a child GCommitNode and points at its
    parent GCommitNode.  It consists of a line plus an arrow head and
    follows its endpoint nodes as they are moved around.
    """

    def __init__(self, origin, origin_attach_mode, destination,
                 destination_attach_mode):
        super().__init__(origin, origin_attach_mode, destination,
                         destination_attach_mode)

    def paint(self, QPainter, QStyleOptionGraphicsItem, QWidget_widget=None):
        self._render_line(QPainter)
        self._render_head(QPainter)

    def _render_head(self, QPainter):
        """Draw the triangular arrow head at the origin point."""
        # acos only yields [0, pi]; flip the angle for downward-pointing lines.
        angle_of_line = acos(self._line.dx() / self._line.length())
        if self._line.dy() >= 0:
            angle_of_line = (pi * 2) - angle_of_line
        head_point1 = self._origin_point + QPointF(
            sin(angle_of_line + pi / 3) * ARROW_HEAD_WIDTH,
            cos(angle_of_line + pi / 3) * ARROW_HEAD_LENGTH)
        head_point2 = self._origin_point + QPointF(
            sin(angle_of_line + pi - pi / 3) * ARROW_HEAD_WIDTH,
            cos(angle_of_line + pi - pi / 3) * ARROW_HEAD_LENGTH)
        QPainter.setBrush(ARROW_HEAD_BRUSH)
        QPainter.drawPolygon(self._origin_point, head_point1, head_point2)
class GrowthAPILoader(DataFileLoader): <NEW_LINE> <INDENT> def __init__(self, api_file_dbf, year): <NEW_LINE> <INDENT> super(GrowthAPILoader, self).__init__(api_file_dbf) <NEW_LINE> self.year = year <NEW_LINE> <DEDENT> def load_file(self): <NEW_LINE> <INDENT> if self.file[-4:].lower() != '.dbf': <NEW_LINE> <INDENT> raise Exception('Please input a .dbf') <NEW_LINE> <DEDENT> self.table = DBF(self.file, encoding='latin1', lowernames=True) <NEW_LINE> self.lines = self.table.records <NEW_LINE> <DEDENT> def prep_line(self, record): <NEW_LINE> <INDENT> return record.values() <NEW_LINE> <DEDENT> def set_layout(self): <NEW_LINE> <INDENT> self.layout = self.table.field_names <NEW_LINE> <DEDENT> def make_api(self, record): <NEW_LINE> <INDENT> return models.GrowthAPI(record, self.year) <NEW_LINE> <DEDENT> def query_api(self, record): <NEW_LINE> <INDENT> _id = record.cds + '_' + unicode(self.year) <NEW_LINE> return models.GrowthAPI.query.filter_by(id = _id).first() <NEW_LINE> <DEDENT> def query_school(self, record): <NEW_LINE> <INDENT> cds_code = record.cds <NEW_LINE> return models.School.query.filter_by(cds_code = cds_code).first() <NEW_LINE> <DEDENT> def query_district(self, record): <NEW_LINE> <INDENT> cds_code = record.cds <NEW_LINE> return models.District.query.filter_by(cds_code = cds_code).first() <NEW_LINE> <DEDENT> def load_record_into_db(self, record): <NEW_LINE> <INDENT> db_row = self.query_api(record) <NEW_LINE> if not db_row: <NEW_LINE> <INDENT> new_row = self.make_api(record) <NEW_LINE> db.session.add(new_row) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> db_row.update(record, self.year) <NEW_LINE> db.session.add(db_row) <NEW_LINE> <DEDENT> db.session.commit() | Parses/loads growth api DBF files | 62598fb43317a56b869be5ad |
class SlippyImageArtist(AxesImage): <NEW_LINE> <INDENT> def __init__(self, ax, raster_source, **kwargs): <NEW_LINE> <INDENT> self.raster_source = raster_source <NEW_LINE> super(SlippyImageArtist, self).__init__(ax, **kwargs) <NEW_LINE> self.set_clip_path(ax.outline_patch) <NEW_LINE> <DEDENT> @matplotlib.artist.allow_rasterization <NEW_LINE> def draw(self, renderer, *args, **kwargs): <NEW_LINE> <INDENT> if not self.get_visible(): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> ax = self.axes <NEW_LINE> window_extent = ax.get_window_extent() <NEW_LINE> [x1, y1], [x2, y2] = ax.viewLim.get_points() <NEW_LINE> located_images = self.raster_source.fetch_raster( ax.projection, extent=[x1, x2, y1, y2], target_resolution=(window_extent.width, window_extent.height)) <NEW_LINE> for img, extent in located_images: <NEW_LINE> <INDENT> self.set_array(img) <NEW_LINE> with ax.hold_limits(): <NEW_LINE> <INDENT> self.set_extent(extent) <NEW_LINE> <DEDENT> super(SlippyImageArtist, self).draw(renderer, *args, **kwargs) | A subclass of :class:`~matplotlib.image.AxesImage` which provides an
interface for getting a raster from the given object with interactive
slippy map type functionality.
Kwargs are passed to the AxesImage constructor. | 62598fb4ff9c53063f51a70f |
class LargeFeeCharger(FeeCharger): <NEW_LINE> <INDENT> def getFee(self, start, end): <NEW_LINE> <INDENT> return (end - start) * 0.5 | Parking Lot large fee charger. | 62598fb4283ffb24f3cf394e |
class WindowError(RasterioError): <NEW_LINE> <INDENT> pass | Raised when errors occur during window operations | 62598fb47047854f4633f49c |
class ConditionProvider(cst.VisitorMetadataProvider): <NEW_LINE> <INDENT> cond_stack: tp.List[cst.BaseExpression] <NEW_LINE> def __init__(self, simplify: bool = False): <NEW_LINE> <INDENT> self.cond_stack = [] <NEW_LINE> <DEDENT> def on_leave(self, node: cst.CSTNode) -> None: <NEW_LINE> <INDENT> self.set_metadata(node, tuple(self.cond_stack)) <NEW_LINE> return super().on_leave(node) <NEW_LINE> <DEDENT> def visit_If_body(self, node: cst.If) -> None: <NEW_LINE> <INDENT> self.cond_stack.append(node.test) <NEW_LINE> <DEDENT> def leave_If_body(self, node: cst.If) -> None: <NEW_LINE> <INDENT> self.cond_stack.pop() <NEW_LINE> <DEDENT> def visit_If_orelse(self, node: cst.If) -> None: <NEW_LINE> <INDENT> self.cond_stack.append(cst.UnaryOperation(cst.Not(), node.test)) <NEW_LINE> <DEDENT> def leave_If_orelse(self, node: cst.If) -> None: <NEW_LINE> <INDENT> self.cond_stack.pop() <NEW_LINE> <DEDENT> def visit_IfExp_body(self, node: cst.IfExp) -> None: <NEW_LINE> <INDENT> self.cond_stack.append(node.test) <NEW_LINE> <DEDENT> def leave_IfExp_body(self, node: cst.If) -> None: <NEW_LINE> <INDENT> self.cond_stack.pop() <NEW_LINE> <DEDENT> def visit_IfExp_orelse(self, node: cst.IfExp) -> None: <NEW_LINE> <INDENT> self.cond_stack.append(cst.UnaryOperation(cst.Not(), node.test)) <NEW_LINE> <DEDENT> def leave_IfExp_orelse(self, node: cst.IfExp) -> None: <NEW_LINE> <INDENT> self.cond_stack.pop() | Marks each node with the conditions under which they will be executed | 62598fb4fff4ab517ebcd8a8 |
class Reaction(models.Model): <NEW_LINE> <INDENT> created_at = models.DateTimeField(auto_now_add=True) <NEW_LINE> like_or_dislike = models.CharField( choices=LikeOrDislike.choices, max_length=255, null=True, blank=True ) <NEW_LINE> comment = models.TextField(null=True, blank=True) <NEW_LINE> author = models.ForeignKey(user_models.User, on_delete=models.SET_NULL, null=True) <NEW_LINE> post = models.ForeignKey( "data.Post", on_delete=models.CASCADE, related_name="reactions" ) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return ( f"{self.created_at.date()} - {self.author} - {self.like_or_dislike} -" f" {self.comment}" ) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return ( f"<{self.__class__.__name__}, id: {self.pk} created_at: {self.created_at}, " f"author: {self.author}, post: {self.post.pk}>" ) | A model to store user reactions associated with a Post instance. | 62598fb4bd1bec0571e15123 |
class ReactiveFluxSimulation(ShootFromSnapshotsSimulation): <NEW_LINE> <INDENT> def __init__(self, storage, engine=None, states=None, randomizer=None, initial_snapshots=None, rc=None): <NEW_LINE> <INDENT> self.states = states <NEW_LINE> state_A = states[0] <NEW_LINE> state_B = states[1] <NEW_LINE> self.rc = rc <NEW_LINE> rc_array = np.array(self.rc(initial_snapshots)) <NEW_LINE> rc_min = np.nextafter(rc_array.min(), -np.inf) <NEW_LINE> rc_max = np.nextafter(rc_array.max(), np.inf) <NEW_LINE> self.dividing_surface = paths.CVDefinedVolume(self.rc, rc_min, rc_max) <NEW_LINE> self.volume_towards_A = paths.CVDefinedVolume(self.rc, -np.inf, rc_max) <NEW_LINE> backward_ensemble = paths.SequentialEnsemble([ paths.AllInXEnsemble(state_A) & paths.LengthEnsemble(1), paths.AllInXEnsemble(self.volume_towards_A - state_A) ]) <NEW_LINE> forward_ensemble = paths.SequentialEnsemble([ paths.AllInXEnsemble(state_A) & paths.LengthEnsemble(1), paths.AllOutXEnsemble(state_A | state_B), paths.AllInXEnsemble(state_B) & paths.LengthEnsemble(1), ]) <NEW_LINE> super(ReactiveFluxSimulation, self).__init__( storage=storage, engine=engine, starting_volume=self.dividing_surface, forward_ensemble=forward_ensemble, backward_ensemble=backward_ensemble, randomizer=randomizer, initial_snapshots=initial_snapshots ) <NEW_LINE> self.backward_mover = paths.BackwardExtendMover( ensemble=self.starting_ensemble, target_ensemble=self.backward_ensemble ) <NEW_LINE> self.forward_mover = paths.ForwardExtendMover( ensemble=self.backward_ensemble, target_ensemble=self.forward_ensemble ) <NEW_LINE> self.mover = paths.NonCanonicalConditionalSequentialMover([ self.backward_mover, self.forward_mover ]) <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> ret_dict = { 'states' : self.states, 'dividing_surface' : self.dividing_surface, 'volume_towards_A' : self.volume_towards_A, 'rc' : self.rc } <NEW_LINE> return ret_dict <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_dict(cls, dct): <NEW_LINE> <INDENT> rf 
= cls.__new__(cls) <NEW_LINE> rf.states = dct['states'] <NEW_LINE> rf.dividing_surface = dct['dividing_surface'] <NEW_LINE> rf.volume_towards_A = dct['volume_towards_A'] <NEW_LINE> rf.rc = dct['rc'] <NEW_LINE> return rf | Reactive Flux simulations (effective positive flux).
Parameters
----------
storage : :class:`.Storage`
the file to store simulations in
engine : :class:`.DynamicsEngine`
the dynamics engine to use to run the simulation
states : list of :class:`.Volume`
the volumes representing the stable states, first state A then state B
randomizer : :class:`.SnapshotModifier`
the method used to modify the input snapshot before each shot
initial_snapshots : list of :class:`.Snapshot`
initial snapshots to use
rc : :class:`.CollectiveVariable`
reaction coordinate | 62598fb4aad79263cf42e896 |
class CounterCollection(object): <NEW_LINE> <INDENT> def __init__(self, sim): <NEW_LINE> <INDENT> self.sim = sim <NEW_LINE> self.cnt_wt = TimeIndependentCounter() <NEW_LINE> self.hist_wt = TimeIndependentHistogram(self.sim, "w") <NEW_LINE> self.cnt_ql = TimeDependentCounter(self.sim) <NEW_LINE> self.hist_ql = TimeDependentHistogram(self.sim, "q") <NEW_LINE> self.cnt_sys_util = TimeDependentCounter(self.sim) <NEW_LINE> <DEDENT> def reset(self): <NEW_LINE> <INDENT> self.cnt_wt.reset() <NEW_LINE> self.hist_wt.reset() <NEW_LINE> self.cnt_ql.reset() <NEW_LINE> self.hist_ql.reset() <NEW_LINE> self.cnt_sys_util.reset() <NEW_LINE> <DEDENT> def report(self): <NEW_LINE> <INDENT> self.cnt_wt.report() <NEW_LINE> self.hist_wt.report() <NEW_LINE> self.cnt_ql.report() <NEW_LINE> self.hist_ql.report() <NEW_LINE> self.cnt_sys_util.report() <NEW_LINE> <DEDENT> def count_packet(self, packet): <NEW_LINE> <INDENT> self.cnt_wt.count(packet.get_waiting_time()) <NEW_LINE> self.hist_wt.count(packet.get_waiting_time()) <NEW_LINE> <DEDENT> def count_queue(self): <NEW_LINE> <INDENT> self.cnt_ql.count(self.sim.system_state.get_queue_length()) <NEW_LINE> self.hist_ql.count(self.sim.system_state.get_queue_length()) | CounterCollection is a collection of all counters and histograms that are used in the simulations.
It contains several counters and histograms, that are used in the different tasks.
Reporting is done by calling the report function. This function can be adapted, depending on which counters should
report their results and print strings or plot histograms. | 62598fb476e4537e8c3ef668 |
class context_schema(Embryo.Schema): <NEW_LINE> <INDENT> dao = fields.Nested( { 'name': fields.String(), 'type': fields.String(nullable=True), 'params': fields.Nested({}), 'fields': fields.List(fields.Dict()) } ) | # Context Schema
The respective Dao schema
## Fields
* `dao`:
* `name`: TODO
* `type`: TODO
* `fields`: TODO | 62598fb491f36d47f2230f09 |
class ChdirContext(object): <NEW_LINE> <INDENT> def __init__(self, dpath=None, stay=False, verbose=None): <NEW_LINE> <INDENT> if verbose is None: <NEW_LINE> <INDENT> verbose = 1 <NEW_LINE> <DEDENT> self.verbose = verbose <NEW_LINE> self.stay = stay <NEW_LINE> self.dpath = dpath <NEW_LINE> self.curdir = os.getcwd() <NEW_LINE> <DEDENT> def __enter__(self): <NEW_LINE> <INDENT> if self.dpath is not None: <NEW_LINE> <INDENT> if self.verbose: <NEW_LINE> <INDENT> print('[path.push] Change directory to %r' % (self.dpath,)) <NEW_LINE> <DEDENT> os.chdir(self.dpath) <NEW_LINE> <DEDENT> return self <NEW_LINE> <DEDENT> def __exit__(self, type_, value, trace): <NEW_LINE> <INDENT> if not self.stay: <NEW_LINE> <INDENT> if self.verbose: <NEW_LINE> <INDENT> print('[path.pop] Change directory to %r' % (self.curdir,)) <NEW_LINE> <DEDENT> os.chdir(self.curdir) <NEW_LINE> <DEDENT> if trace is not None: <NEW_LINE> <INDENT> if self.verbose: <NEW_LINE> <INDENT> print('[util_path] Error in chdir context manager!: ' + str(value)) <NEW_LINE> <DEDENT> return False | References http://www.astropython.org/snippet/2009/10/chdir-context-manager | 62598fb48a349b6b436862fe |
class MixedCaseUnderscoreStyle(Style): <NEW_LINE> <INDENT> def pythonAttrToDBColumn(self, attr): <NEW_LINE> <INDENT> return mixedToUnder(attr) <NEW_LINE> <DEDENT> def dbColumnToPythonAttr(self, col): <NEW_LINE> <INDENT> return underToMixed(col) <NEW_LINE> <DEDENT> def pythonClassToDBTable(self, className): <NEW_LINE> <INDENT> return className[0].lower() + mixedToUnder(className[1:]) <NEW_LINE> <DEDENT> def dbTableToPythonClass(self, table): <NEW_LINE> <INDENT> return table[0].upper() + underToMixed(table[1:]) <NEW_LINE> <DEDENT> def pythonClassToDBTableReference(self, className): <NEW_LINE> <INDENT> return self.tableReference(self.pythonClassToDBTable(className)) <NEW_LINE> <DEDENT> def tableReference(self, table): <NEW_LINE> <INDENT> return table + "_id" | This is the default style. Python attributes use mixedCase,
while database columns use underscore_separated. | 62598fb497e22403b383afce |
class PluginRegistry(object): <NEW_LINE> <INDENT> def find_requirement_by_env_var(self, env_var, options): <NEW_LINE> <INDENT> from .requirement import EnvVarRequirement <NEW_LINE> return EnvVarRequirement(registry=self, env_var=env_var, options=options) <NEW_LINE> <DEDENT> def find_requirement_by_service_type(self, service_type, env_var, options): <NEW_LINE> <INDENT> if 'type' not in options or options['type'] != service_type: <NEW_LINE> <INDENT> options = options.copy() <NEW_LINE> options['type'] = service_type <NEW_LINE> <DEDENT> if service_type == 'redis': <NEW_LINE> <INDENT> from .requirements.redis import RedisRequirement <NEW_LINE> return RedisRequirement(registry=self, env_var=env_var, options=options) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> <DEDENT> def list_service_types(self): <NEW_LINE> <INDENT> return [ServiceType(name='redis', default_variable='REDIS_URL', description='A Redis server')] <NEW_LINE> <DEDENT> def find_provider_by_class_name(self, class_name): <NEW_LINE> <INDENT> if class_name == 'CondaEnvProvider': <NEW_LINE> <INDENT> from .providers.conda_env import CondaEnvProvider <NEW_LINE> return CondaEnvProvider() <NEW_LINE> <DEDENT> elif class_name == 'RedisProvider': <NEW_LINE> <INDENT> from .providers.redis import RedisProvider <NEW_LINE> return RedisProvider() <NEW_LINE> <DEDENT> elif class_name == 'EnvVarProvider': <NEW_LINE> <INDENT> from .provider import EnvVarProvider <NEW_LINE> return EnvVarProvider() <NEW_LINE> <DEDENT> elif class_name == 'DownloadProvider': <NEW_LINE> <INDENT> from .providers.download import DownloadProvider <NEW_LINE> return DownloadProvider() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return None | Allows creating Requirement and Provider instances. | 62598fb463b5f9789fe8522e |
class Life(): <NEW_LINE> <INDENT> life = False <NEW_LINE> lifetime = 0 <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> self.life = False <NEW_LINE> self.lifetime = 0 <NEW_LINE> <DEDENT> def ILive(self): <NEW_LINE> <INDENT> return self.life <NEW_LINE> <DEDENT> def Kill(self): <NEW_LINE> <INDENT> self.lifetime = 0 <NEW_LINE> self.life = False <NEW_LINE> <DEDENT> def Rise(self): <NEW_LINE> <INDENT> self.life = True <NEW_LINE> self.lifetime += 1 | Attributes
----------
life : boolean
True is a live
life_time : int
updtime of life
Methods
-------
Ilive()
return life
Kill()
to kill life
Rise()
to rise life | 62598fb4a8370b77170f04a0 |
class Variable(object): <NEW_LINE> <INDENT> def __init__(self, var): <NEW_LINE> <INDENT> self.var = var <NEW_LINE> self.literal = None <NEW_LINE> self.lookups = None <NEW_LINE> self.translate = False <NEW_LINE> self.message_context = None <NEW_LINE> try: <NEW_LINE> <INDENT> self.literal = float(var) <NEW_LINE> if '.' not in var and 'e' not in var.lower(): <NEW_LINE> <INDENT> self.literal = int(self.literal) <NEW_LINE> <DEDENT> if var.endswith('.'): <NEW_LINE> <INDENT> raise ValueError <NEW_LINE> <DEDENT> <DEDENT> except ValueError: <NEW_LINE> <INDENT> if var.startswith('_(') and var.endswith(')'): <NEW_LINE> <INDENT> self.translate = True <NEW_LINE> var = var[2:-1] <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> self.literal = mark_safe(unescape_string_literal(var)) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> if var.find(VARIABLE_ATTRIBUTE_SEPARATOR + '_') > -1 or var[0] == '_': <NEW_LINE> <INDENT> raise TemplateSyntaxError("Variables and attributes may " "not begin with underscores: '%s'" % var) <NEW_LINE> <DEDENT> self.lookups = tuple(var.split(VARIABLE_ATTRIBUTE_SEPARATOR)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def resolve(self, context): <NEW_LINE> <INDENT> if self.lookups is not None: <NEW_LINE> <INDENT> value = self._resolve_lookup(context) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> value = self.literal <NEW_LINE> <DEDENT> if self.translate: <NEW_LINE> <INDENT> if self.message_context: <NEW_LINE> <INDENT> return pgettext_lazy(self.message_context, value) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return ugettext_lazy(value) <NEW_LINE> <DEDENT> <DEDENT> return value <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "<%s: %r>" % (self.__class__.__name__, self.var) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.var <NEW_LINE> <DEDENT> def _resolve_lookup(self, context): <NEW_LINE> <INDENT> current = context <NEW_LINE> try: <NEW_LINE> <INDENT> for bit in self.lookups: <NEW_LINE> <INDENT> try: <NEW_LINE> 
<INDENT> current = current[bit] <NEW_LINE> <DEDENT> except (TypeError, AttributeError, KeyError, ValueError): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> if isinstance(current, BaseContext) and getattr(type(current), bit): <NEW_LINE> <INDENT> raise AttributeError <NEW_LINE> <DEDENT> current = getattr(current, bit) <NEW_LINE> <DEDENT> except (TypeError, AttributeError): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> current = current[int(bit)] <NEW_LINE> <DEDENT> except (IndexError, ValueError, KeyError, TypeError): <NEW_LINE> <INDENT> raise VariableDoesNotExist("Failed lookup for key " "[%s] in %r", (bit, current)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> if callable(current): <NEW_LINE> <INDENT> if getattr(current, 'do_not_call_in_templates', False): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> elif getattr(current, 'alters_data', False): <NEW_LINE> <INDENT> current = settings.TEMPLATE_STRING_IF_INVALID <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> current = current() <NEW_LINE> <DEDENT> except TypeError: <NEW_LINE> <INDENT> current = settings.TEMPLATE_STRING_IF_INVALID <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> if getattr(e, 'silent_variable_failure', False): <NEW_LINE> <INDENT> current = settings.TEMPLATE_STRING_IF_INVALID <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise <NEW_LINE> <DEDENT> <DEDENT> return current | A template variable, resolvable against a given context. The variable may
be a hard-coded string (if it begins and ends with single or double quote
marks)::
>>> c = {'article': {'section':u'News'}}
>>> Variable('article.section').resolve(c)
u'News'
>>> Variable('article').resolve(c)
{'section': u'News'}
>>> class AClass: pass
>>> c = AClass()
>>> c.article = AClass()
>>> c.article.section = u'News'
(The example assumes VARIABLE_ATTRIBUTE_SEPARATOR is '.') | 62598fb4097d151d1a2c10f1 |
class RoundRobinMap(Map): <NEW_LINE> <INDENT> def getPartition(self, seq, p, q): <NEW_LINE> <INDENT> return seq[p:len(seq):q] <NEW_LINE> <DEDENT> def joinPartitions(self, listOfPartitions): <NEW_LINE> <INDENT> testObject = listOfPartitions[0] <NEW_LINE> for m in arrayModules: <NEW_LINE> <INDENT> if isinstance(testObject, m['type']): <NEW_LINE> <INDENT> return self.flatten_array(m['type'], listOfPartitions) <NEW_LINE> <DEDENT> <DEDENT> if isinstance(testObject, (list, tuple)): <NEW_LINE> <INDENT> return self.flatten_list(listOfPartitions) <NEW_LINE> <DEDENT> return listOfPartitions <NEW_LINE> <DEDENT> def flatten_array(self, klass, listOfPartitions): <NEW_LINE> <INDENT> test = listOfPartitions[0] <NEW_LINE> shape = list(test.shape) <NEW_LINE> shape[0] = sum([ p.shape[0] for p in listOfPartitions]) <NEW_LINE> A = klass(shape) <NEW_LINE> N = shape[0] <NEW_LINE> q = len(listOfPartitions) <NEW_LINE> for p,part in enumerate(listOfPartitions): <NEW_LINE> <INDENT> A[p:N:q] = part <NEW_LINE> <DEDENT> return A <NEW_LINE> <DEDENT> def flatten_list(self, listOfPartitions): <NEW_LINE> <INDENT> flat = [] <NEW_LINE> for i in range(len(listOfPartitions[0])): <NEW_LINE> <INDENT> flat.extend([ part[i] for part in listOfPartitions if len(part) > i ]) <NEW_LINE> <DEDENT> return flat | Partitions a sequence in a roun robin fashion.
This currently does not work! | 62598fb44428ac0f6e6585e3 |
class BlockDeviceMapping(dict): <NEW_LINE> <INDENT> def __init__(self, connection=None): <NEW_LINE> <INDENT> dict.__init__(self) <NEW_LINE> self.connection = connection <NEW_LINE> self.current_name = None <NEW_LINE> self.current_value = None <NEW_LINE> <DEDENT> def startElement(self, name, attrs, connection): <NEW_LINE> <INDENT> lname = name.lower() <NEW_LINE> if lname in ['ebs', 'virtualname']: <NEW_LINE> <INDENT> self.current_value = BlockDeviceType(self) <NEW_LINE> return self.current_value <NEW_LINE> <DEDENT> <DEDENT> def endElement(self, name, value, connection): <NEW_LINE> <INDENT> lname = name.lower() <NEW_LINE> if lname in ['device', 'devicename']: <NEW_LINE> <INDENT> self.current_name = value <NEW_LINE> <DEDENT> elif lname in ['item', 'member']: <NEW_LINE> <INDENT> self[self.current_name] = self.current_value <NEW_LINE> <DEDENT> <DEDENT> def ec2_build_list_params(self, params, prefix=''): <NEW_LINE> <INDENT> pre = '%sBlockDeviceMapping' % prefix <NEW_LINE> return self._build_list_params(params, prefix=pre) <NEW_LINE> <DEDENT> def autoscale_build_list_params(self, params, prefix=''): <NEW_LINE> <INDENT> pre = '%sBlockDeviceMappings.member' % prefix <NEW_LINE> return self._build_list_params(params, prefix=pre) <NEW_LINE> <DEDENT> def _build_list_params(self, params, prefix=''): <NEW_LINE> <INDENT> i = 1 <NEW_LINE> for dev_name in self: <NEW_LINE> <INDENT> pre = '%s.%d' % (prefix, i) <NEW_LINE> params['%s.DeviceName' % pre] = dev_name <NEW_LINE> block_dev = self[dev_name] <NEW_LINE> if block_dev.ephemeral_name: <NEW_LINE> <INDENT> params['%s.VirtualName' % pre] = block_dev.ephemeral_name <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if block_dev.no_device: <NEW_LINE> <INDENT> params['%s.NoDevice' % pre] = '' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if block_dev.snapshot_id: <NEW_LINE> <INDENT> params['%s.Ebs.SnapshotId' % pre] = block_dev.snapshot_id <NEW_LINE> <DEDENT> if block_dev.size: <NEW_LINE> <INDENT> params['%s.Ebs.VolumeSize' % pre] = 
block_dev.size <NEW_LINE> <DEDENT> if block_dev.delete_on_termination: <NEW_LINE> <INDENT> params['%s.Ebs.DeleteOnTermination' % pre] = 'true' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> params['%s.Ebs.DeleteOnTermination' % pre] = 'false' <NEW_LINE> <DEDENT> if block_dev.volume_type: <NEW_LINE> <INDENT> params['%s.Ebs.VolumeType' % pre] = block_dev.volume_type <NEW_LINE> <DEDENT> if block_dev.iops is not None: <NEW_LINE> <INDENT> params['%s.Ebs.Iops' % pre] = block_dev.iops <NEW_LINE> <DEDENT> if block_dev.encrypted is not None: <NEW_LINE> <INDENT> if block_dev.encrypted: <NEW_LINE> <INDENT> params['%s.Ebs.Encrypted' % pre] = 'true' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> params['%s.Ebs.Encrypted' % pre] = 'false' <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> i += 1 | Represents a collection of BlockDeviceTypes when creating ec2 instances.
Example:
dev_sda1 = BlockDeviceType()
dev_sda1.size = 100 # change root volume to 100GB instead of default
bdm = BlockDeviceMapping()
bdm['/dev/sda1'] = dev_sda1
reservation = image.run(..., block_device_map=bdm, ...) | 62598fb471ff763f4b5e7837 |
class UserExists(BackendError): <NEW_LINE> <INDENT> pass | Raised when a user already exists. | 62598fb467a9b606de546092 |
class Config(object): <NEW_LINE> <INDENT> DB_URI = 'postgresql+psycopg2://alex:@127.0.0.1/openbookmark' <NEW_LINE> DOMAIN = '' <NEW_LINE> DEBUG = True <NEW_LINE> TIMEOUT = 3600 <NEW_LINE> REDIS_HOST = '127.0.0.1' <NEW_LINE> REDIS_PORT = 6379 | 基本配置 | 62598fb4fff4ab517ebcd8a9 |
class TestOrderStyleForwardingAlgorithm(TradingAlgorithm): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> self.method_name = kwargs.pop('method_name') <NEW_LINE> super(TestOrderStyleForwardingAlgorithm, self) .__init__(*args, **kwargs) <NEW_LINE> <DEDENT> def initialize(self): <NEW_LINE> <INDENT> self.incr = 0 <NEW_LINE> self.last_price = None <NEW_LINE> <DEDENT> def handle_data(self, data): <NEW_LINE> <INDENT> if self.incr == 0: <NEW_LINE> <INDENT> assert len(self.portfolio.positions.keys()) == 0 <NEW_LINE> method_to_check = getattr(self, self.method_name) <NEW_LINE> method_to_check(self.sid(133), data.current(sid(0), "price"), style=StopLimitOrder(10, 10)) <NEW_LINE> assert len(self.blotter.open_orders[self.sid(133)]) == 1 <NEW_LINE> result = self.blotter.open_orders[self.sid(133)][0] <NEW_LINE> assert result.limit == 10 <NEW_LINE> assert result.stop == 10 <NEW_LINE> self.incr += 1 | Test Algorithm for verifying that ExecutionStyles are properly forwarded by
order API helper methods. Pass the name of the method to be tested as a
string parameter to this algorithm's constructor. | 62598fb4d268445f26639be5 |
class People: <NEW_LINE> <INDENT> def __init__(self, user_id): <NEW_LINE> <INDENT> self.name = None <NEW_LINE> self.email = None <NEW_LINE> self.user_id = user_id <NEW_LINE> <DEDENT> def set_name(self, name): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> <DEDENT> def set_email(self, email): <NEW_LINE> <INDENT> self.email = email | Identity on a issue tracking system.
@param user_id: identifier of the user
@type user_id: C{str} | 62598fb4be383301e02538bf |
class Codelab__Music_Tiny_Orchestra_Glock_Pluck_Mode_3(object): <NEW_LINE> <INDENT> Invalid = 0 <NEW_LINE> Tiny_Orchestra_Glock_Pluck_Mode_3_Off = 2569343836 <NEW_LINE> Tiny_Orchestra_Glock_Pluck_Mode_3_On = 1273367574 | Automatically-generated uint_32 enumeration. | 62598fb4cc0a2c111447b0d7 |
class BaseSetting(object): <NEW_LINE> <INDENT> enable_themes = True <NEW_LINE> use_bootswatch = True | 配置主题 | 62598fb4a79ad1619776a12f |
class TTAVerticalFlip(BaseWheatTTA): <NEW_LINE> <INDENT> def augment(self, image): <NEW_LINE> <INDENT> return image.flip(2) <NEW_LINE> <DEDENT> def batch_augment(self, images): <NEW_LINE> <INDENT> return images.flip(3) <NEW_LINE> <DEDENT> def deaugment_boxes(self, boxes): <NEW_LINE> <INDENT> boxes[:, [0,2]] = self.image_size - boxes[:, [2,0]] <NEW_LINE> return boxes | author: @shonenkov | 62598fb4f548e778e596b668 |
class ActMask(nn.Module): <NEW_LINE> <INDENT> def __init__(self, num_channels): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> assert isinstance(num_channels, int) <NEW_LINE> assert num_channels >= 1 <NEW_LINE> self.num_channels = num_channels <NEW_LINE> self.mask = nn.Parameter(torch.ones(num_channels), requires_grad=False) <NEW_LINE> <DEDENT> def forward(self, x): <NEW_LINE> <INDENT> mask = self.mask <NEW_LINE> if len(x.shape) == 4: <NEW_LINE> <INDENT> mask = mask.view([x.shape[1], 1, 1]) <NEW_LINE> <DEDENT> return torch.mul(x, mask) <NEW_LINE> <DEDENT> def extra_repr(self): <NEW_LINE> <INDENT> s = '{num_channels}' <NEW_LINE> return s.format(**self.__dict__) | Apply a 0-1 mask on the activation channels. | 62598fb42c8b7c6e89bd3888 |
class WorksheetPrintView(WorksheetDetailView): <NEW_LINE> <INDENT> template_name = 'worksheet/print.html' <NEW_LINE> def render_to_response(self, context, **response_kwargs): <NEW_LINE> <INDENT> numbering = self.request.GET.get('q', '') <NEW_LINE> response = super(WorksheetPrintView, self).render_to_response( context, **response_kwargs) <NEW_LINE> response.render() <NEW_LINE> pdf_response = HttpResponse(content_type='application/pdf') <NEW_LINE> pdf_response['Content-Disposition'] = 'filename={}. {}.pdf'.format( numbering, context['file_title']) <NEW_LINE> html_object = HTML( string=response.content, base_url='file://', ) <NEW_LINE> html_object.write_pdf(pdf_response) <NEW_LINE> return pdf_response | Based on the WorkSheet Detail View, this is one is used for
downloading PDF module and sample test file. | 62598fb491f36d47f2230f0a |
class Optional(Proxy[T]): <NEW_LINE> <INDENT> def __init__(self, obj): <NEW_LINE> <INDENT> super().__init__(obj) <NEW_LINE> <DEDENT> def __contains__(self, item) -> bool: <NEW_LINE> <INDENT> me = getattr(self, '_Proxy__obj') <NEW_LINE> if me is None: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return item in me <NEW_LINE> <DEDENT> def __eq__(self, other) -> bool: <NEW_LINE> <INDENT> me = getattr(self, '_Proxy__obj') <NEW_LINE> if me is None: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return me == other <NEW_LINE> <DEDENT> def __getattr__(self, item): <NEW_LINE> <INDENT> if getattr(self, '_Proxy__obj') is None: <NEW_LINE> <INDENT> return EMPTY_OPTIONAL <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return super().__getattr__(item) <NEW_LINE> <DEDENT> <DEDENT> def __call__(self, *args, **kwargs): <NEW_LINE> <INDENT> if getattr(self, '_Proxy__obj') is None: <NEW_LINE> <INDENT> return EMPTY_OPTIONAL <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return super().__call__(*args, **kwargs) <NEW_LINE> <DEDENT> <DEDENT> def __bool__(self): <NEW_LINE> <INDENT> if getattr(self, '_Proxy__obj') is None: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return super().__bool__() <NEW_LINE> <DEDENT> <DEDENT> def __getitem__(self, item): <NEW_LINE> <INDENT> if getattr(self, '_Proxy__obj') is None: <NEW_LINE> <INDENT> return EMPTY_OPTIONAL <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return super().__getitem__(item) <NEW_LINE> <DEDENT> <DEDENT> def __setitem__(self, key, value) -> None: <NEW_LINE> <INDENT> if getattr(self, '_Proxy__obj') is None: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> return super().__setitem__(key, value) <NEW_LINE> <DEDENT> def __delitem__(self, key) -> None: <NEW_LINE> <INDENT> if getattr(self, '_Proxy__obj') is None: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> return super().__delitem__(key) | A wrapper for your classes, that does nothing if the object passed in is None.
It will return an empty Optional in that case.
Usage example:
>>> may_be_none = None
>>> Optional(may_be_none).cancel().result()
So far operations supported:
* calling
* getattr
* getitem/setitem/delitem
* testing for truth
* comparison (with nonexistent elements always comparing false)
* membership test (with nonexistent elements always returning false)
.. warning:: Returned objects via getattr and getitem are NOT wrapped in an
Optional. You need to do it by hand or just file an issue. I'll add that when I
need it.
:param obj: object to wrap | 62598fb45fdd1c0f98e5e053 |
class LSTMCell(tf.contrib.rnn.RNNCell): <NEW_LINE> <INDENT> def __init__(self, num_units, forget_bias=1.0, use_recurrent_dropout=False, dropout_keep_prob=0.9): <NEW_LINE> <INDENT> self.num_units = num_units <NEW_LINE> self.forget_bias = forget_bias <NEW_LINE> self.use_recurrent_dropout = use_recurrent_dropout <NEW_LINE> self.dropout_keep_prob = dropout_keep_prob <NEW_LINE> <DEDENT> @property <NEW_LINE> def state_size(self): <NEW_LINE> <INDENT> return 2 * self.num_units <NEW_LINE> <DEDENT> @property <NEW_LINE> def output_size(self): <NEW_LINE> <INDENT> return self.num_units <NEW_LINE> <DEDENT> def get_output(self, state): <NEW_LINE> <INDENT> unused_c, h = tf.split(state, 2, 1) <NEW_LINE> return h <NEW_LINE> <DEDENT> def __call__(self, x, state, scope=None): <NEW_LINE> <INDENT> with tf.variable_scope(scope or type(self).__name__): <NEW_LINE> <INDENT> c, h = tf.split(state, 2, 1) <NEW_LINE> x_size = x.get_shape().as_list()[1] <NEW_LINE> w_init = None <NEW_LINE> h_init = lstm_ortho_initializer(1.0) <NEW_LINE> w_xh = tf.get_variable( 'W_xh', [x_size, 4 * self.num_units], initializer=w_init) <NEW_LINE> w_hh = tf.get_variable( 'W_hh', [self.num_units, 4 * self.num_units], initializer=h_init) <NEW_LINE> bias = tf.get_variable( 'bias', [4 * self.num_units], initializer=tf.constant_initializer(0.0)) <NEW_LINE> concat = tf.concat([x, h], 1) <NEW_LINE> w_full = tf.concat([w_xh, w_hh], 0) <NEW_LINE> hidden = tf.matmul(concat, w_full) + bias <NEW_LINE> i, j, f, o = tf.split(hidden, 4, 1) <NEW_LINE> if self.use_recurrent_dropout: <NEW_LINE> <INDENT> g = tf.nn.dropout(tf.tanh(j), self.dropout_keep_prob) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> g = tf.tanh(j) <NEW_LINE> <DEDENT> new_c = c * tf.sigmoid(f + self.forget_bias) + tf.sigmoid(i) * g <NEW_LINE> new_h = tf.tanh(new_c) * tf.sigmoid(o) <NEW_LINE> return new_h, tf.concat([new_c, new_h], 1) | Vanilla LSTM cell.
Uses ortho initializer, and also recurrent dropout without memory loss
(https://arxiv.org/abs/1603.05118) | 62598fb4091ae35668704ce4 |
class MethodAttributeMemoizer(object): <NEW_LINE> <INDENT> def __init__(self, attribute_name): <NEW_LINE> <INDENT> self.attribute_name = attribute_name <NEW_LINE> <DEDENT> def __call__(self, func): <NEW_LINE> <INDENT> @functools.wraps(func) <NEW_LINE> def wrapped_f(*args, **kwargs): <NEW_LINE> <INDENT> obj = args[0] <NEW_LINE> try: <NEW_LINE> <INDENT> cache = obj.__cache <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> cache = obj.__cache = {} <NEW_LINE> <DEDENT> key = (str(func.__name__), str(getattr(obj, self.attribute_name)), str(args[1:]) + str(kwargs.items())) <NEW_LINE> if key not in cache: <NEW_LINE> <INDENT> cache[key] = func(*args, **kwargs) <NEW_LINE> <DEDENT> return cache[key] <NEW_LINE> <DEDENT> return wrapped_f | Define a decorator which caches results of an instance method.
Results are cached according to the value of a specific instance attribute. | 62598fb460cbc95b0636440a |
class Grammar(object): <NEW_LINE> <INDENT> def __init__(self, tags): <NEW_LINE> <INDENT> self.tags = tags <NEW_LINE> <DEDENT> def validate_server(self, server): <NEW_LINE> <INDENT> for tag in self.tags.keys(): <NEW_LINE> <INDENT> if tag in server.metadata: <NEW_LINE> <INDENT> self.validate(tag, self.metadata[tag]) <NEW_LINE> <DEDENT> <DEDENT> return True <NEW_LINE> <DEDENT> def validate(self, key, value): <NEW_LINE> <INDENT> if key not in self.tags: <NEW_LINE> <INDENT> raise AnnotationSyntaxError( key, value, "%s not a valid key name!" % (key)) <NEW_LINE> <DEDENT> if self.tags[key].validate(value): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> raise AnnotationSyntaxError( key, value, "Unknown syntax error with annotation!") <NEW_LINE> <DEDENT> def list_keys(self): <NEW_LINE> <INDENT> return self.tags.keys() <NEW_LINE> <DEDENT> def explain_values(self, key): <NEW_LINE> <INDENT> if key not in self.tags: <NEW_LINE> <INDENT> raise AnnotationSyntaxError( key, "?", "%s not a valid key name!" % (key)) <NEW_LINE> <DEDENT> elif self.tags[key]: <NEW_LINE> <INDENT> return self.tags[key].description() | Container object for valid instance annotations. | 62598fb47d43ff2487427465 |
class DnsChecker(): <NEW_LINE> <INDENT> def querry(self, resolvers, targets): <NEW_LINE> <INDENT> resolver = dns.resolver.Resolver() <NEW_LINE> test_resolvers = {} <NEW_LINE> dnsresults = dnsresult.DNSResult() <NEW_LINE> for name, value in sorted(resolvers["Servers"].items()): <NEW_LINE> <INDENT> if value["active"] is True: <NEW_LINE> <INDENT> test_resolvers[value["IPv6"]] = name <NEW_LINE> <DEDENT> <DEDENT> for [domain, record] in sorted(targets["Targets"]): <NEW_LINE> <INDENT> for name in sorted(test_resolvers): <NEW_LINE> <INDENT> resolver.nameservers = [name] <NEW_LINE> try: <NEW_LINE> <INDENT> response = resolver.resolve(domain, record) <NEW_LINE> for ip_address in response: <NEW_LINE> <INDENT> dnsresults.add(domain, test_resolvers[name], str(ip_address)) <NEW_LINE> <DEDENT> <DEDENT> except (dns.resolver.NoAnswer, dns.resolver.NXDOMAIN, dns.resolver.NoNameservers, dns.exception.Timeout): <NEW_LINE> <INDENT> dnsresults.add(domain, test_resolvers[name], "None") <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return dnsresults.get_all() <NEW_LINE> <DEDENT> def display (self, result): <NEW_LINE> <INDENT> table = [ ["Domain", "IP", "Hosts"] ] <NEW_LINE> for domain, value in result.items(): <NEW_LINE> <INDENT> for ip_address, hosts in value.items(): <NEW_LINE> <INDENT> if ip_address != "None": <NEW_LINE> <INDENT> table.append([domain, ip_address, str(hosts)]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> table.append([domain, '\033[0;31m'+ip_address+'\033[0m', str(hosts)]) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> ascii_table = AsciiTable(table) <NEW_LINE> print(ascii_table.table) | Query each server defined in self.dns_resolvers for each domain in self.targets | 62598fb4cc0a2c111447b0d8 |
class EightHeatSensor(EightSleepHeatEntity): <NEW_LINE> <INDENT> def __init__(self, name, eight, sensor): <NEW_LINE> <INDENT> super().__init__(eight) <NEW_LINE> self._sensor = sensor <NEW_LINE> self._mapped_name = NAME_MAP.get(self._sensor, self._sensor) <NEW_LINE> self._name = f"{name} {self._mapped_name}" <NEW_LINE> self._state = None <NEW_LINE> self._side = self._sensor.split("_")[0] <NEW_LINE> self._userid = self._eight.fetch_userid(self._side) <NEW_LINE> self._usrobj = self._eight.users[self._userid] <NEW_LINE> _LOGGER.debug( "Heat Sensor: %s, Side: %s, User: %s", self._sensor, self._side, self._userid, ) <NEW_LINE> <DEDENT> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return self._name <NEW_LINE> <DEDENT> @property <NEW_LINE> def state(self): <NEW_LINE> <INDENT> return self._state <NEW_LINE> <DEDENT> @property <NEW_LINE> def unit_of_measurement(self): <NEW_LINE> <INDENT> return UNIT_PERCENTAGE <NEW_LINE> <DEDENT> async def async_update(self): <NEW_LINE> <INDENT> _LOGGER.debug("Updating Heat sensor: %s", self._sensor) <NEW_LINE> self._state = self._usrobj.heating_level <NEW_LINE> <DEDENT> @property <NEW_LINE> def device_state_attributes(self): <NEW_LINE> <INDENT> state_attr = {ATTR_TARGET_HEAT: self._usrobj.target_heating_level} <NEW_LINE> state_attr[ATTR_ACTIVE_HEAT] = self._usrobj.now_heating <NEW_LINE> state_attr[ATTR_DURATION_HEAT] = self._usrobj.heating_remaining <NEW_LINE> return state_attr | Representation of an eight sleep heat-based sensor. | 62598fb47b180e01f3e490b3 |
class UserDialogInfo: <NEW_LINE> <INDENT> thrift_spec = ( None, (1, TType.STRUCT, 'dialog', (DialogInfoDto, DialogInfoDto.thrift_spec), None, ), (2, TType.STRUCT, 'lastMessage', (InstantMessageDto, InstantMessageDto.thrift_spec), None, ), ) <NEW_LINE> def __init__(self, dialog=None, lastMessage=None,): <NEW_LINE> <INDENT> self.dialog = dialog <NEW_LINE> self.lastMessage = lastMessage <NEW_LINE> <DEDENT> def read(self, iprot): <NEW_LINE> <INDENT> if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None: <NEW_LINE> <INDENT> fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec)) <NEW_LINE> return <NEW_LINE> <DEDENT> iprot.readStructBegin() <NEW_LINE> while True: <NEW_LINE> <INDENT> (fname, ftype, fid) = iprot.readFieldBegin() <NEW_LINE> if ftype == TType.STOP: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> if fid == 1: <NEW_LINE> <INDENT> if ftype == TType.STRUCT: <NEW_LINE> <INDENT> self.dialog = DialogInfoDto() <NEW_LINE> self.dialog.read(iprot) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> elif fid == 2: <NEW_LINE> <INDENT> if ftype == TType.STRUCT: <NEW_LINE> <INDENT> self.lastMessage = InstantMessageDto() <NEW_LINE> self.lastMessage.read(iprot) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> iprot.readFieldEnd() <NEW_LINE> <DEDENT> iprot.readStructEnd() <NEW_LINE> <DEDENT> def write(self, oprot): <NEW_LINE> <INDENT> if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None: <NEW_LINE> <INDENT> oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))) <NEW_LINE> return <NEW_LINE> <DEDENT> oprot.writeStructBegin('UserDialogInfo') <NEW_LINE> if self.dialog is not None: 
<NEW_LINE> <INDENT> oprot.writeFieldBegin('dialog', TType.STRUCT, 1) <NEW_LINE> self.dialog.write(oprot) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> if self.lastMessage is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('lastMessage', TType.STRUCT, 2) <NEW_LINE> self.lastMessage.write(oprot) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> oprot.writeFieldStop() <NEW_LINE> oprot.writeStructEnd() <NEW_LINE> <DEDENT> def validate(self): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> L = ['%s=%r' % (key, value) for key, value in self.__dict__.iteritems()] <NEW_LINE> return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not (self == other) | Attributes:
- dialog
- lastMessage | 62598fb467a9b606de546095 |
class NonStringError(TypeError): <NEW_LINE> <INDENT> pass | An Error for arguments given that are not strings. | 62598fb45fdd1c0f98e5e054 |
class RegExpInstance(ObjectInstance): <NEW_LINE> <INDENT> es_class = 'RegExp' <NEW_LINE> def __init__(self, interpreter, source=None, is_global=False, is_ignore_case=False, is_multiline=False): <NEW_LINE> <INDENT> super(RegExpInstance, self).__init__(interpreter) <NEW_LINE> self.source = source <NEW_LINE> self.is_global= is_global <NEW_LINE> self.is_ignore_case = is_ignore_case <NEW_LINE> self.is_multiline = is_multiline <NEW_LINE> self.set_property("source", source) <NEW_LINE> self.set_property("global", is_global) <NEW_LINE> self.set_property("ignoreCase", is_ignore_case) <NEW_LINE> self.set_property("multiline", is_multiline) <NEW_LINE> self.set_property("lastIndex", 0, writable=True) <NEW_LINE> <DEDENT> def match(self, string, index): <NEW_LINE> <INDENT> flags = 0 <NEW_LINE> if self.is_ignore_case: <NEW_LINE> <INDENT> flags |= re.IGNORECASE <NEW_LINE> <DEDENT> if self.is_multiline: <NEW_LINE> <INDENT> flags |= re.MULTILINE <NEW_LINE> <DEDENT> translated = RegExpParser.parse_string(self.source) <NEW_LINE> regexp = re.compile(translated, flags) <NEW_LINE> match = regexp.match(string, index) <NEW_LINE> if match is None: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> return (match.end(), match.groups()) | The specialized ``RegExp`` object class.
15.10.7 | 62598fb47047854f4633f4a1 |
class RegistrationForm(UserCreationForm): <NEW_LINE> <INDENT> error_messages = {'password_mismatch': _e[7], 'email_unique': _e[9], 'username_min_length': _e[2]} <NEW_LINE> Meta = UserCreationForm.Meta <NEW_LINE> Meta.error_messages = {'username': {'unique': _e[1], 'required': _e[0], 'max_length': _e[3]}} <NEW_LINE> password1 = forms.CharField(widget=forms.PasswordInput, min_length=settings.PASSWORD_MIN_LENGTH, error_messages={'required': _e[4], 'min_length': _e[6]}) <NEW_LINE> password2 = forms.CharField(widget=forms.PasswordInput, error_messages={'required': _e[5]}) <NEW_LINE> email = forms.EmailField(error_messages={'invalid': _e[10], 'required': _e[8]}) <NEW_LINE> def clean_username(self): <NEW_LINE> <INDENT> code = 'username_min_length' <NEW_LINE> username = self.cleaned_data['username'] <NEW_LINE> if len(username) < settings.USERNAME_MIN_LENGTH: <NEW_LINE> <INDENT> raise forms.ValidationError(self.error_messages[code], code) <NEW_LINE> <DEDENT> return username <NEW_LINE> <DEDENT> def clean_email(self): <NEW_LINE> <INDENT> email = self.cleaned_data['email'] <NEW_LINE> if EmailAddress.objects.filter(address=email, verified=True): <NEW_LINE> <INDENT> raise forms.ValidationError(self.error_messages['email_unique'], 'email_unique') <NEW_LINE> <DEDENT> return email <NEW_LINE> <DEDENT> def save(self): <NEW_LINE> <INDENT> user = super(RegistrationForm, self).save(commit=False) <NEW_LINE> user.is_active = False <NEW_LINE> user.email = self.cleaned_data['email'] <NEW_LINE> user.save() <NEW_LINE> UserProfile.objects.create(user=user) | User registration form. | 62598fb410dbd63aa1c70c7c |
class TestInitialisation(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.fh = StringIO() <NEW_LINE> self.workbook = Workbook() <NEW_LINE> self.workbook._set_filehandle(self.fh) <NEW_LINE> <DEDENT> def test_xml_declaration(self): <NEW_LINE> <INDENT> self.workbook._xml_declaration() <NEW_LINE> exp = """<?xml version="1.0" encoding="UTF-8" standalone="yes"?>\n""" <NEW_LINE> got = self.fh.getvalue() <NEW_LINE> self.assertEqual(got, exp) <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> self.workbook.fileclosed = 1 | Test initialisation of the Workbook class and call a method. | 62598fb456b00c62f0fb297e |
class MultiRegionAccessPoint(AWSObject): <NEW_LINE> <INDENT> resource_type = "AWS::S3::MultiRegionAccessPoint" <NEW_LINE> props: PropsDictType = { "Name": (str, False), "PublicAccessBlockConfiguration": (PublicAccessBlockConfiguration, False), "Regions": ([Region], True), } | `MultiRegionAccessPoint <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-s3-multiregionaccesspoint.html>`__ | 62598fb430bbd722464699dc |
class DependencyType(object): <NEW_LINE> <INDENT> (NONE, IMAGE, CONTAINER) = flag_gen(3) | Differentiate between `Image` and `Container` graphs | 62598fb4627d3e7fe0e06f75 |
class LsbootLan(ManagedObject): <NEW_LINE> <INDENT> consts = LsbootLanConsts() <NEW_LINE> naming_props = set([]) <NEW_LINE> mo_meta = MoMeta("LsbootLan", "lsbootLan", "lan", VersionMeta.Version111a, "InputOutput", 0x3f, [], ["admin", "ls-compute", "ls-config", "ls-config-policy", "ls-server", "ls-server-policy", "ls-storage", "ls-storage-policy"], [u'lsbootDef', u'lsbootPolicy'], [u'lsbootLanImagePath'], ["Add", "Get", "Remove", "Set"]) <NEW_LINE> prop_meta = { "access": MoPropertyMeta("access", "access", "string", VersionMeta.Version111a, MoPropertyMeta.READ_ONLY, None, None, None, None, ["read-only", "read-only-local", "read-only-remote", "read-only-remote-cimc", "read-write", "read-write-drive", "read-write-local", "read-write-remote", "read-write-remote-cimc"], []), "child_action": MoPropertyMeta("child_action", "childAction", "string", VersionMeta.Version111a, MoPropertyMeta.INTERNAL, None, None, None, r"""((deleteAll|ignore|deleteNonPresent),){0,2}(deleteAll|ignore|deleteNonPresent){0,1}""", [], []), "dn": MoPropertyMeta("dn", "dn", "string", VersionMeta.Version111a, MoPropertyMeta.READ_ONLY, 0x2, 0, 256, None, [], []), "order": MoPropertyMeta("order", "order", "ushort", VersionMeta.Version111a, MoPropertyMeta.READ_WRITE, 0x4, None, None, None, [], ["1-16"]), "prot": MoPropertyMeta("prot", "prot", "string", VersionMeta.Version111a, MoPropertyMeta.READ_WRITE, 0x8, None, None, None, ["gpxe", "iSCSI", "pxe"], []), "rn": MoPropertyMeta("rn", "rn", "string", VersionMeta.Version111a, MoPropertyMeta.READ_ONLY, 0x10, 0, 256, None, [], []), "status": MoPropertyMeta("status", "status", "string", VersionMeta.Version111a, MoPropertyMeta.READ_WRITE, 0x20, None, None, r"""((removed|created|modified|deleted),){0,3}(removed|created|modified|deleted){0,1}""", [], []), "type": MoPropertyMeta("type", "type", "string", VersionMeta.Version111a, MoPropertyMeta.READ_ONLY, None, None, None, None, ["efi-shell", "iscsi", "lan", "san", "storage", "virtual-media"], []), } <NEW_LINE> 
prop_map = { "access": "access", "childAction": "child_action", "dn": "dn", "order": "order", "prot": "prot", "rn": "rn", "status": "status", "type": "type", } <NEW_LINE> def __init__(self, parent_mo_or_dn, **kwargs): <NEW_LINE> <INDENT> self._dirty_mask = 0 <NEW_LINE> self.access = None <NEW_LINE> self.child_action = None <NEW_LINE> self.order = None <NEW_LINE> self.prot = None <NEW_LINE> self.status = None <NEW_LINE> self.type = None <NEW_LINE> ManagedObject.__init__(self, "LsbootLan", parent_mo_or_dn, **kwargs) | This is LsbootLan class. | 62598fb4dc8b845886d5367d |
class ChildAbs(Abstract): <NEW_LINE> <INDENT> pass | Child of abstract class, should not be abstract by default | 62598fb45fc7496912d482df |
class UnknownVerificarloBackend(Exception): <NEW_LINE> <INDENT> pass | Raised when backend is unknown | 62598fb491f36d47f2230f0b |
class Union(_WithSubValidators): <NEW_LINE> <INDENT> def _exec(self, funcs, v, path=None): <NEW_LINE> <INDENT> error = None <NEW_LINE> for func in funcs: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> if path is None: <NEW_LINE> <INDENT> return func(v) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return func(path, v) <NEW_LINE> <DEDENT> <DEDENT> except Invalid as e: <NEW_LINE> <INDENT> if error is None or len(e.path) > len(error.path): <NEW_LINE> <INDENT> error = e <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> if error: <NEW_LINE> <INDENT> raise error if self.msg is None else AnyInvalid( self.msg, path=path) <NEW_LINE> <DEDENT> raise AnyInvalid(self.msg or 'no valid value found', path=path) | Use the first validated value among those selected by discriminant.
:param msg: Message to deliver to user if validation fails.
:param discriminant(value, validators): Returns the filtered list of validators based on the value.
:param kwargs: All other keyword arguments are passed to the sub-schema constructors.
:returns: Return value of the first validator that passes.
>>> validate = Schema(Union({'type':'a', 'a_val':'1'},{'type':'b', 'b_val':'2'},
... discriminant=lambda val, alt: filter(
... lambda v : v['type'] == val['type'] , alt)))
>>> validate({'type':'a', 'a_val':'1'}) == {'type':'a', 'a_val':'1'}
True
>>> with raises(MultipleInvalid, "not a valid value for dictionary value @ data['b_val']"):
... validate({'type':'b', 'b_val':'5'})
```discriminant({'type':'b', 'a_val':'5'}, [{'type':'a', 'a_val':'1'},{'type':'b', 'b_val':'2'}])``` is invoked
Without the discriminant, the exception would be "extra keys not allowed @ data['b_val']" | 62598fb44c3428357761a381 |
class PrmDetacher(Parametre): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> Parametre.__init__(self, "détacher", "untide") <NEW_LINE> self.aide_courte = "détache les amarres" <NEW_LINE> self.aide_longue = "Cette commande permet de détacher les amarres retenant un " "navire. Vous ne devez pas l'entrer sur le navire-même " "mais dans la salle où l'amarre est fixée, sur le quai." <NEW_LINE> <DEDENT> def interpreter(self, personnage, dic_masques): <NEW_LINE> <INDENT> salle = personnage.salle <NEW_LINE> if hasattr(salle, "navire"): <NEW_LINE> <INDENT> personnage << "|err|Vous ne pouvez manipuler les amarres d'ici.|ff|" <NEW_LINE> return <NEW_LINE> <DEDENT> etendue = salle.etendue <NEW_LINE> if etendue is None or salle.coords.z != etendue.altitude or salle.nom_terrain not in TERRAINS_QUAI: <NEW_LINE> <INDENT> personnage << "|err|Vous n'êtes pas sur un quai.|ff|" <NEW_LINE> return <NEW_LINE> <DEDENT> navires = [n for n in importeur.navigation.navires.values() if n.etendue is etendue] <NEW_LINE> d_salle = None <NEW_LINE> for t_navire in navires: <NEW_LINE> <INDENT> for t_salle in t_navire.salles.values(): <NEW_LINE> <INDENT> if t_salle.amarre and t_salle.amarre.attachee is salle: <NEW_LINE> <INDENT> d_salle = t_salle <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> if d_salle is None: <NEW_LINE> <INDENT> personnage << "|err|Aucun navire n'est amarré ici.|ff|" <NEW_LINE> return <NEW_LINE> <DEDENT> navire = d_salle.navire <NEW_LINE> if navire.proprietaire and not navire.a_le_droit(personnage, si_present=True): <NEW_LINE> <INDENT> personnage << "|err|Vous ne pouvez désamarrer ce " "navire.|ff|" <NEW_LINE> return <NEW_LINE> <DEDENT> d_salle.amarre.attachee = None <NEW_LINE> d_salle.navire.immobilise = False <NEW_LINE> personnage << "Vous détachez l'amarre retenant {}.".format( d_salle.navire.nom) <NEW_LINE> salle.envoyer("{{}} détache l'amarre retenant {}.".format( d_salle.navire.nom), personnage) | Commande 'amarre detacher'.
| 62598fb4f9cc0f698b1c532f |
class BaseCache(object): <NEW_LINE> <INDENT> def cached(self, extra=None, timeout=None): <NEW_LINE> <INDENT> def decorator(func): <NEW_LINE> <INDENT> def get_cache_key(*args, **kwargs): <NEW_LINE> <INDENT> md5 = cross.md5() <NEW_LINE> md5.update('%s.%s' % (func.__module__, func.__name__)) <NEW_LINE> if extra is not None: <NEW_LINE> <INDENT> if isinstance(extra, (list, tuple)): <NEW_LINE> <INDENT> md5.update(':'.join(map(str, extra))) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> md5.update(str(extra)) <NEW_LINE> <DEDENT> <DEDENT> if args: <NEW_LINE> <INDENT> md5.update(repr(args)) <NEW_LINE> <DEDENT> if kwargs: <NEW_LINE> <INDENT> md5.update(repr(sorted(kwargs.items()))) <NEW_LINE> <DEDENT> return 'c:%s' % md5.hexdigest() <NEW_LINE> <DEDENT> @wraps(func) <NEW_LINE> def wrapper(*args, **kwargs): <NEW_LINE> <INDENT> cache_key = get_cache_key(*args, **kwargs) <NEW_LINE> try: <NEW_LINE> <INDENT> result = self.get(cache_key) <NEW_LINE> <DEDENT> except CacheMiss: <NEW_LINE> <INDENT> result = func(*args, **kwargs) <NEW_LINE> self.set(cache_key, result, timeout) <NEW_LINE> <DEDENT> return result <NEW_LINE> <DEDENT> def invalidate(*args, **kwargs): <NEW_LINE> <INDENT> cache_key = get_cache_key(*args, **kwargs) <NEW_LINE> self.delete(cache_key) <NEW_LINE> <DEDENT> wrapper.invalidate = invalidate <NEW_LINE> return wrapper <NEW_LINE> <DEDENT> return decorator | Simple cache with time-based invalidation | 62598fb45166f23b2e2434a2 |
class InitFenetre(Fenetre): <NEW_LINE> <INDENT> def createFenetre(self): <NEW_LINE> <INDENT> self.setWindowTitle('Chargement') <NEW_LINE> self.resize(395, 460) <NEW_LINE> self.okButton = QtGui.QPushButton("Ok", parent=self) <NEW_LINE> self.okButton.setGeometry(QtCore.QRect(10, 250, 361, 41)) <NEW_LINE> self.progressBar = QtGui.QProgressBar(parent=self) <NEW_LINE> self.progressBar.setGeometry(QtCore.QRect(10, 200, 361, 31)) <NEW_LINE> self.progressBar.setValue(0) <NEW_LINE> self.label = QtGui.QLabel(parent=self) <NEW_LINE> self.label.setGeometry(QtCore.QRect(10, 230, 361, 17)) <NEW_LINE> self.label_2 = QtGui.QLabel(parent=self) <NEW_LINE> self.label_2.setGeometry(QtCore.QRect(20, 10, 341, 171)) <NEW_LINE> self.label_2.setPixmap(QtGui.QPixmap("/home/moustache/prog/DIV/logo.jpeg")) <NEW_LINE> self.text=QtGui.QTextEdit(parent=self) <NEW_LINE> self.text.setGeometry(QtCore.QRect(10, 300, 360, 150)) <NEW_LINE> <DEDENT> def createConnection(self): <NEW_LINE> <INDENT> self.okButton.clicked.connect(self.closeAction) <NEW_LINE> <DEDENT> def closeAction(self): <NEW_LINE> <INDENT> self.mere._fenetre=None <NEW_LINE> self.parent.mere=self.mere <NEW_LINE> self.parent._initNext() <NEW_LINE> self.parent.show() <NEW_LINE> self.close() <NEW_LINE> <DEDENT> def initAction(self): <NEW_LINE> <INDENT> self.mere=Mere(fenetre=self) <NEW_LINE> self.mere.initData() <NEW_LINE> self.text.append("Ok !") <NEW_LINE> <DEDENT> def _actualisation(self,val_prog_bar=None,i_tot=None,label=None,val_time=None): <NEW_LINE> <INDENT> if val_prog_bar:self.progressBar.setValue(val_prog_bar) <NEW_LINE> if label:self.label.setText(label) <NEW_LINE> if i_tot:self.text.append("Les {} {} chargé en {}s".format(i_tot,label,val_time)) | fenetre d'initialisation | 62598fb463b5f9789fe85232 |
class ClearEntryMixin(object): <NEW_LINE> <INDENT> __gsignals__ = {'clear': ( gobject.SIGNAL_RUN_LAST|gobject.SIGNAL_ACTION, gobject.TYPE_NONE, ())} <NEW_LINE> def enable_clear_button(self): <NEW_LINE> <INDENT> self.set_icon_from_stock( gtk.ENTRY_ICON_SECONDARY, gtk.STOCK_CLEAR) <NEW_LINE> self.connect("icon-release", self.__clear) <NEW_LINE> <DEDENT> def __clear(self, button, *args): <NEW_LINE> <INDENT> self.delete_text(0, -1) <NEW_LINE> self.emit('clear') | A clear icon mixin supporting newer gtk.Entry or sexy.IconEntry /
a separate clear button as a fallback. | 62598fb4498bea3a75a57be8 |
class EnvironmentSettingsBuilderCompletenessTests(PythonAPICompletenessTestCase, unittest.TestCase): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def python_class(cls): <NEW_LINE> <INDENT> return EnvironmentSettings.Builder <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def java_class(cls): <NEW_LINE> <INDENT> return "org.apache.flink.table.api.EnvironmentSettings$Builder" | Tests whether the Python :class:`EnvironmentSettings.Builder` is consistent with
Java `org.apache.flink.table.api.EnvironmentSettings$Builder`. | 62598fb4091ae35668704ce6 |
class AddAccessConfigInputSet(InputSet): <NEW_LINE> <INDENT> def set_AccessConfiguration(self, value): <NEW_LINE> <INDENT> InputSet._set_input(self, 'AccessConfiguration', value) <NEW_LINE> <DEDENT> def set_AccessToken(self, value): <NEW_LINE> <INDENT> InputSet._set_input(self, 'AccessToken', value) <NEW_LINE> <DEDENT> def set_ClientID(self, value): <NEW_LINE> <INDENT> InputSet._set_input(self, 'ClientID', value) <NEW_LINE> <DEDENT> def set_ClientSecret(self, value): <NEW_LINE> <INDENT> InputSet._set_input(self, 'ClientSecret', value) <NEW_LINE> <DEDENT> def set_Instance(self, value): <NEW_LINE> <INDENT> InputSet._set_input(self, 'Instance', value) <NEW_LINE> <DEDENT> def set_Name(self, value): <NEW_LINE> <INDENT> InputSet._set_input(self, 'Name', value) <NEW_LINE> <DEDENT> def set_NatIP(self, value): <NEW_LINE> <INDENT> InputSet._set_input(self, 'NatIP', value) <NEW_LINE> <DEDENT> def set_NetworkInterface(self, value): <NEW_LINE> <INDENT> InputSet._set_input(self, 'NetworkInterface', value) <NEW_LINE> <DEDENT> def set_Project(self, value): <NEW_LINE> <INDENT> InputSet._set_input(self, 'Project', value) <NEW_LINE> <DEDENT> def set_RefreshToken(self, value): <NEW_LINE> <INDENT> InputSet._set_input(self, 'RefreshToken', value) <NEW_LINE> <DEDENT> def set_Type(self, value): <NEW_LINE> <INDENT> InputSet._set_input(self, 'Type', value) <NEW_LINE> <DEDENT> def set_Zone(self, value): <NEW_LINE> <INDENT> InputSet._set_input(self, 'Zone', value) | An InputSet with methods appropriate for specifying the inputs to the AddAccessConfig
Choreo. The InputSet object is used to specify input parameters when executing this Choreo. | 62598fb456ac1b37e63022b2 |
class Writer: <NEW_LINE> <INDENT> def __init__(self, file_type, file_handler, **kwargs): <NEW_LINE> <INDENT> _mapped_writer_class = { "csv": CsvWriter, "flat": FlatWriter, }[file_type] <NEW_LINE> self.writer = _mapped_writer_class(file_handler, **kwargs) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return str(self.writer) <NEW_LINE> <DEDENT> def write_row(self, row): <NEW_LINE> <INDENT> self.writer.write_row(row) | writer factory | 62598fb44f88993c371f056f |
class Verb_a(Verb): <NEW_LINE> <INDENT> def __init__(self, name, base=None): <NEW_LINE> <INDENT> Verb.__init__(self, name, base) <NEW_LINE> if base is None: <NEW_LINE> <INDENT> self.base = self.name[:-2] <NEW_LINE> <DEDENT> self.endings = { '1s': 'ám', '2s': 'áš', '3s': 'á', '1p': 'áme', '2p': 'áte', '3p': 'ají', } <NEW_LINE> self.conjugate() <NEW_LINE> <DEDENT> def print_imperative(self): <NEW_LINE> <INDENT> print('Imperative') <NEW_LINE> print('2s:{}'.format(self.base + 'ej')) <NEW_LINE> print('1p:{}'.format(self.base + 'ejme')) <NEW_LINE> print('2p:{}'.format(self.base + 'ejte')) | -A verbs (dĕlat, , , mít) | 62598fb416aa5153ce4005cb |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.