code stringlengths 4 4.48k | docstring stringlengths 1 6.45k | _id stringlengths 24 24 |
|---|---|---|
class TaskDefinition(resource.BaseResource): <NEW_LINE> <INDENT> def __init__(self, name, container_spec, cluster): <NEW_LINE> <INDENT> super(TaskDefinition, self).__init__() <NEW_LINE> self.name = name <NEW_LINE> self.cpus = container_spec.cpus <NEW_LINE> self.memory = container_spec.memory <NEW_LINE> self.image = container_spec.image <NEW_LINE> self.container_port = container_spec.container_port <NEW_LINE> self.region = cluster.region <NEW_LINE> self.arn = None <NEW_LINE> self.log_group = aws_logs.LogGroup(self.region, 'pkb') <NEW_LINE> <DEDENT> def _CreateDependencies(self): <NEW_LINE> <INDENT> if not self.log_group.Exists(): <NEW_LINE> <INDENT> self.log_group.Create() <NEW_LINE> <DEDENT> <DEDENT> def _Create(self): <NEW_LINE> <INDENT> register_cmd = util.AWS_PREFIX + [ '--region', self.region, 'ecs', 'register-task-definition', '--family', self.name, '--execution-role-arn', 'ecsTaskExecutionRole', '--network-mode', 'awsvpc', '--requires-compatibilities=FARGATE', '--cpu', str(int(1024 * self.cpus)), '--memory', str(self.memory), '--container-definitions', self._GetContainerDefinitions() ] <NEW_LINE> stdout, _, _ = vm_util.IssueCommand(register_cmd) <NEW_LINE> response = json.loads(stdout) <NEW_LINE> self.arn = response['taskDefinition']['taskDefinitionArn'] <NEW_LINE> <DEDENT> def _Delete(self): <NEW_LINE> <INDENT> if self.arn is None: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> deregister_cmd = util.AWS_PREFIX + [ '--region', self.region, 'ecs', 'deregister-task-definition', '--task-definition', self.arn ] <NEW_LINE> vm_util.IssueCommand(deregister_cmd) <NEW_LINE> <DEDENT> def _GetContainerDefinitions(self): <NEW_LINE> <INDENT> definitions = [{ 'name': self.name, 'image': self.image, 'essential': True, 'portMappings': [ { 'containerPort': self.container_port, 'protocol': 'TCP' } ], 'logConfiguration': { 'logDriver': 'awslogs', 'options': { 'awslogs-group': 'pkb', 'awslogs-region': self.region, 'awslogs-stream-prefix': 'pkb' } } }] <NEW_LINE> return 
json.dumps(definitions) | Class representing an AWS task definition. | 62598fa201c39578d7f12bed |
class CompAddWithDefault(ExplicitComponent): <NEW_LINE> <INDENT> def setup(self): <NEW_LINE> <INDENT> self.add_input('x_a') <NEW_LINE> self.add_input('x_b', val=3.) <NEW_LINE> self.add_input('x_c', val=(3., 3.)) <NEW_LINE> self.add_input('x_d', val=[3., 3.]) <NEW_LINE> self.add_input('x_e', val=3. * np.ones((2, 2))) <NEW_LINE> self.add_output('y_a') <NEW_LINE> self.add_output('y_b', val=6.) <NEW_LINE> self.add_output('y_c', val=(6., 6., 6.)) <NEW_LINE> self.add_output('y_d', val=[6., 6., 6.]) <NEW_LINE> self.add_output('y_e', val=6. * np.ones((3, 2))) | Component for tests for declaring only default value. | 62598fa2baa26c4b54d4f11e |
class EDSR(nn.Module): <NEW_LINE> <INDENT> def __init__(self, nb_channel, upscale_factor=2, base_channel=64, num_residuals=50): <NEW_LINE> <INDENT> super(EDSR, self).__init__() <NEW_LINE> self.input_conv = nn.Conv2d(nb_channel, base_channel, kernel_size=3, stride=1, padding=1) <NEW_LINE> resnet_blocks = [] <NEW_LINE> for _ in range(num_residuals): <NEW_LINE> <INDENT> resnet_blocks.append(ResnetBlock(base_channel, kernel=3, stride=1, padding=1)) <NEW_LINE> <DEDENT> self.residual_layers = nn.Sequential(*resnet_blocks) <NEW_LINE> self.mid_conv = nn.Conv2d(base_channel, base_channel, kernel_size=3, stride=1, padding=1) <NEW_LINE> upscale = [] <NEW_LINE> for _ in range(int(math.log2(upscale_factor))): <NEW_LINE> <INDENT> upscale.append(deconv(base_channel, base_channel, upscale_factor=2)) <NEW_LINE> <DEDENT> self.upscale_layers = nn.Sequential(*upscale) <NEW_LINE> self.output_conv = nn.Conv2d(base_channel, nb_channel, kernel_size=3, stride=1, padding=1) <NEW_LINE> for m in self.modules(): <NEW_LINE> <INDENT> if isinstance(m, nn.Conv2d): <NEW_LINE> <INDENT> nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu') <NEW_LINE> <DEDENT> elif isinstance(m, nn.BatchNorm2d): <NEW_LINE> <INDENT> nn.init.constant_(m.weight, 1) <NEW_LINE> nn.init.constant_(m.bias, 0) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def forward(self, x): <NEW_LINE> <INDENT> x = self.input_conv(x) <NEW_LINE> residual = x <NEW_LINE> x = self.residual_layers(x) <NEW_LINE> x = self.mid_conv(x) <NEW_LINE> x = torch.add(x, residual) <NEW_LINE> x = self.upscale_layers(x) <NEW_LINE> x = self.output_conv(x) <NEW_LINE> return x | https://github.com/icpm/super-resolution/edit/master/EDSR/model.py | 62598fa2656771135c4894f3 |
class BufferTree(gtkextra.Tree): <NEW_LINE> <INDENT> YPAD = 2 <NEW_LINE> XPAD = 2 <NEW_LINE> COLUMNS = [('icon', gtk.gdk.Pixbuf, gtk.CellRendererPixbuf, True, 'pixbuf'), ('name', gobject.TYPE_STRING, gtk.CellRendererText, True, 'text'), ('file', gobject.TYPE_STRING, None, False, None), ('number', gobject.TYPE_INT, None, False, None)] <NEW_LINE> def populate(self, bufferlist): <NEW_LINE> <INDENT> self.clear() <NEW_LINE> for buf in bufferlist: <NEW_LINE> <INDENT> path = '' <NEW_LINE> if len(buf) > 1: <NEW_LINE> <INDENT> path = '%s' % buf[1] <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> nr = int(buf[0]) <NEW_LINE> name = os.path.split(path)[-1] <NEW_LINE> mtype = mimetypes.guess_type(path)[0] <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> nr = 0 <NEW_LINE> name = '' <NEW_LINE> mtype = None <NEW_LINE> <DEDENT> if mtype: <NEW_LINE> <INDENT> mtype = mtype.replace('/','-') <NEW_LINE> im = self.cb.icons.get_image(mtype).get_pixbuf() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> im = self.cb.icons.get_image('text-plain').get_pixbuf() <NEW_LINE> <DEDENT> self.add_item([im, name, path, nr]) <NEW_LINE> <DEDENT> <DEDENT> def set_active(self, i): <NEW_LINE> <INDENT> for node in self.model: <NEW_LINE> <INDENT> if node[3] == i: <NEW_LINE> <INDENT> self.view.set_cursor(node.path) <NEW_LINE> return True <NEW_LINE> <DEDENT> <DEDENT> return False | Tree view control for buffer list. | 62598fa25f7d997b871f9317 |
class GitProject(ProjectExtention): <NEW_LINE> <INDENT> def __init__(self, name: str, proj_path: str,): <NEW_LINE> <INDENT> super().__init__(name, proj_path) <NEW_LINE> <DEDENT> def _create(self): <NEW_LINE> <INDENT> files = ["__init__.py"] <NEW_LINE> files = [osp.join(self.proj_path, "source", fn) for fn in files] <NEW_LINE> self.repo = git.Repo.init(self.proj_path) <NEW_LINE> for file_name in files: <NEW_LINE> <INDENT> open(file_name, 'wb').close() <NEW_LINE> <DEDENT> self.repo.index.add(files) <NEW_LINE> self.repo.index.commit("initial commit") <NEW_LINE> <DEDENT> def _load(self): <NEW_LINE> <INDENT> self.repo = git.Repo(self.proj_path) <NEW_LINE> <DEDENT> def _open(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> p = subprocess.Popen(["/usr/share/gitkraken/gitkraken", "-p" , self.proj_path, ">/dev/null", "&"]) <NEW_LINE> <DEDENT> except KeyError as e: <NEW_LINE> <INDENT> print("{} does not have a run command mapped".format(sys.platform)) <NEW_LINE> raise e <NEW_LINE> <DEDENT> raise UnopenableProject <NEW_LINE> <DEDENT> def _check_for_existing(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> git.Repo(self.proj_path) <NEW_LINE> return True <NEW_LINE> <DEDENT> except git.InvalidGitRepositoryError: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> def _on_existing_create_new(self): <NEW_LINE> <INDENT> return False | This extention creates a git project and adds some essential files
Basic tutorial follows:
https://gitpython.readthedocs.io/en/stable/tutorial.html#the-commit-object | 62598fa24f6381625f1993f4 |
class Binding: <NEW_LINE> <INDENT> def __init__(self, name, parserFn=None): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.invocation = None <NEW_LINE> self.outputs = None <NEW_LINE> self.parserFn = parserFn <NEW_LINE> self.instanceOf = None <NEW_LINE> <DEDENT> def matchArgs(self, bindings): <NEW_LINE> <INDENT> if self.invocation: <NEW_LINE> <INDENT> invocation = copy.copy(self.invocation) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> invocation = {} <NEW_LINE> <DEDENT> if bindings: <NEW_LINE> <INDENT> invocation += bindings <NEW_LINE> <DEDENT> return self.instanceOf.matchArgs(invocation) <NEW_LINE> <DEDENT> def compile(self, symbols=None): <NEW_LINE> <INDENT> if self.instanceOf is None: <NEW_LINE> <INDENT> self.instanceOf = symbols[self.name] <NEW_LINE> <DEDENT> if symbols: <NEW_LINE> <INDENT> syms = copy.copy(symbols) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> syms = {} <NEW_LINE> <DEDENT> cinv = compileParamsList(self.invocation, symbols) <NEW_LINE> syms.update(self.instanceOf.matchArgs(cinv)) <NEW_LINE> ret = self.instanceOf.compile(syms) <NEW_LINE> return ret <NEW_LINE> <DEDENT> def parse(self, tokens, n, symbols=None): <NEW_LINE> <INDENT> if self.parserFn: <NEW_LINE> <INDENT> return self.parserFn(self, tokens, n, symbols) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> params = None <NEW_LINE> if tokens[n] == "(": <NEW_LINE> <INDENT> params = [] <NEW_LINE> (n, p) = argsConsumer(tokens, n, symbols) <NEW_LINE> params += p <NEW_LINE> <DEDENT> if n < len(tokens) and tokens[n] == "->": <NEW_LINE> <INDENT> n += 1 <NEW_LINE> (n, p) = paramsConsumer(tokens, n) <NEW_LINE> self.outputs = p <NEW_LINE> <DEDENT> self.invocation = params <NEW_LINE> return (n, copy.copy(self)) | Connects a macro definition with the arguments of a particular invocation
| 62598fa2d53ae8145f9182fb |
class RegistrationDetail(JSONAPIBaseView, generics.RetrieveUpdateAPIView, RegistrationMixin, WaterButlerMixin): <NEW_LINE> <INDENT> permission_classes = ( drf_permissions.IsAuthenticatedOrReadOnly, ContributorOrPublic, base_permissions.TokenHasScope, ) <NEW_LINE> required_read_scopes = [CoreScopes.NODE_REGISTRATIONS_READ] <NEW_LINE> required_write_scopes = [CoreScopes.NODE_REGISTRATIONS_WRITE] <NEW_LINE> serializer_class = RegistrationDetailSerializer <NEW_LINE> view_category = 'registrations' <NEW_LINE> view_name = 'registration-detail' <NEW_LINE> parser_classes = (JSONAPIMultipleRelationshipsParser, JSONAPIMultipleRelationshipsParserForRegularJSON,) <NEW_LINE> def get_object(self): <NEW_LINE> <INDENT> registration = self.get_node() <NEW_LINE> if not registration.is_registration: <NEW_LINE> <INDENT> raise ValidationError('This is not a registration.') <NEW_LINE> <DEDENT> return registration <NEW_LINE> <DEDENT> def get_renderer_context(self): <NEW_LINE> <INDENT> context = super(RegistrationDetail, self).get_renderer_context() <NEW_LINE> show_counts = is_truthy(self.request.query_params.get('related_counts', False)) <NEW_LINE> if show_counts: <NEW_LINE> <INDENT> registration = self.get_object() <NEW_LINE> context['meta'] = { 'templated_by_count': registration.templated_list.count(), } <NEW_LINE> <DEDENT> return context | The documentation for this endpoint can be found [here](https://developer.osf.io/#operation/registrations_read).
| 62598fa260cbc95b063641bd |
class UserCreationForm(forms.ModelForm): <NEW_LINE> <INDENT> password1 = forms.CharField(label='Password', widget=forms.PasswordInput) <NEW_LINE> password2 = forms.CharField(label='Password confirmation', widget=forms.PasswordInput) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> model = Tbusuario <NEW_LINE> fields = ('email', 'nome') <NEW_LINE> <DEDENT> def clean_password2(self): <NEW_LINE> <INDENT> password1 = self.cleaned_data.get("password1") <NEW_LINE> password2 = self.cleaned_data.get("password2") <NEW_LINE> if password1 and password2 and password1 != password2: <NEW_LINE> <INDENT> raise forms.ValidationError(_("Passwords don't match")) <NEW_LINE> <DEDENT> return password2 <NEW_LINE> <DEDENT> def save(self, commit=True): <NEW_LINE> <INDENT> user = super(UserCreationForm, self).save(commit=False) <NEW_LINE> user.set_password(self.cleaned_data["password1"]) <NEW_LINE> if commit: <NEW_LINE> <INDENT> user.save() <NEW_LINE> <DEDENT> return user | A form for creating new users. Includes all the required
fields, plus a repeated password. | 62598fa25fdd1c0f98e5de07 |
class AttributedDictTest(test_base.RDFValueTestCase): <NEW_LINE> <INDENT> rdfvalue_class = rdfvalue.AttributedDict <NEW_LINE> def GenerateSample(self, number=0): <NEW_LINE> <INDENT> return rdfvalue.AttributedDict({"number": number}) <NEW_LINE> <DEDENT> def testInitialize(self): <NEW_LINE> <INDENT> arnie = {"target": "Sarah Connor", "mission": "Protect"} <NEW_LINE> t800 = {"target": "Sarah Connor", "mission": "Terminate"} <NEW_LINE> terminator = rdfvalue.AttributedDict(arnie) <NEW_LINE> self.assertEquals(terminator.GetItem("target"), "Sarah Connor") <NEW_LINE> self.assertEquals(terminator.GetItem("mission"), "Protect") <NEW_LINE> terminator = rdfvalue.AttributedDict(t800) <NEW_LINE> self.assertEquals(terminator.target, "Sarah Connor") <NEW_LINE> self.assertEquals(terminator.mission, "Terminate") <NEW_LINE> self.assertFalse(terminator.GetItem("happy_face")) <NEW_LINE> self.assertRaises(AttributeError, terminator.happy_face) <NEW_LINE> <DEDENT> def testAttributedDictSettingsAreAttr(self): <NEW_LINE> <INDENT> t800 = {"target": "Sarah Connor", "mission": "Terminate"} <NEW_LINE> terminator = rdfvalue.AttributedDict(t800) <NEW_LINE> self.assertEquals(terminator.target, "Sarah Connor") <NEW_LINE> self.assertEquals(terminator.mission, "Terminate") | Test AttributedDictFile operations. | 62598fa266656f66f7d5a260 |
class BaseLogger(Callback): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(BaseLogger, self).__init__() <NEW_LINE> <DEDENT> def on_epoch_begin(self, epoch, logs=None): <NEW_LINE> <INDENT> self.seen = 0 <NEW_LINE> self.totals = defaultdict(list) <NEW_LINE> <DEDENT> def on_batch_end(self, batch, logs=None): <NEW_LINE> <INDENT> if logs is not None: <NEW_LINE> <INDENT> for k, v in logs.items(): <NEW_LINE> <INDENT> self.totals[k].append(v) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def on_epoch_end(self, epoch, logs=None): <NEW_LINE> <INDENT> if logs is not None: <NEW_LINE> <INDENT> for k in self.totals: <NEW_LINE> <INDENT> if not k.startswith("size:"): <NEW_LINE> <INDENT> logs[k] = np.mean(self.totals[k], axis=0) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> logs[k] = np.sum(self.totals[k]) | Callback that accumulates epoch averages. | 62598fa20c0af96317c561f1 |
class GIC_CFG_PERMIS(db.Model): <NEW_LINE> <INDENT> __tablename__ = 'A_GIC_CFG_PERMIS' <NEW_LINE> id_permis = db.Column(db.Integer, primary_key=True) <NEW_LINE> nom_permis = db.Column(db.String(40)) <NEW_LINE> actiu = db.Column(db.String(1)) <NEW_LINE> grup = db.Column(db.Integer, db.ForeignKey(GIC_CFG_GRUP.id_grup)) <NEW_LINE> grupr = db.relationship('GIC_CFG_GRUP', foreign_keys='GIC_CFG_PERMIS.grup') <NEW_LINE> permisos = db.relationship('GIC_PERMIS', backref='GIC_PERMIS.id_permis', primaryjoin='GIC_CFG_PERMIS.id_permis==GIC_PERMIS.id_permis', lazy='dynamic') <NEW_LINE> def __init__(self, nom_permis, actiu, grup): <NEW_LINE> <INDENT> self.nom_permis = nom_permis <NEW_LINE> self.actiu = actiu <NEW_LINE> self.grup = grup | taula de permisos | 62598fa2e5267d203ee6b77c |
class IsSuperOrProfileOwner(permissions.BasePermission): <NEW_LINE> <INDENT> def has_object_permission(self, request, view, obj): <NEW_LINE> <INDENT> if request.user.profile == obj or request.user.is_superuser: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return False <NEW_LINE> <DEDENT> def has_permission(self, request, view): <NEW_LINE> <INDENT> return bool( request.user.is_authenticated and (request.user.is_superuser or request.user.profile) ) | Custom permission to only allow superusers or owners of an object to see and edit it. | 62598fa276e4537e8c3ef41b |
class DateTimeScaleDraw( QwtScaleDraw ): <NEW_LINE> <INDENT> def __init__( self, *args ): <NEW_LINE> <INDENT> QwtScaleDraw.__init__( self, *args ) <NEW_LINE> <DEDENT> def label(self, value ): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> dt = datetime.fromtimestamp( value ) <NEW_LINE> return QwtText( '%s' % dt.strftime( '%H:%M:%S' ) ) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> pass | Class used to draw a datetime axis on the plot.
| 62598fa23539df3088ecc123 |
class List(ListWidget): <NEW_LINE> <INDENT> admin = site.get_action('bans') <NEW_LINE> id = 'list' <NEW_LINE> columns = ( ('ban', _("Ban"), 50), ('expires', _("Expires")), ) <NEW_LINE> default_sorting = 'expires' <NEW_LINE> sortables = { 'ban': 1, 'expires': 0, } <NEW_LINE> pagination = 20 <NEW_LINE> search_form = SearchBansForm <NEW_LINE> empty_message = _('No bans are currently set.') <NEW_LINE> empty_search_message = _('No bans have been found.') <NEW_LINE> nothing_checked_message = _('You have to check at least one ban.') <NEW_LINE> actions = ( ('delete', _("Lift selected bans"), _("Are you sure you want to lift selected bans?")), ) <NEW_LINE> def set_filters(self, model, filters): <NEW_LINE> <INDENT> if 'ban' in filters: <NEW_LINE> <INDENT> model = model.filter(ban__contains=filters['ban']) <NEW_LINE> <DEDENT> if 'reason' in filters: <NEW_LINE> <INDENT> model = model.filter(Q(reason_user__contains=filters['reason']) | Q(reason_admin__contains=filters['reason'])) <NEW_LINE> <DEDENT> if 'test' in filters: <NEW_LINE> <INDENT> model = model.filter(test__in=filters['test']) <NEW_LINE> <DEDENT> return model <NEW_LINE> <DEDENT> def get_item_actions(self, item): <NEW_LINE> <INDENT> return ( self.action('pencil', _("Edit Ban"), reverse('admin_bans_edit', item)), self.action('remove', _("Lift Ban"), reverse('admin_bans_delete', item), post=True, prompt=_("Are you sure you want to lift this ban?")), ) <NEW_LINE> <DEDENT> def action_delete(self, items, checked): <NEW_LINE> <INDENT> Ban.objects.filter(id__in=checked).delete() <NEW_LINE> with UpdatingMonitor() as cm: <NEW_LINE> <INDENT> monitor.increase('bans_version') <NEW_LINE> <DEDENT> return Message(_('Selected bans have been lifted successfully.'), messages.SUCCESS), reverse('admin_bans') | List Bans | 62598fa23539df3088ecc124 |
class NEODatabase(object): <NEW_LINE> <INDENT> def __init__(self, filename): <NEW_LINE> <INDENT> self.filename = filename <NEW_LINE> self.neo_name = {} <NEW_LINE> self.neo_date = {} <NEW_LINE> <DEDENT> def load_data(self, filename=None): <NEW_LINE> <INDENT> if not (filename or self.filename): <NEW_LINE> <INDENT> raise Exception('Cannot load data, no filename provided') <NEW_LINE> <DEDENT> filename = filename or self.filename <NEW_LINE> neo_data_file = open(filename, 'r') <NEW_LINE> neo_data = csv.DictReader(neo_data_file) <NEW_LINE> for neo_row_data in neo_data: <NEW_LINE> <INDENT> orbit_path = OrbitPath(**neo_row_data) <NEW_LINE> if not self.neo_name.get(neo_row_data['name'], None): <NEW_LINE> <INDENT> self.neo_name[neo_row_data['name']] = NearEarthObject(**neo_row_data) <NEW_LINE> <DEDENT> near_earth_object = self.neo_name.get(neo_row_data['name'], None) <NEW_LINE> near_earth_object.update_orbits(orbit_path) <NEW_LINE> if not self.neo_date.get(neo_row_data['close_approach_date'], None): <NEW_LINE> <INDENT> self.neo_date[neo_row_data['close_approach_date']] = [] <NEW_LINE> <DEDENT> self.neo_date[neo_row_data['close_approach_date']].append(near_earth_object) <NEW_LINE> <DEDENT> return None | Object to hold Near Earth Objects and their orbits.
To support optimized date searching, a dict mapping of all orbit date paths to the Near Earth Objects
recorded on a given day is maintained. Additionally, all unique instances of a Near Earth Object
are contained in a dict mapping the Near Earth Object name to the NearEarthObject instance. | 62598fa26aa9bd52df0d4d39 |
class Kong(_SameNum): <NEW_LINE> <INDENT> size: int = 4 <NEW_LINE> def __str__(self) -> str: <NEW_LINE> <INDENT> return '|'.join(map(str, [ self.tiles[0], Misc.HIDDEN.value, self.tiles[-1]])) | Represents a Kong (four identical tiles, counted as three) | 62598fa291f36d47f2230dd9 |
class TestBaseType(unittest.TestCase): <NEW_LINE> <INDENT> def test_no_data(self): <NEW_LINE> <INDENT> var = BaseType("var") <NEW_LINE> self.assertIsNone(var.data) <NEW_LINE> self.assertEqual(var.dimensions, ()) <NEW_LINE> <DEDENT> def test_data_and_dimensions(self): <NEW_LINE> <INDENT> var = BaseType("var", [42], ('x',)) <NEW_LINE> self.assertEqual(var.data, [42]) <NEW_LINE> self.assertEqual(var.dimensions, ('x',)) <NEW_LINE> <DEDENT> def test_repr(self): <NEW_LINE> <INDENT> var = BaseType("var", 42, foo="bar") <NEW_LINE> self.assertEqual(repr(var), "<BaseType with data 42>") <NEW_LINE> <DEDENT> def test_dtype(self): <NEW_LINE> <INDENT> var = BaseType("var", np.array(1, np.int32)) <NEW_LINE> self.assertEqual(var.dtype, np.int32) <NEW_LINE> <DEDENT> def test_shape(self): <NEW_LINE> <INDENT> var = BaseType("var", np.arange(16).reshape(2, 2, 2, 2)) <NEW_LINE> self.assertEqual(var.shape, (2, 2, 2, 2)) <NEW_LINE> <DEDENT> def test_copy(self): <NEW_LINE> <INDENT> original = BaseType("var", np.array(1)) <NEW_LINE> clone = copy.copy(original) <NEW_LINE> self.assertIsNot(original, clone) <NEW_LINE> self.assertIs(original.data, clone.data) <NEW_LINE> self.assertEqual(original.id, clone.id) <NEW_LINE> self.assertEqual(original.name, clone.name) <NEW_LINE> self.assertEqual(original.dimensions, clone.dimensions) <NEW_LINE> self.assertEqual(original.attributes, clone.attributes) <NEW_LINE> <DEDENT> def test_comparisons(self): <NEW_LINE> <INDENT> var = BaseType("var", np.array(1)) <NEW_LINE> self.assertTrue(var == 1) <NEW_LINE> self.assertTrue(var != 2) <NEW_LINE> self.assertTrue(var >= 0) <NEW_LINE> self.assertTrue(var <= 2) <NEW_LINE> self.assertTrue(var > 0) <NEW_LINE> self.assertTrue(var < 2) <NEW_LINE> <DEDENT> def test_sequence_protocol(self): <NEW_LINE> <INDENT> var = BaseType("var", np.arange(10)) <NEW_LINE> self.assertEqual(var[-5], 5) <NEW_LINE> self.assertEqual(len(var), 10) <NEW_LINE> self.assertEqual(list(var), list(range(10))) <NEW_LINE> <DEDENT> def 
test_iter_protocol(self): <NEW_LINE> <INDENT> var = BaseType("var", np.arange(10)) <NEW_LINE> self.assertEqual(list(iter(var)), list(range(10))) | Test the base Pydap type. | 62598fa201c39578d7f12bee |
class RangeModule: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self._ranges = [] <NEW_LINE> <DEDENT> def addRange(self, left: int, right: int) -> None: <NEW_LINE> <INDENT> lb = bisect_left(self._ranges, left) <NEW_LINE> rb = bisect_right(self._ranges, right) <NEW_LINE> self._ranges[lb: rb] = [left] * (lb & 1 == 0) + [right] * (rb & 1 == 0) <NEW_LINE> <DEDENT> def queryRange(self, left: int, right: int) -> bool: <NEW_LINE> <INDENT> lb = bisect_right(self._ranges, left) <NEW_LINE> rb = bisect_left(self._ranges, right) <NEW_LINE> return lb == rb and lb & 1 <NEW_LINE> <DEDENT> def removeRange(self, left: int, right: int) -> None: <NEW_LINE> <INDENT> lb = bisect_left(self._ranges, left) <NEW_LINE> rb = bisect_right(self._ranges, right) <NEW_LINE> self._ranges[lb: rb] = [left] * (lb & 1 == 1) + [right] * (rb & 1 == 1) | 1. Take the ranges as a sorted list of numbers where the items on the
even indexes stand for the openings of each range while the items on
the odd indexes stand for the closings of each range. For example, if
ranges = [10, 15, 20, 25], this covers the ranges of
[10, 15) and [20, 25).
2. When we want to add a range, let's say [14, 22), we do a binary search
on the left most number in _ranges which is <= 14. In our case it is
lb = bisect_left(ranges, left) = 1. And we also want to find the
right most number in _ranges which is >= 22. In our case it is
rb = bisect_right(ranges, right) = 3.
2.1 If lb is odd, it means the left of the added range falls into an
existing range.
2.1.1 If rb is odd, it means the right of the added range falls
into an existing range. This means the added range has
connected the ranges specified by ranges[lb: rb], so we should
collapse them by setting ranges[lb:rb] = []. In our case the
ranges will become [10, 25] after addinng range [14, 22).
2.1.2 If rb is even, it means the right of the added range does
not fall into any existing range, so we need to create a new
range by replacing ranges[lb: rb] to [right].
2.2 If lb is even, it means the left of the added range does not fall
into any existing range.
2.2.1 If rb is odd, ranges[lb: rb] = [left].
2.2.2 If rb is even, ranges[lb: rb] = [left, right].
3. Similar cases happen as the above when we want to remove a range.
4. When we want to query a target range [left, right), first we should find
the right most boundary which is <= left and the left most boundary
which is >= right.
4.1 If lb == rb, it means there is no extra range between left and
right.
4.2 If lb is odd, it means the left side of the queried range falls
into an existing range.
So when both 4.1 and 4.2 are satisfied, it means the target range has
been covered by the exisiting ranges. | 62598fa2627d3e7fe0e06d1b |
class RandomBallsEnv(PoliceKillAllEnv): <NEW_LINE> <INDENT> def __init__(self, init_thief_num=1, step_add_thief_max=3, **kwargs): <NEW_LINE> <INDENT> super().__init__(**kwargs) <NEW_LINE> self.step_add_thief_max = step_add_thief_max <NEW_LINE> self.init_thief_num = init_thief_num <NEW_LINE> self.team_size[self.adversary_team] = init_thief_num <NEW_LINE> self.rest_thief_num = self.adversary_num - init_thief_num <NEW_LINE> <DEDENT> def _step(self, action): <NEW_LINE> <INDENT> random_num = random.choice(range(1, self.step_add_thief_max)) <NEW_LINE> add_num = min(random_num, self.rest_thief_num) <NEW_LINE> self.rest_thief_num -= add_num <NEW_LINE> for i in range(add_num): <NEW_LINE> <INDENT> self.current_state['thief'].append(self.add_one_thief()) <NEW_LINE> <DEDENT> return super()._step(action) <NEW_LINE> <DEDENT> def _reset(self): <NEW_LINE> <INDENT> self.rest_thief_num = self.adversary_num - self.init_thief_num <NEW_LINE> return super()._reset() <NEW_LINE> <DEDENT> def _cal_done(self, state, kill_num): <NEW_LINE> <INDENT> all_killed = self.rest_thief_num <= 0 and len(state["thief"]) == 0 <NEW_LINE> _pass_step_limit = self.elapsed_steps >= self.spec.max_episode_steps <NEW_LINE> if _pass_step_limit or all_killed: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return False | Focus to add more randomness into env
Feature:
1. Thief are incremently added into map in each step
2. Each add batch has random num of thief
3. Thief walk in a random way | 62598fa232920d7e50bc5ec5 |
class DiscoveredNeighbor(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.swagger_types = { 'heard_count': 'int', 'mac_address': 'str', 'rssi': 'int' } <NEW_LINE> self.attribute_map = { 'heard_count': 'heardCount', 'mac_address': 'macAddress', 'rssi': 'rssi' } <NEW_LINE> self._heard_count = None <NEW_LINE> self._mac_address = None <NEW_LINE> self._rssi = None <NEW_LINE> <DEDENT> @property <NEW_LINE> def heard_count(self): <NEW_LINE> <INDENT> return self._heard_count <NEW_LINE> <DEDENT> @heard_count.setter <NEW_LINE> def heard_count(self, heard_count): <NEW_LINE> <INDENT> self._heard_count = heard_count <NEW_LINE> <DEDENT> @property <NEW_LINE> def mac_address(self): <NEW_LINE> <INDENT> return self._mac_address <NEW_LINE> <DEDENT> @mac_address.setter <NEW_LINE> def mac_address(self, mac_address): <NEW_LINE> <INDENT> self._mac_address = mac_address <NEW_LINE> <DEDENT> @property <NEW_LINE> def rssi(self): <NEW_LINE> <INDENT> return self._rssi <NEW_LINE> <DEDENT> @rssi.setter <NEW_LINE> def rssi(self, rssi): <NEW_LINE> <INDENT> self._rssi = rssi <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in iteritems(self.swagger_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other | NOTE: This 
class is auto generated by the swagger code generator program.
Do not edit the class manually. | 62598fa2097d151d1a2c0e9a |
class TestPlugin: <NEW_LINE> <INDENT> classProvides(ITestPlugin, IPlugin) <NEW_LINE> def test1(): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> test1 = staticmethod(test1) | A plugin used solely for testing purposes. | 62598fa2d58c6744b42dc20b |
class MsgSetBinData(MsgpackMsg): <NEW_LINE> <INDENT> object_type = 'set_bindata' <NEW_LINE> rid = fields.SmallUnsignedInteger() <NEW_LINE> id = fields.NodeID() <NEW_LINE> key = fields.String() <NEW_LINE> start = fields.SmallUnsignedInteger(default=0) <NEW_LINE> data = fields.Binary() <NEW_LINE> truncate = fields.Boolean(default=False) | Sets a range of bindata on a given node. Server replies with MsgRequestAck
or MsgRequestError. The bindata is modified starting from a given start
position - it is an error if the position is after the current end of
bindata (but not if it's equal). The bindata is expanded if necessary
to hold the new data. If truncate is set, the bindata is truncated
after the end of the new data. If bindata would become 0-length,
it is deleted. | 62598fa27cff6e4e811b5896 |
class TestProductTranslationTemplateSplitter( TestCaseWithFactory, TestTranslationTemplateSplitterBase): <NEW_LINE> <INDENT> def makePOTemplate(self): <NEW_LINE> <INDENT> return self.factory.makePOTemplate( name='template', side=TranslationSide.UPSTREAM) <NEW_LINE> <DEDENT> def makeSharingTemplate(self, template, other_side=False): <NEW_LINE> <INDENT> if other_side: <NEW_LINE> <INDENT> template2 = self.factory.makePOTemplate( name='template', side=TranslationSide.UBUNTU) <NEW_LINE> self.factory.makePackagingLink( productseries=template.productseries, distroseries=template2.distroseries, sourcepackagename=template2.sourcepackagename) <NEW_LINE> return template2 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> product = template.productseries.product <NEW_LINE> other_series = self.factory.makeProductSeries(product=product) <NEW_LINE> return self.factory.makePOTemplate(name='template', productseries=other_series) | Templates in a product get split appropriately. | 62598fa2adb09d7d5dc0a3fa |
class IFixtureAsset(Interface): <NEW_LINE> <INDENT> pass | Marker to register :term:`asset` specs for fixtures directories. | 62598fa201c39578d7f12bef |
class ReorderableListBox(wx.ListBox): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> if len(args) == 0 and len(kwargs) == 0: <NEW_LINE> <INDENT> wx.ListBox.__init__(self) <NEW_LINE> self.Bind(wx.EVT_WINDOW_CREATE, self.OnCreate) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> wx.ListBox.__init__(self, *args, **kwargs) <NEW_LINE> wx.CallAfter(self.__PostInit) <NEW_LINE> <DEDENT> <DEDENT> def OnCreate(self,evt): <NEW_LINE> <INDENT> self.Unbind(wx.EVT_WINDOW_CREATE) <NEW_LINE> wx.CallAfter(self.__PostInit) <NEW_LINE> evt.Skip() <NEW_LINE> return True <NEW_LINE> <DEDENT> def __PostInit(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def MoveSelectedUp(self): <NEW_LINE> <INDENT> sel = self.GetSelection() <NEW_LINE> if sel == wx.NOT_FOUND or sel == 0: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> data = self.GetClientData(sel) <NEW_LINE> label = self.GetString(sel) <NEW_LINE> self.Delete(sel) <NEW_LINE> self.Insert(label, sel - 1) <NEW_LINE> if data is not None: <NEW_LINE> <INDENT> self.SetClientData(sel - 1, data) <NEW_LINE> <DEDENT> self.SetSelection(sel - 1) <NEW_LINE> <DEDENT> def MoveSelectedDown(self): <NEW_LINE> <INDENT> sel = self.GetSelection() <NEW_LINE> if sel == wx.NOT_FOUND or sel == self.GetCount() - 1: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> data = self.GetClientData(sel) <NEW_LINE> label = self.GetString(sel) <NEW_LINE> self.Delete(sel) <NEW_LINE> self.Insert(label, sel + 1) <NEW_LINE> if data is not None: <NEW_LINE> <INDENT> self.SetClientData(sel + 1, data) <NEW_LINE> <DEDENT> self.SetSelection(sel + 1) <NEW_LINE> <DEDENT> def GetClientDatas(self): <NEW_LINE> <INDENT> return [self.GetClientData(i) for i in range(self.GetCount())] <NEW_LINE> <DEDENT> def SetLabelsAndClientDatas(self, labels, datas): <NEW_LINE> <INDENT> with WindowUpdateLocker(self): <NEW_LINE> <INDENT> self.Clear() <NEW_LINE> self.Append(labels) <NEW_LINE> for i, d in enumerate(datas): <NEW_LINE> <INDENT> self.SetClientData(i, d) | Additional 
functionality: Move selected item one step upward/downward | 62598fa2d6c5a102081e1fb7 |
class TerminateError(Exception): <NEW_LINE> <INDENT> pass | Raised when attempts to terminate the browser fail. | 62598fa263d6d428bbee2622 |
class CustomCreateView(CreateView): <NEW_LINE> <INDENT> def set_initial(self, instance): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def form_valid(self, form): <NEW_LINE> <INDENT> self.set_initial(form.instance) <NEW_LINE> return super().form_valid(form) | オブジェクトの生成時に初期値をシステム側で管理する生成ビュー
(フォームとしては扱わない値の初期値を制御) | 62598fa230dc7b766599f6be |
class Book(db.Model): <NEW_LINE> <INDENT> __tablename__ = 'tbl_books' <NEW_LINE> id = db.Column(db.Integer, primary_key=True) <NEW_LINE> name = db.Column(db.String(64), unique=True) <NEW_LINE> author_id = db.Column(db.Integer, db.ForeignKey("tbl_authors.id")) | 书籍 | 62598fa2d53ae8145f9182fd |
class RequestedRangeNotSatisfiable(HTTPError): <NEW_LINE> <INDENT> status = "416", "Requested Range Not Satisfiable" | Allow customized messages on 415 errors | 62598fa210dbd63aa1c70a20 |
@dataclass <NEW_LINE> class Tags(Generics): <NEW_LINE> <INDENT> content: List[str] <NEW_LINE> def __init__(self, pack: DataPack): <NEW_LINE> <INDENT> super().__init__(pack) <NEW_LINE> self.content: List[str] = [] | A Generics class Tags, used to refer to tags part of the report
Attributes:
content (List[str]) | 62598fa276e4537e8c3ef41d |
class DdnsCollection(XmlObject): <NEW_LINE> <INDENT> OPERATE_ADD = 1 <NEW_LINE> OPERATE_DELETE = 2 <NEW_LINE> OPERATE_EDIT = 3 <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> super(DdnsCollection, self).__init__() <NEW_LINE> self.ddnss = [] <NEW_LINE> self.operate = self.OPERATE_ADD <NEW_LINE> <DEDENT> def addNoIpDdns(self, config): <NEW_LINE> <INDENT> config[Ddns.P_PROVIDER] = Ddns.PROVIDERS[1] <NEW_LINE> return self.addDdns(config) <NEW_LINE> <DEDENT> def addDynDnsDdns(self, config): <NEW_LINE> <INDENT> config[Ddns.P_PROVIDER] = Ddns.PROVIDERS[0] <NEW_LINE> return self.addDdns(config) <NEW_LINE> <DEDENT> def addOrayDdns(self, config): <NEW_LINE> <INDENT> config[Ddns.P_PROVIDER] = Ddns.PROVIDERS[2] <NEW_LINE> return self.addDdns(config) <NEW_LINE> <DEDENT> def addDdns(self, config): <NEW_LINE> <INDENT> rec = Ddns(config) <NEW_LINE> rec.index = len(self.ddnss) <NEW_LINE> self.ddnss.append(rec) <NEW_LINE> return rec <NEW_LINE> <DEDENT> def setToAdd(self): <NEW_LINE> <INDENT> self.operate = self.OPERATE_ADD <NEW_LINE> <DEDENT> def setToDelete(self): <NEW_LINE> <INDENT> self.operate = self.OPERATE_DELETE <NEW_LINE> <DEDENT> def setToEdit(self): <NEW_LINE> <INDENT> self.operate = self.OPERATE_EDIT | Provides support for dynamic DNS providers: NoIp, DynDns, Oray | 62598fa2c432627299fa2e4b |
class Users(_base.Base): <NEW_LINE> <INDENT> __tablename__ = 'users' <NEW_LINE> id = Column(Integer, primary_key=True) <NEW_LINE> name = Column(String(512), nullable=False, info={'verbose_name': 'Имя'}) <NEW_LINE> last_name = Column(String(512), nullable=False, info={'verbose_name': 'Фамилия'}) <NEW_LINE> fathers_name = Column(String(512), nullable=False, info={'verbose_name': 'Отчество'}) <NEW_LINE> birthday = Column(String(512), nullable=False, info={'verbose_name': 'Дата рождения'}) <NEW_LINE> email = Column(String(512), nullable=False, info={'verbose_name': 'Эл. почта'}) <NEW_LINE> phone = Column(String(512), nullable=True, info={'verbose_name': 'Телефон'}) <NEW_LINE> type_account = Column(USER_TYPES, default='Физическое лицо', info={'verbose_name': 'Тип пользователя'}) <NEW_LINE> wallet_id = Column(Integer, ForeignKey('wallet.id'), nullable=True, info={'verbose_name': 'Идентификатор кошелька '}) <NEW_LINE> wallet = relationship("Wallet", backref="users") <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return f'{self.id} - {self.name} - {self.fathers_name}' <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return f'{self.id} - {self.name} - {self.fathers_name}' | Таблица пользователь | 62598fa2e5267d203ee6b77e |
class ComputeManagementClient: <NEW_LINE> <INDENT> def __init__( self, credential: "TokenCredential", subscription_id: str, base_url: str = "https://management.azure.com", **kwargs: Any ) -> None: <NEW_LINE> <INDENT> self._config = ComputeManagementClientConfiguration(credential=credential, subscription_id=subscription_id, **kwargs) <NEW_LINE> self._client = ARMPipelineClient(base_url=base_url, config=self._config, **kwargs) <NEW_LINE> client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} <NEW_LINE> self._serialize = Serializer(client_models) <NEW_LINE> self._deserialize = Deserializer(client_models) <NEW_LINE> self._serialize.client_side_validation = False <NEW_LINE> self.disks = DisksOperations(self._client, self._config, self._serialize, self._deserialize) <NEW_LINE> self.snapshots = SnapshotsOperations(self._client, self._config, self._serialize, self._deserialize) <NEW_LINE> self.disk_encryption_sets = DiskEncryptionSetsOperations(self._client, self._config, self._serialize, self._deserialize) <NEW_LINE> <DEDENT> def _send_request( self, request, **kwargs: Any ) -> HttpResponse: <NEW_LINE> <INDENT> request_copy = deepcopy(request) <NEW_LINE> request_copy.url = self._client.format_url(request_copy.url) <NEW_LINE> return self._client.send_request(request_copy, **kwargs) <NEW_LINE> <DEDENT> def close(self): <NEW_LINE> <INDENT> self._client.close() <NEW_LINE> <DEDENT> def __enter__(self): <NEW_LINE> <INDENT> self._client.__enter__() <NEW_LINE> return self <NEW_LINE> <DEDENT> def __exit__(self, *exc_details): <NEW_LINE> <INDENT> self._client.__exit__(*exc_details) | Compute Client.
:ivar disks: DisksOperations operations
:vartype disks: azure.mgmt.compute.v2019_11_01.operations.DisksOperations
:ivar snapshots: SnapshotsOperations operations
:vartype snapshots: azure.mgmt.compute.v2019_11_01.operations.SnapshotsOperations
:ivar disk_encryption_sets: DiskEncryptionSetsOperations operations
:vartype disk_encryption_sets:
azure.mgmt.compute.v2019_11_01.operations.DiskEncryptionSetsOperations
:param credential: Credential needed for the client to connect to Azure.
:type credential: ~azure.core.credentials.TokenCredential
:param subscription_id: Subscription credentials which uniquely identify Microsoft Azure
subscription. The subscription ID forms part of the URI for every service call.
:type subscription_id: str
:param base_url: Service URL. Default value is 'https://management.azure.com'.
:type base_url: str
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present. | 62598fa2d268445f26639abb |
class PageAlert(object): <NEW_LINE> <INDENT> def __getattr__(self, item): <NEW_LINE> <INDENT> rds = Redis.get_conn() <NEW_LINE> alert = rds.get('Single:{}'.format(item)) <NEW_LINE> return alert | 处理页面警告
从redis中获取页面警告 | 62598fa2462c4b4f79dbb87d |
class AllPostRssFeed(Feed): <NEW_LINE> <INDENT> title = 'VanBlog博客' <NEW_LINE> link = '/' <NEW_LINE> description = 'VanBlog博客上的文章' <NEW_LINE> def items(self): <NEW_LINE> <INDENT> return Post.objects.all().filter(is_pub=True).filter(category__is_pub=True).order_by('-create_time') <NEW_LINE> <DEDENT> def item_title(self, item): <NEW_LINE> <INDENT> return '[%s] %s' % (item.category, item.title) <NEW_LINE> <DEDENT> def item_description(self, item): <NEW_LINE> <INDENT> return item.body | RSS订阅 | 62598fa245492302aabfc342 |
class Flow(Cut): <NEW_LINE> <INDENT> def __init__(self, graph, value, flow, cut, partition): <NEW_LINE> <INDENT> super(Flow, self).__init__(graph, value, cut, partition) <NEW_LINE> self._flow = flow <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "%s(%r, %r, %r, %r, %r)" % (self.__class__.__name__, self._graph, self._value, self._flow, self._cut, self._partition) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "Graph flow (%d edges, %d vs %d vertices, value=%.4f)" % (len(self._cut), len(self._partition), self._graph.vcount() - len(self._partition), self._value) <NEW_LINE> <DEDENT> @property <NEW_LINE> def flow(self): <NEW_LINE> <INDENT> return self._flow | A flow of a given graph.
This is a simple class used to represent flows returned by
L{Graph.maxflow}. It has the following attributes:
- C{graph} - the graph on which this flow is defined
- C{value} - the value (capacity) of the flow
- C{flow} - the flow values on each edge. For directed graphs,
this is simply a list where element M{i} corresponds to the
flow on edge M{i}. For undirected graphs, the direction of
the flow is not constrained (since the edges are undirected),
hence positive flow always means a flow from the smaller vertex
ID to the larger, while negative flow means a flow from the
larger vertex ID to the smaller.
- C{cut} - edge IDs in the minimal cut corresponding to
the flow.
- C{partition} - vertex IDs in the parts created
after removing edges in the cut
- C{es} - an edge selector restricted to the edges
in the cut.
This class is usually not instantiated directly, everything
is taken care of by L{Graph.maxflow}.
Examples:
>>> from igraph import Graph
>>> g = Graph.Ring(20)
>>> mf = g.maxflow(0, 10)
>>> print(mf.value)
2.0
>>> mf.es["color"] = "red" | 62598fa23539df3088ecc126 |
class MLPContinuousPolicy(nn.Module): <NEW_LINE> <INDENT> def __init__(self, state_dim, action_dim, num_hidden=20): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.base = nn.Sequential( nn.Linear(state_dim, num_hidden), nn.Tanh(), nn.Linear(num_hidden, num_hidden), nn.Tanh(), ) <NEW_LINE> self.mean_head = nn.Linear(num_hidden, action_dim) <NEW_LINE> self.logvars = nn.Parameter(0 * torch.ones(1, action_dim)) <NEW_LINE> self.apply(self.weight_init) <NEW_LINE> <DEDENT> def forward(self, states): <NEW_LINE> <INDENT> tmp = self.base(states) <NEW_LINE> means = self.mean_head(tmp) <NEW_LINE> logvars = self.logvars.expand_as(means) <NEW_LINE> std = torch.exp(0.5 * logvars) <NEW_LINE> return Normal(means, std) <NEW_LINE> <DEDENT> def weight_init(self, m): <NEW_LINE> <INDENT> classname = m.__class__.__name__ <NEW_LINE> if classname.find('Linear') != -1: <NEW_LINE> <INDENT> torch.nn.init.xavier_normal_(m.weight, gain=1) <NEW_LINE> torch.nn.init.constant_(m.bias, 0) | For classic control | 62598fa24e4d562566372295 |
@injected <NEW_LINE> @setup(IUserService, name='userService') <NEW_LINE> class UserServiceAlchemy(EntityServiceAlchemy, IUserService): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> EntityServiceAlchemy.__init__(self, UserMapped, QUser) | Implementation for @see: IUserService | 62598fa2435de62698e9bc65 |
class RPSGame: <NEW_LINE> <INDENT> def __init__(self, engineClass, playerClasses, engineArgs = None, playerArgs = None): <NEW_LINE> <INDENT> if(engineArgs == None): <NEW_LINE> <INDENT> engineArgs = [] <NEW_LINE> <DEDENT> if(playerArgs == None): <NEW_LINE> <INDENT> playerArgs = [[]]*len(playerClasses) <NEW_LINE> <DEDENT> if(not issubclass(engineClass, RPSEngine)): <NEW_LINE> <INDENT> raise RPSGameError(engineClass, RPSEngine) <NEW_LINE> <DEDENT> for pC in playerClasses: <NEW_LINE> <INDENT> if(not issubclass(pC, RPSPlayer)): <NEW_LINE> <INDENT> raise RPSGameError(pC, RPSPlayer) <NEW_LINE> <DEDENT> <DEDENT> if(len(playerClasses) != len(playerArgs)): <NEW_LINE> <INDENT> raise RPSGameError("Mismatch length in playerClasses and playerArgs.") <NEW_LINE> <DEDENT> self.engine = engineClass(*engineArgs) <NEW_LINE> self.players = [pC(self.engine, *pArgs) for pC, pArgs in zip(playerClasses, playerArgs)] <NEW_LINE> <DEDENT> def loop(self): <NEW_LINE> <INDENT> while(True): <NEW_LINE> <INDENT> for p in self.players: <NEW_LINE> <INDENT> self.loopPrompt(p) <NEW_LINE> try: <NEW_LINE> <INDENT> p.triggerPlay() <NEW_LINE> <DEDENT> except ExitGame as e: <NEW_LINE> <INDENT> return e <NEW_LINE> <DEDENT> except PlayError as pE: <NEW_LINE> <INDENT> self.handlePlayError(p, pE.getThrow()) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> def loopPrompt(self, player): <NEW_LINE> <INDENT> print("Current player: " + str(player.id + 1)) <NEW_LINE> print("Current score : " + str(player.wins)) <NEW_LINE> <DEDENT> def handlePlayError(self, player, newThrow): <NEW_LINE> <INDENT> self.engine.clearThrow(player) <NEW_LINE> self.engine.play(player, newThrow) | Base (factory) class for making an RPSGame.
engineClass must be inherited from RPSEngine.
playerClasses must be a list of classes inherited from RPSPlayer.
engineArgs, if not None, is the list of arguements for the engine
invocation
playerArgs is a list of lists of arguements for the players.
(e.g., playerArgs[0][4] will be the fifth arguement for
the first class in playerClass) | 62598fa2627d3e7fe0e06d1d |
class Transaction: <NEW_LINE> <INDENT> def __init__(self, payer, amount, receiver): <NEW_LINE> <INDENT> self.payer = payer <NEW_LINE> self.amount = amount <NEW_LINE> self.receiver = receiver <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "{} pays {} eur to {}".format(self.payer, round(self.amount,2), self.receiver) | Class representing a transaction (un remboursement) between two people
| 62598fa21f5feb6acb162a94 |
class MockIOStream(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.colored = True <NEW_LINE> for k in ('error', 'success', 'write'): <NEW_LINE> <INDENT> setattr(self, 'test__{0}_data'.format(k), None) <NEW_LINE> def _wrapper(k): <NEW_LINE> <INDENT> def _mockfunc(self, s, newline=True): <NEW_LINE> <INDENT> attrib = 'test__{0}_data'.format(k) <NEW_LINE> value = getattr(self, attrib, None) or '' <NEW_LINE> if newline: <NEW_LINE> <INDENT> s += os.linesep <NEW_LINE> <DEDENT> setattr(self, attrib, value + s) <NEW_LINE> <DEDENT> return _mockfunc <NEW_LINE> <DEDENT> setattr(self, k, types.MethodType(_wrapper(k), self)) <NEW_LINE> self.read = types.MethodType(lambda: '', self) <NEW_LINE> <DEDENT> <DEDENT> def set_colored(self, colored): <NEW_LINE> <INDENT> self.colored = colored | Mock object for `IOStream` class.
| 62598fa2cb5e8a47e493c0af |
class SUB(Instruction): <NEW_LINE> <INDENT> operand_count = 3 <NEW_LINE> def do(self): <NEW_LINE> <INDENT> self.set_operand(0, self.get_operand(1) - self.get_operand(2)) | Substract (unsigned): <op0> = <op1> - <op2> | 62598fa2379a373c97d98e89 |
class Producer(celery_app.Task): <NEW_LINE> <INDENT> name = 'producer' <NEW_LINE> def run(self, file, consumer='consumer', queue='test', header_rows=0, column_map={'name': 0, 'email': 1}, sep=','): <NEW_LINE> <INDENT> logger.info("Processing file: {}".format(file)) <NEW_LINE> data = open(file, 'r') <NEW_LINE> reader = csv.reader(data, delimiter=sep) <NEW_LINE> self.group_id = uuid() <NEW_LINE> self.header_rows = header_rows <NEW_LINE> self.column_map = column_map <NEW_LINE> self.consumer = consumer <NEW_LINE> self.queue = queue <NEW_LINE> reader = self._skip_headers(reader) <NEW_LINE> workflow = group(self._generate_rows(reader)) <NEW_LINE> return workflow.apply_async(task_id=self.group_id) <NEW_LINE> <DEDENT> def on_failure(self, exc, task_id, args, kwargs, einfo): <NEW_LINE> <INDENT> logger.error( "task: {} with group-id: {} failed. error trace: {}".format(self.group_id, task_id, exc)) <NEW_LINE> <DEDENT> def on_success(self, retval, task_id, args, kwargs): <NEW_LINE> <INDENT> logger.info( "task: {} with group-id: {} is successful".format(self.group_id, task_id)) <NEW_LINE> <DEDENT> def _generate_rows(self, reader): <NEW_LINE> <INDENT> base_obj = { "parent_task_id": self.group_id, "timestamp": time.time() } <NEW_LINE> for index, row in enumerate(reader): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> obj = { **self._parse_row(row), **base_obj } <NEW_LINE> yield signature(self.consumer, kwargs=obj, queue=self.queue) <NEW_LINE> <DEDENT> except IndexError: <NEW_LINE> <INDENT> logger.error("Malformed row at index: {}".format(index)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def _skip_headers(self, reader): <NEW_LINE> <INDENT> [next(reader) for _ in range(self.header_rows)] <NEW_LINE> return reader <NEW_LINE> <DEDENT> def _parse_row(self, row): <NEW_LINE> <INDENT> return {k: row[val] for k, val in self.column_map.items()} | Producer class is responsible for reading input file and
creating a `group` of tasks. Each group element represent
a row plus some other identification data like timestamps
and group_id. Each group element is sent to attached broker
in a specified queue. | 62598fa256ac1b37e630205d |
class Result(Base): <NEW_LINE> <INDENT> assignment = db.relationship("Assignment", back_populates="result", uselist=False) <NEW_LINE> type = db.Column(db.String(50)) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> __mapper_args__ = { "polymorphic_identity": "result", "polymorphic_on": type, } | A Result is the outcome of a Participant completing an Activity.
Different Activities have different data that they generate, so this model
does not actually contain any information on the outcome of an Activity.
That is something that child classes of this class must define in their
schemas.
On the Assignment level, the type of Activity will determine the type of
Result.
Attributes:
assignment (Assignment): The Assignment that owns this Result. | 62598fa2b7558d58954634a0 |
class BatchServiceClientConfiguration(AzureConfiguration): <NEW_LINE> <INDENT> def __init__( self, credentials, batch_url): <NEW_LINE> <INDENT> if credentials is None: <NEW_LINE> <INDENT> raise ValueError("Parameter 'credentials' must not be None.") <NEW_LINE> <DEDENT> if batch_url is None: <NEW_LINE> <INDENT> raise ValueError("Parameter 'batch_url' must not be None.") <NEW_LINE> <DEDENT> base_url = '{batchUrl}' <NEW_LINE> super(BatchServiceClientConfiguration, self).__init__(base_url) <NEW_LINE> self.keep_alive = True <NEW_LINE> self.add_user_agent('azure-batch/{}'.format(VERSION)) <NEW_LINE> self.add_user_agent('Azure-SDK-For-Python') <NEW_LINE> self.credentials = credentials <NEW_LINE> self.batch_url = batch_url | Configuration for BatchServiceClient
Note that all parameters used to create this instance are saved as instance
attributes.
:param credentials: Credentials needed for the client to connect to Azure.
:type credentials: :mod:`A msrestazure Credentials
object<msrestazure.azure_active_directory>`
:param batch_url: The base URL for all Azure Batch service requests.
:type batch_url: str | 62598fa28c0ade5d55dc35c8 |
class NoSuchColumnError(DatabaseException): <NEW_LINE> <INDENT> pass | Raised when a non-existing column is requested. | 62598fa2d7e4931a7ef3bf0c |
class MovementController: <NEW_LINE> <INDENT> def __init__(self, motor000: MotorDriver, motor120: MotorDriver, motor240: MotorDriver ): <NEW_LINE> <INDENT> self._motor000 = motor000 <NEW_LINE> self._motor120 = motor120 <NEW_LINE> self._motor240 = motor240 <NEW_LINE> self._speed = 0. <NEW_LINE> self._direction = 0. <NEW_LINE> self._rotation = 0. <NEW_LINE> <DEDENT> def _update_motors(self): <NEW_LINE> <INDENT> self._motor000.speed = self.speed * math.sin(self.direction - math.pi / 3 * 0) + self.rotation <NEW_LINE> self._motor120.speed = self.speed * math.sin(self.direction - math.pi / 3 * 2) + self.rotation <NEW_LINE> self._motor240.speed = self.speed * math.sin(self.direction - math.pi / 3 * 4) + self.rotation <NEW_LINE> <DEDENT> @property <NEW_LINE> def speed(self) -> float: <NEW_LINE> <INDENT> return self._speed <NEW_LINE> <DEDENT> @speed.setter <NEW_LINE> def speed(self, speed: float): <NEW_LINE> <INDENT> self._speed = speed <NEW_LINE> self._update_motors() <NEW_LINE> <DEDENT> @property <NEW_LINE> def direction(self) -> float: <NEW_LINE> <INDENT> return self._direction <NEW_LINE> <DEDENT> @direction.setter <NEW_LINE> def direction(self, direction: float): <NEW_LINE> <INDENT> self._direction = direction <NEW_LINE> self._update_motors() <NEW_LINE> <DEDENT> @property <NEW_LINE> def rotation(self) -> float: <NEW_LINE> <INDENT> return self._rotation <NEW_LINE> <DEDENT> @rotation.setter <NEW_LINE> def rotation(self, rotation: float): <NEW_LINE> <INDENT> self._rotation = rotation <NEW_LINE> self._update_motors() <NEW_LINE> <DEDENT> def stop(self): <NEW_LINE> <INDENT> self.speed = 0 <NEW_LINE> self.rotation = 0 <NEW_LINE> <DEDENT> def cleanup(self): <NEW_LINE> <INDENT> self._motor000.cleanup() <NEW_LINE> self._motor120.cleanup() <NEW_LINE> self._motor240.cleanup() | Class responsible for controlling the movement of the robot by properly adjusting the speed
of each one of the three motors.
The controller assumes that the motors ale set up in a way, that each wheel creates one vertex
of an equilateral triangle. The naming convention for each motor is "motorA", where "A"
denotes the angle of rotation relatively to some arbitrary value. | 62598fa28da39b475be03052 |
class Like(TimeStampedModel): <NEW_LINE> <INDENT> creator = models.ForeignKey(user_models.User, on_delete=models.CASCADE, null=True) <NEW_LINE> image = models.ForeignKey(Image, on_delete=models.CASCADE, null=True) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return "User: {} - Image Caption: {}".format(self.creator.username, self.image.caption) | Like model | 62598fa24f88993c371f0443 |
class TestPortletsStats(TestCase): <NEW_LINE> <INDENT> def afterSetUp(self): <NEW_LINE> <INDENT> self.view = queryMultiAdapter((self.portal, self.portal.REQUEST), name='portlets_stats') <NEW_LINE> <DEDENT> def test_getPropsList(self): <NEW_LINE> <INDENT> self.loginAsPortalOwner() <NEW_LINE> portlet = getUtility(IPortletType, name='portlets.Calendar') <NEW_LINE> mapping = self.portal.restrictedTraverse('++contextportlets++plone.leftcolumn') <NEW_LINE> mapping.restrictedTraverse('+/' + portlet.addview)() <NEW_LINE> plone_portlets_info = filter(lambda info:info['path'] == '/plone', self.view.getPropsList()) <NEW_LINE> lslots = plone_portlets_info[0]['left_slots'] <NEW_LINE> self.assert_(filter(lambda info: info['title'] == 'Calendar', lslots)) | Tests all properties_stats view methods. | 62598fa2460517430c431f94 |
class TestModule(TransactionCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> super().setUp() <NEW_LINE> self.SaleOrder = self.env["sale.order"] <NEW_LINE> self.ResPartner = self.env["res.partner"] <NEW_LINE> self.user_worker = self.env.ref("fiscal_company_base.user_worker") <NEW_LINE> self.child_company = self.env.ref("fiscal_company_base.company_fiscal_child_1") <NEW_LINE> self.mother_company = self.env.ref("fiscal_company_base.company_fiscal_mother") <NEW_LINE> <DEDENT> def test_01_block_sale_order_creation(self): <NEW_LINE> <INDENT> self._create_sale_order(self.child_company) <NEW_LINE> with self.assertRaises(ValidationError): <NEW_LINE> <INDENT> self._create_sale_order(self.mother_company) <NEW_LINE> <DEDENT> <DEDENT> def _create_sale_order(self, company): <NEW_LINE> <INDENT> partner = self.ResPartner.create( {"name": "Test partner", "company_id": company.id} ) <NEW_LINE> order_vals = { "name": "Sale Order Test", "company_id": company.id, "partner_id": partner.id, } <NEW_LINE> self.user_worker.company_id = company.id <NEW_LINE> self.SaleOrder.sudo(self.user_worker).create(order_vals) | Tests for 'CAE - Sale' Module | 62598fa2925a0f43d25e7eb0 |
class TenCrop(BaseTransformation): <NEW_LINE> <INDENT> def __init__(self, size, vertical_flip=False): <NEW_LINE> <INDENT> self.size = size <NEW_LINE> if isinstance(size, numbers.Number): <NEW_LINE> <INDENT> self.size = (int(size), int(size)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> assert len(size) == 2, "Please provide only two dimensions (h, w) for size." <NEW_LINE> self.size = size <NEW_LINE> <DEDENT> self.vertical_flip = vertical_flip <NEW_LINE> <DEDENT> def __call__(self, img): <NEW_LINE> <INDENT> return F.ten_crop(img, self.size, self.vertical_flip) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.__class__.__name__ + '(size={0}, vertical_flip={1})'.format(self.size, self.vertical_flip) | Crop the given PIL Image into four corners and the central crop plus the flipped version of
these (horizontal flipping is used by default)
.. Note::
This transform returns a tuple of images and there may be a mismatch in the number of
inputs and targets your Dataset returns. See below for an example of how to deal with
this.
Args:
size (sequence or int): Desired output size of the crop. If size is an
int instead of sequence like (h, w), a square crop (size, size) is
made.
vertical_flip(bool): Use vertical flipping instead of horizontal
Example:
>>> transform = Compose([
>>> TenCrop(size), # this is a list of PIL Images
>>> Lambda(lambda crops: torch.stack([ToTensor()(crop) for crop in crops])) # returns a 4D tensor
>>> ])
>>> #In your test loop you can do the following:
>>> input, target = batch # input is a 5d tensor, target is 2d
>>> bs, ncrops, c, h, w = input.size()
>>> result = model(input.view(-1, c, h, w)) # fuse batch size and ncrops
>>> result_avg = result.view(bs, ncrops, -1).mean(1) # avg over crops | 62598fa256b00c62f0fb2724 |
class GeneNotFound(Exception): <NEW_LINE> <INDENT> pass | My own exception, for a gene that wasn't found. | 62598fa26e29344779b004cf |
class RTHNLayer(nn.Module): <NEW_LINE> <INDENT> def __init__(self, input_depth, total_key_depth, total_value_depth, num_heads, output_depth, program_class, max_doc_len, bias_mask=None, attention_dropout=0.0, layer_dropout=0.0): <NEW_LINE> <INDENT> super(RTHNLayer, self).__init__() <NEW_LINE> self.program_class = program_class <NEW_LINE> self.max_doc_len = max_doc_len <NEW_LINE> self.output_depth = output_depth <NEW_LINE> self.multi_head_attention = MultiHeadAttention(input_depth, total_key_depth, total_value_depth, output_depth, num_heads, bias_mask, attention_dropout) <NEW_LINE> self.class_lt = nn.Linear(output_depth, program_class) <NEW_LINE> self.pred_lt = nn.Linear(max_doc_len, max_doc_len, bias=False) <NEW_LINE> self.layer_dropout = nn.Dropout(layer_dropout) <NEW_LINE> <DEDENT> def forward(self, sen_encode_value, sen_encode, attn_mask=None): <NEW_LINE> <INDENT> self.device = sen_encode_value.device <NEW_LINE> batch_size = sen_encode_value.size(0) <NEW_LINE> pred_zeros = torch.zeros((batch_size, self.max_doc_len, self.max_doc_len)).to(self.device) <NEW_LINE> pred_ones = torch.ones_like(pred_zeros).to(self.device) <NEW_LINE> pred_two = torch.ones_like(pred_zeros).to(self.device).fill_(2.) <NEW_LINE> matrix = (1 - torch.eye(self.max_doc_len).to(self.device)).unsqueeze(0) + pred_zeros <NEW_LINE> y, _ = self.multi_head_attention(sen_encode_value, sen_encode_value, sen_encode, ~attn_mask.unsqueeze(1)) <NEW_LINE> y = torch.relu(y) + sen_encode <NEW_LINE> pred = self.class_lt(self.layer_dropout(y.reshape(-1, self.output_depth).to(self.device))) <NEW_LINE> pred = torch.softmax(pred * attn_mask.reshape(-1, 1).float(), dim=-1). 
reshape(-1, self.max_doc_len, self.program_class) <NEW_LINE> reg = torch.tensor(0.).to(self.device) <NEW_LINE> for param in self.class_lt.parameters(): <NEW_LINE> <INDENT> reg = reg + torch.norm(param) <NEW_LINE> <DEDENT> pred_label = torch.argmax(pred, dim=-1).reshape(-1, 1, self.max_doc_len).float() <NEW_LINE> pred_label = pred_label * pred_two - pred_ones <NEW_LINE> pred_label = (pred_label + pred_zeros) * matrix <NEW_LINE> pred_label = torch.tanh(self.pred_lt(pred_label.reshape(-1, self.max_doc_len))).reshape(batch_size, self.max_doc_len, self.max_doc_len) <NEW_LINE> return y, pred, pred_label, reg | An implementation of the framework in https://arxiv.org/abs/1906.01236
Refer Figure 2 | 62598fa2090684286d593614 |
class Article(object): <NEW_LINE> <INDENT> def __init__(self, article): <NEW_LINE> <INDENT> self.title = article["Title"] <NEW_LINE> self.author = article["Author"] <NEW_LINE> self.up = article["UpVote"] <NEW_LINE> self.down = article["DownVote"] <NEW_LINE> self.noVote = article["NoVote"] <NEW_LINE> self.hot = self.up + self.down + self.noVote <NEW_LINE> self.grade = (self.up - self.down)/self.hot <NEW_LINE> self.responses = [] <NEW_LINE> self.merge_response(article["Responses"]) <NEW_LINE> <DEDENT> def merge_response(self, responses): <NEW_LINE> <INDENT> cur_resp = { "User": responses[0]["User"], "Content": responses[0]["Content"], "Vote": responses[0]["Vote"] } <NEW_LINE> for i in range(1,len(responses)-1): <NEW_LINE> <INDENT> if responses[i]["User"] == cur_resp["User"]: <NEW_LINE> <INDENT> cur_resp["Content"].rsrtip('\n') <NEW_LINE> cur_resp["Content"] += responses[i]["Content"] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.responses.append(cur_resp) <NEW_LINE> cur_resp = { "User": responses[i]["User"], "Content": responses[i]["Content"], "Vote": responses[i]["Vote"] } <NEW_LINE> <DEDENT> <DEDENT> self.responses.append(cur_resp) | 文章的保存結構,包含了文章標題、作者、與回文狀態 (不包含文章內容) | 62598fa256ac1b37e630205e |
class Rectangle: <NEW_LINE> <INDENT> pass | defining rectangle
| 62598fa2c432627299fa2e4d |
class ProMySqlDB(object): <NEW_LINE> <INDENT> def __init__(self, dbName, user, passwd, host, port): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.dbName = str(dbName) <NEW_LINE> self.user = str(user) <NEW_LINE> self.passwd = str(passwd) <NEW_LINE> self.host = str(host) <NEW_LINE> self.port = int(port) <NEW_LINE> self.__conn = MySQLdb.connect(user=self.user, db=self.dbName, passwd=self.passwd, host=self.host, charset='utf8', port=self.port) <NEW_LINE> <DEDENT> except MySQLdb.Error as e: <NEW_LINE> <INDENT> raise e <NEW_LINE> <DEDENT> <DEDENT> def Sql(self, sql): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> c = self.__conn.cursor() <NEW_LINE> c.execute(sql) <NEW_LINE> self.__conn.commit() <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> self.__conn.rollback() <NEW_LINE> raise e <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> c.close() <NEW_LINE> <DEDENT> <DEDENT> def Sqls(self, sqls, values): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> c = self.__conn.cursor() <NEW_LINE> c.executemany(sqls, values) <NEW_LINE> self.__conn.commit() <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> self.__conn.rollback() <NEW_LINE> raise e <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> c.close() <NEW_LINE> <DEDENT> <DEDENT> def GetResults(self, sql): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> c = self.__conn.cursor() <NEW_LINE> c.execute(sql) <NEW_LINE> results = c.fetchall() <NEW_LINE> return results <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> raise e <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> c.close() <NEW_LINE> <DEDENT> <DEDENT> def Close(self): <NEW_LINE> <INDENT> self.__conn.close() | MySQL数据库类 | 62598fa21f037a2d8b9e3f5c |
class Database: <NEW_LINE> <INDENT> def __init__(self, path_to_db="database.db"): <NEW_LINE> <INDENT> self._db = path_to_db <NEW_LINE> <DEDENT> def __enter__(self): <NEW_LINE> <INDENT> self._conn = sqlite3.connect(self._db) <NEW_LINE> cursor = self._conn.cursor() <NEW_LINE> cursor.execute("CREATE TABLE IF NOT EXISTS cards (id INTEGER PRIMARY KEY, front TEXT, back TEXT)") <NEW_LINE> return cursor <NEW_LINE> <DEDENT> def __exit__(self, exc_type, exc_val, exc_tb): <NEW_LINE> <INDENT> self._conn.commit() <NEW_LINE> self._conn.close() | Main database connection class.
Returns a cursor object. | 62598fa201c39578d7f12bf2 |
class NotFoundException(ConanException): <NEW_LINE> <INDENT> pass | 404 error | 62598fa2442bda511e95c2ce |
class change_password_user(osv.TransientModel): <NEW_LINE> <INDENT> _name = 'change.password.user' <NEW_LINE> _description = 'Change Password Wizard User' <NEW_LINE> _columns = { 'wizard_id': fields.many2one('change.password.wizard', string='Wizard', required=True), 'user_id': fields.many2one('res.users', string='User', required=True), 'user_login': fields.char('User Login', readonly=True), 'new_passwd': fields.char('New Password'), } <NEW_LINE> _defaults = { 'new_passwd': '', } <NEW_LINE> def change_password_button(self, cr, uid, ids, context=None): <NEW_LINE> <INDENT> for user in self.browse(cr, uid, ids, context=context): <NEW_LINE> <INDENT> self.pool.get('res.users').write(cr, uid, user.user_id.id, {'password': user.new_passwd}) | A model to configure users in the change password wizard | 62598fa22c8b7c6e89bd3639 |
class TeamFolderTeamSharedDropboxError(bb.Union): <NEW_LINE> <INDENT> _catch_all = 'other' <NEW_LINE> disallowed = None <NEW_LINE> other = None <NEW_LINE> def is_disallowed(self): <NEW_LINE> <INDENT> return self._tag == 'disallowed' <NEW_LINE> <DEDENT> def is_other(self): <NEW_LINE> <INDENT> return self._tag == 'other' <NEW_LINE> <DEDENT> def _process_custom_annotations(self, annotation_type, field_path, processor): <NEW_LINE> <INDENT> super(TeamFolderTeamSharedDropboxError, self)._process_custom_annotations(annotation_type, field_path, processor) | This class acts as a tagged union. Only one of the ``is_*`` methods will
return true. To get the associated value of a tag (if one exists), use the
corresponding ``get_*`` method.
:ivar team.TeamFolderTeamSharedDropboxError.disallowed: This action is not
allowed for a shared team root. | 62598fa297e22403b383ad7f |
class CommentAnalysisChartMixin(object): <NEW_LINE> <INDENT> def get_context_data(self, **kwargs): <NEW_LINE> <INDENT> if isinstance(self, VideoCommentListView): <NEW_LINE> <INDENT> qs = self.get_queryset() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> qs = self.object.videocomment_set.all() <NEW_LINE> <DEDENT> context = super(CommentAnalysisChartMixin, self).get_context_data( **kwargs) <NEW_LINE> context['comment_chart_headers'] = ['Sentiment', 'Percentage'] <NEW_LINE> context['comment_chart_data'] = { 'Positive': qs.filter(sentiment__gt=0).count(), 'Negative': qs.filter(sentiment__lt=0).count(), 'Neutral': qs.filter(sentiment=0).count(), } <NEW_LINE> return context | Mixin for comment analysis charts and graphs across views | 62598fa21f5feb6acb162a96 |
class TestBayes(unittest.TestCase): <NEW_LINE> <INDENT> def test_train(self): <NEW_LINE> <INDENT> train_data = mockData().get_mock_data() <NEW_LINE> list_classes = mockData().class_vec <NEW_LINE> p0v, p1v, pab = BayesLearning().train0(train_data, list_classes) <NEW_LINE> test_entry = ['love', 'my', 'dalmation'] <NEW_LINE> this_doc = array(BayesTool.set_word_2_vec(mockData().mock_vocab_list(), test_entry)) <NEW_LINE> result = BayesLearning().classify(this_doc, p0v, p1v, pab) <NEW_LINE> self.assertEqual(0, result, "bayes predicted error, not abusive word") <NEW_LINE> test_entry_2 = ['stupid', 'garbage'] <NEW_LINE> this_doc_2 = array(BayesTool.set_word_2_vec(mockData().mock_vocab_list(), test_entry_2)) <NEW_LINE> result_2 = BayesLearning().classify(this_doc_2, p0v, p1v, pab) <NEW_LINE> self.assertEqual(1, result_2, "bayes predicted error, abusive word") | 朴素贝叶斯算法测试 | 62598fa27d847024c075c239 |
class AllPairs(ParentWithSetFactory, DisjointUnionEnumeratedSets): <NEW_LINE> <INDENT> def __init__(self, policy): <NEW_LINE> <INDENT> ParentWithSetFactory.__init__(self, (), policy=policy, category=EnumeratedSets().Finite()) <NEW_LINE> DisjointUnionEnumeratedSets.__init__(self, LazyFamily(range(MAX), self.pairs_y), facade=True, keepkey=False, category=self.category()) <NEW_LINE> <DEDENT> def pairs_y(self, letter): <NEW_LINE> <INDENT> return Pairs_Y(letter, policy=self.facade_policy()) <NEW_LINE> <DEDENT> def _repr_(self): <NEW_LINE> <INDENT> return "AllPairs" <NEW_LINE> <DEDENT> def check_element(self, el, check): <NEW_LINE> <INDENT> pass | This parent shows how one can use set factories together with
:class:`DisjointUnionEnumeratedSets`.
It is constructed as the disjoint union
(:class:`DisjointUnionEnumeratedSets`) of :class:`Pairs_Y` parents:
.. MATH::
S := \bigcup_{i = 0,1,..., 4} S^y
.. WARNING::
When writing a parent ``P`` as a disjoint union of a family of parents
``P_i``, the parents ``P_i`` must be constructed as facade parents for
``P``. As a consequence, it should be passed ``P.facade_policy()`` as
policy argument. See the source code of :meth:`pairs_y` for an
example.
TESTS::
sage: from sage.structure.set_factories_example import XYPairs
sage: P = XYPairs(); P.list()
[(0, 0), (1, 0), (2, 0), (3, 0), (4, 0), (0, 1), (1, 1), (2, 1), (3, 1), (4, 1), (0, 2), (1, 2), (2, 2), (3, 2), (4, 2), (0, 3), (1, 3), (2, 3), (3, 3), (4, 3), (0, 4), (1, 4), (2, 4), (3, 4), (4, 4)] | 62598fa2796e427e5384e607 |
class LoginForm(forms.Form): <NEW_LINE> <INDENT> username = forms.CharField(widget=forms.TextInput(attrs={"placeholder": "Username", "required": "required",}), max_length=50,error_messages={"required": "username不能为空",}) <NEW_LINE> password = forms.CharField(widget=forms.PasswordInput(attrs={"placeholder": "Password", "required": "required",}), max_length=20,error_messages={"required": "password不能为空",}) | 登录Form | 62598fa2a8ecb03325871082 |
class FirstBootForm(ValidNewUsernameCheckMixin, auth.forms.UserCreationForm): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> self.request = kwargs.pop('request') <NEW_LINE> super().__init__(*args, **kwargs) <NEW_LINE> <DEDENT> def save(self, commit=True): <NEW_LINE> <INDENT> user = super().save(commit=commit) <NEW_LINE> if commit: <NEW_LINE> <INDENT> first_boot.mark_step_done('users_firstboot') <NEW_LINE> try: <NEW_LINE> <INDENT> actions.superuser_run( 'users', ['create-user', user.get_username()], input=self.cleaned_data['password1'].encode()) <NEW_LINE> <DEDENT> except ActionError: <NEW_LINE> <INDENT> messages.error(self.request, _('Creating LDAP user failed.')) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> actions.superuser_run( 'users', ['add-user-to-group', user.get_username(), 'admin']) <NEW_LINE> <DEDENT> except ActionError: <NEW_LINE> <INDENT> messages.error(self.request, _('Failed to add new user to admin group.')) <NEW_LINE> <DEDENT> for group_choice in get_group_choices(): <NEW_LINE> <INDENT> auth.models.Group.objects.get_or_create(name=group_choice[0]) <NEW_LINE> <DEDENT> admin_group = auth.models.Group.objects.get(name='admin') <NEW_LINE> admin_group.user_set.add(user) <NEW_LINE> self.login_user(self.cleaned_data['username'], self.cleaned_data['password1']) <NEW_LINE> try: <NEW_LINE> <INDENT> set_restricted_access(True) <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> messages.error(self.request, _('Failed to restrict console access.')) <NEW_LINE> <DEDENT> <DEDENT> return user <NEW_LINE> <DEDENT> def login_user(self, username, password): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> user = auth.authenticate(username=username, password=password) <NEW_LINE> auth.login(self.request, user) <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> message = _('User account created, you are now logged in') <NEW_LINE> messages.success(self.request, message) | User module 
first boot step: create a new admin user. | 62598fa2442bda511e95c2cf |
class ResPQ(TLObject): <NEW_LINE> <INDENT> __slots__ = ["nonce", "server_nonce", "pq", "server_public_key_fingerprints"] <NEW_LINE> ID = 0x05162463 <NEW_LINE> QUALNAME = "types.ResPQ" <NEW_LINE> def __init__(self, *, nonce: int, server_nonce: int, pq: bytes, server_public_key_fingerprints: list): <NEW_LINE> <INDENT> self.nonce = nonce <NEW_LINE> self.server_nonce = server_nonce <NEW_LINE> self.pq = pq <NEW_LINE> self.server_public_key_fingerprints = server_public_key_fingerprints <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def read(b: BytesIO, *args) -> "ResPQ": <NEW_LINE> <INDENT> nonce = Int128.read(b) <NEW_LINE> server_nonce = Int128.read(b) <NEW_LINE> pq = Bytes.read(b) <NEW_LINE> server_public_key_fingerprints = TLObject.read(b, Long) <NEW_LINE> return ResPQ(nonce=nonce, server_nonce=server_nonce, pq=pq, server_public_key_fingerprints=server_public_key_fingerprints) <NEW_LINE> <DEDENT> def write(self) -> bytes: <NEW_LINE> <INDENT> b = BytesIO() <NEW_LINE> b.write(Int(self.ID, False)) <NEW_LINE> b.write(Int128(self.nonce)) <NEW_LINE> b.write(Int128(self.server_nonce)) <NEW_LINE> b.write(Bytes(self.pq)) <NEW_LINE> b.write(Vector(self.server_public_key_fingerprints, Long)) <NEW_LINE> return b.getvalue() | Attributes:
LAYER: ``112``
Attributes:
ID: ``0x05162463``
Parameters:
nonce: ``int`` ``128-bit``
server_nonce: ``int`` ``128-bit``
pq: ``bytes``
server_public_key_fingerprints: List of ``int`` ``64-bit``
See Also:
This object can be returned by :obj:`ReqPq <pyrogram.api.functions.ReqPq>` and :obj:`ReqPqMulti <pyrogram.api.functions.ReqPqMulti>`. | 62598fa27cff6e4e811b589a |
class MelFrequencySpectrumCentroid(Features): <NEW_LINE> <INDENT> def __init__(self, arg, **kwargs): <NEW_LINE> <INDENT> kwargs['feature']='cqft' <NEW_LINE> Features.__init__(self, arg, kwargs) <NEW_LINE> <DEDENT> def extract(self): <NEW_LINE> <INDENT> Features.extract(self) <NEW_LINE> self.X = (self.X.T * self._logfrqs).sum(1) / self.X.T.sum(1) | Mel-Frequency Spectrum Centroid | 62598fa28e7ae83300ee8f14 |
class Rainbow(ColorCycle): <NEW_LINE> <INDENT> def init_parameters(self): <NEW_LINE> <INDENT> super().init_parameters() <NEW_LINE> self.set_parameter('num_steps_per_cycle', 255) <NEW_LINE> <DEDENT> def before_start(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def update(self, current_step: int, current_cycle: int) -> bool: <NEW_LINE> <INDENT> scale_factor = 255 / self.strip.num_leds <NEW_LINE> start_index = 255 / self.p['num_steps_per_cycle'] * current_step <NEW_LINE> for i in range(self.strip.num_leds): <NEW_LINE> <INDENT> led_index = start_index + i * scale_factor <NEW_LINE> pixel_color = wheel(led_index % 255) <NEW_LINE> self.strip.set_pixel(i, *pixel_color) <NEW_LINE> <DEDENT> return True | Rotates a rainbow color wheel around the strip.
No parameters necessary | 62598fa230bbd722464698b1 |
class LaunchpadCelebrities: <NEW_LINE> <INDENT> implements(ILaunchpadCelebrities) <NEW_LINE> admin = PersonCelebrityDescriptor('admins') <NEW_LINE> software_center_agent = PersonCelebrityDescriptor( 'software-center-agent') <NEW_LINE> bug_importer = PersonCelebrityDescriptor('bug-importer') <NEW_LINE> bug_watch_updater = PersonCelebrityDescriptor('bug-watch-updater') <NEW_LINE> buildd_admin = PersonCelebrityDescriptor('launchpad-buildd-admins') <NEW_LINE> commercial_admin = PersonCelebrityDescriptor('commercial-admins') <NEW_LINE> debbugs = CelebrityDescriptor(IBugTrackerSet, 'debbugs') <NEW_LINE> debian = CelebrityDescriptor(IDistributionSet, 'debian') <NEW_LINE> english = LanguageCelebrityDescriptor(ILanguageSet, 'en') <NEW_LINE> gnome_bugzilla = CelebrityDescriptor(IBugTrackerSet, 'gnome-bugs') <NEW_LINE> hwdb_team = PersonCelebrityDescriptor('hwdb-team') <NEW_LINE> janitor = PersonCelebrityDescriptor('janitor') <NEW_LINE> katie = PersonCelebrityDescriptor('katie') <NEW_LINE> launchpad = CelebrityDescriptor(IProductSet, 'launchpad') <NEW_LINE> launchpad_developers = PersonCelebrityDescriptor('launchpad') <NEW_LINE> obsolete_junk = CelebrityDescriptor(IProductSet, 'obsolete-junk') <NEW_LINE> ppa_key_guard = PersonCelebrityDescriptor('ppa-key-guard') <NEW_LINE> ppa_self_admins = PersonCelebrityDescriptor('launchpad-ppa-self-admins') <NEW_LINE> registry_experts = PersonCelebrityDescriptor('registry') <NEW_LINE> rosetta_experts = PersonCelebrityDescriptor('rosetta-admins') <NEW_LINE> savannah_tracker = CelebrityDescriptor(IBugTrackerSet, 'savannah') <NEW_LINE> sourceforge_tracker = CelebrityDescriptor(IBugTrackerSet, 'sf') <NEW_LINE> ubuntu = CelebrityDescriptor(IDistributionSet, 'ubuntu') <NEW_LINE> ubuntu_bugzilla = CelebrityDescriptor(IBugTrackerSet, 'ubuntu-bugzilla') <NEW_LINE> ubuntu_techboard = PersonCelebrityDescriptor('techboard') <NEW_LINE> vcs_imports = PersonCelebrityDescriptor('vcs-imports') <NEW_LINE> @property <NEW_LINE> def 
ubuntu_archive_mirror(self): <NEW_LINE> <INDENT> mirror = getUtility(IDistributionMirrorSet).getByHttpUrl( 'http://archive.ubuntu.com/ubuntu/') <NEW_LINE> if mirror is None: <NEW_LINE> <INDENT> raise MissingCelebrityError('http://archive.ubuntu.com/ubuntu/') <NEW_LINE> <DEDENT> assert mirror.isOfficial(), "Main mirror must be an official one." <NEW_LINE> return mirror <NEW_LINE> <DEDENT> @property <NEW_LINE> def ubuntu_cdimage_mirror(self): <NEW_LINE> <INDENT> mirror = getUtility(IDistributionMirrorSet).getByHttpUrl( 'http://releases.ubuntu.com/') <NEW_LINE> if mirror is None: <NEW_LINE> <INDENT> raise MissingCelebrityError('http://releases.ubuntu.com/') <NEW_LINE> <DEDENT> assert mirror.isOfficial(), "Main mirror must be an official one." <NEW_LINE> return mirror <NEW_LINE> <DEDENT> def isCelebrityPerson(self, name): <NEW_LINE> <INDENT> return str(name) in PersonCelebrityDescriptor.names | See `ILaunchpadCelebrities`. | 62598fa2fbf16365ca793f2f |
class UserDataViewSet(viewsets.ModelViewSet): <NEW_LINE> <INDENT> serializer_class = PerfilSerializer <NEW_LINE> http_method_names = ['get','head'] <NEW_LINE> permission_classes = (IsAuthenticated,) <NEW_LINE> def get_queryset(self): <NEW_LINE> <INDENT> return Perfil.objects.filter(user_id=self.request.user.id).all() | !
Clase que gestiona los datos del usuario autenticado
@author Rodrigo Boet (rboet at cenditel.gob.ve)
@copyright <a href='https://www.gnu.org/licenses/gpl-3.0.en.html'>GNU Public License versión 3 (GPLv3)</a>
@date 28-09-2017
@version 1.0.0 | 62598fa2a8370b77170f0254 |
class RunT(Enum): <NEW_LINE> <INDENT> REMOTE_CLI = 'remote_cli' <NEW_LINE> REMOTE_API = 'remote_api' <NEW_LINE> ONTARGET_CLI = 'ontarget_cli' | Modes of setup run | 62598fa2925a0f43d25e7eb2 |
class Dropout(KerasLayer): <NEW_LINE> <INDENT> def __init__(self, p, input_shape=None, **kwargs): <NEW_LINE> <INDENT> super(Dropout, self).__init__(None, float(p), list(input_shape) if input_shape else None, **kwargs) | Applies Dropout to the input by randomly setting a fraction 'p' of input units to 0 at each
update during training time in order to prevent overfitting.
When you use this layer as the first layer of a model, you need to provide the argument
input_shape (a shape tuple, does not include the batch dimension).
# Arguments
p: Fraction of the input units to drop. Float between 0 and 1.
input_shape: A shape tuple, not including batch.
name: String to set the name of the layer. If not specified, its name will by default to be a generated string.
>>> dropout = Dropout(0.25, input_shape=(2, 3))
creating: createKerasDropout | 62598fa238b623060ffa8f08 |
class SegmentationRecordsParserBase: <NEW_LINE> <INDENT> __metaclass__ = ABCMeta <NEW_LINE> @abstractmethod <NEW_LINE> def getSegmentationRecords(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def _getAllValidDirs(self, baseDir): <NEW_LINE> <INDENT> if not os.path.isdir(baseDir): <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> dirList = os.listdir(baseDir) <NEW_LINE> dirList = [d for d in dirList if os.path.isdir(os.path.join(baseDir, d))] <NEW_LINE> dirList = [d for d in dirList if not d.startswith(".")] <NEW_LINE> return dirList | Base class for Parsers that work on the mpReview Data/File Structure | 62598fa2656771135c4894f9 |
class B2BCouponManager(models.Manager): <NEW_LINE> <INDENT> def get_unexpired_coupon(self, *, coupon_code, product_id): <NEW_LINE> <INDENT> coupon = ( self.filter( Q(coupon_code=coupon_code), Q(enabled=True), Q(product_id=None) | Q(product_id=product_id), ) .filter(Q(activation_date__isnull=True) | Q(activation_date__lt=Now())) .filter(Q(expiration_date__isnull=True) | Q(expiration_date__gt=Now())) .get() ) <NEW_LINE> if coupon and not coupon.reusable: <NEW_LINE> <INDENT> coupon_redemption = B2BCouponRedemption.objects.filter( coupon=coupon, order__status__in=(B2BOrder.FULFILLED, B2BOrder.REFUNDED) ) <NEW_LINE> if coupon_redemption.exists(): <NEW_LINE> <INDENT> raise B2BCoupon.DoesNotExist <NEW_LINE> <DEDENT> <DEDENT> return coupon | Add a function to filter valid coupons | 62598fa256b00c62f0fb2726 |
class CreateView(generics.ListCreateAPIView): <NEW_LINE> <INDENT> queryset = Contact.objects.all() <NEW_LINE> serializer_class = ContactSerializer <NEW_LINE> permission_classes = (permissions.IsAuthenticated, IsOwner) <NEW_LINE> def perform_create(self, serializer): <NEW_LINE> <INDENT> serializer.save(owner=self.request.user) | This class defines the create behavior of our rest api. | 62598fa24e4d562566372298 |
class SourceMultiSelectWidget(MultiSelectWidget): <NEW_LINE> <INDENT> def __init__(self, field, source, request): <NEW_LINE> <INDENT> super(SourceMultiSelectWidget, self).__init__( field, IterableSourceVocabulary(source, request), request) | A multi-selection widget with ordering support. | 62598fa2d268445f26639abd |
class MacManager(BaseManager): <NEW_LINE> <INDENT> NAME = 'Darwin' <NEW_LINE> FRIENDLY = 'Mac' <NEW_LINE> IGNORED_APPLICATION_NAMES = [ "iTunesHelper.app", "slack helper.app", "garcon.appex", "musiccacheextension", "podcastswidget", "mailcachedelete", ] <NEW_LINE> @log_running <NEW_LINE> def is_running(self, application): <NEW_LINE> <INDENT> name = application.versions.mac <NEW_LINE> if not name: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> process = self._get_process(name) <NEW_LINE> return process is not None <NEW_LINE> <DEDENT> @log_starting <NEW_LINE> def start(self, application): <NEW_LINE> <INDENT> name = application.versions.mac <NEW_LINE> path = None <NEW_LINE> for base in ( ".", "/Applications", "/Applications/*", "/System/Applications", "~/Applications", ): <NEW_LINE> <INDENT> pattern = os.path.expanduser(os.path.join(base, name)) <NEW_LINE> log.debug("Glob pattern: %s", pattern) <NEW_LINE> paths = glob.glob(pattern) <NEW_LINE> if paths: <NEW_LINE> <INDENT> path = paths[0] <NEW_LINE> log.debug("Match: %s", path) <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> assert path, "Not found: {}".format(application) <NEW_LINE> <DEDENT> return self._start_app(path) <NEW_LINE> <DEDENT> @log_stopping <NEW_LINE> def stop(self, application): <NEW_LINE> <INDENT> name = application.versions.mac <NEW_LINE> process = self._get_process(name) <NEW_LINE> if process and process.is_running(): <NEW_LINE> <INDENT> process.terminate() <NEW_LINE> <DEDENT> <DEDENT> @staticmethod <NEW_LINE> def _start_app(path): <NEW_LINE> <INDENT> assert os.path.exists(path), path <NEW_LINE> process = psutil.Popen(['open', path]) <NEW_LINE> time.sleep(1) <NEW_LINE> return process <NEW_LINE> <DEDENT> def launch(self, path): <NEW_LINE> <INDENT> log.info("opening %s...", path) <NEW_LINE> return subprocess.call(['open', path]) == 0 | Application manager for OS X. | 62598fa292d797404e388aa0 |
class AbstractUser(AbstractBaseUser, PermissionsMixin): <NEW_LINE> <INDENT> username = models.CharField(_('username'), max_length=50, unique=True, help_text=_('Required. 50 characters or fewer. Letters, numbers and ' '@/./+/-/_ characters'), validators=[ validators.RegexValidator(re.compile('^[\w.@+-]+$'), _('Enter a valid username.'), 'invalid') ]) <NEW_LINE> first_name = models.CharField(_('first name'), max_length=30, blank=True) <NEW_LINE> last_name = models.CharField(_('last name'), max_length=30, blank=True) <NEW_LINE> email = models.EmailField(_('email address'), blank=True) <NEW_LINE> is_staff = models.BooleanField(_('staff status'), default=False, help_text=_('Designates whether the user can log into this admin ' 'site.')) <NEW_LINE> is_active = models.BooleanField(_('active'), default=True, help_text=_('Designates whether this user should be treated as ' 'active. Unselect this instead of deleting accounts.')) <NEW_LINE> date_joined = models.DateTimeField(_('date joined'), default=timezone.now) <NEW_LINE> objects = UserManager() <NEW_LINE> USERNAME_FIELD = 'username' <NEW_LINE> REQUIRED_FIELDS = ['email'] <NEW_LINE> class Meta: <NEW_LINE> <INDENT> verbose_name = _('user') <NEW_LINE> verbose_name_plural = _('users') <NEW_LINE> abstract = True <NEW_LINE> <DEDENT> def get_absolute_url(self): <NEW_LINE> <INDENT> return "/users/%s/" % urlquote(self.username) <NEW_LINE> <DEDENT> def get_full_name(self): <NEW_LINE> <INDENT> full_name = '%s %s' % (self.first_name, self.last_name) <NEW_LINE> return full_name.strip() <NEW_LINE> <DEDENT> def get_short_name(self): <NEW_LINE> <INDENT> return self.first_name <NEW_LINE> <DEDENT> def email_user(self, subject, message, from_email=None): <NEW_LINE> <INDENT> send_mail(subject, message, from_email, [self.email]) <NEW_LINE> <DEDENT> def get_profile(self): <NEW_LINE> <INDENT> warnings.warn("The use of AUTH_PROFILE_MODULE to define user profiles has been deprecated.", PendingDeprecationWarning) <NEW_LINE> if not hasattr(self, 
'_profile_cache'): <NEW_LINE> <INDENT> from django.conf import settings <NEW_LINE> if not getattr(settings, 'AUTH_PROFILE_MODULE', False): <NEW_LINE> <INDENT> raise SiteProfileNotAvailable( 'You need to set AUTH_PROFILE_MODULE in your project ' 'settings') <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> app_label, model_name = settings.AUTH_PROFILE_MODULE.split('.') <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> raise SiteProfileNotAvailable( 'app_label and model_name should be separated by a dot in ' 'the AUTH_PROFILE_MODULE setting') <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> model = models.get_model(app_label, model_name) <NEW_LINE> if model is None: <NEW_LINE> <INDENT> raise SiteProfileNotAvailable( 'Unable to load the profile model, check ' 'AUTH_PROFILE_MODULE in your project settings') <NEW_LINE> <DEDENT> self._profile_cache = model._default_manager.using( self._state.db).get(user__id__exact=self.id) <NEW_LINE> self._profile_cache.user = self <NEW_LINE> <DEDENT> except (ImportError, ImproperlyConfigured): <NEW_LINE> <INDENT> raise SiteProfileNotAvailable <NEW_LINE> <DEDENT> <DEDENT> return self._profile_cache | An abstract base class implementing a fully featured User model with
admin-compliant permissions.
Username, password and email are required. Other fields are optional. | 62598fa245492302aabfc346 |
class ProvinceAreasView(View): <NEW_LINE> <INDENT> def get(self, request): <NEW_LINE> <INDENT> province_list = cache.get('province_list') <NEW_LINE> if not province_list: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> province_model_list = Area.objects.filter(parent__isnull=True) <NEW_LINE> province_list = [] <NEW_LINE> for province_model in province_model_list: <NEW_LINE> <INDENT> province_list.append({'id': province_model.id, 'name': province_model.name}) <NEW_LINE> <DEDENT> cache.set('province_list', province_list, 3600) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> return JsonResponse({ 'code': RETCODE.DBERR, 'errmsg': '省份数据错误' }) <NEW_LINE> <DEDENT> <DEDENT> return JsonResponse({ 'code': RETCODE.OK, 'errmsg': 'OK', 'province_list': province_list }) | 省级地区 | 62598fa221bff66bcd722ada |
class Service(threading.Thread): <NEW_LINE> <INDENT> def __init__(self, name=None): <NEW_LINE> <INDENT> threading.Thread.__init__(self) <NEW_LINE> self.name = name or self.__class__.__name__ <NEW_LINE> self.channel = None <NEW_LINE> self.connection = pika.SelectConnection( pika.ConnectionParameters(host='132.252.152.56', credentials=pika.PlainCredentials('ipark', 'GS~FsB3~&c7T')), self.on_connected) <NEW_LINE> self.start() <NEW_LINE> <DEDENT> def on_connected(self, connection): <NEW_LINE> <INDENT> connection.channel(self.on_channel_open) <NEW_LINE> <DEDENT> def on_channel_open(self, channel): <NEW_LINE> <INDENT> self.channel = channel <NEW_LINE> self.channel.exchange_declare(self.on_exchange_declared, exchange="delayed-x", type="x-delayed-message", arguments={"x-delayed-type": "direct"}, ) <NEW_LINE> self.channel.queue_declare(self.on_queue_declared, queue=self.name) <NEW_LINE> self.channel.queue_bind(self.on_queue_bind, queue=self.name, exchange="delayed-x", routing_key=self.name) <NEW_LINE> self.channel.basic_consume(self.on_request, queue=self.name) <NEW_LINE> <DEDENT> def on_queue_declared(self, frame): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def on_exchange_declared(self, *args, **kwargs): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def on_queue_bind(self, *args, **kwargs): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> print(u"[{0:s}] Service running...".format(self.name)) <NEW_LINE> self.connection.ioloop.start() <NEW_LINE> <DEDENT> def on_request(self, ch, method, props, body): <NEW_LINE> <INDENT> request = pickle.loads(body) <NEW_LINE> try: <NEW_LINE> <INDENT> function = getattr(self, request['function']) <NEW_LINE> result = {'result': function(*request['args'], **request['kwargs'])} <NEW_LINE> <DEDENT> except BaseException as ex: <NEW_LINE> <INDENT> result = {'exception': ex, 'traceback': "".join(traceback.format_exception(*sys.exc_info()))} <NEW_LINE> <DEDENT> response = pickle.dumps(result) <NEW_LINE> 
ch.basic_publish(exchange='', routing_key=props.reply_to, properties=pika.BasicProperties(correlation_id=props.correlation_id), body=response) <NEW_LINE> ch.basic_ack(delivery_tag=method.delivery_tag) <NEW_LINE> <DEDENT> def stop(self): <NEW_LINE> <INDENT> self.connection.close() <NEW_LINE> self.join() <NEW_LINE> self.run() | Service Base for Micro Service via RabbitMQ | 62598fa29c8ee823130400a9 |
@dataclass <NEW_LINE> class PassportElementErrorDataField(Base): <NEW_LINE> <INDENT> source: str <NEW_LINE> type: str <NEW_LINE> field_name: str <NEW_LINE> data_hash: str <NEW_LINE> message: str | Represents an issue in one of the data fields that was provided by
the user. The error is considered resolved when the field's value
changes. | 62598fa2236d856c2adc9375 |
class X10CommandType(Enum): <NEW_LINE> <INDENT> DIRECT = 0 <NEW_LINE> BROADCAST = 1 | X10 command types. | 62598fa2bd1bec0571e14ffe |
class DataTableResult: <NEW_LINE> <INDENT> def __init__(self, request_data, queryset, column_names): <NEW_LINE> <INDENT> self.queryset = queryset <NEW_LINE> self.request_data = request_data <NEW_LINE> self.column_names = column_names <NEW_LINE> <DEDENT> def _iter_sorting_columns(self): <NEW_LINE> <INDENT> number_of_sorting_cols = int(self.request_data.get('iSortingCols', 0)) <NEW_LINE> for idx_which_column in range(number_of_sorting_cols): <NEW_LINE> <INDENT> sorting_col_index = int( self.request_data.get('iSortCol_{}'.format(idx_which_column), 0)) <NEW_LINE> sortable_key = 'bSortable_{}'.format(sorting_col_index) <NEW_LINE> sort_dir_key = 'sSortDir_{}'.format(idx_which_column) <NEW_LINE> sortable = self.request_data.get(sortable_key, 'false') <NEW_LINE> if sortable == 'false': <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> sorting_col_name = self.column_names[sorting_col_index] <NEW_LINE> sorting_direction = self.request_data.get(sort_dir_key, 'asc') <NEW_LINE> yield sorting_col_name, sorting_direction <NEW_LINE> <DEDENT> <DEDENT> def _sort_result(self): <NEW_LINE> <INDENT> sorting_columns = self._iter_sorting_columns() <NEW_LINE> order_fields = [] <NEW_LINE> for col_name, direction in sorting_columns: <NEW_LINE> <INDENT> if direction == 'desc': <NEW_LINE> <INDENT> order_fields.append('-{}'.format(col_name)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> order_fields.append(col_name) <NEW_LINE> <DEDENT> <DEDENT> if order_fields: <NEW_LINE> <INDENT> self.queryset = self.queryset.order_by(*order_fields) <NEW_LINE> <DEDENT> <DEDENT> def _paginate_result(self): <NEW_LINE> <INDENT> display_length = int(self.request_data.get('iDisplayLength', settings.DEFAULT_PAGE_SIZE)) <NEW_LINE> display_start = int(self.request_data.get('iDisplayStart', 0)) <NEW_LINE> display_end = display_start + display_length <NEW_LINE> self.queryset = self.queryset[display_start:display_end] <NEW_LINE> <DEDENT> def get_response_data(self): <NEW_LINE> <INDENT> total_records = 
total_display_records = self.queryset.count() <NEW_LINE> self._sort_result() <NEW_LINE> self._paginate_result() <NEW_LINE> return { 'sEcho': int(self.request_data.get('sEcho', 0)), 'iTotalRecords': total_records, 'iTotalDisplayRecords': total_display_records, 'querySet': self.queryset, } | Paginate and order queryset for rendering DataTable response | 62598fa2b7558d58954634a3 |
class Variable(object): <NEW_LINE> <INDENT> def __init__(self, value): <NEW_LINE> <INDENT> self.value = value <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "<AQL Variable: {}>".format(self.value) | AQL Variable | 62598fa21f5feb6acb162a98 |
class LocationOfInterest (google.appengine.ext.ndb.Model): <NEW_LINE> <INDENT> owner = google.appengine.ext.ndb.StringProperty() <NEW_LINE> description = google.appengine.ext.ndb.StringProperty() <NEW_LINE> location = google.appengine.ext.ndb.GeoPtProperty() <NEW_LINE> @classmethod <NEW_LINE> def query_user(cls, user_id): <NEW_LINE> <INDENT> return cls.query(cls.owner == user_id) | NDB model class for a location for which a user would like to receive
notifications for earthquakes.
In the interest of scalability, it might have been better to batch
locations of interest by user ID, but I didn't want to figure it out. | 62598fa2cb5e8a47e493c0b1 |
class DesignSpace(Entity): <NEW_LINE> <INDENT> def __init__(self, constellations=None, launchers=None, satellites=None, groundNetworks=None, groundStations=None, _id=None): <NEW_LINE> <INDENT> if isinstance(constellations, Constellation): self.constellations = [constellations] <NEW_LINE> else: self.constellations = constellations <NEW_LINE> if isinstance(launchers, LaunchVehicle): self.launchers = [launchers] <NEW_LINE> else: self.launchers = launchers <NEW_LINE> if isinstance(satellites, Satellite): self.satellites = [satellites] <NEW_LINE> else: self.satellites = satellites <NEW_LINE> if isinstance(groundNetworks, GroundNetwork): self.groundNetworks = [groundNetworks] <NEW_LINE> else: self.groundNetworks = groundNetworks <NEW_LINE> if isinstance(groundStations, GroundStation): self.groundStations = [groundStations] <NEW_LINE> else: self.groundStations = groundStations <NEW_LINE> super(DesignSpace,self).__init__(_id, "DesignSpace") <NEW_LINE> <DEDENT> def generate_architectures(self): <NEW_LINE> <INDENT> constellationsList = iter(iter(i) for i in self.constellations) <NEW_LINE> constellationsIter = iter(set().union(*constellationsList)) <NEW_LINE> groundNetworksList = iter(iter(i) for i in self.groundNetworks) <NEW_LINE> groundNetworksIter = iter(set().union(*groundNetworksList)) <NEW_LINE> return iter([ Architecture( constellation=constellation, groundNetwork=groundNetwork ) for constellation, groundNetwork in itertools.product( [i.generate_constellations(self.satellites) for i in constellationsIter], [i.generate_networks(self.groundStations) for i in groundNetworksIter] ) ]) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def from_dict(d): <NEW_LINE> <INDENT> return DesignSpace( constellations = Constellation.from_json(d.get("constellations", None)), launchers = LaunchVehicle.from_json(d.get("launchers", None)), satellites = Satellite.from_json(d.get("satellites", None)), groundNetworks = GroundNetwork.from_json(d.get("groundNetworks", None)), groundStations = 
GroundStation.from_json(d.get("groundStations", None)), _id = d.get("@id", None) ) | Specification of fixed and variable quantities for a space mission.
Attributes:
constellations List of potential constellations to consider.
launchers List of available launch vehicles to consider
(overrides default database).
satellites List of available satellites.
groundNetworks List of potential ground networks to consider.
groundStations List of available ground stations. | 62598fa2a79ad16197769ed5 |
class AjaxHandler(Handler): <NEW_LINE> <INDENT> def dispatch(self, request, *args, **kwargs): <NEW_LINE> <INDENT> if not (request.is_ajax() or request.GET.get('ajax', False)): <NEW_LINE> <INDENT> raise NotMyJob('ajax') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return super(AjaxHandler, self).dispatch(request, *args, **kwargs) <NEW_LINE> <DEDENT> <DEDENT> def get_template_names(self): <NEW_LINE> <INDENT> templates = super(AjaxHandler, self).get_template_names() <NEW_LINE> if not isinstance(templates, list): <NEW_LINE> <INDENT> if isinstance(templates, tuple): <NEW_LINE> <INDENT> templates = [t for t in templates] <NEW_LINE> <DEDENT> elif isinstance(templates, Template): <NEW_LINE> <INDENT> templates = [templates] <NEW_LINE> <DEDENT> <DEDENT> templates.insert(0, 'ajax_' + templates[0]) <NEW_LINE> return templates | Handler for Ajax sub-page requests. | 62598fa299cbb53fe6830d49 |
class Config: <NEW_LINE> <INDENT> log_dir = './train_log' <NEW_LINE> '''where to write model snapshots to''' <NEW_LINE> log_model_dir = os.path.join(log_dir, 'models') <NEW_LINE> exp_name = os.path.basename(log_dir) <NEW_LINE> minibatch_size = 256 <NEW_LINE> nr_channel = 3 <NEW_LINE> image_shape = (32, 32) <NEW_LINE> nr_class = 10 <NEW_LINE> nr_epoch = 60 <NEW_LINE> weight_decay = 1e-10 <NEW_LINE> lp_reg = 1 <NEW_LINE> show_interval = 100 <NEW_LINE> snapshot_interval = 2 <NEW_LINE> test_interval = 1 <NEW_LINE> @property <NEW_LINE> def input_shape(self): <NEW_LINE> <INDENT> return (self.minibatch_size, self.nr_channel) + self.image_shape | where to write all the logging information during training (includes saved models) | 62598fa2796e427e5384e609
class MultiplayerSelect(PopUpMenu[None]): <NEW_LINE> <INDENT> shrink_to_items = True <NEW_LINE> def startup(self, **kwargs: Any) -> None: <NEW_LINE> <INDENT> super().startup(**kwargs) <NEW_LINE> self.task(self.reload_items, 1, -1) <NEW_LINE> <DEDENT> def initialize_items(self) -> Generator[MenuItem[None], None, None]: <NEW_LINE> <INDENT> servers = self.game.client.server_list <NEW_LINE> if servers: <NEW_LINE> <INDENT> for server in servers: <NEW_LINE> <INDENT> label = self.shadow_text(server) <NEW_LINE> yield MenuItem(label, None, None, None) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> label = self.shadow_text(T.translate("multiplayer_no_servers")) <NEW_LINE> item = MenuItem(label, None, None, None) <NEW_LINE> item.enabled = False <NEW_LINE> yield item | Menu to show games found by the network game scanner | 62598fa23c8af77a43b67e7b |
class CustomOp: <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def process_ops(cls, ops, block_num, block_date): <NEW_LINE> <INDENT> for op in ops: <NEW_LINE> <INDENT> if op['id'] not in ['follow', 'com.steemit.community']: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> if len(op['required_posting_auths']) != 1: <NEW_LINE> <INDENT> log.warning("unexpected auths: %s", op) <NEW_LINE> continue <NEW_LINE> <DEDENT> account = op['required_posting_auths'][0] <NEW_LINE> op_json = load_json_key(op, 'json') <NEW_LINE> if op['id'] == 'follow': <NEW_LINE> <INDENT> if block_num < 6000000 and not isinstance(op_json, list): <NEW_LINE> <INDENT> op_json = ['follow', op_json] <NEW_LINE> <DEDENT> cls._process_legacy(account, op_json, block_date) <NEW_LINE> <DEDENT> elif op['id'] == 'com.steemit.community': <NEW_LINE> <INDENT> if block_num > 30e6: <NEW_LINE> <INDENT> process_json_community_op(account, op_json, block_date) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> @classmethod <NEW_LINE> def _process_legacy(cls, account, op_json, block_date): <NEW_LINE> <INDENT> if not isinstance(op_json, list): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> if len(op_json) != 2: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> if first(op_json) not in ['follow', 'reblog']: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> if not isinstance(second(op_json), dict): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> cmd, op_json = op_json <NEW_LINE> if cmd == 'follow': <NEW_LINE> <INDENT> Follow.follow_op(account, op_json, block_date) <NEW_LINE> <DEDENT> elif cmd == 'reblog': <NEW_LINE> <INDENT> cls.reblog(account, op_json, block_date) <NEW_LINE> <DEDENT> <DEDENT> @classmethod <NEW_LINE> def reblog(cls, account, op_json, block_date): <NEW_LINE> <INDENT> if ('account' not in op_json or 'author' not in op_json or 'permlink' not in op_json): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> blogger = op_json['account'] <NEW_LINE> author = op_json['author'] <NEW_LINE> permlink = op_json['permlink'] <NEW_LINE> if 
blogger != account: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> if not all(map(Accounts.exists, [author, blogger])): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> post_id, depth = Posts.get_id_and_depth(author, permlink) <NEW_LINE> if depth > 0: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> if not post_id: <NEW_LINE> <INDENT> log.debug("reblog: post not found: %s/%s", author, permlink) <NEW_LINE> return <NEW_LINE> <DEDENT> if 'delete' in op_json and op_json['delete'] == 'delete': <NEW_LINE> <INDENT> DB.query("DELETE FROM hive_reblogs WHERE account = :a AND " "post_id = :pid LIMIT 1", a=blogger, pid=post_id) <NEW_LINE> if not DbState.is_initial_sync(): <NEW_LINE> <INDENT> FeedCache.delete(post_id, Accounts.get_id(blogger)) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> sql = ("INSERT INTO hive_reblogs (account, post_id, created_at) " "VALUES (:a, :pid, :date) ON CONFLICT (account, post_id) DO NOTHING") <NEW_LINE> DB.query(sql, a=blogger, pid=post_id, date=block_date) <NEW_LINE> if not DbState.is_initial_sync(): <NEW_LINE> <INDENT> FeedCache.insert(post_id, Accounts.get_id(blogger), block_date) | Processes custom ops and dispatches updates. | 62598fa2009cb60464d0139b |
class Tag(Command): <NEW_LINE> <INDENT> def run(self): <NEW_LINE> <INDENT> version_file = version.VersionFile(self.cfg.version_file) <NEW_LINE> current = version_file.read() <NEW_LINE> try: <NEW_LINE> <INDENT> vcs_handler = vcs.VCS(self.cfg.vcs_engine) <NEW_LINE> vcs_handler.create_tag(current, self.cfg.vcs_tag_params) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> print('Git tag failed, do it yourself') <NEW_LINE> if self.cfg.verbose: <NEW_LINE> <INDENT> traceback.print_exc() <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> print('Git tag created') <NEW_LINE> <DEDENT> return CommandOutput(current) | Realize tasks for 'tag' command | 62598fa266656f66f7d5a267 |
class mainSpace(object): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def main(cls, args): <NEW_LINE> <INDENT> poly_deg = 5 <NEW_LINE> seq_length = mSeqlength.seq_length(poly_deg) <NEW_LINE> print("The Sequences length is: " + seq_length) <NEW_LINE> init_state = [None] * poly_deg <NEW_LINE> init_state[poly_deg - 1] = 1 <NEW_LINE> seqA = mSeqGenA.PNseq(poly_deg, seq_length, init_state) <NEW_LINE> seqB = mSeqGenB.PNseq(poly_deg, seq_length, init_state) <NEW_LINE> print("") <NEW_LINE> gold = goldCodeGen.gold(seq_length, seqA, seqB) | generated source for class mainSpace | 62598fa2a8370b77170f0255 |
class Array(Pattern): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(Array, self).__init__() <NEW_LINE> self._members = [] <NEW_LINE> <DEDENT> def add(self, expr): <NEW_LINE> <INDENT> if not isinstance(expr, Expression): <NEW_LINE> <INDENT> raise InconsistentExpression() <NEW_LINE> <DEDENT> self._members.append(expr) <NEW_LINE> <DEDENT> def walk(self, visitor): <NEW_LINE> <INDENT> visitor.enterArray() <NEW_LINE> for elem in self._members: <NEW_LINE> <INDENT> elem.walk(visitor) <NEW_LINE> visitor.next() <NEW_LINE> <DEDENT> visitor.leaveArray() | representation of an array of constants | 62598fa21b99ca400228f46a |
class VirtualLibrary(p.SingletonPlugin): <NEW_LINE> <INDENT> p.implements(p.IConfigurer) <NEW_LINE> def update_config(self, config): <NEW_LINE> <INDENT> p.toolkit.add_template_directory(config, 'templates') <NEW_LINE> p.toolkit.add_public_directory(config, 'public') | Plugin for public-facing version of data.gc.ca site, aka the "portal"
This plugin requires the DataGCCAForms plugin | 62598fa256b00c62f0fb2728 |
class Binomial(Distribution): <NEW_LINE> <INDENT> def __init__(self, prob:float = 0.5, size:int = 20): <NEW_LINE> <INDENT> self.p = prob <NEW_LINE> self.n = size <NEW_LINE> mean = self.calculate_mean() <NEW_LINE> stdev = self.calculate_stdev() <NEW_LINE> super().__init__(mean,stdev) <NEW_LINE> <DEDENT> def calculate_mean(self): <NEW_LINE> <INDENT> self.mean = self.n * self.p <NEW_LINE> return self.mean <NEW_LINE> <DEDENT> def calculate_stdev(self): <NEW_LINE> <INDENT> self.stdev = math.sqrt( self.n*self.p*(1-self.p) ) <NEW_LINE> return self.stdev <NEW_LINE> <DEDENT> def replace_stats_with_data(self): <NEW_LINE> <INDENT> self.n = len(self.data) <NEW_LINE> self.p = self.data.count(1)/self.n <NEW_LINE> self.calculate_mean() <NEW_LINE> self.calculate_stdev() <NEW_LINE> return self.p , self.n <NEW_LINE> <DEDENT> def plot_bar(self): <NEW_LINE> <INDENT> ones = self.data.count(1) <NEW_LINE> zeros = self.n - ones <NEW_LINE> plt.bar(["zero","one"],[zeros,ones]) <NEW_LINE> plt.xlabel("x-axis") <NEW_LINE> plt.ylabel("y-axis") <NEW_LINE> <DEDENT> def pdf(self, k: int ) -> float: <NEW_LINE> <INDENT> a = math.factorial(self.n) / (math.factorial(k) * math.factorial(self.n-k)) <NEW_LINE> b = math.pow(self.p , k) * math.pow( 1-self.p , self.n-k ) <NEW_LINE> return a * b <NEW_LINE> <DEDENT> def plot_bar_pdf(self): <NEW_LINE> <INDENT> x = [ k for k in range(self.n+1) ] <NEW_LINE> y = [ self.pdf(k) for k in range(self.n+1) ] <NEW_LINE> plt.bar(x,y) <NEW_LINE> plt.show() <NEW_LINE> return x,y <NEW_LINE> <DEDENT> def __add__(self, other): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> assert self.p == other.p, 'p values are not equal' <NEW_LINE> <DEDENT> except AssertionError as error: <NEW_LINE> <INDENT> raise <NEW_LINE> <DEDENT> n = self.n + other.n <NEW_LINE> p = self.p <NEW_LINE> return Binomial(p,n) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return f"mean {self.mean}, standard deviation {self.stdev}, p {self.p}, n {self.n}" | Binomial distribution class for 
calculating and
visualizing a Binomial distribution.
Attributes:
mean (float) representing the mean value of the distribution
stdev (float) representing the standard deviation of the distribution
data_list (list of floats) a list of floats to be extracted from the data file
p (float) representing the probability of an event occurring
n (int) the total number of trials
| 62598fa299cbb53fe6830d4a |
class CompositeTemplateHintProvider(list): <NEW_LINE> <INDENT> def get_template_hints(self, name_provider, hint_providers=None): <NEW_LINE> <INDENT> if hint_providers is None: <NEW_LINE> <INDENT> hint_providers = self <NEW_LINE> <DEDENT> template_hints = [] <NEW_LINE> for hint_provider in hint_providers: <NEW_LINE> <INDENT> template_hints.extend( hint_provider.get_template_hints(name_provider, hint_providers)) <NEW_LINE> <DEDENT> return template_hints <NEW_LINE> <DEDENT> def suggest_context_data(self, name_provider, hint_providers=None): <NEW_LINE> <INDENT> if hint_providers is None: <NEW_LINE> <INDENT> hint_providers = self <NEW_LINE> <DEDENT> context_data = {} <NEW_LINE> for hint_provider in hint_providers: <NEW_LINE> <INDENT> context_data.update( hint_provider.suggest_context_data(name_provider, hint_providers)) <NEW_LINE> <DEDENT> return context_data <NEW_LINE> <DEDENT> def suggest_template_names(self, name_provider, hint_providers=None, **kwargs): <NEW_LINE> <INDENT> if hint_providers is None: <NEW_LINE> <INDENT> hint_providers = self <NEW_LINE> <DEDENT> template_names = [] <NEW_LINE> for hint_provider in hint_providers: <NEW_LINE> <INDENT> template_names.extend( hint_provider.suggest_template_names( name_provider, hint_providers=hint_providers, **kwargs)) <NEW_LINE> <DEDENT> return template_names | Can be used as a composite of multiple TemplateHintProviders. That's
useful if you want to group the providers into a list. It's used for
example in the ``{% render_content %}`` template tag. | 62598fa24f6381625f1993f8 |
class PostgresLexer(PostgresBase, RegexLexer): <NEW_LINE> <INDENT> name = 'PostgreSQL SQL dialect' <NEW_LINE> aliases = ['postgresql', 'postgres'] <NEW_LINE> mimetypes = ['text/x-postgresql'] <NEW_LINE> flags = re.IGNORECASE <NEW_LINE> tokens = { 'root': [ (r'\s+', Text), (r'--.*?\n', Comment.Single), (r'/\*', Comment.Multiline, 'multiline-comments'), (r'(' + '|'.join(s.replace(" ", "\s+") for s in DATATYPES + PSEUDO_TYPES) + r')\b', Name.Builtin), (words(KEYWORDS, suffix=r'\b'), Keyword), (r'[+*/<>=~!@#%^&|`?-]+', Operator), (r'::', Operator), (r'\$\d+', Name.Variable), (r'([0-9]*\.[0-9]*|[0-9]+)(e[+-]?[0-9]+)?', Number.Float), (r'[0-9]+', Number.Integer), (r"(E|U&)?'(''|[^'])*'", String.Single), (r'(U&)?"(""|[^"])*"', String.Name), (r'(?s)(\$[^\$]*\$)(.*?)(\1)', language_callback), (r'[a-z_]\w*', Name), (r""":(['"]?)[a-z]\w*\b\1""", Name.Variable), (r'[;:()\[\]\{\},\.]', Punctuation), ], 'multiline-comments': [ (r'/\*', Comment.Multiline, 'multiline-comments'), (r'\*/', Comment.Multiline, '#pop'), (r'[^/\*]+', Comment.Multiline), (r'[/*]', Comment.Multiline) ], } | Lexer for the PostgreSQL dialect of SQL.
.. versionadded:: 1.5 | 62598fa285dfad0860cbf9b0 |
class ExcitationMotor: <NEW_LINE> <INDENT> def __init__(self, filename, phase_offset_excitation=0, rotation_direction=-1, optical_element='L/2 Plate'): <NEW_LINE> <INDENT> self.experiment_start_datetime = None <NEW_LINE> self.filename = filename <NEW_LINE> self.phase_offset_excitation = phase_offset_excitation <NEW_LINE> self.rotation_direction = rotation_direction <NEW_LINE> self.optical_element = optical_element <NEW_LINE> self.load_excitation_motor_file( filename ) <NEW_LINE> self.determine_function() <NEW_LINE> <DEDENT> def load_excitation_motor_file( self, filename ): <NEW_LINE> <INDENT> md = np.loadtxt( filename, delimiter='\t', skiprows=1, converters={0: lambda s: deal_with_date_time_string(self, s), 1: lambda s: s=='UP'} ) <NEW_LINE> timestamps = md[:,0] <NEW_LINE> signals = md[:,1] <NEW_LINE> self.timestamps = timestamps <NEW_LINE> self.signals = signals <NEW_LINE> self.starttime = timestamps[0] <NEW_LINE> self.endtime = timestamps[-1] <NEW_LINE> <DEDENT> def angle(self, time, raw_angles=True): <NEW_LINE> <INDENT> if self.starttime <= time <= self.endtime: <NEW_LINE> <INDENT> phi = self.anglefun_slope*time + self.anglefun_intercept + self.phase_offset_excitation <NEW_LINE> if raw_angles: <NEW_LINE> <INDENT> return phi <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return phi % np.pi <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> return -1 <NEW_LINE> <DEDENT> <DEDENT> def determine_function(self): <NEW_LINE> <INDENT> a = self.signals[self.signals==1] <NEW_LINE> t = self.timestamps[self.signals==1] <NEW_LINE> if self.optical_element=='L/2 Plate': <NEW_LINE> <INDENT> a *= 4*np.pi*self.rotation_direction <NEW_LINE> <DEDENT> elif self.optical_element=='Polarizer': <NEW_LINE> <INDENT> a *= 2*np.pi*self.rotation_direction <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError("Motor class doesn't know optical_element '%s'" % self.optical_element) <NEW_LINE> <DEDENT> a = np.cumsum(a) <NEW_LINE> t = np.reshape( t, (t.shape[0],1) ) <NEW_LINE> M = 
np.concatenate( (t, np.ones_like(t)), axis=1 ) <NEW_LINE> s = np.linalg.lstsq( M, a ) <NEW_LINE> self.anglefun_slope = s[0][0] <NEW_LINE> self.anglefun_intercept = s[0][1] | This class will hold the data associated with the excitation polarizer.
It is used to:
* read in the file generated by the labview component
* extrapolate the angular function (assumed to be linear)
* present the function such that it can be queried | 62598fa2c432627299fa2e51 |
class MapdataRouter(object): <NEW_LINE> <INDENT> def db_for_read(self, model, **hints): <NEW_LINE> <INDENT> if model._meta.app_label == 'mapdata': <NEW_LINE> <INDENT> return 'msemap_db' <NEW_LINE> <DEDENT> return None <NEW_LINE> <DEDENT> def db_for_write(self, model, **hints): <NEW_LINE> <INDENT> if model._meta.app_label == 'mapdata': <NEW_LINE> <INDENT> return 'msemap_db' <NEW_LINE> <DEDENT> return None <NEW_LINE> <DEDENT> def allow_relation(self, obj1, obj2, **hints): <NEW_LINE> <INDENT> if obj1._meta.app_label == 'mapdata' and obj2._meta.app_label == 'mapdata': <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> elif 'mapdata' not in [obj1._meta.app_label, obj2._meta.app_label]: <NEW_LINE> <INDENT> return None <NEW_LINE> return False <NEW_LINE> <DEDENT> <DEDENT> def allow_migrate(self, db, app_label, model_name=None, **hints): <NEW_LINE> <INDENT> if app_label == 'mapdata': <NEW_LINE> <INDENT> return db == 'msemap_db' <NEW_LINE> <DEDENT> elif db == 'msemap_db': <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return None | Determine how to route database calls for an app's models (in this case, for an app named mapdata).
All other models will be routed to the next router in the DATABASE_ROUTERS setting if applicable,
or otherwise to the default database. | 62598fa255399d3f05626399 |
class ConcreteComponent(Component): <NEW_LINE> <INDENT> def operation(self): <NEW_LINE> <INDENT> pass | Defina un objeto al cual nuevas responsabilidades pueden ser agregadas | 62598fa292d797404e388aa1 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.