code stringlengths 4 4.48k | docstring stringlengths 1 6.45k | _id stringlengths 24 24 |
|---|---|---|
class MixedPrecisionWrapper(object): <NEW_LINE> <INDENT> def __init__(self, optimizer, scale=None, auto_scale=True, inc_factor=2.0, dec_factor=0.5, num_iters_be_stable=500): <NEW_LINE> <INDENT> if not isinstance(optimizer, torch.optim.Optimizer): <NEW_LINE> <INDENT> raise ValueError("must provide a torch.optim.Optimizer") <NEW_LINE> <DEDENT> self.optimizer = optimizer <NEW_LINE> if hasattr(self.optimizer, 'name'): <NEW_LINE> <INDENT> self.name = self.optimizer.name <NEW_LINE> <DEDENT> param_groups_copy = [] <NEW_LINE> for i, group in enumerate(optimizer.param_groups): <NEW_LINE> <INDENT> group_copy = {n: v for n, v in group.items() if n != 'params'} <NEW_LINE> group_copy['params'] = param_fp32_copy(group['params']) <NEW_LINE> param_groups_copy.append(group_copy) <NEW_LINE> <DEDENT> self.param_groups = optimizer.param_groups <NEW_LINE> optimizer.param_groups = param_groups_copy <NEW_LINE> self.grad_scale = scale <NEW_LINE> self.auto_scale = auto_scale <NEW_LINE> self.inc_factor = inc_factor <NEW_LINE> self.dec_factor = dec_factor <NEW_LINE> self.stable_iter_count = 0 <NEW_LINE> self.num_iters_be_stable = num_iters_be_stable <NEW_LINE> <DEDENT> def __getstate__(self): <NEW_LINE> <INDENT> return self.optimizer.__getstate__() <NEW_LINE> <DEDENT> def __setstate__(self, state): <NEW_LINE> <INDENT> return self.optimizer.__setstate__(state) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.optimizer.__repr__() <NEW_LINE> <DEDENT> def state_dict(self): <NEW_LINE> <INDENT> return self.optimizer.state_dict() <NEW_LINE> <DEDENT> def load_state_dict(self, state_dict): <NEW_LINE> <INDENT> return self.optimizer.load_state_dict(state_dict) <NEW_LINE> <DEDENT> def zero_grad(self): <NEW_LINE> <INDENT> return self.optimizer.zero_grad() <NEW_LINE> <DEDENT> def step(self, closure=None): <NEW_LINE> <INDENT> for g, g_copy in zip(self.param_groups, self.optimizer.param_groups): <NEW_LINE> <INDENT> invalid = set_grad(g_copy['params'], g['params'], self.grad_scale) 
<NEW_LINE> if invalid: <NEW_LINE> <INDENT> if self.grad_scale is None or self.auto_scale is False: <NEW_LINE> <INDENT> raise ValueError("nan/inf detected but auto_scale disabled.") <NEW_LINE> <DEDENT> self.grad_scale *= self.dec_factor <NEW_LINE> print('scale decay to {}'.format(self.grad_scale)) <NEW_LINE> return <NEW_LINE> <DEDENT> <DEDENT> if self.auto_scale is True: <NEW_LINE> <INDENT> self.stable_iter_count += 1 <NEW_LINE> if self.stable_iter_count > self.num_iters_be_stable: <NEW_LINE> <INDENT> if self.grad_scale is not None: <NEW_LINE> <INDENT> self.grad_scale *= self.inc_factor <NEW_LINE> <DEDENT> self.stable_iter_count = 0 <NEW_LINE> <DEDENT> <DEDENT> if closure is None: <NEW_LINE> <INDENT> self.optimizer.step() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.optimizer.step(closure) <NEW_LINE> <DEDENT> for g, g_copy in zip(self.param_groups, self.optimizer.param_groups): <NEW_LINE> <INDENT> for p_copy, p in zip(g_copy['params'], g['params']): <NEW_LINE> <INDENT> p.data.copy_(p_copy.data) | mixed precision optimizer wrapper.
Arguments:
optimizer (torch.optim.Optimizer): an instance of
:class:`torch.optim.Optimizer`
scale: (float): a scalar for grad scale.
auto_scale: (bool): whether enable auto scale.
The algorihm of auto scale is discribled in
http://docs.nvidia.com/deeplearning/sdk/mixed-precision-training/index.html | 62598fabf9cc0f698b1c5290 |
class Graph(object): <NEW_LINE> <INDENT> def __init__(self, variables): <NEW_LINE> <INDENT> self.output = {} <NEW_LINE> self.gradients = {} <NEW_LINE> self.nodes = [] <NEW_LINE> for node in variables: <NEW_LINE> <INDENT> self.add(node) <NEW_LINE> <DEDENT> <DEDENT> def get_nodes(self): <NEW_LINE> <INDENT> return self.nodes <NEW_LINE> <DEDENT> def get_inputs(self, node): <NEW_LINE> <INDENT> inputs = [] <NEW_LINE> for parent in node.get_parents(): <NEW_LINE> <INDENT> inputs.append(self.get_output(parent)) <NEW_LINE> <DEDENT> return inputs <NEW_LINE> <DEDENT> def get_output(self, node): <NEW_LINE> <INDENT> return self.output[node] <NEW_LINE> <DEDENT> def get_gradient(self, node): <NEW_LINE> <INDENT> return self.gradients[node] <NEW_LINE> <DEDENT> def add(self, node): <NEW_LINE> <INDENT> self.nodes.append(node) <NEW_LINE> self.output[node] = node.forward(self.get_inputs(node)) <NEW_LINE> self.gradients[node] = np.zeros_like(self.output[node]) <NEW_LINE> <DEDENT> def backprop(self): <NEW_LINE> <INDENT> loss_node = self.get_nodes()[-1] <NEW_LINE> assert np.asarray(self.get_output(loss_node)).ndim == 0 <NEW_LINE> "*** YOUR CODE HERE ***" <NEW_LINE> index = len(self.get_nodes()) - 1 <NEW_LINE> while index >= 0: <NEW_LINE> <INDENT> current_node = self.get_nodes()[index] <NEW_LINE> if current_node == loss_node: <NEW_LINE> <INDENT> np.place(self.gradients[loss_node], self.gradients[loss_node] == 0, 1) <NEW_LINE> <DEDENT> output = current_node.backward(self.get_inputs(current_node), self.gradients[current_node]) <NEW_LINE> i = 0 <NEW_LINE> while i < len(current_node.get_parents()): <NEW_LINE> <INDENT> self.gradients[current_node.get_parents()[i]] = output[i] + self.gradients[current_node.get_parents()[i]] <NEW_LINE> i += 1 <NEW_LINE> <DEDENT> index -= 1 <NEW_LINE> <DEDENT> <DEDENT> def step(self, step_size): <NEW_LINE> <INDENT> for node in self.get_nodes(): <NEW_LINE> <INDENT> if isinstance(node, Variable): <NEW_LINE> <INDENT> node.data -= self.gradients[node]*step_size | A 
graph that keeps track of the computations performed by a neural network
in order to implement back-propagation.
Each evaluation of the neural network (during both training and test-time)
will create a new Graph. The computation will add nodes to the graph, where
each node is either a DataNode or a FunctionNode.
A DataNode represents a trainable parameter or an input to the computation.
A FunctionNode represents doing a computation based on two previous nodes in
the graph.
The Graph is responsible for keeping track of all nodes and the order they
are added to the graph, for computing gradients using back-propagation, and
for performing updates to the trainable parameters.
For an example of how the Graph can be used, see the function `main` above. | 62598fabcb5e8a47e493c140 |
class SlbSetBackendServer(Aliyunsdk): <NEW_LINE> <INDENT> def __init__(self,slbip,ecsip,resultFormat=resultFormat): <NEW_LINE> <INDENT> Aliyunsdk.__init__(self) <NEW_LINE> self.resultFormat = resultFormat <NEW_LINE> self.slbip = slbip <NEW_LINE> self.ecsip = ecsip <NEW_LINE> rel = GetSlbInfo(slbip) <NEW_LINE> rel.run() <NEW_LINE> self.slbid = rel.slbid <NEW_LINE> if rel.backendServerId: <NEW_LINE> <INDENT> print('SLB: %s is used' % slbip) <NEW_LINE> os._exit(17) <NEW_LINE> <DEDENT> rel = EcsIpToId(ecsip) <NEW_LINE> rel.run() <NEW_LINE> self.ecsid = rel.ecsid <NEW_LINE> self.request = AddBackendServersRequest() <NEW_LINE> self.request.set_LoadBalancerId(self.slbid) <NEW_LINE> self.request.set_BackendServers([{"ServerId":str(self.ecsid)}]) <NEW_LINE> <DEDENT> def handling(self): <NEW_LINE> <INDENT> if u'RequestId' in self.result.keys(): <NEW_LINE> <INDENT> rel = SlbSetName(self.slbip) <NEW_LINE> rel.run() <NEW_LINE> print('ecs: %s => slb: %s is added' % (self.ecsip,self.slbip)) <NEW_LINE> <DEDENT> Aliyunsdk.handling(self) | 为构造函数提供SLB IP和ECS IP,生成一个实例
调用run方法,设置后端服务器 | 62598fab7c178a314d78d42c |
class Unit(models.Model): <NEW_LINE> <INDENT> name = models.CharField(max_length=250, verbose_name=_('unit name'), help_text=_("Example: cup")) <NEW_LINE> plural = models.CharField(max_length=250, blank=True, verbose_name=_('plural form'), help_text=_("Example: cups (comma separated " "if language has more" " 2 plural forms)")) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return self.name <NEW_LINE> <DEDENT> class Meta: <NEW_LINE> <INDENT> verbose_name = _('unit') <NEW_LINE> verbose_name_plural = _('units') | Model of meashure unit | 62598fab67a9b606de545f5b |
class SingleMigrationRateChange(SingleParamChange): <NEW_LINE> <INDENT> __swig_setmethods__ = {} <NEW_LINE> for _s in [SingleParamChange]: __swig_setmethods__.update(getattr(_s,'__swig_setmethods__',{})) <NEW_LINE> __setattr__ = lambda self, name, value: _swig_setattr(self, SingleMigrationRateChange, name, value) <NEW_LINE> __swig_getmethods__ = {} <NEW_LINE> for _s in [SingleParamChange]: __swig_getmethods__.update(getattr(_s,'__swig_getmethods__',{})) <NEW_LINE> __getattr__ = lambda self, name: _swig_getattr(self, SingleMigrationRateChange, name) <NEW_LINE> __repr__ = _swig_repr <NEW_LINE> def __init__(self, *args): <NEW_LINE> <INDENT> this = _egglib_binding.new_SingleMigrationRateChange(*args) <NEW_LINE> try: self.this.append(this) <NEW_LINE> except: self.this = this <NEW_LINE> <DEDENT> def source(self, *args): <NEW_LINE> <INDENT> return _egglib_binding.SingleMigrationRateChange_source(self, *args) <NEW_LINE> <DEDENT> def dest(self, *args): <NEW_LINE> <INDENT> return _egglib_binding.SingleMigrationRateChange_dest(self, *args) <NEW_LINE> <DEDENT> def apply(self, *args): <NEW_LINE> <INDENT> return _egglib_binding.SingleMigrationRateChange_apply(self, *args) <NEW_LINE> <DEDENT> __swig_destroy__ = _egglib_binding.delete_SingleMigrationRateChange <NEW_LINE> __del__ = lambda self : None; | Proxy of C++ egglib::SingleMigrationRateChange class | 62598fab99fddb7c1ca62db0 |
class TranslatorParameters(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.entity_oracle = None <NEW_LINE> self.relation_oracle = None <NEW_LINE> self.restrict_answer_type = True <NEW_LINE> self.require_relation_match = True | A class that holds parameters for the translator. | 62598fab21bff66bcd722bf6 |
class SHA512Obfuscator(DataObfuscator): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(SHA512Obfuscator, self).__init__() <NEW_LINE> <DEDENT> def obfuscate(self, data): <NEW_LINE> <INDENT> hashable_representation = self._hashable_representation(data) <NEW_LINE> hasher = hashlib.sha512() <NEW_LINE> hasher.update(hashable_representation) <NEW_LINE> return hasher.digest() | DataObfuscator that uses the SHA512 hashing mechanism.
| 62598fab6aa9bd52df0d4e58 |
class TtyRecWriter(object): <NEW_LINE> <INDENT> def __init__(self, f): <NEW_LINE> <INDENT> if isinstance(f, io.IOBase): <NEW_LINE> <INDENT> self.file = f <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.file = open(f, 'wb') <NEW_LINE> <DEDENT> <DEDENT> def write_frame(self, seconds, useconds, payload): <NEW_LINE> <INDENT> header = struct.pack('<III', seconds, useconds, len(payload)) <NEW_LINE> self.file.write(header) <NEW_LINE> self.file.write(payload) <NEW_LINE> return None <NEW_LINE> <DEDENT> def close(self): <NEW_LINE> <INDENT> self.file.close() <NEW_LINE> <DEDENT> def __enter__(self): <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> def __exit__(self, exc_type, exc_val, exc_tb): <NEW_LINE> <INDENT> self.file.close() | A class to write ttyrecs | 62598fab32920d7e50bc5fe4 |
class CommandType(Enum): <NEW_LINE> <INDENT> NUM_TYPES = 4 <NEW_LINE> COMMANDS, SUB_COMMANDS, GLOBAL_OPTIONS, RESOURCE_OPTIONS = range(NUM_TYPES) | Enum specifying the command type.
Attributes:
* AWS_COMMAND: A string representing the 'aws' command.
* AWS_CONFIGURE: A string representing the 'configure' command.
* AWS_HELP: A string representing the 'help' command.
* AWS_DOCS: A string representing the 'docs' command.
* COMMANDS: An int representing commands.
* SUB_COMMANDS: An int representing subcommands.
* GLOBAL_OPTIONS: An int representing global options.
* RESOURCE_OPTIONS: An int representing resource options. | 62598fab4f6381625f199486 |
class itkExtractImageFilterID3ID2(itkImageToImageFilterBPython.itkImageToImageFilterID3ID2): <NEW_LINE> <INDENT> thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag') <NEW_LINE> def __init__(self, *args, **kwargs): raise AttributeError("No constructor defined") <NEW_LINE> __repr__ = _swig_repr <NEW_LINE> InputImageDimension = _itkExtractImageFilterPython.itkExtractImageFilterID3ID2_InputImageDimension <NEW_LINE> OutputImageDimension = _itkExtractImageFilterPython.itkExtractImageFilterID3ID2_OutputImageDimension <NEW_LINE> InputCovertibleToOutputCheck = _itkExtractImageFilterPython.itkExtractImageFilterID3ID2_InputCovertibleToOutputCheck <NEW_LINE> def __New_orig__(): <NEW_LINE> <INDENT> return _itkExtractImageFilterPython.itkExtractImageFilterID3ID2___New_orig__() <NEW_LINE> <DEDENT> __New_orig__ = staticmethod(__New_orig__) <NEW_LINE> def SetExtractionRegion(self, *args): <NEW_LINE> <INDENT> return _itkExtractImageFilterPython.itkExtractImageFilterID3ID2_SetExtractionRegion(self, *args) <NEW_LINE> <DEDENT> def GetExtractionRegion(self): <NEW_LINE> <INDENT> return _itkExtractImageFilterPython.itkExtractImageFilterID3ID2_GetExtractionRegion(self) <NEW_LINE> <DEDENT> __swig_destroy__ = _itkExtractImageFilterPython.delete_itkExtractImageFilterID3ID2 <NEW_LINE> def cast(*args): <NEW_LINE> <INDENT> return _itkExtractImageFilterPython.itkExtractImageFilterID3ID2_cast(*args) <NEW_LINE> <DEDENT> cast = staticmethod(cast) <NEW_LINE> def GetPointer(self): <NEW_LINE> <INDENT> return _itkExtractImageFilterPython.itkExtractImageFilterID3ID2_GetPointer(self) <NEW_LINE> <DEDENT> def New(*args, **kargs): <NEW_LINE> <INDENT> obj = itkExtractImageFilterID3ID2.__New_orig__() <NEW_LINE> import itkTemplate <NEW_LINE> itkTemplate.New(obj, *args, **kargs) <NEW_LINE> return obj <NEW_LINE> <DEDENT> New = staticmethod(New) | Proxy of C++ itkExtractImageFilterID3ID2 class | 62598fab56ac1b37e630217b |
class BlockTagNode(ExpressionMixin, Node): <NEW_LINE> <INDENT> @property <NEW_LINE> def function(self): <NEW_LINE> <INDENT> return self.token.contents["function"] | Node which presents block tag token.
Block tag example: ``{% if something %}``. This, with ``{%`` stuff.
This is one-to-one representation of
:py:class:`curly.lexer.StartBlockToken` token. | 62598fab71ff763f4b5e76fe |
class BamProfiler: <NEW_LINE> <INDENT> def __init__(self, bamFile, useSuppAlignments=False, useSecondaryAlignments=False ): <NEW_LINE> <INDENT> self.bamFile = bamFile <NEW_LINE> if useSuppAlignments: <NEW_LINE> <INDENT> self.ignoreSuppAlignments = 0 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.ignoreSuppAlignments = 1 <NEW_LINE> <DEDENT> if useSuppAlignments: <NEW_LINE> <INDENT> self.ignoreSecondaryAlignments = 0 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.ignoreSecondaryAlignments = 1 <NEW_LINE> <DEDENT> <DEDENT> def profile(self): <NEW_LINE> <INDENT> bamfile_c = c.c_char_p() <NEW_LINE> bamfile_c = self.bamFile <NEW_LINE> CW = CWrapper() <NEW_LINE> CW._profileReads(bamfile_c, self.ignoreSuppAlignments, self.ignoreSecondaryAlignments) | Class used to manage profiling reads from a BAM file | 62598fab63d6d428bbee273b |
class AccountViewSet(viewsets.ModelViewSet): <NEW_LINE> <INDENT> permission_classes = [IsAuthenticated, DjangoModelPermissions, ] <NEW_LINE> http_method_names = ["get", "post", "put", "delete"] <NEW_LINE> def get_serializer_class(self): <NEW_LINE> <INDENT> action_dict = { "list": serializers.AccountSerializer, "retrive": serializers.AccountSerializer } <NEW_LINE> return action_dict.get( self.action, serializers.WritableAccountSerializer) <NEW_LINE> <DEDENT> def get_queryset(self): <NEW_LINE> <INDENT> user = self.request.user <NEW_LINE> ids = user.objectaccess_set .filter(content_type=ContentType.objects.get_for_model(user)) .values_list('object_id', flat=True) <NEW_LINE> queryset = core_models.User.objects.filter(pk__in=ids) <NEW_LINE> domain = self.request.query_params.get("domain") <NEW_LINE> if domain: <NEW_LINE> <INDENT> queryset = queryset.filter(mailbox__domain__name=domain) <NEW_LINE> <DEDENT> return queryset <NEW_LINE> <DEDENT> @detail_route(methods=["put"]) <NEW_LINE> def password(self, request, pk=None): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> user = core_models.User.objects.get(pk=pk) <NEW_LINE> <DEDENT> except core_models.User.DoesNotExist: <NEW_LINE> <INDENT> raise http.Http404 <NEW_LINE> <DEDENT> serializer = serializers.AccountPasswordSerializer( user, data=request.data) <NEW_LINE> if serializer.is_valid(): <NEW_LINE> <INDENT> serializer.save() <NEW_LINE> return Response() <NEW_LINE> <DEDENT> return Response( serializer.errors, status=status.HTTP_400_BAD_REQUEST) <NEW_LINE> <DEDENT> @list_route() <NEW_LINE> def exists(self, request): <NEW_LINE> <INDENT> email = request.GET.get("email") <NEW_LINE> if not email: <NEW_LINE> <INDENT> raise ParseError("email not provided") <NEW_LINE> <DEDENT> if not core_models.User.objects.filter(email=email).exists(): <NEW_LINE> <INDENT> data = {"exists": False} <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> data = {"exists": True} <NEW_LINE> <DEDENT> serializer = serializers.AccountExistsSerializer(data) <NEW_LINE> 
return Response(serializer.data) | ViewSet for User/Mailbox. | 62598fab4428ac0f6e6584b4 |
class ASSET_OT_tag_remove(AssetBrowserMetadataOperator, Operator): <NEW_LINE> <INDENT> bl_idname = "asset.tag_remove" <NEW_LINE> bl_label = "Remove Asset Tag" <NEW_LINE> bl_options = {'REGISTER', 'UNDO'} <NEW_LINE> @classmethod <NEW_LINE> def poll(cls, context): <NEW_LINE> <INDENT> if not super().poll(context): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> active_asset_file = context.asset_file_handle <NEW_LINE> asset_metadata = active_asset_file.asset_data <NEW_LINE> return asset_metadata.active_tag in range(len(asset_metadata.tags)) <NEW_LINE> <DEDENT> def execute(self, context): <NEW_LINE> <INDENT> active_asset_file = context.asset_file_handle <NEW_LINE> asset_metadata = active_asset_file.asset_data <NEW_LINE> tag = asset_metadata.tags[asset_metadata.active_tag] <NEW_LINE> asset_metadata.tags.remove(tag) <NEW_LINE> asset_metadata.active_tag -= 1 <NEW_LINE> return {'FINISHED'} | Remove an existing keyword tag from the active asset | 62598fabd58c6744b42dc29e |
class InquiryResponseAPI(BaseAPI): <NEW_LINE> <INDENT> model = ActionExecutionDB <NEW_LINE> schema = { "title": "Inquiry", "description": "Record of an Inquiry", "type": "object", "properties": { "id": { "type": "string", "required": True }, "route": { "type": "string", "default": "", "required": True }, "ttl": { "type": "integer", "default": 1440, "required": True }, "users": { "type": "array", "default": [], "required": True }, "roles": { "type": "array", "default": [], "required": True }, "schema": { "type": "object", "default": { "title": "response_data", "type": "object", "properties": { "continue": { "type": "boolean", "description": "Would you like to continue the workflow?", "required": True } }, }, "required": True } }, "additionalProperties": False } <NEW_LINE> @classmethod <NEW_LINE> def from_model(cls, model, mask_secrets=False, skip_db=False): <NEW_LINE> <INDENT> if not skip_db: <NEW_LINE> <INDENT> doc = cls._from_model(model, mask_secrets=mask_secrets) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> doc = model <NEW_LINE> <DEDENT> newdoc = { "id": doc["id"] } <NEW_LINE> for field in ["route", "ttl", "users", "roles", "schema"]: <NEW_LINE> <INDENT> newdoc[field] = doc["result"].get(field) <NEW_LINE> <DEDENT> return cls(**newdoc) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_inquiry_api(cls, inquiry_api, mask_secrets=False): <NEW_LINE> <INDENT> return cls( id=getattr(inquiry_api, 'id', None), route=getattr(inquiry_api, 'route', None), ttl=getattr(inquiry_api, 'ttl', None), users=getattr(inquiry_api, 'users', None), roles=getattr(inquiry_api, 'roles', None), schema=getattr(inquiry_api, 'schema', None) ) | A more pruned Inquiry model, containing only the fields needed for an API response
| 62598fab67a9b606de545f5c |
class PyPMEnvironment(object): <NEW_LINE> <INDENT> def __init__(self, pyenv, repository_list, **options): <NEW_LINE> <INDENT> self.pyenv = pyenv <NEW_LINE> self.repository_list = repository_list <NEW_LINE> self.options = DEFAULT_OPTIONS.copy() <NEW_LINE> self.options.update(options) <NEW_LINE> self.pypm_dir = join(self.pyenv.site_packages_dir, '_pypm') <NEW_LINE> self.repo_store = store.RepoPackageStore( RemoteRepositoryManager(IDX_PATH), repository_list) <NEW_LINE> if not exists(self.pypm_dir): <NEW_LINE> <INDENT> self.pyenv.ensure_write_access() <NEW_LINE> sh.mkdirs(self.pypm_dir) <NEW_LINE> <DEDENT> self.installed_store = store.InstalledPackageStore( join(self.pypm_dir, 'installed.db') ) <NEW_LINE> <DEDENT> def clone(self, altattr={}): <NEW_LINE> <INDENT> return PyPMEnvironment( pyenv = altattr.get('pyenv', self.pyenv), repository_list = altattr.get('repository_list', self.repository_list), ) <NEW_LINE> <DEDENT> @contextmanager <NEW_LINE> def locked(self): <NEW_LINE> <INDENT> with dlocked(self.pypm_dir): <NEW_LINE> <INDENT> yield | A PyPM environment that is tied to
- one `PythonEnvironment`
- one or more `RemoteRepository`
Packages can, thus, be searched and installed from any number of remote
repositories (although usually it is the main respository) but can only be
installed to the specified Python environment | 62598fab16aa5153ce400492 |
class CloseFormResponse(HttpResponseRedirect): <NEW_LINE> <INDENT> def __init__(self, request, redirect_to=None): <NEW_LINE> <INDENT> if 'popup_form' in request.session: <NEW_LINE> <INDENT> del request.session['popup_form'] <NEW_LINE> <DEDENT> if redirect_to is None: <NEW_LINE> <INDENT> redirect_to = request.META.get('HTTP_REFERER', '/') <NEW_LINE> <DEDENT> return super(CloseFormResponse, self).__init__(redirect_to) | Redirects back to the referer, closing the popup form | 62598fab2c8b7c6e89bd3756 |
class EOF(Symbol): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(EOF, self).__init__("$EOF") <NEW_LINE> <DEDENT> def first(self, visited=None): <NEW_LINE> <INDENT> return set([self]) | The EOF symbol. | 62598fab9c8ee82313040139 |
class Statement: <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def try_parse(cls, tokens): <NEW_LINE> <INDENT> raise NotImplementedError('Must be implemented in deriving classes') | Parser for a statement
Attributes:
start_token (tokenize.TokenInfo): The first token of the statement. | 62598fabd486a94d0ba2bf5e |
class SiteNotFoundException(SiteException): <NEW_LINE> <INDENT> pass | Raised when the site is not found and it's expected to exists. | 62598fab1b99ca400228f4f8 |
class Xt(librarypackage.LibraryPackage): <NEW_LINE> <INDENT> def __init__(self, system): <NEW_LINE> <INDENT> super(Xt, self).__init__("Xt", system, "Install Xt-dev on this system.", "Xt", ["X11/Intrinsic.h"]) | Package for the Xt library. | 62598fabfff4ab517ebcd775 |
class AdaptiveRemeshing: <NEW_LINE> <INDENT> def __init__(self,model_part,domain_size,solver,do_swap=True): <NEW_LINE> <INDENT> self.model_part = model_part <NEW_LINE> self.domain_size = domain_size <NEW_LINE> self.fluid_solver = solver <NEW_LINE> self.do_swap = do_swap <NEW_LINE> self.refinement_utilities = RefinementUtilities() <NEW_LINE> if (domain_size == 2): <NEW_LINE> <INDENT> self.refinement_process = LocalRefineTriangleMesh(model_part) <NEW_LINE> self.swapping_process = EdgeSwapping2DModeler() <NEW_LINE> self.time_estimator = EstimateDt2D(model_part) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.refinement_process = LocalRefineTetrahedraMesh(model_part) <NEW_LINE> self.time_estimator = EstimateDt3D(model_part) <NEW_LINE> <DEDENT> AvgElemNum = 10 <NEW_LINE> AvgNodeNum = 10 <NEW_LINE> self.nodal_neighbour_search = FindNodalNeighboursProcess(model_part, AvgElemNum, AvgNodeNum) <NEW_LINE> self.nodal_neighbour_search.Execute() <NEW_LINE> <DEDENT> def EstimateTimeStep(self,CFL,max_dt): <NEW_LINE> <INDENT> NewDt = self.time_estimator.EstimateDt(CFL,max_dt) <NEW_LINE> return NewDt <NEW_LINE> <DEDENT> def RefineOnErrorRatio(self,refine_var,refine_tol,min_area,max_refinements): <NEW_LINE> <INDENT> self.fluid_solver.solver.Clear() <NEW_LINE> self.refinement_utilities.MarkForRefinement(refine_var, self.model_part, self.domain_size, refine_tol, min_area, max_refinements) <NEW_LINE> refine_on_reference = True <NEW_LINE> interpolate_internal_variables = False <NEW_LINE> self.refinement_process.LocalRefineMesh(refine_on_reference, interpolate_internal_variables) <NEW_LINE> if (self.domain_size == 2 and self.do_swap==True): <NEW_LINE> <INDENT> self.swapping_process.ReGenerateMesh(self.model_part) <NEW_LINE> <DEDENT> self.nodal_neighbour_search.Execute() <NEW_LINE> <DEDENT> def RefineOnSubscaleError(self,refine_tol,min_area,max_refinements): <NEW_LINE> <INDENT> self.refinement_utilities.SubscaleErrorEstimate(self.model_part) <NEW_LINE> 
self.RefineOnErrorRatio(ERROR_RATIO,refine_tol,min_area,max_refinements) | This class allows refining the problem mesh at run time.
It will split all elements where some error estimate surpasses a given
threshold. In 2D, some edge swapping will be performed to improve the
quality of the refined mesh. The current version of this class is
intended to work with VMS2D and VMS3D elements (by calling
RefineOnSubscaleError), but it can work with other fluid elements if
an error estimate is calculated and stored an elemental
variable (in that case, call RefineOnErrorRatio instead). | 62598fab63b5f9789fe850f6 |
class OrderError(Exception): <NEW_LINE> <INDENT> pass | 订单错误 | 62598fab7047854f4633f36a |
class TestingConfig(Config): <NEW_LINE> <INDENT> TESTING = True <NEW_LINE> SQLALCHEMY_DATABASE_URI = DatabaseConfig.get_dev_config().get_uri() <NEW_LINE> DEBUG = True | Configurations for Testing, with a separate test database. | 62598fab99cbb53fe6830e68 |
class MovieDataApi(ApiUrl): <NEW_LINE> <INDENT> SCHEME = 'https' <NEW_LINE> HOST = 'omdbapi.com' <NEW_LINE> def get_movie_data(self, movie=None, imdb_id=None): <NEW_LINE> <INDENT> payload = { 'tomatoes': False, } <NEW_LINE> if movie: <NEW_LINE> <INDENT> payload['t'] = movie <NEW_LINE> <DEDENT> elif imdb_id: <NEW_LINE> <INDENT> payload['i'] = imdb_id <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise Exception('Valid arguments missing: set t for movie or i for imdb_id.') <NEW_LINE> <DEDENT> response = requests.get(self.get_url(), params=payload) <NEW_LINE> return response.json() | Provides a class to fetch movie data from the omdb API | 62598fab7047854f4633f36b |
class DataSource: <NEW_LINE> <INDENT> def __init__(self, data_source_type, data_source_id, collection_id=None, name=None, service_url=None): <NEW_LINE> <INDENT> self.type = data_source_type <NEW_LINE> self.id = int(data_source_id) <NEW_LINE> self.collection_id = collection_id <NEW_LINE> self.name = name <NEW_LINE> self.service_url = service_url <NEW_LINE> <DEDENT> def get_wfs_id(self): <NEW_LINE> <INDENT> if self.id == 1: <NEW_LINE> <INDENT> return 'S2.TILE' <NEW_LINE> <DEDENT> wfs_id = 'DSS{}'.format(self.id) <NEW_LINE> if self.collection_id is not None: <NEW_LINE> <INDENT> wfs_id = '{}-{}'.format(wfs_id, self.collection_id) <NEW_LINE> <DEDENT> return wfs_id <NEW_LINE> <DEDENT> def is_cloudless(self): <NEW_LINE> <INDENT> return self.type in ['S1GRD', 'DEM'] <NEW_LINE> <DEDENT> def is_timeless(self): <NEW_LINE> <INDENT> return self.type == 'DEM' | Stores info about a Sentinel Hub data source
| 62598fab4f6381625f199487 |
class KMedoids(object): <NEW_LINE> <INDENT> def __init__(self, n_clusters=2, distance='euclidean', n_trials=10, max_iter=100, tol=0.001): <NEW_LINE> <INDENT> self.n_clusters = n_clusters <NEW_LINE> self.n_trials = n_trials <NEW_LINE> self.max_iter = max_iter <NEW_LINE> self.tol = tol <NEW_LINE> self.distance = distance <NEW_LINE> <DEDENT> def fit(self, X): <NEW_LINE> <INDENT> self.centers_, self.labels_, self.sse_arr_, self.n_iter_ = _kmedoids(X, self.n_clusters, self.distance, self.max_iter, self.n_trials, self.tol) <NEW_LINE> <DEDENT> def fit_predict(self, X): <NEW_LINE> <INDENT> self.fit(X) <NEW_LINE> return(self.labels_) | KMedoids Clustering
K-medoids clustering take the cluster centroid as the medoid of the data points,
as opposed to the average of data points in a cluster. As a result, K-medoids
gaurantees that the cluster centroid is among the cluster members.
The medoid is defined as the point that minimizes the total within-cluster distances.
K-medoids is more robust to outliers (the reason for this is similar to why
median is more robust to mean).
K-medoids is computationally more expensive, since it involves computation of all
the pairwise distances in a cluster.
Parameters
-------
n_clusters: number of clusters (default = 2)
n_trials: number of trial random centroid initialization (default = 10)
max_iter: maximum number of iterations (default = 100)
tol: tolerance (default = 0.0001)
Attibutes
-------
labels_ : cluster labels for each data item
centers_ : cluster centers
sse_arr_ : array of SSE values for each cluster
n_iter_ : number of iterations for the best trial
Methods
-------
fit(X): fit the model
fit_predict(X): fit the model and return the cluster labels | 62598fab6aa9bd52df0d4e5a |
class DocumentViewSet(viewsets.ModelViewSet): <NEW_LINE> <INDENT> queryset = Document.objects.all() <NEW_LINE> serializer_class = DocumentSerializer <NEW_LINE> http_method_names = ['get', 'head', 'options',] <NEW_LINE> def retrieve(self, request, pk=None): <NEW_LINE> <INDENT> document = Document.objects.get(pk=pk) <NEW_LINE> serializer = self.serializer_class(document) <NEW_LINE> return JsonResponse(serializer.data) | API endpoint for listing documents out of context. | 62598fab8e7ae83300ee9034 |
class NoticesViewlet(grok.Viewlet): <NEW_LINE> <INDENT> grok.name('collective.notices') <NEW_LINE> grok.context(Interface) <NEW_LINE> grok.require('zope2.View') <NEW_LINE> grok.viewletmanager(IPortalTop) <NEW_LINE> def update(self): <NEW_LINE> <INDENT> cookie_name = 'hidden-notices-' + self.cookieSuffix() <NEW_LINE> hidden = self.request.get(cookie_name, '[]') <NEW_LINE> try: <NEW_LINE> <INDENT> hidden = [id for id in json.loads(hidden) if isinstance(id, int)] <NEW_LINE> <DEDENT> except (ValueError, TypeError): <NEW_LINE> <INDENT> hidden = [] <NEW_LINE> <DEDENT> self.notices = list(getUtility(INoticesQuery).filter( self.getPrincipalIds(), hidden )) <NEW_LINE> <DEDENT> def getPrincipalIds(self): <NEW_LINE> <INDENT> membership = getToolByName(self.context, 'portal_membership', None) <NEW_LINE> if membership is None or membership.isAnonymousUser(): <NEW_LINE> <INDENT> return () <NEW_LINE> <DEDENT> member = membership.getAuthenticatedMember() <NEW_LINE> if not member: <NEW_LINE> <INDENT> return () <NEW_LINE> <DEDENT> groups = hasattr(member, 'getGroups') and member.getGroups() or [] <NEW_LINE> for group in groups: <NEW_LINE> <INDENT> if not isinstance(group, basestring): <NEW_LINE> <INDENT> return () <NEW_LINE> <DEDENT> <DEDENT> return [member.getId()] + groups <NEW_LINE> <DEDENT> def cookieSuffix(self): <NEW_LINE> <INDENT> membership = getToolByName(self.context, 'portal_membership', None) <NEW_LINE> member = membership.getAuthenticatedMember() <NEW_LINE> if not member: <NEW_LINE> <INDENT> return 'anonymous' <NEW_LINE> <DEDENT> return member.getId() or 'anonymous' | Displays notices.
| 62598fabbe383301e025378b |
class UserError(Exception): <NEW_LINE> <INDENT> pass | Exception for obvious user errors that should be corrected.
Raised if the user made an obvious error that should be corrected
(e.g. invalid scanner name, missing required value, ).
Contains a message describing the error. | 62598fab5fcc89381b266115 |
class _InitHook(object): <NEW_LINE> <INDENT> pass | Dummy class to ensure that callable is really an init hook. | 62598fab167d2b6e312b6f03 |
class RouteTable(Resource): <NEW_LINE> <INDENT> _validation = { 'name': {'readonly': True}, 'type': {'readonly': True}, 'subnets': {'readonly': True}, } <NEW_LINE> _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'location': {'key': 'location', 'type': 'str'}, 'tags': {'key': 'tags', 'type': '{str}'}, 'etag': {'key': 'etag', 'type': 'str'}, 'routes': {'key': 'properties.routes', 'type': '[Route]'}, 'subnets': {'key': 'properties.subnets', 'type': '[Subnet]'}, 'disable_bgp_route_propagation': {'key': 'properties.disableBgpRoutePropagation', 'type': 'bool'}, 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, } <NEW_LINE> def __init__( self, **kwargs ): <NEW_LINE> <INDENT> super(RouteTable, self).__init__(**kwargs) <NEW_LINE> self.etag = kwargs.get('etag', None) <NEW_LINE> self.routes = kwargs.get('routes', None) <NEW_LINE> self.subnets = None <NEW_LINE> self.disable_bgp_route_propagation = kwargs.get('disable_bgp_route_propagation', None) <NEW_LINE> self.provisioning_state = kwargs.get('provisioning_state', None) | Route table resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: A set of tags. Resource tags.
:type tags: dict[str, str]
:param etag: Gets a unique read-only string that changes whenever the resource is updated.
:type etag: str
:param routes: Collection of routes contained within a route table.
:type routes: list[~azure.mgmt.network.v2018_10_01.models.Route]
:ivar subnets: A collection of references to subnets.
:vartype subnets: list[~azure.mgmt.network.v2018_10_01.models.Subnet]
:param disable_bgp_route_propagation: Gets or sets whether to disable the routes learned by BGP
on that route table. True means disable.
:type disable_bgp_route_propagation: bool
:param provisioning_state: The provisioning state of the resource. Possible values are:
'Updating', 'Deleting', and 'Failed'.
:type provisioning_state: str | 62598fab92d797404e388b2d |
class Argument: <NEW_LINE> <INDENT> class ArgumentType: <NEW_LINE> <INDENT> VALUE, POINTER, REFERENCE = range(3) <NEW_LINE> <DEDENT> def __init__(self, aLanguageObject, name = None, argType = ArgumentType.VALUE, isConst = False): <NEW_LINE> <INDENT> assert aLanguageObject <NEW_LINE> self.languageObject_ = aLanguageObject <NEW_LINE> self.argType_ = argType <NEW_LINE> self.isConst_ = isConst <NEW_LINE> self.name_ = name <NEW_LINE> <DEDENT> def getLanguageObject(self): <NEW_LINE> <INDENT> return self.languageObject_ <NEW_LINE> <DEDENT> def getName(self): <NEW_LINE> <INDENT> return self.name_ <NEW_LINE> <DEDENT> def getArgumentType(self): <NEW_LINE> <INDENT> return self.argType_ <NEW_LINE> <DEDENT> def isConst(self): <NEW_LINE> <INDENT> return self.isConst_ | Represents a function argument. | 62598fab3539df3088ecc245 |
class Position(object): <NEW_LINE> <INDENT> def __init__(self, mark_generation, position, fragment_offset): <NEW_LINE> <INDENT> self.mark_offset = position <NEW_LINE> self.mark_generation = mark_generation <NEW_LINE> self.fragment_offset = fragment_offset <NEW_LINE> <DEDENT> def get_offset_into_buffer(self): <NEW_LINE> <INDENT> return self.mark_generation.get_offset_into_buffer(self.mark_offset) | Represents a position in the iterator. | 62598fab30bbd72246469941 |
class MyFrame(tkinter.Frame): <NEW_LINE> <INDENT> def __init__(self, controller): <NEW_LINE> <INDENT> tkinter.Frame.__init__(self) <NEW_LINE> self.pack() <NEW_LINE> self.controller = controller <NEW_LINE> self.userEntryF = tkinter.Entry() <NEW_LINE> self.userEntryF.insert(0, "") <NEW_LINE> self.userEntryF.pack({"side": "left"}) <NEW_LINE> self.userEntryC = tkinter.Entry() <NEW_LINE> self.userEntryC.insert(0, "") <NEW_LINE> self.userEntryC.pack({"side": "left"}) <NEW_LINE> self.convertToFahrenheitButton = tkinter.Button(self) <NEW_LINE> self.convertToFahrenheitButton["text"] = "Convert to Fahrenheit " <NEW_LINE> self.convertToFahrenheitButton["command"] = self.controller.convertToFahrenheitButtonPressed <NEW_LINE> self.convertToFahrenheitButton.pack({"side": "left"}) <NEW_LINE> self.convertToCelciusButton = tkinter.Button(self) <NEW_LINE> self.convertToCelciusButton["text"] = "Convert to Celcius " <NEW_LINE> self.convertToCelciusButton["command"] = self.controller.convertToCelciusButtonPressed <NEW_LINE> self.convertToCelciusButton.pack({"side": "left"}) <NEW_LINE> self.labelForOutput = tkinter.Label(self) <NEW_LINE> self.labelForOutput["text"] = "" <NEW_LINE> self.labelForOutput.pack({"side": "right"}) <NEW_LINE> self.quitButton = tkinter.Button(self) <NEW_LINE> self.quitButton["text"] = "Quit" <NEW_LINE> self.quitButton["command"] = self.quit <NEW_LINE> self.quitButton.pack({"side": "left"}) | The class MyFrame is the View for a simple program that exemplifies the Model/View/Controller
architecture. This View class is a tkinter.Frame that contains three Buttons, a user-entry field,
and a Label. Two buttons notify the Controller when they are pressed, and the other
Button quits the app. The label displays the converted value. The text field allows a user
to input the number they want to convert to Celsius or Fahrenheit, but all conversions are handled
in the model.
| 62598fab66673b3332c3035d |
class Store(object): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def initialize(): <NEW_LINE> <INDENT> for keys in _nexus_dict.keys(): <NEW_LINE> <INDENT> if keys[1] == const.USERNAME: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> cdb.add_credential(TENANT, keys[0], _nexus_dict[keys[0], const.USERNAME], _nexus_dict[keys[0], const.PASSWORD]) <NEW_LINE> <DEDENT> except cexc.CredentialAlreadyExists: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> @staticmethod <NEW_LINE> def put_credential(cred_name, username, password): <NEW_LINE> <INDENT> cdb.add_credential(TENANT, cred_name, username, password) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def get_username(cred_name): <NEW_LINE> <INDENT> credential = cdb.get_credential_name(TENANT, cred_name) <NEW_LINE> return credential[const.CREDENTIAL_USERNAME] <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def get_password(cred_name): <NEW_LINE> <INDENT> credential = cdb.get_credential_name(TENANT, cred_name) <NEW_LINE> return credential[const.CREDENTIAL_PASSWORD] <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def get_credential(cred_name): <NEW_LINE> <INDENT> cdb.get_credential_name(TENANT, cred_name) <NEW_LINE> return {const.USERNAME: const.CREDENTIAL_USERNAME, const.PASSWORD: const.CREDENTIAL_PASSWORD} <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def delete_credential(cred_name): <NEW_LINE> <INDENT> cdb.remove_credential(TENANT, cred_name) | Credential Store. | 62598fab5166f23b2e24336a |
class UnpolarisedFFTTelescope(FFTTelescope, UnpolarisedFourierTransformTelescope): <NEW_LINE> <INDENT> pass | A base for an unpolarised Fast Fourier transform telescope.
| 62598fab26068e7796d4c8e6 |
class OverflowError(ArithmeticError): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def __new__(S, *more): <NEW_LINE> <INDENT> pass | Result too large to be represented. | 62598fab32920d7e50bc5fe7 |
class Resposta: <NEW_LINE> <INDENT> qtd_docs_por_pagina = 50 <NEW_LINE> def __init__(self, conteudo): <NEW_LINE> <INDENT> self._conteudo = conteudo <NEW_LINE> self._dados = None <NEW_LINE> <DEDENT> @property <NEW_LINE> def conteudo(self): <NEW_LINE> <INDENT> return self._conteudo <NEW_LINE> <DEDENT> @property <NEW_LINE> def dados(self): <NEW_LINE> <INDENT> if not self._dados: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> j = json.loads(self.conteudo) <NEW_LINE> <DEDENT> except TypeError as e: <NEW_LINE> <INDENT> logging.exception( f"Resultado da consulta {self.conteudo}: tipo inválido!" ) <NEW_LINE> <DEDENT> except json.JSONDecodeError as e: <NEW_LINE> <INDENT> logging.exception( f"Resultado da consulta {self.conteudo}: JSON inválido!" ) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._dados = j <NEW_LINE> <DEDENT> <DEDENT> return self._dados <NEW_LINE> <DEDENT> @property <NEW_LINE> def documentos(self): <NEW_LINE> <INDENT> return self.dados.get("docs", []) <NEW_LINE> <DEDENT> @property <NEW_LINE> def total_de_paginas(self): <NEW_LINE> <INDENT> if len(self.documentos): <NEW_LINE> <INDENT> return ceil( self.dados.get("num_docs", 0) / self.qtd_docs_por_pagina ) <NEW_LINE> <DEDENT> return 0 | Conteúdo de página em formato JSON | 62598fab236d856c2adc9406 |
class LinuxConfig(OSConfig): <NEW_LINE> <INDENT> @property <NEW_LINE> def mktxp_user_dir_path(self): <NEW_LINE> <INDENT> return FSHelper.full_path('~/mktxp') | Linux-related config
| 62598fab851cf427c66b824f |
class ReflexAgent(Agent): <NEW_LINE> <INDENT> def getAction(self, gameState): <NEW_LINE> <INDENT> legalMoves = gameState.getLegalActions() <NEW_LINE> scores = [self.evaluationFunction(gameState, action) for action in legalMoves] <NEW_LINE> bestScore = max(scores) <NEW_LINE> bestIndices = [index for index in range(len(scores)) if scores[index] == bestScore] <NEW_LINE> chosenIndex = random.choice(bestIndices) <NEW_LINE> "Add more of your code here if you want to" <NEW_LINE> return legalMoves[chosenIndex] <NEW_LINE> <DEDENT> def evaluationFunction(self, currentGameState, action): <NEW_LINE> <INDENT> successorGameState = currentGameState.generatePacmanSuccessor(action) <NEW_LINE> newPos = successorGameState.getPacmanPosition() <NEW_LINE> newFood = successorGameState.getFood() <NEW_LINE> newGhostStates = successorGameState.getGhostStates() <NEW_LINE> newScaredTimes = [ghostState.scaredTimer for ghostState in newGhostStates] <NEW_LINE> foodlist = sorted(newFood.asList(), key=lambda t: t[1]) <NEW_LINE> foodDistances = [manhattanDistance(i, newPos) for i in foodlist] <NEW_LINE> minFood = min(foodDistances, default=0) <NEW_LINE> ghostDistances = [(manhattanDistance(i, newPos), c) for c, i in enumerate(successorGameState.getGhostPositions())] <NEW_LINE> nearestGhost = min(ghostDistances, default=0, key=lambda t: t[0]) <NEW_LINE> if action == 'Stop' or nearestGhost[0] < 2: <NEW_LINE> <INDENT> return -9999999 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return nearestGhost[0] * (newScaredTimes[nearestGhost[1]] + 1) - minFood * 5 - successorGameState.getNumFood() * 100 | A reflex agent chooses an action at each choice point by examining
its alternatives via a state evaluation function.
The code below is provided as a guide. You are welcome to change
it in any way you see fit, so long as you don't touch our method
headers. | 62598fabfff4ab517ebcd777 |
class Market(object): <NEW_LINE> <INDENT> def current_stats(self): <NEW_LINE> <INDENT> url = 'https://api.coinmarketcap.com/v1/global/?convert=EUR' <NEW_LINE> r = requests.get(url) <NEW_LINE> data = r.json() <NEW_LINE> data['date']=datetime.date.today() <NEW_LINE> return data <NEW_LINE> <DEDENT> def current_ticker(self,limit=100): <NEW_LINE> <INDENT> url = 'https://api.coinmarketcap.com/v1/ticker/?convert=EUR&limit={}'.format(limit) <NEW_LINE> r = requests.get(url) <NEW_LINE> data = r.json() <NEW_LINE> df = pandas.DataFrame(columns=data[0].keys()+['date']) <NEW_LINE> for i in range(len(data)): <NEW_LINE> <INDENT> df.loc[i]=data[i].values()+[datetime.date.today()] <NEW_LINE> <DEDENT> return df <NEW_LINE> <DEDENT> def current_historic_data(self, symbol='ETH', year=datetime.date.today().year): <NEW_LINE> <INDENT> url='http://coinmarketcap.northpole.ro/api/v5/history/{0}_{1}.json'.format(symbol,year) <NEW_LINE> r = requests.get(url) <NEW_LINE> data = r.json() <NEW_LINE> df = pandas.DataFrame(columns=['symbol','date','price','marketCap','availableSupplyNumber','volume24']) <NEW_LINE> for i in range(len(data['history'].values())): <NEW_LINE> <INDENT> row = data['history'].values()[i] <NEW_LINE> df.loc[i]=[ row['symbol'], datetime.datetime.strptime('-'.join(data['history'].keys()[i].split('-')),'%d-%m-%Y').date(), float(row['price']['eur']), float(row['marketCap']['eur']), row['availableSupplyNumber'], float(row['volume24']['btc']), ] <NEW_LINE> <DEDENT> return df.sort_values(by='date') <NEW_LINE> <DEDENT> def full_historic_data(self,symbol='ETH',years=range(datetime.date.today().year-1, datetime.date.today().year+1,1)): <NEW_LINE> <INDENT> return pandas.concat([self.current_historic_data(year=y) for y in years]) | based on coinmarketcap.com api | 62598fab0a50d4780f705370 |
class IO: <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def print_menu(): <NEW_LINE> <INDENT> print('Menu\n\n[l] load Inventory from file\n[a] Add CD\n[i] Display Current Inventory') <NEW_LINE> print('[d] delete CD from Inventory\n[s] Save Inventory to file\n[x] exit\n') <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def menu_choice(): <NEW_LINE> <INDENT> choice = ' ' <NEW_LINE> while choice not in ['l', 'a', 'i', 'd', 's', 'x']: <NEW_LINE> <INDENT> choice = input('Which operation would you like to perform? [l, a, i, d, s or x]: ').lower().strip() <NEW_LINE> <DEDENT> print() <NEW_LINE> return choice <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def user_entry(): <NEW_LINE> <INDENT> while True: <NEW_LINE> <INDENT> strID = input('Enter ID: ').strip() <NEW_LINE> try: <NEW_LINE> <INDENT> val = int(strID) <NEW_LINE> break <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> print('\nIntegers Only!\n') <NEW_LINE> continue <NEW_LINE> <DEDENT> break <NEW_LINE> <DEDENT> intsrtID = int(strID) <NEW_LINE> strTitle = input('What is the CD\'s title? ').strip() <NEW_LINE> stArtist = input('What is the Artist\'s name? ').strip() <NEW_LINE> return intsrtID, strTitle, stArtist <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def show_inventory(table): <NEW_LINE> <INDENT> print('======= The Current Inventory: =======') <NEW_LINE> print('ID\tCD Title (by: Artist)\n') <NEW_LINE> for row in table: <NEW_LINE> <INDENT> print('{}\t{} (by:{})'.format(*row.values())) <NEW_LINE> <DEDENT> print('======================================') | Handling Input / Output | 62598fab55399d3f056264b6 |
class TinyYoloFeature(BaseFeatureExtractor): <NEW_LINE> <INDENT> def __init__(self, input_size): <NEW_LINE> <INDENT> input_image = Input(shape=input_size) <NEW_LINE> x = Conv2D(16, (3, 3), strides=(1, 1), padding='same', name='conv_1', use_bias=False)(input_image) <NEW_LINE> x = BatchNormalization(name='norm_1')(x) <NEW_LINE> x = LeakyReLU(alpha=0.1)(x) <NEW_LINE> x = MaxPooling2D(pool_size=(2, 2))(x) <NEW_LINE> for i in range(0, 4): <NEW_LINE> <INDENT> x = Conv2D(32 * (2 ** i), (3, 3), strides=(1, 1), padding='same', name='conv_' + str(i + 2), use_bias=False)(x) <NEW_LINE> x = BatchNormalization(name='norm_' + str(i + 2))(x) <NEW_LINE> x = LeakyReLU(alpha=0.1)(x) <NEW_LINE> x = MaxPooling2D(pool_size=(2, 2))(x) <NEW_LINE> <DEDENT> x = Conv2D(512, (3, 3), strides=(1, 1), padding='same', name='conv_6', use_bias=False)(x) <NEW_LINE> x = BatchNormalization(name='norm_6')(x) <NEW_LINE> x = LeakyReLU(alpha=0.1)(x) <NEW_LINE> x = MaxPooling2D(pool_size=(2, 2), strides=(1, 1), padding='same')(x) <NEW_LINE> for i in range(0, 2): <NEW_LINE> <INDENT> x = Conv2D(1024, (3, 3), strides=(1, 1), padding='same', name='conv_' + str(i + 7), use_bias=False)(x) <NEW_LINE> x = BatchNormalization(name='norm_' + str(i + 7))(x) <NEW_LINE> x = LeakyReLU(alpha=0.1)(x) <NEW_LINE> <DEDENT> self.feature_extractor = Model(input_image, x, name='Tiny_YOLO_backend') <NEW_LINE> if input_size[2] == 3: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> print("Loading pretrained weights: " + TINY_YOLO_BACKEND_PATH) <NEW_LINE> self.feature_extractor.load_weights(TINY_YOLO_BACKEND_PATH) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> print("Unable to load backend weights. Using a fresh model") <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> print('pre trained weights are available just for RGB network.') <NEW_LINE> <DEDENT> <DEDENT> def normalize(self, image): <NEW_LINE> <INDENT> return image / 255. | docstring for ClassName | 62598fab7d847024c075c356 |
class SoftmaxLayer(object): <NEW_LINE> <INDENT> def __init__(self, input, n_out, y): <NEW_LINE> <INDENT> n_in = input.get_shape()[1].value <NEW_LINE> self.input = input <NEW_LINE> r = 4*np.sqrt(6.0/(n_in + n_out)) <NEW_LINE> w = tf.Variable(tf.random_uniform([n_in, n_out], minval=-r, maxval=r)) <NEW_LINE> b = tf.Variable(tf.zeros([n_out]), name='b') <NEW_LINE> pred = tf.add(tf.matmul(input, w), b) <NEW_LINE> cost = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(pred, y)) <NEW_LINE> correct_pred = tf.equal(tf.argmax(pred, 1), tf.argmax(y, 1)) <NEW_LINE> self.accuracy = tf.reduce_mean(tf.cast(correct_pred, tf.float32)) <NEW_LINE> self.y = y <NEW_LINE> self.w = w <NEW_LINE> self.b = b <NEW_LINE> self.cost = cost <NEW_LINE> self.params= [w] | Softmax layer for classification
Parameters
----------
input: Tensor
The output from the last layer
n_out: int
Number of labels
y: numpy array
True label for the data | 62598fab7c178a314d78d430 |
class WeatherAPI(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.days = 4 <NEW_LINE> self.url = '{0}?q={1}&format=json&num_of_days={2}&key={3}'.format( settings.WEATHER_API_URL, settings.WEATHER_CITY, self.days, settings.WEATHER_API_KEY) <NEW_LINE> <DEDENT> def _api_response(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> weather = requests.get(self.url) <NEW_LINE> if weather.status_code == 200: <NEW_LINE> <INDENT> data = weather.json().get('data', {}) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ApiRequestException <NEW_LINE> <DEDENT> <DEDENT> except: <NEW_LINE> <INDENT> raise APIException <NEW_LINE> <DEDENT> return data <NEW_LINE> <DEDENT> def widget(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> try: <NEW_LINE> <INDENT> data = self._api_response() <NEW_LINE> <DEDENT> except APIException: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> current_condition = data['current_condition'][0] <NEW_LINE> result['city_name'] = get_city_name(data, settings.WEATHER_CITY_TRANS) <NEW_LINE> result['icon_id'] = get_icon_id(current_condition, True) <NEW_LINE> result['temperature'] = get_temperature(current_condition) <NEW_LINE> <DEDENT> except (KeyError, IndexError, TemperatureException): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> result['forecast'] = [] <NEW_LINE> if 'weather' in data: <NEW_LINE> <INDENT> for day_condition in data['weather']: <NEW_LINE> <INDENT> buf = {} <NEW_LINE> try: <NEW_LINE> <INDENT> buf['day_of_week'] = get_day_of_week(day_condition) <NEW_LINE> buf['icon_id'] = get_icon_id(day_condition) <NEW_LINE> buf['temperature'] = get_temperature(day_condition) <NEW_LINE> <DEDENT> except (DayOfWeekException, TemperatureException): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> result['forecast'].append(buf) <NEW_LINE> <DEDENT> <DEDENT> return result | Weather proxy | 62598fab67a9b606de545f5f |
class HasObjectState(object): <NEW_LINE> <INDENT> _publish_attrs = [ PublishOnly('os_state'), ] <NEW_LINE> _fulltext_attrs = ["os_state"] <NEW_LINE> _aliases = { "os_state": { "display_name": "Review State", "mandatory": False } } <NEW_LINE> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> self._skip_os_state_update = False <NEW_LINE> super(HasObjectState, self).__init__(*args, **kwargs) <NEW_LINE> <DEDENT> @declared_attr <NEW_LINE> def os_state(cls): <NEW_LINE> <INDENT> return deferred(db.Column(db.String, nullable=False, default='Unreviewed'), cls.__name__) <NEW_LINE> <DEDENT> def validate_os_state(self): <NEW_LINE> <INDENT> if getattr(self, '_set_reviewed_state', False): <NEW_LINE> <INDENT> self.os_state = 'Reviewed' <NEW_LINE> <DEDENT> elif self.os_state != 'Unreviewed': <NEW_LINE> <INDENT> self.os_state = 'Unreviewed' <NEW_LINE> <DEDENT> <DEDENT> def set_reviewed_state(self): <NEW_LINE> <INDENT> self.os_state = 'Reviewed' <NEW_LINE> self._set_reviewed_state = True | Has Object State Mixin | 62598fab7047854f4633f36c |
class ValidationOpinionListView(LoginRequiredMixin, PermissionRequiredMixin, ListView): <NEW_LINE> <INDENT> permissions = ["tutorialv2.change_validation"] <NEW_LINE> template_name = "tutorialv2/validation/opinions.html" <NEW_LINE> context_object_name = "contents" <NEW_LINE> subcategory = None <NEW_LINE> def get_queryset(self): <NEW_LINE> <INDENT> return ( PublishableContent.objects.filter(type="OPINION", sha_public__isnull=False) .exclude(sha_picked=F("sha_public")) .exclude(pk__in=PickListOperation.objects.filter(is_effective=True).values_list("content__pk", flat=True)) ) | List the validations, with possibilities of filters | 62598fab442bda511e95c3ea |
class LptsPifibEnum(Enum): <NEW_LINE> <INDENT> isis = 0 <NEW_LINE> ipv4_frag = 1 <NEW_LINE> ipv4_echo = 2 <NEW_LINE> ipv4_any = 3 <NEW_LINE> ipv6_frag = 4 <NEW_LINE> ipv6_echo = 5 <NEW_LINE> ipv6_nd = 6 <NEW_LINE> ipv6_any = 7 <NEW_LINE> bfd_any = 8 <NEW_LINE> all = 9 <NEW_LINE> @staticmethod <NEW_LINE> def _meta_info(): <NEW_LINE> <INDENT> from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_lpts_pre_ifib_oper as meta <NEW_LINE> return meta._meta_table['LptsPifibEnum'] | LptsPifibEnum
Lpts pifib
.. data:: isis = 0
ISIS packets
.. data:: ipv4_frag = 1
IPv4 fragmented packets
.. data:: ipv4_echo = 2
IPv4 ICMP Echo packets
.. data:: ipv4_any = 3
All IPv4 packets
.. data:: ipv6_frag = 4
IPv6 fragmented packets
.. data:: ipv6_echo = 5
IPv6 ICMP Echo packets
.. data:: ipv6_nd = 6
IPv6 ND packets
.. data:: ipv6_any = 7
All IPv6 packets
.. data:: bfd_any = 8
BFD packets
.. data:: all = 9
All packets | 62598fabadb09d7d5dc0a51e |
class MyMplCanvas(FigureCanvas): <NEW_LINE> <INDENT> def __init__(self,q_list,logi_list, parent=None, width=7, height=7, dpi=100): <NEW_LINE> <INDENT> fig = Figure(figsize=(width, height), dpi=dpi) <NEW_LINE> self.axes = fig.add_subplot(111) <NEW_LINE> FigureCanvas.__init__(self, fig) <NEW_LINE> self.q_list = q_list <NEW_LINE> self.logi_list = logi_list <NEW_LINE> self.compute_initial_figure() <NEW_LINE> self.setParent(parent) <NEW_LINE> FigureCanvas.setSizePolicy(self, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Expanding) <NEW_LINE> FigureCanvas.updateGeometry(self) <NEW_LINE> <DEDENT> def compute_initial_figure(self): <NEW_LINE> <INDENT> q = self.q_list <NEW_LINE> i = self.logi_list <NEW_LINE> qi_line, = self.axes.plot(q,i,color="lightcoral",linestyle='-',linewidth=2.0,label="q-Intensity(log)") <NEW_LINE> self.axes.grid(True, color='gray') <NEW_LINE> self.axes.set_xlabel("q") <NEW_LINE> self.axes.set_ylabel('intensity(log)') <NEW_LINE> self.axes.legend(handles=[qi_line]) | Ultimately, this is a QWidget (as well as a FigureCanvasAgg, etc.). | 62598fab2ae34c7f260ab075 |
class ImagePanelBasic(wx.Panel): <NEW_LINE> <INDENT> __bitmapCache = {} <NEW_LINE> def __init__(self, tile, *args, **kw): <NEW_LINE> <INDENT> self.backgroundColour = wx.WHITE <NEW_LINE> from Tribler.Main.vwxGUI.GuiUtility import GUIUtility <NEW_LINE> self.guiUtility = GUIUtility.getInstance() <NEW_LINE> self.xpos = self.ypos = 0 <NEW_LINE> self.tile = tile <NEW_LINE> self.bitmap = None <NEW_LINE> if len(args) == 0: <NEW_LINE> <INDENT> pre = wx.PrePanel() <NEW_LINE> self.PostCreate(pre) <NEW_LINE> self.Bind(wx.EVT_WINDOW_CREATE, self.OnCreate) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> wx.Panel.__init__(self, *args, **kw) <NEW_LINE> self._PostInit() <NEW_LINE> <DEDENT> <DEDENT> def OnCreate(self, event): <NEW_LINE> <INDENT> self.Unbind(wx.EVT_WINDOW_CREATE) <NEW_LINE> wx.CallAfter(self._PostInit) <NEW_LINE> event.Skip() <NEW_LINE> return True <NEW_LINE> <DEDENT> def _PostInit(self): <NEW_LINE> <INDENT> if self.bitmap is None: <NEW_LINE> <INDENT> self.loadBitmap() <NEW_LINE> <DEDENT> wx.EVT_PAINT(self, self.OnPaint) <NEW_LINE> self.Refresh() <NEW_LINE> <DEDENT> def setBackground(self, colour): <NEW_LINE> <INDENT> self.backgroundColour = colour <NEW_LINE> <DEDENT> def loadBitmap(self, name = None): <NEW_LINE> <INDENT> self.bitmap = None <NEW_LINE> self.imagedir = os.path.join(self.guiUtility.vwxGUI_path, 'images') <NEW_LINE> if name is None: <NEW_LINE> <INDENT> self.bitmapPath = os.path.join(self.imagedir, self.GetName()+'.png') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.bitmapPath = os.path.join(self.imagedir, name) <NEW_LINE> <DEDENT> if os.path.isfile(self.bitmapPath): <NEW_LINE> <INDENT> self.setBitmap(wx.Bitmap(self.bitmapPath, wx.BITMAP_TYPE_ANY)) <NEW_LINE> <DEDENT> elif DEBUG: <NEW_LINE> <INDENT> print >>sys.stderr,'bgPanel: Could not load image: %s' % self.bitmapPath <NEW_LINE> <DEDENT> <DEDENT> def setBitmap(self, bitmap): <NEW_LINE> <INDENT> self.bitmap = bitmap <NEW_LINE> w, h = self.GetSize() <NEW_LINE> iw, ih = self.bitmap.GetSize() <NEW_LINE> 
self.xpos, self.ypos = (w-iw)/2, (h-ih)/2 <NEW_LINE> self.Refresh() <NEW_LINE> <DEDENT> def OnPaint(self, evt): <NEW_LINE> <INDENT> obj = evt.GetEventObject() <NEW_LINE> dc = wx.BufferedPaintDC(obj) <NEW_LINE> if self.bitmap: <NEW_LINE> <INDENT> if self.tile: <NEW_LINE> <INDENT> dc.SetPen(wx.TRANSPARENT_PEN) <NEW_LINE> dc.SetBrush(wx.BrushFromBitmap(self.bitmap)) <NEW_LINE> w, h = self.GetClientSize() <NEW_LINE> dc.DrawRectangle(0, 0, w, h) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> dc.SetBackground(wx.Brush(self.backgroundColour)) <NEW_LINE> dc.Clear() <NEW_LINE> dc.DrawBitmap(self.bitmap, self.xpos, self.ypos, True) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> dc.SetBackground(wx.Brush(self.backgroundColour)) <NEW_LINE> dc.Clear() | Panel with automatic backgroundimage control. | 62598fab4f6381625f199488 |
class IDatabaseOpenedEvent(interface.Interface): <NEW_LINE> <INDENT> database = interface.Attribute("The main database.") | The main database has been opened. | 62598fab0a50d4780f705371 |
class GetPrivEscSvcInfo(Step): <NEW_LINE> <INDENT> attack_mapping = [('T1007', 'Discovery'), ('T1106', 'Execution')] <NEW_LINE> display_name = "privilege_escalation(service)" <NEW_LINE> summary = "Use PowerUp to find potential service-based privilege escalation vectors" <NEW_LINE> preconditions = [("rat", OPRat({"elevated": False})), ("host", OPHost(OPVar("rat.host")))] <NEW_LINE> postconditions = [("service_g", OPService({"host": OPVar("host"), "user_context": OPVar("rat.username")}))] <NEW_LINE> @staticmethod <NEW_LINE> def description(): <NEW_LINE> <INDENT> return "Looking for potential privilege escalation vectors related to services" <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> async def action(operation, rat, host, service_g): <NEW_LINE> <INDENT> unquoted = await operation.execute_powershell(rat, "powerup", PSFunction("Get-ServiceUnquoted"), parsers.powerup.get_serviceunquoted) <NEW_LINE> for parsed_service in unquoted: <NEW_LINE> <INDENT> service_dict = {"name": parsed_service['name'], "bin_path": parsed_service['bin_path'], 'service_start_name': parsed_service['service_start_name'], 'can_restart': parsed_service['can_restart'], 'modifiable_paths': parsed_service['modifiable_paths'], 'vulnerability': 'unquoted', 'revert_command': ""} <NEW_LINE> await service_g(service_dict) <NEW_LINE> <DEDENT> fileperms = await operation.execute_powershell(rat, "powerup", PSFunction("Get-ModifiableServiceFile"), parsers.powerup.get_modifiableservicefile) <NEW_LINE> for parsed_service in fileperms: <NEW_LINE> <INDENT> service_dict = {'name': parsed_service['name'], 'bin_path': parsed_service['bin_path'], 'service_start_name': parsed_service['service_start_name'], 'can_restart': parsed_service['can_restart'], 'modifiable_paths': parsed_service['modifiable_paths'], 'vulnerability': 'file', 'revert_command': ""} <NEW_LINE> await service_g(service_dict) <NEW_LINE> <DEDENT> mod_bin_path = await operation.execute_powershell(rat, "powerup", PSFunction("Get-ModifiableService"), 
parsers.powerup.get_modifiableservice) <NEW_LINE> for parsed_service in mod_bin_path: <NEW_LINE> <INDENT> service_dict = {'name': parsed_service['name'], 'bin_path': parsed_service['bin_path'], 'service_start_name': parsed_service['service_start_name'], 'can_restart': parsed_service['can_restart'], 'vulnerability': 'bin_path', 'revert_command': ""} <NEW_LINE> await service_g(service_dict) <NEW_LINE> <DEDENT> return True | Description:
This step utilises the PowerUp powershell script to identify potential service-based privilege
escalation opportunities on a target machine.
Requirements:
Requires a non-elevated RAT. This step identifies unquoted service paths, modifiable service targets,
and modifiable services for privilege escalation purposes. | 62598fab796e427e5384e727 |
class OrgHomeView(View): <NEW_LINE> <INDENT> def get(self, request, org_id, *args, **kwargs): <NEW_LINE> <INDENT> course_org = CourseOrg.objects.get(id=int(org_id)) <NEW_LINE> course_org.click_nums += 1 <NEW_LINE> course_org.save() <NEW_LINE> current_page = "home" <NEW_LINE> all_courses = course_org.course_set.all()[:3] <NEW_LINE> all_teacher = course_org.teacher_set.all()[:1] <NEW_LINE> has_fav = False <NEW_LINE> if request.user.is_authenticated: <NEW_LINE> <INDENT> if UserFavorite.objects.filter(user=request.user, fav_id=course_org.id, fav_type=2): <NEW_LINE> <INDENT> has_fav = True <NEW_LINE> <DEDENT> <DEDENT> return render(request,"org-detail-homepage.html",{ 'all_courses':all_courses, 'all_teacher':all_teacher, 'course_org':course_org, 'current_page':current_page, "has_fav": has_fav }) | 显示机构的详细页面 | 62598fabe1aae11d1e7ce7ed |
class SegmentMasks(col.defaultdict): <NEW_LINE> <INDENT> def __init__(self, seg): <NEW_LINE> <INDENT> self.seg = seg <NEW_LINE> col.defaultdict.__init__(self, None) <NEW_LINE> <DEDENT> def __missing__(self, label): <NEW_LINE> <INDENT> if label != 0: <NEW_LINE> <INDENT> self.seg.check_label(label) <NEW_LINE> <DEDENT> return self.seg.sliced(label) != label | Container for segment masks | 62598fab4c3428357761a24d |
class Pet(Animal): <NEW_LINE> <INDENT> _validation = { 'name': {'readonly': True}, } <NEW_LINE> _attribute_map = { 'ani_type': {'key': 'aniType', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, } <NEW_LINE> def __init__( self, **kwargs ): <NEW_LINE> <INDENT> super(Pet, self).__init__(**kwargs) <NEW_LINE> self.name = None | Pet.
Variables are only populated by the server, and will be ignored when sending a request.
:param ani_type:
:type ani_type: str
:ivar name: Gets the Pet by id.
:vartype name: str | 62598faba8ecb033258711a4 |
class InterBoundaryIter(object): <NEW_LINE> <INDENT> def __init__(self, stream, boundary): <NEW_LINE> <INDENT> self._stream = stream <NEW_LINE> self._boundary = boundary <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> def next(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return LazyStream(BoundaryIter(self._stream, self._boundary)) <NEW_LINE> <DEDENT> except InputStreamExhausted: <NEW_LINE> <INDENT> raise StopIteration() | A Producer that will iterate over boundaries. | 62598fabbd1bec0571e1508d |
class Option(Action): <NEW_LINE> <INDENT> body = u'<option tal:attributes="%(attributes)s">${content}</option>' <NEW_LINE> def update(self): <NEW_LINE> <INDENT> if 'value' not in self.attrs: <NEW_LINE> <INDENT> self.attrs['value'] = self.rcontext.get('value', None) | An action rendered as a select option::
>>> from webob import Request
>>> request = Request.blank('/')
>>> action = Option('myaction',
... value='request.application_url',
... content=_('Click here'))
Rendering::
>>> action.render(request)
u'<option id="myaction" value="http://localhost">Click here</option>' | 62598fab2c8b7c6e89bd3759 |
class GenerateArgyleTokenView(APIView): <NEW_LINE> <INDENT> permission_classes = (AllowAny,) <NEW_LINE> def post(self, request, uuid): <NEW_LINE> <INDENT> user_link = UserLink.objects.get(uuid=uuid) <NEW_LINE> params = {'user': str(user_link.argyle_uuid)} <NEW_LINE> token = generate_argyle_token(params) <NEW_LINE> if token: <NEW_LINE> <INDENT> user_link.argyle_token = token <NEW_LINE> user_link.save() <NEW_LINE> <DEDENT> return Response(status=status.HTTP_200_OK, data={ 'token': token }) | Generates an argyle token for the UserLink instance.
- - - - - - - - - -
Expected URL format: ((API_URL))/generator/argyle-token//((userUUID))/
Method: POST
- - - - - - - - - -
Example of returned data:
Data: {
'token': 'eyK1eXAiOiJKV1QiLCJhSjciOiJIUzI1NiJ9.eyJ0b2tlbl90eXBlIjoiYWNjZXNzIiwiZXhwIjoxNjA5NTI5Mjc5LCJqdGkiOiI5Yjk3ZjY0YWFkYTk0ODM0OWY0ZmY4MWY1YjgyNzdiNCIsInVzZXJfaWQiOiJiYzdlY2ZjYy1jZmY4LTQ2YzItYjljNS02Mjc3Y2ZhZjNiZGIiLCJjbGllbnRfaWQiOiJhM2UxMjBmNi0yNGU5LTRlZjctYTMxMy04MDEzNDI3NGExYWIifQ.DAA7mkkTMsk95lHYlr4dRMETRZUWeCmv8aTsDQTnAtQ'
} | 62598fab66673b3332c3035f |
class FailedEpisodes(tf_metric.TFStepMetric): <NEW_LINE> <INDENT> def __init__(self, failure_function, name='FailedEpisodes', prefix='Metrics', dtype=tf.int64): <NEW_LINE> <INDENT> super(FailedEpisodes, self).__init__(name=name, prefix=prefix) <NEW_LINE> self.dtype = dtype <NEW_LINE> self._failure_function = failure_function <NEW_LINE> self.number_failed_episodes = common.create_variable( initial_value=0, dtype=self.dtype, shape=(), name='number_failed_episodes') <NEW_LINE> <DEDENT> def call(self, trajectory): <NEW_LINE> <INDENT> num_failed_episodes = tf.cast( self._failure_function(trajectory), self.dtype) <NEW_LINE> num_failed_episodes = tf.reduce_sum(input_tensor=num_failed_episodes) <NEW_LINE> self.number_failed_episodes.assign_add(num_failed_episodes) <NEW_LINE> return trajectory <NEW_LINE> <DEDENT> def result(self): <NEW_LINE> <INDENT> return tf.identity(self.number_failed_episodes, name=self.name) <NEW_LINE> <DEDENT> @common.function <NEW_LINE> def reset(self): <NEW_LINE> <INDENT> self.number_failed_episodes.assign(0) | Counts the number of episodes ending in failure / requiring human intervention. | 62598faba17c0f6771d5c1c9 |
class Animal: <NEW_LINE> <INDENT> def __init__(self, name, weight, location="Earth", diet_type="Food", poisonous="False"): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.weight = weight <NEW_LINE> self.location = location <NEW_LINE> self.diet_type = diet_type <NEW_LINE> self.poisonous = poisonous <NEW_LINE> <DEDENT> def eat(self, food): <NEW_LINE> <INDENT> return "Huge fan of that " + food <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> return "Vroom, Vroom, I go quick" | General Representation of Animals | 62598fab5fc7496912d4824c |
class Axis(): <NEW_LINE> <INDENT> def __init__(self, lo, hi): <NEW_LINE> <INDENT> self.lo = np.asarray(lo) if lo is not None else None <NEW_LINE> self.hi = np.asarray(hi) if hi is not None else None <NEW_LINE> <DEDENT> @property <NEW_LINE> def is_empty(self): <NEW_LINE> <INDENT> return self.lo is None or not self.lo.size <NEW_LINE> <DEDENT> @property <NEW_LINE> def is_integrated(self): <NEW_LINE> <INDENT> return self.hi is not None and self.hi.size > 0 <NEW_LINE> <DEDENT> @property <NEW_LINE> def is_ascending(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self.lo[-1] > self.lo[0] <NEW_LINE> <DEDENT> except TypeError: <NEW_LINE> <INDENT> raise ValueError("{} does not seem to be an array".format(self.lo)) <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def start(self): <NEW_LINE> <INDENT> if self.is_ascending: <NEW_LINE> <INDENT> return self.lo[0] <NEW_LINE> <DEDENT> return self.lo[-1] <NEW_LINE> <DEDENT> @property <NEW_LINE> def end(self): <NEW_LINE> <INDENT> if self.is_ascending and self.is_integrated: <NEW_LINE> <INDENT> return self.hi[-1] <NEW_LINE> <DEDENT> if self.is_ascending and not self.is_integrated: <NEW_LINE> <INDENT> return self.lo[-1] <NEW_LINE> <DEDENT> if self.is_integrated: <NEW_LINE> <INDENT> return self.hi[0] <NEW_LINE> <DEDENT> return self.lo[0] <NEW_LINE> <DEDENT> @property <NEW_LINE> def size(self): <NEW_LINE> <INDENT> return self.lo.size <NEW_LINE> <DEDENT> def overlaps(self, other): <NEW_LINE> <INDENT> num = max(0, min(self.end, other.end) - max(self.start, other.start)) <NEW_LINE> return bool(num != 0) | Represent the axes of a N-D object.
This supports both "integrated" and "non-integrated" (point)
datasets.
Parameters
----------
lo : array_like or None
The starting point of the axis. If `lo` is `None` or empty
then the data axis is said to be empty. The axis can be in
ascending or descending order.
hi : array_like or None
The ending point of the axis for "integrated" axes, i.e. those
which have a low and high edge. | 62598fab3d592f4c4edbae60 |
class BeamError(Exception): <NEW_LINE> <INDENT> pass | Base class for all Beam errors. | 62598fab7b25080760ed7442 |
class TestConfig: <NEW_LINE> <INDENT> @mock.patch('twod.twod._Data') <NEW_LINE> def test_config_valid(self, mock_data, capsys, monkeypatch, valid_config_path): <NEW_LINE> <INDENT> cls = Twod(valid_config_path) <NEW_LINE> assert cls.interval == 9000 <NEW_LINE> <DEDENT> @mock.patch('twod.twod._Data') <NEW_LINE> def test_config_missing_username(self, mock_data, capsys, monkeypatch, missing_username_config_path): <NEW_LINE> <INDENT> with pytest.raises(SystemExit): <NEW_LINE> <INDENT> Twod(missing_username_config_path) <NEW_LINE> <DEDENT> out, err = capsys.readouterr() <NEW_LINE> assert "No option 'user'" in err <NEW_LINE> <DEDENT> @mock.patch('twod.twod._Data') <NEW_LINE> def test_config_missing_section(self, mock_data, capsys, monkeypatch, missing_section_config_path): <NEW_LINE> <INDENT> with pytest.raises(SystemExit): <NEW_LINE> <INDENT> Twod(missing_section_config_path) <NEW_LINE> <DEDENT> out, err = capsys.readouterr() <NEW_LINE> assert "No section: 'ip_service'" in err <NEW_LINE> <DEDENT> @mock.patch('twod.twod._Data') <NEW_LINE> def test_config_invalid_mode(self, mock_data, capsys, monkeypatch, invalid_mode_config_path): <NEW_LINE> <INDENT> with pytest.raises(SystemExit): <NEW_LINE> <INDENT> Twod(invalid_mode_config_path) <NEW_LINE> <DEDENT> out, err = capsys.readouterr() <NEW_LINE> assert "Invalid mode: 'invalid_mode'" in err <NEW_LINE> <DEDENT> @mock.patch('twod.twod._Data') <NEW_LINE> def test_config_invalid_url(self, mock_data, capsys, monkeypatch, invalid_url_config_path): <NEW_LINE> <INDENT> with pytest.raises(SystemExit): <NEW_LINE> <INDENT> Twod(invalid_url_config_path) <NEW_LINE> <DEDENT> out, err = capsys.readouterr() <NEW_LINE> assert "Invalid URL: 'invalid_url'" in err | Test config parsing. | 62598fab9c8ee8231304013b |
class TestRegress(RegressionTestCase): <NEW_LINE> <INDENT> def test_linearRegress(self): <NEW_LINE> <INDENT> data = Series(self.sc.parallelize([(1, array([1.5, 2.3, 6.2, 5.1, 3.4, 2.1]))])) <NEW_LINE> x = array([ array([1, 0, 0, 0, 0, 0]), array([0, 1, 0, 0, 0, 0]) ]) <NEW_LINE> model = RegressionModel.load(x, "linear") <NEW_LINE> result = model.fit(data) <NEW_LINE> assert(allclose(result.select('betas').values().collect()[0], array([-2.7, -1.9]))) <NEW_LINE> assert(allclose(result.select('stats').values().collect()[0], array([0.42785299]))) <NEW_LINE> assert(allclose(result.select('resid').values().collect()[0], array([0, 0, 2, 0.9, -0.8, -2.1]))) <NEW_LINE> assert(allclose(result.select('betas').index, array([0, 1]))) <NEW_LINE> assert(allclose(result.select('resid').index, array([0, 1, 2, 3, 4, 5]))) <NEW_LINE> assert(result.select('stats').index == ['stats']) <NEW_LINE> <DEDENT> def test_bilinearRegress(self): <NEW_LINE> <INDENT> data = Series(self.sc.parallelize([(1, array([1.5, 2.3, 6.2, 5.1, 3.4, 2.1]))])) <NEW_LINE> x1 = array([ array([1, 0, 1, 0, 1, 0]), array([0, 1, 0, 1, 0, 1]) ]) <NEW_LINE> x2 = array([ array([1, 1, 0, 0, 0, 0]), array([0, 0, 1, 1, 0, 0]), array([0, 0, 0, 0, 1, 1]) ]) <NEW_LINE> model = RegressionModel.load((x1, x2), "bilinear") <NEW_LINE> result = model.fit(data) <NEW_LINE> tol = 1E-4 <NEW_LINE> assert(allclose(result.select('betas').values().collect()[0], array([-3.1249, 5.6875, 0.4375]), atol=tol)) <NEW_LINE> assert(allclose(result.select('stats').values().collect()[0], array([0.6735]), tol)) <NEW_LINE> assert(allclose(result.select('resid').values().collect()[0], array([0, -0.8666, 0, 1.9333, 0, -1.0666]), atol=tol)) | Test accuracy of linear and bilinear regression
models by building small design matrices and testing
on small data against ground truth
(ground truth derived by doing the algebra in MATLAB) | 62598fab01c39578d7f12d14 |
class HTCondorJobStatus(enum.IntEnum): <NEW_LINE> <INDENT> idle = 1 <NEW_LINE> running = 2 <NEW_LINE> removed = 3 <NEW_LINE> completed = 4 <NEW_LINE> held = 5 <NEW_LINE> transferring_output = 6 <NEW_LINE> suspended = 7 <NEW_LINE> failed = 999 | See https://htcondor.readthedocs.io/en/latest/classad-attributes/job-classad-attributes.html | 62598fab0c0af96317c56317 |
class AnalyticsDosviscommonGeneratereport( AnalyticsDosviscommonGeneratereportSchema ): <NEW_LINE> <INDENT> cli_command = "/mgmt/tm/analytics/dos-vis-common/generate-report" <NEW_LINE> def rest(self): <NEW_LINE> <INDENT> response = self.device.get(self.cli_command) <NEW_LINE> response_json = response.json() <NEW_LINE> if not response_json: <NEW_LINE> <INDENT> return {} <NEW_LINE> <DEDENT> return response_json | To F5 resource for /mgmt/tm/analytics/dos-vis-common/generate-report
| 62598fab7cff6e4e811b59c0 |
@registry.register_problem <NEW_LINE> class TranslateEnfrWmtMulti64kPacked1k(TranslateEnfrWmtMulti64k): <NEW_LINE> <INDENT> @property <NEW_LINE> def packed_length(self): <NEW_LINE> <INDENT> return 1024 <NEW_LINE> <DEDENT> @property <NEW_LINE> def num_training_examples(self): <NEW_LINE> <INDENT> return 1760600 <NEW_LINE> <DEDENT> @property <NEW_LINE> def inputs_prefix(self): <NEW_LINE> <INDENT> return "translate English French " <NEW_LINE> <DEDENT> @property <NEW_LINE> def targets_prefix(self): <NEW_LINE> <INDENT> return "translate French English " | Translation with muli-lingual vocabulary. | 62598fab7d847024c075c358 |
class ResourceSkuRestrictions(msrest.serialization.Model): <NEW_LINE> <INDENT> _validation = { 'type': {'readonly': True}, 'values': {'readonly': True}, 'restriction_info': {'readonly': True}, 'reason_code': {'readonly': True}, } <NEW_LINE> _attribute_map = { 'type': {'key': 'type', 'type': 'str'}, 'values': {'key': 'values', 'type': '[str]'}, 'restriction_info': {'key': 'restrictionInfo', 'type': 'ResourceSkuRestrictionInfo'}, 'reason_code': {'key': 'reasonCode', 'type': 'str'}, } <NEW_LINE> def __init__( self, **kwargs ): <NEW_LINE> <INDENT> super(ResourceSkuRestrictions, self).__init__(**kwargs) <NEW_LINE> self.type = None <NEW_LINE> self.values = None <NEW_LINE> self.restriction_info = None <NEW_LINE> self.reason_code = None | Describes scaling information of a SKU.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar type: The type of restrictions. Possible values include: "Location", "Zone".
:vartype type: str or ~storage_pool_management.models.ResourceSkuRestrictionsType
:ivar values: The value of restrictions. If the restriction type is set to location. This would
be different locations where the SKU is restricted.
:vartype values: list[str]
:ivar restriction_info: The information about the restriction where the SKU cannot be used.
:vartype restriction_info: ~storage_pool_management.models.ResourceSkuRestrictionInfo
:ivar reason_code: The reason for restriction. Possible values include: "QuotaId",
"NotAvailableForSubscription".
:vartype reason_code: str or ~storage_pool_management.models.ResourceSkuRestrictionsReasonCode | 62598fab8da39b475be03179 |
class SetCorpFlag: <NEW_LINE> <INDENT> def __call__(self, sample): <NEW_LINE> <INDENT> sample['corpflag'] = True <NEW_LINE> return sample | Adds a deterministic flag to the sample such that subsequent transforms
use a fixed random seed where applicable. Used for test | 62598fab3346ee7daa337614 |
class ThreeNeighbors: <NEW_LINE> <INDENT> def __init__(self, A, B, C): <NEW_LINE> <INDENT> if not (isinstance(A, PartPoint) or (isinstance(A, FixedPoint))) and (isinstance(B, PartPoint) or (isinstance(B, FixedPoint))) and (isinstance(C, PartPoint) or (isinstance(C, FixedPoint))): <NEW_LINE> <INDENT> TypeError( "Arguments to ThreeNeighbors must be FixedPoint or PartPoint.") <NEW_LINE> <DEDENT> self.A = A <NEW_LINE> self.B = B <NEW_LINE> self.C = C <NEW_LINE> <DEDENT> def outer_normal(self): <NEW_LINE> <INDENT> outer_normal = get_triangle_normal( self.A.get_pos(), self.B.get_pos(), self.C.get_pos()) <NEW_LINE> return outer_normal | Represents three best spatially distributed neighbors of a point in a mesh. | 62598fabaad79263cf42e768 |
class SessionWizardView(WizardView): <NEW_LINE> <INDENT> storage_name = 'formtools.wizard.storage.session.SessionStorage' | A WizardView with pre-configured SessionStorage backend. | 62598fab7047854f4633f36e |
class Adagrad(Optimizer): <NEW_LINE> <INDENT> def __init__(self, lr=0.01, epsilon=1e-8, decay=0., **kwargs): <NEW_LINE> <INDENT> super(Adagrad, self).__init__(**kwargs) <NEW_LINE> with K.name_scope(self.__class__.__name__): <NEW_LINE> <INDENT> self.lr = K.variable(lr, name='lr') <NEW_LINE> self.decay = K.variable(decay, name='decay') <NEW_LINE> self.iterations = K.variable(0, dtype='int64', name='iterations') <NEW_LINE> <DEDENT> self.epsilon = epsilon <NEW_LINE> self.initial_decay = decay <NEW_LINE> <DEDENT> @interfaces.legacy_get_updates_support <NEW_LINE> def get_updates(self, loss, params): <NEW_LINE> <INDENT> grads = self.get_gradients(loss, params) <NEW_LINE> shapes = [K.int_shape(p) for p in params] <NEW_LINE> accumulators = [K.zeros(shape) for shape in shapes] <NEW_LINE> self.weights = accumulators <NEW_LINE> self.updates = [K.update_add(self.iterations, 1)] <NEW_LINE> lr = self.lr <NEW_LINE> if self.initial_decay > 0: <NEW_LINE> <INDENT> lr *= (1. / (1. + self.decay * K.cast(self.iterations, K.dtype(self.decay)))) <NEW_LINE> <DEDENT> for p, g, a in zip(params, grads, accumulators): <NEW_LINE> <INDENT> new_a = a + K.square(g) <NEW_LINE> self.updates.append(K.update(a, new_a)) <NEW_LINE> new_p = p - lr * g / (K.sqrt(new_a) + self.epsilon) <NEW_LINE> if getattr(p, 'constraint', None) is not None: <NEW_LINE> <INDENT> new_p = p.constraint(new_p) <NEW_LINE> <DEDENT> self.updates.append(K.update(p, new_p)) <NEW_LINE> <DEDENT> return self.updates <NEW_LINE> <DEDENT> def get_config(self): <NEW_LINE> <INDENT> config = {'lr': float(K.get_value(self.lr)), 'decay': float(K.get_value(self.decay)), 'epsilon': self.epsilon} <NEW_LINE> base_config = super(Adagrad, self).get_config() <NEW_LINE> return dict(list(base_config.items()) + list(config.items())) | Adagrad optimizer.
It is recommended to leave the parameters of this optimizer
at their default values.
# Arguments
lr: float >= 0. Learning rate.
epsilon: float >= 0.
decay: float >= 0. Learning rate decay over each update.
# References
- [Adaptive Subgradient Methods for Online Learning and Stochastic Optimization](http://www.jmlr.org/papers/volume12/duchi11a/duchi11a.pdf) | 62598fab7d847024c075c359 |
class ListResources(Method): <NEW_LINE> <INDENT> interfaces = ['aggregate', 'slicemgr'] <NEW_LINE> accepts = [ Mixed(Parameter(str, "Credential string"), Parameter(type([str]), "List of credentials")), Parameter(dict, "Options") ] <NEW_LINE> returns = Parameter(str, "List of resources") <NEW_LINE> def call(self, creds, options): <NEW_LINE> <INDENT> self.api.logger.info("interface: %s\tmethod-name: %s" % (self.api.interface, self.name)) <NEW_LINE> if not options.get('geni_rspec_version'): <NEW_LINE> <INDENT> if options.get('rspec_version'): <NEW_LINE> <INDENT> options['geni_rspec_version'] = options['rspec_version'] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise SfaInvalidArgument('Must specify an rspec version option. geni_rspec_version cannot be null') <NEW_LINE> <DEDENT> <DEDENT> (speaking_for, _) = urn_to_hrn(options.get('geni_speaking_for')) <NEW_LINE> valid_creds = self.api.auth.checkCredentials(creds, 'listnodes', speaking_for_hrn=speaking_for) <NEW_LINE> origin_hrn = options.get('origin_hrn', None) <NEW_LINE> if not origin_hrn: <NEW_LINE> <INDENT> origin_hrn = Credential(cred=valid_creds[0]).get_gid_caller().get_hrn() <NEW_LINE> <DEDENT> rspec = self.api.manager.ListResources(self.api, creds, options) <NEW_LINE> if self.api.interface in ['aggregate']: <NEW_LINE> <INDENT> chain_name = 'OUTGOING' <NEW_LINE> <DEDENT> elif self.api.interface in ['slicemgr']: <NEW_LINE> <INDENT> chain_name = 'FORWARD-OUTGOING' <NEW_LINE> <DEDENT> self.api.logger.debug("ListResources: sfatables on chain %s"%chain_name) <NEW_LINE> filtered_rspec = run_sfatables(chain_name, '', origin_hrn, rspec) <NEW_LINE> if options.has_key('geni_compressed') and options['geni_compressed'] == True: <NEW_LINE> <INDENT> filtered_rspec = zlib.compress(filtered_rspec).encode('base64') <NEW_LINE> <DEDENT> return filtered_rspec | Returns information about available resources
@param credential list
@param options dictionary
@return string | 62598fab7047854f4633f36f |
class FilterDepartamentTfmForm(forms.Form): <NEW_LINE> <INDENT> search_text = forms.CharField(required=False, widget=forms.TextInput( attrs={'class': 'form-control', 'placeholder': 'Título'} )) <NEW_LINE> formation_project = forms.ModelChoiceField( queryset=Masters.objects.all(), empty_label="Masters", required=False, widget=forms.Select( attrs={'class': 'form-control'} ) ) <NEW_LINE> area = forms.ModelChoiceField( queryset=Areas.objects.all(), empty_label="Area de conocimiento", required=False, widget=forms.Select( attrs={'class': 'form-control'} ) ) <NEW_LINE> tutor = forms.ModelChoiceField( queryset=User.objects.all(), empty_label="Tutor", required=False, widget=forms.Select( attrs={'class': 'form-control'} ) ) <NEW_LINE> status = forms.ChoiceField( widget=forms.Select( attrs={'class': 'form-control'} ), required=False, choices=( ("", "Selecciona el estado de la validación"), (Tfms.NOT_VALIDATED, "❔ No validado"), (Tfms.DEPARTAMENT_VALIDATION, "✔️ Validado"), (Tfms.FAIL_VALIDATION, "❌ Rechazado") ) ) <NEW_LINE> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> self.user = kwargs.pop("user") <NEW_LINE> super(FilterDepartamentTfmForm, self).__init__(*args, **kwargs) <NEW_LINE> self.fields["formation_project"].queryset = Masters.objects.filter( tfms__tutor1__userinfos__departaments=self.user.userinfos.departaments ).distinct() <NEW_LINE> self.fields["area"].queryset = self.user.userinfos.departaments.areas.all() <NEW_LINE> self.fields["tutor"].queryset = User.objects.filter( userinfos__departaments=self.user.userinfos.departaments, groups__name="Teachers" ) | Filtros para el listado de TFMs para departamentos.
Atributos:
search_text(forms.CharField): Input tipo Text para el titulo dle TFM.
formation_project(forms.ModelChoiceField): Selector para la elección de la titulación.
area(forms.ModelChoiceField): Selector para elección del area del tutor que
ha creado el TFM.
tutor(forms.ModelChoiceField): Selector para la elección dle tutor que ha creado el TFM. | 62598fab6aa9bd52df0d4e5e |
class DataDirectoryFilter(load.DirectoryFilter): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def filter_directories(dirs: List[str]) -> List[str]: <NEW_LINE> <INDENT> return dirs | Represents a data directory filter.
The filter is used to | 62598fab656771135c489617 |
class dpp_base(base_ownProcess): <NEW_LINE> <INDENT> def _start_plugin_base(self): <NEW_LINE> <INDENT> return self.cb_initialize_plugin() <NEW_LINE> <DEDENT> def cb_initialize_plugin(self): <NEW_LINE> <INDENT> raise NotImplementedError("Please Implement this method") <NEW_LINE> <DEDENT> def _get_configuration_base(self): <NEW_LINE> <INDENT> config = {} <NEW_LINE> return config <NEW_LINE> <DEDENT> def _get_type(self): <NEW_LINE> <INDENT> return PLUGIN_DPP_IDENTIFIER | This kind of plugin is to process data provided by other plugins. | 62598fabd58c6744b42dc2a1 |
class DlRtmp(Downloader): <NEW_LINE> <INDENT> rtmpdumpEx='rtmpdump' <NEW_LINE> def __init__(self, lienRtmp, swfPlayerUrl, outDir, codeProgramme, timeStamp, navigateur, stopDownloadEvent, progressFnct): <NEW_LINE> <INDENT> self.lienRtmp = lienRtmp <NEW_LINE> self.swfPlayerUrl = swfPlayerUrl <NEW_LINE> super(DlRtmp, self).__init__(outDir, codeProgramme, timeStamp, "t.flv", navigateur, stopDownloadEvent, progressFnct) <NEW_LINE> <DEDENT> def telecharger(self): <NEW_LINE> <INDENT> if not self.checkExternalProgram(self.rtmpdumpEx): <NEW_LINE> <INDENT> logger.warning('Ce script requiert %s' % (self.rtmpdumpEx)) <NEW_LINE> <DEDENT> elif self.rtmpDownload(self.lienRtmp, False) == 0: <NEW_LINE> <INDENT> logger.info('Téléchargement terminé') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> logger.info('Problème réseau ou algo?') <NEW_LINE> <DEDENT> <DEDENT> def rtmpDownload(self, rtmpUrl, swfForceRefresh): <NEW_LINE> <INDENT> logger.debug('→rtmpDownload(%s, %s)' % ( rtmpUrl, swfForceRefresh)) <NEW_LINE> rtmpCmd = '%s --resume --rtmp "%s" --port 1935 --timeout 10' % ( self.rtmpdumpEx, rtmpUrl) <NEW_LINE> if swfForceRefresh: <NEW_LINE> <INDENT> rtmpCmd += ' --swfVfy %s --swfAge 0' % (self.swfPlayerUrl) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> rtmpCmd += ' --swfVfy %s' % (self.swfPlayerUrl) <NEW_LINE> <DEDENT> rtmpCmd += ' -o "%s"' % (self.nomFichier) <NEW_LINE> logger.info(rtmpCmd) <NEW_LINE> rtmpCall = shlex.split(rtmpCmd) <NEW_LINE> rtmpProc = subprocess.Popen(rtmpCall, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) <NEW_LINE> (stdout, stderr) = rtmpProc.communicate() <NEW_LINE> if rtmpProc.returncode == 1: <NEW_LINE> <INDENT> logger.debug('rtmpdump output: %s' % (stdout)) <NEW_LINE> if 'corrupt file!' in stdout: <NEW_LINE> <INDENT> logger.warning('Le fichier %s est corrompu!\n\t le téléchargement doit reprendre du début...' 
% (self.nomFichier)) <NEW_LINE> os.remove(self.nomFichier) <NEW_LINE> return self.rtmpDownload(rtmpUrl, swfForceRefresh) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if not swfForceRefresh: <NEW_LINE> <INDENT> return self.rtmpDownload(rtmpUrl, True) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> logger.warning ('Veuillez ré-essayer plus tard...') <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> elif rtmpProc.returncode == 2: <NEW_LINE> <INDENT> logger.info('Téléchargement incomplet: nouvel essai dans 3s...') <NEW_LINE> time.sleep(3) <NEW_LINE> if swfForceRefresh: <NEW_LINE> <INDENT> return self.rtmpDownload(rtmpUrl, False) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self.rtmpDownload(rtmpUrl, swfForceRefresh) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> return rtmpProc.returncode | Téléchargement des liens rtmp | 62598fabbe8e80087fbbeff9 |
class DetailPostAPIView(RetrieveUpdateDestroyAPIView): <NEW_LINE> <INDENT> queryset = Post.objects.all() <NEW_LINE> lookup_field = "slug" <NEW_LINE> serializer_class = PostDetailSerializer <NEW_LINE> permission_classes = [IsAuthenticatedOrReadOnly, IsOwnerOrReadOnly] | get:
Returns the details of a post instance. Searches post using slug field.
put:
Updates an existing post. Returns updated post data
parameters: [slug, title, body, description, image]
delete:
Delete an existing post
parameters = [slug] | 62598fab56b00c62f0fb284a |
class b2JointDef(object): <NEW_LINE> <INDENT> thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag') <NEW_LINE> __repr__ = _swig_repr <NEW_LINE> def __init__(self, **kwargs): <NEW_LINE> <INDENT> _Box2D.b2JointDef_swiginit(self,_Box2D.new_b2JointDef()) <NEW_LINE> _init_kwargs(self, **kwargs) <NEW_LINE> <DEDENT> type = _swig_property(_Box2D.b2JointDef_type_get, _Box2D.b2JointDef_type_set) <NEW_LINE> bodyA = _swig_property(_Box2D.b2JointDef_bodyA_get, _Box2D.b2JointDef_bodyA_set) <NEW_LINE> bodyB = _swig_property(_Box2D.b2JointDef_bodyB_get, _Box2D.b2JointDef_bodyB_set) <NEW_LINE> collideConnected = _swig_property(_Box2D.b2JointDef_collideConnected_get, _Box2D.b2JointDef_collideConnected_set) <NEW_LINE> __dir__ = _dir_filter <NEW_LINE> def __hash__(self): <NEW_LINE> <INDENT> return _Box2D.b2JointDef___hash__(self) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return _format_repr(self) <NEW_LINE> <DEDENT> def __GetUserData(self): <NEW_LINE> <INDENT> return _Box2D.b2JointDef___GetUserData(self) <NEW_LINE> <DEDENT> def __SetUserData(self, *args, **kwargs): <NEW_LINE> <INDENT> return _Box2D.b2JointDef___SetUserData(self, *args, **kwargs) <NEW_LINE> <DEDENT> def ClearUserData(self): <NEW_LINE> <INDENT> return _Box2D.b2JointDef_ClearUserData(self) <NEW_LINE> <DEDENT> userData = property(__GetUserData, __SetUserData) <NEW_LINE> def __del__(self): <NEW_LINE> <INDENT> self.ClearUserData() <NEW_LINE> <DEDENT> def to_kwargs(self): <NEW_LINE> <INDENT> def is_prop(attr): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> is_property = isinstance(getattr(cls, attr), property) <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return is_property and attr not in skip_props <NEW_LINE> <DEDENT> skip_props = ['anchor', 'anchorA', 'anchorB', 'axis'] <NEW_LINE> cls = type(self) <NEW_LINE> return {attr: getattr(self, attr) for attr in dir(self) if is_prop(attr) } <NEW_LINE> <DEDENT> 
__swig_destroy__ = _Box2D.delete_b2JointDef | Joint definitions are used to construct joints. | 62598fabdd821e528d6d8ecb |
class Field(field): <NEW_LINE> <INDENT> def __init__(self, name, default=NOT_PROVIDED, filters=None, required=True): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.default = default <NEW_LINE> self.filters = filters or [] <NEW_LINE> self.required = required <NEW_LINE> <DEDENT> def __get__(self, instance, cls=None): <NEW_LINE> <INDENT> if instance is None: <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> value = getattr(instance._obj, self.name, self.default) <NEW_LINE> for filt in self.filters: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> value = filt.from_python(value) <NEW_LINE> <DEDENT> except (TypeError, ValueError): <NEW_LINE> <INDENT> raise ValidationError('Invalid value') <NEW_LINE> <DEDENT> <DEDENT> return value <NEW_LINE> <DEDENT> def __set__(self, instance, value): <NEW_LINE> <INDENT> for filt in self.filters[::-1]: <NEW_LINE> <INDENT> value = filt.to_python(value) <NEW_LINE> <DEDENT> setattr(instance._obj, self.name, value) | class V(DataMapper):
foo = Field('bar', default=1) | 62598fabd268445f26639b4e |
class Category(Base): <NEW_LINE> <INDENT> __tablename__ = 'category' <NEW_LINE> id = Column(Integer, primary_key=True) <NEW_LINE> name = Column(String(250), nullable=False) <NEW_LINE> @property <NEW_LINE> def serialize(self): <NEW_LINE> <INDENT> return { 'name': self.name, 'id': self.id, } | Category class has following properties
id : Integer
name : String | 62598fab4428ac0f6e6584ba |
class DerivableSetValueError(Exception): <NEW_LINE> <INDENT> pass | Raises when trying to set value for Derivable Field. | 62598fab92d797404e388b2f |
class ReadSageNB(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.tmp = tempfile.mkdtemp(prefix='sagenb_export_') <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> shutil.rmtree(self.tmp, ignore_errors=True) <NEW_LINE> <DEDENT> def tmp_filename(self, name): <NEW_LINE> <INDENT> return os.path.join(self.tmp, name) <NEW_LINE> <DEDENT> def test_sage_4(self): <NEW_LINE> <INDENT> notebook = NotebookSageNB.find(DOT_SAGE, '_sage_:4') <NEW_LINE> ipynb = IpynbWriter(notebook) <NEW_LINE> ipynb.write(self.tmp_filename('sage:4.ipynb')) <NEW_LINE> <DEDENT> def test_aleksandra_slapik_44(self): <NEW_LINE> <INDENT> notebook = NotebookSageNB.find(DOT_SAGE, 'aleksandra.slapik:44') <NEW_LINE> ipynb = IpynbWriter(notebook) <NEW_LINE> ipynb.write(self.tmp_filename('aleksandra_slapik_44.ipynb')) <NEW_LINE> ipynb.write(self.tmp_filename(u'WDI projekt - R\xf3\u017cankowski, Kie\u0142pi\u0144ski, Kozok.ipynb')) | Test various sample notebooks | 62598fab3539df3088ecc248 |
class V1beta1ControllerRevisionList(object): <NEW_LINE> <INDENT> swagger_types = { 'api_version': 'str', 'items': 'list[V1beta1ControllerRevision]', 'kind': 'str', 'metadata': 'V1ListMeta' } <NEW_LINE> attribute_map = { 'api_version': 'apiVersion', 'items': 'items', 'kind': 'kind', 'metadata': 'metadata' } <NEW_LINE> def __init__(self, api_version=None, items=None, kind=None, metadata=None): <NEW_LINE> <INDENT> self._api_version = None <NEW_LINE> self._items = None <NEW_LINE> self._kind = None <NEW_LINE> self._metadata = None <NEW_LINE> self.discriminator = None <NEW_LINE> if api_version is not None: <NEW_LINE> <INDENT> self.api_version = api_version <NEW_LINE> <DEDENT> self.items = items <NEW_LINE> if kind is not None: <NEW_LINE> <INDENT> self.kind = kind <NEW_LINE> <DEDENT> if metadata is not None: <NEW_LINE> <INDENT> self.metadata = metadata <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def api_version(self): <NEW_LINE> <INDENT> return self._api_version <NEW_LINE> <DEDENT> @api_version.setter <NEW_LINE> def api_version(self, api_version): <NEW_LINE> <INDENT> self._api_version = api_version <NEW_LINE> <DEDENT> @property <NEW_LINE> def items(self): <NEW_LINE> <INDENT> return self._items <NEW_LINE> <DEDENT> @items.setter <NEW_LINE> def items(self, items): <NEW_LINE> <INDENT> if items is None: <NEW_LINE> <INDENT> raise ValueError("Invalid value for `items`, must not be `None`") <NEW_LINE> <DEDENT> self._items = items <NEW_LINE> <DEDENT> @property <NEW_LINE> def kind(self): <NEW_LINE> <INDENT> return self._kind <NEW_LINE> <DEDENT> @kind.setter <NEW_LINE> def kind(self, kind): <NEW_LINE> <INDENT> self._kind = kind <NEW_LINE> <DEDENT> @property <NEW_LINE> def metadata(self): <NEW_LINE> <INDENT> return self._metadata <NEW_LINE> <DEDENT> @metadata.setter <NEW_LINE> def metadata(self, metadata): <NEW_LINE> <INDENT> self._metadata = metadata <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in 
six.iteritems(self.swagger_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pprint.pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, V1beta1ControllerRevisionList): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other | NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually. | 62598faba8ecb033258711a6 |
@autoinitsingleton('windows', 'msys') <NEW_LINE> class WindowsToMsysPathConverter(Singleton, PathConverter): <NEW_LINE> <INDENT> def convert(self, source_path): <NEW_LINE> <INDENT> return get_path_converter( 'windows_alt', 'msys').convert( get_path_converter( 'windows', 'windows_alt').convert(source_path)) | Windows path to msys path converter. | 62598fab45492302aabfc467 |
class PerfDataSourcesV1(RestController): <NEW_LINE> <INDENT> apiver = 1 <NEW_LINE> @with_trailing_slash <NEW_LINE> @expose("api/pds-all.xml", content_type="application/xml; charset=utf-8") <NEW_LINE> @expose("json") <NEW_LINE> def get_all(self): <NEW_LINE> <INDENT> idhost = get_parent_id("hosts") <NEW_LINE> if idhost is None: <NEW_LINE> <INDENT> raise HTTPNotFound("Can't find the host") <NEW_LINE> <DEDENT> host = get_host(idhost) <NEW_LINE> result = [] <NEW_LINE> for pds in host.perfdatasources: <NEW_LINE> <INDENT> result.append({ "id": pds.idperfdatasource, "name": pds.name, "href": tg.url("/api/v%s/hosts/%s/perfdatasources/%s" % (self.apiver, host.idhost, pds.idperfdatasource)), }) <NEW_LINE> <DEDENT> return dict(perfdatasources=result) <NEW_LINE> <DEDENT> @expose("api/pds-one.xml", content_type="application/xml; charset=utf-8") <NEW_LINE> @expose("json") <NEW_LINE> def get_one(self, idpds): <NEW_LINE> <INDENT> idhost = get_parent_id("hosts") <NEW_LINE> pds = get_pds(idpds, idhost) <NEW_LINE> result = { "id": pds.idperfdatasource, "href": tg.url("/api/v%s/hosts/%s/perfdatasources/%s" % (self.apiver, pds.host.idhost, pds.idperfdatasource)), "host": { "id": pds.host.idhost, "name": pds.host.name, "href": tg.url("/api/v%s/hosts/%s" % (self.apiver, pds.host.idhost)), }, "name": pds.name, "type": pds.type, "label": pds.label, "factor": pds.factor, "max": pds.max, } <NEW_LINE> graphs = [] <NEW_LINE> for graph in pds.graphs: <NEW_LINE> <INDENT> graphs.append({ "id": graph.idgraph, "href": tg.url("/api/v%s/graphs/%s" % (self.apiver, graph.idgraph)), "name": graph.name, }) <NEW_LINE> <DEDENT> result["graphs"] = graphs <NEW_LINE> return dict(pds=result) | Controlleur d'accès aux données de performances d'un hôte. Ne peut être
monté qu'après un hôte dans l'arborescence. Techniquement on pourrait aussi
le monter à la racine, mais il faudrait alors limiter le nombre de
résultats pour éviter de saturer la machine. On fera s'il y a besoin. | 62598fab26068e7796d4c8ea |
class Database: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.database = SqliteDatabase('linklys.db') <NEW_LINE> self.load() <NEW_LINE> <DEDENT> def load(self): <NEW_LINE> <INDENT> self.database.connect() <NEW_LINE> self.database.create_tables( [ Article, Playlist, PlaylistArticles ], safe=True ) | manages the database | 62598fab7b25080760ed7444 |
class PathTransform(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.path_file = None <NEW_LINE> <DEDENT> def relativeToAbsolute(self, path_file): <NEW_LINE> <INDENT> assert self.path_file is not None <NEW_LINE> path = join(self.path_file, path_file) <NEW_LINE> return normpath(path) | classe per trasformare il path di un file da relativo ad assoluto, questo
per recuperare file che si trovano in una directory diversa da quella
dell'editor. La classe mette a disposizione una variabile globale
g_ptransform per implemetare il template singleton.
Nel momento che si cambia il path del file .rooms su cui si lavora si
deve settare la variabile path_file | 62598fab3d592f4c4edbae62 |
class DefaultEventFormatterTest(test_lib.EventFormatterTestCase): <NEW_LINE> <INDENT> def testInitialization(self): <NEW_LINE> <INDENT> event_formatter = default.DefaultEventFormatter() <NEW_LINE> self.assertIsNotNone(event_formatter) <NEW_LINE> <DEDENT> def testGetFormatStringAttributeNames(self): <NEW_LINE> <INDENT> event_formatter = default.DefaultEventFormatter() <NEW_LINE> expected_attribute_names = ['attribute_driven'] <NEW_LINE> self._TestGetFormatStringAttributeNames( event_formatter, expected_attribute_names) | Tests for the default event formatter. | 62598fab44b2445a339b693b |
class GattError(LinkError): <NEW_LINE> <INDENT> pass | An operation could not be completed because of an invalid GATT state.
The message will provide more information on what the issue was. | 62598fab8a43f66fc4bf2112 |
class Dereference(object): <NEW_LINE> <INDENT> implements(IDereference) <NEW_LINE> __slots__ = ("refid", ) <NEW_LINE> def __init__(self, refid): <NEW_LINE> <INDENT> self.refid = refid <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "<Dereference %s>" % self.refid <NEW_LINE> <DEDENT> def __hash__(self): <NEW_LINE> <INDENT> return hash(self.refid) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, Dereference): <NEW_LINE> <INDENT> return NotImplemented <NEW_LINE> <DEDENT> return self.refid == other.refid <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> if type(self) != type(other): <NEW_LINE> <INDENT> return NotImplemented <NEW_LINE> <DEDENT> return not self.__eq__(other) | Used by TreeSerializer to encapsulate a dereference to a previous
referenced value. Can be compared for equality and hashed.
Implements L{IDereference}. | 62598fabf7d966606f747f7b |
@utils.ModuleSettings( options=[ utils.UtilOption(constants.OPTION_USERHOME, required=False, docstring="home directory"), utils.UtilOption(constants.OPTION_USERSHELL, required=False, docstring="login shell") ], required_transaction=storage.PasswdTransaction ) <NEW_LINE> class UserItem(utils.HardeningUtil): <NEW_LINE> <INDENT> def __run__(self): <NEW_LINE> <INDENT> if self.has_option(constants.OPTION_USERHOME): <NEW_LINE> <INDENT> self.transaction().set_pw_dir(self.get_option(constants.OPTION_USERHOME)) <NEW_LINE> <DEDENT> if self.has_option(constants.OPTION_USERSHELL): <NEW_LINE> <INDENT> self.transaction().set_pw_shell(self.get_option(constants.OPTION_USERSHELL)) | creates a user. If the user already exists, it will be modified. | 62598fabfff4ab517ebcd77c |
class ConstVelocityMP(MotionPrior): <NEW_LINE> <INDENT> def __init__(self, n_steps): <NEW_LINE> <INDENT> self.n_steps = n_steps <NEW_LINE> <DEDENT> def __call__(self, x_t, history): <NEW_LINE> <INDENT> if len(history) >= self.n_steps: <NEW_LINE> <INDENT> new_history = list(history[1:]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> new_history = list(history) <NEW_LINE> <DEDENT> _x_t = _x1y1x2y2_to_cxcywh(x_t) <NEW_LINE> new_history.append(_x_t) <NEW_LINE> if len(new_history) == 1: <NEW_LINE> <INDENT> vel = 0 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> vel = (new_history[-1] - new_history[0]) / (len(new_history) - 1) <NEW_LINE> <DEDENT> _pred = _x_t + vel <NEW_LINE> pred = _cxcywh_to_x1y1x2y2(_pred) <NEW_LINE> return pred, new_history | Constant velocity motion prior | 62598fabf548e778e596b53b |
class LogParabola(RegriddableModel1D): <NEW_LINE> <INDENT> def __init__(self, name='logparabola'): <NEW_LINE> <INDENT> self.ref = Parameter(name, 'ref', 1, alwaysfrozen=True) <NEW_LINE> self.c1 = Parameter(name, 'c1', 1) <NEW_LINE> self.c2 = Parameter(name, 'c2', 1) <NEW_LINE> self.ampl = Parameter(name, 'ampl', 1, 0) <NEW_LINE> ArithmeticModel.__init__(self, name, (self.ref, self.c1, self.c2, self.ampl)) <NEW_LINE> <DEDENT> @modelCacher1d <NEW_LINE> def calc(self, *args, **kwargs): <NEW_LINE> <INDENT> kwargs['integrate'] = bool_cast(self.integrate) <NEW_LINE> return _modelfcts.logparabola(*args, **kwargs) | One-dimensional log-parabolic function.
Attributes
----------
ref
The reference point for the normalization.
c1
The power-law index (gamma).
c2
The curvature of the parabola (beta).
ampl
The amplitude of the model.
See Also
--------
Exp, Exp10, Log, Log10, Sqrt
Notes
-----
The functional form of the model for points is::
f(x) = ampl * (x / ref) ^ (-c1 - c2 * log_10 (x / ref))
The grid version is evaluated by numerically intgerating the
function over each bin using a non-adaptive Gauss-Kronrod scheme
suited for smooth functions [1]_, falling over to a simple
trapezoid scheme if this fails.
References
----------
.. [1] https://www.gnu.org/software/gsl/manual/html_node/QNG-non_002dadaptive-Gauss_002dKronrod-integration.html | 62598fab4a966d76dd5eee78 |
class BTermMonoid(TermWithCoefficientMonoid): <NEW_LINE> <INDENT> __init__ = experimental(trac_number=31922)(GenericTermMonoid.__init__) <NEW_LINE> Element = BTerm <NEW_LINE> def _repr_(self): <NEW_LINE> <INDENT> return (f'B-Term Monoid {self.growth_group._repr_short_()} with ' f'coefficients in {self.coefficient_ring}') <NEW_LINE> <DEDENT> def _default_kwds_construction_(self): <NEW_LINE> <INDENT> defaults = {} <NEW_LINE> defaults.update(super()._default_kwds_construction_()) <NEW_LINE> defaults.update( {'valid_from': {v: 0 for v in self.growth_group.variable_names()}}) <NEW_LINE> return defaults <NEW_LINE> <DEDENT> def _convert_construction_(self, kwds_construction): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def _coerce_map_from_(self, S): <NEW_LINE> <INDENT> if isinstance(S, (ExactTermMonoid,)): <NEW_LINE> <INDENT> if self.growth_group.has_coerce_map_from(S.growth_group) and self.coefficient_ring.has_coerce_map_from(S.coefficient_ring): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> return super()._coerce_map_from_(S) <NEW_LINE> <DEDENT> <DEDENT> def _an_element_(self): <NEW_LINE> <INDENT> from sage.rings.semirings.non_negative_integer_semiring import NN <NEW_LINE> return self(self.growth_group.an_element(), coefficient=self.coefficient_ring.an_element(), valid_from={v: NN.an_element() for v in self.growth_group.variable_names()}) <NEW_LINE> <DEDENT> def some_elements(self): <NEW_LINE> <INDENT> from itertools import cycle <NEW_LINE> from sage.misc.mrange import cantor_product <NEW_LINE> from sage.rings.semirings.non_negative_integer_semiring import NN <NEW_LINE> return (self(g, coefficient=c, valid_from={v: f for v in self.growth_group.variable_names()}) for (g, c), f in zip(cantor_product( self.growth_group.some_elements(), (c for c in self.coefficient_ring.some_elements() if c != 0)), cycle(NN.some_elements()))) | Parent for asymptotic B-terms.
INPUT:
- ``growth_group`` -- a growth group
- ``coefficient_ring`` -- the ring which contains the
coefficients of the elements
- ``category`` -- The category of the parent can be specified
in order to broaden the base structure. It has to be a subcategory
of ``Join of Category of monoids and Category of posets``. This
is also the default category if ``None`` is specified
EXAMPLES::
sage: from sage.rings.asymptotic.growth_group import MonomialGrowthGroup
sage: from sage.rings.asymptotic.term_monoid import BTermMonoid
sage: from sage.rings.asymptotic.term_monoid import DefaultTermMonoidFactory as TermMonoid
sage: G = MonomialGrowthGroup(ZZ, 'x')
sage: BT = TermMonoid('B', G, QQ)
sage: BT
B-Term Monoid x^ZZ with coefficients in Rational Field
sage: BT is BTermMonoid(TermMonoid, G, QQ)
True | 62598fab7c178a314d78d434 |
class FleschReadingEase(BaseReadability): <NEW_LINE> <INDENT> name = 'Flesch reading ease' <NEW_LINE> slug = 'flesch_reading_ease' <NEW_LINE> def calc(self, text: str) -> ReadingLevel: <NEW_LINE> <INDENT> if not text: <NEW_LINE> <INDENT> return ReadingLevel(self.name) <NEW_LINE> <DEDENT> text_info = self._text_analyser.get_text_info(text) <NEW_LINE> words = text_info.word_count <NEW_LINE> sentences = text_info.sentence_count <NEW_LINE> syllables = text_info.syllable_count <NEW_LINE> if sentences < 1 or words < 1: <NEW_LINE> <INDENT> return ReadingLevel(self.name) <NEW_LINE> <DEDENT> result = 206.835 - 1.015 * (words / sentences) - 85.6 * (syllables / words) <NEW_LINE> if result > 130.0: <NEW_LINE> <INDENT> level = 1 <NEW_LINE> <DEDENT> elif result > 120.0: <NEW_LINE> <INDENT> level = 2 <NEW_LINE> <DEDENT> elif result > 110.0: <NEW_LINE> <INDENT> level = 3 <NEW_LINE> <DEDENT> elif result > 100.0: <NEW_LINE> <INDENT> level = 4 <NEW_LINE> <DEDENT> elif result > 90.0: <NEW_LINE> <INDENT> level = 5 <NEW_LINE> <DEDENT> elif result > 80.0: <NEW_LINE> <INDENT> level = 6 <NEW_LINE> <DEDENT> elif result > 70.0: <NEW_LINE> <INDENT> level = 7 <NEW_LINE> <DEDENT> elif result > 65.0: <NEW_LINE> <INDENT> level = 8 <NEW_LINE> <DEDENT> elif result > 60.0: <NEW_LINE> <INDENT> level = 9 <NEW_LINE> <DEDENT> elif result > 53.33: <NEW_LINE> <INDENT> level = 10 <NEW_LINE> <DEDENT> elif result > 56.66: <NEW_LINE> <INDENT> level = 11 <NEW_LINE> <DEDENT> elif result > 50.0: <NEW_LINE> <INDENT> level = 12 <NEW_LINE> <DEDENT> elif result > 45.0: <NEW_LINE> <INDENT> level = 13 <NEW_LINE> <DEDENT> elif result > 40.0: <NEW_LINE> <INDENT> level = 14 <NEW_LINE> <DEDENT> elif result > 35.0: <NEW_LINE> <INDENT> level = 15 <NEW_LINE> <DEDENT> elif result > 30.0: <NEW_LINE> <INDENT> level = 16 <NEW_LINE> <DEDENT> elif result > 15.0: <NEW_LINE> <INDENT> level = 17 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> level = 18 <NEW_LINE> <DEDENT> reading = ReadingLevel(self.name, index=result, level=level) 
<NEW_LINE> return reading | In the Flesch reading-ease test, higher scores indicate material that is easier to read;
lower numbers mark passages that are more difficult to read.
100.00-90.00 5th grade Very easy to read. Easily understood by an average 11-year-old student.
90.0–80.0 6th grade Easy to read. Conversational English for consumers.
80.0–70.0 7th grade Fairly easy to read.
70.0–60.0 8th & 9th grade Plain English. Easily understood by 13- to 15-year-old students.
60.0–50.0 10th to 12th grade Fairly difficult to read.
50.0–30.0 College Difficult to read.
30.0–0.0 College graduate Very difficult to read. Best understood by university graduates.
Description from https://en.wikipedia.org/wiki/Flesch%E2%80%93Kincaid_readability_tests#Flesch_reading_ease
206.835 - 1.015 x (total words / total sentences) - 84.6 x (total syllables / total words) | 62598fab7047854f4633f370 |
class BaseTaskTypeCreate( UserFormViewMixin, TaskTypeFormViewMixin, DisableUserSelectFormViewMixin, LoginRequiredMixin, CreateView, ): <NEW_LINE> <INDENT> model = None <NEW_LINE> fields = "__all__" <NEW_LINE> template_name = None <NEW_LINE> def get_success_url(self): <NEW_LINE> <INDENT> raise NotImplementedError | A base view for creating a task type. | 62598fabadb09d7d5dc0a522 |
class Rectangle: <NEW_LINE> <INDENT> number_of_instances = 0 <NEW_LINE> print_symbol = "#" <NEW_LINE> def __init__(self, width=0, height=0): <NEW_LINE> <INDENT> self.width = width <NEW_LINE> self.height = height <NEW_LINE> type(self).number_of_instances += 1 <NEW_LINE> <DEDENT> @property <NEW_LINE> def width(self): <NEW_LINE> <INDENT> return self.__width <NEW_LINE> <DEDENT> @width.setter <NEW_LINE> def width(self, value): <NEW_LINE> <INDENT> if not isinstance(value, int): <NEW_LINE> <INDENT> raise TypeError("width must be an integer") <NEW_LINE> <DEDENT> if value < 0: <NEW_LINE> <INDENT> raise ValueError("width must be >= 0") <NEW_LINE> <DEDENT> self.__width = value <NEW_LINE> <DEDENT> @property <NEW_LINE> def height(self): <NEW_LINE> <INDENT> return self.__height <NEW_LINE> <DEDENT> @height.setter <NEW_LINE> def height(self, value): <NEW_LINE> <INDENT> if not isinstance(value, int): <NEW_LINE> <INDENT> raise TypeError("height must be an integer") <NEW_LINE> <DEDENT> if value < 0: <NEW_LINE> <INDENT> raise ValueError("height must be >= 0") <NEW_LINE> <DEDENT> self.__height = value <NEW_LINE> <DEDENT> def area(self): <NEW_LINE> <INDENT> return self.__width * self.__height <NEW_LINE> <DEDENT> def perimeter(self): <NEW_LINE> <INDENT> if self.__width <= 0 or self.__height <= 0: <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> return (self.__width * 2) + (self.__height * 2) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> string = "" <NEW_LINE> for row in range(self.__height): <NEW_LINE> <INDENT> for column in range(self.__width): <NEW_LINE> <INDENT> string += str(self.print_symbol) <NEW_LINE> <DEDENT> if row != (self.__height - 1): <NEW_LINE> <INDENT> string += "\n" <NEW_LINE> <DEDENT> <DEDENT> return string <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return ("Rectangle(" + str(self.__width) + ", " + str(self.__height) + ")") <NEW_LINE> <DEDENT> def __del__(self): <NEW_LINE> <INDENT> print("Bye rectangle...") <NEW_LINE> 
type(self).number_of_instances -= 1 <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def bigger_or_equal(rect_1, rect_2): <NEW_LINE> <INDENT> if not isinstance(rect_1, Rectangle): <NEW_LINE> <INDENT> raise TypeError("rect_1 must be an instance of Rectangle") <NEW_LINE> <DEDENT> if not isinstance(rect_2, Rectangle): <NEW_LINE> <INDENT> raise TypeError("rect_2 must be an instance of Rectangle") <NEW_LINE> <DEDENT> if rect_1.area() >= rect_2.area(): <NEW_LINE> <INDENT> return rect_1 <NEW_LINE> <DEDENT> return rect_2 <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def square(cls, size=0): <NEW_LINE> <INDENT> return cls(size, size) | Rectange Class | 62598fab4428ac0f6e6584bb |
class WDateEditPlugin(QtDesigner.QPyDesignerCustomWidgetPlugin): <NEW_LINE> <INDENT> _module = 'wic.widgets.date_edit' <NEW_LINE> _class = 'DateEdit' <NEW_LINE> _icon = ':/icons/fugue/calendar-blue.png' <NEW_LINE> def __init__(self, parent = None): <NEW_LINE> <INDENT> super().__init__(parent) <NEW_LINE> self.initialized = False <NEW_LINE> <DEDENT> def initialize(self, formEditor): <NEW_LINE> <INDENT> self.initialized = True <NEW_LINE> <DEDENT> def isInitialized(self): <NEW_LINE> <INDENT> return self.initialized <NEW_LINE> <DEDENT> def isContainer(self): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> def icon(self): <NEW_LINE> <INDENT> return QtGui.QIcon(self._icon) <NEW_LINE> <DEDENT> def domXml(self): <NEW_LINE> <INDENT> return '<widget class="%s" name="%s">\n</widget>\n' % (self._class, self.name()) <NEW_LINE> <DEDENT> def group(self): <NEW_LINE> <INDENT> return 'wic' <NEW_LINE> <DEDENT> def includeFile(self): <NEW_LINE> <INDENT> return self._module <NEW_LINE> <DEDENT> def name(self): <NEW_LINE> <INDENT> return self._class <NEW_LINE> <DEDENT> def toolTip(self): <NEW_LINE> <INDENT> return '' <NEW_LINE> <DEDENT> def whatsThis(self): <NEW_LINE> <INDENT> return '' <NEW_LINE> <DEDENT> def createWidget(self, parent): <NEW_LINE> <INDENT> module = importlib.import_module(self._module) <NEW_LINE> Klass = getattr(module, self._class) <NEW_LINE> return Klass(parent) | Designer plugin for WDateEdit.
Also serves as base class for other custom widget plugins: | 62598fab8e7ae83300ee903a |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.