code
stringlengths
4
4.48k
docstring
stringlengths
1
6.45k
_id
stringlengths
24
24
class DocumentPretranslatingStatus(object): <NEW_LINE> <INDENT> openapi_types = { 'pretranslation': 'str' } <NEW_LINE> attribute_map = { 'pretranslation': 'pretranslation' } <NEW_LINE> def __init__(self, pretranslation=None, local_vars_configuration=None): <NEW_LINE> <INDENT> if local_vars_configuration is None: <NEW_LINE> <INDENT> local_vars_configuration = Configuration() <NEW_LINE> <DEDENT> self.local_vars_configuration = local_vars_configuration <NEW_LINE> self._pretranslation = None <NEW_LINE> self.discriminator = None <NEW_LINE> if pretranslation is not None: <NEW_LINE> <INDENT> self.pretranslation = pretranslation <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def pretranslation(self): <NEW_LINE> <INDENT> return self._pretranslation <NEW_LINE> <DEDENT> @pretranslation.setter <NEW_LINE> def pretranslation(self, pretranslation): <NEW_LINE> <INDENT> self._pretranslation = pretranslation <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in six.iteritems(self.openapi_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pprint.pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, DocumentPretranslatingStatus): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> 
return self.to_dict() == other.to_dict() <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, DocumentPretranslatingStatus): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return self.to_dict() != other.to_dict()
NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech Do not edit the class manually.
62598fb999cbb53fe683102d
class OG_303: <NEW_LINE> <INDENT> events = OWN_SPELL_PLAY.after(Buff(CTHUN, "OG_303e"))
Cult Sorcerer
62598fb9283ffb24f3cf39d8
class TripleSource(abc.ABC): <NEW_LINE> <INDENT> @abc.abstractmethod <NEW_LINE> def cache(self, a, cache_updater): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @abc.abstractmethod <NEW_LINE> def initializer(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @abc.abstractmethod <NEW_LINE> def generate_triples(self, fetches): <NEW_LINE> <INDENT> pass
Base class for triples sources.
62598fb9ad47b63b2c5a79a9
class InvalidUser(Exception): <NEW_LINE> <INDENT> pass
Invalid token for user; cannot authenticate.
62598fb9377c676e912f6e1a
class ProtocolStates(BaseEnum): <NEW_LINE> <INDENT> UNKNOWN = DriverProtocolState.UNKNOWN <NEW_LINE> COMMAND = DriverProtocolState.COMMAND <NEW_LINE> AUTOSAMPLE = DriverProtocolState.AUTOSAMPLE <NEW_LINE> DIRECT_ACCESS = DriverProtocolState.DIRECT_ACCESS
Protocol states for MAVS-4. Cherry picked from DriverProtocolState enum.
62598fb98e7ae83300ee91f5
class GetAccountDataCommand(FilterCommand): <NEW_LINE> <INDENT> command = 'getAccountData' <NEW_LINE> def get_request_filter(self): <NEW_LINE> <INDENT> return GetAccountDataRequestFilter() <NEW_LINE> <DEDENT> def get_response_filter(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> async def _execute(self, request: dict) -> dict: <NEW_LINE> <INDENT> inclusion_states: bool = request['inclusionStates'] <NEW_LINE> seed: Seed = request['seed'] <NEW_LINE> start: int = request['start'] <NEW_LINE> stop: Optional[int] = request['stop'] <NEW_LINE> security_level: Optional[int] = request['security_level'] <NEW_LINE> if stop is None: <NEW_LINE> <INDENT> my_addresses: List[Address] = [] <NEW_LINE> my_hashes: List[TransactionHash] = [] <NEW_LINE> async for addy, hashes in iter_used_addresses(self.adapter, seed, start, security_level): <NEW_LINE> <INDENT> my_addresses.append(addy) <NEW_LINE> my_hashes.extend(hashes) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> ft_command = FindTransactionsCommand(self.adapter) <NEW_LINE> my_addresses = ( AddressGenerator(seed, security_level).get_addresses(start, stop - start) ) <NEW_LINE> my_hashes = (await ft_command(addresses=my_addresses)).get('hashes') or [] <NEW_LINE> <DEDENT> account_balance = 0 <NEW_LINE> if my_addresses: <NEW_LINE> <INDENT> gb_response = ( await GetBalancesCommand(self.adapter)(addresses=my_addresses) ) <NEW_LINE> for i, balance in enumerate(gb_response['balances']): <NEW_LINE> <INDENT> my_addresses[i].balance = balance <NEW_LINE> account_balance += balance <NEW_LINE> <DEDENT> <DEDENT> return { 'addresses': list(sorted(my_addresses, key=attrgetter('key_index'))), 'balance': account_balance, 'bundles': await get_bundles_from_transaction_hashes( adapter=self.adapter, transaction_hashes=my_hashes, inclusion_states=inclusion_states, ), }
Executes ``getAccountData`` extended API command. See :py:meth:`iota.api.Iota.get_account_data` for more info.
62598fb9a05bb46b3848a9c1
class Tanh(Activation): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def function(z): <NEW_LINE> <INDENT> return np.tanh(z) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def derivative(z): <NEW_LINE> <INDENT> return np.divide(1., np.cosh(z)**2)
Tanh Activation Function
62598fb901c39578d7f12ed0
class OperationList(msrest.serialization.Model): <NEW_LINE> <INDENT> _attribute_map = { 'value': {'key': 'value', 'type': '[Operation]'}, 'next_link': {'key': 'nextLink', 'type': 'str'}, } <NEW_LINE> def __init__( self, **kwargs ): <NEW_LINE> <INDENT> super(OperationList, self).__init__(**kwargs) <NEW_LINE> self.value = kwargs.get('value', None) <NEW_LINE> self.next_link = kwargs.get('next_link', None)
OperationList represents an RP operation list. :param value: List of operations supported by the resource provider. :type value: list[~azure.mgmt.redhatopenshift.v2020_04_30.models.Operation] :param next_link: The link used to get the next page of operations. :type next_link: str
62598fb94f88993c371f05b8
class VcfReadOptionsTest(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self._options = variant_transform_options.VcfReadOptions() <NEW_LINE> <DEDENT> def _make_args(self, args): <NEW_LINE> <INDENT> return make_args(self._options, args) <NEW_LINE> <DEDENT> def test_failure_for_conflicting_flags(self): <NEW_LINE> <INDENT> args = self._make_args(['--input_pattern', 'gs://some_pattern', '--infer_headers', '--representative_header_file', 'gs://some_file']) <NEW_LINE> self.assertRaises(ValueError, self._options.validate, args) <NEW_LINE> <DEDENT> def test_failure_for_conflicting_flags_no_errors(self): <NEW_LINE> <INDENT> args = self._make_args(['--input_pattern', 'gs://some_pattern', '--representative_header_file', 'gs://some_file']) <NEW_LINE> self._options.validate(args)
Tests cases for the VcfReadOptions class.
62598fb94c3428357761a411
class WXXX(UrlFrame): <NEW_LINE> <INDENT> _framespec = [ EncodingSpec('encoding', default=Encoding.UTF16), EncodedTextSpec('desc'), Latin1TextSpec('url'), ] <NEW_LINE> @property <NEW_LINE> def HashKey(self): <NEW_LINE> <INDENT> return '%s:%s' % (self.FrameID, self.desc)
User-defined URL data. Like TXXX, this has a freeform description associated with it.
62598fb9283ffb24f3cf39d9
class BasicInterfaceTests(ExpandedTestCase): <NEW_LINE> <INDENT> def test_setting_csharp_attr(self): <NEW_LINE> <INDENT> form = Form() <NEW_LINE> form.Text = "Testing" <NEW_LINE> self.assertAllEqual(form.text, form.Text, "Testing") <NEW_LINE> <DEDENT> def test_setting_python_attr(self): <NEW_LINE> <INDENT> form = Form() <NEW_LINE> form.text = "Testing" <NEW_LINE> self.assertAllEqual(form.text, form.Text, "Testing") <NEW_LINE> <DEDENT> def test_setting_attr_in_init(self): <NEW_LINE> <INDENT> form = Form(text="Testing") <NEW_LINE> self.assertAllEqual(form.text, form.Text, "Testing") <NEW_LINE> <DEDENT> def test_implicit_attr_value_type_conversion(self): <NEW_LINE> <INDENT> form = Form(text=0) <NEW_LINE> self.assertEqual(form.text, "0")
Basic tests to ensure that we can correctly interface between C# and Python names and types.
62598fb9cc40096d6161a284
class REPSOptimizerNumerical(REPSOptimizer): <NEW_LINE> <INDENT> def set_evaluation_feedback(self, feedbacks): <NEW_LINE> <INDENT> self.reward = check_feedback(feedbacks, compute_sum=True) <NEW_LINE> self.history_theta.append(self.params) <NEW_LINE> self.history_R.append(self.reward) <NEW_LINE> self.it += 1 <NEW_LINE> if self.it % self.train_freq == 0: <NEW_LINE> <INDENT> theta = np.asarray(self.history_theta) <NEW_LINE> R = np.asarray(self.history_R) <NEW_LINE> d = solve_dual_reps(R, self.epsilon, self.min_eta)[0] <NEW_LINE> self.policy_.fit(None, theta, d) <NEW_LINE> <DEDENT> self.logger.info("Reward %.6f" % self.reward) <NEW_LINE> if self.reward > self.max_return: <NEW_LINE> <INDENT> self.max_return = self.reward <NEW_LINE> self.best_params = self.params
Relative Entropy Policy Search (REPS) (using umerical gradients). Inherits all parameters and methods from REPSOptimizer, with the only difference being 'set_evaluation_feedback' using a modified 'solve_dual_reps' function which uses numerical gradients when minimizing the dual function.
62598fb9f548e778e596b6fb
class GEOSContextHandle(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.ptr = lgeos.initGEOS_r(notice_h, error_h) <NEW_LINE> <DEDENT> def __del__(self): <NEW_LINE> <INDENT> if self.ptr: lgeos.finishGEOS_r(self.ptr)
Python object representing a GEOS context handle.
62598fb94f6381625f19956d
@register('Jupyter.Accordion') <NEW_LINE> class Accordion(_SelectionContainer): <NEW_LINE> <INDENT> _view_name = Unicode('AccordionView').tag(sync=True) <NEW_LINE> _model_name = Unicode('AccordionModel').tag(sync=True)
Displays children each on a separate accordion page.
62598fb957b8e32f525081c8
class ServiceModel(object): <NEW_LINE> <INDENT> SHAPE_CLASSES = { 'structure': StructureShape, 'list': ListShape, 'map': MapShape, } <NEW_LINE> def __init__(self, service_description, service_name=None): <NEW_LINE> <INDENT> self._service_description = service_description <NEW_LINE> self.metadata = service_description.get('metadata', {}) <NEW_LINE> self._shape_resolver = ShapeResolver( service_description.get('shapes', {})) <NEW_LINE> self._signature_version = NOT_SET <NEW_LINE> self._service_name = service_name <NEW_LINE> <DEDENT> def shape_for(self, shape_name, member_traits=None): <NEW_LINE> <INDENT> return self._shape_resolver.get_shape_by_name( shape_name, member_traits) <NEW_LINE> <DEDENT> def resolve_shape_ref(self, shape_ref): <NEW_LINE> <INDENT> return self._shape_resolver.resolve_shape_ref(shape_ref) <NEW_LINE> <DEDENT> def operation_model(self, operation_name): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> model = self._service_description['operations'][operation_name] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> raise OperationNotFoundError(operation_name) <NEW_LINE> <DEDENT> return OperationModel(model, self) <NEW_LINE> <DEDENT> @CachedProperty <NEW_LINE> def operation_names(self): <NEW_LINE> <INDENT> return list(self._service_description.get('operations', [])) <NEW_LINE> <DEDENT> @CachedProperty <NEW_LINE> def service_name(self): <NEW_LINE> <INDENT> if self._service_name is not None: <NEW_LINE> <INDENT> return self._service_name <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self.endpoint_prefix <NEW_LINE> <DEDENT> <DEDENT> @CachedProperty <NEW_LINE> def signing_name(self): <NEW_LINE> <INDENT> signing_name = self.metadata.get('signingName') <NEW_LINE> if signing_name is None: <NEW_LINE> <INDENT> signing_name = self.endpoint_prefix <NEW_LINE> <DEDENT> return signing_name <NEW_LINE> <DEDENT> @CachedProperty <NEW_LINE> def api_version(self): <NEW_LINE> <INDENT> return self._get_metadata_property('apiVersion') <NEW_LINE> <DEDENT> 
@CachedProperty <NEW_LINE> def protocol(self): <NEW_LINE> <INDENT> return self._get_metadata_property('protocol') <NEW_LINE> <DEDENT> @CachedProperty <NEW_LINE> def endpoint_prefix(self): <NEW_LINE> <INDENT> return self._get_metadata_property('endpointPrefix') <NEW_LINE> <DEDENT> def _get_metadata_property(self, name): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self.metadata[name] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> raise UndefinedModelAttributeError( '"%s" not defined in the metadata of the the model: %s' % (name, self)) <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def signature_version(self): <NEW_LINE> <INDENT> if self._signature_version is NOT_SET: <NEW_LINE> <INDENT> signature_version = self.metadata.get('signatureVersion') <NEW_LINE> self._signature_version = signature_version <NEW_LINE> <DEDENT> return self._signature_version <NEW_LINE> <DEDENT> @signature_version.setter <NEW_LINE> def signature_version(self, value): <NEW_LINE> <INDENT> self._signature_version = value
:ivar service_description: The parsed service description dictionary.
62598fb94a966d76dd5ef028
class CourseModesDetailView(CourseModesMixin, RetrieveUpdateDestroyAPIView): <NEW_LINE> <INDENT> http_method_names = ['get', 'patch', 'delete', 'head', 'options'] <NEW_LINE> parser_classes = (MergePatchParser,) <NEW_LINE> multiple_lookup_fields = ('course_id', 'mode_slug') <NEW_LINE> queryset = CourseMode.objects.all() <NEW_LINE> def get_object(self): <NEW_LINE> <INDENT> queryset = self.get_queryset() <NEW_LINE> query_filter = {} <NEW_LINE> for field in self.multiple_lookup_fields: <NEW_LINE> <INDENT> query_filter[field] = self.kwargs[field] <NEW_LINE> <DEDENT> if 'course_id' in query_filter: <NEW_LINE> <INDENT> query_filter['course_id'] = CourseKey.from_string(query_filter['course_id']) <NEW_LINE> <DEDENT> obj = get_object_or_404(queryset, **query_filter) <NEW_LINE> self.check_object_permissions(self.request, obj) <NEW_LINE> return obj <NEW_LINE> <DEDENT> def patch(self, request, *args, **kwargs): <NEW_LINE> <INDENT> course_mode = self.get_object() <NEW_LINE> serializer = self.serializer_class(course_mode, data=request.data, partial=True) <NEW_LINE> if serializer.is_valid(raise_exception=True): <NEW_LINE> <INDENT> serializer.save() <NEW_LINE> return Response( status=status.HTTP_204_NO_CONTENT, content_type='application/json', )
View to retrieve, update, or delete a specific course mode for a course. **Use Case** Get or update course mode details for a specific course mode on a course. Or you may delete a specific course mode from a course. **Example Requests** GET /api/course_modes/v1/courses/{course_id}/{mode_slug} Returns details on an existing course mode for a course. PATCH /api/course_modes/v1/courses/{course_id}/{mode_slug} Updates (via merge) details of an existing course mode for a course. DELETE /api/course_modes/v1/courses/{course_id}/{mode_slug} Deletes an existing course mode for a course. **Response Values** For each HTTP verb below, an HTTP 404 "Not Found" response is returned if the requested course id does not exist, or the mode slug does not exist within the course. GET: If the request is successful, an HTTP 200 "OK" response is returned along with a details for a single course mode within a course. The details are contained in a JSON dictionary as follows: * course_id: The course identifier. * mode_slug: The short name for the course mode. * mode_display_name: The verbose name for the course mode. * min_price: The minimum price for which a user can enroll in this mode. * currency: The currency of the listed prices. * expiration_datetime: The date and time after which users cannot enroll in the course in this mode (not required for PATCH). * expiration_datetime_is_explicit: Whether the expiration_datetime field was explicitly set (not required for PATCH). * description: A description of this mode (not required for PATCH). * sku: The SKU for this mode (for ecommerce purposes, not required for PATCH). * bulk_sku: The bulk SKU for this mode (for ecommerce purposes, not required for PATCH). PATCH: If the request is successful, an HTTP 204 "No Content" response is returned. If "application/merge-patch+json" is not the specified content type, a 415 "Unsupported Media Type" response is returned. DELETE: If the request is successful, an HTTP 204 "No Content" response is returned.
62598fb95166f23b2e243533
class OPS_add_mesh_to_assembly(Operator): <NEW_LINE> <INDENT> bl_idname = "fd_assembly.add_mesh_to_assembly" <NEW_LINE> bl_label = "Add Mesh To Assembly" <NEW_LINE> bl_description = "This will add a mesh to the selected assembly" <NEW_LINE> bl_options = {'UNDO'} <NEW_LINE> mesh_name = StringProperty(name="Mesh Name",default="New Mesh") <NEW_LINE> @classmethod <NEW_LINE> def poll(cls, context): <NEW_LINE> <INDENT> if context.active_object: <NEW_LINE> <INDENT> obj_bp = utils.get_assembly_bp(context.active_object) <NEW_LINE> if obj_bp: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> return False <NEW_LINE> <DEDENT> def execute(self, context): <NEW_LINE> <INDENT> obj_bp = utils.get_assembly_bp(context.active_object) <NEW_LINE> assembly = fd_types.Assembly(obj_bp) <NEW_LINE> obj_bp = assembly.obj_bp <NEW_LINE> dim_x = assembly.obj_x.location.x <NEW_LINE> dim_y = assembly.obj_y.location.y <NEW_LINE> dim_z = assembly.obj_z.location.z <NEW_LINE> obj_mesh = utils.create_cube_mesh(self.mesh_name,(dim_x,dim_y,dim_z)) <NEW_LINE> if obj_mesh: <NEW_LINE> <INDENT> obj_mesh.mv.name_object = self.mesh_name <NEW_LINE> context.scene.objects.active = obj_mesh <NEW_LINE> bpy.ops.object.editmode_toggle() <NEW_LINE> bpy.ops.mesh.select_all(action='SELECT') <NEW_LINE> bpy.ops.mesh.normals_make_consistent(inside=False) <NEW_LINE> bpy.ops.object.editmode_toggle() <NEW_LINE> if obj_bp: <NEW_LINE> <INDENT> obj_mesh.parent = obj_bp <NEW_LINE> <DEDENT> update_vector_groups(obj_bp) <NEW_LINE> bpy.ops.fd_assembly.load_active_assembly_objects(object_name=obj_bp.name) <NEW_LINE> <DEDENT> return {'FINISHED'} <NEW_LINE> <DEDENT> def invoke(self,context,event): <NEW_LINE> <INDENT> wm = context.window_manager <NEW_LINE> return wm.invoke_props_dialog(self, width=utils.get_prop_dialog_width(400)) <NEW_LINE> <DEDENT> def draw(self, context): <NEW_LINE> <INDENT> layout = self.layout <NEW_LINE> layout.prop(self, "mesh_name")
Since this looks to the context this should only be called from the ui.
62598fb967a9b606de546128
class ColorInput(Input): <NEW_LINE> <INDENT> input_type = 'color'
This widget is a radiobutton
62598fb9f548e778e596b6fc
class EntryPublishedManager(models.Manager): <NEW_LINE> <INDENT> def get_query_set(self): <NEW_LINE> <INDENT> return entries_published( super(EntryPublishedManager, self).get_query_set()) <NEW_LINE> <DEDENT> def search(self, pattern): <NEW_LINE> <INDENT> lookup = None <NEW_LINE> for pattern in pattern.split(): <NEW_LINE> <INDENT> q = models.Q(content__icontains=pattern) | models.Q(excerpt__icontains=pattern) | models.Q(title__icontains=pattern) <NEW_LINE> if lookup is None: <NEW_LINE> <INDENT> lookup = q <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> lookup |= q <NEW_LINE> <DEDENT> <DEDENT> return self.get_query_set().filter(lookup)
Manager to retrieve published entries
62598fb956b00c62f0fb2a11
class Generator(): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.filestring = "" <NEW_LINE> <DEDENT> def generate(self, macr, decl, defi, adddate=True): <NEW_LINE> <INDENT> header = "File generated automatically from ASN.1 JSON description\n" <NEW_LINE> if adddate: <NEW_LINE> <INDENT> date = datetime.today() <NEW_LINE> header += "Generation date: " + date.strftime("%Y-%m-%d %H:%M") <NEW_LINE> <DEDENT> contents = "" <NEW_LINE> if len(macr) > 0: <NEW_LINE> <INDENT> contents = "\n\n" <NEW_LINE> for macrodef in macr: <NEW_LINE> <INDENT> contents += str(macrodef) + "\n" <NEW_LINE> <DEDENT> <DEDENT> if len(decl) > 0: <NEW_LINE> <INDENT> contents += "\n\n" <NEW_LINE> for declaration in decl: <NEW_LINE> <INDENT> contents += str(declaration) + "\n\n" <NEW_LINE> <DEDENT> <DEDENT> contents += "\n" <NEW_LINE> contents += self.comment("Main function") <NEW_LINE> contents += "\n" <NEW_LINE> maincontents = "" <NEW_LINE> if len(defi) > 0: <NEW_LINE> <INDENT> for definition in defi: <NEW_LINE> <INDENT> maincontents += str(definition) + "\n\n" <NEW_LINE> <DEDENT> <DEDENT> self.filestring += self.comment(header) <NEW_LINE> self.filestring += contents + self.mainfunc(maincontents) <NEW_LINE> return self.filestring <NEW_LINE> <DEDENT> def comment(self, text): <NEW_LINE> <INDENT> string = "/* " + str(text) + " */" <NEW_LINE> return string <NEW_LINE> <DEDENT> def mainfunc(self, contents): <NEW_LINE> <INDENT> mainfunc = "int main(int argc, char *argv[]) {\n\n" <NEW_LINE> mainfunc += indent(contents) <NEW_LINE> mainfunc += indent("\n\nreturn 0;") <NEW_LINE> mainfunc += "\n}" <NEW_LINE> return mainfunc <NEW_LINE> <DEDENT> def saveFile(self, filename): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> with open(filename, "w") as f: <NEW_LINE> <INDENT> f.write(self.filestring) <NEW_LINE> <DEDENT> <DEDENT> except IOError as ioErr: <NEW_LINE> <INDENT> ioErr.args += (filename,) <NEW_LINE> raise
Generator takes all declarations, definitions and defines objects and converts them to string, then forms that string correctly and dumps it to specified file.
62598fb9498bea3a75a57c7a
class myVGG(nn.Module): <NEW_LINE> <INDENT> def __init__(self, opts, cfg): <NEW_LINE> <INDENT> super(myVGG, self).__init__() <NEW_LINE> vgg16 = torchvision.models.vgg16(pretrained=True) <NEW_LINE> self.vgg16_enc = torch.nn.Sequential(*list(vgg16.children())[0][:22]) <NEW_LINE> for param in self.vgg16_enc.parameters(): <NEW_LINE> <INDENT> param.requires_grad = False <NEW_LINE> <DEDENT> <DEDENT> def forward(self, data): <NEW_LINE> <INDENT> vgg_output = self.vgg16_enc(data) <NEW_LINE> return [vgg_output]
VGG Discriminator.
62598fb97d43ff24874274af
class NLWrapper(object): <NEW_LINE> <INDENT> def __init__(self, list, func): <NEW_LINE> <INDENT> self.list = list <NEW_LINE> self.func = func <NEW_LINE> <DEDENT> def _return_nodelist(self): <NEW_LINE> <INDENT> return self.nodelist <NEW_LINE> <DEDENT> def _gen_nodelist(self): <NEW_LINE> <INDENT> mylist = self.list <NEW_LINE> if mylist is None: <NEW_LINE> <INDENT> mylist = [] <NEW_LINE> <DEDENT> elif not is_Sequence(mylist): <NEW_LINE> <INDENT> mylist = [mylist] <NEW_LINE> <DEDENT> self.nodelist = SCons.Util.NodeList(list(map(self.func, mylist))) <NEW_LINE> self._create_nodelist = self._return_nodelist <NEW_LINE> return self.nodelist <NEW_LINE> <DEDENT> _create_nodelist = _gen_nodelist
A wrapper class that delays turning a list of sources or targets into a NodeList until it's needed. The specified function supplied when the object is initialized is responsible for turning raw nodes into proxies that implement the special attributes like .abspath, .source, etc. This way, we avoid creating those proxies just "in case" someone is going to use $TARGET or the like, and only go through the trouble if we really have to. In practice, this might be a wash performance-wise, but it's a little cleaner conceptually...
62598fb9dc8b845886d53710
class pfException(Exception): <NEW_LINE> <INDENT> pass
Printful exception returned from the API.
62598fb94f88993c371f05b9
class MapTester(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.mappings = [] <NEW_LINE> self.mapper = PslMap(self) <NEW_LINE> <DEDENT> def mapBlock(self, psl, blk, qRngStart, qRngEnd, tRngStart, tRngEnd): <NEW_LINE> <INDENT> self.mappings.append(("blk", psl.qName, blk.iBlk, qRngStart, qRngEnd, tRngStart, tRngEnd)) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def __iBlkOrNone(blk): <NEW_LINE> <INDENT> return blk.iBlk if blk != None else None <NEW_LINE> <DEDENT> def mapGap(self, psl, prevBlk, nextBlk, qRngStart, qRngEnd, tRngStart, tRngEnd): <NEW_LINE> <INDENT> self.mappings.append(("gap", psl.qName, MapTester.__iBlkOrNone(prevBlk), MapTester.__iBlkOrNone(nextBlk), qRngStart, qRngEnd, tRngStart, tRngEnd)) <NEW_LINE> <DEDENT> def __joinMappings(self): <NEW_LINE> <INDENT> m = tuple(self.mappings) <NEW_LINE> self.mappings = [] <NEW_LINE> return m <NEW_LINE> <DEDENT> def targetToQueryMap(self, psl, tRngStart, tRngEnd): <NEW_LINE> <INDENT> self.mapper.targetToQueryMap(psl, tRngStart, tRngEnd) <NEW_LINE> return self.__joinMappings() <NEW_LINE> <DEDENT> def queryToTargetMap(self, psl, qRngStart, qRngEnd): <NEW_LINE> <INDENT> self.mapper.queryToTargetMap(psl, qRngStart, qRngEnd) <NEW_LINE> return self.__joinMappings()
test object that collects results
62598fb94c3428357761a413
class Diary(object): <NEW_LINE> <INDENT> def __init__(self, data_file='data.pickle'): <NEW_LINE> <INDENT> self.data_file = data_file <NEW_LINE> self.entries = self.load_data() <NEW_LINE> <DEDENT> def load_data(self): <NEW_LINE> <INDENT> entry_data = [] <NEW_LINE> if not os.path.isfile(self.data_file): <NEW_LINE> <INDENT> open(self.data_file, 'w').close() <NEW_LINE> <DEDENT> with open(self.data_file, 'rb') as source: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> while True: <NEW_LINE> <INDENT> entry_data.append(pickle.load(source)) <NEW_LINE> <DEDENT> <DEDENT> except EOFError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> return [DiaryEntry(**dataset) for dataset in entry_data] <NEW_LINE> <DEDENT> @property <NEW_LINE> def taken_uids(self): <NEW_LINE> <INDENT> return [entry.uid for entry in self.entries] <NEW_LINE> <DEDENT> @update_data <NEW_LINE> def add(self, item_type, subject, description, due_date): <NEW_LINE> <INDENT> uid = self.generate_initial_uid() <NEW_LINE> self.entries.append(DiaryEntry(uid, item_type, subject, description, due_date)) <NEW_LINE> <DEDENT> @update_data <NEW_LINE> def remove(self, *uids): <NEW_LINE> <INDENT> for entry in self.entries: <NEW_LINE> <INDENT> if entry.uid in uids: <NEW_LINE> <INDENT> self.entries.remove(entry) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> @update_data <NEW_LINE> def edit(self, attr, value, *uids): <NEW_LINE> <INDENT> for entry in self.entries: <NEW_LINE> <INDENT> if entry.uid in uids: <NEW_LINE> <INDENT> entry.edit(attr, value) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> @update_data <NEW_LINE> def extend(self, days, *uids): <NEW_LINE> <INDENT> for entry in self.entries: <NEW_LINE> <INDENT> if entry.uid in uids: <NEW_LINE> <INDENT> if entry.due_date is None: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> entry.due_date += datetime.timedelta(days=days) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> @update_data <NEW_LINE> def priority(self, priority, *uids): <NEW_LINE> <INDENT> for entry in self.entries: <NEW_LINE> <INDENT> if 
entry.uid in uids: <NEW_LINE> <INDENT> entry.priority = priority <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def generate_initial_uid(self): <NEW_LINE> <INDENT> while True: <NEW_LINE> <INDENT> uid = randint(100, 999) <NEW_LINE> if uid not in self.taken_uids: <NEW_LINE> <INDENT> return uid
A Diary class that contains a list of DiaryEntry objects and has methods to modify them. This class also handles local storing and fetching of all DiaryEntry data.
62598fb9f548e778e596b6fd
class CopyTestCase(TestCase): <NEW_LINE> <INDENT> def test_copy_success(self): <NEW_LINE> <INDENT> source_file = NamedTemporaryFile(delete=False) <NEW_LINE> source_file.seek(0) <NEW_LINE> source_file.write(b"cur report") <NEW_LINE> source_file.flush() <NEW_LINE> source_file_name = os.path.split(source_file.name)[1] <NEW_LINE> bucket_name = mkdtemp() <NEW_LINE> bucket_file_path = "/{}/{}".format("report_name", source_file_name) <NEW_LINE> success = copy_to_local_dir(bucket_name, source_file.name, bucket_file_path) <NEW_LINE> self.assertTrue(success) <NEW_LINE> expected_full_file_path = f"{bucket_name}{bucket_file_path}" <NEW_LINE> self.assertTrue(os.path.isfile(expected_full_file_path)) <NEW_LINE> shutil.rmtree(bucket_name) <NEW_LINE> os.remove(source_file.name) <NEW_LINE> <DEDENT> def test_copy_failure(self): <NEW_LINE> <INDENT> source_file = NamedTemporaryFile(delete=False) <NEW_LINE> source_file.seek(0) <NEW_LINE> source_file.write(b"cur report") <NEW_LINE> source_file.flush() <NEW_LINE> bucket_name = mkdtemp() <NEW_LINE> bad_bucket_name = bucket_name + "bad" <NEW_LINE> bucket_file_path = "/bucket_location" <NEW_LINE> success = copy_to_local_dir(bad_bucket_name, source_file.name, bucket_file_path) <NEW_LINE> self.assertFalse(success) <NEW_LINE> shutil.rmtree(bucket_name) <NEW_LINE> os.remove(source_file.name)
TestCase class for copy
62598fb9f9cc0f698b1c5379
class Objective(QtCore.QObject): <NEW_LINE> <INDENT> magnificationChanged = QtCore.pyqtSignal(str, float) <NEW_LINE> offsetChanged = QtCore.pyqtSignal(str, float, float) <NEW_LINE> def __init__(self, fixed = None, objective_item = None, objective_name = None, **kwds): <NEW_LINE> <INDENT> super().__init__(**kwds) <NEW_LINE> self.fixed = fixed <NEW_LINE> self.objective_item = objective_item <NEW_LINE> self.qt_widgets = [] <NEW_LINE> self.qt_widgets.append(ObjLabel(self.objective_item.objective_name)) <NEW_LINE> if fixed: <NEW_LINE> <INDENT> for elt in self.objective_item.getData(): <NEW_LINE> <INDENT> self.qt_widgets.append(ObjLabel("{0:.2f}".format(elt))) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> sbox = ObjDoubleSpinBox(self.objective_item.um_per_pixel, 0.01, 100.0) <NEW_LINE> sbox.setDecimals(2) <NEW_LINE> sbox.setSingleStep(0.01) <NEW_LINE> sbox.valueChanged.connect(self.handleMagChanged) <NEW_LINE> self.qt_widgets.append(sbox) <NEW_LINE> sbox = ObjDoubleSpinBox(self.objective_item.x_offset, -10000.0, 10000.0) <NEW_LINE> sbox.valueChanged.connect(self.handleXOffsetChanged) <NEW_LINE> self.qt_widgets.append(sbox) <NEW_LINE> sbox = ObjDoubleSpinBox(self.objective_item.y_offset, -10000.0, 10000.0) <NEW_LINE> sbox.valueChanged.connect(self.handleYOffsetChanged) <NEW_LINE> self.qt_widgets.append(sbox) <NEW_LINE> <DEDENT> <DEDENT> def getData(self): <NEW_LINE> <INDENT> return self.objective_item.getData() <NEW_LINE> <DEDENT> def getName(self): <NEW_LINE> <INDENT> return self.objective_item.objective_name <NEW_LINE> <DEDENT> def getQtWidgets(self): <NEW_LINE> <INDENT> return self.qt_widgets <NEW_LINE> <DEDENT> def handleMagChanged(self, value): <NEW_LINE> <INDENT> self.objective_item.um_per_pixel = value <NEW_LINE> self.magnificationChanged.emit(self.objective_item.objective_name, self.objective_item.um_per_pixel) <NEW_LINE> <DEDENT> def handleXOffsetChanged(self, value): <NEW_LINE> <INDENT> self.objective_item.x_offset = value <NEW_LINE> 
self.offsetChanged.emit(self.objective_item.objective_name, self.objective_item.x_offset, self.objective_item.y_offset) <NEW_LINE> <DEDENT> def handleYOffsetChanged(self, value): <NEW_LINE> <INDENT> self.objective_item.y_offset = value <NEW_LINE> self.offsetChanged.emit(self.objective_item.objective_name, self.objective_item.x_offset, self.objective_item.y_offset) <NEW_LINE> <DEDENT> def select(self, on_off): <NEW_LINE> <INDENT> for widget in self.qt_widgets: <NEW_LINE> <INDENT> widget.select(on_off)
Handles controls for a single objective.
62598fb9091ae35668704d79
class CursorBase(MySQLCursorAbstract): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self._description = None <NEW_LINE> self._rowcount = -1 <NEW_LINE> self._last_insert_id = None <NEW_LINE> self.arraysize = 1 <NEW_LINE> super(CursorBase, self).__init__() <NEW_LINE> <DEDENT> def callproc(self, procname, args=()): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def close(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def execute(self, operation, params=(), multi=False): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def executemany(self, operation, seqparams): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def fetchone(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def fetchmany(self, size=1): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def fetchall(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def nextset(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def setinputsizes(self, sizes): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def setoutputsize(self, size, column=None): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def reset(self, free=True): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @property <NEW_LINE> def description(self): <NEW_LINE> <INDENT> return self._description <NEW_LINE> <DEDENT> @property <NEW_LINE> def rowcount(self): <NEW_LINE> <INDENT> return self._rowcount <NEW_LINE> <DEDENT> @property <NEW_LINE> def lastrowid(self): <NEW_LINE> <INDENT> return self._last_insert_id
Base for defining MySQLCursor. This class is a skeleton and defines methods and members as required for the Python Database API Specification v2.0. It's better to inherite from MySQLCursor.
62598fb93346ee7daa3376f4
class SyncV1TestCase(_SuccessMixin, _BaseTestCase): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def setUpClass(cls): <NEW_LINE> <INDENT> super(SyncV1TestCase, cls).setUpClass() <NEW_LINE> docker_utils.repo_create( cls.cfg, feed=DOCKER_V1_FEED_URL, repo_id=cls.repo_id, upstream_name=_UPSTREAM_NAME, ) <NEW_LINE> cls.completed_proc = docker_utils.repo_sync(cls.cfg, cls.repo_id)
Show it is possible to sync a docker repository with a v1 registry.
62598fb9099cdd3c6367548e
class ModelOptions(usage.Options): <NEW_LINE> <INDENT> synopsis = '[options] name table' <NEW_LINE> optFlags = [ ['dump', 'd', 'Dump the configuration to the standard output'], ['noschema', 's', 'Set this if you don\'t want Mamba manaing this at schema-level'], ['noquestions', 'n', 'When this option is set, mamba will NOT ask anything to the user ' 'that means ot will overwrite any other version of the model file ' 'that already exists on the file system. Use with caution.'] ] <NEW_LINE> optParameters = [ ['description', None, None, 'Model\'s description'], ['author', None, None, 'Model\'s author'], ['email', None, None, 'Author\'s email'], ['classname', None, None, 'Set this parameter if you want that your new model use a specific' ' class name'], ['platforms', None, None, 'Supported platforms (example: \'Unix, Windows\')'] ] <NEW_LINE> def opt_version(self): <NEW_LINE> <INDENT> show_version() <NEW_LINE> sys.exit(0) <NEW_LINE> <DEDENT> def parseArgs(self, name=None, table=None): <NEW_LINE> <INDENT> if name is None or table is None: <NEW_LINE> <INDENT> self['name'] = None <NEW_LINE> self['table'] = None <NEW_LINE> return <NEW_LINE> <DEDENT> regex = re.compile(r'[^._a-zA-Z0-9]') <NEW_LINE> name = regex.sub('', name) <NEW_LINE> path, name = commons.process_path_name(name) <NEW_LINE> self['filename'] = filepath.joinpath(path.lower(), name.lower()) <NEW_LINE> self['name'] = CamelCase(name.replace('_', ' ')).camelize(True) <NEW_LINE> self['model_table'] = table <NEW_LINE> <DEDENT> def postOptions(self): <NEW_LINE> <INDENT> RFC2822 = re.compile( r"[a-z0-9!#$%&'*+/=?^_`{|}~-]+(?:\.[a-z0-9!#$%&'*" "+/=?^_`{|}~-]+)*@(?:[a-z0-9](?:[a-z0-9-]*[a-z0-9]" ")?\.)+[a-z0-9](?:[a-z0-9-]*[a-z0-9])?" 
) <NEW_LINE> if self['author'] is None: <NEW_LINE> <INDENT> self['author'] = getpass.getuser() <NEW_LINE> <DEDENT> if self['email'] is not None: <NEW_LINE> <INDENT> if RFC2822.match(self['email']) is None: <NEW_LINE> <INDENT> print( 'error: the given email address {} is not a valid RFC2822 ' 'email address, ' 'check http://www.rfc-editor.org/rfc/rfc2822.txt for ' 'very extended details'.format(self['email']) ) <NEW_LINE> sys.exit(-1) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> self['email'] = '{}@localhost'.format(self['author']) <NEW_LINE> <DEDENT> if self['platforms'] is None: <NEW_LINE> <INDENT> self['platforms'] = 'Linux'
Model Configuration options for mamba-admin tool
62598fb9bf627c535bcb15fc
class LDObjectField(models.TextField): <NEW_LINE> <INDENT> def __init__(self, type_hint=None, **kwargs): <NEW_LINE> <INDENT> self.type_hint = type_hint <NEW_LINE> super(LDObjectField, self).__init__(**kwargs) <NEW_LINE> <DEDENT> def to_python(self, value): <NEW_LINE> <INDENT> if not isinstance(value, str): <NEW_LINE> <INDENT> return value <NEW_LINE> <DEDENT> return self.from_db_value(value) <NEW_LINE> <DEDENT> def from_db_value(self, value, *args, **kwargs): <NEW_LINE> <INDENT> parsed_value = utils.from_json(value) <NEW_LINE> if parsed_value is None: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> return_val = LDObject.fromDict(parsed_value, type_hint=self.type_hint).wrapped_obj <NEW_LINE> return return_val <NEW_LINE> <DEDENT> def get_prep_value(self, value): <NEW_LINE> <INDENT> if isinstance(value, str): <NEW_LINE> <INDENT> return value <NEW_LINE> <DEDENT> if value is None: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> ld_object = LDObject.instantiate(value, datatype=self.type_hint) <NEW_LINE> return ld_object.serialize() <NEW_LINE> <DEDENT> def value_to_string(self, obj): <NEW_LINE> <INDENT> value = self._get_val_from_obj(obj) <NEW_LINE> return self.get_db_prep_value(value, None)
LDObject is a generic textfield that neatly serializes/unserializes JSON objects seamlessly. deserialization_params added on 2011-01-09 to provide additional hints at deserialization time
62598fb97047854f4633f52f
class SecretManager(base.BaseEntityManager): <NEW_LINE> <INDENT> def __init__(self, api): <NEW_LINE> <INDENT> super(SecretManager, self).__init__(api, 'secrets') <NEW_LINE> <DEDENT> def get(self, secret_ref, payload_content_type=None): <NEW_LINE> <INDENT> LOG.debug("Getting secret - Secret href: {0}".format(secret_ref)) <NEW_LINE> base.validate_ref(secret_ref, 'Secret') <NEW_LINE> return Secret( api=self._api, payload_content_type=payload_content_type, secret_ref=secret_ref ) <NEW_LINE> <DEDENT> def update(self, secret_ref, payload=None): <NEW_LINE> <INDENT> base.validate_ref(secret_ref, 'Secret') <NEW_LINE> if not secret_ref: <NEW_LINE> <INDENT> raise ValueError('secret_ref is required.') <NEW_LINE> <DEDENT> if type(payload) is six.binary_type: <NEW_LINE> <INDENT> headers = {'content-type': "application/octet-stream"} <NEW_LINE> <DEDENT> elif type(payload) is six.text_type: <NEW_LINE> <INDENT> headers = {'content-type': "text/plain"} <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise exceptions.PayloadException("Invalid Payload Type") <NEW_LINE> <DEDENT> self._api.put(secret_ref, headers=headers, data=payload) <NEW_LINE> <DEDENT> def create(self, name=None, payload=None, payload_content_type=None, payload_content_encoding=None, algorithm=None, bit_length=None, secret_type=None, mode=None, expiration=None): <NEW_LINE> <INDENT> return Secret(api=self._api, name=name, payload=payload, payload_content_type=payload_content_type, payload_content_encoding=payload_content_encoding, algorithm=algorithm, bit_length=bit_length, mode=mode, secret_type=secret_type, expiration=expiration) <NEW_LINE> <DEDENT> def delete(self, secret_ref): <NEW_LINE> <INDENT> base.validate_ref(secret_ref, 'Secret') <NEW_LINE> if not secret_ref: <NEW_LINE> <INDENT> raise ValueError('secret_ref is required.') <NEW_LINE> <DEDENT> self._api.delete(secret_ref) <NEW_LINE> <DEDENT> def list(self, limit=10, offset=0, name=None, algorithm=None, mode=None, bits=0, secret_type=None, created=None, 
updated=None, expiration=None, sort=None): <NEW_LINE> <INDENT> LOG.debug('Listing secrets - offset {0} limit {1}'.format(offset, limit)) <NEW_LINE> params = {'limit': limit, 'offset': offset} <NEW_LINE> if name: <NEW_LINE> <INDENT> params['name'] = name <NEW_LINE> <DEDENT> if algorithm: <NEW_LINE> <INDENT> params['alg'] = algorithm <NEW_LINE> <DEDENT> if mode: <NEW_LINE> <INDENT> params['mode'] = mode <NEW_LINE> <DEDENT> if bits > 0: <NEW_LINE> <INDENT> params['bits'] = bits <NEW_LINE> <DEDENT> if secret_type: <NEW_LINE> <INDENT> params['secret_type'] = secret_type <NEW_LINE> <DEDENT> if created: <NEW_LINE> <INDENT> params['created'] = created <NEW_LINE> <DEDENT> if updated: <NEW_LINE> <INDENT> params['updated'] = updated <NEW_LINE> <DEDENT> if expiration: <NEW_LINE> <INDENT> params['expiration'] = expiration <NEW_LINE> <DEDENT> if sort: <NEW_LINE> <INDENT> params['sort'] = sort <NEW_LINE> <DEDENT> response = self._api.get(self._entity, params=params) <NEW_LINE> return [ Secret(api=self._api, **s) for s in response.get('secrets', []) ]
Entity Manager for Secret entities
62598fb9377c676e912f6e1c
class ConveRTFFN(nn.Module): <NEW_LINE> <INDENT> def __init__(self, insz, hszs, outsz, pdrop): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.dense_stack = DenseStack(insz, hszs, activation='gelu', pdrop_value=pdrop, skip_connect=True, layer_norm=True) <NEW_LINE> self.final = Dense(hszs[-1], outsz) <NEW_LINE> self.proj = Dense(insz, outsz) if insz != outsz else nn.Identity() <NEW_LINE> self.ln1 = nn.LayerNorm(insz, eps=1e-6) <NEW_LINE> self.ln2 = nn.LayerNorm(outsz, eps=1e-6) <NEW_LINE> <DEDENT> def forward(self, inputs): <NEW_LINE> <INDENT> x = self.ln1(inputs) <NEW_LINE> x = self.dense_stack(x) <NEW_LINE> x = self.final(x) <NEW_LINE> x = x + self.proj(inputs) <NEW_LINE> return self.ln2(x)
Implementation of the FFN layer from the convert paper (https://arxiv.org/pdf/1911.03688.pdf)
62598fb9009cb60464d0167c
class MapImageThresholdsCalculator: <NEW_LINE> <INDENT> def __init__(self, thr_coeff: float = 0.0005, no_value: float = 0.0): <NEW_LINE> <INDENT> self._thr_coeff = thr_coeff <NEW_LINE> self._no_value = no_value <NEW_LINE> <DEDENT> def __call__(self, map_image: np.ndarray) -> t.Optional[Thresholds]: <NEW_LINE> <INDENT> map_image_ma = np.ma.masked_equal(map_image, self._no_value) <NEW_LINE> if map_image_ma.mask.all(): <NEW_LINE> <INDENT> logger.warning('There are no any values on the map') <NEW_LINE> return None <NEW_LINE> <DEDENT> data = np.sort(map_image_ma.compressed().ravel()) <NEW_LINE> lower_data = data[:int(self._thr_coeff * data.size)] <NEW_LINE> upper_data = data[int(data.size - self._thr_coeff * data.size):] <NEW_LINE> if lower_data.size > 0: <NEW_LINE> <INDENT> lower_thr = np.median(lower_data) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> lower_thr = data.min() <NEW_LINE> <DEDENT> if upper_data.size > 0: <NEW_LINE> <INDENT> upper_thr = np.median(upper_data) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> upper_thr = data.min() <NEW_LINE> <DEDENT> return Thresholds(lower_thr, upper_thr)
Statistics/CNR map thresholds calculator class The class computes the optimal thresholds for display a map.
62598fb9e1aae11d1e7ce8d1
class CurrentSensor(GEMSensor): <NEW_LINE> <INDENT> _attr_icon = CURRENT_SENSOR_ICON <NEW_LINE> _attr_unit_of_measurement = UNIT_WATTS <NEW_LINE> def __init__(self, monitor_serial_number, number, name, net_metering): <NEW_LINE> <INDENT> super().__init__(monitor_serial_number, name, "current", number) <NEW_LINE> self._net_metering = net_metering <NEW_LINE> <DEDENT> def _get_sensor(self, monitor): <NEW_LINE> <INDENT> return monitor.channels[self._number - 1] <NEW_LINE> <DEDENT> @property <NEW_LINE> def state(self): <NEW_LINE> <INDENT> if not self._sensor: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> return self._sensor.watts <NEW_LINE> <DEDENT> @property <NEW_LINE> def extra_state_attributes(self): <NEW_LINE> <INDENT> if not self._sensor: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> if self._net_metering: <NEW_LINE> <INDENT> watt_seconds = self._sensor.polarized_watt_seconds <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> watt_seconds = self._sensor.absolute_watt_seconds <NEW_LINE> <DEDENT> return {DATA_WATT_SECONDS: watt_seconds}
Entity showing power usage on one channel of the monitor.
62598fb9ec188e330fdf89ea
class TestLoginHTML(MobileHTMLRenderingCase): <NEW_LINE> <INDENT> def test_login_page_in_a_clean_state(self): <NEW_LINE> <INDENT> fixture_name = 'login_clean.html' <NEW_LINE> fixture_filepath = self._get_fixture_file_path(fixture_name) <NEW_LINE> if fixture_filepath: <NEW_LINE> <INDENT> tree = etree.parse(fixture_filepath) <NEW_LINE> expected_output = etree.tostring(tree) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> _logger.warning( 'IOError: Error reading fixture "{}". Hence, ' 'this test has not been actually executed'.format( fixture_name) ) <NEW_LINE> <DEDENT> api_data = { 'stylesheet': '/mobile/src/css/main.css', 'logo': '/mobile/src/img/logo.png', 'form_action': '/mobile/login/', 'errors': '', 'databases': ['nhclinical'] } <NEW_LINE> rendered_template = self.jinja_env. get_template('login.html').render(**api_data) <NEW_LINE> compressed_rendered_template = self._compress_string( rendered_template) <NEW_LINE> compressed_expected_output = self._compress_string( expected_output) <NEW_LINE> self.assertEqual( compressed_rendered_template, compressed_expected_output) <NEW_LINE> <DEDENT> def test_login_page_in_an_invalid_state(self): <NEW_LINE> <INDENT> fixture_name = 'login_invalid.html' <NEW_LINE> fixture_filepath = self._get_fixture_file_path(fixture_name) <NEW_LINE> if fixture_filepath: <NEW_LINE> <INDENT> tree = etree.parse(fixture_filepath) <NEW_LINE> expected_output = etree.tostring(tree) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> _logger.warning( 'IOError: Error reading fixture "{}". 
Hence, ' 'this test has not been actually executed'.format( fixture_name ) ) <NEW_LINE> <DEDENT> err = '<div class="alert alert-error">Invalid username/password</div>' <NEW_LINE> api_data = { 'stylesheet': '/mobile/src/css/main.css', 'logo': '/mobile/src/img/logo.png', 'form_action': '/mobile/login/', 'errors': err, 'databases': ['nhclinical'] } <NEW_LINE> rendered_template = self.jinja_env.get_template('login.html') .render(**api_data) <NEW_LINE> compressed_rendered_template = self._compress_string( rendered_template) <NEW_LINE> compressed_expected_output = self._compress_string( expected_output) <NEW_LINE> self.assertEqual( compressed_rendered_template, compressed_expected_output )
Test case collecting all the tests relating to the RENDERING of the 'login' page. Compare the actual rendered HTML pages against fixtures (i.e. 'fake' HTML files) specially built.
62598fb99f2886367281891e
class StopInstancesException(BaseException): <NEW_LINE> <INDENT> pass
A child exception for the stop_instance method.
62598fb9dc8b845886d53712
class NullEvent: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "Null Reaction" <NEW_LINE> <DEDENT> def updaterate(self): <NEW_LINE> <INDENT> return None
Null event represents nothing happening. Useful in path generation.
62598fb9656771135c4897c7
class MedicationPrescriptionDosageInstruction(fhirelement.FHIRElement): <NEW_LINE> <INDENT> resource_name = "MedicationPrescriptionDosageInstruction" <NEW_LINE> def __init__(self, jsondict=None): <NEW_LINE> <INDENT> self.additionalInstructions = None <NEW_LINE> self.asNeededBoolean = None <NEW_LINE> self.asNeededCodeableConcept = None <NEW_LINE> self.doseQuantity = None <NEW_LINE> self.doseRange = None <NEW_LINE> self.maxDosePerPeriod = None <NEW_LINE> self.method = None <NEW_LINE> self.rate = None <NEW_LINE> self.route = None <NEW_LINE> self.scheduledDateTime = None <NEW_LINE> self.scheduledPeriod = None <NEW_LINE> self.scheduledTiming = None <NEW_LINE> self.site = None <NEW_LINE> self.text = None <NEW_LINE> super(MedicationPrescriptionDosageInstruction, self).__init__(jsondict) <NEW_LINE> <DEDENT> def elementProperties(self): <NEW_LINE> <INDENT> js = super(MedicationPrescriptionDosageInstruction, self).elementProperties() <NEW_LINE> js.extend([ ("additionalInstructions", "additionalInstructions", codeableconcept.CodeableConcept, False), ("asNeededBoolean", "asNeededBoolean", bool, False), ("asNeededCodeableConcept", "asNeededCodeableConcept", codeableconcept.CodeableConcept, False), ("doseQuantity", "doseQuantity", quantity.Quantity, False), ("doseRange", "doseRange", range.Range, False), ("maxDosePerPeriod", "maxDosePerPeriod", ratio.Ratio, False), ("method", "method", codeableconcept.CodeableConcept, False), ("rate", "rate", ratio.Ratio, False), ("route", "route", codeableconcept.CodeableConcept, False), ("scheduledDateTime", "scheduledDateTime", fhirdate.FHIRDate, False), ("scheduledPeriod", "scheduledPeriod", period.Period, False), ("scheduledTiming", "scheduledTiming", timing.Timing, False), ("site", "site", codeableconcept.CodeableConcept, False), ("text", "text", str, False), ]) <NEW_LINE> return js
How medication should be taken. Indicates how the medication is to be used by the patient.
62598fb9a8370b77170f0538
class SerializedDataField(fields.TextField): <NEW_LINE> <INDENT> __metaclass__ = SubfieldBase <NEW_LINE> def to_python(self, value): <NEW_LINE> <INDENT> if value is None or value is "": return <NEW_LINE> if not isinstance(value, basestring): return value <NEW_LINE> try: <NEW_LINE> <INDENT> return pickle.loads(base64.b64decode(value)) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> <DEDENT> def get_db_prep_save(self, value): <NEW_LINE> <INDENT> if value is None or value is "": return <NEW_LINE> return base64.b64encode(pickle.dumps(value))
A field which serializes python values to the database, and returns them intact.
62598fb94527f215b58ea02f
class TokenPageTestCase(ptc.PloneTestCase): <NEW_LINE> <INDENT> def afterSetUp(self): <NEW_LINE> <INDENT> request = TestRequest() <NEW_LINE> self.consumerManager = zope.component.getMultiAdapter( (self.portal, request), IConsumerManager) <NEW_LINE> self.consumer = Consumer('consumer.example.com', 'consumer-secret') <NEW_LINE> self.consumerManager.add(self.consumer) <NEW_LINE> self.tokenManager = zope.component.getMultiAdapter( (self.portal, request), ITokenManager) <NEW_LINE> self.scopeManager = zope.component.getMultiAdapter( (self.portal, request), IScopeManager) <NEW_LINE> self.reqtoken = self.tokenManager.generateRequestToken( self.consumer.key, 'oob') <NEW_LINE> self.scopeManager.requestScope(self.reqtoken.key, None) <NEW_LINE> <DEDENT> def test_request_token_page_fail(self): <NEW_LINE> <INDENT> request = TestRequest() <NEW_LINE> rt = token.RequestTokenPage(self.portal, request) <NEW_LINE> self.assertRaises(BadRequest, rt) <NEW_LINE> <DEDENT> def test_request_token_page_good(self): <NEW_LINE> <INDENT> baseurl = self.portal.absolute_url() <NEW_LINE> timestamp = str(int(time.time())) <NEW_LINE> request = SignedTestRequest( timestamp=timestamp, consumer=self.consumer, callback=baseurl + '/test_oauth_callback', ) <NEW_LINE> qs = dict(parse_qsl(token.RequestTokenPage(self.portal, request)())) <NEW_LINE> self.assertEqual( self.tokenManager.getRequestToken(qs['oauth_token']).secret, qs['oauth_token_secret'], ) <NEW_LINE> <DEDENT> def test_access_token_page_fail(self): <NEW_LINE> <INDENT> request = TestRequest() <NEW_LINE> rt = token.GetAccessTokenPage(self.portal, request) <NEW_LINE> self.assertRaises(BadRequest, rt) <NEW_LINE> <DEDENT> def test_access_token_page_good(self): <NEW_LINE> <INDENT> self.reqtoken.user = 'test_user_1_' <NEW_LINE> baseurl = self.portal.absolute_url() <NEW_LINE> timestamp = str(int(time.time())) <NEW_LINE> request = SignedTestRequest( timestamp=timestamp, consumer=self.consumer, token=self.reqtoken, verifier=self.reqtoken.verifier, ) 
<NEW_LINE> page = token.GetAccessTokenPage(self.portal, request) <NEW_LINE> qs = dict(parse_qsl(page())) <NEW_LINE> self.assertEqual( self.tokenManager.getAccessToken(qs['oauth_token']).secret, qs['oauth_token_secret'], ) <NEW_LINE> self.assertTrue( self.tokenManager.getRequestToken(self.reqtoken.key, None) is None) <NEW_LINE> self.assertTrue( self.scopeManager.getScope(self.reqtoken.key, None) is None) <NEW_LINE> self.assertRaises(Unauthorized, page)
Testing functionalities of forms that don't fit well into doctests.
62598fb944b2445a339b6a21
class PasswordGrant(grants.ResourceOwnerPasswordCredentialsGrant): <NEW_LINE> <INDENT> TOKEN_ENDPOINT_AUTH_METHODS = [ 'client_secret_basic', 'client_secret_post' ] <NEW_LINE> def authenticate_user(self, username, password): <NEW_LINE> <INDENT> user = User.query.filter_by(username=username).first() <NEW_LINE> if user.validate_password(password): <NEW_LINE> <INDENT> return user
Resource owner uses his username and password to exchange an access token, this grant type should be used only when the client is trustworthy, implement it with a subclass of ResourceOwnerPasswordCredentialsGrant
62598fb94f6381625f19956f
class ThisPerson(unittest.TestCase): <NEW_LINE> <INDENT> def test_add_item_return_list(self): <NEW_LINE> <INDENT> self.assertIsInstance(add_items(),{},msg='method returning a dictionary instance')
class holding tests for shopping list class
62598fb9f9cc0f698b1c537a
@python_2_unicode_compatible <NEW_LINE> class Pagamento(models.Model): <NEW_LINE> <INDENT> data = models.DateTimeField(db_index=True, verbose_name=_(u"Data do pagamento")) <NEW_LINE> valor = models.DecimalField(max_digits=20, decimal_places=2, verbose_name=_(u"Valor")) <NEW_LINE> juros = models.DecimalField(max_digits=20, decimal_places=2, blank=True, null=True, verbose_name=_(u"Juros")) <NEW_LINE> multa = models.DecimalField(max_digits=20, decimal_places=2, blank=True, null=True, verbose_name=_(u"Multa")) <NEW_LINE> desconto = models.DecimalField(max_digits=20, decimal_places=2, blank=True, null=True, verbose_name=_(u"Desconto")) <NEW_LINE> parcelas_contas_pagar = models.ForeignKey('ParcelasContasPagar', on_delete=models.PROTECT, verbose_name=_(u"Pagamento de parcela")) <NEW_LINE> observacao = models.TextField(blank=True, verbose_name=_(u"Observações")) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return u'%s' % (self.id) <NEW_LINE> <DEDENT> def conta_associada(self): <NEW_LINE> <INDENT> if self.parcelas_contas_pagar: <NEW_LINE> <INDENT> url = reverse("admin:contas_pagar_contaspagar_change", args=[self.parcelas_contas_pagar.contas_pagar]) <NEW_LINE> return u"<a href='%s'>%s</a>" % (url, self.parcelas_contas_pagar.contas_pagar) <NEW_LINE> <DEDENT> return '-' <NEW_LINE> <DEDENT> conta_associada.allow_tags = True <NEW_LINE> conta_associada.short_description = _(u"Conta a pagar") <NEW_LINE> conta_associada.admin_order_field = 'parcelas_contas_pagar__contas_pagar' <NEW_LINE> def save(self, *args, **kwargs): <NEW_LINE> <INDENT> from contas_pagar.models import ContasPagar, ParcelasContasPagar <NEW_LINE> if self.pk is None: <NEW_LINE> <INDENT> super(Pagamento, self).save(*args, **kwargs) <NEW_LINE> parcela_pagamento = Pagamento.objects.filter(pk=self.pk).values_list('parcelas_contas_pagar')[0] <NEW_LINE> parcela = ParcelasContasPagar.objects.get(pk=parcela_pagamento[0]) <NEW_LINE> if parcela.valor_pago() >= parcela.valor_total(): <NEW_LINE> <INDENT> parcela.status = 
True <NEW_LINE> parcela.save() <NEW_LINE> conta_pagar = ContasPagar.objects.get(pk=parcela.contas_pagar.pk) <NEW_LINE> conta_aberta = ParcelasContasPagar.objects.filter(contas_pagar=conta_pagar.pk, status=0).exists() <NEW_LINE> if conta_aberta: <NEW_LINE> <INDENT> conta_pagar.status = False <NEW_LINE> conta_pagar.save() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> conta_pagar.status = True <NEW_LINE> conta_pagar.save() <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> super(Pagamento, self).save(*args, **kwargs)
Classe Pagamento. Criada para registrar todas as saídas financeiras do estabelecimento. Os registros de pagamentos entrarão automaticamente na tabela. Contudo, também será possível cadastrar pagamentos manualmente, pensando em casos em que valores são pagos, eventualmente, sem a compra ter sido cadastrada. Criada em 16/06/2014.
62598fb923849d37ff85120d
class Fluid: <NEW_LINE> <INDENT> def __init__(self,density=1,viscosity=1,name='fluid',velocity=array([0,0]),relativePermittivity=81): <NEW_LINE> <INDENT> self.density = density <NEW_LINE> self.viscosity = viscosity <NEW_LINE> self.name = name <NEW_LINE> self.velocity = velocity <NEW_LINE> self.relativePermittivity = relativePermittivity
The fluid object (SI units). Attributes: density : An integer for density (defaults to 1) viscosity : An integer for viscosity (defaults to 1) name : A string to define the fluid's name (defaults to 'fluid') velocity : A numpy.array for velocity field (numpy.array([[x1,y1],[x2,y2],[x3,y3],...])) relativePermittivity : An integer for fluid permittivity (defaults to 81)
62598fb932920d7e50bc61a8
class ECU_am(AnsweringMachine): <NEW_LINE> <INDENT> function_name = "ECU_am" <NEW_LINE> sniff_options_list = ["store", "opened_socket", "count", "filter", "prn", "stop_filter", "timeout"] <NEW_LINE> def parse_options(self, supported_responses=None, main_socket=None, broadcast_socket=None, basecls=Raw, timeout=None): <NEW_LINE> <INDENT> self.main_socket = main_socket <NEW_LINE> self.sockets = [self.main_socket] <NEW_LINE> if broadcast_socket is not None: <NEW_LINE> <INDENT> self.sockets.append(broadcast_socket) <NEW_LINE> <DEDENT> self.ecu_state = ECU(logging=False, verbose=False, store_supported_responses=False) <NEW_LINE> self.basecls = basecls <NEW_LINE> self.supported_responses = supported_responses <NEW_LINE> self.sniff_options["timeout"] = timeout <NEW_LINE> self.sniff_options["opened_socket"] = self.sockets <NEW_LINE> <DEDENT> def is_request(self, req): <NEW_LINE> <INDENT> return req.__class__ == self.basecls <NEW_LINE> <DEDENT> def print_reply(self, req, reply): <NEW_LINE> <INDENT> print("%s ==> %s" % (req.summary(), [res.summary() for res in reply])) <NEW_LINE> <DEDENT> def make_reply(self, req): <NEW_LINE> <INDENT> if self.supported_responses is not None: <NEW_LINE> <INDENT> for resp in self.supported_responses: <NEW_LINE> <INDENT> if not isinstance(resp, ECUResponse): <NEW_LINE> <INDENT> raise Scapy_Exception("Unsupported type for response. " "Please use `ECUResponse` objects. 
") <NEW_LINE> <DEDENT> if not resp.in_correct_session(self.ecu_state.current_session): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> if not resp.has_security_access( self.ecu_state.current_security_level): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> if not resp.answers(req): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> for r in resp.responses: <NEW_LINE> <INDENT> for layer in r.layers(): <NEW_LINE> <INDENT> if hasattr(layer, "modifies_ecu_state"): <NEW_LINE> <INDENT> layer.modifies_ecu_state(r, self.ecu_state) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return resp.responses <NEW_LINE> <DEDENT> <DEDENT> return PacketList([self.basecls(b"\x7f" + bytes(req)[0:1] + b"\x10")]) <NEW_LINE> <DEDENT> def send_reply(self, reply): <NEW_LINE> <INDENT> for p in reply: <NEW_LINE> <INDENT> time.sleep(conf.contribs['ECU_am']['send_delay']) <NEW_LINE> if len(reply) > 1: <NEW_LINE> <INDENT> time.sleep(random.uniform(0.01, 0.5)) <NEW_LINE> <DEDENT> self.main_socket.send(p)
AnsweringMachine which emulates the basic behaviour of a real world ECU. Provide a list of ``ECUResponse`` objects to configure the behaviour of this AnsweringMachine. :param supported_responses: List of ``ECUResponse`` objects to define the behaviour. The default response is ``generalReject``. :param main_socket: Defines the object of the socket to send and receive packets. :param broadcast_socket: Defines the object of the broadcast socket. Listen-only, responds with the main_socket. `None` to disable broadcast capabilities. :param basecls: Provide a basecls of the used protocol Usage: >>> resp = ECUResponse(session=range(0,255), security_level=0, responses=UDS() / UDS_NR(negativeResponseCode=0x7f, requestServiceId=0x10)) # noqa: E501 >>> sock = ISOTPSocket(can_iface, sid=0x700, did=0x600, basecls=UDS) # noqa: E501 >>> answering_machine = ECU_am(supported_responses=[resp], main_socket=sock, basecls=UDS) # noqa: E501 >>> sim = threading.Thread(target=answering_machine, kwargs={'count': 4, 'timeout':5}) # noqa: E501 >>> sim.start()
62598fb992d797404e388c10
class LocalTransformations(ind.LocalTransformations): <NEW_LINE> <INDENT> def __init__(self, attribs): <NEW_LINE> <INDENT> super(LocalTransformations, self).__init__() <NEW_LINE> for key, value in attribs.items(): <NEW_LINE> <INDENT> setattr(self, key, value) <NEW_LINE> <DEDENT> self.DerivedField = [] <NEW_LINE> self.Extension = []
Represents a <LocalTransformations> tag in v4.0 and provides methods to convert to PFA.
62598fb95fcc89381b2661f9
class HumanPoseDetectionServicer(object): <NEW_LINE> <INDENT> def Ping(self, request, context): <NEW_LINE> <INDENT> context.set_code(grpc.StatusCode.UNIMPLEMENTED) <NEW_LINE> context.set_details('Method not implemented!') <NEW_LINE> raise NotImplementedError('Method not implemented!') <NEW_LINE> <DEDENT> def StartSession(self, request, context): <NEW_LINE> <INDENT> context.set_code(grpc.StatusCode.UNIMPLEMENTED) <NEW_LINE> context.set_details('Method not implemented!') <NEW_LINE> raise NotImplementedError('Method not implemented!') <NEW_LINE> <DEDENT> def Detect(self, request, context): <NEW_LINE> <INDENT> context.set_code(grpc.StatusCode.UNIMPLEMENTED) <NEW_LINE> context.set_details('Method not implemented!') <NEW_LINE> raise NotImplementedError('Method not implemented!') <NEW_LINE> <DEDENT> def StopSession(self, request, context): <NEW_LINE> <INDENT> context.set_code(grpc.StatusCode.UNIMPLEMENTED) <NEW_LINE> context.set_details('Method not implemented!') <NEW_LINE> raise NotImplementedError('Method not implemented!')
Missing associated documentation comment in .proto file.
62598fb997e22403b383b060
class Position(object): <NEW_LINE> <INDENT> def __init__(self, filename, line, col): <NEW_LINE> <INDENT> self.filename = filename <NEW_LINE> self.line = line <NEW_LINE> self.col = col <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "%s:%d:%d" % (self.filename, self.line, self.col)
Each node has a position which is an instance of this type.
62598fb997e22403b383b061
class PMRequestPlugin(object): <NEW_LINE> <INDENT> @traceLog() <NEW_LINE> def __init__(self, plugins, conf, buildroot): <NEW_LINE> <INDENT> self.buildroot = buildroot <NEW_LINE> self.config = conf <NEW_LINE> plugins.add_hook("earlyprebuild", self.start_listener) <NEW_LINE> plugins.add_hook("preshell", self.start_listener) <NEW_LINE> plugins.add_hook("postbuild", self.log_executed) <NEW_LINE> <DEDENT> @traceLog() <NEW_LINE> def start_listener(self): <NEW_LINE> <INDENT> process = multiprocessing.Process( name="pm-request-listener", target=lambda: PMRequestListener(self.config, self.buildroot).listen()) <NEW_LINE> process.daemon = True <NEW_LINE> self.buildroot.env['PM_REQUEST_SOCKET'] = os.path.join(RUNDIR, SOCKET_NAME) <NEW_LINE> self.buildroot.root_log.info("Enabled pm_request plugin") <NEW_LINE> process.start() <NEW_LINE> <DEDENT> @traceLog() <NEW_LINE> def log_executed(self): <NEW_LINE> <INDENT> sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) <NEW_LINE> try: <NEW_LINE> <INDENT> sock.connect(self.buildroot.make_chroot_path(RUNDIR, SOCKET_NAME)) <NEW_LINE> sock.sendall(b'!LOG_EXECUTED\n') <NEW_LINE> executed_commands = sock.makefile().read() <NEW_LINE> if executed_commands: <NEW_LINE> <INDENT> self.buildroot.root_log.warning( "The pm_request plugin executed following commands:\n" + executed_commands + "\nThe build may not be reproducible.\n") <NEW_LINE> <DEDENT> <DEDENT> except socket.error: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> sock.close()
Executes package manager commands requested by processes runninng in the chroot.
62598fb97b180e01f3e490ff
class S3EventProjectModel(S3Model): <NEW_LINE> <INDENT> names = ("event_project", ) <NEW_LINE> def model(self): <NEW_LINE> <INDENT> tablename = "event_project" <NEW_LINE> self.define_table(tablename, self.event_event_id(empty = False, ondelete = "CASCADE"), self.project_project_id( ), *s3_meta_fields()) <NEW_LINE> return {}
Link Projects to Events
62598fb97cff6e4e811b5b7c
class Infrator(Pessoa): <NEW_LINE> <INDENT> estado = models.CharField(max_length=255, null=True) <NEW_LINE> cidade = models.CharField(max_length=255, null=True) <NEW_LINE> endereco = models.CharField(max_length=255, null=True) <NEW_LINE> def __unicode__(self): <NEW_LINE> <INDENT> return self.nome <NEW_LINE> <DEDENT> class Meta: <NEW_LINE> <INDENT> app_label = "detransapp"
Classe para models de infratores
62598fb91f5feb6acb162d7b
class BadStatement(object): <NEW_LINE> <INDENT> openapi_types = { 'type': 'str', 'text': 'str' } <NEW_LINE> attribute_map = { 'type': 'type', 'text': 'text' } <NEW_LINE> def __init__(self, type=None, text=None): <NEW_LINE> <INDENT> self._type = None <NEW_LINE> self._text = None <NEW_LINE> self.discriminator = None <NEW_LINE> if type is not None: <NEW_LINE> <INDENT> self.type = type <NEW_LINE> <DEDENT> if text is not None: <NEW_LINE> <INDENT> self.text = text <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def type(self): <NEW_LINE> <INDENT> return self._type <NEW_LINE> <DEDENT> @type.setter <NEW_LINE> def type(self, type): <NEW_LINE> <INDENT> self._type = type <NEW_LINE> <DEDENT> @property <NEW_LINE> def text(self): <NEW_LINE> <INDENT> return self._text <NEW_LINE> <DEDENT> @text.setter <NEW_LINE> def text(self, text): <NEW_LINE> <INDENT> self._text = text <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in six.iteritems(self.openapi_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pprint.pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, BadStatement): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> 
<DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other
NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech Do not edit the class manually.
62598fb9adb09d7d5dc0a6d8
class Activation(Layer): <NEW_LINE> <INDENT> def __init__(self, f: F, f_prime: F) -> None: <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.f = f <NEW_LINE> self.f_prime = f_prime <NEW_LINE> <DEDENT> def forward(self, inputs: Tensor) -> Tensor: <NEW_LINE> <INDENT> self.inputs = inputs <NEW_LINE> return self.f(inputs) <NEW_LINE> <DEDENT> def backward(self, grad: Tensor) -> Tensor: <NEW_LINE> <INDENT> return self.f_prime(self.inputs) * grad
An activation layer just applies a function elementwise to its inputs
62598fb94f88993c371f05bb
class ActivitySchema(Schema): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> type_ = 'activity' <NEW_LINE> self_view = 'v1.activity_detail' <NEW_LINE> self_view_kwargs = {'id': '<id>'} <NEW_LINE> inflect = dasherize <NEW_LINE> <DEDENT> id = fields.Str(dump_only=True) <NEW_LINE> actor = fields.Str(allow_none=True) <NEW_LINE> time = fields.DateTime(allow_none=True) <NEW_LINE> action = fields.Str(allow_none=True)
Api schema for Activity Model
62598fb967a9b606de54612d
class _CommandBuilding: <NEW_LINE> <INDENT> def GetResources(self): <NEW_LINE> <INDENT> return {'Pixmap' : 'Arch_Building', 'MenuText': QtCore.QT_TRANSLATE_NOOP("Arch_Building","Building"), 'Accel': "B, U", 'ToolTip': QtCore.QT_TRANSLATE_NOOP("Arch_Building","Creates a building object including selected objects.")} <NEW_LINE> <DEDENT> def Activated(self): <NEW_LINE> <INDENT> sel = FreeCADGui.Selection.getSelection() <NEW_LINE> ok = False <NEW_LINE> if (len(sel) == 1): <NEW_LINE> <INDENT> if Draft.getType(sel[0]) in ["Cell","Site","Floor"]: <NEW_LINE> <INDENT> FreeCAD.ActiveDocument.openTransaction(str(translate("Arch","Type conversion"))) <NEW_LINE> FreeCADGui.doCommand("import Arch") <NEW_LINE> FreeCADGui.doCommand("obj = Arch.makeBuilding()") <NEW_LINE> FreeCADGui.doCommand("Arch.copyProperties(FreeCAD.ActiveDocument."+sel[0].Name+",obj)") <NEW_LINE> FreeCADGui.doCommand('FreeCAD.ActiveDocument.removeObject("'+sel[0].Name+'")') <NEW_LINE> FreeCAD.ActiveDocument.commitTransaction() <NEW_LINE> ok = True <NEW_LINE> <DEDENT> <DEDENT> if not ok: <NEW_LINE> <INDENT> FreeCAD.ActiveDocument.openTransaction(str(translate("Arch"," Create Building"))) <NEW_LINE> ss = "[" <NEW_LINE> for o in sel: <NEW_LINE> <INDENT> if len(ss) > 1: <NEW_LINE> <INDENT> ss += "," <NEW_LINE> <DEDENT> ss += "FreeCAD.ActiveDocument."+o.Name <NEW_LINE> <DEDENT> ss += "]" <NEW_LINE> FreeCAD.ActiveDocument.openTransaction(str(translate("Arch","Floor"))) <NEW_LINE> FreeCADGui.doCommand("import Arch") <NEW_LINE> FreeCADGui.doCommand("Arch.makeBuilding("+ss+")") <NEW_LINE> FreeCAD.ActiveDocument.commitTransaction() <NEW_LINE> FreeCAD.ActiveDocument.recompute()
the Arch Building command definition
62598fb9cc40096d6161a287
class Resizer(object): <NEW_LINE> <INDENT> def __call__(self, sample, min_side=608, max_side=1280): <NEW_LINE> <INDENT> image, annots = sample['img'], sample['annot'] <NEW_LINE> rows, cols, cns = image.shape <NEW_LINE> smallest_side = min(rows, cols) <NEW_LINE> scale = min_side / smallest_side <NEW_LINE> largest_side = max(rows, cols) <NEW_LINE> if largest_side * scale > max_side: <NEW_LINE> <INDENT> scale = max_side / largest_side <NEW_LINE> <DEDENT> image = skimage.transform.resize(image, (int(round(rows*scale)), int(round((cols*scale))))) <NEW_LINE> rows, cols, cns = image.shape <NEW_LINE> pad_w = 32 - rows%32 <NEW_LINE> pad_h = 32 - cols%32 <NEW_LINE> new_image = np.zeros((rows + pad_w, cols + pad_h, cns)).astype(np.float32) <NEW_LINE> new_image[:rows, :cols, :] = image.astype(np.float32) <NEW_LINE> annots[:, :4] *= scale <NEW_LINE> return {'img': torch.from_numpy(new_image), 'annot': torch.from_numpy(annots), 'scale': scale}
Convert ndarrays in sample to Tensors.
62598fb93d592f4c4edbb01a
class Seat(CSKAType): <NEW_LINE> <INDENT> @property <NEW_LINE> def _fields(self): <NEW_LINE> <INDENT> return ['seat_id', 'license_name']
license seat details. seat details contain the following fields: - seat_id (string): internal id representing the license seat. - license_name (string): type of license for the seat.
62598fb9097d151d1a2c118e
class Competence(ElementCollection): <NEW_LINE> <INDENT> def __init__(self, agent, competence_name, priority_elements, goal): <NEW_LINE> <INDENT> ElementCollection.__init__(self, agent, "C.%s" % competence_name) <NEW_LINE> self._name = competence_name <NEW_LINE> self._elements = priority_elements <NEW_LINE> self._goal = goal <NEW_LINE> self.debug("Created") <NEW_LINE> <DEDENT> def reset(self): <NEW_LINE> <INDENT> self.debug("Reset") <NEW_LINE> for element in self._elements: <NEW_LINE> <INDENT> element.reset() <NEW_LINE> <DEDENT> <DEDENT> def fire(self): <NEW_LINE> <INDENT> self.debug("Fired") <NEW_LINE> if self._goal and self._goal.fire(): <NEW_LINE> <INDENT> self.debug("Goal satisfied") <NEW_LINE> return FireResult(0, None) <NEW_LINE> <DEDENT> for element in self._elements: <NEW_LINE> <INDENT> result = element.fire() <NEW_LINE> if result.continueExecution() and not result.nextElement(): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> return result <NEW_LINE> <DEDENT> self.debug("Failed") <NEW_LINE> return FireResult(0, None) <NEW_LINE> <DEDENT> def copy(self): <NEW_LINE> <INDENT> new_obj = copy(self) <NEW_LINE> new_elements = [] <NEW_LINE> for element in self._elements: <NEW_LINE> <INDENT> new_elements.append(element.copy()) <NEW_LINE> <DEDENT> new_obj._elements = new_elements <NEW_LINE> return new_obj <NEW_LINE> <DEDENT> def setElements(self, elements): <NEW_LINE> <INDENT> self._elements = elements <NEW_LINE> self.reset()
A POSH competence, containing competence priority elements.
62598fb966656f66f7d5a54e
class MemoryDetail(DetailView): <NEW_LINE> <INDENT> template_name = "happenings/memory_detail.html" <NEW_LINE> queryset = Memory.objects.all() <NEW_LINE> def dispatch(self, request, *args, **kwargs): <NEW_LINE> <INDENT> self.event_slug = kwargs.get('event_slug', False) <NEW_LINE> self.slug = kwargs.get('slug', False) <NEW_LINE> self.event = get_object_or_404(Event, slug=self.event_slug) <NEW_LINE> return super(MemoryDetail, self).dispatch(request, *args, **kwargs) <NEW_LINE> <DEDENT> def get_context_data(self, **kwargs): <NEW_LINE> <INDENT> context = super(MemoryDetail, self).get_context_data(**kwargs) <NEW_LINE> context['event'] = self.event <NEW_LINE> return context <NEW_LINE> <DEDENT> def get_object(self): <NEW_LINE> <INDENT> return get_object_or_404(Memory, pk=self.kwargs.get('pk', None))
Creates a detail page for an Event.Memory.
62598fb94a966d76dd5ef02e
class blk(gr.sync_block): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> gr.sync_block.__init__( self, name="Gotenna decoder", in_sig=[np.int8], out_sig=None ) <NEW_LINE> self.prefix = "10"*16 + "0010110111010100" <NEW_LINE> self.bits = "" <NEW_LINE> <DEDENT> def work(self, input_items, output_items): <NEW_LINE> <INDENT> self.bits += "".join([str(n) for n in input_items[0]]) <NEW_LINE> idx = self.bits[:-2048].find(self.prefix) <NEW_LINE> while idx >= 0: <NEW_LINE> <INDENT> self.bits = self.bits[idx + len(self.prefix):] <NEW_LINE> length = int(self.bits[0:8], 2) <NEW_LINE> packet = bytearray() <NEW_LINE> for i in range(length + 1): <NEW_LINE> <INDENT> packet.append(int(self.bits[i*8:i*8 + 8], 2)) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> print() <NEW_LINE> print("Raw bytes: " + " ".join(["{0:02x}".format(b) for b in packet])) <NEW_LINE> packet = gotenna_packet.correct_packet(packet) <NEW_LINE> print("Corrected: " + " ".join(["{0:02x}".format(b) for b in packet])) <NEW_LINE> print() <NEW_LINE> gotenna_packet.ingest_packet(packet) <NEW_LINE> <DEDENT> except Exception as err: <NEW_LINE> <INDENT> print("Error decoding packet: " + str(err)) <NEW_LINE> <DEDENT> self.bits = self.bits[(length + 1) * 8:] <NEW_LINE> idx = self.bits[:-2048].find(self.prefix) <NEW_LINE> <DEDENT> self.bits = self.bits[-2048 - len(self.prefix) + 1:] <NEW_LINE> return len(input_items[0])
Embedded Python Block example - a simple multiply const
62598fb9442bda511e95c5b8
class CppFileAnalyzer(IFileAnalyzer): <NEW_LINE> <INDENT> def initialize_fields(self, repo_detail: dict) -> None: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> repo_detail["cpplint_errors"] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> repo_detail["cpplint_errors"] = 0 <NEW_LINE> <DEDENT> <DEDENT> def __analyze_file(self, path: str, repo_detail: dict) -> None: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> cpplint_report = str(subprocess.check_output( "cpplint --filter=-whitespace/tab,-whitespace/braces,-build/headerguard,-readability/streams,-build/include_order,-whitespace/newline,-whitespace/labels,-runtime/references " + path, shell=True, stderr=subprocess.STDOUT)) <NEW_LINE> if "Total errors found:" in cpplint_report: <NEW_LINE> <INDENT> repo_detail["cpplint_errors"] += int(cpplint_report.split('\n')[-2].split(': ')[-1]) <NEW_LINE> <DEDENT> <DEDENT> except subprocess.CalledProcessError as error: <NEW_LINE> <INDENT> cpplint_report = error.output <NEW_LINE> repo_detail["cpplint_errors"] += int(cpplint_report.decode("utf-8").split('\n')[-2].split(': ')[-1]) <NEW_LINE> <DEDENT> <DEDENT> def analyze_files(self, path_list: list, repo_detail: dict): <NEW_LINE> <INDENT> for file_path in filter(lambda k: k.endswith(".hpp") or k.endswith(".cpp") or k.endswith(".h"), path_list): <NEW_LINE> <INDENT> self.__analyze_file(file_path, repo_detail)
Analyzes C++ source and header files.
62598fb956b00c62f0fb2a17
class PartialUnit(namedtuple("PartialUnit", "span type features")): <NEW_LINE> <INDENT> pass
Partially instantiated unit, for use when you want to programmatically insert annotations into a document A partially instantiated unit does not have any metadata (creation date, etc); as these will be derived automatically
62598fb991f36d47f2230f58
class LRD_cooldown(LRC): <NEW_LINE> <INDENT> def __init__(self, lr, epoch_wait=1, decay_rate=0.9): <NEW_LINE> <INDENT> self.lr = lr <NEW_LINE> self.epoch_max = epoch_wait + 1 <NEW_LINE> self.decay_rate = decay_rate <NEW_LINE> self.err_seq = np.zeros(self.epoch_max) <NEW_LINE> self.err_seq[0] = np.inf <NEW_LINE> self.epoch_not_down = 1 <NEW_LINE> <DEDENT> def update(self, epoch, err): <NEW_LINE> <INDENT> self.err_seq[self.epoch_not_down] = err <NEW_LINE> if np.where( np.min(self.err_seq[:self.epoch_not_down + 1]) == self.err_seq)[0][0] == self.epoch_not_down: <NEW_LINE> <INDENT> self.err_seq = np.zeros(self.epoch_max) <NEW_LINE> self.err_seq[0] = err <NEW_LINE> self.epoch_not_down = 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.epoch_not_down += 1 <NEW_LINE> if self.epoch_not_down == self.epoch_max: <NEW_LINE> <INDENT> self.lr *= self.decay_rate <NEW_LINE> self.err_seq = np.zeros(self.epoch_max) <NEW_LINE> self.err_seq[0] = err <NEW_LINE> self.epoch_not_down = 1 <NEW_LINE> print('learning rate update to:', self.lr) <NEW_LINE> <DEDENT> <DEDENT> return self.lr
this LRD function will decrease the lr if err do not decrease.
62598fb93617ad0b5ee062a3
class PacketFunctionsTest(unittest.TestCase): <NEW_LINE> <INDENT> def test_checksums(self): <NEW_LINE> <INDENT> for i, (command, response) in enumerate(simulator.simulated_command_responses): <NEW_LINE> <INDENT> if command == b'$$$': <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> self.assertEqual(command[-1:], comms.calculate_checksum(command[:-1]), (i, command, response)) <NEW_LINE> if len(response) > 0: <NEW_LINE> <INDENT> self.assertEqual(response[-1:], comms.calculate_checksum(response[:-1]), (i, command, response)) <NEW_LINE> <DEDENT> <DEDENT> self.assertEqual(bytes([234]), comms.calculate_checksum(bytes([21]))) <NEW_LINE> <DEDENT> def test_execute_command(self): <NEW_LINE> <INDENT> sim = simulator.DaikinSimulator() <NEW_LINE> self.assertEqual(bytes([21, 234]), comms.execute_command(sim, bytes([3, 64, 160, 28]))) <NEW_LINE> self.assertEqual(bytes([21, 234]), comms.execute_command(sim, b'$$$')) <NEW_LINE> self.assertEqual(bytes([64, 17, 8, 2, 49, 149, 1, 2, 5, 214]), comms.execute_command(sim, bytes([3, 64, 17, 171])))
Tests for the comms module.
62598fb966673b3332c3052d
class ArduinoSerialUIData: <NEW_LINE> <INDENT> def __init__(self, serialport = 'COM2'): <NEW_LINE> <INDENT> self.serialcnx = serial.Serial(port=serialport, baudrate=9600, timeout=1, writeTimeout=1) <NEW_LINE> <DEDENT> def getmesures(): <NEW_LINE> <INDENT> while (self.serialcnx.in_waiting()==0): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> data = self.serialcnx.readline().decode('ascii') <NEW_LINE> matchObj = re.match( r'(\d+\.\d+);(\d+\.\d+)', data) <NEW_LINE> if(matchObj) : <NEW_LINE> <INDENT> mesures = {'tension':0, 'intensity':0, 'power':0} <NEW_LINE> mesures['tension'] = float(matchObj.group(1)) <NEW_LINE> mesures['intensity'] = float(matchObj.group(2)) <NEW_LINE> mesures['power'] = mesures['tension']*mesures['intensity'] <NEW_LINE> return mesures <NEW_LINE> <DEDENT> else : <NEW_LINE> <INDENT> self.getmesures()
Class pour se connecter à Arduino via le port serie et lire les données envoyées : tension et intensité via getmesures()
62598fb9dc8b845886d53716
class Resource(_NamedUIDObject): <NEW_LINE> <INDENT> def __init__(self, name: str) -> None: <NEW_LINE> <INDENT> super().__init__(name) <NEW_LINE> self.busy_intervals = {} <NEW_LINE> <DEDENT> def add_busy_interval(self, task, interval: Tuple[ArithRef, ArithRef]) -> None: <NEW_LINE> <INDENT> self.busy_intervals[task] = interval <NEW_LINE> <DEDENT> def get_busy_intervals(self) -> List[Tuple[ArithRef, ArithRef]]: <NEW_LINE> <INDENT> return list(self.busy_intervals.values())
base class for the representation of a resource
62598fb91f5feb6acb162d7d
class SqlStorageUpdateSettings(Model): <NEW_LINE> <INDENT> _attribute_map = { 'disk_count': {'key': 'diskCount', 'type': 'int'}, 'disk_configuration_type': {'key': 'diskConfigurationType', 'type': 'str'}, 'starting_device_id': {'key': 'startingDeviceId', 'type': 'int'}, } <NEW_LINE> def __init__(self, **kwargs): <NEW_LINE> <INDENT> super(SqlStorageUpdateSettings, self).__init__(**kwargs) <NEW_LINE> self.disk_count = kwargs.get('disk_count', None) <NEW_LINE> self.disk_configuration_type = kwargs.get('disk_configuration_type', None) <NEW_LINE> self.starting_device_id = kwargs.get('starting_device_id', None)
Set disk storage settings for SQL Server. :param disk_count: Virtual machine disk count. :type disk_count: int :param disk_configuration_type: Disk configuration to apply to SQL Server. Possible values include: 'NEW', 'EXTEND', 'ADD' :type disk_configuration_type: str or ~azure.mgmt.sqlvirtualmachine.models.DiskConfigurationType :param starting_device_id: Device id of the first disk to be updated. :type starting_device_id: int
62598fb95fc7496912d4832a
class ImDisplay(Widget): <NEW_LINE> <INDENT> def create_im(self, mat, c_map, scale=True): <NEW_LINE> <INDENT> mat = np.flipud(mat) <NEW_LINE> dims = np.shape(mat) <NEW_LINE> self.texture = Texture.create(size=(dims[1], dims[0]), colorfmt='rgb') <NEW_LINE> self.scale = scale <NEW_LINE> self.cmap = color_map(c_map) <NEW_LINE> m = mat.flatten() <NEW_LINE> m = m.astype(float) <NEW_LINE> if scale: <NEW_LINE> <INDENT> im = numpytoimage.scale_im(m, len(self.cmap) // 3 - 1) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> im = m <NEW_LINE> <DEDENT> im = im.astype(int) <NEW_LINE> im = numpytoimage.mat_to_im(im, self.cmap) <NEW_LINE> arr = np.asarray(im, dtype=np.uint8) <NEW_LINE> self.texture.blit_buffer(arr.tostring(), colorfmt='rgb', bufferfmt='ubyte') <NEW_LINE> with self.canvas: <NEW_LINE> <INDENT> self.im = Rectangle(texture=self.texture, size=self.size, pos=self.pos) <NEW_LINE> <DEDENT> self.bind(pos=self.update_size, size=self.update_size) <NEW_LINE> <DEDENT> def update_im(self, mat): <NEW_LINE> <INDENT> mat = np.flipud(mat) <NEW_LINE> m = mat.flatten() <NEW_LINE> m = m.astype(float) <NEW_LINE> if self.scale: <NEW_LINE> <INDENT> im = numpytoimage.scale_im(m, len(self.cmap) // 3 - 1) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> im = m <NEW_LINE> <DEDENT> im = im.astype(int) <NEW_LINE> im = numpytoimage.mat_to_im(im, self.cmap) <NEW_LINE> arr = np.asarray(im, dtype=np.uint8) <NEW_LINE> self.texture.blit_buffer(arr.tostring(), colorfmt='rgb', bufferfmt='ubyte') <NEW_LINE> <DEDENT> def update_size(self, *args): <NEW_LINE> <INDENT> self.im.pos = self.pos <NEW_LINE> self.im.size = self.size
Class for displaying numpy matrices as widgets on the canvas. Scaling is performed on the image by default otherwise image values must lie between 0 and 255. Mapping to rgb color scheme is then performed in Cython compiled code.
62598fb971ff763f4b5e78d6
class RCN(nn.Module): <NEW_LINE> <INDENT> def __init__(self, cell): <NEW_LINE> <INDENT> super(RCN, self).__init__() <NEW_LINE> if not isinstance(cell, RCNCell): <NEW_LINE> <INDENT> raise ValueError('cell must be an instance of RCNCell') <NEW_LINE> <DEDENT> self._cell = cell <NEW_LINE> <DEDENT> def forward(self, x, h0, output='last'): <NEW_LINE> <INDENT> out, h = self._cell(x[:, 0, :], h0) <NEW_LINE> out_seq = [out, ] <NEW_LINE> for t in range(1, x.size(1)): <NEW_LINE> <INDENT> out, h = self._cell(x[:, t, :], h) <NEW_LINE> out_seq.append(out) <NEW_LINE> <DEDENT> if output == 'last': <NEW_LINE> <INDENT> return out_seq[-1], h <NEW_LINE> <DEDENT> elif output == 'all': <NEW_LINE> <INDENT> return torch.stack(out_seq, dim=1), h <NEW_LINE> <DEDENT> elif output == 'average': <NEW_LINE> <INDENT> return torch.mean(torch.stack(out_seq), dim=0), h <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError("output mode can only be one in ['last', 'all', 'average']")
Convolutional RNN Args: cell - **cell**: instance of RCNCell. Inputs: x, h0 - **x** (batch, seq, channel, height, width): tensor containing input features. - **h0** (batch, channel, height, width): tensor containing the initial hidden state to feed the cell. - **output** (one in ['last', 'all', 'average']): string that determines the output mode. If 'last', only return the output at the last time instance; if 'all', return outputs at all time instances in a large tensor; if 'average', average outputs along time axis then return. Default; 'last' Outputs: output, h - **output** (batch, seq, channel, height, width): tensor cantaining output of all time instances - **h**: (batch, hidden_size): tensor containing the current hidden state
62598fb9091ae35668704d7f
class PatternException(Exception): <NEW_LINE> <INDENT> def __init__(self, text): <NEW_LINE> <INDENT> self.txt = text
Образец не подходит для анализа
62598fb967a9b606de54612f
class UpdateOwnProfile(permissions.BasePermission): <NEW_LINE> <INDENT> def has_object_permission(self,request,view,obj): <NEW_LINE> <INDENT> if request.method in permissions.SAFE_METHODS: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return obj.id == request.user.id
Allow users to edit their own profile
62598fb9cc40096d6161a288
class TestSdCIEIlluminantDSeries(unittest.TestCase): <NEW_LINE> <INDENT> def test_sd_CIE_illuminant_D_series(self): <NEW_LINE> <INDENT> for name, CCT, tolerance in ( ("D50", 5000, 0.001), ("D55", 5500, 0.001), ("D65", 6500, 0.00001), ("D75", 7500, 0.0001), ): <NEW_LINE> <INDENT> CCT = CCT * 1.4388 / 1.4380 <NEW_LINE> xy = CCT_to_xy_CIE_D(CCT) <NEW_LINE> sd_r = SDS_ILLUMINANTS[name] <NEW_LINE> sd_t = sd_CIE_illuminant_D_series(xy) <NEW_LINE> np.testing.assert_allclose( sd_r.values, sd_t[sd_r.wavelengths], rtol=tolerance, atol=tolerance, )
Define :func:`colour.colorimetry.illuminants.sd_CIE_illuminant_D_series` definition unit tests methods.
62598fb9099cdd3c63675491
class DecryptResponse(proto.Message): <NEW_LINE> <INDENT> plaintext = proto.Field( proto.BYTES, number=1, ) <NEW_LINE> plaintext_crc32c = proto.Field( proto.MESSAGE, number=2, message=wrappers_pb2.Int64Value, ) <NEW_LINE> used_primary = proto.Field( proto.BOOL, number=3, ) <NEW_LINE> protection_level = proto.Field( proto.ENUM, number=4, enum=resources.ProtectionLevel, )
Response message for [KeyManagementService.Decrypt][google.cloud.kms.v1.KeyManagementService.Decrypt]. Attributes: plaintext (bytes): The decrypted data originally supplied in [EncryptRequest.plaintext][google.cloud.kms.v1.EncryptRequest.plaintext]. plaintext_crc32c (google.protobuf.wrappers_pb2.Int64Value): Integrity verification field. A CRC32C checksum of the returned [DecryptResponse.plaintext][google.cloud.kms.v1.DecryptResponse.plaintext]. An integrity check of [DecryptResponse.plaintext][google.cloud.kms.v1.DecryptResponse.plaintext] can be performed by computing the CRC32C checksum of [DecryptResponse.plaintext][google.cloud.kms.v1.DecryptResponse.plaintext] and comparing your results to this field. Discard the response in case of non-matching checksum values, and perform a limited number of retries. A persistent mismatch may indicate an issue in your computation of the CRC32C checksum. Note: receiving this response message indicates that [KeyManagementService][google.cloud.kms.v1.KeyManagementService] is able to successfully decrypt the [ciphertext][google.cloud.kms.v1.DecryptRequest.ciphertext]. Note: This field is defined as int64 for reasons of compatibility across different languages. However, it is a non-negative integer, which will never exceed 2^32-1, and can be safely downconverted to uint32 in languages that support this type. used_primary (bool): Whether the Decryption was performed using the primary key version. protection_level (google.cloud.kms_v1.types.ProtectionLevel): The [ProtectionLevel][google.cloud.kms.v1.ProtectionLevel] of the [CryptoKeyVersion][google.cloud.kms.v1.CryptoKeyVersion] used in decryption.
62598fb9236d856c2adc94ef
class TimerHandle: <NEW_LINE> <INDENT> def __init__(self, when, callback, args): <NEW_LINE> <INDENT> assert when is not None <NEW_LINE> self._callback = callback <NEW_LINE> self._args = args <NEW_LINE> self._cancelled = False <NEW_LINE> self._when = when <NEW_LINE> <DEDENT> def cancel(self): <NEW_LINE> <INDENT> self._cancelled = True <NEW_LINE> <DEDENT> def _run(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self._callback(*self._args) <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> raise <NEW_LINE> <DEDENT> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> res = 'TimerHandle({}, {}, {})'.format(self._when, self._callback, self._args) <NEW_LINE> if self._cancelled: <NEW_LINE> <INDENT> res += '<cancelled>' <NEW_LINE> <DEDENT> return res <NEW_LINE> <DEDENT> def __hash__(self): <NEW_LINE> <INDENT> return hash(self._when) <NEW_LINE> <DEDENT> def __lt__(self, other): <NEW_LINE> <INDENT> return self._when < other._when <NEW_LINE> <DEDENT> def __le__(self, other): <NEW_LINE> <INDENT> if self._when < other._when: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return self.__eq__(other) <NEW_LINE> <DEDENT> def __gt__(self, other): <NEW_LINE> <INDENT> return self._when > other._when <NEW_LINE> <DEDENT> def __ge__(self, other): <NEW_LINE> <INDENT> if self._when > other._when: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return self.__eq__(other) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if isinstance(other, TimerHandle): <NEW_LINE> <INDENT> return (self._when == other._when and self._callback == other._callback and self._args == other._args and self._cancelled == other._cancelled) <NEW_LINE> <DEDENT> return NotImplemented <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> equal = self.__eq__(other) <NEW_LINE> return NotImplemented if equal is NotImplemented else not equal
Took from toolip for heapq to work
62598fb9be7bc26dc9251f0b
class dRdX(KonaMatrix): <NEW_LINE> <INDENT> def product(self, in_vec, out_vec): <NEW_LINE> <INDENT> self._check_linearization() <NEW_LINE> if not self._transposed: <NEW_LINE> <INDENT> self._solver.multiply_dRdX( self._primal._data, self._state._data, in_vec._data, out_vec._data) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._solver.multiply_dRdX_T( self._primal._data, self._state._data, in_vec._data, out_vec._data)
Partial jacobian of the system residual with respect to primal variables.
62598fb91b99ca400228f5e0
class DropoutLayer(object): <NEW_LINE> <INDENT> def __init__(self, input, rng=RandomState(1234), p=0.5): <NEW_LINE> <INDENT> if T.lt(p,1e-5): <NEW_LINE> <INDENT> self.output = input <NEW_LINE> return <NEW_LINE> <DEDENT> srng = RandomStreams(rng.randint(999999)) <NEW_LINE> mask = srng.binomial(n=1, p=1-p, size=input.shape, dtype=floatX) <NEW_LINE> self.output = input * mask
Dropout layer: https://github.com/mdenil/dropout
62598fb9167d2b6e312b70d4
class slj(wallpotential): <NEW_LINE> <INDENT> def __init__(self, walls, r_cut=False, d_max=None, name=""): <NEW_LINE> <INDENT> hoomd.util.print_status_line(); <NEW_LINE> wallpotential.__init__(self, walls, r_cut, name); <NEW_LINE> if d_max is None : <NEW_LINE> <INDENT> sysdef = hoomd.context.current.system_definition; <NEW_LINE> d_max = sysdef.getParticleData().getMaxDiameter() <NEW_LINE> hoomd.context.msg.notice(2, "Notice: slj set d_max=" + str(d_max) + "\n"); <NEW_LINE> <DEDENT> if not hoomd.context.exec_conf.isCUDAEnabled(): <NEW_LINE> <INDENT> self.cpp_force = _md.WallsPotentialSLJ(hoomd.context.current.system_definition, self.name); <NEW_LINE> self.cpp_class = _md.WallsPotentialSLJ; <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.cpp_force = _md.WallsPotentialSLJGPU(hoomd.context.current.system_definition, self.name); <NEW_LINE> self.cpp_class = _md.WallsPotentialSLJGPU; <NEW_LINE> <DEDENT> hoomd.context.current.system.addCompute(self.cpp_force, self.force_name); <NEW_LINE> self.required_coeffs += ['epsilon', 'sigma', 'alpha']; <NEW_LINE> self.force_coeff.set_default_coeff('alpha', 1.0); <NEW_LINE> <DEDENT> def process_coeff(self, coeff): <NEW_LINE> <INDENT> epsilon = coeff['epsilon']; <NEW_LINE> sigma = coeff['sigma']; <NEW_LINE> alpha = coeff['alpha']; <NEW_LINE> lj1 = 4.0 * epsilon * math.pow(sigma, 12.0); <NEW_LINE> lj2 = alpha * 4.0 * epsilon * math.pow(sigma, 6.0); <NEW_LINE> return _md.make_wall_slj_params(_hoomd.make_scalar2(lj1, lj2), coeff['r_cut']*coeff['r_cut'], coeff['r_extrap']);
Shifted Lennard-Jones wall potential Args: walls (:py:class:`group`): Wall group containing half-space geometries for the force to act in. r_cut (float): The global r_cut value for the force. Defaults to False or 0 if not specified. name (str): The force name which will be used in the metadata and log files. Wall force evaluated using the Shifted Lennard-Jones potential. Note that because slj is dependent upon particle diameters the following correction is necessary to the force details in the :py:class:`hoomd.md.pair.slj` description. :math:`\Delta = d_i/2 - 1` where :math:`d_i` is the diameter of particle :math:`i`. See :py:class:`hoomd.md.pair.slj` for force details and base parameters and :py:class:`wallpotential` for generalized wall potential implementation Example:: walls=wall.group() # add walls to interact with wall_force_slj=wall.slj(walls, r_cut=3.0) wall_force_slj.force_coeff.set('A', epsilon=1.0, sigma=1.0) wall_force_slj.force_coeff.set('A', epsilon=2.0, sigma=1.0, r_cut=3.0) wall_force_slj.force_coeff.set('B', epsilon=1.0, sigma=1.0, r_cut=2**(1.0/6.0))
62598fb910dbd63aa1c70d18
class RoofFinish(Finish): <NEW_LINE> <INDENT> pass
The final material applied to a surface, either interior or exterior. Some structural components don't have an exterior finish, such as unfinished poured concrete.
62598fb9956e5f7376df572d
class ThrottlingInformation(msrest.serialization.Model): <NEW_LINE> <INDENT> _attribute_map = { 'duration': {'key': 'duration', 'type': 'duration'}, } <NEW_LINE> def __init__( self, *, duration: Optional[datetime.timedelta] = None, **kwargs ): <NEW_LINE> <INDENT> super(ThrottlingInformation, self).__init__(**kwargs) <NEW_LINE> self.duration = duration
Optional throttling information for the alert rule. :param duration: The required duration (in ISO8601 format) to wait before notifying on the alert rule again. The time granularity must be in minutes and minimum value is 0 minutes. :type duration: ~datetime.timedelta
62598fb9fff4ab517ebcd943
class RegistroY001(Registro): <NEW_LINE> <INDENT> campos = [ CampoFixo(1, 'REG', 'Y001'), Campo(2, 'IND_DAD'), ]
Abertura do Bloco Y – Informações Gerais
62598fb960cbc95b0636449e
class credentials: <NEW_LINE> <INDENT> credential_list = [] <NEW_LINE> def __init__(self,account_name,user_name,password): <NEW_LINE> <INDENT> self.account_name = account_name <NEW_LINE> self.user_name = user_name <NEW_LINE> self.password = password <NEW_LINE> <DEDENT> def save_account(self): <NEW_LINE> <INDENT> credentials.credential_list.append(self) <NEW_LINE> <DEDENT> def delete_account(self): <NEW_LINE> <INDENT> credentials.credential_list.remove(self) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def find_by_account(cls,account_name): <NEW_LINE> <INDENT> for account in cls.credential_list: <NEW_LINE> <INDENT> if account.account_name == account_name: <NEW_LINE> <INDENT> return account <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> @classmethod <NEW_LINE> def account_exists(cls, account_name): <NEW_LINE> <INDENT> for account in cls.credential_list: <NEW_LINE> <INDENT> if account.account_name == account_name: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> return False <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def display_accounts(cls): <NEW_LINE> <INDENT> return cls.credential_list
class that generates new instances of credentials
62598fb9a8370b77170f053e
class Server(Base):
    """Command that reports server CPU, memory, or disk information.

    usage:
        server cpu
        server memory
        server disk
    """

    def execute(self):
        # Each flag prints its report and then terminates the process.
        if self.args['cpu']:
            for key, value in utils.get_cpu_info().items():
                utils.log_rep(key + " : " + str(value))
            exit()
        if self.args['memory']:
            for section, entries in utils.get_memory_info().items():
                utils.log_rep("#### " + section + " #####")
                for key, value in entries.items():
                    utils.log_rep(key + " : " + str(value))
            exit()
        if self.args['disk']:
            for key, value in utils.get_disk_info().items():
                utils.log_rep(key + " : " + str(value))
            exit()
usage: server cpu server memory server disk Run nvc server [command] [option] Options: -h --help Print usage
62598fb94f6381625f199572
class GA_segmented(object):
    """Helper for querying segmented Google Analytics results.

    Holds a service handle and a profile id; ``get_results`` issues one
    Core Reporting query for a single metric and segment over a date range,
    so it can be looped over to fill a results column.
    """

    def __init__(self, service, profile_id):
        self.service = service        # authorized Analytics service object
        self.profile_id = profile_id  # GA view (profile) id, without 'ga:'

    def get_results(self, service, profile_id, start_day, end_day, metric,
                    segmentation):
        """Run the query and return the executed API response."""
        query = service.data().ga().get(
            ids='ga:' + profile_id,
            start_date=start_day,
            end_date=end_day,
            metrics='ga:' + metric,
            segment=segmentation,
        )
        return query.execute()
This class can be looped over to query results into a column. Need to define service and profile for the class, and service, profile, start_day, end_day, ga:{metric}, and segmentation.
62598fb9cc40096d6161a289
class TestBaseModelpep8(unittest.TestCase):
    """Validate PEP8 compliance of the State model and its test module."""

    def test_pep8(self):
        """Run the pep8 style checker and expect zero total errors."""
        checker = pep8.StyleGuide(quiet=True)
        checked_files = ["models/state.py",
                         "tests/test_models/test_state.py"]
        report = checker.check_files(checked_files)
        self.assertEqual(report.total_errors, 0)
Validate pep8
62598fb957b8e32f525081cd
class SensorResponseMode(GetSetSpotValue, EnumSpotValue):
    """The camera's sensor response mode.

    The mixin base classes presumably supply the actual get/set and
    enum-conversion behaviour; this class only binds the SpotCam
    parameter id, the C value type, and the enum mapping.
    """

    # SpotCam parameter identifier for the sensor response mode.
    _nParam = SpotCamConstant.SENSORRESPONSEMODE
    # Underlying C representation used when talking to the driver.
    _ctype = ctypes.c_ulong
    # Enum mapping raw values to symbolic response modes.
    _enum_class = SpotCamEnum.SensorRespMode
The sensor response mode.
62598fb9099cdd3c63675492
class RadarDataLayer(caffe.Layer):
    """Caffe data layer that serves batches of flattened radar position
    features, one feature vector per video frame.

    ``param_str`` is expected to be a dict literal with keys
    'radar_files', 'videos' (list of video ids), and 'batch_size'.
    """

    def setup(self, bottom, top):
        # param_str is evaluated as a Python literal; assumes trusted input.
        params = eval(self.param_str)
        self.radarFiles = params['radar_files']
        self.videoIds = params['videos']
        self.videos = NSVideo.objects.filter(_id__in=self.videoIds)
        self.batchSize = int(params['batch_size'])
        self.radExt = RadarExtractor(self.radarFiles, featureType='position')
        self.radarFeat = []
        self.idx = 0  # cursor into the precomputed feature array
        # Seconds per frame, derived from the first video's frame rate.
        self.spf = 1/(self.videos[0].fps)
        for v in self.videos:
            frameCount = v.frame_count
            start = v.start
            # NOTE(review): clamps a start time earlier than minute 17 up to
            # hh:17:00 — presumably aligning with radar data availability;
            # confirm the intent.
            if start.minute < 17:
                start = datetime.datetime(start.year, start.month, start.day,
                                          start.hour, 17, 0)
            # One flattened radar feature vector per video frame.
            for x in range(frameCount):
                feat = self.radExt.extract_features(
                    start + timedelta(seconds = self.spf*x))
                self.radarFeat.append(feat.flatten())
        self.radarFeat = np.array(self.radarFeat)
        self.numFeatures = len(self.radarFeat)

    def reshape(self, bottom, top):
        # Load the next batch and size the top blob to match it.
        self.data = self.load_radar_batch()
        top[0].reshape(*self.data.shape)

    def forward(self, bottom, top):
        top[0].data[...] = self.data
        # Advance the cursor; load_radar_batch wraps indices with modulo, so
        # only normalize when we have passed the end.
        self.idx += self.batchSize
        if self.idx > self.numFeatures:
            self.idx = self.idx % self.numFeatures

    def backward(self, top, propogate_down, bottom):
        # Data layers have no gradient to propagate.
        pass

    def load_radar_batch(self):
        """Return the next batchSize feature rows as float32 (wraps around)."""
        indices = [(self.idx + i) % self.numFeatures for i in range(self.batchSize)]
        data_batch = self.radarFeat[indices].astype(np.float32)
        return data_batch
Caffe data layer that loads per-frame radar position features for the configured videos
62598fb966656f66f7d5a552
class transformer_t(object):
    """Base class for a function transformer."""

    # When True, user-supplied argument indices are 1-based.
    USE_1_BASED_INDEXING = False

    def __init__(self, function):
        object.__init__(self)
        self.__function = function

    @property
    def function(self):
        """The function declaration this transformer operates on."""
        return self.__function

    def required_headers(self):
        """Return headers required by this transformer (abstract)."""
        raise NotImplementedError(self.__class__.__name__)

    def get_argument(self, reference):
        """Resolve *reference* (argument name or integer index) to an argument."""
        if isinstance(reference, str):
            matches = [candidate for candidate in self.function.arguments
                       if candidate.name == reference]
            if len(matches) != 1:
                raise RuntimeError(
                    "Argument with name \"%s\" was not found" % reference)
            return matches[0]
        assert isinstance(reference, int)
        if transformer_t.USE_1_BASED_INDEXING:
            reference += 1
        return self.function.arguments[reference]

    def get_type(self, reference):
        """Return the referenced argument's type, or the function return type
        when *reference* equals the module-level ``return_`` sentinel."""
        global return_
        if isinstance(reference, int) and reference == return_:
            return self.function.return_type
        return self.get_argument(reference).type

    def configure_mem_fun(self, controller):
        raise NotImplementedError(self.__class__.__name__)

    def configure_free_fun(self, controller):
        raise NotImplementedError(self.__class__.__name__)

    def configure_virtual_mem_fun(self, controller):
        raise NotImplementedError(self.__class__.__name__)
Base class for a function transformer.
62598fb98a349b6b4368639c
class DebManager(AbstractPkgManager):
    """Deb package system manager (Debian, Ubuntu, Linux Mint)."""

    # Shell command prefixes consumed by the base class.
    CMDPREFIX_DETECT = 'dpkg -l'
    CMDPREFIX_UPDATE = 'apt-get update'
    CMDPREFIX_INSTALL = 'apt-get -y --force-yes install'
    CMDPREFIX_REMOVE = 'apt-get -y --force-yes --purge remove'
    # No dedicated add-repo command; sources are edited as files instead.
    CMDPREFIX_ADDREPO = ''
    # APT source list locations.
    SOURCELISTS_DIR = '/etc/apt/sources.list.d'
    SOURCELISTS_CFG = '/etc/apt/sources.list'

    def __init__(self):
        # Repositories are managed by editing the sources files directly.
        self.update_sources = self.update_sources_by_file
        # APT verifies package signatures, so a key manager is required.
        self.requires_keymgr = True
        self.keymgr = DebKeyManager()
Deb Package System Manager(Debian, Ubuntu, LinuxMint)
62598fb9236d856c2adc94f0
class AlexDANN(chainer.Chain):
    """AlexNet-based domain-adversarial network (DANN).

    input_size: (batch, 3ch, 227, 227)

    A shared convolutional feature extractor (conv1-5) feeds both a label
    predictor (lp6-8) and a domain classifier (dc6-8); the domain branch
    sits behind a gradient reversal layer (``grl.flip_grad``).
    """

    # Counts forward_dc calls; passed to the gradient reversal layer,
    # presumably to schedule the reversal strength — confirm in grl.
    iter_count = 0

    def __init__(self, class_num=31):
        super(AlexDANN, self).__init__()
        with self.init_scope():
            # AlexNet-style convolutional feature extractor.
            self.conv1 = L.Convolution2D(3, 96, 11, stride=4)
            self.conv2 = L.Convolution2D(96, 256, 5, pad=2)
            self.conv3 = L.Convolution2D(256, 384, 3, pad=1)
            self.conv4 = L.Convolution2D(384, 384, 3, pad=1)
            self.conv5 = L.Convolution2D(384, 256, 3, pad=1)
            # Label predictor head.
            self.lp6 = L.Linear(256 * 6 * 6, 4096)
            self.lp7 = L.Linear(4096, 4096)
            self.lp8 = L.Linear(4096, class_num)
            # Domain classifier head (single binary output).
            self.dc6 = L.Linear(256 * 6 * 6, 1024)
            self.dc7 = L.Linear(1024, 1024)
            self.dc8 = L.Linear(1024, 1)

    def __call__(self, x):
        # Inference path: dropout disabled, no gradients recorded.
        with chainer.using_config('train', False), chainer.no_backprop_mode():
            return self.forward_lp(self.forward_fe(x))

    def forward_fe(self, x):
        """Shared convolutional feature extractor."""
        h1 = F.local_response_normalization(F.relu(self.conv1(x)))
        h1 = F.max_pooling_2d(h1, 3, stride=2)
        h2 = F.local_response_normalization(F.relu(self.conv2(h1)))
        h2 = F.max_pooling_2d(h2, 3, stride=2)
        h3 = F.relu(self.conv3(h2))
        h4 = F.relu(self.conv4(h3))
        h5 = F.max_pooling_2d(F.relu(self.conv5(h4)), 3, stride=2)
        return h5

    def forward_lp(self, h):
        """Label predictor: features -> class scores."""
        h6 = F.dropout(F.relu(self.lp6(h)))
        h7 = F.dropout(F.relu(self.lp7(h6)))
        h8 = self.lp8(h7)
        return h8

    def forward_dc(self, h):
        """Domain classifier, applied behind the gradient reversal layer."""
        self.iter_count += 1  # creates an instance attr shadowing the class attr
        h5 = grl.flip_grad(h, self.iter_count)
        h6 = F.dropout(F.relu(self.dc6(h5)))
        h7 = F.dropout(F.relu(self.dc7(h6)))
        h8 = self.dc8(h7)
        return h8

    def forward_training(self, src, tgt):
        """Joint training pass: label scores for source, domain scores for both.

        Assumes src and tgt batches have equal size (split into two halves).
        """
        x = F.concat([src, tgt], axis=0)
        h = self.forward_fe(x)
        h_src, h_tgt = F.split_axis(x=h, indices_or_sections=2, axis=0)
        h_lp = self.forward_lp(h_src)
        h_dc = self.forward_dc(h)
        return h_lp, h_dc
input_size:(batch, 3ch, 227, 227)
62598fb997e22403b383b066
class MultiProcessingQueue(queue.Queue):
    """Class that defines the multi-processing queue.

    Wraps multiprocessing.Queue behind the project's PushItem/PopItem/IsEmpty
    interface, clamping the requested size to the platform semaphore maximum.
    """

    def __init__(self, maximum_number_of_queued_items=0):
        """Initialize the queue; 0 means unbounded (up to the platform cap)."""
        super(MultiProcessingQueue, self).__init__()
        # Fix: on Python 3 the C helper is the top-level `_multiprocessing`
        # module; `multiprocessing._multiprocessing` raises AttributeError.
        import _multiprocessing
        queue_max_length = _multiprocessing.SemLock.SEM_VALUE_MAX
        if maximum_number_of_queued_items > queue_max_length:
            # logging.warn is a deprecated alias of logging.warning.
            logging.warning(
                u'Maximum queue size requested ({0:d}) is larger than system '
                u'supported maximum size. Setting queue size to maximum supported '
                u'size, '
                u'({1:d})'.format(maximum_number_of_queued_items, queue_max_length))
            maximum_number_of_queued_items = queue_max_length
        self._queue = multiprocessing.Queue(
            maxsize=maximum_number_of_queued_items)

    def __len__(self):
        """Return the approximate queue length.

        Raises NotImplementedError on platforms with a broken sem_getvalue()
        (e.g. Mac OS X).
        """
        size = 0
        try:
            size = self._queue.qsize()
        except NotImplementedError:
            logging.warning((
                u'Returning queue length does not work on Mac OS X because of broken '
                u'sem_getvalue()'))
            raise
        return size

    def IsEmpty(self):
        """Return True if the queue has no items (approximate)."""
        return self._queue.empty()

    def PushItem(self, item):
        """Push an item onto the queue, blocking when it is full."""
        self._queue.put(item)

    def PopItem(self):
        """Pop an item, blocking until one is available.

        Raises errors.QueueEmpty when interrupted by the keyboard.
        """
        try:
            return self._queue.get()
        except KeyboardInterrupt:
            raise errors.QueueEmpty
Class that defines the multi-processing queue.
62598fb9f548e778e596b706
class ElectricHeater(annuity.Annuity):
    """Class representing Electric Heaters (efficiency approximated to 100%).

    Attributes:
        model: Model of the Electric Heater.
        th_capacity: Thermal capacity [kW].
        heat_hourly: Hourly heat provided [kWh] (one entry per hour).
        heat_yearly: Yearly sum of heat provided [kWh].
        annuity: Annuity of the Electric Heater [Euros].
        emissions: CO2 emissions [kg of CO2].

    Extends: Annuity class.
    """

    def __init__(self, model, th_capacity):
        self.model = model
        self.th_capacity = th_capacity
        # One entry per hour of a non-leap year.
        self.heat_hourly = [0]*8760
        self.heat_yearly = 0
        self.imported_electricity = 0
        self.annuity = 0
        self.emissions = 0
        # Annuity parameters come from the shared database ('ElHe' rows plus
        # 'Common' economic parameters).
        super(ElectricHeater, self).__init__(deperiod=database.annuity_factors['ElHe'][0],
                                             effop=database.annuity_factors['ElHe'][1],
                                             fwins=database.annuity_factors['ElHe'][2],
                                             finst=database.annuity_factors['ElHe'][3],
                                             obperiod=database.annuity_factors['Common'][0],
                                             q=database.annuity_factors['ElHe'][4],
                                             r=database.annuity_factors['ElHe'][5],
                                             gas_price=database.annuity_factors['Common'][1],
                                             electricity_price=database.annuity_factors['Common'][2])

    def get_heat(self, required_heat, hour):
        """Provide heat up to capacity for this hour; return unmet demand."""
        if required_heat <= self.th_capacity:
            self.heat_yearly += required_heat
            self.heat_hourly[hour] = required_heat
            required_heat = 0
        else:
            # Demand exceeds capacity: deliver full capacity, pass on the rest.
            self.heat_yearly += self.th_capacity
            self.heat_hourly[hour] = self.th_capacity
            required_heat -= self.th_capacity
        return required_heat

    def set_emissions(self):
        """Set yearly CO2 emissions.

        Assumes 595 g CO2 per kWh of electricity — TODO confirm factor.
        """
        self.emissions = 595*self.heat_yearly/1000
        return

    def set_annuity(self):
        """Compute the annuity from capital, demand, and operating costs."""
        # Capital cost estimated from thermal capacity in W (empirical fit —
        # TODO confirm source of the coefficients).
        self.A0 = 53.938*(self.th_capacity*1000)**0.2685
        self.set_ank()
        # Demand-related costs: electricity for the delivered heat.
        drc = self.heat_yearly*self.electricity_price
        self.Anv = drc*self.a*self.bv
        # Operation-related costs: effort hours plus maintenance share of A0.
        orc = 30*self.effop
        ain = self.A0*(self.finst+self.fwins)/100
        self.Anb = orc*self.a*self.bb + ain*self.a*self.bi
        # No other costs or proceeds for an electric heater.
        self.Ans = 0
        self.Ane = 0
        self.annuity = self.Ane - (self.Ank + self.Anv + self.Anb + self.Ans)
        return
Class representing Electric Heaters. Efficiency is approximated to 100%. Attributes: model: Model of the Electric Heater. th_capacity: Thermal capacity of the Electric Heater [kW]. heat_yearly: Sum value of heat provided by the electric heater unit over the year [kWh]. heat_hourly: Hourly values of the heat provided by the Electric Heater unit [kWh]. annuity: Annuity of the Electric Heater [Euros]. emissions: CO2 emissions of the electric heater unit [kg of CO2]. Extends: Annuity class
62598fb93317a56b869be5fe
class Gear(object):
    """Base class for gears.

    Properties
    ----------
    pitch_dia : Quantity [length]
        Pitch diameter
    num_teeth : int
        Number of teeth
    """

    def __init__(self, pitch_dia, num_teeth):
        # Reject quantities that do not carry a length dimension.
        dims = dict(ureg.get_dimensionality(pitch_dia))
        if '[length]' not in dims:
            raise AttributeError("Arg 'pitch_dia' must have [length] dimensionality")
        self._pitch_dia = pitch_dia
        self._num_teeth = num_teeth

    @property
    def pitch_dia(self):
        return self._pitch_dia

    @pitch_dia.setter
    def pitch_dia(self, value):
        # NOTE(review): unlike __init__, the setter does not re-check
        # dimensionality — confirm whether that is intentional.
        self._pitch_dia = value

    @property
    def num_teeth(self):
        return self._num_teeth

    @num_teeth.setter
    def num_teeth(self, value):
        self._num_teeth = value
Base class for gears Properties ---------- pitch_dia : Quantity [length] Pitch diameter num_teeth : int Number of teeth
62598fb956b00c62f0fb2a1b
class UpdateCacheMiddleware(object):
    """Response-phase cache middleware that caches cacheable responses.

    Must be used as part of the two-part update/fetch cache middleware;
    UpdateCacheMiddleware must be the first piece of middleware in
    MIDDLEWARE_CLASSES so that it runs last during the response phase.
    Expects ``cache_timeout``, ``key_prefix``, ``patch_headers``, and
    ``post_process_response`` to be configured on the instance.
    """

    def process_response(self, request, response):
        """Cache the response when the fetch phase marked it cacheable."""
        # Only cache when the fetch middleware flagged this request
        # (single getattr replaces the hasattr + attribute double lookup).
        if not getattr(request, '_cache_update_cache', False):
            return response
        if request.method != 'GET':
            return response
        # Fix: `not x == 200` replaced with the direct comparison.
        if response.status_code != 200:
            return response
        timeout = get_max_age(response)
        if timeout is None:  # fix: identity comparison with None
            timeout = self.cache_timeout
        elif timeout == 0:
            # max-age of 0 means "do not cache".
            return response
        if self.patch_headers:
            patch_response_headers(response, timeout)
        if timeout:
            # key_prefix may be a callable taking the request.
            if callable(self.key_prefix):
                key_prefix = self.key_prefix(request)
            else:
                key_prefix = self.key_prefix
            if self.post_process_response:
                response = self.post_process_response(response, request=request)
            cache_key = learn_cache_key(request, response, timeout, key_prefix)
            cache.set(cache_key, response, timeout)
        return response
Response-phase cache middleware that updates the cache if the response is cacheable. Must be used as part of the two-part update/fetch cache middleware. UpdateCacheMiddleware must be the first piece of middleware in MIDDLEWARE_CLASSES so that it'll get called last during the response phase.
62598fb93617ad0b5ee062a7
class LazyCachedDataset(Dataset):
    """Cache data to a tempdir as samples are requested."""

    def __init__(self, dataset, cache_dir=None, device="cpu"):
        import tempfile

        self.dataset = dataset
        self.device = device
        # Keep the TemporaryDirectory object alive so the cache survives for
        # this dataset's lifetime and is removed when it is garbage collected.
        self._tmpdir = tempfile.TemporaryDirectory(
            dir=cache_dir, prefix="lagomorph.LazyCachedDataset."
        )
        self.tmpdir = self._tmpdir.name

    def filename(self, j):
        """Cache file path for sample *j*."""
        return os.path.join(self.tmpdir, f"{j}.pth")

    def __len__(self):
        return len(self.dataset)

    def __getitem__(self, j):
        cache_path = self.filename(j)
        if not os.path.isfile(cache_path):
            # First access: pull from the wrapped dataset and persist to disk.
            sample = self.dataset[j]
            torch.save(sample, cache_path)
            return sample
        # Later accesses load from the cache onto the configured device.
        return torch.load(cache_path, map_location=self.device)
Cache data to a tempdir as samples are requested
62598fb910dbd63aa1c70d1a
class AuthRouter(object):
    """Database router that pins auth-related apps to the 'auth_db' database.

    Created at project start to route authentication data: the 'auth',
    'sessions', and 'web_core' apps read from, write to, and relate within
    'auth_db'; everything else falls through (returns None).
    """

    # Apps whose data lives in the auth database (deduplicates the three
    # identical if-chains of the original implementation).
    ROUTED_APPS = frozenset(('auth', 'sessions', 'web_core'))

    def db_for_read(self, model, **hints):
        """Route reads for routed apps to auth_db; defer otherwise."""
        if model._meta.app_label in self.ROUTED_APPS:
            return 'auth_db'
        return None

    def db_for_write(self, model, **hints):
        """Route writes for routed apps to auth_db; defer otherwise."""
        if model._meta.app_label in self.ROUTED_APPS:
            return 'auth_db'
        return None

    def allow_relation(self, obj1, obj2, **hints):
        """Allow relations when either object belongs to a routed app."""
        if (obj1._meta.app_label in self.ROUTED_APPS
                or obj2._meta.app_label in self.ROUTED_APPS):
            return True
        return None

    def allow_syncdb(self, db, model):
        """Only sync 'auth' models to auth_db, and keep them off other dbs.

        Note: intentionally checks only the 'auth' label, as the original did.
        """
        if db == 'auth_db':
            return model._meta.app_label == 'auth'
        elif model._meta.app_label == 'auth':
            return False
        return None
A database router created at project start to direct authentication-related apps to a dedicated database.
62598fb966673b3332c30531
@skipIf(NO_MOCK, NO_MOCK_REASON)
@skipIf(not HAS_PYVMOMI, 'The \'pyvmomi\' library is missing')
@patch('salt.utils.vmware.get_datacenters', MagicMock(return_value=[MagicMock()]))
class GetDatacenterTestCase(TestCase):
    """Tests for salt.utils.vmware.get_datacenter."""

    def setUp(self):
        # Fresh service-instance and datacenter mocks for every test.
        self.mock_si = MagicMock()
        self.mock_dc = MagicMock()

    def test_get_datacenters_call(self):
        """get_datacenter delegates to get_datacenters with a one-name list."""
        patched = MagicMock(return_value=[MagicMock()])
        with patch('salt.utils.vmware.get_datacenters', patched):
            vmware.get_datacenter(self.mock_si, 'fake_dc1')
        patched.assert_called_once_with(
            self.mock_si, datacenter_names=['fake_dc1'])

    def test_no_datacenters_returned(self):
        """An empty lookup raises VMwareObjectRetrievalError."""
        with patch('salt.utils.vmware.get_datacenters',
                   MagicMock(return_value=[])):
            with self.assertRaises(VMwareObjectRetrievalError) as excinfo:
                vmware.get_datacenter(self.mock_si, 'fake_dc1')
        self.assertEqual('Datacenter \'fake_dc1\' was not found',
                         excinfo.exception.strerror)

    def test_get_datacenter_return(self):
        """The single datacenter found is returned unchanged."""
        with patch('salt.utils.vmware.get_datacenters',
                   MagicMock(return_value=[self.mock_dc])):
            res = vmware.get_datacenter(self.mock_si, 'fake_dc1')
        self.assertEqual(res, self.mock_dc)
Tests for salt.utils.vmware.get_datacenter
62598fb97cff6e4e811b5b82
class DataSource(object):
    """Abstract data source model.

    Subclasses call changed() whenever their data updates; interested
    parties register zero-argument callbacks via on_change().
    """

    def __init__(self, name):
        self.name = name
        self.callbacks = []  # listeners fired on every change
        self._logger = logging.getLogger(self.name)

    def on_change(self, callback):
        """Register a zero-argument callback to run on every change."""
        self.callbacks.append(callback)

    def changed(self):
        """Notify all registered listeners."""
        for listener in self.callbacks:
            listener()

    def start(self):
        """Hook for subclasses that need to begin producing data."""
        pass

    def export(self):
        """Return the serializable description consumed downstream."""
        return {
            "name": self.name,
            "timeout": 5*60,
        }

    def desc(self):
        """Return a short type/name descriptor."""
        return {
            "type": self.__class__.__name__,
            "name": self.name,
        }
Abstract data source model
62598fb94f88993c371f05be
class TargetMisfit(InversionDirective):
    """Stop the inversion once the data misfit drops below the target.

    .. note:: Currently the target misfit is not set up for joint
        inversions. Get in touch (https://github.com/simpeg/simpeg) if you
        would like to help with the upgrade!
    """

    chifact = 1.
    phi_d_star = None

    @property
    def target(self):
        """Target misfit; defaults to chifact * (0.5 * total data count)."""
        if getattr(self, '_target', None) is None:
            if self.phi_d_star is None:
                # Default phi_d_star to half the total number of data
                # across all surveys.
                total_data = 0
                for survey in self.survey:
                    total_data += survey.nD
                self.phi_d_star = 0.5 * total_data
            self._target = self.chifact * self.phi_d_star
        return self._target

    @target.setter
    def target(self, val):
        self._target = val

    def endIter(self):
        """Flag the optimizer to stop once the misfit is below target."""
        if self.invProb.phi_d < self.target:
            self.opt.stopNextIteration = True
... note:: Currently the target misfit is not set up for joint inversions. Get `in touch <https://github.com/simpeg/simpeg/issues/new>`_ if you would like to help with the upgrade!
62598fb9ad47b63b2c5a79b5
class ra_rb_of_re(Inst54bit):
    """Decoder for the LDS/LDT/LDW/STS/STT/STW memory instructions, whose
    operands take the form rA, [rB +/- offset(, count)] in a 54-bit word.
    """

    add_commas = False

    def get_operands(self):
        """Decode (reg1, reg2, reg_count, adjust, offset) from the bits."""
        reg1 = self.get_bits(7,11)
        reg2 = self.get_bits(12,16)
        # The stored count is zero-based; +1 yields the actual register count.
        reg_count = self.get_bits(17,21) + 1
        adjust = self.get_bits(22,23)
        offset = self.get_bits(25,50)
        # Bit 24 is the sign bit: convert the 26-bit magnitude to a
        # negative two's-complement value.
        if self.get_bits(24, 24):
            offset = -((1 << 26) - offset)
        return reg1, reg2, reg_count, adjust, offset

    def get_operand_tokens(self):
        """Build the Binary Ninja display tokens: rA, [rB(+/-imm)(, count)]."""
        operands = self.get_operands()
        tokens = []
        tokens.append(make_operand_token(REGISTER_MODE, REGISTER_NAMES[operands[0]], None))
        tokens.append(InstructionTextToken(InstructionTextTokenType.OperandSeparatorToken, ","))
        tokens.append(InstructionTextToken(InstructionTextTokenType.BeginMemoryOperandToken, "["))
        tokens.append(make_operand_token(REGISTER_MODE, REGISTER_NAMES[operands[1]], None))
        if operands[4] != 0:
            # Render signed offsets as an explicit +imm / -imm.
            if operands[4] < 0:
                tokens.append(InstructionTextToken(InstructionTextTokenType.TextToken, '-'))
                tokens.append(make_operand_token(IMMEDIATE_MODE, None, -operands[4]))
            else:
                tokens.append(InstructionTextToken(InstructionTextTokenType.TextToken, '+'))
                tokens.append(make_operand_token(IMMEDIATE_MODE, None, operands[4]))
        if operands[2] != 1:
            # Only show the register count when more than one is transferred.
            tokens.append(InstructionTextToken(InstructionTextTokenType.OperandSeparatorToken, ","))
            tokens.append(make_operand_token(IMMEDIATE_MODE, None, operands[2]))
        tokens.append(InstructionTextToken(InstructionTextTokenType.EndMemoryOperandToken, "]"))
        return tokens

    def get_name(self):
        """Mnemonic plus a suffix derived from the adjust field.

        NOTE(review): 'i'/'d' suffix semantics assumed to mean increment /
        decrement addressing — confirm against the ISA reference.
        """
        operands = self.get_operands()
        suffix = ""
        if operands[3] == 1:
            suffix = "i"
        elif operands[3] == 2:
            suffix = "d"
        return self.name.lower() + suffix

    @staticmethod
    def decode(cls, name, values, addr, instruction):
        """Return an instance when opcode fields (bits 0-6 and 51-53) match
        the binary strings in *values*; None otherwise."""
        if (get_bits(instruction, 54, 0, 6) == int(values[0], 2) and get_bits(instruction, 54, 51, 53) == int(values[1], 2)):
            return cls(addr, instruction, name)
        return None
LDS LDT LDW STS STT STW
62598fb9f548e778e596b707