code stringlengths 4 4.48k | docstring stringlengths 1 6.45k | _id stringlengths 24 24 |
|---|---|---|
class BaseWebTest(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.app = webtest.TestApp("config:conf/tests.ini", relative_to='.') <NEW_LINE> self.app.RequestClass = PrefixedRequestClass <NEW_LINE> self.db = self.app.app.registry.backend <NEW_LINE> self.indexer = self.app.app.registry.index <NEW_LINE> token, self.credentials = get_hawk_credentials() <NEW_LINE> self.db.store_credentials(token, self.credentials) <NEW_LINE> auth_password = base64.b64encode( (u'%s:%s' % (self.credentials['id'], self.credentials['key'])).encode('ascii')) .strip().decode('ascii') <NEW_LINE> self.headers = { 'Content-Type': 'application/json', 'Authorization': 'Basic {0}'.format(auth_password), } <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> self.db.delete_db() <NEW_LINE> self.indexer.delete_indices() | Base Web Test to test your cornice service.
It setups the database before each test and delete it after. | 62598faf009cb60464d01548 |
class RFile: <NEW_LINE> <INDENT> thrift_spec = ( None, (1, TType.STRUCT, 'meta', (RFileMetadata, RFileMetadata.thrift_spec), None, ), (2, TType.STRING, 'content', None, None, ), ) <NEW_LINE> def __init__(self, meta=None, content=None,): <NEW_LINE> <INDENT> self.meta = meta <NEW_LINE> self.content = content <NEW_LINE> <DEDENT> def read(self, iprot): <NEW_LINE> <INDENT> if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None: <NEW_LINE> <INDENT> fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec)) <NEW_LINE> return <NEW_LINE> <DEDENT> iprot.readStructBegin() <NEW_LINE> while True: <NEW_LINE> <INDENT> (fname, ftype, fid) = iprot.readFieldBegin() <NEW_LINE> if ftype == TType.STOP: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> if fid == 1: <NEW_LINE> <INDENT> if ftype == TType.STRUCT: <NEW_LINE> <INDENT> self.meta = RFileMetadata() <NEW_LINE> self.meta.read(iprot) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> elif fid == 2: <NEW_LINE> <INDENT> if ftype == TType.STRING: <NEW_LINE> <INDENT> self.content = iprot.readString(); <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> iprot.readFieldEnd() <NEW_LINE> <DEDENT> iprot.readStructEnd() <NEW_LINE> <DEDENT> def write(self, oprot): <NEW_LINE> <INDENT> if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None: <NEW_LINE> <INDENT> oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))) <NEW_LINE> return <NEW_LINE> <DEDENT> oprot.writeStructBegin('RFile') <NEW_LINE> if self.meta is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('meta', TType.STRUCT, 1) <NEW_LINE> self.meta.write(oprot) <NEW_LINE> oprot.writeFieldEnd() 
<NEW_LINE> <DEDENT> if self.content is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('content', TType.STRING, 2) <NEW_LINE> oprot.writeString(self.content) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> oprot.writeFieldStop() <NEW_LINE> oprot.writeStructEnd() <NEW_LINE> <DEDENT> def validate(self): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> L = ['%s=%r' % (key, value) for key, value in self.__dict__.iteritems()] <NEW_LINE> return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not (self == other) | Attributes:
- meta
- content | 62598faf097d151d1a2c1052 |
class array_element: <NEW_LINE> <INDENT> def __init__(self, arr, popped): <NEW_LINE> <INDENT> self.arr = arr <NEW_LINE> self.popped = popped <NEW_LINE> self.left = None <NEW_LINE> self.mid = None <NEW_LINE> self.right = None <NEW_LINE> self.consume() <NEW_LINE> <DEDENT> def consume(self): <NEW_LINE> <INDENT> temp = val(self.arr, self.popped) <NEW_LINE> if len(self.arr) == 0: <NEW_LINE> <INDENT> print("Never found a closing bracket ']' for an array") <NEW_LINE> print_error(self.arr, self.popped) <NEW_LINE> exit(1) <NEW_LINE> <DEDENT> if self.arr[0] == ',': <NEW_LINE> <INDENT> self.left = temp <NEW_LINE> pop = self.arr.pop(0) <NEW_LINE> self.popped.append(pop) <NEW_LINE> self.mid = pop <NEW_LINE> self.right = array_element(self.arr, self.popped) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.mid = temp <NEW_LINE> <DEDENT> return | Arguments:
arr: This should be a pointer (mutable object) to the string that the parser is parsing.
popped: This should be a pointer (mutable object) to the string that the parser has parsed.
Attributes:
arr: A list of the characters that have been popped within the parser
popped: A list of the characters that have been successfully popped
left: If the array_element only contains a single val this will be empty
If the array_element contains multiple vals this will be a val object
mid: If the array_element only contains a single val this will be an val object
If the array_element contains multiple items this will be the character ','
right: If the array_element only contains a single val this will be empty
If the array_element contains multiple vals this will be an array_element object
Functions:
consume: This function consumes characters to define vals and array elements
Returns:
None | 62598fafd7e4931a7ef3c0bc |
class MAVLink_array_test_3_message(MAVLink_message): <NEW_LINE> <INDENT> id = MAVLINK_MSG_ID_ARRAY_TEST_3 <NEW_LINE> name = 'ARRAY_TEST_3' <NEW_LINE> fieldnames = ['v', 'ar_u32'] <NEW_LINE> ordered_fieldnames = [ 'ar_u32', 'v' ] <NEW_LINE> format = '<4IB' <NEW_LINE> native_format = bytearray('<IB', 'ascii') <NEW_LINE> orders = [1, 0] <NEW_LINE> lengths = [4, 1] <NEW_LINE> array_lengths = [4, 0] <NEW_LINE> crc_extra = 19 <NEW_LINE> def __init__(self, v, ar_u32): <NEW_LINE> <INDENT> MAVLink_message.__init__(self, MAVLink_array_test_3_message.id, MAVLink_array_test_3_message.name) <NEW_LINE> self._fieldnames = MAVLink_array_test_3_message.fieldnames <NEW_LINE> self.v = v <NEW_LINE> self.ar_u32 = ar_u32 <NEW_LINE> <DEDENT> def pack(self, mav, force_mavlink1=False): <NEW_LINE> <INDENT> return MAVLink_message.pack(self, mav, 19, struct.pack('<4IB', self.ar_u32[0], self.ar_u32[1], self.ar_u32[2], self.ar_u32[3], self.v), force_mavlink1=force_mavlink1) | Array test #3. | 62598faf498bea3a75a57b47 |
class Jsonifier: <NEW_LINE> <INDENT> def __init__(self, json_=json, **kwargs): <NEW_LINE> <INDENT> self.json = json_ <NEW_LINE> self.dumps_args = kwargs <NEW_LINE> <DEDENT> def dumps(self, data, **kwargs): <NEW_LINE> <INDENT> for k, v in self.dumps_args.items(): <NEW_LINE> <INDENT> kwargs.setdefault(k, v) <NEW_LINE> <DEDENT> return self.json.dumps(data, **kwargs) + '\n' <NEW_LINE> <DEDENT> def loads(self, data): <NEW_LINE> <INDENT> if isinstance(data, bytes): <NEW_LINE> <INDENT> data = data.decode() <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> return self.json.loads(data) <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> if isinstance(data, str): <NEW_LINE> <INDENT> return data | Central point to serialize and deserialize to/from JSon in Connexion. | 62598faf5166f23b2e243401 |
class AttributeBasedElementClassLookup(FallbackElementClassLookup): <NEW_LINE> <INDENT> def __init__(self, attribute_name, class_mapping, fallback=None): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def __new__(*args, **kwargs): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> __pyx_vtable__ = None | AttributeBasedElementClassLookup(self, attribute_name, class_mapping, fallback=None)
Checks an attribute of an Element and looks up the value in a
class dictionary.
Arguments:
- attribute name - '{ns}name' style string
- class mapping - Python dict mapping attribute values to Element classes
- fallback - optional fallback lookup mechanism
A None key in the class mapping will be checked if the attribute is
missing. | 62598faf30bbd7224646998c |
class ContentViewFilterSearchTestCase(APITestCase): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def setUpClass(cls): <NEW_LINE> <INDENT> super(ContentViewFilterSearchTestCase, cls).setUpClass() <NEW_LINE> cls.content_view = entities.ContentView().create() <NEW_LINE> <DEDENT> @tier1 <NEW_LINE> @skip_if_bug_open('bugzilla', 1242534) <NEW_LINE> def test_positive_search_erratum(self): <NEW_LINE> <INDENT> cv_filter = entities.ErratumContentViewFilter( content_view=self.content_view ).create() <NEW_LINE> entities.ContentViewFilterRule(content_view_filter=cv_filter).search() <NEW_LINE> <DEDENT> @tier1 <NEW_LINE> def test_positive_search_package_group(self): <NEW_LINE> <INDENT> cv_filter = entities.PackageGroupContentViewFilter( content_view=self.content_view ).create() <NEW_LINE> entities.ContentViewFilterRule(content_view_filter=cv_filter).search() <NEW_LINE> <DEDENT> @tier1 <NEW_LINE> def test_positive_search_rpm(self): <NEW_LINE> <INDENT> cv_filter = entities.RPMContentViewFilter( content_view=self.content_view ).create() <NEW_LINE> entities.ContentViewFilterRule(content_view_filter=cv_filter).search() | Tests that search through content view filters. | 62598faf56ac1b37e6302213 |
class TestInlineResponse2002DatasetResources(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def testInlineResponse2002DatasetResources(self): <NEW_LINE> <INDENT> pass | InlineResponse2002DatasetResources unit test stubs | 62598faf8e7ae83300ee90ca |
class MedicinalProductNameNamePart(backboneelement.BackboneElement): <NEW_LINE> <INDENT> resource_type = Field("MedicinalProductNameNamePart", const=True) <NEW_LINE> part: fhirtypes.String = Field( None, alias="part", title="A fragment of a product name", description=None, element_property=True, element_required=True, ) <NEW_LINE> part__ext: fhirtypes.FHIRPrimitiveExtensionType = Field( None, alias="_part", title="Extension field for ``part``." ) <NEW_LINE> type: fhirtypes.CodingType = Field( ..., alias="type", title="Idenifying type for this part of the name (e.g. strength part)", description=None, element_property=True, ) <NEW_LINE> @classmethod <NEW_LINE> def elements_sequence(cls): <NEW_LINE> <INDENT> return ["id", "extension", "modifierExtension", "part", "type"] <NEW_LINE> <DEDENT> @root_validator(pre=True, allow_reuse=True) <NEW_LINE> def validate_required_primitive_elements_3009( cls, values: typing.Dict[str, typing.Any] ) -> typing.Dict[str, typing.Any]: <NEW_LINE> <INDENT> required_fields = [("part", "part__ext")] <NEW_LINE> _missing = object() <NEW_LINE> def _fallback(): <NEW_LINE> <INDENT> return "" <NEW_LINE> <DEDENT> errors: typing.List["ErrorWrapper"] = [] <NEW_LINE> for name, ext in required_fields: <NEW_LINE> <INDENT> field = cls.__fields__[name] <NEW_LINE> ext_field = cls.__fields__[ext] <NEW_LINE> value = values.get(field.alias, _missing) <NEW_LINE> if value not in (_missing, None): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> ext_value = values.get(ext_field.alias, _missing) <NEW_LINE> missing_ext = True <NEW_LINE> if ext_value not in (_missing, None): <NEW_LINE> <INDENT> if isinstance(ext_value, dict): <NEW_LINE> <INDENT> missing_ext = len(ext_value.get("extension", [])) == 0 <NEW_LINE> <DEDENT> elif ( getattr(ext_value.__class__, "get_resource_type", _fallback)() == "FHIRPrimitiveExtension" ): <NEW_LINE> <INDENT> if ext_value.extension and len(ext_value.extension) > 0: <NEW_LINE> <INDENT> missing_ext = False <NEW_LINE> <DEDENT> <DEDENT> 
else: <NEW_LINE> <INDENT> validate_pass = True <NEW_LINE> for validator in ext_field.type_.__get_validators__(): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> ext_value = validator(v=ext_value) <NEW_LINE> <DEDENT> except ValidationError as exc: <NEW_LINE> <INDENT> errors.append(ErrorWrapper(exc, loc=ext_field.alias)) <NEW_LINE> validate_pass = False <NEW_LINE> <DEDENT> <DEDENT> if not validate_pass: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> if ext_value.extension and len(ext_value.extension) > 0: <NEW_LINE> <INDENT> missing_ext = False <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> if missing_ext: <NEW_LINE> <INDENT> if value is _missing: <NEW_LINE> <INDENT> errors.append(ErrorWrapper(MissingError(), loc=field.alias)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> errors.append( ErrorWrapper(NoneIsNotAllowedError(), loc=field.alias) ) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> if len(errors) > 0: <NEW_LINE> <INDENT> raise ValidationError(errors, cls) <NEW_LINE> <DEDENT> return values | Disclaimer: Any field name ends with ``__ext`` doesn't part of
Resource StructureDefinition, instead used to enable Extensibility feature
for FHIR Primitive Data Types.
Coding words or phrases of the name. | 62598faf99fddb7c1ca62dfd |
class OneDataAdmin(admin.ModelAdmin): <NEW_LINE> <INDENT> def has_add_permission(self, request): <NEW_LINE> <INDENT> return False if self.model.objects.count() > 0 else True | DBに1つだけデータを格納したいモデルは、これを使う. | 62598faf2ae34c7f260ab109 |
class ListEnvironmentsAsyncPager: <NEW_LINE> <INDENT> def __init__( self, method: Callable[..., Awaitable[environment.ListEnvironmentsResponse]], request: environment.ListEnvironmentsRequest, response: environment.ListEnvironmentsResponse, *, metadata: Sequence[Tuple[str, str]] = () ): <NEW_LINE> <INDENT> self._method = method <NEW_LINE> self._request = environment.ListEnvironmentsRequest(request) <NEW_LINE> self._response = response <NEW_LINE> self._metadata = metadata <NEW_LINE> <DEDENT> def __getattr__(self, name: str) -> Any: <NEW_LINE> <INDENT> return getattr(self._response, name) <NEW_LINE> <DEDENT> @property <NEW_LINE> async def pages(self) -> AsyncIterator[environment.ListEnvironmentsResponse]: <NEW_LINE> <INDENT> yield self._response <NEW_LINE> while self._response.next_page_token: <NEW_LINE> <INDENT> self._request.page_token = self._response.next_page_token <NEW_LINE> self._response = await self._method(self._request, metadata=self._metadata) <NEW_LINE> yield self._response <NEW_LINE> <DEDENT> <DEDENT> def __aiter__(self) -> AsyncIterator[environment.Environment]: <NEW_LINE> <INDENT> async def async_generator(): <NEW_LINE> <INDENT> async for page in self.pages: <NEW_LINE> <INDENT> for response in page.environments: <NEW_LINE> <INDENT> yield response <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return async_generator() <NEW_LINE> <DEDENT> def __repr__(self) -> str: <NEW_LINE> <INDENT> return "{0}<{1!r}>".format(self.__class__.__name__, self._response) | A pager for iterating through ``list_environments`` requests.
This class thinly wraps an initial
:class:`google.cloud.dialogflow_v2beta1.types.ListEnvironmentsResponse` object, and
provides an ``__aiter__`` method to iterate through its
``environments`` field.
If there are more pages, the ``__aiter__`` method will make additional
``ListEnvironments`` requests and continue to iterate
through the ``environments`` field on the
corresponding responses.
All the usual :class:`google.cloud.dialogflow_v2beta1.types.ListEnvironmentsResponse`
attributes are available on the pager. If multiple requests are made, only
the most recent response is retained, and thus used for attribute lookup. | 62598faf8a43f66fc4bf21a2 |
class GhostscriptFonts(Package): <NEW_LINE> <INDENT> homepage = "http://ghostscript.com/" <NEW_LINE> url = "https://www.imagemagick.org/download/delegates/ghostscript-fonts-std-8.11.tar.gz" <NEW_LINE> version('8.11', sha256='0eb6f356119f2e49b2563210852e17f57f9dcc5755f350a69a46a0d641a0c401') <NEW_LINE> def install(self, spec, prefix): <NEW_LINE> <INDENT> fdir = join_path(prefix.share, 'font') <NEW_LINE> mkdirp(fdir) <NEW_LINE> files = glob.glob('*') <NEW_LINE> for f in files: <NEW_LINE> <INDENT> if not f.startswith('spack-build'): <NEW_LINE> <INDENT> install(f, fdir) | Ghostscript Fonts | 62598faf7d847024c075c3eb |
class FeedPlaceholderViewServiceServicer(object): <NEW_LINE> <INDENT> def GetFeedPlaceholderView(self, request, context): <NEW_LINE> <INDENT> context.set_code(grpc.StatusCode.UNIMPLEMENTED) <NEW_LINE> context.set_details('Method not implemented!') <NEW_LINE> raise NotImplementedError('Method not implemented!') | Proto file describing the FeedPlaceholderView service.
Service to fetch feed placeholder views. | 62598faf4a966d76dd5eeeff |
class DesignateAdminClient(BaseDesignateClient): <NEW_LINE> <INDENT> def _get_noauth_auth_provider(self): <NEW_LINE> <INDENT> creds = KeystoneV2Credentials( tenant_id=cfg.CONF.noauth.tenant_id, ) <NEW_LINE> return NoAuthAuthProvider(creds, cfg.CONF.noauth.designate_endpoint) <NEW_LINE> <DEDENT> def _get_keystone_auth_provider(self): <NEW_LINE> <INDENT> creds = KeystoneV2Credentials( username=cfg.CONF.identity.admin_username, password=cfg.CONF.identity.admin_password, tenant_name=cfg.CONF.identity.admin_tenant_name, ) <NEW_LINE> return self._create_keystone_auth_provider(creds) | Client with admin user | 62598faf66656f66f7d5a418 |
class League(BaseDonbestResponse): <NEW_LINE> <INDENT> def __init__(self, node, donbest): <NEW_LINE> <INDENT> super().__init__(node=node, donbest=donbest) <NEW_LINE> self.id = None <NEW_LINE> self.name = None <NEW_LINE> self.abbreviation = None <NEW_LINE> self.information = None <NEW_LINE> self.sport = None <NEW_LINE> self._setattr_from_attributes(self.node) <NEW_LINE> self._setattr_from_single_children(self.node) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_xml_collection(cls, node, sport, donbest): <NEW_LINE> <INDENT> l = cls(node=node, donbest=donbest) <NEW_LINE> l.sport = sport <NEW_LINE> return l | Returns a League | 62598faf7d43ff2487427416 |
class SoftmaxDistribution(CategoricalDistribution): <NEW_LINE> <INDENT> def __init__(self, logits, beta=1.0, min_prob=0.0): <NEW_LINE> <INDENT> self.logits = logits <NEW_LINE> self.beta = beta <NEW_LINE> self.min_prob = min_prob <NEW_LINE> self.n = logits.shape[1] <NEW_LINE> assert self.min_prob * self.n <= 1.0 <NEW_LINE> <DEDENT> @property <NEW_LINE> def params(self): <NEW_LINE> <INDENT> return (self.logits,) <NEW_LINE> <DEDENT> @cached_property <NEW_LINE> def all_prob(self): <NEW_LINE> <INDENT> with chainer.force_backprop_mode(): <NEW_LINE> <INDENT> if self.min_prob > 0: <NEW_LINE> <INDENT> return (F.softmax(self.beta * self.logits) * (1 - self.min_prob * self.n)) + self.min_prob <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return F.softmax(self.beta * self.logits) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> @cached_property <NEW_LINE> def all_log_prob(self): <NEW_LINE> <INDENT> with chainer.force_backprop_mode(): <NEW_LINE> <INDENT> if self.min_prob > 0: <NEW_LINE> <INDENT> return F.log(self.all_prob) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return F.log_softmax(self.beta * self.logits) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def copy(self): <NEW_LINE> <INDENT> return SoftmaxDistribution(_unwrap_variable(self.logits).copy(), beta=self.beta, min_prob=self.min_prob) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return 'SoftmaxDistribution(beta={}, min_prob={}) logits:{} probs:{} entropy:{}'.format( self.beta, self.min_prob, self.logits.array, self.all_prob.array, self.entropy.array) <NEW_LINE> <DEDENT> def __getitem__(self, i): <NEW_LINE> <INDENT> return SoftmaxDistribution(self.logits[i], beta=self.beta, min_prob=self.min_prob) | Softmax distribution.
Args:
logits (ndarray or chainer.Variable): Logits for softmax
distribution.
beta (float): inverse of the temperature parameter of softmax
distribution
min_prob (float): minimum probability across all labels | 62598fafa219f33f346c683e |
class SphericalCircle(Polygon): <NEW_LINE> <INDENT> def __init__(self, center, radius, resolution=100, vertex_unit=u.degree, **kwargs): <NEW_LINE> <INDENT> longitude, latitude = center <NEW_LINE> lon = np.linspace(0., 2 * np.pi, resolution + 1)[:-1] * u.radian <NEW_LINE> lat = np.repeat(0.5 * np.pi - radius.to_value(u.radian), resolution) * u.radian <NEW_LINE> lon, lat = _rotate_polygon(lon, lat, longitude, latitude) <NEW_LINE> lon = lon.to_value(vertex_unit) <NEW_LINE> lat = lat.to_value(vertex_unit) <NEW_LINE> vertices = np.array([lon, lat]).transpose() <NEW_LINE> super().__init__(vertices, **kwargs) | Create a patch representing a spherical circle - that is, a circle that is
formed of all the points that are within a certain angle of the central
coordinates on a sphere. Here we assume that latitude goes from -90 to +90
This class is needed in cases where the user wants to add a circular patch
to a celestial image, since otherwise the circle will be distorted, because
a fixed interval in longitude corresponds to a different angle on the sky
depending on the latitude.
Parameters
----------
center : tuple or `~astropy.units.Quantity` ['angle']
This can be either a tuple of two `~astropy.units.Quantity` objects, or
a single `~astropy.units.Quantity` array with two elements.
radius : `~astropy.units.Quantity` ['angle']
The radius of the circle
resolution : int, optional
The number of points that make up the circle - increase this to get a
smoother circle.
vertex_unit : `~astropy.units.Unit`
The units in which the resulting polygon should be defined - this
should match the unit that the transformation (e.g. the WCS
transformation) expects as input.
Notes
-----
Additional keyword arguments are passed to `~matplotlib.patches.Polygon` | 62598fafcc0a2c111447b03a |
class ExpRand(UGen): <NEW_LINE> <INDENT> __documentation_section__ = 'Noise UGens' <NEW_LINE> __slots__ = () <NEW_LINE> _ordered_input_names = ( 'minimum', 'maximum', ) <NEW_LINE> _valid_calculation_rates = None <NEW_LINE> def __init__( self, calculation_rate=None, minimum=0., maximum=1., ): <NEW_LINE> <INDENT> UGen.__init__( self, calculation_rate=calculation_rate, minimum=minimum, maximum=maximum, ) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def ir( cls, maximum=1, minimum=0.01, ): <NEW_LINE> <INDENT> from supriya.tools import synthdeftools <NEW_LINE> calculation_rate = synthdeftools.CalculationRate.SCALAR <NEW_LINE> ugen = cls._new_expanded( calculation_rate=calculation_rate, maximum=maximum, minimum=minimum, ) <NEW_LINE> return ugen <NEW_LINE> <DEDENT> @property <NEW_LINE> def maximum(self): <NEW_LINE> <INDENT> index = self._ordered_input_names.index('maximum') <NEW_LINE> return self._inputs[index] <NEW_LINE> <DEDENT> @property <NEW_LINE> def minimum(self): <NEW_LINE> <INDENT> index = self._ordered_input_names.index('minimum') <NEW_LINE> return self._inputs[index] | An exponential random distribution.
::
>>> exp_rand = ugentools.ExpRand.ir()
>>> exp_rand
ExpRand.ir() | 62598faf5fc7496912d48295 |
class StudentViewSet(viewsets.ModelViewSet): <NEW_LINE> <INDENT> queryset = Student.objects.all() <NEW_LINE> serializer_class = StudentSerializer | A simple ViewSet for viewing and editing the s. | 62598faf3317a56b869be55f |
class ContentPointer(): <NEW_LINE> <INDENT> content = None <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return self.content.__str__() <NEW_LINE> <DEDENT> def tag_id(self): <NEW_LINE> <INDENT> return self.content.tag_id() <NEW_LINE> <DEDENT> def details(self): <NEW_LINE> <INDENT> return self.content.details <NEW_LINE> <DEDENT> def about(self): <NEW_LINE> <INDENT> return self.content.about() <NEW_LINE> <DEDENT> def content_url(self): <NEW_LINE> <INDENT> return self.content.url() <NEW_LINE> <DEDENT> def is_theory(self): <NEW_LINE> <INDENT> return self.content.is_theory() <NEW_LINE> <DEDENT> def is_subtheory(self): <NEW_LINE> <INDENT> return self.content.is_subtheory() <NEW_LINE> <DEDENT> def is_evidence(self): <NEW_LINE> <INDENT> return self.content.is_evidence() <NEW_LINE> <DEDENT> def is_verifiable(self): <NEW_LINE> <INDENT> return self.content.is_verifiable() <NEW_LINE> <DEDENT> def is_fact(self): <NEW_LINE> <INDENT> return self.content.is_fact() | Abstract manager for accessing and agregating the theory's points.
Usage: This class can also be used to construct dummy Contents that will not show up in
the database.
Attributes:
content (Content): The theory dependency.
saved_true_points (float): Cache for the true points.
saved_false_points (float): Cache for the fasle points. | 62598faf8e7ae83300ee90cb |
class FloatRangeField(models.FloatField): <NEW_LINE> <INDENT> def __init__(self, verbose_name=None, name=None, min_value=None, max_value=None, **kwargs): <NEW_LINE> <INDENT> self.min_value, self.max_value = min_value, max_value <NEW_LINE> models.FloatField.__init__(self, verbose_name, name, **kwargs) <NEW_LINE> <DEDENT> def formfield(self, **kwargs): <NEW_LINE> <INDENT> defaults = {'min_value': self.min_value, 'max_value':self.max_value} <NEW_LINE> defaults.update(kwargs) <NEW_LINE> return super(FloatRangeField, self).formfield(**defaults) | Model for restricting float field range
| 62598fafd486a94d0ba2bff9 |
class AuditConfig(_messages.Message): <NEW_LINE> <INDENT> auditLogConfigs = _messages.MessageField('AuditLogConfig', 1, repeated=True) <NEW_LINE> exemptedMembers = _messages.StringField(2, repeated=True) <NEW_LINE> service = _messages.StringField(3) | Specifies the audit configuration for a service. The configuration
determines which permission types are logged, and what identities, if any,
are exempted from logging. An AuditConfig must have one or more
AuditLogConfigs. If there are AuditConfigs for both `allServices` and a
specific service, the union of the two AuditConfigs is used for that
service: the log_types specified in each AuditConfig are enabled, and the
exempted_members in each AuditLogConfig are exempted. Example Policy with
multiple AuditConfigs: { "audit_configs": [ { "service": "allServices"
"audit_log_configs": [ { "log_type": "DATA_READ", "exempted_members": [
"user:foo@gmail.com" ] }, { "log_type": "DATA_WRITE", }, { "log_type":
"ADMIN_READ", } ] }, { "service": "fooservice.googleapis.com"
"audit_log_configs": [ { "log_type": "DATA_READ", }, { "log_type":
"DATA_WRITE", "exempted_members": [ "user:bar@gmail.com" ] } ] } ] } For
fooservice, this policy enables DATA_READ, DATA_WRITE and ADMIN_READ
logging. It also exempts foo@gmail.com from DATA_READ logging, and
bar@gmail.com from DATA_WRITE logging.
Fields:
auditLogConfigs: The configuration for logging of each type of permission.
exemptedMembers:
service: Specifies a service that will be enabled for audit logging. For
example, `storage.googleapis.com`, `cloudsql.googleapis.com`.
`allServices` is a special value that covers all services. | 62598faf99cbb53fe6830f02 |
class MiniMaxPlayer(ComputerizedPlayer): <NEW_LINE> <INDENT> def __init__(self, color: str, max_depth=3): <NEW_LINE> <INDENT> super().__init__(color=color, name='MiniMaxPlayer') <NEW_LINE> self.next_move = None <NEW_LINE> self.MAX_DEPTH = max_depth <NEW_LINE> <DEDENT> def best_move(self, board: object) -> object: <NEW_LINE> <INDENT> self.next_move = None <NEW_LINE> is_white = self.color == 'white' <NEW_LINE> self.find_move(board, is_white, self.MAX_DEPTH) <NEW_LINE> return self.next_move <NEW_LINE> <DEDENT> def find_move(self, board: object, is_white: bool, depth: int): <NEW_LINE> <INDENT> if depth == 0: <NEW_LINE> <INDENT> return self.score_board_improved(board) <NEW_LINE> <DEDENT> valid_moves = [] <NEW_LINE> if self.color == 'white': <NEW_LINE> <INDENT> if is_white: <NEW_LINE> <INDENT> valid_moves = self.legal_moves(board) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> valid_moves = self.enemy.legal_moves(board) <NEW_LINE> <DEDENT> <DEDENT> elif self.color == 'black': <NEW_LINE> <INDENT> if is_white: <NEW_LINE> <INDENT> valid_moves = self.enemy.legal_moves(board) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> valid_moves = self.legal_moves(board) <NEW_LINE> <DEDENT> <DEDENT> if is_white: <NEW_LINE> <INDENT> max_score = - self.CHECKMATE <NEW_LINE> for move in valid_moves: <NEW_LINE> <INDENT> board.move_piece(move, move_finding=True) <NEW_LINE> score = self.find_move(board, False, depth - 1) <NEW_LINE> if score > max_score: <NEW_LINE> <INDENT> max_score = score <NEW_LINE> if depth == self.MAX_DEPTH: <NEW_LINE> <INDENT> self.next_move = move <NEW_LINE> <DEDENT> <DEDENT> board.undo_move() <NEW_LINE> <DEDENT> return max_score <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> min_score = self.CHECKMATE <NEW_LINE> for move in valid_moves: <NEW_LINE> <INDENT> board.move_piece(move, move_finding=True) <NEW_LINE> score = self.find_move(board, True, depth - 1) <NEW_LINE> if score < min_score: <NEW_LINE> <INDENT> min_score = score <NEW_LINE> if depth == self.MAX_DEPTH: <NEW_LINE> 
<INDENT> self.next_move = move <NEW_LINE> <DEDENT> <DEDENT> board.undo_move() <NEW_LINE> <DEDENT> return min_score | A 'MiniMaxPlayer' is a computerized player and inherits from 'ComputerizedPlayer'.
He chooses the best own move. | 62598faf10dbd63aa1c70bdd |
class Battery(): <NEW_LINE> <INDENT> def __init__(self, battery_size=70): <NEW_LINE> <INDENT> self.battery_size = battery_size <NEW_LINE> <DEDENT> def describe_battery(self): <NEW_LINE> <INDENT> print("This car has a " + str(self.battery_size) + "-kWh battery.") <NEW_LINE> <DEDENT> def get_range(self): <NEW_LINE> <INDENT> if self.battery_size == 70: <NEW_LINE> <INDENT> range = 240 <NEW_LINE> <DEDENT> elif self.battery_size == 85: <NEW_LINE> <INDENT> range = 270 <NEW_LINE> <DEDENT> message = "This car can go approximately " + str(range) <NEW_LINE> message += " miles on a full charge." <NEW_LINE> print(message) | A simple attempt to model a battery for an electric car. | 62598faf85dfad0860cbfa88 |
class Output(object): <NEW_LINE> <INDENT> def __init__(self, output): <NEW_LINE> <INDENT> self.beta = output[0] <NEW_LINE> self.sd_beta = output[1] <NEW_LINE> self.cov_beta = output[2] <NEW_LINE> if len(output) == 4: <NEW_LINE> <INDENT> self.__dict__.update(output[3]) <NEW_LINE> self.stopreason = _report_error(self.info) <NEW_LINE> <DEDENT> <DEDENT> def pprint(self): <NEW_LINE> <INDENT> print('Beta:', self.beta) <NEW_LINE> print('Beta Std Error:', self.sd_beta) <NEW_LINE> print('Beta Covariance:', self.cov_beta) <NEW_LINE> if hasattr(self, 'info'): <NEW_LINE> <INDENT> print('Residual Variance:',self.res_var) <NEW_LINE> print('Inverse Condition #:', self.inv_condnum) <NEW_LINE> print('Reason(s) for Halting:') <NEW_LINE> for r in self.stopreason: <NEW_LINE> <INDENT> print(' %s' % r) | The Output class stores the output of an ODR run.
Takes one argument for initialization, the return value from the
function `odr`.
Attributes
----------
beta : ndarray
Estimated parameter values, of shape (q,).
sd_beta : ndarray
Standard errors of the estimated parameters, of shape (p,).
cov_beta : ndarray
Covariance matrix of the estimated parameters, of shape (p,p).
delta : ndarray, optional
Array of estimated errors in input variables, of same shape as `x`.
eps : ndarray, optional
Array of estimated errors in response variables, of same shape as `y`.
xplus : ndarray, optional
Array of ``x + delta``.
y : ndarray, optional
Array ``y = fcn(x + delta)``.
res_var : float, optional
Residual variance.
sum_sqare : float, optional
Sum of squares error.
sum_square_delta : float, optional
Sum of squares of delta error.
sum_square_eps : float, optional
Sum of squares of eps error.
inv_condnum : float, optional
Inverse condition number (cf. ODRPACK UG p. 77).
rel_error : float, optional
Relative error in function values computed within fcn.
work : ndarray, optional
Final work array.
work_ind : dict, optional
Indices into work for drawing out values (cf. ODRPACK UG p. 83).
info : int, optional
Reason for returning, as output by ODRPACK (cf. ODRPACK UG p. 38).
stopreason : list of str, optional
`info` interpreted into English.
Notes
-----
The attributes listed as "optional" above are only present if `odr` was run
with ``full_output=1``. | 62598faf7c178a314d78d4c6 |
class HexesToStringTest(TestCase): <NEW_LINE> <INDENT> def test_stringing(self): <NEW_LINE> <INDENT> stringed = COLOR_CONVERTER.hexes_to_string(["80", "80", "80"]) <NEW_LINE> self.assertEqual(stringed, "808080") <NEW_LINE> <DEDENT> def test_rgba(self): <NEW_LINE> <INDENT> stringed = COLOR_CONVERTER.hexes_to_string(["80", "80", "80", "80"]) <NEW_LINE> self.assertEqual(stringed, "80808080") | This test guerentees that a proper string conversion . | 62598faf283ffb24f3cf38b6 |
class ImageResult(EqualityMixin): <NEW_LINE> <INDENT> def __init__(self, ndarray, properties, bandinfo, geocontext): <NEW_LINE> <INDENT> self.ndarray = ndarray <NEW_LINE> self.properties = properties <NEW_LINE> self.bandinfo = collections.OrderedDict(**bandinfo) <NEW_LINE> self.geocontext = geocontext <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return len(self.bandinfo) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> name_header = type(self).__name__ + ":" <NEW_LINE> try: <NEW_LINE> <INDENT> ndarray = "{}<shape={}, dtype={}>".format( type(self.ndarray).__name__, self.ndarray.shape, self.ndarray.dtype ) <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> ndarray = "None (Empty Image)" <NEW_LINE> <DEDENT> ndarray_line = " * ndarray: {}".format(ndarray) <NEW_LINE> properties_line = " * properties: {}".format(_join_dict_keys(self.properties)) <NEW_LINE> bandinfo_line = " * bandinfo: {}".format(_join_dict_keys(self.bandinfo)) <NEW_LINE> geocontext_line = " * geocontext: {}".format(_join_dict_keys(self.geocontext)) <NEW_LINE> return "\n".join( (name_header, ndarray_line, properties_line, bandinfo_line, geocontext_line) ) | Result of calling `~.models.compute` on an `~.geospatial.Image`.
Examples
--------
>>> from descarteslabs.workflows import Image
>>> my_img = Image.from_id("sentinel-2:L1C:2019-05-04_13SDV_99_S2B_v1")
>>> my_img.compute(my_geoctx) # my_geoctx is an arbitrary geocontext for 'my_img' # doctest: +SKIP
ImageResult:
* ndarray: MaskedArray<shape=(27, 512, 512), dtype=float64>
* properties: 'absolute_orbit', 'acquired', 'archived', 'area', ...
* bandinfo: 'coastal-aerosol', 'blue', 'green', 'red', ...
* geocontext: 'geometry', 'key', 'resolution', 'tilesize', ...
Attributes
----------
ndarray: numpy.ndarray
3-dimensional array of image data, in order ``(band, y, x)``
properties: dict[str, any]
dict of metadata about the `~.geospatial.Image`.
bandinfo: OrderedDict[str, dict[str, any]]
OrderedDict of metadata about each band.
The order corresponds to the bands in the `ndarray`.
geocontext: dict
GeoContext over which computation was done. | 62598faf7d847024c075c3ed |
class TransportException(RPCException): <NEW_LINE> <INDENT> code = 300 | Exceptions in transport layer
| 62598faf4a966d76dd5eef01 |
class Command(BaseCommand): <NEW_LINE> <INDENT> help = 'Retrieve information about experiments from the catalog' <NEW_LINE> def add_arguments(self, parser): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def handle(self, *args, **options): <NEW_LINE> <INDENT> if args: <NEW_LINE> <INDENT> raise CommandError("infocatalog takes no arguments") <NEW_LINE> <DEDENT> column_names = ['Catalog Name'] <NEW_LINE> catalog = [cat.name for cat in Catalog.objects.all()] <NEW_LINE> self.stdout.write(format_pretty_table( [catalog, ], column_names)) | Retrieve information about experiments from the catalog. | 62598faf1b99ca400228f545 |
class UserArticleCategory(db.Model): <NEW_LINE> <INDENT> __tablename__ = 'user_article_categories' <NEW_LINE> id = db.Column(db.String, primary_key=True) <NEW_LINE> user_id = db.Column(db.String, db.ForeignKey(u'users.id')) <NEW_LINE> category_id = db.Column(db.Integer) <NEW_LINE> category_name = db.Column(db.String) <NEW_LINE> saved_at = db.Column(db.DateTime, nullable=False, default=func.now()) <NEW_LINE> deleted_at = db.Column(db.DateTime) <NEW_LINE> user = relationship("User", back_populates="categories") <NEW_LINE> __table_args__ = ( UniqueConstraint("user_id", "category_id"), ) <NEW_LINE> def transform(self): <NEW_LINE> <INDENT> return { 'id' : self.category_id, 'name' : self.category_name, 'savedAt' : datetime_to_epoch(self.saved_at), } | docstring for UserArticleCategory | 62598faf442bda511e95c482 |
class InternalLogger(BaseLogger): <NEW_LINE> <INDENT> __slots__ = ['output'] <NEW_LINE> def __init__(self, output, fields=None, options=None, min_level=None): <NEW_LINE> <INDENT> super(InternalLogger, self).__init__(fields, options, min_level) <NEW_LINE> self.output = output <NEW_LINE> <DEDENT> def _clone(self): <NEW_LINE> <INDENT> return self.__class__(fields=self._fields, options=self._options, min_level=self.min_level, output=self.output) <NEW_LINE> <DEDENT> def _emit(self, level, format_spec, args, kwargs): <NEW_LINE> <INDENT> if level < self.min_level: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> msg = Message(level, format_spec, self._fields.copy(), self._options.copy(), args, kwargs) <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> msg = None <NEW_LINE> raise <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.output.output(msg) <NEW_LINE> <DEDENT> <DEDENT> except Exception: <NEW_LINE> <INDENT> print(iso8601time(), "Error in twiggy internal log! Something is seriously broken.", file=sys.stderr) <NEW_LINE> print("Offending message:", repr(msg), file=sys.stderr) <NEW_LINE> traceback.print_exc(file=sys.stderr) | Special-purpose logger for internal uses
Sends messages directly to output, bypassing :data:`.emitters`.
:ivar `Output` output: an output to write to | 62598faf23849d37ff8510de |
class FailError(Exception): <NEW_LINE> <INDENT> pass | This error class is the base for all errors that cause the current test case fails.
| 62598fafff9c53063f51a677 |
class Escenarios: <NEW_LINE> <INDENT> parametros_default = {'prob_40_creditos': 0.1, 'prob_50_creditos': 0.7, 'prob_55_creditos': 0.15, 'prob_60_creditos': 0.05, 'prob_visitar_profesor': 0.2, 'prob_atraso_notas_Mavrakis': 0.1, 'porcentaje_progreso_tarea_mail': 0.5, 'fiesta_mes': 1/30, 'partido_futbol_mes': 1/70, 'nivel_inicial_confianza_inferior': 2, 'nivel_inicial_confianza_superior': 12} <NEW_LINE> matriz_escenarios = None <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> import csv <NEW_LINE> with open('escenarios.csv') as csvfile: <NEW_LINE> <INDENT> reader = csv.DictReader(csvfile) <NEW_LINE> matriz = {} <NEW_LINE> for fila in reader: <NEW_LINE> <INDENT> parametro = fila['Parametro:string'] <NEW_LINE> del fila['Parametro:string'] <NEW_LINE> for escenario in fila: <NEW_LINE> <INDENT> key_escenario = escenario.split(":") <NEW_LINE> if not key_escenario[0] in matriz: <NEW_LINE> <INDENT> matriz[key_escenario[0]] = {} <NEW_LINE> matriz[key_escenario[0]][parametro] = fila[escenario] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> matriz[key_escenario[0]][parametro] = fila[escenario] <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> for escenario in matriz: <NEW_LINE> <INDENT> for parametro in matriz[escenario]: <NEW_LINE> <INDENT> if matriz[escenario][parametro] == '-': <NEW_LINE> <INDENT> valor = Escenarios.parametros_default[parametro] <NEW_LINE> matriz[escenario][parametro] = valor <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> valor = float(matriz[escenario][parametro]) <NEW_LINE> matriz[escenario][parametro] = valor <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> Escenarios.matriz_escenarios = matriz <NEW_LINE> <DEDENT> self.escenarios = Escenarios.matriz_escenarios | Esta clase contiene los datos de escenarios.csv guardados en su
atributo escenarios. | 62598faf2ae34c7f260ab10c |
class StraightFlush(Event): <NEW_LINE> <INDENT> def __call__(self, d, cards): <NEW_LINE> <INDENT> cards = d.reduce(cards) <NEW_LINE> cards = d.unique(cards) <NEW_LINE> cards = d.sort(cards) <NEW_LINE> cards_dict = d.separate(cards) <NEW_LINE> cards_dict = d.collapse(cards_dict) <NEW_LINE> for key in cards_dict.keys(): <NEW_LINE> <INDENT> if key == 'jokers': <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> suit_cards = cards_dict[key] <NEW_LINE> if len(suit_cards) >= 5: <NEW_LINE> <INDENT> for i in range(len(suit_cards)-4): <NEW_LINE> <INDENT> if self.is_consecutive(suit_cards[i:i+5]): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> if self.contain_10jqka(suit_cards): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return False <NEW_LINE> <DEDENT> def is_consecutive(self, array, stepsize=1): <NEW_LINE> <INDENT> return (np.diff(array) == stepsize).all() <NEW_LINE> <DEDENT> def contain_10jqka(self, suit_cards): <NEW_LINE> <INDENT> target_cards = np.array([10, 11, 12, 13, 1]) <NEW_LINE> return np.isin(target_cards, suit_cards).all() | check if there is a straight flush at hand
without considering wild cards | 62598faf4527f215b58e9f00 |
@dbus_error("PayloadNotSetError", namespace=PAYLOADS_NAMESPACE) <NEW_LINE> class PayloadNotSetError(AnacondaError): <NEW_LINE> <INDENT> pass | Payload is not set. | 62598faf92d797404e388b79 |
class WhoisRu(WhoisEntry): <NEW_LINE> <INDENT> regex = { 'domain_name': 'domain: *(.+)', 'registrar': 'registrar: *(.+)', 'creation_date': 'created: *(.+)', 'expiration_date': 'paid-till: *(.+)', 'updated_date': None, 'name_servers': 'nserver: *(.+)', 'status': 'state: *(.+)', 'emails': EMAIL_REGEX, 'registrant_org': 'org: *(.+)' } <NEW_LINE> def __init__(self, domain, text): <NEW_LINE> <INDENT> if text.strip() == 'No entries found': <NEW_LINE> <INDENT> raise PywhoisError(text) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> WhoisEntry.__init__(self, domain, text, self.regex) | Whois parser for .ru domains
| 62598faf26068e7796d4c980 |
class NGramFeatureSpace: <NEW_LINE> <INDENT> def __init__(self, alphabet, n, Y, is_normalized): <NEW_LINE> <INDENT> self.feature_space = build_feature_space_without_positions(alphabet, n, Y) <NEW_LINE> self._normalize(is_normalized, self.feature_space) <NEW_LINE> <DEDENT> def _normalize(self, is_normalized, feature_space): <NEW_LINE> <INDENT> if is_normalized: <NEW_LINE> <INDENT> y_normalization = self._get_y_normalization(feature_space) <NEW_LINE> data_normalization = y_normalization.repeat(numpy.diff(feature_space.indptr)) <NEW_LINE> feature_space.data *= data_normalization <NEW_LINE> <DEDENT> <DEDENT> def _get_y_normalization(self, feature_space): <NEW_LINE> <INDENT> y_normalization = (feature_space.multiply(feature_space)).sum(axis=1) <NEW_LINE> y_normalization = 1. / numpy.sqrt(numpy.array((y_normalization.reshape(1, -1))[0])) <NEW_LINE> return y_normalization <NEW_LINE> <DEDENT> def compute_weights(self, y_weights): <NEW_LINE> <INDENT> data_copy = numpy.copy(self.feature_space.data) <NEW_LINE> self.feature_space.data *= self._repeat_each_y_weight_by_y_column_count(y_weights) <NEW_LINE> n_gram_weights = numpy.array(self.feature_space.sum(axis=0))[0] <NEW_LINE> self.feature_space.data = data_copy <NEW_LINE> return n_gram_weights <NEW_LINE> <DEDENT> def _repeat_each_y_weight_by_y_column_count(self, y_weights): <NEW_LINE> <INDENT> return y_weights.repeat(numpy.diff(self.feature_space.indptr)) | Output feature space for the N-Gram Kernel
Creates a sparse matrix representation of the n-grams in each training string. This is used to compute the weights
of the graph during the inference phase.
Attributes
----------
feature_space : sparse matrix, shape = [n_samples, len(alphabet)**n]
Sparse matrix representation of the n-grams in each training string, where n_samples is the number of training
samples. | 62598faf460517430c432073 |
class ExpressRouteCircuitPeeringConfig(Model): <NEW_LINE> <INDENT> _attribute_map = { 'advertised_public_prefixes': {'key': 'advertisedPublicPrefixes', 'type': '[str]'}, 'advertised_public_prefixes_state': {'key': 'advertisedPublicPrefixesState', 'type': 'str'}, 'customer_asn': {'key': 'customerASN', 'type': 'int'}, 'routing_registry_name': {'key': 'routingRegistryName', 'type': 'str'}, } <NEW_LINE> def __init__(self, advertised_public_prefixes=None, advertised_public_prefixes_state=None, customer_asn=None, routing_registry_name=None): <NEW_LINE> <INDENT> self.advertised_public_prefixes = advertised_public_prefixes <NEW_LINE> self.advertised_public_prefixes_state = advertised_public_prefixes_state <NEW_LINE> self.customer_asn = customer_asn <NEW_LINE> self.routing_registry_name = routing_registry_name | Specifies the peering configuration.
:param advertised_public_prefixes: The reference of
AdvertisedPublicPrefixes.
:type advertised_public_prefixes: list of str
:param advertised_public_prefixes_state: AdvertisedPublicPrefixState of
the Peering resource. Possible values are 'NotConfigured', 'Configuring',
'Configured', and 'ValidationNeeded'. Possible values include:
'NotConfigured', 'Configuring', 'Configured', 'ValidationNeeded'
:type advertised_public_prefixes_state: str or
:class:`ExpressRouteCircuitPeeringAdvertisedPublicPrefixState
<azure.mgmt.network.v2016_09_01.models.ExpressRouteCircuitPeeringAdvertisedPublicPrefixState>`
:param customer_asn: The CustomerASN of the peering.
:type customer_asn: int
:param routing_registry_name: The RoutingRegistryName of the
configuration.
:type routing_registry_name: str | 62598fb001c39578d7f12daa |
class CustomerDao: <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def save(cls, customer): <NEW_LINE> <INDENT> print("Saving customer {} with {} years".format(customer.name, customer.age)) | Customer's DataAccess class | 62598fb0baa26c4b54d4f2df |
class RisParser(BaseParser): <NEW_LINE> <INDENT> START_TAG = "TY" <NEW_LINE> PATTERN = r"^[A-Z][A-Z0-9] - |^ER -\s*$" <NEW_LINE> DEFAULT_MAPPING = TAG_KEY_MAPPING <NEW_LINE> DEFAULT_LIST_TAGS = LIST_TYPE_TAGS <NEW_LINE> counter_re = re.compile("^[0-9]+.") <NEW_LINE> def get_content(self, line): <NEW_LINE> <INDENT> return line[6:].strip() <NEW_LINE> <DEDENT> def is_header(self, line): <NEW_LINE> <INDENT> none_or_match = self.counter_re.match(line) <NEW_LINE> return bool(none_or_match) | Subclass of Base for reading base RIS files. | 62598fb0d7e4931a7ef3c0c0 |
class ManagerDeploymentsInsertRequest(_messages.Message): <NEW_LINE> <INDENT> deployment = _messages.MessageField('Deployment', 1) <NEW_LINE> projectId = _messages.StringField(2, required=True) <NEW_LINE> region = _messages.StringField(3, required=True) | A ManagerDeploymentsInsertRequest object.
Fields:
deployment: A Deployment resource to be passed as the request body.
projectId: A string attribute.
region: A string attribute. | 62598fb0f548e778e596b5cf |
class RankType: <NEW_LINE> <INDENT> Max = 'max' <NEW_LINE> Min = 'min' <NEW_LINE> Same = 'same' <NEW_LINE> Sink = 'sink' <NEW_LINE> Source = 'source' | These values can be used for Dotter.rank() | 62598fb0dd821e528d6d8f60 |
class TestBankAccess(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def testBankAccess(self): <NEW_LINE> <INDENT> pass | BankAccess unit test stubs | 62598fb099cbb53fe6830f04 |
class _Integrator: <NEW_LINE> <INDENT> def __init__(self, image, geometry, angles, radii, intensities): <NEW_LINE> <INDENT> self._image = image <NEW_LINE> self._geometry = geometry <NEW_LINE> self._angles = angles <NEW_LINE> self._radii = radii <NEW_LINE> self._intensities = intensities <NEW_LINE> self._i_range = range(0, self._image.shape[1] - 1) <NEW_LINE> self._j_range = range(0, self._image.shape[0] - 1) <NEW_LINE> <DEDENT> def integrate(self, radius, phi): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def _reset(self): <NEW_LINE> <INDENT> self._angles = [] <NEW_LINE> self._radii = [] <NEW_LINE> self._intensities = [] <NEW_LINE> <DEDENT> def _store_results(self, phi, radius, sample): <NEW_LINE> <INDENT> self._angles.append(phi) <NEW_LINE> self._radii.append(radius) <NEW_LINE> self._intensities.append(sample) <NEW_LINE> <DEDENT> def get_polar_angle_step(self): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def get_sector_area(self): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def is_area(self): <NEW_LINE> <INDENT> raise NotImplementedError | Base class that supports different kinds of pixel integration methods.
Parameters
----------
image : 2D `~numpy.ndarray`
The image array.
geometry : `~photutils.isophote.EllipseGeometry` instance
Object that encapsulates geometry information about current
ellipse.
angles : list
Output list; contains the angle values along the elliptical
path.
radii : list
Output list; contains the radius values along the elliptical
path.
intensities : list
Output list; contains the extracted intensity values along the
elliptical path. | 62598fb0e1aae11d1e7ce839 |
class SensorForce(Element): <NEW_LINE> <INDENT> def __init__( self, site, cutoff: float=None, name: str=None, noise: float=None, user: str="0 0 ...", ): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.site = site <NEW_LINE> self.cutoff = cutoff <NEW_LINE> self.name = name <NEW_LINE> self.noise = noise <NEW_LINE> self.user = user <NEW_LINE> self._attribute_names = ['site', 'cutoff', 'name', 'noise', 'user'] | This element creates a 3-axis force sensor. The sensor outputs three
numbers, which are the interaction force between a child and a parent
body, expressed in the site frame defining the sensor. The convention is
that the site is attached to the child body, and the force points from the
child towards the parent. To change the sign of the sensor reading, use
the scale attribute. The computation here takes into account all forces
acting on the system, including contacts as well as external
perturbations. Using this sensor often requires creating a dummy body
welded to its parent (i.e. having no joint elements).
:param site:
Site where the sensor is mounted. The measured interaction force is
between the body where the site is defined and its parent body, and
points from the child towards the parent. The physical sensor being
modeled could of course be attached to the parent body, in which case
the sensor data would have the opposite sign. Note that each body has a
unique parent but can have multiple children, which is why we define
this sensor through the child rather than the parent body in the pair.
:param cutoff:
When this value is positive, it limits the absolute value of the sensor
output. It is also used to normalize the sensor output in the sensor
data plots in HAPTIX and simulate.cpp.
:param name:
Name of the sensor.
:param noise:
The standard deviation of zero-mean Gaussian noise added to the sensor
output, when the sensornoise attribute of flag is enabled. Sensor noise
respects the sensor data type: quaternions and unit vectors remain
normalized, non-negative quantities remain non-negative.
:param user:
See User parameters. | 62598fb01f5feb6acb162c4a |
class ResultIter(object): <NEW_LINE> <INDENT> def __init__(self, result_proxies, row_type=dict): <NEW_LINE> <INDENT> if not isgenerator(result_proxies): <NEW_LINE> <INDENT> result_proxies = iter((result_proxies, )) <NEW_LINE> <DEDENT> self.result_proxies = result_proxies <NEW_LINE> self.row_type = row_type <NEW_LINE> self.count = 0 <NEW_LINE> if not self._next_rp(): <NEW_LINE> <INDENT> raise StopIteration <NEW_LINE> <DEDENT> <DEDENT> def _next_rp(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.rp = self.result_proxies.next() <NEW_LINE> self.count += self.rp.rowcount <NEW_LINE> self.keys = self.rp.keys() <NEW_LINE> return True <NEW_LINE> <DEDENT> except StopIteration: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> def next(self): <NEW_LINE> <INDENT> row = self.rp.fetchone() <NEW_LINE> if row is None: <NEW_LINE> <INDENT> if self._next_rp(): <NEW_LINE> <INDENT> return self.next() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise StopIteration <NEW_LINE> <DEDENT> <DEDENT> return self.row_type(zip(self.keys, row)) <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> return self | SQLAlchemy ResultProxies are not iterable to get a
list of dictionaries. This is to wrap them. | 62598fb021bff66bcd722c92 |
class FileConsentCard(Model): <NEW_LINE> <INDENT> _attribute_map = { "description": {"key": "description", "type": "str"}, "size_in_bytes": {"key": "sizeInBytes", "type": "long"}, "accept_context": {"key": "acceptContext", "type": "object"}, "decline_context": {"key": "declineContext", "type": "object"}, } <NEW_LINE> def __init__( self, *, description: str = None, size_in_bytes: int = None, accept_context=None, decline_context=None, **kwargs ) -> None: <NEW_LINE> <INDENT> super(FileConsentCard, self).__init__(**kwargs) <NEW_LINE> self.description = description <NEW_LINE> self.size_in_bytes = size_in_bytes <NEW_LINE> self.accept_context = accept_context <NEW_LINE> self.decline_context = decline_context | File consent card attachment.
:param description: File description.
:type description: str
:param size_in_bytes: Size of the file to be uploaded in Bytes.
:type size_in_bytes: long
:param accept_context: Context sent back to the Bot if user consented to
upload. This is free flow schema and is sent back in Value field of
Activity.
:type accept_context: object
:param decline_context: Context sent back to the Bot if user declined.
This is free flow schema and is sent back in Value field of Activity.
:type decline_context: object | 62598fb0851cf427c66b82e7 |
class KlassRescheduleView(KlassStartView): <NEW_LINE> <INDENT> template_name = 'teachers/clients/klass_reschedule.html' <NEW_LINE> form_class = forms.KlassRescheduleForm <NEW_LINE> def form_valid(self, form): <NEW_LINE> <INDENT> self.klass.program_usage.delete() <NEW_LINE> self.client = self.klass.contract.client <NEW_LINE> self.teacher = self.klass.teacher.teacher_set.get(client=self.client) <NEW_LINE> self.create_program_usage(**form.cleaned_data) <NEW_LINE> self.reset_passwords(self.klass.students.all()) <NEW_LINE> return self.get_success_url() <NEW_LINE> <DEDENT> def reset_passwords(self, students): <NEW_LINE> <INDENT> password_list = generate_random_passwords(len(students)) <NEW_LINE> pw_list = {} <NEW_LINE> for student in students: <NEW_LINE> <INDENT> password = password_list.pop(0) <NEW_LINE> student.set_password(password) <NEW_LINE> student.save() <NEW_LINE> pw_list[student.get_full_name()] = password <NEW_LINE> <DEDENT> klass_pw_list = {self.klass.pk: pw_list} <NEW_LINE> try: <NEW_LINE> <INDENT> self.request.session['password_list'].update(klass_pw_list) <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> self.request.session['password_list'] = klass_pw_list | Same as the klass start but don't deal with students, just reset the
already existent users passwords. | 62598fb0a8370b77170f0407 |
class Extraction: <NEW_LINE> <INDENT> def __init__(self, sent, pred, args, probs, calc_prob = lambda probs: 1.0 / (reduce(lambda x, y: x * y, probs) + 0.001)): <NEW_LINE> <INDENT> self.sent = sent <NEW_LINE> self.calc_prob = calc_prob <NEW_LINE> self.probs = probs <NEW_LINE> self.prob = self.calc_prob(self.probs) <NEW_LINE> self.pred = pred <NEW_LINE> self.args = args <NEW_LINE> logging.debug(self) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return '\t'.join(map(str, [' '.join(self.sent), self.prob, self.pred, '\t'.join([' '.join(arg) for arg in self.args])])) | Store and print an OIE extraction | 62598fb0796e427e5384e7c0 |
class Pin: <NEW_LINE> <INDENT> def __init__(self,pin_number,frequency=50,duty_cycle=100,as_input=False,initial_pull=None): <NEW_LINE> <INDENT> self.pin_number = pin_number <NEW_LINE> self.duty_cycle = duty_cycle <NEW_LINE> self.frequency = frequency <NEW_LINE> self.as_input = as_input <NEW_LINE> self._pwm = None <NEW_LINE> if self.as_input == True: <NEW_LINE> <INDENT> if initial_pull == None: <NEW_LINE> <INDENT> GPIO.setup(self.pin_number,GPIO.IN) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> GPIO.setup(self.pin_number,GPIO.IN,pull_up_down=initial_pull) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> GPIO.setup(self.pin_number, GPIO.OUT) <NEW_LINE> self.down() <NEW_LINE> <DEDENT> <DEDENT> def up(self): <NEW_LINE> <INDENT> if self._pwm == None: <NEW_LINE> <INDENT> GPIO.output(self.pin_number,True) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> err = "cannot set to 'up': pulse width modulation running on pin.\n" <NEW_LINE> raise ValueError(err) <NEW_LINE> <DEDENT> <DEDENT> def down(self): <NEW_LINE> <INDENT> if self._pwm == None: <NEW_LINE> <INDENT> GPIO.output(self.pin_number,False) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> err = "cannot set to 'down': pulse width modulation running on pin.\n" <NEW_LINE> raise ValueError(err) <NEW_LINE> <DEDENT> <DEDENT> def input(self): <NEW_LINE> <INDENT> return GPIO.input(self.pin_number) <NEW_LINE> <DEDENT> def start_pwm(self): <NEW_LINE> <INDENT> self._pwm = GPIO.PWM(self.pin_number,self.frequency) <NEW_LINE> self._pwm.start(self.duty_cycle) <NEW_LINE> <DEDENT> def stop_pwm(self): <NEW_LINE> <INDENT> if self._pwm != None: <NEW_LINE> <INDENT> self._pwm.stop() <NEW_LINE> self._pwm = None <NEW_LINE> <DEDENT> <DEDENT> def set_frequency(self,frequency): <NEW_LINE> <INDENT> self.frequency = frequency <NEW_LINE> if self._pwm != None: <NEW_LINE> <INDENT> self.stop_pwm() <NEW_LINE> self.start_pwm() <NEW_LINE> <DEDENT> <DEDENT> def set_duty_cycle(self,duty_cycle): <NEW_LINE> <INDENT> self.duty_cycle = duty_cycle <NEW_LINE> if 
self._pwm != None: <NEW_LINE> <INDENT> self.stop_pwm() <NEW_LINE> self.start_pwm() <NEW_LINE> <DEDENT> <DEDENT> def stop(self): <NEW_LINE> <INDENT> if self._pwm != None: <NEW_LINE> <INDENT> self.stop_pwm() <NEW_LINE> <DEDENT> self.down() <NEW_LINE> GPIO.cleanup(self.pin_number) | Class that controls a raspberry pi GPIO pin. | 62598fb04c3428357761a2e5 |
class DeleteVariantOperator(Operator): <NEW_LINE> <INDENT> bl_idname = "angel.delete_variant" <NEW_LINE> bl_label = "Delete Variant" <NEW_LINE> bl_options = {'REGISTER', 'UNDO'} <NEW_LINE> def execute(self, context): <NEW_LINE> <INDENT> scene = context.scene <NEW_LINE> angel = scene.angel <NEW_LINE> if angel.selected_variant < len(angel.variants): <NEW_LINE> <INDENT> angel.revert_to_base_materials() <NEW_LINE> variant = angel.variants[angel.selected_variant] <NEW_LINE> variant.remove_all_materials() <NEW_LINE> angel.variants.remove(angel.selected_variant) <NEW_LINE> if angel.selected_variant > 0: <NEW_LINE> <INDENT> angel.selected_variant -= 1 <NEW_LINE> <DEDENT> angel.apply_to_scene() <NEW_LINE> <DEDENT> return {'FINISHED'} | Deletes the currently selected variant | 62598fb07c178a314d78d4c8 |
class ReallocateTest(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.dojo = Dojo() <NEW_LINE> <DEDENT> def test_check_which_room_person_is(self): <NEW_LINE> <INDENT> dict_a = { 'Mandela': ['David', 'Samuel', 'Turi'], 'Machel':['Anne', 'Linet', 'Turi'] } <NEW_LINE> dict_b = { 'Nyerere': ['Myles', 'Reginald', 'Booker'], 'Obote':['Memo', 'Liliosa'] } <NEW_LINE> self.assertEqual(self.dojo.check_which_room_person_is('Liliosa', dict_b), 'Obote') <NEW_LINE> self.assertEqual(self.dojo.check_which_room_person_is('Francis', dict_a), False) <NEW_LINE> <DEDENT> def test_reallocate_person_removes_person(self): <NEW_LINE> <INDENT> self.dojo.create_room('office', 'Mandela') <NEW_LINE> self.dojo.create_room('office', 'Madiba') <NEW_LINE> self.dojo.add_person('Joseph Simiyu', 'staff') <NEW_LINE> if self.dojo.room_is_empty('Mandela'): <NEW_LINE> <INDENT> self.dojo.reallocate_person('Joseph Simiyu', 'Mandela') <NEW_LINE> self.assertEqual(len(self.dojo.dict_offices['Mandela']), 1) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.dojo.reallocate_person('Joseph Simiyu', 'Madiba') <NEW_LINE> self.assertEqual(len(self.dojo.dict_offices['Madiba']), 1) <NEW_LINE> <DEDENT> <DEDENT> def test_room_is_empty(self): <NEW_LINE> <INDENT> self.dojo.create_room('office', 'Sama') <NEW_LINE> self.dojo.add_person('Linda Masero', 'staff') <NEW_LINE> self.assertFalse(self.dojo.room_is_empty('Sama')) <NEW_LINE> <DEDENT> def test_allocate_room(self): <NEW_LINE> <INDENT> self.dojo.add_person('John Doe', 'staff') <NEW_LINE> self.dojo.add_person('Jane Duh', 'fellow', 'Y') <NEW_LINE> self.dojo.create_room('office', 'Mandela') <NEW_LINE> inital_count = len(self.dojo.unallocated_people) <NEW_LINE> self.dojo.allocate_room('John Doe', 'office') <NEW_LINE> current_count = len(self.dojo.unallocated_people) <NEW_LINE> self.assertEqual((inital_count - current_count), 1) <NEW_LINE> <DEDENT> def test_allocate_missing_person(self): <NEW_LINE> <INDENT> self.dojo.create_room('living', 
'Suswa') <NEW_LINE> self.dojo.allocate_room('Babu Brian', 'living') <NEW_LINE> output = "Living space Suswa created successfully" +"Babu Brian does not exist among the unallocated people" <NEW_LINE> self.assertEqual(re.sub(r'(\x9B|\x1B\[)[0-?]*[ -\/]*[@-~][\n]*', '', sys.stdout.getvalue()), output) | Test the dojo module | 62598fb04f6381625f1994d5 |
class SyncLogModel(S3Model): <NEW_LINE> <INDENT> names = ("sync_log", ) <NEW_LINE> def model(self): <NEW_LINE> <INDENT> T = current.T <NEW_LINE> s3 = current.response.s3 <NEW_LINE> crud_strings = s3.crud_strings <NEW_LINE> s3_datetime_represent = lambda dt: S3DateTime.datetime_represent(dt, utc=True) <NEW_LINE> tablename = "sync_log" <NEW_LINE> self.define_table(tablename, Field("timestmp", "datetime", label = T("Date/Time"), represent = s3_datetime_represent, ), self.sync_repository_id(), Field("resource_name"), Field("mode"), Field("action"), Field("result"), Field("remote", "boolean", default = False, label = T("Remote Error"), represent = s3_yes_no_represent, ), Field("message", "text", represent = s3_strip_markup, ), *s3_meta_fields()) <NEW_LINE> crud_strings[tablename] = Storage( title_display = T("Log Entry"), title_list = T("Synchronization Log"), label_list_button = T("List All Entries"), msg_record_deleted = T("Log Entry Deleted"), msg_list_empty = T("No entries found"), msg_no_match = T("No entries found")) <NEW_LINE> self.configure(tablename, deletable = True, editable = False, insertable = False, orderby = "sync_log.timestmp desc", ) <NEW_LINE> return None | Model for the Sync log | 62598fb04527f215b58e9f01 |
class DescribeListPacketFilterConfigResponse(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.Total = None <NEW_LINE> self.ConfigList = None <NEW_LINE> self.RequestId = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.Total = params.get("Total") <NEW_LINE> if params.get("ConfigList") is not None: <NEW_LINE> <INDENT> self.ConfigList = [] <NEW_LINE> for item in params.get("ConfigList"): <NEW_LINE> <INDENT> obj = PacketFilterRelation() <NEW_LINE> obj._deserialize(item) <NEW_LINE> self.ConfigList.append(obj) <NEW_LINE> <DEDENT> <DEDENT> self.RequestId = params.get("RequestId") | DescribeListPacketFilterConfig返回参数结构体
| 62598fb07047854f4633f406 |
class _BatchLoaderFromDisk(_BatchLoader): <NEW_LINE> <INDENT> def update_parameters(self): <NEW_LINE> <INDENT> path = self.parameters["path"].get() <NEW_LINE> should_reload = False <NEW_LINE> if self.path != path: <NEW_LINE> <INDENT> self.path = path <NEW_LINE> should_reload = True <NEW_LINE> <DEDENT> return super().update_parameters() or should_reload <NEW_LINE> <DEDENT> def get_next_batch(self): <NEW_LINE> <INDENT> start = self.total_batches_sent % self.number_of_batches * self.batch_size <NEW_LINE> end = start + self.batch_size <NEW_LINE> labeled_image_paths = self.dataset[start:end] <NEW_LINE> images = [] <NEW_LINE> labels = [] <NEW_LINE> for image_path, label in labeled_image_paths: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> images.append(plt.imread(image_path)) <NEW_LINE> labels.append(label) <NEW_LINE> <DEDENT> except (SyntaxError, OSError): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> <DEDENT> return images, labels <NEW_LINE> <DEDENT> def get_classes(self): <NEW_LINE> <INDENT> classes = list(set((image[1] for image in self.dataset))) <NEW_LINE> classes.sort() <NEW_LINE> return classes | Base element for loading data from disk. Deriving elements should contain path parameter. | 62598fb07d43ff2487427418 |
class Rodenticide(CharField): <NEW_LINE> <INDENT> def to_python(self, value): <NEW_LINE> <INDENT> return unmangle(super(Rodenticide, self).to_python(value)) | Undo the text mangling done by TinyMCE | 62598fb0aad79263cf42e800 |
class OrganisationRepos(UserRepos): <NEW_LINE> <INDENT> def get_repos(self): <NEW_LINE> <INDENT> org_name = self.kwargs['name'] <NEW_LINE> org = self.gh.gh.organization(org_name) <NEW_LINE> return self.gh.org_repos(org) | Retrieve an organisation's repos from github.
The organisation name is kwarg ``name`` in the URL pattern. | 62598fb05fc7496912d48297 |
class Environment(object): <NEW_LINE> <INDENT> def __init__(self, gym_env, action_repeat): <NEW_LINE> <INDENT> self.env = gym_env <NEW_LINE> self.timespan = action_repeat <NEW_LINE> self.gym_actions = 2 <NEW_LINE> self.state_buffer = deque() <NEW_LINE> <DEDENT> def get_action_size(self): <NEW_LINE> <INDENT> return self.env.action_space.n <NEW_LINE> <DEDENT> def get_state_size(self): <NEW_LINE> <INDENT> return self.env.observation_space.shape <NEW_LINE> <DEDENT> def reset(self): <NEW_LINE> <INDENT> self.state_buffer = deque() <NEW_LINE> x_t = self.env.reset() <NEW_LINE> s_t = np.stack([x_t for i in range(self.timespan)], axis=0) <NEW_LINE> for i in range(self.timespan-1): <NEW_LINE> <INDENT> self.state_buffer.append(x_t) <NEW_LINE> <DEDENT> return s_t <NEW_LINE> <DEDENT> def step(self, action): <NEW_LINE> <INDENT> x_t1, r_t, terminal, info = self.env.step(action) <NEW_LINE> previous_states = np.array(self.state_buffer) <NEW_LINE> s_t1 = np.empty((self.timespan, *self.env.observation_space.shape)) <NEW_LINE> s_t1[:self.timespan-1, :] = previous_states <NEW_LINE> s_t1[self.timespan-1] = x_t1 <NEW_LINE> self.state_buffer.popleft() <NEW_LINE> self.state_buffer.append(x_t1) <NEW_LINE> return s_t1, r_t, terminal, info <NEW_LINE> <DEDENT> def render(self): <NEW_LINE> <INDENT> return self.env.render() | Environment Helper Class (Multiple State Buffer) for Continuous Action Environments
(MountainCarContinuous-v0, LunarLanderContinuous-v2, etc.), and MuJoCo Environments | 62598fb016aa5153ce40052f |
class BaseASpaceClient(requests.Session): <NEW_LINE> <INDENT> def __init__(self, api_host: str = constants.DEFAULT_API_HOST, username: str = constants.DEFAULT_USERNAME, password: str = constants.DEFAULT_PASSWORD, auto_auth=True): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.aspace_api_host = api_host.strip() <NEW_LINE> self.aspace_username = username <NEW_LINE> self.aspace_password = password <NEW_LINE> if not self.aspace_api_host.endswith('/'): <NEW_LINE> <INDENT> self.aspace_api_host += '/' <NEW_LINE> <DEDENT> self.headers['Accept'] = 'application/json' <NEW_LINE> if auto_auth: <NEW_LINE> <INDENT> self.authenticate() <NEW_LINE> <DEDENT> <DEDENT> @classmethod <NEW_LINE> def init_from_config(cls, config: configparser.ConfigParser, section='aspace_credentials', auto_auth=False): <NEW_LINE> <INDENT> def aspace_credential(term, default=None): <NEW_LINE> <INDENT> return config.get(section, term, fallback=default) <NEW_LINE> <DEDENT> _self = cls( api_host=aspace_credential( 'api_host', constants.DEFAULT_API_HOST), username=aspace_credential( 'username', constants.DEFAULT_USERNAME), password=aspace_credential( 'password', constants.DEFAULT_PASSWORD), auto_auth=auto_auth, ) <NEW_LINE> return _self <NEW_LINE> <DEDENT> def prepare_request(self, request: requests.Request): <NEW_LINE> <INDENT> relative_uri = ( request.url.lstrip(' /') if request.url else '' ) <NEW_LINE> request.url = urllib.parse.urljoin(self.aspace_api_host, relative_uri) <NEW_LINE> return super().prepare_request(request) <NEW_LINE> <DEDENT> def send(self, request: requests.PreparedRequest, **kwargs): <NEW_LINE> <INDENT> resp = super().send(request, **kwargs) <NEW_LINE> if resp.status_code == 412: <NEW_LINE> <INDENT> self.authenticate() <NEW_LINE> request.headers[constants.X_AS_SESSION] = ( self.headers[constants.X_AS_SESSION] ) <NEW_LINE> resp = super().send(request, **kwargs) <NEW_LINE> <DEDENT> return resp <NEW_LINE> <DEDENT> def wait_until_ready(self, check_interval=5.0, max_wait_time=None, 
on_fail=None, authenticate_on_success=False): <NEW_LINE> <INDENT> timer = 0 <NEW_LINE> while True: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> if self.get('/').ok: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> <DEDENT> except requests.exceptions.ConnectionError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> if max_wait_time is not None and timer > max_wait_time: <NEW_LINE> <INDENT> raise Exception( "The API could not be reached within the maximum allowed " "time." ) <NEW_LINE> <DEDENT> if callable(on_fail): <NEW_LINE> <INDENT> on_fail() <NEW_LINE> <DEDENT> time.sleep(check_interval) <NEW_LINE> timer += check_interval <NEW_LINE> <DEDENT> if authenticate_on_success: <NEW_LINE> <INDENT> self.authenticate() <NEW_LINE> <DEDENT> return self <NEW_LINE> <DEDENT> def authenticate(self): <NEW_LINE> <INDENT> if constants.X_AS_SESSION in self.headers: <NEW_LINE> <INDENT> del self.headers[constants.X_AS_SESSION] <NEW_LINE> <DEDENT> resp = self.post( 'users/' + self.aspace_username + '/login', {'password': self.aspace_password} ) <NEW_LINE> assert resp.ok, ( 'Received {} while attempting to authenticate: {}'.format( resp.status_code, resp.text, ) ) <NEW_LINE> session = resp.json()['session'] <NEW_LINE> self.headers[constants.X_AS_SESSION] = session <NEW_LINE> return resp | Extends the Session class from the requests Python library, adding
methods that abstract ArchivesSpace-specific functionality. | 62598fb0baa26c4b54d4f2e1 |
class PadMaker: <NEW_LINE> <INDENT> def __init__(self, module): <NEW_LINE> <INDENT> self.module = module <NEW_LINE> <DEDENT> def THPad(self, w, l, drill, shape=pcbnew.PAD_OVAL): <NEW_LINE> <INDENT> pad = pcbnew.D_PAD(self.module) <NEW_LINE> pad.SetSize(pcbnew.wxSize(l, w)) <NEW_LINE> pad.SetShape(shape) <NEW_LINE> pad.SetAttribute(pcbnew.PAD_STANDARD) <NEW_LINE> pad.SetLayerSet(pad.StandardMask()) <NEW_LINE> pad.SetDrillSize(pcbnew.wxSize(drill, drill)) <NEW_LINE> return pad <NEW_LINE> <DEDENT> def THRoundPad(self, size, drill): <NEW_LINE> <INDENT> pad = self.THPad(size, size, drill, shape=pcbnew.PAD_CIRCLE) <NEW_LINE> return pad <NEW_LINE> <DEDENT> def NPTHRoundPad(self, drill): <NEW_LINE> <INDENT> pad = pcbnew.D_PAD(self.module) <NEW_LINE> pad.SetSize(pcbnew.wxSize(drill, drill)) <NEW_LINE> pad.SetShape(pcbnew.PAD_CIRCLE) <NEW_LINE> pad.SetAttribute(pcbnew.PAD_HOLE_NOT_PLATED) <NEW_LINE> pad.SetLayerSet(pad.UnplatedHoleMask()) <NEW_LINE> pad.SetDrillSize(pcbnew.wxSize(drill, drill)) <NEW_LINE> return pad <NEW_LINE> <DEDENT> def SMDPad(self, w, l, shape=pcbnew.PAD_RECT): <NEW_LINE> <INDENT> pad = pcbnew.D_PAD(self.module) <NEW_LINE> pad.SetSize(pcbnew.wxSize(l, w)) <NEW_LINE> pad.SetShape(shape) <NEW_LINE> pad.SetAttribute(pcbnew.PAD_SMD) <NEW_LINE> pad.SetLayerSet(pad.SMDMask()) <NEW_LINE> return pad <NEW_LINE> <DEDENT> def SMTRoundPad(self, size): <NEW_LINE> <INDENT> pad = self.SMDPad(size, size, shape=pcbnew.PAD_CIRCLE) <NEW_LINE> return pad | Useful construction functions for common types of pads | 62598fb0d58c6744b42dc2ee |
class Topic(Base): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> verbose_name = u'专题' <NEW_LINE> verbose_name_plural = u'专题' | 专题 Model | 62598fb055399d3f05626551 |
class ServiceIntro(ServiceDescriptionDirective): <NEW_LINE> <INDENT> def generate_rst(self, d, api_version): <NEW_LINE> <INDENT> rawtext = "" <NEW_LINE> scalar = {} <NEW_LINE> for key in d.description: <NEW_LINE> <INDENT> if isinstance(d[key], str) or isinstance(d[key], unicode): <NEW_LINE> <INDENT> scalar[key] = d[key] <NEW_LINE> rawtext += ".. |%s| replace:: %s\n\n" % (key, scalar[key]) <NEW_LINE> <DEDENT> <DEDENT> docs = self.get_service_doc_url(d["namespace"]) <NEW_LINE> if api_version: <NEW_LINE> <INDENT> apiVersionSuffix = "_" + api_version.replace("-", "_") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> apiVersionSuffix = "" <NEW_LINE> <DEDENT> env = Environment(loader=PackageLoader('aws', 'templates')) <NEW_LINE> template = env.get_template("client_intro") <NEW_LINE> rawtext += template.render( scalar, regions=get_regions(d["namespace"]), doc_url=docs, specifiedApiVersion=api_version, apiVersionSuffix=apiVersionSuffix) <NEW_LINE> return rawtext | Creates a service introduction to inject into a document | 62598fb08e7ae83300ee90d0 |
class ParallelPyBGEN(PyBGEN): <NEW_LINE> <INDENT> def __init__(self, fn, prob_t=0.9, cpus=2, probs_only=False, max_variants=1000): <NEW_LINE> <INDENT> super(ParallelPyBGEN, self).__init__( fn, mode="r", prob_t=prob_t, probs_only=probs_only, ) <NEW_LINE> self.cpus = cpus <NEW_LINE> self._max_variants = max_variants <NEW_LINE> self._seeks = None <NEW_LINE> <DEDENT> def iter_variants(self): <NEW_LINE> <INDENT> if self._seeks is None: <NEW_LINE> <INDENT> self._get_all_seeks() <NEW_LINE> <DEDENT> seeks = [self._seeks[i::self.cpus] for i in range(self.cpus)] <NEW_LINE> return self._parallel_iter_seeks(seeks) <NEW_LINE> <DEDENT> def iter_variants_by_names(self, names): <NEW_LINE> <INDENT> seeks = self._get_seeks_for_names(names) <NEW_LINE> seeks = [seeks[i::self.cpus] for i in range(self.cpus)] <NEW_LINE> return self._parallel_iter_seeks(seeks) <NEW_LINE> <DEDENT> def _get_all_seeks(self): <NEW_LINE> <INDENT> self._bgen_index.execute("SELECT file_start_position FROM Variant") <NEW_LINE> seeks = [_[0] for _ in self._bgen_index.fetchall()] <NEW_LINE> seeks.sort() <NEW_LINE> self._seeks = tuple(seeks) <NEW_LINE> <DEDENT> def _get_seeks_for_names(self, names): <NEW_LINE> <INDENT> self._bgen_index.execute("CREATE TEMPORARY TABLE tnames (name text)") <NEW_LINE> self._bgen_index.executemany( "INSERT INTO tnames VALUES (?)", [(n, ) for n in names], ) <NEW_LINE> self._bgen_index.execute( "SELECT file_start_position " "FROM Variant " "WHERE rsid IN (SELECT name FROM tnames)", ) <NEW_LINE> return tuple(_[0] for _ in self._bgen_index.fetchall()) <NEW_LINE> <DEDENT> def _spawn_workers(self, seeks, queue): <NEW_LINE> <INDENT> self._workers = [] <NEW_LINE> for i in range(self.cpus): <NEW_LINE> <INDENT> worker = multiprocessing.Process( target=_pybgen_reader, args=(self._bgen.name, self.prob_t, self._return_probs, seeks[i], queue), ) <NEW_LINE> self._workers.append(worker) <NEW_LINE> worker.start() <NEW_LINE> <DEDENT> <DEDENT> def _parallel_iter_seeks(self, seeks): <NEW_LINE> <INDENT> 
queue = multiprocessing.Queue(self._max_variants) <NEW_LINE> self._spawn_workers(seeks, queue) <NEW_LINE> try: <NEW_LINE> <INDENT> nb_finish = 0 <NEW_LINE> while True: <NEW_LINE> <INDENT> if nb_finish >= self.cpus: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> result = queue.get() <NEW_LINE> if result is None: <NEW_LINE> <INDENT> nb_finish += 1 <NEW_LINE> continue <NEW_LINE> <DEDENT> yield result <NEW_LINE> <DEDENT> <DEDENT> finally: <NEW_LINE> <INDENT> for worker in self._workers: <NEW_LINE> <INDENT> worker.terminate() | Reads BGEN files in parallel.
Args:
fn (str): The name of the BGEN file.
prob_t (float): The probability threshold (optional).
cpus (int): The number of CPUs (default is 2).
probs_only (boolean): Return only the probabilities instead of dosage.
max_variants (int): The maximal number of variants in the Queue
Reads a BGEN file using multiple processes.
.. code-block:: python
from pybgen import ParallelPyBGEN as PyBGEN
# Reading a BGEN file
with PyBGEN("bgen_file_name") as bgen:
pass | 62598fb0cc0a2c111447b03f |
class GeneralError(ExitCodeError): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> raise NotImplementedError("Please use the constructor taking an error message describing what is wrong!") <NEW_LINE> <DEDENT> def __init__(self, message, *args): <NEW_LINE> <INDENT> super(CommandLineArgumentError, self).__init__(EXIT_CODE_GENERAL_ERROR, message, *args) | Exception that is raised, if an error occurs that cannot be specified any further.
Attributes:
message (str): Explanation of the error. | 62598fb0851cf427c66b82e9 |
class Client(tcp.Client): <NEW_LINE> <INDENT> implementsOnly(interfaces.ISSLTransport, *[i for i in implementedBy(tcp.Client) if i != interfaces.ITLSTransport]) <NEW_LINE> def __init__(self, host, port, bindAddress, ctxFactory, connector, reactor=None): <NEW_LINE> <INDENT> self.ctxFactory = ctxFactory <NEW_LINE> tcp.Client.__init__(self, host, port, bindAddress, connector, reactor) <NEW_LINE> <DEDENT> def _connectDone(self): <NEW_LINE> <INDENT> self.startTLS(self.ctxFactory) <NEW_LINE> self.startWriting() <NEW_LINE> tcp.Client._connectDone(self) | I am an SSL client. | 62598fb0bd1bec0571e150d9 |
class ReleaseCustomName(Item): <NEW_LINE> <INDENT> @property <NEW_LINE> def title(self): <NEW_LINE> <INDENT> if hasattr(self, 'projecttitle') and hasattr(self, 'releasenumber'): <NEW_LINE> <INDENT> if self.projecttitle is None: <NEW_LINE> <INDENT> self.projecttitle = '' <NEW_LINE> <DEDENT> return self.projecttitle + ' - ' + self.releasenumber <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return '' <NEW_LINE> <DEDENT> <DEDENT> def setTitle(self, value): <NEW_LINE> <INDENT> return | Custom name for a release and linked release from the title and the
release number | 62598fb04e4d562566372454 |
class BatchDescription: <NEW_LINE> <INDENT> def __init__(self, dict): <NEW_LINE> <INDENT> for k, v in dict.items(): <NEW_LINE> <INDENT> setattr(self, "_" + k, v) <NEW_LINE> <DEDENT> <DEDENT> def __getattr__(self, attribute): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> def bestBeforeDescription(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> value = self._bestBeforeDescription <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> value = None <NEW_LINE> <DEDENT> return value <NEW_LINE> <DEDENT> def bestBeforeDate(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> value = self._bestBeforeDate <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> value = None <NEW_LINE> <DEDENT> return value <NEW_LINE> <DEDENT> def useByDescription(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> value = self._useByDescription <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> value = None <NEW_LINE> <DEDENT> return value <NEW_LINE> <DEDENT> def useByDate(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> value = self._useByDate <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> value = None <NEW_LINE> <DEDENT> return value <NEW_LINE> <DEDENT> def batchCode(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> value = self._batchCode <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> value = None <NEW_LINE> <DEDENT> return value <NEW_LINE> <DEDENT> def lotNumber(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> value = self._lotNumber <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> value = None <NEW_LINE> <DEDENT> return value <NEW_LINE> <DEDENT> def batchTextDescription(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> value = self._batchTextDescription <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> value = None <NEW_LINE> <DEDENT> return value | Attributes:
_bestBeforeDescription (string, optional): "best before" date range for the batch
_bestBeforeDate (string, optional): "best before" date (or dates) for the batch
_useByDescription (string, optional): "use by" date range for the batch
_useByDate (string, optional): "use by" date (or dates) for the batch
_batchCode (string, optional): batch number or code for the batch
_lotNumber (string, optional): lot number for the batch
_batchTextDescription (string, optional): other textual description for the batch | 62598fb0a05bb46b3848a899 |
class Backend(BaseBackend): <NEW_LINE> <INDENT> can_detect_close = False <NEW_LINE> def __init__(self, push=None, feedback=None, **options): <NEW_LINE> <INDENT> super(Backend, self).__init__(**options) <NEW_LINE> self.push_results = push <NEW_LINE> self.push_result_pos = -1 <NEW_LINE> self.feedback_results = feedback <NEW_LINE> self.new_connections = 0 <NEW_LINE> assert (push is not None) ^ (feedback is not None), "Push results or feedback stream must be provided" <NEW_LINE> <DEDENT> def get_new_connection(self, address, certificate, timeout=None): <NEW_LINE> <INDENT> self.new_connections += 1 <NEW_LINE> self.push_result_pos += 1 <NEW_LINE> return Connection(self, address, certificate) <NEW_LINE> <DEDENT> def get_certificate(self, cert_params): <NEW_LINE> <INDENT> return Certificate(**cert_params) <NEW_LINE> <DEDENT> def create_lock(self): <NEW_LINE> <INDENT> return _threading.Lock() | Dummy backend designed for testing without performing real IO. Serves
as an example for your custom backends. | 62598fb0fff4ab517ebcd813 |
class OneMayorCollegeProjectCreated(MayorCollegeCondition): <NEW_LINE> <INDENT> def evaluate(self): <NEW_LINE> <INDENT> mayor_events = self.licence.getAllMayorColleges() <NEW_LINE> if not mayor_events: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> for event in mayor_events: <NEW_LINE> <INDENT> if api.content.get_state(event) == 'draft': <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> return False | At least one MayorCollege project is created | 62598fb01b99ca400228f547 |
class TestLoginApi(unittest.TestCase): <NEW_LINE> <INDENT> def test_login_api(self): <NEW_LINE> <INDENT> url = "http://sytest.54315.com/login" <NEW_LINE> querystring = {"mobile": "13727086330", "password": "qwe123"} <NEW_LINE> headers = { 'Cache-Control': "no-cache", 'Postman-Token': "167d7ea0-c440-4bfb-9f40-08d7169599e1" } <NEW_LINE> response = requests.request("POST", url, headers=headers, params=querystring).json() <NEW_LINE> print(response) <NEW_LINE> self.assertEqual(response['code'], '100') <NEW_LINE> self.assertEqual(response['message'], "登录成功") | 登陆接口测试 | 62598fb04a966d76dd5eef05 |
class WebsiteMiddleware(PageMiddleware): <NEW_LINE> <INDENT> def process_response_text(self, response): <NEW_LINE> <INDENT> url: str = response.url <NEW_LINE> text: str = response.text <NEW_LINE> text = self.replace_to_real_link(url, text) <NEW_LINE> return text <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def replace_to_real_link(url: str, text: str) -> str: <NEW_LINE> <INDENT> def _replace_relative_link(match): <NEW_LINE> <INDENT> path = match.group(1) <NEW_LINE> if path.endswith('.html'): <NEW_LINE> <INDENT> return path <NEW_LINE> <DEDENT> path = urljoin(url, path) <NEW_LINE> return path <NEW_LINE> <DEDENT> text = re.sub(r'(?<=")(\.{2}.+?)(?=")', _replace_relative_link, text) <NEW_LINE> text = re.sub(r'(?<=src=")(.+?)(?=")', _replace_relative_link, text) <NEW_LINE> text = re.sub(r'(?<=url\()(.+?)(?=\))', _replace_relative_link, text) <NEW_LINE> scheme = url[:url.find(':')] <NEW_LINE> text = re.sub(r'(?<=")(//.+?)(?=")', f'{scheme}:\\1', text) <NEW_LINE> return text | 只把非 html 的链接替换了,html 的保持不变 | 62598fb0cb5e8a47e493c190 |
class Opposite(PrimaryVisibility): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> PrimaryVisibility.__init__(self) <NEW_LINE> self._name = "Opposite" <NEW_LINE> self._message = "{0} mesh(es) are set to opposite" <NEW_LINE> self._attribute = ".opposite" <NEW_LINE> self._errorBool = True | Meshes will be checked to see if they are set to opposite. When fixing
this error, the opposite state will be turned off. | 62598fb001c39578d7f12dad |
class Producer: <NEW_LINE> <INDENT> def producer(self): <NEW_LINE> <INDENT> print("Producer is working hard") <NEW_LINE> <DEDENT> def meet(self): <NEW_LINE> <INDENT> print("producer has time to meet you now") | Define the 'resource-intensive' object to instantiate | 62598fb0aad79263cf42e802 |
class ProductionConfig(BaseConfig): <NEW_LINE> <INDENT> DEBUG = False | This class sets the PRODUCTION environment variables | 62598fb032920d7e50bc6083 |
class Catalog(BaseMunkiModel): <NEW_LINE> <INDENT> package_names = db.StringListProperty() <NEW_LINE> PLIST_LIB_CLASS = plist_lib.MunkiPlist <NEW_LINE> @classmethod <NEW_LINE> def Generate(cls, name, delay=0): <NEW_LINE> <INDENT> if delay: <NEW_LINE> <INDENT> now = datetime.datetime.utcnow() <NEW_LINE> now_str = '%s-%d' % (now.strftime('%Y-%m-%d-%H-%M-%S'), now.microsecond) <NEW_LINE> deferred_name = 'create-catalog-%s-%s' % (name, now_str) <NEW_LINE> deferred.defer(cls.Generate, name, _name=deferred_name, _countdown=delay) <NEW_LINE> return <NEW_LINE> <DEDENT> lock = 'catalog_lock_%s' % name <NEW_LINE> if not gae_util.ObtainLock(lock): <NEW_LINE> <INDENT> logging.debug('Catalog creation for %s is locked. Delaying....', name) <NEW_LINE> cls.Generate(name, delay=10) <NEW_LINE> return <NEW_LINE> <DEDENT> package_names = [] <NEW_LINE> try: <NEW_LINE> <INDENT> pkgsinfo_dicts = [] <NEW_LINE> package_infos = PackageInfo.all().filter('catalogs =', name).fetch(None) <NEW_LINE> if not package_infos: <NEW_LINE> <INDENT> logging.warning('No PackageInfo entities with catalog: %s', name) <NEW_LINE> <DEDENT> for p in package_infos: <NEW_LINE> <INDENT> package_names.append(p.name) <NEW_LINE> pkgsinfo_dicts.append(p.plist.GetXmlContent(indent_num=1)) <NEW_LINE> <DEDENT> catalog = constants.CATALOG_PLIST_XML % '\n'.join(pkgsinfo_dicts) <NEW_LINE> c = cls.get_or_insert(name) <NEW_LINE> c.package_names = package_names <NEW_LINE> c.name = name <NEW_LINE> c.plist = catalog <NEW_LINE> c.put() <NEW_LINE> cls.DeleteMemcacheWrap(name, prop_name='plist_xml') <NEW_LINE> Manifest.Generate(name, delay=1) <NEW_LINE> <DEDENT> except (db.Error, plist_lib.Error): <NEW_LINE> <INDENT> logging.exception('Catalog.Generate failure for catalog: %s', name) <NEW_LINE> raise <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> gae_util.ReleaseLock(lock) | Munki catalog.
These will be automatically generated on App Engine whenever an admin uploads
a pkginfo file.
Note: There is also an "all" catalog that includes all packages. | 62598fb04428ac0f6e658554 |
class IntegerTypeMixin: <NEW_LINE> <INDENT> def getName(self): <NEW_LINE> <INDENT> return 'i' + str(self.getBits()) <NEW_LINE> <DEDENT> def key(self): <NEW_LINE> <INDENT> return (self.typeId(), self.getBits()) <NEW_LINE> <DEDENT> def isAssignable(self, value): <NEW_LINE> <INDENT> return isinstance(value, (int, float)) | Mixin methods for integer type. | 62598fb04e4d562566372455 |
class DispersingGhost( ghostAgents.GhostAgent ): <NEW_LINE> <INDENT> def __init__( self, index, spreadProb=0.5): <NEW_LINE> <INDENT> self.index = index <NEW_LINE> self.spreadProb = spreadProb <NEW_LINE> <DEDENT> def getDistribution( self, state ): <NEW_LINE> <INDENT> ghostState = state.getGhostState( self.index ) <NEW_LINE> legalActions = state.getLegalActions( self.index ) <NEW_LINE> pos = state.getGhostPosition( self.index ) <NEW_LINE> isScared = ghostState.scaredTimer > 0 <NEW_LINE> speed = 1 <NEW_LINE> if isScared: speed = 0.5 <NEW_LINE> actionVectors = [Actions.directionToVector( a, speed ) for a in legalActions] <NEW_LINE> newPositions = [( pos[0]+a[0], pos[1]+a[1] ) for a in actionVectors] <NEW_LINE> others = [i for i in range(1,state.getNumAgents()) if i != self.index] <NEW_LINE> for a in others: assert state.getGhostState(a) != None, "Ghost position unspecified in state!" <NEW_LINE> otherGhostPositions = [state.getGhostPosition(a) for a in others if state.getGhostPosition(a)[1] > 1] <NEW_LINE> sumOfDistances = [] <NEW_LINE> for pos in newPositions: <NEW_LINE> <INDENT> sumOfDistances.append( sum([(1+manhattanDistance(pos, g))**(-2) for g in otherGhostPositions]) ) <NEW_LINE> <DEDENT> bestDistance = min(sumOfDistances) <NEW_LINE> numBest = [bestDistance == dist for dist in sumOfDistances].count(True) <NEW_LINE> distribution = util.Counter() <NEW_LINE> for action, distance in zip(legalActions, sumOfDistances): <NEW_LINE> <INDENT> if distance == bestDistance: distribution[action] += old_div(self.spreadProb, numBest) <NEW_LINE> distribution[action] += old_div((1 - self.spreadProb), len(legalActions)) <NEW_LINE> <DEDENT> return distribution | Chooses an action that distances the ghost from the other ghosts with probability spreadProb. | 62598fb03539df3088ecc2e1 |
@attributes( [ Attribute(name="index", default_value=lib.CV_CAP_ANY), ], ) <NEW_LINE> class Camera(object): <NEW_LINE> <INDENT> def frames(self): <NEW_LINE> <INDENT> capture = lib.cvCreateCameraCapture(self.index) <NEW_LINE> if capture == ffi.NULL: <NEW_LINE> <INDENT> raise InitializationError(self) <NEW_LINE> <DEDENT> next_frame = partial(lib.cvQueryFrame, capture) <NEW_LINE> for frame in iter(next_frame, None): <NEW_LINE> <INDENT> yield Image(cv_arr=frame) | A camera device. | 62598fb0baa26c4b54d4f2e3 |
class ClassificationProvider: <NEW_LINE> <INDENT> def __init__(self, class_data, folds, batch_size): <NEW_LINE> <INDENT> self.queue = Queue(10) <NEW_LINE> proc = Process( target=_fill_queue, args=(self.queue, class_data, folds, batch_size)) <NEW_LINE> proc.daemon = True <NEW_LINE> proc.start() <NEW_LINE> <DEDENT> def generate(self): <NEW_LINE> <INDENT> while True: <NEW_LINE> <INDENT> yield self.queue.get() | Class for generating batches of balanced classification sequences.
| 62598fb001c39578d7f12dae |
class Legend(TypedDict, total=False): <NEW_LINE> <INDENT> mime_type: str <NEW_LINE> href: str <NEW_LINE> max_resolution: float <NEW_LINE> min_resolution: float <NEW_LINE> width: int <NEW_LINE> height: int | Legend fields. | 62598fb05fdd1c0f98e5dfbb |
class TestLookmlModelExploreSupportedMeasureType(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def testLookmlModelExploreSupportedMeasureType(self): <NEW_LINE> <INDENT> pass | LookmlModelExploreSupportedMeasureType unit test stubs | 62598fb0d58c6744b42dc2ef |
class TreeAdaptor(object): <NEW_LINE> <INDENT> def createWithPayload(self, payload): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def dupTree(self, tree): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def dupNode(self, treeNode): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def nil(self): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def isNil(self, tree): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def addChild(self, t, child): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def becomeRoot(self, newRoot, oldRoot): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def rulePostProcessing(self, root): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def getUniqueID(self, node): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def createFromToken(self, tokenType, fromToken, text=None): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def createFromType(self, tokenType, text): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def getType(self, t): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def setType(self, t, type): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def getText(self, t): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def setText(self, t, text): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def getToken(self, t): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def setTokenBoundaries(self, t, startToken, stopToken): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def getTokenStartIndex(self, t): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def getTokenStopIndex(self, t): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def getChild(self, t, i): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> 
<DEDENT> def getChildCount(self, t): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def create(self, *args): <NEW_LINE> <INDENT> if len(args) == 1 and isinstance(args[0], Token): <NEW_LINE> <INDENT> warnings.warn( "Using create() is deprecated, use createWithPayload()", DeprecationWarning, stacklevel=2 ) <NEW_LINE> return self.createWithPayload(args[0]) <NEW_LINE> <DEDENT> if (len(args) == 2 and isinstance(args[0], (int, long)) and isinstance(args[1], Token) ): <NEW_LINE> <INDENT> warnings.warn( "Using create() is deprecated, use createFromToken()", DeprecationWarning, stacklevel=2 ) <NEW_LINE> return self.createFromToken(args[0], args[1]) <NEW_LINE> <DEDENT> if (len(args) == 3 and isinstance(args[0], (int, long)) and isinstance(args[1], Token) and isinstance(args[2], basestring) ): <NEW_LINE> <INDENT> warnings.warn( "Using create() is deprecated, use createFromToken()", DeprecationWarning, stacklevel=2 ) <NEW_LINE> return self.createFromToken(args[0], args[1], args[2]) <NEW_LINE> <DEDENT> if (len(args) == 2 and isinstance(args[0], (int, long)) and isinstance(args[1], basestring) ): <NEW_LINE> <INDENT> warnings.warn( "Using create() is deprecated, use createFromType()", DeprecationWarning, stacklevel=2 ) <NEW_LINE> return self.createFromType(args[0], args[1]) <NEW_LINE> <DEDENT> raise TypeError( "No create method with this signature found: %s" % (', '.join(type(v).__name__ for v in args)) ) | @brief Abstract baseclass for tree adaptors.
How to create and navigate trees. Rather than have a separate factory
and adaptor, I've merged them. Makes sense to encapsulate.
This takes the place of the tree construction code generated in the
generated code in 2.x and the ASTFactory.
I do not need to know the type of a tree at all so they are all
generic Objects. This may increase the amount of typecasting needed. :( | 62598fb0090684286d5936f4 |
class MsanV8Builder(V8Builder): <NEW_LINE> <INDENT> def setup_gn_args(self): <NEW_LINE> <INDENT> super(MsanV8Builder, self).setup_gn_args() <NEW_LINE> args_hash = self.deserialize_gn_args(self.gn_args) <NEW_LINE> msan_track_origins_value = (int(args_hash['msan_track_origins']) if 'msan_track_origins' in args_hash else 2) <NEW_LINE> common.execute('gclient', 'runhooks', self.source_directory, env={'GYP_DEFINES': ('msan=1 msan_track_origins=%d ' 'use_prebuilt_instrumented_libraries=1') % msan_track_origins_value}) | Build a MSAN V8 build. | 62598fb063b5f9789fe85197 |
class updateIdcForm(forms.ModelForm): <NEW_LINE> <INDENT> module = models.Idc <NEW_LINE> fields = ('virIP',) | 更新业务验证 | 62598fb08da39b475be03215 |
class FtwDashboardPortletManagerRenderer(DashboardPortletManagerRenderer): <NEW_LINE> <INDENT> adapts( INavigationRoot, IDashboardLayer, IBrowserView, IDashboard) <NEW_LINE> template = ViewPageTemplateFile('templates/dashboard-column.pt') <NEW_LINE> def isEditable(self, portlet): <NEW_LINE> <INDENT> if queryMultiAdapter( (portlet, self.request), name='edit', default=None): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False | Render a column of the dashboard
| 62598fb0d486a94d0ba2bffe |
class InvalidBallotException(ValueError): <NEW_LINE> <INDENT> pass | The provided ballot is invalid for this election | 62598fb056ac1b37e630221b |
class ReverseList(): <NEW_LINE> <INDENT> def __init__(self, l=None): <NEW_LINE> <INDENT> self.index = 0 <NEW_LINE> self.l = l <NEW_LINE> if l is not None: <NEW_LINE> <INDENT> self.num_elements = len(l) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.num_elements = 0 <NEW_LINE> <DEDENT> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> s = '[' <NEW_LINE> for index, val in enumerate(self.l): <NEW_LINE> <INDENT> s += str(val) <NEW_LINE> if index < self.num_elements - 1: <NEW_LINE> <INDENT> s += ',' <NEW_LINE> <DEDENT> <DEDENT> s += ']' <NEW_LINE> return s <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> def __next__(self): <NEW_LINE> <INDENT> if self.index >= self.num_elements: <NEW_LINE> <INDENT> raise StopIteration() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> val = self.l[self.num_elements - self.index - 1] <NEW_LINE> self.index += 1 <NEW_LINE> return val | Iterator that outputs the list in reverse. | 62598fb05166f23b2e243409 |
class DomainSpecificString( namedtuple("DomainSpecificString", ("localpart", "domain")) ): <NEW_LINE> <INDENT> def __iter__(self): <NEW_LINE> <INDENT> raise ValueError("Attempted to iterate a %s" % (type(self).__name__,)) <NEW_LINE> <DEDENT> def __copy__(self): <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> def __deepcopy__(self, memo): <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_string(cls, s): <NEW_LINE> <INDENT> if len(s) < 1 or s[0] != cls.SIGIL: <NEW_LINE> <INDENT> raise SynapseError(400, "Expected %s string to start with '%s'" % ( cls.__name__, cls.SIGIL, )) <NEW_LINE> <DEDENT> parts = s[1:].split(':', 1) <NEW_LINE> if len(parts) != 2: <NEW_LINE> <INDENT> raise SynapseError( 400, "Expected %s of the form '%slocalname:domain'" % ( cls.__name__, cls.SIGIL, ) ) <NEW_LINE> <DEDENT> domain = parts[1] <NEW_LINE> return cls(localpart=parts[0], domain=domain) <NEW_LINE> <DEDENT> def to_string(self): <NEW_LINE> <INDENT> return "%s%s:%s" % (self.SIGIL, self.localpart, self.domain) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def is_valid(cls, s): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> cls.from_string(s) <NEW_LINE> return True <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> __str__ = to_string <NEW_LINE> @classmethod <NEW_LINE> def create(cls, localpart, domain,): <NEW_LINE> <INDENT> return cls(localpart=localpart, domain=domain) | Common base class among ID/name strings that have a local part and a
domain name, prefixed with a sigil.
Has the fields:
'localpart' : The local part of the name (without the leading sigil)
'domain' : The domain part of the name | 62598fb01f5feb6acb162c4e |
class Lecture(db.Model, ModelMixin): <NEW_LINE> <INDENT> __tablename__ = 'lecture' <NEW_LINE> LECTURE_STATE_VALUES = ('published', 'recording', 'coming', 'deleted') <NEW_LINE> id = db.Column(db.Integer, primary_key=True) <NEW_LINE> name = db.Column(db.String(20)) <NEW_LINE> description = db.Column(db.Text) <NEW_LINE> created = db.Column(db.DateTime, default=datetime.utcnow) <NEW_LINE> knowledge_point = db.Column(db.Text) <NEW_LINE> prepare_knowledge = db.Column(db.Text) <NEW_LINE> term = db.Column(db.String(512)) <NEW_LINE> chapter = db.Column(db.String(512)) <NEW_LINE> record_time = db.Column(db.DateTime) <NEW_LINE> record_location = db.Column(db.String(256)) <NEW_LINE> upload_time = db.Column(db.DateTime, default=datetime.utcnow) <NEW_LINE> video_url = db.Column(db.String(150)) <NEW_LINE> video_length = db.Column(db.Integer) <NEW_LINE> logo_url = db.Column(db.String(100)) <NEW_LINE> state = db.Column(db.Enum(*LECTURE_STATE_VALUES), default='published') <NEW_LINE> watch_count = db.Column(db.Integer, default=0) <NEW_LINE> order = db.Column(db.Integer, default=9999) <NEW_LINE> play_count = db.Column(db.Integer, default=0) <NEW_LINE> course_id = db.Column(db.Integer, db.ForeignKey('course.id')) <NEW_LINE> teacher_id = db.Column(db.Integer, db.ForeignKey('teacher.id')) <NEW_LINE> learn_records = db.relationship('LearnRecord', backref=db.backref('lecture'), uselist=True, lazy='dynamic') <NEW_LINE> questions = db.relationship('Question', backref=db.backref('lecture'), uselist=True, lazy='dynamic') <NEW_LINE> answers = db.relationship('Answer', backref=db.backref('lecture'), uselist=True, lazy='dynamic') <NEW_LINE> tags = db.relationship('Tag', secondary=lecture_tags, backref=db.backref('lectures')) <NEW_LINE> def delete(self, commit=True): <NEW_LINE> <INDENT> for q in self.questions: <NEW_LINE> <INDENT> self.questions.remove(q) <NEW_LINE> <DEDENT> for a in self.answers: <NEW_LINE> <INDENT> self.answers.remove(a) <NEW_LINE> <DEDENT> for t in self.tags: <NEW_LINE> 
<INDENT> self.tags.remove(t) <NEW_LINE> <DEDENT> for lr in self.learn_records: <NEW_LINE> <INDENT> lr.delete() <NEW_LINE> <DEDENT> self.course = None <NEW_LINE> self.teacher = None <NEW_LINE> self.state = 'deleted' <NEW_LINE> db.session.add(self) <NEW_LINE> if commit: <NEW_LINE> <INDENT> db.session.commit() <NEW_LINE> <DEDENT> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.name <NEW_LINE> <DEDENT> def __unicode__(self): <NEW_LINE> <INDENT> return self.name <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "<Lecture %s>" % self.name | Model of Course | 62598fb056b00c62f0fb28e5 |
class FakeLog(object): <NEW_LINE> <INDENT> def __init__(self, prefix): <NEW_LINE> <INDENT> self.prefix = prefix <NEW_LINE> self.messages = [] <NEW_LINE> <DEDENT> def exception(self): <NEW_LINE> <INDENT> info = sys.exc_info() <NEW_LINE> self.messages.append(info) | Fake logger that allows us to pick the log messages out from
within unit tests. | 62598fb03346ee7daa33765f |
class Widget(CountableWidget): <NEW_LINE> <INDENT> widget_type = 'intselect' <NEW_LINE> widget_label = _('Int Select') <NEW_LINE> groups = ( DefaultSchemata, LayoutSchemata, CountableSchemata, DisplaySchemata ) <NEW_LINE> index = ViewPageTemplateFile('widget.pt') <NEW_LINE> @property <NEW_LINE> def default(self): <NEW_LINE> <INDENT> default = super(Widget, self).default or u'' <NEW_LINE> return default.encode('utf-8') <NEW_LINE> <DEDENT> def query(self, form): <NEW_LINE> <INDENT> query = {} <NEW_LINE> index = self.data.get('index', '') <NEW_LINE> index = index.encode('utf-8', 'replace') <NEW_LINE> if not index: <NEW_LINE> <INDENT> return query <NEW_LINE> <DEDENT> if self.hidden: <NEW_LINE> <INDENT> value = self.default <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> value = form.get(self.data.getId(), '') <NEW_LINE> <DEDENT> if not value: <NEW_LINE> <INDENT> return query <NEW_LINE> <DEDENT> query[index] = int(value) <NEW_LINE> return query <NEW_LINE> <DEDENT> def vocabulary(self, **kwargs): <NEW_LINE> <INDENT> reverse = safeToInt(self.data.get('sortreversed', 0)) <NEW_LINE> self.data.catalog = 'portal_catalog' <NEW_LINE> values = self.catalog_vocabulary() <NEW_LINE> mapping = {} <NEW_LINE> res = [(val, mapping.get(val, val)) for val in values] <NEW_LINE> res.sort(key=operator.itemgetter(1), cmp=intcompare) <NEW_LINE> if reverse: <NEW_LINE> <INDENT> res.reverse() <NEW_LINE> <DEDENT> return res | Widget
| 62598fb08a43f66fc4bf21ab |
class UserQuestRelationship(models.Model): <NEW_LINE> <INDENT> task = models.ForeignKey(Task) <NEW_LINE> user = models.ForeignKey(User) <NEW_LINE> views = models.ManyToManyField('document.View', help_text='The 10 (5 Quests * 2 Sections) Viewable blocks of text that are part of the Quest') <NEW_LINE> completed = models.BooleanField(default=False, blank=True) <NEW_LINE> score = models.IntegerField(blank=True, default=5) <NEW_LINE> updated = models.DateTimeField(auto_now=True) <NEW_LINE> created = models.DateTimeField(auto_now_add=True) <NEW_LINE> def completed_views(self): <NEW_LINE> <INDENT> return self.views.filter(completed=True) <NEW_LINE> <DEDENT> def completed_document_ids(self): <NEW_LINE> <INDENT> return list(set( self.completed_views().values_list('section__document', flat=True) )) <NEW_LINE> <DEDENT> class Meta: <NEW_LINE> <INDENT> get_latest_by = 'updated' <NEW_LINE> <DEDENT> def __unicode__(self): <NEW_LINE> <INDENT> return u'/task/entity-recognition/quest/{quest_pk}/ {username}'.format( quest_pk=self.task.pk, username=self.user.username) | Describes a User's Status on a specific ER Quest
We use this to track if a User has completed a Quest
* Technically this can be inferred by if they'ved completed all of the
documents within the quest, but was originally made to separate if a
PMID appears in multiple Quests | 62598fb0d7e4931a7ef3c0c4 |
class Motorcycle(Vehicle): <NEW_LINE> <INDENT> base_sale_price = 2000 <NEW_LINE> wheels = 2 <NEW_LINE> def vehicle_type(self): <NEW_LINE> <INDENT> return 'motorcycle' <NEW_LINE> <DEDENT> def is_motorcycle(self): <NEW_LINE> <INDENT> if self.wheels == 2: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False | A motorcycle for sale.
| 62598fb07047854f4633f40a |
class ClustersPerLaneHandler(SafeHandler): <NEW_LINE> <INDENT> def get(self): <NEW_LINE> <INDENT> names= self.application.flowcells_db.view("lanes/clusters") <NEW_LINE> start=names.rows[0].key[0][:6] <NEW_LINE> end=names.rows[-1].key[0][:6] <NEW_LINE> start=start[:4]+'01' <NEW_LINE> end=end[:4]+'31' <NEW_LINE> t = self.application.loader.load("clusters_per_lane.html") <NEW_LINE> self.write(t.generate(user = self.get_current_user_name(), start=start, end=end)) | Serves a page with a plot of distribution of lane read production for a provided
time interval. | 62598fb0379a373c97d99044 |
class AuthorViewSet(viewsets.ModelViewSet): <NEW_LINE> <INDENT> queryset = Author.objects.all() <NEW_LINE> serializer_class = AuthorSerializer <NEW_LINE> ordering_fields = '__all__' <NEW_LINE> ordering = ('-email',) <NEW_LINE> http_method_names = ['get', 'head'] | API endpoint for Author. | 62598fb02ae34c7f260ab112 |
class SleepTotalsChildReport(PermissionRequiredMixin, DetailView): <NEW_LINE> <INDENT> model = Child <NEW_LINE> permission_required = ('core.view_child',) <NEW_LINE> template_name = 'reports/sleep_totals.html' <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> super(SleepTotalsChildReport, self).__init__() <NEW_LINE> self.html = '' <NEW_LINE> self.javascript = '' <NEW_LINE> <DEDENT> def get_context_data(self, **kwargs): <NEW_LINE> <INDENT> context = super(SleepTotalsChildReport, self).get_context_data( **kwargs) <NEW_LINE> child = context['object'] <NEW_LINE> context['html'], context['javascript'] = sleep_totals(child) <NEW_LINE> return context | Graph of total sleep by day. | 62598fb099cbb53fe6830f09 |
class EntropyEpochMetric(Metric): <NEW_LINE> <INDENT> def __init__(self, output_transform, sizes, iters): <NEW_LINE> <INDENT> self.sizes = sizes.cpu().numpy() <NEW_LINE> self.iters = iters <NEW_LINE> self.num_layers = len(self.sizes) <NEW_LINE> self._layers = np.zeros((self.num_layers, self.iters)) <NEW_LINE> self._num_examples = 0 <NEW_LINE> super().__init__(output_transform) <NEW_LINE> <DEDENT> def reset(self): <NEW_LINE> <INDENT> self._layers.fill(0) <NEW_LINE> self._num_examples = 0 <NEW_LINE> <DEDENT> def update(self, entropy): <NEW_LINE> <INDENT> entropy = entropy.mean(dim=1).cpu().numpy() <NEW_LINE> self._layers += entropy <NEW_LINE> self._num_examples += 1.0 <NEW_LINE> <DEDENT> def compute(self): <NEW_LINE> <INDENT> layers = self._layers / self._num_examples <NEW_LINE> weights = self.sizes / sum(self.sizes) <NEW_LINE> average = (layers * weights.reshape(-1, 1)).sum(axis=0) <NEW_LINE> return {"layers": layers, "avg": average} | Entropy metric per epoch
Entropy per layer per routing iter and entropy average correct for capsule size per routing iter. | 62598fb0ff9c53063f51a67d |
class Input(object): <NEW_LINE> <INDENT> def __init__(self, type='text'): <NEW_LINE> <INDENT> self.type = type <NEW_LINE> <DEDENT> def render(self, field, rargs): <NEW_LINE> <INDENT> rargs.setdefault('type', self.type) <NEW_LINE> return Tag('input', name=field.name, value=field.cvalue, **rargs).render() | <input type="<type>"> | 62598fb04428ac0f6e658556 |
class DeltaSizeInfo(BaseSizeInfo): <NEW_LINE> <INDENT> __slots__ = ( 'before', 'after', ) <NEW_LINE> def __init__(self, before, after, section_sizes, raw_symbols): <NEW_LINE> <INDENT> super(DeltaSizeInfo, self).__init__(section_sizes, raw_symbols) <NEW_LINE> self.before = before <NEW_LINE> self.after = after | What you get when you Diff() two SizeInfo objects.
Fields:
before: SizeInfo for "before".
after: SizeInfo for "after". | 62598fb07047854f4633f40b |
class SecretKey(object): <NEW_LINE> <INDENT> def __init__(self, secret=None): <NEW_LINE> <INDENT> if secret is None: <NEW_LINE> <INDENT> self.secret = [[8, 6, 9, 5], [6, 9, 5, 10], [5, 8, 4, 9], [10, 6, 11, 4]] <NEW_LINE> self.secret_inverse = [[-3, 20, -21, 1], [2, -41, 44, 1], [2, -6, 6, -1], [-1, 28, -30, -1]] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.secret_inverse = matrix_invert(secret) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> print("The secret key is not invertible.") <NEW_LINE> sys.exit(0) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def get_secret_key(self): <NEW_LINE> <INDENT> return np.array(self.secret) <NEW_LINE> <DEDENT> def get_secret_inverse(self): <NEW_LINE> <INDENT> return np.array(self.secret_inverse) <NEW_LINE> <DEDENT> def matrix_invert(self, secret=None): <NEW_LINE> <INDENT> return self.secret_inverse | SecretKey for Hill's Cipher encryption is a square matrix.
Decryption requires its inverse.
SecretKey class member vars are secret key square matrix and inverse.
Arbitrary 4x4 invertible matrix provided as default secret.
If a matrix is provided, its inverse is calculated via LU factorization on GPU. | 62598fb05fdd1c0f98e5dfbc |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.