code
stringlengths
4
4.48k
docstring
stringlengths
1
6.45k
_id
stringlengths
24
24
class MappingForm(ModelForm):
    """ModelForm over Mapping exposing ip_or_cname and hostname, with
    validation against duplicate values and forbidden characters."""

    class Meta:
        model = Mapping
        fields = ['ip_or_cname', 'hostname']

    def clean(self):
        """Reject identical fields and any field containing a forbidden character."""
        forbidden = '!@#$%^&*()+=<>,/? :;"\'{}[]|\\`~'
        ip_or_cname = self.cleaned_data.get('ip_or_cname')
        hostname = self.cleaned_data.get('hostname')
        if ip_or_cname == hostname:
            raise forms.ValidationError('IP/CNAME and Hostname cannot be the same!', code='double')
        # Walk the forbidden set character by character so the error names the
        # first offending character; IP/CNAME is checked before hostname for
        # each character, matching the historical error ordering.
        for ch in forbidden:
            if ip_or_cname and ch in ip_or_cname:
                raise forms.ValidationError('Invalid character: "%s" in IP/CNAME' % ch)
            if hostname and ch in hostname:
                raise forms.ValidationError('Invalid character: "%s" in hostname' % ch)
        return self.cleaned_data
Creates a modelform from the Mapping model. Specifies which fields to display and has a custom clean method to check for duplicate fields and invalid characters in the entered data
62598fa9a8370b77170f0334
class GenericModel(BaseModel, SourceMixin, ContactDetailMixin, LinkMixin, AssociatedLinkMixin, OtherNameMixin, IdentifierMixin):
    """Generic model combining BaseModel with every mixin, used for testing."""

    _type = "generic"
    _schema = schema

    def __init__(self):
        super(GenericModel, self).__init__()
        # Fresh, empty association list per instance.
        self._associated = list()
a generic model used for testing the base and mixins
62598fa938b623060ffa8ff1
class some_of(CustomType):
    """Custom type that accepts a separator-joined subset of *choices*.

    Calling an instance with e.g. ``'a, b'`` splits on the separator and
    returns the list of values; a ValueError is raised if any value is not
    one of the allowed choices.
    """

    def __init__(self, choices, separator=', ', **config):
        # Supply a readable default description unless the caller provided one.
        # ('x not in d' is the idiomatic form of the original 'not x in d'.)
        if 'type_str' not in config:
            config['type_str'] = 'some of: %s' % separator.join(choices)
        CustomType.__init__(self, **config)
        self.choices = choices
        self.separator = separator

    def __call__(self, val):
        """Split *val* on the separator, validate each item, return the list."""
        values = val.split(self.separator)
        invalid = [value for value in values if value not in self.choices]
        if invalid:
            # The original raised a bare ValueError(); include the offending
            # values so failures are diagnosable. Exception type unchanged.
            raise ValueError('invalid value(s): %s' % self.separator.join(invalid))
        return values
Get multiple items from a list of choices.
62598fa9aad79263cf42e72e
class DeleteRawModifiedDetails(FrozenClass):
    """UA binary-serializable structure for a raw/modified history delete.

    :ivar NodeId: target node
    :vartype NodeId: NodeId
    :ivar IsDeleteModified: whether modified values are deleted
    :vartype IsDeleteModified: Boolean
    :ivar StartTime:
    :vartype StartTime: DateTime
    :ivar EndTime:
    :vartype EndTime: DateTime
    """

    # Field name -> UA wire type; order here mirrors the pack/unpack order below.
    ua_types = {
        'NodeId': 'NodeId',
        'IsDeleteModified': 'Boolean',
        'StartTime': 'DateTime',
        'EndTime': 'DateTime',
    }

    def __init__(self, binary=None):
        # When raw binary is supplied, decode it and freeze immediately.
        if binary is not None:
            self._binary_init(binary)
            self._freeze = True
            return
        # Otherwise populate defaults, then freeze to block new attributes.
        self.NodeId = NodeId()
        self.IsDeleteModified = True
        self.StartTime = datetime.utcnow()
        self.EndTime = datetime.utcnow()
        self._freeze = True

    def to_binary(self):
        """Serialize all fields, in declaration order, to UA binary."""
        packet = []
        packet.append(self.NodeId.to_binary())
        packet.append(uabin.Primitives.Boolean.pack(self.IsDeleteModified))
        packet.append(uabin.Primitives.DateTime.pack(self.StartTime))
        packet.append(uabin.Primitives.DateTime.pack(self.EndTime))
        return b''.join(packet)

    @staticmethod
    def from_binary(data):
        """Alternate constructor: decode an instance from a binary stream."""
        return DeleteRawModifiedDetails(data)

    def _binary_init(self, data):
        # Decode fields in exactly the order to_binary() wrote them.
        self.NodeId = NodeId.from_binary(data)
        self.IsDeleteModified = uabin.Primitives.Boolean.unpack(data)
        self.StartTime = uabin.Primitives.DateTime.unpack(data)
        self.EndTime = uabin.Primitives.DateTime.unpack(data)

    def __str__(self):
        return 'DeleteRawModifiedDetails(' + 'NodeId:' + str(self.NodeId) + ', ' + 'IsDeleteModified:' + str(self.IsDeleteModified) + ', ' + 'StartTime:' + str(self.StartTime) + ', ' + 'EndTime:' + str(self.EndTime) + ')'

    __repr__ = __str__
:ivar NodeId: :vartype NodeId: NodeId :ivar IsDeleteModified: :vartype IsDeleteModified: Boolean :ivar StartTime: :vartype StartTime: DateTime :ivar EndTime: :vartype EndTime: DateTime
62598fa9a219f33f346c6770
class Command(BaseCommand):
    """Management command that purges expired remember tokens."""

    # Shown in the command's help output; fixed typo "then" -> "than".
    description = 'Delete remember tokens older than AUTH_REMEMBER_COOKIE_AGE'

    def handle(self, *args, **options):
        """Delegate the actual cleanup to the RememberToken manager."""
        RememberToken.objects.clean_remember_tokens()
Delete remember tokens older than AUTH_REMEMBER_COOKIE_AGE
62598fa9eab8aa0e5d30bce4
class Params:
    """Parameters for the simulation."""

    fps = 60                 # update/render rate in frames per second
    num_agents = 200         # number of simulated agents
    natural_frequency = 1    # per-agent natural frequency — units unclear; TODO confirm
    num_neighbors = 30       # neighbours considered per agent
    epsilon = 0.02           # small constant — presumably a coupling/step size; verify
parameters for simulation
62598fa9bd1bec0571e15070
class LDAPGroupRead(object):
    """Swagger model for an LDAP group read response.

    NOTE: auto-generated by the swagger code generator program.
    Do not edit the class manually.
    """

    # attribute name -> swagger type; drives the generic to_dict() below.
    swagger_types = {
        'name': 'str',
        'roles': 'list[Role]',
        'url': 'str',
        'can': 'dict(str, bool)'
    }

    # attribute name -> JSON key in the API payload.
    attribute_map = {
        'name': 'name',
        'roles': 'roles',
        'url': 'url',
        'can': 'can'
    }

    def __init__(self, name=None, roles=None, url=None, can=None):
        # Backing fields; populated via the property setters only when the
        # corresponding argument was actually supplied.
        self._name = None
        self._roles = None
        self._url = None
        self._can = None
        self.discriminator = None
        if name is not None:
            self.name = name
        if roles is not None:
            self.roles = roles
        if url is not None:
            self.url = url
        if can is not None:
            self.can = can

    @property
    def name(self):
        """Group name."""
        return self._name

    @name.setter
    def name(self, name):
        self._name = name

    @property
    def roles(self):
        """List of Role objects attached to the group."""
        return self._roles

    @roles.setter
    def roles(self, roles):
        self._roles = roles

    @property
    def url(self):
        """Resource URL of the group."""
        return self._url

    @url.setter
    def url(self, url):
        self._url = url

    @property
    def can(self):
        """Permission flags (operation name -> bool)."""
        return self._can

    @can.setter
    def can(self, can):
        self._can = can

    def to_dict(self):
        """Return the model's attributes as a plain dict, recursing into
        nested models via their own to_dict()."""
        result = {}
        for attr, _ in six.iteritems(self.swagger_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value
        return result

    def to_str(self):
        """Pretty-printed string form of to_dict()."""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        return self.to_str()

    def __eq__(self, other):
        # Equality by full attribute comparison against the same model type.
        if not isinstance(other, LDAPGroupRead):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not self == other
NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually.
62598fa9851cf427c66b821f
class CrawlError(IOError):
    """Raised when crawling a given URL fails."""
Error which is raised when crawling a given URL fails.
62598fa94428ac0f6e65847d
class Snapshot(Artifact):
    """A snapshot of an Artifact, used by VersionedArtifact.

    Stores a full copy of the artifact's data plus author/version metadata;
    subclasses must implement original() to locate the live artifact.
    """

    class __mongometa__:
        session = artifact_orm_session
        name = 'artifact_snapshot'
        unique_indexes = [('artifact_class', 'artifact_id', 'version')]
        indexes = [
            ('artifact_id', 'version'),
            ('artifact_id', '_id')
        ]

    _id = FieldProperty(S.ObjectId)
    artifact_id = FieldProperty(S.ObjectId)      # the artifact this snapshot belongs to
    artifact_class = FieldProperty(str)          # class name used to disambiguate ids
    version = FieldProperty(S.Int, if_missing=0)
    author = FieldProperty({
        'id': S.ObjectId,
        'username': str,
        'display_name': str,
        'logged_ip': str
    })
    timestamp = FieldProperty(datetime)
    data = FieldProperty(None)                   # free-form copy of the artifact's data

    @property
    def title_s(self):
        """Human-readable title; None when the original no longer exists."""
        original = self.original()
        if original:
            return 'Version {} of {}'.format(self.version, original.title_s)

    def index(self, text_objects=None, **kwargs):
        """Build the search-index document, layered on the original's index."""
        if text_objects is None:
            text_objects = []
        index = {}
        original = self.original()
        original_text = []
        if original:
            index = original.index()
            # Keep the original's free text separate so it can be merged into
            # text_objects below rather than overwritten by update().
            original_text = index.pop('text')
        index.update({
            'id': self.index_id(),
            'version_i': self.version,
            'author_username_t': self.author.username,
            'author_display_name_t': self.author.display_name,
            'timestamp_dt': self.timestamp,
            'is_history_b': True
        })
        index.update(kwargs)
        index['text_objects'] = text_objects + [original_text] + [
            self.author.username,
            self.author.display_name,
        ]
        return super(Snapshot, self).index(**index)

    def is_current(self):
        """True when no later snapshot exists for the same artifact."""
        return not bool(self.__class__.query.get(
            version=self.version + 1,
            artifact_id=self.artifact_id,
            artifact_class=self.artifact_class
        ))

    def original(self):
        """Return the live artifact this snapshot was taken from.

        BUG FIX: the original code did ``raise NotImplemented('original')``;
        NotImplemented is a sentinel value, not an exception, so that line
        itself raised TypeError. NotImplementedError is the correct exception.
        """
        raise NotImplementedError('original')

    def get_link_content(self):
        return None

    def shorthand_id(self):
        """Original's shorthand id suffixed with this snapshot's version."""
        return '%s#%s' % (self.original().shorthand_id(), self.version)

    @property
    def author_user(self):
        """The User who made this snapshot, or None if author is unset."""
        if self.author:
            return User.query.get(_id=self.author.id)

    def __getattr__(self, name):
        # Fall through to the snapshotted data for unknown attributes.
        return getattr(self.data, name)
A snapshot of an :class:`Artifact <vulcanforge.artifact.model.Artifact>`, used in :class:`VersionedArtifact <vulcanforge.artifact.model.VersionedArtifact>`
62598fa93317a56b869be4f7
class Image:
    """Base class for Tk images; wraps the Tcl 'image' command family."""

    # Counter used to synthesize unique default image names.
    _last_id = 0

    def __init__(self, imgtype, name=None, cnf={}, master=None, **kw):
        self.name = None
        if not master:
            master = _default_root
            if not master:
                # No root window exists yet, so there is no Tcl interpreter.
                raise RuntimeError('Too early to create image')
        self.tk = getattr(master, 'tk', master)
        if not name:
            Image._last_id += 1
            name = "pyimage%r" % (Image._last_id,)
            # Avoid a leading '-', which Tcl would parse as an option flag.
            if name[0] == '-': name = '_' + name[1:]
        # Merge keyword options into the cnf dict (kw wins when both given).
        if kw and cnf: cnf = _cnfmerge((cnf, kw))
        elif kw: cnf = kw
        options = ()
        for k, v in cnf.items():
            if callable(v):
                # Register Python callables as Tcl command names.
                v = self._register(v)
            options = options + ('-'+k, v)
        self.tk.call(('image', 'create', imgtype, name,) + options)
        self.name = name

    def __str__(self): return self.name

    def __del__(self):
        if self.name:
            try:
                self.tk.call('image', 'delete', self.name)
            except TclError:
                # The interpreter may already be gone at shutdown.
                pass

    def __setitem__(self, key, value):
        # image configure -key value
        self.tk.call(self.name, 'configure', '-'+key, value)

    def __getitem__(self, key):
        return self.tk.call(self.name, 'configure', '-'+key)

    def configure(self, **kw):
        """Configure the image with keyword options (trailing '_' stripped)."""
        res = ()
        for k, v in _cnfmerge(kw).items():
            if v is not None:
                # 'file_' style names shadow Python keywords; strip the '_'.
                if k[-1] == '_': k = k[:-1]
                if callable(v):
                    v = self._register(v)
                res = res + ('-'+k, v)
        self.tk.call((self.name, 'config') + res)

    config = configure

    def height(self):
        """Return the image height in pixels."""
        return self.tk.getint(
            self.tk.call('image', 'height', self.name))

    def type(self):
        """Return the Tk image type (e.g. 'photo' or 'bitmap')."""
        return self.tk.call('image', 'type', self.name)

    def width(self):
        """Return the image width in pixels."""
        return self.tk.getint(
            self.tk.call('image', 'width', self.name))
Base class for images.
62598fa9cc0a2c111447af6a
class TestFiFI(unittest.TestCase):
    """Tests SSN generation in the fi_FI locale."""

    def setUp(self):
        # Seeded faker for reproducibility, plus the provider class itself
        # so its private helpers can be exercised directly.
        self.fake = Faker('fi_FI')
        Faker.seed(0)
        self.provider = fi_Provider

    def test_century_code(self):
        """Century marker: '-' for 1900s, 'A' for 2000s, '+' for 1800s."""
        assert self.provider._get_century_code(1900) == '-'
        assert self.provider._get_century_code(1999) == '-'
        assert self.provider._get_century_code(2000) == 'A'
        assert self.provider._get_century_code(2999) == 'A'
        assert self.provider._get_century_code(1800) == '+'
        assert self.provider._get_century_code(1899) == '+'
        # Years outside 1800-2999 are rejected.
        with pytest.raises(ValueError):
            self.provider._get_century_code(1799)
        with pytest.raises(ValueError):
            self.provider._get_century_code(3000)

    def test_ssn_sanity(self):
        """ssn() must not raise for any adjacent (min_age, max_age) pair."""
        for age in range(100):
            self.fake.ssn(min_age=age, max_age=age + 1)

    def test_valid_ssn(self):
        """Non-artificial SSNs carry an individual number of at most 899."""
        ssn = self.fake.ssn(artificial=False)
        individual_number = int(ssn[7:10])
        assert individual_number <= 899

    def test_artifical_ssn(self):
        """Artificial SSNs carry an individual number of 900 or above."""
        ssn = self.fake.ssn(artificial=True)
        individual_number = int(ssn[7:10])
        assert individual_number >= 900

    def test_vat_id(self):
        """VAT ids match 'FI' followed by exactly eight digits."""
        for _ in range(100):
            assert re.search(r'^FI\d{8}$', self.fake.vat_id())
Tests SSN in the fi_FI locale
62598fa93d592f4c4edbae26
class GPVirtualTableDomain(CoClass):
    """COM coclass used for filtering out the virtual tables.

    Registration metadata only — presumably generated from a type library
    by comtypes; behaviour lives in the COM server, not here.
    """

    # CLSID under which this coclass is registered.
    _reg_clsid_ = GUID('{BE355340-B5A9-4432-B2F4-497A50E05115}')
    _idlflags_ = []
    _typelib_path_ = typelib_path
    # (type library GUID, major version, minor version)
    _reg_typelib_ = ('{C031A050-82C6-4F8F-8836-5692631CFFE6}', 10, 2)
Object for filtering out the virtual tables.
62598fa9be383301e0253753
class ResponseWithRetrieveSerializerMixin:
    """Always respond with the 'retrieve' serializer, falling back to
    ``serializer_class`` when no per-action serializer is registered in
    ``serializer_action_classes``."""

    def response(self, instance, status, headers=None):
        """Build a Response for *instance* rendered via the retrieve serializer."""
        serializer_cls = self.get_serializer_class(action='retrieve')
        serializer = serializer_cls(instance, context=self.get_serializer_context())
        return Response(serializer.data, status=status, headers=headers)

    def get_serializer_class(self, action=None):
        """Return the serializer class for *action* (default: current action)."""
        chosen = self.action if action is None else action
        mapping = getattr(self, 'serializer_action_classes', {})
        if chosen in mapping:
            return mapping[chosen]
        # No per-action override registered: defer to the normal lookup.
        return super().get_serializer_class()
Always response with 'retrieve' serializer or fallback to `serializer_class`. Usage: class MyViewSet(DefaultModelViewSet): serializer_class = MyDefaultSerializer serializer_action_classes = { 'list': MyListSerializer, 'my_action': MyActionSerializer, } @action def my_action: ... 'my_action' request will be validated with MyActionSerializer, but response will be serialized with MyDefaultSerializer (or 'retrieve' if provided). Thanks gonz: http://stackoverflow.com/a/22922156/11440
62598fa945492302aabfc42b
class OrderInfo(Object):
    """Order information: name, phone, e-mail and shipping address.

    ``shipping_address`` may be None; it is itself decoded via Object.read.
    """

    ID = "orderInfo"

    def __init__(self, name, phone_number, email_address, shipping_address, **kwargs):
        # NOTE(review): extra keyword arguments are accepted but ignored —
        # confirm that is intentional.
        self.name = name
        self.phone_number = phone_number
        self.email_address = email_address
        self.shipping_address = shipping_address

    @staticmethod
    def read(q: dict, *args) -> "OrderInfo":
        """Build an OrderInfo from the raw dict *q* (extra args unused)."""
        return OrderInfo(
            q.get('name'),
            q.get('phone_number'),
            q.get('email_address'),
            Object.read(q.get('shipping_address')),
        )
Order information Attributes: ID (:obj:`str`): ``OrderInfo`` Args: name (:obj:`str`): Name of the user phone_number (:obj:`str`): Phone number of the user email_address (:obj:`str`): Email address of the user shipping_address (:class:`telegram.api.types.address`): Shipping address for this order; may be null Returns: OrderInfo Raises: :class:`telegram.Error`
62598fa9435de62698e9bd50
class EmailVisionClient(object):
    """EmailVision SOAP client wrapper."""

    def __init__(self):
        # SOAP client built from the module-level WSDL URL.
        self.client = Client(WSDL_URL)

    def create_request(self, mailing_name):
        """Build a 'sendRequest' object for *mailing_name*.

        Template id and random tag come from the module-level TEMPLATES and
        RANDOM_TAGS mappings; the send date is the current local time.
        """
        req = self.client.factory.create('sendRequest')
        req.notificationId = TEMPLATES[mailing_name]
        req.random = RANDOM_TAGS[mailing_name]
        req.senddate = strftime("%Y-%m-%dT%H:%M:%S")
        req.synchrotype = 'NOTHING'
        # Recipients are keyed by e-mail address.
        req.uidkey = 'email'
        return req

    def send(self, request):
        """Send *request* via the SOAP service and return the raw response."""
        logging.info("Sending SOAP request to EmailVision...")
        return self.client.service.sendObject(request)
EmailVision SOAP Client
62598fa9f548e778e596b4fe
class PlainTextParser(ParserBase):
    """Extracts IP addresses from plain-text and gzipped plain-text files."""

    @staticmethod
    def is_gz_file(filepath):
        """True when the file at *filepath* starts with the gzip magic 1f8b."""
        with open(filepath, 'rb') as test_f:
            return binascii.hexlify(test_f.read(2)) == b'1f8b'

    def parse_file(self, file_entry, is_stream=False):
        """Feed every line of *file_entry* (path or text stream) to check_ips.

        Gzip input is detected via the 1f8b magic and decompressed on the fly.
        """
        if not is_stream:
            # file_entry is a filesystem path.
            file_data = GzipFile(filename=file_entry) if self.is_gz_file(file_entry) else open(file_entry, 'rb')
        else:
            # file_entry is a text stream wrapping a binary buffer (e.g. stdin).
            two_bytes = file_entry.buffer.read(2)
            # BUG FIX: the original did `two_bytes.read(2)` in the else-branch,
            # but two_bytes is already the bytes just read (bytes has no
            # .read), which raised AttributeError. Use the bytes as-is.
            two_bytes = two_bytes.encode() if isinstance(two_bytes, str) else two_bytes
            file_entry.seek(0)
            file_data = GzipFile(fileobj=file_entry) if binascii.hexlify(two_bytes) == b'1f8b' else file_entry.buffer
        for raw_line in file_data:
            line = raw_line if isinstance(raw_line, str) else raw_line.decode()
            self.check_ips(line)
        # Close anything we may have opened ourselves.
        if 'closed' in dir(file_data) and not file_data.closed:
            file_data.close()
Class to extract IP addresses from plain text and gzipped plain text files.
62598fa9be8e80087fbbefbd
class APIRouterV21(nova.api.openstack.APIRouterV21):
    """Routes requests on the OpenStack API to the appropriate controller
    and method."""

    def __init__(self, init_only=None):
        # The registry must exist before the base router initialises, since
        # _register_extension (below) writes into it.
        self._loaded_extension_info = extension_info.LoadedExtensionInfo()
        super(APIRouterV21, self).__init__(init_only)

    def _register_extension(self, ext):
        # Record a discovered extension in the registry.
        return self.loaded_extension_info.register_extension(ext.obj)

    @property
    def loaded_extension_info(self):
        """Read-only access to the loaded-extension registry."""
        return self._loaded_extension_info
Routes requests on the OpenStack API to the appropriate controller and method.
62598fa956ac1b37e6302146
class OntologyTerm(object):
    """Represents an ontology term.

    Parameters
    ----------
    term_id : str
        Identifier of the term.
    term_name : str
        Human-readable name.
    attrs : dict, optional
        Maps attribute name to a list of string values.
    """

    def __init__(self, term_id, term_name, attrs=None):
        self.id = term_id
        self.name = term_name
        # BUG FIX: the original default 'attrs={}' was a single dict shared by
        # every instance created without attrs; use a fresh dict instead.
        self.attrs = {} if attrs is None else attrs

    def __str__(self):
        """Render the term as an OBO-style '[Term]' stanza."""
        # join() instead of repeated '+=' — linear instead of quadratic.
        lines = ["[Term]", "id: " + self.id, "name: " + self.name]
        for key, values in self.attrs.items():
            lines.extend(key + ": " + value for value in values)
        return "\n".join(lines) + "\n"

    def __repr__(self):
        return "OntologyTerm(id = " + self.id + ", name = " + self.name + ")"
Represents ontology term.
62598fa930bbd72246469925
class CycleCache(object):
    """A simple circular cache.

    Items are always appended at the end regardless of the current reference
    position. Reads walk negative indices from -1 (newest) towards -len
    (oldest) and wrap back to -1 when they run off the start.
    """

    def __init__(self, size):
        object.__init__(self)
        self._list = list()
        self._cpos = -1     # current (negative) read position
        self._size = size   # maximum number of cached items

    def __len__(self):
        return len(self._list)

    def NextIndex(self):
        """Return the read position after the current one, wrapping to -1."""
        candidate = self._cpos - 1
        return candidate if abs(candidate) <= len(self._list) else -1

    def Clear(self):
        """Drop every cached item (the read position is left unchanged)."""
        self._list = list()

    def GetCurrentSize(self):
        """Number of items currently cached."""
        return len(self._list)

    def GetNext(self):
        """Return the item at the current position and advance; None if empty."""
        if not self._list:
            return None
        item = self._list[self._cpos]
        self._cpos = self.NextIndex()
        return item

    def PeekNext(self):
        """Return the current item without advancing.

        Returns None when the cache is empty or the position has reached
        the oldest slot.
        """
        if abs(self._cpos) < len(self._list):
            return self._list[self._cpos]
        return None

    def PeekPrev(self):
        """Return the item just before the current position, wrapping to the
        oldest slot; None when the cache is empty or out of range."""
        idx = self._cpos + 1
        if idx == 0:
            idx = -len(self._list)
        count = len(self._list)
        return self._list[idx] if count and abs(idx) <= count else None

    def PutItem(self, item):
        """Append *item*, evicting the oldest entry once the cache is full."""
        if self._list and len(self._list) == self._size:
            del self._list[0]
        self._list.append(item)

    def Reset(self):
        """Move the read position back to the newest item."""
        self._cpos = -1
A simple circular cache. All items are added to the end of the cache regardless of the current reference position. As items are accessed from the cache the cache reference pointer is incremeneted, if it passes the end it will go back to the beginning.
62598fa91f037a2d8b9e4047
class dg:
    """Decigram unit marker (name 'DeciGram', symbol 'dg')."""

    def __init__(self):
        # Human-readable unit name and its short symbol.
        self.name: str = "DeciGram"
        self.unit: str = "dg"

    def __str__(self) -> str:
        return f"{self.unit}"

    def __repr__(self) -> str:
        return f"dg(name={self.name}, unit={self.unit})"
Decigram class
62598fa901c39578d7f12cda
class _TkVar(RO.AddCallback.TkVarMixin):
    """Container for a Tkinter Variable with a slightly nicer interface
    (Tkinter variables already support callbacks).

    Inputs:
    - var: the Tkinter variable to wrap; a new StringVar if None
    - callFunc: callback invoked with the variable's current value
    - callNow: if true, invoke callFunc immediately
    """

    def __init__(self,
        var=None,
        callFunc=None,
        callNow=False,
    ):
        # FIX: identity test ('is None') instead of '== None'; equality can be
        # overridden by the object and is the wrong way to detect the sentinel.
        if var is None:
            var = Tkinter.StringVar()
        RO.AddCallback.TkVarMixin.__init__(self, var, callFunc, callNow)

    def _doCallbacks(self, *args):
        # Extra positional args from the variable trace are ignored;
        # callbacks receive only the current value.
        val = self._var.get()
        self._basicDoCallbacks(val)

    def getVar(self):
        """Return the underlying Tkinter variable."""
        return self._var

    def get(self):
        """Return the variable's current value."""
        return self._var.get()

    def set(self, val):
        """Set the variable's value."""
        self._var.set(val)
A container for a Tkinter Variable; basically provides a slightly nicer interface (since they already support callbacks).
62598fa910dbd63aa1c70b0d
class MRWordCount(MRJob):
    """Trivial job that returns the number of words in each input file."""

    def mapper(self, _, line):
        # Emit one count per word, keyed by the file the line came from
        # (the word itself is intentionally discarded).
        for word in WORD_RE.findall(line):
            yield (jobconf_from_env("mapreduce.map.input.file"), 1)

    def reducer(self, name, counts):
        # Total word count for one input file.
        yield (name, sum(counts))

    def combiner(self, name, counts):
        # Map-side pre-aggregation; the counter tracks combiner invocations.
        self.increment_counter('count', 'combiners', 1)
        yield name, sum(counts)
Trivial Job that returns the number of words in each input file
62598fa91b99ca400228f4dd
class PathHash(object):
    """Encodes a stack trace as a compact string.

    A stack of (filename, line) tuples such as [(fn1, 25), (fn2, 30)] is
    encoded as 'fn1:25->fn2:30'; since a filename could itself contain the
    separator, filenames are first hashed to ints via StringHash, giving
    e.g. '0:25->1:30' (with STRING_SPLIT/LINE_SPLIT as the separators).
    """

    def __init__(self):
        self._string_hash = StringHash()
        self._current_path = ''

    def set_path(self, path):
        # Replace the accumulated path wholesale.
        self._current_path = path

    def get_path(self, fn, ln):
        """Append (fn, ln) to the stored path and return the result."""
        self._current_path = self.append(fn, ln)
        return self._current_path

    def append(self, fn, ln):
        """Return the current path extended with (fn, ln); does not mutate."""
        if self._current_path:
            return self._current_path + STRING_SPLIT + self._encode(fn, ln)
        return self._encode(fn, ln)

    def _encode(self, fn, ln):
        # 'hash(fn):ln' — hashing makes the filename separator-safe.
        return str(self._string_hash.hash(fn)) + LINE_SPLIT + str(ln)

    def _decode(self, string):
        # Inverse of _encode for one frame.
        hash, ln = string.split(LINE_SPLIT)
        return self._string_hash.unhash(int(hash)), int(ln)

    @staticmethod
    def get_indent(path):
        """Depth of the encoded path, i.e. the number of frames."""
        if path:
            return len(path.split(STRING_SPLIT))
        return 0

    def get_code(self, path):
        # NOTE(review): this unhashes the *second* field of the last frame,
        # implying callers (see get_stacklines_path) store hash(code) in the
        # line-number slot rather than a line number — confirm.
        last = path.rpartition(STRING_SPLIT)[-1]
        return self._string_hash.unhash(int(last.split(LINE_SPLIT, 1)[1]))

    def get_last_fn_ln(self, string):
        """Decode the final frame of *string* back to (filename, number)."""
        last = string.rpartition(STRING_SPLIT)[-1]
        return self._decode(last)

    def get_stacklines_path(self, stack_lines, index):
        """Rebuild the encoded path leading to stack_lines[index].

        Walks backwards collecting one frame per indent level, then encodes
        them oldest-first. Frames store (filename, hash(code)) pairs.
        """
        self.set_path('')
        path = []
        while index >= 0:
            path.append(stack_lines[index].code)
            current_indent = stack_lines[index].indent
            # Skip back to the parent frame (indent exactly one less).
            while index >= 0 and stack_lines[index].indent != current_indent - 1:
                index -= 1
        for code_line in reversed(path):
            self._current_path = self.append(code_line.filename, self._string_hash.hash(code_line.code))
        return self._current_path
Used for encoding the stacktrace. A stacktrace can be seen by a list of tuples (filename and linenumber): e.g. [(fn1, 25), (fn2, 30)] this is encoded as a string: encoded = 'fn1:25->fn2->30' However, the filename could possibly contain '->', therefore the filename is hashed into a number. So, the encoding becomes: encoded = '0:25->1:30'
62598fa90a50d4780f705338
class WormStatistics(object):
    """Placeholder for per-feature worm statistics (formerly seg_worm.stats).

    Currently an empty stub: construction does nothing and initObject is
    intended to populate statistics from experiment/control histograms.
    """

    def __init__(self):
        # Nothing to initialise yet.
        pass

    def initObject(self, exp_hist, ctl_hist, p_t):
        """Stub — accepts experiment/control histograms and a p-value, does nothing."""
        return None
Notes -------------------- Formerly: seg_worm.stats Some of the statistics are aggegrate: - p_value - q_value List of exclusive features: properties #TODO: Move to object that both hist and stats display # #ALSO: We need two, one for experiment and one for controls #Definitions in: seg_worm.stats.hist field name short_name units feature_category hist_type motion_type data_type #New properties #------------------------------------------------------------------- p_normal_experiment p_normal_control q_normal_experiment q_normal_control z_score_experiment #From documentation: #- no controls, this is empty #- absent in controls, but 2+ experiments, Inf #- present in 2+ controls, -Inf #Technically, this is incorrect # z_score_control = 0 #By definition ... p_t #Differential expression ... # - function: mattest (bioinformatics toolbox) # This doesn't seem like it is used ... p_w = NaN #NaN Default value, if all videos have a valid value #then this is not set #NOTE: For the following, the corrections can be per strain or #across strains. I think the current implementation is per strain. #I'd like to go with the values used in the paper ... q_t #In the old code corrections were per strain or across all strains. q_w #In the old code corrections were per strain or across all strains. #Current implementation is per strain, not across strains ... p_significance #pTValue #pWValue #qTValue #qWValue #------------------------------------------------------------------- # z_score #not populated if no controls are provided ... # mean #mean of the mean hist values # std #std of the hist values # n_samples ## of videos where the mean is not NaN # p_normal = NaN #probability of being a normal distribution # # # # seg_worm.fex.swtest(data(i).dataMeans, 0.05, 0) # q_normal #
62598fa93539df3088ecc20e
class Get(JobCommandBase):
    """Handles all operations for the GET command."""

    def latest_build_info(self, input, kwargs):
        """Fetch latest build details for a package from the build hub.

        Returns ({'builds': builds}, {task_subject: task_log}).
        """
        task_subject = "Latest Build Details"
        task_log = OrderedDict()
        # Prefer the downstream package name when one is configured.
        package = input.get('pkg_downstream_name') or input.get('package')
        builds = self.api_resources.build_info(
            hub_url=input.get('hub_url'),
            tag=input.get('build_tag'),
            pkg=package
        )
        if len(builds) > 0:
            task_log.update(self._log_task(input['log_f'], task_subject, str(builds[0])))
        else:
            task_log.update(self._log_task(
                input['log_f'], task_subject,
                'No build details found for %s.' % input.get('build_tag')
            ))
        return {'builds': builds}, {task_subject: task_log}

    def task_info(self, input, kwargs):
        """Fetch info and result for the task id in kwargs; log both.

        Returns ({'task': task_result}, {task_subject: task_log}).
        """
        task_subject = "Task Details"
        task_log = OrderedDict()
        task_info = self.api_resources.task_info(
            hub_url=input.get('hub_url'),
            task_id=kwargs.get('task_id')
        )
        task_result = self.api_resources.task_result(
            hub_url=input.get('hub_url'),
            task_id=kwargs.get('task_id')
        )
        # A mismatched id means the hub did not know the requested task.
        if kwargs.get('task_id') != task_info.get('id'):
            task_log.update(self._log_task(
                input['log_f'], task_subject,
                'No task info found for id %s.' % kwargs.get('task_id')
            ))
        else:
            task_log.update(self._log_task(
                input['log_f'], task_subject,
                str({**task_info, **task_result})
            ))
        # Propagate the id into the result payload when available.
        if task_info.get('id'):
            task_result.update(dict(task_id=task_info['id']))
        return {'task': task_result}, {task_subject: task_log}
Handles all operations for GET Command
62598fa9a8370b77170f0336
class IntervalTimer(Thread):
    """Thread that repeatedly calls *function* every *interval* seconds.

    A non-negative *iteration* limits the number of calls (0 means none);
    a negative value repeats until cancel() is invoked:

        t = IntervalTimer(30.0, f, args=None, kwargs=None)
        t.start()
        t.cancel()   # stop the timer's action if it's still waiting
    """

    def __init__(self, interval, function, iteration=-1, args=None, kwargs=None):
        super().__init__()
        self.interval = interval
        self.function = function
        self.iteration = iteration               # remaining calls; negative = unlimited
        self.args = [] if args is None else args
        self.kwargs = {} if kwargs is None else kwargs
        self.finished = Event()                  # set by cancel() to stop the loop

    def cancel(self):
        """Stop the timer; no further calls will be made."""
        self.finished.set()

    def run(self):
        while True:
            # Interruptible sleep: returns early if cancel() fires.
            self.finished.wait(self.interval)
            if self.iteration == 0 or self.finished.is_set():
                break
            self.function(*self.args, **self.kwargs)
            if self.iteration > 0:
                self.iteration -= 1
Call a function after a specified number of seconds: t = Timer(30.0, f, args=None, kwargs=None) t.start() t.cancel() # stop the timer's action if it's still waiting
62598fa999fddb7c1ca62d96
class Timer:
    """Context manager that times a code section and prints the duration.

    Example:
        with Timer("Timing code section") as t:
            do_stuff()
    """

    def __init__(self, description: str):
        self.description = description

    def __enter__(self):
        self.start = time.time()
        # FIX: return self so "with Timer(...) as t" binds the timer instead
        # of None (the original returned nothing from __enter__).
        return self

    def __exit__(self, exc_type, value, traceback):
        self.end = time.time()
        print(f"{self.description}.\n\tCompleted in {self.end - self.start:.4f} seconds\n")
Context manager to time code section. .. example:: with Timer("Timing code section"): do_stuff()
62598fa99c8ee8231304011e
class ProteinPosition(MapPosition): <NEW_LINE> <INDENT> def __init__(self, ppos, index=None, **kwargs): <NEW_LINE> <INDENT> MapPosition.__init__(self, ppos, index) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if isinstance(other, int): <NEW_LINE> <INDENT> return self.pos == other <NEW_LINE> <DEDENT> return isinstance(other, ProteinPosition) and self.pos == other.pos
Protein position for coordinate mapping
62598fa97047854f4633f334
class TestMagratheaCliBase(TestCase): <NEW_LINE> <INDENT> def test_01(self): <NEW_LINE> <INDENT> obj = BaseCommand() <NEW_LINE> self.assertIsInstance(obj, BaseCommand) <NEW_LINE> <DEDENT> def test_02(self): <NEW_LINE> <INDENT> obj = BaseCommand() <NEW_LINE> with self.assertRaises(NotImplementedError): <NEW_LINE> <INDENT> obj.handle()
Unit tests for :py:mod:`magrathea.cli.base`
62598fa930dc7b766599f7a9
class type(object): <NEW_LINE> <INDENT> def mro(self, *args, **kwargs): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __call__(self, *args, **kwargs): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __delattr__(self, *args, **kwargs): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __dir__(self, *args, **kwargs): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __getattribute__(self, *args, **kwargs): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __init__(cls, what, bases=None, dict=None): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __instancecheck__(self, *args, **kwargs): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def __new__(*args, **kwargs): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __prepare__(self): <NEW_LINE> <INDENT> return {} <NEW_LINE> <DEDENT> def __repr__(self, *args, **kwargs): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __setattr__(self, *args, **kwargs): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __sizeof__(self, *args, **kwargs): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __subclasscheck__(self, *args, **kwargs): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __subclasses__(self, *args, **kwargs): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> __abstractmethods__ = property(lambda self: object(), lambda self, v: None, lambda self: None) <NEW_LINE> __bases__ = ( object, ) <NEW_LINE> __base__ = object <NEW_LINE> __basicsize__ = 864 <NEW_LINE> __dictoffset__ = 264 <NEW_LINE> __dict__ = None <NEW_LINE> __flags__ = 2148291584 <NEW_LINE> __itemsize__ = 40 <NEW_LINE> __mro__ = ( None, object, ) <NEW_LINE> __name__ = 'type' <NEW_LINE> __qualname__ = 'type' <NEW_LINE> __text_signature__ = None <NEW_LINE> __weakrefoffset__ = 368
type(object_or_name, bases, dict) type(object) -> the object's type type(name, bases, dict) -> a new type
62598fa92ae34c7f260ab03d
class app: <NEW_LINE> <INDENT> def __init__(self,a,b): <NEW_LINE> <INDENT> self.a=a <NEW_LINE> self.b=b <NEW_LINE> <DEDENT> def fun1(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self.a+self.b <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> <DEDENT> def fun2(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self.a*self.b <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> return None
App calss
62598fa9851cf427c66b8220
class NopClaim(object): <NEW_LINE> <INDENT> @property <NEW_LINE> def disk_gb(self): <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> @property <NEW_LINE> def memory_mb(self): <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> @property <NEW_LINE> def vcpus(self): <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> def __enter__(self): <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> def __exit__(self, exc_type, exc_val, exc_tb): <NEW_LINE> <INDENT> if exc_type is not None: <NEW_LINE> <INDENT> self.abort() <NEW_LINE> <DEDENT> <DEDENT> def abort(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "[Claim: %d MB memory, %d GB disk, %d VCPUS]" % (self.memory_mb, self.disk_gb, self.vcpus)
For use with compute drivers that do not support resource tracking
62598fa9435de62698e9bd52
class InstitutionNodeList(JSONAPIBaseView, generics.ListAPIView, InstitutionMixin, NodesFilterMixin): <NEW_LINE> <INDENT> permission_classes = ( drf_permissions.IsAuthenticatedOrReadOnly, base_permissions.TokenHasScope, ) <NEW_LINE> required_read_scopes = [CoreScopes.INSTITUTION_READ, CoreScopes.NODE_BASE_READ] <NEW_LINE> required_write_scopes = [CoreScopes.NULL] <NEW_LINE> model_class = Node <NEW_LINE> serializer_class = NodeSerializer <NEW_LINE> view_category = 'institutions' <NEW_LINE> view_name = 'institution-nodes' <NEW_LINE> ordering = ('-modified', ) <NEW_LINE> def get_default_queryset(self): <NEW_LINE> <INDENT> institution = self.get_institution() <NEW_LINE> return ( institution.nodes.filter(is_public=True, is_deleted=False, type='osf.node') .select_related('node_license', 'preprint_file') .include('contributor__user__guids', 'root__guids', 'tags', limit_includes=10) .annotate(region=F('addons_osfstorage_node_settings__region___id')) ) <NEW_LINE> <DEDENT> def get_queryset(self): <NEW_LINE> <INDENT> if self.request.version < '2.2': <NEW_LINE> <INDENT> return self.get_queryset_from_request().get_roots() <NEW_LINE> <DEDENT> return self.get_queryset_from_request()
The documentation for this endpoint can be found [here](https://developer.osf.io/#operation/institutions_node_list).
62598fa9a17c0f6771d5c191
class Distribution(object): <NEW_LINE> <INDENT> r <NEW_LINE> has_rsample = False <NEW_LINE> has_enumerate_support = False <NEW_LINE> def __init__(self, batch_shape=torch.Size(), event_shape=torch.Size()): <NEW_LINE> <INDENT> self._batch_shape = batch_shape <NEW_LINE> self._event_shape = event_shape <NEW_LINE> <DEDENT> def sample(self, sample_shape=torch.Size()): <NEW_LINE> <INDENT> z = self.rsample(sample_shape) <NEW_LINE> return z.detach() if hasattr(z, 'detach') else z <NEW_LINE> <DEDENT> def rsample(self, sample_shape=torch.Size()): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def sample_n(self, n): <NEW_LINE> <INDENT> warnings.warn('sample_n will be deprecated. Use .sample((n,)) instead', UserWarning) <NEW_LINE> return self.sample(torch.Size((n,))) <NEW_LINE> <DEDENT> def log_prob(self, value): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def enumerate_support(self): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def entropy(self): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def _extended_shape(self, sample_shape=()): <NEW_LINE> <INDENT> shape = sample_shape + self._batch_shape + self._event_shape <NEW_LINE> if not shape: <NEW_LINE> <INDENT> shape = torch.Size((1,)) <NEW_LINE> <DEDENT> return shape <NEW_LINE> <DEDENT> def _validate_log_prob_arg(self, value): <NEW_LINE> <INDENT> if not (torch.is_tensor(value) or isinstance(value, Variable)): <NEW_LINE> <INDENT> raise ValueError('The value argument to log_prob must be a Tensor or Variable instance.') <NEW_LINE> <DEDENT> event_dim_start = len(value.size()) - len(self._event_shape) <NEW_LINE> if value.size()[event_dim_start:] != self._event_shape: <NEW_LINE> <INDENT> raise ValueError('The right-most size of value must match event_shape: {} vs {}.'. 
format(value.size(), self._event_shape)) <NEW_LINE> <DEDENT> actual_shape = value.size() <NEW_LINE> expected_shape = self._batch_shape + self._event_shape <NEW_LINE> for i, j in zip(reversed(actual_shape), reversed(expected_shape)): <NEW_LINE> <INDENT> if i != 1 and j != 1 and i != j: <NEW_LINE> <INDENT> raise ValueError('Value is not broadcastable with batch_shape+event_shape: {} vs {}.'. format(actual_shape, expected_shape))
Distribution is the abstract base class for probability distributions.
62598fa9be8e80087fbbefbf
class EntityMissing(KeyError, Passthrough): <NEW_LINE> <INDENT> pass
Signal that a required entity does not exist in our schema.
62598fa985dfad0860cbfa22
class Restaurant(models.Model): <NEW_LINE> <INDENT> name = models.CharField(blank = False, max_length = 60, verbose_name = u"名称") <NEW_LINE> phone1 = models.CharField(blank = False, max_length = 60, verbose_name = u"电话1") <NEW_LINE> phone2 = models.CharField(blank = True, null = True, max_length = 60, verbose_name = u"电话2") <NEW_LINE> phone3 = models.CharField(blank = True, null = True, max_length = 60, verbose_name = u"电话3") <NEW_LINE> address = models.CharField(blank = False, max_length = 300, verbose_name = u"联系地址") <NEW_LINE> tips = models.TextField(blank = False, max_length = 1024, verbose_name = u"友情提示") <NEW_LINE> add_timestamp = models.IntegerField(default = (lambda: int(time.time())), editable = False) <NEW_LINE> order_number = models.IntegerField(default = 0, editable = False, verbose_name = u"被订次数") <NEW_LINE> max_person_everyday = models.IntegerField(default = 0, verbose_name = u"最多可订餐人数") <NEW_LINE> last_order_timestamp = models.IntegerField(default = 0, editable = False) <NEW_LINE> order_number_today= models.IntegerField(default = 0, editable = False, verbose_name = u"今日订餐人数") <NEW_LINE> class Meta: <NEW_LINE> <INDENT> app_label = "mealing" <NEW_LINE> verbose_name = u"餐厅" <NEW_LINE> verbose_name_plural = u"餐厅" <NEW_LINE> <DEDENT> def __unicode__(self): <NEW_LINE> <INDENT> return self.name <NEW_LINE> <DEDENT> def readable_add_timestamp(self): <NEW_LINE> <INDENT> return time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(self.add_timestamp)) <NEW_LINE> <DEDENT> readable_add_timestamp.short_description = u"添加时间" <NEW_LINE> def get_menu_num(self): <NEW_LINE> <INDENT> return Menu.objects.filter(restaurant = self.id).count() <NEW_LINE> <DEDENT> get_menu_num.short_description = u"菜单数" <NEW_LINE> def add_order_number(self, number = 1): <NEW_LINE> <INDENT> last_order_date = datetime.datetime(1, 1, 1).fromtimestamp(self.last_order_timestamp) <NEW_LINE> if is_today(last_order_date): <NEW_LINE> <INDENT> self.order_number += number <NEW_LINE> self.order_number_today 
+= number <NEW_LINE> self.last_order_timestamp = int(time.time()) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.order_number += number <NEW_LINE> self.order_number_today = number <NEW_LINE> self.last_order_timestamp = int(time.time()) <NEW_LINE> <DEDENT> <DEDENT> def get_avg_price(self): <NEW_LINE> <INDENT> menus = Menu.objects.filter(restaurant = self) <NEW_LINE> if len(menus) is 0: <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> total_price = 0 <NEW_LINE> for menu in menus: <NEW_LINE> <INDENT> total_price += menu.price <NEW_LINE> <DEDENT> return total_price / len(menus) <NEW_LINE> <DEDENT> get_avg_price.short_description= u"平均价格"
A Restaurant object #create some restaurants >>> r1 = Restaurant(name = u"test1", phone1 = u"12222", address = u"good", tips = u"test") >>> r1.save() >>> print r1 test1 >>> r1.add_timestamp = 0 >>> print r1.readable_add_timestamp() 1970-01-01 08:00:00 >>> print r1.get_menu_num() 0 >>> r1.add_order_number(10) >>> r1.save() >>> print r1.order_number 10 >>> print r1.get_avg_price() 0 >>> menu = Menu.objects.create(name = "menu1", price = 20, restaurant = r1) >>> print r1.get_avg_price() 20
62598fa96e29344779b005b8
class ExtensionsPool(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.extensions = {} <NEW_LINE> <DEDENT> def register_extension(self, ext, show=True): <NEW_LINE> <INDENT> self.extensions[ext.name] = dict(cls=ext, show=show) <NEW_LINE> <DEDENT> def get_extension(self, name): <NEW_LINE> <INDENT> if name not in self.extensions: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> if "instance" not in self.extensions[name]: <NEW_LINE> <INDENT> self.extensions[name]["instance"] = self.extensions[name]["cls"]() <NEW_LINE> <DEDENT> return self.extensions[name]["instance"] <NEW_LINE> <DEDENT> def get_extension_infos(self, name): <NEW_LINE> <INDENT> instance = self.get_extension(name) <NEW_LINE> if instance is None: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> return instance.infos() <NEW_LINE> <DEDENT> def load_extension(self, name): <NEW_LINE> <INDENT> __import__(name, locals(), globals(), ["modo_extension"]) <NEW_LINE> extinstance = self.get_extension(name) <NEW_LINE> if extinstance is None: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> result = None <NEW_LINE> try: <NEW_LINE> <INDENT> baseurl = ( extinstance.url if extinstance.url is not None else name ) <NEW_LINE> result = ( r'^%s/' % (baseurl), include("{0}.urls".format(name), namespace=name) ) <NEW_LINE> <DEDENT> except ImportError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> extinstance.load() <NEW_LINE> return result <NEW_LINE> <DEDENT> def load_all(self): <NEW_LINE> <INDENT> result = [] <NEW_LINE> for ext in settings.MODOBOA_APPS: <NEW_LINE> <INDENT> ext_urls = self.load_extension(ext) <NEW_LINE> if ext_urls is not None: <NEW_LINE> <INDENT> result += [ext_urls] <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def list_all(self): <NEW_LINE> <INDENT> result = [] <NEW_LINE> for extname, extdef in self.extensions.iteritems(): <NEW_LINE> <INDENT> if not extdef["show"]: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> infos = self.get_extension_infos(extname) 
<NEW_LINE> infos["id"] = extname <NEW_LINE> result += [infos] <NEW_LINE> <DEDENT> return sorted(result, key=lambda i: i["name"])
The extensions manager
62598fa966673b3332c30327
class Configurator(object): <NEW_LINE> <INDENT> def __init__(self, app): <NEW_LINE> <INDENT> self.app = app <NEW_LINE> <DEDENT> def update_flask_config(self, conf): <NEW_LINE> <INDENT> self.app.config.update(conf) <NEW_LINE> <DEDENT> def register_blueprints(self, blueprints): <NEW_LINE> <INDENT> for name, conf in blueprints.items(): <NEW_LINE> <INDENT> module, _, func = conf.pop('factory').rpartition('.') <NEW_LINE> module = importlib.import_module(module) <NEW_LINE> factory = getattr(module, func) <NEW_LINE> bp = factory(name, conf) <NEW_LINE> self.app.register_blueprint(bp) <NEW_LINE> <DEDENT> <DEDENT> def register_services(self, services): <NEW_LINE> <INDENT> registered_services = self.app.config.get('SERVICES', {}) <NEW_LINE> registered_services.update(services) <NEW_LINE> self.app.config['SERVICES'] = registered_services <NEW_LINE> <DEDENT> def register_healthchecks(self, healthchecks): <NEW_LINE> <INDENT> registered_healthchecks = self.app.config.get('HEALTHCHECKS', {}) <NEW_LINE> registered_healthchecks.update(healthchecks) <NEW_LINE> self.app.config['HEALTHCHECKS'] = registered_healthchecks <NEW_LINE> <DEDENT> def from_yaml(self, yaml_path, silent=False): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> conf = yaml.safe_load(open(yaml_path)) <NEW_LINE> <DEDENT> except IOError as e: <NEW_LINE> <INDENT> if silent: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> e.strerror = 'Unable to load configuration file (%s)' % e.strerror <NEW_LINE> raise <NEW_LINE> <DEDENT> self.update_flask_config(conf.get('flask', {})) <NEW_LINE> self.register_blueprints(conf.get('blueprints', {})) <NEW_LINE> self.register_services(conf.get('services', {})) <NEW_LINE> self.register_healthchecks(conf.get('healthchecks', {})) <NEW_LINE> <DEDENT> def from_envvar(self, envvar, silent=False): <NEW_LINE> <INDENT> rv = os.environ.get(envvar) <NEW_LINE> if not rv: <NEW_LINE> <INDENT> if silent: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> raise RuntimeError('The environment variable 
%r is not set ' 'and as such configuration could not be ' 'loaded. Set this variable and make it ' 'point to a configuration file' % envvar) <NEW_LINE> <DEDENT> return self.from_yaml(rv, silent=silent)
Provides basic configuration within Moxie. Currently we handle our configuration through `YAML <http://yaml.org>`_ files.
62598fa93539df3088ecc20f
@add_metaclass(ABCMeta) <NEW_LINE> class NetworkResponse(object): <NEW_LINE> <INDENT> @abstractmethod <NEW_LINE> def json(self): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> @abstractproperty <NEW_LINE> def content(self): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> @abstractproperty <NEW_LINE> def status_code(self): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> @abstractproperty <NEW_LINE> def ok(self): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> @abstractproperty <NEW_LINE> def headers(self): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> @abstractproperty <NEW_LINE> def response_as_stream(self): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> @abstractproperty <NEW_LINE> def access_token_used(self): <NEW_LINE> <INDENT> raise NotImplementedError
Abstract base class specifying the interface for a network response.
62598fa9aad79263cf42e731
class TestConf(unittest.TestCase): <NEW_LINE> <INDENT> def test_defaults_syntax(self): <NEW_LINE> <INDENT> with open(CONF_FILES['defaults']) as fhandle: <NEW_LINE> <INDENT> fcontent = fhandle.read() <NEW_LINE> <DEDENT> interpreted = json.loads(fcontent) <NEW_LINE> self.assertEqual(to_json(interpreted), fcontent) <NEW_LINE> <DEDENT> def test_harmonization_syntax(self): <NEW_LINE> <INDENT> with open(CONF_FILES['harmonization']) as fhandle: <NEW_LINE> <INDENT> fcontent = fhandle.read() <NEW_LINE> <DEDENT> interpreted = json.loads(fcontent) <NEW_LINE> self.assertEqual(to_json(interpreted), fcontent) <NEW_LINE> <DEDENT> def test_pipeline_syntax(self): <NEW_LINE> <INDENT> with open(CONF_FILES['pipeline']) as fhandle: <NEW_LINE> <INDENT> fcontent = fhandle.read() <NEW_LINE> <DEDENT> interpreted = json.loads(fcontent) <NEW_LINE> self.assertEqual(to_json(interpreted), fcontent) <NEW_LINE> <DEDENT> def test_runtime_syntax(self): <NEW_LINE> <INDENT> with open(CONF_FILES['runtime']) as fhandle: <NEW_LINE> <INDENT> fcontent = fhandle.read() <NEW_LINE> <DEDENT> interpreted = json.loads(fcontent) <NEW_LINE> self.assertEqual(to_json(interpreted), fcontent) <NEW_LINE> <DEDENT> def test_startup_syntax(self): <NEW_LINE> <INDENT> with open(CONF_FILES['startup']) as fhandle: <NEW_LINE> <INDENT> fcontent = fhandle.read() <NEW_LINE> <DEDENT> interpreted = json.loads(fcontent) <NEW_LINE> self.assertEqual(to_json(interpreted), fcontent) <NEW_LINE> <DEDENT> def test_system_syntax(self): <NEW_LINE> <INDENT> with open(CONF_FILES['system']) as fhandle: <NEW_LINE> <INDENT> fcontent = fhandle.read() <NEW_LINE> <DEDENT> interpreted = json.loads(fcontent) <NEW_LINE> self.assertEqual(to_json(interpreted), fcontent) <NEW_LINE> <DEDENT> def test_BOTS_syntax(self): <NEW_LINE> <INDENT> with open(pkg_resources.resource_filename('intelmq', 'bots/BOTS')) as fhandle: <NEW_LINE> <INDENT> fcontent = fhandle.read() <NEW_LINE> <DEDENT> interpreted = json.loads(fcontent) <NEW_LINE> 
self.assertEqual(to_json(interpreted), fcontent)
A TestCase for configutation files.
62598fa97d43ff24874273b0
class MusicError(FrontException): <NEW_LINE> <INDENT> pass
Exception raised upon encountering an error in the music domain.
62598fa976e4537e8c3ef50a
class TestV1HTTPHeader(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def testV1HTTPHeader(self): <NEW_LINE> <INDENT> pass
V1HTTPHeader unit test stubs
62598fa94527f215b58e9e3f
class ProxyRateLimitModelBackend(RateLimitModelBackend): <NEW_LINE> <INDENT> def get_ip(self, request): <NEW_LINE> <INDENT> return request.META["HTTP_X_FORWARDED_FOR"]
A rate limiting Backend that works behind a proxy.
62598fa99c8ee8231304011f
class ocni_server(object): <NEW_LINE> <INDENT> operationQuery = url_mapper.rest_controller(QueryDispatcher) <NEW_LINE> operationSingleEntity = url_mapper.rest_controller(SingleEntityDispatcher) <NEW_LINE> operationMultiEntity = url_mapper.rest_controller(MultiEntityDispatcher) <NEW_LINE> app = url_mapper.Router() <NEW_LINE> app.add_route('/-/', controller=operationQuery) <NEW_LINE> app.add_route('/{location}/', controller=operationMultiEntity) <NEW_LINE> app.add_route('/{location}/{idontknow}/', controller=operationMultiEntity) <NEW_LINE> app.add_route('/{location}/{idontknow}/{idontcare}/', controller=operationMultiEntity) <NEW_LINE> app.add_route('/{location}/{idontcare}', controller=operationSingleEntity) <NEW_LINE> app.add_route('/{location}/{idontknow}/{idontcare}', controller=operationSingleEntity) <NEW_LINE> def run_server(self): <NEW_LINE> <INDENT> print ("\n______________________________________________________________________________________\n" "The OCNI server is running at: " + config.OCNI_IP + ":" + config.OCNI_PORT) <NEW_LINE> wsgi.server(eventlet.listen((config.OCNI_IP, int(config.OCNI_PORT))), self.app) <NEW_LINE> print ("\n______________________________________________________________________________________\n" "Closing correctly PyOCNI server ")
The main OCNI REST server
62598fa9925a0f43d25e7f9c
class Triggered(Modifier): <NEW_LINE> <INDENT> points_per_rank_modifier = points.Points_Flat_Modifier(1) <NEW_LINE> modifier_needs_rank = True <NEW_LINE> modifier_name = "Triggered" <NEW_LINE> modifier_list_type = False <NEW_LINE> flat_modifier = True <NEW_LINE> def __init__(self, power, rank, starting_rank=0): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.link_modifier_flat_with_rank(starting_rank, rank, power) <NEW_LINE> <DEDENT> def when_applied(self, power): <NEW_LINE> <INDENT> self.when_applied_stored_in_extras(power) <NEW_LINE> <DEDENT> def when_removed(self, power): <NEW_LINE> <INDENT> self.when_removed_stored_in_extras(power)
You can “set” an instant duration effect with this modifier to activate under particular circumstances, such as in re196 Mutants & Master Mutants & Mastermminds D Deluxe Hero’s Handbook eluxe Hero’s Handbook Chapter 6: Powers Chapter 6: Powers sponse to a particular danger, after a set amount of time, in response to a particular event, and so forth—chosen when you apply the modifier. Once chosen, the trigger cannot be changed. The circumstances must be detectable by your senses. You can acquire Senses Limited and Linked to Triggered effects, if desired. Setting the effect requires the same action as using it normally. A Triggered effect lying in wait may be detected with a Perception check (DC 10 + effect rank) and in some cases disarmed with a successful skill or power check (such as Sleight of Hand, Technology, Nullify or another countering effect) with a DC of (10 + effect rank). A Triggered effect is good for one use per rank in this modifier. After its last activation, it stops working. You can apply an additional rank of Triggered to have a Variable Trigger, allowing you to change the effect’s trigger each time you set it.
62598fa9a219f33f346c6774
class UserProfile(AbstractBaseUser, PermissionsMixin): <NEW_LINE> <INDENT> email = models.EmailField(max_length=255,unique=True) <NEW_LINE> name = models.CharField(max_length=255) <NEW_LINE> is_activate = models.BooleanField(default=True) <NEW_LINE> is_staff= models.BooleanField(default=False) <NEW_LINE> objects = UserProfileManager() <NEW_LINE> USERNAME_FIELD ='email' <NEW_LINE> REQUIRED_FIELDS=['name'] <NEW_LINE> def get_full_name(self): <NEW_LINE> <INDENT> return self.name <NEW_LINE> <DEDENT> def get_short_name(self): <NEW_LINE> <INDENT> return self.name <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.email
Represent a "user profile" inside our system.
62598fa97c178a314d78d3fa
class IterableLengthMismatch(Exception): <NEW_LINE> <INDENT> pass
Custom error for equizip.
62598fa92ae34c7f260ab03f
class CustomerSingleResource(BaseResource): <NEW_LINE> <INDENT> def on_get(self, req, resp, id): <NEW_LINE> <INDENT> dbsession = self.db.session <NEW_LINE> customer = dbsession.query(Customer).filter(Customer.id == id).first() <NEW_LINE> if customer is None: <NEW_LINE> <INDENT> raise falcon.HTTPNotFound() <NEW_LINE> <DEDENT> resp.json = dict( id=customer.id, name=customer.name, dob=customer.dob.strftime('%Y-%m-%d') ) <NEW_LINE> dbsession.close() <NEW_LINE> <DEDENT> @falcon.before(validator, CustomerSchema(strict=True)) <NEW_LINE> def on_put(self, req, resp, id): <NEW_LINE> <INDENT> dbsession = self.db.session <NEW_LINE> customer = dbsession.query(Customer).filter(Customer.id == id).first() <NEW_LINE> if customer is None: <NEW_LINE> <INDENT> raise falcon.HTTPNotFound() <NEW_LINE> <DEDENT> customer.name = req.get_json('name') <NEW_LINE> customer.dob = req.get_json('dob') <NEW_LINE> if len(dbsession.dirty) > 0: <NEW_LINE> <INDENT> dbsession.commit() <NEW_LINE> <DEDENT> resp.json = dict( id=customer.id, name=customer.name, dob=customer.dob.strftime('%Y-%d-%m') ) <NEW_LINE> dbsession.close() <NEW_LINE> <DEDENT> def on_delete(self, req, resq, id): <NEW_LINE> <INDENT> dbsession = self.db.session <NEW_LINE> customer = dbsession.query(Customer).filter(Customer.id == id).first() <NEW_LINE> if customer is None: <NEW_LINE> <INDENT> raise falcon.HTTPNotFound() <NEW_LINE> <DEDENT> dbsession.delete(customer) <NEW_LINE> dbsession.commit() <NEW_LINE> dbsession.close()
CustomerSingleResource class handles the enpoints for single customers
62598fa93617ad0b5ee060b1
class DebtManager(models.Manager): <NEW_LINE> <INDENT> def create_debt(self, new_data, user): <NEW_LINE> <INDENT> data = new_data.cleaned_data <NEW_LINE> debt = self.model( user = data['user'], title = data['title'], amount = data['amount'], date_of_loan = data['date_of_loan'], date_of_repayment = data['date_of_repayment'], description = data['description'], accepted = True ) <NEW_LINE> debt.save() <NEW_LINE> return debt
class DebtManager - interfejs opowiedzialny za dostarczanie zapytań bazy danych do modelu Debts
62598fa9eab8aa0e5d30bce8
class MembraneRoleManager(BasePlugin, Cacheable): <NEW_LINE> <INDENT> meta_type = 'Membrane Role Manager' <NEW_LINE> security = ClassSecurityInfo() <NEW_LINE> implements(IMembraneRoleManagerPlugin) <NEW_LINE> def __init__(self, id, title=None): <NEW_LINE> <INDENT> self._id = self.id = id <NEW_LINE> self.title = title <NEW_LINE> <DEDENT> def getRolesForPrincipal(self, principal, request=None): <NEW_LINE> <INDENT> roles = {} <NEW_LINE> providers = findMembraneUserAspect( self, user_ifaces.IMembraneUserRoles, exact_getUserId=principal.getId()) <NEW_LINE> for provider in providers: <NEW_LINE> <INDENT> roles.update(dict.fromkeys( provider.getRolesForPrincipal(principal))) <NEW_LINE> <DEDENT> return tuple(roles.keys()) <NEW_LINE> <DEDENT> security.declarePrivate('getRolesForPrincipal')
PAS plugin for managing roles with Membrane.
62598fa9bd1bec0571e15072
@python_2_unicode_compatible <NEW_LINE> class LikeOfSong(TimeStampModel): <NEW_LINE> <INDENT> creator = models.ForeignKey(user_models.User, on_delete=models.PROTECT, null=True) <NEW_LINE> song = models.ForeignKey(Song, on_delete=models.PROTECT, null=True, related_name='like') <NEW_LINE> class Meta: <NEW_LINE> <INDENT> ordering = ['-created_at']
LikeOfSong Model
62598fa963b5f9789fe850c3
class ProductInfoViewSet(viewsets.ReadOnlyModelViewSet): <NEW_LINE> <INDENT> serializer_class = ProductInfoSerializer <NEW_LINE> filterset_fields = ('shop',) <NEW_LINE> ordering_fields = ('product', 'shop', 'quantity', 'price', 'price_rrc', 'id',) <NEW_LINE> search_fields = ('product__name', 'shop__name',) <NEW_LINE> ordering = ('product',) <NEW_LINE> def get_queryset(self): <NEW_LINE> <INDENT> query = Q(shop__state=True) <NEW_LINE> shop_id = self.request.query_params.get('shop_id') <NEW_LINE> category_id = self.request.query_params.get('category_id') <NEW_LINE> if shop_id: <NEW_LINE> <INDENT> query = query & Q(shop_id=shop_id) <NEW_LINE> <DEDENT> if category_id: <NEW_LINE> <INDENT> query = query & Q(product__category_id=category_id) <NEW_LINE> <DEDENT> return ProductInfo.objects.filter( query).select_related( 'shop', 'product__category').prefetch_related( 'product_parameters__parameter').distinct()
Поиск товаров
62598fa9796e427e5384e6f1
class FlatBottomRestraintForce(FlatBottomRestraintForceMixIn, RadiallySymmetricCentroidRestraintForce): <NEW_LINE> <INDENT> pass
A restraint between the centroids of two groups of atoms using a flat potential well with harmonic walls. An alternative choice to receptor-ligand restraints that uses a flat potential inside most of the protein volume with harmonic restraining walls outside of this. It can be used to prevent the ligand from drifting too far from protein in implicit solvent calculations while still exploring the surface of the protein for putative binding sites. The restraint is applied between the centroids of two groups of atoms that belong to the receptor and the ligand respectively. The centroids are determined by a mass-weighted average of the group particles positions. More precisely, the energy expression of the restraint is given by ``E = controlling_parameter * step(r-r0) * (K/2)*(r-r0)^2`` where ``K`` is the spring constant, ``r`` is the distance between the restrained atoms, ``r0`` is another parameter defining the distance at which the restraint is imposed, and ``controlling_parameter`` is a scale factor that can be used to control the strength of the restraint. With OpenCL, only on 64bit platforms are supported. Parameters ---------- spring_constant : openmm.unit.Quantity The spring constant K (see energy expression above) in units compatible with joule/nanometer**2/mole. well_radius : openmm.unit.Quantity The distance r0 (see energy expression above) at which the harmonic restraint is imposed in units of distance. restrained_atom_indices1 : iterable of int The indices of the first group of atoms to restrain. restrained_atom_indices2 : iterable of int The indices of the second group of atoms to restrain. controlling_parameter_name : str, optional The name of the global parameter controlling the energy function. The default value is 'lambda_restraints'. Attributes ---------- spring_constant well_radius restrained_atom_indices1 restrained_atom_indices2 restraint_parameters controlling_parameter_name
62598fa956ac1b37e6302149
class MerchantTransaction(models.Model): <NEW_LINE> <INDENT> TRANSACTION_STATUS_CHOICE = ( ('success', 'Success'), ('failure', 'Failure'), ) <NEW_LINE> merchant_id = models.CharField(max_length = 20, verbose_name="Merchant id") <NEW_LINE> user = models.ForeignKey(User, verbose_name="User") <NEW_LINE> amount = models.DecimalField(max_digits=16, decimal_places=2) <NEW_LINE> gateway = models.CharField(max_length = 10, choices=GATEWAY_CHOICE, verbose_name="Gateway") <NEW_LINE> status = models.CharField(max_length = 10, choices=TRANSACTION_STATUS_CHOICE, verbose_name="Status") <NEW_LINE> message = models.TextField(blank=True, null=True, verbose_name="Message") <NEW_LINE> response = models.TextField(blank=True, null=True, verbose_name="Response") <NEW_LINE> timestamp = models.DateTimeField(auto_now_add=True, editable=False, verbose_name="Timestamp") <NEW_LINE> def __unicode__(self): <NEW_LINE> <INDENT> return self.merchant_id <NEW_LINE> <DEDENT> class Meta: <NEW_LINE> <INDENT> verbose_name = "Merchant transaction" <NEW_LINE> verbose_name_plural = "Merchant transactions"
merchant_id: Identifier of the Merchant used for this transaction user: User who made this transaction gateway: gateway used status: Status of the transaction message: Message returned by the Merchant response: Complete response of the Merchant
62598fa945492302aabfc42f
class InsuffcientPermissions(Exception): <NEW_LINE> <INDENT> def __init__(self, model, action): <NEW_LINE> <INDENT> self.model = model <NEW_LINE> self.action = action
Token error for permissions failure.
62598fa9a17c0f6771d5c193
class ClientMemo(object): <NEW_LINE> <INDENT> def __init__(self, id=0, client_id=0, notes=None, date=None, modified_date=None, created_date=None): <NEW_LINE> <INDENT> self.id = id <NEW_LINE> self.client_id = client_id <NEW_LINE> self.notes = notes <NEW_LINE> self.date = date <NEW_LINE> self.modified_date = dt.strptime( modified_date, DATE_INPUT_FORMAT) if modified_date else dt.now() <NEW_LINE> self.created_date = dt.strptime( created_date, DATE_INPUT_FORMAT) if created_date else dt.now() <NEW_LINE> <DEDENT> def set_notes(self, notes): <NEW_LINE> <INDENT> self.notes = notes <NEW_LINE> <DEDENT> def set_date(self, date): <NEW_LINE> <INDENT> self.date = date <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> return { 'id': self.id, 'client_id': self.client_id, 'notes': self.notes, 'date': self.date.strftime(DATE_ISO_FORMAT), }
Memo Data Shadowing AppletFormMemoEdit/Create Elements
62598fa9009cb60464d0147d
class TestContainerSerialiser(tests.TestCase): <NEW_LINE> <INDENT> def test_construct(self): <NEW_LINE> <INDENT> pack.ContainerSerialiser() <NEW_LINE> <DEDENT> def test_begin(self): <NEW_LINE> <INDENT> serialiser = pack.ContainerSerialiser() <NEW_LINE> self.assertEqual('Bazaar pack format 1 (introduced in 0.18)\n', serialiser.begin()) <NEW_LINE> <DEDENT> def test_end(self): <NEW_LINE> <INDENT> serialiser = pack.ContainerSerialiser() <NEW_LINE> self.assertEqual('E', serialiser.end()) <NEW_LINE> <DEDENT> def test_bytes_record_no_name(self): <NEW_LINE> <INDENT> serialiser = pack.ContainerSerialiser() <NEW_LINE> record = serialiser.bytes_record('bytes', []) <NEW_LINE> self.assertEqual('B5\n\nbytes', record) <NEW_LINE> <DEDENT> def test_bytes_record_one_name_with_one_part(self): <NEW_LINE> <INDENT> serialiser = pack.ContainerSerialiser() <NEW_LINE> record = serialiser.bytes_record('bytes', [('name',)]) <NEW_LINE> self.assertEqual('B5\nname\n\nbytes', record) <NEW_LINE> <DEDENT> def test_bytes_record_one_name_with_two_parts(self): <NEW_LINE> <INDENT> serialiser = pack.ContainerSerialiser() <NEW_LINE> record = serialiser.bytes_record('bytes', [('part1', 'part2')]) <NEW_LINE> self.assertEqual('B5\npart1\x00part2\n\nbytes', record) <NEW_LINE> <DEDENT> def test_bytes_record_two_names(self): <NEW_LINE> <INDENT> serialiser = pack.ContainerSerialiser() <NEW_LINE> record = serialiser.bytes_record('bytes', [('name1',), ('name2',)]) <NEW_LINE> self.assertEqual('B5\nname1\nname2\n\nbytes', record) <NEW_LINE> <DEDENT> def test_bytes_record_whitespace_in_name_part(self): <NEW_LINE> <INDENT> serialiser = pack.ContainerSerialiser() <NEW_LINE> self.assertRaises( errors.InvalidRecordError, serialiser.bytes_record, 'bytes', [('bad name',)]) <NEW_LINE> <DEDENT> def test_bytes_record_header(self): <NEW_LINE> <INDENT> serialiser = pack.ContainerSerialiser() <NEW_LINE> record = serialiser.bytes_header(32, [('name1',), ('name2',)]) <NEW_LINE> self.assertEqual('B32\nname1\nname2\n\n', record)
Tests for the ContainerSerialiser class.
62598fa956ac1b37e630214a
class TextRNN(Classifier): <NEW_LINE> <INDENT> def __init__(self, dataset, config): <NEW_LINE> <INDENT> super(TextRNN, self).__init__(dataset, config) <NEW_LINE> self.doc_embedding_type = config.TextRNN.doc_embedding_type <NEW_LINE> self.rnn = RNN( config.embedding.dimension, config.TextRNN.hidden_dimension, num_layers=config.TextRNN.num_layers, batch_first=True, bidirectional=config.TextRNN.bidirectional, rnn_type=config.TextRNN.rnn_type) <NEW_LINE> hidden_dimension = config.TextRNN.hidden_dimension <NEW_LINE> if config.TextRNN.bidirectional: <NEW_LINE> <INDENT> hidden_dimension *= 2 <NEW_LINE> <DEDENT> self.sum_attention = SumAttention(hidden_dimension, config.TextRNN.attention_dimension, config.device) <NEW_LINE> self.linear = torch.nn.Linear(hidden_dimension, len(dataset.label_map)) <NEW_LINE> self.dropout = torch.nn.Dropout(p=config.train.hidden_layer_dropout) <NEW_LINE> <DEDENT> def get_parameter_optimizer_dict(self): <NEW_LINE> <INDENT> params = super(TextRNN, self).get_parameter_optimizer_dict() <NEW_LINE> params.append({'params': self.rnn.parameters()}) <NEW_LINE> params.append({'params': self.linear.parameters()}) <NEW_LINE> if self.doc_embedding_type == DocEmbeddingType.ATTENTION: <NEW_LINE> <INDENT> params.append({'params': self.sum_attention.parameters()}) <NEW_LINE> <DEDENT> return params <NEW_LINE> <DEDENT> def update_lr(self, optimizer, epoch): <NEW_LINE> <INDENT> if epoch > self.config.train.num_epochs_static_embedding: <NEW_LINE> <INDENT> for param_group in optimizer.param_groups[:2]: <NEW_LINE> <INDENT> param_group["lr"] = self.config.optimizer.learning_rate <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> for param_group in optimizer.param_groups[:2]: <NEW_LINE> <INDENT> param_group["lr"] = 0.0 <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def forward(self, batch): <NEW_LINE> <INDENT> if self.config.feature.feature_names[0] == "token": <NEW_LINE> <INDENT> embedding = self.token_embedding( batch[cDataset.DOC_TOKEN].to(self.config.device)) 
<NEW_LINE> length = batch[cDataset.DOC_TOKEN_LEN].to(self.config.device) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> embedding = self.char_embedding( batch[cDataset.DOC_CHAR].to(self.config.device)) <NEW_LINE> length = batch[cDataset.DOC_CHAR_LEN].to(self.config.device) <NEW_LINE> <DEDENT> output, last_hidden = self.rnn(embedding, length) <NEW_LINE> if self.doc_embedding_type == DocEmbeddingType.AVG: <NEW_LINE> <INDENT> doc_embedding = torch.sum(output, 1) / length.unsqueeze(1) <NEW_LINE> <DEDENT> elif self.doc_embedding_type == DocEmbeddingType.ATTENTION: <NEW_LINE> <INDENT> doc_embedding = self.sum_attention(output) <NEW_LINE> <DEDENT> elif self.doc_embedding_type == DocEmbeddingType.LAST_HIDDEN: <NEW_LINE> <INDENT> doc_embedding = last_hidden <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise TypeError( "Unsupported rnn init type: %s. Supported rnn type is: %s" % ( self.doc_embedding_type, DocEmbeddingType.str())) <NEW_LINE> <DEDENT> return self.dropout(self.linear(doc_embedding))
Implement TextRNN, contains LSTM,BiLSTM,GRU,BiGRU Reference: "Effective LSTMs for Target-Dependent Sentiment Classification" "Bidirectional LSTM-CRF Models for Sequence Tagging" "Generative and discriminative text classification with recurrent neural networks"
62598fa955399d3f05626482
class _SelectorIOLoopIOHandle(nbio_interface.AbstractIOReference): <NEW_LINE> <INDENT> def __init__(self, subject): <NEW_LINE> <INDENT> self._cancel = subject.cancel <NEW_LINE> <DEDENT> def cancel(self): <NEW_LINE> <INDENT> return self._cancel()
This module's adaptation of `nbio_interface.AbstractIOReference`
62598fa9d7e4931a7ef3bff9
class VersionCommitError(VersionError): <NEW_LINE> <INDENT> pass
An invalid combination of versions was used in a version commit.
62598fa957b8e32f525080ca
class MitsubaRenderSettingsPanel(bpy.types.Panel): <NEW_LINE> <INDENT> bl_label = "Mitsuba Render settings" <NEW_LINE> bl_idname = "SCENE_PT_layout" <NEW_LINE> bl_space_type = 'PROPERTIES' <NEW_LINE> bl_region_type = 'WINDOW' <NEW_LINE> bl_context = "render" <NEW_LINE> COMPAT_ENGINES = {'Mitsuba2_Renderer'} <NEW_LINE> @classmethod <NEW_LINE> def poll(cls, context): <NEW_LINE> <INDENT> engine = context.scene.render.engine <NEW_LINE> if engine != 'Mitsuba2_Renderer': <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> def draw(self, context): <NEW_LINE> <INDENT> engine = context.scene.render.engine <NEW_LINE> if engine != 'Mitsuba2_Renderer': <NEW_LINE> <INDENT> bpy.utils.unregister_class(MitsubaRenderSettingsPanel) <NEW_LINE> <DEDENT> layout = self.layout <NEW_LINE> scene = context.scene <NEW_LINE> layout.label(text="Output folder path") <NEW_LINE> row = layout.row() <NEW_LINE> row.prop(scene, "exportpath") <NEW_LINE> layout.label(text="Environment Map") <NEW_LINE> row = layout.row() <NEW_LINE> row.prop(scene,"environmentmaptpath") <NEW_LINE> layout.label(text="Environment map scale:") <NEW_LINE> row = layout.row() <NEW_LINE> row.prop(scene, "environmentmapscale") <NEW_LINE> layout.label(text="Frame settings:") <NEW_LINE> row = layout.row() <NEW_LINE> row.prop(scene, "batch_frame_start") <NEW_LINE> row.prop(scene, "batch_frame_end") <NEW_LINE> layout.label(text="Resolution:") <NEW_LINE> row = layout.row() <NEW_LINE> row.prop(scene, "resolution_x") <NEW_LINE> row.prop(scene, "resolution_y") <NEW_LINE> row = layout.row() <NEW_LINE> row.prop(scene,"spp") <NEW_LINE> layout.label(text="Depth of field:") <NEW_LINE> row = layout.row() <NEW_LINE> row.prop(scene,"dofLookAt") <NEW_LINE> row = layout.row() <NEW_LINE> row.prop(scene, "lensradius") <NEW_LINE> layout.label(text="Integrator settings:") <NEW_LINE> row = layout.row() <NEW_LINE> row.prop(scene,"integrators") <NEW_LINE> if scene.integrators == 'path': 
<NEW_LINE> <INDENT> row = layout.row() <NEW_LINE> row.prop(scene,"path_integrator_hide_emitters") <NEW_LINE> row = layout.row() <NEW_LINE> row.prop(scene,"path_integrator_max_depth") <NEW_LINE> row.prop(scene,"path_integrator_rr_depth") <NEW_LINE> <DEDENT> if scene.integrators == 'volpathsimple': <NEW_LINE> <INDENT> row = layout.row() <NEW_LINE> row.prop(scene,"path_integrator_max_depth") <NEW_LINE> row.prop(scene,"path_integrator_rr_depth") <NEW_LINE> <DEDENT> if scene.integrators == 'volpath': <NEW_LINE> <INDENT> row = layout.row() <NEW_LINE> row.prop(scene,"path_integrator_max_depth") <NEW_LINE> row.prop(scene,"path_integrator_rr_depth") <NEW_LINE> <DEDENT> if scene.integrators == 'direct': <NEW_LINE> <INDENT> row = layout.row() <NEW_LINE> row.prop(scene,"path_integrator_hide_emitters") <NEW_LINE> row = layout.row() <NEW_LINE> row.prop(scene,"direct_integrator_emitter_samples") <NEW_LINE> row = layout.row() <NEW_LINE> row.prop(scene,"direct_integrator_bsdf_samples") <NEW_LINE> <DEDENT> layout.label(text="Export:") <NEW_LINE> row = layout.row() <NEW_LINE> layout.operator("scene.export", icon='MESH_CUBE', text="Export scene")
Creates a Mitsuba settings panel in the render context of the properties editor
62598fa923849d37ff851013
class listNode: <NEW_LINE> <INDENT> def __init__(self, data, next = None): <NEW_LINE> <INDENT> self.__data = data <NEW_LINE> self.__next = next <NEW_LINE> <DEDENT> def getData(self): <NEW_LINE> <INDENT> return self.__data <NEW_LINE> <DEDENT> def setData(self, data): <NEW_LINE> <INDENT> self.__data = data <NEW_LINE> <DEDENT> def getNext(self): <NEW_LINE> <INDENT> return self.__next <NEW_LINE> <DEDENT> def setNext(self, next): <NEW_LINE> <INDENT> self.__next = next
Node class
62598fa94f88993c371f04b9
@benchmark.Enabled('linux', 'mac', 'win', 'chromeos') <NEW_LINE> class PowerPPSControlDisabled(perf_benchmark.PerfBenchmark): <NEW_LINE> <INDENT> test = power.QuiescentPower <NEW_LINE> page_set = page_sets.PluginPowerSaverPageSet <NEW_LINE> options = {'pageset_repeat': 5} <NEW_LINE> def SetExtraBrowserOptions(self, options): <NEW_LINE> <INDENT> options.AppendExtraBrowserArgs(['--disable-plugin-power-saver']) <NEW_LINE> options.full_performance_mode = False <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def Name(cls): <NEW_LINE> <INDENT> return 'power.pps_control_disabled'
A single page with a small-ish non-essential plugin. In this test, Plugin Power Saver (PPS) is disabled, so the plugin should continue animating and taking power.
62598fa9a8370b77170f033a
class IFrontEndFactory(Interface): <NEW_LINE> <INDENT> def __call__(engine=None, history=None): <NEW_LINE> <INDENT> pass
Factory interface for frontends.
62598fa9097d151d1a2c0f88
class ItemBehaviorDefinitionResource(object): <NEW_LINE> <INDENT> swagger_types = { 'behavior': 'Behavior', 'modifiable': 'bool', 'required': 'bool' } <NEW_LINE> attribute_map = { 'behavior': 'behavior', 'modifiable': 'modifiable', 'required': 'required' } <NEW_LINE> def __init__(self, behavior=None, modifiable=None, required=None): <NEW_LINE> <INDENT> self._behavior = None <NEW_LINE> self._modifiable = None <NEW_LINE> self._required = None <NEW_LINE> self.discriminator = None <NEW_LINE> self.behavior = behavior <NEW_LINE> self.modifiable = modifiable <NEW_LINE> self.required = required <NEW_LINE> <DEDENT> @property <NEW_LINE> def behavior(self): <NEW_LINE> <INDENT> return self._behavior <NEW_LINE> <DEDENT> @behavior.setter <NEW_LINE> def behavior(self, behavior): <NEW_LINE> <INDENT> if behavior is None: <NEW_LINE> <INDENT> raise ValueError("Invalid value for `behavior`, must not be `None`") <NEW_LINE> <DEDENT> self._behavior = behavior <NEW_LINE> <DEDENT> @property <NEW_LINE> def modifiable(self): <NEW_LINE> <INDENT> return self._modifiable <NEW_LINE> <DEDENT> @modifiable.setter <NEW_LINE> def modifiable(self, modifiable): <NEW_LINE> <INDENT> if modifiable is None: <NEW_LINE> <INDENT> raise ValueError("Invalid value for `modifiable`, must not be `None`") <NEW_LINE> <DEDENT> self._modifiable = modifiable <NEW_LINE> <DEDENT> @property <NEW_LINE> def required(self): <NEW_LINE> <INDENT> return self._required <NEW_LINE> <DEDENT> @required.setter <NEW_LINE> def required(self, required): <NEW_LINE> <INDENT> if required is None: <NEW_LINE> <INDENT> raise ValueError("Invalid value for `required`, must not be `None`") <NEW_LINE> <DEDENT> self._required = required <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in iteritems(self.swagger_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, 
value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, ItemBehaviorDefinitionResource): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other
NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually.
62598fa95fc7496912d48232
class GameClass(): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> pygame.init() <NEW_LINE> self.screen = pygame.display.set_mode(config.screen_res) <NEW_LINE> self.clock = pygame.time.Clock() <NEW_LINE> self.inputHandler = InputHandler() <NEW_LINE> self.allSprites = pygame.sprite.RenderUpdates() <NEW_LINE> self.playerList = [] <NEW_LINE> self.constructLevel(config.level) <NEW_LINE> for i in range(0, config.num_players): <NEW_LINE> <INDENT> newPlayer = Player(i, self.inputHandler, self.screen, self.allSprites) <NEW_LINE> self.playerList.append(newPlayer) <NEW_LINE> self.allSprites.add(newPlayer.rocket) <NEW_LINE> <DEDENT> <DEDENT> def start(self): <NEW_LINE> <INDENT> while True: <NEW_LINE> <INDENT> for event in pygame.event.get(): <NEW_LINE> <INDENT> if event.type == pygame.QUIT: <NEW_LINE> <INDENT> exit() <NEW_LINE> <DEDENT> <DEDENT> self.clock.tick() <NEW_LINE> deltaTime = self.clock.get_time() <NEW_LINE> self.screen.fill(config.background_color) <NEW_LINE> self.update(deltaTime) <NEW_LINE> if(len(self.playerList) == 1): <NEW_LINE> <INDENT> print(f"Player {self.playerList[0].id+1} wins!") <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def update(self, deltaTime): <NEW_LINE> <INDENT> self.inputHandler.update() <NEW_LINE> self.allSprites.update(deltaTime) <NEW_LINE> self.allSprites.draw(self.screen) <NEW_LINE> for player in self.playerList: <NEW_LINE> <INDENT> player.update() <NEW_LINE> if player.dead: <NEW_LINE> <INDENT> self.playerList.remove(player) <NEW_LINE> <DEDENT> <DEDENT> pygame.display.update() <NEW_LINE> <DEDENT> def constructLevel(self, input): <NEW_LINE> <INDENT> lines = input.split("\n")[1:-1] <NEW_LINE> blockHeight = int(config.screen_res[1] / len(lines)) <NEW_LINE> blockWidth = int(config.screen_res[0] / len(lines[0])) <NEW_LINE> terrainColor = choice(config.terrain_colors) <NEW_LINE> for x in range(0, len(lines[0])): <NEW_LINE> <INDENT> for y in range(0, len(lines)): <NEW_LINE> <INDENT> if lines[y][x] == "@": <NEW_LINE> <INDENT> 
self.allSprites.add(Terrain((x*blockWidth, y*blockHeight), blockWidth, blockHeight, terrainColor, self.allSprites ) ) <NEW_LINE> <DEDENT> elif lines[y][x] == "#": <NEW_LINE> <INDENT> self.allSprites.add(FuelPlatform((x*blockWidth, y*blockHeight), blockWidth, blockHeight*.5, config.fuelplatform_color, self.allSprites ) )
The game class for the Mayhem game. Handles pygame initialization and the game loop
62598fa97047854f4633f338
class WebTestGeolocation(msrest.serialization.Model): <NEW_LINE> <INDENT> _attribute_map = { 'location': {'key': 'Id', 'type': 'str'}, } <NEW_LINE> def __init__( self, *, location: Optional[str] = None, **kwargs ): <NEW_LINE> <INDENT> super(WebTestGeolocation, self).__init__(**kwargs) <NEW_LINE> self.location = location
Geo-physical location to run a web test from. You must specify one or more locations for the test to run from. :ivar location: Location ID for the webtest to run from. :vartype location: str
62598fa9fff4ab517ebcd744
class TLETest(unittest.TestCase): <NEW_LINE> <INDENT> def check_example(self, tle): <NEW_LINE> <INDENT> self.assertEqual(tle.satnumber, "25544") <NEW_LINE> self.assertEqual(tle.classification, "U") <NEW_LINE> self.assertEqual(tle.id_launch_year, "98") <NEW_LINE> self.assertEqual(tle.id_launch_number, "067") <NEW_LINE> self.assertEqual(tle.id_launch_piece.strip(), "A") <NEW_LINE> self.assertEqual(tle.epoch_year, "08") <NEW_LINE> self.assertEqual(tle.epoch_day, 264.51782528) <NEW_LINE> epoch = (datetime.datetime(2008, 1, 1) + datetime.timedelta(days=264.51782528 - 1)) <NEW_LINE> self.assertEqual(tle.epoch, epoch) <NEW_LINE> self.assertEqual(tle.mean_motion_derivative, -.00002182) <NEW_LINE> self.assertEqual(tle.mean_motion_sec_derivative, 0.0) <NEW_LINE> self.assertEqual(tle.bstar, -.11606e-4) <NEW_LINE> self.assertEqual(tle.ephemeris_type, 0) <NEW_LINE> self.assertEqual(tle.element_number, 292) <NEW_LINE> self.assertEqual(tle.inclination, 51.6416) <NEW_LINE> self.assertEqual(tle.right_ascension, 247.4627) <NEW_LINE> self.assertEqual(tle.excentricity, .0006703) <NEW_LINE> self.assertEqual(tle.arg_perigee, 130.5360) <NEW_LINE> self.assertEqual(tle.mean_anomaly, 325.0288) <NEW_LINE> self.assertEqual(tle.mean_motion, 15.72125391) <NEW_LINE> self.assertEqual(tle.orbit, 56353) <NEW_LINE> <DEDENT> def test_from_line(self): <NEW_LINE> <INDENT> tle = Tle("ISS (ZARYA)", line1=line1, line2=line2) <NEW_LINE> self.check_example(tle) <NEW_LINE> <DEDENT> def test_from_file(self): <NEW_LINE> <INDENT> from tempfile import mkstemp <NEW_LINE> from os import write, close, remove <NEW_LINE> filehandle, filename = mkstemp() <NEW_LINE> try: <NEW_LINE> <INDENT> write(filehandle, "\n".join([line0, line1, line2]).encode('utf-8')) <NEW_LINE> close(filehandle) <NEW_LINE> tle = Tle("ISS (ZARYA)", filename) <NEW_LINE> self.check_example(tle) <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> remove(filename)
Test TLE reading. We're using the wikipedia example:: ISS (ZARYA) 1 25544U 98067A 08264.51782528 -.00002182 00000-0 -11606-4 0 2927 2 25544 51.6416 247.4627 0006703 130.5360 325.0288 15.72125391563537
62598fa9aad79263cf42e734
class UserActivityUpdateSerializer(serializers.Serializer): <NEW_LINE> <INDENT> uuid = serializers.UUIDField() <NEW_LINE> name = serializers.CharField(required=False) <NEW_LINE> is_significant_activity = serializers.BooleanField(required=False) <NEW_LINE> is_negative_activity = serializers.BooleanField(required=False) <NEW_LINE> is_all_day_activity = serializers.BooleanField(required=False) <NEW_LINE> def update(self, instance, validated_data): <NEW_LINE> <INDENT> instance.name = validated_data.get('name', instance.name) <NEW_LINE> instance.is_significant_activity = validated_data.get('is_significant_activity', instance.is_significant_activity) <NEW_LINE> instance.is_negative_activity = validated_data.get('is_negative_activity', instance.is_negative_activity) <NEW_LINE> instance.is_all_day_activity = validated_data.get('is_all_day_activity', instance.is_all_day_activity) <NEW_LINE> instance.save() <NEW_LINE> return instance
The create and update serializers "could" be combined, but I rather be explicit separation for now, I can combine them later -- just don't want to build tests that assume they're nested.
62598fa967a9b606de545f2b
class LoadError(Exception): <NEW_LINE> <INDENT> pass
Load game from file exception
62598fa94428ac0f6e658483
class RequestTimeout(BaseError): <NEW_LINE> <INDENT> pass
The data provider is not responding
62598fa97d847024c075c323
class ML_CompoundVectorFormat(ML_VectorFormat, ML_Compound_Format): <NEW_LINE> <INDENT> def __init__(self, c_format_name, opencl_format_name, vector_size, scalar_format, sollya_precision = None, cst_callback = None, display_format="", header=None): <NEW_LINE> <INDENT> ML_VectorFormat.__init__(self, scalar_format, vector_size, c_format_name, header=header) <NEW_LINE> ML_Compound_Format.__init__(self, c_format_name, ["[%d]" % i for i in range(vector_size)], [scalar_format for i in range(vector_size)], "", display_format, sollya_precision, header=header) <NEW_LINE> self.name[OpenCL_Code] = opencl_format_name <NEW_LINE> self.cst_callback = cst_callback <NEW_LINE> <DEDENT> def get_cst_default(self, cst_value, language = C_Code): <NEW_LINE> <INDENT> elt_value_list = [self.scalar_format.get_cst(cst_value[i], language = language) for i in range(self.vector_size)] <NEW_LINE> if language is C_Code: <NEW_LINE> <INDENT> return "{%s}" % (", ".join(elt_value_list)) <NEW_LINE> <DEDENT> elif language is OpenCL_Code: <NEW_LINE> <INDENT> return "(%s)(%s)" % (self.get_name(language = OpenCL_Code), (", ".join(elt_value_list))) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> Log.report(Log.Error, "unsupported language in ML_CompoundVectorFormat.get_cst: %s" % (language)) <NEW_LINE> <DEDENT> <DEDENT> def get_cst(self, cst_value, language = C_Code): <NEW_LINE> <INDENT> if self.cst_callback is None: <NEW_LINE> <INDENT> return self.get_cst_default(cst_value, language) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self.cst_callback(self, cst_value, language)
Generic class for ml_support_lib vector format
62598fa963b5f9789fe850c4
class Modal(lv.mbox): <NEW_LINE> <INDENT> def __init__(self, parent, *args, **kwargs): <NEW_LINE> <INDENT> modal_style = lv.style_t() <NEW_LINE> lv.style_copy(modal_style, lv.style_plain_color) <NEW_LINE> modal_style.body.main_color = modal_style.body.grad_color = lv.color_make(0,0,0) <NEW_LINE> modal_style.body.opa = lv.OPA._50 <NEW_LINE> self.bg = lv.obj(parent) <NEW_LINE> self.bg.set_style(modal_style) <NEW_LINE> self.bg.set_pos(0, 0) <NEW_LINE> self.bg.set_size(parent.get_width(), parent.get_height()) <NEW_LINE> self.bg.set_opa_scale_enable(True) <NEW_LINE> super().__init__(self.bg, *args, **kwargs) <NEW_LINE> self.align(None, lv.ALIGN.CENTER, 0, 0) <NEW_LINE> a = lv.anim_t() <NEW_LINE> lv.anim_init(a) <NEW_LINE> lv.anim_set_time(a, 500, 0) <NEW_LINE> lv.anim_set_values(a, lv.OPA.TRANSP, lv.OPA.COVER) <NEW_LINE> lv.anim_set_exec_cb(a, self.bg, lv.obj.set_opa_scale) <NEW_LINE> lv.anim_create(a) <NEW_LINE> super().set_event_cb(self.default_callback) <NEW_LINE> <DEDENT> def set_event_cb(self, callback): <NEW_LINE> <INDENT> self.callback = callback <NEW_LINE> <DEDENT> def get_event_cb(self): <NEW_LINE> <INDENT> return self.callback <NEW_LINE> <DEDENT> def default_callback(self, obj, evt): <NEW_LINE> <INDENT> if evt == lv.EVENT.DELETE: <NEW_LINE> <INDENT> self.get_parent().del_async() <NEW_LINE> <DEDENT> elif evt == lv.EVENT.VALUE_CHANGED: <NEW_LINE> <INDENT> self.start_auto_close(0) <NEW_LINE> <DEDENT> if self.callback is not None: <NEW_LINE> <INDENT> self.callback(obj, evt)
mbox with semi-transparent background
62598fa9d268445f26639b33
class NetCDFDataProxy(object): <NEW_LINE> <INDENT> __slots__ = ('shape', 'dtype', 'path', 'variable_name', 'fill_value') <NEW_LINE> def __init__(self, shape, dtype, path, variable_name, fill_value): <NEW_LINE> <INDENT> self.shape = shape <NEW_LINE> self.dtype = dtype <NEW_LINE> self.path = path <NEW_LINE> self.variable_name = variable_name <NEW_LINE> self.fill_value = fill_value <NEW_LINE> <DEDENT> @property <NEW_LINE> def ndim(self): <NEW_LINE> <INDENT> return len(self.shape) <NEW_LINE> <DEDENT> def __getitem__(self, keys): <NEW_LINE> <INDENT> dataset = netCDF4.Dataset(self.path) <NEW_LINE> try: <NEW_LINE> <INDENT> variable = dataset.variables[self.variable_name] <NEW_LINE> var = variable[keys] <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> dataset.close() <NEW_LINE> <DEDENT> return np.asanyarray(var) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> fmt = '<{self.__class__.__name__} shape={self.shape}' ' dtype={self.dtype!r} path={self.path!r}' ' variable_name={self.variable_name!r}>' <NEW_LINE> return fmt.format(self=self) <NEW_LINE> <DEDENT> def __getstate__(self): <NEW_LINE> <INDENT> return {attr: getattr(self, attr) for attr in self.__slots__} <NEW_LINE> <DEDENT> def __setstate__(self, state): <NEW_LINE> <INDENT> for key, value in six.iteritems(state): <NEW_LINE> <INDENT> setattr(self, key, value)
A reference to the data payload of a single NetCDF file variable.
62598fa945492302aabfc431
class WeightEntrySerializer(serializers.ModelSerializer): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> model = WeightEntry <NEW_LINE> exclude = ('user',)
Weight serializer
62598fa95fcc89381b2660fc
class DeviceConfigVariable: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.base_request = BaseRequest() <NEW_LINE> self.device = Device() <NEW_LINE> self.settings = Settings() <NEW_LINE> <DEDENT> def get_all(self, uuid): <NEW_LINE> <INDENT> device = self.device.get(uuid) <NEW_LINE> params = { 'filter': 'device', 'eq': device['id'] } <NEW_LINE> return self.base_request.request( 'device_config_variable', 'GET', params=params, endpoint=self.settings.get('pine_endpoint') )['d'] <NEW_LINE> <DEDENT> def get_all_by_application(self, app_id): <NEW_LINE> <INDENT> raw_query = '$filter=device/any(d:d/belongs_to__application%20eq%20{app_id})'.format(app_id=app_id) <NEW_LINE> return self.base_request.request( 'device_config_variable', 'GET', raw_query=raw_query, endpoint=self.settings.get('pine_endpoint') )['d'] <NEW_LINE> <DEDENT> def create(self, uuid, config_var_name, value): <NEW_LINE> <INDENT> if not _is_valid_config_var_name(config_var_name): <NEW_LINE> <INDENT> raise exceptions.InvalidParameter('config_var_name', config_var_name) <NEW_LINE> <DEDENT> device = self.device.get(uuid) <NEW_LINE> data = { 'device': device['id'], 'application': device['belongs_to__application']['__id'], 'name': config_var_name, 'value': value } <NEW_LINE> return json.loads(self.base_request.request( 'device_config_variable', 'POST', data=data, endpoint=self.settings.get('pine_endpoint') ).decode('utf-8')) <NEW_LINE> <DEDENT> def update(self, var_id, value): <NEW_LINE> <INDENT> params = { 'filter': 'id', 'eq': var_id } <NEW_LINE> data = { 'value': value } <NEW_LINE> return self.base_request.request( 'device_config_variable', 'PATCH', params=params, data=data, endpoint=self.settings.get('pine_endpoint') ) <NEW_LINE> <DEDENT> def remove(self, var_id): <NEW_LINE> <INDENT> params = { 'filter': 'id', 'eq': var_id } <NEW_LINE> return self.base_request.request( 'device_config_variable', 'DELETE', params=params, endpoint=self.settings.get('pine_endpoint') )
This class implements device config variable model for balena python SDK.
62598fa93539df3088ecc213
class VolumeHelper(AWSHelper): <NEW_LINE> <INDENT> def __init__(self, heet, size, instance, name, device, zone=None, snapshot=None, volume_type=None, iops=None, dry_run=False): <NEW_LINE> <INDENT> self.heet = heet <NEW_LINE> self.size = size <NEW_LINE> self.instance = instance <NEW_LINE> self.name = name <NEW_LINE> self.device = device <NEW_LINE> self.zone = zone <NEW_LINE> self.snapshot = snapshot <NEW_LINE> self.volume_type = volume_type <NEW_LINE> self.iops = iops <NEW_LINE> self.dry_run = dry_run <NEW_LINE> self.conn = boto.ec2.connect_to_region( heet.get_region(), aws_access_key_id=heet.access_key_id, aws_secret_access_key=heet.secret_access_key) <NEW_LINE> heet.add_resource(self) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "Volume %s" % self.name <NEW_LINE> <DEDENT> def get_resource_object(self): <NEW_LINE> <INDENT> for volume in self.conn.get_all_volumes(filters={'tag:'+AWSHeet.TAG:self.name}): <NEW_LINE> <INDENT> return volume <NEW_LINE> <DEDENT> return None <NEW_LINE> <DEDENT> def wait_until_available(self): <NEW_LINE> <INDENT> while True: <NEW_LINE> <INDENT> volume = self.get_resource_object() <NEW_LINE> if volume.status == 'available': <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> self.heet.logger.info("waiting for %s to be available" % self) <NEW_LINE> time.sleep(3) <NEW_LINE> <DEDENT> <DEDENT> def converge(self): <NEW_LINE> <INDENT> volume = self.get_resource_object() <NEW_LINE> instance = self.instance.get_instance() <NEW_LINE> if not volume: <NEW_LINE> <INDENT> self.heet.logger.info("creating %s" % self) <NEW_LINE> if self.zone is None: <NEW_LINE> <INDENT> self.zone = instance.placement <NEW_LINE> <DEDENT> volume = self.conn.create_volume( self.size, self.zone, snapshot=self.snapshot, volume_type=self.volume_type, iops=self.iops, dry_run=self.dry_run ) <NEW_LINE> self.conn.create_tags(volume.id, {AWSHeet.TAG:self.name}) <NEW_LINE> self.wait_until_available() <NEW_LINE> <DEDENT> if not volume.status == 'in-use': <NEW_LINE> 
<INDENT> self.heet.logger.info("attaching volume %s to instance %s and device %s" % (volume.id, instance.id, self.device)) <NEW_LINE> self.conn.attach_volume(volume.id, instance.id, self.device) <NEW_LINE> <DEDENT> <DEDENT> def destroy(self): <NEW_LINE> <INDENT> volume = self.get_resource_object() <NEW_LINE> if not volume: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> if volume.status == 'in-use': <NEW_LINE> <INDENT> self.heet.logger.info("deattaching volume %s" % volume.id) <NEW_LINE> self.conn.detach_volume(volume.id) <NEW_LINE> <DEDENT> self.wait_until_available() <NEW_LINE> self.heet.logger.info("deleting Volume %s %s" % (self.name, volume.id)) <NEW_LINE> self.conn.delete_volume(volume.id)
modular and convergent EBS volumes. support for standard and io1 types (provisioned iops)
62598fa91f037a2d8b9e404d
class SrqMask(Flag): <NEW_LINE> <INDENT> NONE = 0b0 <NEW_LINE> DATA_READY = 0b1 <NEW_LINE> SYNTAX_ERROR = 0b100 <NEW_LINE> HARDWARE_ERROR = 0b1000 <NEW_LINE> FRONT_PANEL_SRQ = 0b10000 <NEW_LINE> CALIBRATION_FAILURE = 0b100000
The service interrupt register flags. See page 46 of the manual for details.
62598fa97d43ff24874273b2
class Media(object): <NEW_LINE> <INDENT> def __init__(self, filename): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def validate(cls, filename): <NEW_LINE> <INDENT> pass
Represents a HLS media playlist. Attributes: filename (str): Media playlist filename Examples: >>> media_playlist = Media('media.m3u8')
62598fa9090684286d59368c
class BugfixTest(unittest.TestCase): <NEW_LINE> <INDENT> def test_empty_file(self): <NEW_LINE> <INDENT> temp = tempfile.NamedTemporaryFile() <NEW_LINE> temp.flush() <NEW_LINE> self.assertRaises(MalformedFileError, NBTFile, temp.name)
Bugfix regression tests. These tend to not fit into nice categories.
62598fa96aa9bd52df0d4e29
class CamVidDataset(chainer.dataset.DatasetMixin): <NEW_LINE> <INDENT> def __init__(self, data_dir='auto', split='train'): <NEW_LINE> <INDENT> if split not in ['train', 'val', 'test']: <NEW_LINE> <INDENT> raise ValueError( 'Please pick split from \'train\', \'val\', \'test\'') <NEW_LINE> <DEDENT> if data_dir == 'auto': <NEW_LINE> <INDENT> data_dir = get_camvid() <NEW_LINE> <DEDENT> img_list_path = os.path.join(data_dir, '{}.txt'.format(split)) <NEW_LINE> self.paths = [ [os.path.join(data_dir, fn.replace('/SegNet/CamVid/', '')) for fn in line.split()] for line in open(img_list_path)] <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return len(self.paths) <NEW_LINE> <DEDENT> def get_example(self, i): <NEW_LINE> <INDENT> if i >= len(self): <NEW_LINE> <INDENT> raise IndexError('index is too large') <NEW_LINE> <DEDENT> img_path, label_path = self.paths[i] <NEW_LINE> img = read_image(img_path, color=True) <NEW_LINE> label = read_image(label_path, dtype=np.int32, color=False)[0] <NEW_LINE> label[label == 11] = -1 <NEW_LINE> return img, label
Semantic segmentation dataset for `CamVid`_. .. _`CamVid`: https://github.com/alexgkendall/SegNet-Tutorial/tree/master/CamVid Args: data_dir (string): Path to the root of the training data. If this is :obj:`auto`, this class will automatically download data for you under :obj:`$CHAINER_DATASET_ROOT/pfnet/chainercv/camvid`. split ({'train', 'val', 'test'}): Select from dataset splits used in CamVid Dataset.
62598fa9e5267d203ee6b86b
class CHGate(TwoQubitGate, IFallbackOperation): <NEW_LINE> <INDENT> lowername = "ch" <NEW_LINE> cu_params = (-math.pi / 2.0, math.pi, 0.0, 0.0) <NEW_LINE> @classmethod <NEW_LINE> def create(cls, targets: Targets, params: tuple, options: Optional[dict] = None) -> 'CHGate': <NEW_LINE> <INDENT> if options: <NEW_LINE> <INDENT> raise ValueError(f"{cls.__name__} doesn't take options") <NEW_LINE> <DEDENT> return cls(targets, params) <NEW_LINE> <DEDENT> def dagger(self): <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> def fallback(self, _): <NEW_LINE> <INDENT> assert self.cu_params is not None <NEW_LINE> return [CUGate.create(self.targets, self.cu_params)] <NEW_LINE> <DEDENT> def matrix(self): <NEW_LINE> <INDENT> a = 1.0 / math.sqrt(2) <NEW_LINE> return np.array( [[1, 0, 0, 0], [0, a, 0, a], [0, 0, 1, 0], [0, a, 0, -a]], dtype=complex)
Controlled-H gate
62598fa95fc7496912d48233
class ACVStepMode(enum.Enum): <NEW_LINE> <INDENT> Off = "@OFF" <NEW_LINE> Top = "@1" <NEW_LINE> MiddleTop1 = "@2" <NEW_LINE> MiddleTop2 = "@3" <NEW_LINE> MiddleBottom2 = "@4" <NEW_LINE> MiddleBottom1 = "@5" <NEW_LINE> Bottom = "@6" <NEW_LINE> Swing = "@100"
The vertical step mode for an AC/HVAC device. Blades are numbered vertically from 1 (topmost) to 6. All is 100.
62598fa938b623060ffa8ff9
class TestSmartbchSep20Api(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.api = mainnet.api.smartbch_sep20_api.SmartbchSep20Api() <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_smart_bch_sep20_all_balances(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_smart_bch_sep20_balance(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_smart_bch_sep20_deposit_address(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_smart_bch_sep20_deposit_qr(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_smart_bch_sep20_genesis(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_smart_bch_sep20_mint(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_smart_bch_sep20_send(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_smart_bch_sep20_send_max(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_smart_bch_sep20_token_info(self): <NEW_LINE> <INDENT> pass
SmartbchSep20Api unit test stubs
62598fa999fddb7c1ca62d99
class BaseCLITest(object): <NEW_LINE> <INDENT> @pytest.fixture <NEW_LINE> def mock_client(self, mocker): <NEW_LINE> <INDENT> m_client = mocker.patch('dropme.client.get_client') <NEW_LINE> return m_client.return_value <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def exec_command(command=''): <NEW_LINE> <INDENT> argv = shlex.split(command) <NEW_LINE> if '--debug' not in argv: <NEW_LINE> <INDENT> argv = ['--debug'] + argv <NEW_LINE> <DEDENT> return main_mod(argv=argv)
Base class for testing CLI.
62598fa99c8ee82313040121
class ProfileForm(forms.ModelForm): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> model = Profile <NEW_LINE> fields = ( 'language', 'languages', 'secondary_languages', ) <NEW_LINE> <DEDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super(ProfileForm, self).__init__(*args, **kwargs) <NEW_LINE> qs = Language.objects.have_translation() <NEW_LINE> self.fields['languages'].queryset = qs <NEW_LINE> self.fields['secondary_languages'].queryset = qs
User profile editing.
62598fa9925a0f43d25e7fa0
class CliCommand(): <NEW_LINE> <INDENT> def __init__(self, subcommand, action, parser, req_method, req_path, success_codes): <NEW_LINE> <INDENT> self.subcommand = subcommand <NEW_LINE> self.action = action <NEW_LINE> self.parser = parser <NEW_LINE> self.req_method = req_method <NEW_LINE> self.req_path = req_path <NEW_LINE> self.success_codes = success_codes <NEW_LINE> self.args = None <NEW_LINE> self.req_payload = None <NEW_LINE> self.req_params = None <NEW_LINE> self.req_headers = None <NEW_LINE> self.response = None <NEW_LINE> self.min_server_version = QPC_MIN_SERVER_VERSION <NEW_LINE> <DEDENT> def _validate_args(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def _build_req_params(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def _build_data(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def _handle_response_error(self): <NEW_LINE> <INDENT> handle_error_response(self.response) <NEW_LINE> sys.exit(1) <NEW_LINE> <DEDENT> def _handle_response_success(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def _do_command(self): <NEW_LINE> <INDENT> self._build_req_params() <NEW_LINE> self._build_data() <NEW_LINE> self.response = request(method=self.req_method, path=self.req_path, params=self.req_params, payload=self.req_payload, headers=self.req_headers, parser=self.parser, min_server_version=self.min_server_version) <NEW_LINE> if self.response.status_code not in self.success_codes: <NEW_LINE> <INDENT> self._handle_response_error() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._handle_response_success() <NEW_LINE> <DEDENT> <DEDENT> def main(self, args): <NEW_LINE> <INDENT> self.args = args <NEW_LINE> self._validate_args() <NEW_LINE> log_args(self.args) <NEW_LINE> self._do_command()
Base class for all sub-commands.
62598fa97b25080760ed740e
class PyFunction(PyObject): <NEW_LINE> <INDENT> option_spec = PyObject.option_spec.copy() <NEW_LINE> option_spec.update({ 'async': directives.flag, }) <NEW_LINE> def get_signature_prefix(self, sig): <NEW_LINE> <INDENT> if 'async' in self.options: <NEW_LINE> <INDENT> return 'async ' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return '' <NEW_LINE> <DEDENT> <DEDENT> def needs_arglist(self): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> def get_index_text(self, modname, name_cls): <NEW_LINE> <INDENT> name, cls = name_cls <NEW_LINE> if modname: <NEW_LINE> <INDENT> return _('%s() (in module %s)') % (name, modname) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return _('%s() (built-in function)') % name
Description of a function.
62598fa97d847024c075c325
class MyStaticMplCanvas(MyMplCanvas): <NEW_LINE> <INDENT> def compute_initial_figure(self): <NEW_LINE> <INDENT> t = arange(0.0, 3.0, 0.01) <NEW_LINE> s = sin(2*pi*t) <NEW_LINE> s1 = cos( 2 * pi * t) <NEW_LINE> self.axes.plot(t, s,label='sin',color='r') <NEW_LINE> self.axes.plot( t, s1,label='cos',color='b' ) <NEW_LINE> self.axes.set_title('time demond') <NEW_LINE> self.axes.xaxis.grid(True) <NEW_LINE> self.axes.yaxis.grid( True) <NEW_LINE> self.axes.legend(loc='upper right')
静态画布:一条正弦线
62598fa9796e427e5384e6f5
class Square: <NEW_LINE> <INDENT> def __init__(self, size=0): <NEW_LINE> <INDENT> self.__size = size <NEW_LINE> <DEDENT> @property <NEW_LINE> def size(self): <NEW_LINE> <INDENT> return self.__size <NEW_LINE> <DEDENT> @size.setter <NEW_LINE> def size(self, value): <NEW_LINE> <INDENT> if type(value) is not int: <NEW_LINE> <INDENT> raise TypeError('size must be an integer') <NEW_LINE> <DEDENT> elif value < 0: <NEW_LINE> <INDENT> raise ValueError('size must be >= 0') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.__size = value <NEW_LINE> <DEDENT> <DEDENT> def area(self): <NEW_LINE> <INDENT> return self.__size ** 2
creates square
62598fa999cbb53fe6830e38
class SensorData: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.logger = logger_variable(__name__, 'SensorData.log') <NEW_LINE> self.serialOpen = serial.Serial('/dev/ttyACM0', 115200) <NEW_LINE> wpi.wiringPiSetupGpio() <NEW_LINE> wpi.pinMode(self.interrupt_pin, 1) <NEW_LINE> wpi.pinMode(26, 1) <NEW_LINE> <DEDENT> def get_data(self): <NEW_LINE> <INDENT> sensor_data = "" <NEW_LINE> interrupt_pin = 25 <NEW_LINE> ack = False <NEW_LINE> request = "" <NEW_LINE> interrupt = True <NEW_LINE> wpi.digitalWrite(interrupt_pin, interrupt) <NEW_LINE> while request != "SEND REQ": <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> request = self.receive_from_arduino() <NEW_LINE> <DEDENT> except serial.ConnectionError: <NEW_LINE> <INDENT> print("Connection Error") <NEW_LINE> <DEDENT> <DEDENT> self.send_to_arduino("SENSOR DATA") <NEW_LINE> while ack is False: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> sensor_data = self.receive_from_arduino() <NEW_LINE> <DEDENT> except ConnectionError: <NEW_LINE> <INDENT> print("Connection Error") <NEW_LINE> <DEDENT> filter_sensor_data = self.filter_data(sensor_data) <NEW_LINE> if filter_sensor_data[0] == '0xAB46CA': <NEW_LINE> <INDENT> filter_sensor_data['status'] = 'OK' <NEW_LINE> ack = True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> ack = False <NEW_LINE> <DEDENT> self.send_to_arduino(ack) <NEW_LINE> <DEDENT> return filter_sensor_data <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def filter_data(self, data): <NEW_LINE> <INDENT> filtered_data = {} <NEW_LINE> data_split = data.split() <NEW_LINE> data = ['signature', 'temperature', 'humidity', 'waterlevel', 'pH', 'turbidity', 'status', 'timestamp'] <NEW_LINE> filtered_data[data[0]] = data_split[0] <NEW_LINE> filtered_data[data[1]] = data_split[1] <NEW_LINE> filtered_data[data[2]] = data_split[2] <NEW_LINE> filtered_data[data[3]] = data_split[3] <NEW_LINE> filtered_data[data[4]] = data_split[4] <NEW_LINE> filtered_data[data[5]] = data_split[5] <NEW_LINE> filtered_data[data[6]] = "" 
<NEW_LINE> filtered_data[data[7]] = datetime.datetime.now() <NEW_LINE> return filtered_data <NEW_LINE> <DEDENT> def send_to_arduino(self, message): <NEW_LINE> <INDENT> self.serialOpen.write(message) <NEW_LINE> <DEDENT> def receive_from_arduino(self): <NEW_LINE> <INDENT> return self.serialOpen.readline() <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def convert_to_json(self, data): <NEW_LINE> <INDENT> if type(data) == dict: <NEW_LINE> <INDENT> json_data = json.dumps(data) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print("Incorrect Data Format") <NEW_LINE> <DEDENT> return json_data
functions: get_data(), filter_data(), convert_to_json() public variables:
62598fa90a50d4780f70533f
class DummyClass(object): <NEW_LINE> <INDENT> def __init__(self, parent, **attrlinks): <NEW_LINE> <INDENT> dataDict = self.__dict__ <NEW_LINE> dataDict['parent'] = parent <NEW_LINE> storage = parent.__dict__['storage'] <NEW_LINE> dataDict['storage'] = storage <NEW_LINE> tag = self.__class__.siblingValues <NEW_LINE> ll = parent.__dict__.get(tag) <NEW_LINE> if ll is None: <NEW_LINE> <INDENT> ll = parent.__dict__[tag] = [] <NEW_LINE> <DEDENT> ll.append(self) <NEW_LINE> for key,value in attrlinks.items(): <NEW_LINE> <INDENT> setattr(self, key, value)
dummy class to store temporarily attributes of classes that are removed (not just changed). NB Assumes that class.storage == parent.storage
62598fa9cc0a2c111447af72
class Repository(Settings): <NEW_LINE> <INDENT> valid_keys = ["use-profile", "directory", "groups", "tag"] + Profile.valid_keys <NEW_LINE> def __init__(self, repo: str): <NEW_LINE> <INDENT> super().__init__(Repository.valid_keys) <NEW_LINE> self.repo = repo <NEW_LINE> <DEDENT> def get_repo(self): <NEW_LINE> <INDENT> return self.repo <NEW_LINE> <DEDENT> def get_local_path(self): <NEW_LINE> <INDENT> directory = self.get_optional_setting("directory", self.get_repo()) <NEW_LINE> directory_parts = directory.split("/") <NEW_LINE> return os.path.join(*directory_parts) <NEW_LINE> <DEDENT> def overlay(self, overlay_repo): <NEW_LINE> <INDENT> logger.debug("Overlaying repository " + self.repo) <NEW_LINE> assert self.repo == overlay_repo.get_repo() <NEW_LINE> super().overlay(overlay_repo) <NEW_LINE> <DEDENT> def todict(self): <NEW_LINE> <INDENT> dct = {"repository": self.repo} <NEW_LINE> dct.update(self.get_settings()) <NEW_LINE> return dct
Stores all settings related to a repository.
62598fa9d268445f26639b34
class V1CrossVersionObjectReference(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.swagger_types = { 'kind': 'str', 'name': 'str', 'api_version': 'str' } <NEW_LINE> self.attribute_map = { 'kind': 'kind', 'name': 'name', 'api_version': 'apiVersion' } <NEW_LINE> self._kind = None <NEW_LINE> self._name = None <NEW_LINE> self._api_version = None <NEW_LINE> <DEDENT> @property <NEW_LINE> def kind(self): <NEW_LINE> <INDENT> return self._kind <NEW_LINE> <DEDENT> @kind.setter <NEW_LINE> def kind(self, kind): <NEW_LINE> <INDENT> self._kind = kind <NEW_LINE> <DEDENT> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return self._name <NEW_LINE> <DEDENT> @name.setter <NEW_LINE> def name(self, name): <NEW_LINE> <INDENT> self._name = name <NEW_LINE> <DEDENT> @property <NEW_LINE> def api_version(self): <NEW_LINE> <INDENT> return self._api_version <NEW_LINE> <DEDENT> @api_version.setter <NEW_LINE> def api_version(self, api_version): <NEW_LINE> <INDENT> self._api_version = api_version <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in iteritems(self.swagger_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return 
self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other
NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually.
62598fa98a43f66fc4bf20de
class CentralizedSGD(GenericCentralizedOptimizer): <NEW_LINE> <INDENT> def __init__( self, world_size=None, model=None, lr=required, momentum=0, dampening=0, weight_decay=0, nesterov=False, use_cuda=False, by_layer=False, agg_grad=True, ): <NEW_LINE> <INDENT> super().__init__( model=model, world_size=world_size, use_cuda=use_cuda, by_layer=by_layer, agg_grad=agg_grad, ) <NEW_LINE> self.optimizer = SGD( lr=lr, momentum=momentum, dampening=dampening, weight_decay=weight_decay, nesterov=nesterov, params=model.parameters(), )
Implements centralized stochastic gradient descent (optionally with momentum). Averages the reduced parameters over the world size. Args: world_size (int): Size of the network model (:obj:`nn.Module`): Model which contains parameters for SGD lr (float): learning rate momentum (float, optional): momentum factor (default: 0) weight_decay (float, optional): weight decay (L2 penalty) (default: 0) dampening (float, optional): dampening for momentum (default: 0) nesterov (bool, optional): enables Nesterov momentum (default: False) use_cuda (bool): Whether to use cuda tensors for aggregation by_layer (bool): Aggregate by layer instead of all layers at once agg_grad (bool): Aggregate the gradients before updating weights. If `False`, weights will be updated and then reduced across all workers. (default: `True`)
62598fa95fcc89381b2660fd
class EntryState(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @log <NEW_LINE> def evaluate(self, app, kw, context, test_answer): <NEW_LINE> <INDENT> if (not kw) and (not context): <NEW_LINE> <INDENT> logger.debug("not kw and not context", extra={'className': strip(self.__class__)}) <NEW_LINE> app.kw = kw <NEW_LINE> return ReadContextState() <NEW_LINE> <DEDENT> elif (not kw) and context: <NEW_LINE> <INDENT> logger.debug("not kw and context", extra={'className': strip(self.__class__)}) <NEW_LINE> app.new_context = True <NEW_LINE> ctx = utils.find_model_object(context, utils.Context) <NEW_LINE> if not ctx: <NEW_LINE> <INDENT> return CreateContextState() <NEW_LINE> <DEDENT> app.context = ctx <NEW_LINE> return NewKeywordState() <NEW_LINE> <DEDENT> elif kw and (not context): <NEW_LINE> <INDENT> app.kwnotcontext = True <NEW_LINE> logger.debug("kw and not context", extra={'className': strip(self.__class__)}) <NEW_LINE> app.keyword_context = kw.current_context <NEW_LINE> return UpdateKeywordState() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> app.kwandcontext = True <NEW_LINE> logger.debug("kw and context", extra={'className': strip(self.__class__)}) <NEW_LINE> return UpdateKeywordState()
Makes an initial evaluation of keyword & context and decides which is the next state.
62598fa9d7e4931a7ef3bffb
class PluginError(Exception): <NEW_LINE> <INDENT> pass
Raise when a parser is missing
62598fa976e4537e8c3ef50f
class SubnetMapping(AWSProperty): <NEW_LINE> <INDENT> props: PropsDictType = { "SubnetId": (str, True), }
`SubnetMapping <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-networkfirewall-firewall-subnetmapping.html>`__
62598fa9167d2b6e312b6ed3
class TabuHillClimber(TabuOptimizer, HillClimber): <NEW_LINE> <INDENT> pass
Applies the tabu proccess in addition to a hill climbing search.
62598fa9a8370b77170f033d
class ServerConfig: <NEW_LINE> <INDENT> config_fp='/home/%s/qiime_web.conf' % getpass.getuser() <NEW_LINE> filefp=open(config_fp).read().split('\n') <NEW_LINE> config_options={} <NEW_LINE> for line in filefp: <NEW_LINE> <INDENT> if line != '': <NEW_LINE> <INDENT> key,val=line.strip().split('=') <NEW_LINE> config_options[key]=val <NEW_LINE> <DEDENT> <DEDENT> data_access_type = eval(config_options['data_access_type']) <NEW_LINE> home = config_options['home']
this is the cluster's server configuration to determine if it is a production or development server
62598fa991f36d47f2230e56