code
stringlengths
4
4.48k
docstring
stringlengths
1
6.45k
_id
stringlengths
24
24
class Slave:
    """One USS/PROFIBUS-style slave.

    Telegrams are framed as STX | LGE | ADR | net data | BCC, where net data
    is a PKW area (parameter access) followed by a PZD area (process data).
    Build net data with ``_createPKW``/``_createPZD`` and frame it with
    ``telegram``.
    """

    # Process-data words sent with every telegram; may be refreshed by
    # ``pzdHook`` just before a telegram is built (see _createPZD).
    pzd = ()
    pzdHook = None
    # Supported PPO layouts (PKW/PZD word counts), for reference.
    ppo = ('PKW PKW PKW PKW PZD PZD PZD PZD',
           'PKW PKW PKW PKW PZD PZD',
           'PZD PZD PZD PZD',
           'PZD PZD')

    def __init__(self, slaveno: int) -> None:
        # ADR carries the slave number in its low 5 bits; the upper three
        # bits are the special/mirror/broadcast flags.
        assert slaveno < (1 << 5), f'Slave number cant be {1 << 5} or greater.'
        self.slaveno = slaveno

    def simpleWrite(self, paramno: int, index: int, value: int) -> None:
        pass

    def simpleRead(self, paramno: int, index: int) -> bytes:
        """Build a read-request telegram for parameter ``paramno``/``index``."""
        pwes = bytes([0, 0])
        # AK=1 -> parameter read request; _createPKW already appends the
        # first PWE word, the second is appended here.
        pkw = self._createPKW(1, paramno, index, pwes) + pwes
        # BUGFIX: the original referenced the undefined local name ``pzd``
        # (NameError) instead of the instance's PZD area.
        assert self.pzd
        return self.telegram(pkw + self._createPZD())

    def telegram(self, netData: Union[bytes, str], *,
                 special: bool = False, mirror: bool = False,
                 broadcast: bool = False) -> bytes:
        """Frame ``netData`` as STX | LGE | ADR | netData | BCC.

        ``netData`` may also be given as a hex string.
        """
        # BUGFIX: convert a hex string to bytes *before* computing LGE and
        # BCC; the original converted only after both, so LGE counted hex
        # characters and the BCC loop XOR-ed str items (TypeError).
        if type(netData) is str:
            netData = bytes.fromhex(netData)
        stx = 0x02
        assert len(netData) <= 252, 'Too big payload.'
        lge = len(netData) + 2  # LGE counts net data plus ADR and BCC.
        adr = self.slaveno
        if broadcast:
            adr |= 1 << 5
        if mirror:
            adr |= 1 << 6
        if special:
            adr |= 1 << 7
        # Block check character: XOR over every byte of the frame.
        bcc = stx ^ lge ^ adr
        for byte in netData:
            bcc ^= byte
        return bytes([stx, lge, adr]) + netData + bytes([bcc])

    def _createPKW(self, ak: int, paramno: int, index: int, pwes: bytes, *,
                   sp: bool = False, rw: int = 0,
                   indexHighByte: int = 0) -> bytes:
        """Build the PKW header (PKE + IND words, big-endian) plus ``pwes``."""
        assert ak in TASK_ID, 'Invalid AK'
        assert paramno.bit_length() <= 11, f'PNU cannot be greater than {1 << 11}'
        assert index.bit_length() <= 8, 'IND Low byte cannot be more than one byte'
        assert rw.bit_length() <= 2, 'RW cannot be more than 2 bits'
        assert indexHighByte.bit_length() <= 6, 'IND High byte cannot be more than 6 bits'
        # PKE: AK in bits 12-15, SP flag in bit 11, PNU in bits 0-10.
        pke = ak << 1
        if sp:
            pke |= 1
        pke <<= 11
        pke |= paramno
        ind = (indexHighByte << 10) | (rw << 8) | index
        # BUGFIX: ``pke & 0xff00 >> 8`` parses as ``pke & (0xff00 >> 8)``
        # because ``>>`` binds tighter than ``&`` — the high byte was lost
        # and the low byte duplicated.  Shift first, then mask.
        return bytes([(pke >> 8) & 0xff, pke & 0xff,
                      (ind >> 8) & 0xff, ind & 0xff]) + pwes

    def _createPZD(self) -> bytes:
        """Return the PZD area, letting ``pzdHook`` refresh it first."""
        if callable(self.pzdHook):
            self.pzdHook(self)
        return bytes(self.pzd)
Create a telegram with self.telegram, giving it the net data (netdata = pkw + pzd). Create the PKW/PZD parts with self._create{PKW, PZD}, giving them the PWEs.
62598f9d379a373c97d98de7
class CreateListener(neutronV20.CreateCommand):
    """LBaaS v2: create a listener."""

    resource = 'listener'

    def add_known_arguments(self, parser):
        # CLI options for the listener resource; --loadbalancer, --protocol
        # and --protocol-port are mandatory.
        parser.add_argument(
            '--admin-state-down',
            dest='admin_state', action='store_false',
            help=_('Set admin state up to false.'))
        parser.add_argument(
            '--connection-limit',
            help=_('The maximum number of connections per second allowed for '
                   'the vip. Positive integer or -1 for unlimited (default).'))
        parser.add_argument(
            '--description',
            help=_('Description of the listener.'))
        parser.add_argument(
            '--name',
            help=_('The name of the listener.'))
        parser.add_argument(
            '--default-tls-container-ref',
            dest='default_tls_container_ref',
            help=_('Default TLS container reference'
                   ' to retrieve TLS information.'))
        parser.add_argument(
            '--sni-container-refs',
            dest='sni_container_refs', nargs='+',
            help=_('List of TLS container references for SNI.'))
        parser.add_argument(
            '--loadbalancer', required=True,
            metavar='LOADBALANCER',
            help=_('ID or name of the load balancer.'))
        parser.add_argument(
            '--protocol', required=True,
            choices=['TCP', 'HTTP', 'HTTPS', 'TERMINATED_HTTPS'],
            help=_('Protocol for the listener.'))
        parser.add_argument(
            '--protocol-port',
            dest='protocol_port', required=True,
            metavar='PORT',
            help=_('Protocol port for the listener.'))

    def args2body(self, parsed_args):
        # Resolve a load-balancer name to its ID before building the body.
        if parsed_args.loadbalancer:
            parsed_args.loadbalancer = _get_loadbalancer_id(
                self.get_client(), parsed_args.loadbalancer)
        body = {
            self.resource: {
                'loadbalancer_id': parsed_args.loadbalancer,
                'protocol': parsed_args.protocol,
                'protocol_port': parsed_args.protocol_port,
                'admin_state_up': parsed_args.admin_state,
            },
        }
        # BUGFIX: argparse stores --connection-limit under the dest
        # ``connection_limit``; the hyphenated key could never match, so the
        # option was silently dropped from the request body.
        neutronV20.update_dict(parsed_args, body[self.resource],
                               ['connection_limit', 'description',
                                'loadbalancer_id', 'name',
                                'default_tls_container_ref',
                                'sni_container_refs', 'tenant_id'])
        return body
LBaaS v2 Create a listener.
62598f9d3539df3088ecc088
class SubtractSquare(Game): <NEW_LINE> <INDENT> player: str <NEW_LINE> current_state: 'SubtractSquareState' <NEW_LINE> INSTRUCTIONS = 'Players take turns subtracting square numbers from the ' 'starting number. The winner is the person who subtracts ' 'to 0.' <NEW_LINE> def __init__(self, is_p1_turn: bool) -> None: <NEW_LINE> <INDENT> Game.__init__(self, is_p1_turn) <NEW_LINE> initial_num = input("Enter a non-negative whole number to subtract " "from: ") <NEW_LINE> while (not initial_num.isdigit()) or int(initial_num) < 0: <NEW_LINE> <INDENT> initial_num = input("Enter a non-negative whole number to subtract " "from: ") <NEW_LINE> <DEDENT> self.current_state = SubtractSquareState(self.player, int(initial_num)) <NEW_LINE> <DEDENT> def __str__(self) -> str: <NEW_LINE> <INDENT> return 'The game is Subtract Square.' <NEW_LINE> <DEDENT> def get_instructions(self) -> str: <NEW_LINE> <INDENT> return SubtractSquare.INSTRUCTIONS <NEW_LINE> <DEDENT> def str_to_move(self, move: Union[str, int]) -> Optional[int]: <NEW_LINE> <INDENT> if type(move) is int: <NEW_LINE> <INDENT> return move <NEW_LINE> <DEDENT> elif move.isdigit(): <NEW_LINE> <INDENT> return int(move) <NEW_LINE> <DEDENT> return None
The Subtract Square game. player - the starting player of the game current_state - the current state of the game
62598f9d1f5feb6acb1629f4
class Robot(object): <NEW_LINE> <INDENT> def __init__(self, room, speed, capacity): <NEW_LINE> <INDENT> self.room = room <NEW_LINE> self.speed = speed <NEW_LINE> self.capacity = capacity <NEW_LINE> self.position = room.get_random_position() <NEW_LINE> self.direction = random.uniform(0, 360) <NEW_LINE> <DEDENT> def get_robot_position(self): <NEW_LINE> <INDENT> return self.position <NEW_LINE> <DEDENT> def get_robot_direction(self): <NEW_LINE> <INDENT> return self.direction <NEW_LINE> <DEDENT> def set_robot_position(self, position): <NEW_LINE> <INDENT> if self.room.is_position_valid(position): <NEW_LINE> <INDENT> self.position = position <NEW_LINE> <DEDENT> <DEDENT> def set_robot_direction(self, direction): <NEW_LINE> <INDENT> self.direction = direction <NEW_LINE> <DEDENT> def update_position_and_clean(self): <NEW_LINE> <INDENT> raise NotImplementedError
Represents a robot cleaning a particular room. At all times, the robot has a particular position and direction in the room. The robot also has a fixed speed and a fixed cleaning capacity. Subclasses of Robot should provide movement strategies by implementing update_position_and_clean, which simulates a single time-step.
62598f9de64d504609df92a1
class Variant(Base): <NEW_LINE> <INDENT> __tablename__ = 'hutt' <NEW_LINE> record_id = Column(BigInteger, primary_key=True, unique=True) <NEW_LINE> maf_imputed = Column(Float) <NEW_LINE> is_qc = Column(Boolean) <NEW_LINE> var_region = Column(String(255)) <NEW_LINE> var_mutation = Column(String(255)) <NEW_LINE> def __repr__(self): <NEW_LINE> <INDENT> return "Gene(id=%d, '%s')" % (self.id, self.name2,)
Variant annotation. See http://workshops.arl.arizona.edu/sql1/sql_workshop/mysql/ucscdatabase.html#querying-the-refgene-table
62598f9d498bea3a75a578f3
class memoize(object): <NEW_LINE> <INDENT> def __init__(self, func): <NEW_LINE> <INDENT> self.func = func <NEW_LINE> update_wrapper(self, func) <NEW_LINE> <DEDENT> def __get__(self, obj, objtype=None): <NEW_LINE> <INDENT> return partial(self, obj) <NEW_LINE> <DEDENT> def __call__(self, *args, **kw): <NEW_LINE> <INDENT> obj = args[0] <NEW_LINE> try: <NEW_LINE> <INDENT> cache = obj.__cache <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> cache = obj.__cache = {} <NEW_LINE> <DEDENT> key = (self.func, args[1:], frozenset(kw.items())) <NEW_LINE> try: <NEW_LINE> <INDENT> res = cache[key] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> res = cache[key] = self.func(*args, **kw) <NEW_LINE> <DEDENT> return res
cache the return value of a method. This class is meant to be used as a decorator of methods. The return value from a given method invocation will be cached on the instance whose method was invoked. All arguments passed to a method decorated with memoize must be hashable. If a memoized method is invoked directly on its class the result will not be cached. Instead the method will be invoked like a static method:: class Obj(object): @memoize def add_to(self, arg): return self + arg Obj.add_to(1) # not enough arguments Obj.add_to(1, 2) # returns 3, result is not cached
62598f9d5f7d997b871f92c8
class PaymentPage(BasePage): <NEW_LINE> <INDENT> def credit_card_number(self, text): <NEW_LINE> <INDENT> self.driver.find_element(*PaymentPageLocators.CREDIT_CARD_NUMBER).send_keys(text) <NEW_LINE> <DEDENT> def expiration(self, text): <NEW_LINE> <INDENT> self.driver.find_element(*PaymentPageLocators.EXPIRATION).send_keys(text) <NEW_LINE> <DEDENT> def cvv(self, text): <NEW_LINE> <INDENT> self.driver.find_element(*PaymentPageLocators.CVV).send_keys(text) <NEW_LINE> <DEDENT> def click_continue(self): <NEW_LINE> <INDENT> self.driver.find_element(*PaymentPageLocators.CONTINUE_BUTTON).click()
Payment page action methods come here.
62598f9d55399d3f056262f4
class PrivateLinkServiceConnectionState(msrest.serialization.Model): <NEW_LINE> <INDENT> _attribute_map = { 'status': {'key': 'status', 'type': 'str'}, 'description': {'key': 'description', 'type': 'str'}, 'actions_required': {'key': 'actionsRequired', 'type': 'str'}, } <NEW_LINE> def __init__( self, *, status: Optional[Union[str, "PrivateEndpointServiceConnectionStatus"]] = None, description: Optional[str] = None, actions_required: Optional[str] = None, **kwargs ): <NEW_LINE> <INDENT> super(PrivateLinkServiceConnectionState, self).__init__(**kwargs) <NEW_LINE> self.status = status <NEW_LINE> self.description = description <NEW_LINE> self.actions_required = actions_required
A collection of information about the state of the connection between service consumer and provider. :ivar status: Indicates whether the connection has been Approved/Rejected/Removed by the owner of the service. Possible values include: "Pending", "Approved", "Rejected". :vartype status: str or ~azure.mgmt.compute.v2021_12_01.models.PrivateEndpointServiceConnectionStatus :ivar description: The reason for approval/rejection of the connection. :vartype description: str :ivar actions_required: A message indicating if changes on the service provider require any updates on the consumer. :vartype actions_required: str
62598f9d38b623060ffa8e64
class BulkUpdate(BulkUD): <NEW_LINE> <INDENT> def __init__(self, query, values, update_kwargs): <NEW_LINE> <INDENT> super(BulkUpdate, self).__init__(query) <NEW_LINE> self.values = values <NEW_LINE> self.update_kwargs = update_kwargs
BulkUD which handles UPDATEs.
62598f9d97e22403b383acde
class URLSchema(SchemaBase): <NEW_LINE> <INDENT> shortcut = fields.Str(location='view_args', description='The generated or manually set URL shortcut') <NEW_LINE> url = fields.URL(description='The original URL (the short URL target)') <NEW_LINE> short_url = fields.Method('_get_short_url', description='The short URL') <NEW_LINE> meta = fields.Dict(description='Additional metadata (provided on short URL creation)') <NEW_LINE> owner = fields.Str(attribute='token.name', description='The name of the token than created the short URL') <NEW_LINE> allow_reuse = fields.Boolean(load_only=True, default=False) <NEW_LINE> def _get_short_url(self, obj): <NEW_LINE> <INDENT> return posixpath.join(current_app.config['REDIRECTION_HOST'], obj.shortcut)
Schema class to validate URLs. Note: use one of the sub-classes below for validation, depending on the shortcut requirements.
62598f9d8e7ae83300ee8e72
class ResistantVirus(SimpleVirus): <NEW_LINE> <INDENT> def __init__(self, maxBirthProb, clearProb, resistances, mutProb): <NEW_LINE> <INDENT> SimpleVirus.__init__(self, maxBirthProb, clearProb) <NEW_LINE> self.resistances = resistances <NEW_LINE> self.mutProb = mutProb <NEW_LINE> <DEDENT> def getResistances(self): <NEW_LINE> <INDENT> return self.resistances <NEW_LINE> <DEDENT> def getMutProb(self): <NEW_LINE> <INDENT> return self.mutProb <NEW_LINE> <DEDENT> def isResistantTo(self, drug): <NEW_LINE> <INDENT> if drug in self.resistances: <NEW_LINE> <INDENT> return self.resistances[drug] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> def reproduce(self, popDensity, activeDrugs): <NEW_LINE> <INDENT> resis = True <NEW_LINE> for i in activeDrugs: <NEW_LINE> <INDENT> if self.isResistantTo(i) == False: <NEW_LINE> <INDENT> resis = False <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> if resis == True: <NEW_LINE> <INDENT> p = random.random() <NEW_LINE> if p <= self.maxBirthProb * (1 - popDensity): <NEW_LINE> <INDENT> childResistances = {} <NEW_LINE> for i in self.resistances: <NEW_LINE> <INDENT> pDrug = random.random() <NEW_LINE> if pDrug <= self.mutProb: <NEW_LINE> <INDENT> childResistances[i] = not(self.resistances[i]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> childResistances[i] = self.resistances[i] <NEW_LINE> <DEDENT> <DEDENT> childVirus = ResistantVirus(self.maxBirthProb, self.clearProb, childResistances, self.mutProb) <NEW_LINE> return childVirus <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise NoChildException() <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> raise NoChildException()
Representation of a virus which can have drug resistance.
62598f9d4a966d76dd5eecb4
class SourceFile(File): <NEW_LINE> <INDENT> _ITEMNAME = "None"
Add a generic file to use for building. These files will be included in the sdist, but will not be copied in-place or included in wheels.
62598f9dd7e4931a7ef3be6c
class TopicUpdateView(PermissionRequiredMixin, TopicFormView): <NEW_LINE> <INDENT> model = Topic <NEW_LINE> success_message = _('This message has been edited successfully.') <NEW_LINE> template_name = 'forum_conversation/topic_update.html' <NEW_LINE> def get(self, request, *args, **kwargs): <NEW_LINE> <INDENT> self.object = self.get_object() <NEW_LINE> return super().get(request, *args, **kwargs) <NEW_LINE> <DEDENT> def post(self, request, *args, **kwargs): <NEW_LINE> <INDENT> self.object = self.get_object() <NEW_LINE> return super().post(request, *args, **kwargs) <NEW_LINE> <DEDENT> def get_controlled_object(self): <NEW_LINE> <INDENT> return self.get_topic().first_post <NEW_LINE> <DEDENT> def get_object(self, queryset=None): <NEW_LINE> <INDENT> return self.get_topic() <NEW_LINE> <DEDENT> def get_post(self): <NEW_LINE> <INDENT> return self.get_topic().first_post <NEW_LINE> <DEDENT> def get_success_url(self): <NEW_LINE> <INDENT> return reverse( 'forum_conversation:topic', kwargs={ 'forum_slug': self.forum_post.topic.forum.slug, 'forum_pk': self.forum_post.topic.forum.pk, 'slug': self.forum_post.topic.slug, 'pk': self.forum_post.topic.pk, }, ) <NEW_LINE> <DEDENT> def perform_permissions_check(self, user, obj, perms): <NEW_LINE> <INDENT> return self.request.forum_permission_handler.can_edit_post(obj, user)
Allows users to update forum topics.
62598f9dcc0a2c111447addf
class User(AbstractBaseUser, PermissionsMixin): <NEW_LINE> <INDENT> email = models.EmailField(max_length=255, unique=True) <NEW_LINE> first_name = models.CharField(max_length=40) <NEW_LINE> last_name = models.CharField(max_length=40) <NEW_LINE> image = models.ImageField('Profile picture', upload_to='profiles', blank=True, null=True) <NEW_LINE> is_active = models.BooleanField(default=True) <NEW_LINE> is_staff = models.BooleanField(default=False) <NEW_LINE> is_superuser = models.BooleanField(default=False) <NEW_LINE> objects = UserManager() <NEW_LINE> REQUIRED_FIELDS = ('first_name', 'last_name') <NEW_LINE> USERNAME_FIELD = 'email' <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return f"{self.email}" <NEW_LINE> <DEDENT> def get_full_name(self): <NEW_LINE> <INDENT> return f"{self.first_name} {self.last_name}"
User information
62598f9df7d966606f747dbb
class LocationError(WeathereggException): <NEW_LINE> <INDENT> pass
Invalid location
62598f9dbe8e80087fbbee32
class Recursive: <NEW_LINE> <INDENT> @rpc() <NEW_LINE> def ep(self, a: int) -> int: <NEW_LINE> <INDENT> assert a > 0 <NEW_LINE> s = service(Recursive, origin(), ClientConfig(timeout_total=1., horz=False)) <NEW_LINE> if a == 1: <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> elif a > 1: <NEW_LINE> <INDENT> return s.ep(a - 1) <NEW_LINE> <DEDENT> <DEDENT> @signal() <NEW_LINE> def exit(self): <NEW_LINE> <INDENT> raise TerminationException()
Recursively call itself till exit
62598f9d3539df3088ecc089
class StatusWriter:
    """Writes status messages to stderr and, optionally, to an XML log."""

    def __init__(self, vers):
        self.vers = vers
        self.xmlLogHandle = None  # set by setXmlLog()
        self.pid = os.getpid()

    def setXmlLog(self, xmlLogFile):
        """Open *xmlLogFile* for writing and emit the opening <Log> tag."""
        xmlLogFile = xmlLogFile.strip()
        try:
            self.xmlLogHandle = open(xmlLogFile, 'w')
            self.xmlLogHandle.write("<Log>\n")
        except OSError:
            sys.exit("\nFailed to open {} for writing\n\n".format(xmlLogFile))

    def closeXmlLog(self):
        """Emit the closing </Log> tag and close the log file."""
        self.xmlLogHandle.write("</Log>\n")
        self.xmlLogHandle.close()

    def outputInfo(self, message):
        """Log an informational message."""
        when = self.getTime()
        if self.xmlLogHandle:
            self.xmlLogHandle.write('<info app="fastq-load.py" message="{}" pid="{}" timestamp="{}" version="{}"/>\n'
                                    .format(self.escape(message), self.pid, when, self.vers))
            self.xmlLogHandle.flush()
        sys.stderr.write("{} fastq-load.py.{} info: {}\n".format(when, self.vers, message))
        sys.stderr.flush()

    def outputWarning(self, message):
        """Log a warning message."""
        when = self.getTime()
        if self.xmlLogHandle:
            self.xmlLogHandle.write('<warning app="fastq-load.py" message="{}" pid="{}" timestamp="{}" version="{}"/>\n'
                                    .format(self.escape(message), self.pid, when, self.vers))
            self.xmlLogHandle.flush()
        sys.stderr.write("{} fastq-load.py.{} warn: {}\n".format(when, self.vers, message))
        sys.stderr.flush()

    def outputErrorAndExit(self, message):
        """Log an error, close the XML log, and terminate the process."""
        when = self.getTime()
        if self.xmlLogHandle:
            self.xmlLogHandle.write('<error app="fastq-load.py" message="{}" pid="{}" timestamp="{}" version="{}"/>\n'
                                    .format(self.escape(message), self.pid, when, self.vers))
            self.xmlLogHandle.flush()
            self.closeXmlLog()
        sys.exit("\n{} fastq-load.py.{} Error: {}\n\n".format(when, self.vers, message))

    @staticmethod
    def escape(message):
        """XML-escape the five special characters (ampersand first)."""
        for raw, entity in (('&', '&amp;'), ('"', '&quot;'),
                            ("'", '&apos;'), ('<', '&lt;'), ('>', '&gt;')):
            message = message.replace(raw, entity)
        return message

    @staticmethod
    def getTime():
        """Current UTC time as an ISO-8601 string, without microseconds."""
        return datetime.datetime.utcnow().replace(microsecond=0).isoformat()
Outputs status to stderr and optionally to an xml log file
62598f9d379a373c97d98de8
class _MarkovChainToGaussian(Deterministic): <NEW_LINE> <INDENT> _moments = GaussianMoments(1) <NEW_LINE> _parent_moments = (GaussianMarkovChainMoments(),) <NEW_LINE> def __init__(self, X, **kwargs): <NEW_LINE> <INDENT> if utils.is_numeric(X): <NEW_LINE> <INDENT> X = Constant(GaussianMarkovChain)(X) <NEW_LINE> <DEDENT> dims = ( X.dims[0][-1:], X.dims[1][-2:] ) <NEW_LINE> super().__init__(X, dims=dims, **kwargs) <NEW_LINE> <DEDENT> def _plates_to_parent(self, index): <NEW_LINE> <INDENT> return self.plates[:-1] <NEW_LINE> <DEDENT> def _plates_from_parent(self, index): <NEW_LINE> <INDENT> if index != 0: <NEW_LINE> <INDENT> raise ValueError("Invalid parent index.") <NEW_LINE> <DEDENT> parent = self.parents[0] <NEW_LINE> plates = parent.plates + (parent.dims[0][0],) <NEW_LINE> return plates <NEW_LINE> <DEDENT> def _compute_moments(self, u): <NEW_LINE> <INDENT> return u[:2] <NEW_LINE> <DEDENT> def _compute_mask_to_parent(self, index, mask): <NEW_LINE> <INDENT> if np.ndim(mask) >= 1: <NEW_LINE> <INDENT> mask = np.any(mask, axis=-1) <NEW_LINE> <DEDENT> return mask <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def _compute_message_to_parent(index, m_children, *u_parents): <NEW_LINE> <INDENT> return [m_children[0], m_children[1], None]
Transform a Gaussian Markov chain node into a Gaussian node. This node is deterministic.
62598f9d56ac1b37e6301fbe
class OutOfRange(BadRequest): <NEW_LINE> <INDENT> grpc_status_code = grpc.StatusCode.OUT_OF_RANGE if grpc is not None else None
Exception mapping a :attr:`grpc.StatusCode.OUT_OF_RANGE` error.
62598f9d3eb6a72ae038a414
class _SelfAttentionBlock(nn.Module):
    """Self-attention (non-local) block with PSP-pooled keys and values.

    Input:  N x C x H x W feature map.
    Output: N x out_channels x H x W position-aware context features
            (not concatenated/added to the input).

    Parameters:
        in_channels   : channels of the input feature map.
        key_channels  : channels after the key/query transform.
        value_channels: channels after the value transform.
        out_channels  : channels of the output (defaults to in_channels).
        scale         : downsampling factor for the input (saves memory).
        norm_type     : normalization type forwarded to ModuleHelper.BNReLU.
        psp_size      : pyramid bin sizes used to pool keys/values.
    """
    def __init__(self, in_channels, key_channels, value_channels, out_channels=None, scale=1, norm_type=None,psp_size=(1,3,6,8)):
        super(_SelfAttentionBlock, self).__init__()
        self.scale = scale
        self.in_channels = in_channels
        self.out_channels = out_channels
        self.key_channels = key_channels
        self.value_channels = value_channels
        if out_channels == None:
            self.out_channels = in_channels
        self.pool = nn.MaxPool2d(kernel_size=(scale, scale))
        # 1x1 conv + BN/ReLU producing the keys; queries share these weights.
        self.f_key = nn.Sequential(
            nn.Conv2d(in_channels=self.in_channels, out_channels=self.key_channels,
                      kernel_size=1, stride=1, padding=0),
            ModuleHelper.BNReLU(self.key_channels, norm_type=norm_type),
        )
        self.f_query = self.f_key  # query transform shares the key weights
        self.f_value = nn.Conv2d(in_channels=self.in_channels, out_channels=self.value_channels,
                                 kernel_size=1, stride=1, padding=0)
        self.W = nn.Conv2d(in_channels=self.value_channels, out_channels=self.out_channels,
                           kernel_size=1, stride=1, padding=0)
        self.psp = PSPModule(psp_size)
        # Zero-init the output projection so the block contributes zeros
        # before training.
        nn.init.constant_(self.W.weight, 0)
        nn.init.constant_(self.W.bias, 0)

    def forward(self, x):
        batch_size, h, w = x.size(0), x.size(2), x.size(3)
        if self.scale > 1:
            x = self.pool(x)
        # Keys/values are PSP-pooled to a reduced set of positions;
        # queries keep full spatial resolution.
        value = self.psp(self.f_value(x))
        query = self.f_query(x).view(batch_size, self.key_channels, -1)
        query = query.permute(0, 2, 1)
        key = self.f_key(x)
        value = value.permute(0, 2, 1)
        key = self.psp(key)
        # Scaled dot-product attention over the pooled positions.
        sim_map = torch.matmul(query, key)
        sim_map = (self.key_channels ** -.5) * sim_map
        sim_map = F.softmax(sim_map, dim=-1)
        context = torch.matmul(sim_map, value)
        context = context.permute(0, 2, 1).contiguous()
        context = context.view(batch_size, self.value_channels, *x.size()[2:])
        context = self.W(context)
        return context
The basic implementation for self-attention block/non-local block Input: N X C X H X W Parameters: in_channels : the dimension of the input feature map key_channels : the dimension after the key/query transform value_channels : the dimension after the value transform scale : choose the scale to downsample the input feature maps (save memory cost) Return: N X C X H X W position-aware context features.(w/o concate or add with the input)
62598f9ddd821e528d6d8d09
class StdSetPrinter: <NEW_LINE> <INDENT> class _iter: <NEW_LINE> <INDENT> def __init__(self, rbiter, type): <NEW_LINE> <INDENT> self.rbiter = rbiter <NEW_LINE> self.count = 0 <NEW_LINE> self.type = type <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> def next(self): <NEW_LINE> <INDENT> item = self.rbiter.next() <NEW_LINE> item = item.cast(self.type).dereference()['_M_value_field'] <NEW_LINE> result = ('[%d]' % self.count, item) <NEW_LINE> self.count = self.count + 1 <NEW_LINE> return result <NEW_LINE> <DEDENT> <DEDENT> def __init__ (self, typename, val): <NEW_LINE> <INDENT> self.typename = typename <NEW_LINE> self.val = val <NEW_LINE> <DEDENT> def to_string (self): <NEW_LINE> <INDENT> return '%s with %d elements' % (self.typename, len (RbtreeIterator (self.val))) <NEW_LINE> <DEDENT> def children (self): <NEW_LINE> <INDENT> rep_type = find_type(self.val.type, '_Rep_type') <NEW_LINE> node = find_type(rep_type, '_Link_type') <NEW_LINE> node = node.strip_typedefs() <NEW_LINE> return self._iter (RbtreeIterator (self.val), node)
Print a std::set or std::multiset
62598f9dcc0a2c111447ade0
class JSONRedisHashDict(RedisHashDict, JSONSerializer): <NEW_LINE> <INDENT> pass
Serialize hash-map values using JSON.
62598f9dbe383301e02535c9
class MessageSerializer(serializers.ModelSerializer): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> model = Message <NEW_LINE> fields = ["content"]
Message serializer
62598f9dfff4ab517ebcd5c2
class MeteringConfiguration(BSElement): <NEW_LINE> <INDENT> element_type = "xs:string" <NEW_LINE> element_enumerations = [ "Direct metering", "Master meter without sub metering", "Master meter with sub metering", "Other", "Unknown", ]
The structure of how the various meters are arranged.
62598f9d3539df3088ecc08a
class VideoFileClip(VideoClip): <NEW_LINE> <INDENT> def __init__(self, filename, has_mask=False, audio=True, audio_buffersize = 200000, audio_fps=44100, audio_nbytes=2, verbose=False): <NEW_LINE> <INDENT> VideoClip.__init__(self) <NEW_LINE> pix_fmt= "rgba" if has_mask else "rgb24" <NEW_LINE> self.reader = FFMPEG_VideoReader(filename, pix_fmt=pix_fmt) <NEW_LINE> self.duration = self.reader.duration <NEW_LINE> self.end = self.reader.duration <NEW_LINE> self.fps = self.reader.fps <NEW_LINE> self.size = self.reader.size <NEW_LINE> if has_mask: <NEW_LINE> <INDENT> self.get_frame = lambda t: self.reader.get_frame(t)[:,:,:3] <NEW_LINE> mask_gf = lambda t: self.reader.get_frame(t)[:,:,3]/255.0 <NEW_LINE> self.mask = (VideoClip(ismask = True, get_frame = mask_gf) .set_duration(self.duration)) <NEW_LINE> self.mask.fps = self.fps <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.get_frame = lambda t: self.reader.get_frame(t) <NEW_LINE> <DEDENT> if audio and self.reader.infos['audio_found']: <NEW_LINE> <INDENT> self.audio = AudioFileClip(filename, buffersize= audio_buffersize, fps = audio_fps, nbytes = audio_nbytes)
A video clip originating from a movie file. For instance: :: >>> clip = VideoFileClip("myHolidays.mp4") >>> clip2 = VideoFileClip("myMaskVideo.avi") Parameters ------------ filename: The name of the video file. It can have any extension supported by ffmpeg: .ogv, .mp4, .mpeg, .avi, .mov etc. has_mask: Set this to 'True' if there is a mask included in the videofile. Video files rarely contain masks, but some video codecs enable that. For instance if you have a MoviePy VideoClip with a mask you can save it to a videofile with a mask. (see also ``VideoClip.to_videofile`` for more details). audio: Set to `False` if the clip doesn't have any audio or if you do not wish to read the audio. Attributes ----------- filename: Name of the original video file. fps: Frames per second in the original file.
62598f9da8ecb03325870fe0
@register <NEW_LINE> class FocusCell(DOMWidget): <NEW_LINE> <INDENT> _view_name = Unicode("FocusCell").tag(sync=True) <NEW_LINE> _view_module = Unicode("nbextensions/chmp-widgets/widgets").tag(sync=True) <NEW_LINE> _view_module_version = Unicode("0.1.0").tag(sync=True)
A widget to hide all other cells, but the one containing this widget. Usage:: # in a notebook cell widget = FocusCell() widget
62598f9d2ae34c7f260aaeb5
class Trafficlight (object): <NEW_LINE> <INDENT> def __init__ (self, pin_red, pin_green, pin_orange=None): <NEW_LINE> <INDENT> self.red = Lamp(pin_red) <NEW_LINE> self.green = Lamp(pin_green) <NEW_LINE> self._lamps = [self.red, self.green] <NEW_LINE> if pin_orange: <NEW_LINE> <INDENT> self.orange = Lamp(pin_orange) <NEW_LINE> self._lamps.append(self.orange) <NEW_LINE> <DEDENT> <DEDENT> def all_on (self): <NEW_LINE> <INDENT> for lamp in self._lamps: <NEW_LINE> <INDENT> lamp.on() <NEW_LINE> <DEDENT> <DEDENT> def all_off (self): <NEW_LINE> <INDENT> for lamp in self._lamps: <NEW_LINE> <INDENT> lamp.off()
one traffic light built of three lamps (red, orange, green)
62598f9d8e71fb1e983bb88a
class LoginForm(AuthenticationForm ): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super().__init__(*args, **kwargs) <NEW_LINE> for field in self.fields.values(): <NEW_LINE> <INDENT> field.widget.attrs['class'] = 'login-form-control' <NEW_LINE> field.widget.attrs['placeholder'] = field.label <NEW_LINE> field.widget.attrs['onkeyup'] = "this.setAttribute('value', this.value);" <NEW_LINE> field.widget.attrs['value'] = ""
ログインフォーム
62598f9d01c39578d7f12b52
class BaseUpdateView(DocumentFormMixin, ProcessFormView): <NEW_LINE> <INDENT> def get(self, request, *args, **kwargs): <NEW_LINE> <INDENT> self.object = self.get_object() <NEW_LINE> return super(BaseUpdateView, self).get(request, *args, **kwargs) <NEW_LINE> <DEDENT> def post(self, request, *args, **kwargs): <NEW_LINE> <INDENT> self.object = self.get_object() <NEW_LINE> return super(BaseUpdateView, self).post(request, *args, **kwargs)
Base view for updating an existing object. Using this base class requires subclassing to provide a response mixin.
62598f9d63b5f9789fe84f4a
class NimbusConfManager(ConfManager): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> ConfManager.__init__(self, 'nimbus') <NEW_LINE> self.cloud_parser = SafeConfigParser() <NEW_LINE> <DEDENT> def read(self): <NEW_LINE> <INDENT> ConfManager.read(self) <NEW_LINE> self.read_login_data() <NEW_LINE> self.read_cloud_conf() <NEW_LINE> <DEDENT> def read_login_data(self): <NEW_LINE> <INDENT> self.port = self.parser.get('nimbus', 'port') <NEW_LINE> self.path = self.parser.get('nimbus', 'path') <NEW_LINE> self.canonical_id = self.parser.get('nimbus', 'canonical_id') <NEW_LINE> self.ssh_key_id = self.parser.get('nimbus', 'ssh_key_id') <NEW_LINE> up = urlparse.urlparse(self.parser.get('autoscaling', 'phantom_url')) <NEW_LINE> self.phantom_host = up.hostname <NEW_LINE> self.phantom_port = up.port <NEW_LINE> <DEDENT> def read_cloud_conf(self): <NEW_LINE> <INDENT> self.cloud_parser.readfp(FakeSecHead(open(self.path + '/conf/hotel.conf'))) <NEW_LINE> self.ec2_url = self.cloud_parser.get('cloud', 'vws.repository') <NEW_LINE> self.ec2_host = self.ec2_url.split(":")[0] <NEW_LINE> self.ec2_port = self.ec2_url.split(":")[1] <NEW_LINE> self.ec2_access_key_id = self.cloud_parser.get('cloud', 'vws.repository.s3id') <NEW_LINE> self.ec2_secret_access_key = self.cloud_parser.get('cloud', 'vws.repository.s3key') <NEW_LINE> self.ec2_path = "" <NEW_LINE> <DEDENT> def get_repository_host(self): <NEW_LINE> <INDENT> return
nimbus configuration management
62598f9d97e22403b383ace0
class GitCommitTrigger(Model): <NEW_LINE> <INDENT> _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'commit_id': {'key': 'commitId', 'type': 'str'}, 'repository_url': {'key': 'repositoryUrl', 'type': 'str'}, 'branch_name': {'key': 'branchName', 'type': 'str'}, 'provider_type': {'key': 'providerType', 'type': 'str'}, } <NEW_LINE> def __init__(self, **kwargs): <NEW_LINE> <INDENT> super(GitCommitTrigger, self).__init__(**kwargs) <NEW_LINE> self.id = kwargs.get('id', None) <NEW_LINE> self.commit_id = kwargs.get('commit_id', None) <NEW_LINE> self.repository_url = kwargs.get('repository_url', None) <NEW_LINE> self.branch_name = kwargs.get('branch_name', None) <NEW_LINE> self.provider_type = kwargs.get('provider_type', None)
The git commit trigger that caused a build. :param id: The unique ID of the trigger. :type id: str :param commit_id: The unique ID that identifies a commit. :type commit_id: str :param repository_url: The repository URL. :type repository_url: str :param branch_name: The branch name in the repository. :type branch_name: str :param provider_type: The source control provider type. :type provider_type: str
62598f9d7cff6e4e811b57f7
class NumericInFilter(django_filters.BaseInFilter, django_filters.NumberFilter): <NEW_LINE> <INDENT> pass
Filters for set of numeric values. Example: id__in=100,200,300
62598f9d4f6381625f1993a6
class Evidence(dict): <NEW_LINE> <INDENT> def __setitem__(self, keys, values): <NEW_LINE> <INDENT> if not isinstance(keys, types.ListType): <NEW_LINE> <INDENT> keys = [keys] <NEW_LINE> values = [values] <NEW_LINE> <DEDENT> elif (not isinstance(values, types.ListType)) and (not isinstance(values, ArrayType)): <NEW_LINE> <INDENT> values = [values]*len(keys) <NEW_LINE> <DEDENT> items = zip(keys,values) <NEW_LINE> self.update(items) <NEW_LINE> <DEDENT> def __getitem__(self, keys): <NEW_LINE> <INDENT> if isinstance(keys, types.ListType): <NEW_LINE> <INDENT> values = [] <NEW_LINE> for key in keys: <NEW_LINE> <INDENT> values.append(self.get(key)) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> values = self.get(keys) <NEW_LINE> <DEDENT> return values <NEW_LINE> <DEDENT> def empty(self): <NEW_LINE> <INDENT> nonEvidence = [] <NEW_LINE> for item in self.items(): <NEW_LINE> <INDENT> if item[1] == BLANKEVIDENCE: <NEW_LINE> <INDENT> nonEvidence.append(item[0]) <NEW_LINE> <DEDENT> <DEDENT> return nonEvidence <NEW_LINE> <DEDENT> def set_nodes(self): <NEW_LINE> <INDENT> ev = [] <NEW_LINE> for item in self.items(): <NEW_LINE> <INDENT> if item[1] != BLANKEVIDENCE: <NEW_LINE> <INDENT> ev.append(item[0]) <NEW_LINE> <DEDENT> <DEDENT> return ev <NEW_LINE> <DEDENT> def __copy__(self): <NEW_LINE> <INDENT> new = Evidence() <NEW_LINE> new.update(self.items()) <NEW_LINE> return new
This is the data structure for evidence. It acts exactly like a dictionary except that it will take lists of keys with the [] notation, rather than just single keys.
62598f9d4a966d76dd5eecb6
class PipelineServerTestCase(TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> super(PipelineServerTestCase, self).setUp() <NEW_LINE> self.server = ThreadedHTTPServer() <NEW_LINE> self.addCleanup(self.server.shutdown) <NEW_LINE> self.url = "http://127.0.0.1:{port}".format(port=self.server.port) <NEW_LINE> <DEDENT> @patch("build_pipeline.build_pipeline.parse_webhook_payload") <NEW_LINE> def test_github_event(self, mock_downstream): <NEW_LINE> <INDENT> mock_downstream.return_value = 'foo' <NEW_LINE> headers = {'X-GitHub-Event': 'foo', 'content-type': 'application/json'} <NEW_LINE> payload = {'repository': 'bar'} <NEW_LINE> response = requests.post(self.url, headers=headers, data=json.dumps(payload)) <NEW_LINE> self.assertEqual(response.status_code, 200) <NEW_LINE> <DEDENT> def test_get_request(self): <NEW_LINE> <INDENT> response = requests.get(self.url, data={}) <NEW_LINE> self.assertEqual(response.status_code, 501)
TestCase class for verifying the HTTP server that is servicing the webhooks from GitHub.
62598f9dd6c5a102081e1f1a
class EnrollmentByBirthYearPartitionTask(HivePartitionTask): <NEW_LINE> <INDENT> date = luigi.DateParameter() <NEW_LINE> @property <NEW_LINE> def hive_table_task(self): <NEW_LINE> <INDENT> return EnrollmentByBirthYearTaskTableTask( warehouse_path=self.warehouse_path, overwrite=self.overwrite ) <NEW_LINE> <DEDENT> @property <NEW_LINE> def partition_value(self): <NEW_LINE> <INDENT> return self.date.isoformat()
Creates storage partition for the `course_enrollment_birth_year_daily` Hive table.
62598f9d009cb60464d012fa
class SanshokuDoukou(Yaku): <NEW_LINE> <INDENT> def set_attributes(self): <NEW_LINE> <INDENT> self.yaku_id = 26 <NEW_LINE> self.name = 'Sanshoku Doukou' <NEW_LINE> self.han_open = 2 <NEW_LINE> self.han_closed = 2 <NEW_LINE> self.is_yakuman = False <NEW_LINE> <DEDENT> def is_condition_met(self, hand, *args): <NEW_LINE> <INDENT> pon_sets = [i for i in hand if is_pon(i)] <NEW_LINE> if len(pon_sets) < 3: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> sou_pon = [] <NEW_LINE> pin_pon = [] <NEW_LINE> man_pon = [] <NEW_LINE> for item in pon_sets: <NEW_LINE> <INDENT> if is_sou(item[0]): <NEW_LINE> <INDENT> sou_pon.append(item) <NEW_LINE> <DEDENT> elif is_pin(item[0]): <NEW_LINE> <INDENT> pin_pon.append(item) <NEW_LINE> <DEDENT> elif is_man(item[0]): <NEW_LINE> <INDENT> man_pon.append(item) <NEW_LINE> <DEDENT> <DEDENT> for sou_item in sou_pon: <NEW_LINE> <INDENT> for pin_item in pin_pon: <NEW_LINE> <INDENT> for man_item in man_pon: <NEW_LINE> <INDENT> sou_item = [simplify(x) for x in sou_item] <NEW_LINE> pin_item = [simplify(x) for x in pin_item] <NEW_LINE> man_item = [simplify(x) for x in man_item] <NEW_LINE> if sou_item == pin_item == man_item: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> return False
Three pon sets consisting of the same numbers in all three suits
62598f9d0c0af96317c56158
class CrossbarWampRawSocketClientProtocol(WampRawSocketClientProtocol): <NEW_LINE> <INDENT> pass
Crossbar.io WAMP-over-RawSocket client protocol.
62598f9d91af0d3eaad39be0
class CompareDist(rdFMCS.MCSAtomCompare): <NEW_LINE> <INDENT> def __init__(self, threshold=0.5, *args, **kwargs): <NEW_LINE> <INDENT> super().__init__(*args, **kwargs) <NEW_LINE> self.threshold = threshold <NEW_LINE> <DEDENT> def compare(self, p, mol1, atom1, mol2, atom2): <NEW_LINE> <INDENT> x_i = mol1.GetConformer(0).GetPositions()[atom1] <NEW_LINE> x_j = mol2.GetConformer(0).GetPositions()[atom2] <NEW_LINE> return bool(np.linalg.norm(x_i - x_j) <= self.threshold)
Custom atom comparison: use positions within generated conformer
62598f9d236d856c2adc9324
class mainFrame(wx.Frame): <NEW_LINE> <INDENT> def __init__(self, *args, **kwds): <NEW_LINE> <INDENT> kwds["style"] = wx.DEFAULT_FRAME_STYLE <NEW_LINE> wx.Frame.__init__(self, *args, **kwds) <NEW_LINE> self.pickMaskBrowser = FileBrowseButton(self, -1, labelText='Mask File')
Creates the main window of the application.
62598f9df7d966606f747dbd
class profile(object): <NEW_LINE> <INDENT> def __init__(self, enabled=True): <NEW_LINE> <INDENT> self.enabled = enabled <NEW_LINE> self.function_events = None <NEW_LINE> if not self.enabled: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> self.entered = False <NEW_LINE> <DEDENT> def __enter__(self): <NEW_LINE> <INDENT> if not self.enabled: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> if self.entered: <NEW_LINE> <INDENT> raise RuntimeError("autograd profiler traces are not reentrant") <NEW_LINE> <DEDENT> self.entered = True <NEW_LINE> torch.autograd._enable_profiler(False) <NEW_LINE> return self <NEW_LINE> <DEDENT> def __exit__(self, exc_type, exc_val, exc_tb): <NEW_LINE> <INDENT> if not self.enabled: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> records = torch.autograd._disable_profiler() <NEW_LINE> self.function_events = EventList(parse_cpu_trace(records)) <NEW_LINE> return False <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> if self.function_events is None: <NEW_LINE> <INDENT> return '<unfinished torch.autograd.profile>' <NEW_LINE> <DEDENT> return repr(self.function_events) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> if self.function_events is None: <NEW_LINE> <INDENT> return '<unfinished torch.autograd.profile>' <NEW_LINE> <DEDENT> return str(self.function_events) <NEW_LINE> <DEDENT> def table(self, sort_by=None): <NEW_LINE> <INDENT> if self.function_events is None: <NEW_LINE> <INDENT> raise RuntimeError("can't export a trace that didn't finish running") <NEW_LINE> <DEDENT> return self.function_events.table(sort_by) <NEW_LINE> <DEDENT> table.__doc__ = EventList.table.__doc__ <NEW_LINE> def export_chrome_trace(self, path): <NEW_LINE> <INDENT> if self.function_events is None: <NEW_LINE> <INDENT> raise RuntimeError("can't export a trace that didn't finish running") <NEW_LINE> <DEDENT> return self.function_events.export_chrome_trace(path) <NEW_LINE> <DEDENT> export_chrome_trace.__doc__ = EventList.export_chrome_trace.__doc__ <NEW_LINE> 
def key_averages(self): <NEW_LINE> <INDENT> if self.function_events is None: <NEW_LINE> <INDENT> raise RuntimeError("can't average a trace that didn't finish running") <NEW_LINE> <DEDENT> return self.function_events.key_averages() <NEW_LINE> <DEDENT> key_averages.__doc__ = EventList.key_averages.__doc__ <NEW_LINE> def total_average(self): <NEW_LINE> <INDENT> if self.function_events is None: <NEW_LINE> <INDENT> raise RuntimeError("can't average a trace that didn't finish running") <NEW_LINE> <DEDENT> return self.function_events.total_average() <NEW_LINE> <DEDENT> total_average.__doc__ = EventList.total_average.__doc__
结果的评价指标. 参数: enabled (bool, 可选): 如果设置为 False ,则没有评价指标. Default: ``True``. .. 警告: 不应该递归地调用这个上下文管理器,即最多一个实例应该在任何给定的时间启用. Example: >>> x = Variable(torch.randn(1, 1), requires_grad=True) >>> with torch.autograd.profiler.profile() as prof: ... y = x ** 2 ... y.backward() >>> # NOTE: some columns were removed for brevity ... print(prof) ------------------------------------- --------------- --------------- Name CPU time CUDA time ------------------------------------- --------------- --------------- PowConstant 142.036us 0.000us N5torch8autograd9GraphRootE 63.524us 0.000us PowConstantBackward 184.228us 0.000us MulConstant 50.288us 0.000us PowConstant 28.439us 0.000us Mul 20.154us 0.000us N5torch8autograd14AccumulateGradE 13.790us 0.000us N5torch8autograd5CloneE 4.088us 0.000us
62598f9db5575c28eb712bb8
class RedisDataHandler(FileDataHandler): <NEW_LINE> <INDENT> def __init__(self, filename, host='localhost', port=6379, key_prefix=None): <NEW_LINE> <INDENT> self._trec_qrels = self._initialise_handler(filename=filename, host=host, port=port, key_prefix=key_prefix) <NEW_LINE> <DEDENT> def _initialise_handler(self, filename, host, port, key_prefix): <NEW_LINE> <INDENT> key = os.path.split(filename)[-1] <NEW_LINE> if key_prefix is None: <NEW_LINE> <INDENT> raise ValueError("A key prefix (string) must be specified for the RedisDataHandler.") <NEW_LINE> <DEDENT> key = '{key_prefix}::{hashed_key}'.format(key_prefix=key_prefix, hashed_key=hash(key)) <NEW_LINE> cache = redis.StrictRedis(host=host, port=port, db=0) <NEW_LINE> if cache.get(key): <NEW_LINE> <INDENT> dumped = cache.get(key) <NEW_LINE> return cPickle.loads(dumped) <NEW_LINE> <DEDENT> handler = super(RedisDataHandler, self)._initialise_handler(filename) <NEW_LINE> dumped = cPickle.dumps(handler) <NEW_LINE> cache.set(key, dumped) <NEW_LINE> return handler
Extends the FileDataHandler to consider a TrecQrelHandler object stored in a Redis cache. If it is found that a TrecQrelHandler object does not exist for the given key, a new TrecQrelHandler is instantiated using the filename given. This handler is then placed in the Redis cache, ready for the next use.
62598f9ddd821e528d6d8d0a
class NicknameApiTestCase(SimpleNameApiTestCase): <NEW_LINE> <INDENT> factory_class = factories.NicknameModelFactory <NEW_LINE> model_class = models.Nickname <NEW_LINE> serializer_class = serializers.NicknameSerializer <NEW_LINE> url_detail = "nickname-detail" <NEW_LINE> url_list = "nickname-list" <NEW_LINE> name = "smiley" <NEW_LINE> def test_create_nickname(self): <NEW_LINE> <INDENT> self.verify_create_name() <NEW_LINE> <DEDENT> def test_create_nickname_partial(self): <NEW_LINE> <INDENT> data = self.name_data() <NEW_LINE> self.verify_create_name(data=data) <NEW_LINE> <DEDENT> def test_get_nickname_name(self): <NEW_LINE> <INDENT> self.verify_get_defaults() <NEW_LINE> <DEDENT> def test_put_nickname_partial(self): <NEW_LINE> <INDENT> instance = self.create_instance_default() <NEW_LINE> data = dict(id=instance.id, name=self.name) <NEW_LINE> self.verify_put(self.url_detail, instance, data, self.serializer_class) <NEW_LINE> <DEDENT> def test_delete_nickname(self): <NEW_LINE> <INDENT> self.verify_delete_default()
Nickname API unit test class.
62598f9d3539df3088ecc08b
class Config(object): <NEW_LINE> <INDENT> def __init__(self, logcat_params=None, clear_log=True, output_file_path=None): <NEW_LINE> <INDENT> self.clear_log = clear_log <NEW_LINE> self.logcat_params = logcat_params if logcat_params else '' <NEW_LINE> self.output_file_path = output_file_path
Config object for logcat service. Attributes: clear_log: bool, clears the logcat before collection if True. logcat_params: string, extra params to be added to logcat command. output_file_path: string, the path on the host to write the log file to, including the actual filename. The service will automatically generate one if not specified.
62598f9d3c8af77a43b67e29
class NullListFilter(admin.SimpleListFilter): <NEW_LINE> <INDENT> def lookups(self, request, model_admin): <NEW_LINE> <INDENT> return (('0', 'Not None',), ('1', 'None',),) <NEW_LINE> <DEDENT> def queryset(self, request, queryset): <NEW_LINE> <INDENT> if self.value() in ('0', '1'): <NEW_LINE> <INDENT> kwargs = { '{}__isnull'.format(self.parameter_name): self.value() == '1' } <NEW_LINE> return queryset.filter(**kwargs) <NEW_LINE> <DEDENT> return queryset
Admin list filter to filter for whether a field is null or not null.
62598f9d4e4d5625663721f9
class Packet(object): <NEW_LINE> <INDENT> def __init__(self, route=None, command=None): <NEW_LINE> <INDENT> self.route = route <NEW_LINE> self.command = command <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def try_parse(packet=""): <NEW_LINE> <INDENT> packet_CRC = packet[-4:] <NEW_LINE> check_CRC = detect.Crc('crc-16') <NEW_LINE> check_CRC.update((packet[:-4]).encode('utf-8')) <NEW_LINE> if packet_CRC == check_CRC.hexdigest(): <NEW_LINE> <INDENT> p_components = packet[:-4].split(MeshConfiguration.separator) <NEW_LINE> route = classes.Route.from_string(p_components[0], p_components[1], p_components[2], p_components[3]) <NEW_LINE> command = classes.Command.from_string(p_components[4], p_components[5], p_components[6]) <NEW_LINE> return Packet(route=route, command=command) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise exceptions.CorruptPacketException(['CRC code did not match.', packet]) <NEW_LINE> <DEDENT> <DEDENT> def crc16(self): <NEW_LINE> <INDENT> crc = detect.Crc('crc-16') <NEW_LINE> crc.update(str(self).encode('utf-8')) <NEW_LINE> return crc.hexdigest() <NEW_LINE> <DEDENT> def next_addr(self): <NEW_LINE> <INDENT> return self.route.next_addr <NEW_LINE> <DEDENT> def dest_addr(self): <NEW_LINE> <INDENT> return self.route.dest_addr <NEW_LINE> <DEDENT> def last_addr(self): <NEW_LINE> <INDENT> return self.route.last_addr <NEW_LINE> <DEDENT> def source_addr(self): <NEW_LINE> <INDENT> return self.route.source_addr <NEW_LINE> <DEDENT> def command_id(self): <NEW_LINE> <INDENT> return self.command.id <NEW_LINE> <DEDENT> def command_code(self): <NEW_LINE> <INDENT> return self.command.code <NEW_LINE> <DEDENT> def command_param(self): <NEW_LINE> <INDENT> return self.command.parameters <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return MeshConfiguration.separator.join([str(self.route), str(self.command)])
Packet class defines the structure of a mesh network packet
62598f9dd486a94d0ba2bdab
class StochasticParameterMaskGen(IBatchwiseMaskGenerator): <NEW_LINE> <INDENT> def __init__(self, parameter, per_channel): <NEW_LINE> <INDENT> super(StochasticParameterMaskGen, self).__init__() <NEW_LINE> self.parameter = parameter <NEW_LINE> self.per_channel = iap.handle_probability_param(per_channel, "per_channel") <NEW_LINE> <DEDENT> def draw_masks(self, batch, random_state=None): <NEW_LINE> <INDENT> shapes = batch.get_rowwise_shapes() <NEW_LINE> random_state = iarandom.RNG(random_state) <NEW_LINE> per_channel = self.per_channel.draw_samples((len(shapes),), random_state=random_state) <NEW_LINE> return [self._draw_mask(shape, random_state, per_channel_i) for shape, per_channel_i in zip(shapes, per_channel)] <NEW_LINE> <DEDENT> def _draw_mask(self, shape, random_state, per_channel): <NEW_LINE> <INDENT> if len(shape) == 2 or per_channel >= 0.5: <NEW_LINE> <INDENT> mask = self.parameter.draw_samples(shape, random_state=random_state) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> mask = self.parameter.draw_samples(shape[0:2], random_state=random_state) <NEW_LINE> <DEDENT> if mask.size > 0: <NEW_LINE> <INDENT> assert 0 <= mask.item(0) <= 1.0, ( "Expected 'parameter' samples to be in the interval " "[0.0, 1.0]. Got min %.4f and max %.4f." % ( np.min(mask), np.max(mask),)) <NEW_LINE> <DEDENT> return mask
Mask generator that queries stochastic parameters for mask values. This class receives batches for which to generate masks, iterates over the batch rows (i.e. images) and generates one mask per row. For a row with shape ``(H, W, C)`` (= image shape), it generates either a ``(H, W)`` mask (if ``per_channel`` is false-like) or a ``(H, W, C)`` mask (if ``per_channel`` is true-like). The ``per_channel`` is sampled per batch for each row/image. Added in 0.4.0. Parameters ---------- parameter : imgaug.parameters.StochasticParameter Stochastic parameter to draw mask samples from. Expected to return values in interval ``[0.0, 1.0]`` (not all stochastic parameters do that) and must be able to handle sampling shapes ``(H, W)`` and ``(H, W, C)`` (all stochastic parameters should do that). per_channel : bool or float or imgaug.parameters.StochasticParameter, optional Whether to use the same mask for all channels (``False``) or to sample a new mask for each channel (``True``). If this value is a float ``p``, then for ``p`` percent of all rows (i.e. images) `per_channel` will be treated as ``True``, otherwise as ``False``.
62598f9da79ad16197769e3a
class EditProfileTestCase(TestCase): <NEW_LINE> <INDENT> def test_hard_no_more_than(self): <NEW_LINE> <INDENT> utils.create_user( "userexample", "userexample@admin.com", "userexample123456" ) <NEW_LINE> User = get_user_model() <NEW_LINE> username = get_object_or_404(User, username="userexample") <NEW_LINE> Profile.objects.filter(iduser=username).update( photo="", about="Example about", location="this.location", activation_key="", key_expires=timezone.now(), is_troll=False, receive_emails=True )
Test Edit profile
62598f9dd99f1b3c44d05486
class TestNpMetrics(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.hamming = npmetrics.np_hamming_distance <NEW_LINE> self.seq_similarity = npmetrics.np_seq_similarity <NEW_LINE> self.coverage_distance = npmetrics.np_coverage_distance <NEW_LINE> self.seq_distance = npmetrics.np_seq_distance <NEW_LINE> <DEDENT> def test_hamming_distance(self): <NEW_LINE> <INDENT> for args, res in hamming_equals: <NEW_LINE> <INDENT> self.assertEqual(self.hamming(seq2vec(args[0]), seq2vec(args[1])), strmetrics.str_hamming_distance(*args)) <NEW_LINE> <DEDENT> with self.assertRaises(AssertionError): <NEW_LINE> <INDENT> self.hamming(seq2vec('AAAA'), seq2vec('AA')) <NEW_LINE> <DEDENT> <DEDENT> def test_seq_similarity(self): <NEW_LINE> <INDENT> for i, (args, res) in enumerate(similarity_equals): <NEW_LINE> <INDENT> if args[2] is None: <NEW_LINE> <INDENT> sMat = matrices.binaryMat <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> sMat = matrices.subst2mat(args[2]) <NEW_LINE> <DEDENT> self.assertEqual(self.seq_similarity(seq2vec(args[0]), seq2vec(args[1]), sMat, args[3], False), strmetrics.str_seq_similarity(*args), msg = "Test %d: (%s, %s)" % (i, args[0], args[1])) <NEW_LINE> <DEDENT> self.assertTrue(np.isnan(self.seq_similarity(seq2vec('-------'), seq2vec('---KKKK'), sMat, True, False))) <NEW_LINE> <DEDENT> def test_coverage_distance(self): <NEW_LINE> <INDENT> for i, (args, res) in enumerate(coverage_equals): <NEW_LINE> <INDENT> self.assertEqual(self.coverage_distance(seq2vec(args[0]), seq2vec(args[1]), args[2]), strmetrics.str_coverage_distance(*args), msg = "Test %d: (%s, %s)" % (i, args[0], args[1])) <NEW_LINE> <DEDENT> <DEDENT> def test_seq_distance(self): <NEW_LINE> <INDENT> for i, (args, res) in enumerate(distance_equals): <NEW_LINE> <INDENT> if args[2] is None: <NEW_LINE> <INDENT> sMat = matrices.binaryMat <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> sMat = matrices.subst2mat(args[2]) <NEW_LINE> <DEDENT> 
self.assertEqual(self.seq_distance(seq2vec(args[0]), seq2vec(args[1]), sMat, args[3]), strmetrics.str_seq_distance(*args), msg = "Test %d: (%s, %s)" % (i, args[0], args[1]))
Numpy metrics
62598f9d8a43f66fc4bf1f52
class DateValidator(DataValidator): <NEW_LINE> <INDENT> def validate(self,date): <NEW_LINE> <INDENT> pass
Validates date objects
62598f9d097d151d1a2c0dfe
class soundcard(sos.plugintools.PluginBase): <NEW_LINE> <INDENT> def defaultenabled(self): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> def setup(self): <NEW_LINE> <INDENT> self.addCopySpecs([ "/proc/asound/*", "/etc/alsa/*", "/etc/asound.*"]) <NEW_LINE> self.collectExtOutput("/sbin/lspci | grep -i audio") <NEW_LINE> self.collectExtOutput("/usr/bin/aplay -l") <NEW_LINE> self.collectExtOutput("/usr/bin/aplay -L") <NEW_LINE> self.collectExtOutput("/usr/bin/amixer") <NEW_LINE> self.collectExtOutput("/sbin/lsmod | /bin/grep snd | /bin/awk '{print $1}'", suggest_filename = "sndmodules_loaded")
Sound card information
62598f9d8e7ae83300ee8e76
class TraccarEntity(TrackerEntity, RestoreEntity): <NEW_LINE> <INDENT> def __init__(self, device, latitude, longitude, battery, accuracy, attributes): <NEW_LINE> <INDENT> self._accuracy = accuracy <NEW_LINE> self._attributes = attributes <NEW_LINE> self._name = device <NEW_LINE> self._battery = battery <NEW_LINE> self._latitude = latitude <NEW_LINE> self._longitude = longitude <NEW_LINE> self._unsub_dispatcher = None <NEW_LINE> self._unique_id = device <NEW_LINE> <DEDENT> @property <NEW_LINE> def battery_level(self): <NEW_LINE> <INDENT> return self._battery <NEW_LINE> <DEDENT> @property <NEW_LINE> def device_state_attributes(self): <NEW_LINE> <INDENT> return self._attributes <NEW_LINE> <DEDENT> @property <NEW_LINE> def latitude(self): <NEW_LINE> <INDENT> return self._latitude <NEW_LINE> <DEDENT> @property <NEW_LINE> def longitude(self): <NEW_LINE> <INDENT> return self._longitude <NEW_LINE> <DEDENT> @property <NEW_LINE> def location_accuracy(self): <NEW_LINE> <INDENT> return self._accuracy <NEW_LINE> <DEDENT> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return self._name <NEW_LINE> <DEDENT> @property <NEW_LINE> def unique_id(self): <NEW_LINE> <INDENT> return self._unique_id <NEW_LINE> <DEDENT> @property <NEW_LINE> def device_info(self): <NEW_LINE> <INDENT> return {"name": self._name, "identifiers": {(DOMAIN, self._unique_id)}} <NEW_LINE> <DEDENT> @property <NEW_LINE> def source_type(self): <NEW_LINE> <INDENT> return SOURCE_TYPE_GPS <NEW_LINE> <DEDENT> async def async_added_to_hass(self): <NEW_LINE> <INDENT> await super().async_added_to_hass() <NEW_LINE> self._unsub_dispatcher = async_dispatcher_connect( self.hass, TRACKER_UPDATE, self._async_receive_data ) <NEW_LINE> if self._latitude is not None or self._longitude is not None: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> state = await self.async_get_last_state() <NEW_LINE> if state is None: <NEW_LINE> <INDENT> self._latitude = None <NEW_LINE> self._longitude = None <NEW_LINE> self._accuracy = None 
<NEW_LINE> self._attributes = { ATTR_ALTITUDE: None, ATTR_BEARING: None, ATTR_SPEED: None, } <NEW_LINE> self._battery = None <NEW_LINE> return <NEW_LINE> <DEDENT> attr = state.attributes <NEW_LINE> self._latitude = attr.get(ATTR_LATITUDE) <NEW_LINE> self._longitude = attr.get(ATTR_LONGITUDE) <NEW_LINE> self._accuracy = attr.get(ATTR_ACCURACY) <NEW_LINE> self._attributes = { ATTR_ALTITUDE: attr.get(ATTR_ALTITUDE), ATTR_BEARING: attr.get(ATTR_BEARING), ATTR_SPEED: attr.get(ATTR_SPEED), } <NEW_LINE> self._battery = attr.get(ATTR_BATTERY) <NEW_LINE> <DEDENT> async def async_will_remove_from_hass(self): <NEW_LINE> <INDENT> await super().async_will_remove_from_hass() <NEW_LINE> self._unsub_dispatcher() <NEW_LINE> <DEDENT> @callback <NEW_LINE> def _async_receive_data( self, device, latitude, longitude, battery, accuracy, attributes ): <NEW_LINE> <INDENT> if device != self.name: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> self._latitude = latitude <NEW_LINE> self._longitude = longitude <NEW_LINE> self._battery = battery <NEW_LINE> self._accuracy = accuracy <NEW_LINE> self._attributes.update(attributes) <NEW_LINE> self.async_write_ha_state()
Represent a tracked device.
62598f9d3d592f4c4edbaca5
class StateData(MappingSchema): <NEW_LINE> <INDENT> missing = drop <NEW_LINE> @deferred <NEW_LINE> def default(self, kw): <NEW_LINE> <INDENT> return {} <NEW_LINE> <DEDENT> name = StateName() <NEW_LINE> description = Text(missing='', default='') <NEW_LINE> start_date = DateTime(missing=None, default=None)
Resource specific data for a workflow state.
62598f9d60cbc95b06364124
class Commitment(Base): <NEW_LINE> <INDENT> __tablename__ = 'commitment' <NEW_LINE> id = Column(types.UnicodeText, primary_key=True, default=make_uuid) <NEW_LINE> created = Column(types.DateTime, default=datetime.now) <NEW_LINE> source = Column(types.UnicodeText, nullable=False, index=True) <NEW_LINE> dataset_name = Column(types.UnicodeText, nullable=False, index=True) <NEW_LINE> commitment_text = Column(types.UnicodeText, nullable=False, index=True) <NEW_LINE> notes = Column(types.UnicodeText, nullable=False, index=True) <NEW_LINE> dataset = Column(types.UnicodeText, nullable=False, index=True) <NEW_LINE> publisher = Column(types.UnicodeText, nullable=False, index=True) <NEW_LINE> author = Column(types.UnicodeText, nullable=False, index=True) <NEW_LINE> state = Column(types.UnicodeText, nullable=False, index=True) <NEW_LINE> def __init__(self, **kwargs): <NEW_LINE> <INDENT> for k,v in kwargs.items(): <NEW_LINE> <INDENT> setattr(self, k, v) <NEW_LINE> <DEDENT> <DEDENT> @classmethod <NEW_LINE> def get(cls, id): <NEW_LINE> <INDENT> return model.Session.query(cls).filter(cls.id==id).first() <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def get_for_publisher(cls, id): <NEW_LINE> <INDENT> return model.Session.query(cls).filter(cls.publisher==id)
A commitment that it either from an Open Data Strategy or one of the PMs letters.
62598f9d9c8ee82313040059
class SparseRandomProjection(BaseRandomProjection): <NEW_LINE> <INDENT> def __init__(self, n_components='auto', density='auto', eps=0.1, dense_output=False, random_state=None): <NEW_LINE> <INDENT> super(SparseRandomProjection, self).__init__( n_components=n_components, eps=eps, dense_output=dense_output, random_state=random_state) <NEW_LINE> self.density = density <NEW_LINE> self.density_ = None <NEW_LINE> <DEDENT> def _make_random_matrix(self, n_components, n_features): <NEW_LINE> <INDENT> random_state = check_random_state(self.random_state) <NEW_LINE> self.density_ = _check_density(self.density, n_features) <NEW_LINE> return sparse_random_matrix(n_components, n_features, density=self.density_, random_state=random_state)
Reduce dimensionality through sparse random projection Sparse random matrix is an alternative to dense random projection matrix that guarantees similar embedding quality while being much more memory efficient and allowing faster computation of the projected data. If we note `s = 1 / density` the components of the random matrix are drawn from: - -sqrt(s) / sqrt(n_components) with probability 1 / 2s - 0 with probability 1 - 1 / s - +sqrt(s) / sqrt(n_components) with probability 1 / 2s Parameters ---------- n_components : int or 'auto', optional (default = 'auto') Dimensionality of the target projection space. n_components can be automatically adjusted according to the number of samples in the dataset and the bound given by the Johnson-Lindenstrauss lemma. In that case the quality of the embedding is controlled by the ``eps`` parameter. It should be noted that Johnson-Lindenstrauss lemma can yield very conservative estimated of the required number of components as it makes no assumption on the structure of the dataset. density : float in range ]0, 1], optional (default='auto') Ratio of non-zero component in the random projection matrix. If density = 'auto', the value is set to the minimum density as recommended by Ping Li et al.: 1 / sqrt(n_features). Use density = 1 / 3.0 if you want to reproduce the results from Achlioptas, 2001. eps : strictly positive float, optional, (default=0.1) Parameter to control the quality of the embedding according to the Johnson-Lindenstrauss lemma when n_components is set to 'auto'. Smaller values lead to better embedding and higher number of dimensions (n_components) in the target projection space. dense_output : boolean, optional (default=False) If True, ensure that the output of the random projection is a dense numpy array even if the input and random projection matrix are both sparse. 
In practice, if the number of components is small the number of zero components in the projected data will be very small and it will be more CPU and memory efficient to use a dense representation. If False, the projected data uses a sparse representation if the input is sparse. random_state : integer, RandomState instance or None (default=None) Control the pseudo random number generator used to generate the matrix at fit time. Attributes ---------- n_component_ : int Concrete number of components computed when n_components="auto". components_ : CSR matrix with shape [n_components, n_features] Random matrix used for the projection. density_ : float in range 0.0 - 1.0 Concrete density computed from when density = "auto". See Also -------- GaussianRandomProjection References ---------- .. [1] Ping Li, T. Hastie and K. W. Church, 2006, "Very Sparse Random Projections". http://www.stanford.edu/~hastie/Papers/Ping/KDD06_rp.pdf .. [2] D. Achlioptas, 2001, "Database-friendly random projections", http://www.cs.ucsc.edu/~optas/papers/jl.pdf
62598f9d85dfad0860cbf960
class CommonTimeWindow(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.Monday = None <NEW_LINE> self.Tuesday = None <NEW_LINE> self.Wednesday = None <NEW_LINE> self.Thursday = None <NEW_LINE> self.Friday = None <NEW_LINE> self.Saturday = None <NEW_LINE> self.Sunday = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.Monday = params.get("Monday") <NEW_LINE> self.Tuesday = params.get("Tuesday") <NEW_LINE> self.Wednesday = params.get("Wednesday") <NEW_LINE> self.Thursday = params.get("Thursday") <NEW_LINE> self.Friday = params.get("Friday") <NEW_LINE> self.Saturday = params.get("Saturday") <NEW_LINE> self.Sunday = params.get("Sunday") <NEW_LINE> memeber_set = set(params.keys()) <NEW_LINE> for name, value in vars(self).items(): <NEW_LINE> <INDENT> if name in memeber_set: <NEW_LINE> <INDENT> memeber_set.remove(name) <NEW_LINE> <DEDENT> <DEDENT> if len(memeber_set) > 0: <NEW_LINE> <INDENT> warnings.warn("%s fileds are useless." % ",".join(memeber_set))
通用时间窗
62598f9d435de62698e9bbcb
class json_cmp_result(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.errors = [] <NEW_LINE> <DEDENT> def add_error(self, error): <NEW_LINE> <INDENT> for line in error.splitlines(): <NEW_LINE> <INDENT> self.errors.append(line) <NEW_LINE> <DEDENT> <DEDENT> def has_errors(self): <NEW_LINE> <INDENT> return len(self.errors) > 0 <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return '\n'.join(self.errors)
json_cmp result class for better assertion messages
62598f9d236d856c2adc9325
class PaygenSigningRequirementsError(failures_lib.StepFailure): <NEW_LINE> <INDENT> pass
Paygen stage can't run if signing failed.
62598f9dadb09d7d5dc0a361
class InvalidTypeSignatureError(Exception): <NEW_LINE> <INDENT> pass
Thrown when `Pipeline.input_type` or `Pipeline.output_type` is not valid.
62598f9dbe8e80087fbbee36
class Critic(nn.Module): <NEW_LINE> <INDENT> def __init__(self, opt): <NEW_LINE> <INDENT> super(Critic, self).__init__() <NEW_LINE> self.opt = opt <NEW_LINE> self.embedding = nn.Embedding(opt.vocab_size, opt.emb_size) <NEW_LINE> self.rnn = nn.GRU(input_size=opt.emb_size, hidden_size=opt.critic_hidden_size, num_layers=opt.critic_layers, dropout=opt.critic_dropout, batch_first=True) <NEW_LINE> self.value = nn.Linear(opt.critic_hidden_size, 1) <NEW_LINE> self.zero_input = torch.LongTensor(opt.batch_size, 1).zero_().cuda() <NEW_LINE> self.zero_state = torch.zeros([opt.critic_layers, opt.batch_size, opt.critic_hidden_size]).cuda() <NEW_LINE> <DEDENT> def forward(self, actions): <NEW_LINE> <INDENT> padded_actions = torch.cat([self.zero_input, actions], 1) <NEW_LINE> inputs = self.embedding(Variable(padded_actions)) <NEW_LINE> outputs, _ = self.rnn(inputs, Variable(self.zero_state)) <NEW_LINE> outputs = outputs.contiguous() <NEW_LINE> flattened = outputs.view(-1, self.opt.critic_hidden_size) <NEW_LINE> flat_value = self.value(flattened) <NEW_LINE> value = flat_value.view(self.opt.batch_size, self.opt.seq_len + 1) <NEW_LINE> return value[:, :-1]
The imitation GAN critic used for stable training of the actor.
62598f9db5575c28eb712bb9
class TestASToratorPy3(unittest.TestCase, _TestUtil): <NEW_LINE> <INDENT> def test_annotations_identity(self): <NEW_LINE> <INDENT> _a, _b, _c, _d, _va, _kw, _return = [], [], [], [], [], [], [] <NEW_LINE> def real(a:_a, b:_b=1, *va:_va, c:_c, d:_d=1, **kw:_kw) -> _return: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> self.mutate(real) <NEW_LINE> _ = self.fake.__annotations__ <NEW_LINE> self.assertIs(_['a'], _a) <NEW_LINE> self.assertIs(_['b'], _b) <NEW_LINE> self.assertIs(_['c'], _c) <NEW_LINE> self.assertIs(_['d'], _d) <NEW_LINE> self.assertIs(_['va'], _va) <NEW_LINE> self.assertIs(_['kw'], _kw) <NEW_LINE> self.assertIs(_['return'], _return) <NEW_LINE> <DEDENT> def test_kwonly_arguments(self): <NEW_LINE> <INDENT> def real(*, c, d=1): <NEW_LINE> <INDENT> return hash((c,d)) <NEW_LINE> <DEDENT> self.mutate(real) <NEW_LINE> self.check_result(c=0) <NEW_LINE> self.check_result(c=0, d=1) <NEW_LINE> self.must_fail() <NEW_LINE> self.must_fail(0) <NEW_LINE> self.must_fail(0, 1) <NEW_LINE> self.must_fail(d=1) <NEW_LINE> <DEDENT> def test_all_argument_kinds(self): <NEW_LINE> <INDENT> def real(a, b=1, *args, c, d=1, **kwargs): <NEW_LINE> <INDENT> return hash((a,b,args,c,d,hd(kwargs))) <NEW_LINE> <DEDENT> self.mutate(real) <NEW_LINE> self.check_result(0, c=4) <NEW_LINE> self.check_result(a=0, c=4) <NEW_LINE> self.check_result(0, 1, c=4) <NEW_LINE> self.check_result(0, b=1, c=4) <NEW_LINE> self.check_result(0, 1, 2, 3, c=4) <NEW_LINE> self.check_result(0, c=4, d=5) <NEW_LINE> self.check_result(0, c=4, d=5, e=6) <NEW_LINE> self.check_result(0, 1, 2, 3, c=4, d=5, e=6) <NEW_LINE> self.must_fail() <NEW_LINE> self.must_fail(0) <NEW_LINE> self.must_fail(c=4) <NEW_LINE> self.must_fail(a=0) <NEW_LINE> self.must_fail(0, 4) <NEW_LINE> self.must_fail(0, 4, b=1)
Python3 only tests for black_magic.decorator.wraps (uses AST). Contains checks for annotations object identity and keyword-only arguments.
62598f9d379a373c97d98dec
class RenaultBatteryLevelSensor(RenaultBatteryDataEntity): <NEW_LINE> <INDENT> @property <NEW_LINE> def state(self) -> Optional[int]: <NEW_LINE> <INDENT> return self.data.batteryLevel <NEW_LINE> <DEDENT> @property <NEW_LINE> def device_class(self) -> str: <NEW_LINE> <INDENT> return DEVICE_CLASS_BATTERY <NEW_LINE> <DEDENT> @property <NEW_LINE> def unit_of_measurement(self) -> str: <NEW_LINE> <INDENT> return PERCENTAGE <NEW_LINE> <DEDENT> @property <NEW_LINE> def icon(self) -> str: <NEW_LINE> <INDENT> charging = self.data.get_charging_status() == ChargeState.CHARGE_IN_PROGRESS <NEW_LINE> return icon_for_battery_level(battery_level=self.state, charging=charging) <NEW_LINE> <DEDENT> @property <NEW_LINE> def device_state_attributes(self) -> Dict[str, Any]: <NEW_LINE> <INDENT> attrs = super().device_state_attributes <NEW_LINE> if self.data.batteryAvailableEnergy is not None: <NEW_LINE> <INDENT> attrs[ATTR_BATTERY_AVAILABLE_ENERGY] = self.data.batteryAvailableEnergy <NEW_LINE> <DEDENT> return attrs
Battery Level sensor.
62598f9d462c4b4f79dbb7e3
class LazyProperty(object): <NEW_LINE> <INDENT> def __init__(self, func): <NEW_LINE> <INDENT> self.func = func <NEW_LINE> <DEDENT> def __get__(self, instance, owner): <NEW_LINE> <INDENT> if instance is None: <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> value = self.func(instance) <NEW_LINE> setattr(instance, self.func.__name__, value) <NEW_LINE> return value
LazyProperty https://blog.csdn.net/handsomekang/article/details/39933553 参考 proxy_pool.Util.GetConfig
62598f9deab8aa0e5d30bb5d
class AtomAction(Action): <NEW_LINE> <INDENT> def __init__(self, function_tuple, **kwargs): <NEW_LINE> <INDENT> self.function_tuple = function_tuple <NEW_LINE> arg_types = function_tuple.args <NEW_LINE> super().__init__(arg_types, **kwargs) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def factory(cls, pysc2_function): <NEW_LINE> <INDENT> def __init__(self, **kwargs): <NEW_LINE> <INDENT> super(self.__class__, self).__init__(pysc2_function, **kwargs) <NEW_LINE> <DEDENT> new_name = "".join(map(str.capitalize, pysc2_function.name.split('_'))) + "Action" <NEW_LINE> return type(new_name, (AtomAction,), {'__init__': __init__}) <NEW_LINE> <DEDENT> def get_pysc2_action_ids(self): <NEW_LINE> <INDENT> return [self.function_tuple.id] <NEW_LINE> <DEDENT> def transform_action(self, observation, action_values): <NEW_LINE> <INDENT> arg_values = [] <NEW_LINE> for i, arg_type in enumerate(self._arg_types): <NEW_LINE> <INDENT> if arg_type.name in self._defaults: <NEW_LINE> <INDENT> arg_values += [self._defaults[arg_type.name]] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> arg_values += [translate_parameter_value(arg_type, action_values[i], self._feature_screen_size, self._feature_minimap_size)] <NEW_LINE> <DEDENT> <DEDENT> return self.function_tuple(*arg_values)
A Class made to directly mirror pysc2 523 default actions
62598f9d009cb60464d012fd
class CreateOAuthUser(OAuthRequestHandler): <NEW_LINE> <INDENT> def post(self): <NEW_LINE> <INDENT> data = json_decode(self.request.body) <NEW_LINE> if 'api_auth_token' not in data: <NEW_LINE> <INDENT> raise HTTPError(400, 'api_auth_token not provided') <NEW_LINE> <DEDENT> if 'access_token' not in data: <NEW_LINE> <INDENT> raise HTTPError(400, 'access_token not provided') <NEW_LINE> <DEDENT> if 'refresh_token' not in data: <NEW_LINE> <INDENT> raise HTTPError(400, 'refresh_token not provided') <NEW_LINE> <DEDENT> if 'user_session_token' not in data: <NEW_LINE> <INDENT> raise HTTPError(400, 'user_session_token not provided') <NEW_LINE> <DEDENT> if 'username' not in data: <NEW_LINE> <INDENT> raise HTTPError(400, 'username not provided') <NEW_LINE> <DEDENT> self.api_auth_token = data['api_auth_token'] <NEW_LINE> self.access_token = data['access_token'] <NEW_LINE> self.refresh_token = data['refresh_token'] <NEW_LINE> self.user_session_token = data['user_session_token'] <NEW_LINE> self.username = data['username'] <NEW_LINE> if not ( self.api_auth_token == os.environ['oauth_manager_token'] ): <NEW_LINE> <INDENT> raise HTTPError(403) <NEW_LINE> <DEDENT> __all_tokens = self.load_tokens() <NEW_LINE> __all_tokens[ self.username ] = { 'access_token': self.access_token, 'refresh_token': self.refresh_token, 'user_session_token': self.user_session_token, } <NEW_LINE> self.store_tokens(__all_tokens)
The JupyterHub application itself should be the only 'user' that calls this. After the OAuthenticator that JupyterHub is currently using finishes the OAuth login process, it should call this function by using pre_spawn_start() so the OAuth manager knows which tokens to store for the user that just logged in. http://jupyterhub.readthedocs.io/en/latest/api/auth.html#jupyterhub.auth.Authenticator.pre_spawn_start
62598f9da8370b77170f01bb
class Group(_GroupMixin, _RawLayer): <NEW_LINE> <INDENT> def __init__(self, parent, index): <NEW_LINE> <INDENT> super(Group, self).__init__(parent, index) <NEW_LINE> self._layers = [] <NEW_LINE> self._bbox = None <NEW_LINE> <DEDENT> @property <NEW_LINE> def closed(self): <NEW_LINE> <INDENT> divider = self._divider <NEW_LINE> if divider is None: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> return divider.type == SectionDivider.CLOSED_FOLDER <NEW_LINE> <DEDENT> @property <NEW_LINE> def _divider(self): <NEW_LINE> <INDENT> return self.get_tag([TaggedBlock.SECTION_DIVIDER_SETTING, TaggedBlock.NESTED_SECTION_DIVIDER_SETTING]) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "<%s: %r, layer_count=%d, mask=%s, visible=%d>" % ( self.kind, self.name, len(self.layers), self.mask, self.visible)
PSD layer group.
62598f9da79ad16197769e3c
class GoogleRefreshableOAuth2Client(GoogleOAuth2Client): <NEW_LINE> <INDENT> def Refresh(self): <NEW_LINE> <INDENT> raise NotImplementedError( 'You must subclass GoogleRefreshableOAuth2Client.')
A refreshable OAuth2 client for use with Google APIs. This interface assumes all responsibility for refreshing credentials when necessary.
62598f9dbe383301e02535cd
class AbstractMercator(SpatialReferenceSystem): <NEW_LINE> <INDENT> @property <NEW_LINE> def tile_size(self): <NEW_LINE> <INDENT> return self._tile_size <NEW_LINE> <DEDENT> @tile_size.setter <NEW_LINE> def tile_size(self, value): <NEW_LINE> <INDENT> self._tile_size = value <NEW_LINE> <DEDENT> def __init__(self, tile_size=256): <NEW_LINE> <INDENT> super(AbstractMercator, self).__init__() <NEW_LINE> self.tile_size = tile_size <NEW_LINE> self.radius = 6378137 <NEW_LINE> self.origin_shift = pi * self.radius <NEW_LINE> self.initial_resolution = 2 * self.origin_shift / self.tile_size <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def invert_y(z, y): <NEW_LINE> <INDENT> return (1 << z) - y - 1 <NEW_LINE> <DEDENT> def get_coord(self, z, x, y): <NEW_LINE> <INDENT> return self.tile_to_meters(z, x, y) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def truncate(coord): <NEW_LINE> <INDENT> return '%.2f' % (int(coord * 100) / float(100)) <NEW_LINE> <DEDENT> def pixel_x_size(self, z): <NEW_LINE> <INDENT> return AbstractMercator.pixel_size(z) <NEW_LINE> <DEDENT> def pixel_y_size(self, z): <NEW_LINE> <INDENT> return AbstractMercator.pixel_size(z) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def pixel_size(z): <NEW_LINE> <INDENT> return 156543.033928041 / 2 ** z <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def tile_to_meters(self, z, x, y): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> @abstractmethod <NEW_LINE> def tile_to_lat_lon(z, x, y): <NEW_LINE> <INDENT> raise NotImplementedError()
Base Class for any Mercator projection classes. Sets-up the commonalities for the other Mercator projections such as Ellipsoidal, scaled world mercator, or Psuedo-Mercator.
62598f9d32920d7e50bc5e2e
class JSONtree(object): <NEW_LINE> <INDENT> def __init__(self, root_name, leaf=False, association=None, id=None): <NEW_LINE> <INDENT> super(JSONtree,self).__init__() <NEW_LINE> self.name = root_name <NEW_LINE> if not leaf: <NEW_LINE> <INDENT> self.children = list() <NEW_LINE> <DEDENT> self.leaf = leaf <NEW_LINE> self.association = association <NEW_LINE> self.id=id <NEW_LINE> <DEDENT> def add_child(self, child, association=None,id=None): <NEW_LINE> <INDENT> if isinstance(child,JSONtree): <NEW_LINE> <INDENT> child.association=association <NEW_LINE> self.children.append(child) <NEW_LINE> return child <NEW_LINE> <DEDENT> if isinstance(child,str): <NEW_LINE> <INDENT> node = JSONtree(root_name=child,leaf=False,association=association,id=id) <NEW_LINE> self.children.append(node) <NEW_LINE> return node <NEW_LINE> <DEDENT> <DEDENT> def add_leaf(self, name): <NEW_LINE> <INDENT> self.add_child(JSONtree(name,leaf=True)) <NEW_LINE> <DEDENT> def __getitem__(self,item): <NEW_LINE> <INDENT> if self.leaf: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> for child in self.children: <NEW_LINE> <INDENT> if child.name==item: <NEW_LINE> <INDENT> return child <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return str(self.__dict__()) <NEW_LINE> <DEDENT> def __dict__(self): <NEW_LINE> <INDENT> d = dict() <NEW_LINE> d['name'] = self.name <NEW_LINE> if self.id: <NEW_LINE> <INDENT> d['id'] = self.id <NEW_LINE> <DEDENT> if self.association: <NEW_LINE> <INDENT> d['association'] = self.association <NEW_LINE> <DEDENT> if self.leaf: <NEW_LINE> <INDENT> return d <NEW_LINE> <DEDENT> d['children'] = list() <NEW_LINE> for child in self.children: <NEW_LINE> <INDENT> d['children'].append(child.__dict__()) <NEW_LINE> <DEDENT> return d <NEW_LINE> <DEDENT> def to_j(self): <NEW_LINE> <INDENT> return json.dumps(self.__dict__())
Constructs a Tree-like object that is supported by the D3 Framework
62598f9d1f037a2d8b9e3ebf
class screenshotProp(SchemaProperty): <NEW_LINE> <INDENT> _prop_schema = 'screenshot' <NEW_LINE> _expected_schema = 'ImageObject' <NEW_LINE> _enum = False <NEW_LINE> _format_as = "URLField"
SchemaField for screenshot Usage: Include in SchemaObject SchemaFields as your_django_field = screenshotProp() schema.org description:A link to a screenshot image of the app. prop_schema returns just the property without url# format_as is used by app templatetags based upon schema.org datatype used to reference ImageObject
62598f9d9b70327d1c57eb78
class Vote(models.Model): <NEW_LINE> <INDENT> created = models.DateTimeField(auto_now_add=True) <NEW_LINE> voter = models.OneToOneField('Voter', on_delete=models.CASCADE) <NEW_LINE> polling_station = models.ForeignKey('PollingStation', on_delete=models.CASCADE) <NEW_LINE> list_choice = models.ForeignKey('List', on_delete=models.CASCADE) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> verbose_name = "Voto" <NEW_LINE> verbose_name_plural = "Votos" <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return f"Vote: {self.voter.document} in {self.polling_station.name}"
A vote from a voter. As simple as that.
62598f9d7047854f4633f1bb
class Profile(object): <NEW_LINE> <INDENT> def __init__(self, logger, doc, checkouts_manager): <NEW_LINE> <INDENT> self.logger = logger <NEW_LINE> self.doc = doc <NEW_LINE> self.parameters = dict(doc.get('parameters', {})) <NEW_LINE> self.file_resolver = FileResolver(checkouts_manager, doc.get('package_dirs', [])) <NEW_LINE> self.checkouts_manager = checkouts_manager <NEW_LINE> self.hook_import_dirs = doc.get('hook_import_dirs', []) <NEW_LINE> self.packages = doc['packages'] <NEW_LINE> self._yaml_cache = {} <NEW_LINE> <DEDENT> def resolve(self, path): <NEW_LINE> <INDENT> return self.checkouts_manager.resolve(path) <NEW_LINE> <DEDENT> def _use_for_package(self, pkgname): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self.packages[pkgname]['use'] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> return pkgname <NEW_LINE> <DEDENT> <DEDENT> def load_package_yaml(self, pkgname, parameters): <NEW_LINE> <INDENT> use = self._use_for_package(pkgname) <NEW_LINE> yaml_files = self._yaml_cache.get(('package', use), None) <NEW_LINE> if yaml_files is None: <NEW_LINE> <INDENT> yaml_filename = use + '.yaml' <NEW_LINE> hook_filename = self.search_hook_import_dirs(use + '.py') <NEW_LINE> matches = self.file_resolver.glob_files([yaml_filename, pjoin(use, yaml_filename), pjoin(use, use + '-*.yaml')], match_basename=True) <NEW_LINE> self._yaml_cache['package', use] = yaml_files = [ PackageYAML(filename, parameters, pattern != yaml_filename, hook_filename=hook_filename) for match, (pattern, filename) in matches.items()] <NEW_LINE> self.logger.info('Resolved package %s to %s', pkgname, [filename for match, (pattern, filename) in matches.items()]) <NEW_LINE> <DEDENT> no_when_file = None <NEW_LINE> with_when_file = None <NEW_LINE> for pkg in yaml_files: <NEW_LINE> <INDENT> if 'when' not in pkg.doc: <NEW_LINE> <INDENT> if no_when_file is not None: <NEW_LINE> <INDENT> raise PackageError(pkg.doc, "Two specs found for package %s without" " a when-clause to discriminate" % use) 
<NEW_LINE> <DEDENT> no_when_file = pkg <NEW_LINE> continue <NEW_LINE> <DEDENT> doc_when = pkg.doc['when'] <NEW_LINE> if eval_condition(doc_when, parameters): <NEW_LINE> <INDENT> if with_when_file is not None: <NEW_LINE> <INDENT> raise PackageError(doc_when, "Selected parameters for package %s matches both '%s' and '%s'" % (use, doc_when, with_when_file)) <NEW_LINE> <DEDENT> with_when_file = pkg <NEW_LINE> <DEDENT> <DEDENT> result = with_when_file if with_when_file is not None else no_when_file <NEW_LINE> if result is None: <NEW_LINE> <INDENT> raise ProfileError(use, 'No yaml file for package "{0}" found'.format(use)) <NEW_LINE> <DEDENT> return result <NEW_LINE> <DEDENT> def find_package_file(self, pkgname, filename): <NEW_LINE> <INDENT> use = self._use_for_package(pkgname) <NEW_LINE> return self.file_resolver.find_file([filename, pjoin(use, filename)]) <NEW_LINE> <DEDENT> def search_hook_import_dirs(self, filename): <NEW_LINE> <INDENT> files = [pjoin(path, filename) for path in self.doc.get('hook_import_dirs', [])] <NEW_LINE> return self.file_resolver.find_file(files) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return 'Profile containing ' + ', '.join( key[1] for key in self._yaml_cache.keys() if key[0] == 'package')
Profiles acts as nodes in a tree, with `extends` containing the parent profiles (which are child nodes in a DAG).
62598f9d67a9b606de545da2
class CASMLReuseMethod(IReuseMethod): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(CASMLReuseMethod, self).__init__() <NEW_LINE> <DEDENT> def execute(self, case, case_matches, fn_retrieve=None): <NEW_LINE> <INDENT> cluster = [] <NEW_LINE> id_map = {} <NEW_LINE> for i, m in enumerate(case_matches.itervalues()): <NEW_LINE> <INDENT> cluster.append(m.case["act"]) <NEW_LINE> id_map[i] = m.case.id <NEW_LINE> <DEDENT> revised_matches = fn_retrieve(case, "act", False, **{"data": cluster, "id_map": id_map}) <NEW_LINE> for id_, m in revised_matches.items(): <NEW_LINE> <INDENT> m.set_similarity("state", case_matches[id_].get_similarity("state")) <NEW_LINE> <DEDENT> return revised_matches
The reuse method implementation for :class:`CASML`. The solutions of the best (or set of best) retrieved cases are used to construct the solution for the query case; new generalizations and specializations may occur as a consequence of the solution transformation. The CASML reuse method further specializes the solution by identifying cases similar in both state and action.
62598f9d4428ac0f6e658303
class Iperf3Server(object): <NEW_LINE> <INDENT> def __init__(self, parameters, loop=None, use_processes=False): <NEW_LINE> <INDENT> if loop is None: <NEW_LINE> <INDENT> self._loop = asyncio.get_event_loop() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._loop = loop <NEW_LINE> <DEDENT> self._use_processes = use_processes <NEW_LINE> self._logger = logging.getLogger('py3iperf3') <NEW_LINE> self._parameters = Iperf3TestSettings() <NEW_LINE> for attr, value in parameters.items(): <NEW_LINE> <INDENT> if value is not None: <NEW_LINE> <INDENT> setattr(self._parameters, attr, value) <NEW_LINE> <DEDENT> <DEDENT> self._server = None <NEW_LINE> self._orphans = {} <NEW_LINE> self._tests = {} <NEW_LINE> <DEDENT> def start_server(self): <NEW_LINE> <INDENT> if self._parameters.server_address != '': <NEW_LINE> <INDENT> local_addr = self._parameters.server_address <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if self._parameters.ip_version == 4: <NEW_LINE> <INDENT> local_addr = '0.0.0.0' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> local_addr = '::' <NEW_LINE> <DEDENT> <DEDENT> if self._parameters.ip_version == 4: <NEW_LINE> <INDENT> ip_ver = socket.AF_INET <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> ip_ver = socket.AF_INET6 <NEW_LINE> <DEDENT> self._logger.info('Binding on %s port %s', local_addr, self._parameters.server_port) <NEW_LINE> coro = self._server = self._loop.create_server( lambda: TcpTestProtocol(server=self), host=local_addr, port=self._parameters.server_port, family=ip_ver, reuse_address=True) <NEW_LINE> self._server = self._loop.run_until_complete(coro) <NEW_LINE> self._logger.info('Server running!') <NEW_LINE> <DEDENT> def tcp_connection_established(self, proto): <NEW_LINE> <INDENT> self._orphans[proto] = [] <NEW_LINE> <DEDENT> def control_data_received(self, proto, data): <NEW_LINE> <INDENT> self._orphans[proto].extend(data) <NEW_LINE> if len(self._orphans[proto]) == COOKIE_SIZE+1: <NEW_LINE> <INDENT> self.process_proto_with_cookie(proto) <NEW_LINE> 
<DEDENT> <DEDENT> def process_proto_with_cookie(self, proto): <NEW_LINE> <INDENT> cookie = self._orphans[proto] <NEW_LINE> cookie_string = bytes(cookie[:-1]).decode('ascii') <NEW_LINE> self._logger.debug('Connection: %s Cookie: %s', proto, cookie_string) <NEW_LINE> if cookie_string in self._tests: <NEW_LINE> <INDENT> self._tests[cookie_string].new_data_connection(proto) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> test = Iperf3Test(self, self._loop, {}) <NEW_LINE> self._tests[cookie_string] = test <NEW_LINE> test.set_control_connection(proto, cookie_string) <NEW_LINE> del self._orphans[proto] <NEW_LINE> <DEDENT> <DEDENT> def stop_server(self): <NEW_LINE> <INDENT> self._logger.info('Server closing') <NEW_LINE> self._server.close()
Big ToDo
62598f9d0c0af96317c5615b
@total_ordering <NEW_LINE> class Smallest(object): <NEW_LINE> <INDENT> def __neg__(self): <NEW_LINE> <INDENT> return Largest() <NEW_LINE> <DEDENT> def __lt__(self, other): <NEW_LINE> <INDENT> if other.__class__ == self.__class__: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return True <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if other.__class__ == self.__class__: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return False <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "-~" <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "-Inf" <NEW_LINE> <DEDENT> def __hash__(self): <NEW_LINE> <INDENT> return 0x55555555
Represents the smallest value This type doesn't do much; it implements a pseudo-value that's smaller than everything but itself. >>> negInf = Smallest() >>> smallest = Smallest() >>> -264 < negInf False >>> -264 == negInf False >>> -264 > negInf True >>> negInf < negInf False >>> negInf == smallest True
62598f9d0a50d4780f7051b2
class MinLengthValidator(Validator): <NEW_LINE> <INDENT> def __init__(self, min_length, message=None): <NEW_LINE> <INDENT> self.min_length = min_length <NEW_LINE> super(MinLengthValidator, self).__init__(message if message else "Not allowed length") <NEW_LINE> <DEDENT> def validate(self, value): <NEW_LINE> <INDENT> return (self.error(value) if not value or len(value) < self.min_length else value)
Raises a ValidationError with a code of 'min_length' if length of value is less than min_length.
62598f9dd6c5a102081e1f1e
class SolutionRefer: <NEW_LINE> <INDENT> def reverseString(self, s: List[str]) -> None: <NEW_LINE> <INDENT> s.reverse()
I think that this way is abusing with python 196ms, 18.1MB (99.1%, 92%)
62598f9d4a966d76dd5eecba
class generalized_forward_backward(solver): <NEW_LINE> <INDENT> def __init__(self, lambda_=1, *args, **kwargs): <NEW_LINE> <INDENT> super(generalized_forward_backward, self).__init__(*args, **kwargs) <NEW_LINE> self.lambda_ = lambda_ <NEW_LINE> <DEDENT> def _pre(self, functions, x0): <NEW_LINE> <INDENT> if self.lambda_ <= 0 or self.lambda_ > 1: <NEW_LINE> <INDENT> raise ValueError('Lambda is bounded by 0 and 1.') <NEW_LINE> <DEDENT> self.z = [] <NEW_LINE> for f in functions: <NEW_LINE> <INDENT> if 'GRAD' in f.cap(x0): <NEW_LINE> <INDENT> self.smooth_funs.append(f) <NEW_LINE> <DEDENT> elif 'PROX' in f.cap(x0): <NEW_LINE> <INDENT> self.non_smooth_funs.append(f) <NEW_LINE> self.z.append(np.array(x0, copy=True)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError('Generalized forward-backward requires each ' 'function to implement prox() or grad().') <NEW_LINE> <DEDENT> <DEDENT> if self.verbosity == 'HIGH': <NEW_LINE> <INDENT> print('INFO: Generalized forward-backward minimizing {} smooth ' 'functions and {} non-smooth functions.'.format( len(self.smooth_funs), len(self.non_smooth_funs))) <NEW_LINE> <DEDENT> <DEDENT> def _algo(self): <NEW_LINE> <INDENT> grad = np.zeros_like(self.sol) <NEW_LINE> for f in self.smooth_funs: <NEW_LINE> <INDENT> grad += f.grad(self.sol) <NEW_LINE> <DEDENT> if not self.non_smooth_funs: <NEW_LINE> <INDENT> self.sol[:] -= self.step * grad <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> sol = np.zeros_like(self.sol) <NEW_LINE> for i, g in enumerate(self.non_smooth_funs): <NEW_LINE> <INDENT> tmp = 2 * self.sol - self.z[i] - self.step * grad <NEW_LINE> tmp[:] = g.prox(tmp, self.step * len(self.non_smooth_funs)) <NEW_LINE> self.z[i] += self.lambda_ * (tmp - self.sol) <NEW_LINE> sol += 1. * self.z[i] / len(self.non_smooth_funs) <NEW_LINE> <DEDENT> self.sol[:] = sol <NEW_LINE> <DEDENT> <DEDENT> def _post(self): <NEW_LINE> <INDENT> del self.z
Generalized forward-backward proximal splitting algorithm. This algorithm solves convex optimization problems composed of the sum of any number of non-smooth (or smooth) functions. See generic attributes descriptions of the :class:`pyunlocbox.solvers.solver` base class. Parameters ---------- lambda_ : float, optional A relaxation parameter bounded by 0 and 1. Default is 1. Notes ----- This algorithm requires each function to either implement the :meth:`pyunlocbox.functions.func.prox` method or the :meth:`pyunlocbox.functions.func.grad` method. See :cite:`raguet2013generalizedFB` for details about the algorithm. Examples -------- >>> import numpy as np >>> from pyunlocbox import functions, solvers >>> y = [0.01, 0.2, 8, 0.3, 0 , 0.03, 7] >>> x0 = np.zeros(len(y)) >>> f1 = functions.norm_l2(y=y) >>> f2 = functions.norm_l1() >>> solver = solvers.generalized_forward_backward(lambda_=1, step=0.5) >>> ret = solvers.solve([f1, f2], x0, solver) Solution found after 2 iterations: objective function f(sol) = 1.463100e+01 stopping criterion: RTOL >>> ret['sol'] array([0. , 0. , 7.5, 0. , 0. , 0. , 6.5])
62598f9d10dbd63aa1c7098f
class DeltaCSMRatioFunction(AbstractRatioFunction): <NEW_LINE> <INDENT> ALLOWED_FUNCTIONS = {"smootherstep": ["delta_csm_min", "delta_csm_max"]} <NEW_LINE> def smootherstep(self, vals): <NEW_LINE> <INDENT> return smootherstep(vals, edges=[self.__dict__["delta_csm_min"], self.__dict__["delta_csm_max"]])
Concrete implementation of a series of ratio functions applied to differences of continuous symmetry measures (DeltaCSM). Uses "finite" ratio functions. See the following reference for details: ChemEnv: a fast and robust coordination environment identification tool, D. Waroquiers et al., Acta Cryst. B 76, 683 (2020).
62598f9d1b99ca400228f41a
class Article2Tag(models.Model): <NEW_LINE> <INDENT> nid = models.AutoField(primary_key=True) <NEW_LINE> article = models.ForeignKey(to="Article", to_field="nid") <NEW_LINE> tag = models.ForeignKey(to="Tag", to_field="nid") <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return "{}-{}".format(self.article.title, self.tag.title) <NEW_LINE> <DEDENT> class Meta: <NEW_LINE> <INDENT> unique_together = (("article", "tag"),) <NEW_LINE> verbose_name = "文章-标签" <NEW_LINE> verbose_name_plural = verbose_name
文章和标签的多对多关系表
62598f9d63d6d428bbee258b
class GetTypeInfo_result(object): <NEW_LINE> <INDENT> thrift_spec = ( (0, TType.STRUCT, 'success', (TGetTypeInfoResp, TGetTypeInfoResp.thrift_spec), None, ), ) <NEW_LINE> def __init__(self, success=None,): <NEW_LINE> <INDENT> self.success = success <NEW_LINE> <DEDENT> def read(self, iprot): <NEW_LINE> <INDENT> if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: <NEW_LINE> <INDENT> iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) <NEW_LINE> return <NEW_LINE> <DEDENT> iprot.readStructBegin() <NEW_LINE> while True: <NEW_LINE> <INDENT> (fname, ftype, fid) = iprot.readFieldBegin() <NEW_LINE> if ftype == TType.STOP: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> if fid == 0: <NEW_LINE> <INDENT> if ftype == TType.STRUCT: <NEW_LINE> <INDENT> self.success = TGetTypeInfoResp() <NEW_LINE> self.success.read(iprot) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> iprot.readFieldEnd() <NEW_LINE> <DEDENT> iprot.readStructEnd() <NEW_LINE> <DEDENT> def write(self, oprot): <NEW_LINE> <INDENT> if oprot._fast_encode is not None and self.thrift_spec is not None: <NEW_LINE> <INDENT> oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) <NEW_LINE> return <NEW_LINE> <DEDENT> oprot.writeStructBegin('GetTypeInfo_result') <NEW_LINE> if self.success is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('success', TType.STRUCT, 0) <NEW_LINE> self.success.write(oprot) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> oprot.writeFieldStop() <NEW_LINE> oprot.writeStructEnd() <NEW_LINE> <DEDENT> def validate(self): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] <NEW_LINE> return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) <NEW_LINE> <DEDENT> def 
__eq__(self, other): <NEW_LINE> <INDENT> return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not (self == other)
Attributes: - success
62598f9d0c0af96317c5615c
class Loader(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.module = None <NEW_LINE> <DEDENT> def find_module(self, name, path): <NEW_LINE> <INDENT> sys.meta_path.remove(self) <NEW_LINE> try: <NEW_LINE> <INDENT> self.module = importlib.import_module(name) <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> sys.meta_path.insert(0, self) <NEW_LINE> <DEDENT> return self <NEW_LINE> <DEDENT> def load_module(self, name): <NEW_LINE> <INDENT> if not self.module: <NEW_LINE> <INDENT> raise ImportError("Unable to load module.") <NEW_LINE> <DEDENT> module = self.module <NEW_LINE> for hacker in _hackers: <NEW_LINE> <INDENT> module = hacker.hack(module) <NEW_LINE> <DEDENT> sys.modules[name] = module <NEW_LINE> return module
A class that import a module like normal and then passed to a hacker object that gets to do whatever it wants to the module. Then the return value from the hack call is put into sys.modules.
62598f9dcc0a2c111447ade5
class BatterStats(mlbgame.object.Object): <NEW_LINE> <INDENT> def nice_output(self): <NEW_LINE> <INDENT> if self.rbi > 0: <NEW_LINE> <INDENT> if self.hr > 0: <NEW_LINE> <INDENT> return "%s - %i for %i with %i RBI and %i Home Runs" % (self.name_display_first_last, self.h, self.ab, self.rbi, self.hr) <NEW_LINE> <DEDENT> return "%s - %i for %i with %i RBI" % (self.name_display_first_last, self.h, self.ab, self.rbi) <NEW_LINE> <DEDENT> return "%s - %i for %i" % (self.name_display_first_last, self.h, self.ab) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.nice_output()
Holds stats information for a batter. Check out `statmap.py` for a full list of object properties.
62598f9dc432627299fa2db2
class PlayScreen(): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.map = Map() <NEW_LINE> self.map.genere() <NEW_LINE> self.commande = Commandes() <NEW_LINE> <DEDENT> def show(self, fenetre): <NEW_LINE> <INDENT> self.map.show(fenetre) <NEW_LINE> self.commande.show(fenetre)
classe de l'ecran de jeu
62598f9d8e7ae83300ee8e79
class Message(_MsgBase): <NEW_LINE> <INDENT> def __new__(cls, msg_id, symbol, location, msg, confidence): <NEW_LINE> <INDENT> return _MsgBase.__new__( cls, msg_id, symbol, msg, msg_id[0], MSG_TYPES[msg_id[0]], confidence, *location ) <NEW_LINE> <DEDENT> def format(self, template): <NEW_LINE> <INDENT> return template.format(**dict(zip(self._fields, self)))
This class represent a message to be issued by the reporters
62598f9da8370b77170f01bd
class TestCurrentArtist100(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.chart = billboard.ChartData('artist-100') <NEW_LINE> <DEDENT> def test_date(self): <NEW_LINE> <INDENT> self.assertIsNotNone(self.chart.date) <NEW_LINE> <DEDENT> def test_ranks(self): <NEW_LINE> <INDENT> ranks = list(entry.rank for entry in self.chart) <NEW_LINE> self.assertEqual(ranks, list(range(1, 101))) <NEW_LINE> <DEDENT> def test_entries_validity(self): <NEW_LINE> <INDENT> self.assertEqual(len(self.chart), 100) <NEW_LINE> for entry in self.chart: <NEW_LINE> <INDENT> self.assertEqual(entry.title, '') <NEW_LINE> self.assertGreater(len(entry.artist), 0) <NEW_LINE> self.assertTrue(1 <= entry.peakPos <= 100) <NEW_LINE> self.assertTrue(0 <= entry.lastPos <= 100) <NEW_LINE> self.assertGreaterEqual(entry.weeks, 0) <NEW_LINE> self.assertTrue(1 <= entry.rank <= 100) <NEW_LINE> self.assertIsInstance(entry.isNew, bool) <NEW_LINE> <DEDENT> <DEDENT> def test_entries_consistency(self): <NEW_LINE> <INDENT> for entry in self.chart: <NEW_LINE> <INDENT> if entry.isNew: <NEW_LINE> <INDENT> self.assertEqual(entry.lastPos, 0) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def test_json(self): <NEW_LINE> <INDENT> self.assertTrue(json.loads(self.chart.json())) <NEW_LINE> for entry in self.chart: <NEW_LINE> <INDENT> self.assertTrue(json.loads(entry.json()))
Checks that the ChartData object for the current Artist 100 chart has entries and instance variables that are valid and reasonable. Does not test whether the data is actually correct. The Artist 100 chart is special in that it does not provide titles.
62598f9db7558d5895463409
class GoogleCloudDialogflowV2IntentMessageListSelectItem(_messages.Message): <NEW_LINE> <INDENT> description = _messages.StringField(1) <NEW_LINE> image = _messages.MessageField('GoogleCloudDialogflowV2IntentMessageImage', 2) <NEW_LINE> info = _messages.MessageField('GoogleCloudDialogflowV2IntentMessageSelectItemInfo', 3) <NEW_LINE> title = _messages.StringField(4)
An item in the list. Fields: description: Optional. The main text describing the item. image: Optional. The image to display. info: Required. Additional information about this option. title: Required. The title of the list item.
62598f9d92d797404e388a53
class Item(): <NEW_LINE> <INDENT> def __init__(self, name, description): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.description = description <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "=====\n{}\n=====\n{}\n".format(self.name, self.description)
The base class for all items
62598f9d2c8b7c6e89bd35ab
class FoundryComment(BaseComment): <NEW_LINE> <INDENT> in_reply_to = models.ForeignKey('self', null=True, blank=True, db_index=True) <NEW_LINE> moderated = models.BooleanField(default=False, db_index=True) <NEW_LINE> @property <NEW_LINE> def replies(self): <NEW_LINE> <INDENT> return FoundryComment.objects.filter(in_reply_to=self).order_by('id') <NEW_LINE> <DEDENT> @property <NEW_LINE> def creator(self): <NEW_LINE> <INDENT> if not self.user: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> return Member.objects.get(id=self.user_id) <NEW_LINE> <DEDENT> except Member.DoesNotExist: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> <DEDENT> def can_report(self, request): <NEW_LINE> <INDENT> return not self.commentreport_set.filter(reporter=request.user).exists()
Custom comment class
62598f9d6aa9bd52df0d4ca7
class Placeholders(object): <NEW_LINE> <INDENT> class Collection(object): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> class KeyValueMappedCollectionProxy(object): <NEW_LINE> <INDENT> pass
Fake classes to use as placeholders. The purpose of this is for the dummy items, so they can be replaced once the parent item is expanded.
62598f9de76e3b2f99fd8811
class CloudAdminAllocationRequest(CloudAdminRequestDetailMixin, APIView): <NEW_LINE> <INDENT> model = AllocationRequest <NEW_LINE> serializer_class = ResolveAllocationRequestSerializer <NEW_LINE> def approve(self, pending_request): <NEW_LINE> <INDENT> membership = pending_request.membership <NEW_LINE> membership.allocation = pending_request.allocation <NEW_LINE> membership.save()
Manage user allocation requests
62598f9da17c0f6771d5c015
class Router(_messages.Message): <NEW_LINE> <INDENT> bgp = _messages.MessageField('RouterBgp', 1) <NEW_LINE> bgpPeers = _messages.MessageField('RouterBgpPeer', 2, repeated=True) <NEW_LINE> creationTimestamp = _messages.StringField(3) <NEW_LINE> description = _messages.StringField(4) <NEW_LINE> id = _messages.IntegerField(5, variant=_messages.Variant.UINT64) <NEW_LINE> interfaces = _messages.MessageField('RouterInterface', 6, repeated=True) <NEW_LINE> kind = _messages.StringField(7, default=u'compute#router') <NEW_LINE> name = _messages.StringField(8) <NEW_LINE> network = _messages.StringField(9) <NEW_LINE> region = _messages.StringField(10) <NEW_LINE> selfLink = _messages.StringField(11)
Router resource. Fields: bgp: BGP information specific to this router. bgpPeers: BGP information that needs to be configured into the routing stack to establish the BGP peering. It must specify peer ASN and either interface name, IP, or peer IP. Please refer to RFC4273. creationTimestamp: [Output Only] Creation timestamp in RFC3339 text format. description: An optional description of this resource. Provide this property when you create the resource. id: [Output Only] The unique identifier for the resource. This identifier is defined by the server. interfaces: Router interfaces. Each interface requires either one linked resource (e.g. linkedVpnTunnel), or IP address and IP address range (e.g. ipRange), or both. kind: [Output Only] Type of resource. Always compute#router for routers. name: Name of the resource. Provided by the client when the resource is created. The name must be 1-63 characters long, and comply with RFC1035. Specifically, the name must be 1-63 characters long and match the regular expression [a-z]([-a-z0-9]*[a-z0-9])? which means the first character must be a lowercase letter, and all following characters must be a dash, lowercase letter, or digit, except the last character, which cannot be a dash. network: URI of the network to which this router belongs. region: [Output Only] URI of the region where the router resides. selfLink: [Output Only] Server-defined URL for the resource.
62598f9df7d966606f747dc2
class fudge_pow(Both): <NEW_LINE> <INDENT> def __init__(self, both, fudge, pow_, s2c): <NEW_LINE> <INDENT> self.hr_mod = both.hr_mod <NEW_LINE> self.resp_mod = both.resp_mod <NEW_LINE> self.n_states = both.hr_mod.n_states <NEW_LINE> self.dtype = both.dtype <NEW_LINE> self.P_Y = both.P_Y <NEW_LINE> self.fudge = fudge <NEW_LINE> self.pow = pow_ <NEW_LINE> self.s2c = s2c <NEW_LINE> <DEDENT> def calc(self, y ): <NEW_LINE> <INDENT> hr, context, resp = y <NEW_LINE> self.P_Y = self.hr_mod.calc((hr, context))**self.pow <NEW_LINE> self.P_Y *= self.resp_mod.calc((resp,)) <NEW_LINE> for s in range(self.n_states): <NEW_LINE> <INDENT> if self.s2c[s] == 0: <NEW_LINE> <INDENT> self.P_Y[:, s] *= self.fudge <NEW_LINE> <DEDENT> <DEDENT> return self.P_Y
Variant of class "Both" with parameters fudge and pow. "fudge" multiplies all probabilities for normal states, and the heart rate component of the likelihood is raised to power "pow"
62598f9d097d151d1a2c0e02
class Trajectory_of_cannon: <NEW_LINE> <INDENT> def __init__(self,time_step=0.05,X=0,Y=0,initial_speed=700,initial_angel=30,a=0.0065,α=2.5,V_wind=-4.5): <NEW_LINE> <INDENT> self.a=a <NEW_LINE> self.α=α <NEW_LINE> self.V_wind=V_wind <NEW_LINE> self.theta=initial_angel <NEW_LINE> self.Vx=[math.cos(self.theta*math.pi/180)*initial_speed] <NEW_LINE> self.Vy=[math.sin(self.theta*math.pi/180)*initial_speed] <NEW_LINE> self.X=[0] <NEW_LINE> self.Y=[0] <NEW_LINE> self.dt=time_step <NEW_LINE> self.t=[0] <NEW_LINE> self.C=0.00004 <NEW_LINE> <DEDENT> def calculate(self): <NEW_LINE> <INDENT> i=0 <NEW_LINE> while self.Y[i]>=0: <NEW_LINE> <INDENT> a_drag_x=-self.C*(self.Vx[i]-self.V_wind)*math.sqrt(math.pow(self.Vx[i]-self.V_wind,2)+math.pow(self.Vy[i],2)) <NEW_LINE> a_drag_y=-self.C*(self.Vy[i])*math.sqrt(math.pow(self.Vx[i]-self.V_wind,2)+math.pow(self.Vy[i],2)) <NEW_LINE> altitude=math.pow(1-self.a*self.Y[i]/288,self.α) <NEW_LINE> temp_X=self.X[i]+self.Vx[i]*self.dt <NEW_LINE> temp_Y=self.Y[i]+self.Vy[i]*self.dt <NEW_LINE> self.X.append(temp_X) <NEW_LINE> self.Y.append(temp_Y) <NEW_LINE> temp_Vx=self.Vx[i]+altitude*a_drag_x*self.dt <NEW_LINE> temp_Vy=self.Vy[i]-9.79*self.dt+altitude*a_drag_y*self.dt <NEW_LINE> self.Vx.append(temp_Vx) <NEW_LINE> self.Vy.append(temp_Vy) <NEW_LINE> self.t.append(self.t[i]+self.dt) <NEW_LINE> i+=1 <NEW_LINE> <DEDENT> <DEDENT> def show_result(self): <NEW_LINE> <INDENT> pl.plot(self.X,self.Y) <NEW_LINE> pl.xlabel("X(m)") <NEW_LINE> pl.ylabel("Y(m)") <NEW_LINE> pl.xlim(0,30000) <NEW_LINE> pl.ylim(0,20000) <NEW_LINE> pl.show()
Calculate the trajectory of the cannon shell including both air drag and the reduced air density at high altitudes.
62598f9d32920d7e50bc5e31
class ApiregistrationV1beta1ServiceReference(object): <NEW_LINE> <INDENT> swagger_types = { 'name': 'str', 'namespace': 'str' } <NEW_LINE> attribute_map = { 'name': 'name', 'namespace': 'namespace' } <NEW_LINE> def __init__(self, name=None, namespace=None): <NEW_LINE> <INDENT> self._name = None <NEW_LINE> self._namespace = None <NEW_LINE> self.discriminator = None <NEW_LINE> if name is not None: <NEW_LINE> <INDENT> self.name = name <NEW_LINE> <DEDENT> if namespace is not None: <NEW_LINE> <INDENT> self.namespace = namespace <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return self._name <NEW_LINE> <DEDENT> @name.setter <NEW_LINE> def name(self, name): <NEW_LINE> <INDENT> self._name = name <NEW_LINE> <DEDENT> @property <NEW_LINE> def namespace(self): <NEW_LINE> <INDENT> return self._namespace <NEW_LINE> <DEDENT> @namespace.setter <NEW_LINE> def namespace(self, namespace): <NEW_LINE> <INDENT> self._namespace = namespace <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in six.iteritems(self.swagger_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pprint.pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, 
ApiregistrationV1beta1ServiceReference): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other
NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually.
62598f9da219f33f346c65f4
class Homepage(base.GSoCRequestHandler): <NEW_LINE> <INDENT> def templatePath(self): <NEW_LINE> <INDENT> return 'modules/gsoc/homepage/base.html' <NEW_LINE> <DEDENT> def djangoURLPatterns(self): <NEW_LINE> <INDENT> return [ url(r'homepage/%s$' % url_patterns.PROGRAM, self, name='gsoc_homepage'), url(r'program/home/%s$' % url_patterns.PROGRAM, self), django_url(r'^program/home/%s$' % url_patterns.PROGRAM, self), ] <NEW_LINE> <DEDENT> def checkAccess(self, data, check, mutator): <NEW_LINE> <INDENT> check.isProgramVisible() <NEW_LINE> <DEDENT> def context(self, data, check, mutator): <NEW_LINE> <INDENT> current_timeline = data.timeline.currentPeriod() <NEW_LINE> next_deadline = data.timeline.nextDeadline() <NEW_LINE> new_widget = True <NEW_LINE> timeline_data = json.dumps(timeline_logic.createTimelineDict(data.timeline)) <NEW_LINE> context = { 'timeline': Timeline(data, current_timeline, next_deadline, new_widget), 'timeline_data': timeline_data, 'apply': Apply(data), 'connect_with_us': ConnectWithUs(data), 'new_widget': new_widget, 'page_name': '%s - Home page' % (data.program.name), 'program': data.program, 'program_select': base_templates.ProgramSelect( 'modules/gsoc/homepage/_program_select.html', data, 'gsoc_homepage'), } <NEW_LINE> featured_project = project_logic.getFeaturedProject( current_timeline, data.program) <NEW_LINE> if featured_project: <NEW_LINE> <INDENT> context['featured_project'] = FeaturedProject(data, featured_project) <NEW_LINE> <DEDENT> return context
Encapsulate all the methods required to generate GSoC Home page.
62598f9d4527f215b58e9cbe
class SubtitleProvider(metaclass=ProviderMount): <NEW_LINE> <INDENT> _instances = [] <NEW_LINE> def __init__(self, name, address, code): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.address = address <NEW_LINE> self.code = code <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def get_providers(): <NEW_LINE> <INDENT> path = os.path.join(os.path.dirname(__file__), "providers") <NEW_LINE> modules = pkgutil.iter_modules(path=[path]) <NEW_LINE> for _, mod_name, _ in modules: <NEW_LINE> <INDENT> if mod_name not in sys.modules: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> mod_path = "sublime.providers." + mod_name <NEW_LINE> __import__(mod_path, fromlist=[mod_name]) <NEW_LINE> <DEDENT> except ImportError as error: <NEW_LINE> <INDENT> LOG.fatal("Cannot import {} provider: {}".format( mod_name, error)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> if not SubtitleProvider._instances: <NEW_LINE> <INDENT> SubtitleProvider._instances = [ provider() for provider in SubtitleProvider.providers ] <NEW_LINE> <DEDENT> return SubtitleProvider._instances <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return (self.name == other.name and self.address == other.address and self.code == other.code)
Mount point for subtitles providers. Providers implementing this reference should provide the following attributes: name -- Name of the provider that will be displayed address -- Official address of the provider code -- Unique code for this provider
62598f9d21a7993f00c65d5d
class InlineResponse2003DetailsFindings(object): <NEW_LINE> <INDENT> swagger_types = { 'scanners': 'InlineResponse2003DetailsFindingsScanners', 'malware': 'int', 'vulnerabilities': 'InlineResponse2003DetailsFindingsVulnerabilities' } <NEW_LINE> attribute_map = { 'scanners': 'scanners', 'malware': 'malware', 'vulnerabilities': 'vulnerabilities' } <NEW_LINE> def __init__(self, scanners=None, malware=None, vulnerabilities=None): <NEW_LINE> <INDENT> self._scanners = None <NEW_LINE> self._malware = None <NEW_LINE> self._vulnerabilities = None <NEW_LINE> self.discriminator = None <NEW_LINE> if scanners is not None: <NEW_LINE> <INDENT> self.scanners = scanners <NEW_LINE> <DEDENT> if malware is not None: <NEW_LINE> <INDENT> self.malware = malware <NEW_LINE> <DEDENT> if vulnerabilities is not None: <NEW_LINE> <INDENT> self.vulnerabilities = vulnerabilities <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def scanners(self): <NEW_LINE> <INDENT> return self._scanners <NEW_LINE> <DEDENT> @scanners.setter <NEW_LINE> def scanners(self, scanners): <NEW_LINE> <INDENT> self._scanners = scanners <NEW_LINE> <DEDENT> @property <NEW_LINE> def malware(self): <NEW_LINE> <INDENT> return self._malware <NEW_LINE> <DEDENT> @malware.setter <NEW_LINE> def malware(self, malware): <NEW_LINE> <INDENT> self._malware = malware <NEW_LINE> <DEDENT> @property <NEW_LINE> def vulnerabilities(self): <NEW_LINE> <INDENT> return self._vulnerabilities <NEW_LINE> <DEDENT> @vulnerabilities.setter <NEW_LINE> def vulnerabilities(self, vulnerabilities): <NEW_LINE> <INDENT> self._vulnerabilities = vulnerabilities <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in six.iteritems(self.swagger_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> 
result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pprint.pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, InlineResponse2003DetailsFindings): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other
NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually.
62598f9d01c39578d7f12b58
class DocumentAdmin(MultilingualPublishMixin, M2MPlaceholderAdmin): <NEW_LINE> <INDENT> list_display = [ 'title', 'category', 'position', 'user', 'is_on_front_page', 'languages', 'is_published', ] <NEW_LINE> def title(self, obj): <NEW_LINE> <INDENT> lang = get_language() <NEW_LINE> return get_preferred_translation_from_lang(obj, lang).title <NEW_LINE> <DEDENT> title.short_description = _('Title')
Admin class for the ``Document`` model.
62598f9d91f36d47f2230d8d
class BibIndexItemCountTokenizer(BibIndexEmptyTokenizer): <NEW_LINE> <INDENT> def __init__(self, stemming_language = None, remove_stopwords = False, remove_html_markup = False, remove_latex_markup = False): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tokenize(self, record): <NEW_LINE> <INDENT> count = 0 <NEW_LINE> try: <NEW_LINE> <INDENT> count = record['_number_of_copies'] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> except TypeError: <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> return [str(count)] <NEW_LINE> <DEDENT> def get_tokenizing_function(self, wordtable_type): <NEW_LINE> <INDENT> return self.tokenize
Returns a number of copies of a book which is owned by the library.
62598f9da17c0f6771d5c016
class Locations(models.Model): <NEW_LINE> <INDENT> name = models.CharField(max_length=40) <NEW_LINE> description = models.CharField(max_length=200, null=True, blank=True) <NEW_LINE> lat = models.FloatField() <NEW_LINE> lon = models.FloatField() <NEW_LINE> created_at = models.DateTimeField(auto_now_add=True) <NEW_LINE> updated_at = models.DateTimeField(auto_now=True) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return self.name
Locations AKA landmarks model
62598f9d45492302aabfc2b3