code
stringlengths
4
4.48k
docstring
stringlengths
1
6.45k
_id
stringlengths
24
24
class BioethicsSpider(scrapy.Spider): <NEW_LINE> <INDENT> name = "bioetich" <NEW_LINE> allowed_domains = ["bioethics.virginia.edu"] <NEW_LINE> start_urls = ( 'http://bioethics.virginia.edu/people', ) <NEW_LINE> def parse(self, response): <NEW_LINE> <INDENT> sel = Selector(response) <NEW_LINE> people_sel = sel.xpath('//table[@class="views-table cols-0"]') <NEW_LINE> for profile_sel in people_sel: <NEW_LINE> <INDENT> bioetich = University() <NEW_LINE> name = profile_sel.xpath('//tr/td/h1/a/text()').extract() <NEW_LINE> if name: <NEW_LINE> <INDENT> bioetich['name'] = name <NEW_LINE> <DEDENT> title = profile_sel.xpath('//tr/td/h5/text()').extract() <NEW_LINE> if title: <NEW_LINE> <INDENT> bioetich['title'] = title <NEW_LINE> <DEDENT> url = profile_sel.xpath('//tr/td/h1/a/@href').extract() <NEW_LINE> if url: <NEW_LINE> <INDENT> bioetich['url'] = url <NEW_LINE> <DEDENT> phone = profile_sel.xpath('//tr/td[2]/a/text()').extract() <NEW_LINE> if phone: <NEW_LINE> <INDENT> bioetich['phone'] = phone <NEW_LINE> <DEDENT> email = profile_sel.xpath('//tr/td[2]/text()').extract() <NEW_LINE> if email: <NEW_LINE> <INDENT> bioetich['email'] = email <NEW_LINE> <DEDENT> bioetich['department'] = "Bioethics" <NEW_LINE> bioetich['institution'] = "University of Virginia" <NEW_LINE> bioetich['division'] = "Arts and Science" <NEW_LINE> return bioetich
Scrape all profiles from http://www.bioethics.virginia.edu
62598faa66656f66f7d5a365
class ClientApi(flask.ext.restful.Resource): <NEW_LINE> <INDENT> def get(self): <NEW_LINE> <INDENT> dequeue = [str(item) for item in request_queue.queue] <NEW_LINE> return {"ok": True, "queue": dequeue}, 200 <NEW_LINE> <DEDENT> def post(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> message = request_queue.get(timeout=1) <NEW_LINE> <DEDENT> except Queue.Empty: <NEW_LINE> <INDENT> return {"ok": True, "message": "heartbeat"}, 200 <NEW_LINE> <DEDENT> return {"ok": True, "message": message}, 200
Client API Send requests from server to client. A heartbeat is emitted once every second. GET /client POST /client
62598faa2c8b7c6e89bd373a
class DeletedSecretBundle(SecretBundle): <NEW_LINE> <INDENT> _validation = { 'kid': {'readonly': True}, 'managed': {'readonly': True}, 'scheduled_purge_date': {'readonly': True}, 'deleted_date': {'readonly': True}, } <NEW_LINE> _attribute_map = { 'value': {'key': 'value', 'type': 'str'}, 'id': {'key': 'id', 'type': 'str'}, 'content_type': {'key': 'contentType', 'type': 'str'}, 'attributes': {'key': 'attributes', 'type': 'SecretAttributes'}, 'tags': {'key': 'tags', 'type': '{str}'}, 'kid': {'key': 'kid', 'type': 'str'}, 'managed': {'key': 'managed', 'type': 'bool'}, 'recovery_id': {'key': 'recoveryId', 'type': 'str'}, 'scheduled_purge_date': {'key': 'scheduledPurgeDate', 'type': 'unix-time'}, 'deleted_date': {'key': 'deletedDate', 'type': 'unix-time'}, } <NEW_LINE> def __init__( self, **kwargs ): <NEW_LINE> <INDENT> super(DeletedSecretBundle, self).__init__(**kwargs) <NEW_LINE> self.recovery_id = kwargs.get('recovery_id', None) <NEW_LINE> self.scheduled_purge_date = None <NEW_LINE> self.deleted_date = None
A Deleted Secret consisting of its previous id, attributes and its tags, as well as information on when it will be purged. Variables are only populated by the server, and will be ignored when sending a request. :param value: The secret value. :type value: str :param id: The secret id. :type id: str :param content_type: The content type of the secret. :type content_type: str :param attributes: The secret management attributes. :type attributes: ~azure.keyvault.v7_3_preview.models.SecretAttributes :param tags: A set of tags. Application specific metadata in the form of key-value pairs. :type tags: dict[str, str] :ivar kid: If this is a secret backing a KV certificate, then this field specifies the corresponding key backing the KV certificate. :vartype kid: str :ivar managed: True if the secret's lifetime is managed by key vault. If this is a secret backing a certificate, then managed will be true. :vartype managed: bool :param recovery_id: The url of the recovery object, used to identify and recover the deleted secret. :type recovery_id: str :ivar scheduled_purge_date: The time when the secret is scheduled to be purged, in UTC. :vartype scheduled_purge_date: ~datetime.datetime :ivar deleted_date: The time when the secret was deleted, in UTC. :vartype deleted_date: ~datetime.datetime
62598faa30dc7b766599f7c1
class UnsupportedFunctional(ConversionError): <NEW_LINE> <INDENT> def __str__(self): <NEW_LINE> <INDENT> return "The {} interaction type is not supported in {}.".format( self.could_not_convert.__class__.__name__, self.engine.upper())
Force functional that is not supported in a specific engine.
62598faaf9cc0f698b1c5283
class Neighborhood(object): <NEW_LINE> <INDENT> def __init__(self, rows, cols): <NEW_LINE> <INDENT> self.rows = rows <NEW_LINE> self.cols = cols <NEW_LINE> self.numMonsters = 0 <NEW_LINE> self.homes = [[Home(self) for j in range(cols)] for i in range(rows)] <NEW_LINE> <DEDENT> def getRows(self): <NEW_LINE> <INDENT> return self.rows <NEW_LINE> <DEDENT> def getCols(self): <NEW_LINE> <INDENT> return self.cols <NEW_LINE> <DEDENT> def getHome(self, row, col): <NEW_LINE> <INDENT> return self.homes[row][col] <NEW_LINE> <DEDENT> def getMonsterList(self, row, col): <NEW_LINE> <INDENT> return self.getHome(row,col).getMonsters() <NEW_LINE> <DEDENT> def getNumMonsters(self): <NEW_LINE> <INDENT> numMonsters = 0 <NEW_LINE> for row in range(self.rows): <NEW_LINE> <INDENT> for col in range(self.cols): <NEW_LINE> <INDENT> monsters = self.getHome(row,col).getMonsters() <NEW_LINE> for monster in monsters: <NEW_LINE> <INDENT> if (monster.getName() != "Person"): <NEW_LINE> <INDENT> numMonsters = numMonsters + 1 <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> return numMonsters <NEW_LINE> <DEDENT> def addMonsters(self,addMonsters): <NEW_LINE> <INDENT> self.numMonsters = self.numMonsters + addMonsters <NEW_LINE> <DEDENT> def update(self): <NEW_LINE> <INDENT> self.numMonsters = self.numMonsters - 1
Neighborhood class information
62598faa0c0af96317c562f7
class RegenerateAccessKeyParameters(msrest.serialization.Model): <NEW_LINE> <INDENT> _validation = { 'key_type': {'required': True}, } <NEW_LINE> _attribute_map = { 'key_type': {'key': 'keyType', 'type': 'str'}, 'key': {'key': 'key', 'type': 'str'}, } <NEW_LINE> def __init__( self, *, key_type: Union[str, "KeyType"], key: Optional[str] = None, **kwargs ): <NEW_LINE> <INDENT> super(RegenerateAccessKeyParameters, self).__init__(**kwargs) <NEW_LINE> self.key_type = key_type <NEW_LINE> self.key = key
Parameters supplied to the Regenerate Authorization Rule operation, specifies which key needs to be reset. All required parameters must be populated in order to send to Azure. :param key_type: Required. The access key to regenerate. Possible values include: "PrimaryKey", "SecondaryKey". :type key_type: str or ~azure.mgmt.servicebus.v2021_06_01_preview.models.KeyType :param key: Optional, if the key value provided, is reset for KeyType value or autogenerate Key value set for keyType. :type key: str
62598faa7d847024c075c338
class UserDeviceSerializer(serializers.ModelSerializer): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> model = UserDevice
serializer for user device
62598faa0a50d4780f705352
class TestNLPIR(unittest.TestCase): <NEW_LINE> <INDENT> def test_load_library(self): <NEW_LINE> <INDENT> self.assertTrue(isinstance(nlpir.libNLPIR, ctypes.CDLL))
Unit tests for the pynlpir.nlpir module.
62598faa3cc13d1c6d4656e0
class Connection(object): <NEW_LINE> <INDENT> default_host = 'localhost' <NEW_LINE> default_port = 8125 <NEW_LINE> default_sample_rate = 1 <NEW_LINE> default_disabled = False <NEW_LINE> @classmethod <NEW_LINE> def set_defaults( cls, host='localhost', port=8125, sample_rate=1, disabled=False): <NEW_LINE> <INDENT> cls.default_host = host <NEW_LINE> cls.default_port = port <NEW_LINE> cls.default_sample_rate = sample_rate <NEW_LINE> cls.default_disabled = disabled <NEW_LINE> <DEDENT> def __init__(self, host=None, port=None, sample_rate=None, disabled=None): <NEW_LINE> <INDENT> self._host = host or self.default_host <NEW_LINE> self._port = int(port or self.default_port) <NEW_LINE> self._sample_rate = sample_rate or self.default_sample_rate <NEW_LINE> self._disabled = disabled or self.default_disabled <NEW_LINE> self.logger = logging.getLogger( '%s.%s' % (__name__, self.__class__.__name__)) <NEW_LINE> self.udp_sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) <NEW_LINE> self.udp_sock.connect((self._host, self._port)) <NEW_LINE> self.logger.debug( 'Initialized connection to %s:%d with P(%.1f)', self._host, self._port, self._sample_rate) <NEW_LINE> <DEDENT> def send(self, data, sample_rate=None): <NEW_LINE> <INDENT> if self._disabled: <NEW_LINE> <INDENT> self.logger.debug('Connection disabled, not sending data') <NEW_LINE> return False <NEW_LINE> <DEDENT> if sample_rate is None: <NEW_LINE> <INDENT> sample_rate = self._sample_rate <NEW_LINE> <DEDENT> sampled_data = {} <NEW_LINE> if sample_rate < 1: <NEW_LINE> <INDENT> if random.random() <= sample_rate: <NEW_LINE> <INDENT> for stat, value in compat.iter_dict(data): <NEW_LINE> <INDENT> sampled_data[stat] = '%s|@%s' % (data[stat], sample_rate) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> sampled_data = data <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> for stat, value in compat.iter_dict(sampled_data): <NEW_LINE> <INDENT> send_data = ('%s:%s' % (stat, value)).encode("utf-8") <NEW_LINE> 
self.udp_sock.send(send_data) <NEW_LINE> <DEDENT> return True <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> self.logger.exception('unexpected error %r while sending data', e) <NEW_LINE> return False <NEW_LINE> <DEDENT> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return '<%s[%s:%d] P(%.1f)>' % ( self.__class__.__name__, self._host, self._port, self._sample_rate, )
Statsd Connection :keyword host: The statsd host to connect to, defaults to `localhost` :type host: str :keyword port: The statsd port to connect to, defaults to `8125` :type port: int :keyword sample_rate: The sample rate, defaults to `1` (meaning always) :type sample_rate: int :keyword disabled: Turn off sending UDP packets, defaults to ``False`` :type disabled: bool
62598faa23849d37ff851029
class Command(BaseCommand): <NEW_LINE> <INDENT> help = '新着エピソードを取得' <NEW_LINE> def add_arguments(self, parser): <NEW_LINE> <INDENT> parser.add_argument('--verbose', action='store_true', dest='verbose', default=False, help='Print progress on command line') <NEW_LINE> <DEDENT> def handle(self, *args, **options): <NEW_LINE> <INDENT> verbose = options['verbose'] <NEW_LINE> channels = Channel.objects.all() <NEW_LINE> num_channels = len(channels) <NEW_LINE> start = datetime.now() <NEW_LINE> exec_time = start.strftime('%Y/%m/%d %H:%M:%S') <NEW_LINE> if verbose: <NEW_LINE> <INDENT> print('##########################################################################') <NEW_LINE> print('[%s] %d channels to process..' % ( exec_time, num_channels)) <NEW_LINE> <DEDENT> for i, channel in enumerate(channels): <NEW_LINE> <INDENT> if verbose: <NEW_LINE> <INDENT> print('(%d/%d) Processing Channels' % ( i + 1, num_channels)) <NEW_LINE> <DEDENT> get_feed(channel.feed_url) <NEW_LINE> <DEDENT> end = datetime.now() <NEW_LINE> end_time = end.strftime('%Y/%m/%d %H:%M:%S') <NEW_LINE> print('[%s] logue get_feeds completed successfully' % ( end_time)) <NEW_LINE> logger.info('[%s] logue get_feeds completed successfully' % ( end_time))
既存チャンネルの更新をする Cronから定期的に呼ばれることを想定
62598faa9c8ee8231304012b
class instantiableclassmethod(object): <NEW_LINE> <INDENT> def __init__(self, getter): <NEW_LINE> <INDENT> self.getter = getter <NEW_LINE> <DEDENT> def __get__(self, obj, cls): <NEW_LINE> <INDENT> if obj is not None: <NEW_LINE> <INDENT> def wrapper(*args, **kargs): <NEW_LINE> <INDENT> return self.getter(obj, *args, **kargs) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> def wrapper(*args, **kargs): <NEW_LINE> <INDENT> return self.getter(cls, *args, **kargs) <NEW_LINE> <DEDENT> <DEDENT> return wrapper
A method that takes the class as its first argument if called on the class and the instance as an argument if called on an instance.
62598faa8e71fb1e983bba27
class BaseNumericalModel(BaseModel): <NEW_LINE> <INDENT> @keywordonly(connectivity_mapping=None) <NEW_LINE> def __init__(self, model, independent_vars=None, params=None, **kwargs): <NEW_LINE> <INDENT> connectivity_mapping = kwargs.pop('connectivity_mapping') <NEW_LINE> if (connectivity_mapping is None and independent_vars is not None and params is not None): <NEW_LINE> <INDENT> if not isinstance(model, Mapping): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> iter(model) <NEW_LINE> <DEDENT> except TypeError: <NEW_LINE> <INDENT> model = [model] <NEW_LINE> <DEDENT> model = {Variable(): expr for expr in model} <NEW_LINE> <DEDENT> warnings.warn(DeprecationWarning( '`independent_vars` and `params` have been deprecated.' ' Use `connectivity_mapping` instead.' )) <NEW_LINE> self.independent_vars = sorted(independent_vars, key=str) <NEW_LINE> self.params = sorted(params, key=str) <NEW_LINE> self.connectivity_mapping = {var: set(independent_vars + params) for var in model} <NEW_LINE> <DEDENT> elif connectivity_mapping: <NEW_LINE> <INDENT> if not isinstance(model, Mapping): <NEW_LINE> <INDENT> raise TypeError('Please provide the model as a mapping, ' 'corresponding to `connectivity_mapping`.') <NEW_LINE> <DEDENT> sub_model = {} <NEW_LINE> for var, expr in model.items(): <NEW_LINE> <INDENT> if isinstance(expr, sympy.Basic): <NEW_LINE> <INDENT> sub_model[var] = expr <NEW_LINE> <DEDENT> <DEDENT> if sub_model: <NEW_LINE> <INDENT> sub_model = BaseModel(sub_model) <NEW_LINE> sub_model.connectivity_mapping.update(connectivity_mapping) <NEW_LINE> connectivity_mapping = sub_model.connectivity_mapping <NEW_LINE> <DEDENT> self.connectivity_mapping = connectivity_mapping.copy() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise TypeError('Please provide `connectivity_mapping`.') <NEW_LINE> <DEDENT> super(BaseNumericalModel, self).__init__(model, **kwargs) <NEW_LINE> <DEDENT> @property <NEW_LINE> def connectivity_mapping(self): <NEW_LINE> <INDENT> return self._connectivity_mapping 
<NEW_LINE> <DEDENT> @connectivity_mapping.setter <NEW_LINE> def connectivity_mapping(self, value): <NEW_LINE> <INDENT> self._connectivity_mapping = value <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if self.connectivity_mapping != other.connectivity_mapping: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> for key, func in self.model_dict.items(): <NEW_LINE> <INDENT> if func != other[key]: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> return True <NEW_LINE> <DEDENT> def __neg__(self): <NEW_LINE> <INDENT> new_model_dict = {} <NEW_LINE> for key, callable_expr in self.model_dict.values(): <NEW_LINE> <INDENT> new_model_dict[key] = lambda *args, **kwargs: - callable_expr(*args, **kwargs) <NEW_LINE> <DEDENT> return self.__class__(new_model_dict) <NEW_LINE> <DEDENT> @property <NEW_LINE> def shared_parameters(self): <NEW_LINE> <INDENT> raise NotImplementedError( 'Shared parameters can not be inferred for {}'.format(self.__class__.__name__) )
ABC for Numerical Models. These are models whose components are generic python callables.
62598faa3539df3088ecc228
class PyMataSerial(threading.Thread): <NEW_LINE> <INDENT> arduino = serial.Serial() <NEW_LINE> port_id = "" <NEW_LINE> baud_rate = 57600 <NEW_LINE> timeout = 1 <NEW_LINE> command_deque = None <NEW_LINE> def __init__(self, port_id, command_deque, baud_rate): <NEW_LINE> <INDENT> self.port_id = port_id <NEW_LINE> self.command_deque = command_deque <NEW_LINE> self.baud_rate = baud_rate <NEW_LINE> threading.Thread.__init__(self) <NEW_LINE> self.daemon = True <NEW_LINE> self.arduino = serial.Serial(self.port_id, self.baud_rate, timeout=int(self.timeout), writeTimeout=0) <NEW_LINE> self.stop_event = threading.Event() <NEW_LINE> if sys.platform == 'linux': <NEW_LINE> <INDENT> self.arduino.nonblocking() <NEW_LINE> <DEDENT> <DEDENT> def stop(self): <NEW_LINE> <INDENT> self.stop_event.set() <NEW_LINE> <DEDENT> def is_stopped(self): <NEW_LINE> <INDENT> return self.stop_event.is_set() <NEW_LINE> <DEDENT> def open(self, verbose): <NEW_LINE> <INDENT> if verbose: <NEW_LINE> <INDENT> print('\nOpening Arduino Serial port %s ' % self.port_id) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> self.arduino.close() <NEW_LINE> time.sleep(1) <NEW_LINE> self.arduino.open() <NEW_LINE> time.sleep(1) <NEW_LINE> return self.arduino <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> raise <NEW_LINE> <DEDENT> <DEDENT> def close(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.arduino.close() <NEW_LINE> <DEDENT> except OSError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> def write(self, data): <NEW_LINE> <INDENT> if sys.version_info[0] < 3: <NEW_LINE> <INDENT> self.arduino.write(data) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.arduino.write(bytes([ord(data)])) <NEW_LINE> <DEDENT> <DEDENT> def run(self): <NEW_LINE> <INDENT> while not self.is_stopped(): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> if self.arduino.inWaiting(): <NEW_LINE> <INDENT> c = self.arduino.read() <NEW_LINE> self.command_deque.append(ord(c)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> 
time.sleep(.1) <NEW_LINE> <DEDENT> <DEDENT> except OSError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> except IOError: <NEW_LINE> <INDENT> self.stop() <NEW_LINE> <DEDENT> <DEDENT> self.close()
This class manages the serial port for Arduino serial communications
62598faa99fddb7c1ca62da3
class ContentSessionLog(BaseLogModel): <NEW_LINE> <INDENT> user = models.ForeignKey(FacilityUser, blank=True, null=True) <NEW_LINE> content_id = UUIDField(db_index=True) <NEW_LINE> channel_id = UUIDField() <NEW_LINE> start_timestamp = models.DateTimeField() <NEW_LINE> end_timestamp = models.DateTimeField(blank=True, null=True) <NEW_LINE> time_spent = models.FloatField(help_text="(in seconds)", default=0.0, validators=[MinValueValidator(0)]) <NEW_LINE> progress = models.FloatField(default=0, validators=[MinValueValidator(0)]) <NEW_LINE> kind = models.CharField(max_length=200) <NEW_LINE> extra_fields = models.TextField(default="{}")
This model provides a record of interactions with a content item within a single visit to that content page.
62598faad486a94d0ba2bf43
class Population(object): <NEW_LINE> <INDENT> def __init__( self, Ne=None, sample_size=None, initial_size=None, growth_rate=None): <NEW_LINE> <INDENT> self.Ne = Ne <NEW_LINE> self.initial_size = initial_size * self.Ne <NEW_LINE> self.growth_rate = growth_rate / (4 * Ne) <NEW_LINE> <DEDENT> def get_size(self, time): <NEW_LINE> <INDENT> size = self.initial_size <NEW_LINE> if self.growth_rate != 0: <NEW_LINE> <INDENT> size = self.initial_size * math.exp(-self.growth_rate * time) <NEW_LINE> <DEDENT> return size
Simple class to represent the state of a population in terms of its demographic parameters. This is intended to be initialised from the corresponding low-level values so that they can be rescaled back into input units.
62598faaeab8aa0e5d30bd00
class RPCClient(object): <NEW_LINE> <INDENT> JSON_RPC_VERSION = "2.0" <NEW_LINE> _ALLOWED_REPLY_KEYS = sorted(['id', 'jsonrpc', 'error', 'result']) <NEW_LINE> _ALLOWED_REQUEST_KEYS = sorted(['id', 'jsonrpc', 'method', 'params']) <NEW_LINE> def parse_reply(self, data): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> rep = json.loads(data) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> raise InvalidReplyError(e) <NEW_LINE> <DEDENT> for k in rep.keys(): <NEW_LINE> <INDENT> if not k in self._ALLOWED_REPLY_KEYS: <NEW_LINE> <INDENT> raise InvalidReplyError('Key not allowed: %s' % k) <NEW_LINE> <DEDENT> <DEDENT> if not 'jsonrpc' in rep: <NEW_LINE> <INDENT> raise InvalidReplyError('Missing jsonrpc (version) in response.') <NEW_LINE> <DEDENT> if rep['jsonrpc'] != self.JSON_RPC_VERSION: <NEW_LINE> <INDENT> raise InvalidReplyError('Wrong JSONRPC version') <NEW_LINE> <DEDENT> if not 'id' in rep: <NEW_LINE> <INDENT> raise InvalidReplyError('Missing id in response') <NEW_LINE> <DEDENT> if ('error' in rep) == ('result' in rep): <NEW_LINE> <INDENT> raise InvalidReplyError( 'Reply must contain exactly one of result and error.' 
) <NEW_LINE> <DEDENT> if 'error' in rep: <NEW_LINE> <INDENT> response = JSONRPCErrorResponse() <NEW_LINE> error = rep['error'] <NEW_LINE> response.error = error['message'] <NEW_LINE> response._jsonrpc_error_code = error['code'] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> response = JSONRPCSuccessResponse() <NEW_LINE> response.result = rep.get('result', None) <NEW_LINE> <DEDENT> response.unique_id = rep['id'] <NEW_LINE> return response <NEW_LINE> <DEDENT> def __init__(self, url): <NEW_LINE> <INDENT> self.protocol = JSONRPCProtocol() <NEW_LINE> self.url = url <NEW_LINE> <DEDENT> def _send_and_handle_reply(self, req): <NEW_LINE> <INDENT> headers = {'content-type': 'application/json'} <NEW_LINE> reply = requests.post(self.url, req.serialize(), headers=headers) <NEW_LINE> response = self.parse_reply(str(reply.json()).replace("'", '"')) <NEW_LINE> if hasattr(response, 'error'): <NEW_LINE> <INDENT> raise RPCError('Error calling remote procedure: %s' % response.error) <NEW_LINE> <DEDENT> return response <NEW_LINE> <DEDENT> def call(self, method, args, kwargs, one_way=False): <NEW_LINE> <INDENT> req = self.protocol.create_request(method, args, kwargs, one_way) <NEW_LINE> return self._send_and_handle_reply(req).result <NEW_LINE> <DEDENT> def get_proxy(self, prefix='', one_way=False): <NEW_LINE> <INDENT> return RPCProxy(self, prefix, one_way) <NEW_LINE> <DEDENT> def batch_call(self, calls): <NEW_LINE> <INDENT> req = self.protocol.create_batch_request() <NEW_LINE> for call_args in calls: <NEW_LINE> <INDENT> req.append(self.protocol.create_request(*call_args)) <NEW_LINE> <DEDENT> return self._send_and_handle_reply(req)
Client for making RPC calls to connected servers. :param protocol: An :py:class:`~tinyrpc.RPCProtocol` instance. :param transport: A :py:class:`~tinyrpc.transports.ClientTransport` instance.
62598faa442bda511e95c3cc
class DetectionBlock(BaseModule): <NEW_LINE> <INDENT> def __init__(self, in_channels, out_channels, conv_cfg=None, norm_cfg=dict(type='BN', requires_grad=True), act_cfg=dict(type='LeakyReLU', negative_slope=0.1), init_cfg=None): <NEW_LINE> <INDENT> super(DetectionBlock, self).__init__(init_cfg) <NEW_LINE> double_out_channels = out_channels * 2 <NEW_LINE> cfg = dict(conv_cfg=conv_cfg, norm_cfg=norm_cfg, act_cfg=act_cfg) <NEW_LINE> self.conv1 = ConvModule(in_channels, out_channels, 1, **cfg) <NEW_LINE> self.conv2 = ConvModule( out_channels, double_out_channels, 3, padding=1, **cfg) <NEW_LINE> self.conv3 = ConvModule(double_out_channels, out_channels, 1, **cfg) <NEW_LINE> self.conv4 = ConvModule( out_channels, double_out_channels, 3, padding=1, **cfg) <NEW_LINE> self.conv5 = ConvModule(double_out_channels, out_channels, 1, **cfg) <NEW_LINE> <DEDENT> def forward(self, x): <NEW_LINE> <INDENT> tmp = self.conv1(x) <NEW_LINE> tmp = self.conv2(tmp) <NEW_LINE> tmp = self.conv3(tmp) <NEW_LINE> tmp = self.conv4(tmp) <NEW_LINE> out = self.conv5(tmp) <NEW_LINE> return out
Detection block in YOLO neck. Let out_channels = n, the DetectionBlock contains: Six ConvLayers, 1 Conv2D Layer and 1 YoloLayer. The first 6 ConvLayers are formed the following way: 1x1xn, 3x3x2n, 1x1xn, 3x3x2n, 1x1xn, 3x3x2n. The Conv2D layer is 1x1x255. Some block will have branch after the fifth ConvLayer. The input channel is arbitrary (in_channels) Args: in_channels (int): The number of input channels. out_channels (int): The number of output channels. conv_cfg (dict): Config dict for convolution layer. Default: None. norm_cfg (dict): Dictionary to construct and config norm layer. Default: dict(type='BN', requires_grad=True) act_cfg (dict): Config dict for activation layer. Default: dict(type='LeakyReLU', negative_slope=0.1). init_cfg (dict or list[dict], optional): Initialization config dict. Default: None
62598faa99cbb53fe6830e4c
class Particle: <NEW_LINE> <INDENT> def __init__(self, seed = -1, x= 0, y = 0): <NEW_LINE> <INDENT> if seed == -1: <NEW_LINE> <INDENT> seed = random.randint(2, 1000000) <NEW_LINE> <DEDENT> self.x = x <NEW_LINE> self.y = y <NEW_LINE> self.RNG = np.random.RandomState(seed) <NEW_LINE> <DEDENT> def move(self, step_size = 1): <NEW_LINE> <INDENT> switch = self.RNG.randint(1, 5) <NEW_LINE> if(switch == 1): <NEW_LINE> <INDENT> if(self.y + step_size < 50): <NEW_LINE> <INDENT> self.y += step_size <NEW_LINE> <DEDENT> <DEDENT> elif(switch == 2): <NEW_LINE> <INDENT> if(self.x + step_size < 50): <NEW_LINE> <INDENT> self.x += step_size <NEW_LINE> <DEDENT> <DEDENT> elif(switch == 3): <NEW_LINE> <INDENT> if(self.y + step_size > 50): <NEW_LINE> <INDENT> self.y -= step_size <NEW_LINE> <DEDENT> if(self.x == 25): <NEW_LINE> <INDENT> self.y -= step_size <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> if(self.x + step_size > 50): <NEW_LINE> <INDENT> self.x -= step_size
Defines particle objects that are seeded at initialization and move around accordingly
62598faaac7a0e7691f7247f
class NotImplemented(Error): <NEW_LINE> <INDENT> pass
Raised when request is correct, but feature is not implemented by library. For example non-sequential blockwise transfers
62598faa4f6381625f199479
class STD_ANON (pyxb.binding.datatypes.string, pyxb.binding.basis.enumeration_mixin): <NEW_LINE> <INDENT> _ExpandedName = None <NEW_LINE> _XSDLocation = pyxb.utils.utility.Location('/home/gberg/code/src/fstr/darwinpush/xsd/rttiPPTForecasts_v2.xsd', 38, 5) <NEW_LINE> _Documentation = None
An atomic simple type.
62598faa7047854f4633f34f
class DagsterInvalidConfigDefinitionError(DagsterError): <NEW_LINE> <INDENT> def __init__(self, original_root, current_value, stack, reason=None, **kwargs): <NEW_LINE> <INDENT> self.original_root = original_root <NEW_LINE> self.current_value = current_value <NEW_LINE> self.stack = stack <NEW_LINE> super(DagsterInvalidConfigDefinitionError, self).__init__( ( "Error defining config. Original value passed: {original_root}. " "{stack_str}{current_value} " "cannot be resolved.{reason_str}" + CONFIG_ERROR_VERBIAGE ).format( original_root=repr(original_root), stack_str="Error at stack path :" + ":".join(stack) + ". " if stack else "", current_value=repr(current_value), reason_str=" Reason: {reason}.".format(reason=reason) if reason else "", ), **kwargs, )
Indicates that you have attempted to construct a config with an invalid value Acceptable values for config types are any of: 1. A Python primitive type that resolves to a Dagster config type (:py:class:`~python:int`, :py:class:`~python:float`, :py:class:`~python:bool`, :py:class:`~python:str`, or :py:class:`~python:list`). 2. A Dagster config type: :py:data:`~dagster.Int`, :py:data:`~dagster.Float`, :py:data:`~dagster.Bool`, :py:data:`~dagster.String`, :py:data:`~dagster.StringSource`, :py:data:`~dagster.Any`, :py:class:`~dagster.Array`, :py:data:`~dagster.Noneable`, :py:data:`~dagster.Enum`, :py:class:`~dagster.Selector`, :py:class:`~dagster.Shape`, or :py:class:`~dagster.Permissive`. 3. A bare python dictionary, which will be automatically wrapped in :py:class:`~dagster.Shape`. Values of the dictionary are resolved recursively according to the same rules. 4. A bare python list of length one which itself is config type. Becomes :py:class:`Array` with list element as an argument. 5. An instance of :py:class:`~dagster.Field`.
62598faa91af0d3eaad39d85
@dataclass <NEW_LINE> class FileInfo(BaseFileInfo, SerializableAttrs): <NEW_LINE> <INDENT> thumbnail_info: Optional[ThumbnailInfo] = None <NEW_LINE> thumbnail_file: Optional[EncryptedFile] = None <NEW_LINE> thumbnail_url: Optional[ContentURI] = None
Information about a document message.
62598faa1f037a2d8b9e4063
class WeightedRandomSampler(Sampler): <NEW_LINE> <INDENT> def __init__(self, weights, num_samples, replacement=True): <NEW_LINE> <INDENT> if not isinstance(num_samples, _int_classes) or isinstance(num_samples, bool) or num_samples <= 0: <NEW_LINE> <INDENT> raise ValueError("num_samples should be a positive integeral " "value, but got num_samples={}".format(num_samples)) <NEW_LINE> <DEDENT> if not isinstance(replacement, bool): <NEW_LINE> <INDENT> raise ValueError("replacement should be a boolean value, but got " "replacement={}".format(replacement)) <NEW_LINE> <DEDENT> self.weights = torch.as_tensor(weights, dtype=torch.double) <NEW_LINE> self.num_samples = num_samples <NEW_LINE> self.replacement = replacement <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> return iter(torch.multinomial(self.weights, self.num_samples, self.replacement).tolist()) <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return self.num_samples
Samples elements from [0,..,len(weights)-1] with given probabilities (weights). Arguments: weights (sequence) : a sequence of weights, not necessary summing up to one num_samples (int): number of samples to draw replacement (bool): if ``True``, samples are drawn with replacement. If not, they are drawn without replacement, which means that when a sample index is drawn for a row, it cannot be drawn again for that row.
62598faa4428ac0f6e65849a
class EZSPv6(EZSPv5): <NEW_LINE> <INDENT> COMMANDS = commands.COMMANDS <NEW_LINE> SCHEMAS = { bellows.config.CONF_EZSP_CONFIG: voluptuous.Schema(config.EZSP_SCHEMA), bellows.config.CONF_EZSP_POLICIES: voluptuous.Schema(config.EZSP_POLICIES_SCH), } <NEW_LINE> types = v6_types
EZSP Version 6 Protocol version handler.
62598faaf548e778e596b51a
class BuildProperties(msrest.serialization.Model): <NEW_LINE> <INDENT> _validation = { 'provisioning_state': {'readonly': True}, 'triggered_build_result': {'readonly': True}, } <NEW_LINE> _attribute_map = { 'relative_path': {'key': 'relativePath', 'type': 'str'}, 'builder': {'key': 'builder', 'type': 'str'}, 'agent_pool': {'key': 'agentPool', 'type': 'str'}, 'provisioning_state': {'key': 'provisioningState', 'type': 'str'}, 'env': {'key': 'env', 'type': '{str}'}, 'triggered_build_result': {'key': 'triggeredBuildResult', 'type': 'TriggeredBuildResult'}, } <NEW_LINE> def __init__( self, *, relative_path: Optional[str] = None, builder: Optional[str] = None, agent_pool: Optional[str] = None, env: Optional[Dict[str, str]] = None, **kwargs ): <NEW_LINE> <INDENT> super(BuildProperties, self).__init__(**kwargs) <NEW_LINE> self.relative_path = relative_path <NEW_LINE> self.builder = builder <NEW_LINE> self.agent_pool = agent_pool <NEW_LINE> self.provisioning_state = None <NEW_LINE> self.env = env <NEW_LINE> self.triggered_build_result = None
Build resource properties payload. Variables are only populated by the server, and will be ignored when sending a request. :ivar relative_path: The relative path of source code. :vartype relative_path: str :ivar builder: The resource id of builder to build the source code. :vartype builder: str :ivar agent_pool: The resource id of agent pool. :vartype agent_pool: str :ivar provisioning_state: Provisioning state of the KPack build result. Possible values include: "Creating", "Updating", "Succeeded", "Failed", "Deleting". :vartype provisioning_state: str or ~azure.mgmt.appplatform.v2022_01_01_preview.models.BuildProvisioningState :ivar env: The environment variables for this build. :vartype env: dict[str, str] :ivar triggered_build_result: The build result triggered by this build. :vartype triggered_build_result: ~azure.mgmt.appplatform.v2022_01_01_preview.models.TriggeredBuildResult
62598faa4a966d76dd5eee58
class OntoClass(RDF_Entity): <NEW_LINE> <INDENT> def __init__(self, uri, rdftype=None, namespaces=None): <NEW_LINE> <INDENT> super(OntoClass, self).__init__(uri, rdftype, namespaces) <NEW_LINE> self.slug = "class-" + slugify(self.qname) <NEW_LINE> self.domain_of = [] <NEW_LINE> self.range_of = [] <NEW_LINE> self.domain_of_inferred = [] <NEW_LINE> self.range_of_inferred = [] <NEW_LINE> self.ontology = None <NEW_LINE> self.queryHelper = None <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "<Class *%s*>" % (self.uri) <NEW_LINE> <DEDENT> def instances(self): <NEW_LINE> <INDENT> return self.all() <NEW_LINE> <DEDENT> def all(self): <NEW_LINE> <INDENT> out = [] <NEW_LINE> if self.queryHelper: <NEW_LINE> <INDENT> qres = self.queryHelper.getClassInstances(self.uri) <NEW_LINE> out = [x[0] for x in qres] <NEW_LINE> <DEDENT> return out <NEW_LINE> <DEDENT> def count(self): <NEW_LINE> <INDENT> if self.queryHelper: <NEW_LINE> <INDENT> return self.queryHelper.getClassInstancesCount(self.uri) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> <DEDENT> def printStats(self): <NEW_LINE> <INDENT> printDebug("----------------") <NEW_LINE> printDebug("Parents......: %d" % len(self.parents())) <NEW_LINE> printDebug("Children.....: %d" % len(self.children())) <NEW_LINE> printDebug("Ancestors....: %d" % len(self.ancestors())) <NEW_LINE> printDebug("Descendants..: %d" % len(self.descendants())) <NEW_LINE> printDebug("Domain of....: %d" % len(self.domain_of)) <NEW_LINE> printDebug("Range of.....: %d" % len(self.range_of)) <NEW_LINE> printDebug("Instances....: %d" % self.count()) <NEW_LINE> printDebug("----------------") <NEW_LINE> <DEDENT> def describe(self): <NEW_LINE> <INDENT> self.printTriples() <NEW_LINE> self.printStats()
Python representation of a generic class within an ontology. Includes methods for representing and querying RDFS/OWL classes domain_of_inferred: a list of dict [{<Class *http://xmlns.com/foaf/0.1/Person*>: [<Property *http://xmlns.com/foaf/0.1/currentProject*>,<Property *http://xmlns.com/foaf/0.1/familyName*>, etc....]}, {<Class *http://www.w3.org/2003/01/geo/wgs84_pos#SpatialThing*>: [<Property *http://xmlns.com/foaf/0.1/based_near*>, etc...]}, ]
62598faa44b2445a339b692b
class HuaweiEM770(HuaweiDBusDevicePlugin): <NEW_LINE> <INDENT> name = "Huawei EM770" <NEW_LINE> version = "0.1" <NEW_LINE> author = u"Andrew Bird" <NEW_LINE> custom = HuaweiCustomizer <NEW_LINE> __remote_name__ = "EM770" <NEW_LINE> __properties__ = { 'usb_device.vendor_id': [0x12d1], 'usb_device.product_id': [0x1001], }
L{vmc.common.plugin.DBusDevicePlugin} for Huawei's EM770
62598faa379a373c97d98f89
class BertForSequenceClassification(PreTrainedBertModel): <NEW_LINE> <INDENT> def __init__(self, config, num_labels=2, focal_loss=False, gamma=0, alpha=None): <NEW_LINE> <INDENT> super(BertForSequenceClassification, self).__init__(config) <NEW_LINE> self.num_labels = num_labels <NEW_LINE> self.focal_loss = focal_loss <NEW_LINE> self.gamma = gamma <NEW_LINE> self.alpha = alpha <NEW_LINE> self.bert = BertModel(config) <NEW_LINE> self.dropout = nn.Dropout(config.hidden_dropout_prob) <NEW_LINE> self.classifier = nn.Linear(config.hidden_size, num_labels) <NEW_LINE> self.apply(self.init_bert_weights) <NEW_LINE> <DEDENT> def forward(self, input_ids, token_type_ids=None, attention_mask=None, labels=None): <NEW_LINE> <INDENT> _, pooled_output = self.bert(input_ids, token_type_ids, attention_mask, output_all_encoded_layers=False) <NEW_LINE> pooled_output = self.dropout(pooled_output) <NEW_LINE> logits = self.classifier(pooled_output) <NEW_LINE> if labels is not None: <NEW_LINE> <INDENT> if self.focal_loss == False: <NEW_LINE> <INDENT> loss_fct = CrossEntropyLoss() <NEW_LINE> loss = loss_fct(logits.view(-1, self.num_labels), labels.view(-1)) <NEW_LINE> return loss <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> loss_fct = FocalLoss(gamma=self.gamma, alpha=self.alpha) <NEW_LINE> loss = loss_fct(logits.view(-1, self.num_labels), labels.view(-1)) <NEW_LINE> return loss <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> softmax_layer = nn.Softmax(dim=1) <NEW_LINE> scores = softmax_layer(logits) <NEW_LINE> return scores
BERT model for classification. This module is composed of the BERT model with a linear layer on top of the pooled output. Params: `config`: a BertConfig class instance with the configuration to build a new model. `num_labels`: the number of classes for the classifier. Default = 2. Inputs: `input_ids`: a torch.LongTensor of shape [batch_size, sequence_length] with the word token indices in the vocabulary(see the tokens preprocessing logic in the scripts `extract_features.py`, `run_classifier.py` and `run_squad.py`) `token_type_ids`: an optional torch.LongTensor of shape [batch_size, sequence_length] with the token types indices selected in [0, 1]. Type 0 corresponds to a `sentence A` and type 1 corresponds to a `sentence B` token (see BERT paper for more details). `attention_mask`: an optional torch.LongTensor of shape [batch_size, sequence_length] with indices selected in [0, 1]. It's a mask to be used if the input sequence length is smaller than the max input sequence length in the current batch. It's the mask that we typically use for attention when a batch has varying length sentences. `labels`: labels for the classification output: torch.LongTensor of shape [batch_size] with indices selected in [0, ..., num_labels]. Outputs: if `labels` is not `None`: Outputs the CrossEntropy classification loss of the output with the labels. if `labels` is `None`: Outputs the classification logits of shape [batch_size, num_labels]. Example usage: ```python # Already been converted into WordPiece token ids input_ids = torch.LongTensor([[31, 51, 99], [15, 5, 0]]) input_mask = torch.LongTensor([[1, 1, 1], [1, 1, 0]]) token_type_ids = torch.LongTensor([[0, 0, 1], [0, 1, 0]]) config = BertConfig(vocab_size_or_config_json_file=32000, hidden_size=768, num_hidden_layers=12, num_attention_heads=12, intermediate_size=3072) num_labels = 2 model = BertForSequenceClassification(config, num_labels) logits = model(input_ids, token_type_ids, input_mask) ```
62598faa4c3428357761a231
class CardUser(caching.base.CachingMixin, models.Model): <NEW_LINE> <INDENT> classname = models.CharField(max_length=64, editable=False, null=True) <NEW_LINE> objects = caching.base.CachingManager() <NEW_LINE> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super(CardUser, self).__init__(*args, **kwargs) <NEW_LINE> <DEDENT> def save(self, *args, **kwargs): <NEW_LINE> <INDENT> if not self.classname: <NEW_LINE> <INDENT> cur_class = self.__class__ <NEW_LINE> class_str = cur_class.__name__ <NEW_LINE> while cur_class.__name__ != "CardUser": <NEW_LINE> <INDENT> cur_class = cur_class.__base__().__class__ <NEW_LINE> class_str = "%s.%s" % (cur_class.__name__, class_str) <NEW_LINE> <DEDENT> self.classname = class_str <NEW_LINE> <DEDENT> super(CardUser, self). save(*args, **kwargs) <NEW_LINE> <DEDENT> def get_class(self): <NEW_LINE> <INDENT> if self.classname == self.__class__.__name__: <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> ptr = self <NEW_LINE> for sub_class in self.classname.lower().split('.')[1:]: <NEW_LINE> <INDENT> ptr = ptr.__getattribute__(sub_class) <NEW_LINE> <DEDENT> return ptr <NEW_LINE> <DEDENT> class Meta: <NEW_LINE> <INDENT> app_label = "game" <NEW_LINE> verbose_name = "Card User"
Class to get around the limitation that Card can't be ForeignKey'd to either Deck or CardLibrary
62598faa38b623060ffa900f
class InlineQueryResultCachedGif(InlineQueryCachedResult): <NEW_LINE> <INDENT> def __init__(self, id, gif_file_id, title=None, caption=None, reply_markup=None, input_message_content=None): <NEW_LINE> <INDENT> super(InlineQueryResultCachedGif, self).__init__(id, "gif") <NEW_LINE> assert(id is not None) <NEW_LINE> assert(isinstance(id, str)) <NEW_LINE> self.id = id <NEW_LINE> assert(gif_file_id is not None) <NEW_LINE> assert(isinstance(gif_file_id, str)) <NEW_LINE> self.gif_file_id = gif_file_id <NEW_LINE> assert(title is None or isinstance(title, str)) <NEW_LINE> self.title = title <NEW_LINE> assert(caption is None or isinstance(caption, str)) <NEW_LINE> self.caption = caption <NEW_LINE> assert(reply_markup is None or isinstance(reply_markup, InlineKeyboardMarkup)) <NEW_LINE> self.reply_markup = reply_markup <NEW_LINE> assert(input_message_content is None or isinstance(input_message_content, InputMessageContent)) <NEW_LINE> self.input_message_content = input_message_content <NEW_LINE> <DEDENT> def to_array(self): <NEW_LINE> <INDENT> array = super(InlineQueryResultCachedGif, self).to_array() <NEW_LINE> array['gif_file_id'] = str(self.gif_file_id) <NEW_LINE> if self.title is not None: <NEW_LINE> <INDENT> array['title'] = str(self.title) <NEW_LINE> <DEDENT> if self.caption is not None: <NEW_LINE> <INDENT> array['caption'] = str(self.caption) <NEW_LINE> <DEDENT> if self.reply_markup is not None: <NEW_LINE> <INDENT> array['reply_markup'] = self.reply_markup.to_array() <NEW_LINE> <DEDENT> if self.input_message_content is not None: <NEW_LINE> <INDENT> array['input_message_content'] = self.input_message_content.to_array() <NEW_LINE> <DEDENT> return array <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def from_array(array): <NEW_LINE> <INDENT> if array is None or not array: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> assert(isinstance(array, dict)) <NEW_LINE> from pytgbot.api_types.sendable.reply_markup import InlineKeyboardMarkup <NEW_LINE> data = {} <NEW_LINE> 
data['id'] = str(array.get('id')) <NEW_LINE> data['gif_file_id'] = str(array.get('gif_file_id')) <NEW_LINE> data['title'] = str(array.get('title')) if array.get('title') is not None else None <NEW_LINE> data['caption'] = str(array.get('caption')) if array.get('caption') is not None else None <NEW_LINE> data['reply_markup'] = InlineKeyboardMarkup.from_array(array.get('reply_markup')) if array.get('reply_markup') is not None else None <NEW_LINE> data['input_message_content'] = InputMessageContent.from_array(array.get('input_message_content')) if array.get('input_message_content') is not None else None <NEW_LINE> return InlineQueryResultCachedGif(**data) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "InlineQueryResultCachedGif(type={self.type!r}, id={self.id!r}, gif_file_id={self.gif_file_id!r}, title={self.title!r}, caption={self.caption!r}, reply_markup={self.reply_markup!r}, input_message_content={self.input_message_content!r})".format(self=self) <NEW_LINE> <DEDENT> def __contains__(self, key): <NEW_LINE> <INDENT> return key in ["type", "id", "gif_file_id", "title", "caption", "reply_markup", "input_message_content"]
Represents a link to an animated GIF file stored on the Telegram servers. By default, this animated GIF file will be sent by the user with an optional caption. Alternatively, you can use input_message_content to send a message with specified content instead of the animation. https://core.telegram.org/bots/api#inlinequeryresultcachedgif
62598faa4428ac0f6e65849b
class Meta: <NEW_LINE> <INDENT> model = WorkDay <NEW_LINE> fields = ["id", "start", "end", "day", "employee"]
Meta.
62598faa7c178a314d78d414
class RemoteExceptionMixin(object): <NEW_LINE> <INDENT> def __init__(self, module, clazz, message, trace): <NEW_LINE> <INDENT> self.module = module <NEW_LINE> self.clazz = clazz <NEW_LINE> self.message = message <NEW_LINE> self.trace = trace <NEW_LINE> self._str_msgs = message + "\n" + "\n".join(trace) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self._str_msgs
Used for constructing dynamic exception type during deserialization of remote exception. It defines unified '__init__' method signature and exception message format
62598faa442bda511e95c3ce
class BasicParser(Parser2): <NEW_LINE> <INDENT> pass
A parser without the Pythonic features for converting builtin functions and common methods.
62598faa67a9b606de545f43
class BatchCorrelator(object): <NEW_LINE> <INDENT> def __init__(self, xs, zs, size): <NEW_LINE> <INDENT> self.size = int(size) <NEW_LINE> self.dtype = xs[0].dtype <NEW_LINE> self.num_vectors = len(xs) <NEW_LINE> self.x = Array([v.ptr for v in xs], dtype=numpy.int) <NEW_LINE> self.z = Array([v.ptr for v in zs], dtype=numpy.int) <NEW_LINE> <DEDENT> @pycbc.scheme.schemed(BACKEND_PREFIX) <NEW_LINE> def batch_correlate_execute(self, y): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> execute = batch_correlate_execute
Create a batch correlation engine
62598faa99cbb53fe6830e4e
class BrowseKodiVfsCommand(sublime_plugin.WindowCommand): <NEW_LINE> <INDENT> def run(self): <NEW_LINE> <INDENT> self.nodes = [["video", "library://video"], ["music", "library://music"]] <NEW_LINE> self.window.show_quick_panel(items=self.nodes, on_select=self.on_done, selected_index=0) <NEW_LINE> <DEDENT> @utils.run_async <NEW_LINE> def on_done(self, index): <NEW_LINE> <INDENT> if index == -1: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> node = self.nodes[index] <NEW_LINE> data = kodi.request(method="Files.GetDirectory", params={"directory": node[1], "media": "files"}) <NEW_LINE> self.nodes = [[item["label"], item["file"]] for item in data["result"]["files"]] <NEW_LINE> self.window.show_quick_panel(items=self.nodes, on_select=self.on_done, selected_index=0)
Allows to browse the Kodi VFS via JSON-RPC
62598faa7d847024c075c33b
class RequestGameStart(object): <NEW_LINE> <INDENT> __slots__ = ( '_firstRequest', ) <NEW_LINE> @property <NEW_LINE> def firstRequest(self): <NEW_LINE> <INDENT> return self._firstRequest <NEW_LINE> <DEDENT> @firstRequest.setter <NEW_LINE> def firstRequest(self, value): <NEW_LINE> <INDENT> self._firstRequest = msgbuffers.validate_bool( 'RequestGameStart.firstRequest', value) <NEW_LINE> <DEDENT> def __init__(self, firstRequest=False): <NEW_LINE> <INDENT> self.firstRequest = firstRequest <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def unpack(cls, buffer): <NEW_LINE> <INDENT> reader = msgbuffers.BinaryReader(buffer) <NEW_LINE> value = cls.unpack_from(reader) <NEW_LINE> if reader.tell() != len(reader): <NEW_LINE> <INDENT> raise msgbuffers.ReadError( ('RequestGameStart.unpack received a buffer of length {length}, ' + 'but only {position} bytes were read.').format( length=len(reader), position=reader.tell())) <NEW_LINE> <DEDENT> return value <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def unpack_from(cls, reader): <NEW_LINE> <INDENT> _firstRequest = bool(reader.read('b')) <NEW_LINE> return cls(_firstRequest) <NEW_LINE> <DEDENT> def pack(self): <NEW_LINE> <INDENT> writer = msgbuffers.BinaryWriter() <NEW_LINE> self.pack_to(writer) <NEW_LINE> return writer.dumps() <NEW_LINE> <DEDENT> def pack_to(self, writer): <NEW_LINE> <INDENT> writer.write(int(self._firstRequest), 'b') <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if type(self) is type(other): <NEW_LINE> <INDENT> return self._firstRequest == other._firstRequest <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return NotImplemented <NEW_LINE> <DEDENT> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> if type(self) is type(other): <NEW_LINE> <INDENT> return not self.__eq__(other) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return NotImplemented <NEW_LINE> <DEDENT> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return (msgbuffers.size(self._firstRequest, 'b')) <NEW_LINE> <DEDENT> def __str__(self): 
<NEW_LINE> <INDENT> return '{type}(firstRequest={firstRequest})'.format( type=type(self).__name__, firstRequest=self._firstRequest) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return '{type}(firstRequest={firstRequest})'.format( type=type(self).__name__, firstRequest=repr(self._firstRequest))
Generated message-passing message.
62598faa435de62698e9bd6e
class StackedPeriodogram(Periodogram): <NEW_LINE> <INDENT> def __init__(self, lc_list, bins=None, calc_error=True, **kwargs): <NEW_LINE> <INDENT> self.periodograms = [] <NEW_LINE> for lc in lc_list: <NEW_LINE> <INDENT> self.periodograms.append(Periodogram(lc, **kwargs)) <NEW_LINE> <DEDENT> self.bins = bins <NEW_LINE> freq = [] <NEW_LINE> per = [] <NEW_LINE> err = [] <NEW_LINE> ferr = [] <NEW_LINE> if bins is not None: <NEW_LINE> <INDENT> freq = bins.bin_cent <NEW_LINE> ferr = bins.x_error() <NEW_LINE> per, err = self.calculate(calc_error) <NEW_LINE> <DEDENT> Periodogram.__init__(self, f=freq, per=per, err=err, ferr=ferr) <NEW_LINE> <DEDENT> def calculate_slow(self, calc_error=True): <NEW_LINE> <INDENT> per_points = [] <NEW_LINE> for b in self.bins.bin_cent: <NEW_LINE> <INDENT> per_points.append([]) <NEW_LINE> <DEDENT> for per in self.periodograms: <NEW_LINE> <INDENT> this_per = per.points_in_bins(self.bins) <NEW_LINE> for i, points in enumerate(this_per): <NEW_LINE> <INDENT> per_points[i] += points <NEW_LINE> <DEDENT> <DEDENT> per = [] <NEW_LINE> err = [] <NEW_LINE> for freq_points in per_points: <NEW_LINE> <INDENT> per.append(np.mean(freq_points)) <NEW_LINE> err.append(np.std(freq_points) / np.sqrt(len(freq_points))) <NEW_LINE> <DEDENT> return np.array(per), np.array(err) <NEW_LINE> <DEDENT> def calculate(self, calc_error=True): <NEW_LINE> <INDENT> freq_list = np.hstack([p.freq for p in self.periodograms]) <NEW_LINE> per_list = np.hstack([p.periodogram for p in self.periodograms]) <NEW_LINE> if calc_error: <NEW_LINE> <INDENT> error = self.bins.std_error(freq_list, per_list) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> error = None <NEW_LINE> <DEDENT> return self.bins.bin(freq_list, per_list), error <NEW_LINE> <DEDENT> def freq_average_slow(self, fmin, fmax): <NEW_LINE> <INDENT> per_points = [] <NEW_LINE> for per in self.periodograms: <NEW_LINE> <INDENT> per_points += per.points_in_freqrange(fmin, fmax) <NEW_LINE> <DEDENT> return np.mean(per_points) <NEW_LINE> 
<DEDENT> def freq_average(self, fmin, fmax): <NEW_LINE> <INDENT> freq_list = np.hstack([p.freq for p in self.periodograms]) <NEW_LINE> per_list = np.hstack([p.periodogram for p in self.periodograms]) <NEW_LINE> bin_edges = [fmin, fmax] <NEW_LINE> per_mean, _, _ = binned_statistic(freq_list, per_list, statistic='mean', bins=bin_edges) <NEW_LINE> return per_mean[0]
pylag.StackedPeriodogram(Periodogram) calculate the average periodogram from multiple pairs of light curves with some frequency binning. The periodogram is calculated for each pair of light curves in turn, then the data points are sorted into bins. The final periodogram in each bin is the average over all of the individual frequency points from all of the light curves that fall into that bin. The resulting periodogram is accessible in the same manner as a single cross spectrum and analysis can be conducted in the same way. Constructor: pylag.StackedPeriodogram(lc1_list, lc2_list, bins) Constructor Arguments --------------------- lc_list : list (of LightCurve objects) List containing the pyLag LightCurve objects bins : Binning, optional (default=None) pyLag Binning object specifying the binning. If no binning is specified, routines accessing the cross spectrum as a function of frequency will not be accessible, but the cross spectrum can be averaged over specified frequency ranges
62598faa8c0ade5d55dc364d
class InvalidConfigException(Exception): <NEW_LINE> <INDENT> def __init__(self, errinfo: str): <NEW_LINE> <INDENT> self.info = errinfo
invalid configure or some errors found.
62598faa0a50d4780f705355
class AuthView(InsecureAPIView): <NEW_LINE> <INDENT> def get(self, request, format=None): <NEW_LINE> <INDENT> username = request.GET.get('username') <NEW_LINE> password = request.GET.get('password') <NEW_LINE> if not (username and password): <NEW_LINE> <INDENT> return Response(status=status.HTTP_401_UNAUTHORIZED) <NEW_LINE> <DEDENT> result = handlers.user_authentication(username, password) <NEW_LINE> if result is True: <NEW_LINE> <INDENT> return Response() <NEW_LINE> <DEDENT> return Response(status=status.HTTP_401_UNAUTHORIZED)
Validate `username` and `password` using `user_authentication` handler.
62598faabe383301e0253771
class AnthemAVR(MediaPlayerDevice): <NEW_LINE> <INDENT> def __init__(self, avr, name): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.avr = avr <NEW_LINE> self._name = name <NEW_LINE> <DEDENT> def _lookup(self, propname, dval=None): <NEW_LINE> <INDENT> return getattr(self.avr.protocol, propname, dval) <NEW_LINE> <DEDENT> @property <NEW_LINE> def supported_features(self): <NEW_LINE> <INDENT> return SUPPORT_ANTHEMAV <NEW_LINE> <DEDENT> @property <NEW_LINE> def should_poll(self): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return self._name or self._lookup('model') <NEW_LINE> <DEDENT> @property <NEW_LINE> def state(self): <NEW_LINE> <INDENT> pwrstate = self._lookup('power') <NEW_LINE> if pwrstate is True: <NEW_LINE> <INDENT> return STATE_ON <NEW_LINE> <DEDENT> elif pwrstate is False: <NEW_LINE> <INDENT> return STATE_OFF <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return STATE_UNKNOWN <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def is_volume_muted(self): <NEW_LINE> <INDENT> return self._lookup('mute', False) <NEW_LINE> <DEDENT> @property <NEW_LINE> def volume_level(self): <NEW_LINE> <INDENT> return self._lookup('volume_as_percentage', 0.0) <NEW_LINE> <DEDENT> @property <NEW_LINE> def media_title(self): <NEW_LINE> <INDENT> return self._lookup('input_name', 'No Source') <NEW_LINE> <DEDENT> @property <NEW_LINE> def app_name(self): <NEW_LINE> <INDENT> return self._lookup('video_input_resolution_text', '') + ' ' + self._lookup('audio_input_name', '') <NEW_LINE> <DEDENT> @property <NEW_LINE> def source(self): <NEW_LINE> <INDENT> return self._lookup('input_name', "Unknown") <NEW_LINE> <DEDENT> @property <NEW_LINE> def source_list(self): <NEW_LINE> <INDENT> return self._lookup('input_list', ["Unknown"]) <NEW_LINE> <DEDENT> @asyncio.coroutine <NEW_LINE> def async_select_source(self, source): <NEW_LINE> <INDENT> self._update_avr('input_name', source) <NEW_LINE> <DEDENT> @asyncio.coroutine 
<NEW_LINE> def async_turn_off(self): <NEW_LINE> <INDENT> self._update_avr('power', False) <NEW_LINE> <DEDENT> @asyncio.coroutine <NEW_LINE> def async_turn_on(self): <NEW_LINE> <INDENT> self._update_avr('power', True) <NEW_LINE> <DEDENT> @asyncio.coroutine <NEW_LINE> def async_set_volume_level(self, volume): <NEW_LINE> <INDENT> self._update_avr('volume_as_percentage', volume) <NEW_LINE> <DEDENT> @asyncio.coroutine <NEW_LINE> def async_mute_volume(self, mute): <NEW_LINE> <INDENT> self._update_avr('mute', mute) <NEW_LINE> <DEDENT> def _update_avr(self, propname, value): <NEW_LINE> <INDENT> _LOGGER.info( "Sending command to AVR: set %s to %s", propname, str(value)) <NEW_LINE> setattr(self.avr.protocol, propname, value) <NEW_LINE> <DEDENT> @property <NEW_LINE> def dump_avrdata(self): <NEW_LINE> <INDENT> attrs = vars(self) <NEW_LINE> return( 'dump_avrdata: ' + ', '.join('%s: %s' % item for item in attrs.items()))
Entity reading values from Anthem AVR protocol.
62598faa8a43f66fc4bf20f4
class ShippingEventQuantity(models.Model): <NEW_LINE> <INDENT> event = models.ForeignKey( 'order.ShippingEvent', related_name='line_quantities', verbose_name=_("Event")) <NEW_LINE> line = models.ForeignKey( 'order.Line', related_name="shipping_event_quantities", verbose_name=_("Line")) <NEW_LINE> quantity = models.PositiveIntegerField(_("Quantity")) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> verbose_name = _("Shipping Event Quantity") <NEW_LINE> verbose_name_plural = _("Shipping Event Quantities") <NEW_LINE> unique_together = ('event', 'line') <NEW_LINE> <DEDENT> def save(self, *args, **kwargs): <NEW_LINE> <INDENT> if not self.quantity: <NEW_LINE> <INDENT> self.quantity = self.line.quantity <NEW_LINE> <DEDENT> if not self.line.is_shipping_event_permitted( self.event.event_type, self.quantity): <NEW_LINE> <INDENT> raise exceptions.InvalidShippingEvent <NEW_LINE> <DEDENT> super(ShippingEventQuantity, self).save(*args, **kwargs) <NEW_LINE> <DEDENT> def __unicode__(self): <NEW_LINE> <INDENT> return _("%(product)s - quantity %(qty)d") % { 'product': self.line.product, 'qty': self.quantity}
A "through" model linking lines to shipping events. This exists to track the quantity of a line that is involved in a particular shipping event.
62598faa2c8b7c6e89bd373d
class ItemStatusForm(forms.ModelForm): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> model = ItemStatus <NEW_LINE> fields = ('name', 'active', 'hidden', 'details')
ItemStatus add/edit form
62598faabd1bec0571e1507f
class FileMatcher(object): <NEW_LINE> <INDENT> def __init__(self, line_matchers, min_progress, max_progress, filename): <NEW_LINE> <INDENT> if not 0.0 <= min_progress <= max_progress <= 1.0: <NEW_LINE> <INDENT> raise IndexError( '%s restriction is not mat: 0.0 <= min_progress' '(%s) <= max_progress(%s) <= 1.0' % ( self.__class__.__name__, min_progress, max_progress)) <NEW_LINE> <DEDENT> if 'start' not in line_matchers: <NEW_LINE> <INDENT> raise KeyError( 'key `start` does not in line matchers %s' % line_matchers ) <NEW_LINE> <DEDENT> self.line_matchers_ = line_matchers <NEW_LINE> self.min_progress_ = min_progress <NEW_LINE> self.max_progress_ = max_progress <NEW_LINE> self.progress_diff_ = max_progress - min_progress <NEW_LINE> self.filename_ = filename <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return ( '%s[ filename: %s, progress:[%s:%s], ' 'line_matchers: %s]' % ( self.__class__.__name__, self.filename_, self.min_progress_, self.max_progress_, self.line_matchers_) ) <NEW_LINE> <DEDENT> def update_progress_from_log_history(self, state, log_history): <NEW_LINE> <INDENT> file_percentage = log_history['percentage'] <NEW_LINE> percentage = max( self.min_progress_, min( self.max_progress_, self.min_progress_ + file_percentage * self.progress_diff_ ) ) <NEW_LINE> if ( percentage > state['percentage'] or ( percentage == state['percentage'] and log_history['message'] != state['message'] ) ): <NEW_LINE> <INDENT> state['percentage'] = percentage <NEW_LINE> state['message'] = log_history['message'] <NEW_LINE> state['severity'] = log_history['severity'] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> logging.debug( 'ingore update state %s from log history %s ' 'since the updated progress %s lag behind', state, log_history, percentage ) <NEW_LINE> <DEDENT> <DEDENT> def update_progress(self, file_reader_factory, name, state, log_history): <NEW_LINE> <INDENT> file_reader = file_reader_factory.get_file_reader( name, self.filename_, log_history) <NEW_LINE> if not 
file_reader: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> line_matcher_name = log_history['line_matcher_name'] <NEW_LINE> for line in file_reader.readline(): <NEW_LINE> <INDENT> if line_matcher_name not in self.line_matchers_: <NEW_LINE> <INDENT> logging.debug('early exit at\n%s\nbecause %s is not in %s', line, line_matcher_name, self.line_matchers_) <NEW_LINE> break <NEW_LINE> <DEDENT> same_line_matcher_name = line_matcher_name <NEW_LINE> while same_line_matcher_name in self.line_matchers_: <NEW_LINE> <INDENT> line_matcher = self.line_matchers_[same_line_matcher_name] <NEW_LINE> same_line_matcher_name, line_matcher_name = ( line_matcher.update_progress(line, log_history) ) <NEW_LINE> <DEDENT> <DEDENT> log_history['line_matcher_name'] = line_matcher_name <NEW_LINE> logging.debug( 'updated log history %s after processing %s', log_history, self ) <NEW_LINE> self.update_progress_from_log_history(state, log_history)
File matcher to get the installing progress from the log file.
62598faa30bbd72246469934
class EvenniaTestSuiteRunner(DjangoTestSuiteRunner): <NEW_LINE> <INDENT> def build_suite(self, test_labels, extra_tests=None, **kwargs): <NEW_LINE> <INDENT> if not test_labels: <NEW_LINE> <INDENT> test_labels = [applabel.rsplit('.', 1)[1] for applabel in settings.INSTALLED_APPS if (applabel.startswith('src.') or applabel.startswith('game.'))] <NEW_LINE> <DEDENT> return super(EvenniaTestSuiteRunner, self).build_suite(test_labels, extra_tests=extra_tests, **kwargs)
This test runner only runs tests on the apps specified in src/ and game/ to avoid running the large number of tests defined by Django
62598faa63d6d428bbee2723
class OverridableTemplate(object): <NEW_LINE> <INDENT> @property <NEW_LINE> def template(self): <NEW_LINE> <INDENT> return self.index
Subclasses of this class must set the template they want to use as the default template as the ``index`` attribute, not the ``template`` attribute that's normally used for forms. Users of this package may override the template used by one of the forms by using the ``browser`` directive and specifying their own template.
62598faad268445f26639b3f
class _DependencyNode(object): <NEW_LINE> <INDENT> def __init__( self, system: str, target_distribution: str, parent: typing.Optional[_DependencyNode], exec_obj_list: typing.List[ExecObject], ) -> None: <NEW_LINE> <INDENT> assert system <NEW_LINE> assert len(exec_obj_list) >= 1 <NEW_LINE> self.parent = parent <NEW_LINE> self.children: typing.List[_DependencyNode] = [] <NEW_LINE> self.system = system <NEW_LINE> self.target_distribution = target_distribution <NEW_LINE> self.exec_obj_list = exec_obj_list <NEW_LINE> <DEDENT> def find(self, system: str) -> typing.Optional[_DependencyNode]: <NEW_LINE> <INDENT> if self.system == system: <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> for cn in self.children: <NEW_LINE> <INDENT> result = cn.find(system) <NEW_LINE> if result: <NEW_LINE> <INDENT> return result <NEW_LINE> <DEDENT> <DEDENT> return None <NEW_LINE> <DEDENT> def walk(self) -> typing.Generator[_DependencyNode, None, None]: <NEW_LINE> <INDENT> yield (self) <NEW_LINE> for child in self.children: <NEW_LINE> <INDENT> for node in child.walk(): <NEW_LINE> <INDENT> yield node <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> @property <NEW_LINE> def depth(self) -> int: <NEW_LINE> <INDENT> if self.parent: <NEW_LINE> <INDENT> return self.parent.depth + 1 <NEW_LINE> <DEDENT> return 0 <NEW_LINE> <DEDENT> def append_child(self, child: _DependencyNode): <NEW_LINE> <INDENT> if self.target_distribution and child.target_distribution: <NEW_LINE> <INDENT> if self.target_distribution != child.target_distribution: <NEW_LINE> <INDENT> raise ParseError( f"Target distribution mismatch between {self.system}(== {self.target_distribution}) " + f"and {child.system}(== {child.target_distribution})" ) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> if self.target_distribution: <NEW_LINE> <INDENT> child.target_distribution = self.target_distribution <NEW_LINE> <DEDENT> elif child.target_distribution: <NEW_LINE> <INDENT> self.target_distribution = child.target_distribution <NEW_LINE> for c in 
self.children: <NEW_LINE> <INDENT> assert not c.target_distribution <NEW_LINE> c.target_distribution = self.target_distribution <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> self.children.append(child)
Node of the dependency tree of all systems.
62598faa167d2b6e312b6ee9
class VPGBuffer: <NEW_LINE> <INDENT> def __init__(self, obs_dim, act_dim, size, gamma=0.99, lam=0.95): <NEW_LINE> <INDENT> self.obs_buf = np.zeros(core.combined_shape(size, obs_dim), dtype=np.float32) <NEW_LINE> self.act_buf = np.zeros(core.combined_shape(size, act_dim), dtype=np.float32) <NEW_LINE> self.adv_buf = np.zeros(size, dtype=np.float32) <NEW_LINE> self.rew_buf = np.zeros(size, dtype=np.float32) <NEW_LINE> self.ret_buf = np.zeros(size, dtype=np.float32) <NEW_LINE> self.val_buf = np.zeros(size, dtype=np.float32) <NEW_LINE> self.logp_buf = np.zeros(size, dtype=np.float32) <NEW_LINE> self.gamma, self.lam = gamma, lam <NEW_LINE> self.ptr, self.path_start_idx, self.max_size = 0, 0, size <NEW_LINE> <DEDENT> def store(self, obs, act, rew, val, logp): <NEW_LINE> <INDENT> assert self.ptr < self.max_size <NEW_LINE> self.obs_buf[self.ptr] = obs <NEW_LINE> self.act_buf[self.ptr] = act <NEW_LINE> self.rew_buf[self.ptr] = rew <NEW_LINE> self.val_buf[self.ptr] = val <NEW_LINE> self.logp_buf[self.ptr] = logp <NEW_LINE> self.ptr += 1 <NEW_LINE> <DEDENT> def finish_path(self, last_val=0): <NEW_LINE> <INDENT> path_slice = slice(self.path_start_idx, self.ptr) <NEW_LINE> rews = np.append(self.rew_buf[path_slice], last_val) <NEW_LINE> vals = np.append(self.val_buf[path_slice], last_val) <NEW_LINE> deltas = rews[:-1] + self.gamma * vals[1:] - vals[:-1] <NEW_LINE> self.adv_buf[path_slice] = core.discount_cumsum(deltas, self.gamma * self.lam) <NEW_LINE> self.ret_buf[path_slice] = core.discount_cumsum(rews, self.gamma)[:-1] <NEW_LINE> self.path_start_idx = self.ptr <NEW_LINE> <DEDENT> def get(self): <NEW_LINE> <INDENT> assert self.ptr == self.max_size <NEW_LINE> self.ptr, self.path_start_idx = 0, 0 <NEW_LINE> adv_mean, adv_std = mpi_statistics_scalar(self.adv_buf) <NEW_LINE> self.adv_buf = (self.adv_buf - adv_mean) / adv_std <NEW_LINE> return [self.obs_buf, self.act_buf, self.adv_buf, self.ret_buf, self.logp_buf]
A buffer for storing trajectories experienced by a VPG agent interacting with the environment, and using Generalized Advantage Estimation (GAE-Lambda) for calculating the advantages of state-action pairs.
62598faa7d43ff24874273be
class EabiCheck(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.flag = "FAIL" <NEW_LINE> self.tag = True <NEW_LINE> self.info = "" <NEW_LINE> <DEDENT> def checkEnv(self): <NEW_LINE> <INDENT> self.checkInstall() <NEW_LINE> if self.tag: <NEW_LINE> <INDENT> self.checkEabiVersion() <NEW_LINE> <DEDENT> <DEDENT> def checkInstall(self): <NEW_LINE> <INDENT> returnCode,self.eabigcc = commands.getstatusoutput("which arm-eabi-gcc") <NEW_LINE> if returnCode != 0: <NEW_LINE> <INDENT> self.info = "you have not installed arm-eabi-gcc" <NEW_LINE> self.tag = False <NEW_LINE> result("arm-eabi-gcc","","",self.flag,self.info) <NEW_LINE> global checkResult <NEW_LINE> checkResult = 1 <NEW_LINE> <DEDENT> <DEDENT> def checkEabiVersion(self): <NEW_LINE> <INDENT> eabiVersion = commands.getoutput("%s --version" % self.eabigcc) <NEW_LINE> pattern = re.compile("arm-eabi-gcc.*?([\d\.]+)",re.S) <NEW_LINE> match = pattern.match(eabiVersion) <NEW_LINE> if match: <NEW_LINE> <INDENT> self.versionNo = match.group(1) <NEW_LINE> number = self.versionNo.split(".") <NEW_LINE> if int(number[0]) < 4 or (int(number[0]) == 4 and int(number[1]) < 4) or (int(number[0]) == 4 and int(number[1]) == 4 and int(number[2]) < 3): <NEW_LINE> <INDENT> self.info = "your arm-eabi-gcc version is lower than recommendation" <NEW_LINE> <DEDENT> elif int(number[0]) == 4 and int(number[1]) == 4 and int(number[2]) == 3: <NEW_LINE> <INDENT> self.flag = "OK" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.flag = "WARNING" <NEW_LINE> self.info = "your arm-eabi-gcc version is higher than recommendation" <NEW_LINE> <DEDENT> <DEDENT> else: self.versionNo = "unknown" <NEW_LINE> eabiBit = commands.getoutput("file -bL %s" % self.eabigcc) <NEW_LINE> pattern = re.compile("ELF\s*(\d+)-bit\s*LSB\s*executable.*") <NEW_LINE> match = pattern.match(eabiBit) <NEW_LINE> if match: <NEW_LINE> <INDENT> self.bit = match.group(1) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.bit = "unknown" <NEW_LINE> <DEDENT> 
result("arm-eabi-gcc",self.versionNo,self.bit,self.flag,self.info) <NEW_LINE> if self.flag == "FAIL": <NEW_LINE> <INDENT> global checkResult <NEW_LINE> checkResult = 1
check the requirement for arm-eabi-gcc compiler
62598faabaa26c4b54d4f22a
class SecretsManager(object): <NEW_LINE> <INDENT> barbican_driver = driver.BarbicanDriver() <NEW_LINE> def create(self, secret_doc): <NEW_LINE> <INDENT> encryption_type = secret_doc['metadata']['storagePolicy'] <NEW_LINE> secret_type = self._get_secret_type(secret_doc['schema']) <NEW_LINE> if encryption_type == ENCRYPTED: <NEW_LINE> <INDENT> kwargs = { 'name': secret_doc['metadata']['name'], 'secret_type': secret_type, 'payload': secret_doc['data'] } <NEW_LINE> resp = self.barbican_driver.create_secret(**kwargs) <NEW_LINE> secret_ref = resp['secret_ref'] <NEW_LINE> created_secret = secret_ref <NEW_LINE> <DEDENT> elif encryption_type == CLEARTEXT: <NEW_LINE> <INDENT> created_secret = secret_doc['data'] <NEW_LINE> <DEDENT> return created_secret <NEW_LINE> <DEDENT> def _get_secret_type(self, schema): <NEW_LINE> <INDENT> _schema = schema.split('/')[1].lower().strip() <NEW_LINE> if _schema == 'certificatekey': <NEW_LINE> <INDENT> return 'private' <NEW_LINE> <DEDENT> elif _schema == 'certificateauthority': <NEW_LINE> <INDENT> return 'certificate' <NEW_LINE> <DEDENT> elif _schema == 'certificateauthoritykey': <NEW_LINE> <INDENT> return 'private' <NEW_LINE> <DEDENT> elif _schema == 'publickey': <NEW_LINE> <INDENT> return 'public' <NEW_LINE> <DEDENT> return _schema
Internal API resource for interacting with Barbican. Currently only supports Barbican.
62598faa32920d7e50bc5fcd
class ImageUri(AnyUri): <NEW_LINE> <INDENT> pass
A special kind of String that holds the uri of an image.
62598faa627d3e7fe0e06e25
class BrowserWidget(QWidget): <NEW_LINE> <INDENT> def __init__(self, url, process=None, parent=None): <NEW_LINE> <INDENT> QWidget.__init__(self, parent) <NEW_LINE> self._process = process <NEW_LINE> vbox = QVBoxLayout(self) <NEW_LINE> self.webFrame = QWebView(self) <NEW_LINE> self.webFrame.setAcceptDrops(False) <NEW_LINE> self.webFrame.load(QUrl(url)) <NEW_LINE> vbox.addWidget(self.webFrame) <NEW_LINE> if process is not None: <NEW_LINE> <INDENT> time.sleep(0.5) <NEW_LINE> self.webFrame.load(QUrl(url)) <NEW_LINE> <DEDENT> self.webFrame.page().currentFrame().setScrollBarPolicy( Qt.Vertical, Qt.ScrollBarAsNeeded) <NEW_LINE> self.webFrame.page().currentFrame().setScrollBarPolicy( Qt.Horizontal, Qt.ScrollBarAsNeeded) <NEW_LINE> <DEDENT> def start_page_operations(self, url): <NEW_LINE> <INDENT> opt = file_manager.get_basename(url.toString()) <NEW_LINE> self.emit(SIGNAL(opt)) <NEW_LINE> <DEDENT> def shutdown_pydoc(self): <NEW_LINE> <INDENT> if self._process is not None: <NEW_LINE> <INDENT> self._process.kill() <NEW_LINE> <DEDENT> <DEDENT> def find_match(self, word, back=False, sensitive=False, whole=False): <NEW_LINE> <INDENT> self.webFrame.page().findText(word)
openProject(QString) openPreferences() dontOpenStartPage()
62598faa3cc13d1c6d4656e4
class Actor(): <NEW_LINE> <INDENT> def __init__(self, state_size, action_size, action_low, action_high): <NEW_LINE> <INDENT> self.state_size = state_size <NEW_LINE> self.action_size = action_size <NEW_LINE> self.action_low = action_low <NEW_LINE> self.action_high = action_high <NEW_LINE> self.action_range = self.action_high - self.action_low <NEW_LINE> self.build_model() <NEW_LINE> <DEDENT> def build_model(self): <NEW_LINE> <INDENT> states = layers.Input(shape=(self.state_size ,), name = 'states') <NEW_LINE> net = layers.Dense(units = 32, kernel_regularizer = regularizers.l2(0.01), activity_regularizer = regularizers.l1(0.01))(states) <NEW_LINE> net = layers.BatchNormalization()(net) <NEW_LINE> net = layers.Activation('relu')(net) <NEW_LINE> net = layers.Dropout(0.4)(net) <NEW_LINE> net = layers.Dense(units = 64, kernel_regularizer = regularizers.l2(0.01), activity_regularizer = regularizers.l1(0.01))(net) <NEW_LINE> net = layers.BatchNormalization()(net) <NEW_LINE> net = layers.Activation('relu')(net) <NEW_LINE> net = layers.Dropout(0.4)(net) <NEW_LINE> net = layers.Dense(units = 128, kernel_regularizer = regularizers.l2(0.01), activity_regularizer = regularizers.l1(0.01))(net) <NEW_LINE> net = layers.BatchNormalization()(net) <NEW_LINE> net = layers.Activation('relu')(net) <NEW_LINE> net = layers.Dropout(0.4)(net) <NEW_LINE> net = layers.Dense(units = 256, kernel_regularizer = regularizers.l2(0.01), activity_regularizer = regularizers.l1(0.01))(net) <NEW_LINE> net = layers.BatchNormalization()(net) <NEW_LINE> net = layers.Activation('relu')(net) <NEW_LINE> net = layers.Dropout(0.4)(net) <NEW_LINE> net = layers.Dense(units = 128, kernel_regularizer = regularizers.l2(0.01), activity_regularizer = regularizers.l1(0.01))(net) <NEW_LINE> net = layers.BatchNormalization()(net) <NEW_LINE> net = layers.Activation('relu')(net) <NEW_LINE> net = layers.Dropout(0.4)(net) <NEW_LINE> raw_actions = layers.Dense(units = self.action_size, activation = 'sigmoid', name = 
'raw_actions')(net) <NEW_LINE> actions = layers.Lambda(lambda x: (x * self.action_range) + self.action_low, name = 'actions')(raw_actions) <NEW_LINE> self.model = models.Model(inputs = states, outputs = actions) <NEW_LINE> action_gradients = layers.Input(shape=(self.action_size,)) <NEW_LINE> loss = K.mean(-action_gradients * actions) <NEW_LINE> optimizer = optimizers.Adam() <NEW_LINE> updates_op = optimizer.get_updates(params=self.model.trainable_weights, loss=loss) <NEW_LINE> self.train_fn = K.function(inputs=[self.model.input, action_gradients, K.learning_phase()], outputs=[], updates=updates_op)
Actor policy model
62598faa4a966d76dd5eee5a
class AsyncJSONMessage(botornado.sqs.message._AsyncMessage, JSONMessage): <NEW_LINE> <INDENT> pass
Acts like a dictionary but encodes it's data as a Base64 encoded JSON payload.
62598faa23849d37ff85102d
class Config: <NEW_LINE> <INDENT> UPLOADED_PHOTOS_DEST = "app/static/photos" <NEW_LINE> SECRET_KEY = os.environ.get("SECRET_KEY") <NEW_LINE> SQLALCHEMY_DATABASE_URI = 'postgresql+psycopg2://ryan:12345@localhost/pitch' <NEW_LINE> MAIL_SERVER = "smtp.gmail.com" <NEW_LINE> MAIL_PORT = 587 <NEW_LINE> MAIL_USE_TLS = True <NEW_LINE> MAIL_USERNAME = os.environ.get("MAIL_USERNAME") <NEW_LINE> MAIL_PASSWORD = os.environ.get("MAIL_PASSWORD")
This is the parent class which will have the general configurations
62598faa4c3428357761a233
class LoginForm(FlaskForm): <NEW_LINE> <INDENT> email = StringField('邮箱', description='邮箱用于登录', validators=[DataRequired(), Email()]) <NEW_LINE> password = PasswordField('密码', validators=[DataRequired()]) <NEW_LINE> remember_me = BooleanField('记住密码') <NEW_LINE> submit = SubmitField('登录')
登录表单
62598faa38b623060ffa9011
class CityDataBase: <NEW_LINE> <INDENT> DOWNLOAD_URL = 'https://datanova.legroupe.laposte.fr/api/records/1.0/download' <NEW_LINE> _logger = _module_logger.getChild('FrenchZipCodeDataBase') <NEW_LINE> def __init__(self, json_path=None): <NEW_LINE> <INDENT> if json_path: <NEW_LINE> <INDENT> json_data = self._load_json(json_path) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> json_data = self._download() <NEW_LINE> <DEDENT> self._entries = [CityDataBaseEntry.from_json(record) for record in json_data] <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def _load_json(cls, json_path): <NEW_LINE> <INDENT> cls._logger.info('Read {}'.format(json_path)) <NEW_LINE> with open(json_path) as fh: <NEW_LINE> <INDENT> data = fh.read().replace("'", '"') <NEW_LINE> json_data = json.loads(data) <NEW_LINE> <DEDENT> return json_data <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def _download(cls): <NEW_LINE> <INDENT> cls._logger.info('Download data from {} ...'.format(cls.DOWNLOAD_URL)) <NEW_LINE> params = dict(dataset='laposte_hexasmal', format='json') <NEW_LINE> response = requests.get(url=cls.DOWNLOAD_URL, params=params) <NEW_LINE> response.raise_for_status() <NEW_LINE> json_data = response.json() <NEW_LINE> number_of_cities = len(json_data) <NEW_LINE> cls._logger.info('Retrieved {:_} cities'.format(number_of_cities)) <NEW_LINE> return json_data <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return len(self._entries) <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> return iter(self._entries) <NEW_LINE> <DEDENT> def __getitem__(self,_slice): <NEW_LINE> <INDENT> return self._entries[_slice]
Class for the French City Database.
62598faa8da39b475be0315d
class glancesGrabHDDTemp: <NEW_LINE> <INDENT> cache = "" <NEW_LINE> address = "127.0.0.1" <NEW_LINE> port = 7634 <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> sck = socket.socket(socket.AF_INET, socket.SOCK_STREAM) <NEW_LINE> sck.connect((self.address, self.port)) <NEW_LINE> sck.close() <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> self.initok = False <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.initok = True <NEW_LINE> <DEDENT> <DEDENT> def __update__(self): <NEW_LINE> <INDENT> self.hddtemp_list = [] <NEW_LINE> if self.initok: <NEW_LINE> <INDENT> data = "" <NEW_LINE> try: <NEW_LINE> <INDENT> sck = socket.socket(socket.AF_INET, socket.SOCK_STREAM) <NEW_LINE> sck.connect((self.address, self.port)) <NEW_LINE> data = sck.recv(4096) <NEW_LINE> sck.close() <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> hddtemp_current = {} <NEW_LINE> hddtemp_current['label'] = "hddtemp is gone" <NEW_LINE> hddtemp_current['value'] = 0 <NEW_LINE> self.hddtemp_list.append(hddtemp_current) <NEW_LINE> return <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if len(data) < 14: <NEW_LINE> <INDENT> if len(self.cache) == 0: <NEW_LINE> <INDENT> data = "|hddtemp error||0||" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> data = self.cache <NEW_LINE> <DEDENT> <DEDENT> self.cache = data <NEW_LINE> fields = data.split("|") <NEW_LINE> devices = (len(fields) - 1) / 5 <NEW_LINE> for i in range(0, devices): <NEW_LINE> <INDENT> offset = i * 5 <NEW_LINE> hddtemp_current = {} <NEW_LINE> temperature = fields[offset + 3] <NEW_LINE> if temperature == "ERR": <NEW_LINE> <INDENT> hddtemp_current['label'] = "hddtemp error" <NEW_LINE> hddtemp_current['value'] = 0 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> hddtemp_current['label'] = fields[offset + 1].split("/")[-1] <NEW_LINE> hddtemp_current['value'] = int(temperature) <NEW_LINE> <DEDENT> self.hddtemp_list.append(hddtemp_current) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> def get(self): <NEW_LINE> <INDENT> self.__update__() 
<NEW_LINE> return self.hddtemp_list
Get hddtemp stats using a socket connection
62598faa63b5f9789fe850de
class PermuteLayer(caffe.Layer): <NEW_LINE> <INDENT> def setup(self, bottom, top): <NEW_LINE> <INDENT> self.top_names = ['top'] <NEW_LINE> params = eval(self.param_str) <NEW_LINE> self.permuteIndex = np.asarray(params['permuteIndex']) <NEW_LINE> self.bottomShape = bottom[0].data.shape <NEW_LINE> if len(self.permuteIndex) == 2: <NEW_LINE> <INDENT> top[0].reshape(self.bottomShape[self.permuteIndex[0]], self.bottomShape[self.permuteIndex[1]]) <NEW_LINE> <DEDENT> elif len(self.permuteIndex) == 3: <NEW_LINE> <INDENT> top[0].reshape(self.bottomShape[self.permuteIndex[0]], self.bottomShape[self.permuteIndex[1]], self.bottomShape[self.permuteIndex[2]]) <NEW_LINE> <DEDENT> elif len(self.permuteIndex) == 4: <NEW_LINE> <INDENT> top[0].reshape(self.bottomShape[self.permuteIndex[0]], self.bottomShape[self.permuteIndex[1]], self.bottomShape[self.permuteIndex[2]], self.bottomShape[self.permuteIndex[3]]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> def forward(self, bottom, top): <NEW_LINE> <INDENT> top[0].data[...] = bottom[0].data.transpose(self.permuteIndex) <NEW_LINE> <DEDENT> def reshape(self, bottom, top): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def backward(self, top, propagate_down, bottom): <NEW_LINE> <INDENT> bottom[0].diff[...] = top[0].diff.transpose(self.permuteIndex)
classdocs
62598faa1f5feb6acb162b99
class ServiceForm(forms.ModelForm): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super(ServiceForm, self).__init__(*args, **kwargs) <NEW_LINE> self.helper = FormHelper() <NEW_LINE> self.helper.form_id = 'service-form' <NEW_LINE> self.helper.form_class = 'form-horizontal' <NEW_LINE> self.helper.label_class = 'col-sm-3' <NEW_LINE> self.helper.field_class = 'col-sm-9' <NEW_LINE> self.helper.html5_required = True <NEW_LINE> self.helper.layout = Layout( Fieldset('FTP Settings', 'ftp_host', 'ftp_username', 'ftp_password', 'ftp_path' ), Fieldset('Repository', Field('repo_source', data_action=reverse( 'ftpdeploy_repo_api', args=(0, '__'))), 'repo_name', 'repo_slug_name', 'repo_branch' ), Fieldset('Notification', 'notification' ), Fieldset('Security', 'secret_key' ), Div( Div( Submit('save', 'Submit', css_class='pull-right'), css_class='col-sm-12' ), css_class='row' ) ) <NEW_LINE> <DEDENT> def clean_ftp_path(self): <NEW_LINE> <INDENT> data = self.cleaned_data['ftp_path'] <NEW_LINE> if not data.endswith('/'): <NEW_LINE> <INDENT> data = '%s/' % data <NEW_LINE> <DEDENT> return data <NEW_LINE> <DEDENT> class Meta: <NEW_LINE> <INDENT> model = Service <NEW_LINE> exclude = ['status', 'status_date', 'status_message'] <NEW_LINE> widgets = { 'ftp_password': forms.PasswordInput(render_value=True), }
Add/Edit service form
62598faae5267d203ee6b883
class TeacherViewset(viewsets.GenericViewSet, mixins.CreateModelMixin, mixins.ListModelMixin, mixins.UpdateModelMixin, mixins.DestroyModelMixin, mixins.RetrieveModelMixin): <NEW_LINE> <INDENT> serializer_class = TeacherSerializer <NEW_LINE> pagination_class = P1 <NEW_LINE> queryset = Teacher.objects.all() <NEW_LINE> filter_backends = (DjangoFilterBackend,) <NEW_LINE> filter_fields = ('college',) <NEW_LINE> authentication_classes = (JSONWebTokenAuthentication, authentication.SessionAuthentication) <NEW_LINE> def get_permissions(self): <NEW_LINE> <INDENT> if self.action in ("create", 'update'): <NEW_LINE> <INDENT> return (IsAuthenticated(),) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return ()
指导老师逻辑
62598faa442bda511e95c3d0
class FgmsHandler: <NEW_LINE> <INDENT> def __init__(self, aircraft): <NEW_LINE> <INDENT> self.aircraft = aircraft <NEW_LINE> self.start() <NEW_LINE> <DEDENT> def start(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.socket = socket(AF_INET, SOCK_DGRAM) <NEW_LINE> self.socket.setsockopt(SOL_SOCKET, SO_REUSEADDR, 1) <NEW_LINE> <DEDENT> except OSError as error: <NEW_LINE> <INDENT> self.socket = None <NEW_LINE> print('Connection error: %s' % error) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.FGMS_handshaker = FGMShandshaker(self.socket, self.aircraft) <NEW_LINE> self.fgms_handshake() <NEW_LINE> <DEDENT> <DEDENT> def stop(self): <NEW_LINE> <INDENT> if self.is_running(): <NEW_LINE> <INDENT> self.socket = None <NEW_LINE> self.FGMS_handshaker.set_status(False) <NEW_LINE> <DEDENT> <DEDENT> def is_running(self): <NEW_LINE> <INDENT> return self.socket is not None <NEW_LINE> <DEDENT> def fgms_handshake(self): <NEW_LINE> <INDENT> self.FGMS_handshaker.start()
Creates sockets and starts the fgms connection for each aircraft.
62598faaac7a0e7691f72483
class Comment(models.Model): <NEW_LINE> <INDENT> text = models.CharField(max_length=100) <NEW_LINE> track = models.ForeignKey(Track, related_name="comments")
Represent comment to any track measure.
62598faa2ae34c7f260ab05b
class SSHClient(object): <NEW_LINE> <INDENT> def __init__(self, **kwargs): <NEW_LINE> <INDENT> for k, v in kwargs.items(): <NEW_LINE> <INDENT> setattr(self, k, v) <NEW_LINE> <DEDENT> <DEDENT> def run(self, cmd): <NEW_LINE> <INDENT> t = paramiko.SSHClient() <NEW_LINE> t.set_missing_host_key_policy(paramiko.AutoAddPolicy()) <NEW_LINE> t.connect(self.hostname, port = self.port, username = self.username, password = self.password) <NEW_LINE> outs = t.exec_command(cmd) <NEW_LINE> if outs: <NEW_LINE> <INDENT> res = outs[1].read() <NEW_LINE> t.close() <NEW_LINE> return res <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> t.close() <NEW_LINE> raise Exception('ssh output is None')
SSH warapper around the paramiko library that can run commands and return the results in string.
62598faae5267d203ee6b884
class OsPlugin(UserAgentPlugin): <NEW_LINE> <INDENT> slug = 'os' <NEW_LINE> title = _('Operating Systems') <NEW_LINE> tag = 'os' <NEW_LINE> tag_label = _('Operating System') <NEW_LINE> def get_tag_from_ua(self, ua): <NEW_LINE> <INDENT> if 'flavor' in ua: <NEW_LINE> <INDENT> tag = ua['flavor']['name'] <NEW_LINE> if 'version' in ua['flavor']: <NEW_LINE> <INDENT> tag += ' ' + ua['version'] <NEW_LINE> <DEDENT> <DEDENT> elif 'os' in ua: <NEW_LINE> <INDENT> tag = ua['os']['name'] <NEW_LINE> if 'version' in ua['os']: <NEW_LINE> <INDENT> tag += ' ' + ua['version'] <NEW_LINE> <DEDENT> elif 'dist' in ua: <NEW_LINE> <INDENT> tag += ua['dist']['name'] <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> return tag
Adds additional support for showing information about operating systems including: * A panel which shows all operating systems a message was seen on. * A sidebar module which shows the operating systems most actively seen on.
62598faa435de62698e9bd6f
class ExportReadGroupSetRequest(_messages.Message): <NEW_LINE> <INDENT> exportUri = _messages.StringField(1) <NEW_LINE> projectId = _messages.StringField(2) <NEW_LINE> referenceNames = _messages.StringField(3, repeated=True)
The read group set export request. Fields: exportUri: Required. A Google Cloud Storage URI for the exported BAM file. The currently authenticated user must have write access to the new file. An error will be returned if the URI already contains data. projectId: Required. The Google Developers Console project ID that owns this export. The caller must have WRITE access to this project. referenceNames: The reference names to export. If this is not specified, all reference sequences, including unmapped reads, are exported. Use `*` to export only unmapped reads.
62598faa796e427e5384e70d
class IReader(Reader): <NEW_LINE> <INDENT> SECTION = re.compile(r'(\[)([^]]+)(])') <NEW_LINE> PROPERTY = re.compile(r'(:|=)') <NEW_LINE> def __init__(self, fp): <NEW_LINE> <INDENT> self.fp = fp <NEW_LINE> <DEDENT> def __call__(self): <NEW_LINE> <INDENT> d = {} <NEW_LINE> fp = self.fp <NEW_LINE> section = None <NEW_LINE> while True: <NEW_LINE> <INDENT> line = fp.readline() <NEW_LINE> if not line: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> if line.startswith('#'): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> match = IReader.SECTION.match(line) <NEW_LINE> if match: <NEW_LINE> <INDENT> section = match.group(2) <NEW_LINE> section = section.strip() <NEW_LINE> d[section] = {} <NEW_LINE> continue <NEW_LINE> <DEDENT> if not section: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> parts = IReader.PROPERTY.split(line, 1) <NEW_LINE> if len(parts) == 3: <NEW_LINE> <INDENT> p = parts[0].strip() <NEW_LINE> v = parts[2].strip() <NEW_LINE> d[section][p] = v <NEW_LINE> <DEDENT> <DEDENT> return d
INI Reader
62598faa99cbb53fe6830e51
class LuigiTests(unittest.TestCase): <NEW_LINE> <INDENT> def test_mwa_fornax_pg(self): <NEW_LINE> <INDENT> self._test_graph('mwa_fornax_pg') <NEW_LINE> <DEDENT> def test_testGraphLuigiDriven(self): <NEW_LINE> <INDENT> self._test_graph('testGraphLuigiDriven') <NEW_LINE> <DEDENT> def test_chiles_pg(self): <NEW_LINE> <INDENT> self._test_graph('chiles_pg', 8) <NEW_LINE> <DEDENT> def test_complex_graph(self): <NEW_LINE> <INDENT> self._test_graph('complex_graph', 5) <NEW_LINE> <DEDENT> def test_container_pg(self): <NEW_LINE> <INDENT> self._test_graph('container_pg') <NEW_LINE> <DEDENT> def test_testGraphMixed(self): <NEW_LINE> <INDENT> self._test_graph('testGraphMixed') <NEW_LINE> <DEDENT> def test_testGraphDropDriven(self): <NEW_LINE> <INDENT> self._test_graph('testGraphDropDriven') <NEW_LINE> <DEDENT> def test_test_pg(self): <NEW_LINE> <INDENT> self._test_graph('test_pg') <NEW_LINE> <DEDENT> def test_complexFromFile(self): <NEW_LINE> <INDENT> self._test_graphFromFile("complex.js", 5) <NEW_LINE> <DEDENT> def _test_graphFromFile(self, f, socketListeners=1): <NEW_LINE> <INDENT> with pkg_resources.resource_stream("test", "graphs/%s" % (f)) as f: <NEW_LINE> <INDENT> self._test_graph(graph_loader.createGraphFromDropSpecList(json.load(codecs.getreader('utf-8')(f))), socketListeners) <NEW_LINE> <DEDENT> <DEDENT> def _test_graph(self, pgCreator, socketListeners=1): <NEW_LINE> <INDENT> if isinstance(pgCreator, six.string_types): <NEW_LINE> <INDENT> pgCreator = "test.graphsRepository.%s" % (pgCreator) <NEW_LINE> <DEDENT> task = FinishGraphExecution(pgCreator=pgCreator) <NEW_LINE> sch = scheduler.CentralPlannerScheduler() <NEW_LINE> w = worker.Worker(scheduler=sch) <NEW_LINE> w.add(task) <NEW_LINE> for drop,_ in droputils.breadFirstTraverse(task.roots): <NEW_LINE> <INDENT> if isinstance(drop, SocketListenerApp): <NEW_LINE> <INDENT> threading.Thread(target=lambda drop: drop.execute(), args=(drop,)).start() <NEW_LINE> <DEDENT> <DEDENT> for i in range(socketListeners): <NEW_LINE> 
<INDENT> threading.Thread(target=utils.write_to, name='socketWriter', args=("localhost", 1111+i, test_data, 2)).start() <NEW_LINE> <DEDENT> w.run() <NEW_LINE> w.stop() <NEW_LINE> for drop,_ in droputils.breadFirstTraverse(task.roots): <NEW_LINE> <INDENT> self.assertTrue(drop.isCompleted() and drop.exists(), "%s is not COMPLETED or doesn't exist" % (drop.uid))
A class with one testing method for each of the graphs created by the graphsRepository module. Although I could have written a single method that executes automatically all graphs contained in the graphsRepository module I preferred to have explicit separated methods for each graph to be able to pinpoint failures more easily.
62598faabe383301e0253773
class UploadPinForm(Form): <NEW_LINE> <INDENT> file = forms.ImageField(required=True)
Pin resource upload form.
62598faa4e4d56256637239f
class AndroidPackager(DistTarball): <NEW_LINE> <INDENT> def __init__(self, config, package, store): <NEW_LINE> <INDENT> DistTarball.__init__(self, config, package, store) <NEW_LINE> <DEDENT> def files_list(self, package_type, force): <NEW_LINE> <INDENT> if self.config.target_arch != Architecture.UNIVERSAL: <NEW_LINE> <INDENT> return PackagerBase.files_list(self, package_type, force) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if package_type == PackageType.DEVEL: <NEW_LINE> <INDENT> files = self.package.devel_files_list() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> files = self.package.files_list() <NEW_LINE> <DEDENT> all_files = [] <NEW_LINE> if isinstance(self.config.universal_archs, list): <NEW_LINE> <INDENT> archs = self.config.universal_archs <NEW_LINE> <DEDENT> elif isinstance(self.config.universal_archs, dict): <NEW_LINE> <INDENT> archs = self.config.universal_archs.keys() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ConfigurationError('universal_archs must be a list or a dict') <NEW_LINE> <DEDENT> for arch in archs: <NEW_LINE> <INDENT> all_files += [os.path.join(str(arch), f) for f in files] <NEW_LINE> <DEDENT> return all_files <NEW_LINE> <DEDENT> <DEDENT> def _create_tarball(self, output_dir, package_type, files, force, package_prefix): <NEW_LINE> <INDENT> filenames = [] <NEW_LINE> if package_type == PackageType.DEVEL: <NEW_LINE> <INDENT> for filt in ['bin/', 'share/aclocal']: <NEW_LINE> <INDENT> files = [x for x in files if not x.startswith(filt)] <NEW_LINE> <DEDENT> <DEDENT> filename = os.path.join(output_dir, self._get_name(package_type)) <NEW_LINE> if os.path.exists(filename): <NEW_LINE> <INDENT> if force: <NEW_LINE> <INDENT> os.remove(filename) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise UsageError("File %s already exists" % filename) <NEW_LINE> <DEDENT> <DEDENT> tar = tarfile.open(filename, "w:bz2") <NEW_LINE> for f in files: <NEW_LINE> <INDENT> filepath = os.path.join(self.prefix, f) <NEW_LINE> tar.add(filepath, 
os.path.join(package_prefix, f)) <NEW_LINE> <DEDENT> tar.close() <NEW_LINE> filenames.append(filename) <NEW_LINE> return ' '.join(filenames) <NEW_LINE> <DEDENT> def _get_name(self, package_type, ext='tar.bz2'): <NEW_LINE> <INDENT> if package_type == PackageType.DEVEL: <NEW_LINE> <INDENT> package_type = '' <NEW_LINE> <DEDENT> elif package_type == PackageType.RUNTIME: <NEW_LINE> <INDENT> package_type = '-runtime' <NEW_LINE> <DEDENT> return "%s%s-%s-%s-%s%s.%s" % (self.package_prefix, self.package.name, self.config.target_platform, self.config.target_arch, self.package.version, package_type, ext)
Creates a distribution tarball for Android
62598faa45492302aabfc44b
class TestNonlin(object): <NEW_LINE> <INDENT> def _check_nonlin_func(self, f, func, f_tol=1e-2): <NEW_LINE> <INDENT> x = func(f, f.xin, f_tol=f_tol, maxiter=200, verbose=0) <NEW_LINE> assert_(np.absolute(f(x)).max() < f_tol) <NEW_LINE> <DEDENT> def _check_root(self, f, method, f_tol=1e-2): <NEW_LINE> <INDENT> res = root(f, f.xin, method=method, options={'ftol': f_tol, 'maxiter': 200, 'disp': 0}) <NEW_LINE> assert_(np.absolute(res.fun).max() < f_tol) <NEW_LINE> <DEDENT> @dec.knownfailureif(True) <NEW_LINE> def _check_func_fail(self, *a, **kw): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_problem_nonlin(self): <NEW_LINE> <INDENT> for f in [F, F2, F2_lucky, F3, F4_powell, F5, F6]: <NEW_LINE> <INDENT> for func in SOLVERS.values(): <NEW_LINE> <INDENT> if func in f.KNOWN_BAD.values(): <NEW_LINE> <INDENT> if func in MUST_WORK.values(): <NEW_LINE> <INDENT> yield self._check_func_fail, f, func <NEW_LINE> <DEDENT> continue <NEW_LINE> <DEDENT> yield self._check_nonlin_func, f, func <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def test_problem_root(self): <NEW_LINE> <INDENT> for f in [F, F2, F2_lucky, F3, F4_powell, F5, F6]: <NEW_LINE> <INDENT> for meth in SOLVERS: <NEW_LINE> <INDENT> if meth in f.KNOWN_BAD: <NEW_LINE> <INDENT> if meth in MUST_WORK: <NEW_LINE> <INDENT> yield self._check_func_fail, f, meth <NEW_LINE> <DEDENT> continue <NEW_LINE> <DEDENT> yield self._check_root, f, meth
Check the Broyden methods for a few test problems. broyden1, broyden2, and newton_krylov must succeed for all functions. Some of the others don't -- tests in KNOWN_BAD are skipped.
62598faa63d6d428bbee2725
class TestCheck(Check): <NEW_LINE> <INDENT> __test__ = True <NEW_LINE> @property <NEW_LINE> def this_check(self): <NEW_LINE> <INDENT> return chk <NEW_LINE> <DEDENT> def setUp(self): <NEW_LINE> <INDENT> self.l = [['colour', 'color']] <NEW_LINE> self.err = 'error message' <NEW_LINE> self.msg = 'inconsistent form of {} vs {}' <NEW_LINE> <DEDENT> def test_smoke(self): <NEW_LINE> <INDENT> assert chk( "Painting colour on color", self.l, self.err, self.msg) != [] <NEW_LINE> assert chk( "Painting colour on colour", self.l, self.err, self.msg) == [] <NEW_LINE> assert chk( "Painting color on color", self.l, self.err, self.msg) == []
The test class for tools.consistency_check.
62598faa71ff763f4b5e76e8
class Dict(dict): <NEW_LINE> <INDENT> def __init__(self, names=(), values=(), **kw): <NEW_LINE> <INDENT> super(Dict, self).__init__(**kw) <NEW_LINE> for k, v in zip(names, values): <NEW_LINE> <INDENT> self[k] = v <NEW_LINE> <DEDENT> <DEDENT> def __getattr__(self, item): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self[item] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> raise AttributeError(r"'Dict' object has no attribute '%s'" % key) <NEW_LINE> <DEDENT> <DEDENT> def __setattr__(self, key, value): <NEW_LINE> <INDENT> self[key] = value
Simple dict but support access as x.y style.
62598faa30dc7b766599f7c7
class FinderContextManager: <NEW_LINE> <INDENT> extensions = tuple() <NEW_LINE> _position = 0 <NEW_LINE> finder = FileFinder <NEW_LINE> @property <NEW_LINE> def loader(self): <NEW_LINE> <INDENT> return type(self) <NEW_LINE> <DEDENT> def __enter__(self): <NEW_LINE> <INDENT> id, details = get_loader_details() <NEW_LINE> details.insert(self._position, (self.loader, self.extensions)) <NEW_LINE> sys.path_hooks[id] = self.finder.path_hook(*details) <NEW_LINE> sys.path_importer_cache.clear() <NEW_LINE> return self <NEW_LINE> <DEDENT> def __exit__(self, *excepts): <NEW_LINE> <INDENT> id, details = get_loader_details() <NEW_LINE> details.pop(self._position) <NEW_LINE> sys.path_hooks[id] = self.finder.path_hook(*details) <NEW_LINE> sys.path_importer_cache.clear()
FinderContextManager is the base class for the notebook loader. It provides a context manager that replaces `FileFinder` in the `sys.path_hooks` to include an instance of the class in the python findering system. >>> with FinderContextManager() as f: ... id, ((loader_cls, _), *_) = get_loader_details() ... assert issubclass(loader_cls, FinderContextManager) >>> id, ((loader_cls, _), *_) = get_loader_details() >>> loader_cls = inspect.unwrap(loader_cls) >>> assert not (isinstance(loader_cls, type) and issubclass(loader_cls, FinderContextManager))
62598faa32920d7e50bc5fcf
class AggregateAddressCfg(A10BaseClass): <NEW_LINE> <INDENT> def __init__(self, **kwargs): <NEW_LINE> <INDENT> self.ERROR_MSG = "" <NEW_LINE> self.b_key = "aggregate-address-cfg" <NEW_LINE> self.DeviceProxy = "" <NEW_LINE> self.aggregate_address = "" <NEW_LINE> for keys, value in kwargs.items(): <NEW_LINE> <INDENT> setattr(self,keys, value)
This class does not support CRUD Operations please use parent. :param aggregate_address: {"type": "string", "description": "Set aggregate RIP route announcement (Aggregate network)", "format": "ipv6-address-plen"} :param DeviceProxy: The device proxy for REST operations and session handling. Refer to `common/device_proxy.py`
62598faa8da39b475be0315f
class CustomLayerFromClusterableLayerNoWeights(layers.Reshape): <NEW_LINE> <INDENT> pass
A custom layer class that does not have any weights. Derived from a built-in clusterable layer.
62598faaeab8aa0e5d30bd06
class DebComponent(ContentUnit): <NEW_LINE> <INDENT> TYPE_ID = 'deb_component' <NEW_LINE> UNIT_KEY_DEB_COMPONENT = ('name', 'distribution', 'repoid') <NEW_LINE> meta = { 'collection': "units_deb_component", 'indexes': list(UNIT_KEY_DEB_COMPONENT), } <NEW_LINE> unit_key_fields = UNIT_KEY_DEB_COMPONENT <NEW_LINE> name = mongoengine.StringField(required=True) <NEW_LINE> distribution = mongoengine.StringField(required=True) <NEW_LINE> release = mongoengine.StringField(required=True) <NEW_LINE> repoid = mongoengine.StringField(required=True) <NEW_LINE> packages = mongoengine.ListField() <NEW_LINE> _ns = mongoengine.StringField(required=True, default=meta['collection']) <NEW_LINE> _content_type_id = mongoengine.StringField(required=True, default=TYPE_ID) <NEW_LINE> @property <NEW_LINE> def plain_component(self): <NEW_LINE> <INDENT> return self.name.strip('/').split('/')[-1] <NEW_LINE> <DEDENT> @property <NEW_LINE> def prefixed_component(self): <NEW_LINE> <INDENT> prefix = '/'.join(self.distribution.split('/')[1:]).strip('/') <NEW_LINE> return (prefix + '/' + self.plain_component).strip('/')
This unittype represents a deb release/distribution component.
62598faa5fc7496912d48240
class CPUContainerMetric(ContainerMetric): <NEW_LINE> <INDENT> pass
stores values from the container cgroup
62598faa1f5feb6acb162b9b
class RPng(RPackage): <NEW_LINE> <INDENT> homepage = "http://www.rforge.net/png/" <NEW_LINE> url = "https://cran.r-project.org/src/contrib/png_0.1-7.tar.gz" <NEW_LINE> list_url = "https://cran.r-project.org/src/contrib/Archive/png" <NEW_LINE> version('0.1-7', '1ebc8b8aa5979b12c5ec2384b30d649f') <NEW_LINE> depends_on('libpng')
This package provides an easy and simple way to read, write and display bitmap images stored in the PNG format. It can read and write both files and in-memory raw vectors.
62598faa99cbb53fe6830e52
class StopwordCountTransformer(TransformerMixin): <NEW_LINE> <INDENT> def __init__(self, **kwargs): <NEW_LINE> <INDENT> self.language = kwargs['language'] <NEW_LINE> <DEDENT> def fit(self, X, y=None): <NEW_LINE> <INDENT> self.stop_words_ = set(stopwords.words(self.language)) <NEW_LINE> return self <NEW_LINE> <DEDENT> def count_stop_words(self, sentence): <NEW_LINE> <INDENT> words = sentence.split() <NEW_LINE> filtered_words = [] <NEW_LINE> for word in words: <NEW_LINE> <INDENT> if word not in self.stop_words_: <NEW_LINE> <INDENT> filtered_words.append(word) <NEW_LINE> <DEDENT> <DEDENT> return len(filtered_words) <NEW_LINE> <DEDENT> def transform(self, X): <NEW_LINE> <INDENT> return [[self.count_stop_words(x)] for x in X]
Class used to transform a text feature into the count of stopwords present in the text
62598faa4f6381625f19947c
class Elf(Being): <NEW_LINE> <INDENT> _elf_count = 0 <NEW_LINE> _vitality = 5 <NEW_LINE> _dexterity = 6 <NEW_LINE> _agility = 4 <NEW_LINE> def __init__(self, name, home): <NEW_LINE> <INDENT> super().__init__(name, home) <NEW_LINE> Elf._elf_count += 1
Being subclass for creating player characters. Is highly skilled.
62598faa009cb60464d0149b
class ForeignKey(BaseType): <NEW_LINE> <INDENT> def stringify(self, value): <NEW_LINE> <INDENT> return str(self.validate(data=value)) <NEW_LINE> <DEDENT> def destringify(self, value): <NEW_LINE> <INDENT> return self.validate(data=value) <NEW_LINE> <DEDENT> def validate(self, data): <NEW_LINE> <INDENT> model_code = self.option.get('model', None) <NEW_LINE> if not model_code: <NEW_LINE> <INDENT> raise ValueError('ForeinKey需要关联Model') <NEW_LINE> <DEDENT> model = Model.objects.filter(code=model_code).first() <NEW_LINE> if not model: <NEW_LINE> <INDENT> raise ValueError("没有{}类型的Model".format(model_code)) <NEW_LINE> <DEDENT> field_code = self.option.get('field', None) <NEW_LINE> if not field_code: <NEW_LINE> <INDENT> raise ValueError('ForeinKey需要设置关联的field') <NEW_LINE> <DEDENT> if field_code == 'id': <NEW_LINE> <INDENT> value = Instance.objects.filter(model=model, id=int(data), deleted=False).first() <NEW_LINE> if not value: <NEW_LINE> <INDENT> raise ValueError("对象({})不存在".format(data)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return data <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> field = Field.objects.filter(model=model, code=field_code).first() <NEW_LINE> if not field: <NEW_LINE> <INDENT> raise ValueError('{}不存在字段{}'.format(model_code, field_code)) <NEW_LINE> <DEDENT> <DEDENT> value = Value.objects.filter(model=model, field=field, value=str(data)).first() <NEW_LINE> if not value: <NEW_LINE> <INDENT> raise ValueError('值为{}的{}不存在'.format(data, model_code)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return data
外键类型
62598faa8e7ae83300ee901d
class Facebook(OAuth2): <NEW_LINE> <INDENT> user_authorization_url = 'https://www.facebook.com/dialog/oauth' <NEW_LINE> access_token_url = 'https://graph.facebook.com/oauth/access_token' <NEW_LINE> user_info_url = 'https://graph.facebook.com/me' <NEW_LINE> user_info_scope = ['user_about_me', 'email'] <NEW_LINE> same_origin = False <NEW_LINE> @classmethod <NEW_LINE> def _x_request_elements_filter(cls, request_type, request_elements, credentials): <NEW_LINE> <INDENT> if request_type == cls.REFRESH_TOKEN_REQUEST_TYPE: <NEW_LINE> <INDENT> url, method, params, headers, body = request_elements <NEW_LINE> params['fb_exchange_token'] = params.pop('refresh_token') <NEW_LINE> params['grant_type'] = 'fb_exchange_token' <NEW_LINE> request_elements = core.RequestElements(url, method, params, headers, body) <NEW_LINE> <DEDENT> return request_elements <NEW_LINE> <DEDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super(Facebook, self).__init__(*args, **kwargs) <NEW_LINE> if self.offline: <NEW_LINE> <INDENT> if not 'offline_access' in self.scope: <NEW_LINE> <INDENT> self.scope.append('offline_access') <NEW_LINE> <DEDENT> <DEDENT> if self.popup: <NEW_LINE> <INDENT> self.user_authorization_url += '?display=popup' <NEW_LINE> <DEDENT> <DEDENT> @staticmethod <NEW_LINE> def _x_user_parser(user, data): <NEW_LINE> <INDENT> user.picture = 'http://graph.facebook.com/{}/picture?type=large'.format(data.get('username')) <NEW_LINE> return user <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def _x_credentials_parser(credentials, data): <NEW_LINE> <INDENT> credentials.expire_in = data.get('expires') <NEW_LINE> return credentials <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def _x_refresh_credentials_if(credentials): <NEW_LINE> <INDENT> return True
Facebook |oauth2| provider. * Dashboard: https://developers.facebook.com/apps * Docs: http://developers.facebook.com/docs/howtos/login/server-side-login/ * API reference: http://developers.facebook.com/docs/reference/api/ * API explorer: http://developers.facebook.com/tools/explorer
62598faa8e7ae83300ee901e
class BackupUsageSummariesOperations(object): <NEW_LINE> <INDENT> def __init__(self, client, config, serializer, deserializer): <NEW_LINE> <INDENT> self._client = client <NEW_LINE> self._serialize = serializer <NEW_LINE> self._deserialize = deserializer <NEW_LINE> self.api_version = "2016-12-01" <NEW_LINE> self.config = config <NEW_LINE> <DEDENT> def list( self, vault_name, resource_group_name, filter=None, skip_token=None, custom_headers=None, raw=False, **operation_config): <NEW_LINE> <INDENT> def internal_paging(next_link=None, raw=False): <NEW_LINE> <INDENT> if not next_link: <NEW_LINE> <INDENT> url = '/Subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.RecoveryServices/vaults/{vaultName}/backupUsageSummaries' <NEW_LINE> path_format_arguments = { 'vaultName': self._serialize.url("vault_name", vault_name, 'str'), 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str') } <NEW_LINE> url = self._client.format_url(url, **path_format_arguments) <NEW_LINE> query_parameters = {} <NEW_LINE> query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') <NEW_LINE> if filter is not None: <NEW_LINE> <INDENT> query_parameters['$filter'] = self._serialize.query("filter", filter, 'str') <NEW_LINE> <DEDENT> if skip_token is not None: <NEW_LINE> <INDENT> query_parameters['$skipToken'] = self._serialize.query("skip_token", skip_token, 'str') <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> url = next_link <NEW_LINE> query_parameters = {} <NEW_LINE> <DEDENT> header_parameters = {} <NEW_LINE> header_parameters['Content-Type'] = 'application/json; charset=utf-8' <NEW_LINE> if self.config.generate_client_request_id: <NEW_LINE> <INDENT> header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) <NEW_LINE> <DEDENT> if custom_headers: <NEW_LINE> <INDENT> 
header_parameters.update(custom_headers) <NEW_LINE> <DEDENT> if self.config.accept_language is not None: <NEW_LINE> <INDENT> header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') <NEW_LINE> <DEDENT> request = self._client.get(url, query_parameters) <NEW_LINE> response = self._client.send( request, header_parameters, **operation_config) <NEW_LINE> if response.status_code not in [200]: <NEW_LINE> <INDENT> exp = CloudError(response) <NEW_LINE> exp.request_id = response.headers.get('x-ms-request-id') <NEW_LINE> raise exp <NEW_LINE> <DEDENT> return response <NEW_LINE> <DEDENT> deserialized = models.BackupManagementUsagePaged(internal_paging, self._deserialize.dependencies) <NEW_LINE> if raw: <NEW_LINE> <INDENT> header_dict = {} <NEW_LINE> client_raw_response = models.BackupManagementUsagePaged(internal_paging, self._deserialize.dependencies, header_dict) <NEW_LINE> return client_raw_response <NEW_LINE> <DEDENT> return deserialized
BackupUsageSummariesOperations operations. :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. :param deserializer: An objec model deserializer. :ivar api_version: Client Api Version. Constant value: "2016-12-01".
62598faa6e29344779b005d8
class Hand(pygame.sprite.Sprite): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> pygame.sprite.Sprite.__init__(self) <NEW_LINE> self.image, self.rect = load_image('Hand.png', 'Mouse') <NEW_LINE> self.image.set_colorkey((255,242,0)) <NEW_LINE> self.punching = 0 <NEW_LINE> <DEDENT> def update(self): <NEW_LINE> <INDENT> self.pos = pygame.mouse.get_pos() <NEW_LINE> self.rect.midtop = self.pos <NEW_LINE> <DEDENT> def Get_Current_Mouse_POS(self): <NEW_LINE> <INDENT> return self.pos
moves a hand on the screen, following the mouse
62598faa4e4d5625663723a1
class FakeTime(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self._current_time = 1e9 <NEW_LINE> self._delta = 0.001 <NEW_LINE> <DEDENT> def __call__(self): <NEW_LINE> <INDENT> self._current_time = self._current_time + self._delta <NEW_LINE> return self._current_time <NEW_LINE> <DEDENT> def set_epoch(self, epoch): <NEW_LINE> <INDENT> self._current_time = epoch <NEW_LINE> <DEDENT> def set_delta(self, delta): <NEW_LINE> <INDENT> self._delta = delta <NEW_LINE> <DEDENT> def sleep(self, second): <NEW_LINE> <INDENT> self._current_time += second
"Allow to mock time.time for tests `time.time` returns a defined `current_time` instead. Any `time.time` call also increase the `current_time` of `delta` seconds.
62598faadd821e528d6d8eb1
class Command: <NEW_LINE> <INDENT> __slots__ = ('cmd', 'cwd', 'env', 'shell') <NEW_LINE> def __init__(self, cmd, *, cwd, env=None, shell=False): <NEW_LINE> <INDENT> self.cmd = cmd <NEW_LINE> self.cwd = cwd <NEW_LINE> self.env = env <NEW_LINE> self.shell = shell <NEW_LINE> <DEDENT> def to_string(self): <NEW_LINE> <INDENT> string = "Invoking command in '{self.cwd}': ".format_map(locals()) <NEW_LINE> string += self._get_env_string() <NEW_LINE> string += self._get_cmd_string() <NEW_LINE> return string <NEW_LINE> <DEDENT> def _get_env_string(self): <NEW_LINE> <INDENT> env = {} <NEW_LINE> for var_name, new_value in (self.env or {}).items(): <NEW_LINE> <INDENT> if sys.platform != 'win32': <NEW_LINE> <INDENT> if var_name in ('PWD', ): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> <DEDENT> org_value = os.environ.get(var_name, None) <NEW_LINE> if new_value == org_value: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> if sys.platform != 'win32': <NEW_LINE> <INDENT> var = '${' + var_name + '}' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> var = '%' + var_name + '%' <NEW_LINE> <DEDENT> if not org_value: <NEW_LINE> <INDENT> value = new_value <NEW_LINE> <DEDENT> elif new_value.startswith(org_value): <NEW_LINE> <INDENT> value = var + new_value[len(org_value):] <NEW_LINE> <DEDENT> elif new_value.endswith(org_value): <NEW_LINE> <INDENT> value = new_value[:-len(org_value)] + var <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> value = new_value <NEW_LINE> <DEDENT> env[var_name] = value <NEW_LINE> <DEDENT> string = '' <NEW_LINE> if env: <NEW_LINE> <INDENT> for name in sorted(env.keys()): <NEW_LINE> <INDENT> value = env[name] <NEW_LINE> string += '{name}={value} '.format_map(locals()) <NEW_LINE> <DEDENT> <DEDENT> return string <NEW_LINE> <DEDENT> def _get_cmd_string(self): <NEW_LINE> <INDENT> return ' '.join([ escape_shell_argument(c) if self.shell else c for c in self.cmd])
An event containing an invoked command.
62598faa01c39578d7f12cfb
class PacketPort: <NEW_LINE> <INDENT> def __init__(self, device, baud=3000000): <NEW_LINE> <INDENT> self.serial = serial.Serial(device, baud, timeout=2) <NEW_LINE> self._rxBuffer = [] <NEW_LINE> self._escBuffer = '' <NEW_LINE> <DEDENT> def _escape(self, data): <NEW_LINE> <INDENT> return data.replace('}', '}]').replace('~', '}^') <NEW_LINE> <DEDENT> def _unescape(self, data): <NEW_LINE> <INDENT> chunks = data.split('}') <NEW_LINE> for i in range(1, len(chunks)): <NEW_LINE> <INDENT> chunks[i] = chr(0x20 ^ ord(chunks[i][0])) + chunks[i][1:] <NEW_LINE> <DEDENT> return ''.join(chunks) <NEW_LINE> <DEDENT> def write(self, packet): <NEW_LINE> <INDENT> self.serial.write('~' + self._escape(packet)) <NEW_LINE> <DEDENT> def _readBuffer(self, limit=None): <NEW_LINE> <INDENT> if (len(self._rxBuffer) > 1 or (limit is not None and self._rxBuffer and len(self._rxBuffer[0]) >= limit)): <NEW_LINE> <INDENT> packet = self._rxBuffer[0] <NEW_LINE> del self._rxBuffer[0] <NEW_LINE> return packet <NEW_LINE> <DEDENT> <DEDENT> def _splitPackets(self, data): <NEW_LINE> <INDENT> data = self._escBuffer + data <NEW_LINE> self._escBuffer = '' <NEW_LINE> data = re.sub('}+', '}', data) <NEW_LINE> data = data.replace('}~', '~') <NEW_LINE> if data[-1] == '}': <NEW_LINE> <INDENT> self._escBuffer = '}' <NEW_LINE> data = data[:-1] <NEW_LINE> <DEDENT> return map(self._unescape, data.split('~')) <NEW_LINE> <DEDENT> def read(self, limit=None): <NEW_LINE> <INDENT> while True: <NEW_LINE> <INDENT> packet = self._readBuffer(limit) <NEW_LINE> if packet: <NEW_LINE> <INDENT> return packet <NEW_LINE> <DEDENT> bufferedBytes = len(self._rxBuffer) and len(self._rxBuffer[0]) <NEW_LINE> blockSize = max(1, (limit or 0) - bufferedBytes) <NEW_LINE> block = self.serial.read(blockSize) <NEW_LINE> if not block: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> chunks = self._splitPackets(block) <NEW_LINE> if self._rxBuffer: <NEW_LINE> <INDENT> self._rxBuffer[-1] += chunks[0] <NEW_LINE> <DEDENT> 
self._rxBuffer.extend(chunks[1:])
Low-level serial port with packet framing semantics. This implements a 'byte stuffing' framing mechanism, based on RFC1622. This isn't actually as complicated as it looks.. A lot of the string processing acrobatics here are so we can properly unescape received packets without any slow character-by-character loops in Python.
62598faa30bbd72246469936
class SwiftStorageSaveChecksumTests(trove_testtools.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> super(SwiftStorageSaveChecksumTests, self).setUp() <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> super(SwiftStorageSaveChecksumTests, self).tearDown() <NEW_LINE> <DEDENT> def test_swift_checksum_save(self): <NEW_LINE> <INDENT> context = trove_testtools.TroveTestContext(self) <NEW_LINE> backup_id = '123' <NEW_LINE> user = 'user' <NEW_LINE> password = 'password' <NEW_LINE> swift_client = FakeSwiftConnection() <NEW_LINE> with patch.object(swift, 'create_swift_client', return_value=swift_client): <NEW_LINE> <INDENT> storage_strategy = SwiftStorage(context) <NEW_LINE> with MockBackupRunner(filename=backup_id, user=user, password=password) as runner: <NEW_LINE> <INDENT> (success, note, checksum, location) = storage_strategy.save(runner.manifest, runner) <NEW_LINE> <DEDENT> <DEDENT> self.assertTrue(success, "The backup should have been successful.") <NEW_LINE> self.assertIsNotNone(note, "A note should have been returned.") <NEW_LINE> self.assertEqual('http://mockswift/v1/database_backups/123.gz.enc', location, "Incorrect swift location was returned.") <NEW_LINE> <DEDENT> @patch('trove.common.strategies.storage.swift.LOG') <NEW_LINE> def test_swift_segment_checksum_etag_mismatch(self, mock_logging): <NEW_LINE> <INDENT> context = trove_testtools.TroveTestContext(self) <NEW_LINE> backup_id = 'bad_segment_etag_123' <NEW_LINE> user = 'user' <NEW_LINE> password = 'password' <NEW_LINE> swift_client = FakeSwiftConnection() <NEW_LINE> with patch.object(swift, 'create_swift_client', return_value=swift_client): <NEW_LINE> <INDENT> storage_strategy = SwiftStorage(context) <NEW_LINE> with MockBackupRunner(filename=backup_id, user=user, password=password) as runner: <NEW_LINE> <INDENT> (success, note, checksum, location) = storage_strategy.save(runner.manifest, runner) <NEW_LINE> <DEDENT> <DEDENT> self.assertFalse(success, "The backup should have 
failed!") <NEW_LINE> self.assertTrue(note.startswith("Error saving data to Swift!")) <NEW_LINE> self.assertIsNone(checksum, "Swift checksum should be None for failed backup.") <NEW_LINE> self.assertEqual('http://mockswift/v1/database_backups/' 'bad_segment_etag_123.gz.enc', location, "Incorrect swift location was returned.") <NEW_LINE> <DEDENT> @patch('trove.common.strategies.storage.swift.LOG') <NEW_LINE> def test_swift_checksum_etag_mismatch(self, mock_logging): <NEW_LINE> <INDENT> context = trove_testtools.TroveTestContext(self) <NEW_LINE> backup_id = 'bad_manifest_etag_123' <NEW_LINE> user = 'user' <NEW_LINE> password = 'password' <NEW_LINE> swift_client = FakeSwiftConnection() <NEW_LINE> with patch.object(swift, 'create_swift_client', return_value=swift_client): <NEW_LINE> <INDENT> storage_strategy = SwiftStorage(context) <NEW_LINE> with MockBackupRunner(filename=backup_id, user=user, password=password) as runner: <NEW_LINE> <INDENT> (success, note, checksum, location) = storage_strategy.save(runner.manifest, runner) <NEW_LINE> <DEDENT> <DEDENT> self.assertFalse(success, "The backup should have failed!") <NEW_LINE> self.assertTrue(note.startswith("Error saving data to Swift!")) <NEW_LINE> self.assertIsNone(checksum, "Swift checksum should be None for failed backup.") <NEW_LINE> self.assertEqual('http://mockswift/v1/database_backups/' 'bad_manifest_etag_123.gz.enc', location, "Incorrect swift location was returned.")
SwiftStorage.save is used to save a backup to Swift.
62598faa63d6d428bbee2727
class Downloader(_Downloader): <NEW_LINE> <INDENT> ok = True <NEW_LINE> def __init__(self, cache=None, account_accessor=None, logger=None, working_dir='', callback=None): <NEW_LINE> <INDENT> from rowgenerators import get_cache <NEW_LINE> super().__init__(cache or get_cache('metapack'), account_accessor, logger, working_dir, callback) <NEW_LINE> <DEDENT> def download(self, url): <NEW_LINE> <INDENT> return super().download(url)
"Local version of the downloader. Also should be used as the source of the cache
62598faa2c8b7c6e89bd3742
@resources.register('emr-security-configuration') <NEW_LINE> class EMRSecurityConfiguration(QueryResourceManager): <NEW_LINE> <INDENT> class resource_type(TypeInfo): <NEW_LINE> <INDENT> service = 'emr' <NEW_LINE> arn_type = 'emr' <NEW_LINE> permission_prefix = 'elasticmapreduce' <NEW_LINE> enum_spec = ('list_security_configurations', 'SecurityConfigurations', None) <NEW_LINE> detail_spec = ('describe_security_configuration', 'Name', 'Name', None) <NEW_LINE> id = name = 'Name' <NEW_LINE> cfn_type = 'AWS::EMR::SecurityConfiguration' <NEW_LINE> <DEDENT> permissions = ('elasticmapreduce:ListSecurityConfigurations', 'elasticmapreduce:DescribeSecurityConfiguration',) <NEW_LINE> def augment(self, resources): <NEW_LINE> <INDENT> resources = super().augment(resources) <NEW_LINE> for r in resources: <NEW_LINE> <INDENT> r['SecurityConfiguration'] = json.loads(r['SecurityConfiguration']) <NEW_LINE> <DEDENT> return resources
Resource manager for EMR Security Configuration
62598faaa8370b77170f0357
@implementer(IDictionaryInheritance) <NEW_LINE> class InheritingDictionary(Inheritance, dict): <NEW_LINE> <INDENT> def __setitem__(self, name, value): <NEW_LINE> <INDENT> if ILocaleInheritance.providedBy(value): <NEW_LINE> <INDENT> value.__parent__ = self <NEW_LINE> value.__name__ = name <NEW_LINE> <DEDENT> super(InheritingDictionary, self).__setitem__(name, value) <NEW_LINE> <DEDENT> def __getitem__(self, name): <NEW_LINE> <INDENT> if name not in self: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> selfUp = self.getInheritedSelf() <NEW_LINE> <DEDENT> except NoParentException: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return selfUp.__getitem__(name) <NEW_LINE> <DEDENT> <DEDENT> return super(InheritingDictionary, self).__getitem__(name) <NEW_LINE> <DEDENT> def get(self, name, default=None): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self[name] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> return default <NEW_LINE> <DEDENT> <DEDENT> def items(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> d = dict(self.getInheritedSelf()) <NEW_LINE> <DEDENT> except NoParentException: <NEW_LINE> <INDENT> d = {} <NEW_LINE> <DEDENT> d.update(self) <NEW_LINE> return d.items() <NEW_LINE> <DEDENT> def keys(self): <NEW_LINE> <INDENT> return [item[0] for item in self.items()] <NEW_LINE> <DEDENT> def value(self): <NEW_LINE> <INDENT> return [item[1] for item in self.items()]
Implementation of a dictionary that can also inherit values. Example:: >>> from zope.i18n.locales.tests.test_docstrings import \ ... LocaleInheritanceStub >>> root = LocaleInheritanceStub() >>> root.data = InheritingDictionary({1: 'one', 2: 'two', 3: 'three'}) >>> root.data2 = AttributeInheritance() >>> root.data2.dict = InheritingDictionary({1: 'i', 2: 'ii', 3: 'iii'}) >>> locale = LocaleInheritanceStub(root) >>> locale.data = InheritingDictionary({1: 'eins'}) >>> locale.data2 = AttributeInheritance() >>> locale.data2.dict = InheritingDictionary({1: 'I'}) Here is a dictionary lookup directly from the locale:: >>> locale.data[1] 'eins' >>> locale.data[2] 'two' ... however, we can also have any amount of nesting:: >>> locale.data2.dict[1] 'I' >>> locale.data2.dict[2] 'ii' We also have to overwrite 'get()', 'keys()' and 'items()' since we want to make sure that all upper locales are consulted before returning the default or to construct the list of elements, respectively:: >>> locale.data2.dict.get(2) 'ii' >>> locale.data2.dict.get(4) is None True >>> locale.data.keys() [1, 2, 3] >>> list(locale.data.items()) [(1, 'eins'), (2, 'two'), (3, 'three')]
62598faaa8370b77170f0358
class Document(File): <NEW_LINE> <INDENT> document = models.FileField(upload_to='docs')
Other file types.
62598faa3cc13d1c6d4656e8
class Review(BaseModel): <NEW_LINE> <INDENT> place_id = "" <NEW_LINE> user_id = "" <NEW_LINE> text = "" <NEW_LINE> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super().__init__(*args, **kwargs)
Review's class
62598faa92d797404e388b23
class Solution: <NEW_LINE> <INDENT> def isUnique(self, str): <NEW_LINE> <INDENT> dict ={} <NEW_LINE> for i in str: <NEW_LINE> <INDENT> if i in dict: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> dict[i]=1 <NEW_LINE> <DEDENT> <DEDENT> return True
@param: str: A string @return: a boolean
62598faa4e4d5625663723a2
class InfoRefsContainer(RefsContainer): <NEW_LINE> <INDENT> def __init__(self, f): <NEW_LINE> <INDENT> self._refs = {} <NEW_LINE> self._peeled = {} <NEW_LINE> for l in f.readlines(): <NEW_LINE> <INDENT> sha, name = l.rstrip("\n").split("\t") <NEW_LINE> if name.endswith("^{}"): <NEW_LINE> <INDENT> name = name[:-3] <NEW_LINE> if not check_ref_format(name): <NEW_LINE> <INDENT> raise ValueError("invalid ref name '%s'" % name) <NEW_LINE> <DEDENT> self._peeled[name] = sha <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if not check_ref_format(name): <NEW_LINE> <INDENT> raise ValueError("invalid ref name '%s'" % name) <NEW_LINE> <DEDENT> self._refs[name] = sha <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def allkeys(self): <NEW_LINE> <INDENT> return self._refs.keys() <NEW_LINE> <DEDENT> def read_loose_ref(self, name): <NEW_LINE> <INDENT> return self._refs.get(name, None) <NEW_LINE> <DEDENT> def get_packed_refs(self): <NEW_LINE> <INDENT> return {} <NEW_LINE> <DEDENT> def get_peeled(self, name): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self._peeled[name] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> return self._refs[name]
Refs container that reads refs from a info/refs file.
62598faa9c8ee8231304012f
class DrsObject: <NEW_LINE> <INDENT> def __init__( self, id: str, size: int, created: str, checksums: Iterable[Checksum], access_methods: Iterable[AccessMethod], name: str = '', updated: str = '', version: str = '', mime_type: str = '', description: str = '', aliases: Iterable[str] = [], ) -> None: <NEW_LINE> <INDENT> self.id = id <NEW_LINE> self.size = size <NEW_LINE> self.created = created <NEW_LINE> self.checksums = checksums <NEW_LINE> self.access_methods = access_methods <NEW_LINE> self.name = name <NEW_LINE> self.updated = updated <NEW_LINE> self.version = version <NEW_LINE> self.mime_type = mime_type <NEW_LINE> self.description = description <NEW_LINE> self.aliases = aliases <NEW_LINE> <DEDENT> def to_dict(self) -> Dict: <NEW_LINE> <INDENT> return { "id": self.id, "size": self.size, "created": self.created, "checksums": [c.to_dict() for c in self.checksums], "access_methods": [m.to_dict() for m in self.access_methods], "name": self.name, "updated": self.updated, "version": self.version, "mime_type": self.mime_type, "description": self.description, "aliases": self.aliases, }
Schema describing DRS object metadata and access methods.
62598faae76e3b2f99fd89b3
class Policy: <NEW_LINE> <INDENT> def select_action(self, values): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def __call__(self, values): <NEW_LINE> <INDENT> return self.select_action(values)
Decides which action to take.
62598faa8da39b475be03161
class TransitionProbabilitiesProperty( ProcessingPlasmaProperty, metaclass=ABCMeta ): <NEW_LINE> <INDENT> @abstractproperty <NEW_LINE> def transition_probabilities_outputs(self): <NEW_LINE> <INDENT> pass
Used for plasma properties that have unnormalized transition probabilities as one of their outputs. This makes it possible to easily track all transition probabilities and to later combine them.
62598faaaad79263cf42e751
class DepartmentViewSet(ViewSet):
    """Department viewset: CRUD endpoints for Department records.

    Deletion is soft: ``destroy`` stamps ``deleted``/``deleted_on`` on the
    row instead of removing it, and ``list`` hides flagged rows unless the
    client passes a truthy ``deleted`` query parameter.
    """

    queryset = Department.objects.all()
    # Default field set returned when the client does not narrow the
    # response with a fields query parameter (see get_query_fields).
    fields = ('id', 'name', 'description', 'members', 'organization')

    def create(self, request):
        """Create a department from the request payload."""
        data = request.data
        serializer = DepartmentSerializer(data=data)
        if serializer.is_valid():
            serializer.save()
            data = serializer.data
            return Response(*create_response(
                data['name'], payload={'data': data},
                msg_key='create_entry', status='created'))
        return Response(*create_response(
            payload={'errors': serializer.errors},
            msg_key='unsuccessful', success=False))

    def retrieve(self, request, pk):
        """Fetch a single department, optionally narrowed to query fields."""
        fields = get_query_fields(request) or self.fields
        department = entry_exists(Department, pk)
        serializer = DepartmentSerializer(department, fields=fields)
        return Response(*create_response(
            'department', payload={'data': serializer.data},
            msg_key='fetch_entries', status='ok'))

    def partial_update(self, request, pk):
        """Partially update a department and stamp ``edited_on``."""
        department = entry_exists(Department, pk)
        request.data.update({'edited_on': timezone.now()})
        serializer = DepartmentSerializer(
            department, data=request.data, partial=True)
        if serializer.is_valid():
            serializer.save()
            data = {'data': serializer.data}
            # BUG FIX: the success response previously labelled the entity
            # 'organization' (copy/paste from another viewset); every other
            # action in this class uses 'department'.
            return Response(*create_response(
                'department', payload=data, msg_key='update_entry',
                status='ok'))
        data = {'errors': serializer.errors}
        return Response(*create_response(
            payload=data, status='bad_request', msg_key='unsuccessful',
            success=False))

    def destroy(self, request, pk):
        """Soft-delete a department (sets ``deleted`` and ``deleted_on``)."""
        department = entry_exists(Department, pk)
        delete_check(request, department)
        delete_details = {'deleted_on': timezone.now(), 'deleted': True}
        serializer = DepartmentSerializer(
            department, data=delete_details, partial=True)
        if serializer.is_valid():
            serializer.save()
            data = {'data': serializer.data}
            return Response(*create_response(
                'department', payload=data, msg_key='delete_entry',
                status='ok'))
        # Validation failure here means the row was already flagged; the
        # response carries no payload (a dead `serializer.errors` capture
        # was removed).
        return Response(*create_response(
            'department', msg_key='already_deleted', success=False))

    def list(self, request):
        """List departments, hiding soft-deleted rows by default."""
        fields = get_query_fields(request) or self.fields
        # Any truthy ?deleted=... value includes soft-deleted rows.
        filter_params = ({} if request.query_params.get('deleted', False)
                         else {'deleted': False})
        departments = self.queryset.filter(**filter_params)
        serializer = DepartmentSerializer(departments, fields=fields,
                                          many=True)
        payload = {'count': len(serializer.data), 'data': serializer.data}
        return Response(*create_response(
            'department', payload=payload, msg_key='fetch_entries',
            status='ok'))
Department viewset class.
62598faa99fddb7c1ca62da7
class Authenticator:
    """MAC authentication manager for NTP packets.

    Holds symmetric keys loaded from an ntp.keys-style file of
    ``keyid keytype password`` lines (``#`` starts a comment) and uses
    them to compute and verify packet MACs.
    """

    def __init__(self, keyfile=None):
        """Load keys from *keyfile*; with no keyfile, start empty."""
        # Maps keyid (int) -> (keytype, password).
        self.passwords = {}
        if keyfile is not None:
            for line in open(keyfile):
                # Strip trailing comments, then whitespace; skip blanks.
                if '#' in line:
                    line = line[:line.index("#")]
                line = line.strip()
                if not line:
                    continue
                (keyid, keytype, passwd) = line.split()
                self.passwords[int(keyid)] = (keytype, passwd)

    def __len__(self):
        # Number of loaded keys.
        return len(self.passwords)

    def __getitem__(self, keyid):
        # Returns (keytype, password) or None; does NOT raise KeyError.
        return self.passwords.get(keyid)

    def control(self, keyid=None):
        """Return (keyid, keytype, password) for the control key.

        With an explicit *keyid*, look it up in the loaded keys
        (returning (keyid, None, None) when absent).  With no keyid,
        scan /etc/ntp.conf for a "control" line and use the keyid it
        names.

        :raise ValueError: if no control line is found, or the named
            key has no password.
        """
        if keyid is not None:
            if keyid in self.passwords:
                return (keyid,) + self.passwords[keyid]
            else:
                return (keyid, None, None)
        for line in open("/etc/ntp.conf"):
            if line.startswith("control"):
                keyid = int(line.split()[1])
                (keytype, passwd) = self.passwords[keyid]
                if passwd is None:
                    raise ValueError
                # Passwords longer than 20 characters are taken to be
                # hex-encoded and converted to raw octets.
                if len(passwd) > 20:
                    passwd = ntp.util.hexstr2octets(passwd)
                return (keyid, keytype, passwd)
        else:
            # for/else: no "control" line was found in the config.
            raise ValueError

    @staticmethod
    def compute_mac(payload, keyid, keytype, passwd):
        """Return keyid (4 bytes, network order) + digest over
        password||payload, or None when *keytype* has no fixed digest
        size (e.g. shake-style hashes report digest_size == 0).
        """
        hasher = hashlib.new(keytype)
        hasher.update(ntp.poly.polybytes(passwd))
        hasher.update(payload)
        if hasher.digest_size == 0:
            return None
        else:
            return struct.pack("!I", keyid) + hasher.digest()

    @staticmethod
    def have_mac(packet):
        """True when *packet* is longer than a bare (MAC-less) NTP packet."""
        return len(packet) > ntp.magic.LEN_PKT_NOMAC

    def verify_mac(self, packet):
        """Check the trailing keyid+digest of *packet* against our keys.

        Returns False for an unknown keyid or a digest mismatch.

        NOTE(review): HASHLEN is hard-coded to 16, which assumes an
        MD5-length digest; 20-byte (SHA-1) MACs would not verify —
        confirm against the wire formats this code must accept.
        """
        HASHLEN = 16
        # Layout: [payload][4-byte keyid][HASHLEN-byte digest].
        payload = packet[:-HASHLEN-4]
        keyid = packet[-HASHLEN-4:-HASHLEN]
        mac = packet[-HASHLEN:]
        (keyid,) = struct.unpack("!I", keyid)
        if keyid not in self.passwords:
            return False
        (keytype, passwd) = self.passwords[keyid]
        hasher = hashlib.new(keytype)
        # NOTE(review): unlike compute_mac, the password is fed to the
        # hash without ntp.poly.polybytes() — verify this is intentional
        # (it matters when passwords are str rather than bytes).
        hasher.update(passwd)
        hasher.update(payload)
        return ntp.poly.polybytes(hasher.digest()) == mac
MAC authentication manager for NTP packets.
62598faacb5e8a47e493c137