code
stringlengths
4
4.48k
docstring
stringlengths
1
6.45k
_id
stringlengths
24
24
class ArrayVarianceMutator(Mutator):
    """Change the length of arrays from count - N to count + N elements.

    N defaults to 50 and can be overridden per node with a Hint named
    "<mutator name>-N".
    """

    def __init__(self, peach, node, name="ArrayVarianceMutator"):
        Mutator.__init__(self)
        ArrayVarianceMutator.weight = 2
        if not ArrayVarianceMutator.supportedDataElement(node):
            raise Exception("ArrayVarianceMutator created with bad node.")
        self.isFinite = True
        self.name = name
        self._peach = peach
        self._n = self._getN(node, 50)
        self._arrayCount = node.getArrayCount()
        self._minCount = self._arrayCount - self._n
        self._maxCount = self._arrayCount + self._n
        self.changedName = ""
        # An array cannot have a negative length; clamp the lower bound.
        self._minCount = 0 if self._minCount < 0 else self._minCount
        self._currentCount = self._minCount

    def _getN(self, node, n):
        """Return N from a "<name>-N" Hint on *node*, or the default *n*."""
        for c in node.hints:
            if c.name == '{}-N'.format(self.name):
                try:
                    n = int(c.value)
                except (TypeError, ValueError):
                    # Narrowed from a bare `except:` so unrelated errors
                    # (e.g. KeyboardInterrupt) are not swallowed.
                    raise PeachException("Expected numerical value for Hint "
                                         "named [{}]".format(c.name))
        return n

    def next(self):
        """Advance to the next array size; raise MutatorCompleted when done."""
        self._currentCount += 1
        if self._currentCount > self._maxCount:
            raise MutatorCompleted()

    def getCount(self):
        """Total number of mutations this mutator will produce."""
        return self._maxCount - self._minCount

    @staticmethod
    def supportedDataElement(e):
        """Only the head (position 0) of a mutable array is supported."""
        if isinstance(e, DataElement) and e.isArray() and e.arrayPosition == 0 and e.isMutable:
            return True
        return False

    def sequentialMutation(self, node):
        self.changedName = node.getFullnameInDataModel()
        self._performMutation(node, self._currentCount)

    def randomMutation(self, node, rand):
        self.changedName = node.getFullnameInDataModel()
        count = rand.randint(self._minCount, self._maxCount)
        self._performMutation(node, count)

    def _performMutation(self, node, count):
        """Grow or shrink the array rooted at *node* to `count` elements."""
        n = count
        arrayHead = node
        if n == 0:
            pass
        elif n < self._arrayCount:
            # Shrink: delete elements from the tail down to index n.
            for i in range(self._arrayCount - 1, n - 1, -1):
                obj = arrayHead.getArrayElementAt(i)
                if obj is None:
                    # Bug fix: the original passed a single tuple to a
                    # two-placeholder format string, which raised IndexError
                    # instead of producing the intended message.
                    raise Exception("Could not locate item at pos {} (max "
                                    "of {})".format(i, self._arrayCount))
                obj.parent.__delitem__(obj.name)
        elif n > self._arrayCount:
            # Grow: expand the value of the last element to stand in for
            # the additional elements.
            headIndex = arrayHead.parent.index(arrayHead)
            obj = arrayHead.getArrayElementAt(arrayHead.getArrayCount() - 1)
            try:
                obj.value = obj.getValue() * (n - self._arrayCount)
                obj.arrayPosition = n - 1
            except MemoryError:
                # Expanding very large arrays can exhaust memory; treat as
                # a best-effort no-op.
                pass
Change the length of arrays from count - N to count + N.
62598fa44e4d5625663722d8
class ClientConfiguration:
    """Configuration of a network worker client.

    Holds the server endpoint, the two ZMQ-style port numbers, protocol
    version, timing knobs and a logger.
    """

    def __init__(self, server_url, command_port, request_port,
                 server_version=1, timeout=30, wait_period=10,
                 linger_period=1, polling_limit=10, logger=None,
                 verbosity=4):
        self.server_url = server_url
        self.command_port = command_port
        self.request_port = request_port
        self.server_version = server_version
        self.timeout = timeout
        self.wait_period = wait_period
        self.linger_period = linger_period
        self.polling_limit = polling_limit
        # Fall back to a module-level logger when none is supplied.
        self.logger = logger if logger is not None else logging.getLogger(__name__)
        self.verbosity = verbosity
Configuration of network worker evaluating the environments
62598fa44527f215b58e9d97
class CaptureLog(logging.Handler):
    """Context manager that captures log entries for one named logger.

    Usage::

        with CaptureLog('foo', logging.DEBUG) as captured:
            logging.getLogger('foo').debug("Debug")
        captured.records[0].getMessage()  # -> 'Debug'
    """

    def __init__(self, logger_name, level=logging.WARNING):
        super().__init__(level)
        self.logger_name = logger_name
        self.level = level
        self.records = []

    @property
    def record_tuples(self):
        """Captured records as (logger name, level, message) tuples."""
        return [(rec.name, rec.levelno, rec.getMessage()) for rec in self.records]

    def emit(self, record):
        # Just collect; no formatting or output.
        self.records.append(record)

    def __enter__(self):
        self.records = []
        target = logging.getLogger(self.logger_name)
        target.addHandler(self)
        target.setLevel(self.level)
        return self

    def __exit__(self, exc_type, exc_value, exc_tb):
        target = logging.getLogger(self.logger_name)
        target.removeHandler(self)
        target.setLevel(logging.NOTSET)
Context manager to capture log entries. Usage:: with CaptureLog('foo', logging.DEBUG) as captured: logger = logging.getLogger('foo') logger.debug("Debug") >>> captured.records[0].getMessage() 'Debug' :param logger_name: the name of the logger, can be prefix too. :param level: the loglevel to capture at, only log entries equal or higher than this level are captured.
62598fa4f548e778e596b459
class Birds:
    """Simulates flock behaviour of birds, using the Boids model (1986).

    NOTE(review): relies on module-level globals N, minDist, maxRuleVel,
    maxVel, width and height being defined before instantiation -- confirm.
    """

    def __init__(self):
        self.N = N
        self.minDist = minDist          # minimum separation distance
        self.maxRuleVel = maxRuleVel    # cap on each rule's velocity change
        self.maxVel = maxVel            # cap on the final velocity
        # Start all birds near the screen centre with small random jitter.
        self.pos = [width / 2.0, height / 2.0] + 10 * np.random.rand(2 * N).reshape(N, 2)
        # Unit velocity vectors at uniformly random headings.
        angles = 2 * math.pi * np.random.rand(N)
        self.vel = np.array(list(zip(np.sin(angles), np.cos(angles))))

    def savef(self):
        """Append the flattened positions to douban.txt and echo them.

        Bug fix: the original contained a dangling `print` reference followed
        by a no-op `str(...)` expression statement (a broken Python 2 print),
        and redundantly called f.close() inside the `with` block. Also uses
        self.N instead of the global N.
        """
        with open("douban.txt", "a") as f:
            flat = str(self.pos.reshape(1, self.N * 2))
            f.write(flat)
            print(flat)

    def tick(self, frameNum, pts, beak):
        """Advance the simulation one frame and update the plot artists."""
        self.distMatrix = squareform(pdist(self.pos))
        self.vel += self.apply_rules()
        self.limit(self.vel, self.maxVel)
        self.pos += self.vel
        self.apply_bc()
        pts.set_data(self.pos.reshape(2 * self.N)[::2],
                     self.pos.reshape(2 * self.N)[1::2])
        # Draw beaks slightly ahead of the bodies, along the velocity.
        vec = self.pos + 10 * self.vel / self.maxVel
        beak.set_data(vec.reshape(2 * self.N)[::2],
                      vec.reshape(2 * self.N)[1::2])
        self.savef()

    def limit_vec(self, vec, max_val):
        """Clamp a single 2-vector's magnitude to max_val (in place)."""
        mag = norm(vec)
        if mag > max_val:
            vec[0], vec[1] = vec[0] * max_val / mag, vec[1] * max_val / mag

    def limit(self, x, max_val):
        """Clamp every row vector of x to max_val (in place)."""
        for vec in x:
            self.limit_vec(vec, max_val)

    def apply_bc(self):
        """Wrap positions around the screen edges (toroidal boundary)."""
        deltaR = 2.0
        for coord in self.pos:
            if coord[0] > width + deltaR:
                coord[0] = -deltaR
            if coord[0] < -deltaR:
                coord[0] = width + deltaR
            if coord[1] > height + deltaR:
                coord[1] = -deltaR
            if coord[1] < -deltaR:
                coord[1] = height + deltaR

    def apply_rules(self):
        """Return the velocity change from the three Boids rules."""
        # Rule 1: separation -- move away from neighbours closer than 20.
        D = self.distMatrix < 20.0
        vel = self.pos * D.sum(axis=1).reshape(self.N, 1) - D.dot(self.pos)
        self.limit(vel, self.maxRuleVel)
        # Rules 2 and 3 use a wider neighbourhood (50).
        D = self.distMatrix < 50.0
        # Rule 2: alignment -- match neighbours' velocities.
        vel2 = D.dot(self.vel)
        self.limit(vel2, self.maxRuleVel)
        vel += vel2
        # Rule 3: cohesion -- steer toward neighbours' positions.
        vel3 = D.dot(self.pos) - self.pos
        self.limit(vel3, self.maxRuleVel)
        vel += vel3
        return vel
Simulates flock behaviour of birds, using the realistic-looking Boids model (1986)
62598fa4e5267d203ee6b7c2
class ModelInfoResponse(Model):
    """An application model info.

    All required parameters (id, readable_type) must be populated in order
    to send to Azure; all other attributes are optional.
    """

    _validation = {
        'id': {'required': True},
        'readable_type': {'required': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type_id': {'key': 'typeId', 'type': 'int'},
        'readable_type': {'key': 'readableType', 'type': 'str'},
        'roles': {'key': 'roles', 'type': '[EntityRole]'},
        'children': {'key': 'children', 'type': '[ChildEntity]'},
        'sub_lists': {'key': 'subLists', 'type': '[SubClosedListResponse]'},
        'custom_prebuilt_domain_name': {'key': 'customPrebuiltDomainName', 'type': 'str'},
        'custom_prebuilt_model_name': {'key': 'customPrebuiltModelName', 'type': 'str'},
        'regex_pattern': {'key': 'regexPattern', 'type': 'str'},
        'explicit_list': {'key': 'explicitList', 'type': '[ExplicitListItem]'},
    }

    def __init__(self, **kwargs):
        super(ModelInfoResponse, self).__init__(**kwargs)
        # Every mapped attribute defaults to None when not supplied.
        for attr in ('id', 'name', 'type_id', 'readable_type', 'roles',
                     'children', 'sub_lists', 'custom_prebuilt_domain_name',
                     'custom_prebuilt_model_name', 'regex_pattern',
                     'explicit_list'):
            setattr(self, attr, kwargs.get(attr, None))
An application model info. All required parameters must be populated in order to send to Azure. :param id: Required. The ID of the Entity Model. :type id: str :param name: Name of the Entity Model. :type name: str :param type_id: The type ID of the Entity Model. :type type_id: int :param readable_type: Required. Possible values include: 'Entity Extractor', 'Child Entity Extractor', 'Hierarchical Entity Extractor', 'Hierarchical Child Entity Extractor', 'Composite Entity Extractor', 'List Entity Extractor', 'Prebuilt Entity Extractor', 'Intent Classifier', 'Pattern.Any Entity Extractor', 'Closed List Entity Extractor', 'Regex Entity Extractor' :type readable_type: str or ~azure.cognitiveservices.language.luis.authoring.models.enum :param roles: :type roles: list[~azure.cognitiveservices.language.luis.authoring.models.EntityRole] :param children: List of child entities. :type children: list[~azure.cognitiveservices.language.luis.authoring.models.ChildEntity] :param sub_lists: List of sublists. :type sub_lists: list[~azure.cognitiveservices.language.luis.authoring.models.SubClosedListResponse] :param custom_prebuilt_domain_name: The domain name. :type custom_prebuilt_domain_name: str :param custom_prebuilt_model_name: The intent name or entity name. :type custom_prebuilt_model_name: str :param regex_pattern: The Regular Expression entity pattern. :type regex_pattern: str :param explicit_list: :type explicit_list: list[~azure.cognitiveservices.language.luis.authoring.models.ExplicitListItem]
62598fa4009cb60464d013d9
class ATM:
    """An ATM-style bank account: balance checks, deposits, withdrawals,
    interest accrual and a transaction log.

    :param account_holder: name used in transaction log entries.
    :param balance: opening balance in dollars.
    :param interest_rate: fractional rate applied by calc_interest (0.1 = 10%).
    """

    def __init__(self, account_holder, balance=0.00, interest_rate=0.1):
        self.account_holder = account_holder
        self.balance = balance
        self.interest_rate = interest_rate
        self.transactions = []

    def _timestamp(self):
        """Return the current time formatted for transaction log entries.

        Bug fix: the original read a module-level `now` value, which would
        stamp every transaction with the same (import-time) timestamp.
        """
        from datetime import datetime
        return datetime.now().strftime('%Y-%m-%d at %H:%M:%S')

    def check_balance(self):
        """Return a message reporting the current balance."""
        return f"Your current balance is: ${self.balance}"

    def deposit(self, amount):
        """Add `amount` to the balance and log the transaction."""
        self.balance += amount
        self.transactions.append(
            f"{self.account_holder} deposited ${amount} on {self._timestamp()}.")
        return f"You have deposited ${amount}. Your current balance is: ${round(self.balance, 2)}"

    def check_withdrawal(self, amount):
        """Report whether `amount` can be withdrawn.

        Bug fix: the original branches were inverted -- it reported
        "sufficient funds" exactly when `amount` exceeded the balance.
        """
        if amount > self.balance:
            return f"You have insufficient funds. Your current balance is: ${round(self.balance, 2)}"
        return "You have sufficient funds for this withdrawal."

    def withdraw(self, amount):
        """Withdraw `amount` if covered by the balance, and log it.

        Bug fix: uses <= so the full balance can be withdrawn (the original
        `<` rejected withdrawing the exact balance).
        """
        if amount <= self.balance:
            self.balance -= amount
            self.transactions.append(
                f"{self.account_holder} withdrew ${amount} on {self._timestamp()}.")
            return f"You have withdrawn ${amount}. Your current balance is: ${round(self.balance, 2)}"
        return f"You have insufficient funds. Your current balance is: ${round(self.balance, 2)}"

    def calc_interest(self):
        """Apply one interest period to the balance and report it.

        Bug fix: the message now shows the rate as a percentage (e.g. 10%)
        instead of printing the raw fraction followed by a percent sign.
        """
        self.balance += self.balance * self.interest_rate
        return f"Your current interest rate is {self.interest_rate:.0%}. Your current balance is: ${round(self.balance, 2)}"

    def print_transactions(self):
        """Return a formatted listing of all past transactions."""
        return "These are your past transactions:\n\t" + '\n\t'.join(self.transactions)
An ATM-style bank account: supports balance checks, deposits, withdrawals, interest accrual, and a transaction log.
62598fa4435de62698e9bca9
class Content(object):
    """A content item with a base-layer size, an enhancement-layer size and
    a cumulative popularity probability.

    NOTE(review): get_contents reads module-level mean_base_content_size and
    mean_enhancement_content_size, and get_random_content reads a module
    level `contents` list -- confirm those globals exist.
    """

    def __init__(self, base_size, enhancement_size, prob):
        super(Content, self).__init__()
        self.base_size = base_size
        self.enhancement_size = enhancement_size
        self.prob = prob

    @staticmethod
    def get_contents():
        """Build a catalogue of 100 contents with Zipf-like cumulative
        probabilities and exponentially distributed layer sizes."""
        catalogue = []
        cumulative = 0
        for rank in range(100):
            base = random.expovariate(1 / mean_base_content_size)
            enhancement = random.expovariate(1 / mean_enhancement_content_size)
            cumulative += 1 / ((rank + 1) * math.log(1.78 * 100))
            catalogue.append(Content(base, enhancement, min(cumulative, 1)))
        return catalogue

    @staticmethod
    def get_random_content():
        """Sample one content by inverse-CDF lookup over `contents`."""
        draw = random.random()
        for candidate in contents:
            if draw <= candidate.prob:
                return candidate
A content item with a base-layer size, an enhancement-layer size, and a cumulative popularity probability.
62598fa47b25080760ed735f
class WriteBufferTests(unittest.SynchronousTestCase):
    """Tests for imap4.WriteBuffer's size-triggered flushing behaviour."""

    def setUp(self):
        # Fresh in-memory transport for every test.
        self.transport = StringTransport()

    def test_partialWrite(self):
        # A write of exactly bufferSize bytes stays buffered (no flush yet).
        buf = imap4.WriteBuffer(self.transport)
        data = b'x' * buf.bufferSize
        buf.write(data)
        self.assertFalse(self.transport.value())

    def test_overlongWrite(self):
        # Exceeding bufferSize by one byte forces an immediate flush.
        buf = imap4.WriteBuffer(self.transport)
        data = b'x' * (buf.bufferSize + 1)
        buf.write(data)
        self.assertEqual(self.transport.value(), data)

    def test_writesImplyFlush(self):
        # A second write that pushes the total past bufferSize flushes
        # everything buffered so far, in order.
        buf = imap4.WriteBuffer(self.transport)
        firstData = b'x' * buf.bufferSize
        secondData = b'y'
        buf.write(firstData)
        self.assertFalse(self.transport.value())
        buf.write(secondData)
        self.assertEqual(self.transport.value(), firstData + secondData)

    def test_explicitFlush(self):
        # flush() empties the buffer to the transport on demand.
        buf = imap4.WriteBuffer(self.transport)
        data = b'x' * (buf.bufferSize)
        buf.write(data)
        self.assertFalse(self.transport.value())
        buf.flush()
        self.assertEqual(self.transport.value(), data)

    def test_explicitFlushEmptyBuffer(self):
        # Flushing an empty buffer writes nothing.
        buf = imap4.WriteBuffer(self.transport)
        buf.flush()
        self.assertFalse(self.transport.value())
Tests for L{imap4.WriteBuffer}.
62598fa432920d7e50bc5f0c
class Identity(object):
    """The identity function together with its inverse and derivatives.

    :Info: http://www.wolframalpha.com/input/?i=line
    """

    @classmethod
    def f(cls, x):
        """Forward map: return x unchanged."""
        return x

    @classmethod
    def g(cls, y):
        """Inverse map: return y unchanged."""
        return y

    @classmethod
    def df(cls, x):
        """First derivative of f: 1 everywhere (scalar or elementwise)."""
        return 1.0 if numx.isscalar(x) else numx.ones(x.shape)

    @classmethod
    def ddf(cls, x):
        """Second derivative of f: 0 everywhere."""
        return 0.0 if numx.isscalar(x) else numx.zeros(x.shape)

    @classmethod
    def dg(cls, y):
        """Derivative of the inverse g: 1 everywhere."""
        return 1.0 if numx.isscalar(y) else numx.ones(y.shape)
Identity function. :Info: http://www.wolframalpha.com/input/?i=line
62598fa499cbb53fe6830d89
class VerticalityTriplet(VerticalityNTuplet):
    """A collection of three vertical slices, used to identify passing tones
    and neighbor tones around the middle verticality."""

    def __init__(self, listofVerticalities):
        VerticalityNTuplet.__init__(self, listofVerticalities)
        # Maps part number -> ThreeNoteLinearSegment built from that part's
        # note in each of the three verticalities.
        self.tnlsDict = {}
        self._calcTNLS()

    def _calcTNLS(self):
        """Populate tnlsDict for every part that has a Note in all three
        verticalities (hence the min() over the three note counts)."""
        for partNum in range(min(len(self.verticalities[0].getObjectsByClass(note.Note)),
                                 len(self.verticalities[1].getObjectsByClass(note.Note)),
                                 len(self.verticalities[2].getObjectsByClass(note.Note)))):
            self.tnlsDict[partNum] = ThreeNoteLinearSegment([
                self.verticalities[0].getObjectsByPart(partNum, note.Note),
                self.verticalities[1].getObjectsByPart(partNum, note.Note),
                self.verticalities[2].getObjectsByPart(partNum, note.Note)])

    def hasPassingTone(self, partNumToIdentify, unaccentedOnly=False):
        """Return True if the middle note of the given part could be a
        passing tone: linear passing motion in that part AND consonant outer
        chords with a dissonant middle chord.

        Returns False when the part is not present in all three slices.
        """
        if partNumToIdentify in self.tnlsDict:
            ret = self.tnlsDict[partNumToIdentify].couldBePassingTone()
        else:
            return False
        if unaccentedOnly:
            try:
                # Unaccented = the middle note falls on a weak beat.
                ret = ret and (self.tnlsDict[partNumToIdentify].n2.beatStrength < 0.5)
            except (AttributeError, NameError, base.Music21ObjectException):
                # beatStrength may be unavailable; keep the melodic verdict.
                pass
        if (ret and self.chordList[0].isConsonant()
                and not self.chordList[1].isConsonant()
                and self.chordList[2].isConsonant()):
            return True
        else:
            return False

    def hasNeighborTone(self, partNumToIdentify, unaccentedOnly=False):
        """Return True if the middle note of the given part could be a
        neighbor tone and the middle chord is dissonant.

        Returns False when the part is not present in all three slices.
        """
        if partNumToIdentify in self.tnlsDict:
            ret = self.tnlsDict[partNumToIdentify].couldBeNeighborTone()
        else:
            return False
        if unaccentedOnly:
            try:
                # Unaccented = the middle note falls on a weak beat.
                ret = ret and (self.tnlsDict[partNumToIdentify].n2.beatStrength < 0.5)
            except (AttributeError, NameError, base.Music21ObjectException):
                pass
        return ret and not self.chordList[1].isConsonant()
a collection of three vertical slices
62598fa467a9b606de545e80
class IProcessor(Interface):
    """A processor entry, exported over the webservice."""

    # Entries are published without a web link, available from 'beta' on.
    export_as_webservice_entry(publish_web_link=False, as_of='beta')

    # `id` is internal-only; the remaining fields are exported read-only
    # starting with the 'devel' API version.
    id = Attribute("The Processor ID")
    name = exported(
        TextLine(title=_("Name"), description=_("The Processor Name")),
        as_of='devel', readonly=True)
    title = exported(
        TextLine(title=_("Title"), description=_("The Processor Title")),
        as_of='devel', readonly=True)
    description = exported(
        Text(title=_("Description"),
             description=_("The Processor Description")),
        as_of='devel', readonly=True)
    restricted = exported(
        Bool(title=_("Whether this processor is restricted.")),
        as_of='devel', readonly=True)
The SQLObject Processor Interface
62598fa4a219f33f346c66cf
class SenderResponse(object):
    """Response wrapper holding one SenderFullResponse under `data`.

    NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech
    """

    openapi_types = {
        'data': 'SenderFullResponse'
    }

    attribute_map = {
        'data': 'data'
    }

    def __init__(self, data=None, local_vars_configuration=None):
        # Default to a fresh Configuration when none is supplied.
        self.local_vars_configuration = (
            local_vars_configuration
            if local_vars_configuration is not None
            else Configuration()
        )
        self._data = None
        self.discriminator = None
        self.data = data

    @property
    def data(self):
        """The wrapped payload."""
        return self._data

    @data.setter
    def data(self, data):
        # `data` is required; reject None when client-side validation is on.
        if self.local_vars_configuration.client_side_validation and data is None:
            raise ValueError("Invalid value for `data`, must not be `None`")
        self._data = data

    def to_dict(self):
        """Return the model's properties as a dict, recursing into nested
        models, lists and dicts."""
        result = {}
        for attr in self.openapi_types:
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = [
                    item.to_dict() if hasattr(item, "to_dict") else item
                    for item in value
                ]
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = {
                    key: val.to_dict() if hasattr(val, "to_dict") else val
                    for key, val in value.items()
                }
            else:
                result[attr] = value
        return result

    def to_str(self):
        """Return the pretty-printed string representation of the model."""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        return self.to_str()

    def __eq__(self, other):
        return isinstance(other, SenderResponse) and self.to_dict() == other.to_dict()

    def __ne__(self, other):
        return not isinstance(other, SenderResponse) or self.to_dict() != other.to_dict()
NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech Do not edit the class manually.
62598fa4eab8aa0e5d30bc3e
@admin.register(KeyFigure)
class KeyFigureAdmin(SortableAdminMixin, admin.ModelAdmin):
    """Key figure admin panel.

    Key figures can be sorted through a drag'n'drop interface (provided by
    django-admin-sortable2's SortableAdminMixin).
    """
    pass
Key figure admin panel. Key figures can be sorted through a drag'n'drop interface (thanks to django-admin-sortable2).
62598fa4d53ae8145f918342
@ddt.ddt
class TestDeleteTeamAPI(EventTestMixin, TeamAPITestCase):
    """Test cases for the team delete endpoint."""

    shard = 6

    def setUp(self):
        super(TestDeleteTeamAPI, self).setUp('lms.djangoapps.teams.utils.tracker')

    @ddt.data(
        (None, 401),
        ('student_inactive', 401),
        ('student_unenrolled', 403),
        ('student_enrolled', 403),
        ('staff', 204),
        ('course_staff', 204),
        ('community_ta', 204)
    )
    @ddt.unpack
    def test_access(self, user, status):
        """Each user role gets the expected HTTP status; successful deletes
        (204) also emit the deletion and learner-removed events."""
        self.delete_team(self.solar_team.team_id, status, user=user)
        if status == 204:
            self.assert_event_emitted(
                'edx.team.deleted',
                team_id=self.solar_team.team_id,
            )
            self.assert_event_emitted(
                'edx.team.learner_removed',
                team_id=self.solar_team.team_id,
                remove_method='team_deleted',
                user_id=self.users['student_enrolled'].id
            )

    def test_does_not_exist(self):
        """Deleting an unknown team id returns 404."""
        self.delete_team('nonexistent', 404)

    def test_memberships_deleted(self):
        """Deleting a team also removes its memberships and emits events."""
        self.assertEqual(CourseTeamMembership.objects.filter(team=self.solar_team).count(), 1)
        self.delete_team(self.solar_team.team_id, 204, user='staff')
        self.assert_event_emitted(
            'edx.team.deleted',
            team_id=self.solar_team.team_id,
        )
        self.assert_event_emitted(
            'edx.team.learner_removed',
            team_id=self.solar_team.team_id,
            remove_method='team_deleted',
            user_id=self.users['student_enrolled'].id
        )
        self.assertEqual(CourseTeamMembership.objects.filter(team=self.solar_team).count(), 0)
Test cases for the team delete endpoint.
62598fa4baa26c4b54d4f166
class LutronCasetaDevice(Entity):
    """Common base class for all Lutron Caseta devices."""

    def __init__(self, device, bridge):
        """Store the device descriptor fields and the smartbridge handle.

        `device` is a mapping with at least 'device_id', 'type' and 'name'.
        """
        self._device_id = device["device_id"]
        self._device_type = device["type"]
        self._device_name = device["name"]
        self._state = None
        self._smartbridge = bridge

    # NOTE(review): legacy pre-async/await coroutine style; @asyncio.coroutine
    # is deprecated in modern Python -- candidate for `async def` migration.
    @asyncio.coroutine
    def async_added_to_hass(self):
        """Subscribe to bridge state-change callbacks for this device."""
        self._smartbridge.add_subscriber(self._device_id, self._update_callback)

    def _update_callback(self):
        # Bridge reported a change: schedule a state refresh.
        self.schedule_update_ha_state()

    @property
    def name(self):
        """The device's display name."""
        return self._device_name

    @property
    def device_state_attributes(self):
        """Extra state attributes: exposes the Lutron integration id."""
        attr = {'Lutron Integration ID': self._device_id}
        return attr

    @property
    def should_poll(self):
        """No polling needed; updates arrive via bridge callbacks."""
        return False
Common base class for all Lutron Caseta devices.
62598fa48e7ae83300ee8f57
class Person(CachingMixin, models.Model):
    """Person model with query caching via CachingManager."""

    # Externally assigned primary key (not auto-generated).
    person_id = models.PositiveIntegerField(primary_key=True)
    name = models.CharField(max_length=150)

    objects = CachingManager()

    def __unicode__(self):
        # Python 2 string representation used by Django displays.
        return self.name
Person model.
62598fa457b8e32f52508076
class Adagrad(object):
    """Factory for a TensorFlow Adagrad optimiser with default
    hyper-parameters."""

    @staticmethod
    def get_instance(learning_rate):
        """Return a tf.train.AdagradOptimizer for the given learning rate."""
        options = dict(
            learning_rate=learning_rate,
            initial_accumulator_value=0.1,
            use_locking=False,
            name='Adagrad',
        )
        return tf.train.AdagradOptimizer(**options)
Adagrad optimiser with default hyper parameters
62598fa4498bea3a75a579d8
class GraphModule(object):
    """Wrapper runtime module.

    This is a thin wrapper of the underlying TVM module; you can also
    directly call set_input, run, and get_output of the underlying module
    functions. When `debug` is true, a debug runtime object is attached and
    runs/inputs are mirrored to it.
    """

    def __init__(self, module, ctx, graph_json_str, debug):
        self.module = module
        # Cache the packed functions once; cheaper than repeated
        # module["..."] lookups on every call.
        self._set_input = module["set_input"]
        self._run = module["run"]
        self._get_output = module["get_output"]
        self._get_input = module["get_input"]
        self._set_debug_buffer = module["set_debug_buffer"]
        self._debug_run = module["debug_run"]
        self._load_params = module["load_params"]
        self.ctx = ctx
        self.debug = debug
        if self.debug:
            self.dbgobj = debugruntime.create(self, graph_json_str)

    def set_input(self, key=None, value=None, **params):
        """Set a named input (and/or keyword inputs); returns self for
        chaining. Values are copied into ndarrays on self.ctx."""
        if key:
            self._set_input(key, nd.array(value, ctx=self.ctx))
        for k, v in params.items():
            self._set_input(k, nd.array(v, ctx=self.ctx))
        if self.debug:
            # Mirror inputs into the debug runtime as well.
            debugruntime.set_input(self.dbgobj, key, value, **params)
        return self

    def set_debug_buffer(self):
        """Hand every debug output buffer to the runtime.

        NOTE(review): the hasattr guard can never trigger here because
        __init__ always assigns _set_debug_buffer, and `self.ndarraylist`
        is not assigned anywhere in this class -- presumably populated by
        debugruntime.create; confirm.
        """
        if not hasattr(self, '_set_debug_buffer'):
            raise RuntimeError("Please compile runtime with USE_GRAPH_RUNTIME_DEBUG = 0")
        for ndbuffer in self.ndarraylist:
            self._set_debug_buffer(ndbuffer)

    def debug_run(self):
        # Run once with debug buffers attached, then dump the outputs.
        self.set_debug_buffer()
        self._debug_run()
        debugruntime.dump_output(self.dbgobj, self.ndarraylist)

    def run(self, **input_dict):
        """Run the graph; optional keyword inputs are set first."""
        if input_dict:
            self.set_input(**input_dict)
        if not self.debug:
            self._run()
        else:
            self.dbgobj.run("")

    def get_input(self, index, out):
        """Copy input `index` into `out` and return it."""
        self._get_input(index, out)
        return out

    def get_output(self, index, out):
        """Copy output `index` into `out` and return it."""
        self._get_output(index, out)
        return out

    def load_params(self, params_bytes):
        """Load serialized parameters (a bytes-like blob) into the module."""
        self._load_params(bytearray(params_bytes))

    def __getitem__(self, key):
        # Fall through to the underlying module's packed functions.
        return self.module[key]
Wrapper runtime module. This is a thin wrapper of the underlying TVM module. you can also directly call set_input, run, and get_output of underlying module functions Parameters ---------- module : Module The interal tvm module that holds the actual graph functions. ctx : TVMContext The context this module is under Attributes ---------- module : Module The interal tvm module that holds the actual graph functions. ctx : TVMContext The context this module is under
62598fa4090684286d593636
class ObjectModelWithRefProps(
    DictSchema
):
    """A model mixing primitive properties (myString, myBoolean) with one
    property defined as a class (NumberWithValidations).

    NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech
    Do not edit the class manually.
    """

    @classmethod
    @property
    def myNumber(cls) -> typing.Type['NumberWithValidations']:
        # Resolved lazily -- presumably to avoid an import cycle with
        # NumberWithValidations; TODO confirm.
        return NumberWithValidations

    myString = StrSchema
    myBoolean = BoolSchema

    def __new__(
        cls,
        *args: typing.Union[dict, frozendict, ],
        myNumber: typing.Union['NumberWithValidations', Unset] = unset,
        myString: typing.Union[myString, Unset] = unset,
        myBoolean: typing.Union[myBoolean, Unset] = unset,
        _configuration: typing.Optional[Configuration] = None,
        **kwargs: typing.Type[Schema],
    ) -> 'ObjectModelWithRefProps':
        # Validation and coercion happen in the DictSchema base __new__.
        return super().__new__(
            cls,
            *args,
            myNumber=myNumber,
            myString=myString,
            myBoolean=myBoolean,
            _configuration=_configuration,
            **kwargs,
        )
NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech Do not edit the class manually. a model that includes properties which should stay primitive (String + Boolean) and one which is defined as a class, NumberWithValidations
62598fa476e4537e8c3ef463
class _HeaderFooterPart(Strict):
    """Individual left/center/right header/footer part. Do not use directly.

    Serializes to the Excel header/footer miniformat: &"font" selects a
    font, &nn a 2-digit point size, and &K followed by six hex digits an
    RGB colour.
    """

    text = String(allow_none=True)
    font = String(allow_none=True)
    size = Integer(allow_none=True)
    RGB = ("^[A-Fa-f0-9]{6}$")
    color = MatchPattern(allow_none=True, pattern=RGB)

    def __init__(self, text=None, font=None, size=None, color=None):
        self.text = text
        self.font = font
        self.size = size
        self.color = color

    def __str__(self):
        """Convert to the miniformat, emitting font/size/colour codes first."""
        fmt = []
        if self.font:
            fmt.append(u'&"{0}"'.format(self.font))
        if self.size:
            fmt.append("&{0} ".format(self.size))
        if self.color:
            fmt.append("&K{0}".format(self.color))
        # Bug fix: guard against text=None, which made ''.join raise
        # TypeError whenever only formatting attributes were set.
        return u"".join(fmt + [self.text or u""])

    def __bool__(self):
        # A part is truthy only when it carries text.
        return bool(self.text)

    __nonzero__ = __bool__  # Python 2 compatibility

    @classmethod
    def from_str(cls, text):
        """Parse a miniformat string: pull out font/color/size codes and
        keep the remainder as the text."""
        keys = ('font', 'color', 'size')
        kw = dict((k, v) for match in FORMAT_REGEX.findall(text)
                  for k, v in zip(keys, match) if v)
        kw['text'] = FORMAT_REGEX.sub('', text)
        return cls(**kw)
Individual left/center/right header/footer part Do not use directly. Header & Footer ampersand codes: * &A Inserts the worksheet name * &B Toggles bold * &D or &[Date] Inserts the current date * &E Toggles double-underline * &F or &[File] Inserts the workbook name * &I Toggles italic * &N or &[Pages] Inserts the total page count * &S Toggles strikethrough * &T Inserts the current time * &[Tab] Inserts the worksheet name * &U Toggles underline * &X Toggles superscript * &Y Toggles subscript * &P or &[Page] Inserts the current page number * &P+n Inserts the page number incremented by n * &P-n Inserts the page number decremented by n * &[Path] Inserts the workbook path * && Escapes the ampersand character * &"fontname" Selects the named font * &nn Selects the specified 2-digit font point size Colours are in RGB Hex
62598fa43eb6a72ae038a4fa
class PathUI(gtk.HBox):
    """Gtk widgets showing a path in a textbox, with a button that opens a
    file chooser to modify that path (plus a label describing them).
    """

    def __init__(self, req, backend, width):
        """
        req -- requester used to enable/disable the backend
        backend -- the backend whose 'path' parameter is edited
        width -- width request (pixels) for the description label
        """
        super(PathUI, self).__init__()
        self.backend = backend
        self.req = req
        self._populate_gtk(width)

    def _populate_gtk(self, width):
        """Build the label / entry / edit-button row."""
        label = gtk.Label(_("Filename:"))
        label.set_line_wrap(True)
        label.set_alignment(xalign=0, yalign=0.5)
        label.set_size_request(width=width, height=-1)
        self.pack_start(label, False)
        align = gtk.Alignment(xalign=0, yalign=0.5, xscale=1)
        align.set_padding(0, 0, 10, 0)
        self.pack_start(align, True)
        self.textbox = gtk.Entry()
        self.textbox.set_text(self.backend.get_parameters()['path'])
        self.textbox.connect('changed', self.on_path_modified)
        align.add(self.textbox)
        self.button = gtk.Button(stock=gtk.STOCK_EDIT)
        self.button.connect('clicked', self.on_button_clicked)
        self.pack_start(self.button, False)

    def commit_changes(self):
        """Persist the edited path into the backend parameters."""
        self.backend.set_parameter('path', self.textbox.get_text())

    def on_path_modified(self, sender):
        """Disable a non-default backend while its path is being edited."""
        if self.backend.is_enabled() and not self.backend.is_default():
            self.req.set_backend_enabled(self.backend.get_id(), False)

    def on_button_clicked(self, sender):
        """Open a save-file chooser preloaded with the current path."""
        self.chooser = gtk.FileChooserDialog(
            title=None,
            action=gtk.FILE_CHOOSER_ACTION_SAVE,
            buttons=(gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL,
                     gtk.STOCK_OK, gtk.RESPONSE_OK))
        self.chooser.set_default_response(gtk.RESPONSE_OK)
        dirname, basename = os.path.split(self.textbox.get_text())
        self.chooser.set_current_name(basename)
        # BUG FIX: was "self.chosser.set_current_folder(dirname)" (typo),
        # which raised AttributeError every time the dialog was opened.
        self.chooser.set_current_folder(dirname)
        afilter = gtk.FileFilter()
        afilter.set_name("All files")
        afilter.add_pattern("*")
        self.chooser.add_filter(afilter)
        afilter = gtk.FileFilter()
        afilter.set_name("XML files")
        afilter.add_mime_type("text/plain")
        afilter.add_pattern("*.xml")
        self.chooser.add_filter(afilter)
        response = self.chooser.run()
        if response == gtk.RESPONSE_OK:
            self.textbox.set_text(self.chooser.get_filename())
        self.chooser.destroy()
Gtk widgets to show a path in a textbox, and a button to bring up a filesystem explorer to modify that path (also, a label to describe those)
62598fa42c8b7c6e89bd367b
class Cache:
    """A cached function: wraps a callable and memoizes its results.

    Entries older than ``maxAge`` seconds are purged by ``collect()``;
    ``startCollection()`` launches a background thread that sweeps every
    instance every ``collectionInterval`` seconds until ``stopCollection()``.

    NOTE(review): the class-level instance registry holds strong references,
    so ``__del__`` can only run for instances explicitly removed — presumably
    acceptable for decorator-style, process-lifetime caches; confirm.
    """

    __allInstances = set()   # registry swept by collectAll()
    maxAge = 3600            # seconds before an entry is eligible for purge
    collectionInterval = 2   # seconds between background sweeps
    __stopCollecting = False

    def __init__(self, func):
        Cache.__allInstances.add(self)
        self._store = {}     # key -> (insert_time, result)
        self.__func = func

    def __del__(self):
        if self in Cache.__allInstances:
            Cache.__allInstances.remove(self)

    def __call__(self, *args, **kw):
        """Return the cached result for (args, kw), computing it on a miss."""
        # kwargs are sorted so equivalent call spellings share one key.
        key = (args, tuple(sorted(kw.items())))
        if key in self._store:
            return self._store[key][1]
        result = self.__func(*args, **kw)
        self._store[key] = (time.time(), result)
        return result

    def invalidate(self):
        """Drop every cached entry."""
        self._store.clear()

    def invalidate_one(self, *args, **kw):
        """Drop the cached entry for one specific call, if present."""
        key = (args, tuple(sorted(kw.items())))
        if key in self._store:
            del self._store[key]

    def collect(self):
        """Purge entries older than maxAge (maxAge <= 0 disables purging)."""
        now = time.time()
        for key, v in list(self._store.items()):
            t, value = v
            if self.maxAge > 0 and now - t > self.maxAge:
                del self._store[key]

    @classmethod
    def collectAll(cls):
        # Snapshot the registry: another thread may create or delete a Cache
        # while the collector iterates, which would raise RuntimeError on the
        # live set (fixed: was iterating cls.__allInstances directly).
        for instance in list(cls.__allInstances):
            instance.collect()

    @classmethod
    def _startCollection(cls):
        # Collector loop; exits once stopCollection() sets the flag.
        while cls.__stopCollecting is not True:
            time.sleep(cls.collectionInterval)
            cls.collectAll()

    @classmethod
    def startCollection(cls):
        """Start the background sweep thread (non-daemon: call
        stopCollection() before interpreter exit)."""
        cls.collectorThread = threading.Thread(target=cls._startCollection)
        # Fixed: Thread.setDaemon() is deprecated; assign the attribute.
        cls.collectorThread.daemon = False
        cls.collectorThread.start()

    @classmethod
    def stopCollection(cls):
        """Signal the sweep thread to exit after its current sleep."""
        cls.__stopCollecting = True
A cached function
62598fa463d6d428bbee2668
class BlockLoaderTestCase(TestCase):
    """Collects common items and defaults across BlockLoader test cases."""

    BLOB_WORKING_DIRECTORY = 'blob/working/directory'
    DEFAULT_KWARGS = {
        'blob_working_directory': BLOB_WORKING_DIRECTORY
    }

    def wipe_loader(self):
        # Cleanup hook: drop the loader built by build_loader().
        del self.block_loader

    def build_loader(self, *args, **kwargs):
        """Construct a BlockLoader with side-effectful methods patched out.

        Falls back to DEFAULT_KWARGS when no kwargs are given.
        """
        if not kwargs:
            kwargs = self.DEFAULT_KWARGS
        # Context managers guarantee both patches are stopped even when the
        # constructor raises; the original start()/stop() pairing leaked the
        # patches on any BlockLoader.__init__ failure.
        with patch.object(BlockLoader, 'move_to_working_directory'), \
                patch.object(BlockLoader, 'validate'):
            self.block_loader = BlockLoader(*args, **kwargs)
        self.addCleanup(self.wipe_loader)
Collects common items and defaults across test cases
62598fa4851cf427c66b817f
class CodingFormatter(string.Formatter):
    """A `string.Formatter` that decodes byte strings with a fixed encoding.

    Both the format string itself and any bytes field values are decoded
    using the encoding given at construction time, so formatting always
    happens on text.
    """

    def __init__(self, coding):
        # Encoding applied to bytes format strings and bytes field values.
        self._coding = coding

    def format(self, format_string, *args, **kwargs):
        """Decode a bytes format string before delegating to the base class."""
        template = format_string
        if isinstance(template, bytes):
            template = template.decode(self._coding)
        return super().format(template, *args, **kwargs)

    def convert_field(self, value, conversion):
        """Decode any bytes produced by the standard field conversion."""
        result = super().convert_field(value, conversion)
        return result.decode(self._coding) if isinstance(result, bytes) else result
A variant of `string.Formatter` that converts everything to `unicode` strings. This is necessary on Python 2, where formatting otherwise occurs on bytestrings. It intercepts two points in the formatting process to decode the format string and all fields using the specified encoding. If decoding fails, the values are used as-is.
62598fa4be383301e02536ae
class AutomaticScaling(_messages.Message):
    """Automatic scaling settings based on request rate, response latencies,
    and other application metrics.

    Auto-generated protorpc message: field numbers are part of the wire
    format and must not change.
    """

    # Autoscaler wait time between VM-count changes (flexible env only).
    coolDownPeriod = _messages.StringField(1)
    # Target scaling by CPU usage.
    cpuUtilization = _messages.MessageField('CpuUtilization', 2)
    # Target scaling by user-provided metrics.
    customMetrics = _messages.MessageField('CustomMetric', 3, repeated=True)
    # Target scaling by disk usage.
    diskUtilization = _messages.MessageField('DiskUtilization', 4)
    # Concurrent requests an instance accepts before a new one is spawned.
    maxConcurrentRequests = _messages.IntegerField(5, variant=_messages.Variant.INT32)
    # Maximum number of idle instances maintained for this version.
    maxIdleInstances = _messages.IntegerField(6, variant=_messages.Variant.INT32)
    # Max time a request may wait in the pending queue before a new instance.
    maxPendingLatency = _messages.StringField(7)
    # Maximum number of instances started to handle requests.
    maxTotalInstances = _messages.IntegerField(8, variant=_messages.Variant.INT32)
    # Minimum number of idle instances (default version of a service only).
    minIdleInstances = _messages.IntegerField(9, variant=_messages.Variant.INT32)
    # Min time a request should wait in the pending queue before a new instance.
    minPendingLatency = _messages.StringField(10)
    # Minimum number of running instances maintained for this version.
    minTotalInstances = _messages.IntegerField(11, variant=_messages.Variant.INT32)
    # Target scaling by network usage.
    networkUtilization = _messages.MessageField('NetworkUtilization', 12)
    # Target scaling by request utilization.
    requestUtilization = _messages.MessageField('RequestUtilization', 13)
    # Scheduler settings for the standard environment.
    standardSchedulerSettings = _messages.MessageField('StandardSchedulerSettings', 14)
Automatic scaling is based on request rate, response latencies, and other application metrics. Fields: coolDownPeriod: Amount of time that the Autoscaler (https://cloud.google.com/compute/docs/autoscaler/) should wait between changes to the number of virtual machines. Only applicable in the App Engine flexible environment. cpuUtilization: Target scaling by CPU usage. customMetrics: Target scaling by user-provided metrics. diskUtilization: Target scaling by disk usage. maxConcurrentRequests: Number of concurrent requests an automatic scaling instance can accept before the scheduler spawns a new instance.Defaults to a runtime-specific value. maxIdleInstances: Maximum number of idle instances that should be maintained for this version. maxPendingLatency: Maximum amount of time that a request should wait in the pending queue before starting a new instance to handle it. maxTotalInstances: Maximum number of instances that should be started to handle requests for this version. minIdleInstances: Minimum number of idle instances that should be maintained for this version. Only applicable for the default version of a service. minPendingLatency: Minimum amount of time a request should wait in the pending queue before starting a new instance to handle it. minTotalInstances: Minimum number of running instances that should be maintained for this version. networkUtilization: Target scaling by network usage. requestUtilization: Target scaling by request utilization. standardSchedulerSettings: Scheduler settings for standard environment.
62598fa41f5feb6acb162ad8
class Test(object):
    """Identifies a test.

    level -- the level of the test.
    _id -- a number or other ID (like letters) identifying a test
           within a level.
    """

    def __init__(self, level, _id):
        self.level = level
        self.id = _id

    def __eq__(self, other):
        # Fixed: comparing against a non-Test object raised AttributeError;
        # returning NotImplemented lets Python fall back to identity.
        if not isinstance(other, Test):
            return NotImplemented
        return self.level == other.level and self.id == other.id

    def __hash__(self):
        # Defining __eq__ alone suppresses the inherited hash and makes
        # instances unhashable; keep hash consistent with equality.
        return hash((self.level, self.id))

    def __str__(self):
        return "%s %s" % (self.level, self.id)
Identifies a test. Level - The level of the test. Test Number - A number or some other ID (like letters) that identifies a test within a level.
62598fa4dd821e528d6d8deb
class Songs:
    """API resource for the collection of songs."""

    def on_get(self, req, resp):
        """List songs, optionally filtered by one or more ``album`` params."""
        wanted_albums = req.get_param_as_list('album')
        if wanted_albums:
            result = [entry for entry in _songs.values()
                      if entry['album'] in wanted_albums]
        else:
            result = list(_songs.values())
        resp.body = json.dumps(result)
        resp.status = falcon.HTTP_200

    def on_post(self, req, resp):
        """Create a song from a JSON body; 400 on empty or invalid JSON."""
        raw = req.stream.read().decode('utf-8')
        if not raw:
            raise falcon.HTTPBadRequest(title='Empty body', description='Valid JSON document required')
        try:
            song = json.loads(raw)
        except ValueError:
            raise falcon.HTTPBadRequest(title='Invalid body', description='Valid JSON document required')
        # Assign a server-side id and store the new record.
        song['id'] = uuid.uuid4().hex
        _songs[song['id']] = song
        resp.set_header('Location', '%s/%s' % (req.uri, song['id']))
        resp.body = json.dumps(song)
        resp.status = falcon.HTTP_201
API resource for the collection of songs.
62598fa4d486a94d0ba2be84
class ExpansionModel(AddonModel):
    """Thermal expansion addon model.

    Accepts isotropic (scalar), 3-component, or 6-component expansion
    coefficients and removes the thermal contribution from the mechanical
    strain/rate during evaluation.
    """

    name = '__expansion__'

    def __init__(self, expansion):
        # State variable layout: 6 thermal-strain components ('EM...') then
        # 9 tensor components ('FM...') — 15 total.
        self.num_sdv = 15
        self.sdv_names = ['EM'+COMPONENT_SEP+x for x in SYMMETRIC_COMPONENTS]
        self.sdv_names.extend(['FM'+COMPONENT_SEP+x for x in TENSOR_COMPONENTS])
        # Scalar input means isotropic expansion: same coefficient on the
        # three normal components.
        if is_scalarlike(expansion):
            expansion = [expansion] * 3
        if not is_listlike(expansion):
            raise ValueError('Expected expansion to be array_like')
        # 3 components are padded with zero shear terms to full Voigt form.
        if len(expansion) == 3:
            expansion = [x for x in expansion] + [0, 0, 0]
        if len(expansion) != 6:
            raise ValueError('Expected len(expansion) to be 3 or 6')
        self.data = np.array([float(x) for x in expansion])

    def sdvini(self, statev):
        # Ignores the incoming statev: initial state is 6 zeros followed by a
        # flattened 3x3 identity (matching the statev[6:15] slot that eval()
        # reads F0 from).
        statev = np.append(np.zeros(6), np.array([1.,0.,0.,0.,1.,0.,0.,0.,1.]))
        return statev

    def eval(self, kappa, time, dtime, temp, dtemp, F0, F, strain, d, stress,
             statev, initial_temp=0., **kwds):
        """Remove thermal expansion from strain/rate; update statev in place.

        NOTE: mutates strain, d, F0, F and statev in place — callers rely on
        these side effects. Statement order matters: F is recomputed from the
        already-de-thermalized strain.
        """
        assert len(statev) == 15
        # Thermal strain from total temperature change relative to initial.
        thermal_strain = (temp + dtemp - initial_temp) * self.data
        strain -= thermal_strain
        # Previous-step deformation gradient is stored in statev[6:15].
        F0[:9] = np.array(statev[6:15])
        F[:9] = defgrad_from_strain(strain, kappa, flatten=1)
        # Thermal contribution to the rate of deformation over this step.
        thermal_d = self.data * dtemp / dtime
        d -= thermal_d
        # Persist [mechanical strain (6), F (9)] for the next increment.
        statev[:self.num_sdv] = np.append(strain, F)
        return None
Thermal expansion model
62598fa4a17c0f6771d5c0eb
class UserSmartListMixin(object):
    """``User`` list which is also searchable and sortable.

    A ``q`` parameter is matched against first_name, last_name and email;
    results can be ordered by those fields or by join date, and the
    queryset can be date-range filtered.
    """

    # Fields matched against the ``q`` search parameter.
    search_fields = ('first_name', 'last_name', 'email')
    # (public ordering key, model field) pairs; 'date_joined' presumably maps
    # onto the model's 'created_at' column — confirm against the model.
    ordering_fields = (
        ('first_name', 'first_name'),
        ('last_name', 'last_name'),
        ('email', 'email'),
        ('date_joined', 'created_at')
    )
    # Default ordering when the client specifies none.
    ordering = ('first_name',)
    filter_backends = (DateRangeFilter, OrderingFilter, SearchFilter)
``User`` list which is also searchable and sortable. The queryset can be further filtered to a before date with ``ends_at``. The queryset can be further filtered by passing a ``q`` parameter. The value in ``q`` will be matched against: - User.first_name - User.last_name - User.email The result queryset can be ordered by: - User.first_name - User.last_name - User.email - User.created_at
62598fa4a8370b77170f0291
class FactorList(ListResource):
    """List resource for Authy Factors scoped to a service entity.

    PLEASE NOTE that this class contains preview products that are subject
    to change. Auto-generated Twilio helper: do not change the request
    shapes or trailing-comma layout by hand.
    """

    def __init__(self, version, service_sid, identity):
        # _solution feeds both the URI template and child pages/contexts.
        super(FactorList, self).__init__(version)
        self._solution = {'service_sid': service_sid, 'identity': identity, }
        self._uri = '/Services/{service_sid}/Entities/{identity}/Factors'.format(**self._solution)

    def create(self, binding, factor_type, friendly_name, config=values.unset):
        """POST a new Factor; returns a FactorInstance built from the response."""
        data = values.of({
            'Binding': binding,
            'FactorType': factor_type,
            'FriendlyName': friendly_name,
            'Config': config,
        })
        payload = self._version.create(
            'POST',
            self._uri,
            data=data,
        )
        return FactorInstance(
            self._version,
            payload,
            service_sid=self._solution['service_sid'],
            identity=self._solution['identity'],
        )

    def stream(self, limit=None, page_size=None):
        """Lazily stream FactorInstance records, paging under the hood."""
        limits = self._version.read_limits(limit, page_size)
        page = self.page(page_size=limits['page_size'], )
        return self._version.stream(page, limits['limit'], limits['page_limit'])

    def list(self, limit=None, page_size=None):
        """Eagerly materialize the stream into a list of FactorInstance."""
        return list(self.stream(limit=limit, page_size=page_size, ))

    def page(self, page_token=values.unset, page_number=values.unset,
             page_size=values.unset):
        """Fetch a single page of records and wrap it in a FactorPage."""
        params = values.of({'PageToken': page_token, 'Page': page_number, 'PageSize': page_size, })
        response = self._version.page(
            'GET',
            self._uri,
            params=params,
        )
        return FactorPage(self._version, response, self._solution)

    def get_page(self, target_url):
        """Fetch a page from an explicit API URL (e.g. next_page_url)."""
        response = self._version.domain.twilio.request(
            'GET',
            target_url,
        )
        return FactorPage(self._version, response, self._solution)

    def get(self, sid):
        """Return a FactorContext for one factor sid (no request made yet)."""
        return FactorContext(
            self._version,
            service_sid=self._solution['service_sid'],
            identity=self._solution['identity'],
            sid=sid,
        )

    def __call__(self, sid):
        # Alias for get(): allows factor_list(sid) syntax.
        return FactorContext(
            self._version,
            service_sid=self._solution['service_sid'],
            identity=self._solution['identity'],
            sid=sid,
        )

    def __repr__(self):
        return '<Twilio.Authy.V1.FactorList>'
PLEASE NOTE that this class contains preview products that are subject to change. Use them with caution. If you currently do not have developer preview access, please contact help@twilio.com.
62598fa4435de62698e9bcab
class YoloImageGenerator(tf.keras.utils.Sequence):
    """Keras Sequence that reads images, pads them to a fixed size, and
    batches them for a YOLO network that locates text regions.

    Images are partitioned by orientation (landscape vs portrait) so each
    batch is homogeneous; landscape batches come first in the index space.
    """

    def __init__(self, filenames, batch_size=32, reshape_size=(608, 1280)):
        self.batch_size = batch_size
        self.filenames = filenames
        self.channels = 3
        self.reshape_size = reshape_size
        self.reshape_horizontal_size = reshape_size
        # Transposed target size reported for portrait images.
        self.reshape_vertical_size = (reshape_size[1], reshape_size[0])
        self.horizontal_image = []
        self.vertical_image = []
        # path -> (height, width) of the original image.
        self.images_size = {}
        # NOTE: every file is opened once up front just to read its size;
        # __getitem__ opens it again to load pixels.
        for pth in self.filenames:
            img = Image.open(pth).convert('RGB')
            w, h = img.size
            self.images_size[pth] = (h, w)
            if w >= h:
                self.horizontal_image.append(pth)
            else:
                self.vertical_image.append(pth)
        self.horizontal_batch = math.ceil(len(self.horizontal_image) / self.batch_size)
        self.vertical_batch = math.ceil(len(self.vertical_image) / self.batch_size)

    def __len__(self):
        # Total batches: all landscape batches followed by all portrait ones.
        return self.horizontal_batch + self.vertical_batch

    def __getitem__(self, idx):
        """Return (stacked images, original shapes, padded shapes, paths)."""
        if idx < self.horizontal_batch:
            batch_image = self.horizontal_image[idx * self.batch_size:(idx + 1) * self.batch_size]
            batch_array = []
            batch_shape = [self.images_size[impth] for impth in batch_image]
            batch_shape_padded = []
            for pth in batch_image:
                img = Image.open(pth).convert('RGB')
                img, (w, h) = pad_image(img, self.reshape_size)
                batch_array.append(img)
                batch_shape_padded.append(self.reshape_horizontal_size)
            return np.stack(batch_array), batch_shape, batch_shape_padded, batch_image
        else:
            # Portrait batches are indexed after all landscape batches.
            new_idx = idx - self.horizontal_batch
            batch_image = self.vertical_image[new_idx * self.batch_size:(new_idx + 1) * self.batch_size]
            batch_array = []
            batch_shape = [self.images_size[impth] for impth in batch_image]
            batch_shape_padded = []
            for pth in batch_image:
                img = Image.open(pth).convert('RGB')
                # NOTE(review): portrait images are padded with the same
                # reshape_size as landscape ones, yet the reported padded
                # shape is the transposed reshape_vertical_size — confirm
                # pad_image handles orientation internally.
                img, (w, h) = pad_image(img, self.reshape_size)
                batch_array.append(img)
                batch_shape_padded.append(self.reshape_vertical_size)
            return np.stack(batch_array), batch_shape, batch_shape_padded, batch_image
读取图片,预处理,送入YOLO网络寻找文字区域
62598fa47b25080760ed7361
class NodeDistributedSampler(Sampler):
    """Sampler that restricts data loading to a subset of the dataset,
    partitioned first by node (local_rank within local_size processes per
    node) and then across replicas.

    Useful with :class:`torch.nn.parallel.DistributedDataParallel`: each
    process loads a slice of the dataset exclusive to it.

    Arguments:
        dataset: Dataset used for sampling.
        num_replicas (optional): Number of processes in distributed training.
        rank (optional): Global rank of the current process.
        local_rank (optional): Rank within the node (default from LOCAL_RANK).
        local_size (optional): Processes per node (default from LOCAL_SIZE).
        shuffle (optional): Reshuffle indices each epoch (seeded by epoch).
    """

    def __init__(self, dataset, num_replicas=None, rank=None, local_rank=None,
                 local_size=None, shuffle=True):
        if num_replicas is None:
            if not dist.is_available():
                raise RuntimeError("Requires distributed package to be available")
            num_replicas = dist.get_world_size()
        if rank is None:
            if not dist.is_available():
                raise RuntimeError("Requires distributed package to be available")
            rank = dist.get_rank()
        # Node-local position/size default to the launcher's env variables.
        if local_rank is None:
            local_rank = int(os.environ.get('LOCAL_RANK', 0))
        if local_size is None:
            local_size = int(os.environ.get('LOCAL_SIZE', 1))
        self.dataset = dataset
        self.shuffle = shuffle
        self.num_replicas = num_replicas
        self.num_parts = local_size
        self.rank = rank
        self.local_rank = local_rank
        self.epoch = 0
        # Per-replica sample count, rounded up so every replica gets the same
        # number of indices (dataset assumed constant-size).
        self.num_samples = int(math.ceil(len(self.dataset) * 1.0 / self.num_replicas))
        self.total_size = self.num_samples * self.num_replicas
        # Indices handled by one node (all its local processes together).
        self.total_size_parts = self.num_samples * self.num_replicas // self.num_parts

    def __iter__(self):
        if self.shuffle:
            # Seed with the epoch so every process draws the same permutation.
            g = torch.Generator()
            g.manual_seed(self.epoch)
            indices = torch.randperm(len(self.dataset), generator=g).tolist()
        else:
            indices = torch.arange(len(self.dataset)).tolist()
        # Keep only the indices assigned to this node.
        indices = [i for i in indices if i % self.num_parts == self.local_rank]
        # Pad by wrapping around so every node yields exactly total_size_parts.
        indices += indices[:(self.total_size_parts - len(indices))]
        assert len(indices) == self.total_size_parts
        # Stride across this node's replicas: start at the node-local replica
        # offset, step by replicas-per-node.
        indices = indices[self.rank // self.num_parts:self.total_size_parts:self.num_replicas // self.num_parts]
        assert len(indices) == self.num_samples
        return iter(indices)

    def __len__(self):
        return self.num_samples

    def set_epoch(self, epoch):
        # Call before each epoch so shuffling differs between epochs.
        self.epoch = epoch
Sampler that restricts data loading to a subset of the dataset. It is especially useful in conjunction with :class:`torch.nn.parallel.DistributedDataParallel`. In such case, each process can pass a DistributedSampler instance as a DataLoader sampler, and load a subset of the original dataset that is exclusive to it. .. note:: Dataset is assumed to be of constant size. Arguments: dataset: Dataset used for sampling. num_replicas (optional): Number of processes participating in distributed training. rank (optional): Rank of the current process within num_replicas.
62598fa4442bda511e95c30c
@dataclass
class BufferedByteReceiveStream(ByteReceiveStream):
    """Wraps any bytes-based receive stream and uses a buffer to provide
    sophisticated receiving capabilities in the form of a byte stream
    (exact-length reads and delimiter-based reads).
    """

    receive_stream: AnyByteReceiveStream
    # Bytes received from the wrapped stream but not yet handed to a caller.
    _buffer: bytearray = field(init=False, default_factory=bytearray)
    _closed: bool = field(init=False, default=False)

    async def aclose(self) -> None:
        await self.receive_stream.aclose()
        self._closed = True

    @property
    def buffer(self) -> bytes:
        """Snapshot (copy) of the currently buffered bytes."""
        return bytes(self._buffer)

    @property
    def extra_attributes(self):
        # Delegate attribute introspection to the wrapped stream.
        return self.receive_stream.extra_attributes

    async def receive(self, max_bytes: int = 65536) -> bytes:
        """Return up to max_bytes, draining the buffer before the stream."""
        if self._closed:
            raise ClosedResourceError
        if self._buffer:
            # Serve buffered data first; never mix it with a fresh read.
            chunk = bytes(self._buffer[:max_bytes])
            del self._buffer[:max_bytes]
            return chunk
        elif isinstance(self.receive_stream, ByteReceiveStream):
            # Byte streams honor max_bytes themselves.
            return await self.receive_stream.receive(max_bytes)
        else:
            # Chunk-oriented streams may over-deliver; stash the excess.
            chunk = await self.receive_stream.receive()
            if len(chunk) > max_bytes:
                self._buffer.extend(chunk[max_bytes:])
                return chunk[:max_bytes]
            else:
                return chunk

    async def receive_exactly(self, nbytes: int) -> bytes:
        """Accumulate exactly nbytes; raise IncompleteRead on early EOF."""
        while True:
            remaining = nbytes - len(self._buffer)
            if remaining <= 0:
                retval = self._buffer[:nbytes]
                del self._buffer[:nbytes]
                return bytes(retval)
            try:
                if isinstance(self.receive_stream, ByteReceiveStream):
                    chunk = await self.receive_stream.receive(remaining)
                else:
                    chunk = await self.receive_stream.receive()
            except EndOfStream as exc:
                raise IncompleteRead from exc
            self._buffer.extend(chunk)

    async def receive_until(self, delimiter: bytes, max_bytes: int) -> bytes:
        """Read until delimiter (excluded from the result); DelimiterNotFound
        if max_bytes accumulate first, IncompleteRead on EOF."""
        delimiter_size = len(delimiter)
        offset = 0
        while True:
            index = self._buffer.find(delimiter, offset)
            if index >= 0:
                found = self._buffer[:index]
                # Drop the returned bytes plus the delimiter itself.
                del self._buffer[:index + len(delimiter):]
                return bytes(found)
            if len(self._buffer) >= max_bytes:
                raise DelimiterNotFound(max_bytes)
            try:
                data = await self.receive_stream.receive()
            except EndOfStream as exc:
                raise IncompleteRead from exc
            # Resume the next search where a delimiter split across the old
            # buffer tail and the new data could begin (computed before the
            # extend on purpose).
            offset = max(len(self._buffer) - delimiter_size + 1, 0)
            self._buffer.extend(data)
Wraps any bytes-based receive stream and uses a buffer to provide sophisticated receiving capabilities in the form of a byte stream.
62598fa499cbb53fe6830d8b
class PoiExtHotelSkuResponse(object):
    """Hotel SKU response model.

    NOTE: This class is auto generated by the swagger code generator
    program. Do not edit the class manually.
    """

    # attribute name -> swagger type, used by to_dict() for recursion.
    swagger_types = {
        'error_code': 'ErrorCode',
        'description': 'Description',
        'data': 'list[PoiExtHotelSkuResponseData]'
    }

    # attribute name -> JSON key.
    attribute_map = {
        'error_code': 'error_code',
        'description': 'description',
        'data': 'data'
    }

    def __init__(self, error_code=None, description=None, data=None):
        self._error_code = None
        self._description = None
        self._data = None
        self.discriminator = None
        # Assignments go through the property setters, so None here raises.
        self.error_code = error_code
        self.description = description
        self.data = data

    @property
    def error_code(self):
        return self._error_code

    @error_code.setter
    def error_code(self, error_code):
        # Required field: reject None.
        if error_code is None:
            raise ValueError("Invalid value for `error_code`, must not be `None`")
        self._error_code = error_code

    @property
    def description(self):
        return self._description

    @description.setter
    def description(self, description):
        # Required field: reject None.
        if description is None:
            raise ValueError("Invalid value for `description`, must not be `None`")
        self._description = description

    @property
    def data(self):
        return self._data

    @data.setter
    def data(self, data):
        # Required field: reject None.
        if data is None:
            raise ValueError("Invalid value for `data`, must not be `None`")
        self._data = data

    def to_dict(self):
        """Return the model's properties as a dict, recursing into models."""
        result = {}
        for attr, _ in six.iteritems(self.swagger_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value
        if issubclass(PoiExtHotelSkuResponse, dict):
            for key, value in self.items():
                result[key] = value
        return result

    def to_str(self):
        """Return the string representation of the model."""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        return self.to_str()

    def __eq__(self, other):
        # Equal iff same model type with identical attribute dicts.
        if not isinstance(other, PoiExtHotelSkuResponse):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not self == other
NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually.
62598fa48e7ae83300ee8f58
class ChargeMethodUnavailableError(OpenFFToolkitException):
    """Raised when a toolkit does not support the requested
    partial_charge_method combination."""
    pass
A toolkit does not support the requested partial_charge_method combination
62598fa4eab8aa0e5d30bc40
class Requestor(object):
    """Handle COVE API requests.

    Keyword arguments:
    `api_app_id` -- your COVE API app id
    `api_app_secret` -- your COVE API secret key
    `endpoint` -- endpoint of COVE API request
    `api_host` -- API host (defaults to COVEAPI_HOST)
    """

    def __init__(self, api_app_id, api_app_secret, endpoint, api_host=COVEAPI_HOST):
        self.api_app_id = api_app_id
        self.api_app_secret = api_app_secret
        self.endpoint = endpoint
        self.api_host = api_host

    def get(self, resource, **params):
        """Fetch one resource by numeric id, absolute URL, or API path."""
        # Fixed: was `type(resource) == int`; isinstance is the idiomatic
        # check and also accepts int subclasses.
        if isinstance(resource, int):
            endpoint = '%s%s/' % (self.endpoint, resource)
        else:
            if resource.startswith('http://'):
                endpoint = resource
            else:
                endpoint = '%s%s' % (self.api_host, resource)
        return self._make_request(endpoint, params)

    def filter(self, **params):
        """Query the collection endpoint with filter parameters."""
        return self._make_request(self.endpoint, params)

    def deleted_since(self, **params):
        """Query the endpoint for items deleted since a given point."""
        return self._make_request(self.endpoint, params)

    def _make_request(self, endpoint, params=None):
        """Sign and issue a GET request; return the decoded JSON body."""
        if not params:
            params = {}
        query = endpoint
        if params:
            # Fixed: `params.items(); params.sort()` only works on Python 2
            # (dict views have no .sort()); sorted() keeps the deterministic
            # parameter order the request signature depends on.
            query = '%s?%s' % (query, urllib.urlencode(sorted(params.items())))
        request = urllib2.Request(query)
        auth = PBSAuthorization(self.api_app_id, self.api_app_secret)
        signed_request = auth.sign_request(request)
        response = urllib2.urlopen(signed_request)
        try:
            return json.loads(response.read())
        finally:
            # Fixed: the original never closed the HTTP response.
            response.close()
Handle API requests. Keyword arguments: `api_app_id` -- your COVE API app id `api_app_secret` -- your COVE API secret key `endpoint` -- endpoint of COVE API request Returns: `coveapi.connection.Requestor` instance
62598fa47d847024c075c27d
class GcloudDeploymentManager(GcloudService):
    """A class that wraps the Deployment Manager service for the Hastexo
    XBlock."""

    # Service identifier handed to the gcloud service layer.
    service_name = "deploymentmanager"
    # API version used when the caller does not specify one.
    default_api_version = "v2"
A class that wraps the Deployment Manager service for the Hastexo XBlock.
62598fa4097d151d1a2c0ede
class Serializer(base.Serializer): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> self.options = None <NEW_LINE> self.stream = None <NEW_LINE> self.fields = None <NEW_LINE> self.excludes = None <NEW_LINE> self.relations = None <NEW_LINE> self.extras = None <NEW_LINE> self.use_natural_keys = None <NEW_LINE> self.use_choices = None <NEW_LINE> super(Serializer, self).__init__(*args, **kwargs) <NEW_LINE> <DEDENT> def serialize(self, queryset, **options): <NEW_LINE> <INDENT> self.options = options <NEW_LINE> self.stream = options.pop("stream", StringIO()) <NEW_LINE> self.fields = options.pop("fields", []) <NEW_LINE> self.excludes = options.pop("excludes", []) <NEW_LINE> self.relations = options.pop("relations", []) <NEW_LINE> self.extras = options.pop("extras", []) <NEW_LINE> self.use_choices = options.pop("choices", False) <NEW_LINE> self.use_natural_keys = options.pop("use_natural_keys", False) <NEW_LINE> already_processed = [] <NEW_LINE> self.start_serialization() <NEW_LINE> for obj in queryset: <NEW_LINE> <INDENT> self.start_object(obj) <NEW_LINE> concrete_class = obj._meta.proxy_for_model or obj.__class__ <NEW_LINE> for field in concrete_class._meta.local_fields: <NEW_LINE> <INDENT> attname = field.attname <NEW_LINE> already_processed.append(attname) <NEW_LINE> if field.serialize: <NEW_LINE> <INDENT> if field.rel is None: <NEW_LINE> <INDENT> if attname not in self.excludes: <NEW_LINE> <INDENT> if not self.fields or attname in self.fields: <NEW_LINE> <INDENT> self.handle_field(obj, field) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> if attname[:-3] not in self.excludes: <NEW_LINE> <INDENT> if not self.fields or attname[:-3] in self.fields: <NEW_LINE> <INDENT> self.handle_fk_field(obj, field) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> <DEDENT> for field in concrete_class._meta.many_to_many: <NEW_LINE> <INDENT> already_processed.append(field.attname) <NEW_LINE> if field.serialize: <NEW_LINE> <INDENT> if 
field.attname not in self.excludes: <NEW_LINE> <INDENT> if not self.fields or field.attname in self.fields: <NEW_LINE> <INDENT> self.handle_m2m_field(obj, field) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> for fname in self.relations: <NEW_LINE> <INDENT> if fname in already_processed: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> field = getattr(obj, fname) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> if field is None: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> field._priv_name = fname <NEW_LINE> if (field.__module__ + '.' + field.__class__.__name__ == "django.db.models.fields.related.RelatedManager"): <NEW_LINE> <INDENT> if len(field.get_query_set()) == 0: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> self.handle_m2m_field(obj, field) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.handle_fk_field(obj, field) <NEW_LINE> <DEDENT> <DEDENT> for extra in self.extras: <NEW_LINE> <INDENT> self.handle_extra_field(obj, extra) <NEW_LINE> <DEDENT> self.end_object(obj) <NEW_LINE> <DEDENT> self.end_serialization() <NEW_LINE> return self.getvalue() <NEW_LINE> <DEDENT> def handle_extra_field(self, obj, extra): <NEW_LINE> <INDENT> raise NotImplementedError
Serializer for Django models inspired by Ruby on Rails serializer.
62598fa41b99ca400228f48b
class ScanTemplateDatabase(object): <NEW_LINE> <INDENT> swagger_types = { 'db2': 'str', 'links': 'list[Link]', 'oracle': 'list[str]', 'postgres': 'str' } <NEW_LINE> attribute_map = { 'db2': 'db2', 'links': 'links', 'oracle': 'oracle', 'postgres': 'postgres' } <NEW_LINE> def __init__(self, db2=None, links=None, oracle=None, postgres=None): <NEW_LINE> <INDENT> self._db2 = None <NEW_LINE> self._links = None <NEW_LINE> self._oracle = None <NEW_LINE> self._postgres = None <NEW_LINE> self.discriminator = None <NEW_LINE> if db2 is not None: <NEW_LINE> <INDENT> self.db2 = db2 <NEW_LINE> <DEDENT> if links is not None: <NEW_LINE> <INDENT> self.links = links <NEW_LINE> <DEDENT> if oracle is not None: <NEW_LINE> <INDENT> self.oracle = oracle <NEW_LINE> <DEDENT> if postgres is not None: <NEW_LINE> <INDENT> self.postgres = postgres <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def db2(self): <NEW_LINE> <INDENT> return self._db2 <NEW_LINE> <DEDENT> @db2.setter <NEW_LINE> def db2(self, db2): <NEW_LINE> <INDENT> self._db2 = db2 <NEW_LINE> <DEDENT> @property <NEW_LINE> def links(self): <NEW_LINE> <INDENT> return self._links <NEW_LINE> <DEDENT> @links.setter <NEW_LINE> def links(self, links): <NEW_LINE> <INDENT> self._links = links <NEW_LINE> <DEDENT> @property <NEW_LINE> def oracle(self): <NEW_LINE> <INDENT> return self._oracle <NEW_LINE> <DEDENT> @oracle.setter <NEW_LINE> def oracle(self, oracle): <NEW_LINE> <INDENT> self._oracle = oracle <NEW_LINE> <DEDENT> @property <NEW_LINE> def postgres(self): <NEW_LINE> <INDENT> return self._postgres <NEW_LINE> <DEDENT> @postgres.setter <NEW_LINE> def postgres(self, postgres): <NEW_LINE> <INDENT> self._postgres = postgres <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in six.iteritems(self.swagger_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, 
value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> if issubclass(ScanTemplateDatabase, dict): <NEW_LINE> <INDENT> for key, value in self.items(): <NEW_LINE> <INDENT> result[key] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pprint.pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, ScanTemplateDatabase): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other
NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually.
62598fa4a79ad16197769f19
class Task(object): <NEW_LINE> <INDENT> def __init__(self, taskfile): <NEW_LINE> <INDENT> self.name = taskfile <NEW_LINE> with open(taskfile) as t: <NEW_LINE> <INDENT> msg, delay = t.read().split(";") <NEW_LINE> <DEDENT> self.msg = msg <NEW_LINE> self.delay = int(delay) <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> print(self.msg) <NEW_LINE> time.sleep(self.delay)
read a task file with the following content: Hello Parallel Tasks;5
62598fa467a9b606de545e83
class itkInPlaceImageFilterIRGBAUS2IRGBAUC2(itkImageToImageFilterAPython.itkImageToImageFilterIRGBAUS2IRGBAUC2): <NEW_LINE> <INDENT> thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag') <NEW_LINE> def __init__(self, *args, **kwargs): raise AttributeError("No constructor defined") <NEW_LINE> __repr__ = _swig_repr <NEW_LINE> InputImageDimension = _itkInPlaceImageFilterAPython.itkInPlaceImageFilterIRGBAUS2IRGBAUC2_InputImageDimension <NEW_LINE> OutputImageDimension = _itkInPlaceImageFilterAPython.itkInPlaceImageFilterIRGBAUS2IRGBAUC2_OutputImageDimension <NEW_LINE> def SetInPlace(self, *args): <NEW_LINE> <INDENT> return _itkInPlaceImageFilterAPython.itkInPlaceImageFilterIRGBAUS2IRGBAUC2_SetInPlace(self, *args) <NEW_LINE> <DEDENT> def GetInPlace(self): <NEW_LINE> <INDENT> return _itkInPlaceImageFilterAPython.itkInPlaceImageFilterIRGBAUS2IRGBAUC2_GetInPlace(self) <NEW_LINE> <DEDENT> def InPlaceOn(self): <NEW_LINE> <INDENT> return _itkInPlaceImageFilterAPython.itkInPlaceImageFilterIRGBAUS2IRGBAUC2_InPlaceOn(self) <NEW_LINE> <DEDENT> def InPlaceOff(self): <NEW_LINE> <INDENT> return _itkInPlaceImageFilterAPython.itkInPlaceImageFilterIRGBAUS2IRGBAUC2_InPlaceOff(self) <NEW_LINE> <DEDENT> def CanRunInPlace(self): <NEW_LINE> <INDENT> return _itkInPlaceImageFilterAPython.itkInPlaceImageFilterIRGBAUS2IRGBAUC2_CanRunInPlace(self) <NEW_LINE> <DEDENT> __swig_destroy__ = _itkInPlaceImageFilterAPython.delete_itkInPlaceImageFilterIRGBAUS2IRGBAUC2 <NEW_LINE> def cast(*args): <NEW_LINE> <INDENT> return _itkInPlaceImageFilterAPython.itkInPlaceImageFilterIRGBAUS2IRGBAUC2_cast(*args) <NEW_LINE> <DEDENT> cast = staticmethod(cast) <NEW_LINE> def GetPointer(self): <NEW_LINE> <INDENT> return _itkInPlaceImageFilterAPython.itkInPlaceImageFilterIRGBAUS2IRGBAUC2_GetPointer(self) <NEW_LINE> <DEDENT> def New(*args, **kargs): <NEW_LINE> <INDENT> obj = itkInPlaceImageFilterIRGBAUS2IRGBAUC2.__New_orig__() <NEW_LINE> import itkTemplate <NEW_LINE> 
itkTemplate.New(obj, *args, **kargs) <NEW_LINE> return obj <NEW_LINE> <DEDENT> New = staticmethod(New)
Proxy of C++ itkInPlaceImageFilterIRGBAUS2IRGBAUC2 class
62598fa4009cb60464d013dc
class AkismetValidatorView(BrowserView): <NEW_LINE> <INDENT> def __init__(self, context, request): <NEW_LINE> <INDENT> self.context = context <NEW_LINE> self.request = request <NEW_LINE> registry = queryUtility(IRegistry) <NEW_LINE> self.settings = registry.forInterface(IAkismetSettings) <NEW_LINE> <DEDENT> def verify(self, input=None): <NEW_LINE> <INDENT> if self.settings.akismet_key and self.settings.akismet_key_site: <NEW_LINE> <INDENT> request = self.request <NEW_LINE> data = request.form <NEW_LINE> api = Akismet(self.settings.akismet_key, self.settings.akismet_key_site) <NEW_LINE> d = {} <NEW_LINE> d['user_ip'] = request.getClientAddr() <NEW_LINE> d['user_agent'] = request.get('HTTP_USER_AGENT', '') <NEW_LINE> d['referrer'] = request.get('HTTP_REFERER', '') <NEW_LINE> d['comment_author'] = data['form.widgets.author_name'] <NEW_LINE> d['comment_author_email'] = data['form.widgets.author_email'] <NEW_LINE> comment = data['form.widgets.text'] <NEW_LINE> if isinstance(comment, unicode): <NEW_LINE> <INDENT> comment = comment.encode("utf-8") <NEW_LINE> <DEDENT> if isinstance(d['comment_author'], unicode): <NEW_LINE> <INDENT> d['comment_author'] = d['comment_author'].encode("utf-8") <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> if not api.verify_key(): <NEW_LINE> <INDENT> self.context.plone_log("collective.akismet was not able to verify the Akismet key. Please check your settings.") <NEW_LINE> raise <NEW_LINE> <DEDENT> if api.comment_check(comment, d): <NEW_LINE> <INDENT> self.context.plone_log("Akismet thinks this comment is spam: %s" % comment) <NEW_LINE> return False <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> except APIKeyError: <NEW_LINE> <INDENT> self.context.plone_log("collective.akismet was not able to find a valid Akismet key. 
Please check your settings.") <NEW_LINE> pass <NEW_LINE> <DEDENT> except (HTTPError, URLError): <NEW_LINE> <INDENT> self.context.plone_log("Akismet web service temporarily unavailable") <NEW_LINE> return True <NEW_LINE> <DEDENT> except AkismetError: <NEW_LINE> <INDENT> raise <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> self.context.plone_log("collective.akismet raised an unexpected error.")
Akismet validator view
62598fa416aa5153ce4003b9
class EmailAddress(Container): <NEW_LINE> <INDENT> def __init__(self, username, hostname, fuzz_delim=True, fuzzable=True, name=None): <NEW_LINE> <INDENT> fields = [ _to_string_field(_merge(name, 'username'), username, fuzzable=True), Delimiter('@', fuzzable=fuzz_delim), _to_string_field(_merge(name, 'hostname'), hostname, fuzzable=True), ] <NEW_LINE> super(EmailAddress, self).__init__(fields=fields, fuzzable=fuzzable, name=name) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_string(cls, the_str, fuzz_delims=True, fuzzable=True, name=None): <NEW_LINE> <INDENT> email = the_str <NEW_LINE> if email.count('@') != 1: <NEW_LINE> <INDENT> raise KittyException('invalid email address: %s' % email) <NEW_LINE> <DEDENT> username = email.split('@')[0] <NEW_LINE> host = email.split('@')[1] <NEW_LINE> hostname = HostPort(host) <NEW_LINE> return EmailAddress(username=username, hostname=hostname, fuzz_delim=fuzz_delims, fuzzable=fuzzable, name=name)
Container to fuzz email address
62598fa410dbd63aa1c70a68
class UserProfile(AbstractBaseUser, PermissionsMixin): <NEW_LINE> <INDENT> email = models.EmailField(max_length=255, unique=True) <NEW_LINE> name = models.CharField(max_length=255) <NEW_LINE> s_active = models.BooleanField(default=True) <NEW_LINE> is_staff = models.BooleanField(default=False) <NEW_LINE> objects = UserProfileManager() <NEW_LINE> USERNAME_FIELD = 'email' <NEW_LINE> REQUIRED_FIELDS = ['name'] <NEW_LINE> def get_full_name(self): <NEW_LINE> <INDENT> return self.name <NEW_LINE> <DEDENT> def get_short_name(self): <NEW_LINE> <INDENT> return self.name <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.email
Represents a "user profile" inside our system.
62598fa407f4c71912baf2fb
class document: <NEW_LINE> <INDENT> def __init__(self, pages=None): <NEW_LINE> <INDENT> if pages is None: <NEW_LINE> <INDENT> self.pages = [] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.pages = pages <NEW_LINE> <DEDENT> <DEDENT> def append(self, page): <NEW_LINE> <INDENT> self.pages.append(page) <NEW_LINE> <DEDENT> def writeEPSfile(self, file=None, **kwargs): <NEW_LINE> <INDENT> pswriter.EPSwriter(self, _outputstream(file, "eps"), **kwargs) <NEW_LINE> <DEDENT> def writePSfile(self, file=None, **kwargs): <NEW_LINE> <INDENT> pswriter.PSwriter(self, _outputstream(file, "ps"), **kwargs) <NEW_LINE> <DEDENT> def writePDFfile(self, file=None, **kwargs): <NEW_LINE> <INDENT> pdfwriter.PDFwriter(self, _outputstream(file, "pdf"), **kwargs) <NEW_LINE> <DEDENT> def writetofile(self, filename, **kwargs): <NEW_LINE> <INDENT> if filename.endswith(".eps"): <NEW_LINE> <INDENT> self.writeEPSfile(open(filename, "wb"), **kwargs) <NEW_LINE> <DEDENT> elif filename.endswith(".ps"): <NEW_LINE> <INDENT> self.writePSfile(open(filename, "wb"), **kwargs) <NEW_LINE> <DEDENT> elif filename.endswith(".pdf"): <NEW_LINE> <INDENT> self.writePDFfile(open(filename, "wb"), **kwargs) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError("unknown file extension")
holds a collection of page instances which are output as pages of a document
62598fa42c8b7c6e89bd367d
class StoreOutput(base_handler.PipelineBase): <NEW_LINE> <INDENT> def run(self, mr_type, encoded_key, output): <NEW_LINE> <INDENT> logging.debug("output is %s" % str(output)) <NEW_LINE> key = db.Key(encoded=encoded_key) <NEW_LINE> m = FileMetadata.get(key) <NEW_LINE> url_path = [] <NEW_LINE> for o in output: <NEW_LINE> <INDENT> blobstore_filename = "/gs" + o <NEW_LINE> blobstore_gs_key = blobstore.create_gs_key(blobstore_filename) <NEW_LINE> curr_path = "/blobstore/" + blobstore_gs_key <NEW_LINE> url_path += [curr_path] <NEW_LINE> <DEDENT> if mr_type == "PurchaseCount": <NEW_LINE> <INDENT> m.purchasecount_link = url_path <NEW_LINE> <DEDENT> elif mr_type == "RevenuePerSong": <NEW_LINE> <INDENT> m.revenuepersong_link = url_path <NEW_LINE> <DEDENT> elif mr_type == "ArtistSongCount": <NEW_LINE> <INDENT> m.artistsongcount_link = url_path <NEW_LINE> <DEDENT> elif mr_type == "RevenuePerArtist": <NEW_LINE> <INDENT> m.revenueperartist_link = url_path <NEW_LINE> <DEDENT> elif mr_type == "PurchaseCountRock": <NEW_LINE> <INDENT> m.purchasecountrock_link = url_path <NEW_LINE> <DEDENT> elif mr_type == "RevenuePerSongRock": <NEW_LINE> <INDENT> m.revenuepersongrock_link = url_path <NEW_LINE> <DEDENT> elif mr_type == "ArtistSongCountRock": <NEW_LINE> <INDENT> m.artistsongcountrock_link = url_path <NEW_LINE> <DEDENT> elif mr_type == "RevenuePerArtistRock": <NEW_LINE> <INDENT> m.revenueperartistrock_link = url_path <NEW_LINE> <DEDENT> m.put()
A pipeline to store the result of the MapReduce job in the database. Args: mr_type: the type of mapreduce job run (e.g., WordCount, Index) encoded_key: the DB key corresponding to the metadata of this job output: the gcs file path where the output of the job is stored
62598fa40a50d4780f705294
class PowerSupplyListField(base.ListField): <NEW_LINE> <INDENT> firmware_version = base.Field('FirmwareVersion') <NEW_LINE> identity = base.Field('MemberId') <NEW_LINE> indicator_led = base.MappedField('IndicatorLed', res_cons.IndicatorLED) <NEW_LINE> input_ranges = InputRangeListField('InputRanges', default=[]) <NEW_LINE> last_power_output_watts = base.Field('LastPowerOutputWatts', adapter=utils.int_or_none) <NEW_LINE> line_input_voltage = base.Field('LineInputVoltage', adapter=utils.int_or_none) <NEW_LINE> line_input_voltage_type = base.MappedField('LineInputVoltageType', pow_cons.LineInputVoltageType) <NEW_LINE> manufacturer = base.Field('Manufacturer') <NEW_LINE> model = base.Field('Model') <NEW_LINE> name = base.Field('Name') <NEW_LINE> part_number = base.Field('PartNumber') <NEW_LINE> power_capacity_watts = base.Field('PowerCapacityWatts', adapter=utils.int_or_none) <NEW_LINE> power_supply_type = base.MappedField('PowerSupplyType', pow_cons.PowerSupplyType) <NEW_LINE> serial_number = base.Field('SerialNumber') <NEW_LINE> spare_part_number = base.Field('SparePartNumber') <NEW_LINE> status = common.StatusField('Status')
The power supplies associated with this Power resource
62598fa44a966d76dd5eed9a
class ReviewDocumentBackend(object): <NEW_LINE> <INDENT> def authenticate(self, username=None, password=None): <NEW_LINE> <INDENT> user = None <NEW_LINE> try: <NEW_LINE> <INDENT> review = ReviewDocument.objects.get(slug=username) <NEW_LINE> pk = review.get_auth(auth_key=password) <NEW_LINE> if pk is None: <NEW_LINE> <INDENT> logger.error('ReviewDocument not found for: %s %s' % (review, password,)) <NEW_LINE> raise ObjectDoesNotExist <NEW_LINE> <DEDENT> user = User.objects.get(pk=pk) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> logger.error('ReviewDocument.auth does not exist: %s reason: %s' % (username, e)) <NEW_LINE> <DEDENT> return user <NEW_LINE> <DEDENT> def get_user(self, user_id): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return User.objects.get(pk=user_id) <NEW_LINE> <DEDENT> except User.DoesNotExist: <NEW_LINE> <INDENT> return None
Authenticated based on the url /review/:slug/:unique_key where the slug gets the ReviewDocument and the :unique_key provides a lookup in the ReviewDocument.data['auth'] which will provide the user pk
62598fa4851cf427c66b8181
class TestConfigCASigningProfilesTls(): <NEW_LINE> <INDENT> def test_config_ca_signing_profiles_tls_serialization(self): <NEW_LINE> <INDENT> config_ca_signing_profiles_tls_model_json = {} <NEW_LINE> config_ca_signing_profiles_tls_model_json['usage'] = ['cert sign'] <NEW_LINE> config_ca_signing_profiles_tls_model_json['expiry'] = '43800h' <NEW_LINE> config_ca_signing_profiles_tls_model = ConfigCASigningProfilesTls.from_dict(config_ca_signing_profiles_tls_model_json) <NEW_LINE> assert config_ca_signing_profiles_tls_model != False <NEW_LINE> config_ca_signing_profiles_tls_model_dict = ConfigCASigningProfilesTls.from_dict(config_ca_signing_profiles_tls_model_json).__dict__ <NEW_LINE> config_ca_signing_profiles_tls_model2 = ConfigCASigningProfilesTls(**config_ca_signing_profiles_tls_model_dict) <NEW_LINE> assert config_ca_signing_profiles_tls_model == config_ca_signing_profiles_tls_model2 <NEW_LINE> config_ca_signing_profiles_tls_model_json2 = config_ca_signing_profiles_tls_model.to_dict() <NEW_LINE> assert config_ca_signing_profiles_tls_model_json2 == config_ca_signing_profiles_tls_model_json
Test Class for ConfigCASigningProfilesTls
62598fa4be8e80087fbbef1a
class DraftPaymentEntry(BunqModel): <NEW_LINE> <INDENT> _id_ = None <NEW_LINE> _amount = None <NEW_LINE> _alias = None <NEW_LINE> _counterparty_alias = None <NEW_LINE> _description = None <NEW_LINE> _merchant_reference = None <NEW_LINE> _type_ = None <NEW_LINE> _attachment = None <NEW_LINE> _amount_field_for_request = None <NEW_LINE> _counterparty_alias_field_for_request = None <NEW_LINE> _description_field_for_request = None <NEW_LINE> _merchant_reference_field_for_request = None <NEW_LINE> _attachment_field_for_request = None <NEW_LINE> def __init__(self, amount=None, counterparty_alias=None, description=None, merchant_reference=None, attachment=None): <NEW_LINE> <INDENT> self._amount_field_for_request = amount <NEW_LINE> self._counterparty_alias_field_for_request = counterparty_alias <NEW_LINE> self._description_field_for_request = description <NEW_LINE> self._merchant_reference_field_for_request = merchant_reference <NEW_LINE> self._attachment_field_for_request = attachment <NEW_LINE> <DEDENT> @property <NEW_LINE> def id_(self): <NEW_LINE> <INDENT> return self._id_ <NEW_LINE> <DEDENT> @property <NEW_LINE> def amount(self): <NEW_LINE> <INDENT> return self._amount <NEW_LINE> <DEDENT> @property <NEW_LINE> def alias(self): <NEW_LINE> <INDENT> return self._alias <NEW_LINE> <DEDENT> @property <NEW_LINE> def counterparty_alias(self): <NEW_LINE> <INDENT> return self._counterparty_alias <NEW_LINE> <DEDENT> @property <NEW_LINE> def description(self): <NEW_LINE> <INDENT> return self._description <NEW_LINE> <DEDENT> @property <NEW_LINE> def merchant_reference(self): <NEW_LINE> <INDENT> return self._merchant_reference <NEW_LINE> <DEDENT> @property <NEW_LINE> def type_(self): <NEW_LINE> <INDENT> return self._type_ <NEW_LINE> <DEDENT> @property <NEW_LINE> def attachment(self): <NEW_LINE> <INDENT> return self._attachment <NEW_LINE> <DEDENT> def is_all_field_none(self): <NEW_LINE> <INDENT> if self._id_ is not None: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> if 
self._amount is not None: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> if self._alias is not None: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> if self._counterparty_alias is not None: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> if self._description is not None: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> if self._merchant_reference is not None: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> if self._type_ is not None: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> if self._attachment is not None: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return True <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def from_json(json_str): <NEW_LINE> <INDENT> return converter.json_to_class(DraftPaymentEntry, json_str)
:param _amount: The amount of the payment. :type _amount: Amount :param _counterparty_alias: The LabelMonetaryAccount containing the public information of the other (counterparty) side of the DraftPayment. :type _counterparty_alias: MonetaryAccountReference :param _description: The description for the DraftPayment. Maximum 140 characters for DraftPayments to external IBANs, 9000 characters for DraftPayments to only other bunq MonetaryAccounts. :type _description: str :param _merchant_reference: Optional data to be included with the Payment specific to the merchant. :type _merchant_reference: str :param _attachment: The Attachments attached to the DraftPayment. :type _attachment: list[AttachmentMonetaryAccountPayment] :param _id_: The id of the draft payment entry. :type _id_: int :param _alias: The LabelMonetaryAccount containing the public information of 'this' (party) side of the DraftPayment. :type _alias: MonetaryAccountReference :param _type_: The type of the draft payment entry. :type _type_: str
62598fa44f88993c371f0466
class _stream_info: <NEW_LINE> <INDENT> def __init__(self, infos: Iterable[_pcap.packet_info]) -> None: <NEW_LINE> <INDENT> self.total_packets = 0 <NEW_LINE> self.encapsulation_protocol = set() <NEW_LINE> self.timestamp_min = float('inf') <NEW_LINE> self.timestamp_max = float('-inf') <NEW_LINE> self.udp_streams: Dict[_UDPStreamKey, _UDPStreamInfo] = defaultdict(_UDPStreamInfo) <NEW_LINE> for i in infos: <NEW_LINE> <INDENT> self.total_packets += 1 <NEW_LINE> self.encapsulation_protocol.add(i.encapsulation_protocol) <NEW_LINE> self.timestamp_min = min(self.timestamp_min, i.timestamp) <NEW_LINE> self.timestamp_max = max(self.timestamp_max, i.timestamp) <NEW_LINE> val = self.udp_streams[_UDPStreamKey(i.src_ip, i.dst_ip, i.src_port, i.dst_port)] <NEW_LINE> val.count += 1 <NEW_LINE> val.payload_size.add(i.payload_size) <NEW_LINE> val.fragments_in_packet.add(i.fragments_in_packet) <NEW_LINE> val.ip_version.add(i.ip_version)
Gather some useful info about UDP data in a pcap.
62598fa4a8370b77170f0292
class EclCase: <NEW_LINE> <INDENT> def __init__(self , input_case): <NEW_LINE> <INDENT> warnings.warn("The EclCase class is deprecated - instantiate the EclSum / EclGrid / ... classes directly." , DeprecationWarning) <NEW_LINE> self.case = input_case <NEW_LINE> (path , tmp) = os.path.split( input_case ) <NEW_LINE> if path: <NEW_LINE> <INDENT> self.__path = os.path.abspath( path ) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.__path = os.getcwd() <NEW_LINE> <DEDENT> (self.__base , self.ext) = os.path.splitext( tmp ) <NEW_LINE> self.__sum = None <NEW_LINE> self.__grid = None <NEW_LINE> self.__data_file = None <NEW_LINE> self.__rft = None <NEW_LINE> <DEDENT> @property <NEW_LINE> def datafile( self ): <NEW_LINE> <INDENT> if not self.__data_file: <NEW_LINE> <INDENT> self.__data_file = "%s/%s.DATA" % ( self.__path , self.__base ) <NEW_LINE> <DEDENT> return self.__data_file <NEW_LINE> <DEDENT> @property <NEW_LINE> def sum( self ): <NEW_LINE> <INDENT> if not self.__sum: <NEW_LINE> <INDENT> self.__sum = ecl_sum.EclSum( self.case ) <NEW_LINE> <DEDENT> return self.__sum <NEW_LINE> <DEDENT> @property <NEW_LINE> def grid( self ): <NEW_LINE> <INDENT> if not self.__grid: <NEW_LINE> <INDENT> self.__grid = ecl_grid.EclGrid( self.case ) <NEW_LINE> <DEDENT> return self.__grid <NEW_LINE> <DEDENT> @property <NEW_LINE> def rft_file( self ): <NEW_LINE> <INDENT> if not self.__rft: <NEW_LINE> <INDENT> self.__rft = ecl_rft.EclRFTFile( self.case ) <NEW_LINE> <DEDENT> return self.__rft <NEW_LINE> <DEDENT> @property <NEW_LINE> def base( self ): <NEW_LINE> <INDENT> return self.__base <NEW_LINE> <DEDENT> @property <NEW_LINE> def path( self ): <NEW_LINE> <INDENT> return self.__path <NEW_LINE> <DEDENT> def run( self , ecl_cmd = ecl_default.default.ecl_cmd , ecl_version = ecl_default.default.ecl_version , driver = None , driver_type = ecl_default.default.driver_type, driver_options = None, blocking = False ): <NEW_LINE> <INDENT> num_cpu = ecl_util.get_num_cpu( self.datafile ) <NEW_LINE> argv = 
[ecl_version , self.datafile , num_cpu] <NEW_LINE> if driver is None: <NEW_LINE> <INDENT> if driver_options is None: <NEW_LINE> <INDENT> driver_options = ecl_default.default.driver_options[ driver_type ] <NEW_LINE> <DEDENT> driver = queue_driver.Driver( driver_type , max_running = 0 , options = driver_options ) <NEW_LINE> <DEDENT> job = driver.submit( self.base , ecl_cmd , self.path , argv , blocking = blocking) <NEW_LINE> return job <NEW_LINE> <DEDENT> def submit( self , queue ): <NEW_LINE> <INDENT> queue.submit( self.datafile )
Small container for one ECLIPSE case. Mostly a wrapper around an ECLIPSE datafile, along with properties to load the corresponding summary, grid and rft files. In addition there are methods run() and submit() to run the ECLIPSE simulation.
62598fa401c39578d7f12c38
class TestJobCreateParameters(msrest.serialization.Model): <NEW_LINE> <INDENT> _attribute_map = { 'parameters': {'key': 'parameters', 'type': '{str}'}, 'run_on': {'key': 'runOn', 'type': 'str'}, } <NEW_LINE> def __init__( self, *, parameters: Optional[Dict[str, str]] = None, run_on: Optional[str] = None, **kwargs ): <NEW_LINE> <INDENT> super(TestJobCreateParameters, self).__init__(**kwargs) <NEW_LINE> self.parameters = parameters <NEW_LINE> self.run_on = run_on
The parameters supplied to the create test job operation. :param parameters: Gets or sets the parameters of the test job. :type parameters: dict[str, str] :param run_on: Gets or sets the runOn which specifies the group name where the job is to be executed. :type run_on: str
62598fa48da39b475be0309a
class Instruction51l(Instruction): <NEW_LINE> <INDENT> length = 10 <NEW_LINE> def __init__(self, cm, buff): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.cm = cm <NEW_LINE> self.OP, self.AA, self.BBBBBBBBBBBBBBBB = cm.packer["BBq"].unpack(buff[:self.length]) <NEW_LINE> <DEDENT> def get_output(self, idx=-1): <NEW_LINE> <INDENT> return "v{}, {}".format(self.AA, self.BBBBBBBBBBBBBBBB) <NEW_LINE> <DEDENT> def get_operands(self, idx=-1): <NEW_LINE> <INDENT> return [(Operand.REGISTER, self.AA), (Operand.LITERAL, self.BBBBBBBBBBBBBBBB)] <NEW_LINE> <DEDENT> def get_literals(self): <NEW_LINE> <INDENT> return [self.BBBBBBBBBBBBBBBB] <NEW_LINE> <DEDENT> def get_raw(self): <NEW_LINE> <INDENT> return self.cm.packer["BBq"].pack(self.OP, self.AA, self.BBBBBBBBBBBBBBBB)
This class represents all instructions which have the 51l format
62598fa43317a56b869be4a6
class KNeighborsCounterfactual(BaseCounterfactual): <NEW_LINE> <INDENT> def __init__(self, random_state=None): <NEW_LINE> <INDENT> self.random_state = random_state <NEW_LINE> <DEDENT> def fit(self, estimator): <NEW_LINE> <INDENT> if not isinstance(estimator, KNeighborsClassifier): <NEW_LINE> <INDENT> raise ValueError("not a valid estimator") <NEW_LINE> <DEDENT> check_is_fitted(estimator) <NEW_LINE> if estimator.metric != "euclidean": <NEW_LINE> <INDENT> raise ValueError( "only euclidean distance is supported, got %r" % estimator.metric ) <NEW_LINE> <DEDENT> x = estimator._fit_X <NEW_LINE> y = estimator._y <NEW_LINE> classes = estimator.classes_ <NEW_LINE> n_clusters = x.shape[0] // estimator.n_neighbors <NEW_LINE> kmeans = KMeans(n_clusters=n_clusters, random_state=self.random_state).fit(x) <NEW_LINE> n_classes = len(classes) <NEW_LINE> label_nn = {} <NEW_LINE> for cls in classes: <NEW_LINE> <INDENT> to_idx = np.nonzero(classes == cls)[0][0] <NEW_LINE> center_majority = np.zeros([kmeans.n_clusters, n_classes]) <NEW_LINE> for cluster_label, class_label in zip(kmeans.labels_, y): <NEW_LINE> <INDENT> center_majority[cluster_label, class_label] += 1 <NEW_LINE> <DEDENT> center_prob = center_majority / np.sum(center_majority, axis=1).reshape( -1, 1 ) <NEW_LINE> majority_class = center_prob[:, to_idx] > (1.0 / n_classes) <NEW_LINE> maximum_class = ( center_majority[:, to_idx] >= (estimator.n_neighbors // n_classes) + 1 ) <NEW_LINE> cluster_centers = kmeans.cluster_centers_ <NEW_LINE> majority_centers = cluster_centers[majority_class & maximum_class, :] <NEW_LINE> majority_center_nn = NearestNeighbors(n_neighbors=1, metric="euclidean") <NEW_LINE> if majority_centers.shape[0] > 0: <NEW_LINE> <INDENT> majority_center_nn.fit(majority_centers) <NEW_LINE> <DEDENT> label_nn[cls] = (majority_center_nn, majority_centers) <NEW_LINE> <DEDENT> self.explainer_ = label_nn <NEW_LINE> return self <NEW_LINE> <DEDENT> def transform(self, x, y): <NEW_LINE> <INDENT> check_is_fitted(self, 
["explainer_"]) <NEW_LINE> x_counter = x.copy() <NEW_LINE> labels = np.unique(y) <NEW_LINE> for label in labels: <NEW_LINE> <INDENT> label_indices = np.where(y == label)[0] <NEW_LINE> nn, mc = self.explainer_[label] <NEW_LINE> if mc.shape[0] > 0: <NEW_LINE> <INDENT> closest = nn.kneighbors(x[label_indices, :], return_distance=False) <NEW_LINE> x_counter[label_indices, :] = mc[closest[:, 0], :] <NEW_LINE> <DEDENT> <DEDENT> return x_counter
Fit a counterfactual explainer to a k-nearest neighbors classifier Attributes ---------- explainer_ : dict The explainer for each label References ---------- Karlsson, I., Rebane, J., Papapetrou, P., & Gionis, A. (2020). Locally and globally explainable time series tweaking. Knowledge and Information Systems, 62(5), 1671-1700.
62598fa4cc0a2c111447aec8
class MyConsole(code.InteractiveConsole): <NEW_LINE> <INDENT> lastCode = None <NEW_LINE> def getLastCode(self): <NEW_LINE> <INDENT> return self.lastCode <NEW_LINE> <DEDENT> def runcode(self, code): <NEW_LINE> <INDENT> self.lastCode = code <NEW_LINE> try: <NEW_LINE> <INDENT> exec(code, self.locals) <NEW_LINE> <DEDENT> except SystemExit: <NEW_LINE> <INDENT> raise <NEW_LINE> <DEDENT> except HTTPError as e: <NEW_LINE> <INDENT> if e.code == 401: <NEW_LINE> <INDENT> raise <NEW_LINE> <DEDENT> self.showtraceback() <NEW_LINE> <DEDENT> except JSONRPCException as e: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> err = e.error["error"] <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> err = str(e) <NEW_LINE> <DEDENT> error_id = C.get_error_id(err) <NEW_LINE> if error_id: <NEW_LINE> <INDENT> locs = locale.getdefaultlocale() <NEW_LINE> info = self.proxy.getError(error_id, ".".join(locs if locs != (None, None) else ("en_US", "UTF-8"))) <NEW_LINE> detail = "" <NEW_LINE> if info['details']: <NEW_LINE> <INDENT> detail = " - %s [%s]" % (info['details'][0]['detail'], info['details'][0]['index']) <NEW_LINE> <DEDENT> if info['topic']: <NEW_LINE> <INDENT> print(info['text'] + detail + ": " + info['topic']) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print(info['text'] + detail) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> print(err) <NEW_LINE> <DEDENT> if softspace(sys.stdout, 0): <NEW_LINE> <INDENT> print() <NEW_LINE> <DEDENT> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> exc_type, exc_obj, exc_tb = sys.exc_info() <NEW_LINE> self.showtraceback() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if softspace(sys.stdout, 0): <NEW_LINE> <INDENT> print()
MyConsole Subclass of code.InteractiveConsole
62598fa41f037a2d8b9e3fa4
class Vertex: <NEW_LINE> <INDENT> radius = 10 <NEW_LINE> color = "lightgray" <NEW_LINE> def __init__(self, x=None, y=None, label=None, radius=None, color=None): <NEW_LINE> <INDENT> self.x = x <NEW_LINE> self.y = y <NEW_LINE> self.label = label <NEW_LINE> if color: <NEW_LINE> <INDENT> self.color = color <NEW_LINE> <DEDENT> if radius and radius >= 0: <NEW_LINE> <INDENT> self.radius = radius
Represents one vertex in the graph. Has some additional properties so it can be displayed via GraphCanvas. If x & y == None, Vertex can't be drawn.
62598fa4d6c5a102081e2000
class MonthlyCollectionImmutable( _ImmutableCollectionBase, MonthlyCollection): <NEW_LINE> <INDENT> def to_mutable(self): <NEW_LINE> <INDENT> new_obj = MonthlyCollection(self.header, self.values, self.datetimes) <NEW_LINE> new_obj._validated_a_period = self._validated_a_period <NEW_LINE> return new_obj
Immutable Monthly Data Collection.
62598fa45f7d997b871f933d
class FunctionWrapper(dict): <NEW_LINE> <INDENT> def __init__(self, opts, minion): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.opts = opts <NEW_LINE> self.minion = minion <NEW_LINE> self.local = LocalClient(self.opts["conf_file"]) <NEW_LINE> self.functions = self.__load_functions() <NEW_LINE> <DEDENT> def __missing__(self, key): <NEW_LINE> <INDENT> if key not in self.functions: <NEW_LINE> <INDENT> raise KeyError <NEW_LINE> <DEDENT> return self.run_key(key) <NEW_LINE> <DEDENT> def __load_functions(self): <NEW_LINE> <INDENT> return set( self.local.cmd(self.minion, "sys.list_functions").get(self.minion, []) ) <NEW_LINE> <DEDENT> def run_key(self, key): <NEW_LINE> <INDENT> def func(*args, **kwargs): <NEW_LINE> <INDENT> args = list(args) <NEW_LINE> for _key, _val in kwargs.items(): <NEW_LINE> <INDENT> args.append("{}={}".format(_key, _val)) <NEW_LINE> <DEDENT> return self.local.cmd(self.minion, key, args) <NEW_LINE> <DEDENT> return func
Create a function wrapper that looks like the functions dict on the minion but invoked commands on the minion via a LocalClient. This allows SLS files to be loaded with an object that calls down to the minion when the salt functions dict is referenced.
62598fa416aa5153ce4003bb
class GetClientStats(flow_base.FlowBase, GetClientStatsProcessResponseMixin): <NEW_LINE> <INDENT> category = "/Administrative/" <NEW_LINE> result_types = (rdf_client_stats.ClientStats,) <NEW_LINE> def Start(self): <NEW_LINE> <INDENT> self.CallClient( server_stubs.GetClientStats, next_state=compatibility.GetName(self.StoreResults)) <NEW_LINE> <DEDENT> def StoreResults(self, responses): <NEW_LINE> <INDENT> if not responses.success: <NEW_LINE> <INDENT> self.Error("Failed to retrieve client stats.") <NEW_LINE> return <NEW_LINE> <DEDENT> for response in responses: <NEW_LINE> <INDENT> downsampled = self.ProcessResponse(self.client_urn.Basename(), response) <NEW_LINE> self.SendReply(downsampled)
This flow retrieves information about the GRR client process.
62598fa43617ad0b5ee0600d
class TestMappingUsersRulesRuleOptions(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def testMappingUsersRulesRuleOptions(self): <NEW_LINE> <INDENT> pass
MappingUsersRulesRuleOptions unit test stubs
62598fa4cb5e8a47e493c0d4
class MemberAuth(Resource): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.auth_post_parser = reqparse.RequestParser() <NEW_LINE> self.auth_post_parser.add_argument( 'email', location='json', required=True, type=str, ) <NEW_LINE> self.auth_post_parser.add_argument( 'password', location='json', required=True, type=str, ) <NEW_LINE> <DEDENT> def post(self): <NEW_LINE> <INDENT> if 'application/json' in request.headers['Content-Type']: <NEW_LINE> <INDENT> args = self.auth_post_parser.parse_args() <NEW_LINE> email = args.email <NEW_LINE> password = args.password <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise abort(406, message='server cannot understand') <NEW_LINE> <DEDENT> member = Member.query.filter_by(email=email).one() <NEW_LINE> if not member.verify_password(password): <NEW_LINE> <INDENT> raise abort(401, message='id or pw is invalid') <NEW_LINE> <DEDENT> login_user(member) <NEW_LINE> member.recent_login_timestamp = datetime.utcnow() <NEW_LINE> db.session.commit() <NEW_LINE> token_payload = { 'id': member.id, 'exp': datetime.utcnow() + timedelta(days=7) } <NEW_LINE> token = jwt.encode(token_payload, current_app.config['SECRET_KEY'], algorithm='HS256').decode('utf-8') <NEW_LINE> session_ttl = int(current_app.config['SESSION_ALIVE_MINUTES'] * 60) <NEW_LINE> p = youngs_redis.pipeline() <NEW_LINE> if youngs_redis.exists('auth:token:'+token) is False: <NEW_LINE> <INDENT> p.hmset('auth:token:'+token, {'id': member.id}) <NEW_LINE> <DEDENT> p.expire('auth:token:'+token, session_ttl) <NEW_LINE> p.execute() <NEW_LINE> session['token'] = token <NEW_LINE> member.token = token <NEW_LINE> log.info('Login : '+member.email) <NEW_LINE> return marshal(member, auth_member_fields, envelope='results') <NEW_LINE> <DEDENT> @login_required <NEW_LINE> def delete(self): <NEW_LINE> <INDENT> if 'token' in session: <NEW_LINE> <INDENT> log.info('session token %s', session['token']) <NEW_LINE> youngs_redis.delete('auth:token'+session['token']) <NEW_LINE> <DEDENT> if 
'user_id' in session: <NEW_LINE> <INDENT> log.info('cleared session userid %s', session['user_id']) <NEW_LINE> <DEDENT> logout_user() <NEW_LINE> return jsonify({'result': 'success'})
Employee class that create employee or read employee list.
62598fa444b2445a339b68cb
class Item(db.Model): <NEW_LINE> <INDENT> id = db.Column(db.Integer, primary_key=True) <NEW_LINE> name = db.Column(db.String(20), unique=True, nullable=False) <NEW_LINE> price = db.Column(db.Float, nullable=False) <NEW_LINE> desc = db.Column(db.String(300), nullable=False) <NEW_LINE> owner_username = db.Column( db.String(50), db.ForeignKey('user.username'), nullable=False) <NEW_LINE> category_name = db.Column(db.String(20), db.ForeignKey( 'category.name'), nullable=False)
items that exist in each category stored in db
62598fa430dc7b766599f708
class terminus(Symbol): <NEW_LINE> <INDENT> __slots__ = () <NEW_LINE> def __init__(self, cid: Hashable) -> None: <NEW_LINE> <INDENT> super().__init__("terminus", cid)
A terminus symbol.
62598fa4460517430c431fb8
class Filter2(nn.Module): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(Filter2, self).__init__() <NEW_LINE> self.conv1 = ConvBNReLU(in_channels=1, out_channels=10, kernel_size=32, stride=2) <NEW_LINE> self.maxpool1 = nn.AvgPool1d(kernel_size=10, stride=2) <NEW_LINE> self.conv2 = ConvBNReLU(in_channels=10, out_channels=50, kernel_size=16, stride=2) <NEW_LINE> self.maxpool2 = nn.AvgPool1d(kernel_size=5, stride=2) <NEW_LINE> self.conv3 = ConvBNReLU(in_channels=50, out_channels=100, kernel_size=8, stride=2) <NEW_LINE> self.maxpool3 = nn.AvgPool1d(kernel_size=3, stride=2) <NEW_LINE> self.fc1 = nn.Linear(in_features=117*50, out_features=2000) <NEW_LINE> <DEDENT> def forward(self, x): <NEW_LINE> <INDENT> x = self.conv1(x) <NEW_LINE> x = self.maxpool1(x) <NEW_LINE> x = self.conv2(x) <NEW_LINE> x = self.maxpool2(x) <NEW_LINE> x = x.view(-1, self.flatten_features(x)) <NEW_LINE> x = self.fc1(x) <NEW_LINE> x = x.unsqueeze(-1) <NEW_LINE> return x <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def flatten_features(x): <NEW_LINE> <INDENT> size = x.size()[1:] <NEW_LINE> num_features = 1 <NEW_LINE> for s in size: <NEW_LINE> <INDENT> num_features *= s <NEW_LINE> <DEDENT> return num_features
神经网络滤波器 input - Conv1 - Maxpool - Conv2 - Maxpool - Conv3 - Maxpool - Conv4 - Conv5 - Globaverg (1, 2000) - (10, 997) - (10, 498)- (100, 248) - (100, 122) - (500, 60) - (500, 29) - (1000, 14)-(2000, 6)-(2000,1)
62598fa40a50d4780f705296
class HiddenVAE(nn.Module): <NEW_LINE> <INDENT> def __init__(self, img_channels, latent_size): <NEW_LINE> <INDENT> super(HiddenVAE, self).__init__() <NEW_LINE> self.encoder = Encoder(img_channels, latent_size) <NEW_LINE> self.decoder = Decoder(img_channels, latent_size) <NEW_LINE> <DEDENT> def forward(self, x): <NEW_LINE> <INDENT> mu, logsigma = self.encoder(x) <NEW_LINE> sigma = logsigma.exp() <NEW_LINE> eps = torch.randn_like(sigma) <NEW_LINE> z = eps.mul(sigma).add_(mu) <NEW_LINE> recon_x = self.decoder(z) <NEW_LINE> return recon_x, mu, logsigma
Variational Autoencoder
62598fa4796e427e5384e64e
class DescribeInstancesRequest(JDCloudRequest): <NEW_LINE> <INDENT> def __init__(self, parameters, header=None, version="v1"): <NEW_LINE> <INDENT> super(DescribeInstancesRequest, self).__init__( '/regions/{regionId}/instances', 'GET', header, version) <NEW_LINE> self.parameters = parameters
查询实例列表
62598fa44a966d76dd5eed9c
class Blob(sc.prettyobj): <NEW_LINE> <INDENT> def __init__(self, obj=None, key=None, objtype=None, uid=None, force=True): <NEW_LINE> <INDENT> if uid is None: <NEW_LINE> <INDENT> if force: <NEW_LINE> <INDENT> uid = sc.uuid() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> errormsg = 'DataStore: Not creating a new Blob UUID since force is set to False: key=%s, objtype=%s, uid=%s, obj=%s' % (key, objtype, uid, obj) <NEW_LINE> raise Exception(errormsg) <NEW_LINE> <DEDENT> <DEDENT> if not key: key = '%s%s%s' % (objtype, default_separator, uid) <NEW_LINE> self.key = key <NEW_LINE> self.objtype = objtype <NEW_LINE> self.uid = uid <NEW_LINE> self.created = sc.now() <NEW_LINE> self.modified = [self.created] <NEW_LINE> self.obj = obj <NEW_LINE> return None <NEW_LINE> <DEDENT> def update(self): <NEW_LINE> <INDENT> now = sc.now() <NEW_LINE> self.modified.append(now) <NEW_LINE> return now <NEW_LINE> <DEDENT> def save(self, obj): <NEW_LINE> <INDENT> self.obj = obj <NEW_LINE> self.update() <NEW_LINE> return None <NEW_LINE> <DEDENT> def load(self): <NEW_LINE> <INDENT> output = self.obj <NEW_LINE> return output
Wrapper for any Python object we want to store in the DataStore.
62598fa4d58c6744b42dc231
class Meta: <NEW_LINE> <INDENT> database = DB <NEW_LINE> primary_key = CompositeKey('citation', 'doi')
PeeWee meta class contains the database and the primary key.
62598fa4be8e80087fbbef1c
class UploadFile(AuthenticatedMethod): <NEW_LINE> <INDENT> method_name = 'wp.uploadFile' <NEW_LINE> method_args = ('data',)
Upload a file to the blog. Note: the file is not attached to or inserted into any blog posts. Parameters: `data`: `dict` with three items: * `name`: filename * `type`: MIME-type of the file * `bits`: base-64 encoded contents of the file. See xmlrpclib.Binary() * `overwrite` (optional): flag to override an existing file with this name Returns: `dict` with keys `id`, `file` (filename), `url` (public URL), and `type` (MIME-type).
62598fa4dd821e528d6d8dee
class OpenSubtitlesError(ProviderError): <NEW_LINE> <INDENT> pass
Base class for non-generic :class:`OpenSubtitlesProvider` exceptions
62598fa4d486a94d0ba2be88
class PluginSettings(object): <NEW_LINE> <INDENT> def __init__(self, pluginName): <NEW_LINE> <INDENT> self.s = QSettings() <NEW_LINE> self.name = pluginName <NEW_LINE> <DEDENT> def pluginValue(self, key, default): <NEW_LINE> <INDENT> return self.s.value('plugins/{0}/{1}'.format(self.name, key), default) <NEW_LINE> <DEDENT> def setPluginValue(self, key, value): <NEW_LINE> <INDENT> self.s.setValue('plugins/{0}/{1}'.format(self.name, key), value) <NEW_LINE> <DEDENT> @property <NEW_LINE> def configPrefix(self): <NEW_LINE> <INDENT> prefix = self.s.value('application/config_prefix').toString() <NEW_LINE> return unicode(prefix).encode('utf-8')
Wrapper Settings class for plugins. This is a generic settings class, which provide consistent loading and saving of settings for plugins. It works by providing the plugin name to the constructor, and using this value to retrive and save settings in the main luma settings file.
62598fa42ae34c7f260aaf9c
class Qualifier(models.Model): <NEW_LINE> <INDENT> __model_label__ = "qualifier" <NEW_LINE> qualifier = models.CharField( 'Qualifier', max_length=200, null=True, blank=True) <NEW_LINE> pcsRow_fk = models.ForeignKey('PcsRow') <NEW_LINE> def __unicode__(self): <NEW_LINE> <INDENT> return "%s: %s" % (self.qualifier, self.pcsRow_fk)
The ICD 10 PCS qualifier models.
62598fa4236d856c2adc9398
class ManualLoanForm(Form): <NEW_LINE> <INDENT> def __init__(self, **kw): <NEW_LINE> <INDENT> super().__init__(**kw) <NEW_LINE> self.fields = OrderedDict() <NEW_LINE> self.fields["loan_issue"] = Field( label=_("Loan Issue"), required=True, template="iwp_website.selection_field", choices=self._choices_loan_issue, ) <NEW_LINE> self.fields["quantity"] = Field( label=_("Quantity"), required=True, att={"min": 1}, validators=[self._validate_quantity], template="iwp_website.input_field", input_type="number", ) <NEW_LINE> self.fields["total_amount"] = Field( label=_("Total Amount"), readonly=True, template="iwp_website.input_field", input_type="text", ) <NEW_LINE> self.fields["date"] = Field( label=_("Date"), required=True, att={"max": date.today().isoformat()}, validators=[self._validate_date], template="iwp_website.input_field", input_type="date", ) <NEW_LINE> <DEDENT> def _validate_quantity(self, value): <NEW_LINE> <INDENT> minimum = self.fields["quantity"].att.get("min") <NEW_LINE> if minimum and value < minimum: <NEW_LINE> <INDENT> raise FormValidationError("Minimun %d." 
% minimum) <NEW_LINE> <DEDENT> <DEDENT> def _validate_date(self, value): <NEW_LINE> <INDENT> maximum = self.fields["date"].att.get("max") <NEW_LINE> if maximum and value > self.fields["date"].to_python(maximum): <NEW_LINE> <INDENT> raise FormValidationError("Please enter date in the past.") <NEW_LINE> <DEDENT> <DEDENT> def _choices_loan_issue(self): <NEW_LINE> <INDENT> user = self.context.get("user") <NEW_LINE> struct = self.context.get("struct") <NEW_LINE> if user.is_company: <NEW_LINE> <INDENT> loan_issues = struct.loan_issue_ids.filtered( lambda r: r.display_on_website and r.by_company ) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> loan_issues = struct.loan_issue_ids.filtered( lambda r: r.display_on_website and r.by_individual ) <NEW_LINE> <DEDENT> choices = [] <NEW_LINE> if struct: <NEW_LINE> <INDENT> for issue in loan_issues: <NEW_LINE> <INDENT> choices.append( Choice( value=issue.id, display="%s - %s" % (issue.name, monetary_to_text(issue.face_value)), att={"data-face_value": issue.face_value}, obj=issue, ) ) <NEW_LINE> <DEDENT> <DEDENT> return choices
Form to create new Manual Share for a user
62598fa456ac1b37e63020a7
class CheckFilename(CheckPath): <NEW_LINE> <INDENT> def __init__(self, **kwargs): <NEW_LINE> <INDENT> super(CheckFilename, self).__init__(**kwargs) <NEW_LINE> self.message = "File does not exist." <NEW_LINE> self.allow_none = True <NEW_LINE> self.msg_level = "warning"
Checks whether a directory exists. These are files that the may be created or not necessary to run. E.g. Log files
62598fa47d847024c075c280
class HTTPTemporaryRedirect(HTTPStatus): <NEW_LINE> <INDENT> def __init__(self, location, headers=None): <NEW_LINE> <INDENT> if headers is None: <NEW_LINE> <INDENT> headers = {} <NEW_LINE> <DEDENT> headers.setdefault('location', location) <NEW_LINE> super(HTTPTemporaryRedirect, self).__init__(falcon.HTTP_307, headers)
307 Temporary Redirect. The 307 (Temporary Redirect) status code indicates that the target resource resides temporarily under a different URI and the user agent MUST NOT change the request method if it performs an automatic redirection to that URI. Since the redirection can change over time, the client ought to continue using the original effective request URI for future requests. Note: This status code is similar to 302 (Found), except that it does not allow changing the request method from POST to GET. (See also: RFC 7231, Section 6.4.7) Args: location (str): URI to provide as the Location header in the response.
62598fa4a8370b77170f0294
class NatGatewayListResult(msrest.serialization.Model): <NEW_LINE> <INDENT> _attribute_map = { 'value': {'key': 'value', 'type': '[NatGateway]'}, 'next_link': {'key': 'nextLink', 'type': 'str'}, } <NEW_LINE> def __init__( self, **kwargs ): <NEW_LINE> <INDENT> super(NatGatewayListResult, self).__init__(**kwargs) <NEW_LINE> self.value = kwargs.get('value', None) <NEW_LINE> self.next_link = kwargs.get('next_link', None)
Response for ListNatGateways API service call. :param value: A list of Nat Gateways that exists in a resource group. :type value: list[~azure.mgmt.network.v2019_11_01.models.NatGateway] :param next_link: The URL to get the next set of results. :type next_link: str
62598fa47b25080760ed7365
class GetRequestPaginator(object): <NEW_LINE> <INDENT> def __init__(self, queryset, page_requested, page_size): <NEW_LINE> <INDENT> self.queryset = queryset <NEW_LINE> self.curr_page = 1 if page_requested < 1 else page_requested <NEW_LINE> self.page_size = page_size if page_size < MAX_GET_RESPONSE_PER_PAGE else MAX_GET_RESPONSE_PER_PAGE <NEW_LINE> self.total_count = None <NEW_LINE> self.next_page = None <NEW_LINE> self.previous_page = None <NEW_LINE> self.sub_queryset = None <NEW_LINE> <DEDENT> def paginate(self): <NEW_LINE> <INDENT> self.total_count = self.queryset.count() <NEW_LINE> paginator = Paginator(self.queryset, self.page_size) <NEW_LINE> try: <NEW_LINE> <INDENT> self.sub_queryset = paginator.page(self.curr_page) <NEW_LINE> if self.sub_queryset.has_next(): <NEW_LINE> <INDENT> self.next_page = self.curr_page + 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.next_page = self.curr_page <NEW_LINE> <DEDENT> <DEDENT> except EmptyPage: <NEW_LINE> <INDENT> self.sub_queryset = paginator.page(paginator.num_pages) <NEW_LINE> self.next_page = paginator.num_pages <NEW_LINE> <DEDENT> self.previous_page = self.next_page - 2 <NEW_LINE> if self.previous_page < 1: <NEW_LINE> <INDENT> self.previous_page = 1 <NEW_LINE> <DEDENT> meta_data = { 'total_count': self.total_count, 'next_page': self.next_page, 'previous_page': self.previous_page } <NEW_LINE> return self.sub_queryset, meta_data
Used to paginate queryset: i.e. breaking large number of items in queryset in pages of smaller subset
62598fa423849d37ff850f70
class EditForm(FlaskForm): <NEW_LINE> <INDENT> username = TextField('username', [ Required(message='must provide an email address.')]) <NEW_LINE> email = TextField('email address', [Email(), Required(message='must provide an email address.')]) <NEW_LINE> password = PasswordField('password')
FlaskForm to edit existing users.
62598fa463d6d428bbee266d
class GDataClient(gdata.service.GDataService): <NEW_LINE> <INDENT> def __init__(self, application_name=None, tokens=None): <NEW_LINE> <INDENT> gdata.service.GDataService.__init__(self, source=application_name, tokens=tokens) <NEW_LINE> <DEDENT> def ClientLogin(self, username, password, service_name, source=None, account_type=None, auth_url=None, login_token=None, login_captcha=None): <NEW_LINE> <INDENT> gdata.service.GDataService.ClientLogin(self, username=username, password=password, account_type=account_type, service=service_name, auth_service_url=auth_url, source=source, captcha_token=login_token, captcha_response=login_captcha) <NEW_LINE> <DEDENT> def Get(self, url, parser): <NEW_LINE> <INDENT> return gdata.service.GDataService.Get(self, uri=url, converter=parser) <NEW_LINE> <DEDENT> def Post(self, data, url, parser, media_source=None): <NEW_LINE> <INDENT> return gdata.service.GDataService.Post(self, data=data, uri=url, media_source=media_source, converter=parser) <NEW_LINE> <DEDENT> def Put(self, data, url, parser, media_source=None): <NEW_LINE> <INDENT> return gdata.service.GDataService.Put(self, data=data, uri=url, media_source=media_source, converter=parser) <NEW_LINE> <DEDENT> def Delete(self, url): <NEW_LINE> <INDENT> return gdata.service.GDataService.Delete(self, uri=url)
This class is deprecated. All functionality has been migrated to gdata.service.GDataService.
62598fa491af0d3eaad39cc9
@add_metaclass(abc.ABCMeta) <NEW_LINE> class LabelingAsRoot(loopbackedtestcase.LoopBackedTestCase): <NEW_LINE> <INDENT> _fs_class = abc.abstractproperty( doc="The class of the filesystem being tested on.") <NEW_LINE> _invalid_label = abc.abstractproperty( doc="A label which is invalid for this filesystem.") <NEW_LINE> def __init__(self, methodName='runTest'): <NEW_LINE> <INDENT> super(LabelingAsRoot, self).__init__(methodName=methodName, deviceSpec=[Size("100 MiB")]) <NEW_LINE> <DEDENT> def setUp(self): <NEW_LINE> <INDENT> an_fs = self._fs_class() <NEW_LINE> if not an_fs.formattable: <NEW_LINE> <INDENT> self.skipTest("can not create filesystem %s" % an_fs.name) <NEW_LINE> <DEDENT> if not an_fs.labeling(): <NEW_LINE> <INDENT> self.skipTest("can not label filesystem %s" % an_fs.name) <NEW_LINE> <DEDENT> super(LabelingAsRoot, self).setUp() <NEW_LINE> <DEDENT> def testLabeling(self): <NEW_LINE> <INDENT> an_fs = self._fs_class(device=self.loopDevices[0], label=self._invalid_label) <NEW_LINE> if an_fs._readlabel.availabilityErrors or not an_fs.relabels(): <NEW_LINE> <INDENT> self.skipTest("can not read or write label for filesystem %s" % an_fs.name) <NEW_LINE> <DEDENT> self.assertIsNone(an_fs.create()) <NEW_LINE> with self.assertRaises(FSReadLabelError): <NEW_LINE> <INDENT> an_fs.readLabel() <NEW_LINE> <DEDENT> an_fs.label = "an fs" <NEW_LINE> with self.assertRaises(FSError): <NEW_LINE> <INDENT> an_fs.writeLabel() <NEW_LINE> <DEDENT> <DEDENT> def testCreating(self): <NEW_LINE> <INDENT> an_fs = self._fs_class(device=self.loopDevices[0], label="start") <NEW_LINE> self.assertIsNone(an_fs.create()) <NEW_LINE> <DEDENT> def testCreatingNone(self): <NEW_LINE> <INDENT> an_fs = self._fs_class(device=self.loopDevices[0], label=None) <NEW_LINE> self.assertIsNone(an_fs.create()) <NEW_LINE> <DEDENT> def testCreatingEmpty(self): <NEW_LINE> <INDENT> an_fs = self._fs_class(device=self.loopDevices[0], label="") <NEW_LINE> self.assertIsNone(an_fs.create())
Tests various aspects of labeling a filesystem where there is no easy way to read the filesystem's label once it has been set and where the filesystem can not be relabeled.
62598fa46e29344779b00518
class P1(): <NEW_LINE> <INDENT> def __init__(self, unitcell_size, det_shape, dtype=np.complex128): <NEW_LINE> <INDENT> T0 = 1 <NEW_LINE> self.translations = np.array([T0]) <NEW_LINE> self.unitcell_size = unitcell_size <NEW_LINE> self.syms = np.zeros((1,) + tuple(det_shape), dtype=dtype) <NEW_LINE> <DEDENT> def solid_syms_Fourier(self, solid, apply_translation = True): <NEW_LINE> <INDENT> self.syms[0] = solid <NEW_LINE> self.syms *= self.translations <NEW_LINE> return self.syms <NEW_LINE> <DEDENT> def solid_syms_real(self, solid): <NEW_LINE> <INDENT> self.syms[0] = solid <NEW_LINE> return self.syms <NEW_LINE> <DEDENT> def unflip_modes_Fourier(self, U, apply_translation = True): <NEW_LINE> <INDENT> out = np.empty_like(U) <NEW_LINE> out[0] = U[0] <NEW_LINE> return out <NEW_LINE> <DEDENT> def solid_syms_crystal_real(self, solid): <NEW_LINE> <INDENT> tiles = np.ceil(2*np.array(solid.shape, dtype=np.float) / np.array(self.unitcell_size, dtype=np.float) - 1.0).astype(np.int) <NEW_LINE> syms = self.solid_syms_real(solid) <NEW_LINE> syms = np.fft.fftshift(syms, axes=(1,2,3)) <NEW_LINE> syms_crystal = np.zeros( (len(syms) * np.prod(tiles),) + syms.shape[1:], dtype=syms.dtype) <NEW_LINE> syms_crystal[: len(syms)] = syms <NEW_LINE> index = len(syms) <NEW_LINE> for sym in syms : <NEW_LINE> <INDENT> for i in (np.arange(tiles[0]) - tiles[0]//2): <NEW_LINE> <INDENT> for j in (np.arange(tiles[1]) - tiles[1]//2): <NEW_LINE> <INDENT> for k in (np.arange(tiles[2]) - tiles[2]//2): <NEW_LINE> <INDENT> if i == 0 and j == 0 and k == 0 : <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> shift = np.array([i, j, k]) * np.array(self.unitcell_size) <NEW_LINE> syms_crystal[index] = multiroll_nowrap(sym, shift) <NEW_LINE> index += 1 <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> syms_crystal = np.fft.ifftshift(syms_crystal, axes=(1,2,3)) <NEW_LINE> return syms_crystal
Store arrays to make the crystal mapping more efficient. Assume that Fourier space arrays are fft shifted. Perform symmetry operations with the np.fft.fftfreq basis so that (say) a flip operation behaves like: a = [0, 1, 2, 3, 4, 5, 6, 7] a flipped = [0, 7, 6, 5, 4, 3, 2, 1] or: i = np.fft.fftfreq(8)*8 = [ 0, 1, 2, 3, -4, -3, -2, -1] i flipped = [ 0, -1, -2, -3, -4, 3, 2, 1]
62598fa47047854f4633f294
class AssignmentShortPollSerializer(AssignmentAllPollSerializer): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> list_serializer_class = FilterPollListSerializer <NEW_LINE> model = AssignmentPoll <NEW_LINE> fields = ( "id", "pollmethod", "description", "published", "options", "votesabstain", "votesno", "votesvalid", "votesinvalid", "votescast", "has_votes", )
Serializer for assignment.models.AssignmentPoll objects. Serializes only short polls (excluded unpublished polls).
62598fa4f548e778e596b460
class PatchvversionservicerequestsserviceOperations(object): <NEW_LINE> <INDENT> models = models <NEW_LINE> def __init__(self, client, config, serializer, deserializer): <NEW_LINE> <INDENT> self._client = client <NEW_LINE> self._serialize = serializer <NEW_LINE> self._deserialize = deserializer <NEW_LINE> self.config = config <NEW_LINE> <DEDENT> def id( self, service_request_id, service_request, version, ms_correlation_id=None, ms_request_id=None, custom_headers=None, raw=False, **operation_config): <NEW_LINE> <INDENT> url = self.id.metadata['url'] <NEW_LINE> path_format_arguments = { 'service_request_id': self._serialize.url("service_request_id", service_request_id, 'str'), 'version': self._serialize.url("version", version, 'str') } <NEW_LINE> url = self._client.format_url(url, **path_format_arguments) <NEW_LINE> query_parameters = {} <NEW_LINE> header_parameters = {} <NEW_LINE> header_parameters['Content-Type'] = 'application/json; charset=utf-8' <NEW_LINE> if self.config.generate_client_request_id: <NEW_LINE> <INDENT> header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) <NEW_LINE> <DEDENT> if custom_headers: <NEW_LINE> <INDENT> header_parameters.update(custom_headers) <NEW_LINE> <DEDENT> if ms_correlation_id is not None: <NEW_LINE> <INDENT> header_parameters['MS-CorrelationId'] = self._serialize.header("ms_correlation_id", ms_correlation_id, 'str') <NEW_LINE> <DEDENT> if ms_request_id is not None: <NEW_LINE> <INDENT> header_parameters['MS-RequestId'] = self._serialize.header("ms_request_id", ms_request_id, 'str') <NEW_LINE> <DEDENT> if self.config.accept_language is not None: <NEW_LINE> <INDENT> header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') <NEW_LINE> <DEDENT> body_content = self._serialize.body(service_request, 'MicrosoftPartnerSdkContractsV1ServiceRequest') <NEW_LINE> request = self._client.patch(url, query_parameters) <NEW_LINE> response = self._client.send( request, 
header_parameters, body_content, stream=False, **operation_config) <NEW_LINE> if response.status_code not in [200, 201, 400, 401, 403, 404, 500]: <NEW_LINE> <INDENT> exp = CloudError(response) <NEW_LINE> exp.request_id = response.headers.get('x-ms-request-id') <NEW_LINE> raise exp <NEW_LINE> <DEDENT> deserialized = None <NEW_LINE> if response.status_code in [200, 201]: <NEW_LINE> <INDENT> deserialized = self._deserialize('MicrosoftPartnerSdkContractsV1ServiceRequest', response) <NEW_LINE> <DEDENT> if raw: <NEW_LINE> <INDENT> client_raw_response = ClientRawResponse(deserialized, response) <NEW_LINE> return client_raw_response <NEW_LINE> <DEDENT> return deserialized <NEW_LINE> <DEDENT> id.metadata = {'url': '/v{version}/servicerequests/{service_request_id}'}
PatchvversionservicerequestsserviceOperations operations. :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. :param deserializer: An object model deserializer.
62598fa4fff4ab517ebcd6a1
class LiteracyAndLanguageResultSet(ResultSet): <NEW_LINE> <INDENT> def getJSONFromString(self, str): <NEW_LINE> <INDENT> return json.loads(str) <NEW_LINE> <DEDENT> def get_Response(self): <NEW_LINE> <INDENT> return self._output.get('Response', None)
A ResultSet with methods tailored to the values returned by the LiteracyAndLanguage Choreo. The ResultSet object is used to retrieve the results of a Choreo execution.
62598fa44a966d76dd5eed9e
class Call_IdHeader( DeepClass("_cidh_", { "host": {}, "key": { dck.gen: "GenerateKey", dck.check: lambda x: isinstance(x, bytes)} }), Header): <NEW_LINE> <INDENT> parseinfo = { Parser.Pattern: b"(.*)$", Parser.Mappings: [("key",)], Parser.PassMappingsToInit: True, } <NEW_LINE> @classmethod <NEW_LINE> def GenerateKey(cls): <NEW_LINE> <INDENT> keyval = randint(0, 2 ** 24 - 1) <NEW_LINE> dt = datetime.now() <NEW_LINE> keydate = ( b"%(year)04d%(month)02d%(day)02d%(hour)02d%(minute)02d" b"%(second)02d" % { abytes(key): getattr(dt, key) for key in ( 'year', 'month', 'day', 'hour', 'minute', 'second' ) }) <NEW_LINE> return b"%06x-%s" % (keyval, keydate) <NEW_LINE> <DEDENT> @property <NEW_LINE> def value(self): <NEW_LINE> <INDENT> key = self.key <NEW_LINE> if not key: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> host = self.host <NEW_LINE> if host: <NEW_LINE> <INDENT> return b"%s@%s" % (self.key, self.host) <NEW_LINE> <DEDENT> return b"%s" % (self.key) <NEW_LINE> <DEDENT> def bytesGen(self): <NEW_LINE> <INDENT> yield self._hdr_prepend() <NEW_LINE> if self.key is None: <NEW_LINE> <INDENT> raise Incomplete("Call ID header has no key.") <NEW_LINE> <DEDENT> yield self.key <NEW_LINE> if self.host: <NEW_LINE> <INDENT> yield b'@' <NEW_LINE> yield bytes(self.host)
Call ID header. To paraphrase: https://tools.ietf.org/html/rfc3261#section-8.1.1.4 This value should be generated uniquely over space and time for each new dialogue initiated by the UA. It must be the same for all messages during a dialogue. It SHOULD also be the same for each REGISTER sent to maintain a registration by the UA. I.e. being registered => being in a dialogue.
62598fa455399d3f056263e0
class SentSimClassInterface(object):
    """A small interface for evaluating a sentence-similarity classifier;
    extendable."""

    def __init__(self, model, vocab, session, opt):
        self.model = model
        self.vocab = vocab
        self.session = session
        self.opt = opt
        self.model_saver = tf.train.Saver()

    def infer_step(self, batch_input):
        """Run one inference step over a batch.

        ``batch_input[0]`` holds the index matrices for the two sentences;
        ``batch_input[1]`` holds the labels. Returns (predictions, loss).
        """
        pair, labels = batch_input[0], batch_input[1]
        feeds = {
            self.model.encoder_a.input_idx: pair[0],
            self.model.encoder_b.input_idx: pair[1],
            self.model.labels: labels,
        }
        # Disable dropout on both encoders for evaluation.
        for encoder in (self.model.encoder_a, self.model.encoder_b):
            feeds[encoder.static_keep_prob] = 1.0
            feeds[encoder.rnn_keep_prob] = 1.0
        fetches = [self.model.predictions, self.model.loss]
        preds, prediction_error = self.session.run(fetches, feed_dict=feeds)
        return preds, prediction_error
A small interface for the evaluation of sentence similarity classifier; extendable.
62598fa47d43ff2487427360
class FitResultCollector(varial.tools.Tool):
    """Collect fit results (numbers only) from sibling fit-tool chains."""

    io = varial.dbio

    def run(self):
        # Names of all fit chains two levels up in the tool hierarchy.
        fit_chains = varial.analysis.lookup_children_names('../..')
        # Pair each chain name with its data template-fit result
        # (lookup may return None when the chain has no such tool).
        fitters = list(
            (name, varial.analysis.lookup(
                '../../%s/TemplateFitToolData' % name))
            for name in fit_chains
        )
        # Iterate over a copy so entries can be removed while looping.
        for name, res in fitters[:]:
            if res and 'TemplateFitTool' in res.name:
                # Rename the result after its chain for stable sorting below.
                res.name = name
            else:
                fitters.remove((name, res))
        self.result = sorted((res for _, res in fitters), key=lambda w: w.name)
Collect fit results (numbers only).
62598fa401c39578d7f12c3b
class CreateUDBSlaveResponseSchema(schema.ResponseSchema):
    """CreateUDBSlave - response schema for creating a slave (read-only
    replica) of a UDB instance."""

    fields = {
        # DBId: identifier of the newly created slave instance.
        "DBId": fields.Str(required=False, load_from="DBId"),
    }
CreateUDBSlave - create a slave (read-only replica) of a UDB instance
62598fa4a8370b77170f0296
class ParseBaseException(Exception):
    """Base exception class for all parsing runtime exceptions."""

    def __init__(self, pstr, loc=0, msg=None, elem=None):
        self.loc = loc
        if msg is not None:
            self.msg, self.pstr = msg, pstr
        else:
            # Single-argument form: the string IS the message.
            self.msg, self.pstr = pstr, ""
        self.parserElement = elem
        self.args = (pstr, loc, msg)

    @classmethod
    def _from_exception(cls, pe):
        """Re-raise helper: clone another parse exception as *cls*."""
        return cls(pe.pstr, pe.loc, pe.msg, pe.parserElement)

    def __getattr__(self, aname):
        # Lazily computed attributes derived from loc and pstr.
        if aname == "lineno":
            return lineno(self.loc, self.pstr)
        if aname in ("col", "column"):
            return col(self.loc, self.pstr)
        if aname == "line":
            return line(self.loc, self.pstr)
        raise AttributeError(aname)

    def __str__(self):
        if not self.pstr:
            foundstr = ''
        elif self.loc >= len(self.pstr):
            foundstr = ', found end of text'
        else:
            foundstr = (', found %r'
                        % self.pstr[self.loc:self.loc + 1]).replace(r'\\', '\\')
        return ("%s%s (at char %d), (line:%d, col:%d)"
                % (self.msg, foundstr, self.loc, self.lineno, self.column))

    def __repr__(self):
        return _ustr(self)

    def markInputline(self, markerString=">!<"):
        """Return the offending input line with markerString inserted at
        the error column."""
        marked = self.line
        insert_at = self.column - 1
        if markerString:
            marked = marked[:insert_at] + markerString + marked[insert_at:]
        return marked.strip()

    def __dir__(self):
        return ["lineno", "col", "line"] + dir(type(self))
base exception class for all parsing runtime exceptions
62598fa47b25080760ed7367
@benchmark.Disabled('reference')
class SmoothnessTop25(_Smoothness):
    """Measures rendering statistics while scrolling down the top 25 web
    pages.

    http://www.chromium.org/developers/design-documents/rendering-benchmarks
    """

    page_set = page_sets.Top25SmoothPageSet

    @classmethod
    def Name(cls):
        """Benchmark identifier used by the telemetry harness."""
        return 'smoothness.top_25_smooth'
Measures rendering statistics while scrolling down the top 25 web pages. http://www.chromium.org/developers/design-documents/rendering-benchmarks
62598fa48da39b475be0309e
class Fellow(Person):
    """Blueprint for a new fellow; inherits from the Person class.

    Person type 'f' translates to fellow and 's' to staff.
    """

    def __init__(self, first_name, second_name, person_type='FELLOW',
                 lspace_option='N'):
        # Bug fix: forward the caller-supplied lspace_option instead of
        # hard-coding 'N', which silently discarded the argument. The
        # default is unchanged, so existing callers behave identically.
        super(Fellow, self).__init__(first_name, second_name, person_type,
                                     lspace_option=lspace_option)
Blueprint for a new fellow; inherits from the Person class. Person type (f) translates to fellow and (s) to staff.
62598fa456b00c62f0fb276f
class MidptTest(unittest.TestCase):
    """Unit tests for the calculations behind the midpoint between two
    user locations."""

    def test_mi_to_m(self):
        # Converted from a bare `assert` to assertEqual for consistency
        # with the other tests and a useful failure message.
        self.assertEqual(mi_to_m(3), 4828)

    def test_stricter_radius(self):
        self.assertEqual(stricter_radius(3, 2), 2)

    def test_midpt_formula(self):
        # Expected value is the exact arithmetic mean of the two
        # coordinates, so strict equality holds.
        self.assertEqual(
            midpt_formula([37.86408, -122.254124], [37.8684539, -122.259389]),
            [37.866266949999996, -122.2567565])
Unit tests for the calculations behind the making of the midpoint between two user locations.
62598fa499cbb53fe6830d92
class DirectoryIterator(image.DirectoryIterator, Iterator):
    """Iterator capable of reading images from a directory on disk.

    Thin compatibility wrapper around ``image.DirectoryIterator`` that
    only forwards the ``dtype`` keyword when the underlying
    ``image.ImageDataGenerator.__init__`` actually accepts it (older
    versions of the image module do not).

    All arguments are forwarded unchanged to ``image.DirectoryIterator``;
    see its documentation for their meaning. ``data_format`` defaults to
    ``backend.image_data_format()`` and ``dtype`` to ``backend.floatx()``
    when left as None.
    """

    def __init__(self, directory, image_data_generator,
                 target_size=(256, 256), color_mode='rgb',
                 classes=None, class_mode='categorical',
                 batch_size=32, shuffle=True, seed=None,
                 data_format=None, save_to_dir=None,
                 save_prefix='', save_format='png',
                 follow_links=False, subset=None,
                 interpolation='nearest', dtype=None):
        if data_format is None:
            data_format = backend.image_data_format()
        kwargs = {}
        # Bug fix: inspect.getargspec was deprecated since Python 3.0 and
        # removed in 3.11. Prefer getfullargspec when available, keeping
        # the old name as a fallback for Python 2 environments.
        getargspec = getattr(inspect, 'getfullargspec',
                             None) or inspect.getargspec
        if 'dtype' in getargspec(image.ImageDataGenerator.__init__).args:
            if dtype is None:
                dtype = backend.floatx()
            kwargs['dtype'] = dtype
        super(DirectoryIterator, self).__init__(
            directory, image_data_generator,
            target_size=target_size, color_mode=color_mode,
            classes=classes, class_mode=class_mode,
            batch_size=batch_size, shuffle=shuffle, seed=seed,
            data_format=data_format, save_to_dir=save_to_dir,
            save_prefix=save_prefix, save_format=save_format,
            follow_links=follow_links, subset=subset,
            interpolation=interpolation, **kwargs)
Iterator capable of reading images from a directory on disk. # Arguments directory: Path to the directory to read images from. Each subdirectory in this directory will be considered to contain images from one class, or alternatively you could specify class subdirectories via the `classes` argument. image_data_generator: Instance of `ImageDataGenerator` to use for random transformations and normalization. target_size: tuple of integers, dimensions to resize input images to. color_mode: One of `"rgb"`, `"rgba"`, `"grayscale"`. Color mode to read images. classes: Optional list of strings, names of subdirectories containing images from each class (e.g. `["dogs", "cats"]`). It will be computed automatically if not set. class_mode: Mode for yielding the targets: `"binary"`: binary targets (if there are only two classes), `"categorical"`: categorical targets, `"sparse"`: integer targets, `"input"`: targets are images identical to input images (mainly used to work with autoencoders), `None`: no targets get yielded (only input images are yielded). batch_size: Integer, size of a batch. shuffle: Boolean, whether to shuffle the data between epochs. seed: Random seed for data shuffling. data_format: String, one of `channels_first`, `channels_last`. save_to_dir: Optional directory where to save the pictures being yielded, in a viewable format. This is useful for visualizing the random transformations being applied, for debugging purposes. save_prefix: String prefix to use for saving sample images (if `save_to_dir` is set). save_format: Format to use for saving sample images (if `save_to_dir` is set). subset: Subset of data (`"training"` or `"validation"`) if validation_split is set in ImageDataGenerator. interpolation: Interpolation method used to resample the image if the target size is different from that of the loaded image. Supported methods are "nearest", "bilinear", and "bicubic". If PIL version 1.1.3 or newer is installed, "lanczos" is also supported. 
If PIL version 3.4.0 or newer is installed, "box" and "hamming" are also supported. By default, "nearest" is used. dtype: Dtype to use for generated arrays.
62598fa45fdd1c0f98e5de55
class IncompatibleTypesException(Exception):
    """Raised when values are assigned in a way that breaks our rules.

    @see ArgCompatibility class for the allowed situations.
    """

    def __init__(self, value=None):
        super(IncompatibleTypesException, self).__init__()
        # The offending value, reported via __str__.
        self.value = value

    def __str__(self):
        return str(self.value)
Raised when we assign values that break our rules. @see the ArgCompatibility class for the allowed situations.
62598fa44f6381625f19941c
class TransformerEncoder(nn.Module):
    """Transformer encoder stack; the embedding layer is not included.

    Stacks ``num_layers`` TransformerSeq2SeqEncoderLayer blocks followed
    by a final LayerNorm.
    """

    def __init__(self, num_layers, d_model=512, n_head=8, dim_ff=2048,
                 dropout=0.1):
        super(TransformerEncoder, self).__init__()
        self.layers = nn.ModuleList([
            TransformerSeq2SeqEncoderLayer(
                d_model=d_model, n_head=n_head,
                dim_ff=dim_ff, dropout=dropout)
            for _ in range(num_layers)
        ])
        self.norm = nn.LayerNorm(d_model, eps=1e-6)

    def forward(self, x, seq_mask=None):
        """Encode *x*, applying *seq_mask* in every layer.

        When no mask is given, every position is treated as valid.
        """
        if seq_mask is None:
            seq_mask = x.new_ones(x.size(0), x.size(1)).bool()
        out = x
        for encoder_layer in self.layers:
            out = encoder_layer(out, seq_mask)
        return self.norm(out)
The encoder module of the Transformer; does not include the embedding layer.
62598fa463d6d428bbee266f
class FindFilesNode(Node):
    """Uses fnmatch to find matching file paths in a given folder.

    Attributes:
        _location: base folder to search in (input, Path).
        _pattern: fnmatch pattern to search for (input, String).
        _files: all files found (output, PathArray).
    """

    def __init__(self, name, parent):
        super(FindFilesNode, self).__init__(name, parent)
        self._location = StringAttribute("location", self)
        self._location.setSpecializationOverride("Path")
        self._pattern = StringAttribute("pattern", self)
        self._files = StringAttribute("files", self)
        self._setAttributeAllowedSpecializations(self._location, ["Path"])
        self._setAttributeAllowedSpecializations(self._pattern, ["String"])
        self._setAttributeAllowedSpecializations(self._files, ["PathArray"])
        self.addInputAttribute(self._location)
        self.addInputAttribute(self._pattern)
        self.addOutputAttribute(self._files)
        # Recompute _files whenever location or pattern changes.
        self._setAttributeAffect(self._location, self._files)
        self._setAttributeAffect(self._pattern, self._files)
        self._setSliceable(True)

    def update(self, attribute):
        """Rescan _location for files matching _pattern and write the
        matching paths to _files."""
        coralApp.logDebug("FindFiles.update")
        location = self._location.value().stringValueAt(0)
        pattern = self._pattern.value().stringValueAt(0)
        assets = []
        if location == "":
            # No location configured yet; leave the output untouched.
            return
        if not os.path.isdir(location):
            coralApp.logError("Location does not exist.")
            return
        filenames = os.listdir(location)
        for filename in filenames:
            if fnmatch.fnmatch(filename, pattern):
                fullPath = os.path.join(location, filename)
                # Only plain files are collected; subdirectories are
                # skipped even when they match the pattern.
                if os.path.isfile(fullPath):
                    assets.append(fullPath)
        self._files.outValue().setPathValues(assets)
        coralApp.logDebug("FindFiles.update: Done")
This node uses fnmatch to find matching file paths in the given folder @ivar _location: base folder to search in @ivar _pattern: fnmatch pattern to search for @ivar _files: all files found
62598fa4d6c5a102081e2004
class ArtiefactDiscovery(Base):
    """Artiefact Discovery: links an artiefact to the user who uploaded
    (discovered) it, with the upload timestamp."""

    __tablename__ = 'artiefact_discovery'

    # One discovery row per artiefact (primary key on the artiefact id).
    artiefact_id = Column(BIGINT, ForeignKey('artiefact.id'), primary_key=True)
    user_id = Column(BIGINT, ForeignKey('artiefact_user.id'), index=True)
    uploaded_at = Column(DateTime(timezone=True), nullable=False)
    # One-to-one relationships back to the artiefact and the user.
    artiefact = relationship('Artiefact', backref=backref('ArtiefactDiscovery', uselist=False))
    user = relationship('ArtiefactUser', backref=backref('Artiefact', uselist=False))
Artiefact Discovery
62598fa4cc0a2c111447aecd
class TestSpellRapidTranqProperty(TransactionCase):
    """Test that the rapid_tranq column on nh.clinical.spell is present
    and can be set."""

    def setUp(self):
        super(TestSpellRapidTranqProperty, self).setUp()
        self.spell_model = self.registry('nh.clinical.spell')
        self.patient_model = self.registry('nh.clinical.patient')
        cr, uid = self.cr, self.uid
        patient_id = self.patient_model.create(cr, uid, {
            'given_name': 'Test',
            'family_name': 'Icicles',
            'patient_identifier': '666',
            'other_identifier': '1337',
        })
        self.spell = self.spell_model.create(cr, uid, {
            'patient_id': patient_id,
            'pos_id': 1,
        })

    def _read_spell(self):
        # Re-read the spell record created in setUp as a plain dict.
        return self.spell_model.read(self.cr, self.uid, self.spell)

    def test_rapid_tranq_property(self):
        self.assertTrue('rapid_tranq' in self._read_spell())

    def test_rapid_tranq_defaults_to_false(self):
        self.assertFalse(self._read_spell().get('rapid_tranq', True))

    def test_set_rapid_tranq_to_true(self):
        self.spell_model.write(self.cr, self.uid, self.spell,
                               {'rapid_tranq': True})
        self.assertTrue(self._read_spell().get('rapid_tranq', False))

    def test_set_rapid_tranq_to_false(self):
        self.spell_model.write(self.cr, self.uid, self.spell,
                               {'rapid_tranq': False})
        self.assertFalse(self._read_spell().get('rapid_tranq', True))
Test that the rapid_tranq column on nh.clinical.spell is present and can be set
62598fa41f037a2d8b9e3fa8
class RemoveHandler(handler.TriggeredHandler):
    """Remove an alias to a long command (for the calling user)."""

    # Declarative routing: respond to targeted slack/telnet messages
    # triggered by "alias remove <short>".
    handles_what = {
        'message_matcher': matchers.match_or(
            matchers.match_slack("message"),
            matchers.match_telnet("message")
        ),
        'channel_matcher': matchers.match_channel(c.TARGETED),
        'triggers': [
            trigger.Trigger('alias remove', True),
        ],
        'schema': Schema({
            Required("short"): All(su.string_types(), Length(min=1)),
        }),
        'args': {
            'order': [
                'short',
            ],
            'help': {
                'short': 'alias of full command to remove',
            },
        },
    }

    def _run(self, short):
        # Aliases are stored per-user; without a user id there is nothing
        # to look up.
        from_who = self.message.body.user_id
        if not from_who:
            return
        from_who = "user:%s" % from_who
        lines = []
        # The brain is shared mutable state; hold its lock for the whole
        # read-modify-write cycle.
        with self.bot.locks.brain:
            try:
                user_info = self.bot.brain[from_who]
            except KeyError:
                user_info = {}
            user_aliases = user_info.get('aliases', {})
            try:
                long = user_aliases.pop(short)
                # Persist the mutated user record.
                self.bot.brain[from_who] = user_info
                self.bot.brain.sync()
                lines = [
                    ("Alias of `%s` to `%s` has"
                     " been removed.") % (short, long),
                ]
            except KeyError:
                lines = [
                    "No alias found for `%s`" % short,
                ]
        if lines:
            replier = self.message.reply_text
            replier("\n".join(lines), threaded=True, prefixed=False)
Remove a alias to a long command (for the calling user).
62598fa438b623060ffa8f53