code
stringlengths
4
4.48k
docstring
stringlengths
1
6.45k
_id
stringlengths
24
24
class Link(base.BaseRichTextComponent):
    """A rich-text component for displaying links."""

    # Display metadata consumed by the rich-text editor UI.
    name = 'Link'
    category = 'Basic Input'
    description = 'A link to a URL.'
    frontend_name = 'link'
    tooltip = 'Insert link'

    # Customization arguments: the target URL (validated as SanitizedUrl),
    # optional display text, and whether to open in the same window.
    _customization_arg_specs = [{
        'name': 'url',
        'description': (
            'The link URL. If no protocol is specified, HTTPS will be used.'),
        'schema': {
            'type': 'custom',
            'obj_type': 'SanitizedUrl',
        },
        'default_value': 'https://www.example.com',
    }, {
        'name': 'text',
        'description': (
            'The link text. If left blank, the link URL will be used.'),
        'schema': {
            'type': 'unicode',
        },
        'default_value': '',
    }, {
        'name': 'open_link_in_same_window',
        'description': 'Open the link in the same window?',
        'schema': {
            'type': 'bool'
        },
        'default_value': False,
    }]
A rich-text component for displaying links.
62598fc23346ee7daa33778c
class MetadataPreprocessor(Preprocessor):
    """Get Meta-Data.

    Consumes leading ``key: value`` header lines from *lines*, stores the
    parsed mapping on ``self.markdown.metadata``, and returns the remaining
    document lines.
    """

    def run(self, lines):
        meta = Metadata()
        key = None  # most recent key, for continuation lines
        while lines:
            line = lines.pop(0)
            if line.strip() == '':
                # A blank line terminates the metadata header.
                break
            m1 = META_RE.match(line)
            if m1:
                key = m1.group('key').lower().strip()
                value = m1.group('value').strip()
                try:
                    meta[key].append(value)
                except KeyError:
                    # First occurrence of this key. An empty value is stored
                    # as None rather than [''].
                    meta[key] = [value] if value else None
            else:
                m2 = META_MORE_RE.match(line)
                if m2 and key:
                    # Continuation line: append to the most recent key.
                    meta[key].append(m2.group('value').strip())
                else:
                    # Not metadata: push the line back and stop scanning.
                    lines.insert(0, line)
                    break
        self.markdown.metadata = meta
        return lines
Get Meta-Data.
62598fc260cbc95b063645c4
class Converter(object):
    """Decode from strings to objects and back.

    Used internally by the :meth:`morepath.App.converter` directive.
    Only decodes a list holding exactly one value; any other length raises
    ``ValueError``. Used for decoding/encoding URL parameters and path
    parameters.
    """

    # Equality is value-based below, but hashing stays identity-based.
    __hash__ = object.__hash__

    def __init__(self, decode, encode=None):
        # Fall back to ``unicode`` on Python 2 and ``str`` on Python 3
        # when no explicit encoder is supplied.
        text_type = getattr(__builtins__, "unicode", str)
        self.single_decode = decode
        self.single_encode = encode or text_type

    def decode(self, strings):
        """Decode a one-element list of strings into a value."""
        if len(strings) == 1:
            return self.single_decode(strings[0])
        raise ValueError

    def encode(self, value):
        """Encode *value* into a one-element list of strings."""
        return [self.single_encode(value)]

    def is_missing(self, value):
        # The empty list marks an absent parameter.
        return value == []

    def __eq__(self, other):
        return (isinstance(other, Converter)
                and self.single_decode is other.single_decode
                and self.single_encode is other.single_encode)

    def __ne__(self, other):
        return not self == other
Decode from strings to objects and back. Used internally by the :meth:`morepath.App.converter` directive. Only used for decoding for a list with a single value, will error if more or less than one value is entered. Used for decoding/encoding URL parameters and path parameters.
62598fc25166f23b2e243668
class DisablePoll_args:
    """Thrift argument struct for the DisablePoll call.

    Appears to be Thrift-generated code (Python 2 era — see ``iteritems``
    in ``__repr__``).

    Attributes:
     - _valueId
    """

    # Field spec: field id 1 is a STRUCT of type RemoteValueID.
    thrift_spec = (
        None,
        (1, TType.STRUCT, '_valueId', (RemoteValueID, RemoteValueID.thrift_spec), None, ),
    )

    def __init__(self, _valueId=None,):
        self._valueId = _valueId

    def read(self, iprot):
        # Fast path: C-accelerated decoding when protocol/transport allow it.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        # Slow path: field-by-field protocol decoding.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self._valueId = RemoteValueID()
                    self._valueId.read(iprot)
                else:
                    # Type mismatch: skip the unexpected payload.
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Fast path mirror of read().
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('DisablePoll_args')
        if self._valueId is not None:
            oprot.writeFieldBegin('_valueId', TType.STRUCT, 1)
            self._valueId.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # No required-field constraints to enforce.
        return

    def __repr__(self):
        # NOTE(review): ``iteritems`` is Python 2 only; breaks on Python 3.
        L = ['%s=%r' % (key, value) for key, value in
             self.__dict__.iteritems()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
Attributes: - _valueId
62598fc28a349b6b436864c6
class WhatPage(TiedToUser):
    """Where was the User looking last?

    Persists per-user pagination state (current page and page size) for a
    named model ("item" or "purchase").
    """

    MODEL_CHOICES = (
        ("item", "item"),
        ("purchase", "purchase"),
    )
    # Which model's listing this record tracks; one record per model.
    obj = models.CharField(max_length=15, choices=MODEL_CHOICES, unique=True)
    page_number = models.IntegerField()
    number_per_page = models.IntegerField()

    def __unicode__(self):
        return u"obj: {0}\nPer page: {1}\nPage number: {2}".format(
            self.obj, self.number_per_page, self.page_number)

    def decrement_page_number(self):
        # Returns 1 when the page number was decremented and saved;
        # returns None when already on page 1.
        page_number = self.page_number
        if page_number == 1:
            return
        else:
            self.page_number = page_number - 1
            self.save()
            if page_number - 1 == self.page_number:
                return 1

    def increase_page_number(self):
        # Returns 1 when the increment was applied and persisted.
        page_number = self.page_number
        self.page_number = page_number + 1
        self.save()
        if page_number + 1 == self.page_number:
            return 1

    def change_number_per_page(self, number_per_page):
        # Page sizes below 5 are silently ignored.
        if number_per_page >= 5:
            self.number_per_page = number_per_page
            self.save()
Where was the User looking last?
62598fc23d592f4c4edbb13f
class VisaIOWarning(Warning):
    """Exception class for VISA I/O warnings.

    According to the specification VPP-4.3.2 and the NI implementation.

    :param error_code: numeric VISA completion/error code; looked up in
        ``completion_and_error_messages`` for its abbreviation/description.
    """

    def __init__(self, error_code):
        abbreviation, description = completion_and_error_messages.get(
            error_code, ('?', 'Unknown code.'))
        super(VisaIOWarning, self).__init__(
            '%s (%d): %s' % (abbreviation, error_code, description))
        self.error_code = error_code
        self.abbreviation = abbreviation
        self.description = description

    def __reduce__(self):
        # Support pickling: rebuild from the error code alone.
        return (VisaIOWarning, (self.error_code,))

    def __eq__(self, other):
        # Fixed: the original compared ``other.__dict__`` unconditionally,
        # raising AttributeError for objects without a __dict__ (e.g. ints).
        # Returning NotImplemented lets Python fall back to the reflected
        # comparison / identity, yielding False for foreign types.
        if not isinstance(other, VisaIOWarning):
            return NotImplemented
        return self.__dict__ == other.__dict__
Exception class for VISA I/O warnings. According to the specification VPP-4.3.2 and the NI implementation.
62598fc23317a56b869be694
class BaseNetworkTask(task.Task):
    """Base task to load drivers common to the tasks."""

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Created lazily by the ``network_driver`` property below.
        self._network_driver = None
        self.task_utils = task_utils.TaskUtils()
        self.lb_repo = repositories.LoadBalancerRepository()

    @property
    def network_driver(self):
        # Lazy-load the network driver on first access and cache it.
        if self._network_driver is None:
            self._network_driver = utils.get_network_driver()
        return self._network_driver
Base task to load drivers common to the tasks.
62598fc2ec188e330fdf8b1c
class ComponentsResource(msrest.serialization.Model):
    """An azure resource object.

    Variables are only populated by the server, and will be ignored when
    sending a request. All required parameters must be populated in order
    to send to Azure.

    :ivar id: Azure resource Id.
    :vartype id: str
    :ivar name: Azure resource name.
    :vartype name: str
    :ivar type: Azure resource type.
    :vartype type: str
    :ivar location: Required. Resource location.
    :vartype location: str
    :ivar tags: A set of tags. Resource tags.
    :vartype tags: dict[str, str]
    """

    _validation = {
        'id': {'readonly': True},
        'name': {'readonly': True},
        'type': {'readonly': True},
        'location': {'required': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'location': {'key': 'location', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
    }

    def __init__(
        self,
        *,
        location: str,
        tags: Optional[Dict[str, str]] = None,
        **kwargs
    ):
        super(ComponentsResource, self).__init__(**kwargs)
        # id/name/type are server-populated (readonly) — start as None.
        self.id = None
        self.name = None
        self.type = None
        self.location = location
        self.tags = tags
An azure resource object. Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. :ivar id: Azure resource Id. :vartype id: str :ivar name: Azure resource name. :vartype name: str :ivar type: Azure resource type. :vartype type: str :ivar location: Required. Resource location. :vartype location: str :ivar tags: A set of tags. Resource tags. :vartype tags: dict[str, str]
62598fc297e22403b383b192
class Products(models.Model):
    """A named product keyed by an auto-increment ``productid``.

    Used by queryset-slicing doctests elsewhere, which rely on ``__repr__``
    returning the bare name.
    """

    productid = models.AutoField(primary_key=True)
    name = models.CharField(max_length=50)

    def __repr__(self):
        return self.name

    def __unicode__(self):
        # Fixed: '%u' is a numeric (unsigned) conversion; applying it to
        # ``self.name`` (a string) raised TypeError at runtime.
        return "<Product %d: %s>" % (self.productid, self.name)
>>> names=['D', 'F', 'B', 'A', 'C', 'E', 'G'] >>> for n in names: product = Products.objects.create(name=n) >>> p = Products.objects >>> len(list(p.all())) 7 >>> len(list(p.all()[:3])) 3 >>> len(list(p.all()[2:5])) 3 >>> len(list(p.all()[5:])) 2 >>> p.all()[0:0] [] >>> p.all()[0:0][:10] [] >>> pn = p.order_by('name') >>> list(pn) [A, B, C, D, E, F, G] >>> list(pn[:3]) [A, B, C] >>> list(pn[2:5]) [C, D, E] >>> list(pn[5:]) [F, G]
62598fc24c3428357761a544
class Message:
    """A wrapper around the tuple that pika returns when reading a message.

    Bundles the three delivery components into named attributes.
    """

    def __init__(self, delivery_info, properties, body):
        # Store each component verbatim under its own attribute.
        for attr, value in (("delivery_info", delivery_info),
                            ("properties", properties),
                            ("body", body)):
            setattr(self, attr, value)
A wrapper around the tuple that pika returns when reading a message.
62598fc25fcc89381b266291
class NerRDFTypeFilter(object):
    """A filter based on the RDF type of an entity.

    E.g.::

        filter = NerRDFTypeFilter('http://dbpedia.org/sparql',
                                  ('http://schema.org/Place',
                                   'http://dbpedia.org/ontology/Agent',
                                   'http://dbpedia.org/ontology/Place'))
    """

    def __init__(self, endpoint, accepted_types):
        self.endpoint = endpoint              # SPARQL endpoint URL
        self.accepted_types = accepted_types  # accepted rdf:type URIs
        # Template instantiated per-URI with %-formatting in __call__.
        self.query = 'SELECT ?type WHERE{<%(uri)s> rdf:type ?type}'

    def __call__(self, named_entities):
        # named_entities: iterable of (uri, p, t) triples. Keeps only the
        # triples whose URI has at least one accepted rdf:type.
        filtered_named_entities = []
        # Cache of uri -> bool so each URI is queried at most once.
        seen_uris = {}
        for uri, p, t in named_entities:
            if uri in seen_uris:
                if seen_uris[uri]:
                    filtered_named_entities.append((uri, p, t))
            else:
                results = sparqlquery(self.endpoint, self.query % {'uri': uri})
                types = set([r[0] for r in results])
                if not len(types.intersection(self.accepted_types)):
                    seen_uris[uri] = False
                else:
                    seen_uris[uri] = True
                    filtered_named_entities.append((uri, p, t))
        return filtered_named_entities
A filter based on the RDF type on entity E.g. filter = NerRDFTypeFilter('http://dbpedia.org/sparql', ('http://schema.org/Place', 'http://dbpedia.org/ontology/Agent', 'http://dbpedia.org/ontology/Place'))
62598fc263b5f9789fe853fb
class SchedulerInitTestCase(test.NoDBTestCase):
    """Test case for base scheduler driver initiation."""

    driver_cls = fakes.FakeScheduler

    @mock.patch.object(host_manager.HostManager, '_init_instance_info')
    @mock.patch.object(host_manager.HostManager, '_init_aggregates')
    def test_init_using_default_hostmanager(self, mock_init_agg, mock_init_inst):
        # With default config the driver builds the plain HostManager.
        manager = self.driver_cls().host_manager
        self.assertIsInstance(manager, host_manager.HostManager)

    @mock.patch.object(ironic_host_manager.IronicHostManager, '_init_instance_info')
    @mock.patch.object(host_manager.HostManager, '_init_aggregates')
    def test_init_using_ironic_hostmanager(self, mock_init_agg, mock_init_inst):
        # Overriding the scheduler host_manager flag selects the Ironic one.
        self.flags(host_manager='ironic_host_manager', group='scheduler')
        manager = self.driver_cls().host_manager
        self.assertIsInstance(manager, ironic_host_manager.IronicHostManager)
Test case for base scheduler driver initiation.
62598fc2d486a94d0ba2c259
class ArchitectureValueValuesEnum(_messages.Enum):
    """The CPU architecture for which packages in this distribution channel
    were built.

    Values:
      ARCHITECTURE_UNSPECIFIED: Unknown architecture
      X86: X86 architecture
      X64: x64 architecture
    """
    ARCHITECTURE_UNSPECIFIED = 0
    X86 = 1
    X64 = 2
The CPU architecture for which packages in this distribution channel were built Values: ARCHITECTURE_UNSPECIFIED: Unknown architecture X86: X86 architecture X64: x64 architecture
62598fc226068e7796d4cbe4
class _RestIterable(object): <NEW_LINE> <INDENT> def __init__(self, response): <NEW_LINE> <INDENT> resp = response.json() <NEW_LINE> if 'errors' in resp: <NEW_LINE> <INDENT> self.results = resp['errors'] <NEW_LINE> <DEDENT> elif 'statuses' in resp: <NEW_LINE> <INDENT> self.results = resp['statuses'] <NEW_LINE> <DEDENT> elif hasattr(resp, '__iter__') and not isinstance(resp, dict): <NEW_LINE> <INDENT> if len(resp) > 0 and 'trends' in resp[0]: <NEW_LINE> <INDENT> self.results = resp[0]['trends'] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.results = resp <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> self.results = (resp,) <NEW_LINE> <DEDENT> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> for item in self.results: <NEW_LINE> <INDENT> yield item
Iterate statuses, errors or other iterable objects in a REST API response. :param response: The request.Response from a Twitter REST API request
62598fc24527f215b58ea158
class LivePipelineGetRequest(MethodRequestEmptyBodyBase):
    """Retrieves an existing live pipeline.

    Variables are only populated by the server, and will be ignored when
    sending a request. All required parameters must be populated in order
    to send to Azure.

    :ivar method_name: Required. Direct method method name. Constant filled
     by server.
    :vartype method_name: str
    :ivar api_version: Video Analyzer API version. The only acceptable
     values to pass in are None and "1.1". The default value is "1.1".
    :vartype api_version: str
    :ivar name: Required. Resource name.
    :vartype name: str
    """

    _validation = {
        'method_name': {'required': True, 'readonly': True},
        'name': {'required': True},
    }

    _attribute_map = {
        'method_name': {'key': 'methodName', 'type': 'str'},
        'api_version': {'key': '@apiVersion', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        name: str,
        api_version: Optional[str] = "1.1",
        **kwargs
    ):
        super(LivePipelineGetRequest, self).__init__(api_version=api_version, name=name, **kwargs)
        # Constant discriminator identifying this direct-method request.
        self.method_name = 'livePipelineGet'
Retrieves an existing live pipeline. Variables are only populated by the server, and will be ignored when sending a request. All required parameters must be populated in order to send to Azure. :ivar method_name: Required. Direct method method name.Constant filled by server. :vartype method_name: str :ivar api_version: Video Analyzer API version. The only acceptable values to pass in are None and "1.1". The default value is "1.1". :vartype api_version: str :ivar name: Required. Resource name. :vartype name: str
62598fc299fddb7c1ca62f31
class NotQueuedError(Error):
    """Team is not queued.

    :param message: human-readable description of the failure.
    :param team: the Team involved; kept on the exception so callers can
        inspect which team was not queued.
    """

    def __init__(self, message: str, team: Team):
        super().__init__(message)
        self.team = team
Team is not queued
62598fc2aad79263cf42ea5e
class SendInvoiceHandler(EmailHandler):
    """Send an e-mail with attached invoice to the customer after successful
    order completion, optionally BCC'ing the addresses passed as
    ``always_bcc`` to the handler upon initialization.

    Usage::

        signals.order_completed.connect(
            SendInvoiceHandler(always_bcc=['owner@example.com']),
            weak=False)
    """

    def message(self, sender, order, **kwargs):
        # Template convention: line 0 is the subject; line 1 is skipped
        # (presumably a blank separator); the rest forms the body.
        email = render_to_string('plata/notifications/order_completed.txt',
                                 self.context(kwargs)).splitlines()
        # NOTE(review): StringIO.StringIO is Python 2 only.
        content = StringIO.StringIO()
        pdf = PDFDocument(content)
        invoice_pdf(pdf, order)  # render the invoice PDF into ``content``
        message = EmailMessage(
            subject=email[0],
            body=u'\n'.join(email[2:]),
            to=[order.email],
        )
        message.attach('invoice-%09d.pdf' % order.id,
                       content.getvalue(), 'application/pdf')
        return message
Send an e-mail with attached invoice to the customer after successful order completion, optionally BCC'ing the addresses passed as ``always_bcc`` to the handler upon initialization. Usage:: signals.order_completed.connect( SendInvoiceHandler(always_bcc=['owner@example.com']), weak=False)
62598fc23d592f4c4edbb141
@test(groups=["plugins", "murano_plugin"])
class MuranoPlugin(TestBasic):
    """Murano Plugin Tests."""

    def __init__(self):
        super(MuranoPlugin, self).__init__()
        # Fail fast when the plugin path env var is missing or invalid.
        check_plugin_path_env(
            var_name='MURANO_PLUGIN_PATH',
            plugin_path=settings.MURANO_PLUGIN_PATH
        )

    def setup_murano_plugin(self, cluster_id,
                            murano_user='murano',
                            murano_db_password='murano_password',
                            cfapi=False, glare=False,
                            apps_url='http://storage.apps.openstack.org/'):
        # Enable and configure the 'detach-murano' plugin on the cluster.
        plugin_name = 'detach-murano'
        assert_true(
            self.fuel_web.check_plugin_exists(cluster_id, plugin_name),
            "Plugin couldn't be enabled. Check plugin version. Test aborted")
        # NOTE(review): 'murano_user_password' is filled from ``murano_user``
        # (a username), not a password argument — confirm this is intended.
        plugin_options = {
            'metadata/enabled': True,
            'metadata/versions/murano_user_password': murano_user,
            'metadata/versions/murano_db_password': murano_db_password,
            'metadata/versions/murano_glance_artifacts/value': glare,
            'metadata/versions/murano_cfapi/value': cfapi,
            'metadata/versions/murano_repo_url/value': apps_url
        }
        self.fuel_web.update_plugin_data(
            cluster_id, plugin_name, plugin_options)

    @test(depends_on=[SetupEnvironment.prepare_slaves_5],
          groups=["deploy_murano_with_glare_ha_one_controller"])
    @log_snapshot_after_test
    def deploy_murano_with_glare_ha_one_controller(self):
        # Deploy a 4-node cluster with Glare enabled, verify networking,
        # then run sanity and Murano platform OSTF tests.
        self.env.revert_snapshot("ready_with_5_slaves")
        utils.upload_tarball(
            ip=self.ssh_manager.admin_ip,
            tar_path=settings.MURANO_PLUGIN_PATH,
            tar_target="/var")
        utils.install_plugin_check_code(
            ip=self.ssh_manager.admin_ip,
            plugin=os.path.basename(settings.MURANO_PLUGIN_PATH))
        cluster_id = self.fuel_web.create_cluster(
            name=self.__class__.__name__,
            mode=settings.DEPLOYMENT_MODE,
            configure_ssl=False
        )
        self.setup_murano_plugin(cluster_id, glare=True)
        self.fuel_web.update_nodes(
            cluster_id,
            {
                "slave-01": ["controller"],
                "slave-02": ["compute"],
                "slave-03": ["cinder"],
                "slave-04": ["murano-node"]
            }
        )
        self.fuel_web.deploy_cluster_wait(cluster_id)
        self.fuel_web.verify_network(cluster_id)
        self.fuel_web.run_ostf(cluster_id=cluster_id, test_sets=['sanity'])
        logger.debug('Run OSTF platform tests')
        test_class_main = ('fuel_health.tests.tests_platform'
                           '.test_murano_linux.MuranoDeployLinuxServicesTests')
        tests_names = ['test_deploy_dummy_app_with_glare']
        test_classes = []
        for test_name in tests_names:
            test_classes.append('{0}.{1}'.format(test_class_main, test_name))
        for test_name in test_classes:
            self.fuel_web.run_single_ostf_test(
                cluster_id=cluster_id,
                test_sets=['tests_platform'],
                test_name=test_name,
                timeout=60 * 20)
        self.env.make_snapshot("deploy_murano_with_glare_ha_one_controller")
Murano Plugin Tests.
62598fc23317a56b869be695
class TestDeleteMember(unittest.TestCase):
    """Test member deletion."""

    layer = INTEGRATION_ANON_SURVEY_TESTING

    def setUp(self):
        # Upload the canned respondent fixture into survey ``s1``.
        self.portal = self.layer['portal']
        data_path = os.path.dirname(utils.__file__)
        data_catch = open(os.path.join(data_path, 'user_import'), 'rU')
        input = data_catch.read()
        data_catch.close()
        self.portal.s1.uploadRespondents(input=input)

    def testDeleteRespondent(self):
        # Deleting one of the two fixture respondents leaves only the other.
        s1 = getattr(self.portal, 's1')
        respondents = s1.getAuthenticatedRespondents()
        assert len(respondents) == 2
        s1.deleteAuthenticatedRespondent('user1@here.com')
        respondents = s1.getAuthenticatedRespondents()
        assert len(respondents) == 1
        assert respondents[0]['fullname'] == 'User Two'

    def testDeleteProperties(self):
        # Deletion must also clear per-respondent properties (email_sent),
        # including after a re-upload of the fixture.
        s1 = getattr(self.portal, 's1')
        s1.deleteAuthenticatedRespondent('user2@here.com')
        assert s1.getAuthenticatedRespondents()[0]['email_sent'] == ''
        s1.registerRespondentSent('user1@here.com')
        assert s1.getAuthenticatedRespondents()[0]['email_sent'] != ''
        s1.deleteAuthenticatedRespondent('user1@here.com')
        respondents = s1.getAuthenticatedRespondents()
        assert len(respondents) == 0
        data_path = os.path.dirname(utils.__file__)
        data_catch = open(os.path.join(data_path, 'user_import'), 'rU')
        input = data_catch.read()
        data_catch.close()
        s1.uploadRespondents(input=input)
        s1.deleteAuthenticatedRespondent('user2@here.com')
        assert s1.getAuthenticatedRespondents()[0]['email_sent'] == '', 'Known error'
Test member deletion
62598fc271ff763f4b5e7a07
class StatusCodeEnum(str, Enum):
    """Status of Jobs.

    Subclasses ``str`` so members compare equal to (and serialize as)
    their literal string values.
    """
    JOB_PENDING = 'job_pending'
    JOB_IN_PROGRESS = 'job_in_progress'
    JOB_FINISHED = 'job_finished'
    JOB_FAILED = 'job_failed'
    JOB_CANCELLED = 'job_cancelled'
Status of Jobs.
62598fc20fa83653e46f516f
class model_file_read(BaseModel):
    """Model for nornir_salt.plugins.tasks.file_read plugin arguments."""

    task: Task
    # One filegroup name or a list of names to read.
    filegroup: Union[StrictStr, List[StrictStr]]
    base_url: Optional[StrictStr] = "/var/nornir-salt/"
    task_name: Optional[StrictStr] = None
    last: Optional[StrictInt] = 1
    index: Optional[StrictStr] = "common"

    class Config:
        # Allow the non-pydantic ``Task`` type and reject unknown arguments.
        arbitrary_types_allowed = True
        extra = "forbid"
Model for nornir_salt.plugins.tasks.file_read plugin arguments
62598fc2a8370b77170f066c
class Pbkdf1_Test(TestCase):
    """test kdf helpers (PBKDF1)."""

    descriptionPrefix = "pbkdf1"

    # Known-answer vectors: (secret, salt, rounds, keylen, digest, expected).
    pbkdf1_tests = [
        (b('password'), hb('78578E5A5D63CB06'), 1000, 16, 'sha1', hb('dc19847e05c64d2faf10ebfb4a3d2a20')),
        (b('password'), b('salt'), 1000, 0, 'md5', b('')),
        (b('password'), b('salt'), 1000, 1, 'md5', hb('84')),
        (b('password'), b('salt'), 1000, 8, 'md5', hb('8475c6a8531a5d27')),
        (b('password'), b('salt'), 1000, 16, 'md5', hb('8475c6a8531a5d27e386cd496457812c')),
        (b('password'), b('salt'), 1000, None, 'md5', hb('8475c6a8531a5d27e386cd496457812c')),
        (b('password'), b('salt'), 1000, None, 'sha1', hb('4a8fd48e426ed081b535be5769892fa396293efb')),
    ]
    # md4 is not available on PyPy/Jython builds, so only test it elsewhere.
    if not (PYPY or JYTHON):
        pbkdf1_tests.append(
            (b('password'), b('salt'), 1000, None, 'md4', hb('f7f2e91100a8f96190f2dd177cb26453'))
        )

    def test_known(self):
        # Each vector must reproduce its expected derived key exactly.
        from lib.passlib.utils.pbkdf2 import pbkdf1
        for secret, salt, rounds, keylen, digest, correct in self.pbkdf1_tests:
            result = pbkdf1(secret, salt, rounds, keylen, digest)
            self.assertEqual(result, correct)

    def test_border(self):
        # Invalid argument types/values must raise TypeError/ValueError.
        from lib.passlib.utils.pbkdf2 import pbkdf1
        def helper(secret=b('secret'), salt=b('salt'), rounds=1, keylen=1, hash='md5'):
            return pbkdf1(secret, salt, rounds, keylen, hash)
        helper()
        self.assertRaises(TypeError, helper, secret=1)
        self.assertRaises(TypeError, helper, salt=1)
        self.assertRaises(ValueError, helper, hash='missing')
        self.assertRaises(ValueError, helper, rounds=0)
        self.assertRaises(TypeError, helper, rounds='1')
        self.assertRaises(ValueError, helper, keylen=-1)
        self.assertRaises(ValueError, helper, keylen=17, hash='md5')
        self.assertRaises(TypeError, helper, keylen='1')
test kdf helpers
62598fc25fc7496912d483c0
class AdministrationRights(object):
    """NOTE: This class is auto generated by the swagger code generator
    program. Do not edit the class manually.
    """

    # String constants for right values (powers of two — presumably
    # combinable bit flags; confirm against the API spec).
    _0 = "0"
    _1 = "1"
    _2 = "2"
    _4 = "4"
    _8 = "8"
    _16 = "16"
    _32 = "32"
    _64 = "64"

    # No serializable attributes: the maps are intentionally empty.
    swagger_types = {
    }

    attribute_map = {
    }

    def __init__(self):
        self.discriminator = None

    def to_dict(self):
        """Returns the model properties as a dict."""
        result = {}
        for attr, _ in six.iteritems(self.swagger_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value
        if issubclass(AdministrationRights, dict):
            for key, value in self.items():
                result[key] = value
        return result

    def to_str(self):
        """Returns the string representation of the model."""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        return self.to_str()

    def __eq__(self, other):
        if not isinstance(other, AdministrationRights):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not self == other
NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually.
62598fc2d8ef3951e32c7fa1
class Not(Validator):
    """Validate a structure by specifying what it shouldn't be.

    :param other: validator that must FAIL for *obj* to pass.
    :param message: error message used when *obj* unexpectedly validates.
    """

    def __init__(self, other, message):
        self.other = other
        self.message = message

    def __call__(self, obj, location="/"):
        try:
            self.other(obj, location)
        except Exception:
            # Fixed: the original bare ``except:`` also swallowed
            # SystemExit/KeyboardInterrupt; catch only Exception so the
            # wrapped validator's genuine failures still count as success.
            return obj
        # The wrapped validator accepted obj, which is exactly what this
        # combinator forbids.
        raise ValidationException(self.message, "Not %s" % self.other,
                                  str(obj), location)

    def __repr__(self):
        return "Not(%s)" % self.other
Validate a structure by specifying what it shouldn't be.
62598fc2be7bc26dc9251fa1
class LeadMassActionForm(forms.Form):
    """Mass action form for Leads: bulk-set status and/or assignee."""

    # Placeholder querysets; the real ones are set per-user in __init__.
    status = forms.ModelChoiceField(queryset=[], required=False)
    assignedto = forms.ModelChoiceField(queryset=[], required=False)
    instance = None

    def __init__(self, user, *args, **kwargs):
        # Pop 'instance' (the object being acted on) before Form.__init__,
        # which does not accept that keyword.
        if 'instance' in kwargs:
            self.instance = kwargs['instance']
            del kwargs['instance']
        super(LeadMassActionForm, self).__init__(*args, **kwargs)
        # Restrict selectable statuses to lead statuses this user may use.
        self.fields['status'].queryset = Object.filter_permitted(user, SaleStatus.objects.filter(
            use_leads=True), mode='x')
        self.fields['status'].label = _("Status:")
        self.fields['assignedto'].queryset = User.objects
        self.fields['assignedto'].label = _("Assign To:")

    def save(self, *args, **kwargs):
        # Apply whichever of status/assignee was provided; silently does
        # nothing without an instance or when the form is invalid.
        if self.instance:
            if self.is_valid():
                if self.cleaned_data['status']:
                    self.instance.status = self.cleaned_data['status']
                if self.cleaned_data['assignedto']:
                    self.instance.assigned.add(self.cleaned_data['assignedto'])
                self.instance.save()
Mass action form for Leads
62598fc2ec188e330fdf8b1e
class CopyFileToFile(RsyncClone):
    """copy a file src to dst, dst is a full path to the destination file
    {options_doc_str}
    """

    def __init__(self, src, dst, **kwargs):
        super().__init__(src, dst, **kwargs)

    def __call__(self, *args, **kwargs) -> None:
        PythonBatchCommandBase.__call__(self, *args, **kwargs)
        # Expand env vars / user and resolve both paths before copying.
        resolved_src: Path = utils.ExpandAndResolvePath(self.src)
        resolved_dst: Path = utils.ExpandAndResolvePath(self.dst)
        if self.output_script and sys.platform == 'darwin':
            # Script mode on macOS: emit a shell ``cp`` instead of copying.
            utils.write_shell_command(f" cp \"{resolved_src}\" \"{resolved_dst}\" \n", self.output_script)
        else:
            # Ensure the destination directory exists, then copy in-process.
            with MakeDir(resolved_dst.parent, report_own_progress=False) as md:
                md()
            self.top_destination_does_not_exist = False
            self.copy_file_to_file(resolved_src, resolved_dst)
copy a file src to dst, dst is a full path to the destination file {options_doc_str}
62598fc2796e427e5384ea20
class ScrapeSensor(Entity):
    """Representation of a web scrape sensor."""

    def __init__(self, hass, rest, name, select, value_template, unit):
        self.rest = rest
        self._name = name
        self._state = STATE_UNKNOWN
        self._select = select            # CSS selector for the target element
        self._value_template = value_template
        self._unit_of_measurement = unit
        # Fetch an initial state at construction time.
        self.update()

    @property
    def name(self):
        """Return the name of the sensor."""
        return self._name

    @property
    def unit_of_measurement(self):
        """Return the unit of measurement."""
        return self._unit_of_measurement

    @property
    def state(self):
        """Return the current state."""
        return self._state

    def update(self):
        """Fetch the page and extract the selected element's text."""
        self.rest.update()
        from bs4 import BeautifulSoup
        raw_data = BeautifulSoup(self.rest.data, 'html.parser')
        _LOGGER.debug(raw_data)
        # First match of the CSS selector; raises IndexError when absent.
        value = raw_data.select(self._select)[0].text
        _LOGGER.debug(value)
        if self._value_template is not None:
            self._state = self._value_template.render_with_possible_json_value(
                value, STATE_UNKNOWN)
        else:
            self._state = value
Representation of a web scrape sensor.
62598fc25fcc89381b266292
class Interval(object):
    """Represents the numeric interval from ``start`` to ``end``."""

    def __init__(self, s=0, e=0):
        # Both endpoints default to zero.
        self.start, self.end = s, e

    def __repr__(self):
        # Rendered as "start-end", e.g. "2-7".
        return "{0}-{1}".format(self.start, self.end)
表示start到stop的数字区间
62598fc2f9cc0f698b1c5416
class Farsnews(scrapy.Spider): <NEW_LINE> <INDENT> name = "Farsnews" <NEW_LINE> allowed_domains = ["farsnews.com"] <NEW_LINE> urls = [ "http://www.farsnews.com/RSS", "http://www.farsnews.com/rss/world", "http://www.farsnews.com/rss/politics", "http://www.farsnews.com/rss/universities", "http://www.farsnews.com/rss/sports", "http://www.farsnews.com/rss/economy", "http://www.farsnews.com/rss/foreign-policy", "http://www.farsnews.com/rss/culture", "http://www.farsnews.com/rss/cyberspace", "http://www.farsnews.com/rss/resistance", ] <NEW_LINE> def start_requests(self): <NEW_LINE> <INDENT> for url in self.urls: <NEW_LINE> <INDENT> yield scrapy.Request(url=url, callback=self.parse_RSS, dont_filter=True, meta={'cat': url.split('/')[-1]}) <NEW_LINE> <DEDENT> <DEDENT> def parse_RSS(self, response): <NEW_LINE> <INDENT> for i in response.xpath('/rss/channel/item'): <NEW_LINE> <INDENT> news_id = i.xpath('link/text()').extract()[0].split('/')[-1] <NEW_LINE> if newsItem.objects(newsId=news_id).count() == 0: <NEW_LINE> <INDENT> a = newsItem(newsId=news_id) <NEW_LINE> a.category = response.meta['cat'] <NEW_LINE> a.title = i.xpath('title/text()').extract()[0] <NEW_LINE> try: <NEW_LINE> <INDENT> a.description = i.xpath('description/text()').extract()[0] <NEW_LINE> <DEDENT> except IndexError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> a.link = i.xpath('link/text()').extract()[0] <NEW_LINE> a.date = datetime.strptime(str(i.xpath('pubDate/text()').extract()[0]), '%a, %d %b %Y %H:%M:%S') <NEW_LINE> a.press = self.name <NEW_LINE> a.save() <NEW_LINE> print("News %s from %s has saved from rss" % (a.newsId, self.name)) <NEW_LINE> yield scrapy.Request(url=a.link, callback=self.parse_news, dont_filter=True, meta={"id": news_id}) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def parse_news(self, response): <NEW_LINE> <INDENT> text = '' <NEW_LINE> for p in response.xpath( "/html/body/div[@class='row']/div[@class='container mainframe']/div[@class='cen-lef-col']/" "div[@class='centercolumn col-md-7 
col-sm-12 col-xs-12']/div[@class='nwstxtmainpane']/" "span[@id='nwstxtBodyPane']"): <NEW_LINE> <INDENT> for line in p.xpath('p[@class="rtejustify"]'): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> text += line.xpath('text()').extract()[0] + '\n' <NEW_LINE> <DEDENT> except IndexError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> if len(text): <NEW_LINE> <INDENT> a = newsItem.objects(newsId=response.meta['id'], press=self.name).first() <NEW_LINE> a.text = text <NEW_LINE> a.finished = True <NEW_LINE> a.save() <NEW_LINE> print("News %s from %s has finished crawling." % (a.newsId, self.name)) <NEW_LINE> del a
Farsnews Crawler
62598fc2a219f33f346c6a92
class company(object): <NEW_LINE> <INDENT> def symbol(self,symbol): <NEW_LINE> <INDENT> self.symbol=symbol
This a a general company description
62598fc226068e7796d4cbe6
class PackageHandler(BaseAptRepoHandler): <NEW_LINE> <INDENT> allowed_methods = ('GET', 'DELETE') <NEW_LINE> model = server.aptrepo.models.Package <NEW_LINE> _DEFAULT_MAX_PACKAGES = 100 <NEW_LINE> @handle_exception <NEW_LINE> def read(self, request, **kwargs): <NEW_LINE> <INDENT> if (len(kwargs) == 0): <NEW_LINE> <INDENT> return self._constrain_queryset(request, self.model.objects.all(), self._DEFAULT_MAX_PACKAGES) <NEW_LINE> <DEDENT> return self._find_package(**kwargs) <NEW_LINE> <DEDENT> @handle_exception <NEW_LINE> def delete(self, request, **kwargs): <NEW_LINE> <INDENT> package = self._find_package(**kwargs) <NEW_LINE> repository = get_repository_controller(request=request) <NEW_LINE> repository.remove_all_package_instances(package.id) <NEW_LINE> return rc.DELETED <NEW_LINE> <DEDENT> def _find_package(self, id=None, package_name=None, version=None, architecture=None): <NEW_LINE> <INDENT> if id: <NEW_LINE> <INDENT> return self.model.objects.get(id=id) <NEW_LINE> <DEDENT> elif package_name and version and architecture: <NEW_LINE> <INDENT> return self.model.objects.get(package_name=package_name, version=version, architecture=architecture)
REST API call handler for packages
62598fc2a05bb46b3848aaf7
class MissingCorpusException(Exception): <NEW_LINE> <INDENT> def __init__(self, message=MISSING_CORPUS_MESSAGE, *args, **kwargs): <NEW_LINE> <INDENT> super(MissingCorpusException, self).__init__(message, *args, **kwargs)
Exception thrown when a user tries to use a feature that requires a dataset or model that the user does not have on their system.
62598fc2851cf427c66b8541
class Wizard(Character): <NEW_LINE> <INDENT> def __init__(self, is_evil): <NEW_LINE> <INDENT> super().__init__(15, 10, 12, 700, 1, 100, "Wizard") <NEW_LINE> self._is_evil = is_evil <NEW_LINE> self._mana = 150 <NEW_LINE> <DEDENT> def level_up(self): <NEW_LINE> <INDENT> super().level_up() <NEW_LINE> if self._is_evil: <NEW_LINE> <INDENT> self._mana += 47 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._mana += 74 <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def is_evil(self): <NEW_LINE> <INDENT> return self._is_evil <NEW_LINE> <DEDENT> @property <NEW_LINE> def mana(self): <NEW_LINE> <INDENT> return self._mana <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> if self._is_evil: <NEW_LINE> <INDENT> return "Fire Wizard " + super().__str__() + " | MANA: " + str(self._mana) + '.' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return "Water Wizard " + super().__str__() + " | MANA: " + str(self._mana) + '.'
Wizard Character Type Class
62598fc25166f23b2e24366c
class UsernamePasswordTests(unittest.TestCase): <NEW_LINE> <INDENT> def test_interface(self): <NEW_LINE> <INDENT> I, C = credentials.IUsernamePassword, mc.UsernamePassword <NEW_LINE> self.assertTrue(I.implementedBy(C)) <NEW_LINE> <DEDENT> def test_simple(self): <NEW_LINE> <INDENT> username, password = "username", "password" <NEW_LINE> up = mc.UsernamePassword(username=username, password=password) <NEW_LINE> self.assertEqual(up.username, username) <NEW_LINE> self.assertEqual(up.password, password)
Tests for a storable username and password pair.
62598fc24428ac0f6e6587b0
class option(nodes.General, nodes.Element): <NEW_LINE> <INDENT> pass
node for defining an option
62598fc24a966d76dd5ef160
class Cost(object): <NEW_LINE> <INDENT> def __init__(self, vcls): <NEW_LINE> <INDENT> self.l = 0.0 <NEW_LINE> self.r = 0.0 <NEW_LINE> self.i = 0.0 <NEW_LINE> self.path = vcls() <NEW_LINE> <DEDENT> def set_lri(self, value): <NEW_LINE> <INDENT> self.l = self.r = self.i = value
Represents a Cost for opt strategy calculation
62598fc24f88993c371f0650
class MicrosoftPartnerSdkContractsV1CollectionsPagedResourceCollectionMicrosoftPartnerSdkContractsV1Invoice(Model): <NEW_LINE> <INDENT> _validation = { 'total_count': {'readonly': True}, 'items': {'readonly': True}, 'attributes': {'readonly': True}, } <NEW_LINE> _attribute_map = { 'continuation_token': {'key': 'continuationToken', 'type': 'str'}, 'total_count': {'key': 'totalCount', 'type': 'int'}, 'items': {'key': 'items', 'type': '[MicrosoftPartnerSdkContractsV1Invoice]'}, 'links': {'key': 'links', 'type': 'MicrosoftPartnerSdkContractsV1CommonResourceLinks'}, 'attributes': {'key': 'attributes', 'type': 'MicrosoftPartnerSdkContractsV1CommonResourceAttributes'}, } <NEW_LINE> def __init__(self, continuation_token=None, links=None): <NEW_LINE> <INDENT> super(MicrosoftPartnerSdkContractsV1CollectionsPagedResourceCollectionMicrosoftPartnerSdkContractsV1Invoice, self).__init__() <NEW_LINE> self.continuation_token = continuation_token <NEW_LINE> self.total_count = None <NEW_LINE> self.items = None <NEW_LINE> self.links = links <NEW_LINE> self.attributes = None
Paged Resource Collection. Variables are only populated by the server, and will be ignored when sending a request. :param continuation_token: Gets or sets the continuation token. :type continuation_token: str :ivar total_count: Gets the total count. :vartype total_count: int :ivar items: Gets the collection items. :vartype items: list[~microsoft.store.partnercenterservices.models.MicrosoftPartnerSdkContractsV1Invoice] :param links: Gets or sets the links. :type links: ~microsoft.store.partnercenterservices.models.MicrosoftPartnerSdkContractsV1CommonResourceLinks :ivar attributes: Gets the attributes. :vartype attributes: ~microsoft.store.partnercenterservices.models.MicrosoftPartnerSdkContractsV1CommonResourceAttributes
62598fc2283ffb24f3cf3b10
class LinkWorldAngularVelocityState(LinkState): <NEW_LINE> <INDENT> def __init__(self, robot, link_ids=None, window_size=1, axis=None, ticks=1): <NEW_LINE> <INDENT> super(LinkWorldAngularVelocityState, self).__init__(robot, link_ids, window_size=window_size, axis=axis, ticks=ticks) <NEW_LINE> <DEDENT> def _read(self): <NEW_LINE> <INDENT> self.data = self.robot.get_link_world_angular_velocities(self.links, flatten=True)
Link world angular velocity state
62598fc27047854f4633f65f
class http_completion(BaseHTTPRequestHandler): <NEW_LINE> <INDENT> def _set_headers(self): <NEW_LINE> <INDENT> self.send_response(200) <NEW_LINE> self.send_header("Content-type", "application/json") <NEW_LINE> self.end_headers() <NEW_LINE> <DEDENT> def do_POST(self): <NEW_LINE> <INDENT> self._set_headers() <NEW_LINE> length = int(self.headers.get('content-length', 0)) <NEW_LINE> read = self.rfile.read(length) <NEW_LINE> if sys.version_info >= (3, 0): <NEW_LINE> <INDENT> read = read.decode('utf-8') <NEW_LINE> <DEDENT> read = json.loads(read) <NEW_LINE> payload = completions(read["source"], read["line"], read["column"]) <NEW_LINE> payload = json.dumps(payload) <NEW_LINE> if sys.version_info >= (3, 0): <NEW_LINE> <INDENT> payload = payload.encode('utf-8') <NEW_LINE> <DEDENT> self.wfile.write(payload) <NEW_LINE> return
Completion handler which returns the completions for a given source, line and cursor positon.
62598fc2ff9c53063f51a8da
class TestMergeCounts(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.subdir = './test_merge_counts_files/' <NEW_LINE> self.counts1 = '{0}/counts1.txt'.format(self.subdir) <NEW_LINE> self.counts2 = '{0}/counts2.txt'.format(self.subdir) <NEW_LINE> self.expected = '{0}/expected_merge.txt'.format(self.subdir) <NEW_LINE> self.expectednorm = '{0}/expected_merge_normalized.txt'.format(self.subdir) <NEW_LINE> for f in [self.counts1, self.counts2, self.expected, self.expectednorm]: <NEW_LINE> <INDENT> self.assertTrue(os.path.isfile(f), 'Cannot find required file {0}'.format(f)) <NEW_LINE> <DEDENT> self.testmerge = '{0}/test_merge.txt'.format(self.subdir) <NEW_LINE> self.testmergenorm = '{0}/test_merge_normalized.txt'.format(self.subdir) <NEW_LINE> for f in [self.testmerge, self.testmergenorm]: <NEW_LINE> <INDENT> if os.path.isfile(f): <NEW_LINE> <INDENT> os.remove(f) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def test_Merge(self): <NEW_LINE> <INDENT> for (out, expected, extracmd) in [ (self.testmerge, self.expected, []), (self.testmergenorm, self.expectednorm, ['--normalize']), ]: <NEW_LINE> <INDENT> cmds = ['dms_merge', out, 'sum', self.counts1, self.counts2] + extracmd <NEW_LINE> sys.stderr.write('\nRunning the following command:\n{0}\n'.format(' '.join(cmds))) <NEW_LINE> subprocess.call(cmds) <NEW_LINE> self.assertTrue(os.path.isfile(out), 'Failed to create file {0} with command:\n{1}\n'.format(out, ' '.join(cmds))) <NEW_LINE> actual = dms_tools.file_io.ReadDMSCounts(out, chartype='codon') <NEW_LINE> expect = dms_tools.file_io.ReadDMSCounts(expected, chartype='codon') <NEW_LINE> sites = set(actual.keys()) <NEW_LINE> self.assertTrue(sites == set(expect.keys()), 'Created output {0} does not match expected output {1} in terms of the sites keys.'.format(out, expected)) <NEW_LINE> for site in sites: <NEW_LINE> <INDENT> self.assertTrue(actual[site] == expect[site], 'Created output {0} does not match expected output {1} at site {2}'.format(out, 
expected, site)) <NEW_LINE> <DEDENT> sys.stderr.write('Successfully created expected output.')
Runs ``dms_merge`` on test data to add counts.
62598fc2442bda511e95c6ec
class PreferenceType5: <NEW_LINE> <INDENT> q = 0 <NEW_LINE> p = 1 <NEW_LINE> def __init__(self, q=0, p=1): <NEW_LINE> <INDENT> self.q = q <NEW_LINE> self.p = p <NEW_LINE> <DEDENT> def value(self, diff): <NEW_LINE> <INDENT> if (diff <= self.q): <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> if (diff <= self.p): <NEW_LINE> <INDENT> return (diff - self.q) / (self.p - self.q) <NEW_LINE> <DEDENT> return 1
Linear preferences and indifference zone.
62598fc221bff66bcd722ef7
class CCT(NumericValue): <NEW_LINE> <INDENT> bank = BANK_1 <NEW_LINE> unit = 'K' <NEW_LINE> locations = MemoryRange(start=0x21, end=0x22, default=0xff, type_=MemoryType.NVM_RW_L) <NEW_LINE> mask_supported = True <NEW_LINE> max_value = 17000 <NEW_LINE> @classmethod <NEW_LINE> def raw_to_value(cls, raw): <NEW_LINE> <INDENT> if raw == bytes([0xff, 0xfe]): <NEW_LINE> <INDENT> return "Part 209 implemented" <NEW_LINE> <DEDENT> return super().raw_to_value(raw) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def is_valid(cls, raw): <NEW_LINE> <INDENT> if raw == bytes([0xff, 0xfe]): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return super().is_valid(raw)
CCT in K Valid when in the range 0..17000 May return "Part 209 implemented" for raw value 0xfffe
62598fc263b5f9789fe853ff
class Car: <NEW_LINE> <INDENT> def __init__(self, name, fuel=0): <NEW_LINE> <INDENT> self.fuel = fuel <NEW_LINE> self.odometer = 0 <NEW_LINE> self.name = name <NEW_LINE> <DEDENT> def str(self): <NEW_LINE> <INDENT> return "{}, fuel= {}, Odometer: {}".format(self.name, self.fuel, self.odometer) <NEW_LINE> <DEDENT> def add_fuel(self, amount): <NEW_LINE> <INDENT> self.fuel += amount <NEW_LINE> <DEDENT> def drive(self, distance): <NEW_LINE> <INDENT> if distance > self.fuel: <NEW_LINE> <INDENT> distance = self.fuel <NEW_LINE> self.fuel = 0 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.fuel -= distance <NEW_LINE> <DEDENT> self.odometer += distance <NEW_LINE> return distance
Represent a Car object.
62598fc2d486a94d0ba2c25d
class V2beta2ContainerResourceMetricSource(object): <NEW_LINE> <INDENT> openapi_types = { 'container': 'str', 'name': 'str', 'target': 'V2beta2MetricTarget' } <NEW_LINE> attribute_map = { 'container': 'container', 'name': 'name', 'target': 'target' } <NEW_LINE> def __init__(self, container=None, name=None, target=None, local_vars_configuration=None): <NEW_LINE> <INDENT> if local_vars_configuration is None: <NEW_LINE> <INDENT> local_vars_configuration = Configuration.get_default_copy() <NEW_LINE> <DEDENT> self.local_vars_configuration = local_vars_configuration <NEW_LINE> self._container = None <NEW_LINE> self._name = None <NEW_LINE> self._target = None <NEW_LINE> self.discriminator = None <NEW_LINE> self.container = container <NEW_LINE> self.name = name <NEW_LINE> self.target = target <NEW_LINE> <DEDENT> @property <NEW_LINE> def container(self): <NEW_LINE> <INDENT> return self._container <NEW_LINE> <DEDENT> @container.setter <NEW_LINE> def container(self, container): <NEW_LINE> <INDENT> if self.local_vars_configuration.client_side_validation and container is None: <NEW_LINE> <INDENT> raise ValueError("Invalid value for `container`, must not be `None`") <NEW_LINE> <DEDENT> self._container = container <NEW_LINE> <DEDENT> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return self._name <NEW_LINE> <DEDENT> @name.setter <NEW_LINE> def name(self, name): <NEW_LINE> <INDENT> if self.local_vars_configuration.client_side_validation and name is None: <NEW_LINE> <INDENT> raise ValueError("Invalid value for `name`, must not be `None`") <NEW_LINE> <DEDENT> self._name = name <NEW_LINE> <DEDENT> @property <NEW_LINE> def target(self): <NEW_LINE> <INDENT> return self._target <NEW_LINE> <DEDENT> @target.setter <NEW_LINE> def target(self, target): <NEW_LINE> <INDENT> if self.local_vars_configuration.client_side_validation and target is None: <NEW_LINE> <INDENT> raise ValueError("Invalid value for `target`, must not be `None`") <NEW_LINE> <DEDENT> self._target = target 
<NEW_LINE> <DEDENT> def to_dict(self, serialize=False): <NEW_LINE> <INDENT> result = {} <NEW_LINE> def convert(x): <NEW_LINE> <INDENT> if hasattr(x, "to_dict"): <NEW_LINE> <INDENT> args = getfullargspec(x.to_dict).args <NEW_LINE> if len(args) == 1: <NEW_LINE> <INDENT> return x.to_dict() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return x.to_dict(serialize) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> return x <NEW_LINE> <DEDENT> <DEDENT> for attr, _ in six.iteritems(self.openapi_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> attr = self.attribute_map.get(attr, attr) if serialize else attr <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: convert(x), value )) <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], convert(item[1])), value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = convert(value) <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pprint.pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, V2beta2ContainerResourceMetricSource): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.to_dict() == other.to_dict() <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, V2beta2ContainerResourceMetricSource): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return self.to_dict() != other.to_dict()
NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech Do not edit the class manually.
62598fc25fcc89381b266293
class ScriptConfig: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.conf_path = '/grafana_script/config/scriptconfig.ini' <NEW_LINE> self.filepath = self.load_filepath() <NEW_LINE> <DEDENT> def load_filepath(self): <NEW_LINE> <INDENT> return os.path.abspath('') + self.conf_path <NEW_LINE> <DEDENT> def get_value(self, section, option): <NEW_LINE> <INDENT> config = ConfigParser() <NEW_LINE> config.read(self.filepath) <NEW_LINE> section_option = config.get(section, option) <NEW_LINE> return section_option
Get values from your configuration.ini file
62598fc255399d3f056267a4
class SequenceFileDataset(dataset_ops.DatasetSource): <NEW_LINE> <INDENT> @deprecation.deprecated( None, "tf.contrib.hadoop will be removed in 2.0, the support for Apache Hadoop " "will continue to be provided through the tensorflow/io GitHub project.") <NEW_LINE> def __init__(self, filenames): <NEW_LINE> <INDENT> self._filenames = ops.convert_to_tensor( filenames, dtype=dtypes.string, name="filenames") <NEW_LINE> variant_tensor = gen_dataset_ops.sequence_file_dataset( self._filenames, self._flat_types) <NEW_LINE> super(SequenceFileDataset, self).__init__(variant_tensor) <NEW_LINE> <DEDENT> @property <NEW_LINE> def element_spec(self): <NEW_LINE> <INDENT> return (tensor_spec.TensorSpec([], dtypes.string), tensor_spec.TensorSpec([], dtypes.string))
A Sequence File Dataset that reads the sequence file.
62598fc29f288636728189c2
class Ne(Binary): <NEW_LINE> <INDENT> pass
An AST that checks if the left node does not equal the right node when evaluating.
62598fc2a05bb46b3848aaf9
class TransformerDecoderLayer(TransformerDecoderLayerBase): <NEW_LINE> <INDENT> def __init__( self, d_model, heads, d_ff, dropout, attention_dropout, self_attn_type="scaled-dot", max_relative_positions=0, aan_useffn=False, full_context_alignment=False, alignment_heads=0, pos_ffn_activation_fn=ActivationFunction.relu, ): <NEW_LINE> <INDENT> super(TransformerDecoderLayer, self).__init__( d_model, heads, d_ff, dropout, attention_dropout, self_attn_type, max_relative_positions, aan_useffn, full_context_alignment, alignment_heads, pos_ffn_activation_fn=pos_ffn_activation_fn, ) <NEW_LINE> self.context_attn = MultiHeadedAttention( heads, d_model, dropout=attention_dropout ) <NEW_LINE> self.layer_norm_2 = nn.LayerNorm(d_model, eps=1e-6) <NEW_LINE> <DEDENT> def update_dropout(self, dropout, attention_dropout): <NEW_LINE> <INDENT> super(TransformerDecoderLayer, self).update_dropout( dropout, attention_dropout ) <NEW_LINE> self.context_attn.update_dropout(attention_dropout) <NEW_LINE> <DEDENT> def _forward( self, inputs, memory_bank, src_pad_mask, tgt_pad_mask, layer_cache=None, step=None, future=False, ): <NEW_LINE> <INDENT> dec_mask = None <NEW_LINE> if inputs.size(1) > 1: <NEW_LINE> <INDENT> dec_mask = self._compute_dec_mask(tgt_pad_mask, future) <NEW_LINE> <DEDENT> inputs_norm = self.layer_norm_1(inputs) <NEW_LINE> query, _ = self._forward_self_attn( inputs_norm, dec_mask, layer_cache, step ) <NEW_LINE> query = self.drop(query) + inputs <NEW_LINE> query_norm = self.layer_norm_2(query) <NEW_LINE> mid, attns = self.context_attn( memory_bank, memory_bank, query_norm, mask=src_pad_mask, layer_cache=layer_cache, attn_type="context", ) <NEW_LINE> output = self.feed_forward(self.drop(mid) + query) <NEW_LINE> return output, attns
Transformer Decoder layer block in Pre-Norm style. Pre-Norm style is an improvement w.r.t. Original paper's Post-Norm style, providing better converge speed and performance. This is also the actual implementation in tensor2tensor and also avalable in fairseq. See https://tunz.kr/post/4 and :cite:`DeeperTransformer`. .. mermaid:: graph LR %% "*SubLayer" can be self-attn, src-attn or feed forward block A(input) --> B[Norm] B --> C["*SubLayer"] C --> D[Drop] D --> E((+)) A --> E E --> F(out)
62598fc27b180e01f3e49196
class IgnSismologiaFeedManager(FeedManagerBase): <NEW_LINE> <INDENT> def __init__( self, generate_callback, update_callback, remove_callback, coordinates, filter_radius=None, filter_minimum_magnitude=None, ): <NEW_LINE> <INDENT> feed = IgnSismologiaFeed( coordinates, filter_radius=filter_radius, filter_minimum_magnitude=filter_minimum_magnitude, ) <NEW_LINE> super().__init__(feed, generate_callback, update_callback, remove_callback)
Feed Manager for IGN Sismología feed.
62598fc260cbc95b063645ca
class RequestTestCase(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.environ = { 'HTTP_COOKIE': COOKIE_STRING, 'REQUEST_METHOD': 'get', 'PATH_INFO': '/hello/world', 'QUERY_STRING': 'a=1&b=text', } <NEW_LINE> self.request = Request(self, self.environ) <NEW_LINE> <DEDENT> def testApp(self): <NEW_LINE> <INDENT> assert self.request.app == self, 'Error setting response application' <NEW_LINE> <DEDENT> def testRequestMethod(self): <NEW_LINE> <INDENT> assert self.request.method == 'get', ('Wrong request method: %s' % self.request.method) <NEW_LINE> <DEDENT> def testPathInfo(self): <NEW_LINE> <INDENT> assert self.request.path == '/hello/world', ('Wrong request path: %s' % self.request.path) <NEW_LINE> <DEDENT> def testCookies(self): <NEW_LINE> <INDENT> assert self.request.cookies['csrftoken'].value == CSRFTOKEN, ( 'Wrong cookie "csrftoken": %s' % self.request.cookies['csrftoken'].value) <NEW_LINE> assert self.request.cookies['CLTRACK'].value == CLTRACK, ( 'Wrong cookie "CLTRACK": %s' % self.request.cookies['CLTRACK'].value) <NEW_LINE> <DEDENT> def testHeaders(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def testRequestParams(self): <NEW_LINE> <INDENT> assert self.request.params['a'] == '1', ('Wrong param "a" value: %s' % self.request.params['a']) <NEW_LINE> assert self.request.params['b'] == 'text', ( 'Wrong param "b" value: %s' % self.request.params['b']) <NEW_LINE> <DEDENT> def testRequestParamsAccess(self): <NEW_LINE> <INDENT> assert self.request['a'] == '1', ('Wrong param "a" value: %s' % self.request['a']) <NEW_LINE> assert self.request['b'] == 'text', ('Wrong param "b" value: %s' % self.request['b']) <NEW_LINE> assert 'a' in self.request, '"a" param was not found in request'
Test case for partial template execution
62598fc2cc40096d6161a31f
class BoxOAuth2Token(basetoken.OAuth2Token): <NEW_LINE> <INDENT> def __init__(self, *args, **kw): <NEW_LINE> <INDENT> kw.setdefault("auth_uri", "https://api.box.com/oauth2/authorize") <NEW_LINE> kw.setdefault("token_uri", "https://api.box.com/oauth2/token") <NEW_LINE> kw.setdefault("revoke_uri", "https://api.box.com/oauth2/revoke") <NEW_LINE> super(BoxOAuth2Token, self).__init__(*args, **kw) <NEW_LINE> self._dirty=False <NEW_LINE> <DEDENT> def _refresh(self, *args, **kw): <NEW_LINE> <INDENT> ret=super(BoxOAuth2Token, self)._refresh(*args, **kw) <NEW_LINE> if not self.invalid: <NEW_LINE> <INDENT> self._dirty=True <NEW_LINE> <DEDENT> return ret <NEW_LINE> <DEDENT> def get_access_token(self, *args, **kw): <NEW_LINE> <INDENT> ret=super(BoxOAuth2Token, self).get_access_token(*args, **kw) <NEW_LINE> self._dirty=True <NEW_LINE> return ret <NEW_LINE> <DEDENT> @property <NEW_LINE> def dirty(self): <NEW_LINE> <INDENT> return getattr(self, '_dirty', False) <NEW_LINE> <DEDENT> def modify_request(self, http_request): <NEW_LINE> <INDENT> super(BoxOAuth2Token, self).modify_request(http_request) <NEW_LINE> http_request.headers['Authorization'] = '%s%s' % (OAUTH2_AUTH_LABEL, self.access_token) <NEW_LINE> return http_request <NEW_LINE> <DEDENT> ModifyRequest = modify_request <NEW_LINE> __call__ = modify_request
Token object for OAuth 2.0 as described on <http://code.google.com/apis/accounts/docs/OAuth2.html>. Uses "Bearer" rather than OAuth as HTTP auth scheme
62598fc2091ae35668704eb5
class Solution: <NEW_LINE> <INDENT> @printTime() <NEW_LINE> def minCut(self, s: str) -> int: <NEW_LINE> <INDENT> self.len = len(s) <NEW_LINE> mem = [[True for _ in range(self.len)] for _ in range(self.len)] <NEW_LINE> for i in range(self.len - 1, - 1, -1): <NEW_LINE> <INDENT> for j in range(i + 1, self.len): <NEW_LINE> <INDENT> mem[i][j] = s[i] == s[j] and mem[i + 1][j - 1] <NEW_LINE> <DEDENT> <DEDENT> dp = [0 for i in range(self.len)] <NEW_LINE> dp[0] = 0 <NEW_LINE> for i in range(1, self.len): <NEW_LINE> <INDENT> dp[i] = dp[i - 1] + 1 <NEW_LINE> for j in range(i + 1): <NEW_LINE> <INDENT> if mem[j][i]: <NEW_LINE> <INDENT> if j == 0: <NEW_LINE> <INDENT> dp[i] = 0 <NEW_LINE> break <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> dp[i] = min(dp[i], dp[j - 1] + 1) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> return dp[-1]
DP
62598fc23317a56b869be697
class JobPriority(util.OrderedEnum): <NEW_LINE> <INDENT> EVENT_SERVICE = 1 <NEW_LINE> EVENT_STATE = 2 <NEW_LINE> EVENT_TIME = 3 <NEW_LINE> EVENT_DEFAULT = 4 <NEW_LINE> @staticmethod <NEW_LINE> def from_event_type(event_type): <NEW_LINE> <INDENT> if event_type == EVENT_TIME_CHANGED: <NEW_LINE> <INDENT> return JobPriority.EVENT_TIME <NEW_LINE> <DEDENT> elif event_type == EVENT_STATE_CHANGED: <NEW_LINE> <INDENT> return JobPriority.EVENT_STATE <NEW_LINE> <DEDENT> elif event_type == EVENT_CALL_SERVICE: <NEW_LINE> <INDENT> return JobPriority.EVENT_SERVICE <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return JobPriority.EVENT_DEFAULT
Provides priorities for bus events.
62598fc27c178a314d78d72d
class Group(Container): <NEW_LINE> <INDENT> def add_criterion(self, name, priority, and_or, search_type, value): <NEW_LINE> <INDENT> criterion = SearchCriteria(name, priority, and_or, search_type, value) <NEW_LINE> self.criteria.append(criterion) <NEW_LINE> <DEDENT> @property <NEW_LINE> def is_smart(self): <NEW_LINE> <INDENT> result = False <NEW_LINE> if self.findtext("is_smart") == "true": <NEW_LINE> <INDENT> result = True <NEW_LINE> <DEDENT> return result <NEW_LINE> <DEDENT> @is_smart.setter <NEW_LINE> def is_smart(self, value): <NEW_LINE> <INDENT> self.set_bool("is_smart", value) <NEW_LINE> if value is True: <NEW_LINE> <INDENT> if self.find("criteria") is None: <NEW_LINE> <INDENT> self.criteria = ElementTree.SubElement(self, "criteria") <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def add_device(self, device, container): <NEW_LINE> <INDENT> if self.findtext("is_smart") == "false": <NEW_LINE> <INDENT> self.add_object_to_path(device, container) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError("Devices may not be added to smart groups.") <NEW_LINE> <DEDENT> <DEDENT> def has_member(self, device_object): <NEW_LINE> <INDENT> if device_object.tag == "computer": <NEW_LINE> <INDENT> container_search = "computers/computer" <NEW_LINE> <DEDENT> elif device_object.tag == "mobile_device": <NEW_LINE> <INDENT> container_search = "mobile_devices/mobile_device" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError <NEW_LINE> <DEDENT> return len([device for device in self.findall(container_search) if device.findtext("id") == device_object.id]) is not 0
Abstract class for ComputerGroup and MobileDeviceGroup.
62598fc250812a4eaa620d2c
@test(groups=[VOLUMES_DRIVER], depends_on_classes=[AddVolume]) <NEW_LINE> class AfterVolumeIsAdded(VolumeTest): <NEW_LINE> <INDENT> @time_out(120) <NEW_LINE> def test_api_get(self): <NEW_LINE> <INDENT> volume = poll_until(lambda: self.story.get_volume(), lambda volume: volume["status"] != "creating") <NEW_LINE> self.assertEqual(volume["status"], "available") <NEW_LINE> self.assert_volume_as_expected(volume) <NEW_LINE> self.assertTrue(volume["attach_status"], "detached") <NEW_LINE> <DEDENT> @time_out(60) <NEW_LINE> def test_mgmt_volume_check(self): <NEW_LINE> <INDENT> print("self.story.original_device_info : %r" % self.story.original_device_info) <NEW_LINE> info = self.story.api.get_storage_device_info(self.story.context) <NEW_LINE> print("device_info : %r" % info) <NEW_LINE> self.assertNotEqual(info, None, "the storage device information should exist") <NEW_LINE> self.assertEqual(self.story.original_device_info['raw_total'], info['raw_total']) <NEW_LINE> volume_size = int(self.story.volume['size']) * (1024 ** 3) * 2 <NEW_LINE> print("volume_size: %r" % volume_size) <NEW_LINE> print("self.story.volume['size']: %r" % self.story.volume['size']) <NEW_LINE> avail = int(self.story.original_device_info['raw_avail']) - volume_size <NEW_LINE> print("avail space: %r" % avail) <NEW_LINE> self.assertEqual(int(info['raw_avail']), avail)
Check that the volume can be retrieved via the API, and setup. All we want to see returned is a list-like with an initial string.
62598fc2283ffb24f3cf3b12
class GetMediaItem(AuthenticatedMethod): <NEW_LINE> <INDENT> method_name = 'wp.getMediaItem' <NEW_LINE> method_args = ('attachment_id',) <NEW_LINE> results_class = WordPressMedia
Retrieve an individual media item. Parameters: `attachment_id`: ID of the media item. Returns: :class:`WordPressMedia` instance.
62598fc2ec188e330fdf8b22
class SpikeDetector(base.Component): <NEW_LINE> <INDENT> waveform_src = base.RequiredFeature("SignalSource", base.HasAttributes("signal")) <NEW_LINE> def __init__(self, thresh='auto', contact=0, type='max', resample=1, sp_win=(-0.2, 0.8), align=True): <NEW_LINE> <INDENT> self._thresh = thresh <NEW_LINE> self.contact = contact <NEW_LINE> self.type = type <NEW_LINE> self.align = align <NEW_LINE> self.resample = resample <NEW_LINE> self.sp_win = sp_win <NEW_LINE> self.sp_times = None <NEW_LINE> self._est_thresh = None <NEW_LINE> super(SpikeDetector, self).__init__() <NEW_LINE> <DEDENT> def _get_threshold(self): <NEW_LINE> <INDENT> return self._est_thresh or self._thresh <NEW_LINE> <DEDENT> def _set_threshold(self, value): <NEW_LINE> <INDENT> self._thresh = value <NEW_LINE> self._est_thresh = None <NEW_LINE> <DEDENT> threshold = property(_get_threshold, _set_threshold) <NEW_LINE> def _detect(self): <NEW_LINE> <INDENT> sp = self.waveform_src.signal <NEW_LINE> spt = sort.extract.detect_spikes(sp, edge=self.type, contact=self.contact, thresh=self._thresh) <NEW_LINE> self._est_thresh = spt['thresh'] <NEW_LINE> if self.align: <NEW_LINE> <INDENT> self.sp_times = sort.extract.align_spikes(sp, spt, self.sp_win, type=self.type, contact=self.contact, resample=self.resample) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.sp_times = spt <NEW_LINE> <DEDENT> <DEDENT> def _update(self): <NEW_LINE> <INDENT> self._detect() <NEW_LINE> <DEDENT> def read_events(self): <NEW_LINE> <INDENT> if self.sp_times is None: <NEW_LINE> <INDENT> self._detect() <NEW_LINE> <DEDENT> return self.sp_times <NEW_LINE> <DEDENT> events = property(read_events)
Detect Spikes with alignment
62598fc2f9cc0f698b1c5418
class MacroFrame(ga._AnagFrame): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> if not kwargs.has_key('title') and len(args) < 3: <NEW_LINE> <INDENT> kwargs['title'] = FRAME_TITLE <NEW_LINE> <DEDENT> ga._AnagFrame.__init__(self, *args, **kwargs) <NEW_LINE> self.LoadAnagPanel(MacroPanel(self, -1))
Frame Gestione tabella Macro.
62598fc2a05bb46b3848aafb
class SGDParams(Params): <NEW_LINE> <INDENT> class Fields: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> class Defaults: <NEW_LINE> <INDENT> pass
Stochastic gradient descent updates.
62598fc25166f23b2e243670
class CodeMetrics(object): <NEW_LINE> <INDENT> def __init__(self, code, ast): <NEW_LINE> <INDENT> self.code = code <NEW_LINE> self.ast = ast <NEW_LINE> <DEDENT> @property <NEW_LINE> def loc(self): <NEW_LINE> <INDENT> return len(self.code.split("\n")) <NEW_LINE> <DEDENT> @property <NEW_LINE> def linelength(self): <NEW_LINE> <INDENT> lines = self.code.split("\n") <NEW_LINE> return sum([ len(l) for l in lines ]) / len(lines) <NEW_LINE> <DEDENT> @property <NEW_LINE> def nodesbag(self): <NEW_LINE> <INDENT> bag = collections.defaultdict(int) <NEW_LINE> for x in ast.walk(self.ast): <NEW_LINE> <INDENT> bag[x.__class__.__name__.lower()] += 1 <NEW_LINE> <DEDENT> return bag <NEW_LINE> <DEDENT> @property <NEW_LINE> def vocabulary(self): <NEW_LINE> <INDENT> nodesbag = self.nodesbag <NEW_LINE> return len(nodesbag) / sum(nodesbag.values()) <NEW_LINE> <DEDENT> @property <NEW_LINE> def height(self): <NEW_LINE> <INDENT> class V(ast.NodeVisitor): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.heights = {} <NEW_LINE> <DEDENT> def visit(self, node): <NEW_LINE> <INDENT> self.heights.setdefault(node, 0) <NEW_LINE> for child in ast.iter_child_nodes(node): <NEW_LINE> <INDENT> self.heights[child] = self.heights[node] + 1 <NEW_LINE> self.visit(child) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> v = V() <NEW_LINE> v.visit(self.ast) <NEW_LINE> return max(v.heights.values()) + 1
Some simple metrics about the code
62598fc2cc40096d6161a320
class Fila: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.dados = [] <NEW_LINE> self.tamanho = 0 <NEW_LINE> <DEDENT> def push(self, valor): <NEW_LINE> <INDENT> self.dados.append(valor) <NEW_LINE> self.tamanho += 1 <NEW_LINE> <DEDENT> def pop(self): <NEW_LINE> <INDENT> if not self.empty(): <NEW_LINE> <INDENT> self.tamanho -= 1 <NEW_LINE> primeiroRetirar = self.dados.pop(0) <NEW_LINE> return primeiroRetirar <NEW_LINE> <DEDENT> <DEDENT> def peek(self): <NEW_LINE> <INDENT> valorRetirar = self.dados[self.tamanho - self.tamanhoFila()] <NEW_LINE> return valorRetirar <NEW_LINE> <DEDENT> def empty(self): <NEW_LINE> <INDENT> return self.tamanhoFila() == 0 <NEW_LINE> <DEDENT> def imprimeFila(self): <NEW_LINE> <INDENT> i = 0 <NEW_LINE> while i < len(self.dados): <NEW_LINE> <INDENT> print(self.dados[i]) <NEW_LINE> i += 1 <NEW_LINE> <DEDENT> <DEDENT> def inverteFila(self): <NEW_LINE> <INDENT> i = len(self.dados) - 1 <NEW_LINE> print("--------------------") <NEW_LINE> print("Fila Invertida: ") <NEW_LINE> while i >= 0: <NEW_LINE> <INDENT> print(self.dados[i]) <NEW_LINE> i -= 1 <NEW_LINE> <DEDENT> <DEDENT> def tamanhoFila(self): <NEW_LINE> <INDENT> return self.tamanho
Implementa uma fila de tamanho estatico
62598fc260cbc95b063645cc
class Environment(object): <NEW_LINE> <INDENT> def __init__(self, name, environment_tree): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.tree = environment_tree <NEW_LINE> <DEDENT> def provider(self): <NEW_LINE> <INDENT> return self.tree.get('provider', 'heat') <NEW_LINE> <DEDENT> def provider_params(self): <NEW_LINE> <INDENT> return self.tree.get('provider_params', {}) <NEW_LINE> <DEDENT> def floating_ips(self): <NEW_LINE> <INDENT> return self.tree.get('floating_ips', {}) <NEW_LINE> <DEDENT> def logdispatchers_params(self): <NEW_LINE> <INDENT> return self.tree.get('logdispatchers', [])
The object that describe the customer environment.
62598fc2aad79263cf42ea65
class MainHandler(tornado.web.RequestHandler): <NEW_LINE> <INDENT> def get(self, path=None): <NEW_LINE> <INDENT> gfl = GenerateFileList() <NEW_LINE> if path: <NEW_LINE> <INDENT> p = path if path[0] == '/' else '/' + path <NEW_LINE> fida = FileInfoDictAssembler(item_path=p) <NEW_LINE> (dir_list, file_list) = fida.fetch() <NEW_LINE> root_path = gfl.get_root_path(p) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> root_path = None <NEW_LINE> fida = FileInfoDictAssembler(item_path=PHOTO_PATH) <NEW_LINE> (dir_list, file_list) = fida.fetch() <NEW_LINE> <DEDENT> self.render("base.html", dir_list=dir_list, file_list=file_list, root_path=root_path, loop_cycling=loop_cycling, title="smp")
top main url
62598fc27c178a314d78d72f
class Options(Structure): <NEW_LINE> <INDENT> _fields_ = [("level", c_int), ("n_empties", c_int), ("midgame_error", c_int) , ("endcut_error", c_int), ("verbosity", c_int)];
Options class.
62598fc271ff763f4b5e7a0d
class RunCommandResult(OperationStatusResponse): <NEW_LINE> <INDENT> _validation = { 'name': {'readonly': True}, 'status': {'readonly': True}, 'start_time': {'readonly': True}, 'end_time': {'readonly': True}, 'error': {'readonly': True}, } <NEW_LINE> _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'status': {'key': 'status', 'type': 'str'}, 'start_time': {'key': 'startTime', 'type': 'iso-8601'}, 'end_time': {'key': 'endTime', 'type': 'iso-8601'}, 'error': {'key': 'error', 'type': 'ApiError'}, 'output': {'key': 'properties.output', 'type': 'object'}, } <NEW_LINE> def __init__(self, output=None): <NEW_LINE> <INDENT> super(RunCommandResult, self).__init__() <NEW_LINE> self.output = output
Run command operation response. Variables are only populated by the server, and will be ignored when sending a request. :ivar name: Operation ID :vartype name: str :ivar status: Operation status :vartype status: str :ivar start_time: Start time of the operation :vartype start_time: datetime :ivar end_time: End time of the operation :vartype end_time: datetime :ivar error: Api error :vartype error: :class:`ApiError <azure.mgmt.compute.v2017_03_30.models.ApiError>` :param output: Operation output data (raw JSON) :type output: object
62598fc250812a4eaa620d2d
class Plugin(ABC): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.driver = None <NEW_LINE> self.message_listeners: Dict[ re.Pattern, Sequence[MessageFunction] ] = defaultdict(list) <NEW_LINE> self.webhook_listeners: Dict[ re.Pattern, Sequence[WebHookFunction] ] = defaultdict(list) <NEW_LINE> self.help = listen_to("^help$", needs_mention=True)(Plugin.help) <NEW_LINE> self.help = listen_to("^!help$")(self.help) <NEW_LINE> <DEDENT> def initialize(self, driver: Driver, settings: Optional[Settings] = None): <NEW_LINE> <INDENT> self.driver = driver <NEW_LINE> for attribute in dir(self): <NEW_LINE> <INDENT> attribute = getattr(self, attribute) <NEW_LINE> if isinstance(attribute, Function): <NEW_LINE> <INDENT> for function in [attribute] + attribute.siblings: <NEW_LINE> <INDENT> function.plugin = self <NEW_LINE> if isinstance(function, MessageFunction): <NEW_LINE> <INDENT> self.message_listeners[function.matcher].append(function) <NEW_LINE> <DEDENT> elif isinstance(function, WebHookFunction): <NEW_LINE> <INDENT> self.webhook_listeners[function.matcher].append(function) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise TypeError( f"{self.__class__.__name__} has a function of unsupported" f" type {type(function)}." 
) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> return self <NEW_LINE> <DEDENT> def on_start(self): <NEW_LINE> <INDENT> logging.debug(f"Plugin {self.__class__.__name__} started!") <NEW_LINE> return self <NEW_LINE> <DEDENT> def on_stop(self): <NEW_LINE> <INDENT> logging.debug(f"Plugin {self.__class__.__name__} stopped!") <NEW_LINE> return self <NEW_LINE> <DEDENT> async def call_function( self, function: Function, event: EventWrapper, groups: Optional[Sequence[str]] = [], ): <NEW_LINE> <INDENT> if function.is_coroutine: <NEW_LINE> <INDENT> await function(event, *groups) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.driver.threadpool.add_task(function, event, *groups) <NEW_LINE> <DEDENT> <DEDENT> def get_help_string(self): <NEW_LINE> <INDENT> string = f"Plugin {self.__class__.__name__} has the following functions:\n" <NEW_LINE> string += "----\n" <NEW_LINE> for functions in self.message_listeners.values(): <NEW_LINE> <INDENT> for function in functions: <NEW_LINE> <INDENT> string += f"- {function.get_help_string()}" <NEW_LINE> <DEDENT> string += "----\n" <NEW_LINE> <DEDENT> if len(self.webhook_listeners) > 0: <NEW_LINE> <INDENT> string += "### Registered webhooks:\n" <NEW_LINE> for functions in self.webhook_listeners.values(): <NEW_LINE> <INDENT> for function in functions: <NEW_LINE> <INDENT> string += f"- {function.get_help_string()}" <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return string <NEW_LINE> <DEDENT> async def help(self, message: Message): <NEW_LINE> <INDENT> self.driver.reply_to(message, self.get_help_string())
A Plugin is a self-contained class that defines what functions should be executed given different inputs. It will be called by the EventHandler whenever one of its listeners is triggered, but execution of the corresponding function is handled by the plugin itself. This way, you can implement multithreading or multiprocessing as desired.
62598fc27047854f4633f664
class TableRowGroupBox(ParentBox): <NEW_LINE> <INDENT> proper_table_child = True <NEW_LINE> internal_table_or_caption = True <NEW_LINE> tabular_container = True <NEW_LINE> proper_parents = (TableBox, InlineTableBox) <NEW_LINE> header_group = False <NEW_LINE> footer_group = False
Box for elements with ``display: table-row-group``
62598fc27cff6e4e811b5cb5
class AutosCreateView(LoginRequiredMixin, CreateView): <NEW_LINE> <INDENT> def form_valid(self, form): <NEW_LINE> <INDENT> print('form_valid called') <NEW_LINE> object = form.save(commit=False) <NEW_LINE> object.owner = self.request.user <NEW_LINE> object.save() <NEW_LINE> return super(AutosCreateView, self).form_valid(form)
Sub-class of the CreateView to automatically pass the Request to the Form and add the owner to the saved object.
62598fc2a8370b77170f0672
class Event(resource.Resource): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def sample(cls): <NEW_LINE> <INDENT> return cls()
Event descriptor.
62598fc2ff9c53063f51a8de
class AladdinDevice(CoverEntity): <NEW_LINE> <INDENT> _attr_device_class = CoverDeviceClass.GARAGE <NEW_LINE> _attr_supported_features = SUPPORTED_FEATURES <NEW_LINE> def __init__(self, acc: AladdinConnectClient, device: DoorDevice) -> None: <NEW_LINE> <INDENT> self._acc = acc <NEW_LINE> self._device_id = device["device_id"] <NEW_LINE> self._number = device["door_number"] <NEW_LINE> self._attr_name = device["name"] <NEW_LINE> self._attr_unique_id = f"{self._device_id}-{self._number}" <NEW_LINE> <DEDENT> def close_cover(self, **kwargs: Any) -> None: <NEW_LINE> <INDENT> self._acc.close_door(self._device_id, self._number) <NEW_LINE> <DEDENT> def open_cover(self, **kwargs: Any) -> None: <NEW_LINE> <INDENT> self._acc.open_door(self._device_id, self._number) <NEW_LINE> <DEDENT> def update(self) -> None: <NEW_LINE> <INDENT> status = STATES_MAP.get( self._acc.get_door_status(self._device_id, self._number) ) <NEW_LINE> self._attr_is_opening = status == STATE_OPENING <NEW_LINE> self._attr_is_closing = status == STATE_CLOSING <NEW_LINE> self._attr_is_closed = None if status is None else status == STATE_CLOSED
Representation of Aladdin Connect cover.
62598fc223849d37ff851343
class BaseHandler(web.RequestHandler): <NEW_LINE> <INDENT> def initialize(self, database): <NEW_LINE> <INDENT> self.db = database
Not really necessary class. It can be used in order to extend base functionality of all specific request handlers. Now it provides quick access to database object, although it is available in singleton manner.
62598fc2d486a94d0ba2c261
class DataStats(object): <NEW_LINE> <INDENT> _PREC = 1e-9 <NEW_LINE> def __init__(self, var_names=None, raw_data=None): <NEW_LINE> <INDENT> self.var_names = var_names <NEW_LINE> if raw_data is not None: <NEW_LINE> <INDENT> self.raw_data = raw_data <NEW_LINE> <DEDENT> <DEDENT> def get_corr_mat(self): <NEW_LINE> <INDENT> if not hasattr(self,'corr_mat'): <NEW_LINE> <INDENT> if hasattr(self,'raw_data'): <NEW_LINE> <INDENT> self.corr_mat = np.corrcoef(self.raw_data,rowvar=False) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> Exception('Raw data not present, cannot compute correlation matrix.') <NEW_LINE> <DEDENT> <DEDENT> return self.corr_mat <NEW_LINE> <DEDENT> def set_corr_mat(self, corr_mat): <NEW_LINE> <INDENT> if not hasattr(self,'raw_data'): <NEW_LINE> <INDENT> self.corr_mat = corr_mat <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> Exception('Raw data present! Compute correlation matrix using get_corr_mat().') <NEW_LINE> <DEDENT> <DEDENT> def get_prec_mat(self, var_set_key): <NEW_LINE> <INDENT> if not hasattr(self,'prec_mat'): <NEW_LINE> <INDENT> self.prec_mat = dict() <NEW_LINE> <DEDENT> if var_set_key not in self.prec_mat.keys(): <NEW_LINE> <INDENT> corr_mat = self.get_corr_mat() <NEW_LINE> var_list = list(var_set_key) <NEW_LINE> var_corr = corr_mat[np.ix_(var_list,var_list)] <NEW_LINE> self.prec_mat[var_set_key] = np.linalg.pinv(var_corr) <NEW_LINE> <DEDENT> return self.prec_mat[var_set_key] <NEW_LINE> <DEDENT> def get_part_corr(self, pair, cond_set): <NEW_LINE> <INDENT> var_set = list(pair)+list(cond_set) <NEW_LINE> var_set_keys = tuple(sorted(var_set)) <NEW_LINE> prec_mat = self.get_prec_mat(var_set_keys) <NEW_LINE> var_range = range(len(var_set_keys)) <NEW_LINE> xi = [ind for ind in var_range if var_set_keys[ind]==pair[0]][0] <NEW_LINE> yi = [ind for ind in var_range if var_set_keys[ind]==pair[1]][0] <NEW_LINE> part_corr = -prec_mat[xi,yi]/np.sqrt(prec_mat[xi,xi]*prec_mat[yi,yi]) <NEW_LINE> nsamp = self.get_nsamp() <NEW_LINE> ncond = len(cond_set) <NEW_LINE> 
with np.errstate(divide='ignore'): <NEW_LINE> <INDENT> if abs(part_corr) > 1-self._PREC: <NEW_LINE> <INDENT> part_corr = 1-2*self._PREC <NEW_LINE> <DEDENT> fisher_z = np.sqrt(nsamp-ncond-3)*np.arctanh(part_corr) <NEW_LINE> <DEDENT> return part_corr,fisher_z <NEW_LINE> <DEDENT> def get_nsamp(self): <NEW_LINE> <INDENT> if hasattr(self,'raw_data'): <NEW_LINE> <INDENT> nsamp = np.shape(self.raw_data)[0] <NEW_LINE> <DEDENT> elif hasattr(self,'nsamp'): <NEW_LINE> <INDENT> nsamp = self.nsamp <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> Exception('Number of samples is unknown and cannot be computed from data.') <NEW_LINE> <DEDENT> return nsamp <NEW_LINE> <DEDENT> def set_nsamp(self,nsamp): <NEW_LINE> <INDENT> if not hasattr(self,'raw_data'): <NEW_LINE> <INDENT> self.nsamp = nsamp <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> Exception('Raw data present! Compute number of samples using get_nsamp().')
Manages statistical information on causal model. Allows for specification of statistics "by hand" when sample data is not available.
62598fc2adb09d7d5dc0a80d
class BaseModel(): <NEW_LINE> <INDENT> def __init__(self, object_name, table): <NEW_LINE> <INDENT> self.table = table <NEW_LINE> self.object_name = object_name <NEW_LINE> self.error_message = "" <NEW_LINE> self.error_code = 200 <NEW_LINE> self.id = generate_id(table) <NEW_LINE> <DEDENT> def as_json(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def save(self): <NEW_LINE> <INDENT> self.table.append(self.as_json()) <NEW_LINE> <DEDENT> def delete(self): <NEW_LINE> <INDENT> for i in range(len(self.table)): <NEW_LINE> <INDENT> if self.table[i]['id'] == self.id: <NEW_LINE> <INDENT> return self.table.pop(i) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def validate_object(self): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> def find_by_id(self, id): <NEW_LINE> <INDENT> self.id = id <NEW_LINE> for i in range(len(self.table)): <NEW_LINE> <INDENT> if self.table[i]['id'] == id: <NEW_LINE> <INDENT> return self.table[i] <NEW_LINE> <DEDENT> <DEDENT> return None <NEW_LINE> <DEDENT> def from_json(self, json): <NEW_LINE> <INDENT> return self
model that defines all models
62598fc2283ffb24f3cf3b15
class CopyTest(TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.l = [0, 3, 16, 1] <NEW_LINE> <DEDENT> def testcopyPitches(self): <NEW_LINE> <INDENT> a = PCSet(self.l) <NEW_LINE> b = PSet(self.l) <NEW_LINE> c = a.copy() <NEW_LINE> d = b.copy() <NEW_LINE> self.assertTrue(c.pitches == self.l and d.pitches == self.l) <NEW_LINE> <DEDENT> def testcopySettings(self): <NEW_LINE> <INDENT> def verify(current): <NEW_LINE> <INDENT> self.assertEqual(current.__class__, PCSet) <NEW_LINE> self.assertTrue(current._ordered == False and current._multiset == False) <NEW_LINE> self.assertEqual(current._mod, 7) <NEW_LINE> self.assertTrue(current._canon_t, True) <NEW_LINE> self.assertEqual(current._canon_i, False) <NEW_LINE> self.assertEqual(current._canon_m, True) <NEW_LINE> self.assertEqual(current.pitches, self.l) <NEW_LINE> <DEDENT> a = PCSet(self.l) <NEW_LINE> a.mod(7) <NEW_LINE> a.ordered(False) <NEW_LINE> a.multiset(False) <NEW_LINE> a.canon(True, False, True) <NEW_LINE> a2 = a.copy() <NEW_LINE> verify(a2) <NEW_LINE> b = PSet(self.l) <NEW_LINE> self.assertEqual(b.__class__, PSet) <NEW_LINE> b = a.copy() <NEW_LINE> verify(b)
Create a new instance with the pitches, settings and class of the original
62598fc2fff4ab517ebcda77
class TestBravoFactoryStarted(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.d = tempfile.mkdtemp() <NEW_LINE> self.name = "unittest" <NEW_LINE> self.bcp = BravoConfigParser() <NEW_LINE> self.bcp.add_section("world unittest") <NEW_LINE> d = { "authenticator" : "offline", "automatons" : "", "generators" : "", "mode" : "creative", "port" : "0", "seasons" : "winter, spring", "serializer" : "alpha", "url" : "file://%s" % self.d, } <NEW_LINE> for k, v in d.items(): <NEW_LINE> <INDENT> self.bcp.set("world unittest", k, v) <NEW_LINE> <DEDENT> self.f = BravoFactory(self.bcp, self.name) <NEW_LINE> self.f.startFactory() <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> self.f.stopFactory() <NEW_LINE> shutil.rmtree(self.d) <NEW_LINE> <DEDENT> def test_trivial(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_create_entity_pickup(self): <NEW_LINE> <INDENT> entity = self.f.create_entity(0, 0, 0, "Item") <NEW_LINE> self.assertEqual(entity.eid, 2) <NEW_LINE> self.assertEqual(self.f.eid, 2) <NEW_LINE> <DEDENT> def test_create_entity_player(self): <NEW_LINE> <INDENT> entity = self.f.create_entity(0, 0, 0, "Player", username="unittest") <NEW_LINE> self.assertEqual(entity.eid, 2) <NEW_LINE> self.assertEqual(entity.username, "unittest") <NEW_LINE> self.assertEqual(self.f.eid, 2) <NEW_LINE> <DEDENT> def test_give(self): <NEW_LINE> <INDENT> self.f.give((0, 0, 0), (2, 0), 1) <NEW_LINE> <DEDENT> def test_give_oversized(self): <NEW_LINE> <INDENT> count = [0] <NEW_LINE> def broadcast(packet): <NEW_LINE> <INDENT> count[0] += 1 <NEW_LINE> <DEDENT> self.patch(self.f, "broadcast", broadcast) <NEW_LINE> self.f.give((0, 0, 0), (2, 0), 65) <NEW_LINE> self.assertEqual(count[0], 2) <NEW_LINE> <DEDENT> def test_players_near(self): <NEW_LINE> <INDENT> players = [ self.f.create_entity(0, 0, 0, "Player", username=""), self.f.create_entity(0, 2, 0, "Player", username=""), self.f.create_entity(1, 0, 3, "Player", username=""), self.f.create_entity(0, 
4, 1, "Player", username=""), ] <NEW_LINE> for i, player in enumerate(players): <NEW_LINE> <INDENT> self.f.protocols[i] = MockProtocol(player) <NEW_LINE> <DEDENT> expected_results = [ (players[0], 1, []), (players[0], 2, [3]), (players[0], 4, [3, 4]), (players[0], 5, [3, 4, 5]), (players[1], 3, [2, 5]), ] <NEW_LINE> for player, radius, result in expected_results: <NEW_LINE> <INDENT> found = [p.eid for p in self.f.players_near(player, radius)] <NEW_LINE> self.assertEqual(set(found), set(result))
Tests which require ``startFactory()`` to be called.
62598fc24527f215b58ea160
class OnetSkillListProcessor(object): <NEW_LINE> <INDENT> def __init__(self, onet_source, output_filename, hash_function, ksa_types=None): <NEW_LINE> <INDENT> self.output_filename = output_filename <NEW_LINE> self.onet_source = onet_source <NEW_LINE> self.hash_function = hash_function <NEW_LINE> self.ksa_types = ksa_types or KSA_TYPE_CONFIG.keys() <NEW_LINE> <DEDENT> def onet_to_pandas(self, filename, col_name, ksa_type, use_relevance=True): <NEW_LINE> <INDENT> logging.info('Converting ONET %s to pandas', filename) <NEW_LINE> with self.onet_source.ensure_file(filename) as fullpath: <NEW_LINE> <INDENT> with open(fullpath) as f: <NEW_LINE> <INDENT> if use_relevance: <NEW_LINE> <INDENT> onet = [ row for row in csv.DictReader(f, delimiter='\t') if row['Scale ID'] == 'LV' and row['Not Relevant'] == 'N' and float(row['Data Value']) > 0 ] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> onet = [row for row in csv.DictReader(f, delimiter='\t')] <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> onet = pd.DataFrame(onet) <NEW_LINE> if ksa_type: <NEW_LINE> <INDENT> col_name = col_name + ['ksa_type'] <NEW_LINE> onet['ksa_type'] = ksa_type <NEW_LINE> <DEDENT> for col in col_name: <NEW_LINE> <INDENT> onet[col] = onet[col].astype(str).str.lower() <NEW_LINE> <DEDENT> return onet[col_name] <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> dataframes = [ self.onet_to_pandas(*(KSA_TYPE_CONFIG[ksa_type])) for ksa_type in self.ksa_types ] <NEW_LINE> new_columns = ['O*NET-SOC Code', 'Element ID', 'ONET KSA', 'ksa_type'] <NEW_LINE> for df in dataframes: <NEW_LINE> <INDENT> df.columns = new_columns <NEW_LINE> <DEDENT> onet_modelreference = self.onet_to_pandas( 'Content Model Reference.txt', ['Element ID', 'Description'], ksa_type=None, use_relevance=False ) <NEW_LINE> onet_ksas = pd.concat( dataframes, ignore_index=True ) <NEW_LINE> onet_ksas = pd.merge( onet_ksas, onet_modelreference, how='left', on=['Element ID'] ) <NEW_LINE> logging.info('Uniqifying skills') <NEW_LINE> 
onet_ksas.drop_duplicates('ONET KSA', inplace=True) <NEW_LINE> onet_ksas['skill_uuid'] = onet_ksas['ONET KSA'] .apply(self.hash_function) <NEW_LINE> onet_ksas[transforms[0]] = onet_ksas['ONET KSA'] .apply(lowercase_strip_punc) <NEW_LINE> onet_ksas.to_csv(self.output_filename, sep='\t')
An object that creates a skills CSV based on ONET data Originally written by Kwame Porter Robinson
62598fc2a05bb46b3848aafd
class StopTrackingDownloadCount(BackendMessage): <NEW_LINE> <INDENT> pass
Stop tracking the download count.
62598fc3167d2b6e312b7208
class Resnet_2D(nn.Module): <NEW_LINE> <INDENT> def __init__(self, input_nc, output_nc, ngf=64, norm_layer=nn.BatchNorm2d, use_dropout=False, n_blocks=6, padding_type='reflect', n_downsampling = 2): <NEW_LINE> <INDENT> assert(n_blocks >= 0) <NEW_LINE> super(Resnet_2D, self).__init__() <NEW_LINE> if type(norm_layer) == functools.partial: <NEW_LINE> <INDENT> use_bias = norm_layer.func == nn.InstanceNorm2d <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> use_bias = norm_layer == nn.InstanceNorm2d <NEW_LINE> <DEDENT> print("\n------Initiating ResNet------\n") <NEW_LINE> model = [] <NEW_LINE> p = 0 <NEW_LINE> if padding_type == 'reflect': <NEW_LINE> <INDENT> model += [nn.ReflectionPad2d(3)] <NEW_LINE> <DEDENT> elif padding_type == 'replicate': <NEW_LINE> <INDENT> model += [nn.ReplicationPad2d(3)] <NEW_LINE> <DEDENT> elif padding_type == 'zero': <NEW_LINE> <INDENT> p = 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise NotImplementedError('padding [%s] is not implemented' % padding_type) <NEW_LINE> <DEDENT> model += [nn.Conv2d(input_nc, ngf, kernel_size=7, padding=p, bias=use_bias), norm_layer(ngf), nn.ReLU(True)] <NEW_LINE> for i in range(n_downsampling): <NEW_LINE> <INDENT> mult = 2 ** i <NEW_LINE> model += [nn.Conv2d(ngf * mult, ngf * mult * 2, kernel_size=3, stride=2, padding=1, bias=use_bias), norm_layer(ngf * mult * 2), nn.ReLU(True)] <NEW_LINE> <DEDENT> mult = 2 ** n_downsampling <NEW_LINE> for i in range(n_blocks): <NEW_LINE> <INDENT> model += [ResnetBlock(ngf * mult, padding_type=padding_type, norm_layer=norm_layer, use_dropout=use_dropout, use_bias=use_bias)] <NEW_LINE> <DEDENT> for i in range(n_downsampling): <NEW_LINE> <INDENT> mult = 2 ** (n_downsampling - i) <NEW_LINE> model += [nn.ConvTranspose2d(ngf * mult, int(ngf * mult / 2), kernel_size=3, stride=2, padding=1, output_padding=1, bias=use_bias), norm_layer(int(ngf * mult / 2)), nn.ReLU(True)] <NEW_LINE> <DEDENT> p = 0 <NEW_LINE> if padding_type == 'reflect': <NEW_LINE> <INDENT> model += 
[nn.ReflectionPad2d(3)] <NEW_LINE> model += [nn.Conv2d(ngf, output_nc, kernel_size=7, padding=0)] <NEW_LINE> <DEDENT> elif padding_type == 'replicate': <NEW_LINE> <INDENT> model += [nn.ReplicationPad2d(3)] <NEW_LINE> model += [nn.Conv2d(ngf, output_nc, kernel_size=7, padding=0)] <NEW_LINE> <DEDENT> elif padding_type == 'zero': <NEW_LINE> <INDENT> model += [nn.ConvTranspose2d(ngf, output_nc, kernel_size=7, padding=1)] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise NotImplementedError('padding [%s] is not implemented' % padding_type) <NEW_LINE> <DEDENT> model += [nn.Tanh()] <NEW_LINE> self.model = nn.Sequential(*model) <NEW_LINE> self.input_nc = input_nc <NEW_LINE> self.output_nc = output_nc <NEW_LINE> <DEDENT> def forward(self, input): <NEW_LINE> <INDENT> return self.model(input)
Resnet-based generator that consists of Resnet blocks between a few downsampling/upsampling operations. We adapt Torch code and idea from Justin Johnson's neural style transfer project(https://github.com/jcjohnson/fast-neural-style)
62598fc37047854f4633f666
class DS18B20_on_Arduino_HTTPServer_RequestHandler(BaseHTTPRequestHandler): <NEW_LINE> <INDENT> __ports = None <NEW_LINE> def do_GET(self): <NEW_LINE> <INDENT> if self.__ports is None: <NEW_LINE> <INDENT> serial_devices = [] <NEW_LINE> for g in SERIAL_FILENAME_GLOBS: <NEW_LINE> <INDENT> serial_devices.extend(glob.glob(g)) <NEW_LINE> <DEDENT> if DEBUG: <NEW_LINE> <INDENT> print('available devices include: {}\n'.format(serial_devices), file=sys.stderr, flush=True) <NEW_LINE> <DEDENT> self.__ports = [] <NEW_LINE> for s in serial_devices: <NEW_LINE> <INDENT> self.__ports.append(serial.Serial(s, PORT_SPEED)) <NEW_LINE> <DEDENT> if DEBUG: <NEW_LINE> <INDENT> print('ports include: {}\n'.format(self.__ports), file=sys.stderr, flush=True) <NEW_LINE> <DEDENT> time.sleep(3) <NEW_LINE> <DEDENT> self.send_response(200) <NEW_LINE> self.send_header('Content-type','application/json') <NEW_LINE> self.end_headers() <NEW_LINE> result=[] <NEW_LINE> if DEBUG: <NEW_LINE> <INDENT> print('self.__ports is {}'.format(self.__ports), file=sys.stderr, flush=True) <NEW_LINE> <DEDENT> for port in self.__ports: <NEW_LINE> <INDENT> port.write(NL) <NEW_LINE> split_line = port.readline().decode('UTF-8').split() <NEW_LINE> if DEBUG: <NEW_LINE> <INDENT> print('line: "{}"'.format(split_line)) <NEW_LINE> <DEDENT> while len(split_line) > 0: <NEW_LINE> <INDENT> if len(split_line) == 3: <NEW_LINE> <INDENT> sample = {} <NEW_LINE> sample['type'] = 'DS18B20_on_Arduino' <NEW_LINE> sample['id']=split_line[1].replace('.','') <NEW_LINE> sample['temp_C'] = split_line[2] <NEW_LINE> sample['when'] = datetime.datetime.now(datetime.timezone.utc).isoformat() <NEW_LINE> result.append(sample) <NEW_LINE> <DEDENT> split_line = port.readline().decode('UTF-8').split() <NEW_LINE> if DEBUG: <NEW_LINE> <INDENT> print('line: "{}"'.format(split_line)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> self.wfile.write(bytes(json.dumps(result, indent=1), "utf8")) <NEW_LINE> return
A subclass of BaseHTTPRequestHandler to provide information about DS18B20 sensors.
62598fc350812a4eaa620d2e
class Solution: <NEW_LINE> <INDENT> def topSort(self, graph): <NEW_LINE> <INDENT> incoming = {} <NEW_LINE> for node in graph: <NEW_LINE> <INDENT> if node not in incoming: <NEW_LINE> <INDENT> incoming[node] = set() <NEW_LINE> <DEDENT> for des in node.neighbors: <NEW_LINE> <INDENT> if des not in incoming: <NEW_LINE> <INDENT> incoming[des] = set() <NEW_LINE> <DEDENT> incoming[des].add(node) <NEW_LINE> <DEDENT> <DEDENT> soln = [] <NEW_LINE> V = len(graph) <NEW_LINE> while len(incoming) > 0: <NEW_LINE> <INDENT> for node in graph: <NEW_LINE> <INDENT> if node in incoming and len(incoming[node]) == 0: <NEW_LINE> <INDENT> soln.append(node) <NEW_LINE> del incoming[node] <NEW_LINE> for des in node.neighbors: <NEW_LINE> <INDENT> incoming[des].remove(node) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> return soln
@param graph: A list of Directed graph node @return: A list of integer
62598fc357b8e32f52508267
class UtahJazz(NBA_Team): <NEW_LINE> <INDENT> full_name = "Utah Jazz" <NEW_LINE> name = "Jazz" <NEW_LINE> team_id = 1610612762 <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> super().__init__()
NBA's Washington Wizards Static Information
62598fc363b5f9789fe85405
class PersonWeekQuotaShotsResource(Resource, ArgsMixin): <NEW_LINE> <INDENT> @jwt_required <NEW_LINE> def get(self, person_id, year, week): <NEW_LINE> <INDENT> project_id = self.get_project_id() <NEW_LINE> task_type_id = self.get_task_type_id() <NEW_LINE> user_service.check_person_access(person_id) <NEW_LINE> weighted = self.get_bool_parameter("weighted", default="true") <NEW_LINE> try: <NEW_LINE> <INDENT> return shots_service.get_week_quota_shots( person_id, year, week, project_id=project_id, task_type_id=task_type_id, weighted=weighted, ) <NEW_LINE> <DEDENT> except WrongDateFormatException: <NEW_LINE> <INDENT> abort(404)
Get ended shots used for quota calculation of this week.
62598fc355399d3f056267aa
class RegistryStatistics(Model): <NEW_LINE> <INDENT> _validation = { 'total_device_count': {'readonly': True}, 'enabled_device_count': {'readonly': True}, 'disabled_device_count': {'readonly': True}, } <NEW_LINE> _attribute_map = { 'total_device_count': {'key': 'totalDeviceCount', 'type': 'long'}, 'enabled_device_count': {'key': 'enabledDeviceCount', 'type': 'long'}, 'disabled_device_count': {'key': 'disabledDeviceCount', 'type': 'long'}, } <NEW_LINE> def __init__(self, **kwargs): <NEW_LINE> <INDENT> super(RegistryStatistics, self).__init__(**kwargs) <NEW_LINE> self.total_device_count = None <NEW_LINE> self.enabled_device_count = None <NEW_LINE> self.disabled_device_count = None
Identity registry statistics. Variables are only populated by the server, and will be ignored when sending a request. :ivar total_device_count: The total count of devices in the identity registry. :vartype total_device_count: long :ivar enabled_device_count: The count of enabled devices in the identity registry. :vartype enabled_device_count: long :ivar disabled_device_count: The count of disabled devices in the identity registry. :vartype disabled_device_count: long
62598fc3d486a94d0ba2c263
class TestRail(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.configs = self.load_configs() <NEW_LINE> u = self.configs['user'] <NEW_LINE> p = self.configs['password'] <NEW_LINE> self.url = self.configs['url'] <NEW_LINE> self.rest = restclient.RestClient(u, p) <NEW_LINE> <DEDENT> def load_configs(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> config_file = helpers.bigrobot_configs_path() + "/testrail.yaml" <NEW_LINE> config = helpers.load_config(config_file) <NEW_LINE> <DEDENT> except IOError: <NEW_LINE> <INDENT> s = "Unable to open TestRail config file %s" % config_file <NEW_LINE> helpers.error_exit(s) <NEW_LINE> <DEDENT> return config <NEW_LINE> <DEDENT> def get_projects(self): <NEW_LINE> <INDENT> return self.rest.get(self.url + "/get_projects") <NEW_LINE> <DEDENT> def get_project(self, project_id): <NEW_LINE> <INDENT> return self.rest.get(self.url + "/get_project/%s" % project_id) <NEW_LINE> <DEDENT> def get_testsuites(self, project_id): <NEW_LINE> <INDENT> return self.rest.get(self.url + "/get_suites/%s" % project_id) <NEW_LINE> <DEDENT> def get_testsuite(self, testsuite_id): <NEW_LINE> <INDENT> return self.rest.get(self.url + "/get_suite/%s" % testsuite_id) <NEW_LINE> <DEDENT> def get_testsections(self, project_id, testsuite_id): <NEW_LINE> <INDENT> return self.rest.get( self.url + "/get_sections/%s&suite_id=%s" % (project_id, testsuite_id)) <NEW_LINE> <DEDENT> def get_testsection(self, testsection_id): <NEW_LINE> <INDENT> return self.rest.get(self.url + "/get_section/%s" % testsection_id) <NEW_LINE> <DEDENT> def get_testcases(self, project_id, testsuite_id, testsection_id=None): <NEW_LINE> <INDENT> if testsection_id: <NEW_LINE> <INDENT> return self.rest.get( self.url + "/get_cases/%s&suite_id=%s&section_id=%s" % (project_id, testsuite_id, testsection_id)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self.rest.get( self.url + "/get_cases/%s&suite_id=%s" % (project_id, testsuite_id)) <NEW_LINE> <DEDENT> <DEDENT> def 
get_testcase(self, testcase_id): <NEW_LINE> <INDENT> return self.rest.get(self.url + "/get_case/%s" % testcase_id)
Interface to the TestRail API
62598fc3377c676e912f6ebd
class ModifyPublishSubscribeNameRequest(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.PublishSubscribeId = None <NEW_LINE> self.PublishSubscribeName = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.PublishSubscribeId = params.get("PublishSubscribeId") <NEW_LINE> self.PublishSubscribeName = params.get("PublishSubscribeName") <NEW_LINE> memeber_set = set(params.keys()) <NEW_LINE> for name, value in vars(self).items(): <NEW_LINE> <INDENT> if name in memeber_set: <NEW_LINE> <INDENT> memeber_set.remove(name) <NEW_LINE> <DEDENT> <DEDENT> if len(memeber_set) > 0: <NEW_LINE> <INDENT> warnings.warn("%s fileds are useless." % ",".join(memeber_set))
ModifyPublishSubscribeName请求参数结构体
62598fc35fcc89381b266296
class Annotation(): <NEW_LINE> <INDENT> def __init__(self, start, end, annotation): <NEW_LINE> <INDENT> self.start = start <NEW_LINE> self.end = end <NEW_LINE> self.annotation = annotation
Wraps a semantic annotation with offset information
62598fc326068e7796d4cbee
class InlineResponse2013(object): <NEW_LINE> <INDENT> def __init__(self, status=None, schedule=None): <NEW_LINE> <INDENT> self.swagger_types = { 'status': 'str', 'schedule': 'Schedule' } <NEW_LINE> self.attribute_map = { 'status': 'status', 'schedule': 'schedule' } <NEW_LINE> self._status = status <NEW_LINE> self._schedule = schedule <NEW_LINE> <DEDENT> @property <NEW_LINE> def status(self): <NEW_LINE> <INDENT> return self._status <NEW_LINE> <DEDENT> @status.setter <NEW_LINE> def status(self, status): <NEW_LINE> <INDENT> self._status = status <NEW_LINE> <DEDENT> @property <NEW_LINE> def schedule(self): <NEW_LINE> <INDENT> return self._schedule <NEW_LINE> <DEDENT> @schedule.setter <NEW_LINE> def schedule(self, schedule): <NEW_LINE> <INDENT> self._schedule = schedule <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in iteritems(self.swagger_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other
NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually.
62598fc33346ee7daa337792
class FASTQMetadataFile(MetadataFile): <NEW_LINE> <INDENT> category_name = 'FASTQ'; <NEW_LINE> def collect_metadata(self): <NEW_LINE> <INDENT> return {}
A metadata file type for fastq.gz files
62598fc397e22403b383b19d
class DeleteEnvironmentRequest(proto.Message): <NEW_LINE> <INDENT> name = proto.Field(proto.STRING, number=1,)
The request message for [Environments.DeleteEnvironment][google.cloud.dialogflow.v2beta1.Environments.DeleteEnvironment]. Attributes: name (str): Required. The name of the environment to delete. / Format: - ``projects/<Project Number / ID>/agent/environments/<Environment ID>`` - ``projects/<Project Number / ID>/locations/<Location ID>/agent/environments/<Environment ID>``
62598fc366673b3332c30668
class MockClusterGenerator(): <NEW_LINE> <INDENT> def __init__(self, num_clusters, nodes_per_cluster, x, y, stddev, seed=None): <NEW_LINE> <INDENT> self.num_clusters = num_clusters <NEW_LINE> self.nodes_per_cluster = nodes_per_cluster <NEW_LINE> self.x = x <NEW_LINE> self.y = y <NEW_LINE> self.stddev = stddev <NEW_LINE> self._clusters = dict() <NEW_LINE> self._nodes = set() <NEW_LINE> self.seed = seed <NEW_LINE> <DEDENT> def generate(self, plot=False): <NEW_LINE> <INDENT> random.seed(self.seed) <NEW_LINE> np.random.seed(self.seed) <NEW_LINE> for _ in range(self.num_clusters): <NEW_LINE> <INDENT> coords = tuple([random.randint(0, self.x), random.randint(0, self.y)]) <NEW_LINE> while coords in self._clusters: <NEW_LINE> <INDENT> coords = tuple([random.randint(0, self.x), random.randint(0, self.y)]) <NEW_LINE> <DEDENT> self._clusters[coords] = set() <NEW_LINE> nodes = np.random.normal(0, self.stddev, size=(self.nodes_per_cluster, 2)) <NEW_LINE> for node in nodes: <NEW_LINE> <INDENT> self._clusters[coords].add(tuple(np.add(node, coords))) <NEW_LINE> <DEDENT> <DEDENT> if plot: <NEW_LINE> <INDENT> for cluster in self._clusters: <NEW_LINE> <INDENT> nodes = self._clusters[cluster] <NEW_LINE> x = [coords[0] for coords in nodes] <NEW_LINE> y = [coords[1] for coords in nodes] <NEW_LINE> plt.scatter(x, y) <NEW_LINE> <DEDENT> plt.show() <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def clusters(self): <NEW_LINE> <INDENT> return self._clusters <NEW_LINE> <DEDENT> @property <NEW_LINE> def nodes(self): <NEW_LINE> <INDENT> return [tuple(node) for cluster in self._clusters for node in self._clusters[cluster]]
Class that handles and generates mock clusters and nodes in 2D Args: num_clusters(``int``): Number of clusters to generate nodes_per_cluster(``int``): Total nodes per cluster x(``float``): Max x coord y(``float``): Max y coord stddev(``float``): stddev of nodes around each cluster centre Example Usage: >>> mock_clusters = MockClusterGenerator(5, 40, 10, 10, 2) ... mock_clusters.generate(plot=True)
62598fc38a349b6b436864d2
class LxFdtDump(gdb.Command): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(LxFdtDump, self).__init__("lx-fdtdump", gdb.COMMAND_DATA, gdb.COMPLETE_FILENAME) <NEW_LINE> <DEDENT> def fdthdr_to_cpu(self, fdt_header): <NEW_LINE> <INDENT> fdt_header_be = ">IIIIIII" <NEW_LINE> fdt_header_le = "<IIIIIII" <NEW_LINE> if utils.get_target_endianness() == 1: <NEW_LINE> <INDENT> output_fmt = fdt_header_le <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> output_fmt = fdt_header_be <NEW_LINE> <DEDENT> return unpack(output_fmt, pack(fdt_header_be, fdt_header['magic'], fdt_header['totalsize'], fdt_header['off_dt_struct'], fdt_header['off_dt_strings'], fdt_header['off_mem_rsvmap'], fdt_header['version'], fdt_header['last_comp_version'])) <NEW_LINE> <DEDENT> def invoke(self, arg, from_tty): <NEW_LINE> <INDENT> if not constants.LX_CONFIG_OF: <NEW_LINE> <INDENT> raise gdb.GdbError("Kernel not compiled with CONFIG_OF\n") <NEW_LINE> <DEDENT> if len(arg) == 0: <NEW_LINE> <INDENT> filename = "fdtdump.dtb" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> filename = arg <NEW_LINE> <DEDENT> py_fdt_header_ptr = gdb.parse_and_eval( "(const struct fdt_header *) initial_boot_params") <NEW_LINE> py_fdt_header = py_fdt_header_ptr.dereference() <NEW_LINE> fdt_header = self.fdthdr_to_cpu(py_fdt_header) <NEW_LINE> if fdt_header[0] != constants.LX_OF_DT_HEADER: <NEW_LINE> <INDENT> raise gdb.GdbError("No flattened device tree magic found\n") <NEW_LINE> <DEDENT> gdb.write("fdt_magic: 0x{:02X}\n".format(fdt_header[0])) <NEW_LINE> gdb.write("fdt_totalsize: 0x{:02X}\n".format(fdt_header[1])) <NEW_LINE> gdb.write("off_dt_struct: 0x{:02X}\n".format(fdt_header[2])) <NEW_LINE> gdb.write("off_dt_strings: 0x{:02X}\n".format(fdt_header[3])) <NEW_LINE> gdb.write("off_mem_rsvmap: 0x{:02X}\n".format(fdt_header[4])) <NEW_LINE> gdb.write("version: {}\n".format(fdt_header[5])) <NEW_LINE> gdb.write("last_comp_version: {}\n".format(fdt_header[6])) <NEW_LINE> inf = gdb.inferiors()[0] <NEW_LINE> fdt_buf = 
utils.read_memoryview(inf, py_fdt_header_ptr, fdt_header[1]).tobytes() <NEW_LINE> try: <NEW_LINE> <INDENT> f = open(filename, 'wb') <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> raise gdb.GdbError("Could not open file to dump fdt") <NEW_LINE> <DEDENT> f.write(fdt_buf) <NEW_LINE> f.close() <NEW_LINE> gdb.write("Dumped fdt blob to " + filename + "\n")
Output Flattened Device Tree header and dump FDT blob to the filename specified as the command argument. Equivalent to 'cat /proc/fdt > fdtdump.dtb' on a running target
62598fc3aad79263cf42ea68
class Number(Type): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> Type.__init__(self) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def validate(cls, value): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> float(value) <NEW_LINE> <DEDENT> except TypeError: <NEW_LINE> <INDENT> raise ValueError("Invalid value '%s'" % value) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> raise ValueError("Invalid value '%s'" % value) <NEW_LINE> <DEDENT> return True <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def cast(cls, value): <NEW_LINE> <INDENT> if value is None: <NEW_LINE> <INDENT> return value <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> return float(value) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> raise CastException() <NEW_LINE> <DEDENT> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "number"
This class represents an integer or float in the configuration model. On these integers the following operations are supported: +, -, /, *
62598fc3e1aae11d1e7ce96f
class SingleSwitchTopo(Topo): <NEW_LINE> <INDENT> def build(self): <NEW_LINE> <INDENT> s3 = self.addSwitch('s3') <NEW_LINE> s1 = self.addSwitch('s1') <NEW_LINE> s2 = self.addSwitch('s2') <NEW_LINE> s4 = self.addSwitch('s4') <NEW_LINE> s5 = self.addSwitch('s5') <NEW_LINE> h1 = self.addHost('h1') <NEW_LINE> h2 = self.addHost('h2') <NEW_LINE> self.addLink(s1, s2) <NEW_LINE> self.addLink(s2, s3) <NEW_LINE> self.addLink(s1, s4) <NEW_LINE> self.addLink(s4, s5) <NEW_LINE> self.addLink(s3, s5) <NEW_LINE> self.addLink(h1, s1) <NEW_LINE> self.addLink(h2, s5)
Single switch connected to n hosts.
62598fc37cff6e4e811b5cb9
class Meta(NamedModel.Meta): <NEW_LINE> <INDENT> app_label = _app_label <NEW_LINE> db_table = db_table(_app_label, _email_type) <NEW_LINE> verbose_name = _(_email_type_verbose) <NEW_LINE> verbose_name_plural = _(pluralize(_email_type_verbose))
Model meta class declaration.
62598fc37d847024c075c651
class AverageMeter(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.reset() <NEW_LINE> <DEDENT> def reset(self): <NEW_LINE> <INDENT> self.val = 0 <NEW_LINE> self.avg = 0 <NEW_LINE> self.sum = 0 <NEW_LINE> self.count = 0 <NEW_LINE> self.max = 0 <NEW_LINE> <DEDENT> def update(self, val, n=1): <NEW_LINE> <INDENT> self.val = val <NEW_LINE> self.sum += val * n <NEW_LINE> self.count += n <NEW_LINE> self.avg = self.sum / self.count <NEW_LINE> self.max = max(self.max, val)
Computes and stores the average and current value
62598fc392d797404e388cac
class TextProcess(BaseEstimator, TransformerMixin): <NEW_LINE> <INDENT> def __init__(self, index_num): <NEW_LINE> <INDENT> self.index_num = index_num <NEW_LINE> <DEDENT> def fit(self, X, y=None): <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> def transform(self, X, y=None): <NEW_LINE> <INDENT> if type(X) == pd.core.frame.DataFrame: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> X = pd.DataFrame(X) <NEW_LINE> <DEDENT> text = X.iloc[:, self.index_num] <NEW_LINE> processed = text.apply(lambda x: re.sub(r'[^\w\s]', '', x.lower())) <NEW_LINE> length = processed.apply(lambda x: len(x)) <NEW_LINE> words_not_stopwords = processed.apply(lambda x: len([t for t in x.split(' ') if t not in stopWords])) <NEW_LINE> commas = text.apply(lambda x: x.count(',')) <NEW_LINE> mean_length = processed.apply(lambda x: np.mean([len(w) for w in str(x).split()])) <NEW_LINE> new = np.column_stack((processed, length, words_not_stopwords, commas, mean_length)) <NEW_LINE> new_df = pd.DataFrame(new, columns=['text', 'length', 'words_not_stopwords', 'commas', 'mean_length']) <NEW_LINE> new_df[['length', 'words_not_stopwords', 'commas', 'mean_length']] = new_df[['length', 'words_not_stopwords', 'commas', 'mean_length']].apply(pd.to_numeric) <NEW_LINE> return new_df
process text data with making new features
62598fc3283ffb24f3cf3b19
class Connection(object): <NEW_LINE> <INDENT> def __init__(self, connection): <NEW_LINE> <INDENT> self._connection = connection <NEW_LINE> <DEDENT> def _get_or_create_user(self, user_data): <NEW_LINE> <INDENT> User = get_user_model() <NEW_LINE> attributes = user_data["attributes"] <NEW_LINE> user_fields = { field_name: attributes.get(attribute_name, ("",))[0] for field_name, attribute_name in settings.LDAP_AUTH_USER_FIELDS.items() } <NEW_LINE> user_data = settings.LDAP_AUTH_CLEAN_USER_DATA(user_fields) <NEW_LINE> user_lookup = { field_name: user_fields.pop(field_name, "") for field_name in settings.LDAP_AUTH_USER_LOOKUP_FIELDS } <NEW_LINE> user, created = User.objects.update_or_create( defaults = user_fields, **user_lookup ) <NEW_LINE> return user <NEW_LINE> <DEDENT> def iter_users(self): <NEW_LINE> <INDENT> paged_entries = self._connection.extend.standard.paged_search( search_base = settings.LDAP_AUTH_SEARCH_BASE, search_filter = "(objectClass={object_class})".format( object_class = clean_ldap_name(settings.LDAP_AUTH_OBJECT_CLASS), ), search_scope = ldap3.SEARCH_SCOPE_WHOLE_SUBTREE, attributes = list(settings.LDAP_AUTH_USER_FIELDS.values()), paged_size = 30, ) <NEW_LINE> return ( self._get_or_create_user(entry) for entry in paged_entries ) <NEW_LINE> <DEDENT> def get_user(self, *args, **kwargs): <NEW_LINE> <INDENT> user_identifier = resolve_user_identifier(settings.LDAP_AUTH_USER_LOOKUP_FIELDS, True, args, kwargs) <NEW_LINE> search_filter = "(&(objectClass={object_class}){user_identifier})".format( object_class = clean_ldap_name(settings.LDAP_AUTH_OBJECT_CLASS), user_identifier = "".join( "({attribute_name}={field_value})".format( attribute_name = clean_ldap_name(settings.LDAP_AUTH_USER_FIELDS[field_name]), field_value = clean_ldap_name(field_value), ) for field_name, field_value in user_identifier.items() ), ) <NEW_LINE> if self._connection.search( search_base = settings.LDAP_AUTH_SEARCH_BASE, search_filter = search_filter, search_scope = 
ldap3.SEARCH_SCOPE_WHOLE_SUBTREE, attributes = list(settings.LDAP_AUTH_USER_FIELDS.values()), size_limit = 1, ): <NEW_LINE> <INDENT> return self._get_or_create_user(self._connection.response[0]) <NEW_LINE> <DEDENT> return None
A connection to an LDAP server.
62598fc35fcc89381b266297
class _IncrDecr(_RExprNode): <NEW_LINE> <INDENT> def __init__(self, expr): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.expr = expr <NEW_LINE> <DEDENT> descrip = None <NEW_LINE> cmd = None <NEW_LINE> return_new = None <NEW_LINE> def make_il(self, il_code, symbol_table, c): <NEW_LINE> <INDENT> lval = self.expr.lvalue(il_code, symbol_table, c) <NEW_LINE> if not lval or not lval.modable(): <NEW_LINE> <INDENT> err = f"operand of {self.descrip} operator not a modifiable lvalue" <NEW_LINE> raise CompilerError(err, self.expr.r) <NEW_LINE> <DEDENT> val = self.expr.make_il(il_code, symbol_table, c) <NEW_LINE> one = ILValue(val.ctype) <NEW_LINE> if val.ctype.is_arith(): <NEW_LINE> <INDENT> il_code.register_literal_var(one, 1) <NEW_LINE> <DEDENT> elif val.ctype.is_pointer() and val.ctype.arg.is_complete(): <NEW_LINE> <INDENT> il_code.register_literal_var(one, val.ctype.arg.size) <NEW_LINE> <DEDENT> elif val.ctype.is_pointer(): <NEW_LINE> <INDENT> err = "invalid arithmetic on pointer to incomplete type" <NEW_LINE> raise CompilerError(err, self.expr.r) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> err = f"invalid type for {self.descrip} operator" <NEW_LINE> raise CompilerError(err, self.expr.r) <NEW_LINE> <DEDENT> new_val = ILValue(val.ctype) <NEW_LINE> if self.return_new: <NEW_LINE> <INDENT> il_code.add(self.cmd(new_val, val, one)) <NEW_LINE> lval.set_to(new_val, il_code, self.expr.r) <NEW_LINE> return new_val <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> old_val = ILValue(val.ctype) <NEW_LINE> il_code.add(value_cmds.Set(old_val, val)) <NEW_LINE> il_code.add(self.cmd(new_val, val, one)) <NEW_LINE> lval.set_to(new_val, il_code, self.expr.r) <NEW_LINE> return old_val
Base class for prefix/postfix increment/decrement operators.
62598fc35fdd1c0f98e5e228
class PfsSqliteRegistry(PfsSqlRegistry): <NEW_LINE> <INDENT> placeHolder = "?" <NEW_LINE> def __init__(self, location): <NEW_LINE> <INDENT> if os.path.exists(location): <NEW_LINE> <INDENT> conn = sqlite3.connect(location) <NEW_LINE> conn.text_factory = str <NEW_LINE> self.root = location <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> conn = None <NEW_LINE> <DEDENT> SqlRegistry.__init__(self, conn)
A SQLite-based registry
62598fc3091ae35668704ebc
class DutyAssignmentSubmittedEvent(Model): <NEW_LINE> <INDENT> def __init__(self, event_type=None, duty_id=None): <NEW_LINE> <INDENT> self.openapi_types = { 'event_type': str, 'duty_id': str } <NEW_LINE> self.attribute_map = { 'event_type': 'eventType', 'duty_id': 'dutyId' } <NEW_LINE> self._event_type = event_type <NEW_LINE> self._duty_id = duty_id <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_dict(cls, dikt) -> 'DutyAssignmentSubmittedEvent': <NEW_LINE> <INDENT> return util.deserialize_model(dikt, cls) <NEW_LINE> <DEDENT> @property <NEW_LINE> def event_type(self): <NEW_LINE> <INDENT> return self._event_type <NEW_LINE> <DEDENT> @event_type.setter <NEW_LINE> def event_type(self, event_type): <NEW_LINE> <INDENT> if event_type is None: <NEW_LINE> <INDENT> raise ValueError("Invalid value for `event_type`, must not be `None`") <NEW_LINE> <DEDENT> self._event_type = event_type <NEW_LINE> <DEDENT> @property <NEW_LINE> def duty_id(self): <NEW_LINE> <INDENT> return self._duty_id <NEW_LINE> <DEDENT> @duty_id.setter <NEW_LINE> def duty_id(self, duty_id): <NEW_LINE> <INDENT> self._duty_id = duty_id
NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech). Do not edit the class manually.
62598fc34f88993c371f0655