code
stringlengths
4
4.48k
docstring
stringlengths
1
6.45k
_id
stringlengths
24
24
class Position(object): <NEW_LINE> <INDENT> def __init__(self, X=None, Y=None,): <NEW_LINE> <INDENT> self.X = X <NEW_LINE> self.Y = Y <NEW_LINE> <DEDENT> def read(self, iprot): <NEW_LINE> <INDENT> if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: <NEW_LINE> <INDENT> iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) <NEW_LINE> return <NEW_LINE> <DEDENT> iprot.readStructBegin() <NEW_LINE> while True: <NEW_LINE> <INDENT> (fname, ftype, fid) = iprot.readFieldBegin() <NEW_LINE> if ftype == TType.STOP: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> if fid == 1: <NEW_LINE> <INDENT> if ftype == TType.I32: <NEW_LINE> <INDENT> self.X = iprot.readI32() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> elif fid == 2: <NEW_LINE> <INDENT> if ftype == TType.I32: <NEW_LINE> <INDENT> self.Y = iprot.readI32() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> iprot.readFieldEnd() <NEW_LINE> <DEDENT> iprot.readStructEnd() <NEW_LINE> <DEDENT> def write(self, oprot): <NEW_LINE> <INDENT> if oprot._fast_encode is not None and self.thrift_spec is not None: <NEW_LINE> <INDENT> oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) <NEW_LINE> return <NEW_LINE> <DEDENT> oprot.writeStructBegin('Position') <NEW_LINE> if self.X is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('X', TType.I32, 1) <NEW_LINE> oprot.writeI32(self.X) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> if self.Y is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('Y', TType.I32, 2) <NEW_LINE> oprot.writeI32(self.Y) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> oprot.writeFieldStop() <NEW_LINE> oprot.writeStructEnd() <NEW_LINE> <DEDENT> def validate(self): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> 
<INDENT> L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] <NEW_LINE> return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not (self == other)
Attributes: - X - Y
62598fa03cc13d1c6d4655a4
class Modelim(SystemTestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.interpreter.pipe.create('elim', 'mf') <NEW_LINE> <DEDENT> def test_te_200ns(self): <NEW_LINE> <INDENT> self.interpreter.results.read(file='final_results_trunc_1.3_v2', dir=status.install_path + sep+'test_suite'+sep+'shared_data'+sep+'model_free'+sep+'OMP') <NEW_LINE> self.interpreter.value.set(200*1e-9, 'te', spin_id=":11") <NEW_LINE> self.interpreter.eliminate() <NEW_LINE> self.assert_(return_spin(':9@N').select) <NEW_LINE> self.assert_(return_spin(':10@N').select) <NEW_LINE> self.assert_(not return_spin(':11@N').select) <NEW_LINE> self.assert_(return_spin(':12@N').select) <NEW_LINE> <DEDENT> def test_tm_51ns(self): <NEW_LINE> <INDENT> self.script_exec(status.install_path + sep+'test_suite'+sep+'system_tests'+sep+'scripts'+sep+'local_tm_model_elimination.py') <NEW_LINE> self.assert_(return_spin(':13').select) <NEW_LINE> self.assert_(return_spin(':14').select) <NEW_LINE> self.assert_(not return_spin(':15').select) <NEW_LINE> self.assert_(return_spin(':16').select)
Class for testing model selection.
62598fa0435de62698e9bc2c
class CanCache(ndb.Model): <NEW_LINE> <INDENT> updated_at = ndb.DateTimeProperty(auto_now=True)
Super brilliant Idea: Lots of things will change very infrequently, so store the date it was last updated. If it wasn't updated, then list is cached client side. The benefit of this is in drastically reduces Data-Store Reads Reduce DB Reads => Save Money
62598fa001c39578d7f12bb7
class LObs(GenericMedia): <NEW_LINE> <INDENT> supported_domains = ['www.nouvelobs.com', 'bibliobs.nouvelobs.com'] <NEW_LINE> id = 'l_obs' <NEW_LINE> articles_regex = [r'\.html$'] <NEW_LINE> display_name = 'L\'Obs'
Class used for french media "L'Obs".
62598fa0009cb60464d0135d
class JsonEncoder(flask.json.JSONEncoder): <NEW_LINE> <INDENT> def __init__(self, *args: Any, **kwargs: Any) -> None: <NEW_LINE> <INDENT> kwargs['namedtuple_as_object'] = False <NEW_LINE> kwargs['tuple_as_array'] = False <NEW_LINE> super().__init__(*args, **kwargs) <NEW_LINE> <DEDENT> def default(self, obj: object) -> Any: <NEW_LINE> <INDENT> if isinstance(obj, Enum): <NEW_LINE> <INDENT> return obj.value <NEW_LINE> <DEDENT> elif isinstance(obj, Match): <NEW_LINE> <INDENT> comp: SRComp = g.comp_man.get_comp() <NEW_LINE> return match_json_info(comp, obj) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return super().default(obj)
A JSON encoder that deals with various types used in SRComp.
62598fa0a17c0f6771d5c072
class TestDataPaths(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> print("TestDataCRUD:setUp__:begin") <NEW_LINE> self.data_config = dc <NEW_LINE> self.db_targets = dc.DB_TARGETS() <NEW_LINE> print("TestDataCRUD:setUp__:end") <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> print("TestDataCRUD:tearDown__:begin") <NEW_LINE> self.data_config = None <NEW_LINE> self.db_targets = None <NEW_LINE> print("TestDataCRUD:tearDown__:end") <NEW_LINE> <DEDENT> def test_character_dir(self): <NEW_LINE> <INDENT> char_dir = dc.character_dir <NEW_LINE> self.assertTrue(os.path.exists(char_dir), "{} does not exist.".format(char_dir)) <NEW_LINE> <DEDENT> def test_mission_dir(self): <NEW_LINE> <INDENT> mission_dir = self.db_targets.mission_data_dir <NEW_LINE> self.assertTrue(os.path.exists(mission_dir), "{} does not exist.".format(mission_dir)) <NEW_LINE> <DEDENT> def test_service_branch_dir(self): <NEW_LINE> <INDENT> service_dir = self.db_targets.service_branches <NEW_LINE> self.assertTrue(os.path.exists(service_dir), "{} does not exist.".format(service_dir))
Test data access in the filesystem.
62598fa0a8370b77170f021d
class LbvsEntry(plugin_api.PluginInterface): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.stream = None <NEW_LINE> self.counter = 0 <NEW_LINE> self.first_entry = False <NEW_LINE> <DEDENT> def execute(self, files): <NEW_LINE> <INDENT> query = LbvsEntry._load_file(files["query_file"]) <NEW_LINE> database = LbvsEntry._load_file(files["database_file"]) <NEW_LINE> with open(files["output_file"], "w") as stream: <NEW_LINE> <INDENT> self.stream = stream <NEW_LINE> self.write_output_header() <NEW_LINE> self.compute_and_write_similarities_for_items(query, database) <NEW_LINE> self.write_output_footer() <NEW_LINE> <DEDENT> <DEDENT> def write_output_header(self): <NEW_LINE> <INDENT> self.stream.write('{"data":[') <NEW_LINE> <DEDENT> def write_output_footer(self): <NEW_LINE> <INDENT> self.stream.write(']}') <NEW_LINE> <DEDENT> def compute_and_write_similarities_for_items(self, query, database): <NEW_LINE> <INDENT> self.first_entry = True <NEW_LINE> for query_item in query: <NEW_LINE> <INDENT> for database_item in database: <NEW_LINE> <INDENT> self._write_separator_if_needed() <NEW_LINE> self.first_entry = False <NEW_LINE> self._compute_and_write_similarity(query_item, database_item) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def _write_separator_if_needed(self): <NEW_LINE> <INDENT> if not self.first_entry: <NEW_LINE> <INDENT> self.stream.write(",") <NEW_LINE> <DEDENT> <DEDENT> def _compute_and_write_similarity(self, query, item): <NEW_LINE> <INDENT> similarity = LbvsEntry._compute_similarity( query["value"], item["value"]) <NEW_LINE> json.dump({ "query": query["id"], "id": item["id"], "value": similarity }, self.stream) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def _load_file(path): <NEW_LINE> <INDENT> with open(path) as stream: <NEW_LINE> <INDENT> return [{ "id": item["id"], "value": LbvsEntry._as_sparse_vector(item["value"]) } for item in json.load(stream)["data"]] <NEW_LINE> <DEDENT> <DEDENT> @staticmethod <NEW_LINE> def _as_sparse_vector(data): <NEW_LINE> 
<INDENT> vector = DataStructs.cDataStructs.IntSparseIntVect(8388608) <NEW_LINE> for key in data: <NEW_LINE> <INDENT> vector[(int)(key)] = (int)(data[key]) <NEW_LINE> <DEDENT> return vector <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def _compute_similarity(left, right): <NEW_LINE> <INDENT> return DataStructs.TanimotoSimilarity(left, right) <NEW_LINE> <DEDENT> def get_metadata(self) -> object: <NEW_LINE> <INDENT> return { "id": "rdkit/tanimoto" }
Compute Tanimoto similarity.
62598fa076e4537e8c3ef3f0
class FreqList: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.words, self.word_freq, self.word_rank = list(), dict(), dict() <NEW_LINE> self.ready = False <NEW_LINE> <DEDENT> def load(self, filename=None, stream=None): <NEW_LINE> <INDENT> if filename is not None: <NEW_LINE> <INDENT> self.words, self.word_freq, self.word_rank = pickle.load(open(filename, 'rb')) <NEW_LINE> <DEDENT> elif stream is not None: <NEW_LINE> <INDENT> self.words = list() <NEW_LINE> self.word_freq = dict() <NEW_LINE> self.word_rank = dict() <NEW_LINE> from myparser import tokens, normalize <NEW_LINE> cnt = 0 <NEW_LINE> for token in tokens(stream): <NEW_LINE> <INDENT> word = normalize(token) <NEW_LINE> f = FREQLIST.freq(word) <NEW_LINE> if f is not None and f > 0: <NEW_LINE> <INDENT> self.word_freq[word] = 1 + self.word_freq.get(word, 0) <NEW_LINE> cnt += 1 <NEW_LINE> <DEDENT> <DEDENT> freq_word = [(f, w) for (w, f) in self.word_freq.items()] <NEW_LINE> freq_word.sort(reverse=True) <NEW_LINE> for (_, word) in freq_word: <NEW_LINE> <INDENT> self.word_freq[word] /= cnt <NEW_LINE> self.word_rank[word] = len(self.words) <NEW_LINE> self.words.append(word) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> self.words, self.word_freq, self.word_rank = list(), dict(), dict() <NEW_LINE> <DEDENT> self.ready = True; <NEW_LINE> <DEDENT> def wait(self): <NEW_LINE> <INDENT> while not self.ready: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> def freq(self, word): <NEW_LINE> <INDENT> self.wait() <NEW_LINE> return self.word_freq.get(word, None) <NEW_LINE> <DEDENT> def rank(self, word): <NEW_LINE> <INDENT> self.wait() <NEW_LINE> return self.word_rank.get(word, None) <NEW_LINE> <DEDENT> def word(self, rank): <NEW_LINE> <INDENT> self.wait() <NEW_LINE> if rank >= len(self.words): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> return self.words[rank] <NEW_LINE> <DEDENT> def dump(self, filename): <NEW_LINE> <INDENT> self.wait() <NEW_LINE> pickle.dump((self.words, self.word_freq, 
self.word_rank), open(filename, 'wb'))
Klasa opisuje listę frekwencyjną.
62598fa08e71fb1e983bb8ef
class MplRegularPolygonProperties(MplPatchProperties): <NEW_LINE> <INDENT> _input_ports = [ ("xy", "basic:String", {'optional': True, 'docstring': 'A length 2 tuple (x, y) of the center.'}), ("radius", "basic:Integer", {'optional': True, 'docstring': 'The distance from the center to each of the vertices.', 'defaults': "['5']"}), ("orientation", "basic:Integer", {'optional': True, 'docstring': 'rotates the polygon (in radians).', 'defaults': "['0']"}), ("numVertices", "basic:String", {'optional': True, 'docstring': 'the number of vertices.'}), ] <NEW_LINE> _output_ports = [("self", "(MplRegularPolygonProperties)")] <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> MplPatchProperties.__init__(self) <NEW_LINE> self.props = {} <NEW_LINE> self.constructor_props = {} <NEW_LINE> self.sub_props = {} <NEW_LINE> <DEDENT> def compute(self): <NEW_LINE> <INDENT> MplPatchProperties.compute(self) <NEW_LINE> if self.hasInputFromPort('xy'): <NEW_LINE> <INDENT> self.constructor_props['xy'] = self.getInputFromPort('xy') <NEW_LINE> <DEDENT> if self.hasInputFromPort('radius'): <NEW_LINE> <INDENT> self.constructor_props['radius'] = self.getInputFromPort('radius') <NEW_LINE> <DEDENT> if self.hasInputFromPort('orientation'): <NEW_LINE> <INDENT> self.constructor_props['orientation'] = self.getInputFromPort('orientation') <NEW_LINE> <DEDENT> if self.hasInputFromPort('numVertices'): <NEW_LINE> <INDENT> self.constructor_props['numVertices'] = self.getInputFromPort('numVertices') <NEW_LINE> <DEDENT> <DEDENT> def update_props(self, objs): <NEW_LINE> <INDENT> matplotlib.artist.setp(objs, **self.props) <NEW_LINE> <DEDENT> def update_kwargs(self, kwargs): <NEW_LINE> <INDENT> kwargs.update(self.constructor_props) <NEW_LINE> kwargs.update(self.props)
A regular polygon patch.
62598fa0925a0f43d25e7e76
class ProjectsLocationsJobsMessagesService(base_api.BaseApiService): <NEW_LINE> <INDENT> _NAME = u'projects_locations_jobs_messages' <NEW_LINE> def __init__(self, client): <NEW_LINE> <INDENT> super(DataflowV1b3.ProjectsLocationsJobsMessagesService, self).__init__(client) <NEW_LINE> self._upload_configs = { } <NEW_LINE> <DEDENT> def List(self, request, global_params=None): <NEW_LINE> <INDENT> config = self.GetMethodConfig('List') <NEW_LINE> return self._RunMethod( config, request, global_params=global_params) <NEW_LINE> <DEDENT> List.method_config = lambda: base_api.ApiMethodInfo( http_method=u'GET', method_id=u'dataflow.projects.locations.jobs.messages.list', ordered_params=[u'projectId', u'location', u'jobId'], path_params=[u'jobId', u'location', u'projectId'], query_params=[u'endTime', u'minimumImportance', u'pageSize', u'pageToken', u'startTime'], relative_path=u'v1b3/projects/{projectId}/locations/{location}/jobs/{jobId}/messages', request_field='', request_type_name=u'DataflowProjectsLocationsJobsMessagesListRequest', response_type_name=u'ListJobMessagesResponse', supports_download=False, )
Service class for the projects_locations_jobs_messages resource.
62598fa085dfad0860cbf991
class TailLogEntriesResponse(proto.Message): <NEW_LINE> <INDENT> class SuppressionInfo(proto.Message): <NEW_LINE> <INDENT> class Reason(proto.Enum): <NEW_LINE> <INDENT> REASON_UNSPECIFIED = 0 <NEW_LINE> RATE_LIMIT = 1 <NEW_LINE> NOT_CONSUMED = 2 <NEW_LINE> <DEDENT> reason = proto.Field( proto.ENUM, number=1, enum="TailLogEntriesResponse.SuppressionInfo.Reason", ) <NEW_LINE> suppressed_count = proto.Field( proto.INT32, number=2, ) <NEW_LINE> <DEDENT> entries = proto.RepeatedField( proto.MESSAGE, number=1, message=log_entry.LogEntry, ) <NEW_LINE> suppression_info = proto.RepeatedField( proto.MESSAGE, number=2, message=SuppressionInfo, )
Result returned from ``TailLogEntries``. Attributes: entries (Sequence[google.cloud.logging_v2.types.LogEntry]): A list of log entries. Each response in the stream will order entries with increasing values of ``LogEntry.timestamp``. Ordering is not guaranteed between separate responses. suppression_info (Sequence[google.cloud.logging_v2.types.TailLogEntriesResponse.SuppressionInfo]): If entries that otherwise would have been included in the session were not sent back to the client, counts of relevant entries omitted from the session with the reason that they were not included. There will be at most one of each reason per response. The counts represent the number of suppressed entries since the last streamed response.
62598fa0e5267d203ee6b746
class ImageSource(abc.ABC): <NEW_LINE> <INDENT> def __getitem__(self, index: int) -> str: <NEW_LINE> <INDENT> return self.images[index] <NEW_LINE> <DEDENT> images: Dict[int, str] <NEW_LINE> def __iter__(self) -> Iterable[str]: <NEW_LINE> <INDENT> for image in self.images.values(): <NEW_LINE> <INDENT> yield image
Base class for image sources.
62598fa0596a897236127ab5
class SchemaTab(BaseResource): <NEW_LINE> <INDENT> def render_tab(self, request): <NEW_LINE> <INDENT> return html <NEW_LINE> <DEDENT> def render_POST(self, request): <NEW_LINE> <INDENT> path = request.args['path'][0] <NEW_LINE> def send_response(data): <NEW_LINE> <INDENT> response = { 'schema': extract_result(data[0]), 'keys': extract_result(data[1]) } <NEW_LINE> request.sdata.add_to_push_queue('schema', text=dumps(response)) <NEW_LINE> request.sdata.highlight('#schema_result_schema') <NEW_LINE> request.sdata.highlight('#schema_result_keys') <NEW_LINE> request.sdata.log('got reply {}'.format(response)) <NEW_LINE> <DEDENT> d_schema = get_schema(request.sdata.api, path) <NEW_LINE> d_keys = get_keys(request.sdata.api, path) <NEW_LINE> d = defer.gatherResults([d_schema, d_keys]) <NEW_LINE> def canonicalize(results): <NEW_LINE> <INDENT> schema, keys = results <NEW_LINE> if 'message' in keys and 'get_keys is not supported for leaf nodes' in keys['message']: <NEW_LINE> <INDENT> keys = 'No keys (leaf node)' <NEW_LINE> <DEDENT> return (schema, keys) <NEW_LINE> <DEDENT> d.addCallback(canonicalize) <NEW_LINE> d.addCallback(send_response) <NEW_LINE> request.setHeader('Content-Type', 'application/json') <NEW_LINE> return '{}'
Schema tab
62598fa007f4c71912baf27e
class PhysicalVerification(SamlBase): <NEW_LINE> <INDENT> c_tag = 'PhysicalVerification' <NEW_LINE> c_namespace = NAMESPACE <NEW_LINE> c_children = SamlBase.c_children.copy() <NEW_LINE> c_attributes = SamlBase.c_attributes.copy() <NEW_LINE> c_child_order = SamlBase.c_child_order[:] <NEW_LINE> c_cardinality = SamlBase.c_cardinality.copy() <NEW_LINE> c_attributes['credentialLevel'] = ('credential_level', 'None', False) <NEW_LINE> def __init__(self, credential_level=None, text=None, extension_elements=None, extension_attributes=None): <NEW_LINE> <INDENT> SamlBase.__init__(self, text=text, extension_elements=extension_elements, extension_attributes=extension_attributes) <NEW_LINE> self.credential_level = credential_level
The urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken: PhysicalVerification element
62598fa08e7ae83300ee8ed9
class UpdatePeerSettings(TLObject): <NEW_LINE> <INDENT> __slots__: List[str] = ["peer", "settings"] <NEW_LINE> ID = 0x6a7e7366 <NEW_LINE> QUALNAME = "types.UpdatePeerSettings" <NEW_LINE> def __init__(self, *, peer: "raw.base.Peer", settings: "raw.base.PeerSettings") -> None: <NEW_LINE> <INDENT> self.peer = peer <NEW_LINE> self.settings = settings <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def read(data: BytesIO, *args: Any) -> "UpdatePeerSettings": <NEW_LINE> <INDENT> peer = TLObject.read(data) <NEW_LINE> settings = TLObject.read(data) <NEW_LINE> return UpdatePeerSettings(peer=peer, settings=settings) <NEW_LINE> <DEDENT> def write(self) -> bytes: <NEW_LINE> <INDENT> data = BytesIO() <NEW_LINE> data.write(Int(self.ID, False)) <NEW_LINE> data.write(self.peer.write()) <NEW_LINE> data.write(self.settings.write()) <NEW_LINE> return data.getvalue()
This object is a constructor of the base type :obj:`~pyrogram.raw.base.Update`. Details: - Layer: ``122`` - ID: ``0x6a7e7366`` Parameters: peer: :obj:`Peer <pyrogram.raw.base.Peer>` settings: :obj:`PeerSettings <pyrogram.raw.base.PeerSettings>`
62598fa0ac7a0e7691f72344
class HostUser(models.Model): <NEW_LINE> <INDENT> auth_type_choices = ((0,'ssh-password'),(1,'ssh-key')) <NEW_LINE> auth_type = models.SmallIntegerField(choices=auth_type_choices) <NEW_LINE> username = models.CharField(max_length=32) <NEW_LINE> password = models.CharField(blank=True,null=True,max_length=128) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return "%s-%s-%s" %(self.get_auth_type_display(),self.username,self.password) <NEW_LINE> <DEDENT> class Meta: <NEW_LINE> <INDENT> unique_together = ('username','password')
存储远程主机的用户信息 root 123 root abc root sfsfs
62598fa00c0af96317c561ba
class Project(object): <NEW_LINE> <INDENT> def __init__(self, energy_trace_set, interventions, site): <NEW_LINE> <INDENT> self.energy_trace_set = energy_trace_set <NEW_LINE> self.interventions = interventions <NEW_LINE> self.site = site <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return ( "Project(energy_trace_set={}, interventions={}, site={})" .format(self.energy_trace_set, self.interventions, self.site) )
Container for storing project data. Parameters ---------- trace_set : eemeter.structures.TraceSet Complete set of energy traces for this project. For a project site that has, for example, two electricity meters, each with two traces (supplied electricity kWh, and solar-generated kWh) and one natural gas meter with one trace (consumed natural gas therms), the `trace_set` should contain 5 traces, regardless of the availablity of that data. Traces which are unavailable should be represented as 'placeholder' traces. interventions : list of eemeter.structures.Intervention Complete set of interventions, planned, ongoing, or completed, that have taken or will take place at this site as part of this project. site : eemeter.structures.Site The site of this project.
62598fa00c0af96317c561bb
class Remote(object): <NEW_LINE> <INDENT> def __init__(self, url): <NEW_LINE> <INDENT> self.url = url.endswith("/") and url or url + "/" <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def all(cls): <NEW_LINE> <INDENT> if not Configuration.has_section('remotes'): <NEW_LINE> <INDENT> cls.create("https://raw.github.com/mwrlabs/mercury-modules/repository/") <NEW_LINE> <DEDENT> return Configuration.get_all_values('remotes') <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def create(cls, url): <NEW_LINE> <INDENT> if cls.get(url) == None: <NEW_LINE> <INDENT> Configuration.set('remotes', url, url) <NEW_LINE> return True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> @classmethod <NEW_LINE> def delete(cls, url): <NEW_LINE> <INDENT> if cls.get(url) != None: <NEW_LINE> <INDENT> Configuration.delete('remotes', url) <NEW_LINE> return True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise UnknownRemote(url) <NEW_LINE> <DEDENT> <DEDENT> @classmethod <NEW_LINE> def get(cls, url): <NEW_LINE> <INDENT> url = Configuration.get('remotes', url) <NEW_LINE> if url != None: <NEW_LINE> <INDENT> return cls(url) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> <DEDENT> def buildPath(self, path): <NEW_LINE> <INDENT> return self.url + str(path) <NEW_LINE> <DEDENT> def download(self, module): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self.getPath(module) <NEW_LINE> <DEDENT> except urllib2.HTTPError: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> except urllib2.URLError: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> <DEDENT> def getPath(self, path): <NEW_LINE> <INDENT> r = urllib2.urlopen(self.buildPath(path)) <NEW_LINE> socket = FakeSocket(r.read()) <NEW_LINE> r.close() <NEW_LINE> response = httplib.HTTPResponse(socket) <NEW_LINE> response.begin() <NEW_LINE> data = response.read() <NEW_LINE> response.close() <NEW_LINE> return data
Remote is a wrapper around a set of Mercury remote repositories, and provides methods for managing them. A Remote can be instantiated to provide API access to the repository, to get information about available modules and download their source.
62598fa03539df3088ecc0ee
class ProductionConfig(BaseConfig): <NEW_LINE> <INDENT> DEBUG = False <NEW_LINE> SQLALCHEMY_DATABASE_URI = 'mysql:///example'
Production configuration.
62598fa07d847024c075c1ff
class TestPrettifyCombo(TestCSVFlatten): <NEW_LINE> <INDENT> def test_prettify_multiline_records_disable_max_length(self): <NEW_LINE> <INDENT> self.assertLines( ["-L", "0", "examples/dummy2m.csv"], [ "| field | value |", "| ----- | ----- |", "| a | 1 |", "| b | 2 |", "| c | 3, |", "| | 4 |", "| ===== | |", "| a | 8 |", "| b | 9 |", "| c | 10 |", ], ) <NEW_LINE> <DEDENT> def test_prettify_eor_none(self): <NEW_LINE> <INDENT> self.assertLines( ["-S", "none", "examples/dummy2.csv"], [ "| field | value |", "| ----- | ----- |", "| a | 1 |", "| b | 2 |", "| c | 3 |", "| a | 8 |", "| b | 9 |", "| c | 10 |", ], ) <NEW_LINE> <DEDENT> def test_prettify_rec_ids(self): <NEW_LINE> <INDENT> self.assertLines( ["--rec-id", "examples/dummy2.csv"], [ "| recid | field | value |", "| ----- | ----- | ----- |", "| 0 | a | 1 |", "| 0 | b | 2 |", "| 0 | c | 3 |", "| 1 | a | 8 |", "| 1 | b | 9 |", "| 1 | c | 10 |", ], )
tklegacy test prettify with other settings to make make sure side-effects don't conflict legacy tests from before non-default-prettify
62598fa0c432627299fa2e13
class UniversalP3PHeader (object): <NEW_LINE> <INDENT> def process_response(self, request, response): <NEW_LINE> <INDENT> response['P3P'] = 'CP="Shareabouts does not have a P3P policy."' <NEW_LINE> return response
Sets P3P headers on the response. This header does not specify a valid P3P policy, but it is enough to get past IE. See http://stackoverflow.com/a/17710503/123776
62598fa0498bea3a75a5795b
@pytest.mark.skipif(IS_JYTHON or IS_IRONPYTHON, reason='CPython/pypy only') <NEW_LINE> class TestSetLocals(unittest.TestCase): <NEW_LINE> <INDENT> def test_set_locals_using_save_locals(self): <NEW_LINE> <INDENT> x = check_method(use_save_locals) <NEW_LINE> self.assertEqual(x, 2) <NEW_LINE> <DEDENT> def test_frame_simple_change(self): <NEW_LINE> <INDENT> frame = sys._getframe() <NEW_LINE> a = 20 <NEW_LINE> frame.f_locals['a'] = 50 <NEW_LINE> save_locals(frame) <NEW_LINE> self.assertEqual(50, a) <NEW_LINE> <DEDENT> def test_frame_co_freevars(self): <NEW_LINE> <INDENT> outer_var = 20 <NEW_LINE> def func(): <NEW_LINE> <INDENT> frame = sys._getframe() <NEW_LINE> frame.f_locals['outer_var'] = 50 <NEW_LINE> save_locals(frame) <NEW_LINE> self.assertEqual(50, outer_var) <NEW_LINE> <DEDENT> func() <NEW_LINE> <DEDENT> def test_frame_co_cellvars(self): <NEW_LINE> <INDENT> def check_co_vars(a): <NEW_LINE> <INDENT> frame = sys._getframe() <NEW_LINE> def function2(): <NEW_LINE> <INDENT> print(a) <NEW_LINE> <DEDENT> assert 'a' in frame.f_code.co_cellvars <NEW_LINE> frame = sys._getframe() <NEW_LINE> frame.f_locals['a'] = 50 <NEW_LINE> save_locals(frame) <NEW_LINE> self.assertEqual(50, a) <NEW_LINE> <DEDENT> check_co_vars(1) <NEW_LINE> <DEDENT> def test_frame_change_in_inner_frame(self): <NEW_LINE> <INDENT> def change(f): <NEW_LINE> <INDENT> self.assertTrue(f is not sys._getframe()) <NEW_LINE> f.f_locals['a']= 50 <NEW_LINE> save_locals(f) <NEW_LINE> <DEDENT> frame = sys._getframe() <NEW_LINE> a = 20 <NEW_LINE> change(frame) <NEW_LINE> self.assertEqual(50, a)
Test setting locals in one function from another function using several approaches.
62598fa0f548e778e596b3e7
class Offset(Vector): <NEW_LINE> <INDENT> pass
This class implements an offset.
62598fa063d6d428bbee25eb
class File(BIO): <NEW_LINE> <INDENT> def __init__(self, pyfile, close_pyfile=1): <NEW_LINE> <INDENT> BIO.__init__(self, _pyfree=1) <NEW_LINE> self.pyfile = pyfile <NEW_LINE> self.close_pyfile = close_pyfile <NEW_LINE> if hasattr(m2, "bio_new_pyfd"): <NEW_LINE> <INDENT> self.bio = m2.bio_new_pyfd(pyfile.fileno(), m2.bio_noclose) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.bio = m2.bio_new_pyfile(pyfile, m2.bio_noclose) <NEW_LINE> <DEDENT> <DEDENT> def close(self): <NEW_LINE> <INDENT> self.closed = 1 <NEW_LINE> if self.close_pyfile: <NEW_LINE> <INDENT> self.pyfile.close() <NEW_LINE> <DEDENT> <DEDENT> def reset(self): <NEW_LINE> <INDENT> return super(File, self).reset()
Object interface to BIO_s_pyfd This class interfaces Python to OpenSSL functions that expect BIO \*. For general file manipulation in Python, use Python's builtin file object.
62598fa0236d856c2adc9357
class Log(base.PlexCommand): <NEW_LINE> <INDENT> name = 'log' <NEW_LINE> type_map = { 'StatusNotification': 'Status', 'ProgressNotification': 'Progress' } <NEW_LINE> @asyncio.coroutine <NEW_LINE> def notifications(self, table): <NEW_LINE> <INDENT> server = self.serverapi.uri.split('://', 1)[1] <NEW_LINE> notif_url = 'ws://%s/:/websockets/notifications' % server <NEW_LINE> feed = yield from websockets.connect(notif_url) <NEW_LINE> while True: <NEW_LINE> <INDENT> data = yield from feed.recv() <NEW_LINE> if data is None: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> table.print(json.loads(data).get('_children')) <NEW_LINE> <DEDENT> yield from feed.close() <NEW_LINE> <DEDENT> def get_ts(self, obj): <NEW_LINE> <INDENT> return datetime.datetime.now().strftime('%I:%M:%S %p') <NEW_LINE> <DEDENT> def get_type(self, obj): <NEW_LINE> <INDENT> return self.type_map[obj['_elementType']] <NEW_LINE> <DEDENT> def get_msg(self, obj): <NEW_LINE> <INDENT> if 'message' in obj: <NEW_LINE> <INDENT> return obj['message'] <NEW_LINE> <DEDENT> return '%s: %s' % (obj['title'], obj['description']) <NEW_LINE> <DEDENT> def run(self, args): <NEW_LINE> <INDENT> headers = ['Date', 'Type', 'Message'] <NEW_LINE> accessors = [self.get_ts, self.get_type, self.get_msg] <NEW_LINE> table = Table(headers=headers, accessors=accessors) <NEW_LINE> evloop = asyncio.get_event_loop() <NEW_LINE> with evloop.run_until_complete(self.notifications(table)): <NEW_LINE> <INDENT> pass
Show activity log
62598fa0d268445f26639aa0
class SourceModel(collections.abc.Sequence): <NEW_LINE> <INDENT> def __init__(self, src_groups, name='', investigation_time='', start_time=''): <NEW_LINE> <INDENT> self.src_groups = src_groups <NEW_LINE> self.name = name <NEW_LINE> self.investigation_time = investigation_time <NEW_LINE> self.start_time = start_time <NEW_LINE> <DEDENT> def __getitem__(self, i): <NEW_LINE> <INDENT> return self.src_groups[i] <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return len(self.src_groups) <NEW_LINE> <DEDENT> def __toh5__(self): <NEW_LINE> <INDENT> dic = {} <NEW_LINE> for i, grp in enumerate(self.src_groups): <NEW_LINE> <INDENT> grpname = grp.name or 'group-%d' % i <NEW_LINE> srcs = [(src.source_id, src) for src in grp if hasattr(src, '__toh5__')] <NEW_LINE> if srcs: <NEW_LINE> <INDENT> dic[grpname] = hdf5.Group(srcs, {'trt': grp.trt}) <NEW_LINE> <DEDENT> <DEDENT> attrs = dict(name=self.name, investigation_time=self.investigation_time or 'NA', start_time=self.start_time or 'NA') <NEW_LINE> if not dic: <NEW_LINE> <INDENT> raise ValueError('There are no serializable sources in %s' % self) <NEW_LINE> <DEDENT> return dic, attrs <NEW_LINE> <DEDENT> def __fromh5__(self, dic, attrs): <NEW_LINE> <INDENT> vars(self).update(attrs) <NEW_LINE> self.src_groups = [] <NEW_LINE> for grp_name, grp in dic.items(): <NEW_LINE> <INDENT> trt = grp.attrs['trt'] <NEW_LINE> srcs = [] <NEW_LINE> for src_id in sorted(grp): <NEW_LINE> <INDENT> src = grp[src_id] <NEW_LINE> src.num_ruptures = src.count_ruptures() <NEW_LINE> srcs.append(src) <NEW_LINE> <DEDENT> grp = sourceconverter.SourceGroup(trt, srcs, grp_name) <NEW_LINE> self.src_groups.append(grp)
A container of source groups with attributes name, investigation_time and start_time. It is serialize on hdf5 as follows: >> with openquake.baselib.hdf5.File('/tmp/sm.hdf5', 'w') as f: .. f['/'] = source_model
62598fa01b99ca400228f44b
class Request: <NEW_LINE> <INDENT> def __init__(self, url, cookies, delay, userAgent, bauth): <NEW_LINE> <INDENT> self.cookies = cookies <NEW_LINE> self.delay = delay <NEW_LINE> self.url = url <NEW_LINE> self.headers = { "User-Agent": userAgent, "Authorization": "Basic " + bauth } <NEW_LINE> self.session = requests.Session() <NEW_LINE> requests.packages.urllib3.disable_warnings() <NEW_LINE> <DEDENT> def get(self, path=""): <NEW_LINE> <INDENT> time.sleep(self.delay) <NEW_LINE> return self.session.get(self.url + path, headers=self.headers, cookies=self.cookies, verify=False)
This class wraps the requests module in order to setup cookies, User-Agent, etc.
62598fa0796e427e5384e5cd
class Config(object): <NEW_LINE> <INDENT> def __init__(self, poetry_file, weight_file, max_len, batch_size, learning_rate, form): <NEW_LINE> <INDENT> self.poetry_file = poetry_file <NEW_LINE> self.weight_file = weight_file <NEW_LINE> self.max_len = max_len <NEW_LINE> self.batch_size = batch_size <NEW_LINE> self.learning_rate = learning_rate <NEW_LINE> self.form = form
form_ equals 5 means we choose 5 character poems as our training data, equals 7 means we choose 7 character poems as training data, while other nums means we do not choose. max equals form plus 1
62598fa0442bda511e95c294
class LinkADRReqTest(unittest.TestCase): <NEW_LINE> <INDENT> def test_encode(self): <NEW_LINE> <INDENT> expected = '\x03\x21\xff\x00\x00' <NEW_LINE> chmask = int('FF', 16) <NEW_LINE> m = lora_mac.LinkADRReq(datarate=2, txpower=1, chmask=chmask, chmaskcntl=0, nbrep=0) <NEW_LINE> result = m.encode() <NEW_LINE> self.assertEqual(expected, result)
Test LinkADRReq class
62598fa0442bda511e95c295
class AdminTeamGamesHandler(webapp2.RequestHandler): <NEW_LINE> <INDENT> def post(self): <NEW_LINE> <INDENT> file_name = self.request.get('file_name') <NEW_LINE> with open(file_name, 'rb') as schedule: <NEW_LINE> <INDENT> next(schedule) <NEW_LINE> reader = csv.reader(schedule) <NEW_LINE> home_team = self.__get_home_team(file_name) <NEW_LINE> for row in reader: <NEW_LINE> <INDENT> if row[LOCATION_INDEX].startswith(home_team.stadium_name): <NEW_LINE> <INDENT> away_team = self.__get_away_team(row[TEAMS_INDEX]) <NEW_LINE> if None == away_team: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> game_time = self.__get_game_time( row[GAME_DATE_INDEX], row[GAME_TIME_INDEX]) <NEW_LINE> query = game.Game.gql( 'WHERE home_team = :1 AND ' 'away_team = :2 AND ' 'game_time = :3', home_team.key, away_team.key, game_time) <NEW_LINE> a_game = query.get() or game.Game() <NEW_LINE> a_game.home_team = home_team.key <NEW_LINE> a_game.away_team = away_team.key <NEW_LINE> a_game.game_time = game_time <NEW_LINE> a_game.put() <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> def __get_home_team(self, file_name): <NEW_LINE> <INDENT> match = re.search('[a-z]{2,3}\.csv\Z', file_name) <NEW_LINE> home_team_abbr = match.group(0)[:-4] <NEW_LINE> return team.Team.query(team.Team.name_abbr == home_team_abbr).get() <NEW_LINE> <DEDENT> def __get_away_team(self, game_info_str): <NEW_LINE> <INDENT> match = re.search('^.* at', game_info_str) <NEW_LINE> if None == match: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> away_team_name = match.group(0)[:-3] <NEW_LINE> if away_team_name == 'D-backs': <NEW_LINE> <INDENT> away_team_name = 'Diamondbacks' <NEW_LINE> <DEDENT> return team.Team.query(team.Team.name == away_team_name).get() <NEW_LINE> <DEDENT> def __get_game_time(self, date_str, time_str): <NEW_LINE> <INDENT> if "" == time_str: <NEW_LINE> <INDENT> time_str = "12:00 PM" <NEW_LINE> <DEDENT> datetime_str = '%s %s' %(date_str, time_str) <NEW_LINE> game_time = time.strptime(datetime_str, '%m/%d/%y %I:%M 
%p') <NEW_LINE> logging.debug(game_time) <NEW_LINE> return datetime.datetime.fromtimestamp(time.mktime(game_time))
Controller to load the given team's home games into the data store
62598fa021bff66bcd722a9e
class Hostname(ConfigType): <NEW_LINE> <INDENT> def __init__(self, type_name='hostname value'): <NEW_LINE> <INDENT> super(Hostname, self).__init__(type_name=type_name) <NEW_LINE> <DEDENT> def __call__(self, value): <NEW_LINE> <INDENT> if len(value) == 0: <NEW_LINE> <INDENT> raise ValueError("Cannot have an empty hostname") <NEW_LINE> <DEDENT> if len(value) > 253: <NEW_LINE> <INDENT> raise ValueError("hostname is greater than 253 characters: %s" % value) <NEW_LINE> <DEDENT> if value.endswith("."): <NEW_LINE> <INDENT> value = value[:-1] <NEW_LINE> <DEDENT> allowed = re.compile("(?!-)[A-Z0-9-]{1,63}(?<!-)$", re.IGNORECASE) <NEW_LINE> if any((not allowed.match(x)) for x in value.split(".")): <NEW_LINE> <INDENT> raise ValueError("%s is an invalid hostname" % value) <NEW_LINE> <DEDENT> return value <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return 'Hostname' <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return self.__class__ == other.__class__ <NEW_LINE> <DEDENT> def _formatter(self, value): <NEW_LINE> <INDENT> return value
Hostname type. A hostname refers to a valid DNS or hostname. It must not be longer than 253 characters, have a segment greater than 63 characters, nor start or end with a hyphen. :param type_name: Type name to be used in the sample config file.
62598fa0a17c0f6771d5c075
class ValueIterationAgent(ValueEstimationAgent): <NEW_LINE> <INDENT> def __init__(self, mdp, discount = 0.9, iterations = 100): <NEW_LINE> <INDENT> self.mdp = mdp <NEW_LINE> self.discount = discount <NEW_LINE> self.iterations = iterations <NEW_LINE> self.values = util.Counter() <NEW_LINE> self.runValueIteration() <NEW_LINE> <DEDENT> def runValueIteration(self): <NEW_LINE> <INDENT> for i in range(self.iterations): <NEW_LINE> <INDENT> ctr = util.Counter() <NEW_LINE> for state in self.mdp.getStates(): <NEW_LINE> <INDENT> max_value = float("-inf") <NEW_LINE> for action in self.mdp.getPossibleActions(state): <NEW_LINE> <INDENT> q = self.computeQValueFromValues(state, action) <NEW_LINE> if q > max_value: <NEW_LINE> <INDENT> max_value = q <NEW_LINE> <DEDENT> ctr[state] = max_value <NEW_LINE> <DEDENT> <DEDENT> self.values = ctr <NEW_LINE> <DEDENT> <DEDENT> def getValue(self, state): <NEW_LINE> <INDENT> return self.values[state] <NEW_LINE> <DEDENT> def computeQValueFromValues(self, state, action): <NEW_LINE> <INDENT> lst = self.mdp.getTransitionStatesAndProbs(state, action) <NEW_LINE> rewards = [prob * (self.mdp.getReward(state, action, nex) + self.discount * self.getValue(nex)) for nex, prob in lst] <NEW_LINE> return sum(rewards) <NEW_LINE> <DEDENT> def computeActionFromValues(self, state): <NEW_LINE> <INDENT> optimal = None <NEW_LINE> max_value = float("-inf") <NEW_LINE> for action in self.mdp.getPossibleActions(state): <NEW_LINE> <INDENT> q = self.computeQValueFromValues(state, action) <NEW_LINE> if q > max_value: <NEW_LINE> <INDENT> max_value = q <NEW_LINE> optimal = action <NEW_LINE> <DEDENT> <DEDENT> return optimal <NEW_LINE> <DEDENT> def getPolicy(self, state): <NEW_LINE> <INDENT> return self.computeActionFromValues(state) <NEW_LINE> <DEDENT> def getAction(self, state): <NEW_LINE> <INDENT> return self.computeActionFromValues(state) <NEW_LINE> <DEDENT> def getQValue(self, state, action): <NEW_LINE> <INDENT> return self.computeQValueFromValues(state, action)
* Please read learningAgents.py before reading this.* A ValueIterationAgent takes a Markov decision process (see mdp.py) on initialization and runs value iteration for a given number of iterations using the supplied discount factor.
62598fa057b8e32f52508039
class set_all_blades_default_power_state_off: <NEW_LINE> <INDENT> def __init__ (self, parameters): <NEW_LINE> <INDENT> self.args = parameters <NEW_LINE> <DEDENT> def pre_check (self): <NEW_LINE> <INDENT> run_pre_check (command_name_enum.set_blade_config) <NEW_LINE> <DEDENT> def get_response (self): <NEW_LINE> <INDENT> return set_all_blades_command (set_blade_default_power_state_off, self.args) <NEW_LINE> <DEDENT> def get_row_manager (self): <NEW_LINE> <INDENT> return set_all_blades_command (set_blade_default_power_state_off, self.args, row_manager = True)
Handler for the SetAllBladesDefaultPowerStateOff command.
62598fa076e4537e8c3ef3f1
@api.route('/login') <NEW_LINE> class UserLogin(Resource): <NEW_LINE> <INDENT> @api.doc('user_login') <NEW_LINE> @api.expect(user_auth, validate=True) <NEW_LINE> def post(self): <NEW_LINE> <INDENT> post_data = request.json <NEW_LINE> return auth.login_user(data=post_data)
User Login Resource
62598fa021a7993f00c65dbe
class FileAsserterInterface(AsserterInterface): <NEW_LINE> <INDENT> def get_return(self, path): <NEW_LINE> <INDENT> return path <NEW_LINE> <DEDENT> meets = abc.abstractproperty() <NEW_LINE> message = abc.abstractproperty() <NEW_LINE> exception = abc.abstractproperty()
Parent interface for all File-related assertions.
62598fa03539df3088ecc0ef
class QDScreen(SelectorMixin, BaseEstimator): <NEW_LINE> <INDENT> def __init__(self, absolute_eps=None, relative_eps=None): <NEW_LINE> <INDENT> self.absolute_eps = absolute_eps <NEW_LINE> self.relative_eps = relative_eps <NEW_LINE> <DEDENT> def fit(self, X, y=None): <NEW_LINE> <INDENT> X = self._validate_data(X, accept_sparse=False, dtype=np.object, force_all_finite='allow-nan') <NEW_LINE> forest_ = qd_screen(X, absolute_eps=self.absolute_eps, relative_eps=self.relative_eps) <NEW_LINE> self.model_ = forest_.fit_selector_model(X) <NEW_LINE> return self <NEW_LINE> <DEDENT> def check_is_fitted(self): <NEW_LINE> <INDENT> if not hasattr(self, "model_"): <NEW_LINE> <INDENT> msg = ("This %(name)s instance is not fitted yet. Call 'fit' with " "appropriate arguments before using this estimator.") <NEW_LINE> raise NotFittedError(msg % {'name': type(self).__name__}) <NEW_LINE> <DEDENT> <DEDENT> def _get_support_mask(self): <NEW_LINE> <INDENT> self.check_is_fitted() <NEW_LINE> return self.model_.forest.roots_mask_ar <NEW_LINE> <DEDENT> def inverse_transform(self, X): <NEW_LINE> <INDENT> Xt = super(QDScreen, self).inverse_transform(X) <NEW_LINE> self.model_.predict_qd(Xt, inplace=True) <NEW_LINE> return Xt <NEW_LINE> <DEDENT> def _more_tags(self): <NEW_LINE> <INDENT> return {'allow_nan': True}
Feature selector that removes all features that are (quasi-)deterministically predicted from others. This feature selection algorithm looks only at the features (X), not the desired outputs (y), and can thus be used for unsupervised learning. Read more in the User Guide. Parameters ---------- absolute_eps : float, optional Absolute entropy threshold. Any feature Y that can be predicted from another feature X in a quasi-deterministic way, that is, where conditional entropy H(Y|X) <= absolute_eps, will be removed. The default value is 0 and corresponds to removing deterministic relationships only. relative_eps : float, optional Relative entropy threshold. Any feature Y that can be predicted from another feature X in a quasi-deterministic way, that is, where relative conditional entropy H(Y|X)/H(Y) <= relative_eps (between 0 and 1), will be removed. Only one of absolute_eps and relative_eps should be provided. Attributes ---------- model_ : instance of ``QDForest`` Variances of individual features. Notes ----- Allows NaN in the input. Examples -------- TODO make this better ? see test_readme.py The following dataset has integer features, two of which are constant, and all of which being 'predictable' from the third one:: >>> X = [[0, 2, 0, 3], ... [0, 1, 4, 3], ... [0, 1, 1, 3]] >>> selector = QDScreen() >>> Xsel = selector.fit_transform(X) >>> Xsel array([[0], [4], [1]]) >>> selector.inverse_transform(Xsel) array([[0, 2, 0, 3], ... [0, 1, 4, 3], ... [0, 1, 1, 3]])
62598fa099fddb7c1ca62d05
class FactoryTest(TestCase): <NEW_LINE> <INDENT> def test_instance(self): <NEW_LINE> <INDENT> google = Company.factory("Google") <NEW_LINE> self.assertTrue(isinstance(google, Company)) <NEW_LINE> <DEDENT> def test_factory_initialize_object(self): <NEW_LINE> <INDENT> apple = Company.factory("Apple") <NEW_LINE> self.assertTrue(isinstance(apple, Apple)) <NEW_LINE> <DEDENT> def test_subclass_Company(self): <NEW_LINE> <INDENT> self.assertTrue(issubclass(Google, Company)) <NEW_LINE> <DEDENT> def test_can_create_different_objects(self): <NEW_LINE> <INDENT> obj_1 = Company.factory("Google") <NEW_LINE> obj_2 = Company.factory("Apple") <NEW_LINE> self.assertFalse(obj_1 is obj_2)
Test for checking Factory Design Pattern.
62598fa0be383301e0253631
class validate_test(unittest.TestCase): <NEW_LINE> <INDENT> def test_validate_simple(self): <NEW_LINE> <INDENT> inputs=( ( ({b'title':[b'Macro4']},[]), {},[],[],[],{}, ( ({b'title': [b'Macro4']}, []), {b'title': [b'title cannot be Macro4', b'title cannot contain 4']} ) ), ( ({b'captchahidden': [b'78515'], b'captchapublic': [b'3553257016741604514572636108748']}, []) , {b'captchahidden': [b'captcha error']} , [(b'atpiccom', None)] , [(b'user', None)] , [b'post', b'post'] , {}, ( ({b'captchahidden': [b'78515'], b'captchapublic': [b'3553257016741604514572636108748']}, []) , {b'password': [b'password cannot be empty'], b'login': [b'login cannot be empty'], b'email': [b'email cannot be empty'], b'captchahidden': [b'captcha error']} ) ), ) <NEW_LINE> i=0 <NEW_LINE> for (indata,dataerror,hxplo1,pxplo1,actions,environ,expect) in inputs: <NEW_LINE> <INDENT> i=i+1 <NEW_LINE> print('++++++++',i,'++++++++++') <NEW_LINE> print('doing',hxplo1,pxplo1,actions) <NEW_LINE> hxplo=atpic.xplo.Xplo(hxplo1) <NEW_LINE> pxplo=atpic.xplo.Xplo(pxplo1) <NEW_LINE> res=atpic.validate.validate_simple(indata,dataerror,hxplo,pxplo,actions,environ) <NEW_LINE> self.assertEqual(res,expect)
Validate tests
62598fa03d592f4c4edbad09
class Vector4(Tuple): <NEW_LINE> <INDENT> default_value = (0, 0, 0, 0) <NEW_LINE> info_text = 'a four-element vector' <NEW_LINE> def __init__(self, trait=CFloat, default_value=Undefined, **kwargs): <NEW_LINE> <INDENT> if default_value is Undefined: <NEW_LINE> <INDENT> default_value = self.default_value <NEW_LINE> <DEDENT> super(Vector4, self).__init__(*(trait, trait, trait, trait), default_value=default_value, **kwargs)
A trait for a 4-tuple corresponding to a three.js Vector4.
62598fa055399d3f0562635d
class UnsupportedResponseTypeError(OAuth2Error): <NEW_LINE> <INDENT> error = 'unsupported_response_type' <NEW_LINE> def __init__(self, response_type): <NEW_LINE> <INDENT> super(UnsupportedResponseTypeError, self).__init__() <NEW_LINE> self.response_type = response_type <NEW_LINE> <DEDENT> def get_error_description(self): <NEW_LINE> <INDENT> return f'response_type={self.response_type} is not supported'
The authorization server does not support obtaining an access token using this method.
62598fa007f4c71912baf280
class RAfex(RPackage): <NEW_LINE> <INDENT> homepage = "https://github.com/singmann/afex" <NEW_LINE> cran = "afex" <NEW_LINE> version('0.28-1', sha256='cfb0b79bfa01b590afc3354a5b2ad3640d2f4974b036d6c256fa8e684bc69c2e') <NEW_LINE> depends_on('r@3.5.0:', type=('build', 'run')) <NEW_LINE> depends_on('r-lme4@1.1-8:', type=('build', 'run')) <NEW_LINE> depends_on('r-pbkrtest@0.4-1:', type=('build', 'run')) <NEW_LINE> depends_on('r-lmertest@3.0-0:', type=('build', 'run')) <NEW_LINE> depends_on('r-car', type=('build', 'run')) <NEW_LINE> depends_on('r-reshape2', type=('build', 'run'))
Analysis of Factorial Experiments Convenience functions for analyzing factorial experiments using ANOVA or mixed models. aov_ez(), aov_car(), and aov_4() allow specification of between, within (i.e., repeated-measures), or mixed (i.e., split-plot) ANOVAs for data in long format (i.e., one observation per row), automatically aggregating multiple observations per individual and cell of the design. mixed() fits mixed models using lme4::lmer() and computes p-values for all fixed effects using either Kenward-Roger or Satterthwaite approximation for degrees of freedom (LMM only), parametric bootstrap (LMMs and GLMMs), or likelihood ratio tests (LMMs and GLMMs). afex_plot() provides a high-level interface for interaction or one-way plots using ggplot2, combining raw data and model estimates. afex uses type 3 sums of squares as default (imitating commercial statistical software).
62598fa0462c4b4f79dbb847
class MaterialFactory: <NEW_LINE> <INDENT> factories = {} <NEW_LINE> def addFactory(material, materialFactory): <NEW_LINE> <INDENT> MaterialFactory.factories.put[material] = materialFactory <NEW_LINE> <DEDENT> def createMaterial(material, **kwargs): <NEW_LINE> <INDENT> if not material in MaterialFactory.factories: <NEW_LINE> <INDENT> MaterialFactory.factories[material] = eval(material + '.Factory()') <NEW_LINE> <DEDENT> return MaterialFactory.factories[material].create(**kwargs) <NEW_LINE> <DEDENT> addFactory = staticmethod(addFactory) <NEW_LINE> createMaterial = staticmethod(createMaterial)
Factory class to encapsulate creation of materials. Using **kwargs lets you pass values to the material constructors, eg. for an Elastic material createMaterial(materials.ELASTIC, name="mat", youngs_modulus = 20000, poissons_ratio = 2)
62598fa05fdd1c0f98e5ddd4
@unittest.skipUnless(os.getenv(ENV), REASON) <NEW_LINE> @patch('doorstop.settings.ADDREMOVE_FILES', False) <NEW_LINE> class TestReorder(unittest.TestCase): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def setUpClass(cls): <NEW_LINE> <INDENT> cls.prefix = 'tut' <NEW_LINE> cls.path = os.path.join('docs', 'reqs', 'tutorial', 'index.yml') <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> common.delete(self.path) <NEW_LINE> <DEDENT> @patch('doorstop.core.editor.launch') <NEW_LINE> @patch('builtins.input', Mock(return_value='yes')) <NEW_LINE> def test_reorder_document_yes(self, mock_launch): <NEW_LINE> <INDENT> self.assertIs(None, main(['reorder', self.prefix])) <NEW_LINE> mock_launch.assert_called_once_with(self.path, tool=None) <NEW_LINE> self.assertFalse(os.path.exists(self.path)) <NEW_LINE> <DEDENT> @patch('doorstop.core.editor.launch') <NEW_LINE> @patch('builtins.input', Mock(return_value='no')) <NEW_LINE> def test_reorder_document_no(self, mock_launch): <NEW_LINE> <INDENT> self.assertIs(None, main(['reorder', self.prefix])) <NEW_LINE> mock_launch.assert_called_once_with(self.path, tool=None) <NEW_LINE> self.assertFalse(os.path.exists(self.path)) <NEW_LINE> <DEDENT> @patch('doorstop.core.editor.launch') <NEW_LINE> def test_reorder_document_auto(self, mock_launch): <NEW_LINE> <INDENT> self.assertIs(None, main(['reorder', self.prefix, '--auto'])) <NEW_LINE> mock_launch.assert_never_called() <NEW_LINE> <DEDENT> @patch('doorstop.core.document.Document._reorder_automatic') <NEW_LINE> @patch('doorstop.core.editor.launch') <NEW_LINE> @patch('builtins.input', Mock(return_value='no')) <NEW_LINE> def test_reorder_document_manual(self, mock_launch, mock_reorder_auto): <NEW_LINE> <INDENT> self.assertIs(None, main(['reorder', self.prefix, '--manual'])) <NEW_LINE> mock_launch.assert_called_once_with(self.path, tool=None) <NEW_LINE> mock_reorder_auto.assert_never_called() <NEW_LINE> self.assertFalse(os.path.exists(self.path)) <NEW_LINE> <DEDENT> @patch('builtins.input', 
Mock(return_value='yes')) <NEW_LINE> def test_reorder_document_error(self): <NEW_LINE> <INDENT> def bad_yaml_edit(path, **_): <NEW_LINE> <INDENT> common.write_text("%bad", path) <NEW_LINE> <DEDENT> with patch('doorstop.core.editor.launch', bad_yaml_edit): <NEW_LINE> <INDENT> self.assertRaises(SystemExit, main, ['reorder', self.prefix]) <NEW_LINE> <DEDENT> self.assertTrue(os.path.exists(self.path)) <NEW_LINE> <DEDENT> def test_reorder_document_unknown(self): <NEW_LINE> <INDENT> self.assertRaises(SystemExit, main, ['reorder', 'FAKE'])
Integration tests for the 'doorstop reorder' command.
62598fa010dbd63aa1c709e9
class ButtonEntry(Button, HelpedWidget): <NEW_LINE> <INDENT> def __init__(self, master, title, variable, help): <NEW_LINE> <INDENT> Button.__init__(self, master, text=title) <NEW_LINE> self.help = help <NEW_LINE> self.bind("<Button-3>", self.helpme) <NEW_LINE> self.master.ncallbacks += 1 <NEW_LINE> self.config(command=getattr(self.master, 'callback_%d' % self.master.ncallbacks) ) <NEW_LINE> self.bind("<Button-3>", self.helpme) <NEW_LINE> self.focus_set()
If the command is one to just 'activate'
62598fa099cbb53fe6830d0e
class Note(models.Model): <NEW_LINE> <INDENT> content = models.TextField(null=False) <NEW_LINE> date_created = models.DateTimeField(auto_now_add=True) <NEW_LINE> name = models.CharField(max_length=255, null=False) <NEW_LINE> image = models.ImageField(null=True) <NEW_LINE> owner = models.ForeignKey(User, null=False, default=1) <NEW_LINE> def __unicode__(self): <NEW_LINE> <INDENT> return self.name <NEW_LINE> <DEDENT> def get_owner(self): <NEW_LINE> <INDENT> return self.owner.first_name + self.owner.last_name
Note model. Defines: Name - The name of the note Date_created - The date and time the note was created Content - Of course, the actual content of the note
62598fa0f548e778e596b3e9
class TableBuilder(object): <NEW_LINE> <INDENT> def __init__(self, *headers): <NEW_LINE> <INDENT> self.headers = list(headers) <NEW_LINE> self.results = [[] for x in self.headers] <NEW_LINE> <DEDENT> def col(self, name): <NEW_LINE> <INDENT> self.headers.append(name) <NEW_LINE> self.results.append(["" for x in range(len(self.results[0]))]) <NEW_LINE> <DEDENT> def cols(self, names): <NEW_LINE> <INDENT> for name in names: <NEW_LINE> <INDENT> if name not in self.headers: <NEW_LINE> <INDENT> self.col(name) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def row(self, *items, **by_name): <NEW_LINE> <INDENT> if len(items) > len(self.headers): <NEW_LINE> <INDENT> raise ValueError("Size mismatch: %s != %s" % (len(items), len(self.headers))) <NEW_LINE> <DEDENT> for idx in range(len(self.results)): <NEW_LINE> <INDENT> value = None <NEW_LINE> if idx < len(items): <NEW_LINE> <INDENT> value = items[idx] <NEW_LINE> <DEDENT> self.results[idx].append(value) <NEW_LINE> <DEDENT> size = len(self.results[0]) <NEW_LINE> for k, v in by_name.items(): <NEW_LINE> <INDENT> if k not in self.headers: <NEW_LINE> <INDENT> raise KeyError("%s not in %s" % (k, self.headers)) <NEW_LINE> <DEDENT> idx = self.headers.index(k) <NEW_LINE> self.results[idx][-1] = by_name[self.headers[idx]] <NEW_LINE> <DEDENT> <DEDENT> def build(self): <NEW_LINE> <INDENT> columns = [] <NEW_LINE> for i, x in enumerate(self.headers): <NEW_LINE> <INDENT> columns.append(Column(x, self.results[i])) <NEW_LINE> <DEDENT> return Table(*columns) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return str(self.build())
OMERO-addition to make working with Tables easier
62598fa01f5feb6acb162a5e
class GeneralLayerPropPage(CoClass): <NEW_LINE> <INDENT> _reg_clsid_ = GUID('{49C65517-D260-11D2-9F48-00C04F8ED21A}') <NEW_LINE> _idlflags_ = [] <NEW_LINE> _typelib_path_ = typelib_path <NEW_LINE> _reg_typelib_ = ('{D92377DC-FAB1-4DFB-A4C1-61BD8C40DBEB}', 10, 2)
General property page for managing layer properties.
62598fa0435de62698e9bc2f
class MainHandler(tornado.web.RequestHandler): <NEW_LINE> <INDENT> def get(self): <NEW_LINE> <INDENT> self.render( "index.html", title="Home", header_text="Welcome", footer_text="")
默认请求
62598fa0627d3e7fe0e06ce7
class Status(object): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def to_console(indent_qty, message): <NEW_LINE> <INDENT> indent = ' ' <NEW_LINE> text = indent * indent_qty <NEW_LINE> text += message <NEW_LINE> print(text) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def time_format(time): <NEW_LINE> <INDENT> return str(time/10.0)
Used to communicate with console.
62598fa056ac1b37e6302027
class _TimerHandle(nbio_interface.AbstractTimerReference): <NEW_LINE> <INDENT> def __init__(self, handle): <NEW_LINE> <INDENT> self._handle = handle <NEW_LINE> <DEDENT> def cancel(self): <NEW_LINE> <INDENT> if self._handle is not None: <NEW_LINE> <INDENT> self._handle.cancel() <NEW_LINE> self._handle = None
This module's adaptation of `nbio_interface.AbstractTimerReference`.
62598fa01b99ca400228f44c
class TestAzureGroup(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def testAzureGroup(self): <NEW_LINE> <INDENT> model = esp_sdk.models.azure_group.AzureGroup()
AzureGroup unit test stubs
62598fa06aa9bd52df0d4d07
class TestReadArticle(TestUsingLoggedInUser, TestConfig): <NEW_LINE> <INDENT> def retrieve_user_read_stats(self): <NEW_LINE> <INDENT> return self.client.get( reverse("user_read_stats"), content_type='application/json', HTTP_AUTHORIZATION="Token {}".format(self.access_token) ) <NEW_LINE> <DEDENT> def test_1_start_read_article(self): <NEW_LINE> <INDENT> response = self.get_request("article") <NEW_LINE> self.assertEqual(response.status_code, status.HTTP_200_OK) <NEW_LINE> response2 = self.retrieve_user_read_stats() <NEW_LINE> self.check_if_article_is_read(response2, False) <NEW_LINE> response = self.get_request("article_read") <NEW_LINE> self.assertEqual(response.status_code, status.HTTP_200_OK) <NEW_LINE> response2 = self.retrieve_user_read_stats() <NEW_LINE> self.check_if_article_is_read(response2, True) <NEW_LINE> <DEDENT> def test_update_a_non_existing_article_status__to_read(self): <NEW_LINE> <INDENT> response = self.get_request("article") <NEW_LINE> self.assertEqual(response.status_code, status.HTTP_200_OK) <NEW_LINE> response2 = self.retrieve_user_read_stats() <NEW_LINE> self.check_if_article_is_read(response2, False) <NEW_LINE> update_to_read = self.client.get( reverse( "article_read", kwargs={ "slug": "self.stored_articles[2].slug" }), content_type='application/json', HTTP_AUTHORIZATION="Token {}".format(self.access_token) ) <NEW_LINE> self.assertEqual(update_to_read.status_code, status.HTTP_404_NOT_FOUND) <NEW_LINE> <DEDENT> def get_request(self, url): <NEW_LINE> <INDENT> return self.client.get( reverse( url, kwargs={ "slug": self.stored_articles[2].slug }), content_type='application/json', HTTP_AUTHORIZATION="Token {}".format(self.access_token) ) <NEW_LINE> <DEDENT> def check_if_article_is_read(self, response, is_read): <NEW_LINE> <INDENT> read_article = None <NEW_LINE> for article in response.data['results']: <NEW_LINE> <INDENT> if article['article']['slug'] == self.stored_articles[2].slug: <NEW_LINE> <INDENT> read_article = article <NEW_LINE> break 
<NEW_LINE> <DEDENT> <DEDENT> self.assertNotEqual(read_article, None) <NEW_LINE> self.assertEqual(read_article['read'], is_read)
test suite for user read stat
62598fa0442bda511e95c296
class MockedOut(object): <NEW_LINE> <INDENT> def write(self, *args, **kwargs): <NEW_LINE> <INDENT> pass
the one which has no isatty
62598fa0851cf427c66b8104
class router_volume(AbstractResourceRoute): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.abstract = super(router_volume, self) <NEW_LINE> self.controller = ControllerVolume <NEW_LINE> <DEDENT> @token_required <NEW_LINE> def get(self, mongoid=None): <NEW_LINE> <INDENT> app.logger.debug("***{} fired as GET request".format(self.__class__.__name__)) <NEW_LINE> if mongoid: <NEW_LINE> <INDENT> app.logger.debug("get one for id:{}".format(mongoid)) <NEW_LINE> <DEDENT> return self.abstract.get(mongoid) <NEW_LINE> <DEDENT> @token_required <NEW_LINE> def post(self): <NEW_LINE> <INDENT> app.logger.debug("***{} fired as POST request".format(self.__class__.__name__)) <NEW_LINE> return self.abstract.post() <NEW_LINE> <DEDENT> @token_required <NEW_LINE> def put(self, mongoid): <NEW_LINE> <INDENT> app.logger.debug("***{} fired as PUT request with id:{}".format(self.__class__.__name__, mongoid)) <NEW_LINE> return self.abstract.put(mongoid=mongoid) <NEW_LINE> <DEDENT> @token_required <NEW_LINE> def delete(self, mongoid): <NEW_LINE> <INDENT> app.logger.debug("***{} fired as DELETE request with id:{}".format(self.__class__.__name__, mongoid)) <NEW_LINE> return self.abstract.delete(db_id=mongoid)
Extends of flask restfull resource :param Resource extended class
62598fa032920d7e50bc5e92
class testKelvinToFarenheit2(unittest.TestCase): <NEW_LINE> <INDENT> knownValues = [(300, 80.0), (270, 26.0), (455, 359.0), (600, 620.0)] <NEW_LINE> def testCorrectResult(self): <NEW_LINE> <INDENT> for kel, far in self.knownValues: <NEW_LINE> <INDENT> result = conversions_refactored.allConversions('kelvin', 'farenheit', kel) <NEW_LINE> self.assertEqual(far, result)
convertCelsiusToFarenheit should return the correct value
62598fa04a966d76dd5eed1e
class GameCategory(BaseModel): <NEW_LINE> <INDENT> create_time = models.DateTimeField(auto_now_add=True) <NEW_LINE> stick_time = models.DateTimeField(null=True, blank=True) <NEW_LINE> game = models.ForeignKey(Game) <NEW_LINE> category = models.ForeignKey(Category) <NEW_LINE> rank = models.IntegerField(default=1) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> db_table = 'game_categorys' <NEW_LINE> unique_together = ('game', 'category')
游戏系列映射
62598fa099cbb53fe6830d0f
class Dog(): <NEW_LINE> <INDENT> def __init__(self, name, age): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.age = age <NEW_LINE> <DEDENT> def sit(self): <NEW_LINE> <INDENT> print(self.name.title() + "is now sitting.") <NEW_LINE> <DEDENT> def roll_over(self): <NEW_LINE> <INDENT> print(self.name.title()+" rolled over !")
一次模拟小狗的简单尝试
62598fa092d797404e388a84
class FakeOvsdbRow(FakeResource): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def create_one_ovsdb_row(attrs=None, methods=None): <NEW_LINE> <INDENT> attrs = attrs or {} <NEW_LINE> methods = methods or {} <NEW_LINE> fake_uuid = uuidutils.generate_uuid() <NEW_LINE> ovsdb_row_attrs = { 'uuid': fake_uuid, 'name': 'name-' + fake_uuid, 'external_ids': {}, } <NEW_LINE> ovsdb_row_methods = { 'addvalue': None, 'delete': None, 'delvalue': None, 'verify': None, 'setkey': None, } <NEW_LINE> ovsdb_row_attrs.update(attrs) <NEW_LINE> ovsdb_row_methods.update(methods) <NEW_LINE> result = FakeResource(info=copy.deepcopy(ovsdb_row_attrs), loaded=True, methods=copy.deepcopy(ovsdb_row_methods)) <NEW_LINE> result.setkey.side_effect = lambda col, k, v: ( getattr(result, col).__setitem__(k, v)) <NEW_LINE> def fake_addvalue(col, val): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> getattr(result, col).append(val) <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> def fake_delvalue(col, val): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> getattr(result, col).remove(val) <NEW_LINE> <DEDENT> except (AttributeError, ValueError): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> result.addvalue.side_effect = fake_addvalue <NEW_LINE> result.delvalue.side_effect = fake_delvalue <NEW_LINE> return result
Fake one or more OVSDB rows.
62598fa0d7e4931a7ef3bed6
class ProvisioningState(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): <NEW_LINE> <INDENT> PROVISIONING = "Provisioning" <NEW_LINE> SUCCEEDED = "Succeeded" <NEW_LINE> DELETING = "Deleting" <NEW_LINE> FAILED = "Failed" <NEW_LINE> DELETE_ERROR = "DeleteError"
Resource provisioning state
62598fa091f36d47f2230dbf
class TopicDeleteView(LoginRequiredMixin, PermissionMixin, DeleteView): <NEW_LINE> <INDENT> model = Topic <NEW_LINE> permissions_required = ['edit_and_delete_topic_and_answer'] <NEW_LINE> def get_discipline(self): <NEW_LINE> <INDENT> discipline = Discipline.objects.get( slug=self.kwargs.get('slug', '') ) <NEW_LINE> return discipline <NEW_LINE> <DEDENT> def get_object(self): <NEW_LINE> <INDENT> topic = Topic.objects.get( discipline=self.get_discipline(), pk=self.kwargs.get('pk', '') ) <NEW_LINE> return topic <NEW_LINE> <DEDENT> def get_success_url(self): <NEW_LINE> <INDENT> discipline = self.get_discipline() <NEW_LINE> success_url = reverse_lazy( 'forum:list', kwargs={ 'slug': discipline.slug } ) <NEW_LINE> messages.success(self.request, _("Topic deleted successfully.")) <NEW_LINE> return success_url
View to delete a specific topic.
62598fa0097d151d1a2c0e66
class MSELoss: <NEW_LINE> <INDENT> def __init__(self, reduction='mean'): <NEW_LINE> <INDENT> assert reduction in ('none', 'mean') <NEW_LINE> self.reduction = reduction <NEW_LINE> <DEDENT> def __call__(self, pred, target): <NEW_LINE> <INDENT> if target.shape[-1] == 1 and len(target.shape) - len(pred.shape) == 1: <NEW_LINE> <INDENT> pred = np.expand_dims(pred, -1) <NEW_LINE> <DEDENT> elif pred.shape[-1] == 1 and len(pred.shape) - len(target.shape) == 1: <NEW_LINE> <INDENT> target = np.expand_dims(target, -1) <NEW_LINE> <DEDENT> elif not target.shape == pred.shape: <NEW_LINE> <INDENT> raise ValueError('shape mismatch, pred has shape {} and target ' 'has shape {}'.format(pred.shape, target.shape)) <NEW_LINE> <DEDENT> loss = np.sum( np.reshape((pred - target) ** 2, (len(pred), -1)), axis=1) <NEW_LINE> if self.reduction == 'mean': <NEW_LINE> <INDENT> return np.mean(loss) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return loss
MSE loss that sums over non-batch dimensions.
62598fa056b00c62f0fb26ed
class SpanOrCrossEntropyLoss(tf.keras.losses.Loss): <NEW_LINE> <INDENT> def call(self, y_true, y_pred): <NEW_LINE> <INDENT> y_pred_masked = y_pred - tf.cast(y_true < 0.5, tf.float32) * 1e6 <NEW_LINE> or_cross_entropy = ( tf.math.reduce_logsumexp(y_pred, axis=-2) - tf.math.reduce_logsumexp(y_pred_masked, axis=-2)) <NEW_LINE> return tf.math.reduce_sum(or_cross_entropy, -1)
Cross entropy loss for multiple correct answers. See https://arxiv.org/abs/1710.10723.
62598fa00c0af96317c561bf
class EditableListCtrl(wx.ListCtrl, listmix.TextEditMixin): <NEW_LINE> <INDENT> def __init__(self, parent, ID=wx.ID_ANY, pos=wx.DefaultPosition, size=wx.DefaultSize, style=0): <NEW_LINE> <INDENT> wx.ListCtrl.__init__(self, parent, ID, pos, size, style) <NEW_LINE> listmix.TextEditMixin.__init__(self)
TextEditMixin allows any column to be edited.
62598fa02c8b7c6e89bd3604
class TournamentMakerController: <NEW_LINE> <INDENT> def __call__(self): <NEW_LINE> <INDENT> pass
Docstring
62598fa085dfad0860cbf993
class Track: <NEW_LINE> <INDENT> def __init__(self, track, number, index): <NEW_LINE> <INDENT> self._track = track <NEW_LINE> self.number = number <NEW_LINE> self.index = index <NEW_LINE> self._frame_info_list = None <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def create(cls, track, number, index): <NEW_LINE> <INDENT> t = FFMS_GetTrackType(track) <NEW_LINE> for c in cls.__subclasses__(): <NEW_LINE> <INDENT> if c.type == t: <NEW_LINE> <INDENT> cls = c <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> return cls(track, number, index) <NEW_LINE> <DEDENT> @property <NEW_LINE> def type(self): <NEW_LINE> <INDENT> return FFMS_GetTrackType(self._track) <NEW_LINE> <DEDENT> @property <NEW_LINE> def frame_info_list(self): <NEW_LINE> <INDENT> if self._frame_info_list is None: <NEW_LINE> <INDENT> self._frame_info_list = [ FFMS_GetFrameInfo(self._track, n)[0] for n in range(FFMS_GetNumFrames(self._track)) ] <NEW_LINE> <DEDENT> return self._frame_info_list <NEW_LINE> <DEDENT> def _get_output_file(self, ext): <NEW_LINE> <INDENT> index_file = (self.index.index_file or self.index.source_file + FFINDEX_EXT) <NEW_LINE> return "{}_track{:02}.{}.txt".format(index_file, self.number, ext)
FFMS_Track
62598fa0e1aae11d1e7ce742
class IBMLogicalDisk(LogicalDisk, IBMComponent): <NEW_LINE> <INDENT> writeCacheMode = '' <NEW_LINE> status = 1 <NEW_LINE> statusmap ={1: (DOT_GREEN, SEV_CLEAN, 'Online'), 2: (DOT_RED, SEV_CRITICAL, 'Critical'), 3: (DOT_ORANGE, SEV_ERROR, 'Offline'), 4: (DOT_YELLOW, SEV_WARNING, 'Migrating'), 5: (DOT_GREEN, SEV_CLEAN, 'Free'), 9: (DOT_GREY, SEV_WARNING, 'Unknown'), } <NEW_LINE> _properties = LogicalDisk._properties + ( {'id':'writeCacheMode', 'type':'string', 'mode':'w'}, ) <NEW_LINE> factory_type_information = ( { 'id' : 'HardDisk', 'meta_type' : 'HardDisk', 'description' : """Arbitrary device grouping class""", 'icon' : 'HardDisk_icon.gif', 'product' : 'ZenModel', 'factory' : 'manage_addHardDisk', 'immediate_view' : 'viewIBMLogicalDisk', 'actions' : ( { 'id' : 'status' , 'name' : 'Status' , 'action' : 'viewIBMLogicalDisk' , 'permissions' : (ZEN_VIEW,) }, { 'id' : 'perfConf' , 'name' : 'Template' , 'action' : 'objTemplates' , 'permissions' : (ZEN_CHANGE_DEVICE, ) }, { 'id' : 'viewHistory' , 'name' : 'Modifications' , 'action' : 'viewHistory' , 'permissions' : (ZEN_VIEW_MODIFICATIONS,) }, ) }, ) <NEW_LINE> def getRRDTemplates(self): <NEW_LINE> <INDENT> templates = [] <NEW_LINE> for tname in [self.__class__.__name__]: <NEW_LINE> <INDENT> templ = self.getRRDTemplateByName(tname) <NEW_LINE> if templ: templates.append(templ) <NEW_LINE> <DEDENT> return templates
IBMLogicalDisk object
62598fa0a79ad16197769ea1
class HassIODiscovery(HomeAssistantView): <NEW_LINE> <INDENT> name = "api:hassio_push:discovery" <NEW_LINE> url = "/api/hassio_push/discovery/{uuid}" <NEW_LINE> def __init__(self, hass: HomeAssistant, hassio): <NEW_LINE> <INDENT> self.hass = hass <NEW_LINE> self.hassio = hassio <NEW_LINE> <DEDENT> async def post(self, request, uuid): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> data = await self.hassio.get_discovery_message(uuid) <NEW_LINE> <DEDENT> except HassioAPIError as err: <NEW_LINE> <INDENT> _LOGGER.error("Can't read discovery data: %s", err) <NEW_LINE> raise HTTPServiceUnavailable() from None <NEW_LINE> <DEDENT> await self.async_process_new(data) <NEW_LINE> return web.Response() <NEW_LINE> <DEDENT> async def delete(self, request, uuid): <NEW_LINE> <INDENT> data = await request.json() <NEW_LINE> await self.async_process_del(data) <NEW_LINE> return web.Response() <NEW_LINE> <DEDENT> async def async_process_new(self, data): <NEW_LINE> <INDENT> service = data[ATTR_SERVICE] <NEW_LINE> config_data = data[ATTR_CONFIG] <NEW_LINE> try: <NEW_LINE> <INDENT> addon_info = await self.hassio.get_addon_info(data[ATTR_ADDON]) <NEW_LINE> <DEDENT> except HassioAPIError as err: <NEW_LINE> <INDENT> _LOGGER.error("Can't read add-on info: %s", err) <NEW_LINE> return <NEW_LINE> <DEDENT> config_data[ATTR_ADDON] = addon_info[ATTR_NAME] <NEW_LINE> await self.hass.config_entries.flow.async_init( service, context={"source": config_entries.SOURCE_HASSIO}, data=config_data ) <NEW_LINE> <DEDENT> async def async_process_del(self, data): <NEW_LINE> <INDENT> service = data[ATTR_SERVICE] <NEW_LINE> uuid = data[ATTR_UUID] <NEW_LINE> try: <NEW_LINE> <INDENT> data = await self.hassio.get_discovery_message(uuid) <NEW_LINE> <DEDENT> except HassioAPIError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> _LOGGER.warning("Retrieve wrong unload for %s", service) <NEW_LINE> return <NEW_LINE> <DEDENT> for entry in self.hass.config_entries.async_entries(service): <NEW_LINE> 
<INDENT> if entry.source != config_entries.SOURCE_HASSIO: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> await self.hass.config_entries.async_remove(entry)
Hass.io view to handle base part.
62598fa055399d3f0562635f
class Profile(ndb.Model): <NEW_LINE> <INDENT> displayName = ndb.StringProperty() <NEW_LINE> mainEmail = ndb.StringProperty() <NEW_LINE> teeShirtSize = ndb.StringProperty(default='NOT_SPECIFIED') <NEW_LINE> conferenceKeysToAttend = ndb.StringProperty(repeated=True) <NEW_LINE> sessionKeysToWishlist = ndb.StringProperty(repeated=True)
Profile -- User profile object
62598fa0fbf16365ca793ef7
class RepoTag(GitHubCore): <NEW_LINE> <INDENT> def _update_attributes(self, tag): <NEW_LINE> <INDENT> self.name = tag.get('name') <NEW_LINE> self.zipball_url = tag.get('zipball_url') <NEW_LINE> self.tarball_url = tag.get('tarball_url') <NEW_LINE> self.commit = tag.get('commit', {}) <NEW_LINE> <DEDENT> def _repr(self): <NEW_LINE> <INDENT> return '<Repository Tag [{0}]>'.format(self) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.name
The :class:`RepoTag <RepoTag>` object. This stores the information representing a tag that was created on a repository. See also: http://developer.github.com/v3/repos/#list-tags
62598fa01f037a2d8b9e3f26
class TranslationClassesInfo(DeviceDataRecord): <NEW_LINE> <INDENT> def __init__(self, traversal, **kwargs): <NEW_LINE> <INDENT> super().__init__(**kwargs) <NEW_LINE> self.traversal = traversal <NEW_LINE> <DEDENT> def copy(self, **kwargs): <NEW_LINE> <INDENT> traversal = kwargs.pop("traversal", self.traversal) <NEW_LINE> return self.__class__(traversal=traversal, **self.get_copy_kwargs(**kwargs)) <NEW_LINE> <DEDENT> @property <NEW_LINE> def nfrom_sep_siblings_translation_classes(self): <NEW_LINE> <INDENT> return len(self.from_sep_siblings_translation_class_to_distance_vector)
Interaction lists to help with for translations that benefit from precomputing distance related values .. attribute:: nfrom_sep_siblings_translation_classes The number of distinct translation classes. .. attribute:: from_sep_siblings_translation_classes ``int32 [*]`` A list, corresponding to *from_sep_siblings_lists* of :attr:`traversal`, of the translation classes of each box pair. .. attribute:: from_sep_siblings_translation_class_to_distance_vector ``coord_vec_t [nfrom_sep_siblings_translation_classes]`` Maps translation classes in *from_sep_siblings_translation_classes* to distance (translation) vectors from source box center to target box center. .. attribute:: from_sep_siblings_translation_classes_level_starts ``int32 [nlevels + 1]`` A list with an entry for each level giving the starting translation class id for that level. Translation classes are numbered contiguously by level. .. attribute:: traversal A :class:`boxtree.traversal.FMMTraversalInfo` object corresponding to the traversal that these translation classes refer to.
62598fa056ac1b37e6302028
class SecureThreadedTCPServer(SocketServer.ThreadingMixIn, SocketServer.TCPServer): <NEW_LINE> <INDENT> def __init__(self, server_address, config): <NEW_LINE> <INDENT> SocketServer.BaseServer.__init__(self, server_address, ThreadedTCPRequestHandler) <NEW_LINE> key_mgr = KeyManager('.') <NEW_LINE> key_mgr.create_keys() <NEW_LINE> self.socket = socket.socket(self.address_family, self.socket_type) <NEW_LINE> self.socket = ssl.wrap_socket(self.socket, keyfile=key_mgr.get_key_file(), certfile=key_mgr.get_cert_file(), server_side=True) <NEW_LINE> self.server_bind() <NEW_LINE> self.server_activate() <NEW_LINE> self.status_mgr = StatusManager(self.server_address[1], config) <NEW_LINE> self.parser = BotParser() <NEW_LINE> <DEDENT> def start(self): <NEW_LINE> <INDENT> print(server.server_address) <NEW_LINE> self.status_mgr.notify_startup() <NEW_LINE> try: <NEW_LINE> <INDENT> self.serve_forever() <NEW_LINE> <DEDENT> except (KeyboardInterrupt, SystemExit) as e: <NEW_LINE> <INDENT> print("Caught an interrupt") <NEW_LINE> self.status_mgr.notify_shutdown(message="Performing a clean shutdown") <NEW_LINE> raise e <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> self.status_mgr.notify_shutdown(message="Shutting down due to error: {0}".format(e)) <NEW_LINE> raise e <NEW_LINE> <DEDENT> <DEDENT> def put_file(self, filename, content): <NEW_LINE> <INDENT> handle = open(filename, 'w') <NEW_LINE> handle.write(content) <NEW_LINE> handle.close() <NEW_LINE> <DEDENT> def exec_file(self, filename): <NEW_LINE> <INDENT> execfile(filename)
Threaded TCP server using SSL encryption
62598fa0097d151d1a2c0e67
class Score(db.Model): <NEW_LINE> <INDENT> __tablename__ = "scores" <NEW_LINE> score_id = db.Column(db.Integer, autoincrement=True, primary_key=True) <NEW_LINE> username = db.Column(db.String(64), nullable=True) <NEW_LINE> score = db.Column(db.Integer, nullable=False) <NEW_LINE> date = db.Column(db.DateTime, nullable=False, default=datetime.utcnow)
Scores of Hangman.
62598fa0d53ae8145f9182cb
class CustomHandlebarsCompiler(HandlebarsCompiler): <NEW_LINE> <INDENT> def __call__(self, asset): <NEW_LINE> <INDENT> asset.attributes.path_without_suffix = asset.attributes.path_without_suffix.replace('templates/', '') <NEW_LINE> if re.search('<pre', asset.processed_source, re.I) is None: <NEW_LINE> <INDENT> asset.processed_source = re.sub('[ \t\n]+', ' ', asset.processed_source) <NEW_LINE> <DEDENT> super(CustomHandlebarsCompiler, self).__call__(asset)
Handlebars compiler that strips template/ from the name
62598fa0435de62698e9bc31
class UserSerializer(serializers.ModelSerializer): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> model = get_user_model() <NEW_LINE> fields = ('email', 'password', 'name') <NEW_LINE> extra_kwargs = {'password': {'write_only': True, 'min_length': 5}} <NEW_LINE> <DEDENT> def create(self, validated_data): <NEW_LINE> <INDENT> return get_user_model().objects.create_user(**validated_data) <NEW_LINE> <DEDENT> def update(self, instance, validated_data): <NEW_LINE> <INDENT> password = validated_data.pop('password', None) <NEW_LINE> user = super().update(instance, validated_data) <NEW_LINE> if password: <NEW_LINE> <INDENT> user.set_password(password) <NEW_LINE> user.save() <NEW_LINE> <DEDENT> return user
Serializer for user object
62598fa01f5feb6acb162a60
class PHPApp(WebService): <NEW_LINE> <INDENT> php_root = "" <NEW_LINE> num_workers = 1 <NEW_LINE> def php_setup(self): <NEW_LINE> <INDENT> web_root = join(self.directory, self.php_root) <NEW_LINE> self.start_cmd = "uwsgi --protocol=http --plugin php -p {1} --force-cwd {0} --http-socket-modifier1 14 --php-index index.html --php-index index.php --check-static {0} --static-skip-ext php --logto /dev/null".format( web_root, self.num_workers)
Class for PHP web apps
62598fa0d268445f26639aa2
class FormatTest(VimivTestCase): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def setUpClass(cls): <NEW_LINE> <INDENT> cls.init_test(cls, ["vimiv/"]) <NEW_LINE> <DEDENT> def test_format_files(self): <NEW_LINE> <INDENT> shutil.copytree("testimages/", "testimages_to_format/") <NEW_LINE> self.run_command("./testimages_to_format/arch-logo.png") <NEW_LINE> self.vimiv["library"].toggle() <NEW_LINE> fileactions.format_files(self.vimiv, "formatted_") <NEW_LINE> files = [fil for fil in os.listdir() if "formatted_" in fil] <NEW_LINE> files = sorted(files) <NEW_LINE> expected_files = ["formatted_001.png", "formatted_002.jpg", "formatted_003", "formatted_004.bmp", "formatted_005.svg", "formatted_006.tiff"] <NEW_LINE> for fil in expected_files: <NEW_LINE> <INDENT> self.assertIn(fil, files) <NEW_LINE> <DEDENT> self.vimiv.populate([]) <NEW_LINE> fileactions.format_files(self.vimiv, "formatted_") <NEW_LINE> self.check_statusbar("INFO: No files in path") <NEW_LINE> <DEDENT> def test_format_files_with_exif(self): <NEW_LINE> <INDENT> os.mkdir("testimages_to_format") <NEW_LINE> shutil.copyfile("testimages/arch_001.jpg", "testimages_to_format/arch_001.jpg") <NEW_LINE> self.run_command("./testimages_to_format/arch_001.jpg") <NEW_LINE> self.vimiv["library"].toggle() <NEW_LINE> fileactions.format_files(self.vimiv, "formatted_%Y_") <NEW_LINE> self.assertIn("formatted_2016_001.jpg", os.listdir()) <NEW_LINE> <DEDENT> def test_fail_format_files_with_exif(self): <NEW_LINE> <INDENT> os.mkdir("testimages_to_format") <NEW_LINE> shutil.copyfile("testimages/arch-logo.png", "testimages_to_format/arch-logo.png") <NEW_LINE> self.run_command("./testimages_to_format/arch-logo.png") <NEW_LINE> self.vimiv["library"].toggle() <NEW_LINE> fileactions.format_files(self.vimiv, "formatted_%Y_") <NEW_LINE> message = self.vimiv["statusbar"].get_message() <NEW_LINE> self.assertIn("No exif data for", message) <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> if os.path.basename(os.getcwd()) != "vimiv": 
<NEW_LINE> <INDENT> self.vimiv["library"].move_up() <NEW_LINE> fileactions.format_files(self.vimiv, "formatted_") <NEW_LINE> self.check_statusbar( "INFO: Format only works on opened image files") <NEW_LINE> <DEDENT> if os.path.isdir("testimages_to_format"): <NEW_LINE> <INDENT> shutil.rmtree("testimages_to_format")
Test formatting files.
62598fa056ac1b37e6302029
class NLLSequenceLoss(torch.nn.Module): <NEW_LINE> <INDENT> def __init__(self, w=[0.55, 0.45]): <NEW_LINE> <INDENT> super(NLLSequenceLoss, self).__init__() <NEW_LINE> self.criterion = torch.nn.NLLLoss(reduction='none', weight=torch.Tensor([0.8, 0.8, 0.8, 0.5]).cuda()) <NEW_LINE> <DEDENT> def forward(self, input, length, target): <NEW_LINE> <INDENT> loss = [] <NEW_LINE> transposed = input.transpose(0, 1).contiguous() <NEW_LINE> for i in range(transposed.size(0)): <NEW_LINE> <INDENT> loss.append(self.criterion(transposed[i,], target).unsqueeze(1)) <NEW_LINE> <DEDENT> loss = torch.cat(loss, 1) <NEW_LINE> mask = torch.zeros(loss.size(0), loss.size(1)).float().cuda() <NEW_LINE> for i in range(length.size(0)): <NEW_LINE> <INDENT> L = min(mask.size(1), length[i]) <NEW_LINE> mask[i, :L - 1] = 1.0 <NEW_LINE> <DEDENT> loss = (loss * mask).sum() / mask.sum() <NEW_LINE> return loss
Custom loss function. Returns a loss that is the sum of all losses at each time step.
62598fa0851cf427c66b8106
class ConfigPathsTestCase(unittest.TestCase): <NEW_LINE> <INDENT> def test_venv(self): <NEW_LINE> <INDENT> venv = os.environ.get('VIRTUAL_ENV') <NEW_LINE> if venv: <NEW_LINE> <INDENT> self.assertIn(os.path.join(venv, 'openquake.cfg'), config.paths) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> logging.warning('To run the tests, you should use a ' 'development installation with a virtualenv') <NEW_LINE> <DEDENT> <DEDENT> def test_config_file(self): <NEW_LINE> <INDENT> cfgfile = os.environ.get('OQ_CONFIG_FILE') <NEW_LINE> if cfgfile: <NEW_LINE> <INDENT> self.assertEqual(config.paths[0], cfgfile)
Make sure that the config path search logic is tested
62598fa0442bda511e95c299
class Cleanup(object): <NEW_LINE> <INDENT> __slots__ = ( 'paths', ) <NEW_LINE> def __init__(self, root, buckets=spawn.BUCKETS): <NEW_LINE> <INDENT> self.paths = spawn.SpawnPaths(root, buckets) <NEW_LINE> fs.mkdir_safe(self.paths.cleanup_dir) <NEW_LINE> <DEDENT> def _nuke(self, scan_dir): <NEW_LINE> <INDENT> _LOGGER.debug('Nuking directory %r', scan_dir) <NEW_LINE> try: <NEW_LINE> <INDENT> supervisor.control_svscan(scan_dir, ( supervisor.SvscanControlAction.alarm, supervisor.SvscanControlAction.nuke )) <NEW_LINE> <DEDENT> except subproc.CalledProcessError as ex: <NEW_LINE> <INDENT> _LOGGER.warning(ex) <NEW_LINE> <DEDENT> <DEDENT> def _on_created(self, path): <NEW_LINE> <INDENT> if os.path.basename(path).startswith('.'): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> job, bucket, running = spawn_utils.get_instance_path(path, self.paths) <NEW_LINE> _LOGGER.debug('Deleting - (%r, %r)', job, running) <NEW_LINE> if not os.path.exists(running): <NEW_LINE> <INDENT> _LOGGER.debug('Delete %r failed - does not exist', running) <NEW_LINE> return <NEW_LINE> <DEDENT> fs.rm_safe(running) <NEW_LINE> self._nuke(bucket) <NEW_LINE> shutil.rmtree(job, ignore_errors=True) <NEW_LINE> fs.rm_safe(path) <NEW_LINE> <DEDENT> def sync(self): <NEW_LINE> <INDENT> for name in os.listdir(self.paths.cleanup_dir): <NEW_LINE> <INDENT> self._on_created(os.path.join(self.paths.cleanup_dir, name)) <NEW_LINE> <DEDENT> <DEDENT> def get_dir_watch(self): <NEW_LINE> <INDENT> watch = dirwatch.DirWatcher(self.paths.cleanup_dir) <NEW_LINE> watch.on_created = self._on_created <NEW_LINE> return watch
Treadmill spawn cleanup.
62598fa06aa9bd52df0d4d09
class _PandasPlotter: <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def create(cls, data): <NEW_LINE> <INDENT> if isinstance(data, pd.Series): <NEW_LINE> <INDENT> return _SeriesPlotter(data) <NEW_LINE> <DEDENT> elif isinstance(data, pd.DataFrame): <NEW_LINE> <INDENT> return _DataFramePlotter(data) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise NotImplementedError(f"data of type {type(data)}") <NEW_LINE> <DEDENT> <DEDENT> def _get_mark_def(self, mark, kwargs): <NEW_LINE> <INDENT> if isinstance(mark, str): <NEW_LINE> <INDENT> mark = {"type": mark} <NEW_LINE> <DEDENT> if isinstance(kwargs.get("alpha"), float): <NEW_LINE> <INDENT> mark["opacity"] = kwargs.pop("alpha") <NEW_LINE> <DEDENT> if isinstance(kwargs.get("color"), str): <NEW_LINE> <INDENT> mark["color"] = kwargs.pop("color") <NEW_LINE> <DEDENT> return mark <NEW_LINE> <DEDENT> def _kde(self, data, bw_method=None, ind=None, **kwargs): <NEW_LINE> <INDENT> if bw_method == "scott" or bw_method is None: <NEW_LINE> <INDENT> bandwidth = 0 <NEW_LINE> <DEDENT> elif bw_method == "silverman": <NEW_LINE> <INDENT> n = data.shape[0] <NEW_LINE> d = 1 <NEW_LINE> bandwidth = (n * (d + 2) / 4.0) ** (-1.0 / (d + 4)) <NEW_LINE> <DEDENT> elif callable(bw_method): <NEW_LINE> <INDENT> if 1 < data.shape[1]: <NEW_LINE> <INDENT> warnings.warn( "Using a callable argument for ind using the Altair" " plotting backend sets the bandwidth for all" " columns", category=UserWarning, ) <NEW_LINE> <DEDENT> bandwidth = bw_method(data) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> bandwidth = bw_method <NEW_LINE> <DEDENT> if ind is None: <NEW_LINE> <INDENT> steps = 1_000 <NEW_LINE> <DEDENT> elif isinstance(ind, np.ndarray): <NEW_LINE> <INDENT> warnings.warn( "The Altair plotting backend does not support sequences for ind", category=UserWarning, ) <NEW_LINE> steps = 1_000 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> steps = ind <NEW_LINE> <DEDENT> chart = ( alt.Chart(data, mark=self._get_mark_def("area", kwargs)) .transform_fold( 
data.columns.to_numpy(), as_=["Column", "value"], ) .transform_density( density="value", bandwidth=bandwidth, groupby=["Column"], extent=[data.min().min(), data.max().max()], steps=steps, ) .encode( x=alt.X("value", type="quantitative"), y=alt.Y("density", type="quantitative", stack="zero"), tooltip=[ alt.Tooltip("value", type="quantitative"), alt.Tooltip("density", type="quantitative"), alt.Tooltip("Column", type="nominal"), ], ) .interactive() ) <NEW_LINE> if 1 < data.shape[1]: <NEW_LINE> <INDENT> chart = chart.encode(color=alt.Color("Column", type="nominal")) <NEW_LINE> <DEDENT> return chart
Base class for pandas plotting.
62598fa097e22403b383ad4b
class UITestCaseMixin: <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.assertTrue(hasattr(self, 'driver_path'), 'driver_path attribute must bet set for all classes inheriting from UITestCaseMixin.' ' Drivers can be downloaded at https://www.seleniumhq.org/download/') <NEW_LINE> self.assertTrue(hasattr(self, 'pom'), 'pom attribute must be set for all ' 'classes inheriting from UITestCaseMixin') <NEW_LINE> self.assertTrue(isinstance(self.pom, PageObjectModel), 'pom attribute must an instance of PageObjectModel') <NEW_LINE> options = self.get_options() <NEW_LINE> if hasattr(self, 'browser') and not self.browser == 'chrome': <NEW_LINE> <INDENT> if self.browser == 'firefox': <NEW_LINE> <INDENT> self.driver = webdriver.Firefox(executable_path=self.driver_path, firefox_options=options) <NEW_LINE> <DEDENT> elif self.browser == 'opera': <NEW_LINE> <INDENT> self.driver = webdriver.Opera(executable_path=self.driver_path, options=options) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> self.driver = webdriver.Chrome(executable_path=self.driver_path, chrome_options=options) <NEW_LINE> <DEDENT> self.driver = EventFiringWebDriver(self.driver, UIListener(self.pom)) <NEW_LINE> <DEDENT> def get_options(self): <NEW_LINE> <INDENT> options_dict = { 'chrome': ChromeOptions, 'firefox': FirefoxOptions, 'opera': OperaOptions, } <NEW_LINE> key = self.browser if hasattr(self, 'browser') else 'chrome' <NEW_LINE> options = options_dict[key]() <NEW_LINE> if hasattr(self, 'options'): <NEW_LINE> <INDENT> for opt in self.options: <NEW_LINE> <INDENT> options.add_argument(opt) <NEW_LINE> <DEDENT> <DEDENT> return options
This class can be inherited by any TestCase Class with a defined driver_path and pom attributes driver_path: refers to the path to the Chrome driver with the
62598fa07d43ff2487427321
class MPMenuCallFileDialog(object): <NEW_LINE> <INDENT> def __init__(self, flags=None, title='Filename', wildcard='*.*'): <NEW_LINE> <INDENT> self.flags = flags or ('open',) <NEW_LINE> self.title = title <NEW_LINE> self.wildcard = wildcard <NEW_LINE> <DEDENT> def call(self): <NEW_LINE> <INDENT> from MAVProxy.modules.lib.wx_loader import wx <NEW_LINE> flag_map = { 'open': wx.FD_OPEN, 'save': wx.FD_SAVE, 'overwrite_prompt': wx.FD_OVERWRITE_PROMPT, } <NEW_LINE> flagsMapped = map(lambda x: flag_map[x], self.flags) <NEW_LINE> if len(flagsMapped) == 1: <NEW_LINE> <INDENT> dlg = wx.FileDialog(None, self.title, '', "", self.wildcard, flagsMapped[0]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> dlg = wx.FileDialog(None, self.title, '', "", self.wildcard, flagsMapped[0]|flagsMapped[1]) <NEW_LINE> <DEDENT> if dlg.ShowModal() != wx.ID_OK: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> return "\"" + dlg.GetPath().encode('utf8') + "\""
used to create a file dialog callback
62598fa063b5f9789fe84fb3
class OccurrenceManager(models.Manager.from_queryset(OccurrenceQuerySet), models.Manager, OccurrenceQuerySetMixin): <NEW_LINE> <INDENT> pass
Manager des récurrences d'événements
62598fa057b8e32f5250803b
class CollationDeleteTestCase(BaseTestGenerator): <NEW_LINE> <INDENT> scenarios = [ ('Fetch collation Node URL', dict(url='/browser/collation/obj/')) ] <NEW_LINE> def setUp(self): <NEW_LINE> <INDENT> self.schema_info = parent_node_dict["schema"][-1] <NEW_LINE> self.schema_name = self.schema_info["schema_name"] <NEW_LINE> self.db_name = parent_node_dict["database"][-1]["db_name"] <NEW_LINE> coll_name = "collation_get_%s" % str(uuid.uuid4())[1:6] <NEW_LINE> self.collation = collation_utils.create_collation(self.server, self.schema_name, coll_name, self.db_name) <NEW_LINE> <DEDENT> def runTest(self): <NEW_LINE> <INDENT> server_id = self.schema_info["server_id"] <NEW_LINE> db_id = self.schema_info["db_id"] <NEW_LINE> db_con = database_utils.connect_database(self, utils.SERVER_GROUP, server_id, db_id) <NEW_LINE> if not db_con['data']["connected"]: <NEW_LINE> <INDENT> raise Exception("Could not connect to database.") <NEW_LINE> <DEDENT> schema_response = schema_utils.verify_schemas(self.server, self.db_name, self.schema_name) <NEW_LINE> if not schema_response: <NEW_LINE> <INDENT> raise Exception("Could not find the schema.") <NEW_LINE> <DEDENT> collation_id = self.collation[0] <NEW_LINE> schema_id = self.schema_info["schema_id"] <NEW_LINE> get_response = self.tester.delete( self.url + str(utils.SERVER_GROUP) + '/' + str( server_id) + '/' + str(db_id) + '/' + str(schema_id) + '/' + str(collation_id), content_type='html/json') <NEW_LINE> self.assertEquals(get_response.status_code, 200) <NEW_LINE> database_utils.disconnect_database(self, server_id, db_id) <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass
This class will delete added collation under schema node.
62598fa021a7993f00c65dc2
class Tree(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.root = None <NEW_LINE> <DEDENT> def add(self, item): <NEW_LINE> <INDENT> node = Node(item) <NEW_LINE> if self.root is None: <NEW_LINE> <INDENT> self.root = node <NEW_LINE> return <NEW_LINE> <DEDENT> queue = [self.root] <NEW_LINE> while queue: <NEW_LINE> <INDENT> cur_node = queue.pop(0) <NEW_LINE> if cur_node.l_child is None: <NEW_LINE> <INDENT> cur_node.l_child = node <NEW_LINE> return <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> queue.append(cur_node.l_child) <NEW_LINE> <DEDENT> if cur_node.r_child is None: <NEW_LINE> <INDENT> cur_node.r_child = node <NEW_LINE> return <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> queue.append(cur_node.r_child) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def breath_travel(self): <NEW_LINE> <INDENT> if self.root is None: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> queue = [self.root] <NEW_LINE> while queue: <NEW_LINE> <INDENT> cur_node = queue.pop(0) <NEW_LINE> print(cur_node.elem, end=' ') <NEW_LINE> if cur_node.l_child is not None: <NEW_LINE> <INDENT> queue.append(cur_node.l_child) <NEW_LINE> <DEDENT> if cur_node.r_child is not None: <NEW_LINE> <INDENT> queue.append(cur_node.r_child) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def preorder(self, node): <NEW_LINE> <INDENT> if not node: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> print(node.elem, end=' ') <NEW_LINE> self.preorder(node.l_child) <NEW_LINE> self.preorder(node.r_child) <NEW_LINE> <DEDENT> def inorder(self, node): <NEW_LINE> <INDENT> if not node: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> self.inorder(node.l_child) <NEW_LINE> print(node.elem, end=' ') <NEW_LINE> self.inorder(node.r_child) <NEW_LINE> <DEDENT> def postorder(self, node): <NEW_LINE> <INDENT> if not node: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> self.postorder(node.l_child) <NEW_LINE> self.postorder(node.r_child) <NEW_LINE> print(node.elem, end=' ')
二叉树
62598fa0379a373c97d98e55
class CaptialFilter(FilterBase): <NEW_LINE> <INDENT> def __init__( self, csv_path: str, category: str, indicator: str, frequency: TemporalFrequency): <NEW_LINE> <INDENT> super().__init__(csv_path=csv_path) <NEW_LINE> self.category = category <NEW_LINE> self.indicator = indicator <NEW_LINE> self.frequency = frequency <NEW_LINE> <DEDENT> def isValid(self, row: dict) -> bool: <NEW_LINE> <INDENT> if (ESTIMATE_KEY not in row or row[ESTIMATE_KEY] != ESTIMATE_VALUE): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> if (GEO_KEY not in row or row[GEO_KEY] != GEO_VALUE): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> if PRICE_KEY not in row or row[PRICE_KEY] != PRICE_VALUE: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> if (VALUE_KEY not in row or SCALAR_KEY not in row or DATE_KEY not in row): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return True <NEW_LINE> <DEDENT> def getIndicator(self, _) -> str: <NEW_LINE> <INDENT> return self.indicator <NEW_LINE> <DEDENT> def getCategory(self, _) -> str: <NEW_LINE> <INDENT> return self.category <NEW_LINE> <DEDENT> def getFrequency(self, _) -> TemporalFrequency: <NEW_LINE> <INDENT> return self.frequency <NEW_LINE> <DEDENT> def getValue(self, row) -> int: <NEW_LINE> <INDENT> value = tryint(row[VALUE_KEY]) <NEW_LINE> multiplier = scalar_multiplier(row[SCALAR_KEY]) <NEW_LINE> return value * multiplier <NEW_LINE> <DEDENT> def getDate(self, row: dict) -> Date: <NEW_LINE> <INDENT> date_string = row[DATE_KEY] <NEW_LINE> dt = DateTime.strptime(date_string, "%Y") <NEW_LINE> return dt.date()
Captial Filter.
62598fa0cb5e8a47e493c095
class CompanyAddForm(Form): <NEW_LINE> <INDENT> name = StringField( "name", validators=[validators.DataRequired( "Please enter name." )] ) <NEW_LINE> web_page = StringField( "web_page", validators=[validators.DataRequired( "Please enter web site address." )] ) <NEW_LINE> address = TextAreaField( "address", validators=[validators.DataRequired( "Please enter address." )] ) <NEW_LINE> telephone = StringField( "telephone", validators=[validators.DataRequired( "Please enter pay reminder text." )] )
Mail subject and text edit form.
62598fa0a79ad16197769ea3
class EditorTabBar(QTabBar): <NEW_LINE> <INDENT> def __init__(self, parent=None): <NEW_LINE> <INDENT> QTabBar.__init__(self, parent) <NEW_LINE> self._startDragPosition = None <NEW_LINE> <DEDENT> def mousePressEvent(self, e): <NEW_LINE> <INDENT> if (e.buttons() & Qt.LeftButton): <NEW_LINE> <INDENT> self._startDragPosition = e.pos() <NEW_LINE> self._tab = self.tabAt(e.pos()) <NEW_LINE> self.move = False <NEW_LINE> <DEDENT> QTabBar.mousePressEvent(self, e) <NEW_LINE> <DEDENT> def mouseMoveEvent(self, e): <NEW_LINE> <INDENT> if (e.buttons() & Qt.LeftButton): <NEW_LINE> <INDENT> x, y, w, h, s = e.pos().x(), e.pos().y(), self.width(), self.height(), 10 <NEW_LINE> self.move = self.move or abs(x - self._startDragPosition.x()) > s <NEW_LINE> if not self.move and (y < -s or y > h + s): <NEW_LINE> <INDENT> drag = QDrag(self) <NEW_LINE> url = QUrl.fromLocalFile(str(self.tabToolTip(self._tab))) <NEW_LINE> mimeData = QMimeData() <NEW_LINE> mimeData.setUrls([url]) <NEW_LINE> drag.setMimeData(mimeData) <NEW_LINE> drag.exec_() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> QTabBar.mouseMoveEvent(self, e) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> QTabBar.mouseMoveEvent(self, e)
A QTabBar with tabs that are both movable horizontally and drad-and-droppable by initiating the drag vertically.
62598fa0a79ad16197769ea4
class ListManagerOrderView(ManagerOwnerMixin, ListAPIView): <NEW_LINE> <INDENT> from .serializers import OrderListSerializer <NEW_LINE> filter_fields = ('outlet', 'status') <NEW_LINE> serializer_class = OrderListSerializer <NEW_LINE> def filter_queryset(self, queryset): <NEW_LINE> <INDENT> from django.db.models import Q <NEW_LINE> queryset = super(ListManagerOrderView, self).filter_queryset(queryset= queryset) <NEW_LINE> return queryset.filter( Q(outlet__outletmanager__manager=self.request.user) | Q(outlet__created_by=self.request.user)).distinct()
get: Lists orders for Manager
62598fa056b00c62f0fb26ef
class VDFSerializationError(VDFError): <NEW_LINE> <INDENT> VDF_ERROR_MSG = "Unable to serialize the given data. :("
Could not serialize the given data to VDF format.
62598fa0e5267d203ee6b74c
class ComputeNearestMean(BaseEstimator, TransformerMixin):
    """Compute a rolling mean sale price over recent sales of the same
    vehicle model and attach it to each row as ``filled_mean_price``.

    Fit learns, per ``ModelID``, the rolling mean of the last ``window``
    sale prices (shifted one day forward so a row never sees its own
    sale), plus a global fallback mean.  Transform joins those means onto
    new rows, forward-filling gaps and flagging rows with no recent
    transactions.

    NOTE(review): assumes X is a DataFrame with columns
    'saledate_converted', 'ModelID', 'SalesID', 'SalePrice' — confirm
    against the pipeline that feeds this transformer.
    """

    def __init__(self, window=5):
        # Number of most recent same-model sales in the rolling mean.
        self.window = window

    def get_params(self, **kwargs):
        return {'window': self.window}

    def fit(self, X, y):
        # Sort by date so the rolling window is chronological per model.
        X = X.sort_values(by=['saledate_converted'])
        g = X.groupby('ModelID')['SalePrice']
        m = g.apply(lambda x: x.rolling(self.window).agg([np.mean]))
        ids = X[['saledate_converted', 'ModelID', 'SalesID']]
        z = pd.concat([m, ids], axis=1)
        # Shift one day forward: the mean becomes available the day
        # AFTER the sales it is computed from, avoiding target leakage.
        z['saledate_converted'] = z.saledate_converted + timedelta(1)
        z = z.drop('SalesID', axis=1)
        groups = ['ModelID', 'saledate_converted']
        # Keep only the latest rolling value per (model, day).
        self.averages = z.groupby(groups).apply(lambda x: x.tail(1))
        # Global fallback for models with no history at all.
        self.default_mean = X.SalePrice.mean()
        return self

    def transform(self, X):
        # Outer merge so rows without a learned mean are kept.
        near_price = pd.merge(self.averages, X, how='outer', on=['ModelID', 'saledate_converted'])
        nxcols = ['ModelID', 'saledate_converted']
        near_price = near_price.set_index(nxcols).sort_index()
        # Forward-fill the rolling mean within each ModelID (index level 0).
        g = near_price['mean'].groupby(level=0)
        filled_means = g.transform(lambda x: x.fillna(method='ffill'))
        near_price['filled_mean_price'] = filled_means
        # Drop the rows that came only from self.averages (no SalesID).
        near_price = near_price[near_price['SalesID'].notnull()]
        missing_mean = near_price.filled_mean_price.isnull()
        near_price['no_recent_transactions'] = missing_mean
        # Fall back to the global training mean where no history exists.
        near_price['filled_mean_price'].fillna(self.default_mean, inplace=True)
        return near_price
Compute a mean price for similar vehicles.
62598fa055399d3f05626361
class BlobHandle(FileHandle):
    """Read-only, chunked file handle over an Azure blob.

    Data is fetched via ranged ``get_blob`` requests; ``.gz`` blobs get a
    gzip decompressor prepared (note: ``read`` returns raw bytes and does
    not apply it — callers use ``_decompress`` themselves; preserved as-is).
    """

    def __init__(self, blob_service, container, blob, chunk_size):
        super(BlobHandle, self).__init__()
        self._blob_service = blob_service
        self._container_name = container
        self._blob_name = blob
        self._chunk_size = chunk_size
        self._blob_properties = {}
        # Current byte offset for read().
        self._pointer = 0
        # Iterator backing next(); previously never initialized, so
        # next() raised AttributeError on first use.
        self._iter = self._chunk_iter()
        if blob.endswith(".gz"):
            # 16 | MAX_WBITS makes zlib expect a gzip header/trailer.
            decompress = zlib.decompressobj(16 | zlib.MAX_WBITS)
            self._decompress = decompress.decompress
        else:
            self._decompress = lambda value: value

    @property
    def blob_properties(self):
        """Blob metadata, fetched lazily and cached."""
        if not self._blob_properties:
            self._blob_properties = self._blob_service.get_blob_properties(
                container_name=self._container_name,
                blob_name=self._blob_name)
        return self._blob_properties

    def _chunk_offsets(self):
        """Yield the starting byte offset of every chunk in the blob."""
        index = 0
        # 'content-length' arrives as a string; cast before comparing —
        # the previous int < str comparison raises TypeError on Python 3.
        blob_size = int(self.blob_properties.get('content-length'))
        while index < blob_size:
            yield index
            index = index + self._chunk_size

    def _chunk_iter(self):
        """Yield successive chunks of the blob, in order."""
        for chunk_offset in self._chunk_offsets():
            yield self._download_chunk(chunk_offset=chunk_offset,
                                       chunk_size=self._chunk_size)

    def _download_chunk_with_retries(self, chunk_offset, chunk_size,
                                     retries=3, retry_wait=1):
        """Download one chunk, retrying transient Azure errors.

        Retries up to ``retries`` times, sleeping ``retry_wait`` seconds
        between attempts, then re-raises.
        """
        import azure
        while True:
            try:
                chunk = self._download_chunk(chunk_offset, chunk_size)
            except azure.WindowsAzureError:
                if retries > 0:
                    retries = retries - 1
                    time.sleep(retry_wait)
                else:
                    raise
            else:
                return chunk

    def _download_chunk(self, chunk_offset, chunk_size):
        """Fetch bytes [chunk_offset, chunk_offset + chunk_size) via a ranged GET."""
        range_id = 'bytes={0}-{1}'.format(
            chunk_offset, chunk_offset + chunk_size - 1)
        return self._blob_service.get_blob(
            container_name=self._container_name,
            blob_name=self._blob_name,
            x_ms_range=range_id)

    def read(self, size=sys.maxsize):
        """Read up to ``size`` bytes from the current position.

        Returns None at EOF (not b'' as the file protocol would suggest;
        preserved for existing callers).
        """
        blob_size = int(self.blob_properties.get('content-length'))
        if self._pointer < blob_size:
            chunk = self._download_chunk_with_retries(
                chunk_offset=self._pointer, chunk_size=size)
            self._pointer += size
            return chunk

    def next(self):
        """Python 2 iterator protocol: return the next chunk."""
        return next(self._iter)

    def close(self):
        # Nothing to release; each read is an independent HTTP request.
        pass
File object for the Azure Blob files.
62598fa0ac7a0e7691f7234a
class TestIPIBaseNumberValid(unittest.TestCase):
    """Tests that the IPI Base Number grammar accepts and parses valid values."""

    def setUp(self):
        self.ipi = special.ipi_base_number()

    def _first_token(self, code):
        # Parse ``code`` and return the first token of the result.
        return self.ipi.parseString(code)[0]

    def test_common(self):
        self.assertEqual('I-000000229-7', self._first_token('I-000000229-7'))

    def test_max(self):
        self.assertEqual('I-999999999-9', self._first_token('I-999999999-9'))

    def test_min(self):
        self.assertEqual('I-000000000-0', self._first_token('I-000000000-0'))
Tests that the IPI Base Number accepts and parses valid values.
62598fa00a50d4780f705219
class TestSendMail(unittest.TestCase):
    """Selenium tests for sending mail through the web client.

    test_send1 sends a bare mail to a recipient; test_send2 additionally
    fills in a subject, a body, and an attachment.  Credentials and the
    base URL come from the XML config exposed as module-level ``root``.
    """

    def setUp(self):
        # Fixed: was ``options = self.driver = webdriver.ChromeOptions()``,
        # which pointlessly bound the options object to self.driver before
        # overwriting it with the real driver below.
        options = webdriver.ChromeOptions()
        # Disable Chrome's password-manager popups for a clean run.
        options.add_experimental_option(
            "prefs",
            {"credentials_enable_service": False,
             "profile.password_manager_enabled": False})
        self.driver = webdriver.Chrome(chrome_options=options)
        self.driver.implicitly_wait(10)
        self.base_url = root.getElementsByTagName('url')[0].firstChild.data

    def test_send1(self):
        """Send a mail with only a recipient and assert the success banner."""
        driver = self.driver
        driver.get(self.base_url)
        sleep(3)
        login_info = root.getElementsByTagName('normal')
        username = login_info[0].getAttribute('username')
        password = login_info[0].getAttribute('password')
        login.login(self, username, password)
        # Open the compose view (second item in the top nav).
        driver.find_element_by_xpath("//div[@id='dvNavTop']/ul/li[2]").click()
        driver.find_element_by_css_selector("input.nui-editableAddr-ipt").send_keys('838927564@qq.com')
        # Send, then confirm the "no subject" dialog.
        driver.find_element_by_xpath("//header[@class='frame-main-cont-head']//span[@class='nui-btn-text']").click()
        driver.find_element_by_css_selector(".nui-msgbox-ft-btns>div>span").click()
        sleep(3)
        text = driver.find_element_by_class_name("tK1").text
        self.assertIn(u'发送成功', text)
        login.logout(self)

    def test_send2(self):
        """Send a mail with subject, body and attachment; assert success."""
        driver = self.driver
        driver.get(self.base_url)
        sleep(3)
        login_info = root.getElementsByTagName('normal')
        username = login_info[0].getAttribute('username')
        password = login_info[0].getAttribute('password')
        login.login(self, username, password)
        driver.find_element_by_xpath("//div[@id='dvNavTop']/ul/li[2]").click()
        driver.find_element_by_css_selector("input.nui-editableAddr-ipt").send_keys('838927564@qq.com')
        driver.find_element_by_xpath("//input[@class='nui-ipt-input' and @maxlength='256']").send_keys(u'自动发送的主题')
        # The mail body lives in an iframe-hosted rich-text editor.
        frame = driver.find_element_by_css_selector("iframe.APP-editor-iframe")
        driver.switch_to.frame(frame)
        driver.find_element_by_xpath("//body[@class='nui-scroll']").send_keys(u'自动发送的正文内容,啊哈哈哈哈哈')
        driver.switch_to.default_content()
        # Attach a local file by typing its path into the file input.
        driver.find_element_by_xpath("//input[@type='file']").send_keys("F:\\attach.txt")
        driver.find_element_by_xpath("//header[@class='frame-main-cont-head']//span[@class='nui-btn-text']").click()
        sleep(3)
        text = driver.find_element_by_class_name("tK1").text
        self.assertIn(u'发送成功', text)
        login.logout(self)

    def tearDown(self):
        self.driver.quit()
测试发送邮件
62598fa05f7d997b871f92ff
class BulletinBoard(models.Model):
    """Bulletin board (掲示板) attached to exactly one owner: a user,
    an organization, or a project."""

    user = models.OneToOneField("UserProfile", null=True, blank=True, on_delete=models.CASCADE, related_name="bulletin_board_u")
    organization = models.OneToOneField("Organization", null=True, blank=True, on_delete=models.CASCADE, related_name="bulletin_board_o")
    project = models.OneToOneField("Project", null=True, blank=True, on_delete=models.CASCADE, related_name="bulletin_board_p")

    def __str__(self):
        # Resolve the owner in priority order: user, organization, project.
        for owner, name_attr in ((self.user, "display_name"),
                                 (self.organization, "name"),
                                 (self.project, "name")):
            if owner is not None:
                owner_name = getattr(owner, name_attr)
                break
        else:
            return "N/A"
        if self.id:
            return "掲示板/" + str(self.id) + "/" + owner_name
        return "掲示板/" + owner_name
掲示板 スレッドの紐付け先
62598fa0090684286d5935fa
class TinyMCE3x3ExampleMainWidget(TinyMCE2x2ExampleMainWidget):
    """3x3 TinyMCE plugin widget for the Example layout (placeholder `main`)."""

    plugin_uid = 'tinymce_memo_3x3'
    # Grid dimensions occupied by the widget.
    cols = 3
    rows = 3
Dummy 3x3 plugin widget for Example layout (placeholder `main`).
62598fa056ac1b37e630202b
class Operation(msrest.serialization.Model):
    """Microsoft.Resources operation.

    :ivar name: Operation name: {provider}/{resource}/{operation}.
    :vartype name: str
    :ivar display: The object that represents the operation.
    :vartype display: ~azure.mgmt.resource.links.v2016_09_01.models.OperationDisplay
    """

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'display': {'key': 'display', 'type': 'OperationDisplay'},
    }

    def __init__(self, *, name: Optional[str] = None,
                 display: Optional["OperationDisplay"] = None, **kwargs):
        super().__init__(**kwargs)
        self.name = name
        self.display = display
Microsoft.Resources operation. :ivar name: Operation name: {provider}/{resource}/{operation}. :vartype name: str :ivar display: The object that represents the operation. :vartype display: ~azure.mgmt.resource.links.v2016_09_01.models.OperationDisplay
62598fa04527f215b58e9d23
class PethappyDogMedSpider(scrapy.Spider):
    """Spider for the dog medication pages of pethappy.cl."""

    name = 'pethappy_dog_meds'
    # Fixed: allowed_domains must contain bare domain names, not URLs;
    # a full URL here makes the offsite middleware filter every request.
    allowed_domains = ['www.pethappy.cl']
    start_urls = [
        'https://www.pethappy.cl/perros-2/medicamentos-2?page=%s' % page
        for page in range(1, 3)
    ]

    def parse(self, response):
        """Yield one ProjectPetsItem per product card, then follow pagination."""
        for product in response.css('div.in'):
            item = ProjectPetsItem()
            item['name'] = parse_name(product.css('h1 a::text').extract_first())
            # Product links are site-relative; prefix the host.
            item['href'] = ("https://www.pethappy.cl" +
                            product.css('p.foto a::attr(href)').extract()[0])
            item['price'] = parse_price(product.css('p.precio::text').extract()[0])
            item['image_href'] = product.css('p a img::attr(src)').extract()[0]
            item['store'] = "Pet Happy"
            item['category'] = "medicine"
            item['animal'] = "dog"
            item['date'] = datetime.today()
            item['date_str'] = item['date'].strftime('%Y-%m-%d')
            yield item
        # Follow the "next" pagination link when present.
        next_page = response.css('li.next a::attr(href)').extract_first()
        if next_page is not None:
            yield response.follow(next_page, callback=self.parse)
Spider only for the dog med pages
62598fa0be8e80087fbbee9f
class Ellipsoid(BelongingChecker):
    r"""Belonging checker that uses the analytical representation of the
    ellipsoid to detect *inside*-*outside* status.
    """

    def __init__(self):
        # Fixed: __init__ previously took no ``self`` parameter, so
        # instantiating the class raised TypeError; a stray bare ``r``
        # statement (an orphaned raw-docstring prefix) also raised
        # NameError when the class body executed.
        pass
This checker will use the analytical representation of the ellipsoid to detect *inside*-*outside* status.
62598fa021a7993f00c65dc4
class TemplateHUD(object):
    """Template HUD for the game's individual game states.

    Defines the rendering parameters — colors, font sizes, positions —
    sets up the font renderers, and on every update draws the control
    and quit hints in the bottom-left of the display.  Subclasses
    override ``state_update`` to draw state-specific elements.
    """

    # Font sizes (points) for the dedicated renderers below.
    BANNER_FONT_SIZE = 90
    SCORE_FONT_SIZE = 80
    TITLE_FONT_SIZE = 60
    HINT_FONT_SIZE = 30
    # RGB color constants.
    WHITE = (255, 255, 255)
    BLACK = (0, 0, 0)
    GRAY = (150, 150, 150)
    ORANGE = (255, 132, 42)
    # Position of the controls hint as fractions of the display size.
    CONTROLS_HINT_X = 0.03
    CONTROLS_HINT_Y = 0.915

    def __init__(self, display):
        # ``display`` is the surface everything is blitted onto.
        self.display = display
        self.display_width = Assets.get().display_width
        self.display_height = Assets.get().display_height
        self.font_path = os.path.join(fonts_path, 'zx_spectrum-7_bold.ttf')
        self.setup_fonts()
        # Pre-render the static hint texts once; blitted every frame.
        self.controls_hint_text = self.hint_renderer.render(_("[SPACE] or [UP] to Jump"), 1, self.WHITE)
        self.controls_hint_text_x = self.display_width * self.CONTROLS_HINT_X
        self.controls_hint_text_y = self.display_height * self.CONTROLS_HINT_Y
        self.quit_hint_text = self.hint_renderer.render(_("[ESC] or [Q] to Quit"), 1, self.WHITE)
        self.quit_hint_text_x = self.display_width * self.CONTROLS_HINT_X
        # Quit hint sits 20 px below the controls hint.
        self.quit_hint_text_y = self.display_height * self.CONTROLS_HINT_Y + 20

    def setup_fonts(self):
        """Create one font renderer per role; FPS uses a system font."""
        self.score_renderer = pygame.font.Font(self.font_path, self.SCORE_FONT_SIZE)
        self.banner_renderer = pygame.font.Font(self.font_path, self.BANNER_FONT_SIZE)
        self.title_renderer = pygame.font.Font(self.font_path, self.TITLE_FONT_SIZE)
        self.hint_renderer = pygame.font.Font(self.font_path, self.HINT_FONT_SIZE)
        self.fps_renderer = pygame.font.SysFont('Bariol', 30)

    def update(self, fps):
        """Draw the hints (plus FPS counter when DEBUG) and then delegate
        to the state-specific ``state_update``."""
        self.display.blit(self.controls_hint_text, (self.controls_hint_text_x, self.controls_hint_text_y))
        self.display.blit(self.quit_hint_text, (self.quit_hint_text_x, self.quit_hint_text_y))
        if DEBUG:
            # FPS counter in the bottom-right corner.
            fps_text = self.fps_renderer.render('{}'.format(round(fps, 3)), 1, self.ORANGE)
            self.display.blit(fps_text, (self.display_width * 0.93, self.display_height * 0.93))
        self.state_update()

    def state_update(self):
        # Hook for subclasses; the base HUD draws nothing extra.
        pass
This is a template of a HUD for the individual gamestates of the game. It defines the rendering parameters - colors, fonts sizes, positions. It also defines part of the update method which is to draw the controls in the bottom left of the Display.
62598fa057b8e32f5250803c