code
stringlengths
4
4.48k
docstring
stringlengths
1
6.45k
_id
stringlengths
24
24
class StateHitEventHandler(BaseEventHandler): <NEW_LINE> <INDENT> EVENT_TYPE = feconf.EVENT_TYPE_STATE_HIT <NEW_LINE> @classmethod <NEW_LINE> def _handle_event( cls, exp_id, exp_version, state_name, session_id, params, play_type): <NEW_LINE> <INDENT> stats_models.StateHitEventLogEntryModel.create( exp_id, exp_version, state_name, session_id, params, play_type)
Event handler for recording state hit events.
62598fac10dbd63aa1c70b6e
class orbit: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.rot = [0.,0.,0.] <NEW_LINE> self.tgt = [0.,0.,0.] <NEW_LINE> self.dist = 1.0 <NEW_LINE> self.ori = ['x','-z','y'] <NEW_LINE> <DEDENT> def matrix(self): <NEW_LINE> <INDENT> o = orientation_matrix(*self.ori) <NEW_LINE> Rz = so3.rotation([0.,0.,1.],self.rot[2]) <NEW_LINE> Rx = so3.rotation([1.,0.,0.],self.rot[1]) <NEW_LINE> Ry = so3.rotation([0.,1.,0.],self.rot[0]) <NEW_LINE> R = so3.mul(Rz,so3.mul(Rx,Ry)) <NEW_LINE> R = so3.mul(R,o) <NEW_LINE> return (R,vectorops.add(self.tgt,so3.apply(R,[0.,0.,self.dist]))) <NEW_LINE> <DEDENT> def set_orientation(self,R,ori=None): <NEW_LINE> <INDENT> import math <NEW_LINE> if ori is not None: <NEW_LINE> <INDENT> o = orientation_matrix(*self.ori) <NEW_LINE> oR = orientation_matrix(*ori) <NEW_LINE> R = so3.mul(R,so3.mul(so3.inv(oR),o)) <NEW_LINE> <DEDENT> m = so3.matrix(R) <NEW_LINE> cx = m[0][1]**2 + m[1][1]**2 <NEW_LINE> sx = m[2][1] <NEW_LINE> self.rot[1] = math.atan2(sx,cx) <NEW_LINE> if abs(cx) > 1e-5: <NEW_LINE> <INDENT> sz = -m[0][1] <NEW_LINE> cz = m[1][1] <NEW_LINE> sy = -m[2][0] <NEW_LINE> cy = m[2][2] <NEW_LINE> self.rot[2] = math.atan2(sz,cz) <NEW_LINE> self.rot[0] = math.atan2(sy,cy) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.rot[0] = 0 <NEW_LINE> cz = m[0][0] <NEW_LINE> sz = m[1][0] <NEW_LINE> self.rot[2] = math.atan2(sz,cz)
An orbit camera that is controlled using a rotation, target point, distance, and orientation. Attributes: - tgt: target point - rot: euler angle rotation (roll-pitch-yaw entries relative to default view with fwd = +y, right = +x, up = +z) - dist: target distance - ori: orientation matrix type (see :func:`orientation_matrix`)
62598fac1f5feb6acb162bda
class Slack_ForeachWriter: <NEW_LINE> <INDENT> def __init__(self, url): <NEW_LINE> <INDENT> self.webhook_url = url <NEW_LINE> <DEDENT> def open(self, partition_id, epoch_id): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> def process(self, row): <NEW_LINE> <INDENT> import json <NEW_LINE> import requests <NEW_LINE> print(row['count']) <NEW_LINE> print(self.webhook_url) <NEW_LINE> if row['count'] % 5 == 0: <NEW_LINE> <INDENT> slack_data = {'text': "Reached {} count for {}".format(row['count'], row['value'])} <NEW_LINE> response = requests.post( self.webhook_url, data=json.dumps(slack_data), headers={'Content-Type': 'application/json'} ) <NEW_LINE> if response.status_code != 200: <NEW_LINE> <INDENT> raise ValueError( 'Request to slack returned an error %s, the response is:\n%s' % (response.status_code, response.text) ) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def close(self, err): <NEW_LINE> <INDENT> if err: <NEW_LINE> <INDENT> raise err
Class to send alerts to a Slack Channel. When used with `foreach`, copies of this class is going to be used to write multiple rows in the executor. See the python docs for `DataStreamWriter.foreach` for more details.
62598face1aae11d1e7ce801
class Structured_Imitation(Dynamics): <NEW_LINE> <INDENT> def interact(self): <NEW_LINE> <INDENT> i = self.net.get_random_vertex() <NEW_LINE> group_trait = 0.0 <NEW_LINE> if self.params["group"] == 0: <NEW_LINE> <INDENT> for j in self.net.get_neighbors(i): <NEW_LINE> <INDENT> group_trait += j.get_trait() <NEW_LINE> <DEDENT> group_trait /= len(self.net.get_neighbors(i)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> for count in range(0, self.params["group"]): <NEW_LINE> <INDENT> j = self.net.get_random_neighbor(i) <NEW_LINE> if j == None: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> group_trait += j.get_trait() <NEW_LINE> <DEDENT> group_trait /= self.params["group"] <NEW_LINE> <DEDENT> payoff = self.payoff(i.get_trait(), group_trait) <NEW_LINE> i.set_payoff(payoff) <NEW_LINE> i.set_fitness(self.fitness(payoff)) <NEW_LINE> <DEDENT> def post_interaction(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def update(self): <NEW_LINE> <INDENT> i = self.net.get_random_vertex() <NEW_LINE> total_fitness = 0.0 <NEW_LINE> fitness_cdf = [] <NEW_LINE> for vertex in self.net.get_neighbors(i): <NEW_LINE> <INDENT> total_fitness += vertex.get_fitness() <NEW_LINE> fitness_cdf.append([self.total_fitness, vertex]) <NEW_LINE> <DEDENT> total_fitness += i.get_fitness() <NEW_LINE> fitness_cdf.append([total_fitness, i]) <NEW_LINE> j = self.roulette(fitness_cdf, total_fitness) <NEW_LINE> trait = j.get_trait() <NEW_LINE> if random.random() < self.params["mutation"]: <NEW_LINE> <INDENT> trait = max(0.0, min(random.gauss(trait, self.params["stddev"]), self.params["max_trait"])) <NEW_LINE> <DEDENT> i.inherit_trait(trait)
Imitation dynamics on a structured network.
62598fac6e29344779b00618
class TweetActions: <NEW_LINE> <INDENT> space_gif = 'space_gif' <NEW_LINE> retweet_scott_kelly = 'retweet_scott_kelly' <NEW_LINE> retweet_astro_kjell = 'retweet_astro_kjell' <NEW_LINE> retweet_astro_kimiya = 'retweet_astro_kimiya' <NEW_LINE> retweet_volkov_iss = 'retweet_volkov_iss' <NEW_LINE> retweet_astro_jeff = 'retweet_astro_jeff' <NEW_LINE> retweet_astro_tim = 'retweet_astro_tim' <NEW_LINE> retweet_astro_timpeake = 'retweet_astro_timpeake' <NEW_LINE> retweet_thom_astro = 'retweet_thom_astro' <NEW_LINE> retweet_astropeggy = 'retweet_astropeggy' <NEW_LINE> retweet_astro_kimbrough = 'retweet_astro_kimbrough'
Tweets constants TODO: write once those freaking actons
62598fac796e427e5384e74f
class Category(AtomBase): <NEW_LINE> <INDENT> _tag = 'category' <NEW_LINE> _namespace = ATOM_NAMESPACE <NEW_LINE> _children = AtomBase._children.copy() <NEW_LINE> _attributes = AtomBase._attributes.copy() <NEW_LINE> _attributes['term'] = 'term' <NEW_LINE> _attributes['scheme'] = 'scheme' <NEW_LINE> _attributes['label'] = 'label' <NEW_LINE> def __init__(self, term=None, scheme=None, label=None, text=None, extension_elements=None, extension_attributes=None): <NEW_LINE> <INDENT> self.term = term <NEW_LINE> self.scheme = scheme <NEW_LINE> self.label = label <NEW_LINE> self.text = text <NEW_LINE> self.extension_elements = extension_elements or [] <NEW_LINE> self.extension_attributes = extension_attributes or {}
The atom:category element
62598fac4c3428357761a275
class WindowsVolumeCreationEventFormatterTest( test_lib.EventFormatterTestCase): <NEW_LINE> <INDENT> def testInitialization(self): <NEW_LINE> <INDENT> event_formatter = windows.WindowsVolumeCreationEventFormatter() <NEW_LINE> self.assertNotEqual(event_formatter, None) <NEW_LINE> <DEDENT> def testGetFormatStringAttributeNames(self): <NEW_LINE> <INDENT> event_formatter = windows.WindowsVolumeCreationEventFormatter() <NEW_LINE> expected_attribute_names = [ u'device_path', u'serial_number', u'origin'] <NEW_LINE> self._TestGetFormatStringAttributeNames( event_formatter, expected_attribute_names)
Tests for the Windows volume creation event formatter.
62598fac57b8e32f525080f9
class TestRadians(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.g = Geod(ellps='clrk66') <NEW_LINE> self.boston_d = (-71. - (7. / 60.), 42. + (15. / 60.)) <NEW_LINE> self.boston_r = (math.radians(self.boston_d[0]), math.radians(self.boston_d[1])) <NEW_LINE> self.portland_d = (-123. - (41. / 60.), 45. + (31. / 60.)) <NEW_LINE> self.portland_r = (math.radians(self.portland_d[0]), math.radians(self.portland_d[1])) <NEW_LINE> <DEDENT> def test_inv_radians(self): <NEW_LINE> <INDENT> az12_d, az21_d, dist_d = self.g.inv( self.boston_d[0], self.boston_d[1], self.portland_d[0], self.portland_d[1], radians=False) <NEW_LINE> az12_r, az21_r, dist_r = self.g.inv( self.boston_r[0], self.boston_r[1], self.portland_r[0], self.portland_r[1], radians=True) <NEW_LINE> self.assertAlmostEqual(az12_d, math.degrees(az12_r)) <NEW_LINE> self.assertAlmostEqual(az21_d, math.degrees(az21_r)) <NEW_LINE> self.assertAlmostEqual(dist_d, dist_r) <NEW_LINE> <DEDENT> def test_fwd_radians(self): <NEW_LINE> <INDENT> az12_d, az21_d, dist = self.g.inv( self.boston_d[0], self.boston_d[1], self.portland_d[0], self.portland_d[1], radians=False) <NEW_LINE> endlon_d, endlat_d, backaz_d = self.g.fwd( self.boston_d[0], self.boston_d[1], az12_d, dist, radians=False) <NEW_LINE> endlon_r, endlat_r, backaz_r = self.g.fwd( self.boston_r[0], self.boston_r[1], math.radians(az12_d), dist, radians=True) <NEW_LINE> self.assertAlmostEqual(endlon_d, math.degrees(endlon_r)) <NEW_LINE> self.assertAlmostEqual(endlat_d, math.degrees(endlat_r)) <NEW_LINE> self.assertAlmostEqual(backaz_d, math.degrees(backaz_r)) <NEW_LINE> self.assertAlmostEqual(endlon_d, self.portland_d[0]) <NEW_LINE> self.assertAlmostEqual(endlat_d, self.portland_d[1]) <NEW_LINE> <DEDENT> def test_npts_radians(self): <NEW_LINE> <INDENT> points_d = self.g.npts( lon1=self.boston_d[0], lat1=self.boston_d[1], lon2=self.portland_d[0], lat2=self.portland_d[1], npts=10, radians=False) <NEW_LINE> points_r = self.g.npts( 
lon1=self.boston_r[0], lat1=self.boston_r[1], lon2=self.portland_r[0], lat2=self.portland_r[1], npts=10, radians=True) <NEW_LINE> for index, dpoint in enumerate(points_d): <NEW_LINE> <INDENT> self.assertAlmostEqual(dpoint[0], math.degrees(points_r[index][0])) <NEW_LINE> self.assertAlmostEqual(dpoint[1], math.degrees(points_r[index][1]))
Tests issue #84
62598fac63b5f9789fe85122
class RelatedObjectSegmentValue(BaseValue): <NEW_LINE> <INDENT> def __init__(self, path, content_type, translation_key, **kwargs): <NEW_LINE> <INDENT> self.content_type = content_type <NEW_LINE> self.translation_key = translation_key <NEW_LINE> super().__init__(path, **kwargs) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_instance(cls, path, instance): <NEW_LINE> <INDENT> model = instance.get_translation_model() <NEW_LINE> return cls( path, ContentType.objects.get_for_model(model), instance.translation_key ) <NEW_LINE> <DEDENT> def get_instance(self, locale): <NEW_LINE> <INDENT> from ..models import pk <NEW_LINE> return self.content_type.get_object_for_this_type( translation_key=self.translation_key, locale_id=pk(locale) ) <NEW_LINE> <DEDENT> def clone(self): <NEW_LINE> <INDENT> return RelatedObjectSegmentValue( self.path, self.content_type, self.translation_key, order=self.order ) <NEW_LINE> <DEDENT> def is_empty(self): <NEW_LINE> <INDENT> return self.content_type is None and self.translation_key is None <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return ( isinstance(other, RelatedObjectSegmentValue) and self.path == other.path and self.content_type == other.content_type and self.translation_key == other.translation_key ) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "<RelatedObjectSegmentValue {} {} {}>".format( self.path, self.content_type, self.translation_key )
Represents a reference to a foreign translatable object. Attributes: path (str): The content path of the segment. content_type (ContentType): The content type of the base model of the foreign object. translation_key (UUID): The value of the foreign object's `translation_key` field. order (int): The index that this segment appears on a page.
62598fac851cf427c66b8278
class LoadProjectsBucketsAclsPipelineTest(ForsetiTestCase): <NEW_LINE> <INDENT> FAKE_PROJECT_NUMBERS = ['11111'] <NEW_LINE> FAKE_BUCKETS = ['fakebucket1'] <NEW_LINE> def setUp(self): <NEW_LINE> <INDENT> self.cycle_timestamp = '20001225T120000Z' <NEW_LINE> self.configs = fake_configs.FAKE_CONFIGS <NEW_LINE> self.mock_gcs_acl = mock.create_autospec(storage.StorageClient) <NEW_LINE> self.mock_dao = mock.create_autospec(bucket_dao.BucketDao) <NEW_LINE> self.maxDiff = None <NEW_LINE> self.pipeline = ( load_projects_buckets_acls_pipeline.LoadProjectsBucketsAclsPipeline( self.cycle_timestamp, self.configs, self.mock_gcs_acl, self.mock_dao)) <NEW_LINE> <DEDENT> def test_can_transform_bucket_acls(self): <NEW_LINE> <INDENT> loadable_buckets = list(self.pipeline._transform( fake_buckets.FAKE_BUCKET_ACL_MAP)) <NEW_LINE> self.assertEquals( fake_buckets.EXPECTED_LOADABLE_BUCKET_ACLS, loadable_buckets) <NEW_LINE> <DEDENT> def test_retrieve_bucket_acls(self): <NEW_LINE> <INDENT> mock_get_raw_buckets = mock.MagicMock() <NEW_LINE> self.mock_dao.get_raw_buckets = mock_get_raw_buckets <NEW_LINE> mock_get_raw_buckets.return_value = fake_buckets.FAKE_RAW_BUCKET_ROW <NEW_LINE> bucket_acls = self.pipeline._retrieve() <NEW_LINE> mock_get_raw_buckets.assert_called_once_with(self.cycle_timestamp) <NEW_LINE> self.assertEquals( fake_buckets.EXPECTED_RAW_BUCKET_JSON, bucket_acls) <NEW_LINE> <DEDENT> @mock.patch.object( load_projects_buckets_acls_pipeline.LoadProjectsBucketsAclsPipeline, '_get_loaded_count') <NEW_LINE> @mock.patch.object( load_projects_buckets_acls_pipeline.LoadProjectsBucketsAclsPipeline, '_load') <NEW_LINE> @mock.patch.object( load_projects_buckets_acls_pipeline.LoadProjectsBucketsAclsPipeline, '_transform') <NEW_LINE> @mock.patch.object( load_projects_buckets_acls_pipeline.LoadProjectsBucketsAclsPipeline, '_retrieve') <NEW_LINE> def test_subroutines_are_called_by_run(self, mock_retrieve, mock_transform,mock_load, mock_get_loaded_count): <NEW_LINE> <INDENT> 
mock_retrieve.return_value = ( fake_buckets.FAKE_BUCKET_ACL_MAP) <NEW_LINE> mock_transform.return_value = ( fake_buckets.EXPECTED_LOADABLE_BUCKET_ACLS) <NEW_LINE> self.pipeline.run() <NEW_LINE> mock_retrieve.assert_called_once_with() <NEW_LINE> mock_transform.assert_called_once_with( fake_buckets.FAKE_BUCKET_ACL_MAP) <NEW_LINE> self.assertEquals(1, mock_load.call_count) <NEW_LINE> called_args, called_kwargs = mock_load.call_args_list[0] <NEW_LINE> expected_args = ( self.pipeline.RESOURCE_NAME, fake_buckets.EXPECTED_LOADABLE_BUCKET_ACLS) <NEW_LINE> self.assertEquals(expected_args, called_args) <NEW_LINE> mock_get_loaded_count.assert_called_once
Tests for the load_projects_buckets_acls_pipeline.
62598fac4428ac0f6e6584e0
@dataclasses.dataclass <NEW_LINE> class AudioListenerWillBeDestroyed: <NEW_LINE> <INDENT> contextId: GraphObjectId <NEW_LINE> listenerId: GraphObjectId <NEW_LINE> @classmethod <NEW_LINE> def from_json(cls, json: dict) -> AudioListenerWillBeDestroyed: <NEW_LINE> <INDENT> return cls(GraphObjectId(json["contextId"]), GraphObjectId(json["listenerId"]))
Notifies that a new AudioListener has been created. Attributes ---------- contextId: GraphObjectId listenerId: GraphObjectId
62598fac7d43ff24874273e0
class suppress_stdout_stderr(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.null_fds = [os.open(os.devnull, os.O_RDWR) for _ in range(2)] <NEW_LINE> self.save_fds = [os.dup(1), os.dup(2)] <NEW_LINE> <DEDENT> def __enter__(self): <NEW_LINE> <INDENT> os.dup2(self.null_fds[0], 1) <NEW_LINE> os.dup2(self.null_fds[1], 2) <NEW_LINE> <DEDENT> def __exit__(self, *_): <NEW_LINE> <INDENT> os.dup2(self.save_fds[0], 1) <NEW_LINE> os.dup2(self.save_fds[1], 2) <NEW_LINE> for fd in self.null_fds + self.save_fds: <NEW_LINE> <INDENT> os.close(fd)
A context manager for doing a "deep suppression" of stdout and stderr in Python, i.e. will suppress all print, even if the print originates in a compiled C/Fortran sub-function. This will not suppress raised exceptions, since exceptions are printed to stderr just before a script exits, and after the context manager has exited (at least, I think that is why it lets exceptions through). https://github.com/facebook/prophet/issues/223
62598fac2c8b7c6e89bd3781
class PrivateIPAddress(msrest.serialization.Model): <NEW_LINE> <INDENT> _attribute_map = { 'ip_address': {'key': 'ipAddress', 'type': 'str'}, 'subnet_resource_id': {'key': 'subnetResourceId', 'type': 'str'}, } <NEW_LINE> def __init__( self, *, ip_address: Optional[str] = None, subnet_resource_id: Optional[str] = None, **kwargs ): <NEW_LINE> <INDENT> super(PrivateIPAddress, self).__init__(**kwargs) <NEW_LINE> self.ip_address = ip_address <NEW_LINE> self.subnet_resource_id = subnet_resource_id
A private IP address bound to the availability group listener. :ivar ip_address: Private IP address bound to the availability group listener. :vartype ip_address: str :ivar subnet_resource_id: Subnet used to include private IP. :vartype subnet_resource_id: str
62598fac3539df3088ecc26e
class ICalendarView(generic_views.View): <NEW_LINE> <INDENT> def get_meetup_summary(self, meetup): <NEW_LINE> <INDENT> return "PyGRAZ-Meetup am {0}".format(meetup.start_date.date()) <NEW_LINE> <DEDENT> def get_meetup_description(self, meetup): <NEW_LINE> <INDENT> return """Details: https://{0}{1}""".format( Site.objects.get_current().domain, meetup.get_absolute_url()) <NEW_LINE> <DEDENT> def get(self, request, *args, **kwargs): <NEW_LINE> <INDENT> cal = icalendar.Calendar() <NEW_LINE> cal.add('X-WR-CALNAME', settings.MEETUPS_CALENDAR_NAME) <NEW_LINE> site = Site.objects.get_current() <NEW_LINE> for meetup in models.Meetup.objects.all(): <NEW_LINE> <INDENT> evt = icalendar.Event() <NEW_LINE> evt.add('summary', self.get_meetup_summary(meetup)) <NEW_LINE> evt.add('description', self.get_meetup_description(meetup)) <NEW_LINE> evt.add('dtstart', meetup.start_date) <NEW_LINE> evt['uid'] = '{0}/meetups/{1}'.format(site.domain, meetup.pk) <NEW_LINE> cal.add_component(evt) <NEW_LINE> <DEDENT> response = HttpResponse(cal.to_ical(), content_type='text/calendar') <NEW_LINE> response['Content-Disposition'] = 'attachment;filename=pygraz.ics' <NEW_LINE> return response
This offers a simple ical rendering of all the meetups.
62598fac23849d37ff851071
class OperationValueValuesEnum(_messages.Enum): <NEW_LINE> <INDENT> OPERATION_UNSPECIFIED = 0 <NEW_LINE> FORWARD = 1 <NEW_LINE> REWRITE = 2
Required. Indicates which action will be applied. If FORWARD, the messages will be imported from cloud to edge or exported from edge to cloud. If REWRITE, the messages will be republished within the edge device with new topic name, that is defined in `rewrite_topic_name`. Cannot be unspecified. Values: OPERATION_UNSPECIFIED: Default value, used when its value unspecified. FORWARD: Forward the topic without changing the topic. REWRITE: Rewrite the message to the specified destination topic.
62598fac7c178a314d78d459
class HoldingChangeList: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def pack_req(cls, code, holder_type, conn_id, start_date, end_date=None): <NEW_LINE> <INDENT> ret, content = split_stock_str(code) <NEW_LINE> if ret == RET_ERROR: <NEW_LINE> <INDENT> error_str = content <NEW_LINE> return RET_ERROR, error_str, None <NEW_LINE> <DEDENT> market_code, stock_code = content <NEW_LINE> if start_date is None: <NEW_LINE> <INDENT> msg = "The start date is none." <NEW_LINE> return RET_ERROR, msg, None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> ret, msg = normalize_date_format(start_date) <NEW_LINE> if ret != RET_OK: <NEW_LINE> <INDENT> return ret, msg, None <NEW_LINE> <DEDENT> start_date = msg <NEW_LINE> <DEDENT> if end_date is None: <NEW_LINE> <INDENT> today = datetime.today() <NEW_LINE> end_date = today.strftime("%Y-%m-%d") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> ret, msg = normalize_date_format(end_date) <NEW_LINE> if ret != RET_OK: <NEW_LINE> <INDENT> return ret, msg, None <NEW_LINE> <DEDENT> end_date = msg <NEW_LINE> <DEDENT> from futuquant.common.pb.Qot_GetHoldingChangeList_pb2 import Request <NEW_LINE> req = Request() <NEW_LINE> req.c2s.security.market = market_code <NEW_LINE> req.c2s.security.code = stock_code <NEW_LINE> req.c2s.holderCategory = holder_type <NEW_LINE> req.c2s.beginTime = start_date <NEW_LINE> if end_date: <NEW_LINE> <INDENT> req.c2s.endTime = end_date <NEW_LINE> <DEDENT> return pack_pb_req(req, ProtoId.Qot_GetHoldingChangeList, conn_id) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def unpack_rsp(cls, rsp_pb): <NEW_LINE> <INDENT> if rsp_pb.retType != RET_OK: <NEW_LINE> <INDENT> return RET_ERROR, rsp_pb.retMsg, [] <NEW_LINE> <DEDENT> raw_quote_list = rsp_pb.s2c.holdingChangeList <NEW_LINE> data_list = [] <NEW_LINE> for record in raw_quote_list: <NEW_LINE> <INDENT> quote_list = { 'holder_name': record.holderName, 'holding_qty': record.holdingQty, 'holding_ratio': 
record.holdingRatio, 'change_qty': record.changeQty, 'change_ratio': record.changeRatio, 'time': record.time, } <NEW_LINE> data_list.append(quote_list) <NEW_LINE> <DEDENT> return RET_OK, "", data_list
Query Conversion for getting holding change list.
62598fac0c0af96317c5633f
class RequestTimeMiddleware(object): <NEW_LINE> <INDENT> def __init__(self, get_response): <NEW_LINE> <INDENT> self.get_response = get_response <NEW_LINE> <DEDENT> def __call__(self, request): <NEW_LINE> <INDENT> request.start_time = datetime.now() <NEW_LINE> response = self.get_response(request) <NEW_LINE> request.end_time = datetime.now() <NEW_LINE> response.write('<br />Request took:' + str(request.end_time - request.start_time)) <NEW_LINE> return response
Display request time on a page
62598facaad79263cf42e791
class Module(ModMetricFranke): <NEW_LINE> <INDENT> def save(self, mod, ev): <NEW_LINE> <INDENT> if self._stage != 3: <NEW_LINE> <INDENT> raise ModuleExecutionError('save initiated when module was not finalised!') <NEW_LINE> <DEDENT> for row in self.result[0].value: <NEW_LINE> <INDENT> rval = ResultMetricFFranke(evaluation=ev, module=mod) <NEW_LINE> rval.unit_gt = row[0] <NEW_LINE> rval.unit_ev = row[1] <NEW_LINE> rval.KS = toint(row[2]) <NEW_LINE> rval.KSO = toint(row[3]) <NEW_LINE> rval.FS = toint(row[4]) <NEW_LINE> rval.TP = toint(row[5]) <NEW_LINE> rval.TPO = toint(row[6]) <NEW_LINE> rval.FPA = toint(row[7]) <NEW_LINE> rval.FPAE = toint(row[8]) <NEW_LINE> rval.FPAO = toint(row[9]) <NEW_LINE> rval.FPAOE = toint(row[10]) <NEW_LINE> rval.FN = toint(row[11]) <NEW_LINE> rval.FNO = toint(row[12]) <NEW_LINE> rval.FP = toint(row[13]) <NEW_LINE> rval.save()
module for the ffranke spikesorting metric - for use in the django context
62598facf548e778e596b561
class PausingTask(Task): <NEW_LINE> <INDENT> def __init__(self, config, control_protocol=protocols.task_control): <NEW_LINE> <INDENT> super().__init__(config, control_protocol) <NEW_LINE> self.paused = False <NEW_LINE> <DEDENT> def handle_control(self, queue): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> super().handle_control(queue) <NEW_LINE> <DEDENT> except TaskControl as ctrl: <NEW_LINE> <INDENT> if ctrl.msg == 'PAUSE': <NEW_LINE> <INDENT> self.logger.info('paused') <NEW_LINE> self.paused = True <NEW_LINE> <DEDENT> elif ctrl.msg == 'RESUME': <NEW_LINE> <INDENT> if not self.paused: <NEW_LINE> <INDENT> self.logger.warning('resumed while not paused') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.paused = False <NEW_LINE> self.logger.info('resumed') <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> raise
Derivative of :class:`Task` that uses a :attr:`paused` flag to indicate to internal handlers when it's paused. It is up to your queue handlers to honour :attr:`paused` when it's set.
62598fac4f6381625f19949d
class HG12(HG12BaseClass): <NEW_LINE> <INDENT> H = Parameter(description='H parameter') <NEW_LINE> G12 = Parameter(description='G12 parameter') <NEW_LINE> @property <NEW_LINE> def _G1(self): <NEW_LINE> <INDENT> return self._G12_to_G1(self.G12.value) <NEW_LINE> <DEDENT> @property <NEW_LINE> def _G2(self): <NEW_LINE> <INDENT> return self._G12_to_G2(self.G12.value) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def _G12_to_G1(g12): <NEW_LINE> <INDENT> if g12<0.2: <NEW_LINE> <INDENT> return 0.7527*g12+0.06164 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return 0.9529*g12+0.02162 <NEW_LINE> <DEDENT> <DEDENT> @staticmethod <NEW_LINE> def _G12_to_G2(g12): <NEW_LINE> <INDENT> if g12<0.2: <NEW_LINE> <INDENT> return -0.9612*g12+0.6270 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return -0.6125*g12+0.5572 <NEW_LINE> <DEDENT> <DEDENT> @staticmethod <NEW_LINE> def evaluate(ph, h, g): <NEW_LINE> <INDENT> g1 = HG12._G12_to_G1(g) <NEW_LINE> g2 = HG12._G12_to_G2(g) <NEW_LINE> return HG1G2.evaluate(ph, h, g1, g2) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def fit_deriv(ph, h, g): <NEW_LINE> <INDENT> if hasattr(ph, '__iter__'): <NEW_LINE> <INDENT> ddh = np.ones_like(ph) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> ddh = 1. <NEW_LINE> <DEDENT> g1 = HG12._G12_to_G1(g) <NEW_LINE> g2 = HG12._G12_to_G2(g) <NEW_LINE> phi1 = HG1G2._phi1(ph) <NEW_LINE> phi2 = HG1G2._phi2(ph) <NEW_LINE> phi3 = HG1G2._phi3(ph) <NEW_LINE> dom = (g1*phi1+g2*phi2+(1-g1-g2)*phi3) <NEW_LINE> if g<0.2: <NEW_LINE> <INDENT> p1 = 0.7527 <NEW_LINE> p2 = -0.9612 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> p1 = 0.9529 <NEW_LINE> p2 = -0.6125 <NEW_LINE> <DEDENT> ddg = 1.085736205*((phi3-phi1)*p1+(phi3-phi1)*p2)/dom <NEW_LINE> return [ddh, ddg]
HG12 photometric phase model (Muinonen et al. 2010) Examples -------- >>> # Define the phase function for Themis with >>> # H = 7.121, G12 = 0.68 >>> >>> from sbpy.photometry import HG12 >>> themis = HG12(7.121, 0.68, radius=100) >>> print('{0:.4f}'.format(themis.geoalb)) 0.0639 >>> print('{0:.4f}'.format(themis.phaseint)) 0.3949
62598fac7b180e01f3e4902f
class ParsingException(Exception): <NEW_LINE> <INDENT> def __init__(self, msg): <NEW_LINE> <INDENT> self.msg = msg <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.msg
Represent a controlled exception raised by the library.
62598fac55399d3f056264e1
class rayleigh_gen(rv_continuous): <NEW_LINE> <INDENT> _support_mask = rv_continuous._open_support_mask <NEW_LINE> def _rvs(self): <NEW_LINE> <INDENT> return chi.rvs(2, size=self._size, random_state=self._random_state) <NEW_LINE> <DEDENT> def _pdf(self, r): <NEW_LINE> <INDENT> return np.exp(self._logpdf(r)) <NEW_LINE> <DEDENT> def _logpdf(self, r): <NEW_LINE> <INDENT> return np.log(r) - 0.5 * r * r <NEW_LINE> <DEDENT> def _cdf(self, r): <NEW_LINE> <INDENT> return -sc.expm1(-0.5 * r**2) <NEW_LINE> <DEDENT> def _ppf(self, q): <NEW_LINE> <INDENT> return np.sqrt(-2 * sc.log1p(-q)) <NEW_LINE> <DEDENT> def _sf(self, r): <NEW_LINE> <INDENT> return np.exp(self._logsf(r)) <NEW_LINE> <DEDENT> def _logsf(self, r): <NEW_LINE> <INDENT> return -0.5 * r * r <NEW_LINE> <DEDENT> def _isf(self, q): <NEW_LINE> <INDENT> return np.sqrt(-2 * np.log(q)) <NEW_LINE> <DEDENT> def _stats(self): <NEW_LINE> <INDENT> val = 4 - np.pi <NEW_LINE> return (np.sqrt(np.pi/2), val/2, 2*(np.pi-3)*np.sqrt(np.pi)/val**1.5, 6*np.pi/val-16/val**2) <NEW_LINE> <DEDENT> def _entropy(self): <NEW_LINE> <INDENT> return _EULER/2.0 + 1 - 0.5*np.log(2)
A Rayleigh continuous random variable. %(before_notes)s Notes ----- The probability density function for `rayleigh` is: .. math:: f(r) = r \exp(-r^2/2) for :math:`x \ge 0`. `rayleigh` is a special case of `chi` with ``df == 2``. %(after_notes)s %(example)s
62598fac63d6d428bbee2768
class Operations: <NEW_LINE> <INDENT> models = _models <NEW_LINE> def __init__(self, client, config, serializer, deserializer) -> None: <NEW_LINE> <INDENT> self._client = client <NEW_LINE> self._serialize = serializer <NEW_LINE> self._deserialize = deserializer <NEW_LINE> self._config = config <NEW_LINE> <DEDENT> @distributed_trace <NEW_LINE> def list( self, **kwargs: Any ) -> AsyncIterable["_models.OperationListResult"]: <NEW_LINE> <INDENT> cls = kwargs.pop('cls', None) <NEW_LINE> error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } <NEW_LINE> error_map.update(kwargs.pop('error_map', {})) <NEW_LINE> def prepare_request(next_link=None): <NEW_LINE> <INDENT> if not next_link: <NEW_LINE> <INDENT> request = build_list_request( template_url=self.list.metadata['url'], ) <NEW_LINE> request = _convert_request(request) <NEW_LINE> request.url = self._client.format_url(request.url) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> request = build_list_request( template_url=next_link, ) <NEW_LINE> request = _convert_request(request) <NEW_LINE> request.url = self._client.format_url(request.url) <NEW_LINE> request.method = "GET" <NEW_LINE> <DEDENT> return request <NEW_LINE> <DEDENT> async def extract_data(pipeline_response): <NEW_LINE> <INDENT> deserialized = self._deserialize("OperationListResult", pipeline_response) <NEW_LINE> list_of_elem = deserialized.value <NEW_LINE> if cls: <NEW_LINE> <INDENT> list_of_elem = cls(list_of_elem) <NEW_LINE> <DEDENT> return deserialized.next_link or None, AsyncList(list_of_elem) <NEW_LINE> <DEDENT> async def get_next(next_link=None): <NEW_LINE> <INDENT> request = prepare_request(next_link) <NEW_LINE> pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) <NEW_LINE> response = pipeline_response.http_response <NEW_LINE> if response.status_code not in [200]: <NEW_LINE> <INDENT> map_error(status_code=response.status_code, response=response, error_map=error_map) <NEW_LINE> 
raise HttpResponseError(response=response, error_format=ARMErrorFormat) <NEW_LINE> <DEDENT> return pipeline_response <NEW_LINE> <DEDENT> return AsyncItemPaged( get_next, extract_data ) <NEW_LINE> <DEDENT> list.metadata = {'url': '/providers/Microsoft.Resources/operations'}
Operations async operations. You should not instantiate this class directly. Instead, you should create a Client instance that instantiates it for you and attaches it as an attribute. :ivar models: Alias to model classes used in this operation group. :type models: ~azure.mgmt.resource.subscriptions.v2019_11_01.models :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. :param deserializer: An object model deserializer.
62598fac097d151d1a2c0fe6
class AdminSplitDateTime(forms.SplitDateTimeWidget): <NEW_LINE> <INDENT> def __init__(self, attrs=None): <NEW_LINE> <INDENT> widgets = [AdminDateWidget, AdminTimeWidget] <NEW_LINE> forms.MultiWidget.__init__(self, widgets, attrs) <NEW_LINE> <DEDENT> def render(self, name, value, attrs=None): <NEW_LINE> <INDENT> input_html = [ht for ht in super(AdminSplitDateTime, self).render(name, value, attrs).split('\n') if ht != ''] <NEW_LINE> return mark_safe('<div class="datetime clearfix"><div class="input-group date bootstrap-datepicker"><span class="input-group-addon"><i class="fa fa-calendar"></i></span>%s' '<span class="input-group-btn"><button class="btn btn-default" type="button">%s</button></span></div>' '<div class="input-group time bootstrap-clockpicker"><span class="input-group-addon"><i class="fa fa-clock-o">' '</i></span>%s<span class="input-group-btn"><button class="btn btn-default" type="button">%s</button></span></div></div>' % (input_html[0], _(u'Today'), input_html[1], _(u'Now'))) <NEW_LINE> <DEDENT> def format_output(self, rendered_widgets): <NEW_LINE> <INDENT> return mark_safe(u'<div class="datetime clearfix">%s%s</div>' % (rendered_widgets[0], rendered_widgets[1]))
A SplitDateTime Widget that has some myadmin-specific styling.
62598fac56b00c62f0fb2872
class Token(Authentication): <NEW_LINE> <INDENT> def __init__(self, token, name=None): <NEW_LINE> <INDENT> self._token = token <NEW_LINE> self._name = name <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def from_json(parsed_json): <NEW_LINE> <INDENT> name = json_get(parsed_json, "name") <NEW_LINE> sha1 = json_get(parsed_json, "sha1") <NEW_LINE> return Token(sha1, name) <NEW_LINE> <DEDENT> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return self._name <NEW_LINE> <DEDENT> @property <NEW_LINE> def token(self): <NEW_LINE> <INDENT> return self._token <NEW_LINE> <DEDENT> def update_kwargs(self, kwargs): <NEW_LINE> <INDENT> if "params" in kwargs: <NEW_LINE> <INDENT> kwargs["params"]["token"] = self._token <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> kwargs["params"] = {"token": self._token}
An immutable representation of a Gitea authentication token
62598fac167d2b6e312b6f2f
class Game_tree(): <NEW_LINE> <INDENT> def __init__(self, player_at_move=1, field=np.zeros(9)): <NEW_LINE> <INDENT> self.node_count = 1 <NEW_LINE> self.leaf_count = 0 <NEW_LINE> self.queue = BetterPriorityQueue() <NEW_LINE> self.tree = Game_state(player=player_at_move, depth=7, tree=self, field_vec=field) <NEW_LINE> <DEDENT> def add_to_queue(self, priority, element): <NEW_LINE> <INDENT> self.queue.add(priority, element) <NEW_LINE> return <NEW_LINE> <DEDENT> def in_queue(self, element): <NEW_LINE> <INDENT> return self.queue.isin(element) <NEW_LINE> <DEDENT> def backprop(self): <NEW_LINE> <INDENT> current_depth = 9 <NEW_LINE> processed = 0 <NEW_LINE> while not self.queue.empty(): <NEW_LINE> <INDENT> processing = self.queue.pop() <NEW_LINE> depth = processing.depth <NEW_LINE> processed += 1 <NEW_LINE> if processed == 10000: <NEW_LINE> <INDENT> print('processed another 10000 nodes') <NEW_LINE> processed = 0 <NEW_LINE> <DEDENT> if depth < current_depth: <NEW_LINE> <INDENT> print('arrived at depth', depth) <NEW_LINE> current_depth = depth <NEW_LINE> <DEDENT> best_value = float('inf') <NEW_LINE> best_move = None <NEW_LINE> if processing.player == -1: <NEW_LINE> <INDENT> for child in processing.children: <NEW_LINE> <INDENT> if child.value < best_value: <NEW_LINE> <INDENT> best_value = child.value <NEW_LINE> best_move = child <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> for child in processing.children: <NEW_LINE> <INDENT> if child.value > best_value: <NEW_LINE> <INDENT> best_value = child.value <NEW_LINE> best_move = child <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> processing.value = best_value <NEW_LINE> processing.next_move = best_move <NEW_LINE> processing.queue_up_parent() <NEW_LINE> <DEDENT> return
class holding game tree and performing backpropagation and so...
62598fac66673b3332c30389
class ExportListView(TemplateView): <NEW_LINE> <INDENT> template_name = 'openslides_export/export_list.html' <NEW_LINE> required_permission = 'openslides_export.can_export' <NEW_LINE> def get_context_data(self, *args, **kwargs): <NEW_LINE> <INDENT> context = super(ExportListView, self).get_context_data(*args, **kwargs) <NEW_LINE> context['motions'] = Motion.objects.all() <NEW_LINE> return context
View of the overview page of all exportable elements
62598fac4c3428357761a277
class ProcessedData(object): <NEW_LINE> <INDENT> def __init__(self, did_close, new_stdout=None, new_stderr=None, new_stdin=None): <NEW_LINE> <INDENT> self.new_stdout = new_stdout <NEW_LINE> self.new_stderr = new_stderr <NEW_LINE> self.new_stdin = new_stdin <NEW_LINE> self.did_close = did_close <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return 'stdout: %s\nstdin: %s\nstderr: %s\nclosed?: %s' % ( self.new_stdout, self.new_stdin, self.new_stderr, self.did_close)
Container for new data processed by the job. IO data that is processed by a process will be stored in an object of this type. Attributes: new_stdout: Any new data that came from stdout. None if no data came. new_stderr: Any new data that came from stderr. None if no data came. new_stdin: Any new data that was written into stdin. None if no data was written. did_close: True if the program has closed all input.
62598fac60cbc95b0636430c
class IsSuperAdminUser(BasePermission): <NEW_LINE> <INDENT> def has_permission(self, request, view): <NEW_LINE> <INDENT> return request.user and request.user.is_superuser
Allows access only to admin users.
62598fac57b8e32f525080fa
class Model(with_metaclass(ModelMeta)): <NEW_LINE> <INDENT> def __init__(self, init_method='from_initialization', **kwargs): <NEW_LINE> <INDENT> super(Model, self).__init__() <NEW_LINE> defined_fields = type(self).fields or {} <NEW_LINE> for key in kwargs: <NEW_LINE> <INDENT> if key not in defined_fields: <NEW_LINE> <INDENT> raise ModelMisuseError( 'Trying to initialize model {} with value for undefined field {}'.format(type(self).__name__, key)) <NEW_LINE> <DEDENT> <DEDENT> for field in defined_fields: <NEW_LINE> <INDENT> getattr(defined_fields[field], init_method)(kwargs, field, self) <NEW_LINE> <DEDENT> <DEDENT> topic = None <NEW_LINE> fields = None <NEW_LINE> @classmethod <NEW_LINE> def create(cls, data): <NEW_LINE> <INDENT> return cls(init_method='to_model', **data) <NEW_LINE> <DEDENT> def dump(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for field in type(self).fields.keys(): <NEW_LINE> <INDENT> type(self).fields[field].to_builtin(self, field, result) <NEW_LINE> <DEDENT> return result <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return isinstance(other, type(self)) and all(getattr(self, name) == getattr(other, name) for name in sorted(type(self).fields.keys())) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def serialize(cls): <NEW_LINE> <INDENT> return { 'class_name': cls.__name__, 'fields': {name: field.serialize() for name, field in cls.fields.items()}, 'topic': cls.topic.__name__ }
Model is a base class for all models. Models are defining the data structure of the payload of messages and the metadata required, such as a name and topic.
62598facf9cc0f698b1c52a8
class SenderMixinSentMailTests(unittest.TestCase): <NEW_LINE> <INDENT> def test_onlyLogFailedAddresses(self): <NEW_LINE> <INDENT> onDone = self.assertFailure(defer.Deferred(), smtp.SMTPDeliveryError) <NEW_LINE> onDone.addCallback(lambda e: self.assertEqual( e.log, "bob@example.com: 199 Error in sending.\n")) <NEW_LINE> clientFactory = smtp.SMTPSenderFactory( 'source@address', 'recipient@address', StringIO("Message body"), onDone, retries=0, timeout=0.5) <NEW_LINE> client = clientFactory.buildProtocol( address.IPv4Address('TCP', 'example.net', 25)) <NEW_LINE> addresses = [("alice@example.com", 200, "No errors here!"), ("bob@example.com", 199, "Error in sending.")] <NEW_LINE> client.sentMail(199, "Test response", 1, addresses, client.log) <NEW_LINE> return onDone
Tests for L{smtp.SenderMixin.sentMail}, used in particular by L{smtp.SMTPSenderFactory} and L{smtp.ESMTPSenderFactory}.
62598fac5fcc89381b26612b
class UserFavorite(models.Model): <NEW_LINE> <INDENT> user = models.ForeignKey(UserProfile, verbose_name="用户收藏") <NEW_LINE> fav_id = models.IntegerField(default=0, verbose_name="数据ID") <NEW_LINE> fav_type_choices = ( (1, "课程"), (2, "课程机构"), (3, "讲师"), ) <NEW_LINE> fav_type = models.IntegerField(choices=fav_type_choices, default=1, verbose_name="收藏类型") <NEW_LINE> add_time = models.DateTimeField(default=datetime.now, verbose_name="收藏时间") <NEW_LINE> class Meta: <NEW_LINE> <INDENT> verbose_name = "用户收藏" <NEW_LINE> verbose_name_plural = verbose_name
用户收藏
62598fac460517430c43203c
class AbbrevMixin: <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super().__init__(*args, **kwargs) <NEW_LINE> self.abbrev = False <NEW_LINE> self.add_settable(cmd2.Settable('abbrev', bool, 'Accept command abbreviations')) <NEW_LINE> self.register_postparsing_hook(self.cmd2_abbrev_hook) <NEW_LINE> <DEDENT> def cmd2_abbrev_hook(self, data: cmd2.plugin.PostparsingData) -> cmd2.plugin.PostparsingData: <NEW_LINE> <INDENT> if self.abbrev: <NEW_LINE> <INDENT> target = 'do_' + data.statement.command <NEW_LINE> if target not in dir(self): <NEW_LINE> <INDENT> cmds = self.get_all_commands() <NEW_LINE> funcs = [func for func in cmds if func.startswith(data.statement.command)] <NEW_LINE> if len(funcs) == 1: <NEW_LINE> <INDENT> raw = data.statement.raw.replace(data.statement.command, funcs[0], 1) <NEW_LINE> data.statement = self.statement_parser.parse(raw) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return data
A cmd2 plugin (mixin class) which adds support for abbreviated commands.
62598fac7d43ff24874273e1
class urlfetch(PackageGet): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def _extract_raw(cls, value): <NEW_LINE> <INDENT> return value.body
Wrapper for urlfetch.
62598fac9c8ee82313040150
class UserSerializer(serializers.ModelSerializer): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> model = get_user_model() <NEW_LINE> fields = ('email', 'password', 'name') <NEW_LINE> extra_kwargs = {'password': {'write_only': True, 'min_length': 5}} <NEW_LINE> <DEDENT> def create(self, validated_data): <NEW_LINE> <INDENT> return get_user_model().objects.create_user(**validated_data) <NEW_LINE> <DEDENT> def update(self, instance, validated_data): <NEW_LINE> <INDENT> password = validated_data.pop('password', None) <NEW_LINE> user = super().update(instance, validated_data) <NEW_LINE> if password: <NEW_LINE> <INDENT> user.set_password(password) <NEW_LINE> user.save() <NEW_LINE> <DEDENT> return user
Serializer for users objects
62598fac5fdd1c0f98e5df50
class SimpleAsyncRam(Unit): <NEW_LINE> <INDENT> def _declr(self): <NEW_LINE> <INDENT> self.addr_in = VectSignal(2) <NEW_LINE> self.din = VectSignal(8) <NEW_LINE> self.addr_out = VectSignal(2) <NEW_LINE> self.dout = VectSignal(8)._m() <NEW_LINE> <DEDENT> def _impl(self): <NEW_LINE> <INDENT> self._ram = ram = self._sig("ram_data", Bits(8)[4]) <NEW_LINE> self.dout(ram[self.addr_out]) <NEW_LINE> ram[self.addr_in](self.din)
Note that there is no such a thing in hw yet... .. hwt-autodoc::
62598fac67a9b606de545f8b
class SigninForm(Form): <NEW_LINE> <INDENT> email = StringField('邮箱', validators=[ DataRequired("邮箱不能为空"), Email('邮箱格式错误') ], description="邮箱") <NEW_LINE> password = PasswordField('密码', validators=[DataRequired("密码不能为空")], description="密码") <NEW_LINE> remember = BooleanField('保持登录') <NEW_LINE> def validate_email(self, field): <NEW_LINE> <INDENT> user = User.query.filter(User.email == self.email.data).first() <NEW_LINE> if not user: <NEW_LINE> <INDENT> raise ValueError("账户不存在") <NEW_LINE> <DEDENT> <DEDENT> def validate_password(self, field): <NEW_LINE> <INDENT> if self.email.data: <NEW_LINE> <INDENT> user = User.query.filter(User.email == self.email.data).first() <NEW_LINE> if not user or not user.check_password(self.password.data): <NEW_LINE> <INDENT> raise ValueError('密码不正确') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.user = user
Form for signin
62598fac2c8b7c6e89bd3784
class InsertionSort: <NEW_LINE> <INDENT> __slots__ = 'original_data' <NEW_LINE> def __init__(self, data): <NEW_LINE> <INDENT> self.original_data = data <NEW_LINE> <DEDENT> def insertion_sort(self): <NEW_LINE> <INDENT> data = self.original_data <NEW_LINE> for index in range(1, len(data)): <NEW_LINE> <INDENT> position = index <NEW_LINE> while position > 0 and data[position - 1] > data[position]: <NEW_LINE> <INDENT> data[position], data[position - 1] = data[position - 1], data[position] <NEW_LINE> position -= 1 <NEW_LINE> <DEDENT> <DEDENT> return data
This class implements the insertion sort.
62598fac5fc7496912d48261
class NoSuchTestError(DigressError): <NEW_LINE> <INDENT> pass
Raised when no such test exists.
62598fac009cb60464d014df
class CannotCommunicate(exceptions.HomeAssistantError): <NEW_LINE> <INDENT> pass
Error to indicate we cannot connect.
62598fac38b623060ffa9058
class _PlayerDatabase(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.name = None <NEW_LINE> self.time_stamp = time() <NEW_LINE> self.last_win = None <NEW_LINE> self.wins = 0
Class used to hold values for a player in the winners database.
62598fac99cbb53fe6830e96
class BasicProvider: <NEW_LINE> <INDENT> _method = "post" <NEW_LINE> def get_action(self, payment): <NEW_LINE> <INDENT> return self.get_return_url(payment) <NEW_LINE> <DEDENT> def __init__(self, capture=True): <NEW_LINE> <INDENT> self._capture = capture <NEW_LINE> <DEDENT> def get_hidden_fields(self, payment): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def get_form(self, payment, data=None): <NEW_LINE> <INDENT> from .forms import PaymentForm <NEW_LINE> return PaymentForm( self.get_hidden_fields(payment), self.get_action(payment), self._method ) <NEW_LINE> <DEDENT> def process_data(self, payment, request): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def get_token_from_request(self, payment, request): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def get_return_url(self, payment, extra_data=None): <NEW_LINE> <INDENT> payment_link = payment.get_process_url() <NEW_LINE> url = urljoin(get_base_url(), payment_link) <NEW_LINE> if extra_data: <NEW_LINE> <INDENT> qs = urlencode(extra_data) <NEW_LINE> return url + "?" + qs <NEW_LINE> <DEDENT> return url <NEW_LINE> <DEDENT> def capture(self, payment, amount=None): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def release(self, payment): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def refund(self, payment, amount=None): <NEW_LINE> <INDENT> raise NotImplementedError()
This class defines the provider API. It should not be instantiated directly. Use factory instead.
62598facaad79263cf42e792
class AzureFirewallNetworkRuleCollection(SubResource): <NEW_LINE> <INDENT> _validation = { 'etag': {'readonly': True}, 'priority': {'maximum': 65000, 'minimum': 100}, 'provisioning_state': {'readonly': True}, } <NEW_LINE> _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'etag': {'key': 'etag', 'type': 'str'}, 'priority': {'key': 'properties.priority', 'type': 'int'}, 'action': {'key': 'properties.action', 'type': 'AzureFirewallRCAction'}, 'rules': {'key': 'properties.rules', 'type': '[AzureFirewallNetworkRule]'}, 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, } <NEW_LINE> def __init__( self, **kwargs ): <NEW_LINE> <INDENT> super(AzureFirewallNetworkRuleCollection, self).__init__(**kwargs) <NEW_LINE> self.name = kwargs.get('name', None) <NEW_LINE> self.etag = None <NEW_LINE> self.priority = kwargs.get('priority', None) <NEW_LINE> self.action = kwargs.get('action', None) <NEW_LINE> self.rules = kwargs.get('rules', None) <NEW_LINE> self.provisioning_state = None
Network rule collection resource. Variables are only populated by the server, and will be ignored when sending a request. :param id: Resource ID. :type id: str :param name: The name of the resource that is unique within the Azure firewall. This name can be used to access the resource. :type name: str :ivar etag: A unique read-only string that changes whenever the resource is updated. :vartype etag: str :param priority: Priority of the network rule collection resource. :type priority: int :param action: The action type of a rule collection. :type action: ~azure.mgmt.network.v2019_09_01.models.AzureFirewallRCAction :param rules: Collection of rules used by a network rule collection. :type rules: list[~azure.mgmt.network.v2019_09_01.models.AzureFirewallNetworkRule] :ivar provisioning_state: The provisioning state of the network rule collection resource. Possible values include: "Succeeded", "Updating", "Deleting", "Failed". :vartype provisioning_state: str or ~azure.mgmt.network.v2019_09_01.models.ProvisioningState
62598fac91f36d47f2230e85
class AverageMeter(object): <NEW_LINE> <INDENT> def __init__(self, window_size=20): <NEW_LINE> <INDENT> self.values = deque(maxlen=window_size) <NEW_LINE> self.counts = deque(maxlen=window_size) <NEW_LINE> self.sum = 0.0 <NEW_LINE> self.count = 0 <NEW_LINE> <DEDENT> def update(self, value, count=1): <NEW_LINE> <INDENT> self.values.append(value) <NEW_LINE> self.counts.append(count) <NEW_LINE> self.sum += value <NEW_LINE> self.count += count <NEW_LINE> <DEDENT> @property <NEW_LINE> def avg(self): <NEW_LINE> <INDENT> if np.sum(self.counts) == 0: <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> return np.sum(self.values) / np.sum(self.counts) <NEW_LINE> <DEDENT> @property <NEW_LINE> def global_avg(self): <NEW_LINE> <INDENT> if self.count == 0: <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> return self.sum / self.count
Track a series of values and provide access to smoothed values over a window or the global series average.
62598fac3346ee7daa337628
class ParameterHandler(object): <NEW_LINE> <INDENT> def __init__(self, *pv_pairs): <NEW_LINE> <INDENT> super(ParameterHandler, self).__init__() <NEW_LINE> self._parameters = list(zip(pv_pairs[::2], pv_pairs[1::2])) <NEW_LINE> if len(pv_pairs) % 2: <NEW_LINE> <INDENT> self._parameters.extend(pv_pairs[-1].items()) <NEW_LINE> <DEDENT> for parameter, _ in self._parameters: <NEW_LINE> <INDENT> if(not hasattr(parameter, 'set') or not hasattr(parameter, 'get')): <NEW_LINE> <INDENT> raise TypeError('all parameters must have get and set methods.') <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> @staticmethod <NEW_LINE> def _update_value(parameter, value): <NEW_LINE> <INDENT> old_value = parameter.get() <NEW_LINE> new_value = resolve_value(value) <NEW_LINE> if (old_value is None) or (old_value != new_value): <NEW_LINE> <INDENT> parameter.set(new_value) <NEW_LINE> <DEDENT> return old_value
ParameterHandler(p0, v0[, p1, v1, ...][, parameter_dict]) :class:`~uqtools.parameter.Parameter` backend for :class:`Set` and :class:`Revert`. `pN.set(vN)` is called for every pair `pN`, `vN` and every item in `parameter_dict` on `__enter__`. Parameters ---------- p0, p1, ... : `Parameter` Set parameters. v0, v1, ... : `any`, accepts `Parameter` Target values. parameter_dict : {`Parameter`: `any`} `dict` Set `Parameter` to target value mapping. Must be the last argument, may be the only argument. Examples -------- >>> uqtools.SetParameter(p0, 0) >>> uqtools.SetParameter({p0: 0}) >>> uqtools.SetParameter(p0, 0, p1, 1, {p2: 2, p3: 3})
62598fac1b99ca400228f50f
class Connector(QObject, Object, DeviceBase): <NEW_LINE> <INDENT> def __init__(self, uri=None, attributes=[], policy=UpdatePolicy.POLLING, interval=1.0): <NEW_LINE> <INDENT> QObject.__init__(self) <NEW_LINE> Object.__init__(self) <NEW_LINE> self.uri = uri <NEW_LINE> self.attributes = {} <NEW_LINE> for attribute in attributes: <NEW_LINE> <INDENT> self.add_attribute(attribute) <NEW_LINE> <DEDENT> self.add_attribute( {"name": "state", "attr": "State", "value": State.UNKNOWN}) <NEW_LINE> self.update_policy(policy, interval) <NEW_LINE> <DEDENT> def add_attribute(self, attribute=None): <NEW_LINE> <INDENT> if type(attribute) is not dict or "attr" not in attribute: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> if "name" in attribute: <NEW_LINE> <INDENT> name = attribute["name"] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> name = attribute["attr"].lower() <NEW_LINE> <DEDENT> if name not in self.attributes: <NEW_LINE> <INDENT> self.attributes[name] = {"value": None} <NEW_LINE> <DEDENT> for key, value in attribute.items(): <NEW_LINE> <INDENT> if key != "name": <NEW_LINE> <INDENT> self.attributes[name][key] = value <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def update_policy(self, policy=UpdatePolicy.POLLING, interval=1.0): <NEW_LINE> <INDENT> self.policy = policy <NEW_LINE> self.interval = interval <NEW_LINE> <DEDENT> def state(self, refresh=False): <NEW_LINE> <INDENT> return self.attributes["state"]["value"] <NEW_LINE> <DEDENT> def read(self, attribute=None, refresh=False, alt=None): <NEW_LINE> <INDENT> return self.attributes[attribute]["value"] <NEW_LINE> <DEDENT> def write(self, attribute=None, value=None): <NEW_LINE> <INDENT> self.attributes[attribute]["value"] = value <NEW_LINE> return True <NEW_LINE> <DEDENT> def execute(self, command=None, *values): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def stop_device(self): <NEW_LINE> <INDENT> pass
A Connector object interfaces between devices and control systems. This is the base class to all connectors. It provides a mapping between Janus attribute names and identifiers of the control system. Derived objects should care to keep the attribute values up to date, either by polling them or through event based mechanisms. A changed value should be indicated by a value_changed signal.
62598facd58c6744b42dc2b6
class RuoteWorkitem(Workitem): <NEW_LINE> <INDENT> def loads(self, blob): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self._body = json.loads(blob) <NEW_LINE> self._worker_type = self._body["fields"]["params"]["worker_type"] <NEW_LINE> <DEDENT> except (ValueError, KeyError, TypeError): <NEW_LINE> <INDENT> raise RuoteWorkitemError("Can't parse workitem body") <NEW_LINE> <DEDENT> <DEDENT> def dumps(self): <NEW_LINE> <INDENT> if self._body is None: <NEW_LINE> <INDENT> raise RuoteWorkitemError("Workitem hasn't been loaded") <NEW_LINE> <DEDENT> return json.dumps(self._body) <NEW_LINE> <DEDENT> @property <NEW_LINE> def worker_type(self): <NEW_LINE> <INDENT> if self._worker_type is None: <NEW_LINE> <INDENT> raise RuoteWorkitemError("Workitem hasn't been loaded") <NEW_LINE> <DEDENT> return self._worker_type <NEW_LINE> <DEDENT> def set_error(self, error): <NEW_LINE> <INDENT> self._body["error"] = error <NEW_LINE> <DEDENT> def set_trace(self, trace): <NEW_LINE> <INDENT> self._body["trace"] = trace <NEW_LINE> <DEDENT> @property <NEW_LINE> def fei(self): <NEW_LINE> <INDENT> return self._body["fei"].copy() <NEW_LINE> <DEDENT> @property <NEW_LINE> def fields(self): <NEW_LINE> <INDENT> return self._body["fields"]
Ruote workitem. This class is used to parse JSON-based Ruote workitems like: .. code-block:: guess { "re_dispatch_count": 0, "participant_name": "hardworker", "wf_revision": null, "fields": { "repo": "testrepo1", "pkgname": "python-riak", "pkgversion": "1.2.1", "branch": "master2", "workdir": "/home/rozhkov/tmp", "dispatched_at": "2012-03-04 14:00:22.861908 UTC", "params": { "participant_options": { "forget": false, "queue": "taskqueue" }, "worker_type": "simplebuilder", "ref": "hardworker" }, "user": "vasya" }, "wf_name":null, "fei": { "wfid": "20120304-bejeruwodi", "engine_id": "engine", "expid": "0_1_3", "subid": "8079afecd0256e8280b355455ea3435f" } }
62598fac66656f66f7d5a3af
class TestQuota: <NEW_LINE> <INDENT> def test_per_second(self): <NEW_LINE> <INDENT> q = quota.Quota.per_second(6, maximum_burst=1) <NEW_LINE> assert q.period == datetime.timedelta(seconds=1) <NEW_LINE> assert q.limit == 7 <NEW_LINE> <DEDENT> def test_per_minute(self): <NEW_LINE> <INDENT> q = quota.Quota.per_minute(SECONDS_IN_A_MINUTE, maximum_burst=1) <NEW_LINE> assert q.period == datetime.timedelta(seconds=60) <NEW_LINE> assert q.limit == 61 <NEW_LINE> <DEDENT> def test_per_hour(self): <NEW_LINE> <INDENT> q = quota.Quota.per_hour(SECONDS_IN_AN_HOUR, maximum_burst=1) <NEW_LINE> assert q.period == datetime.timedelta(hours=1) <NEW_LINE> assert q.limit == 3601 <NEW_LINE> <DEDENT> def test_per_day(self): <NEW_LINE> <INDENT> q = quota.Quota.per_day(SECONDS_IN_A_DAY, maximum_burst=1) <NEW_LINE> assert q.period == datetime.timedelta(days=1) <NEW_LINE> assert q.limit == 86401 <NEW_LINE> <DEDENT> def test_requires_a_positive_period(self): <NEW_LINE> <INDENT> with pytest.raises(ValueError): <NEW_LINE> <INDENT> quota.Quota(datetime.timedelta(seconds=0), 1) <NEW_LINE> <DEDENT> <DEDENT> def test_requires_a_non_negative_count(self): <NEW_LINE> <INDENT> with pytest.raises(ValueError): <NEW_LINE> <INDENT> quota.Quota(datetime.timedelta(seconds=1), -1) <NEW_LINE> <DEDENT> <DEDENT> def test_requires_a_non_negative_maximum_burst(self): <NEW_LINE> <INDENT> with pytest.raises(ValueError): <NEW_LINE> <INDENT> quota.Quota(datetime.timedelta(seconds=1), 1, -1) <NEW_LINE> <DEDENT> <DEDENT> def test_calculates_limit_from_count_and_burst(self): <NEW_LINE> <INDENT> q = quota.Quota( period=datetime.timedelta(hours=1), count=5000, maximum_burst=500 ) <NEW_LINE> assert q.limit == 5500
Tests for our Quota class.
62598fac55399d3f056264e3
class Update(show.ShowOne): <NEW_LINE> <INDENT> def get_parser(self, prog_name): <NEW_LINE> <INDENT> parser = super(Update, self).get_parser(prog_name) <NEW_LINE> parser.add_argument( 'name', help='Workbook name') <NEW_LINE> parser.add_argument( 'description', nargs='?', help='Workbook description') <NEW_LINE> parser.add_argument( 'tags', nargs='*', help='Workbook tags') <NEW_LINE> return parser <NEW_LINE> <DEDENT> def take_action(self, parsed_args): <NEW_LINE> <INDENT> workbook = w.WorkbookManager(self.app.client).update( parsed_args.name, parsed_args.description, parsed_args.tags) <NEW_LINE> return format(workbook)
Update workbook
62598fac63d6d428bbee276a
class RandomMotion(): <NEW_LINE> <INDENT> def __init__(self, num_points=5000): <NEW_LINE> <INDENT> self.num_points = num_points <NEW_LINE> self.x_values = [0] <NEW_LINE> self.y_values = [0] <NEW_LINE> <DEDENT> def fill_motion(self): <NEW_LINE> <INDENT> while len(self.x_values) < self.num_points: <NEW_LINE> <INDENT> x_step = get_step() <NEW_LINE> y_step = get_step() <NEW_LINE> if x_step == 0 and y_step == 0: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> x = self.x_values[-1] + x_step <NEW_LINE> y = self.y_values[-1] + y_step <NEW_LINE> self.x_values.append(x) <NEW_LINE> self.y_values.append(y)
A class to simulate random molecular motion
62598fac0c0af96317c56342
class KalmanBoxTracker(object): <NEW_LINE> <INDENT> count = 0 <NEW_LINE> def __init__(self,bbox,img=None): <NEW_LINE> <INDENT> self.kf = KalmanFilter(dim_x=7, dim_z=4) <NEW_LINE> self.kf.F = np.array([[1,0,0,0,1,0,0],[0,1,0,0,0,1,0],[0,0,1,0,0,0,1],[0,0,0,1,0,0,0], [0,0,0,0,1,0,0],[0,0,0,0,0,1,0],[0,0,0,0,0,0,1]]) <NEW_LINE> self.kf.H = np.array([[1,0,0,0,0,0,0],[0,1,0,0,0,0,0],[0,0,1,0,0,0,0],[0,0,0,1,0,0,0]]) <NEW_LINE> self.kf.R[2:,2:] *= 10. <NEW_LINE> self.kf.P[4:,4:] *= 1000. <NEW_LINE> self.kf.P *= 10. <NEW_LINE> self.kf.Q[-1,-1] *= 0.01 <NEW_LINE> self.kf.Q[4:,4:] *= 0.01 <NEW_LINE> self.kf.x[:4] = convert_bbox_to_z(bbox) <NEW_LINE> self.time_since_update = 0 <NEW_LINE> self.id = KalmanBoxTracker.count <NEW_LINE> print("self.kf.F") <NEW_LINE> print(self.kf.F.shape) <NEW_LINE> print("self.kf.H") <NEW_LINE> print(self.kf.H.shape) <NEW_LINE> print("self.kf.R") <NEW_LINE> print(self.kf.R.shape) <NEW_LINE> print("self.kf.P") <NEW_LINE> print(self.kf.P.shape) <NEW_LINE> print("self.kf.Q") <NEW_LINE> print(self.kf.Q.shape) <NEW_LINE> KalmanBoxTracker.count += 1 <NEW_LINE> self.history = [] <NEW_LINE> self.hits = 0 <NEW_LINE> self.hit_streak = 0 <NEW_LINE> self.age = 0 <NEW_LINE> <DEDENT> def update(self,bbox,img=None): <NEW_LINE> <INDENT> self.time_since_update = 0 <NEW_LINE> self.history = [] <NEW_LINE> self.hits += 1 <NEW_LINE> self.hit_streak += 1 <NEW_LINE> if bbox != []: <NEW_LINE> <INDENT> self.kf.update(convert_bbox_to_z(bbox)) <NEW_LINE> <DEDENT> <DEDENT> def predict(self,img=None): <NEW_LINE> <INDENT> if((self.kf.x[6]+self.kf.x[2])<=0): <NEW_LINE> <INDENT> self.kf.x[6] *= 0.0 <NEW_LINE> <DEDENT> self.kf.predict() <NEW_LINE> self.age += 1 <NEW_LINE> if(self.time_since_update>0): <NEW_LINE> <INDENT> self.hit_streak = 0 <NEW_LINE> <DEDENT> self.time_since_update += 1 <NEW_LINE> self.history.append(convert_x_to_bbox(self.kf.x)) <NEW_LINE> return self.history[-1][0] <NEW_LINE> <DEDENT> def get_state(self): <NEW_LINE> <INDENT> return 
convert_x_to_bbox(self.kf.x)[0]
This class represents the internal state of individual tracked objects observed as bbox.
62598fac56ac1b37e63021ab
class HFSFileTest(test_lib.HFSImageFileTestCase): <NEW_LINE> <INDENT> _IDENTIFIER_ANOTHER_FILE = 21 <NEW_LINE> _IDENTIFIER_PASSWORDS_TXT = 20 <NEW_LINE> def setUp(self): <NEW_LINE> <INDENT> super(HFSFileTest, self).setUp() <NEW_LINE> self._resolver_context = context.Context() <NEW_LINE> test_path = self._GetTestFilePath(['hfsplus.raw']) <NEW_LINE> self._SkipIfPathNotExists(test_path) <NEW_LINE> test_os_path_spec = path_spec_factory.Factory.NewPathSpec( definitions.TYPE_INDICATOR_OS, location=test_path) <NEW_LINE> self._raw_path_spec = path_spec_factory.Factory.NewPathSpec( definitions.TYPE_INDICATOR_RAW, parent=test_os_path_spec) <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> self._resolver_context.Empty() <NEW_LINE> <DEDENT> def testOpenCloseIdentifier(self): <NEW_LINE> <INDENT> path_spec = path_spec_factory.Factory.NewPathSpec( definitions.TYPE_INDICATOR_HFS, identifier=self._IDENTIFIER_PASSWORDS_TXT, parent=self._raw_path_spec) <NEW_LINE> file_object = hfs_file_io.HFSFile(self._resolver_context, path_spec) <NEW_LINE> self._TestOpenCloseIdentifier(file_object) <NEW_LINE> <DEDENT> def testOpenCloseLocation(self): <NEW_LINE> <INDENT> path_spec = path_spec_factory.Factory.NewPathSpec( definitions.TYPE_INDICATOR_HFS, identifier=self._IDENTIFIER_PASSWORDS_TXT, location='/passwords.txt', parent=self._raw_path_spec) <NEW_LINE> file_object = hfs_file_io.HFSFile(self._resolver_context, path_spec) <NEW_LINE> self._TestOpenCloseLocation(file_object) <NEW_LINE> path_spec.parent = None <NEW_LINE> file_object = hfs_file_io.HFSFile(self._resolver_context, path_spec) <NEW_LINE> with self.assertRaises(errors.PathSpecError): <NEW_LINE> <INDENT> self._TestOpenCloseLocation(file_object) <NEW_LINE> <DEDENT> <DEDENT> def testSeek(self): <NEW_LINE> <INDENT> path_spec = path_spec_factory.Factory.NewPathSpec( definitions.TYPE_INDICATOR_HFS, location='/a_directory/another_file', identifier=self._IDENTIFIER_ANOTHER_FILE, parent=self._raw_path_spec) <NEW_LINE> file_object = 
hfs_file_io.HFSFile(self._resolver_context, path_spec) <NEW_LINE> self._TestSeek(file_object) <NEW_LINE> <DEDENT> def testRead(self): <NEW_LINE> <INDENT> path_spec = path_spec_factory.Factory.NewPathSpec( definitions.TYPE_INDICATOR_HFS, location='/passwords.txt', identifier=self._IDENTIFIER_PASSWORDS_TXT, parent=self._raw_path_spec) <NEW_LINE> file_object = hfs_file_io.HFSFile(self._resolver_context, path_spec) <NEW_LINE> self._TestRead(file_object) <NEW_LINE> <DEDENT> def testReadResourceFork(self): <NEW_LINE> <INDENT> path_spec = path_spec_factory.Factory.NewPathSpec( definitions.TYPE_INDICATOR_HFS, data_stream='rsrc', identifier=25, location='/a_directory/a_resourcefork', parent=self._raw_path_spec) <NEW_LINE> file_object = hfs_file_io.HFSFile(self._resolver_context, path_spec) <NEW_LINE> self._TestReadResourceFork(file_object)
Tests the file-like object implementation using pyfshfs.file_entry.
62598fac63b5f9789fe85126
class ListBucket(list): <NEW_LINE> <INDENT> pass
Subclass of :py:class:`list` allowing for weak references
62598fac5166f23b2e243398
class Handshake(Packet): <NEW_LINE> <INDENT> __slots__ = ['crypto_set', 'port', 'onion_address', 'protocol', 'open', 'peer_id', 'rev', 'version'] <NEW_LINE> @use_condition <NEW_LINE> def parse(self, c, params): <NEW_LINE> <INDENT> crypto_list = c.as_type('crypt_supported', list) <NEW_LINE> self.crypto_set = set() <NEW_LINE> for item in crypt_list: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.crypto_set.add(item.decode('ascii')) <NEW_LINE> <DEDENT> except (AttributeError, ValueError): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> self.port = c.port(opt('fileserver_port')) or 0 <NEW_LINE> self.protocol = c.strlen('protocol', 10).decode('ascii') <NEW_LINE> self.peer_id = c.strlen(opt('peer_id'), 64) <NEW_LINE> self.rev = c.range(opt('rev'), (0, 0xffFFffFF)) or 0 <NEW_LINE> self.version = c.strlen('version', 64).decode('ascii') <NEW_LINE> onion = c.onion(opt('onion')) <NEW_LINE> if onion: <NEW_LINE> <INDENT> self.onion_address = OnionAddress(onion) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.onion_address = None <NEW_LINE> <DEDENT> self.open = (params.get(b'opened') is True) <NEW_LINE> <DEDENT> @property <NEW_LINE> def onion(self): <NEW_LINE> <INDENT> if self.onion_address and self.port: <NEW_LINE> <INDENT> return AddrPort(self.onion.address, self.port) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return None
Unpacked [handshake] packet sent when the connection is initialized.
62598fac7cff6e4e811b59ec
class FComboBox(QComboBox): <NEW_LINE> <INDENT> def __init__(self, parent, delegate): <NEW_LINE> <INDENT> super(FComboBox, self).__init__(parent) <NEW_LINE> self.delegate = delegate <NEW_LINE> <DEDENT> def focusOutEvent(self, event): <NEW_LINE> <INDENT> logging.debug('focusOut!') <NEW_LINE> logging.debug(self.currentText()) <NEW_LINE> logging.debug(self.currentIndex()) <NEW_LINE> logging.debug(self.completer().currentCompletion()) <NEW_LINE> self.setCurrentText(self.completer().currentCompletion()) <NEW_LINE> logging.debug(self.currentIndex()) <NEW_LINE> self.model().submit() <NEW_LINE> completion = self.completer().currentCompletion()
not used, just for remember the focusOutEvent possibility. See TODO file
62598fac3317a56b869be52a
class AstPatternFormatter(AstFormatter): <NEW_LINE> <INDENT> def do_BoolOp(self, node): <NEW_LINE> <INDENT> return 'Bool' <NEW_LINE> <DEDENT> def do_Bytes(self, node): <NEW_LINE> <INDENT> assert g.isPython3 <NEW_LINE> return 'Bytes' <NEW_LINE> <DEDENT> def do_Constant(self, node): <NEW_LINE> <INDENT> assert g.isPython3 <NEW_LINE> return 'Constant' <NEW_LINE> <DEDENT> def do_Name(self, node): <NEW_LINE> <INDENT> return 'Bool' if node.id in ('True', 'False') else node.id <NEW_LINE> <DEDENT> def do_NameConstant(self, node): <NEW_LINE> <INDENT> s = repr(node.value) <NEW_LINE> return 'Bool' if s in ('True', 'False') else s <NEW_LINE> <DEDENT> def do_Num(self, node): <NEW_LINE> <INDENT> return 'Num' <NEW_LINE> <DEDENT> def do_Str(self, node): <NEW_LINE> <INDENT> return 'Str'
A subclass of AstFormatter that replaces values of constants by Bool, Bytes, Int, Name, Num or Str.
62598fac5fcc89381b26612c
class ShowSecurityGroupRule(neutronV20.ShowCommand): <NEW_LINE> <INDENT> resource = 'security_group_rule' <NEW_LINE> log = logging.getLogger(__name__ + '.ShowSecurityGroupRule') <NEW_LINE> allow_names = False
Show information of a given security group rule.
62598faceab8aa0e5d30bd4c
class _SmoothMAC(object): <NEW_LINE> <INDENT> def __init__(self, block_size, msg=b(""), min_digest=0): <NEW_LINE> <INDENT> self._bs = block_size <NEW_LINE> self._buffer = [] <NEW_LINE> self._buffer_len = 0 <NEW_LINE> self._total_len = 0 <NEW_LINE> self._min_digest = min_digest <NEW_LINE> self._mac = None <NEW_LINE> self._tag = None <NEW_LINE> if msg: <NEW_LINE> <INDENT> self.update(msg) <NEW_LINE> <DEDENT> <DEDENT> def can_reduce(self): <NEW_LINE> <INDENT> return (self._mac is not None) <NEW_LINE> <DEDENT> def get_len(self): <NEW_LINE> <INDENT> return self._total_len <NEW_LINE> <DEDENT> def zero_pad(self): <NEW_LINE> <INDENT> if self._buffer_len & (self._bs-1): <NEW_LINE> <INDENT> npad = self._bs - self._buffer_len & (self._bs-1) <NEW_LINE> self._buffer.append(bchr(0)*npad) <NEW_LINE> self._buffer_len += npad <NEW_LINE> <DEDENT> <DEDENT> def update(self, data): <NEW_LINE> <INDENT> if self._buffer_len==0 and self.can_reduce() and self._min_digest==0 and len(data)%self._bs==0: <NEW_LINE> <INDENT> self._update(data) <NEW_LINE> self._total_len += len(data) <NEW_LINE> return <NEW_LINE> <DEDENT> self._buffer.append(data) <NEW_LINE> self._buffer_len += len(data) <NEW_LINE> self._total_len += len(data) <NEW_LINE> blocks, rem = divmod(self._buffer_len, self._bs) <NEW_LINE> if rem<self._min_digest: <NEW_LINE> <INDENT> blocks -= 1 <NEW_LINE> <DEDENT> if blocks>0 and self.can_reduce(): <NEW_LINE> <INDENT> aligned_data = blocks*self._bs <NEW_LINE> if sys.version_info[0] == 2: <NEW_LINE> <INDENT> buf = b("").join(str(v) for v in self._buffer) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> buf = b("").join(self._buffer) <NEW_LINE> <DEDENT> self._update(buf[:aligned_data]) <NEW_LINE> self._buffer = [ buf[aligned_data:] ] <NEW_LINE> self._buffer_len -= aligned_data <NEW_LINE> <DEDENT> <DEDENT> def _deep_copy(self, target): <NEW_LINE> <INDENT> target._buffer = self._buffer[:] <NEW_LINE> for m in [ '_bs', '_buffer_len', '_total_len', '_min_digest', '_tag' ]: <NEW_LINE> <INDENT> 
setattr(target, m, getattr(self, m)) <NEW_LINE> <DEDENT> <DEDENT> def _update(self, data_block): <NEW_LINE> <INDENT> raise NotImplementedError("_update() must be still implemented") <NEW_LINE> <DEDENT> def _digest(self, left_data): <NEW_LINE> <INDENT> raise NotImplementedError("_digest() must be still implemented") <NEW_LINE> <DEDENT> def digest(self): <NEW_LINE> <INDENT> if self._tag: <NEW_LINE> <INDENT> return self._tag <NEW_LINE> <DEDENT> if self._buffer_len>0: <NEW_LINE> <INDENT> self.update(b("")) <NEW_LINE> <DEDENT> left_data = b("").join(self._buffer) <NEW_LINE> self._tag = self._digest(left_data) <NEW_LINE> return self._tag
Turn a MAC that only operates on aligned blocks of data into a MAC with granularity of 1 byte.
62598fac26068e7796d4c915
class Nest(SaveAndCheck): <NEW_LINE> <INDENT> def _casting(self, thething): <NEW_LINE> <INDENT> self.save_and_check( {"almonds": thething}, 'almonds', [(repr(thething),)] )
This needs to be verified with actual ScraperWiki.
62598faca17c0f6771d5c1f6
class Solution(object): <NEW_LINE> <INDENT> def canJump(self, nums): <NEW_LINE> <INDENT> max_reach, n = 0, len(nums) <NEW_LINE> for i, x in enumerate(nums): <NEW_LINE> <INDENT> if max_reach < i: return False <NEW_LINE> if max_reach >= n - 1: return True <NEW_LINE> max_reach = max(max_reach, i + x)
Given an array of non-negative integers, you are initially positioned at the first index of the array. Each element in the array represents your maximum jump length at that position. Determine if you are able to reach the last index.
62598fac2c8b7c6e89bd3786
class User(models.Model): <NEW_LINE> <INDENT> id = fields.IntField(pk=True) <NEW_LINE> email = fields.CharField(max_length=100, unique=True) <NEW_LINE> hashed_password = fields.CharField(max_length=1000) <NEW_LINE> is_active = fields.BooleanField(default=True) <NEW_LINE> async def save(self, *args, **kwargs) -> None: <NEW_LINE> <INDENT> self.hashed_password = "123456" <NEW_LINE> await super().save(*args, **kwargs) <NEW_LINE> <DEDENT> class PydanticMeta: <NEW_LINE> <INDENT> exclude = ['hashed_password']
Модель пользователя
62598fac3d592f4c4edbae8c
class FullVector(Vector): <NEW_LINE> <INDENT> def __init__(self, lst, zero_test = lambda x : (x == 0)): <NEW_LINE> <INDENT> super(FullVector, self).__init__(lst, zero_test) <NEW_LINE> self.data = lst <NEW_LINE> <DEDENT> def split(self): <NEW_LINE> <INDENT> vec1 = self[:int(len(self)/2)] <NEW_LINE> vec2 = self[int(len(self)/2):] <NEW_LINE> return FullVector(vec1 , self.Zero_test) , FullVector(vec2 , self.Zero_test) <NEW_LINE> <DEDENT> def merge(self, vector): <NEW_LINE> <INDENT> merge = [] <NEW_LINE> for i in range(len(self)): <NEW_LINE> <INDENT> merge.append(self[i]) <NEW_LINE> <DEDENT> for i in range(len(vector)): <NEW_LINE> <INDENT> merge.append(vector[i]) <NEW_LINE> <DEDENT> return make_vector(merge,zero_test = lambda x : (x == 0))
A subclass of Vector where all elements are kept explicitly as a list
62598facd486a94d0ba2bf8f
class StyledAuthenticationForm(AuthenticationForm): <NEW_LINE> <INDENT> username = UsernameField( max_length=254, widget=TextInput(attrs={'class': 'form-control'}), ) <NEW_LINE> password = forms.CharField( label="Password", strip=False, widget=PasswordInput(attrs={'class': 'form-control'}) )
User authentication form with bootstrap styles
62598fac01c39578d7f12d3f
class msg(SecActionMetadata): <NEW_LINE> <INDENT> def evaluate(self, core): <NEW_LINE> <INDENT> a = self.action[4:] <NEW_LINE> if a[0] == "'": <NEW_LINE> <INDENT> a = a[1:-1] <NEW_LINE> <DEDENT> core.msg = a <NEW_LINE> return a
https://github.com/SpiderLabs/ModSecurity/wiki/Reference-Manual#msg
62598fac009cb60464d014e1
class GuillotineSas(Guillotine): <NEW_LINE> <INDENT> def _split(self, section, width, height): <NEW_LINE> <INDENT> if section.width < section.height: <NEW_LINE> <INDENT> return self._split_horizontal(section, width, height) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self._split_vertical(section, width, height)
Implements Short Axis Split (SAS) selection rule for Guillotine algorithm.
62598fac3346ee7daa337629
class Model(DB.Model): <NEW_LINE> <INDENT> __tablename__ = 'batches' <NEW_LINE> identifier = DB.Column(DB.Integer(), primary_key=True) <NEW_LINE> speed = DB.Column(DB.Integer()) <NEW_LINE> amount_to_produce = DB.Column(DB.Integer()) <NEW_LINE> started_dt = DB.Column(DB.DateTime()) <NEW_LINE> recipe_id = DB.Column(DB.String(128)) <NEW_LINE> finished_dt = DB.Column(DB.DateTime()) <NEW_LINE> oee = DB.Column(DB.Float())
Model. TODO(Add DOC)
62598fac1f037a2d8b9e40af
class WebMercatorProjection(Projection): <NEW_LINE> <INDENT> EARTH_RADIUS = 6378137 <NEW_LINE> def project(self, latlon): <NEW_LINE> <INDENT> x = latlon.lon * self.pixels_per_degree <NEW_LINE> y = self.pixels_per_radian * math.log( math.tan(math.pi/4 + math.radians(latlon.lat/2))) <NEW_LINE> return Coord(x, y)
WGS 84 Web Mercator / Spherical Mercator projection used by Google Maps. See: https://en.wikipedia.org/wiki/Mercator_projection https://en.wikipedia.org/wiki/Web_Mercator
62598fac71ff763f4b5e7730
class DocumentPatchedEvent(DocumentChangedEvent): <NEW_LINE> <INDENT> def dispatch(self, receiver): <NEW_LINE> <INDENT> super().dispatch(receiver) <NEW_LINE> if hasattr(receiver, '_document_patched'): <NEW_LINE> <INDENT> receiver._document_patched(self) <NEW_LINE> <DEDENT> <DEDENT> def generate(self, references, buffers): <NEW_LINE> <INDENT> raise NotImplementedError()
A Base class for events that represent updating Bokeh Models and their properties.
62598facadb09d7d5dc0a54b
class Deploy(Command): <NEW_LINE> <INDENT> name = "deploy" <NEW_LINE> doc_usage = "" <NEW_LINE> doc_purpose = "deploy the site" <NEW_LINE> logger = None <NEW_LINE> def _execute(self, command, args): <NEW_LINE> <INDENT> self.logger = get_logger('deploy', self.site.loghandlers) <NEW_LINE> timestamp_path = os.path.join(self.site.config['CACHE_FOLDER'], 'lastdeploy') <NEW_LINE> if self.site.config['COMMENT_SYSTEM_ID'] == 'nikolademo': <NEW_LINE> <INDENT> self.logger.warn("\nWARNING WARNING WARNING WARNING\n" "You are deploying using the nikolademo Disqus account.\n" "That means you will not be able to moderate the comments in your own site.\n" "And is probably not what you want to do.\n" "Think about it for 5 seconds, I'll wait :-)\n\n") <NEW_LINE> time.sleep(5) <NEW_LINE> <DEDENT> deploy_drafts = self.site.config.get('DEPLOY_DRAFTS', True) <NEW_LINE> deploy_future = self.site.config.get('DEPLOY_FUTURE', False) <NEW_LINE> if not (deploy_drafts and deploy_future): <NEW_LINE> <INDENT> out_dir = self.site.config['OUTPUT_FOLDER'] <NEW_LINE> self.site.scan_posts() <NEW_LINE> for post in self.site.timeline: <NEW_LINE> <INDENT> if (not deploy_drafts and post.is_draft) or (not deploy_future and post.publish_later): <NEW_LINE> <INDENT> remove_file(os.path.join(out_dir, post.destination_path())) <NEW_LINE> remove_file(os.path.join(out_dir, post.source_path)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> for command in self.site.config['DEPLOY_COMMANDS']: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> with open(timestamp_path, 'rb') as inf: <NEW_LINE> <INDENT> last_deploy = literal_eval(inf.read().strip()) <NEW_LINE> <DEDENT> <DEDENT> except Exception: <NEW_LINE> <INDENT> last_deploy = datetime(1970, 1, 1) <NEW_LINE> <DEDENT> self.logger.notice("==> {0}".format(command)) <NEW_LINE> try: <NEW_LINE> <INDENT> subprocess.check_call(command, shell=True) <NEW_LINE> <DEDENT> except subprocess.CalledProcessError as e: <NEW_LINE> <INDENT> self.logger.error('Failed deployment — command {0} ' 
'returned {1}'.format(e.cmd, e.returncode)) <NEW_LINE> sys.exit(e.returncode) <NEW_LINE> <DEDENT> <DEDENT> self.logger.notice("Successful deployment") <NEW_LINE> new_deploy = datetime.now() <NEW_LINE> with codecs.open(timestamp_path, 'wb+', 'utf8') as outf: <NEW_LINE> <INDENT> outf.write(repr(new_deploy))
Deploy site.
62598fac63d6d428bbee276c
class TokenStore(SessionStore): <NEW_LINE> <INDENT> def __init__(self, token=None, namespace=None, cache_alias=settings.SESSION_CACHE_ALIAS): <NEW_LINE> <INDENT> self._cache = caches[cache_alias] <NEW_LINE> self._namespace = None <NEW_LINE> self.set_namespace(namespace) <NEW_LINE> super(SessionStore, self).__init__(token) <NEW_LINE> <DEDENT> @property <NEW_LINE> def cache_key_prefix(self): <NEW_LINE> <INDENT> return f'token_{self.namespace}_' if self.namespace else 'token_' <NEW_LINE> <DEDENT> @property <NEW_LINE> def namespace(self): <NEW_LINE> <INDENT> return self._namespace <NEW_LINE> <DEDENT> def set_namespace(self, namespace): <NEW_LINE> <INDENT> if namespace is not None: <NEW_LINE> <INDENT> assert isinstance(namespace, str) <NEW_LINE> <DEDENT> self._namespace = namespace <NEW_LINE> <DEDENT> @property <NEW_LINE> def token(self): <NEW_LINE> <INDENT> return self.session_key <NEW_LINE> <DEDENT> def create_token(self): <NEW_LINE> <INDENT> self.create() <NEW_LINE> self.modified = False
Cache-based token store system. It used like http sessions.
62598fac10dbd63aa1c70b74
class HAIL(WEATHER): <NEW_LINE> <INDENT> name="冰雹" <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return self.name
class inherent = WEATHER attribute: name = 中文名稱 effect = 效果解說
62598fac56b00c62f0fb2876
class ProgramViewSet(DRFCacheMixin, MultiSerializerActionClassMixin, viewsets.ModelViewSet): <NEW_LINE> <INDENT> queryset = ( training_program.models.Program.objects.all() .select_related('department') ) <NEW_LINE> serializer_class = training_program.serializers.ProgramSerializer <NEW_LINE> serializer_action_classes = { 'create': training_program.serializers.ProgramSerializer, 'partial_update': training_program.serializers.ProgramSerializer, 'update': training_program.serializers.ProgramSerializer, } <NEW_LINE> serializer_class = training_program.serializers.ReadOnlyProgramSerializer <NEW_LINE> filter_backends = (filters.DjangoObjectPermissionsFilter, django_filters.rest_framework.DjangoFilterBackend,) <NEW_LINE> permission_classes = ( auth.permissions.DjangoObjectPermissions, ) <NEW_LINE> filter_class = training_program.filters.ProgramFilter <NEW_LINE> perms_map = { 'get_group_programs': ['%(app_label)s.view_%(model_name)s'] } <NEW_LINE> @action(detail=False, url_path='group-programs', url_name='group') <NEW_LINE> def get_group_programs(self, request): <NEW_LINE> <INDENT> group_programs = ProgramService.get_grouped_programs_by_department( request.user) <NEW_LINE> return Response(group_programs, status=status.HTTP_200_OK)
Create API views for Progarm.
62598fac60cbc95b06364310
class Location: <NEW_LINE> <INDENT> def __init__(self, city, x, y): <NEW_LINE> <INDENT> self.city: City = city <NEW_LINE> self.x: int = x <NEW_LINE> self.y: int = y <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return str(self.city.name) + ", (" + str(self.x) + ", " + str(self.y) + ")" <NEW_LINE> <DEDENT> def get_tile(self): <NEW_LINE> <INDENT> if self.x < 0 or self.x >= self.city.CITY_WIDTH or self.y < 0 or self.y >= self.city.CITY_HEIGHT: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> return self.city.get_tiles()[self.y][self.x] <NEW_LINE> <DEDENT> def clone(self): <NEW_LINE> <INDENT> return copy.deepcopy(self)
This class contains attributes of a location in this game.
62598facbd1bec0571e150a4
class coin_bw_info(coininfo.publish): <NEW_LINE> <INDENT> eventname = "Interface Bandwidth Used Information" <NEW_LINE> keys = ["interface", "tx_bps", "tx_pps", "rx_bps", "rx_pps", "timestamp"] <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> self.values = [] <NEW_LINE> <DEDENT> def add(self, intf_name, tx_bps, tx_pps, rx_bps, rx_pps, timestamp=None): <NEW_LINE> <INDENT> i = {} <NEW_LINE> if (timestamp==None): <NEW_LINE> <INDENT> i["timestamp"] = time.time() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> i["timestamp"] = timestamp <NEW_LINE> <DEDENT> i["interface"] = intf_name <NEW_LINE> i["tx_bps"] = tx_bps <NEW_LINE> i["tx_pps"] = tx_pps <NEW_LINE> i["rx_bps"] = rx_bps <NEW_LINE> i["rx_pps"] = rx_pps <NEW_LINE> self.values.append(i) <NEW_LINE> <DEDENT> def get_dict(self): <NEW_LINE> <INDENT> return self.values[:]
Event to publish bandwidth used @author ykk @date August 2011
62598fac57b8e32f525080fc
class Parameters(models.Model): <NEW_LINE> <INDENT> pass
Character parameters: strength, agility, luck etc
62598fac7cff6e4e811b59ee
class Router: <NEW_LINE> <INDENT> _connector = None <NEW_LINE> _services = [] <NEW_LINE> def __init__(self, connector): <NEW_LINE> <INDENT> self._connector = connector <NEW_LINE> self._connector.on_connect = self._onConnect <NEW_LINE> self._connector.on_message = self._onMessage <NEW_LINE> self._connector.connect("localhost", 1883, 60) <NEW_LINE> <DEDENT> def registerService(self, service): <NEW_LINE> <INDENT> self._services.append(service) <NEW_LINE> <DEDENT> def sendMessage(self, topic, payload): <NEW_LINE> <INDENT> print('Router sending message', topic, payload) <NEW_LINE> self._connector.publish(topic, payload) <NEW_LINE> <DEDENT> def _onConnect(self, client, userdata, flags, rc): <NEW_LINE> <INDENT> print("Connected with result code " + str(rc)) <NEW_LINE> client.subscribe("#") <NEW_LINE> <DEDENT> def _onMessage(self, client, userdata, message): <NEW_LINE> <INDENT> services = [s for s in self._services if s.supportsTopic(message.topic)] <NEW_LINE> for s in services: <NEW_LINE> <INDENT> s.onMessage(message.topic, message.payload.decode('utf-8')) <NEW_LINE> <DEDENT> <DEDENT> def loopForever(self): <NEW_LINE> <INDENT> self._connector.loop_forever()
Core router for messages
62598fac851cf427c66b827e
class TestOntologyManagerPrefixEnum(): <NEW_LINE> <INDENT> def test_ontology_manager_prefix_enum_alias_uri(self): <NEW_LINE> <INDENT> assert PREFIX.ROOT.alias_uri('lol') == 'bem:lol' <NEW_LINE> <DEDENT> def test_ontology_manager_prefix_enum_get_name(self): <NEW_LINE> <INDENT> assert PREFIX.get_name( 'http://qudt.org/schema/qudt#lol') == 'lol' <NEW_LINE> with pytest.raises(ValueError): <NEW_LINE> <INDENT> PREFIX.get_name('dummy') <NEW_LINE> PREFIX.get_name('du#mm#y')
Unit test for PREFIX
62598fac3317a56b869be52b
class Centities: <NEW_LINE> <INDENT> def __init__(self,node=None,type='NAF'): <NEW_LINE> <INDENT> self.type = type <NEW_LINE> self.map_entity_id_to_node = {} <NEW_LINE> if node is None: <NEW_LINE> <INDENT> self.node = etree.Element('entities') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.node = node <NEW_LINE> for entity_obj in self: <NEW_LINE> <INDENT> self.map_entity_id_to_node[entity_obj.get_id()] = entity_obj.get_node() <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def add_external_reference_to_entity(self,entity_id,ext_ref): <NEW_LINE> <INDENT> node_entity = self.map_entity_id_to_node.get(entity_id) <NEW_LINE> if node_entity is not None: <NEW_LINE> <INDENT> entity = Centity(node_entity,self.type) <NEW_LINE> entity.add_external_reference(ext_ref) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print>>sys.stderr,'Trying to add a reference to the entity',entity_id,'but can not be found in this file' <NEW_LINE> <DEDENT> <DEDENT> def get_node(self): <NEW_LINE> <INDENT> return self.node <NEW_LINE> <DEDENT> def to_kaf(self): <NEW_LINE> <INDENT> if self.type == 'NAF': <NEW_LINE> <INDENT> for node in self.__get_entity_nodes(): <NEW_LINE> <INDENT> node.set('eid',node.get('id')) <NEW_LINE> del node.attrib['id'] <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def to_naf(self): <NEW_LINE> <INDENT> if self.type == 'KAF': <NEW_LINE> <INDENT> for node in self.__get_entity_nodes(): <NEW_LINE> <INDENT> node.set('id',node.get('eid')) <NEW_LINE> del node.attrib['eid'] <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def __get_entity_nodes(self): <NEW_LINE> <INDENT> for ent_node in self.node.findall('entity'): <NEW_LINE> <INDENT> yield ent_node <NEW_LINE> <DEDENT> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> for ent_node in self.__get_entity_nodes(): <NEW_LINE> <INDENT> yield Centity(ent_node,self.type) <NEW_LINE> <DEDENT> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return dump(self.node)
This class encapsulates the entity layer in KAF/NAF
62598face5267d203ee6b8cc
class LongValuedFlagListArgument(ValuedFlagListArgument): <NEW_LINE> <INDENT> _pattern = "--{0}={1}"
Represents a :py:class:`ValuedFlagListArgument` with a double dash. Example: ``--file=file1.txt --file=file2.txt``
62598facac7a0e7691f724cb
class ExtractSliceSchemaViewSet(viewsets.ViewSet): <NEW_LINE> <INDENT> base_name = 'extract-slice-schema' <NEW_LINE> permission_classes = (IsAuthenticated,) <NEW_LINE> def list(self, request): <NEW_LINE> <INDENT> return json_response( ExtractRule.get_schemas(request.user) )
Returns data dictionary for the extract slice
62598facdd821e528d6d8ef7
class AIAircraftWasDamagedByStationaryUnit(ParsableEvent): <NEW_LINE> <INDENT> __slots__ = ['time', 'actor', 'attacker', 'pos', ] <NEW_LINE> verbose_name = _("AI aircraft was damaged by stationary unit") <NEW_LINE> matcher = make_matcher( "{time}{actor}{s}damaged{s}by{s}{attacker}{pos}" .format( time=TIME_GROUP_PREFIX, actor=AI_AIRCRAFT_ACTOR_GROUP, attacker=STATIONARY_UNIT_ATTACKER_GROUP, pos=POS_GROUP_SUFFIX, s=WHITESPACE, ) ) <NEW_LINE> transformers = ( transform_time, transform_ai_aircraft_as_actor, transform_stationary_unit_as_attacker, transform_2d_pos, )
Example: "[8:33:05 PM] r01000 damaged by 0_Static at 100.0 200.99"
62598facbe383301e02537bb
class DisconnectingError(Exception): <NEW_LINE> <INDENT> pass
Error disconnecting
62598fac3539df3088ecc274
class CTD_ANON_ (pyxb.binding.basis.complexTypeDefinition): <NEW_LINE> <INDENT> _TypeDefinition = pyxb.binding.datatypes.string <NEW_LINE> _ContentTypeTag = pyxb.binding.basis.complexTypeDefinition._CT_SIMPLE <NEW_LINE> _Abstract = False <NEW_LINE> _ExpandedName = None <NEW_LINE> _XSDLocation = pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/wssplat/schemas/wsrf_bf.xsd', 66, 8) <NEW_LINE> _ElementMap = {} <NEW_LINE> _AttributeMap = {} <NEW_LINE> __lang = pyxb.binding.content.AttributeUse(pyxb.namespace.ExpandedName(pyxb.namespace.XML, 'lang'), 'lang', '__httpdocs_oasis_open_orgwsrfbf_2_CTD_ANON__httpwww_w3_orgXML1998namespacelang', pyxb.binding.xml_.STD_ANON_lang) <NEW_LINE> __lang._DeclarationLocation = None <NEW_LINE> __lang._UseLocation = pyxb.utils.utility.Location('/tmp/pyxbdist.mqXn05k/PyXB-1.2.4/pyxb/bundles/wssplat/schemas/wsrf_bf.xsd', 69, 14) <NEW_LINE> lang = property(__lang.value, __lang.set, None, None) <NEW_LINE> _ElementMap.update({ }) <NEW_LINE> _AttributeMap.update({ __lang.name() : __lang })
Complex type [anonymous] with content type SIMPLE
62598fac4e4d5625663723e8
class OpenMapQuest(Geocoder): <NEW_LINE> <INDENT> def __init__( self, api_key=None, format_string=DEFAULT_FORMAT_STRING, scheme=DEFAULT_SCHEME, timeout=DEFAULT_TIMEOUT, proxies=None, user_agent=None, ): <NEW_LINE> <INDENT> super(OpenMapQuest, self).__init__( format_string, scheme, timeout, proxies, user_agent=user_agent ) <NEW_LINE> if not api_key: <NEW_LINE> <INDENT> raise ConfigurationError('OpenMapQuest requires an API key') <NEW_LINE> <DEDENT> self.api_key = api_key <NEW_LINE> self.api = "%s://open.mapquestapi.com/nominatim/v1/search" "?format=json" % self.scheme <NEW_LINE> <DEDENT> def geocode(self, query, exactly_one=True, timeout=None): <NEW_LINE> <INDENT> params = { 'key': self.api_key, 'q': self.format_string % query } <NEW_LINE> if exactly_one: <NEW_LINE> <INDENT> params['maxResults'] = 1 <NEW_LINE> <DEDENT> url = "&".join((self.api, urlencode(params))) <NEW_LINE> logger.debug("%s.geocode: %s", self.__class__.__name__, url) <NEW_LINE> return self._parse_json( self._call_geocoder(url, timeout=timeout), exactly_one ) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def _parse_json(cls, resources, exactly_one=True): <NEW_LINE> <INDENT> if not len(resources): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> if exactly_one: <NEW_LINE> <INDENT> return cls.parse_resource(resources[0]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return [cls.parse_resource(resource) for resource in resources] <NEW_LINE> <DEDENT> <DEDENT> @classmethod <NEW_LINE> def parse_resource(cls, resource): <NEW_LINE> <INDENT> location = resource['display_name'] <NEW_LINE> latitude = resource['lat'] or None <NEW_LINE> longitude = resource['lon'] or None <NEW_LINE> if latitude and longitude: <NEW_LINE> <INDENT> latitude = float(latitude) <NEW_LINE> longitude = float(longitude) <NEW_LINE> <DEDENT> return Location(location, (latitude, longitude), resource)
Geocoder using MapQuest Open Platform Web Services. Documentation at: https://developer.mapquest.com/documentation/open/
62598fac498bea3a75a57ae0
class ChecklistPage(ContentPage): <NEW_LINE> <INDENT> pass
Checklist page.
62598fac67a9b606de545f8f
class Instance: <NEW_LINE> <INDENT> def __init__(self, name, spicehost = None, spiceport = 0, owner = None, type = None, nth = 0): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.spicehost = spicehost <NEW_LINE> self.spiceport = spiceport <NEW_LINE> self.uuid = smIO.NewUUID() <NEW_LINE> self.owner = owner <NEW_LINE> self.type = type <NEW_LINE> self.nth = nth <NEW_LINE> self.timestamp = time() <NEW_LINE> self.state = None
Config object representing an instance. This class can be extented to store more info about an vm, which can be accessed by getinstanceinfo
62598fac009cb60464d014e3
class Product(models.Model): <NEW_LINE> <INDENT> CONDITION_TYPE = ( ("New", "New"), ("Used", "Used") ) <NEW_LINE> name = models.CharField(max_length=100) <NEW_LINE> owner = models.ForeignKey(User, on_delete=models.CASCADE) <NEW_LINE> description = models.TextField(max_length=500) <NEW_LINE> condition = models.CharField(max_length=100, choices=CONDITION_TYPE) <NEW_LINE> category = models.ForeignKey('Category', on_delete=models.SET_NULL, null=True) <NEW_LINE> brand = models.ForeignKey('Brand', on_delete=models.SET_NULL, null=True) <NEW_LINE> price = models.DecimalField(max_digits=10, decimal_places=5) <NEW_LINE> image = models.ImageField(upload_to='main_product/', blank=True, null=True) <NEW_LINE> created = models.DateTimeField(default=timezone.now) <NEW_LINE> slug = models.SlugField(blank=True, null=True) <NEW_LINE> def save(self, *args, **kwargs): <NEW_LINE> <INDENT> if not self.slug and self.name: <NEW_LINE> <INDENT> self.slug = slugify(self.name) <NEW_LINE> <DEDENT> super(Product, self).save(*args, **kwargs) <NEW_LINE> <DEDENT> class Meta: <NEW_LINE> <INDENT> verbose_name = 'product' <NEW_LINE> verbose_name_plural = 'products' <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.name
Class for managing Product Details
62598fac91f36d47f2230e87
class GetQueueDailyStatisticsStateRequest(object): <NEW_LINE> <INDENT> thrift_spec = ( None, (1, TType.STRING, 'queueName', None, None, ), ) <NEW_LINE> def __init__(self, queueName=None,): <NEW_LINE> <INDENT> self.queueName = queueName <NEW_LINE> <DEDENT> def read(self, iprot): <NEW_LINE> <INDENT> if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None: <NEW_LINE> <INDENT> fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec)) <NEW_LINE> return <NEW_LINE> <DEDENT> iprot.readStructBegin() <NEW_LINE> while True: <NEW_LINE> <INDENT> (fname, ftype, fid) = iprot.readFieldBegin() <NEW_LINE> if ftype == TType.STOP: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> if fid == 1: <NEW_LINE> <INDENT> if ftype == TType.STRING: <NEW_LINE> <INDENT> self.queueName = iprot.readString(); <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> iprot.readFieldEnd() <NEW_LINE> <DEDENT> iprot.readStructEnd() <NEW_LINE> <DEDENT> def write(self, oprot): <NEW_LINE> <INDENT> if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None: <NEW_LINE> <INDENT> oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))) <NEW_LINE> return <NEW_LINE> <DEDENT> oprot.writeStructBegin('GetQueueDailyStatisticsStateRequest') <NEW_LINE> if self.queueName is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('queueName', TType.STRING, 1) <NEW_LINE> oprot.writeString(self.queueName) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> oprot.writeFieldStop() <NEW_LINE> oprot.writeStructEnd() <NEW_LINE> <DEDENT> def validate(self): <NEW_LINE> <INDENT> if self.queueName is None: <NEW_LINE> <INDENT> raise TProtocol.TProtocolException(message='Required field queueName is 
unset!') <NEW_LINE> <DEDENT> return <NEW_LINE> <DEDENT> def __hash__(self): <NEW_LINE> <INDENT> value = 17 <NEW_LINE> value = (value * 31) ^ hash(self.queueName) <NEW_LINE> return value <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> L = ['%s=%r' % (key, value) for key, value in self.__dict__.iteritems()] <NEW_LINE> return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not (self == other)
Attributes: - queueName: Queue name;
62598fac71ff763f4b5e7732
class CeilometerAlarms(ceiloutils.CeilometerScenario): <NEW_LINE> <INDENT> @validation.required_services(consts.Service.CEILOMETER) <NEW_LINE> @validation.required_openstack(users=True) <NEW_LINE> @scenario.configure(context={"cleanup": ["ceilometer"]}) <NEW_LINE> def create_alarm(self, meter_name, threshold, **kwargs): <NEW_LINE> <INDENT> self._create_alarm(meter_name, threshold, kwargs) <NEW_LINE> <DEDENT> @validation.required_services(consts.Service.CEILOMETER) <NEW_LINE> @validation.required_openstack(users=True) <NEW_LINE> @scenario.configure() <NEW_LINE> def list_alarms(self): <NEW_LINE> <INDENT> self._list_alarms() <NEW_LINE> <DEDENT> @validation.required_services(consts.Service.CEILOMETER) <NEW_LINE> @validation.required_openstack(users=True) <NEW_LINE> @scenario.configure(context={"cleanup": ["ceilometer"]}) <NEW_LINE> def create_and_list_alarm(self, meter_name, threshold, **kwargs): <NEW_LINE> <INDENT> alarm = self._create_alarm(meter_name, threshold, kwargs) <NEW_LINE> self._list_alarms(alarm.alarm_id) <NEW_LINE> <DEDENT> @validation.required_services(consts.Service.CEILOMETER) <NEW_LINE> @validation.required_openstack(users=True) <NEW_LINE> @scenario.configure(context={"cleanup": ["ceilometer"]}) <NEW_LINE> def create_and_update_alarm(self, meter_name, threshold, **kwargs): <NEW_LINE> <INDENT> alarm = self._create_alarm(meter_name, threshold, kwargs) <NEW_LINE> alarm_dict_diff = {"description": "Changed Test Description"} <NEW_LINE> self._update_alarm(alarm.alarm_id, alarm_dict_diff) <NEW_LINE> <DEDENT> @validation.required_services(consts.Service.CEILOMETER) <NEW_LINE> @validation.required_openstack(users=True) <NEW_LINE> @scenario.configure(context={"cleanup": ["ceilometer"]}) <NEW_LINE> def create_and_delete_alarm(self, meter_name, threshold, **kwargs): <NEW_LINE> <INDENT> alarm = self._create_alarm(meter_name, threshold, kwargs) <NEW_LINE> self._delete_alarm(alarm.alarm_id) <NEW_LINE> <DEDENT> @validation.required_services(consts.Service.CEILOMETER) 
<NEW_LINE> @validation.required_openstack(users=True) <NEW_LINE> @scenario.configure(context={"cleanup": ["ceilometer"]}) <NEW_LINE> def create_alarm_and_get_history(self, meter_name, threshold, state, timeout=60, **kwargs): <NEW_LINE> <INDENT> alarm = self._create_alarm(meter_name, threshold, kwargs) <NEW_LINE> self._get_alarm_state(alarm.alarm_id) <NEW_LINE> self._get_alarm_history(alarm.alarm_id) <NEW_LINE> self._set_alarm_state(alarm, state, timeout)
Benchmark scenarios for Ceilometer Alarms API.
62598facadb09d7d5dc0a54d
class VelbusConfigFlow(config_entries.ConfigFlow, domain=DOMAIN): <NEW_LINE> <INDENT> VERSION = 1 <NEW_LINE> def __init__(self) -> None: <NEW_LINE> <INDENT> self._errors = {} <NEW_LINE> <DEDENT> def _create_device(self, name: str, prt: str): <NEW_LINE> <INDENT> return self.async_create_entry(title=name, data={CONF_PORT: prt}) <NEW_LINE> <DEDENT> def _test_connection(self, prt): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> controller = velbus.Controller(prt) <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> self._errors[CONF_PORT] = "cannot_connect" <NEW_LINE> return False <NEW_LINE> <DEDENT> controller.stop() <NEW_LINE> return True <NEW_LINE> <DEDENT> def _prt_in_configuration_exists(self, prt: str) -> bool: <NEW_LINE> <INDENT> if prt in velbus_entries(self.hass): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return False <NEW_LINE> <DEDENT> async def async_step_user(self, user_input=None): <NEW_LINE> <INDENT> self._errors = {} <NEW_LINE> if user_input is not None: <NEW_LINE> <INDENT> name = slugify(user_input[CONF_NAME]) <NEW_LINE> prt = user_input[CONF_PORT] <NEW_LINE> if not self._prt_in_configuration_exists(prt): <NEW_LINE> <INDENT> if self._test_connection(prt): <NEW_LINE> <INDENT> return self._create_device(name, prt) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> self._errors[CONF_PORT] = "already_configured" <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> user_input = {} <NEW_LINE> user_input[CONF_NAME] = "" <NEW_LINE> user_input[CONF_PORT] = "" <NEW_LINE> <DEDENT> return self.async_show_form( step_id="user", data_schema=vol.Schema( { vol.Required(CONF_NAME, default=user_input[CONF_NAME]): str, vol.Required(CONF_PORT, default=user_input[CONF_PORT]): str, } ), errors=self._errors, ) <NEW_LINE> <DEDENT> async def async_step_import(self, user_input=None): <NEW_LINE> <INDENT> user_input[CONF_NAME] = "Velbus Import" <NEW_LINE> prt = user_input[CONF_PORT] <NEW_LINE> if self._prt_in_configuration_exists(prt): <NEW_LINE> <INDENT> return 
self.async_abort(reason="already_configured") <NEW_LINE> <DEDENT> return await self.async_step_user(user_input)
Handle a config flow.
62598facdd821e528d6d8ef8
class OutboundNatRule(SubResource): <NEW_LINE> <INDENT> _validation = { 'backend_address_pool': {'required': True}, } <NEW_LINE> _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'allocated_outbound_ports': {'key': 'properties.allocatedOutboundPorts', 'type': 'int'}, 'frontend_ip_configurations': {'key': 'properties.frontendIPConfigurations', 'type': '[SubResource]'}, 'backend_address_pool': {'key': 'properties.backendAddressPool', 'type': 'SubResource'}, 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'etag': {'key': 'etag', 'type': 'str'}, } <NEW_LINE> def __init__(self, **kwargs): <NEW_LINE> <INDENT> super(OutboundNatRule, self).__init__(**kwargs) <NEW_LINE> self.allocated_outbound_ports = kwargs.get('allocated_outbound_ports', None) <NEW_LINE> self.frontend_ip_configurations = kwargs.get('frontend_ip_configurations', None) <NEW_LINE> self.backend_address_pool = kwargs.get('backend_address_pool', None) <NEW_LINE> self.provisioning_state = kwargs.get('provisioning_state', None) <NEW_LINE> self.name = kwargs.get('name', None) <NEW_LINE> self.etag = kwargs.get('etag', None)
Outbound NAT pool of the load balancer. All required parameters must be populated in order to send to Azure. :param id: Resource Identifier. :type id: str :param allocated_outbound_ports: The number of outbound ports to be used for NAT. :type allocated_outbound_ports: int :param frontend_ip_configurations: The Frontend IP addresses of the load balancer. :type frontend_ip_configurations: list[~azure.mgmt.network.v2015_06_15.models.SubResource] :param backend_address_pool: Required. A reference to a pool of DIPs. Outbound traffic is randomly load balanced across IPs in the backend IPs. :type backend_address_pool: ~azure.mgmt.network.v2015_06_15.models.SubResource :param provisioning_state: Gets the provisioning state of the PublicIP resource. Possible values are: 'Updating', 'Deleting', and 'Failed'. :type provisioning_state: str :param name: The name of the resource that is unique within a resource group. This name can be used to access the resource. :type name: str :param etag: A unique read-only string that changes whenever the resource is updated. :type etag: str
62598fac1f5feb6acb162be2
class PasswordResetConfirmView(View): <NEW_LINE> <INDENT> def get(self, request, uidb64, token): <NEW_LINE> <INDENT> if not self._validate_data(request, uidb64, token): <NEW_LINE> <INDENT> return redirect('users:password-reset') <NEW_LINE> <DEDENT> form = PasswordResentConfirmForm() <NEW_LINE> return render(request, 'users/reset_password_change_password.html', context={'form': form}) <NEW_LINE> <DEDENT> def post(self, request, uidb64, token): <NEW_LINE> <INDENT> if not self._validate_data(request, uidb64, token): <NEW_LINE> <INDENT> return redirect('users:password-reset') <NEW_LINE> <DEDENT> form = PasswordResentConfirmForm(request.POST) <NEW_LINE> if form.is_valid(): <NEW_LINE> <INDENT> user_id = force_text(urlsafe_base64_decode(uidb64)) <NEW_LINE> user = form.save(commit=False, pk=user_id) <NEW_LINE> try: <NEW_LINE> <INDENT> validate_password(form.cleaned_data['password'], user) <NEW_LINE> <DEDENT> except (ValidationError, ValueError) as e: <NEW_LINE> <INDENT> form.add_error('password', e) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> form.save(commit=True, pk=user.id) <NEW_LINE> messages.success(request, 'Your password was successfully updated!') <NEW_LINE> user = authenticate(request, username=user.email, password=form.cleaned_data['password']) <NEW_LINE> if user: <NEW_LINE> <INDENT> login(request, user) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> messages.warning(request, 'Unknown error occurred during changing the password.' 
'\nPlease, try again.') <NEW_LINE> return redirect('users:password-reset') <NEW_LINE> <DEDENT> return redirect('/') <NEW_LINE> <DEDENT> <DEDENT> return render(request, 'users/reset_password_change_password.html', context={'form': form}) <NEW_LINE> <DEDENT> def _check_uidb64_and_token(self, uidb64, token): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> user_id = force_text(urlsafe_base64_decode(uidb64)) <NEW_LINE> user = get_user_model().objects.filter(pk=user_id).first() <NEW_LINE> if not user: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> if not default_token_generator.check_token(user, token): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return True <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> def _validate_data(self, request, uidb64, token): <NEW_LINE> <INDENT> if not self._check_uidb64_and_token(uidb64, token): <NEW_LINE> <INDENT> messages.error(request, 'Invalid link. Please, reset your password again in the form below :)') <NEW_LINE> return False <NEW_LINE> <DEDENT> return True <NEW_LINE> <DEDENT> @method_decorator(default_not_authorized) <NEW_LINE> def dispatch(self, *args, **kwargs): <NEW_LINE> <INDENT> return super().dispatch(*args, **kwargs)
View to set a new password for the account. Uses the default implementation provided by Django.
62598fac30bbd7224646995a
class TopicUniqueness: <NEW_LINE> <INDENT> def __init__(self, topics): <NEW_LINE> <INDENT> self.topics = topics <NEW_LINE> self.K = len(topics) <NEW_LINE> self.n = len(self.topics[0]) <NEW_LINE> self.cnt_lookup = {} <NEW_LINE> self.topic_uniqueness = {} <NEW_LINE> <DEDENT> def compute_cnt_per_topic(self, k): <NEW_LINE> <INDENT> count = {word:0 for word in k} <NEW_LINE> for i in range(len(k)): <NEW_LINE> <INDENT> word = k[i] <NEW_LINE> for topic_words in self.topics: <NEW_LINE> <INDENT> if word in topic_words: <NEW_LINE> <INDENT> count[word] += 1 <NEW_LINE> continue <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return count <NEW_LINE> <DEDENT> def form_cnt_lookup_table(self): <NEW_LINE> <INDENT> for k in range(self.K): <NEW_LINE> <INDENT> topic_words = self.topics[k] <NEW_LINE> cnt_k = self.compute_cnt_per_topic(topic_words) <NEW_LINE> self.cnt_lookup[k] = cnt_k <NEW_LINE> <DEDENT> print('Initialized cnt(i,k) for all words across all topics') <NEW_LINE> <DEDENT> def compute_topic_uniquess(self): <NEW_LINE> <INDENT> self.form_cnt_lookup_table() <NEW_LINE> for k in range(self.K): <NEW_LINE> <INDENT> lookup = self.cnt_lookup[k] <NEW_LINE> cnt_values = lookup.values() <NEW_LINE> self.topic_uniqueness[k] = (1/len(lookup)) * sum([1/value for value in cnt_values]) <NEW_LINE> <DEDENT> print('Computed Topic Uniqueness for all topics') <NEW_LINE> <DEDENT> def compute_average_topic_uniqueness(self): <NEW_LINE> <INDENT> return sum(self.topic_uniqueness.values())/len(self.topic_uniqueness) <NEW_LINE> <DEDENT> def get_output(self): <NEW_LINE> <INDENT> topic_uniqueness_for_all_topics = {} <NEW_LINE> self.compute_topic_uniquess() <NEW_LINE> for k in range(self.K): <NEW_LINE> <INDENT> topic_uniqueness_for_all_topics[' '.join(self.topics[k])] = self.topic_uniqueness[k] <NEW_LINE> <DEDENT> return topic_uniqueness_for_all_topics
Implementation of the Topic Uniqueness metric from the Amazon NTM blog : https://aws.amazon.com/blogs/machine-learning/amazon-sagemaker-neural-topic-model-now-supports-auxiliary-vocabulary-channel-new-topic-evaluation-metrics-and-training-subsampling/#:~:text=Word%20embedding%20topic%20coherence%20metric,top%20words%20in%20each%20topic. Topic Uniqueness is a measure of how 'unique' a topic is compared to all the extracted topics. Topics with low topic uniqueness have a chance to be merged to form a bigger topic TU implementation works best for unigram topic words. run TopicUniqueness(topics = topics).get_output() to get uniqueness for each topic. topics = topic words per topic, for all topics
62598fac4c3428357761a27d
class TDNN(Model): <NEW_LINE> <INDENT> def __init__(self, input_, embed_dim=650, feature_maps=[50, 100, 150, 200, 200, 200, 200], kernels=[1,2,3,4,5,6,7], checkpoint_dir="checkpoint", forward_only=False): <NEW_LINE> <INDENT> self.embed_dim = embed_dim <NEW_LINE> self.feature_maps = feature_maps <NEW_LINE> self.kernels = kernels <NEW_LINE> input_ = tf.expand_dims(input_, -1) <NEW_LINE> layers = [] <NEW_LINE> for idx, kernel_dim in enumerate(kernels): <NEW_LINE> <INDENT> reduced_length = input_.get_shape()[1] - kernel_dim + 1 <NEW_LINE> conv = conv2d(input_, feature_maps[idx], kernel_dim , self.embed_dim, name="kernel%d" % idx) <NEW_LINE> pool = tf.nn.max_pool(tf.tanh(conv), [1, reduced_length, 1, 1], [1, 1, 1, 1], 'VALID') <NEW_LINE> layers.append(tf.squeeze(pool)) <NEW_LINE> <DEDENT> if len(kernels) > 1: <NEW_LINE> <INDENT> self.output = tf.concat(layers, 1) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.output = layers[0]
Time-delayed Neural Network (cf. http://arxiv.org/abs/1508.06615v4)
62598facbd1bec0571e150a5
class Asteroid: <NEW_LINE> <INDENT> def __init__(self, x, y, random_size, velocity): <NEW_LINE> <INDENT> if isinstance(x, int) == False or isinstance(y, int) == False: <NEW_LINE> <INDENT> raise ValueError('Please use integer values.') <NEW_LINE> <DEDENT> elif isinstance(random_size, int) == False or isinstance(velocity, int) == False: <NEW_LINE> <INDENT> raise ValueError('Please use integer values.') <NEW_LINE> <DEDENT> elif x < 15 or x > 465: <NEW_LINE> <INDENT> raise ValueError('Invalid first x coordinate: must be equal to or between 15 and 465.') <NEW_LINE> <DEDENT> elif y != -70: <NEW_LINE> <INDENT> raise ValueError('Invalid first y coordinate: must equal -70.') <NEW_LINE> <DEDENT> elif random_size < 10 or random_size > 30: <NEW_LINE> <INDENT> raise ValueError('Invalid size integer: must integer between 10 and 30.') <NEW_LINE> <DEDENT> elif velocity < 1 or velocity > 10: <NEW_LINE> <INDENT> raise ValueError('Invalid velocity, must be between 2 and 6.') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.x = x <NEW_LINE> self.y = y <NEW_LINE> self.random_size = random_size <NEW_LINE> self.x1 = x + random_size <NEW_LINE> self.y1 = y + random_size <NEW_LINE> self.color = 'Gray' <NEW_LINE> self.velocity = velocity <NEW_LINE> <DEDENT> <DEDENT> def render_asteroid(self, canvas): <NEW_LINE> <INDENT> canvas.create_oval(self.x, self.y, self.x1, self.y1, fill = self.color) <NEW_LINE> <DEDENT> def move_asteroid(self): <NEW_LINE> <INDENT> self.y = self.y + self.velocity <NEW_LINE> self.y1 = self.y1 + self.velocity <NEW_LINE> <DEDENT> def get_asteroid_x_value(self): <NEW_LINE> <INDENT> return self.x <NEW_LINE> <DEDENT> def get_asteroid_y_value(self): <NEW_LINE> <INDENT> return self.y <NEW_LINE> <DEDENT> def get_asteroid_x1_value(self): <NEW_LINE> <INDENT> return self.x1 <NEW_LINE> <DEDENT> def get_asteroid_y1_value(self): <NEW_LINE> <INDENT> return self.y1
A class to model an asteroid for the game "SpaceShip Adventure." Invariants: x must be between 15 and 465. y must be -70. Velocity must be between 2 and 6.
62598fac63b5f9789fe8512a
class Filters(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.Type = None <NEW_LINE> self.DeptIds = None <NEW_LINE> self.UserIds = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.Type = params.get("Type") <NEW_LINE> self.DeptIds = params.get("DeptIds") <NEW_LINE> self.UserIds = params.get("UserIds")
可见范围过滤参数
62598fac57b8e32f525080fd
class IHeaderAndFooter(Interface): <NEW_LINE> <INDENT> pass
Utility (function) to add header/footer.
62598fac3317a56b869be52c
class HDFDataset(Dataset): <NEW_LINE> <INDENT> def __init__( self, data_path, batch_size, train_group="train", val_group="validation", test_group="test", dataset="images", target="masks", prefetch=1, preprocessor=None, is_training=None, is_testing=None, ): <NEW_LINE> <INDENT> self.data_path = str(data_path) <NEW_LINE> reader_kwargs = { "all": { "data_path": data_path, "dataset": dataset, "prefetch": prefetch, "preprocessor": preprocessor, }, "train": {"group": train_group}, "val": {"group": val_group}, "test": {"group": test_group}, } <NEW_LINE> super().__init__( batch_size=batch_size, is_training=is_training, is_testing=is_testing, reader_type="HDFReader", reader_kwargs=reader_kwargs, )
A wrapper for the dataset class that makes it easier to create datasets with HDFReaders.
62598face5267d203ee6b8ce
class Integer(object): <NEW_LINE> <INDENT> def __init__(self, min_value=None, max_value=None): <NEW_LINE> <INDENT> self.min_value = min_value <NEW_LINE> self.max_value = max_value
Integer type. Instance variables: min_value -- minimum allowed value max_value -- maximum allowed value
62598fac091ae35668704be2
class HttpError(Exception): <NEW_LINE> <INDENT> def __init__(self, status_code, message): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.status_code = status_code <NEW_LINE> self.message = message <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return f'Received HTTP status code {self.status_code}: {self.message}. Expected 200: OK'
HTTP status code received was not 200 OK Attributes ---------- status_code : int http status code message : str http text corresponding to status code
62598face76e3b2f99fd89fa