code
stringlengths 4
4.48k
| docstring
stringlengths 1
6.45k
| _id
stringlengths 24
24
|
|---|---|---|
class PickTaretMask(): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.__layer = Lambda(lambda inputs: self.__pick_target_mask(*inputs) , output_shape=self.__pick_target_mask_output_shape) <NEW_LINE> <DEDENT> def __call__(self, inputs): <NEW_LINE> <INDENT> return self.__layer(inputs) <NEW_LINE> <DEDENT> def __pick_target_mask(self, cls_labels, masks): <NEW_LINE> <INDENT> mask_shape = KB.shape(masks) <NEW_LINE> batch = mask_shape[0] <NEW_LINE> reg_num = mask_shape[1] <NEW_LINE> mask_h = mask_shape[3] <NEW_LINE> mask_w = mask_shape[4] <NEW_LINE> dim1 = KB.flatten(KB.repeat(KB.expand_dims(KB.arange(batch)), reg_num)) <NEW_LINE> dim2 = KB.tile(KB.arange(reg_num), [batch]) <NEW_LINE> dim3 = KB.cast(KB.flatten(cls_labels), tf.int32) <NEW_LINE> ids = KB.stack([dim1, dim2, dim3], axis=1) <NEW_LINE> squeezed_masks = tf.gather_nd(masks, ids) <NEW_LINE> return KB.reshape(squeezed_masks, [batch, reg_num, mask_h, mask_w]) <NEW_LINE> <DEDENT> def __pick_target_mask_output_shape(self, inputs): <NEW_LINE> <INDENT> return [None, inputs[1][1], inputs[1][3], inputs[1][4]]
|
TODO : Write description
Pick Target Mask Layer class
|
62598ffb0a366e3fb87dd3b0
|
class UrlsMeta(type): <NEW_LINE> <INDENT> def __new__(meta, name, bases, attrs): <NEW_LINE> <INDENT> validators = collections.defaultdict(set) <NEW_LINE> for attr in attrs.values(): <NEW_LINE> <INDENT> if hasattr(attr, "validates"): <NEW_LINE> <INDENT> validators[attr.validates].add(attr) <NEW_LINE> <DEDENT> <DEDENT> attrs["_validators"] = validators <NEW_LINE> cls = type.__new__(meta, name, bases, attrs) <NEW_LINE> return cls
|
This metaclass aggregates the validator functions marked
using the Urls.validate decorator.
|
62598ffb187af65679d2a0d8
|
class Publicacao(BlogPost): <NEW_LINE> <INDENT> autoria = models.CharField(max_length=150, verbose_name='Autoria') <NEW_LINE> categorias = models.ManyToManyField(AreaDeAtuacao, verbose_name='Categorias') <NEW_LINE> ano_de_publicacao = models.IntegerField(verbose_name='Ano de publicação', choices=YEAR_CHOICES, default=current_year) <NEW_LINE> arquivo_publicacao = models.FileField(upload_to='publicacoes/', verbose_name='Arquivo da publicação') <NEW_LINE> class Meta: <NEW_LINE> <INDENT> verbose_name = 'Publicação' <NEW_LINE> verbose_name_plural = 'Publicações' <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.title
|
Publicações relacionadas à temática do site.
|
62598ffb15fb5d323ce7f702
|
class PhotoAlbumsResultSet(ResultSet): <NEW_LINE> <INDENT> def get_Response(self): <NEW_LINE> <INDENT> return self._output.get('Response', None)
|
Retrieve the value for the "Response" output from this choreography execution. (The response from Facebook. Corresponds to the ResponseFormat input. Defaults to JSON.)
|
62598ffb3cc13d1c6d46610d
|
class IntraChainDistSqLocal(AllParticlePosLocal, analysis_IntraChainDistSq): <NEW_LINE> <INDENT> def __init__(self, system, fpl): <NEW_LINE> <INDENT> if not pmi._PMIComm or pmi._MPIcomm.rank in pmi._PMIComm.getMPIcpugroup(): <NEW_LINE> <INDENT> cxxinit(self, analysis_IntraChainDistSq, system, fpl) <NEW_LINE> <DEDENT> <DEDENT> def compute(self): <NEW_LINE> <INDENT> return self.cxxclass.compute(self)
|
The (local) IntraChainDistSq object
|
62598ffb627d3e7fe0e07860
|
class getCoinPurchaseHistory_args(object): <NEW_LINE> <INDENT> thrift_spec = ( None, None, (2, TType.STRUCT, 'request', (CoinHistoryCondition, CoinHistoryCondition.thrift_spec), None, ), ) <NEW_LINE> def __init__(self, request=None,): <NEW_LINE> <INDENT> self.request = request <NEW_LINE> <DEDENT> def read(self, iprot): <NEW_LINE> <INDENT> if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: <NEW_LINE> <INDENT> iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) <NEW_LINE> return <NEW_LINE> <DEDENT> iprot.readStructBegin() <NEW_LINE> while True: <NEW_LINE> <INDENT> (fname, ftype, fid) = iprot.readFieldBegin() <NEW_LINE> if ftype == TType.STOP: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> if fid == 2: <NEW_LINE> <INDENT> if ftype == TType.STRUCT: <NEW_LINE> <INDENT> self.request = CoinHistoryCondition() <NEW_LINE> self.request.read(iprot) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> iprot.readFieldEnd() <NEW_LINE> <DEDENT> iprot.readStructEnd() <NEW_LINE> <DEDENT> def write(self, oprot): <NEW_LINE> <INDENT> if oprot._fast_encode is not None and self.thrift_spec is not None: <NEW_LINE> <INDENT> oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) <NEW_LINE> return <NEW_LINE> <DEDENT> oprot.writeStructBegin('getCoinPurchaseHistory_args') <NEW_LINE> if self.request is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('request', TType.STRUCT, 2) <NEW_LINE> self.request.write(oprot) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> oprot.writeFieldStop() <NEW_LINE> oprot.writeStructEnd() <NEW_LINE> <DEDENT> def validate(self): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] <NEW_LINE> return '%s(%s)' % (self.__class__.__name__, ', 
'.join(L)) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not (self == other)
|
Attributes:
- request
|
62598ffb4c3428357761ac6e
|
class Insane(object): <NEW_LINE> <INDENT> def __init__(self, stats): <NEW_LINE> <INDENT> self.deaths = stats['deaths_team_insane'] if 'deaths_team_insane' in stats else None <NEW_LINE> self.losses = stats['losses_team_insane'] if 'losses_team_insane' in stats else None <NEW_LINE> self.kills = stats['kills_team_insane'] if 'kills_team_insane' in stats else None <NEW_LINE> self.wins = stats['wins_team_insane'] if 'wins_team_insane' in stats else None
|
The player's teams insane SkyWars stats.
:param stats: The raw SkyWars stats data from the API.
:type stats: dict
|
62598ffb0a366e3fb87dd3b8
|
@ui.register_ui( link_facebook_login=ui.Link( By.CSS_SELECTOR, '.list-group-item span[class*="facebook-logo"]'), link_google_login=ui.Link( By.CSS_SELECTOR, '.list-group-item span[class*="google-logo"]'), link_amazon_login=ui.Link( By.CSS_SELECTOR, '.list-group-item span[class*="amazon-logo"]'), link_imdb_login=ui.Link( By.CSS_SELECTOR, '.list-group-item span[class*="imdb-logo"]'), link_create_account=ui.Link( By.CSS_SELECTOR, '.list-group-item.create-account')) <NEW_LINE> class PageLogin(PageBase): <NEW_LINE> <INDENT> url = '/registration/signin'
|
Login IMDB page.
|
62598ffb627d3e7fe0e07868
|
class TestFunctionsMustbeLowercaseOnly(TestCase): <NEW_LINE> <INDENT> def test_pass_lowercase_call(self): <NEW_LINE> <INDENT> result = run_linter_throw("lowercase_func (ARGUMENT)\n", whitelist=["style/lowercase_func"]) <NEW_LINE> self.assertTrue(result) <NEW_LINE> <DEDENT> def test_fail_uppercase_call(self): <NEW_LINE> <INDENT> with ExpectedException(LinterFailure): <NEW_LINE> <INDENT> run_linter_throw("UPPERCASE_FUNC (ARGUMENT)\n", whitelist=["style/lowercase_func"]) <NEW_LINE> <DEDENT> <DEDENT> def test_replace_uppercase_call(self): <NEW_LINE> <INDENT> func_name = "UPPERCASE_FUNC" <NEW_LINE> error_line = "{0} (ARGUMENT)\n".format(func_name) <NEW_LINE> replacement_line = "{0} (ARGUMENT)\n".format(func_name.lower()) <NEW_LINE> def get_replacement(): <NEW_LINE> <INDENT> run_linter_throw(error_line, whitelist=["style/lowercase_func"]) <NEW_LINE> <DEDENT> exception = self.assertRaises(LinterFailure, get_replacement) <NEW_LINE> self.assertEqual(replacement(exception), (1, replacement_line)) <NEW_LINE> <DEDENT> def test_pass_lowercase_func_def(self): <NEW_LINE> <INDENT> result = run_linter_throw("function (lowercase_func) endfunction ()\n", whitelist=["style/lowercase_func"]) <NEW_LINE> self.assertTrue(result) <NEW_LINE> <DEDENT> def test_fail_uppercase_func_def(self): <NEW_LINE> <INDENT> with ExpectedException(LinterFailure): <NEW_LINE> <INDENT> run_linter_throw("function (UPPERCASE_FUNC) endfunction ()\n", whitelist=["style/lowercase_func"]) <NEW_LINE> <DEDENT> <DEDENT> def test_replace_uppercase_func_def(self): <NEW_LINE> <INDENT> func_name = "UPPERCASE_FUNC" <NEW_LINE> lower_name = func_name.lower() <NEW_LINE> error = "function ({0}) endfunction ()\n".format(func_name) <NEW_LINE> expected_repl = "function ({0}) endfunction ()\n".format(lower_name) <NEW_LINE> def get_replacement(): <NEW_LINE> <INDENT> run_linter_throw(error, whitelist=["style/lowercase_func"]) <NEW_LINE> <DEDENT> exception = self.assertRaises(LinterFailure, get_replacement) <NEW_LINE> 
self.assertEqual(replacement(exception), (1, expected_repl)) <NEW_LINE> <DEDENT> def test_pass_lowercase_macro_def(self): <NEW_LINE> <INDENT> result = run_linter_throw("macro (lowercase_macro) endmacro ()\n", whitelist=["style/lowercase_func"]) <NEW_LINE> self.assertTrue(result) <NEW_LINE> <DEDENT> def test_fail_uppercase_macro(self): <NEW_LINE> <INDENT> with ExpectedException(LinterFailure): <NEW_LINE> <INDENT> run_linter_throw("macro (UPPERCASE_MACRO) endmacro ()\n", whitelist=["style/lowercase_func"]) <NEW_LINE> <DEDENT> <DEDENT> def test_replace_uppercase_macro(self): <NEW_LINE> <INDENT> macro_name = "UPPERCASE_MACRO" <NEW_LINE> lower_name = macro_name.lower() <NEW_LINE> error = "macro ({0}) endmacro ()\n".format(macro_name) <NEW_LINE> expected_replacement = "macro ({0}) endmacro ()\n".format(lower_name) <NEW_LINE> def get_replacement(): <NEW_LINE> <INDENT> run_linter_throw(error, whitelist=["style/lowercase_func"]) <NEW_LINE> <DEDENT> exception = self.assertRaises(LinterFailure, get_replacement) <NEW_LINE> self.assertEqual(replacement(exception), (1, expected_replacement))
|
Test case for functions and macros being lowercase.
|
62598ffb187af65679d2a0dd
|
class SingleCursorDatabaseConnector(object): <NEW_LINE> <INDENT> def __init__(self, database, host='localhost', port=5439, user='postgres', password='postgres', autocommit=True): <NEW_LINE> <INDENT> self.database = database <NEW_LINE> self.host = host <NEW_LINE> self.port = port <NEW_LINE> self.user = user <NEW_LINE> self.password = password <NEW_LINE> self.autocommit = autocommit <NEW_LINE> self._cursor = None <NEW_LINE> self._connection = None <NEW_LINE> <DEDENT> def _connect(self): <NEW_LINE> <INDENT> self._connection = psycopg2.connect(database=self.database, host=self.host, port=self.port, user=self.user, password=self.password) <NEW_LINE> self._connection.autocommit = self.autocommit <NEW_LINE> return self._connection <NEW_LINE> <DEDENT> def _get_connection(self): <NEW_LINE> <INDENT> if self._connection and not self._connection.closed: <NEW_LINE> <INDENT> return self._connection <NEW_LINE> <DEDENT> self._cursor = None <NEW_LINE> return self._connect() <NEW_LINE> <DEDENT> def _get_cursor(self): <NEW_LINE> <INDENT> if self._cursor and not self._cursor.closed: <NEW_LINE> <INDENT> return self._cursor <NEW_LINE> <DEDENT> self._cursor = self._get_connection().cursor() <NEW_LINE> return self._cursor <NEW_LINE> <DEDENT> def execute(self, *args, **kwargs): <NEW_LINE> <INDENT> return self._get_cursor().execute(*args, **kwargs) <NEW_LINE> <DEDENT> def fetchone(self): <NEW_LINE> <INDENT> return self._get_cursor().fetchone() <NEW_LINE> <DEDENT> def fetchmany(self, *args, **kwargs): <NEW_LINE> <INDENT> return self._get_cursor().fetchmany(*args, **kwargs) <NEW_LINE> <DEDENT> def fetchall(self): <NEW_LINE> <INDENT> return self._get_cursor().fetchall() <NEW_LINE> <DEDENT> def __getattr__(self, name): <NEW_LINE> <INDENT> return getattr(self._get_cursor(), name) <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> return self._get_cursor() <NEW_LINE> <DEDENT> @property <NEW_LINE> def statusmessage(self): <NEW_LINE> <INDENT> cursor = self._get_cursor() <NEW_LINE> if 
cursor: <NEW_LINE> <INDENT> return cursor.statusmessage <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> <DEDENT> def commit(self): <NEW_LINE> <INDENT> self._get_connection().commit() <NEW_LINE> <DEDENT> def rollback(self): <NEW_LINE> <INDENT> self._get_connection().rollback() <NEW_LINE> <DEDENT> def close(self): <NEW_LINE> <INDENT> self._get_connection().close()
|
Wraps the psycopg2 connection and cursor functions to reconnect on close.
SingleCursorDatabaseConnector maintains a single cursor to a psycopg2
database connection. Lazy initialization is used, so all connection and
cursor management happens on the method calls.
|
62598ffb4c3428357761ac76
|
class AH_polytope(): <NEW_LINE> <INDENT> def __init__(self,t,T,P,color='blue'): <NEW_LINE> <INDENT> self.T=T <NEW_LINE> self.t=np.atleast_2d(t) <NEW_LINE> self.P=P <NEW_LINE> self.type='AH_polytope' <NEW_LINE> self.n=T.shape[0] <NEW_LINE> if T.shape[1]!=P.H.shape[1]: <NEW_LINE> <INDENT> ValueError("Error: not appropriate T size, it is",T.shape[1],P.n) <NEW_LINE> <DEDENT> self.method="Gurobi" <NEW_LINE> self.hash_value = None <NEW_LINE> self.distance_program=None <NEW_LINE> self.vertices_2D=None <NEW_LINE> self.color=color <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "AH_polytope from R^%d to R^%d"%(self.P.n,self.n) <NEW_LINE> <DEDENT> def __hash__(self): <NEW_LINE> <INDENT> if self.hash_value is None: <NEW_LINE> <INDENT> self.hash_value = hash(self.P) + hash(str(np.hstack([self.T, self.t]))) <NEW_LINE> <DEDENT> return self.hash_value <NEW_LINE> <DEDENT> def copy(self): <NEW_LINE> <INDENT> return AH_polytope(self.t,self.T,H_polytope(self.P.H,self.P.h)) <NEW_LINE> <DEDENT> def __add__(self,other): <NEW_LINE> <INDENT> from pypolycontain.operations import minkowski_sum <NEW_LINE> return minkowski_sum(self,other) <NEW_LINE> <DEDENT> def __rmul__(self,scalar): <NEW_LINE> <INDENT> return AH_polytope(t=self.t*scalar,T=self.T*scalar,P=self.P)
|
An AH_polytope is an affine transformation of an H-polytope and is defined as:
.. math::
\mathbb{Q}=\{t+Tx | x \in \mathbb{R}^p, H x \le h \}
Attributes:
* P: The underlying H-polytope :math:`P:\{x \in \mathbb{R}^p | Hx \le h\}`
* T: :math:`\mathbb{R}^{n \times p}` matrix: linear transformation
* t: :math:`\mathbb{R}^{n}` vector: translation
|
62598ffb627d3e7fe0e07870
|
class Disableable: <NEW_LINE> <INDENT> def __init__(self, *args, enabled=True, **kwargs): <NEW_LINE> <INDENT> super().__init__(*args, **kwargs) <NEW_LINE> self.enabled = enabled <NEW_LINE> <DEDENT> def selectable(self): <NEW_LINE> <INDENT> return self.enabled
|
Mixin to make widgets selectable by setting widget.enabled.
|
62598ffb4c3428357761ac7c
|
class Admin(User): <NEW_LINE> <INDENT> def __init__(self, first_name, last_name, username, email_address): <NEW_LINE> <INDENT> super().__init__(first_name, last_name, username, email_address) <NEW_LINE> self.privileges = Privileges()
|
Class describing an Admin user profile.
|
62598ffb187af65679d2a0e3
|
class TestableLimitLineParser(LineParserWrapper, lineparsermod.LimitLineParser): <NEW_LINE> <INDENT> pass
|
Wrapper over LimitLineParser to make it testable.
|
62598ffb3cc13d1c6d466123
|
class RaisingJob(NullJob): <NEW_LINE> <INDENT> def run(self): <NEW_LINE> <INDENT> raise RaisingJobException(self.message)
|
A job that raises when it runs.
|
62598ffb627d3e7fe0e07876
|
class Submarine(Ship): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> Ship.__init__(self, "submarine", 3)
|
Submarine piece.
|
62598ffb187af65679d2a0e4
|
class MonkeyPatchDefaultTestCase(test.NoDBTestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> super(MonkeyPatchDefaultTestCase, self).setUp() <NEW_LINE> self.flags( monkey_patch=True) <NEW_LINE> <DEDENT> def test_monkey_patch_default_mod(self): <NEW_LINE> <INDENT> for module in CONF.monkey_patch_modules: <NEW_LINE> <INDENT> m = module.split(':', 1) <NEW_LINE> importlib.import_module(m[0]) <NEW_LINE> decorator_name = m[1].rsplit('.', 1) <NEW_LINE> decorator_module = importlib.import_module(decorator_name[0]) <NEW_LINE> getattr(decorator_module, decorator_name[1])
|
Unit test for default monkey_patch_modules value.
|
62598ffb3cc13d1c6d466125
|
class AutoscalingLinksTest(ScalingGroupWebhookFixture): <NEW_LINE> <INDENT> def test_scaling_group_links(self): <NEW_LINE> <INDENT> self.assertTrue(self.group.links is not None, msg='No links returned upon scaling group creation' ' for group {0}'.format(self.group.id)) <NEW_LINE> self._validate_links(self.group.links.self, self.group.id) <NEW_LINE> get_group_resp = self.autoscale_client. view_manifest_config_for_scaling_group(self.group.links.self) <NEW_LINE> self.assertEqual(self.group.id, get_group_resp.entity.id) <NEW_LINE> <DEDENT> def test_scaling_policy_links(self): <NEW_LINE> <INDENT> self.assertTrue(self.policy['links'] is not None, msg='No links returned upon scaling policy creation' ' for group {0}'.format(self.group.id)) <NEW_LINE> self._validate_links(self.policy['links'].self, self.policy['id']) <NEW_LINE> get_policy_resp = self.autoscale_client.get_policy_details( self.group.id, self.policy['links'].self) <NEW_LINE> self.assertEqual(self.policy['id'], (get_policy_resp.entity).id) <NEW_LINE> <DEDENT> def test_webhook_links(self): <NEW_LINE> <INDENT> self.assertTrue(self.webhook['links'] is not None, msg='No links returned upon webhook creation' ' for group {0}'.format(self.group.id)) <NEW_LINE> self._validate_links(self.webhook['links'].self, self.webhook['id']) <NEW_LINE> get_webhook_resp = self.autoscale_client.get_webhook( self.group.id, self.policy['id'], self.webhook['links'].self) <NEW_LINE> self.assertEqual(self.webhook['id'], (get_webhook_resp.entity).id) <NEW_LINE> <DEDENT> def test_webhook_capability_link(self): <NEW_LINE> <INDENT> endpoint = self.url.strip(str(self.tenant_id)) + 'execute/' <NEW_LINE> self.assertTrue( endpoint in self.webhook['links'].capability, msg='The url used to create the group {0} doesnt match' ' the url in self link{1}' .format(endpoint, self.webhook['links'].capability)) <NEW_LINE> <DEDENT> def _has_version(self, link): <NEW_LINE> <INDENT> return re.search('^/v+\d', urlparse(link).path) is not None <NEW_LINE> 
<DEDENT> def _validate_links(self, self_link, item_id): <NEW_LINE> <INDENT> self.assertTrue( item_id in self_link, msg='The ID does not exist in self links for item_id {0}' .format(item_id)) <NEW_LINE> self.assertTrue( self.url in self_link, msg='The URL used to create the group doesnt match the ' 'url in self link for id {0}'.format(item_id)) <NEW_LINE> self.assertTrue(self._has_version(self_link))
|
Verify links on the autoscaling api response calls.
|
62598ffb0a366e3fb87dd3cb
|
class SocketHandler(websocket.WebSocketHandler): <NEW_LINE> <INDENT> def check_origin(self, origin): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> def open(self): <NEW_LINE> <INDENT> if self not in WSS: <NEW_LINE> <INDENT> WSS.append(self) <NEW_LINE> <DEDENT> <DEDENT> def on_message(self, message): <NEW_LINE> <INDENT> print(message) <NEW_LINE> <DEDENT> def on_close(self): <NEW_LINE> <INDENT> if self in WSS: <NEW_LINE> <INDENT> WSS.remove(self)
|
The websocket handler defines the basic functionality of the
websockets. Javascript snippet to try in browser:
socket = new WebSocket('ws://127.0.0.1:5000/v1/ws');
|
62598ffb15fb5d323ce7f71c
|
class SpatialAnalysisPersonLineCrossingLineEvents(msrest.serialization.Model): <NEW_LINE> <INDENT> _validation = { 'line': {'required': True}, } <NEW_LINE> _attribute_map = { 'line': {'key': 'line', 'type': 'NamedLineBase'}, 'events': {'key': 'events', 'type': '[SpatialAnalysisPersonLineCrossingEvent]'}, } <NEW_LINE> def __init__( self, *, line: "NamedLineBase", events: Optional[List["SpatialAnalysisPersonLineCrossingEvent"]] = None, **kwargs ): <NEW_LINE> <INDENT> super(SpatialAnalysisPersonLineCrossingLineEvents, self).__init__(**kwargs) <NEW_LINE> self.line = line <NEW_LINE> self.events = events
|
SpatialAnalysisPersonLineCrossingLineEvents.
All required parameters must be populated in order to send to Azure.
:ivar line: Required. The named line.
:vartype line: ~azure.media.videoanalyzer.edge.models.NamedLineBase
:ivar events: The event configuration.
:vartype events:
list[~azure.media.videoanalyzer.edge.models.SpatialAnalysisPersonLineCrossingEvent]
|
62598ffb4c3428357761ac84
|
class SearchForm(forms.ModelForm): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super(SearchForm, self).__init__(*args, **kwargs) <NEW_LINE> default_text = u'Search...' <NEW_LINE> self.fields['query'].widget.attrs['value'] = default_text <NEW_LINE> self.fields['query'].widget.attrs['onfocus'] = "if (this.value == '" + default_text + "')this.value = ''" <NEW_LINE> <DEDENT> include = ('query',) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> model = SearchTerm
|
Форма поиска
|
62598ffc3cc13d1c6d46612b
|
class Forecaster(pl.LightningModule): <NEW_LINE> <INDENT> def __init__(self, num_bands: int, output_timesteps: int, hparams: Namespace,) -> None: <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.output_timesteps = output_timesteps <NEW_LINE> self.lstm = UnrolledLSTM( input_size=num_bands, hidden_size=hparams.forecasting_vector_size, dropout=hparams.forecasting_dropout, batch_first=True, ) <NEW_LINE> self.to_bands = nn.Linear( in_features=hparams.forecasting_vector_size, out_features=num_bands ) <NEW_LINE> <DEDENT> def forward(self, x: torch.Tensor) -> torch.Tensor: <NEW_LINE> <INDENT> hidden_tuple: Optional[Tuple[torch.Tensor, torch.Tensor]] = None <NEW_LINE> input_timesteps = x.shape[1] <NEW_LINE> assert input_timesteps >= 1 <NEW_LINE> predicted_output: List[torch.Tensor] = [] <NEW_LINE> for i in range(input_timesteps): <NEW_LINE> <INDENT> input = x[:, i : i + 1, :] <NEW_LINE> output, hidden_tuple = self.lstm(input, hidden_tuple) <NEW_LINE> output = self.to_bands(torch.transpose(output[0, :, :, :], 0, 1)) <NEW_LINE> predicted_output.append(output) <NEW_LINE> <DEDENT> for i in range(self.output_timesteps - 1): <NEW_LINE> <INDENT> output, hidden_tuple = self.lstm(output, hidden_tuple) <NEW_LINE> output = self.to_bands(torch.transpose(output[0, :, :, :], 0, 1)) <NEW_LINE> predicted_output.append(output) <NEW_LINE> <DEDENT> return torch.cat(predicted_output, dim=1) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def add_model_specific_args(parent_parser: ArgumentParser) -> ArgumentParser: <NEW_LINE> <INDENT> parser = ArgumentParser(parents=[parent_parser], add_help=False) <NEW_LINE> parser_args: Dict[str, Tuple[Type, Any]] = { "--forecasting_vector_size": (int, 256), "--forecasting_dropout": (float, 0.2), } <NEW_LINE> for key, vals in parser_args.items(): <NEW_LINE> <INDENT> parser.add_argument(key, type=vals[0], default=vals[1]) <NEW_LINE> <DEDENT> return parser
|
An LSTM based model to predict a multispectral sequence.
:param input_size: The number of input bands passed to the model. The
input vector is expected to be of shape [batch_size, timesteps, bands]
:param output_timesteps: The number of timesteps to predict
hparams
--------
The default values for these parameters are set in add_model_specific_args
:param hparams.forecasting_vector_size: The size of the hidden vector in the LSTM
Default = 128
:param hparams.forecasting_dropout: Variational dropout ratio to apply between timesteps in
the LSTM base. Default = 0.2
|
62598ffc15fb5d323ce7f724
|
class TestFlatteners(unittest.TestCase): <NEW_LINE> <INDENT> def test_flattener_returns_dict_flattener_for_json(self): <NEW_LINE> <INDENT> testing_file = "./tests/json_testing_sample.json" <NEW_LINE> file_type = get_data_type(testing_file) <NEW_LINE> self.assertEqual(flatteners[file_type], flatten_dict) <NEW_LINE> <DEDENT> def test_flattener_returns_already_flat_flattener_for_text(self): <NEW_LINE> <INDENT> testing_file = "./tests/ingest_testing_sample.txt" <NEW_LINE> file_type = get_data_type(testing_file) <NEW_LINE> self.assertEqual(flatteners[file_type], already_flat)
|
Tests flatenner returned based on Data Type
|
62598ffc4c3428357761ac8a
|
class AssignPermissionToServiceKeyUserRole(UpgradeStep): <NEW_LINE> <INDENT> def __call__(self): <NEW_LINE> <INDENT> self.install_upgrade_profile()
|
Assign permission to ServiceKeyUser role.
|
62598ffc0a366e3fb87dd3d9
|
class Pci(extensions.V3APIExtensionBase): <NEW_LINE> <INDENT> name = "PCIAccess" <NEW_LINE> alias = ALIAS <NEW_LINE> version = 1 <NEW_LINE> def get_resources(self): <NEW_LINE> <INDENT> resources = [extensions.ResourceExtension(ALIAS, PciController(), collection_actions={'detail': 'GET'})] <NEW_LINE> return resources <NEW_LINE> <DEDENT> def get_controller_extensions(self): <NEW_LINE> <INDENT> server_extension = extensions.ControllerExtension( self, 'servers', PciServerController()) <NEW_LINE> compute_extension = extensions.ControllerExtension( self, 'os-hypervisors', PciHypervisorController()) <NEW_LINE> return [server_extension, compute_extension]
|
Pci access support.
|
62598ffc4c3428357761ac94
|
class JSON(PandasDataFrame): <NEW_LINE> <INDENT> def __init__(self, path, nrecs=1000, transformers=None, **kwargs): <NEW_LINE> <INDENT> super(JSON, self).__init__(pd.read_json(path, **kwargs), nrecs=nrecs, transformers=transformers)
|
Create a JSON data message handler
Parameters
----------
path : string
Path to JSON file.
nrecs : int, optional
Number of records to send at a time
**kwargs : keyword arguments, optional
Arguments sent to :func:`pandas.read_json`.
See Also
--------
:func:`pandas.read_json`
:class:`PandasDataFrame`
Returns
-------
:func:`JSON` data message handler object
|
62598ffc15fb5d323ce7f730
|
class FluentLoggerFactory: <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def from_url(cls, url): <NEW_LINE> <INDENT> parts = urlsplit(url) <NEW_LINE> if parts.scheme != 'fluent': <NEW_LINE> <INDENT> raise ValueError('Invalid URL: "%s".' % url) <NEW_LINE> <DEDENT> if parts.query or parts.fragment: <NEW_LINE> <INDENT> raise ValueError('Invalid URL: "%s".' % url) <NEW_LINE> <DEDENT> netloc = parts.netloc.rsplit(':', 1) <NEW_LINE> if len(netloc) == 1: <NEW_LINE> <INDENT> host, port = netloc[0], 24224 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> host, port = netloc <NEW_LINE> try: <NEW_LINE> <INDENT> port = int(port) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> raise ValueError('Invalid URL: "%s".' % url) <NEW_LINE> <DEDENT> <DEDENT> return FluentLoggerFactory(parts.path[1:], host, port) <NEW_LINE> <DEDENT> def __init__(self, app, host, port): <NEW_LINE> <INDENT> self._app = app <NEW_LINE> self._host = host <NEW_LINE> self._port = port <NEW_LINE> self._sender = fluent.sender.FluentSender(app, host=host, port=port) <NEW_LINE> <DEDENT> @property <NEW_LINE> def host(self): <NEW_LINE> <INDENT> return self._host <NEW_LINE> <DEDENT> @property <NEW_LINE> def port(self): <NEW_LINE> <INDENT> return self._port <NEW_LINE> <DEDENT> @property <NEW_LINE> def app(self): <NEW_LINE> <INDENT> return self._app <NEW_LINE> <DEDENT> def __call__(self): <NEW_LINE> <INDENT> return FluentLogger(self._sender)
|
For use with ``structlog.configure(logger_factory=...)``.
|
62598ffc3cc13d1c6d46613b
|
class V1beta1_LivenessProbe(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.swaggerTypes = { 'exec_info': 'V1beta1_ExecAction', 'httpGet': 'V1beta1_HTTPGetAction', 'initialDelaySeconds': 'long', 'tcpSocket': 'V1beta1_TCPSocketAction', 'timeoutSeconds': 'long' } <NEW_LINE> self.exec_info = None <NEW_LINE> self.httpGet = None <NEW_LINE> self.initialDelaySeconds = None <NEW_LINE> self.tcpSocket = None <NEW_LINE> self.timeoutSeconds = None
|
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
|
62598ffc0a366e3fb87dd3e1
|
class Parameters(object): <NEW_LINE> <INDENT> DOMAIN_ID = build_v3_parameter_relation('domain_id') <NEW_LINE> ENDPOINT_ID = build_v3_parameter_relation('endpoint_id') <NEW_LINE> GROUP_ID = build_v3_parameter_relation('group_id') <NEW_LINE> POLICY_ID = build_v3_parameter_relation('policy_id') <NEW_LINE> PROJECT_ID = build_v3_parameter_relation('project_id') <NEW_LINE> REGION_ID = build_v3_parameter_relation('region_id') <NEW_LINE> ROLE_ID = build_v3_parameter_relation('role_id') <NEW_LINE> SERVICE_ID = build_v3_parameter_relation('service_id') <NEW_LINE> USER_ID = build_v3_parameter_relation('user_id')
|
Relationships for Common parameters.
|
62598ffc15fb5d323ce7f732
|
class FBFogMode (object): <NEW_LINE> <INDENT> kFBFogModeLinear=property(doc="Linear falloff. ") <NEW_LINE> kFBFogModeExponential=property(doc="Exponential falloff. ") <NEW_LINE> kFBFogModeSquareExponential=property(doc="Squared exponential falloff. ") <NEW_LINE> pass
|
Fog falloff modes.
|
62598ffc0a366e3fb87dd3e5
|
class Solution(object): <NEW_LINE> <INDENT> def lowestCommonAncestor(self, root, p, q): <NEW_LINE> <INDENT> if not root or root == p or root == q: <NEW_LINE> <INDENT> return root <NEW_LINE> <DEDENT> left = self.lowestCommonAncestor(root.left, p, q) <NEW_LINE> right = self.lowestCommonAncestor(root.right, p, q) <NEW_LINE> if left and right: <NEW_LINE> <INDENT> return root <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return left or right
|
Recursive method: DFS.
If the current (sub)tree contains both p and q, then the function result is their LCA.
If only one of them is in that subtree, then the result is that one of them.
If neither are in that subtree, the result is null/None/nil.
More version can be found here:
https://discuss.leetcode.com/topic/18561/4-lines-c-java-python-ruby
|
62598ffc187af65679d2a0f3
|
class _StorageDefaultObjectAclsRepository( repository_mixins.ListQueryMixin, _base_repository.GCPRepository): <NEW_LINE> <INDENT> def __init__(self, **kwargs): <NEW_LINE> <INDENT> super(_StorageDefaultObjectAclsRepository, self).__init__( component='defaultObjectAccessControls', list_key_field='bucket', **kwargs)
|
Implementation of Storage Default Object Access Controls repository.
|
62598ffc0a366e3fb87dd3e7
|
class Classifier(nn.Module): <NEW_LINE> <INDENT> def __init__(self, in_channels, num_anchors, num_classes, num_layers, pyramid_levels=5, onnx_export=False): <NEW_LINE> <INDENT> super(Classifier, self).__init__() <NEW_LINE> self.num_anchors = num_anchors <NEW_LINE> self.num_classes = num_classes <NEW_LINE> self.num_layers = num_layers <NEW_LINE> self.conv_list = nn.ModuleList( [SeparableConvBlock(in_channels, in_channels, norm=False, activation=False) for i in range(num_layers)]) <NEW_LINE> self.bn_list = nn.ModuleList( [nn.ModuleList([nn.BatchNorm2d(in_channels, momentum=0.01, eps=1e-3) for i in range(num_layers)]) for j in range(pyramid_levels)]) <NEW_LINE> self.header = SeparableConvBlock(in_channels, num_anchors * num_classes, norm=False, activation=False) <NEW_LINE> self.swish = MemoryEfficientSwish() if not onnx_export else Swish() <NEW_LINE> <DEDENT> def forward(self, inputs): <NEW_LINE> <INDENT> feats = [] <NEW_LINE> for feat, bn_list in zip(inputs, self.bn_list): <NEW_LINE> <INDENT> for i, bn, conv in zip(range(self.num_layers), bn_list, self.conv_list): <NEW_LINE> <INDENT> feat = conv(feat) <NEW_LINE> feat = bn(feat) <NEW_LINE> feat = self.swish(feat) <NEW_LINE> <DEDENT> feat = self.header(feat) <NEW_LINE> feat = feat.permute(0, 2, 3, 1) <NEW_LINE> feat = feat.contiguous().view(feat.shape[0], feat.shape[1], feat.shape[2], self.num_anchors, self.num_classes) <NEW_LINE> feat = feat.contiguous().view(feat.shape[0], -1, self.num_classes) <NEW_LINE> feats.append(feat) <NEW_LINE> <DEDENT> feats = torch.cat(feats, dim=1) <NEW_LINE> feats = feats.sigmoid() <NEW_LINE> return feats
|
modified by Zylo117
|
62598ffc462c4b4f79dbc3fd
|
class SimpleUserListView(ListAPIView): <NEW_LINE> <INDENT> permission_classes = (IsAuthenticatedOrReadOnly,) <NEW_LINE> queryset = SimpleUser.objects.all() <NEW_LINE> serializer_class = SimpleUserSerializer
|
Show information about users id, username, last_login, last_request
|
62598ffc4c3428357761aca0
|
class RegistroE111(Registro): <NEW_LINE> <INDENT> campos = [ CampoFixo(1, 'REG', 'E111'), Campo(2, 'COD_AJ_APUR'), Campo(3, 'DESCR_COMPL_AJ'), CampoNumerico(4, 'VL_AJ_APUR'), ] <NEW_LINE> nivel = 4
|
AJUSTE/BENEFÍCIO/INCENTIVO DA APURAÇÃO DO ICMS
|
62598ffc4c3428357761aca2
|
class RejectInvitation(AcceptInvitation): <NEW_LINE> <INDENT> def __call__(self, iid=None): <NEW_LINE> <INDENT> if not iid: <NEW_LINE> <INDENT> iid = self.request.get('iid') <NEW_LINE> <DEDENT> self.load(iid) <NEW_LINE> notify(InvitationRejectedEvent(self.target, self.invitation)) <NEW_LINE> self.send_email() <NEW_LINE> msg = _(u'info_invitation_rejected', default=u'You have rejected the invitation.') <NEW_LINE> IStatusMessage(self.request).addStatusMessage(msg, type='info') <NEW_LINE> self.storage.remove_invitation(self.invitation) <NEW_LINE> del self.invitation <NEW_LINE> url = self.request.get('HTTP_REFERER', os.path.join(self.context.portal_url(), '@@invitations')) <NEW_LINE> return self.request.RESPONSE.redirect(url) <NEW_LINE> <DEDENT> def get_mail_body_html_view(self): <NEW_LINE> <INDENT> return self.context.unrestrictedTraverse( '@@invitation_rejected_mail_html') <NEW_LINE> <DEDENT> def get_mail_body_text_view(self): <NEW_LINE> <INDENT> return self.context.unrestrictedTraverse( '@@invitation_rejected_mail_text') <NEW_LINE> <DEDENT> def get_subject(self): <NEW_LINE> <INDENT> member = getToolByName(self.context, 'portal_membership').getAuthenticatedMember() <NEW_LINE> fullname = member.getProperty( 'fullname', member.getId()).decode('utf8') <NEW_LINE> context_title = self.context.pretty_title_or_id().decode('utf-8') <NEW_LINE> if 0: <NEW_LINE> <INDENT> _(u'mail_invitation_rejected_subject', default=u'The Invitation to participate in ${title} ' u'was rejected by ${user}', mapping=dict(title=context_title, user=fullname)) <NEW_LINE> <DEDENT> return translate(u'mail_invitation_rejected_subject', domain='ftw.participation', context=self.request, default=u'The Invitation to participate in ${title} ' u'was rejected by ${user}', mapping=dict(title=context_title, user=fullname))
|
Reject a invitation. Like accept but with different messages
and without partipating.
|
62598ffc15fb5d323ce7f73e
|
class IdxINDEXNAME(db.Model): <NEW_LINE> <INDENT> __tablename__ = 'idxINDEXNAME' <NEW_LINE> id_idxINDEX = db.Column(db.MediumInteger(9, unsigned=True), db.ForeignKey(IdxINDEX.id), primary_key=True) <NEW_LINE> ln = db.Column(db.Char(5), primary_key=True, server_default='') <NEW_LINE> type = db.Column(db.Char(3), primary_key=True, server_default='sn') <NEW_LINE> value = db.Column(db.String(255), nullable=False) <NEW_LINE> idxINDEX = db.relationship(IdxINDEX, backref='names')
|
Represent a IdxINDEXNAME record.
|
62598ffc0a366e3fb87dd3ef
|
class ImportPlugin(BasePlugin): <NEW_LINE> <INDENT> default_workspace_category = 'Import' <NEW_LINE> pass
|
Import plugin.
|
62598ffc15fb5d323ce7f740
|
class Variables: <NEW_LINE> <INDENT> class __Variables: <NEW_LINE> <INDENT> def __init__(self, tupel=None): <NEW_LINE> <INDENT> for x in tupel._fields: <NEW_LINE> <INDENT> setattr(self, x, tupel.__getattribute__(x)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> instance=None <NEW_LINE> def __init__(self, tupel=None): <NEW_LINE> <INDENT> if not Variables.instance: <NEW_LINE> <INDENT> Variables.instance=Variables.__Variables(tupel) <NEW_LINE> <DEDENT> <DEDENT> def __setattr__(self, name, val): <NEW_LINE> <INDENT> setattr(self.instance, name, val) <NEW_LINE> <DEDENT> def __getattr__(self, name): <NEW_LINE> <INDENT> return getattr(self.instance, name)
|
Singleton-Object
|
62598ffd462c4b4f79dbc405
|
class IsDigit(FieldValidator): <NEW_LINE> <INDENT> regex = re.compile('^-{0,1}[0-9]+$') <NEW_LINE> def validate_value(self): <NEW_LINE> <INDENT> if not self.value: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return re.search(self.regex, self.value) is not None
|
Will fail if value is not a digit.
|
62598ffd15fb5d323ce7f741
|
class _LDAPQuery(object): <NEW_LINE> <INDENT> def __init__(self, base_dn, filter_tmpl, scope, attributes, cache_period): <NEW_LINE> <INDENT> self.base_dn = base_dn <NEW_LINE> self.filter_tmpl = filter_tmpl <NEW_LINE> self.scope = scope <NEW_LINE> self.attributes = attributes <NEW_LINE> self.cache_period = cache_period <NEW_LINE> self.last_timeslice = 0 <NEW_LINE> self.cache = {} <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return ('base_dn={base_dn}, filter_tmpl={filter_tmpl}, ' 'scope={scope}, attributes={attributes}, ' 'cache_period={cache_period}'.format(**self.__dict__)) <NEW_LINE> <DEDENT> def query_cache(self, cache_key): <NEW_LINE> <INDENT> now = time() <NEW_LINE> ts = _timeslice(self.cache_period, now) <NEW_LINE> if ts > self.last_timeslice: <NEW_LINE> <INDENT> logger.debug( 'dumping cache; now ts: %r, last_ts: %r', ts, self.last_timeslice) <NEW_LINE> self.cache = {} <NEW_LINE> self.last_timeslice = ts <NEW_LINE> <DEDENT> return self.cache.get(cache_key) <NEW_LINE> <DEDENT> def execute(self, manager, **kw): <NEW_LINE> <INDENT> cache_key = (self.base_dn % kw, self.filter_tmpl % kw) <NEW_LINE> logger.debug('searching for %r', cache_key) <NEW_LINE> result = self.query_cache(cache_key) if self.cache_period else None <NEW_LINE> if result is None: <NEW_LINE> <INDENT> with manager.connection() as conn: <NEW_LINE> <INDENT> ret = conn.search( search_scope=self.scope, attributes=self.attributes, *cache_key) <NEW_LINE> result, ret = conn.get_response(ret) <NEW_LINE> <DEDENT> if result is None: <NEW_LINE> <INDENT> result = [] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result = [(r['dn'], r['attributes']) for r in result if 'dn' in r] <NEW_LINE> if self.cache_period: <NEW_LINE> <INDENT> self.cache[cache_key] = result <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> logger.debug('result for %r retrieved from cache', cache_key) <NEW_LINE> <DEDENT> logger.debug('search result: %r', result) <NEW_LINE> return result
|
Represents an LDAP query.
Provides rudimentary in-RAM caching of query results.
|
62598ffd187af65679d2a0f9
|
class Contribution(models.Model): <NEW_LINE> <INDENT> applicant = models.ForeignKey(ApplicantApproval, on_delete=models.CASCADE) <NEW_LINE> project = models.ForeignKey(Project, on_delete=models.PROTECT) <NEW_LINE> date_started = models.DateField(verbose_name="Date contribution was started") <NEW_LINE> date_merged = models.DateField(verbose_name="Date contribution was accepted or merged", help_text="If this contribution is still in progress, you can leave this field blank and edit it later.", blank=True, null=True) <NEW_LINE> url = models.URLField( verbose_name="Contribution URL", help_text="A link to the publicly submitted contribution. The contribution can be work in progress. The URL could a link to a GitHub/GitLab issue or pull request, a link to the mailing list archives for a patch, a Gerrit pull request or issue, a contribution change log on a wiki, a review of graphical design work, a posted case study or user experience study, etc. If you're unsure what URL to put here, ask your mentor.") <NEW_LINE> description = models.TextField( max_length=THREE_PARAGRAPH_LENGTH, help_text="Description of this contribution for review by the Outreachy coordinators and organizers during intern selection. If you used advanced tools to create this contribution, mention them here.") <NEW_LINE> def get_application(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return FinalApplication.objects.get( project=self.project, applicant=self.applicant) <NEW_LINE> <DEDENT> except FinalApplication.DoesNotExist: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return '{applicant} contribution for {community} - {project}'.format( applicant = self.applicant.applicant.public_name, community = self.project.project_round.community, project = self.project.short_title, )
|
An Outreachy applicant must make contributions to a project in order to be
eligible to be accepted as an intern. The Contribution model is a record
of that contribution that the applicant submits to the Outreachy website.
Contributions are recorded from the start of the contribution period to
when the final application is due. Applicants who have submitted a final
application can continue to record contributions until the intern
announcement.
|
62598ffd15fb5d323ce7f748
|
class TransparencySetting(Element): <NEW_LINE> <INDENT> BlendingSetting = EmbeddedDocumentField(BlendingSetting) <NEW_LINE> DropShadowSetting = EmbeddedDocumentField(DropShadowSetting) <NEW_LINE> FeatherSetting = EmbeddedDocumentField(FeatherSetting) <NEW_LINE> InnerShadowSetting = EmbeddedDocumentField(InnerShadowSetting) <NEW_LINE> OuterGlowSetting = EmbeddedDocumentField(OuterGlowSetting) <NEW_LINE> InnerGlowSetting = EmbeddedDocumentField(InnerGlowSetting) <NEW_LINE> BevelAndEmbossSetting = EmbeddedDocumentField(BevelAndEmbossSetting) <NEW_LINE> SatinSetting = EmbeddedDocumentField(SatinSetting) <NEW_LINE> DirectionalFeatherSetting = EmbeddedDocumentField(DirectionalFeatherSetting) <NEW_LINE> GradientFeatherSetting = EmbeddedDocumentField(GradientFeatherSetting)
|
You can apply transparency effects to page items in an InDesign layout. In
IDML, you accomplish this using the <TransparencySetting> element. A child
element (or elements) of this element specify the transparency effect you
want to apply.
|
62598ffd187af65679d2a0ff
|
class Mqtt(BaseShellyAttribute): <NEW_LINE> <INDENT> def __init__(self, json_def=None): <NEW_LINE> <INDENT> if json_def is None: <NEW_LINE> <INDENT> json_obj = {} <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> json_obj = json.loads(json_def) <NEW_LINE> <DEDENT> self.__dict__ = json_obj <NEW_LINE> self.connected = False if 'connected' not in json_obj else json_obj['connected']
|
Represents Mqtt attributes
|
62598ffd462c4b4f79dbc415
|
class BaseResponseInfoExtractor: <NEW_LINE> <INDENT> def __new__(cls, *arg, **kwargs): <NEW_LINE> <INDENT> if not hasattr(cls, '_instance'): <NEW_LINE> <INDENT> cls._instance = object.__new__(cls) <NEW_LINE> <DEDENT> return cls._instance <NEW_LINE> <DEDENT> def get_status_code(self, response): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def get_response_size(self, response): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def get_content_type(self, response): <NEW_LINE> <INDENT> raise NotImplementedError
|
Helper class help to extract logging-relevant information from HTTP response object
|
62598ffd4c3428357761acb6
|
class FunctionSubTests(unittest.TestCase): <NEW_LINE> <INDENT> def testSub(self): <NEW_LINE> <INDENT> g = Function.Function_xn(4)+Function.Function_xn(3)+Function.Function_xn(2)+Function.Function_xn(1) <NEW_LINE> for x in range (2,11): <NEW_LINE> <INDENT> r = randint(2,x) <NEW_LINE> t = Function.Function_xn(randint(1,4)) <NEW_LINE> self.assertAlmostEquals(g.evaluate(r)-t.evaluate(r),(g-t).evaluate(r), delta=1e-12) <NEW_LINE> <DEDENT> for y in range (2,11): <NEW_LINE> <INDENT> r = randint(2,x) <NEW_LINE> t = randint(1,15) <NEW_LINE> self.assertAlmostEquals(g.evaluate(r)-t,(g-t).evaluate(r), delta=1e-12) <NEW_LINE> <DEDENT> for i in range (2,11): <NEW_LINE> <INDENT> r = randint(2,x) <NEW_LINE> t = randint(1,15) <NEW_LINE> self.assertAlmostEquals(t-g.evaluate(r),(t-g).evaluate(r), delta=1e-12)
|
Test + overloads
|
62598ffd15fb5d323ce7f752
|
class Builder(object): <NEW_LINE> <INDENT> def __init__(self, element): <NEW_LINE> <INDENT> if not isinstance(element, xml4h.nodes.Element): <NEW_LINE> <INDENT> raise ValueError( "Builder can only be created with an %s.%s instance, not %s" % (xml4h.nodes.Element.__module__, xml4h.nodes.Element.__name__, element)) <NEW_LINE> <DEDENT> self._element = element <NEW_LINE> <DEDENT> @property <NEW_LINE> def dom_element(self): <NEW_LINE> <INDENT> return self._element <NEW_LINE> <DEDENT> @property <NEW_LINE> def document(self): <NEW_LINE> <INDENT> return self._element.document <NEW_LINE> <DEDENT> @property <NEW_LINE> def root(self): <NEW_LINE> <INDENT> return self._element.root <NEW_LINE> <DEDENT> def find(self, **kwargs): <NEW_LINE> <INDENT> return self._element.find(**kwargs) <NEW_LINE> <DEDENT> def find_doc(self, **kwargs): <NEW_LINE> <INDENT> return self._element.find_doc(**kwargs) <NEW_LINE> <DEDENT> def write(self, *args, **kwargs): <NEW_LINE> <INDENT> self.dom_element.write(*args, **kwargs) <NEW_LINE> <DEDENT> def write_doc(self, *args, **kwargs): <NEW_LINE> <INDENT> self.dom_element.write_doc(*args, **kwargs) <NEW_LINE> <DEDENT> def up(self, count=1, to_name=None): <NEW_LINE> <INDENT> elem = self._element <NEW_LINE> up_count = 0 <NEW_LINE> while True: <NEW_LINE> <INDENT> if elem.is_root or elem.parent is None: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> elem = elem.parent <NEW_LINE> if to_name is None: <NEW_LINE> <INDENT> up_count += 1 <NEW_LINE> if up_count >= count: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> if elem.name == to_name: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return Builder(elem) <NEW_LINE> <DEDENT> def transplant(self, node): <NEW_LINE> <INDENT> self._element.transplant_node(node) <NEW_LINE> return self <NEW_LINE> <DEDENT> def clone(self, node): <NEW_LINE> <INDENT> self._element.clone_node(node) <NEW_LINE> return self <NEW_LINE> <DEDENT> def element(self, *args, **kwargs): <NEW_LINE> 
<INDENT> child_element = self._element.add_element(*args, **kwargs) <NEW_LINE> return Builder(child_element) <NEW_LINE> <DEDENT> elem = element <NEW_LINE> e = element <NEW_LINE> def attributes(self, *args, **kwargs): <NEW_LINE> <INDENT> self._element.set_attributes(*args, **kwargs) <NEW_LINE> return self <NEW_LINE> <DEDENT> attrs = attributes <NEW_LINE> a = attributes <NEW_LINE> def text(self, text): <NEW_LINE> <INDENT> self._element.add_text(text) <NEW_LINE> return self <NEW_LINE> <DEDENT> t = text <NEW_LINE> def comment(self, text): <NEW_LINE> <INDENT> self._element.add_comment(text) <NEW_LINE> return self <NEW_LINE> <DEDENT> c = comment <NEW_LINE> def processing_instruction(self, target, data): <NEW_LINE> <INDENT> self._element.add_instruction(target, data) <NEW_LINE> return self <NEW_LINE> <DEDENT> instruction = processing_instruction <NEW_LINE> i = instruction <NEW_LINE> def cdata(self, text): <NEW_LINE> <INDENT> self._element.add_cdata(text) <NEW_LINE> return self <NEW_LINE> <DEDENT> data = cdata <NEW_LINE> d = cdata <NEW_LINE> def ns_prefix(self, prefix, ns_uri): <NEW_LINE> <INDENT> self._element.set_ns_prefix(prefix, ns_uri) <NEW_LINE> return self
|
Builder class that wraps an :class:`xml4h.nodes.Element` node with methods
for adding XML content to an underlying DOM.
|
62598ffd3cc13d1c6d46615c
|
class ComponentConfig(object): <NEW_LINE> <INDENT> __slots__ = ( 'realm', 'extra', 'keyring', 'controller', 'shared' ) <NEW_LINE> def __init__(self, realm=None, extra=None, keyring=None, controller=None, shared=None): <NEW_LINE> <INDENT> assert(realm is None or type(realm) == six.text_type) <NEW_LINE> self.realm = realm <NEW_LINE> self.extra = extra <NEW_LINE> self.keyring = keyring <NEW_LINE> self.controller = controller <NEW_LINE> self.shared = shared <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "ComponentConfig(realm=<{0}>, extra={1}, keyring={2}, controller={3}, shared={4})".format(self.realm, self.extra, self.keyring, self.controller, self.shared)
|
WAMP application component configuration. An instance of this class is
provided to the constructor of :class:`autobahn.wamp.protocol.ApplicationSession`.
|
62598ffd627d3e7fe0e078b1
|
class TestResourceConfiguration(BaseRuleTestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> super(TestResourceConfiguration, self).setUp() <NEW_LINE> self.collection.register(Required()) <NEW_LINE> <DEDENT> def test_file_positive(self): <NEW_LINE> <INDENT> self.helper_file_positive() <NEW_LINE> <DEDENT> def test_file_negative(self): <NEW_LINE> <INDENT> self.helper_file_negative('test/fixtures/templates/bad/properties_required.yaml', 12) <NEW_LINE> <DEDENT> def test_file_negative_generic(self): <NEW_LINE> <INDENT> self.helper_file_negative('test/fixtures/templates/bad/generic.yaml', 8)
|
Test Resource Properties
|
62598ffd3cc13d1c6d46615e
|
class AttentionBlock(torch.nn.Module): <NEW_LINE> <INDENT> def __init__(self, size): <NEW_LINE> <INDENT> super(AttentionBlock, self).__init__() <NEW_LINE> self.atten_e2v = Attention(size) <NEW_LINE> self.atten_v2e = Attention(size) <NEW_LINE> <DEDENT> def forward(self, nodes, edges, adjacency, incidence): <NEW_LINE> <INDENT> new_nodes = self.atten_e2v(nodes, edges, incidence) <NEW_LINE> new_edges = self.atten_v2e(edges, nodes, incidence.t()) <NEW_LINE> return new_nodes, new_edges
|
Attention Block
|
62598ffd462c4b4f79dbc419
|
class ResistantVirus(SimpleVirus): <NEW_LINE> <INDENT> def __init__(self, maxBirthProb, clearProb, resistances, mutProb): <NEW_LINE> <INDENT> super().__init__(maxBirthProb, clearProb) <NEW_LINE> self.resistances = resistances <NEW_LINE> self.mutProb = float(mutProb) <NEW_LINE> <DEDENT> def getResistance(self, drug): <NEW_LINE> <INDENT> return self.resistances[str(drug)] <NEW_LINE> <DEDENT> def reproduce(self, popDensity, activeDrugs): <NEW_LINE> <INDENT> willreproduce = False <NEW_LINE> if activeDrugs != []: <NEW_LINE> <INDENT> for drug in activeDrugs: <NEW_LINE> <INDENT> if self.resistances[drug] == True: <NEW_LINE> <INDENT> willreproduce = True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> willreproduce = False <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> willreproduce = True <NEW_LINE> <DEDENT> if willreproduce: <NEW_LINE> <INDENT> x = random.random() <NEW_LINE> if x <= (self.maxBirthProb * (1 - popDensity)): <NEW_LINE> <INDENT> offspringresistances = {} <NEW_LINE> for key in self.resistances.keys(): <NEW_LINE> <INDENT> y = random.random() <NEW_LINE> if y <= self.mutProb: <NEW_LINE> <INDENT> offspringresistances[key] = not(self.resistances[key]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> offspringresistances[key] = self.resistances[key] <NEW_LINE> <DEDENT> <DEDENT> return ResistantVirus(self.maxBirthProb, self.clearProb, offspringresistances, self.mutProb) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> return 0
|
Representation of a virus which can have drug resistance.
|
62598ffd0a366e3fb87dd407
|
class Package(models.Model): <NEW_LINE> <INDENT> class Meta(object): <NEW_LINE> <INDENT> managed = False <NEW_LINE> verbose_name = _("Package") <NEW_LINE> verbose_name_plural = _("Packages") <NEW_LINE> db_table = "package_view" <NEW_LINE> <DEDENT> c_name = models.CharField( verbose_name=_("Name"), max_length=255, ) <NEW_LINE> created_at = models.DateTimeField( verbose_name=_("Created At") ) <NEW_LINE> c_package = models.CharField( verbose_name=_("Package"), max_length=255, ) <NEW_LINE> c_version = models.CharField( verbose_name=_("Version"), max_length=255 ) <NEW_LINE> c_section = models.ForeignKey( Section, verbose_name=_("Section") ) <NEW_LINE> online_icon = models.FileField( verbose_name=_("Online Icon"), max_length=255, blank=True ) <NEW_LINE> c_description = models.TextField( verbose_name=_("Description"), blank=True, null=True, ) <NEW_LINE> download_count = models.IntegerField( verbose_name=_("Download Times"), ) <NEW_LINE> def get_version_admin_url(self): <NEW_LINE> <INDENT> content_type = ContentType.objects.get_for_model(Version) <NEW_LINE> return urlresolvers.reverse( "admin:%s_%s_change" % (content_type.app_label, "version"), args=(self.id,) ) <NEW_LINE> <DEDENT> def get_display_icon(self): <NEW_LINE> <INDENT> if self.online_icon.name: <NEW_LINE> <INDENT> file_path = self.online_icon.name <NEW_LINE> return str(preferences.Setting.resources_alias) + file_path <NEW_LINE> <DEDENT> elif self.c_section: <NEW_LINE> <INDENT> return self.c_section.icon_link <NEW_LINE> <DEDENT> return None <NEW_LINE> <DEDENT> display_icon = property(get_display_icon) <NEW_LINE> def get_absolute_url(self): <NEW_LINE> <INDENT> from django.urls import reverse <NEW_LINE> return reverse('package_id', args=[self.id]) <NEW_LINE> <DEDENT> def get_latest_version(self): <NEW_LINE> <INDENT> return Version.objects.get(id=self.id)
|
DCRM Proxy Model: Package
|
62598ffd627d3e7fe0e078b7
|
class DatasetBase(object): <NEW_LINE> <INDENT> def __init__(self, json_path_input, json_path_labels, data_root, extension, is_test=False): <NEW_LINE> <INDENT> self.json_path_input = json_path_input <NEW_LINE> self.json_path_labels = json_path_labels <NEW_LINE> self.data_root = data_root <NEW_LINE> self.extension = extension <NEW_LINE> self.is_test = is_test <NEW_LINE> self.classes = self.read_json_labels() <NEW_LINE> self.classes_dict = self.get_two_way_dict(self.classes) <NEW_LINE> self.json_data = self.read_json_input() <NEW_LINE> <DEDENT> def read_json_input(self): <NEW_LINE> <INDENT> json_data = [] <NEW_LINE> if not self.is_test: <NEW_LINE> <INDENT> with open(self.json_path_input, 'rb') as jsonfile: <NEW_LINE> <INDENT> json_reader = json.load(jsonfile) <NEW_LINE> for elem in json_reader: <NEW_LINE> <INDENT> label = self.clean_template(elem['template']) <NEW_LINE> if label not in self.classes: <NEW_LINE> <INDENT> raise ValueError("Label mismatch! Please correct") <NEW_LINE> <DEDENT> item = ListData(elem['id'], label, os.path.join(self.data_root, elem['id'] + self.extension) ) <NEW_LINE> json_data.append(item) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> with open(self.json_path_input, 'rb') as jsonfile: <NEW_LINE> <INDENT> json_reader = json.load(jsonfile) <NEW_LINE> for elem in json_reader: <NEW_LINE> <INDENT> item = ListData(elem['id'], "Holding something", os.path.join(self.data_root, elem['id'] + self.extension) ) <NEW_LINE> json_data.append(item) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return json_data <NEW_LINE> <DEDENT> def read_json_labels(self): <NEW_LINE> <INDENT> classes = [] <NEW_LINE> with open(self.json_path_labels, 'rb') as jsonfile: <NEW_LINE> <INDENT> json_reader = json.load(jsonfile) <NEW_LINE> for elem in json_reader: <NEW_LINE> <INDENT> classes.append(elem) <NEW_LINE> <DEDENT> <DEDENT> return sorted(classes) <NEW_LINE> <DEDENT> def get_two_way_dict(self, classes): <NEW_LINE> <INDENT> classes_dict = {} <NEW_LINE> for i, item 
in enumerate(classes): <NEW_LINE> <INDENT> classes_dict[item] = i <NEW_LINE> classes_dict[i] = item <NEW_LINE> <DEDENT> return classes_dict <NEW_LINE> <DEDENT> def clean_template(self, template): <NEW_LINE> <INDENT> template = template.replace("[", "") <NEW_LINE> template = template.replace("]", "") <NEW_LINE> return template
|
To read json data and construct a list containing video sample `ids`,
`label` and `path`
|
62598ffd4c3428357761acbe
|
class SenseNet(nn.Module): <NEW_LINE> <INDENT> def __init__(self, input_dim=600, supersenses=None, transform=False): <NEW_LINE> <INDENT> super(SenseNet, self).__init__() <NEW_LINE> self.input_dim = input_dim <NEW_LINE> self.supersenses = supersenses <NEW_LINE> self.stoi = {s: i for i, s in enumerate(self.supersenses)} <NEW_LINE> if self.supersenses is None: <NEW_LINE> <INDENT> self.supersenses = ["animal","event","quantity","time"] <NEW_LINE> <DEDENT> self.num_classes = len(self.supersenses) <NEW_LINE> self.transform = transform <NEW_LINE> if not self.transform: <NEW_LINE> <INDENT> self.transformed_input_dim = self.input_dim <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.transformed_input_dim = self.input_dim <NEW_LINE> <DEDENT> if self.transform: <NEW_LINE> <INDENT> self.lin_t = torch.nn.Linear(self.input_dim, self.transformed_input_dim) <NEW_LINE> self.relu = torch.nn.ReLU() <NEW_LINE> <DEDENT> self.lin = torch.nn.Linear(self.transformed_input_dim, self.num_classes) <NEW_LINE> self.logsm = torch.nn.LogSoftmax() <NEW_LINE> <DEDENT> def forward(self, x, **kwargs): <NEW_LINE> <INDENT> if self.transform: <NEW_LINE> <INDENT> x = self.lin_t(x) <NEW_LINE> x = self.relu(x) <NEW_LINE> <DEDENT> x = self.lin(x) <NEW_LINE> x = self.logsm(x) <NEW_LINE> return x <NEW_LINE> <DEDENT> def loss(self, y, yhat): <NEW_LINE> <INDENT> ylosses = {} <NEW_LINE> yloss_weighted_total = 0.0 <NEW_LINE> for sense in y.keys(): <NEW_LINE> <INDENT> sense_idx = self.stoi[sense] <NEW_LINE> sense_weight = y[sense] <NEW_LINE> ylosses[sense] = yhat[sense_idx] <NEW_LINE> yloss_weighted_total += -1.0 * sense_weight * yhat[sense_idx] <NEW_LINE> <DEDENT> return ylosses, yloss_weighted_total
|
Network component that predicts one of 41 possible discrete
WordNet supersense tags. Input is bilstm hidden state (or
possibly concatenation of two hidden states).
Information about the 41 supersense tags are here:
https://dl.acm.org/citation.cfm?id=1610158
|
62598ffd15fb5d323ce7f75a
|
class PcapConnectionWrapper: <NEW_LINE> <INDENT> def __init__(self, conn, pcap_file): <NEW_LINE> <INDENT> self.conn = conn <NEW_LINE> self.pcap_file = pcap_file <NEW_LINE> can.pcap.write_header(self.pcap_file) <NEW_LINE> <DEDENT> def send_frame(self, frame): <NEW_LINE> <INDENT> can.pcap.write_frame(self.pcap_file, time.time(), frame) <NEW_LINE> self.conn.send_frame(frame) <NEW_LINE> <DEDENT> def receive_frame(self): <NEW_LINE> <INDENT> frame = self.conn.receive_frame() <NEW_LINE> if frame: <NEW_LINE> <INDENT> can.pcap.write_frame(self.pcap_file, time.time(), frame) <NEW_LINE> <DEDENT> return frame
|
Connection wrapper which logs all frames sent and received into a wireshark
compatible pcap file.
|
62598ffd187af65679d2a106
|
class Region(object): <NEW_LINE> <INDENT> def __init__(self, pos, size, tool=None, color=None): <NEW_LINE> <INDENT> self.rect = Rectangle(pos, size) <NEW_LINE> self.tool = tool <NEW_LINE> self.color = color <NEW_LINE> self.last_value = None <NEW_LINE> self.last_snapshot = None <NEW_LINE> <DEDENT> def take_snapshot(self, screen): <NEW_LINE> <INDENT> self.last_snapshot = screen.crop(self.rect.as_quad()) <NEW_LINE> return self.last_snapshot <NEW_LINE> <DEDENT> def scrape(self, screen): <NEW_LINE> <INDENT> self.take_snapshot(screen) <NEW_LINE> self.last_value = self.tool.recognize(self.last_snapshot) <NEW_LINE> return self.last_value
|
This is the top level scraping tool.
|
62598ffd627d3e7fe0e078bd
|
class LearningAgent(Agent): <NEW_LINE> <INDENT> def __init__(self, env, learning=False, epsilon=1.0, alpha=0.5): <NEW_LINE> <INDENT> super(LearningAgent, self).__init__(env) <NEW_LINE> self.planner = RoutePlanner(self.env, self) <NEW_LINE> self.valid_actions = self.env.valid_actions <NEW_LINE> self.learning = learning <NEW_LINE> self.Q = dict() <NEW_LINE> self.epsilon = epsilon <NEW_LINE> self.alpha = alpha <NEW_LINE> self.counter = 0 <NEW_LINE> <DEDENT> def reset(self, destination=None, testing=False): <NEW_LINE> <INDENT> self.planner.route_to(destination) <NEW_LINE> if testing: <NEW_LINE> <INDENT> self.alpha = 0.0 <NEW_LINE> self.epsilon = 0.0 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.epsilon = math.exp(-self.alpha*self.counter) <NEW_LINE> self.counter += 1 <NEW_LINE> <DEDENT> return None <NEW_LINE> <DEDENT> def build_state(self): <NEW_LINE> <INDENT> waypoint = self.planner.next_waypoint() <NEW_LINE> inputs = self.env.sense(self) <NEW_LINE> deadline = self.env.get_deadline(self) <NEW_LINE> state = (waypoint, inputs['light'], inputs['left'], inputs['right'], inputs['oncoming']) <NEW_LINE> return state <NEW_LINE> <DEDENT> def get_maxQ(self, state): <NEW_LINE> <INDENT> return max(self.Q[state].values()) <NEW_LINE> <DEDENT> def createQ(self, state): <NEW_LINE> <INDENT> if not self.learning: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> if state not in self.Q: <NEW_LINE> <INDENT> self.Q[state] = {'left': 0, 'right': 0, 'forward': 0, None: 0} <NEW_LINE> <DEDENT> return <NEW_LINE> <DEDENT> def choose_action(self, state): <NEW_LINE> <INDENT> self.state = state <NEW_LINE> self.next_waypoint = self.planner.next_waypoint() <NEW_LINE> action = None <NEW_LINE> if not self.learning: <NEW_LINE> <INDENT> action = random.choice(self.valid_actions) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if random.random() > self.epsilon: <NEW_LINE> <INDENT> maxQ = self.get_maxQ(state) <NEW_LINE> valid_actions = [] <NEW_LINE> for act in self.Q[state]: <NEW_LINE> <INDENT> if maxQ == 
self.Q[state][act]: <NEW_LINE> <INDENT> valid_actions.append(act) <NEW_LINE> <DEDENT> <DEDENT> action = random.choice(valid_actions) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> action = random.choice(self.valid_actions) <NEW_LINE> <DEDENT> <DEDENT> return action <NEW_LINE> <DEDENT> def learn(self, state, action, reward): <NEW_LINE> <INDENT> if self.learning: <NEW_LINE> <INDENT> self.Q[state][action] = (1.0 - self.alpha) * self.Q[state][action] + reward * self.alpha <NEW_LINE> <DEDENT> return <NEW_LINE> <DEDENT> def update(self): <NEW_LINE> <INDENT> state = self.build_state() <NEW_LINE> self.createQ(state) <NEW_LINE> action = self.choose_action(state) <NEW_LINE> reward = self.env.act(self, action) <NEW_LINE> self.learn(state, action, reward) <NEW_LINE> return
|
An agent that learns to drive in the Smartcab world.
This is the object you will be modifying.
|
62598ffe0a366e3fb87dd411
|
class FSLiveCommand(RemoteCommand): <NEW_LINE> <INDENT> GLOBAL_EH = None <NEW_LINE> LOCAL_EH = None <NEW_LINE> CRITICAL = False <NEW_LINE> TARGET_STATUS_RC_MAP = { } <NEW_LINE> def fs_status_to_rc(self, status): <NEW_LINE> <INDENT> return self.TARGET_STATUS_RC_MAP.get(status, RC_RUNTIME_ERROR) <NEW_LINE> <DEDENT> def _open_fs(self, fsname, eh): <NEW_LINE> <INDENT> return open_lustrefs(fsname, self.options.targets, nodes=self.options.nodes, excluded=self.options.excludes, failover=self.options.failover, indexes=self.options.indexes, labels=self.options.labels, event_handler=eh) <NEW_LINE> <DEDENT> def execute_fs(self, fs, fs_conf, eh, vlevel): <NEW_LINE> <INDENT> raise NotImplemented("Derived class must implement.") <NEW_LINE> <DEDENT> def execute(self): <NEW_LINE> <INDENT> first = True <NEW_LINE> self.forbidden(self.options.model, "-m, use -f") <NEW_LINE> if self.CRITICAL and not self.options.fsnames: <NEW_LINE> <INDENT> msg = "A filesystem is required (use -f)." <NEW_LINE> raise CommandHelpException(msg, self) <NEW_LINE> <DEDENT> result = 0 <NEW_LINE> self.init_execute() <NEW_LINE> local_eh = None <NEW_LINE> global_eh = None <NEW_LINE> if self.LOCAL_EH: <NEW_LINE> <INDENT> local_eh = self.LOCAL_EH(self) <NEW_LINE> <DEDENT> if self.GLOBAL_EH: <NEW_LINE> <INDENT> global_eh = self.GLOBAL_EH(self) <NEW_LINE> <DEDENT> eh = self.install_eventhandler(local_eh, global_eh) <NEW_LINE> for fsname in self.iter_fsname(): <NEW_LINE> <INDENT> fs_conf, fs = self._open_fs(fsname, eh) <NEW_LINE> fs.set_debug(self.options.debug) <NEW_LINE> if not first: <NEW_LINE> <INDENT> print <NEW_LINE> <DEDENT> first = False <NEW_LINE> vlevel = self.options.verbose <NEW_LINE> result = max(result, self.execute_fs(fs, fs_conf, eh, vlevel)) <NEW_LINE> <DEDENT> return result
|
shine <cmd> [-f <fsname>] [-n <nodes>] [-dqv]
'CRITICAL' could be set if command will run action that can
damage filesystems.
|
62598ffe15fb5d323ce7f762
|
class Field(Bet):
    """Field bet.

    Parameters
    ----------
    bet_amount : number
        Amount wagered on the bet.
    double : list, optional
        Numbers that pay double on the field bet (default [2, 12]).
    triple : list, optional
        Numbers that pay triple on the field bet (default []).
    """

    def __init__(self, bet_amount, double=None, triple=None):
        # BUGFIX: the previous signature used mutable default arguments
        # (double=[2, 12], triple=[]) shared across all instances; use
        # None sentinels instead. Passing None yields the old defaults,
        # so callers are unaffected.
        self.name = "Field"
        self.double_winning_numbers = [2, 12] if double is None else double
        self.triple_winning_numbers = [] if triple is None else triple
        self.winning_numbers = [2, 3, 4, 9, 10, 11, 12]
        self.losing_numbers = [5, 6, 7, 8]
        super().__init__(bet_amount)

    def _update_bet(self, table_object, dice_object):
        """Resolve the bet for one roll.

        Returns (status, win_amount): status is "win", "lose", or None
        (no decision); win_amount is the payout (0 unless a win).
        """
        status = None
        win_amount = 0
        # Check triple before double before single so the highest
        # multiplier wins when number sets overlap.
        if dice_object.total in self.triple_winning_numbers:
            status = "win"
            win_amount = 3 * self.bet_amount
        elif dice_object.total in self.double_winning_numbers:
            status = "win"
            win_amount = 2 * self.bet_amount
        elif dice_object.total in self.winning_numbers:
            status = "win"
            win_amount = 1 * self.bet_amount
        elif dice_object.total in self.losing_numbers:
            status = "lose"
        return status, win_amount
|
Parameters
----------
double : list
Set of numbers that pay double on the field bet (default = [2,12])
triple : list
Set of numbers that pay triple on the field bet (default = [])
|
62598ffe0a366e3fb87dd415
|
class Book(models.Model):
    """A book record with a reference number and IBAN identifier.

    `ref` and `iban` are declared read-only via ReadonlyMeta.
    """

    ref = models.IntegerField()
    iban = models.CharField(max_length=100)
    name = models.CharField(max_length=250)

    def __str__(self):
        # BUGFIX: the template string was returned verbatim (the literal
        # "{book.name} ..."); it must be formatted with the instance.
        return "{book.name} (ref: {book.ref}, iban: {book.iban})".format(book=self)

    class ReadonlyMeta:
        # Fields that must not be modified after creation.
        readonly = ["ref", "iban"]
|
A completely different model
|
62598ffe462c4b4f79dbc42a
|
class _Module: <NEW_LINE> <INDENT> def GetIDsOfNames(self,riid,rgszNames,cNames,lcid,rgDispId): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def GetTypeInfo(self,iTInfo,lcid,ppTInfo): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def GetTypeInfoCount(self,pcTInfo): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def Invoke(self,dispIdMember,riid,lcid,wFlags,pDispParams,pVarResult,pExcepInfo,puArgErr): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __init__(self,*args): <NEW_LINE> <INDENT> pass
|
Exposes the System.Reflection.Module class to unmanaged code.
|
62598ffe15fb5d323ce7f76a
|
class XLSReader(DataReader):
    """XLS/XLSX file's reader (reads the first sheet only)."""

    types = ("xls", "xlsx")

    def __init__(self, filename=None):
        super(XLSReader, self).__init__(filename)
        if not xlrd:
            # xlrd is an optional dependency; warn loudly and leave the
            # reader unusable (readln will fail on the missing sheet).
            for warning_line in (
                    '**********************************************************',
                    'You need to install "xlrd" first to import xls/xlsx files!',
                    'You can use "pip install xlrd" to install it! ',
                    '**********************************************************'):
                print(warning_line)
            return
        self.sheet = None
        self.row_pos = 0
        if filename:
            self.sheet = xlrd.open_workbook(filename).sheet_by_index(0)

    def readln(self):
        """Return the next row's cell values; raise StopIteration at EOF."""
        if not self.sheet:
            raise StopIteration
        if self.row_pos >= self.sheet.nrows:
            raise StopIteration
        current_row = self.row_pos
        self.row_pos += 1
        return self.sheet.row_values(current_row)
|
XLS/XLSX file's reader.
|
62598ffe187af65679d2a10d
|
class NodeStats(InternalTelemetryDevice):
    """Gathers statistics via the Elasticsearch nodes stats API."""

    def __init__(self, config, metrics_store):
        super().__init__(config, metrics_store)
        # Set later by attach_to_cluster(); stats are only gathered once set.
        self.cluster = None

    def attach_to_cluster(self, cluster):
        # Remember the cluster handle so on_benchmark_stop() can query it.
        self.cluster = cluster

    def on_benchmark_stop(self, phase):
        # Only gather stats at the very end of the whole benchmark
        # (phase is None), not after each individual phase.
        if self.cluster and phase is None:
            logger.info("Gathering nodes stats")
            stats = self.cluster.nodes_stats(metric="_all", level="shards")
            total_old_gen_collection_time = 0
            total_young_gen_collection_time = 0
            nodes = stats["nodes"]
            for node in nodes.values():
                node_name = node["name"]
                gc = node["jvm"]["gc"]["collectors"]
                # Cumulative per-node GC times, in milliseconds.
                old_gen_collection_time = gc["old"]["collection_time_in_millis"]
                young_gen_collection_time = gc["young"]["collection_time_in_millis"]
                self.metrics_store.put_value_node_level(node_name, "node_old_gen_gc_time", old_gen_collection_time, "ms")
                self.metrics_store.put_value_node_level(node_name, "node_young_gen_gc_time", young_gen_collection_time, "ms")
                total_old_gen_collection_time += old_gen_collection_time
                total_young_gen_collection_time += young_gen_collection_time
            # Cluster-wide totals are the sums over all nodes.
            self.metrics_store.put_value_cluster_level("node_total_old_gen_gc_time", total_old_gen_collection_time, "ms")
            self.metrics_store.put_value_cluster_level("node_total_young_gen_gc_time", total_young_gen_collection_time, "ms")
|
Gathers statistics via the Elasticsearch nodes stats API
|
62598ffe0a366e3fb87dd41b
|
class Message(object):
    """Base message class.

    Attributes:
        message: The message content.
        numbers: A list of numbers to send the message to (or None).
        group_id: The group id to send the message to (or None).
        schedule_time: Optional ``datetime`` specifying when to send.
        optouts: Whether to check recipients against an opt-out list.

    Raises:
        Exception: if neither ``numbers`` nor ``group_id`` is given.
        TypeError: if ``schedule_time`` is not a datetime, or ``optouts``
            is not a bool.
    """

    def __init__(self, message, numbers=None, group_id=None,
                 schedule_time=None, optouts=False):
        self.message = message
        if numbers is None and group_id is None:
            raise Exception('numbers or group_id must be set.')
        self.numbers = numbers
        self.group_id = group_id
        # (A redundant second `self.numbers = numbers` assignment that
        # followed the if/else in the original was removed.)
        if schedule_time is not None and not isinstance(schedule_time, datetime):
            raise TypeError("If set 'schedule_time' should be a 'datetime'.")
        self.schedule_time = schedule_time
        if not isinstance(optouts, bool):
            raise TypeError("If set 'optouts' must be a boolean.")
        self.optouts = optouts

    def message_size(self):
        """Return the message size in UTF-8 encoded bytes."""
        return len(self.message.encode('utf8'))

    def __len__(self):
        return self.message_size()
|
Base message class
Attributes:
message: The message content.
numbers: A List of numbers to send the message to.
group_id: The group_id to send the message to. Overrides numbers.
schedule_time: A datetime object specifying when to send the message.
optouts: A Boolean setting whether to check the recipients against an optout list.
|
62598ffe4c3428357761acd2
|
class DbfMemoFieldDef(DbfFieldDef):
    """Definition of the memo field.

    Note: memo fields aren't currently completely supported — both
    codec hooks deliberately raise NotImplementedError.
    """

    typeCode = "M"
    length = 10
    defaultValue = " " * 10

    def decodeValue(self, value):
        """Decoding memo values is not supported yet."""
        raise NotImplementedError

    def encodeValue(self, value):
        """Encoding memo values is not supported yet."""
        raise NotImplementedError
|
Definition of the memo field.
Note: memos aren't currently completely supported.
|
62598ffe462c4b4f79dbc432
|
class ShoppingCartViewSet(XBModelViewSet):
    """Shopping-cart viewset.

    list: return the current user's cart items, or only the item count
        when the ``num`` query parameter is present.
    create: add an item to the cart (decrements the goods stock).
    delete: remove a cart record (restores the goods stock).
    update: change a record's quantity (adjusts stock by the delta).
    """

    permission_classes = [IsAuthenticated, IsOwnerOrReadOnly]
    authentication_classes = [JSONWebTokenAuthentication, SessionAuthentication]
    serializer_class = ShopCartListSerializer

    def get_queryset(self):
        # With ?num=... only the number of cart entries is requested.
        num = self.request.query_params.get("num", None)
        if num:
            return [{"nums": ShoppingCart.objects.filter(user=self.request.user).count()}]
        else:
            return ShoppingCart.objects.filter(user=self.request.user)

    def list(self, request, *args, **kwargs):
        # BUGFIX: `method` was assigned inside the `try`; if an earlier
        # statement raised, the handler itself crashed with a NameError.
        # It is now computed up front, and the bare `except:` was
        # narrowed to `except Exception:`.
        method = self.request.META["REQUEST_METHOD"].lower()
        try:
            self.check_object_permissions(self.request, 'a')
            queryset = self.filter_queryset(self.get_queryset())
            page = self.paginate_queryset(queryset)
            if page is not None:
                serializer = self.get_serializer(page, many=True)
                return self.get_paginated_response(serializer.data)
            # (The original duplicated this call in both branches of an
            # `if num:` test; the branches were identical.)
            serializer = self.get_serializer(queryset, many=True)
            return CodeStatus(type=method, data=serializer.data)
        except Exception:
            return CodeStatus(type=method, data=None)

    def perform_create(self, serializer):
        # Adding to the cart reserves stock: decrement goods inventory.
        shop_cart = serializer.save()
        goods = shop_cart.goods
        goods.goods_num -= shop_cart.nums
        goods.save()

    def perform_destroy(self, instance):
        # Removing from the cart releases the reserved stock.
        goods = instance.goods
        goods.goods_num += instance.nums
        goods.save()
        instance.delete()

    def perform_update(self, serializer):
        # Adjust stock by the change in quantity (new - old).
        existed_record = ShoppingCart.objects.get(id=serializer.instance.id)
        existed_nums = existed_record.nums
        saved_record = serializer.save()
        nums = saved_record.nums - existed_nums
        goods = saved_record.goods
        goods.goods_num -= nums
        goods.save()

    def get_serializer_class(self):
        # Different serializers for create vs. count-only vs. listing.
        if self.action == 'create':
            return ShopCartCreateSerializer
        elif self.action == "list" and self.request.query_params.get("num", None):
            return CartNumsSerializers
        else:
            return ShopCartListSerializer
|
list:
获取购物车详情
create:
加入购物车
delete:
删除购物记录
update:
更新购物记录
|
62598ffe15fb5d323ce7f76e
|
class UserImageForm(forms.ModelForm):
    """Form used for changing the profile picture.

    A model form containing a single field, ``profile_img``.
    """

    class Meta:
        model = UserProfile
        fields = ['profile_img']
        labels = {'profile_img': _("Profile image")}

    def clean_profile_img(self):
        # Normalise the uploaded image to 60x60 before saving.
        uploaded = self.cleaned_data["profile_img"]
        return resize_img(uploaded, 60, 60)
|
Form used for changing the profile picture
It is a model form, containing only one field - profile_img
|
62598ffe462c4b4f79dbc434
|
class UserCreationForm(forms.ModelForm):
    """User sign-up form.

    The repeated password is not part of the model, so both password
    entries are implemented as extra form fields.
    """

    email = forms.EmailField(label='Email', widget=forms.EmailInput({'placeholder': 'Enter Email:'}), required=True)
    password1 = forms.CharField(label='Password', widget=forms.PasswordInput)
    password2 = forms.CharField(label='Password confirmation', widget=forms.PasswordInput)

    class Meta:
        model = User
        fields = ('name', 'username', 'email')

    def clean_password2(self):
        """Ensure the two password entries match."""
        pw1 = self.cleaned_data.get("password1")
        pw2 = self.cleaned_data.get("password2")
        if pw1 and pw2 and pw1 != pw2:
            raise forms.ValidationError("Passwords don't match")
        return pw2

    def save(self, commit=True):
        """Create the user with a properly hashed password."""
        user = super(UserCreationForm, self).save(commit=False)
        user.set_password(self.cleaned_data["password1"])
        if commit:
            user.save()
        return user
|
The repeated password is not part of the model, so both password entries are implemented as extra form fields.
|
62598ffe15fb5d323ce7f770
|
class InlineResponse2002(object):
    """NOTE: This class is auto generated by the swagger code generator program.

    Do not edit the class manually.
    """

    # Maps attribute name -> declared swagger type.
    swagger_types = {
        'code': 'int',
        'error': 'str',
        'message': 'list[InlineResponse2002Message]'
    }

    # Maps attribute name -> JSON key used on the wire.
    attribute_map = {
        'code': 'code',
        'error': 'error',
        'message': 'message'
    }

    def __init__(self, code=None, error=None, message=None):
        """InlineResponse2002 - a model defined in Swagger."""
        self._code = None
        self._error = None
        self._message = None
        self.discriminator = None
        # Only assign through the property setters when a value was given,
        # so unset attributes stay None.
        if code is not None:
            self.code = code
        if error is not None:
            self.error = error
        if message is not None:
            self.message = message

    @property
    def code(self):
        """Gets the code of this InlineResponse2002."""
        return self._code

    @code.setter
    def code(self, code):
        """Sets the code of this InlineResponse2002."""
        self._code = code

    @property
    def error(self):
        """Gets the error of this InlineResponse2002."""
        return self._error

    @error.setter
    def error(self, error):
        """Sets the error of this InlineResponse2002."""
        self._error = error

    @property
    def message(self):
        """Gets the message of this InlineResponse2002."""
        return self._message

    @message.setter
    def message(self, message):
        """Sets the message of this InlineResponse2002."""
        self._message = message

    def to_dict(self):
        """Returns the model properties as a dict (recursively)."""
        result = {}
        for attr, _ in six.iteritems(self.swagger_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                # Convert contained models element-wise.
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                # Convert contained models value-wise.
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value
        # Merge in raw dict items when the model itself subclasses dict.
        if issubclass(InlineResponse2002, dict):
            for key, value in self.items():
                result[key] = value
        return result

    def to_str(self):
        """Returns the string representation of the model."""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`."""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal."""
        if not isinstance(other, InlineResponse2002):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Returns true if both objects are not equal."""
        return not self == other
|
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
|
62598ffe187af65679d2a110
|
class SavedMetricsException(Exception):
    """Problem recovering the saved metric list from the statedir.

    This will always happen on the first run (no state exists yet) and
    should be ignored then. On subsequent runs it probably indicates a
    config problem, e.g. the statedir cannot be written.
    """
|
Raise this exception if there's a problem recovering the saved metric
list from the statedir. This will always happen on the first run, and
should be ignored. On subsequent runs, it probably means a config
problem where the statedir can't be written or something.
|
62598ffe187af65679d2a114
|
class DirModel(UsageStats, QtCore.QAbstractListModel):
    """Directory list browser.

    A Qt list model over the current directory's audio files, with
    drag-and-drop support and usage-based "recent" / "most popular" views
    provided by the UsageStats mixin.
    """

    def __init__(self, *args):
        UsageStats.__init__(self)
        QtCore.QAbstractListModel.__init__(self, *args)
        # 0 = directory listing, 1 = recent, 2 = most popular.
        self._viewMode = 0
        self.dir = QtCore.QDir()
        self.dir.setFilter(QtCore.QDir.AllDirs|QtCore.QDir.Files)
        # Restrict the listing to common audio formats.
        self.dir.setNameFilters(("*wav", "*aif", "*aiff", "*flac", "*wv", "*mp3"))
        self.dir.setSorting(self.dir.Name|self.dir.DirsFirst)
        self.setCurrentDir("/")
        self.iconProvider = QtGui.QFileIconProvider()

    def rowCount(self, mi):
        return len(self._filelist)

    def data(self, mi, role=QtCore.Qt.DisplayRole):
        # File name for display; folder/file icon for decoration.
        if role == QtCore.Qt.DisplayRole:
            return QtCore.QVariant(self._filelist[mi.row()])
        elif role == QtCore.Qt.DecorationRole:
            if self.isDir(mi):
                return QtCore.QVariant(self.iconProvider.icon(self.iconProvider.Folder))
            else:
                return QtCore.QVariant(self.iconProvider.icon(self.iconProvider.File))
        else:
            return QtCore.QVariant()

    def handleClick(self, mi):
        # Descend into the clicked entry when it is a directory.
        if self.dir.cd(self._filelist[mi.row()]):
            self._filelist = self.dir.entryList()
            # Drops the first entry — presumably the "." directory; confirm.
            self._filelist.removeAt(0)
            self.reset()

    def flags(self, mi):
        # Only files (not directories) may be dragged out of the view.
        defaultFlags = QtCore.QAbstractListModel.flags(self,mi)
        if not self.isDir(mi):
            return QtCore.Qt.ItemIsDragEnabled | defaultFlags
        else:
            return defaultFlags

    def mimeTypes(self):
        return ["text/plain", "text/uri-list"]

    def mimeData(self, milist):
        # Record an access for every dragged file (feeds UsageStats).
        [self.add(self.abspath(
            self._filelist[mi.row()])) for mi in milist]
        mimeData = QtCore.QMimeData()
        # Export the dragged files as file:// URLs.
        li = [QtCore.QUrl("".join(("file://", self.abspath(
            self._filelist[mi.row()])))) for mi in milist]
        mimeData.setUrls(li)
        return mimeData

    def isDir(self, mi):
        return os.path.isdir(self.abspath(self._filelist[mi.row()]))

    def abspath(self, f):
        # Absolute path of an entry in the current directory.
        return os.path.join(str(self.dir.path()), str(f))

    def setNameFilters(self, filters):
        self.dir.setNameFilters(filters)

    def viewFiles(self):
        # Show the plain directory listing.
        self._viewMode = 0
        self._filelist = self.dir.entryList()
        self._filelist.removeAt(0)
        self.reset()

    def viewRecent(self):
        # Show recently used files (from UsageStats).
        self._viewMode = 1
        self._filelist = self.getRecent()
        self.reset()

    def viewMostPopular(self):
        # Show the 50 most accessed files (from UsageStats).
        self._viewMode = 2
        self._filelist = self.getMostAccessed(50)
        self.reset()

    def setCurrentDir(self, dir):
        self.dir.cd(dir)
        # NOTE(review): `is 0` relies on CPython's small-int caching;
        # `== 0` would be the safe comparison. Left unchanged here.
        if self._viewMode is 0:
            self.viewFiles()
|
Directory list browser
|
62598ffe0a366e3fb87dd42b
|
class Link(Resource):
    """Web link."""

    # Hyperlink reference (the URL itself).
    href = wtypes.text
    # Link target.
    target = wtypes.text
    # Relation type of the link (e.g. "self").
    rel = wtypes.text
|
Web link.
|
62598ffe462c4b4f79dbc441
|
class producer:
    """Iterator version of the following generator:

    def producer():
        lst = flow.wrap(slowlist([1,2,3]))
        nam = flow.wrap(slowlist(_onetwothree))
        while True:
            yield lst
            yield nam
            yield (lst.next(), nam.next())
    """

    def __iter__(self):
        # Two wrapped slow iterables consumed in lockstep.
        self.lst = flow.wrap(slowlist([1,2,3]))
        self.nam = flow.wrap(slowlist(_onetwothree))
        # `self.next` is rebound after every call; each yield_* method
        # emulates one of the generator's resume points (a hand-rolled
        # three-state machine cycling lst -> nam -> results).
        self.next = self.yield_lst
        return self

    def yield_lst(self):
        # State 1: hand back the wrapped lst, then move to state 2.
        self.next = self.yield_nam
        return self.lst

    def yield_nam(self):
        # State 2: hand back the wrapped nam, then move to state 3.
        self.next = self.yield_results
        return self.nam

    def yield_results(self):
        # State 3: emit one value from each stream, then restart the cycle.
        self.next = self.yield_lst
        return (self.lst.next(), self.nam.next())
|
iterator version of the following generator...
def producer():
lst = flow.wrap(slowlist([1,2,3]))
nam = flow.wrap(slowlist(_onetwothree))
while True:
yield lst
yield nam
yield (lst.next(),nam.next())
|
62598ffe627d3e7fe0e078dc
|
class GeohashLocator(ALocator):
    """Locate points within a radius of a center using geohashing.

    Fast, but approximate: results may include locations slightly more
    distant than the requested radius, and geohash adjacency breaks down
    near the poles, the equator, and the prime/+-180 meridians. Consider
    HybridLocator, which post-filters with a precise BasicLocator and
    falls back to it around those regions.
    """

    def __init__(self, itemCollection):
        # BUGFIX: comparison to None used `==`; identity check is correct.
        if itemCollection is None:
            raise ValueError('itemCollection must not be None')
        self.container = GeohashContainer(itemCollection)

    def Search(self, lat, lng, radiusMeters):
        """Return all items within approximately radiusMeters of (lat, lng).

        Raises ValueError for out-of-range coordinates or a non-positive
        radius.
        """
        if abs(lat) > 90 or abs(lng) > 180 or radiusMeters <= 0:
            raise ValueError('invalid latitude, longitude or radius')
        # Pad the radius by 10% and use precision-7 geohashes.
        # NOTE(review): the 1.1 padding factor is inherited behaviour —
        # confirm it matches the intended over-approximation.
        proximityHashString = proximityhash.create_geohash(
            lat, lng, radiusMeters * 1.1, 7, True)
        results = []
        for geohash in proximityHashString.split(","):
            results.extend(self.container.Retrieve(geohash))
        return results

    def Add(self, key, value):
        """Register a new item under its geohash bucket."""
        self.container.Add(key, value)
|
A class which can be used to locate points within specified radius of a center
This particular type of Locator uses geohashing for fast searches.
However, results usually also include locations which are slightly more distant than desired.
Consider using HybridLocator which runs a search with GeohashLocator and further filters the initial larger set of results with a more precise distance based BasicLocator.
In addition, geohash values of close locations differ significantly around polar and equatorial regions as well as prime meridian and +-180 meridians.
Consider using HybridLocator which falls back to using BasicLocator when a search is being conducted around the specified regions.
|
62598ffe0a366e3fb87dd42f
|
class GemRequirement(PackageRequirement):
    """Requirement for a Ruby gem.

    This subclass of ``PackageRequirement`` fixes the package manager to
    ``gem`` and provides install/check commands for the requirement.
    """

    REQUIREMENTS = {ExecutableRequirement('gem')}

    def __init__(self, package, version="", require=""):
        PackageRequirement.__init__(self, 'gem', package, version)
        self.require = require

    def install_command(self):
        """Return the ``gem install`` command for this package."""
        spec = self.package + ':' + self.version if self.version else self.package
        return ['gem', 'install', spec]

    def is_installed(self):
        """Check via ``gem list -i`` whether the gem is present."""
        proc = run('gem list -i ' + self.package,
                   stdout=Capture(), stderr=Capture())
        return not proc.returncode
|
This class is a subclass of ``PackageRequirement``. It specifies the proper
type for ``ruby`` packages automatically and provide a function to check
for the requirement.
|
62598ffe187af65679d2a118
|
class DefaultClause(FetchedValue):
    """A DDL-specified DEFAULT column value.

    A :class:`.FetchedValue` that also generates a "DEFAULT" clause when
    "CREATE TABLE" is emitted, e.g.::

        Column('foo', Integer, server_default="50")

    is equivalent to::

        Column('foo', Integer, DefaultClause("50"))
    """

    has_argument = True

    def __init__(self, arg, for_update=False, _reflected=False):
        # Only plain strings and SQL expression constructs are accepted
        # as the default argument.
        util.assert_arg_type(arg, (util.string_types[0],
                                   expression.ClauseElement,
                                   expression.TextClause), 'arg')
        super(DefaultClause, self).__init__(for_update)
        self.arg = arg
        # True when this default was read back from the database during
        # table reflection rather than specified by the user.
        self.reflected = _reflected

    def __repr__(self):
        return "DefaultClause(%r, for_update=%r)" % (self.arg, self.for_update)
|
A DDL-specified DEFAULT column value.
:class:`.DefaultClause` is a :class:`.FetchedValue`
that also generates a "DEFAULT" clause when
"CREATE TABLE" is emitted.
:class:`.DefaultClause` is generated automatically
whenever the ``server_default``, ``server_onupdate`` arguments of
:class:`.Column` are used. A :class:`.DefaultClause`
can be passed positionally as well.
For example, the following::
Column('foo', Integer, server_default="50")
Is equivalent to::
Column('foo', Integer, DefaultClause("50"))
|
62598fff4c3428357761ace6
|
class ModelChoiceField(ChoiceField):
    """A ChoiceField whose choices are a model QuerySet."""

    default_error_messages = {
        'invalid_choice': _('Select a valid choice. That choice is not one of'
                            ' the available choices.'),
    }
    # Iterator class producing (value, label) pairs from the queryset.
    iterator = ModelChoiceIterator

    def __init__(self, queryset, empty_label="---------", required=True,
                 widget=None, label=None, initial=None, help_text='',
                 to_field_name=None, limit_choices_to=None, *args, **kwargs):
        # The empty "---------" option is dropped when the field is
        # required and already has an initial value preselected.
        if required and (initial is not None):
            self.empty_label = None
        else:
            self.empty_label = empty_label
        # Call Field.__init__ (not ChoiceField's): choices come from the
        # queryset property below, not from a static choices argument.
        Field.__init__(self, required, widget, label, initial, help_text,
                       *args, **kwargs)
        self.queryset = queryset
        self.limit_choices_to = limit_choices_to
        self.to_field_name = to_field_name

    def get_limit_choices_to(self):
        # limit_choices_to may be a callable returning the actual filter.
        if callable(self.limit_choices_to):
            return self.limit_choices_to()
        return self.limit_choices_to

    def __deepcopy__(self, memo):
        result = super(ChoiceField, self).__deepcopy__(memo)
        # Re-assign through the property setter so the copied widget's
        # choices are rebound to the copied field.
        result.queryset = result.queryset
        return result

    def _get_queryset(self):
        return self._queryset

    def _set_queryset(self, queryset):
        self._queryset = queryset
        # Keep the widget's choices in sync with the (new) queryset.
        self.widget.choices = self.choices

    queryset = property(_get_queryset, _set_queryset)

    def label_from_instance(self, obj):
        # Subclasses may override to customise option labels.
        return smart_text(obj)

    def _get_choices(self):
        # Honour explicitly-set choices; otherwise return a fresh lazy
        # iterator over the queryset on each access.
        if hasattr(self, '_choices'):
            return self._choices
        return self.iterator(self)

    choices = property(_get_choices, ChoiceField._set_choices)

    def prepare_value(self, value):
        # Model instances are reduced to the lookup field value (or pk).
        if hasattr(value, '_meta'):
            if self.to_field_name:
                return value.serializable_value(self.to_field_name)
            else:
                return value.pk
        return super(ModelChoiceField, self).prepare_value(value)

    def to_python(self, value):
        if value in self.empty_values:
            return None
        try:
            key = self.to_field_name or 'pk'
            value = self.queryset.get(**{key: value})
        except (ValueError, TypeError, self.queryset.model.DoesNotExist):
            raise ValidationError(self.error_messages['invalid_choice'],
                                  code='invalid_choice')
        return value

    def validate(self, value):
        # Skip ChoiceField.validate: membership is enforced by to_python.
        return Field.validate(self, value)

    def has_changed(self, initial, data):
        initial_value = initial if initial is not None else ''
        data_value = data if data is not None else ''
        return force_text(self.prepare_value(initial_value)) != force_text(data_value)
|
A ChoiceField whose choices are a model QuerySet.
|
62598fff0a366e3fb87dd433
|
class MellanoxDirectPlugin(plugin.PluginBase):
    """A VIF type that plugs the interface directly into the Mellanox
    physical network fabric.
    """

    def __init__(self, **config):
        processutils.configure(**config)

    def get_supported_vifs(self):
        # Single supported VIF spec, min/max version 1.0.
        return set([objects.PluginVIFSupport(PLUGIN_NAME, '1.0', '1.0')])

    def plug(self, instance, vif):
        """Attach the VIF to the fabric via `ebrctl add-port`.

        Raises NetworkMissingPhysicalNetwork when the VIF carries no
        physical_network.
        """
        vnic_mac = vif.address
        device_id = instance.uuid
        fabric = vif.physical_network
        if not fabric:
            raise exception.NetworkMissingPhysicalNetwork(
                network_uuid=vif.network.id)
        dev_name = vif.devname_with_prefix(_DEV_PREFIX_ETH)
        processutils.execute('ebrctl', 'add-port', vnic_mac, device_id, fabric,
                             PLUGIN_NAME, dev_name, run_as_root=True)

    def unplug(self, vif):
        """Detach the VIF from the fabric via `ebrctl del-port`.

        Raises NetworkMissingPhysicalNetwork when the VIF carries no
        physical_network.
        """
        vnic_mac = vif.address
        fabric = vif.physical_network
        if not fabric:
            raise exception.NetworkMissingPhysicalNetwork(
                network_uuid=vif.network.id)
        processutils.execute('ebrctl', 'del-port', fabric, vnic_mac,
                             run_as_root=True)
|
A VIF type that plugs the interface directly into the Mellanox physical
network fabric.
|
62598fff4c3428357761ace8
|
class ExtendingSender(Sender, ABC):
    """Base class for senders that wrap and extend another sender.

    Parameters
    ----------
    sender
        Request sender to delegate to; a :class:`SyncSender` is used
        when none is specified.
    """

    def __init__(self, sender: Optional[Sender]):
        # Falsy senders (including None) are replaced with a SyncSender.
        self.sender = SyncSender() if not sender else sender

    @property
    def is_async(self) -> bool:
        """Whether the wrapped sender is asynchronous."""
        return self.sender.is_async

    def close(self) -> Union[None, Coroutine[None, None, None]]:
        """Close the wrapped sender (may return an awaitable)."""
        return self.sender.close()
|
Base class for senders that extend other senders.
Parameters
----------
sender
request sender, :class:`SyncSender` if not specified
|
62598fff627d3e7fe0e078e2
|
class BatteryNotifier(threading.Thread):
    """Thread to notify about battery level.

    Notifications are emitted while ``self.event`` is set, rate limited
    to one per INTERVAL_FREQ seconds. Without the optional notify2
    library, levels are only logged.
    """

    def __init__(self, parent, device_id, device_name):
        super(BatteryNotifier, self).__init__()
        # When set, run() keeps emitting battery notifications.
        self.event = threading.Event()
        if notify2 is not None:
            notify2.init('razer_daemon')
        self._logger = logging.getLogger('razer.device{0}.batterynotifier'.format(device_id))
        self._shutdown = False
        self._device_name = device_name
        # Callable on the parent device that returns the battery percent.
        self._get_battery_func = parent.getBattery
        if notify2 is not None:
            # NOTE(review): summary "{0}" looks like a leftover template
            # placeholder; it is overwritten by update() before showing.
            self._notification = notify2.Notification(summary="{0}")
            self._notification.set_timeout(NOTIFY_TIMEOUT)
        # Epoch start so the very first notification is never rate limited.
        self._last_notify_time = datetime.datetime(1970, 1, 1)

    @property
    def shutdown(self):
        # Thread stop flag; setting it True ends run().
        return self._shutdown

    @shutdown.setter
    def shutdown(self, value):
        self._shutdown = value

    def notify_battery(self):
        now = datetime.datetime.now()
        # Rate limit: at most one notification per INTERVAL_FREQ seconds.
        if (now - self._last_notify_time).seconds > INTERVAL_FREQ:
            self._last_notify_time = now
            battery_level = self._get_battery_func()
            if battery_level == -1.0:
                # -1.0 signals a failed read; retry once after a pause.
                time.sleep(0.2)
                battery_level = self._get_battery_func()
            if battery_level < 10.0:
                # Low battery: include a charge prompt and low-battery icon.
                if notify2 is not None:
                    self._notification.update(summary="{0} Battery at {1:.1f}%".format(self._device_name, battery_level), message='Please charge your device', icon='notification-battery-low')
                    self._notification.show()
            else:
                if notify2 is not None:
                    self._notification.update(summary="{0} Battery at {1:.1f}%".format(self._device_name, battery_level))
                    self._notification.show()
            if notify2 is None:
                # No desktop notifications available; log instead.
                self._logger.debug("{0} Battery at {1:.1f}%".format(self._device_name, battery_level))

    def run(self):
        # Poll every 0.1 s; only notify while the event flag is set.
        while not self._shutdown:
            if self.event.is_set():
                self.notify_battery()
            time.sleep(0.1)
        self._logger.debug("Shutting down battery notifier")
|
Thread to notify about battery
|
62598fff3cc13d1c6d466190
|
class IntegerSlider(Slider):
    """A type of :class:`Slider` that accepts only integer values/bounds."""

    DISCRETE = True

    @staticmethod
    def value_to_number(x):
        """Validate that *x* is an int and return it unchanged."""
        if isinstance(x, int):
            return x
        raise TypeError('expected int, got {}'.format(type(x).__name__))

    @staticmethod
    def value_from_number(x):
        """Coerce a slider position back to an int value."""
        return int(x)
|
A type of :class:`Slider` that accepts only integer values/bounds.
|
62598fff15fb5d323ce7f788
|
class ArtemiaMap: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self._graph = None <NEW_LINE> self._cards = None <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_place_cards(cls, cards: List[MapPlaceCard]) -> 'ArtemiaMap': <NEW_LINE> <INDENT> cards_len = len(cards) <NEW_LINE> if cards_len != 10: <NEW_LINE> <INDENT> raise ArtemiaMapSetUpError( f': Fed Place cards must be exactly 10. {cards_len} was were ' 'given.' ) <NEW_LINE> <DEDENT> map_ = cls() <NEW_LINE> graph: Dict[int, List[int]] = dict() <NEW_LINE> for i in range(10): <NEW_LINE> <INDENT> vertical_neighbor = i + 5 if i < 5 else i - 5 <NEW_LINE> if i in (0, 5): <NEW_LINE> <INDENT> horizontal_neighbors = [i + 1] <NEW_LINE> <DEDENT> elif i in (4, 9): <NEW_LINE> <INDENT> horizontal_neighbors = [i - 1] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> horizontal_neighbors = [i - 1, i + 1] <NEW_LINE> <DEDENT> graph.setdefault(i, []).extend( [*horizontal_neighbors, vertical_neighbor] ) <NEW_LINE> <DEDENT> map_._graph = graph <NEW_LINE> map_._cards = deepcopy(cards) <NEW_LINE> return map_ <NEW_LINE> <DEDENT> def _get_card_index(self, id_: int) -> int: <NEW_LINE> <INDENT> for i, card in enumerate(self._cards): <NEW_LINE> <INDENT> if card.number == id_: <NEW_LINE> <INDENT> return i <NEW_LINE> <DEDENT> <DEDENT> raise ValueError(f'No card corresponding to the given id {id_}.') <NEW_LINE> <DEDENT> def get_card(self, id_: int) -> MapPlaceCard: <NEW_LINE> <INDENT> return self._cards[self._get_card_index(id_)] <NEW_LINE> <DEDENT> def get_neighbors_by_id(self, id_: int) -> Iterable[MapPlaceCard]: <NEW_LINE> <INDENT> if not 1 <= id_ <= 10: <NEW_LINE> <INDENT> raise ValueError( f'map place id must be between 1 and 10. {id} was given.' 
) <NEW_LINE> <DEDENT> card_index = self._get_card_index(id_) <NEW_LINE> return map(lambda x: self._cards[x], self._graph[card_index]) <NEW_LINE> <DEDENT> def get_neighbors( self, map_place_card: MapPlaceCard ) -> Iterable[MapPlaceCard]: <NEW_LINE> <INDENT> return self.get_neighbors_by_id(map_place_card.number)
|
The Artemia map class.
|
62598fff462c4b4f79dbc44f
|
class InstrumentationManager(object): <NEW_LINE> <INDENT> def __init__(self, class_): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def manage(self, class_, manager): <NEW_LINE> <INDENT> setattr(class_, '_default_class_manager', manager) <NEW_LINE> <DEDENT> def dispose(self, class_, manager): <NEW_LINE> <INDENT> delattr(class_, '_default_class_manager') <NEW_LINE> <DEDENT> def manager_getter(self, class_): <NEW_LINE> <INDENT> def get(cls): <NEW_LINE> <INDENT> return cls._default_class_manager <NEW_LINE> <DEDENT> return get <NEW_LINE> <DEDENT> def instrument_attribute(self, class_, key, inst): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def post_configure_attribute(self, class_, key, inst): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def install_descriptor(self, class_, key, inst): <NEW_LINE> <INDENT> setattr(class_, key, inst) <NEW_LINE> <DEDENT> def uninstall_descriptor(self, class_, key): <NEW_LINE> <INDENT> delattr(class_, key) <NEW_LINE> <DEDENT> def install_member(self, class_, key, implementation): <NEW_LINE> <INDENT> setattr(class_, key, implementation) <NEW_LINE> <DEDENT> def uninstall_member(self, class_, key): <NEW_LINE> <INDENT> delattr(class_, key) <NEW_LINE> <DEDENT> def instrument_collection_class(self, class_, key, collection_class): <NEW_LINE> <INDENT> return collections.prepare_instrumentation(collection_class) <NEW_LINE> <DEDENT> def get_instance_dict(self, class_, instance): <NEW_LINE> <INDENT> return instance.__dict__ <NEW_LINE> <DEDENT> def initialize_instance_dict(self, class_, instance): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def install_state(self, class_, instance, state): <NEW_LINE> <INDENT> setattr(instance, '_default_state', state) <NEW_LINE> <DEDENT> def remove_state(self, class_, instance): <NEW_LINE> <INDENT> delattr(instance, '_default_state') <NEW_LINE> <DEDENT> def state_getter(self, class_): <NEW_LINE> <INDENT> return lambda instance: getattr(instance, '_default_state') <NEW_LINE> <DEDENT> def dict_getter(self, class_): 
<NEW_LINE> <INDENT> return lambda inst: self.get_instance_dict(class_, inst)
|
User-defined class instrumentation extension.
:class:`.InstrumentationManager` can be subclassed in order
to change
how class instrumentation proceeds. This class exists for
the purposes of integration with other object management
frameworks which would like to entirely modify the
instrumentation methodology of the ORM, and is not intended
for regular usage. For interception of class instrumentation
events, see :class:`.InstrumentationEvents`.
The API for this class should be considered as semi-stable,
and may change slightly with new releases.
.. versionchanged:: 0.8
:class:`.InstrumentationManager` was moved from
:mod:`sqlalchemy.orm.instrumentation` to
:mod:`sqlalchemy.ext.instrumentation`.
|
62598fff187af65679d2a11e
|
class MatchLSTMAttention(torch.nn.Module): <NEW_LINE> <INDENT> def __init__(self, input_p_dim, input_q_dim, output_dim): <NEW_LINE> <INDENT> super(MatchLSTMAttention, self).__init__() <NEW_LINE> self.input_p_dim = input_p_dim <NEW_LINE> self.input_q_dim = input_q_dim <NEW_LINE> self.output_dim = output_dim <NEW_LINE> self.nlayers = len(self.output_dim) <NEW_LINE> W_p = [torch.nn.Linear(self.input_p_dim if i == 0 else self.output_dim[i - 1], self.output_dim[i]) for i in range(self.nlayers)] <NEW_LINE> W_q = [torch.nn.Linear(self.input_q_dim, self.output_dim[i]) for i in range(self.nlayers)] <NEW_LINE> W_r = [torch.nn.Linear(self.output_dim[i], self.output_dim[i]) for i in range(self.nlayers)] <NEW_LINE> w = [torch.nn.Parameter(torch.FloatTensor(self.output_dim[i])) for i in range(self.nlayers)] <NEW_LINE> match_b = [torch.nn.Parameter(torch.FloatTensor(1)) for i in range(self.nlayers)] <NEW_LINE> self.W_p = torch.nn.ModuleList(W_p) <NEW_LINE> self.W_q = torch.nn.ModuleList(W_q) <NEW_LINE> self.W_r = torch.nn.ModuleList(W_r) <NEW_LINE> self.w = torch.nn.ParameterList(w) <NEW_LINE> self.match_b = torch.nn.ParameterList(match_b) <NEW_LINE> self.init_weights() <NEW_LINE> <DEDENT> def init_weights(self): <NEW_LINE> <INDENT> for i in range(self.nlayers): <NEW_LINE> <INDENT> torch.nn.init.xavier_uniform(self.W_p[i].weight.data, gain=1) <NEW_LINE> torch.nn.init.xavier_uniform(self.W_q[i].weight.data, gain=1) <NEW_LINE> torch.nn.init.xavier_uniform(self.W_r[i].weight.data, gain=1) <NEW_LINE> self.W_p[i].bias.data.fill_(0) <NEW_LINE> self.W_q[i].bias.data.fill_(0) <NEW_LINE> self.W_r[i].bias.data.fill_(0) <NEW_LINE> torch.nn.init.normal(self.w[i].data, mean=0, std=0.05) <NEW_LINE> self.match_b[i].data.fill_(1.0) <NEW_LINE> <DEDENT> <DEDENT> def forward(self, input_p, input_q, h_tm1, depth): <NEW_LINE> <INDENT> input_p = torch.nn.utils.rnn.pack_padded_sequence(input_p, passage_lengths) <NEW_LINE> input_q = torch.nn.utils.rnn.pack_padded_sequence(input_q, question_lengths) 
<NEW_LINE> G_p = self.W_p[depth](input_p).unsqueeze(1) <NEW_LINE> G_q = self.W_q[depth](input_q) <NEW_LINE> G_r = self.W_r[depth](h_tm1).unsqueeze(1) <NEW_LINE> G = F.tanh(G_p + G_q + G_r) <NEW_LINE> alpha = torch.matmul(G, self.w[depth]) <NEW_LINE> alpha = alpha + self.match_b[depth].unsqueeze(0) <NEW_LINE> alpha = F.softmax(alpha,axis=-1) <NEW_LINE> alpha = alpha.unsqueeze(1) <NEW_LINE> z = torch.bmm(alpha, input_q) <NEW_LINE> z = z.squeeze(1) <NEW_LINE> z = torch.cat([input_p, z], 1) <NEW_LINE> return z
|
input: p (passage): batch x inp_p
passage_lengths
q (question) batch x time x inp_q
question lengths
h_tm1: batch x out (Output hidden state)
depth: int
output: z: batch x inp_p+inp_q
|
62598fff3cc13d1c6d466194
|
class Schedule: <NEW_LINE> <INDENT> def __init__(self, start_date: datetime = None, end_date: datetime = None): <NEW_LINE> <INDENT> if start_date is not None: <NEW_LINE> <INDENT> start_date = pendulum.instance(start_date) <NEW_LINE> <DEDENT> if end_date is not None: <NEW_LINE> <INDENT> end_date = pendulum.instance(end_date) <NEW_LINE> <DEDENT> self.start_date = start_date <NEW_LINE> self.end_date = end_date <NEW_LINE> <DEDENT> def next(self, n: int, after: datetime = None) -> List[datetime]: <NEW_LINE> <INDENT> raise NotImplementedError("Must be implemented on Schedule subclasses") <NEW_LINE> <DEDENT> def serialize(self) -> tuple: <NEW_LINE> <INDENT> from prefect.serialization.schedule import ScheduleSchema <NEW_LINE> return ScheduleSchema().dump(self)
|
Base class for Schedules
Args:
- start_date (datetime, optional): an optional start date for the schedule
- end_date (datetime, optional): an optional end date for the schedule
|
62598fff462c4b4f79dbc451
|
class ClientApiClient(MetaApiClient): <NEW_LINE> <INDENT> def __init__(self, http_client, token: str, domain: str = 'agiliumtrade.agiliumtrade.ai'): <NEW_LINE> <INDENT> super().__init__(http_client, token, domain) <NEW_LINE> self._host = f'https://mt-client-api-v1.{domain}' <NEW_LINE> <DEDENT> async def get_hashing_ignored_field_lists(self) -> HashingIgnoredFieldLists: <NEW_LINE> <INDENT> opts = { 'url': f'{self._host}/hashing-ignored-field-lists', 'method': 'GET', } <NEW_LINE> return await self._httpClient.request(opts)
|
metaapi.cloud client API client (see https://metaapi.cloud/docs/client/)
|
62598fff0a366e3fb87dd43e
|
class HasVerbosity(Params): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(HasVerbosity, self).__init__() <NEW_LINE> self.verbose = Param(self, "verbose", "Stdout verbosity") <NEW_LINE> self._setDefault(verbose=0) <NEW_LINE> <DEDENT> def set_verbosity(self, verbose): <NEW_LINE> <INDENT> self._paramMap[self.verbose] = verbose <NEW_LINE> return self <NEW_LINE> <DEDENT> def get_verbosity(self): <NEW_LINE> <INDENT> return self.getOrDefault(self.verbose)
|
Parameter mixin for output verbosity
|
62598fff4c3428357761acf5
|
class FitExperiment(Experiment): <NEW_LINE> <INDENT> def __init__(self, **kwargs): <NEW_LINE> <INDENT> self.initialized = False <NEW_LINE> self.__dict__.update(kwargs) <NEW_LINE> req_param(self, ['dataset', 'model']) <NEW_LINE> opt_param(self, ['backend']) <NEW_LINE> opt_param(self, ['live'], False) <NEW_LINE> if self.backend is not None: <NEW_LINE> <INDENT> self.initialize(self.backend) <NEW_LINE> <DEDENT> <DEDENT> def initialize(self, backend): <NEW_LINE> <INDENT> if self.initialized: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> self.backend = backend <NEW_LINE> self.model.link() <NEW_LINE> self.backend.par.init_model(self.model, self.backend) <NEW_LINE> self.model.initialize(backend) <NEW_LINE> self.initialized = True <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> self.dataset.set_batch_size(self.model.batch_size) <NEW_LINE> self.dataset.backend = self.backend <NEW_LINE> self.dataset.load(backend=self.backend, experiment=self) <NEW_LINE> if hasattr(self.dataset, 'serialized_path') and ( self.dataset.serialized_path is not None): <NEW_LINE> <INDENT> logger.warning('Ability to serialize dataset has been deprecated.') <NEW_LINE> <DEDENT> if not hasattr(self.model, 'backend'): <NEW_LINE> <INDENT> self.model.backend = self.backend <NEW_LINE> <DEDENT> if not hasattr(self.model, 'epochs_complete'): <NEW_LINE> <INDENT> self.model.epochs_complete = 0 <NEW_LINE> <DEDENT> mfile = "" <NEW_LINE> if hasattr(self.model, 'deserialized_path'): <NEW_LINE> <INDENT> mfile = os.path.expandvars(os.path.expanduser( self.model.deserialized_path)) <NEW_LINE> <DEDENT> elif hasattr(self.model, 'serialized_path'): <NEW_LINE> <INDENT> mfile = os.path.expandvars(os.path.expanduser( self.model.serialized_path)) <NEW_LINE> <DEDENT> if os.access(mfile, os.R_OK): <NEW_LINE> <INDENT> if self.backend.is_distributed(): <NEW_LINE> <INDENT> raise NotImplementedError('Deserializing models not supported ' 'in distributed mode') <NEW_LINE> <DEDENT> self.model.set_params(deserialize(mfile)) 
<NEW_LINE> <DEDENT> elif mfile != "": <NEW_LINE> <INDENT> logger.info('Unable to find saved model %s, starting over', mfile) <NEW_LINE> <DEDENT> if self.model.epochs_complete >= self.model.num_epochs: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> if self.live: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> self.model.fit(self.dataset) <NEW_LINE> if hasattr(self.model, 'serialized_path'): <NEW_LINE> <INDENT> if self.backend.rank() == 0: <NEW_LINE> <INDENT> serialize(self.model.get_params(), self.model.serialized_path)
|
In this `Experiment`, a model is trained on a training dataset to
learn a set of parameters
Note that a pre-fit model may be loaded depending on serialization
parameters (rather than learning from scratch). The same may also apply to
the datasets specified.
Keyword Args:
backend (neon.backends.Backend): The backend to associate with the
datasets to use in this experiment
TODO:
add other params
|
62598fff627d3e7fe0e078ee
|
class AlueponIdentity(IanaInterfaceTypeIdentity): <NEW_LINE> <INDENT> _prefix = 'ianaift' <NEW_LINE> _revision = '2014-05-08' <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> IanaInterfaceTypeIdentity.__init__(self) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def _meta_info(): <NEW_LINE> <INDENT> from ydk.models.ietf._meta import _iana_if_type as meta <NEW_LINE> return meta._meta_table['AlueponIdentity']['meta_info']
|
Ethernet Passive Optical Networks (E\-PON).
|
62598fff462c4b4f79dbc459
|
class MaxMindGeoWebClient(geoip2.webservice.Client, GeoIpClientAdapter): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self._account_id = current_app.config.get('MAXMIND_WEB_ACCOUNT_ID') <NEW_LINE> self._license_key = current_app.config.get('MAXMIND_WEB_LICENSE_KEY') <NEW_LINE> self._host = current_app.config.get('MAXMIND_WEB_HOST') <NEW_LINE> if not self._account_id: <NEW_LINE> <INDENT> raise GeoIPClientError('MaxMind Account ID not set.') <NEW_LINE> <DEDENT> if not self._license_key: <NEW_LINE> <INDENT> raise GeoIPClientError('MaxMind License key not set.') <NEW_LINE> <DEDENT> if not self._host: <NEW_LINE> <INDENT> raise GeoIPClientError('MaxMind host not set.') <NEW_LINE> <DEDENT> super().__init__(self._account_id, self._license_key, host=self._host) <NEW_LINE> <DEDENT> def __enter__(self): <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> def __exit__(self, exc_type, exc_value, traceback): <NEW_LINE> <INDENT> return super().__exit__(exc_type, exc_value, traceback) <NEW_LINE> <DEDENT> def ip2geo(self, ip_address) -> Union[Tuple[str, str, str, str, str], None]: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> response = self.city(ip_address) <NEW_LINE> <DEDENT> except geoip2.errors.AddressNotFoundError: <NEW_LINE> <INDENT> logging.debug('IP address {0} not found.'.format(ip_address)) <NEW_LINE> return None <NEW_LINE> <DEDENT> except geoip2.errors.GeoIP2Error as error: <NEW_LINE> <INDENT> logging.error('Error while geolocating {0} - {1}'.format( ip_address, error)) <NEW_LINE> return None <NEW_LINE> <DEDENT> latitude = response.location.latitude <NEW_LINE> longitude = response.location.longitude <NEW_LINE> country_name = response.country.name <NEW_LINE> iso_code = response.country.iso_code <NEW_LINE> city = response.city.name <NEW_LINE> return (iso_code, latitude, longitude, country_name, city)
|
A GeoIP client using the MaxMind web service api.
|
62598fff187af65679d2a123
|
class vtkDataArrayFromNumPyMultiArray(vtkDataArrayFromNumPyBuffer): <NEW_LINE> <INDENT> def __init__(self, vtk_class, ctype, data=None, buffered=True): <NEW_LINE> <INDENT> self.buffered = buffered <NEW_LINE> vtkDataArrayFromNumPyBuffer.__init__(self, vtk_class, ctype, data) <NEW_LINE> <DEDENT> def read_numpy_array(self, data): <NEW_LINE> <INDENT> if not isinstance(data, np.ndarray): <NEW_LINE> <INDENT> data = np.array(data, dtype=self.ctype) <NEW_LINE> <DEDENT> if data.dtype != self.ctype: <NEW_LINE> <INDENT> data = data.astype(self.ctype) <NEW_LINE> <DEDENT> if data.ndim <=2: <NEW_LINE> <INDENT> raise Warning('This is inefficient for 1D and 2D NumPy arrays. ' + 'Use a vtkDataArrayFromNumPyArray subclass instead.') <NEW_LINE> <DEDENT> if self.buffered: <NEW_LINE> <INDENT> n = data.ndim-1 <NEW_LINE> for c in range(n//2): <NEW_LINE> <INDENT> data = data.swapaxes(c,n-1-c) <NEW_LINE> <DEDENT> vtkDataArrayFromNumPyBuffer.read_numpy_array(self, data) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.vtk_da.SetNumberOfComponents(data.shape[-1]) <NEW_LINE> self.vtk_da.SetNumberOfTuples(np.prod(data.shape[:-1])) <NEW_LINE> for c, d_T in enumerate(data.T): <NEW_LINE> <INDENT> for i, d in enumerate(d_T.flat): <NEW_LINE> <INDENT> self.vtk_da.SetComponent(i, c, d)
|
Class for reading vtkDataArray from a multi-dimensional NumPy array.
This class can be used to generate a vtkDataArray from a NumPy array.
The NumPy array should be of the form <gridsize> x <number of components>
where 'number of components' indicates the number of components in
each gridpoint in the vtkDataArray. Note that this form is also expected
even in the case of only a single component.
|
62598fff3cc13d1c6d4661a2
|
class ZF: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> if config.random: <NEW_LINE> <INDENT> with_random_url = safe_get(config.zf_url).url <NEW_LINE> _random = with_random_url.split('/')[-2] <NEW_LINE> self.base_url=config.zf_url+_random+'/' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.base_url = config.zf_url <NEW_LINE> <DEDENT> self.login_url, self.code_url, self.headers = prepare_request(self.base_url) <NEW_LINE> <DEDENT> def pre_login(self): <NEW_LINE> <INDENT> _req = safe_get(self.base_url, headers=self.headers) <NEW_LINE> self.VIEWSTATE = get_viewstate(_req.text) <NEW_LINE> uid = get_unique_key(prefix="user") <NEW_LINE> _req1 = safe_get(self.code_url, cookies=_req.cookies, headers=self.headers) <NEW_LINE> image_content = _req1.content <NEW_LINE> image_content=image.process_image_string(image_content) <NEW_LINE> base64_image="data:image/gif;base64,"+image_content.encode('base64').replace('\n','') <NEW_LINE> pickled_cookies = pickle.dumps(_req.cookies) <NEW_LINE> rds.hmset(uid, { "checkcode" : base64_image, "base_url" : self.base_url, "viewstate" : self.VIEWSTATE, 'cookies' : base64.encodestring(pickled_cookies), }) <NEW_LINE> rds.pexpire(uid, config.COOKIES_TIME_OUT) <NEW_LINE> return uid <NEW_LINE> <DEDENT> def get_checkcode(self, uid): <NEW_LINE> <INDENT> pickled = base64.decodestring(rds.hget(uid, 'cookies')) <NEW_LINE> self.cookies = pickle.loads(pickled) <NEW_LINE> rds.pexpire(uid, config.COOKIES_TIME_OUT) <NEW_LINE> _req1 = safe_get(self.code_url, cookies=self.cookies, headers=self.headers) <NEW_LINE> image_content = _req1.content <NEW_LINE> img=image.process_image_string(image_content) <NEW_LINE> return img
|
处理用户第一次请求时的数据
|
62598fff627d3e7fe0e078f7
|
class Solution: <NEW_LINE> <INDENT> def fail_MLE_minPathSum(self, grid): <NEW_LINE> <INDENT> length_row = len(grid) <NEW_LINE> length_column = len(grid[0]) <NEW_LINE> if length_row == 1: <NEW_LINE> <INDENT> return sum(grid[0]) <NEW_LINE> <DEDENT> if length_column == 1: <NEW_LINE> <INDENT> sum_min = 0 <NEW_LINE> for row in grid: <NEW_LINE> <INDENT> sum_min += row[0] <NEW_LINE> <DEDENT> return sum_min <NEW_LINE> <DEDENT> if length_row == 2 and length_column == 2: <NEW_LINE> <INDENT> sum_min = grid[0][0] + grid[1][1] + min(grid[0][1], grid[1][0]) <NEW_LINE> return sum_min <NEW_LINE> <DEDENT> grid_column = [] <NEW_LINE> for row in range(length_row): <NEW_LINE> <INDENT> grid_column.append(grid[row][1:]) <NEW_LINE> <DEDENT> sum_min = min(self.minPathSum(grid[1:]) + grid[1][0], self.minPathSum(grid_column) + grid[0][1]) <NEW_LINE> return sum_min <NEW_LINE> <DEDENT> def minPathSum(self, grid): <NEW_LINE> <INDENT> length_row = len(grid) <NEW_LINE> length_column = len(grid[0]) <NEW_LINE> dp = [[0 for i in range(length_column)] for j in range(length_row)] <NEW_LINE> dp[0][0] = grid[0][0] <NEW_LINE> for row in range(1, length_row): <NEW_LINE> <INDENT> dp[row][0] = dp[row - 1][0] + grid[row][0] <NEW_LINE> <DEDENT> for column in range(1, length_column): <NEW_LINE> <INDENT> dp[0][column] = dp[0][column - 1] + grid[0][column] <NEW_LINE> <DEDENT> for row in range(1, length_row): <NEW_LINE> <INDENT> for column in range(1, length_column): <NEW_LINE> <INDENT> dp[row][column] = min(dp[row - 1][column], dp[row][column - 1]) + grid[row][column] <NEW_LINE> <DEDENT> <DEDENT> print(dp) <NEW_LINE> return dp[length_row - 1][length_column - 1]
|
@param grid: a list of lists of integers.
@return: An integer, minimizes the sum of all numbers along its path
|
62598fff3cc13d1c6d4661a5
|
class CallsFilter(object): <NEW_LINE> <INDENT> def __call__(self, occurrence): <NEW_LINE> <INDENT> if not occurrence.is_called(): <NEW_LINE> <INDENT> return False
|
Filter out non-call occurrences.
|
62598fff3cc13d1c6d4661a7
|
class __Backend: <NEW_LINE> <INDENT> def __init__(self, backend_c): <NEW_LINE> <INDENT> self.__backend_c = backend_c <NEW_LINE> <DEDENT> def __del__(self): <NEW_LINE> <INDENT> connect_lib().marky_backend_free(self.__backend_c) <NEW_LINE> <DEDENT> def backend_c(self): <NEW_LINE> <INDENT> return self.__backend_c
|
A container for a Marky Backend.
Do not instantiate directly, instead create Backend instances using backend_*().
|
62598fff0a366e3fb87dd450
|
class LogHandler(logging.Handler): <NEW_LINE> <INDENT> def __init__(self, request, level=logging.INFO): <NEW_LINE> <INDENT> logging.Handler.__init__(self, level) <NEW_LINE> self.request = request <NEW_LINE> <DEDENT> def emit(self, record): <NEW_LINE> <INDENT> entry = self.format(record) <NEW_LINE> status = record.levelname[0] <NEW_LINE> add_entry(self.request, status, entry)
|
Log handler
Logs information to the log table in the system database.
|
62598fff462c4b4f79dbc465
|
class CIVulnScan(BackgroundSubProcess): <NEW_LINE> <INDENT> cmd = ["ci-vulnscan"] <NEW_LINE> def _parse_args(self, *args, project_path: str = None): <NEW_LINE> <INDENT> if not project_path: <NEW_LINE> <INDENT> raise Exception("No project path given") <NEW_LINE> <DEDENT> args = ['--project-path', project_path] <NEW_LINE> self.cmd = self.cmd + args
|
Executed ci-build with a custom build command
|
6259900015fb5d323ce7f7a4
|
class AttributeList(NodeListBase): <NEW_LINE> <INDENT> _list_item_type = Attribute
|
Represents a list attributes.
|
625990003cc13d1c6d4661ae
|
class BaseGeometry(): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def area(self): <NEW_LINE> <INDENT> raise Exception("area() is not implemented") <NEW_LINE> <DEDENT> def integer_validator(self, name, value): <NEW_LINE> <INDENT> if (type(value) is not int): <NEW_LINE> <INDENT> raise TypeError(str(name) + " must be an integer") <NEW_LINE> <DEDENT> if (value <= 0): <NEW_LINE> <INDENT> raise ValueError(str(name) + " must be greater than 0")
|
"
Template for a base geometry object
|
6259900015fb5d323ce7f7a8
|
class Structure(object): <NEW_LINE> <INDENT> __slots__ = [ 'title', 'atms', 'bonds', 'latt_vecs' ] <NEW_LINE> def __init__(self, title): <NEW_LINE> <INDENT> self.title = title <NEW_LINE> self.atms = [] <NEW_LINE> self.bonds = [] <NEW_LINE> self.latt_vecs = [] <NEW_LINE> <DEDENT> def extend_atms(self, atms_iter): <NEW_LINE> <INDENT> self.atms.extend(atms_iter) <NEW_LINE> <DEDENT> def extend_bonds(self, bonds_iter): <NEW_LINE> <INDENT> self.bonds.extend(bonds_iter) <NEW_LINE> <DEDENT> def set_latt_vecs(self, latt_vecs): <NEW_LINE> <INDENT> self.latt_vecs = latt_vecs
|
Chemical structure class
This class are for storing the enough information for plotting a structure
out by using pov-ray. So it is at the central position of this piece of
code, with readers generating its instances from the input file, and
plotters writes the pov-ray files based on the information stored in its
instances.
.. py:attribute:: title
An optional title for the structure, it can also be used for holding some
instructions for plotting.
.. py:attribute:: atms
The list of atoms in the structure. The entries can be any type as long
as they have got attribute ``symb`` for the element symbol and ``coord``
for a numpy array of its coordinate. Any duck type works here, but the
:py:class:`Atm` class of this module is recommended to be used.
.. py:attribute:: bonds
A list of bonds in the structure. Its entries should be tuples where the
first two fields gives the **zero-based** indices of the atoms connected
by the bond. And the next entry gives the bond order, which can be a
float-point number to indicate partial bond.
.. py:attribute: latt_vecs
A list of three numpy vectors for the lattice vectors of the structure if
it is crystalline. It should be set to an empty list for molecules.
|
62599000462c4b4f79dbc47f
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.