code
stringlengths
4
4.48k
docstring
stringlengths
1
6.45k
_id
stringlengths
24
24
class RedisSentinelStorage(RedisStorage):
    """Rate-limit storage with Redis Sentinel as backend.

    Writes go to the master resolved via Sentinel; reads use a slave.
    """

    #: URI scheme handled by this backend.
    STORAGE_SCHEME = "redis+sentinel"

    def __init__(self, uri, **options):
        """
        :param uri: redis+sentinel://[:password]@host:port[,host:port]/service_name
        :param options: ``socket_timeout``, ``service_name``, ...
        :raises ConfigurationError: if redis is unavailable or no
            service name was provided.
        """
        if not get_dependency("redis"):
            raise ConfigurationError(
                "redis prerequisite not available"
            )
        parsed = urllib.parse.urlparse(uri)
        sentinel_configuration = []
        password = None
        if parsed.password:
            password = parsed.password
        # Everything after the optional credentials '@' is a comma-separated
        # list of sentinel host:port pairs.
        for loc in parsed.netloc[parsed.netloc.find("@") + 1:].split(","):
            host, port = loc.split(":")
            sentinel_configuration.append((host, int(port)))
        # Service name comes from the URI path, falling back to options.
        self.service_name = (
            parsed.path.replace("/", "") if parsed.path
            else options.get("service_name", None)
        )
        if self.service_name is None:
            raise ConfigurationError("'service_name' not provided")
        self.sentinel = get_dependency("redis.sentinel").Sentinel(
            sentinel_configuration,
            socket_timeout=options.get("socket_timeout", 0.2),
            password=password
        )
        self.initialize_storage(uri)
        # Deliberately skip RedisStorage.__init__ (its URI handling does not
        # apply here) and initialize the grandparent instead.
        super(RedisStorage, self).__init__()

    @property
    def storage(self):
        # Master connection: used for writes.
        return self.sentinel.master_for(self.service_name)

    @property
    def storage_slave(self):
        # Slave connection: used for reads.
        return self.sentinel.slave_for(self.service_name)

    def get(self, key):
        # Route reads through the slave connection.
        return super(RedisStorage, self).get(key, self.storage_slave)

    def get_expiry(self, key):
        return super(RedisStorage, self).get_expiry(key, self.storage_slave)

    def check(self):
        return super(RedisStorage, self).check(self.storage_slave)
rate limit storage with redis sentinel as backend
62598fa03d592f4c4edbacf5
class User(AbstractUser):
    """Main auth user model."""

    @property
    def has_profile(self):
        """Whether a related Profile record exists for this user."""
        try:
            self.profile
            return True
        except Profile.DoesNotExist:
            return False

    @property
    def has_creditcheck(self):
        """Whether a related CreditCheck record exists for this user."""
        try:
            self.creditcheck
            return True
        except CreditCheck.DoesNotExist:
            return False

    def __str__(self):
        return self.email
Main auth user model.
62598fa00c0af96317c561a8
class Softmax(Layer):
    """Softmax output layer.

    NOTE(review): despite the name, the kernel is nn.LogSoftmax, so this
    layer produces log-probabilities over dim 2 -- confirm callers expect
    log-space output.
    """

    def __init__(self, insize, outsize):
        super(Softmax, self).__init__(insize, outsize, name=None)
        self.kernel = nn.LogSoftmax(dim=2)
Softmax Layer
62598fa08e7ae83300ee8ec7
class FileUtilsTest(IndexTester):
    """Tests for assorted file_utils helpers.

    Uses assertEqual throughout: assertEquals is a deprecated alias
    (removed in Python 3.12).
    """

    def setUp(self):
        self.makebase()

    def test_model_index(self):
        # A path not present in any collection yields (None, None).
        self.assertEqual(
            file_utils.model_index('somepath', [self.collectiondups]),
            (None, None))

    def test_filespecs(self):
        specs = file_utils.filespecs(self.testdups_path)
        filepath = os.path.join(self.testdups_path, 'dup_in_folder',
                                'HilaryEmailC05793347 copy.pdf')
        spec = specs[filepath]
        self.assertEqual(spec.length, 118916)
        self.assertTrue(spec.exists)
        self.assertEqual(spec.name, 'HilaryEmailC05793347 copy.pdf')
        self.assertEqual(spec.ext, '.pdf')
test various fileutils
62598fa0a79ad16197769e8c
class PlatformConfigMeta(object):
    """Provides platform-specific information such as directory locations.

    Each platform subclasses this and supplies concrete implementations of
    all abstract methods; adding a method here forces existing platform
    implementations to catch up (they fail to instantiate otherwise).
    """
    # NOTE(review): '__metaclass__' is Python 2 syntax; under Python 3 this
    # attribute is inert and the abstract methods are not enforced --
    # confirm the target interpreter version.
    __metaclass__ = abc.ABCMeta

    @abc.abstractmethod
    def ConfigDir(self):
        """Return the platform's configuration directory path."""
        return None

    @abc.abstractmethod
    def DownloadDir(self):
        """Return the platform's download directory path."""
        return None
Class to provide platform-specific information like directory locations. Each platform is expected to subclass PlatformMeta and supply concrete implementations of all methods. We use a Python Abstract Base Class to protect against future versions. If we add fields to this class, any existing platform implementations will be prompted to add implementations (because they will fail to startup when their PlatformMeta fails to instantiate).
62598fa07cff6e4e811b584b
class Settings(BaseSettings):
    """Config options for the bootstrap server.

    An environment variable of the same name overrides any default below
    (pydantic BaseSettings behaviour, with an empty env prefix).
    """
    # NOTE(review): seed secret hard-coded in source -- presumably a
    # throwaway test-network key; confirm it is never used in production.
    SEED: str = 'SCOMIY6IHXNIL6ZFTBBYDLU65VONYWI3Y6EN4IDWDP2IIYTCYZBCCE6C'
    HORIZON_ENDPOINT: str = kin_config.HORIZON_URI_TEST
    NETWORK_PASSPHRASE: str = kin_config.HORIZON_PASSPHRASE_TEST
    APP_ID: str = kin_config.ANON_APP_ID
    CHANNEL_COUNT: int = 100
    CHANNEL_SALT: str = 'bootstrap'
    CHANNEL_STARTING_BALANCE: int = 1
    PORT: int = 8000
    LOG_LEVEL: str = 'INFO'

    class Config:
        # Environment variables map 1:1 onto field names (no prefix).
        env_prefix = ''
Config options for the bootstrap server. If an environment variable exists with the same name, it will override the default value given here.
62598fa03c8af77a43b67e53
class BaseCloudStorage(metaclass=abc.ABCMeta):
    """Abstract interface a cloud-storage implementation must provide.

    Subclasses implement the single-item async operations
    (get_bucket_keys / upload_file / remove_item / download_file); the
    get_*_coroutines helpers fan those out over many items, returning
    unawaited coroutines for the caller to schedule.
    """

    def __init__(
        self,
        part_size: int = DEFAULT_PART_SIZE,
        file_threshold: int = DEFAULT_FILE_THRESHOLD,
    ):
        # part_size / file_threshold semantics are defined by subclasses
        # (presumably multipart chunk size and multipart cutoff -- TODO
        # confirm against a concrete implementation).
        super().__init__()
        self.part_size = part_size
        self.file_threshold = file_threshold

    @abc.abstractmethod
    async def get_bucket_keys(
        self, bucket_name: str, prefix: str = ""
    ) -> Dict[str, BucketKeyMetadata]:
        """Return the keys under *prefix* in the bucket, with metadata."""
        return {}

    @abc.abstractmethod
    async def upload_file(
        self,
        bucket_name: str,
        cloud_key: str,
        file_path: str,
        prefix: str = "",
        callback: Callable[[str, str, str, bool], None] = None,
    ) -> bool:
        """Upload one local file to *cloud_key*; True on success."""
        return False

    def get_upload_files_coroutines(
        self,
        bucket_name: str,
        cloud_map_list: List[CloudLocalMap],
        prefix: str = "",
        callback: Callable[[str, str, str, bool], None] = None,
    ) -> List[Coroutine[Any, Any, bool]]:
        """Build upload coroutines, one per cloud/local mapping."""
        upload_tasks = []
        for file in cloud_map_list:
            upload_tasks.append(
                self.upload_file(
                    bucket_name,
                    file.cloud_key,
                    file.local_filepath,
                    prefix,
                    callback,
                )
            )
        return upload_tasks

    @abc.abstractmethod
    async def remove_item(
        self,
        bucket_name: str,
        cloud_key: str,
        callback: Callable[[str, str, str], None] = None,
    ) -> bool:
        """Delete one item from the bucket; True on success."""
        return False

    def get_remove_items_coroutines(
        self,
        bucket_name: str,
        item_names: List[str],
        callback: Callable[[str, str, str], None] = None,
    ) -> List[Coroutine[Any, Any, None]]:
        """Build remove coroutines, one per item name."""
        remove_tasks: List[Any] = []
        for item in item_names:
            remove_tasks.append(
                self.remove_item(
                    bucket_name=bucket_name,
                    cloud_key=item,
                    callback=callback,
                )
            )
        return remove_tasks

    @abc.abstractmethod
    async def download_file(
        self,
        bucket_name: str,
        cloud_key: str,
        destination_filepath: str,
        prefix: str = "",
        callback: Callable[[str, str, str, bool], None] = None,
    ) -> bool:
        """Download one item to *destination_filepath*; True on success."""
        return False

    def get_download_files_coroutines(
        self,
        bucket_name: str,
        local_directory: str,
        cloud_key_list: List[str],
        prefix: str = "",
        callback: Callable[[str, str, str, bool], None] = None,
    ) -> List[Coroutine[Any, Any, bool]]:
        """Build download coroutines; *prefix* is stripped from each key
        to form its local path under *local_directory*."""
        download_tasks = []
        for item in cloud_key_list:
            download_tasks.append(
                self.download_file(
                    bucket_name=bucket_name,
                    cloud_key=item,
                    destination_filepath=os.path.join(
                        local_directory, strip_prefix(item, prefix)
                    ),
                    prefix=prefix,
                    callback=callback,
                )
            )
        return download_tasks
Abstract definition of what a platform implementation needs to include. Any new platforms need to inherit from this.
62598fa0fbf16365ca793ee1
class SimpleLock(object):
    """Coarse file-based lock.

        mylock = SimpleLock('mylock')

        @mylock.decorate
        def myfunc():
            do_something()

    myfunc() only runs when no other thread/process holds the lock.
    """

    def __init__(self, lockname):
        self.lockname = lockname
        self.filename = os.path.join(LOCKDIR, self.lockname)

    @property
    def active(self):
        """True while the lock file exists."""
        return os.path.exists(self.filename)

    @property
    def age(self):
        """Timestamp stored in the lock file, or -1 when not held."""
        if self.active:
            # 'with' guarantees the handle is closed even if read() fails
            # (the original left the file open on error).
            with open(self.filename, 'r') as f:
                timestamp = f.read()
            return float(timestamp)
        return -1

    def aquire(self):
        """Take the lock.

        Kept under its historical (misspelled) name for callers; see the
        `acquire` alias below.

        :raises LockAlreadyActive: when the lock is already held.
        """
        if not self.active:
            with open(self.filename, 'w') as f:
                f.write(str(time.time()))
        else:
            raise LockAlreadyActive(self.lockname)

    # Correctly-spelled, backward-compatible alias.
    acquire = aquire

    def release(self):
        """Drop the lock.

        :raises LockNotActive: when the lock is not held.
        """
        if self.active:
            os.remove(self.filename)
        else:
            raise LockNotActive(self.lockname)

    def decorate(self, function):
        """Wrap *function* so it silently no-ops while the lock is held,
        and otherwise runs under the lock (always released afterwards)."""
        def _wrapped(*args, **kwargs):
            if self.active:
                return
            self.aquire()
            try:
                return function(*args, **kwargs)
            finally:
                self.release()
        _wrapped.__name__ = function.__name__
        return _wrapped
mylock = SimpleLock('mylock') @mylock.decorate def myfunc(): do_something() myfunc() only calls the function if no other thread/process is running it at the moment.
62598fa00a50d4780f705201
class Poll(object):
    """A Poll object, used as parent for SinglePoll and MultiPoll.

    Maps vote options onto letter-emoji reactions and tallies user votes
    from message reactions.
    """

    def __init__(self, poll_title, vote_options):
        global POLL_ID
        # Each poll gets a process-wide sequential id.
        self.poll_ID = POLL_ID
        POLL_ID += 1
        self.creation_time = time.time()
        self.poll_msg = None       # chat message displaying the poll
        self.creation_msg = None   # message that created the poll
        self.creator = None
        self.poll_title = poll_title
        self.vote_options = vote_options
        self.reactions = dict()           # raw reaction data from the chat
        self.options_to_users = dict()    # option -> list of voters
        self.user_to_amount = dict()      # user -> vote weight
        self.option_to_reaction = dict()
        self.reaction_to_option = dict()
        for i, option in enumerate(vote_options):
            # Options beyond the available letter emojis are silently dropped.
            if i in EmojiStorage.NUMBER_TO_LETTEREMOJI:
                reaction = EmojiStorage.NUMBER_TO_LETTEREMOJI[i]
                self.option_to_reaction[option] = reaction
                self.reaction_to_option[reaction] = option
                self.options_to_users[option] = []

    def process_reactions(self, botname):
        """Rebuild vote tallies from self.reactions, ignoring the bot's own.

        Letter-emoji reactions register a vote for the matching option
        (default weight 1); people-emoji reactions adjust a user's weight.
        """
        # Reset tallies before recounting.
        tmp = {key: [] for key in self.options_to_users}
        self.options_to_users = tmp
        self.user_to_amount = dict()
        for reaction, userdict in self.reactions.items():
            users = userdict['usernames']
            if reaction in EmojiStorage.LETTEREMOJI_TO_NUMBER:
                option = self.reaction_to_option[reaction]
                for user in users:
                    if user != botname:
                        if user not in self.user_to_amount:
                            # Default weight for a plain vote.
                            self.user_to_amount[user] = 1
                        self.options_to_users[option].append(user)
            elif reaction in EmojiStorage.DEFAULT_PEOPLE_EMOJI_TO_NUMBER:
                for user in users:
                    if user != botname:
                        if user not in self.user_to_amount:
                            self.user_to_amount[user] = EmojiStorage.DEFAULT_PEOPLE_EMOJI_TO_NUMBER[reaction]
                        else:
                            self.user_to_amount[user] += EmojiStorage.DEFAULT_PEOPLE_EMOJI_TO_NUMBER[reaction]

    def create_message(self):
        """Format the poll for chat.

        :return: (message string, [attachment dict]) tuple; each option line
            shows its emoji, total weight, and per-user weights.
        """
        attachments = {"title": self.poll_title, "color": "#ff6644", 'collapsed': False}
        msg = "*%s* \n\n" % (self.poll_title)
        text = ""
        for option, users in self.options_to_users.items():
            reaction = self.option_to_reaction[option]
            user_string = ""
            total = 0
            for i in range(len(users)):
                user = users[i]
                amount = self.user_to_amount[user]
                user_string += "%s [%d]" % (user, amount)
                total += amount
                if i < len(users)-1:
                    user_string += ", "
            msg += "*%s %s [%d]* \n\n %s \n\n " % (reaction, option,total,user_string)
            text += "*%s %s [%d]* \n\n %s \n\n " % (reaction, option,total,user_string)
        attachments['text'] = text
        return msg, [attachments]
A Poll object, used as parent for SinglePoll and MultiPoll.
62598fa0cc0a2c111447ae35
@zope.interface.implementer(interfaces.IUnauthorizedPagelet)
class UnauthorizedPagelet(z3c.pagelet.browser.BrowserPagelet):
    """Unauthorized pagelet: renders the HTTP 403 page."""

    def update(self):
        """Set 403 plus anti-caching headers and trigger the auth challenge."""
        self.request.response.setStatus(403)
        # The three headers below prevent any caching of this response.
        self.request.response.setHeader(
            'Expires', 'Mon, 26 Jul 1997 05:00:00 GMT')
        self.request.response.setHeader(
            'Cache-Control', 'no-store, no-cache, must-revalidate')
        self.request.response.setHeader('Pragma', 'no-cache')
        principal = self.request.principal
        auth = zope.component.getUtility(
            zope.authentication.interfaces.IAuthentication)
        # May replace the 403 with a redirect (e.g. to a login form).
        auth.unauthorized(principal.id, self.request)

    def render(self):
        """Render the template unless update() resulted in a redirect."""
        if self.request.response.getStatus() not in (302, 303):
            template = zope.component.getMultiAdapter(
                (self, self.request), z3c.template.interfaces.IContentTemplate)
            return template(self)
Unauthorized pagelet.
62598fa007f4c71912baf273
class Sum(Plugin):
    """Example plugin that sums the numbers it is given."""

    def init(self, config):
        # React whenever the bot is addressed directly.
        self.register(self.events.TALKED_TO_ME, self.action)

    def action(self, user, channel, msg):
        total = sum(int(token) for token in msg.split())
        self.say(channel, u"%s, la suma es %d", user, total)
Ejemplo que suma los nros pasados.
62598fa056ac1b37e6302012
class MsgInstCreate:
    """Instruction-create message type (fields not yet implemented)."""
    pass
errorList 错误返回列表 :fieldmembers: * successFlg : 成功标志 * tradeFlag : 允许交易标志 * errorCode : 错误代码 * failInfo : 中文失败信息 * englishFailInfo : 英文失败信息 * optFlag : 操作标志 * exceptionType : 异常类型 * instructId : 指令ID * stkId : 证券代码 * traderId : 交易员代码 * orderType : 买卖方向 * exchId : 交易市场 * regId : 股东代码 * acctId : 资金帐号 * custId : 客户帐号 * orderQty : 委托数量 * orderPrice : 委托价格 * optId : 柜员代码 * restrictMode : 限制类型 * stkName : 证券名称
62598fa024f1403a926857c6
class TestGevent(setuptools.Command):
    """setuptools command that runs the test suite under gevent.

    Assumes grpc and gevent are already installed.
    """

    # Tests known to misbehave under gevent monkey-patching.
    BANNED_TESTS = (
        'unit._cython._no_messages_server_completion_queue_per_call_test.Test.test_rpcs',
        'unit._cython._no_messages_single_server_completion_queue_test.Test.test_rpcs',
        'testing._client_test.ClientTest.test_infinite_request_stream_real_time',
        'unit._server_ssl_cert_config_test',
        'protoc_plugin._python_plugin_test.PythonPluginTest',
        'protoc_plugin.beta_python_plugin_test',
        'unit.beta._beta_features_test',
    )
    description = 'run tests with gevent. Assumes grpc/gevent are installed'
    user_options = []

    def initialize_options(self):
        pass

    def finalize_options(self):
        pass

    def run(self):
        # Monkey-patch before anything else touches sockets/threads.
        from gevent import monkey
        monkey.patch_all()
        import tests
        import grpc.experimental.gevent
        grpc.experimental.gevent.init_gevent()
        import gevent
        # (duplicate 'import tests' removed -- the module is already loaded)
        loader = tests.Loader()
        loader.loadTestsFromNames(['tests'])
        runner = tests.Runner()
        runner.skip_tests(self.BANNED_TESTS)
        # Run the suite on a greenlet and wait for completion.
        result = gevent.spawn(runner.run, loader.suite)
        result.join()
        if not result.value.wasSuccessful():
            sys.exit('Test failure')
Command to run tests w/gevent.
62598fa067a9b606de545df2
class IBNDenseUnit(nn.Module):
    """IBN-DenseNet unit.

    Parameters
    ----------
    in_channels : int
        Number of input channels.
    out_channels : int
        Number of output channels (input channels are concatenated back in).
    dropout_rate : float
        Fraction of the units to drop; 0.0 disables dropout.
    conv1_ibn : bool
        Whether to use IBN normalization in the first convolution layer.
    """

    def __init__(self, in_channels, out_channels, dropout_rate, conv1_ibn):
        super(IBNDenseUnit, self).__init__()
        self.use_dropout = (dropout_rate != 0.0)
        bn_size = 4  # bottleneck expansion factor
        # Only the newly produced channels are stacked onto the input.
        inc_channels = out_channels - in_channels
        mid_channels = inc_channels * bn_size
        self.conv1 = ibn_pre_conv1x1_block(
            in_channels=in_channels,
            out_channels=mid_channels,
            use_ibn=conv1_ibn)
        self.conv2 = pre_conv3x3_block(
            in_channels=mid_channels,
            out_channels=inc_channels)
        if self.use_dropout:
            self.dropout = nn.Dropout(p=dropout_rate)

    def forward(self, x):
        identity = x
        x = self.conv1(x)
        x = self.conv2(x)
        if self.use_dropout:
            x = self.dropout(x)
        # Dense connectivity: concatenate input and new features on channels.
        x = torch.cat((identity, x), dim=1)
        return x
IBN-DenseNet unit. Parameters: ---------- in_channels : int Number of input channels. out_channels : int Number of output channels. dropout_rate : float Parameter of Dropout layer. Fraction of the input units to drop. conv1_ibn : bool Whether to use IBN normalization in the first convolution layer of the block.
62598fa0435de62698e9bc1b
class Directions:
    """Compass-direction constants with left/right turn lookup tables."""
    NORTH = "North"
    SOUTH = "South"
    EAST = "East"
    WEST = "West"
    STOP = "Stop"
    # Heading after turning left; STOP maps to itself.
    LEFT = {NORTH: WEST, SOUTH: EAST, EAST: NORTH, WEST: SOUTH, STOP: STOP}
    # Turning right is the inverse mapping of turning left.
    RIGHT = {after: before for before, after in LEFT.items()}
Map the directions.
62598fa09c8ee82313040082
class ActionContextHandler(sublime_plugin.EventListener):
    """Provides the 'python_traceback' key-binding context.

    True only when the current view is the plugin's traceback window, so
    traceback-specific bindings do not fire elsewhere.
    """

    def on_query_context(self, view, key, op, operand, match_all):
        if not key.startswith('python_traceback'):
            return None  # not our context; defer to other handlers
        return view.name() == TRACEBACK_WINDOW_NAME
Provide special `python_traceback` context in traceback window So that plugin could provide traceback specific bindings.
62598fa0796e427e5384e5bb
class SessionViewTests(TestCase):
    """Tests for the session success/fail message helpers.

    Mostly covered incidentally elsewhere; these tests exercise the
    helpers explicitly.
    """

    def setUp(self):
        # A request is needed to reach the session; grab one from the index.
        self.initial_response = self.client.get(reverse('exordium:index'))

    def test_add_success_message(self):
        add_session_success(self.initial_response.wsgi_request, 'Success')
        self.assertIn('exordium_msg_success', self.initial_response.wsgi_request.session)
        self.assertEqual(self.initial_response.wsgi_request.session['exordium_msg_success'], ['Success'])

    def test_add_two_success_messages(self):
        # Messages accumulate in insertion order.
        add_session_success(self.initial_response.wsgi_request, 'Success')
        self.assertIn('exordium_msg_success', self.initial_response.wsgi_request.session)
        self.assertEqual(self.initial_response.wsgi_request.session['exordium_msg_success'], ['Success'])
        add_session_success(self.initial_response.wsgi_request, 'Two')
        self.assertIn('exordium_msg_success', self.initial_response.wsgi_request.session)
        self.assertEqual(self.initial_response.wsgi_request.session['exordium_msg_success'], ['Success', 'Two'])

    def test_add_fail_message(self):
        add_session_fail(self.initial_response.wsgi_request, 'Fail')
        self.assertIn('exordium_msg_fail', self.initial_response.wsgi_request.session)
        self.assertEqual(self.initial_response.wsgi_request.session['exordium_msg_fail'], ['Fail'])

    def test_add_two_fail_messages(self):
        add_session_fail(self.initial_response.wsgi_request, 'Fail')
        self.assertIn('exordium_msg_fail', self.initial_response.wsgi_request.session)
        self.assertEqual(self.initial_response.wsgi_request.session['exordium_msg_fail'], ['Fail'])
        add_session_fail(self.initial_response.wsgi_request, 'Two')
        self.assertIn('exordium_msg_fail', self.initial_response.wsgi_request.session)
        self.assertEqual(self.initial_response.wsgi_request.session['exordium_msg_fail'], ['Fail', 'Two'])

    def test_add_invalid_message(self):
        # Unknown message types must not create any session key.
        initial_session_keys = sorted(self.initial_response.wsgi_request.session.keys())
        add_session_msg(self.initial_response.wsgi_request, 'Invalid', 'invalid')
        self.assertNotIn('exordium_msg_invalid', self.initial_response.wsgi_request.session)
        self.assertEqual(initial_session_keys, sorted(self.initial_response.wsgi_request.session.keys()))
Tests dealing with the session variables we set (for success/fail messages). Mostly this isn't really necessary since they're tested "by accident" in a number of other tests, but we'll explicitly run a couple tests here and slightly increase our coverage to boot.
62598fa02ae34c7f260aaf08
class VanillaSeq2seqModel(baseModel):
    """Vanilla sequence-to-sequence model with attention mechanism."""

    def _add_decoder(self):
        """Build a 2-layer LSTM attention decoder over the encoder outputs."""
        cell1 = tf.contrib.rnn.LSTMCell(
            self.hps.hidden_dim,
            state_is_tuple=True,
            initializer=self.rand_unif_init)
        cell2 = tf.contrib.rnn.LSTMCell(
            self.hps.hidden_dim,
            state_is_tuple=True,
            initializer=self.rand_unif_init)
        cell = tf.contrib.rnn.MultiRNNCell([cell1, cell2])
        # initial_state_attention only in decode mode (step-by-step decoding).
        self.arg_dec_outputs, arg_dec_out_state, arg_attn_dists = attention_decoder(
            self.emb_arg_dec_inputs,
            self._dec_in_state,
            self.encoder_outputs,
            self._enc_padding_mask,
            cell,
            initial_state_attention=(self.hps.mode == "decode"))
        # Empty slots presumably keep tuple shapes compatible with sibling
        # models carrying extra decoders -- TODO confirm against baseModel.
        self._dec_out_state = (arg_dec_out_state, [])
        self.attn_dists = (arg_attn_dists, [], [])
        return
Vanilla sequence-to-sequence model with attention mechanism.
62598fa05fdd1c0f98e5ddc1
class PartyCustomer(ModelSQL, ModelView):
    # No class docstring on purpose: _description mirrors __doc__, so adding
    # one here would change runtime metadata (it was stripped upstream).
    _description = __doc__
    _name = "party.customer"
    _rec_name = 'shortname'
    # Inherit all party.party fields through the 'party' relation.
    _inherits = {'party.party': 'party'}
    party = fields.Many2One('party.party', 'Party', ondelete="CASCADE",
            required=True)

    def create(self, vals):
        """Create a customer whose id is forced to equal its party's id.

        Function ('set') fields are split out and written only after the id
        swap, because they must target the final record id.
        """
        cursor = Transaction().cursor
        later = {}
        vals = vals.copy()
        for field in vals:
            if field in self._columns and hasattr(self._columns[field], 'set'):
                later[field] = vals[field]
        for field in later:
            del vals[field]
        # Rewind the sequence so the temporary record does not consume an id.
        if cursor.nextid(self._table):
            cursor.setnextid(self._table, cursor.currid(self._table))
        new_id = super(PartyCustomer, self).create(vals)
        customer = self.browse(new_id)
        new_id = customer.party.id
        # Re-point the row at the party id, then drop the temporary record
        # (ModelStorage.delete bypasses any overridden delete()).
        cursor.execute('UPDATE "' + self._table + '" SET id = %s '
            'WHERE id = %s', (customer.party.id, customer.id))
        ModelStorage.delete(self, customer.id)
        self.write(new_id, later)
        res = self.browse(new_id)
        return res.id
Party Customer
62598fa092d797404e388a7a
class StoryAdmin(admin.ModelAdmin):
    """Story admin: list columns, year/approval filters, title search."""
    list_display = ('title', 'branch', 'content_type', 'year',)
    list_filter = ['year', 'public_approved']
    search_fields = ['title']
Adds the ability to filter stories in the admin page
62598fa04e4d56256637224c
class YaruAuth(BaseOAuth2):
    """Yandex Ya.ru OAuth2 authentication mechanism."""
    AUTHORIZATION_URL = YANDEX_AUTHORIZATION_URL
    ACCESS_TOKEN_URL = YANDEX_ACCESS_TOKEN_URL
    AUTH_BACKEND = YaruBackend
    SERVER_URL = YANDEX_SERVER
    SETTINGS_KEY_NAME = 'YANDEX_APP_ID'
    SETTINGS_SECRET_NAME = 'YANDEX_API_SECRET'

    def get_api_url(self):
        """Endpoint returning the authenticated user's profile."""
        return 'https://api-yaru.yandex.ru/me/'

    def user_data(self, access_token, response, *args, **kwargs):
        """Fetch user details from Yandex.

        :return: parsed JSON dict, or None on a load/parse failure.
        """
        params = {'oauth_token': access_token,
                  'format': 'json',
                  'text': 1,
                  }
        url = self.get_api_url() + '?' + urlencode(params)
        try:
            # Close the connection explicitly; the original leaked the
            # urlopen handle when json parsing raised.
            response_fp = urlopen(url)
            try:
                return simplejson.load(response_fp)
            finally:
                response_fp.close()
        except (ValueError, IndexError):
            log('error', 'Could not load data from Yandex.',
                exc_info=True, extra=dict(data=params))
            return None
Yandex Ya.ru OAuth mechanism
62598fa01b99ca400228f442
class PropertyModel(object):
    """A visitor property: key/value pairs stored per user and bucket.

    Identified either by the full property name (hashed into an id) or by
    a pre-computed id prefix.
    """

    def __init__(
            self,
            user_name,
            bucket_name,
            property_name=None,
            property_prefix_id=None):
        """
        :param property_name: full name, hashed via pack_hash into the id.
        :param property_prefix_id: pre-computed id, used when no name is known.
        :raises ValueError: when neither identifier is supplied.
        """
        self.user_name = user_name
        self.bucket_name = bucket_name
        self.property_name = property_name
        if property_name:
            self.id = pack_hash((property_name,))
        elif property_prefix_id:
            self.id = property_prefix_id
        else:
            raise ValueError("PropertyModel requires 'property_name' "
                "or 'property_id'.")

    @inlineCallbacks
    def get_name(self):
        """Look up the stored (JSON-encoded) property name for this id."""
        key = (self.user_name, self.bucket_name, "property_name")
        column_id = self.id
        data = yield get_relation(key, column_id=column_id)
        returnValue(ujson.loads(data))

    @inlineCallbacks
    def get_values(self):
        """Map full value ids (id + column suffix) to decoded values."""
        key = (self.user_name, self.bucket_name, "property")
        prefix = self.id
        data = yield get_relation(key, prefix=prefix)
        # The stored value is a JSON pair; the actual value is element [1].
        returnValue(dict([(self.id + x[0], ujson.loads(x[1])[1])
            for x in data.items()]))

    @inlineCallbacks
    def get_totals(self):
        """Return nested counters: {property_id: {event_id: count}}."""
        key = (self.user_name, self.bucket_name, "property", self.id[0])
        prefix = self.id
        data = yield get_counter(key, prefix=prefix)
        response = defaultdict(lambda: defaultdict(lambda: 0))
        for column_id in data:
            # Column layout: 16-byte property suffix, then the event id.
            property_id = self.id + column_id[0:16]
            event_id = column_id[16:]
            response[property_id][event_id] = data[column_id]
        returnValue(response)

    @inlineCallbacks
    def get_events(self):
        """Fetch the event records referenced by this property's counters."""
        key = (self.user_name, self.bucket_name, "property", self.id[0])
        prefix = self.id
        data = yield get_counter(key, prefix=prefix)
        column_ids = set([column_id[16:] for column_id in data])
        key = (self.user_name, self.bucket_name, "event")
        events = yield get_relation(key, column_ids=column_ids)
        returnValue(events)
Properties are key/value pairs linked to a visitor and stored in buckets.
62598fa0a17c0f6771d5c063
class Identity(Transform):
    """Identity transform: forward passes the input through unchanged.

    Kept as a worked example of how to fill out the Transform template.
    """

    def __init__(self, dropout_chance=0):
        Transform.__init__(self)

    def forward(self, x, train=True):
        # Remember the input shape for the backward pass.
        self.shape = x.shape
        return x

    def backward(self, grad_wrt_out):
        # d(identity)/dx is 1 everywhere, so the gradient flows through.
        return grad_wrt_out * np.ones(self.shape)
Identity Transform This exists to give you an idea for how to fill out the template
62598fa032920d7e50bc5e7f
class DownloadError(DangoException):
    """Raised when the bot fails while downloading a file."""
    pass
Exception raised when an error occurs while the bot was downloading a file.
62598fa0442bda511e95c283
class UserStatsAdmin(FullAuditBaseAdmin):
    """UserStats admin: shows only the (read-only) id column."""
    list_display = ('id', )
    readonly_fields = ('id', )
UserStats Admin
62598fa085dfad0860cbf989
class ApplicationGatewayBackendHealth(Model):
    """Holds backend health results per backend address pool.

    :param backend_address_pools: list of
        ApplicationGatewayBackendHealthPool resources.
    """
    # msrest serialization map: attribute name -> wire key and type.
    _attribute_map = {
        'backend_address_pools': {'key': 'backendAddressPools', 'type': '[ApplicationGatewayBackendHealthPool]'},
    }

    def __init__(self, backend_address_pools=None):
        self.backend_address_pools = backend_address_pools
List of ApplicationGatewayBackendHealthPool resources. :param backend_address_pools: :type backend_address_pools: list[~azure.mgmt.network.v2017_06_01.models.ApplicationGatewayBackendHealthPool]
62598fa007f4c71912baf274
class MIMEMessage(db.Model):
    """First version of a mailing-list message representation."""
    # message_id doubles as the datastore key name (see create()).
    message_id = db.LinkProperty(required=True)
    from_id = db.StringProperty()
    subject = db.StringProperty()
    reply_to = db.LinkProperty()
    content_type = db.StringProperty()
    content = db.StringProperty()

    @classmethod
    def create(cls, message_id):
        """Create and persist a message keyed by *message_id*.

        :raises Exception: when a message with this id already exists.
        """
        msg = MIMEMessage.get_by_key_name(message_id)
        if msg:
            raise Exception('Exists')
        msg = MIMEMessage(
            key_name=message_id,
            message_id=message_id
        )
        msg.put()
        return msg
First version of representation of mailinglist message.
62598fa0a8370b77170f020d
class SessionDestroyedError(Exception):
    """Raised when a destroyed session is opened or accessed."""
If a session is destroyed, it cannot be opened or accessed anymore
62598fa0ac7a0e7691f72334
class MediaAttachment:
    """A media attachment identified by type and URL.

    Attributes:
        media_type: One of 'image', 'audio', 'video', or 'file'.
        url: The media URL as a string.
    """

    def __init__(self, media_type, url):
        self.media_type = media_type
        self.url = url

    @classmethod
    def from_json(cls, json):
        """Build an attachment from its JSON dict form."""
        payload = json['payload']
        return cls(json['type'], payload['url'])

    def to_json(self):
        """Serialize back to the JSON dict form."""
        return {'type': self.media_type, 'payload': {'url': self.url}}
A class holding media attachments. Attributes: media_type: The type of the attachment. Can be one of 'image', 'audio', 'video', or 'file'. url: The url of the media as a string.
62598fa08e7ae83300ee8ec9
class IrModel(models.Model):
    """Adds a ReCaptcha requirement flag to models used in website forms."""
    _inherit = 'ir.model'

    # When set, form submissions for this model must pass ReCaptcha.
    website_form_recaptcha = fields.Boolean(
        string='Require ReCaptcha',
        help='Requires successful ReCaptcha for form submission.',
    )
Add ReCaptcha attr & validation to IrModel for use in forms
62598fa0d6c5a102081e1f6f
class JSONResponseMixin(object):
    """Mixin that renders a view's context as a JSON HTTP response."""

    def render_to_json_response(self, context, **response_kwargs):
        """Wrap the serialized context in an application/json response."""
        payload = self.get_data(context)
        return HttpResponse(payload, content_type="application/json")

    def get_data(self, context):
        """Serialize the context; override for non-serializable data."""
        return json.dumps(context)
A mixin that can be used to render a JSON response.
62598fa091af0d3eaad39c35
class ProxySignalWithArguments(object):
    """Proxy for (what should be) a signal that passes arguments.

    The signal's argument-type index is pre-rendered as a quoted,
    comma-separated string for code generation.
    """

    def __init__(self, sender, signal_name, signal_index):
        self._sender = sender
        self._signal_name = signal_name
        if isinstance(signal_index, tuple):
            quoted = ["'%s'" % part for part in signal_index]
            self._signal_index = ','.join(quoted)
        else:
            self._signal_index = "'%s'" % signal_index

    def connect(self, slot):
        """Emit the generated connect() call for this signal."""
        write_code("%s.%s[%s].connect(%s)" % (
            self._sender, self._signal_name, self._signal_index, slot))
This is a proxy for (what should be) a signal that passes arguments.
62598fa0cc0a2c111447ae37
class Solution:
    """Route existence in a directed graph (DFS and BFS variants).

    @param graph: list of directed graph nodes (unused; kept for interface)
    @param s: the starting node
    @param t: the terminal node
    @return: whether a route from s to t exists
    """

    def hasRoute(self, graph, s, t):
        """Depth-first search from s."""
        # A set gives O(1) membership tests; the original used a list,
        # making each visited check O(n). Nodes hash by identity.
        visited = set()

        def dfs(node):
            if node == t:
                return True
            visited.add(node)
            for nxt in node.neighbors:
                if nxt in visited:
                    continue
                if dfs(nxt):
                    return True
            return False

        return dfs(s)

    def hasRoute2(self, graph, s, t):
        """Breadth-first search from s."""
        from collections import deque
        queue = deque([s])
        visited = set()  # set membership: O(1) vs O(n) with a list
        while queue:
            node = queue.popleft()
            if node == t:
                return True
            visited.add(node)
            for nxt in node.neighbors:
                if nxt not in visited:
                    queue.append(nxt)
        return False
@param graph: A list of Directed graph node @param s: the starting Directed graph node @param t: the terminal Directed graph node @return: a boolean value
62598fa08da39b475be03009
class _PlayerProperty: <NEW_LINE> <INDENT> def __init__(self, attribute, doc=None): <NEW_LINE> <INDENT> self.attribute = attribute <NEW_LINE> self.__doc__ = doc or '' <NEW_LINE> <DEDENT> def __get__(self, obj, objtype=None): <NEW_LINE> <INDENT> if obj is None: <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> if '_' + self.attribute in obj.__dict__: <NEW_LINE> <INDENT> return obj.__dict__['_' + self.attribute] <NEW_LINE> <DEDENT> return getattr(objtype, '_' + self.attribute) <NEW_LINE> <DEDENT> def __set__(self, obj, value): <NEW_LINE> <INDENT> obj.__dict__['_' + self.attribute] = value <NEW_LINE> if obj._audio_player: <NEW_LINE> <INDENT> getattr(obj._audio_player, 'set_' + self.attribute)(value)
Descriptor for Player attributes to forward to the AudioPlayer. We want the Player to have attributes like volume, pitch, etc. These are actually implemented by the AudioPlayer. So this descriptor will forward an assignement to one of the attributes to the AudioPlayer. For example `player.volume = 0.5` will call `player._audio_player.set_volume(0.5)`. The Player class has default values at the class level which are retrieved if not found on the instance.
62598fa03eb6a72ae038a46b
class DescribeProjectSecurityGroupsResponse(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.Groups = None <NEW_LINE> self.RequestId = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> if params.get("Groups") is not None: <NEW_LINE> <INDENT> self.Groups = [] <NEW_LINE> for item in params.get("Groups"): <NEW_LINE> <INDENT> obj = SecurityGroup() <NEW_LINE> obj._deserialize(item) <NEW_LINE> self.Groups.append(obj) <NEW_LINE> <DEDENT> <DEDENT> self.RequestId = params.get("RequestId")
DescribeProjectSecurityGroups返回参数结构体
62598fa0097d151d1a2c0e52
class ServiceCallEventTest(TestCase): <NEW_LINE> <INDENT> def test_attribute_names(self): <NEW_LINE> <INDENT> event = ServiceCallEvent() <NEW_LINE> expected = [] <NEW_LINE> names = sorted(n for n in dir(event) if not n.startswith("_")) <NEW_LINE> self.assertSequenceEqual(expected, names) <NEW_LINE> <DEDENT> def test_for_slots(self): <NEW_LINE> <INDENT> self.assertTrue(hasattr(ServiceCallEvent, "__slots__"))
Test the ServiceCallEvent class.
62598fa030dc7b766599f677
class StringNotInAdvancedFilter(AdvancedFilter): <NEW_LINE> <INDENT> _validation = { 'operator_type': {'required': True}, } <NEW_LINE> _attribute_map = { 'operator_type': {'key': 'operatorType', 'type': 'str'}, 'key': {'key': 'key', 'type': 'str'}, 'values': {'key': 'values', 'type': '[str]'}, } <NEW_LINE> def __init__( self, *, key: Optional[str] = None, values: Optional[List[str]] = None, **kwargs ): <NEW_LINE> <INDENT> super(StringNotInAdvancedFilter, self).__init__(key=key, **kwargs) <NEW_LINE> self.operator_type = 'StringNotIn' <NEW_LINE> self.values = values
StringNotIn Advanced Filter. All required parameters must be populated in order to send to Azure. :ivar operator_type: Required. The operator type used for filtering, e.g., NumberIn, StringContains, BoolEquals and others.Constant filled by server. Possible values include: "NumberIn", "NumberNotIn", "NumberLessThan", "NumberGreaterThan", "NumberLessThanOrEquals", "NumberGreaterThanOrEquals", "BoolEquals", "StringIn", "StringNotIn", "StringBeginsWith", "StringEndsWith", "StringContains", "NumberInRange", "NumberNotInRange", "StringNotBeginsWith", "StringNotEndsWith", "StringNotContains", "IsNullOrUndefined", "IsNotNull". :vartype operator_type: str or ~azure.mgmt.eventgrid.models.AdvancedFilterOperatorType :ivar key: The field/property in the event based on which you want to filter. :vartype key: str :ivar values: The set of filter values. :vartype values: list[str]
62598fa0009cb60464d0134e
class PIPConfigException(PIPException): <NEW_LINE> <INDENT> pass
Configuration errors related to the XACML PIP (Policy Information Point) class
62598fa05f7d997b871f92f4
@define_command(args=MATCH_STR, syntax='<message>') <NEW_LINE> class cmd_say(events.Handler): <NEW_LINE> <INDENT> def handler_10_do(self, event, message): <NEW_LINE> <INDENT> event.client.write("You say: %s" % message) <NEW_LINE> self.container.write_all( "%s says: %s" % (event.user.name, message), exclude=event.client, )
Say something to the other users
62598fa0097d151d1a2c0e53
class NamedType(object): <NEW_LINE> <INDENT> isOptional = False <NEW_LINE> isDefaulted = False <NEW_LINE> def __init__(self, name, asn1Object): <NEW_LINE> <INDENT> self.__name = name <NEW_LINE> self.__type = asn1Object <NEW_LINE> self.__nameAndType = name, asn1Object <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return '%s(%r, %r)' % (self.__class__.__name__, self.__name, self.__type) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return self.__nameAndType == other <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return self.__nameAndType != other <NEW_LINE> <DEDENT> def __lt__(self, other): <NEW_LINE> <INDENT> return self.__nameAndType < other <NEW_LINE> <DEDENT> def __le__(self, other): <NEW_LINE> <INDENT> return self.__nameAndType <= other <NEW_LINE> <DEDENT> def __gt__(self, other): <NEW_LINE> <INDENT> return self.__nameAndType > other <NEW_LINE> <DEDENT> def __ge__(self, other): <NEW_LINE> <INDENT> return self.__nameAndType >= other <NEW_LINE> <DEDENT> def __hash__(self): <NEW_LINE> <INDENT> return hash(self.__nameAndType) <NEW_LINE> <DEDENT> def __getitem__(self, idx): <NEW_LINE> <INDENT> return self.__nameAndType[idx] <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> return iter(self.__nameAndType) <NEW_LINE> <DEDENT> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return self.__name <NEW_LINE> <DEDENT> @property <NEW_LINE> def asn1Object(self): <NEW_LINE> <INDENT> return self.__type <NEW_LINE> <DEDENT> def getName(self): <NEW_LINE> <INDENT> return self.name <NEW_LINE> <DEDENT> def getType(self): <NEW_LINE> <INDENT> return self.asn1Object
Create named field object for a constructed ASN.1 type. The |NamedType| object represents a single name and ASN.1 type of a constructed ASN.1 type. |NamedType| objects are immutable and duck-type Python :class:`tuple` objects holding *name* and *asn1Object* components. Parameters ---------- name: :py:class:`str` Field name asn1Object: ASN.1 type object
62598fa0627d3e7fe0e06cd5
class Any(expression.ColumnElement): <NEW_LINE> <INDENT> __visit_name__ = 'any' <NEW_LINE> def __init__(self, left, right, operator=operators.eq): <NEW_LINE> <INDENT> self.type = sqltypes.Boolean() <NEW_LINE> self.left = expression._literal_as_binds(left) <NEW_LINE> self.right = right <NEW_LINE> self.operator = operator
Represent the clause ``left operator ANY (right)``. ``right`` must be an array expression. .. seealso:: :class:`.postgresql.ARRAY` :meth:`.postgresql.ARRAY.Comparator.any` - ARRAY-bound method
62598fa0b7558d5895463457
class GatedRecurrentUnit(object): <NEW_LINE> <INDENT> def __init__(self, n_hidden=100, init='glorot_uniform'): <NEW_LINE> <INDENT> self.n_hidden = n_hidden <NEW_LINE> self.init = initializations.get(init) <NEW_LINE> Wz = self.init([n_hidden, n_hidden]) <NEW_LINE> Wr = self.init([n_hidden, n_hidden]) <NEW_LINE> Wh = self.init([n_hidden, n_hidden]) <NEW_LINE> Uz = self.init([n_hidden, n_hidden]) <NEW_LINE> Ur = self.init([n_hidden, n_hidden]) <NEW_LINE> Uh = self.init([n_hidden, n_hidden]) <NEW_LINE> bz = model_ops.zeros(shape=(n_hidden,)) <NEW_LINE> br = model_ops.zeros(shape=(n_hidden,)) <NEW_LINE> bh = model_ops.zeros(shape=(n_hidden,)) <NEW_LINE> self.trainable_weights = [Wz, Wr, Wh, Uz, Ur, Uh, bz, br, bh] <NEW_LINE> <DEDENT> def forward(self, inputs, messages): <NEW_LINE> <INDENT> z = tf.nn.sigmoid(tf.matmul(messages, self.trainable_weights[0]) + tf.matmul(inputs, self.trainable_weights[3]) + self.trainable_weights[6]) <NEW_LINE> r = tf.nn.sigmoid(tf.matmul(messages, self.trainable_weights[1]) + tf.matmul(inputs, self.trainable_weights[4]) + self.trainable_weights[7]) <NEW_LINE> h = (1 - z) * tf.nn.tanh(tf.matmul(messages, self.trainable_weights[2]) + tf.matmul(inputs * r, self.trainable_weights[5]) + self.trainable_weights[8]) + z * inputs <NEW_LINE> return h <NEW_LINE> <DEDENT> def none_tensors(self): <NEW_LINE> <INDENT> trainable_weights = self.trainable_weights <NEW_LINE> self.trainable_weights = [] <NEW_LINE> return trainable_weights <NEW_LINE> <DEDENT> def set_tensors(self, tensor): <NEW_LINE> <INDENT> self.trainable_weights = tensor
Submodule for Message Passing
62598fa07d847024c075c1f0
class KeystoneLDAPConfigurationAdapter( charms_openstack.adapters.ConfigurationAdapter): <NEW_LINE> <INDENT> @property <NEW_LINE> def ldap_options(self): <NEW_LINE> <INDENT> return os_utils.config_flags_parser( hookenv.config('ldap-config-flags') )
Charm specific configuration adapter to deal with ldap config flag parsing
62598fa097e22403b383ad36
class RateHandler(BaseHandler): <NEW_LINE> <INDENT> @tornado.web.asynchronous <NEW_LINE> def post(self): <NEW_LINE> <INDENT> self.finish() <NEW_LINE> if 'value' in self.request.arguments: <NEW_LINE> <INDENT> play = bool(float(self.request.arguments['value'][0])) <NEW_LINE> if play: <NEW_LINE> <INDENT> self._media_backend.play() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._media_backend.pause()
Handler for /rate requests. The rate command is used to play/pause media. A value argument should be supplied which indicates media should be played or paused. 0.000000 => pause 1.000000 => play
62598fa0be383301e0253620
class LiUEmployeeLDAPBackend(_LiUBaseLDAPBackend): <NEW_LINE> <INDENT> settings_prefix = 'LIU_EMPLOYEE_LDAP_' <NEW_LINE> _settings = LiUEmployeeLDAPSettings(settings_prefix)
An authentication backend for LiU employees.
62598fa0dd821e528d6d8d5f
class AddTableWindow(QtWidgets.QWidget, Ui_Form_AddTable, Msg): <NEW_LINE> <INDENT> def __init__(self, ic_db, parent=None): <NEW_LINE> <INDENT> super().__init__(parent) <NEW_LINE> self.setupUi(self) <NEW_LINE> self.ic_db = ic_db <NEW_LINE> self.buttonAdd.clicked.connect(self.addTable) <NEW_LINE> self.buttonExit.clicked.connect(self.close) <NEW_LINE> self.setAttribute(QtCore.Qt.WA_DeleteOnClose, True) <NEW_LINE> <DEDENT> def addTable(self): <NEW_LINE> <INDENT> table_name = self.inputTableName.text() <NEW_LINE> try: <NEW_LINE> <INDENT> self.ic_db.add_new_table(table_name) <NEW_LINE> self.successfulMsg('Table added!') <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> self.criticalMsg(str(e)) <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> self.resetFields() <NEW_LINE> <DEDENT> <DEDENT> def resetFields(self): <NEW_LINE> <INDENT> self.inputTableName.setText('')
Widget whose function is to add a table to the database. ic_db is an instance of ItemChooser(). It represents the database on which the actions will be performed. parent is the parent widget, which defaults to None.
62598fa0a17c0f6771d5c064
class BiphasicPort(serial.Serial): <NEW_LINE> <INDENT> def __init__(self, port=DEFAULT_PORT, baud=BAUDRATE): <NEW_LINE> <INDENT> if not REALLY_STIM: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> serial.Serial.__init__(self, port, baud, timeout=0, stopbits=serial.STOPBITS_ONE, bytesize=serial.EIGHTBITS, parity=serial.PARITY_NONE) <NEW_LINE> logging.info(MODULE_IDENTIFIER + "Serial port initialized.") <NEW_LINE> <DEDENT> def sendBiphasicPulse(self): <NEW_LINE> <INDENT> if not REALLY_STIM: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> self.setDTR(True) <NEW_LINE> time.sleep(0.0002) <NEW_LINE> self.setDTR(False) <NEW_LINE> time.sleep(0.0001) <NEW_LINE> self.setRTS(True) <NEW_LINE> time.sleep(0.0002) <NEW_LINE> self.setRTS(False) <NEW_LINE> time.sleep(0.001) <NEW_LINE> logging.info(MODULE_IDENTIFIER + "Biphasic pulse delivered.") <NEW_LINE> return
Serial port set up for biphasic pulse communication
62598fa0435de62698e9bc1e
class RedisAbstractBackEnd(object): <NEW_LINE> <INDENT> __metaclass__ = ABCMeta <NEW_LINE> PREFIX = "h:{0}" <NEW_LINE> def __init__(self, obj=None): <NEW_LINE> <INDENT> if obj is None: <NEW_LINE> <INDENT> self._redis = Redis() <NEW_LINE> <DEDENT> elif isinstance(obj, (list, tuple)): <NEW_LINE> <INDENT> host, port = obj[0], obj[1] <NEW_LINE> db = obj[2] if len(obj) >= 3 else 0 <NEW_LINE> self._redis = Redis(host=host, port=port, db=db) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._redis = obj <NEW_LINE> <DEDENT> self._prefix_len = len(self.PREFIX.format('')) <NEW_LINE> self.rules = {} <NEW_LINE> <DEDENT> def _get_func_key(self, name): <NEW_LINE> <INDENT> return self.PREFIX.format(name)
Having two classes implementing a Hanoi BackEnd in REDIS means duplicating a lot of code. This class is defined as abstract for the sake of clarity that should not be directly used but by means of a subclass.
62598fa030bbd7224646988c
class Longueur: <NEW_LINE> <INDENT> def __init__(self,cartes): <NEW_LINE> <INDENT> self.cartes=set(cartes) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> l=list(self.cartes) <NEW_LINE> l.sort(reverse=True) <NEW_LINE> chaine='' <NEW_LINE> for x in l: <NEW_LINE> <INDENT> chaine += ' '+valeur[x] <NEW_LINE> <DEDENT> return chaine <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return len(self.cartes) <NEW_LINE> <DEDENT> def qualite(self): <NEW_LINE> <INDENT> res=0 <NEW_LINE> for x in self.cartes: <NEW_LINE> <INDENT> res += honneur[x] <NEW_LINE> <DEDENT> return res <NEW_LINE> <DEDENT> def pointH (self): <NEW_LINE> <INDENT> res=0 <NEW_LINE> for x in self.cartes: <NEW_LINE> <INDENT> res += pointH[x] <NEW_LINE> <DEDENT> return res
Une longueur dans une couleur particulière
62598fa001c39578d7f12ba9
class WorseAndWorse3(Player): <NEW_LINE> <INDENT> name = 'Worse and Worse 3' <NEW_LINE> classifier = { 'memory_depth': float('inf'), 'stochastic': True, 'makes_use_of': set(), 'long_run_time': False, 'inspects_source': False, 'manipulates_source': False, 'manipulates_state': False } <NEW_LINE> def strategy(self, opponent: Player) -> Action: <NEW_LINE> <INDENT> current_round = len(self.history) + 1 <NEW_LINE> if current_round == 1: <NEW_LINE> <INDENT> return C <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> probability = 1 - opponent.defections / (current_round - 1) <NEW_LINE> return random_choice(probability)
Cooperates in the first turn. Then defects with probability no. of opponent defects / (current turn - 1). Therefore it is more likely to defect when the opponent defects for a larger proportion of the turns. Names: - Worse and Worse 3: [PRISON1998]_
62598fa0e1aae11d1e7ce739
class OneVersusAllOutput(OutputType): <NEW_LINE> <INDENT> def output(self, A, Z=None): <NEW_LINE> <INDENT> return A <NEW_LINE> <DEDENT> def predict(self, A): <NEW_LINE> <INDENT> return A.argmax(axis=1) <NEW_LINE> <DEDENT> def loss(self, Y, Z, A=None): <NEW_LINE> <INDENT> return np.maximum(0, 1 - Y*Z).sum() <NEW_LINE> <DEDENT> def derivative(self, Y, Z, A): <NEW_LINE> <INDENT> return -((A * Z < 1) * Z) <NEW_LINE> <DEDENT> def name(self): <NEW_LINE> <INDENT> return 'onevall' <NEW_LINE> <DEDENT> def label_vec_to_mat(self, z, K): <NEW_LINE> <INDENT> ncases = len(z) <NEW_LINE> Z = -np.ones((ncases, K), dtype=np.int) <NEW_LINE> for i in range(ncases): <NEW_LINE> <INDENT> Z[i][z[i]] = 1 <NEW_LINE> <DEDENT> return Z
One versus all hinge loss output layer.
62598fa0a79ad16197769e8f
class SimplePreprocessor: <NEW_LINE> <INDENT> def __init__(self, width: int, height: int, inter=cv2.INTER_AREA): <NEW_LINE> <INDENT> self.width = width <NEW_LINE> self.height = height <NEW_LINE> self.inter = inter <NEW_LINE> <DEDENT> def preprocess(self, image: np.ndarray) -> np.ndarray: <NEW_LINE> <INDENT> resized_image = cv2.resize( image, (self.width, self.height), interpolation=self.inter ) <NEW_LINE> return resized_image
Class to resize an image to a certain width and height Parameters ---------- width: int output width height: int output height inter: interpolation method, default = ``cv2.INTER_AREA`` ``opencv`` interpolation method. See ``opencv`` :obj:`InterpolationFlags`. .. note:: The value 3 that appears in the class parameters above is a Sphinx formatting error.
62598fa0e5267d203ee6b738
class Sphere: <NEW_LINE> <INDENT> def __init__(self, radius): <NEW_LINE> <INDENT> self.radius = radius <NEW_LINE> <DEDENT> def getRadius(self): <NEW_LINE> <INDENT> return self.radius <NEW_LINE> <DEDENT> def surfaceArea(self): <NEW_LINE> <INDENT> self.surfaceArea = 4 * pi * (self.radius * self.radius) <NEW_LINE> return self.surfaceArea <NEW_LINE> <DEDENT> def volume(self): <NEW_LINE> <INDENT> self.volume = 4/3 * pi * (self.radius ** 3) <NEW_LINE> return self.volume
This class represents a geometric solid sphere. it will return radius, surface area, and volume using getRadius(), surfaceArea(), and volume().
62598fa007f4c71912baf276
class InputBox(InteractControl): <NEW_LINE> <INDENT> def __init__(self, default=u"", label=None, width=0, height=1, keypress=False): <NEW_LINE> <INDENT> if not isinstance(default, basestring): <NEW_LINE> <INDENT> default = repr(default) <NEW_LINE> <DEDENT> self.default=default <NEW_LINE> self.width=int(width) <NEW_LINE> self.height=int(height) <NEW_LINE> self.keypress = keypress <NEW_LINE> self.label=label <NEW_LINE> if self.height > 1: <NEW_LINE> <INDENT> self.subtype = "textarea" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.subtype = "input" <NEW_LINE> <DEDENT> <DEDENT> def message(self): <NEW_LINE> <INDENT> return {'control_type':'input_box', 'subtype':self.subtype, 'default':self.default, 'width':self.width, 'height':self.height, 'evaluate': False, 'keypress': self.keypress, 'label':self.label}
An input box control :arg default: default value of the input box. If this is not a string, repr is called on it to get a string, which is then the default input. :arg int width: character width of the input box. :arg int height: character height of the input box. If this is greater than one, an HTML textarea will be rendered, while if it is less than one, an input box form element will be rendered. :arg str label: the label of the control, ``""`` for no label, and a default value (None) of the control's variable. :arg bool keypress: update the value of the interact when the user presses any key, rather than when the user presses Enter or unfocuses the textbox
62598fa04e4d56256637224f
class SchemeListSerializer(serializers.HyperlinkedModelSerializer, SchemeSerializerMixin): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> model = Scheme <NEW_LINE> fields = ['id', 'url', 'name', 'description', 'date_from', 'date_to']
Сериализатор списка моделей опроса
62598fa08e7ae83300ee8ecb
class Calculator: <NEW_LINE> <INDENT> def add(num1, num2): <NEW_LINE> <INDENT> addition = num1 + num2 <NEW_LINE> return addition <NEW_LINE> <DEDENT> def sub(num1, num2): <NEW_LINE> <INDENT> subtraction = num1 - num2 <NEW_LINE> return subtraction <NEW_LINE> <DEDENT> def mult(num1, num2): <NEW_LINE> <INDENT> multiplication = num1 * num2 <NEW_LINE> return multiplication <NEW_LINE> <DEDENT> def divide(num1, num2): <NEW_LINE> <INDENT> division = num1/num2 <NEW_LINE> return division
A simple calculator module
62598fa091af0d3eaad39c37
class CKEditorField(TextAreaField): <NEW_LINE> <INDENT> widget = CKEditorWidget()
A custom text editor for the admin panel.
62598fa04527f215b58e9d0f
class _RARHeaderDataEx(ctypes.Structure): <NEW_LINE> <INDENT> _pack_ = 1 <NEW_LINE> _fields_ = [("ArcName", ctypes.c_char * 1024), ("ArcNameW", ctypes.c_wchar * 1024), ("FileName", ctypes.c_char * 1024), ("FileNameW", ctypes.c_wchar * 1024), ("Flags", ctypes.c_uint), ("PackSize", ctypes.c_uint), ("PackSizeHigh", ctypes.c_uint), ("UnpSize", ctypes.c_uint), ("UnpSizeHigh", ctypes.c_uint), ("HostOS", ctypes.c_uint), ("FileCRC", ctypes.c_uint), ("FileTime", ctypes.c_uint), ("UnpVer", ctypes.c_uint), ("Method", ctypes.c_uint), ("FileAttr", ctypes.c_uint), ("CmtBuf", ctypes.c_char_p), ("CmtBufSize", ctypes.c_uint), ("CmtSize", ctypes.c_uint), ("CmtState", ctypes.c_uint), ("Reserved", ctypes.c_uint * 1024)]
Archive file structure. Used by DLL calls.
62598fa01f037a2d8b9e3f13
class cublasNotInitialized(cublasError): <NEW_LINE> <INDENT> pass
CUBLAS library not initialized.
62598fa038b623060ffa8ebe
class VCharacterManager: <NEW_LINE> <INDENT> __metaclass__ = Singleton <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> self.character_client = {} <NEW_LINE> self.client_character = {} <NEW_LINE> <DEDENT> def addVCharacter(self,vcharacter): <NEW_LINE> <INDENT> characterId = vcharacter.getCharacterId() <NEW_LINE> self.character_client[characterId] = vcharacter <NEW_LINE> <DEDENT> def getVCharacterByClientId(self,clientId): <NEW_LINE> <INDENT> for vcharacter in self.character_client.values(): <NEW_LINE> <INDENT> if vcharacter.getDynamicId()==clientId: <NEW_LINE> <INDENT> return vcharacter <NEW_LINE> <DEDENT> <DEDENT> return None <NEW_LINE> <DEDENT> def getVCharacterByCharacterId(self,characterId): <NEW_LINE> <INDENT> vcharacter = self.character_client.get(characterId) <NEW_LINE> return vcharacter <NEW_LINE> <DEDENT> def dropVCharacterByClientId(self,clientId): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> vcharacter = self.getVCharacterByClientId(clientId) <NEW_LINE> if vcharacter: <NEW_LINE> <INDENT> characterId = vcharacter.getCharacterId() <NEW_LINE> del self.character_client[characterId] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> finally: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> def dropVCharacterByCharacterId(self,characterId): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> del self.character_client[characterId] <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> def getNodeByClientId(self,dynamicId): <NEW_LINE> <INDENT> vcharacter = self.getVCharacterByClientId(dynamicId) <NEW_LINE> if vcharacter: <NEW_LINE> <INDENT> return vcharacter.getNode() <NEW_LINE> <DEDENT> return -1 <NEW_LINE> <DEDENT> def getNodeByCharacterId(self,characterId): <NEW_LINE> <INDENT> vcharacter = self.character_client.get(characterId) <NEW_LINE> if vcharacter: <NEW_LINE> <INDENT> return vcharacter.getNode() <NEW_LINE> <DEDENT> return -1 <NEW_LINE> <DEDENT> def getClientIdByCharacterId(self,characterId): 
<NEW_LINE> <INDENT> vcharacter = self.character_client.get(characterId) <NEW_LINE> if vcharacter: <NEW_LINE> <INDENT> return vcharacter.getDynamicId() <NEW_LINE> <DEDENT> return -1 <NEW_LINE> <DEDENT> def getCharacterIdByClientId(self,dynamicId): <NEW_LINE> <INDENT> vcharacter = self.getVCharacterByClientId(dynamicId) <NEW_LINE> if vcharacter: <NEW_LINE> <INDENT> return vcharacter.getCharacterId() <NEW_LINE> <DEDENT> return -1
角色管理器
62598fa03eb6a72ae038a46e
class ICouponDeletedEvent(BaseWebhookEvent): <NEW_LINE> <INDENT> pass
Occurs whenever a coupon is deleted.
62598fa063d6d428bbee25dd
class Rip_packet: <NEW_LINE> <INDENT> def __init__(self, router_id): <NEW_LINE> <INDENT> self.command = 2 <NEW_LINE> self.version = 2 <NEW_LINE> self.router_id = router_id <NEW_LINE> self.entry_table = [] <NEW_LINE> <DEDENT> def add_entry(self, entry): <NEW_LINE> <INDENT> self.entry_table.append(entry) <NEW_LINE> <DEDENT> def dump(self): <NEW_LINE> <INDENT> packet = struct.pack(HEADER_FORMAT, self.command, self.version, self.router_id) <NEW_LINE> for row in self.entry_table: <NEW_LINE> <INDENT> packet += struct.pack(ENTRY_FORMAT, *row) <NEW_LINE> <DEDENT> return packet <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> string = LINE <NEW_LINE> string += "|{:^7}|{:^7}|{:^15}|\n".format( self.command, self.version, self.router_id) <NEW_LINE> string += LINE <NEW_LINE> for entry in self.entry_table: <NEW_LINE> <INDENT> string += "|{:^15}|{:^15}|\n|{:^31}|\n|{:^31}|\n|{:^31}|\n|{:^31}|\n".format( *entry) <NEW_LINE> string += LINE <NEW_LINE> <DEDENT> return string
a rip v2 packet, this class handles turning itself into a byte array
62598fa07b25080760ed72d4
class CNNReporter(Reporter): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> proxy = True <NEW_LINE> <DEDENT> objects = CNNReporterManager()
This class is a proxy model for Reporter, used for testing proxy model support
62598fa0fff4ab517ebcd61a
class Caches: <NEW_LINE> <INDENT> def __init__(self, root): <NEW_LINE> <INDENT> self.root = root <NEW_LINE> <DEDENT> def get(self, name): <NEW_LINE> <INDENT> return Cache(os.path.join(self.root, name))
Repository of caches that, if kept across site builds, can speed up further builds
62598fa0cb5e8a47e493c08b
class ListInstanceProfilesResultSet(ResultSet): <NEW_LINE> <INDENT> def getJSONFromString(self, str): <NEW_LINE> <INDENT> return json.loads(str) <NEW_LINE> <DEDENT> def get_Response(self): <NEW_LINE> <INDENT> return self._output.get('Response', None)
A ResultSet with methods tailored to the values returned by the ListInstanceProfiles Choreo. The ResultSet object is used to retrieve the results of a Choreo execution.
62598fa0498bea3a75a5794d
class Complexity(pydrogen.Typical): <NEW_LINE> <INDENT> functions = {'len': constant, 'print': constant, 'range': linear} <NEW_LINE> def preprocess(self, context): <NEW_LINE> <INDENT> syms = [] <NEW_LINE> if 'functions' in context and type(context['functions']) == dict: <NEW_LINE> <INDENT> self.functions.update(context['functions']) <NEW_LINE> del context['functions'] <NEW_LINE> <DEDENT> for var, symbol in context.items(): <NEW_LINE> <INDENT> if type(symbol) == str: <NEW_LINE> <INDENT> symbol = symbols(symbol) <NEW_LINE> context[var] = symbol <NEW_LINE> <DEDENT> syms.append((symbol, oo)) <NEW_LINE> <DEDENT> <DEDENT> def Statements(self, ss, context=None): return sum(ss.post(context)[0]) <NEW_LINE> def Assign(self, targets, e, context=None): return constant(None) + e.post(context) <NEW_LINE> def For(self, target, itr, ss, orelse, context=None): <NEW_LINE> <INDENT> if hasattr(itr.pre(), 'id'): <NEW_LINE> <INDENT> itr = itr.pre().id <NEW_LINE> if itr in context: <NEW_LINE> <INDENT> loops = linear(context[itr]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> loops = constant(None) <NEW_LINE> <DEDENT> <DEDENT> elif type(itr.pre()) in (ast.List, ast.Tuple, ast.Str, ast.Dict, ast.Set): <NEW_LINE> <INDENT> loops = constant(None) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> loops = itr.post(context) <NEW_LINE> <DEDENT> work = ss.post(context) <NEW_LINE> return loops * work if work.contains(loops) else work * loops <NEW_LINE> <DEDENT> def BoolOp(self, es, context=None): return constant(None) + sum(es.post(context)) <NEW_LINE> def BinOp(self, e1, e2, context): return constant(None) + e1.post(context) + e2.post(context) <NEW_LINE> def Call(self, func, args, context=None): <NEW_LINE> <INDENT> func = func.pre() <NEW_LINE> if context is None: <NEW_LINE> <INDENT> context = {} <NEW_LINE> <DEDENT> if func.id in self.functions: <NEW_LINE> <INDENT> symbol = None <NEW_LINE> for arg in args.pre(): <NEW_LINE> <INDENT> if arg.id in context: <NEW_LINE> <INDENT> symbol = context[arg.id] 
<NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> if symbol or self.functions[func.id] == constant: <NEW_LINE> <INDENT> return self.functions[func.id](symbol) <NEW_LINE> <DEDENT> <DEDENT> for _, module in sys.modules.items(): <NEW_LINE> <INDENT> scope = module.__dict__ <NEW_LINE> if func.id in scope and callable(scope[func.id]): <NEW_LINE> <INDENT> func = scope[func.id] <NEW_LINE> return Complexity(func, **context, functions=self.functions).Complexity <NEW_LINE> <DEDENT> <DEDENT> raise pydrogen.PydrogenError("failed to interpret {}".format(func.id)) <NEW_LINE> <DEDENT> def Num(self, n, context=None): return 0 <NEW_LINE> def NameConstant(self): return 0 <NEW_LINE> def Name(self, id, context=None): return 0 <NEW_LINE> def If(self, test, body, orelse, context=None): <NEW_LINE> <INDENT> return test.post(context) + body.post(context) + orelse.post(context) <NEW_LINE> <DEDENT> def Compare(self, l, r, context=None): <NEW_LINE> <INDENT> return constant(None) + l.post(context) + r.post(context)
Complexity approximation for a small subset of Python.
62598fa0bd1bec0571e14fd9
class Classifier(): <NEW_LINE> <INDENT> log = logging.getLogger("recheckwatchbot") <NEW_LINE> queries = None <NEW_LINE> def __init__(self, queries_dir): <NEW_LINE> <INDENT> self.es = results.SearchEngine(ES_URL) <NEW_LINE> self.queries_dir = queries_dir <NEW_LINE> self.queries = loader.load(self.queries_dir) <NEW_LINE> <DEDENT> def hits_by_query(self, query, queue=None, facet=None, size=100): <NEW_LINE> <INDENT> if queue: <NEW_LINE> <INDENT> es_query = qb.single_queue(query, queue, facet=facet) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> es_query = qb.generic(query, facet=facet) <NEW_LINE> <DEDENT> return self.es.search(es_query, size=size) <NEW_LINE> <DEDENT> def most_recent(self): <NEW_LINE> <INDENT> query = qb.most_recent_event() <NEW_LINE> results = self.es.search(query, size='1') <NEW_LINE> if len(results) > 0: <NEW_LINE> <INDENT> last = dp.parse(results[0].timestamp) <NEW_LINE> return last <NEW_LINE> <DEDENT> return datetime.datetime.utcfromtimestamp(0) <NEW_LINE> <DEDENT> def classify(self, change_number, patch_number, build_short_uuid, recent=False): <NEW_LINE> <INDENT> self.log.debug("Entering classify") <NEW_LINE> self.queries = loader.load(self.queries_dir) <NEW_LINE> bug_matches = [] <NEW_LINE> for x in self.queries: <NEW_LINE> <INDENT> if x.get('suppress-notification'): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> self.log.debug( "Looking for bug: https://bugs.launchpad.net/bugs/%s" % x['bug']) <NEW_LINE> query = qb.single_patch(x['query'], change_number, patch_number, build_short_uuid) <NEW_LINE> results = self.es.search(query, size='10', recent=recent) <NEW_LINE> if len(results) > 0: <NEW_LINE> <INDENT> bug_matches.append(x['bug']) <NEW_LINE> <DEDENT> <DEDENT> return bug_matches
Classify failed tempest-devstack jobs based. Given a change and revision, query logstash with a list of known queries that are mapped to specific bugs.
62598fa01f5feb6acb162a4e
class AuditRecordMixin(models.Model): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> abstract = True <NEW_LINE> <DEDENT> audit_records = GenericRelation(AuditRecord) <NEW_LINE> def get_revision_field_names(self): <NEW_LINE> <INDENT> return [f.name for f in self._meta.get_fields()] <NEW_LINE> <DEDENT> def save(self, *args, **kwargs): <NEW_LINE> <INDENT> _self = type(self).objects.get(pk=self.pk) if self.pk else None <NEW_LINE> super(AuditRecordMixin, self).save(*args, **kwargs) <NEW_LINE> if _self: <NEW_LINE> <INDENT> changed_fields = {} <NEW_LINE> for field in self.get_revision_field_names(): <NEW_LINE> <INDENT> old, new = getattr(_self, field), getattr(self, field) <NEW_LINE> if old != new: <NEW_LINE> <INDENT> changed_fields[str(field)] = {str(old): str(new)} <NEW_LINE> <DEDENT> <DEDENT> if changed_fields: <NEW_LINE> <INDENT> AuditRecord.objects.create( content_object=self, changed_fields=json.dumps(changed_fields), user=get_current_user() )
Mixin for logging model field changes.
62598fa05fdd1c0f98e5ddc5
@attr.s(frozen=True, eq=False) <NEW_LINE> class WorkerContext(metaclass=NoPublicConstructor): <NEW_LINE> <INDENT> idle_timeout: float = attr.ib( default=600.0, validator=check_non_negative, ) <NEW_LINE> init: Callable[[], Any] = attr.ib( default=bool, validator=attr.validators.is_callable(), ) <NEW_LINE> retire: Callable[[], Any] = attr.ib( default=bool, validator=attr.validators.is_callable(), ) <NEW_LINE> grace_period: float = attr.ib( default=30.0, validator=check_non_negative, ) <NEW_LINE> worker_type: WorkerType = attr.ib( default=WorkerType.SPAWN, validator=attr.validators.in_(WorkerType), ) <NEW_LINE> _worker_class: Type[AbstractWorker] = attr.ib(repr=False, init=False) <NEW_LINE> _worker_cache: WorkerCache = attr.ib(repr=False, init=False) <NEW_LINE> _lifetime: ContextLifetimeManager = attr.ib( factory=ContextLifetimeManager, repr=False, init=False ) <NEW_LINE> def __attrs_post_init__(self): <NEW_LINE> <INDENT> worker_class, cache_class = WORKER_MAP[self.worker_type] <NEW_LINE> self.__dict__["_worker_class"] = worker_class <NEW_LINE> self.__dict__["_worker_cache"] = cache_class() <NEW_LINE> <DEDENT> @trio.lowlevel.enable_ki_protection <NEW_LINE> async def run_sync(self, sync_fn, *args, cancellable=False, limiter=None): <NEW_LINE> <INDENT> if limiter is None: <NEW_LINE> <INDENT> limiter = current_default_worker_limiter() <NEW_LINE> <DEDENT> async with limiter, self._lifetime: <NEW_LINE> <INDENT> self._worker_cache.prune() <NEW_LINE> while True: <NEW_LINE> <INDENT> with trio.CancelScope(shield=not cancellable): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> worker = self._worker_cache.pop() <NEW_LINE> <DEDENT> except IndexError: <NEW_LINE> <INDENT> worker = self._worker_class( self.idle_timeout, self.init, self.retire ) <NEW_LINE> await worker.start() <NEW_LINE> <DEDENT> result = await worker.run_sync(sync_fn, *args) <NEW_LINE> <DEDENT> if result is None: <NEW_LINE> <INDENT> await trio.lowlevel.checkpoint_if_cancelled() <NEW_LINE> <DEDENT> else: <NEW_LINE> 
<INDENT> self._worker_cache.append(worker) <NEW_LINE> return result.unwrap() <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> async def _aclose(self, grace_period=None): <NEW_LINE> <INDENT> if grace_period is None: <NEW_LINE> <INDENT> grace_period = self.grace_period <NEW_LINE> <DEDENT> with trio.CancelScope(shield=True): <NEW_LINE> <INDENT> await self._lifetime.close_and_wait() <NEW_LINE> await trio.to_thread.run_sync(self._worker_cache.shutdown, grace_period) <NEW_LINE> <DEDENT> <DEDENT> @trio.lowlevel.enable_ki_protection <NEW_LINE> def statistics(self): <NEW_LINE> <INDENT> self._worker_cache.prune() <NEW_LINE> return WorkerContextStatistics( idle_workers=len(self._worker_cache), running_workers=self._lifetime.calc_running(), )
A reification of a context where workers have a custom configuration. Instances of this class are to be created using :func:`open_worker_context`, and cannot be directly instantiated. The arguments to :func:`open_worker_context` that created an instance are available for inspection as read-only attributes. This class provides a ``statistics()`` method, which returns an object with the following fields: * ``idle_workers``: The number of live workers currently stored in the context's cache. * ``running_workers``: The number of workers currently executing jobs.
62598fa0435de62698e9bc1f
class ExitException(Exception): <NEW_LINE> <INDENT> pass
Raised to signal termination; main treats this as a regular exit
62598fa02ae34c7f260aaf0c
class Key(namedtuple('__KeyCombination', 'symbol modifiers modMatters')): <NEW_LINE> <INDENT> s_symbol = 0 <NEW_LINE> s_modifiers = 0 <NEW_LINE> s_modMatters = False <NEW_LINE> def __new__(cls, symbol, modifiers, modMatters=False): <NEW_LINE> <INDENT> return super(Key, cls).__new__(cls, symbol, modifiers, modMatters) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> symbol, modifiers, modMatters = self <NEW_LINE> if other is None: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> if symbol == other.symbol: <NEW_LINE> <INDENT> if not modMatters or (modifiers & other.modifiers): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> return False <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self.__eq__(other) <NEW_LINE> <DEDENT> def __hash__(self): <NEW_LINE> <INDENT> symbol, mod, modMatters = self <NEW_LINE> return symbol
A key + modifying keys combination. A Key will register Key(symbol,mod) as being equal to Key(symbol,mod2) so long as mod is a subset of mod2 or vice versa This way, random modifiers like numlock will not have an effect on playability. This does mean that differentiating ctrl+shift+x and ctrl+x is not currently possible
62598fa0435de62698e9bc20
class CreateSubreddit(Templated): <NEW_LINE> <INDENT> def __init__(self, site = None, name = '', captcha=None): <NEW_LINE> <INDENT> allow_image_upload = site and not site.quarantine <NEW_LINE> feature_autoexpand_media_previews = feature.is_enabled("autoexpand_media_previews") <NEW_LINE> Templated.__init__(self, site=site, name=name, captcha=captcha, comment_sorts=CommentSortMenu.visible_options(), allow_image_upload=allow_image_upload, feature_autoexpand_media_previews=feature_autoexpand_media_previews, ) <NEW_LINE> self.color_options = Subreddit.KEY_COLORS <NEW_LINE> self.subreddit_selector = SubredditSelector( placeholder="Add Page", class_name="sr-name", include_user_subscriptions=False, show_add=True, )
reddit creation form.
62598fa03cc13d1c6d465598
class WheelBuilder(object): <NEW_LINE> <INDENT> def __init__(self, requirement_set, finder, build_options=None, global_options=None): <NEW_LINE> <INDENT> self.requirement_set = requirement_set <NEW_LINE> self.finder = finder <NEW_LINE> self.wheel_dir = requirement_set.wheel_download_dir <NEW_LINE> self.build_options = build_options or [] <NEW_LINE> self.global_options = global_options or [] <NEW_LINE> <DEDENT> def _build_one(self, req): <NEW_LINE> <INDENT> base_args = [ sys.executable, '-c', "import setuptools;__file__=%r;" "exec(compile(open(__file__).read().replace('\\r\\n', '\\n'), " "__file__, 'exec'))" % req.setup_py ] + list(self.global_options) <NEW_LINE> logger.info('Running setup.py bdist_wheel for %s', req.name) <NEW_LINE> logger.info('Destination directory: %s', self.wheel_dir) <NEW_LINE> wheel_args = base_args + ['bdist_wheel', '-d', self.wheel_dir] + self.build_options <NEW_LINE> try: <NEW_LINE> <INDENT> call_subprocess(wheel_args, cwd=req.source_dir, show_stdout=False) <NEW_LINE> return True <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> logger.error('Failed building wheel for %s', req.name) <NEW_LINE> return False <NEW_LINE> <DEDENT> <DEDENT> def build(self): <NEW_LINE> <INDENT> self.requirement_set.prepare_files(self.finder) <NEW_LINE> reqset = self.requirement_set.requirements.values() <NEW_LINE> buildset = [] <NEW_LINE> for req in reqset: <NEW_LINE> <INDENT> if req.is_wheel: <NEW_LINE> <INDENT> logger.info( 'Skipping %s, due to already being wheel.', req.name, ) <NEW_LINE> <DEDENT> elif req.editable: <NEW_LINE> <INDENT> logger.info( 'Skipping %s, due to being editable', req.name, ) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> buildset.append(req) <NEW_LINE> <DEDENT> <DEDENT> if not buildset: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> logger.info( 'Building wheels for collected packages: %s', ', '.join([req.name for req in buildset]), ) <NEW_LINE> with indent_log(): <NEW_LINE> <INDENT> build_success, build_failure = [], [] <NEW_LINE> for 
req in buildset: <NEW_LINE> <INDENT> if self._build_one(req): <NEW_LINE> <INDENT> build_success.append(req) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> build_failure.append(req) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> if build_success: <NEW_LINE> <INDENT> logger.info( 'Successfully built %s', ' '.join([req.name for req in build_success]), ) <NEW_LINE> <DEDENT> if build_failure: <NEW_LINE> <INDENT> logger.info( 'Failed to build %s', ' '.join([req.name for req in build_failure]), ) <NEW_LINE> <DEDENT> return len(build_failure) == 0
Build wheels from a RequirementSet.
62598fa0b7558d589546345a
class CallableObject(object): <NEW_LINE> <INDENT> __slots__ = ['_ob', '_func'] <NEW_LINE> def __init__(self, c): <NEW_LINE> <INDENT> if not hasattr(c, '__call__'): <NEW_LINE> <INDENT> raise ValueError('Error: given callback is not callable.') <NEW_LINE> <DEDENT> if hasattr(c, '__self__'): <NEW_LINE> <INDENT> self._ob = weakref.ref(c.__self__) <NEW_LINE> self._func = c.__func__.__name__ <NEW_LINE> <DEDENT> elif hasattr(c, 'im_self'): <NEW_LINE> <INDENT> self._ob = weakref.ref(c.im_self) <NEW_LINE> self._func = c.im_func.__name__ <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._func = c <NEW_LINE> self._ob = None <NEW_LINE> <DEDENT> <DEDENT> def isdead(self): <NEW_LINE> <INDENT> if self._ob: <NEW_LINE> <INDENT> return self._ob() is None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> def compare(self, other): <NEW_LINE> <INDENT> if self._ob and other._ob: <NEW_LINE> <INDENT> return (self._ob() is other._ob()) and (self._func == other._func) <NEW_LINE> <DEDENT> elif not (self._ob or other._ob): <NEW_LINE> <INDENT> return self._func == other._func <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self._func.__str__() <NEW_LINE> <DEDENT> def call(self, *args, **kwargs): <NEW_LINE> <INDENT> if self.isdead(): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> if self._ob: <NEW_LINE> <INDENT> func = getattr(self._ob(), self._func) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> func = self._func <NEW_LINE> <DEDENT> <DEDENT> except Exception: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> return func(*args, **kwargs) <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> print('Exception while handling event:') <NEW_LINE> print(getErrorMsg())
CallableObject(callable) A class to hold a callable. If it is a plain function, its reference is held (because it might be a closure). If it is a method, we keep the function name and a weak reference to the object. In this way, having for instance a signal bound to a method, the object is not prevented from being cleaned up.
62598fa021bff66bcd722a90
class Component(object): <NEW_LINE> <INDENT> def __init__(self,manufacturer,model='n/a',manufactureYear='',comment=''): <NEW_LINE> <INDENT> self._manufacturer = manufacturer <NEW_LINE> self._model = model <NEW_LINE> self._manufactureYear = manufactureYear <NEW_LINE> self._comment = comment <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def addComment(self,comment): <NEW_LINE> <INDENT> self._comment = self._comment + comment <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def replaceComment(self,comment): <NEW_LINE> <INDENT> self._comment = comment
This class is intended as a basic heritable class for all components of an X-ray scattering set-up. The purpose of Components is to save the experimental set-up used in a standardized way. Anyone not familiar with an experiment should be able to easily get the specs of the set-up and there should be no ambiguity on what devices/components were used.
62598fa04a966d76dd5eed0e
class PaymentStatusListener(View): <NEW_LINE> <INDENT> def post(self, request, *args, **kwargs): <NEW_LINE> <INDENT> status = request.POST.get('status', None) <NEW_LINE> order_payment_id = request.POST.get('order_payment_id') <NEW_LINE> try: <NEW_LINE> <INDENT> order_payment = OrderPayment.objects.get(id=order_payment_id) <NEW_LINE> <DEDENT> except OrderPayment.DoesNotExist: <NEW_LINE> <INDENT> raise Http404 <NEW_LINE> <DEDENT> service = PaymentService(order_payment) <NEW_LINE> service.adapter.set_order_payment_new_status(status) <NEW_LINE> return HttpResponse('success')
This view simulates our listener that handles incoming messages from an external PSP to update the status of a payment. It's an "underwater" view and the user does not directly engage with this view or url, only the external server by making a POST request to it.
62598fa0adb09d7d5dc0a3b7
class CreateDistanceCallback(object): <NEW_LINE> <INDENT> def __init__(self, locations): <NEW_LINE> <INDENT> size = len(locations) <NEW_LINE> self.matrix = {} <NEW_LINE> for from_node in range(size): <NEW_LINE> <INDENT> self.matrix[from_node] = {} <NEW_LINE> for to_node in range(size): <NEW_LINE> <INDENT> x1 = locations[from_node][0] <NEW_LINE> y1 = locations[from_node][1] <NEW_LINE> x2 = locations[to_node][0] <NEW_LINE> y2 = locations[to_node][1] <NEW_LINE> self.matrix[from_node][to_node] = manDistance(x1, y1, x2, y2) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def Distance(self, from_node, to_node): <NEW_LINE> <INDENT> return self.matrix[from_node][to_node]
Create callback to calculate distances between points.
62598fa03539df3088ecc0e1
class RuleAddForm(AddForm): <NEW_LINE> <INDENT> form_fields = form.FormFields(IRuleConfiguration) <NEW_LINE> label = _(u"Add Rule") <NEW_LINE> description = _(u"Add a new rule. Once complete, you can manage the " "rule's actions and conditions separately.") <NEW_LINE> form_name = _(u"Configure rule") <NEW_LINE> def nextURL(self): <NEW_LINE> <INDENT> context = aq_parent(aq_inner(self.context)) <NEW_LINE> url = str(getMultiAdapter((context, self.request), name=u"absolute_url")) <NEW_LINE> if base_hasattr(self._parent, '_chosen_name'): <NEW_LINE> <INDENT> return '%s/++rule++%s/@@manage-elements' % (url, self._parent._chosen_name) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return '%s/@@rules-controlpanel' % url <NEW_LINE> <DEDENT> <DEDENT> def create(self, data): <NEW_LINE> <INDENT> rule = Rule() <NEW_LINE> form.applyChanges(rule, self.form_fields, data) <NEW_LINE> return rule
An add form for rules.
62598fa08a43f66fc4bf1fa9
class MessageBulkActionEndpoint(BaseAPIView): <NEW_LINE> <INDENT> permission = 'msgs.msg_api' <NEW_LINE> serializer_class = MsgBulkActionSerializer <NEW_LINE> def post(self, request, *args, **kwargs): <NEW_LINE> <INDENT> user = request.user <NEW_LINE> serializer = self.serializer_class(user=user, data=request.data) <NEW_LINE> if serializer.is_valid(): <NEW_LINE> <INDENT> serializer.save() <NEW_LINE> return Response('', status=status.HTTP_204_NO_CONTENT) <NEW_LINE> <DEDENT> return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def get_write_explorer(cls): <NEW_LINE> <INDENT> spec = dict(method="POST", title="Update one or more messages", url=reverse('api.v1.message_actions'), slug='msg-actions', request='{ "messages": [12345, 23456], "action": "label", ' '"label_uuid": "fdd156ca-233a-48c1-896d-a9d594d59b95" }') <NEW_LINE> spec['fields'] = [dict(name='messages', required=True, help="A JSON array of one or more integers, each a message id."), dict(name='action', required=True, help="One of the following strings: label, unlabel, archive, unarchive, delete"), dict(name='label', required=False, help="The name of a label if the action is label or unlabel"), dict(name='label_uuid', required=False, help="The UUID of a label if the action is label or unlabel")] <NEW_LINE> return spec
## Bulk Message Updating A **POST** can be used to perform an action on a set of messages in bulk. * **messages** - either a single message id or a JSON array of message ids (int or array of ints) * **action** - the action to perform, a string one of: label - Apply the given label to the messages unlabel - Remove the given label from the messages archive - Archive the messages unarchive - Un-archive the messages delete - Permanently delete the messages * **label** - the name of a label (string, optional) * **label_uuid** - the UUID of a label (string, optional) Example: POST /api/v1/message_actions.json { "messages": [1234, 2345, 3456], "action": "label", "label": "Testing" } You will receive an empty response if successful.
62598fa0baa26c4b54d4f0dc
class EmporiumCoin(Bitcoin): <NEW_LINE> <INDENT> name = 'emporiumcoin' <NEW_LINE> symbols = ('EMPC', ) <NEW_LINE> nodes = ("40.68.31.20", ) <NEW_LINE> port = 8295 <NEW_LINE> message_start = b'\xc2\xb4\xa3\xd1' <NEW_LINE> base58_prefixes = { 'PUBKEY_ADDR': 33, 'SCRIPT_ADDR': 28, 'SECRET_KEY': 161 }
Class with all the necessary EmporiumCoin network information based on https://github.com/emporiumcoin/EmporiumCoin/blob/master/src/net.cpp (date of access: 02/14/2018)
62598fa091f36d47f2230db7
class Question(models.Model): <NEW_LINE> <INDENT> quiz = models.ManyToManyField(Quiz) <NEW_LINE> question_text = models.TextField() <NEW_LINE> is_subjective = models.BooleanField(default=False) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return self.question_text
Questions belonging to a quiz.
62598fa0f7d966606f747e0f
class InspectionViewSet(ListModelMixin, mixins.RetrieveModelMixin, mixins.CreateModelMixin, mixins.DestroyModelMixin, mixins.UpdateModelMixin, viewsets.GenericViewSet): <NEW_LINE> <INDENT> queryset = Inspection.objects.filter_by() <NEW_LINE> authentication_classes = (JSONWebTokenAuthentication, SessionAuthentication) <NEW_LINE> filter_backends = (DjangoFilterBackend,) <NEW_LINE> filter_class = InspectionFilter <NEW_LINE> def get_serializer_class(self): <NEW_LINE> <INDENT> if self.action == 'upload_images': <NEW_LINE> <INDENT> return InspectionImageSerializer <NEW_LINE> <DEDENT> if self.action in ('list', 'retrieve'): <NEW_LINE> <INDENT> return InspectionDetailSerializer <NEW_LINE> <DEDENT> return InspectionSerializer <NEW_LINE> <DEDENT> @action(methods=['POST'], detail=True, url_path='images') <NEW_LINE> def upload_images(self, request, *args, **kwargs): <NEW_LINE> <INDENT> inspection_id = kwargs.get('pk') <NEW_LINE> request.data['inspection'] = inspection_id <NEW_LINE> return super(InspectionViewSet, self).create(request, *args, **kwargs)
巡检 list: 获取巡检信息 retrieve: 获取巡检详细信息 create: 创建巡检报告 destroy: 删除巡检报告 update: 修改巡检报告 upload_images: 上传巡检图片
62598fa00c0af96317c561af
class Authorizer(ftpserver.DummyAuthorizer): <NEW_LINE> <INDENT> _group_info = None <NEW_LINE> _root_path_len = None <NEW_LINE> _home = None <NEW_LINE> def __init__(self, grp): <NEW_LINE> <INDENT> super(Authorizer, self).__init__() <NEW_LINE> self._group_info = grp <NEW_LINE> self._root_path_len = len(config.FTP_ROOT.split(u'/')) <NEW_LINE> self._home = config.FTP_ROOT <NEW_LINE> if isinstance(self._home, unicode): <NEW_LINE> <INDENT> self._home = self._home.encode(config.FILESYSTEM_ENCODING) <NEW_LINE> <DEDENT> <DEDENT> def validate_authentication(self, username, passwd): <NEW_LINE> <INDENT> if username == config.FTP_ADMIN_USERNAME: <NEW_LINE> <INDENT> return md5(passwd).hexdigest() == config.FTP_ADMIN_PASSWD_MD5 <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> api = Acnt9API(username, passwd) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> return 'failed to login: {0}'.format(e) <NEW_LINE> <DEDENT> self._group_info.update(api) <NEW_LINE> if username not in self._group_info.authed_users: <NEW_LINE> <INDENT> return 'failed to login: unauthorized user' <NEW_LINE> <DEDENT> return True <NEW_LINE> <DEDENT> def get_home_dir(self, username): <NEW_LINE> <INDENT> return self._home <NEW_LINE> <DEDENT> def get_msg_login(self, username): <NEW_LINE> <INDENT> return "welcome, " + username + "!" <NEW_LINE> <DEDENT> def get_msg_quit(self, username): <NEW_LINE> <INDENT> return "goodbye!" 
<NEW_LINE> <DEDENT> def get_perms(self, username): <NEW_LINE> <INDENT> ftpserver.logerror("unsupported operation: Authorizer.get_perms") <NEW_LINE> return "" <NEW_LINE> <DEDENT> _perm_map = {'e': 'read', 'l': 'read', 'r': 'read', 'a': 'modify', 'd': 'modify', 'f': 'modify', 'm': 'write', 'w': 'write', 'M': 'modify'} <NEW_LINE> def has_perm(self, username, perm, path = None): <NEW_LINE> <INDENT> username = fs_enc(username) <NEW_LINE> path = fs_enc(path) <NEW_LINE> parts = path.split(u'/') <NEW_LINE> if len(parts) > self._root_path_len and parts[self._root_path_len] == config.ROOT_PUB_NAME: <NEW_LINE> <INDENT> if username == config.FTP_ADMIN_USERNAME: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return self._perm_map[perm] != 'modify' <NEW_LINE> <DEDENT> for i in range(self._root_path_len, len(parts)): <NEW_LINE> <INDENT> base = parts[i] <NEW_LINE> if base == config.PUBLIC_NAME or base == config.PRIVATE_NAME: <NEW_LINE> <INDENT> if username == config.FTP_ADMIN_USERNAME: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> base = 'public' if base == config.PUBLIC_NAME else 'private' <NEW_LINE> g = self._group_info.get_node_by_path(u'/'.join(parts[:i])) <NEW_LINE> if not g: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return username in getattr(g, base + '_' + self._perm_map[perm]) <NEW_LINE> <DEDENT> <DEDENT> return perm in ['e', 'l']
authorizer used for pyftpdlib
62598fa0379a373c97d98e43
class SQLAlchemy: <NEW_LINE> <INDENT> def __init__(self, app=None): <NEW_LINE> <INDENT> self._app = app <NEW_LINE> self.session = self._create_scoped_session() <NEW_LINE> if app is not None: <NEW_LINE> <INDENT> self.init_app(app) <NEW_LINE> <DEDENT> <DEDENT> def init_app(self, app): <NEW_LINE> <INDENT> ctx = _app_ctx_stack.top <NEW_LINE> ctx['sqlalchemy_session'] = self.session <NEW_LINE> @app.teardown_appcontext <NEW_LINE> def shutdown_session(response_or_exc): <NEW_LINE> <INDENT> self.session.remove() <NEW_LINE> return response_or_exc <NEW_LINE> <DEDENT> <DEDENT> @staticmethod <NEW_LINE> def _create_scoped_session(): <NEW_LINE> <INDENT> session = orm.sessionmaker() <NEW_LINE> return orm.scoped_session(session, scopefunc=_app_ctx_stack.__ident_func__)
SQLAlchemy class demonstrating how flask-sqlalchemy manages the session
62598fa03c8af77a43b67e56
class _ListSpiders(cli_base.Command): <NEW_LINE> <INDENT> def setup(self): <NEW_LINE> <INDENT> parser = self._parser.add_parser( "list", help="List the availible tasks.") <NEW_LINE> parser.set_defaults(work=self.run) <NEW_LINE> <DEDENT> def _work(self): <NEW_LINE> <INDENT> return spiders_util.get_spiders_info() <NEW_LINE> <DEDENT> def _on_task_done(self, result): <NEW_LINE> <INDENT> table = prettytable.PrettyTable( ["Name", "Websites", "Description", "Start Urls"]) <NEW_LINE> for spider in result: <NEW_LINE> <INDENT> table.add_row([spider.name, spider.websites, spider.description, spider.start_urls]) <NEW_LINE> <DEDENT> print(table)
List the spiders.
62598fa057b8e32f52508033
class Google(models.Model): <NEW_LINE> <INDENT> user = models.OneToOneField(User, on_delete=models.CASCADE) <NEW_LINE> gauth_key = models.CharField(max_length=16)
This models adds the Google Authenticator info to the standard User model
62598fa0e76e3b2f99fd8865
class QueueWithMax(): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self._s1 = StackWithMax() <NEW_LINE> self._s2 = StackWithMax() <NEW_LINE> <DEDENT> def enqueue(self, a): <NEW_LINE> <INDENT> self._s1.push(a) <NEW_LINE> <DEDENT> def dequeue(self): <NEW_LINE> <INDENT> if self._s2.is_empty: <NEW_LINE> <INDENT> while not self._s1.is_empty: <NEW_LINE> <INDENT> v = self._s1.pop() <NEW_LINE> self._s2.push(v) <NEW_LINE> <DEDENT> <DEDENT> return self._s2.pop() <NEW_LINE> <DEDENT> def max(self): <NEW_LINE> <INDENT> return max(self._s2.max(), self._s1.max())
Implement a queue with two stacks with constant time maximum retrieving.
62598fa0097d151d1a2c0e57
class SimpleSITKReader(AbstractReader): <NEW_LINE> <INDENT> def __init__(self, dtypes, dshapes, name='simplesitkreader'): <NEW_LINE> <INDENT> super(SimpleSITKReader, self).__init__(dtypes, dshapes, name=name) <NEW_LINE> <DEDENT> def _read_sample(self, id_queue, **kwargs): <NEW_LINE> <INDENT> path_list = id_queue[0] <NEW_LINE> data = [] <NEW_LINE> for p, d in zip(list(path_list), self.dtypes): <NEW_LINE> <INDENT> if isinstance(p, str): <NEW_LINE> <INDENT> sample = sitk.GetArrayFromImage(sitk.ReadImage(p)) <NEW_LINE> data.append(sample.astype(self._map_dtype(d))) <NEW_LINE> <DEDENT> elif isinstance(p, (float, int)): <NEW_LINE> <INDENT> if d is tf.float32 and isinstance(p, int): <NEW_LINE> <INDENT> print('Warning: Losing accuracy by converting int to float') <NEW_LINE> <DEDENT> data.append(self._map_dtype(d)(p)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise Exception('Not sure how to interpret "{}"'.format(p)) <NEW_LINE> <DEDENT> <DEDENT> data = self._preprocess(data) <NEW_LINE> data = self._augment(data) <NEW_LINE> return data
SimpleSITKReader Simple reader class to read sitk files by file path
62598fa07047854f4633f20b
class BanditAlgorithm: <NEW_LINE> <INDENT> def __init__(self, bandit): <NEW_LINE> <INDENT> self.reset(bandit) <NEW_LINE> <DEDENT> def reset(self, bandit=None): <NEW_LINE> <INDENT> if bandit is not None: <NEW_LINE> <INDENT> self.bandit = bandit <NEW_LINE> <DEDENT> num_arms = bandit.get_num_arms() <NEW_LINE> self.running_sum_rewards = np.zeros(num_arms, dtype=float) <NEW_LINE> self.num_pulls = np.zeros(num_arms, dtype=int) <NEW_LINE> self.total_pulls = 0 <NEW_LINE> self.average_rewards = np.zeros(num_arms, dtype=float) <NEW_LINE> self.best_arm = random.randint(0, num_arms-1) <NEW_LINE> <DEDENT> def pull(self): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def update(self, reward, arm): <NEW_LINE> <INDENT> self.running_sum_rewards[arm] += reward <NEW_LINE> self.num_pulls[arm] += 1 <NEW_LINE> self.total_pulls += 1 <NEW_LINE> self.average_rewards = np.divide(self.running_sum_rewards, self.num_pulls) <NEW_LINE> self.average_rewards[np.isinf(self.average_rewards)] = 0 <NEW_LINE> self.average_rewards[np.isnan(self.average_rewards)] = 0 <NEW_LINE> self.best_arm = np.argmax(self.get_average_rewards()) <NEW_LINE> <DEDENT> def get_bandit(self): <NEW_LINE> <INDENT> return self.bandit <NEW_LINE> <DEDENT> def get_total_pulls(self): <NEW_LINE> <INDENT> return self.total_pulls <NEW_LINE> <DEDENT> def get_average_rewards(self): <NEW_LINE> <INDENT> return self.average_rewards <NEW_LINE> <DEDENT> def get_best_arm(self): <NEW_LINE> <INDENT> return self.best_arm <NEW_LINE> <DEDENT> def get_name(self): <NEW_LINE> <INDENT> raise NotImplementedError
Bandit algorithm abstract class.
62598fa045492302aabfc304
class RnnRbm: <NEW_LINE> <INDENT> def __init__( self, n_hidden=150, n_hidden_recurrent=100, lr=0.001, r=(21, 109), dt=0.3 ): <NEW_LINE> <INDENT> self.lastcost=None <NEW_LINE> self.r = r <NEW_LINE> self.dt = dt <NEW_LINE> (v, v_sample, cost, monitor, params, updates_train, v_t, updates_generate) = build_rnnrbm( r[1] - r[0], n_hidden, n_hidden_recurrent ) <NEW_LINE> gradient = T.grad(cost, params, consider_constant=[v_sample]) <NEW_LINE> updates_train.update( ((p, p - lr * g) for p, g in zip(params, gradient)) ) <NEW_LINE> self.train_function = theano.function( [v], monitor, updates=updates_train ) <NEW_LINE> self.generate_function = theano.function( [], v_t, updates=updates_generate ) <NEW_LINE> <DEDENT> def train(self, files, batch_size=100, num_epochs=200): <NEW_LINE> <INDENT> if self.lastcost != None: <NEW_LINE> <INDENT> print("last cost:") <NEW_LINE> print(numpy.mean(self.lastcost )) <NEW_LINE> <DEDENT> assert len(files) > 0, 'Training set is empty!' ' (did you download the data files?)' <NEW_LINE> dataset = [midiread(f, self.r, self.dt).piano_roll.astype(theano.config.floatX) for f in files] <NEW_LINE> try: <NEW_LINE> <INDENT> for epoch in range(num_epochs): <NEW_LINE> <INDENT> numpy.random.shuffle(dataset) <NEW_LINE> costs = [] <NEW_LINE> for s, sequence in enumerate(dataset): <NEW_LINE> <INDENT> print("s:"+str(s)) <NEW_LINE> for i in range(0, len(sequence), batch_size): <NEW_LINE> <INDENT> tmp=sequence[i:i + batch_size] <NEW_LINE> lentmp=len(tmp) <NEW_LINE> cost = self.train_function(tmp) <NEW_LINE> costs.append(cost) <NEW_LINE> <DEDENT> <DEDENT> f = open('RnnRbmObj.save', 'wb') <NEW_LINE> cPickle.dump(self, f, protocol=cPickle.HIGHEST_PROTOCOL) <NEW_LINE> f.close() <NEW_LINE> print('Epoch %i/%i' % (epoch + 1, num_epochs)) <NEW_LINE> print(numpy.mean(costs)) <NEW_LINE> self.lastcost=costs <NEW_LINE> sys.stdout.flush() <NEW_LINE> <DEDENT> <DEDENT> except KeyboardInterrupt: <NEW_LINE> <INDENT> print('Interrupted by user.') <NEW_LINE> <DEDENT> print("done") 
<NEW_LINE> <DEDENT> def generate(self, filename, show=False): <NEW_LINE> <INDENT> piano_roll = self.generate_function() <NEW_LINE> midiwrite(filename, piano_roll, self.r, self.dt) <NEW_LINE> if show: <NEW_LINE> <INDENT> extent = (0, self.dt * len(piano_roll)) + self.r <NEW_LINE> pylab.figure() <NEW_LINE> pylab.imshow(piano_roll.T, origin='lower', aspect='auto', interpolation='nearest', cmap=pylab.cm.gray_r, extent=extent) <NEW_LINE> pylab.xlabel('time (s)') <NEW_LINE> pylab.ylabel('MIDI note number') <NEW_LINE> pylab.title('generated piano-roll')
Simple class to train an RNN-RBM from MIDI files and to generate sample sequences.
62598fa067a9b606de545df8
class Auth(TokenAuth): <NEW_LINE> <INDENT> def check_auth(self, token, allowed_roles, resource, method): <NEW_LINE> <INDENT> accounts = app.data.driver.db['accounts'] <NEW_LINE> lookup = {'t': token} <NEW_LINE> if allowed_roles: <NEW_LINE> <INDENT> lookup['r'] = {'$in': allowed_roles} <NEW_LINE> <DEDENT> account = accounts.find_one(lookup) <NEW_LINE> if account: <NEW_LINE> <INDENT> self.request_auth_value = account['_id'] <NEW_LINE> <DEDENT> return account is not None
This class implements Token Based Authentication for our API endpoints. Since the API itself is going to be on SSL, we're fine with this variation of Basic Authentication. For details on Eve authentication handling see: http://python-eve.org/authentication.html
62598fa05fdd1c0f98e5ddc7
class Cluster(Particle): <NEW_LINE> <INDENT> pass
egamma: ph_cl_* pau: ph_*_clus
62598fa045492302aabfc305
class ListDocIdsService(RestfulResource): <NEW_LINE> <INDENT> def get(self, wiki_id, start=0, limit=None): <NEW_LINE> <INDENT> bucket = get_s3_bucket() <NEW_LINE> keys = bucket.get_all_keys(prefix='xml/%s' % (str(wiki_id)), max_keys=1) <NEW_LINE> if len(keys) == 0: <NEW_LINE> <INDENT> return {'status':500, 'message':'Wiki not yet processed'} <NEW_LINE> <DEDENT> if limit: <NEW_LINE> <INDENT> ids = ArticleDocIdIterator(wiki_id)[start:limit] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> ids = [id for id in ArticleDocIdIterator(wiki_id)[start:]] <NEW_LINE> <DEDENT> return {wiki_id: ids, 'status':200, 'numFound':len(ids)}
Service to expose resources in WikiDocumentIterator
62598fa0a17c0f6771d5c068
class Point: <NEW_LINE> <INDENT> def __init__(self, ship, faces, draft, trim): <NEW_LINE> <INDENT> disp, B, cb = displacement(ship, draft=draft, trim=trim) <NEW_LINE> if not faces: <NEW_LINE> <INDENT> wet = 0.0 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> wet = wettedArea(faces, draft=draft, trim=trim) <NEW_LINE> <DEDENT> mom = moment(ship, draft=draft, trim=trim) <NEW_LINE> farea, cf = floatingArea(ship, draft=draft, trim=trim) <NEW_LINE> bm = BMT(ship, draft=draft, trim=trim) <NEW_LINE> cm = mainFrameCoeff(ship, draft=draft) <NEW_LINE> self.draft = draft <NEW_LINE> self.trim = trim <NEW_LINE> self.disp = disp <NEW_LINE> self.xcb = Units.Quantity(B.x, Units.Length) <NEW_LINE> self.wet = wet <NEW_LINE> self.farea = farea <NEW_LINE> self.mom = mom <NEW_LINE> self.KBt = Units.Quantity(B.z, Units.Length) <NEW_LINE> self.BMt = bm <NEW_LINE> self.Cb = cb <NEW_LINE> self.Cf = cf <NEW_LINE> self.Cm = cm
Hydrostatics point, that contains the following members: draft -- Ship draft trim -- Ship trim disp -- Ship displacement xcb -- Bouyance center X coordinate wet -- Wetted ship area mom -- Trimming 1cm ship moment farea -- Floating area KBt -- Transversal KB height BMt -- Transversal BM height Cb -- Block coefficient. Cf -- Floating coefficient. Cm -- Main frame coefficient. The moment to trim the ship 1 cm is positive when is resulting in a positive trim angle.
62598fa0442bda511e95c289
class _CacheConfig(object): <NEW_LINE> <INDENT> def __init__( self, api_url="https://kubernetes.default", verify_api_queries=True, cache_expiry_secs=30, cache_purge_secs=300, cache_expiry_fuzz_secs=0, cache_start_fuzz_secs=0, query_timeout=20, global_config=None, ): <NEW_LINE> <INDENT> self.api_url = api_url <NEW_LINE> self.verify_api_queries = verify_api_queries <NEW_LINE> self.cache_expiry_secs = cache_expiry_secs <NEW_LINE> self.cache_expiry_fuzz_secs = cache_expiry_fuzz_secs <NEW_LINE> self.cache_start_fuzz_secs = cache_start_fuzz_secs <NEW_LINE> self.cache_purge_secs = cache_purge_secs <NEW_LINE> self.query_timeout = query_timeout <NEW_LINE> self.global_config = global_config <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> for key, val in self.__dict__.items(): <NEW_LINE> <INDENT> if val != getattr(other, key): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> return True <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not (self == other) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> s = "" <NEW_LINE> for key, val in self.__dict__.items(): <NEW_LINE> <INDENT> s += "\n\t%s: %s" % (key, val) <NEW_LINE> <DEDENT> return s + "\n" <NEW_LINE> <DEDENT> def need_new_k8s_object(self, new_config): <NEW_LINE> <INDENT> relevant_fields = ["api_url", "verify_api_queries", "query_timeout"] <NEW_LINE> relevant_global_config_fields = [ "agent_log_path", "k8s_log_api_responses", "k8s_log_api_exclude_200s", "k8s_log_api_min_response_len", "k8s_log_api_min_latency", "k8s_log_api_ratelimit_interval", ] <NEW_LINE> for field in relevant_fields: <NEW_LINE> <INDENT> if getattr(self, field) != getattr(new_config, field): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> if bool(self.global_config) ^ bool(new_config.global_config): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> if self.global_config and new_config: <NEW_LINE> <INDENT> for field in relevant_global_config_fields: <NEW_LINE> <INDENT> if 
getattr(self.global_config, field) != getattr( new_config.global_config, field ): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return False
Internal configuration options for the Kubernetes cache
62598fa001c39578d7f12bad
class ChebyshevFitter(BandpassFitter):
    """Fit the analytic shape of a bandpass with a Chebyshev-style
    polynomial basis via linear least squares."""

    def __init__(self, polycount=33):
        """:param polycount: number of polynomial terms used in the fit."""
        self.polycount = polycount

    def fit(self, bandpass):
        """Return the polynomial approximation of ``bandpass``.

        BUG FIX: previously called the misspelled name ``ChebyShevFitter``,
        which raised NameError at runtime.
        """
        return self.chebyshevFit(bandpass, self.polycount)

    @classmethod
    def chebyshevFit(cls, bandpass, polycount):
        """Least-squares fit of ``bandpass`` onto ``polycount`` basis
        polynomials, returning the fitted (smoothed) curve.

        NOTE(review): the recurrence (T0=1, T1=2x, Tn=2x*T(n-1)-T(n-2))
        matches Chebyshev polynomials of the *second* kind, evaluated on
        [0, 1) rather than the usual [-1, 1] — confirm this is intended.
        """
        x = arange(0, 1, 1.0 / len(bandpass))
        A = zeros((len(bandpass), polycount), float32)
        A[:, 0] = 1
        A[:, 1] = 2 * x
        for n in range(2, polycount):
            A[:, n] = 2 * x * A[:, n - 1] - A[:, n - 2]
        # Solve A @ p ~= bandpass, then evaluate the fitted polynomial.
        (p, residuals, rank, s) = lstsq(A, bandpass)
        return (p * A).sum(axis=1)
The bandpass shape can be analytically expressed as a Chebyshev polynomial. This fitter will apply a Chebyshev fit on a dataset
62598fa030bbd7224646988e
class ColumnTest(case.DBTestCase):
    """Tests for finder Column."""

    @property
    def column(self):
        """Shortcut to the Column class under test."""
        from moztrap.view.lists.finder import Column
        return Column

    def test_objects(self):
        """objects() delegates to .all() on the supplied queryset."""
        queryset = Mock()
        col = self.column("thing", "_things.html", queryset)
        self.assertIs(col.objects(), queryset.all.return_value)

    @patch("moztrap.view.lists.finder.filter_url")
    def test_goto_url(self, filter_url):
        """goto_url() builds a filter URL from the goto name and object."""
        col = self.column("thing", "_things.html", Mock(), "goto_name")
        target = Mock()
        self.assertIs(col.goto_url(target), filter_url.return_value)
        filter_url.assert_called_with("goto_name", target)

    def test_no_goto_url(self):
        """Without a goto name, goto_url() returns None."""
        col = self.column("thing", "_things.html", Mock())
        self.assertIs(col.goto_url(Mock()), None)
Tests for finder Column.
62598fa06e29344779b0048b
class EuNorwayTransformFunctions(CommonCompHarmTransformFunctions):
    """All custom (uncommon) transform functions **SPECIFIC to individual
    processing task** for Norway data must be defined as part of this class.
    """

    # Norway-specific overrides layered on top of the common English
    # category mappings (currently none).
    NORWAY_SPECIFIC_CATEGORY_MAPPINGS = {
    }

    def __init__(self, config):
        self.config = config
        self.category_mappings = dict(
            comp_harm_constants.ENGLISH_CATEGORY_MAPPINGS,
            **EuNorwayTransformFunctions.NORWAY_SPECIFIC_CATEGORY_MAPPINGS)

    @staticmethod
    def _columns_reference(columns_ref):
        """Normalize a raw column header to its canonical name.

        Currently only maps 'grossprice' -> 'gross'; anything else is
        returned unchanged. Replaces the old bare ``except``/``finally``
        construct with a plain dict lookup.
        """
        renames = {"grossprice": "gross"}
        return renames.get(columns_ref.lower(), columns_ref)

    def join_sheets_in_a_unique_dataframe(self, df):
        """Concatenate the monthly worksheets of the current input Excel
        file into one DataFrame with the canonical column set.

        Only visible sheets whose name looks like '<word> <number>'
        (e.g. 'January 2020') are included; rows missing any required
        column are dropped.

        NOTE(review): the ``df`` parameter is unused — kept for interface
        compatibility with the common transform pipeline.
        """
        columns_to_use = ['category', 'subcategory', 'brand', 'product',
                          'advertiser', 'period', 'media channel', 'gross']
        # Seed with an empty frame so the result always has the expected
        # columns even when no sheet matches.
        frames = [pd.DataFrame(columns=columns_to_use)]
        wb = pd.ExcelFile(self.config['current_input_file'])
        for sheet in wb.book.sheets():
            # visibility == 0 means the sheet is visible (xlrd convention).
            name_parts = sheet.name.split()
            if sheet.visibility == 0 and (
                    len(name_parts) == 2 and name_parts[1].isnumeric()):
                dt_to_clean = wb.parse(sheet.name, header=self.config['header'])
                dt_to_clean.dropna(how='all', axis=1, inplace=True)
                dt_to_clean.dropna(how='all', axis=0, inplace=True)
                columns_to_check = [
                    EuNorwayTransformFunctions._columns_reference(str(x))
                    for x in dt_to_clean.columns.str.lower().tolist()]
                dt_to_clean.columns = columns_to_check
                dt_to_clean.dropna(subset=columns_to_use, inplace=True)
                frames.append(dt_to_clean[columns_to_use])
        # pd.concat replaces the deprecated DataFrame.append(sort=False).
        return pd.concat(frames, sort=False)
All custom (uncommon) transform functions **SPECIFIC to individual processing task** must be defined as part of this class.
62598fa03539df3088ecc0e3
class BackgroundThread(threading.Thread):
    """Daemon thread that repeatedly invokes ``func`` every ``frequency``
    seconds until :meth:`cancel` is called.
    """

    def __init__(self, func, frequency=2, verbosity=1, **kwargs):
        """Create the background thread (call ``start()`` to run it).

        :param func: zero-argument callable invoked on each tick
        :param frequency: seconds to sleep between invocations
        :param verbosity: stored but not used by this class — presumably
            read by ``func`` or subclasses; TODO confirm
        :param kwargs: forwarded to ``threading.Thread``
        """
        super(BackgroundThread, self).__init__(**kwargs)
        self.func = func
        self.frequency = frequency
        self.verbosity = verbosity
        # Daemon so a still-running loop never blocks interpreter exit.
        self.daemon = True
        # Initialize the flag so cancel() before start() is well-defined.
        self.running = False

    def cancel(self):
        """Ask the run loop to stop after its current sleep/iteration."""
        self.running = False

    def run(self):
        self.running = True
        while self.running:
            time.sleep(self.frequency)
            # Re-check after sleeping: cancel() may have fired meanwhile.
            if not self.running:
                return
            # The old ``try/except Exception: raise`` wrapper was a no-op
            # (it re-raised unconditionally) and has been removed.
            self.func()
BackgroundThread runs a routine continuously in a separate thread until `BackgroundThread.cancel` is called.
62598fa092d797404e388a7d
class BaseConfig:
    """Base application configuration shared by all environments."""

    # Debug mode is off unless a subclass turns it on.
    DEBUG = False
    # Secret key comes from the environment, with a development fallback.
    SECRET_KEY = os.environ.get('SECRET_KEY', 'a default secret key')
    # Allow cookies/credentials on cross-origin requests.
    CORS_SUPPORTS_CREDENTIALS = True
Base application configuration
62598fa02ae34c7f260aaf0f