code
stringlengths
4
4.48k
docstring
stringlengths
1
6.45k
_id
stringlengths
24
24
class Decoder(nn.Module): <NEW_LINE> <INDENT> def __init__(self, num_channels, encoder_dim, decoder_dim1, decoder_dim2, decoder_dim3, num_classes, num_lstm_cell): <NEW_LINE> <INDENT> super(Decoder, self).__init__() <NEW_LINE> self.num_classes = num_classes <NEW_LINE> self.num_lstm_cell = num_lstm_cell <NEW_LINE> self.attention = Attention(num_channels, encoder_dim, decoder_dim1) <NEW_LINE> self.decode_step1 = nn.LSTMCell(num_channels, decoder_dim1, bias=True) <NEW_LINE> self.decode_step2 = nn.LSTMCell(decoder_dim1, decoder_dim2, bias=True) <NEW_LINE> self.decode_step3 = nn.LSTMCell(decoder_dim2, decoder_dim3, bias=True) <NEW_LINE> self.last_linear = nn.Linear(decoder_dim3, num_classes) <NEW_LINE> self.softmax = nn.Softmax(dim = 1) <NEW_LINE> self.init_h1 = nn.Linear(encoder_dim*encoder_dim, decoder_dim1) <NEW_LINE> self.init_c1 = nn.Linear(encoder_dim*encoder_dim, decoder_dim1) <NEW_LINE> self.init_h2 = nn.Linear(encoder_dim*encoder_dim, decoder_dim2) <NEW_LINE> self.init_c2 = nn.Linear(encoder_dim*encoder_dim, decoder_dim2) <NEW_LINE> self.init_h3 = nn.Linear(encoder_dim*encoder_dim, decoder_dim3) <NEW_LINE> self.init_c3 = nn.Linear(encoder_dim*encoder_dim, decoder_dim3) <NEW_LINE> <DEDENT> def init_hidden_state(self, encoder_out): <NEW_LINE> <INDENT> mean_encoder_out = encoder_out.mean(dim=1) <NEW_LINE> h1 = self.init_h1(mean_encoder_out) <NEW_LINE> c1 = self.init_c1(mean_encoder_out) <NEW_LINE> h2 = self.init_h2(mean_encoder_out) <NEW_LINE> c2 = self.init_c2(mean_encoder_out) <NEW_LINE> h3 = self.init_h3(mean_encoder_out) <NEW_LINE> c3 = self.init_c3(mean_encoder_out) <NEW_LINE> return h1, c1, h2, c2, h3, c3 <NEW_LINE> <DEDENT> def forward(self, encoder_out): <NEW_LINE> <INDENT> batch_size = encoder_out.size(0) <NEW_LINE> h1, c1, h2, c2, h3, c3 = self.init_hidden_state(encoder_out) <NEW_LINE> y_complete = torch.zeros(size = (self.num_lstm_cell, batch_size, self.num_classes)) <NEW_LINE> for i in range(self.num_lstm_cell): <NEW_LINE> <INDENT> 
attention_weighted_encoding = self.attention(encoder_out, h1) <NEW_LINE> h1, c1 = self.decode_step1(attention_weighted_encoding, (h1, c1)) <NEW_LINE> h2, c2 = self.decode_step2(h1, (h2, c2)) <NEW_LINE> h3, c3 = self.decode_step3(h2, (h3, c3)) <NEW_LINE> out = self.last_linear(h3) <NEW_LINE> y_t = self.softmax(out) <NEW_LINE> y_complete[i] = y_t <NEW_LINE> <DEDENT> return y_complete.sum(dim =0 )
This will receive the input from the Attention Layer & encoder, which will be passed on to the LSTM cell to do its work.
62598fa660cbc95b06364243
class Callable(Validator): <NEW_LINE> <INDENT> def __init__(self, callable_): <NEW_LINE> <INDENT> if not callable(callable_): <NEW_LINE> <INDENT> raise TypeError('"callable" argument is not callable') <NEW_LINE> <DEDENT> self.callable = callable_ <NEW_LINE> <DEDENT> def validate(self, value): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self.callable(value) <NEW_LINE> <DEDENT> except (TypeError, ValueError) as e: <NEW_LINE> <INDENT> raise ValidationError(str(e))
A validator that accepts a callable. Attributes: - callable: The callable
62598fa6e76e3b2f99fd892d
class MockDoc(object): <NEW_LINE> <INDENT> def __init__(self, Name): <NEW_LINE> <INDENT> self.Name = Name <NEW_LINE> <DEDENT> def PrintOut(self, **kwargs): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def Save(self, **kwargs): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def SaveAs(self, **kwargs): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def ExportAsFixedFormat(self, **kwargs): <NEW_LINE> <INDENT> pass
A Document object.
62598fa65f7d997b871f935c
class LevenshteinSimilarity(object): <NEW_LINE> <INDENT> def __init__(self, content_x1, content_y2): <NEW_LINE> <INDENT> self.s1 = content_x1 <NEW_LINE> self.s2 = content_y2 <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def extract_keyword(content): <NEW_LINE> <INDENT> re_exp = re.compile(r'(<style>.*?</style>)|(<[^>]+>)', re.S) <NEW_LINE> content = re_exp.sub(' ', content) <NEW_LINE> content = html.unescape(content) <NEW_LINE> seg = [i for i in jieba.cut(content, cut_all=True) if i != ''] <NEW_LINE> keywords = jieba.analyse.extract_tags("|".join(seg), topK=200, withWeight=False) <NEW_LINE> return keywords <NEW_LINE> <DEDENT> def main(self): <NEW_LINE> <INDENT> jieba.analyse.set_stop_words('./files/stopwords.txt') <NEW_LINE> keywords1 = ', '.join(self.extract_keyword(self.s1)) <NEW_LINE> keywords2 = ', '.join(self.extract_keyword(self.s2)) <NEW_LINE> distances = Levenshtein.ratio(keywords1, keywords2) <NEW_LINE> return distances
编辑距离
62598fa699cbb53fe6830dcc
class AggBatchSGD(Optimizer): <NEW_LINE> <INDENT> def __init__(self, lr=0.01, batches_per_update=1, **kwargs): <NEW_LINE> <INDENT> super(AggBatchSGD, self).__init__(**kwargs) <NEW_LINE> with K.name_scope(self.__class__.__name__): <NEW_LINE> <INDENT> self.iterations = K.variable(0, dtype='int64', name='iterations') <NEW_LINE> self.lr = K.variable(lr, name='lr') <NEW_LINE> self.batches_per_update = batches_per_update <NEW_LINE> <DEDENT> <DEDENT> @interfaces.legacy_get_updates_support <NEW_LINE> def get_updates(self, loss, params): <NEW_LINE> <INDENT> shapes = [K.int_shape(p) for p in params] <NEW_LINE> sum_grads = [K.zeros(shape) for shape in shapes] <NEW_LINE> grads = self.get_gradients(loss, params) <NEW_LINE> self.updates = [K.update_add(self.iterations, 1)] <NEW_LINE> self.weights = [self.iterations] + sum_grads <NEW_LINE> for p, g, sg in zip(params, grads, sum_grads): <NEW_LINE> <INDENT> new_p = p - self.lr * sg / float(self.batches_per_update) <NEW_LINE> if getattr(p, 'constraint', None) is not None: <NEW_LINE> <INDENT> new_p = p.constraint(new_p) <NEW_LINE> <DEDENT> cond = K.equal(self.iterations % self.batches_per_update, 0) <NEW_LINE> self.updates.append(K.switch(cond, K.update(p, new_p), p)) <NEW_LINE> self.updates.append(K.switch(cond, K.update(sg, g), K.update(sg, sg+g))) <NEW_LINE> <DEDENT> return self.updates <NEW_LINE> <DEDENT> def get_config(self): <NEW_LINE> <INDENT> config = {'lr': float(K.get_value(self.lr)), 'batches_per_update': self.batches_per_update} <NEW_LINE> base_config = super(AggBatchSGD, self).get_config() <NEW_LINE> return dict(list(base_config.items()) + list(config.items()))
vanilla SGD which aggregates gradients from multiple mini batches and then perform an update # TODO: Can we get an implementation of this with momentum and nesterov? # Larger batch_size is really desirable. # See idea from : https://arxiv.org/abs/1711.00489
62598fa67047854f4633f2d0
class LatticeString(str): <NEW_LINE> <INDENT> def __new__(cls, value, multi=None, in_dict=True): <NEW_LINE> <INDENT> return str.__new__(cls, value) <NEW_LINE> <DEDENT> def __init__(self, value, multi=None, in_dict=True): <NEW_LINE> <INDENT> self.unique = True <NEW_LINE> if multi: <NEW_LINE> <INDENT> self.multi = list(multi) <NEW_LINE> if len(self.multi) > 1: <NEW_LINE> <INDENT> self.unique = False <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> self.multi = [value] <NEW_LINE> <DEDENT> self.in_dict = in_dict
String subclass เพื่อเก็บวิธีตัดหลายๆ วิธี
62598fa6fff4ab517ebcd6dc
class YAMLConfigFileParser(ConfigFileParser): <NEW_LINE> <INDENT> def get_syntax_description(self): <NEW_LINE> <INDENT> msg = ("The config file uses YAML syntax and must represent a YAML " "'mapping' (for details, see http://learn.getgrav.org/advanced/yaml).") <NEW_LINE> return msg <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def _load_yaml(): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> import yaml <NEW_LINE> <DEDENT> except ImportError: <NEW_LINE> <INDENT> raise ConfigFileParserException("Could not import yaml. It can be installed by running 'pip install PyYAML'") <NEW_LINE> <DEDENT> return yaml <NEW_LINE> <DEDENT> def parse(self, stream): <NEW_LINE> <INDENT> yaml = self._load_yaml() <NEW_LINE> try: <NEW_LINE> <INDENT> parsed_obj = yaml.safe_load(stream) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> raise ConfigFileParserException("Couldn't parse config file: %s" % e) <NEW_LINE> <DEDENT> if not isinstance(parsed_obj, dict): <NEW_LINE> <INDENT> raise ConfigFileParserException("The config file doesn't appear to " "contain 'key: value' pairs (aka. a YAML mapping). " "yaml.load('%s') returned type '%s' instead of 'dict'." % ( getattr(stream, 'name', 'stream'), type(parsed_obj).__name__)) <NEW_LINE> <DEDENT> result = OrderedDict() <NEW_LINE> for key, value in parsed_obj.items(): <NEW_LINE> <INDENT> if isinstance(value, list): <NEW_LINE> <INDENT> result[key] = value <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[key] = str(value) <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def serialize(self, items, default_flow_style=False): <NEW_LINE> <INDENT> yaml = self._load_yaml() <NEW_LINE> items = dict(items) <NEW_LINE> return yaml.dump(items, default_flow_style=default_flow_style)
Parses YAML config files. Depends on the PyYAML module. https://pypi.python.org/pypi/PyYAML
62598fa691af0d3eaad39d06
class GNMTEncoderModel(attention_model.AttentionModel): <NEW_LINE> <INDENT> def _build_encoder(self, hparams): <NEW_LINE> <INDENT> if hparams.encoder_type != "gnmt": <NEW_LINE> <INDENT> return super(GNMTEncoderModel, self)._build_encoder(hparams) <NEW_LINE> <DEDENT> num_layers = hparams.num_layers <NEW_LINE> num_residual_layers = hparams.num_residual_layers <NEW_LINE> num_bi_layers = 1 <NEW_LINE> num_uni_layers = num_layers - num_bi_layers <NEW_LINE> utils.print_out(" num_bi_layers = %d" % num_bi_layers) <NEW_LINE> utils.print_out(" num_uni_layers = %d" % num_uni_layers) <NEW_LINE> iterator = self.iterator <NEW_LINE> source = iterator.source <NEW_LINE> if self.time_major: <NEW_LINE> <INDENT> source = tf.transpose(source) <NEW_LINE> <DEDENT> with tf.variable_scope("encoder") as scope: <NEW_LINE> <INDENT> dtype = scope.dtype <NEW_LINE> encoder_emb_inp = source <NEW_LINE> bi_encoder_outputs, bi_encoder_state = self._build_bidirectional_rnn( inputs=encoder_emb_inp, sequence_length=iterator.source_sequence_length, dtype=dtype, hparams=hparams, num_bi_layers=num_bi_layers, num_bi_residual_layers=0, ) <NEW_LINE> uni_cell = model_helper.create_rnn_cell( unit_type=hparams.unit_type, num_units=hparams.num_units, num_layers=num_uni_layers, num_residual_layers=num_residual_layers, forget_bias=hparams.forget_bias, dropout=hparams.dropout, num_gpus=hparams.num_gpus, base_gpu=1, mode=self.mode, single_cell_fn=self.single_cell_fn) <NEW_LINE> encoder_outputs, encoder_state = tf.nn.dynamic_rnn( uni_cell, bi_encoder_outputs, dtype=dtype, sequence_length=iterator.source_sequence_length, time_major=self.time_major) <NEW_LINE> encoder_state = (bi_encoder_state[1],) + ( (encoder_state,) if num_uni_layers == 1 else encoder_state) <NEW_LINE> <DEDENT> return encoder_outputs, encoder_state
Sequence-to-sequence dynamic model with GNMT encoder architecture.
62598fa691f36d47f2230e1f
class LocationNotInFeature(ValueError): <NEW_LINE> <INDENT> pass
The location isn't contained in the given feature
62598fa64428ac0f6e658419
class Message(db.Model): <NEW_LINE> <INDENT> __tablename__ = 'messages' <NEW_LINE> id = db.Column( db.Integer, primary_key=True, ) <NEW_LINE> text = db.Column( db.String(140), nullable=False, ) <NEW_LINE> timestamp = db.Column( db.DateTime, nullable=False, server_default=db.func.now(), ) <NEW_LINE> user_id = db.Column( db.Integer, db.ForeignKey('users.id', ondelete='CASCADE'), nullable=False, ) <NEW_LINE> def is_liked(self, user_id: int) -> bool: <NEW_LINE> <INDENT> return bool( Like.query.filter_by(message_id=self.id, user_id=user_id).first())
An individual message ("warble").
62598fa610dbd63aa1c70aa9
class Verse(models.Model): <NEW_LINE> <INDENT> book = models.CharField(max_length=10) <NEW_LINE> chapter = models.IntegerField() <NEW_LINE> verse = models.IntegerField() <NEW_LINE> txt_hebrew = models.TextField(blank=True, null=True) <NEW_LINE> txt_greek = models.TextField(blank=True, null=True) <NEW_LINE> txt_latin = models.TextField(blank=True, null=True) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> managed = False <NEW_LINE> db_table = 'Verse' <NEW_LINE> <DEDENT> def get_absolute_url(self): <NEW_LINE> <INDENT> return reverse( 'bible:update', kwargs={ 'book': self.book, 'chapter': self.chapter, 'verse': self.verse } )
Verse model.
62598fa63539df3088ecc1ac
class TestWrapperMetadata(unittest2.TestCase): <NEW_LINE> <INDENT> def test_cf(self): <NEW_LINE> <INDENT> metadata = atmos_flux_inversion.wrapper.global_attributes_dict() <NEW_LINE> self.assertIn("Conventions", metadata) <NEW_LINE> self.assertIn("CF", metadata.get("Conventions", "")) <NEW_LINE> self.assertIn("history", metadata) <NEW_LINE> <DEDENT> def test_acdd(self): <NEW_LINE> <INDENT> metadata = atmos_flux_inversion.wrapper.global_attributes_dict() <NEW_LINE> self.assertIn("Conventions", metadata) <NEW_LINE> self.assertIn("standard_name_vocabulary", metadata) <NEW_LINE> self.assertIn("date_created", metadata) <NEW_LINE> self.assertIn("date_modified", metadata) <NEW_LINE> self.assertIn("date_metadata_modified", metadata) <NEW_LINE> self.assertIn("creator_name", metadata) <NEW_LINE> <DEDENT> @expectFailureIf(sys.platform == "cygwin") <NEW_LINE> def test_modules_list(self): <NEW_LINE> <INDENT> metadata = atmos_flux_inversion.wrapper.global_attributes_dict() <NEW_LINE> self.assertIn("installed_modules", metadata) <NEW_LINE> installed_modules = metadata["installed_modules"] <NEW_LINE> self.assertGreater(len(installed_modules), 0) <NEW_LINE> for name_version in installed_modules: <NEW_LINE> <INDENT> self.assertIn("=", name_version)
Test the metadata provided for the wrapper.
62598fa6d486a94d0ba2bec6
class MulticlassLogistic(): <NEW_LINE> <INDENT> def __init__(self, l2_coef, class_number=None): <NEW_LINE> <INDENT> self.class_number = class_number <NEW_LINE> self.lambda_2 = l2_coef <NEW_LINE> <DEDENT> def func(self, X, y, w): <NEW_LINE> <INDENT> M = X.dot(w.T) <NEW_LINE> return (((-1 / X.shape[0]) * (M[np.arange(X.shape[0]), y].sum() - (sc.logsumexp(M, axis=1)).sum())) + 0.5 * self.lambda_2 * (np.linalg.norm(w) ** 2)) <NEW_LINE> <DEDENT> def grad(self, X, y, w): <NEW_LINE> <INDENT> if self.class_number is None: <NEW_LINE> <INDENT> self.class_number = np.amax(y) + 1 <NEW_LINE> <DEDENT> M = X.dot(w.T) <NEW_LINE> M_max = M.max(axis=1)[:, np.newaxis] <NEW_LINE> Exp_matrix = np.exp(M - M_max) / np.exp(sc.logsumexp(M - M_max, axis=1))[:, np.newaxis] <NEW_LINE> Y = y[:, np.newaxis] == np.mgrid[0:len(y), 0:self.class_number][1] <NEW_LINE> return (-1 / X.shape[0]) * (X.T.dot(Y).T - X.T.dot(Exp_matrix).T) + self.lambda_2 * w
Оракул для задачи многоклассовой логистической регрессии. Оракул должен поддерживать l2 регуляризацию. w в этом случае двумерный numpy array размера (class_number, d), где class_number - количество классов в задаче, d - размерность задачи
62598fa67d43ff248742737e
class GetEnabledSubclassesMixin: <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def get_enabled(cls, *args): <NEW_LINE> <INDENT> enabled = list() <NEW_LINE> for subclass in cls.__subclasses__(): <NEW_LINE> <INDENT> if subclass.is_enabled(): <NEW_LINE> <INDENT> enabled.append(subclass(*args)) <NEW_LINE> <DEDENT> <DEDENT> return enabled
This mixin provides a method that returns all subclasses that are enabled. It should be used with abstract plugins
62598fa6b7558d5895463527
class MA2_Problem(ABC_problems.ABC_Problem): <NEW_LINE> <INDENT> def __init__(self, N=100, n=50): <NEW_LINE> <INDENT> self.N = N <NEW_LINE> self.n = n <NEW_LINE> self.prior = [distributions.uniform, distributions.uniform] <NEW_LINE> self.prior_args = np.array([[0, 1], [0, 1]]) <NEW_LINE> self.simulator_args = ['theta1', 'theta2'] <NEW_LINE> self.K = 2 <NEW_LINE> self.true_theta1 = 0.60 <NEW_LINE> self.true_theta2 = 0.20 <NEW_LINE> <DEDENT> def get_true_theta(self): <NEW_LINE> <INDENT> return np.array([self.true_theta1, self.true_theta2]) <NEW_LINE> <DEDENT> def statistics(self, data, theta=None): <NEW_LINE> <INDENT> return data.reshape(1, -1) <NEW_LINE> <DEDENT> def simulator(self, theta): <NEW_LINE> <INDENT> theta1 = theta[0] <NEW_LINE> theta2 = theta[1] <NEW_LINE> w = np.atleast_2d(distributions.normal.draw_samples(0, 1, self.n)).T <NEW_LINE> assert self.n > 2 <NEW_LINE> x = np.zeros([self.n, 1]) <NEW_LINE> x[0, :] = w[0:1, :] <NEW_LINE> x[1, :] = w[1:2, :] + theta1 * w[0:1, :] <NEW_LINE> x[2:, :] = w[2:, :] + theta1 * w[1:-1, :] + theta2 * w[:-2, :] <NEW_LINE> return x <NEW_LINE> <DEDENT> def log_likelihood(self, theta): <NEW_LINE> <INDENT> theta1 = theta[0] <NEW_LINE> theta2 = theta[1] <NEW_LINE> assert self.n > 2 <NEW_LINE> x = self.data_obs <NEW_LINE> z = np.zeros((self.n, 1)) <NEW_LINE> z[0, 0] = x[0, 0] <NEW_LINE> z[1, 0] = x[1, 0] - theta1*z[0, 0] <NEW_LINE> for t in range(self.n-2): <NEW_LINE> <INDENT> j = t+2 <NEW_LINE> z[j, 0] = x[j,:] - theta1*z[j-1, 0] - theta2*z[j-2, 0] <NEW_LINE> <DEDENT> ret = 0 <NEW_LINE> for j in range(self.n): <NEW_LINE> <INDENT> ret += distributions.normal.logpdf(z[j, 0], 0., 1.) 
<NEW_LINE> <DEDENT> return ret <NEW_LINE> <DEDENT> def sample_from_prior(self): <NEW_LINE> <INDENT> sample_theta1 = self.prior[0].draw_samples(self.prior_args[0, 0], self.prior_args[0, 1], 1)[0] <NEW_LINE> sample_theta2 = self.prior[1].draw_samples(self.prior_args[1, 0], self.prior_args[1, 1], 1)[0] <NEW_LINE> return np.array([sample_theta1, sample_theta2]) <NEW_LINE> <DEDENT> def visualize(self): <NEW_LINE> <INDENT> plt.figure() <NEW_LINE> t = np.linspace(0, self.n, self.n).astype(int) <NEW_LINE> plt.plot(t, self.data_obs, '-',mfc='none', color='darkviolet') <NEW_LINE> plt.xlabel('time t') <NEW_LINE> plt.ylabel('data y') <NEW_LINE> plt.show()
The MA2 problem with two parameters: y_t = w_t + theta1 * w_(t-1) + theta2 * w_(t-2)
62598fa624f1403a9268582f
class CartridgeHealthStatistics: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.memory_usage = None <NEW_LINE> self.load_avg = None
Holds the memory usage and load average reading
62598fa657b8e32f52508097
class TextEntityTypeBotCommand(Object): <NEW_LINE> <INDENT> ID = "textEntityTypeBotCommand" <NEW_LINE> def __init__(self, **kwargs): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def read(q: dict, *args) -> "TextEntityTypeBotCommand": <NEW_LINE> <INDENT> return TextEntityTypeBotCommand()
A bot command, beginning with "/". This shouldn't be highlighted if there are no bots in the chat Attributes: ID (:obj:`str`): ``TextEntityTypeBotCommand`` No parameters required. Returns: TextEntityType Raises: :class:`telegram.Error`
62598fa6d268445f26639aff
@attr.s(frozen=True, slots=True) <NEW_LINE> class NavBoxInfo(object): <NEW_LINE> <INDENT> u <NEW_LINE> background_img_src = attr.ib( validator=attr.validators.instance_of(unicode) ) <NEW_LINE> count = attr.ib( validator=attr.validators.instance_of(int) ) <NEW_LINE> description = attr.ib( validator=attr.validators.instance_of(unicode) ) <NEW_LINE> href = attr.ib( validator=attr.validators.instance_of(unicode) ) <NEW_LINE> name = attr.ib( validator=attr.validators.instance_of(unicode) )
Immutable object describing a homepage navigation box.
62598fa6eab8aa0e5d30bc82
class ConditionalGetMiddleware(MiddlewareMixin): <NEW_LINE> <INDENT> def process_response(self, request, response): <NEW_LINE> <INDENT> if not response.streaming and not response.has_header('Content-Length'): <NEW_LINE> <INDENT> response['Content-Length'] = str(len(response.content)) <NEW_LINE> <DEDENT> if request.method != 'GET': <NEW_LINE> <INDENT> return response <NEW_LINE> <DEDENT> if self.needs_etag(response) and not response.has_header('ETag'): <NEW_LINE> <INDENT> set_response_etag(response) <NEW_LINE> <DEDENT> etag = response.get('ETag') <NEW_LINE> last_modified = response.get('Last-Modified') <NEW_LINE> if last_modified: <NEW_LINE> <INDENT> last_modified = parse_http_date_safe(last_modified) <NEW_LINE> <DEDENT> if etag or last_modified: <NEW_LINE> <INDENT> return get_conditional_response( request, etag=etag, last_modified=last_modified, response=response, ) <NEW_LINE> <DEDENT> return response <NEW_LINE> <DEDENT> def needs_etag(self, response): <NEW_LINE> <INDENT> cache_control_headers = cc_delim_re.split(response.get('Cache-Control', '')) <NEW_LINE> return all(header.lower() != 'no-store' for header in cache_control_headers)
Handles conditional GET operations. If the response has an ETag or Last-Modified header, and the request has If-None-Match or If-Modified-Since, the response is replaced by an HttpNotModified. An ETag header is added if needed. Also sets the Content-Length response-header.
62598fa67d847024c075c2bc
class interp_dl(object): <NEW_LINE> <INDENT> def __init__(self, filename, tst = False): <NEW_LINE> <INDENT> self.tau, self.vx, self.vy = read_kl_2d(filename) <NEW_LINE> self.eps = 1e-6 <NEW_LINE> self.f_interp_x = interp1d(self.tau, self.vx, kind = 'cubic', bounds_error=False) <NEW_LINE> self.f_interp_y = interp1d(self.tau, self.vy, kind = 'cubic', bounds_error=False) <NEW_LINE> self.pt = (0.,0.) <NEW_LINE> self.n = 101 <NEW_LINE> self.out = (0.,0.) <NEW_LINE> if tst: <NEW_LINE> <INDENT> tau_dense = np.linspace(0., 15., 200) <NEW_LINE> x_dense = [] <NEW_LINE> y_dense = [] <NEW_LINE> for t in tau_dense: <NEW_LINE> <INDENT> x_dense.append(self.f_interp(t)[0]) <NEW_LINE> y_dense.append(self.f_interp(t)[1]) <NEW_LINE> <DEDENT> lines = plt.plot( x_dense, y_dense ) <NEW_LINE> plt.show() <NEW_LINE> <DEDENT> <DEDENT> def f_interp(self, t): <NEW_LINE> <INDENT> return (self.f_interp_x(t),self.f_interp_y(t)) <NEW_LINE> <DEDENT> def tangential(self, t): <NEW_LINE> <INDENT> eps = self.eps <NEW_LINE> return (self.f_interp_x(t+eps)-self.f_interp_x(t-eps))/(2.*eps),(self.f_interp_y(t+eps)-self.f_interp_y(t-eps))/(2.*eps) <NEW_LINE> <DEDENT> def distance(self, pt, t_start): <NEW_LINE> <INDENT> self.pt = pt <NEW_LINE> t_solve= fsolve(self.fct, t_start) <NEW_LINE> v = self.tangential(t_solve) <NEW_LINE> r = (self.f_interp_x(t_solve)-self.pt[0],self.f_interp_y(t_solve)-self.pt[1]) <NEW_LINE> s = sign(v[0]*r[1]-v[1]*r[0])[0] <NEW_LINE> return ( t_solve[0], self.f_interp_x(t_solve)[0], self.f_interp_y(t_solve)[0], s*sqrt((self.f_interp(t_solve)[0]-self.pt[0])**2+(self.f_interp(t_solve)[1]-self.pt[1])**2)[0] ) <NEW_LINE> <DEDENT> def fct(self, t): <NEW_LINE> <INDENT> v = self.tangential(t) <NEW_LINE> r = (self.f_interp_x(t)-self.pt[0],self.f_interp_y(t)-self.pt[1]) <NEW_LINE> return v[0]*r[0] + v[1]*r[1] <NEW_LINE> <DEDENT> def tang_diff(self, v_ext, t): <NEW_LINE> <INDENT> v = self.tangential(t) <NEW_LINE> return (v[0]*v_ext[1]-v[1]*v_ext[0])
The main connection between an external force characterized by a number of points and the mubosym After running the initialization the base-functions are setup (by means of optimized coefficients) :param filename: the external file with a list of x y - values (table, separation sign is space), if filename is empty the function f11 is taken instead :param tst: if true the result of the optimization is plotted
62598fa6a219f33f346c6710
class PrincipalSource: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> None_ = 0 <NEW_LINE> UserInfoList = 1 <NEW_LINE> Windows = 2 <NEW_LINE> MembershipProvider = 4 <NEW_LINE> RoleProvider = 8 <NEW_LINE> All = 15
Specifies the source of a principal.
62598fa6796e427e5384e68c
class laneletType (pyxb.binding.datatypes.string, pyxb.binding.basis.enumeration_mixin): <NEW_LINE> <INDENT> _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'laneletType') <NEW_LINE> _XSDLocation = pyxb.utils.utility.Location('/home/hans/workspace/thesis/extras/schema-extended.xsd', 247, 4) <NEW_LINE> _Documentation = None
An atomic simple type.
62598fa64f88993c371f0486
class Operations(object): <NEW_LINE> <INDENT> def __init__(self, client, config, serializer, deserializer): <NEW_LINE> <INDENT> self._client = client <NEW_LINE> self._serialize = serializer <NEW_LINE> self._deserialize = deserializer <NEW_LINE> self.api_version = "2017-03-01" <NEW_LINE> self.config = config <NEW_LINE> <DEDENT> def list( self, custom_headers=None, raw=False, **operation_config): <NEW_LINE> <INDENT> def internal_paging(next_link=None, raw=False): <NEW_LINE> <INDENT> if not next_link: <NEW_LINE> <INDENT> url = '/providers/Microsoft.ContainerRegistry/operations' <NEW_LINE> query_parameters = {} <NEW_LINE> query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> url = next_link <NEW_LINE> query_parameters = {} <NEW_LINE> <DEDENT> header_parameters = {} <NEW_LINE> header_parameters['Content-Type'] = 'application/json; charset=utf-8' <NEW_LINE> if self.config.generate_client_request_id: <NEW_LINE> <INDENT> header_parameters['x-ms-client-request-id'] = str(uuid.uuid1()) <NEW_LINE> <DEDENT> if custom_headers: <NEW_LINE> <INDENT> header_parameters.update(custom_headers) <NEW_LINE> <DEDENT> if self.config.accept_language is not None: <NEW_LINE> <INDENT> header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str') <NEW_LINE> <DEDENT> request = self._client.get(url, query_parameters) <NEW_LINE> response = self._client.send( request, header_parameters, **operation_config) <NEW_LINE> if response.status_code not in [200]: <NEW_LINE> <INDENT> exp = CloudError(response) <NEW_LINE> exp.request_id = response.headers.get('x-ms-request-id') <NEW_LINE> raise exp <NEW_LINE> <DEDENT> return response <NEW_LINE> <DEDENT> deserialized = models.OperationDefinitionPaged(internal_paging, self._deserialize.dependencies) <NEW_LINE> if raw: <NEW_LINE> <INDENT> header_dict = {} <NEW_LINE> client_raw_response = 
models.OperationDefinitionPaged(internal_paging, self._deserialize.dependencies, header_dict) <NEW_LINE> return client_raw_response <NEW_LINE> <DEDENT> return deserialized
Operations operations. :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. :param deserializer: An object model deserializer. :ivar api_version: The client API version. Constant value: "2017-03-01".
62598fa667a9b606de545ec4
class Literal(Syn): <NEW_LINE> <INDENT> value: Any <NEW_LINE> type: Any
Represent literal values.
62598fa6925a0f43d25e7f36
class AllowForm(forms.Form): <NEW_LINE> <INDENT> allow = forms.BooleanField(required=False) <NEW_LINE> redirect_uri = forms.CharField(widget=forms.HiddenInput()) <NEW_LINE> scope = forms.CharField(widget=forms.HiddenInput()) <NEW_LINE> client_id = forms.CharField(widget=forms.HiddenInput()) <NEW_LINE> state = forms.CharField(required=False, widget=forms.HiddenInput()) <NEW_LINE> response_type = forms.CharField(widget=forms.HiddenInput())
用户选择是否允许授权的表单(Authorization code /Implicit grant 模式)
62598fa616aa5153ce4003fb
class TestPdfSettingsDto(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def testPdfSettingsDto(self): <NEW_LINE> <INDENT> pass
PdfSettingsDto unit test stubs
62598fa6a8370b77170f02d3
class Config(object): <NEW_LINE> <INDENT> def __getattr__(self, variable): <NEW_LINE> <INDENT> warn('Config has been deprecated', DeprecationWarning) <NEW_LINE> return V[variable] <NEW_LINE> <DEDENT> def __setattr__(self, variable, value): <NEW_LINE> <INDENT> warn('Config has been deprecated', DeprecationWarning) <NEW_LINE> V[variable] = value
Deprecated, but still provide the same interface, but to variables.
62598fa68e71fb1e983bb9aa
class ElementMap(Elements): <NEW_LINE> <INDENT> def __init__(self, locator_type, query_string=None, base_element=None, timeout=0, key=lambda el: el.text, value=lambda el: el, only_if=lambda els: len(els) > 0, facet=False, filter_by=lambda el: el is not None): <NEW_LINE> <INDENT> super(ElementMap, self).__init__( locator_type, query_string, base_element, timeout, facet=facet, only_if=only_if, filter_by=filter_by ) <NEW_LINE> self.key_mapper = key <NEW_LINE> self.value_mapper = value <NEW_LINE> <DEDENT> def __get__(self, instance, owner): <NEW_LINE> <INDENT> if not instance: <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> return OrderedDict( (self.key_mapper(el), self.value_mapper(self.enhance(el))) for el in self._get_element(self.root.find_elements) ) if self.root else {} <NEW_LINE> <DEDENT> except ( NoSuchElementException, TimeoutException, StaleElementReferenceException ): <NEW_LINE> <INDENT> return {} <NEW_LINE> <DEDENT> <DEDENT> def __getitem__(self, key): <NEW_LINE> <INDENT> return lambda: self.__get__(self, self.__class__)[key]
Used to create dynamic dictionaries based on an element locator specified by one of :class:`holmium.core.Locators`. The wrapped dictionary is an :class:`collections.OrderedDict` instance. :param holmium.core.Locators locator_type: selenium locator to use when locating the element :param str query_string: the value to pass to the locator :param holmium.core.Element base_element: a reference to another element under which to locate this element. :param int timeout: time to implicitly wait for the element :param bool facet: flag to treat this element as a facet. :param lambda key: transform function for mapping a key to a WebElement in the collection. The located :class:`selenium.webdriver.remote.webelement.WebElement` instance is passed as the only argument to the function. :param lambda value: transform function for the value when accessed via the key. The located :class:`selenium.webdriver.remote.webelement.WebElement` instance is passed as the only argument to the function. :param function only_if: extra validation function that is called repeatedly until :attr:`timeout` elapses. If not provided the default function used checks that the element collection is not empty. The list of located :class:`selenium.webdriver.remote.webelement.WebElement` instances is passed as the only argument to the function. :param function filter_by: condition function determines which elements are included in the collection. If not provided the default function used includes all elements identified by :attr:`query_string`. A :class:`selenium.webdriver.remote.webelement.WebElement` instance is passed as the only argument to the function.
62598fa6ac7a0e7691f72403
class PKR251Exception(Exception): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super().__init__(*args, **kwargs)
Exception class for
62598fa630bbd722464698f4
class Application(Gtk.Application): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> Gtk.Application.__init__(self) <NEW_LINE> <DEDENT> def do_activate(self): <NEW_LINE> <INDENT> window = ApplicationWindow(self) <NEW_LINE> self.add_window(window) <NEW_LINE> window.show_all() <NEW_LINE> last_used_file = Settings.load().last_used_file <NEW_LINE> if last_used_file: <NEW_LINE> <INDENT> window.open_file(last_used_file) <NEW_LINE> <DEDENT> quit_action = Gio.SimpleAction(name="quit") <NEW_LINE> quit_action.connect("activate", lambda *args: self.do_quit()) <NEW_LINE> self.add_action(quit_action) <NEW_LINE> self.set_accels_for_action("app.quit", ["<Primary>Q"]) <NEW_LINE> self.set_accels_for_action("win.file_open", ["<Primary>O"]) <NEW_LINE> self.set_accels_for_action("win.close", ["<Primary>W"]) <NEW_LINE> self.set_accels_for_action("win.search", ["<Primary>F"]) <NEW_LINE> <DEDENT> def do_quit(self): <NEW_LINE> <INDENT> for win in self.get_windows(): <NEW_LINE> <INDENT> win.do_destroy()
The FavaGTK application.
62598fa632920d7e50bc5f4f
@constructible <NEW_LINE> class IgniteServiceType(IntEnum): <NEW_LINE> <INDENT> NODE = 0 <NEW_LINE> THIN_CLIENT = 1 <NEW_LINE> NONE = 2
Application start mode.
62598fa67d847024c075c2bd
class SessionResumeHelper2(SessionResumeHelper1): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def _restore_SessionState_metadata(cls, session, session_repr): <NEW_LINE> <INDENT> metadata_repr = _validate( session_repr, key='metadata', value_type=dict) <NEW_LINE> session.metadata.title = _validate( metadata_repr, key='title', value_type=str, value_none=True) <NEW_LINE> session.metadata.flags = set([ _validate( flag, value_type=str, value_type_msg=_("Each flag must be a string")) for flag in _validate( metadata_repr, key='flags', value_type=list)]) <NEW_LINE> session.metadata.running_job_name = _validate( metadata_repr, key='running_job_name', value_type=str, value_none=True) <NEW_LINE> app_blob = _validate( metadata_repr, key='app_blob', value_type=str, value_none=True) <NEW_LINE> if app_blob is not None: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> app_blob = app_blob.encode("ASCII") <NEW_LINE> <DEDENT> except UnicodeEncodeError: <NEW_LINE> <INDENT> raise CorruptedSessionError(_("app_blob is not ASCII")) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> app_blob = base64.standard_b64decode(app_blob) <NEW_LINE> <DEDENT> except binascii.Error: <NEW_LINE> <INDENT> raise CorruptedSessionError(_("Cannot base64 decode app_blob")) <NEW_LINE> <DEDENT> <DEDENT> session.metadata.app_blob = app_blob <NEW_LINE> logger.debug(_("restored metadata %r"), session.metadata)
Helper class for implementing session resume feature This class works with data constructed by :class:`~plainbox.impl.session.suspend.SessionSuspendHelper2` which has been pre-processed by :class:`SessionResumeHelper` (to strip the initial envelope). Due to the constraints of what can be represented in a suspended session, this class cannot work in isolation. It must operate with a list of know jobs. Since (most of the) jobs are being provided externally (as they represent the non-serialized parts of checkbox or other job providers) several failure modes are possible. Those are documented in :meth:`resume()`
62598fa63539df3088ecc1ad
class IWorkspaceFolder(form.Schema, IImageScaleTraversable): <NEW_LINE> <INDENT> calendar_visible = schema.Bool( title=MessageFactory(u"label_workspace_calendar_visibility", u"Calendar visible in central calendar"), required=False, default=False, ) <NEW_LINE> email = schema.TextLine( title=MessageFactory(u'label_workspace_email', u'E-mail address'), required=False, default=u'', )
Interface for WorkspaceFolder
62598fa607f4c71912baf33c
class ImagableItemMixin(models.Model): <NEW_LINE> <INDENT> image = models.ImageField(blank=True, verbose_name=_(u"image"), upload_to=get_image_upload_path, default=DEFAULT_PATH) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> abstract = True <NEW_LINE> <DEDENT> @property <NEW_LINE> def image_height(self): <NEW_LINE> <INDENT> return get_image_size(self.image.path)[1] <NEW_LINE> <DEDENT> @property <NEW_LINE> def retina_image_height(self): <NEW_LINE> <INDENT> return get_image_size(self.image.path)[1] * 2 <NEW_LINE> <DEDENT> @property <NEW_LINE> def image_url(self): <NEW_LINE> <INDENT> return "{}_fx.jpg".format(os.path.splitext(self.image.url)[0]) <NEW_LINE> <DEDENT> @property <NEW_LINE> def retina_image_url(self): <NEW_LINE> <INDENT> return "{}_fx@2x.jpg".format(os.path.splitext(self.image.url)[0]) <NEW_LINE> <DEDENT> @property <NEW_LINE> def thumbnail(self): <NEW_LINE> <INDENT> return "{}_thumbnail.jpg".format(os.path.splitext(self.image.url)[0]) <NEW_LINE> <DEDENT> @property <NEW_LINE> def retina_thumbnail(self): <NEW_LINE> <INDENT> return "{}_thumbnail@2x.jpg".format(os.path.splitext(self.image.url)[0]) <NEW_LINE> <DEDENT> @property <NEW_LINE> def has_image_changed(self): <NEW_LINE> <INDENT> return self.__initial != self.image <NEW_LINE> <DEDENT> @property <NEW_LINE> def has_default_image(self): <NEW_LINE> <INDENT> return self.__initial.name == DEFAULT_PATH <NEW_LINE> <DEDENT> def save(self, *args, **kwargs): <NEW_LINE> <INDENT> if not self.image: <NEW_LINE> <INDENT> self.image = DEFAULT_PATH <NEW_LINE> <DEDENT> if self.__initial != self.image and self.__initial != DEFAULT_PATH: <NEW_LINE> <INDENT> delete_image_files( self.__initial.name.split('/')[-1].split('.')[0], self.__class__.__name__.lower()) <NEW_LINE> <DEDENT> super(ImagableItemMixin, self).save(*args, **kwargs) <NEW_LINE> <DEDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super(ImagableItemMixin, self).__init__(*args, **kwargs) <NEW_LINE> self.__initial = self.image
A class that adds a field to the model that stores the image.
62598fa6442bda511e95c34e
class return_descriptors_result(object): <NEW_LINE> <INDENT> def __init__(self, success=None,): <NEW_LINE> <INDENT> self.success = success <NEW_LINE> <DEDENT> def read(self, iprot): <NEW_LINE> <INDENT> if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: <NEW_LINE> <INDENT> iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) <NEW_LINE> return <NEW_LINE> <DEDENT> iprot.readStructBegin() <NEW_LINE> while True: <NEW_LINE> <INDENT> (fname, ftype, fid) = iprot.readFieldBegin() <NEW_LINE> if ftype == TType.STOP: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> if fid == 0: <NEW_LINE> <INDENT> if ftype == TType.LIST: <NEW_LINE> <INDENT> self.success = [] <NEW_LINE> (_etype10, _size7) = iprot.readListBegin() <NEW_LINE> for _i11 in range(_size7): <NEW_LINE> <INDENT> _elem12 = iprot.readDouble() <NEW_LINE> self.success.append(_elem12) <NEW_LINE> <DEDENT> iprot.readListEnd() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> iprot.readFieldEnd() <NEW_LINE> <DEDENT> iprot.readStructEnd() <NEW_LINE> <DEDENT> def write(self, oprot): <NEW_LINE> <INDENT> if oprot._fast_encode is not None and self.thrift_spec is not None: <NEW_LINE> <INDENT> oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) <NEW_LINE> return <NEW_LINE> <DEDENT> oprot.writeStructBegin('return_descriptors_result') <NEW_LINE> if self.success is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('success', TType.LIST, 0) <NEW_LINE> oprot.writeListBegin(TType.DOUBLE, len(self.success)) <NEW_LINE> for iter13 in self.success: <NEW_LINE> <INDENT> oprot.writeDouble(iter13) <NEW_LINE> <DEDENT> oprot.writeListEnd() <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> oprot.writeFieldStop() <NEW_LINE> oprot.writeStructEnd() <NEW_LINE> <DEDENT> def validate(self): <NEW_LINE> <INDENT> return <NEW_LINE> 
<DEDENT> def __repr__(self): <NEW_LINE> <INDENT> L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] <NEW_LINE> return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not (self == other)
Attributes: - success
62598fa6cc0a2c111447af08
class GenomicRegionDataProvider( column.ColumnarDataProvider ): <NEW_LINE> <INDENT> COLUMN_NAMES = [ 'chrom', 'start', 'end' ] <NEW_LINE> settings = { 'chrom_column' : 'int', 'start_column' : 'int', 'end_column' : 'int', 'named_columns' : 'bool', } <NEW_LINE> def __init__( self, dataset, chrom_column=None, start_column=None, end_column=None, named_columns=False, **kwargs ): <NEW_LINE> <INDENT> dataset_source = DatasetDataProvider( dataset ) <NEW_LINE> if chrom_column == None: <NEW_LINE> <INDENT> chrom_column = dataset_source.get_metadata_column_index_by_name( 'chromCol' ) <NEW_LINE> <DEDENT> if start_column == None: <NEW_LINE> <INDENT> start_column = dataset_source.get_metadata_column_index_by_name( 'startCol' ) <NEW_LINE> <DEDENT> if end_column == None: <NEW_LINE> <INDENT> end_column = dataset_source.get_metadata_column_index_by_name( 'endCol' ) <NEW_LINE> <DEDENT> indeces = [ chrom_column, start_column, end_column ] <NEW_LINE> if not all( map( lambda i: i != None, indeces ) ): <NEW_LINE> <INDENT> raise ValueError( "Could not determine proper column indeces for" + " chrom, start, end: %s" %( str( indeces ) ) ) <NEW_LINE> <DEDENT> kwargs.update({ 'indeces' : indeces }) <NEW_LINE> if not kwargs.get( 'column_types', None ): <NEW_LINE> <INDENT> kwargs.update({ 'column_types' : dataset_source.get_metadata_column_types( indeces=indeces ) }) <NEW_LINE> <DEDENT> self.named_columns = named_columns <NEW_LINE> if self.named_columns: <NEW_LINE> <INDENT> self.column_names = self.COLUMN_NAMES <NEW_LINE> <DEDENT> super( GenomicRegionDataProvider, self ).__init__( dataset_source, **kwargs ) <NEW_LINE> <DEDENT> def __iter__( self ): <NEW_LINE> <INDENT> parent_gen = super( GenomicRegionDataProvider, self ).__iter__() <NEW_LINE> for column_values in parent_gen: <NEW_LINE> <INDENT> if self.named_columns: <NEW_LINE> <INDENT> yield dict( zip( self.column_names, column_values ) ) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> yield column_values
Data provider that parses chromosome, start, and end data from a file using the datasets metadata settings. Is a ColumnarDataProvider that uses a DatasetDataProvider as it's source. If `named_columns` is true, will return dictionaries with the keys 'chrom', 'start', 'end'.
62598fa67b25080760ed73a5
class StandardRobot(Robot): <NEW_LINE> <INDENT> def update_position_and_clean(self): <NEW_LINE> <INDENT> pos = self.position <NEW_LINE> pos = pos.get_new_position(self.direction, self.speed) <NEW_LINE> if self._room.is_position_valid(pos): <NEW_LINE> <INDENT> self.set_robot_position(pos) <NEW_LINE> priorCleanedState = self._room.is_tile_cleaned(int(pos.x), int(pos.y)) <NEW_LINE> self._room.clean_tile_at_position(self.get_robot_position(), self.capacity) <NEW_LINE> if self._room.is_tile_cleaned(int(pos.x), int(pos.y)) != priorCleanedState: <NEW_LINE> <INDENT> print('Robot # {} cleaned tile at {}'.format(self.robotNum, pos)) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> direction = self._room.get_random_direction() <NEW_LINE> while direction == self.get_robot_direction(): <NEW_LINE> <INDENT> direction = self.get_random_direction() <NEW_LINE> <DEDENT> self.set_robot_direction(direction) <NEW_LINE> <DEDENT> <DEDENT> def is_robot_finished(self): <NEW_LINE> <INDENT> if self._room.is_room_cleaned(): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False
A StandardRobot is a Robot with the standard movement strategy. At each time-step, a StandardRobot attempts to move in its current direction; when it would hit a wall or furtniture, it *instead* chooses a new direction randomly.
62598fa68c0ade5d55dc360d
class MyConcreteFactory(AbstractFactory): <NEW_LINE> <INDENT> def create_product_X(self): <NEW_LINE> <INDENT> print("Called: my implementation of abstract factory; will return: " + "MyConcreteProductX instance") <NEW_LINE> return MyConcreteProductX() <NEW_LINE> <DEDENT> def create_product_Y(self): <NEW_LINE> <INDENT> print("Called: my implementation of abstract factory; will return: " + "MyConcreteProductY instance") <NEW_LINE> return MyConcreteProductY()
Another concrete implementation for AbstractFactory
62598fa63539df3088ecc1ae
class IAccordion(interface.Interface): <NEW_LINE> <INDENT> pass
Marker interface for the accordion viewlet
62598fa61f037a2d8b9e3fe5
class TestCase(unittest.TestCase): <NEW_LINE> <INDENT> api_class = None <NEW_LINE> def setUp(self): <NEW_LINE> <INDENT> super(TestCase, self).setUp() <NEW_LINE> if self.api_class is None: <NEW_LINE> <INDENT> self.api = falcon.API() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.api = self.api_class() <NEW_LINE> <DEDENT> falcon.request._maybe_wrap_wsgi_stream = True <NEW_LINE> <DEDENT> if not hasattr(unittest.TestCase, 'assertIn'): <NEW_LINE> <INDENT> def assertIn(self, a, b): <NEW_LINE> <INDENT> self.assertTrue(a in b) <NEW_LINE> <DEDENT> <DEDENT> def simulate_get(self, path='/', **kwargs): <NEW_LINE> <INDENT> return self.simulate_request('GET', path, **kwargs) <NEW_LINE> <DEDENT> def simulate_head(self, path='/', **kwargs): <NEW_LINE> <INDENT> return self.simulate_request('HEAD', path, **kwargs) <NEW_LINE> <DEDENT> def simulate_post(self, path='/', **kwargs): <NEW_LINE> <INDENT> return self.simulate_request('POST', path, **kwargs) <NEW_LINE> <DEDENT> def simulate_put(self, path='/', **kwargs): <NEW_LINE> <INDENT> return self.simulate_request('PUT', path, **kwargs) <NEW_LINE> <DEDENT> def simulate_options(self, path='/', **kwargs): <NEW_LINE> <INDENT> return self.simulate_request('OPTIONS', path, **kwargs) <NEW_LINE> <DEDENT> def simulate_patch(self, path='/', **kwargs): <NEW_LINE> <INDENT> return self.simulate_request('PATCH', path, **kwargs) <NEW_LINE> <DEDENT> def simulate_delete(self, path='/', **kwargs): <NEW_LINE> <INDENT> return self.simulate_request('DELETE', path, **kwargs) <NEW_LINE> <DEDENT> def simulate_request(self, method='GET', path='/', query_string=None, headers=None, body=None, file_wrapper=None): <NEW_LINE> <INDENT> if not path.startswith('/'): <NEW_LINE> <INDENT> raise ValueError("path must start with '/'") <NEW_LINE> <DEDENT> if query_string and query_string.startswith('?'): <NEW_LINE> <INDENT> raise ValueError("query_string should not start with '?'") <NEW_LINE> <DEDENT> if '?' 
in path: <NEW_LINE> <INDENT> raise ValueError( 'path may not contain a query string. Please use the ' 'query_string parameter instead.' ) <NEW_LINE> <DEDENT> env = create_environ( method=method, path=path, query_string=(query_string or ''), headers=headers, body=body, file_wrapper=file_wrapper, ) <NEW_LINE> srmock = StartResponseMock() <NEW_LINE> validator = wsgiref.validate.validator(self.api) <NEW_LINE> iterable = validator(env, srmock) <NEW_LINE> result = Result(iterable, srmock.status, srmock.headers) <NEW_LINE> return result
Extends :py:mod:`unittest` to support WSGI functional testing. Note: If available, uses :py:mod:`testtools` in lieu of :py:mod:`unittest`. This base class provides some extra plumbing for unittest-style test cases, to help simulate WSGI calls without having to spin up an actual web server. Simply inherit from this class in your test case classes instead of :py:class:`unittest.TestCase` or :py:class:`testtools.TestCase`. Attributes: api_class (class): An API class to use when instantiating the ``api`` instance (default: :py:class:`falcon.API`) api (object): An API instance to target when simulating requests (default: ``self.api_class()``)
62598fa663d6d428bbee26ab
class HotelRoom(models.Model): <NEW_LINE> <INDENT> _name = 'hotel.room' <NEW_LINE> _description = 'Hotel Room' <NEW_LINE> _order = "sequence, room_type_id, name" <NEW_LINE> name = fields.Char('Room Name', required=True) <NEW_LINE> active = fields.Boolean('Active', default=True) <NEW_LINE> sequence = fields.Integer('Sequence', default=0) <NEW_LINE> room_type_id = fields.Many2one('hotel.room.type', 'Hotel Room Type', required=True, ondelete='restrict') <NEW_LINE> floor_id = fields.Many2one('hotel.floor', 'Ubication', help='At which floor the room is located.') <NEW_LINE> max_adult = fields.Integer('Max Adult') <NEW_LINE> max_child = fields.Integer('Max Child') <NEW_LINE> capacity = fields.Integer('Capacity') <NEW_LINE> to_be_cleaned = fields.Boolean('To be Cleaned', default=False) <NEW_LINE> shared_room_id = fields.Many2one('hotel.shared.room', 'Shared Room', default=False) <NEW_LINE> description_sale = fields.Text( 'Sale Description', translate=True, help="A description of the Product that you want to communicate to " " your customers. This description will be copied to every Sales " " Order, Delivery Order and Customer Invoice/Credit Note") <NEW_LINE> extra_beds_allowed = fields.Integer('Extra beds allowed', default='0', required=True) <NEW_LINE> @api.multi <NEW_LINE> def get_capacity(self, extra_bed=0): <NEW_LINE> <INDENT> if not self.shared_room_id: <NEW_LINE> <INDENT> return self.capacity + extra_bed <NEW_LINE> <DEDENT> return self.capacity
The rooms for lodging can be for sleeping, usually called rooms, and also for speeches (conference rooms), parking, relax with cafe con leche, spa...
62598fa6462c4b4f79dbb907
class CreateMirror18Test(BaseTest): <NEW_LINE> <INDENT> fixtureGpg = True <NEW_LINE> configOverride = { "ppaDistributorID": "ubuntu", "ppaCodename": "maverick", } <NEW_LINE> runCmd = "aptly mirror create -keyring=aptlytest.gpg mirror18 ppa:gladky-anton/gnuplot" <NEW_LINE> def outputMatchPrepare(_, s): <NEW_LINE> <INDENT> return re.sub(r'Signature made .* using', '', s) <NEW_LINE> <DEDENT> def check(self): <NEW_LINE> <INDENT> self.check_output() <NEW_LINE> self.check_cmd_output("aptly mirror show mirror18", "mirror_show")
create mirror: mirror with ppa URL
62598fa60c0af96317c5627c
class Trajectory: <NEW_LINE> <INDENT> SLOW_SPEED = 1.0 <NEW_LINE> FAST_SPEED = 1.4 <NEW_LINE> LINE_DESIRED_DIST = 0.4 <NEW_LINE> R_MIN = 0.1 <NEW_LINE> CURVE_WEIGHT = 1.5 <NEW_LINE> def __init__(self, frameobjects): <NEW_LINE> <INDENT> self.frameobjects = frameobjects <NEW_LINE> <DEDENT> @cachedproperty <NEW_LINE> def immediate_path(self): <NEW_LINE> <INDENT> if self.frameobjects.target_line is None: <NEW_LINE> <INDENT> return (1000., Trajectory.SLOW_SPEED) <NEW_LINE> <DEDENT> v, xint = self.frameobjects.target_line <NEW_LINE> n, k = self.frameobjects.target_line_nk <NEW_LINE> z = k * n + 0.4 * v <NEW_LINE> z = z / np.linalg.norm(z) <NEW_LINE> fact = Trajectory.CURVE_WEIGHT * v.dot([1,0]) <NEW_LINE> print(fact) <NEW_LINE> r = (1 / (fact + 2e-4)) * Trajectory.R_MIN <NEW_LINE> v = Trajectory.SLOW_SPEED <NEW_LINE> return (r,v) <NEW_LINE> <DEDENT> @cachedproperty <NEW_LINE> def immediate_path_old(self): <NEW_LINE> <INDENT> line_right = False <NEW_LINE> line = None <NEW_LINE> if self.frameobjects.left_line is not None: <NEW_LINE> <INDENT> line = self.frameobjects.left_line <NEW_LINE> <DEDENT> elif self.frameobjects.right_line is not None: <NEW_LINE> <INDENT> line = self.frameobjects.right_line <NEW_LINE> line_right = True <NEW_LINE> <DEDENT> if line is None: <NEW_LINE> <INDENT> return (1000, Trajectory.SLOW_SPEED) <NEW_LINE> <DEDENT> n, k = line <NEW_LINE> fact = self.CURVE_WEIGHT * n.dot([0,1]) <NEW_LINE> r = (1 / (fact + 1e-4)) * Trajectory.R_MIN <NEW_LINE> if line_right: r = -r <NEW_LINE> v = Trajectory.SLOW_SPEED <NEW_LINE> return (r, v)
Use a FrameInfo object to plan a local trajectory.
62598fa6851cf427c66b81c2
class APIAdvancedSearchView(SearchModelMixin, generics.ListAPIView): <NEW_LINE> <INDENT> filter_backends = (MayanObjectPermissionsFilter,) <NEW_LINE> def get_queryset(self): <NEW_LINE> <INDENT> self.search_model = self.get_search_model() <NEW_LINE> self.serializer_class = self.search_model.serializer <NEW_LINE> if self.search_model.permission: <NEW_LINE> <INDENT> self.mayan_object_permissions = { 'GET': (self.search_model.permission,) } <NEW_LINE> <DEDENT> if self.request.GET.get('_match_all', 'off') == 'on': <NEW_LINE> <INDENT> global_and_search = True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> global_and_search = False <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> queryset, ids, timedelta = self.search_model.search( query_string=self.request.GET, user=self.request.user, global_and_search=global_and_search ) <NEW_LINE> <DEDENT> except Exception as exception: <NEW_LINE> <INDENT> raise ParseError(force_text(exception)) <NEW_LINE> <DEDENT> return queryset
Perform an advanced search operation --- GET: omit_serializer: true parameters: - name: _match_all paramType: query type: string description: When checked, only results that match all fields will be returned. When unchecked results that match at least one field will be returned. Possible values are "on" or "off"
62598fa6e1aae11d1e7ce7a0
class GenericAPIView(APIView, generics.GenericAPIView): <NEW_LINE> <INDENT> pass
Base class for generic API views.
62598fa657b8e32f52508098
class Guard(Enemy): <NEW_LINE> <INDENT> def __init__(self, name, hp, damage): <NEW_LINE> <INDENT> super().__init__(name, hp, damage)
Enemy that stays in one room Attributes: name: name of enemy. hp: health of enemy. damage: damage dealt by enemy location_x: x-coord location_y: y-coord
62598fa67d43ff248742737f
class UfileDownloader: <NEW_LINE> <INDENT> def __init__(self, root_path, config): <NEW_LINE> <INDENT> self._root_path = root_path <NEW_LINE> self._source_map = source_map.SourceMap(config["source_map_name"]) <NEW_LINE> self._download_folder = config["download_folder"] <NEW_LINE> self._image_downloader = image_downloader.ImageDownloader() <NEW_LINE> <DEDENT> def start(self): <NEW_LINE> <INDENT> for resource in self._source_map.get_undownloaded_resources(): <NEW_LINE> <INDENT> link = resource.name <NEW_LINE> target_path = os.path.join(self._root_path, self._download_folder) <NEW_LINE> if not os.path.exists(target_path): <NEW_LINE> <INDENT> os.makedirs(target_path) <NEW_LINE> <DEDENT> store_path = os.path.join(target_path, resource.subpath) <NEW_LINE> try: <NEW_LINE> <INDENT> self._image_downloader.download_to_file(link, store_path) <NEW_LINE> self._source_map.mark_downloaded(resource) <NEW_LINE> <DEDENT> except FileNotFoundError as e: <NEW_LINE> <INDENT> raise e <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> print(f"error happen {e}") <NEW_LINE> <DEDENT> time.sleep(0.2)
UfileDownloader download ufile images to local. depends on image downloader, SourceMap
62598fa6656771135c48957c
class StyleDependencyError(Exception): <NEW_LINE> <INDENT> pass
Style dependency error.
62598fa6dd821e528d6d8e2f
@cassiopeia.type.core.common.inheritdocs <NEW_LINE> class Mastery(cassiopeia.type.dto.common.CassiopeiaDto): <NEW_LINE> <INDENT> def __init__(self, dictionary): <NEW_LINE> <INDENT> self.masteryId = dictionary.get("masteryId", 0) <NEW_LINE> self.rank = dictionary.get("rank", 0)
masteryId int the ID of the mastery rank int the number of points put into this mastery by the user
62598fa62ae34c7f260aafdb
class Tasks(BaseCommand): <NEW_LINE> <INDENT> def status(self, ref): <NEW_LINE> <INDENT> task = Task(self._gateway, ref) <NEW_LINE> return task.status() <NEW_LINE> <DEDENT> def running(self): <NEW_LINE> <INDENT> return self._gateway.query('/proc/bgtasks', 'status', 'running') <NEW_LINE> <DEDENT> def by_name(self, name): <NEW_LINE> <INDENT> return self._gateway.query('/proc/bgtasks', 'name', name) <NEW_LINE> <DEDENT> def wait(self, ref, retries=100, seconds=1): <NEW_LINE> <INDENT> task = Task(self._gateway, ref, retries, seconds) <NEW_LINE> return task.wait()
Gateway Background Task APIs
62598fa6eab8aa0e5d30bc84
class DedimaniaInvalidCredentials(DedimaniaException): <NEW_LINE> <INDENT> pass
Invalid code or player.
62598fa62c8b7c6e89bd36bf
class AllCommandsReturnNotImplemented(Behaviour): <NEW_LINE> <INDENT> def get_handle_command_function(self, _opid_or_status): <NEW_LINE> <INDENT> return self.operation_not_implemented_response <NEW_LINE> <DEDENT> def operation_not_implemented_response(self, req, _psfile): <NEW_LINE> <INDENT> attributes = self.minimal_attributes() <NEW_LINE> return IppRequest( self.version, StatusCodeEnum.server_error_operation_not_supported, req.request_id, attributes)
A printer which responds to all commands with a not implemented error. There's no real use for this, it's just an example.
62598fa67d847024c075c2be
class Builder(object): <NEW_LINE> <INDENT> def __init__(self, tool=None): <NEW_LINE> <INDENT> if tool is None: <NEW_LINE> <INDENT> raise TypeError("Expected argument 'tool' (pos 1) is missing") <NEW_LINE> <DEDENT> self._tool_name = tool <NEW_LINE> self._tool = None <NEW_LINE> for build_tool in build_tools.values(): <NEW_LINE> <INDENT> if build_tool.match(self._tool_name): <NEW_LINE> <INDENT> self._tool = build_tool <NEW_LINE> <DEDENT> <DEDENT> if self._tool is None: <NEW_LINE> <INDENT> raise NotImplementedError("Unsupported build tool") <NEW_LINE> <DEDENT> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "<Builder tool_name='{_tool_name}' tool='{_tool}'>".format(**vars(self)) <NEW_LINE> <DEDENT> def build(self, *args, **kwargs): <NEW_LINE> <INDENT> logger.debug("Building sources using '%s'", self._tool_name) <NEW_LINE> return self._tool.build(*args, **kwargs) <NEW_LINE> <DEDENT> def get_logs(self): <NEW_LINE> <INDENT> logger.debug("Getting logs '%s'", self._tool_name) <NEW_LINE> return self._tool.get_logs() <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def get_supported_tools(cls): <NEW_LINE> <INDENT> return build_tools.keys()
Class representing a process of building binaries from sources.
62598fa68da39b475be030dc
class QuotaForm(BaseQuotaForm): <NEW_LINE> <INDENT> class Meta(BaseQuotaForm.Meta): <NEW_LINE> <INDENT> model = models.Quota <NEW_LINE> exclude = ('quota',) <NEW_LINE> <DEDENT> def __init__(self, **kwargs): <NEW_LINE> <INDENT> super().__init__(**kwargs) <NEW_LINE> for field in self.fields.values(): <NEW_LINE> <INDENT> field.widget.attrs['class'] = ( field.widget.attrs.get('class', '') + 'form-control') <NEW_LINE> <DEDENT> self.fields['group'].required = False <NEW_LINE> inst = kwargs.get('instance', None) <NEW_LINE> if inst and inst.quota: <NEW_LINE> <INDENT> allocation = inst.group.allocation <NEW_LINE> if allocation.status == models.AllocationRequest.APPROVED: <NEW_LINE> <INDENT> self.initial['requested_quota'] = inst.quota
This version of the form class that allows editing of the requested quota values. If the allocation record being edited is in approved state, we pre-fill the requested quota values from the current quota values.
62598fa692d797404e388ae2
class Database(): <NEW_LINE> <INDENT> def __init__(self,data_size,batch_size,entry_type='sequential'): <NEW_LINE> <INDENT> self.max_size = data_size <NEW_LINE> self.size = 0 <NEW_LINE> self.batch_size = batch_size <NEW_LINE> self.insert_index = 0 <NEW_LINE> self.sample_index = 0 <NEW_LINE> self.experience = [] <NEW_LINE> self.entry_type = entry_type <NEW_LINE> <DEDENT> def get_insert_index(self): <NEW_LINE> <INDENT> if self.entry_type == 'sequential': <NEW_LINE> <INDENT> self.insert_index += 1 <NEW_LINE> if self.insert_index >= self.size: <NEW_LINE> <INDENT> self.insert_index = 0 <NEW_LINE> <DEDENT> <DEDENT> elif self.entry_type == 'random': <NEW_LINE> <INDENT> self.insert_index = random.randint(0,self.size-1) <NEW_LINE> <DEDENT> elif self.entry_type == 'prioritized': <NEW_LINE> <INDENT> raise(NotImplementedError) <NEW_LINE> <DEDENT> return self.insert_index <NEW_LINE> <DEDENT> def clear(self): <NEW_LINE> <INDENT> self.experience = [] <NEW_LINE> self.insert_index = 0 <NEW_LINE> self.sample_index = 0 <NEW_LINE> self.size = 0 <NEW_LINE> <DEDENT> def store(self,experience): <NEW_LINE> <INDENT> if self.size < self.max_size: <NEW_LINE> <INDENT> self.experience.append(experience) <NEW_LINE> self.size +=1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.experience[self.get_insert_index()] = experience <NEW_LINE> <DEDENT> <DEDENT> def store_from_array(self,*args): <NEW_LINE> <INDENT> for i in range(args[0].shape[0]): <NEW_LINE> <INDENT> entry = [] <NEW_LINE> for arg in args: <NEW_LINE> <INDENT> entry.append(arg[i]) <NEW_LINE> <DEDENT> self.store(entry) <NEW_LINE> <DEDENT> return self <NEW_LINE> <DEDENT> def reset(self): <NEW_LINE> <INDENT> self.sample_index = 0 <NEW_LINE> <DEDENT> def shuffle(self): <NEW_LINE> <INDENT> random.shuffle(self.experience) <NEW_LINE> <DEDENT> def sample_random_batch(self,n=32,return_arrays=True): <NEW_LINE> <INDENT> if n <= self.size: <NEW_LINE> <INDENT> batch = random.sample(self.experience,n) <NEW_LINE> if return_arrays: <NEW_LINE> <INDENT> 
arrays = [] <NEW_LINE> for i in range(len(batch[0])): <NEW_LINE> <INDENT> to_add = np.array([entry[i] for entry in batch]) <NEW_LINE> arrays.append(to_add) <NEW_LINE> <DEDENT> return tuple(arrays) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return batch <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError('Requested {} samples, but database only of size {}'.format(n,self.size)) <NEW_LINE> <DEDENT> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> def __next__(self,batch_size=None,return_arrays=True): <NEW_LINE> <INDENT> if batch_size is None: batch_size = self.batch_size <NEW_LINE> if self.sample_index == 0: <NEW_LINE> <INDENT> self.shuffle() <NEW_LINE> <DEDENT> if (self.sample_index + batch_size > self.size) and (not self.sample_index == 0): <NEW_LINE> <INDENT> self.reset() <NEW_LINE> raise(StopIteration) <NEW_LINE> <DEDENT> if (self.sample_index + 2*batch_size > self.size): <NEW_LINE> <INDENT> batch = self.experience[self.sample_index:] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> batch = self.experience[self.sample_index:self.sample_index+batch_size] <NEW_LINE> <DEDENT> self.sample_index += batch_size <NEW_LINE> if return_arrays: <NEW_LINE> <INDENT> arrays = [] <NEW_LINE> for i in range(len(batch[0])): <NEW_LINE> <INDENT> to_add = np.array([entry[i] for entry in batch]) <NEW_LINE> arrays.append(to_add) <NEW_LINE> <DEDENT> return tuple(arrays) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return batch <NEW_LINE> <DEDENT> <DEDENT> next = __next__
Database with iterator to generate minibatches.
62598fa663d6d428bbee26ac
class UserProfileManager(BaseUserManager): <NEW_LINE> <INDENT> def create_user(self, email, name, password=None): <NEW_LINE> <INDENT> if not email: <NEW_LINE> <INDENT> raise ValueError('User must have a valid email') <NEW_LINE> <DEDENT> email = self.normalize_email(email) <NEW_LINE> user = self.model(email=email, name=name) <NEW_LINE> user.set_password(password) <NEW_LINE> user.save(using=self._db) <NEW_LINE> return user <NEW_LINE> <DEDENT> def create_superuser(self, email, name, password): <NEW_LINE> <INDENT> user = self.create_user(email, name, password) <NEW_LINE> user.is_staff = True <NEW_LINE> user.is_superuser = True <NEW_LINE> user.save(using=self._db) <NEW_LINE> return user
Manager for User Profile
62598fa6a8370b77170f02d5
class IArticle(form.Schema, IImageScaleTraversable): <NEW_LINE> <INDENT> dexteritytextindexer.searchable('title') <NEW_LINE> title = schema.TextLine( title=_(u"Title for album"), required=True, ) <NEW_LINE> dexteritytextindexer.searchable('description') <NEW_LINE> description = schema.Text( title=_(u"Description for album"), description=_(u"Short description, if no filling, will show text head 20 words instead."), required=False, ) <NEW_LINE> leadImage = NamedBlobImage( title=_(u"Lead Image"), description=_(u"Will show in blog's article list page"), required=False, ) <NEW_LINE> dexteritytextindexer.searchable('text') <NEW_LINE> text = RichText( title=_(u"Text"), required=False, )
Normal article
62598fa6cb5e8a47e493c0f5
class Event(Model): <NEW_LINE> <INDENT> DUPLICATE = 1 <NEW_LINE> UNKNOWN_PROBLEM = 2 <NEW_LINE> INCOMPATIBLE_TRAIT = 3 <NEW_LINE> def __init__(self, message_id, event_type, generated, traits, raw): <NEW_LINE> <INDENT> Model.__init__(self, message_id=message_id, event_type=event_type, generated=generated, traits=traits, raw=raw) <NEW_LINE> <DEDENT> def append_trait(self, trait_model): <NEW_LINE> <INDENT> self.traits.append(trait_model) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> trait_list = [] <NEW_LINE> if self.traits: <NEW_LINE> <INDENT> trait_list = [str(trait) for trait in self.traits] <NEW_LINE> <DEDENT> return ("<Event: %s, %s, %s, %s>" % (self.message_id, self.event_type, self.generated, " ".join(trait_list))) <NEW_LINE> <DEDENT> def serialize(self): <NEW_LINE> <INDENT> return {'message_id': self.message_id, 'event_type': self.event_type, 'generated': serialize_dt(self.generated), 'traits': [trait.serialize() for trait in self.traits], 'raw': self.raw}
A raw event from the source system. Events have Traits. Metrics will be derived from one or more Events.
62598fa6b7558d589546352a
@logger.init('spreadsheet', 'DEBUG') <NEW_LINE> class Dataframe(base.MatchedDataframe): <NEW_LINE> <INDENT> def __init__(self, *args, **kwds): <NEW_LINE> <INDENT> super(Dataframe, self).__init__(*args, **kwds) <NEW_LINE> self.seen = set() <NEW_LINE> <DEDENT> @logger.call('spreadsheet', 'debug') <NEW_LINE> def __call__(self, crosslinks, linkages): <NEW_LINE> <INDENT> self.set_dimensions(crosslinks) <NEW_LINE> self.set_named_columns(self.columns.getordered(self.dimensions)) <NEW_LINE> self.set_version() <NEW_LINE> self.set_subdataframes() <NEW_LINE> for crosslink in crosslinks: <NEW_LINE> <INDENT> linkage = linkages[(crosslink.row, crosslink.index)] <NEW_LINE> self._append(crosslink, linkage) <NEW_LINE> <DEDENT> self._sort() <NEW_LINE> self._concat() <NEW_LINE> self._rename(self.dimensions)
Report dataframe creator, an independent report (does not depend on data from other reports).
62598fa6d7e4931a7ef3bf95
class rule_006(token_case): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> token_case.__init__(self, 'component', '006', lTokens) <NEW_LINE> self.groups.append('case::keyword')
This rule checks the **is** keyword has proper case. |configuring_uppercase_and_lowercase_rules_link| **Violation** .. code-block:: vhdl component fifo IS component fifo Is **Fix** .. code-block:: vhdl component fifo is component fifo is
62598fa6167d2b6e312b6e6b
class ExBertEmbeddings(nn.Module): <NEW_LINE> <INDENT> def __init__(self, config): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.word_embeddings = nn.Embedding(config.vocab_size, config.hidden_size, padding_idx=config.pad_token_id) <NEW_LINE> self.position_embeddings = nn.Embedding(config.max_position_embeddings, config.hidden_size) <NEW_LINE> self.dropout = nn.Dropout(config.hidden_dropout_prob) <NEW_LINE> <DEDENT> def forward(self, input_ids=None, position_ids=None): <NEW_LINE> <INDENT> input_shape = input_ids.size() <NEW_LINE> seq_length = input_shape[1] <NEW_LINE> device = input_ids.device <NEW_LINE> if position_ids is None: <NEW_LINE> <INDENT> position_ids = torch.arange(seq_length, dtype=torch.long, device=device) <NEW_LINE> position_ids = position_ids.unsqueeze(0).expand(input_shape) <NEW_LINE> <DEDENT> inputs_embeds = self.word_embeddings(input_ids) <NEW_LINE> position_embeddings = self.position_embeddings(position_ids) <NEW_LINE> embeddings = inputs_embeds + position_embeddings <NEW_LINE> embeddings = self.dropout(embeddings) <NEW_LINE> return embeddings <NEW_LINE> <DEDENT> def init_weights(self): <NEW_LINE> <INDENT> self.word_embeddings.data.uniform_(-self.config.embedding_std, self.config.embedding_std) <NEW_LINE> self.position_embeddings.data.uniform_(-self.config.embedding_std, self.config.embedding_std)
Construct the embeddings from word, position and token_type embeddings.
62598fa6d58c6744b42dc252
class CartExcludedTaxModifier(BaseCartModifier): <NEW_LINE> <INDENT> taxes = 1 - 1 / (1 + settings.VALUE_ADDED_TAX / 100) <NEW_LINE> def add_extra_cart_row(self, cart, request): <NEW_LINE> <INDENT> amount = cart.subtotal * self.taxes <NEW_LINE> instance = { 'label': _("{}% VAT incl.").format(settings.VALUE_ADDED_TAX), 'amount': amount, } <NEW_LINE> cart.extra_rows[self.identifier] = ExtraCartRow(instance)
This tax calculator presumes that unit prices are gross prices, hence also the subtotal, and that the tax is calculated per cart but not added to the cart.
62598fa656ac1b37e63020e8
class FileMetadata(db.Model): <NEW_LINE> <INDENT> __SEP = ".." <NEW_LINE> __NEXT = "./" <NEW_LINE> owner = db.UserProperty() <NEW_LINE> filename = db.StringProperty() <NEW_LINE> uploadedOn = db.DateTimeProperty() <NEW_LINE> source = db.StringProperty() <NEW_LINE> blobkey = db.StringProperty() <NEW_LINE> grep_link = db.StringProperty() <NEW_LINE> @staticmethod <NEW_LINE> def getFirstKeyForUser(username): <NEW_LINE> <INDENT> return db.Key.from_path("FileMetadata", username + FileMetadata.__SEP) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def getLastKeyForUser(username): <NEW_LINE> <INDENT> return db.Key.from_path("FileMetadata", username + FileMetadata.__NEXT) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def getKeyName(username, date, blob_key): <NEW_LINE> <INDENT> sep = FileMetadata.__SEP <NEW_LINE> return str(username + sep + str(date) + sep + blob_key)
A helper class that will hold metadata for the user's blobs. Specifially, we want to keep track of who uploaded it, where they uploaded it from (right now they can only upload from their computer, but in the future urlfetch would be nice to add), and links to the results of their MR jobs. To enable our querying to scan over our input data, we store keys in the form 'user/date/blob_key', where 'user' is the given user's e-mail address, 'date' is the date and time that they uploaded the item on, and 'blob_key' indicates the location in the Blobstore that the item can be found at. '/' is not the actual separator between these values - we use '..' since it is an illegal set of characters for an e-mail address to contain.
62598fa63617ad0b5ee0604e
class trace: <NEW_LINE> <INDENT> def __init__(self, loglevel = logging.DEBUG, maxlen = 20): <NEW_LINE> <INDENT> self.loglevel = loglevel <NEW_LINE> self.maxlen = maxlen <NEW_LINE> <DEDENT> def abbrev(self, arg): <NEW_LINE> <INDENT> if arg: <NEW_LINE> <INDENT> argstr = repr(arg) <NEW_LINE> if len(argstr) > self.maxlen: <NEW_LINE> <INDENT> argstr = argstr[:self.maxlen] + "..'" <NEW_LINE> <DEDENT> return argstr <NEW_LINE> <DEDENT> return arg <NEW_LINE> <DEDENT> def argstr(self, *args, **kwargs): <NEW_LINE> <INDENT> arglist = [] <NEW_LINE> for arg in args: <NEW_LINE> <INDENT> if arg: <NEW_LINE> <INDENT> arglist.append(self.abbrev(arg)) <NEW_LINE> <DEDENT> <DEDENT> for k, v in kwargs.items(): <NEW_LINE> <INDENT> arglist.append('{} = {}'.format(k, self.abbrev(v))) <NEW_LINE> <DEDENT> return ', '.join(arglist) <NEW_LINE> <DEDENT> def __call__(self, func): <NEW_LINE> <INDENT> @functools.wraps(func) <NEW_LINE> def trace_and_call(*args, **kwargs): <NEW_LINE> <INDENT> result = func(*args, **kwargs) <NEW_LINE> argstr = self.argstr(*args, **kwargs) <NEW_LINE> logging.log(self.loglevel, '{}({}): {}'.format(func.__name__, argstr, result)) <NEW_LINE> return result <NEW_LINE> <DEDENT> return trace_and_call
Trace decorator class
62598fa699cbb53fe6830dd0
class Tag(models.Model): <NEW_LINE> <INDENT> name = models.CharField(max_length=50) <NEW_LINE> user = models.ForeignKey(get_user_model(), on_delete=models.CASCADE) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return self.name
A Tag in the database
62598fa691af0d3eaad39d0a
class EligibleTransactionVolume(MappingSchema): <NEW_LINE> <INDENT> min_price = SchemaNode(Int(), validator=colander.Range(min=0)) <NEW_LINE> max_price = SchemaNode( Int(), missing=None, validator=colander.Any(colander.Range(min=0)))
{ "min_price": 0, "max_price": 1000 }
62598fa6fff4ab517ebcd6e0
class HTLBRIterator(PyexcelIterator): <NEW_LINE> <INDENT> def __init__(self, reader): <NEW_LINE> <INDENT> self.reader_ref = reader <NEW_LINE> self.current = 0 <NEW_LINE> self.columns = reader.number_of_columns() <NEW_LINE> self.rows = reader.number_of_rows() <NEW_LINE> self.total = self.columns * self.rows <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> def next_cell_position(self): <NEW_LINE> <INDENT> return (int(self.current / self.columns), int(self.current % self.columns)) <NEW_LINE> <DEDENT> def move_cursor(self): <NEW_LINE> <INDENT> self.current += 1 <NEW_LINE> <DEDENT> def get_next_value(self): <NEW_LINE> <INDENT> row, column = self.next_cell_position() <NEW_LINE> self.move_cursor() <NEW_LINE> return self.reader_ref.cell_value(row, column) <NEW_LINE> <DEDENT> def exit_condition(self): <NEW_LINE> <INDENT> return self.current >= self.total <NEW_LINE> <DEDENT> def next(self): <NEW_LINE> <INDENT> if self.exit_condition(): <NEW_LINE> <INDENT> raise StopIteration <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self.get_next_value()
Iterate horizontally from top left to bottom right default iterator for Reader class
62598fa60a50d4780f7052d7
class DetectionWithCombinedFollowers(Follower): <NEW_LINE> <INDENT> def __init__(self, image_provider, detector, main_finder, secondary_finder): <NEW_LINE> <INDENT> self.img_provider = image_provider <NEW_LINE> self.detector = detector <NEW_LINE> self.main_finder = main_finder <NEW_LINE> self.secondary_finder = secondary_finder <NEW_LINE> self._obj_topleft = (0, 0) <NEW_LINE> self._obj_bottomright = (0, 0) <NEW_LINE> self._obj_descriptors = {} <NEW_LINE> <DEDENT> def descriptors(self): <NEW_LINE> <INDENT> desc = super(DetectionWithCombinedFollowers, self).descriptors() <NEW_LINE> desc.update({ 'scene_rgb': self.img_provider.rgb_img(), 'depth_img': self.img_provider.depth_img(), 'pcd': self.img_provider.pcd(), }) <NEW_LINE> return desc <NEW_LINE> <DEDENT> def train(self): <NEW_LINE> <INDENT> obj_model = self.img_provider.obj_pcd() <NEW_LINE> pts = points(obj_model) <NEW_LINE> self._obj_descriptors.update( { 'obj_model': obj_model, 'obj_model_points': pts, } ) <NEW_LINE> obj_templates, obj_masks = self.img_provider.obj_rgb_templates_and_masks() <NEW_LINE> self._obj_descriptors.update( { 'object_templates': obj_templates, 'object_masks': obj_masks, } ) <NEW_LINE> <DEDENT> def detect(self): <NEW_LINE> <INDENT> self.detector.update(self.descriptors()) <NEW_LINE> fue_exitoso, descriptors = self.detector.detect() <NEW_LINE> topleft = (0, 0) <NEW_LINE> bottomright = (0, 0) <NEW_LINE> if fue_exitoso: <NEW_LINE> <INDENT> self.upgrade_detected_descriptors(descriptors) <NEW_LINE> desc = self.descriptors() <NEW_LINE> topleft = desc['topleft'] <NEW_LINE> bottomright = desc['bottomright'] <NEW_LINE> <DEDENT> return fue_exitoso, topleft, bottomright <NEW_LINE> <DEDENT> def follow(self, es_deteccion): <NEW_LINE> <INDENT> self.main_finder.update(self.descriptors()) <NEW_LINE> fue_exitoso, descriptors = self.main_finder.find(es_deteccion) <NEW_LINE> topleft = (0, 0) <NEW_LINE> bottomright = (0, 0) <NEW_LINE> if fue_exitoso: <NEW_LINE> <INDENT> 
self.upgrade_main_followed_descriptors(descriptors) <NEW_LINE> self.secondary_finder.update(self.descriptors()) <NEW_LINE> mejora_fue_exitosa, new_descriptors = self.secondary_finder.find(es_deteccion) <NEW_LINE> if mejora_fue_exitosa: <NEW_LINE> <INDENT> self.upgrade_secondary_followed_descriptors(new_descriptors) <NEW_LINE> <DEDENT> desc = self.descriptors() <NEW_LINE> topleft = desc['topleft'] <NEW_LINE> bottomright = desc['bottomright'] <NEW_LINE> <DEDENT> return fue_exitoso, topleft, bottomright <NEW_LINE> <DEDENT> def upgrade_detected_descriptors(self, descriptors): <NEW_LINE> <INDENT> desc = self.detector.calculate_descriptors(descriptors) <NEW_LINE> self.set_object_descriptors(desc) <NEW_LINE> <DEDENT> def upgrade_main_followed_descriptors(self, descriptors): <NEW_LINE> <INDENT> desc = self.main_finder.calculate_descriptors(descriptors) <NEW_LINE> self.set_object_descriptors(desc) <NEW_LINE> <DEDENT> def upgrade_secondary_followed_descriptors(self, descriptors): <NEW_LINE> <INDENT> desc = self.secondary_finder.calculate_descriptors(descriptors) <NEW_LINE> self.set_object_descriptors(desc)
Combina los seguidores RGB y D usando la deteccion estatica de profundidad, ya que inserta en los descriptores a las nubes de puntos
62598fa6f548e778e596b4a0
class AutoUpgradeOptions(_messages.Message): <NEW_LINE> <INDENT> autoUpgradeStartTime = _messages.StringField(1) <NEW_LINE> description = _messages.StringField(2) <NEW_LINE> requestedUpgradeStartTime = _messages.StringField(3)
AutoUpgradeOptions defines the set of options for the user to control how the Auto Upgrades will proceed. Fields: autoUpgradeStartTime: [Output only] This field is set when upgrades are about to commence with the approximate start time for the upgrades, in [RFC3339](https://www.ietf.org/rfc/rfc3339.txt) text format. description: [Output only] This field is set when upgrades are about to commence with the description of the upgrade. requestedUpgradeStartTime: User requested start time, in [RFC3339](https://www.ietf.org/rfc/rfc3339.txt) text format.
62598fa6dd821e528d6d8e30
@implement_to_string <NEW_LINE> class Unknown(AbstractModel): <NEW_LINE> <INDENT> TYPE = None <NEW_LINE> display_name = None <NEW_LINE> def __repr__(self): <NEW_LINE> <INDENT> return self.display_name if self.display_name else self.TYPE <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return str(self.__repr__()) <NEW_LINE> <DEDENT> def __init__(self, object_type=None, display_name=None, *args, **kwargs): <NEW_LINE> <INDENT> self.TYPE = object_type <NEW_LINE> self.display_name = display_name <NEW_LINE> super(Unknown, self).__init__(*args, **kwargs) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def unserialize(cls, data): <NEW_LINE> <INDENT> object_type = data["objectType"] if "objectType" in data else "" <NEW_LINE> display_name = data["displayName"] if "displayName" in data else "" <NEW_LINE> return cls(display_name=display_name, object_type=object_type)
This class is used when we can't find a matching object type
62598fa6498bea3a75a57a18
class S3DataStore(DataStore): <NEW_LINE> <INDENT> def __init__(self, bucket, base_key, aws_key=None, aws_secret=None): <NEW_LINE> <INDENT> self._s3_client = boto.connect_s3(aws_access_key_id=aws_key, aws_secret_access_key=aws_secret) <NEW_LINE> self._bucket = self._s3_client.get_bucket(bucket, validate=False) <NEW_LINE> if base_key != '' and base_key[-1:] != '/': <NEW_LINE> <INDENT> self._base_key_name = base_key + '/' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._base_key_name = '' <NEW_LINE> <DEDENT> <DEDENT> def put(self, message, key): <NEW_LINE> <INDENT> s3_key = boto.s3.key.Key(self._bucket) <NEW_LINE> s3_key.key = '{}{}'.format(self._base_key_name, key) <NEW_LINE> s3_key.set_contents_from_string(message) <NEW_LINE> return 's3://{}/{}'.format(self._bucket.name, s3_key.key) <NEW_LINE> <DEDENT> def get(self, key): <NEW_LINE> <INDENT> return pyswfaws.util.get_from_s3(self._s3_client, key)
Uses S3 to store serialized messages.
62598fa6b7558d589546352b
class FakeProvider(RepoProvider): <NEW_LINE> <INDENT> async def get_resolved_ref(self): <NEW_LINE> <INDENT> return "1a2b3c4d5e6f" <NEW_LINE> <DEDENT> async def get_resolved_spec(self): <NEW_LINE> <INDENT> return "fake/repo/1a2b3c4d5e6f" <NEW_LINE> <DEDENT> def get_repo_url(self): <NEW_LINE> <INDENT> return "https://example.com/fake/repo.git" <NEW_LINE> <DEDENT> async def get_resolved_ref_url(self): <NEW_LINE> <INDENT> return "https://example.com/fake/repo/tree/1a2b3c4d5e6f" <NEW_LINE> <DEDENT> def get_build_slug(self): <NEW_LINE> <INDENT> return '{user}-{repo}'.format(user='Rick', repo='Morty')
Fake provider for local testing of the UI
62598fa63d592f4c4edbadc9
class Notify(models.Model): <NEW_LINE> <INDENT> id = UuidField(primary_key=True) <NEW_LINE> client_event = models.BooleanField(default=True) <NEW_LINE> resume_daily_event = models.BooleanField(default=True) <NEW_LINE> change_event = models.BooleanField(default=True) <NEW_LINE> content_type = models.ForeignKey(ContentType) <NEW_LINE> object_id = models.CharField(max_length=36) <NEW_LINE> content_object = generic.GenericForeignKey() <NEW_LINE> org_id = models.CharField(max_length=36, null=False, blank=False) <NEW_LINE> def __unicode__(self): <NEW_LINE> <INDENT> return self.id
Email notification
62598fa62ae34c7f260aafdd
class VLinkUrls(Validator): <NEW_LINE> <INDENT> splitter = re.compile('[ ,]+') <NEW_LINE> id_re = re.compile('^/ea/([^/]+)/') <NEW_LINE> def __init__(self, item, *a, **kw): <NEW_LINE> <INDENT> self.item = item <NEW_LINE> Validator.__init__(self, item, *a, **kw) <NEW_LINE> <DEDENT> def run(self, val): <NEW_LINE> <INDENT> res=[] <NEW_LINE> for v in self.splitter.split(val): <NEW_LINE> <INDENT> link_id = self.id_re.match(v) <NEW_LINE> if link_id: <NEW_LINE> <INDENT> l = VLink(None,False).run(link_id.group(1)) <NEW_LINE> if l: <NEW_LINE> <INDENT> res.append(l) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return res
A comma-separated list of link urls
62598fa6be8e80087fbbef5e
class RequestWelfareGetTitleID: <NEW_LINE> <INDENT> thrift_spec = ( None, (1, TType.I64, 'actor_id_', None, None, ), (2, TType.I32, 'title_id_', None, None, ), ) <NEW_LINE> def __init__(self, actor_id_=None, title_id_=None,): <NEW_LINE> <INDENT> self.actor_id_ = actor_id_ <NEW_LINE> self.title_id_ = title_id_ <NEW_LINE> <DEDENT> def read(self, iprot): <NEW_LINE> <INDENT> if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None: <NEW_LINE> <INDENT> fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec)) <NEW_LINE> return <NEW_LINE> <DEDENT> iprot.readStructBegin() <NEW_LINE> while True: <NEW_LINE> <INDENT> (fname, ftype, fid) = iprot.readFieldBegin() <NEW_LINE> if ftype == TType.STOP: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> if fid == 1: <NEW_LINE> <INDENT> if ftype == TType.I64: <NEW_LINE> <INDENT> self.actor_id_ = iprot.readI64(); <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> elif fid == 2: <NEW_LINE> <INDENT> if ftype == TType.I32: <NEW_LINE> <INDENT> self.title_id_ = iprot.readI32(); <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> iprot.readFieldEnd() <NEW_LINE> <DEDENT> iprot.readStructEnd() <NEW_LINE> <DEDENT> def write(self, oprot): <NEW_LINE> <INDENT> if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None: <NEW_LINE> <INDENT> oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))) <NEW_LINE> return <NEW_LINE> <DEDENT> oprot.writeStructBegin('RequestWelfareGetTitleID') <NEW_LINE> if self.actor_id_ is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('actor_id_', TType.I64, 1) <NEW_LINE> oprot.writeI64(self.actor_id_) <NEW_LINE> oprot.writeFieldEnd() 
<NEW_LINE> <DEDENT> if self.title_id_ is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('title_id_', TType.I32, 2) <NEW_LINE> oprot.writeI32(self.title_id_) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> oprot.writeFieldStop() <NEW_LINE> oprot.writeStructEnd() <NEW_LINE> <DEDENT> def validate(self): <NEW_LINE> <INDENT> if self.actor_id_ is None: <NEW_LINE> <INDENT> raise TProtocol.TProtocolException(message='Required field actor_id_ is unset!') <NEW_LINE> <DEDENT> if self.title_id_ is None: <NEW_LINE> <INDENT> raise TProtocol.TProtocolException(message='Required field title_id_ is unset!') <NEW_LINE> <DEDENT> return <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> L = ['%s=%r' % (key, value) for key, value in self.__dict__.iteritems()] <NEW_LINE> return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not (self == other)
Attributes: - actor_id_ - title_id_
62598fa616aa5153ce4003ff
class SensorRecordInstance(): <NEW_LINE> <INDENT> def __init__(self, recordLength, timeInterval): <NEW_LINE> <INDENT> self.recordLength = recordLength <NEW_LINE> self.timeInterval = timeInterval <NEW_LINE> self.currentData = 0 <NEW_LINE> self.currentDataCount = 0 <NEW_LINE> self.data = [] <NEW_LINE> self.dataValid = False <NEW_LINE> for i in range (0, self.recordLength): <NEW_LINE> <INDENT> self.data.append(0.0) <NEW_LINE> <DEDENT> self.currentTime = 0 <NEW_LINE> <DEDENT> def addData(self, newTimestamp, newData): <NEW_LINE> <INDENT> if (self.currentDataCount == 0): <NEW_LINE> <INDENT> self.currentTime = newTimestamp <NEW_LINE> self.dataValid = True <NEW_LINE> self.currentData = newData <NEW_LINE> self.currentDataCount = 1 <NEW_LINE> return <NEW_LINE> <DEDENT> if ((newTimestamp - self.currentTime) >= self.timeInterval): <NEW_LINE> <INDENT> timeUnits = int((newTimestamp - self.currentTime) / self.timeInterval) <NEW_LINE> self.currentTime += timeUnits <NEW_LINE> for i in range(0, timeUnits): <NEW_LINE> <INDENT> self.data.pop(0) <NEW_LINE> self.data.append(self.currentData / self.currentDataCount) <NEW_LINE> <DEDENT> self.currentData = 0 <NEW_LINE> self.currentDataCount = 0 <NEW_LINE> <DEDENT> self.currentData += newData <NEW_LINE> self.currentDataCount += 1 <NEW_LINE> <DEDENT> def getData(self): <NEW_LINE> <INDENT> return self.data <NEW_LINE> <DEDENT> def getCurrentData(self): <NEW_LINE> <INDENT> if (self.currentDataCount == 0): <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self.currentData / self.currentDataCount <NEW_LINE> <DEDENT> <DEDENT> def getDataValid(self): <NEW_LINE> <INDENT> return self.dataValid
Sensor record for a single sensor type
62598fa638b623060ffa8f93
@tf_export("distribute.experimental.MultiWorkerMirroredStrategy", v1=[]) <NEW_LINE> class CollectiveAllReduceStrategy(distribute_lib.Strategy): <NEW_LINE> <INDENT> def __init__( self, communication=cross_device_ops_lib.CollectiveCommunication.AUTO): <NEW_LINE> <INDENT> super(CollectiveAllReduceStrategy, self).__init__( CollectiveAllReduceExtended( self, communication=communication)) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def _from_local_devices(cls, devices): <NEW_LINE> <INDENT> obj = cls() <NEW_LINE> obj.extended._initialize_local(TFConfigClusterResolver(), devices=devices) <NEW_LINE> return obj
Distribution strategy that uses collective ops for all-reduce. It is similar to MirroredStrategy but it uses collective ops for reduction. By default it uses all local GPUs or CPU for single-worker training. When 'TF_CONFIG' environment variable is given, it parses cluster_spec, task_type and task_id from 'TF_CONFIG' and turns into a multi-worker strategy which mirrores models on GPUs of all machines in a cluster. In the current implementation, it uses all GPUs in a cluster and it assumes all workers have the same number of GPUs. It supports both eager mode and graph mode. However, for eager mode, it has to set up the eager context in its constructor and therefore all ops in eager mode have to run after the strategy object is created. Args: communication: optional Enum of type `distribute.experimental.CollectiveCommunication`. This provides a way for the user to override the choice of collective op communication. Possible values include `AUTO`, `RING`, and `NCCL`.
62598fa60c0af96317c5627f
class RevisionTests(TestCaseBase): <NEW_LINE> <INDENT> fixtures = ['test_users.json'] <NEW_LINE> def test_revision_view(self): <NEW_LINE> <INDENT> d = _create_document() <NEW_LINE> r = d.current_revision <NEW_LINE> r.created = datetime(2011, 1, 1) <NEW_LINE> r.reviewed = datetime(2011, 1, 2) <NEW_LINE> r.save() <NEW_LINE> url = reverse('wiki.revision', args=[d.slug, r.id]) <NEW_LINE> response = self.client.get(url) <NEW_LINE> eq_(200, response.status_code) <NEW_LINE> doc = pq(response.content) <NEW_LINE> eq_('Revision id: %s' % r.id, doc('#wiki-doc div.revision-info li.revision-id').text()) <NEW_LINE> eq_(d.title, doc('#wiki-doc h1.title').text()) <NEW_LINE> eq_(r.content, doc('#doc-source textarea').text()) <NEW_LINE> eq_('Created: Jan 1, 2011 12:00:00 AM', doc('#wiki-doc div.revision-info li.revision-created') .text().strip()) <NEW_LINE> eq_('Reviewed: Jan 2, 2011 12:00:00 AM', doc('#wiki-doc div.revision-info li.revision-reviewed') .text().strip()) <NEW_LINE> eq_('Yes', doc('.revision-info li.revision-is-reviewed').find('span') .text()) <NEW_LINE> eq_('Yes', doc('.revision-info li.revision-is-current').find('span') .text())
Tests for the Revision template
62598fa644b2445a339b68ed
class MarkAmphoraBootingInDB(BaseDatabaseTask): <NEW_LINE> <INDENT> def execute(self, amphora_id, compute_id): <NEW_LINE> <INDENT> LOG.debug("Mark BOOTING in DB for amphora: %(amp)s with " "compute id %(id)s", {'amp': amphora_id, 'id': compute_id}) <NEW_LINE> self.amphora_repo.update(db_apis.get_session(), amphora_id, status=constants.AMPHORA_BOOTING, compute_id=compute_id) <NEW_LINE> <DEDENT> def revert(self, result, amphora_id, compute_id, *args, **kwargs): <NEW_LINE> <INDENT> if isinstance(result, failure.Failure): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> LOG.warning("Reverting mark amphora booting in DB for amp " "id %(amp)s and compute id %(comp)s", {'amp': amphora_id, 'comp': compute_id}) <NEW_LINE> try: <NEW_LINE> <INDENT> self.amphora_repo.update(db_apis.get_session(), amphora_id, status=constants.ERROR, compute_id=compute_id) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> LOG.error("Failed to update amphora %(amp)s " "status to ERROR due to: " "%(except)s", {'amp': amphora_id, 'except': e})
Mark the amphora as booting in the database.
62598fa6a219f33f346c6714
class PatchMaybeReportException(MonkeyPatch): <NEW_LINE> <INDENT> def __call__(self): <NEW_LINE> <INDENT> from opengever.base.sentry import FTW_RAVEN_AVAILABLE <NEW_LINE> if not FTW_RAVEN_AVAILABLE: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> from ftw.raven.reporter import maybe_report_exception as original_maybe_report_exception <NEW_LINE> from opengever.api.not_reported_exceptions import NotReportedException <NEW_LINE> def maybe_report_exception(context, request, exc_type, exc, traceback): <NEW_LINE> <INDENT> if isinstance(exc, NotReportedException): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> return original_maybe_report_exception(context, request, exc_type, exc, traceback) <NEW_LINE> <DEDENT> from ftw.raven import reporter <NEW_LINE> self.patch_refs(reporter, 'maybe_report_exception', maybe_report_exception)
Monkeypatch for ftw.raven.reporter.maybe_report_exception This allows to skip reporting of exceptions that inherit NotReportedException
62598fa61f5feb6acb162b1e
class CacheControl: <NEW_LINE> <INDENT> update_dict = UpdateDict <NEW_LINE> def __init__(self, properties, type): <NEW_LINE> <INDENT> self.properties = properties <NEW_LINE> self.type = type <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def parse(cls, header, updates_to=None, type=None): <NEW_LINE> <INDENT> if updates_to: <NEW_LINE> <INDENT> props = cls.update_dict() <NEW_LINE> props.updated = updates_to <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> props = {} <NEW_LINE> <DEDENT> for match in token_re.finditer(header): <NEW_LINE> <INDENT> name = match.group(1) <NEW_LINE> value = match.group(2) or match.group(3) or None <NEW_LINE> if value: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> value = int(value) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> props[name] = value <NEW_LINE> <DEDENT> obj = cls(props, type=type) <NEW_LINE> if updates_to: <NEW_LINE> <INDENT> props.updated_args = (obj,) <NEW_LINE> <DEDENT> return obj <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "<CacheControl %r>" % str(self) <NEW_LINE> <DEDENT> max_stale = value_property("max-stale", none="*", type="request") <NEW_LINE> min_fresh = value_property("min-fresh", type="request") <NEW_LINE> only_if_cached = exists_property("only-if-cached", type="request") <NEW_LINE> public = exists_property("public", type="response") <NEW_LINE> private = value_property("private", none="*", type="response") <NEW_LINE> no_cache = value_property("no-cache", none="*") <NEW_LINE> no_store = exists_property("no-store") <NEW_LINE> no_transform = exists_property("no-transform") <NEW_LINE> must_revalidate = exists_property("must-revalidate", type="response") <NEW_LINE> proxy_revalidate = exists_property("proxy-revalidate", type="response") <NEW_LINE> max_age = value_property("max-age", none=-1) <NEW_LINE> s_maxage = value_property("s-maxage", type="response") <NEW_LINE> s_max_age = s_maxage <NEW_LINE> stale_while_revalidate = 
value_property("stale-while-revalidate", type="response") <NEW_LINE> stale_if_error = value_property("stale-if-error", type="response") <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return serialize_cache_control(self.properties) <NEW_LINE> <DEDENT> def copy(self): <NEW_LINE> <INDENT> return self.__class__(self.properties.copy(), type=self.type)
Represents the Cache-Control header. By giving a type of ``'request'`` or ``'response'`` you can control what attributes are allowed (some Cache-Control values only apply to requests or responses).
62598fa6167d2b6e312b6e6d
class _MyWindowsException(OSError): <NEW_LINE> <INDENT> pass
An exception type like L{ctypes.WinError}, but available on all platforms.
62598fa6f7d966606f747ee1
class TestBlogApp(unittest.TestCase): <NEW_LINE> <INDENT> def test_initialize_db(self): <NEW_LINE> <INDENT> result = helpers.initialize_db('../data/Database1.json') <NEW_LINE> self.assertIsInstance(result, FileNotFoundError) <NEW_LINE> print("[test_initialize_db] First test case passed") <NEW_LINE> result = helpers.initialize_db("../data/Database.json") <NEW_LINE> self.assertIsInstance(result, list) <NEW_LINE> print("[test_initialize_db] Second test case passed") <NEW_LINE> <DEDENT> def test_validate_email(self): <NEW_LINE> <INDENT> result = helpers.validate_email("abc@gmail.com") <NEW_LINE> self.assertEqual(result, True) <NEW_LINE> print("[test_validate_email] First test case passed") <NEW_LINE> result = helpers.validate_email("invalid-email") <NEW_LINE> self.assertEqual(result, False) <NEW_LINE> print("[test_validate_email] Second test case passed")
Class for writing unit test cases
62598fa6d58c6744b42dc253
class GeoIP(_GeoIP): <NEW_LINE> <INDENT> def __init__(self, path=None, cache=0, country=None, city=None, isp=None): <NEW_LINE> <INDENT> super(GeoIP, self).__init__(path=path, cache=cache, country=country, city=city) <NEW_LINE> <DEDENT> def isp(self, query): <NEW_LINE> <INDENT> raise GeoIPException('GeoIP ISP unsupported')
Add ISP support to GeoIP
62598fa65f7d997b871f935f
class ImageShuffler(Callback): <NEW_LINE> <INDENT> TRAINING_SET = None <NEW_LINE> VALIDATION_SET = None <NEW_LINE> def __init__(self, training_set, validation_set): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.TRAINING_SET = training_set <NEW_LINE> self.VALIDATION_SET = validation_set <NEW_LINE> <DEDENT> def augment_images(self, images, groundtruths, padding): <NEW_LINE> <INDENT> img_aug, gt_aug = [], [] <NEW_LINE> for idx in range(images.shape[0]): <NEW_LINE> <INDENT> img_temp, gt_temp = epoch_augmentation(images[idx], groundtruths[idx], padding=padding) <NEW_LINE> img_aug.append(img_temp) <NEW_LINE> gt_aug.append(gt_temp) <NEW_LINE> <DEDENT> augmented_images = np.reshape( np.asarray(img_aug), (images.shape[0], images.shape[1] + 2 * padding, images.shape[2] + 2 * padding, images.shape[3]) ) <NEW_LINE> augmented_groundtruth = np.reshape( np.asarray(gt_aug), (groundtruths.shape[0], groundtruths.shape[1] + 2 * padding, groundtruths.shape[2] + 2 * padding) ) <NEW_LINE> return augmented_images, augmented_groundtruth <NEW_LINE> <DEDENT> def on_epoch_begin(self, epoch, logs=None): <NEW_LINE> <INDENT> if epoch != 0: <NEW_LINE> <INDENT> img, gt, padding = self.TRAINING_SET.get_unmodified() <NEW_LINE> img_aug, gt_aug = self.augment_images(img, gt, padding) <NEW_LINE> self.TRAINING_SET.overwrite_augmented(img_aug, gt_aug) <NEW_LINE> img, gt, padding = self.VALIDATION_SET.get_unmodified() <NEW_LINE> img_aug, gt_aug = self.augment_images(img, gt, padding) <NEW_LINE> self.VALIDATION_SET.overwrite_augmented(img_aug, gt_aug) <NEW_LINE> <DEDENT> <DEDENT> def on_train_begin(self, logs=None): <NEW_LINE> <INDENT> self.on_epoch_begin(-1)
Callback to shuffle/augment unmodified images from ImageSequence and feed the modified versions back to it at the start of each epoch.
62598fa691af0d3eaad39d0c
class Menu_Creditos: <NEW_LINE> <INDENT> def __init__(self, tela): <NEW_LINE> <INDENT> self.tela = tela <NEW_LINE> self.opcao = 1 <NEW_LINE> self.enter = False <NEW_LINE> self.musica = pygame.mixer.Sound("dados/sons/creditos.wav") <NEW_LINE> <DEDENT> def tratar_eventos_menu(self): <NEW_LINE> <INDENT> for evento in pygame.event.get(): <NEW_LINE> <INDENT> if (evento.type == QUIT): <NEW_LINE> <INDENT> raise SystemExit <NEW_LINE> <DEDENT> elif (evento.type == KEYDOWN): <NEW_LINE> <INDENT> if ((evento.key == K_ESCAPE) or (evento.key == K_q)): <NEW_LINE> <INDENT> raise SystemExit <NEW_LINE> <DEDENT> elif (evento.key == K_RETURN): <NEW_LINE> <INDENT> self.enter = True <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> def repintar_menu(self): <NEW_LINE> <INDENT> fundo = pygame.image.load("dados/imagens/creditos.png") <NEW_LINE> self.tela.blit(fundo, (0, 0)) <NEW_LINE> rotulo = " [ Voltar ] " <NEW_LINE> cor = (0, 0, 0) <NEW_LINE> fonte = pygame.font.Font("dados/fontes/dejavu_sans.ttf", 25) <NEW_LINE> fonte.set_bold(True) <NEW_LINE> fonte_rend = fonte.render(rotulo, True, cor) <NEW_LINE> self.tela.blit(fonte_rend, (500, 500)) <NEW_LINE> pygame.display.update() <NEW_LINE> <DEDENT> def rodar(self): <NEW_LINE> <INDENT> self.musica.play(-1) <NEW_LINE> self.enter = False <NEW_LINE> while (True): <NEW_LINE> <INDENT> self.tratar_eventos_menu() <NEW_LINE> self.repintar_menu() <NEW_LINE> if (self.enter): <NEW_LINE> <INDENT> self.musica.stop() <NEW_LINE> return self.opcao
Classe Menu Creditos
62598fa63cc13d1c6d465669
class EventDisplay(urwid.WidgetWrap): <NEW_LINE> <INDENT> def __init__(self, conf, event, collection=None): <NEW_LINE> <INDENT> self._conf = conf <NEW_LINE> self.collection = collection <NEW_LINE> self.event = event <NEW_LINE> divider = urwid.Divider(' ') <NEW_LINE> lines = [] <NEW_LINE> lines.append(urwid.Text('Title: ' + event.summary)) <NEW_LINE> if event.organizer != '': <NEW_LINE> <INDENT> lines.append(urwid.Text('Organizer: ' + event.organizer)) <NEW_LINE> <DEDENT> if event.location != '': <NEW_LINE> <INDENT> lines.append(urwid.Text('Location: ' + event.location)) <NEW_LINE> <DEDENT> if event.categories != '': <NEW_LINE> <INDENT> lines.append(urwid.Text('Categories: ' + event.categories)) <NEW_LINE> <DEDENT> if event.allday: <NEW_LINE> <INDENT> startstr = event.start_local.strftime(self._conf['locale']['dateformat']) <NEW_LINE> endstr = event.end_local.strftime(self._conf['locale']['dateformat']) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> startstr = event.start_local.strftime( '{} {}'.format(self._conf['locale']['dateformat'], self._conf['locale']['timeformat']) ) <NEW_LINE> if event.start_local.date == event.end_local.date: <NEW_LINE> <INDENT> endstr = event.end_local.strftime(self._conf['locale']['timeformat']) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> endstr = event.end_local.strftime( '{} {}'.format(self._conf['locale']['dateformat'], self._conf['locale']['timeformat']) ) <NEW_LINE> <DEDENT> <DEDENT> if startstr == endstr: <NEW_LINE> <INDENT> lines.append(urwid.Text('Date: ' + startstr)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> lines.append(urwid.Text('Date: ' + startstr + ' - ' + endstr)) <NEW_LINE> <DEDENT> lines.append(urwid.Text('Calendar: ' + event.calendar)) <NEW_LINE> lines.append(divider) <NEW_LINE> if event.description != '': <NEW_LINE> <INDENT> lines.append(urwid.Text(event.description)) <NEW_LINE> <DEDENT> pile = urwid.Pile(lines) <NEW_LINE> urwid.WidgetWrap.__init__(self, urwid.Filler(pile, valign='top'))
A widget showing one Event()'s details
62598fa61f037a2d8b9e3fe9
class Daos(Dao): <NEW_LINE> <INDENT> def __init__(self, url, **kwargs): <NEW_LINE> <INDENT> self.configure(url, **kwargs) <NEW_LINE> <DEDENT> def configure(self, url, **kwargs): <NEW_LINE> <INDENT> self.chrom_dao = ChromDao(url, **kwargs) <NEW_LINE> self.ld_dao = LdDao(url, **kwargs) <NEW_LINE> self.idcoef_dao = IdCoefDao(url, **kwargs) <NEW_LINE> self.snp_dao = SnpDao(url, **kwargs)
DAO mother object.
62598fa60a50d4780f7052da
class account_analytic_expense_deprecation_cost(osv.osv): <NEW_LINE> <INDENT> _name = 'account.analytic.expense.deprecation.cost' <NEW_LINE> _description = 'Year deprecation cost' <NEW_LINE> _rec_name = 'department_id' <NEW_LINE> _order = 'department_id' <NEW_LINE> _columns = { 'year_id': fields.many2one( 'account.analytic.expense.deprecation', 'Year', ondelete='cascade'), 'department_id': fields.many2one( 'hr.department', 'Department', required=True), 'total': fields.float('Total for year', digits=(16, 2), required=True), }
Add schedule method and override split method
62598fa67d43ff2487427381
class VerveResponseQuestionCategory(object): <NEW_LINE> <INDENT> def __init__(self, status_code=None, message=None, list=None, data=None, records=None): <NEW_LINE> <INDENT> self.swagger_types = { 'status_code': 'str', 'message': 'str', 'list': 'list[QuestionCategory]', 'data': 'QuestionCategory', 'records': 'int' } <NEW_LINE> self.attribute_map = { 'status_code': 'statusCode', 'message': 'message', 'list': 'list', 'data': 'data', 'records': 'records' } <NEW_LINE> self._status_code = status_code <NEW_LINE> self._message = message <NEW_LINE> self._list = list <NEW_LINE> self._data = data <NEW_LINE> self._records = records <NEW_LINE> <DEDENT> @property <NEW_LINE> def status_code(self): <NEW_LINE> <INDENT> return self._status_code <NEW_LINE> <DEDENT> @status_code.setter <NEW_LINE> def status_code(self, status_code): <NEW_LINE> <INDENT> self._status_code = status_code <NEW_LINE> <DEDENT> @property <NEW_LINE> def message(self): <NEW_LINE> <INDENT> return self._message <NEW_LINE> <DEDENT> @message.setter <NEW_LINE> def message(self, message): <NEW_LINE> <INDENT> self._message = message <NEW_LINE> <DEDENT> @property <NEW_LINE> def list(self): <NEW_LINE> <INDENT> return self._list <NEW_LINE> <DEDENT> @list.setter <NEW_LINE> def list(self, list): <NEW_LINE> <INDENT> self._list = list <NEW_LINE> <DEDENT> @property <NEW_LINE> def data(self): <NEW_LINE> <INDENT> return self._data <NEW_LINE> <DEDENT> @data.setter <NEW_LINE> def data(self, data): <NEW_LINE> <INDENT> self._data = data <NEW_LINE> <DEDENT> @property <NEW_LINE> def records(self): <NEW_LINE> <INDENT> return self._records <NEW_LINE> <DEDENT> @records.setter <NEW_LINE> def records(self, records): <NEW_LINE> <INDENT> self._records = records <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in iteritems(self.swagger_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if 
hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other
NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually.
62598fa6be8e80087fbbef60
class task_one(PatternGraphs): <NEW_LINE> <INDENT> def __init__(self, k, minsup, database, subsets): <NEW_LINE> <INDENT> super().__init__(database) <NEW_LINE> self.patterns = {} <NEW_LINE> self.dico_thresh = {} <NEW_LINE> self.k = k <NEW_LINE> self.minsup = minsup <NEW_LINE> self.gid_subsets = subsets <NEW_LINE> self.thresh = 0 <NEW_LINE> self.curr_score = [] <NEW_LINE> <DEDENT> def store(self, dfs_code, gid_subsets): <NEW_LINE> <INDENT> freq = len(gid_subsets[0]) + len(gid_subsets[1]) <NEW_LINE> conf = len(gid_subsets[0]) / freq <NEW_LINE> if conf >= self.thresh and freq >= self.minsup: <NEW_LINE> <INDENT> if not (conf in self.dico_thresh): <NEW_LINE> <INDENT> self.patterns[conf] = [(dfs_code, gid_subsets)] <NEW_LINE> self.dico_thresh[conf] = [freq] <NEW_LINE> self.curr_score.append(conf) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if not(freq in self.dico_thresh[conf]): <NEW_LINE> <INDENT> self.curr_score.append(conf) <NEW_LINE> <DEDENT> self.patterns[conf].append((dfs_code, gid_subsets)) <NEW_LINE> self.dico_thresh[conf].append(freq) <NEW_LINE> <DEDENT> if len(self.curr_score) > self.k: <NEW_LINE> <INDENT> mini = min(self.dico_thresh[min(self.dico_thresh)]) <NEW_LINE> idx_minis = [] <NEW_LINE> for idx, zz in enumerate(self.dico_thresh[min(self.dico_thresh)]): <NEW_LINE> <INDENT> if zz == mini: <NEW_LINE> <INDENT> idx_minis.append(idx) <NEW_LINE> <DEDENT> <DEDENT> idx_minis = sorted(idx_minis, reverse=True) <NEW_LINE> for yy in idx_minis: <NEW_LINE> <INDENT> self.dico_thresh[min(self.dico_thresh)].pop(yy) <NEW_LINE> self.patterns[min(self.patterns)].pop(yy) <NEW_LINE> <DEDENT> if not self.patterns[min(self.patterns)]: <NEW_LINE> <INDENT> del self.patterns[min(self.patterns)] <NEW_LINE> <DEDENT> if not self.dico_thresh[min(self.dico_thresh)]: <NEW_LINE> <INDENT> del self.dico_thresh[min(self.dico_thresh)] <NEW_LINE> <DEDENT> self.curr_score.remove(min(self.curr_score)) <NEW_LINE> self.thresh = min(self.dico_thresh) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def 
prune(self, gid_subsets): <NEW_LINE> <INDENT> freq = len(gid_subsets[0]) + len(gid_subsets[1]) <NEW_LINE> if freq < self.minsup: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> def create_fm_col(self, all_gids, subset_gids): <NEW_LINE> <INDENT> subset_gids = set(subset_gids) <NEW_LINE> bools = [] <NEW_LINE> for i, val in enumerate(all_gids): <NEW_LINE> <INDENT> if val in subset_gids: <NEW_LINE> <INDENT> bools.append(1) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> bools.append(0) <NEW_LINE> <DEDENT> <DEDENT> return bools <NEW_LINE> <DEDENT> def get_feature_matrices(self): <NEW_LINE> <INDENT> matrices = [[] for _ in self.gid_subsets] <NEW_LINE> for pattern, gid_subsets in self.patterns: <NEW_LINE> <INDENT> for i, gid_subset in enumerate(gid_subsets): <NEW_LINE> <INDENT> matrices[i].append(self.create_fm_col(self.gid_subsets[i], gid_subset)) <NEW_LINE> <DEDENT> <DEDENT> return [numpy.array(matrix).transpose() for matrix in matrices]
Finds the frequent (support >= minsup) subgraphs among the positive graphs. This class provides a method to build a feature matrix for each subset.
62598fa68e7ae83300ee8fa0
class Plugin: <NEW_LINE> <INDENT> def __init__(self, file, name, desc, version, state): <NEW_LINE> <INDENT> self.file = file <NEW_LINE> self.name = name <NEW_LINE> self.desc = desc <NEW_LINE> self.version = version <NEW_LINE> self.state = state
Class that defines a plugin with : - his name - his description - his version - his state...
62598fa64a966d76dd5eede1
class Storage(models.Model): <NEW_LINE> <INDENT> name = models.CharField(max_length=256) <NEW_LINE> capacity = models.IntegerField() <NEW_LINE> state = models.IntegerField(default=0) <NEW_LINE> address = models.CharField(max_length=64, null=True) <NEW_LINE> dir = models.CharField(max_length=256, null=True) <NEW_LINE> transport = models.CharField(max_length=20, default="netfs") <NEW_LINE> class Meta: <NEW_LINE> <INDENT> app_label = 'cm' <NEW_LINE> <DEDENT> def __unicode__(self): <NEW_LINE> <INDENT> return self.name <NEW_LINE> <DEDENT> @property <NEW_LINE> def dict(self): <NEW_LINE> <INDENT> d = {} <NEW_LINE> d['storage_id'] = self.id <NEW_LINE> d['state'] = self.state <NEW_LINE> d['name'] = self.name <NEW_LINE> d['capacity'] = self.capacity <NEW_LINE> d['used_space'] = self.used_space <NEW_LINE> d['mountpoint'] = self.path <NEW_LINE> d['dir'] = self.dir <NEW_LINE> d['address'] = self.address <NEW_LINE> return d <NEW_LINE> <DEDENT> @property <NEW_LINE> def path(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> conn = libvirt.open('qemu:///system') <NEW_LINE> conn.storagePoolLookupByName(self.name) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> return '/var/lib/cc1/storages/%s/' % self.name <NEW_LINE> <DEDENT> @property <NEW_LINE> def used_space(self): <NEW_LINE> <INDENT> return self.image_set.aggregate(Sum('size'))['size__sum'] or 0 <NEW_LINE> <DEDENT> @property <NEW_LINE> def free_space(self): <NEW_LINE> <INDENT> return self.capacity - self.used_space <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def get(): <NEW_LINE> <INDENT> storages = Storage.objects.filter(state__exact=storage_states['ok']) <NEW_LINE> if storages.count() == 0: <NEW_LINE> <INDENT> raise CMException("storage_no_storage") <NEW_LINE> <DEDENT> sorted(storages, key=lambda storage: storage.free_space) <NEW_LINE> return storages[0] <NEW_LINE> <DEDENT> def lock(self): <NEW_LINE> <INDENT> self.state = storage_states['locked'] <NEW_LINE> <DEDENT> def unlock(self): <NEW_LINE> 
<INDENT> self.state = storage_states['ok'] <NEW_LINE> <DEDENT> def libvirt_template(self): <NEW_LINE> <INDENT> template = loader.get_template("pools/%s.xml" % self.transport) <NEW_LINE> c = Context({'storage': self, 'cc_userid': 331, 'cc_groupid': 331}) <NEW_LINE> return template.render(c)
@model{STORAGE} Class for storages This class controlls cluster's Storage - where Images are stored. Storage is mounted to Node physical machine via web interface.
62598fa6a17c0f6771d5c133
class ServiceHTTPAuthenticationException(colony.ColonyException): <NEW_LINE> <INDENT> message = None
The service HTTP authentication exception class.
62598fa644b2445a339b68ee
class ArcCosNode(FuncNode): <NEW_LINE> <INDENT> funcList.append("ArcCosNode") <NEW_LINE> name ='arccos' <NEW_LINE> func = 'cmath.acos' <NEW_LINE> def __init__(self, arg): <NEW_LINE> <INDENT> super(ArcCosNode, self).__init__(arg) <NEW_LINE> <DEDENT> def diff(self,var): <NEW_LINE> <INDENT> return (Constant(-1)*(Constant(1)-self.args[0]*self.args[0])**Constant(-0.5))*self.args[0].diff(var)
Represents the arccosine function
62598fa68e71fb1e983bb9b0
class UnregisteredClass(Exception): <NEW_LINE> <INDENT> def __init__(self, name): <NEW_LINE> <INDENT> super(UnregisteredClass, self).__init__("Class <{0}> is not registered".format(name))
docstring for exception UnregisteredClass
62598fa630bbd722464698f7
class Number(Rollable): <NEW_LINE> <INDENT> def __init__(self, value): <NEW_LINE> <INDENT> super(Number, self).__init__() <NEW_LINE> self._value = value <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return '%s' % (self._value) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return '<class %s>(value=%r)' % (self.__class__.__name__, self._value) <NEW_LINE> <DEDENT> def roll(self): <NEW_LINE> <INDENT> return self._value <NEW_LINE> <DEDENT> @property <NEW_LINE> def value(self): <NEW_LINE> <INDENT> return self._value <NEW_LINE> <DEDENT> @value.setter <NEW_LINE> def value(self, value): <NEW_LINE> <INDENT> self._value = value
A numeric constant
62598fa6627d3e7fe0e06dab
class FFmpegVideoWriter(VideoWriter): <NEW_LINE> <INDENT> def __init__(self, outpath, fps, size, out_opts=None): <NEW_LINE> <INDENT> self.outpath = outpath <NEW_LINE> self.fps = fps <NEW_LINE> self.size = size <NEW_LINE> self._ffmpeg = FFmpeg( in_opts=[ "-f", "rawvideo", "-vcodec", "rawvideo", "-s", "%dx%d" % self.size, "-pix_fmt", "rgb24", "-r", str(self.fps), ], out_opts=out_opts, ) <NEW_LINE> self._ffmpeg.run("-", self.outpath) <NEW_LINE> <DEDENT> def write(self, img): <NEW_LINE> <INDENT> self._ffmpeg.stream(img.tostring()) <NEW_LINE> <DEDENT> def close(self): <NEW_LINE> <INDENT> self._ffmpeg.close()
Class for writing videos using ffmpeg.
62598fa667a9b606de545eca
class ValutaCostiDialog(aw.Dialog): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> if not kwargs.has_key('title') and len(args) < 3: <NEW_LINE> <INDENT> kwargs['title'] = COSTI_FRAME_TITLE <NEW_LINE> <DEDENT> aw.Dialog.__init__(self, *args, **kwargs) <NEW_LINE> self.AddSizedPanel(ValutaCostiPanel(self, -1)) <NEW_LINE> self.CenterOnScreen()
Dialog Valutazione costi di acquisto presenti.
62598fa6f7d966606f747ee3
class DdosProtectionPlan(msrest.serialization.Model): <NEW_LINE> <INDENT> _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, 'type': {'readonly': True}, 'etag': {'readonly': True}, 'resource_guid': {'readonly': True}, 'provisioning_state': {'readonly': True}, 'virtual_networks': {'readonly': True}, } <NEW_LINE> _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'location': {'key': 'location', 'type': 'str'}, 'tags': {'key': 'tags', 'type': '{str}'}, 'etag': {'key': 'etag', 'type': 'str'}, 'resource_guid': {'key': 'properties.resourceGuid', 'type': 'str'}, 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, 'virtual_networks': {'key': 'properties.virtualNetworks', 'type': '[SubResource]'}, } <NEW_LINE> def __init__( self, *, location: Optional[str] = None, tags: Optional[Dict[str, str]] = None, **kwargs ): <NEW_LINE> <INDENT> super(DdosProtectionPlan, self).__init__(**kwargs) <NEW_LINE> self.id = None <NEW_LINE> self.name = None <NEW_LINE> self.type = None <NEW_LINE> self.location = location <NEW_LINE> self.tags = tags <NEW_LINE> self.etag = None <NEW_LINE> self.resource_guid = None <NEW_LINE> self.provisioning_state = None <NEW_LINE> self.virtual_networks = None
A DDoS protection plan in a resource group. Variables are only populated by the server, and will be ignored when sending a request. :ivar id: Resource ID. :vartype id: str :ivar name: Resource name. :vartype name: str :ivar type: Resource type. :vartype type: str :param location: Resource location. :type location: str :param tags: A set of tags. Resource tags. :type tags: dict[str, str] :ivar etag: A unique read-only string that changes whenever the resource is updated. :vartype etag: str :ivar resource_guid: The resource GUID property of the DDoS protection plan resource. It uniquely identifies the resource, even if the user changes its name or migrate the resource across subscriptions or resource groups. :vartype resource_guid: str :ivar provisioning_state: The provisioning state of the DDoS protection plan resource. Possible values include: "Succeeded", "Updating", "Deleting", "Failed". :vartype provisioning_state: str or ~azure.mgmt.network.v2020_03_01.models.ProvisioningState :ivar virtual_networks: The list of virtual networks associated with the DDoS protection plan resource. This list is read-only. :vartype virtual_networks: list[~azure.mgmt.network.v2020_03_01.models.SubResource]
62598fa6d58c6744b42dc254