code
stringlengths
4
4.48k
docstring
stringlengths
1
6.45k
_id
stringlengths
24
24
class RecurringCharge(OfferTermInfo):
    """Indicates a recurring charge is present for this offer.

    :param recurring_charge: The amount of the recurring charge as per the
        offer term (taken from ``kwargs``).
    """

    # 'name' is the discriminator for OfferTermInfo subtypes and must be present.
    _validation = {
        'name': {'required': True},
    }

    # Maps Python attribute names to wire (JSON) keys and serialization types.
    _attribute_map = {
        'name': {'key': 'Name', 'type': 'str'},
        'effective_date': {'key': 'EffectiveDate', 'type': 'iso-8601'},
        'recurring_charge': {'key': 'RecurringCharge', 'type': 'int'},
    }

    def __init__(
        self,
        **kwargs
    ):
        super(RecurringCharge, self).__init__(**kwargs)
        # Constant discriminator value identifying this subtype on the wire.
        self.name = 'Recurring Charge'
        self.recurring_charge = kwargs.get('recurring_charge', None)
Indicates a recurring charge is present for this offer. All required parameters must be populated in order to send to Azure. :param name: Required. Name of the offer term. Constant filled by server. Possible values include: "Recurring Charge", "Monetary Commitment", "Monetary Credit". :type name: str or ~azure.mgmt.commerce.models.OfferTermInfoEnum :param effective_date: Indicates the date from which the offer term is effective. :type effective_date: ~datetime.datetime :param recurring_charge: The amount of recurring charge as per the offer term. :type recurring_charge: int
62598fce7b180e01f3e4924d
class Parser(object):
    """Parser for the Debian machine-readable copyright-file format.

    Guesses paragraph/field types, then applies every registered rule and
    collects their messages for reporting.
    """

    def __init__(self):
        # Rule instances created during process(); consulted by get_msg().
        self._rules = []

    def process(self, paragraphs):
        """Type-tag the paragraphs and apply all rules.

        Returns False as soon as a rule's apply() fails, True otherwise.
        """
        self._guess_types(paragraphs)
        for rule in get_all_rules():
            rule_inst = rule(paragraphs)
            self._rules.append(rule_inst)
            if not rule_inst.apply():
                return False
        return True

    def _get_message(self, message):
        """Format a single rule message for display."""
        msg = "{level}: {msg} in line {line}:{position}\n\n{context}\n{sug}"
        context = ""
        if message.context:
            # Indent every context line with a tab and colorize it.
            context = '\t{0}'.format("\n\t".join(message.context.split("\n")))
            context = blue.format(context)
        msg = msg.format(
            level=log_color[message.severity],
            msg=message.txt,
            line=message.line_number,
            position=message.position,
            context=context,
            sug=message.suggestion,
        )
        return msg

    def get_msg(self):
        """Concatenate formatted messages at or above the global log_level."""
        msg = ""
        for rule in self._rules:
            for message in rule.messages:
                if message.severity >= log_level:
                    msg += self._get_message(message)
        return msg

    def _guess_types(self, paragraphs):
        """Assign a type to every paragraph and to each of its fields."""
        for paragraph in paragraphs:
            paragraph.type = self._get_paragraph_type(paragraph)
            for field in paragraph:
                field.type = self._get_field_type(field)

    def _get_paragraph_type(self, paragraph):
        """Score each known paragraph type by matched field names.

        Earlier types (in sorted order) get an epsilon-sized bonus per field,
        which breaks ties deterministically in favor of earlier types.
        Returns the best-scoring type, or None when nothing matched.
        """
        type_scores = {}
        paragraph_type_sorted = OrderedDict(sorted(PARAGRAPH_TYPES.items()))
        count = len(paragraph_type_sorted)
        for paragraph_type, field_names in paragraph_type_sorted.items():
            # Tiny tie-breaking bonus, larger for earlier types.
            step = sys.float_info.epsilon * count
            count -= 1
            # Base score per matched field: 1/number-of-fields for this type.
            step += 1.0 / len(paragraph_type_sorted[paragraph_type])
            type_scores[paragraph_type] = 0.0
            for field_name in field_names.keys():
                if field_name in paragraph:
                    type_scores[paragraph_type] += step
        tag, score = max(type_scores.items(), key=operator.itemgetter(1))
        return tag if score > 0 else None

    def _get_field_type(self, field):
        """Return the type whose name set contains this field's (stripped) name.

        Implicitly returns None when no type matches.
        """
        for field_type, field_names in FIELD_TYPES.items():
            if field.name.strip() in field_names:
                return field_type
Parser for the Debian machine-readable copyright file. Tries to parse the format described at http://www.debian.org/doc/packaging-manuals/copyright-format/1.0/
62598fce3346ee7daa337845
class Square(Rectangle):
    """A square: a Rectangle whose width and height are always equal."""

    def __init__(self, size, x=0, y=0, id=None):
        """Create a square of the given side length at position (x, y)."""
        super().__init__(size, size, x, y, id)

    def __str__(self):
        return "[Square] ({:d}) {:d}/{:d} - {:d}".format(
            self.id,
            self.x,
            self.y,
            self.size,
        )

    @property
    def size(self):
        """Side length; mirrors the underlying width."""
        return self.width

    @size.setter
    def size(self, value):
        # Keep both dimensions in lockstep so the shape stays square.
        self.width = value
        self.height = value

    def update(self, *args, **kwargs):
        """Update attributes positionally (id, size, x, y) or by keyword.

        Positional arguments, when present, take precedence over keywords.
        """
        if args:
            for name, val in zip(('id', 'size', 'x', 'y'), args[:4]):
                setattr(self, name, val)
        else:
            for name, val in kwargs.items():
                setattr(self, name, val)

    def to_dictionary(self):
        """Return the square's attributes as a plain dict."""
        return {'id': self.id, 'size': self.size, 'x': self.x, 'y': self.y}
Square class
62598fceec188e330fdf8c91
class BaseVoyageContribution(models.Model):
    """Base (abstract) model for all types of voyage contributions."""

    # Set once when the contribution row is created.
    date_created = models.DateTimeField(auto_now_add=True)
    # The user who made the contribution; '+' disables the reverse relation.
    contributor = models.ForeignKey(User, null=False, related_name='+')
    notes = models.TextField('Notes', max_length=10000, help_text='Notes for the contribution')
    status = models.IntegerField(
        'Status',
        help_text='Indicates whether the contribution is still being edited, committed, discarded etc')

    def get_related_voyage_ids(self):
        """Subclass hook: voyage_ids this contribution touches (default none)."""
        return []

    def get_related_voyages(self):
        """Fetch the Voyage rows matching get_related_voyage_ids()."""
        return list(voyage.models.Voyage.objects.filter(voyage_id__in=self.get_related_voyage_ids()))

    class Meta:
        # Abstract: no table is created for this base model.
        abstract = True
Base (abstract) model for all types of contributions.
62598fce5fcc89381b26634a
class TxFoot(Foot):
    """RAET protocol transmit-packet foot.

    pack() reserves space for the signature trailer; sign() fills it in.
    The kind-validation logic was duplicated in both methods; it is now
    shared via the private _vet_kind() helper.
    """

    def _vet_kind(self):
        """Raise PacketError if self.kind is not a recognized foot kind."""
        if self.kind not in raeting.FOOT_KIND_NAMES:
            self.kind = raeting.footKinds.unknown
            emsg = "Unrecognizible packet foot."
            raise raeting.PacketError(emsg)

    def pack(self):
        """Reserve foot space based on the packet's foot kind ('fk').

        For nacl, packs a null-byte placeholder that sign() overwrites later.
        """
        self.packed = ''
        self.kind = self.packet.data['fk']
        self._vet_kind()
        if self.kind == raeting.footKinds.nacl:
            # Placeholder of the signature's exact size, filled by sign().
            self.packed = "".rjust(raeting.footSizes.nacl, '\x00')
        elif self.kind == raeting.footKinds.nada:
            pass

    def sign(self):
        """Compute and store the signature over the fully packed packet."""
        self._vet_kind()
        if self.kind == raeting.footKinds.nacl:
            self.packed = self.packet.signature(self.packet.packed)
        elif self.kind == raeting.footKinds.nada:
            pass
RAET protocol transmit packet foot class
62598fce4527f215b58ea2ca
class contains(object):
    """Equality matcher using the string 'in' operator.

    An instance compares equal to any string that contains `str_`, e.g.
    ``assert greeting == contains("hello")``.
    """

    def __init__(self, str_):
        self.str_ = str_

    def __eq__(self, other):
        # BUG FIX: `basestring` does not exist on Python 3 and raised
        # NameError there; match against str instead.
        return isinstance(other, str) and self.str_ in other

    def __repr__(self):
        return "<string containing '%s'>" % self.str_
Match using string operator 'in'
62598fce50812a4eaa620de2
class Solution:
    """Longest palindromic substring via O(n^2) interval dynamic programming."""

    def longestPalindrome(self, s):
        """Return the longest palindromic substring of `s` ('' when s is empty)."""
        if not s:
            return ""
        n = len(s)
        best_start, best_len = 0, 1
        # pal[i][j] is True when s[i..j] (inclusive) is a palindrome.
        pal = [[False] * n for _ in range(n)]
        # Every single character is a palindrome.
        for i in range(n):
            pal[i][i] = True
        # Length-2 palindromes: equal adjacent characters.
        for i in range(n - 1):
            if s[i] == s[i + 1]:
                pal[i][i + 1] = True
                best_start, best_len = i, 2
        # Grow outward: s[i..j] is a palindrome iff the ends match and the
        # interior s[i+1..j-1] already is one.
        for i in range(n - 3, -1, -1):
            for j in range(i + 2, n):
                if s[i] == s[j] and pal[i + 1][j - 1]:
                    pal[i][j] = True
                    if j - i + 1 > best_len:
                        best_len = j - i + 1
                        best_start = i
        return s[best_start:best_start + best_len]
@param s: input string @return: the longest palindromic substring
62598fce97e22403b383b303
class TestOptionEod(unittest.TestCase):
    """Unit-test stubs for the OptionEod model (no assertions yet)."""

    def setUp(self):
        # No fixtures required yet.
        pass

    def tearDown(self):
        # Nothing to clean up.
        pass

    def testOptionEod(self):
        # TODO: construct an OptionEod instance and verify its attributes.
        pass
OptionEod unit test stubs
62598fcebf627c535bcb18a8
class FAILURE_AWG_STREAM_NOT_CLOSED(_INJECT_FAILURE):
    """Failure state: after performing the injection, the AWG did not close
    its stream when it should have.
    """

    # Numeric identifier of this failure within the _INJECT_FAILURE family.
    index = 280
The FAILURE_AWG_STREAM_NOT_CLOSED state indicates that after performing the injection, that awg did not close the stream when it should have.
62598fce7cff6e4e811b5e24
class SettingOptions(SettingItem):
    """Option-list setting on top of a SettingItem.

    Shows the current value; when pressed, opens a popup containing one
    toggle button per entry in `options` plus a Cancel button.
    """

    # Candidate values the user may choose from.
    options = ListProperty([])

    # Reference to the currently open popup (None when closed).
    popup = ObjectProperty(None, allownone=True)

    def on_panel(self, instance, value):
        if value is None:
            return
        # Once attached to a panel, pressing the item opens the chooser.
        self.bind(on_release=self._create_popup)

    def _set_option(self, instance):
        # The pressed toggle button's text becomes the new value.
        self.value = instance.text
        self.popup.dismiss()

    def _create_popup(self, instance):
        content = BoxLayout(orientation='vertical', spacing='5dp')
        popup_width = min(0.95 * Window.width, dp(500))
        popup = Popup(
            content=content, title=self.title, size_hint=(None, None),
            size=(popup_width, '400dp'))
        self.popup = popup
        # Fit all option rows (55dp each) plus title/spacer/cancel chrome.
        popup.height = len(self.options) * dp(55) + dp(150)
        content.add_widget(Widget(size_hint_y=None, height=1))
        group_id = str(self.uid)
        for choice in self.options:
            pressed = 'down' if choice == self.value else 'normal'
            btn = ToggleButton(text=choice, state=pressed, group=group_id)
            btn.bind(on_release=self._set_option)
            content.add_widget(btn)
        content.add_widget(SettingSpacer())
        cancel = Button(text='Cancel', size_hint_y=None, height=dp(50))
        cancel.bind(on_release=popup.dismiss)
        content.add_widget(cancel)
        popup.open()
Implementation of an option list on top of a :class:`SettingItem`. It is visualized with a :class:`~kivy.uix.label.Label` widget that, when clicked, will open a :class:`~kivy.uix.popup.Popup` with a list of options from which the user can select.
62598fce283ffb24f3cf3c83
class Social(AuthenticationBase):
    """Auth0 social-provider endpoints.

    Args:
        domain (str): your Auth0 domain (e.g. ``username.auth0.com``).
    """

    def __init__(self, domain):
        self.domain = domain

    def login(self, client_id, access_token, connection, scope='openid'):
        """Exchange a social provider's access token at /oauth/access_token."""
        url = 'https://%s/oauth/access_token' % self.domain
        payload = {
            'client_id': client_id,
            'access_token': access_token,
            'connection': connection,
            'scope': scope,
        }
        return self.post(url, data=payload, headers={'Content-Type': 'application/json'})
Social provider's endpoints. Args: domain (str): Your auth0 domain (e.g: username.auth0.com)
62598fceab23a570cc2d4f6c
class Colm(AutotoolsPackage):
    """Colm Programming Language.

    Colm is a programming language designed for the analysis and
    transformation of computer languages, influenced primarily by TXL.
    """

    homepage = "http://www.colm.net/open-source/colm"
    url = "http://www.colm.net/files/colm/colm-0.12.0.tar.gz"

    # Second argument is the md5 checksum of the release tarball.
    version('0.12.0', '079a1ed44f71d48a349d954096c8e411')
Colm Programming Language Colm is a programming language designed for the analysis and transformation of computer languages. Colm is influenced primarily by TXL. It is in the family of program transformation languages.
62598fce5fcc89381b26634b
class IXlsMime(IMime):
    """Marker interface for the Excel (XLS) MIME type; adds no behavior."""
    pass
Marker interface for Excel mime type.
62598fce377c676e912f6f77
class MVMStringPPrinter(object):
    """gdb pretty-printer for MVMString (and MVMString*).

    Decodes the string body's storage into readable text where possible.
    """

    def __init__(self, val, pointer=False):
        self.val = val
        # True when printing an MVMString* rather than a value.
        self.pointer = pointer

    def stringify(self):
        """Decode the MVMString body, or None for strand (rope) storage."""
        # The low two bits of storage_type select the representation.
        stringtyp = str_t_info[int(self.val['body']['storage_type']) & 0b11]
        if stringtyp in ("blob_32", "blob_ascii", "blob_8"):
            data = self.val['body']['storage'][stringtyp]
            pieces = []
            graphs = int(self.val['body']['num_graphs'])
            for i in range(graphs):
                pdata = int((data + i).dereference())
                try:
                    pieces.append(chr(pdata))
                except Exception:
                    # Not a valid code point (e.g. a synthetic grapheme):
                    # fall back to a hex escape.
                    pieces.append("\\x%x" % pdata)
            return "".join(pieces)
        elif stringtyp == "strands":
            # Strand storage is not decoded here.
            return None
        else:
            return "string of type " + stringtyp

    def to_string(self):
        # FIX: decode once and reuse; the original called stringify() up to
        # three times, re-walking the whole string body on each call.
        result = self.stringify()
        if result:
            if self.pointer:
                return "pointer to '" + result + "'"
            return "'" + result + "'"
        return None
Whenever gdb encounters an MVMString or an MVMString*, this class gets instantiated and its to_string method tries its best to print out the actual contents of the MVMString's storage.
62598fce7c178a314d78d89e
class ReverseComplementCollapsingCounter(object):
    """Counts n-mers, collapsing each n-mer with its reverse complement."""

    def __init__(self, n):
        # n: length of the n-mers being counted.
        self._n = n
        # Maps an order-n observation index to its collapsed count.
        self._counts = dict()
        # 4-symbol alphabet (DNA), order n-1 index <-> n-mer converter.
        self._converter = MarkovOrderConverter(4, n-1)

    def __call__(self, nmer, count):
        # Record `count` occurrences of `nmer`, merging into the reverse
        # complement's slot when that was seen first.
        # NOTE(review): otherwise the n-mer's own slot is ASSIGNED, not
        # incremented — a repeated call with the same nmer overwrites its
        # previous count.  Confirm callers pass each distinct n-mer once.
        assert len(nmer) == self._n
        import hmm.pssm
        rev_comp_nmer = hmm.pssm.rev_comp(nmer)
        rev_comp_idx = self._converter.convert_to_order_n_observation(rev_comp_nmer.astype(int))
        if rev_comp_idx in self._counts:
            self._counts[rev_comp_idx] += count
        else:
            idx = self._converter.convert_to_order_n_observation(nmer.astype(int))
            self._counts[idx] = count

    def num_counts(self):
        # Number of distinct collapsed n-mers seen so far.
        return len(self._counts)

    def counts(self):
        # Yields (nmer, count) pairs.
        # NOTE: dict.iteritems is Python 2 only; this module is Py2 code.
        for idx, count in self._counts.iteritems():
            yield self._converter.convert_from_order_n_observation(idx), count
Counts n-mers but collapses reverse complement counts together.
62598fcedc8b845886d539bc
class QuotaConsumer(object):
    """Wrapper for efficient quota consuming/reclaiming.

    Quota is pulled from the manager in batches and cached locally; the
    unused remainder is returned in dispose().  WARNING: always call
    dispose() if you need to keep quota consistent.
    """

    def __init__(self, quota_manager, bucket, batch_size):
        self.quota_manager = quota_manager
        self.batch_size = batch_size
        self.bucket = bucket
        # Locally cached, not-yet-consumed quota units.
        self.quota = 0

    def consume(self, amount=1):
        """Consume `amount` units, refilling from the manager as needed.

        Returns False (consuming nothing) once the manager runs dry.
        """
        while self.quota < amount:
            granted = self.quota_manager.consume(
                self.bucket, self.batch_size, consume_some=True)
            if not granted:
                return False
            self.quota += granted
        self.quota -= amount
        return True

    def put(self, amount=1):
        """Return `amount` units to the local cache."""
        self.quota += amount

    def check(self, amount=1):
        """Cheaply test whether `amount` units are available (no consumption)."""
        if self.quota >= amount:
            return True
        # Not enough locally; also count what the manager still holds.
        return self.quota + self.quota_manager.get(self.bucket) >= amount

    def dispose(self):
        """Give any locally cached quota back to the manager."""
        self.quota_manager.put(self.bucket, self.quota)
Quota consumer wrapper for efficient quota consuming/reclaiming. Quota is consumed in batches and put back in dispose() method. WARNING: Always call the dispose() method if you need to keep quota consistent.
62598fce71ff763f4b5e7b80
class SegmentationAccuracy(caffe.Layer):
    """Caffe Python layer reporting mean pixel accuracy for segmentation.

    bottom[0]: per-class scores; bottom[1]: ground-truth label maps.
    top[0]: scalar mean accuracy over non-ignored pixels.
    """

    def setup(self, bottom, top):
        if len(bottom) != 2:
            raise Exception('Need two bottom inputs for SegmentationAccuracy')
        if len(top) != 1:
            raise Exception('Need one top layer for pixel accuracy')
        # param_str is expected to be a python dict literal.
        layer_params = eval(self.param_str)
        self.ignore_label = layer_params['ignore_label']
        self.verbose = layer_params['verbose']

    def reshape(self, bottom, top):
        # Single scalar output.
        top[0].reshape(1)

    def forward(self, bottom, top):
        scores = bottom[0].data
        labels = bottom[1].data
        correct = np.zeros(bottom[1].data.shape[1:])
        valid_total = 0
        for idx in range(labels.shape[0]):
            # Pixels labeled ignore_label are excluded from the accuracy.
            valid = np.logical_not(labels[idx, ...] == self.ignore_label)
            correct += ((labels[idx, ...] == scores[idx, ...].argmax(0)) & valid)
            valid_total += valid.sum()
        mean_accuracy = correct.sum() / valid_total
        top[0].data[...] = mean_accuracy
        if self.verbose:
            print('mean pixel accuracy: {:.3f}'.format(mean_accuracy))

    def backward(self, top, propagate_down, bottom):
        # Accuracy layer: nothing to backpropagate.
        pass
Layer for reporting the Segmentation accuracy
62598fce851cf427c66b86b1
class ValueGenerator(Generator):
    """Generates a constant value per tuple.

    The value comes from the mandatory 'value' directive; identical
    directives share one PersistentGenerator via a class-level cache.
    """

    # Class-level cache: directive string -> shared PersistentGenerator.
    values = {}

    # Recognized directives and their expected types.
    DIRS = { 'value':str }

    def __init__(self, att, params):
        Generator.__init__(self, att, params)
        assert 'value' in self.params, "{0}: mandatory 'value' directive".format(self)
        value = self.params['value']
        if not value in ValueGenerator.values:
            # First use of this directive string: build and cache its generator.
            ValueGenerator.values[value] = PersistentGenerator(macroGenerator(value))
        self.value = ValueGenerator.values[value]
        self.cleanParams(ValueGenerator.DIRS)

    def mapGen(self, func):
        # Apply func to this generator and to the underlying value generator.
        super(ValueGenerator, self).mapGen(func)
        self.value.mapGen(func)

    def genData(self):
        # Delegate to the shared persistent generator.
        return self.value.genData()

    def getData(self):
        return self.genData()
Generate a constant value per tuple
62598fce9f28863672818a7c
class DataAnalyticPipeline(object):
    """Abstract class for a data-analytic pipeline entity.

    A pipeline is an ordered chain of compute pipes with entry/exit
    callbacks and a simple state machine (FREE/ENTRY/IN_USE/EXIT).
    """

    def __init__(self, pipeline_type, layer=constants.INFRA):
        self.pipeline_type = pipeline_type
        self.layer = layer
        self.pipes = []
        self.num_of_pipes = 0
        self.state = constants.PIPELINE_STATE_FREE
        # Bind the default callback methods as instance attributes so
        # set_callbacks() can later replace them per instance.
        self.entry_callback = self.entry_callback
        self.exit_callback = self.exit_callback

    def set_callbacks(self, entry_callback, exit_callback):
        # Install custom entry/exit callbacks for this pipeline instance.
        self.entry_callback = entry_callback
        self.exit_callback = exit_callback

    def entry_callback(self, pipeline, message):
        # Default entry hook: log only.
        LOG.debug(_("entry_call: enter the pipeline[%s]" % (pipeline.pipeline_type)))

    def exit_callback(self, pipeline, result):
        # Default exit hook: log only.
        LOG.debug(_("exit_call: quit the pipeline[%s]" % (pipeline.pipeline_type)))

    def assemble_compute_pipes(self):
        # Subclasses build their pipe chain here.
        pass

    def add_compute_pipe(self, pipe):
        # Append a pipe to the chain.
        self.pipes.append(pipe)
        self.num_of_pipes += 1

    def run(self, args):
        # Run the message through the entry callback, every pipe in order,
        # then the exit callback, updating state at each phase.
        msg = args
        self.state = constants.PIPELINE_STATE_ENTRY
        pipe_result = self.entry_callback(self, msg)
        self.state = constants.PIPELINE_STATE_IN_USE
        for pipe in self.pipes:
            # Each pipe sees the previous pipe's result as its context.
            pipe.prepare(context=pipe_result, message=msg)
            pipe.compute(context=pipe_result, message=msg)
            pipe_result = pipe.finish(context=pipe_result, message=msg)
        self.state = constants.PIPELINE_STATE_EXIT
        self.exit_callback(self, pipe_result)
        self.state = constants.PIPELINE_STATE_FREE

    def get_state(self):
        # Current pipeline state constant.
        return self.state
Abstract class for data analytic pipeline entity
62598fceab23a570cc2d4f6d
class Metrics(Callback):
    """Keras callback that records macro-F1 on the validation set each epoch."""

    def on_train_begin(self, logs={}):
        # Per-epoch histories (recalls/precisions kept for future use).
        self.val_f1s = []
        self.val_recalls = []
        self.val_precisions = []

    def on_epoch_end(self, epoch, logs={}):
        inputs = self.validation_data[0]
        targets = self.validation_data[1]
        predicted = np.argmax(np.asarray(self.model.predict(inputs)), axis=1)
        actual = np.argmax(targets, axis=1)
        _val_f1 = f1_score(actual, predicted, average='macro')
        self.val_f1s.append(_val_f1)
        print(' — val_f1:', _val_f1)
        return
Defines a custom callback (tracks the validation macro-F1 score per epoch).
62598fce3346ee7daa337847
@python_2_unicode_compatible
class BuildCommandResult(BuildCommandResultMixin, models.Model):
    """Build command for a ``Build``: one executed command with its output,
    exit code, and timing.
    """

    build = models.ForeignKey(Build, verbose_name=_('Build'), related_name='commands')
    command = models.TextField(_('Command'))
    description = models.TextField(_('Description'), blank=True)
    output = models.TextField(_('Command output'), blank=True)
    exit_code = models.IntegerField(_('Command exit code'))
    start_time = models.DateTimeField(_('Start time'))
    end_time = models.DateTimeField(_('End time'))

    class Meta(object):
        ordering = ['start_time']
        get_latest_by = 'start_time'

    objects = RelatedBuildQuerySet.as_manager()

    def __str__(self):
        return (ugettext(u'Build command {pk} for build {build}')
                .format(pk=self.pk, build=self.build))

    @property
    def run_time(self):
        """Elapsed seconds between start and end; None if either is missing.

        NOTE(review): timedelta.seconds ignores the days component —
        confirm command runs can never exceed 24 hours.
        """
        if self.start_time is not None and self.end_time is not None:
            diff = self.end_time - self.start_time
            return diff.seconds
Build command for a ``Build``.
62598fce5fdd1c0f98e5e38c
class RuleNode(Node):
    """AST node for a rule: a resource type, a condition, and the set of
    actions to apply to every matching resource.
    """

    def __init__(self, tokens):
        """Consume tokens, building the resource, condition and action nodes.

        Raises errors.ParsingError when the token stream ends prematurely.
        """
        try:
            self.resource = ResourceNode(tokens)
            # Split off the condition's tokens and parse them into an AST.
            condition_length = isolate_condition(tokens)
            condition_tokens = tokens[:condition_length]
            del tokens[:condition_length]
            self.condition = parse_condition_to_ast(condition_tokens)
            # Actions live inside a braced block.
            expect('{', tokens)
            self.actions = []
            while tokens[0] != '}':
                self.actions.append(ActionNode(tokens))
            expect('}', tokens)
        except IndexError:
            raise errors.ParsingError('EOF found while parsing')

    def execute(self, resource):
        """Apply every action when the condition holds for `resource`."""
        if not self.condition.execute(resource):
            return
        for action in self.actions:
            action.execute(resource)

    def __str__(self):
        body = ''.join('\n\t{}'.format(str(action)) for action in self.actions)
        return '{}({}){{{}\n}}'.format(self.resource, self.condition, body)
A node that defines a rule, basically a coupling of a resource type, a condition and a set of actions to apply to matching resources
62598fcebe7bc26dc9252059
class barycentric:
    """Lagrange interpolation in barycentric form for sampled data.

    A tiny eps is added to node differences to dodge division by zero,
    perturbing results by roughly machine epsilon.
    """

    def __init__(self, xs, ys):
        self.xs = xs
        self.ys = np.copy(ys)
        self.n = np.size(self.xs)

    def pre_compute_weights(self):
        """Compute and store the barycentric weights w_i = 1/prod_j(x_i - x_j)."""
        eps = 1.0e-14
        w = np.ones(self.n)
        for i in range(0, self.n):
            for j in range(0, self.n):
                if i == j:
                    continue
                w[i] /= (self.xs[i] - self.xs[j] + eps)
        self.w = w

    def evaluate(self, x):
        """Evaluate the interpolant at x (exact at the interpolation nodes)."""
        eps = 1.0e-14
        numerator = 0
        denominator = 0
        for i in range(self.n):
            if x == self.xs[i]:
                # Exactly at a node: return the sample directly.
                return self.ys[i]
            weight = self.w[i] / (x - self.xs[i] + eps)
            numerator += weight * self.ys[i]
            denominator += weight
        return numerator / denominator
A class to solve the interpolation problem for sampled data using the Lagrange polynomial in barycentric form.
62598fce60cbc95b0636473e
class TpuContext(threading.local):
    """Thread-local state about the TPU computation being built."""

    def __init__(self):
        # Unset until set_number_of_shards() is called.
        self._number_of_shards = None

    @property
    def number_of_shards(self):
        """Shard count for the computation, or None when not yet set."""
        return self._number_of_shards

    def set_number_of_shards(self, number_of_shards):
        """Record the shard count for the computation being built."""
        self._number_of_shards = number_of_shards
A context object holding state about the TPU computation being built.
62598fce956e5f7376df587e
class SchedulerOptions(object):
    """Monitor and load a local .json file for filtering and weighing.

    Watches CONF.scheduler_json_config_location for changes and reloads it
    into a dict passed to filter/weigh functions for dynamic configuration.
    """

    def __init__(self):
        super(SchedulerOptions, self).__init__()
        # Last successfully loaded configuration.
        self.data = {}
        # mtime of the file at the last load.
        self.last_modified = None
        # NOTE(review): last_checked is read in get_configuration() but never
        # assigned anywhere in this view, so the 5-minute throttle can never
        # engage — confirm whether an assignment was lost.
        self.last_checked = None

    def _get_file_handle(self, filename):
        # Split out for testability.
        return open(filename)

    def _get_file_timestamp(self, filename):
        # mtime of the config file; logs and re-raises on stat failure.
        try:
            return os.path.getmtime(filename)
        except os.error:
            LOG.exception("Could not stat scheduler options file "
                          "%(filename)s.", {"filename": filename})
            raise

    def _load_file(self, handle):
        # Parse the JSON config; an unparsable file yields an empty config.
        try:
            return jsonutils.load(handle)
        except ValueError:
            LOG.exception("Could not decode scheduler options.")
            return {}

    def _get_time_now(self):
        # Split out for testability.
        return timeutils.utcnow()

    def get_configuration(self, filename=None):
        """Return the current config, reloading the file if it changed.

        With no filename configured, returns the cached (possibly empty)
        data.  Checks are rate-limited to once per five minutes via
        last_checked (but see the NOTE in __init__).
        """
        if not filename:
            filename = CONF.scheduler_json_config_location
        if not filename:
            return self.data
        if self.last_checked:
            now = self._get_time_now()
            if now - self.last_checked < datetime.timedelta(minutes=5):
                return self.data
        last_modified = self._get_file_timestamp(filename)
        # Reload when we have no previous mtime or the file is newer.
        if (not last_modified or not self.last_modified or last_modified > self.last_modified):
            self.data = self._load_file(self._get_file_handle(filename))
            self.last_modified = last_modified
        if not self.data:
            self.data = {}
        return self.data
Monitor and load local .json file for filtering and weighing. SchedulerOptions monitors a local .json file for changes and loads it if needed. This file is converted to a data structure and passed into the filtering and weighing functions which can use it for dynamic configuration.
62598fce4527f215b58ea2d0
class InstanceBinding(Binding[T], Generic[T]):
    """Binding that binds an interface to a ready-made instance of it."""

    def __init__(self, key: Key, instance: T) -> None:
        # BUG FIX: the original asserted `isinstance is not None`, which is
        # always true (it's the builtin); the intent was to validate the
        # instance argument before the type check.
        assert instance is not None
        assert isinstance(instance, key.interface)
        self._instance = instance
        self._key = key

    @property
    def key(self) -> Key[T]:
        return self._key

    @property
    def linked_key(self) -> Optional[Key]:
        # An instance binding is terminal: it links to no other key.
        return None

    @property
    def dependencies(self) -> Set[Key]:
        # A ready-made instance needs nothing injected.
        return set()

    def create_provider(self, providers: Mapping[Key, Provider]) -> Provider[T]:
        """Return a provider that always yields the bound instance."""
        return InstanceProvider(self._instance)
Binding that binds interface to an instance of this interface
62598fce377c676e912f6f79
class MiniCIReport(object):
    """Generates the mini-CI summary report from the C3 API."""

    def __init__(self, api_username, api_key, api_limit):
        self.api_username = api_username
        self.api_key = api_key
        self.api_limit = api_limit
        self.c3api = api_helper.C3api(api_username, api_key, api_limit)

    def get_status_number_date_by_submission_id(self, submission_id):
        """Return (passed, skipped, failed, total, iso-date) for a submission."""
        data = self.c3api.get_status_number_date_by_submission_id(submission_id)
        return (data["passed_test_count"], data["skipped_test_count"],
                data["failed_test_count"], data["test_count"],
                DateParser.parse(data["updated_at"]).date().isoformat())

    def write_report(self, summary_report):
        """Write the JSON report text to minici.json.

        BUG FIX: the original opened the file without ever closing it; a
        context manager guarantees the handle is flushed and released.
        """
        with open("minici.json", "w") as output_file:
            output_file.write(summary_report)

    def generate_json(self, **kwargs):
        """Fetch machine reports and write them to minici.json.

        Expected kwargs: canonical_id, release, form_factor, from_date.
        """
        cid = kwargs['canonical_id']
        release = kwargs['release']
        formfactor = kwargs['form_factor']
        from_date = kwargs['from_date']
        self.release = release
        self.formfactor = formfactor
        self.canonical_id = cid
        data = self.c3api.retrieve_machinereports(cid, release, from_date)
        print(data)
        records_list = []
        for entry in data['objects']:
            entry_data = {
                'passed': entry['passed_test_count'],
                'skipped': entry['skipped_test_count'],
                'failed': entry['failed_test_count'],
                'total': entry['test_count'],
                'date': entry['created_at'],
                'release': entry['release'],
                'formfactor': entry['form_factor'],
                'canonical_id': entry['canonical_id'],
                'submission_id': entry['id'],
                'pastebin': 'NA',
            }
            records_list.append(entry_data)
        summary_report = {"records": records_list}
        summary_report_json = json.dumps(summary_report)
        self.write_report(summary_report_json)
Generates the mini-CI report.
62598fce091ae3566870502d
class FairseqDataset(torch.utils.data.Dataset):
    """A dataset that provides helpers for batching."""

    def __getitem__(self, index):
        raise NotImplementedError

    def __len__(self):
        raise NotImplementedError

    def collater(self, samples):
        """Merge a list of samples into a mini-batch (subclass hook)."""
        raise NotImplementedError

    def num_tokens(self, index):
        """Number of tokens in the index-th example (subclass hook)."""
        raise NotImplementedError

    def size(self, index):
        """Size of the index-th example (subclass hook)."""
        raise NotImplementedError

    def ordered_indices(self):
        """Indices in the order they should be iterated (default: natural)."""
        return np.arange(len(self))

    @property
    def supports_prefetch(self):
        """Whether prefetch() is available for this dataset (default False)."""
        return False

    def attr(self, attr: str, index: int):
        # Default: look the named attribute up on the dataset itself.
        return getattr(self, attr, None)

    def prefetch(self, indices):
        """Prefetch the data for the given indices (subclass hook)."""
        raise NotImplementedError

    def set_epoch(self, epoch):
        """Hook called at the start of each epoch (no-op by default)."""
        pass
A dataset that provides helpers for batching.
62598fcebf627c535bcb18ae
class Identity(GPTransformation):
    """Identity link function: g(f) = f."""

    def transf(self, f):
        # g(f) = f
        return f

    def dtransf_df(self, f):
        # dg/df = 1 everywhere.
        return np.ones_like(f)

    def d2transf_df2(self, f):
        # Second derivative vanishes.
        return np.zeros_like(f)

    def d3transf_df3(self, f):
        # Third derivative vanishes.
        return np.zeros_like(f)
.. math:: g(f) = f
62598fce3346ee7daa337849
class TrackResultSet(ResultSet):
    """ResultSet tailored to the values returned by the Track Choreo.

    A ResultSet object is used to retrieve the results of a Choreo execution.
    """

    def getJSONFromString(self, str):
        # Parse a JSON string.  NOTE: parameter name shadows the builtin
        # `str`; kept as-is for API compatibility.
        return json.loads(str)

    def get_Response(self):
        """Retrieve the 'Response' output value (None if absent)."""
        return self._output.get('Response', None)
A ResultSet with methods tailored to the values returned by the Track Choreo. The ResultSet object is used to retrieve the results of a Choreo execution.
62598fce5fdd1c0f98e5e390
class ResidualCBAMBlock(nn.Module): <NEW_LINE> <INDENT> def __init__(self, nf=64,norm='BN',activate='relu'): <NEW_LINE> <INDENT> super(ResidualCBAMBlock, self).__init__() <NEW_LINE> self.input = BasicConvModule(in_channels=nf,out_channels=nf,activate=activate,normalized=norm,is_3d=False,deconv=False,kernel_size=3, stride=1, padding=1) <NEW_LINE> self.sa=SpatialAttention() <NEW_LINE> self.output=BasicConvModule(in_channels=nf, out_channels=nf, activate=None, normalized=norm, is_3d=False, deconv=False,kernel_size=3, stride=1, padding=1) <NEW_LINE> <DEDENT> def forward(self, x,): <NEW_LINE> <INDENT> res = x <NEW_LINE> x = self.input(x) <NEW_LINE> x = self.sa(x)*x <NEW_LINE> x = self.output(x) <NEW_LINE> return x + res
Resduial bolck with Instance normalized
62598fce50812a4eaa620de6
class MongoDBDestination(Destination): <NEW_LINE> <INDENT> def connect(self): <NEW_LINE> <INDENT> self.client = pymongo.MongoClient( host=self.config.get("MongoDB", "server_ip"), port=self.config.getint("MongoDB", "port") ) <NEW_LINE> self.db = self.client[self.config.get("MongoDB", "db")] <NEW_LINE> <DEDENT> def close(self): <NEW_LINE> <INDENT> self.client.close() <NEW_LINE> <DEDENT> def read_stream_def_table(self): <NEW_LINE> <INDENT> known_streams = {} <NEW_LINE> known_stream_versions = {} <NEW_LINE> if "known_streams" in self.db.collection_names(): <NEW_LINE> <INDENT> for stream_obj in self.db.known_streams.find(): <NEW_LINE> <INDENT> known_streams[stream_obj["stream"]] = stream_obj <NEW_LINE> known_stream_versions[stream_obj["stream"]] = stream_obj["definition"] <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> self.db.known_streams.create_index([('id', pymongo.ASCENDING)], unique=True) <NEW_LINE> <DEDENT> self.known_streams = known_streams <NEW_LINE> self.known_stream_versions = known_stream_versions <NEW_LINE> <DEDENT> def create_new_stream_destination(self, stream_obj): <NEW_LINE> <INDENT> stream_id = stream_obj["id"] <NEW_LINE> self.db.known_streams.replace_one({'id': stream_id}, stream_obj, upsert=True) <NEW_LINE> return stream_id <NEW_LINE> <DEDENT> def insert_measurement(self, stream, measurements): <NEW_LINE> <INDENT> stream_obj = self.known_streams[stream] <NEW_LINE> stream_collection = "{}_{}".format(stream, stream_obj["version"]) <NEW_LINE> str_meas = {TIMESTAMP: measurements[TIMESTAMP]} <NEW_LINE> for key in measurements: <NEW_LINE> <INDENT> if key != TIMESTAMP: <NEW_LINE> <INDENT> str_meas[key] = str(measurements[key]) <NEW_LINE> <DEDENT> <DEDENT> self.db[stream_collection].insert_one(str_meas) <NEW_LINE> <DEDENT> def get_raw_stream_data(self, stream, start=None, stop=None, definition=None): <NEW_LINE> <INDENT> start, stop = self.validate_time_range(start, stop) <NEW_LINE> stream_obj = self.known_streams[stream] <NEW_LINE> stream_collection = 
"{}_{}".format(stream, stream_obj["version"]) <NEW_LINE> if definition is None: <NEW_LINE> <INDENT> definition = self.known_streams[stream]["definition"] <NEW_LINE> <DEDENT> field_list = [field for field in definition] <NEW_LINE> data = {TIMESTAMP: []} <NEW_LINE> for field in field_list: <NEW_LINE> <INDENT> data[field] = [] <NEW_LINE> <DEDENT> results = self.db[stream_collection].find({"$and": [ {TIMESTAMP: {"$gte": start}}, {TIMESTAMP: {"$lte": stop}} ]}) <NEW_LINE> for meas in results: <NEW_LINE> <INDENT> for field in field_list: <NEW_LINE> <INDENT> dtype = data_types[self.known_stream_versions[stream][field]['type']] <NEW_LINE> data[field].append(dtype['type'](meas[field])) <NEW_LINE> <DEDENT> dtype = data_types[self.config.get("Server", "timestamp_type")] <NEW_LINE> data[TIMESTAMP].append(dtype['type'](meas[TIMESTAMP])) <NEW_LINE> <DEDENT> return (0, data, '')
A class for storing data in a mongodb database.
62598fce4a966d76dd5ef2dc
class check_user_anonymous_users(): <NEW_LINE> <INDENT> TITLE = 'Anonymous Users' <NEW_LINE> CATEGORY = 'User' <NEW_LINE> TYPE = 'sql' <NEW_LINE> SQL = "SELECT * FROM mysql.user WHERE user=''" <NEW_LINE> verbose = False <NEW_LINE> skip = False <NEW_LINE> result = {} <NEW_LINE> def do_check(self, *results): <NEW_LINE> <INDENT> output = '' <NEW_LINE> for rows in results: <NEW_LINE> <INDENT> if len(rows) > 0: <NEW_LINE> <INDENT> self.result['level'] = 'RED' <NEW_LINE> for row in rows: <NEW_LINE> <INDENT> output += row[0] + "\n" <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> self.result['level'] = 'GREEN' <NEW_LINE> output = 'No anonymous users found.' <NEW_LINE> <DEDENT> self.result['output'] = output <NEW_LINE> <DEDENT> return self.result <NEW_LINE> <DEDENT> def __init__(self, parent): <NEW_LINE> <INDENT> print('Performing check: ' + self.TITLE)
check_user_anonymous_users: Do anonymous users exist.
62598fce8a349b6b43686645
class CamLoggingMixin(BaseMixin): <NEW_LINE> <INDENT> def __init__(self, params, *args, **kwargs): <NEW_LINE> <INDENT> super().__init__(params, *args, **kwargs) <NEW_LINE> self.__cam_log_file = params.cam_log <NEW_LINE> self.__camlogger = None <NEW_LINE> <DEDENT> def __exit__(self, *eargs): <NEW_LINE> <INDENT> if self.__camlogger: self.__camlogger.close() <NEW_LINE> super().__exit__(*eargs) <NEW_LINE> <DEDENT> def _camlog(self, text): <NEW_LINE> <INDENT> self._created_logs.add('cam') <NEW_LINE> with self.__cam_log_file.open('a', 1) as f: <NEW_LINE> <INDENT> f.write(text + '\n') <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def _camlog_fd(self): <NEW_LINE> <INDENT> if self.__camlogger is None or self.__camlogger.closed: <NEW_LINE> <INDENT> self.__camlogger = self.__cam_log_file.open('a+', 1) <NEW_LINE> self._created_logs.add('cam') <NEW_LINE> <DEDENT> return self.__camlogger
Initialize file descriptor and open cam log file for appending cam emulator message logs. Closing it on exit/end test run
62598fcea219f33f346c6c0e
class Branch(Picker): <NEW_LINE> <INDENT> def pick(self, build): <NEW_LINE> <INDENT> return build.get_params().get('BRANCH_NAME') or build._get_git_rev_branch()[0]['name']
Picks out job branches.
62598fce7b180e01f3e49252
class Bilateral_EAF2(TransactionManager): <NEW_LINE> <INDENT> def resolve(self): <NEW_LINE> <INDENT> while len(self.get_deficits()) > 0: <NEW_LINE> <INDENT> for acc in self.get_deficits(): <NEW_LINE> <INDENT> for tx in self.get_txs(acc, 'outflows', 'reverse_chronological', 'active'): <NEW_LINE> <INDENT> self.inactivate(tx) <NEW_LINE> if self.get_position(acc) >= 0: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> self.settle()
Deactivate transactions in inverse chronological order for every deficit party until that party has positive position
62598fce0fa83653e46f52ed
class LocalTCP(asyncio.Protocol): <NEW_LINE> <INDENT> def __init__(self, port): <NEW_LINE> <INDENT> self.port = port <NEW_LINE> self._handler = None <NEW_LINE> self._transport = None <NEW_LINE> <DEDENT> def _init_handler(self): <NEW_LINE> <INDENT> self._handler = LocalHandler(self.port) <NEW_LINE> <DEDENT> def __call__(self): <NEW_LINE> <INDENT> local = LocalTCP(self.port) <NEW_LINE> local._init_handler() <NEW_LINE> return local <NEW_LINE> <DEDENT> def pause_writing(self): <NEW_LINE> <INDENT> self._handler.remote.transport.pause_reading() <NEW_LINE> <DEDENT> def resume_writing(self): <NEW_LINE> <INDENT> self._handler.remote.transport.resume_reading() <NEW_LINE> <DEDENT> def connection_made(self, transport): <NEW_LINE> <INDENT> self._transport = transport <NEW_LINE> peer = self._transport.get_extra_info("peername") <NEW_LINE> self._handler.handle_connection_made(flag.TRANSPORT_TCP, transport, peer) <NEW_LINE> <DEDENT> def data_received(self, data): <NEW_LINE> <INDENT> self._handler.handle_data_received(data) <NEW_LINE> <DEDENT> def eof_received(self): <NEW_LINE> <INDENT> self._handler.handle_eof_received() <NEW_LINE> <DEDENT> def connection_lost(self, exc): <NEW_LINE> <INDENT> self._handler.handle_connection_lost(exc)
Local Tcp Factory
62598fceec188e330fdf8c9b
class X: <NEW_LINE> <INDENT> def __init__(self, TAG_NAME: str = 'X'): <NEW_LINE> <INDENT> self.TAG_NAME = TAG_NAME <NEW_LINE> self.known_attrs = {} <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return f'{self.TAG_NAME}' <NEW_LINE> <DEDENT> def __strip(self, line: str): <NEW_LINE> <INDENT> data = line.split(':', maxsplit=1) <NEW_LINE> if len(data) == 2: <NEW_LINE> <INDENT> _, no_tag_line = data <NEW_LINE> <DEDENT> elif len(data) == 0: <NEW_LINE> <INDENT> raise 'm3u8格式错误 无法处理的异常' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> no_tag_line = data[0] <NEW_LINE> <DEDENT> return no_tag_line <NEW_LINE> <DEDENT> def get_tag_info(self, line: str): <NEW_LINE> <INDENT> return self.__strip(line) <NEW_LINE> <DEDENT> def format_key(self, key: str): <NEW_LINE> <INDENT> return key.replace('-', '_').lower() <NEW_LINE> <DEDENT> def convert_type(self, name: str, value: str, _type: type): <NEW_LINE> <INDENT> self.__setattr__(self.format_key(name), _type(value)) <NEW_LINE> <DEDENT> def regex_attrs(self, info: str) -> list: <NEW_LINE> <INDENT> if info.endswith(',') is False: <NEW_LINE> <INDENT> info += ',' <NEW_LINE> <DEDENT> return re.findall('(.*?)=("[^"]*?"|[^,]*?),', info) <NEW_LINE> <DEDENT> def set_attrs_from_line(self, line: str): <NEW_LINE> <INDENT> info = self.get_tag_info(line) <NEW_LINE> for key, value in self.regex_attrs(info): <NEW_LINE> <INDENT> value = value.strip('"') <NEW_LINE> if key in self.known_attrs: <NEW_LINE> <INDENT> if isinstance(self.known_attrs[key], str): <NEW_LINE> <INDENT> self.__setattr__(self.known_attrs[key], value) <NEW_LINE> <DEDENT> elif isinstance(self.known_attrs[key], type): <NEW_LINE> <INDENT> self.convert_type(key, value, self.known_attrs[key]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.known_attrs[key](value) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> print(f'unknown attr -> {key} <- of {self.TAG_NAME}') <NEW_LINE> <DEDENT> <DEDENT> return self
每一个标签具有的通用性质 - 标签名 - 以期望的形式打印本身信息 - 标签行去除 TAG_NAME: 部分
62598fce4527f215b58ea2d4
class OBJECT_MT_spiral_curve_presets(Menu): <NEW_LINE> <INDENT> bl_label = "Spiral Curve Presets" <NEW_LINE> bl_idname = "OBJECT_MT_spiral_curve_presets" <NEW_LINE> preset_subdir = "curve_extras/curve.spirals" <NEW_LINE> preset_operator = "script.execute_preset" <NEW_LINE> draw = bpy.types.Menu.draw_preset
Presets for curve.spiral
62598fce099cdd3c636755e2
class NodeView(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.NodeId = None <NEW_LINE> self.NodeIp = None <NEW_LINE> self.Visible = None <NEW_LINE> self.Break = None <NEW_LINE> self.DiskSize = None <NEW_LINE> self.DiskUsage = None <NEW_LINE> self.MemSize = None <NEW_LINE> self.MemUsage = None <NEW_LINE> self.CpuNum = None <NEW_LINE> self.CpuUsage = None <NEW_LINE> self.Zone = None <NEW_LINE> self.NodeRole = None <NEW_LINE> self.NodeHttpIp = None <NEW_LINE> self.JvmMemUsage = None <NEW_LINE> self.ShardNum = None <NEW_LINE> self.DiskIds = None <NEW_LINE> self.Hidden = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.NodeId = params.get("NodeId") <NEW_LINE> self.NodeIp = params.get("NodeIp") <NEW_LINE> self.Visible = params.get("Visible") <NEW_LINE> self.Break = params.get("Break") <NEW_LINE> self.DiskSize = params.get("DiskSize") <NEW_LINE> self.DiskUsage = params.get("DiskUsage") <NEW_LINE> self.MemSize = params.get("MemSize") <NEW_LINE> self.MemUsage = params.get("MemUsage") <NEW_LINE> self.CpuNum = params.get("CpuNum") <NEW_LINE> self.CpuUsage = params.get("CpuUsage") <NEW_LINE> self.Zone = params.get("Zone") <NEW_LINE> self.NodeRole = params.get("NodeRole") <NEW_LINE> self.NodeHttpIp = params.get("NodeHttpIp") <NEW_LINE> self.JvmMemUsage = params.get("JvmMemUsage") <NEW_LINE> self.ShardNum = params.get("ShardNum") <NEW_LINE> self.DiskIds = params.get("DiskIds") <NEW_LINE> self.Hidden = params.get("Hidden") <NEW_LINE> memeber_set = set(params.keys()) <NEW_LINE> for name, value in vars(self).items(): <NEW_LINE> <INDENT> if name in memeber_set: <NEW_LINE> <INDENT> memeber_set.remove(name) <NEW_LINE> <DEDENT> <DEDENT> if len(memeber_set) > 0: <NEW_LINE> <INDENT> warnings.warn("%s fileds are useless." % ",".join(memeber_set))
节点维度视图数据
62598fce4a966d76dd5ef2de
class Attribute(Element): <NEW_LINE> <INDENT> pass
Attribute descriptions
62598fce8a349b6b43686647
class LoadAverageCollector(diamond.collector.Collector): <NEW_LINE> <INDENT> PROC = '/proc/loadavg' <NEW_LINE> def collect(self): <NEW_LINE> <INDENT> if not os.access(self.PROC, os.R_OK): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> file = open(self.PROC) <NEW_LINE> for line in file: <NEW_LINE> <INDENT> match = _RE.match(line) <NEW_LINE> if match: <NEW_LINE> <INDENT> self.publish('01', float(match.group(1)), 2) <NEW_LINE> self.publish('05', float(match.group(2)), 2) <NEW_LINE> self.publish('15', float(match.group(3)), 2) <NEW_LINE> self.publish('processes_running', int(match.group(4))) <NEW_LINE> self.publish('processes_total', int(match.group(5))) <NEW_LINE> <DEDENT> <DEDENT> file.close()
Uses /proc/loadavg to collect data on load average
62598fce7c178a314d78d8a6
class Control(): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.filepath = '/Users/Nieo/Library/Logs/Unity/Player.log' <NEW_LINE> self.logreader = LogReader(self.filepath, self.handleLogUpdate) <NEW_LINE> self.database = dbhandler.DatabaseHandler() <NEW_LINE> self.playerName = 'Nieo' <NEW_LINE> self.logreader.start() <NEW_LINE> sys.exit(self.app.exec_()) <NEW_LINE> <DEDENT> def handleLogUpdate(self, type, data): <NEW_LINE> <INDENT> if type == "zone": <NEW_LINE> <INDENT> print("--") <NEW_LINE> for d in data: <NEW_LINE> <INDENT> print("data: ", d) <NEW_LINE> <DEDENT> if data[2] == "OPPOSING HAND": <NEW_LINE> <INDENT> print(self.database.getCardName(data[1])) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> elif type == "playState": <NEW_LINE> <INDENT> logging.debug(data) <NEW_LINE> if data[1] == 'PLAYING' and data[0] == self.playerName: <NEW_LINE> <INDENT> logging.info("New game starting") <NEW_LINE> <DEDENT> elif data[1] == 'WON': <NEW_LINE> <INDENT> if data[0] == self.playerName: <NEW_LINE> <INDENT> print("------Victory------") <NEW_LINE> logging.info("You have won") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print("-----Defeat-----") <NEW_LINE> logging.info("You have lost")
Main controler
62598fce0fa83653e46f52ef
class GWeather(): <NEW_LINE> <INDENT> def __init__(self, city, country): <NEW_LINE> <INDENT> self.url = "http://www.google.com/ig/api?weather" <NEW_LINE> self.city = city <NEW_LINE> self.country = country <NEW_LINE> <DEDENT> def get_data(self): <NEW_LINE> <INDENT> current = dict() <NEW_LINE> forecast = [] <NEW_LINE> data = requests.get("%s=%s+%s" % (self.url, self.city, self.country) ) <NEW_LINE> doc = etree.fromstring(data.content) <NEW_LINE> current_conditions = doc.findall("weather/current_conditions") <NEW_LINE> forecast_conditions = doc.findall("weather/forecast_conditions") <NEW_LINE> if current_conditions: <NEW_LINE> <INDENT> for child in current_conditions[0].iterchildren(): <NEW_LINE> <INDENT> current[child.tag] = child.get("data") <NEW_LINE> <DEDENT> <DEDENT> if forecast_conditions: <NEW_LINE> <INDENT> for forecast_elem in forecast_conditions: <NEW_LINE> <INDENT> aux = dict() <NEW_LINE> for child in forecast_elem.iterchildren(): <NEW_LINE> <INDENT> aux[child.tag] = child.get("data") <NEW_LINE> <DEDENT> forecast.append(aux) <NEW_LINE> <DEDENT> <DEDENT> return { "current": current, "forecast": forecast } <NEW_LINE> <DEDENT> def convert_to_c(self, temp): <NEW_LINE> <INDENT> return str(round((int(temp) - 32) * (5.0/9.0), 2))
Retrieves weather data from google weather API. You can put your city.
62598fce97e22403b383b30f
class RNNCell(object): <NEW_LINE> <INDENT> def __call__(self, inputs, state, scope=None): <NEW_LINE> <INDENT> raise NotImplementedError("Abstract method") <NEW_LINE> <DEDENT> @property <NEW_LINE> def input_size(self): <NEW_LINE> <INDENT> raise NotImplementedError("Abstract method") <NEW_LINE> <DEDENT> @property <NEW_LINE> def output_size(self): <NEW_LINE> <INDENT> raise NotImplementedError("Abstract method") <NEW_LINE> <DEDENT> @property <NEW_LINE> def state_size(self): <NEW_LINE> <INDENT> raise NotImplementedError("Abstract method") <NEW_LINE> <DEDENT> def zero_state(self, batch_size, dtype): <NEW_LINE> <INDENT> zeros = array_ops.zeros( array_ops.pack([batch_size, self.state_size]), dtype=dtype) <NEW_LINE> zeros.set_shape([None, self.state_size]) <NEW_LINE> return zeros
Abstract object representing an RNN cell. An RNN cell, in the most abstract setting, is anything that has a state -- a vector of floats of size self.state_size -- and performs some operation that takes inputs of size self.input_size. This operation results in an output of size self.output_size and a new state. This module provides a number of basic commonly used RNN cells, such as LSTM (Long Short Term Memory) or GRU (Gated Recurrent Unit), and a number of operators that allow add dropouts, projections, or embeddings for inputs. Constructing multi-layer cells is supported by a super-class, MultiRNNCell, defined later. Every RNNCell must have the properties below and and implement __call__ with the following signature.
62598fce50812a4eaa620de8
class TestPyComments(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_name_to_func_map(self): <NEW_LINE> <INDENT> test_file = 'tests/commentsForPy' <NEW_LINE> options = Namespace() <NEW_LINE> options.already = set() <NEW_LINE> options.ex_re = None <NEW_LINE> options.map_holder = MapHolder() <NEW_LINE> options.verbose = False <NEW_LINE> lines, sloc = count_lines_python(test_file, options, 'py') <NEW_LINE> self.assertEqual(lines, 29) <NEW_LINE> self.assertEqual(sloc, 13)
Test functioning of Python line counters.
62598fcea05bb46b3848ac75
class PackDirectory(Action): <NEW_LINE> <INDENT> def created(self): <NEW_LINE> <INDENT> self.conditions = self.get_conditions(PackingFinished) <NEW_LINE> <DEDENT> def execute(self, instance): <NEW_LINE> <INDENT> src = self.evaluate_expression(self.get_parameter(0)) <NEW_LINE> dest = self.evaluate_expression(self.get_parameter(1)) <NEW_LINE> instance.objectPlayer.pack(src, dest) <NEW_LINE> for condition in self.conditions: <NEW_LINE> <INDENT> condition.generate()
Pack directory Parameters: 0: Directory (EXPSTRING, ExpressionParameter) 1: Destination filename (EXPSTRING, ExpressionParameter)
62598fcecc40096d6161a3dc
class DaesoError(DaesoException): <NEW_LINE> <INDENT> pass
Exception for a serious error in Daeso
62598fce099cdd3c636755e4
class PhbstRobot(Robot, RobotWithPhbands): <NEW_LINE> <INDENT> EXT = "PHBST" <NEW_LINE> def write_notebook(self, nbpath=None): <NEW_LINE> <INDENT> nbformat, nbv, nb = self.get_nbformat_nbv_nb(title=None) <NEW_LINE> args = [(l, f.filepath) for l, f in self.items()] <NEW_LINE> nb.cells.extend([ nbv.new_code_cell("robot = abilab.PhbstRobot(*%s)\nrobot.trim_paths()\nrobot" % str(args)), ]) <NEW_LINE> nb.cells.extend(self.get_baserobot_code_cells()) <NEW_LINE> nb.cells.extend(self.get_phbands_code_cells()) <NEW_LINE> return self._write_nb_nbpath(nb, nbpath)
This robot analyzes the results contained in multiple PHBST.nc files. .. rubric:: Inheritance Diagram .. inheritance-diagram:: PhbstRobot
62598fcebe7bc26dc925205e
class ProfileRequestHandler(RequestHandler): <NEW_LINE> <INDENT> @tornado.gen.coroutine <NEW_LINE> def get(self): <NEW_LINE> <INDENT> if not self.current_user: <NEW_LINE> <INDENT> self.redirect("/") <NEW_LINE> <DEDENT> profile = yield self.db.accounts.find_one({"_id": self.current_user.account_id}) <NEW_LINE> statistics = yield self.get_statistics() <NEW_LINE> self.render("profile.html", profile=profile, statistics=statistics, _xsrf=self.xsrf_form_html()) <NEW_LINE> <DEDENT> @tornado.gen.coroutine <NEW_LINE> def post(self): <NEW_LINE> <INDENT> if not self.current_user: <NEW_LINE> <INDENT> self.handle_bad_request() <NEW_LINE> <DEDENT> email = self.get_argument("email") <NEW_LINE> if email: <NEW_LINE> <INDENT> yield self.set_email(email) <NEW_LINE> <DEDENT> self.redirect("/profile") <NEW_LINE> <DEDENT> @tornado.gen.coroutine <NEW_LINE> def get_statistics(self): <NEW_LINE> <INDENT> result = yield self.db.events.aggregate([ { "$match": { "kwargs.account_id": self.current_user.account_id, "type": {"$in": [SystemEventType.MADE_BET.value, SystemEventType.WIN.value]}, } }, {"$group": {"_id": "$type", "count": {"$sum": 1}, "coins": {"$sum": "$kwargs.coins"}}}, ]) <NEW_LINE> return collections.defaultdict(lambda: collections.defaultdict(int), {SystemEventType(doc["_id"]): doc for doc in result["result"]}) <NEW_LINE> <DEDENT> @tornado.gen.coroutine <NEW_LINE> def set_email(self, email): <NEW_LINE> <INDENT> yield self.db.accounts.update({"_id": self.current_user.account_id}, {"$set": { "email": email, "confirmed": False, }}) <NEW_LINE> yield self.log_event(SystemEventType.SET_EMAIL, account_id=self.current_user.account_id, email=email)
User profile handler.
62598fcead47b63b2c5a7c65
class SoftDeletionQueryset(models.QuerySet): <NEW_LINE> <INDENT> def existing(self): <NEW_LINE> <INDENT> return self.filter(deleted_at=None) <NEW_LINE> <DEDENT> def deleted(self): <NEW_LINE> <INDENT> return self.exclude(deleted_at=None) <NEW_LINE> <DEDENT> def delete(self): <NEW_LINE> <INDENT> now = datetime.datetime.utcnow() <NEW_LINE> return super().update( deleted_at=now, updated_at=now ) <NEW_LINE> <DEDENT> def hard_delete(self): <NEW_LINE> <INDENT> return super().delete()
Queryset implementing soft deletion behaviour
62598fcefbf16365ca7944c6
class _TransactionCtx(object): <NEW_LINE> <INDENT> def __enter__(self): <NEW_LINE> <INDENT> global _db_ctx <NEW_LINE> self.should_close_conn = False <NEW_LINE> if not _db_ctx.is_init(): <NEW_LINE> <INDENT> _db_ctx.init() <NEW_LINE> self.should_close_conn = True <NEW_LINE> <DEDENT> _db_ctx.transactions += 1 <NEW_LINE> logging.info('begin transaction...' if _db_ctx.transactions == 1 else 'join current transaction...') <NEW_LINE> return self <NEW_LINE> <DEDENT> def __exit__(self, exc_type, exc_val, exc_tb): <NEW_LINE> <INDENT> global _db_ctx <NEW_LINE> _db_ctx.transactions -= 1 <NEW_LINE> try: <NEW_LINE> <INDENT> if _db_ctx.transactions == 0: <NEW_LINE> <INDENT> if exc_type is None: <NEW_LINE> <INDENT> self.commit() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.rollback() <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> finally: <NEW_LINE> <INDENT> if self.should_close_conn: <NEW_LINE> <INDENT> _db_ctx.cleanup() <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def commit(self): <NEW_LINE> <INDENT> global _db_ctx <NEW_LINE> logging.info('commit transaction...') <NEW_LINE> try: <NEW_LINE> <INDENT> _db_ctx.connection.commit() <NEW_LINE> logging.info('commit ok.') <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> logging.warning('commit failed. try rollback...') <NEW_LINE> _db_ctx.connection.rollback() <NEW_LINE> logging.warning('rollback ok.') <NEW_LINE> raise <NEW_LINE> <DEDENT> <DEDENT> def rollback(self): <NEW_LINE> <INDENT> global _db_ctx <NEW_LINE> logging.warning('rollback transaction...') <NEW_LINE> _db_ctx.connection.rollback() <NEW_LINE> logging.info('rollback ok.')
事务嵌套比Connection嵌套复杂一点,因为事务嵌套需要计数, 每遇到一层嵌套就+1,离开一层嵌套就-1,最后到0时提交事务
62598fcebf627c535bcb18b7
class FireAnt(Ant): <NEW_LINE> <INDENT> name = 'Fire' <NEW_LINE> damage = 3 <NEW_LINE> "*** YOUR CODE HERE ***" <NEW_LINE> implemented = True <NEW_LINE> food_cost = 4 <NEW_LINE> armor = 1 <NEW_LINE> implemented = True <NEW_LINE> def reduce_armor(self, amount): <NEW_LINE> <INDENT> self.armor -= amount <NEW_LINE> if self.armor <= 0: <NEW_LINE> <INDENT> for bee in self.place.bees[:]: <NEW_LINE> <INDENT> bee.armor -= self.damage <NEW_LINE> if bee.armor <= 0: <NEW_LINE> <INDENT> bee.place.remove_insect(bee) <NEW_LINE> <DEDENT> <DEDENT> self.place.remove_insect(self)
FireAnt cooks any Bee in its Place when it expires.
62598fce283ffb24f3cf3c90
class GoGoAnime: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.url = 'https://gogoanime.pe/' <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "GoGoAnime" <NEW_LINE> <DEDENT> def _findAnimeElements(self, tree): <NEW_LINE> <INDENT> return tree.xpath("//ul[contains(concat(' ', normalize-space(@class), ' '), ' items ')]/*") <NEW_LINE> <DEDENT> def getRecent(self): <NEW_LINE> <INDENT> animes = [] <NEW_LINE> animeElements = [] <NEW_LINE> with requests.Session() as session: <NEW_LINE> <INDENT> session.headers = {'User-Agent': 'Mozilla/5.0'} <NEW_LINE> response = session.get(self.url) <NEW_LINE> if response.status_code == 200: <NEW_LINE> <INDENT> tree = html.fromstring(response.text) <NEW_LINE> animeElements = self._findAnimeElements(tree) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> logger.error(f"Received {response.status_code} from {str(self)}") <NEW_LINE> <DEDENT> <DEDENT> for animeElement in animeElements: <NEW_LINE> <INDENT> title = animeElement.xpath(".//p/a[@title]")[0].text_content() <NEW_LINE> link = animeElement.xpath(".//p/a[@title]/@href")[0] <NEW_LINE> if link.startswith('/'): <NEW_LINE> <INDENT> link = self.url + link <NEW_LINE> <DEDENT> ep = animeElement.xpath(".//p[contains(concat(' ', normalize-space(@class), ' '), ' episode ')]")[0].text_content() <NEW_LINE> animes.append(Source(title=title, progress=ep, link=link)) <NEW_LINE> <DEDENT> return animes[:16]
GoGoAnime: provides a minimal GoGoAnime api.
62598fcedc8b845886d539c8
class MultiLevelImplicitDecoder(tf.keras.Model): <NEW_LINE> <INDENT> def __init__(self, num_level: int = 3, num_filter: Union[int, Sequence[int]] = 128, num_out_channel: int = 1, implicit_net_type: str = 'imnet', share_net_level_groups: Sequence[Sequence[int]] = None, activation_params: Dict[str, Any] = None, name: str = 'MultiLevelImplicitDecoder'): <NEW_LINE> <INDENT> super(MultiLevelImplicitDecoder, self).__init__(name=name) <NEW_LINE> self.num_level = num_level <NEW_LINE> if isinstance(num_filter, int): <NEW_LINE> <INDENT> num_filter = [num_filter for i in range(self.num_level)] <NEW_LINE> <DEDENT> elif isinstance(num_filter, Sequence): <NEW_LINE> <INDENT> assert len(num_filter) >= self.num_level <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise TypeError('num_filter must be either int or list of int.') <NEW_LINE> <DEDENT> self.num_filter = num_filter <NEW_LINE> self.num_out_channel = num_out_channel <NEW_LINE> self.implicit_net_type = implicit_net_type <NEW_LINE> if share_net_level_groups is None: <NEW_LINE> <INDENT> share_net_level_groups = [] <NEW_LINE> <DEDENT> elif share_net_level_groups == 'all': <NEW_LINE> <INDENT> share_net_level_groups = [list(range(self.num_level))] <NEW_LINE> <DEDENT> self.share_net_level_groups = share_net_level_groups <NEW_LINE> self.implicit_nets = [None for i in range(self.num_level)] <NEW_LINE> for level_group in self.share_net_level_groups: <NEW_LINE> <INDENT> if len(level_group) < 1: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> level0 = level_group[0] <NEW_LINE> if self.implicit_net_type == 'imnet': <NEW_LINE> <INDENT> self.implicit_nets[level0] = network_imnet.ImNet( num_out_channel=self.num_out_channel, num_filter=self.num_filter[level0], activation_params=activation_params, name='ImNet_' + str(level0)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError('Unknown implicit_net_type: %s' % self.implicit_net_type) <NEW_LINE> <DEDENT> for level in level_group[1:]: <NEW_LINE> <INDENT> self.implicit_nets[level] = 
self.implicit_nets[level0] <NEW_LINE> <DEDENT> <DEDENT> for i in range(self.num_level): <NEW_LINE> <INDENT> if self.implicit_nets[i] is not None: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> elif self.implicit_net_type == 'imnet': <NEW_LINE> <INDENT> self.implicit_nets[i] = network_imnet.ImNet( num_out_channel=self.num_out_channel, num_filter=self.num_filter[i], activation_params=activation_params, name='ImNet_' + str(i)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError('Unknown implicit_net_type: %s' % self.implicit_net_type) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def call(self, inputs: Sequence[tf.Tensor], levels: Sequence[int] = None, training: bool = False) -> Sequence[tf.Tensor]: <NEW_LINE> <INDENT> if levels is None: <NEW_LINE> <INDENT> levels = list(range(self.num_level)) <NEW_LINE> <DEDENT> out = [] <NEW_LINE> for level_i, input_i in zip(levels, inputs): <NEW_LINE> <INDENT> out_i = self.implicit_nets[level_i](input_i, training=training) <NEW_LINE> out.append(out_i) <NEW_LINE> <DEDENT> return out
Decodes multi-level latent codes into SDF.
62598fceadb09d7d5dc0a986
class TimestampError(TxValidationError): <NEW_LINE> <INDENT> pass
Transaction timestamp is smaller or equal to one parent's timestamp
62598fced8ef3951e32c8061
class RationalJerks(object): <NEW_LINE> <INDENT> def __init__(self, start, stop, step, state_0=[46, 1.0, 10], alpha=10.3): <NEW_LINE> <INDENT> self.alpha = alpha <NEW_LINE> self.state_0 = state_0 <NEW_LINE> self.t = np.arange(start, stop, step) <NEW_LINE> self.states = self.get_states() <NEW_LINE> <DEDENT> def lorenz_f(self, state, t): <NEW_LINE> <INDENT> x, y, z = state <NEW_LINE> return z, -self.alpha * y + z, -x + x * y <NEW_LINE> <DEDENT> def get_states(self): <NEW_LINE> <INDENT> return odeint(self.lorenz_f, self.state_0, self.t)
description of class
62598fceab23a570cc2d4f73
class CollectionMatcher(BaseMatcher): <NEW_LINE> <INDENT> CLASS = None <NEW_LINE> def __init__(self, of=None): <NEW_LINE> <INDENT> assert self.CLASS, "must specify collection type to match" <NEW_LINE> self.of = self._validate_argument(of) <NEW_LINE> <DEDENT> def _validate_argument(self, arg): <NEW_LINE> <INDENT> if arg is None: <NEW_LINE> <INDENT> return arg <NEW_LINE> <DEDENT> if isinstance(arg, type): <NEW_LINE> <INDENT> return InstanceOf(arg) <NEW_LINE> <DEDENT> if not isinstance(arg, BaseMatcher): <NEW_LINE> <INDENT> raise TypeError( "argument of %s can be a type or a matcher (got %r)" % ( self.__class__.__name__, type(arg))) <NEW_LINE> <DEDENT> return arg <NEW_LINE> <DEDENT> def match(self, value): <NEW_LINE> <INDENT> if not isinstance(value, self.CLASS): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> if self.of is not None: <NEW_LINE> <INDENT> return all(self.of == item for item in value) <NEW_LINE> <DEDENT> return True <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> of = "" if self.of is None else "[%r]" % (self.of,) <NEW_LINE> return "<%s%s>" % (self.__class__.__name__, of)
Base class for collections' matchers. This class shouldn't be used directly.
62598fce5fdd1c0f98e5e398
class UsersView(mixins.ListModelMixin, mixins.RetrieveModelMixin, viewsets.GenericViewSet): <NEW_LINE> <INDENT> serializer_class = UserSerializer <NEW_LINE> filter_backends = (filters.OrderingFilter, ) <NEW_LINE> ordering = ('id', ) <NEW_LINE> queryset = User.objects.all() <NEW_LINE> def get_queryset(self): <NEW_LINE> <INDENT> if not self.request.user.is_authenticated(): <NEW_LINE> <INDENT> return User.objects.none() <NEW_LINE> <DEDENT> if self.request.user.groups.filter(name="show_me_everything").exists(): <NEW_LINE> <INDENT> return User.objects.all() <NEW_LINE> <DEDENT> return User.objects.filter(pk=self.request.user.pk)
Get more information about users
62598fce4527f215b58ea2da
class GitTreeRef(Model): <NEW_LINE> <INDENT> _attribute_map = { '_links': {'key': '_links', 'type': 'ReferenceLinks'}, 'object_id': {'key': 'objectId', 'type': 'str'}, 'size': {'key': 'size', 'type': 'long'}, 'tree_entries': {'key': 'treeEntries', 'type': '[GitTreeEntryRef]'}, 'url': {'key': 'url', 'type': 'str'} } <NEW_LINE> def __init__(self, _links=None, object_id=None, size=None, tree_entries=None, url=None): <NEW_LINE> <INDENT> super(GitTreeRef, self).__init__() <NEW_LINE> self._links = _links <NEW_LINE> self.object_id = object_id <NEW_LINE> self.size = size <NEW_LINE> self.tree_entries = tree_entries <NEW_LINE> self.url = url
GitTreeRef. :param _links: :type _links: :class:`ReferenceLinks <git.v4_1.models.ReferenceLinks>` :param object_id: SHA1 hash of git object :type object_id: str :param size: Sum of sizes of all children :type size: long :param tree_entries: Blobs and trees under this tree :type tree_entries: list of :class:`GitTreeEntryRef <git.v4_1.models.GitTreeEntryRef>` :param url: Url to tree :type url: str
62598fcebe7bc26dc925205f
class PathSet: <NEW_LINE> <INDENT> def __init__(self, paths=()): <NEW_LINE> <INDENT> self._covers_cache = {} <NEW_LINE> self._paths = set() <NEW_LINE> for path in paths: <NEW_LINE> <INDENT> self.add(path) <NEW_LINE> <DEDENT> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> for path in self._paths: <NEW_LINE> <INDENT> yield path <NEW_LINE> <DEDENT> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return len(self._paths) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "<PathSet: {}>".format(repr(self._paths)) <NEW_LINE> <DEDENT> def add(self, new_path): <NEW_LINE> <INDENT> if self.covers(new_path): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> for existing_path in self._paths.copy(): <NEW_LINE> <INDENT> if list_starts_with(existing_path, new_path): <NEW_LINE> <INDENT> self._paths.remove(existing_path) <NEW_LINE> <DEDENT> <DEDENT> self._covers_cache = {} <NEW_LINE> self._paths.add(new_path) <NEW_LINE> return True <NEW_LINE> <DEDENT> def covers(self, candidate_path): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self._covers_cache[candidate_path] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> result = False <NEW_LINE> for existing_path in self._paths: <NEW_LINE> <INDENT> if list_starts_with(candidate_path, existing_path): <NEW_LINE> <INDENT> result = True <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> self._covers_cache[candidate_path] = result <NEW_LINE> return result
Collects metadata paths and stores only the highest levels ones. >>> s = PathSet() >>> s.add(("foo", "bar")) >>> s.add(("foo",)) >>> s {"foo"}
62598fce3d592f4c4edbb2c1
class Auth(_widget.Abstract): <NEW_LINE> <INDENT> def __init__(self, uid: str, **kwargs): <NEW_LINE> <INDENT> super().__init__(uid, **kwargs) <NEW_LINE> self._app_id = _reg.get('vkontakte.app_id') <NEW_LINE> if not self._app_id: <NEW_LINE> <INDENT> raise RuntimeError("Settings parameter 'vkontakte.app_id' is not defined.") <NEW_LINE> <DEDENT> self._scope = kwargs.get('scope', ('wall', 'offline', 'photos')) <NEW_LINE> self._access_url = kwargs.get('access_url', '') <NEW_LINE> self._access_token = kwargs.get('access_token', '') <NEW_LINE> self._user_id = kwargs.get('user_id', '') <NEW_LINE> self._group_id = kwargs.get('group_id', '') <NEW_LINE> self._css += ' widget-vk-oauth' <NEW_LINE> <DEDENT> @property <NEW_LINE> def scope(self) -> tuple: <NEW_LINE> <INDENT> return self._scope <NEW_LINE> <DEDENT> @property <NEW_LINE> def access_url(self) -> str: <NEW_LINE> <INDENT> return self._access_url <NEW_LINE> <DEDENT> @property <NEW_LINE> def group_id(self) -> str: <NEW_LINE> <INDENT> return self._group_id <NEW_LINE> <DEDENT> def _get_element(self, **kwargs) -> _html.Element: <NEW_LINE> <INDENT> authorize_url = _router.url('https://oauth.vk.com/authorize', query={ 'client_id': self._app_id, 'scope': ','.join(self.scope), 'redirect_uri': 'https://oauth.vk.com/blank.html', 'display': 'page', 'response_type': 'token', 'v': '5.37', }) <NEW_LINE> wrapper = _html.TagLessElement() <NEW_LINE> wrapper.append(_widget.input.Text( uid=self.uid + '[access_url]', weight=10, label=_lang.t('vkontakte@access_url'), help=_lang.t('vkontakte@access_url_help', {'link': authorize_url}), value=self.access_url, required=True, ).renderable()) <NEW_LINE> wrapper.append(_widget.input.Integer( uid=self.uid + '[group_id]', weight=20, label=_lang.t('vkontakte@group_id'), value=self.group_id, h_size='col-sm-2' ).renderable()) <NEW_LINE> return wrapper
PytSite Vkontakte oAuth Widget.
62598fce8a349b6b4368664d
class ValidateNamingConvention(pyblish.backend.plugin.Validator): <NEW_LINE> <INDENT> families = ['model', 'animation', 'animRig'] <NEW_LINE> hosts = ['maya'] <NEW_LINE> version = (0, 1, 0) <NEW_LINE> pattern = re.compile("^\w+_\w{3}(Shape)?$") <NEW_LINE> def process(self, context): <NEW_LINE> <INDENT> for instance in pyblish.backend.plugin.instances_by_plugin( instances=context, plugin=self): <NEW_LINE> <INDENT> mismatches = list() <NEW_LINE> for node in instance: <NEW_LINE> <INDENT> if not self.pattern.match(node): <NEW_LINE> <INDENT> mismatches.append(node) <NEW_LINE> <DEDENT> <DEDENT> if mismatches: <NEW_LINE> <INDENT> msg = "The following nodes were misnamed\n" <NEW_LINE> for node in mismatches: <NEW_LINE> <INDENT> msg += "\t{0}\n".format(node) <NEW_LINE> <DEDENT> exc = ValueError(msg) <NEW_LINE> exc.nodes = mismatches <NEW_LINE> yield instance, exc <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> yield instance, None
Ensure each included node ends with a three-letter, upper-case type Example: clavicle_CTL <- Good shoulder <- Bad Raises: ValueError with an added .nodes attribute for each node found to be misnamed.
62598fce956e5f7376df5884
@IN.register('DBObjectUnknownTypeException', type = 'exception_handler') <NEW_LINE> class DBObjectUnknownTypeException(DBException): <NEW_LINE> <INDENT> pass
Exception DBObjectUnknownTypeException.
62598fce851cf427c66b86c0
class NonMaxSuppression(OnnxOpConverter): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def _impl_v10(cls, inputs, attr, params): <NEW_LINE> <INDENT> boxes = inputs[0] <NEW_LINE> scores = inputs[1] <NEW_LINE> max_output_boxes_per_class = inputs[2] <NEW_LINE> iou_threshold = inputs[3] <NEW_LINE> score_threshold = inputs[4] <NEW_LINE> boxes_dtype = infer_type(boxes).checked_type.dtype <NEW_LINE> if attr.get("center_point_box", 0) != 0: <NEW_LINE> <INDENT> xc, yc, w, h = _op.split(boxes, 4, axis=2) <NEW_LINE> half_w = w / _expr.const(2.0, boxes_dtype) <NEW_LINE> half_h = h / _expr.const(2.0, boxes_dtype) <NEW_LINE> x1 = xc - half_w <NEW_LINE> x2 = xc + half_w <NEW_LINE> y1 = yc - half_h <NEW_LINE> y2 = yc + half_h <NEW_LINE> boxes = _op.concatenate([y1, x1, y2, x2], axis=2) <NEW_LINE> <DEDENT> if iou_threshold is None: <NEW_LINE> <INDENT> iou_threshold = _expr.const(0.0, dtype="float32") <NEW_LINE> <DEDENT> if score_threshold is None: <NEW_LINE> <INDENT> score_threshold = _expr.const(0.0, dtype="float32") <NEW_LINE> <DEDENT> def conditionally_squeeze_scalar(x): <NEW_LINE> <INDENT> rank = len(infer_shape(x)) <NEW_LINE> assert rank <= 1, "nms thresholds must be scalars" <NEW_LINE> if rank == 1: <NEW_LINE> <INDENT> return _op.squeeze(x, [0]) <NEW_LINE> <DEDENT> return x <NEW_LINE> <DEDENT> max_output_boxes_per_class = conditionally_squeeze_scalar(max_output_boxes_per_class) <NEW_LINE> iou_threshold = conditionally_squeeze_scalar(iou_threshold) <NEW_LINE> score_threshold = conditionally_squeeze_scalar(score_threshold) <NEW_LINE> nms_out = _op.vision.all_class_non_max_suppression( boxes, scores, max_output_boxes_per_class, iou_threshold, score_threshold ) <NEW_LINE> return _op.strided_slice(nms_out[0], _op.const([0], dtype="int64"), nms_out[1])
Operator converter for NonMaxSuppression.
62598fcead47b63b2c5a7c69
class UpdateModuleView(PermissionRequiredMixin, UpdateView): <NEW_LINE> <INDENT> permission_required = 'loads.change_module' <NEW_LINE> model = Module <NEW_LINE> fields = ['module_code', 'module_name', 'campus', 'size', 'semester', 'contact_hours', 'admin_hours', 'assessment_hours', 'coordinator', 'moderators', 'programmes', 'lead_programme'] <NEW_LINE> def get_form(self, form_class=None): <NEW_LINE> <INDENT> form = super(UpdateModuleView, self).get_form(form_class) <NEW_LINE> staff = get_object_or_404(Staff, user=self.request.user) <NEW_LINE> package = staff.package <NEW_LINE> package_staff = package.get_all_staff() <NEW_LINE> package_programmes = Programme.objects.all().filter(package=package) <NEW_LINE> form.fields['coordinator'].queryset = package_staff <NEW_LINE> form.fields['moderators'].queryset = package_staff <NEW_LINE> form.fields['programmes'].queryset = package_programmes <NEW_LINE> form.fields['lead_programme'].queryset = package_programmes <NEW_LINE> return form <NEW_LINE> <DEDENT> def get_success_url(self): <NEW_LINE> <INDENT> return reverse('modules_details', kwargs={'module_id': self.object.pk})
View for editing a Module
62598fce3d592f4c4edbb2c3
class FieldRecalculation(object): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def build(cls, field_calculation, recalculates_on): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> model = apps.get_model(*(recalculates_on['model'].split('.'))) <NEW_LINE> <DEDENT> except LookupError: <NEW_LINE> <INDENT> raise CalculatedFieldError("No existe el modelo de recalculo: %s" % recalculates_on['model']) <NEW_LINE> <DEDENT> return FieldRecalculationOnRelated(field_calculation, model, recalculates_on.get('if_post_save')) <NEW_LINE> <DEDENT> def must_recalculate(self, instance): <NEW_LINE> <INDENT> raise NotImplementedError('Se implementa en las clases de abajo.')
Clase base de un recalculo del campo.
62598fcedc8b845886d539cc
class FixedOffset(datetime.tzinfo): <NEW_LINE> <INDENT> def __init__(self, offset): <NEW_LINE> <INDENT> self.__offset = datetime.timedelta(minutes = offset) <NEW_LINE> hr, min = divmod( offset, 60 ) <NEW_LINE> self.__name= "GMT{0:+03.0f}:{1:02.0f}".format(hr,min) <NEW_LINE> <DEDENT> def utcoffset(self, dt): <NEW_LINE> <INDENT> return self.__offset <NEW_LINE> <DEDENT> def tzname(self, dt): <NEW_LINE> <INDENT> return self.__name <NEW_LINE> <DEDENT> def dst(self, dt): <NEW_LINE> <INDENT> return ZERO
Fixed offset in minutes east from UTC.
62598fce956e5f7376df5885
class JpegStillArrayWriter(QtCore.QObject): <NEW_LINE> <INDENT> __CAPTURE_FRAME__ = False <NEW_LINE> __PROCESS_FRAME__ = False <NEW_LINE> __CAPTURE_GROUP__ = False <NEW_LINE> __PROCESS_GROUP__ = True <NEW_LINE> def __init__(self, settings, config): <NEW_LINE> <INDENT> QtCore.QObject.__init__(self) <NEW_LINE> self._settings = settings <NEW_LINE> self._config = config <NEW_LINE> self._settings.settingChanged.connect(self.settingChanged) <NEW_LINE> self._recorddirectory = settings.getSetting("recorddirectory") <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def getModule(cls, settings, config): <NEW_LINE> <INDENT> return cls(settings, config) <NEW_LINE> <DEDENT> def processGroup(self, framegroup): <NEW_LINE> <INDENT> if framegroup is not None: <NEW_LINE> <INDENT> digits = len(str(len(framegroup))) <NEW_LINE> outdir = "%s%svideo_%s" % (self._recorddirectory, os.path.sep, time.strftime("%Y-%m-%d_%H-%M-%S"), ) <NEW_LINE> if not os.path.exists(outdir): <NEW_LINE> <INDENT> os.mkdir(outdir) <NEW_LINE> <DEDENT> idx = 1 <NEW_LINE> timingFile = open("%s%stiming.txt" % (outdir, os.path.sep), 'w') <NEW_LINE> f0 = framegroup[0] <NEW_LINE> startTimestamp = f0.timestamp <NEW_LINE> timingFile.write("#timestamp=%s\n" % (startTimestamp,)) <NEW_LINE> timingFile.write("#frames=%s\n" % (len(framegroup),)) <NEW_LINE> timingFile.write("#streams=%s\n" % (len(f0),)) <NEW_LINE> for frameset in framegroup: <NEW_LINE> <INDENT> i = 0 <NEW_LINE> for frame in frameset: <NEW_LINE> <INDENT> filename = "%s%sframe-d%s-%s.jpg" % (outdir, os.path.sep, i, str(idx).zfill(digits)) <NEW_LINE> timingFile.write("%s\t%s\t%s\t%s\n" % (idx-1, frameset.timestamp, i, filename,)) <NEW_LINE> frame.asQPixmap().save(filename) <NEW_LINE> i+=1 <NEW_LINE> <DEDENT> idx+=1 <NEW_LINE> <DEDENT> timingFile.close() <NEW_LINE> <DEDENT> <DEDENT> @Slot(str,object) <NEW_LINE> def settingChanged(self, name, value): <NEW_LINE> <INDENT> if name == "recorddirectory": <NEW_LINE> <INDENT> self._recorddirectory = value
Writes a frame group to file in the form of 1 jpg per frameset per frame
62598fce7b180e01f3e49257
class cl_platform_info(cl_uenum): <NEW_LINE> <INDENT> CL_PLATFORM_PROFILE = 0x0900 <NEW_LINE> CL_PLATFORM_VERSION = 0x0901 <NEW_LINE> CL_PLATFORM_NAME = 0x0902 <NEW_LINE> CL_PLATFORM_VENDOR = 0x0903 <NEW_LINE> CL_PLATFORM_EXTENSIONS = 0x0904
The set of possible parameter names used with the :func:`clGetPlatformInfo` function.
62598fce3346ee7daa33784f
class OAuth2CodeRequest(models.Model): <NEW_LINE> <INDENT> owner = models.ForeignKey('auth.User', related_name='OAuth2CodeRequests', on_delete=models.CASCADE) <NEW_LINE> name = models.CharField(max_length=40) <NEW_LINE> state = models.CharField(max_length=200) <NEW_LINE> timestamp = models.DateTimeField()
Model for Stracking OAuth2 states and users.
62598fce5fdd1c0f98e5e39c
class ListAlertsResponse(object): <NEW_LINE> <INDENT> swagger_types = { 'request_id': 'str', 'took': 'float', 'data': 'list[BaseAlert]', 'paging': 'PageDetails' } <NEW_LINE> attribute_map = { 'request_id': 'requestId', 'took': 'took', 'data': 'data', 'paging': 'paging' } <NEW_LINE> def __init__(self, request_id=None, took=0.0, data=None, paging=None): <NEW_LINE> <INDENT> self._request_id = None <NEW_LINE> self._took = None <NEW_LINE> self._data = None <NEW_LINE> self._paging = None <NEW_LINE> self.discriminator = None <NEW_LINE> self.request_id = request_id <NEW_LINE> self.took = took <NEW_LINE> if data is not None: <NEW_LINE> <INDENT> self.data = data <NEW_LINE> <DEDENT> if paging is not None: <NEW_LINE> <INDENT> self.paging = paging <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def request_id(self): <NEW_LINE> <INDENT> return self._request_id <NEW_LINE> <DEDENT> @request_id.setter <NEW_LINE> def request_id(self, request_id): <NEW_LINE> <INDENT> if request_id is None: <NEW_LINE> <INDENT> raise ValueError("Invalid value for `request_id`, must not be `None`") <NEW_LINE> <DEDENT> self._request_id = request_id <NEW_LINE> <DEDENT> @property <NEW_LINE> def took(self): <NEW_LINE> <INDENT> return self._took <NEW_LINE> <DEDENT> @took.setter <NEW_LINE> def took(self, took): <NEW_LINE> <INDENT> if took is None: <NEW_LINE> <INDENT> raise ValueError("Invalid value for `took`, must not be `None`") <NEW_LINE> <DEDENT> self._took = took <NEW_LINE> <DEDENT> @property <NEW_LINE> def data(self): <NEW_LINE> <INDENT> return self._data <NEW_LINE> <DEDENT> @data.setter <NEW_LINE> def data(self, data): <NEW_LINE> <INDENT> self._data = data <NEW_LINE> <DEDENT> @property <NEW_LINE> def paging(self): <NEW_LINE> <INDENT> return self._paging <NEW_LINE> <DEDENT> @paging.setter <NEW_LINE> def paging(self, paging): <NEW_LINE> <INDENT> self._paging = paging <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in six.iteritems(self.swagger_types): 
<NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pprint.pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, ListAlertsResponse): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other
NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually.
62598fce3d592f4c4edbb2c5
class BaseCommand(metaclass=BaseCommandMeta): <NEW_LINE> <INDENT> names = [] <NEW_LINE> def __init__(self, app_state, command_processor, tab_manager): <NEW_LINE> <INDENT> self._app_state = app_state <NEW_LINE> self._command_processor = command_processor <NEW_LINE> self._tab_manager = tab_manager <NEW_LINE> <DEDENT> def run(self, args): <NEW_LINE> <INDENT> raise NotImplementedError('...')
Base command that all commands are supposed to derive from. Automatically registers given command in the registry via own metaclass.
62598fce283ffb24f3cf3c96
class Conv2d(Affine): <NEW_LINE> <INDENT> def __init__(self, out_channels, kernel_size, strides, padding="same", dilation=1, bias=True, initializer=None, name=None): <NEW_LINE> <INDENT> super().__init__(bias=bias, initializer=initializer, name=name) <NEW_LINE> self.out_channels = out_channels <NEW_LINE> self.kernel_size = kernel_size <NEW_LINE> self.strides = strides <NEW_LINE> self.padding = padding <NEW_LINE> self.dilation = dilation <NEW_LINE> <DEDENT> @build_with_name_scope <NEW_LINE> def build_parameters(self, x): <NEW_LINE> <INDENT> self.in_channels = int(x.shape[-1]) <NEW_LINE> self.kernel = tf.Variable(tf.random.normal((self.kernel_size, self.kernel_size, self.in_channels, self.out_channels)), trainable=True) <NEW_LINE> if self.use_bias: <NEW_LINE> <INDENT> self.bias = tf.Variable(tf.random.normal([self.out_channels]), trainable=True) <NEW_LINE> <DEDENT> self.reset_parameters() <NEW_LINE> <DEDENT> def forward(self, x): <NEW_LINE> <INDENT> x = tf.nn.conv2d( x, filter=self.normalized_kernel, strides=self.strides, padding=self.padding.upper(), dilations=self.dilation) <NEW_LINE> if self.use_bias: <NEW_LINE> <INDENT> x = tf.nn.bias_add(x, self.bias) <NEW_LINE> <DEDENT> return x <NEW_LINE> <DEDENT> def extra_repr(self): <NEW_LINE> <INDENT> return "({}, {}, {}, {}, bias={})".format(self.out_channels, self.kernel_size, self.strides, self.padding, self.use_bias)
Applies 2d convolutional transformation (and bias) to input.
62598fceec188e330fdf8ca7
class UrlRewriteNyaa(object): <NEW_LINE> <INDENT> def validator(self): <NEW_LINE> <INDENT> from flexget import validator <NEW_LINE> root = validator.factory() <NEW_LINE> root.accept('choice').accept_choices(CATEGORIES) <NEW_LINE> advanced = root.accept('dict') <NEW_LINE> advanced.accept('choice', key='category').accept_choices(CATEGORIES) <NEW_LINE> advanced.accept('choice', key='filter').accept_choices(FILTERS) <NEW_LINE> return root <NEW_LINE> <DEDENT> def search(self, entry, config): <NEW_LINE> <INDENT> if not isinstance(config, dict): <NEW_LINE> <INDENT> config = {'category': config} <NEW_LINE> <DEDENT> config.setdefault('category', 'anime') <NEW_LINE> config.setdefault('filter', 'all') <NEW_LINE> entries = set() <NEW_LINE> for search_string in entry.get('search_strings', [entry['title']]): <NEW_LINE> <INDENT> name = normalize_unicode(search_string) <NEW_LINE> url = 'http://www.nyaa.eu/?page=rss&cats=%s&filter=%s&term=%s' % ( CATEGORIES[config['category']], FILTERS.index(config['filter']), urllib.quote(name.encode('utf-8'))) <NEW_LINE> log.debug('requesting: %s' % url) <NEW_LINE> rss = feedparser.parse(url) <NEW_LINE> status = rss.get('status', False) <NEW_LINE> if status != 200: <NEW_LINE> <INDENT> raise plugin.PluginWarning('Search result not 200 (OK), received %s' % status) <NEW_LINE> <DEDENT> ex = rss.get('bozo_exception', False) <NEW_LINE> if ex: <NEW_LINE> <INDENT> raise plugin.PluginWarning('Got bozo_exception (bad feed)') <NEW_LINE> <DEDENT> for item in rss.entries: <NEW_LINE> <INDENT> entry = Entry() <NEW_LINE> entry['title'] = item.title <NEW_LINE> entry['url'] = item.link <NEW_LINE> entries.add(entry) <NEW_LINE> <DEDENT> <DEDENT> return entries <NEW_LINE> <DEDENT> def url_rewritable(self, task, entry): <NEW_LINE> <INDENT> return entry['url'].startswith('http://www.nyaa.eu/?page=torrentinfo&tid=') <NEW_LINE> <DEDENT> def url_rewrite(self, task, entry): <NEW_LINE> <INDENT> entry['url'] = entry['url'].replace('torrentinfo', 'download')
Nyaa urlrewriter and search plugin.
62598fceab23a570cc2d4f76
class Header(PmmlBinding): <NEW_LINE> <INDENT> def toPFA(self, options, context): <NEW_LINE> <INDENT> raise NotImplementedError
Represents a <Header> tag and provides methods to convert to PFA.
62598fcead47b63b2c5a7c6d
class cached_property(object): <NEW_LINE> <INDENT> def __init__(self, fget=None, fset=None, fdel=None, doc=None, name=None): <NEW_LINE> <INDENT> self.fget = fget <NEW_LINE> self.fset = fset <NEW_LINE> self.fdel = fdel <NEW_LINE> if doc is None and fget is not None: <NEW_LINE> <INDENT> doc = fget.__doc__ <NEW_LINE> <DEDENT> self.__doc__ = doc <NEW_LINE> if name is None and fget is not None: <NEW_LINE> <INDENT> name = fget.__name__ <NEW_LINE> <DEDENT> self.__name__ = name <NEW_LINE> if fget is not None: <NEW_LINE> <INDENT> self.__module__ = fget.__module__ <NEW_LINE> <DEDENT> <DEDENT> def __get__(self, obj, objtype=None): <NEW_LINE> <INDENT> if obj is None: <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> if self.fget is None: <NEW_LINE> <INDENT> raise AttributeError("unreadable attribute") <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> return obj.__dict__[self.__name__] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> value = obj.__dict__[self.__name__] = self.fget(obj) <NEW_LINE> return value <NEW_LINE> <DEDENT> <DEDENT> def __set__(self, obj, value): <NEW_LINE> <INDENT> if obj is None: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> if self.fset is not None: <NEW_LINE> <INDENT> value = self.fset(obj, value) <NEW_LINE> <DEDENT> obj.__dict__[self.__name__] = value <NEW_LINE> <DEDENT> def __delete__(self, obj): <NEW_LINE> <INDENT> if obj is None: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> value = obj.__dict__.pop(self.__name__) <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if self.fdel is not None: <NEW_LINE> <INDENT> fdel_args = [obj] <NEW_LINE> if len(getargspec(self.fdel)[0]) == 2: <NEW_LINE> <INDENT> fdel_args.append(value) <NEW_LINE> <DEDENT> self.fdel(*fdel_args) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def setter(self, fset): <NEW_LINE> <INDENT> return self.__class__(self.fget, fset, self.fdel, self.__doc__, self.__name__) <NEW_LINE> <DEDENT> def deleter(self, 
fdel): <NEW_LINE> <INDENT> return self.__class__(self.fget, self.fset, fdel, self.__doc__, self.__name__)
Property descriptor that caches the return value of the get function. Like django.utils.functional.cached_property with the addition of setter and deleter. *Examples* .. code-block:: python @cached_property def connection(self): return Connection() @connection.setter # Prepares stored value def connection(self, value): if value is None: raise TypeError("Connection must be a connection") return value @connection.deleter def connection(self, value): # Additional action to do at del(self.attr) if value is not None: print("Connection %r deleted" % (value, ))
62598fce4a966d76dd5ef2ea
class ExperimentalDataFilter(paf.filter_base.Filter): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> paf.filter_base.Filter.__init__(self) <NEW_LINE> temp_port = port.Port(MAKE_DATA()) <NEW_LINE> self.output_port["experiment_data"] = temp_port <NEW_LINE> self.add_expected_parameter("pump_name", MAKE_DATA(""), "data") <NEW_LINE> self.add_expected_parameter("temporary_data", MAKE_DATA(), MAKE_DATA()) <NEW_LINE> del self.showing_info[-1] <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> experimental_data = self.get_data("main").value <NEW_LINE> file_name = experimental_data["file_name"] <NEW_LINE> pump_name = self["pump_name"].value <NEW_LINE> data = self["temporary_data"][pump_name + "_dataset"] <NEW_LINE> self.fill_port("experiment_data", MAKE_DATA(data)) <NEW_LINE> db = pyxcel.engine.centralizer.Centralizer().database <NEW_LINE> experimental_data = db[file_name] <NEW_LINE> instrument_name = experimental_data['Instrument_name'].value <NEW_LINE> if type_inst(db[instrument_name]["default_value"]) == "XRF": <NEW_LINE> <INDENT> temp_name = pump_name + "_col" <NEW_LINE> data_column = self["temporary_data"][temp_name] <NEW_LINE> column = data_column.value <NEW_LINE> GIXRF_file_header = open(file_name).readline().split() <NEW_LINE> column_name = GIXRF_file_header[column] <NEW_LINE> instrument_name += "_" + column_name <NEW_LINE> <DEDENT> instrument = self["temporary_data"][instrument_name] <NEW_LINE> instrument["name"] = instrument_name <NEW_LINE> self.fill_port("main", instrument)
filter to extract instrument and experimental data from database
62598fce0fa83653e46f52fa
class TestCompareXLSXFiles(ExcelComparisonTest): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.maxDiff = None <NEW_LINE> filename = 'header_image08.xlsx' <NEW_LINE> test_dir = 'xlsxwriter/test/comparison/' <NEW_LINE> self.image_dir = test_dir + 'images/' <NEW_LINE> self.got_filename = test_dir + '_test_' + filename <NEW_LINE> self.exp_filename = test_dir + 'xlsx_files/' + filename <NEW_LINE> self.ignore_files = [] <NEW_LINE> self.ignore_elements = {'xl/worksheets/sheet1.xml': ['<pageMargins', '<pageSetup']} <NEW_LINE> <DEDENT> def test_create_file(self): <NEW_LINE> <INDENT> workbook = Workbook(self.got_filename) <NEW_LINE> worksheet = workbook.add_worksheet() <NEW_LINE> worksheet.write('A1', 'Foo') <NEW_LINE> worksheet.write_comment('B2', 'Some text') <NEW_LINE> worksheet.set_comments_author('John') <NEW_LINE> worksheet.set_header('&L&G', {'image_left': self.image_dir + 'red.jpg'}) <NEW_LINE> workbook.close() <NEW_LINE> self.assertExcelEqual()
Test file created by XlsxWriter against a file created by Excel.
62598fcea219f33f346c6c19
class TestImageProcessing: <NEW_LINE> <INDENT> def setup_method(self): <NEW_LINE> <INDENT> self.hass = get_test_home_assistant() <NEW_LINE> setup_component( self.hass, http.DOMAIN, {http.DOMAIN: {http.CONF_SERVER_PORT: get_test_instance_port()}}) <NEW_LINE> config = { ip.DOMAIN: { 'platform': 'test' }, 'camera': { 'platform': 'demo' }, } <NEW_LINE> setup_component(self.hass, ip.DOMAIN, config) <NEW_LINE> state = self.hass.states.get('camera.demo_camera') <NEW_LINE> self.url = "{0}{1}".format( self.hass.config.api.base_url, state.attributes.get(ATTR_ENTITY_PICTURE)) <NEW_LINE> <DEDENT> def teardown_method(self): <NEW_LINE> <INDENT> self.hass.stop() <NEW_LINE> <DEDENT> @patch('homeassistant.components.camera.demo.DemoCamera.camera_image', autospec=True, return_value=b'Test') <NEW_LINE> def test_get_image_from_camera(self, mock_camera): <NEW_LINE> <INDENT> self.hass.start() <NEW_LINE> ip.scan(self.hass, entity_id='image_processing.test') <NEW_LINE> self.hass.block_till_done() <NEW_LINE> state = self.hass.states.get('image_processing.test') <NEW_LINE> assert mock_camera.called <NEW_LINE> assert state.state == '1' <NEW_LINE> assert state.attributes['image'] == b'Test' <NEW_LINE> <DEDENT> @patch('homeassistant.components.camera.async_get_image', side_effect=HomeAssistantError()) <NEW_LINE> def test_get_image_without_exists_camera(self, mock_image): <NEW_LINE> <INDENT> self.hass.states.remove('camera.demo_camera') <NEW_LINE> ip.scan(self.hass, entity_id='image_processing.test') <NEW_LINE> self.hass.block_till_done() <NEW_LINE> state = self.hass.states.get('image_processing.test') <NEW_LINE> assert mock_image.called <NEW_LINE> assert state.state == '0'
Test class for image processing.
62598fce9f28863672818a86
class StrictContainer: <NEW_LINE> <INDENT> __fields__ = () <NEW_LINE> __field_types__ = () <NEW_LINE> def __init__(self, fields, *args, field_types=None, default=None, **kwargs): <NEW_LINE> <INDENT> self.__fields__ = fields <NEW_LINE> if field_types is None: <NEW_LINE> <INDENT> self.__field_types__ = {} <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if not isinstance(field_types, dict) or not set(field_types.keys()) <= set(fields): <NEW_LINE> <INDENT> raise ValueError("field_types must be a dict with fields as keys") <NEW_LINE> <DEDENT> self.__field_types__ = field_types <NEW_LINE> <DEDENT> for k in fields: <NEW_LINE> <INDENT> if k in vars(self): <NEW_LINE> <INDENT> raise ValueError(f"Name collision: {k}") <NEW_LINE> <DEDENT> if k.startswith("_"): <NEW_LINE> <INDENT> raise ValueError(f"Fields cannot start with _ (got: {k}).") <NEW_LINE> <DEDENT> super().__setattr__(k, default) <NEW_LINE> <DEDENT> <DEDENT> def __setattr__(self, key, val): <NEW_LINE> <INDENT> if not key.startswith("_") and key not in self.__fields__: <NEW_LINE> <INDENT> raise AttributeError(f"Unknown attribute {key}") <NEW_LINE> <DEDENT> if key in self.__field_types__: <NEW_LINE> <INDENT> val = self.__field_types__[key](val) <NEW_LINE> <DEDENT> return super().__setattr__(key, val) <NEW_LINE> <DEDENT> def __contains__(self, val): <NEW_LINE> <INDENT> return val in self.__fields__ <NEW_LINE> <DEDENT> def fields(self): <NEW_LINE> <INDENT> return self.__fields__ <NEW_LINE> <DEDENT> def values(self): <NEW_LINE> <INDENT> return (getattr(self, k) for k in self.__fields__) <NEW_LINE> <DEDENT> def items(self): <NEW_LINE> <INDENT> return ((k, getattr(self, k)) for k in self.__fields__) <NEW_LINE> <DEDENT> def update(self, other=None, **new_fields): <NEW_LINE> <INDENT> if other is not None: <NEW_LINE> <INDENT> if new_fields: <NEW_LINE> <INDENT> raise ValueError("Either other or new_fields can be given") <NEW_LINE> <DEDENT> if hasattr(other, "_fields"): <NEW_LINE> <INDENT> new_fields = dict(zip(other._fields, 
other)) <NEW_LINE> <DEDENT> elif isinstance(other, (dict, StrictContainer)): <NEW_LINE> <INDENT> new_fields = other <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise TypeError(f"Cannot update from {type(other)} type") <NEW_LINE> <DEDENT> <DEDENT> for key, val in new_fields.items(): <NEW_LINE> <INDENT> if key not in self.__fields__: <NEW_LINE> <INDENT> raise AttributeError(f"unknown attribute {key}") <NEW_LINE> <DEDENT> <DEDENT> for key, val in new_fields.items(): <NEW_LINE> <INDENT> setattr(self, key, val) <NEW_LINE> <DEDENT> return self <NEW_LINE> <DEDENT> def get(self, key, default=None): <NEW_LINE> <INDENT> return getattr(self, key, default) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> attr_str = [] <NEW_LINE> for key, val in self.items(): <NEW_LINE> <INDENT> if hasattr(val, "shape") and hasattr(val, "dtype"): <NEW_LINE> <INDENT> val_repr = f"{type(val)} with shape {val.shape}, dtype {val.dtype}" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> val_repr = repr(val) <NEW_LINE> <DEDENT> attr_str.append(f" {key} = {val_repr}") <NEW_LINE> <DEDENT> attr_str = ",\n".join(attr_str) <NEW_LINE> return f"{self.__class__.__qualname__}(\n{attr_str}\n)"
A mutable container with fixed fields (optionally typed).
62598fce7b180e01f3e49259
class FiException(Exception): <NEW_LINE> <INDENT> def __init__(self, status, message, *args, **kwargs): <NEW_LINE> <INDENT> super().__init__(status, message, *args, **kwargs) <NEW_LINE> self.status = status <NEW_LINE> self.message = message
Exception produced by a context broker response
62598fce5fdd1c0f98e5e3a0
class PypiVariables: <NEW_LINE> <INDENT> classifiers = [ "Environment :: Console", "Natural Language :: English", "Programming Language :: Python", "Topic :: Database :: Front-Ends", "Intended Audience :: Developers", "Development Status :: 3 - Alpha", "Operating System :: OS Independent", "License :: OSI Approved :: GNU General Public License v3 (GPLv3)" ] <NEW_LINE> install_requires = ["raven", "requests", "bs4", "matplotlib"] <NEW_LINE> extras_require = {"gui": ["PyQt5"]} <NEW_LINE> name = "comunio" <NEW_LINE> version = General.version_number <NEW_LINE> description = General.project_description <NEW_LINE> url = GitRepository.gitlab_url <NEW_LINE> download_url = General.download_master_zip <NEW_LINE> author = General.author_names <NEW_LINE> author_email = General.author_emails <NEW_LINE> license = General.license_type
Variables used for distributing with setuptools to the python package index
62598fce656771135c489a84
class TestPastAppointmentsUnit: <NEW_LINE> <INDENT> def setup_class(self): <NEW_LINE> <INDENT> self.pastappointments = PastAppointments()
The `TestPastAppointmentsUnit` class contains unit tests for predict functions.
62598fce97e22403b383b31b
class WordDictionary: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.root = dict() <NEW_LINE> pass <NEW_LINE> <DEDENT> def addWord(self, word: str) -> None: <NEW_LINE> <INDENT> cur = self.root <NEW_LINE> for c in word: <NEW_LINE> <INDENT> if c not in cur: <NEW_LINE> <INDENT> cur[c] = dict() <NEW_LINE> <DEDENT> cur = cur[c] <NEW_LINE> <DEDENT> cur['#'] = [] <NEW_LINE> <DEDENT> def search(self, word: str) -> bool: <NEW_LINE> <INDENT> def dfs(word, cur) -> bool: <NEW_LINE> <INDENT> if len(word) == 0 and '#' in cur: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> for c in word: <NEW_LINE> <INDENT> if c == '.': <NEW_LINE> <INDENT> return any(dfs(word[1:], cur[next_dict]) for next_dict in cur) <NEW_LINE> <DEDENT> if c not in cur: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return dfs(word[1:], cur[c]) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return dfs(word, self.root)
CREATED AT: 2022/1/28 1 <= word.length <= 500 word in addWord consists lower-case English letters. word in search consist of '.' or lower-case English letters. At most 50000 calls will be made to addWord and search.
62598fceadb09d7d5dc0a990
class Widget(object): <NEW_LINE> <INDENT> def __init__(self, parent, manage=True, embed=False): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.Parent = parent.ContentArea <NEW_LINE> embed = True <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> self.Parent = parent <NEW_LINE> <DEDENT> self.Create(self.Parent, manage=manage, embed=embed) <NEW_LINE> <DEDENT> @property <NEW_LINE> def CanEmbed(self): <NEW_LINE> <INDENT> return self.__class__.CreateControl == Widget.CreateControl <NEW_LINE> <DEDENT> @property <NEW_LINE> def TopLevelWindow(self): <NEW_LINE> <INDENT> return self.Parent.GetTopLevelParent() <NEW_LINE> <DEDENT> def Query(self, *args, **kwargs): <NEW_LINE> <INDENT> self.SetData(*args, **kwargs) <NEW_LINE> self.TopLevelWindow.SetTitle(self.Title) <NEW_LINE> ShowModal(self.TopLevelWindow) <NEW_LINE> return self.GetData() <NEW_LINE> <DEDENT> def ApplyDialog(self, event=None): <NEW_LINE> <INDENT> event = wx.CommandEvent(wx.wxEVT_COMMAND_BUTTON_CLICKED, wx.ID_OK) <NEW_LINE> wx.PostEvent(self.Parent, event) <NEW_LINE> <DEDENT> Title = 'MadGUI' <NEW_LINE> def Create(self, parent, manage=True, embed=False): <NEW_LINE> <INDENT> if embed and self.CanEmbed: <NEW_LINE> <INDENT> self.Control = parent <NEW_LINE> sizer = self.Sizer = self.CreateControls(self.Control) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.Control = self.CreateControl(parent) <NEW_LINE> sizer = wx.BoxSizer(wx.VERTICAL) <NEW_LINE> sizer.Add(self.Control, 1, wx.EXPAND) <NEW_LINE> <DEDENT> if manage: <NEW_LINE> <INDENT> parent.SetSizer(sizer) <NEW_LINE> <DEDENT> self.Control.SetValidator(Validator(self)) <NEW_LINE> <DEDENT> def CreateControl(self, parent): <NEW_LINE> <INDENT> panel = wx.Panel(parent) <NEW_LINE> sizer = self.Sizer = self.CreateControls(panel) <NEW_LINE> panel.SetSizer(sizer) <NEW_LINE> return panel <NEW_LINE> <DEDENT> def CreateControls(self, parent): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def Validate(self): <NEW_LINE> <INDENT> return 
True <NEW_LINE> <DEDENT> def SetData(self, *args, **kwargs): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> def GetData(self): <NEW_LINE> <INDENT> raise NotImplementedError()
Manage a group of related controls. This is an abstract base class. Subclasses should override the following members: - CreateControls - GetData / SetData - Validate (wx.Validator API) - Title (as attribute)
62598fce283ffb24f3cf3c9a
class SupplierOrderStatus(models.Model): <NEW_LINE> <INDENT> order = models.ForeignKey(SupplierOrder) <NEW_LINE> status = models.CharField(_("Status"), max_length=20, choices=SUPPLIERORDER_STATUS, blank=True) <NEW_LINE> notes = models.CharField(_("Notes"), max_length=100, blank=True) <NEW_LINE> date = models.DateTimeField(_('Date'), blank=True) <NEW_LINE> def __unicode__(self): <NEW_LINE> <INDENT> return self.status <NEW_LINE> <DEDENT> class Meta: <NEW_LINE> <INDENT> verbose_name = _("Supplier Order Status") <NEW_LINE> verbose_name_plural = _("Supplier Order Statuses")
Status of a supplier's order. There will be multiple statuses as it is placed and subsequently processed and received.
62598fce851cf427c66b86c8
class AggregationLayerMetadata(GenericLayerMetadata): <NEW_LINE> <INDENT> _standard_properties = { 'aggregation_attribute': ( 'gmd:identificationInfo/' 'gmd:MD_DataIdentification/' 'gmd:supplementalInformation/' 'inasafe/' 'aggregation_attribute/' 'gco:CharacterString'), 'adult_ratio_attribute': ( 'gmd:identificationInfo/' 'gmd:MD_DataIdentification/' 'gmd:supplementalInformation/' 'inasafe/' 'adult_ratio_attribute/' 'gco:CharacterString'), 'adult_ratio_default': ( 'gmd:identificationInfo/' 'gmd:MD_DataIdentification/' 'gmd:supplementalInformation/' 'inasafe/' 'adult_ratio_default/' 'gco:CharacterString'), 'elderly_ratio_attribute': ( 'gmd:identificationInfo/' 'gmd:MD_DataIdentification/' 'gmd:supplementalInformation/' 'inasafe/' 'elderly_ratio_attribute/' 'gco:CharacterString'), 'elderly_ratio_default': ( 'gmd:identificationInfo/' 'gmd:MD_DataIdentification/' 'gmd:supplementalInformation/' 'inasafe/' 'elderly_ratio_default/' 'gco:CharacterString'), 'female_ratio_attribute': ( 'gmd:identificationInfo/' 'gmd:MD_DataIdentification/' 'gmd:supplementalInformation/' 'inasafe/' 'female_ratio_attribute/' 'gco:CharacterString'), 'female_ratio_default': ( 'gmd:identificationInfo/' 'gmd:MD_DataIdentification/' 'gmd:supplementalInformation/' 'inasafe/' 'female_ratio_default/' 'gco:CharacterString'), 'youth_ratio_attribute': ( 'gmd:identificationInfo/' 'gmd:MD_DataIdentification/' 'gmd:supplementalInformation/' 'inasafe/' 'youth_ratio_attribute/' 'gco:CharacterString'), 'youth_ratio_default': ( 'gmd:identificationInfo/' 'gmd:MD_DataIdentification/' 'gmd:supplementalInformation/' 'inasafe/' 'youth_ratio_default/' 'gco:CharacterString') } <NEW_LINE> _standard_properties = merge_dictionaries( GenericLayerMetadata._standard_properties, _standard_properties)
Metadata class for aggregation layers .. versionadded:: 3.2
62598fce4c3428357761a6d4
class TestCMakeBuildRunner_WithParam_cmake_generator(AbstractTestCMakeProject_WithParam_cmake_generator): <NEW_LINE> <INDENT> CMAKE_PROJECT_FACTORY = CMakeBuildRunnerAsCMakeProjectFactory
Test CMakeBuildRunner with CMakeProject commands cmake_generator param (cmake -G <CMAKE_GENERATOR> option) and ensure that CMake command-line is correct.
62598fce50812a4eaa620def
class FileSystemApplicationLogsConfig(msrest.serialization.Model): <NEW_LINE> <INDENT> _attribute_map = { 'level': {'key': 'level', 'type': 'str'}, } <NEW_LINE> def __init__( self, *, level: Optional[Union[str, "LogLevel"]] = None, **kwargs ): <NEW_LINE> <INDENT> super(FileSystemApplicationLogsConfig, self).__init__(**kwargs) <NEW_LINE> self.level = level
Application logs to file system configuration. :ivar level: Log level. Possible values include: "Off", "Verbose", "Information", "Warning", "Error". :vartype level: str or ~azure.mgmt.web.v2020_06_01.models.LogLevel
62598fcefbf16365ca7944d2
class User(Base): <NEW_LINE> <INDENT> __tablename__ = 'acl_user' <NEW_LINE> email = sqlalchemy.Column(sqlalchemy.String, primary_key=True) <NEW_LINE> role = sqlalchemy.Column(sqlalchemy.String) <NEW_LINE> def __init__(self, email: str, role: str): <NEW_LINE> <INDENT> self.email = email <NEW_LINE> self.role = role <NEW_LINE> super().__init__()
A user that is known to the ACL
62598fce0fa83653e46f52fe
class S3Filter(S3Method): <NEW_LINE> <INDENT> def apply_method(self, r, **attr): <NEW_LINE> <INDENT> representation = r.representation <NEW_LINE> if representation == "html": <NEW_LINE> <INDENT> return self._form(r, **attr) <NEW_LINE> <DEDENT> elif representation == "json": <NEW_LINE> <INDENT> return self._options(r, **attr) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> r.error(501, current.manager.ERROR.BAD_FORMAT) <NEW_LINE> <DEDENT> <DEDENT> def _form(self, r, **attr): <NEW_LINE> <INDENT> r.error(501, current.manager.ERROR.NOT_IMPLEMENTED) <NEW_LINE> <DEDENT> def _options(self, r, **attr): <NEW_LINE> <INDENT> resource = self.resource <NEW_LINE> get_config = resource.get_config <NEW_LINE> options = {} <NEW_LINE> filter_widgets = get_config("filter_widgets", None) <NEW_LINE> if filter_widgets: <NEW_LINE> <INDENT> fresource = current.s3db.resource(resource.tablename) <NEW_LINE> for widget in filter_widgets: <NEW_LINE> <INDENT> if hasattr(widget, "ajax_options"): <NEW_LINE> <INDENT> opts = widget.ajax_options(fresource) <NEW_LINE> if opts and isinstance(opts, dict): <NEW_LINE> <INDENT> options.update(opts) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> options = json.dumps(options) <NEW_LINE> current.response.headers["Content-Type"] = "application/json" <NEW_LINE> return options
Back-end for filter form updates
62598fce4a966d76dd5ef2ee
class UINotificationModel(BaseNotificationListenerModel): <NEW_LINE> <INDENT> sync_url = ZMQSocketURL() <NEW_LINE> pub_url = ZMQSocketURL() <NEW_LINE> identifier = Str() <NEW_LINE> sub_url = ZMQSocketURL()
This is a data model for :class:`UINotification <.ui_notification.UINotification>`, which contains the sync and pub sockets, along with an identifier.
62598fce60cbc95b06364754
class MetaInfo(plugin.DocumentPlugin): <NEW_LINE> <INDENT> def __init__(self, doc): <NEW_LINE> <INDENT> self.load() <NEW_LINE> if doc.__class__ == document.EditorDocument: <NEW_LINE> <INDENT> doc.loaded.connect(self.load, -999) <NEW_LINE> doc.closed.connect(self.save, 999) <NEW_LINE> <DEDENT> <DEDENT> def settingsGroup(self): <NEW_LINE> <INDENT> url = self.document().url() <NEW_LINE> if not url.isEmpty(): <NEW_LINE> <INDENT> s = app.settings('metainfo') <NEW_LINE> s.beginGroup(url.toString().replace('\\', '_').replace('/', '_')) <NEW_LINE> return s <NEW_LINE> <DEDENT> <DEDENT> def load(self): <NEW_LINE> <INDENT> s = self.settingsGroup() <NEW_LINE> for name in _defaults: <NEW_LINE> <INDENT> self.loadValue(name, s) <NEW_LINE> <DEDENT> <DEDENT> def loadValue(self, name, settings=None): <NEW_LINE> <INDENT> s = settings or self.settingsGroup() <NEW_LINE> default, readfunc = _defaults[name] <NEW_LINE> if s and QSettings().value("metainfo", True, bool): <NEW_LINE> <INDENT> self.__dict__[name] = readfunc(s.value(name, default)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.__dict__[name] = default <NEW_LINE> <DEDENT> <DEDENT> def save(self): <NEW_LINE> <INDENT> s = self.settingsGroup() <NEW_LINE> if s: <NEW_LINE> <INDENT> s.setValue("time", time.time()) <NEW_LINE> for name in _defaults: <NEW_LINE> <INDENT> value = self.__dict__[name] <NEW_LINE> s.remove(name) if value == _defaults[name][0] else s.setValue(name, value)
Stores meta-information for a Document.
62598fce656771135c489a88
class BibWorkflowEngineLog(db.Model): <NEW_LINE> <INDENT> __tablename__ = "bwlWORKFLOWLOGGING" <NEW_LINE> id = db.Column(db.Integer, primary_key=True) <NEW_LINE> id_object = db.Column(db.String(255), db.ForeignKey('bwlWORKFLOW.uuid'), nullable=False) <NEW_LINE> log_type = db.Column(db.Integer, default=0, nullable=False) <NEW_LINE> created = db.Column(db.DateTime, default=datetime.now) <NEW_LINE> message = db.Column(db.TEXT, default="", nullable=False) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return "%(severity)s: %(created)s - %(message)s" % { "severity": self.log_type, "created": self.created, "message": self.message } <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "BibWorkflowEngineLog(%s)" % (", ".join([ "log_type='%s'" % self.log_type, "created='%s'" % self.created, "message='%s'" % self.message, "id_object='%s'" % self.id_object ])) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def get(cls, *criteria, **filters): <NEW_LINE> <INDENT> return cls.query.filter(*criteria).filter_by(**filters) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def get_most_recent(cls, *criteria, **filters): <NEW_LINE> <INDENT> most_recent = cls.get(*criteria, **filters).order_by( desc(BibWorkflowEngineLog.created)).first() <NEW_LINE> if most_recent is None: <NEW_LINE> <INDENT> raise NoResultFound <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return most_recent <NEW_LINE> <DEDENT> <DEDENT> @classmethod <NEW_LINE> def delete(cls, uuid=None): <NEW_LINE> <INDENT> cls.get(BibWorkflowEngineLog.id == uuid).delete() <NEW_LINE> db.session.commit()
Represents a log entry for BibWorkflowEngine. This class represent a record of a log emit by an object into the database. The object must be saved before using this class as it requires the object id.
62598fce283ffb24f3cf3c9e
class LineEditWFocusOut(QtWidgets.QLineEdit): <NEW_LINE> <INDENT> def __init__(self, parent, callback, focusInCallback=None): <NEW_LINE> <INDENT> super().__init__(parent) <NEW_LINE> if parent.layout() is not None: <NEW_LINE> <INDENT> parent.layout().addWidget(self) <NEW_LINE> <DEDENT> self.callback = callback <NEW_LINE> self.focusInCallback = focusInCallback <NEW_LINE> self.returnPressed.connect(self.returnPressedHandler) <NEW_LINE> self.__changed = False <NEW_LINE> self.textEdited.connect(self.__textEdited) <NEW_LINE> <DEDENT> def __textEdited(self): <NEW_LINE> <INDENT> self.__changed = True <NEW_LINE> <DEDENT> def returnPressedHandler(self): <NEW_LINE> <INDENT> self.selectAll() <NEW_LINE> self.__callback_if_changed() <NEW_LINE> <DEDENT> def __callback_if_changed(self): <NEW_LINE> <INDENT> if self.__changed: <NEW_LINE> <INDENT> self.__changed = False <NEW_LINE> if hasattr(self, "cback") and self.cback: <NEW_LINE> <INDENT> self.cback(self.text()) <NEW_LINE> <DEDENT> if self.callback: <NEW_LINE> <INDENT> self.callback() <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def setText(self, text): <NEW_LINE> <INDENT> self.__changed = False <NEW_LINE> super().setText(text) <NEW_LINE> <DEDENT> def focusOutEvent(self, *e): <NEW_LINE> <INDENT> super().focusOutEvent(*e) <NEW_LINE> self.__callback_if_changed() <NEW_LINE> <DEDENT> def focusInEvent(self, *e): <NEW_LINE> <INDENT> self.__changed = False <NEW_LINE> if self.focusInCallback: <NEW_LINE> <INDENT> self.focusInCallback() <NEW_LINE> <DEDENT> return super().focusInEvent(*e)
A class derived from QLineEdit, which postpones the synchronization of the control's value with the master's attribute until the user leaves the line edit or presses Tab when the value is changed. The class also allows specifying a callback function for focus-in event. .. attribute:: callback Callback that is called when the change is confirmed .. attribute:: focusInCallback Callback that is called on the focus-in event
62598fce283ffb24f3cf3c9f
class UserLoginForm(forms.ModelForm): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> model = User <NEW_LINE> fields = ['username', 'password'] <NEW_LINE> widgets = { 'password': forms.PasswordInput(), } <NEW_LINE> <DEDENT> def clean(self): <NEW_LINE> <INDENT> username = self.cleaned_data.get('username') <NEW_LINE> password = self.cleaned_data.get('password') <NEW_LINE> if (not password) or (not username): <NEW_LINE> <INDENT> raise forms.ValidationError("All fields are required!") <NEW_LINE> <DEDENT> return self.cleaned_data
Registration form validation for user login
62598fcfad47b63b2c5a7c75
class GoogleAnalyticsResponseError(Exception): <NEW_LINE> <INDENT> pass
Exception class raised when the API response does not have an expected format by the data generation.
62598fcf55399d3f05626934
class VoicesTransformer(Visitor): <NEW_LINE> <INDENT> def visit_Voices(self, voices): <NEW_LINE> <INDENT> for i, music in enumerate(voices.exprs): <NEW_LINE> <INDENT> voice = Voice(music.exprs) <NEW_LINE> voices.exprs[i] = voice <NEW_LINE> <DEDENT> return voices, True
Children of Voices are always Music. Specializing them to Voice adds semantic information which Music does not provide for later consumers.
62598fcf50812a4eaa620df1
class Solution: <NEW_LINE> <INDENT> res = [] <NEW_LINE> def search(self, nums, permutation, used): <NEW_LINE> <INDENT> if not nums: <NEW_LINE> <INDENT> self.res.append(permutation) <NEW_LINE> <DEDENT> for i in range(len(nums)): <NEW_LINE> <INDENT> if nums[i] in used: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> used.add(nums[i]) <NEW_LINE> self.search(nums[:i] + nums[i+1:], permutation + [nums[i]], set()) <NEW_LINE> <DEDENT> <DEDENT> def permuteUnique(self, nums): <NEW_LINE> <INDENT> if nums is None: <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> self.search(nums, [], set()) <NEW_LINE> return self.res
@param nums: A list of Integers. @return: A list of permutations.
62598fcfdc8b845886d539d8