code
stringlengths
4
4.48k
docstring
stringlengths
1
6.45k
_id
stringlengths
24
24
class TraySlot(object):
    """Tray/slot location model (originally swagger-generated)."""

    def __init__(self):
        # Maps attribute name -> swagger type, used by to_dict().
        self.swagger_types = {
            'tray': 'int',
            'slot': 'int'
        }
        # Maps attribute name -> JSON key.
        self.attribute_map = {
            'tray': 'tray',
            'slot': 'slot'
        }
        self._tray = None
        self._slot = None

    @property
    def tray(self):
        """Tray number (int or None)."""
        return self._tray

    @tray.setter
    def tray(self, tray):
        self._tray = tray

    @property
    def slot(self):
        """Slot number (int or None)."""
        return self._slot

    @slot.setter
    def slot(self, slot):
        self._slot = slot

    def to_dict(self):
        """Return the model's properties as a plain dict, recursing into nested models."""
        result = {}
        # Python 3 fix: the generated code used iteritems(); use dict.items()/iteration.
        for attr in self.swagger_types:
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = [
                    x.to_dict() if hasattr(x, "to_dict") else x for x in value
                ]
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = {
                    k: v.to_dict() if hasattr(v, "to_dict") else v
                    for k, v in value.items()
                }
            else:
                result[attr] = value
        return result

    def to_str(self):
        """Return the pretty-printed string representation of the model."""
        return pformat(self.to_dict())

    def __repr__(self):
        # Bug fix: the generated `if self is None` branch was unreachable dead code.
        return self.to_str()

    def __eq__(self, other):
        # Bug fix: the original returned None (not a bool) when either side was None.
        if not isinstance(other, TraySlot):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not self == other
NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually.
62598fad498bea3a75a57afc
class ListOfCourses:
    """Stores a list of Course objects and supports add/remove by course id."""

    __slots__ = 'course_data'

    def __init__(self):
        # List of Course-like objects; each is expected to expose an `id` attribute.
        self.course_data = []

    def get_course_data(self):
        """Return the underlying list of courses."""
        return self.course_data

    def set_course_data(self, course):
        """Append `course` to the list (name kept for backward compatibility)."""
        self.course_data.append(course)

    def remove_course_data(self, course_id):
        """Remove the first course whose `id` equals `course_id`; no-op if absent."""
        try:
            # enumerate() replaces the original range(len(...)) index loop;
            # the explicit `len > 0` guard was redundant (empty loop is a no-op).
            for i, course in enumerate(self.course_data):
                if course.id == course_id:
                    del self.course_data[i]
                    break
        except Exception as e:
            # Preserved best-effort behavior: report and continue rather than raise.
            print(e)

    def remove_all_course_data(self):
        """Drop every stored course."""
        self.course_data = []
A class that stores a list of Course objects
62598fad16aa5153ce4004e0
class Job(models.Model):
    """A Word Scraper job submitted by a user.

    Tracks ownership, submission/completion times, crawl depth, and the
    identifier of the stored result.
    """

    job_id = models.AutoField(primary_key=True)
    # Submitting user; `user.jobs` gives the reverse relation.
    owner = models.ForeignKey(User, related_name='jobs')
    # Set automatically when the row is created.
    submitted = models.DateTimeField(auto_now_add=True)
    # Left NULL until the job finishes.
    completed = models.DateTimeField(blank=True, null=True)
    max_depth = models.PositiveSmallIntegerField(default=MAX_DEPTH_DEFAULT)
    # Identifier of the stored result; NULL while the job is running.
    result_id = models.CharField(max_length=32, null=True)
    email = models.EmailField()

    def __unicode__(self):
        # Python 2 / old-Django style string representation.
        return 'job: %d' % (self.job_id)
The Job model contains information about created Word Scraper jobs.
62598fada05bb46b3848a84a
class MusicalWorkContributorRole (pyxb.binding.datatypes.string, pyxb.binding.basis.enumeration_mixin):
    """PyXB binding for the DDEX `MusicalWorkContributorRole` enumerated string type.

    Auto-generated from ddex.xsd: a role played by a ddex:Contributor in
    relation to a ddex:MusicalWork.
    """

    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'MusicalWorkContributorRole')
    _XSDLocation = pyxb.utils.utility.Location('http://ddex.net/xml/20120214/ddex.xsd', 2558, 3)
    _Documentation = 'A role played by a ddex:Contributor in relation to a ddex:MusicalWork.'
A role played by a ddex:Contributor in relation to a ddex:MusicalWork.
62598fad38b623060ffa9078
class MyTranslateAdminForm(forms.ModelForm):
    """Admin form for translate entries.

    Enforces uniqueness over (fromeditype, frommessagetype, alt, frompartner,
    topartner) manually, because unique_together is not validated correctly
    when the partner fields contain NULL values.
    """

    class Meta:
        model = models.translate
        fields = "__all__"

    def clean(self):
        super(MyTranslateAdminForm, self).clean()
        data = self.cleaned_data
        duplicates = models.translate.objects.filter(
            fromeditype=data['fromeditype'],
            frommessagetype=data['frommessagetype'],
            alt=data['alt'],
            frompartner=data['frompartner'],
            topartner=data['topartner'])
        # A clash only matters when it is a different row than the one being edited.
        if duplicates and (self.instance.pk is None or self.instance.pk != duplicates[0].id):
            raise django_forms_util.ValidationError('Combination of fromeditype,frommessagetype,alt,frompartner,topartner already exists.')
        return self.cleaned_data
Custom form for translations that checks whether an entry already exists (unique_together is not validated correctly because of null values in the partner fields)
62598fadf9cc0f698b1c52b8
class TimeAggregator:
    """Aggregates time series according to the metric and query time params.

    This base class is a pass-through: it aggregates nothing and reports
    the requested dates as the source dates.
    """

    def __init__(self, **kwds):
        self._first = kwds.get('first')
        self._last = kwds.get('last')
        self._periodicity = kwds.get('periodicity')
        # Dates the caller asked for, resolved by the module-level helper.
        self._requestDates = requestDates(**kwds)

    @property
    def requestDates(self):
        """Dates requested by the query."""
        return self._requestDates

    @property
    def sourceDates(self):
        """Dates to fetch from the source; identical to requestDates here."""
        return self._requestDates

    def aggregate(self, input):
        """Identity aggregation: return the series unchanged."""
        return input

    @staticmethod
    def Create(operator, **kwds):
        """Factory: build the aggregator registered for `operator`, defaulting
        to this pass-through class when the operator is unknown."""
        klass = _timeAggregatorClasses.get(operator, TimeAggregator)
        return klass(**kwds)
Time aggregator knows how to aggregate time series depending on the metric and the query time params.
62598fad26068e7796d4c933
class DeleteProposalSet(DeleteView):
    """Confirmation view to delete a proposal set.

    .. versionadded: 0.1.5

    Only space administrators and moderators may delete; everyone else
    gets PermissionDenied. Adds ``get_place`` (the Space) to the context.
    """

    def dispatch(self, request, *args, **kwargs):
        space = get_object_or_404(Space, url=kwargs['space_url'])
        allowed = (request.user.has_perm('admin_space', space)
                   or request.user.has_perm('mod_space', space))
        if not allowed:
            raise PermissionDenied
        return super(DeleteProposalSet, self).dispatch(request, *args, **kwargs)

    def get_object(self):
        """Resolve the ProposalSet from the `set_id` URL kwarg."""
        return get_object_or_404(ProposalSet, pk=self.kwargs['set_id'])

    def get_success_url(self):
        """Redirect to the space index after a successful delete."""
        return reverse(urln_space.SPACE_INDEX,
                       kwargs={'space_url': self.kwargs['space_url']})

    def get_context_data(self, **kwargs):
        ctx = super(DeleteProposalSet, self).get_context_data(**kwargs)
        ctx['get_place'] = get_object_or_404(Space, url=self.kwargs['space_url'])
        return ctx
Delete a proposal set. .. versionadded: 0.1.5 :rtype: Confirmation :context: get_place
62598fad5fdd1c0f98e5df6b
class Division(AbstractStructureElement):
    """Structure element representing some kind of division.

    Divisions may be nested at will, and may include almost all kinds of
    other structure elements.
    """

    def head(self):
        """Return the first Head child; raise NoSuchAnnotation when absent."""
        found = next((child for child in self.data if isinstance(child, Head)), None)
        if found is None:
            raise NoSuchAnnotation()
        return found
Structure element representing some kind of division. Divisions may be nested at will, and may include almost all kinds of other structure elements.
62598fad99fddb7c1ca62dd8
class SinOsc(PureUGen):
    """A sinusoid oscillator unit generator."""

    # Input names with their default values, in argument order.
    _ordered_input_names = collections.OrderedDict(
        [("frequency", 440.0), ("phase", 0.0)]
    )
    # May run at audio or control rate.
    _valid_calculation_rates = (CalculationRate.AUDIO, CalculationRate.CONTROL)
A sinusoid oscillator unit generator. :: >>> supriya.ugens.SinOsc.ar() SinOsc.ar() :: >>> print(_) synthdef: name: ... ugens: - SinOsc.ar: frequency: 440.0 phase: 0.0
62598fad21bff66bcd722c45
class SonarCommand(BackendCommand):
    """Run the Sonarqube backend from the command line."""

    BACKEND = Sonar

    @classmethod
    def setup_cmd_parser(cls):
        """Build and return the argument parser for the Sonarqube backend."""
        cmd_parser = BackendCommandArgumentParser(cls.BACKEND.CATEGORIES,
                                                 from_date=True,
                                                 archive=True)
        # Sonarqube-specific options and positional arguments.
        sonar_group = cmd_parser.parser.add_argument_group('Sonarqube arguments')
        sonar_group.add_argument('--base-url', dest='base_url',
                                 help="Base URL for Sonarqube instance")
        sonar_group.add_argument('component',
                                 help="Component/project")
        sonar_group.add_argument('metrics',
                                 help="Metrics")
        return cmd_parser
Class to run the Sonarqube backend from the command line.
62598fad627d3e7fe0e06e8c
class Entry(models.Model):
    """Something specific learned about a topic."""

    # Bug fix: on_delete=None raises TypeError when the related Topic is
    # deleted; CASCADE removes the entries along with their topic.
    topic = models.ForeignKey(Topic, on_delete=models.CASCADE)
    text = models.TextField()
    # Set automatically when the entry is created.
    date_added = models.DateTimeField(auto_now_add=True)

    class Meta:
        verbose_name_plural = 'entries'

    def __str__(self):
        # Truncate long entries for readable admin/list display.
        if len(self.text) > 50:
            return self.text[:50] + "..."
        return self.text
Something specific learned about a topic
62598fad76e4537e8c3ef58c
class GithubOrgsView(ProjectMixin, ListView):
    """AJAX view listing the GitHub organizations available to the user."""

    context_object_name = 'project'

    def dispatch(self, request, *args, **kwargs):
        # Reject non-AJAX requests outright.
        if not request.is_ajax():
            raise Http404("This is an ajax view, friend.")
        return super(GithubOrgsView, self).dispatch(
            request, *args, **kwargs)

    def render_to_response(self, context, **response_kwargs):
        return JsonResponse(self.get_github_orgs(), safe=False)

    def get_github_orgs(self):
        """Fetch the user's GitHub orgs, prefixed with their own account."""
        orgs = []
        if self.request.user:
            try:
                token = SocialToken.objects.get(
                    account__user=self.request.user.id,
                    account__provider='github'
                )
                if token:
                    response = requests.get(
                        'https://api.github.com/user/orgs',
                        params={
                            'access_token': token,
                        }
                    )
                    orgs = response.json()
                    # The user's own account is offered as the first entry.
                    orgs.insert(0, {
                        'login': self.request.user.username,
                        'avatar_url': 'https://github.com/%s.png?size=40' % self.request.user.username,
                        'is_user': True,
                    })
            except SocialToken.DoesNotExist:
                print('Token not exist')
        return orgs
List organization for the user
62598fad01c39578d7f12d5e
class TextField(BaseField):
    """A standard text input box.

    >>> print TextField( "foo" ).get_html()
    <input type="text" name="foo" size="10" value="">
    >>> print TextField( "bins", size=4, value="default" ).get_html()
    <input type="text" name="bins" size="4" value="default">
    """

    def __init__(self, name, size=None, value=None):
        self.name = name
        self.size = int(size or 10)
        self.value = value or ""

    def get_html(self, prefix="", disabled=False):
        """Render the <input> element as unicode HTML."""
        text = unicodify(self.value)
        markup = '<input type="text" name="%s%s" size="%d" value="%s"%s>' % (
            prefix, self.name, self.size,
            escape(text, quote=True),
            self.get_disabled_str(disabled))
        return unicodify(markup)

    def set_size(self, size):
        self.size = int(size)
A standard text input box. >>> print TextField( "foo" ).get_html() <input type="text" name="foo" size="10" value=""> >>> print TextField( "bins", size=4, value="default" ).get_html() <input type="text" name="bins" size="4" value="default">
62598fadcc40096d6161a1ca
class FlortDjCsppTelemeteredDriver(SimpleDatasetDriver):
    """Derived flort_dj_cspp telemetered driver.

    All this needs to do is create a concrete _build_parser method.
    """

    def _build_parser(self, stream_handle):
        # Wire the telemetered metadata/instrument particle classes into the
        # generic FlortDjCspp parser.
        parser_config = {
            DataSetDriverConfigKeys.PARTICLE_CLASS: None,
            DataSetDriverConfigKeys.PARTICLE_CLASSES_DICT: {
                METADATA_PARTICLE_CLASS_KEY: FlortDjCsppMetadataTelemeteredDataParticle,
                DATA_PARTICLE_CLASS_KEY: FlortDjCsppInstrumentTelemeteredDataParticle,
            }
        }
        parser = FlortDjCsppParser(parser_config, stream_handle, self._exception_callback)
        return parser
Derived flort_dj_cspp driver class All this needs to do is create a concrete _build_parser method
62598fad55399d3f05626503
class Test(object):
    """Represents a test over a set of experiences.

    Each experience accumulates a list of results in self.data, keyed by
    the experience's name.
    """

    def __init__(self, *args):
        # args: experience objects; each must expose `.name`, and (for
        # perform_draw) `.draw_binomial()`.
        self.experiences = args
        self.data = {}
        for x in self.experiences:
            self.data[x.name] = []

    def get_random(self):
        # Uniform integer in [1, 99] (np.random.randint excludes the high end).
        return np.random.randint(1, 100)

    @property
    def nexperiences(self):
        return len(self.experiences)

    @property
    def nobs(self):
        # NOTE(review): this sums the recorded results (i.e. counts successes
        # for 0/1 data), not the number of recorded draws — confirm intended.
        inc = 0
        for d in self.data:
            inc += sum(self.data[d])
        return inc

    def increment_experience(self, experience, result):
        # `experience` here is the experience *name* (dict key).
        self.data[experience].append(result)

    def calc_stats(self):
        # Print name, observation count, success count and conversion rate
        # per experience; conversion defaults to 0 when there are no draws.
        for d in self.data:
            split_name = d
            obs = len(self.data[d])
            suc = sum(self.data[d])
            try:
                conv = suc / obs
            except ZeroDivisionError:
                conv = 0
            print(split_name, obs, suc, conv)

    def calc_win_percent(self):
        # Draw 1000 samples per experience from a Beta(k=successes,
        # n=observations) posterior (BetaDist is defined elsewhere).
        stats = {}
        for d in self.data:
            obs = len(self.data[d])
            suc = sum(self.data[d])
            beta = BetaDist(k=suc, n=obs)
            stats[d] = []
            for i in range(0, 1000):
                stats[d].append(beta.draw_rvs())
        return stats

    def reset_test(self):
        # Clear all recorded results, keeping the experiences.
        for x in self.experiences:
            self.data[x.name] = []

    def perform_draw(self):
        # NOTE(review): self.get_split() is not defined on this class, so this
        # method raises AttributeError as written — confirm the intended
        # selection method (get_random?).
        split = self.get_split()
        experience = self.experiences[split]
        draw = experience.draw_binomial()
        self.increment_experience(experience.name, draw)
Represents a test
62598fad5166f23b2e2433b8
class CustomerStatus(Customer):
    """A Customer extended with payment and contact status fields."""

    xml_fields = Customer.xml_fields + (
        'amount', 'currency', 'account', 'phone1', 'phone2', 'countryname'
    )

    def __init__(self, *args, **kwargs):
        # Each extra field is required; consume them from kwargs (in the
        # original order) before the base Customer sees the remainder.
        for field in ('amount', 'currency', 'account',
                      'phone1', 'phone2', 'countryname'):
            setattr(self, field, kwargs.pop(field))
        super(CustomerStatus, self).__init__(*args, **kwargs)
A customer with additional fields.
62598fade1aae11d1e7ce813
class Market:
    """Downloads minute bars for `symbols` from Alpaca and replays a trading
    day one barset at a time via next_barset().
    """

    def __init__(self, symbols, alpaca_key_id, alpaca_secret_key, date=None):
        self._api = alpaca.REST(key_id=alpaca_key_id,
                                secret_key=alpaca_secret_key,
                                base_url='https://paper-api.alpaca.markets')
        self.symbols = symbols
        self.load_day(date)

    def load_day(self, date=None):
        """Fetch 1-minute bars for `date` (default: today, America/New_York),
        restrict them to regular market hours, and rewind the cursor."""
        self._bars = []
        if date is None:
            date = pd.Timestamp.now(tz='America/New_York').floor('1d')
        else:
            date = date.tz_convert('America/New_York').floor('1d')
        market_open = date.replace(hour=9, minute=30)
        market_close = date.replace(hour=16, minute=0)
        today = date.isoformat()
        tomorrow = (date + pd.Timedelta('1day')).isoformat()
        data = self._api.get_barset(self.symbols, '1Min',
                                    start=today, end=tomorrow).df
        # Slice to regular trading hours only.
        self._bars = data[market_open:market_close]
        self.i = 0

    def next_barset(self):
        """Return the current barset and advance; None when exhausted."""
        if self.i >= len(self._bars.index):
            return None
        new_bar = self._bars.loc[self._bars.index[self.i]]
        self.i = self.i + 1
        return new_bar

    def curr_barset(self):
        """Return the current barset without advancing; None when exhausted."""
        if self.i >= len(self._bars.index):
            return None
        return self._bars.loc[self._bars.index[self.i]]

    def reset(self, date=None):
        """Rewind to the start of the day; with `date`, load that day instead."""
        if date is not None:
            # Bug fix: the original called the bare name `load_day(date)`,
            # which raises NameError; the instance method is intended.
            self.load_day(date)
        else:
            self.i = 0
This class automatically downloads the data for the provided symbols from Alpaca, and simulates the pass of time during a day by providing 1 minute barsets with its method next_barset().
62598fad4e4d562566372405
class EventTracer(AbstractOverlay):
    """Overlay that draws a cross marker under the mouse cursor where an
    event is occurring."""

    # Current marker position in component coordinates.
    x = Float
    y = Float
    color = ColorTrait("red")
    # Half-length of the cross arms, in pixels.
    size = Float(5)
    # Rotation of the cross, in degrees.
    angle = Float(0.0)

    def normal_mouse_move(self, event):
        # Track the pointer and ask the component to repaint the overlay.
        self.x = event.x
        self.y = event.y
        self.component.request_redraw()

    def overlay(self, component, gc, view_bounds, mode):
        with gc:
            gc.translate_ctm(self.x, self.y)
            if self.angle != 0:
                # Convert degrees to radians.
                gc.rotate_ctm(self.angle * 3.14159/180.)
            gc.set_stroke_color(self.color_)
            gc.set_line_width(1.0)
            # Horizontal arm of the cross.
            gc.move_to(-self.size, 0)
            gc.line_to(self.size, 0)
            # Vertical arm of the cross.
            gc.move_to(0, -self.size)
            gc.line_to(0, self.size)
            gc.stroke_path()
Draws a marker under the mouse cursor where an event is occurring.
62598fad85dfad0860cbfa63
class Clause(BaseDDL):
    """A single or compound clause, for example ``CREATE TABLE``.

    In some cases clauses may take parameters, e.g. ``VARCHAR(255)``.
    """

    __slots__ = ['name', 'params']

    def __init__(self, name, *params):
        self.name = name
        self.params = params

    def __call__(self, *args):
        # Calling a clause yields a fresh Clause with the same name and the
        # call arguments as its parameters (the current params are not kept).
        return Clause(self.name, *args)
A single or compound clause, for example ``CREATE TABLE`` -- in some cases clauses make take parameters, e.g. ``VARCHAR(255)``
62598fad66673b3332c303ab
class TransactionError(Error):
    """Raised when a transaction fails."""
    pass
Transaction failed.
62598fad5fdd1c0f98e5df6c
class ConfigSource(object):
    """Base configuration source class.

    Used for fetching configuration keys from external sources; subclasses
    must override get().
    """

    def __init__(self, conf, *args):
        # Extra positional args are accepted for subclass compatibility
        # and ignored here.
        self._conf = conf

    def get(self, key):
        """Fetch the value for `key`; subclasses must implement this."""
        raise NotImplementedError
Base configuration source class. Used for fetching configuration keys from external sources.
62598fada219f33f346c67f6
class SignatureValidator(RequestValidator):
    """Verifies the OAuth signature on an LTI request.

    oauthlib requires RequestValidator subclasses to provide instance-method
    callbacks for fetching the consumer secret and for checking that fields
    conform to application-specific requirements.
    """

    # Accepted nonce length range (min, max) for oauthlib.
    nonce_length = 20, 45

    def __init__(self):
        super(SignatureValidator, self).__init__()
        self.endpoint = SignatureOnlyEndpoint(self)
        # Set by validate_client_key(); read by get_client_secret().
        self.lti_consumer = None
        self.cache = cache

    @property
    def enforce_ssl(self):
        # Controlled by settings.LTI_SSL; defaults to True when unset.
        try:
            ssl = settings.LTI_SSL
        except AttributeError:
            ssl = True
        return ssl

    def validate_timestamp_and_nonce(self, client_key, timestamp, nonce, request):
        """Reject requests whose timestamp is older than the last one seen
        for this client, or whose nonce was seen within the cache window."""
        msg = "LTI request's {} is not valid."
        log.debug('Timestamp validating is started.')
        ts = int(timestamp)
        ts_key = '{}_ts'.format(client_key)
        # Default to the incoming value so a first request always passes.
        cache_ts = self.cache.get(ts_key, ts)
        if cache_ts > ts:
            log.debug(msg.format('timestamp'))
            return False
        # Remember the newest timestamp for 10 seconds.
        self.cache.set(ts_key, ts, 10)
        log.debug('Timestamp is valid.')
        log.debug('Nonce validating is started.')
        # A cached nonce means it was already used: replay attempt.
        if self.cache.get(nonce):
            log.debug(msg.format('nonce'))
            return False
        self.cache.set(nonce, 1, 10)
        log.debug('Nonce is valid.')
        return True

    def validate_client_key(self, client_key, request):
        """Look up the LTI consumer for `client_key` and cache it on the
        instance for the secret lookup that follows."""
        try:
            self.lti_consumer = LtiProvider.objects.get(consumer_key=client_key)
        except LtiProvider.DoesNotExist:
            log.exception('Consumer with the key {} is not found.'.format(client_key))
            return False
        return True

    def get_client_secret(self, client_key, request):
        # Relies on validate_client_key() having set self.lti_consumer.
        log.debug('Getting client secret')
        return self.lti_consumer.consumer_secret
Helper class that verifies the OAuth signature on a request. The pattern required by the oauthlib library mandates that subclasses of RequestValidator contain instance methods that can be called back into in order to fetch the consumer secret or to check that fields conform to application-specific requirements.
62598faddd821e528d6d8f15
class CertificatePackageWarranty(CertificatePackage):
    """Choice parameter to select an available certificate warranty."""

    name = 'certificate package warranty'

    def _get_choices(self, gandi):
        # Package names are underscore-separated; the warranty is the
        # fourth component. Deduplicate across all packages.
        packages = super(CertificatePackageWarranty, self)._get_choices(gandi)
        warranties = {pack.split('_')[3] for pack in packages}
        return list(warranties)

    def convert(self, value, param, ctx):
        """Validate against the choices and return the warranty as an int."""
        self.gandi = ctx.obj
        chosen = click.Choice.convert(self, value, param, ctx)
        return int(chosen)
Choice parameter to select an available certificate warranty.
62598fad32920d7e50bc6034
class CommandJobTemplateExport:
    """FreeCAD GUI command to export a Path Job as a reusable template.

    Opens a dialog to select which parts of the job (post processing, tool
    controllers, stock, setup sheet settings) go into the template, then
    writes the template as JSON.
    """

    def GetResources(self):
        # Icon, menu text and tooltip for the FreeCAD command.
        return {'Pixmap': 'Path-ExportTemplate',
                'MenuText': QtCore.QT_TRANSLATE_NOOP("Path_Job", "Export Template"),
                'ToolTip': QtCore.QT_TRANSLATE_NOOP("Path_Job", "Exports Path Job as a template to be used for other jobs")}

    def GetJob(self):
        """Return the job to export: the only job in the document, or the
        single selected Job object; otherwise None."""
        jobs = PathJob.Instances()
        if not jobs:
            return None
        if len(jobs) == 1:
            return jobs[0]
        sel = FreeCADGui.Selection.getSelection()
        if len(sel) == 1:
            job = sel[0]
            if hasattr(job, 'Proxy') and isinstance(job.Proxy, PathJob.ObjectJob):
                return job
        return None

    def IsActive(self):
        # The command is enabled only when a job can be determined.
        return self.GetJob() is not None

    def Activated(self):
        # Show the export-options dialog and save on confirmation.
        job = self.GetJob()
        dialog = PathJobDlg.JobTemplateExport(job)
        if dialog.exec_() == 1:
            self.SaveDialog(job, dialog)

    @classmethod
    def SaveDialog(cls, job, dialog):
        """Ask for a destination file (job_*.json) and export on confirm."""
        foo = QtGui.QFileDialog.getSaveFileName(QtGui.QApplication.activeWindow(), "Path - Job Template", PathPreferences.filePath(), "job_*.json")[0]
        if foo:
            cls.Execute(job, foo, dialog)

    @classmethod
    def Execute(cls, job, path, dialog=None):
        """Serialize `job` as a JSON template at `path`.

        With a dialog, only the sections the user enabled are included;
        without one, everything is exported.
        """
        attrs = job.Proxy.templateAttrs(job)
        if dialog and not dialog.includePostProcessing():
            attrs.pop(PathJob.JobTemplate.PostProcessor, None)
            attrs.pop(PathJob.JobTemplate.PostProcessorArgs, None)
            attrs.pop(PathJob.JobTemplate.PostProcessorOutputFile, None)
        # NOTE(review): without a dialog this falls back to job.ToolController —
        # assumes includeToolControllers() returns a list of controllers (or a
        # falsy value), not a bool; confirm against PathJobDlg.
        toolControllers = dialog.includeToolControllers() if dialog else job.ToolController
        if toolControllers:
            tcAttrs = [tc.Proxy.templateAttrs(tc) for tc in toolControllers]
            attrs[PathJob.JobTemplate.ToolController] = tcAttrs
        stockAttrs = None
        if dialog:
            if dialog.includeStock():
                stockAttrs = PathStock.TemplateAttributes(job.Stock, dialog.includeStockExtent(), dialog.includeStockPlacement())
        else:
            stockAttrs = PathStock.TemplateAttributes(job.Stock)
        if stockAttrs:
            attrs[PathJob.JobTemplate.Stock] = stockAttrs
        setupSheetAttrs = None
        if dialog:
            setupSheetAttrs = job.Proxy.setupSheet.templateAttributes(dialog.includeSettingToolRapid(), dialog.includeSettingOperationHeights(), dialog.includeSettingOperationDepths(), dialog.includeSettingOpsSettings())
        else:
            setupSheetAttrs = job.Proxy.setupSheet.templateAttributes(True, True, True)
        if setupSheetAttrs:
            attrs[PathJob.JobTemplate.SetupSheet] = setupSheetAttrs
        encoded = job.Proxy.setupSheet.encodeTemplateAttributes(attrs)
        with open(PathUtil.toUnicode(path), 'w') as fp:
            json.dump(encoded, fp, sort_keys=True, indent=2)
Command to export a template of a given job. Opens a dialog to select the file to store the template in. If the template is stored in Path's file path (see preferences) and named in accordance with job_*.json it will automatically be found on Job creation and be available for selection.
62598fadbe383301e02537d9
class AIMHarness(component):
    """AIMHarness() -> new AIMHarness component

    Send ("message", recipient, message) commands to its "inbox" to send
    instant messages. It outputs ("buddy online", {name: buddyname}) and
    ("message", sender, message) tuples on its "outbox".
    """

    Inboxes = {"inbox" : "tuple-based commands for ChatManager",
               "control" : "NOT USED",
               "internal inbox" : "links to various child components",
               "internal control" : "links to signal outbox of various child components",
               }
    Outboxes = {"outbox" : "tuple-based notifications from ChatManager",
                "signal" : "NOT USED",
                "internal outbox" : "outbox to various child components",
                "internal signal" : "sends shutdown handling signals to various child components",
                }

    def __init__(self, screenname, password):
        super(AIMHarness, self).__init__()
        # Kick off the OSCAR login; its "signal" outbox reports either a
        # tuple (error notification) or the connected OSCAR component.
        self.loginer = LoginHandler.LoginHandler(screenname, password).activate()
        self.link((self.loginer, "signal"), (self, "internal inbox"))
        self.addChildren(self.loginer)
        self.debugger.addDebugSection("AIMHarness.main", 5)

    def main(self):
        # Wait for the login handler to report back.
        while not self.dataReady("internal inbox"):
            yield 1
        result = self.recv("internal inbox")
        if type(result) == type((1,2)):
            # A tuple means a login error notification; forward it as-is.
            self.send(result)
        else:
            # Otherwise `result` is the live OSCAR connection component.
            self.oscar = result
            # Messages that arrived during login were queued by the handler.
            queued = self.recv("internal inbox")
            self.unlink(self.oscar)
            assert self.debugger.note("AIMHarness.main", 9, "%i queued messages" % len(queued))
            # Wire ChatManager between our in/outboxes and the OSCAR client.
            self.chatter = ChatManager.ChatManager().activate()
            self.link((self.chatter, "heard"), (self, "outbox"), passthrough=2)
            self.link((self, "inbox"), (self.chatter, "talk"), passthrough=1)
            self.link((self.chatter, "outbox"), (self.oscar, "inbox"))
            self.link((self.oscar, "outbox"), (self.chatter, "inbox"))
            self.link((self, "internal outbox"), (self.chatter, "inbox"))
            # Replay the queued messages through ChatManager.
            while len(queued):
                self.send(queued[0], "internal outbox")
                del(queued[0])
            assert self.debugger.note("AIMHarness.main", 5, "Everything linked up and initialized, starting normal operation")
            # Stay alive; the linked child components do the work.
            while True:
                yield 1
AIMHarness() -> new AIMHarness component Send ("message", recipient, message) commands to its "inbox" to send instant messages. It will output ("buddy online", {name: buddyname}) and ("message", sender, message) tuples whenever a buddy comes online or a new message arrives for you.
62598fad7b180e01f3e49040
class SetAuthenticationRequest(AbstractModel):
    """Request parameters for the SetAuthentication API call
    (SetAuthentication请求参数结构体)."""

    def __init__(self):
        self.ListenerId = None
        self.Domain = None
        self.BasicAuth = None
        self.GaapAuth = None
        self.RealServerAuth = None
        self.BasicAuthConfId = None
        self.GaapCertificateId = None
        self.RealServerCertificateId = None
        self.RealServerCertificateDomain = None
        self.PolyRealServerCertificateIds = None

    def _deserialize(self, params):
        """Populate the attributes from `params`, warning about any keys
        that do not map to a known attribute."""
        self.ListenerId = params.get("ListenerId")
        self.Domain = params.get("Domain")
        self.BasicAuth = params.get("BasicAuth")
        self.GaapAuth = params.get("GaapAuth")
        self.RealServerAuth = params.get("RealServerAuth")
        self.BasicAuthConfId = params.get("BasicAuthConfId")
        self.GaapCertificateId = params.get("GaapCertificateId")
        self.RealServerCertificateId = params.get("RealServerCertificateId")
        self.RealServerCertificateDomain = params.get("RealServerCertificateDomain")
        self.PolyRealServerCertificateIds = params.get("PolyRealServerCertificateIds")
        # Unknown keys are reported rather than silently dropped. Set
        # difference replaces the original remove-in-a-loop; typo fixes:
        # "memeber_set" -> member_set, "fileds" -> "fields" in the warning.
        member_set = set(params.keys()) - set(vars(self))
        if member_set:
            warnings.warn("%s fields are useless." % ",".join(member_set))
SetAuthentication请求参数结构体
62598fadbaa26c4b54d4f293
class cdhit_result(object):
    """Represents a whole cd-hit output file with all clusters as a list."""

    def __init__(self, name=None, data=None):
        self.name = name
        # A fresh list per instance when no data is supplied.
        if data:
            self.data = data
        else:
            self.data = []

    def to_json(self):
        """Return a JSON-serializable dict of the sample and its clusters."""
        return {'sample': self.name, 'data': [r.to_json() for r in self.data]}

    def to_df(self):
        """Return a one-row DataFrame (index = sample name) of cluster values."""
        d = {}
        for r in self.data:
            d[r.get_label()] = r.get_single_value()
        df = pd.DataFrame(data=d, index=[self.name])
        return df

    def get_thold_labels(self, th, mono):
        """Return labels of clusters whose single value is above `th`
        (when mono >= 0) or at/below `th` (when mono < 0)."""
        if mono >= 0:
            return [r.get_label() for r in self.data if r.get_single_value() > th]
        return [r.get_label() for r in self.data if r.get_single_value() <= th]

    def append(self, cluster):
        """Add one cluster to the result."""
        self.data.append(cluster)

    def load_from_file(self, file, name=None):
        """Load clusters from a .clstr file path or an open file object."""
        if name:
            self.name = name
        if isinstance(file, str):
            with open(file) as f:
                self.load_content(f.read().splitlines())
        else:
            with file as f:
                self.load_content(f.read().splitlines())

    def load_content(self, cnt):
        """Parse cd-hit .clstr lines: '>' starts a cluster header, other
        lines describe member reads."""
        logger = logging.getLogger(__name__)
        # Bug fix: raw string — '\.' in a plain string literal is an invalid
        # escape sequence (DeprecationWarning; an error in future Pythons).
        pat = re.compile(r'>(.+)\.\.\.')
        reads = []
        name = None
        for l in cnt:
            if l[0] == '>':
                if reads:
                    # Flush the previous cluster before starting a new one.
                    self.append(cdhit_cluster(name, reads, reads[0].name))
                    name = l[1:].strip()
                    reads = []
                else:
                    logger.debug('applying first name')
                    name = l[1:].strip()
            else:
                m = pat.search(l)
                if m:
                    gname = m.group(1)
                else:
                    gname = 'unk'
                reads.append(cdhit_read(gname, l.split('... ')[1].strip(' at%'), l[2:].split(',')[0].strip()))
        # Bug fix: guard the trailing flush — empty input no longer raises
        # IndexError on reads[0].
        if reads:
            self.append(cdhit_cluster(name, reads, reads[0].name))
Represents a whole cd-hit output file, with all of its clusters as a list
62598fad4e4d562566372406
class IsInPolyAuthorPublicationConvoluteValue(HasLinkToValue):
    """Relates a publication poem by Kuno Raeber to a reification statement
    of the relation between the poem and a poly-author publication it is in.
    """

    def __init__(self, argument):
        super().__init__(argument)
        # Ontology namespace and property name identifying this link value.
        self._namespace = "http://www.knora.org/ontology/kuno-raeber"
        self._name = "isInPolyAuthorPublicationConvoluteValue"
Relating a publication poem by Kuno Raeber to a reification statement of the relation between the poem and a poly-author publication it is in.
62598fad3539df3088ecc292
@pytest.mark.skipif(
    paths.AUTHSELECT is None,
    reason="Authselect is only available in fedora-like distributions")
class TestServerInstallation(IntegrationTest):
    """Verify that the server install/uninstall round-trips authselect state.

    A pre-existing authselect profile is applied first; installation must
    switch to the default profile (with the 'with-sudo' option), and
    uninstallation must restore the preconfigured profile and options.
    """

    @classmethod
    def install(cls, mh):
        # Intentionally empty: installation is performed inside test_install
        # so the test controls the authselect state beforehand.
        pass

    def test_install(self):
        # Seed a non-default profile, then install and check the installer
        # switched to the default profile with the expected option.
        apply_authselect_profile(
            self.master, preconfigured_profile, preconfigured_options)
        tasks.install_master(self.master, setup_dns=False)
        check_authselect_profile(self.master, default_profile, ('with-sudo',))

    def test_uninstall(self):
        # Uninstall must roll authselect back to the preconfigured state.
        tasks.uninstall_master(self.master)
        check_authselect_profile(
            self.master, preconfigured_profile, preconfigured_options)

    @classmethod
    def uninstall(cls, mh):
        # Leave the machine on the default profile for subsequent tests.
        apply_authselect_profile(cls.master, default_profile)
Tests the server installation with authselect profile. When the system is a fresh installation, authselect tool is available and the default profile 'sssd' without any option is set by default. But when the system has been upgraded from older version, even though authselect tool is available, no profile is set (authselect current would return 'No existing configuration detected'). This test ensures that both scenarios are properly handled by the server installer.
62598fad7047854f4633f3ba
class IndicatorActionsForm(forms.Form):
    """Django form for adding actions to an indicator.

    The three date fields share one calendar widget configuration; the
    available action types are loaded dynamically in ``__init__`` from the
    ``IndicatorAction`` items.
    """

    error_css_class = 'error'
    required_css_class = 'required'
    # Choices are populated at instantiation time (see __init__).
    action_type = forms.ChoiceField(required=True, widget=forms.Select)
    begin_date = forms.DateTimeField(required=False, widget=CalWidget(format='%Y-%m-%d %H:%M:%S', attrs={'class':'datetimeclass', 'size':'25', 'id':'id_action_begin_date'}), input_formats=settings.PY_FORM_DATETIME_FORMATS)
    end_date = forms.DateTimeField(required=False, widget=CalWidget(format='%Y-%m-%d %H:%M:%S', attrs={'class':'datetimeclass', 'size':'25', 'id':'id_action_end_date'}), input_formats=settings.PY_FORM_DATETIME_FORMATS)
    performed_date = forms.DateTimeField(required=False, widget=CalWidget(format='%Y-%m-%d %H:%M:%S', attrs={'class':'datetimeclass', 'size':'25', 'id':'id_action_performed_date'}), input_formats=settings.PY_FORM_DATETIME_FORMATS)
    active = forms.ChoiceField(widget=RadioSelect, choices=(('on', 'on'), ('off', 'off')))
    reason = forms.CharField(widget=forms.TextInput(attrs={'size': '50'}), required=False)
    # Hidden, read-only field carrying the action date through the form cycle.
    date = forms.CharField(
        widget=forms.HiddenInput(attrs={'size': '50', 'readonly':'readonly', 'id':'id_action_date'}))

    def __init__(self, *args, **kwargs):
        super(IndicatorActionsForm, self).__init__(*args, **kwargs)
        # Populate action types from the database at form construction time so
        # newly added IndicatorAction items appear without a server restart.
        self.fields['action_type'].choices = [(c.name, c.name) for c in get_item_names(IndicatorAction, True)]
Django form for adding actions.
62598fad21bff66bcd722c47
class GribFile(object):
    """Interface for selecting GRIB message(s) from a file using key/value matching."""

    def __init__(self, file_object, multi_field=True, lon_offset=True):
        """Wrap an open GRIB file.

        :param file_object: open binary file containing GRIB messages.
        :param multi_field: toggle grib-api multi-field support globally.
        :param lon_offset: passed through to each ``GribMessage``.
        """
        self.file_object = file_object
        self.lon_offset = lon_offset
        # NOTE(review): this flips a process-global grib-api switch, not a
        # per-file setting — confirm callers expect that.
        if multi_field:
            gribapi.grib_multi_support_on()
        else:
            gribapi.grib_multi_support_off()

    def select(self, **key_val_dict):
        """Return all messages whose keys match every given key=value pair.

        Non-matching messages are released immediately to free grib handles.
        :raises gribapi.GribInternalError: re-raised after logging.
        """
        selected = []
        self.file_object.seek(0)
        while True:
            try:
                gid = gribapi.grib_new_from_file(self.file_object)
            except gribapi.GribInternalError as e:
                # Lazy %-style args avoid formatting when logging is disabled;
                # bare `raise` preserves the original traceback.
                logging.error('GRIB API Error: "%s" on file "%s"',
                              e, self.file_object.name)
                raise
            if gid is None:
                # End of file: grib_new_from_file returns None.
                break
            message = GribMessage(gid, self.lon_offset)
            if self.grib_message_is_match(message, key_val_dict):
                selected.append(message)
            else:
                message.release()
        return selected

    @staticmethod
    def grib_message_is_match(message, key_val_dict):
        """Return True iff the message's value equals the wanted value for every key."""
        return all(message.get(k) == v for k, v in key_val_dict.items())
Interface for selecting message(s) from grib files using key/value matching.
62598fad4527f215b58e9ec1
@pulumi.output_type
class GetCpCodeResult:
    """A collection of values returned by getCpCode."""

    def __init__(__self__, contract=None, contract_id=None, group=None,
                 group_id=None, id=None, name=None, product_ids=None):
        def _require_str(label, value):
            # Only truthy values are type-checked, matching generated code.
            if value and not isinstance(value, str):
                raise TypeError("Expected argument '{}' to be a str".format(label))

        def _warn_deprecated(label):
            message = 'The setting "{}" has been deprecated.'.format(label)
            warnings.warn(message, DeprecationWarning)
            pulumi.log.warn('{} is deprecated: {}'.format(label, message))

        _require_str('contract', contract)
        if contract is not None:
            _warn_deprecated('contract')
        pulumi.set(__self__, "contract", contract)

        _require_str('contract_id', contract_id)
        pulumi.set(__self__, "contract_id", contract_id)

        _require_str('group', group)
        if group is not None:
            _warn_deprecated('group')
        pulumi.set(__self__, "group", group)

        _require_str('group_id', group_id)
        pulumi.set(__self__, "group_id", group_id)

        _require_str('id', id)
        pulumi.set(__self__, "id", id)

        _require_str('name', name)
        pulumi.set(__self__, "name", name)

        if product_ids and not isinstance(product_ids, list):
            raise TypeError("Expected argument 'product_ids' to be a list")
        pulumi.set(__self__, "product_ids", product_ids)

    @property
    @pulumi.getter
    def contract(self) -> str:
        return pulumi.get(self, "contract")

    @property
    @pulumi.getter(name="contractId")
    def contract_id(self) -> str:
        return pulumi.get(self, "contract_id")

    @property
    @pulumi.getter
    def group(self) -> str:
        return pulumi.get(self, "group")

    @property
    @pulumi.getter(name="groupId")
    def group_id(self) -> str:
        return pulumi.get(self, "group_id")

    @property
    @pulumi.getter
    def id(self) -> str:
        return pulumi.get(self, "id")

    @property
    @pulumi.getter
    def name(self) -> str:
        return pulumi.get(self, "name")

    @property
    @pulumi.getter(name="productIds")
    def product_ids(self) -> Sequence[str]:
        return pulumi.get(self, "product_ids")
A collection of values returned by getCpCode.
62598fad1b99ca400228f520
class TestSetRandomKey(TestCase):
    """Test case for UserActivateKey.set_random_key_for_user."""

    @classmethod
    def setUpTestData(cls):
        # One user shared by every test in this case (created once per class).
        cls.new_user = User.objects.create(
            username="new-tester", email="new-tester@example.com", password="password"
        )

    @patch("tcms.auth.models.datetime")
    @patch("tcms.auth.models.random")
    def test_set_random_key(self, random, mock_datetime):
        # Pin "today" and the 7-day expiry window so key_expires is deterministic.
        mock_datetime.datetime.today.return_value = datetime.datetime(2017, 5, 10)
        mock_datetime.timedelta.return_value = datetime.timedelta(7)
        # Fixed pseudo-random value makes the derived activation key reproducible.
        fake_random = 0.12345678
        random.random.return_value = fake_random
        activation_key = UserActivateKey.set_random_key_for_user(self.new_user)
        self.assertEqual(self.new_user, activation_key.user)
        # Re-derive the expected key exactly as the production code does:
        # sha1(str(random))[:5] prepended to the username, hashed with sha1.
        s_random = sha1(str(fake_random).encode("utf-8")).hexdigest()[:5]
        expected_key = sha1(
            "{}{}".format(s_random, self.new_user.username).encode("utf-8")
        ).hexdigest()
        self.assertEqual(expected_key, activation_key.activation_key)
        self.assertEqual(datetime.datetime(2017, 5, 17), activation_key.key_expires)
Test case for UserActivateKey.set_random_key_for_user
62598fad0c0af96317c56363
class Prox(object):
    """Interface for the proximal operator.

    Implements prox_{eta}(x) = argmin_{y} eta f(y) + (1/2) ||y - x||_2^2;
    concrete subclasses must override :meth:`prox`.
    """

    def prox(self, x, eta):
        """Evaluate the prox operator at ``x`` with step size ``eta``."""
        raise NotImplementedError("Prox function not implemented")
A function that implements the prox operator prox_{eta}(x) = argmin_{y} eta f(y) + (1/2) ||y-x||_2^2
62598fada79ad1619776a047
class User:
    """Generates new instances of a User and keeps a class-level registry."""

    # Registry shared by all User instances.
    user_list = []

    def __init__(self, username, account, password):
        self.username = username
        self.account = account
        self.password = password

    def save_user(self):
        """Add this user to the registry."""
        User.user_list.append(self)

    def delete_user(self):
        """Remove this user from the registry."""
        User.user_list.remove(self)

    @classmethod
    def find_by_account(cls, account):
        """Return the first user with the given account name, or None."""
        for user in cls.user_list:
            if user.account == account:
                return user
        return None

    @classmethod
    def user_exists(cls, account):
        """Return True iff a user with the given account name is registered."""
        return any(user.account == account for user in cls.user_list)

    @classmethod
    def display_users(cls):
        """Return the full registry."""
        return cls.user_list

    @classmethod
    def copy_password(cls, account):
        """Copy the matching user's password to the clipboard.

        Bug fix: the original copied ``username`` despite the method name and
        its callers expecting the password. Also guards against a missing
        account instead of raising AttributeError on None.
        """
        user_found = cls.find_by_account(account)
        if user_found is not None:
            pyperclip.copy(user_found.password)
Class that generates new instances of a User
62598fad4f6381625f1994af
class DBAPIError(LunaticError):
    """Error raised for failures originating at the DB-API layer."""
    pass
DBAPI Error.
62598fad01c39578d7f12d60
class itkInPlaceImageFilterIUC3ID3(itkImageToImageFilterAPython.itkImageToImageFilterIUC3ID3):
    """Proxy of the C++ itkInPlaceImageFilter<IUC3, ID3> class (SWIG-generated).

    Wraps an ITK filter that can optionally run "in place", reusing the
    input image buffer for the output to save memory.
    """

    # SWIG object-ownership flag for the wrapped C++ instance.
    thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag')

    # Instances must be created through New(); direct construction is disabled.
    def __init__(self, *args, **kwargs): raise AttributeError("No constructor defined")
    __repr__ = _swig_repr
    InputImageDimension = _itkInPlaceImageFilterAPython.itkInPlaceImageFilterIUC3ID3_InputImageDimension
    OutputImageDimension = _itkInPlaceImageFilterAPython.itkInPlaceImageFilterIUC3ID3_OutputImageDimension

    def SetInPlace(self, *args):
        """Enable or disable in-place execution of the filter."""
        return _itkInPlaceImageFilterAPython.itkInPlaceImageFilterIUC3ID3_SetInPlace(self, *args)

    def GetInPlace(self):
        """Return whether in-place execution is currently enabled."""
        return _itkInPlaceImageFilterAPython.itkInPlaceImageFilterIUC3ID3_GetInPlace(self)

    def InPlaceOn(self):
        """Turn in-place execution on."""
        return _itkInPlaceImageFilterAPython.itkInPlaceImageFilterIUC3ID3_InPlaceOn(self)

    def InPlaceOff(self):
        """Turn in-place execution off."""
        return _itkInPlaceImageFilterAPython.itkInPlaceImageFilterIUC3ID3_InPlaceOff(self)

    def CanRunInPlace(self):
        """Return whether this filter can run in place for its current types."""
        return _itkInPlaceImageFilterAPython.itkInPlaceImageFilterIUC3ID3_CanRunInPlace(self)

    __swig_destroy__ = _itkInPlaceImageFilterAPython.delete_itkInPlaceImageFilterIUC3ID3

    def cast(*args):
        """Down-cast an ITK object reference to this concrete filter type."""
        return _itkInPlaceImageFilterAPython.itkInPlaceImageFilterIUC3ID3_cast(*args)

    cast = staticmethod(cast)

    def GetPointer(self):
        """Return the raw smart-pointer to the wrapped C++ object."""
        return _itkInPlaceImageFilterAPython.itkInPlaceImageFilterIUC3ID3_GetPointer(self)

    def New(*args, **kargs):
        """Factory: construct a new filter via the ITK template machinery."""
        obj = itkInPlaceImageFilterIUC3ID3.__New_orig__()
        import itkTemplate
        itkTemplate.New(obj, *args, **kargs)
        return obj

    New = staticmethod(New)
Proxy of C++ itkInPlaceImageFilterIUC3ID3 class
62598fad63d6d428bbee278c
class _EventDictionary(dict, EventSource):
    """A dict that fires ``onchange`` whenever its content actually changes.

    ``changed`` latches True on any mutation until ``align_version`` resets it.
    """

    changed = False

    def __init__(self, *args, **kwargs):
        super(_EventDictionary, self).__init__(*args, **kwargs)
        EventSource.__init__(self, *args, **kwargs)

    def __setitem__(self, key, value):
        # No notification (and no write) when the value is unchanged.
        if key in self and self[key] == value:
            return
        ret = super(_EventDictionary, self).__setitem__(key, value)
        self.onchange()
        return ret

    def __delitem__(self, key):
        if key not in self:
            # Deleting a missing key is a silent no-op here (unlike dict).
            return
        ret = super(_EventDictionary, self).__delitem__(key)
        self.onchange()
        return ret

    def pop(self, key, d=None):
        if key not in self:
            # Bug fix: honor the caller-supplied default, matching dict.pop,
            # instead of always returning None.
            return d
        ret = super(_EventDictionary, self).pop(key, d)
        self.onchange()
        return ret

    def clear(self):
        ret = super(_EventDictionary, self).clear()
        self.onchange()
        return ret

    def update(self, d):
        ret = super(_EventDictionary, self).update(d)
        self.onchange()
        return ret

    def ischanged(self):
        """Return True if the dictionary changed since the last align_version()."""
        return self.changed

    def align_version(self):
        """Mark the current content as the reference version."""
        self.changed = False

    @decorate_event
    def onchange(self):
        """Event fired on every content change; sets the changed flag."""
        self.changed = True
        return ()
This dictionary allows to be notified if its content is changed.
62598fadadb09d7d5dc0a56b
class SubDiagonalTensor(DiagonalTensor):
    """DiagonalTensor subclass exercising __torch_function__ dispatch on a
    subclass; differs from its parent only in its dispatch table and repr."""

    handled_functions = HANDLED_FUNCTIONS_SUB_DIAGONAL

    def __repr__(self):
        return f"SubDiagonalTensor(N={self._N}, value={self._i})"
A subclass of ``DiagonalTensor`` to test custom dispatch This class tests semantics for defining ``__torch_function__`` on a subclass of another class that defines ``__torch_function__``. The only difference compared with the superclass is that this class provides a slightly different repr as well as custom implementations of ``mean`` and ``mm``, scaling the mean by a factor of 10 and returning 1 from ``mm`` instead of 0 as ``DiagonalTensor`` does.
62598fad4e4d562566372407
@singleton
@blueprint
class FXCMPy(APIRateLimitMixin, fxcmpyapi):
    """Singleton wrapper around the fxcmpy API client with rate limiting.

    Construction pops connection options out of ``kwargs`` before forwarding
    the remainder to ``APIRateLimitMixin``; authentication failures are
    re-raised as project ``AuthenticationError``s.
    """

    def __init__(self, *args, **kwargs):
        self._create(*args, **kwargs)

    def _create(self, *args, **kwargs):
        # Connection options are popped so they are not passed to the mixin.
        access_token = kwargs.pop('access_token',None)
        server = kwargs.pop('server','real')
        proxy_url = kwargs.pop('proxy_url',None)
        proxy_port = kwargs.pop('proxy_port',None)
        proxy_type = kwargs.pop('proxy_type',None)
        log_file = kwargs.pop('log_file',None)
        log_level = kwargs.pop('log_level','error')
        config_file = kwargs.pop('config_file','')
        try:
            fxcmpyapi.__init__(self, access_token, config_file, log_file, log_level, server, proxy_url, proxy_port, proxy_type)
        except ServerError:
            # fxcmpy raises ServerError when the token is absent/invalid;
            # treat as fatal for the session.
            msg = "access token missing"
            handling = ExceptionHandling.TERMINATE
            raise AuthenticationError(msg=msg, handling=handling)
        except RequestException as e:
            # Network-level failure: recoverable, surface as a warning.
            msg = "in broker.auth: " + str(e)
            handling = ExceptionHandling.WARN
            raise AuthenticationError(msg=msg, handling=handling)
        logger = get_logger()
        if logger:
            self.logger = logger
        APIRateLimitMixin.__init__(self, *args, **kwargs)
        self._max_instruments = kwargs.pop("max_instruments",None)
        self._trading_calendar = kwargs.get("trading_calendar",None)
        # Defaults when the mixin did not receive explicit limits:
        # 2 calls per 1-second period, at most 50 instruments.
        if not self._rate_limit:
            self._rate_limit = 2
            self._rate_limit_count = self._rate_limit
        if not self._rate_period:
            self._rate_period = 1
        if not self._max_instruments:
            self._max_instruments = 50
        self._rate_limit_since = None
        if not self._trading_calendar:
            # A trading calendar is mandatory for this broker.
            raise ValidationError(msg="missing calendar")

    def __str__(self):
        return f"FXCMPy [{_api_version_}]"

    def __repr__(self):
        return self.__str__()
fxcmpy API client modified to force a singleton (and to print pretty).
62598fad3317a56b869be53b
class TestFile(object):
    """Builds a TDMS file byte stream in memory for use in tests."""

    def __init__(self):
        self.file = tempfile.TemporaryFile()
        self.data = bytes()

    def add_segment(self, metadata, data, toc=None, incomplete=False):
        """Append one segment (optional lead-in + metadata + raw data)."""
        metadata = self.to_bytes(metadata)
        data = self.to_bytes(data)
        if toc is None:
            lead_in = b''
        else:
            lead_in = b'TDSm'
            # Bit position of each ToC flag in the lead-in mask.
            flag_bits = {
                "kTocMetaData": 1,
                "kTocNewObjList": 2,
                "kTocRawData": 3,
                "kTocInterleavedData": 5,
                "kTocBigEndian": 6,
                "kTocDAQmxRawData": 7,
            }
            toc_mask = long(0)
            for flag, bit in flag_bits.items():
                if flag in toc:
                    toc_mask = toc_mask | long(1) << bit
            lead_in += struct.pack('<i', toc_mask)
            # TDMS version number field.
            lead_in += self.to_bytes("69 12 00 00")
            next_segment_offset = len(metadata) + len(data)
            raw_data_offset = len(metadata)
            if incomplete:
                # All-ones offset marks a truncated segment.
                lead_in += self.to_bytes('FF' * 8)
            else:
                lead_in += struct.pack('<Q', next_segment_offset)
            lead_in += struct.pack('<Q', raw_data_offset)
        self.data += lead_in + metadata + data

    def to_bytes(self, hex_data):
        """Convert a whitespace-separated hex string into raw bytes."""
        cleaned = hex_data.replace(" ", "").replace("\n", "")
        return binascii.unhexlify(cleaned.encode('utf-8'))

    def load(self, *args, **kwargs):
        """Write the accumulated bytes to the temp file and parse it as TDMS."""
        self.file.write(self.data)
        self.file.seek(0)
        return tdms.TdmsFile(self.file, *args, **kwargs)
Generate a TDMS file for testing
62598fad57b8e32f5250810c
class CURegionField(Field):
    """Form field for a Cuban region.

    Accepts full names and abbreviations (validated against
    ``REGION_NORMALIZED``) and normalizes the input to the standard
    abbreviation.
    """

    default_error_messages = {
        'invalid': _('Enter a Cuban region.'),
    }

    def clean(self, value):
        super(CURegionField, self).clean(value)
        if value in self.empty_values:
            return ''
        normalized = value.strip().lower()
        try:
            return REGION_NORMALIZED[normalized]
        except KeyError:
            pass
        raise ValidationError(self.error_messages['invalid'])
A form field for a Cuban region. The input is validated against a dictionary which includes names and abbreviations. It normalizes the input to the standard abbreviation for the given region. .. versionadded:: 1.6
62598fad283ffb24f3cf386e
class _EmissionManager(_interfaces.EmissionManager):
    """An implementation of _interfaces.EmissionManager.

    Forwards emitted values to the transmission manager under a shared lock;
    any emission after completion aborts the whole operation.
    """

    def __init__(
            self, lock, failure_outcome, termination_manager,
            transmission_manager):
        # lock is shared with the sibling managers; all state transitions
        # happen while holding it.
        self._lock = lock
        self._failure_outcome = failure_outcome
        self._termination_manager = termination_manager
        self._transmission_manager = transmission_manager
        # Set later via set_ingestion_manager_and_expiration_manager to break
        # the construction-time circular dependency.
        self._ingestion_manager = None
        self._expiration_manager = None
        self._emission_complete = False

    def set_ingestion_manager_and_expiration_manager(
            self, ingestion_manager, expiration_manager):
        self._ingestion_manager = ingestion_manager
        self._expiration_manager = expiration_manager

    def _abort(self):
        # Propagate the failure outcome to every cooperating manager.
        # Caller must hold self._lock.
        self._termination_manager.abort(self._failure_outcome)
        self._transmission_manager.abort(self._failure_outcome)
        self._ingestion_manager.abort()
        self._expiration_manager.abort()

    def consume(self, value):
        with self._lock:
            if self._emission_complete:
                # Emitting after terminate() is a protocol violation: abort.
                self._abort()
            else:
                self._transmission_manager.inmit(value, False)

    def terminate(self):
        with self._lock:
            # Idempotent: a second terminate() is a no-op.
            if not self._emission_complete:
                self._termination_manager.emission_complete()
                self._transmission_manager.inmit(None, True)
                self._emission_complete = True

    def consume_and_terminate(self, value):
        with self._lock:
            if self._emission_complete:
                self._abort()
            else:
                self._termination_manager.emission_complete()
                self._transmission_manager.inmit(value, True)
                self._emission_complete = True
An implementation of _interfaces.EmissionManager.
62598fad76e4537e8c3ef58f
class PJsuaAccount(object):
    """Wrapper for a pj.Account object.

    Holds the account itself plus a dictionary of the account's buddies,
    keyed by buddy name.
    """

    def __init__(self, account, pj_lib):
        self.account = account
        self.pj_lib = pj_lib
        self.buddies = {}

    def add_buddies(self, buddy_cfg):
        """Register each configured buddy, skipping entries missing name or uri."""
        for entry in buddy_cfg:
            buddy_name = entry.get('name')
            if not buddy_name:
                LOGGER.warning("Unable to add buddy with no name")
                continue
            buddy_uri = entry.get('uri')
            if not buddy_uri:
                LOGGER.warning("Unable to add buddy %s. No URI", buddy_name)
                continue
            self.buddies[buddy_name] = self.account.add_buddy(buddy_uri)
Wrapper for pj.Account object This object contains a reference to a pj.Account and a dictionary of the account's buddies, keyed by buddy name
62598fade76e3b2f99fd8a18
class UsageExpectation:
    """Tracks which methods a caller is currently allowed to invoke.

    ``allowed_calls`` maps bound methods to a human-readable reason; calling
    ``enforce`` from anywhere else raises ``UnexpectedMethodCall``.
    """

    @classmethod
    def of(cls, obj):
        """Return (lazily creating) the expectation attached to ``obj``."""
        try:
            return obj.__usage_expectation
        except AttributeError:
            ua = cls(type(obj))
            obj.__usage_expectation = ua
            return ua

    def __init__(self, cls):
        self.cls = cls
        # Maps allowed function/method -> reason string.
        self.allowed_calls = {}

    def enforce(self, back=1):
        """Verify the caller ``back`` frames up is one of the allowed calls.

        :raises UnexpectedMethodCall: when the caller is not allowed.
        """
        # Compare code objects, unwrapping decorated functions so that the
        # decorated and undecorated forms are treated alike.
        allowed_code = frozenset(
            func.__wrapped__.__code__ if hasattr(func, '__wrapped__')
            else func.__code__
            for func in self.allowed_calls)
        caller_frame = inspect.stack(0)[back][0]
        if back > 1:
            # Also look one frame closer, to detect an off-by-one `back`.
            alt_caller_frame = inspect.stack(0)[back - 1][0]
        else:
            alt_caller_frame = None
        _logger.debug("Caller code: %r", caller_frame.f_code)
        _logger.debug("Alternate code: %r",
                      alt_caller_frame.f_code if alt_caller_frame else None)
        _logger.debug("Allowed code: %r", allowed_code)
        try:
            if caller_frame.f_code in allowed_code:
                return
            if (alt_caller_frame is not None
                    and alt_caller_frame.f_code in allowed_code):
                # Caller passed a `back` one too large; warn but allow.
                # NOTE(review): message reads "Please back={}" — looks like a
                # missing word ("use back="?); confirm upstream wording.
                warnings.warn(
                    "Please back={}. Properly constructed decorators are"
                    " automatically handled and do not require the use of the"
                    " back argument.".format(back - 1),
                    OffByOneBackWarning, back)
                return
            fn_name = caller_frame.f_code.co_name
            # Build (name, reason) pairs, sorted by name, for the error.
            allowed_undecorated_calls = {
                func.__wrapped__ if hasattr(func, '__wrapped__') else func: msg
                for func, msg in self.allowed_calls.items()}
            allowed_pairs = tuple(
                (fn.__code__.co_name, why)
                for fn, why in sorted(
                    allowed_undecorated_calls.items(),
                    key=lambda fn_why: fn_why[0].__code__.co_name))
            raise UnexpectedMethodCall(self.cls, fn_name, allowed_pairs)
        finally:
            # Break reference cycles created by holding frame objects.
            del caller_frame
            if alt_caller_frame is not None:
                del alt_caller_frame
Class representing API usage expectation at any given time. Expectations help formalize the way developers are expected to use some set of classes, methods and other instruments. Technically, they also encode the expectations and can raise :class:`DeveloperError`. :attr allowed_calls: A dictionary mapping from bound methods / functions to the use case explaining how that method can be used at the given moment. This works best if the usage is mostly linear (call foo.a(), then foo.b(), then foo.c()). This attribute can be set directly for simplicity. :attr cls: The class of objects this expectation object applies to.
62598fad4428ac0f6e658506
class MinusStrandAlgebra(StrandAlgebra):
    """Strand algebra for the minus theory (multiplicity-one strands).

    Generators and multiplication mirror StrandAlgebra but with the minus
    variants of the strand/diagram classes and a trivial differential.
    """

    def __init__(self, ring, pmc):
        # idem_size equals the genus; strands are restricted to
        # multiplicity one (mult_one = True).
        StrandAlgebra.__init__(self, ring, pmc, pmc.genus, mult_one = True)

    @memorize
    def getGenerators(self):
        """Enumerate all generators: idempotents plus single-strand diagrams."""
        # Currently only implemented for the genus-1 split PMC.
        assert self.pmc == splitPMC(1)
        n = 4
        algebra = MinusStrandAlgebra(F2, self.pmc)
        result = []
        idems = self.pmc.getIdempotents(algebra.idem_size)
        # Idempotent generators (empty strand set).
        for idem in idems:
            result.append(MinusStrandDiagram(algebra, idem, []))
        # Single-strand generators for every (start, end) pair compatible
        # with some left idempotent.
        for start in range(n):
            for end in range(n):
                strands = MinusStrands(self.pmc, [(start, end)])
                for l_idem in idems:
                    if strands.leftCompatible(l_idem):
                        result.append(
                            MinusStrandDiagram(algebra, l_idem, strands))
        return result

    @memorize
    def diff(self, gen):
        """The differential vanishes in the minus theory."""
        return E0

    @memorize
    def multiply(self, gen1, gen2):
        """Concatenate strand diagrams; return E0 when not composable."""
        if not isinstance(gen1, MinusStrandDiagram):
            return NotImplemented
        if not isinstance(gen2, MinusStrandDiagram):
            return NotImplemented
        assert gen1.parent == self and gen2.parent == self, "Algebra not compatible."
        # Idempotents must match at the juncture.
        if gen1.right_idem != gen2.left_idem:
            return E0
        # Multiplicity-one condition: total local multiplicity stays <= 1.
        total_mult = [m1+m2 for m1, m2 in zip(gen1.multiplicity,
                                              gen2.multiplicity)]
        if not all([x <= 1 for x in total_mult]):
            return E0
        pmc = gen1.pmc
        new_strands = []
        strands_right = list(gen2.strands)
        # Try to join each left strand to a right strand starting at the
        # matching point of the same idempotent pair.
        for sd in gen1.strands:
            mid_idem = pmc.pairid[sd[1]]
            possible_match = [sd2 for sd2 in strands_right
                              if pmc.pairid[sd2[0]] == mid_idem]
            if len(possible_match) == 0:
                new_strands.append(sd)
            else:
                sd2 = possible_match[0]
                if sd2[0] != sd[1]:
                    # Same pair but not the same point: not composable.
                    return E0
                else:
                    new_strands.append((sd[0], sd2[1]))
                    strands_right.remove(sd2)
        # Unmatched right strands pass through unchanged.
        new_strands.extend(strands_right)
        mult_term = MinusStrandDiagram(self, gen1.left_idem, new_strands,
                                       gen2.right_idem)
        return mult_term.elt()
The corresponding Strands class for the strand algebra in the minus theory.
62598fadeab8aa0e5d30bd6e
class Cloudfiles(object):
    """Cloudfiles client.

    Resolves the service endpoint from the identity catalog when one is not
    given explicitly, then builds the underlying REST client.
    """

    def __init__(self, region, identity_client, servicenet=True, endpoint=None,
                 timeout=10, retries=1, keepalive=True, proxy=None,
                 rest_client_class=None, debug_level=0):
        self.identity_client = identity_client
        if endpoint is None:
            # Look up Cloud Files in the catalog; authenticate once and
            # retry if the catalog has not been populated yet.
            service = self.identity_client.catalog.get_cloud_files()
            if service is None:
                self.identity_client.authenticate()
                service = self.identity_client.catalog.get_cloud_files()
            region_endpoint = service.endpoints.get_endpoint(region)
            # ServiceNet traffic uses the internal URL (no bandwidth charge).
            if servicenet:
                endpoint = region_endpoint.internal_url
            else:
                endpoint = region_endpoint.public_url
        self.endpoint = endpoint
        self.timeout = timeout
        self.retries = retries
        self.keepalive = keepalive
        self.debug_level = debug_level
        self.rest_client = rest_client_class(
            endpoint=endpoint,
            timeout=timeout,
            retries=retries,
            keepalive=keepalive,
            proxy=proxy,
            authenticator=self.identity_client,
            debug_level=debug_level)

    def send_request(self, *args, **kwargs):
        """Delegate to the underlying REST client."""
        return self.rest_client.send_request(*args, **kwargs)
Cloudfiles Client
62598fad7b25080760ed7491
class ElectricCar(Car):
    """Represents aspects of a car specific to electric vehicles."""

    def __init__(self, make, model, year):
        """Initialize the base Car attributes, then attach a Battery."""
        super().__init__(make, model, year)
        self.battery = Battery()

    def describe_battery(self):
        """Print the battery capacity.

        Bug fix: the capacity lives on the Battery instance
        (``self.battery.battery_size``); the original read the non-existent
        ``self.battery_size`` and raised AttributeError.
        """
        print(f"This car has a {self.battery.battery_size}-kWh battery.")

    def fill_gas_tank(self):
        """Electric cars have no gas tank."""
        print("This car doesn't need a gas tank!")
Represents aspects of a car specific to electric vehicles.
62598fad67a9b606de545fae
class GetAccountInfo(Choreography):
    """Temboo Choreography for /Library/Google/Documents/GetAccountInfo."""

    def __init__(self, temboo_session):
        """Create a new instance of the GetAccountInfo Choreography.

        temboo_session must contain a valid set of Temboo credentials.
        """
        Choreography.__init__(self, temboo_session, '/Library/Google/Documents/GetAccountInfo')

    def new_input_set(self):
        # Factory for this choreo's input container.
        return GetAccountInfoInputSet()

    def _make_result_set(self, result, path):
        # Wrap a raw execution result in the typed result set.
        return GetAccountInfoResultSet(result, path)

    def _make_execution(self, session, exec_id, path):
        # Handle for an in-flight execution of this choreo.
        return GetAccountInfoChoreographyExecution(session, exec_id, path)
Create a new instance of the GetAccountInfo Choreography. A TembooSession object, containing a valid set of Temboo credentials, must be supplied.
62598fad5fcc89381b26613d
class TimerWriteLockRef:
    """Handle granting temporary write access to a timer.

    Dispose this object to release the write lock.
    """

    def __init__(self, ptr):
        # Opaque pointer/handle to the underlying lock.
        self.ptr = ptr
A Timer Write Lock allows temporary write access to a timer. Dispose this to release the write lock.
62598fad090684286d5936cd
class IsDataEntryAdmin(BasePermission):
    """Permission granting API access only to data-entry admins."""

    message = "You don't have enough privileges to access this API."

    def has_permission(self, request, view):
        """Return True iff the requesting user is a registered DataEntryAdmin."""
        if not request.user:
            return False
        # exists() already returns a bool; no explicit True/False branches needed.
        return DataEntryAdmin.objects.filter(user=request.user).exists()
Permission defined for checking the authenticated user is data entry admin or not
62598fad851cf427c66b829e
class NodeMeta(type):
    """Metaclass that registers every Node class in ``node_registry``.

    Registration is keyed by the lowercased class name and happens at
    class-creation time.
    """

    def __new__(meta, name, bases, dct):
        # No creation-time customization; delegate straight to type.__new__.
        return super(NodeMeta, meta).__new__(meta, name, bases, dct)

    def __init__(cls, name, bases, dct):
        super(NodeMeta, cls).__init__(name, bases, dct)
        # Record the freshly created class so it can be looked up by name.
        node_registry[name.lower()] = cls
Node metaclass used to keep a registry of Node classes
62598fada219f33f346c67f8
class write_response4(BaseObj): <NEW_LINE> <INDENT> _strfmt1 = "{0:?stid\:{0} }len:{1:umax64} verf:{3} {2}" <NEW_LINE> _attrlist = ("stateid", "count", "committed", "verifier") <NEW_LINE> def __init__(self, unpack): <NEW_LINE> <INDENT> self.stateid = unpack.unpack_conditional(stateid4) <NEW_LINE> self.count = length4(unpack) <NEW_LINE> self.committed = stable_how4(unpack) <NEW_LINE> self.verifier = verifier4(unpack)
struct write_response4 { stateid4 stateid<1>; length4 count; stable_how4 committed; verifier4 verifier; };
62598faddd821e528d6d8f17
class Change(Computation): <NEW_LINE> <INDENT> def __init__(self, before_column_name, after_column_name): <NEW_LINE> <INDENT> self._before_column_name = before_column_name <NEW_LINE> self._after_column_name = after_column_name <NEW_LINE> <DEDENT> def _validate(self, table): <NEW_LINE> <INDENT> before_column = table.columns[self._before_column_name] <NEW_LINE> after_column = table.columns[self._after_column_name] <NEW_LINE> for data_type in (Number, Date, DateTime, TimeDelta): <NEW_LINE> <INDENT> if isinstance(before_column.data_type, data_type): <NEW_LINE> <INDENT> if not isinstance(after_column.data_type, data_type): <NEW_LINE> <INDENT> raise ValueError('Specified columns must be of the same type') <NEW_LINE> <DEDENT> if before_column.aggregate(HasNulls()): <NEW_LINE> <INDENT> warn_null_calculation(self, before_column) <NEW_LINE> <DEDENT> if after_column.aggregate(HasNulls()): <NEW_LINE> <INDENT> warn_null_calculation(self, after_column) <NEW_LINE> <DEDENT> return before_column <NEW_LINE> <DEDENT> <DEDENT> raise DataTypeError('Change before and after columns must both contain data that is one of: Number, Date, DateTime or TimeDelta.') <NEW_LINE> <DEDENT> def get_computed_data_type(self, table): <NEW_LINE> <INDENT> before_column = self._validate(table) <NEW_LINE> if isinstance(before_column.data_type, Date): <NEW_LINE> <INDENT> return TimeDelta() <NEW_LINE> <DEDENT> elif isinstance(before_column.data_type, DateTime): <NEW_LINE> <INDENT> return TimeDelta() <NEW_LINE> <DEDENT> elif isinstance(before_column.data_type, TimeDelta): <NEW_LINE> <INDENT> return TimeDelta() <NEW_LINE> <DEDENT> elif isinstance(before_column.data_type, Number): <NEW_LINE> <INDENT> return Number() <NEW_LINE> <DEDENT> <DEDENT> def prepare(self, table): <NEW_LINE> <INDENT> self._validate(table) <NEW_LINE> <DEDENT> def run(self, row): <NEW_LINE> <INDENT> before = row[self._before_column_name] <NEW_LINE> after = row[self._after_column_name] <NEW_LINE> if before and after: <NEW_LINE> <INDENT> 
return after - before <NEW_LINE> <DEDENT> return None
Computes change between two columns.
62598fad32920d7e50bc6036
class CLI(argparse.ArgumentParser): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.add_platform_arg() <NEW_LINE> self.add_recipients_arg() <NEW_LINE> self.add_content_arg() <NEW_LINE> self.add_sender_arg() <NEW_LINE> <DEDENT> def add_platform_arg(self): <NEW_LINE> <INDENT> self.add_argument( 'platform', help='The platform over which to send the message' ) <NEW_LINE> <DEDENT> def add_content_arg(self): <NEW_LINE> <INDENT> self.add_argument( 'content', help='The text to use as the body of the message' ) <NEW_LINE> <DEDENT> def add_recipients_arg(self): <NEW_LINE> <INDENT> self.add_argument( 'recipients', help='List of recipients to send to', nargs='*' ) <NEW_LINE> <DEDENT> def add_sender_arg(self): <NEW_LINE> <INDENT> self.add_argument( '-s', '--sender', help='The JSON data for the sender' ) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def fill_in_args(cls, platform_name, sender_json, recipients_text, content): <NEW_LINE> <INDENT> if sender_json is None: <NEW_LINE> <INDENT> logging.debug('Sender not specified in command line args. ' 'Attempting to load from environment variables.') <NEW_LINE> sender_env_name = 'comm_{}_sender'.format(platform_name).upper() <NEW_LINE> sender_path = getenv(sender_env_name) <NEW_LINE> if sender_path is not None: <NEW_LINE> <INDENT> with open(sender_path, 'r') as f: <NEW_LINE> <INDENT> sender_json = f.read() <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError('No sender specified. 
Please specify one ' 'via command line or environment variables.') <NEW_LINE> <DEDENT> <DEDENT> return { 'platform_name': platform_name, 'sender_json': sender_json, 'recipients_text': recipients_text, 'content': content } <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def agnostic_parse(cls, platform_name, sender_json, recipients_text, content): <NEW_LINE> <INDENT> sender_data = json.loads(sender_json) <NEW_LINE> recipients_data = [] <NEW_LINE> for recipient_text in recipients_text: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> recipient_data = json.loads(recipient_text) <NEW_LINE> <DEDENT> except json.JSONDecodeError: <NEW_LINE> <INDENT> logging.debug('Recipient text \'{}\' could not be decoded as ' 'JSON and will be passed as a string.' .format(recipient_text), exc_info=True) <NEW_LINE> recipient_data = recipient_text <NEW_LINE> <DEDENT> recipients_data.append(recipient_data) <NEW_LINE> <DEDENT> return { 'platform_name': platform_name, 'sender_data': sender_data, 'recipients_data': recipients_data, 'content': content } <NEW_LINE> <DEDENT> def get_args(self): <NEW_LINE> <INDENT> args = self.parse_args() <NEW_LINE> return self.agnostic_parse(**self.fill_in_args(args.platform, args.sender, args.recipients, args.content))
The main utility for a command line interface for generalized mass-messaging, capable of fully processing all external resources used by the script.
62598fade5267d203ee6b8ec
class Recording(object): <NEW_LINE> <INDENT> def __init__(self, channel_id, date, start_time, duration, file, is_episode): <NEW_LINE> <INDENT> date_pattern = "%Y-%m-%d" <NEW_LINE> self.channel_id = channel_id <NEW_LINE> self.date = date <NEW_LINE> self.start_time = start_time <NEW_LINE> self.duration = duration <NEW_LINE> self.file = file <NEW_LINE> self.is_episode = is_episode <NEW_LINE> self.id = None <NEW_LINE> self.file_size = 0 <NEW_LINE> <DEDENT> def __repr__ (self): <NEW_LINE> <INDENT> if self.id is not None: <NEW_LINE> <INDENT> return ('Recording:: ID:{} channel:{} date:{} start time:{} duration:{} file:{} is_episode:{}' .format(self.id, self.channel_id, self.date, self.start_time, self.duration, self.file, self.is_episode)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return ('Recording:: channel:{} date:{} start time:{} duration:{} file:{} is_episode:{}' .format(self.channel_id, self.date, self.start_time, self.duration, self.file, self.is_episode))
Describes an instance of a channels recording.
62598fad99cbb53fe6830eba
class TwitterClient(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.auth = OAuthHandler(consumer_key, consumer_secret) <NEW_LINE> self.auth.set_access_token(access_token, access_token_secret) <NEW_LINE> self.api = tweepy.API(self.auth) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> print("Error: Authentication Failed") <NEW_LINE> <DEDENT> <DEDENT> def clean_tweet(self, tweet): <NEW_LINE> <INDENT> return ' '.join(re.sub("(@[A-Za-z0-9]+)|([^0-9A-Za-z \t])|(\w+:\/\/\S+)", " ", tweet).split()) <NEW_LINE> <DEDENT> def get_tweet_sentiment(self, tweet): <NEW_LINE> <INDENT> analysis = TextBlob(self.clean_tweet(tweet)) <NEW_LINE> if analysis.sentiment.polarity > 0: <NEW_LINE> <INDENT> return 'positive' <NEW_LINE> <DEDENT> elif analysis.sentiment.polarity == 0: <NEW_LINE> <INDENT> return 'neutral' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return 'negative' <NEW_LINE> <DEDENT> <DEDENT> def get_tweets(self, query, count = 10): <NEW_LINE> <INDENT> tweets = [] <NEW_LINE> try: <NEW_LINE> <INDENT> fetched_tweets = self.api.search(q = query, count = count) <NEW_LINE> for tweet in fetched_tweets: <NEW_LINE> <INDENT> parsed_tweet = {} <NEW_LINE> parsed_tweet['text'] = tweet.text <NEW_LINE> parsed_tweet['sentiment'] = self.get_tweet_sentiment(tweet.text) <NEW_LINE> if tweet.retweet_count > 0: <NEW_LINE> <INDENT> if parsed_tweet not in tweets: <NEW_LINE> <INDENT> tweets.append(parsed_tweet) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> tweets.append(parsed_tweet) <NEW_LINE> <DEDENT> <DEDENT> return tweets <NEW_LINE> <DEDENT> except tweepy.TweepError as e: <NEW_LINE> <INDENT> print("Error : " + str(e))
Generic Twitter Class for sentiment analysis.
62598fad32920d7e50bc6037
class RMarkdownCellReader(MarkdownCellReader): <NEW_LINE> <INDENT> comment = '' <NEW_LINE> start_code_re = re.compile(r"^```{(.*)}\s*$") <NEW_LINE> default_language = 'R' <NEW_LINE> default_comment_magics = True <NEW_LINE> def options_to_metadata(self, options): <NEW_LINE> <INDENT> return rmd_options_to_metadata(options) <NEW_LINE> <DEDENT> def uncomment_code_and_magics(self, lines): <NEW_LINE> <INDENT> if self.cell_type == 'code': <NEW_LINE> <INDENT> if is_active(self.ext, self.metadata) and self.comment_magics: <NEW_LINE> <INDENT> uncomment_magic(lines, self.language or self.default_language) <NEW_LINE> <DEDENT> <DEDENT> unescape_code_start(lines, self.ext, self.language or self.default_language) <NEW_LINE> return lines
Read notebook cells from R Markdown notebooks
62598fadd486a94d0ba2bfb1
class IMiPagoAdapter(Interface): <NEW_LINE> <INDENT> pass
Adapter for payments with MiPago
62598fad7c178a314d78d47f
class BackendMock(GeolocationBackend): <NEW_LINE> <INDENT> def geolocate(self): <NEW_LINE> <INDENT> self._raw_data = { "continent": "Europe", "country_code": "49", "name": "Germany", "geo": { "latitude": 51.165691, "longitude": 10.451526 }, "currency_code": "EUR" } <NEW_LINE> <DEDENT> def _parse(self): <NEW_LINE> <INDENT> self._continent = self._raw_data.get('continent') <NEW_LINE> self._country = { 'code': self._raw_data.get('alpha2'), 'name': self._raw_data.get('name') } <NEW_LINE> self._geo_data = self._raw_data.get('geo')
BackendMock backend implementation.
62598fad2c8b7c6e89bd37a8
class TestUrl(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def testUrl(self): <NEW_LINE> <INDENT> pass
Url unit test stubs
62598fad3d592f4c4edbaeae
class ImportCSV(Operator, ImportHelper): <NEW_LINE> <INDENT> bl_idname = "import_scene.csv" <NEW_LINE> bl_label = "Import Statistical Data" <NEW_LINE> filename_ext = {".csv", ".tsv"} <NEW_LINE> filter_glob = StringProperty( default="*.csv;*.tsv", options={'HIDDEN'}, ) <NEW_LINE> _parent = None <NEW_LINE> def execute(self, context): <NEW_LINE> <INDENT> w = context.window <NEW_LINE> w.cursor_set('WAIT') <NEW_LINE> if not self._parent: <NEW_LINE> <INDENT> self._parent = bpy.context.active_object <NEW_LINE> <DEDENT> bpy.ops.object.select_all(action='DESELECT') <NEW_LINE> bpy.context.view_layer.objects.active = None <NEW_LINE> if self._parent.children: <NEW_LINE> <INDENT> print("delete the children!") <NEW_LINE> for child in self._parent.children: <NEW_LINE> <INDENT> child.select_set(state=True) <NEW_LINE> <DEDENT> bpy.ops.object.delete() <NEW_LINE> <DEDENT> self._parent.select_set(state=True) <NEW_LINE> bpy.context.view_layer.objects.active = self._parent <NEW_LINE> filepath = None <NEW_LINE> if self.filepath: <NEW_LINE> <INDENT> filepath = self.filepath <NEW_LINE> self._parent.import_csv.filepath = filepath <NEW_LINE> <DEDENT> elif self._parent.import_csv.filepath: <NEW_LINE> <INDENT> filepath = self._parent.import_csv.filepath <NEW_LINE> <DEDENT> if filepath: <NEW_LINE> <INDENT> reader = CSVReader() <NEW_LINE> dataStore = reader.parse_csv(context, filepath) <NEW_LINE> vis_index = self._parent.import_csv.vis_index <NEW_LINE> visualizer = self._parent.import_csv.visualizers[vis_index] <NEW_LINE> visualization = visualizer.visualize(dataStore) <NEW_LINE> for i in range(len(visualization)): <NEW_LINE> <INDENT> visualization[i].parent = self._parent <NEW_LINE> visualization[i].matrix_parent_inverse = self._parent.matrix_world.inverted() <NEW_LINE> <DEDENT> <DEDENT> bpy.context.view_layer.objects.active = self._parent <NEW_LINE> self._parent.select_set(state=True) <NEW_LINE> w.cursor_set('DEFAULT') <NEW_LINE> return {'FINISHED'} <NEW_LINE> <DEDENT> def invoke(self, 
context, event): <NEW_LINE> <INDENT> if bpy.context.active_object and bpy.context.active_object.visualization: <NEW_LINE> <INDENT> self._parent = bpy.context.active_object <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> bpy.ops.object.select_all(action='DESELECT') <NEW_LINE> bpy.context.view_layer.objects.active = None <NEW_LINE> bpy.ops.object.add(radius=0.25, location=(0,0,0)) <NEW_LINE> bpy.context.active_object.visualization = True <NEW_LINE> bpy.context.active_object.name = 'VisualizationEmpty' <NEW_LINE> self._parent = bpy.context.active_object <NEW_LINE> <DEDENT> ImportHelper.invoke(self, context, event) <NEW_LINE> return {'RUNNING_MODAL'} <NEW_LINE> <DEDENT> def draw(self, context): <NEW_LINE> <INDENT> layout = self.layout <NEW_LINE> props = self._parent.import_csv <NEW_LINE> box = layout.box() <NEW_LINE> box.prop(props, 'type') <NEW_LINE> if (props.visualizers): <NEW_LINE> <INDENT> props.visualizers[props.vis_index].draw(layout, context, props.visprops)
Imports statistical data (.csv) to visualize as graphs.
62598fad4527f215b58e9ec4
class RedisQu: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> import redis <NEW_LINE> pool = redis.ConnectionPool(**config.REDIS_CONF) <NEW_LINE> self.r = redis.StrictRedis(connection_pool=pool) <NEW_LINE> <DEDENT> def get(self, qu_name): <NEW_LINE> <INDENT> while 1: <NEW_LINE> <INDENT> rev = self.r.brpop(qu_name, 0) <NEW_LINE> raw_ctx = rev[1] <NEW_LINE> ctx = pickle.loads(raw_ctx) <NEW_LINE> yield ctx <NEW_LINE> <DEDENT> <DEDENT> def put(self, qu_name, ctx): <NEW_LINE> <INDENT> self.r.lpush(qu_name, pickle.dumps(ctx))
redis queue 封装
62598fad3539df3088ecc295
class ECQTestCase(PloneTestCase): <NEW_LINE> <INDENT> def createEmptyQuiz(self): <NEW_LINE> <INDENT> portal = self.portal <NEW_LINE> dummy = createObject(self.portal, 'ECQuiz', 'dummy') <NEW_LINE> portal.dummy = dummy <NEW_LINE> setProps(dummy, (('instantFeedback', False), ('allowRepetition', False), ('onePerPage', False), ('onePerPageNav', False), ('scoringFunction', 'cruel'), ('directions', 'Please answer these questions!'), ('randomOrder', False), ('numberOfRandomQuestions', 0),)) <NEW_LINE> return dummy <NEW_LINE> <DEDENT> def createDummy(self): <NEW_LINE> <INDENT> portal = self.portal <NEW_LINE> dummy = self.createEmptyQuiz() <NEW_LINE> mcq = createObject(dummy, 'ECQMCQuestion', 'mcq') <NEW_LINE> setProps(mcq, (('allowMultipleSelection', False), ('randomOrder', False), ('numberOfRandomAnswers', 0), ('points', 666), ('tutorGraded', False), )) <NEW_LINE> for uid, comm, corr, answ in (('mca1', 'Correct comment', True, 'This is correct.'), ('mca2', 'Wrong comment', False, 'This is wrong.' ), ): <NEW_LINE> <INDENT> mca = createObject(mcq, 'ECQMCAnswer', uid) <NEW_LINE> setProps(mca, (('comment', comm), ('correct', corr), ('answer', answ), )) <NEW_LINE> <DEDENT> wtool = portal.portal_workflow <NEW_LINE> wtool.doActionFor(dummy, 'publish') <NEW_LINE> return dummy
Base class for integration tests for the 'ECQuiz' product. This may provide specific set-up and tear-down operations, or provide convenience methods.
62598fad2ae34c7f260ab0c4
class IpPort: <NEW_LINE> <INDENT> QUALNAME = "pyrogram.raw.base.IpPort" <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> raise TypeError("Base types can only be used for type checking purposes: " "you tried to use a base type instance as argument, " "but you need to instantiate one of its constructors instead. " "More info: https://docs.pyrogram.org/telegram/base/ip-port")
This base type has 2 constructors available. Constructors: .. hlist:: :columns: 2 - :obj:`IpPort <pyrogram.raw.types.IpPort>` - :obj:`IpPortSecret <pyrogram.raw.types.IpPortSecret>`
62598fadcc40096d6161a1cc
class HueyRedisProc(ClientProc): <NEW_LINE> <INDENT> name = None <NEW_LINE> queues = [] <NEW_LINE> blocking = True <NEW_LINE> connection_params = {} <NEW_LINE> def __init__(self, connection_params=None, blocking=None, *args, **kwargs): <NEW_LINE> <INDENT> super(HueyRedisProc, self).__init__(*args, **kwargs) <NEW_LINE> if connection_params is not None: <NEW_LINE> <INDENT> self.connection_params = connection_params <NEW_LINE> <DEDENT> if blocking is not None: <NEW_LINE> <INDENT> self.blocking = blocking <NEW_LINE> <DEDENT> if self.blocking: <NEW_LINE> <INDENT> self.client_cls = RedisBlockingQueue <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.client_cls = RedisQueue <NEW_LINE> <DEDENT> <DEDENT> def client(self, queue): <NEW_LINE> <INDENT> if isinstance(queue, RedisQueue): <NEW_LINE> <INDENT> return queue <NEW_LINE> <DEDENT> return self.client_cls(queue, **self.connection_params) <NEW_LINE> <DEDENT> def quantity(self): <NEW_LINE> <INDENT> return sum([len(client) for client in self.clients])
A proc class for the redis backend of the `Huey <http://huey.readthedocs.org/>`_ library. :param name: the name of the proc (required) :param queues: list of queue names to check (required) :param blocking: whether to use the blocking or non-blocking client (optional) :param connection_params: the connection parameter to use by default (optional) :type name: str :type queues: str or list :type blocking: bool :type connection_params: dict Example:: from hirefire.procs.huey import HueyRedisProc from mysite.config import queue class WorkerHueyRedisProc(HueyRedisProc): name = 'worker' queues = [queue] connection_params = { 'host': 'localhost', 'port': 6379, 'db': 0, }
62598fad97e22403b383aef0
class DDL(DDLElement): <NEW_LINE> <INDENT> __visit_name__ = "ddl" <NEW_LINE> @util.deprecated_params( bind=( "2.0", "The :paramref:`_ddl.DDL.bind` argument is deprecated and " "will be removed in SQLAlchemy 2.0.", ), ) <NEW_LINE> def __init__(self, statement, context=None, bind=None): <NEW_LINE> <INDENT> if not isinstance(statement, util.string_types): <NEW_LINE> <INDENT> raise exc.ArgumentError( "Expected a string or unicode SQL statement, got '%r'" % statement ) <NEW_LINE> <DEDENT> self.statement = statement <NEW_LINE> self.context = context or {} <NEW_LINE> self._bind = bind <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "<%s@%s; %s>" % ( type(self).__name__, id(self), ", ".join( [repr(self.statement)] + [ "%s=%r" % (key, getattr(self, key)) for key in ("on", "context") if getattr(self, key) ] ), )
A literal DDL statement. Specifies literal SQL DDL to be executed by the database. DDL objects function as DDL event listeners, and can be subscribed to those events listed in :class:`.DDLEvents`, using either :class:`_schema.Table` or :class:`_schema.MetaData` objects as targets. Basic templating support allows a single DDL instance to handle repetitive tasks for multiple tables. Examples:: from sqlalchemy import event, DDL tbl = Table('users', metadata, Column('uid', Integer)) event.listen(tbl, 'before_create', DDL('DROP TRIGGER users_trigger')) spow = DDL('ALTER TABLE %(table)s SET secretpowers TRUE') event.listen(tbl, 'after_create', spow.execute_if(dialect='somedb')) drop_spow = DDL('ALTER TABLE users SET secretpowers FALSE') connection.execute(drop_spow) When operating on Table events, the following ``statement`` string substitutions are available:: %(table)s - the Table name, with any required quoting applied %(schema)s - the schema name, with any required quoting applied %(fullname)s - the Table name including schema, quoted if needed The DDL's "context", if any, will be combined with the standard substitutions noted above. Keys present in the context will override the standard substitutions.
62598fad8da39b475be031c8
class Sequence(models.Model): <NEW_LINE> <INDENT> pre = models.CharField(max_length=1, blank=True) <NEW_LINE> digest = models.CharField(max_length=5, blank=True) <NEW_LINE> seq = models.CharField(max_length=6, unique=True, blank=True) <NEW_LINE> in_time = models.DateTimeField(auto_now_add=True)
序列库
62598fad167d2b6e312b6f55
class PDBeChemlink(object): <NEW_LINE> <INDENT> def __init__(self, local = None): <NEW_LINE> <INDENT> self._local = os.path.abspath(local) <NEW_LINE> self.__name__ = 'databases.PDBeChemlink' <NEW_LINE> <DEDENT> """ATTRIBUTES""" <NEW_LINE> @property <NEW_LINE> def local(self): return self._local <NEW_LINE> @local.setter <NEW_LINE> def local(self, value): self._local = os.path.abspath(value) <NEW_LINE> @property <NEW_LINE> def localPDBeChems(self): <NEW_LINE> <INDENT> for chem_file in Path.list_files(root = self.local, pattern = '*.cif'): <NEW_LINE> <INDENT> yield chem_file <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def source(self): <NEW_LINE> <INDENT> return PDBeChemftp['show'] <NEW_LINE> <DEDENT> """BOOLEANS""" <NEW_LINE> @property <NEW_LINE> def has_local(self): return self._local is not None <NEW_LINE> """METHODS""" <NEW_LINE> def download(self): <NEW_LINE> <INDENT> if not self.has_local: <NEW_LINE> <INDENT> raise NameError('A local PDBeChem database directory must be defined.') <NEW_LINE> <DEDENT> Path.mkdir(self.local) <NEW_LINE> destination = os.path.join(self.local, 'mmcif.tar.gz') <NEW_LINE> try: <NEW_LINE> <INDENT> urllib.urlretrieve(PDBeChemftp['global'], destination) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> command = ['tar', 'zxvf', destination, '-C', self.local] <NEW_LINE> p = subprocess.Popen(command, stdout = subprocess.PIPE, stderr = subprocess.PIPE) <NEW_LINE> out, err = p.communicate() <NEW_LINE> return True <NEW_LINE> <DEDENT> def get_PDBeChem(self, chemID): <NEW_LINE> <INDENT> if self.has_local: <NEW_LINE> <INDENT> for chem_file in self.localPDBeChems: <NEW_LINE> <INDENT> newfile = File(file_name = chem_file, action = 'r') <NEW_LINE> if newfile.prefix.upper() == chemID.upper(): <NEW_LINE> <INDENT> return chem_file <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> chem_file = chemID.upper() + '.cif' <NEW_LINE> source = PDBeChemftp['single'] + chem_file <NEW_LINE> try: <NEW_LINE> <INDENT> 
urllib.urlretrieve(source, chem_file) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return os.path.abspath(chem_file) <NEW_LINE> <DEDENT> def get_PDBeChems(self, chemIDset): <NEW_LINE> <INDENT> if isintance(chemIDset, str): <NEW_LINE> <INDENT> warnings.warn('For single PDBeChem search the get_PDBeChem function is recomended.') <NEW_LINE> yield self.get_PDBeChem(chemIDset) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> chemIDset = set([x.upper() for x in chemIDset]) <NEW_LINE> <DEDENT> if self.has_local: <NEW_LINE> <INDENT> for chem_file in self.localPDBeChems: <NEW_LINE> <INDENT> newfile = File(file_name = chem_file, action = 'r') <NEW_LINE> if newfile.prefix.lstrip('pdb').upper() in chemIDset: <NEW_LINE> <INDENT> yield chem_file <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> for chemID in chemIDset: <NEW_LINE> <INDENT> yield self.get_PDBeChem(chemID)
The PDBeChemlink class controls the download and parsing of PDBeChem database
62598fad55399d3f05626507
class Lag(Filter): <NEW_LINE> <INDENT> __documentation_section__ = 'Filter UGens' <NEW_LINE> __slots__ = () <NEW_LINE> _ordered_input_names = ( 'source', 'lag_time', ) <NEW_LINE> _valid_rates = ( CalculationRate.AUDIO, CalculationRate.CONTROL, ) <NEW_LINE> def __init__( self, lag_time=0.1, calculation_rate=None, source=None, ): <NEW_LINE> <INDENT> Filter.__init__( self, calculation_rate=calculation_rate, source=source, lag_time=lag_time, ) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def _new_single( cls, lag_time=None, calculation_rate=None, source=None, ): <NEW_LINE> <INDENT> if lag_time == 0: <NEW_LINE> <INDENT> return source <NEW_LINE> <DEDENT> source_rate = CalculationRate.from_input(source) <NEW_LINE> if source_rate == CalculationRate.SCALAR: <NEW_LINE> <INDENT> return source <NEW_LINE> <DEDENT> ugen = cls( lag_time=lag_time, calculation_rate=calculation_rate, source=source, ) <NEW_LINE> return ugen <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def ar( cls, lag_time=0.1, source=None, ): <NEW_LINE> <INDENT> from supriya.tools import synthdeftools <NEW_LINE> calculation_rate = synthdeftools.CalculationRate.AUDIO <NEW_LINE> ugen = cls._new_expanded( lag_time=lag_time, calculation_rate=calculation_rate, source=source, ) <NEW_LINE> return ugen <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def kr( cls, lag_time=0.1, source=None, ): <NEW_LINE> <INDENT> from supriya.tools import synthdeftools <NEW_LINE> calculation_rate = synthdeftools.CalculationRate.CONTROL <NEW_LINE> ugen = cls._new_expanded( lag_time=lag_time, calculation_rate=calculation_rate, source=source, ) <NEW_LINE> return ugen <NEW_LINE> <DEDENT> @property <NEW_LINE> def lag_time(self): <NEW_LINE> <INDENT> index = self._ordered_input_names.index('lag_time') <NEW_LINE> return self._inputs[index] <NEW_LINE> <DEDENT> @property <NEW_LINE> def source(self): <NEW_LINE> <INDENT> index = self._ordered_input_names.index('source') <NEW_LINE> return self._inputs[index]
A lag generator. :: >>> source = ugentools.In.kr(bus=0) >>> ugentools.Lag.kr( ... lag_time=0.5, ... source=source, ... ) Lag.kr()
62598fadd7e4931a7ef3c079
class TG2(GovernorBase): <NEW_LINE> <INDENT> def __init__(self, system, name): <NEW_LINE> <INDENT> super(TG2, self).__init__(system, name) <NEW_LINE> self._name = 'TG2' <NEW_LINE> self._data.update({ 'T1': 0.2, 'T2': 10.0, }) <NEW_LINE> self._descr.update({ 'T1': 'Transient gain time constant', 'T2': 'Governor time constant', }) <NEW_LINE> self._units.update({'T1': 's', 'T2': 's'}) <NEW_LINE> self._params.extend(['T1', 'T2']) <NEW_LINE> self._service.extend(['T12', 'iT2']) <NEW_LINE> self._mandatory.extend(['T2']) <NEW_LINE> self._states.extend(['xg']) <NEW_LINE> self._fnamex.extend(['x_g']) <NEW_LINE> self._init() <NEW_LINE> <DEDENT> def init1(self, dae): <NEW_LINE> <INDENT> super(TG2, self).init1(dae) <NEW_LINE> self.T12 = div(self.T1, self.T2) <NEW_LINE> self.iT2 = div(1, self.T2) <NEW_LINE> <DEDENT> def fcall(self, dae): <NEW_LINE> <INDENT> dae.f[self.xg] = mul( self.iT2, mul(self.gain, 1 - self.T12, self.wref0 - dae.x[self.omega]) - dae.x[self.xg]) <NEW_LINE> <DEDENT> def gcall(self, dae): <NEW_LINE> <INDENT> pm = dae.x[self.xg] + self.pm0 + mul(self.gain, self.T12, self.wref0 - dae.x[self.omega]) <NEW_LINE> dae.g[self.pout] = pm - dae.y[self.pout] <NEW_LINE> dae.hard_limit(self.pout, self.pmin, self.pmax) <NEW_LINE> super(TG2, self).gcall(dae) <NEW_LINE> <DEDENT> def jac0(self, dae): <NEW_LINE> <INDENT> super(TG2, self).jac0(dae) <NEW_LINE> dae.add_jac(Fx0, -self.iT2, self.xg, self.xg) <NEW_LINE> dae.add_jac(Fx0, -mul(self.iT2, self.gain, 1 - self.T12), self.xg, self.omega) <NEW_LINE> dae.add_jac(Gx0, 1.0, self.pout, self.xg) <NEW_LINE> dae.add_jac(Gx0, -mul(self.gain, self.T12), self.pout, self.omega) <NEW_LINE> dae.add_jac(Gy0, -1.0, self.pout, self.pout)
Simplified governor model
62598fad236d856c2adc942f
class Network: <NEW_LINE> <INDENT> def __init__(self, *exclude): <NEW_LINE> <INDENT> stat = None <NEW_LINE> with open('/proc/net/dev', 'rb') as file: <NEW_LINE> <INDENT> stat = file.read() <NEW_LINE> <DEDENT> stat = stat.decode('utf-8', 'replace') <NEW_LINE> stat = stat.replace('|', ' | ').replace(':', ' ') <NEW_LINE> stat = list(filter(lambda x : not x == '', stat.split('\n')[1:])) <NEW_LINE> stat = [list(filter(lambda x : not x == '', line.split(' '))) for line in stat] <NEW_LINE> stat[0] = stat[0][2:] <NEW_LINE> exclude = set(exclude) <NEW_LINE> devices = {} <NEW_LINE> for line in stat[1:]: <NEW_LINE> <INDENT> if line[0] not in exclude: <NEW_LINE> <INDENT> devices[line[0]] = [int(x) for x in line[1:]] <NEW_LINE> <DEDENT> <DEDENT> columns, prefix = [], 'rx_' <NEW_LINE> for column in stat[0]: <NEW_LINE> <INDENT> if column == '|': <NEW_LINE> <INDENT> prefix = 'tx_' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> columns.append(prefix + column) <NEW_LINE> <DEDENT> <DEDENT> self.devices = {} <NEW_LINE> for dev in devices.keys(): <NEW_LINE> <INDENT> fields = {} <NEW_LINE> self.devices[dev] = fields <NEW_LINE> values = devices[dev] <NEW_LINE> for i in range(len(values)): <NEW_LINE> <INDENT> fields[columns[i]] = values[i]
Retrieve network statistics @variable devices:dict<str, dict<str, int>> Map from device name, to data name, to data value Data names for receive: @key rx_bytes Bytes received @key rx_packets Packets received @key rx_errs Errors @key rx_drop Dropped @key rx_fifo FIFO @key rx_frame Frame @key rx_compressed Compressed @key rx_multicast Multicast Data names for transmit: @key tx_bytes Bytes transmitted @key tx_packets Packets transmitted @key tx_errs Errors @key tx_drop Dropped @key tx_fifo FIFO @key tx_colls Collisions @key tx_carrier Carrier @key tx_compressed Compressed
62598fad10dbd63aa1c70b97
class PolybiusSquare(object): <NEW_LINE> <INDENT> def __init__(self, alphabet=None, char_map=('j','i')): <NEW_LINE> <INDENT> if alphabet is None: <NEW_LINE> <INDENT> self.alphabet = string.ascii_lowercase <NEW_LINE> self.tableau_alphabet = ''.join(sorted(list(set(self.alphabet) - set(char_map[0])))) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.alphabet = sorted(alphabet.lower() + char_map[0].lower()) <NEW_LINE> self.tableau_alphabet = alphabet.lower() <NEW_LINE> <DEDENT> self.height = self.width = 5 <NEW_LINE> self.char_map = (char_map[0].lower(), char_map[1].lower()) <NEW_LINE> <DEDENT> def get_coordinates(self, c): <NEW_LINE> <INDENT> c = self.char_map[1] if c == self.char_map[0] else c <NEW_LINE> c_id = self.tableau_alphabet.index(c) <NEW_LINE> i = int(c_id / self.width) <NEW_LINE> j = c_id % self.width <NEW_LINE> return i, j <NEW_LINE> <DEDENT> def get_character(self, i, j): <NEW_LINE> <INDENT> return self.tableau_alphabet[i*self.width + j]
Tableau for the Bifid cipher
62598fad66673b3332c303af
class DepartmentID(Field): <NEW_LINE> <INDENT> logger.info('A custom field for the department class.') <NEW_LINE> logger.info('The field forces DepartmentID to be 4 characters long.') <NEW_LINE> logger.info('The field forces DepartmentID to start with an alpha') <NEW_LINE> def db_value(self, value): <NEW_LINE> <INDENT> if len(value) != 4 or not value[0].isalpha(): <NEW_LINE> <INDENT> raise TypeError( "DepartmentID must be 4 characters long and start with an alpha. " ) <NEW_LINE> <DEDENT> return value
This class defines a custom Department ID field. The first character must be a letter, and the DepartmentID must be 4 characters long.
62598fad99cbb53fe6830ebb
class PrimaryXml(XmlFileParser, PackageXmlMixIn): <NEW_LINE> <INDENT> def _registerTypes(self): <NEW_LINE> <INDENT> PackageXmlMixIn._registerTypes(self) <NEW_LINE> self._databinder.registerType(_Metadata, name='metadata')
Handle registering all types for parsing primary.xml.gz.
62598fad56b00c62f0fb2898
class I_cp_w_l8(Instruction_w_l8_B): <NEW_LINE> <INDENT> name = 'cp' <NEW_LINE> mask = 0xFF8060 <NEW_LINE> code = 0xE10060 <NEW_LINE> feat = idaapi.CF_USE1
CP{.B} f
62598fad57b8e32f5250810d
class ServerUsage(extensions.V3APIExtensionBase): <NEW_LINE> <INDENT> name = "ServerUsage" <NEW_LINE> alias = ALIAS <NEW_LINE> namespace = ("http://docs.openstack.org/compute/ext/" "os-server-usage/api/v3") <NEW_LINE> version = 1 <NEW_LINE> def get_controller_extensions(self): <NEW_LINE> <INDENT> controller = ServerUsageController() <NEW_LINE> extension = extensions.ControllerExtension(self, 'servers', controller) <NEW_LINE> return [extension] <NEW_LINE> <DEDENT> def get_resources(self): <NEW_LINE> <INDENT> return []
Adds launched_at and terminated_at on Servers.
62598fad4f88993c371f04fc
class IFollowable(Interface): <NEW_LINE> <INDENT> pass
A content we can follow
62598fad851cf427c66b829f
class ScatteringSolver(metaclass = ABCMeta): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def make_solver_call(self, sensor): <NEW_LINE> <INDENT> pass
Abstract base class that defines the scattering solver interface.
62598fad91f36d47f2230e98
@dataclass
class DataTrainingArguments:
    """Arguments pertaining to what data we are going to input our model for
    training and eval.

    Using `HfArgumentParser` we can turn this class into argparse arguments
    to be able to specify them on the command line.
    """

    task_name: Optional[str] = field(
        default=None,
        metadata={"help": "The name of the task to train on: " + ", ".join(task_to_keys.keys())},
    )
    max_seq_length: int = field(
        default=128,
        metadata={
            "help": "The maximum total input sequence length after tokenization. Sequences longer "
            "than this will be truncated, sequences shorter will be padded."
        },
    )
    overwrite_cache: bool = field(
        default=False, metadata={"help": "Overwrite the cached preprocessed datasets or not."}
    )
    pad_to_max_length: bool = field(
        default=True,
        metadata={
            "help": "Whether to pad all samples to `max_seq_length`. "
            "If False, will pad the samples dynamically when batching to the maximum length in the batch."
        },
    )
    max_train_samples: Optional[int] = field(
        default=None,
        metadata={
            "help": "For debugging purposes or quicker training, truncate the number of training examples to this "
            "value if set."
        },
    )
    max_val_samples: Optional[int] = field(
        default=None,
        metadata={
            "help": "For debugging purposes or quicker training, truncate the number of validation examples to this "
            "value if set."
        },
    )
    max_test_samples: Optional[int] = field(
        default=None,
        metadata={
            "help": "For debugging purposes or quicker training, truncate the number of test examples to this "
            "value if set."
        },
    )
    train_file: Optional[str] = field(
        default=None, metadata={"help": "A csv or a json file containing the training data."}
    )
    validation_file: Optional[str] = field(
        default=None, metadata={"help": "A csv or a json file containing the validation data."}
    )
    test_file: Optional[str] = field(default=None, metadata={"help": "A csv or a json file containing the test data."})

    def __post_init__(self):
        # Normalise and validate the configuration right after parsing:
        # either a known task name, or an explicit train/validation file pair.
        if self.task_name is not None:
            self.task_name = self.task_name.lower()
            if self.task_name not in task_to_keys.keys():
                raise ValueError("Unknown task, you should pick one in " + ",".join(task_to_keys.keys()))
        elif self.train_file is None or self.validation_file is None:
            raise ValueError("Need either a GLUE task or a training/validation file.")
        else:
            # NOTE(review): these asserts are stripped under `python -O`;
            # raising ValueError would be more robust.
            train_extension = self.train_file.split(".")[-1]
            assert train_extension in ["csv", "json"], "`train_file` should be a csv or a json file."
            validation_extension = self.validation_file.split(".")[-1]
            assert (
                validation_extension == train_extension
            ), "`validation_file` should have the same extension (csv or json) as `train_file`."
Arguments pertaining to what data we are going to input our model for training and eval. Using `HfArgumentParser` we can turn this class into argparse arguments to be able to specify them on the command line.
62598fad7b180e01f3e49042
class Cupcake(db.Model):
    """Cupcake information."""

    __tablename__ = "cupcake"
    # Auto-incrementing surrogate primary key.
    id = db.Column(db.Integer, primary_key=True, autoincrement=True)
    flavor = db.Column(db.Text, nullable=False)
    size = db.Column(db.Text, nullable=False)
    rating = db.Column(db.Float, nullable=False)
    # Falls back to DEFAULT_IMG_URL when no image is supplied.
    image = db.Column(db.Text, nullable=False, default=DEFAULT_IMG_URL)
Cupcake information
62598fad16aa5153ce4004e6
class FileUpdatedSubject(Subject):
    """Subject that broadcasts a list of updated files to its observers."""

    def notify(self, *args, **kwargs):
        """Forward the `file_list` keyword argument to every observer."""
        updated = kwargs['file_list']
        for watcher in self.observers:
            # Hand each observer its own shallow copy so that one observer
            # cannot mutate the list seen by the others.
            watcher.notify(file_list=list(updated))
Broadcast a list of updated files
62598fad38b623060ffa907e
class Loader(yaml.Loader):
    """YAML Loader with an `!include` constructor.

    `!include path` embeds another file: YAML files are parsed and inserted
    as data, any other file is inserted verbatim as a single string.
    """

    def __init__(self, stream):
        """Initialise Loader and remember the directory of *stream*."""
        # NOTE(review): this registers the constructor globally on
        # yaml.Loader every time an instance is created; registering once at
        # module level would be equivalent and cheaper.
        yaml.add_constructor('!include', self.construct_include)
        try:
            # Resolve !include paths relative to the including file.
            self._root = os.path.split(stream.name)[0]
        except AttributeError:
            # Stream has no file name (e.g. a StringIO): fall back to cwd.
            self._root = os.path.curdir
        super(Loader, self).__init__(stream)

    def construct_include(self, tag_suffix, node):
        """Build the value for an `!include` node from the referenced file."""
        filename = os.path.abspath(os.path.join(
            self._root, self.construct_scalar(node)
        ))
        extension = os.path.splitext(filename)[1].lstrip('.')
        with open(filename, 'r') as f:
            if extension in ('yaml', 'yml'):
                # Nested YAML is parsed with this same Loader class, so an
                # included file may itself contain !include directives.
                return yaml.load(f, Loader)
            # Non-YAML content is embedded as one raw string.
            return f.read()
YAML Loader with `!include` constructor.
62598fad4e4d56256637240a
class KLqp(VariationalInference):
    """Variational inference minimizing KL( q(z; lambda) || p(z | x) ).

    Automatically selects a black-box estimator based on whether every
    approximating distribution is reparameterizable and whether the KL
    term has an analytic (Normal-Normal) form.
    """

    def __init__(self, *args, **kwargs):
        super(KLqp, self).__init__(*args, **kwargs)

    def initialize(self, n_samples=1, kl_scaling=None, *args, **kwargs):
        """Configure the estimator.

        Args:
            n_samples: number of Monte Carlo samples per gradient estimate.
            kl_scaling: optional per-variable scaling applied to the
                analytic KL term (only valid when that term is analytic).
        """
        if kl_scaling is None:
            kl_scaling = {}
        self.n_samples = n_samples
        self.kl_scaling = kl_scaling
        return super(KLqp, self).initialize(*args, **kwargs)

    def build_loss_and_gradients(self, var_list):
        """Dispatch to the appropriate loss/gradient builder."""
        # The reparameterization trick applies only if every latent
        # variable's distribution is fully reparameterized.
        is_reparameterizable = all([
            rv.reparameterization_type ==
            tf.contrib.distributions.FULLY_REPARAMETERIZED
            for rv in six.itervalues(self.latent_vars)])
        # The KL term is computed analytically only for Normal-Normal pairs.
        is_analytic_kl = all([isinstance(z, Normal) and isinstance(qz, Normal)
                              for z, qz in six.iteritems(self.latent_vars)])
        if not is_analytic_kl and self.kl_scaling:
            raise TypeError("kl_scaling must be None when using non-analytic KL term")
        if is_reparameterizable:
            if is_analytic_kl:
                return build_reparam_kl_loss_and_gradients(self, var_list)
            else:
                return build_reparam_loss_and_gradients(self, var_list)
        else:
            # Fall back to score-function (REINFORCE-style) gradients.
            if is_analytic_kl:
                return build_score_kl_loss_and_gradients(self, var_list)
            else:
                return build_score_loss_and_gradients(self, var_list)
Variational inference with the KL divergence $\text{KL}( q(z; \lambda) \| p(z \mid x) ).$ This class minimizes the objective by automatically selecting from a variety of black box inference techniques. #### Notes `KLqp` also optimizes any model parameters $p(z \mid x; \theta)$. It does this by variational EM, minimizing $\mathbb{E}_{q(z; \lambda)} [ \log p(x, z; \theta) ]$ with respect to $\theta$. In conditional inference, we infer $z$ in $p(z, \beta \mid x)$ while fixing inference over $\beta$ using another distribution $q(\beta)$. During gradient calculation, instead of using the model's density $\log p(x, z^{(s)}), z^{(s)} \sim q(z; \lambda),$ for each sample $s=1,\ldots,S$, `KLqp` uses $\log p(x, z^{(s)}, \beta^{(s)}),$ where $z^{(s)} \sim q(z; \lambda)$ and $\beta^{(s)} \sim q(\beta)$.
62598fadd486a94d0ba2bfb2
class JoueursPossibles(object):
    """Registry of possible players.

    Maps each player name to its representation token: the character shown
    on the grid (and, in graphical mode, the name of the image file).
    Both names and representations are kept unique.
    """

    def __init__(self):
        # name -> representation
        self.pions = {}

    def lire_joueurspossibles(self, nomFic):
        """Load "name,representation" pairs from the file *nomFic*."""
        with open(nomFic, 'r') as fichier:
            for ligne in fichier:
                nom, representation = ligne.split(',')
                self.ajouter_nom(nom, representation.strip('\n'))

    def ajouter_nom(self, nom, representation):
        """Register a player unless its name or token is already taken."""
        deja_pris = nom in self.pions or representation in self.pions.values()
        if not deja_pris:
            self.pions[nom] = representation

    def get_listenomsjoueurs(self):
        """Return the list of registered player names."""
        return [nom for nom in self.pions]

    def get_nomjoueur(self, representation):
        """Return the name whose token equals *representation* ('' if none)."""
        trouve = ''
        # Scan all entries; with unique tokens at most one can match.
        for nom, token in self.pions.items():
            if token == representation:
                trouve = nom
        return trouve

    def get_representationjoueur(self, nom):
        """Return the token of player *nom*, or '' if unknown."""
        return self.pions.get(nom, '')
Cette structure de données gère les noms et les représentations des joueurs. Le nom permet de connaître le nom du fichier qui contient l'image représentant le joueur en mode graphique, ainsi que le caractère représentant le joueur sur la grille. Le constructeur retourne une nouvelle liste de joueurs vide ; il n'y a pas de joueur courant lorsque la liste est vide.
62598fada05bb46b3848a850
class Athlete(Base):
    """ORM mapping for the athletes table (one row per athlete)."""

    # NOTE(review): the table name is spelled "athelete" in the schema; the
    # typo is kept so the mapping matches the existing database.
    __tablename__ = "athelete"
    id = sa.Column(sa.INTEGER, primary_key=True)
    age = sa.Column(sa.INTEGER)
    birthdate = sa.Column(sa.TEXT)
    gender = sa.Column(sa.TEXT)
    height = sa.Column(sa.REAL)
    name = sa.Column(sa.TEXT)
    weight = sa.Column(sa.INTEGER)
    gold_medals = sa.Column(sa.INTEGER)
    silver_medals = sa.Column(sa.INTEGER)
    bronze_medals = sa.Column(sa.INTEGER)
    total_medals = sa.Column(sa.INTEGER)
    sport = sa.Column(sa.TEXT)
    country = sa.Column(sa.TEXT)

    def __str__(self):
        # Display athletes by name.
        return self.name
описание таблицы атлетов
62598fad5fdd1c0f98e5df71
class TooManyRequests(HTTPWarning):
    """Raised when receiving a 429 (rate-limited) response from the API."""
    pass
Raised when receiving a 429 response from the API. **This shouldn't happen.**
62598fad7047854f4633f3bf
class TaskInlineFormset(TaskFormsetMixin, BaseInlineFormSet):
    """Inline formset for editing tasks that all share a foreign key to a
    single TaskType; all behaviour comes from the two base classes."""
    pass
InlineFormset for editing tasks that all share a foreign key to a single TaskType.
62598fad97e22403b383aef2
class Processor(DiffEngine.Processor):
    """Processor used by the SegmentMatcher difference engine to track the
    history of a single text across revisions."""

    def __init__(self, tokenizer=None, segmenter=None, last_text=None,
                 last_tokens=None, last_segments=None):
        self.tokenizer = tokenizer or TOKENIZER
        self.segmenter = segmenter or SEGMENTER
        self.update(last_text, last_tokens, last_segments)

    def update(self, last_text=None, last_tokens=None, last_segments=None):
        """Reset the stored "last" state from whichever representation is
        supplied; precedence is segments > tokens > text > empty."""
        if last_segments is not None:
            self.last_segments = last_segments
            self.last_tokens = self.last_segments.tokens()
        elif last_tokens is not None:
            self.last_tokens = last_tokens
            self.last_segments = self.segmenter.segment(last_tokens)
        elif last_text is not None:
            self.last_tokens = self.tokenizer.tokenize(last_text)
            self.last_segments = self.segmenter.segment(self.last_tokens)
        else:
            # Nothing supplied: start from an empty history.
            self.last_tokens = []
            self.last_segments = Segment()

    def process(self, text, token_class=Token):
        """Tokenize and segment *text*, then diff it against the last state."""
        tokens = self.tokenizer.tokenize(text, token_class=token_class)
        segments = self.segmenter.segment(tokens)
        return self.process_segments(segments, tokens=tokens)

    def process_segments(self, segments, tokens=None):
        """Diff *segments* against the stored history and advance it.

        Returns a tuple (operations, previous_tokens, new_tokens).
        """
        if tokens is None:
            tokens = segments.tokens()
        # Clear stale match links left over from the previous diff.
        _clear_matches(self.last_segments)
        operations = diff_segments(self.last_segments, segments)
        a = self.last_tokens
        b = tokens
        # The new revision becomes the history for the next call.
        self.last_tokens = tokens
        self.last_segments = segments
        return operations, a, b
A processor used by the SegmentMatcher difference engine to track the history of a single text.
62598fadd7e4931a7ef3c07b
class BankAccount:
    """A simple bank account holding an account name and a balance."""

    def __init__(self, accountName="Current Account", balance=200):
        # Name-mangled attributes keep the fields private to this class.
        self.__accountName = accountName
        self.__balance = balance

    def getBalance(self):
        """Return the current balance."""
        return self.__balance
This is a bank account class
62598fad66673b3332c303b1
class PolygonBySegmentBufferDialogTest(unittest.TestCase):
    """Test that plugin resources (icons) load correctly."""

    def setUp(self):
        """Runs before each test."""
        pass

    def tearDown(self):
        """Runs after each test."""
        pass

    def test_icon_png(self):
        """The plugin icon must exist in the compiled Qt resource file."""
        path = ':/plugins/PolygonBySegmentBuffer/icon.png'
        icon = QIcon(path)
        # A null icon means the resource path was not found.
        self.assertFalse(icon.isNull())
Test that resources work.
62598faddd821e528d6d8f1a
class RequestSigner:
    """General implementation of AWS request signing (via awscrt)."""

    def __init__(self, service_name, region, algorithm=0, signature_type=0):
        # `algorithm` / `signature_type` are raw ints converted to the
        # awscrt enums (AwsSigningAlgorithm / AwsSignatureType) at sign time.
        self.service_name: str = service_name
        self.region: str = region
        self.algorithm: int = algorithm
        self.signature_type: int = signature_type

    def sign(
        self, request: PreparedRequest, credentials: Optional[Credentials]
    ) -> PreparedRequest:
        """Sign *request* in place with *credentials* and return it.

        Raises:
            CredentialsException: if no credentials were resolved.
        """
        if credentials is None:
            raise CredentialsException("Failed to resolve credentials")
        alg = AwsSigningAlgorithm(self.algorithm)
        sig_type = AwsSignatureType(self.signature_type)
        credential_provider = AwsCredentialsProvider.new_static(
            credentials.access_key_id,
            credentials.secret_access_key,
            credentials.session_token,
        )
        config = AwsSigningConfig(
            algorithm=alg,
            signature_type=sig_type,
            credentials_provider=credential_provider,
            region=self.region,
            service=self.service_name,
            # The body is not hashed here: sign with the empty-body SHA256.
            signed_body_value=AwsSignedBodyValue.EMPTY_SHA256,
            signed_body_header_type=AwsSignedBodyHeaderType.NONE,
        )
        crt_request = _convert_request(request)
        # aws_sign_request returns a future; block until signing completes.
        signed_request = aws_sign_request(crt_request, config).result()
        # Replace the original headers with the signed header set.
        request.headers = HeadersDict(dict(signed_request.headers))
        return request
General implementation for Request signing
62598fad57b8e32f5250810e
class ContentEncoding(
    Enum["7bit 8bit binary quoted-printable base64".split()], _NoInit, _NoTitle
):
    """Content encodings for a string.

    .. Json schema media:
       https://json-schema.org/understanding-json-schema/reference/non_json_data.html
    """
    pass
Content encodings for a string. .. Json schema media: https://json-schema.org/understanding-json-schema/reference/non_json_data.html
62598fad283ffb24f3cf3872
class ServerInterface (object):
    """Interface for controlling the behavior of paramiko in server mode.

    Methods on this class are called from paramiko's primary thread, so
    implementations shouldn't block or sleep.  Every default below denies
    the request, so a subclass only overrides what it wants to allow.
    """

    # --- channel and authentication hooks (default: deny everything) ---

    def check_channel_request(self, kind, chanid):
        return OPEN_FAILED_ADMINISTRATIVELY_PROHIBITED

    def get_allowed_auths(self, username):
        # Advertise password authentication only, by default.
        return b'password'

    def check_auth_none(self, username):
        return AUTH_FAILED

    def check_auth_password(self, username, password):
        return AUTH_FAILED

    def check_auth_publickey(self, username, key):
        return AUTH_FAILED

    def check_auth_interactive(self, username, submethods):
        return AUTH_FAILED

    def check_auth_interactive_response(self, responses):
        return AUTH_FAILED

    # --- port forwarding and global requests (default: refuse) ---

    def check_port_forward_request(self, address, port):
        return False

    def cancel_port_forward_request(self, address, port):
        pass

    def check_global_request(self, kind, msg):
        return False

    # --- per-channel feature requests (default: refuse) ---

    def check_channel_pty_request(self, channel, term, width, height,
                                  pixelwidth, pixelheight, modes):
        return False

    def check_channel_shell_request(self, channel):
        return False

    def check_channel_exec_request(self, channel, command):
        return False

    def check_channel_subsystem_request(self, channel, name):
        """Start a registered subsystem handler for *name*, if any.

        Looks up a handler class registered on the transport; if found, it
        is instantiated with the extra args given at registration time and
        started on its own thread.
        """
        handler_class, larg, kwarg = channel.get_transport()._get_subsystem_handler(name)
        if handler_class is None:
            return False
        handler = handler_class(channel, name, self, *larg, **kwarg)
        handler.start()
        return True

    def check_channel_window_change_request(self, channel, width, height,
                                            pixelwidth, pixelheight):
        return False

    def check_channel_x11_request(self, channel, single_connection,
                                  auth_protocol, auth_cookie, screen_number):
        return False

    def check_channel_forward_agent_request(self, channel):
        return False

    def check_channel_direct_tcpip_request(self, chanid, origin, destination):
        return OPEN_FAILED_ADMINISTRATIVELY_PROHIBITED
This class defines an interface for controlling the behavior of paramiko in server mode. Methods on this class are called from paramiko's primary thread, so you shouldn't do too much work in them. (Certainly nothing that blocks or sleeps.)
62598fadbd1bec0571e150b6
class Brewery(_Endpoint):
    """Brewery endpoint: exposes the 'info' and 'checkins' GET endpoints
    under the 'brewery' base path."""
    endpoint_base = 'brewery'
    get_endpoints = ('info', 'checkins')
Brewery endpoint class
62598fad091ae35668704c04
class Fig62Game(Game):
    """The game represented in [Fig. 6.2]; serves as a simple test case.

    Ex: g = Fig62Game();
        minimax_decision('A', g)      ==> 'a1'
        alphabeta_full_search('A', g) ==> 'a1'
        alphabeta_search('A', g)      ==> 'a1'
    """

    # Successor moves: state -> [(move, resulting state), ...]
    succs = {'A': [('a1', 'B'), ('a2', 'C'), ('a3', 'D')],
             'B': [('b1', 'B1'), ('b2', 'B2'), ('b3', 'B3')],
             'C': [('c1', 'C1'), ('c2', 'C2'), ('c3', 'C3')],
             'D': [('d1', 'D1'), ('d2', 'D2'), ('d3', 'D3')]}
    # Terminal utilities from MAX's point of view.
    utils = Dict(B1=3, B2=12, B3=8, C1=2, C2=4, C3=6, D1=14, D2=5, D3=2)
    initial = 'A'

    def successors(self, state):
        # Terminal states have no successors.
        return self.succs.get(state, [])

    def utility(self, state, player):
        # MIN's utility is the negation of MAX's (zero-sum game).
        if player == 'MAX':
            return self.utils[state]
        else:
            return -self.utils[state]

    def terminal_test(self, state):
        # Every state except the four internal nodes is a leaf.
        return state not in ('A', 'B', 'C', 'D')

    def to_move(self, state):
        # MIN moves at B, C and D; MAX moves at the root A.
        return if_(state in 'BCD', 'MIN', 'MAX')
The game represented in [Fig. 6.2]. Serves as a simple test case. Ex: g = Fig62Game(); minimax_decision('A', g) ==> 'a1' alphabeta_full_search('A', g) ==> 'a1' alphabeta_search('A', g) ==> 'a1'
62598fade76e3b2f99fd8a1c
@plugin('remind')
class Remind(RemindBase):
    """Jarvis plugin that lists all scheduled reminders."""

    def init(self, jarvis):
        # One-time setup for the reminder machinery.
        self.first_time_init(jarvis)

    def __call__(self, jarvis, s):
        # Print a header with the current time, then every reminder.
        jarvis.say("## {} ##\n".format(self.timestamp_to_string(time.time())))
        self.do_print(jarvis)
List all scheduled reminders
62598fad76e4537e8c3ef593
class UnicodeCSVReader:
    """A CSV reader which will iterate over lines in the CSV file "f",
    which is encoded in the given encoding.

    NOTE(review): Python 2 only -- relies on the `unicode` builtin and the
    old-style `next()` iterator protocol.
    """

    def __init__(self, f, dialect=csv.excel, encoding="utf-8", **kwds):
        # Re-encode the raw stream to UTF-8 so csv.reader can consume it.
        f = UTF8Recoder(f, encoding)
        self.reader = csv.reader(f, dialect=dialect, **kwds)

    def next(self):
        row = self.reader.next()
        # Decode each cell back to unicode for the caller.
        return [unicode(s, "utf-8") for s in row]

    def __iter__(self):
        return self
A CSV reader which will iterate over lines in the CSV file "f", which is encoded in the given encoding.
62598fad8e7ae83300ee9087
class Action7(Action):
    """List Tokenizing -> Delimiters -> Set delimiter.

    Parameters:
        0: delimiter string (EXPSTRING, ExpressionParameter)
        1: delimiter index  (EXPRESSION, ExpressionParameter)
    """

    def execute(self, instance):
        delimiter = self.evaluate_expression(self.get_parameter(0))
        position = self.evaluate_expression(self.get_parameter(1))
        player = instance.objectPlayer
        try:
            player.delimiters[position] = delimiter
        except IndexError:
            # Out-of-range index: leave the delimiters untouched and do not
            # change the default either.
            return
        player.defaultDelimiter = delimiter
List Tokenizing->Delimiters->Set delimiter Parameters: 0: Set delimiter (EXPSTRING, ExpressionParameter) 1: Set delimiter (EXPRESSION, ExpressionParameter)
62598fad5fdd1c0f98e5df72
class Primitive(Circuit):
    """A primitive on an FPGA."""

    def __init__(self, fpga, name):
        self.name = name
        self.fpga = fpga
        # Register this primitive with its owning FPGA.
        self.fpga.primitives.append(self)
        # Start in the off state (behaviour provided by Circuit).
        self.off()
A primitive on an FPGA.
62598fad796e427e5384e779