code
stringlengths
4
4.48k
docstring
stringlengths
1
6.45k
_id
stringlengths
24
24
class AssemblyMemberDifferentType(AssemblyMemberDifference, IDisposable):
    """Stub: the two assembly members being compared differ in type."""

    def Dispose(self):
        # Stub; no managed resources to release.
        pass

    def ReleaseUnmanagedResources(self, *args):
        pass

    def __enter__(self, *args):
        pass

    def __exit__(self, *args):
        pass

    def __init__(self, *args):
        pass

    # Get/set/delete property stubs for the two differing type identifiers.
    TypeId1 = property(lambda self: object(), lambda self, v: None, lambda self: None)
    TypeId2 = property(lambda self: object(), lambda self, v: None, lambda self: None)
The two assembly members being compared have different types
62598f9c56ac1b37e6301fa3
class MongoDBPipeline:
    """Scrapy pipeline that persists scraped items to MongoDB via mongoengine.

    Handles Article, Market, Twitter and Gtrends items. Duplicates (by URL,
    tweet id or keyword) are skipped, and save failures are logged rather
    than raised so the crawl keeps running.
    """

    collection_name = "articles"

    def __init__(self, mongo_uri, mongo_db):
        self._mongo_uri = mongo_uri
        self._mongo_db = mongo_db
        self._client = None
        self._db = None
        self._conn = None

    @classmethod
    def from_crawler(cls, crawler):
        """Build the pipeline from the crawler's settings."""
        return cls(
            mongo_uri=crawler.settings.get("MONGO_URI"),
            mongo_db=crawler.settings.get("MONGO_DB"),
        )

    def open_spider(self, spider):
        self._conn = connect(self._mongo_db, host=self._mongo_uri)

    def close_spider(self, spider):
        self._conn.close()

    def process_item(self, item, spider):
        """Dispatch the item to the matching save routine by item type."""
        if isinstance(item, ArticleItem):
            try:
                Articles.objects.get(URL=item["URL"])
                logger.info("Article \"{}\" already exists, skipping ...".format(item["URL"]))
                return item
            except DoesNotExist:
                article = Articles(**dict(item))
                article.save()
                logger.info("Successfully added article \"{}\" to MongoDB".format(item["URL"]))
            except Exception as e:
                logger.error("Failed saving to MongoDB, details: {}".format(e))
        elif isinstance(item, MarketItem):
            try:
                m = Market(**dict(item))
                m.save()
                logger.info("Successfully added market data for \"{}\" to MongoDB".format(item["Currency"]))
            except Exception as e:
                logger.error("Failed saving to MongoDB, details: {}".format(e))
        elif isinstance(item, TwitterItem):
            try:
                Twitter.objects.get(Tweet_ID=item["Tweet_ID"])
                logger.info("Tweet \"{}\" already exists, skipping ...".format(item["Tweet_ID"]))
                return item
            except DoesNotExist:
                t = Twitter(**dict(item))
                t.save()
                logger.info("Successfully added twitter data to MongoDB")
            except Exception as e:
                logger.error("Failed saving to MongoDB, details: {}".format(e))
        elif isinstance(item, GtrendsItem):
            try:
                Gtrends.objects.get(Keyword=item["Keyword"])
                logger.info("Trend keyword \"{}\" already exists, skipping ...".format(item["Keyword"]))
                return item
            except DoesNotExist:
                t = Gtrends(**dict(item))
                t.save()
                logger.info("Successfully added trend data for \"{}\" to MongoDB".format(item["Keyword"]))
            except Exception as e:
                logger.error("Failed saving to MongoDB, details: {}".format(e))
        else:
            logger.error("MongoDB Pipeline unsupported item type: {}".format(type(item)))
        return item
Pipeline for saving scraped data to MongoDB
62598f9c21bff66bcd722a1d
class PlainOldData:
    """Mixin adding Prolog-style JSON serialization to __slots__ classes."""

    __slots__ = []

    def as_prolog_str(self) -> str:
        """Render all non-None slots as a SWI-Prolog json{...} dict term."""
        parts = []
        for slot in self.__slots__:
            val = getattr(self, slot)
            if val is not None:
                parts.append(f'{_prolog_atom(slot)}:{_as_prolog_str_full(val)}')
        return 'json{' + ','.join(parts) + '}'
A mixin class that adds serialization methods.
62598f9cf7d966606f747da0
class EditForm(forms.Form):
    """Form for editing the content of a file on disk.

    The file at ``path``/``filename`` is read on construction to seed the
    ``content`` field, and written back (with carriage returns stripped)
    by :meth:`save`.
    """

    content = forms.CharField(
        label=_(u'Content'),
        help_text=_('File content'),
        required=True,
        widget=forms.Textarea(attrs={'rows': '18'}),
    )

    def __init__(self, path, filename, file_extension, *args, **kwargs):
        self.path = path
        self.filename = filename
        self.file_extension = file_extension
        super(EditForm, self).__init__(*args, **kwargs)
        # BUG FIX: the original never closed the file handle; use a context
        # manager so it is released promptly even on error.
        with open(os.path.join(path, filename), 'r') as in_file:
            file_content = in_file.read()
        self.fields['content'].initial = file_content.decode(settings.DEFAULT_CHARSET)

    def save(self):
        """Write the cleaned content back to the file, dropping '\r' chars."""
        content = self.cleaned_data['content']
        # Context manager guarantees the file is closed even if write() raises.
        with open(os.path.join(self.path, self.filename), 'w') as out_file:
            out_file.write(content.encode(settings.DEFAULT_CHARSET).replace("\r", ""))
Form for editing the File.
62598f9c91f36d47f2230d7c
class TestSingleComponentResponseOfDestinyKiosksComponent(unittest.TestCase):
    """Unit-test stubs for SingleComponentResponseOfDestinyKiosksComponent."""

    def setUp(self):
        """No fixtures required yet."""

    def tearDown(self):
        """Nothing to clean up."""

    def testSingleComponentResponseOfDestinyKiosksComponent(self):
        """Placeholder; implement once the component model is testable."""
SingleComponentResponseOfDestinyKiosksComponent unit test stubs
62598f9cb7558d58954633e8
class Chunk(models.Model):
    """A keyed piece of content insertable into templates via a template tag."""

    __metaclass__ = TransMeta

    key = models.CharField(
        verbose_name=_('key'),
        help_text="A unique name for this chunk of content",
        max_length=255, blank=False, unique=True)
    content = models.TextField(_('content'), blank=True, null=True)
    url_patterns = models.TextField(_('url patterns'), blank=True, null=True)

    # Drop the catch-all 'all' entry before building the choices tuple.
    PLACES_DICT = dict(PLACES)
    if 'all' in PLACES_DICT:
        del PLACES_DICT['all']
    placed_at = models.CharField(max_length=100, choices=tuple(PLACES_DICT.items()),
                                 blank=True, null=True)

    objects = ChunksManager()

    class Meta:
        verbose_name = _('chunk')
        verbose_name_plural = _('chunks')
        translate = ('content', )

    def __unicode__(self):
        return unicode(self.key)

    def match_with_url(self, url_query):
        """Return the first regex match of url_query against url_patterns."""
        import re
        lines = self.url_patterns.replace('\r\n', '\n').split('\n')
        result = None
        for raw_pattern in lines:
            compiled = re.compile(r'%s' % raw_pattern)
            result = compiled.match(url_query)
            if result:
                break
        return result
A Chunk is a piece of content associated with a unique key that can be inserted into any template with the use of a special template tag
62598f9c44b2445a339b6849
class DatSegUEV(DatSegBase):
    """Data segment for a U-Boot environment (ENV) image.

    Accepts inline 'data', or a 'file' which is read as text (.txt) or
    parsed as a binary ENV blob; the exported blob is stored in self.data.
    """

    MARK = 'uev'
    SCHEMA = {
        Optional('description'): str,
        Optional('address'): Any(int, All(str, lambda v: int(v, 0))),
        Optional('file'): str,
        Optional('mark', default='bootcmd='): str,
        Required('eval'): str
    }

    def load(self, db, root_path):
        """Build the ENV blob from inline data or a referenced file."""
        assert isinstance(db, list)
        assert isinstance(root_path, str)
        if 'data' in self.smx_data:
            # Inline environment definition.
            env_obj = uboot.EnvBlob()
            env_obj.load(self.smx_data['data'])
        else:
            file_path = get_full_path(root_path, self.smx_data['file'])[0]
            if file_path.endswith(".txt"):
                # Plain-text env file: load line-oriented definitions.
                with open(file_path, 'r') as f:
                    env_obj = uboot.EnvBlob()
                    env_obj.load(f.read())
            else:
                # Binary blob: parse the raw bytes directly.
                with open(file_path, 'rb') as f:
                    env_obj = uboot.EnvBlob.parse(f.read())
        self.data = env_obj.export()
Data segments class for U-Boot ENV image <NAME>.uev: description: str address: int file: str mark: str (default: 'bootcmd=') eval: str
62598f9c67a9b606de545d83
@python_2_unicode_compatible
class Plan(OrderedModel):
    """A single plan defined in the system.

    A plan may be customized for one user (only that user can purchase it),
    and flagged visible (shown in the current offer) and/or available
    (purchasable).
    """

    name = models.CharField(_('name'), max_length=100)
    description = models.TextField(_('description'), blank=True)
    default = models.BooleanField(default=False, db_index=True)
    available = models.BooleanField(_('available'), default=False, db_index=True,
                                    help_text=_('Is still available for purchase'))
    visible = models.BooleanField(_('visible'), default=True, db_index=True,
                                  help_text=_('Is visible in current offer'))
    created = models.DateTimeField(_('created'), db_index=True)
    customized = models.ForeignKey('auth.User', null=True, blank=True,
                                   verbose_name=_('customized'))
    plangroup = models.ForeignKey(PlanGroup, null=True, blank=True,
                                  verbose_name=_('plan group'))
    quotas = models.ManyToManyField('Quota', through='PlanQuota',
                                    verbose_name=_('quotas'))
    url = models.CharField(max_length=200, blank=True, help_text=_(
        'Optional link to page with more information (for clickable pricing table headers)'))

    class Meta:
        ordering = ('order',)
        verbose_name = _("Plan")
        verbose_name_plural = _("Plans")

    def __str__(self):
        return self.name

    def save(self, *args, **kwargs):
        # Stamp the creation time on first save only.
        if not self.created:
            self.created = now()
        super(Plan, self).save(*args, **kwargs)

    @classmethod
    def get_default_plan(cls):
        """Return the plan flagged as default, or None if there is none."""
        try:
            return cls.objects.filter(default=True)[0]
        except IndexError:
            return None

    def get_quota_dict(self):
        """Map each quota codename to its value for this plan."""
        return {
            pq.quota.codename: pq.value
            for pq in PlanQuota.objects.filter(plan=self).select_related('quota')
        }
Single plan defined in the system. A plan can be customized (tied to a specific user), which means that only that user can purchase the plan and have it selected. A plan can also be visible and available. A plan is displayed on the list of currently available plans for a user if it is visible. A user cannot change to a plan that is not visible. Available means that a user can buy the plan. If a plan is not visible but still available, a user already on this plan will be able to extend it again. If a plan is neither visible nor available, the user will be forced to change plan the next time they extend their account.
62598f9c85dfad0860cbf951
class ExpirationOptions(TypedDict, total=False):
    """Pending-order expiration settings; all keys are optional."""

    # Expiration policy identifier.
    type: str
    # Absolute expiry time, or None when not applicable.
    time: Optional[datetime]
Pending order expiration settings.
62598f9cdd821e528d6d8cee
class PublicTagApiTests(TestCase):
    """Tests for the publicly available (unauthenticated) tags API."""

    def setUp(self):
        # Unauthenticated client: no credentials are attached.
        self.client = APIClient()

    def test_login_required(self):
        """Listing tags without credentials must return 401."""
        response = self.client.get(TAGS_URL)
        self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
The publicly available tags API
62598f9c1b99ca400228f40a
class Content(InheritableDocument):
    """Staff-defined static site content of various types."""

    status = StringField(help_text='The status of this content.')
    type = StringField(help_text='The type of content', required=True)
    user = StringField(help_text="ID of the user submitting the content")
    title = StringField(max_length=100,
                        help_text="The title of the content, limited to 100 chrs.")
    summary = StringField(max_length=140,
                          help_text="A summary snippet of the content.")
    created = DateTimeField(help_text="The date and time of when the object was created.",
                            default=datetime.datetime.utcnow)
    modified = DateTimeField(help_text="When the comment or dependent data was last modified.",
                             default=datetime.datetime.utcnow)
    body = StringField(help_text="Main body of the text. No character limit.")
    protective_marking = EmbeddedDocumentField(ProtectiveMarking, required=True)
    index = BooleanField(help_text='Whether this is the index page for this type of site content.')
A Content object which stores different types of staff-defined static content
62598f9c435de62698e9bbae
class ClearMessageInfo(restful.Resource):
    """Clear the private-message detail between two users."""

    @staticmethod
    def get():
        parser = reqparse.RequestParser()
        parser.add_argument('sender_id', type=str, required=True, help=u'sender_id 必须')
        parser.add_argument('receiver_id', type=str, required=True, help=u'receiver_id')
        args = parser.parse_args()
        response = success_dic().dic
        deleted = delete_message_info(args['sender_id'], args['receiver_id'])
        # Both outcomes reply with the success envelope; only the message differs.
        response['message'] = '清除成功' if deleted else '清除失败'
        return response
清除某一个私信详情
62598f9c8da39b475be02f9f
class Sequence(ElementBase):
    """Element matching a fixed sequence of child elements, in order.

    Constructor arguments:
     - *children* (iterable, default ()): the child elements
     - *name* (str, default None): the name of this element
     - *default*: the default value
    """

    def __init__(self, children=(), name=None, default=None):
        ElementBase.__init__(self, name=name, default=default)
        self._children = self._copy_sequence(children, "children", ElementBase)

    def _get_children(self):
        return self._children

    def dependencies(self, memo):
        """Collect the dependencies of all children, guarding against cycles."""
        if self in memo:
            return []
        memo.append(self)
        deps = []
        for child in self._children:
            deps.extend(child.dependencies(memo))
        return deps

    def gstring(self):
        """Grammar-string form: children joined by spaces inside parentheses."""
        return "(" + " ".join([e.gstring() for e in self._children]) + ")"

    def decode(self, state):
        """Generator-based backtracking decode over the child sequence.

        Maintains a stack of child decode generators; when the stack depth
        reaches the number of children, the whole sequence has matched.
        """
        state.decode_attempt(self)
        if len(self._children) == 0:
            # An empty sequence trivially matches once.
            state.decode_success(self)
            yield state
            state.decode_retry(self)
            state.decode_failure(self)
            return
        stack = [self._children[0].decode(state)]
        while stack:
            try:
                next(stack[-1])
            except StopIteration:
                # Current child exhausted its alternatives: backtrack.
                stack.pop()
            else:
                if len(stack) < len(self._children):
                    # Advance to the next child's decoder.
                    stack.append(self._children[len(stack)].decode(state))
                else:
                    # Every child matched: report a full sequence match.
                    state.decode_success(self)
                    yield state
                    state.decode_retry(self)
        state.decode_failure(self)
        return

    def value(self, node):
        """Value of a matched node: the list of each child's value."""
        return [child.value() for child in node.children]
Element class representing a sequence of child elements which must all match a recognition in the correct order. Constructor arguments: - *children* (iterable, default: *()*) -- the child elements of this element - *name* (*str*, default: *None*) -- the name of this element For a recognition to match, all child elements must match the recognition in the order that they were given in the *children* constructor argument. Example usage: >>> from dragonfly.test import ElementTester >>> seq = Sequence([Literal("hello"), Literal("world")]) >>> test_seq = ElementTester(seq) >>> test_seq.recognize("hello world") ['hello', 'world'] >>> test_seq.recognize("hello universe") RecognitionFailure
62598f9ca05bb46b3848a639
class RetraceTaskPackage:
    """Thread-safe snapshot of a pyfaf.storage.Package.

    SQLAlchemy objects are not threadsafe; this buffer carries everything
    workers need so they never touch the DB session themselves.
    """

    def __init__(self, db_package: Package) -> None:
        self.db_package = db_package
        self.nvra: str = db_package.nvra()
        # Bind the unpack strategy by package type (currently RPM only).
        if db_package.pkgtype.lower() == "rpm":
            self.unpack_to_tmp = unpack_rpm_to_tmp
        self.path: Optional[str] = None
        if db_package.has_lob("package"):
            self.path = db_package.get_lob_path("package")
        self.unpacked_path: Optional[str] = None
        self.debug_files: Optional[List[str]] = None

    def unpack_to_tmp(self, *args, **kwargs) -> str:
        """Fallback for unsupported package types; replaced per-instance."""
        raise NotImplementedError
A "buffer" representing pyfaf.storage.Package. SQL Alchemy objects are not threadsafe and this object is used to query and buffer all the necessary information so that DB calls are not required from workers.
62598f9c596a897236127a39
class Card(pg.sprite.Sprite):
    """A single playing card sprite with face and back images."""

    card_names = {1: "Ace", 2: "Two", 3: "Three", 4: "Four",
                  5: "Five", 6: "Six", 7: "Seven", 8: "Eight",
                  9: "Nine", 10: "Ten", 11: "Jack", 12: "Queen",
                  13: "King"}

    def __init__(self, value, suit, card_size, speed):
        super(Card, self).__init__()
        self.card_size = card_size
        self.speed = speed
        self.value = value
        self.suit = suit
        self.long_name = "{} of {}".format(self.card_names[self.value], self.suit)
        # Number cards (2-10) use the numeral; aces and face cards use words.
        if 1 < self.value < 11:
            self.name = "{} of {}".format(self.value, self.suit)
            self.short_name = "{}{}".format(self.value, self.suit[0])
        else:
            self.name = self.long_name
            self.short_name = "{}{}".format(self.card_names[self.value][0], self.suit[0])
        self.load_images()
        self.rect = self.image.get_rect()
        self.pos = self.rect.center
        self.face_up = False

    def load_images(self):
        """Load/scale the face image and draw the snake-logo card back."""
        img_name = self.name.lower().replace(" ", "_")
        face = prepare.GFX[img_name]
        self.image = pg.transform.scale(face, self.card_size)
        self.back_image = pg.Surface(self.card_size).convert()
        self.back_image.fill(pg.Color("dodgerblue"))
        snake = prepare.GFX["pysnakeicon"]
        s_rect = snake.get_rect().fit(self.back_image.get_rect())
        s_rect.midbottom = self.back_image.get_rect().midbottom
        snake = pg.transform.scale(snake, s_rect.size)
        self.back_image.blit(snake, s_rect)
        # Light outer border plus a thin dark edge.
        pg.draw.rect(self.back_image, pg.Color("gray95"),
                     self.back_image.get_rect(), 4)
        pg.draw.rect(self.back_image, pg.Color("gray20"),
                     self.back_image.get_rect(), 1)

    def draw(self, surface):
        """Blit the face or the back depending on face_up."""
        surface.blit(self.image if self.face_up else self.back_image, self.rect)

    def travel(self, destination):
        """Step toward destination at self.speed along the connecting angle."""
        angle = get_angle(self.pos, destination)
        self.pos = project(self.pos, angle, self.speed)
        self.rect.center = self.pos
Class to represent a single playing card.
62598f9c462c4b4f79dbb7c5
class AppSyncRequest(TypedDict):
    """AppSync request envelope.

    Attributes:
        headers: Dict[str, str]
    """

    headers: Dict[str, str]
AppSyncRequest Attributes: ---------- headers: Dict[str, str]
62598f9ccc0a2c111447adc6
class Task(object):
    """A unit of work that can be done in the background.

    Subclasses implement start(finish_callback) and must store the callback;
    whatever the outcome (error or success), the callback *must* eventually
    be invoked as finish_callback(self).
    """

    def __init__(self, name=None):
        self.name = name

    def __repr__(self):
        klass = type(self)
        return "<%s.%s name=%r>" % (klass.__module__, klass.__name__,
                                    getattr(self, 'name', None))

    def start(self, finish_callback):
        """Begin the work; subclasses must override."""
        raise NotImplementedError
Represent a task that can be done in the background The finish_callback received in Task.start(..) must be stored, and regardless if the task exits with an error, or completes successfully, the callback *must* be called. The finish callback must pass the Task instance itself as the only and first argument: finish_callback(self)
62598f9c442bda511e95c217
@implementer(IExternal)
class External(object):
    """Used by TreeSerializer to encapsulate external references."""

    __slots__ = ("identifier", )

    @classmethod
    def _build(cls, data):
        """Alternate constructor from a one-element sequence."""
        identifier, = data
        return cls(identifier)

    def __init__(self, identifier):
        self.identifier = identifier

    def __repr__(self):
        return "<External %s>" % (self.identifier, )

    def __eq__(self, other):
        if not isinstance(other, External):
            return NotImplemented
        return self.identifier == other.identifier

    def __ne__(self, other):
        # BUG FIX: the original used a strict type() check here while __eq__
        # used isinstance, so a subclass instance could compare equal under
        # == yet not participate in != at all. Mirror __eq__ exactly.
        result = self.__eq__(other)
        if result is NotImplemented:
            return result
        return not result
Used by TreeSerializer to encapsulate external references.
62598f9c7047854f4633f19d
class InvalidEncodingChars(HL7apyException):
    """Raised when the specified HL7 encoding chars set is incorrect."""

    def __str__(self):
        # Fall back to a generic message when none was supplied.
        return self.message or 'Invalid encoding chars'
Raised when the encoding chars specified is not a correct set of HL7 encoding chars >>> from hl7apy.core import Message >>> encoding_chars = {'GROUP': '\r', 'SEGMENT': '\r', 'COMPONENT': '^', 'SUBCOMPONENT': '&', 'REPETITION': '~', 'ESCAPE': '\\'} >>> m = Message('ADT_A01', encoding_chars=encoding_chars) Traceback (most recent call last): ... InvalidEncodingChars: Missing required encoding chars
62598f9cd7e4931a7ef3be53
class Stock:
    """Holds a series of stock prices loaded from a CSV file or Google Finance.

    Construct with either ``filename=<csv path>`` or ``tickerParams=<params>``.
    After construction ``maxPrice``/``minPrice`` hold the extremes of
    ``StockPrices``.
    """

    def GetMaxStockPrice(self):
        """Return the highest recorded price."""
        return max(self.StockPrices)

    def GetMinStockPrice(self):
        """Return the lowest recorded price."""
        return min(self.StockPrices)

    def IsNumber(self, rowValue):
        """Return True if rowValue parses as a float."""
        try:
            float(rowValue)
            return True
        except ValueError:
            return False

    def GetGoogleData(self, Parameters):
        """Fetch intraday close prices from the (legacy) Google Finance API.

        Best effort, as in the original implementation: returns [] (or a
        partial list) on any network/parse failure.
        """
        stockPrices = []
        params = Parameters['tickerParams']
        # params[3] is truthy for NYSE-listed tickers — TODO confirm with callers.
        exchange = 'NYSE' if params[3] else 'NASD'
        currentTime = int(time.time())
        link = ('http://www.google.com/finance/getprices?q=%s&x=%s&i=%d&p=%dd'
                '&f=d,c,o,h,l&df=cpct&auto=1&ts=%d'
                % (params[0].upper(), exchange, params[2], params[1], currentTime))
        try:
            filePtr = urllib.urlopen(link)
            try:
                DataList = filePtr.readlines()
            finally:
                # BUG FIX: the original leaked the URL handle.
                filePtr.close()
            # The first 7 lines are header metadata; the rest are minute rows.
            for minuteData in DataList[7:]:
                datum = minuteData.split(',')
                stockPrices.append(float(datum[1]))
        except Exception:
            # Narrowed from a bare except (which also swallowed SystemExit /
            # KeyboardInterrupt); still best-effort by design.
            pass
        return stockPrices

    def GetStockPrices(self, FileName):
        """Read close prices from column 1 of a CSV file (header skipped)."""
        # BUG FIX: the original opened with the invalid mode string "r U"
        # (a ValueError on Python 3) and never closed the file; plain "r"
        # in a context manager is the correct equivalent.
        with open(FileName, "r") as fp:
            reader = csv.reader(fp)
            next(reader, None)  # skip header row
            prices = []
            for row in reader:
                if self.IsNumber(row[1]):
                    prices.append(float(row[1]))
        return prices

    def __init__(self, **kwds):
        if 'filename' in kwds:
            self.StockPrices = self.GetStockPrices(kwds['filename'])
        elif 'tickerParams' in kwds:
            self.StockPrices = self.GetGoogleData(kwds['tickerParams'])
            self.Ticker = kwds['tickerParams']['tickerParams'][0]
            self.CompanyName = kwds['tickerParams']['tickerParams'][4]
        else:
            # Typo "paramaters" fixed in the error message.
            raise Exception("bad parameters")
        self.maxPrice = self.GetMaxStockPrice()
        self.minPrice = self.GetMinStockPrice()
Stock class will read a CSV file and provide a list of stock prices
62598f9c21bff66bcd722a1f
class Mode:
    """Testing dummy holding slit and detector geometry constants."""

    def __init__(self, sw=0.116, sh=5.8):
        # Slit geometry; units unchanged from the original — TODO confirm units.
        self.SLIT_WIDTH = sw
        self.SLIT_HEIGHT = sh
        self.TAU_0 = 0.0
        self.DGAP = 0.0
        self.DPIX = 0.018
        self.NDET = 3
        self.NXPIX = 2048
        self.NYPIX = 2048
        # Inter-detector gap expressed in pixels.
        self.gap = self.DGAP / self.DPIX
Testing dummy class
62598f9c91f36d47f2230d7d
class AssetListUpdateApi(CustomFilterMixin, ListBulkCreateUpdateDestroyAPIView): <NEW_LINE> <INDENT> queryset = Asset.objects.all() <NEW_LINE> serializer_class = serializers.AssetSerializer <NEW_LINE> permission_classes = (IsSuperUser,)
Asset bulk update api
62598f9c435de62698e9bbaf
class PyPycares(PythonPackage):
    """Python interface to c-ares, a C library for asynchronous DNS requests
    and name resolution."""

    homepage = "https://github.com/saghul/pycares"
    url = "https://github.com/saghul/pycares/archive/pycares-3.0.0.tar.gz"

    version('3.0.0', sha256='28dc2bd59cf20399a6af4383cc8f57970cfca8b808ca05d6493812862ef0ca9c')

    depends_on('python@2.6:')
    depends_on('py-cffi')
pycares is a Python module which provides an interface to c-ares. c-ares is a C library that performs DNS requests and name resolutions asynchronously.
62598f9c56b00c62f0fb266c
class NoSuchRank(Error):
    """Attempted to look up a rank which did not exist."""
Attempted to look up a rank which did not exist
62598f9c460517430c431f38
class UpdateFlairTask(AbstractTaskType):
    """Updates a user's flair based on a choice messaged to the bot.

    :license: MIT
    :messages used: 'flair_update_success', 'rank_not_high_enough'
    """

    def handle(self, requirements):
        """Process each pending 'Flair' message and apply or deny the flair."""
        for message in requirements['messages']:
            flair = self.bot.data_manager.query(FlairModel).filter(
                FlairModel.name == message.body).first()
            # BUG FIX: the original compared attributes on the Query object
            # itself; .first() is required to materialize the subscriber row
            # (mirrors the flair query above).
            subscriber = self.bot.data_manager.query(
                SubscriberModel
            ).filter(
                SubscriberModel.reddit_username == message.author
            ).first()
            citizens_api = CitizensAPI()
            # BUG FIX: initialize so a missing title match cannot NameError.
            highest_rank = None
            for rank, value in citizens_api.titles.items():
                if value == subscriber.highest_rank:
                    highest_rank = rank
            if subscriber.highest_rank >= flair.required_rank:
                subscriber.flair = flair
                self.bot.data_manager.add(subscriber)
                self.bot.send_message(
                    'flair_update_success',
                    user_name=message.author,
                    new_flair=flair.name,
                    highest_rank=citizens_api.get_title(highest_rank)
                )
            else:
                self.bot.send_message(
                    'rank_not_high_enough',
                    user_name=message.author,
                    new_flair=flair.name,
                    highest_rank=citizens_api.get_title(highest_rank)
                )
        self.bot.data_manager.commit()
        return True

    def requirements(self):
        """Return unread 'Flair' messages, or False when there are none."""
        flair_messages = self.bot.match_unread('Flair')
        if flair_messages:
            return {'messages': flair_messages}
        return False
Updates a user's flair based on a choice from them. MCP :license: MIT :messages used: 'flair_update_success', 'rank_not_high_enough'
62598f9cbd1bec0571e14fa1
class GatewayRouteListResult(Model):
    """List of virtual network gateway routes.

    :param value: List of gateway routes
    :type value: list[~azure.mgmt.network.v2017_06_01.models.GatewayRoute]
    """

    _attribute_map = {
        'value': {'key': 'value', 'type': '[GatewayRoute]'},
    }

    def __init__(self, *, value=None, **kwargs) -> None:
        super().__init__(**kwargs)
        self.value = value
List of virtual network gateway routes. :param value: List of gateway routes :type value: list[~azure.mgmt.network.v2017_06_01.models.GatewayRoute]
62598f9c44b2445a339b684a
class AppointmentSchema(Schema):
    """Schema for serializing/deserializing an appointment record."""

    id = fields.Integer(dump_only=True)  # server-assigned; never loaded
    client_name = fields.String(required=True)
    request_date = fields.Date(required=False)
    appointment_date = fields.Date(required=False)
    appointment_time = fields.Time(required=False)
    preferred_clinician = fields.String(required=True)
    appointment_reason = fields.String(required=True)
Appointment Schema
62598f9c9b70327d1c57eb5c
class Vec:
    """Sparse vector over an arbitrary domain.

    Fields:
      D - the domain (a set)
      f - a dictionary mapping (some) domain elements to field elements;
          elements of D not appearing in f are implicitly mapped to zero.
    """

    def __init__(self, labels, function=None):
        if function is None:  # idiom fix: was `function == None`
            # Single-argument form: treat labels as a sequence and build
            # an index -> label mapping (dict(enumerate(...)) replaces the
            # hand-rolled comprehension over list(enumerate(labels))).
            self.f = dict(enumerate(labels))
            self.D = set(self.f.keys())
        else:
            self.D = labels
            self.f = function

    # Element access and arithmetic delegate to module-level helpers.
    __getitem__ = getitem
    __setitem__ = setitem
    __neg__ = neg
    __rmul__ = scalar_mul

    def __mul__(self, other):
        # Vec * Vec is the dot product; anything else is unsupported here.
        if isinstance(other, Vec):
            return dot(self, other)
        else:
            return NotImplemented

    def __truediv__(self, other):
        return (1/other)*self

    __add__ = add

    def __radd__(self, other):
        # Lets sum([...]) work: 0 + v == v (other cases fall through to None,
        # matching the original behavior).
        if other == 0:
            return self

    def __sub__(self, b):
        return self+(-b)

    __eq__ = equal

    def __str__(self):
        try:
            D_list = sorted(self.D)
        except TypeError:
            # Unorderable/mixed domain elements: fall back to hash order.
            D_list = sorted(self.D, key=hash)
        numdec = 3
        # Column width per key: wide enough for both the key and its value.
        wd = dict([(k,(1+max(len(str(k)), len('{0:.{1}G}'.format(self[k], numdec))))) if isinstance(self[k], int) or isinstance(self[k], float) else (k,(1+max(len(str(k)), len(str(self[k]))))) for k in D_list])
        s1 = ''.join(['{0:>{1}}'.format(k,wd[k]) for k in D_list])
        s2 = ''.join(['{0:>{1}.{2}G}'.format(self[k],wd[k],numdec) if isinstance(self[k], int) or isinstance(self[k], float) else '{0:>{1}}'.format(self[k], wd[k]) for k in D_list])
        return "\n" + s1 + "\n" + '-'*sum(wd.values()) +"\n" + s2

    def __repr__(self):
        return "Vec(" + str(self.D) + "," + str(self.f) + ")"

    def copy(self):
        return Vec(self.D, self.f.copy())
A vector has two fields: D - the domain (a set) f - a dictionary mapping (some) domain elements to field elements elements of D not appearing in f are implicitly mapped to zero
62598f9c498bea3a75a578dc
class TruncateMixin(MinimalHandler):
    """PasswordHash mixin adding a configurable truncation-error policy.

    When truncate_error is True, hashing a secret longer than the
    subclass-provided truncate_size raises PasswordTruncateError instead
    of silently truncating.
    """

    # Default policy: truncate silently, do not raise.
    truncate_error = False

    @classmethod
    def using(cls, truncate_error=None, **kwds):
        """Create a customized subclass, optionally overriding the policy."""
        subcls = super(TruncateMixin, cls).using(**kwds)
        if truncate_error is not None:
            truncate_error = as_bool(truncate_error, param="truncate_error")
            if truncate_error is not None:
                subcls.truncate_error = truncate_error
        return subcls

    @classmethod
    def _check_truncate_policy(cls, secret):
        """Raise if secret would be truncated and the policy forbids it."""
        assert cls.truncate_size is not None, "truncate_size must be set by subclass"
        if cls.truncate_error and len(secret) > cls.truncate_size:
            raise exc.PasswordTruncateError(cls)
PasswordHash mixin which provides a method that will check if secret would be truncated, and can be configured to throw an error.
62598f9c2c8b7c6e89bd358d
class JSONField(models.TextField): <NEW_LINE> <INDENT> def __init__(self, **kwargs): <NEW_LINE> <INDENT> if "default" not in kwargs: <NEW_LINE> <INDENT> kwargs["default"] = {} <NEW_LINE> <DEDENT> super().__init__(**kwargs) <NEW_LINE> <DEDENT> def to_python(self, value): <NEW_LINE> <INDENT> if not value: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> return json.loads(value) <NEW_LINE> <DEDENT> except (ValueError, TypeError): <NEW_LINE> <INDENT> return value <NEW_LINE> <DEDENT> <DEDENT> def get_prep_value(self, value): <NEW_LINE> <INDENT> if not value: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> if isinstance(value, (dict, list)): <NEW_LINE> <INDENT> return json.dumps(value, cls=DjangoJSONEncoder) <NEW_LINE> <DEDENT> return super().get_prep_value(value) <NEW_LINE> <DEDENT> def from_db_value(self, value, *args, **kwargs): <NEW_LINE> <INDENT> return self.to_python(value) <NEW_LINE> <DEDENT> def get_db_prep_save(self, value, *args, **kwargs): <NEW_LINE> <INDENT> if value is None: <NEW_LINE> <INDENT> value = {} <NEW_LINE> <DEDENT> return json.dumps(value, cls=DjangoJSONEncoder) <NEW_LINE> <DEDENT> def value_from_object(self, obj): <NEW_LINE> <INDENT> value = super().value_from_object(obj) <NEW_LINE> return json.dumps(value, cls=DjangoJSONEncoder)
JSON serializaed TextField.
62598f9c8da39b475be02fa1
class KeyCommand(Command): <NEW_LINE> <INDENT> def handle(self): <NEW_LINE> <INDENT> store = self.option('store') <NEW_LINE> key = bytes(Fernet.generate_key()).decode('utf-8') <NEW_LINE> if store: <NEW_LINE> <INDENT> with open('.env', 'r') as file: <NEW_LINE> <INDENT> data = file.readlines() <NEW_LINE> <DEDENT> for line_number, line in enumerate(data): <NEW_LINE> <INDENT> if line.startswith('KEY='): <NEW_LINE> <INDENT> data[line_number] = 'KEY={}\n'.format(key) <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> with open('.env', 'w') as file: <NEW_LINE> <INDENT> file.writelines(data) <NEW_LINE> <DEDENT> self.info('Key added to your .env file: {}'.format(key)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.info("Key: {}".format(key))
Generate a new key. key {--s|--store : Stores the key in the .env file}
62598f9cb7558d58954633eb
class http_parameter_required(object): <NEW_LINE> <INDENT> def __init__(self, request_method, parameter_name, human_readable_name ): <NEW_LINE> <INDENT> self.request_method = request_method <NEW_LINE> self.parameter_name = parameter_name <NEW_LINE> self.human_readable_name = human_readable_name <NEW_LINE> <DEDENT> def __call__(self, fn ): <NEW_LINE> <INDENT> def handle_request( request ): <NEW_LINE> <INDENT> method = self.request_method.lower() <NEW_LINE> lookup = { 'post': request.POST, 'get': request.GET, 'files': request.FILES, } <NEW_LINE> if 0 == len( lookup[ method ].get( self.parameter_name, '' ) ): <NEW_LINE> <INDENT> return API_Error( '%s not specified' % self.human_readable_name ) <NEW_LINE> <DEDENT> return fn( request ) <NEW_LINE> <DEDENT> return handle_request
Applied to a request handler to ensure a specified parameter is supplied with the request
62598f9c009cb60464d012e1
class mainPage(): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.scraper = DBAScraper() <NEW_LINE> self.trainer = machineLearning() <NEW_LINE> <DEDENT> def scrape(self): <NEW_LINE> <INDENT> self.scraper.scrape_dba() <NEW_LINE> <DEDENT> def train(self): <NEW_LINE> <INDENT> print("##################################################################") <NEW_LINE> print("#### Please specify a max number of epochs to run in training ####") <NEW_LINE> print("#### trainer will stop automatically when loss is no longer ####") <NEW_LINE> print("#### decreasing ####") <NEW_LINE> print("##################################################################") <NEW_LINE> epochs = input("Epochs to run: ") <NEW_LINE> self.trainer.train_or_predict(int(epochs)) <NEW_LINE> <DEDENT> def mainMenu(self): <NEW_LINE> <INDENT> main_menu_title = "DBA Price predictor for Bang & Olufsen items\n By Casper P\n" <NEW_LINE> main_menu_items = ["Scrape DBA", "Train model", "Predict price", "Quit"] <NEW_LINE> main_menu_cursor = " > " <NEW_LINE> main_menu_cursor_style = ("fg_red", "bold") <NEW_LINE> main_menu_style = ("bg_blue", "fg_yellow") <NEW_LINE> main_menu_exit = False <NEW_LINE> main_menu = TerminalMenu(menu_entries=main_menu_items, title=main_menu_title, menu_cursor=main_menu_cursor, menu_cursor_style=main_menu_cursor_style, menu_highlight_style=main_menu_style, cycle_cursor=True, clear_screen=True) <NEW_LINE> while not main_menu_exit: <NEW_LINE> <INDENT> main_sel = main_menu.show() <NEW_LINE> if main_sel == 0: <NEW_LINE> <INDENT> self.scrape() <NEW_LINE> time.sleep(3) <NEW_LINE> <DEDENT> elif main_sel == 1: <NEW_LINE> <INDENT> self.train() <NEW_LINE> <DEDENT> elif main_sel == 2: <NEW_LINE> <INDENT> self.trainer.predict() <NEW_LINE> input("Press enter for main menu") <NEW_LINE> <DEDENT> elif main_sel == 3: <NEW_LINE> <INDENT> main_menu_exit = True
Handle CLI GUI/ Main menu.
62598f9c0a50d4780f705195
class LogOutput: <NEW_LINE> <INDENT> def __init__(self, live): <NEW_LINE> <INDENT> self.live = live <NEW_LINE> self.stdout = [] <NEW_LINE> self.stderr = [] <NEW_LINE> <DEDENT> def log_stdout(self, line): <NEW_LINE> <INDENT> if self.live: <NEW_LINE> <INDENT> logging.info(line.strip()) <NEW_LINE> <DEDENT> self.stdout.append(line) <NEW_LINE> <DEDENT> def log_stderr(self, line): <NEW_LINE> <INDENT> if self.live: <NEW_LINE> <INDENT> logging.error(line.strip()) <NEW_LINE> <DEDENT> self.stderr.append(line)
Handles the log output of executed applications
62598f9c32920d7e50bc5e12
@register_relay_node <NEW_LINE> class ModulePass(Pass): <NEW_LINE> <INDENT> pass
A pass that works on tvm.relay.Module. Users don't need to interact with this class directly. Instead, a module pass should be created through `module_pass`, because the design of the `module_pass` API is flexible enough to handle the creation of a module pass in different manners. In addition, all members of a module pass can be accessed from the base class. The same rule applies to FunctionPass and SequentialPass as well.
62598f9c442bda511e95c218
class RegistrationProfile(models.Model): <NEW_LINE> <INDENT> ACTIVATED = u"CONFIRMADO" <NEW_LINE> user = models.ForeignKey(User, unique=True, verbose_name=_('user')) <NEW_LINE> activation_key = models.CharField(_('activation key'), max_length=40) <NEW_LINE> objects = RegistrationManager() <NEW_LINE> class Meta: <NEW_LINE> <INDENT> verbose_name = _('registration profile') <NEW_LINE> verbose_name_plural = _('registration profiles') <NEW_LINE> <DEDENT> def __unicode__(self): <NEW_LINE> <INDENT> return u"Registration information for %s" % self.user <NEW_LINE> <DEDENT> def activation_key_expired(self): <NEW_LINE> <INDENT> expiration_date = datetime.timedelta(days=settings.ACCOUNT_ACTIVATION_DAYS) <NEW_LINE> return self.activation_key == self.ACTIVATED or (self.user.date_joined + expiration_date <= datetime.datetime.now()) <NEW_LINE> <DEDENT> activation_key_expired.boolean = True
A simple profile which stores an activation key for use during user account registration. Generally, you will not want to interact directly with instances of this model; the provided manager includes methods for creating and activating new accounts, as well as for cleaning out accounts which have never been activated. While it is possible to use this model as the value of the ``AUTH_PROFILE_MODULE`` setting, it's not recommended that you do so. This model's sole purpose is to store data temporarily during account registration and activation.
62598f9c004d5f362081eedb
class Symbol(AbstractCachedSymbol): <NEW_LINE> <INDENT> is_Symbol = True <NEW_LINE> @classmethod <NEW_LINE> def __dtype_setup__(cls, **kwargs): <NEW_LINE> <INDENT> return kwargs.get('dtype', np.int32)
A :class:`sympy.Symbol` capable of mimicking an :class:`sympy.Indexed`
62598f9c3539df3088ecc072
class Match(object): <NEW_LINE> <INDENT> def __init__(self, match_results): <NEW_LINE> <INDENT> if match_results.shape.ndims != 1: <NEW_LINE> <INDENT> raise ValueError('match_results should have rank 1') <NEW_LINE> <DEDENT> if match_results.dtype != tf.int32: <NEW_LINE> <INDENT> raise ValueError('match_results should be an int32 or int64 scalar ' 'tensor') <NEW_LINE> <DEDENT> self._match_results = match_results <NEW_LINE> <DEDENT> @property <NEW_LINE> def match_results(self): <NEW_LINE> <INDENT> return self._match_results <NEW_LINE> <DEDENT> def matched_column_indices(self): <NEW_LINE> <INDENT> return self._reshape_and_cast(tf.where(tf.greater(self._match_results, -1))) <NEW_LINE> <DEDENT> def matched_column_indicator(self): <NEW_LINE> <INDENT> return tf.greater_equal(self._match_results, 0) <NEW_LINE> <DEDENT> def num_matched_columns(self): <NEW_LINE> <INDENT> return tf.size(input=self.matched_column_indices()) <NEW_LINE> <DEDENT> def unmatched_column_indices(self): <NEW_LINE> <INDENT> return self._reshape_and_cast(tf.where(tf.equal(self._match_results, -1))) <NEW_LINE> <DEDENT> def unmatched_column_indicator(self): <NEW_LINE> <INDENT> return tf.equal(self._match_results, -1) <NEW_LINE> <DEDENT> def num_unmatched_columns(self): <NEW_LINE> <INDENT> return tf.size(input=self.unmatched_column_indices()) <NEW_LINE> <DEDENT> def ignored_column_indices(self): <NEW_LINE> <INDENT> return self._reshape_and_cast(tf.where(self.ignored_column_indicator())) <NEW_LINE> <DEDENT> def ignored_column_indicator(self): <NEW_LINE> <INDENT> return tf.equal(self._match_results, -2) <NEW_LINE> <DEDENT> def num_ignored_columns(self): <NEW_LINE> <INDENT> return tf.size(input=self.ignored_column_indices()) <NEW_LINE> <DEDENT> def unmatched_or_ignored_column_indices(self): <NEW_LINE> <INDENT> return self._reshape_and_cast(tf.where(tf.greater(0, self._match_results))) <NEW_LINE> <DEDENT> def matched_row_indices(self): <NEW_LINE> <INDENT> return self._reshape_and_cast( 
tf.gather(self._match_results, self.matched_column_indices())) <NEW_LINE> <DEDENT> def _reshape_and_cast(self, t): <NEW_LINE> <INDENT> return tf.cast(tf.reshape(t, [-1]), tf.int32) <NEW_LINE> <DEDENT> def gather_based_on_match(self, input_tensor, unmatched_value, ignored_value): <NEW_LINE> <INDENT> input_tensor = tf.concat( [tf.stack([ignored_value, unmatched_value]), input_tensor], axis=0) <NEW_LINE> gather_indices = tf.maximum(self.match_results + 2, 0) <NEW_LINE> gathered_tensor = tf.gather(input_tensor, gather_indices) <NEW_LINE> return gathered_tensor
Class to store results from the matcher. This class is used to store the results from the matcher. It provides convenient methods to query the matching results.
62598f9ce64d504609df9296
class VolumeBarFn(beam.DoFn): <NEW_LINE> <INDENT> def __init__(self, threshold=10000): <NEW_LINE> <INDENT> beam.DoFn.__init__(self) <NEW_LINE> self.ticks_processed = Metrics.counter(self.__class__, 'ticks_processed') <NEW_LINE> self.buffer = 0 <NEW_LINE> self.threshold = threshold <NEW_LINE> <DEDENT> def process(self, element): <NEW_LINE> <INDENT> self.buffer += element.quantity * element.price <NEW_LINE> self.ticks_processed.inc() <NEW_LINE> if self.buffer >= self.threshold: <NEW_LINE> <INDENT> self.buffer = 0 <NEW_LINE> yield element
Parse the tick objects into volume bars
62598f9c507cdc57c63a4b51
class ExtractError(MBSError): <NEW_LINE> <INDENT> def __init__(self, tar_cmd, return_code, cmd_output, cause): <NEW_LINE> <INDENT> msg = "Failed to extract source backup" <NEW_LINE> details = ("Failed to tar. Tar command '%s' returned a non-zero " "exit status %s. Command output:\n%s" % (tar_cmd, return_code, cmd_output)) <NEW_LINE> super(ExtractError, self).__init__(msg=msg, details=details, cause=cause)
Base error for archive errors
62598f9c656771135c48943f
class Glyph(_Glyph): <NEW_LINE> <INDENT> def __new__(cls, code: Union[Text, CharCode], fg_color: Color = None, bg_color: Color = None): <NEW_LINE> <INDENT> if not isinstance(code, CharCode): <NEW_LINE> <INDENT> code = CharCode(ord(code)) <NEW_LINE> <DEDENT> fg_color = color.WHITE if fg_color is None else Color(*fg_color) <NEW_LINE> bg_color = color.BLACK if bg_color is None else Color(*bg_color) <NEW_LINE> return super().__new__(cls, code, fg_color, bg_color)
Represent a glyph code -- a unicode code point or a character (str of length 1). fg_color -- the foreground color of the glyph. bg_color -- the background color of the glyph.
62598f9c498bea3a75a578dd
class main_parser(Parser): <NEW_LINE> <INDENT> def validate(self): <NEW_LINE> <INDENT> return len([1 for d in weather_terms if d in self.query]) <NEW_LINE> <DEDENT> def parse(self, parent): <NEW_LINE> <INDENT> if self.info.has_key("key"): <NEW_LINE> <INDENT> WEATHER_API_KEY = self.info["key"] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.resp["text"] = "bad key" <NEW_LINE> self.resp["type"] = "weather" <NEW_LINE> self.resp["status"] = STATUS_ERR <NEW_LINE> return self.resp <NEW_LINE> <DEDENT> where = None <NEW_LINE> times = ["am", "pm", "minutes", "tommorow", "yesterday"] <NEW_LINE> times.extend(days) <NEW_LINE> if len([1 for d in weather_terms if d in self.query]): <NEW_LINE> <INDENT> when = [w for w in self.query if type(w) == dict and w.has_key("type") and w["type"] == "time"] <NEW_LINE> if len(when): <NEW_LINE> <INDENT> when = when[0] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> out = dt.datetime.now().strftime("%Y:%m:%d:%X").replace("/", ":") <NEW_LINE> when = [int(d) for d in out.split(":")] <NEW_LINE> <DEDENT> wtr.parse_weather(self, when, "", WEATHER_API_KEY) <NEW_LINE> <DEDENT> self.resp["status"] = STATUS_OK <NEW_LINE> self.resp["type"] = "weather" <NEW_LINE> return self.resp
weather parser class
62598f9c7d43ff24874272e0
class TestAutomationVerticalSplit(vertical_split.TestVerticalSplit): <NEW_LINE> <INDENT> def test_vertical_split(self): <NEW_LINE> <INDENT> worker_proc, _, worker_rpc_port = utils.run_vtworker_bg( ['--cell', 'test_nj'], auto_log=True) <NEW_LINE> vtworker_endpoint = 'localhost:' + str(worker_rpc_port) <NEW_LINE> automation_server_proc, automation_server_port = ( utils.run_automation_server()) <NEW_LINE> _, vtctld_endpoint = utils.vtctld.rpc_endpoint() <NEW_LINE> params = {'source_keyspace': 'source_keyspace', 'dest_keyspace': 'destination_keyspace', 'shard_list': '0', 'tables': '/moving/,view1', 'vtctld_endpoint': vtctld_endpoint, 'vtworker_endpoint': vtworker_endpoint, } <NEW_LINE> args = ['--server', 'localhost:' + str(automation_server_port), '--task', 'VerticalSplitTask'] <NEW_LINE> args.extend(['--param=' + k + '=' + v for k, v in params.items()]) <NEW_LINE> utils.run(environment.binary_args('automation_client') + args) <NEW_LINE> for t in [vertical_split.source_rdonly1, vertical_split.source_rdonly2]: <NEW_LINE> <INDENT> utils.run_vtctl(['RunHealthCheck', t.tablet_alias]) <NEW_LINE> <DEDENT> self._check_srv_keyspace('') <NEW_LINE> self._check_blacklisted_tables(vertical_split.source_master, ['/moving/', 'view1']) <NEW_LINE> self._check_blacklisted_tables(vertical_split.source_replica, ['/moving/', 'view1']) <NEW_LINE> self._check_blacklisted_tables(vertical_split.source_rdonly1, ['/moving/', 'view1']) <NEW_LINE> self._check_blacklisted_tables(vertical_split.source_rdonly2, ['/moving/', 'view1']) <NEW_LINE> vertical_split.destination_master.wait_for_binlog_player_count(0) <NEW_LINE> utils.kill_sub_process(automation_server_proc, soft=True) <NEW_LINE> utils.kill_sub_process(worker_proc, soft=True)
End-to-end test for running a vertical split via the automation framework. This test is a subset of vertical_split.py. The "VerticalSplitTask" automation operation runs the major commands for a vertical split instead of calling them "manually" from the test.
62598f9c435de62698e9bbb1
class TestRight(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.db_session = init_testing_db() <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> self.db_session.remove() <NEW_LINE> <DEDENT> def test_right_basic(self): <NEW_LINE> <INDENT> self.db_session.add(Right("read")) <NEW_LINE> self.db_session.commit() <NEW_LINE> acc1 = self.db_session.query(Right).filter_by(name="read").scalar() <NEW_LINE> self.assertEquals(acc1.name, "read") <NEW_LINE> self.db_session.add(Right("edit")) <NEW_LINE> self.db_session.add(Right("delete")) <NEW_LINE> self.db_session.commit() <NEW_LINE> self.assertEquals(self.db_session.query(Right).count(), 3) <NEW_LINE> acc1 = self.db_session.query(Right).filter_by(name="read").scalar() <NEW_LINE> acc1.name = "READ" <NEW_LINE> self.db_session.commit() <NEW_LINE> acc1 = self.db_session.query(Right).filter_by(name="READ").scalar() <NEW_LINE> self.assertEquals(acc1.name, "READ") <NEW_LINE> self.db_session.delete(acc1) <NEW_LINE> self.db_session.commit() <NEW_LINE> self.assertEquals(self.db_session.query(Right).count(), 2)
Test of right
62598f9c460517430c431f39
class Text: <NEW_LINE> <INDENT> def __init__(self, rect, size, color, screen, text): <NEW_LINE> <INDENT> self.screen = screen <NEW_LINE> self.rect = copy.deepcopy(rect) <NEW_LINE> self.text = text <NEW_LINE> self.color = color <NEW_LINE> self.font = pygame.font.SysFont(None, size) <NEW_LINE> self.text_image = None <NEW_LINE> self.text_image_rect = None <NEW_LINE> self.prep_img() <NEW_LINE> <DEDENT> def prep_img(self): <NEW_LINE> <INDENT> self.text_image = self.font.render(self.text, True, self.color) <NEW_LINE> self.text_image_rect = self.text_image.get_rect() <NEW_LINE> self.text_image_rect.center = self.rect.center <NEW_LINE> <DEDENT> def render(self): <NEW_LINE> <INDENT> self.screen.blit(self.text_image, self.text_image_rect)
Draws a text to the screen.
62598f9c91f36d47f2230d7e
class Line(object): <NEW_LINE> <INDENT> def __init__(self, dictionary): <NEW_LINE> <INDENT> for k, v in dictionary.items(): <NEW_LINE> <INDENT> if isinstance(v, collections.Mapping): <NEW_LINE> <INDENT> dictionary[k] = self.__class__(v) <NEW_LINE> <DEDENT> <DEDENT> self.__dict__.update(dictionary)
The default user-defined "bunch" class, for lines in a log
62598f9c44b2445a339b684b
class DataProvider(BaseDataProvider): <NEW_LINE> <INDENT> channels = 1 <NEW_LINE> n_class = 3 <NEW_LINE> def __init__(self, nx, path, a_min=0, a_max=20, sigma=1): <NEW_LINE> <INDENT> super(DataProvider, self).__init__(a_min, a_max) <NEW_LINE> self.nx = nx <NEW_LINE> self.path = path <NEW_LINE> self.sigma = sigma <NEW_LINE> self._load_data() <NEW_LINE> <DEDENT> def _load_data(self): <NEW_LINE> <INDENT> with h5py.File(self.path, "r") as fp: <NEW_LINE> <INDENT> self.image = gaussian_filter(fp["image"].value, self.sigma) <NEW_LINE> self.gal_map = fp["segmaps/galaxy"].value <NEW_LINE> self.star_map = fp["segmaps/star"].value <NEW_LINE> <DEDENT> <DEDENT> def _transpose_3d(self, a): <NEW_LINE> <INDENT> return np.stack([a[..., i].T for i in range(a.shape[2])], axis=2) <NEW_LINE> <DEDENT> def _post_process(self, data, labels): <NEW_LINE> <INDENT> op = np.random.randint(0, 4) <NEW_LINE> if op == 0: <NEW_LINE> <INDENT> if np.random.randint(0, 2) == 0: <NEW_LINE> <INDENT> data, labels = self._transpose_3d(data[:,:,np.newaxis]), self._transpose_3d(labels) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> data, labels = np.rot90(data, op), np.rot90(labels, op) <NEW_LINE> <DEDENT> return data, labels <NEW_LINE> <DEDENT> def _next_data(self): <NEW_LINE> <INDENT> ix = np.random.randint(0, self.image.shape[0] - self.nx) <NEW_LINE> iy = np.random.randint(0, self.image.shape[1] - self.nx) <NEW_LINE> slx = slice(ix, ix+self.nx) <NEW_LINE> sly = slice(iy, iy+self.nx) <NEW_LINE> data = self.image[slx, sly] <NEW_LINE> gal_seg = self.gal_map[slx, sly] <NEW_LINE> star_seg = self.star_map[slx, sly] <NEW_LINE> labels = np.zeros((self.nx, self.nx, self.n_class), dtype=np.float32) <NEW_LINE> labels[..., 1] = np.clip(gal_seg, 0, 1) <NEW_LINE> labels[..., 2] = np.clip(star_seg, 0, 1) <NEW_LINE> labels[..., 0] = (1+np.clip(labels[...,1] + labels[...,2], 0, 1))%2 <NEW_LINE> return data, labels
Extends the BaseDataProvider to randomly select the next chunk of the image and randomly applies transformations to the data
62598f9cf548e778e596b369
class Solution: <NEW_LINE> <INDENT> def canJump(self, A): <NEW_LINE> <INDENT> if A is None or len(A)==0: <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> farthest=A[0] <NEW_LINE> for i in range(len(A)): <NEW_LINE> <INDENT> if A[i]+i>=farthest: <NEW_LINE> <INDENT> farthest=A[i]+i <NEW_LINE> <DEDENT> <DEDENT> return farthest>len(A)-1
@param A: A list of integers @return: A boolean
62598f9c8e7ae83300ee8e5d
class LibvirtBaseVolumeDriver(object): <NEW_LINE> <INDENT> def __init__(self, connection, is_block_dev): <NEW_LINE> <INDENT> self.connection = connection <NEW_LINE> self.is_block_dev = is_block_dev <NEW_LINE> <DEDENT> def connect_volume(self, connection_info, disk_info): <NEW_LINE> <INDENT> conf = vconfig.LibvirtConfigGuestDisk() <NEW_LINE> conf.driver_name = virtutils.pick_disk_driver_name( self.connection._get_hypervisor_version(), self.is_block_dev ) <NEW_LINE> conf.source_device = disk_info['type'] <NEW_LINE> conf.driver_format = "raw" <NEW_LINE> conf.driver_cache = "none" <NEW_LINE> conf.target_dev = disk_info['dev'] <NEW_LINE> conf.target_bus = disk_info['bus'] <NEW_LINE> conf.serial = connection_info.get('serial') <NEW_LINE> data = {} <NEW_LINE> if 'data' in connection_info: <NEW_LINE> <INDENT> data = connection_info['data'] <NEW_LINE> <DEDENT> if 'logical_block_size' in data: <NEW_LINE> <INDENT> conf.logical_block_size = data['logical_block_size'] <NEW_LINE> <DEDENT> if 'physical_block_size' in data: <NEW_LINE> <INDENT> conf.physical_block_size = data['physical_block_size'] <NEW_LINE> <DEDENT> if 'qos_specs' in data and data['qos_specs']: <NEW_LINE> <INDENT> tune_opts = ['total_bytes_sec', 'read_bytes_sec', 'write_bytes_sec', 'total_iops_sec', 'read_iops_sec', 'write_iops_sec'] <NEW_LINE> specs = data['qos_specs'] <NEW_LINE> if isinstance(specs, dict): <NEW_LINE> <INDENT> for k, v in specs.iteritems(): <NEW_LINE> <INDENT> if k in tune_opts: <NEW_LINE> <INDENT> new_key = 'disk_' + k <NEW_LINE> setattr(conf, new_key, v) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> LOG.warn(_LW('Unknown content in connection_info/' 'qos_specs: %s'), specs) <NEW_LINE> <DEDENT> <DEDENT> if 'access_mode' in data and data['access_mode']: <NEW_LINE> <INDENT> access_mode = data['access_mode'] <NEW_LINE> if access_mode in ('ro', 'rw'): <NEW_LINE> <INDENT> conf.readonly = access_mode == 'ro' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> LOG.error(_LE('Unknown 
content in ' 'connection_info/access_mode: %s'), access_mode) <NEW_LINE> raise exception.InvalidVolumeAccessMode( access_mode=access_mode) <NEW_LINE> <DEDENT> <DEDENT> return conf <NEW_LINE> <DEDENT> def disconnect_volume(self, connection_info, disk_dev): <NEW_LINE> <INDENT> pass
Base class for volume drivers.
62598f9c63d6d428bbee256f
class CodeBlockProcessor(BlockProcessor): <NEW_LINE> <INDENT> def test(self, parent, block): <NEW_LINE> <INDENT> return block.startswith(' '*self.tab_length) <NEW_LINE> <DEDENT> def run(self, parent, blocks): <NEW_LINE> <INDENT> sibling = self.lastChild(parent) <NEW_LINE> block = blocks.pop(0) <NEW_LINE> theRest = '' <NEW_LINE> if sibling and sibling.tag == "pre" and len(sibling) and sibling[0].tag == "code": <NEW_LINE> <INDENT> code = sibling[0] <NEW_LINE> block, theRest = self.detab(block) <NEW_LINE> code.text = util.AtomicString('%s\n%s\n' % (code.text, block.rstrip())) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> pre = util.etree.SubElement(parent, 'pre') <NEW_LINE> code = util.etree.SubElement(pre, 'code') <NEW_LINE> block, theRest = self.detab(block) <NEW_LINE> code.text = util.AtomicString('%s\n' % block.rstrip()) <NEW_LINE> <DEDENT> if theRest: <NEW_LINE> <INDENT> blocks.insert(0, theRest)
Process code blocks.
62598f9c0c0af96317c56140
class File(AutotoolsPackage): <NEW_LINE> <INDENT> homepage = "http://www.darwinsys.com/file/" <NEW_LINE> url = "https://astron.com/pub/file/file-5.37.tar.gz" <NEW_LINE> version('5.40', sha256='167321f43c148a553f68a0ea7f579821ef3b11c27b8cbe158e4df897e4a5dd57') <NEW_LINE> version('5.39', sha256='f05d286a76d9556243d0cb05814929c2ecf3a5ba07963f8f70bfaaa70517fad1') <NEW_LINE> version('5.38', sha256='593c2ffc2ab349c5aea0f55fedfe4d681737b6b62376a9b3ad1e77b2cc19fa34') <NEW_LINE> version('5.37', sha256='e9c13967f7dd339a3c241b7710ba093560b9a33013491318e88e6b8b57bae07f')
The file command is "a file type guesser", that is, a command-line tool that tells you in words what kind of data a file contains
62598f9cbaa26c4b54d4f06a
class Figshare(DoiProvider): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.hosts = [ { "hostname": [ "https://figshare.com/articles/", "http://figshare.com/articles/", "https://figshare.com/account/articles/", ], "api": "https://api.figshare.com/v2/articles/", "filepath": "files", "filename": "name", "download": "download_url", } ] <NEW_LINE> <DEDENT> url_regex = re.compile(r"(.*)/articles/(code/|dataset/)?([^/]+)/(\d+)(/)?(\d+)?") <NEW_LINE> def detect(self, doi, ref=None, extra_args=None): <NEW_LINE> <INDENT> url = self.doi2url(doi) <NEW_LINE> for host in self.hosts: <NEW_LINE> <INDENT> if any([url.startswith(s) for s in host["hostname"]]): <NEW_LINE> <INDENT> match = self.url_regex.match(url) <NEW_LINE> if match: <NEW_LINE> <INDENT> self.article_id = match.groups()[3] <NEW_LINE> self.article_version = match.groups()[5] <NEW_LINE> if not self.article_version: <NEW_LINE> <INDENT> self.article_version = "1" <NEW_LINE> <DEDENT> return { "article": self.article_id, "host": host, "version": self.article_version, } <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> def fetch(self, spec, output_dir, yield_output=False): <NEW_LINE> <INDENT> article_id = spec["article"] <NEW_LINE> article_version = spec["version"] <NEW_LINE> host = spec["host"] <NEW_LINE> yield "Fetching Figshare article {} in version {}.\n".format( article_id, article_version ) <NEW_LINE> resp = self.urlopen( "{}{}/versions/{}".format(host["api"], article_id, article_version), headers={"accept": "application/json"}, ) <NEW_LINE> article = resp.json() <NEW_LINE> files = deep_get(article, host["filepath"]) <NEW_LINE> files = [file for file in files if not file["is_link_only"]] <NEW_LINE> only_one_file = len(files) == 1 <NEW_LINE> for file_ref in files: <NEW_LINE> <INDENT> unzip = file_ref["name"].endswith(".zip") and only_one_file <NEW_LINE> for line in self.fetch_file(file_ref, host, output_dir, unzip): 
<NEW_LINE> <INDENT> yield line <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> @property <NEW_LINE> def content_id(self): <NEW_LINE> <INDENT> return "{}.v{}".format(self.article_id, self.article_version)
Provide contents of a Figshare article. See https://docs.figshare.com/#public_article for API docs. Examples: - https://doi.org/10.6084/m9.figshare.9782777 - https://doi.org/10.6084/m9.figshare.9782777.v2 - https://figshare.com/articles/binder-examples_requirements/9784088 (only one zipfile, no DOI)
62598f9c379a373c97d98dd2
class TransactionalBank(object): <NEW_LINE> <INDENT> def __init__(self, factory): <NEW_LINE> <INDENT> self.logger = logging.getLogger("springpythontest.testSupportClasses.TransactionalBank") <NEW_LINE> self.dt = DatabaseTemplate(factory) <NEW_LINE> <DEDENT> def open(self, account_num): <NEW_LINE> <INDENT> self.logger.debug("Opening account %s with $0 balance." % account_num) <NEW_LINE> self.dt.execute("INSERT INTO account (account_num, balance) VALUES (?,?)", (account_num, 0)) <NEW_LINE> <DEDENT> def deposit(self, amount, account_num): <NEW_LINE> <INDENT> self.logger.debug("Depositing $%s into %s" % (amount, account_num)) <NEW_LINE> rows = self.dt.execute("UPDATE account SET balance = balance + ? WHERE account_num = ?", (amount, account_num)) <NEW_LINE> if rows == 0: <NEW_LINE> <INDENT> raise BankException("Account %s does NOT exist" % account_num) <NEW_LINE> <DEDENT> <DEDENT> def withdraw(self, amount, account_num): <NEW_LINE> <INDENT> self.logger.debug("Withdrawing $%s from %s" % (amount, account_num)) <NEW_LINE> rows = self.dt.execute("UPDATE account SET balance = balance - ? WHERE account_num = ?", (amount, account_num)) <NEW_LINE> if rows == 0: <NEW_LINE> <INDENT> raise BankException("Account %s does NOT exist" % account_num) <NEW_LINE> <DEDENT> return amount <NEW_LINE> <DEDENT> def balance(self, account_num): <NEW_LINE> <INDENT> return self.dt.query_for_object("SELECT balance FROM account WHERE account_num = ?", (account_num,), types.FloatType) <NEW_LINE> <DEDENT> @transactional() <NEW_LINE> def transfer(self, amount, from_account, to_account): <NEW_LINE> <INDENT> self.logger.debug("Transferring $%s from %s to %s." % (amount, from_account, to_account)) <NEW_LINE> self.withdraw(amount, from_account) <NEW_LINE> self.deposit(amount, to_account)
This sample application can be used to demonstrate the value of atomic operations. The transfer operation must be wrapped in a transaction in order to perform correctly. Otherwise, any errors in the deposit will allow the from-account to leak assets.
62598f9c8da39b475be02fa3
class PhraseAround(_Through): <NEW_LINE> <INDENT> def __init__(self, namespace_uri, localnames): <NEW_LINE> <INDENT> self._config = { 'namespace-uri': namespace_uri, 'localname': assert_list_of_type(localnames, str) }
A phrase around.
62598f9ca05bb46b3848a63d
class OscPacket(object): <NEW_LINE> <INDENT> def __init__(self, dgram): <NEW_LINE> <INDENT> now = time.time() <NEW_LINE> try: <NEW_LINE> <INDENT> if osc_bundle.OscBundle.dgram_is_bundle(dgram): <NEW_LINE> <INDENT> self._messages = sorted( _timed_msg_of_bundle(osc_bundle.OscBundle(dgram), now), key=lambda x: x.time) <NEW_LINE> <DEDENT> elif osc_message.OscMessage.dgram_is_message(dgram): <NEW_LINE> <INDENT> self._messages = (TimedMessage(now, osc_message.OscMessage(dgram)),) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ParseError( 'OSC Packet should at least contain an OscMessage or an ' 'OscBundle.') <NEW_LINE> <DEDENT> <DEDENT> except (osc_bundle.ParseError, osc_message.ParseError) as pe: <NEW_LINE> <INDENT> raise ParseError('Could not parse packet %s' % pe) <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def messages(self): <NEW_LINE> <INDENT> return self._messages
Unit of transmission of the OSC protocol. Any application that sends OSC Packets is an OSC Client. Any application that receives OSC Packets is an OSC Server.
62598f9c0a50d4780f705197
@deconstructible <NEW_LINE> class FileSystemFinder(BaseFinder): <NEW_LINE> <INDENT> def __init__(self, app_names=None, *args, **kwargs): <NEW_LINE> <INDENT> self.locations = [] <NEW_LINE> self.storages = OrderedDict() <NEW_LINE> if not isinstance(settings.STATICFILES_DIRS, (list, tuple)): <NEW_LINE> <INDENT> raise ImproperlyConfigured( "Your STATICFILES_DIRS setting is not a tuple or list; " "perhaps you forgot a trailing comma?") <NEW_LINE> <DEDENT> for root in settings.STATICFILES_DIRS: <NEW_LINE> <INDENT> if isinstance(root, (list, tuple)): <NEW_LINE> <INDENT> prefix, root = root <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> prefix = '' <NEW_LINE> <DEDENT> if settings.STATIC_ROOT and os.path.abspath(settings.STATIC_ROOT) == os.path.abspath(root): <NEW_LINE> <INDENT> raise ImproperlyConfigured( "The STATICFILES_DIRS setting should " "not contain the STATIC_ROOT setting") <NEW_LINE> <DEDENT> if (prefix, root) not in self.locations: <NEW_LINE> <INDENT> self.locations.append((prefix, root)) <NEW_LINE> <DEDENT> <DEDENT> for prefix, root in self.locations: <NEW_LINE> <INDENT> filesystem_storage = FileSystemStorage(location=root) <NEW_LINE> filesystem_storage.prefix = prefix <NEW_LINE> self.storages[root] = filesystem_storage <NEW_LINE> <DEDENT> super(FileSystemFinder, self).__init__(*args, **kwargs) <NEW_LINE> <DEDENT> def find(self, path, all=False): <NEW_LINE> <INDENT> matches = [] <NEW_LINE> for prefix, root in self.locations: <NEW_LINE> <INDENT> if root not in searched_locations: <NEW_LINE> <INDENT> searched_locations.append(root) <NEW_LINE> <DEDENT> matched_path = self.find_location(root, path, prefix) <NEW_LINE> if matched_path: <NEW_LINE> <INDENT> if not all: <NEW_LINE> <INDENT> return matched_path <NEW_LINE> <DEDENT> matches.append(matched_path) <NEW_LINE> <DEDENT> <DEDENT> return matches <NEW_LINE> <DEDENT> def find_location(self, root, path, prefix=None): <NEW_LINE> <INDENT> if prefix: <NEW_LINE> <INDENT> prefix = '%s%s' % (prefix, os.sep) <NEW_LINE> if not 
path.startswith(prefix): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> path = path[len(prefix):] <NEW_LINE> <DEDENT> lpath = self.storages.get(root) <NEW_LINE> lpath = lpath.path(path) <NEW_LINE> if lpath.exists(): <NEW_LINE> <INDENT> return str(lpath) <NEW_LINE> <DEDENT> <DEDENT> def list(self, ignore_patterns): <NEW_LINE> <INDENT> for prefix, root in self.locations: <NEW_LINE> <INDENT> storage = self.storages[root] <NEW_LINE> for path in utils.get_files(storage, ignore_patterns): <NEW_LINE> <INDENT> yield path, storage
A static files finder that uses the ``STATICFILES_DIRS`` setting to locate files.
62598f9c4f6381625f19939b
class FemWorkbench (Workbench): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.__class__.Icon = FreeCAD.getResourceDir() + "Mod/Fem/Resources/icons/preferences-fem.svg" <NEW_LINE> self.__class__.MenuText = "FEM" <NEW_LINE> self.__class__.ToolTip = "FEM workbench" <NEW_LINE> <DEDENT> def Initialize(self): <NEW_LINE> <INDENT> import Fem <NEW_LINE> import FemGui <NEW_LINE> import subprocess <NEW_LINE> from platform import system <NEW_LINE> ccx_path = FreeCAD.ParamGet("User parameter:BaseApp/Preferences/Mod/Fem").GetString("ccxBinaryPath") <NEW_LINE> if not ccx_path: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> if system() == 'Linux': <NEW_LINE> <INDENT> p1 = subprocess.Popen(['which', 'ccx'], stdout=subprocess.PIPE) <NEW_LINE> if p1.wait() == 0: <NEW_LINE> <INDENT> ccx_path = p1.stdout.read().split('\n')[0] <NEW_LINE> <DEDENT> <DEDENT> elif system() == 'Windows': <NEW_LINE> <INDENT> ccx_path = FreeCAD.getHomePath() + 'bin/ccx.exe' <NEW_LINE> <DEDENT> FreeCAD.ParamGet("User parameter:BaseApp/Preferences/Mod/Fem").SetString("ccxBinaryPath", ccx_path) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> FreeCAD.Console.PrintError(e.message) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def GetClassName(self): <NEW_LINE> <INDENT> return "FemGui::Workbench"
Fem workbench object
62598f9cb5575c28eb712bac
class DeviceNotFound(FmAnalyserException): <NEW_LINE> <INDENT> pass
Raised when the system can't connect the device
62598f9ceab8aa0e5d30bb43
class ParallelismTest(): <NEW_LINE> <INDENT> def __init__(self, use_dummy_version=False): <NEW_LINE> <INDENT> print('') <NEW_LINE> self.thread_number = 0 <NEW_LINE> if use_dummy_version: <NEW_LINE> <INDENT> print('Executing Parallel Threading Test:') <NEW_LINE> thread_pool = MultiThreadPool(10) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print('Executing Parallel Processing Test:') <NEW_LINE> thread_pool = MultiProcessPool(10) <NEW_LINE> <DEDENT> thread_a_results = thread_pool.map_async(self.parallel_function_a, [1, 2, 3]) <NEW_LINE> thread_b_results = thread_pool.map_async(self.parallel_function_b, [1, 2]) <NEW_LINE> thread_c_results = thread_pool.map_async(self.parallel_function_c, [1]) <NEW_LINE> thread_pool.close() <NEW_LINE> thread_pool.join() <NEW_LINE> <DEDENT> def get_random_float(self): <NEW_LINE> <INDENT> random_float = float(random.randrange(0, 200))/100 <NEW_LINE> return random_float <NEW_LINE> <DEDENT> def parallel_function_a(self, *args): <NEW_LINE> <INDENT> this_thread_number = self.thread_number <NEW_LINE> self.thread_number += 1 <NEW_LINE> print(' Thread {0} starting function A.'.format(this_thread_number)) <NEW_LINE> time.sleep(self.get_random_float()) <NEW_LINE> print(' Thread {0} ending function A.'.format(this_thread_number)) <NEW_LINE> <DEDENT> def parallel_function_b(self, *args): <NEW_LINE> <INDENT> this_thread_number = self.thread_number <NEW_LINE> self.thread_number += 1 <NEW_LINE> print(' Thread {0} starting function B.'.format(this_thread_number)) <NEW_LINE> time.sleep(self.get_random_float()) <NEW_LINE> print(' Thread {0} ending function B.'.format(this_thread_number)) <NEW_LINE> <DEDENT> def parallel_function_c(self, *args): <NEW_LINE> <INDENT> this_thread_number = self.thread_number <NEW_LINE> self.thread_number += 1 <NEW_LINE> print(' Thread {0} starting function C.'.format(this_thread_number)) <NEW_LINE> time.sleep(self.get_random_float()) <NEW_LINE> print(' Thread {0} ending function C.'.format(this_thread_number))
Parallel execution testing class. To run threading as parallel processes, set "use_dummy_version" var to False. To run threading as parallel threads on a single process, set "use_dummy_version" var to True.
62598f9c99cbb53fe6830c90
class BuildFactory(util.ComparableMixin): <NEW_LINE> <INDENT> buildClass = Build <NEW_LINE> useProgress = 1 <NEW_LINE> workdir = "build" <NEW_LINE> compare_attrs = ['buildClass', 'steps', 'useProgress', 'workdir'] <NEW_LINE> def __init__(self, steps=None): <NEW_LINE> <INDENT> if steps is None: <NEW_LINE> <INDENT> steps = [] <NEW_LINE> <DEDENT> self.steps = [self._makeStepFactory(s) for s in steps] <NEW_LINE> <DEDENT> def _makeStepFactory(self, step_or_factory): <NEW_LINE> <INDENT> if isinstance(step_or_factory, BuildStep): <NEW_LINE> <INDENT> return step_or_factory.getStepFactory() <NEW_LINE> <DEDENT> return step_or_factory <NEW_LINE> <DEDENT> def newBuild(self, request): <NEW_LINE> <INDENT> b = self.buildClass(request) <NEW_LINE> b.useProgress = self.useProgress <NEW_LINE> b.workdir = self.workdir <NEW_LINE> b.setStepFactories(self.steps) <NEW_LINE> return b <NEW_LINE> <DEDENT> def addStep(self, step_or_factory, **kwargs): <NEW_LINE> <INDENT> if isinstance(step_or_factory, BuildStep): <NEW_LINE> <INDENT> if kwargs: <NEW_LINE> <INDENT> raise ArgumentsInTheWrongPlace() <NEW_LINE> <DEDENT> s = step_or_factory.getStepFactory() <NEW_LINE> <DEDENT> elif type(step_or_factory) == type(BuildStep) and issubclass(step_or_factory, BuildStep): <NEW_LINE> <INDENT> s = (step_or_factory, dict(kwargs)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError('%r is not a BuildStep nor BuildStep subclass' % step_or_factory) <NEW_LINE> <DEDENT> self.steps.append(s) <NEW_LINE> <DEDENT> def addSteps(self, steps): <NEW_LINE> <INDENT> for s in steps: <NEW_LINE> <INDENT> self.addStep(s)
@cvar buildClass: class to use when creating builds @type buildClass: L{buildbot.process.base.Build}
62598f9cfff4ab517ebcd5ac
class RBF (Stationary): <NEW_LINE> <INDENT> def __init__(self,n_dims,variance=1.,lengthscale=1.,active_dims=None,name=None): <NEW_LINE> <INDENT> super(RBF, self).__init__( n_dims=n_dims, active_dims=active_dims, name=name) <NEW_LINE> logger.debug('Initializing %s kernel.' % self.name) <NEW_LINE> assert np.size(variance) == 1 <NEW_LINE> assert np.size(lengthscale) == 1 <NEW_LINE> self.variance = np.float64(variance) <NEW_LINE> self.lengthscale = np.float64(lengthscale) <NEW_LINE> self.parameter_list = ['variance','lengthscale'] <NEW_LINE> self.constraint_map = {'variance':'+ve', 'lengthscale':'+ve'} <NEW_LINE> <DEDENT> def cov(self,x,z=None,lengthscale=None): <NEW_LINE> <INDENT> if self.lengthscale < 1e-6: <NEW_LINE> <INDENT> K = self.variance * (self.distances_squared(x=x,z=z)==0) <NEW_LINE> logger.debug('protected RBF against zero-division since lengthscale' + 'too small (%s).' % repr(self.lengthscale)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if lengthscale is None: <NEW_LINE> <INDENT> K = self.variance*np.exp( -0.5*self.distances_squared(x=x,z=z) / self.lengthscale**2 ) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> lengthscale = np.asarray(lengthscale).flatten() <NEW_LINE> assert len(lengthscale) == self.active_dims.size <NEW_LINE> K = self.variance * np.exp( -0.5 * self.distances_squared(x=x,z=z,lengthscale=lengthscale)) <NEW_LINE> <DEDENT> <DEDENT> K = self._apply_children(K, x, z) <NEW_LINE> return K
squared exponential kernel with the same shape parameter in each dimension
62598f9c851cf427c66b8086
class RMS(Variance) : <NEW_LINE> <INDENT> def __init__ ( self , xmin , xmax , err = False ) : <NEW_LINE> <INDENT> Variance.__init__ ( self , xmin , xmax , err ) <NEW_LINE> <DEDENT> def __call__ ( self , func , *args ) : <NEW_LINE> <INDENT> args = args if args else self._args <NEW_LINE> var2 = Variance.__call__ ( self , func , *args ) <NEW_LINE> import ostap.math.math_ve as ME <NEW_LINE> return ME.sqrt ( var2 ) <NEW_LINE> <DEDENT> def __str__ ( self ) : <NEW_LINE> <INDENT> return "RMS(%s,%s,%s)" % ( self._xmin , self._xmax , self._err )
Calculate the RMS for the distribution or function >>> xmin,xmax = 0,math.pi >>> rms = RMS ( xmin,xmax ) ## specify min/max >>> value = rms ( math.sin )
62598f9ca17c0f6771d5bff9
class Sine(DataGenerator): <NEW_LINE> <INDENT> def __init__(self,phase=0.0,frequency=1.,amplitude=1.,sampling_frequency=1.,*args,**kwargs): <NEW_LINE> <INDENT> self.phase = phase <NEW_LINE> self.frequency = frequency <NEW_LINE> self.amplitude = amplitude <NEW_LINE> super(Sine,self).__init__(sampling_frequency=sampling_frequency, *args,**kwargs) <NEW_LINE> <DEDENT> def generate(self): <NEW_LINE> <INDENT> t = 2.0 * numpy.pi * self.index * self.frequency / self.sampling_frequency + self.phase <NEW_LINE> return self.amplitude * numpy.sin(t)
Generates a sine wave
62598f9ca219f33f346c65d8
class CurveHelperNoiseProperties(bpy.types.PropertyGroup) : <NEW_LINE> <INDENT> blend_items = [ ('REPLACE', 'Replace', ""), ('ADD', 'Add', ""), ('SUBSTRACT', 'Substract', ""), ('MULTIPLY', 'Multiply', ""), ] <NEW_LINE> blend_type : bpy.props.EnumProperty(name = "Blend Type", items = blend_items) <NEW_LINE> scale : bpy.props.FloatProperty(name = "Scale", default = 1) <NEW_LINE> strength : bpy.props.FloatProperty(name = "Strength", default = 1) <NEW_LINE> phase : bpy.props.FloatProperty(name = "Phase", default = 1) <NEW_LINE> offset : bpy.props.FloatProperty(name = "Offset") <NEW_LINE> depth : bpy.props.IntProperty(name = "Depth")
name : StringProperty()
62598f9c7047854f4633f1a1
class Fin(MecaComponent): <NEW_LINE> <INDENT> def __init__(self, doc, name='fin'): <NEW_LINE> <INDENT> self.data = { 'len': 365., 'e': 222., 'p': 55., 'm': 255., 'thick': 3., } <NEW_LINE> shape = [] <NEW_LINE> shape.append(Vector(0, 0, 0)) <NEW_LINE> shape.append(Vector(0, 0, self.data['len'])) <NEW_LINE> shape.append(Vector(self.data['e'], 0, self.data['len'] - self.data['p'])) <NEW_LINE> shape.append(Vector(self.data['e'], 0, self.data['len'] - self.data['p'] - self.data['m'])) <NEW_LINE> shape.append(Vector(0, 0, 0)) <NEW_LINE> wire = Part.makePolygon(shape) <NEW_LINE> wire.translate(Vector(0, - self.data['thick'] / 2, 0)) <NEW_LINE> face = Part.Face(wire) <NEW_LINE> comp = face.extrude(Vector(0, self.data['thick'], 0)) <NEW_LINE> MecaComponent.__init__(self, doc, comp, name, (0.95, 1., 1.))
make a fin
62598f9cf7d966606f747da6
class Template(grok.Adapter): <NEW_LINE> <INDENT> grok.context(interfaces.IPossibleTemplate) <NEW_LINE> grok.implements(interfaces.ITemplate) <NEW_LINE> def compile(self, content): <NEW_LINE> <INDENT> return zope.component.getMultiAdapter( (content, ITemplateConfiguration(self.context)), interfaces.ICompilationStrategy ).compile()
A template. This object extracts configuration from a possible template and delegates to a compilation strategy.
62598f9c38b623060ffa8e50
class SmallModulesChecker(BaseChecker): <NEW_LINE> <INDENT> __implements__ = IAstroidChecker <NEW_LINE> name = 'small-modules' <NEW_LINE> priority = -1 <NEW_LINE> msgs = { 'R1273': ('Too many classes in module "%s" (%s/%s classes)', 'too-many-classes', 'Object Calisthenics Rule 7'), } <NEW_LINE> options = () <NEW_LINE> def __init__(self, linter=None): <NEW_LINE> <INDENT> BaseChecker.__init__(self, linter) <NEW_LINE> self._max_classes = 10 <NEW_LINE> self._classes = 0 <NEW_LINE> <DEDENT> def visit_module(self, node): <NEW_LINE> <INDENT> self._classes = 0 <NEW_LINE> <DEDENT> @check_messages('too-many-classes') <NEW_LINE> def leave_module(self, node): <NEW_LINE> <INDENT> if self._classes > self._max_classes: <NEW_LINE> <INDENT> self.add_message('too-many-classes', node=node, args=(node.name, self._classes, self._max_classes)) <NEW_LINE> <DEDENT> <DEDENT> def visit_classdef(self, node): <NEW_LINE> <INDENT> self._classes += 1
checks that modules have less than the maximum number of classes.
62598f9c8e7ae83300ee8e5e
class LinearModel(object): <NEW_LINE> <INDENT> def __init__(self, game, config): <NEW_LINE> <INDENT> self.state_depth, self.board_x, self.board_y = game.board.state.shape <NEW_LINE> self.put_action_size = game.get_placement_action_size() <NEW_LINE> self.capture_action_size = game.get_capture_action_size() <NEW_LINE> self.config = config <NEW_LINE> inputs = Input(shape=(self.state_depth, self.board_x, self.board_y), name="inputs") <NEW_LINE> hidden = Flatten()(inputs) <NEW_LINE> hidden = Dense(self.config.hidden_size, activation='linear')(hidden) <NEW_LINE> self.pi = Dense(self.put_action_size + self.capture_action_size, activation='softmax', name='pi')(hidden) <NEW_LINE> self.v = Dense(1, activation='tanh', name='v')(hidden) <NEW_LINE> self.model = Model(inputs=[inputs], outputs=[self.pi, self.v]) <NEW_LINE> self.model.compile(loss=['categorical_crossentropy', 'mean_squared_error'], optimizer=Adam(self.config.lr))
A linear model takes in a state and estimates the corresponding pi_put, pi_capture and v
62598f9c21a7993f00c65d41
class LibvirtBridgeDriver(vif.VIFDriver): <NEW_LINE> <INDENT> def _get_configurations(self, instance, network, mapping): <NEW_LINE> <INDENT> mac_id = mapping['mac'].replace(':', '') <NEW_LINE> conf = vconfig.LibvirtConfigGuestInterface() <NEW_LINE> conf.net_type = "bridge" <NEW_LINE> conf.mac_addr = mapping['mac'] <NEW_LINE> conf.source_dev = network['bridge'] <NEW_LINE> conf.script = "" <NEW_LINE> if CONF.libvirt_use_virtio_for_bridges: <NEW_LINE> <INDENT> conf.model = "virtio" <NEW_LINE> <DEDENT> conf.filtername = "nova-instance-" + instance['name'] + "-" + mac_id <NEW_LINE> conf.add_filter_param("IP", mapping['ips'][0]['ip']) <NEW_LINE> if mapping['dhcp_server']: <NEW_LINE> <INDENT> conf.add_filter_param("DHCPSERVER", mapping['dhcp_server']) <NEW_LINE> <DEDENT> if CONF.use_ipv6: <NEW_LINE> <INDENT> conf.add_filter_param("RASERVER", mapping.get('gateway_v6') + "/128") <NEW_LINE> <DEDENT> if CONF.allow_same_net_traffic: <NEW_LINE> <INDENT> net, mask = netutils.get_net_and_mask(network['cidr']) <NEW_LINE> conf.add_filter_param("PROJNET", net) <NEW_LINE> conf.add_filter_param("PROJMASK", mask) <NEW_LINE> if CONF.use_ipv6: <NEW_LINE> <INDENT> net_v6, prefixlen_v6 = netutils.get_net_and_prefixlen( network['cidr_v6']) <NEW_LINE> conf.add_filter_param("PROJNET6", net_v6) <NEW_LINE> conf.add_filter_param("PROJMASK6", prefixlen_v6) <NEW_LINE> <DEDENT> <DEDENT> return conf <NEW_LINE> <DEDENT> def plug(self, instance, vif): <NEW_LINE> <INDENT> network, mapping = vif <NEW_LINE> if (not network.get('multi_host') and mapping.get('should_create_bridge')): <NEW_LINE> <INDENT> if mapping.get('should_create_vlan'): <NEW_LINE> <INDENT> iface = CONF.vlan_interface or network['bridge_interface'] <NEW_LINE> LOG.debug(_('Ensuring vlan %(vlan)s and bridge %(bridge)s'), {'vlan': network['vlan'], 'bridge': network['bridge']}, instance=instance) <NEW_LINE> linux_net.LinuxBridgeInterfaceDriver.ensure_vlan_bridge( network['vlan'], network['bridge'], iface) <NEW_LINE> <DEDENT> else: 
<NEW_LINE> <INDENT> iface = CONF.flat_interface or network['bridge_interface'] <NEW_LINE> LOG.debug(_("Ensuring bridge %s"), network['bridge'], instance=instance) <NEW_LINE> linux_net.LinuxBridgeInterfaceDriver.ensure_bridge( network['bridge'], iface) <NEW_LINE> <DEDENT> <DEDENT> return self._get_configurations(instance, network, mapping) <NEW_LINE> <DEDENT> def unplug(self, instance, vif): <NEW_LINE> <INDENT> pass
VIF driver for Linux bridge.
62598f9c0a50d4780f705198
class SubjectData(object): <NEW_LINE> <INDENT> def __init__(self, log=None, subjects=None, version=None): <NEW_LINE> <INDENT> self.swagger_types = { 'log': 'list[str]', 'subjects': 'list[Subject]', 'version': 'str' } <NEW_LINE> self.attribute_map = { 'log': 'log', 'subjects': 'subjects', 'version': 'version' } <NEW_LINE> self._log = log <NEW_LINE> self._subjects = subjects <NEW_LINE> self._version = version <NEW_LINE> <DEDENT> @property <NEW_LINE> def log(self): <NEW_LINE> <INDENT> return self._log <NEW_LINE> <DEDENT> @log.setter <NEW_LINE> def log(self, log): <NEW_LINE> <INDENT> self._log = log <NEW_LINE> <DEDENT> @property <NEW_LINE> def subjects(self): <NEW_LINE> <INDENT> return self._subjects <NEW_LINE> <DEDENT> @subjects.setter <NEW_LINE> def subjects(self, subjects): <NEW_LINE> <INDENT> if subjects is None: <NEW_LINE> <INDENT> raise ValueError("Invalid value for `subjects`, must not be `None`") <NEW_LINE> <DEDENT> self._subjects = subjects <NEW_LINE> <DEDENT> @property <NEW_LINE> def version(self): <NEW_LINE> <INDENT> return self._version <NEW_LINE> <DEDENT> @version.setter <NEW_LINE> def version(self, version): <NEW_LINE> <INDENT> if version is None: <NEW_LINE> <INDENT> raise ValueError("Invalid value for `version`, must not be `None`") <NEW_LINE> <DEDENT> self._version = version <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in iteritems(self.swagger_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value 
<NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other
NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually.
62598f9c30bbd72246469856
class StatsAggregator(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self._resource = gen_dataset_ops.stats_aggregator_handle() <NEW_LINE> <DEDENT> def get_summary(self): <NEW_LINE> <INDENT> return gen_dataset_ops.stats_aggregator_summary(self._resource)
A stateful resource that aggregates statistics from one or more iterators. To record statistics, use one of the custom transformation functions defined in this module when defining your `tf.data.Dataset`. All statistics will be aggregated by the `StatsAggregator` that is associated with a particular iterator (see below). For example, to record the latency of producing each element by iterating over a dataset: ```python dataset = ... dataset = dataset.apply(stats_ops.latency_stats("total_bytes")) ``` To associate a `StatsAggregator` with a `tf.data.Dataset` object, use the following pattern: ```python stats_aggregator = stats_ops.StatsAggregator() dataset = ... # Apply `set_stats_aggregator` to associate `dataset` with `stats_aggregator`. dataset = dataset.apply( tf.contrib.data.set_stats_aggregator(stats_aggregator)) iterator = dataset.make_one_shot_iterator() ``` To get a protocol buffer summary of the currently aggregated statistics, use the `StatsAggregator.get_summary()` tensor. The easiest way to do this is to add the returned tensor to the `tf.GraphKeys.SUMMARIES` collection, so that the summaries will be included with any existing summaries. ```python stats_aggregator = stats_ops.StatsAggregator() # ... stats_summary = stats_aggregator.get_summary() tf.add_to_collection(tf.GraphKeys.SUMMARIES, stats_summary) ``` Note: This interface is experimental and expected to change. In particular, we expect to add other implementations of `StatsAggregator` that provide different ways of exporting statistics, and add more types of statistics.
62598f9c0c0af96317c56142
class IS_SPX(object): <NEW_LINE> <INDENT> pack_s = struct.Struct('4B2I4B') <NEW_LINE> def unpack(self, data): <NEW_LINE> <INDENT> self.Size, self.Type, self.ReqI, self.PLID, self.STime, self.ETime, self.Split, self.Penalty, self.NumStops, self.Sp3 = self.pack_s.unpack(data) <NEW_LINE> return self
SPlit X time
62598f9cd6c5a102081e1f04
class EventType(models.Model): <NEW_LINE> <INDENT> abbr = models.CharField(_('abbreviation'), max_length=4, unique=True) <NEW_LINE> label = models.CharField(_('label'), max_length=50) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> verbose_name = _('event type') <NEW_LINE> verbose_name_plural = _('event types') <NEW_LINE> <DEDENT> def __unicode__(self): <NEW_LINE> <INDENT> return self.label
Simple ``Event`` classification.
62598f9cd7e4931a7ef3be58
class VoronoiTransformer(object): <NEW_LINE> <INDENT> def __init__(self, triangulation): <NEW_LINE> <INDENT> self.triangulation = triangulation <NEW_LINE> <DEDENT> def transform(self): <NEW_LINE> <INDENT> self.centers = {} <NEW_LINE> for t in self.triangulation.triangles: <NEW_LINE> <INDENT> self.centers[id(t)] = self.incenter(t) <NEW_LINE> <DEDENT> segments = [] <NEW_LINE> for t in self.triangulation.triangles: <NEW_LINE> <INDENT> for n in t.neighbours: <NEW_LINE> <INDENT> if n is not None and n is not self.triangulation.external and id(t) < id(n): <NEW_LINE> <INDENT> segment = id(t), id(n) <NEW_LINE> segments.append(segment) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> self.segments = segments <NEW_LINE> <DEDENT> def incenter(self, t): <NEW_LINE> <INDENT> p0, p1, p2, = t.vertices <NEW_LINE> ax, ay, bx, by, cx, cy, = p0.x, p0.y, p1.x, p1.y, p2.x, p2.y <NEW_LINE> a2 = pow(ax, 2) + pow(ay, 2) <NEW_LINE> b2 = pow(bx, 2) + pow(by, 2) <NEW_LINE> c2 = pow(cx, 2) + pow(cy, 2) <NEW_LINE> UX = (a2 * (by - cy) + b2 * (cy - ay) + c2 * (ay - by)) <NEW_LINE> UY = (a2 * (cx - bx) + b2 * (ax - cx) + c2 * (bx - ax)) <NEW_LINE> D = 2 * (ax * (by - cy) + bx * (cy - ay) + cx * (ay - by)) <NEW_LINE> ux = UX / D <NEW_LINE> uy = UY / D <NEW_LINE> return (ux, uy)
Class to transform a Delaunay triangulation into a Voronoi diagram
62598f9c45492302aabfc298
class Solution: <NEW_LINE> <INDENT> def Power(self, base, exponent): <NEW_LINE> <INDENT> return pow(base, exponent) <NEW_LINE> <DEDENT> def Power1(self, base, exponent): <NEW_LINE> <INDENT> if exponent != int(exponent): <NEW_LINE> <INDENT> print('错误的输入!') <NEW_LINE> return None <NEW_LINE> <DEDENT> if equal(base, 0.0): <NEW_LINE> <INDENT> return 0.0 <NEW_LINE> <DEDENT> if exponent == 0: <NEW_LINE> <INDENT> return 1.0 <NEW_LINE> <DEDENT> elif exponent < 0: <NEW_LINE> <INDENT> exponent = abs(exponent) <NEW_LINE> return 1.0 / compute_pow(base, exponent) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return compute_pow(base, exponent) <NEW_LINE> <DEDENT> <DEDENT> def Power2(self, base, exponent): <NEW_LINE> <INDENT> if exponent == 0: <NEW_LINE> <INDENT> return 1 <NEW_LINE> <DEDENT> if exponent == 1: <NEW_LINE> <INDENT> return base <NEW_LINE> <DEDENT> if exponent == -1: <NEW_LINE> <INDENT> return 1.0 / base <NEW_LINE> <DEDENT> res = self.Power2(base, int(exponent/2)) if exponent % 2 == 0 else self.Power2(base, int((exponent-1) / 2)) <NEW_LINE> res *= res <NEW_LINE> if exponent % 2 == 1: <NEW_LINE> <INDENT> res *= base <NEW_LINE> <DEDENT> return res <NEW_LINE> <DEDENT> def Power3(self, base, exponent): <NEW_LINE> <INDENT> if exponent == 0: <NEW_LINE> <INDENT> return 1 <NEW_LINE> <DEDENT> if exponent == 1: <NEW_LINE> <INDENT> return base <NEW_LINE> <DEDENT> if exponent == -1: <NEW_LINE> <INDENT> return 1.0 / base <NEW_LINE> <DEDENT> res = self.Power3(base, exponent >> 1) <NEW_LINE> res *= res <NEW_LINE> if exponent & 0x1 == 1: <NEW_LINE> <INDENT> res *= base <NEW_LINE> <DEDENT> return res
题目说明:给定一个double类型的浮点数base和int类型的整数exponent。求base的exponent次方。 思路:需要考虑底数是0,指数为0或者负数的情况。
62598f9c56b00c62f0fb2670
class PubSubChannelSubscribe(PubSubMessage): <NEW_LINE> <INDENT> __slots__ = ( "channel", "context", "user", "message", "emotes" "is_gift", "recipient", "sub_plan", "sub_plan_name", "time", "cumulative_months", "streak_months", "multi_month_duration", ) <NEW_LINE> def __init__(self, client: Client, topic: str, data: dict): <NEW_LINE> <INDENT> super().__init__(client, topic, data) <NEW_LINE> subscription = data["message"] <NEW_LINE> self.channel: Channel = client.get_channel(subscription["channel_name"]) or Channel( name=subscription["channel_name"], websocket=client._connection ) <NEW_LINE> self.context: str = subscription["context"] <NEW_LINE> try: <NEW_LINE> <INDENT> self.user = ( PartialUser(client._http, subscription["user_name"], int(subscription["user_id"])), subscription["display_name"], ) <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> self.user = None <NEW_LINE> <DEDENT> self.message = PubSubChatMessage(subscription["sub_message"]["message"]) <NEW_LINE> try: <NEW_LINE> <INDENT> self.emotes = subscription["sub_message"]["emotes"] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> self.emotes = None <NEW_LINE> <DEDENT> self.is_gift: bool = subscription["is_gift"] <NEW_LINE> try: <NEW_LINE> <INDENT> self.recipient = ( PartialUser(client._http, subscription["recipient_user_name"], int(subscription["recipient_id"])), subscription["recipient_display_name"], ) <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> self.recipient = None <NEW_LINE> <DEDENT> self.sub_plan: str = subscription["sub_plan"] <NEW_LINE> self.sub_plan_name: str = subscription["sub_plan_name"] <NEW_LINE> self.time = parse_timestamp(subscription["time"]) <NEW_LINE> self.cumulative_months: int = int(subscription["cumulative_months"]) <NEW_LINE> self.streak_months = int(subscription["streak_months"]) <NEW_LINE> self.multi_month_duration = int(subscription["multi_month_duration"])
Channel subscription Attributes ----------- channel: :class:`twitchio.Channel` Channel that has been subscribed or subgifted. context: :class:`str` Event type associated with the subscription product. user: :class:`twitchio.PartialUser` The person who subscribed or sent a gift subscription. message: :class:`str` Message sent with the sub/resub. emotes: List[:class:`dict`] Emotes sent with the sub/resub. is_gift: :class:`bool` If this sub message was caused by a gift subscription. recipient: :class:`twitchio.PartialUser` The person who received the gift subscription. sub_plan: :class:`str` Subscription Plan ID. sub_plan_name: :class:`str` Channel Specific Subscription Plan Name. time: :class:`datetime.datetime` Time when the subscription or gift was completed. RFC 3339 format. cumulative_months: :class:`int` Cumulative number of tenure months of the subscription. streak_months: :class:`int` Denotes the user's most recent (and contiguous) subscription tenure streak in the channel. multi_month_duration: :class:`int` Number of months gifted as part of a single, multi-month gift OR number of months purchased as part of a multi-month subscription.
62598f9ce5267d203ee6b6cd
class DatabaseRequest(ModelNormal): <NEW_LINE> <INDENT> allowed_values = { } <NEW_LINE> validations = { } <NEW_LINE> additional_properties_type = None <NEW_LINE> _nullable = False <NEW_LINE> @cached_property <NEW_LINE> def openapi_types(): <NEW_LINE> <INDENT> lazy_import() <NEW_LINE> return { 'name': (MultiLingualString,), 'initially_selected_for_enrichment': (bool,), } <NEW_LINE> <DEDENT> @cached_property <NEW_LINE> def discriminator(): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> attribute_map = { 'name': 'name', 'initially_selected_for_enrichment': 'initiallySelectedForEnrichment', } <NEW_LINE> _composed_schemas = {} <NEW_LINE> required_properties = set([ '_data_store', '_check_type', '_spec_property_naming', '_path_to_item', '_configuration', '_visited_composed_classes', ]) <NEW_LINE> @convert_js_args_to_python_args <NEW_LINE> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> _check_type = kwargs.pop('_check_type', True) <NEW_LINE> _spec_property_naming = kwargs.pop('_spec_property_naming', False) <NEW_LINE> _path_to_item = kwargs.pop('_path_to_item', ()) <NEW_LINE> _configuration = kwargs.pop('_configuration', None) <NEW_LINE> _visited_composed_classes = kwargs.pop('_visited_composed_classes', ()) <NEW_LINE> if args: <NEW_LINE> <INDENT> raise ApiTypeError( "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." 
% ( args, self.__class__.__name__, ), path_to_item=_path_to_item, valid_classes=(self.__class__,), ) <NEW_LINE> <DEDENT> self._data_store = {} <NEW_LINE> self._check_type = _check_type <NEW_LINE> self._spec_property_naming = _spec_property_naming <NEW_LINE> self._path_to_item = _path_to_item <NEW_LINE> self._configuration = _configuration <NEW_LINE> self._visited_composed_classes = _visited_composed_classes + (self.__class__,) <NEW_LINE> for var_name, var_value in kwargs.items(): <NEW_LINE> <INDENT> if var_name not in self.attribute_map and self._configuration is not None and self._configuration.discard_unknown_keys and self.additional_properties_type is None: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> setattr(self, var_name, var_value)
NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech Do not edit the class manually. Attributes: allowed_values (dict): The key is the tuple path to the attribute and for var_name this is (var_name,). The value is a dict with a capitalized key describing the allowed value and an allowed value. These dicts store the allowed enum values. attribute_map (dict): The key is attribute name and the value is json key in definition. discriminator_value_class_map (dict): A dict to go from the discriminator variable value to the discriminator class name. validations (dict): The key is the tuple path to the attribute and for var_name this is (var_name,). The value is a dict that stores validations for max_length, min_length, max_items, min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum, inclusive_minimum, and regex. additional_properties_type (tuple): A tuple of classes accepted as additional properties values.
62598f9c379a373c97d98dd4
class TestConflictError(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def make_instance(self, include_optional): <NEW_LINE> <INDENT> if include_optional : <NEW_LINE> <INDENT> return ConflictError( error = home_connect_sdk.models.unauthorized_error_error.UnauthorizedError_error( key = '0', description = '0', ) ) <NEW_LINE> <DEDENT> else : <NEW_LINE> <INDENT> return ConflictError( error = home_connect_sdk.models.unauthorized_error_error.UnauthorizedError_error( key = '0', description = '0', ), ) <NEW_LINE> <DEDENT> <DEDENT> def testConflictError(self): <NEW_LINE> <INDENT> inst_req_only = self.make_instance(include_optional=False) <NEW_LINE> inst_req_and_optional = self.make_instance(include_optional=True)
ConflictError unit test stubs
62598f9c8da39b475be02fa5
class AFNICommand(AFNICommandBase): <NEW_LINE> <INDENT> input_spec = AFNICommandInputSpec <NEW_LINE> _outputtype = None <NEW_LINE> def __init__(self, **inputs): <NEW_LINE> <INDENT> super(AFNICommand, self).__init__(**inputs) <NEW_LINE> self.inputs.on_trait_change(self._output_update, 'outputtype') <NEW_LINE> if self._outputtype is None: <NEW_LINE> <INDENT> self._outputtype = Info.outputtype() <NEW_LINE> <DEDENT> if not isdefined(self.inputs.outputtype): <NEW_LINE> <INDENT> self.inputs.outputtype = self._outputtype <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._output_update() <NEW_LINE> <DEDENT> os.environ['OMP_NUM_THREADS'] = str(self.num_threads) <NEW_LINE> <DEDENT> def _output_update(self): <NEW_LINE> <INDENT> self._outputtype = self.inputs.outputtype <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def set_default_output_type(cls, outputtype): <NEW_LINE> <INDENT> if outputtype in Info.ftypes: <NEW_LINE> <INDENT> cls._outputtype = outputtype <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise AttributeError('Invalid AFNI outputtype: %s' % outputtype) <NEW_LINE> <DEDENT> <DEDENT> def _overload_extension(self, value, name=None): <NEW_LINE> <INDENT> path, base, _ = split_filename(value) <NEW_LINE> return os.path.join(path, base + Info.outputtype_to_ext(self.inputs.outputtype)) <NEW_LINE> <DEDENT> def _list_outputs(self): <NEW_LINE> <INDENT> outputs = super(AFNICommand, self)._list_outputs() <NEW_LINE> metadata = dict(name_source=lambda t: t is not None) <NEW_LINE> out_names = list(self.inputs.traits(**metadata).keys()) <NEW_LINE> if out_names: <NEW_LINE> <INDENT> for name in out_names: <NEW_LINE> <INDENT> if outputs[name]: <NEW_LINE> <INDENT> _, _, ext = split_filename(outputs[name]) <NEW_LINE> if ext == "": <NEW_LINE> <INDENT> outputs[name] = outputs[name] + "+orig.BRIK" <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> return outputs
Shared options for several AFNI commands
62598f9cc432627299fa2d98
class ProjectInformation(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(ProjectInformation, self).__init__() <NEW_LINE> self.include_directories = [] <NEW_LINE> self.library_name = None <NEW_LINE> self.library_names = [] <NEW_LINE> self.library_version = None <NEW_LINE> self._ReadConfigureAc() <NEW_LINE> self._ReadMakefileAm() <NEW_LINE> <DEDENT> @property <NEW_LINE> def dll_filename(self): <NEW_LINE> <INDENT> return "{0:s}.dll".format(self.library_name) <NEW_LINE> <DEDENT> @property <NEW_LINE> def module_name(self): <NEW_LINE> <INDENT> return "py{0:s}".format(self.library_name[3:]) <NEW_LINE> <DEDENT> @property <NEW_LINE> def package_name(self): <NEW_LINE> <INDENT> return "{0:s}-python".format(self.library_name) <NEW_LINE> <DEDENT> @property <NEW_LINE> def package_description(self): <NEW_LINE> <INDENT> return "Python bindings module for {0:s}".format(self.library_name) <NEW_LINE> <DEDENT> @property <NEW_LINE> def project_url(self): <NEW_LINE> <INDENT> return "https://github.com/libyal/{0:s}/".format(self.library_name) <NEW_LINE> <DEDENT> def _ReadConfigureAc(self): <NEW_LINE> <INDENT> file_object = open("configure.ac", "rb") <NEW_LINE> if not file_object: <NEW_LINE> <INDENT> raise IOError("Unable to open: configure.ac") <NEW_LINE> <DEDENT> found_ac_init = False <NEW_LINE> found_library_name = False <NEW_LINE> for line in file_object.readlines(): <NEW_LINE> <INDENT> line = line.strip() <NEW_LINE> if found_library_name: <NEW_LINE> <INDENT> library_version = line[1:-2] <NEW_LINE> if sys.version_info[0] >= 3: <NEW_LINE> <INDENT> library_version = library_version.decode("ascii") <NEW_LINE> <DEDENT> self.library_version = library_version <NEW_LINE> break <NEW_LINE> <DEDENT> elif found_ac_init: <NEW_LINE> <INDENT> library_name = line[1:-2] <NEW_LINE> if sys.version_info[0] >= 3: <NEW_LINE> <INDENT> library_name = library_name.decode("ascii") <NEW_LINE> <DEDENT> self.library_name = library_name <NEW_LINE> found_library_name = True <NEW_LINE> 
<DEDENT> elif line.startswith(b"AC_INIT"): <NEW_LINE> <INDENT> found_ac_init = True <NEW_LINE> <DEDENT> <DEDENT> file_object.close() <NEW_LINE> if not self.library_name or not self.library_version: <NEW_LINE> <INDENT> raise RuntimeError( "Unable to find library name and version in: configure.ac") <NEW_LINE> <DEDENT> <DEDENT> def _ReadMakefileAm(self): <NEW_LINE> <INDENT> if not self.library_name: <NEW_LINE> <INDENT> raise RuntimeError("Missing library name") <NEW_LINE> <DEDENT> file_object = open("Makefile.am", "rb") <NEW_LINE> if not file_object: <NEW_LINE> <INDENT> raise IOError("Unable to open: Makefile.am") <NEW_LINE> <DEDENT> found_subdirs = False <NEW_LINE> for line in file_object.readlines(): <NEW_LINE> <INDENT> line = line.strip() <NEW_LINE> if found_subdirs: <NEW_LINE> <INDENT> library_name, _, _ = line.partition(b" ") <NEW_LINE> if sys.version_info[0] >= 3: <NEW_LINE> <INDENT> library_name = library_name.decode("ascii") <NEW_LINE> <DEDENT> self.include_directories.append(library_name) <NEW_LINE> if library_name.startswith("lib"): <NEW_LINE> <INDENT> self.library_names.append(library_name) <NEW_LINE> <DEDENT> if library_name == self.library_name: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> <DEDENT> elif line.startswith(b"SUBDIRS"): <NEW_LINE> <INDENT> found_subdirs = True <NEW_LINE> <DEDENT> <DEDENT> file_object.close() <NEW_LINE> if not self.include_directories or not self.library_names: <NEW_LINE> <INDENT> raise RuntimeError( "Unable to find include directories and library names in: " "Makefile.am")
Class to define the project information.
62598f9ccb5e8a47e493c054
class StructuredDataRegressor(SupervisedStructuredDataPipeline): <NEW_LINE> <INDENT> def __init__(self, column_names: Optional[List[str]] = None, column_types: Optional[Dict[str, str]] = None, output_dim: Optional[int] = None, loss: types.LossType = 'mean_squared_error', metrics: Optional[types.MetricsType] = None, name: str = 'structured_data_regressor', max_trials: int = 100, directory: Union[str, pathlib.Path, None] = None, objective: str = 'val_loss', overwrite: bool = True, seed: Optional[int] = None): <NEW_LINE> <INDENT> super().__init__( outputs=hypermodels.RegressionHead(output_dim=output_dim, loss=loss, metrics=metrics), column_names=column_names, column_types=column_types, max_trials=max_trials, directory=directory, name=name, objective=objective, tuner=greedy.Greedy, overwrite=overwrite, seed=seed)
AutoKeras structured data regression class. # Arguments column_names: A list of strings specifying the names of the columns. The length of the list should be equal to the number of columns of the data excluding the target column. Defaults to None. If None, it will obtained from the header of the csv file or the pandas.DataFrame. column_types: Dict. The keys are the column names. The values should either be 'numerical' or 'categorical', indicating the type of that column. Defaults to None. If not None, the column_names need to be specified. If None, it will be inferred from the data. output_dim: Int. The number of output dimensions. Defaults to None. If None, it will be inferred from the data. loss: A Keras loss function. Defaults to use 'mean_squared_error'. metrics: A list of Keras metrics. Defaults to use 'mean_squared_error'. name: String. The name of the AutoModel. Defaults to 'structured_data_regressor'. max_trials: Int. The maximum number of different Keras Models to try. The search may finish before reaching the max_trials. Defaults to 100. directory: String. The path to a directory for storing the search outputs. Defaults to None, which would create a folder with the name of the AutoModel in the current directory. objective: String. Name of model metric to minimize or maximize, e.g. 'val_accuracy'. Defaults to 'val_loss'. overwrite: Boolean. Defaults to `True`. If `False`, reloads an existing project of the same name if one is found. Otherwise, overwrites the project. seed: Int. Random seed.
62598f9c009cb60464d012e5
class SMWinservice(win32serviceutil.ServiceFramework): <NEW_LINE> <INDENT> _svc_name_ = 'pythonService' <NEW_LINE> _svc_display_name_ = 'Python Service' <NEW_LINE> _svc_description_ = 'Python Service Description' <NEW_LINE> @classmethod <NEW_LINE> def parse_command_line(cls): <NEW_LINE> <INDENT> win32serviceutil.HandleCommandLine(cls) <NEW_LINE> <DEDENT> def __init__(self, args): <NEW_LINE> <INDENT> win32serviceutil.ServiceFramework.__init__(self, args) <NEW_LINE> self.hWaitStop = win32event.CreateEvent(None, 0, 0, None) <NEW_LINE> socket.setdefaulttimeout(60) <NEW_LINE> <DEDENT> def SvcStop(self): <NEW_LINE> <INDENT> self.stop() <NEW_LINE> self.ReportServiceStatus(win32service.SERVICE_STOP_PENDING) <NEW_LINE> win32event.SetEvent(self.hWaitStop) <NEW_LINE> <DEDENT> def SvcDoRun(self): <NEW_LINE> <INDENT> self.start() <NEW_LINE> servicemanager.LogMsg(servicemanager.EVENTLOG_INFORMATION_TYPE, servicemanager.PYS_SERVICE_STARTED, (self._svc_name_, '')) <NEW_LINE> self.main() <NEW_LINE> <DEDENT> def start(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def stop(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def main(self): <NEW_LINE> <INDENT> pass
Base class to create winservice in Python
62598f9ceab8aa0e5d30bb45
class AccessionViewSet( OrgReadViewMixin, mixins.ListModelMixin, mixins.RetrieveModelMixin, mixins.UpdateModelMixin, viewsets.GenericViewSet, ): <NEW_LINE> <INDENT> def get_queryset(self): <NEW_LINE> <INDENT> queryset = Accession.objects.all().order_by("-created") <NEW_LINE> if not self.request.user.is_archivist(): <NEW_LINE> <INDENT> queryset = queryset.filter(organization=self.request.user.organization) <NEW_LINE> <DEDENT> process_status = self.request.GET.get("process_status", "") <NEW_LINE> if process_status != "": <NEW_LINE> <INDENT> queryset = queryset.filter(process_status=int(process_status)) <NEW_LINE> <DEDENT> return queryset <NEW_LINE> <DEDENT> def get_serializer_class(self): <NEW_LINE> <INDENT> if self.action == "list": <NEW_LINE> <INDENT> return AccessionListSerializer <NEW_LINE> <DEDENT> if self.action == "retrieve": <NEW_LINE> <INDENT> return AccessionSerializer <NEW_LINE> <DEDENT> return AccessionSerializer
Endpoint for Accessions
62598f9c925a0f43d25e7dfd
class EventHubConsumerGroupsListResult(msrest.serialization.Model): <NEW_LINE> <INDENT> _validation = { 'next_link': {'readonly': True}, } <NEW_LINE> _attribute_map = { 'value': {'key': 'value', 'type': '[EventHubConsumerGroupInfo]'}, 'next_link': {'key': 'nextLink', 'type': 'str'}, } <NEW_LINE> def __init__( self, *, value: Optional[List["EventHubConsumerGroupInfo"]] = None, **kwargs ): <NEW_LINE> <INDENT> super(EventHubConsumerGroupsListResult, self).__init__(**kwargs) <NEW_LINE> self.value = value <NEW_LINE> self.next_link = None
The JSON-serialized array of Event Hub-compatible consumer group names with a next link. Variables are only populated by the server, and will be ignored when sending a request. :ivar value: List of consumer groups objects. :vartype value: list[~azure.mgmt.iothub.v2021_07_01.models.EventHubConsumerGroupInfo] :ivar next_link: The next link. :vartype next_link: str
62598f9c63d6d428bbee2572
class BaseRecipeAttrViewSet(viewsets.GenericViewSet, mixins.ListModelMixin, mixins.CreateModelMixin): <NEW_LINE> <INDENT> authentication_classes = (TokenAuthentication,) <NEW_LINE> permission_classes = (IsAuthenticated,) <NEW_LINE> def get_queryset(self): <NEW_LINE> <INDENT> assigned_only = bool( int(self.request.query_params.get('assigned_only', 0)) ) <NEW_LINE> queryset = self.queryset <NEW_LINE> if assigned_only: <NEW_LINE> <INDENT> queryset = queryset.filter(recipe__isnull=False) <NEW_LINE> <DEDENT> return queryset.filter( user=self.request.user ).order_by('-name').distinct() <NEW_LINE> <DEDENT> def perform_create(self, serializer): <NEW_LINE> <INDENT> serializer.save(user=self.request.user)
Base viewset for user owned recipe attributes
62598f9cbe383301e02535b5
@task(ignore_result=True) <NEW_LINE> class JobFactoryManager(Task): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.factory = JobFactory() <NEW_LINE> <DEDENT> def run(self, *args, **kwargs): <NEW_LINE> <INDENT> self.factory.initialize_job(*args, **kwargs)
Manages factories handling Job initialization (from db entry).
62598f9ccc0a2c111447adcc
class ExecutionLog(db.Model): <NEW_LINE> <INDENT> __tablename__ = 'execution_log' <NEW_LINE> id = db.Column(db.Integer(), primary_key=True) <NEW_LINE> text = db.Column(db.Text()) <NEW_LINE> level = db.Column(db.String(80), nullable=False, default='DEBUG') <NEW_LINE> register_date = db.Column(db.DateTime(), default=datetime.datetime.utcnow) <NEW_LINE> execution_id = db.Column(db.GUID(), db.ForeignKey('execution.id')) <NEW_LINE> def __init__(self, text, level, execution_id): <NEW_LINE> <INDENT> self.text = text <NEW_LINE> self.level = level <NEW_LINE> self.execution_id = execution_id <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return '<ExecutionLog %r>' % self.id <NEW_LINE> <DEDENT> def serialize(self): <NEW_LINE> <INDENT> return { 'id': self.id, 'text': self.text, 'level': self.level, 'register_date': self.register_date.isoformat(), 'execution_id': self.execution_id }
ExecutionLog Model
62598f9c2ae34c7f260aaea2
class PasswordResetForm(django_forms.PasswordResetForm): <NEW_LINE> <INDENT> def get_users(self, email): <NEW_LINE> <INDENT> active_users = User.objects.filter(email__iexact=email, is_active=True) <NEW_LINE> return active_users <NEW_LINE> <DEDENT> def send_mail( self, subject_template_name, email_template_name, context, from_email, to_email, html_email_template_name=None): <NEW_LINE> <INDENT> del context['user'] <NEW_LINE> emails.send_password_reset_email.delay(context, to_email)
Allow resetting passwords. This subclass overrides sending emails to use templated email.
62598f9cd99f1b3c44d05470
class TotalItemCountResponse(object): <NEW_LINE> <INDENT> swagger_types = { 'total_item_count': 'int' } <NEW_LINE> attribute_map = { 'total_item_count': 'total_item_count' } <NEW_LINE> required_args = { } <NEW_LINE> def __init__( self, total_item_count=None, ): <NEW_LINE> <INDENT> if total_item_count is not None: <NEW_LINE> <INDENT> self.total_item_count = total_item_count <NEW_LINE> <DEDENT> <DEDENT> def __setattr__(self, key, value): <NEW_LINE> <INDENT> if key not in self.attribute_map: <NEW_LINE> <INDENT> raise KeyError("Invalid key `{}` for `TotalItemCountResponse`".format(key)) <NEW_LINE> <DEDENT> self.__dict__[key] = value <NEW_LINE> <DEDENT> def __getattribute__(self, item): <NEW_LINE> <INDENT> value = object.__getattribute__(self, item) <NEW_LINE> if isinstance(value, Property): <NEW_LINE> <INDENT> raise AttributeError <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return value <NEW_LINE> <DEDENT> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in six.iteritems(self.swagger_types): <NEW_LINE> <INDENT> if hasattr(self, attr): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> if issubclass(TotalItemCountResponse, dict): <NEW_LINE> <INDENT> for key, value in self.items(): <NEW_LINE> <INDENT> result[key] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pprint.pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): 
<NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, TotalItemCountResponse): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other
Attributes: swagger_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition.
62598f9cd7e4931a7ef3be59
class AsynchronousFileReader(CustomThread): <NEW_LINE> <INDENT> def __init__(self, fd, _queue): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> assert isinstance(_queue, queue.Queue) <NEW_LINE> assert callable(fd.readline) <NEW_LINE> self._fd = fd <NEW_LINE> self._queue = _queue <NEW_LINE> self.is_running = False <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> for line in iter(self._fd.readline, ''): <NEW_LINE> <INDENT> if line is not None: <NEW_LINE> <INDENT> line = line.strip() <NEW_LINE> <DEDENT> if self.is_running: <NEW_LINE> <INDENT> self._queue.put(line) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def eof(self): <NEW_LINE> <INDENT> return not self.is_alive() and self._queue.empty() and self.is_running
Helper class to implement asynchronous reading of a file in a separate thread. Pushes read lines on a queue to be consumed in another thread.
62598f9c01c39578d7f12b3e
class StateMachine(ABC): <NEW_LINE> <INDENT> state: State <NEW_LINE> states: Type[State] <NEW_LINE> actions: Dict[State, Callable[[], State]] <NEW_LINE> __should_halt: bool = False <NEW_LINE> def next(self) -> State: <NEW_LINE> <INDENT> previous_state = self.state <NEW_LINE> try: <NEW_LINE> <INDENT> if self.__should_halt: <NEW_LINE> <INDENT> return self.states.HALT <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> action = self.actions.get(previous_state) <NEW_LINE> next_state = action() <NEW_LINE> <DEDENT> <DEDENT> except Exception as error: <NEW_LINE> <INDENT> log.critical(f'Uncaught exception from {self.__class__}') <NEW_LINE> write_traceback(error, logger=log, module=__name__) <NEW_LINE> raise <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> log.devel(f'{self.__class__.__name__}: {previous_state} -> {next_state}') <NEW_LINE> return next_state <NEW_LINE> <DEDENT> <DEDENT> def run(self) -> None: <NEW_LINE> <INDENT> while self.state is not self.states.HALT: <NEW_LINE> <INDENT> self.state = self.next() <NEW_LINE> <DEDENT> <DEDENT> def halt(self) -> None: <NEW_LINE> <INDENT> self.__should_halt = True
Base class for a finite state machine implementation.
62598f9c5f7d997b871f92bf
class OracleSpatialRefSys(models.Model, SpatialRefSysMixin): <NEW_LINE> <INDENT> cs_name = models.CharField(max_length=68) <NEW_LINE> srid = models.IntegerField(primary_key=True) <NEW_LINE> auth_srid = models.IntegerField() <NEW_LINE> auth_name = models.CharField(max_length=256) <NEW_LINE> wktext = models.CharField(max_length=2046) <NEW_LINE> cs_bounds = models.PolygonField(null=True) <NEW_LINE> objects = models.GeoManager() <NEW_LINE> class Meta: <NEW_LINE> <INDENT> app_label = 'gis' <NEW_LINE> db_table = 'CS_SRS' <NEW_LINE> managed = False <NEW_LINE> <DEDENT> @property <NEW_LINE> def wkt(self): <NEW_LINE> <INDENT> return self.wktext <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def wkt_col(cls): <NEW_LINE> <INDENT> return 'wktext'
Maps to the Oracle MDSYS.CS_SRS table.
62598f9c55399d3f056262e2
class CleanCssFilter(CompilerFilter): <NEW_LINE> <INDENT> command = "cleancss"
Compress CSS with clean-css Requires cleancss to be available in the $PATH: https://github.com/jakubpawlowicz/clean-css
62598f9c85dfad0860cbf955
@registry.register_model <NEW_LINE> class NextFrameBasicStochasticDiscrete( basic_deterministic.NextFrameBasicDeterministic): <NEW_LINE> <INDENT> def inject_latent(self, layer, features, filters): <NEW_LINE> <INDENT> del filters <NEW_LINE> hparams = self.hparams <NEW_LINE> final_filters = common_layers.shape_list(layer)[-1] <NEW_LINE> filters = hparams.hidden_size <NEW_LINE> kernel = (4, 4) <NEW_LINE> if hparams.mode == tf.estimator.ModeKeys.PREDICT: <NEW_LINE> <INDENT> layer_shape = common_layers.shape_list(layer) <NEW_LINE> rand = tf.random_uniform(layer_shape[:-1] + [hparams.bottleneck_bits]) <NEW_LINE> d = 2.0 * tf.to_float(tf.less(0.5, rand)) - 1.0 <NEW_LINE> z = tf.layers.dense(d, final_filters, name="unbottleneck") <NEW_LINE> return layer + z, 0.0 <NEW_LINE> <DEDENT> x = tf.layers.dense( features["cur_target_frame"], filters, name="latent_embed", bias_initializer=tf.random_normal_initializer(stddev=0.01)) <NEW_LINE> x = common_attention.add_timing_signal_nd(x) <NEW_LINE> if hparams.full_latent_tower: <NEW_LINE> <INDENT> for i in range(hparams.num_compress_steps): <NEW_LINE> <INDENT> with tf.variable_scope("latent_downstride%d" % i): <NEW_LINE> <INDENT> x = common_layers.make_even_size(x) <NEW_LINE> if i < hparams.filter_double_steps: <NEW_LINE> <INDENT> filters *= 2 <NEW_LINE> <DEDENT> x = common_attention.add_timing_signal_nd(x) <NEW_LINE> x = tf.layers.conv2d(x, filters, kernel, activation=common_layers.belu, strides=(2, 2), padding="SAME") <NEW_LINE> x = common_layers.layer_norm(x) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> x = common_layers.double_discriminator(x) <NEW_LINE> x = tf.expand_dims(tf.expand_dims(x, axis=1), axis=1) <NEW_LINE> <DEDENT> x = tf.tanh(tf.layers.dense(x, hparams.bottleneck_bits, name="bottleneck")) <NEW_LINE> d = x + tf.stop_gradient(2.0 * tf.to_float(tf.less(0.0, x)) - 1.0 - x) <NEW_LINE> if hparams.mode == tf.estimator.ModeKeys.TRAIN: <NEW_LINE> <INDENT> noise = tf.random_uniform(common_layers.shape_list(x)) 
<NEW_LINE> noise = 2.0 * tf.to_float(tf.less(hparams.bottleneck_noise, noise)) - 1.0 <NEW_LINE> d *= noise <NEW_LINE> <DEDENT> z = tf.layers.dense(d, final_filters, name="unbottleneck") <NEW_LINE> return layer + z, 0.0
Basic next-frame model with a tiny discrete latent.
62598f9cbe8e80087fbbee20
class GLIMPS_Writer: <NEW_LINE> <INDENT> def __init__(self, stdout_messenger, stderr_messenger): <NEW_LINE> <INDENT> self.stdout_messenger = stdout_messenger <NEW_LINE> self.stderr_messenger = stderr_messenger <NEW_LINE> <DEDENT> def write(self, s): <NEW_LINE> <INDENT> if __name__ == '__main__': <NEW_LINE> <INDENT> sys.stdout.write(s) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.stdout_messenger.put(s) <NEW_LINE> <DEDENT> <DEDENT> def error(self, s): <NEW_LINE> <INDENT> if __name__ == '__main__': <NEW_LINE> <INDENT> sys.stderr.write(s) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.stderr_messenger.put(s)
Class to pipe stdout and sterr to parent process in asyncrounous threads
62598f9c627d3e7fe0e06c6c
class _RLock: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self._block = _allocate_lock() <NEW_LINE> self._owner = None <NEW_LINE> self._count = 0 <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> owner = self._owner <NEW_LINE> try: <NEW_LINE> <INDENT> owner = _active[owner].name <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> return "<%s %s.%s object owner=%r count=%d at %s>" % ( "locked" if self._block.locked() else "unlocked", self.__class__.__module__, self.__class__.__qualname__, owner, self._count, hex(id(self)) ) <NEW_LINE> <DEDENT> def acquire(self, blocking=True, timeout=-1): <NEW_LINE> <INDENT> me = get_ident() <NEW_LINE> if self._owner == me: <NEW_LINE> <INDENT> self._count += 1 <NEW_LINE> return 1 <NEW_LINE> <DEDENT> rc = self._block.acquire(blocking, timeout) <NEW_LINE> if rc: <NEW_LINE> <INDENT> self._owner = me <NEW_LINE> self._count = 1 <NEW_LINE> <DEDENT> return rc <NEW_LINE> <DEDENT> __enter__ = acquire <NEW_LINE> def release(self): <NEW_LINE> <INDENT> if self._owner != get_ident(): <NEW_LINE> <INDENT> raise RuntimeError("cannot release un-acquired lock") <NEW_LINE> <DEDENT> self._count = count = self._count - 1 <NEW_LINE> if not count: <NEW_LINE> <INDENT> self._owner = None <NEW_LINE> self._block.release() <NEW_LINE> <DEDENT> <DEDENT> def __exit__(self, t, v, tb): <NEW_LINE> <INDENT> self.release() <NEW_LINE> <DEDENT> def _acquire_restore(self, state): <NEW_LINE> <INDENT> self._block.acquire() <NEW_LINE> self._count, self._owner = state <NEW_LINE> <DEDENT> def _release_save(self): <NEW_LINE> <INDENT> if self._count == 0: <NEW_LINE> <INDENT> raise RuntimeError("cannot release un-acquired lock") <NEW_LINE> <DEDENT> count = self._count <NEW_LINE> self._count = 0 <NEW_LINE> owner = self._owner <NEW_LINE> self._owner = None <NEW_LINE> self._block.release() <NEW_LINE> return (count, owner) <NEW_LINE> <DEDENT> def _is_owned(self): <NEW_LINE> <INDENT> return self._owner == get_ident()
This class implements reentrant lock objects. A reentrant lock must be released by the thread that acquired it. Once a thread has acquired a reentrant lock, the same thread may acquire it again without blocking; the thread must release it once for each time it has acquired it.
62598f9c8e7ae83300ee8e61
class GlobalOperationsService(base_api.BaseApiService): <NEW_LINE> <INDENT> _NAME = u'globalOperations' <NEW_LINE> def __init__(self, client): <NEW_LINE> <INDENT> super(ComputeAlpha.GlobalOperationsService, self).__init__(client) <NEW_LINE> self._method_configs = { 'AggregatedList': base_api.ApiMethodInfo( http_method=u'GET', method_id=u'compute.globalOperations.aggregatedList', ordered_params=[u'project'], path_params=[u'project'], query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'], relative_path=u'projects/{project}/aggregated/operations', request_field='', request_type_name=u'ComputeGlobalOperationsAggregatedListRequest', response_type_name=u'OperationAggregatedList', supports_download=False, ), 'Delete': base_api.ApiMethodInfo( http_method=u'DELETE', method_id=u'compute.globalOperations.delete', ordered_params=[u'project', u'operation'], path_params=[u'operation', u'project'], query_params=[], relative_path=u'projects/{project}/global/operations/{operation}', request_field='', request_type_name=u'ComputeGlobalOperationsDeleteRequest', response_type_name=u'ComputeGlobalOperationsDeleteResponse', supports_download=False, ), 'Get': base_api.ApiMethodInfo( http_method=u'GET', method_id=u'compute.globalOperations.get', ordered_params=[u'project', u'operation'], path_params=[u'operation', u'project'], query_params=[], relative_path=u'projects/{project}/global/operations/{operation}', request_field='', request_type_name=u'ComputeGlobalOperationsGetRequest', response_type_name=u'Operation', supports_download=False, ), 'List': base_api.ApiMethodInfo( http_method=u'GET', method_id=u'compute.globalOperations.list', ordered_params=[u'project'], path_params=[u'project'], query_params=[u'filter', u'maxResults', u'orderBy', u'pageToken'], relative_path=u'projects/{project}/global/operations', request_field='', request_type_name=u'ComputeGlobalOperationsListRequest', response_type_name=u'OperationList', supports_download=False, ), } <NEW_LINE> 
self._upload_configs = { } <NEW_LINE> <DEDENT> def AggregatedList(self, request, global_params=None): <NEW_LINE> <INDENT> config = self.GetMethodConfig('AggregatedList') <NEW_LINE> return self._RunMethod( config, request, global_params=global_params) <NEW_LINE> <DEDENT> def Delete(self, request, global_params=None): <NEW_LINE> <INDENT> config = self.GetMethodConfig('Delete') <NEW_LINE> return self._RunMethod( config, request, global_params=global_params) <NEW_LINE> <DEDENT> def Get(self, request, global_params=None): <NEW_LINE> <INDENT> config = self.GetMethodConfig('Get') <NEW_LINE> return self._RunMethod( config, request, global_params=global_params) <NEW_LINE> <DEDENT> def List(self, request, global_params=None): <NEW_LINE> <INDENT> config = self.GetMethodConfig('List') <NEW_LINE> return self._RunMethod( config, request, global_params=global_params)
Service class for the globalOperations resource.
62598f9cbd1bec0571e14fa4
class BasicReplay: <NEW_LINE> <INDENT> def __init__(self, state_shape, policy_size, capacity=50000): <NEW_LINE> <INDENT> self.policy_size = policy_size <NEW_LINE> self._capacity = 50000 <NEW_LINE> self._insertion_index = 0 <NEW_LINE> self._states = np.zeros([self._capacity, *state_shape]) <NEW_LINE> self._policy_values = np.zeros([self._capacity, policy_size]) <NEW_LINE> self._state_values = np.zeros(self._capacity) <NEW_LINE> <DEDENT> def add_data(self, states, policy_values, state_values): <NEW_LINE> <INDENT> n = states.shape[0] <NEW_LINE> start = self._insertion_index % self._capacity <NEW_LINE> end = start + n <NEW_LINE> self._states[start:end, ...] = states <NEW_LINE> self._policy_values[start:end, ...] = policy_values <NEW_LINE> self._state_values[start:end] = state_values <NEW_LINE> self._insertion_index += n <NEW_LINE> <DEDENT> @property <NEW_LINE> def size(self): <NEW_LINE> <INDENT> return min(self._insertion_index, self.capacity) <NEW_LINE> <DEDENT> def save(self, filepath): <NEW_LINE> <INDENT> pickle.dump(self, open(filepath, 'wb')) <NEW_LINE> <DEDENT> def get_batch(self, batch_size): <NEW_LINE> <INDENT> if self._insertion_index == 0: <NEW_LINE> <INDENT> ix = 0 <NEW_LINE> <DEDENT> elif self._insertion_index < batch_size: <NEW_LINE> <INDENT> ix = random.choice(np.minimum(self._insertion_index, self._capacity), size=self._insertion_index, replace=False) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> ix = random.choice(np.minimum(self._insertion_index, self._capacity), size=batch_size, replace=False) <NEW_LINE> <DEDENT> return self._states[ix, ...], self._policy_values[ix, ...], self._state_values[ix, ...]
A basic replay table. Stores state, policy-value and state-value information in numpy arrays. Can be used as a generator to randomly select from the replay table.
62598f9c7d847024c075c193
class CaPPBuilder(_PPBuilder): <NEW_LINE> <INDENT> def __init__(self, radius=4.3): <NEW_LINE> <INDENT> _PPBuilder.__init__(self, radius) <NEW_LINE> <DEDENT> def _is_connected(self, prev_res, next_res): <NEW_LINE> <INDENT> for r in [prev_res, next_res]: <NEW_LINE> <INDENT> if not r.has_id("CA"): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> n=next_res["CA"] <NEW_LINE> p=prev_res["CA"] <NEW_LINE> if n.is_disordered(): <NEW_LINE> <INDENT> nlist=n.disordered_get_list() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> nlist=[n] <NEW_LINE> <DEDENT> if p.is_disordered(): <NEW_LINE> <INDENT> plist=p.disordered_get_list() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> plist=[p] <NEW_LINE> <DEDENT> for nn in nlist: <NEW_LINE> <INDENT> for pp in plist: <NEW_LINE> <INDENT> if (nn-pp)<self.radius: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return False
Use CA--CA distance to find polypeptides.
62598f9c15baa72349461d46
class HongbaoFullBackRule:
    """Thrift-generated struct for a "spend-full-amount, get-amount-back" rule.

    Attributes:
        full_amount: threshold amount that must be reached (double).
        back_amount: amount refunded once the threshold is met (double).
    """

    thrift_spec = (
        None,  # field id 0 is unused by Thrift convention
        (1, TType.DOUBLE, 'full_amount', None, None, ),
        (2, TType.DOUBLE, 'back_amount', None, None, ),
    )

    def __init__(self, full_amount=None, back_amount=None,):
        self.full_amount = full_amount
        self.back_amount = back_amount

    def read(self, iprot):
        """Deserialize this struct from ``iprot``, using the accelerated
        fastbinary decoder when the protocol and transport allow it."""
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.DOUBLE:
                    self.full_amount = iprot.readDouble()
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.DOUBLE:
                    self.back_amount = iprot.readDouble()
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip so newer schemas stay readable.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to ``oprot``, using the accelerated
        fastbinary encoder when possible. ``None`` fields are omitted."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('HongbaoFullBackRule')
        if self.full_amount is not None:
            oprot.writeFieldBegin('full_amount', TType.DOUBLE, 1)
            oprot.writeDouble(self.full_amount)
            oprot.writeFieldEnd()
        if self.back_amount is not None:
            oprot.writeFieldBegin('back_amount', TType.DOUBLE, 2)
            oprot.writeDouble(self.back_amount)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        """No required fields, so nothing to validate."""
        return

    def __hash__(self):
        value = 17
        value = (value * 31) ^ hash(self.full_amount)
        value = (value * 31) ^ hash(self.back_amount)
        return value

    def __repr__(self):
        # BUG FIX: dict.iteritems() does not exist on Python 3;
        # dict.items() behaves correctly on both Python 2 and 3.
        L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
Attributes: - full_amount - back_amount
62598f9c67a9b606de545d8b