code
stringlengths
4
4.48k
docstring
stringlengths
1
6.45k
_id
stringlengths
24
24
class ValueDisplay(object): <NEW_LINE> <INDENT> _VALUE_LENGTH = 8 <NEW_LINE> def __init__(self, window, wave_height): <NEW_LINE> <INDENT> self._window = window <NEW_LINE> self._height, self._width = self._window.getmaxyx() <NEW_LINE> self._wave_height = wave_height <NEW_LINE> logging.debug('Value display height, width = %r, %r', self._height, self._width) <NEW_LINE> logging.debug('wave height: %r', wave_height) <NEW_LINE> if self._width < self._VALUE_LENGTH + 1: <NEW_LINE> <INDENT> raise ValueDisplayError('Width %r is not long enough' % self._width) <NEW_LINE> <DEDENT> <DEDENT> def update(self, value_range): <NEW_LINE> <INDENT> min_value, max_value = value_range <NEW_LINE> min_value_str = format_value(min_value, self._VALUE_LENGTH) <NEW_LINE> max_value_str = format_value(max_value, self._VALUE_LENGTH) <NEW_LINE> self.clear() <NEW_LINE> self._window.addstr(0, 0, max_value_str) <NEW_LINE> self._window.addstr(self._wave_height - 1, 0, min_value_str) <NEW_LINE> self._window.refresh() <NEW_LINE> <DEDENT> def clear(self): <NEW_LINE> <INDENT> for row in xrange(self._height): <NEW_LINE> <INDENT> self._window.move(row, 0) <NEW_LINE> self._window.clrtoeol()
Value display controls display for value. ------------------------------ | Maximum value in this view.| --> 0 | | | | | | | | | | | | | Minimum value in this view.| --> wave_height - 1 | | -----------------------------
62598fa6fff4ab517ebcd6e3
class WSProxy: <NEW_LINE> <INDENT> def __init__(self, ws, child, room, connection): <NEW_LINE> <INDENT> self.ws = ws <NEW_LINE> self.child = child <NEW_LINE> self.stop_event = threading.Event() <NEW_LINE> self.room = room <NEW_LINE> self.auto_forward() <NEW_LINE> self.connection = connection <NEW_LINE> <DEDENT> def send(self, msg): <NEW_LINE> <INDENT> data = msg["data"] <NEW_LINE> if isinstance(data, str): <NEW_LINE> <INDENT> data = data.encode('utf-8') <NEW_LINE> <DEDENT> self.child.send(data) <NEW_LINE> <DEDENT> def forward(self): <NEW_LINE> <INDENT> while not self.stop_event.is_set(): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> data = self.child.recv(BUF_SIZE) <NEW_LINE> <DEDENT> except OSError: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> if len(data) == 0: <NEW_LINE> <INDENT> self.close() <NEW_LINE> <DEDENT> data = data.decode(errors="ignore") <NEW_LINE> self.ws.emit("data", {'data': data, 'room': self.connection}, room=self.room) <NEW_LINE> if len(data) == BUF_SIZE: <NEW_LINE> <INDENT> time.sleep(0.1) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def auto_forward(self): <NEW_LINE> <INDENT> thread = threading.Thread(target=self.forward, args=()) <NEW_LINE> thread.daemon = True <NEW_LINE> thread.start() <NEW_LINE> <DEDENT> def close(self): <NEW_LINE> <INDENT> self.stop_event.set() <NEW_LINE> self.child.close() <NEW_LINE> self.ws.logout(self.connection) <NEW_LINE> logger.debug("Proxy {} closed".format(self))
WSProxy is websocket proxy channel object. websocket代理通道对象 Because tornado or flask websocket base event, if we want reuse func with sshd, we need change it to socket, so we implement a proxy. we should use socket pair implement it. usage: 我们使用 socket pair 实现它 ``` child, parent = socket.socketpair() # self must have write_message method, write message to ws proxy = WSProxy(self, child) client = Client(parent, user) ```
62598fa6009cb60464d0141e
class BookmarkType(EditType): <NEW_LINE> <INDENT> def __init__(self, bookmark_string): <NEW_LINE> <INDENT> self.date = self.parse_edit_date(bookmark_string) <NEW_LINE> self.location = self.parse_edit_location(bookmark_string) <NEW_LINE> <DEDENT> def parse_edit_date(self, data): <NEW_LINE> <INDENT> date_part = data.split("|") <NEW_LINE> return datetime.strptime(date_part[-1], " Added on %A, %B %d, %Y %I:%M:%S %p") <NEW_LINE> <DEDENT> def parse_edit_location(self, data): <NEW_LINE> <INDENT> loc_split = data.split("|") <NEW_LINE> if len(loc_split) > 2: <NEW_LINE> <INDENT> return loc_split[1].rstrip().lstrip().split(" ")[-1] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return loc_split[0].split(" ")[5]
Created for Kindle Paperwhite gen 5. Highlight Edit class. Bookmark Edit class Contains location of the bookmark and the date
62598fa699cbb53fe6830dd4
class BootstrapSplitDateTimeWidget(MultiWidget): <NEW_LINE> <INDENT> def __init__(self, attrs=None, date_format=None, time_format=None): <NEW_LINE> <INDENT> from django.forms.widgets import DateInput, TimeInput <NEW_LINE> date_class = attrs['date_class'] <NEW_LINE> time_class = attrs['time_class'] <NEW_LINE> del attrs['date_class'] <NEW_LINE> del attrs['time_class'] <NEW_LINE> time_attrs = attrs.copy() <NEW_LINE> time_attrs['class'] = time_class <NEW_LINE> date_attrs = attrs.copy() <NEW_LINE> date_attrs['class'] = date_class <NEW_LINE> widgets = (DateInput(attrs=date_attrs, format=date_format), TimeInput(attrs=time_attrs)) <NEW_LINE> super(BootstrapSplitDateTimeWidget, self).__init__(widgets, attrs) <NEW_LINE> <DEDENT> def decompress(self, value): <NEW_LINE> <INDENT> if value: <NEW_LINE> <INDENT> d = strftime("%Y-%m-%d", value.timetuple()) <NEW_LINE> hour = strftime("%H", value.timetuple()) <NEW_LINE> minute = strftime("%M", value.timetuple()) <NEW_LINE> meridian = strftime("%p", value.timetuple()) <NEW_LINE> return d, hour+":"+minute, meridian <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return None, None, None <NEW_LINE> <DEDENT> <DEDENT> def format_output(self, rendered_widgets): <NEW_LINE> <INDENT> return "<label>Date:</label> %s<br/><label>Time:</label> %s" % (rendered_widgets[0], rendered_widgets[1])
Bootstrap Split DateTime Widget github.com/stholmes/django-bootstrap-datetime-widgets/ format_output slightly modified.
62598fa663b5f9789fe85064
class beginFileUpload_result: <NEW_LINE> <INDENT> thrift_spec = ( (0, TType.STRING, 'success', None, None, ), ) <NEW_LINE> def __init__(self, success=None,): <NEW_LINE> <INDENT> self.success = success <NEW_LINE> <DEDENT> def read(self, iprot): <NEW_LINE> <INDENT> if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None: <NEW_LINE> <INDENT> fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec)) <NEW_LINE> return <NEW_LINE> <DEDENT> iprot.readStructBegin() <NEW_LINE> while True: <NEW_LINE> <INDENT> (fname, ftype, fid) = iprot.readFieldBegin() <NEW_LINE> if ftype == TType.STOP: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> if fid == 0: <NEW_LINE> <INDENT> if ftype == TType.STRING: <NEW_LINE> <INDENT> self.success = iprot.readString(); <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> iprot.readFieldEnd() <NEW_LINE> <DEDENT> iprot.readStructEnd() <NEW_LINE> <DEDENT> def write(self, oprot): <NEW_LINE> <INDENT> if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None: <NEW_LINE> <INDENT> oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))) <NEW_LINE> return <NEW_LINE> <DEDENT> oprot.writeStructBegin('beginFileUpload_result') <NEW_LINE> if self.success is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('success', TType.STRING, 0) <NEW_LINE> oprot.writeString(self.success) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> oprot.writeFieldStop() <NEW_LINE> oprot.writeStructEnd() <NEW_LINE> <DEDENT> def validate(self): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> L = ['%s=%r' % (key, value) for key, value in self.__dict__.iteritems()] <NEW_LINE> return '%s(%s)' % 
(self.__class__.__name__, ', '.join(L)) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not (self == other)
Attributes: - success
62598fa60a50d4780f7052db
class HostPropertyManagerTest(unittest.TestCase): <NEW_LINE> <INDENT> def test_fetch_all(self): <NEW_LINE> <INDENT> av_host_properties = AV.host_properties.all().result() <NEW_LINE> self.assertNotEqual(len(av_host_properties), 0) <NEW_LINE> <DEDENT> def test_filter(self): <NEW_LINE> <INDENT> av_host_properties = AV.host_properties.filter(name='cpu').result() <NEW_LINE> self.assertNotEqual(len(av_host_properties), 0)
Test HostPropertyManager.
62598fa68c0ade5d55dc3610
class RulesPage(GenericPage): <NEW_LINE> <INDENT> def __init__(self, parent): <NEW_LINE> <INDENT> GenericPage.__init__(self,parent,'Game rules')
Choose the ruleset, komi, handicap etc
62598fa623849d37ff850fb4
class Post(models.Model): <NEW_LINE> <INDENT> title = models.CharField("Title", max_length=70) <NEW_LINE> body = models.TextField("text") <NEW_LINE> created_time = models.DateTimeField("created_at", default=timezone.now) <NEW_LINE> modified_time = models.DateTimeField("updated_at") <NEW_LINE> excerpt = models.CharField("Summary", max_length=200, blank=True) <NEW_LINE> category = models.ForeignKey(Category, verbose_name="Categories", on_delete=models.CASCADE) <NEW_LINE> tags = models.ManyToManyField(Tag, verbose_name="Tag", blank=True) <NEW_LINE> author = models.ForeignKey(User, verbose_name="Author", on_delete=models.CASCADE) <NEW_LINE> views = models.PositiveIntegerField(default=0, editable=False) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> verbose_name = "Article" <NEW_LINE> verbose_name_plural = "Articles" <NEW_LINE> ordering = ["-created_time"] <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.title <NEW_LINE> <DEDENT> def save(self, *args, **kwargs): <NEW_LINE> <INDENT> self.modified_time = timezone.now() <NEW_LINE> md = markdown.Markdown( extensions=["markdown.extensions.extra", "markdown.extensions.codehilite",] ) <NEW_LINE> self.excerpt = strip_tags(md.convert(self.body))[:54] <NEW_LINE> super().save(*args, **kwargs) <NEW_LINE> <DEDENT> def get_absolute_url(self): <NEW_LINE> <INDENT> return reverse("blog:detail", kwargs={"pk": self.pk}) <NEW_LINE> <DEDENT> def increase_views(self): <NEW_LINE> <INDENT> self.views += 1 <NEW_LINE> self.save(update_fields=["views"]) <NEW_LINE> <DEDENT> @property <NEW_LINE> def toc(self): <NEW_LINE> <INDENT> return self.rich_content.get("toc", "") <NEW_LINE> <DEDENT> @property <NEW_LINE> def body_html(self): <NEW_LINE> <INDENT> return self.rich_content.get("content", "") <NEW_LINE> <DEDENT> @cached_property <NEW_LINE> def rich_content(self): <NEW_LINE> <INDENT> return generate_rich_content(self.body)
文章的数据库表稍微复杂一点,主要是涉及的字段更多。
62598fa6bd1bec0571e15043
class HierarchicObject(ModelObject): <NEW_LINE> <INDENT> def AddObjects(self, Objects): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def Delete(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def Insert(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def Modify(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def RemoveObjects(self, Objects): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def Select(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def __new__(self, ID=None): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> Definition = property(lambda self: object(), lambda self, v: None, lambda self: None) <NEW_LINE> Father = property(lambda self: object(), lambda self, v: None, lambda self: None) <NEW_LINE> HierarchicChildren = property(lambda self: object(), lambda self, v: None, lambda self: None) <NEW_LINE> Name = property(lambda self: object(), lambda self, v: None, lambda self: None)
HierarchicObject() HierarchicObject(ID: Identifier)
62598fa624f1403a92685833
class BGRProviaCurveFilter(BGRCurveFilter): <NEW_LINE> <INDENT> def __init__(self, dtype=np.uint8): <NEW_LINE> <INDENT> BGRCurveFilter.__init__( self, bPoints = [(0,0),(35,25),(205,227),(255,255)], gPoints = [(0,0),(27,21),(196,207),(255,255)], rPoints = [(0,0),(59,54),(202,210),(255,255)], dtype = dtype )
A filter that applies Portra-like curves to BGR
62598fa68a43f66fc4bf207d
class UnterminatedCommentError(Error): <NEW_LINE> <INDENT> pass
Raised if an Unterminated multi-line comment is encountered.
62598fa64428ac0f6e658422
class TestResources(BaseTestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.root = DATA_DIR <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> self.root = None <NEW_LINE> <DEDENT> @patch('lnxproc.resources.time') <NEW_LINE> def test_resources(self, mytime): <NEW_LINE> <INDENT> mytime.return_value = 1.0 <NEW_LINE> res = lnxproc.resources.Resources( keys=('domainname', 'hostname', ), root=self.root, ) <NEW_LINE> self.assertEqual( len(res), 2, msg='List of modules is incorrect' ) <NEW_LINE> res.read() <NEW_LINE> self.assertEqual( mytime.call_count, 1, msg='Timestamp not called once' ) <NEW_LINE> self.assertEqual( res.timestamp, 1.0, msg='Incorrect timestamp returned' ) <NEW_LINE> self.assertEqual( res.normalize(), { 'timestamp': 1.0, 'domainname': 'world', 'hostname': 'hawking', }, msg='Incorrect resources returned' ) <NEW_LINE> <DEDENT> def test_bad_arg(self): <NEW_LINE> <INDENT> res = lnxproc.resources.Resources( keys=('zzzdomainname', ), root=self.root, ) <NEW_LINE> self.assertEqual( len(res), 0, msg='List of modules is incorrect' )
Test Resources class
62598fa6d268445f26639b03
class ExpressionMapping(str): <NEW_LINE> <INDENT> def __init__(self, value): <NEW_LINE> <INDENT> self.expression = Expression(value[1:-1]) <NEW_LINE> super(ExpressionMapping, self).__init__(value) <NEW_LINE> <DEDENT> def values(self, variables): <NEW_LINE> <INDENT> return [self.expression.value(variables)] <NEW_LINE> <DEDENT> def attributes(self): <NEW_LINE> <INDENT> return self.expression.variables()
Class for parsing and expanding an expression.
62598fa6be383301e02536f8
class Stack: <NEW_LINE> <INDENT> def __init__(self, max_size=101): <NEW_LINE> <INDENT> self.elements = [None] * max_size <NEW_LINE> self.top_element = -1 <NEW_LINE> self.max_size = max_size <NEW_LINE> <DEDENT> def top(self): <NEW_LINE> <INDENT> if self.is_empty(): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> return self.elements[self.top_element] <NEW_LINE> <DEDENT> def pop(self): <NEW_LINE> <INDENT> if self.is_empty(): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> el = self.top() <NEW_LINE> self.top_element -= 1 <NEW_LINE> return el <NEW_LINE> <DEDENT> def push(self, element): <NEW_LINE> <INDENT> if self.top_element >= self.max_size -1: <NEW_LINE> <INDENT> raise Exception('Stack is overflow') <NEW_LINE> <DEDENT> self.top_element += 1 <NEW_LINE> self.elements[self.top_element] = element <NEW_LINE> <DEDENT> def is_empty(self): <NEW_LINE> <INDENT> if self.top_element == -1: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return False
>>> from crazy_notes.stack import Stack >>> s = Stack() >>> s.push(1) >>> s.push(2) >>> s.push(3) >>> s.top() 3 >>> s.is_empty() False >>> s.pop() 3 >>> s.top() 2
62598fa62ae34c7f260aafe1
class AverageOfTwo(EstimationMethodService): <NEW_LINE> <INDENT> def parameter_names(self): <NEW_LINE> <INDENT> return ["x", "y"] <NEW_LINE> <DEDENT> @endpoint("/info", ["GET", "PUT"], "text/plain") <NEW_LINE> def info(self): <NEW_LINE> <INDENT> return "This is an estimation method which takes two parameters (X, Y), and returns the average of the result." <NEW_LINE> <DEDENT> @endpoint("/evaluate", ["GET", "PUT"], "application/json") <NEW_LINE> def evaluate(self, x, y): <NEW_LINE> <INDENT> result = (float(x) + float(y)) / 2.0 <NEW_LINE> print("** evaluate( x = " + str(x) + ", y = " + str(y) + ") = " + str(result)) <NEW_LINE> return json.dumps(result)
This is a simple example of an estimation method. It takes two parameters, X and Y, and the result is the average of them.
62598fa6097d151d1a2c0f28
class UEntity(EmbeddedDocument): <NEW_LINE> <INDENT> url = EmbeddedDocumentField(EUrl, null=True) <NEW_LINE> description = EmbeddedDocumentField(EUrl, null=True)
Entities which have been parsed out of the url or description fields defined by the user. Read more about User Entities . Example: "entities": { "url": { "urls": [ { "url": "http://dev.twitter.com", "expanded_url": null, "indices": [0, 22] } ] }, "description": {"urls":[] } }
62598fa663d6d428bbee26b2
class PreGeneratedProblem(MOProblem): <NEW_LINE> <INDENT> def __init__(self, filename=None, points=None, delim=",", **kwargs): <NEW_LINE> <INDENT> self.points = [] <NEW_LINE> self.original_points = [] <NEW_LINE> if points: <NEW_LINE> <INDENT> self.original_points = list(points) <NEW_LINE> self.points = list(points) <NEW_LINE> <DEDENT> elif filename: <NEW_LINE> <INDENT> with open(filename) as fd: <NEW_LINE> <INDENT> for r in fd: <NEW_LINE> <INDENT> self.points.append(list(map(float, map(str.strip, r.split(delim))))) <NEW_LINE> <DEDENT> <DEDENT> self.original_points = list(self.points) <NEW_LINE> <DEDENT> super().__init__(nobj=len(self.points[0]), points=self.points, **kwargs) <NEW_LINE> if not self.ideal: <NEW_LINE> <INDENT> self.ideal = list(np.min(self.points, axis=0)) <NEW_LINE> <DEDENT> if not self.nadir: <NEW_LINE> <INDENT> self.nadir = list(np.max(self.points, axis=0)) <NEW_LINE> <DEDENT> <DEDENT> def evaluate(self, population=None): <NEW_LINE> <INDENT> return self.points
A problem where the objective function values have beeen pregenerated
62598fa6d7e4931a7ef3bf9b
class Features: <NEW_LINE> <INDENT> __slots__ = ['has_copy', 'has_bulk_delete', 'max_deletes', 'max_meta_len'] <NEW_LINE> def __init__(self, has_copy=False, has_bulk_delete=False, max_deletes=1000, max_meta_len=255): <NEW_LINE> <INDENT> self.has_copy = has_copy <NEW_LINE> self.has_bulk_delete = has_bulk_delete <NEW_LINE> self.max_deletes = max_deletes <NEW_LINE> self.max_meta_len = max_meta_len <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> features = [] <NEW_LINE> if self.has_copy: <NEW_LINE> <INDENT> features.append('copy via COPY') <NEW_LINE> <DEDENT> if self.has_bulk_delete: <NEW_LINE> <INDENT> features.append('Bulk delete %d keys at a time' % self.max_deletes) <NEW_LINE> <DEDENT> features.append('maximum meta value length is %d bytes' % self.max_meta_len) <NEW_LINE> return ', '.join(features) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> init_kwargs = [p + '=' + repr(getattr(self, p)) for p in self.__slots__] <NEW_LINE> return 'Features(%s)' % ', '.join(init_kwargs) <NEW_LINE> <DEDENT> def __hash__(self): <NEW_LINE> <INDENT> return hash(repr(self)) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return repr(self) == repr(other) <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return repr(self) != repr(other)
Set of configurable features for Swift servers. Swift is deployed in many different versions and configurations. To be able to use advanced features like bulk delete we need to make sure that the Swift server we are using can handle them. This is a value object.
62598fa6a219f33f346c6718
class SitePhpErrorLogFlag(ProxyOnlyResource): <NEW_LINE> <INDENT> _validation = { 'id': {'readonly': True}, 'name': {'readonly': True}, 'type': {'readonly': True}, } <NEW_LINE> _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'kind': {'key': 'kind', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'local_log_errors': {'key': 'properties.localLogErrors', 'type': 'str'}, 'master_log_errors': {'key': 'properties.masterLogErrors', 'type': 'str'}, 'local_log_errors_max_length': {'key': 'properties.localLogErrorsMaxLength', 'type': 'str'}, 'master_log_errors_max_length': {'key': 'properties.masterLogErrorsMaxLength', 'type': 'str'}, } <NEW_LINE> def __init__(self, kind=None, local_log_errors=None, master_log_errors=None, local_log_errors_max_length=None, master_log_errors_max_length=None): <NEW_LINE> <INDENT> super(SitePhpErrorLogFlag, self).__init__(kind=kind) <NEW_LINE> self.local_log_errors = local_log_errors <NEW_LINE> self.master_log_errors = master_log_errors <NEW_LINE> self.local_log_errors_max_length = local_log_errors_max_length <NEW_LINE> self.master_log_errors_max_length = master_log_errors_max_length
Used for getting PHP error logging flag. Variables are only populated by the server, and will be ignored when sending a request. :ivar id: Resource Id. :vartype id: str :ivar name: Resource Name. :vartype name: str :param kind: Kind of resource. :type kind: str :ivar type: Resource type. :vartype type: str :param local_log_errors: Local log_errors setting. :type local_log_errors: str :param master_log_errors: Master log_errors setting. :type master_log_errors: str :param local_log_errors_max_length: Local log_errors_max_len setting. :type local_log_errors_max_length: str :param master_log_errors_max_length: Master log_errors_max_len setting. :type master_log_errors_max_length: str
62598fa601c39578d7f12c80
class SwordDamageAlter(PropertyEffect): <NEW_LINE> <INDENT> def __init__(self, user: Entity, sword_move_type_effectiveness: Tuple[float, float, float]): <NEW_LINE> <INDENT> super().__init__(user, "SwordDamageAlter") <NEW_LINE> self.sword_move_type_effectiveness = sword_move_type_effectiveness <NEW_LINE> <DEDENT> def on_damage(self, damage_action: DamageAction): <NEW_LINE> <INDENT> damage = damage_action.damage <NEW_LINE> target = damage.target <NEW_LINE> if target.entity != self.user: <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> if isinstance(damage_action.cause_object, SwordMove) and isinstance(damage, WeaponHPDamage): <NEW_LINE> <INDENT> sword_move = damage_action.cause_object <NEW_LINE> index = sword_move.move_type.value[1] <NEW_LINE> multiplier = self.sword_move_type_effectiveness[index] <NEW_LINE> damage.multiplier_list.append(multiplier) <NEW_LINE> <DEDENT> return []
Simple PropertyEffect that allows you to add
62598fa6d7e4931a7ef3bf9c
class CredentialResult(msrest.serialization.Model): <NEW_LINE> <INDENT> _validation = { 'name': {'readonly': True}, 'value': {'readonly': True}, } <NEW_LINE> _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'value': {'key': 'value', 'type': 'bytearray'}, } <NEW_LINE> def __init__( self, **kwargs ): <NEW_LINE> <INDENT> super(CredentialResult, self).__init__(**kwargs) <NEW_LINE> self.name = None <NEW_LINE> self.value = None
The credential result response. Variables are only populated by the server, and will be ignored when sending a request. :ivar name: The name of the credential. :vartype name: str :ivar value: Base64-encoded Kubernetes configuration file. :vartype value: bytearray
62598fa67cff6e4e811b592a
class OPAMetric(_RankingMetric): <NEW_LINE> <INDENT> def __init__(self, name, ragged=False): <NEW_LINE> <INDENT> super(OPAMetric, self).__init__(ragged=ragged) <NEW_LINE> self._name = name <NEW_LINE> <DEDENT> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return self._name <NEW_LINE> <DEDENT> def _compute_impl(self, labels, predictions, weights, mask): <NEW_LINE> <INDENT> valid_pair = tf.logical_and( tf.expand_dims(mask, 2), tf.expand_dims(mask, 1)) <NEW_LINE> pair_label_diff = tf.expand_dims(labels, 2) - tf.expand_dims(labels, 1) <NEW_LINE> pair_pred_diff = tf.expand_dims(predictions, 2) - tf.expand_dims( predictions, 1) <NEW_LINE> correct_pairs = tf.cast( pair_label_diff > 0, dtype=tf.float32) * tf.cast( pair_pred_diff > 0, dtype=tf.float32) <NEW_LINE> pair_weights = tf.cast( pair_label_diff > 0, dtype=tf.float32) * tf.expand_dims( weights, 2) * tf.cast( valid_pair, dtype=tf.float32) <NEW_LINE> per_list_weights = tf.expand_dims(tf.reduce_sum(pair_weights, axis=[1, 2]), 1) <NEW_LINE> per_list_opa = tf.compat.v1.math.divide_no_nan( tf.expand_dims(tf.reduce_sum(correct_pairs * pair_weights, axis=[1, 2]), 1), per_list_weights) <NEW_LINE> return per_list_opa, per_list_weights
Implements ordered pair accuracy (OPA).
62598fa60c0af96317c56283
class UsersStub(object): <NEW_LINE> <INDENT> def __init__(self, channel): <NEW_LINE> <INDENT> self.Create = channel.unary_unary( '/event_store.client.users.Users/Create', request_serializer=users__pb2.CreateReq.SerializeToString, response_deserializer=users__pb2.CreateResp.FromString, ) <NEW_LINE> self.Update = channel.unary_unary( '/event_store.client.users.Users/Update', request_serializer=users__pb2.UpdateReq.SerializeToString, response_deserializer=users__pb2.UpdateResp.FromString, ) <NEW_LINE> self.Delete = channel.unary_unary( '/event_store.client.users.Users/Delete', request_serializer=users__pb2.DeleteReq.SerializeToString, response_deserializer=users__pb2.DeleteResp.FromString, ) <NEW_LINE> self.Disable = channel.unary_unary( '/event_store.client.users.Users/Disable', request_serializer=users__pb2.DisableReq.SerializeToString, response_deserializer=users__pb2.DisableResp.FromString, ) <NEW_LINE> self.Enable = channel.unary_unary( '/event_store.client.users.Users/Enable', request_serializer=users__pb2.EnableReq.SerializeToString, response_deserializer=users__pb2.EnableResp.FromString, ) <NEW_LINE> self.Details = channel.unary_stream( '/event_store.client.users.Users/Details', request_serializer=users__pb2.DetailsReq.SerializeToString, response_deserializer=users__pb2.DetailsResp.FromString, ) <NEW_LINE> self.ChangePassword = channel.unary_unary( '/event_store.client.users.Users/ChangePassword', request_serializer=users__pb2.ChangePasswordReq.SerializeToString, response_deserializer=users__pb2.ChangePasswordResp.FromString, ) <NEW_LINE> self.ResetPassword = channel.unary_unary( '/event_store.client.users.Users/ResetPassword', request_serializer=users__pb2.ResetPasswordReq.SerializeToString, response_deserializer=users__pb2.ResetPasswordResp.FromString, )
Missing associated documentation comment in .proto file
62598fa6aad79263cf42e6d5
class RoutePolicy(object): <NEW_LINE> <INDENT> openapi_types = { 'policy_route_info': 'PolicyRouteInfo', 'throttle_policy': 'ThrottlePolicy' } <NEW_LINE> attribute_map = { 'policy_route_info': 'policyRouteInfo', 'throttle_policy': 'throttlePolicy' } <NEW_LINE> def __init__(self, policy_route_info=None, throttle_policy=None): <NEW_LINE> <INDENT> self._policy_route_info = None <NEW_LINE> self._throttle_policy = None <NEW_LINE> self.discriminator = None <NEW_LINE> if policy_route_info is not None: <NEW_LINE> <INDENT> self.policy_route_info = policy_route_info <NEW_LINE> <DEDENT> if throttle_policy is not None: <NEW_LINE> <INDENT> self.throttle_policy = throttle_policy <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def policy_route_info(self): <NEW_LINE> <INDENT> return self._policy_route_info <NEW_LINE> <DEDENT> @policy_route_info.setter <NEW_LINE> def policy_route_info(self, policy_route_info): <NEW_LINE> <INDENT> self._policy_route_info = policy_route_info <NEW_LINE> <DEDENT> @property <NEW_LINE> def throttle_policy(self): <NEW_LINE> <INDENT> return self._throttle_policy <NEW_LINE> <DEDENT> @throttle_policy.setter <NEW_LINE> def throttle_policy(self, throttle_policy): <NEW_LINE> <INDENT> self._throttle_policy = throttle_policy <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in six.iteritems(self.openapi_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> 
<DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pprint.pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, RoutePolicy): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other
NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech Do not edit the class manually.
62598fa6236d856c2adc93bc
class FilterWord(models.Model): <NEW_LINE> <INDENT> words = models.TextField(_("Words")) <NEW_LINE> enabled = models.BooleanField(_("Enabled"), default=True) <NEW_LINE> objects = models.Manager() <NEW_LINE> FilterWordManager = FilterWordManager() <NEW_LINE> class Meta: <NEW_LINE> <INDENT> verbose_name = _("Filter Word") <NEW_LINE> verbose_name_plural = _("Filter Words") <NEW_LINE> <DEDENT> def contains(self, text): <NEW_LINE> <INDENT> words = self.words.split(",") <NEW_LINE> for word in words: <NEW_LINE> <INDENT> if word.lower() not in text.lower(): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> return True
Each comma delimited entry represents words that are banned when used together
62598fa656ac1b37e63020ed
class UpdateResultSet(ResultSet): <NEW_LINE> <INDENT> def getJSONFromString(self, str): <NEW_LINE> <INDENT> return json.loads(str) <NEW_LINE> <DEDENT> def get_NewAccessToken(self): <NEW_LINE> <INDENT> return self._output.get('NewAccessToken', None) <NEW_LINE> <DEDENT> def get_Response(self): <NEW_LINE> <INDENT> return self._output.get('Response', None) <NEW_LINE> <DEDENT> def getReply(self): <NEW_LINE> <INDENT> return GoogleReply(self.getJSONFromString(self._output.get('Response', [])))
A ResultSet with methods tailored to the values returned by the Update Choreo. The ResultSet object is used to retrieve the results of a Choreo execution.
62598fa607f4c71912baf344
class RandomFactory: <NEW_LINE> <INDENT> randomSources = () <NEW_LINE> getrandbits = getrandbits <NEW_LINE> def _osUrandom(self, nbytes): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return os.urandom(nbytes) <NEW_LINE> <DEDENT> except (AttributeError, NotImplementedError) as e: <NEW_LINE> <INDENT> raise SourceNotAvailable(e) <NEW_LINE> <DEDENT> <DEDENT> def secureRandom(self, nbytes, fallback=False): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self._osUrandom(nbytes) <NEW_LINE> <DEDENT> except SourceNotAvailable: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> if fallback: <NEW_LINE> <INDENT> warnings.warn( "urandom unavailable - " "proceeding with non-cryptographically secure random source", category=RuntimeWarning, stacklevel=2) <NEW_LINE> return self.insecureRandom(nbytes) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise SecureRandomNotAvailable("No secure random source available") <NEW_LINE> <DEDENT> <DEDENT> def _randBits(self, nbytes): <NEW_LINE> <INDENT> if self.getrandbits is not None: <NEW_LINE> <INDENT> n = self.getrandbits(nbytes * 8) <NEW_LINE> hexBytes = ("%%0%dx" % (nbytes * 2)) % n <NEW_LINE> return _fromhex(hexBytes) <NEW_LINE> <DEDENT> raise SourceNotAvailable("random.getrandbits is not available") <NEW_LINE> <DEDENT> _maketrans = bytes.maketrans <NEW_LINE> _BYTES = _maketrans(b'', b'') <NEW_LINE> def _randModule(self, nbytes): <NEW_LINE> <INDENT> return b"".join([ bytes([random.choice(self._BYTES)]) for i in range(nbytes)]) <NEW_LINE> <DEDENT> def insecureRandom(self, nbytes): <NEW_LINE> <INDENT> for src in ("_randBits", "_randModule"): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return getattr(self, src)(nbytes) <NEW_LINE> <DEDENT> except SourceNotAvailable: <NEW_LINE> <INDENT> pass
Factory providing L{secureRandom} and L{insecureRandom} methods. You shouldn't have to instantiate this class, use the module level functions instead: it is an implementation detail and could be removed or changed arbitrarily.
62598fa6009cb60464d01420
class GameStats(): <NEW_LINE> <INDENT> def __init__(self, ai_settings): <NEW_LINE> <INDENT> self.ai_settings = ai_settings <NEW_LINE> self.reset_stats() <NEW_LINE> self.game_active = True <NEW_LINE> <DEDENT> def reset_stats(self): <NEW_LINE> <INDENT> self.ships_self = self.ai_settings.ship_limit
Track statistics for Alien Invasion.
62598fa663b5f9789fe85066
class HostIPv4(Record): <NEW_LINE> <INDENT> bootfile = None <NEW_LINE> bootserver = None <NEW_LINE> configure_for_dhcp = None <NEW_LINE> deny_bootp = None <NEW_LINE> discovered_data = None <NEW_LINE> enable_pxe_lease_time = None <NEW_LINE> host = None <NEW_LINE> ignore_client_requested_options = None <NEW_LINE> ipv4addr = None <NEW_LINE> last_queried = None <NEW_LINE> mac = None <NEW_LINE> match_client = None <NEW_LINE> network = None <NEW_LINE> nextserver = None <NEW_LINE> options = None <NEW_LINE> pxe_lease_time = None <NEW_LINE> use_bootfile = None <NEW_LINE> use_bootserver = None <NEW_LINE> use_deny_bootp = None <NEW_LINE> use_for_ea_inheritance = None <NEW_LINE> use_ignore_client_requested_options = None <NEW_LINE> use_nextserver = None <NEW_LINE> use_options = None <NEW_LINE> use_pxe_lease_time = None <NEW_LINE> _repr_keys = ['ipv4addr'] <NEW_LINE> _search_by = ['ipv4addr'] <NEW_LINE> _wapi_type = 'record:host_ipv4addr' <NEW_LINE> def __init__(self, session, reference_id=None, ipv4addr=None, **kwargs): <NEW_LINE> <INDENT> self.ipv4addr = str(ipv4addr) <NEW_LINE> super(HostIPv4, self).__init__(session, reference_id, **kwargs) <NEW_LINE> <DEDENT> def _save_as(self): <NEW_LINE> <INDENT> return {'ipv4addr': self.ipv4addr}
Implements the host_ipv4addr record type.
62598fa6baa26c4b54d4f1b1
class ESPNetv2(nn.Module): <NEW_LINE> <INDENT> mult2name = { 0.5: 'espnetv2_wd2', 1.0: 'espnetv2_w1', 1.25: 'espnetv2_w5d4', 1.5: 'espnetv2_w3d2', 2.0: 'espnetv2_w2', } <NEW_LINE> def __init__(self, width_mult=2.0, feature_levels=(3, 4, 5), pretrained=True, include_final=False): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> _check_levels(feature_levels) <NEW_LINE> self.forward_levels = tuple(range(1, feature_levels[-1] + 1)) <NEW_LINE> self.feature_levels = feature_levels <NEW_LINE> self.include_final = include_final <NEW_LINE> name = self.mult2name[float(width_mult)] <NEW_LINE> net = ptcv_get_model(name, pretrained=pretrained) <NEW_LINE> del net.output <NEW_LINE> net = net.features <NEW_LINE> self.layer1 = net.init_block <NEW_LINE> self.layer2 = net.stage1 <NEW_LINE> self.layer3 = net.stage2 <NEW_LINE> self.layer4 = net.stage3 <NEW_LINE> if include_final: <NEW_LINE> <INDENT> self.layer51 = net.stage4 <NEW_LINE> self.layer52 = net.final_block <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.layer5 = net.stage4 <NEW_LINE> <DEDENT> out_channels = [ net.stage1[-1].activ.num_parameters, net.stage2[-1].activ.num_parameters, net.stage3[-1].activ.num_parameters, net.final_block.conv2.stem.out_channels if include_final else net.stage4[-1].activ.num_parameters, ] <NEW_LINE> self.out_channels = [ out_channels[i-2] for i in feature_levels ] <NEW_LINE> <DEDENT> def forward(self, x): <NEW_LINE> <INDENT> outs = [] <NEW_LINE> x = self.layer1(x, x) <NEW_LINE> x = self.layer2(*x) <NEW_LINE> if 2 in self.feature_levels: <NEW_LINE> <INDENT> outs.append(x[0]) <NEW_LINE> <DEDENT> x = self.layer3(*x) <NEW_LINE> if 3 in self.feature_levels: <NEW_LINE> <INDENT> outs.append(x[0]) <NEW_LINE> <DEDENT> x = self.layer4(*x) <NEW_LINE> if 4 in self.feature_levels: <NEW_LINE> <INDENT> outs.append(x[0]) <NEW_LINE> <DEDENT> if 5 in self.forward_levels: <NEW_LINE> <INDENT> if self.include_final: <NEW_LINE> <INDENT> x = self.layer51(*x) <NEW_LINE> x = self.layer52(x[0]) <NEW_LINE> <DEDENT> 
else: <NEW_LINE> <INDENT> x = self.layer5(*x)[0] <NEW_LINE> <DEDENT> if 5 in self.feature_levels: <NEW_LINE> <INDENT> outs.append(x) <NEW_LINE> <DEDENT> <DEDENT> return outs
ESPNetv2: A Light-weight, Power Efficient, and General Purpose Convolutional Neural Network width_mult Top1 Top5 Params FLOPs/2 x0.5 42.32 20.15 1,241,332 35.36M x1.0 33.92 13.45 1,670,072 98.09M x1.25 32.06 12.18 1,965,440 138.18M x1.5 30.83 11.29 2,314,856 185.77M x2.0 27.94 9.61 3,498,136 306.93M
62598fa6460517430c431fdc
class plantCalendarScheduling(Base): <NEW_LINE> <INDENT> __tablename__ = "plantCalendarScheduling" <NEW_LINE> ID = Column(Integer, primary_key=True, autoincrement=True, nullable=False) <NEW_LINE> color = Column(Unicode(32), primary_key=False, autoincrement=False, nullable=True) <NEW_LINE> title = Column(Unicode(32), primary_key=False, autoincrement=False, nullable=True) <NEW_LINE> start = Column(Unicode(32), primary_key=False, autoincrement=False, nullable=True) <NEW_LINE> end = Column(Unicode(32), primary_key=False, autoincrement=False, nullable=True)
日历
62598fa6cc0a2c111447af11
class ShimCacheEntryTypeXPSP2x86(obj.ProfileModification): <NEW_LINE> <INDENT> before = ['WindowsObjectClasses'] <NEW_LINE> conditions = {'os': lambda x: x == 'windows', 'major': lambda x: x == 5, 'minor': lambda x: x == 1, 'memory_model': lambda x: x == '32bit', 'vtype_module': lambda x: x == 'volatility.plugins.overlays.windows.xp_sp2_x86_vtypes',} <NEW_LINE> def modification(self, profile): <NEW_LINE> <INDENT> profile.vtypes.update(shimcache_xp_x86) <NEW_LINE> profile.vtypes.update(shimcache_xp_sp2_x86)
A shimcache entry on Windows XP SP2 (x86)
62598fa699cbb53fe6830dd7
class ExtGridLockingColumnModel(BaseExtComponent): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super(ExtGridLockingColumnModel, self).__init__(*args, **kwargs) <NEW_LINE> self.grid = None <NEW_LINE> self.init_component(*args, **kwargs) <NEW_LINE> <DEDENT> def render(self): <NEW_LINE> <INDENT> return 'new Ext.ux.grid.LockingColumnModel({columns:%s})' % ( self.grid.t_render_columns())
Модель колонок для грида блокирования
62598fa68e7ae83300ee8fa3
class Gaussian(nn.Module): <NEW_LINE> <INDENT> def __init__(self, kernel_size=3, sigma=2, channels=3): <NEW_LINE> <INDENT> super(Gaussian, self).__init__() <NEW_LINE> self.gaussian = gaussian_kernel(kernel_size=kernel_size, sigma=sigma, channels=channels) <NEW_LINE> <DEDENT> def forward(self, noised_and_cover): <NEW_LINE> <INDENT> noised_image = noised_and_cover[0] <NEW_LINE> cover_image = noised_and_cover[1] <NEW_LINE> noised_image = self.gaussian(noised_image) <NEW_LINE> return [noised_image, cover_image]
Gaussian kernel.
62598fa68a43f66fc4bf207f
class GenerateMissingAttendanceSheets(models.TransientModel): <NEW_LINE> <INDENT> _name = "beesdoo.shift.generate_missing_attendance_sheets" <NEW_LINE> _description = "beesdoo.shift.generate_missing_attendance_sheets" <NEW_LINE> date_start = fields.Datetime("Start date", required=True) <NEW_LINE> date_end = fields.Datetime("End date", required=True) <NEW_LINE> @api.multi <NEW_LINE> def generate_missing_attendance_sheets(self): <NEW_LINE> <INDENT> self.ensure_one() <NEW_LINE> tasks = self.env["beesdoo.shift.shift"] <NEW_LINE> sheets = self.env["beesdoo.shift.sheet"] <NEW_LINE> tasks = tasks.search( [ ("start_time", ">", self.date_start), ("start_time", "<", self.date_end), ] ) <NEW_LINE> for task in tasks: <NEW_LINE> <INDENT> start_time = task.start_time <NEW_LINE> end_time = task.end_time <NEW_LINE> sheet = sheets.search( [("start_time", "=", start_time), ("end_time", "=", end_time)] ) <NEW_LINE> if not sheet: <NEW_LINE> <INDENT> sheets |= sheets.create( {"start_time": start_time, "end_time": end_time} ) <NEW_LINE> <DEDENT> <DEDENT> return { "name": _("Generated Missing Sheets"), "type": "ir.actions.act_window", "view_type": "form", "view_mode": "tree,form", "res_model": "beesdoo.shift.sheet", "target": "current", "domain": [("id", "in", sheets.ids)], } <NEW_LINE> <DEDENT> @api.constrains("date_start", "date_end") <NEW_LINE> def constrains_dates(self): <NEW_LINE> <INDENT> if self.date_start > datetime.now() or self.date_end > datetime.now(): <NEW_LINE> <INDENT> raise UserError(_("Only past attendance sheets can be generated"))
Generate missing past sheets
62598fa6b7558d5895463531
class LinkIconMixin(object): <NEW_LINE> <INDENT> _IMG_RE = re.compile(''.join(('^', IMAGE_LINK_RE, '$|^', IMAGE_REFERENCE_RE, '$|^<img\s.*?>$'))) <NEW_LINE> brands = OrderedDict(( ('', 'am-icon-link'), ('yahoo.com', 'am-icon-yahoo'), ('youtube.com', 'am-icon-youtube'), ('plus.google.com', 'am-icon-google-plus'), ('google.com', 'am-icon-google'), ('twitter.com', 'am-icon-twitter'), ('facebook.com', 'am-icon-facebook'), ('github.com', 'am-icon-github'), ('instagram.com', 'am-icon-instagram'), ('reddit.com', 'am-icon-reddit-alien'), ('paypal.com', 'am-icon-paypal'), )) <NEW_LINE> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> self._host = kwargs.pop('host', '') <NEW_LINE> brands = kwargs.pop('brands', None) <NEW_LINE> if brands is not None: <NEW_LINE> <INDENT> allowed = set(brands).intersection(self.brands) <NEW_LINE> allowed.add('') <NEW_LINE> allowed_brands = OrderedDict((key, self.brands[key]) for key in allowed) <NEW_LINE> self.brands = allowed_brands <NEW_LINE> <DEDENT> if self._host: <NEW_LINE> <INDENT> self.brands = OrderedDict(self.brands) <NEW_LINE> self.brands[self._host] = self.brands[''] <NEW_LINE> <DEDENT> super(LinkIconMixin, self).__init__(*args, **kwargs) <NEW_LINE> <DEDENT> def handleMatch(self, match): <NEW_LINE> <INDENT> elem = super(LinkIconMixin, self).handleMatch(match) <NEW_LINE> logger.debug(self.type()) <NEW_LINE> is_mail = self.type() == 'LinkIconAutomailPattern' <NEW_LINE> if elem is not None and not self._IMG_RE.match(elem.text): <NEW_LINE> <INDENT> logger.debug('handled %s', elem.get('href', None)) <NEW_LINE> text = elem.text <NEW_LINE> link = elem.get('href') <NEW_LINE> if is_mail: <NEW_LINE> <INDENT> icon_class = 'am-icon-envelope' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> parsed = urlsplit(link) <NEW_LINE> netloc = parsed.netloc <NEW_LINE> icon_class = self.get_brand_icon(netloc) <NEW_LINE> logger.debug('%s -> %s', netloc, icon_class) <NEW_LINE> <DEDENT> if icon_class is not None: <NEW_LINE> <INDENT> logger.debug('pre-head 
icon %s', link) <NEW_LINE> elem.set('class', icon_class) <NEW_LINE> elem.text = ' ' + text <NEW_LINE> <DEDENT> if not is_mail and netloc not in (self._host, ''): <NEW_LINE> <INDENT> logger.debug('external link %s', link) <NEW_LINE> elem.text += ' <span class="am-icon-external-link"></span>' <NEW_LINE> elem.set('target', '_blank') <NEW_LINE> <DEDENT> <DEDENT> return elem <NEW_LINE> <DEDENT> def get_brand_icon(self, host): <NEW_LINE> <INDENT> if host == '': <NEW_LINE> <INDENT> return self.brands[''] <NEW_LINE> <DEDENT> for k, icon in self.brands.items(): <NEW_LINE> <INDENT> if k == '': <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> logger.debug('%s/%s - %s', host, k, icon) <NEW_LINE> if host == k: <NEW_LINE> <INDENT> return icon <NEW_LINE> <DEDENT> if host.endswith('.' + k): <NEW_LINE> <INDENT> return icon <NEW_LINE> <DEDENT> <DEDENT> return None
Common extension logic; mixed into the existing classes.
62598fa67047854f4633f2db
class WebAPIError(Exception): <NEW_LINE> <INDENT> pass
Raised when the online brain visualization could not be launched
62598fa62c8b7c6e89bd36c7
class SchemaService(SchemaInterface): <NEW_LINE> <INDENT> def __init__(self, schema_repository): <NEW_LINE> <INDENT> self.schema_repository = schema_repository <NEW_LINE> <DEDENT> def dump_schema(self): <NEW_LINE> <INDENT> return self.schema_repository.dump_schema() <NEW_LINE> <DEDENT> def initialize_schema(self): <NEW_LINE> <INDENT> self.schema_repository.initialize_schema()
Manage the schema of the storage layer.
62598fa6dd821e528d6d8e37
class CallCode(object): <NEW_LINE> <INDENT> def __init__(self,code,binds=None,divided=False): <NEW_LINE> <INDENT> self.code = code <NEW_LINE> self.binds = binds or [] <NEW_LINE> self.divided = divided <NEW_LINE> <DEDENT> def output(self,args,ind): <NEW_LINE> <INDENT> args = list(args) <NEW_LINE> for i,val in self.binds: <NEW_LINE> <INDENT> args.insert(i,str(val)) <NEW_LINE> <DEDENT> joinargs = lambda _args: ','.join('\n'+ind+a for a in _args) <NEW_LINE> if self.divided: <NEW_LINE> <INDENT> return self.code.format(joinargs(args[0:-1]),args[-1]) <NEW_LINE> <DEDENT> return self.code.format(joinargs(args))
C++ code representing a function call with optional predefined argument values.
62598fa6435de62698e9bcf7
class DataSourceKey(BaseEnum): <NEW_LINE> <INDENT> PHSEN_ABCDEF_SIO_MULE = 'phsen_abcdef_sio_mule' <NEW_LINE> PHSEN_ABCDEF = 'phsen_abcdef'
These are the possible harvester/parser pairs for this driver
62598fa638b623060ffa8f99
class Industry(models.Model): <NEW_LINE> <INDENT> name = models.CharField(max_length=128) <NEW_LINE> def __str__(self): return self.name <NEW_LINE> class Meta: <NEW_LINE> <INDENT> app_label = 'nadine' <NEW_LINE> verbose_name = "Industry" <NEW_LINE> verbose_name_plural = "Industries" <NEW_LINE> ordering = ['name']
The type of work a user does
62598fa6796e427e5384e696
class Genome(object): <NEW_LINE> <INDENT> def __init__(self, fitness): <NEW_LINE> <INDENT> self.fitness = fitness
A simple representation of a genome.
62598fa68da39b475be030e4
class ReleaseVariantViewSet(ChangeSetModelMixin, StrictQueryParamMixin, MultiLookupFieldMixin, viewsets.GenericViewSet): <NEW_LINE> <INDENT> queryset = models.Variant.objects.all().order_by('id') <NEW_LINE> serializer_class = ReleaseVariantSerializer <NEW_LINE> filter_class = filters.ReleaseVariantFilter <NEW_LINE> lookup_fields = (('release__release_id', r'[^/]+'), ('variant_uid', r'[^/]+')) <NEW_LINE> def create(self, *args, **kwargs): <NEW_LINE> <INDENT> return super(ReleaseVariantViewSet, self).create(*args, **kwargs) <NEW_LINE> <DEDENT> def update(self, *args, **kwargs): <NEW_LINE> <INDENT> return super(ReleaseVariantViewSet, self).update(*args, **kwargs) <NEW_LINE> <DEDENT> def partial_update(self, *args, **kwargs): <NEW_LINE> <INDENT> return super(ReleaseVariantViewSet, self).partial_update(*args, **kwargs) <NEW_LINE> <DEDENT> def list(self, *args, **kwargs): <NEW_LINE> <INDENT> return super(ReleaseVariantViewSet, self).list(*args, **kwargs) <NEW_LINE> <DEDENT> def retrieve(self, *args, **kwargs): <NEW_LINE> <INDENT> return super(ReleaseVariantViewSet, self).retrieve(*args, **kwargs) <NEW_LINE> <DEDENT> def destroy(self, *args, **kwargs): <NEW_LINE> <INDENT> return super(ReleaseVariantViewSet, self).destroy(*args, **kwargs)
This end-point provides access to Variants. Each variant is uniquely identified by release ID and variant UID. The pair in the form `release_id/variant_uid` is used in URL for retrieving, updating or deleting a single variant as well as in bulk operations.
62598fa6f7d966606f747ee6
class DapServer(object): <NEW_LINE> <INDENT> def __init__(self, path, templates=None): <NEW_LINE> <INDENT> self.path = os.path.abspath(path) <NEW_LINE> loaders = [PackageLoader("pydap.wsgi", "templates")] <NEW_LINE> if templates is not None: <NEW_LINE> <INDENT> loaders.insert(0, FileSystemLoader(templates)) <NEW_LINE> <DEDENT> self.env = Environment(loader=ChoiceLoader(loaders)) <NEW_LINE> self.env.filters["datetimeformat"] = datetimeformat <NEW_LINE> self.env.filters["unquote"] = unquote <NEW_LINE> self.handlers = load_handlers() <NEW_LINE> <DEDENT> @wsgify <NEW_LINE> def __call__(self, req): <NEW_LINE> <INDENT> path = os.path.abspath( os.path.join(self.path, *req.path_info.split("/"))) <NEW_LINE> if not path.startswith(self.path): <NEW_LINE> <INDENT> return HTTPForbidden() <NEW_LINE> <DEDENT> elif os.path.exists(path): <NEW_LINE> <INDENT> if os.path.isdir(path): <NEW_LINE> <INDENT> return self.index(path, req) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return FileApp(path) <NEW_LINE> <DEDENT> <DEDENT> base, ext = os.path.splitext(path) <NEW_LINE> if os.path.isfile(base): <NEW_LINE> <INDENT> req.environ["pydap.jinja2.environment"] = self.env <NEW_LINE> app = ServerSideFunctions(get_handler(base, self.handlers)) <NEW_LINE> return req.get_response(app) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return HTTPNotFound(comment=path) <NEW_LINE> <DEDENT> <DEDENT> def index(self, directory, req): <NEW_LINE> <INDENT> content = [ os.path.join(directory, name) for name in os.listdir(directory)] <NEW_LINE> files = [{ "name": os.path.split(path)[1], "size": os.path.getsize(path), "last_modified": datetime.fromtimestamp(os.path.getmtime(path)), "supported": supported(path, self.handlers), } for path in content if os.path.isfile(path)] <NEW_LINE> files.sort(key=lambda d: alphanum_key(d["name"])) <NEW_LINE> directories = [{ "name": os.path.split(path)[1], "last_modified": datetime.fromtimestamp(os.path.getmtime(path)), } for path in content if os.path.isdir(path)] <NEW_LINE> 
directories.sort(key=lambda d: alphanum_key(d["name"])) <NEW_LINE> tokens = req.path_info.split("/")[1:] <NEW_LINE> breadcrumbs = [{ "url": "/".join([req.application_url] + tokens[:i+1]), "title": token, } for i, token in enumerate(tokens) if token] <NEW_LINE> context = { "root": req.application_url, "location": req.path_url, "breadcrumbs": breadcrumbs, "directories": directories, "files": files, "version": __version__, } <NEW_LINE> template = self.env.get_template("index.html") <NEW_LINE> return Response( body=template.render(context), content_type="text/html", charset="utf-8")
A directory app that creates file listings and handle DAP requests.
62598fa660cbc95b0636424f
class ComparisonTaskWrapper(): <NEW_LINE> <INDENT> def __init__(self, tasks): <NEW_LINE> <INDENT> self._tasks = tasks <NEW_LINE> <DEDENT> def __call__(self): <NEW_LINE> <INDENT> out = [] <NEW_LINE> for com_task, result, timed_points, adjust_name, com_name in self._tasks: <NEW_LINE> <INDENT> score = com_task(result.prediction, timed_points, result.key.prediction_date, result.key.prediction_length) <NEW_LINE> com_key = TaskKey(adjust_name, com_name) <NEW_LINE> out.append(ComparisonResult(result.key, com_key, score)) <NEW_LINE> <DEDENT> return out <NEW_LINE> <DEDENT> @property <NEW_LINE> def off_process(self): <NEW_LINE> <INDENT> return True
:param tasks: List of tuples `(com_task, result, timed_points, adjust_name, com_name)`
62598fa6167d2b6e312b6e73
class ActionRegistry(object): <NEW_LINE> <INDENT> __EXTENSION_NAME = 'abilian:actions' <NEW_LINE> def init_app(self, app): <NEW_LINE> <INDENT> if self.__EXTENSION_NAME in app.extensions: <NEW_LINE> <INDENT> log.warning('ActionRegistry.init_app: actions already enabled on this application') <NEW_LINE> return <NEW_LINE> <DEDENT> app.extensions[self.__EXTENSION_NAME] = dict(categories=dict()) <NEW_LINE> app.before_request(self._before_request) <NEW_LINE> @app.context_processor <NEW_LINE> def add_registry_to_jinja_context(): <NEW_LINE> <INDENT> return dict(actions=self) <NEW_LINE> <DEDENT> <DEDENT> def installed(self, app=None): <NEW_LINE> <INDENT> if app is None: <NEW_LINE> <INDENT> app = current_app <NEW_LINE> <DEDENT> return self.__EXTENSION_NAME in app.extensions <NEW_LINE> <DEDENT> def register(self, *actions): <NEW_LINE> <INDENT> assert self.installed(), "Actions not enabled on this application" <NEW_LINE> assert all(map(lambda a: isinstance(a, Action), actions)) <NEW_LINE> for action in actions: <NEW_LINE> <INDENT> cat = action.category <NEW_LINE> reg = self._state['categories'].setdefault(cat, []) <NEW_LINE> reg.append(action) <NEW_LINE> <DEDENT> <DEDENT> def actions(self, context=None): <NEW_LINE> <INDENT> assert self.installed(), "Actions not enabled on this application" <NEW_LINE> result = {} <NEW_LINE> if context is None: <NEW_LINE> <INDENT> context = self.context <NEW_LINE> <DEDENT> for cat, actions in self._state['categories'].items(): <NEW_LINE> <INDENT> result[cat] = [a for a in actions if a.available(context)] <NEW_LINE> <DEDENT> return result <NEW_LINE> <DEDENT> def for_category(self, category, context=None): <NEW_LINE> <INDENT> assert self.installed(), "Actions not enabled on this application" <NEW_LINE> actions = self._state['categories'].get(category, []) <NEW_LINE> if context is None: <NEW_LINE> <INDENT> context = self.context <NEW_LINE> <DEDENT> return filter(lambda a: a.available(context), actions) <NEW_LINE> <DEDENT> @property <NEW_LINE> def 
_state(self): <NEW_LINE> <INDENT> return current_app.extensions[self.__EXTENSION_NAME] <NEW_LINE> <DEDENT> def _before_request(self): <NEW_LINE> <INDENT> g.action_context = {} <NEW_LINE> <DEDENT> @property <NEW_LINE> def context(self): <NEW_LINE> <INDENT> return g.action_context
The Action registry. This is a Flask extension which registers :class:`.Action` sets. Actions are grouped by category and are ordered by registering order. From your application use the instanciated registry :data:`.actions`. The registry is available in jinja2 templates as `actions`.
62598fa644b2445a339b68f0
class APIChangeSpec(object): <NEW_LINE> <INDENT> pass
This class defines the transformations that need to happen. This class must provide the following fields: * `function_keyword_renames`: maps function names to a map of old -> new argument names * `symbol_renames`: maps function names to new function names * `change_to_function`: a set of function names that have changed (for notifications) * `function_reorders`: maps functions whose argument order has changed to the list of arguments in the new order * `function_warnings`: maps full names of functions to warnings that will be printed out if the function is used. (e.g. tf.nn.convolution()) * `function_transformers`: maps function names to custom handlers For an example, see `TFAPIChangeSpec`.
62598fa656b00c62f0fb27b6
class WRNBottleneck(Chain): <NEW_LINE> <INDENT> def __init__(self, in_channels, out_channels, stride, width_factor): <NEW_LINE> <INDENT> super(WRNBottleneck, self).__init__() <NEW_LINE> mid_channels = int(round(out_channels // 4 * width_factor)) <NEW_LINE> with self.init_scope(): <NEW_LINE> <INDENT> self.conv1 = wrn_conv1x1( in_channels=in_channels, out_channels=mid_channels, stride=1, activate=True) <NEW_LINE> self.conv2 = wrn_conv3x3( in_channels=mid_channels, out_channels=mid_channels, stride=stride, activate=True) <NEW_LINE> self.conv3 = wrn_conv1x1( in_channels=mid_channels, out_channels=out_channels, stride=1, activate=False) <NEW_LINE> <DEDENT> <DEDENT> def __call__(self, x): <NEW_LINE> <INDENT> x = self.conv1(x) <NEW_LINE> x = self.conv2(x) <NEW_LINE> x = self.conv3(x) <NEW_LINE> return x
WRN bottleneck block for residual path in WRN unit. Parameters: ---------- in_channels : int Number of input channels. out_channels : int Number of output channels. stride : int or tuple/list of 2 int Stride of the convolution. width_factor : float Wide scale factor for width of layers.
62598fa667a9b606de545ecf
class StandardRegression(FitnessMetric): <NEW_LINE> <INDENT> def __init__(self, const_deriv=False): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.const_deriv = const_deriv <NEW_LINE> <DEDENT> def evaluate_fitness_vector(self, individual, training_data): <NEW_LINE> <INDENT> f_of_x = individual.evaluate(training_data.x) <NEW_LINE> return (f_of_x - training_data.y).flatten() <NEW_LINE> <DEDENT> def evaluate_fit_vec_w_const_deriv(self, individual, training_data): <NEW_LINE> <INDENT> f_of_x, df_dc = individual.evaluate_with_const_deriv(training_data.x) <NEW_LINE> return (f_of_x - training_data.y).flatten(), df_dc <NEW_LINE> <DEDENT> def optimize_constants(self, individual, training_data): <NEW_LINE> <INDENT> num_constants = individual.count_constants() <NEW_LINE> c_0 = np.random.uniform(-100, 100, num_constants) <NEW_LINE> if self.const_deriv: <NEW_LINE> <INDENT> def const_opt_fitness(consts): <NEW_LINE> <INDENT> individual.set_constants(consts) <NEW_LINE> fvec, dfvec_dc = self.evaluate_fit_vec_w_const_deriv( individual, training_data) <NEW_LINE> return fvec, dfvec_dc <NEW_LINE> <DEDENT> sol = optimize.root(const_opt_fitness, c_0, jac=True, method='lm') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> def const_opt_fitness(consts): <NEW_LINE> <INDENT> individual.set_constants(consts) <NEW_LINE> fvec = self.evaluate_fitness_vector(individual, training_data) <NEW_LINE> return fvec <NEW_LINE> <DEDENT> sol = optimize.root(const_opt_fitness, c_0, method='lm') <NEW_LINE> <DEDENT> individual.set_constants(sol.x)
Traditional fitness evaluation
62598fa6090684286d59365d
class FormatHtml(Processor): <NEW_LINE> <INDENT> re_html = re.compile("^<.+?>") <NEW_LINE> def format(self, element: str): <NEW_LINE> <INDENT> if not bool(self.re_html.search(element)): <NEW_LINE> <INDENT> return element <NEW_LINE> <DEDENT> soup = BeautifulSoup(element, features="lxml") <NEW_LINE> text = soup.body.prettify() <NEW_LINE> if text.startswith("<body>"): <NEW_LINE> <INDENT> text = "\n".join(line[1:] for line in text.splitlines()[1:-1]) <NEW_LINE> <DEDENT> return text <NEW_LINE> <DEDENT> def __call__( self, values: Union[List[str], str], response: Response = None, default: str = "" ) -> Tuple[Union[List[str], str], Dict]: <NEW_LINE> <INDENT> if isinstance(values, list): <NEW_LINE> <INDENT> return [self.format(element) for element in values], {} <NEW_LINE> <DEDENT> return self.format(values), {}
Processor for pretty format of XML elements
62598fa62c8b7c6e89bd36c9
class AWGServer(DeviceServer): <NEW_LINE> <INDENT> name = 'awg' <NEW_LINE> @setting(10) <NEW_LINE> def waveforms(self, c, request_json='{}'): <NEW_LINE> <INDENT> request = json.loads(request_json) <NEW_LINE> response = self._waveforms(request) <NEW_LINE> response_json = json.dumps(response) <NEW_LINE> return response_json <NEW_LINE> <DEDENT> def _waveforms(self, request): <NEW_LINE> <INDENT> if request == {}: <NEW_LINE> <INDENT> active_devices = self._get_active_devices() <NEW_LINE> request = {device_name: None for device_name in active_devices} <NEW_LINE> <DEDENT> response = {} <NEW_LINE> for device_name, waveform in request.items(): <NEW_LINE> <INDENT> device_response = None <NEW_LINE> try: <NEW_LINE> <INDENT> device_response = self._waveform(device_name, waveform) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> self._reload_device(device_name) <NEW_LINE> device_response = self._waveform(device_name, waveform) <NEW_LINE> <DEDENT> response.update({device_name: device_response}) <NEW_LINE> <DEDENT> self._send_update({'waveforms': response}) <NEW_LINE> return response <NEW_LINE> <DEDENT> def _waveform(self, name, waveform): <NEW_LINE> <INDENT> device = self._get_device(name) <NEW_LINE> if waveform: <NEW_LINE> <INDENT> device.set_waveform(waveform) <NEW_LINE> <DEDENT> response = device.get_waveform() <NEW_LINE> return response
Provides basic control for arbitrary waveform generators
62598fa6d486a94d0ba2bed1
@admin.register(Tasks) <NEW_LINE> class TasksAdmin(admin.ModelAdmin): <NEW_LINE> <INDENT> list_display = ( "priority", "task", "parent", "slug", "id" ) <NEW_LINE> list_display_links: Tuple[str] = ('task',) <NEW_LINE> list_filter = ('task',) <NEW_LINE> prepopulated_fields = {"slug": ("task",)}
Задачи
62598fa67d847024c075c2c8
class ImportUserSocialAuthTask(ImportMysqlToHiveTableTask): <NEW_LINE> <INDENT> @property <NEW_LINE> def table_name(self): <NEW_LINE> <INDENT> return 'social_auth_usersocialauth' <NEW_LINE> <DEDENT> @property <NEW_LINE> def columns(self): <NEW_LINE> <INDENT> return [ ('id', 'INT'), ('user_id', 'INT'), ('provider', 'STRING'), ('uid', 'STRING'), ('extra_data', 'STRING'), ]
Imports the `social_auth_usersocialauth` table to S3/Hive.
62598fa66aa9bd52df0d4dcd
class RioVersionInfoBuilder(VersionInfoBuilder): <NEW_LINE> <INDENT> def generate(self, to_file): <NEW_LINE> <INDENT> info = Stanza() <NEW_LINE> revision_id = self._get_revision_id() <NEW_LINE> if revision_id != NULL_REVISION: <NEW_LINE> <INDENT> info.add('revision-id', revision_id) <NEW_LINE> rev = self._branch.repository.get_revision(revision_id) <NEW_LINE> info.add('date', create_date_str(rev.timestamp, rev.timezone)) <NEW_LINE> try: <NEW_LINE> <INDENT> revno = self._get_revno_str(revision_id) <NEW_LINE> <DEDENT> except errors.GhostRevisionsHaveNoRevno: <NEW_LINE> <INDENT> revno = None <NEW_LINE> <DEDENT> for hook in RioVersionInfoBuilder.hooks['revision']: <NEW_LINE> <INDENT> hook(rev, info) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> revno = '0' <NEW_LINE> <DEDENT> info.add('build-date', create_date_str()) <NEW_LINE> if revno is not None: <NEW_LINE> <INDENT> info.add('revno', revno) <NEW_LINE> <DEDENT> if self._branch.nick is not None: <NEW_LINE> <INDENT> info.add('branch-nick', self._branch.nick) <NEW_LINE> <DEDENT> if self._check or self._include_file_revs: <NEW_LINE> <INDENT> self._extract_file_revisions() <NEW_LINE> <DEDENT> if self._check: <NEW_LINE> <INDENT> if self._clean: <NEW_LINE> <INDENT> info.add('clean', 'True') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> info.add('clean', 'False') <NEW_LINE> <DEDENT> <DEDENT> if self._include_history: <NEW_LINE> <INDENT> log = Stanza() <NEW_LINE> for (revision_id, message, timestamp, timezone) in self._iter_revision_history(): <NEW_LINE> <INDENT> log.add('id', revision_id) <NEW_LINE> log.add('message', message) <NEW_LINE> log.add('date', create_date_str(timestamp, timezone)) <NEW_LINE> <DEDENT> info.add('revisions', log.to_unicode()) <NEW_LINE> <DEDENT> if self._include_file_revs: <NEW_LINE> <INDENT> files = Stanza() <NEW_LINE> for path in sorted(self._file_revisions.keys()): <NEW_LINE> <INDENT> files.add('path', path) <NEW_LINE> files.add('revision', self._file_revisions[path]) <NEW_LINE> <DEDENT> 
info.add('file-revisions', files.to_unicode()) <NEW_LINE> <DEDENT> to_file.write(info.to_unicode())
This writes a rio stream out.
62598fa64f88993c371f048c
class ControlSocketMissingException(Exception): <NEW_LINE> <INDENT> def __init__(self, path=''): <NEW_LINE> <INDENT> message = "SSH control socket %s does not exist" % path <NEW_LINE> super(ControlSocketMissingException, self).__init__(message)
Raised when the SSH control socket is missing
62598fa6435de62698e9bcf9
@register_proxy_pool("karmenzind") <NEW_LINE> class KarmenzindProxyPool(ProxyPool): <NEW_LINE> <INDENT> def __init__(self, redis_db, args=None): <NEW_LINE> <INDENT> super().__init__(redis_db, args) <NEW_LINE> self.proxy_pool_host = os.environ.get('PROXY_POOL_SERVER_HOST', 'localhost') <NEW_LINE> self.port = os.environ.get('KARMEN_PORT', '12345') <NEW_LINE> <DEDENT> def collect_proxies(self): <NEW_LINE> <INDENT> for p in [p['ip'] + ':' + str(p['port']) for p in requests.get("http://%s:%s/api/proxy/?count=10000" % (self.proxy_pool_host, self.port)) .json()['data']['detail']]: <NEW_LINE> <INDENT> self.add_proxy(p)
https://github.com/Karmenzind/fp-server Set the port 12345
62598fa697e22403b383ae11
class EQ(Comparison): <NEW_LINE> <INDENT> def _apply(self, left: int, right: int) -> int: <NEW_LINE> <INDENT> return 1 if left == right else 0
left == right
62598fa64a966d76dd5eede7
class InvanaBotSingleWebCrawler(WebCrawlerBase): <NEW_LINE> <INDENT> name = "InvanaBotSingleWebCrawler" <NEW_LINE> def closed(self, reason): <NEW_LINE> <INDENT> print("spider closed with payload:", reason, self.spider_config.get('cti_id')) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def run_extractor(response=None, extractor=None): <NEW_LINE> <INDENT> extractor_type = extractor.get("extractor_type") <NEW_LINE> extractor_id = extractor.get("extractor_id") <NEW_LINE> driver_klass_module = import_module(f'invana_bot.extractors') <NEW_LINE> driver_klass = getattr(driver_klass_module, extractor_type) <NEW_LINE> if extractor_type is None: <NEW_LINE> <INDENT> return {} <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> extractor_object = driver_klass(response=response, extractor=extractor, extractor_id=extractor_id) <NEW_LINE> data = extractor_object.run() <NEW_LINE> return data <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> print(e) <NEW_LINE> <DEDENT> <DEDENT> return {} <NEW_LINE> <DEDENT> def parse(self, response=None): <NEW_LINE> <INDENT> self.logger.info("======Parsing the url: {}".format(response.url)) <NEW_LINE> spider_config = response.meta.get("spider_config") <NEW_LINE> default_storage = self.get_default_storage( settings=self.settings, spider_config=spider_config ) <NEW_LINE> data = {} <NEW_LINE> for extractor in spider_config.get('extractors', []): <NEW_LINE> <INDENT> extracted_data = self.run_extractor(response=response, extractor=extractor) <NEW_LINE> data.update(extracted_data) <NEW_LINE> <DEDENT> context = self.manifest.get("context") <NEW_LINE> if context is not None: <NEW_LINE> <INDENT> data.update({"context": context}) <NEW_LINE> <DEDENT> data['url'] = response.url <NEW_LINE> data['domain'] = get_domain(response.url) <NEW_LINE> data['context']['spider_id'] = spider_config['spider_id'] <NEW_LINE> traversal_data, to_traverse_links_list = self.run_traversals(spider_config=spider_config, response=response) <NEW_LINE> 
data.update(traversal_data) <NEW_LINE> yield self.prepare_data_for_yield( data=data, storage_id=default_storage.get("storage_id"), collection_name=default_storage.get("collection_name") ) <NEW_LINE> traversal_requests = self.make_traversal_requests(to_traverse_links_list=to_traverse_links_list) <NEW_LINE> for traversal_request in traversal_requests: <NEW_LINE> <INDENT> yield traversal_request <NEW_LINE> <DEDENT> self.post_parse(response=response)
This is generic spider
62598fa61f5feb6acb162b26
class DirectoryRoleRequest(RequestBase): <NEW_LINE> <INDENT> def __init__(self, request_url, client, options): <NEW_LINE> <INDENT> super(DirectoryRoleRequest, self).__init__(request_url, client, options) <NEW_LINE> <DEDENT> def delete(self): <NEW_LINE> <INDENT> self.method = "DELETE" <NEW_LINE> self.send() <NEW_LINE> <DEDENT> def get(self): <NEW_LINE> <INDENT> self.method = "GET" <NEW_LINE> entity = DirectoryRole(json.loads(self.send().content)) <NEW_LINE> self._initialize_collection_properties(entity) <NEW_LINE> return entity <NEW_LINE> <DEDENT> def update(self, directory_role): <NEW_LINE> <INDENT> self.content_type = "application/json" <NEW_LINE> self.method = "PATCH" <NEW_LINE> entity = DirectoryRole(json.loads(self.send(directory_role).content)) <NEW_LINE> self._initialize_collection_properties(entity) <NEW_LINE> return entity <NEW_LINE> <DEDENT> def _initialize_collection_properties(self, value): <NEW_LINE> <INDENT> if value and value._prop_dict: <NEW_LINE> <INDENT> if value.members and value.members._prop_dict: <NEW_LINE> <INDENT> if "members@odata.nextLink" in value._prop_dict: <NEW_LINE> <INDENT> next_page_link = value._prop_dict["members@odata.nextLink"] <NEW_LINE> value.members._init_next_page_request(next_page_link, self._client, None)
The type DirectoryRoleRequest.
62598fa6167d2b6e312b6e75
@injected <NEW_LINE> class ForwardHTTPHandler(HandlerProcessorProceed): <NEW_LINE> <INDENT> externalHost = str <NEW_LINE> externalPort = int <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> assert isinstance(self.externalHost, str), 'Invalid external host %s' % self.externalHost <NEW_LINE> assert isinstance(self.externalPort, int), 'Invalid external port %s' % self.externalPort <NEW_LINE> super().__init__() <NEW_LINE> <DEDENT> def process(self, request:Request, requestCnt:RequestContent, response:Response, responseCnt:ResponseContent, **keyargs): <NEW_LINE> <INDENT> assert isinstance(request, Request), 'Invalid request %s' % request <NEW_LINE> assert isinstance(requestCnt, RequestContent), 'Invalid request content %s' % requestCnt <NEW_LINE> assert isinstance(response, Response), 'Invalid response %s' % response <NEW_LINE> assert isinstance(responseCnt, ResponseContent), 'Invalid response content %s' % responseCnt <NEW_LINE> assert request.scheme == HTTP, 'Cannot forward for scheme %s' % request.scheme <NEW_LINE> if requestCnt.source is not None: <NEW_LINE> <INDENT> if isinstance(requestCnt.source, Iterable): <NEW_LINE> <INDENT> source = BytesIO() <NEW_LINE> for bytes in requestCnt.source: source.write(bytes) <NEW_LINE> body = source.getbuffer() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> assert isinstance(requestCnt.source, IInputStream), 'Invalid request source %s' % requestCnt.source <NEW_LINE> body = requestCnt.source.read() <NEW_LINE> <DEDENT> <DEDENT> else: body = None <NEW_LINE> if request.parameters: parameters = urlencode(request.parameters) <NEW_LINE> else: parameters = None <NEW_LINE> connection = HTTPConnection(self.externalHost, self.externalPort) <NEW_LINE> try: <NEW_LINE> <INDENT> connection.request(request.method, urlunsplit(('', '', '/%s' % request.uri, parameters, '')), body, request.headers) <NEW_LINE> <DEDENT> except socket.error as e: <NEW_LINE> <INDENT> response.code, response.status, _isSuccess = SERVICE_UNAVAILABLE <NEW_LINE> if e.errno 
== 111: response.text = 'Connection refused' <NEW_LINE> else: response.text = str(e) <NEW_LINE> return <NEW_LINE> <DEDENT> rsp = connection.getresponse() <NEW_LINE> response.status = rsp.status <NEW_LINE> response.code = response.text = rsp.reason <NEW_LINE> response.headers = dict(rsp.headers.items()) <NEW_LINE> responseCnt.source = rsp
Implementation for a handler that provides forwarding to external servers.
62598fa6d58c6744b42dc257
class IgnoreFileTests(unittest.TestCase): <NEW_LINE> <INDENT> def test_filter_none(self): <NEW_LINE> <INDENT> p = notify_processor.NotifyProcessor(None) <NEW_LINE> self.assertFalse(p.is_ignored("froo.pyc")) <NEW_LINE> <DEDENT> def test_filter_one(self): <NEW_LINE> <INDENT> p = notify_processor.NotifyProcessor(None, ['\A.*\\.pyc\Z']) <NEW_LINE> self.assertTrue(p.is_ignored("froo.pyc")) <NEW_LINE> self.assertFalse(p.is_ignored("froo.pyc.real")) <NEW_LINE> self.assertFalse(p.is_ignored("otherstuff")) <NEW_LINE> <DEDENT> def test_filter_two_simple(self): <NEW_LINE> <INDENT> p = notify_processor.NotifyProcessor(None, ['\A.*foo\Z', '\A.*bar\Z']) <NEW_LINE> self.assertTrue(p.is_ignored("blah_foo")) <NEW_LINE> self.assertTrue(p.is_ignored("blah_bar")) <NEW_LINE> self.assertFalse(p.is_ignored("bar_xxx")) <NEW_LINE> self.assertFalse(p.is_ignored("--foo--")) <NEW_LINE> self.assertFalse(p.is_ignored("otherstuff")) <NEW_LINE> <DEDENT> def test_filter_two_complex(self): <NEW_LINE> <INDENT> p = notify_processor.NotifyProcessor(None, ['\A.*foo\Z|\Afoo.*\Z', '\A.*bar\Z']) <NEW_LINE> self.assertTrue(p.is_ignored("blah_foo")) <NEW_LINE> self.assertTrue(p.is_ignored("blah_bar")) <NEW_LINE> self.assertTrue(p.is_ignored("foo_xxx")) <NEW_LINE> self.assertFalse(p.is_ignored("--foo--")) <NEW_LINE> self.assertFalse(p.is_ignored("otherstuff")) <NEW_LINE> <DEDENT> def test_is_ignored_uses_access(self): <NEW_LINE> <INDENT> sample_path = "sample path" <NEW_LINE> calls = [] <NEW_LINE> store_call = lambda *args: calls.append(args) <NEW_LINE> self.patch(filesystem_notifications, "access", store_call) <NEW_LINE> self.patch(filesystem_notifications, "path_exists", lambda _: True) <NEW_LINE> p = notify_processor.NotifyProcessor(None) <NEW_LINE> p.is_ignored(sample_path) <NEW_LINE> self.assertEqual(calls, [(sample_path,)])
Tests the ignore files behaviour.
62598fa6d7e4931a7ef3bfa0
class _XMLSerializable: <NEW_LINE> <INDENT> def __str__(self) -> str: <NEW_LINE> <INDENT> return self.flattenXML() <NEW_LINE> <DEDENT> def __add__(self, other: 'XMLContent') -> '_XMLSequence': <NEW_LINE> <INDENT> return _XMLSequence(chain((self,), _adaptSequence(other))) <NEW_LINE> <DEDENT> def __radd__(self, other: 'XMLContent') -> '_XMLSequence': <NEW_LINE> <INDENT> return _XMLSequence(chain(_adaptSequence(other), (self,))) <NEW_LINE> <DEDENT> def _toFragments(self, defaultNamespace: Optional[str]) -> Iterator[str]: <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def flattenXML(self) -> str: <NEW_LINE> <INDENT> return ''.join(self._toFragments(None)) <NEW_LINE> <DEDENT> def flattenIndented(self) -> str: <NEW_LINE> <INDENT> indentedFragments = [] <NEW_LINE> indent = '\n' <NEW_LINE> prevWasElem = False <NEW_LINE> for fragment in self._toFragments(None): <NEW_LINE> <INDENT> close = fragment.startswith('</') <NEW_LINE> open_ = not close and fragment.startswith('<') and not fragment.startswith('<!') <NEW_LINE> if close: <NEW_LINE> <INDENT> indent = indent[ : -2] <NEW_LINE> <DEDENT> if open_ and fragment.endswith('/>'): <NEW_LINE> <INDENT> close = True <NEW_LINE> <DEDENT> thisIsElem = open_ or close <NEW_LINE> if prevWasElem and thisIsElem: <NEW_LINE> <INDENT> indentedFragments.append(indent) <NEW_LINE> <DEDENT> indentedFragments.append(fragment) <NEW_LINE> if open_ and not close: <NEW_LINE> <INDENT> indent += ' ' <NEW_LINE> <DEDENT> prevWasElem = thisIsElem <NEW_LINE> <DEDENT> indentedFragments.append('\n') <NEW_LINE> return ''.join(indentedFragments) <NEW_LINE> <DEDENT> def present(self, **kwargs: object) -> '_XMLSerializable': <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> def join(self, siblings: Iterable['XMLContent']) -> '_XMLSequence': <NEW_LINE> <INDENT> content: List[_XMLSerializable] = [] <NEW_LINE> for sibling in siblings: <NEW_LINE> <INDENT> content.extend(_adaptSequence(sibling)) <NEW_LINE> content.append(self) <NEW_LINE> <DEDENT> if 
content: <NEW_LINE> <INDENT> del content[-1] <NEW_LINE> <DEDENT> return _XMLSequence(content)
Base class for objects that can be serialized to XML.
62598fa623849d37ff850fb9
class FolderMoveFrom(BaseSubstitution): <NEW_LINE> <INDENT> category = u'AsyncMove' <NEW_LINE> description = u'Move folder from' <NEW_LINE> def safe_call(self): <NEW_LINE> <INDENT> return getattr(self.wrapper, 'folder_move_from', '')
Move folder from substitution
62598fa656b00c62f0fb27b7
class EventTypes(): <NEW_LINE> <INDENT> types = {'event_types': [i for i, j in DISASTER_TYPES]}
Model for event types. Returns only the first element of each tuple.
62598fa6e5267d203ee6b810
class EditProfileForm(FlaskForm): <NEW_LINE> <INDENT> username = StringField('Username', validators=[DataRequired()]) <NEW_LINE> email = StringField('E-mail', validators=[DataRequired(), Email()]) <NEW_LINE> password = PasswordField('Password', validators=[Length(min=6)]) <NEW_LINE> image_url = StringField('(Optional) Image URL') <NEW_LINE> header_image_url = StringField('(Optional) Image URL') <NEW_LINE> bio = StringField('(Optional) About Yourself') <NEW_LINE> location = StringField('(Optional) Location')
Form for editing users
62598fa6e5267d203ee6b811
class ComputeDisksCreateSnapshotRequest(messages.Message): <NEW_LINE> <INDENT> disk = messages.StringField(1, required=True) <NEW_LINE> project = messages.StringField(2, required=True) <NEW_LINE> snapshot = messages.MessageField('Snapshot', 3) <NEW_LINE> zone = messages.StringField(4, required=True)
A ComputeDisksCreateSnapshotRequest object. Fields: disk: Name of the persistent disk resource to snapshot. project: Name of the project scoping this request. snapshot: A Snapshot resource to be passed as the request body. zone: Name of the zone scoping this request.
62598fa70a50d4780f7052e2
class _DrawingEditorMixin: <NEW_LINE> <INDENT> def _add(self,obj,value,name=None,validate=None,desc=None,pos=None): <NEW_LINE> <INDENT> ivc = isValidChild(value) <NEW_LINE> if name and hasattr(obj,'_attrMap'): <NEW_LINE> <INDENT> if '_attrMap' not in obj.__dict__: <NEW_LINE> <INDENT> obj._attrMap = obj._attrMap.clone() <NEW_LINE> <DEDENT> if ivc and validate is None: validate = isValidChild <NEW_LINE> obj._attrMap[name] = AttrMapValue(validate,desc) <NEW_LINE> <DEDENT> if hasattr(obj,'add') and ivc: <NEW_LINE> <INDENT> if pos: <NEW_LINE> <INDENT> obj.insert(pos,value,name) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> obj.add(value,name) <NEW_LINE> <DEDENT> <DEDENT> elif name: <NEW_LINE> <INDENT> setattr(obj,name,value) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError("Can't add, need name")
This is a mixin to provide functionality for edited drawings
62598fa730dc7b766599f753
class Test_is_person_or_closed_team(TestCaseWithFactory): <NEW_LINE> <INDENT> layer = DatabaseFunctionalLayer <NEW_LINE> def test_non_person(self): <NEW_LINE> <INDENT> self.assertFalse(is_public_person_or_closed_team(0)) <NEW_LINE> <DEDENT> def test_person(self): <NEW_LINE> <INDENT> person = self.factory.makePerson() <NEW_LINE> self.assertTrue(is_public_person_or_closed_team(person)) <NEW_LINE> <DEDENT> def test_open_team(self): <NEW_LINE> <INDENT> for policy in INCLUSIVE_TEAM_POLICY: <NEW_LINE> <INDENT> open_team = self.factory.makeTeam(membership_policy=policy) <NEW_LINE> self.assertFalse( is_public_person_or_closed_team(open_team), "%s is not open" % policy) <NEW_LINE> <DEDENT> <DEDENT> def test_closed_team(self): <NEW_LINE> <INDENT> for policy in EXCLUSIVE_TEAM_POLICY: <NEW_LINE> <INDENT> closed_team = self.factory.makeTeam(membership_policy=policy) <NEW_LINE> self.assertTrue( is_public_person_or_closed_team(closed_team), "%s is not closed" % policy)
Tests for is_person_or_closed_team().
62598fa785dfad0860cbf9f7
@method_decorator(login_required(login_url='/login/'), name="dispatch") <NEW_LINE> class createNoteList(GenericAPIView): <NEW_LINE> <INDENT> serializer_class = CreateNoteSerializer <NEW_LINE> queryset = Notes.objects.all() <NEW_LINE> def get(self, request): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> user = request.user <NEW_LINE> notes = Notes.objects.filter(user_id=user.id, is_archive=False) <NEW_LINE> serializer = self.serializer_class(notes, many=True) <NEW_LINE> logger.info("Getting particular note of user") <NEW_LINE> return Response(serializer.data, status=status.HTTP_200_OK) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> logger.error("Note Not Found") <NEW_LINE> return Response("Note Not found", status=status.HTTP_404_NOT_FOUND) <NEW_LINE> <DEDENT> <DEDENT> def post(self, request): <NEW_LINE> <INDENT> user = request.user <NEW_LINE> serializer = self.serializer_class(data=request.data, partial=True) <NEW_LINE> if serializer.is_valid(): <NEW_LINE> <INDENT> note = serializer.save(user_id=user.id) <NEW_LINE> logger.info("Note Successfully Created") <NEW_LINE> return Response("Note Successfully Created", status=status.HTTP_201_CREATED) <NEW_LINE> <DEDENT> logger.error("Something Went Wrong") <NEW_LINE> return Response("Note not Created", status=status.HTTP_406_NOT_ACCEPTABLE)
This API used to create Note for User
62598fa7d268445f26639b06
class ValueOf: <NEW_LINE> <INDENT> def __init__(self, g): <NEW_LINE> <INDENT> self.g = g <NEW_LINE> <DEDENT> def __call__(self, nodeid): <NEW_LINE> <INDENT> return self.g.value_of(nodeid)
Function that returns the value of a nodeid in graph g. Returns None if g.value_of(nodeid) returns None.
62598fa7442bda511e95c35b
class SvBMVertsNode(bpy.types.Node, SverchCustomTreeNode): <NEW_LINE> <INDENT> bl_idname = 'SvBMVertsNode' <NEW_LINE> bl_label = 'bmesh_props' <NEW_LINE> bl_icon = 'OUTLINER_OB_EMPTY' <NEW_LINE> Modes = ['verts','faces','edges'] <NEW_LINE> Mod = EnumProperty(name="getmodes", default="verts", items=e(Modes), update=updateNode) <NEW_LINE> a = ['hide','select'] <NEW_LINE> PV = a + ['is_manifold','is_wire','is_boundary','calc_shell_factor()','calc_edge_angle(-1)'] <NEW_LINE> PF = a + ['calc_area()','calc_perimeter()','material_index','smooth'] <NEW_LINE> PE = a + ['calc_face_angle()','calc_face_angle_signed()','calc_length()','is_boundary','is_contiguous','is_convex','is_manifold','is_wire','seam'] <NEW_LINE> verts = EnumProperty(name="Vprop", default="is_manifold", items=e(PV), update=updateNode) <NEW_LINE> faces = EnumProperty(name="Fprop", default="select", items=e(PF), update=updateNode) <NEW_LINE> edges = EnumProperty(name="Eprop", default="select", items=e(PE), update=updateNode) <NEW_LINE> def sv_init(self, context): <NEW_LINE> <INDENT> si = self.inputs.new <NEW_LINE> si('StringsSocket', 'Objects') <NEW_LINE> si('VerticesSocket', 'Vert') <NEW_LINE> si('StringsSocket', 'Edge') <NEW_LINE> si('StringsSocket', 'Poly') <NEW_LINE> self.outputs.new('StringsSocket', 'Value') <NEW_LINE> <DEDENT> def draw_buttons(self, context, layout): <NEW_LINE> <INDENT> layout.prop(self, "Mod", "Get") <NEW_LINE> layout.prop(self, self.Mod, "") <NEW_LINE> <DEDENT> def process(self): <NEW_LINE> <INDENT> Val = [] <NEW_LINE> siob = self.inputs['Objects'] <NEW_LINE> v, e, p = self.inputs['Vert'], self.inputs['Edge'], self.inputs['Poly'] <NEW_LINE> if siob.is_linked: <NEW_LINE> <INDENT> obj = siob.sv_get() <NEW_LINE> for OB in obj: <NEW_LINE> <INDENT> bm = bmesh.new() <NEW_LINE> bm.from_mesh(OB.data) <NEW_LINE> get_value(self, bm, Val) <NEW_LINE> bm.free() <NEW_LINE> <DEDENT> <DEDENT> if v.is_linked: <NEW_LINE> <INDENT> sive, sied, sipo = match_long_repeat([v.sv_get(), e.sv_get([[]]), 
p.sv_get([[]])]) <NEW_LINE> for i in zip(sive, sied, sipo): <NEW_LINE> <INDENT> bm = bmesh_from_pydata(i[0], i[1], i[2]) <NEW_LINE> get_value(self, bm, Val) <NEW_LINE> bm.free() <NEW_LINE> <DEDENT> <DEDENT> self.outputs['Value'].sv_set(Val) <NEW_LINE> <DEDENT> def update_socket(self, context): <NEW_LINE> <INDENT> self.update()
BMesh Verts
62598fa7be383301e02536fe
class TwoTwoReaction(Reaction): <NEW_LINE> <INDENT> def __init__( self, construction_state, functional_group1, functional_group2, bond_order, periodicity, ): <NEW_LINE> <INDENT> self._position_matrix = ( construction_state.get_position_matrix() ) <NEW_LINE> self._functional_group1 = functional_group1 <NEW_LINE> self._functional_group2 = functional_group2 <NEW_LINE> self._bond_order = bond_order <NEW_LINE> self._periodicity = periodicity <NEW_LINE> <DEDENT> def _get_new_atoms(self): <NEW_LINE> <INDENT> return <NEW_LINE> yield <NEW_LINE> <DEDENT> def _get_new_bonds(self): <NEW_LINE> <INDENT> for bonder1, bonder2 in self._get_bonder_pairs(): <NEW_LINE> <INDENT> yield Bond( atom1=bonder1, atom2=bonder2, order=self._bond_order, periodicity=self._periodicity, ) <NEW_LINE> <DEDENT> <DEDENT> def _get_bonder_pairs(self): <NEW_LINE> <INDENT> pairs = it.product( self._functional_group1.get_bonders(), self._functional_group2.get_bonders(), ) <NEW_LINE> sorted_pairs = sorted(pairs, key=self._pair_distance) <NEW_LINE> bonded = set() <NEW_LINE> for bonder1, bonder2 in sorted_pairs: <NEW_LINE> <INDENT> if ( bonder1.get_id() not in bonded and bonder2.get_id() not in bonded ): <NEW_LINE> <INDENT> bonded.add(bonder1.get_id()) <NEW_LINE> bonded.add(bonder2.get_id()) <NEW_LINE> yield bonder1, bonder2 <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def _pair_distance(self, bonders): <NEW_LINE> <INDENT> bonder1, bonder2 = bonders <NEW_LINE> return euclidean( self._position_matrix[bonder1.get_id()], self._position_matrix[bonder2.get_id()], ) <NEW_LINE> <DEDENT> def _get_deleted_atoms(self): <NEW_LINE> <INDENT> yield from self._functional_group1.get_deleters() <NEW_LINE> yield from self._functional_group2.get_deleters() <NEW_LINE> <DEDENT> def _get_deleted_bonds(self): <NEW_LINE> <INDENT> return <NEW_LINE> yield
A reaction between two functional groups, each with 2 bonder atoms. The reaction creates the two shortest possible bonds between the *bonder* atoms of the two functional groups, and deletes any *deleter* atoms.
62598fa7097d151d1a2c0f2e
class Mint: <NEW_LINE> <INDENT> current_year = 2019 <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> self.update() <NEW_LINE> <DEDENT> def create(self, kind): <NEW_LINE> <INDENT> return kind(self.year) <NEW_LINE> <DEDENT> def update(self): <NEW_LINE> <INDENT> self.year = Mint.current_year
A mint creates coins by stamping on years. The update method sets the mint's stamp to Mint.current_year. >>> mint = Mint() >>> mint.year 2019 >>> dime = mint.create(Dime) >>> dime.year 2019 >>> Mint.current_year = 2100 # Time passes >>> nickel = mint.create(Nickel) >>> nickel.year # The mint has not updated its stamp yet 2019 >>> nickel.worth() # 5 cents + (81 - 50 years) 36 >>> mint.update() # The mint's year is updated to 2100 >>> Mint.current_year = 2175 # More time passes >>> mint.create(Dime).worth() # 10 cents + (75 - 50 years) 35 >>> Mint().create(Dime).worth() # A new mint has the current year 10 >>> dime.worth() # 10 cents + (156 - 50 years) 116 >>> Dime.cents = 20 # Upgrade all dimes! >>> dime.worth() # 20 cents + (156 - 50 years) 126
62598fa7a79ad16197769f69
class Report(Sheet): <NEW_LINE> <INDENT> def __init__(self, props=None, base_obj=None): <NEW_LINE> <INDENT> super(Report, self).__init__(None, base_obj) <NEW_LINE> self._base = None <NEW_LINE> if base_obj is not None: <NEW_LINE> <INDENT> self._base = base_obj <NEW_LINE> <DEDENT> self._columns = TypedList(ReportColumn) <NEW_LINE> self._rows = TypedList(ReportRow) <NEW_LINE> self._source_sheets = TypedList(Sheet) <NEW_LINE> if props: <NEW_LINE> <INDENT> deserialize(self, props) <NEW_LINE> <DEDENT> self.request_response = None <NEW_LINE> self.__initialized = True <NEW_LINE> <DEDENT> @property <NEW_LINE> def columns(self): <NEW_LINE> <INDENT> return self._columns <NEW_LINE> <DEDENT> @columns.setter <NEW_LINE> def columns(self, value): <NEW_LINE> <INDENT> self._columns.load(value) <NEW_LINE> <DEDENT> @property <NEW_LINE> def rows(self): <NEW_LINE> <INDENT> return self._rows <NEW_LINE> <DEDENT> @rows.setter <NEW_LINE> def rows(self, value): <NEW_LINE> <INDENT> self._rows.load(value) <NEW_LINE> <DEDENT> @property <NEW_LINE> def source_sheets(self): <NEW_LINE> <INDENT> return self._source_sheets <NEW_LINE> <DEDENT> @source_sheets.setter <NEW_LINE> def source_sheets(self, value): <NEW_LINE> <INDENT> self._source_sheets.load(value) <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> return serialize(self) <NEW_LINE> <DEDENT> def to_json(self): <NEW_LINE> <INDENT> return json.dumps(self.to_dict()) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.to_json()
Smartsheet Report data model.
62598fa7be8e80087fbbef68
class ContentEntry(BaseSiteEntry): <NEW_LINE> <INDENT> content = Content <NEW_LINE> deleted = Deleted <NEW_LINE> publisher = Publisher <NEW_LINE> in_reply_to = InReplyTo <NEW_LINE> worksheet = Worksheet <NEW_LINE> header = Header <NEW_LINE> data = Data <NEW_LINE> field = [Field] <NEW_LINE> revision = Revision <NEW_LINE> page_name = PageName <NEW_LINE> feed_link = gdata.data.FeedLink <NEW_LINE> def find_revison_link(self): <NEW_LINE> <INDENT> return self.find_url(SITES_REVISION_LINK_REL) <NEW_LINE> <DEDENT> FindRevisionLink = find_revison_link
Google Sites Content Entry.
62598fa78da39b475be030e8
class Retry(celery.exceptions.Retry, HandleAfterAbort): <NEW_LINE> <INDENT> def __init__(self, *args, **kw): <NEW_LINE> <INDENT> self.signature = kw.pop('signature', None) <NEW_LINE> celery.exceptions.Retry.__init__(self, *args, **kw) <NEW_LINE> <DEDENT> def __call__(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.signature.apply_async() <NEW_LINE> log.warning('Task submitted for retry in %s seconds.', self.when) <NEW_LINE> <DEDENT> except Exception as exc: <NEW_LINE> <INDENT> log.error('Task retry failed', exc_info=True) <NEW_LINE> raise celery.exceptions.Reject(exc, requeue=False)
With cooperation from TransactionAwareTask.retry(), this moves the actual re-queueing of the task into the proper "error handling" transaction phase.
62598fa7796e427e5384e69a
class RMSELoss(torch.nn.Module): <NEW_LINE> <INDENT> def __init__(self, eps=1e-6): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.mse = torch.nn.MSELoss() <NEW_LINE> self.eps = eps <NEW_LINE> <DEDENT> def forward(self, yhat, y): <NEW_LINE> <INDENT> loss = torch.sqrt(self.mse(yhat, y) + self.eps) <NEW_LINE> return loss
Root mean squared loss
62598fa7435de62698e9bcfc
@attr.s <NEW_LINE> class SecurityGroupCollection(BaseCollection): <NEW_LINE> <INDENT> ENTITY = SecurityGroup <NEW_LINE> def create(self, name, description, provider, cancel=False, wait=False): <NEW_LINE> <INDENT> view = navigate_to(self, 'Add') <NEW_LINE> changed = view.form.fill({'network_manager': "{} Network Manager".format(provider.name), 'name': name, 'description': description, 'cloud_tenant': 'admin'}) <NEW_LINE> if cancel and changed: <NEW_LINE> <INDENT> view.form.cancel.click() <NEW_LINE> flash_message = 'Add of new Security Group was cancelled by the user' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> view.form.add.click() <NEW_LINE> flash_message = 'Security Group "{}" created'.format(name) <NEW_LINE> <DEDENT> view = self.create_view(SecurityGroupAllView) <NEW_LINE> view.flash.assert_success_message(flash_message) <NEW_LINE> view.entities.paginator.set_items_per_page(500) <NEW_LINE> sec_groups = self.instantiate(name, provider, description) <NEW_LINE> if wait: <NEW_LINE> <INDENT> wait_for( lambda: sec_groups.name in view.entities.all_entity_names, message="Wait Security Group to appear", num_sec=400, timeout=1000, delay=20, fail_func=sec_groups.refresh, handle_exception=True ) <NEW_LINE> <DEDENT> return sec_groups
Collection object for the :py:class: `cfme.cloud.SecurityGroup`.
62598fa776e4537e8c3ef4b3
class TestTsloplace(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def make_instance(self, include_optional): <NEW_LINE> <INDENT> if include_optional : <NEW_LINE> <INDENT> return Tsloplace( spread = 1.337, trailingPrice = 1.337 ) <NEW_LINE> <DEDENT> else : <NEW_LINE> <INDENT> return Tsloplace( ) <NEW_LINE> <DEDENT> <DEDENT> def testTsloplace(self): <NEW_LINE> <INDENT> inst_req_only = self.make_instance(include_optional=False) <NEW_LINE> inst_req_and_optional = self.make_instance(include_optional=True)
Tsloplace unit test stubs
62598fa726068e7796d4c85f
class Variant(Subtyped): <NEW_LINE> <INDENT> sku = models.CharField(_('SKU'), max_length=128, db_index=True, unique=True, help_text=_('ID of the product variant used' ' internally in the shop.')) <NEW_LINE> def __unicode__(self): <NEW_LINE> <INDENT> return '%s' % self.sku
Base class for variants. It identifies a concrete product instance, which goes to a cart. Custom variants inherit from it.
62598fa732920d7e50bc5f5d
class MixData(task.SingleTask): <NEW_LINE> <INDENT> data_coeff = config.list_type(type_=float) <NEW_LINE> weight_coeff = config.list_type(type_=float) <NEW_LINE> mixed_data = None <NEW_LINE> def setup(self): <NEW_LINE> <INDENT> if len(self.data_coeff) != len(self.weight_coeff): <NEW_LINE> <INDENT> raise config.CaputConfigError( "data and weight coefficient lists must be the same length" ) <NEW_LINE> <DEDENT> self._data_ind = 0 <NEW_LINE> <DEDENT> def process(self, data: Union[containers.SiderealStream, containers.RingMap]): <NEW_LINE> <INDENT> def _get_dset(data): <NEW_LINE> <INDENT> if isinstance(data, containers.SiderealStream): <NEW_LINE> <INDENT> return data.vis <NEW_LINE> <DEDENT> elif isinstance(data, containers.RingMap): <NEW_LINE> <INDENT> return data.map <NEW_LINE> <DEDENT> <DEDENT> if self._data_ind >= len(self.data_coeff): <NEW_LINE> <INDENT> raise RuntimeError( "This task cannot accept more items than there are coefficents set." ) <NEW_LINE> <DEDENT> if self.mixed_data is None: <NEW_LINE> <INDENT> self.mixed_data = containers.empty_like(data) <NEW_LINE> self.mixed_data.redistribute("freq") <NEW_LINE> _get_dset(self.mixed_data)[:] = 0.0 <NEW_LINE> self.mixed_data.weight[:] = 0.0 <NEW_LINE> <DEDENT> if type(self.mixed_data) != type(data): <NEW_LINE> <INDENT> raise TypeError( f"type(data) (={type(data)}) must match " f"type(data_stack) (={type(self.type)}" ) <NEW_LINE> <DEDENT> data.redistribute("freq") <NEW_LINE> mixed_dset = _get_dset(self.mixed_data)[:] <NEW_LINE> data_dset = _get_dset(data)[:] <NEW_LINE> if mixed_dset.shape != data_dset.shape: <NEW_LINE> <INDENT> raise ValueError( f"Size of data ({data_dset.shape}) must match " f"data_stack ({mixed_dset.shape})" ) <NEW_LINE> <DEDENT> mixed_dset[:] += self.data_coeff[self._data_ind] * data_dset[:] <NEW_LINE> self.mixed_data.weight[:] += self.weight_coeff[self._data_ind] * data.weight[:] <NEW_LINE> self._data_ind += 1 <NEW_LINE> <DEDENT> def process_finish(self) -> Union[containers.SiderealStream, 
containers.RingMap]: <NEW_LINE> <INDENT> if self._data_ind != len(self.data_coeff): <NEW_LINE> <INDENT> raise RuntimeError( "Did not receive enough inputs. " f"Got {self._data_ind}, expected {len(self.data_coeff)}." ) <NEW_LINE> <DEDENT> data = self.mixed_data <NEW_LINE> self.mixed_data = None <NEW_LINE> return data
Mix together pieces of data with specified weights. This can generate arbitrary linear combinations of the data and weights for both `SiderealStream` and `RingMap` objects, and can be used for many purposes such as: adding together simulated timestreams, injecting signal into data, replacing weights in simulated data with those from real data, etc. All coefficients are applied naively to generate the final combinations, i.e. no normalisations or weighted summation is performed. Attributes ---------- data_coeff : list A list of coefficients to apply to the data dataset of each input containter to produce the final output. These are applied to either the `vis` or `map` dataset depending on the the type of the input container. weight_coeff : list Coefficient to be applied to each input containers weights to generate the output.
62598fa701c39578d7f12c86
class UnpackException(Exception): <NEW_LINE> <INDENT> pass
Exception while msgpack unpacking
62598fa791af0d3eaad39d16
class NDBUserDatastore(NDBDatastore, datastore.UserDatastore): <NEW_LINE> <INDENT> def __init__(self, user_model, role_model): <NEW_LINE> <INDENT> NDBDatastore.__init__(self) <NEW_LINE> datastore.UserDatastore.__init__(self, user_model, role_model) <NEW_LINE> <DEDENT> def create_user(self, **kwargs): <NEW_LINE> <INDENT> kwargs['id'] = kwargs.get('email', None) <NEW_LINE> return super(NDBUserDatastore, self).create_user(**kwargs) <NEW_LINE> <DEDENT> def create_role(self, **kwargs): <NEW_LINE> <INDENT> kwargs['id'] = kwargs.get('name', None) <NEW_LINE> return super(NDBUserDatastore, self).create_role(**kwargs) <NEW_LINE> <DEDENT> def get_user(self, id_or_email): <NEW_LINE> <INDENT> return self.user_model.get_by_id(id_or_email) <NEW_LINE> <DEDENT> def find_user(self, **kwargs): <NEW_LINE> <INDENT> if 'id' in kwargs: <NEW_LINE> <INDENT> return self.get_user(kwargs['id']) <NEW_LINE> <DEDENT> filters = [getattr(self.user_model, k) == v for k, v in kwargs.iteritems() if hasattr(self.user_model, k)] <NEW_LINE> return self.user_model.query(*filters).get() <NEW_LINE> <DEDENT> def find_role(self, role): <NEW_LINE> <INDENT> return self.role_model.get_by_id(role)
An NDB datastore implementation for Flask-Security.
62598fa721bff66bcd722b6d
class BucketListItemView(generics.CreateAPIView): <NEW_LINE> <INDENT> serializer_class = BucketListItemSerializer <NEW_LINE> permission_classes = (IsAuthenticated, IsBucketlistOwner) <NEW_LINE> def perform_create(self, serializer): <NEW_LINE> <INDENT> bucketlist_id = self.kwargs.get('pk') <NEW_LINE> bucketlist = BucketList.objects.filter( pk=bucketlist_id, owner=self.request.user).first() <NEW_LINE> name = serializer.validated_data.get("name") <NEW_LINE> if BucketListItem.objects.filter(name=name, bucketlist=bucketlist): <NEW_LINE> <INDENT> raise ParseError(detail="This item already exist") <NEW_LINE> <DEDENT> serializer.save(bucketlist=bucketlist)
Create a new item.
62598fa767a9b606de545ed3
class ListPluginResponse:
    """Thrift-generated response struct.

    Attributes:
     - Status: i32 result code.
     - Plugins: list of ListPluginResponseItem structs.
    """

    # (field id, wire type, name, type args, default) per field;
    # index 0 is None by thrift convention (field ids start at 1).
    thrift_spec = (
        None,
        (1, TType.I32, 'Status', None, None, ),
        (2, TType.LIST, 'Plugins',
         (TType.STRUCT, (ListPluginResponseItem,
                         ListPluginResponseItem.thrift_spec)), None, ),
    )

    def __init__(self, Status=None, Plugins=None,):
        self.Status = Status
        self.Plugins = Plugins

    def read(self, iprot):
        """Deserialize this struct from the given input protocol."""
        # Fast path: use the C-accelerated decoder when the accelerated
        # binary protocol and the fastbinary extension are available.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        # Slow path: generic field-by-field decode.
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.I32:
                    self.Status = iprot.readI32();
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.LIST:
                    self.Plugins = []
                    (_etype3, _size0) = iprot.readListBegin()
                    for _i4 in xrange(_size0):
                        _elem5 = ListPluginResponseItem()
                        _elem5.read(iprot)
                        self.Plugins.append(_elem5)
                    iprot.readListEnd()
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field id: skip it so schema evolution stays safe.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to the given output protocol."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__,
                                                              self.thrift_spec)))
            return
        oprot.writeStructBegin('ListPluginResponse')
        # Optional fields are only written when set.
        if self.Status is not None:
            oprot.writeFieldBegin('Status', TType.I32, 1)
            oprot.writeI32(self.Status)
            oprot.writeFieldEnd()
        if self.Plugins is not None:
            oprot.writeFieldBegin('Plugins', TType.LIST, 2)
            oprot.writeListBegin(TType.STRUCT, len(self.Plugins))
            for iter6 in self.Plugins:
                iter6.write(oprot)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # No required fields, so there is nothing to validate.
        return

    def __repr__(self):
        # Python 2 iteritems; shows all current attribute values.
        L = ['%s=%r' % (key, value) for key, value in self.__dict__.iteritems()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
Attributes: - Status - Plugins
62598fa71f037a2d8b9e3ff3
class TestBalanceResource(unittest.TestCase):
    """Unit-test stubs for BalanceResource (no assertions implemented yet)."""

    def setUp(self):
        """No per-test fixtures to build yet."""
        pass

    def tearDown(self):
        """Nothing to clean up yet."""
        pass

    def testBalanceResource(self):
        """Placeholder for BalanceResource assertions."""
        pass
BalanceResource unit test stubs
62598fa763d6d428bbee26b9
class GraylogUDPPublisherTests(TestCase):
    """Tests for GraylogUDPPublisher's UDP transport behaviour."""

    def setUp(self):
        # Mock reactor implementing just the interfaces the publisher uses.
        self.reactor = iMock(IReactorUDP, IReactorCore, IReactorPluggableResolver)
        patch(self, 'txgraylog2.graylogger.reactor', new=self.reactor)
        self.transport = iMock(IUDPTransport)

        def listenUDP(port, proto):
            # Connect the protocol straight to our mock transport.
            proto.makeConnection(self.transport)
        self.reactor.listenUDP.side_effect = listenUDP

        def callWhenRunning(f, *args, **kwargs):
            # Run "when running" callables synchronously in tests.
            f(*args, **kwargs)
        self.reactor.callWhenRunning = callWhenRunning

        self.reactor.resolve.return_value = succeed('127.0.0.1')
        self.graylog = GraylogUDPPublisher(reactor=self.reactor)

    def test_wrapper_listens(self):
        """The publisher binds a UDP port (0 = ephemeral) with a graylog protocol."""
        self.reactor.listenUDP.assert_called_once_with(
            0, matches(IsInstance(_GraylogProtocol)))

    def test_observer_extracts_message(self):
        """A log event's message tuple is written to the transport as one
        compressed datagram (fixture bytes are the expected payload)."""
        self.graylog({'message': ('{"short_message": "foo"}',)})
        self.transport.write.assert_called_once_with(
            'x\x9c\xabV*\xce\xc8/*\x89\xcfM-.NLOU\xb2RPJ\xcb\xcfW\xaa\x05\x00p\xee\x08\x93')

    @mock.patch('txgraylog2.graylogger.randbytes.secureRandom', return_value='secureBytes')
    def test_observer_writes_chunks(self, secureRandom):
        """With chunkSize=10 the payload is split into sequenced chunks,
        each prefixed with a magic header, the (mocked) random message id,
        the chunk index and the chunk count."""
        graylog = GraylogUDPPublisher(reactor=self.reactor, chunkSize=10)
        graylog({'message': ('{"short_message": "foo"}',)})
        self.transport.write.assert_has_calls(
            [mock.call(
                '\x1e\x0fsecureBytes\x00\x04x\x9c\xabV*\xce\xc8/*\x89'),
             mock.call('\x1e\x0fsecureBytes\x01\x04\xcfM-.NLOU\xb2R'),
             mock.call(
                '\x1e\x0fsecureBytes\x02\x04PJ\xcb\xcfW\xaa\x05\x00p\xee'),
             mock.call('\x1e\x0fsecureBytes\x03\x04\x08\x93')],
            any_order=False)
Test the GraylogUDPPublisher.
62598fa73cc13d1c6d465673
class DetectSlowWave:
    """Callable stub for slow-wave detection.

    NOTE(review): the input data is currently ignored and an empty
    SlowWave result is always returned — the detection algorithm is TODO.
    """

    def __init__(self):
        pass

    def __call__(self, data):
        # TODO: analyse `data` instead of returning an empty result.
        return SlowWave()
Design slow wave detection
62598fa74e4d56256637232c
class IPluginsNames(Interface):
    """Marker interface (no methods): tags components that retrieve
    plugin names."""
    pass
Marker interface for plugin-name retrievers.
62598fa7a8ecb03325871117
class IPWhitelistHandler(EntityHandlerBase):
    """Creating, reading, updating and deleting a single IP whitelist.

    GET is available in Standalone, Primary and Replica modes.
    Everything else is available only in Standalone and Primary modes.
    """

    entity_url_prefix = '/auth/api/v1/ip_whitelists/'
    entity_kind = model.AuthIPWhitelist
    entity_kind_name = 'ip_whitelist'
    entity_kind_title = 'ip whitelist'

    @classmethod
    def get_entity_key(cls, name):
        """Map a whitelist name to its datastore key (name must be valid)."""
        assert model.is_valid_ip_whitelist_name(name), name
        return model.ip_whitelist_key(name)

    @classmethod
    def do_create(cls, entity):
        entity.put()
        # Every mutation must be propagated to replicas.
        model.replicate_auth_db()

    @classmethod
    def do_update(cls, entity, params, modified_by):
        entity.populate(**params)
        # Stamp who changed the entity and when before persisting.
        entity.modified_ts = utils.utcnow()
        entity.modified_by = modified_by
        entity.put()
        model.replicate_auth_db()

    @classmethod
    def do_delete(cls, entity):
        entity.key.delete()
        model.replicate_auth_db()
Creating, reading, updating and deleting a single IP whitelist. GET is available in Standalone, Primary and Replica modes. Everything else is available only in Standalone and Primary modes.
62598fa7442bda511e95c35d
class Triangle(Figure):
    """An equilateral triangle drawn with turtle graphics.

    Drawing starts at (x, y); each side has length ``a`` and the shape
    is traced with three 120-degree left turns.
    """

    def __init__(self, x, y, a, color):
        super().__init__(x, y, color)
        self._a = a  # side length

    def _draw(self, color):
        pencolor(color)
        # Lift the pen while repositioning so no stray line is drawn.
        up()
        setpos(self._x, self._y)
        down()
        # Three equal sides, turning 120 degrees after each.
        for i in range(3):
            forward(self._a)
            left(120)
        up()
Клас Трикутник Використовується для зображення правильного трикутника на екрані
62598fa7d486a94d0ba2bed5
class Worker(object):
    """Wraps a loadable worker module instance together with its run
    schedule and the threading events used to synchronise execution."""

    # Seconds between runs when the config supplies no 'schedule' key.
    DEFAULT_SCHEDULE = 300

    def __init__(self, conf_dict):
        # Instantiate the configured worker through the config-manager
        # factory; conf_dict must carry 'module', 'name' and 'parameters'.
        self.worker = configmanager.ConfigManager.create(conf_dict['module'],
                                                         conf_dict['name'],
                                                         conf_dict['parameters'])
        self.module = conf_dict['module']
        self.name = self.worker.__class__.__name__
        self.schedule = conf_dict.get('schedule', self.DEFAULT_SCHEDULE)
        self.run_counter = 0  # number of runs performed so far
        self.data_updated = threading.Event()  # signalled when data changes
        self.stop_running = threading.Event()  # signalled to request shutdown
Provides interface to loadable modules an events to sycronise execution
62598fa72ae34c7f260aafe9
class IPBrightnessSensor(SensorHmIP):
    """HomeMatic IP sensor for outdoor brightness measurement."""

    def __init__(self, device_description, proxy, resolveparamsets=False):
        super().__init__(device_description, proxy, resolveparamsets)
        # Register the illumination readings this device exposes on channel 1.
        self.SENSORNODE.update({"CURRENT_ILLUMINATION": [1],
                                "AVERAGE_ILLUMINATION": [1],
                                "LOWEST_ILLUMINATION": [1],
                                "HIGHEST_ILLUMINATION": [1]})
IP Sensor for outdoor brightness measure
62598fa745492302aabfc3d9
class IsolationTestCase(test.TestCase):
    """Ensure that things are cleaned up after failed tests.

    These tests don't really do much here, but if isolation fails a
    bunch of other tests should fail.
    """

    def test_service_isolation(self):
        # Starting a service here must not leak state into later tests.
        self.useFixture(test.ServiceFixture('compute'))

    def test_rpc_consumer_isolation(self):
        class NeverCalled(object):
            # Any attribute access means a message was delivered to this
            # proxy, i.e. RPC consumer state leaked between tests.
            def __getattribute__(*args):
                assert False, "I should never get called."

        connection = rpc.create_connection(new=True)
        proxy = NeverCalled()
        connection.create_consumer('compute', proxy, fanout=False)
        connection.consume_in_thread()
Ensure that things are cleaned up after failed tests. These tests don't really do much here, but if isolation fails a bunch of other tests should fail.
62598fa78da39b475be030ea
class ModelBackend(OridginModelBackend):
    """Authenticates against settings.AUTH_USER_MODEL."""

    def authenticate(self, username=None, password=None, **kwargs):
        """Return the user on valid credentials, else None (implicitly)."""
        UserModel = get_user_class()
        if username is None:
            # Allow callers to pass the username under the model's
            # configured USERNAME_FIELD name.
            username = kwargs.get(UserModel.USERNAME_FIELD)
        try:
            user = UserModel._default_manager.get_by_natural_key(username)
            if user.check_password(password):
                return user
        except UserModel.DoesNotExist:
            # Run the (deliberately slow) password hasher even for unknown
            # usernames so response timing does not reveal whether the
            # account exists.
            UserModel().set_password(password)

    def get_group_permissions(self, user_obj, obj=None):
        """Return the set of 'app_label.codename' strings the user has via
        group membership. Object-level permissions are not supported."""
        if user_obj.is_anonymous() or obj is not None:
            return set()
        # Compute once and cache the result on the user object.
        if not hasattr(user_obj, '_group_perm_cache'):
            if user_obj.is_superuser:
                perms = Permission.objects.all()
            else:
                # Query permissions through the user model's 'groups' M2M,
                # whatever its reverse relation name is.
                user_groups_field = get_user_class()._meta.get_field('groups')
                user_groups_query = 'group__%s' % user_groups_field.related_query_name()
                perms = Permission.objects.filter(**{user_groups_query: user_obj})
            perms = perms.values_list('content_type__app_label', 'codename').order_by()
            user_obj._group_perm_cache = set(["%s.%s" % (ct, name) for ct, name in perms])
        return user_obj._group_perm_cache

    def get_user(self, user_id):
        """Return the user with primary key `user_id`, or None."""
        UserModel = get_user_class()
        try:
            return UserModel._default_manager.get(pk=user_id)
        except UserModel.DoesNotExist:
            return None
Authenticates against settings.AUTH_USER_MODEL.
62598fa763d6d428bbee26ba
class ComputeType(Enum):
    """Enumerates the compute virtualization types supported by the system."""

    KVM = 1
    DOCKER = 2
This declare all types of network in our system using enumeration
62598fa74e4d56256637232d
class Query:
    """Skygear Query builder.

    Example:
        >>> p = Predicate(gender__eq="m")
        >>> query = Query("student", predicate=p)
        >>> database = container.public_database
        >>> result = database.query(query)
    """

    def __init__(self, record_type, predicate=None, count=False, limit=50,
                 offset=None, include=None):
        """
        Args:
            record_type: name of the record type to query.
            predicate: Predicate to filter by; defaults to a match-all
                Predicate().
            count: whether to also request a total record count.
            limit: maximum number of records to return.
            offset: number of records to skip, or None.
            include: relation keys to include in the result (defaults to
                an empty list).
        """
        self.record_type = record_type
        if predicate is None:
            predicate = Predicate()
        self.predicate = predicate
        self.count = count
        self.sort = []
        self.limit = limit
        self.offset = offset
        # Bug fix: the old `include=[]` default was a single list shared by
        # every Query instance; build a fresh list per instance instead.
        self.include = [] if include is None else include

    def add_order(self, key, order):
        """Append a sort descriptor on `key` and return self for chaining."""
        self.sort.append([{'$type': 'keypath', '$val': key}, order])
        return self
Skygear Query Class Example: >>> p = Predicate(gender__eq="m") >>> query = Query("student", predicate=p) >>> database = container.public_database >>> result = database.query(query)
62598fa74a966d76dd5eedeb
class DataResSwitchBanVendor(object):
    """Validates a switch-ban-vendor response payload.

    Asserts (via hamcrest matchers) that the response dict carries the
    required 'code' and 'message' keys.
    """

    def __init__(self, data, data_req=None):
        # data_req is accepted for interface symmetry but not used here.
        assert_that(data, has_key('code'))
        assert_that(data, has_key('message'))
返回值验证
62598fa7d6c5a102081e2050
class MinibatchSource(cntk_py.MinibatchSource):
    """Parent class of all minibatch sources.

    For most cases you will need the helper functions
    :func:`cntk.io.text_format_minibatch_source` or
    :func:`cntk.io.create_minibatch_source`.
    """

    def stream_infos(self):
        """Return descriptions of all streams this source provides."""
        return super(MinibatchSource, self).stream_infos()

    def stream_info(self, name):
        """Return the description of the stream named `name`."""
        return super(MinibatchSource, self).stream_info(name)

    def get_next_minibatch(self, minibatch_size_in_samples, device=None,
                           minibatch_size_in_sequences=None):
        """Fetch the next minibatch from the source.

        Args:
            minibatch_size_in_samples: maximum number of samples to return.
            device: target DeviceDescriptor; defaults to the process-wide
                default device.
            minibatch_size_in_sequences: optional maximum number of
                sequences to return.

        Bug fix: the original body referenced `minibatch_size_in_sequences`
        without it being a parameter, raising NameError on every call. It
        is now an optional trailing keyword so existing callers (positional
        `device` included) are unaffected.
        """
        if device is None:
            device = cntk_py.DeviceDescriptor.use_default_device()
        if minibatch_size_in_sequences is None:
            return super(MinibatchSource, self).get_next_minibatch(
                minibatch_size_in_samples, device)
        return super(MinibatchSource, self).get_next_minibatch(
            minibatch_size_in_samples, minibatch_size_in_sequences, device)
Parent class of all minibatch sources. For most cases you will need the helper functions :func:`cntk.io.text_format_minibatch_source` or :func:`cntk.io.create_minibatch_source`.
62598fa74527f215b58e9deb
class StatusCommand(Command):
    """Ship status command (key: 'status', alias: 'shipstat').

    NOTE(review): both hooks below are currently no-ops; the docstring in
    the surrounding metadata mentions scanning — confirm intended behavior
    before implementing.
    """

    key = "status"
    aliases = ["shipstat"]
    locks = "cmd:all()"  # available to everyone
    help_category = "Cochran - Ships"

    def at_pre_cmd(self):
        # No pre-command checks yet.
        pass

    def parse(self):
        # No arguments to parse yet.
        pass
Run a scan. This shows everything in space within the craft's sensor range.
62598fa7d7e4931a7ef3bfa4