code stringlengths 4 4.48k | docstring stringlengths 1 6.45k | _id stringlengths 24 24 |
|---|---|---|
@dataclass <NEW_LINE> class BuildJob: <NEW_LINE> <INDENT> name: str <NEW_LINE> source_dir: str <NEW_LINE> dest_dir: str <NEW_LINE> def log_path(self) -> str: <NEW_LINE> <INDENT> return os.path.join(self.dest_dir, "build.log") | Bundle data needed to build. | 62598fb3cc40096d6161a229 |
class CDR(db.Model): <NEW_LINE> <INDENT> __tablename__ = 'CDR_FWR' <NEW_LINE> __table_args__ = {'mysql_row_format': 'DYNAMIC'} <NEW_LINE> seq_id = db.Column(db.Integer, db.ForeignKey('rearrangement.seq_id')) <NEW_LINE> CDR_FWR_id = db.Column(db.Integer, primary_key=True) <NEW_LINE> region= db.Column(db.String(100), index= True, unique= True, nullable= False) <NEW_LINE> dna_seq = db.Column(db.String(2000), index=True, unique=True, nullable=False) <NEW_LINE> prot_seq = db.Column(db.String(2000), index=True, unique=True, nullable=False) <NEW_LINE> relation = db.relationship('Rearrangement', backref='cdr') | Data model for CDR. | 62598fb35fc7496912d482cc |
class CommandLineTestCase(unittest.TestCase): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def setUpClass(cls): <NEW_LINE> <INDENT> cls.dev_null = open(os.devnull, 'w') <NEW_LINE> sys.stdout = cls.dev_null <NEW_LINE> parser = get_parser() <NEW_LINE> cls.parser = parser <NEW_LINE> cls.units = CULINARY_UNITS <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def tearDownClass(cls): <NEW_LINE> <INDENT> cls.dev_null.close() | Base TestCase class, sets up a CLI parser | 62598fb301c39578d7f12e1a |
class FileViewSet(utils.GenericViewSet): <NEW_LINE> <INDENT> obj_class = models.File <NEW_LINE> queryset = models.File.objects.filter(is_active=True) <NEW_LINE> serializer_class = serializers.FileSerializer <NEW_LINE> filter_class = works_filters.FileFilter | ViewSet for File CRUD REST Service that inherits from utils.GenericViewSet
| 62598fb37b25080760ed7553 |
class IsMember(permissions.BasePermission): <NEW_LINE> <INDENT> def has_object_permission(self, request, view, obj): <NEW_LINE> <INDENT> if request.user in obj.users.all(): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return False | Custom permission to only allow users of a Workspace to view or edit it. | 62598fb3a05bb46b3848a90d |
class PluginScriptError(Exception): <NEW_LINE> <INDENT> @property <NEW_LINE> def message(self): <NEW_LINE> <INDENT> return self.args[0] <NEW_LINE> <DEDENT> def __init__(self, message): <NEW_LINE> <INDENT> super(PluginScriptError, self).__init__(message) | Plugin-catchable exception
This exception will be thrown whenever a library call is made with check=True
and it returns a non-zero exit code.
This can be handy for plugin author who
chooses to catch exceptions explicitly than checking the exit code of response.
Attributes:
message - A localized user-readable message. | 62598fb3283ffb24f3cf392e |
class MetadataService: <NEW_LINE> <INDENT> __metaclass__ = ABCMeta <NEW_LINE> @abstractmethod <NEW_LINE> def __init__(self, host, dbname, port, user, password): <NEW_LINE> <INDENT> self.host = host <NEW_LINE> self.dbname = dbname <NEW_LINE> self.port = port <NEW_LINE> self.user = user <NEW_LINE> self.password = password <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def create_db_conn(self): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def is_active_job(self, job_name, job_instance): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def is_run_sync_job(self): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def check_table_exists(self, job_name, job_instance, tables): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def get_glue_jobs_from_db(self): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def get_job_status(self, job_name, job_instance): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def get_job_details(self, job_name, job_instance): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def update_jobs_table_is_run(self, flag): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def update_jobs_table(self): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def update_job_instance(self, job_name, job_instance, job_run_id, job_status_ctx): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def update_job_details(self, job_name, job_instance, table): <NEW_LINE> <INDENT> raise NotImplementedError() | MetadataService is an abstract class which defines the various signatures for classes
which implement these functions. | 62598fb367a9b606de546070 |
class V1ServiceList(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.swagger_types = { 'api_version': 'str', 'items': 'list[V1Service]', 'kind': 'str', 'metadata': 'UnversionedListMeta' } <NEW_LINE> self.attribute_map = { 'api_version': 'apiVersion', 'items': 'items', 'kind': 'kind', 'metadata': 'metadata' } <NEW_LINE> self._api_version = None <NEW_LINE> self._items = None <NEW_LINE> self._kind = None <NEW_LINE> self._metadata = None <NEW_LINE> <DEDENT> @property <NEW_LINE> def api_version(self): <NEW_LINE> <INDENT> return self._api_version <NEW_LINE> <DEDENT> @api_version.setter <NEW_LINE> def api_version(self, api_version): <NEW_LINE> <INDENT> self._api_version = api_version <NEW_LINE> <DEDENT> @property <NEW_LINE> def items(self): <NEW_LINE> <INDENT> return self._items <NEW_LINE> <DEDENT> @items.setter <NEW_LINE> def items(self, items): <NEW_LINE> <INDENT> self._items = items <NEW_LINE> <DEDENT> @property <NEW_LINE> def kind(self): <NEW_LINE> <INDENT> return self._kind <NEW_LINE> <DEDENT> @kind.setter <NEW_LINE> def kind(self, kind): <NEW_LINE> <INDENT> self._kind = kind <NEW_LINE> <DEDENT> @property <NEW_LINE> def metadata(self): <NEW_LINE> <INDENT> return self._metadata <NEW_LINE> <DEDENT> @metadata.setter <NEW_LINE> def metadata(self, metadata): <NEW_LINE> <INDENT> self._metadata = metadata <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in iteritems(self.swagger_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: 
<NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other | NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually. | 62598fb3cc0a2c111447b0b5 |
class StickyConsumerMsgGroupTests(Base): <NEW_LINE> <INDENT> pass | Tests for the behavior of sticky-consumer message groups. These tests
expect all messages from the same group be consumed by the same clients.
See QPID-3347 for details. | 62598fb37b180e01f3e490a1 |
class MAVLink_novatel_diag_message(MAVLink_message): <NEW_LINE> <INDENT> def __init__(self, timeStatus, receiverStatus, solStatus, posType, velType, posSolAge, csFails): <NEW_LINE> <INDENT> MAVLink_message.__init__(self, MAVLINK_MSG_ID_NOVATEL_DIAG, 'NOVATEL_DIAG') <NEW_LINE> self._fieldnames = ['timeStatus', 'receiverStatus', 'solStatus', 'posType', 'velType', 'posSolAge', 'csFails'] <NEW_LINE> self.timeStatus = timeStatus <NEW_LINE> self.receiverStatus = receiverStatus <NEW_LINE> self.solStatus = solStatus <NEW_LINE> self.posType = posType <NEW_LINE> self.velType = velType <NEW_LINE> self.posSolAge = posSolAge <NEW_LINE> self.csFails = csFails <NEW_LINE> <DEDENT> def pack(self, mav): <NEW_LINE> <INDENT> return MAVLink_message.pack(self, mav, 59, struct.pack('<IfHBBBB', self.receiverStatus, self.posSolAge, self.csFails, self.timeStatus, self.solStatus, self.posType, self.velType)) | Transmits the diagnostics data from the Novatel OEMStar GPS | 62598fb3796e427e5384e836 |
class InfoDialogHW(_DialogWindow): <NEW_LINE> <INDENT> __gtype_name__ = 'InfoDialogHW' <NEW_LINE> def __init__(self, title, comment, evaluation, student, mimetype): <NEW_LINE> <INDENT> super(InfoDialogHW, self).__init__("info", 'Informacion del documento') <NEW_LINE> hbox = gtk.HBox() <NEW_LINE> self.content_vbox.pack_start(hbox, True) <NEW_LINE> previewbox = gtk.VBox() <NEW_LINE> preview = Icon(pixel_size=300) <NEW_LINE> preview.props.icon_name = mime.get_mime_icon(mimetype) <NEW_LINE> preview.props.xo_color = profile.get_color() <NEW_LINE> previewbox.pack_start(preview, False) <NEW_LINE> hbox.pack_start(previewbox, False, padding=5) <NEW_LINE> vbox = gtk.VBox() <NEW_LINE> hbox.pack_end(vbox, True, padding=20) <NEW_LINE> title_label = gtk.Label( '<span font_desc="15"><b>%s</b></span>' % title) <NEW_LINE> title_label.set_use_markup(True) <NEW_LINE> vbox.pack_start(title_label, False) <NEW_LINE> commet_box = gtk.VBox() <NEW_LINE> commet_label = gtk.Label('<i>%s</i>' % comment) <NEW_LINE> commet_label.set_use_markup(True) <NEW_LINE> commet_label.set_line_wrap(True) <NEW_LINE> commet_box.pack_start(commet_label, False, padding=30) <NEW_LINE> vbox.pack_start(commet_box, True) <NEW_LINE> box = gtk.HBox() <NEW_LINE> student_label = gtk.Label('%s' % student) <NEW_LINE> box.pack_end(student_label, False) <NEW_LINE> evaluation = evaluation.split('|') <NEW_LINE> evaluation = '<b>%s</b>\n%s' % (evaluation[0], evaluation[1]) <NEW_LINE> evaluation_t = gtk.Label('%s' % evaluation) <NEW_LINE> evaluation_t.set_use_markup(True) <NEW_LINE> evaluation_t.set_line_wrap_mode(gtk.WRAP_WORD) <NEW_LINE> evaluation_t.set_line_wrap(True) <NEW_LINE> box.pack_start(evaluation_t, False) <NEW_LINE> bbox = gtk.HBox() <NEW_LINE> self.content_vbox.pack_end(bbox, False) <NEW_LINE> self.content_vbox.pack_end(box, False) <NEW_LINE> self.show_all() | Crea un dialog con la informacion de las tareas domicialiarias | 62598fb316aa5153ce4005a5 |
class RequestViewSet(viewsets.ModelViewSet): <NEW_LINE> <INDENT> renderer_classes = (JSONRenderer, JSONPRenderer, BrowsableAPIRenderer, PaginatedCSVRenderer) <NEW_LINE> serializer_class = RequestSerializer <NEW_LINE> permission_classes = (IsAuthenticatedOrWriteOnly,) <NEW_LINE> filter_backends = (filters.DjangoFilterBackend,) <NEW_LINE> filter_fields = ('distribution_site',) <NEW_LINE> def get_queryset(self): <NEW_LINE> <INDENT> ids = self.request.user.eventdistributionsitedetails_set.all().values_list('id', flat=True) <NEW_LINE> return Request.objects.filter(event_distribution_site_details__pk__in=ids).filter(event_distribution_site_details__event__active=True) | API endpoint that allows users to be viewed or edited. | 62598fb326068e7796d4c9f8 |
class TowerTile: <NEW_LINE> <INDENT> image = pygame.image.load('Tower.png') <NEW_LINE> def __init__(self,x,y): <NEW_LINE> <INDENT> self.x = x <NEW_LINE> self.y = y <NEW_LINE> self.damage = 1 <NEW_LINE> self.speed = 1 <NEW_LINE> self.angle = None <NEW_LINE> self.damage_max=False <NEW_LINE> self.rate_max=False <NEW_LINE> self.time_elapsed_since_last_action = 0 <NEW_LINE> self.should_shoot = False <NEW_LINE> self.clock = pygame.time.Clock() <NEW_LINE> <DEDENT> def set_angle(self,x,y): <NEW_LINE> <INDENT> dx = x - (self.x+20) <NEW_LINE> dy = y - (self.y+20) <NEW_LINE> rads = atan2(-dy,dx) <NEW_LINE> rads %= 2*pi <NEW_LINE> self.angle = degrees(rads) <NEW_LINE> <DEDENT> def update(self): <NEW_LINE> <INDENT> if self.angle == None: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> dt = self.clock.tick() <NEW_LINE> self.time_elapsed_since_last_action += dt <NEW_LINE> if self.time_elapsed_since_last_action > (1000/self.speed): <NEW_LINE> <INDENT> self.should_shoot = True <NEW_LINE> self.time_elapsed_since_last_action = 0 <NEW_LINE> <DEDENT> <DEDENT> def damage_upgrade(self): <NEW_LINE> <INDENT> self.damage +=1 | encodes the state of a tower within a game, keeping track of damage, speed, angle of shots
and timing of the shots | 62598fb3ff9c53063f51a6ef |
class SrgShowImRoleEnum(Enum): <NEW_LINE> <INDENT> none = 0 <NEW_LINE> master = 1 <NEW_LINE> slave = 2 <NEW_LINE> @staticmethod <NEW_LINE> def _meta_info(): <NEW_LINE> <INDENT> from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_subscriber_srg_oper as meta <NEW_LINE> return meta._meta_table['SrgShowImRoleEnum'] | SrgShowImRoleEnum
SRG Interface Management Role
.. data:: none = 0
Not Determined
.. data:: master = 1
Master Role
.. data:: slave = 2
Slave Role | 62598fb3d486a94d0ba2c073 |
class getPlanet_args: <NEW_LINE> <INDENT> thrift_spec = ( None, None, None, None, None, None, None, None, None, None, (10, TType.STRUCT, 'coord', (Coord, Coord.thrift_spec), None, ), ) <NEW_LINE> def __init__(self, coord=None,): <NEW_LINE> <INDENT> self.coord = coord <NEW_LINE> <DEDENT> def read(self, iprot): <NEW_LINE> <INDENT> if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None: <NEW_LINE> <INDENT> fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec)) <NEW_LINE> return <NEW_LINE> <DEDENT> iprot.readStructBegin() <NEW_LINE> while True: <NEW_LINE> <INDENT> (fname, ftype, fid) = iprot.readFieldBegin() <NEW_LINE> if ftype == TType.STOP: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> if fid == 10: <NEW_LINE> <INDENT> if ftype == TType.STRUCT: <NEW_LINE> <INDENT> self.coord = Coord() <NEW_LINE> self.coord.read(iprot) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> iprot.readFieldEnd() <NEW_LINE> <DEDENT> iprot.readStructEnd() <NEW_LINE> <DEDENT> def write(self, oprot): <NEW_LINE> <INDENT> if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None: <NEW_LINE> <INDENT> oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))) <NEW_LINE> return <NEW_LINE> <DEDENT> oprot.writeStructBegin('getPlanet_args') <NEW_LINE> if self.coord is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('coord', TType.STRUCT, 10) <NEW_LINE> self.coord.write(oprot) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> oprot.writeFieldStop() <NEW_LINE> oprot.writeStructEnd() <NEW_LINE> <DEDENT> def validate(self): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> L = ['%s=%r' % (key, value) for key, value 
in self.__dict__.iteritems()] <NEW_LINE> return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not (self == other) | Attributes:
- coord | 62598fb367a9b606de546071 |
class FakeSession: <NEW_LINE> <INDENT> def __init__(self, connector=None, responses=(), websocket=None): <NEW_LINE> <INDENT> self.connector = connector <NEW_LINE> self.responses = list(responses) <NEW_LINE> self.websocket = websocket <NEW_LINE> self.calls = [] <NEW_LINE> <DEDENT> async def request( self, method, path, params=None, headers=None, json=None, data=None ): <NEW_LINE> <INDENT> content = json <NEW_LINE> if data: <NEW_LINE> <INDENT> content = data.read() <NEW_LINE> <DEDENT> self.calls.append((method, path, params, headers, content)) <NEW_LINE> response_content = self.responses.pop(0) <NEW_LINE> if isinstance(response_content, ClientResponse): <NEW_LINE> <INDENT> return response_content <NEW_LINE> <DEDENT> return make_http_response(method=method, url=path, content=response_content) <NEW_LINE> <DEDENT> def ws_connect(self, path): <NEW_LINE> <INDENT> return self.websocket <NEW_LINE> <DEDENT> async def close(self): <NEW_LINE> <INDENT> pass | A fake session class. | 62598fb37d847024c075c465 |
class AvaliacaoAPIView(APIView): <NEW_LINE> <INDENT> def get(self, request): <NEW_LINE> <INDENT> avaliacoes = Avaliacao.objects.all() <NEW_LINE> serializer = AvaliacaoSerializer(avaliacoes, many=True) <NEW_LINE> return Response(serializer.data) <NEW_LINE> def post(self, request): <NEW_LINE> <INDENT> serializer = AvaliacaoSerializer(data=request.data) <NEW_LINE> serializer.is_valid(raise_exception=True) <NEW_LINE> serializer.save() <NEW_LINE> return Response(serializer.data, status=status.HTTP_201_CREATED) | API de Avaliações | 62598fb3e1aae11d1e7ce875 |
class LoginView(BaseView): <NEW_LINE> <INDENT> def __init__(self, user_id, query_params): <NEW_LINE> <INDENT> self.user_id = user_id <NEW_LINE> self.query_params = query_params <NEW_LINE> <DEDENT> def get(self, headers, data): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.user_id = validate_int(self.user_id, 31) <NEW_LINE> <DEDENT> except ValidationError: <NEW_LINE> <INDENT> return ResponseBadRequest() <NEW_LINE> <DEDENT> token = singletons['users'].login(self.user_id) <NEW_LINE> return Response(200, {}, token) | View objects are created per request | 62598fb357b8e32f5250816d |
class CSVSheetWriter(SheetWriter): <NEW_LINE> <INDENT> def __init__(self, filename, name, encoding="utf-8", single_sheet_in_book=False, sheet_index=None, **keywords): <NEW_LINE> <INDENT> self.encoding = encoding <NEW_LINE> self.sheet_name = name <NEW_LINE> self.single_sheet_in_book = single_sheet_in_book <NEW_LINE> self.line_terminator = '\r\n' <NEW_LINE> if KEYWORD_LINE_TERMINATOR in keywords: <NEW_LINE> <INDENT> self.line_terminator = keywords[KEYWORD_LINE_TERMINATOR] <NEW_LINE> <DEDENT> if single_sheet_in_book: <NEW_LINE> <INDENT> self.sheet_name = None <NEW_LINE> <DEDENT> self.sheet_index = sheet_index <NEW_LINE> SheetWriter.__init__(self, filename, self.sheet_name, self.sheet_name, **keywords) <NEW_LINE> <DEDENT> def write_row(self, array): <NEW_LINE> <INDENT> self.writer.writerow(array) | csv file writer | 62598fb330bbd722464699ca |
class Decorator(SymbolNode, Statement): <NEW_LINE> <INDENT> func = None <NEW_LINE> decorators = None <NEW_LINE> var = None <NEW_LINE> is_overload = False <NEW_LINE> def __init__(self, func: FuncDef, decorators: List[Expression], var: 'Var') -> None: <NEW_LINE> <INDENT> self.func = func <NEW_LINE> self.decorators = decorators <NEW_LINE> self.var = var <NEW_LINE> self.is_overload = False <NEW_LINE> <DEDENT> def name(self) -> str: <NEW_LINE> <INDENT> return self.func.name() <NEW_LINE> <DEDENT> def fullname(self) -> str: <NEW_LINE> <INDENT> return self.func.fullname() <NEW_LINE> <DEDENT> def accept(self, visitor: NodeVisitor[T]) -> T: <NEW_LINE> <INDENT> return visitor.visit_decorator(self) <NEW_LINE> <DEDENT> def serialize(self) -> JsonDict: <NEW_LINE> <INDENT> return {'.class': 'Decorator', 'func': self.func.serialize(), 'var': self.var.serialize(), 'is_overload': self.is_overload, } <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def deserialize(cls, data: JsonDict) -> 'Decorator': <NEW_LINE> <INDENT> assert data['.class'] == 'Decorator' <NEW_LINE> dec = Decorator(FuncDef.deserialize(data['func']), [], Var.deserialize(data['var'])) <NEW_LINE> dec.is_overload = data['is_overload'] <NEW_LINE> return dec | A decorated function.
A single Decorator object can include any number of function decorators. | 62598fb3a219f33f346c68a7 |
class TPrimitiveTypeEntry(object): <NEW_LINE> <INDENT> thrift_spec = ( None, (1, TType.I32, 'type', None, None, ), (2, TType.STRUCT, 'typeQualifiers', (TTypeQualifiers, TTypeQualifiers.thrift_spec), None, ), ) <NEW_LINE> def __init__(self, type=None, typeQualifiers=None,): <NEW_LINE> <INDENT> self.type = type <NEW_LINE> self.typeQualifiers = typeQualifiers <NEW_LINE> <DEDENT> def read(self, iprot): <NEW_LINE> <INDENT> if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None: <NEW_LINE> <INDENT> fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec)) <NEW_LINE> return <NEW_LINE> <DEDENT> iprot.readStructBegin() <NEW_LINE> while True: <NEW_LINE> <INDENT> (fname, ftype, fid) = iprot.readFieldBegin() <NEW_LINE> if ftype == TType.STOP: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> if fid == 1: <NEW_LINE> <INDENT> if ftype == TType.I32: <NEW_LINE> <INDENT> self.type = iprot.readI32(); <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> elif fid == 2: <NEW_LINE> <INDENT> if ftype == TType.STRUCT: <NEW_LINE> <INDENT> self.typeQualifiers = TTypeQualifiers() <NEW_LINE> self.typeQualifiers.read(iprot) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> iprot.readFieldEnd() <NEW_LINE> <DEDENT> iprot.readStructEnd() <NEW_LINE> <DEDENT> def write(self, oprot): <NEW_LINE> <INDENT> if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None: <NEW_LINE> <INDENT> oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))) <NEW_LINE> return <NEW_LINE> <DEDENT> oprot.writeStructBegin('TPrimitiveTypeEntry') <NEW_LINE> if self.type is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('type', TType.I32, 
1) <NEW_LINE> oprot.writeI32(self.type) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> if self.typeQualifiers is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('typeQualifiers', TType.STRUCT, 2) <NEW_LINE> self.typeQualifiers.write(oprot) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> oprot.writeFieldStop() <NEW_LINE> oprot.writeStructEnd() <NEW_LINE> <DEDENT> def validate(self): <NEW_LINE> <INDENT> if self.type is None: <NEW_LINE> <INDENT> raise TProtocol.TProtocolException(message='Required field type is unset!') <NEW_LINE> <DEDENT> return <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> L = ['%s=%r' % (key, value) for key, value in self.__dict__.iteritems()] <NEW_LINE> return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not (self == other) | Attributes:
- type
- typeQualifiers | 62598fb34a966d76dd5eef7a |
class EdiSoapMapping(orm.Model): <NEW_LINE> <INDENT> _name = 'edi.soap.mapping' <NEW_LINE> _description = 'EDI Soap Mapping' <NEW_LINE> _rec_name = 'name' <NEW_LINE> _order = 'connection_id,default_code' <NEW_LINE> def onchange_product_id(self, cr, uid, ids, product_id, context=None): <NEW_LINE> <INDENT> if not product_id: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> product_pool = self.pool.get('product.product') <NEW_LINE> product = product_pool.browse(cr, uid, product_id, context=context) <NEW_LINE> if product.chunk: <NEW_LINE> <INDENT> chunk = product.chunk <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> chunk = product_pool.get_chunk(product) <NEW_LINE> product_pool.write(cr, uid, [product_id], { 'chunk': chunk, }, context=context) <NEW_LINE> <DEDENT> return False <NEW_LINE> <DEDENT> def onchange_default_code(self, cr, uid, ids, default_code, context=None): <NEW_LINE> <INDENT> res = {} <NEW_LINE> product_pool = self.pool.get('product.product') <NEW_LINE> product_ids = product_pool.search(cr, uid, [ ('default_code', 'ilike', default_code), ], context=context) <NEW_LINE> if not product_ids: <NEW_LINE> <INDENT> return res <NEW_LINE> <DEDENT> if len(product_ids) == 1: <NEW_LINE> <INDENT> res['value'] = { 'product_id': product_ids[0], } <NEW_LINE> res['domain'] = { 'product_id': [], } <NEW_LINE> self.onchange_product_id( cr, uid, ids, product_ids[0], context=context) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> res['domain'] = { 'product_id': [('id', 'in', product_ids)], } <NEW_LINE> <DEDENT> return res <NEW_LINE> <DEDENT> _columns = { 'name': fields.char('Customer code', size=64, required=True), 'variable_weight': fields.boolean('Variable weight'), 'default_code': fields.char('Company code', size=64), 'product_id': fields.many2one( 'product.product', 'Product', required=True), 'connection_id': fields.many2one( 'edi.soap.connection', 'Connection', required=True), 'duty_code': fields.related( 'product_id', 'duty_code', type='char', string='Duty code'), 
'chunk': fields.related( 'product_id', 'chunk', type='integer', string='Chunk per pack'), } | Soap Parameter for connection
| 62598fb38e7ae83300ee9147 |
class Author(db.Model): <NEW_LINE> <INDENT> __tablename__='authors' <NEW_LINE> id=db.Column(db.Integer,primary_key=True) <NEW_LINE> name=db.Column(db.String(64),unique=True) <NEW_LINE> books=db.relationship('Book',backref='author',lazy='dynamic') | 作者模型类 | 62598fb356ac1b37e630228e |
class Sampler(object): <NEW_LINE> <INDENT> def __init__(self, data_source, shuffle=True): <NEW_LINE> <INDENT> self.data_source = data_source <NEW_LINE> self.shuffle = shuffle <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> data_idxs = np.arange(len(self.data_source)) <NEW_LINE> if self.shuffle: <NEW_LINE> <INDENT> np.random.shuffle(data_idxs) <NEW_LINE> <DEDENT> for idx in data_idxs: <NEW_LINE> <INDENT> yield idx | Our sampler random selects one element from our data source when shuffle=True,
and it will only iterate our data source only once. | 62598fb34527f215b58e9f78 |
class ChassisConfigurationActions(object): <NEW_LINE> <INDENT> def __init__(self, cli_service, logger): <NEW_LINE> <INDENT> self._cli_service = cli_service <NEW_LINE> self._logger = logger <NEW_LINE> <DEDENT> def set_chassis_name(self, chassis_name): <NEW_LINE> <INDENT> output = CommandTemplateExecutor(self._cli_service, command_template.SET_CHASSIS_NAME).execute_command( chassis_name=chassis_name) <NEW_LINE> return output | Chassis configuration actions | 62598fb3442bda511e95c4fb |
class ApiGetVfsFileContentUpdateStateHandlerRegressionTest( api_regression_test_lib.ApiRegressionTest, vfs_plugin_test.VfsTestMixin): <NEW_LINE> <INDENT> api_method = "GetVfsFileContentUpdateState" <NEW_LINE> handler = vfs_plugin.ApiGetVfsFileContentUpdateStateHandler <NEW_LINE> def Run(self): <NEW_LINE> <INDENT> client_urn = self.SetupClient(0) <NEW_LINE> client_id = client_urn.Basename() <NEW_LINE> acl_test_lib.CreateUser(self.token.username) <NEW_LINE> running_flow_id = self.CreateMultiGetFileFlow( client_urn, file_path="fs/os/c/bin/bash", token=self.token) <NEW_LINE> finished_flow_id = self.CreateMultiGetFileFlow( client_urn, file_path="fs/os/c/bin/bash", token=self.token) <NEW_LINE> if data_store.RelationalDBEnabled(): <NEW_LINE> <INDENT> flow_base.TerminateFlow(client_id, finished_flow_id, reason="Fake Error") <NEW_LINE> non_update_flow_id = flow.StartFlow( client_id=client_id, flow_cls=discovery.Interrogate) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> finished_flow_urn = client_urn.Add("flows").Add(finished_flow_id) <NEW_LINE> with aff4.FACTORY.Open( finished_flow_urn, aff4_type=flow.GRRFlow, mode="rw", token=self.token) as flow_obj: <NEW_LINE> <INDENT> flow_obj.GetRunner().Error("Fake error") <NEW_LINE> <DEDENT> non_update_flow_id = flow.StartAFF4Flow( client_id=client_urn, flow_name=discovery.Interrogate.__name__, token=self.token).Basename() <NEW_LINE> <DEDENT> unknown_flow_id = "F:12345678" <NEW_LINE> self.Check( "GetVfsFileContentUpdateState", args=vfs_plugin.ApiGetVfsFileContentUpdateStateArgs( client_id=client_id, operation_id=running_flow_id), replace={running_flow_id: "W:ABCDEF"}) <NEW_LINE> self.Check( "GetVfsFileContentUpdateState", args=vfs_plugin.ApiGetVfsFileContentUpdateStateArgs( client_id=client_id, operation_id=finished_flow_id), replace={finished_flow_id: "W:ABCDEF"}) <NEW_LINE> self.Check( "GetVfsFileContentUpdateState", args=vfs_plugin.ApiGetVfsFileContentUpdateStateArgs( client_id=client_id, operation_id=non_update_flow_id), 
replace={non_update_flow_id: "W:ABCDEF"}) <NEW_LINE> self.Check( "GetVfsFileContentUpdateState", args=vfs_plugin.ApiGetVfsFileContentUpdateStateArgs( client_id=client_id, operation_id=unknown_flow_id), replace={unknown_flow_id: "W:ABCDEF"}) | Regression test for ApiGetVfsFileContentUpdateStateHandler. | 62598fb3ec188e330fdf8933 |
class HammingRewardSampler(nn.Module): <NEW_LINE> <INDENT> def __init__(self, opt): <NEW_LINE> <INDENT> super(HammingRewardSampler, self).__init__() <NEW_LINE> self.logger = opt.logger <NEW_LINE> self.alpha = opt.alpha <NEW_LINE> assert self.alpha > 0, 'set alpha to a nonzero value, otherwise use the default loss' <NEW_LINE> self.tau = opt.tau_sent <NEW_LINE> self.scale_loss = opt.scale_loss <NEW_LINE> <DEDENT> def forward(self, input, target, mask, scores=None): <NEW_LINE> <INDENT> N = input.size(0) <NEW_LINE> seq_length = input.size(1) <NEW_LINE> target = target[:, :seq_length] <NEW_LINE> mask = mask[:, :seq_length] <NEW_LINE> if self.scale_loss: <NEW_LINE> <INDENT> row_scores = scores.repeat(1, seq_length) <NEW_LINE> mask = torch.mul(mask, row_scores) <NEW_LINE> <DEDENT> ml_output = get_ml_loss(input, target, mask) <NEW_LINE> V = 30 <NEW_LINE> distrib = [binom(seq_length, e) * ((V-1) * math.exp(-1/self.tau))**(e-seq_length) for e in range(seq_length+1)] <NEW_LINE> select = np.random.choice(a=np.arange(seq_length+1), p=distrib/sum(distrib)) <NEW_LINE> score = math.exp(-select / self.tau) <NEW_LINE> self.logger.debug("exp-neg Hamming distances (d=%d): %.2e" % (select, score)) <NEW_LINE> scores = np.ones((N, seq_length), dtype="float32") * score <NEW_LINE> smooth_target = Variable(torch.from_numpy(scores).view(-1, 1)).cuda().float() <NEW_LINE> refs = target.cpu().data.numpy() <NEW_LINE> preds = refs <NEW_LINE> change_index = np.random.randint(seq_length, size=(N, select)) <NEW_LINE> rows = np.arange(N).reshape(-1, 1).repeat(select, axis=1) <NEW_LINE> select_index = np.random.randint(self.vocab_size, size=(N, select)) <NEW_LINE> preds[rows, change_index] = select_index <NEW_LINE> preds = Variable(torch.from_numpy(preds)).cuda() <NEW_LINE> preds = to_contiguous(preds).view(-1, 1) <NEW_LINE> output = - input.gather(1, preds) * mask * smooth_target <NEW_LINE> if torch.sum(smooth_target * mask).data[0] > 0: <NEW_LINE> <INDENT> output = torch.sum(output) / 
torch.sum(smooth_target * mask) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.logger.warn("Smooth targets weights sum to 0") <NEW_LINE> output = torch.sum(output) <NEW_LINE> <DEDENT> return ml_output, self.alpha * output + (1 - self.alpha) * ml_output | Sampling the sentences wtr the reward distribution
instead of the captionig model itself | 62598fb3009cb60464d015c5 |
class ErrorResponse(msrest.serialization.Model): <NEW_LINE> <INDENT> _attribute_map = { 'error': {'key': 'error', 'type': 'ErrorDefinition'}, } <NEW_LINE> def __init__( self, *, error: Optional["ErrorDefinition"] = None, **kwargs ): <NEW_LINE> <INDENT> super(ErrorResponse, self).__init__(**kwargs) <NEW_LINE> self.error = error | Error response.
:param error: The error details.
:type error: ~azure.mgmt.managedservices.models.ErrorDefinition | 62598fb37047854f4633f47f |
class RemoteButtons(Peripheral): <NEW_LINE> <INDENT> _sensor_id = 0x0037 <NEW_LINE> Port = Enum('Port', 'L R', start=0) <NEW_LINE> Button = IntEnum('Button', 'PLUS RED MINUS', start=0) <NEW_LINE> capability = Enum('capability', {'sense_press':4},) <NEW_LINE> datasets = { capability.sense_press: (3,1) } <NEW_LINE> allowed_combo = [] <NEW_LINE> def __init__(self, name, port=None, capabilities=[]): <NEW_LINE> <INDENT> if port: <NEW_LINE> <INDENT> port = port.value <NEW_LINE> <DEDENT> super().__init__(name, port, capabilities) <NEW_LINE> <DEDENT> def plus_pressed(self): <NEW_LINE> <INDENT> button_list = self.value[self.capability.sense_press] <NEW_LINE> return button_list[self.Button.PLUS] == 1 <NEW_LINE> <DEDENT> def minus_pressed(self): <NEW_LINE> <INDENT> button_list = self.value[self.capability.sense_press] <NEW_LINE> return button_list[self.Button.MINUS] == 1 <NEW_LINE> <DEDENT> def red_pressed(self): <NEW_LINE> <INDENT> button_list = self.value[self.capability.sense_press] <NEW_LINE> return button_list[self.Button.RED] == 1 | Represents one set of '+', '-', 'red' buttons on the PoweredHub Remote
Each remote has two sets of buttons, on the left and right side. Pick the one
your want to attach to by using the port argument with either Port.L or Port.R.
There are actually a few different modes that the hardware supports, but we are
only going to use one of them called 'KEYSD' (see the notes in the documentation on the
raw values reported by the hub). This mode makes the remote send three values back
in a list. To access each button state, there are three helper methods provided
(see below)
Examples::
# Basic connection to the left buttons
@attach(RemoteButtons, name='left_buttons', port=RemoteButtons.Port.L)
# Getting values back in the handler
async def left_buttons_change(self):
is_plus_pressed = self.left_buttons.plus_pressed()
is_minus_pressed = self.left_buttons.minus_pressed()
is_red_pressed = self.left_buttons.red_pressed() | 62598fb3460517430c4320b0 |
class TestUtil(unittest.TestCase): <NEW_LINE> <INDENT> def test_get_relays_in_country(self): <NEW_LINE> <INDENT> relays1 = util.get_relays_in_country("at") <NEW_LINE> self.assertTrue(len(relays1) > 0) <NEW_LINE> relays2 = util.get_relays_in_country("AT") <NEW_LINE> self.assertTrue(len(relays1) == len(relays2)) <NEW_LINE> bogus = util.get_relays_in_country("foo") <NEW_LINE> self.assertEqual(bogus, []) <NEW_LINE> <DEDENT> def test_get_source_port(self): <NEW_LINE> <INDENT> self.assertEqual(util.get_source_port("SOURCE_ADDR=" "255.255.255.255:0"), 0) <NEW_LINE> self.assertEqual(util.get_source_port("SOURCE_ADDR=1.1.1.1:1"), 1) <NEW_LINE> self.assertEqual(util.get_source_port("SOURCE_ADDR=1.1.1.1:" "65535"), 65535) <NEW_LINE> <DEDENT> def test_exiturl(self): <NEW_LINE> <INDENT> self.assertEqual(util.exiturl("foo"), "<https://atlas.torproject.or" "g/#details/foo>") <NEW_LINE> self.assertEqual(util.exiturl(4), "<https://atlas.torproject.org/#det" "ails/4>") <NEW_LINE> <DEDENT> def test_extract_pattern(self): <NEW_LINE> <INDENT> extract_pattern1 = util.extract_pattern("Connection on fd 4 originat" "ing from 444:0000", "Connec" "tion on fd [0-9]+ originati" "ng from [^:]+:([0-9]{1,5})") <NEW_LINE> self.assertEqual(extract_pattern1, "0000") <NEW_LINE> <DEDENT> def test_new_request(self): <NEW_LINE> <INDENT> result = util.new_request("https://atlas.torproject.org", "test") <NEW_LINE> self.assertEqual("https://atlas.torproject.org", result.get_full_url()) <NEW_LINE> self.assertTrue(result.has_header("User-agent")) <NEW_LINE> self.assertTrue(result.has_header("Accept")) <NEW_LINE> self.assertTrue(result.has_header("Accept-language")) <NEW_LINE> self.assertTrue(result.has_header("Accept-encoding")) <NEW_LINE> self.assertTrue(result.has_header("Connection")) <NEW_LINE> <DEDENT> def test_parse_log_lines(self): <NEW_LINE> <INDENT> ports = {"socks": -1, "control": -1} <NEW_LINE> util.parse_log_lines(ports, "Socks listener listening on port 8000.") <NEW_LINE> 
util.parse_log_lines(ports, "Control listener listening on port 9000.") <NEW_LINE> self.assertEqual(ports["socks"], 8000) <NEW_LINE> self.assertEqual(ports["control"], 9000) | Test the util module. | 62598fb33346ee7daa337699 |
class InfoTagVideo: <NEW_LINE> <INDENT> def __init__(self) -> None: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def getDbId(self) -> int: <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> def getDirector(self) -> str: <NEW_LINE> <INDENT> return "" <NEW_LINE> <DEDENT> def getWritingCredits(self) -> str: <NEW_LINE> <INDENT> return "" <NEW_LINE> <DEDENT> def getGenre(self) -> str: <NEW_LINE> <INDENT> return "" <NEW_LINE> <DEDENT> def getTagLine(self) -> str: <NEW_LINE> <INDENT> return "" <NEW_LINE> <DEDENT> def getPlotOutline(self) -> str: <NEW_LINE> <INDENT> return "" <NEW_LINE> <DEDENT> def getPlot(self) -> str: <NEW_LINE> <INDENT> return "" <NEW_LINE> <DEDENT> def getPictureURL(self) -> str: <NEW_LINE> <INDENT> return "" <NEW_LINE> <DEDENT> def getTitle(self) -> str: <NEW_LINE> <INDENT> return "" <NEW_LINE> <DEDENT> def getTVShowTitle(self) -> str: <NEW_LINE> <INDENT> return "" <NEW_LINE> <DEDENT> def getMediaType(self) -> str: <NEW_LINE> <INDENT> return "" <NEW_LINE> <DEDENT> def getVotes(self) -> str: <NEW_LINE> <INDENT> return "" <NEW_LINE> <DEDENT> def getCast(self) -> str: <NEW_LINE> <INDENT> return "" <NEW_LINE> <DEDENT> def getFile(self) -> str: <NEW_LINE> <INDENT> return "" <NEW_LINE> <DEDENT> def getPath(self) -> str: <NEW_LINE> <INDENT> return "" <NEW_LINE> <DEDENT> def getFilenameAndPath(self) -> str: <NEW_LINE> <INDENT> return "" <NEW_LINE> <DEDENT> def getIMDBNumber(self) -> str: <NEW_LINE> <INDENT> return "" <NEW_LINE> <DEDENT> def getSeason(self) -> int: <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> def getEpisode(self) -> int: <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> def getYear(self) -> int: <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> def getRating(self) -> float: <NEW_LINE> <INDENT> return 0.0 <NEW_LINE> <DEDENT> def getUserRating(self) -> int: <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> def getPlayCount(self) -> int: <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> def getLastPlayed(self) -> str: <NEW_LINE> <INDENT> return "" 
<NEW_LINE> <DEDENT> def getOriginalTitle(self) -> str: <NEW_LINE> <INDENT> return "" <NEW_LINE> <DEDENT> def getPremiered(self) -> str: <NEW_LINE> <INDENT> return "" <NEW_LINE> <DEDENT> def getFirstAired(self) -> str: <NEW_LINE> <INDENT> return "" <NEW_LINE> <DEDENT> def getTrailer(self) -> str: <NEW_LINE> <INDENT> return "" <NEW_LINE> <DEDENT> def getArtist(self) -> List[str]: <NEW_LINE> <INDENT> return [""] <NEW_LINE> <DEDENT> def getAlbum(self) -> str: <NEW_LINE> <INDENT> return "" <NEW_LINE> <DEDENT> def getTrack(self) -> int: <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> def getDuration(self) -> int: <NEW_LINE> <INDENT> return 0 | **Kodi's video info tag class.**
To get video info tag data of currently played source.
.. note::
Info tag load is only possible from present player class.
Example::
...
tag = xbmc.Player().getVideoInfoTag()
title = tag.getTitle()
file = tag.getFile()
... | 62598fb37d43ff2487427454 |
class ChannelSELayer(nn.Module): <NEW_LINE> <INDENT> def __init__( self, spatial_dims: int, in_channels: int, r: int = 2, acti_type_1: Union[Tuple[str, Dict], str] = ("relu", {"inplace": True}), acti_type_2: Union[Tuple[str, Dict], str] = "sigmoid", add_residual: bool = False, ) -> None: <NEW_LINE> <INDENT> super(ChannelSELayer, self).__init__() <NEW_LINE> self.add_residual = add_residual <NEW_LINE> pool_type = Pool[Pool.ADAPTIVEAVG, spatial_dims] <NEW_LINE> self.avg_pool = pool_type(1) <NEW_LINE> channels = int(in_channels // r) <NEW_LINE> if channels <= 0: <NEW_LINE> <INDENT> raise ValueError(f"r must be positive and smaller than in_channels, got r={r} in_channels={in_channels}.") <NEW_LINE> <DEDENT> act_1, act_1_args = split_args(acti_type_1) <NEW_LINE> act_2, act_2_args = split_args(acti_type_2) <NEW_LINE> self.fc = nn.Sequential( nn.Linear(in_channels, channels, bias=True), Act[act_1](**act_1_args), nn.Linear(channels, in_channels, bias=True), Act[act_2](**act_2_args), ) <NEW_LINE> <DEDENT> def forward(self, x: torch.Tensor) -> torch.Tensor: <NEW_LINE> <INDENT> b, c = x.shape[:2] <NEW_LINE> y: torch.Tensor = self.avg_pool(x).view(b, c) <NEW_LINE> y = self.fc(y).view([b, c] + [1] * (x.ndim - 2)) <NEW_LINE> result = x * y <NEW_LINE> if self.add_residual: <NEW_LINE> <INDENT> result += x <NEW_LINE> <DEDENT> return result | Re-implementation of the Squeeze-and-Excitation block based on:
"Hu et al., Squeeze-and-Excitation Networks, https://arxiv.org/abs/1709.01507". | 62598fb3a17c0f6771d5c2d9 |
class rule_001(deprecated_rule.Rule): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> deprecated_rule.Rule.__init__(self, 'attribute', '001') <NEW_LINE> self.message.append('Rule ' + self.unique_id + ' has been superceeded by attribute_declaration_300 and attribute_specification_300.') | This rule has been superseded by:
* `attribute_declaration_300 <attribute_declaration_rules.html#attribute-declaration-300>`_
* `attribute_specification_300 <attribute_specification_rules.html#attribute-specification-300>`_ | 62598fb34e4d5625663724cb |
class AccountController(BaseController): <NEW_LINE> <INDENT> def create_view_account(self, date): <NEW_LINE> <INDENT> _query_builder = Configuration.base_uri <NEW_LINE> _query_builder += '/accounts/viewaccount.json' <NEW_LINE> _query_url = APIHelper.clean_url(_query_builder) <NEW_LINE> _form_parameters = { 'Date': date } <NEW_LINE> _request = self.http_client.post(_query_url, parameters=_form_parameters) <NEW_LINE> BasicAuth.apply(_request) <NEW_LINE> _context = self.execute_request(_request) <NEW_LINE> self.validate_response(_context) <NEW_LINE> return _context.response.raw_body | A Controller to access Endpoints in the ytelapi API. | 62598fb3d58c6744b42dc32b |
class TestCustomClass(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> class PersonsGrid(Djid): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> djid_id = 'persons' <NEW_LINE> query_set = Person.objects.all() <NEW_LINE> <DEDENT> first_name = CharColumn(label='First Name') <NEW_LINE> last_name = CharColumn(label='Last Name') <NEW_LINE> registered = DateTimeColumn(label='Registered Date') <NEW_LINE> <DEDENT> self.PersonsGrid = PersonsGrid <NEW_LINE> <DEDENT> def test_columns(self): <NEW_LINE> <INDENT> self.assertListEqual( list(self.PersonsGrid._meta.column_dict.keys()), ['first_name', 'last_name', 'registered'] ) <NEW_LINE> <DEDENT> def test_col_names(self): <NEW_LINE> <INDENT> self.assertListEqual( self.PersonsGrid.col_names(), ['First Name', 'Last Name', 'Registered Date'], ) <NEW_LINE> <DEDENT> def test_col_model(self): <NEW_LINE> <INDENT> self.assertEqual(len(self.PersonsGrid.col_model()), 3) | Test for a class with all custom columns. | 62598fb3f9cc0f698b1c531d |
class BlogEntry(EntryAbstractClass): <NEW_LINE> <INDENT> within_serie = models.ForeignKey(BlogSerie, null=True, blank=True, related_name='entries') <NEW_LINE> @property <NEW_LINE> def real_image(self): <NEW_LINE> <INDENT> if self.image: <NEW_LINE> <INDENT> return self.image <NEW_LINE> <DEDENT> if self.within_serie is not None: <NEW_LINE> <INDENT> return self.within_serie.image <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def serie_index(self): <NEW_LINE> <INDENT> if self.within_serie is None: <NEW_LINE> <INDENT> return 1 <NEW_LINE> <DEDENT> return (self.within_serie.entries .filter(creation_date__lt=self.creation_date) .exclude(pk=self.pk) .count()) + 1 <NEW_LINE> <DEDENT> def relative_entry_in_serie(self, offset): <NEW_LINE> <INDENT> if self.within_serie is None: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> return self.within_serie.nth_entry(self.serie_index + offset) <NEW_LINE> <DEDENT> @property <NEW_LINE> def next_entry_in_serie(self): <NEW_LINE> <INDENT> return self.relative_entry_in_serie(1) <NEW_LINE> <DEDENT> @property <NEW_LINE> def previous_entry_in_serie(self): <NEW_LINE> <INDENT> return self.relative_entry_in_serie(-1) <NEW_LINE> <DEDENT> class Meta(EntryAbstractClass.Meta): <NEW_LINE> <INDENT> abstract = True | Represents a blog entry. Adds an optional `serie` field to the default
Zinnia model. | 62598fb367a9b606de546073 |
class PredictionClient(): <NEW_LINE> <INDENT> BATCH_SIZE = 20 <NEW_LINE> def provide_input(self) -> List[tf.Tensor]: <NEW_LINE> <INDENT> with tf.name_scope('loading'): <NEW_LINE> <INDENT> prediction_input, expected_result = get_data_from_tfrecord( "./data/test.tfrecord", self.BATCH_SIZE).get_next() <NEW_LINE> <DEDENT> with tf.name_scope('pre-processing'): <NEW_LINE> <INDENT> prediction_input = tf.reshape( prediction_input, shape=(self.BATCH_SIZE, 784)) <NEW_LINE> expected_result = tf.reshape(expected_result, shape=(self.BATCH_SIZE,)) <NEW_LINE> <DEDENT> return [prediction_input, expected_result] <NEW_LINE> <DEDENT> def receive_output(self, likelihoods: tf.Tensor, y_true: tf.Tensor): <NEW_LINE> <INDENT> with tf.name_scope('post-processing'): <NEW_LINE> <INDENT> prediction = tf.argmax(likelihoods, axis=1) <NEW_LINE> eq_values = tf.equal(prediction, tf.cast(y_true, tf.int64)) <NEW_LINE> acc = tf.reduce_mean(tf.cast(eq_values, tf.float32)) <NEW_LINE> op = tf.print('Expected:', y_true, '\nActual:', prediction, '\nAccuracy:', acc) <NEW_LINE> return op | Contains methods meant to be executed by a prediction client.
Args:
player_name: `str`, name of the `tfe.player.Player`
representing the data owner
build_update_step: `Callable`, the function used to construct
a local federated learning update. | 62598fb332920d7e50bc60f8 |
class WeekWithScheduleMixin(WeekCalendarMixin): <NEW_LINE> <INDENT> def get_week_schedules(self, start, end, days): <NEW_LINE> <INDENT> lookup = { '{}__range'.format(self.date_field): (start, end) } <NEW_LINE> queryset = self.get_queryset().filter(**lookup) <NEW_LINE> day_schedules = {day: [] for day in days} <NEW_LINE> for schedule in queryset: <NEW_LINE> <INDENT> schedule_date = getattr(schedule, self.date_field) <NEW_LINE> day_schedules[schedule_date].append(schedule) <NEW_LINE> <DEDENT> return day_schedules <NEW_LINE> <DEDENT> def get_week_calendar(self): <NEW_LINE> <INDENT> calendar_context = super().get_week_calendar() <NEW_LINE> calendar_context['week_day_schedules'] = self.get_week_schedules( calendar_context['week_first'], calendar_context['week_last'], calendar_context['week_days'] ) <NEW_LINE> return calendar_context | スケジュール付きの、週間カレンダーを提供するMixin | 62598fb360cbc95b063643eb |
class RegisterI(object): <NEW_LINE> <INDENT> __metaclass__ = ABCMeta <NEW_LINE> @abstractmethod <NEW_LINE> def getLoginEntry(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def getPasswordEntry(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def getConfirmPasswordEntry(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def getLoginInfoLabel(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def getPasswordInfoLabel(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def getConfirmPasswordInfoLabel(self): <NEW_LINE> <INDENT> pass | Register view interface | 62598fb330dc7b766599f8f0 |
class Horas_Trabajadas(models.Model): <NEW_LINE> <INDENT> horas_trabajadas=models.FloatField() <NEW_LINE> descripcion_horas_trabajadas=models.CharField(max_length = 500) <NEW_LINE> fecha=models.DateTimeField() <NEW_LINE> actividad=models.CharField(max_length = 200) <NEW_LINE> estado=models.CharField(max_length = 200) <NEW_LINE> def __unicode__(self): <NEW_LINE> <INDENT> return str(self.id) | Modelo representa la descripcion de cada hora de trabajo agregada mostrando de la fecha de la misma | 62598fb3b7558d58954636d0 |
class VarDispatcher(Dispatcher): <NEW_LINE> <INDENT> def __call__(self, *args, **kwargs): <NEW_LINE> <INDENT> func, s = self.resolve(args) <NEW_LINE> d = dict((k.token, v) for k, v in s.items()) <NEW_LINE> return func(**d) | A dispatcher that calls functions with variable names.
>>> d = VarDispatcher('d')
>>> x = var('x')
>>> @d.register('inc', x)
... def f(x):
... return x + 1
>>> @d.register('double', x)
... def f(x):
... return x * 2
>>> d('inc', 10)
11
>>> d('double', 10)
20 | 62598fb33317a56b869be59e |
class CustomSplitMapMixin: <NEW_LINE> <INDENT> _Key = Tuple[StringID, str] <NEW_LINE> _CUSTOM_SPLIT_MAP: Dict[_Key, Tuple[CustomSplit, ...]] = defaultdict(tuple) <NEW_LINE> @staticmethod <NEW_LINE> def _get_key(string: str) -> "CustomSplitMapMixin._Key": <NEW_LINE> <INDENT> return (id(string), string) <NEW_LINE> <DEDENT> def add_custom_splits( self, string: str, custom_splits: Iterable[CustomSplit] ) -> None: <NEW_LINE> <INDENT> key = self._get_key(string) <NEW_LINE> self._CUSTOM_SPLIT_MAP[key] = tuple(custom_splits) <NEW_LINE> <DEDENT> def pop_custom_splits(self, string: str) -> List[CustomSplit]: <NEW_LINE> <INDENT> key = self._get_key(string) <NEW_LINE> custom_splits = self._CUSTOM_SPLIT_MAP[key] <NEW_LINE> del self._CUSTOM_SPLIT_MAP[key] <NEW_LINE> return list(custom_splits) <NEW_LINE> <DEDENT> def has_custom_splits(self, string: str) -> bool: <NEW_LINE> <INDENT> key = self._get_key(string) <NEW_LINE> return key in self._CUSTOM_SPLIT_MAP | This mixin class is used to map merged strings to a sequence of
CustomSplits, which will then be used to re-split the strings iff none of
the resultant substrings go over the configured max line length. | 62598fb330bbd722464699cb |
class Solution: <NEW_LINE> <INDENT> def isValidSudoku(self, board): <NEW_LINE> <INDENT> row = [set([]) for i in range(9)] <NEW_LINE> col = [set([]) for i in range(9)] <NEW_LINE> grid = [set([]) for i in range(9)] <NEW_LINE> for r in range(9): <NEW_LINE> <INDENT> for c in range(9): <NEW_LINE> <INDENT> if board[r][c] == '.': <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> if board[r][c] in row[r]: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> if board[r][c] in col[c]: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> g = r/3*3 +c/3 <NEW_LINE> if board[r][c] in grid[g]: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> grid[g].add(board[r][c]) <NEW_LINE> row[r].add(board[r][c]) <NEW_LINE> col[c].add(board[r][c]) <NEW_LINE> <DEDENT> <DEDENT> return True | @param: board: the board
@return: whether the Sudoku is valid | 62598fb3091ae35668704cc2 |
class Object(EventContainer): <NEW_LINE> <INDENT> def __init__(self, cond = None): <NEW_LINE> <INDENT> if cond == None: <NEW_LINE> <INDENT> self.CureCond = self.defCon <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.CureCond = cond <NEW_LINE> <DEDENT> self.SOC[self.CureCond].addObject() <NEW_LINE> self.GenEv() <NEW_LINE> <DEDENT> def GenEv(self): <NEW_LINE> <INDENT> times = [] <NEW_LINE> for i in range(len(self.SOC[self.CureCond].vec)): <NEW_LINE> <INDENT> times.append(eval(self.SOC[self.CureCond].vec[i].fun)) <NEW_LINE> <DEDENT> self.ev = np.argmin(times) <NEW_LINE> self.Engine.ES.MakeEvent(self, eval(self.SOC[self.CureCond].vec[self.ev].fun)) <NEW_LINE> <DEDENT> def sleep(self): <NEW_LINE> <INDENT> self.GenEv() <NEW_LINE> <DEDENT> def divide(self): <NEW_LINE> <INDENT> Object(self.CureCond) <NEW_LINE> self.GenEv() <NEW_LINE> <DEDENT> def assymDivide(self, cond = None): <NEW_LINE> <INDENT> Object(cond) <NEW_LINE> self.GenEv() <NEW_LINE> <DEDENT> def reconfigurate(self, cmprt): <NEW_LINE> <INDENT> self.SOC[self.CureCond].remObject() <NEW_LINE> self.CureCond = cmprt <NEW_LINE> self.SOC[self.CureCond].addObject() <NEW_LINE> self.GenEv() <NEW_LINE> <DEDENT> def destroy(self): <NEW_LINE> <INDENT> self.SOC[self.CureCond].remObject() <NEW_LINE> <DEDENT> def toMOC(self, name): <NEW_LINE> <INDENT> self.MOC[name].addObject() <NEW_LINE> <DEDENT> def test(self,s): <NEW_LINE> <INDENT> print('get -- > '+s) <NEW_LINE> <DEDENT> def go(self): <NEW_LINE> <INDENT> exec(self.SOC[self.CureCond].vec[self.ev].res) <NEW_LINE> return self.Engine.ES.deltaT | Класс Объект.
Каждый объект существует внутри некоторого компартмента.
Компартмент-хендлер -- пустой класс-идентификатор поведения для Объектов
По большому счету, нам не важно какие Объекты содержатся в компартменте,
Т.к. от компартмента нам нужно только знание содержимого (количества).
Данные о состоянии Объекта в текущий момент.
- SetEventTime() -- кладем время до события
- EventTime() -- время до события
| 62598fb363b5f9789fe8520e |
class Published(PublishingState): <NEW_LINE> <INDENT> def __init__(self, **kwargs): <NEW_LINE> <INDENT> super().__init__(**kwargs) <NEW_LINE> self._namespace = ONTOLOGY_NS <NEW_LINE> self._project_id = PROJECT_ID <NEW_LINE> self._name = "Published" | State of an expression having been published.
Labels: publiziert (de) / published (en) | 62598fb35fc7496912d482ce |
class TransferFunction(lti): <NEW_LINE> <INDENT> def __new__(cls, *system): <NEW_LINE> <INDENT> if len(system) == 1 and isinstance(system[0], lti): <NEW_LINE> <INDENT> return system[0].to_tf() <NEW_LINE> <DEDENT> return super(TransferFunction, cls).__new__(cls) <NEW_LINE> <DEDENT> def __init__(self, *system): <NEW_LINE> <INDENT> if isinstance(system[0], lti): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> super(TransferFunction, self).__init__(self, *system) <NEW_LINE> self._num = None <NEW_LINE> self._den = None <NEW_LINE> self.num, self.den = normalize(*system) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return '{0}(\n{1},\n{2}\n)'.format( self.__class__.__name__, repr(self.num), repr(self.den), ) <NEW_LINE> <DEDENT> @property <NEW_LINE> def num(self): <NEW_LINE> <INDENT> return self._num <NEW_LINE> <DEDENT> @num.setter <NEW_LINE> def num(self, num): <NEW_LINE> <INDENT> self._num = atleast_1d(num) <NEW_LINE> if len(self.num.shape) > 1: <NEW_LINE> <INDENT> self.outputs, self.inputs = self.num.shape <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.outputs = 1 <NEW_LINE> self.inputs = 1 <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def den(self): <NEW_LINE> <INDENT> return self._den <NEW_LINE> <DEDENT> @den.setter <NEW_LINE> def den(self, den): <NEW_LINE> <INDENT> self._den = atleast_1d(den) <NEW_LINE> <DEDENT> def _copy(self, system): <NEW_LINE> <INDENT> self.num = system.num <NEW_LINE> self.den = system.den <NEW_LINE> <DEDENT> def to_tf(self): <NEW_LINE> <INDENT> return copy.deepcopy(self) <NEW_LINE> <DEDENT> def to_zpk(self): <NEW_LINE> <INDENT> return ZerosPolesGain(*tf2zpk(self.num, self.den)) <NEW_LINE> <DEDENT> def to_ss(self): <NEW_LINE> <INDENT> return StateSpace(*tf2ss(self.num, self.den)) | Linear Time Invariant system class in transfer function form.
Represents the system as the transfer function
:math:`H(s)=\sum_i b[i] s^i / \sum_j a[j] s^j`, where :math:`b` are
elements of the numerator `num` and :math:`a` are the elements of the
denominator `den`.
Parameters
----------
*system : arguments
The `TransferFunction` class can be instantiated with 1 or 2 arguments.
The following gives the number of input arguments and their
interpretation:
* 1: `lti` system: (`StateSpace`, `TransferFunction` or
`ZerosPolesGain`)
* 2: array_like: (numerator, denominator)
Notes
-----
Changing the value of properties that are not part of the
`TransferFunction` system representation (such as the `A`, `B`, `C`, `D`
state-space matrices) is very inefficient and may lead to numerical
inaccuracies. | 62598fb3a8370b77170f0482 |
class CaseType(ModelBase): <NEW_LINE> <INDENT> @property <NEW_LINE> def is_default(self): <NEW_LINE> <INDENT> return self._content.get('is_default', None) <NEW_LINE> <DEDENT> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return self._content.get('name', None) | Object model for TestRail Case Types
To get all case types
.. code-block:: python
case_types = list(traw_client.case_types()) | 62598fb3cc0a2c111447b0b8 |
class TestReportApi(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.api = bondora_api.apis.report_api.ReportApi() <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_report_generate_report(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_report_get_public_dataset(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_report_get_report(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def test_report_get_report_list(self): <NEW_LINE> <INDENT> pass | ReportApi unit test stubs | 62598fb3f548e778e596b64a |
class ConsoleApiView: <NEW_LINE> <INDENT> def end_categories_selection(self, category_number, category_list): <NEW_LINE> <INDENT> print("les {} catégories ont été sélectionnées".format(category_number)) <NEW_LINE> print("les catégories sélectionnées sont:") <NEW_LINE> for category in category_list: <NEW_LINE> <INDENT> print("-> {}".format(category)) | This class manages the console's messages for the interactions with the API | 62598fb3283ffb24f3cf3932 |
class Node: <NEW_LINE> <INDENT> def __init__(self, item): <NEW_LINE> <INDENT> super(Node, self).__init__() <NEW_LINE> self.item = item <NEW_LINE> self.next = None | 节点类 | 62598fb3a05bb46b3848a911 |
@OFPMultipartReply.register_stats_type() <NEW_LINE> @_set_stats_type(ofproto.OFPMP_TABLE_FEATURES, OFPTableFeaturesStats) <NEW_LINE> class OFPTableFeaturesStatsReply(OFPMultipartReply): <NEW_LINE> <INDENT> def __init__(self, type_=None, **kwargs): <NEW_LINE> <INDENT> super(OFPTableFeaturesStatsReply, self).__init__(**kwargs) | Table features statistics reply message
The switch responds with this message to a table features statistics
request.
================ ======================================================
Attribute Description
================ ======================================================
body List of ``OFPTableFeaturesStats`` instance
================ ====================================================== | 62598fb3379a373c97d990bb |
class _AttrDict(dict): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super(_AttrDict, self).__init__(*args, **kwargs) <NEW_LINE> self.__dict__ = self | Attribute dictionary.
A trick to allow accessing dictionary keys as object attributes. | 62598fb355399d3f056265c0 |
class ProductionWorkTestCase(ModuleTestCase): <NEW_LINE> <INDENT> module = 'production_work' | Test Production Work module | 62598fb37b180e01f3e490a3 |
class UnpackUtility: <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def sizeOfAxis(x, ind): <NEW_LINE> <INDENT> return x.shape[ind] if x is not None else 0 <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def readBinFile(filePath, dtype, packSize, totFirings, numExpr): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> f = open(filePath) <NEW_LINE> tempData = np.fromfile(f, dtype=dtype) <NEW_LINE> f.close() <NEW_LINE> tempData = tempData.reshape((6*totFirings*numExpr, packSize)).T <NEW_LINE> tempData = np.copy(tempData, order='C') <NEW_LINE> return tempData <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> <DEDENT> @staticmethod <NEW_LINE> def saveChnData(chnData, chnDataAll, destDir, ind): <NEW_LINE> <INDENT> fileName = 'chndata_%d.h5' % (ind) <NEW_LINE> outputPath = join(destDir, fileName) <NEW_LINE> print('Saving data to ' + outputPath) <NEW_LINE> f = h5py.File(outputPath, 'w') <NEW_LINE> f['chndata'] = chnData <NEW_LINE> f['chndata_all'] = chnDataAll <NEW_LINE> f.close() <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def renameUnindexedFile(srcDir): <NEW_LINE> <INDENT> print('Renaming unindexed raw data files in %s' % (srcDir)) <NEW_LINE> pTarget = re.compile(r'Board([0-9]+)Experiment([0-9]+)' + r'TotalFiring([0-9]+)_Pack.bin') <NEW_LINE> pExisting = re.compile(r'Board([0-9]+)Experiment([0-9]+)' + r'TotalFiring([0-9]+)_Pack_([0-9]+).bin') <NEW_LINE> targetFileList = [] <NEW_LINE> indexList = [] <NEW_LINE> for fileName in listdir(srcDir): <NEW_LINE> <INDENT> if pTarget.match(fileName) is not None: <NEW_LINE> <INDENT> targetFileList.append(fileName) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> matchExisting = pExisting.match(fileName) <NEW_LINE> if matchExisting is not None: <NEW_LINE> <INDENT> indexList.append(int(matchExisting.group(4))) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> if not targetFileList: <NEW_LINE> <INDENT> print('No unindexed file found!') <NEW_LINE> return -1 <NEW_LINE> <DEDENT> if not indexList: <NEW_LINE> <INDENT> renameIndex = 1 
<NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> renameIndex = max(indexList) + 1 <NEW_LINE> <DEDENT> for fileName in targetFileList: <NEW_LINE> <INDENT> srcFilePath = join(srcDir, fileName) <NEW_LINE> destFilePath = '%s_%d.bin' % (srcFilePath[:-4], renameIndex) <NEW_LINE> print(srcFilePath) <NEW_LINE> print('\t->' + destFilePath) <NEW_LINE> rename(srcFilePath, destFilePath) <NEW_LINE> <DEDENT> return renameIndex | Provide a series of statistic methods for Unpack classes | 62598fb367a9b606de546074 |
class NuProcedure(LambdaProcedure): <NEW_LINE> <INDENT> def _symbol(self): <NEW_LINE> <INDENT> return 'nu' <NEW_LINE> <DEDENT> "*** YOUR CODE HERE ***" <NEW_LINE> def evaluate_arguments(self, arg_list, env): <NEW_LINE> <INDENT> return arg_list.map(lambda operand: Thunk(nil, operand, env)) | A procedure whose parameters are to be passed by name. | 62598fb34f6381625f199513 |
class House: <NEW_LINE> <INDENT> def __init__(self, xpos): <NEW_LINE> <INDENT> self.rect = Rect(xpos, 550, 40, 40) <NEW_LINE> self.exploded = False <NEW_LINE> sysfont = pygame.font.SysFont(None, 40) <NEW_LINE> font_house_a = sysfont.render("A", False, (255, 255, 255)) <NEW_LINE> font_house_b = sysfont.render("X", False, (255, 255, 255)) <NEW_LINE> self.images = (pygame.Surface((20, 20), pygame.SRCALPHA), pygame.Surface((20, 20), pygame.SRCALPHA)) <NEW_LINE> self.images[0].blit(font_house_a, (0, 0)) <NEW_LINE> self.images[1].blit(font_house_b, (0, 0)) <NEW_LINE> <DEDENT> def draw(self): <NEW_LINE> <INDENT> if self.exploded: <NEW_LINE> <INDENT> SURFACE.blit(self.images[1], self.rect.topleft) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> SURFACE.blit(self.images[0], self.rect.topleft) | House Object | 62598fb3e5267d203ee6b9aa |
class LabsEventResult(models.Model): <NEW_LINE> <INDENT> block = models.ForeignKey(LabsEventBlock, on_delete=models.CASCADE, related_name='results', verbose_name=_('Блок мероприятия')) <NEW_LINE> uuid = models.CharField(max_length=36, unique=True, verbose_name=_('UUID')) <NEW_LINE> title = models.TextField(verbose_name=_('Название')) <NEW_LINE> result_format = models.CharField(max_length=50, verbose_name=_('Формат работы')) <NEW_LINE> fix = models.TextField(verbose_name=_('Способ фиксации результата')) <NEW_LINE> check = models.TextField(verbose_name=_('Способ проверки результата')) <NEW_LINE> order = models.IntegerField(verbose_name=_('Порядок отображения в рамках блока мероприятия')) <NEW_LINE> meta = JSONField(default=None, null=True, verbose_name=_('Ячейки, в которые попадает ЦС')) <NEW_LINE> deleted = models.BooleanField(default=False) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> ordering = ['order'] <NEW_LINE> <DEDENT> def is_personal(self): <NEW_LINE> <INDENT> return not self.result_format or self.result_format == 'personal' <NEW_LINE> <DEDENT> def is_group(self): <NEW_LINE> <INDENT> return not self.result_format or self.result_format == 'group' <NEW_LINE> <DEDENT> @cached_property <NEW_LINE> def available_circle_items(self): <NEW_LINE> <INDENT> return [i for i in self.circle_items.all() if i.tool and i.source == CircleItem.SYSTEM_LABS] <NEW_LINE> <DEDENT> def get_result_format_display(self): <NEW_LINE> <INDENT> if self.result_format == 'personal': <NEW_LINE> <INDENT> return _('персональный') <NEW_LINE> <DEDENT> elif self.result_format == 'group': <NEW_LINE> <INDENT> return _('групповой') | Результаты блоков мароприятий по данным из лабс | 62598fb3d486a94d0ba2c077 |
class MockParser: <NEW_LINE> <INDENT> def __init__(self, name = None, help = None): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.help = help <NEW_LINE> self.description = None <NEW_LINE> self.arguments = {} <NEW_LINE> <DEDENT> def add_argument(self, *names, **params): <NEW_LINE> <INDENT> for name in names: <NEW_LINE> <INDENT> self.arguments[name] = params | A mock parser, allowing tests to examine the changes. | 62598fb332920d7e50bc60fa |
class ResNet(nn.Module): <NEW_LINE> <INDENT> def __init__(self, layers=(3, 4, 6, 3), outputs=(3, 4, 5), state_dict_path='/Users/nick/.cache/torch/checkpoints/resnet50-19c8e357.pth'): <NEW_LINE> <INDENT> super(ResNet, self).__init__() <NEW_LINE> self.outputs = outputs <NEW_LINE> self.state_dict_path = state_dict_path <NEW_LINE> block = Bottleneck <NEW_LINE> self.inplanes = 64 <NEW_LINE> self.conv1 = nn.Conv2d(3, self.inplanes, kernel_size=7, stride=2, padding=3, bias=False) <NEW_LINE> self.bn1 = nn.BatchNorm2d(self.inplanes) <NEW_LINE> self.relu = nn.ReLU(inplace=True) <NEW_LINE> self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1) <NEW_LINE> self.layer1 = self._make_layer(block, 64, layers[0]) <NEW_LINE> self.layer2 = self._make_layer(block, 128, layers[1], stride=2) <NEW_LINE> self.layer3 = self._make_layer(block, 256, layers[2], stride=2) <NEW_LINE> self.layer4 = self._make_layer(block, 512, layers[3], stride=2) <NEW_LINE> <DEDENT> def _make_layer(self, block, planes, blocks, stride=1): <NEW_LINE> <INDENT> downsample = None <NEW_LINE> if stride != 1 or self.inplanes != planes * block.expansion: <NEW_LINE> <INDENT> downsample = nn.Sequential( nn.Conv2d(self.inplanes, planes * block.expansion, kernel_size=1, stride=stride, bias=False), nn.BatchNorm2d(planes * block.expansion), ) <NEW_LINE> <DEDENT> layers = [] <NEW_LINE> layers.append(block(self.inplanes, planes, stride, downsample)) <NEW_LINE> self.inplanes = planes * block.expansion <NEW_LINE> for _ in range(1, blocks): <NEW_LINE> <INDENT> layers.append(block(self.inplanes, planes)) <NEW_LINE> <DEDENT> return nn.Sequential(*layers) <NEW_LINE> <DEDENT> def initialize(self): <NEW_LINE> <INDENT> self.load_state_dict(torch.load(self.state_dict_path), strict=False) <NEW_LINE> <DEDENT> def forward(self, x): <NEW_LINE> <INDENT> x = self.conv1(x) <NEW_LINE> x = self.bn1(x) <NEW_LINE> x = self.relu(x) <NEW_LINE> x = self.maxpool(x) <NEW_LINE> outputs = [] <NEW_LINE> for i, layer in enumerate([self.layer1, 
self.layer2, self.layer3, self.layer4]): <NEW_LINE> <INDENT> level = i + 2 <NEW_LINE> if level > max(self.outputs): <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> x = layer(x) <NEW_LINE> if level in self.outputs: <NEW_LINE> <INDENT> outputs.append(x) <NEW_LINE> <DEDENT> <DEDENT> return outputs | Deep Residual Network - https://arxiv.org/abs/1512.03385 | 62598fb391f36d47f2230efb |
class DeletePersonalSqlRequest(JDCloudRequest): <NEW_LINE> <INDENT> def __init__(self, parameters, header=None, version="v1"): <NEW_LINE> <INDENT> super(DeletePersonalSqlRequest, self).__init__( '/regions/{regionId}/personalSql:delete', 'POST', header, version) <NEW_LINE> self.parameters = parameters | 删除收藏sql | 62598fb3a8370b77170f0483 |
class Weapon_Terraformer(Weapon_DirectionalGen_Base): <NEW_LINE> <INDENT> def shoot(self, dir): <NEW_LINE> <INDENT> targetsquare = self.wieldingunit.square <NEW_LINE> for distance in 1, 2: <NEW_LINE> <INDENT> targetsquare = self.game.board[targetsquare].getRelSquare(dir, 1) <NEW_LINE> self._convertGrassland(targetsquare) <NEW_LINE> for perpsq in Direction.genPerp(dir): <NEW_LINE> <INDENT> self._convertGrassland(self.game.board[targetsquare].getRelSquare(perpsq, 1)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def _convertGrassland(self, sq): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> tile = self.game.board[sq] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> raise CantHappenInGame("The Terraformer was placed in a way that it's weapon shoots off the board. This can't happen in the game.") <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> if tile.unit.isMountain(): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise AttributeError <NEW_LINE> <DEDENT> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> tile.replaceTile(Tile_Sand(self.game)) <NEW_LINE> <DEDENT> tile.die() | Eradicate all life in front of the Terraformer. Terraformer. Yes, the unit is called Terraformer and the weapon is also called Terraformer | 62598fb3dd821e528d6d8fd5 |
class PlaceSingleItem(smach.State): <NEW_LINE> <INDENT> def __init__(self, robot, place_designator): <NEW_LINE> <INDENT> smach.State.__init__(self, outcomes=["succeeded", "failed"]) <NEW_LINE> self._robot = robot <NEW_LINE> if place_designator is not None: <NEW_LINE> <INDENT> self.place_designator = place_designator <NEW_LINE> <DEDENT> <DEDENT> def execute(self, userdata=None): <NEW_LINE> <INDENT> item = ds.EdEntityDesignator(robot=self._robot, uuid=arm.gripper.occupied_by.uuid) <NEW_LINE> arm_designator = ds.OccupiedArmDesignator(self._robot, {"required_goals": ["reset", "handover_to_human"], "required_gripper_types": [arms.GripperTypes.GRASPING]}) <NEW_LINE> resolved_arm = arm_designator.resolve() <NEW_LINE> if resolved_arm is None: <NEW_LINE> <INDENT> rospy.logwarn("No arm holding an entity") <NEW_LINE> return "failed" <NEW_LINE> <DEDENT> place = states.Place(robot=self._robot, item_to_place=item, place_pose=self.place_designator, arm=arm_designator) <NEW_LINE> result = place.execute() <NEW_LINE> if result != "done": <NEW_LINE> <INDENT> rospy.loginfo("{place} resulted in {out}".format(place=place, out=result)) <NEW_LINE> handover = states.HandoverToHuman(robot=self._robot, arm_designator=arm_designator) <NEW_LINE> handover.execute() <NEW_LINE> <DEDENT> return "succeeded" if result == "done" else "failed" | Tries to place an object. A 'place' statemachine is constructed dynamically since this makes it easier to
build a statemachine (have we succeeded in grasping the objects?) | 62598fb34a966d76dd5eef7e |
class AppUser(AbstractUser): <NEW_LINE> <INDENT> google_access_token = models.CharField(max_length=100, default=None, null=True, blank=True) <NEW_LINE> facebook_access_token = models.CharField(max_length=100, default=None, null=True, blank=True) <NEW_LINE> google_uid = models.CharField(max_length=100, default=None, null=True, blank=True) <NEW_LINE> facebook_uid = models.CharField(max_length=100, default=None, null=True, blank=True) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return str(self.username) <NEW_LINE> <DEDENT> class Meta: <NEW_LINE> <INDENT> verbose_name = 'AppUser' <NEW_LINE> verbose_name_plural = 'AppUsers' | Define fields related to a user | 62598fb332920d7e50bc60fb |
class PSUTag(JSONable): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def construct_from_jdict(cls, jdict): <NEW_LINE> <INDENT> if not jdict: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> items = jdict.split('.') <NEW_LINE> device = items[0] <NEW_LINE> api = items[1] <NEW_LINE> patch = items[2] <NEW_LINE> return cls(device, api, patch) <NEW_LINE> <DEDENT> def __init__(self, device, api, patch): <NEW_LINE> <INDENT> self.__device = device <NEW_LINE> self.__api = api <NEW_LINE> self.__patch = patch <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self.device == other.device and self.api == other.api and self.patch == other.patch <NEW_LINE> <DEDENT> except (TypeError, AttributeError): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> def as_json(self): <NEW_LINE> <INDENT> return '.'.join((self.device, self.api, self.patch)) <NEW_LINE> <DEDENT> @property <NEW_LINE> def device(self): <NEW_LINE> <INDENT> return self.__device <NEW_LINE> <DEDENT> @property <NEW_LINE> def api(self): <NEW_LINE> <INDENT> return self.__api <NEW_LINE> <DEDENT> @property <NEW_LINE> def patch(self): <NEW_LINE> <INDENT> return self.__patch <NEW_LINE> <DEDENT> def __str__(self, *args, **kwargs): <NEW_LINE> <INDENT> return "PSUTag:{device:%s, api:%s, patch:%s}" % (self.device, self.api, self.patch) | classdocs | 62598fb3cc0a2c111447b0ba |
class MarubatuMenu(wx.MenuBar): <NEW_LINE> <INDENT> def onMenuManageClick(self,event): <NEW_LINE> <INDENT> menu_id=event.GetId() <NEW_LINE> menu_obj=event.GetEventObject() <NEW_LINE> menu_label=menu_obj.GetLabel(menu_id) <NEW_LINE> if menu_label==self.RESET_MENU_LABEL: <NEW_LINE> <INDENT> print(self.RESET_MENU_LABEL) <NEW_LINE> <DEDENT> if menu_label==self.EXIT_MENU_LABEL: <NEW_LINE> <INDENT> wx.Exit() <NEW_LINE> <DEDENT> <DEDENT> def __init__(self): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> menu_manage = wx.Menu() <NEW_LINE> self.RESET_MENU_LABEL="リセット" <NEW_LINE> self.EXIT_MENU_LABEL="終了" <NEW_LINE> selreset_menu=menu_manage.Append(1, self.RESET_MENU_LABEL) <NEW_LINE> exit_menu=menu_manage.Append(2,self.EXIT_MENU_LABEL ) <NEW_LINE> menu_manage.Bind(wx.EVT_MENU,self.onMenuManageClick) <NEW_LINE> self.Append(menu_manage, '管理') | CalcFrameにセットするメニューバークラス | 62598fb39c8ee823130401c6 |
class TestExerciseCase01(TestExercise): <NEW_LINE> <INDENT> length = 2 <NEW_LINE> option_quantity = -100 <NEW_LINE> option_price = 0 <NEW_LINE> asset_quantity = -100 <NEW_LINE> asset_price = 10 <NEW_LINE> exercise = EXERCISE_OPERATION6 | Being exercised on a call. | 62598fb3379a373c97d990bd |
class HasLoss(Params): <NEW_LINE> <INDENT> loss = Param(Params._dummy(), "loss", "the loss function to be optimized.", typeConverter=TypeConverters.toString) <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> super(HasLoss, self).__init__() <NEW_LINE> <DEDENT> def setLoss(self, value): <NEW_LINE> <INDENT> return self._set(loss=value) <NEW_LINE> <DEDENT> def getLoss(self): <NEW_LINE> <INDENT> return self.getOrDefault(self.loss) | Mixin for param loss: the loss function to be optimized. | 62598fb37047854f4633f482 |
class VmadHandlerINFO(_AFixedContainer): <NEW_LINE> <INDENT> processors = OrderedDict([ (u'extra_bind_data_version', get_structs(u'b')), (u'fragment_flags', get_structs(u'B')), (u'file_name', u'str16'), ]) <NEW_LINE> flags_mapper = Flags.from_names(u'on_begin', u'on_end') <NEW_LINE> flags_to_children = OrderedDict([ (u'on_begin', u'begin_frag'), (u'on_end', u'end_frag'), ]) <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> super(MelVmad.VmadHandlerINFO, self).__init__() <NEW_LINE> self.child_loader = MelVmad.FragmentBasic() | Implements special VMAD handling for INFO records. | 62598fb33d592f4c4edbaf68 |
class IndexDataReader: <NEW_LINE> <INDENT> def __init__(self, data_file, batch_size, bulk_size, file_source, action_metadata, index_name, type_name): <NEW_LINE> <INDENT> self.data_file = data_file <NEW_LINE> self.batch_size = batch_size <NEW_LINE> self.bulk_size = bulk_size <NEW_LINE> self.file_source = file_source <NEW_LINE> self.action_metadata = action_metadata <NEW_LINE> self.index_name = index_name <NEW_LINE> self.type_name = type_name <NEW_LINE> <DEDENT> def __enter__(self): <NEW_LINE> <INDENT> self.file_source.open(self.data_file, 'rt') <NEW_LINE> return self <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> def __next__(self): <NEW_LINE> <INDENT> batch = [] <NEW_LINE> try: <NEW_LINE> <INDENT> docs_in_batch = 0 <NEW_LINE> while docs_in_batch < self.batch_size: <NEW_LINE> <INDENT> docs_in_bulk, bulk = self.read_bulk() <NEW_LINE> if docs_in_bulk == 0: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> docs_in_batch += docs_in_bulk <NEW_LINE> batch.append((docs_in_bulk, bulk)) <NEW_LINE> <DEDENT> if docs_in_batch == 0: <NEW_LINE> <INDENT> raise StopIteration() <NEW_LINE> <DEDENT> logger.debug("Returning a batch with %d bulks." 
% len(batch)) <NEW_LINE> return self.index_name, self.type_name, batch <NEW_LINE> <DEDENT> except IOError: <NEW_LINE> <INDENT> logger.exception("Could not read [%s]" % self.data_file) <NEW_LINE> <DEDENT> <DEDENT> def read_bulk(self): <NEW_LINE> <INDENT> docs_in_bulk = 0 <NEW_LINE> current_bulk = [] <NEW_LINE> for action_metadata_line, document in zip(self.action_metadata, self.file_source): <NEW_LINE> <INDENT> if action_metadata_line: <NEW_LINE> <INDENT> current_bulk.append(action_metadata_line) <NEW_LINE> <DEDENT> current_bulk.append(document) <NEW_LINE> docs_in_bulk += 1 <NEW_LINE> if docs_in_bulk == self.bulk_size: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> <DEDENT> return docs_in_bulk, current_bulk <NEW_LINE> <DEDENT> def __exit__(self, exc_type, exc_val, exc_tb): <NEW_LINE> <INDENT> self.file_source.close() <NEW_LINE> return False | Reads a file in bulks into an array and also adds a meta-data line before each document if necessary.
This implementation also supports batching. This means that you can specify batch_size = N * bulk_size, where N is any natural
number >= 1. This makes file reading more efficient for small bulk sizes. | 62598fb3cc0a2c111447b0bb |
class DynamicUnet(SequentialEx): <NEW_LINE> <INDENT> def __init__(self, encoder: nn.Module, n_classes: int, img_size: Tuple[int, int] = (256, 256), blur: bool = False, blur_final=True, self_attention: bool = False, y_range: Optional[Tuple[float, float]] = None, last_cross: bool = True, bottle: bool = False, **kwargs): <NEW_LINE> <INDENT> imsize = tuple(img_size) <NEW_LINE> sfs_szs, select_layer = get_unet_config(encoder, img_size) <NEW_LINE> ni = sfs_szs[-1][1] <NEW_LINE> sfs_szs = list(reversed(sfs_szs[:-1])) <NEW_LINE> select_layer = list(reversed(select_layer[:-1])) <NEW_LINE> self.sfs = hook_outputs(select_layer, detach=False) <NEW_LINE> x = dummy_eval(encoder, imsize).detach() <NEW_LINE> middle_conv = nn.Sequential(conv_layer(ni, ni * 2, **kwargs), conv_layer(ni * 2, ni, **kwargs)).eval() <NEW_LINE> x = middle_conv(x) <NEW_LINE> layers = [encoder, batchnorm_2d(ni), nn.ReLU(), middle_conv] <NEW_LINE> for i, x_size in enumerate(sfs_szs): <NEW_LINE> <INDENT> not_final = i != len(sfs_szs) - 1 <NEW_LINE> up_in_c, x_in_c = int(x.shape[1]), int(x_size[1]) <NEW_LINE> do_blur = blur and (not_final or blur_final) <NEW_LINE> sa = self_attention and (i == len(sfs_szs) - 3) <NEW_LINE> unet_block = UnetBlock(up_in_c, x_in_c, self.sfs[i], final_div=not_final, blur=do_blur, self_attention=sa, **kwargs).eval() <NEW_LINE> layers.append(unet_block) <NEW_LINE> x = unet_block(x) <NEW_LINE> <DEDENT> ni = x.shape[1] <NEW_LINE> if imsize != sfs_szs[0][-2:]: layers.append(PixelShuffle_ICNR(ni, **kwargs)) <NEW_LINE> x = PixelShuffle_ICNR(ni)(x) <NEW_LINE> if imsize != x.shape[-2:]: layers.append(Lambda(lambda x: F.interpolate(x, imsize, mode='nearest'))) <NEW_LINE> if last_cross: <NEW_LINE> <INDENT> layers.append(MergeLayer(dense=True)) <NEW_LINE> ni += in_channels(encoder) <NEW_LINE> layers.append(res_block(ni, bottle=bottle, **kwargs)) <NEW_LINE> <DEDENT> layers += [conv_layer(ni, n_classes, ks=1, use_activ=False, **kwargs)] <NEW_LINE> if y_range is not None: 
layers.append(SigmoidRange(*y_range)) <NEW_LINE> super().__init__(*layers) | Create a U-Net from a given architecture. | 62598fb37b180e01f3e490a4 |
class Cart: <NEW_LINE> <INDENT> def __init__(self, request): <NEW_LINE> <INDENT> cart_id = request.session.get(CART_ID) <NEW_LINE> cart = None <NEW_LINE> if cart_id: <NEW_LINE> <INDENT> query = Q(id=cart_id) <NEW_LINE> if request.user.is_authenticated: <NEW_LINE> <INDENT> cart = models.Cart.objects.filter(user=request.user).first() <NEW_LINE> <DEDENT> cart = models.Cart.objects.filter(query).first() if not cart else cart <NEW_LINE> if not request.user and request.user.is_authenticated: <NEW_LINE> <INDENT> cart.user = request.user <NEW_LINE> cart.save() <NEW_LINE> <DEDENT> if cart is None: <NEW_LINE> <INDENT> cart = self.add_new_cart(request) <NEW_LINE> <DEDENT> if cart and not cart.is_cart_active(): <NEW_LINE> <INDENT> cart.set_cart_active() <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> cart = self.add_new_cart(request) <NEW_LINE> <DEDENT> self.cart = cart <NEW_LINE> self.promotion = Promotion(cart) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def add_new_cart(cls, request): <NEW_LINE> <INDENT> if request.user.is_authenticated: <NEW_LINE> <INDENT> cart = models.Cart.objects.create(user=request.user) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> cart = models.Cart.objects.create() <NEW_LINE> <DEDENT> request.session[CART_ID] = cart.id <NEW_LINE> return cart <NEW_LINE> <DEDENT> def add_product(self, product): <NEW_LINE> <INDENT> item = models.CartItem.objects.filter(cart=self.cart, product=product).first() <NEW_LINE> if not item: <NEW_LINE> <INDENT> models.CartItem.objects.create(cart=self.cart, product=product) <NEW_LINE> <DEDENT> self.apply_promotions() <NEW_LINE> <DEDENT> def remove_product(self, product): <NEW_LINE> <INDENT> item = models.CartItem.objects.filter(cart=self.cart, product=product).first() <NEW_LINE> if item: <NEW_LINE> <INDENT> item.delete() <NEW_LINE> <DEDENT> self.apply_promotions() <NEW_LINE> <DEDENT> def update_product(self, product, quantity): <NEW_LINE> <INDENT> item = models.CartItem.objects.filter(cart=self.cart, 
product=product).first() <NEW_LINE> if item: <NEW_LINE> <INDENT> if quantity == 0: <NEW_LINE> <INDENT> item.delete() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> item.quantity = int(quantity) <NEW_LINE> item.save() <NEW_LINE> <DEDENT> <DEDENT> self.apply_promotions() <NEW_LINE> <DEDENT> def apply_promotions(self): <NEW_LINE> <INDENT> self.promotion.apply_promotions() | Cart class, this will have all methods like add, remove, etc related to a cart | 62598fb33539df3088ecc35a |
class InputMediaGifExternal(Object): <NEW_LINE> <INDENT> ID = 0x4843b0fd <NEW_LINE> def __init__(self, url: str, q: str): <NEW_LINE> <INDENT> self.url = url <NEW_LINE> self.q = q <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def read(b: BytesIO, *args) -> "InputMediaGifExternal": <NEW_LINE> <INDENT> url = String.read(b) <NEW_LINE> q = String.read(b) <NEW_LINE> return InputMediaGifExternal(url, q) <NEW_LINE> <DEDENT> def write(self) -> bytes: <NEW_LINE> <INDENT> b = BytesIO() <NEW_LINE> b.write(Int(self.ID, False)) <NEW_LINE> b.write(String(self.url)) <NEW_LINE> b.write(String(self.q)) <NEW_LINE> return b.getvalue() | Attributes:
ID: ``0x4843b0fd``
Args:
url: ``str``
q: ``str`` | 62598fb3d7e4931a7ef3c13c |
class Static(A10BaseClass): <NEW_LINE> <INDENT> def __init__(self, **kwargs): <NEW_LINE> <INDENT> self.ERROR_MSG = "" <NEW_LINE> self.required = [ "src_address","nat_address"] <NEW_LINE> self.b_key = "static" <NEW_LINE> self.a10_url="/axapi/v3/ip/nat/inside/source/static/{src_address}+{nat_address}" <NEW_LINE> self.DeviceProxy = "" <NEW_LINE> self.nat_address = "" <NEW_LINE> self.vrid = "" <NEW_LINE> self.src_address = "" <NEW_LINE> for keys, value in kwargs.items(): <NEW_LINE> <INDENT> setattr(self,keys, value) | Class Description::
Static Address Translations.
Class static supports CRUD Operations and inherits from `common/A10BaseClass`.
This class is the `"PARENT"` class for this module.`
:param nat_address: {"optional": false, "type": "string", "description": "NAT Address", "format": "ipv4-address"}
:param vrid: {"description": "VRRP-A vrid (Specify ha VRRP-A vrid)", "format": "number", "type": "number", "maximum": 31, "minimum": 1, "optional": true}
:param src_address: {"optional": false, "type": "string", "description": "Original Source Address", "format": "ipv4-address"}
:param DeviceProxy: The device proxy for REST operations and session handling. Refer to `common/device_proxy.py`
URL for this object::
`https://<Hostname|Ip address>//axapi/v3/ip/nat/inside/source/static/{src_address}+{nat_address}`. | 62598fb33346ee7daa33769b |
class OptionsManager: <NEW_LINE> <INDENT> normal_defaults = defaultnormaloptions <NEW_LINE> query_defaults = defaultqueryoptions <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> self.nopts = {} <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def validate_nopts(cls, nopts): <NEW_LINE> <INDENT> illegal_normal_keys = set(nopts.keys()) - set(cls.normal_defaults.keys()) <NEW_LINE> if illegal_normal_keys: <NEW_LINE> <INDENT> raise ProgramError('Invalid options: ' + quote_items(illegal_normal_keys)) <NEW_LINE> <DEDENT> <DEDENT> @classmethod <NEW_LINE> def validate_qopts(cls, qopts): <NEW_LINE> <INDENT> for d in qopts.values(): <NEW_LINE> <INDENT> illegal_query_keys = set(d.keys()) - set(cls.query_defaults.keys()) <NEW_LINE> if illegal_query_keys: <NEW_LINE> <INDENT> raise ProgramError('Invalid query options: ' + quote_items(illegal_query_keys)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def import_opts(self, nopts, qopts): <NEW_LINE> <INDENT> self.validate_nopts(nopts) <NEW_LINE> self.validate_qopts(qopts) <NEW_LINE> self.nopts.update(nopts) <NEW_LINE> <DEDENT> def get_opt(self, key): <NEW_LINE> <INDENT> default = dc(self.normal_defaults[key]) <NEW_LINE> return self.nopts.get(key, default) <NEW_LINE> <DEDENT> def set_opt(self, key, value): <NEW_LINE> <INDENT> if key not in self.normal_defaults.keys(): <NEW_LINE> <INDENT> raise ProgramError('Invalid option: ' + key) <NEW_LINE> <DEDENT> self.nopts[key] = value <NEW_LINE> <DEDENT> def del_opt(self, key): <NEW_LINE> <INDENT> if key not in self.normal_defaults.keys(): <NEW_LINE> <INDENT> raise ProgramError('Invalid option: ' + key) <NEW_LINE> <DEDENT> self.nopts.pop(key, None) <NEW_LINE> <DEDENT> def get_queryopt(self, query, key): <NEW_LINE> <INDENT> default = dc(self.query_defaults[key]) <NEW_LINE> return query.options.get(key, default) | Manages access to program options. | 62598fb3167d2b6e312b701a |
class UpgradeHeader(ft.ProductComment): <NEW_LINE> <INDENT> field_name = "Upgrade" | The Upgrade general-header allows the client to specify
what additional communication protocols it supports and
would like to use if the server finds it appropriate to
switch protocols.
Upgrade = "Upgrade" ":" 1#product | 62598fb34428ac0f6e6585c7 |
class TagPropertyAllowedValue(object): <NEW_LINE> <INDENT> def __init__(self, value=None, display_name=None, flags=None): <NEW_LINE> <INDENT> self.swagger_types = { 'value': 'str', 'display_name': 'str', 'flags': 'list[str]' } <NEW_LINE> self.attribute_map = { 'value': 'value', 'display_name': 'displayName', 'flags': 'flags' } <NEW_LINE> self._value = value <NEW_LINE> self._display_name = display_name <NEW_LINE> self._flags = flags <NEW_LINE> <DEDENT> @property <NEW_LINE> def value(self): <NEW_LINE> <INDENT> return self._value <NEW_LINE> <DEDENT> @value.setter <NEW_LINE> def value(self, value): <NEW_LINE> <INDENT> self._value = value <NEW_LINE> <DEDENT> @property <NEW_LINE> def display_name(self): <NEW_LINE> <INDENT> return self._display_name <NEW_LINE> <DEDENT> @display_name.setter <NEW_LINE> def display_name(self, display_name): <NEW_LINE> <INDENT> self._display_name = display_name <NEW_LINE> <DEDENT> @property <NEW_LINE> def flags(self): <NEW_LINE> <INDENT> return self._flags <NEW_LINE> <DEDENT> @flags.setter <NEW_LINE> def flags(self, flags): <NEW_LINE> <INDENT> self._flags = flags <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in iteritems(self.swagger_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return 
self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, TagPropertyAllowedValue): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other | NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually. | 62598fb34a966d76dd5eef7f |
class Manager(base.SubManager): <NEW_LINE> <INDENT> resource_class = Resource <NEW_LINE> service_type = 'compute' <NEW_LINE> _attr_mapping = ATTRIBUTE_MAPPING <NEW_LINE> _hidden_methods = ["update"] <NEW_LINE> _json_resource_key = 'security_group_rule' <NEW_LINE> _json_resources_key = 'security_group_rules' <NEW_LINE> _url_resource_path = '/os-security-group-rules' <NEW_LINE> def __init__(self, parent_resource, *args, **kwargs): <NEW_LINE> <INDENT> super(Manager, self).__init__(parent_resource, *args, **kwargs) <NEW_LINE> if self.parent_resource._rules: <NEW_LINE> <INDENT> self._rules = [self.resource_class(self, **self._json2attr(x)) for x in self.parent_resource._rules] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._rules = [] <NEW_LINE> <DEDENT> <DEDENT> def _reload_rules(self): <NEW_LINE> <INDENT> self.parent_resource.get() <NEW_LINE> if self.parent_resource._rules: <NEW_LINE> <INDENT> self._rules = [self.resource_class(self, **self._json2attr(x)) for x in self.parent_resource._rules] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._rules = [] <NEW_LINE> <DEDENT> <DEDENT> def create(self, remote_ip_prefix=UNDEF, port_range_min=UNDEF, port_range_max=UNDEF, protocol=UNDEF, group=UNDEF, project=UNDEF): <NEW_LINE> <INDENT> ret = super(Manager, self).create(remote_ip_prefix=remote_ip_prefix, port_range_min=port_range_min, port_range_max=port_range_max, protocol=protocol, group=group, project=project) <NEW_LINE> self._reload_rules() <NEW_LINE> return ret <NEW_LINE> <DEDENT> def get(self, id): <NEW_LINE> <INDENT> for rule in self._rules: <NEW_LINE> <INDENT> if rule.id == id: <NEW_LINE> <INDENT> return rule <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def _find_gen(self, **kwargs): <NEW_LINE> <INDENT> for sg_rule in self._rules: <NEW_LINE> <INDENT> for k, v in kwargs.items(): <NEW_LINE> <INDENT> if getattr(sg_rule, k, None) != v: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> yield sg_rule | Manager class for security group rules in 
Compute API v2 | 62598fb3ff9c53063f51a6f5 |
class Destination(Mixin, db.Model): <NEW_LINE> <INDENT> __tablename__ = "destination" <NEW_LINE> id = db.Column(db.Integer, unique=True, primary_key=True) <NEW_LINE> lat = db.Column(db.Float, nullable=False) <NEW_LINE> lon = db.Column(db.Float, nullable=False) <NEW_LINE> code = db.Column(db.Integer, nullable=False) <NEW_LINE> def __init__(self, lat, lon, code): <NEW_LINE> <INDENT> db.Model.__init__(self, lat=lat, lon=lon, code=code) <NEW_LINE> <DEDENT> def add_dest(self): <NEW_LINE> <INDENT> db.session.add(self) <NEW_LINE> db.session.commit() | Destination Table. | 62598fb366656f66f7d5a498 |
class BrowseNodeAncestor(object): <NEW_LINE> <INDENT> swagger_types = { 'ancestor': 'BrowseNodeAncestor', 'context_free_name': 'str', 'display_name': 'str', 'id': 'str' } <NEW_LINE> attribute_map = { 'ancestor': 'Ancestor', 'context_free_name': 'ContextFreeName', 'display_name': 'DisplayName', 'id': 'Id' } <NEW_LINE> def __init__(self, ancestor=None, context_free_name=None, display_name=None, id=None): <NEW_LINE> <INDENT> self._ancestor = None <NEW_LINE> self._context_free_name = None <NEW_LINE> self._display_name = None <NEW_LINE> self._id = None <NEW_LINE> self.discriminator = None <NEW_LINE> if ancestor is not None: <NEW_LINE> <INDENT> self.ancestor = ancestor <NEW_LINE> <DEDENT> if context_free_name is not None: <NEW_LINE> <INDENT> self.context_free_name = context_free_name <NEW_LINE> <DEDENT> if display_name is not None: <NEW_LINE> <INDENT> self.display_name = display_name <NEW_LINE> <DEDENT> if id is not None: <NEW_LINE> <INDENT> self.id = id <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def ancestor(self): <NEW_LINE> <INDENT> return self._ancestor <NEW_LINE> <DEDENT> @ancestor.setter <NEW_LINE> def ancestor(self, ancestor): <NEW_LINE> <INDENT> self._ancestor = ancestor <NEW_LINE> <DEDENT> @property <NEW_LINE> def context_free_name(self): <NEW_LINE> <INDENT> return self._context_free_name <NEW_LINE> <DEDENT> @context_free_name.setter <NEW_LINE> def context_free_name(self, context_free_name): <NEW_LINE> <INDENT> self._context_free_name = context_free_name <NEW_LINE> <DEDENT> @property <NEW_LINE> def display_name(self): <NEW_LINE> <INDENT> return self._display_name <NEW_LINE> <DEDENT> @display_name.setter <NEW_LINE> def display_name(self, display_name): <NEW_LINE> <INDENT> self._display_name = display_name <NEW_LINE> <DEDENT> @property <NEW_LINE> def id(self): <NEW_LINE> <INDENT> return self._id <NEW_LINE> <DEDENT> @id.setter <NEW_LINE> def id(self, id): <NEW_LINE> <INDENT> self._id = id <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} 
<NEW_LINE> for attr, _ in six.iteritems(self.swagger_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> if issubclass(BrowseNodeAncestor, dict): <NEW_LINE> <INDENT> for key, value in self.items(): <NEW_LINE> <INDENT> result[key] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pprint.pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, BrowseNodeAncestor): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other | NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually. | 62598fb33317a56b869be5a0 |
class TGroup_User(db.Model): <NEW_LINE> <INDENT> __tablename__ = "tgroup_user" <NEW_LINE> targetgroup_id = db.Column( db.Integer, db.ForeignKey("targetgroup.id"), primary_key=True) <NEW_LINE> user_id = db.Column( db.Integer, db.ForeignKey("user.id"), primary_key=True) | Targetgroups contain several users | 62598fb376e4537e8c3ef64f |
class GeneralOutput: <NEW_LINE> <INDENT> def __init__(self, sitesData): <NEW_LINE> <INDENT> self.sitesData = sitesData <NEW_LINE> <DEDENT> def startProcessing(self): <NEW_LINE> <INDENT> pass | The GeneralOuptut class should be subclassed by the output plugin.
:param dict sitesData: The data from sites in the form of a dictionary. The keys should be the sites, and the values should be an array of times for the transfers.
.. _sitesData-label:
An example structure for ``sitesData`` is::
sitesData = {
"UCSDT2": [
{'starttime': "140192910", 'endtime': "140204950", 'successful': True},
{'starttime': "140105910", ...}
],
"Nebraska": [
{'starttime': ...}]}
...
The initialize function should also be used to initialize any structures required for processing. | 62598fb3a79ad1619776a112 |
class AccountLimits(Choreography): <NEW_LINE> <INDENT> def __init__(self, temboo_session): <NEW_LINE> <INDENT> Choreography.__init__(self, temboo_session, '/Library/FaceCom/AccountLimits') <NEW_LINE> <DEDENT> def new_input_set(self): <NEW_LINE> <INDENT> return AccountLimitsInputSet() <NEW_LINE> <DEDENT> def _make_result_set(self, result, path): <NEW_LINE> <INDENT> return AccountLimitsResultSet(result, path) <NEW_LINE> <DEDENT> def _make_execution(self, session, exec_id, path): <NEW_LINE> <INDENT> return AccountLimitsChoreographyExecution(session, exec_id, path) | Create a new instance of the AccountLimits Choreography. A TembooSession object, containing a valid
set of Temboo credentials, must be supplied. | 62598fb3be8e80087fbbf10f |
class OrderWidgetItem(QTreeWidgetItem): <NEW_LINE> <INDENT> def __init__(self, parent, order): <NEW_LINE> <INDENT> QTreeWidgetItem.__init__(self, parent) <NEW_LINE> self.order = order <NEW_LINE> order_time = datetime.datetime.fromtimestamp(int(order["timestamp"])).strftime('%Y-%m-%d %H:%M:%S') <NEW_LINE> self.setText(0, "%s" % order["order_number"]) <NEW_LINE> self.setText(1, order_time) <NEW_LINE> self.setText(2, "%g %s" % (order["price"], order["price_type"])) <NEW_LINE> self.setText(3, "%g %s" % (order["quantity"], order["quantity_type"])) <NEW_LINE> self.setText(4, "%g %s" % (order["traded_quantity"], order["quantity_type"])) <NEW_LINE> self.setText(5, "Sell" if order["is_ask"] else "Buy") <NEW_LINE> self.setText(6, "%s" % order["status"]) <NEW_LINE> <DEDENT> def __lt__(self, other): <NEW_LINE> <INDENT> column = self.treeWidget().sortColumn() <NEW_LINE> if column == 0: <NEW_LINE> <INDENT> return int(self.order["order_number"]) > int(self.order["order_number"]) <NEW_LINE> <DEDENT> if column == 1: <NEW_LINE> <INDENT> return int(self.order["timestamp"]) > int(other.order["timestamp"]) <NEW_LINE> <DEDENT> elif column == 2: <NEW_LINE> <INDENT> return float(self.order["price"]) > float(other.order["price"]) <NEW_LINE> <DEDENT> elif column == 3 or column == 4: <NEW_LINE> <INDENT> return float(self.order["quantity"]) > float(other.order["quantity"]) <NEW_LINE> <DEDENT> return self.text(column) > other.text(column) | This class represents a widget that displays an order. | 62598fb332920d7e50bc60fd |
@requested_by("streetart.tests.requestors.pages.CreatePostViewRequestor") <NEW_LINE> class CreatePostView(BaseFormView): <NEW_LINE> <INDENT> template_name = "pages/new_streetart_post.html" <NEW_LINE> form_class = NewStreetArtPostForm <NEW_LINE> def form_valid(self, form): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> image_processor = ImageProcessingHelper.from_in_memory_uploaded_file(form.files["image"]) <NEW_LINE> <DEDENT> except InvalidImageFileException as e: <NEW_LINE> <INDENT> raise ValidationError(e.message) <NEW_LINE> <DEDENT> latitude, longitude = image_processor.coordinates <NEW_LINE> post_uuid = str(uuid4()) <NEW_LINE> s3_helper = S3Helper.instance() <NEW_LINE> response = s3_helper.upload_file_to_bucket( local_file_path=form.files["image"].temporary_file_path(), key=post_uuid, bucket=settings.AWS_S3_BUCKET, ) <NEW_LINE> if response["ResponseMetadata"]["HTTPStatusCode"] != 200: <NEW_LINE> <INDENT> raise ValueError("Unable to upload image file to Amazon S3.") <NEW_LINE> <DEDENT> user = self.request.user if self.request.user.is_authenticated else None <NEW_LINE> self._create_object( latitude=latitude, longitude=longitude, title=form.cleaned_data["title"], description=form.cleaned_data["description"], s3_bucket=settings.AWS_S3_BUCKET, s3_key=post_uuid, uuid=post_uuid, user=user, ) <NEW_LINE> return super(CreatePostView, self).form_valid(form) <NEW_LINE> <DEDENT> def get_success_url(self): <NEW_LINE> <INDENT> return "/post-successful/%s/" % (self.created_object.uuid,) | This is a view for creating new street art posts. | 62598fb3fff4ab517ebcd890 |
class WinkLight(WinkDevice, Light): <NEW_LINE> <INDENT> def __init__(self, wink): <NEW_LINE> <INDENT> WinkDevice.__init__(self, wink) <NEW_LINE> <DEDENT> @property <NEW_LINE> def is_on(self): <NEW_LINE> <INDENT> return self.wink.state() <NEW_LINE> <DEDENT> @property <NEW_LINE> def brightness(self): <NEW_LINE> <INDENT> if self.wink.brightness() is not None: <NEW_LINE> <INDENT> return int(self.wink.brightness() * 255) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def rgb_color(self): <NEW_LINE> <INDENT> if not self.wink.supports_hue_saturation(): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> hue = self.wink.color_hue() <NEW_LINE> saturation = self.wink.color_saturation() <NEW_LINE> value = int(self.wink.brightness() * 255) <NEW_LINE> if hue is None or saturation is None or value is None: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> rgb = colorsys.hsv_to_rgb(hue, saturation, value) <NEW_LINE> r_value = int(round(rgb[0])) <NEW_LINE> g_value = int(round(rgb[1])) <NEW_LINE> b_value = int(round(rgb[2])) <NEW_LINE> return r_value, g_value, b_value <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def xy_color(self): <NEW_LINE> <INDENT> if not self.wink.supports_xy_color(): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> return self.wink.color_xy() <NEW_LINE> <DEDENT> @property <NEW_LINE> def color_temp(self): <NEW_LINE> <INDENT> if not self.wink.supports_temperature(): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> return color_util.color_temperature_kelvin_to_mired( self.wink.color_temperature_kelvin()) <NEW_LINE> <DEDENT> @property <NEW_LINE> def supported_features(self): <NEW_LINE> <INDENT> return SUPPORT_WINK <NEW_LINE> <DEDENT> def turn_on(self, **kwargs): <NEW_LINE> <INDENT> brightness = kwargs.get(ATTR_BRIGHTNESS) <NEW_LINE> rgb_color = kwargs.get(ATTR_RGB_COLOR) <NEW_LINE> color_temp_mired = kwargs.get(ATTR_COLOR_TEMP) <NEW_LINE> state_kwargs = { } 
<NEW_LINE> if rgb_color: <NEW_LINE> <INDENT> if self.wink.supports_xy_color(): <NEW_LINE> <INDENT> xyb = color_util.color_RGB_to_xy(*rgb_color) <NEW_LINE> state_kwargs['color_xy'] = xyb[0], xyb[1] <NEW_LINE> state_kwargs['brightness'] = xyb[2] <NEW_LINE> <DEDENT> elif self.wink.supports_hue_saturation(): <NEW_LINE> <INDENT> hsv = colorsys.rgb_to_hsv(rgb_color[0], rgb_color[1], rgb_color[2]) <NEW_LINE> state_kwargs['color_hue_saturation'] = hsv[0], hsv[1] <NEW_LINE> <DEDENT> <DEDENT> if color_temp_mired: <NEW_LINE> <INDENT> state_kwargs['color_kelvin'] = mired_to_kelvin(color_temp_mired) <NEW_LINE> <DEDENT> if brightness: <NEW_LINE> <INDENT> state_kwargs['brightness'] = brightness / 255.0 <NEW_LINE> <DEDENT> self.wink.set_state(True, **state_kwargs) <NEW_LINE> <DEDENT> def turn_off(self): <NEW_LINE> <INDENT> self.wink.set_state(False) | Representation of a Wink light. | 62598fb34a966d76dd5eef81 |
class FileHandler(LFileHandler): <NEW_LINE> <INDENT> pass | A output FileHandler. | 62598fb3d7e4931a7ef3c13e |
class Statistics(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.allTimers = [] <NEW_LINE> self.stat = 0 <NEW_LINE> <DEDENT> def arithmetic_mean(self, collection): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.allTimers = collection <NEW_LINE> self.stat = mean(collection) <NEW_LINE> <DEDENT> except StatisticsError: <NEW_LINE> <INDENT> self.stat = 0 <NEW_LINE> <DEDENT> return self.stat <NEW_LINE> <DEDENT> def harmonic_mean(self, collection): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.allTimers = collection <NEW_LINE> self.stat = harmonic_mean(collection) <NEW_LINE> <DEDENT> except StatisticsError: <NEW_LINE> <INDENT> self.stat = 0 <NEW_LINE> <DEDENT> return self.stat <NEW_LINE> <DEDENT> def median(self, collection): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.allTimers = collection <NEW_LINE> self.stat = median(collection) <NEW_LINE> <DEDENT> except StatisticsError: <NEW_LINE> <INDENT> self.stat = 0 <NEW_LINE> <DEDENT> return self.stat <NEW_LINE> <DEDENT> def median_low(self, collection): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.allTimers = collection <NEW_LINE> self.stat = median_low(collection) <NEW_LINE> <DEDENT> except StatisticsError: <NEW_LINE> <INDENT> self.stat = 0 <NEW_LINE> <DEDENT> return self.stat <NEW_LINE> <DEDENT> def median_high(self, collection): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.allTimers = collection <NEW_LINE> self.stat = median_high(collection) <NEW_LINE> <DEDENT> except StatisticsError: <NEW_LINE> <INDENT> self.stat = 0 <NEW_LINE> <DEDENT> return self.stat <NEW_LINE> <DEDENT> def median_grouped(self, collection): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.allTimers = collection <NEW_LINE> self.stat = median_grouped(collection) <NEW_LINE> <DEDENT> except StatisticsError: <NEW_LINE> <INDENT> self.stat = 0 <NEW_LINE> <DEDENT> return self.stat <NEW_LINE> <DEDENT> def mode(self, collection): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.allTimers = collection <NEW_LINE> 
self.stat = mode(collection) <NEW_LINE> <DEDENT> except StatisticsError: <NEW_LINE> <INDENT> self.stat = 0 <NEW_LINE> <DEDENT> return self.stat <NEW_LINE> <DEDENT> def std_dev(self, collection): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.allTimers = collection <NEW_LINE> self.stat = stdev(collection) <NEW_LINE> <DEDENT> except StatisticsError: <NEW_LINE> <INDENT> self.stat = 0 <NEW_LINE> <DEDENT> return self.stat <NEW_LINE> <DEDENT> def variance(self, collection): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.allTimers = collection <NEW_LINE> self.stat = variance(collection, xbar=None) <NEW_LINE> <DEDENT> except StatisticsError: <NEW_LINE> <INDENT> self.stat = 0 <NEW_LINE> <DEDENT> return self.stat <NEW_LINE> <DEDENT> def population_std_dev(self, collection): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.allTimers = collection <NEW_LINE> self.stat = pstdev(collection) <NEW_LINE> <DEDENT> except StatisticsError: <NEW_LINE> <INDENT> self.stat = 0 <NEW_LINE> <DEDENT> return self.stat <NEW_LINE> <DEDENT> def population_variance(self, collection): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.allTimers = collection <NEW_LINE> self.stat = pvariance(collection) <NEW_LINE> <DEDENT> except StatisticsError: <NEW_LINE> <INDENT> self.stat = 0 <NEW_LINE> <DEDENT> return self.stat | The library used to define statistical outputs | 62598fb338b623060ffa9146 |
class GamePlayTest(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.bot = unitility.AutoBot(['win']) <NEW_LINE> self.game = unitility.TestGame(self.bot, '') <NEW_LINE> <DEDENT> def testCleanUpBot(self): <NEW_LINE> <INDENT> self.game.play() <NEW_LINE> self.assertTrue(self.bot.all_done) <NEW_LINE> <DEDENT> def testCleanUpGame(self): <NEW_LINE> <INDENT> self.game.play() <NEW_LINE> self.assertTrue(self.game.all_done) <NEW_LINE> <DEDENT> def testForceLoss(self): <NEW_LINE> <INDENT> self.bot.replies = ['quit'] <NEW_LINE> self.game.play() <NEW_LINE> <DEDENT> def testForceWin(self): <NEW_LINE> <INDENT> self.bot.replies = ['quit+'] <NEW_LINE> self.game.play() <NEW_LINE> <DEDENT> def testGipfReset(self): <NEW_LINE> <INDENT> self.game.gipfed = ['cat'] <NEW_LINE> self.game.play() <NEW_LINE> self.assertFalse(self.game.gipfed) <NEW_LINE> <DEDENT> def testPlayerLoopRestart(self): <NEW_LINE> <INDENT> self.bot.replies = ['pass'] <NEW_LINE> other_bot = unitility.AutoBot(['pass', 'win']) <NEW_LINE> self.game.players = [other_bot, self.bot] <NEW_LINE> self.game.play() <NEW_LINE> self.assertEqual(0, self.game.player_index) <NEW_LINE> <DEDENT> def testPlayerLoopEnd(self): <NEW_LINE> <INDENT> other_bot = unitility.AutoBot(['pass']) <NEW_LINE> self.game.players = [other_bot, self.bot] <NEW_LINE> self.game.play() <NEW_LINE> self.assertEqual(1, self.game.player_index) <NEW_LINE> <DEDENT> def testScores(self): <NEW_LINE> <INDENT> self.game.play() <NEW_LINE> self.assertEqual({self.bot.name: 1}, self.game.scores) <NEW_LINE> <DEDENT> def testSetUpBot(self): <NEW_LINE> <INDENT> self.game.play() <NEW_LINE> self.assertTrue(self.bot.all_set) <NEW_LINE> <DEDENT> def testSetUpGame(self): <NEW_LINE> <INDENT> self.game.play() <NEW_LINE> self.assertTrue(self.game.all_set) <NEW_LINE> <DEDENT> def testTurnsContinue(self): <NEW_LINE> <INDENT> self.bot.replies = ['continue'] * 3 + ['next'] * 5 <NEW_LINE> random.shuffle(self.bot.replies) <NEW_LINE> 
self.bot.replies.append('lose') <NEW_LINE> self.game.play() <NEW_LINE> self.assertEqual(6, self.game.turns) <NEW_LINE> <DEDENT> def testTurnsMultiple(self): <NEW_LINE> <INDENT> self.bot.replies = ['question', 'everything', 'be', 'human', 'draw'] <NEW_LINE> self.game.play() <NEW_LINE> self.assertEqual(5, self.game.turns) <NEW_LINE> <DEDENT> def testTurnsSimple(self): <NEW_LINE> <INDENT> self.game.play() <NEW_LINE> self.assertEqual(1, self.game.turns) | Tests of playing the game. (unittest.TestCase) | 62598fb37b25080760ed755b |
class SMSOutputData(OutputData): <NEW_LINE> <INDENT> phone_number = None <NEW_LINE> def send(self): <NEW_LINE> <INDENT> logger.info("Sending via SMS") <NEW_LINE> vc = voicecall.Voicecall() <NEW_LINE> return vc.conduct_sms(self.phone_number, self.data, "outbound-sms") | Define necessary OutputData for sending via sms. | 62598fb363b5f9789fe85214 |
class DescribeInstancesResultSet(ResultSet): <NEW_LINE> <INDENT> def get_Response(self): <NEW_LINE> <INDENT> return self._output.get('Response', None) | Retrieve the value for the "Response" output from this choreography execution. ((xml) The response from Amazon.) | 62598fb38a43f66fc4bf2224 |
class PasswordAuthBackend(ModelBackend): <NEW_LINE> <INDENT> def authenticate(self, username=None, password=None): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> user = User.objects.get(username=username) <NEW_LINE> <DEDENT> except User.DoesNotExist: <NEW_LINE> <INDENT> User().set_password(password) <NEW_LINE> return None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if user.check_password(password) and self.user_can_authenticate(user): <NEW_LINE> <INDENT> return user <NEW_LINE> <DEDENT> <DEDENT> return None <NEW_LINE> <DEDENT> def get_user(self, user_id): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return User.objects.get(id=user_id) <NEW_LINE> <DEDENT> except User.DoesNotExist: <NEW_LINE> <INDENT> return None | Log in to Django without providing a password.
| 62598fb371ff763f4b5e781e |
class ParseLabelException(ParseBaseException): <NEW_LINE> <INDENT> pass | If dnspython can't decode this label, throw a stringy! | 62598fb3d268445f26639bd8 |
class OutputObserver(Callback): <NEW_LINE> <INDENT> def __init__(self, data, output_path, mask_colors, batch_size = 2, tmp_interval = 100): <NEW_LINE> <INDENT> self.epoch = 0 <NEW_LINE> self.data = data <NEW_LINE> self.output_path = output_path <NEW_LINE> self.mask_colors = mask_colors <NEW_LINE> if isinstance(data,(list,)): <NEW_LINE> <INDENT> data_len = data[0].shape[0] <NEW_LINE> data_out = data[0] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> data_len = data.shape[0] <NEW_LINE> data_out = data <NEW_LINE> <DEDENT> self.batch_size = np.minimum(batch_size,data_len) <NEW_LINE> self.tmp_interval = tmp_interval <NEW_LINE> data_out[:,:,:,0] += 103.939 <NEW_LINE> data_out[:,:,:,1] += 116.779 <NEW_LINE> data_out[:,:,:,2] += 123.68 <NEW_LINE> data_out = data_out.astype('uint8') <NEW_LINE> if data_out.shape[-1] == 2: <NEW_LINE> <INDENT> data_out = np.concatenate((data_out, np.zeros((data_out.shape[:3]+(1,)))), axis=-1) <NEW_LINE> <DEDENT> elif data_out.shape[-1] == 1: <NEW_LINE> <INDENT> data_out = np.concatenate((data_out, np.zeros((data_out.shape[:3]+(2,)))), axis=-1) <NEW_LINE> <DEDENT> for i in range(data_out.shape[0]): <NEW_LINE> <INDENT> cv2.imwrite(os.path.join(self.output_path,'input_{}.png'.format(i)),data_out[i,:,:,:]) <NEW_LINE> <DEDENT> <DEDENT> def labelVisualize(self, y_pred): <NEW_LINE> <INDENT> x = np.argmax(y_pred, axis=-1) <NEW_LINE> colour_codes = np.array(self.mask_colors) <NEW_LINE> img = colour_codes[x.astype('uint8')] <NEW_LINE> return img <NEW_LINE> <DEDENT> def on_train_begin(self, logs={}): <NEW_LINE> <INDENT> y_pred = self.model.predict(self.data, batch_size=self.batch_size) <NEW_LINE> img = self.labelVisualize(y_pred[0,:,:,:]) <NEW_LINE> cv2.imwrite(os.path.join(self.output_path,'init.png'),img[:,:,::-1]) <NEW_LINE> <DEDENT> def on_batch_end(self, batch, logs={}): <NEW_LINE> <INDENT> if batch % self.tmp_interval == 0: <NEW_LINE> <INDENT> y_pred = self.model.predict(self.data, batch_size=self.batch_size) <NEW_LINE> 
np.save(os.path.join(self.output_path,'tmp.npy'),y_pred) <NEW_LINE> img = self.labelVisualize(y_pred[0,:,:,:]) <NEW_LINE> cv2.imwrite(os.path.join(self.output_path,'tmp.png'),img[:,:,::-1]) <NEW_LINE> <DEDENT> <DEDENT> def on_epoch_end(self, epoch, logs={}): <NEW_LINE> <INDENT> self.epoch += 1 <NEW_LINE> y_pred = self.model.predict(self.data, batch_size=self.batch_size) <NEW_LINE> np.save(os.path.join(self.output_path,'epoch_{}_img.npy'.format(epoch)),y_pred) <NEW_LINE> for i in range(y_pred.shape[0]): <NEW_LINE> <INDENT> img = self.labelVisualize(y_pred[i,:,:,:]) <NEW_LINE> cv2.imwrite(os.path.join(self.output_path,'epoch_{}_img_{}.png'.format(epoch,i)),img[:,:,::-1]) | "
Callback to save segmentation predictions during training.
# Arguments:
data data that should be used for prediction
output_path directory where epoch predictions are to be stored
mask_colors class colors used for visualizing predictions
batch_size batch size used for prediction, default 2
tmp_interval save interval for tmp image in batches, default 100 | 62598fb3be7bc26dc9251eb1 |
class AdaptorIpV6RssHashProfile(ManagedObject): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> ManagedObject.__init__(self, "AdaptorIpV6RssHashProfile") <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def class_id(): <NEW_LINE> <INDENT> return "adaptorIpV6RssHashProfile" <NEW_LINE> <DEDENT> DN = "Dn" <NEW_LINE> IP_HASH = "IpHash" <NEW_LINE> RN = "Rn" <NEW_LINE> STATUS = "Status" <NEW_LINE> TCP_HASH = "TcpHash" | This class contains the relevant properties and constant supported by this MO. | 62598fb38e7ae83300ee914e |
class AutoUrlGetter(BaseUrlGetter): <NEW_LINE> <INDENT> def action(self): <NEW_LINE> <INDENT> print("开启自动爬取") <NEW_LINE> base_url = "http://weixin.sogou.com/weixin?type=1&query=###&ie=utf8&_sug_=n&_sug_type_=" <NEW_LINE> while True: <NEW_LINE> <INDENT> for record in self.biz_map_db.find_all(): <NEW_LINE> <INDENT> if 'last_update_time' in record and record['last_update_time'] > datetime.utcnow() - timedelta(hours=Constant.getConfig(Constant.SCRAWL_INTERVAL_HOUR)): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print("开始爬取公众号", record['name']) <NEW_LINE> account_id = record['account'] if 'account' in record else record['name']; <NEW_LINE> page_content = get_url_content( base_url.replace("###", account_id), 'seccodeImage', 'seccodeInput', 'submit', account_id, [ManaulAnalyser(), CodePlatformAnalyser(Constant.getConfig(Constant.VERIFY_CODE_MAP)['sougou'])]) <NEW_LINE> artical_page_url = get_count_from_page(page_content, record) <NEW_LINE> if artical_page_url: <NEW_LINE> <INDENT> page_content = get_url_content(artical_page_url, 'verify_img', 'input', 'bt', account_id, [ManaulAnalyser(), CodePlatformAnalyser( Constant.getConfig(Constant.VERIFY_CODE_MAP)['penguin'])]) <NEW_LINE> match_obj = re.findall(r'msgList =(.*?)\n', page_content) <NEW_LINE> if match_obj: <NEW_LINE> <INDENT> self.dataAnalyse('str', match_obj[0], Constant.getConfig(Constant.WX_HIS_PAGE_PATH)) <NEW_LINE> self.biz_map_db.db.update_one({'_id': record['_id']}, {'$set': {'last_update_time': datetime.utcnow()}}) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print('没找到文章数据') <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> print("没有搜索到公众号") <NEW_LINE> <DEDENT> time.sleep(60) <NEW_LINE> <DEDENT> <DEDENT> time.sleep(60 * 10) | 自动爬取公众号文章列表,不停的循环,每天爬一次
1、搜狗搜索微信公众号名字
2、找到对应的公众号链接
3、进入链接,获取前十条数据 | 62598fb3aad79263cf42e87d |
class SetUserCommand(UserCommand[SetUserRequest, UserResponse]): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def add_subparser(cls, name: str, subparsers: Any) -> ArgumentParser: <NEW_LINE> <INDENT> subparser: ArgumentParser = subparsers.add_parser( name, description=cls.__doc__, help='assign a password to a user') <NEW_LINE> subparser.add_argument('--password-file', type=FileType('r'), metavar='FILE', help='read the password from a file') <NEW_LINE> subparser.add_argument('--param', action='append', dest='params', default=[], metavar='KEY=VAL', help='additional parameters for the request') <NEW_LINE> subparser.add_argument('--no-password', action='store_true', help='send the request with no password value') <NEW_LINE> subparser.add_argument('username', help='the user name') <NEW_LINE> return subparser <NEW_LINE> <DEDENT> @property <NEW_LINE> def method(self) -> MethodProtocol[SetUserRequest, UserResponse]: <NEW_LINE> <INDENT> return self.client.SetUser <NEW_LINE> <DEDENT> def getpass(self) -> str | None: <NEW_LINE> <INDENT> if self.args.no_password: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> elif self.args.password_file: <NEW_LINE> <INDENT> line: str = self.args.password_file.readline() <NEW_LINE> return line.rstrip('\r\n') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return getpass.getpass() <NEW_LINE> <DEDENT> <DEDENT> def build_request(self) -> SetUserRequest: <NEW_LINE> <INDENT> args = self.args <NEW_LINE> params = self._parse_params(args.params) <NEW_LINE> password = self.getpass() <NEW_LINE> new_data = UserData(params=params) <NEW_LINE> if password is not None: <NEW_LINE> <INDENT> new_data.password = password <NEW_LINE> <DEDENT> return SetUserRequest(user=args.username, data=new_data) <NEW_LINE> <DEDENT> def _parse_params(self, params: Sequence[str]) -> Mapping[str, str]: <NEW_LINE> <INDENT> ret = {} <NEW_LINE> for param in params: <NEW_LINE> <INDENT> key, splitter, val = param.partition('=') <NEW_LINE> if not splitter: <NEW_LINE> <INDENT> raise 
ValueError(f'Expected key=val format: {param!r}') <NEW_LINE> <DEDENT> ret[key] = val <NEW_LINE> <DEDENT> return ret | Set the metadata for a user, creating it if it does not exist. | 62598fb3627d3e7fe0e06f59 |
class MongoProxy: <NEW_LINE> <INDENT> def __init__(self, conn, logger=None, wait_time=None): <NEW_LINE> <INDENT> if logger is None: <NEW_LINE> <INDENT> import logging <NEW_LINE> logger = logging.getLogger(__name__) <NEW_LINE> <DEDENT> self.conn = conn <NEW_LINE> self.logger = logger <NEW_LINE> self.wait_time = wait_time <NEW_LINE> <DEDENT> def __getitem__(self, key): <NEW_LINE> <INDENT> item = self.conn[key] <NEW_LINE> if hasattr(item, '__call__'): <NEW_LINE> <INDENT> return MongoProxy(item, self.logger, self.wait_time) <NEW_LINE> <DEDENT> return item <NEW_LINE> <DEDENT> def __getattr__(self, key): <NEW_LINE> <INDENT> attr = getattr(self.conn, key) <NEW_LINE> if hasattr(attr, '__call__'): <NEW_LINE> <INDENT> if key in EXECUTABLE_MONGO_METHODS: <NEW_LINE> <INDENT> return Executable(attr, self.logger, self.wait_time) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return MongoProxy(attr, self.logger, self.wait_time) <NEW_LINE> <DEDENT> <DEDENT> return attr <NEW_LINE> <DEDENT> def __call__(self, *args, **kwargs): <NEW_LINE> <INDENT> return self.conn(*args, **kwargs) <NEW_LINE> <DEDENT> def __dir__(self): <NEW_LINE> <INDENT> return dir(self.conn) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.conn.__str__() <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.conn.__repr__() <NEW_LINE> <DEDENT> def __nonzero__(self): <NEW_LINE> <INDENT> return True | Proxy for MongoDB connection.
Methods that are executable, i.e find, insert etc, get wrapped in an
Executable-instance that handles AutoReconnect-exceptions transparently. | 62598fb391f36d47f2230efd |
class Connection(base.Connection): <NEW_LINE> <INDENT> CAPABILITIES = utils.update_nested(base.Connection.CAPABILITIES, COMMON_AVAILABLE_CAPABILITIES) <NEW_LINE> STORAGE_CAPABILITIES = utils.update_nested( base.Connection.STORAGE_CAPABILITIES, AVAILABLE_STORAGE_CAPABILITIES, ) <NEW_LINE> def get_meters(self, user=None, project=None, resource=None, source=None, metaquery=None, limit=None): <NEW_LINE> <INDENT> if limit == 0: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> metaquery = pymongo_utils.improve_keys(metaquery, metaquery=True) or {} <NEW_LINE> q = {} <NEW_LINE> if user is not None: <NEW_LINE> <INDENT> q['user_id'] = user <NEW_LINE> <DEDENT> if project is not None: <NEW_LINE> <INDENT> q['project_id'] = project <NEW_LINE> <DEDENT> if resource is not None: <NEW_LINE> <INDENT> q['_id'] = resource <NEW_LINE> <DEDENT> if source is not None: <NEW_LINE> <INDENT> q['source'] = source <NEW_LINE> <DEDENT> q.update(metaquery) <NEW_LINE> count = 0 <NEW_LINE> for r in self.db.resource.find(q): <NEW_LINE> <INDENT> for r_meter in r['meter']: <NEW_LINE> <INDENT> if limit and count >= limit: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> count += 1 <NEW_LINE> <DEDENT> yield models.Meter( name=r_meter['counter_name'], type=r_meter['counter_type'], unit=r_meter.get('counter_unit', ''), resource_id=r['_id'], project_id=r['project_id'], source=r['source'], user_id=r['user_id'], ) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def get_samples(self, sample_filter, limit=None): <NEW_LINE> <INDENT> if limit == 0: <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> q = pymongo_utils.make_query_from_filter(sample_filter, require_meter=False) <NEW_LINE> return self._retrieve_samples(q, [("timestamp", pymongo.DESCENDING)], limit) <NEW_LINE> <DEDENT> def query_samples(self, filter_expr=None, orderby=None, limit=None): <NEW_LINE> <INDENT> if limit == 0: <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> query_filter = {} <NEW_LINE> orderby_filter = [("timestamp", 
pymongo.DESCENDING)] <NEW_LINE> transformer = pymongo_utils.QueryTransformer() <NEW_LINE> if orderby is not None: <NEW_LINE> <INDENT> orderby_filter = transformer.transform_orderby(orderby) <NEW_LINE> <DEDENT> if filter_expr is not None: <NEW_LINE> <INDENT> query_filter = transformer.transform_filter(filter_expr) <NEW_LINE> <DEDENT> return self._retrieve_samples(query_filter, orderby_filter, limit) <NEW_LINE> <DEDENT> def _retrieve_samples(self, query, orderby, limit): <NEW_LINE> <INDENT> if limit is not None: <NEW_LINE> <INDENT> samples = self.db.meter.find(query, limit=limit, sort=orderby) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> samples = self.db.meter.find(query, sort=orderby) <NEW_LINE> <DEDENT> for s in samples: <NEW_LINE> <INDENT> del s['_id'] <NEW_LINE> s['counter_unit'] = s.get('counter_unit', '') <NEW_LINE> s['counter_volume'] = float(s.get('counter_volume')) <NEW_LINE> s['recorded_at'] = s.get('recorded_at') <NEW_LINE> if s.get('resource_metadata'): <NEW_LINE> <INDENT> s['resource_metadata'] = pymongo_utils.unquote_keys( s.get('resource_metadata')) <NEW_LINE> <DEDENT> yield models.Sample(**s) | Base Connection class for MongoDB and DB2 drivers. | 62598fb3a8370b77170f0487 |
class FeatureMessageBlock(blocks.StructBlock): <NEW_LINE> <INDENT> text = blocks.RichTextBlock() <NEW_LINE> link_url = URLOrPageBlock(required=False) <NEW_LINE> link_text = blocks.CharBlock(required=False) <NEW_LINE> link_button = blocks.ChoiceBlock(choices=FAIR_BUTTON_CHOICES, required=False) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> icon = 'openquote' <NEW_LINE> template = "pages/blocks/feature_message.html" | Shows a callout. Can be used in several positions of a StreamField. | 62598fb356b00c62f0fb2962 |
@wraps_model(GuildMember, alias='member') <NEW_LINE> class GuildMemberAdd(GatewayEvent): <NEW_LINE> <INDENT> pass | Sent when a user joins a guild.
Attributes
----------
member : :class:`disco.types.guild.GuildMember`
The member that has joined the guild. | 62598fb34527f215b58e9f7f |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.