code stringlengths 4 4.48k | docstring stringlengths 1 6.45k | _id stringlengths 24 24 |
|---|---|---|
class RequestPayloadError(Exception): <NEW_LINE> <INDENT> pass | Payload parsing error. | 62598fb3a8370b77170f0474 |
class NewBertRegressionModel(NewBertModel): <NEW_LINE> <INDENT> class Config(NewBertModel.Config): <NEW_LINE> <INDENT> class InputConfig(ConfigBase): <NEW_LINE> <INDENT> tokens: BERTTensorizer.Config = BERTTensorizer.Config( columns=["text1", "text2"], max_seq_len=128 ) <NEW_LINE> labels: NumericLabelTensorizer.Config = NumericLabelTensorizer.Config() <NEW_LINE> <DEDENT> inputs: InputConfig = InputConfig() <NEW_LINE> output_layer: RegressionOutputLayer.Config = RegressionOutputLayer.Config() <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_config(cls, config: Config, tensorizers: Dict[str, Tensorizer]): <NEW_LINE> <INDENT> vocab = tensorizers["tokens"].vocab <NEW_LINE> encoder = create_module( config.encoder, padding_idx=vocab.get_pad_index(), vocab_size=vocab.__len__(), ) <NEW_LINE> decoder = create_module( config.decoder, in_dim=encoder.representation_dim, out_dim=1 ) <NEW_LINE> output_layer = RegressionOutputLayer.from_config(config.output_layer) <NEW_LINE> return cls(encoder, decoder, output_layer) <NEW_LINE> <DEDENT> def __init__(self, encoder, decoder, output_layer) -> None: <NEW_LINE> <INDENT> super().__init__(encoder, decoder, output_layer) <NEW_LINE> log_class_usage(__class__) | BERT single sentence (or concatenated sentences) regression. | 62598fb37047854f4633f472 |
class HoursAssignmentRequirement(Model): <NEW_LINE> <INDENT> def __init__(self, grade: Grade=None, grade_type: GradeType=None, grade_type_category: GradeTypeCategory=None): <NEW_LINE> <INDENT> self.openapi_types = { 'grade': Grade, 'grade_type': GradeType, 'grade_type_category': GradeTypeCategory } <NEW_LINE> self.attribute_map = { 'grade': 'grade', 'grade_type': 'gradeType', 'grade_type_category': 'gradeTypeCategory' } <NEW_LINE> self._grade = grade <NEW_LINE> self._grade_type = grade_type <NEW_LINE> self._grade_type_category = grade_type_category <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_dict(cls, dikt: dict) -> 'HoursAssignmentRequirement': <NEW_LINE> <INDENT> return util.deserialize_model(dikt, cls) <NEW_LINE> <DEDENT> @property <NEW_LINE> def grade(self): <NEW_LINE> <INDENT> return self._grade <NEW_LINE> <DEDENT> @grade.setter <NEW_LINE> def grade(self, grade): <NEW_LINE> <INDENT> self._grade = grade <NEW_LINE> <DEDENT> @property <NEW_LINE> def grade_type(self): <NEW_LINE> <INDENT> return self._grade_type <NEW_LINE> <DEDENT> @grade_type.setter <NEW_LINE> def grade_type(self, grade_type): <NEW_LINE> <INDENT> self._grade_type = grade_type <NEW_LINE> <DEDENT> @property <NEW_LINE> def grade_type_category(self): <NEW_LINE> <INDENT> return self._grade_type_category <NEW_LINE> <DEDENT> @grade_type_category.setter <NEW_LINE> def grade_type_category(self, grade_type_category): <NEW_LINE> <INDENT> self._grade_type_category = grade_type_category | NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
Do not edit the class manually. | 62598fb3283ffb24f3cf3924 |
class PycassaCluster(BaseCluster): <NEW_LINE> <INDENT> def __init__(self, hosts=None, keyspace=None, backend=Pycassa, **kwargs): <NEW_LINE> <INDENT> assert isinstance(hosts, collections.Iterable), 'hosts must be an iterable' <NEW_LINE> assert keyspace, 'keyspace must be set' <NEW_LINE> return super(PycassaCluster, self).__init__( hosts={ 0: { 'hosts': hosts, 'keyspace': keyspace, }, }, backend=backend, **kwargs ) | A PycassaCluster has a single host as pycassa internally handles routing
and communication within a set of nodes. | 62598fb37047854f4633f473 |
@attr('UNIT') <NEW_LINE> class TestSusbcriberChannel(PyonTestCase): <NEW_LINE> <INDENT> pass | SubscriberChannel is a blank for now | 62598fb356ac1b37e6302283 |
class AllowableList(Stringify): <NEW_LINE> <INDENT> def __init__(self, values): <NEW_LINE> <INDENT> self.values = values <NEW_LINE> <DEDENT> def to_wiki(self): <NEW_LINE> <INDENT> return "Allowed values: {0}".format(", ".join(self.values)) | Model of a allowableValues of type LIST
See https://github.com/wordnik/swagger-core/wiki/datatypes#complex-types | 62598fb3be383301e0253892 |
class RedisDirectTcpReplicationClientFactory(SynapseRedisFactory): <NEW_LINE> <INDENT> maxDelay = 5 <NEW_LINE> protocol = RedisSubscriber <NEW_LINE> def __init__( self, hs: "HomeServer", outbound_redis_connection: txredisapi.RedisProtocol ): <NEW_LINE> <INDENT> super().__init__( hs, uuid="subscriber", dbid=None, poolsize=1, replyTimeout=30, password=hs.config.redis.redis_password, ) <NEW_LINE> self.synapse_handler = hs.get_tcp_replication() <NEW_LINE> self.synapse_stream_name = hs.hostname <NEW_LINE> self.synapse_outbound_redis_connection = outbound_redis_connection <NEW_LINE> <DEDENT> def buildProtocol(self, addr: IAddress) -> RedisSubscriber: <NEW_LINE> <INDENT> p = super().buildProtocol(addr) <NEW_LINE> p = cast(RedisSubscriber, p) <NEW_LINE> p.synapse_handler = self.synapse_handler <NEW_LINE> p.synapse_outbound_redis_connection = self.synapse_outbound_redis_connection <NEW_LINE> p.synapse_stream_name = self.synapse_stream_name <NEW_LINE> return p | This is a reconnecting factory that connects to redis and immediately
subscribes to a stream.
Args:
hs
outbound_redis_connection: A connection to redis that will be used to
send outbound commands (this is separate to the redis connection
used to subscribe). | 62598fb355399d3f056265b2 |
class TestOrderCoupon(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def testOrderCoupon(self): <NEW_LINE> <INDENT> pass | OrderCoupon unit test stubs | 62598fb35166f23b2e243472 |
class DoubleDict(dict): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> if len(args) > 1: <NEW_LINE> <INDENT> raise TypeError('DoubleDict expected at most 1 arguments, got 2') <NEW_LINE> <DEDENT> super(DoubleDict, self).__init__(*args, **kwargs) <NEW_LINE> self._keys = self.keys if _PY3 else self.iterkeys <NEW_LINE> self._values = self.values if _PY3 else self.itervalues <NEW_LINE> self._items = self.items if _PY3 else self.iteritems <NEW_LINE> _check_valid_doubledict(self._values()) <NEW_LINE> <DEDENT> def _copyitems(self): <NEW_LINE> <INDENT> for k, v in self._items(): <NEW_LINE> <INDENT> yield deepcopy(k), deepcopy(v) <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def reverse(self): <NEW_LINE> <INDENT> return DoubleDict({v: k for k, v in self._copyitems()}) <NEW_LINE> <DEDENT> @reverse.setter <NEW_LINE> def reverse(self, value): <NEW_LINE> <INDENT> raise AttributeError('DoubleDict.reverse cannot be set') <NEW_LINE> <DEDENT> def __setitem__(self, item, value): <NEW_LINE> <INDENT> if not isinstance(value, Hashable): <NEW_LINE> <INDENT> raise ValueError('DoubleDict values must be hashable') <NEW_LINE> <DEDENT> if item not in self._keys(): <NEW_LINE> <INDENT> if value in self._values(): <NEW_LINE> <INDENT> raise ValueError('DoubleDict values must be unique') <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> if value in self._values() and self[item] != value: <NEW_LINE> <INDENT> raise ValueError('DoubleDict values must be unique') <NEW_LINE> <DEDENT> <DEDENT> super(DoubleDict, self).__setitem__(item, value) <NEW_LINE> <DEDENT> def update(self, *args, **kwargs): <NEW_LINE> <INDENT> if len(args) > 1: <NEW_LINE> <INDENT> raise TypeError('DoubleDict.update expected at most' '1 arguments, got 2') <NEW_LINE> <DEDENT> _check_update_doubledict(args, kwargs, self) <NEW_LINE> super(DoubleDict, self).update(*args, **kwargs) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> dict_repr = super(DoubleDict, self).__repr__() <NEW_LINE> 
return 'DoubleDict({})'.format(dict_repr) | Dictionary that enables reverse lookup.
>>> dd = DoubleDict([('a', 1), ('b', 2), ('c', 3)])
>>> dd == {'a': 1, 'b':2, 'c': 3}
True
>>> dd.reverse == {1: 'a', 2: 'b', 3: 'c'}
True
For this to work, dict values must be unique and hashable:
>>> dd = DoubleDict([('a', 1), ('b', 2), ('c', 1)])
Traceback (most recent call last):
...
ValueError: DoubleDict values must be unique
>>>
>>> dd = DoubleDict([('a', 1), ('b', 2), ('c', {'foo': 'bar'})])
Traceback (most recent call last):
...
ValueError: DoubleDict values must be hashable
DoubleDict.reverse is lazy, and also returns a DoubleDict. | 62598fb3fff4ab517ebcd87e |
class Switch(KlattComponent): <NEW_LINE> <INDENT> def __init__(self, mast): <NEW_LINE> <INDENT> KlattComponent.__init__(self, mast) <NEW_LINE> self.output = [] <NEW_LINE> self.output.append(np.zeros(self.mast.params["N_SAMP"])) <NEW_LINE> self.output.append(np.zeros(self.mast.params["N_SAMP"])) <NEW_LINE> <DEDENT> def send(self): <NEW_LINE> <INDENT> self.dests[0].receive(signal=self.output[0][:]) <NEW_LINE> self.dests[1].receive(signal=self.output[1][:]) <NEW_LINE> <DEDENT> def operate(self, choice): <NEW_LINE> <INDENT> for n in range(self.mast.params["N_SAMP"]): <NEW_LINE> <INDENT> if choice[n] == 0: <NEW_LINE> <INDENT> self.output[0][n] = self.input[n] <NEW_LINE> self.output[1][n] = 0 <NEW_LINE> <DEDENT> elif choice[n] == 1: <NEW_LINE> <INDENT> self.output[0][n] = 0 <NEW_LINE> self.output[1][n] = self.input[n] <NEW_LINE> <DEDENT> <DEDENT> self.send() <NEW_LINE> <DEDENT> def clean(self): <NEW_LINE> <INDENT> self.output = [] <NEW_LINE> self.output.append(np.zeros(self.mast.params["N_SAMP"])) <NEW_LINE> self.output.append(np.zeros(self.mast.params["N_SAMP"])) | Binary switch between two outputs.
Has two output signals (instead of one, as in other KlattComponents). Each
is connected to a different destination, and the operate() function
switches the input between the two possible outputs depending on a control
singal.
Arguments:
mast (KlattSynth): see parent class
Attributes:
output (list): List of two np.arrays as described above | 62598fb332920d7e50bc60ec |
class Gallery(models.Model): <NEW_LINE> <INDENT> title = models.CharField(u'Название', max_length=256) <NEW_LINE> create_date = models.DateField(u'Дата создания', default=datetime.date.today()) <NEW_LINE> preview_image = ImageRatioField('image', '221x100', verbose_name=u'Превью') <NEW_LINE> image = models.ImageField(verbose_name=u'Изображение галереи', upload_to='imagegallery/images', blank=True) <NEW_LINE> announce = models.TextField(u'Анонс', blank=True) <NEW_LINE> detail = models.TextField(u'Описание', blank=True) <NEW_LINE> order = models.PositiveIntegerField(u'Сортировка', default=0) <NEW_LINE> is_active = models.BooleanField(u'Активность', default=1) <NEW_LINE> def _short_announce(self): <NEW_LINE> <INDENT> return short_announce(self.announce, 150) <NEW_LINE> <DEDENT> _short_announce.short_description = u'Анонс' <NEW_LINE> _short_announce.allow_tags = True <NEW_LINE> def _show_galleryitems(self): <NEW_LINE> <INDENT> url = reverse('admin:imagegallery_galleryitem_changelist') + '?gallery=' + str(self.pk) <NEW_LINE> return '<a href="%s">=> К элементам галереи (%s)</a>' % (url, self.galleryitem_set.count()) <NEW_LINE> <DEDENT> _show_galleryitems.allow_tags = True <NEW_LINE> _show_galleryitems.short_description = u'Картинки' <NEW_LINE> def __unicode__(self): <NEW_LINE> <INDENT> return self.title <NEW_LINE> <DEDENT> class Meta: <NEW_LINE> <INDENT> verbose_name = u'Галерея' <NEW_LINE> verbose_name_plural = u'Галереи' <NEW_LINE> ordering = ['-id'] | Галерея | 62598fb344b2445a339b69be |
class Config: <NEW_LINE> <INDENT> base_dir: str <NEW_LINE> downloads_dir: str <NEW_LINE> jobs: int <NEW_LINE> platform: Platform <NEW_LINE> architecture: str <NEW_LINE> triple: str <NEW_LINE> native_config: "Config" <NEW_LINE> program_suffix: str <NEW_LINE> def __init__( self, base_dir: str, downloads_dir: str = None, jobs: int = 1, platform: Platform = None, architecture: str = None, triple: str = None, native_config: "Config" = None, program_suffix: str = "", ): <NEW_LINE> <INDENT> self.base_dir = os.path.realpath(base_dir) <NEW_LINE> if downloads_dir is None: <NEW_LINE> <INDENT> downloads_dir = os.path.join(base_dir, "downloads") <NEW_LINE> <DEDENT> self.downloads_dir = downloads_dir <NEW_LINE> self.jobs = jobs <NEW_LINE> self.platform = platform or Platform.get_platform(py_platform.system()) <NEW_LINE> self.architecture = architecture or py_platform.machine() <NEW_LINE> self.triple = triple or sysconfig.get_config_var("HOST_GNU_TYPE") <NEW_LINE> self.native_config = native_config or self <NEW_LINE> self.program_suffix = program_suffix <NEW_LINE> <DEDENT> @property <NEW_LINE> def dependencies_dir(self) -> str: <NEW_LINE> <INDENT> return os.path.join(self.base_dir, "dependencies") <NEW_LINE> <DEDENT> @property <NEW_LINE> def dependencies_src_dir(self) -> str: <NEW_LINE> <INDENT> return os.path.join(self.dependencies_dir, "src") <NEW_LINE> <DEDENT> @property <NEW_LINE> def dependencies_build_dir(self) -> str: <NEW_LINE> <INDENT> return os.path.join(self.dependencies_dir, "build") <NEW_LINE> <DEDENT> @property <NEW_LINE> def dependencies_install_dir(self) -> str: <NEW_LINE> <INDENT> return os.path.join(self.dependencies_dir, "install") <NEW_LINE> <DEDENT> @property <NEW_LINE> def dependencies_log_dir(self) -> str: <NEW_LINE> <INDENT> return os.path.join(self.dependencies_dir, "log") <NEW_LINE> <DEDENT> def is_freebsd(self) -> bool: <NEW_LINE> <INDENT> return self.platform == Platform.FREEBSD <NEW_LINE> <DEDENT> def is_linux(self) -> bool: <NEW_LINE> <INDENT> return 
self.platform == Platform.LINUX <NEW_LINE> <DEDENT> def is_macos(self) -> bool: <NEW_LINE> <INDENT> return self.platform == Platform.MACOS <NEW_LINE> <DEDENT> def is_mingw(self) -> bool: <NEW_LINE> <INDENT> return self.platform == Platform.MINGW <NEW_LINE> <DEDENT> @property <NEW_LINE> def make_command(self) -> str: <NEW_LINE> <INDENT> if self.is_freebsd(): <NEW_LINE> <INDENT> return "gmake" <NEW_LINE> <DEDENT> return "make" <NEW_LINE> <DEDENT> def create_directories(self): <NEW_LINE> <INDENT> os.makedirs(self.downloads_dir, exist_ok=True) <NEW_LINE> os.makedirs(self.dependencies_dir, exist_ok=True) <NEW_LINE> os.makedirs(self.dependencies_src_dir, exist_ok=True) <NEW_LINE> os.makedirs(self.dependencies_build_dir, exist_ok=True) <NEW_LINE> os.makedirs(self.dependencies_install_dir, exist_ok=True) <NEW_LINE> os.makedirs(self.dependencies_log_dir, exist_ok=True) <NEW_LINE> <DEDENT> def __repr__(self) -> str: <NEW_LINE> <INDENT> return f"<Config for {self.platform}>" | A class to store the configuration for a given platform. | 62598fb301c39578d7f12e12 |
class DbenchTest(DfuseTestBase): <NEW_LINE> <INDENT> def test_dbench(self): <NEW_LINE> <INDENT> self.add_pool(connect=False) <NEW_LINE> self.add_container(self.pool) <NEW_LINE> self.start_dfuse(self.hostlist_clients, self.pool, self.container) <NEW_LINE> dbench_cmd = Dbench(self.hostlist_clients, self.tmp) <NEW_LINE> dbench_cmd.get_params(self) <NEW_LINE> dbench_cmd.directory.update(self.dfuse.mount_dir.value) <NEW_LINE> try: <NEW_LINE> <INDENT> dbench_cmd.run() <NEW_LINE> <DEDENT> except CommandFailure as error: <NEW_LINE> <INDENT> self.log.error( "Dbench command %s failed on hosts %s", str(dbench_cmd), str(NodeSet.fromlist(dbench_cmd.hosts)), exc_info=error) <NEW_LINE> self.fail("Test was expected to pass but it failed.") <NEW_LINE> <DEDENT> self.stop_dfuse() <NEW_LINE> self.container.destroy() <NEW_LINE> self.pool.destroy() | Base Dbench test class.
:avocado: recursive | 62598fb371ff763f4b5e780c |
class Workflow(AWSObject): <NEW_LINE> <INDENT> resource_type = "AWS::Transfer::Workflow" <NEW_LINE> props: PropsDictType = { "Description": (str, False), "OnExceptionSteps": ([WorkflowStep], False), "Steps": ([WorkflowStep], True), "Tags": (Tags, False), } | `Workflow <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-transfer-workflow.html>`__ | 62598fb35fc7496912d482c8 |
class DivOperatorTerm(Term): <NEW_LINE> <INDENT> name = 'dw_div' <NEW_LINE> arg_types = ('opt_material', 'virtual') <NEW_LINE> @staticmethod <NEW_LINE> def function(out, mat, vg): <NEW_LINE> <INDENT> div_bf = vg.bfg <NEW_LINE> n_el, n_qp, dim, n_ep = div_bf.shape <NEW_LINE> div_bf = div_bf.reshape((n_el, n_qp, dim * n_ep, 1)) <NEW_LINE> div_bf = nm.ascontiguousarray(div_bf) <NEW_LINE> if mat is not None: <NEW_LINE> <INDENT> status = vg.integrate(out, mat * div_bf) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> status = vg.integrate(out, div_bf) <NEW_LINE> <DEDENT> return status <NEW_LINE> <DEDENT> def get_fargs(self, mat, virtual, mode=None, term_mode=None, diff_var=None, **kwargs): <NEW_LINE> <INDENT> vg, _ = self.get_mapping(virtual) <NEW_LINE> return mat, vg | :Description:
Weighted divergence term of a test function.
:Definition:
.. math::
\int_{\Omega} \nabla \cdot \ul{v} \mbox { or } \int_{\Omega} c \nabla \cdot \ul{v}
:Arguments:
material : :math:`c` (optional),
virtual : :math:`\ul{v}` | 62598fb356b00c62f0fb2950 |
class User(db.Model): <NEW_LINE> <INDENT> __tablename__ = "users" <NEW_LINE> id = db.Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4) <NEW_LINE> first_name = db.Column(db.String(80), nullable=False) <NEW_LINE> last_name = db.Column(db.String(80), nullable=False) <NEW_LINE> email = db.Column(db.String(120), unique=True, nullable=False) <NEW_LINE> username = db.Column(db.String(80), unique=True, nullable=False) <NEW_LINE> password = db.Column(db.String(100)) <NEW_LINE> created_on = db.Column(db.DateTime(timezone=True), server_default=db.func.now()) <NEW_LINE> updated_on = db.Column( db.DateTime(timezone=True), server_default=db.func.now(), server_onupdate=db.func.now(), ) <NEW_LINE> matrices = db.relationship("Matrix", order_by="desc(Matrix.created_on)") <NEW_LINE> def __repr__(self): <NEW_LINE> <INDENT> return "<User %r>" % self.username | Represents a User.
| 62598fb32ae34c7f260ab17b |
class ProductSerializer(CommonSerializer): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> model = Product | Product serializer. | 62598fb3097d151d1a2c10c7 |
class Account(CapBaseObject): <NEW_LINE> <INDENT> login = StringField('Login') <NEW_LINE> password = StringField('Password') <NEW_LINE> properties = Field('List of key/value properties', dict) <NEW_LINE> def __init__(self, id=None): <NEW_LINE> <INDENT> CapBaseObject.__init__(self, id) | Describe an account and its properties. | 62598fb355399d3f056265b3 |
class CppTranslator(ApiTranslator): <NEW_LINE> <INDENT> methods = { 'List': { '@equivalent': 'vector', 'push': '#push_back', 'insert': '#insert(%{begin}, %{0})', 'remove_at': '#erase(%{begin}, %{0})', 'length': '#size', 'slice': Slice, 'slice_from': Slice, 'slice_to': Slice, }, 'Dictionary': { '@equivalent': 'unordered_map' }, 'String': { '@equivalent': 'string', 'length': '#length', 'substr': '#substr' } } <NEW_LINE> functions = { 'io': { 'display': lambda *args: Node('_cpp_cout', args=list(args[:-1]), pseudo_type=args[-1]), 'read': Read, 'read_file': ReadFile, 'write_file': 'write_file' }, 'math': { 'ln': 'log', 'tan': 'tan' } } <NEW_LINE> dependencies = { 'List': { '@all': ['iostream', 'vector'], 'remove': 'algorithm' }, 'Dictionary': { '@all': ['iostream', 'unordered_map'] }, 'String': { '@all': ['iostream', 'string'] }, 'Set': { '@all': ['iostream', 'set'] }, 'Tuple': { '@all': ['iostream', 'pair', 'tuple'] }, 'io': { 'display': 'iostream', 'read': ['iostream', 'string'], 'read_file': ['iostream', 'fstream', 'string'], 'write_file': ['iostream', 'fstream', 'string'] }, 'math': { '@all': 'math' }, 'Exception': { '@all': ['stdexcept', 'exception'] } } <NEW_LINE> def begin_placeholder(self, receiver, *args, equivalent): <NEW_LINE> <INDENT> return method_call(receiver, 'begin', [], 'CppIterator') <NEW_LINE> <DEDENT> def end_placeholder(self, receiver, *args, equivalent): <NEW_LINE> <INDENT> return method_call(receiver, 'end', [], 'CppIterator') <NEW_LINE> <DEDENT> def new_placeholder(self, receiver, *args, equivalent): <NEW_LINE> <INDENT> return Node('new_instance', class_name=equivalent, args=[], pseudo_type=equivalent) | C++ api translator
The DSL is explained in the ApiTranslator docstring
C++ specific:
'%{begin}':
expands to `%{self}.begin()`, useful for vector methods
'%{end}':
expands to `%{self}.end()`, useful for vector methods
'%{new}':
expands to `new %{equivalent}` | 62598fb3a17c0f6771d5c2cf |
class ConnectionTimeout(ConnectionError): <NEW_LINE> <INDENT> def __str__(self): <NEW_LINE> <INDENT> return 'ConnectionTimeout caused by - %s(%s)' % ( self.info.__class__.__name__, self.info) | A network timeout. | 62598fb3e5267d203ee6b99d |
class ToOneField(FieldInUsePathMixin, ApiNameMixin, tastypie_fields.ToOneField): <NEW_LINE> <INDENT> def build_schema(self): <NEW_LINE> <INDENT> return { 'fields': self.to_class(self.get_api_name()).build_schema()['fields'], } | Extended tastypie ToOneField with support for nested schema and field
filtering. | 62598fb3f548e778e596b63d |
class FacetsAdmin(ModelView): <NEW_LINE> <INDENT> _can_create = True <NEW_LINE> _can_edit = True <NEW_LINE> _can_delete = True <NEW_LINE> column_list = ( 'collection', 'order', 'facet_name', ) <NEW_LINE> form_args = { 'collection': { 'validators': [ DataRequired(), ], 'allow_blank': False, 'query_factory': lambda: db.session.query(Collection).order_by(Collection.id), }, 'order': { 'validators': [ is_place_taken, DataRequired(), ], }, 'facet_name': { 'validators': [ is_module_facet_module, is_duplicated, DataRequired(), ], 'choices': LocalProxy(lambda: [ (facet_name, facet_name) for facet_name in facets.keys() ]), }, } <NEW_LINE> form_overrides = { 'facet_name': SelectField, 'order': IntegerField, } <NEW_LINE> column_default_sort = 'id_collection' <NEW_LINE> def __init__(self, app, *args, **kwargs): <NEW_LINE> <INDENT> super(FacetsAdmin, self).__init__(*args, **kwargs) | Flask-Admin module to manage facets configuration. | 62598fb31b99ca400228f57d |
class const(Stub): <NEW_LINE> <INDENT> _description_ = '<const>' <NEW_LINE> array_like = macro.Macro('const.array_like', const_array_like, callable=True, argnames=['ary']) | shared namespace
| 62598fb3dc8b845886d53650 |
class AnalyseGacsIgslOutput(AviTask): <NEW_LINE> <INDENT> query = AviParameter() <NEW_LINE> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super(AnalyseGacsIgslOutput, self).__init__(*args, **kwargs) <NEW_LINE> self.output_file_prefix = hashlib.md5(self.query).hexdigest() <NEW_LINE> logger.info('output_file_prefix: %s' % (self.output_file_prefix)) <NEW_LINE> <DEDENT> def get_output_filename(self): <NEW_LINE> <INDENT> return self.output_file_prefix + '_analysed' <NEW_LINE> <DEDENT> def output(self): <NEW_LINE> <INDENT> return AviLocalTarget(os.path.join(settings.OUTPUT_PATH, self.get_output_filename())) <NEW_LINE> <DEDENT> def requires(self): <NEW_LINE> <INDENT> return self.task_dependency(ExecuteQuery, output_file_prefix=self.output_file_prefix) <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> logger.info('Input VOTable file: %s' % self.input().path) <NEW_LINE> t = Table.read(self.input().path, format='votable') <NEW_LINE> df = pd.DataFrame(np.ma.filled(t.as_array()), columns=t.colnames) <NEW_LINE> gaiamagcols=['mag_bj', 'mag_g', 'mag_grvs', 'mag_rf'] <NEW_LINE> gaiadf = df[gaiamagcols] <NEW_LINE> cluster_pred = KMeans(n_clusters=4).fit_predict(gaiadf) <NEW_LINE> cluster_colours = {0:(123,50,148,.5), 1:(194,165,207,.5), 2:(166,219,160,.5), 3:(0,136,55,.5)} <NEW_LINE> hc_series = [{'name': 'Cluster %d' % cluster, 'showInLegend': True, 'color':'rgba(%s)' % ','.join([str(x) for x in colour]), 'data':gaiadf[['mag_bj', 'mag_g']][cluster_pred==cluster].sample(n=100).values.tolist()} for cluster,colour in sorted(cluster_colours.iteritems(), key=operator.itemgetter(0))] <NEW_LINE> analysis_context = {'gacs_result': True, 'gacs_dfdescription': gaiadf.describe().to_html(classes='table table-striped table-bordered table-hover'), 'gacs_hc_series': hc_series } <NEW_LINE> logger.debug(analysis_context) <NEW_LINE> with open(self.output().path, 'wb') as out: <NEW_LINE> <INDENT> json.dump(analysis_context, out) | Some cursory analysis of the the VOTable output by 
GACS. | 62598fb3e5267d203ee6b99e |
class MnliProcessor(DataProcessor): <NEW_LINE> <INDENT> def get_train_examples(self, data_dir): <NEW_LINE> <INDENT> return self._create_examples( self._read_tsv(os.path.join(data_dir, "train.tsv")), "train") <NEW_LINE> <DEDENT> def get_dev_examples(self, data_dir): <NEW_LINE> <INDENT> return self._create_examples( self._read_tsv(os.path.join(data_dir, "dev.tsv")), "dev") <NEW_LINE> <DEDENT> def get_test_examples(self, input_file): <NEW_LINE> <INDENT> return self._create_examples(self._read_tsv(input_file), "test") <NEW_LINE> <DEDENT> def get_custom_examples(self, input_file): <NEW_LINE> <INDENT> return self._create_examples(self._read_tsv(input_file), "custom") <NEW_LINE> <DEDENT> def get_labels(self): <NEW_LINE> <INDENT> return ["true", "fake"] <NEW_LINE> <DEDENT> def get_original_num_labels(self): <NEW_LINE> <INDENT> return 3 <NEW_LINE> <DEDENT> def _create_examples(self, lines, set_type): <NEW_LINE> <INDENT> examples = [] <NEW_LINE> sentence1_index = 0 <NEW_LINE> sentence2_index = 0 <NEW_LINE> for (i, line) in enumerate(lines): <NEW_LINE> <INDENT> if i == 0: <NEW_LINE> <INDENT> for j, token in enumerate(line): <NEW_LINE> <INDENT> if token.strip() == "sentence1": <NEW_LINE> <INDENT> sentence1_index = j <NEW_LINE> <DEDENT> elif token.strip() == "sentence2": <NEW_LINE> <INDENT> sentence2_index = j <NEW_LINE> <DEDENT> <DEDENT> continue <NEW_LINE> <DEDENT> guid = "%s-%s" % (set_type, tokenization.convert_to_unicode(line[0])) <NEW_LINE> text_a = tokenization.convert_to_unicode(line[sentence1_index]) <NEW_LINE> text_b = tokenization.convert_to_unicode(line[sentence2_index]) <NEW_LINE> if set_type == "test": <NEW_LINE> <INDENT> label = "true" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> label = tokenization.convert_to_unicode(line[-1]) <NEW_LINE> <DEDENT> examples.append( InputExample(guid=guid, text_a=text_a, text_b=text_b, label=label)) <NEW_LINE> <DEDENT> return examples | Processor for the MultiNLI data set (GLUE version). | 62598fb34e4d5625663724c0 |
class MouseLookTests(TestCase, PlayerCreationMixin): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.player = self.makePlayer(Vector(2, 4, -6)) <NEW_LINE> self.controller = PlayerController(self.player) <NEW_LINE> <DEDENT> def test_lookLeft(self): <NEW_LINE> <INDENT> self.controller.mouseMotion((100, 200), (-20, 20), None) <NEW_LINE> self.assertEquals( self.player.orientation.y, -20 * self.controller.mouseSensitivity) | Tests for mouse motion events which control the direction the player is
facing. | 62598fb3aad79263cf42e86d |
class EngineFacadeFixture(fixtures.Fixture): <NEW_LINE> <INDENT> def __init__(self, ctx_manager, engine, sessionmaker): <NEW_LINE> <INDENT> super(EngineFacadeFixture, self).__init__() <NEW_LINE> self._ctx_manager = ctx_manager <NEW_LINE> self._engine = engine <NEW_LINE> self._sessionmaker = sessionmaker <NEW_LINE> <DEDENT> def setUp(self): <NEW_LINE> <INDENT> super(EngineFacadeFixture, self).setUp() <NEW_LINE> self._existing_factory = self._ctx_manager._root_factory <NEW_LINE> self._ctx_manager._root_factory = enginefacade._TestTransactionFactory( self._engine, self._sessionmaker, apply_global=False, synchronous_reader=True) <NEW_LINE> self.addCleanup(self.cleanup) <NEW_LINE> <DEDENT> def cleanup(self): <NEW_LINE> <INDENT> self._ctx_manager._root_factory = self._existing_factory | Fixture to isolation EngineFacade during tests.
Because many elements of EngineFacade are based on globals, once
an engine facade has been initialized, all future code goes
through it. This means that the initialization of sqlite in
databases in our Database fixture will drive all connections to
sqlite. While that's fine in a production environment, during
testing this means we can't test againts multiple backends in the
same test run.
oslo.db does not yet support a reset mechanism here. This builds a
custom in tree engine facade fixture to handle this. Eventually
this will be added to oslo.db and this can be removed. Tracked by
https://bugs.launchpad.net/oslo.db/+bug/1548960 | 62598fb3be383301e0253894 |
class TestAuctionDetails(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def make_instance(self, include_optional): <NEW_LINE> <INDENT> if include_optional : <NEW_LINE> <INDENT> return AuctionDetails( current_price = null ) <NEW_LINE> <DEDENT> else : <NEW_LINE> <INDENT> return AuctionDetails( current_price = null, ) <NEW_LINE> <DEDENT> <DEDENT> def testAuctionDetails(self): <NEW_LINE> <INDENT> inst_req_only = self.make_instance(include_optional=False) <NEW_LINE> inst_req_and_optional = self.make_instance(include_optional=True) | AuctionDetails unit test stubs | 62598fb3009cb60464d015bc |
class Proxy(BaseHTTPRequestHandler): <NEW_LINE> <INDENT> server_inst = None <NEW_LINE> @staticmethod <NEW_LINE> def start(): <NEW_LINE> <INDENT> def start_proxy(): <NEW_LINE> <INDENT> Proxy.server_inst = TCPServer(('127.0.0.1', 0), Proxy) <NEW_LINE> port = Proxy.server_inst.socket.getsockname()[1] <NEW_LINE> kodiutils.set_setting('manifest_proxy_port', str(port)) <NEW_LINE> _LOGGER.debug('Listening on port %s', port) <NEW_LINE> Proxy.server_inst.serve_forever() <NEW_LINE> <DEDENT> thread = threading.Thread(target=start_proxy) <NEW_LINE> thread.start() <NEW_LINE> return thread <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def stop(): <NEW_LINE> <INDENT> if Proxy.server_inst: <NEW_LINE> <INDENT> Proxy.server_inst.shutdown() <NEW_LINE> <DEDENT> <DEDENT> def do_GET(self): <NEW_LINE> <INDENT> _LOGGER.debug('HTTP GET Request received for %s', self.path) <NEW_LINE> if not self.path.startswith('/manifest'): <NEW_LINE> <INDENT> self.send_response(404) <NEW_LINE> self.end_headers() <NEW_LINE> return <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> params = parse_qs(urlparse(self.path).query) <NEW_LINE> url = params.get('path')[0] <NEW_LINE> _LOGGER.debug('Proxying to %s', url) <NEW_LINE> response = requests.get(url=url) <NEW_LINE> self.send_response(response.status_code) <NEW_LINE> self.end_headers() <NEW_LINE> if response.status_code == 200: <NEW_LINE> <INDENT> self.wfile.write(self.modify_manifest(response.text).encode()) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.wfile.write(response.content) <NEW_LINE> <DEDENT> <DEDENT> except Exception as exc: <NEW_LINE> <INDENT> _LOGGER.exception(exc) <NEW_LINE> self.send_response(500) <NEW_LINE> self.end_headers() <NEW_LINE> <DEDENT> <DEDENT> @staticmethod <NEW_LINE> def modify_manifest(manifest): <NEW_LINE> <INDENT> def repl(matchobj): <NEW_LINE> <INDENT> adaptationset = matchobj.group(0) <NEW_LINE> adaptationset = re.sub(r' default_KID=\".*?\"', '', adaptationset) <NEW_LINE> if '<SegmentTemplate' in adaptationset: <NEW_LINE> 
<INDENT> match = re.search(r'<BaseURL>(.*?)</BaseURL>', adaptationset) <NEW_LINE> if match: <NEW_LINE> <INDENT> base_url = match.group(1) <NEW_LINE> adaptationset = re.sub(r'\s*?<BaseURL>.*?</BaseURL>', '', adaptationset) <NEW_LINE> adaptationset = re.sub(r'(<SegmentTemplate[^>]*?initialization=\")([^\"]*)(\"[^>]*?>)', r'\1' + base_url + r'\2\3', adaptationset) <NEW_LINE> adaptationset = re.sub(r'(<SegmentTemplate[^>]*?media=\")([^\"]*)(\"[^>]*?>)', r'\1' + base_url + r'\2\3', adaptationset) <NEW_LINE> <DEDENT> <DEDENT> return adaptationset <NEW_LINE> <DEDENT> output = re.sub(r'<AdaptationSet[^>]*>(.*?)</AdaptationSet>', repl, manifest, flags=re.DOTALL) <NEW_LINE> return output | Manifest Proxy to workaround a Inputstream Adaptive bug | 62598fb37c178a314d78d537 |
class Component: <NEW_LINE> <INDENT> SERIES = 0 <NEW_LINE> SHUNT = 1 <NEW_LINE> def __init__(self, connection_type): <NEW_LINE> <INDENT> self._connection_type = connection_type <NEW_LINE> self._input_freq = None <NEW_LINE> <DEDENT> @property <NEW_LINE> def impedance(self): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> @property <NEW_LINE> def input_freq(self): <NEW_LINE> <INDENT> return self._input_freq <NEW_LINE> <DEDENT> @property <NEW_LINE> def ABCD(self): <NEW_LINE> <INDENT> if self._connection_type == self.SERIES: <NEW_LINE> <INDENT> return Component._get_series_ABCD(self.impedance) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return Component._get_shunt_ABCD(self.impedance) <NEW_LINE> <DEDENT> <DEDENT> @input_freq.setter <NEW_LINE> def input_freq(self, omega): <NEW_LINE> <INDENT> self._input_freq = omega <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def _get_series_ABCD(impedance): <NEW_LINE> <INDENT> return np.array([[np.ones(impedance.size), impedance], [np.zeros(impedance.size), np.ones(impedance.size)]], dtype=np.complex) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def _get_shunt_ABCD(impedance): <NEW_LINE> <INDENT> return np.array([[np.ones(impedance.size), np.zeros(impedance.size)], [1 / impedance, np.ones(impedance.size)]], dtype=np.complex) | A basic electric component, all components have a connection_type
**abstract class** | 62598fb34428ac0f6e6585ba |
class GameObject: <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def register_subclass(cls, subcls): <NEW_LINE> <INDENT> if subcls.id_ in cls._subs: <NEW_LINE> <INDENT> raise ValueError('Subclass id repeats twice: {subcls.id_}!') <NEW_LINE> <DEDENT> cls._subs[subcls.id_] = subcls <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def get_instance(cls, id_=None): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return cls._subs[id_]() <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> raise ValueError(f'Subclass not specified correctly, got "{id_}", ' 'but {cls} does not have that subclass.') | The abstract parent of all game objects. | 62598fb3d58c6744b42dc326 |
class InformationDropoutLayer(Module): <NEW_LINE> <INDENT> def __init__(self, layer_type: Module, output_size: Tuple[int], max_alpha=1.0, *args, **kw) -> None: <NEW_LINE> <INDENT> super(InformationDropoutLayer, self).__init__() <NEW_LINE> self.max_alpha = max_alpha <NEW_LINE> self.layer, self.noise = lyrs = [layer_type(*args, **kw) for _ in '..'] <NEW_LINE> for l in lyrs: <NEW_LINE> <INDENT> kaiming_normal(l.weight) <NEW_LINE> <DEDENT> def pp(f): <NEW_LINE> <INDENT> return Parameter(f(1).type(TP.FloatTensor)) <NEW_LINE> <DEDENT> self.prior = NormalParameters(mean=pp(T.zeros), alpha=pp(T.ones)) <NEW_LINE> <DEDENT> def forward(self, inp: Variable) -> InfoActivations: <NEW_LINE> <INDENT> clean_values = F.softplus(self.layer(inp)) + 1e-4 <NEW_LINE> raw_noise = self.noise(inp) <NEW_LINE> print('noise stats:', raw_noise.mean().data[0], raw_noise.std().data[0]) <NEW_LINE> post_alpha = F.softplus(raw_noise) * self.max_alpha + 1e-3 <NEW_LINE> posterior = NormalParameters(mean=clean_values.log(), alpha=post_alpha) <NEW_LINE> kls = self.prior.kl_divergence(posterior) <NEW_LINE> return InfoActivations( activations=posterior.sample().exp(), kl=kls) | Information-dropout layer with softplus activations
Args (to `forward`):
`inp`: 'Activations to be passed through the layer'
Returns:
An `InfoActivations` instance. The `activations` field should be passed
on to the next layer. The `kl` field should be incorporated into the
loss, as in Achille & Soatto eq. (6), p. 5.
Using the softplus activation function instead of ReLU, because the math is
more solid for that. (No improper prior.)
Input is passed through a parallel layer of the same type and shape, to
generate standard deviations for multiplicative log-normal noise applied to
outputs.
Prior is independent log-normal for each activation, with learnable mean
and standard deviation constant over the activations. Initial mean /
standard deviation are 0, 1, respectively. | 62598fb332920d7e50bc60ee |
class UnsupportedFeaturesChecker(gast.NodeTransformer): <NEW_LINE> <INDENT> def visit_Global(self, node): <NEW_LINE> <INDENT> raise errors.AutoGraphError( 'The global keyword is not yet supported.') <NEW_LINE> <DEDENT> def visit_Nonlocal(self, node): <NEW_LINE> <INDENT> raise errors.AutoGraphError( 'The nonlocal keyword is not yet supported.') <NEW_LINE> <DEDENT> def visit_Yield(self, node): <NEW_LINE> <INDENT> raise errors.AutoGraphError( 'Generators are not supported by AutoGraph') <NEW_LINE> <DEDENT> def visit_YieldFrom(self, node): <NEW_LINE> <INDENT> raise errors.AutoGraphError( 'Generators are not supported by AutoGraph') | Quick check for Python features we know we don't support.
Any features detected will cause AutoGraph to not compile a function. | 62598fb35fdd1c0f98e5e028 |
class TestStops(TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.client = Client() <NEW_LINE> self.url = reverse("stop:fetch") <NEW_LINE> <DEDENT> def test_fetching_train_stations_connection(self): <NEW_LINE> <INDENT> data = { "input_unit": "centimetre", "output_unit": "metre", "input_value": round(8096.894, 3) } <NEW_LINE> response = self.client.get(self.url, data) <NEW_LINE> self.assertContains(response, 80.969) | This class contains tests that convert measurements from one
unit of measurement to another. | 62598fb34a966d76dd5eef72 |
class TaskViewSet(viewsets.ModelViewSet): <NEW_LINE> <INDENT> queryset = Task.objects.all() <NEW_LINE> serializer_class = serializers.TaskSerializer | API endpoint that allows authenticated users to view or edit tasks. | 62598fb360cbc95b063643e1 |
class Task: <NEW_LINE> <INDENT> def __init__(self, name): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.input = {} <NEW_LINE> self.output = {} <NEW_LINE> <DEDENT> def method(self): <NEW_LINE> <INDENT> return 'method' | A mola optimisation task | 62598fb3627d3e7fe0e06f49 |
class RK4_example_integrator: <NEW_LINE> <INDENT> with_jacobian = False <NEW_LINE> @staticmethod <NEW_LINE> def integrate_adaptive(rhs, jac, y0, x0, xend, dx0, **kwargs): <NEW_LINE> <INDENT> if kwargs: <NEW_LINE> <INDENT> warnings.warn("Ignoring keyword-argumtents: %s" % ', '.join(kwargs.keys())) <NEW_LINE> <DEDENT> xspan = xend - x0 <NEW_LINE> n = int(math.ceil(xspan/dx0)) <NEW_LINE> yout = [y0[:]] <NEW_LINE> xout = [x0] <NEW_LINE> k = [np.empty(len(y0)) for _ in range(4)] <NEW_LINE> for i in range(0, n+1): <NEW_LINE> <INDENT> x, y = xout[-1], yout[-1] <NEW_LINE> h = min(dx0, xend-x) <NEW_LINE> rhs(x, y, k[0]) <NEW_LINE> rhs(x + h/2, y + h/2*k[0], k[1]) <NEW_LINE> rhs(x + h/2, y + h/2*k[1], k[2]) <NEW_LINE> rhs(x + h, y + h*k[2], k[3]) <NEW_LINE> yout.append(y + h/6 * (k[0] + 2*k[1] + 2*k[2] + k[3])) <NEW_LINE> xout.append(x+h) <NEW_LINE> <DEDENT> return np.array(xout), np.array(yout), {'nfev': n*4} <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def integrate_predefined(rhs, jac, y0, xout, **kwargs): <NEW_LINE> <INDENT> if kwargs: <NEW_LINE> <INDENT> warnings.warn("Ignoring keyword-argumtents: %s" % ', '.join(kwargs.keys())) <NEW_LINE> <DEDENT> x_old = xout[0] <NEW_LINE> yout = [y0[:]] <NEW_LINE> k = [np.empty(len(y0)) for _ in range(4)] <NEW_LINE> for i, x in enumerate(xout[1:], 1): <NEW_LINE> <INDENT> y = yout[-1] <NEW_LINE> h = x - x_old <NEW_LINE> rhs(x_old, y, k[0]) <NEW_LINE> rhs(x_old + h/2, y + h/2*k[0], k[1]) <NEW_LINE> rhs(x_old + h/2, y + h/2*k[1], k[2]) <NEW_LINE> rhs(x_old + h, y + h*k[2], k[3]) <NEW_LINE> yout.append(y + h/6 * (k[0] + 2*k[1] + 2*k[2] + k[3])) <NEW_LINE> x_old = x <NEW_LINE> <DEDENT> return np.array(yout), {'nfev': (len(xout)-1)*4} | This is an example of how to implement a custom integrator.
It uses fixed step size and is usually not useful for real problems. | 62598fb330bbd722464699c6 |
class LightMapPack(Operator): <NEW_LINE> <INDENT> bl_idname = "uv.lightmap_pack" <NEW_LINE> bl_label = "Lightmap Pack" <NEW_LINE> bl_options = {'REGISTER', 'UNDO'} <NEW_LINE> PREF_CONTEXT = bpy.props.EnumProperty( name="Selection", items=(("SEL_FACES", "Selected Faces", "Space all UVs evently"), ("ALL_FACES", "All Faces", "Average space UVs edge length of each loop"), ("ALL_OBJECTS", "Selected Mesh Object", "Average space UVs edge length of each loop") ), ) <NEW_LINE> PREF_PACK_IN_ONE = BoolProperty( name="Share Tex Space", description=("Objects Share texture space, map all objects " "into 1 uvmap"), default=True, ) <NEW_LINE> PREF_NEW_UVLAYER = BoolProperty( name="New UV Layer", description="Create a new UV layer for every mesh packed", default=False, ) <NEW_LINE> PREF_APPLY_IMAGE = BoolProperty( name="New Image", description=("Assign new images for every mesh (only one if " "shared tex space enabled)"), default=False, ) <NEW_LINE> PREF_IMG_PX_SIZE = IntProperty( name="Image Size", description="Width and Height for the new image", min=64, max=5000, default=512, ) <NEW_LINE> PREF_BOX_DIV = IntProperty( name="Pack Quality", description="Pre Packing before the complex boxpack", min=1, max=48, default=12, ) <NEW_LINE> PREF_MARGIN_DIV = FloatProperty( name="Margin", description="Size of the margin as a division of the UV", min=0.001, max=1.0, default=0.1, ) <NEW_LINE> def execute(self, context): <NEW_LINE> <INDENT> kwargs = self.as_keywords() <NEW_LINE> PREF_CONTEXT = kwargs.pop("PREF_CONTEXT") <NEW_LINE> if PREF_CONTEXT == 'SEL_FACES': <NEW_LINE> <INDENT> kwargs["PREF_ACT_ONLY"] = True <NEW_LINE> kwargs["PREF_SEL_ONLY"] = True <NEW_LINE> <DEDENT> elif PREF_CONTEXT == 'ALL_FACES': <NEW_LINE> <INDENT> kwargs["PREF_ACT_ONLY"] = True <NEW_LINE> kwargs["PREF_SEL_ONLY"] = False <NEW_LINE> <DEDENT> elif PREF_CONTEXT == 'ALL_OBJECTS': <NEW_LINE> <INDENT> kwargs["PREF_ACT_ONLY"] = False <NEW_LINE> kwargs["PREF_SEL_ONLY"] = False <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> 
raise Exception("invalid context") <NEW_LINE> <DEDENT> kwargs["PREF_MARGIN_DIV"] = int(1.0 / (kwargs["PREF_MARGIN_DIV"] / 100.0)) <NEW_LINE> return unwrap(self, context, **kwargs) <NEW_LINE> <DEDENT> def invoke(self, context, event): <NEW_LINE> <INDENT> wm = context.window_manager <NEW_LINE> return wm.invoke_props_dialog(self) | Follow UVs from active quads along continuous face loops | 62598fb3e1aae11d1e7ce871 |
class nn_se_rSTWavMSE512(p40): <NEW_LINE> <INDENT> blstm_layers = 1 <NEW_LINE> lstm_layers = 1 <NEW_LINE> use_wav_as_feature = True <NEW_LINE> frame_length = 512 <NEW_LINE> frame_step = 128 <NEW_LINE> fft_dot = 512 <NEW_LINE> loss_weight = [100.0] <NEW_LINE> GPU_PARTION = 0.3 | cnn1blstm1lstm
short time wav as feature | 62598fb3236d856c2adc948c |
class IShoppingLists(Interface): <NEW_LINE> <INDENT> lists = Attribute("list") | Marker interface for Shopping Lists. | 62598fb356b00c62f0fb2952 |
class Model(dict): <NEW_LINE> <INDENT> __metaclass__ = ModelType <NEW_LINE> id = None <NEW_LINE> name = None <NEW_LINE> objects = None <NEW_LINE> def __init__(self, manager=None, id=None, **fields): <NEW_LINE> <INDENT> self.objects = manager <NEW_LINE> self.id = id <NEW_LINE> dict.__init__(self, self.prepare_fields(fields)) <NEW_LINE> <DEDENT> def __reduce__(self): <NEW_LINE> <INDENT> return (_unpickle_model, (self.__class__, self.id, dict(self)), None) <NEW_LINE> <DEDENT> def save(self): <NEW_LINE> <INDENT> id = self.id or self.objects.id(self.name) <NEW_LINE> self.objects[id] = self.prepare_save(dict(self)) <NEW_LINE> self.id = id <NEW_LINE> self.post_save() <NEW_LINE> return id <NEW_LINE> <DEDENT> def delete(self): <NEW_LINE> <INDENT> del(self.objects[self.id]) <NEW_LINE> self.post_delete() <NEW_LINE> <DEDENT> def prepare_save(self, fields): <NEW_LINE> <INDENT> return fields <NEW_LINE> <DEDENT> def prepare_fields(self, fields): <NEW_LINE> <INDENT> return fields <NEW_LINE> <DEDENT> def post_save(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def post_delete(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "<%s %s>" % (self.id, super(Model, self).__repr__()) <NEW_LINE> <DEDENT> def __getattr__(self, key): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self[key] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> raise AttributeError(key) <NEW_LINE> <DEDENT> <DEDENT> def __setattr__(self, key, value): <NEW_LINE> <INDENT> if key not in self._protected: <NEW_LINE> <INDENT> self[key] = value <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> object.__setattr__(self, key, value) | A Model.
:param manager: The :class:`Manager` for this model.
:keyword id: Id of the entry, ``None`` if the entry has not been
created yet.
:keyword \*\*fields: Values of the entry.
.. attribute:: name
Name of the model.
**REQUIRED**
All models needs a name, this name is used to keep track of ids
related to this model.
.. attribute:: id
The unique id for this entry.
If the entry does not have an id, it means the entry has not yet
been created and a new id will be automatically assigned when saved.
.. attribute:: objects
:class:`Manager` instance for this model. | 62598fb3e5267d203ee6b99f |
class NamedMasters(iscconf.Clause, _Masters): <NEW_LINE> <INDENT> def __init__(self, masters_name, port=None, comment=None): <NEW_LINE> <INDENT> additional = [masters_name] <NEW_LINE> if port: <NEW_LINE> <INDENT> additional.extend(['port', port]) <NEW_LINE> <DEDENT> iscconf.Clause.__init__(self, 'masters', tuple(additional), comment=comment) | Class for named BIND masters clause.
This is different from a Masters object because it has a name and
is allowed only in the global context (as an element of a BINDConf
object). | 62598fb3ec188e330fdf892b |
class LoggingException(object): <NEW_LINE> <INDENT> thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag') <NEW_LINE> __repr__ = _swig_repr <NEW_LINE> __swig_destroy__ = _math.delete_LoggingException <NEW_LINE> def getMessage(self): <NEW_LINE> <INDENT> return _math.LoggingException_getMessage(self) <NEW_LINE> <DEDENT> def __lshift__(self, v): <NEW_LINE> <INDENT> return _math.LoggingException___lshift__(self, v) <NEW_LINE> <DEDENT> def __init__(self, *args): <NEW_LINE> <INDENT> this = _math.new_LoggingException(*args) <NEW_LINE> try: <NEW_LINE> <INDENT> self.this.append(this) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> self.this = this | Proxy of C++ nupic::LoggingException class | 62598fb3442bda511e95c4f3 |
class MolPrintObjInteract(Operator): <NEW_LINE> <INDENT> bl_idname = "mesh.molprint_objinteract" <NEW_LINE> bl_label = "Convert name list to object list" <NEW_LINE> def execute(self, context): <NEW_LINE> <INDENT> interaction_list = [] <NEW_LINE> for each in bpy.context.scene.molprint_lists.internames["pairs"]: <NEW_LINE> <INDENT> pair = [] <NEW_LINE> for name in each: <NEW_LINE> <INDENT> pair.append(bpy.data.objects[name]) <NEW_LINE> <DEDENT> interaction_list.append(pair) <NEW_LINE> <DEDENT> bpy.context.scene.molprint_lists.interactionlist = interaction_list <NEW_LINE> return {'FINISHED'} | Make a name list into an object list | 62598fb34e4d5625663724c2 |
class OcclusionRanking(__Base__): <NEW_LINE> <INDENT> __tablename__ = 'occlusionrankings' <NEW_LINE> __table_args__ = {'autoload': True} | A single occlusion ranking. | 62598fb33539df3088ecc34e |
class Options(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.output_path = None <NEW_LINE> self.zip_to_output = None <NEW_LINE> self.toco = None <NEW_LINE> self.known_bugs_are_errors = False <NEW_LINE> self.ignore_converter_errors = False <NEW_LINE> self.save_graphdefs = False <NEW_LINE> self.run_with_flex = False <NEW_LINE> self.make_edgetpu_tests = False <NEW_LINE> self.tflite_convert_function = None <NEW_LINE> self.known_bugs = KNOWN_BUGS <NEW_LINE> self.make_forward_compat_test = False <NEW_LINE> self.no_tests_limit = False <NEW_LINE> self.no_conversion_report = False <NEW_LINE> self.multi_gen_state = None <NEW_LINE> self.use_experimental_converter = False <NEW_LINE> self.mlir_quantizer = False <NEW_LINE> self.expected_ops_in_converted_model = [] | All options for example generation. | 62598fb3cc0a2c111447b0af |
class StageModified(Stage): <NEW_LINE> <INDENT> SHORTCUT = 'Ctrl+S' <NEW_LINE> @staticmethod <NEW_LINE> def name(): <NEW_LINE> <INDENT> return N_('Stage Modified') <NEW_LINE> <DEDENT> def __init__(self): <NEW_LINE> <INDENT> Stage.__init__(self, None) <NEW_LINE> self.paths = self.model.modified | Stage all modified files. | 62598fb3460517430c4320ac |
class getApkFiles_args(object): <NEW_LINE> <INDENT> thrift_spec = ( None, (1, TType.STRUCT, 'predicate', (LinkPredicate, LinkPredicate.thrift_spec), None, ), (2, TType.I32, 'pendings', None, None, ), ) <NEW_LINE> def __init__(self, predicate=None, pendings=None,): <NEW_LINE> <INDENT> self.predicate = predicate <NEW_LINE> self.pendings = pendings <NEW_LINE> <DEDENT> def read(self, iprot): <NEW_LINE> <INDENT> if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None: <NEW_LINE> <INDENT> fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec)) <NEW_LINE> return <NEW_LINE> <DEDENT> iprot.readStructBegin() <NEW_LINE> while True: <NEW_LINE> <INDENT> (fname, ftype, fid) = iprot.readFieldBegin() <NEW_LINE> if ftype == TType.STOP: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> if fid == 1: <NEW_LINE> <INDENT> if ftype == TType.STRUCT: <NEW_LINE> <INDENT> self.predicate = LinkPredicate() <NEW_LINE> self.predicate.read(iprot) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> elif fid == 2: <NEW_LINE> <INDENT> if ftype == TType.I32: <NEW_LINE> <INDENT> self.pendings = iprot.readI32() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> iprot.readFieldEnd() <NEW_LINE> <DEDENT> iprot.readStructEnd() <NEW_LINE> <DEDENT> def write(self, oprot): <NEW_LINE> <INDENT> if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None: <NEW_LINE> <INDENT> oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))) <NEW_LINE> return <NEW_LINE> <DEDENT> oprot.writeStructBegin('getApkFiles_args') <NEW_LINE> if self.predicate is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('predicate', TType.STRUCT, 1) 
<NEW_LINE> self.predicate.write(oprot) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> if self.pendings is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('pendings', TType.I32, 2) <NEW_LINE> oprot.writeI32(self.pendings) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> oprot.writeFieldStop() <NEW_LINE> oprot.writeStructEnd() <NEW_LINE> <DEDENT> def validate(self): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> L = ['%s=%r' % (key, value) for key, value in self.__dict__.iteritems()] <NEW_LINE> return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not (self == other) | Attributes:
- predicate
- pendings | 62598fb3d7e4931a7ef3c130 |
class TreeInfoModel: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.tree_store = Gtk.TreeStore( GObject.TYPE_STRING, GObject.TYPE_BOOLEAN, GObject.TYPE_PYOBJECT, GObject.TYPE_STRING) <NEW_LINE> if not self.tree_store: <NEW_LINE> <INDENT> raise Exception("cannot create tree store") <NEW_LINE> <DEDENT> self.row_changed_handler_id = None <NEW_LINE> self.refresh_rows() <NEW_LINE> self.tree_store.set_sort_func(3, self.sort_func) <NEW_LINE> self.tree_store.set_sort_column_id(3, Gtk.SortType.ASCENDING) <NEW_LINE> <DEDENT> def get_model(self): <NEW_LINE> <INDENT> return self.tree_store <NEW_LINE> <DEDENT> def on_row_changed(self, __treemodel, path, __iter): <NEW_LINE> <INDENT> parent = self.tree_store[path[0]][2] <NEW_LINE> child = None <NEW_LINE> if len(path) == 2: <NEW_LINE> <INDENT> child = self.tree_store[path][2] <NEW_LINE> <DEDENT> value = self.tree_store[path][1] <NEW_LINE> options.set_tree(parent, child, value) <NEW_LINE> <DEDENT> def refresh_rows(self): <NEW_LINE> <INDENT> if self.row_changed_handler_id: <NEW_LINE> <INDENT> self.tree_store.disconnect(self.row_changed_handler_id) <NEW_LINE> <DEDENT> self.tree_store.clear() <NEW_LINE> for key in sorted(backends): <NEW_LINE> <INDENT> if not any(backends[key].get_options()): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> c_name = backends[key].get_name() <NEW_LINE> c_id = backends[key].get_id() <NEW_LINE> c_value = options.get_tree(c_id, None) <NEW_LINE> if not c_value and options.get('auto_hide') and backends[key].auto_hide(): <NEW_LINE> <INDENT> logger.debug("automatically hiding cleaner '%s'", c_id) <NEW_LINE> continue <NEW_LINE> <DEDENT> parent = self.tree_store.append(None, (c_name, c_value, c_id, "")) <NEW_LINE> for (o_id, o_name) in backends[key].get_options(): <NEW_LINE> <INDENT> o_value = options.get_tree(c_id, o_id) <NEW_LINE> self.tree_store.append(parent, (o_name, o_value, o_id, "")) <NEW_LINE> <DEDENT> <DEDENT> self.row_changed_handler_id = self.tree_store.connect("row-changed", 
self.on_row_changed) <NEW_LINE> <DEDENT> def sort_func(self, model, iter1, iter2, _user_data): <NEW_LINE> <INDENT> value1 = model[iter1][0].lower() <NEW_LINE> value2 = model[iter2][0].lower() <NEW_LINE> if value1 == value2: <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> if value1 > value2: <NEW_LINE> <INDENT> return 1 <NEW_LINE> <DEDENT> return -1 | Model holds information to be displayed in the tree view | 62598fb35fdd1c0f98e5e029 |
class Field(object): <NEW_LINE> <INDENT> class arguments: <NEW_LINE> <INDENT> required = True <NEW_LINE> <DEDENT> def __init__(self,**kwds): <NEW_LINE> <INDENT> global _order_counter <NEW_LINE> self._order_counter = _order_counter = _order_counter + 1 <NEW_LINE> args = self.__class__.arguments <NEW_LINE> for argnm in dir(args): <NEW_LINE> <INDENT> if not argnm.startswith("__"): <NEW_LINE> <INDENT> setattr(self,argnm,kwds.get(argnm,getattr(args,argnm))) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def parse_attributes(self,obj,attrs): <NEW_LINE> <INDENT> return attrs <NEW_LINE> <DEDENT> def parse_child_node(self,obj,node): <NEW_LINE> <INDENT> return dexml.PARSE_SKIP <NEW_LINE> <DEDENT> def parse_done(self,obj): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def render_attributes(self,obj,val,nsmap): <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> def render_children(self,obj,nsmap,val): <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> def __get__(self,instance,owner=None): <NEW_LINE> <INDENT> if instance is None: <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> return instance.__dict__.get(self.field_name) <NEW_LINE> <DEDENT> def __set__(self,instance,value): <NEW_LINE> <INDENT> instance.__dict__[self.field_name] = value <NEW_LINE> <DEDENT> def _check_tagname(self,node,tagname): <NEW_LINE> <INDENT> if node.nodeType != node.ELEMENT_NODE: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> if isinstance(tagname,str): <NEW_LINE> <INDENT> if node.localName != tagname: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> if node.namespaceURI: <NEW_LINE> <INDENT> if node.namespaceURI != self.model_class.meta.namespace: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> (tagns,tagname) = tagname <NEW_LINE> if node.localName != tagname: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> if node.namespaceURI != tagns: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> return True | Base class for all dexml Field 
classes.
Field classes are responsible for parsing and rendering individual
components to the XML. They also act as descriptors on dexml Model
instances, to get/set the corresponding properties.
Each field instance will magically be given the following properties:
* model_class: the Model subclass to which it is attached
* field_name: the name under which is appears on that class
The following methods are required for interaction with the parsing
and rendering machinery:
* parse_attributes: parse info out of XML node attributes
* parse_child_node: parse into out of an XML child node
* render_attributes: render XML for node attributes
* render_children: render XML for child nodes
| 62598fb3be7bc26dc9251eaa |
class nn_se_rSpecMSE_joint_SE_D_WDFLbyD_S300(p40): <NEW_LINE> <INDENT> model_name = 'DISCRIMINATOR_AD_MODEL' <NEW_LINE> D_GRL = True <NEW_LINE> D_Grad_DCC = True <NEW_LINE> blstm_layers = 1 <NEW_LINE> lstm_layers = 1 <NEW_LINE> loss_name = ["real_net_spec_mse"] <NEW_LINE> GPU_PARTION = 0.47 <NEW_LINE> se_grad_fromD_coef = 0.0 <NEW_LINE> discirminator_grad_coef = 1.0 <NEW_LINE> stop_criterion_losses = ["real_net_spec_mse"] <NEW_LINE> show_losses = ["real_net_spec_mse", "deep_features_loss", "d_loss", "deep_features_losses"] <NEW_LINE> deepFeatureLoss_softmaxLogits = True <NEW_LINE> D_used_losses = ["se_loss", "deep_feature_loss", "D_loss"] <NEW_LINE> deepFeatureLoss_coef = 1.0 <NEW_LINE> weighted_DFL_by_DLoss = True <NEW_LINE> D_strict_degree_for_DFL = 300.0 | half full vec constrained | 62598fb399cbb53fe6830f70 |
class VGG(nn.Module): <NEW_LINE> <INDENT> def __init__(self, arch: object, num_classes=1000) -> object: <NEW_LINE> <INDENT> super(VGG, self).__init__() <NEW_LINE> self.in_channels = 3 <NEW_LINE> self.conv3_64 = self.__make_layer(64, arch[0]) <NEW_LINE> self.conv3_128 = self.__make_layer(128, arch[1]) <NEW_LINE> self.conv3_256 = self.__make_layer(256, arch[2]) <NEW_LINE> self.conv3_512a = self.__make_layer(512, arch[3]) <NEW_LINE> self.conv3_512b = self.__make_layer(512, arch[4]) <NEW_LINE> self.fc1 = nn.Linear(7 * 7 * 512, 4096) <NEW_LINE> self.bn1 = nn.BatchNorm1d(4096) <NEW_LINE> self.bn2 = nn.BatchNorm1d(4096) <NEW_LINE> self.fc2 = nn.Linear(4096, 4096) <NEW_LINE> self.fc3 = nn.Linear(4096, num_classes) <NEW_LINE> <DEDENT> def __make_layer(self, channels, num): <NEW_LINE> <INDENT> layers = [] <NEW_LINE> for i in range(num): <NEW_LINE> <INDENT> layers.append(nn.Conv2d(self.in_channels, channels, 3, stride=1, padding=1, bias=False)) <NEW_LINE> layers.append(nn.BatchNorm2d(channels)) <NEW_LINE> layers.append(nn.ReLU()) <NEW_LINE> self.in_channels = channels <NEW_LINE> <DEDENT> return nn.Sequential(*layers) <NEW_LINE> <DEDENT> def forward(self, x): <NEW_LINE> <INDENT> out = self.conv3_64(x) <NEW_LINE> out = F.max_pool2d(out, 2) <NEW_LINE> out = self.conv3_128(out) <NEW_LINE> out = F.max_pool2d(out, 2) <NEW_LINE> out = self.conv3_256(out) <NEW_LINE> out = F.max_pool2d(out, 2) <NEW_LINE> out = self.conv3_512a(out) <NEW_LINE> out = F.max_pool2d(out, 2) <NEW_LINE> out = self.conv3_512b(out) <NEW_LINE> out = F.max_pool2d(out, 2) <NEW_LINE> out = out.view(out.size(0), -1) <NEW_LINE> out = self.fc1(out) <NEW_LINE> out = self.bn1(out) <NEW_LINE> out = F.relu(out) <NEW_LINE> out = self.fc2(out) <NEW_LINE> out = self.bn2(out) <NEW_LINE> out = F.relu(out) <NEW_LINE> return F.softmax(self.fc3(out)) | VGG builder | 62598fb34a966d76dd5eef73 |
class scan(): <NEW_LINE> <INDENT> def __init__(self, scan_type, targets): <NEW_LINE> <INDENT> self.scan_type = scan_type <NEW_LINE> self.targets = targets <NEW_LINE> <DEDENT> def send_icmp_packet(self): <NEW_LINE> <INDENT> for target in self.targets: <NEW_LINE> <INDENT> print("\tSending ICMP packet to %s." % target) <NEW_LINE> <DEDENT> <DEDENT> def recv_icmp_packet(self): <NEW_LINE> <INDENT> for target in self.targets: <NEW_LINE> <INDENT> print("\tReceiving ICMP packet from %s." % target) <NEW_LINE> <DEDENT> <DEDENT> def show_results(self): <NEW_LINE> <INDENT> print("\n####################################\n") <NEW_LINE> print("Results for scan: %s." % self.scan_type) <NEW_LINE> for target in self.targets: <NEW_LINE> <INDENT> print("\tTarget: %s." % target) <NEW_LINE> <DEDENT> print("\n####################################\n") | Do the scan | 62598fb344b2445a339b69c0 |
class Cert(ShareableOrgMixin, AbstractCert): <NEW_LINE> <INDENT> ca = models.ForeignKey(Ca, verbose_name=_('CA'), on_delete=models.CASCADE) <NEW_LINE> class Meta(AbstractCert.Meta): <NEW_LINE> <INDENT> abstract = False <NEW_LINE> <DEDENT> def clean(self): <NEW_LINE> <INDENT> self._validate_org_relation('ca') | openwisp-controller cert model | 62598fb363b5f9789fe85206 |
class _Remove(argparse.Action): <NEW_LINE> <INDENT> def __init__(self, option_strings, dest=None, default=None, metavar=None): <NEW_LINE> <INDENT> super().__init__(option_strings=option_strings, dest=dest, metavar=metavar, default=default, help='remove pool') <NEW_LINE> <DEDENT> def __call__(self, parser, namespace, name, option_string=None): <NEW_LINE> <INDENT> setattr(namespace, 'command', 'remove') <NEW_LINE> setattr(namespace, 'name', name) | Action for argument parser that removes a pool | 62598fb3dc8b845886d53654 |
class IDocentimsFixcontainerviewLayer(IDefaultBrowserLayer): <NEW_LINE> <INDENT> pass | Marker interface that defines a browser layer. | 62598fb3498bea3a75a57bbd |
class TestSend(TransactionCase): <NEW_LINE> <INDENT> def test_send(self): <NEW_LINE> <INDENT> carrier = self.env.ref("delivery.normal_delivery_carrier") <NEW_LINE> picking_form = Form( self.env["stock.picking"].with_context( default_picking_type_id=self.env.ref("stock.picking_type_out").id, ) ) <NEW_LINE> picking_form.carrier_id = carrier <NEW_LINE> picking = picking_form.save() <NEW_LINE> with mock.patch.object(type(carrier), "fixed_send_shipping") as mocked: <NEW_LINE> <INDENT> mocked.return_value = [ dict( labels=[ dict( name="hello_world.pdf", file=base64.b64encode(bytes("hello world", "utf8")), file_type="pdf", ), ] ) ] <NEW_LINE> labels_before = self.env["shipping.label"].search([]) <NEW_LINE> carrier.send_shipping(picking) <NEW_LINE> label = self.env["shipping.label"].search([]) - labels_before <NEW_LINE> self.assertTrue(label, "No label created") <NEW_LINE> self.assertEqual( label.mimetype, "application/pdf", "Wrong attachment created" ) | Test sending a picking | 62598fb35fcc89381b26619b |
class TaskViewSet(viewsets.ViewSet): <NEW_LINE> <INDENT> permission_classes = (IsAuthenticated,) <NEW_LINE> prefix = r'tasks' <NEW_LINE> base_name = 'tasks' <NEW_LINE> @log() <NEW_LINE> @required_roles(['read']) <NEW_LINE> @load() <NEW_LINE> def list(self): <NEW_LINE> <INDENT> inspector = inspect() <NEW_LINE> data = {'active' : inspector.active(), 'scheduled': inspector.scheduled(), 'reserved' : inspector.reserved(), 'revoked' : inspector.revoked()} <NEW_LINE> return Response(data, status=status.HTTP_200_OK) <NEW_LINE> <DEDENT> @log() <NEW_LINE> @required_roles(['read']) <NEW_LINE> @load() <NEW_LINE> def retrieve(self, pk): <NEW_LINE> <INDENT> result = celery.AsyncResult(pk) <NEW_LINE> if result.successful(): <NEW_LINE> <INDENT> result_data = result.result <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result_data = str(result.result) if result.result is not None else None <NEW_LINE> <DEDENT> data = {'id' : result.id, 'status' : result.status, 'successful': result.successful(), 'failed' : result.failed(), 'ready' : result.ready(), 'result' : result_data} <NEW_LINE> return Response(data, status=status.HTTP_200_OK) <NEW_LINE> <DEDENT> @link() <NEW_LINE> @log() <NEW_LINE> @required_roles(['read']) <NEW_LINE> @load() <NEW_LINE> def get(self, pk): <NEW_LINE> <INDENT> result = celery.AsyncResult(pk) <NEW_LINE> return Response(result.get(), status=status.HTTP_200_OK) | Information about celery tasks | 62598fb3283ffb24f3cf392a |
class vb2pyOptions(model.CustomDialog): <NEW_LINE> <INDENT> def __init__(self, logger, *args, **kw): <NEW_LINE> <INDENT> model.CustomDialog.__init__(self, *args, **kw) <NEW_LINE> self.log = logger <NEW_LINE> self.log.info("Opening INI file") <NEW_LINE> fle = open(utils.relativePath("vb2py.ini"), "r") <NEW_LINE> try: <NEW_LINE> <INDENT> text = fle.read() <NEW_LINE> self.components.optionText.text = text <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> fle.close() <NEW_LINE> <DEDENT> <DEDENT> def on_btnOK_mouseClick(self, event): <NEW_LINE> <INDENT> self.saveINI() <NEW_LINE> self.Close() <NEW_LINE> <DEDENT> def saveINI(self): <NEW_LINE> <INDENT> self.log.info("Saving INI file") <NEW_LINE> fle = open(utils.relativePath("vb2py.ini"), "w") <NEW_LINE> try: <NEW_LINE> <INDENT> fle.write(self.components.optionText.text) <NEW_LINE> self.log.info("Succeeded!") <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> fle.close() <NEW_LINE> <DEDENT> <DEDENT> def on_btnApply_mouseClick(self, event): <NEW_LINE> <INDENT> self.saveINI() <NEW_LINE> self.parent.rereadOptions() <NEW_LINE> self.parent.updateView() | GUI for the vb2Py converter | 62598fb3a8370b77170f047a |
class RequestTestCase(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.app = create_app(config_name="testing") <NEW_LINE> self.client = self.app.test_client <NEW_LINE> self.user_data_sign = { 'user_email': 'ngolo@example.com', 'password': 'test_123' } <NEW_LINE> self.user_data_2 = { 'user_email': 'jumbo@example.com', 'password': 'test_123' } <NEW_LINE> self.ride_data_re = { "ride_name": "Mugumo's", "driver": "Martin Kamau", "reg_num": "KCS 124U", "start": "Kisauni", "stop": "Bamburi", "passengers": "4", "time": "10:00AM", "date": "21/6/2018", "cost": "KSH 5/KM " } <NEW_LINE> self.ride_data_2 = { "ride_name": "Kanjo's", "driver": "Martin Kamau", "reg_num": "KCS 124U", "start": "Kisauni", "stop": "Bamburi", "passengers": "4", "time": "10:00AM", "date": "21/6/2018", "cost": "KSH 5/KM " } <NEW_LINE> <DEDENT> def test_request_ride(self): <NEW_LINE> <INDENT> self.client().post('/api/v1/auth/signup', data=self.user_data_sign) <NEW_LINE> login_response = self.client().post('/api/v1/auth/login', data=self.user_data_sign) <NEW_LINE> access_token = json.loads(login_response.data.decode())['access_token'] <NEW_LINE> response = self.client().post('/api/v1/ride/create', headers=dict(Authorization='Bearer ' + access_token), data=self.ride_data_2) <NEW_LINE> response = self.client().post('/api/v1/ride/1/request', headers=dict(Authorization='Bearer ' + access_token)) <NEW_LINE> result = json.loads(response.data.decode()) <NEW_LINE> self.assertEqual(response.status_code, 200) <NEW_LINE> self.assertEqual(result['message'], "Your ride request has been received successfully!") <NEW_LINE> <DEDENT> def test_request_ride_with_invalid_id(self): <NEW_LINE> <INDENT> self.client().post('/api/v1/auth/signup', data=self.user_data_2) <NEW_LINE> login_response = self.client().post('/api/v1/auth/login', data=self.user_data_2) <NEW_LINE> access_token = json.loads(login_response.data.decode())['access_token'] <NEW_LINE> response = 
self.client().post('/api/v1/ride/create', headers=dict(Authorization='Bearer ' + access_token), data=self.ride_data_re) <NEW_LINE> response = self.client().post( '/api/v1/ride/7/request', headers=dict(Authorization='Bearer ' + access_token)) <NEW_LINE> result = json.loads(response.data.decode()) <NEW_LINE> self.assertEqual(response.status_code, 404) <NEW_LINE> self.assertEqual(result['message'], "The ride you are looking for is not available!") | Class for Ride offer request test cases | 62598fb37d43ff2487427451 |
class ReportHandler(Handles): <NEW_LINE> <INDENT> __metaclass__ = abc.ABCMeta <NEW_LINE> def __init__(self, signal, weak=False): <NEW_LINE> <INDENT> self.signal = getsig(signal) <NEW_LINE> self.report_signal = getsig("badfile") <NEW_LINE> def dummy(sender, event): self.handle(sender,event) <NEW_LINE> dispatcher.connect(dummy, signal=self.signal, sender=dispatcher.Any, weak=weak) <NEW_LINE> <DEDENT> def report_problem_file(self, event, exception=None): <NEW_LINE> <INDENT> dispatcher.send(signal=self.report_signal, sender=self, event=event, exception=exception) | A handler that can also report problem files when things go wrong
through the report_problem_file routine | 62598fb31b99ca400228f57f |
class ModelsRegistry(object): <NEW_LINE> <INDENT> def __init__(self, model_name_class_tuples_list=None): <NEW_LINE> <INDENT> hard_coded_model_name_class_tuples = [("Logistic regression", LogisticRegressionModel), ("Linear regression", LinearRegressionModel), ("HTTP REST Model", HTTPRestModel), ("Openscoring REST Model", OpenScoringRestModel)] <NEW_LINE> if model_name_class_tuples_list is None: <NEW_LINE> <INDENT> model_name_class_tuples_list = hard_coded_model_name_class_tuples <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> model_name_class_tuples_list += hard_coded_model_name_class_tuples <NEW_LINE> <DEDENT> self.model_name_class_dict = {} <NEW_LINE> for model_name_class in model_name_class_tuples_list: <NEW_LINE> <INDENT> model_name, model_class = model_name_class <NEW_LINE> self.model_name_class_dict[model_name] = model_class | Registers a model name with a model class | 62598fb32c8b7c6e89bd3862 |
class GeometricDistribution(Distribution): <NEW_LINE> <INDENT> def __init__(self, p): <NEW_LINE> <INDENT> self.p = p <NEW_LINE> <DEDENT> def log_density(self, data): <NEW_LINE> <INDENT> assert(len(data.shape) == 1), "Expect 1D data!" <NEW_LINE> return np.log(1 - self.p) * (data - 1) + np.log(self.p) <NEW_LINE> <DEDENT> def estimate_parameters(self, data, weights): <NEW_LINE> <INDENT> assert(len(data.shape) == 1), "Expect 1D data!" <NEW_LINE> self.p = np.sum(weights) / np.sum(data * weights) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "Geom[p={p:.4g}]".format(p=self.p) | Geometric distribution with parameter (p). | 62598fb3cc0a2c111447b0b0 |
class Person(models.Model): <NEW_LINE> <INDENT> subject = models.TextField(null=True, blank=True) <NEW_LINE> birth_year = models.IntegerField(null=True, blank=True) <NEW_LINE> places = models.ManyToManyField('Location') <NEW_LINE> death_year = models.IntegerField(null=True, blank=True) <NEW_LINE> religion = models.TextField() <NEW_LINE> party = models.TextField(null=True, blank=True) | docstring for ClassName | 62598fb3f548e778e596b641 |
class ChannelRequest(dbus.service.Object): <NEW_LINE> <INDENT> @dbus.service.method('org.freedesktop.Telepathy.ChannelRequest', in_signature='', out_signature='') <NEW_LINE> def Proceed(self): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> @dbus.service.method('org.freedesktop.Telepathy.ChannelRequest', in_signature='', out_signature='') <NEW_LINE> def Cancel(self): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> @dbus.service.signal('org.freedesktop.Telepathy.ChannelRequest', signature='ss') <NEW_LINE> def Failed(self, Error, Message): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @dbus.service.signal('org.freedesktop.Telepathy.ChannelRequest', signature='') <NEW_LINE> def Succeeded(self): <NEW_LINE> <INDENT> pass | A channel request is an object in the ChannelDispatcher representing
an ongoing request for some channels to be created or found. There
can be any number of ChannelRequest objects at the same time.
Its well-known bus name is the same as that of the ChannelDispatcher,
"org.freedesktop.Telepathy.ChannelDispatcher".
See
ChannelDispatcher.CreateChannel
for rationale for ChannelRequest being a separate object.
A channel request can be cancelled by any client (not just the one
that requested it). This means that the ChannelDispatcher will
Close
the resulting channel, or refrain from requesting it at all, rather
than dispatching it to a handler. | 62598fb3fff4ab517ebcd883 |
class PublicationViewSet(RetrieveElasticMixin, ListElasticMixin, viewsets.ReadOnlyModelViewSet): <NEW_LINE> <INDENT> serializer_class = PublicationSerializer <NEW_LINE> pagination_class = ElasticLimitOffsetPagination <NEW_LINE> idx = ElasticSettings.idx('PUBLICATION') <NEW_LINE> filter_fields = ('pmid', 'title', 'authors__name', 'tags__disease') | Returns a list of publications.
---
list:
response_serializer: PublicationSerializer
parameters:
- name: pmid
description: PubMed ID (e.g. 20937630).
required: false
type: string
paramType: query
- name: title
description: Title.
required: false
type: string
paramType: query
- name: authors__name
description: Author names.
required: false
type: string
paramType: query
- name: tags__disease
description: Disease tag (e.g. T1D).
required: false
type: string
paramType: query | 62598fb367a9b606de54606c |
class Advent3Tests(unittest.TestCase): <NEW_LINE> <INDENT> def test_3a(self): <NEW_LINE> <INDENT> known_values = { 1: 0, 12: 3, 23: 2, 1024: 31 } <NEW_LINE> for number, expected in known_values.items(): <NEW_LINE> <INDENT> actual = advent_3a(number) <NEW_LINE> message = ("Testing input '{}', expected '{}' but got '{}'" .format(number, expected, actual)) <NEW_LINE> self.assertEqual(advent_3a(number), expected, msg=message) <NEW_LINE> <DEDENT> <DEDENT> def test_3b(self): <NEW_LINE> <INDENT> known_values = { 0: 1, 1: 2, 2: 4, 37: 54, 100: 122 } <NEW_LINE> for number, expected in known_values.items(): <NEW_LINE> <INDENT> actual = advent_3b(number) <NEW_LINE> message = ("Testing input '{}', expected '{}' but got '{}'" .format(number, expected, actual)) <NEW_LINE> self.assertEqual(actual, expected, msg=message) <NEW_LINE> <DEDENT> <DEDENT> def test_get_coords(self): <NEW_LINE> <INDENT> known_values = { 1: (0, 0), 2: (1, 0), 3: (1, 1), 4: (0, 1), 5: (-1, 1), 6: (-1, 0), 7: (-1, -1), 8: (0, -1), 9: (1, -1), 10: (2, -1), 11: (2, 0), 12: (2, 1), 13: (2, 2), 14: (1, 2), 15: (0, 2), 16: (-1, 2), 17: (-2, 2), 18: (-2, 1), 19: (-2, 0), 20: (-2, -1), 21: (-2, -2), 22: (-1, -2), 23: (0, -2), 24: (1, -2), 25: (2, -2), 26: (3, -2), } <NEW_LINE> for number, expected in known_values.items(): <NEW_LINE> <INDENT> actual = coords_3a(number) <NEW_LINE> message = ("Testing input '{}', expected '{}' but got '{}'" .format(number, expected, actual)) <NEW_LINE> self.assertEqual(actual, expected, msg=message) <NEW_LINE> <DEDENT> <DEDENT> def test_traversal(self): <NEW_LINE> <INDENT> expected = [ (0, 0), (1, 0), (1, 1), (0, 1), (-1, 1), (-1, 0), (-1, -1), (0, -1), (1, -1), (2, -1), (2, 0), (2, 1), (2, 2), (1, 2), (0, 2), (-1, 2), (-2, 2), (-2, 1), (-2, 0), (-2, -1), (-2, -2), (-1, -2), (0, -2), (1, -2), (2, -2), (3, -2) ] <NEW_LINE> num_desired = len(expected) <NEW_LINE> actual = list(itertools.islice(iter_coords(), num_desired)) <NEW_LINE> for i in range(0, num_desired): <NEW_LINE> <INDENT> 
message = "At index '{}', expected '{}' and got '{}'".format(i, expected[0], actual[0]) <NEW_LINE> self.assertEqual(expected[i], actual[i], msg=message) | Unit tests for the functions in this module | 62598fb35fdd1c0f98e5e02b |
class Not(Node): <NEW_LINE> <INDENT> def __init__(self, right): <NEW_LINE> <INDENT> self.right = right <NEW_LINE> <DEDENT> def evaluate(self): <NEW_LINE> <INDENT> right = self.right.evaluate() <NEW_LINE> if not isinstance(right, int): <NEW_LINE> <INDENT> raise SemanticError() <NEW_LINE> <DEDENT> if right == 0: <NEW_LINE> <INDENT> return 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return 0 | A node representing NOT. | 62598fb37d847024c075c461 |
class Rectangle: <NEW_LINE> <INDENT> def __init__(self, initP, initW, initH): <NEW_LINE> <INDENT> self.location = initP <NEW_LINE> self.width = initW <NEW_LINE> self.height = initH | Rectangle class using Point, width and height | 62598fb3f9cc0f698b1c531a |
class VirtualConnection(Component): <NEW_LINE> <INDENT> def __init__(self, name='VirtualConnection', pos=QPoint(0, 0)): <NEW_LINE> <INDENT> Component.__init__(self, name, pos) <NEW_LINE> self.compType = TYPE_CONN_VIRTUAL <NEW_LINE> self.addTerminal('IN', 1, TERM.CONN, QPointF(0, 0)) | @if English
@endif
@if Slovak
Neviditelny prepojovaci komponent pre spojenie sieti (net-ov).
Zoznam prepojeni sa vytvara dynamicky na zaklade terminalov sieti.
Poskytuje spolocny terminal pre vzajomne prepojenie virtualnych sieti.
Komponent nie je zaradeny do grafickeho kontaineru.
@endif | 62598fb367a9b606de54606d |
class ReplicationStatus(msrest.serialization.Model): <NEW_LINE> <INDENT> _validation = { 'aggregated_state': {'readonly': True}, 'summary': {'readonly': True}, } <NEW_LINE> _attribute_map = { 'aggregated_state': {'key': 'aggregatedState', 'type': 'str'}, 'summary': {'key': 'summary', 'type': '[RegionalReplicationStatus]'}, } <NEW_LINE> def __init__( self, **kwargs ): <NEW_LINE> <INDENT> super(ReplicationStatus, self).__init__(**kwargs) <NEW_LINE> self.aggregated_state = None <NEW_LINE> self.summary = None | This is the replication status of the gallery Image Version.
Variables are only populated by the server, and will be ignored when sending a request.
:ivar aggregated_state: This is the aggregated replication status based on all the regional
replication status flags. Possible values include: "Unknown", "InProgress", "Completed",
"Failed".
:vartype aggregated_state: str or
~azure.mgmt.compute.v2019_07_01.models.AggregatedReplicationState
:ivar summary: This is a summary of replication status for each region.
:vartype summary: list[~azure.mgmt.compute.v2019_07_01.models.RegionalReplicationStatus] | 62598fb35fdd1c0f98e5e02c |
class City(BaseModel): <NEW_LINE> <INDENT> state_id = "" <NEW_LINE> name = "" | [City]
Args:
BaseModel ([class]): class that inherited by City | 62598fb344b2445a339b69c1 |
class MQTTRPCBaseResponse(JSONSerializable): <NEW_LINE> <INDENT> def __init__(self, result=None, error=None, _id=None): <NEW_LINE> <INDENT> self.data = dict() <NEW_LINE> self.result = result <NEW_LINE> self.error = error <NEW_LINE> self._id = _id <NEW_LINE> if self.result is None and self.error is None: <NEW_LINE> <INDENT> raise ValueError("Either result or error should be used") <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def data(self): <NEW_LINE> <INDENT> return self._data <NEW_LINE> <DEDENT> @data.setter <NEW_LINE> def data(self, value): <NEW_LINE> <INDENT> if not isinstance(value, dict): <NEW_LINE> <INDENT> raise ValueError("data should be dict") <NEW_LINE> <DEDENT> self._data = value <NEW_LINE> <DEDENT> @property <NEW_LINE> def json(self): <NEW_LINE> <INDENT> return self.serialize(self.data) | Base class for JSON-RPC 1.0 and JSON-RPC 2.0 responses. | 62598fb38a43f66fc4bf2218 |
class DisabledFlavorsWithRealDBTest(test.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> super(DisabledFlavorsWithRealDBTest, self).setUp() <NEW_LINE> self.controller = flavors.FlavorsController() <NEW_LINE> self.req = fakes.HTTPRequestV3.blank('/flavors') <NEW_LINE> self.context = self.req.environ['nova.context'] <NEW_LINE> self.admin_context = context.get_admin_context() <NEW_LINE> self.disabled_type = self._create_disabled_instance_type() <NEW_LINE> self.inst_types = db.flavor_get_all(self.admin_context) <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> db.flavor_destroy(self.admin_context, self.disabled_type['name']) <NEW_LINE> super(DisabledFlavorsWithRealDBTest, self).tearDown() <NEW_LINE> <DEDENT> def _create_disabled_instance_type(self): <NEW_LINE> <INDENT> inst_types = db.flavor_get_all(self.admin_context) <NEW_LINE> inst_type = inst_types[0] <NEW_LINE> del inst_type['id'] <NEW_LINE> inst_type['name'] += '.disabled' <NEW_LINE> inst_type['flavorid'] = unicode(max( [int(flavor['flavorid']) for flavor in inst_types]) + 1) <NEW_LINE> inst_type['disabled'] = True <NEW_LINE> disabled_type = db.flavor_create(self.admin_context, inst_type) <NEW_LINE> return disabled_type <NEW_LINE> <DEDENT> def test_index_should_not_list_disabled_flavors_to_user(self): <NEW_LINE> <INDENT> self.context.is_admin = False <NEW_LINE> flavor_list = self.controller.index(self.req)['flavors'] <NEW_LINE> api_flavorids = set(f['id'] for f in flavor_list) <NEW_LINE> db_flavorids = set(i['flavorid'] for i in self.inst_types) <NEW_LINE> disabled_flavorid = str(self.disabled_type['flavorid']) <NEW_LINE> self.assertTrue(disabled_flavorid in db_flavorids) <NEW_LINE> self.assertEqual(db_flavorids - set([disabled_flavorid]), api_flavorids) <NEW_LINE> <DEDENT> def test_index_should_list_disabled_flavors_to_admin(self): <NEW_LINE> <INDENT> self.context.is_admin = True <NEW_LINE> flavor_list = self.controller.index(self.req)['flavors'] <NEW_LINE> api_flavorids = 
set(f['id'] for f in flavor_list) <NEW_LINE> db_flavorids = set(i['flavorid'] for i in self.inst_types) <NEW_LINE> disabled_flavorid = str(self.disabled_type['flavorid']) <NEW_LINE> self.assertTrue(disabled_flavorid in db_flavorids) <NEW_LINE> self.assertEqual(db_flavorids, api_flavorids) <NEW_LINE> <DEDENT> def test_show_should_include_disabled_flavor_for_user(self): <NEW_LINE> <INDENT> self.context.is_admin = False <NEW_LINE> flavor = self.controller.show( self.req, self.disabled_type['flavorid'])['flavor'] <NEW_LINE> self.assertEqual(flavor['name'], self.disabled_type['name']) <NEW_LINE> <DEDENT> def test_show_should_include_disabled_flavor_for_admin(self): <NEW_LINE> <INDENT> self.context.is_admin = True <NEW_LINE> flavor = self.controller.show( self.req, self.disabled_type['flavorid'])['flavor'] <NEW_LINE> self.assertEqual(flavor['name'], self.disabled_type['name']) | Tests that disabled flavors should not be shown nor listed. | 62598fb3adb09d7d5dc0a62a |
class FacebookAgentMessageResponseBody(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.recipient_id = '' <NEW_LINE> self.message_id = '' | Request body for facebook agent message. | 62598fb3e1aae11d1e7ce873 |
class DanbooruError(Exception): <NEW_LINE> <INDENT> def __init__(self, message): <NEW_LINE> <INDENT> self.message = message | Exception raised for errors with Danbooru.
Attributes:
message -- explanation of the error | 62598fb323849d37ff851152 |
class Item_Type(db.Model): <NEW_LINE> <INDENT> __tablename__ = 'item_types' <NEW_LINE> id = Column(Integer, primary_key=True) <NEW_LINE> item_type = Column(String, nullable=False) <NEW_LINE> items = relationship('Item') | db model for Treasure Item Types | 62598fb360cbc95b063643e5 |
class DictionaryReaderUtilsTest(unittest.TestCase): <NEW_LINE> <INDENT> def test_load_dummy_dict(self): <NEW_LINE> <INDENT> data_dir = fileutils.get_test_data_directory() <NEW_LINE> dummy_dict_path = os.path.join(data_dir, "dummy_dict.json") <NEW_LINE> self.assertTrue(os.path.exists(dummy_dict_path), "Missing data for test: %s!" % dummy_dict_path) <NEW_LINE> d = dictreader.load_dict(dummy_dict_path) <NEW_LINE> self.assertTrue(type(d) == dict) <NEW_LINE> self.assertTrue(d["-F"] == "of") | The dictionary reader loads json dicts. | 62598fb3a8370b77170f047b |
@nest.check_stack <NEW_LINE> @unittest.skipIf(not HAVE_GSL, 'GSL is not available') <NEW_LINE> class SiegertNeuronTestCase(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.rtol = 1.0 <NEW_LINE> self.N = 100 <NEW_LINE> self.rate_ex = 1.5 * 1e4 <NEW_LINE> self.J = 0.1 <NEW_LINE> self.simtime = 500. <NEW_LINE> self.dt = 0.1 <NEW_LINE> self.start = 200. <NEW_LINE> nest.set_verbosity('M_WARNING') <NEW_LINE> nest.ResetKernel() <NEW_LINE> nest.SetKernelStatus( {'resolution': self.dt, 'use_wfr': False, 'print_time': True}) <NEW_LINE> self.iaf_psc_delta = nest.Create( 'iaf_psc_delta', self.N) <NEW_LINE> self.poisson_generator = nest.Create( 'poisson_generator', params={'rate': self.rate_ex}) <NEW_LINE> nest.Connect(self.poisson_generator, self.iaf_psc_delta, syn_spec={'weight': self.J, 'delay': self.dt}) <NEW_LINE> self.spike_detector = nest.Create( "spike_detector", params={'start': self.start}) <NEW_LINE> nest.Connect( self.iaf_psc_delta, self.spike_detector) <NEW_LINE> neuron_status = nest.GetStatus(self.iaf_psc_delta)[0] <NEW_LINE> siegert_params = {'tau_m': neuron_status['tau_m'], 't_ref': neuron_status['t_ref'], 'theta': neuron_status['V_th'] - neuron_status['E_L'], 'V_reset': neuron_status['V_reset'] - neuron_status['E_L']} <NEW_LINE> self.siegert_neuron = nest.Create( 'siegert_neuron', params=siegert_params) <NEW_LINE> self.siegert_drive = nest.Create( 'siegert_neuron', 1, params={'mean': self.rate_ex, 'theta': siegert_params['theta']}) <NEW_LINE> J_mu_ex = neuron_status['tau_m'] * 1e-3 * self.J <NEW_LINE> J_sigma_ex = neuron_status['tau_m'] * 1e-3 * self.J ** 2 <NEW_LINE> syn_dict = {'drift_factor': J_mu_ex, 'diffusion_factor': J_sigma_ex, 'model': 'diffusion_connection'} <NEW_LINE> nest.Connect( self.siegert_drive, self.siegert_neuron, syn_spec=syn_dict) <NEW_LINE> self.multimeter = nest.Create( "multimeter", params={'record_from': ['rate'], 'interval': self.dt}) <NEW_LINE> nest.Connect( self.multimeter, self.siegert_neuron) 
<NEW_LINE> <DEDENT> def test_RatePrediction(self): <NEW_LINE> <INDENT> nest.Simulate(self.simtime) <NEW_LINE> events = nest.GetStatus(self.multimeter)[0]["events"] <NEW_LINE> senders = events['senders'] <NEW_LINE> rate = events['rate'][np.where(senders == self.siegert_neuron)] <NEW_LINE> rate_prediction = rate[-1] <NEW_LINE> rate_iaf = nest.GetStatus(self.spike_detector)[0][ "n_events"] / ((self.simtime - self.start) * 1e-3) / self.N <NEW_LINE> self.assertTrue(np.isclose(rate_iaf, rate_prediction, rtol=self.rtol)) <NEW_LINE> rate_prediction_test = 27.1095934379 <NEW_LINE> self.assertTrue(np.isclose(rate_prediction_test, rate_prediction)) | Test siegert_neuron
Details
-------
Compares the rate of a Poisson-driven iaf_psc_delta neuron
with the prediction from the siegert neuron. | 62598fb356b00c62f0fb2956 |
class WorkflowStatus(str, Enum): <NEW_LINE> <INDENT> created = "created" <NEW_LINE> queued = "queued" <NEW_LINE> pending = "pending" <NEW_LINE> running = "running" <NEW_LINE> failed = "failed" <NEW_LINE> finished = "finished" | Enumeration of workflow statuses.
Example:
WorkflowStatus.failed == "failed" # True | 62598fb3a79ad1619776a108 |
class ServiceManager: <NEW_LINE> <INDENT> _ec2 = None <NEW_LINE> _elb = None <NEW_LINE> _acm = None <NEW_LINE> _iam = None <NEW_LINE> _rds = None <NEW_LINE> _s3 = None <NEW_LINE> _ce = None <NEW_LINE> @property <NEW_LINE> def ec2(self): <NEW_LINE> <INDENT> if self._ec2 is None: self._ec2 = Ec2Service() <NEW_LINE> return self._ec2 <NEW_LINE> <DEDENT> @property <NEW_LINE> def elb(self): <NEW_LINE> <INDENT> if self._elb is None: self._elb = ElbService() <NEW_LINE> return self._elb <NEW_LINE> <DEDENT> @property <NEW_LINE> def acm(self): <NEW_LINE> <INDENT> if self._acm is None: self._acm = AcmService() <NEW_LINE> return self._acm <NEW_LINE> <DEDENT> @property <NEW_LINE> def iam(self): <NEW_LINE> <INDENT> if self._iam is None: self._iam = IamService() <NEW_LINE> return self._iam <NEW_LINE> <DEDENT> @property <NEW_LINE> def rds(self): <NEW_LINE> <INDENT> if self._rds is None: self._rds = RdsService() <NEW_LINE> return self._rds <NEW_LINE> <DEDENT> @property <NEW_LINE> def s3(self): <NEW_LINE> <INDENT> if self._s3 is None: self._s3 = S3Service() <NEW_LINE> return self._s3 <NEW_LINE> <DEDENT> @property <NEW_LINE> def ce(self): <NEW_LINE> <INDENT> if self._ce is None: self._ce = CostExplorerService() <NEW_LINE> return self._ce <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def get_auth_config(cls): <NEW_LINE> <INDENT> if AwsBase.profile: <NEW_LINE> <INDENT> auth = {'Authorization': {'Type':'Profile', 'Value':AwsBase.profile}} <NEW_LINE> <DEDENT> elif AwsBase.access_key: <NEW_LINE> <INDENT> auth = {'Authorization': {'Type':'AccessKeys', 'Value':AwsBase.access_key}} <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> auth = {'Authorization': {'Type':'Profile', 'Value': 'default'}} <NEW_LINE> <DEDENT> return auth <NEW_LINE> <DEDENT> def __init__(self, region, profile=None, access_key=None, secret_key=None): <NEW_LINE> <INDENT> AwsBase.region = region <NEW_LINE> AwsBase.access_key = access_key <NEW_LINE> AwsBase.secret_key = secret_key <NEW_LINE> AwsBase.profile = profile | Parent class 
that provides access to services.
For each service (ec2, s3, vpc ...) you are given access through a property of this class.
This property will return an instance of the corresponding class, for example Ec2Service or VpcService.
Each class of service (Ec2Service, S3Service ...) inherits from the AwsBase class. | 62598fb3dc8b845886d53656 |
@tf_export( "queue.PaddingFIFOQueue", v1=["queue.PaddingFIFOQueue", "io.PaddingFIFOQueue", "PaddingFIFOQueue"]) <NEW_LINE> @deprecation.deprecated_endpoints(["io.PaddingFIFOQueue", "PaddingFIFOQueue"]) <NEW_LINE> class PaddingFIFOQueue(QueueBase): <NEW_LINE> <INDENT> def __init__(self, capacity, dtypes, shapes, names=None, shared_name=None, name="padding_fifo_queue"): <NEW_LINE> <INDENT> dtypes = _as_type_list(dtypes) <NEW_LINE> shapes = _as_shape_list(shapes, dtypes, unknown_dim_allowed=True) <NEW_LINE> names = _as_name_list(names, dtypes) <NEW_LINE> if len(dtypes) != len(shapes): <NEW_LINE> <INDENT> raise ValueError("Shapes must be provided for all components, " f"but received {len(dtypes)} dtypes and " f"{len(shapes)} shapes.") <NEW_LINE> <DEDENT> queue_ref = gen_data_flow_ops.padding_fifo_queue_v2( component_types=dtypes, shapes=shapes, capacity=capacity, shared_name=_shared_name(shared_name), name=name) <NEW_LINE> super(PaddingFIFOQueue, self).__init__(dtypes, shapes, names, queue_ref) | A FIFOQueue that supports batching variable-sized tensors by padding.
A `PaddingFIFOQueue` may contain components with dynamic shape, while also
supporting `dequeue_many`. See the constructor for more details.
See `tf.queue.QueueBase` for a description of the methods on
this class. | 62598fb34f88993c371f055b |
class CoordinateAttribute(Attribute): <NEW_LINE> <INDENT> def __init__(self, frame, default=None, secondary_attribute=''): <NEW_LINE> <INDENT> self._frame = frame <NEW_LINE> super().__init__(default, secondary_attribute) <NEW_LINE> <DEDENT> def convert_input(self, value): <NEW_LINE> <INDENT> from astropy.coordinates import SkyCoord <NEW_LINE> if value is None: <NEW_LINE> <INDENT> return None, False <NEW_LINE> <DEDENT> elif isinstance(value, self._frame): <NEW_LINE> <INDENT> return value, False <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> value = SkyCoord(value) <NEW_LINE> transformedobj = value.transform_to(self._frame) <NEW_LINE> return transformedobj.frame, True | A frame attribute which is a coordinate object. It can be given as a
`~astropy.coordinates.SkyCoord` or a low-level frame instance. If a
low-level frame instance is provided, it will always be upgraded to be a
`~astropy.coordinates.SkyCoord` to ensure consistent transformation
behavior. The coordinate object will always be returned as a low-level
frame instance when accessed.
Parameters
----------
frame : `~astropy.coordinates.BaseCoordinateFrame` class
The type of frame this attribute can be
default : object
Default value for the attribute if not provided
secondary_attribute : str
Name of a secondary instance attribute which supplies the value if
``default is None`` and no value was supplied during initialization. | 62598fb3f548e778e596b643 |
class Messaging: <NEW_LINE> <INDENT> WSCloseCode = WSCloseCode <NEW_LINE> MSG_JOIN = 'join' <NEW_LINE> MSG_NEW_PLAYER = 'new_player' <NEW_LINE> MSG_HANDSHAKE = 'handshake' <NEW_LINE> MSG_WORLD = 'world' <NEW_LINE> MSG_P_JOINED = 'p_joined' <NEW_LINE> MSG_P_GAMEOVER = 'p_gameover' <NEW_LINE> MSG_P_SCORE = 'p_score' <NEW_LINE> MSG_RESET_WORLD = 'reset_world' <NEW_LINE> MSG_TOP_SCORES = 'top_scores' <NEW_LINE> MSG_RENDER = 'render' <NEW_LINE> MSG_ERROR = 'error' <NEW_LINE> MSG_PING = 'ping' <NEW_LINE> MSG_PONG = 'pong' <NEW_LINE> MSG_SYNC = 'sync' <NEW_LINE> CMD_LEFT = 37 <NEW_LINE> CMD_UP = 38 <NEW_LINE> CMD_RIGHT = 39 <NEW_LINE> CMD_DOWN = 40 <NEW_LINE> @staticmethod <NEW_LINE> async def _send_one(ws, message): <NEW_LINE> <INDENT> msg = json.dumps(message) <NEW_LINE> await ws.send_str(msg) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> async def _send_all(wss, messages): <NEW_LINE> <INDENT> msg = json.dumps(messages) <NEW_LINE> for ws in wss: <NEW_LINE> <INDENT> if not ws.closed: <NEW_LINE> <INDENT> await ws.send_str(msg) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> @staticmethod <NEW_LINE> async def _close(ws, code=WSCloseCode.GOING_AWAY, message='Closing connection'): <NEW_LINE> <INDENT> await ws.close(code=code, message=message) | WebSocket messaging helper class. | 62598fb37b25080760ed7551 |
class NSNitroNserrCachefwpxyToomany(NSNitroCrErrors): <NEW_LINE> <INDENT> pass | Nitro error code 601
Forward proxy limit reached | 62598fb32ae34c7f260ab17f |
class BaseSVC(BaseLibSVM, ClassifierMixin): <NEW_LINE> <INDENT> def _validate_targets(self, y): <NEW_LINE> <INDENT> y = column_or_1d(y, warn=True) <NEW_LINE> cls, y = unique(y, return_inverse=True) <NEW_LINE> self.class_weight_ = compute_class_weight(self.class_weight, cls, y) <NEW_LINE> if len(cls) < 2: <NEW_LINE> <INDENT> raise ValueError( "The number of classes has to be greater than one; got %d" % len(cls)) <NEW_LINE> <DEDENT> self.classes_ = cls <NEW_LINE> return np.asarray(y, dtype=np.float64, order='C') <NEW_LINE> <DEDENT> def predict(self, X): <NEW_LINE> <INDENT> y = super(BaseSVC, self).predict(X) <NEW_LINE> return self.classes_.take(y.astype(np.int)) <NEW_LINE> <DEDENT> def predict_proba(self, X): <NEW_LINE> <INDENT> if not self.probability: <NEW_LINE> <INDENT> raise NotImplementedError( "probability estimates must be enabled to use this method") <NEW_LINE> <DEDENT> if self._impl not in ('c_svc', 'nu_svc'): <NEW_LINE> <INDENT> raise NotImplementedError("predict_proba only implemented for SVC " "and NuSVC") <NEW_LINE> <DEDENT> X = self._validate_for_predict(X) <NEW_LINE> pred_proba = (self._sparse_predict_proba if self._sparse else self._dense_predict_proba) <NEW_LINE> return pred_proba(X) <NEW_LINE> <DEDENT> def predict_log_proba(self, X): <NEW_LINE> <INDENT> return np.log(self.predict_proba(X)) <NEW_LINE> <DEDENT> def _dense_predict_proba(self, X): <NEW_LINE> <INDENT> X = self._compute_kernel(X) <NEW_LINE> kernel = self.kernel <NEW_LINE> if callable(kernel): <NEW_LINE> <INDENT> kernel = 'precomputed' <NEW_LINE> <DEDENT> svm_type = LIBSVM_IMPL.index(self._impl) <NEW_LINE> pprob = libsvm.predict_proba( X, self.support_, self.support_vectors_, self.n_support_, self.dual_coef_, self._intercept_, self._label, self.probA_, self.probB_, svm_type=svm_type, kernel=kernel, degree=self.degree, cache_size=self.cache_size, coef0=self.coef0, gamma=self._gamma) <NEW_LINE> return pprob <NEW_LINE> <DEDENT> def _sparse_predict_proba(self, X): <NEW_LINE> <INDENT> X.data = 
np.asarray(X.data, dtype=np.float64, order='C') <NEW_LINE> kernel = self.kernel <NEW_LINE> if callable(kernel): <NEW_LINE> <INDENT> kernel = 'precomputed' <NEW_LINE> <DEDENT> kernel_type = self._sparse_kernels.index(kernel) <NEW_LINE> return libsvm_sparse.libsvm_sparse_predict_proba( X.data, X.indices, X.indptr, self.support_vectors_.data, self.support_vectors_.indices, self.support_vectors_.indptr, self.dual_coef_.data, self._intercept_, LIBSVM_IMPL.index(self._impl), kernel_type, self.degree, self._gamma, self.coef0, self.tol, self.C, self.class_weight_, self.nu, self.epsilon, self.shrinking, self.probability, self.n_support_, self._label, self.probA_, self.probB_) | ABC for LibSVM-based classifiers. | 62598fb37047854f4633f47a |
class CreateQoSBandwidthLimitRule(qos_rule.QosRuleMixin, neutronv20.CreateCommand): <NEW_LINE> <INDENT> resource = BANDWIDTH_LIMIT_RULE_RESOURCE <NEW_LINE> def add_known_arguments(self, parser): <NEW_LINE> <INDENT> super(CreateQoSBandwidthLimitRule, self).add_known_arguments(parser) <NEW_LINE> add_bandwidth_limit_arguments(parser) <NEW_LINE> <DEDENT> def args2body(self, parsed_args): <NEW_LINE> <INDENT> body = {} <NEW_LINE> update_bandwidth_limit_args2body(parsed_args, body) <NEW_LINE> return {'bandwidth_limit_rule': body} | Create a qos bandwidth limit rule. | 62598fb355399d3f056265ba |
class Choose(callbacks.Plugin): <NEW_LINE> <INDENT> def __init__(self, irc): <NEW_LINE> <INDENT> callbacks.Plugin.__init__(self, irc) <NEW_LINE> <DEDENT> def choose(self, irc, msg, args, choices): <NEW_LINE> <INDENT> if choices is None: <NEW_LINE> <INDENT> if conf.get(conf.supybot.plugins.Infobot.personality): <NEW_LINE> <INDENT> irc.reply(choice(zero_choice_reply)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> irc.reply(no_choice_reply) <NEW_LINE> <DEDENT> return <NEW_LINE> <DEDENT> options = [y.strip() for x in choices.split(' or ') for y in x.split(';') if y not in ['', ' ']] <NEW_LINE> if len(options) is 1: <NEW_LINE> <INDENT> if conf.get(conf.supybot.plugins.Infobot.personality): <NEW_LINE> <INDENT> irc.reply(choice(one_choice_reply)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> irc.reply(no_choice_reply) <NEW_LINE> <DEDENT> return <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> irc.reply(choice(options)) <NEW_LINE> <DEDENT> <DEDENT> choose = wrap(choose, [additional('text')]) | Make tough decisions between two or more options, easily. | 62598fb37d847024c075c462 |
class ThreadedTCPRequestHandler(BaseRequestHandler): <NEW_LINE> <INDENT> def handle(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> command = ['./script.sh'] + self.request.recv(1024).decode().split(' ') <NEW_LINE> print(command) <NEW_LINE> process = Popen(command, stdout=PIPE, stderr=PIPE, cwd=join('..', 'Question1')) <NEW_LINE> for line in process.stdout: <NEW_LINE> <INDENT> print(line) <NEW_LINE> self.request.sendall(line) <NEW_LINE> <DEDENT> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> print(e) | This handler is used to handle each client in separate thread | 62598fb35fdd1c0f98e5e02d |
class PadServerTestCase(unittest.TestCase):
    """Test cases for the Server model."""

    def setUp(self):
        # One server fixture, built from the shared TS test-settings dict.
        fixture = dict(title=TS['title'], url=TS['url'], apikey=TS['apikey'])
        self.server = PadServer.objects.create(**fixture)

    def testBasics(self):
        # The fixture must be a PadServer whose unicode form is its URL.
        self.assertTrue(isinstance(self.server, PadServer))
        self.assertEqual(self.server.__unicode__(), TS['url'])
class RandomController(Controller):
    """Controller for random search of processor assignments."""

    def pick_procs(self, graph: Graph, n_procs: int) -> np.ndarray:
        # Draw an independent uniform processor id in [0, n_procs) for
        # every node of the graph.
        node_count = len(graph.nodes)
        return np.random.randint(n_procs, size=node_count)

    def register_times(self, t: List[float]) -> None:
        # Random search does not learn from timing feedback; ignore it.
        return None
class Objective(OptimizationExpression):
    """Objective function.

    Attributes
    ----------
    expression: sympy
        The mathematical expression defining the objective.
    name: str, optional
        The name of the constraint.
    direction: 'max' or 'min'
        The optimization direction.
    value: float, read-only
        The current objective value.
    problem: solver
        The low-level solver object.
    """

    @classmethod
    def clone(cls, objective, model=None, **kwargs):
        # Alternate constructor: copy *objective*, re-binding its variables
        # via _substitute_variables (presumably so the clone can live in a
        # different solver model -- TODO confirm against the base class).
        return cls(cls._substitute_variables(objective, model=model), name=objective.name, direction=objective.direction, sloppy=True, **kwargs)

    def __init__(self, expression, value=None, direction='max', *args, **kwargs):
        self._value = value          # cached objective value, exposed read-only below
        self._direction = direction  # 'max' or 'min'; validated only through the setter
        super(Objective, self).__init__(expression, *args, **kwargs)

    @property
    def value(self):
        """The current objective value (read-only)."""
        return self._value

    def __str__(self):
        # LP-style rendering, e.g. "Maximize\n<expression>".
        return {'max': 'Maximize', 'min': 'Minimize'}[self.direction] + '\n' + str(self.expression)

    def __eq__(self, other):
        # Two objectives are equal iff both expression and direction match;
        # any non-Objective compares unequal.
        # NOTE(review): defining __eq__ makes instances unhashable in
        # Python 3 unless a base class restores __hash__ -- confirm.
        if isinstance(other, Objective):
            return self.expression == other.expression and self.direction == other.direction
        else:
            return False

    def _canonicalize(self, expression):
        expression = super(Objective, self)._canonicalize(expression)
        # Multiplying by 1.0 coerces the expression's coefficients to float
        # form -- presumably a normalization step; verify against the sympy
        # expression types the base class produces.
        expression *= 1.
        return expression

    @property
    def direction(self):
        """The optimization direction, 'max' or 'min'."""
        return self._direction

    @direction.setter
    def direction(self, value):
        # Reject anything other than the two supported directions.
        if value not in ['max', 'min']:
            raise ValueError("Provided optimization direction %s is neither 'min' or 'max'." % value)
        self._direction = value
class TestIoK8sApiCoreV1NodeStatus(unittest.TestCase):
    """IoK8sApiCoreV1NodeStatus unit test stubs."""

    def setUp(self):
        """No fixtures are required for these stubs."""

    def tearDown(self):
        """Nothing to clean up."""

    def testIoK8sApiCoreV1NodeStatus(self):
        """Placeholder test for the IoK8sApiCoreV1NodeStatus model."""
class SDR_Classifier:
    """Maximum Likelyhood classifier for SDRs."""

    def __init__(self, alpha, input_sdr, num_labels):
        self.alpha = alpha
        self.input_sdr = input_sdr
        self.num_labels = num_labels
        # Seed the statistics table with small positive noise so that no
        # (input, label) pair starts with exactly zero probability mass.
        self.stats = np.random.uniform(
            0.1 * self.alpha,
            0.2 * self.alpha,
            size=(self.input_sdr.size, self.num_labels))

    def train(self, labels, input_sdr=None):
        # Normalize the label vector into a probability distribution.
        labels = np.array(labels) / np.sum(labels)
        self.input_sdr.assign(input_sdr)
        active = self.input_sdr.flat_index
        decay = 1 - self.alpha
        # Exponentially decay the rows of the active inputs and the columns
        # of the active labels, then move the active rows toward the labels.
        self.stats[active, :] *= decay
        self.stats[:, np.nonzero(labels)[0]] *= decay
        self.stats[active] += (labels - self.stats[active]) * self.alpha

    def predict(self, input_sdr=None):
        self.input_sdr.assign(input_sdr)
        active = self.input_sdr.flat_index
        # Per-input label distributions, normalized row-wise.
        pdf = self.stats[active, :]
        pdf = pdf / np.sum(pdf, axis=1, keepdims=True)
        # Combine the per-input distributions by summation.  (The original
        # also carried a permanently disabled product-combination branch.)
        return np.sum(pdf, axis=0, keepdims=False)
@ptah.layout(
    'workspace', SiteRoot, parent='page',
    renderer='ptah_ws:templates/layout-workspace.pt')
class WorkspaceLayout(ptah.View):
    """Same as PageLayout; it uses 'page' as its parent layout."""

    def update(self):
        auth = ptah.auth_service
        # Expose the authenticated principal (or None) to the template.
        self.user = auth.get_current_principal()
        # Whether the current user may access the ptah management UI.
        self.ptahManager = manage.check_access(auth.get_userid(), self.request)
        # Anonymous visitors have no principal at all.
        self.isAnon = self.user is None
class RecieveTimingExcced(fsm.Event):
    """Receipt auto-confirmed by the system after the timeout expired.

    (Original docstring: "超时系统自动确认收货".)
    """

    # Human-readable description pulled from the shared settings module.
    description: str = settings.Description.RecieveTimingExcced
class SimpleQuantZmqProcessBase():
    """Base for all zmq processes; offers utility functions for socket
    setup and for creating new streams on a shared asyncio/zmq loop."""

    def __init__(self):
        super().__init__()
        self.context = None
        self.loop = None

    @staticmethod
    def _split_addr(addr):
        """Normalize *addr* ('host', 'host:port' or a (host, port) pair).

        Returns (host, port); port is None when no port was given, in which
        case a bind caller gets a random port.  BUG FIX: the original left
        host/port unbound (NameError) for non-string *addr*.
        """
        if isinstance(addr, str):
            addr = addr.split(':')
        return tuple(addr) if len(addr) == 2 else (addr[0], None)

    def _make_socket(self, sock_type, addr, bind, subscribe=b''):
        """Create, bind/connect and (for SUB sockets) subscribe a socket.

        Returns (socket, port).  This is the bind/connect logic that was
        previously duplicated verbatim in setup() and stream().
        """
        sock = self.context.socket(sock_type)
        host, port = self._split_addr(addr)
        if bind:
            if port:
                sock.bind('tcp://%s:%s' % (host, port))
            else:
                # No explicit port: let zmq pick one and report it back.
                port = sock.bind_to_random_port('tcp://%s' % host)
        else:
            sock.connect('tcp://%s:%s' % (host, port))
        if sock_type == zmq.SUB:
            sock.setsockopt(zmq.SUBSCRIBE, subscribe)
        return sock, port

    def setup(self, sock_type, addr, bind, subscribe=b''):
        """Create the zmq context, attach a zmq-aware event loop, and open
        the primary socket (stored on self.sock)."""
        self.context = zmq.asyncio.Context()
        loop = asyncio.get_event_loop()
        if isinstance(loop, zmq.asyncio.ZMQEventLoop):
            self.loop = loop
        else:
            # Replace a plain asyncio loop with a zmq-aware one.
            self.loop = zmq.asyncio.ZMQEventLoop()
            asyncio.set_event_loop(self.loop)
        sock, _port = self._make_socket(sock_type, addr, bind, subscribe)
        self.sock = sock

    def stream(self, sock_type, addr, bind, callback=None, subscribe=b''):
        """Create an additional socket wrapped in a ZMQStream.

        Returns (stream, port); *callback*, if given, is attached via
        on_recv.  Requires setup() to have been called first (uses the
        shared context and loop).
        """
        sock, port = self._make_socket(sock_type, addr, bind, subscribe)
        stream = zmqstream.ZMQStream(sock, self.loop)
        if callback:
            stream.on_recv(callback)
        return stream, int(port)
class GameRouter(View):
    """Return a View that depends on the current status of the game."""

    inactive_view = staticmethod(PlayInactiveGame.as_view())
    reinforcements_view = staticmethod(PlayReinforcements.as_view())
    finance_reinforcements_view = staticmethod(PlayFinanceReinforcements.as_view())
    orders_view = staticmethod(PlayOrders.as_view())
    strategic_view = staticmethod(PlayStrategic.as_view())
    retreats_view = staticmethod(PlayRetreats.as_view())

    def dispatch(self, request, *args, **kwargs):
        game = get_game_or_404(slug=kwargs['slug'])
        # Not-yet-started, finished, inactive, or paused (for non-staff)
        # games all route to the read-only "inactive" view.
        not_playable = (
            game.started is None
            or game.finished is not None
            or game.phase == machiavelli.PHINACTIVE
            or (game.paused and not request.user.is_staff))
        if not_playable:
            return self.inactive_view(request, *args, **kwargs)
        if game.phase == machiavelli.PHREINFORCE:
            # Finance-enabled games use a dedicated reinforcement view.
            view = (self.finance_reinforcements_view
                    if game.configuration.finances
                    else self.reinforcements_view)
            return view(request, *args, **kwargs)
        phase_views = {
            machiavelli.PHORDERS: self.orders_view,
            machiavelli.PHSTRATEGIC: self.strategic_view,
            machiavelli.PHRETREATS: self.retreats_view,
        }
        view = phase_views.get(game.phase)
        if view is not None:
            return view(request, *args, **kwargs)
        # Unknown phase: fall through (implicitly None), like the original.
class Controller:
    """Controller: wires the View's events to the Model (handles user input).

    (Original docstring: "Контроллер. Отвечает за ввод пользователя".)
    """

    def __init__(self, model: Model, view: View):
        self.model = model
        self.view = view
        # Subscribe to the view's user-interaction events.
        self.view.on_open_profile.on(self.on_open_profile)
        self.view.on_new_account.on(self.on_new_account)
        self.view.on_account_bot_start.on(self.on_bot_start)

    def on_open_profile(self, config):
        # A profile was opened: load every account listed in it.
        self.load_accounts(config['accounts'])

    def on_new_account(self):
        self.view.new_account_card()

    def on_bot_start(self):
        print('start bot')
        self.model.start_service()

    def open_service(self, account_settings=None):
        # BUG FIX: the original body referenced a name `account_settings`
        # that is not defined anywhere in this module chunk (a NameError
        # when called), while load_accounts() iterated configs without
        # passing them along.  Accept the settings as a parameter; the
        # default keeps the zero-argument call signature compatible.
        self.model.open_service(account_settings)

    def load_accounts(self, accounts_config: list):
        for config in accounts_config:
            # Pass each account's config through to the service.
            self.open_service(config)
            self.view.add_account_card(config)
class AnythingType(LittleScribeType):
    """Type that accepts any other type ("takes any type")."""

    def is_super_of(self, other):
        # Anything is a supertype of every type, so this always holds.
        return True
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.