code stringlengths 4 4.48k | docstring stringlengths 1 6.45k | _id stringlengths 24 24 |
|---|---|---|
class BpmFinderProceed(wx.Frame): <NEW_LINE> <INDENT> def __init__(self, parent): <NEW_LINE> <INDENT> wx.Frame.__init__(self, parent, id=wx.ID_ANY, title=u"BpmFinderProceed", pos=wx.DefaultPosition, size=wx.Size(174, 135), style=wx.DEFAULT_FRAME_STYLE | wx.TAB_TRAVERSAL) <NEW_LINE> self.SetSizeHintsSz(wx.DefaultSize, wx.DefaultSize) <NEW_LINE> frame_sizer = wx.BoxSizer(wx.VERTICAL) <NEW_LINE> self.panel = wx.Panel(self, wx.ID_ANY, wx.DefaultPosition, wx.DefaultSize, wx.TAB_TRAVERSAL) <NEW_LINE> self.panel.SetForegroundColour(wx.SystemSettings.GetColour(wx.SYS_COLOUR_WINDOW)) <NEW_LINE> self.panel.SetBackgroundColour(wx.Colour(128, 255, 0)) <NEW_LINE> grid_sizer = wx.GridSizer(0, 2, 0, 0) <NEW_LINE> box_sizer1 = wx.BoxSizer(wx.VERTICAL) <NEW_LINE> self.proceed_button = wx.Button(self.panel, wx.ID_ANY, u"PROCEED", wx.DefaultPosition, wx.Size(165, 105), 0) <NEW_LINE> self.proceed_button.SetFont(wx.Font(22, 74, 90, 90, False, ARIEL)) <NEW_LINE> box_sizer1.Add(self.proceed_button, 0, wx.ALL, 5) <NEW_LINE> self.panel.SetSizer(grid_sizer) <NEW_LINE> self.panel.Layout() <NEW_LINE> grid_sizer.Fit(self.panel) <NEW_LINE> frame_sizer.Add(self.panel, 1, wx.EXPAND | wx.ALL, 0) <NEW_LINE> self.SetSizer(frame_sizer) <NEW_LINE> self.Layout() <NEW_LINE> self.Centre(wx.BOTH) <NEW_LINE> self.proceed_button.Bind(wx.EVT_BUTTON, self.proceed) <NEW_LINE> <DEDENT> def proceed(self, event): <NEW_LINE> <INDENT> raise SystemExit | Subprocess proceed gui. | 62598fccbe7bc26dc925203e |
class RootParser(Tap): <NEW_LINE> <INDENT> field: str = '1' <NEW_LINE> def configure(self): <NEW_LINE> <INDENT> self.add_subparsers(help='All sub parser') <NEW_LINE> self.add_subparser('sub', SubParser) | <Root Parser> | 62598fcc377c676e912f6f5c |
class FunctionalFixture(PloneSandboxLayer): <NEW_LINE> <INDENT> defaultBases = (PLONE_APP_CONTENTTYPES_FIXTURE,) <NEW_LINE> def setUpZope(self, app, configurationContext): <NEW_LINE> <INDENT> import eea.ldapadmin <NEW_LINE> import plone.dexterity <NEW_LINE> import plone.app.textfield <NEW_LINE> self.loadZCML(package=plone.dexterity) <NEW_LINE> self.loadZCML(package=plone.app.dexterity) <NEW_LINE> self.loadZCML(package=plone.app.textfield) <NEW_LINE> self.loadZCML(package=eea.ldapadmin) | Fixture | 62598fccff9c53063f51aa16 |
class TemperatureLog: <NEW_LINE> <INDENT> def add(self,record): <NEW_LINE> <INDENT> raise NotImplementedError('TemperatureLog.add(...)') <NEW_LINE> <DEDENT> def write(line): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def get_max_min(self,channel): <NEW_LINE> <INDENT> xs,ys=self.extract(channel) <NEW_LINE> ys_for_period=[] <NEW_LINE> xxx=[] <NEW_LINE> ymin=[] <NEW_LINE> ymax=[] <NEW_LINE> x_previous=-1 <NEW_LINE> for x,y in zip(xs,ys): <NEW_LINE> <INDENT> if x_previous<0: <NEW_LINE> <INDENT> x_previous=x <NEW_LINE> ys_for_period=[] <NEW_LINE> <DEDENT> if x-x_previous>=1: <NEW_LINE> <INDENT> xxx.append(x_previous) <NEW_LINE> ymin.append(min(ys_for_period)) <NEW_LINE> ymax.append(max(ys_for_period)) <NEW_LINE> x_previous=x <NEW_LINE> x_previous=-1 <NEW_LINE> ys_for_period=[] <NEW_LINE> <DEDENT> ys_for_period.append(y) <NEW_LINE> <DEDENT> return (xxx,ymin,ymax) <NEW_LINE> <DEDENT> def close(self): <NEW_LINE> <INDENT> pass | Used to store temperature values
Abstract class, needs to be implemented by descendents | 62598fcc71ff763f4b5e7b4c |
class PerlDevelCycle(PerlPackage): <NEW_LINE> <INDENT> homepage = "http://search.cpan.org/~lds/Devel-Cycle-1.12/lib/Devel/Cycle.pm" <NEW_LINE> url = "http://search.cpan.org/CPAN/authors/id/L/LD/LDS/Devel-Cycle-1.12.tar.gz" <NEW_LINE> version('1.12', '3d9a963da87b17398fab9acbef63f277') | Find memory cycles in objects | 62598fcca219f33f346c6bd4 |
class Test_prob_1to1(TestProbMethod_1to1, prob): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> super(Test_prob_1to1, self).setUp() <NEW_LINE> self.disc._input_sample_set.estimate_volume_mc() <NEW_LINE> calcP.prob(self.disc) | Test :meth:`bet.calculateP.calculateP.prob` on a 1 to 1 map. | 62598fcc55399d3f056268e4 |
class MxNode(JuniperNode): <NEW_LINE> <INDENT> pass | Class Node to create JunOS mx node objects. | 62598fcc091ae35668704ff5 |
class TestHandleNumber: <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> @pytest.mark.facade <NEW_LINE> @pytest.mark.sqlalchemy <NEW_LINE> def test_invalid_format(): <NEW_LINE> <INDENT> artifacts = _create_artifacts() <NEW_LINE> artifacts.open_api.type = "number" <NEW_LINE> artifacts.open_api.format = "unsupported" <NEW_LINE> with pytest.raises(exceptions.FeatureNotImplementedError): <NEW_LINE> <INDENT> simple._handle_number(artifacts=artifacts) <NEW_LINE> <DEDENT> <DEDENT> @staticmethod <NEW_LINE> @pytest.mark.parametrize( "format_, expected_number_cls", [ pytest.param(None, sqlalchemy.Float, id="None"), pytest.param("float", sqlalchemy.Float, id="float"), ], ) <NEW_LINE> @pytest.mark.facade <NEW_LINE> @pytest.mark.sqlalchemy <NEW_LINE> def test_valid(format_, expected_number_cls): <NEW_LINE> <INDENT> artifacts = _create_artifacts() <NEW_LINE> artifacts.open_api.type = "number" <NEW_LINE> artifacts.open_api.format = format_ <NEW_LINE> number = simple._handle_number(artifacts=artifacts) <NEW_LINE> assert isinstance(number, expected_number_cls) | Tests for _handle_number. | 62598fcc956e5f7376df5863 |
class DiskMetaTileStorage(MetaTileStorageImpl): <NEW_LINE> <INDENT> def __init__(self, root='.', dir_mode='hilbert', levels=range(0, 22), stride=1, format=None, readonly=False, gzip=False): <NEW_LINE> <INDENT> assert isinstance(root, six.string_types) <NEW_LINE> if not isinstance(format, FormatBundle): <NEW_LINE> <INDENT> raise MetaTileStorageError('Must specify format explicitly.') <NEW_LINE> <DEDENT> if not os.path.isabs(root): <NEW_LINE> <INDENT> raise MetaTileStorageError('Only accepts an absolute path.') <NEW_LINE> <DEDENT> key_mode = create_key_mode(dir_mode, prefix=root, extension=format.tile_format.extension, sep=os.sep, gzip=gzip) <NEW_LINE> serializer = MetaTileSerializer( gzip=gzip, mimetype=format.tile_format.mimetype) <NEW_LINE> persistent = DiskStorage() <NEW_LINE> storage = GenericStorageImpl(key_concept=key_mode, serializer_concept=serializer, storage_concept=persistent) <NEW_LINE> MetaTileStorageImpl.__init__(self, storage, levels=levels, stride=stride, readonly=readonly) | Store ``MetaTile`` on a file system.
:param root: Required, root directory of the storage, must be a
absolute filesystem path.
:type root: str
:param dir_mode: Specifies how the directory names are calculated from
metatile index, possible choices are:
`simple`
Same as the tile api url schema, ``z/x/y.ext``.
`hilbert`
Generate a hashed directory tree using Hilbert Curve.
`legacy`
Path mode used by old `mason` codebase.
`legacy` and `hilbert` mode will limit files and subdirs under a
directory by calculating a "hash" string from tile coordinate.
The directory tree structure also groups adjacent geographical
items together, improves filesystem cache performance, default
value is ``hilbert``.
:type dir_mode: str
:param levels: Zoom levels of the pyramid, must be a list of integers,
default value is ``0-22``.
:type levels: list
:param stride: Stride of the MetaTile in this pyramid, default
value is ``1``.
:type stride: int
:param format: `FormatBundle` of the storage which specifies:
- `mimetype` of the tiles stored in the storage,
- `exension` of the tiles, Note if `gzip` option is set to ``True``,
``.gz`` is appended to extension.
- How to split a `MetaTile` into tiles.
:type format: :class:`~stonemason.formatbundle.FormatBundle`
:param readonly: Whether the storage is created in read only mode, default
is ``False``, :meth:`put` and :meth:`retire` always raises
:exc:`ReadOnlyStorage` if `readonly` is set.
:type readonly: bool
:param gzip: Whether the metatile file stored on filesystem will be gzipped,
default is ``False``. Note when `gzip` is enabled, ``.gz`` is
automatically appended to `extension`.
:type gzip: bool | 62598fccf9cc0f698b1c54b9 |
class Lista(object): <NEW_LINE> <INDENT> def __init__(self, Primero=None): <NEW_LINE> <INDENT> self.PrimerNodo = Primero <NEW_LINE> self.UltimoNodo = None <NEW_LINE> self.size= 0 <NEW_LINE> <DEDENT> def getSize(self): <NEW_LINE> <INDENT> return self.size <NEW_LINE> <DEDENT> def Eliminar (self, ind): <NEW_LINE> <INDENT> i = 0 <NEW_LINE> Actual = self.PrimerNodo <NEW_LINE> prev_node = None <NEW_LINE> while Actual: <NEW_LINE> <INDENT> if i == int(ind): <NEW_LINE> <INDENT> if prev_node: <NEW_LINE> <INDENT> prev_node.setSiguiente(Actual.getSiguiente()) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.PrimerNodo = Actual.getSiguiente() <NEW_LINE> <DEDENT> self.size -= 1 <NEW_LINE> return Actual.getData() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> prev_node = Actual <NEW_LINE> Actual = Actual.getSiguiente() <NEW_LINE> i+=1 <NEW_LINE> <DEDENT> <DEDENT> return "No se ha Podido Eliminar" <NEW_LINE> <DEDENT> def AgregarNodo(self,datos): <NEW_LINE> <INDENT> Nuevo = Nodo(datos,self.size) <NEW_LINE> if self.UltimoNodo: <NEW_LINE> <INDENT> self.UltimoNodo.setSiguiente(Nuevo) <NEW_LINE> self.UltimoNodo=Nuevo <NEW_LINE> self.size +=1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.PrimerNodo=Nuevo <NEW_LINE> self.UltimoNodo=Nuevo <NEW_LINE> self.size +=1 <NEW_LINE> <DEDENT> <DEDENT> def Buscar(self,dato): <NEW_LINE> <INDENT> i=0 <NEW_LINE> Actual = self.PrimerNodo <NEW_LINE> while Actual: <NEW_LINE> <INDENT> if Actual.getData()==dato: <NEW_LINE> <INDENT> return i <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> Actual = Actual.getSiguiente() <NEW_LINE> i += 1 <NEW_LINE> <DEDENT> <DEDENT> return "No Existe el Dato" <NEW_LINE> <DEDENT> def buscarIndice(self,index): <NEW_LINE> <INDENT> i = 0 <NEW_LINE> Actual = self.PrimerNodo <NEW_LINE> while Actual: <NEW_LINE> <INDENT> if i==index: <NEW_LINE> <INDENT> return Actual.getData() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> Actual = Actual.getSiguiente() <NEW_LINE> i+=1 <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def mostrar(self): 
<NEW_LINE> <INDENT> if not self.PrimerNodo: <NEW_LINE> <INDENT> print("LA LISTA ESTA VACIA") <NEW_LINE> <DEDENT> Actual = self.PrimerNodo <NEW_LINE> while Actual: <NEW_LINE> <INDENT> print(str(Actual.getData())) <NEW_LINE> Actual= Actual.getSiguiente() | docstring for Lista | 62598fcc3346ee7daa33782d |
class AboutPageView(TemplateView): <NEW_LINE> <INDENT> template_name = 'basic/about.html' | This class is responsible for the About View | 62598fccdc8b845886d53988 |
class TestFindSum(object): <NEW_LINE> <INDENT> def test_findSum_10(self): <NEW_LINE> <INDENT> assert findSum([3, 5], 10) == 23 <NEW_LINE> <DEDENT> def test_findSum_1000(self): <NEW_LINE> <INDENT> assert findSum([3, 5], 1000) == 266333 | This function is to test main.py's functions | 62598fcc97e22403b383b2d3 |
class FeedForwardNetwork(nn.Block): <NEW_LINE> <INDENT> def __init__(self, hidden_size, filter_size, relu_dropout, train, **kwargs): <NEW_LINE> <INDENT> super(FeedForwardNetwork, self).__init__(**kwargs) <NEW_LINE> self.hidden_size = hidden_size <NEW_LINE> self.filter_size = filter_size <NEW_LINE> self.relu_dropout = relu_dropout <NEW_LINE> self.train = train <NEW_LINE> with self.name_scope(): <NEW_LINE> <INDENT> self.filter_dense_layer = nn.Dense(self.filter_size, activation='relu', use_bias=True, flatten=False) <NEW_LINE> self.output_dense_layer = nn.Dense(self.hidden_size, use_bias=True, flatten=False) <NEW_LINE> self.dropout = nn.Dropout(1.0 - self.relu_dropout) <NEW_LINE> <DEDENT> <DEDENT> def forward(self, x, padding=None): <NEW_LINE> <INDENT> ctx = x.context <NEW_LINE> batch_size = x.shape[0] <NEW_LINE> length = x.shape[1] <NEW_LINE> if padding is not None: <NEW_LINE> <INDENT> pad_mask = nd.reshape(padding, (-1)) <NEW_LINE> nonpad_ids = nd.array(np.where(pad_mask.asnumpy() < 1e-9), ctx=ctx) <NEW_LINE> x = nd.reshape(x, (-1, self.hidden_size)) <NEW_LINE> x = nd.gather_nd(x, indices=nonpad_ids) <NEW_LINE> x = nd.expand_dims(x, axis=0) <NEW_LINE> <DEDENT> output = self.filter_dense_layer(x) <NEW_LINE> if self.train: <NEW_LINE> <INDENT> output = self.dropout(output) <NEW_LINE> <DEDENT> output = self.output_dense_layer(output) <NEW_LINE> if padding is not None: <NEW_LINE> <INDENT> output = nd.squeeze(output, axis=0) <NEW_LINE> output = nd.scatter_nd(data=output, indices=nonpad_ids, shape=(batch_size * length, self.hidden_size)) <NEW_LINE> output = nd.reshape(output, shape=(batch_size, length, self.hidden_size)) <NEW_LINE> <DEDENT> return output | Fully connected feedforward network | 62598fcc283ffb24f3cf3c52 |
class ThreadPool: <NEW_LINE> <INDENT> def __init__(self, num_workers, q_size=0): <NEW_LINE> <INDENT> self.requestsQueue = Queue.Queue(q_size) <NEW_LINE> self.resultsQueue = Queue.Queue() <NEW_LINE> self.workers = [] <NEW_LINE> self.workRequests = {} <NEW_LINE> self.createWorkers(num_workers) <NEW_LINE> <DEDENT> def createWorkers(self, num_workers): <NEW_LINE> <INDENT> for i in range(num_workers): <NEW_LINE> <INDENT> self.workers.append(WorkerThread(self.requestsQueue, self.resultsQueue)) <NEW_LINE> <DEDENT> <DEDENT> def dismissWorkers(self, num_workers): <NEW_LINE> <INDENT> for i in range(min(num_workers, len(self.workers))): <NEW_LINE> <INDENT> worker = self.workers.pop() <NEW_LINE> worker.dismiss() <NEW_LINE> <DEDENT> <DEDENT> def putRequest(self, request): <NEW_LINE> <INDENT> self.requestsQueue.put(request) <NEW_LINE> self.workRequests[request.requestID] = request <NEW_LINE> <DEDENT> def poll(self, block=False): <NEW_LINE> <INDENT> while 1: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> if not self.workRequests: <NEW_LINE> <INDENT> raise NoResultsPending <NEW_LINE> <DEDENT> elif block and not self.workers: <NEW_LINE> <INDENT> raise NoWorkersAvailable <NEW_LINE> <DEDENT> request, result = self.resultsQueue.get(block=block) <NEW_LINE> if request.callback: <NEW_LINE> <INDENT> request.callback(request, result) <NEW_LINE> <DEDENT> del self.workRequests[request.requestID] <NEW_LINE> <DEDENT> except Queue.Empty: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def wait(self): <NEW_LINE> <INDENT> while 1: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.poll(True) <NEW_LINE> <DEDENT> except NoResultsPending: <NEW_LINE> <INDENT> break | A thread pool, distributing work requests and collecting results.
See the module doctring for more information. | 62598fcc7c178a314d78d86c |
class InterBoundaryIter(object): <NEW_LINE> <INDENT> def __init__(self, stream, boundary): <NEW_LINE> <INDENT> self._stream = stream <NEW_LINE> self._boundary = boundary <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> def next(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return LazyStream(BoundaryIter(self._stream, self._boundary)) <NEW_LINE> <DEDENT> except InputStreamExhausted: <NEW_LINE> <INDENT> raise StopIteration() <NEW_LINE> <DEDENT> <DEDENT> __next__ = next | A Producer that will iterate over boundaries. | 62598fccf9cc0f698b1c54ba |
class ImpalaEngineSpec(BaseEngineSpec): <NEW_LINE> <INDENT> engine = 'impala' <NEW_LINE> time_grains = ( Grain("Time Column", _('Time Column'), "{col}"), Grain("minute", _('minute'), "TRUNC({col}, 'MI')"), Grain("hour", _('hour'), "TRUNC({col}, 'HH')"), Grain("day", _('day'), "TRUNC({col}, 'DD')"), Grain("week", _('week'), "TRUNC({col}, 'WW')"), Grain("month", _('month'), "TRUNC({col}, 'MONTH')"), Grain("quarter", _('quarter'), "TRUNC({col}, 'Q')"), Grain("year", _('year'), "TRUNC({col}, 'YYYY')"), ) <NEW_LINE> @classmethod <NEW_LINE> def convert_dttm(cls, target_type, dttm): <NEW_LINE> <INDENT> tt = target_type.upper() <NEW_LINE> if tt == 'DATE': <NEW_LINE> <INDENT> return "'{}'".format(dttm.strftime('%Y-%m-%d')) <NEW_LINE> <DEDENT> return "'{}'".format(dttm.strftime('%Y-%m-%d %H:%M:%S')) | Engine spec for Cloudera's Impala | 62598fcc5fcc89381b266333 |
class NameSchemaNode(colander.SchemaNode): <NEW_LINE> <INDENT> schema_type = colander.String <NEW_LINE> max_len = 100 <NEW_LINE> editing = None <NEW_LINE> def validator(self, node, value): <NEW_LINE> <INDENT> context = self.bindings['context'] <NEW_LINE> request = self.bindings['request'] <NEW_LINE> editing = self.editing <NEW_LINE> if editing is not None: <NEW_LINE> <INDENT> if callable(editing): <NEW_LINE> <INDENT> editing = editing(context, request) <NEW_LINE> <DEDENT> if editing: <NEW_LINE> <INDENT> context = context.__parent__ <NEW_LINE> <DEDENT> <DEDENT> try: <NEW_LINE> <INDENT> if editing: <NEW_LINE> <INDENT> value = context.validate_name(value) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> value = context.check_name(value) <NEW_LINE> <DEDENT> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> raise colander.Invalid(node, e.args[0], value) <NEW_LINE> <DEDENT> if len(value) > self.max_len: <NEW_LINE> <INDENT> raise colander.Invalid( node, 'Length of name must be %s characters or fewer' % self.max_len, value ) | Convenience Colander schemanode used to represent the name (aka
``__name__``) of an object in a propertysheet or add form which allows for
customizing the detection of whether editing or adding is being done, and
setting a max length for the name.
By default it uses the context's ``check_name`` API to ensure that the name
provided is valid, and limits filename length to a default of 100
characters. Some usage examples follow.
This sets up the name_node to assume that it's in 'add' mode with the
default 100 character max limit.::
name_node = NameSchemaNode()
This sets up the name_node to assume that it's in 'add' mode, and that the
maximum length of the name provided is 20 characters::
name_node = NameSchemaNode(max_len=20)
This sets up the name_node to assume that it's in 'edit'
mode (``check_name`` will be called on the **parent** of the bind
context, not on the context itself)::
name_node = NameSchemaNode(editing=True)
This sets up the name_node to condition whether it's in edit mode on the
result of a function::
def i_am_editing(context, request):
return request.registry.content.istype(context, 'Document')
name_node = NameSchemaNode(editing=i_am_editing) | 62598fcc3617ad0b5ee06515 |
class FieldQuery(Persistent): <NEW_LINE> <INDENT> implements(IFieldQuery) <NEW_LINE> _field_id = None <NEW_LINE> _fieldname = None <NEW_LINE> def __init__(self, field, comparator, value): <NEW_LINE> <INDENT> if fieldtypes.IField.providedBy(field): <NEW_LINE> <INDENT> field = field.__name__ <NEW_LINE> <DEDENT> self._fieldname = str(field) <NEW_LINE> self.comparator = str(comparator) <NEW_LINE> self._value = value <NEW_LINE> <DEDENT> def _get_fieldname(self): <NEW_LINE> <INDENT> return self._fieldname or self._field_id[1] <NEW_LINE> <DEDENT> def _set_fieldname(self, name): <NEW_LINE> <INDENT> self._fieldname = name <NEW_LINE> <DEDENT> fieldname = property(_get_fieldname, _set_fieldname) <NEW_LINE> def field(self, schema): <NEW_LINE> <INDENT> name = self.fieldname <NEW_LINE> if name in getFieldNamesInOrder(schema): <NEW_LINE> <INDENT> return schema[name] <NEW_LINE> <DEDENT> return None <NEW_LINE> <DEDENT> def build(self, schema): <NEW_LINE> <INDENT> if not self.validate(schema): <NEW_LINE> <INDENT> raise ValidationError('Unable to validate "%s"' % self.fieldname) <NEW_LINE> <DEDENT> return query_object(self, schema) <NEW_LINE> <DEDENT> def _set_value(self, value): <NEW_LINE> <INDENT> self._value = value <NEW_LINE> <DEDENT> def _get_value(self): <NEW_LINE> <INDENT> return self._value <NEW_LINE> <DEDENT> value = property(_get_value, _set_value) <NEW_LINE> def validate(self, schema): <NEW_LINE> <INDENT> field = self.field(schema) <NEW_LINE> if field is None: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> field.validate(self.value) <NEW_LINE> <DEDENT> except ValidationError: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return True | FieldQuery is field / value / comparator entry. Field is persisted
using serialization (tuple of dotted name of interface, fieldname),
but resolution of field object is cached indefinitely in volatile. | 62598fccff9c53063f51aa1a |
@dataclass(order=True, frozen=True) <NEW_LINE> class FlatIndex: <NEW_LINE> <INDENT> invalid: bool <NEW_LINE> idx: Union[int, typing.Tuple[int, int]] | Flat indexes for both invalids and valids.
If invalid, there is no flat index.
The goal there is just to align the first invalids temporally rather than preserving flattened order.
Therefore, flat_idx is just a tuple of t and s. t comes first so sorting them puts t's together. | 62598fcc4c3428357761a68c |
class DiskItem(object): <NEW_LINE> <INDENT> name = None <NEW_LINE> is_dir = None <NEW_LINE> is_removed = None <NEW_LINE> remove_date = None <NEW_LINE> path=None <NEW_LINE> def __init__(self, name, is_dir=False, is_removed=False, remove_date=None, path=None): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.is_dir = is_dir <NEW_LINE> self.is_removed = is_removed <NEW_LINE> self.remove_date = remove_date <NEW_LINE> self.path = path | Describes disk items.
Fields:
* name -- last elemet of path
* is_dir
* is_removed
* remove_date
* path -- path to item, relative to disk folder | 62598fcc71ff763f4b5e7b50 |
class DeviceManage(ModelDateMixin): <NEW_LINE> <INDENT> name = models.CharField(max_length=50, verbose_name='设备名称') <NEW_LINE> device_id = models.CharField(max_length=30, unique=True, verbose_name='设备id') <NEW_LINE> brand = models.CharField(max_length=20, verbose_name='设备品牌') <NEW_LINE> phone_model = models.CharField(max_length=50, verbose_name='设备机型') <NEW_LINE> phone_sys_ver = models.CharField(max_length=100, verbose_name='设备系统版本') <NEW_LINE> cpu_info = models.CharField(max_length=30, verbose_name='设备cpu核数') <NEW_LINE> mem_info = models.CharField(max_length=50, verbose_name='设备内存信息') <NEW_LINE> image_link = models.CharField(max_length=100, verbose_name='设备图片地址') <NEW_LINE> resolution_info = models.CharField(max_length=20, verbose_name='设备分辨率') <NEW_LINE> wireless_ip = models.CharField(unique=True, max_length=30, verbose_name='ip地址') <NEW_LINE> wireless_port = models.IntegerField(verbose_name='无线端口') <NEW_LINE> status = models.CharField(max_length=20, default='Ready') <NEW_LINE> run_info = models.CharField(max_length=50, blank=True, null=True, verbose_name='执行任务信息') | app表,设备管理 | 62598fccab23a570cc2d4f55 |
class AccountViewSet(ModelViewSet): <NEW_LINE> <INDENT> queryset = Account.objects.all() <NEW_LINE> serializer_class = AccountSerializer <NEW_LINE> def create(self, request, *args, **kwargs): <NEW_LINE> <INDENT> serializer = AccountSerializer(data=request.data) <NEW_LINE> data = {} <NEW_LINE> if serializer.is_valid(): <NEW_LINE> <INDENT> account = serializer.save() <NEW_LINE> data['response'] = 'successfully registered a new user' <NEW_LINE> data['username'] = account.username <NEW_LINE> data['token'] = Token.objects.get(user=account).key <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> data = serializer.errors <NEW_LINE> return Response(data, status=status.HTTP_412_PRECONDITION_FAILED) <NEW_LINE> <DEDENT> return Response(data) <NEW_LINE> <DEDENT> def update(self, request, *args, **kwargs): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return super().update(request) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> response = {'message': 'All required fields have not been filled.'} <NEW_LINE> return Response(response, status=status.HTTP_412_PRECONDITION_FAILED) | View of Account | 62598fcc7cff6e4e811b5df6 |
class SortedSetEncoder(json.JSONEncoder): <NEW_LINE> <INDENT> def default(self, o): <NEW_LINE> <INDENT> if isinstance(o, set): <NEW_LINE> <INDENT> return sorted(o) <NEW_LINE> <DEDENT> return json.JSONEncoder.default(self, o) | Encode sets as sorted lists. | 62598fcca219f33f346c6bd8 |
class CellChars(enum.Enum): <NEW_LINE> <INDENT> FLAG = 'F' <NEW_LINE> BOMB = '*' <NEW_LINE> MISTAKE = 'O' <NEW_LINE> WRONG_FLAG = 'x' <NEW_LINE> NOT_OPENED = '.' <NEW_LINE> OPENED = lambda s: str(s or ' ') | Текстовое представление клеток поля | 62598fcca05bb46b3848ac3a |
class OperationDnsKeyContext(_messages.Message): <NEW_LINE> <INDENT> newValue = _messages.MessageField('DnsKey', 1) <NEW_LINE> oldValue = _messages.MessageField('DnsKey', 2) | A OperationDnsKeyContext object.
Fields:
newValue: The post-operation DnsKey resource.
oldValue: The pre-operation DnsKey resource. | 62598fccf9cc0f698b1c54bb |
@dataclass(frozen=True) <NEW_LINE> class EventInvalidReceivedWithdrawExpired(Event): <NEW_LINE> <INDENT> attempted_withdraw: WithdrawAmount <NEW_LINE> reason: str | Event emitted when an invalid withdraw expired event is received. | 62598fcccc40096d6161a3bf |
class Optional(RuleWrapper): <NEW_LINE> <INDENT> def __init__(self, rule, default=None): <NEW_LINE> <INDENT> super(Optional, self).__init__(rule) <NEW_LINE> self.default = default if default is not None else '' <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return '[ {rule!r} ]'.format(rule=self.rule) <NEW_LINE> <DEDENT> def __invert__(self): <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> def parse(self, s, context): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> context = super(Optional, self).parse(s, context=context.copy()) <NEW_LINE> <DEDENT> except NoMatchError: <NEW_LINE> <INDENT> context.update( _match='', _unparsed=s, ) <NEW_LINE> <DEDENT> return context | Optionally match a rule. | 62598fcc4527f215b58ea29f |
class ChapterStart(State): <NEW_LINE> <INDENT> def process(self, parser): <NEW_LINE> <INDENT> values = re.match(': start (\d*.\d*),', parser.remaining) <NEW_LINE> parser.root.update({'start': int(float(values.groups()[0])), 'start_time': values.groups()[0]}) <NEW_LINE> parser.remaining = parser.remaining[values.end():] <NEW_LINE> parser.state = ChapterEnd() | Inicio do Capitulo. | 62598fcc283ffb24f3cf3c57 |
class MaskExtract(bpy.types.Operator): <NEW_LINE> <INDENT> bl_idname = "mask.extraction" <NEW_LINE> bl_label = "Extract masked areas" <NEW_LINE> bl_options = {'REGISTER', 'UNDO'} <NEW_LINE> @classmethod <NEW_LINE> def poll(cls, context): <NEW_LINE> <INDENT> return context.active_object is not None and context.active_object.mode == 'SCULPT' <NEW_LINE> <DEDENT> def execute(self, context): <NEW_LINE> <INDENT> wm = context.window_manager <NEW_LINE> activeObj = context.active_object <NEW_LINE> modnam = "Mask Extract" <NEW_LINE> vname = "mask extraction vgroup" <NEW_LINE> if context.sculpt_object.use_dynamic_topology_sculpting: <NEW_LINE> <INDENT> self.report({'WARNING'}, "Exit Dyntopo First!") <NEW_LINE> return {'FINISHED'} <NEW_LINE> <DEDENT> isolate(activeObj) <NEW_LINE> duplicateObject(context.scene.name, activeObj.name, activeObj) <NEW_LINE> mask_to_vertex_group(context.active_object, vname, wm.mask_extract_offset) <NEW_LINE> if wm.mask_detect_layer == True: <NEW_LINE> <INDENT> self.report({'WARNING'}, "Masked Areas Present!") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.report({'WARNING'}, "No Masked Areas!") <NEW_LINE> activeObj.select = False <NEW_LINE> bpy.ops.object.delete() <NEW_LINE> activeObj.select = True <NEW_LINE> bpy.context.scene.objects.active = bpy.data.objects[activeObj.name] <NEW_LINE> return {'FINISHED'} <NEW_LINE> <DEDENT> md = bpy.context.active_object.modifiers.new(modnam, 'MASK') <NEW_LINE> md.vertex_group = vname <NEW_LINE> bpy.ops.object.modifier_apply(apply_as='DATA', modifier=modnam) <NEW_LINE> smoothextracted() <NEW_LINE> md = bpy.context.active_object.modifiers.new(modnam + "solid", 'SOLIDIFY') <NEW_LINE> md.use_rim = True <NEW_LINE> md.thickness = wm.mask_extract_thickness <NEW_LINE> if wm.mask_solid_apply == True: <NEW_LINE> <INDENT> bpy.ops.object.modifier_apply(apply_as='DATA', modifier=modnam + "solid") <NEW_LINE> smoothextracted() <NEW_LINE> <DEDENT> if vname in bpy.context.active_object.vertex_groups: <NEW_LINE> <INDENT> 
bpy.ops.object.vertex_group_set_active(group=vname) <NEW_LINE> bpy.ops.object.vertex_group_remove(all=False) <NEW_LINE> <DEDENT> isolate(activeObj) <NEW_LINE> bpy.context.scene.objects.active = bpy.data.objects[activeObj.name] <NEW_LINE> return {'FINISHED'} | Decimate Masked Areas | 62598fccec188e330fdf8c67 |
class WebdriverBaseTest(unittest.TestCase): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> @read_browser_config("padded_sel.json") <NEW_LINE> def setUpClass(cls, **kwargs): <NEW_LINE> <INDENT> logger.debug(kwargs) <NEW_LINE> cls.browser = Webdriver(**kwargs) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def tearDownClass(cls): <NEW_LINE> <INDENT> cls.browser.quit() <NEW_LINE> <DEDENT> def run(self, result=None): <NEW_LINE> <INDENT> super(WebdriverBaseTest, self).run(TestResultEx(result, self)) | Base Test Class with support for creating and tearing down the browser object | 62598fccab23a570cc2d4f56 |
class TopologyAssociation(msrest.serialization.Model): <NEW_LINE> <INDENT> _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'resource_id': {'key': 'resourceId', 'type': 'str'}, 'association_type': {'key': 'associationType', 'type': 'str'}, } <NEW_LINE> def __init__( self, *, name: Optional[str] = None, resource_id: Optional[str] = None, association_type: Optional[Union[str, "AssociationType"]] = None, **kwargs ): <NEW_LINE> <INDENT> super(TopologyAssociation, self).__init__(**kwargs) <NEW_LINE> self.name = name <NEW_LINE> self.resource_id = resource_id <NEW_LINE> self.association_type = association_type | Resources that have an association with the parent resource.
:param name: The name of the resource that is associated with the parent resource.
:type name: str
:param resource_id: The ID of the resource that is associated with the parent resource.
:type resource_id: str
:param association_type: The association type of the child resource to the parent resource.
Possible values include: "Associated", "Contains".
:type association_type: str or ~azure.mgmt.network.v2019_07_01.models.AssociationType | 62598fcc63b5f9789fe85549 |
class SoftwareIsolationEmbedding(MessageRateEmbedding): <NEW_LINE> <INDENT> def __init__(self, vsdn, connector): <NEW_LINE> <INDENT> super(SoftwareIsolationEmbedding, self).__init__( vsdn=vsdn, connector=connector, logname='SoftwareIsolationEmbedding' ) <NEW_LINE> <DEDENT> def calculate_limit(self, entity, update=True): <NEW_LINE> <INDENT> cpu_stress = self._calculate_hypervisor_cpu() <NEW_LINE> if entity.info.total_cpu * 0.9 >= cpu_stress: <NEW_LINE> <INDENT> if update: <NEW_LINE> <INDENT> entity.info.used_cpu = cpu_stress <NEW_LINE> entity.info.cfg_msg_rate += self._vsdn.message_rate <NEW_LINE> <DEDENT> return self._vsdn.message_rate <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise RuntimeError(('Requested message rate for software isolation ' + 'too high, total: {}, used: {}, requested: {}, ' + 'threshold: {}').format( entity.info.total_cpu, entity.info.used_cpu, cpu_stress, entity.info.total_cpu * 0.9) ) <NEW_LINE> <DEDENT> <DEDENT> def remove(self): <NEW_LINE> <INDENT> stub = management.hypervisor.HypervisorFactory .produce(element=self._vsdn.hypervisor) <NEW_LINE> stub.update_slice({'slice_name': self._vsdn.name, 'rate_limit': None}) <NEW_LINE> cpu_stress = self._calculate_hypervisor_cpu(negative_rate=True) <NEW_LINE> self._vsdn.hypervisor.info.used_cpu = cpu_stress <NEW_LINE> self._vsdn.hypervisor.info.cfg_msg_rate -= self._vsdn.message_rate <NEW_LINE> self._logger.info('successfully removed Software Limint on ' + 'slice {}'.format(self._vsdn.name)) <NEW_LINE> <DEDENT> def embed(self): <NEW_LINE> <INDENT> stub = management.hypervisor.HypervisorFactory .produce(element=self._vsdn.hypervisor) <NEW_LINE> limit = self.calculate_limit(entity=self._vsdn.hypervisor) <NEW_LINE> self._logger.debug('Update slice {}, set rate limit to {}'.format( self._vsdn.name, limit)) <NEW_LINE> stub.update_slice({'slice_name': self._vsdn.name, 'rate_limit': limit}) <NEW_LINE> self._logger.info(('successfully embedded Software Isolation ' + 'slice {}, rate 
{}.').format(self._vsdn.name, limit)) | Concrete implementation of ``MessageRateEmbedding`` for software isolation
on a hypervisor node.
Args:
logger (logging.Logger): Logger object
vsdn (data.dbinterfaces.Vsdn): Object representing VSDN for
which ratelimit should be embedded
connector (data.dbinterfaces.StormConnector): Connector object
to database | 62598fcc377c676e912f6f60 |
class PortfolioParamsValidatorMixin(BaseParamsValidatorMixin): <NEW_LINE> <INDENT> pass | Mixin with validators for validate
request parameters. | 62598fcc5fcc89381b266335 |
class Average(object): <NEW_LINE> <INDENT> def __init__(self, set_size): <NEW_LINE> <INDENT> self.average = 0 <NEW_LINE> self.time = 0 <NEW_LINE> self.price = None <NEW_LINE> self.set_size = set_size <NEW_LINE> self.points = [] <NEW_LINE> <DEDENT> def update(self, point): <NEW_LINE> <INDENT> if len(self.points) < self.set_size: <NEW_LINE> <INDENT> self.average = self.add_initial_points(float(point)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.average = self.add_successive_points(float(point)) <NEW_LINE> <DEDENT> self.time += 1 <NEW_LINE> self.price = float(point) <NEW_LINE> if len(self.points) > self.set_size: <NEW_LINE> <INDENT> self.points.pop(0) <NEW_LINE> <DEDENT> return self.average <NEW_LINE> <DEDENT> def add_successive_points(self, point): <NEW_LINE> <INDENT> raise NotImplementedError("add_successive_points not yet implemented") <NEW_LINE> <DEDENT> def add_initial_points(self, point): <NEW_LINE> <INDENT> raise NotImplementedError("add_initial_points not yet implemented") <NEW_LINE> <DEDENT> def to_json(self): <NEW_LINE> <INDENT> return dumps({ "price": self.price, "time": self.time, "average": self.average}) | Attributes:
average -- the current average at a particular tick
time -- the current time tick for this average
set_size -- the subset size for this moving average
points -- the list of points currently part of the moving average | 62598fcc7b180e01f3e49238 |
class UnparsedEntryTests(TestCase, EntryTestsMixin): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.entry = UnparsedEntry(" This is a bogus entry. \n") <NEW_LINE> <DEDENT> def test_fromString(self): <NEW_LINE> <INDENT> self.assertEqual(" This is a bogus entry. \n", self.entry._string) <NEW_LINE> <DEDENT> def test_matchesHost(self): <NEW_LINE> <INDENT> self.assertEqual(False, self.entry.matchesHost("www.twistedmatrix.com")) <NEW_LINE> <DEDENT> def test_matchesKey(self): <NEW_LINE> <INDENT> self.assertEqual(False, self.entry.matchesKey(Key.fromString(sampleKey))) <NEW_LINE> <DEDENT> def test_toString(self): <NEW_LINE> <INDENT> self.assertEqual(" This is a bogus entry. ", self.entry.toString()) | Tests for L{UnparsedEntry} | 62598fcc167d2b6e312b734a |
class Embedding(t.nn.Module): <NEW_LINE> <INDENT> def __init__(self, vocab_size=1000, embedding_size=512, padding_idx=0, max_length=2048, dropout=0.1, scale_word_embedding=True): <NEW_LINE> <INDENT> super(Embedding, self).__init__() <NEW_LINE> self.word_embedding = t.nn.Embedding(vocab_size, embedding_size, padding_idx) <NEW_LINE> self.position_embedding = PositionalEncoding( d_model=embedding_size, dropout_rate=dropout, max_len=max_length) <NEW_LINE> t.nn.init.xavier_normal_(self.word_embedding.weight) <NEW_LINE> <DEDENT> def forward(self, word_id): <NEW_LINE> <INDENT> embedding = self.word_embedding(word_id) <NEW_LINE> embedding = self.position_embedding(embedding) <NEW_LINE> return embedding | combine word embedding and position embedding | 62598fcc851cf427c66b8685 |
class SchoolList(ListCreateAPIView): <NEW_LINE> <INDENT> authentication_classes = (TokenAuthentication, BasicAuthentication, SessionAuthentication) <NEW_LINE> permission_classes = (IsAuthenticated,) <NEW_LINE> queryset = School.objects.all() <NEW_LINE> serializer_class = SchoolSerializer | This endpoint Lists Schools with GET requests, and Creates a new School with POST request
By making a ``GET`` request you can list all the Schools. Each school has the following attributes
* **id** - The ID of the School (int)
* **name** - The NAME of the school (str)
* **community_name** - The Community Unit Name in which the school belongs (str) (readonly)
* **num_students** - The Number of students in the school (int)
* **num_teachers** - The Number of teachers in the school (int)
* **location** - The location of the school (str: RURAL, URBAN, PERI-URBAN)
* **date_enrolled** - The date when the school was enrolled (date)
* **fees** - The Fees Structure for the school (dict)
By making a ``POST`` request, you can create a new School with the fields above | 62598fcc3d592f4c4edbb286 |
class UnfulfilledPromiseSentinel: <NEW_LINE> <INDENT> def __init__(self, fulfillingJobName: str, file_id: str, unpickled: Any) -> None: <NEW_LINE> <INDENT> self.fulfillingJobName = fulfillingJobName <NEW_LINE> self.file_id = file_id <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def __setstate__(stateDict: Dict[str, Any]) -> None: <NEW_LINE> <INDENT> jobName = stateDict['fulfillingJobName'] <NEW_LINE> file_id = stateDict['file_id'] <NEW_LINE> raise RuntimeError( f"This job was passed promise {file_id} that wasn't yet resolved when it " f"ran. The job {jobName} that fulfills this promise hasn't yet " f"finished. This means that there aren't enough constraints to " f"ensure the current job always runs after {jobName}. Consider adding a " f"follow-on indirection between this job and its parent, or adding " f"this job as a child/follow-on of {jobName}." ) | This should be overwritten by a proper promised value.
Throws an exception when unpickled. | 62598fccec188e330fdf8c69 |
class DoubleTapCallback(PointerXYCallback): <NEW_LINE> <INDENT> on_events = ['doubletap'] | Returns the mouse x/y-position on doubletap event. | 62598fccadb09d7d5dc0a94e |
class FederalLobbying(Choreography): <NEW_LINE> <INDENT> def __init__(self, temboo_session): <NEW_LINE> <INDENT> Choreography.__init__(self, temboo_session, '/Library/InfluenceExplorer/FederalLobbying') <NEW_LINE> <DEDENT> def new_input_set(self): <NEW_LINE> <INDENT> return FederalLobbyingInputSet() <NEW_LINE> <DEDENT> def _make_result_set(self, result, path): <NEW_LINE> <INDENT> return FederalLobbyingResultSet(result, path) <NEW_LINE> <DEDENT> def _make_execution(self, session, exec_id, path): <NEW_LINE> <INDENT> return FederalLobbyingChoreographyExecution(session, exec_id, path) | Create a new instance of the FederalLobbying Choreography. A TembooSession object, containing a valid
set of Temboo credentials, must be supplied. | 62598fcc4c3428357761a691 |
class CaseType: <NEW_LINE> <INDENT> THT = 'THT' <NEW_LINE> SMD = 'SMD' | A class for holding constants for part types
.. note:: will be changed to enum when Python version allows it | 62598fcc377c676e912f6f61 |
class ContactsUpdateAPIview(UpdateAPIView): <NEW_LINE> <INDENT> queryset = Contacts.objects.all() <NEW_LINE> serializer_class = ContactsSerializer <NEW_LINE> def perform_update(self, serializer): <NEW_LINE> <INDENT> serializer.save() | Handle the URL to update contacts | 62598fcc091ae35668704ffd |
class Player(): <NEW_LINE> <INDENT> def __init__(self,name): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.alive = 1 | The player doesn't necessarily have to be human.
The bots can also use the player class. | 62598fcc656771135c489a44 |
class TotalsCommitteePage(object): <NEW_LINE> <INDENT> swagger_types = { 'pagination': 'OffsetInfo', 'results': 'list[TotalsCommittee]' } <NEW_LINE> attribute_map = { 'pagination': 'pagination', 'results': 'results' } <NEW_LINE> def __init__(self, pagination=None, results=None): <NEW_LINE> <INDENT> self._pagination = None <NEW_LINE> self._results = None <NEW_LINE> self.discriminator = None <NEW_LINE> if pagination is not None: <NEW_LINE> <INDENT> self.pagination = pagination <NEW_LINE> <DEDENT> if results is not None: <NEW_LINE> <INDENT> self.results = results <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def pagination(self): <NEW_LINE> <INDENT> return self._pagination <NEW_LINE> <DEDENT> @pagination.setter <NEW_LINE> def pagination(self, pagination): <NEW_LINE> <INDENT> self._pagination = pagination <NEW_LINE> <DEDENT> @property <NEW_LINE> def results(self): <NEW_LINE> <INDENT> return self._results <NEW_LINE> <DEDENT> @results.setter <NEW_LINE> def results(self, results): <NEW_LINE> <INDENT> self._results = results <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in six.iteritems(self.swagger_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> if issubclass(TotalsCommitteePage, dict): <NEW_LINE> <INDENT> for key, value in self.items(): <NEW_LINE> <INDENT> result[key] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> 
return pprint.pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, TotalsCommitteePage): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other | NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually. | 62598fcc60cbc95b06364711 |
class CreateAccountsRequest(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.InstanceId = None <NEW_LINE> self.Accounts = None <NEW_LINE> self.Password = None <NEW_LINE> self.Description = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.InstanceId = params.get("InstanceId") <NEW_LINE> if params.get("Accounts") is not None: <NEW_LINE> <INDENT> self.Accounts = [] <NEW_LINE> for item in params.get("Accounts"): <NEW_LINE> <INDENT> obj = Account() <NEW_LINE> obj._deserialize(item) <NEW_LINE> self.Accounts.append(obj) <NEW_LINE> <DEDENT> <DEDENT> self.Password = params.get("Password") <NEW_LINE> self.Description = params.get("Description") | CreateAccounts request structure.
| 62598fcc8a349b6b43686613 |
class ImportExportModelAdmin(ImportExportMixin, admin.ModelAdmin): <NEW_LINE> <INDENT> pass | Subclass of ModelAdmin with import/export functionality. | 62598fcc167d2b6e312b734c |
class CfgTestCase(base.JbutlerTestCase): <NEW_LINE> <INDENT> def test_defaults(self): <NEW_LINE> <INDENT> config = cfg.JbutlerConfigParser() <NEW_LINE> self.assertEqual('jobs', config.jobdir) <NEW_LINE> self.assertEqual('templates', config.templatedir) <NEW_LINE> self.assertTrue(config.ssl_verify) <NEW_LINE> <DEDENT> def test_no_server(self): <NEW_LINE> <INDENT> self.mkfile('noserver', contents='\n') <NEW_LINE> config = cfg.JbutlerConfigParser() <NEW_LINE> with self.assertRaises(cfg.MissingRequiredOptionError) as cm: <NEW_LINE> <INDENT> config.read(['noserver']) <NEW_LINE> self.assertIn("'server'", str(cm.exception)) | Test the cfg module of jbutler | 62598fcc5fc7496912d48461 |
class ModeManager( object ): <NEW_LINE> <INDENT> def __init__( self ): <NEW_LINE> <INDENT> self.modes = { '__quitting__': kQuittingMode } <NEW_LINE> self.current_mode = None <NEW_LINE> <DEDENT> def register_mode( self, mode_name, mode ): <NEW_LINE> <INDENT> assert mode_name not in self.modes <NEW_LINE> self.modes[ mode_name ] = mode <NEW_LINE> mode._registered_with_manager( self ) <NEW_LINE> <DEDENT> def switch_to_mode( self, mode_name ): <NEW_LINE> <INDENT> if mode_name is None: mode_name = '__quitting__' <NEW_LINE> assert mode_name in self.modes <NEW_LINE> if self.current_mode is not None: <NEW_LINE> <INDENT> self.current_mode.exit() <NEW_LINE> <DEDENT> self.current_mode = self.modes[ mode_name ] <NEW_LINE> self.current_mode.enter() <NEW_LINE> <DEDENT> def quitting( self ): <NEW_LINE> <INDENT> return self.current_mode is kQuittingMode | A class that manages switching between modes. | 62598fccff9c53063f51aa20 |
class ConnectFrame(Frame): <NEW_LINE> <INDENT> def __init__(self, message=None): <NEW_LINE> <INDENT> if message is None: <NEW_LINE> <INDENT> self.header = "CONNECT" <NEW_LINE> self.message = {"header": self.header} <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> super(ConnectFrame, self).__init__(message) <NEW_LINE> <DEDENT> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return f"CONNECT Frame" <NEW_LINE> <DEDENT> def encode(self): <NEW_LINE> <INDENT> return json.dumps(self.message) | Connect frame sent by the client to the broker | 62598fcca219f33f346c6bde |
class Section: <NEW_LINE> <INDENT> def __init__( self, data: pnd.DataFrame, p1: Point, p2: Point, reverse: bool = False, z_adjustment: Union[None, float] = None ): <NEW_LINE> <INDENT> self.data = data <NEW_LINE> self.p1, self.p2 = p1, p2 <NEW_LINE> self.projection = None <NEW_LINE> self.line = None <NEW_LINE> self.date = None <NEW_LINE> if reverse == True: <NEW_LINE> <INDENT> self.data.sort_index(ascending=False, inplace=True) <NEW_LINE> <DEDENT> if z_adjustment != None: <NEW_LINE> <INDENT> self.data['z'] = self.data['z'] + z_adjustment <NEW_LINE> <DEDENT> self.projection = og.project_points(self.data, self.p1, self.p2) <NEW_LINE> self.line = asLineString(list(zip(self.projection['d'],self.projection['z']))) <NEW_LINE> self.date = (self.data.iloc[0]['t']).split('T')[0] <NEW_LINE> <DEDENT> def plot(self, view='section', **kwargs) -> Union[None, Line2D]: <NEW_LINE> <INDENT> if view=='section': <NEW_LINE> <INDENT> ax = self.projection.plot('d','z',**kwargs) <NEW_LINE> <DEDENT> elif view=='map': <NEW_LINE> <INDENT> ax = self.data.plot('x','y',**kwargs) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> logger.warning('{0} is not a valid view option'.format(view)) <NEW_LINE> ax = None <NEW_LINE> <DEDENT> return ax | A Section view of a set of x,y,z coordinates.
Parameters:
data (pandas.DataFrame) : contains the data to project.
p1 (shapely.Point) : the start of a line of section.
p2 (shapely.Point) : the end of a line of section.
reverse (bool) : reverse the order of points in the section.
z_adjustment (float) : adjust the elevation of the data. | 62598fcc4c3428357761a693 |
class ArgumentParser(argparse.ArgumentParser): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> self.throw_errors = False <NEW_LINE> super(ArgumentParser, self).__init__(*args, **kwargs) <NEW_LINE> <DEDENT> def error(self, message): <NEW_LINE> <INDENT> if self.throw_errors: <NEW_LINE> <INDENT> raise ArgumentParserError(message) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> super(ArgumentParser, self).error(message) | Derived argument parser that provides a flag for throwing exceptions
normally handled internally by argparse. | 62598fcc377c676e912f6f62 |
class RegistrationRequest(RequestBase): <NEW_LINE> <INDENT> challenge = models.ForeignKey( Challenge, help_text="To which project does the user want to register?", on_delete=models.CASCADE, ) <NEW_LINE> @property <NEW_LINE> def base_object(self): <NEW_LINE> <INDENT> return self.challenge <NEW_LINE> <DEDENT> @property <NEW_LINE> def object_name(self): <NEW_LINE> <INDENT> return self.challenge.short_name <NEW_LINE> <DEDENT> @property <NEW_LINE> def add_method(self): <NEW_LINE> <INDENT> return self.base_object.add_participant <NEW_LINE> <DEDENT> @property <NEW_LINE> def remove_method(self): <NEW_LINE> <INDENT> return self.base_object.remove_participant <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return f"{self.challenge.short_name} registration request by user {self.user.username}" <NEW_LINE> <DEDENT> def save(self, *args, **kwargs): <NEW_LINE> <INDENT> adding = self._state.adding <NEW_LINE> super().save(*args, **kwargs) <NEW_LINE> if adding: <NEW_LINE> <INDENT> process_access_request(request_object=self) <NEW_LINE> <DEDENT> <DEDENT> def delete(self): <NEW_LINE> <INDENT> ct = ContentType.objects.filter( app_label=self._meta.app_label, model=self._meta.model_name ).get() <NEW_LINE> Follow.objects.filter(object_id=self.pk, content_type=ct).delete() <NEW_LINE> super().delete() <NEW_LINE> <DEDENT> class Meta: <NEW_LINE> <INDENT> unique_together = (("challenge", "user"),) | When a user wants to join a project, admins have the option of reviewing
each user before allowing or denying them. This class records the needed
info for that. | 62598fcc3346ee7daa337832 |
class Proudcer(threading.Thread): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> threading.Thread.__init__(self) <NEW_LINE> self.headers = {'User-Agent': USER_AGENT[randrange(0, len(USER_AGENT))]} <NEW_LINE> self.session = requests.Session() <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> while not CITY_URL_QUEUE.empty(): <NEW_LINE> <INDENT> city_url = CITY_URL_QUEUE.get() <NEW_LINE> for i in range(1, 100): <NEW_LINE> <INDENT> RENT_CONDITION.acquire() <NEW_LINE> if NEXT_URL_QUEUE.full(): <NEW_LINE> <INDENT> logger.warning("NEXT_URL_QUEUE is full.Blocked and Waiting for parseCity()...") <NEW_LINE> RENT_CONDITION.wait() <NEW_LINE> <DEDENT> url = city_url + 'pg{0}/#contentList'.format(i) <NEW_LINE> NEXT_URL_QUEUE.put(url) <NEW_LINE> RENT_CONDITION.notify() <NEW_LINE> RENT_CONDITION.release() <NEW_LINE> try: <NEW_LINE> <INDENT> with self.session.get(city_url+'pg{0}/#contentList'.format(i), headers=self.headers) as s1: <NEW_LINE> <INDENT> root_node_1 = etree.HTML(s1.content.decode(s1.encoding)) <NEW_LINE> <DEDENT> with self.session.get(city_url+'pg{0}/#contentList'.format(i+1), headers=self.headers) as s2: <NEW_LINE> <INDENT> root_node_2 = etree.HTML(s2.content.decode(s2.encoding)) <NEW_LINE> <DEDENT> processding = r"//div[@class='content__list']/div[1]" <NEW_LINE> rent_title_generator_1 = root_node_1.xpath(processding + r"//p[@class='content__list--item--title twoline']/a/text()") <NEW_LINE> rent_title_generator_2 = root_node_2.xpath(processding + r"//p[@class='content__list--item--title twoline']/a/text()") <NEW_LINE> li_title1 = [s.strip() for s in rent_title_generator_1] <NEW_LINE> li_title2 = [s.strip() for s in rent_title_generator_2] <NEW_LINE> if li_title1 == li_title2: <NEW_LINE> <INDENT> logger.warning("There is duplicate content, so drop url behind it.") <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> except: <NEW_LINE> <INDENT> logger.error("Maybe get the end of the Url of Area!") <NEW_LINE> break <NEW_LINE> <DEDENT> logger.info("{0} has 
put into the Queue!".format(url)) <NEW_LINE> <DEDENT> <DEDENT> if CITY_URL_QUEUE.empty(): <NEW_LINE> <INDENT> global T_FLAG <NEW_LINE> T_FLAG = False | URLs for RentHouse INPUT TO QUQUE | 62598fcc55399d3f056268ef |
class IgnoreMessage(ListeningSocketError): <NEW_LINE> <INDENT> pass | Signal that this message should be ignored | 62598fcc7b180e01f3e4923a |
class NutnrJCsppTelemeteredDriver(SimpleDatasetDriver): <NEW_LINE> <INDENT> def _build_parser(self, stream_handle): <NEW_LINE> <INDENT> parser_config = { DataSetDriverConfigKeys.PARTICLE_CLASSES_DICT: { METADATA_PARTICLE_CLASS_KEY: NutnrJCsppMetadataTelemeteredDataParticle, DATA_PARTICLE_CLASS_KEY: NutnrJCsppTelemeteredDataParticle } } <NEW_LINE> parser = NutnrJCsppParser(parser_config, stream_handle, self._exception_callback) <NEW_LINE> return parser | The nutnr_j_cspp telemetered driver class extends the SimpleDatasetDriver. | 62598fcc50812a4eaa620dcf |
class InvestmentDeleteTest(TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> create_user() <NEW_LINE> investment = create_investment() <NEW_LINE> self.valid_args = {"investment_id": investment.id} <NEW_LINE> self.valid_url = "/investments/delete/{}/".format(investment.id) <NEW_LINE> <DEDENT> def test_delete_redirect_if_not_logged_in(self): <NEW_LINE> <INDENT> response = self.client.get( reverse("investments:investment_delete", kwargs=self.valid_args) ) <NEW_LINE> self.assertEqual(response.status_code, 302) <NEW_LINE> <DEDENT> def test_delete_no_redirect_if_logged_in(self): <NEW_LINE> <INDENT> self.client.login(username="user", password="abcd123456") <NEW_LINE> response = self.client.get( reverse("investments:investment_delete", kwargs=self.valid_args) ) <NEW_LINE> self.assertEqual(str(response.context['user']), 'user') <NEW_LINE> self.assertEqual(response.status_code, 200) <NEW_LINE> <DEDENT> def test_delete_url_exists_at_desired_location(self): <NEW_LINE> <INDENT> self.client.login(username="user", password="abcd123456") <NEW_LINE> response = self.client.get(self.valid_url) <NEW_LINE> self.assertEqual(response.status_code, 200) <NEW_LINE> <DEDENT> def test_delete_html404_on_invalid_name(self): <NEW_LINE> <INDENT> self.client.login(username="user", password="abcd123456") <NEW_LINE> response = self.client.get( reverse("investments:investment_delete", kwargs={"investment_id": 999999999}) ) <NEW_LINE> self.assertEqual(response.status_code, 404) <NEW_LINE> <DEDENT> def test_delete_template(self): <NEW_LINE> <INDENT> self.client.login(username="user", password="abcd123456") <NEW_LINE> response = self.client.get( reverse("investments:investment_delete", kwargs=self.valid_args) ) <NEW_LINE> self.assertTemplateUsed(response, "investments/delete.html") <NEW_LINE> <DEDENT> def test_delete_redirect_to_dashboard(self): <NEW_LINE> <INDENT> self.client.login(username="user", password="abcd123456") <NEW_LINE> response = self.client.post( 
reverse("investments:investment_delete", kwargs=self.valid_args), follow=True, ) <NEW_LINE> self.assertRedirects(response, reverse("investments:dashboard")) <NEW_LINE> <DEDENT> def test_delete_confirm_deletion(self): <NEW_LINE> <INDENT> investment_total = Investment.objects.count() <NEW_LINE> self.client.login(username="user", password="abcd123456") <NEW_LINE> self.client.post( reverse("investments:investment_delete", kwargs=self.valid_args) ) <NEW_LINE> self.assertEqual(Investment.objects.count(), investment_total - 1) | Tests for the delete investment view | 62598fccfbf16365ca794492 |
class AppCluster(object): <NEW_LINE> <INDENT> def __init__(self, name, uuid, state, type): <NEW_LINE> <INDENT> if not uuid or not name or not state: <NEW_LINE> <INDENT> __log__.error("Must provide the name, state, and uuid!") <NEW_LINE> return <NEW_LINE> <DEDENT> if not isinstance(type, AppClusterType): <NEW_LINE> <INDENT> raise TypeError('cluster type must be an instance of AppClusterType') <NEW_LINE> <DEDENT> self.name = name <NEW_LINE> self.uuid = uuid <NEW_LINE> self.state = state | Model for application clusters. Typically, the UUID is provided by the environment.
:param str name: Name for the AppCluster.
:param str uuid: Unique identifier for the AppCluster.
:param str state: State of the AppCluster.
:param AppClusterType type: Type of AppCluster. | 62598fcca219f33f346c6be0 |
class Plugin (plugin.TransformPlugin): <NEW_LINE> <INDENT> id=__name__ <NEW_LINE> options = [ plugin.Option('oper', 'IRI/Qname of the operator to unnest', default="rif:And"), plugin.Option('prop', 'IRI/Qname of value property of that operator', default="rif:formula"), ] <NEW_LINE> def __init__(self, oper='rif:And', prop='rif:formula'): <NEW_LINE> <INDENT> self.oper = qname.common.uri(oper) <NEW_LINE> self.prop = qname.common.uri(prop) <NEW_LINE> debug('unnest', 'oper: ', self.oper) <NEW_LINE> debug('unnest', 'prop: ', self.prop) <NEW_LINE> <DEDENT> def transform(self, instance): <NEW_LINE> <INDENT> instance.map_replace(self.replace) <NEW_LINE> return instance <NEW_LINE> <DEDENT> def replace(self, inst): <NEW_LINE> <INDENT> if inst.has_primary_type(self.oper): <NEW_LINE> <INDENT> debug('unnest(', 'found oper match', self.oper) <NEW_LINE> new = [] <NEW_LINE> multi = getattr(inst, self.prop) <NEW_LINE> for child in multi.values: <NEW_LINE> <INDENT> debug('unnest', 'child type:', child.primary_type) <NEW_LINE> if child.has_primary_type(self.oper): <NEW_LINE> <INDENT> debug('unnest', '... oper match', self.oper) <NEW_LINE> new.extend(getattr(child, self.prop).values) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> new.append(child) <NEW_LINE> <DEDENT> <DEDENT> multi.replace_values(new) <NEW_LINE> debug('unnest)') <NEW_LINE> <DEDENT> return inst | Rewrite so that and(a,and(b,c)) is just and(a,b,c), etc | 62598fccaad79263cf42eba5 |
class RekallAction(actions.ActionPlugin): <NEW_LINE> <INDENT> in_rdfvalue = rekall_types.RekallRequest <NEW_LINE> out_rdfvalues = [rekall_types.RekallResponse] <NEW_LINE> def Run(self, args): <NEW_LINE> <INDENT> session_args = args.session.ToDict() <NEW_LINE> if "filename" not in session_args and args.device: <NEW_LINE> <INDENT> session_args["filename"] = args.device.path <NEW_LINE> <DEDENT> rekal_session = GrrRekallSession( action=self, initial_profiles=args.profiles, **session_args) <NEW_LINE> plugin_errors = [] <NEW_LINE> for plugin_request in args.plugins: <NEW_LINE> <INDENT> plugin_args = plugin_request.args.ToDict() <NEW_LINE> try: <NEW_LINE> <INDENT> rekal_session.RunPlugin(plugin_request.plugin, **plugin_args) <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> tb = traceback.format_exc() <NEW_LINE> logging.error("While running plugin (%s): %s", plugin_request.plugin, tb) <NEW_LINE> plugin_errors.append(tb) <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> rekal_session.Flush() <NEW_LINE> <DEDENT> <DEDENT> if plugin_errors: <NEW_LINE> <INDENT> self.SetStatus(rdf_flows.GrrStatus.ReturnedStatus.GENERIC_ERROR, u"\n\n".join(plugin_errors)) <NEW_LINE> <DEDENT> self.Progress() <NEW_LINE> self.ForceGC() | Runs a Rekall command on live memory. | 62598fcc55399d3f056268f1 |
class RequestHandler(pyjsonrpc.HttpRequestHandler): <NEW_LINE> <INDENT> @pyjsonrpc.rpcmethod <NEW_LINE> def getPreferenceForUser(self, user_id): <NEW_LINE> <INDENT> logger.debug("news_recommendation_service - getPreferenceForUser") <NEW_LINE> db = mongodb_client.get_db() <NEW_LINE> model = db[PREFERENCE_MODEL_TABLE_NAME].find_one({'userId':user_id}) <NEW_LINE> if model is None: <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> sorted_tuples = sorted(model['preference'].items(), key=operator.itemgetter(1), reverse=True) <NEW_LINE> sorted_list = [x[0] for x in sorted_tuples] <NEW_LINE> sorted_value_list = [x[1] for x in sorted_tuples] <NEW_LINE> if isclose(float(sorted_value_list[0]), float(sorted_value_list[-1])): <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> return sorted_list | Get user's preference in an ordered class list | 62598fcc091ae35668705001 |
class JsonSystemLoggerTest(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.logger = JsonSystemLogger() <NEW_LINE> <DEDENT> def test_exception(self): <NEW_LINE> <INDENT> self.assertIsNone(self.logger.exception("Exception")) <NEW_LINE> <DEDENT> def test_error(self): <NEW_LINE> <INDENT> self.assertIsNone(self.logger.error("Error")) <NEW_LINE> <DEDENT> def test_debug(self): <NEW_LINE> <INDENT> self.assertIsNone(self.logger.debug("Debug")) <NEW_LINE> <DEDENT> def test_info(self): <NEW_LINE> <INDENT> self.assertIsNone(self.logger.info("Info")) <NEW_LINE> <DEDENT> def test_warn(self): <NEW_LINE> <INDENT> self.assertIsNone(self.logger.warn("Warn")) | Tests for JsonSystemLogger.
| 62598fcc9f28863672818a68 |
class Array2D: <NEW_LINE> <INDENT> def __init__(self, num_rows, num_cols): <NEW_LINE> <INDENT> self.rows = Array(num_rows) <NEW_LINE> for i in range(num_rows): <NEW_LINE> <INDENT> self.rows[i] = Array(num_cols) <NEW_LINE> <DEDENT> <DEDENT> def num_rows(self): <NEW_LINE> <INDENT> return len(self.rows) <NEW_LINE> <DEDENT> def num_cols(self): <NEW_LINE> <INDENT> return len(self.rows[0]) <NEW_LINE> <DEDENT> def clear(self, value): <NEW_LINE> <INDENT> for row in self.rows: <NEW_LINE> <INDENT> row.clear(value) <NEW_LINE> <DEDENT> <DEDENT> def __getitem__(self, index_tuple): <NEW_LINE> <INDENT> assert len(index_tuple) == 2, "Invalid number of array subscripts." <NEW_LINE> row = index_tuple[0] <NEW_LINE> col = index_tuple[1] <NEW_LINE> if not (0 <= row < self.num_rows() and 0 <= col < self.num_cols()): <NEW_LINE> <INDENT> raise IndexError('Invalid index') <NEW_LINE> <DEDENT> array_1d = self.rows[row] <NEW_LINE> return array_1d[col] <NEW_LINE> <DEDENT> def __setitem__(self, index_tuple, value): <NEW_LINE> <INDENT> assert len(index_tuple) == 2, "Invalid number of array subscripts." <NEW_LINE> row = index_tuple[0] <NEW_LINE> col = index_tuple[1] <NEW_LINE> if not (0 <= row < self.num_rows() and 0 <= col < self.num_cols()): <NEW_LINE> <INDENT> raise IndexError('Invalid index') <NEW_LINE> <DEDENT> array_1d = self.rows[row] <NEW_LINE> array_1d[col] = value <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> st1 = "" <NEW_LINE> for row in self.rows: <NEW_LINE> <INDENT> st1 += str(row) + "\n" <NEW_LINE> <DEDENT> return st1 | Implementation of the Array2D ADT using an array of arrays. | 62598fcc4527f215b58ea2a6 |
class DeclarativeColumnsMetaclass(type): <NEW_LINE> <INDENT> def __new__(cls, name, bases, attrs): <NEW_LINE> <INDENT> attrs["_meta"] = opts = TableOptions(attrs.get("Meta", None)) <NEW_LINE> columns = [(name_, attrs.pop(name_)) for name_, column in attrs.items() if isinstance(column, Column)] <NEW_LINE> columns.sort(lambda x, y: cmp(x[1].creation_counter, y[1].creation_counter)) <NEW_LINE> parent_columns = [] <NEW_LINE> for base in bases[::-1]: <NEW_LINE> <INDENT> if hasattr(base, "base_columns"): <NEW_LINE> <INDENT> parent_columns = base.base_columns.items() + parent_columns <NEW_LINE> <DEDENT> <DEDENT> attrs["base_columns"] = SortedDict(parent_columns) <NEW_LINE> if opts.model: <NEW_LINE> <INDENT> extra = SortedDict(((f.name, Column(verbose_name=f.verbose_name)) for f in opts.model._meta.fields)) <NEW_LINE> attrs["base_columns"].update(extra) <NEW_LINE> <DEDENT> attrs["base_columns"].update(SortedDict(columns)) <NEW_LINE> for exclusion in opts.exclude: <NEW_LINE> <INDENT> if exclusion in attrs["base_columns"]: <NEW_LINE> <INDENT> attrs["base_columns"].pop(exclusion) <NEW_LINE> <DEDENT> <DEDENT> if opts.sequence: <NEW_LINE> <INDENT> opts.sequence.expand(attrs["base_columns"].keys()) <NEW_LINE> attrs["base_columns"] = SortedDict(((x, attrs["base_columns"][x]) for x in opts.sequence)) <NEW_LINE> <DEDENT> return type.__new__(cls, name, bases, attrs) | Metaclass that converts Column attributes on the class to a dictionary
called ``base_columns``, taking into account parent class ``base_columns``
as well. | 62598fccad47b63b2c5a7c32 |
class ProductSaleType(models.Model): <NEW_LINE> <INDENT> _name = 'ilusiones.product.sale.type' <NEW_LINE> pay_day = fields.Integer(string='Dia de cobro') <NEW_LINE> sale_type = fields.Selection( selection=[ ('Prepago', 'Prepago'), ('Plan', 'Plan'), ('Activación', 'Activación') ], string='Tipo de venta' ) <NEW_LINE> sale_products = fields.Many2many( comodel_name='product.product', string='Productos almacenables en esta venta', relation='combo_almacenables' ) <NEW_LINE> services = fields.Many2many( comodel_name='product.product', string='Servicios en esta venta', relation='combo_services' ) <NEW_LINE> insurance = fields.Many2one( comodel_name='product.product', string='Seguro en esta venta' ) <NEW_LINE> product_id = fields.Many2one( comodel_name='product.template', string='Combo relacionado' ) <NEW_LINE> user_id = fields.Many2one( comodel_name='res.partner', string='Cliente' ) <NEW_LINE> description = fields.Char(string="Descripción", required=True) <NEW_LINE> serial_number = fields.Char(string='Número de serie', default="") <NEW_LINE> contract = fields.Char(string='Número de contrato', default="") <NEW_LINE> active = fields.Boolean(string='Is active?', default=True) <NEW_LINE> _sql_constraints = [ ('contract_unique', 'unique(contract)', '¡El contrato ya existe!'), ('serial_number_unique', 'unique(serial_number)', '¡El número de serie ya existe!') ] <NEW_LINE> @api.constrains('pay_day') <NEW_LINE> def _check_pay_day(self): <NEW_LINE> <INDENT> for record in self: <NEW_LINE> <INDENT> if record.pay_day <= 0 or record.pay_day > 31: <NEW_LINE> <INDENT> raise ValidationError( 'Día de cobro fuera de rango(1-31): %s' % record.pay_day ) | ProductSaleType is a bundle of products.
Products on bundle depends of sale type.
Arguments:
models {Model} -- [Odoo Model]
Raises:
ValidationError -- [On fields constraints] | 62598fcc656771135c489a48 |
class FileServerServicer(binary_data_pb2_grpc.FileServerServicer): <NEW_LINE> <INDENT> def __init__(self, availible_server_space, database_filename): <NEW_LINE> <INDENT> self._DATABASE_FILENAME = database_filename <NEW_LINE> self._AVAILIBLE_SERVER_SPACE = availible_server_space <NEW_LINE> <DEDENT> def ValidateFileServer(self, request, context): <NEW_LINE> <INDENT> type_check_response = resources_server.type_check(request, binary_data_pb2.BlobSpec) <NEW_LINE> if type_check_response.error.has_occured: <NEW_LINE> <INDENT> return type_check_response <NEW_LINE> <DEDENT> blob_spec = request <NEW_LINE> response = can_create_blob(blob_spec, self._AVAILIBLE_SERVER_SPACE) <NEW_LINE> return response <NEW_LINE> <DEDENT> def Save(self, request, context): <NEW_LINE> <INDENT> type_check_response = resources_server.type_check(request, binary_data_pb2.Chunk) <NEW_LINE> if type_check_response.error.has_occured: <NEW_LINE> <INDENT> return type_check_response <NEW_LINE> <DEDENT> chunk = request <NEW_LINE> response = save_chunk(self._DATABASE_FILENAME, chunk) <NEW_LINE> return response <NEW_LINE> <DEDENT> def Download(self, request, context): <NEW_LINE> <INDENT> type_check_response = resources_server.type_check(request, binary_data_pb2.ChunkSpec) <NEW_LINE> if type_check_response.error.has_occured: <NEW_LINE> <INDENT> return type_check_response <NEW_LINE> <DEDENT> chunk_spec = request <NEW_LINE> response = download_chunk(self._DATABASE_FILENAME, chunk_spec) <NEW_LINE> return response <NEW_LINE> <DEDENT> def Delete(self, request, context): <NEW_LINE> <INDENT> type_check_response = resources_server.type_check(request, binary_data_pb2.BlobId) <NEW_LINE> if type_check_response.error.has_occured: <NEW_LINE> <INDENT> return type_check_response.error <NEW_LINE> <DEDENT> blob_id = request <NEW_LINE> error = delete_blob(self._DATABASE_FILENAME, blob_id) <NEW_LINE> return error | Interfaces exported by the server.
| 62598fcd97e22403b383b2df |
class TouristObjectUpdateView(UpdateView): <NEW_LINE> <INDENT> model = TouristObject <NEW_LINE> form_class = TouristObjectForm <NEW_LINE> template_name = 'places/edit.html' | Tourist object update view | 62598fcd0fa83653e46f52c0 |
class IntervalTimerExecutor(BaseTimerExecutor): <NEW_LINE> <INDENT> def run(self): <NEW_LINE> <INDENT> pass | intverval timer executor TODO NOT IMPLEMENTED NOW | 62598fcd377c676e912f6f64 |
class ChinaBankMixin(object): <NEW_LINE> <INDENT> def _parse_table(self, zoom): <NEW_LINE> <INDENT> my_table = zoom.xpath("./table")[0] <NEW_LINE> trs = my_table.xpath("./tbody/tr") <NEW_LINE> table = [] <NEW_LINE> for tr in trs: <NEW_LINE> <INDENT> tr_line = [] <NEW_LINE> tds = tr.xpath("./td") <NEW_LINE> for td in tds: <NEW_LINE> <INDENT> tr_line.append(td.text_content()) <NEW_LINE> <DEDENT> table.append(tr_line) <NEW_LINE> <DEDENT> return "\r\n" + "{}".format(table) <NEW_LINE> <DEDENT> def _parse_detail_page(self, detail_page): <NEW_LINE> <INDENT> doc = html.fromstring(detail_page) <NEW_LINE> zoom = doc.xpath("//div[@id='zoom']")[0] <NEW_LINE> try: <NEW_LINE> <INDENT> table = self._parse_table(zoom) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> table = [] <NEW_LINE> <DEDENT> if table: <NEW_LINE> <INDENT> contents = [] <NEW_LINE> for node in zoom: <NEW_LINE> <INDENT> if node.tag == "table": <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> contents.append(node.text_content()) <NEW_LINE> <DEDENT> <DEDENT> return "".join(contents) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> detail_content = zoom.text_content() <NEW_LINE> return detail_content <NEW_LINE> <DEDENT> <DEDENT> def parse_detail_page(self, detail_page): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> article = self._parse_detail_page(detail_page) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return article <NEW_LINE> <DEDENT> <DEDENT> @retry(stop_max_attempt_number=10) <NEW_LINE> def _parse_list_page(self, list_page): <NEW_LINE> <INDENT> doc = html.fromstring(list_page) <NEW_LINE> news_area = doc.xpath("//div[@opentype='page']")[0] <NEW_LINE> news_title_parts = news_area.xpath("//font[@class='newslist_style']") <NEW_LINE> items = [] <NEW_LINE> for news_title_part in news_title_parts: <NEW_LINE> <INDENT> item = {} <NEW_LINE> try: <NEW_LINE> <INDENT> news_date_part = 
news_title_part.xpath("./following-sibling::span[@class='hui12']")[0].text_content() <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> news_date_part = news_title_part.xpath("./following-sibling::a/span[@class='hui12']")[0].text_content() <NEW_LINE> <DEDENT> item["pub_date"] = news_date_part <NEW_LINE> news_title = news_title_part.xpath("./a")[0].text_content() <NEW_LINE> item["title"] = news_title <NEW_LINE> news_link = news_title_part.xpath("./a/@href")[0] <NEW_LINE> news_link = "http://www.pbc.gov.cn" + news_link <NEW_LINE> item["link"] = news_link <NEW_LINE> items.append(item) <NEW_LINE> <DEDENT> return items <NEW_LINE> <DEDENT> def parse_list_page(self, list_page): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> items = self._parse_list_page(list_page) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return items | 适用于中国银行的数据解析混入类 | 62598fcdd8ef3951e32c8048 |
class ProposalModelAdmin(admin.ModelAdmin): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> model = JobProposal | Admin table for SolarCalculator model. | 62598fcdadb09d7d5dc0a954 |
class UserChangeForm(forms.ModelForm): <NEW_LINE> <INDENT> password = ReadOnlyPasswordHashField() <NEW_LINE> class Meta: <NEW_LINE> <INDENT> model = User <NEW_LINE> fields = ( 'userid', 'password', 'is_active', 'is_staff', 'is_superuser', 'best_way_to_find', 'best_way_to_contact', 'phone', 'email', 'teams', ) <NEW_LINE> <DEDENT> def clean_password(self): <NEW_LINE> <INDENT> return self.initial["password"] | A form for updating users. Includes all the fields on
the user, but replaces the password field with admin's
password hash display field. | 62598fcdcc40096d6161a3c4 |
class MongoDb(Plugin, DebianPlugin, UbuntuPlugin): <NEW_LINE> <INDENT> plugin_name = 'mongodb' <NEW_LINE> profiles = ('services',) <NEW_LINE> packages = ('mongodb-server',) <NEW_LINE> var_puppet_gen = "/var/lib/config-data/puppet-generated/mongodb" <NEW_LINE> files = ( '/etc/mongodb.conf', var_puppet_gen + '/etc/mongod.conf' ) <NEW_LINE> def setup(self): <NEW_LINE> <INDENT> self.add_copy_spec([ "/etc/mongodb.conf", self.var_puppet_gen + "/etc/", self.var_puppet_gen + "/etc/systemd/system/mongod.service.d/", "/var/log/mongodb/mongodb.log", "/var/log/containers/mongodb/mongodb.log" ]) <NEW_LINE> self.add_cmd_output("du -s /var/lib/mongodb/") <NEW_LINE> <DEDENT> def postproc(self): <NEW_LINE> <INDENT> self.do_file_sub( "/etc/mongodb.conf", r"(mms-token\s*=\s*.*)", r"mms-token = ********" ) <NEW_LINE> self.do_file_sub( self.var_puppet_gen + "/etc/mongodb.conf", r"(mms-token\s*=\s*.*)", r"mms-token = ********" ) | MongoDB document database
| 62598fcd60cbc95b06364717 |
class SellingPublisher: <NEW_LINE> <INDENT> def __init__(self, name): <NEW_LINE> <INDENT> self._seller = None <NEW_LINE> self._name = name <NEW_LINE> self._topic = "{}.{}".format(WAMP_TOPIC_PREFIX, self._name) <NEW_LINE> self._api_id = UUID('627f1b5c-58c2-43b1-8422-a34f7d3f5a04').bytes <NEW_LINE> <DEDENT> async def on_join(self, session): <NEW_LINE> <INDENT> market_maker_adr = binascii.a2b_hex(session.config.extra['market_maker_adr'][2:]) <NEW_LINE> seller_privkey = binascii.a2b_hex(session.config.extra['seller_privkey'][2:]) <NEW_LINE> seller = SimpleSeller(market_maker_adr, seller_privkey) <NEW_LINE> price = 35 * 10 ** 18 <NEW_LINE> interval = 300 <NEW_LINE> seller.add(self._api_id, self._topic, price, interval, None) <NEW_LINE> balance = await seller.start(session) <NEW_LINE> balance = int(balance / 10 ** 18) <NEW_LINE> print("Remaining balance: {} XBR".format(balance)) <NEW_LINE> self._seller = seller <NEW_LINE> <DEDENT> async def publish(self, session, topic, payload): <NEW_LINE> <INDENT> key_id, enc_ser, ciphertext = await self._seller.wrap(self._api_id, self._topic, payload) <NEW_LINE> if DEBUG_ACK_SEND: <NEW_LINE> <INDENT> pub = await session.publish(topic, key_id, enc_ser, ciphertext, options=PublishOptions(acknowledge=True)) <NEW_LINE> print("published {}: {}".format(pub.id, payload)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> session.publish(topic, key_id, enc_ser, ciphertext) <NEW_LINE> <DEDENT> if DEBUG_ENABLED: <NEW_LINE> <INDENT> debug_topic = 'debug.' + topic <NEW_LINE> session.publish(debug_topic, payload) | Publisher that sells the data on the XBR market. | 62598fcd167d2b6e312b7352 |
class CampaignExperimentError(enum.IntEnum): <NEW_LINE> <INDENT> UNSPECIFIED = 0 <NEW_LINE> UNKNOWN = 1 <NEW_LINE> DUPLICATE_NAME = 2 <NEW_LINE> INVALID_TRANSITION = 3 <NEW_LINE> CANNOT_CREATE_EXPERIMENT_WITH_SHARED_BUDGET = 4 <NEW_LINE> CANNOT_CREATE_EXPERIMENT_FOR_REMOVED_BASE_CAMPAIGN = 5 <NEW_LINE> CANNOT_CREATE_EXPERIMENT_FOR_NON_PROPOSED_DRAFT = 6 <NEW_LINE> CUSTOMER_CANNOT_CREATE_EXPERIMENT = 7 <NEW_LINE> CAMPAIGN_CANNOT_CREATE_EXPERIMENT = 8 <NEW_LINE> EXPERIMENT_DURATIONS_MUST_NOT_OVERLAP = 9 <NEW_LINE> EXPERIMENT_DURATION_MUST_BE_WITHIN_CAMPAIGN_DURATION = 10 <NEW_LINE> CANNOT_MUTATE_EXPERIMENT_DUE_TO_STATUS = 11 | Enum describing possible campaign experiment errors.
Attributes:
UNSPECIFIED (int): Enum unspecified.
UNKNOWN (int): The received error code is not known in this version.
DUPLICATE_NAME (int): An active campaign or experiment with this name already exists.
INVALID_TRANSITION (int): Experiment cannot be updated from the current state to the
requested target state. For example, an experiment can only graduate
if its status is ENABLED.
CANNOT_CREATE_EXPERIMENT_WITH_SHARED_BUDGET (int): Cannot create an experiment from a campaign using an explicitly shared
budget.
CANNOT_CREATE_EXPERIMENT_FOR_REMOVED_BASE_CAMPAIGN (int): Cannot create an experiment for a removed base campaign.
CANNOT_CREATE_EXPERIMENT_FOR_NON_PROPOSED_DRAFT (int): Cannot create an experiment from a draft, which has a status other than
proposed.
CUSTOMER_CANNOT_CREATE_EXPERIMENT (int): This customer is not allowed to create an experiment.
CAMPAIGN_CANNOT_CREATE_EXPERIMENT (int): This campaign is not allowed to create an experiment.
EXPERIMENT_DURATIONS_MUST_NOT_OVERLAP (int): Trying to set an experiment duration which overlaps with another
experiment.
EXPERIMENT_DURATION_MUST_BE_WITHIN_CAMPAIGN_DURATION (int): All non-removed experiments must start and end within their campaign's
duration.
CANNOT_MUTATE_EXPERIMENT_DUE_TO_STATUS (int): The experiment cannot be modified because its status is in a terminal
state, such as REMOVED. | 62598fcdff9c53063f51aa26 |
class BasePyClock(Window): <NEW_LINE> <INDENT> def __init__(self, title, init_width=230, init_height=230): <NEW_LINE> <INDENT> super(BasePyClock, self).__init__() <NEW_LINE> self.set_title(title=title) <NEW_LINE> self.resize(width=init_width, height=init_height) <NEW_LINE> self.set_position(position=WIN_POS_CENTER) <NEW_LINE> self.connect("destroy", main_quit) <NEW_LINE> self._time = datetime.now() <NEW_LINE> self._draw_area = DrawingArea() <NEW_LINE> self._draw_area.connect("expose-event", self._expose) <NEW_LINE> self.add(self._draw_area) <NEW_LINE> timeout_add(1000, self._update) <NEW_LINE> self.show_all() <NEW_LINE> <DEDENT> def _expose(self, *args): <NEW_LINE> <INDENT> self._context = self._draw_area.window.cairo_create() <NEW_LINE> content_area = Rectangle(width=self.allocation.width, height=self.allocation.height) <NEW_LINE> self._context.rectangle(content_area) <NEW_LINE> self._context.clip() <NEW_LINE> self._draw_clock() <NEW_LINE> <DEDENT> def _draw_clock(self): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def _redraw_canvas(self): <NEW_LINE> <INDENT> if self.window: <NEW_LINE> <INDENT> dimensions = self.get_allocation() <NEW_LINE> rect = Rectangle(width=dimensions.width, height=dimensions.height) <NEW_LINE> self.window.invalidate_rect(rect, True) <NEW_LINE> self.window.process_updates(True) <NEW_LINE> <DEDENT> <DEDENT> def _update(self): <NEW_LINE> <INDENT> self._time = datetime.now() <NEW_LINE> self._redraw_canvas() <NEW_LINE> return True | Base class for PyClock. | 62598fcd3d592f4c4edbb28e |
class ByteArray(SimpleModel): <NEW_LINE> <INDENT> __type_name__ = 'base64Binary' <NEW_LINE> __namespace__ = "http://www.w3.org/2001/XMLSchema" <NEW_LINE> class Attributes(SimpleModel.Attributes): <NEW_LINE> <INDENT> encoding = BINARY_ENCODING_USE_DEFAULT <NEW_LINE> <DEDENT> def __new__(cls, **kwargs): <NEW_LINE> <INDENT> tn = None <NEW_LINE> if 'encoding' in kwargs: <NEW_LINE> <INDENT> v = kwargs['encoding'] <NEW_LINE> if v is None: <NEW_LINE> <INDENT> kwargs['encoding'] = BINARY_ENCODING_USE_DEFAULT <NEW_LINE> <DEDENT> elif v in ('base64', 'base64Binary', BINARY_ENCODING_BASE64): <NEW_LINE> <INDENT> tn = 'base64Binary' <NEW_LINE> kwargs['encoding'] = BINARY_ENCODING_BASE64 <NEW_LINE> <DEDENT> elif v in ('urlsafe_base64', BINARY_ENCODING_URLSAFE_BASE64): <NEW_LINE> <INDENT> tn = 'string' <NEW_LINE> kwargs['encoding'] = BINARY_ENCODING_URLSAFE_BASE64 <NEW_LINE> <DEDENT> elif v in ('hex', 'hexBinary', BINARY_ENCODING_HEX): <NEW_LINE> <INDENT> tn = 'hexBinary' <NEW_LINE> kwargs['encoding'] = BINARY_ENCODING_HEX <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError("'encoding' must be one of: %r" % (tuple(ByteArray._encoding.handlers.values()),)) <NEW_LINE> <DEDENT> <DEDENT> retval = cls.customize(**kwargs) <NEW_LINE> if tn is not None: <NEW_LINE> <INDENT> retval.__type_name__ = tn <NEW_LINE> <DEDENT> return retval <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def is_default(cls): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def to_base64(cls, value): <NEW_LINE> <INDENT> return b64encode(_bytes_join(value)) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_base64(cls, value): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return [b64decode(_bytes_join(value))] <NEW_LINE> <DEDENT> except TypeError as e: <NEW_LINE> <INDENT> raise ValidationError(value) <NEW_LINE> <DEDENT> <DEDENT> @classmethod <NEW_LINE> def to_urlsafe_base64(cls, value): <NEW_LINE> <INDENT> return urlsafe_b64encode(_bytes_join(value)) <NEW_LINE> <DEDENT> 
@classmethod <NEW_LINE> def from_urlsafe_base64(cls, value): <NEW_LINE> <INDENT> if isinstance(value, unicode): <NEW_LINE> <INDENT> value = value.encode('utf8') <NEW_LINE> <DEDENT> return [urlsafe_b64decode(_bytes_join(value))] <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def to_hex(cls, value): <NEW_LINE> <INDENT> return hexlify(_bytes_join(value)) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_hex(cls, value): <NEW_LINE> <INDENT> return [unhexlify(_bytes_join(value))] | Canonical container for arbitrary data. Every protocol has a different
way of encapsulating this type. E.g. xml-based protocols encode this as
base64, while HttpRpc just hands it over.
Its native python format is a sequence of ``str`` objects for Python 2.x
and a sequence of ``bytes`` objects for Python 3.x. | 62598fcd956e5f7376df586b |
class Motif(motifs.Motif, dict): <NEW_LINE> <INDENT> multiple_value_keys = set(['BF', 'OV', 'HP', 'BS', 'HC', 'DT', 'DR']) <NEW_LINE> reference_keys = set(['RX', 'RA', 'RT', 'RL']) | Store the information for one TRANSFAC motif.
This class inherits from the Bio.motifs.Motif base class, as well
as from a Python dictionary. All motif information found by the parser
is stored as attributes of the base class when possible; see the
Bio.motifs.Motif base class for a description of these attributes. All
other information associated with the motif is stored as (key, value)
pairs in the dictionary, where the key is the two-letter fields as found
in the TRANSFAC file. References are an exception: These are stored in
the .references attribute.
These fields are commonly found in TRANSFAC files::
AC: Accession number
AS: Accession numbers, secondary
BA: Statistical basis
BF: Binding factors
BS: Factor binding sites underlying the matrix
[sequence; SITE accession number; start position for matrix
sequence; length of sequence used; number of gaps inserted;
strand orientation.]
CC: Comments
CO: Copyright notice
DE: Short factor description
DR: External databases
[database name: database accession number]
DT: Date created/updated
HC: Subfamilies
HP: Superfamilies
ID: Identifier
NA: Name of the binding factor
OC: Taxonomic classification
OS: Species/Taxon
OV: Older version
PV: Preferred version
TY: Type
XX: Empty line; these are not stored in the Record.
References are stored in an .references attribute, which is a list of
dictionaries with the following keys::
RN: Reference number
RA: Reference authors
RL: Reference data
RT: Reference title
RX: PubMed ID
For more information, see the TRANSFAC documentation. | 62598fcd7c178a314d78d87a |
class Test_capture_output(unittest.TestCase): <NEW_LINE> <INDENT> def test_std_out(self): <NEW_LINE> <INDENT> with dcs.capture_output() as (out, err): <NEW_LINE> <INDENT> print('Hello, World!') <NEW_LINE> <DEDENT> output = out.getvalue().strip() <NEW_LINE> error = err.getvalue().strip() <NEW_LINE> self.assertEqual(output, 'Hello, World!') <NEW_LINE> self.assertEqual(error, '') <NEW_LINE> <DEDENT> def test_std_err(self): <NEW_LINE> <INDENT> with dcs.capture_output() as (out, err): <NEW_LINE> <INDENT> print('Error Raised.', file=sys.stderr) <NEW_LINE> <DEDENT> output = out.getvalue().strip() <NEW_LINE> error = err.getvalue().strip() <NEW_LINE> self.assertEqual(output, '') <NEW_LINE> self.assertEqual(error, 'Error Raised.') | Tests the capture_output function with these cases:
capture standard output
capture standard error | 62598fcda219f33f346c6be4 |
class ServiceProvider(object): <NEW_LINE> <INDENT> def __init__(self, infoset): <NEW_LINE> <INDENT> self._root = infoset <NEW_LINE> self.name = testXMLValue(self._root.find(nspath("Name"))) <NEW_LINE> self.url = testXMLValue(self._root.find(nspath("OnlineResource"))) <NEW_LINE> self.keywords = extract_xml_list(self._root.find(nspath("Keywords"))) | Implements IServiceProviderMetatdata | 62598fcdd8ef3951e32c8049 |
class ModlidadForm(forms.ModelForm): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> model = Modlidad | docstring for ModlidadForm | 62598fcdab23a570cc2d4f5b |
class ProductBySubCategoryView(generics.ListAPIView): <NEW_LINE> <INDENT> serializer_class = serializers.ProductSerializer <NEW_LINE> lookup_field = 'subcategory_id' <NEW_LINE> def get_queryset(self): <NEW_LINE> <INDENT> queryset = models.Product.objects.filter(subcategory_id=self.kwargs['subcategory_id']) <NEW_LINE> return queryset | List all products with category and subcategories by subcategory. | 62598fcdad47b63b2c5a7c36 |
class CouldNotRestartSplunk(CommandExecutionFailure): <NEW_LINE> <INDENT> pass | Raised when a Splunk restart fails. | 62598fcdf9cc0f698b1c54c2 |
class AbstractProductClass(models.Model): <NEW_LINE> <INDENT> name = models.CharField(_('Name'), max_length=128) <NEW_LINE> slug = AutoSlugField(_('Slug'), max_length=128, unique=True, populate_from='name') <NEW_LINE> requires_shipping = models.BooleanField(_("Requires shipping?"), default=True) <NEW_LINE> track_stock = models.BooleanField(_("Track stock levels?"), default=True) <NEW_LINE> options = models.ManyToManyField( 'catalogue.Option', blank=True, verbose_name=_("Options")) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> abstract = True <NEW_LINE> app_label = 'catalogue' <NEW_LINE> ordering = ['name'] <NEW_LINE> verbose_name = _("Product class") <NEW_LINE> verbose_name_plural = _("Product classes") <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.name <NEW_LINE> <DEDENT> @property <NEW_LINE> def has_attributes(self): <NEW_LINE> <INDENT> return self.attributes.exists() | Used for defining options and attributes for a subset of products.
E.g. Books, DVDs and Toys. A product can only belong to one product class.
At least one product class must be created when setting up a new
Oscar deployment.
Not necessarily equivalent to top-level categories but usually will be. | 62598fcd377c676e912f6f66 |
class QueueFactory(object): <NEW_LINE> <INDENT> def __init__(self, name): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.__counter = 0 <NEW_LINE> self.__names = [] <NEW_LINE> self.q = Queue.Queue() <NEW_LINE> <DEDENT> def _makeMethod(self, constant, methodName, argc): <NEW_LINE> <INDENT> def __method(*args): <NEW_LINE> <INDENT> if len(args) != argc: <NEW_LINE> <INDENT> raise TypeError("%s() takes exactly %d arguments (%d given)" % (methodName, argc, len(args))) <NEW_LINE> <DEDENT> self.q.put((constant, args)) <NEW_LINE> <DEDENT> __method.__name__ = methodName <NEW_LINE> return __method <NEW_LINE> <DEDENT> def addMessage(self, name, argc): <NEW_LINE> <INDENT> if name in self.__names: <NEW_LINE> <INDENT> raise AttributeError("%s queue already has a message named %s" % (self.name, name)) <NEW_LINE> <DEDENT> const_name = self.name.upper() + "_CODE_" + name.upper() <NEW_LINE> setattr(self, const_name, self.__counter) <NEW_LINE> self.__counter += 1 <NEW_LINE> method_name = "send_" + name.lower() <NEW_LINE> method = self._makeMethod(getattr(self, const_name), method_name, argc) <NEW_LINE> setattr(self, method_name, method) <NEW_LINE> self.__names.append(name) | Constructs a new object wrapping a Queue.Queue, complete with constants
and sending functions for each type of message that can be put into the
queue.
Creating a new object using this class is done like so:
q = QueueFactory("progress")
And then adding messages to it is done like so:
q.addMessage("init", 0)
q.addMessage("step", 1)
The first call will create a new constant named PROGRESS_CODE_INIT and a
method named send_init that takes zero arguments. The second call will
create a new constant named PROGRESS_CODE_STEP and a method named send_step
that takes one argument.
Reusing names within the same class is not allowed. | 62598fcdbe7bc26dc9252048 |
class BaseFileInfo(object): <NEW_LINE> <INDENT> @property <NEW_LINE> def filename(self): <NEW_LINE> <INDENT> raise NotImplementedError('This property must be implemented in a subclass!') <NEW_LINE> <DEDENT> @property <NEW_LINE> def extension(self): <NEW_LINE> <INDENT> return self.filename.rsplit('.', 1)[-1].lower() <NEW_LINE> <DEDENT> @property <NEW_LINE> def media_subdir(self): <NEW_LINE> <INDENT> if self.extension in VALID_IMAGE_EXTS: <NEW_LINE> <INDENT> return IMAGE_SUBDIR <NEW_LINE> <DEDENT> elif self.extension in VALID_AUDIO_EXTS: <NEW_LINE> <INDENT> return AUDIO_SUBDIR <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return '' | Base class for gathering data about an
object that represents a file. | 62598fcd5fcc89381b26633b |
class Identifier: <NEW_LINE> <INDENT> pattern: str <NEW_LINE> target: str <NEW_LINE> def __init__(self, identifiers_config): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.pattern = identifiers_config["pattern"] <NEW_LINE> self.target = identifiers_config["target"] <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> print(f"Error loading identifier from identifierconfig: {identifiers_config}") <NEW_LINE> sys.exit() <NEW_LINE> <DEDENT> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return ",".join([self.pattern, self.target]) <NEW_LINE> <DEDENT> def matches(self, booking): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> value = booking.__dict__[self.target] <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> print(f"Couldn't find {self.target} in booking class. Has to be one of {booking.__dict__.keys()}") <NEW_LINE> sys.exit() <NEW_LINE> <DEDENT> if self.pattern in value: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False | A helper class: Given a booking's target field and a pattern this class determines a match | 62598fcd656771135c489a4e |
class Material(models.Model): <NEW_LINE> <INDENT> data = models.TextField('Данные') <NEW_LINE> class Meta: <NEW_LINE> <INDENT> verbose_name = 'Материал' <NEW_LINE> verbose_name_plural = 'Материалы' | Материалы для калькулятора | 62598fcdcc40096d6161a3c6 |
class Module: <NEW_LINE> <INDENT> def __init__(self, *args1, **args2): <NEW_LINE> <INDENT> self.modules = [] <NEW_LINE> for i in args1: <NEW_LINE> <INDENT> self.load(i) <NEW_LINE> <DEDENT> for i, j in args2.items(): <NEW_LINE> <INDENT> self.load(i, **j) <NEW_LINE> <DEDENT> <DEDENT> def load(self, module, **args): <NEW_LINE> <INDENT> __import__(module) <NEW_LINE> plugin = sys.modules[module] <NEW_LINE> self.modules.append(plugin) <NEW_LINE> if len(args): <NEW_LINE> <INDENT> for i, j in args.items(): <NEW_LINE> <INDENT> setattr(plugin, i, j) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def unload(self, module): <NEW_LINE> <INDENT> plugin = sys.modules[module] <NEW_LINE> self.modules.remove(plugin) <NEW_LINE> del sys.modules[module] <NEW_LINE> <DEDENT> """ It sends a signal to a specific module """ <NEW_LINE> def signal_module(self, module, sign, *args1, **args2): <NEW_LINE> <INDENT> if hasattr(module, sign): <NEW_LINE> <INDENT> act = getattr(module, sign) <NEW_LINE> try: <NEW_LINE> <INDENT> act(*args1, **args2) <NEW_LINE> <DEDENT> except Exception as exc: <NEW_LINE> <INDENT> debug() <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> """ It sends a signal to all modules """ <NEW_LINE> def signal(self, sign, *args1, **args2): <NEW_LINE> <INDENT> for i in self.modules: <NEW_LINE> <INDENT> self.signal_module(i, sign, *args1, **args2) | This class is used to load/unload plugins.
It calls functions which are defined inside the plugin files
according to their names. These name functions correspond to irc
event commands. | 62598fcd97e22403b383b2e5 |
class PrognosticGroupStage8(_CaseInsensitiveEnum): <NEW_LINE> <INDENT> zero = '0' <NEW_LINE> ia = 'IA' <NEW_LINE> ib = 'IB' <NEW_LINE> iia = 'IIA' <NEW_LINE> iib = 'IIB' <NEW_LINE> iiia = 'IIIA' <NEW_LINE> iiic = 'IIIC' <NEW_LINE> iv = 'IV' <NEW_LINE> unknown = 'Unknown' | American Joint Committee on Cance (AJCC) edition 8's prognostic group stage | 62598fcd4c3428357761a69d |
class KeywordQueryEventListener(EventListener): <NEW_LINE> <INDENT> def on_event(self, event, extension): <NEW_LINE> <INDENT> items = [] <NEW_LINE> if event.get_argument(): <NEW_LINE> <INDENT> pw_length = int(event.get_argument()) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> pw_length = int(extension.preferences['pw_length']) <NEW_LINE> <DEDENT> pw_count = int(extension.preferences['pw_count']) <NEW_LINE> if pwgen_module: <NEW_LINE> <INDENT> passwords = pwgen.pwgen(pw_length, pw_count, False, False, True, True, False, True, '!$.#*+-_~()][?%&@,;', True) <NEW_LINE> <DEDENT> elif not pwgen_module and is_exist(program='pwgen'): <NEW_LINE> <INDENT> command = 'pwgen -1 -c -n -y {} {}'.format(str(pw_length), str(pw_count)) <NEW_LINE> output = check_output(command.split(' ')) <NEW_LINE> passwords = output.splitlines() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> passwords = [ 'Could not find neither pwgen module nor the command!' ] <NEW_LINE> <DEDENT> for password in passwords: <NEW_LINE> <INDENT> password = password.decode('utf-8') <NEW_LINE> items.append( ExtensionResultItem( icon='images/icon.png', name=password, description='Press Enter to copy this password to clipboard', highlightable=False, on_enter=CopyToClipboardAction(password))) <NEW_LINE> <DEDENT> return RenderResultListAction(items) | Class that listens to the Keyboard event | 62598fcd0fa83653e46f52c6 |
class ModulesConfig(BaseConfig): <NEW_LINE> <INDENT> def __init__(self, data): <NEW_LINE> <INDENT> super(ModulesConfig, self).__init__(data) <NEW_LINE> python_requires = data.get('python_requires', MISSING) <NEW_LINE> if python_requires == MISSING: <NEW_LINE> <INDENT> raise KeyError('python_requires is required') <NEW_LINE> <DEDENT> self.python_requires = python_requires <NEW_LINE> self.python_versions = parse_python_requires(python_requires) <NEW_LINE> self.controller_only = python_requires == 'controller' | Configuration for modules. | 62598fcd091ae35668705009 |
class BackupsCollectionResource(resource.BaseResource): <NEW_LINE> <INDENT> def __init__(self, storage_driver): <NEW_LINE> <INDENT> self.db = storage_driver <NEW_LINE> <DEDENT> @policy.enforce('backups:get_all') <NEW_LINE> def on_get(self, req, resp, project_id): <NEW_LINE> <INDENT> user_id = req.get_header('X-User-ID') <NEW_LINE> offset = req.get_param_as_int('offset') or 0 <NEW_LINE> limit = req.get_param_as_int('limit') or 10 <NEW_LINE> search = self.json_body(req) <NEW_LINE> obj_list = self.db.search_backup(project_id=project_id, user_id=user_id, offset=offset, limit=limit, search=search) <NEW_LINE> resp.body = {'backups': obj_list} <NEW_LINE> <DEDENT> @policy.enforce('backups:create') <NEW_LINE> def on_post(self, req, resp, project_id): <NEW_LINE> <INDENT> doc = self.json_body(req) <NEW_LINE> if not doc: <NEW_LINE> <INDENT> raise freezer_api_exc.BadDataFormat( message='Missing request body') <NEW_LINE> <DEDENT> user_name = req.get_header('X-User-Name') <NEW_LINE> user_id = req.get_header('X-User-ID') <NEW_LINE> backup_id = self.db.add_backup(project_id=project_id, user_id=user_id, user_name=user_name, doc=doc) <NEW_LINE> resp.status = falcon.HTTP_201 <NEW_LINE> resp.body = {'backup_id': backup_id} | Handler for endpoint: /v2/{project_id}/backups | 62598fcdbf627c535bcb188a |
class LogLorentz1D(Fittable1DModel): <NEW_LINE> <INDENT> amplitude = Parameter(default=1) <NEW_LINE> log_x_0 = Parameter(default=0) <NEW_LINE> fwhm = Parameter(default=1) <NEW_LINE> @staticmethod <NEW_LINE> def evaluate(x, amplitude, log_x_0, fwhm): <NEW_LINE> <INDENT> return (amplitude * ((fwhm / 2.) ** 2) / ((np.log(x) - log_x_0) ** 2 + (fwhm / 2.) ** 2)) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def fit_deriv(x, amplitude, log_x_0, fwhm): <NEW_LINE> <INDENT> log_x = np.log(x) <NEW_LINE> d_amplitude = fwhm ** 2 / (fwhm ** 2 + (log_x - log_x_0) ** 2) <NEW_LINE> d_log_x_0 = (amplitude * d_amplitude * (2 * log_x - 2 * log_x_0) / (fwhm ** 2 + (log_x - log_x_0) ** 2)) <NEW_LINE> d_fwhm = 2 * amplitude * d_amplitude / fwhm * (1 - d_amplitude) <NEW_LINE> return [d_amplitude, d_log_x_0, d_fwhm] <NEW_LINE> <DEDENT> def bounding_box(self, factor=25): <NEW_LINE> <INDENT> log_x_0 = self.log_x_0 <NEW_LINE> dx = factor * self.fwhm <NEW_LINE> return log_x_0 - dx, log_x_0 + dx <NEW_LINE> <DEDENT> @property <NEW_LINE> def input_units(self): <NEW_LINE> <INDENT> if self.log_x_0.unit is None: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return {'x': self.log_x_0.unit} <NEW_LINE> <DEDENT> <DEDENT> def _parameter_units_for_data_units(self, inputs_unit, outputs_unit): <NEW_LINE> <INDENT> return {'x_0': inputs_unit['x'], 'fwhm': inputs_unit['x'], 'amplitude': outputs_unit['y']} | One dimensional log-Lorentzian model.
Parameters
----------
amplitude : float
Peak value
log_x_0 : float
Position of the peak in log space.
fwhm : float
Full width at half maximum
See Also
--------
Gaussian1D, Box1D, RickerWavelet1D
Notes
-----
Model formula:
.. math::
f(x) = \frac{A \gamma^{2}}{\gamma^{2} + \left(\ln(x) - ln(x_{0})\right)^{2}}
Examples
--------
.. plot::
:include-source:
import numpy as np
import matplotlib.pyplot as plt
from astropy.modeling.models import Lorentz1D
plt.figure()
s1 = Lorentz1D()
r = np.arange(-5, 5, .01)
for factor in range(1, 4):
s1.amplitude = factor
plt.plot(r, s1(r), color=str(0.25 * factor), lw=2)
plt.axis([-5, 5, -1, 4])
plt.show() | 62598fcd71ff763f4b5e7b60 |
class IsType(Validator): <NEW_LINE> <INDENT> no_type = 'Type does not exist.' <NEW_LINE> no_subtype = 'Type is not a descendant type of {parent}' <NEW_LINE> def __init__(self, parent: str = None) -> None: <NEW_LINE> <INDENT> self.parent = parent <NEW_LINE> <DEDENT> def _repr_args(self): <NEW_LINE> <INDENT> return 'parent={0!r}'.format(self.parent) <NEW_LINE> <DEDENT> def __call__(self, type: str): <NEW_LINE> <INDENT> assert not self.parent or self.parent in app.resources <NEW_LINE> try: <NEW_LINE> <INDENT> r = app.resources[type] <NEW_LINE> if self.parent: <NEW_LINE> <INDENT> if not issubclass(r.__class__, app.resources[self.parent].__class__): <NEW_LINE> <INDENT> raise ValidationError(self.no_subtype.format(self.parent)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> except KeyError: <NEW_LINE> <INDENT> raise ValidationError(self.no_type) | Validator which succeeds if the value it is passed is a registered
resource type.
:param parent: If set, type must be a subtype of such resource.
By default accept any resource. | 62598fcdd8ef3951e32c804b |
class ProxyServerPage(CoClass): <NEW_LINE> <INDENT> _reg_clsid_ = GUID('{F66C99F8-D7C0-43C6-88C7-DC15435C5321}') <NEW_LINE> _idlflags_ = [] <NEW_LINE> _typelib_path_ = typelib_path <NEW_LINE> _reg_typelib_ = ('{C0FC1503-7E6F-11D2-AABF-00C04FA375F1}', 10, 2) | Esri Proxy Server property page. | 62598fcddc8b845886d5399c |
class InvalidJSONfile(Exception): <NEW_LINE> <INDENT> def __init__(self, fName): <NEW_LINE> <INDENT> self.msg = "Skipping simulation for {}\n".format(fName) | Raised when the json file fed to the program has problems | 62598fcdadb09d7d5dc0a95a |
class Deck: <NEW_LINE> <INDENT> def __init__(self, seed=42): <NEW_LINE> <INDENT> random.seed(seed) <NEW_LINE> self.reset() <NEW_LINE> <DEDENT> def shuffle(self, reset=False): <NEW_LINE> <INDENT> if reset: <NEW_LINE> <INDENT> self.reset() <NEW_LINE> <DEDENT> n = self.countCards() <NEW_LINE> if n > 1: <NEW_LINE> <INDENT> for i in range(n-1, 0, -1): <NEW_LINE> <INDENT> j = random.randint(0, i) <NEW_LINE> if i != j: <NEW_LINE> <INDENT> self._cards[i], self._cards[j] = self._cards[j], self._cards[i] <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> def dealOneCard(self): <NEW_LINE> <INDENT> if self.countCards() == 0: <NEW_LINE> <INDENT> raise IndexError("Cannot deal from empty deck. Please reset or " "reshuffle the deck first.") <NEW_LINE> <DEDENT> return self._cards.pop() <NEW_LINE> <DEDENT> def dealNCards(self, n): <NEW_LINE> <INDENT> if self.countCards() < n: <NEW_LINE> <INDENT> raise IndexError(f"Less than {n} cards are left to deal. Please " "reshuffle the deck first by calling shuffle().") <NEW_LINE> <DEDENT> return [self._cards.pop() for _ in range(n)] <NEW_LINE> <DEDENT> def dealRandomCard(self): <NEW_LINE> <INDENT> num_cards = self.countCards() <NEW_LINE> if num_cards == 0: <NEW_LINE> <INDENT> raise IndexError("Cannot deal from empty deck. Please reshuffle " "the deck first by calling shuffle().") <NEW_LINE> <DEDENT> idx = random.randint(0, num_cards - 1) <NEW_LINE> return self._cards.pop(idx) <NEW_LINE> <DEDENT> def reset(self): <NEW_LINE> <INDENT> self._cards = [] <NEW_LINE> for suit in Suit: <NEW_LINE> <INDENT> for rank in Rank: <NEW_LINE> <INDENT> card = Card(suit, rank) <NEW_LINE> self._cards.append(card) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def countCards(self): <NEW_LINE> <INDENT> return len(self._cards) <NEW_LINE> <DEDENT> def print(self): <NEW_LINE> <INDENT> for c in self._cards: <NEW_LINE> <INDENT> print(c.name) | A class to represent a deck of 52 poker-style playing cards. | 62598fcd3d592f4c4edbb294 |
class Url(StringMixin, Raw): <NEW_LINE> <INDENT> def __init__(self, endpoint=None, absolute=False, scheme=None, **kwargs): <NEW_LINE> <INDENT> super(Url, self).__init__(**kwargs) <NEW_LINE> self.endpoint = endpoint <NEW_LINE> self.absolute = absolute <NEW_LINE> self.scheme = scheme <NEW_LINE> <DEDENT> def output(self, key, obj, **kwargs): <NEW_LINE> <INDENT> raise NotImplementedError("fields.Url is not implemented on Sanic-Restplus") <NEW_LINE> try: <NEW_LINE> <INDENT> data = to_marshallable_type(obj) <NEW_LINE> endpoint = self.endpoint if self.endpoint is not None else request.endpoint <NEW_LINE> o = urlparse(url_for(endpoint, _external=self.absolute, **data)) <NEW_LINE> if self.absolute: <NEW_LINE> <INDENT> scheme = self.scheme if self.scheme is not None else o.scheme <NEW_LINE> return urlunparse((scheme, o.netloc, o.path, "", "", "")) <NEW_LINE> <DEDENT> return urlunparse(("", "", o.path, "", "", "")) <NEW_LINE> <DEDENT> except TypeError as te: <NEW_LINE> <INDENT> raise MarshallingError(te) | A string representation of a Url
:param str endpoint: Endpoint name. If endpoint is ``None``, ``request.endpoint`` is used instead
:param bool absolute: If ``True``, ensures that the generated urls will have the hostname included
:param str scheme: URL scheme specifier (e.g. ``http``, ``https``) | 62598fcdf9cc0f698b1c54c4 |
class Feedback (object): <NEW_LINE> <INDENT> def __init__ (self, error="", success="", status=None): <NEW_LINE> <INDENT> self.error = error <NEW_LINE> self.success = success <NEW_LINE> self.status = status <NEW_LINE> <DEDENT> def __str__ (self): <NEW_LINE> <INDENT> return render_template('interface.html', error=self.error, success=self.success, status="{}".format(self.status)) | provides some basic functions for rendering the html template.
basically error and success message are set in __init__().
__str__() returns the rendered template including error and
success message(s) | 62598fcd0fa83653e46f52c8 |
class EducationLevelApiTestCase(NamedModelApiTestCase): <NEW_LINE> <INDENT> factory_class = factories.EducationLevelModelFactory <NEW_LINE> model_class = models.EducationLevel <NEW_LINE> serializer_class = serializers.EducationLevelSerializer <NEW_LINE> url_detail = "education-level-detail" <NEW_LINE> url_list = "education-level-list" <NEW_LINE> name = factories.EducationLevelModelFactory.name <NEW_LINE> def test_create_education_level(self): <NEW_LINE> <INDENT> self.verify_create_defaults() <NEW_LINE> <DEDENT> def test_create_education_level_partial(self): <NEW_LINE> <INDENT> self.verify_create_defaults_partial() <NEW_LINE> <DEDENT> def test_get_education_level(self): <NEW_LINE> <INDENT> self.verify_get_defaults() <NEW_LINE> <DEDENT> def test_put_education_level_partial(self): <NEW_LINE> <INDENT> self.verify_put_partial() <NEW_LINE> <DEDENT> def test_delete_education_level(self): <NEW_LINE> <INDENT> self.verify_delete_default() | EducationLevel API unit test class. | 62598fcdec188e330fdf8c77 |
class StorageHandler(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.containers = list() <NEW_LINE> <DEDENT> def attachContainer(self, container): <NEW_LINE> <INDENT> self.containers.append(container) <NEW_LINE> <DEDENT> def detachContainer(self, container): <NEW_LINE> <INDENT> matches = [] <NEW_LINE> for i, c in enumerate(self.containers): <NEW_LINE> <INDENT> if c == container: <NEW_LINE> <INDENT> matches.append(i) <NEW_LINE> <DEDENT> <DEDENT> for i in reversed(matches): <NEW_LINE> <INDENT> self.containers.pop(i) <NEW_LINE> <DEDENT> <DEDENT> def close(self): <NEW_LINE> <INDENT> not_clean = False <NEW_LINE> for container in list(self.containers): <NEW_LINE> <INDENT> ret = self.detachContainer(container) <NEW_LINE> if not ret: <NEW_LINE> <INDENT> not_clean = True <NEW_LINE> <DEDENT> <DEDENT> return not not_clean | Base class. Children take care of specific ``StorageContainer``s.
Handlers for "ephemeral/volatile" containers may not need take care of
closing them down. This is needed for persistent storage handlers, though.
Children should minimally implement
* attachContainer()
* detachContainer()
* close() | 62598fcd4527f215b58ea2b0 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.