code stringlengths 4 4.48k | docstring stringlengths 1 6.45k | _id stringlengths 24 24 |
|---|---|---|
class PrivateDnsZoneGroup(SubResource): <NEW_LINE> <INDENT> _validation = { 'etag': {'readonly': True}, 'provisioning_state': {'readonly': True}, } <NEW_LINE> _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'etag': {'key': 'etag', 'type': 'str'}, 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, 'private_dns_zone_configs': {'key': 'properties.privateDnsZoneConfigs', 'type': '[PrivateDnsZoneConfig]'}, } <NEW_LINE> def __init__( self, *, id: Optional[str] = None, name: Optional[str] = None, private_dns_zone_configs: Optional[List["PrivateDnsZoneConfig"]] = None, **kwargs ): <NEW_LINE> <INDENT> super(PrivateDnsZoneGroup, self).__init__(id=id, **kwargs) <NEW_LINE> self.name = name <NEW_LINE> self.etag = None <NEW_LINE> self.provisioning_state = None <NEW_LINE> self.private_dns_zone_configs = private_dns_zone_configs | Private dns zone group resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:param name: Name of the resource that is unique within a resource group. This name can be used
to access the resource.
:type name: str
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:ivar provisioning_state: The provisioning state of the private dns zone group resource.
Possible values include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2020_04_01.models.ProvisioningState
:param private_dns_zone_configs: A collection of private dns zone configurations of the private
dns zone group.
:type private_dns_zone_configs:
list[~azure.mgmt.network.v2020_04_01.models.PrivateDnsZoneConfig] | 62598fa1379a373c97d98e6a |
class Version(int): <NEW_LINE> <INDENT> id = 11 <NEW_LINE> size = 4 <NEW_LINE> def serialize(self): <NEW_LINE> <INDENT> assert(struct.calcsize('>i') == self.size) <NEW_LINE> return struct.pack('>i', self) | A serializable 4-byte version number. | 62598fa19c8ee82313040098 |
class ExtensionDescriptor(object): <NEW_LINE> <INDENT> name = None <NEW_LINE> alias = None <NEW_LINE> namespace = None <NEW_LINE> updated = None <NEW_LINE> def __init__(self, ext_mgr): <NEW_LINE> <INDENT> ext_mgr.register(self) <NEW_LINE> self.ext_mgr = ext_mgr <NEW_LINE> <DEDENT> def get_resources(self): <NEW_LINE> <INDENT> resources = [] <NEW_LINE> return resources <NEW_LINE> <DEDENT> def get_controller_extensions(self): <NEW_LINE> <INDENT> controller_exts = [] <NEW_LINE> return controller_exts <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def nsmap(cls): <NEW_LINE> <INDENT> nsmap = ext_nsmap.copy() <NEW_LINE> nsmap[cls.alias] = cls.namespace <NEW_LINE> return nsmap <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def xmlname(cls, name): <NEW_LINE> <INDENT> return '{%s}%s' % (cls.namespace, name) | Base class that defines the contract for extensions.
Note that you don't have to derive from this class to have a valid
extension; it is purely a convenience. | 62598fa17d43ff248742732c |
class CardRequest(object): <NEW_LINE> <INDENT> def __init__(self, newcardonly=False, readers=None, cardType=None, cardServiceClass=None, timeout=1): <NEW_LINE> <INDENT> self.pcsccardrequest = PCSCCardRequest(newcardonly, readers, cardType, cardServiceClass, timeout) <NEW_LINE> <DEDENT> def getReaders(self): <NEW_LINE> <INDENT> return self.pcsccardrequest.getReaders() <NEW_LINE> <DEDENT> def waitforcard(self): <NEW_LINE> <INDENT> return self.pcsccardrequest.waitforcard() <NEW_LINE> <DEDENT> def waitforcardevent(self): <NEW_LINE> <INDENT> return self.pcsccardrequest.waitforcardevent() | A CardRequest is used for waitForCard() invocations and specifies what
kind of smart card an application is waited for. | 62598fa1097d151d1a2c0e7e |
class Generator(nn.Module): <NEW_LINE> <INDENT> def __init__(self, z_dim=10, im_chan=1, hidden_dim=64): <NEW_LINE> <INDENT> super(Generator, self).__init__() <NEW_LINE> self.z_dim = z_dim <NEW_LINE> self.gen = nn.Sequential( self.make_gen_block(z_dim, hidden_dim * 4), self.make_gen_block(hidden_dim * 4, hidden_dim * 2, kernel_size=4, stride=1), self.make_gen_block(hidden_dim * 2, hidden_dim), self.make_gen_block(hidden_dim, im_chan, kernel_size=4, final_layer=True), ) <NEW_LINE> <DEDENT> def make_gen_block(self, input_channels, output_channels, kernel_size=3, stride=2, final_layer=False): <NEW_LINE> <INDENT> if not final_layer: <NEW_LINE> <INDENT> return nn.Sequential( nn.ConvTranspose2d(input_channels, output_channels, kernel_size, stride), nn.BatchNorm2d(output_channels), nn.ReLU(inplace=True), ) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return nn.Sequential( nn.ConvTranspose2d(input_channels, output_channels, kernel_size, stride), nn.Tanh(), ) <NEW_LINE> <DEDENT> <DEDENT> def forward(self, noise): <NEW_LINE> <INDENT> x = noise.view(len(noise), self.z_dim, 1, 1) <NEW_LINE> return self.gen(x) | Generator Class
Values:
z_dim: the dimension of the noise vector, a scalar
im_chan: the number of channels in the images, fitted for the dataset used, a scalar
(MNIST is black-and-white, so 1 channel is your default)
hidden_dim: the inner dimension, a scalar | 62598fa18da39b475be03033 |
class PixmapCheckWidget(QtWidgets.QWidget): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.path = '' <NEW_LINE> self.data = None <NEW_LINE> self.checked = False <NEW_LINE> self.initUI() <NEW_LINE> <DEDENT> def initUI(self): <NEW_LINE> <INDENT> self.ver_layout = QtWidgets.QVBoxLayout() <NEW_LINE> self.label = QtWidgets.QLabel() <NEW_LINE> self.checkbox = QtWidgets.QCheckBox('Show/Hide') <NEW_LINE> self.info = QtWidgets.QLineEdit() <NEW_LINE> self.info.setReadOnly(True) <NEW_LINE> self.checkbox.setChecked(False) <NEW_LINE> self.ver_layout.addWidget(self.label) <NEW_LINE> self.ver_layout.addWidget(self.info) <NEW_LINE> self.ver_layout.addWidget(self.checkbox) <NEW_LINE> self.ver_layout.setSpacing(0) <NEW_LINE> self.ver_layout.setContentsMargins(0, 0, 0, 0) <NEW_LINE> self.setLayout(self.ver_layout) <NEW_LINE> <DEDENT> def setValue(self, dic): <NEW_LINE> <INDENT> if 'data' in dic: <NEW_LINE> <INDENT> if not isinstance(dic['data'], QtGui.QPixmap): <NEW_LINE> <INDENT> self.data = QByteArray(dic['data']) <NEW_LINE> im = QtGui.QImage.fromData(self.data) <NEW_LINE> a = QtGui.QPixmap.fromImage(im) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> a = dic['data'] <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> a = dic['pixmap'] <NEW_LINE> <DEDENT> if 'path' in dic: <NEW_LINE> <INDENT> self.path = dic['path'] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.path = '' <NEW_LINE> <DEDENT> if 'info' in dic: <NEW_LINE> <INDENT> info = dic['info'] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> info = '' <NEW_LINE> <DEDENT> self.label.setPixmap(a) <NEW_LINE> self.checkbox.setChecked(dic['checked']) <NEW_LINE> self.info.setText(info) <NEW_LINE> <DEDENT> def value(self): <NEW_LINE> <INDENT> return dict(pixmap=self.label.pixmap(), checked=self.checkbox.isChecked(), path=self.path) | value of this parameter is a dict with checked, data for the pixmap and optionally path in h5 node
| 62598fa1a17c0f6771d5c08f |
class StructureDescription(FrozenClass): <NEW_LINE> <INDENT> ua_types = [ ('DataTypeId', 'NodeId'), ('Name', 'QualifiedName'), ('StructureDefinition', 'StructureDefinition'), ] <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> self.DataTypeId = NodeId() <NEW_LINE> self.Name = QualifiedName() <NEW_LINE> self.StructureDefinition = StructureDefinition() <NEW_LINE> self._freeze = True <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return 'StructureDescription(' + 'DataTypeId:' + str(self.DataTypeId) + ', ' + 'Name:' + str(self.Name) + ', ' + 'StructureDefinition:' + str(self.StructureDefinition) + ')' <NEW_LINE> <DEDENT> __repr__ = __str__ | :ivar DataTypeId:
:vartype DataTypeId: NodeId
:ivar Name:
:vartype Name: QualifiedName
:ivar StructureDefinition:
:vartype StructureDefinition: StructureDefinition | 62598fa13539df3088ecc109 |
class _Option(object): <NEW_LINE> <INDENT> def __init__(self, kind, required=False, default_factory=None, can_be_none=False): <NEW_LINE> <INDENT> if required and default_factory is not None: <NEW_LINE> <INDENT> raise ValueError("No default_factory value when option is required.") <NEW_LINE> <DEDENT> self.kind = kind <NEW_LINE> self.required = required <NEW_LINE> self.default_factory = default_factory <NEW_LINE> self.can_be_none = can_be_none | An option for _Config. | 62598fa10c0af96317c561d7 |
class NoteExtension(Extension): <NEW_LINE> <INDENT> def __init__(self, **kwargs): <NEW_LINE> <INDENT> self.config = { 'prefix': ['<div>', 'Opening tag(s) which wrap the content'], 'postfix': ['</div>', 'Closing tag(s) which wrap the content'], 'tags': [{}, 'Template context passed into template rendering'], 'template_adapter': ['docdown.template_adapters.StringFormatAdapter', ('Adapter for rendering prefix and postfix templates' ' using your template language of choice.')], 'default_tag': ['', 'Default tag to use if the specified tag is not in the tags dict'], } <NEW_LINE> super(NoteExtension, self).__init__(**kwargs) <NEW_LINE> <DEDENT> def extendMarkdown(self, md, md_globals): <NEW_LINE> <INDENT> md.registerExtension(self) <NEW_LINE> prefix = self.getConfig('prefix') <NEW_LINE> postfix = self.getConfig('postfix') <NEW_LINE> tags = self.getConfig('tags') <NEW_LINE> template_adapter = self.getConfig('template_adapter') <NEW_LINE> default_tag = self.getConfig('default_tag') <NEW_LINE> md.preprocessors.add('note_blocks', NoteBlockPreprocessor(prefix=prefix, postfix=postfix, tags=tags, template_adapter=template_adapter, default_tag=default_tag, markdown_instance=md), ">normalize_whitespace") | Renders a block of HTML with a title, svg image, and content to be displayed as a note.
The svg image is rendered using.
Configuration Example:
{
'template_adapter': 'docdown.template_adapters.StringFormatAdapter',
'prefix': ('<div class="{ tag }">'
' <div class="icon">'
' {% svg "{ svg }" %}'
' <img class="icon--pdf" src="{% static "{ svg_path }" %}"'
' </div>'
' <h5>{ title }</h5>'
'</div>'),
'postfix': '</div>',
'tags': {
'tag_name': {
'svg': 'standard/icon-must',
'svg_path': 'svg/standard/icon-must.svg',
'title': 'Must'
},
}
} | 62598fa176e4537e8c3ef408 |
class TestDBWriting(TestCase): <NEW_LINE> <INDENT> @mock.patch('requests.get', autospec=True) <NEW_LINE> def test_hr_sbi(self, response_mock): <NEW_LINE> <INDENT> with open(FIX_DIR + '/fixtures/sbi.json') as mockjson: <NEW_LINE> <INDENT> test_json = json.loads(mockjson.read()) <NEW_LINE> <DEDENT> type(response_mock.return_value).status_code = PropertyMock(return_value=200) <NEW_LINE> type(response_mock.return_value).json = MagicMock(return_value=test_json) <NEW_LINE> call_command('run_import', '--sbicodes') <NEW_LINE> count = handelsregister.SBIcodes.objects.count() <NEW_LINE> self.assertEqual(count, 2) <NEW_LINE> <DEDENT> @mock.patch('requests.get', autospec=True) <NEW_LINE> def test_hr_csv(self, response_mock): <NEW_LINE> <INDENT> factories.BuurtFactory.create( naam='testbuurt', ) <NEW_LINE> with open(FIX_DIR + '/fixtures/ds_hr.csv') as mockcsv: <NEW_LINE> <INDENT> test_csv = mockcsv.read() <NEW_LINE> <DEDENT> type(response_mock.return_value).status_code = PropertyMock(return_value=200) <NEW_LINE> type(response_mock.return_value).text = PropertyMock(return_value=test_csv) <NEW_LINE> call_command('run_import', '--handelsregister') <NEW_LINE> count = handelsregister.Handelsregister.objects.count() <NEW_LINE> self.assertEqual(count, 10) <NEW_LINE> <DEDENT> @mock.patch('requests.get', autospec=True) <NEW_LINE> def test_rapport(self, response_mock): <NEW_LINE> <INDENT> factories.BuurtFactory.create( naam='testbuurt', ) <NEW_LINE> with open(FIX_DIR + '/fixtures/sbi.json') as mockjson: <NEW_LINE> <INDENT> test_json = json.loads(mockjson.read()) <NEW_LINE> <DEDENT> with open(FIX_DIR + '/fixtures/ds_hr.csv') as mockcsv: <NEW_LINE> <INDENT> test_csv = mockcsv.read() <NEW_LINE> <DEDENT> type(response_mock.return_value).status_code = PropertyMock(return_value=200) <NEW_LINE> type(response_mock.return_value).text = PropertyMock(return_value=test_csv) <NEW_LINE> type(response_mock.return_value).json = MagicMock(return_value=test_json) <NEW_LINE> call_command('run_import', 
'--handelsregister') <NEW_LINE> call_command('run_import', '--sbicodes') <NEW_LINE> call_command('run_import', '--hrrapport') | HR and SBI api code
Test writing to database | 62598fa1925a0f43d25e7e92 |
class PycFile(object): <NEW_LINE> <INDENT> def __init__(self, magic, origin, timestamp, file_size, code): <NEW_LINE> <INDENT> self.magic = magic <NEW_LINE> self.origin = origin <NEW_LINE> self.timestamp = timestamp <NEW_LINE> self.file_size = file_size <NEW_LINE> self.code = code | This class describes a parsed .pyc file and is returned by
:func:`pyc_load` and :func:`pyc_loads`. | 62598fa1e1aae11d1e7ce74e |
class _CovarianceFunctionContainer(CovarianceFunction): <NEW_LINE> <INDENT> _cov = None <NEW_LINE> _start_hyp = None <NEW_LINE> @property <NEW_LINE> def cov(self): <NEW_LINE> <INDENT> return self._cov <NEW_LINE> <DEDENT> @property <NEW_LINE> def start_hyp(self): <NEW_LINE> <INDENT> return self._start_hyp <NEW_LINE> <DEDENT> def __init__(self, cov, hyp=None, name='covariance Function Container'): <NEW_LINE> <INDENT> assert isinstance(cov, tuple) or isinstance(cov, list) <NEW_LINE> assert len(cov) >= 1 <NEW_LINE> num_input = cov[0].num_input <NEW_LINE> num_hyp = 0 <NEW_LINE> start_hyp = [0] <NEW_LINE> for k in cov: <NEW_LINE> <INDENT> assert isinstance(k, CovarianceFunction) <NEW_LINE> assert num_input == k.num_input <NEW_LINE> num_hyp += k.num_hyp <NEW_LINE> start_hyp += [start_hyp[-1] + k.num_hyp] <NEW_LINE> <DEDENT> self._cov = cov <NEW_LINE> self._start_hyp = start_hyp[:-1] <NEW_LINE> super(_CovarianceFunctionContainer, self).__init__(num_input, num_hyp=num_hyp, hyp=hyp, name=name) <NEW_LINE> <DEDENT> def _to_string(self, pad): <NEW_LINE> <INDENT> s = super(_CovarianceFunctionContainer, self)._to_string(pad) + '\n' <NEW_LINE> s += pad + ' Contents:' <NEW_LINE> for k in self.cov: <NEW_LINE> <INDENT> s += '\n' + k._to_string(pad + ' ') <NEW_LINE> <DEDENT> return s <NEW_LINE> <DEDENT> def _get_hyp_of(self, i, hyp): <NEW_LINE> <INDENT> return hyp[self.start_hyp[i]: self.start_hyp[i] + self.cov[i].num_hyp] <NEW_LINE> <DEDENT> def _eval_all(self, x, y, func, hyp): <NEW_LINE> <INDENT> if hyp is None: <NEW_LINE> <INDENT> hyp = self.hyp <NEW_LINE> <DEDENT> if hyp is None: <NEW_LINE> <INDENT> raise ValueError('You must specify the hyper-parameters.') <NEW_LINE> <DEDENT> if not self._check_hyp(hyp): <NEW_LINE> <INDENT> raise ValueError('Wrong number of parameters.') <NEW_LINE> <DEDENT> return [getattr(k, func)(x, y, hyp=self._get_hyp_of(i, hyp)) for k, i in itertools.izip(self.cov, range(len(self.cov)))] | A container for covariance functions. | 62598fa163d6d428bbee2606 |
class BufWr(UGen): <NEW_LINE> <INDENT> _has_done_flag = True <NEW_LINE> _ordered_input_names = collections.OrderedDict( [("buffer_id", None), ("phase", 0.0), ("loop", 1.0), ("source", None)] ) <NEW_LINE> _unexpanded_input_names = ("source",) <NEW_LINE> _valid_calculation_rates = (CalculationRate.AUDIO, CalculationRate.CONTROL) | A buffer-writing oscillator.
::
>>> buffer_id = 23
>>> phase = supriya.ugens.Phasor.ar(
... rate=supriya.ugens.BufRateScale.kr(buffer_id),
... start=0,
... stop=supriya.ugens.BufFrames.kr(buffer_id),
... )
>>> source = supriya.ugens.SoundIn.ar(bus=(0, 1))
>>> buf_wr = supriya.ugens.BufWr.ar(
... buffer_id=buffer_id,
... loop=1,
... phase=phase,
... source=source,
... )
>>> buf_wr
BufWr.ar() | 62598fa15f7d997b871f930a |
class LogEntry(object): <NEW_LINE> <INDENT> def __init__(self, raw_entry): <NEW_LINE> <INDENT> self.raw = raw_entry <NEW_LINE> self.lines = self.raw.splitlines() <NEW_LINE> self._first = None <NEW_LINE> <DEDENT> @property <NEW_LINE> def first(self): <NEW_LINE> <INDENT> if self._first is None: <NEW_LINE> <INDENT> self._first = LogFields(self.lines[0]) <NEW_LINE> <DEDENT> return self._first | Represents a single entry we read in. | 62598fa107f4c71912baf299 |
class NoActiveTransactionError(Exception): <NEW_LINE> <INDENT> def __repr__(self): <NEW_LINE> <INDENT> return 'No active transaction for the request, channel closed' | Raised when a transaction method is issued but the transaction has not
been initiated. | 62598fa18e71fb1e983bb909 |
@admin.register(User) <NEW_LINE> class UserAdmin(UserAdmin): <NEW_LINE> <INDENT> fieldsets = ( (None, {'fields': ('email', 'password')}), (_('Personal info'), {'fields': ('first_name', 'last_name')}), (_('Permissions'), {'fields': ('is_active', 'is_staff', 'is_superuser', 'groups', 'user_permissions')}), (_('Important dates'), {'fields': ('last_login', 'date_joined')}), ) <NEW_LINE> add_fieldsets = ( (None, { 'classes': ('wide',), 'fields': ('email', 'password1', 'password2'), }), ) <NEW_LINE> list_display = ('email', 'first_name', 'last_name', 'is_staff') <NEW_LINE> search_fields = ('email', 'first_name', 'last_name') <NEW_LINE> ordering = ('email',) | Define admin model for custom User model with no email field. | 62598fa11f5feb6acb162a78 |
class CanScheduleGame(permissions.BasePermission): <NEW_LINE> <INDENT> NONADMIN_METHODS = [] <NEW_LINE> def _get_game(self, request): <NEW_LINE> <INDENT> parts = request.META['PATH_INFO'].split('/') <NEW_LINE> game_id = int(parts[3]) <NEW_LINE> return models.Game.objects.get(pk=game_id) <NEW_LINE> <DEDENT> def has_permission(self, request, *args, **kwargs): <NEW_LINE> <INDENT> if request.method in self.NONADMIN_METHODS: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> game = self._get_game(request) <NEW_LINE> return request.user in game.league.schedulers.all() <NEW_LINE> <DEDENT> has_object_permission = has_permission | Whether the user can schedule an game.
This is gross, because it parses the PATH_INFO of the
request to get the game id. That's probably bad, but
I don't see a better way to do it. | 62598fa1baa26c4b54d4f105 |
class ApplicationGatewayBackendHealthPool(Model): <NEW_LINE> <INDENT> _attribute_map = { 'backend_address_pool': {'key': 'backendAddressPool', 'type': 'ApplicationGatewayBackendAddressPool'}, 'backend_http_settings_collection': {'key': 'backendHttpSettingsCollection', 'type': '[ApplicationGatewayBackendHealthHttpSettings]'}, } <NEW_LINE> def __init__(self, backend_address_pool=None, backend_http_settings_collection=None): <NEW_LINE> <INDENT> self.backend_address_pool = backend_address_pool <NEW_LINE> self.backend_http_settings_collection = backend_http_settings_collection | Application gateway BackendHealth pool.
:param backend_address_pool: Reference of an
ApplicationGatewayBackendAddressPool resource.
:type backend_address_pool:
~azure.mgmt.network.v2017_06_01.models.ApplicationGatewayBackendAddressPool
:param backend_http_settings_collection: List of
ApplicationGatewayBackendHealthHttpSettings resources.
:type backend_http_settings_collection:
list[~azure.mgmt.network.v2017_06_01.models.ApplicationGatewayBackendHealthHttpSettings] | 62598fa1a219f33f346c6670 |
class GetBundle: <NEW_LINE> <INDENT> def __init__(self, url, token): <NEW_LINE> <INDENT> self.url = url <NEW_LINE> self.token = token <NEW_LINE> requests.packages.urllib3.disable_warnings(InsecureRequestWarning) <NEW_LINE> headerauth = {"Authorization":"Bearer " + self.token + ""} <NEW_LINE> bundleurl='https://' + self.url + '/api/clientbundle' <NEW_LINE> self.bundle = requests.get(bundleurl,headers=headerauth, verify=False) <NEW_LINE> <DEDENT> def extractBundle(self, path): <NEW_LINE> <INDENT> self.path = path <NEW_LINE> ucpzip = zipfile.ZipFile(io.BytesIO(self.bundle.content)) <NEW_LINE> ucpzip.extractall(path=self.path) | Generate and download a new UCP client bundle | 62598fa1b7558d5895463483 |
class VarGen(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.default = '_Boole' <NEW_LINE> self._name_index = {} <NEW_LINE> <DEDENT> def get_name(self, name=None, free_in=None): <NEW_LINE> <INDENT> if name != None: <NEW_LINE> <INDENT> pad = name <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> pad = self.default <NEW_LINE> <DEDENT> if free_in is None: <NEW_LINE> <INDENT> inc_name(pad, self._name_index) <NEW_LINE> i = self._name_index[pad] <NEW_LINE> if i == 0: <NEW_LINE> <INDENT> return pad <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return "{0!s}_{1!s}".format(pad, i) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> if not (pad in free_in): <NEW_LINE> <INDENT> return pad <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> i = 0 <NEW_LINE> fresh = "{0!s}_{1!s}".format(pad, i) <NEW_LINE> while fresh in free_in: <NEW_LINE> <INDENT> i += 1 <NEW_LINE> fresh = "{0!s}_{1!s}".format(pad, i) <NEW_LINE> <DEDENT> return fresh | Generate a fresh name according to a dictionary
sending names to a counter. These should never be reset. | 62598fa1be8e80087fbbeeb5 |
class BaseHelper: <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def get_timezone(): <NEW_LINE> <INDENT> from pytz import timezone <NEW_LINE> from os import environ <NEW_LINE> if environ.get('TZ') is not None: <NEW_LINE> <INDENT> return timezone(os.environ['TZ']) <NEW_LINE> <DEDENT> return timezone('Europe/Berlin') <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def setup_logger(name: str): <NEW_LINE> <INDENT> logger = logging.getLogger(name) <NEW_LINE> log_formatter = logging.Formatter( "%(asctime)s [%(filename)s:%(lineno)s - %(funcName)20s()]" " [%(levelname)-5.5s] %(message)s" ) <NEW_LINE> file_handler = logging.FileHandler( os.path.join(os.path.dirname(os.path.realpath(__file__)), "..", "%s_broker.log" % name), mode='w') <NEW_LINE> file_handler.setFormatter(log_formatter) <NEW_LINE> logger.addHandler(file_handler) <NEW_LINE> console_handler = logging.StreamHandler() <NEW_LINE> console_handler.setFormatter(log_formatter) <NEW_LINE> logger.addHandler(console_handler) <NEW_LINE> logger.setLevel(logging.INFO) <NEW_LINE> logger.debug("Logging Setup successful") <NEW_LINE> return logger | Helper class for logging and config parsing | 62598fa1851cf427c66b811e |
class TaskAttachment(Model): <NEW_LINE> <INDENT> _attribute_map = { '_links': {'key': '_links', 'type': 'ReferenceLinks'}, 'created_on': {'key': 'createdOn', 'type': 'iso-8601'}, 'last_changed_by': {'key': 'lastChangedBy', 'type': 'str'}, 'last_changed_on': {'key': 'lastChangedOn', 'type': 'iso-8601'}, 'name': {'key': 'name', 'type': 'str'}, 'record_id': {'key': 'recordId', 'type': 'str'}, 'timeline_id': {'key': 'timelineId', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'} } <NEW_LINE> def __init__(self, _links=None, created_on=None, last_changed_by=None, last_changed_on=None, name=None, record_id=None, timeline_id=None, type=None): <NEW_LINE> <INDENT> super(TaskAttachment, self).__init__() <NEW_LINE> self._links = _links <NEW_LINE> self.created_on = created_on <NEW_LINE> self.last_changed_by = last_changed_by <NEW_LINE> self.last_changed_on = last_changed_on <NEW_LINE> self.name = name <NEW_LINE> self.record_id = record_id <NEW_LINE> self.timeline_id = timeline_id <NEW_LINE> self.type = type | TaskAttachment.
:param _links:
:type _links: :class:`ReferenceLinks <task.v4_1.models.ReferenceLinks>`
:param created_on:
:type created_on: datetime
:param last_changed_by:
:type last_changed_by: str
:param last_changed_on:
:type last_changed_on: datetime
:param name:
:type name: str
:param record_id:
:type record_id: str
:param timeline_id:
:type timeline_id: str
:param type:
:type type: str | 62598fa130bbd722464698a2 |
class Wishlist(ndb.Model): <NEW_LINE> <INDENT> sessionKeys = ndb.KeyProperty(repeated=True) | Wishlist -- Profile session wishlist object | 62598fa1442bda511e95c2b1 |
class SysTrayNotifier(ByComponentNotifier): <NEW_LINE> <INDENT> def __init__(self, icon_name="user-available-symbolic"): <NEW_LINE> <INDENT> super(SysTrayNotifier, self).__init__() <NEW_LINE> import gi <NEW_LINE> gi.require_version('Gtk', '3.0') <NEW_LINE> gi.require_version('AppIndicator3', '0.1') <NEW_LINE> from gi.repository import Gtk as gtk, AppIndicator3 as appindicator <NEW_LINE> self._indicator = appindicator.Indicator.new( "customtray", icon_name, appindicator.IndicatorCategory.APPLICATION_STATUS ) <NEW_LINE> self._gtk = gtk <NEW_LINE> self._active = appindicator.IndicatorStatus.ACTIVE <NEW_LINE> self._passive = appindicator.IndicatorStatus.PASSIVE <NEW_LINE> self._indicator.set_status(self._passive) <NEW_LINE> menu = gtk.Menu() <NEW_LINE> entry = gtk.MenuItem(label='Dexter') <NEW_LINE> entry.connect('activate', lambda x: None) <NEW_LINE> menu.append(entry) <NEW_LINE> menu.show_all() <NEW_LINE> self._indicator.set_menu(menu) <NEW_LINE> <DEDENT> def update_status(self, component, status): <NEW_LINE> <INDENT> if component is None or status is None: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> if status is Notifier.IDLE: <NEW_LINE> <INDENT> self._indicator.set_status(self._passive) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._indicator.set_status(self._active) <NEW_LINE> <DEDENT> <DEDENT> def _start(self): <NEW_LINE> <INDENT> thread = Thread(target=self._gtk.main) <NEW_LINE> thread.deamon = True <NEW_LINE> thread.start() <NEW_LINE> <DEDENT> def _stop(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self._gtk.main_quit() <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> pass | A notifier which flags in the system tray. | 62598fa1097d151d1a2c0e80 |
class MockTeamsConfigurationService: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.teamset = mock.MagicMock() <NEW_LINE> self.teamset.configure_mock(name=TEAMSET_NAME) <NEW_LINE> <DEDENT> def get_teams_configuration(self, _): <NEW_LINE> <INDENT> return mock.MagicMock( teamsets_by_id={TEAMSET_ID: self.teamset} ) | Fixture class for testing ``TeamMixin``. | 62598fa101c39578d7f12bd5 |
class PosixComplianceStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)): <NEW_LINE> <INDENT> INVALID = "Invalid" <NEW_LINE> ENABLED = "Enabled" <NEW_LINE> DISABLED = "Disabled" | If provisioned storage is posix compliant.
| 62598fa17cff6e4e811b587b |
class CompoundExpression(AbstractFunction): <NEW_LINE> <INDENT> __metaclass__ = FillArgSpecMeta <NEW_LINE> kwonlyargs = {} <NEW_LINE> @classmethod <NEW_LINE> def get_compute_func(cls): <NEW_LINE> <INDENT> return cls.build_expr <NEW_LINE> <DEDENT> def as_simple_expr(self, context): <NEW_LINE> <INDENT> args = [as_simple_expr(arg, context) for arg in self.args] <NEW_LINE> kwargs = {name: as_simple_expr(arg, context) for name, arg in self.kwargs} <NEW_LINE> expr = self.build_expr(context, *args, **kwargs) <NEW_LINE> return expr.as_simple_expr(context) <NEW_LINE> <DEDENT> def build_expr(self, context, *args, **kwargs): <NEW_LINE> <INDENT> raise NotImplementedError() | function expression written in terms of other expressions | 62598fa13c8af77a43b67e6b |
class LargeConfig(object): <NEW_LINE> <INDENT> init_scale = 0.04 <NEW_LINE> use_adam = False <NEW_LINE> learning_rate = 1.0 <NEW_LINE> max_grad_norm = 10 <NEW_LINE> num_layers = 2 <NEW_LINE> num_steps = 35 <NEW_LINE> hidden_size = 1500 <NEW_LINE> max_epoch = 14 <NEW_LINE> max_max_epoch = 55 <NEW_LINE> keep_prob = 0.35 <NEW_LINE> lr_decay = 1 / 1.15 <NEW_LINE> batch_size = 20 <NEW_LINE> vocab_size = 10000 <NEW_LINE> rnn_mode = BLOCK <NEW_LINE> lower_rank = 512 <NEW_LINE> use_dynamic = False <NEW_LINE> n_experts = 15 | Large config. | 62598fa1fbf16365ca793f11 |
class Test(unittest.TestCase): <NEW_LINE> <INDENT> def test_ctor_saves_environ(self): <NEW_LINE> <INDENT> environ = {} <NEW_LINE> service = TestService(environ) <NEW_LINE> self.failUnless(service.environ is environ) <NEW_LINE> <DEDENT> def test_portypes(self): <NEW_LINE> <INDENT> app = Application([TestService], 'tns') <NEW_LINE> _wsdl = app.get_wsdl('') <NEW_LINE> wsdl = etree.fromstring(_wsdl) <NEW_LINE> porttype = wsdl.find('{http://schemas.xmlsoap.org/wsdl/}portType') <NEW_LINE> srv = TestService() <NEW_LINE> self.assertEquals( len(srv.public_methods), len(porttype.getchildren())) <NEW_LINE> <DEDENT> def test_override_param_names(self): <NEW_LINE> <INDENT> app = Application([TestService], 'tns') <NEW_LINE> _wsdl = app.get_wsdl('') <NEW_LINE> for n in ['self', 'import', 'return', 'from']: <NEW_LINE> <INDENT> self.assertTrue(n in _wsdl, '"%s" not in _wsdl' % n) <NEW_LINE> <DEDENT> <DEDENT> def test_multiple_return(self): <NEW_LINE> <INDENT> app = Application([MultipleReturnService], 'tns') <NEW_LINE> app.get_wsdl('') <NEW_LINE> srv = MultipleReturnService() <NEW_LINE> message = srv.public_methods[0].out_message() <NEW_LINE> self.assertEquals(len(message._type_info), 3) <NEW_LINE> sent_xml = etree.Element('test') <NEW_LINE> message.to_parent_element( ('a','b','c'), srv.get_tns(), sent_xml ) <NEW_LINE> sent_xml = sent_xml[0] <NEW_LINE> response_data = message.from_xml(sent_xml) <NEW_LINE> self.assertEquals(len(response_data), 3) <NEW_LINE> self.assertEqual(response_data[0], 'a') <NEW_LINE> self.assertEqual(response_data[1], 'b') <NEW_LINE> self.assertEqual(response_data[2], 'c') <NEW_LINE> <DEDENT> def test_multiple_ns(self): <NEW_LINE> <INDENT> svc = Application([MultipleNamespaceService], 'tns') <NEW_LINE> wsdl = svc.get_wsdl("URL") | Most of the service tests are performed through the interop tests. | 62598fa17b25080760ed72ff |
class UserGroup(Base): <NEW_LINE> <INDENT> command_base = 'user-group' <NEW_LINE> @classmethod <NEW_LINE> def add_role(cls, options=None): <NEW_LINE> <INDENT> cls.command_sub = 'add-role' <NEW_LINE> return cls.execute(cls._construct_command(options), output_format='csv') <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def add_user(cls, options=None): <NEW_LINE> <INDENT> cls.command_sub = 'add-user' <NEW_LINE> return cls.execute(cls._construct_command(options), output_format='csv') <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def add_user_group(cls, options=None): <NEW_LINE> <INDENT> cls.command_sub = 'add-user-group' <NEW_LINE> return cls.execute(cls._construct_command(options), output_format='csv') <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def remove_role(cls, options=None): <NEW_LINE> <INDENT> cls.command_sub = 'remove-role' <NEW_LINE> return cls.execute(cls._construct_command(options), output_format='csv') <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def remove_user(cls, options=None): <NEW_LINE> <INDENT> cls.command_sub = 'remove-user' <NEW_LINE> return cls.execute(cls._construct_command(options), output_format='csv') <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def remove_user_group(cls, options=None): <NEW_LINE> <INDENT> cls.command_sub = 'remove-user-group' <NEW_LINE> return cls.execute(cls._construct_command(options), output_format='csv') | Manipulates Foreman's user group. | 62598fa1009cb60464d0137c |
class TestFloat( unittest.TestCase ): <NEW_LINE> <INDENT> def setUp( self ): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown( self ): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def testFloat( self ): <NEW_LINE> <INDENT> style = MySubStyle() <NEW_LINE> self.assertEqual( None, MySubStyle.prop.default, msg = "Class default value for 'prop' wrong" ) <NEW_LINE> self.assertEqual( None, style.prop, msg = "Instance default value for 'style.prop' wrong" ) <NEW_LINE> style.prop = 5.0 <NEW_LINE> self.assertEqual( None, MySubStyle.prop.default, msg = "1) Class default value for 'prop' changed" ) <NEW_LINE> self.assertEqual( 5.0, style.prop, msg = "1) Instance value for 'style.prop' wrong" ) <NEW_LINE> newStyle = MySubStyle( prop = 3.0 ) <NEW_LINE> self.assertEqual( None, MySubStyle.prop.default, msg = "2) Class default value for 'prop' changed" ) <NEW_LINE> self.assertEqual( 5.0, style.prop, msg = "2) Instance value for 'style.prop' wrong" ) <NEW_LINE> self.assertEqual( 3.0, newStyle.prop, msg = "2) Instance value for 'newStyle.prop' wrong" ) <NEW_LINE> newStyle.prop = 9 <NEW_LINE> self.assertEqual( None, MySubStyle.prop.default, msg = "3) Class default value for 'prop' changed" ) <NEW_LINE> self.assertEqual( 5.0, style.prop, msg = "3) Instance value for 'style.prop' wrong" ) <NEW_LINE> self.assertEqual( 9.0, newStyle.prop, msg = "3) Instance value for 'newStyle.prop' wrong" ) <NEW_LINE> msg = "Failed to raise on invalid value." <NEW_LINE> self.assertRaises( Exception, MySubStyle, prop='bad', msg = msg ) <NEW_LINE> msg = "Failed to raise on low value." <NEW_LINE> self.assertRaises( Exception, MySubStyle, prop=-1.0, msg = msg ) <NEW_LINE> msg = "Failed to raise on high value." <NEW_LINE> self.assertRaises( Exception, MySubStyle, prop=100.0, msg = msg ) <NEW_LINE> s = 'Float: MySubStyle.prop' <NEW_LINE> self.assertEqual( s, str(MySubStyle.prop), msg = "Incorrect string value" ) | Float module. | 62598fa13539df3088ecc10b |
class LobbyistFirm2Cd(CalAccessBaseModel):
    """This is an undocumented model."""

    firm_id = models.IntegerField(db_column='FIRM_ID')
    session_id = models.IntegerField(db_column='SESSION_ID')
    firm_name = models.CharField(db_column='FIRM_NAME', max_length=60)
    # Spending totals for the current quarter and the full session.
    current_qtr_amt = models.FloatField(db_column='CURRENT_QTR_AMT')
    session_total_amt = models.FloatField(db_column='SESSION_TOTAL_AMT')
    contributor_id = models.IntegerField(
        db_column='CONTRIBUTOR_ID',
        blank=True,
        null=True
    )
    # The two calendar years spanned by the legislative session, with
    # year-to-date amounts for each.
    session_yr_1 = models.IntegerField(db_column='SESSION_YR_1')
    session_yr_2 = models.IntegerField(db_column='SESSION_YR_2')
    yr_1_ytd_amt = models.FloatField(db_column='YR_1_YTD_AMT')
    yr_2_ytd_amt = models.FloatField(db_column='YR_2_YTD_AMT')
    # Per-quarter amounts across the eight quarters of the session.
    qtr_1 = models.FloatField(db_column='QTR_1')
    qtr_2 = models.FloatField(db_column='QTR_2')
    qtr_3 = models.FloatField(db_column='QTR_3')
    qtr_4 = models.FloatField(db_column='QTR_4')
    qtr_5 = models.FloatField(db_column='QTR_5')
    qtr_6 = models.FloatField(db_column='QTR_6')
    qtr_7 = models.FloatField(db_column='QTR_7')
    qtr_8 = models.FloatField(db_column='QTR_8')

    class Meta:
        app_label = 'calaccess_raw'
        db_table = 'LOBBYIST_FIRM2_CD'
        verbose_name = 'LOBBYIST_FIRM2_CD'
        verbose_name_plural = 'LOBBYIST_FIRM2_CD'
class RshClient(ExecClient):
    """Rsh EngineClient."""

    def __init__(self, node, command, worker, stderr, timeout, autoclose=False, rank=None):
        ExecClient.__init__(self, node, command, worker, stderr, timeout, autoclose, rank)
        # Remote command's exit status parsed from the echoed marker line,
        # or None until (if ever) the marker is seen.
        self.rsh_rc = None

    def _build_cmd(self):
        """Build the rsh command line that runs self.command on the node.

        Returns a (args_list, env) tuple; env is always None here.
        """
        task = self.worker.task
        # Task-level overrides: rsh binary path, remote user, extra options.
        path = task.info("rsh_path") or "rsh"
        user = task.info("rsh_user")
        options = task.info("rsh_options")
        cmd_l = [os.path.expanduser(pathc) for pathc in shlex.split(path)]
        if user:
            cmd_l.append("-l")
            cmd_l.append(user)
        if options:
            cmd_l += shlex.split(options)
        cmd_l.append("%s" % self.key)
        cmd_l.append("%s" % self.command)
        # rsh does not propagate the remote exit status, so append an echo
        # of $? that _on_nodeset_msgline() can recognize and strip.
        cmd_l.append("; echo XXRETCODE: $?")
        return (cmd_l, None)

    def _on_nodeset_msgline(self, nodes, msg, sname):
        """Intercept the XXRETCODE marker; forward any other line upstream."""
        match = re.search(r"^XXRETCODE: (\d+)$", msg.decode())
        if match:
            self.rsh_rc = int(match.group(1))
        else:
            ExecClient._on_nodeset_msgline(self, nodes, msg, sname)

    def _on_nodeset_close(self, nodes, rc):
        """Report the parsed remote exit status instead of rsh's own rc."""
        # rsh itself exits 0 or 1 regardless of the remote command's status,
        # so prefer the marker value when one was captured.
        if (rc == 0 or rc == 1) and self.rsh_rc is not None:
            rc = self.rsh_rc
        ExecClient._on_nodeset_close(self, nodes, rc)
class IComingSoon(IObjectEvent):
    """Zope event to be notified when a plone content refers to a date
    that is coming soon.
    """

    pass
class MachineDiscardError(Exception):
    """Failed to discard saved state of a virtual machine."""

    pass
class PoolManager(RequestMethods):
    r"""Allows for arbitrary requests while transparently keeping track of
    necessary connection pools for you.

    :param num_pools:
        Number of connection pools to cache before discarding the least
        recently used pool.

    :param \**connection_pool_kw:
        Additional parameters are used to create fresh
        :class:`urllib3.connectionpool.ConnectionPool` instances.

    Example::

        >>> manager = PoolManager(num_pools=2)
        >>> r = manager.urlopen("http://google.com/")
        >>> r = manager.urlopen("http://google.com/mail")
        >>> r = manager.urlopen("http://yahoo.com/")
        >>> len(manager.pools)
        2
    """

    def __init__(self, num_pools=10, **connection_pool_kw):
        self.connection_pool_kw = connection_pool_kw
        # LRU container: evicts the least recently used pool once more than
        # num_pools pools are cached.
        self.pools = RecentlyUsedContainer(num_pools)

    def connection_from_host(self, host, port=80, scheme='http'):
        """Return the pool for (scheme, host, port), creating it lazily."""
        pool_key = (scheme, host, port)
        pool = self.pools.get(pool_key)
        if pool:
            return pool
        # No cached pool: build one of the scheme-appropriate class.
        pool_cls = pool_classes_by_scheme[scheme]
        pool = pool_cls(host, port, **self.connection_pool_kw)
        self.pools[pool_key] = pool
        return pool

    def connection_from_url(self, url):
        """Return the pool for the given URL, defaulting the port by scheme."""
        scheme, host, port = get_host(url)
        port = port or port_by_scheme.get(scheme, 80)
        return self.connection_from_host(host, port=port, scheme=scheme)

    def urlopen(self, method, url, **kw):
        """Issue the request through the matching pool, following
        cross-host redirects via a recursive retry."""
        conn = self.connection_from_url(url)
        try:
            return conn.urlopen(method, url, **kw)
        except HostChangedError as e:
            # The redirect crossed hosts: retry through the new host's pool,
            # carrying over the remaining retry budget.
            kw['retries'] = e.retries
            return self.urlopen(method, e.url, **kw)
class TestAnnonymousSurvey(unittest.TestCase):
    """Tests for the class AnnonymousSurvey."""

    def setUp(self):
        # One survey plus canned responses shared by every test method.
        question = "What language did you first learn to speak?"
        self.my_survey = AnnonymousSurvey(question)
        self.responses = ['English', 'Mandarin', 'Malay']

    def test_store_single_response(self):
        """A single stored response must be retrievable."""
        self.my_survey.store_responses(self.responses[0])
        self.assertIn(self.responses[0], self.my_survey.responses)

    def test_store_three_reponses(self):
        """All three stored responses must be retrievable."""
        for response in self.responses:
            self.my_survey.store_responses(response)
        for response in self.responses:
            self.assertIn(response, self.my_survey.responses)
class ReadonlyText(Text):
    """Text widget whose rendered input element carries readonly="1".

    >>> r=ReadonlyText("label")
    >>> r.render("name","value")
    u'<label for="name" class="table" >label</label><input name="name" value="value" readonly="1" type="text"/>'
    """

    def __init__(self, *lw, **kw):
        super(ReadonlyText, self).__init__(*lw, **kw)
        # Mark the rendered <input> as read-only via the params dict the
        # parent Text widget serializes into attributes.
        self.params['readonly']="1"
@dataclass
class RetornoConsulta:
    """Schema used for the RETURN of NFS-e/RPS query requests, queries of
    received NFS-e and batch queries.

    This XML schema is used by the web service to report to service takers
    and/or providers the result of those queries.

    :ivar cabecalho: response header.
    :ivar alerta: alert events raised while processing the XML message.
    :ivar erro: error events raised while processing the XML message.
    :ivar nfe: one element per returned NFS-e (at most 50).
    """

    class Meta:
        namespace = "http://nfse.blumenau.sc.gov.br"

    cabecalho: Optional["RetornoConsulta.Cabecalho"] = field(
        default=None,
        metadata={
            "name": "Cabecalho",
            "type": "Element",
            "namespace": "",
            "required": True,
        }
    )
    alerta: List[TpEvento] = field(
        default_factory=list,
        metadata={
            "name": "Alerta",
            "type": "Element",
            "namespace": "",
        }
    )
    erro: List[TpEvento] = field(
        default_factory=list,
        metadata={
            "name": "Erro",
            "type": "Element",
            "namespace": "",
        }
    )
    nfe: List[TpNfe] = field(
        default_factory=list,
        metadata={
            "name": "NFe",
            "type": "Element",
            "namespace": "",
            "max_occurs": 50,
        }
    )

    @dataclass
    class Cabecalho:
        """Header carrying the overall success flag and schema version."""

        sucesso: Optional[bool] = field(
            default=None,
            metadata={
                "name": "Sucesso",
                "type": "Element",
                "namespace": "",
                "required": True,
            }
        )
        # Fixed schema version attribute; not settable by callers.
        versao: str = field(
            init=False,
            default="1",
            metadata={
                "name": "Versao",
                "type": "Attribute",
                "required": True,
                "pattern": r"[0-9]{1,3}",
            }
        )
class CreateServerDump(command.Command):
    """Create a dump file in server(s).

    Trigger crash dump in server(s) with features like kdump in Linux.
    It will create a dump file in the server(s) dumping the server(s)'
    memory, and also crash the server(s). OSC sees the dump file
    (server dump) as a kind of resource.

    This command requires ``--os-compute-api-version`` 2.17 or greater.
    """

    def get_parser(self, prog_name):
        """Add the positional server argument(s) to the base parser."""
        parser = super(CreateServerDump, self).get_parser(prog_name)
        parser.add_argument(
            'server',
            metavar='<server>',
            nargs='+',
            help=_('Server(s) to create dump file (name or ID)'),
        )
        return parser

    def take_action(self, parsed_args):
        """Resolve each server name/ID and trigger its crash dump."""
        compute_client = self.app.client_manager.compute
        for server in parsed_args.server:
            utils.find_resource(
                compute_client.servers,
                server,
            ).trigger_crash_dump()
class TestingConfig(Config):
    """Testing environment configuration."""

    TESTING = True
    # Use a dedicated test database; fall back to an on-disk SQLite file
    # when TEST_DATABASE_URL is not set.
    SQLALCHEMY_DATABASE_URI = os.environ.get('TEST_DATABASE_URL') or 'sqlite:///' + os.path.join(basedir, 'data-test.sqlite')
    # CSRF protection is disabled so test clients can POST forms directly.
    WTF_CSRF_ENABLED = False
class _ProtoFile(object): <NEW_LINE> <INDENT> def __init__(self, path, parser_config): <NEW_LINE> <INDENT> self.path = path <NEW_LINE> self.path_prefix = parser_config.defined_in_prefix <NEW_LINE> self.code_url_prefix = parser_config.code_url_prefix <NEW_LINE> <DEDENT> def is_builtin(self): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> def is_python_file(self): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> def is_generated_file(self): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return 'Defined in [`{prefix}{path}`]({code_prefix}{path}).\n\n'.format( path=self.path, prefix=self.path_prefix, code_prefix=self.code_url_prefix) | This class indicates that the object is defined in a .proto file.
This can be used for the `defined_in` slot of the `PageInfo` objects. | 62598fa1498bea3a75a57978 |
class ReadJavaConfig:
    """Reads Java DTO source files and extracts their declared field names."""

    # Compiled once for the class: matches e.g. "private String userName;"
    # and captures the field name in group 3.
    _FIELD_PATTERN = re.compile(
        r"(public|private|protected) (\w+|java\.util\.Date) ([a-zA-Z0-9_$]*);")

    def __init__(self):
        # Directory holding the DTO .java files, taken from project config.
        self.root_dir = ReadConfig().get_file_path("dto_file_path")

    def get_filename(self):
        """Return the names (including extension) of all .java files in root_dir."""
        return [name for name in os.listdir(self.root_dir)
                if os.path.splitext(name)[1] == '.java']

    def get_argument(self, file_name):
        """Return the field names declared in the given Java file.

        ``file_name`` may be given with or without the ``.java`` extension,
        so names returned by :meth:`get_filename` can be passed directly
        (previously only extension-less names worked).  Returns an empty
        list when the file does not exist.
        """
        java_arg_list = []
        base = file_name if file_name.endswith('.java') else file_name + '.java'
        file_path = os.path.join(self.root_dir, base)
        try:
            with open(file_path, 'r', encoding='utf-8') as file:
                # Iterate the file lazily instead of readlines().
                for line in file:
                    match = self._FIELD_PATTERN.search(line.strip())
                    if match is not None:
                        java_arg_list.append(match.group(3))
        except FileNotFoundError:
            print("请检查 " + file_name + " 文件是否存在!")
        return java_arg_list
class IdentityPaged(Paged):
    """A paging container for iterating over a list of
    :class:`Identity <azure.mgmt.msi.models.Identity>` object.
    """

    # Maps the REST payload keys onto the Paged protocol attributes.
    _attribute_map = {
        'next_link': {'key': 'nextLink', 'type': 'str'},
        'current_page': {'key': 'value', 'type': '[Identity]'}
    }

    def __init__(self, *args, **kwargs):
        super(IdentityPaged, self).__init__(*args, **kwargs)
class LoginPageLocator(object):
    """Class for login page locators; all page locators should come here."""

    # (strategy, value) tuples consumed by selenium find_element calls.
    USERNAME = (By.ID, 'email')
    PASSWORD = (By.ID, 'pass')
    LOGIN_BTN = (By.ID, 'send2')
class About():
    """GNU MyServer Control about window."""

    def __init__(self):
        self.window = gtk.AboutDialog()
        # Any response (close, escape, ...) destroys the dialog.
        self.window.connect('response', self.on_aboutdialog_response)
        self.window.set_program_name('GNU MyServer Control')
        self.window.set_icon_list(logo)
        self.window.set_version(version)
        self.window.set_copyright(copyright_notice)
        self.window.set_license(gpl_v3)
        self.window.set_website('http://www.gnu.org/software/myserver/')
        self.window.show_all()

    def on_aboutdialog_response(self, widget, response):
        """Destroy the about dialog on any response."""
        widget.destroy()
class PizzaSerializer(UpdateSerializerMixin,serializers.ModelSerializer):
    """Serializer to be used by :model:`product.Pizza`."""

    class Meta:
        model = Pizza
        fields = (
            'id',
            'name',
        )
        # 'id' is server-assigned and must never be writable.
        read_only_fields = (
            'id',
        )
class FDAcategoryDSchema(SchemaObject):
    """Schema mixin for FDAcategoryD.

    Usage: place after the django model in the class definition; ``schema``
    will then return the schema.org identifier for the object.

    FDA pregnancy category D: positive evidence of human fetal risk based
    on adverse reaction data from investigational or marketing experience
    or studies in humans, but potential benefits may warrant use of the
    drug in pregnant women despite potential risks.
    """

    def __init__(self):
        self.schema = 'FDAcategoryD'
class Log(_BaseExpr):
    """Create "log" operator for logarithm of "num" with base "base".

    All arguments must resolve to floats.
    Requires server version 5.6.0+.
    """

    _op = _ExprOp.LOG

    def __init__(self, num: 'TypeFloat', base: 'TypeFloat'):
        # Operands are stored as the ordered (num, base) pair that the
        # expression machinery serializes.
        self._children = (num, base)
class Detectors(object):
    """Detectors class to detect objects in video frame.

    Attributes:
        None
    """

    def __init__(self):
        # The MTCNN model is loaded once per Detectors instance.
        self.detector= MTCNN()

    def Detect(self, frame):
        """Detect faces in `frame`, draw their boxes and return centers.

        :param frame: image as produced by OpenCV capture
        :return: list of 2x1 numpy arrays [[cx], [cy]], one per face
        """
        centers = []
        faces = self.detector.detect_faces(frame)
        for result in faces:
            x,y,width,height = result['box']
            # Draw the detection box directly on the frame (mutates it).
            cv2.rectangle(frame, (x, y), (x+width,y+height), (0, 255, 0), 2)
            # Box center; (width - 1)/(height - 1) keeps it on the pixel grid.
            centerX = int(x + 0.5 * (width - 1));
            centerY = int(y + 0.5 * (height - 1));
            b = np.array([[centerX], [centerY]])
            centers.append(b)
        cv2.imshow("Window", frame)
        return centers
class Fibonacci_Result(metaclass=Metaclass_Fibonacci_Result):
    """Message class 'Fibonacci_Result'.

    Auto-generated ROS 2 message class; regenerate rather than editing
    by hand.
    """

    __slots__ = [
        '_sequence',
    ]

    _fields_and_field_types = {
        'sequence': 'sequence<int32>',
    }

    SLOT_TYPES = (
        rosidl_parser.definition.UnboundedSequence(rosidl_parser.definition.BasicType('int32')),
    )

    def __init__(self, **kwargs):
        # Only declared fields may be passed as keyword arguments.
        assert all('_' + key in self.__slots__ for key in kwargs.keys()), 'Invalid arguments passed to constructor: %s' % ', '.join(sorted(k for k in kwargs.keys() if '_' + k not in self.__slots__))
        self.sequence = array.array('i', kwargs.get('sequence', []))

    def __repr__(self):
        typename = self.__class__.__module__.split('.')
        typename.pop()
        typename.append(self.__class__.__name__)
        args = []
        for s, t in zip(self.__slots__, self.SLOT_TYPES):
            field = getattr(self, s)
            fieldstr = repr(field)
            # Numeric sequences are stored as array.array; strip the
            # "array('X', ...)" wrapper so the repr shows a plain list.
            if (
                isinstance(t, rosidl_parser.definition.AbstractSequence) and
                isinstance(t.value_type, rosidl_parser.definition.BasicType) and
                t.value_type.typename in ['float', 'double', 'int8', 'uint8', 'int16', 'uint16', 'int32', 'uint32', 'int64', 'uint64']
            ):
                if len(field) == 0:
                    fieldstr = '[]'
                else:
                    assert fieldstr.startswith('array(')
                    prefix = "array('X', "
                    suffix = ')'
                    fieldstr = fieldstr[len(prefix):-len(suffix)]
            args.append(s[1:] + '=' + fieldstr)
        return '%s(%s)' % ('.'.join(typename), ', '.join(args))

    def __eq__(self, other):
        if not isinstance(other, self.__class__):
            return False
        if self.sequence != other.sequence:
            return False
        return True

    @classmethod
    def get_fields_and_field_types(cls):
        # Return a copy so callers cannot mutate the class-level mapping.
        from copy import copy
        return copy(cls._fields_and_field_types)

    @property
    def sequence(self):
        return self._sequence

    @sequence.setter
    def sequence(self, value):
        if isinstance(value, array.array):
            assert value.typecode == 'i', "The 'sequence' array.array() must have the type code of 'i'"
            self._sequence = value
            return
        # Value validation is only performed in debug mode (stripped
        # under python -O).
        if __debug__:
            from collections.abc import Sequence
            from collections.abc import Set
            from collections import UserList
            from collections import UserString
            assert ((isinstance(value, Sequence) or isinstance(value, Set) or isinstance(value, UserList)) and not isinstance(value, str) and not isinstance(value, UserString) and all(isinstance(v, int) for v in value) and all(val >= -2147483648 and val < 2147483648 for val in value)), "The 'sequence' field must be a set or sequence and each value of type 'int' and each integer in [-2147483648, 2147483647]"
        self._sequence = array.array('i', value)
class Channel:
    """Main API interface provided for interacting with nervix servers.

    An instance of this class can be obtained by calling the
    create_channel() function, which also sets up the mainloop for you.
    Alternatively the constructor may be called directly with a Connection
    instance as its first parameter.

    This class is the starting point for all interactions with a nervix
    server:

    - For creating subscriptions: call the subscribe() method.
    - For creating sessions: call the session() method.
    - For doing requests: call the request() method.
    """

    def __init__(self, connection, serializer=StringSerializer()):
        # NOTE(review): the default StringSerializer() is created once and
        # shared by every Channel constructed without an explicit
        # serializer -- confirm the serializer is stateless.
        self.core = Core(
            connection=connection,
            serializer=serializer,
        )

    def subscribe(self, name, topic):
        """Create a Subscription for `topic` under the given name."""
        return Subscription(
            self.core,
            name,
            topic
        )

    def session(self, name, force=False, persist=False, standby=False):
        """Create a Session with the given name and flags."""
        return Session(
            self.core,
            name,
            force,
            persist,
            standby
        )

    def request(self, name=None, payload=None, timeout=None, ttl=None):
        """Create a RequestStub that can later be issued."""
        return RequestStub(
            self.core,
            name,
            payload,
            timeout,
            ttl
        )
class ModuleDependency(ModelSQL, ModelView):
    """Module dependency"""
    __name__ = "ir.module.dependency"
    name = fields.Char('Name')
    module = fields.Many2One('ir.module', 'Module', select=True,
        ondelete='CASCADE', required=True)
    # Computed field mirroring the state of the module named by `name`.
    state = fields.Function(fields.Selection([
        ('uninstalled', 'Not Installed'),
        ('installed', 'Installed'),
        ('to upgrade', 'To be upgraded'),
        ('to remove', 'To be removed'),
        ('to install', 'To be installed'),
        ('unknown', 'Unknown'),
        ], 'State', readonly=True), 'get_state')

    @classmethod
    def __setup__(cls):
        super(ModuleDependency, cls).__setup__()
        table = cls.__table__()
        # A module must not declare the same dependency twice.
        cls._sql_constraints += [
            ('name_module_uniq', Unique(table, table.name, table.module),
                'Dependency must be unique by module!'),
            ]

    @classmethod
    def __register__(cls, module_name):
        TableHandler = backend.get('TableHandler')
        # Migration: rename the table from its previous name if it exists.
        old_table = 'ir_module_module_dependency'
        if TableHandler.table_exist(old_table):
            TableHandler.table_rename(old_table, cls._table)
        super(ModuleDependency, cls).__register__(module_name)

    def get_state(self, name):
        """Return the state of the module this dependency points to, or
        'unknown' when no such module is registered."""
        pool = Pool()
        Module = pool.get('ir.module')
        dependencies = Module.search([
            ('name', '=', self.name),
            ])
        if dependencies:
            return dependencies[0].state
        else:
            return 'unknown'
class RootViaSudoExecutionController( CheckBoxDifferentialExecutionController): <NEW_LINE> <INDENT> def __init__(self, provider_list): <NEW_LINE> <INDENT> super().__init__(provider_list) <NEW_LINE> try: <NEW_LINE> <INDENT> in_sudo_group = grp.getgrnam("sudo").gr_gid in posix.getgroups() <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> in_sudo_group = False <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> in_admin_group = grp.getgrnam("admin").gr_gid in posix.getgroups() <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> in_admin_group = False <NEW_LINE> <DEDENT> self.user_can_sudo = in_sudo_group or in_admin_group <NEW_LINE> <DEDENT> def get_execution_command(self, job, job_state, config, session_dir, nest_dir): <NEW_LINE> <INDENT> cmd = ['sudo', '-u', job.user, 'env'] <NEW_LINE> env = self.get_differential_execution_environment( job, job_state, config, session_dir, nest_dir) <NEW_LINE> cmd += ["{key}={value}".format(key=key, value=value) for key, value in sorted(env.items())] <NEW_LINE> cmd += [job.shell, '-c', job.command] <NEW_LINE> return cmd <NEW_LINE> <DEDENT> def get_checkbox_score(self, job): <NEW_LINE> <INDENT> if 'win32' in job.get_flag_set(): <NEW_LINE> <INDENT> return -1 <NEW_LINE> <DEDENT> if job.user is not None and self.user_can_sudo: <NEW_LINE> <INDENT> return 2 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return -1 | Execution controller that gains root by using sudo.
This controller should be used for jobs that need root but cannot be
executed by the plainbox-trusted-launcher-1.
This happens whenever the job is not in the system-wide provider location.
In practice it is used when working with the special
'checkbox-in-source-tree' provider as well as for jobs that need to run as
root from the non-system-wide location.
Using this controller is preferable to pkexec if running on command line as
unlike pkexec, it retains 'memory' and doesn't ask for the password over
and over again. | 62598fa1925a0f43d25e7e96 |
class NotRecordedError(ExtractorError):
    """Exception to be raised when trying to get something that wasn't
    recorded."""

    pass
class ChildPopulation:
    """Genetic-algorithm operators: selection (roulette), crossover, mutation.

    Populations are arrays whose elements are 1-D arrays of 0/1 genes.
    All operators are stateless static methods; none of them modifies the
    population passed in.
    """

    @staticmethod
    def roulette(pop, evaluated_pop):
        """Select len(pop) individuals with probability proportional to fitness.

        :param pop: population (array of gene arrays)
        :param evaluated_pop: non-negative fitness value per individual
        :return: array of the selected individuals
        """
        new_pop = []
        total = np.sum(evaluated_pop)
        # Cumulative selection probabilities; the last entry is ~1.0.
        wheel = np.cumsum([fitness / total for fitness in evaluated_pop])
        for _ in range(len(wheel)):
            shot = np.random.random()
            for j in range(len(wheel)):
                if wheel[j] > shot:
                    new_pop.append(pop[j])
                    break
        return np.array(new_pop)

    @staticmethod
    def cross(pop, pk):
        """One-point crossover applied to each individual with probability pk.

        Individual i is crossed with individual i+1 (the last one wraps
        around to individual 0); otherwise it is carried over unchanged.
        """
        new_pop = np.ndarray(pop.shape, dtype=np.ndarray)
        length = len(pop[0])
        for i in range(len(pop)):
            if np.random.random() < pk:
                cut = np.random.randint(1, high=length)
                partner = pop[i + 1] if i + 1 < len(pop) else pop[0]
                new_pop[i] = np.concatenate((pop[i][0:cut], partner[cut:length]))
            else:
                # Copy so later in-place edits cannot alias the input.
                new_pop[i] = pop[i].copy()
        return new_pop

    @staticmethod
    def mutate(pop, pm):
        """Flip each gene independently with probability pm.

        Returns a new population and leaves `pop` untouched.  (The previous
        implementation stored references to the input rows and flipped
        their bits in place, silently corrupting the parent population.)
        """
        new_pop = np.ndarray(pop.shape, dtype=np.ndarray)
        for i in range(len(pop)):
            # Copy the gene array so flipping bits cannot touch the input.
            new_pop[i] = pop[i].copy()
            for g in range(len(new_pop[i])):
                if np.random.random() < pm:
                    new_pop[i][g] = 0 if new_pop[i][g] == 1 else 1
        return new_pop
class MeasurementLikwidPower(Measurement):
    """Power measurement driver: compiles and runs the individual on the
    remote target over SSH and samples likwid-powermeter."""

    def __init__(self,confFile):
        super().__init__(confFile)

    def init(self):
        super().init()
        # Sampling window in seconds, passed to likwid-powermeter -s.
        self.timeToMeasure = self.tryGetIntValue('time_to_measure')

    def measure(self):
        """Compile, run and meter the individual remotely.

        Returns a one-element list with the measured wattage.
        """
        super().copyFileOverFTP()
        # Compile the uploaded assembly into the 'individual' binary.
        compilation_command="cd "+self.targetRunDir + " ; gcc main.s -o individual &>/dev/null;"
        # Pin one copy of the binary to each configured core, sample power
        # for the configured window, then kill the workers.
        execution_command="cd "+self.targetRunDir+" ; "
        for core in self.coresToUse:
            execution_command+="taskset -c "+str(core)+" ./individual &>/dev/null & "
        execution_command+=" sudo likwid-powermeter -s "+str(self.timeToMeasure) +"s > tmp ; pkill individual &> /dev/null;"
        # Extract the first wattage figure and clean up remote artifacts.
        output_command="cd "+self.targetRunDir + " ; cat tmp | grep Watt | head -n 1 | awk '{print $3}'; rm main.s; rm individual; rm tmp; ";
        super().executeSSHcommand(compilation_command)
        super().executeSSHcommand(execution_command)
        stdout=super().executeSSHcommand(output_command)
        for line in stdout:
            try:
                test=float(line)
                power_meas=test
            except ValueError:
                print ("Exception line not power")
        # NOTE(review): if no stdout line parses as a float, power_meas is
        # never bound and the append below raises NameError -- confirm
        # whether a sentinel/exception is intended here.
        measurements=[];
        measurements.append(power_meas);
        return measurements;
class UserRegistrationForm(UserCreationForm):
    """Form used to register a new user."""

    password1 = forms.CharField(label="Password",widget=forms.PasswordInput)
    password2 = forms.CharField(
        label="Password Confirmation",
        widget=forms.PasswordInput)

    class Meta:
        model = MyUser
        fields = ['email', 'username', 'password1', 'password2']

    def clean_email(self):
        """Reject e-mail addresses that are already registered."""
        User = get_user_model()
        email = self.cleaned_data.get('email')
        # NOTE(review): 'username' is fetched but never used below.
        username = self.cleaned_data.get('username')
        if User.objects.filter(email=email):
            raise forms.ValidationError('Email address must be unique')
        return email

    def clean_password2(self):
        """Ensure both password fields are present and match."""
        password1 = self.cleaned_data.get('password1')
        password2 = self.cleaned_data.get('password2')
        if not password1 or not password2:
            raise ValidationError("Please confirm your password")
        if password1 != password2:
            raise ValidationError("Passwords must match")
        return password2
class FscSensorBase(object):
    """Fsc sensor base class."""

    def __init__(self, **kwargs):
        # 'name' and 'read_source' are only bound when supplied; callers
        # that did not pass them get no attribute at all, as before.
        for attr in ('name', 'read_source'):
            if attr in kwargs:
                setattr(self, attr, kwargs[attr])
        # 'write_source' always exists, defaulting to None.
        self.write_source = kwargs.get('write_source', None)
        # Failure counters start at zero for both directions.
        self.read_source_fail_counter = 0
        self.write_source_fail_counter = 0

    @abc.abstractmethod
    def read(self, **kwargs):
        """Read the sensor value; to be provided by subclasses."""
        return

    def write(self, **kwargs):
        """Write to the sensor; optional, no-op by default."""
        pass
@estimate_engine.register('EFPA')
class efpa(estimate_engine.estimate_engine):
    """Estimate engine with the EFPA algorithm."""

    @staticmethod
    def Run(Q, x, epsilon):
        """Run EFPA on data vector x with privacy budget epsilon.

        The query workload Q is accepted for interface compatibility with
        the other engines but is not used by this implementation.
        """
        return EFPA.EFPA(x, 1, epsilon)
class ESContentSearchResponse(ContentSearchResponse):
    """Response of a search using LibSearch.

    This is both a serializable content and a custom Response object for an
    elasticsearch search.
    """

    def __init__(self, search: Search, response: Response) -> None:
        self._response = response
        self._search = search
        total_hits = self._response["hits"]["total"]["value"]
        # Elasticsearch reports relation "eq" when the total is exact
        # rather than a lower bound.
        is_total_hit_accurate = self._response["hits"]["total"]["relation"] == "eq"
        contents = []
        for hit in response["hits"]["hits"]:
            source = hit["_source"]
            # Documents without a "comments" field are valid: default to [].
            try:
                comments = [
                    SearchedDigestComment(
                        content_id=comment["content_id"], parent_id=comment.get("parent_id")
                    )
                    for comment in source["comments"]
                ]
            except KeyError:
                comments = []
            path = [SearchedDigestContent(**component) for component in source["path"]]
            dict_workspace = source["workspace"]
            workspace = SearchedDigestWorkspace(
                workspace_id=dict_workspace["workspace_id"], label=dict_workspace["label"]
            )
            dict_last_modifier = source["last_modifier"]
            last_modifier = SearchedDigestUser(**dict_last_modifier)
            dict_author = source["author"]
            author = SearchedDigestUser(**dict_author)
            # Replace the raw sub-dicts with typed objects and parse the
            # ISO timestamps before building the SearchedContent.
            source.update(
                dict(
                    workspace=workspace,
                    author=author,
                    last_modifier=last_modifier,
                    comments=comments,
                    modified=parse(source["modified"]),
                    created=parse(source["created"]),
                    score=hit["_score"],
                    path=path,
                )
            )
            content = SearchedContent(**source)
            contents.append(content)
        aggregations = response["aggregations"]
        facets = ContentFacets(
            workspace_names=facet_count(aggregations, "workspace_names"),
            author__public_names=facet_count(aggregations, "author__public_names"),
            last_modifier__public_names=facet_count(aggregations, "last_modifier__public_names"),
            file_extensions=facet_count(aggregations, "file_extensions", exclude_empty_values=True),
            statuses=facet_count(aggregations, "statuses"),
            content_types=facet_count(aggregations, "content_types"),
            tags=facet_count(aggregations, "tags"),
        )
        super().__init__(
            contents=contents,
            total_hits=total_hits,
            is_total_hits_accurate=is_total_hit_accurate,
            facets=facets,
            created_range=date_range_from_aggregation(aggregations, "created"),
            modified_range=date_range_from_aggregation(aggregations, "modified"),
        )
class ShaResNet(HybridBlock): <NEW_LINE> <INDENT> def __init__(self, channels, init_block_channels, bottleneck, conv1_stride, bn_use_global_stats=False, in_channels=3, in_size=(224, 224), classes=1000, **kwargs): <NEW_LINE> <INDENT> super(ShaResNet, self).__init__(**kwargs) <NEW_LINE> self.in_size = in_size <NEW_LINE> self.classes = classes <NEW_LINE> with self.name_scope(): <NEW_LINE> <INDENT> self.features = nn.HybridSequential(prefix="") <NEW_LINE> self.features.add(ResInitBlock( in_channels=in_channels, out_channels=init_block_channels, bn_use_global_stats=bn_use_global_stats)) <NEW_LINE> in_channels = init_block_channels <NEW_LINE> for i, channels_per_stage in enumerate(channels): <NEW_LINE> <INDENT> stage = nn.HybridSequential(prefix="stage{}_".format(i + 1)) <NEW_LINE> shared_conv = None <NEW_LINE> with stage.name_scope(): <NEW_LINE> <INDENT> for j, out_channels in enumerate(channels_per_stage): <NEW_LINE> <INDENT> strides = 2 if (j == 0) and (i != 0) else 1 <NEW_LINE> unit = ShaResUnit( in_channels=in_channels, out_channels=out_channels, strides=strides, bn_use_global_stats=bn_use_global_stats, bottleneck=bottleneck, conv1_stride=conv1_stride, shared_conv=shared_conv) <NEW_LINE> if (shared_conv is None) and not (bottleneck and not conv1_stride and strides > 1): <NEW_LINE> <INDENT> shared_conv = unit.body.conv2.conv <NEW_LINE> <DEDENT> stage.add(unit) <NEW_LINE> in_channels = out_channels <NEW_LINE> <DEDENT> <DEDENT> self.features.add(stage) <NEW_LINE> <DEDENT> self.features.add(nn.AvgPool2D( pool_size=7, strides=1)) <NEW_LINE> self.output = nn.HybridSequential(prefix="") <NEW_LINE> self.output.add(nn.Flatten()) <NEW_LINE> self.output.add(nn.Dense( units=classes, in_units=in_channels)) <NEW_LINE> <DEDENT> <DEDENT> def hybrid_forward(self, F, x): <NEW_LINE> <INDENT> x = self.features(x) <NEW_LINE> x = self.output(x) <NEW_LINE> return x | ShaResNet model from 'ShaResNet: reducing residual network parameter number by sharing weights,'
https://arxiv.org/abs/1702.08782.
Parameters:
----------
channels : list of list of int
Number of output channels for each unit.
init_block_channels : int
Number of output channels for the initial unit.
bottleneck : bool
Whether to use a bottleneck or simple block in units.
conv1_stride : bool
Whether to use stride in the first or the second convolution layer in units.
bn_use_global_stats : bool, default False
Whether global moving statistics is used instead of local batch-norm for BatchNorm layers.
Useful for fine-tuning.
in_channels : int, default 3
Number of input channels.
in_size : tuple of two ints, default (224, 224)
Spatial size of the expected input image.
classes : int, default 1000
Number of classification classes. | 62598fa1097d151d1a2c0e84 |
class TestQuandl(unittest.TestCase): <NEW_LINE> <INDENT> @requests_mock.mock() <NEW_LINE> def test_current_rate(self, request_mock): <NEW_LINE> <INDENT> address = quandl.URL + "/datasets/USTREASURY/YIELD.json?api_key{0}".format( quandl.API_KEY ) <NEW_LINE> request_mock.get( address, status_code=200, json={ "dataset": { "column_names": [ "Date", "1 MO", "3 MO", "6 MO", "1 YR", "2 YR", "3 YR", "5 YR", "7 YR", "10 YR", "20 YR", "30 YR" ], "data": [ [ "2018-03-16", 1.71, 1.78, 1.96, 2.08, 2.31, 2.44, 2.65, 2.78, 2.85, 2.96, 3.08 ], ] }, } ) <NEW_LINE> rate = quandl.get_current_rate() <NEW_LINE> self.assertEqual(rate, Rate(datetime(2018, 3, 16), 0.0208)) | Quandl test class. | 62598fa1f7d966606f747e3c |
class C2dMeanSummaryStatistic(__MeanInnerSummaryStatistic__, Asymmetry): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> __MeanInnerSummaryStatistic__.__init__(self, 'C2d') <NEW_LINE> <DEDENT> @property <NEW_LINE> def statistics_dependence(self): <NEW_LINE> <INDENT> return [C2dStatistic] | summary mean statistic for C2d | 62598fa1435de62698e9bc4e |
class CookieMiddleware(MiddlewareMixin): <NEW_LINE> <INDENT> def process_request(self, request): <NEW_LINE> <INDENT> if request.META['PATH_INFO'] == reverse('account_logout') and request.method == 'POST': <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> request.user.auth_token.delete() <NEW_LINE> <DEDENT> except (AttributeError, ObjectDoesNotExist): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def process_response(self, request, response): <NEW_LINE> <INDENT> current_path = request.META['PATH_INFO'] <NEW_LINE> if current_path == reverse('rest_login') and getattr(response, 'data', None) and response.data.get('key'): <NEW_LINE> <INDENT> response.set_cookie('auth_token', 'Token %s' % response.data['key']) <NEW_LINE> <DEDENT> elif current_path == reverse('account_login') and request.method == 'POST' and request.user.is_authenticated: <NEW_LINE> <INDENT> _, __, token = token_cache.get_token_keys(user=request.user) <NEW_LINE> response.set_cookie('auth_token', 'Token %s' % token) <NEW_LINE> <DEDENT> elif current_path in (reverse('account_logout'), reverse('rest_logout')) and request.method == 'POST': <NEW_LINE> <INDENT> response.delete_cookie('auth_token') <NEW_LINE> response.delete_cookie('user_name') <NEW_LINE> response.delete_cookie('user_id') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> auth_token = request.COOKIES.get('auth_token', request.META.get('HTTP_AUTHORIZATION')) <NEW_LINE> if auth_token: <NEW_LINE> <INDENT> response.set_cookie('auth_token', auth_token) <NEW_LINE> <DEDENT> <DEDENT> if request.user and hasattr(request.user, 'name'): <NEW_LINE> <INDENT> response.set_cookie('user_name', request.user.name) <NEW_LINE> response.set_cookie('user_id', request.user.pk) <NEW_LINE> <DEDENT> response.set_cookie('release_version', settings.VERSION_NUMBER) <NEW_LINE> return response | Set cookie in response -
1. set auth_token from AUTHORIZATION request header,
see apps.users.authentication.CookieAuthentication
2. set extra cookie variables
Delete "auth_token" when django logout | 62598fa13c8af77a43b67e6d |
class VNFAppMonitor(object): <NEW_LINE> <INDENT> OPTS = [ cfg.ListOpt( 'app_monitor_driver', default=['zabbix'], help=_('App monitoring driver to communicate with ' 'Hosting VNF/logical service ' 'instance tacker plugin will use')), ] <NEW_LINE> cfg.CONF.register_opts(OPTS, 'tacker') <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> self._application_monitor_manager = driver_manager.DriverManager( 'tacker.tacker.app_monitor.drivers', cfg.CONF.tacker.app_monitor_driver) <NEW_LINE> <DEDENT> def _create_app_monitoring_dict(self, dev_attrs, mgmt_url): <NEW_LINE> <INDENT> app_policy = 'app_monitoring_policy' <NEW_LINE> appmonitoring_dict = ast.literal_eval(dev_attrs[app_policy]) <NEW_LINE> vdulist = appmonitoring_dict['vdus'].keys() <NEW_LINE> for vduname in vdulist: <NEW_LINE> <INDENT> temp = ast.literal_eval(mgmt_url) <NEW_LINE> appmonitoring_dict['vdus'][vduname]['mgmt_ip'] = temp[vduname] <NEW_LINE> <DEDENT> return appmonitoring_dict <NEW_LINE> <DEDENT> def create_app_dict(self, context, vnf_dict): <NEW_LINE> <INDENT> dev_attrs = vnf_dict['attributes'] <NEW_LINE> mgmt_url = vnf_dict['mgmt_url'] <NEW_LINE> return self._create_app_monitoring_dict(dev_attrs, mgmt_url) <NEW_LINE> <DEDENT> def _invoke(self, driver, **kwargs): <NEW_LINE> <INDENT> method = inspect.stack()[1][3] <NEW_LINE> return self._application_monitor_manager. invoke(driver, method, **kwargs) <NEW_LINE> <DEDENT> def add_to_appmonitor(self, applicationvnfdict, vnf_dict): <NEW_LINE> <INDENT> vdunode = applicationvnfdict['vdus'].keys() <NEW_LINE> driver = applicationvnfdict['vdus'][vdunode[0]]['name'] <NEW_LINE> kwargs = applicationvnfdict <NEW_LINE> return self._invoke(driver, vnf=vnf_dict, kwargs=kwargs) | VNF App monitor | 62598fa145492302aabfc32b |
class BaseProductExports: <NEW_LINE> <INDENT> def __init__(self, exports): <NEW_LINE> <INDENT> self.exports = sorted([self.export_class(**_) for _ in exports]) <NEW_LINE> self.export_IDs = {export.export_ID: export for export in self.exports} <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> for _ in self.exports: <NEW_LINE> <INDENT> yield _ <NEW_LINE> <DEDENT> <DEDENT> def __getitem__(self, index): <NEW_LINE> <INDENT> return self.exports[index] <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return len(self.exports) <NEW_LINE> <DEDENT> def get_by_ID(self, export_ID): <NEW_LINE> <INDENT> return self.export_IDs[export_ID] | Container for product exports. | 62598fa17cff6e4e811b587f |
class _ProtoJsonApiTools(protojson.ProtoJson): <NEW_LINE> <INDENT> _INSTANCE = None <NEW_LINE> @classmethod <NEW_LINE> def Get(cls): <NEW_LINE> <INDENT> if cls._INSTANCE is None: <NEW_LINE> <INDENT> cls._INSTANCE = cls() <NEW_LINE> <DEDENT> return cls._INSTANCE <NEW_LINE> <DEDENT> def decode_message(self, message_type, encoded_message): <NEW_LINE> <INDENT> if message_type in _CUSTOM_MESSAGE_CODECS: <NEW_LINE> <INDENT> return _CUSTOM_MESSAGE_CODECS[message_type].decoder(encoded_message) <NEW_LINE> <DEDENT> old_level = logging.getLogger().level <NEW_LINE> logging.getLogger().setLevel(logging.ERROR) <NEW_LINE> result = _DecodeCustomFieldNames(message_type, encoded_message) <NEW_LINE> result = super(_ProtoJsonApiTools, self).decode_message( message_type, result) <NEW_LINE> logging.getLogger().setLevel(old_level) <NEW_LINE> result = _ProcessUnknownEnums(result, encoded_message) <NEW_LINE> result = _ProcessUnknownMessages(result, encoded_message) <NEW_LINE> return _DecodeUnknownFields(result, encoded_message) <NEW_LINE> <DEDENT> def decode_field(self, field, value): <NEW_LINE> <INDENT> for decoder in _GetFieldCodecs(field, 'decoder'): <NEW_LINE> <INDENT> result = decoder(field, value) <NEW_LINE> value = result.value <NEW_LINE> if result.complete: <NEW_LINE> <INDENT> return value <NEW_LINE> <DEDENT> <DEDENT> if isinstance(field, messages.MessageField): <NEW_LINE> <INDENT> field_value = self.decode_message(field.message_type, json.dumps(value)) <NEW_LINE> <DEDENT> elif isinstance(field, messages.EnumField): <NEW_LINE> <INDENT> value = GetCustomJsonEnumMapping(field.type, json_name=value) or value <NEW_LINE> try: <NEW_LINE> <INDENT> field_value = super(_ProtoJsonApiTools, self).decode_field(field, value) <NEW_LINE> <DEDENT> except messages.DecodeError: <NEW_LINE> <INDENT> if not isinstance(value, basestring): <NEW_LINE> <INDENT> raise <NEW_LINE> <DEDENT> field_value = None <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> field_value = super(_ProtoJsonApiTools, 
self).decode_field(field, value) <NEW_LINE> <DEDENT> return field_value <NEW_LINE> <DEDENT> def encode_message(self, message): <NEW_LINE> <INDENT> if isinstance(message, messages.FieldList): <NEW_LINE> <INDENT> return '[%s]' % (', '.join(self.encode_message(x) for x in message)) <NEW_LINE> <DEDENT> if type(message) in _CUSTOM_MESSAGE_CODECS: <NEW_LINE> <INDENT> return _CUSTOM_MESSAGE_CODECS[type(message)].encoder(message) <NEW_LINE> <DEDENT> message = _EncodeUnknownFields(message) <NEW_LINE> result = super(_ProtoJsonApiTools, self).encode_message(message) <NEW_LINE> return _EncodeCustomFieldNames(message, result) <NEW_LINE> <DEDENT> def encode_field(self, field, value): <NEW_LINE> <INDENT> for encoder in _GetFieldCodecs(field, 'encoder'): <NEW_LINE> <INDENT> result = encoder(field, value) <NEW_LINE> value = result.value <NEW_LINE> if result.complete: <NEW_LINE> <INDENT> return value <NEW_LINE> <DEDENT> <DEDENT> if isinstance(field, messages.EnumField): <NEW_LINE> <INDENT> if field.repeated: <NEW_LINE> <INDENT> remapped_value = [GetCustomJsonEnumMapping( field.type, python_name=e.name) or e.name for e in value] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> remapped_value = GetCustomJsonEnumMapping( field.type, python_name=value.name) <NEW_LINE> <DEDENT> if remapped_value: <NEW_LINE> <INDENT> return remapped_value <NEW_LINE> <DEDENT> <DEDENT> if (isinstance(field, messages.MessageField) and not isinstance(field, message_types.DateTimeField)): <NEW_LINE> <INDENT> value = json.loads(self.encode_message(value)) <NEW_LINE> <DEDENT> return super(_ProtoJsonApiTools, self).encode_field(field, value) | JSON encoder used by apitools clients. | 62598fa14a966d76dd5eed3c |
class _SealedRelatedQuerySet(QuerySet): <NEW_LINE> <INDENT> def _clone(self, *args, **kwargs): <NEW_LINE> <INDENT> clone = super()._clone(*args, **kwargs) <NEW_LINE> clone.__class__ = self._unsealed_class <NEW_LINE> return clone <NEW_LINE> <DEDENT> def __getitem__(self, item): <NEW_LINE> <INDENT> if self._result_cache is None: <NEW_LINE> <INDENT> warnings.warn(self._sealed_warning, category=UnsealedAttributeAccess, stacklevel=2) <NEW_LINE> <DEDENT> return super().__getitem__(item) <NEW_LINE> <DEDENT> def _fetch_all(self): <NEW_LINE> <INDENT> if self._result_cache is None: <NEW_LINE> <INDENT> warnings.warn(self._sealed_warning, category=UnsealedAttributeAccess, stacklevel=3) <NEW_LINE> <DEDENT> super()._fetch_all() | QuerySet that prevents any fetching from taking place on its current form.
As soon as the query is cloned it gets unsealed. | 62598fa1009cb60464d0137f |
class SameInputLayer(Layer): <NEW_LINE> <INDENT> def __init__(self, nodes, dtype=None): <NEW_LINE> <INDENT> self.nodes = nodes <NEW_LINE> dtype = self._check_props(dtype) <NEW_LINE> input_dim = self.nodes[0].input_dim <NEW_LINE> for node in self.nodes: <NEW_LINE> <INDENT> if not node.input_dim == input_dim: <NEW_LINE> <INDENT> err = "The nodes have different input dimensions." <NEW_LINE> raise mdp.NodeException(err) <NEW_LINE> <DEDENT> <DEDENT> output_dim = self._get_output_dim_from_nodes() <NEW_LINE> super(Layer, self).__init__(input_dim=input_dim, output_dim=output_dim, dtype=dtype) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def is_invertible(): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> def _train(self, x, *args, **kwargs): <NEW_LINE> <INDENT> for node in self.nodes: <NEW_LINE> <INDENT> if node.is_training(): <NEW_LINE> <INDENT> node.train(x, *args, **kwargs) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def _pre_execution_checks(self, x): <NEW_LINE> <INDENT> if self.output_dim is None: <NEW_LINE> <INDENT> for node in self.nodes: <NEW_LINE> <INDENT> node._pre_execution_checks(x) <NEW_LINE> <DEDENT> self.output_dim = self._get_output_dim_from_nodes() <NEW_LINE> if self.output_dim is None: <NEW_LINE> <INDENT> err = "output_dim must be set at this point for all nodes" <NEW_LINE> raise mdp.NodeException(err) <NEW_LINE> <DEDENT> <DEDENT> super(Layer, self)._pre_execution_checks(x) <NEW_LINE> <DEDENT> def _execute(self, x, *args, **kwargs): <NEW_LINE> <INDENT> out_start = 0 <NEW_LINE> out_stop = 0 <NEW_LINE> y = None <NEW_LINE> for node in self.nodes: <NEW_LINE> <INDENT> out_start = out_stop <NEW_LINE> out_stop += node.output_dim <NEW_LINE> if y is None: <NEW_LINE> <INDENT> node_y = node.execute(x, *args, **kwargs) <NEW_LINE> y = numx.zeros([node_y.shape[0], self.output_dim], dtype=node_y.dtype) <NEW_LINE> y[:,out_start:out_stop] = node_y <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> y[:,out_start:out_stop] = node.execute(x, *args, **kwargs) <NEW_LINE> <DEDENT> 
<DEDENT> return y | SameInputLayer is a layer were all nodes receive the full input.
So instead of splitting the input according to node dimensions, all nodes
receive the complete input data. | 62598fa1adb09d7d5dc0a3e4 |
class triSubEnum(treedict.Tree_dict): <NEW_LINE> <INDENT> def __init__(self, lowbit, bitcount, enumClass, **kwargs): <NEW_LINE> <INDENT> bitmask=2**bitcount-1 <NEW_LINE> self.lowbit=lowbit <NEW_LINE> self.bitmask=bitmask << lowbit <NEW_LINE> self.enumClass=enumClass <NEW_LINE> super().__init__(**kwargs) <NEW_LINE> <DEDENT> def getCurrent(self): <NEW_LINE> <INDENT> return self.enumClass((self.parent.curval & self.bitmask) >> self.lowbit) <NEW_LINE> <DEDENT> def set(self, value): <NEW_LINE> <INDENT> shv=value.value<<self.lowbit <NEW_LINE> assert shv & self.bitmask==shv <NEW_LINE> pv=self.parent.curval & ~self.bitmask <NEW_LINE> self.parent.curval = pv | shv | a class for an enumeration that is a few bits somewhere in the register.
The register field is effectively an int, but since each value has a unique meaning, this
allows meaninful names to be used. | 62598fa1d58c6744b42dc200 |
class PootleUserManager(UserManager): <NEW_LINE> <INDENT> def get_default_user(self): <NEW_LINE> <INDENT> return super(PootleUserManager, self).get_query_set().select_related(depth=1).get(username='default') <NEW_LINE> <DEDENT> def get_nobody_user(self): <NEW_LINE> <INDENT> return super(PootleUserManager, self).get_query_set().select_related(depth=1).get(username='nobody') <NEW_LINE> <DEDENT> def hide_defaults(self): <NEW_LINE> <INDENT> return super(PootleUserManager, self).get_query_set().exclude(username__in=('nobody', 'default')) | A manager class which is meant to replace the manager class for the User model. This manager
hides the 'nobody' and 'default' users for normal queries, since they are special users. Code
that needs access to these users should use the methods get_default_user and get_nobody_user. | 62598fa1009cb60464d01380 |
class JSONRPCResponseManager(object): <NEW_LINE> <INDENT> RESPONSE_CLASS_MAP = { "1.0": JSONRPC10Response, "2.0": JSONRPC20Response, } <NEW_LINE> @classmethod <NEW_LINE> def handle(cls, request_str, dispatcher): <NEW_LINE> <INDENT> if isinstance(request_str, bytes): <NEW_LINE> <INDENT> request_str = request_str.decode("utf-8") <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> data = json.loads(request_str) <NEW_LINE> <DEDENT> except (TypeError, ValueError): <NEW_LINE> <INDENT> return JSONRPC20Response(error=JSONRPCParseError()._data) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> request = JSONRPCRequest.from_data(data) <NEW_LINE> <DEDENT> except JSONRPCInvalidRequestException: <NEW_LINE> <INDENT> return JSONRPC20Response(error=JSONRPCInvalidRequest()._data) <NEW_LINE> <DEDENT> return cls.handle_request(request, dispatcher) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def handle_request(cls, request, dispatcher): <NEW_LINE> <INDENT> rs = request if isinstance(request, JSONRPC20BatchRequest) else [request] <NEW_LINE> responses = [r for r in cls._get_responses(rs, dispatcher) if r is not None] <NEW_LINE> if not responses: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> if isinstance(request, JSONRPC20BatchRequest): <NEW_LINE> <INDENT> response = JSONRPC20BatchResponse(*responses) <NEW_LINE> response.request = request <NEW_LINE> return response <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return responses[0] <NEW_LINE> <DEDENT> <DEDENT> @classmethod <NEW_LINE> def _get_responses(cls, requests, dispatcher): <NEW_LINE> <INDENT> for request in requests: <NEW_LINE> <INDENT> def make_response(**kwargs): <NEW_LINE> <INDENT> response = cls.RESPONSE_CLASS_MAP[request.JSONRPC_VERSION]( _id=request._id, **kwargs) <NEW_LINE> response.request = request <NEW_LINE> return response <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> method = dispatcher[request.method] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> output = make_response(error=JSONRPCMethodNotFound()._data) <NEW_LINE> 
<DEDENT> else: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> result = method(*request.args, **request.kwargs) <NEW_LINE> <DEDENT> except JSONRPCDispatchException as e: <NEW_LINE> <INDENT> output = make_response(error=e.error._data) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> data = { "type": e.__class__.__name__, "args": e.args, "message": str(e), } <NEW_LINE> logger.exception("API Exception: {0}".format(data)) <NEW_LINE> if isinstance(e, TypeError) and is_invalid_params( method, *request.args, **request.kwargs): <NEW_LINE> <INDENT> output = make_response( error=JSONRPCInvalidParams(data=data)._data) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> output = make_response( error=JSONRPCServerError(data=data)._data) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> output = make_response(result=result) <NEW_LINE> <DEDENT> <DEDENT> finally: <NEW_LINE> <INDENT> if not request.is_notification: <NEW_LINE> <INDENT> yield output | JSON-RPC response manager.
Method brings syntactic sugar into library. Given dispatcher it handles
request (both single and batch) and handles errors.
Request could be handled in parallel, it is server responsibility.
:param str request_str: json string. Will be converted into
JSONRPC20Request, JSONRPC20BatchRequest or JSONRPC10Request
:param dict dispather: dict<function_name:function>. | 62598fa199cbb53fe6830d2e |
class MaxPooling3D(_Pooling3D): <NEW_LINE> <INDENT> def __init__(self, pool_size=(2, 2, 2), strides=None, border_mode='valid', dim_ordering=K.image_dim_ordering(), **kwargs): <NEW_LINE> <INDENT> if K._BACKEND != 'theano': <NEW_LINE> <INDENT> raise Exception(self.__class__.__name__ + ' is currently only working with Theano backend.') <NEW_LINE> <DEDENT> super(MaxPooling3D, self).__init__(pool_size, strides, border_mode, dim_ordering, **kwargs) <NEW_LINE> <DEDENT> def _pooling_function(self, inputs, pool_size, strides, border_mode, dim_ordering): <NEW_LINE> <INDENT> output = K.pool3d(inputs, pool_size, strides, border_mode, dim_ordering, pool_mode='max') <NEW_LINE> return output | Max pooling operation for 3D data (spatial or spatio-temporal).
Note: this layer will only work with Theano for the time being.
# Arguments
pool_size: tuple of 3 integers,
factors by which to downscale (dim1, dim2, dim3).
(2, 2, 2) will halve the size of the 3D input in each dimension.
strides: tuple of 3 integers, or None. Strides values.
border_mode: 'valid' or 'same'.
dim_ordering: 'th' or 'tf'. In 'th' mode, the channels dimension
(the depth) is at index 1, in 'tf' mode is it at index 4.
It defaults to the `image_dim_ordering` value found in your
Keras config file at `~/.keras/keras.json`.
If you never set it, then it will be "th".
# Input shape
5D tensor with shape:
`(samples, channels, len_pool_dim1, len_pool_dim2, len_pool_dim3)` if dim_ordering='th'
or 5D tensor with shape:
`(samples, len_pool_dim1, len_pool_dim2, len_pool_dim3, channels)` if dim_ordering='tf'.
# Output shape
5D tensor with shape:
`(nb_samples, channels, pooled_dim1, pooled_dim2, pooled_dim3)` if dim_ordering='th'
or 5D tensor with shape:
`(samples, pooled_dim1, pooled_dim2, pooled_dim3, channels)` if dim_ordering='tf'. | 62598fa167a9b606de545e25 |
class ILocationReference(ISheet): <NEW_LINE> <INDENT> pass | Marker interface for the location reference sheet. | 62598fa1cc0a2c111447ae69 |
class Plan(Model): <NEW_LINE> <INDENT> _attribute_map = { 'name': {'key': 'name', 'type': 'str'}, 'publisher': {'key': 'publisher', 'type': 'str'}, 'product': {'key': 'product', 'type': 'str'}, 'promotion_code': {'key': 'promotionCode', 'type': 'str'}, 'version': {'key': 'version', 'type': 'str'}, } <NEW_LINE> def __init__(self, *, name: str=None, publisher: str=None, product: str=None, promotion_code: str=None, version: str=None, **kwargs) -> None: <NEW_LINE> <INDENT> super(Plan, self).__init__(**kwargs) <NEW_LINE> self.name = name <NEW_LINE> self.publisher = publisher <NEW_LINE> self.product = product <NEW_LINE> self.promotion_code = promotion_code <NEW_LINE> self.version = version | Plan for the resource.
:param name: The plan ID.
:type name: str
:param publisher: The publisher ID.
:type publisher: str
:param product: The offer ID.
:type product: str
:param promotion_code: The promotion code.
:type promotion_code: str
:param version: The plan's version.
:type version: str | 62598fa1d486a94d0ba2be32 |
class ListFirewallsResultSet(ResultSet): <NEW_LINE> <INDENT> def getJSONFromString(self, str): <NEW_LINE> <INDENT> return json.loads(str) <NEW_LINE> <DEDENT> def get_Response(self): <NEW_LINE> <INDENT> return self._output.get('Response', None) <NEW_LINE> <DEDENT> def get_NewAccessToken(self): <NEW_LINE> <INDENT> return self._output.get('NewAccessToken', None) | A ResultSet with methods tailored to the values returned by the ListFirewalls Choreo.
The ResultSet object is used to retrieve the results of a Choreo execution. | 62598fa110dbd63aa1c70a0a |
class TwoLayerNet(object): <NEW_LINE> <INDENT> def __init__(self, input_dim=3 * 32 * 32, hidden_dim=100, num_classes=10, weight_scale=1e-3, reg=0.0): <NEW_LINE> <INDENT> self.params = {} <NEW_LINE> self.reg = reg <NEW_LINE> if weight_scale: <NEW_LINE> <INDENT> self.params['W1'] = np.random.randn(input_dim, hidden_dim) * weight_scale <NEW_LINE> self.params['W2'] = np.random.randn(hidden_dim, num_classes) * weight_scale <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.params['W1'] = np.random.randn(input_dim, hidden_dim) * np.sqrt(2.0/input_dim) <NEW_LINE> self.params['W2'] = np.random.randn(hidden_dim, num_classes) * np.sqrt(2.0/hidden_dim) <NEW_LINE> <DEDENT> self.params['b1'] = np.zeros((hidden_dim,)) <NEW_LINE> self.params['b2'] = np.zeros((num_classes,)) <NEW_LINE> <DEDENT> def loss(self, X, y=None): <NEW_LINE> <INDENT> scores = None <NEW_LINE> temp1, cache1 = affine_relu_forward(x=X, w=self.params['W1'], b=self.params['b1']) <NEW_LINE> temp2, cache2 = affine_forward(x=temp1, w=self.params['W2'], b=self.params['b2']) <NEW_LINE> scores = temp2 <NEW_LINE> if y is None: <NEW_LINE> <INDENT> return scores <NEW_LINE> <DEDENT> loss, grads = 0, {} <NEW_LINE> loss, dLdscore = softmax_loss(scores, y) <NEW_LINE> dldtemp1, dldW2, dldb2 = affine_backward(dLdscore, cache2) <NEW_LINE> dldx, dldW1, dldb1 = affine_relu_backward(dldtemp1, cache1) <NEW_LINE> grads['W1'] = dldW1 + self.reg * self.params['W1'] <NEW_LINE> grads['W2'] = dldW2 + self.reg * self.params['W2'] <NEW_LINE> grads['b1'] = dldb1 <NEW_LINE> grads['b2'] = dldb2 <NEW_LINE> loss += 0.5 * self.reg * (np.linalg.norm(self.params['W1']) ** 2 + np.linalg.norm(self.params['W2']) ** 2) <NEW_LINE> return loss, grads | A two-layer fully-connected neural network with ReLU nonlinearity and
softmax loss that uses a modular layer design. We assume an input dimension
of D, a hidden dimension of H, and perform classification over C classes.
The architecure should be affine - relu - affine - softmax.
Note that this class does not implement gradient descent; instead, it
will interact with a separate Solver object that is responsible for running
optimization.
The learnable parameters of the model are stored in the dictionary
self.params that maps parameter names to numpy arrays. | 62598fa15f7d997b871f930d |
class SocketCreateResponsePacket(XBeeAPIPacket): <NEW_LINE> <INDENT> __MIN_PACKET_LENGTH = 8 <NEW_LINE> def __init__(self, frame_id, socket_id, status, op_mode=OperatingMode.API_MODE): <NEW_LINE> <INDENT> if frame_id < 0 or frame_id > 255: <NEW_LINE> <INDENT> raise ValueError("Frame ID must be between 0 and 255") <NEW_LINE> <DEDENT> if socket_id < 0 or socket_id > 255: <NEW_LINE> <INDENT> raise ValueError("Socket ID must be between 0 and 255") <NEW_LINE> <DEDENT> super().__init__(ApiFrameType.SOCKET_CREATE_RESPONSE, op_mode=op_mode) <NEW_LINE> self._frame_id = frame_id <NEW_LINE> self.__socket_id = socket_id <NEW_LINE> self.__status = status <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def create_packet(raw, operating_mode): <NEW_LINE> <INDENT> if operating_mode not in (OperatingMode.ESCAPED_API_MODE, OperatingMode.API_MODE): <NEW_LINE> <INDENT> raise InvalidOperatingModeException(op_mode=operating_mode) <NEW_LINE> <DEDENT> XBeeAPIPacket._check_api_packet( raw, min_length=SocketCreateResponsePacket.__MIN_PACKET_LENGTH) <NEW_LINE> if raw[3] != ApiFrameType.SOCKET_CREATE_RESPONSE.code: <NEW_LINE> <INDENT> raise InvalidPacketException( message="This packet is not a Socket Create Response packet.") <NEW_LINE> <DEDENT> return SocketCreateResponsePacket( raw[4], raw[5], SocketStatus.get(raw[6]), op_mode=operating_mode) <NEW_LINE> <DEDENT> def needs_id(self): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> def _get_api_packet_spec_data(self): <NEW_LINE> <INDENT> ret = bytearray() <NEW_LINE> ret.append(self.__socket_id) <NEW_LINE> ret.append(self.__status.code) <NEW_LINE> return ret <NEW_LINE> <DEDENT> def _get_api_packet_spec_data_dict(self): <NEW_LINE> <INDENT> return { DictKeys.SOCKET_ID.value: utils.hex_to_string(bytearray([self.__socket_id])), DictKeys.STATUS.value: "%s (%s)" % (self.__status.code, self.__status.description)} <NEW_LINE> <DEDENT> @property <NEW_LINE> def socket_id(self): <NEW_LINE> <INDENT> return self.__socket_id <NEW_LINE> <DEDENT> 
@socket_id.setter <NEW_LINE> def socket_id(self, socket_id): <NEW_LINE> <INDENT> if socket_id < 0 or socket_id > 255: <NEW_LINE> <INDENT> raise ValueError("Socket ID must be between 0 and 255") <NEW_LINE> <DEDENT> self.__socket_id = socket_id <NEW_LINE> <DEDENT> @property <NEW_LINE> def status(self): <NEW_LINE> <INDENT> return self.__status <NEW_LINE> <DEDENT> @status.setter <NEW_LINE> def status(self, status): <NEW_LINE> <INDENT> self.__status = status | This class represents a Socket Create Response packet. Packet is built using
the parameters of the constructor.
The device sends this frame in response to a Socket Create (0x40) frame. It
contains a socket ID that should be used for future transactions with the
socket and a status field.
If the status field is non-zero, which indicates an error, the socket ID
will be set to 0xFF and the socket will not be opened.
.. seealso::
| :class:`.SocketCreatePacket`
| :class:`.XBeeAPIPacket` | 62598fa17d847024c075c221 |
class OutputFilenameComponent(object): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def can_get_wav_file_name(nameLists: Tuple[List[str], Dict[int, str]]) -> bool: <NEW_LINE> <INDENT> wavFileNames = nameLists[0] <NEW_LINE> filenames = nameLists[1] <NEW_LINE> if len(wavFileNames) > len(filenames): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> @staticmethod <NEW_LINE> def awb_file(path: str) -> int: <NEW_LINE> <INDENT> if os.path.splitext(os.path.basename(path))[1].lower() == ".awb": <NEW_LINE> <INDENT> return 1 <NEW_LINE> <DEDENT> elif os.path.basename(path).lower().endswith("awb.txt"): <NEW_LINE> <INDENT> return 2 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return 0 | docstring | 62598fa167a9b606de545e26 |
class FactoryScriptBase(FactoryProcess): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> default_timeout = kwargs.pop("default_timeout", None) <NEW_LINE> super().__init__(*args, **kwargs) <NEW_LINE> if default_timeout is None: <NEW_LINE> <INDENT> if not sys.platform.startswith(("win", "darwin")): <NEW_LINE> <INDENT> default_timeout = 30 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> default_timeout = 120 <NEW_LINE> <DEDENT> <DEDENT> self.default_timeout = default_timeout <NEW_LINE> self._terminal_timeout_set_explicitly = False <NEW_LINE> <DEDENT> def run(self, *args, **kwargs): <NEW_LINE> <INDENT> start_time = time.time() <NEW_LINE> timeout = kwargs.pop("_timeout", None) <NEW_LINE> self._terminal_timeout = timeout or self.default_timeout <NEW_LINE> self._terminal_timeout_set_explicitly = timeout is not None <NEW_LINE> cmdline = self.build_cmdline(*args, **kwargs) <NEW_LINE> timeout_expire = time.time() + self._terminal_timeout <NEW_LINE> log.info( "%sRunning %r in CWD: %s ...", self.get_log_prefix(), cmdline, self.cwd ) <NEW_LINE> terminal = self.init_terminal(cmdline, cwd=self.cwd, env=self.environ,) <NEW_LINE> timmed_out = False <NEW_LINE> while True: <NEW_LINE> <INDENT> if timeout_expire < time.time(): <NEW_LINE> <INDENT> timmed_out = True <NEW_LINE> break <NEW_LINE> <DEDENT> if terminal.poll() is not None: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> time.sleep(0.25) <NEW_LINE> <DEDENT> result = self.terminate() <NEW_LINE> if timmed_out: <NEW_LINE> <INDENT> raise ProcessTimeout( "{}Failed to run: {}; Error: Timed out after {:.2f} seconds!".format( self.get_log_prefix(), cmdline, time.time() - start_time ), stdout=result.stdout, stderr=result.stderr, cmdline=cmdline, exitcode=result.exitcode, ) <NEW_LINE> <DEDENT> exitcode = result.exitcode <NEW_LINE> stdout, stderr, json_out = self.process_output( result.stdout, result.stderr, cmdline=cmdline ) <NEW_LINE> log.info( "%sCompleted %r in CWD: %s after %.2f seconds", 
self.get_log_prefix(), cmdline, self.cwd, time.time() - start_time, ) <NEW_LINE> return ShellResult(exitcode, stdout, stderr, json=json_out, cmdline=cmdline) <NEW_LINE> <DEDENT> def process_output(self, stdout, stderr, cmdline=None): <NEW_LINE> <INDENT> if stdout: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> json_out = json.loads(stdout) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> log.debug( "%sFailed to load JSON from the following output:\n%r", self.get_log_prefix(), stdout, ) <NEW_LINE> json_out = None <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> json_out = None <NEW_LINE> <DEDENT> return stdout, stderr, json_out | Base class for CLI scripts | 62598fa17d847024c075c222 |
class Pronoun(Nominal):
    """A complete pronominal form.

    This partially corresponds to Panini's **sarvanāman**
    (1.1.26: "sarva" etc. are called `sarvanāman`). However, adjectival
    words like "sarva" and "eka" are stored as adjectives.
    """

    # Shares the parent mapping; no table of its own.
    __tablename__ = None
    __mapper_args__ = dict(polymorphic_identity=Tag.PRONOUN)
class TestUBInt64(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.ubint64 = basic_types.UBInt64() <NEW_LINE> <DEDENT> def test_get_size(self): <NEW_LINE> <INDENT> self.assertEqual(self.ubint64.get_size(), 8) | Test of UBInt64 BasicType. | 62598fa13c8af77a43b67e6e |
class ROCIFField(RegexField):
    """A Romanian fiscal identity code (CIF) field.

    For the CIF validation algorithm see http://www.validari.ro/cui.html
    """

    default_error_messages = {
        'invalid': _("Enter a valid CIF."),
    }

    def __init__(self, max_length=10, min_length=2, *args, **kwargs):
        # NOTE(review): the pattern is not anchored with ``$``, so trailing
        # characters pass the regex and are only rejected later by the
        # checksum loop — confirm this is intended.
        super(ROCIFField, self).__init__(
            r'^(RO)?[0-9]{2,10}', max_length, min_length, *args, **kwargs)

    def clean(self, value):
        """Strip an optional 'RO' prefix and verify the CIF check digit."""
        value = super(ROCIFField, self).clean(value).strip()
        if value in EMPTY_VALUES:
            return ''
        if value.startswith('RO'):
            value = value[2:]
        # Weights 7,5,3,2,1,7,5,3,2 applied to the digits excluding the
        # check digit; both strings are reversed so the pairing aligns
        # from the right.
        weights = iter('753217532'[::-1])
        reversed_value = value[::-1]
        total = 0
        for digit in reversed_value[1:]:
            total += int(digit) * int(next(weights))
        total = total * 10 % 11
        if total == 10:
            total = 0
        if total != int(reversed_value[0]):
            raise ValidationError(self.error_messages['invalid'])
        return reversed_value[::-1]
class Baz_base(Baz_abstract): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.v_base = 8 <NEW_LINE> self.p_base = 9 <NEW_LINE> dic = self.__dict__ <NEW_LINE> print('Baz_base id(__dict__) = {:x}'.format(id(dic))) <NEW_LINE> print('Baz_base.__dict__ =', Baz_base.__dict__) <NEW_LINE> print() | help for Baz_base | 62598fa1435de62698e9bc50 |
class FilterBankLeftRightImagery(FilterBank):
    """Filter Bank Motor Imagery for left hand/right hand classification.

    Metric is 'roc_auc'.
    """

    def __init__(self, **kwargs):
        # The event list is fixed by this paradigm; callers may not override it.
        if 'events' in kwargs:
            raise ValueError('LeftRightImagery dont accept events')
        super().__init__(events=['left_hand', 'right_hand'], **kwargs)

    def used_events(self, dataset):
        """Map each paradigm event name to the dataset's event id."""
        return {name: dataset.event_id[name] for name in self.events}

    @property
    def scoring(self):
        """Scoring string used to evaluate this paradigm."""
        return 'roc_auc'
class Const:
    """Groups the constants shared by every module.

    To import it from a sub-package, prepend the project root to
    ``sys.path`` and then ``from src.constants import Const``.
    """

    LIMIT_SIZE_WEIGHT = 50.0
    # Sentinel marking an empty grid cell.
    GRID_EMPTY_TUPLE = (-1, -1)
    # Input data sets, in canonical order.
    FILE_NAMES = (
        "a_example",
        "b_short_walk",
        "c_going_green",
        "d_wide_selection",
        "e_precise_fit",
        "f_different_footprints",
        "g_test_ulysse",
        "h_test_pierre",
    )
    # Minimum interval (seconds) between progress prints.
    THRESH_TIME_PRINT = 0.1
    SAFE_DIST = 2
    # Field indices of a project record.
    NUM_PROJ = 0
    COORD_PROJ = 1
    CARAC_PROJ = 2
    # Direction codes.
    UP = 0
    LEFT = 1
    DOWN = 2
    RIGHT = 3
class ApplicationGatewayCustomError(msrest.serialization.Model):
    """Customer error of an application gateway.

    :param status_code: Status code of the application gateway customer
     error. Possible values include: "HttpStatus403", "HttpStatus502".
    :type status_code: str
    :param custom_error_page_url: Error page URL of the application gateway
     customer error.
    :type custom_error_page_url: str
    """

    _attribute_map = {
        'status_code': {'key': 'statusCode', 'type': 'str'},
        'custom_error_page_url': {'key': 'customErrorPageUrl', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        super(ApplicationGatewayCustomError, self).__init__(**kwargs)
        # kwargs.get() already yields None when a key is absent.
        self.status_code = kwargs.get('status_code')
        self.custom_error_page_url = kwargs.get('custom_error_page_url')
class TencentCloudSDKException(Exception): <NEW_LINE> <INDENT> def __init__(self, code=None, message=None, requestId=None): <NEW_LINE> <INDENT> self.code = code <NEW_LINE> self.message = message <NEW_LINE> self.requestId = requestId <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "[TencentCloudSDKException] code:%s message:%s requestId:%s" % ( self.code, self.message, self.requestId) <NEW_LINE> <DEDENT> def get_code(self): <NEW_LINE> <INDENT> return self.code <NEW_LINE> <DEDENT> def get_message(self): <NEW_LINE> <INDENT> return self.message <NEW_LINE> <DEDENT> def get_request_id(self): <NEW_LINE> <INDENT> return self.requestId | tencentcloudapi sdk 异常类 | 62598fa16fb2d068a7693d63 |
class Debuff_acid_explosion(Effect): <NEW_LINE> <INDENT> def __init__(self, client, ctx, carrier, team_a, team_b): <NEW_LINE> <INDENT> Effect.__init__( self, client, ctx, carrier, team_a, team_b ) <NEW_LINE> self.name = "Acid explosion" <NEW_LINE> self.icon = self.game_icon['effect']['acid_explosion'] <NEW_LINE> self.id = 3 <NEW_LINE> self.initial_duration = 2 <NEW_LINE> self.duration = 2 <NEW_LINE> self.max_stack = 1 <NEW_LINE> self.stack = 1 <NEW_LINE> <DEDENT> async def apply(self): <NEW_LINE> <INDENT> effect_checker = Effect_checker(self.carrier) <NEW_LINE> reduction = int(0.02 * self.carrier.defense.spirit) <NEW_LINE> acid_ref = await effect_checker.get_effect( 1, self.client, self.ctx, self.carrier, self.team_a, self.team_b ) <NEW_LINE> carrier_acid = await effect_checker.get_debuff(acid_ref) <NEW_LINE> if(carrier_acid != None): <NEW_LINE> <INDENT> reduction = int(reduction * carrier_acid.stack) <NEW_LINE> <DEDENT> self.carrier.defense.spirit -= reduction <NEW_LINE> return | Represents the acid_explosion debuff. | 62598fa13539df3088ecc111 |
class ConsistentHashRing(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self._keys = [] <NEW_LINE> self._nodes = {} <NEW_LINE> <DEDENT> def _hash(self, key): <NEW_LINE> <INDENT> return long(md5.md5(key).hexdigest(), 16) <NEW_LINE> <DEDENT> def __setitem__(self, nodename, node): <NEW_LINE> <INDENT> hash_ = self._hash(nodename) <NEW_LINE> self._nodes[hash_] = node <NEW_LINE> bisect.insort(self._keys, hash_) <NEW_LINE> <DEDENT> def __delitem__(self, nodename): <NEW_LINE> <INDENT> hash_ = self._hash(nodename) <NEW_LINE> del self._nodes[hash_] <NEW_LINE> index = bisect.bisect_left(self._keys, hash_) <NEW_LINE> del self._keys[index] <NEW_LINE> <DEDENT> def __getitem__(self, key): <NEW_LINE> <INDENT> hash_ = self._hash(key) <NEW_LINE> start = bisect.bisect(self._keys, hash_) <NEW_LINE> if start == len(self._keys): <NEW_LINE> <INDENT> start = 0 <NEW_LINE> <DEDENT> return self._nodes[self._keys[start]] | Implement a consistent hashing ring. | 62598fa10a50d4780f705237 |
class Infinity(Benchmark):
    """Infinity objective function.

    A multimodal minimization problem:

    .. math::

        f(x) = \\sum_{i=1}^{n} x_i^{6}
               \\left[ \\sin\\left(\\frac{1}{x_i}\\right) + 2 \\right]

    with :math:`x_i \\in [-1, 1]`. Global optimum :math:`f(x) = 0` at
    :math:`x_i = 0` (stored as 1e-16 to avoid the singularity at zero).

    .. [1] Gavana, A. Global Optimization Benchmarks and AMPGO, 2015.
    """

    def __init__(self, dimensions=2):
        Benchmark.__init__(self, dimensions)
        self._bounds = zip([-1.0] * self.N, [1.0] * self.N)
        self.global_optimum = [[1e-16] * self.N]
        self.fglob = 0.0
        self.change_dimensionality = True

    def fun(self, x, *args):
        self.nfev += 1
        term = x ** 6.0 * (sin(1.0 / x) + 2.0)
        return sum(term)
class ImageFileCreateEntry(Model):
    """ImageFileCreateEntry.

    :param name:
    :type name: str
    :param contents:
    :type contents: bytearray
    :param tag_ids:
    :type tag_ids: list[str]
    :param regions:
    :type regions: list[Region]
    """

    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'contents': {'key': 'contents', 'type': 'bytearray'},
        'tag_ids': {'key': 'tagIds', 'type': '[str]'},
        'regions': {'key': 'regions', 'type': '[Region]'},
    }

    def __init__(self, **kwargs):
        super(ImageFileCreateEntry, self).__init__(**kwargs)
        # Every field defaults to None when absent from kwargs.
        for field in ('name', 'contents', 'tag_ids', 'regions'):
            setattr(self, field, kwargs.get(field))
class TimeSeries(BaseRef):
    """Extend the classic VecRef from Neuronvisio to allocate
    the biochemical results.
    """

    def __init__(self, sec_name=None, vecs=None, detail=None):
        super(TimeSeries, self).__init__()
        # Stored verbatim for later retrieval.
        self.sec_name = sec_name
        self.vecs = vecs
        self.detail = detail
class Statistic(Base):
    """Bot's statistics (one row per sampling)."""

    __tablename__ = "pytwb_statistics"

    statistic_id = Column(BigInteger, primary_key=True, autoincrement=True)
    statistic_friends_count = Column(Integer, nullable=False, default=0)
    statistic_followers_count = Column(Integer, nullable=False, default=0)
    statistic_statuses_count = Column(Integer, nullable=False, default=0)
    # BUGFIX: was ``default=datetime.datetime.now()``, which evaluated once
    # at import time, stamping every later row with the process start time.
    # Passing the callable makes SQLAlchemy invoke it per INSERT.
    statistic_date = Column(DateTime, nullable=False, default=datetime.datetime.now)

    def __str__(self):
        return "Statistic(id={}, friends_count={}, followers_count={}, statuses_count={}, date={})".format(
            self.statistic_id, self.statistic_friends_count,
            self.statistic_followers_count, self.statistic_statuses_count,
            self.statistic_date)

    def __unicode__(self):
        # Python 2 text representation; mirrors __str__.
        return u"Statistic(id={}, friends_count={}, followers_count={}, statuses_count={}, date={})".format(
            self.statistic_id, self.statistic_friends_count,
            self.statistic_followers_count, self.statistic_statuses_count,
            self.statistic_date)
class mean_filter(PluginFunction):
    """Returns the regional mean of an image as a FLOAT.

    *region_size*
        The size of the region in which to calculate a mean.
    """
    category = "Binarization/RegionInformation"
    return_type = ImageType([FLOAT], "output")
    self_type = ImageType([GREYSCALE, GREY16, FLOAT])
    args = Args([Int("region size", default=5)])
    doc_examples = [(GREYSCALE,), (GREY16,), (FLOAT,)]

    @staticmethod
    def __call__(self, region_size=5):
        # Delegate to the underlying _binarization implementation.
        return _binarization.mean_filter(self, region_size)
class TldLegalAgreement(Model):
    """Legal agreement for a top level domain.

    :param agreement_key: Unique identifier for the agreement. Required.
    :type agreement_key: str
    :param title: Agreement title. Required.
    :type title: str
    :param content: Agreement details. Required.
    :type content: str
    :param url: URL where a copy of the agreement details is hosted.
    :type url: str
    """

    # msrest validation metadata: three fields are mandatory.
    _validation = {
        'agreement_key': {'required': True},
        'title': {'required': True},
        'content': {'required': True},
    }

    # Python attribute -> wire-format key mapping.
    _attribute_map = {
        'agreement_key': {'key': 'agreementKey', 'type': 'str'},
        'title': {'key': 'title', 'type': 'str'},
        'content': {'key': 'content', 'type': 'str'},
        'url': {'key': 'url', 'type': 'str'},
    }

    def __init__(self, agreement_key, title, content, url=None):
        self.agreement_key = agreement_key
        self.title = title
        self.content = content
        self.url = url
class InvoiceItem(Domain): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def create(params, *auth_args): <NEW_LINE> <INDENT> return PaymentsApi.create("invoiceItem", auth_args, params) <NEW_LINE> <DEDENT> def delete(self, *auth_args): <NEW_LINE> <INDENT> return PaymentsApi.delete("invoiceItem", auth_args, self.object_id) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def find(object_id, *auth_args): <NEW_LINE> <INDENT> return PaymentsApi.find("invoiceItem", auth_args, object_id) <NEW_LINE> <DEDENT> def update(self, *auth_args): <NEW_LINE> <INDENT> return PaymentsApi.update("invoiceItem", auth_args, self.object_id, self.to_dict()) | A InvoiceItem object. | 62598fa1bd1bec0571e14ff2 |
class GLgetPlayersInLobby_result:
    """Thrift result struct for GLgetPlayersInLobby.

    Attributes:
     - success
    """

    thrift_spec = (
        (0, TType.LIST, 'success', (TType.STRUCT,(GLPlayer, GLPlayer.thrift_spec)), None, ),
    )

    def __init__(self, success=None,):
        self.success = success

    def read(self, iprot):
        # Fast path: the accelerated binary protocol decodes the struct in C.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 0 and ftype == TType.LIST:
                self.success = []
                (_etype, list_size) = iprot.readListBegin()
                for _ in xrange(list_size):
                    player = GLPlayer()
                    player.read(iprot)
                    self.success.append(player)
                iprot.readListEnd()
            else:
                # Unknown field or unexpected type: skip it.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        # Fast path mirroring read().
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('GLgetPlayersInLobby_result')
        if self.success is not None:
            oprot.writeFieldBegin('success', TType.LIST, 0)
            oprot.writeListBegin(TType.STRUCT, len(self.success))
            for player in self.success:
                player.write(oprot)
            oprot.writeListEnd()
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # Generated result structs perform no extra validation.
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value) for key, value in self.__dict__.iteritems()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.