code stringlengths 4 4.48k | docstring stringlengths 1 6.45k | _id stringlengths 24 24 |
|---|---|---|
class MetacategorieCreateView(CreateView): <NEW_LINE> <INDENT> model = Categorie <NEW_LINE> template_name = 'create.html' <NEW_LINE> fields = ['categorie'] <NEW_LINE> success_url = reverse_lazy('administration') <NEW_LINE> def form_valid(self, form): <NEW_LINE> <INDENT> user = self.request.user <NEW_LINE> form.instance.proprietaire = user <NEW_LINE> form.instance.ismeta = True <NEW_LINE> return super(MetacategorieCreateView, self).form_valid(form) | Création d'une nouvelle métacatégorie | 62598fd055399d3f05626973 |
class Cuisine(BaseModel): <NEW_LINE> <INDENT> name = models.CharField(max_length=255) <NEW_LINE> classification = models.ForeignKey( Classification, models.PROTECT, related_name='cuisine') <NEW_LINE> ingestion_kcal = models.IntegerField( blank=True, null=True, validators=[MinValueValidator(1), MaxValueValidator(9999)]) <NEW_LINE> serves = models.IntegerField( blank=True, null=True, validators=[MinValueValidator(1), MaxValueValidator(999)]) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return self.name <NEW_LINE> <DEDENT> class Meta: <NEW_LINE> <INDENT> db_table = 'cuisine' <NEW_LINE> verbose_name = 'メニュー' <NEW_LINE> verbose_name_plural = 'メニュー' | メニュー | 62598fd08a349b6b4368669a |
class BasePosition: <NEW_LINE> <INDENT> def __init__(self, cash=0.0, *args, **kwargs): <NEW_LINE> <INDENT> self._settle_type = self.ST_NO <NEW_LINE> <DEDENT> def skip_update(self) -> bool: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> def check_stock(self, stock_id: str) -> bool: <NEW_LINE> <INDENT> raise NotImplementedError(f"Please implement the `check_stock` method") <NEW_LINE> <DEDENT> def update_order(self, order: Order, trade_val: float, cost: float, trade_price: float): <NEW_LINE> <INDENT> raise NotImplementedError(f"Please implement the `update_order` method") <NEW_LINE> <DEDENT> def update_stock_price(self, stock_id, price: float): <NEW_LINE> <INDENT> raise NotImplementedError(f"Please implement the `update stock price` method") <NEW_LINE> <DEDENT> def calculate_stock_value(self) -> float: <NEW_LINE> <INDENT> raise NotImplementedError(f"Please implement the `calculate_stock_value` method") <NEW_LINE> <DEDENT> def get_stock_list(self) -> List: <NEW_LINE> <INDENT> raise NotImplementedError(f"Please implement the `get_stock_list` method") <NEW_LINE> <DEDENT> def get_stock_price(self, code) -> float: <NEW_LINE> <INDENT> raise NotImplementedError(f"Please implement the `get_stock_price` method") <NEW_LINE> <DEDENT> def get_stock_amount(self, code) -> float: <NEW_LINE> <INDENT> raise NotImplementedError(f"Please implement the `get_stock_amount` method") <NEW_LINE> <DEDENT> def get_cash(self, include_settle: bool = False) -> float: <NEW_LINE> <INDENT> raise NotImplementedError(f"Please implement the `get_cash` method") <NEW_LINE> <DEDENT> def get_stock_amount_dict(self) -> Dict: <NEW_LINE> <INDENT> raise NotImplementedError(f"Please implement the `get_stock_amount_dict` method") <NEW_LINE> <DEDENT> def get_stock_weight_dict(self, only_stock: bool = False) -> Dict: <NEW_LINE> <INDENT> raise NotImplementedError(f"Please implement the `get_stock_weight_dict` method") <NEW_LINE> <DEDENT> def add_count_all(self, bar): <NEW_LINE> <INDENT> raise 
NotImplementedError(f"Please implement the `add_count_all` method") <NEW_LINE> <DEDENT> def update_weight_all(self): <NEW_LINE> <INDENT> raise NotImplementedError(f"Please implement the `add_count_all` method") <NEW_LINE> <DEDENT> ST_CASH = "cash" <NEW_LINE> ST_NO = None <NEW_LINE> def settle_start(self, settle_type: str): <NEW_LINE> <INDENT> raise NotImplementedError(f"Please implement the `settle_conf` method") <NEW_LINE> <DEDENT> def settle_commit(self): <NEW_LINE> <INDENT> raise NotImplementedError(f"Please implement the `settle_commit` method") <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.__dict__.__str__() <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.__dict__.__repr__() | The Position want to maintain the position like a dictionary
Please refer to the `Position` class for the position | 62598fd03617ad0b5ee065a1 |
class EditUserNameRepeatValidate(object): <NEW_LINE> <INDENT> def __init__(self, message=None): <NEW_LINE> <INDENT> self.message = message <NEW_LINE> <DEDENT> def __call__(self, form, field): <NEW_LINE> <INDENT> condition = [ User.name == field.data, User.id != form.id.data, ] <NEW_LINE> row = get_user_row(*condition) <NEW_LINE> if row: <NEW_LINE> <INDENT> raise ValidationError(self.message or _('Data duplication')) | 编辑用户名称重复校验
(编辑重复校验排除当前用户名称) | 62598fd04527f215b58ea329 |
class ParquetSerDe(AWSProperty): <NEW_LINE> <INDENT> props: PropsDictType = { "BlockSizeBytes": (integer, False), "Compression": (str, False), "EnableDictionaryCompression": (boolean, False), "MaxPaddingBytes": (integer, False), "PageSizeBytes": (integer, False), "WriterVersion": (str, False), } | `ParquetSerDe <http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-kinesisfirehose-deliverystream-parquetserde.html>`__ | 62598fd03d592f4c4edbb30f |
class DeleteSingleCheckmarksTest(TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.one = Checkmarks.objects.create(name='one', value=True) <NEW_LINE> self.two = Checkmarks.objects.create(name='two', value=False) <NEW_LINE> <DEDENT> def test_valid_delete_checkmark(self): <NEW_LINE> <INDENT> response = client.delete( reverse('checkmarks-detail', kwargs={'pk': self.one.pk})) <NEW_LINE> self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT) <NEW_LINE> <DEDENT> def test_invalid_delete_checkmark(self): <NEW_LINE> <INDENT> response = client.delete( reverse('checkmarks-detail', kwargs={'pk': 30})) <NEW_LINE> self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND) | Test module for deleting an existing checkmark record | 62598fd0099cdd3c6367560c |
class Seq2SeqEncoder(nn.Module): <NEW_LINE> <INDENT> def __init__(self, rnn_type, input_size, hidden_size, num_layers=1, bias=True, dropout=0.0, bidirectional=False): <NEW_LINE> <INDENT> assert issubclass(rnn_type, nn.RNNBase), "rnn_type must be a class inheriting from torch.nn.RNNBase" <NEW_LINE> super(Seq2SeqEncoder, self).__init__() <NEW_LINE> self.rnn_type = rnn_type <NEW_LINE> self.input_size = input_size <NEW_LINE> self.hidden_size = hidden_size <NEW_LINE> self.num_layers = num_layers <NEW_LINE> self.bias = bias <NEW_LINE> self.dropout = dropout <NEW_LINE> self.bidirectional = bidirectional <NEW_LINE> self._encoder = rnn_type(input_size, hidden_size, num_layers=num_layers, bias=bias, batch_first=True, dropout=dropout, bidirectional=bidirectional) <NEW_LINE> <DEDENT> def forward(self, sequences_batch, sequences_lengths): <NEW_LINE> <INDENT> sorted_batch, sorted_lengths, _, restoration_idx = sort_by_seq_lens(sequences_batch, sequences_lengths) <NEW_LINE> packed_batch = nn.utils.rnn.pack_padded_sequence(sorted_batch.cpu(), sorted_lengths.cpu(), batch_first=True).cuda() <NEW_LINE> outputs, (last_hidden, _) = self._encoder(packed_batch, None) <NEW_LINE> last_hidden = last_hidden.permute(1, 0, 2).reshape(last_hidden.shape[1], -1) <NEW_LINE> outputs, _ = nn.utils.rnn.pad_packed_sequence(outputs, batch_first=True) <NEW_LINE> restoration_idx = torch.tensor(restoration_idx).cuda() <NEW_LINE> reordered_outputs = outputs.index_select(0, restoration_idx) <NEW_LINE> last_hidden = last_hidden.index_select(0, restoration_idx) <NEW_LINE> return reordered_outputs, last_hidden | RNN taking variable length padded sequences of vectors as input and
encoding them into padded sequences of vectors of the same length.
This module is useful to handle batches of padded sequences of vectors
that have different lengths and that need to be passed through a RNN.
The sequences are sorted in descending order of their lengths, packed,
passed through the RNN, and the resulting sequences are then padded and
permuted back to the original order of the input sequences. | 62598fd0ad47b63b2c5a7cb6 |
class MyUDPHandler(socketserver.BaseRequestHandler): <NEW_LINE> <INDENT> clienthello = "" <NEW_LINE> ddid = 0 <NEW_LINE> ctx = "" <NEW_LINE> device_id = "" <NEW_LINE> dname = "" <NEW_LINE> connmode = "udp" <NEW_LINE> blocked_from_client_list = [] <NEW_LINE> blocked_from_cloud_list = [] <NEW_LINE> Cloudi = CloudClient() <NEW_LINE> Cloudi_lock = threading.Lock() <NEW_LINE> def send_data_to_cloud(self, data): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> def send_data_to_client(self, data): <NEW_LINE> <INDENT> self.socket.sendto(data, self.client_address) <NEW_LINE> <DEDENT> def send_data_to_client_and_wait_for_result(self, data, msgid, timeout=5.0): <NEW_LINE> <INDENT> ev = threading.Event() <NEW_LINE> self.Cloudi.expect_message(msgid, ev) <NEW_LINE> self.send_data_to_client(data) <NEW_LINE> return ev.wait(timeout) <NEW_LINE> <DEDENT> def handle(self): <NEW_LINE> <INDENT> data = self.request[0].strip() <NEW_LINE> self.socket = self.request[1] <NEW_LINE> self.Cloudi_lock.acquire() <NEW_LINE> self.Cloudi = MyUDPHandler.Cloudi <NEW_LINE> thread_id = threading.get_ident() <NEW_LINE> print(" --------------- Thread-id: {} ({})".format(thread_id, self)) <NEW_LINE> print("{}:{} via udp wrote:".format(*self.client_address)) <NEW_LINE> if len(data) == 0: <NEW_LINE> <INDENT> print("Connection closed") <NEW_LINE> <DEDENT> elif len(data) < 32: <NEW_LINE> <INDENT> print("len < 32, discarding message") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if data[0:2] == bytes.fromhex("2131"): <NEW_LINE> <INDENT> self.Cloudi.process_data(self, data) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print("Unknown message: {}".format(data)) <NEW_LINE> <DEDENT> <DEDENT> self.Cloudi_lock.release() <NEW_LINE> print(" --------------- Thread-id: %s closed" % thread_id) | This class works similar to the TCP handler class, except that
self.request consists of a pair of data and client socket, and since
there is no connection the client address must be given explicitly
when sending data back via sendto(). | 62598fd0ec188e330fdf8cf1 |
class Reservoir(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self._lock = threading.Lock() <NEW_LINE> self._quota = None <NEW_LINE> self._TTL = None <NEW_LINE> self._this_sec = 0 <NEW_LINE> self._taken_this_sec = 0 <NEW_LINE> self._borrowed_this_sec = 0 <NEW_LINE> self._report_interval = 1 <NEW_LINE> self._report_elapsed = 0 <NEW_LINE> <DEDENT> def borrow_or_take(self, now, can_borrow): <NEW_LINE> <INDENT> with self._lock: <NEW_LINE> <INDENT> return self._borrow_or_take(now, can_borrow) <NEW_LINE> <DEDENT> <DEDENT> def load_quota(self, quota, TTL, interval): <NEW_LINE> <INDENT> if quota is not None: <NEW_LINE> <INDENT> self._quota = quota <NEW_LINE> <DEDENT> if TTL is not None: <NEW_LINE> <INDENT> self._TTL = TTL <NEW_LINE> <DEDENT> if interval is not None: <NEW_LINE> <INDENT> self._report_interval = interval / 10 <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def quota(self): <NEW_LINE> <INDENT> return self._quota <NEW_LINE> <DEDENT> @property <NEW_LINE> def TTL(self): <NEW_LINE> <INDENT> return self._TTL <NEW_LINE> <DEDENT> def _time_to_report(self): <NEW_LINE> <INDENT> if self._report_elapsed + 1 >= self._report_interval: <NEW_LINE> <INDENT> self._report_elapsed = 0 <NEW_LINE> return True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._report_elapsed += 1 <NEW_LINE> <DEDENT> <DEDENT> def _borrow_or_take(self, now, can_borrow): <NEW_LINE> <INDENT> self._adjust_this_sec(now) <NEW_LINE> if (self._quota is not None and self._quota >= 0 and self._TTL is not None and self._TTL >= now): <NEW_LINE> <INDENT> if(self._taken_this_sec >= self._quota): <NEW_LINE> <INDENT> return ReservoirDecision.NO <NEW_LINE> <DEDENT> self._taken_this_sec = self._taken_this_sec + 1 <NEW_LINE> return ReservoirDecision.TAKE <NEW_LINE> <DEDENT> if can_borrow: <NEW_LINE> <INDENT> if self._borrowed_this_sec >= 1: <NEW_LINE> <INDENT> return ReservoirDecision.NO <NEW_LINE> <DEDENT> self._borrowed_this_sec = self._borrowed_this_sec + 1 <NEW_LINE> return 
ReservoirDecision.BORROW <NEW_LINE> <DEDENT> <DEDENT> def _adjust_this_sec(self, now): <NEW_LINE> <INDENT> if now != self._this_sec: <NEW_LINE> <INDENT> self._taken_this_sec = 0 <NEW_LINE> self._borrowed_this_sec = 0 <NEW_LINE> self._this_sec = now | Centralized thread-safe reservoir which holds fixed sampling
quota, borrowed count and TTL. | 62598fd155399d3f05626975 |
class UsageKey(CourseObjectMixin, OpaqueKey): <NEW_LINE> <INDENT> KEY_TYPE = 'usage_key' <NEW_LINE> __slots__ = () <NEW_LINE> @property <NEW_LINE> @abstractmethod <NEW_LINE> def definition_key(self): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> @property <NEW_LINE> @abstractmethod <NEW_LINE> def block_type(self): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> @property <NEW_LINE> @abstractmethod <NEW_LINE> def block_id(self): <NEW_LINE> <INDENT> raise NotImplementedError() <NEW_LINE> <DEDENT> @property <NEW_LINE> def context_key(self): <NEW_LINE> <INDENT> return self.course_key | An :class:`opaque_keys.OpaqueKey` identifying an XBlock usage. | 62598fd17cff6e4e811b5e84 |
class TLSSocketServerMixIn: <NEW_LINE> <INDENT> def finish_request(self, sock, client_address): <NEW_LINE> <INDENT> tlsConnection = TLSConnection(sock) <NEW_LINE> if self.handshake(tlsConnection) == True: <NEW_LINE> <INDENT> self.RequestHandlerClass(tlsConnection, client_address, self) <NEW_LINE> tlsConnection.close() <NEW_LINE> <DEDENT> <DEDENT> def handshake(self, tlsConnection): <NEW_LINE> <INDENT> raise NotImplementedError() | This class can be mixed in with any L{SocketServer.TCPServer} to
add TLS support.
To use this class, define a new class that inherits from it and
some L{SocketServer.TCPServer} (with the mix-in first). Then
implement the handshake() method, doing some sort of server
handshake on the connection argument. If the handshake method
returns True, the RequestHandler will be triggered. Below is a
complete example of a threaded HTTPS server::
from SocketServer import *
from BaseHTTPServer import *
from SimpleHTTPServer import *
from tlslite.api import *
s = open("./serverX509Cert.pem").read()
x509 = X509()
x509.parse(s)
certChain = X509CertChain([x509])
s = open("./serverX509Key.pem").read()
privateKey = parsePEMKey(s, private=True)
sessionCache = SessionCache()
class MyHTTPServer(ThreadingMixIn, TLSSocketServerMixIn,
HTTPServer):
def handshake(self, tlsConnection):
try:
tlsConnection.handshakeServer(certChain=certChain,
privateKey=privateKey,
sessionCache=sessionCache)
tlsConnection.ignoreAbruptClose = True
return True
except TLSError, error:
print "Handshake failure:", str(error)
return False
httpd = MyHTTPServer(('localhost', 443), SimpleHTTPRequestHandler)
httpd.serve_forever() | 62598fd13d592f4c4edbb311 |
class FirmwareAssociation(object): <NEW_LINE> <INDENT> openapi_types = { 'firmwarename': 'str', 'nodes': 'list[str]' } <NEW_LINE> attribute_map = { 'firmwarename': 'firmwarename', 'nodes': 'nodes' } <NEW_LINE> def __init__(self, firmwarename=None, nodes=None): <NEW_LINE> <INDENT> self._firmwarename = None <NEW_LINE> self._nodes = None <NEW_LINE> self.discriminator = None <NEW_LINE> if firmwarename is not None: <NEW_LINE> <INDENT> self.firmwarename = firmwarename <NEW_LINE> <DEDENT> if nodes is not None: <NEW_LINE> <INDENT> self.nodes = nodes <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def firmwarename(self): <NEW_LINE> <INDENT> return self._firmwarename <NEW_LINE> <DEDENT> @firmwarename.setter <NEW_LINE> def firmwarename(self, firmwarename): <NEW_LINE> <INDENT> self._firmwarename = firmwarename <NEW_LINE> <DEDENT> @property <NEW_LINE> def nodes(self): <NEW_LINE> <INDENT> return self._nodes <NEW_LINE> <DEDENT> @nodes.setter <NEW_LINE> def nodes(self, nodes): <NEW_LINE> <INDENT> self._nodes = nodes <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in six.iteritems(self.openapi_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pprint.pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if 
not isinstance(other, FirmwareAssociation): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other | NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually. | 62598fd197e22403b383b365 |
class EmotionScores(object): <NEW_LINE> <INDENT> def __init__(self, anger=None, disgust=None, fear=None, joy=None, sadness=None): <NEW_LINE> <INDENT> self.anger = anger <NEW_LINE> self.disgust = disgust <NEW_LINE> self.fear = fear <NEW_LINE> self.joy = joy <NEW_LINE> self.sadness = sadness <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def _from_dict(cls, _dict): <NEW_LINE> <INDENT> args = {} <NEW_LINE> if 'anger' in _dict: <NEW_LINE> <INDENT> args['anger'] = _dict['anger'] <NEW_LINE> <DEDENT> if 'disgust' in _dict: <NEW_LINE> <INDENT> args['disgust'] = _dict['disgust'] <NEW_LINE> <DEDENT> if 'fear' in _dict: <NEW_LINE> <INDENT> args['fear'] = _dict['fear'] <NEW_LINE> <DEDENT> if 'joy' in _dict: <NEW_LINE> <INDENT> args['joy'] = _dict['joy'] <NEW_LINE> <DEDENT> if 'sadness' in _dict: <NEW_LINE> <INDENT> args['sadness'] = _dict['sadness'] <NEW_LINE> <DEDENT> return cls(**args) <NEW_LINE> <DEDENT> def _to_dict(self): <NEW_LINE> <INDENT> _dict = {} <NEW_LINE> if hasattr(self, 'anger') and self.anger is not None: <NEW_LINE> <INDENT> _dict['anger'] = self.anger <NEW_LINE> <DEDENT> if hasattr(self, 'disgust') and self.disgust is not None: <NEW_LINE> <INDENT> _dict['disgust'] = self.disgust <NEW_LINE> <DEDENT> if hasattr(self, 'fear') and self.fear is not None: <NEW_LINE> <INDENT> _dict['fear'] = self.fear <NEW_LINE> <DEDENT> if hasattr(self, 'joy') and self.joy is not None: <NEW_LINE> <INDENT> _dict['joy'] = self.joy <NEW_LINE> <DEDENT> if hasattr(self, 'sadness') and self.sadness is not None: <NEW_LINE> <INDENT> _dict['sadness'] = self.sadness <NEW_LINE> <DEDENT> return _dict <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return json.dumps(self._to_dict(), indent=2) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, self.__class__): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other | 
EmotionScores.
:attr float anger: (optional) Anger score from 0 to 1. A higher score means that the text is more likely to convey anger.
:attr float disgust: (optional) Disgust score from 0 to 1. A higher score means that the text is more likely to convey disgust.
:attr float fear: (optional) Fear score from 0 to 1. A higher score means that the text is more likely to convey fear.
:attr float joy: (optional) Joy score from 0 to 1. A higher score means that the text is more likely to convey joy.
:attr float sadness: (optional) Sadness score from 0 to 1. A higher score means that the text is more likely to convey sadness. | 62598fd155399d3f05626977 |
class OAuthFixture: <NEW_LINE> <INDENT> def __init__(self, hass, aiohttp_client, aioclient_mock): <NEW_LINE> <INDENT> self.hass = hass <NEW_LINE> self.aiohttp_client = aiohttp_client <NEW_LINE> self.aioclient_mock = aioclient_mock <NEW_LINE> <DEDENT> async def async_oauth_flow(self, result): <NEW_LINE> <INDENT> state = config_entry_oauth2_flow._encode_jwt( self.hass, { "flow_id": result["flow_id"], "redirect_uri": "https://example.com/auth/external/callback", }, ) <NEW_LINE> oauth_authorize = OAUTH2_AUTHORIZE.format(project_id=PROJECT_ID) <NEW_LINE> assert result["type"] == "external" <NEW_LINE> assert result["url"] == ( f"{oauth_authorize}?response_type=code&client_id={CLIENT_ID}" "&redirect_uri=https://example.com/auth/external/callback" f"&state={state}&scope=https://www.googleapis.com/auth/sdm.service" "+https://www.googleapis.com/auth/pubsub" "&access_type=offline&prompt=consent" ) <NEW_LINE> client = await self.aiohttp_client(self.hass.http.app) <NEW_LINE> resp = await client.get(f"/auth/external/callback?code=abcd&state={state}") <NEW_LINE> assert resp.status == 200 <NEW_LINE> assert resp.headers["content-type"] == "text/html; charset=utf-8" <NEW_LINE> self.aioclient_mock.post( OAUTH2_TOKEN, json={ "refresh_token": "mock-refresh-token", "access_token": "mock-access-token", "type": "Bearer", "expires_in": 60, }, ) <NEW_LINE> with patch( "homeassistant.components.nest.async_setup_entry", return_value=True ) as mock_setup: <NEW_LINE> <INDENT> await self.hass.config_entries.flow.async_configure(result["flow_id"]) <NEW_LINE> assert len(mock_setup.mock_calls) == 1 | Simulate the oauth flow used by the config flow. | 62598fd18a349b6b4368669e |
class TestBlacklistChangeForm(BlackWhitelistEditFormMixin, TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> super(TestBlacklistChangeForm, self).setUp() <NEW_LINE> self.factory = BlacklistFactory <NEW_LINE> self.form = BlacklistedNumberEditForm | Exercise Blacklist number editing | 62598fd1ab23a570cc2d4f9c |
class TagsFactory(JSBASE): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.__jslocation__ = "j.data.tags" <NEW_LINE> JSBASE.__init__(self) <NEW_LINE> <DEDENT> def getObject(self, tagstring="", setFunction4Tagstring=None, keepcase=False): <NEW_LINE> <INDENT> return Tags(tagstring, setFunction4Tagstring, keepcase=keepcase) <NEW_LINE> <DEDENT> def getTagString(self, labels=None, tags=None): <NEW_LINE> <INDENT> labels = labels or set() <NEW_LINE> tags = tags or dict() <NEW_LINE> if not isinstance(labels, set): <NEW_LINE> <INDENT> raise TypeError("labels must be of type set") <NEW_LINE> <DEDENT> if not isinstance(tags, dict): <NEW_LINE> <INDENT> raise TypeError("tags must be of type dict") <NEW_LINE> <DEDENT> t = Tags() <NEW_LINE> t.labels = labels <NEW_LINE> t.tags = tags <NEW_LINE> return str(t) | Factory Class of dealing with TAGS | 62598fd13d592f4c4edbb313 |
class ScoreForm(messages.Message): <NEW_LINE> <INDENT> user_name = messages.StringField(1, required=True) <NEW_LINE> date = messages.StringField(2, required=True) <NEW_LINE> won = messages.BooleanField(3, required=True) <NEW_LINE> attempts_remaining = messages.IntegerField(4, required=True) | outbound Score information | 62598fd1099cdd3c6367560e |
class TreeViewDelegate(QtWidgets.QStyledItemDelegate): <NEW_LINE> <INDENT> def __init__(self, tree): <NEW_LINE> <INDENT> QtWidgets.QStyledItemDelegate.__init__(self) <NEW_LINE> self.tree = tree <NEW_LINE> <DEDENT> def paint(self, painter, option, index): <NEW_LINE> <INDENT> item = self.tree.model.itemFromIndex(index.sibling(index.row(), index.column())) <NEW_LINE> painted = False <NEW_LINE> if hasattr(item, "paint"): <NEW_LINE> <INDENT> painted = item.paint(painter, option, index) <NEW_LINE> <DEDENT> if painted is False: <NEW_LINE> <INDENT> super(TreeViewDelegate, self).paint(painter, option, index) <NEW_LINE> <DEDENT> <DEDENT> def sizeHint(self, option, index): <NEW_LINE> <INDENT> if not index: <NEW_LINE> <INDENT> return QtCore.QSize(0, 0) <NEW_LINE> <DEDENT> item = self.tree.model.itemFromIndex(index.sibling(index.row(), index.column())) <NEW_LINE> size = super(TreeViewDelegate, self).sizeHint(option, index) <NEW_LINE> if item.is_root: <NEW_LINE> <INDENT> size.setHeight(24) <NEW_LINE> return size <NEW_LINE> <DEDENT> return size | Extended QStyledItemDelegate class for providing a custom painter for CommonStandardItems in the tree view
Args:
tree (pe_tree.tree.PETree): PE Tree | 62598fd1fbf16365ca79451c |
class Text(Descriptor, CodecDescriptor): <NEW_LINE> <INDENT> def __init__(self, tag, codec=None, xmlns=None, required=False, multiple=False, encoder=None, decoder=None): <NEW_LINE> <INDENT> Descriptor.__init__(self, tag, xmlns=xmlns, required=required, multiple=multiple) <NEW_LINE> CodecDescriptor.__init__(self, codec=codec, encoder=encoder, decoder=decoder) <NEW_LINE> <DEDENT> def __get__(self, obj, cls=None): <NEW_LINE> <INDENT> if isinstance(obj, Element): <NEW_LINE> <INDENT> if self.multiple: <NEW_LINE> <INDENT> return ElementList(obj, self, string_type) <NEW_LINE> <DEDENT> <DEDENT> return super(Text, self).__get__(obj, cls) <NEW_LINE> <DEDENT> def start_element(self, element, attribute): <NEW_LINE> <INDENT> return element <NEW_LINE> <DEDENT> def end_element(self, reserved_value, content): <NEW_LINE> <INDENT> content = self.decode(content, reserved_value) <NEW_LINE> if self.multiple: <NEW_LINE> <INDENT> reserved_value._data.setdefault(self, []).append(content) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> reserved_value._data.setdefault(self, content) | Descriptor that declares a possible child element that only cosists
of character data. All other attributes and child nodes are ignored.
:param tag: the XML tag name
:type tag: :class:`str`
:param codec: an optional codec object to use. if it's callable and
not an instance of :class:`Codec`, its return value will
be used instead. it means this can take class object of
:class:`Codec` subtype that is not instantiated yet
unless the constructor require any arguments
:type codec: :class:`Codec`, :class:`collections.Callable`
:param xmlns: an optional XML namespace URI
:type xmlns: :class:`str`
:param required: whether the child is required or not.
it's exclusive to ``multiple``.
:const:`False` by default
:type required: :class:`bool`
:param multiple: whether the child can be multiple.
it's exclusive to ``required``.
:const:`False` by default
:type multiple: :class:`bool`
:param encoder: an optional function that encodes Python value into
XML text value e.g. :func:`str()`. the encoder function
has to take an argument
:type encoder: :class:`collections.Callable`
:param decoder: an optional function that decodes XML text value into
Python value e.g. :func:`int()`. the decoder function
has to take a string argument
:type decoder: :class:`collections.Callable` | 62598fd14527f215b58ea32f |
class ManageIngredient(ListCreateViewSet): <NEW_LINE> <INDENT> serializer_class = IngredientSerializer <NEW_LINE> queryset = Ingredient.objects.all() | manage the ingredient objects. | 62598fd1956e5f7376df58ad |
class DETR(tf.keras.Model): <NEW_LINE> <INDENT> def __init__(self,d_model = 256,nhead = 8,num_decoder_layer = 6,num_encoder_layer = 6,num_classes = 1000,num_query = 100): <NEW_LINE> <INDENT> super(DETR, self).__init__() <NEW_LINE> self.d_model = d_model <NEW_LINE> self.backbone = ResNet50(include_top = False,input_shape=(224,224,3),weights=None) <NEW_LINE> self.conv2d = tf.keras.layers.Conv2D(d_model,1) <NEW_LINE> self.encoder = Encoder(d_model,num_encoder_layer,nhead) <NEW_LINE> self.decoder = Decoder(d_model,num_decoder_layer,nhead) <NEW_LINE> self.linear_class = tf.keras.layers.Dense(num_classes+1,activation='softmax') <NEW_LINE> self.linear_bbox = tf.keras.layers.Dense(4,activation='sigmoid') <NEW_LINE> self.query_pos = tf.constant(tf.random.uniform(shape=(max_length,num_query,d_model))) <NEW_LINE> <DEDENT> def call(self,inputs,training=True): <NEW_LINE> <INDENT> x = self.backbone(inputs) <NEW_LINE> x = self.conv2d(x) <NEW_LINE> print(x.shape) <NEW_LINE> size = x.get_shape().as_list() <NEW_LINE> x = tf.keras.layers.Reshape((size[1]*size[2],self.d_model))(x) <NEW_LINE> print(x.shape) <NEW_LINE> encoder_output, _ = self.encoder(x,training=training) <NEW_LINE> print(encoder_output.shape) <NEW_LINE> logits, _, _ = self.decoder(self.query_pos[:tf.shape(encoder_output)[0],:,:],encoder_output,training=training) <NEW_LINE> pred_logits = self.linear_class(logits) <NEW_LINE> pred_boxes = self.linear_bbox(logits) <NEW_LINE> return pred_logits,pred_boxes | Class for the DETR
Args:
d_model: d_model in the paper (depth size of the model) (default = 256)
num_decoder_layer: number of layer in decoder (default = 6)
num_encoder_layer: number of layer in encoder (default = 6)
nhead: number of heads in multiattention layer (default = 8)
num_classes: number of classes in output (default = 1000)
num_query: number of queries in output (default = 100) | 62598fd1dc8b845886d53a1e |
class DebugWrapper(object): <NEW_LINE> <INDENT> def __init__(self, runnable, prompt): <NEW_LINE> <INDENT> import pdb <NEW_LINE> self.debugger = pdb.Pdb() <NEW_LINE> self.debugger.prompt = prompt <NEW_LINE> self.runnable = runnable <NEW_LINE> if hasattr(runnable, "func_name"): <NEW_LINE> <INDENT> self.__name__ = runnable.__name__ <NEW_LINE> <DEDENT> <DEDENT> def __call__(self, oargs, **nargs): <NEW_LINE> <INDENT> return self.debugger.runcall(self.runnable, *oargs, **nargs) | Wrapper to run a thread inside the debugger | 62598fd1bf627c535bcb190f |
class FakeRequest(object): <NEW_LINE> <INDENT> def __init__(self, user, sessionDict=None, method='POST', dataDict=None, path='/ct/somewhere/'): <NEW_LINE> <INDENT> self.user = user <NEW_LINE> self.path = path <NEW_LINE> self.method = method <NEW_LINE> if not sessionDict: <NEW_LINE> <INDENT> sessionDict = {} <NEW_LINE> <DEDENT> self.session = sessionDict <NEW_LINE> if not dataDict: <NEW_LINE> <INDENT> dataDict = {} <NEW_LINE> <DEDENT> setattr(self, method, dataDict) | trivial holder for request data to pass to test calls | 62598fd1cc40096d6161a408 |
class ImageExtension(markdown.Extension): <NEW_LINE> <INDENT> def extendMarkdown(self, md, md_globals): <NEW_LINE> <INDENT> md.preprocessors.add('dw-images', ImagePreprocessor(md), '>html_block') <NEW_LINE> md.postprocessors.add('dw-images-cleanup', ImagePostprocessor(md), '>raw_html') | Images plugin markdown extension for django-wiki. | 62598fd10fa83653e46f534a |
class UserStatisticViewSet(mixins.ListModelMixin, viewsets.GenericViewSet): <NEW_LINE> <INDENT> filter_backends = (DjangoFilterBackend,) <NEW_LINE> filterset_class = UserStatisticFilterSet <NEW_LINE> queryset = Ticket.objects.all() <NEW_LINE> def list(self, request, **kwargs): <NEW_LINE> <INDENT> queryset = self.filter_queryset(self.get_queryset()).values('state').annotate( cnt=Count('state') ).values('state', 'cnt') <NEW_LINE> return Response(queryset) | Докстринг статистики тикетов по пользователям | 62598fd1283ffb24f3cf3ce8 |
class ModFidMismatchError(ModError): <NEW_LINE> <INDENT> def __init__(self, in_name, debug_str, fid_expected, fid_actual): <NEW_LINE> <INDENT> debug_str = _join_sigs(debug_str) <NEW_LINE> message_form = f'{debug_str}: FormIDs do not match - expected ' f'{fid_expected!r} but got {fid_actual!r}' <NEW_LINE> super(ModFidMismatchError, self).__init__(in_name, message_form) | Mod Error: Two FormIDs that should be equal are not. | 62598fd1dc8b845886d53a20 |
class Pen: <NEW_LINE> <INDENT> color = _get_hslf_color(0.5, 0.75, 0.5) <NEW_LINE> selection_color = QColor(0, 120, 255) <NEW_LINE> get_red_pen = _create_pen_getter(QColor(255, 0, 0), 1.43) <NEW_LINE> get_green_pen = _create_pen_getter(QColor(0, 255, 0), 0.67) <NEW_LINE> get_purple_pen = _create_pen_getter(QColor(255, 0, 255), 0.67) <NEW_LINE> get_gray_pen = _create_pen_getter(QColor(105, 105, 105), 1.00) <NEW_LINE> get_blue_pen = _create_pen_getter(QColor(0, 0, 255), 1.43) <NEW_LINE> get_other_pen = _create_pen_getter(QColor(0, 255, 255), 0.67) <NEW_LINE> get_text_pen = _create_pen_getter(color, 0.67) <NEW_LINE> get_pin_pen = _create_pen_getter(color, 1.43) <NEW_LINE> get_element_outline_pen = _create_pen_getter(color, 0.67) <NEW_LINE> get_manual_element_outline_pen = _create_pen_getter(QColor(0, 0, 255), 0.67) <NEW_LINE> @classmethod <NEW_LINE> def get_selection_pen(cls, width=1, update_interval=0.4): <NEW_LINE> <INDENT> pen = get_pen(Pen.selection_color, width) <NEW_LINE> pen.setStyle(Qt.CustomDashLine) <NEW_LINE> pattern = (0, 3, 0, 3, 3, 0, 3, 0) <NEW_LINE> mod = int(monotonic() / update_interval) % (len(pattern) // 2) <NEW_LINE> pen.setDashPattern(pattern[mod * 2:] + pattern[:mod * 2]) <NEW_LINE> return pen <NEW_LINE> <DEDENT> pin_point_radius = 0.250 <NEW_LINE> pin_inner_radius = 0.833 <NEW_LINE> pin_outer_radius = 1.500 | Used pens parameters. Usage example:
from gui.pen import Pen
my_pen1 = Pen.get_red_pen()
my_pen2 = Pen.get_red_pen(width=1.5) | 62598fd1ec188e330fdf8cf9 |
class ComputedEntry(MSONable): <NEW_LINE> <INDENT> def __init__(self, composition, energy, correction=0.0, parameters=None, data=None, entry_id=None, attribute=None): <NEW_LINE> <INDENT> self.uncorrected_energy = energy <NEW_LINE> self.composition = Composition(composition) <NEW_LINE> self.correction = correction <NEW_LINE> self.parameters = parameters if parameters else {} <NEW_LINE> self.data = data if data else {} <NEW_LINE> self.entry_id = entry_id <NEW_LINE> self.name = self.composition.reduced_formula <NEW_LINE> self.attribute = attribute <NEW_LINE> <DEDENT> @property <NEW_LINE> def is_element(self): <NEW_LINE> <INDENT> return self.composition.is_element <NEW_LINE> <DEDENT> @property <NEW_LINE> def energy(self): <NEW_LINE> <INDENT> return self.uncorrected_energy + self.correction <NEW_LINE> <DEDENT> @property <NEW_LINE> def energy_per_atom(self): <NEW_LINE> <INDENT> return self.energy / self.composition.num_atoms <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> output = ["ComputedEntry {} - {}".format(self.entry_id, self.composition.formula), "Energy = {:.4f}".format(self.uncorrected_energy), "Correction = {:.4f}".format(self.correction), "Parameters:"] <NEW_LINE> for k, v in self.parameters.items(): <NEW_LINE> <INDENT> output.append("{} = {}".format(k, v)) <NEW_LINE> <DEDENT> output.append("Data:") <NEW_LINE> for k, v in self.data.items(): <NEW_LINE> <INDENT> output.append("{} = {}".format(k, v)) <NEW_LINE> <DEDENT> return "\n".join(output) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.__repr__() <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_dict(cls, d): <NEW_LINE> <INDENT> dec = MontyDecoder() <NEW_LINE> return cls(d["composition"], d["energy"], d["correction"], dec.process_decoded(d.get("parameters", {})), dec.process_decoded(d.get("data", {})), entry_id=d.get("entry_id", None), attribute=d["attribute"] if "attribute" in d else None) <NEW_LINE> <DEDENT> def as_dict(self): <NEW_LINE> <INDENT> return {"@module": 
self.__class__.__module__, "@class": self.__class__.__name__, "energy": self.uncorrected_energy, "composition": self.composition.as_dict(), "correction": self.correction, "parameters": json.loads(json.dumps(self.parameters, cls=MontyEncoder)), "data": json.loads(json.dumps(self.data, cls=MontyEncoder)), "entry_id": self.entry_id, "attribute": self.attribute} | A lightweight ComputedEntry object containing key computed data
for many purposes. Extends a PDEntry so that it can be used for phase
diagram generation. The difference between a ComputedEntry and a standard
PDEntry is that it includes additional parameters like a correction and
run_parameters. | 62598fd17cff6e4e811b5e8c |
class Regression: <NEW_LINE> <INDENT> def __init__(self, no_of_iters, step_rate): <NEW_LINE> <INDENT> self.iters = no_of_iters <NEW_LINE> self.learning_factor = step_rate <NEW_LINE> self.training_errs = [] <NEW_LINE> <DEDENT> def set_up_weights(self, features_count): <NEW_LINE> <INDENT> threshold = 1 / pow(features_count, 0.5) <NEW_LINE> self.weights = numpy.random.uniform(-threshold, threshold, (features_count)) <NEW_LINE> <DEDENT> def fit_constants(self, x_value, y_value): <NEW_LINE> <INDENT> ndarrray = numpy.insert(x_value, 0, 1, axis=1) <NEW_LINE> self.set_up_weights(ndarrray.shape[1]) <NEW_LINE> for i in range(self.iters): <NEW_LINE> <INDENT> y_prediction = ndarrray.dot(self.weights) <NEW_LINE> ms_error = numpy.mean(0.5 * (y_value - y_prediction) ** 2 + self.regularization(self.weights)) <NEW_LINE> self.training_errs.append(ms_error) <NEW_LINE> gradient_weight = -(y_value - y_prediction).dot(ndarrray) + self.regularization.grad(self.weights) <NEW_LINE> self.weights -= self.learning_factor * gradient_weight <NEW_LINE> <DEDENT> <DEDENT> def regularization(self, factor): <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> def make_prediction(self, x_value): <NEW_LINE> <INDENT> x = numpy.insert(x_value, 0, 1, axis=1) <NEW_LINE> return x.dot(self.weights) | Models the relationship between a scalar independent and
a dependent variable | 62598fd1a05bb46b3848accf |
class Shares(Base): <NEW_LINE> <INDENT> __tablename__ = 'shares' <NEW_LINE> id = Column(Integer, primary_key=True) <NEW_LINE> number = Column(Integer()) <NEW_LINE> date_of_acquisition = Column(Date(), nullable=False) <NEW_LINE> reference_code = Column(Unicode(255), unique=True) <NEW_LINE> signature_received = Column(Boolean, default=False) <NEW_LINE> signature_received_date = Column( Date(), default=date(1970, 1, 1)) <NEW_LINE> signature_confirmed = Column(Boolean, default=False) <NEW_LINE> signature_confirmed_date = Column( Date(), default=date(1970, 1, 1)) <NEW_LINE> payment_received = Column(Boolean, default=False) <NEW_LINE> payment_received_date = Column( Date(), default=date(1970, 1, 1)) <NEW_LINE> payment_confirmed = Column(Boolean, default=False) <NEW_LINE> payment_confirmed_date = Column( Date(), default=date(1970, 1, 1)) <NEW_LINE> accountant_comment = Column(Unicode(255)) | A package of shares which a member acquires.
Each shares package consists of one to sixty shares. One member may own
several packages, e.g. from membership application, crowdfunding and
requesting the acquisition of additional shares.
Shares packages only come into existence once the business process is finished
and the transaction is completed. Information about ongoing processes cannot
be stored here. | 62598fd1bf627c535bcb1911
class List(LoginRequiredMixin, StaticContextMixin, generic.ListView): <NEW_LINE> <INDENT> form_class, model = forms.Application, models.Application <NEW_LINE> template_name = 'inventory/list.html' <NEW_LINE> static_context = { 'page_title': 'Applications', } <NEW_LINE> def get_context_data(self, **kwargs): <NEW_LINE> <INDENT> context = super().get_context_data(**kwargs) <NEW_LINE> context['href_create'] = reverse('inventory:application:create') <NEW_LINE> return context <NEW_LINE> <DEDENT> def get_queryset(self): <NEW_LINE> <INDENT> return self.request.user.setting_set.get().applications | View names of visible Applications. | 62598fd13d592f4c4edbb319 |
class ChangeSourceMixin(object): <NEW_LINE> <INDENT> changesource = None <NEW_LINE> started = False <NEW_LINE> DUMMY_CHANGESOURCE_ID = 20 <NEW_LINE> OTHER_MASTER_ID = 93 <NEW_LINE> DEFAULT_NAME = "ChangeSource" <NEW_LINE> def setUpChangeSource(self): <NEW_LINE> <INDENT> self.master = fakemaster.make_master(wantDb=True, wantData=True, testcase=self) <NEW_LINE> assert not hasattr(self.master, 'addChange') <NEW_LINE> return defer.succeed(None) <NEW_LINE> <DEDENT> def tearDownChangeSource(self): <NEW_LINE> <INDENT> if not self.started: <NEW_LINE> <INDENT> return defer.succeed(None) <NEW_LINE> <DEDENT> if self.changesource.running: <NEW_LINE> <INDENT> return defer.maybeDeferred(self.changesource.stopService) <NEW_LINE> <DEDENT> return defer.succeed(None) <NEW_LINE> <DEDENT> def attachChangeSource(self, cs): <NEW_LINE> <INDENT> self.changesource = cs <NEW_LINE> self.changesource.master = self.master <NEW_LINE> self.changesource.clock = task.Clock() <NEW_LINE> <DEDENT> def startChangeSource(self): <NEW_LINE> <INDENT> self.started = True <NEW_LINE> self.changesource.startService() <NEW_LINE> <DEDENT> def stopChangeSource(self): <NEW_LINE> <INDENT> d = self.changesource.stopService() <NEW_LINE> def mark_stopped(_): <NEW_LINE> <INDENT> self.started = False <NEW_LINE> <DEDENT> d.addCallback(mark_stopped) <NEW_LINE> return d <NEW_LINE> <DEDENT> def setChangeSourceToMaster(self, otherMaster): <NEW_LINE> <INDENT> if self.changesource is not None: <NEW_LINE> <INDENT> name = self.changesource.name <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> name = self.DEFAULT_NAME <NEW_LINE> <DEDENT> self.master.data.updates.changesourceIds[name] = self.DUMMY_CHANGESOURCE_ID <NEW_LINE> if otherMaster: <NEW_LINE> <INDENT> self.master.data.updates.changesourceMasters[self.DUMMY_CHANGESOURCE_ID] = otherMaster <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> del self.master.data.updates.changesourceMasters[self.DUMMY_CHANGESOURCE_ID] | This class is used for testing change sources, and handles a few 
things:
- starting and stopping a ChangeSource service
- a fake master with a data API implementation | 62598fd1cc40096d6161a409 |
class _InvalidFunctionArgException(_ExpGeneratorException): <NEW_LINE> <INDENT> pass | This exception may be raised in response to invalid or incompatible function
arguments. | 62598fd150812a4eaa620e16 |
class DecisionTreeClassifier(): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def fit(self, X, t): <NEW_LINE> <INDENT> mask = np.ones(X.shape[1], dtype=bool) <NEW_LINE> self.tree = self._ID3(X, t, mask) <NEW_LINE> return self <NEW_LINE> <DEDENT> def predict(self, X): <NEW_LINE> <INDENT> return self._predict(self.tree, X) <NEW_LINE> <DEDENT> def _ID3(self, X, t, mask): <NEW_LINE> <INDENT> if len(np.unique(t))==1: <NEW_LINE> <INDENT> return t[0] <NEW_LINE> <DEDENT> n_feat = X.shape[1] <NEW_LINE> info_gain = np.zeros(n_feat) <NEW_LINE> for i in range(n_feat): <NEW_LINE> <INDENT> if mask[i]: <NEW_LINE> <INDENT> info_gain[i] = self._info_gain(X[:,i],t) <NEW_LINE> <DEDENT> <DEDENT> idx = np.argmax(info_gain) <NEW_LINE> features = np.unique(X[:,idx]) <NEW_LINE> tree = {} <NEW_LINE> tree[idx] = {} <NEW_LINE> mask[idx] = False <NEW_LINE> for val in features: <NEW_LINE> <INDENT> subX, subt = self._split(idx, val, X, t) <NEW_LINE> tree[idx][val] = self._ID3(subX, subt, mask) <NEW_LINE> <DEDENT> return tree <NEW_LINE> <DEDENT> def _entropy(self, target): <NEW_LINE> <INDENT> labels, counts = np.unique(target, return_counts=True) <NEW_LINE> prob = [cnt/np.sum(counts) for cnt in counts] <NEW_LINE> entropy = np.sum([-p*np.log2(p) for p in prob]) <NEW_LINE> return entropy <NEW_LINE> <DEDENT> def _info_gain(self, feature, target): <NEW_LINE> <INDENT> entropy = self._entropy(target) <NEW_LINE> value, counts = np.unique(feature, return_counts=True) <NEW_LINE> prob = [cnt/np.sum(counts) for cnt in counts] <NEW_LINE> for i in range(len(value)): <NEW_LINE> <INDENT> entropy -= prob[i]*self._entropy(target[feature==value[i]]) <NEW_LINE> <DEDENT> return entropy <NEW_LINE> <DEDENT> def _split(self, idx, val, X, t): <NEW_LINE> <INDENT> i = X[:,idx]==val <NEW_LINE> return X[i], t[i] <NEW_LINE> <DEDENT> def _predict(self, tree, X): <NEW_LINE> <INDENT> idx = list(tree.keys())[0] <NEW_LINE> branch = X[idx] <NEW_LINE> subTree = tree[idx][branch] <NEW_LINE> if 
isinstance(subTree, dict): <NEW_LINE> <INDENT> return self._predict(subTree, X) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return subTree | Decision Tree Classifier using ID3 | 62598fd19f28863672818aaf |
class WritingReceiver(DummyReceiver): <NEW_LINE> <INDENT> def run(self): <NEW_LINE> <INDENT> dsp = 'disposable' if self.disposable else 'persistent' <NEW_LINE> self.print('%s writer running'%dsp) <NEW_LINE> last_print = 0. <NEW_LINE> frames_since_last_print = 0 <NEW_LINE> total_frames = 0 <NEW_LINE> while True: <NEW_LINE> <INDENT> meta = self.sock.recv_json() <NEW_LINE> if meta['htype'] == 'header': <NEW_LINE> <INDENT> self.print(meta) <NEW_LINE> fn = meta['filename'] <NEW_LINE> ow = meta['overwritable'] <NEW_LINE> if fn.lower() == 'none': <NEW_LINE> <INDENT> fn = '' <NEW_LINE> <DEDENT> if fn: <NEW_LINE> <INDENT> if ow==False: <NEW_LINE> <INDENT> while os.path.exists(fn): <NEW_LINE> <INDENT> fn = fn.split('.')[0] + '_.' + fn.split('.', maxsplit=1)[-1] <NEW_LINE> <DEDENT> <DEDENT> fp = h5py.File(fn, 'w') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.print('Filename empty, not saving!') <NEW_LINE> <DEDENT> <DEDENT> elif meta['htype'] == 'image': <NEW_LINE> <INDENT> frames_since_last_print += 1 <NEW_LINE> total_frames += 1 <NEW_LINE> buff = self.sock.recv() <NEW_LINE> m, n = meta['shape'][:2] <NEW_LINE> frame = np.frombuffer(buff, dtype=meta['type']).reshape((m, n)) <NEW_LINE> extra = self.sock.recv_pyobj() <NEW_LINE> extra.append(frame) <NEW_LINE> if fn: <NEW_LINE> <INDENT> if meta['frame'] == 0: <NEW_LINE> <INDENT> group = fp.create_group('entry/instrument/xspress3') <NEW_LINE> group.attrs["NX_class"]="NXdetector" <NEW_LINE> for i, item in enumerate(extra): <NEW_LINE> <INDENT> print(i,item,type(item)) <NEW_LINE> d = group.create_dataset(EXTRA[i], shape=(1,)+item.shape, maxshape=(None,)+item.shape, dtype=item.dtype, chunks=(1,)+item.shape) <NEW_LINE> d[:] = item <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> for i, item in enumerate(extra): <NEW_LINE> <INDENT> d = fp["entry/instrument/xspress3"][EXTRA[i]] <NEW_LINE> old = d.shape[0] <NEW_LINE> d.resize((old+1,) + d.shape[1:]) <NEW_LINE> d[old] = item <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> if (time.time() 
- last_print) > 1.: <NEW_LINE> <INDENT> self.print('WritingReceiver: got %u new frames (total %u)' %(frames_since_last_print, total_frames)) <NEW_LINE> last_print = time.time() <NEW_LINE> frames_since_last_print = 0 <NEW_LINE> <DEDENT> <DEDENT> elif meta['htype'] == 'series_end': <NEW_LINE> <INDENT> self.print('WritingReceiver: got %u new frames (total %u)' %(frames_since_last_print, total_frames)) <NEW_LINE> self.print(meta) <NEW_LINE> if fn: <NEW_LINE> <INDENT> fp.flush() <NEW_LINE> fp.close() <NEW_LINE> <DEDENT> if self.disposable: <NEW_LINE> <INDENT> self.print('disposable writer done') <NEW_LINE> return 0 | Receiver which reads from the data PUB socket and writes to hdf5. | 62598fd1d8ef3951e32c808e |
class EAWTextField(models.TextField): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super().__init__(*args, **kwargs) <NEW_LINE> if self.max_length is not None: <NEW_LINE> <INDENT> self.validators.append(EAWMaxLengthValidator(self.max_length)) | Derived TextField that checks for its content's EAW length.
This adds an extra validator that counts EAW wide characters as two
instead of one. | 62598fd14527f215b58ea335 |
class WM_OT_studiolight_copy_settings(Operator): <NEW_LINE> <INDENT> bl_idname = 'wm.studiolight_copy_settings' <NEW_LINE> bl_label = "Copy Studio Light settings" <NEW_LINE> index: bpy.props.IntProperty() <NEW_LINE> def execute(self, context): <NEW_LINE> <INDENT> prefs = context.preferences <NEW_LINE> system = prefs.system <NEW_LINE> for studio_light in prefs.studio_lights: <NEW_LINE> <INDENT> if studio_light.index == self.index: <NEW_LINE> <INDENT> system.light_ambient = studio_light.light_ambient <NEW_LINE> for sys_light, light in zip(system.solid_lights, studio_light.solid_lights): <NEW_LINE> <INDENT> sys_light.use = light.use <NEW_LINE> sys_light.diffuse_color = light.diffuse_color <NEW_LINE> sys_light.specular_color = light.specular_color <NEW_LINE> sys_light.smooth = light.smooth <NEW_LINE> sys_light.direction = light.direction <NEW_LINE> <DEDENT> return {'FINISHED'} <NEW_LINE> <DEDENT> <DEDENT> return {'CANCELLED'} | Copy Studio Light settings to the Studio light editor | 62598fd1956e5f7376df58b0 |
@command(user_commands) <NEW_LINE> class user_list(_init_synnefo_astakosclient, _optional_json): <NEW_LINE> <INDENT> arguments = dict( detail=FlagArgument('Detailed listing', ('-l', '--detail')) ) <NEW_LINE> @errors.generic.all <NEW_LINE> @errors.user.astakosclient <NEW_LINE> def _run(self): <NEW_LINE> <INDENT> self._print([u if self['detail'] else (dict( id=u['id'], name=u['name'])) for u in self.auth_base.list_users()]) <NEW_LINE> <DEDENT> def main(self): <NEW_LINE> <INDENT> super(self.__class__, self)._run() <NEW_LINE> self._run() | List (cached) session users | 62598fd13d592f4c4edbb31b |
class EntityNotFound(MoltenError): <NEW_LINE> <INDENT> pass | Raised when an entity is not found using an `exists` check in sqlalchemy. | 62598fd1dc8b845886d53a24 |
class FetchRequest(Request): <NEW_LINE> <INDENT> api = "fetch" <NEW_LINE> parts = ( ("replica_id", Int32), ("max_wait_time", Int32), ("min_bytes", Int32), ("topics", Array.of(TopicRequest)), ) | ::
FetchRequest =>
replica_id => Int32
max_wait_time => Int32
min_bytes => Int32
topics => [TopicRequest] | 62598fd14527f215b58ea337 |
class CustomerFollowUp(models.Model): <NEW_LINE> <INDENT> customer = models.ForeignKey('Customer', on_delete=models.CASCADE) <NEW_LINE> content = models.TextField(verbose_name='Follow-up Content') <NEW_LINE> consultant = models.ForeignKey('UserProfile', on_delete=models.CASCADE) <NEW_LINE> intention_choices = ((0, 'Enroll in two weeks'), (1, 'Enroll in one month'), (2, 'No enrollment plan recently'), (3, 'Enrolled in other school'), (4, 'Already enrolled'), (5, 'Refused'),) <NEW_LINE> intention = models.SmallIntegerField(choices=intention_choices) <NEW_LINE> date = models.DateTimeField(auto_now_add=True) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return "<%s : %s>" %(self.customer, self.intention) | Customer Follow-up Table | 62598fd1a05bb46b3848acd3 |
class Node: <NEW_LINE> <INDENT> def __init__(self, val: int): <NEW_LINE> <INDENT> self.val = val <NEW_LINE> self.prev = None <NEW_LINE> self.next = None | Node of a linked list | 62598fd150812a4eaa620e18 |
class NetworkManager(object): <NEW_LINE> <INDENT> def __init__(self, config): <NEW_LINE> <INDENT> super(NetworkManager, self).__init__() <NEW_LINE> self.sockets = {} <NEW_LINE> self.wininets = {} <NEW_LINE> self.curr_fd = 4 <NEW_LINE> self.curr_handle = 0x20 <NEW_LINE> self.config = config <NEW_LINE> self.dns = {} <NEW_LINE> WininetComponent.config = config <NEW_LINE> self.dns = self.config.get('dns') <NEW_LINE> <DEDENT> def new_socket(self, family, stype, protocol, flags): <NEW_LINE> <INDENT> fd = self.curr_fd <NEW_LINE> sock = Socket(fd, family, stype, protocol, flags) <NEW_LINE> self.curr_fd += 4 <NEW_LINE> if self.config: <NEW_LINE> <INDENT> winsock = self.config.get('winsock') <NEW_LINE> if winsock: <NEW_LINE> <INDENT> responses = winsock.get('responses') <NEW_LINE> if responses: <NEW_LINE> <INDENT> sock.fill_recv_queue(responses) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> self.sockets.update({fd: sock}) <NEW_LINE> return sock <NEW_LINE> <DEDENT> def name_lookup(self, domain): <NEW_LINE> <INDENT> if not self.dns: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> names = self.dns.get('names') <NEW_LINE> if domain.lower() not in names.keys(): <NEW_LINE> <INDENT> return names.get('default') <NEW_LINE> <DEDENT> return names.get(domain) <NEW_LINE> <DEDENT> def get_dns_txt(self, domain): <NEW_LINE> <INDENT> def _read_txt_data(txt): <NEW_LINE> <INDENT> path = txt.get('path') <NEW_LINE> if path: <NEW_LINE> <INDENT> path = normalize_response_path(path) <NEW_LINE> with open(path, 'rb') as f: <NEW_LINE> <INDENT> return f.read() <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> if not self.dns: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> txts = self.dns.get('txt', []) <NEW_LINE> txt = [t for t in txts if t.get('name', '') == domain] <NEW_LINE> if txt: <NEW_LINE> <INDENT> return _read_txt_data(txt[0]) <NEW_LINE> <DEDENT> txt = [t for t in txts if t.get('name', '') == 'default'] <NEW_LINE> if txt: <NEW_LINE> <INDENT> return _read_txt_data(txt[0]) <NEW_LINE> <DEDENT> <DEDENT> def 
ip_lookup(self, ip): <NEW_LINE> <INDENT> for item in self.dns: <NEW_LINE> <INDENT> if item['response'] == ip: <NEW_LINE> <INDENT> return item['query'] <NEW_LINE> <DEDENT> <DEDENT> return None <NEW_LINE> <DEDENT> def new_wininet_inst(self, user_agent, access, proxy, bypass, flags): <NEW_LINE> <INDENT> wini = WininetInstance(user_agent, access, proxy, bypass, flags) <NEW_LINE> self.wininets.update({wini.get_handle(): wini}) <NEW_LINE> return wini <NEW_LINE> <DEDENT> def get_wininet_object(self, handle): <NEW_LINE> <INDENT> for hinst, inst in self.wininets.items(): <NEW_LINE> <INDENT> if hinst == handle: <NEW_LINE> <INDENT> return inst <NEW_LINE> <DEDENT> for hsess, sess in inst.sessions.items(): <NEW_LINE> <INDENT> if hsess == handle: <NEW_LINE> <INDENT> return sess <NEW_LINE> <DEDENT> for hreq, req in sess.requests.items(): <NEW_LINE> <INDENT> if hreq == handle: <NEW_LINE> <INDENT> return req <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> <DEDENT> def close_wininet_object(self, handle): <NEW_LINE> <INDENT> if self.wininets.get(handle): <NEW_LINE> <INDENT> self.wininets.pop(handle) <NEW_LINE> <DEDENT> <DEDENT> def get_socket(self, fd): <NEW_LINE> <INDENT> return self.sockets.get(fd) <NEW_LINE> <DEDENT> def close_socket(self, fd): <NEW_LINE> <INDENT> self.sockets.pop(fd) | Class that manages network connections during emulation | 62598fd1a219f33f346c6c70 |
class Heap(object): <NEW_LINE> <INDENT> def __init__(self, is_max=False): <NEW_LINE> <INDENT> self._array = [] <NEW_LINE> self._size = 0 <NEW_LINE> self._is_max = is_max <NEW_LINE> self._left = lambda index: 2 * index + 1 <NEW_LINE> self._right = lambda index: 2 * index + 2 <NEW_LINE> self._parent = lambda index: (index - 1) // 2 <NEW_LINE> <DEDENT> @property <NEW_LINE> def size(self): <NEW_LINE> <INDENT> return self._size <NEW_LINE> <DEDENT> @property <NEW_LINE> def array(self): <NEW_LINE> <INDENT> return self._array <NEW_LINE> <DEDENT> @property <NEW_LINE> def is_max(self): <NEW_LINE> <INDENT> return self._is_max <NEW_LINE> <DEDENT> @property <NEW_LINE> def is_empty(self): <NEW_LINE> <INDENT> return self.size == 0 <NEW_LINE> <DEDENT> @property <NEW_LINE> def peek(self): <NEW_LINE> <INDENT> return None if self.size == 0 else self.array[0] <NEW_LINE> <DEDENT> def heapify(self, index): <NEW_LINE> <INDENT> left = self._left(index) <NEW_LINE> right = self._right(index) <NEW_LINE> if self.is_max: <NEW_LINE> <INDENT> largest = (left if left < self.size and self.array[left][0] > self.array[index][0] else index) <NEW_LINE> largest = (right if right < self.size and self.array[right][0] > self.array[largest][0] else largest) <NEW_LINE> if largest != index: <NEW_LINE> <INDENT> self._swap(index, largest) <NEW_LINE> self.heapify(largest) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> smallest = (left if left < self.size and self.array[left][0] < self.array[index][0] else index) <NEW_LINE> smallest = (right if right < self.size and self.array[right][0] < self.array[smallest][0] else smallest) <NEW_LINE> if smallest != index: <NEW_LINE> <INDENT> self._swap(index, smallest) <NEW_LINE> self.heapify(smallest) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def extract(self): <NEW_LINE> <INDENT> if self.is_empty: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> key, val = self.array[0] <NEW_LINE> self.array[0] = self.array[self.size - 1] <NEW_LINE> self._size -= 1 <NEW_LINE> 
self.heapify(0) <NEW_LINE> return key, val <NEW_LINE> <DEDENT> def insert(self, key, val=None): <NEW_LINE> <INDENT> if self.size < len(self.array): <NEW_LINE> <INDENT> self.array[self.size] = key, val <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.array.append((key, val)) <NEW_LINE> <DEDENT> self._size += 1 <NEW_LINE> index = self.size - 1 <NEW_LINE> if self.is_max: <NEW_LINE> <INDENT> while index > 0 and self.array[self._parent(index)][0] < self.array[index][0]: <NEW_LINE> <INDENT> self._swap(index, self._parent(index)) <NEW_LINE> index = self._parent(index) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> while index > 0 and self.array[self._parent(index)][0] > self.array[index][0]: <NEW_LINE> <INDENT> self._swap(index, self._parent(index)) <NEW_LINE> index = self._parent(index) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def _swap(self, i, j): <NEW_LINE> <INDENT> tmp = self.array[i] <NEW_LINE> self.array[i] = self.array[j] <NEW_LINE> self.array[j] = tmp | Binary heap implementation of priority queue.
| 62598fd160cbc95b063647a7 |
class _ContentTypesItem(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(_ContentTypesItem, self).__init__() <NEW_LINE> self.__defaults = None <NEW_LINE> self.__overrides = None <NEW_LINE> <DEDENT> def __getitem__(self, partname): <NEW_LINE> <INDENT> if self.__defaults is None or self.__overrides is None: <NEW_LINE> <INDENT> tmpl = "lookup _ContentTypesItem['%s'] attempted before load" <NEW_LINE> raise ValueError(tmpl % partname) <NEW_LINE> <DEDENT> if partname in self.__overrides: <NEW_LINE> <INDENT> return self.__overrides[partname] <NEW_LINE> <DEDENT> ext = os.path.splitext(partname)[1] <NEW_LINE> ext = ext[1:] if ext.startswith('.') else ext <NEW_LINE> if ext in self.__defaults: <NEW_LINE> <INDENT> return self.__defaults[ext] <NEW_LINE> <DEDENT> tmpl = "no content type for part '%s' in [Content_Types].xml" <NEW_LINE> raise LookupError(tmpl % partname) <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> count = len(self.__defaults) if self.__defaults is not None else 0 <NEW_LINE> count += len(self.__overrides) if self.__overrides is not None else 0 <NEW_LINE> return count <NEW_LINE> <DEDENT> def compose(self, parts): <NEW_LINE> <INDENT> def_cts = pptx.spec.default_content_types <NEW_LINE> self.__defaults = dict((ext[1:], def_cts[ext]) for ext in ('.rels', '.xml')) <NEW_LINE> self.__overrides = {} <NEW_LINE> for part in parts: <NEW_LINE> <INDENT> ext = os.path.splitext(part.partname)[1] <NEW_LINE> if ext == '.xml': <NEW_LINE> <INDENT> self.__overrides[part.partname] = part.content_type <NEW_LINE> <DEDENT> elif ext in def_cts: <NEW_LINE> <INDENT> self.__defaults[ext[1:]] = def_cts[ext] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> tmpl = "extension '%s' not found in default_content_types" <NEW_LINE> raise LookupError(tmpl % (ext)) <NEW_LINE> <DEDENT> <DEDENT> return self <NEW_LINE> <DEDENT> @property <NEW_LINE> def element(self): <NEW_LINE> <INDENT> nsmap = {None: pptx.spec.nsmap['ct']} <NEW_LINE> element = 
etree.Element(qtag('ct:Types'), nsmap=nsmap) <NEW_LINE> if self.__defaults: <NEW_LINE> <INDENT> for ext in sorted(self.__defaults.keys()): <NEW_LINE> <INDENT> subelm = etree.SubElement(element, qtag('ct:Default')) <NEW_LINE> subelm.set('Extension', ext) <NEW_LINE> subelm.set('ContentType', self.__defaults[ext]) <NEW_LINE> <DEDENT> <DEDENT> if self.__overrides: <NEW_LINE> <INDENT> for partname in sorted(self.__overrides.keys()): <NEW_LINE> <INDENT> subelm = etree.SubElement(element, qtag('ct:Override')) <NEW_LINE> subelm.set('PartName', partname) <NEW_LINE> subelm.set('ContentType', self.__overrides[partname]) <NEW_LINE> <DEDENT> <DEDENT> return element <NEW_LINE> <DEDENT> def load(self, fs): <NEW_LINE> <INDENT> element = fs.getelement('/[Content_Types].xml') <NEW_LINE> defaults = element.findall(qtag('ct:Default')) <NEW_LINE> overrides = element.findall(qtag('ct:Override')) <NEW_LINE> self.__defaults = dict((d.get('Extension'), d.get('ContentType')) for d in defaults) <NEW_LINE> self.__overrides = dict((o.get('PartName'), o.get('ContentType')) for o in overrides) <NEW_LINE> return self | Lookup content type by part name using dictionary syntax, e.g.
``content_type = cti['/ppt/presentation.xml']``. | 62598fd1ab23a570cc2d4fa2 |
class Solution: <NEW_LINE> <INDENT> def findRec(self, n1, n2, s, found): <NEW_LINE> <INDENT> if s=="" and found: return True <NEW_LINE> n3 = str(n1+n2) <NEW_LINE> if s[:min(len(n3), len(s))] == n3: return self.findRec(n2, int(n3), s[len(n3):], True) <NEW_LINE> return False <NEW_LINE> <DEDENT> def isAdditiveNumber(self, nums: str) -> bool: <NEW_LINE> <INDENT> for i in range(1, len(nums)-1): <NEW_LINE> <INDENT> n1 = int(nums[:i]) <NEW_LINE> if str(n1) != nums[:i]: break <NEW_LINE> for j in range(i+1, len(nums)): <NEW_LINE> <INDENT> n2 = int(nums[i:j]) <NEW_LINE> if str(n2) != nums[i:j]: break <NEW_LINE> if self.findRec(n1, n2, nums[j:], False): return True <NEW_LINE> <DEDENT> <DEDENT> return False | The idea is to recursively solve the problem
Break the string at every possible point, and see if we can find a combination which actually holds this Fibonacci property | 62598fd1956e5f7376df58b2
class ComplexAugmentedAssignmentMixinTests: <NEW_LINE> <INDENT> def test_generic_2282_isub_definition(self, a: ClassUnderTest, b: ClassUnderTest): <NEW_LINE> <INDENT> a_expected = a - b <NEW_LINE> a -= b <NEW_LINE> self.assertEqual(a, a_expected) <NEW_LINE> <DEDENT> def test_generic_2284_iadd_definition(self, a: ClassUnderTest, b: ClassUnderTest): <NEW_LINE> <INDENT> a_expected = a + b <NEW_LINE> a += b <NEW_LINE> self.assertEqual(a, a_expected) <NEW_LINE> <DEDENT> def test_generic_2285_imul_definition(self, a: ClassUnderTest, b: ClassUnderTest): <NEW_LINE> <INDENT> a_expected = a * b <NEW_LINE> a *= b <NEW_LINE> self.assertEqual(a, a_expected) <NEW_LINE> <DEDENT> def test_generic_2286_itruediv_definition( self, a: ClassUnderTest, b: ClassUnderTest ): <NEW_LINE> <INDENT> assume(not b == self.zero) <NEW_LINE> a_expected = a / b <NEW_LINE> a /= b <NEW_LINE> self.assertEqual(a, a_expected) | Tests of the basic arithmetic assignment operators. | 62598fd17cff6e4e811b5e92 |
class MainListView(ListView): <NEW_LINE> <INDENT> model = CompanyInformation <NEW_LINE> template_name = "mainapp/main_page.html" <NEW_LINE> context_object_name = "companies" <NEW_LINE> paginate_by = 3 <NEW_LINE> def get_ordering(self): <NEW_LINE> <INDENT> ordering = self.request.GET.get("order_by") <NEW_LINE> return ordering <NEW_LINE> <DEDENT> def get_context_data(self, *, object_list=None, **kwargs): <NEW_LINE> <INDENT> context = super().get_context_data(**kwargs) <NEW_LINE> context["order_by"] = self.request.GET.get("order_by") <NEW_LINE> return context | Главная страница
вывод всех компаний с пагинацией и сортировкой | 62598fd1377c676e912f6fad |
class BattleSide(object): <NEW_LINE> <INDENT> def __init__(self,members,state = 1,matrixType = PLAYER_PET, matrixSetting = {}, preDict = {'extVitper':1,'extStrper':1, 'extDexper':1,'extWisper':1,'extSpiper':1}): <NEW_LINE> <INDENT> self.matrixType = matrixType <NEW_LINE> self.preDict = preDict <NEW_LINE> self.members = [] <NEW_LINE> self.matrixSetting = {} <NEW_LINE> self.lord = members[0].baseInfo.id <NEW_LINE> if members[0].getCharacterType()==1 and matrixType==PLAYER_PET: <NEW_LINE> <INDENT> player = members[0] <NEW_LINE> self.matrixType = PLAYER_PET <NEW_LINE> self.members = [] <NEW_LINE> for eyeNo in range(1,10): <NEW_LINE> <INDENT> memID = player.matrix._matrixSetting.get('eyes_%d'%eyeNo) <NEW_LINE> if memID<0: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> if not memID: <NEW_LINE> <INDENT> self.members.append(player) <NEW_LINE> memID = player.baseInfo.id <NEW_LINE> self.matrixSetting[memID] = eyeNo <NEW_LINE> <DEDENT> elif memID>=3000000: <NEW_LINE> <INDENT> pet = player.pet.getPet(memID) <NEW_LINE> if pet and pet.attribute.getHp()>0: <NEW_LINE> <INDENT> self.members.append(pet) <NEW_LINE> self.matrixSetting[memID] = eyeNo <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> from app.game.core.character import PlayerCharacter <NEW_LINE> pet = PlayerCharacter.PlayerCharacter(memID) <NEW_LINE> self.members.append(pet) <NEW_LINE> self.matrixSetting[memID] = eyeNo <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> elif matrixType==PLAYER_PLAYER: <NEW_LINE> <INDENT> self.members = members <NEW_LINE> self.matrixSetting = matrixSetting <NEW_LINE> if not matrixSetting: <NEW_LINE> <INDENT> self.autoPosition() <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> self.members = members <NEW_LINE> if state and not self.matrixSetting: <NEW_LINE> <INDENT> self.autoPosition() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.matrixSetting = matrixSetting <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def autoPosition(self): <NEW_LINE> <INDENT> rule = [9,8,7,6,4,3,2,1,5] <NEW_LINE> for 
index in range(len(self.members)): <NEW_LINE> <INDENT> character = self.members[index] <NEW_LINE> self.matrixSetting[character.baseInfo.id] = rule[index] <NEW_LINE> <DEDENT> <DEDENT> def setMatrixPositionBatch(self,rule): <NEW_LINE> <INDENT> for index in range(len(rule)): <NEW_LINE> <INDENT> pos = rule[index] <NEW_LINE> character = self.members[index] <NEW_LINE> self.matrixSetting[character.baseInfo.id] = pos <NEW_LINE> <DEDENT> <DEDENT> def getCharacterEyeNo(self,characterId,characterType = 2): <NEW_LINE> <INDENT> eyeNo = self.matrixSetting.get(characterId) <NEW_LINE> return eyeNo <NEW_LINE> <DEDENT> def getMembers(self): <NEW_LINE> <INDENT> fighters = [] <NEW_LINE> for member in self.members: <NEW_LINE> <INDENT> data = member.getFightData() <NEW_LINE> fighters.append(data) <NEW_LINE> <DEDENT> return fighters <NEW_LINE> <DEDENT> def getLord(self): <NEW_LINE> <INDENT> return self.lord | 战斗方类 | 62598fd13346ee7daa33787c |
class Graph(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.nodes = set() <NEW_LINE> self.edges = defaultdict(list) <NEW_LINE> self.distances = {} <NEW_LINE> self.stops = {} <NEW_LINE> self.airlines = {} <NEW_LINE> <DEDENT> def add_edge(self, from_node, to_node, weight, stops, airline_code): <NEW_LINE> <INDENT> self.nodes.add(from_node) <NEW_LINE> self.nodes.add(to_node) <NEW_LINE> self.edges[from_node].append(to_node) <NEW_LINE> self.distances[from_node, to_node] = weight <NEW_LINE> self.stops[from_node, to_node] = stops <NEW_LINE> self.airlines[from_node, to_node] = airline_code | Graph class
This class represents an airport.
:param airline_code: The airline code.
:param from_node: The source airport code.
:param stops: The number of stops.
:param to_node: The destination airport code.
:param status: The status of the Vertex indicating visited or unvisited. | 62598fd1cc40096d6161a40c |
class Result(Generic[Output]): <NEW_LINE> <INDENT> def or_die(self) -> Output: <NEW_LINE> <INDENT> raise NotImplementedError() | Abstract algebraic base class for ``Success`` and ``Failure``.
The class of all values returned from Parser.parse. | 62598fd197e22403b383b373 |
class VideoBackend: <NEW_LINE> <INDENT> re_code = None <NEW_LINE> re_detect = None <NEW_LINE> pattern_url = None <NEW_LINE> pattern_thumbnail_url = None <NEW_LINE> allow_https = True <NEW_LINE> template_name = "embed_video/embed_code.html" <NEW_LINE> default_query = "" <NEW_LINE> is_secure = False <NEW_LINE> def __init__(self, url): <NEW_LINE> <INDENT> self.backend = self.__class__.__name__ <NEW_LINE> self._url = url <NEW_LINE> self.query = QueryDict(self.default_query, mutable=True) <NEW_LINE> <DEDENT> @property <NEW_LINE> def code(self): <NEW_LINE> <INDENT> return self.get_code() <NEW_LINE> <DEDENT> @property <NEW_LINE> def url(self): <NEW_LINE> <INDENT> return self.get_url() <NEW_LINE> <DEDENT> @property <NEW_LINE> def protocol(self): <NEW_LINE> <INDENT> return "https" if self.allow_https and self.is_secure else "http" <NEW_LINE> <DEDENT> @property <NEW_LINE> def thumbnail(self): <NEW_LINE> <INDENT> return self.get_thumbnail_url() <NEW_LINE> <DEDENT> @property <NEW_LINE> def info(self): <NEW_LINE> <INDENT> return self.get_info() <NEW_LINE> <DEDENT> @property <NEW_LINE> def query(self): <NEW_LINE> <INDENT> return self._query <NEW_LINE> <DEDENT> @query.setter <NEW_LINE> def query(self, value): <NEW_LINE> <INDENT> self._query = ( value if isinstance(value, QueryDict) else QueryDict(value, mutable=True) ) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def is_valid(cls, url): <NEW_LINE> <INDENT> return True if cls.re_detect.match(url) else False <NEW_LINE> <DEDENT> def get_code(self): <NEW_LINE> <INDENT> match = self.re_code.search(self._url) <NEW_LINE> if match: <NEW_LINE> <INDENT> return match.group("code") <NEW_LINE> <DEDENT> <DEDENT> def get_url(self): <NEW_LINE> <INDENT> url = self.pattern_url.format(code=self.code, protocol=self.protocol) <NEW_LINE> url += "?" 
+ self.query.urlencode() if self.query else "" <NEW_LINE> return mark_safe(url) <NEW_LINE> <DEDENT> def get_thumbnail_url(self): <NEW_LINE> <INDENT> return self.pattern_thumbnail_url.format(code=self.code, protocol=self.protocol) <NEW_LINE> <DEDENT> def get_embed_code(self, width, height): <NEW_LINE> <INDENT> return render_to_string( self.template_name, {"backend": self, "width": width, "height": height} ) <NEW_LINE> <DEDENT> def get_info(self): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def set_options(self, options): <NEW_LINE> <INDENT> for key in options: <NEW_LINE> <INDENT> setattr(self, key, options[key]) | Base class used as parental class for backends.
Backend variables:
.. autosummary::
url
code
thumbnail
query
info
is_secure
protocol
template_name
.. code-block:: python
class MyBackend(VideoBackend):
... | 62598fd150812a4eaa620e19 |
class TwoLayerNet(object): <NEW_LINE> <INDENT> def __init__(self, input_dim=3*32*32, hidden_dims=100, num_classes=10, dropout=0, weight_scale=1e-3, reg=0.0): <NEW_LINE> <INDENT> self.params = {} <NEW_LINE> self.reg = reg <NEW_LINE> self.params['W1'] = np.random.normal(scale = weight_scale,size = (input_dim,hidden_dims)) <NEW_LINE> self.params['W2'] = np.random.normal(scale = weight_scale,size = (hidden_dims,num_classes)) <NEW_LINE> self.params['b1'] = np.zeros(hidden_dims) <NEW_LINE> self.params['b2'] = np.zeros(num_classes) <NEW_LINE> <DEDENT> def loss(self, X, y=None): <NEW_LINE> <INDENT> scores = None <NEW_LINE> scores = X <NEW_LINE> scores,cache_1 = affine_relu_forward(scores,self.params['W1'],self.params['b1']) <NEW_LINE> scores,cache_2 = affine_forward(scores,self.params['W2'],self.params['b2']) <NEW_LINE> if y is None: <NEW_LINE> <INDENT> return scores <NEW_LINE> <DEDENT> loss, grads = 0, {} <NEW_LINE> loss,grads_out = softmax_loss(scores,y) <NEW_LINE> loss += 0.5*self.reg*np.sum(self.params['W1']**2) + 0.5*self.reg*np.sum(self.params['W2']**2) <NEW_LINE> dX_2,grads['W2'],grads['b2'] = affine_backward(grads_out,cache_2) <NEW_LINE> dX_1,grads['W1'],grads['b1'] = affine_relu_backward(dX_2,cache_1) <NEW_LINE> grads['W1'] += self.reg*self.params['W1'] <NEW_LINE> grads['W2'] += self.reg*self.params['W2'] <NEW_LINE> return loss, grads | A two-layer fully-connected neural network with ReLU nonlinearity and
softmax loss that uses a modular layer design. We assume an input dimension
of D, a hidden dimension of H, and perform classification over C classes.
The architecure should be affine - relu - affine - softmax.
Note that this class does not implement gradient descent; instead, it
will interact with a separate Solver object that is responsible for running
optimization.
The learnable parameters of the model are stored in the dictionary
self.params that maps parameter names to numpy arrays. | 62598fd1adb09d7d5dc0a9e5 |
class itkShrinkImageFilterIUL2IUL2(itkImageToImageFilterAPython.itkImageToImageFilterIUL2IUL2): <NEW_LINE> <INDENT> thisown = _swig_property(lambda x: x.this.own(), lambda x, v: x.this.own(v), doc='The membership flag') <NEW_LINE> def __init__(self, *args, **kwargs): raise AttributeError("No constructor defined") <NEW_LINE> __repr__ = _swig_repr <NEW_LINE> ImageDimension = _itkShrinkImageFilterPython.itkShrinkImageFilterIUL2IUL2_ImageDimension <NEW_LINE> OutputImageDimension = _itkShrinkImageFilterPython.itkShrinkImageFilterIUL2IUL2_OutputImageDimension <NEW_LINE> InputConvertibleToOutputCheck = _itkShrinkImageFilterPython.itkShrinkImageFilterIUL2IUL2_InputConvertibleToOutputCheck <NEW_LINE> SameDimensionCheck = _itkShrinkImageFilterPython.itkShrinkImageFilterIUL2IUL2_SameDimensionCheck <NEW_LINE> def __New_orig__(): <NEW_LINE> <INDENT> return _itkShrinkImageFilterPython.itkShrinkImageFilterIUL2IUL2___New_orig__() <NEW_LINE> <DEDENT> __New_orig__ = staticmethod(__New_orig__) <NEW_LINE> def SetShrinkFactors(self, *args): <NEW_LINE> <INDENT> return _itkShrinkImageFilterPython.itkShrinkImageFilterIUL2IUL2_SetShrinkFactors(self, *args) <NEW_LINE> <DEDENT> def SetShrinkFactor(self, *args): <NEW_LINE> <INDENT> return _itkShrinkImageFilterPython.itkShrinkImageFilterIUL2IUL2_SetShrinkFactor(self, *args) <NEW_LINE> <DEDENT> def GetShrinkFactors(self): <NEW_LINE> <INDENT> return _itkShrinkImageFilterPython.itkShrinkImageFilterIUL2IUL2_GetShrinkFactors(self) <NEW_LINE> <DEDENT> def GenerateOutputInformation(self): <NEW_LINE> <INDENT> return _itkShrinkImageFilterPython.itkShrinkImageFilterIUL2IUL2_GenerateOutputInformation(self) <NEW_LINE> <DEDENT> def GenerateInputRequestedRegion(self): <NEW_LINE> <INDENT> return _itkShrinkImageFilterPython.itkShrinkImageFilterIUL2IUL2_GenerateInputRequestedRegion(self) <NEW_LINE> <DEDENT> __swig_destroy__ = _itkShrinkImageFilterPython.delete_itkShrinkImageFilterIUL2IUL2 <NEW_LINE> def cast(*args): <NEW_LINE> <INDENT> return 
_itkShrinkImageFilterPython.itkShrinkImageFilterIUL2IUL2_cast(*args) <NEW_LINE> <DEDENT> cast = staticmethod(cast) <NEW_LINE> def GetPointer(self): <NEW_LINE> <INDENT> return _itkShrinkImageFilterPython.itkShrinkImageFilterIUL2IUL2_GetPointer(self) <NEW_LINE> <DEDENT> def New(*args, **kargs): <NEW_LINE> <INDENT> obj = itkShrinkImageFilterIUL2IUL2.__New_orig__() <NEW_LINE> import itkTemplate <NEW_LINE> itkTemplate.New(obj, *args, **kargs) <NEW_LINE> return obj <NEW_LINE> <DEDENT> New = staticmethod(New) | Proxy of C++ itkShrinkImageFilterIUL2IUL2 class | 62598fd1ad47b63b2c5a7cc6 |
class ModTest(TestCase): <NEW_LINE> <INDENT> def do_test(self, mod, o, *a): <NEW_LINE> <INDENT> args = [data.HexData(i) for i in a] <NEW_LINE> self.assertEqual(mod(*args), data.HexData(o)) | A TestCase base class for any mods. | 62598fd1dc8b845886d53a28 |
class boleto_boleto(osv.osv): <NEW_LINE> <INDENT> _name = 'boleto.boleto' <NEW_LINE> _columns = { 'name': fields.char('Name', size=20, required=True), 'carteira': fields.char('Carteira', size=10), 'instrucoes': fields.text(u'Instruções'), 'sacado': fields.many2one('res.partner', 'Sacado'), 'banco': fields.selection([('bb', 'Banco do Brasil'), ('real', 'Banco Real'), ('bradesco', 'Banco Bradesco'), ('caixa', 'Banco Caixa Federal')], 'Banco'), 'agencia_cedente': fields.char('Agencia', size=6), 'conta_cedente': fields.char('Conta', size=8), 'convenio': fields.char('Convenio', size=8), 'nosso_numero': fields.integer(u'Nosso Número'), 'cedente': fields.many2one('res.company', 'Empresa'), 'move_line_id': fields.many2one('account.move.line', 'Move Line'), 'data_vencimento': fields.date('Data do Vencimento'), 'data_documento': fields.date('Data do Documento'), 'data_processamento': fields.date('Data do Processamento'), 'valor': fields.float('Valor', digits=(12, 6)), 'numero_documento': fields.char(u'Número do Documento', size=20), 'endereco': fields.char(u'Endereço', size=20), } | Boleto | 62598fd19f28863672818ab2 |
class PromocaoInline(nested_admin.NestedTabularInline): <NEW_LINE> <INDENT> extra = 3 <NEW_LINE> model = models.Promocao | Inline para o model Promocao. | 62598fd1a219f33f346c6c72 |
class GeneralData: <NEW_LINE> <INDENT> def __init__(self, meter: int = None, number_of_rooms: int = None, tariff: float = None, consumption: int = None, communal_per_person: int = None) -> None: <NEW_LINE> <INDENT> self.meter = meter <NEW_LINE> self.number_of_rooms = number_of_rooms <NEW_LINE> self.tariff = tariff <NEW_LINE> self.consumption = consumption <NEW_LINE> self.communal_per_person = communal_per_person <NEW_LINE> <DEDENT> def calculate_consumption(self, recent_meter: int) -> None: <NEW_LINE> <INDENT> self.consumption = self.meter - recent_meter <NEW_LINE> <DEDENT> def calculate_communal_per_person(self, rooms: dict, room_n: str = ROOM_N, consumption: str = CONSUMPTION, humans: str = HUMANS) -> None: <NEW_LINE> <INDENT> persons = 0 <NEW_LINE> communal_consumption = self.consumption <NEW_LINE> for num in range(self.number_of_rooms): <NEW_LINE> <INDENT> room = rooms[room_n + str(num + 1)] <NEW_LINE> persons += room[humans] <NEW_LINE> communal_consumption -= room[consumption] <NEW_LINE> <DEDENT> self.communal_per_person = (self.tariff * communal_consumption) / persons | Класс, содержащий в качестве атрибутов общие данниые по квартире,
необходимые для рассчетов. | 62598fd1d8ef3951e32c8091 |
class CabinetInfo(object): <NEW_LINE> <INDENT> def __init__(self, filename=None, date_time=None): <NEW_LINE> <INDENT> self.filename, self.date_time = filename, date_time <NEW_LINE> self.file_size = 0 <NEW_LINE> self.external_attr = 0 <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "<CabinetInfo %s, size=%s, date=%r, attrib=%x>"%(self.filename, self.file_size, self.date_time, self.external_attr) | A simple class to encapsulate information about cabinet members | 62598fd1fbf16365ca794528 |
class TestSoledadDbSync(test_sync.TestDbSync, BaseSoledadTest): <NEW_LINE> <INDENT> scenarios = [ ('py-http', { 'make_app_with_state': make_soledad_app, 'make_database_for_test': tests.make_memory_database_for_test, }), ('py-token-http', { 'make_app_with_state': make_token_soledad_app, 'make_database_for_test': tests.make_memory_database_for_test, 'token': True }), ] <NEW_LINE> oauth = False <NEW_LINE> token = False <NEW_LINE> def do_sync(self, target_name): <NEW_LINE> <INDENT> if self.token: <NEW_LINE> <INDENT> extra = dict(creds={'token': { 'uuid': 'user-uuid', 'token': 'auth-token', }}) <NEW_LINE> target_url = self.getURL(target_name) <NEW_LINE> return Synchronizer( self.db, target.SoledadSyncTarget( target_url, crypto=self._soledad._crypto, **extra)).sync(autocreate=True) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return test_sync.TestDbSync.do_sync(self, target_name) <NEW_LINE> <DEDENT> <DEDENT> def test_db_sync(self): <NEW_LINE> <INDENT> doc1 = self.db.create_doc_from_json(tests.simple_doc) <NEW_LINE> doc2 = self.db2.create_doc_from_json(tests.nested_doc) <NEW_LINE> local_gen_before_sync = self.do_sync('test2.db') <NEW_LINE> gen, _, changes = self.db.whats_changed(local_gen_before_sync) <NEW_LINE> self.assertEqual(1, len(changes)) <NEW_LINE> self.assertEqual(doc2.doc_id, changes[0][0]) <NEW_LINE> self.assertEqual(1, gen - local_gen_before_sync) <NEW_LINE> self.assertGetEncryptedDoc( self.db2, doc1.doc_id, doc1.rev, tests.simple_doc, False) <NEW_LINE> self.assertGetEncryptedDoc( self.db, doc2.doc_id, doc2.rev, tests.nested_doc, False) <NEW_LINE> <DEDENT> def test_db_sync_autocreate(self): <NEW_LINE> <INDENT> doc1 = self.db.create_doc_from_json(tests.simple_doc) <NEW_LINE> local_gen_before_sync = self.do_sync('test3.db') <NEW_LINE> gen, _, changes = self.db.whats_changed(local_gen_before_sync) <NEW_LINE> self.assertEqual(0, gen - local_gen_before_sync) <NEW_LINE> db3 = self.request_state.open_database('test3.db') <NEW_LINE> gen, _, changes = 
db3.whats_changed() <NEW_LINE> self.assertEqual(1, len(changes)) <NEW_LINE> self.assertEqual(doc1.doc_id, changes[0][0]) <NEW_LINE> self.assertGetEncryptedDoc( db3, doc1.doc_id, doc1.rev, tests.simple_doc, False) <NEW_LINE> t_gen, _ = self.db._get_replica_gen_and_trans_id('test3.db') <NEW_LINE> s_gen, _ = db3._get_replica_gen_and_trans_id('test1') <NEW_LINE> self.assertEqual(1, t_gen) <NEW_LINE> self.assertEqual(1, s_gen) | Test db.sync remote sync shortcut | 62598fd1656771135c489adc |
class MagGrad(Gradient): <NEW_LINE> <INDENT> def preprocess(self, img): <NEW_LINE> <INDENT> gray = cv2.cvtColor(img, cv2.COLOR_RGB2GRAY) <NEW_LINE> sobelx = cv2.Sobel(gray, cv2.CV_64F, 1, 0, ksize=self.sobel_kernel) <NEW_LINE> sobely = cv2.Sobel(gray, cv2.CV_64F, 0, 1, ksize=self.sobel_kernel) <NEW_LINE> mag = np.sqrt(np.square(sobelx) + np.square(sobely)) <NEW_LINE> scaled_mag = np.uint8(255 * mag / np.max(mag)) <NEW_LINE> sxbinary = np.zeros_like(scaled_mag) <NEW_LINE> sxbinary[(scaled_mag >= self.sx_thresh[0]) & (scaled_mag <= self.sx_thresh[1])] = 1 <NEW_LINE> return sxbinary | Calculate gradient magnitude | 62598fd1ff9c53063f51aab8 |
class Reference(ReferenceBase): <NEW_LINE> <INDENT> def __init__(self, value): <NEW_LINE> <INDENT> ReferenceBase.__init__(self, value) <NEW_LINE> self.split = value.split('/') <NEW_LINE> self.resolved = None <NEW_LINE> <DEDENT> def getPaths(self): <NEW_LINE> <INDENT> return [self.value] <NEW_LINE> <DEDENT> def resolve(self, thissetting): <NEW_LINE> <INDENT> if self.resolved: <NEW_LINE> <INDENT> return self.resolved <NEW_LINE> <DEDENT> item = thissetting.parent <NEW_LINE> parts = list(self.split) <NEW_LINE> if parts[0] == '': <NEW_LINE> <INDENT> while item.parent is not None: <NEW_LINE> <INDENT> item = item.parent <NEW_LINE> <DEDENT> parts = parts[1:] <NEW_LINE> <DEDENT> for p in parts: <NEW_LINE> <INDENT> if p == '..': <NEW_LINE> <INDENT> if item.parent is not None: <NEW_LINE> <INDENT> item = item.parent <NEW_LINE> <DEDENT> <DEDENT> elif p == '': <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if item.isWidget(): <NEW_LINE> <INDENT> child = item.getChild(p) <NEW_LINE> if not child: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> item = item.settings.get(p) <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> raise self.ResolveException() <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> item = child <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> item = item.get(p) <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> raise self.ResolveException() <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> if len(self.split) > 2 and self.split[1] == 'StyleSheet': <NEW_LINE> <INDENT> self.resolved = item <NEW_LINE> <DEDENT> return item <NEW_LINE> <DEDENT> def setOnModified(self, setn, fn): <NEW_LINE> <INDENT> resolved = self.resolve(setn) <NEW_LINE> resolved.setOnModified(fn) | A value a setting can have to point to another setting.
Formats of a reference are like
/foo/bar/setting or
../Line/width
alternatively style sheets can be used with the format, e.g.
/StyleSheet/linewidth | 62598fd150812a4eaa620e1a |
class ControllerManager: <NEW_LINE> <INDENT> def __init__(self, hass, controller): <NEW_LINE> <INDENT> self._hass = hass <NEW_LINE> self._device_registry = None <NEW_LINE> self._entity_registry = None <NEW_LINE> self.controller = controller <NEW_LINE> self._signals = [] <NEW_LINE> <DEDENT> async def connect_listeners(self): <NEW_LINE> <INDENT> self._device_registry, self._entity_registry = await asyncio.gather( self._hass.helpers.device_registry.async_get_registry(), self._hass.helpers.entity_registry.async_get_registry(), ) <NEW_LINE> self._signals.append( self.controller.dispatcher.connect( heos_const.SIGNAL_CONTROLLER_EVENT, self._controller_event ) ) <NEW_LINE> self._signals.append( self.controller.dispatcher.connect( heos_const.SIGNAL_HEOS_EVENT, self._heos_event ) ) <NEW_LINE> <DEDENT> async def disconnect(self): <NEW_LINE> <INDENT> for signal_remove in self._signals: <NEW_LINE> <INDENT> signal_remove() <NEW_LINE> <DEDENT> self._signals.clear() <NEW_LINE> self.controller.dispatcher.disconnect_all() <NEW_LINE> await self.controller.disconnect() <NEW_LINE> <DEDENT> async def _controller_event(self, event, data): <NEW_LINE> <INDENT> if event == heos_const.EVENT_PLAYERS_CHANGED: <NEW_LINE> <INDENT> self.update_ids(data[heos_const.DATA_MAPPED_IDS]) <NEW_LINE> <DEDENT> self._hass.helpers.dispatcher.async_dispatcher_send(SIGNAL_HEOS_UPDATED) <NEW_LINE> <DEDENT> async def _heos_event(self, event): <NEW_LINE> <INDENT> if event == heos_const.EVENT_CONNECTED: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> data = await self.controller.load_players() <NEW_LINE> self.update_ids(data[heos_const.DATA_MAPPED_IDS]) <NEW_LINE> <DEDENT> except HeosError as ex: <NEW_LINE> <INDENT> _LOGGER.error("Unable to refresh players: %s", ex) <NEW_LINE> <DEDENT> <DEDENT> self._hass.helpers.dispatcher.async_dispatcher_send(SIGNAL_HEOS_UPDATED) <NEW_LINE> <DEDENT> def update_ids(self, mapped_ids: Dict[int, int]): <NEW_LINE> <INDENT> for new_id, old_id in mapped_ids.items(): <NEW_LINE> <INDENT> 
entry = self._device_registry.async_get_device({(DOMAIN, old_id)}, set()) <NEW_LINE> new_identifiers = {(DOMAIN, new_id)} <NEW_LINE> if entry: <NEW_LINE> <INDENT> self._device_registry.async_update_device( entry.id, new_identifiers=new_identifiers ) <NEW_LINE> _LOGGER.debug( "Updated device %s identifiers to %s", entry.id, new_identifiers ) <NEW_LINE> <DEDENT> entity_id = self._entity_registry.async_get_entity_id( MEDIA_PLAYER_DOMAIN, DOMAIN, str(old_id) ) <NEW_LINE> if entity_id: <NEW_LINE> <INDENT> self._entity_registry.async_update_entity( entity_id, new_unique_id=str(new_id) ) <NEW_LINE> _LOGGER.debug("Updated entity %s unique id to %s", entity_id, new_id) | Class that manages events of the controller. | 62598fd17b180e01f3e49286 |
@pytest.mark.usefixtures('mocker') <NEW_LINE> class TestWriterManager: <NEW_LINE> <INDENT> @pytest.mark.parametrize('file_name,expected_class', [ ('test.csv', CSVWriter), ('test.xml', XMLWriter), ('test.csvvv', XMLWriter), ]) <NEW_LINE> def test_select_csv_writer(self, file_name, expected_class): <NEW_LINE> <INDENT> manager = WriterManager(file_name) <NEW_LINE> assert isinstance(manager.writer, expected_class) <NEW_LINE> <DEDENT> def test_skip_writing_when_no_data(self, mocker): <NEW_LINE> <INDENT> save_mck = mocker.patch('writer.CSVWriter.save') <NEW_LINE> manager = WriterManager('test.csv') <NEW_LINE> manager.export_data([]) <NEW_LINE> assert not save_mck.called <NEW_LINE> <DEDENT> @pytest.mark.parametrize('data', [ [1, 2, 3], (1, 2, 3), {1, 2, 3}, ]) <NEW_LINE> def test_writing_with_data(self, mocker, data): <NEW_LINE> <INDENT> save_mck = mocker.patch('writer.CSVWriter.save') <NEW_LINE> manager = WriterManager('test.csv') <NEW_LINE> manager.export_data(data) <NEW_LINE> assert save_mck.called <NEW_LINE> assert save_mck.call_args_list[0][0][0] == data <NEW_LINE> <DEDENT> def test_skip_writing_with_string(self, mocker): <NEW_LINE> <INDENT> save_mck = mocker.patch('writer.CSVWriter.save') <NEW_LINE> manager = WriterManager('test.csv') <NEW_LINE> with pytest.raises(AttributeError): <NEW_LINE> <INDENT> manager.export_data('1, 2, 3') <NEW_LINE> <DEDENT> assert not save_mck.called | Tests class for Writer Manager. | 62598fd1656771135c489ade |
class Operation(object): <NEW_LINE> <INDENT> def __init__(self, *args): <NEW_LINE> <INDENT> if len(args) < 1: <NEW_LINE> <INDENT> self.terms = [0]*2 <NEW_LINE> <DEDENT> elif type(args[0]) is list: <NEW_LINE> <INDENT> self.terms = args[0] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.terms = args <NEW_LINE> <DEDENT> <DEDENT> def operate(self): <NEW_LINE> <INDENT> raise NotImplementedError() | Abstract base operation class | 62598fd1377c676e912f6faf |
class InvoiceDetailViewTestCase(TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> user = User.objects.create(username="nerd", is_staff=True) <NEW_LINE> self.client = APIClient() <NEW_LINE> self.client.force_authenticate(user=user) <NEW_LINE> self.invoice_data = { "first_name": "Test name", "last_name": "Test Last Name", "street": "Street name, 33", "city": "City name", "country": "ES", "state": "State name", "zipcode": "ES46000", } <NEW_LINE> <DEDENT> def test_can_retrieve_invoice_data(self): <NEW_LINE> <INDENT> invoice = mixer.blend(Invoice) <NEW_LINE> self.response = self.client.get( reverse("invoice-detail", kwargs={"pk": invoice.pk}), format="json" ) <NEW_LINE> self.assertEqual(self.response.status_code, status.HTTP_200_OK) <NEW_LINE> <DEDENT> def test_can_delete_invoice(self): <NEW_LINE> <INDENT> invoice = mixer.blend(Invoice) <NEW_LINE> old_count = Invoice.objects.count() <NEW_LINE> self.response = self.client.delete( reverse("invoice-detail", kwargs={"pk": invoice.pk}), format="json" ) <NEW_LINE> new_count = Invoice.objects.count() <NEW_LINE> self.assertEqual(new_count, old_count - 1) <NEW_LINE> self.assertEqual(self.response.status_code, status.HTTP_204_NO_CONTENT) <NEW_LINE> <DEDENT> def test_can_update_invoice(self): <NEW_LINE> <INDENT> invoice = mixer.blend(Invoice) <NEW_LINE> self.response = self.client.put( reverse("invoice-detail", kwargs={"pk": invoice.pk}), self.invoice_data, format="json", ) <NEW_LINE> self.assertEqual(self.response.status_code, status.HTTP_200_OK) | Test suite for invoice detail view | 62598fd15fdd1c0f98e5e3fa |
class Node: <NEW_LINE> <INDENT> def __init__(self, val, l = None, r = None): <NEW_LINE> <INDENT> self.l_child = None <NEW_LINE> self.r_child = None <NEW_LINE> self.data = val | Constructor for Node class with attributes:
l_child = None
r_child = None
data = val | 62598fd13d592f4c4edbb323 |
class _DawgNode: <NEW_LINE> <INDENT> _nextid = 1 <NEW_LINE> @staticmethod <NEW_LINE> def stringify_edges(edges, arr): <NEW_LINE> <INDENT> for prefix, node in edges.items(): <NEW_LINE> <INDENT> arr.append(prefix + u':' + (u'0' if node is None else str(node.id))) <NEW_LINE> <DEDENT> return "_".join(arr) <NEW_LINE> <DEDENT> def __init__(self): <NEW_LINE> <INDENT> self.id = _DawgNode._nextid <NEW_LINE> _DawgNode._nextid += 1 <NEW_LINE> self.edges = dict() <NEW_LINE> self.final = False <NEW_LINE> self._strng = None <NEW_LINE> self._hash = None <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> if not self._strng: <NEW_LINE> <INDENT> arr = [] <NEW_LINE> if self.final: <NEW_LINE> <INDENT> arr.append("|") <NEW_LINE> <DEDENT> self._strng = _DawgNode.stringify_edges(self.edges, arr) <NEW_LINE> <DEDENT> return self._strng <NEW_LINE> <DEDENT> def __hash__(self): <NEW_LINE> <INDENT> if not self._hash: <NEW_LINE> <INDENT> self._hash = self.__str__().__hash__() <NEW_LINE> <DEDENT> return self._hash <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return self.__str__() == other.__str__() <NEW_LINE> <DEDENT> def reset_id(self, newid): <NEW_LINE> <INDENT> self.id = newid <NEW_LINE> self._strng = None <NEW_LINE> self._hash = None | A _DawgNode is a node in a Directed Acyclic Word Graph (DAWG).
It contains:
* a node identifier (a simple unique sequence number);
* a dictionary of edges (children) where each entry has a prefix
(following letter(s)) together with its child _DawgNode;
* and a Bool (final) indicating whether this node in the graph
also marks the end of a legal word.
A _DawgNode has a string representation which can be hashed to
determine whether it is identical to a previously encountered node,
i.e. whether it has the same final flag and the same edges with
prefixes leading to the same child nodes. This assumes
that the child nodes have already been subjected to the same
test, i.e. whether they are identical to previously encountered
nodes and, in that case, modified to point to the previous, identical
subgraph. Each graph layer can thus depend on the (shallow) comparisons
made in previous layers and deep comparisons are not necessary. This
is an important optimization when building the graph. | 62598fd14a966d76dd5ef346 |
class Replacement(Fitness): <NEW_LINE> <INDENT> def __init__(self, fitness_list): <NEW_LINE> <INDENT> Fitness.__init__(self, fitness_list) <NEW_LINE> self._replacement_count = 0 | This is the base class for the classes that identify which members are to
be replaced. It is basically the same as a fitness class, but attempts to
identify the worst, not the best. | 62598fd1283ffb24f3cf3cf4 |
class Dom0BridgePIFParamTestClass2(Dom0BridgePIFParamTestClass1): <NEW_LINE> <INDENT> caps = [] <NEW_LINE> required = False <NEW_LINE> OFFLOAD_CONFIG = {'sg': 'on', 'tso': 'on', 'gso': 'on', 'gro': 'off', 'lro': 'off', 'rxvlan': 'on', 'txvlan': 'on'} | A class for Dom0 - VM PIF param testing | 62598fd150812a4eaa620e1b |
class Product(VariadicExpression): <NEW_LINE> <INDENT> return_types = frozenset(["int", "long", "float", "complex"]) | Arithmetic * (variadic). | 62598fd1adb09d7d5dc0a9e9 |
class HcronTreeCache: <NEW_LINE> <INDENT> def __init__(self, username, ignore_match_fn=None): <NEW_LINE> <INDENT> def false_match(*args): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> self.username = username <NEW_LINE> self.ignore_match_fn = ignore_match_fn or false_match <NEW_LINE> self.path = get_hcron_tree_filename(username, HOST_NAME) <NEW_LINE> self.ignored = {} <NEW_LINE> self.cache = {} <NEW_LINE> self.load() <NEW_LINE> <DEDENT> def load(self): <NEW_LINE> <INDENT> f = tarfile.open(self.path) <NEW_LINE> ignored = {} <NEW_LINE> link_cache = {} <NEW_LINE> cache = {} <NEW_LINE> for m in f.getmembers(): <NEW_LINE> <INDENT> if m.name.startswith("events/"): <NEW_LINE> <INDENT> name = os.path.normpath(m.name) <NEW_LINE> basename = os.path.basename(name) <NEW_LINE> dirname = os.path.dirname(name) <NEW_LINE> if self.ignore_match_fn(basename) or dirname in ignored: <NEW_LINE> <INDENT> ignored[name] = None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if m.issym(): <NEW_LINE> <INDENT> link_cache[m.name] = self.resolve_symlink(m.name, m.linkname) <NEW_LINE> <DEDENT> elif m.isfile(): <NEW_LINE> <INDENT> cache[m.name] = f.extractfile(m).read() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> cache[m.name] = None <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> f.close() <NEW_LINE> for name, linkname in link_cache.items(): <NEW_LINE> <INDENT> for _ in xrange(10): <NEW_LINE> <INDENT> if linkname in cache: <NEW_LINE> <INDENT> cache[name] = cache[linkname] <NEW_LINE> break <NEW_LINE> <DEDENT> elif linkname in link_cache: <NEW_LINE> <INDENT> linkname = resolve_symlink(linkname, link_cache[linkname]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> for name in cache.keys(): <NEW_LINE> <INDENT> if cache[name] == None: <NEW_LINE> <INDENT> del cache[name] <NEW_LINE> <DEDENT> <DEDENT> self.cache = cache <NEW_LINE> self.ignored = ignored <NEW_LINE> <DEDENT> def resolve_symlink(self, name, linkname): <NEW_LINE> <INDENT> if 
linkname.startswith("/"): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return os.path.normpath(os.path.dirname(name)+"/"+linkname) <NEW_LINE> <DEDENT> <DEDENT> def get_event_contents(self, name): <NEW_LINE> <INDENT> if name.startswith("/"): <NEW_LINE> <INDENT> return self.get_contents(os.path.normpath("events/"+name)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> <DEDENT> def get_include_contents(self, name): <NEW_LINE> <INDENT> if name.startswith("/"): <NEW_LINE> <INDENT> st = self.get_contents(os.path.normpath("includes/"+name)) <NEW_LINE> if st == None: <NEW_LINE> <INDENT> st = self.get_contents(os.path.normpath("events/"+name)) <NEW_LINE> <DEDENT> return st <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> <DEDENT> def is_ignored_event(self, name): <NEW_LINE> <INDENT> return os.path.normpath("events/"+name) in self.ignored <NEW_LINE> <DEDENT> def get_event_names(self): <NEW_LINE> <INDENT> names = [] <NEW_LINE> for name in self.cache.keys(): <NEW_LINE> <INDENT> if name.startswith("events/"): <NEW_LINE> <INDENT> names.append(name[6:]) <NEW_LINE> <DEDENT> <DEDENT> return names <NEW_LINE> <DEDENT> def get_names(self): <NEW_LINE> <INDENT> return self.cache.keys() <NEW_LINE> <DEDENT> def get_contents(self, tree_path): <NEW_LINE> <INDENT> return self.cache.get(tree_path) | Interface to packaged hcron tree file containing members as:
events/... | 62598fd17b180e01f3e49287 |
class WorkflowListResponseSchema(BaseSchema): <NEW_LINE> <INDENT> id = fields.Integer(required=True) <NEW_LINE> name = fields.String(required=True) <NEW_LINE> description = fields.String(required=False, allow_none=True) <NEW_LINE> enabled = fields.Boolean( required=False, allow_none=True, missing=True, default=True) <NEW_LINE> created = fields.DateTime( required=False, allow_none=True, missing=datetime.datetime.utcnow, default=datetime.datetime.utcnow) <NEW_LINE> updated = fields.DateTime( required=False, allow_none=True, missing=datetime.datetime.utcnow, default=datetime.datetime.utcnow) <NEW_LINE> version = fields.Integer(required=True) <NEW_LINE> image = fields.String(required=False, allow_none=True) <NEW_LINE> is_template = fields.Boolean( required=False, allow_none=True, missing=False, default=False) <NEW_LINE> is_system_template = fields.Boolean( required=False, allow_none=True, missing=False, default=False) <NEW_LINE> is_public = fields.Boolean( required=False, allow_none=True, missing=False, default=False) <NEW_LINE> deployment_enabled = fields.Boolean( required=False, allow_none=True, missing=False, default=False) <NEW_LINE> publishing_enabled = fields.Boolean( required=False, allow_none=True, missing=False, default=False) <NEW_LINE> publishing_status = fields.String(required=False, allow_none=True, validate=[OneOf(list(PublishingStatus.__dict__.keys()))]) <NEW_LINE> type = fields.String(required=False, allow_none=True, missing=WorkflowType.WORKFLOW, default=WorkflowType.WORKFLOW, validate=[OneOf(list(WorkflowType.__dict__.keys()))]) <NEW_LINE> preferred_cluster_id = fields.Integer(required=False, allow_none=True) <NEW_LINE> tasks = fields.Nested( 'tahiti.schema.TaskListResponseSchema', allow_none=True, many=True) <NEW_LINE> flows = fields.Nested( 'tahiti.schema.FlowListResponseSchema', allow_none=True, many=True) <NEW_LINE> variables = fields.Nested( 'tahiti.schema.WorkflowVariableListResponseSchema', allow_none=True, many=True) <NEW_LINE> platform = 
fields.Nested( 'tahiti.schema.PlatformListResponseSchema', required=True) <NEW_LINE> subset = fields.Nested( 'tahiti.schema.OperationSubsetListResponseSchema', allow_none=True) <NEW_LINE> user = fields.Function( lambda x: { "id": x.user_id, "name": x.user_name, "login": x.user_login}) <NEW_LINE> @post_load <NEW_LINE> def make_object(self, data, **kwargs): <NEW_LINE> <INDENT> return Workflow(**data) <NEW_LINE> <DEDENT> class Meta: <NEW_LINE> <INDENT> ordered = True <NEW_LINE> unknown = EXCLUDE | JSON serialization schema | 62598fd1fbf16365ca79452c |
class _RebarGroup: <NEW_LINE> <INDENT> def __init__(self, obj_name): <NEW_LINE> <INDENT> self.Type = "RebarGroup" <NEW_LINE> self.rebar_group = FreeCAD.ActiveDocument.addObject( "App::DocumentObjectGroupPython", obj_name ) <NEW_LINE> self.ties_group = self.rebar_group.newObject( "App::DocumentObjectGroupPython", "Ties" ) <NEW_LINE> self.main_rebars_group = self.rebar_group.newObject( "App::DocumentObjectGroupPython", "MainRebars" ) <NEW_LINE> properties = [ ("App::PropertyLinkList", "RebarGroups", "List of rebar groups", 1) ] <NEW_LINE> setGroupProperties(properties, self.rebar_group) <NEW_LINE> self.rebar_group.RebarGroups = [self.ties_group, self.main_rebars_group] <NEW_LINE> self.Object = self.rebar_group <NEW_LINE> <DEDENT> def execute(self, obj): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def addTies(self, ties_list): <NEW_LINE> <INDENT> if isinstance(ties_list, list): <NEW_LINE> <INDENT> self.ties_group.addObjects(ties_list) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.ties_group.addObject(ties_list) <NEW_LINE> ties_list = [ties_list] <NEW_LINE> <DEDENT> prev_ties_list = self.ties_group.Ties <NEW_LINE> prev_ties_list.extend(ties_list) <NEW_LINE> self.ties_group.Ties = prev_ties_list <NEW_LINE> <DEDENT> def addMainRebars(self, main_rebars_list): <NEW_LINE> <INDENT> self.main_rebars_group.addObjects(main_rebars_list) <NEW_LINE> prev_main_rebars_list = self.main_rebars_group.MainRebars <NEW_LINE> main_rebars_list.extend(prev_main_rebars_list) <NEW_LINE> self.main_rebars_group.MainRebars = main_rebars_list | A Rebar Group object. | 62598fd1956e5f7376df58b5 |
class RecordNotFoundError(Error): <NEW_LINE> <INDENT> pass | %s with uid=%d not found | 62598fd17cff6e4e811b5e98 |
class LocationFilterIDL(object): <NEW_LINE> <INDENT> thrift_spec = (None, (1, TType.I32, 'subtype', None, None)) <NEW_LINE> def __init__(self, subtype = None): <NEW_LINE> <INDENT> self.subtype = subtype <NEW_LINE> <DEDENT> def read(self, iprot): <NEW_LINE> <INDENT> if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None: <NEW_LINE> <INDENT> fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec)) <NEW_LINE> return <NEW_LINE> <DEDENT> iprot.readStructBegin() <NEW_LINE> while True: <NEW_LINE> <INDENT> (fname, ftype, fid,) = iprot.readFieldBegin() <NEW_LINE> if ftype == TType.STOP: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> if fid == 1: <NEW_LINE> <INDENT> if ftype == TType.I32: <NEW_LINE> <INDENT> self.subtype = iprot.readI32() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> iprot.readFieldEnd() <NEW_LINE> <DEDENT> iprot.readStructEnd() <NEW_LINE> <DEDENT> def write(self, oprot): <NEW_LINE> <INDENT> if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None: <NEW_LINE> <INDENT> oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))) <NEW_LINE> return <NEW_LINE> <DEDENT> oprot.writeStructBegin('LocationFilterIDL') <NEW_LINE> if self.subtype != None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('subtype', TType.I32, 1) <NEW_LINE> oprot.writeI32(self.subtype) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> oprot.writeFieldStop() <NEW_LINE> oprot.writeStructEnd() <NEW_LINE> def validate(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> L = [ '%s=%r' % (key, value) for (key, value,) in self.__dict__.iteritems() ] <NEW_LINE> return '%s(%s)' % (self.__class__.__name__, 
', '.join(L)) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other | Attributes:
- subtype | 62598fd10fa83653e46f5358 |
class RouterWebServiceReverseWeb(RouterWebService): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def create(transport, path, config): <NEW_LINE> <INDENT> personality = transport.worker.personality <NEW_LINE> personality.WEB_SERVICE_CHECKERS['reverseproxy'](personality, config) <NEW_LINE> host = config['host'] <NEW_LINE> port = int(config.get('port', 80)) <NEW_LINE> base_path = config.get('path', '').encode('utf-8') <NEW_LINE> forwarded_port = int(config.get('forwarded_port', 80)) <NEW_LINE> forwarded_proto = config.get('forwarded_proto', 'http').encode('ascii') <NEW_LINE> resource = ExtReverseProxyResource(host, port, base_path, forwarded_port=forwarded_port, forwarded_proto=forwarded_proto) <NEW_LINE> if path == '/': <NEW_LINE> <INDENT> resource = RootResource(resource, {}) <NEW_LINE> <DEDENT> return RouterWebServiceReverseWeb(transport, base_path, config, resource) | Reverse Web proxy service. | 62598fd1dc8b845886d53a2e |
class Tcp(UpdateMonitorMixin, Resource): <NEW_LINE> <INDENT> def __init__(self, tcps): <NEW_LINE> <INDENT> super(Tcp, self).__init__(tcps) <NEW_LINE> self._meta_data['required_json_kind'] = 'tm:ltm:monitor:tcp:tcpstate' | BIG-IP® Tcp monitor resource. | 62598fd13617ad0b5ee065b9 |
class BrotabClient: <NEW_LINE> <INDENT> clients = {} <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @cached(ttl=15) <NEW_LINE> def is_installed(self): <NEW_LINE> <INDENT> result = subprocess.run(['which', 'brotab']) <NEW_LINE> if result.returncode == 0: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return False <NEW_LINE> <DEDENT> def index_clients(self): <NEW_LINE> <INDENT> self.clients = {} <NEW_LINE> clients = return_clients() <NEW_LINE> for client in clients: <NEW_LINE> <INDENT> self.clients[client.__dict__["_prefix"].replace(".", "")] = client.__dict__["_browser"] <NEW_LINE> <DEDENT> <DEDENT> @cached(ttl=5) <NEW_LINE> def fetch_tabs(self): <NEW_LINE> <INDENT> logger.info("Fetching tabs") <NEW_LINE> self.index_clients() <NEW_LINE> loop = new_event_loop() <NEW_LINE> set_event_loop(loop) <NEW_LINE> tabs_listed = return_tabs() <NEW_LINE> tabs = [] <NEW_LINE> for tab in tabs_listed: <NEW_LINE> <INDENT> tab = tab.split("\t") <NEW_LINE> tabs.append({ "prefix": tab[0], "name": tab[1], "url": tab[2], "icon": self.get_browser_icon_from_prefix(tab[0][:1]) }) <NEW_LINE> <DEDENT> return tabs <NEW_LINE> <DEDENT> def search_tabs(self, filter_term=None): <NEW_LINE> <INDENT> allTabs = self.fetch_tabs() <NEW_LINE> if not filter_term: <NEW_LINE> <INDENT> return allTabs <NEW_LINE> <DEDENT> tabs = [] <NEW_LINE> for tab in allTabs: <NEW_LINE> <INDENT> if filter_term.lower() in tab["name"].lower() or filter_term.lower() in tab["url"].lower(): <NEW_LINE> <INDENT> tabs.append(tab) <NEW_LINE> <DEDENT> <DEDENT> return tabs <NEW_LINE> <DEDENT> def activate_tab(self, prefix): <NEW_LINE> <INDENT> activate_tab(prefix) <NEW_LINE> <DEDENT> def close_tab(self, prefix): <NEW_LINE> <INDENT> close_tab(prefix) <NEW_LINE> <DEDENT> def get_browser_icon_from_prefix(self, prefix): <NEW_LINE> <INDENT> if prefix not in self.clients.keys(): <NEW_LINE> <INDENT> return 'icon.png' <NEW_LINE> <DEDENT> client = self.clients.get(prefix) <NEW_LINE> if "chrome" in client: 
<NEW_LINE> <INDENT> return 'icon-chrome.png' <NEW_LINE> <DEDENT> if "firefox" in client: <NEW_LINE> <INDENT> return "icon-firefox.png" <NEW_LINE> <DEDENT> if "brave" in client: <NEW_LINE> <INDENT> return "icon-brave.png" <NEW_LINE> <DEDENT> return "icon.png" | Client to interact with Brotab Command line tool | 62598fd1fbf16365ca79452e |
class EventViewTabs(renderers.TabLayout): <NEW_LINE> <INDENT> event_queue = "event_select" <NEW_LINE> names = ["Event", "Subject"] <NEW_LINE> delegated_renderers = ["EventView", "EventSubjectView"] <NEW_LINE> def Layout(self, request, response): <NEW_LINE> <INDENT> self.state["container"] = request.REQ.get("container") <NEW_LINE> self.state["event"] = request.REQ.get("event") <NEW_LINE> response = super(EventViewTabs, self).Layout(request, response) <NEW_LINE> return self.CallJavascript(response, "EventViewTabs.Layout", event_queue=self.event_queue, container=self.state["container"], renderer=self.__class__.__name__) | Show tabs to allow inspection of the event.
Listening Javascript Events:
- event_select(event_id): Indicates the user has selected this event in the
table, we re-render ourselves with the new event_id.
Post Parameters:
- container: The container name for the timeline.
- event: The event id within the timeseries container to render. | 62598fd155399d3f0562698c |
class MENUITEMINFOW(common.MayhemStructure): <NEW_LINE> <INDENT> _fields_ = [ ('cbSize', ctypes.c_uint), ('fMask', ctypes.c_uint), ('fType', ctypes.c_uint), ('fState', ctypes.c_uint), ('wID', ctypes.c_uint), ('hSubMenu', HANDLE), ('hbmpChecked', HANDLE), ('hbmpUnchecked', HANDLE), ('dwItemData', ctypes.POINTER(ctypes.c_ulong)), ('dwTypeData', ctypes.c_wchar_p), ('cch', ctypes.c_uint), ('hbmpItem', HANDLE), ] | see:
https://msdn.microsoft.com/en-us/library/windows/desktop/ms647578(v=vs.85).aspx | 62598fd1656771135c489ae2 |
class ActionLog(BASE): <NEW_LINE> <INDENT> __tablename__ = 'action_log' <NEW_LINE> id = Column(Integer, primary_key=True) <NEW_LINE> timestamp = Column(Interval, index=True) <NEW_LINE> match_id = Column(Integer, ForeignKey('matches.id', ondelete='cascade'), index=True) <NEW_LINE> match = relationship('Match', foreign_keys=[match_id]) <NEW_LINE> player_number = Column(Integer, index=True) <NEW_LINE> action_id = Column(Integer, ForeignKey('actions.id'), index=True) <NEW_LINE> action = relationship('Action', foreign_keys=[action_id], viewonly=True) <NEW_LINE> action_x = Column(Float) <NEW_LINE> action_y = Column(Float) | Action log. | 62598fd1956e5f7376df58b6 |
class Authentication(object): <NEW_LINE> <INDENT> def __init__(self, headers): <NEW_LINE> <INDENT> self.username = None <NEW_LINE> self.password = None <NEW_LINE> auth_schemes = {b"Basic": self.decode_basic} <NEW_LINE> if "authorization" in headers: <NEW_LINE> <INDENT> header = headers.get("authorization") <NEW_LINE> assert isinstance(header, binary_type) <NEW_LINE> auth_type, data = header.split(b" ", 1) <NEW_LINE> if auth_type in auth_schemes: <NEW_LINE> <INDENT> self.username, self.password = auth_schemes[auth_type](data) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise HTTPException(400, "Unsupported authentication scheme %s" % auth_type) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def decode_basic(self, data): <NEW_LINE> <INDENT> assert isinstance(data, binary_type) <NEW_LINE> decoded_data = base64.b64decode(data) <NEW_LINE> return decoded_data.split(b":", 1) | Object for dealing with HTTP Authentication
.. attribute:: username
The username supplied in the HTTP Authorization
header, or None
.. attribute:: password
The password supplied in the HTTP Authorization
header, or None
Both attributes are binary strings (`str` in Py2, `bytes` in Py3), since
RFC7617 Section 2.1 does not specify the encoding for username & password
(as long it's compatible with ASCII). UTF-8 should be a relatively safe
choice if callers need to decode them as most browsers use it. | 62598fd1a05bb46b3848acdd |
class Subject(): <NEW_LINE> <INDENT> def __init__(self, code, name, weight, grade): <NEW_LINE> <INDENT> self.code = code <NEW_LINE> self.name = name <NEW_LINE> self.weight = weight <NEW_LINE> self.grade = grade <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return self.code == other.code and self.name == other.name and self.weight == other.weight and self.grade == other.grade | Store code, name, weight and grade for subject | 62598fd1bf627c535bcb191f |
class Organization(Location): <NEW_LINE> <INDENT> __tablename__ = _TN <NEW_LINE> __mapper_args__ = {'polymorphic_identity': _TN} <NEW_LINE> valid_parents = [] <NEW_LINE> id = Column(ForeignKey(Location.id, ondelete='CASCADE'), primary_key=True) <NEW_LINE> __table_args__ = ({'info': {'unique_fields': ['name']}},) | Organization is a subtype of location | 62598fd1099cdd3c63675618 |
class BaseLayout(object): <NEW_LINE> <INDENT> layout = None <NEW_LINE> @classmethod <NEW_LINE> def create(cls, source_low, source_up, dest_low, dest_up): <NEW_LINE> <INDENT> source_dict = {} <NEW_LINE> source = source_low + source_up <NEW_LINE> dest_dict = {} <NEW_LINE> dest = dest_low + dest_up <NEW_LINE> for index, char in enumerate(source): <NEW_LINE> <INDENT> source_dict[char] = dest[index] <NEW_LINE> <DEDENT> for index, char in enumerate(dest): <NEW_LINE> <INDENT> dest_dict[char] = source[index] <NEW_LINE> <DEDENT> return { 'source_dict': source_dict, 'source_set': set(source), 'dest_dict': dest_dict, 'dest_set': set(dest), } <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def convert_term(cls, term): <NEW_LINE> <INDENT> if not cls.layout: <NEW_LINE> <INDENT> return term <NEW_LINE> <DEDENT> layout_dict = cls.detect_layout_dict(term) <NEW_LINE> return reduce(lambda a, b: a + layout_dict.get(b, b), term, '') <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def detect_layout_dict(cls, term): <NEW_LINE> <INDENT> source_len = len(set(term) & cls.layout['source_set']) <NEW_LINE> dest_len = len(set(term) & cls.layout['dest_set']) <NEW_LINE> name = 'source_dict' if source_len > dest_len else 'dest_dict' <NEW_LINE> return cls.layout[name] | Base class for keyboard layout. | 62598fd1283ffb24f3cf3cf6 |
class GomaOAuth2Config(dict): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> dict.__init__(self) <NEW_LINE> self._path = self._GetLocation() <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def _GetLocation(): <NEW_LINE> <INDENT> env_name = 'GOMA_OAUTH2_CONFIG_FILE' <NEW_LINE> env = os.environ.get(env_name) <NEW_LINE> if env: <NEW_LINE> <INDENT> return env <NEW_LINE> <DEDENT> homedir = os.path.expanduser('~') <NEW_LINE> if homedir == '~': <NEW_LINE> <INDENT> raise Error('Cannot find user\'s home directory.') <NEW_LINE> <DEDENT> return os.path.join(homedir, DEFAULT_GOMA_OAUTH2_CONFIG_FILE_NAME) <NEW_LINE> <DEDENT> def Load(self): <NEW_LINE> <INDENT> if not os.path.exists(self._path): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> with open(self._path) as f: <NEW_LINE> <INDENT> self.update(json.load(f)) <NEW_LINE> <DEDENT> <DEDENT> except ValueError: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> if not self.get('refresh_token'): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return True <NEW_LINE> <DEDENT> def Save(self): <NEW_LINE> <INDENT> with open(self._path, 'wb') as f: <NEW_LINE> <INDENT> if os.name == 'posix': <NEW_LINE> <INDENT> os.fchmod(f.fileno(), 0o600) <NEW_LINE> <DEDENT> json.dump(self, f) <NEW_LINE> <DEDENT> <DEDENT> def Delete(self): <NEW_LINE> <INDENT> if not os.path.exists(self._path): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> os.remove(self._path) | File-backed OAuth2 configuration. | 62598fd155399d3f0562698d |
class CheckListFormTestCase(SimpleTestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> super().setUp() <NEW_LINE> self.request = mock.MagicMock( user=mock.MagicMock( token=generate_tokens(), ), ) <NEW_LINE> <DEDENT> def test_get_object_list(self): <NEW_LINE> <INDENT> with responses.RequestsMock() as rsps: <NEW_LINE> <INDENT> rsps.add( rsps.GET, api_url('/security/checks/'), json={ 'count': 0, 'results': [], }, ) <NEW_LINE> form = CheckListForm( self.request, data={ 'page': 2, }, ) <NEW_LINE> self.assertTrue(form.is_valid()) <NEW_LINE> self.assertListEqual(form.get_object_list(), []) <NEW_LINE> api_call_made = rsps.calls[-1].request.url <NEW_LINE> self.assertDictEqual( parse_qs(api_call_made.split('?', 1)[1]), { 'offset': ['20'], 'limit': ['20'], 'status': ['pending'], 'credit_resolution': ['initial'], }, ) | Tests related to the CheckListForm. | 62598fd1cc40096d6161a411 |
class Maybe(object): <NEW_LINE> <INDENT> def __init__(self, value): <NEW_LINE> <INDENT> if isinstance(value, Maybe): <NEW_LINE> <INDENT> self.__value = value.get() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.__value = value <NEW_LINE> <DEDENT> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return str(self) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> value = self.__value <NEW_LINE> if isinstance(value, str): <NEW_LINE> <INDENT> value = "'{}'".format(re.escape(value)) <NEW_LINE> <DEDENT> return '{}?'.format(value) <NEW_LINE> <DEDENT> def __add__(self, other): <NEW_LINE> <INDENT> return self.__operation(operator.__add__, other) <NEW_LINE> <DEDENT> def __sub__(self, other): <NEW_LINE> <INDENT> return self.__operation(operator.__sub__, other) <NEW_LINE> <DEDENT> def __div__(self, other): <NEW_LINE> <INDENT> return self.__operation(operator.__div__, other) <NEW_LINE> <DEDENT> def __mul__(self, other): <NEW_LINE> <INDENT> return self.__operation(operator.__mul__, other) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if isinstance(other, Maybe): <NEW_LINE> <INDENT> return self.__value == other.__value <NEW_LINE> <DEDENT> return self.__value == other <NEW_LINE> <DEDENT> def __operation(self, operator_, other): <NEW_LINE> <INDENT> if not self.exists(): <NEW_LINE> <INDENT> return Nothing() <NEW_LINE> <DEDENT> if type(other) != Maybe: <NEW_LINE> <INDENT> if other is None: <NEW_LINE> <INDENT> return Nothing() <NEW_LINE> <DEDENT> return Maybe(operator_(self.get(), other)) <NEW_LINE> <DEDENT> if not other.exists(): <NEW_LINE> <INDENT> return Nothing() <NEW_LINE> <DEDENT> return Maybe(operator_(self.get(), other.get())) <NEW_LINE> <DEDENT> def get(self): <NEW_LINE> <INDENT> if self.exists(): <NEW_LINE> <INDENT> return self.__value <NEW_LINE> <DEDENT> raise ValueError() <NEW_LINE> <DEDENT> def exists(self): <NEW_LINE> <INDENT> return self.__value is not None <NEW_LINE> <DEDENT> def or_(self, replacement): <NEW_LINE> <INDENT> return 
self.__value if self.exists() else replacement | Maybe object
>>> x_ = Maybe(123)
>>> y_ = Maybe(None)
>>> z_ = Maybe(['A', 'B', 'C'])
>>> x_, y_, z_
(123?, None?, ['A', 'B', 'C']?)
>>> x_.get()
123
>>> y_.get()
Traceback (most recent call last):
...
ValueError
>>> x_.exists(), y_.exists(), z_.exists()
(True, False, True)
>>> x_.or_('ABC'), y_.or_('ABC'), z_.or_('ABC')
(123, 'ABC', ['A', 'B', 'C'])
>>> x_ == y_
False
>>> x_ == Maybe(123)
True
>>> x_ == Maybe('123')
False
Operators
>>> x_ = Maybe(2)
>>> y_ = Maybe(3)
>>> z_ = Maybe(None)
>>> x_ + y_
5?
>>> x_ - y_
-1?
>>> x_ + y_ + z_
<Nothing>
>>> (x_ + y_ + z_).get()
Traceback (most recent call last):
...
ValueError
>>> (x_ + y_ + z_).or_(100)
100
>>> x_ + 100
102?
>>> x_ + None
<Nothing>
:param object value: | 62598fd1bf627c535bcb1921 |
class CharIconPainter: <NEW_LINE> <INDENT> def paint(self, iconic, painter, rect, mode, state, options): <NEW_LINE> <INDENT> for opt in options: <NEW_LINE> <INDENT> self._paint_icon(iconic, painter, rect, mode, state, opt) <NEW_LINE> <DEDENT> <DEDENT> def _paint_icon(self, iconic, painter, rect, mode, state, options): <NEW_LINE> <INDENT> painter.save() <NEW_LINE> color, char = options['color'], options['char'] <NEW_LINE> if mode == QIcon.Disabled: <NEW_LINE> <INDENT> color = options.get('color_disabled', color) <NEW_LINE> char = options.get('disabled', char) <NEW_LINE> <DEDENT> elif mode == QIcon.Active: <NEW_LINE> <INDENT> color = options.get('color_active', color) <NEW_LINE> char = options.get('active', char) <NEW_LINE> <DEDENT> elif mode == QIcon.Selected: <NEW_LINE> <INDENT> color = options.get('color_selected', color) <NEW_LINE> char = options.get('selected', char) <NEW_LINE> <DEDENT> painter.setPen(QColor(color)) <NEW_LINE> draw_size = 0.875 * qRound(rect.height() * options['scale_factor']) <NEW_LINE> prefix = options['prefix'] <NEW_LINE> animation = options.get('animation') <NEW_LINE> if animation is not None: <NEW_LINE> <INDENT> animation.setup(self, painter, rect) <NEW_LINE> <DEDENT> painter.setFont(iconic.font(prefix, draw_size)) <NEW_LINE> if 'offset' in options: <NEW_LINE> <INDENT> rect = QRect(rect) <NEW_LINE> rect.translate(options['offset'][0] * rect.width(), options['offset'][1] * rect.height()) <NEW_LINE> <DEDENT> painter.setOpacity(options.get('opacity', 1.0)) <NEW_LINE> painter.drawText(rect, Qt.AlignCenter | Qt.AlignVCenter, char) <NEW_LINE> painter.restore() | Char icon painter | 62598fd150812a4eaa620e1e |
class ConvertPointsFromHomogeneous(nn.Module): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(ConvertPointsFromHomogeneous, self).__init__() <NEW_LINE> <DEDENT> def forward(self, input): <NEW_LINE> <INDENT> return convert_points_from_homogeneous(input) | Creates a transformation that converts points from homogeneous to
Euclidean space.
Args:
points (Tensor): tensor of N-dimensional points.
Returns:
Tensor: tensor of N-1-dimensional points.
Shape:
- Input: :math:`(B, D, N)` or :math:`(D, N)`
- Output: :math:`(B, D, N + 1)` or :math:`(D, N + 1)`
Examples::
>>> input = torch.rand(2, 4, 3) # BxNx3
>>> transform = tgm.ConvertPointsFromHomogeneous()
>>> output = transform(input) # BxNx2 | 62598fd1656771135c489ae6 |
class UserNotFound(WordPressORMException): <NEW_LINE> <INDENT> pass | WordPress user not found. | 62598fd17cff6e4e811b5e9e |
@python_2_unicode_compatible <NEW_LINE> class release_raw(models.Model): <NEW_LINE> <INDENT> id = models.AutoField(primary_key=True) <NEW_LINE> title = models.CharField(max_length=255) <NEW_LINE> artist = models.CharField(max_length=255) <NEW_LINE> added = models.DateTimeField(auto_now=True) <NEW_LINE> last_modified = models.DateTimeField(auto_now=True) <NEW_LINE> lookup_count = models.IntegerField(default=0) <NEW_LINE> modify_count = models.IntegerField(default=0) <NEW_LINE> source = models.IntegerField(default=0) <NEW_LINE> barcode = models.CharField(max_length=255, null=True) <NEW_LINE> comment = models.CharField(max_length=255, default='') <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return self.title <NEW_LINE> <DEDENT> class Meta: <NEW_LINE> <INDENT> db_table = 'release_raw' | Not all parameters are listed here, only those that present some interest
in their Django implementation. | 62598fd1dc8b845886d53a34 |
class Bib82x(db.Model): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> __tablename__ = 'bib82x' <NEW_LINE> id = db.Column(db.MediumInteger(8, unsigned=True), primary_key=True, autoincrement=True) <NEW_LINE> tag = db.Column(db.String(6), nullable=False, index=True, server_default='') <NEW_LINE> value = db.Column(db.Text(35), nullable=False, index=True) | Represents a Bib82x record. | 62598fd1ab23a570cc2d4fa9 |
class BrowseResponse(FrozenClass): <NEW_LINE> <INDENT> def __init__(self, binary=None): <NEW_LINE> <INDENT> if binary is not None: <NEW_LINE> <INDENT> self._binary_init(binary) <NEW_LINE> self._freeze = True <NEW_LINE> return <NEW_LINE> <DEDENT> self.TypeId = FourByteNodeId(ObjectIds.BrowseResponse_Encoding_DefaultBinary) <NEW_LINE> self.ResponseHeader = ResponseHeader() <NEW_LINE> self.Results = [] <NEW_LINE> self.DiagnosticInfos = [] <NEW_LINE> self._freeze = True <NEW_LINE> <DEDENT> def to_binary(self): <NEW_LINE> <INDENT> packet = [] <NEW_LINE> packet.append(self.TypeId.to_binary()) <NEW_LINE> packet.append(self.ResponseHeader.to_binary()) <NEW_LINE> packet.append(uatype_Int32.pack(len(self.Results))) <NEW_LINE> for fieldname in self.Results: <NEW_LINE> <INDENT> packet.append(fieldname.to_binary()) <NEW_LINE> <DEDENT> packet.append(uatype_Int32.pack(len(self.DiagnosticInfos))) <NEW_LINE> for fieldname in self.DiagnosticInfos: <NEW_LINE> <INDENT> packet.append(fieldname.to_binary()) <NEW_LINE> <DEDENT> return b''.join(packet) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def from_binary(data): <NEW_LINE> <INDENT> return BrowseResponse(data) <NEW_LINE> <DEDENT> def _binary_init(self, data): <NEW_LINE> <INDENT> self.TypeId = NodeId.from_binary(data) <NEW_LINE> self.ResponseHeader = ResponseHeader.from_binary(data) <NEW_LINE> length = uatype_Int32.unpack(data.read(4))[0] <NEW_LINE> array = [] <NEW_LINE> if length != -1: <NEW_LINE> <INDENT> for _ in range(0, length): <NEW_LINE> <INDENT> array.append(BrowseResult.from_binary(data)) <NEW_LINE> <DEDENT> <DEDENT> self.Results = array <NEW_LINE> length = uatype_Int32.unpack(data.read(4))[0] <NEW_LINE> array = [] <NEW_LINE> if length != -1: <NEW_LINE> <INDENT> for _ in range(0, length): <NEW_LINE> <INDENT> array.append(DiagnosticInfo.from_binary(data)) <NEW_LINE> <DEDENT> <DEDENT> self.DiagnosticInfos = array <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return 'BrowseResponse(' + 'TypeId:' + str(self.TypeId) 
+ ', ' + 'ResponseHeader:' + str(self.ResponseHeader) + ', ' + 'Results:' + str(self.Results) + ', ' + 'DiagnosticInfos:' + str(self.DiagnosticInfos) + ')' <NEW_LINE> <DEDENT> __repr__ = __str__ | Browse the references for one or more nodes from the server address space.
:ivar TypeId:
:vartype TypeId: NodeId
:ivar ResponseHeader:
:vartype ResponseHeader: ResponseHeader
:ivar Results:
:vartype Results: BrowseResult
:ivar DiagnosticInfos:
:vartype DiagnosticInfos: DiagnosticInfo | 62598fd10fa83653e46f535f |
class Issue(object): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> for arg in args: <NEW_LINE> <INDENT> if isinstance(arg, dict): <NEW_LINE> <INDENT> for name, value in arg.items(): <NEW_LINE> <INDENT> setattr(self, name, value) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> for name, value in kwargs.items(): <NEW_LINE> <INDENT> setattr(self, name, value) <NEW_LINE> <DEDENT> if not hasattr(self, 'date'): <NEW_LINE> <INDENT> setattr(self, 'date', IssueDate()) <NEW_LINE> <DEDENT> <DEDENT> def __setattr__(self, name, value): <NEW_LINE> <INDENT> if name == '_id': <NEW_LINE> <INDENT> object.__setattr__(self, 'id', value) <NEW_LINE> <DEDENT> elif name == 'user' and isinstance(value, dict): <NEW_LINE> <INDENT> object.__setattr__(self, name, IssueUser(**value)) <NEW_LINE> <DEDENT> elif name == 'date' and isinstance(value, dict): <NEW_LINE> <INDENT> object.__setattr__(self, name, IssueDate(**value)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> object.__setattr__(self, name, value) <NEW_LINE> <DEDENT> <DEDENT> def __dir__(self): <NEW_LINE> <INDENT> return [ 'id', 'subject', 'user', 'status', 'tag', 'date', 'field' ] | A generic issue | 62598fd13d592f4c4edbb32e |
class TestClient(RpcClient): <NEW_LINE> <INDENT> def __init__(self, req_ip, req_port, sub_addr, sub_port): <NEW_LINE> <INDENT> super(TestClient, self).__init__(req_ip, req_port, sub_addr, sub_port) <NEW_LINE> <DEDENT> def callback(self, topic, data): <NEW_LINE> <INDENT> print('client received topic:', topic, ', data:', data) | RPC测试客户端 | 62598fd160cbc95b063647b5 |
class InstanceCreateStart(EventBaseModel): <NEW_LINE> <INDENT> kwarg_map = {'image_name': 'image_name'} <NEW_LINE> kwarg_map.update(BASE_KWARG_MAP) <NEW_LINE> def __init__(self, access_ip_v4, access_ip_v6, architecture, availability_zone, cell_name, created_at, deleted_at, disk_gb, display_name, ephemeral_gb, host, hostname, image_meta, image_name, image_ref_url, instance_flavor_id, instance_id, instance_type, instance_type_id, kernel_id, launched_at, memory_mb, metadata, node, os_type, progress, ramdisk_id, reservation_id, root_gb, state, state_description, tenant_id, terminated_at, user_id, vcpus): <NEW_LINE> <INDENT> super(InstanceCreateStart, self).__init__(locals()) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def _dict_to_obj(cls, json_dict): <NEW_LINE> <INDENT> obj = cls._map_values_to_kwargs(json_dict) <NEW_LINE> obj.image_meta = ImageMeta._dict_to_obj(obj.image_meta) <NEW_LINE> return obj | Compute Instance Create Start Response Model
@summary: Response model for a compute.instance.create.start
event notification
@note: Represents a single event notification
JSON Example:
{
"access_ip_v4": "10.10.0.0",
"access_ip_v6": null,
"architecture": "x64",
"availability_zone": null,
"cell_name": "cell name",
"created_at": "2015-01-15 18:59:29",
"deleted_at": "",
"disk_gb": 20,
"display_name": "server123456",
"ephemeral_gb": 0,
"host": null,
"hostname": "server123456",
"image_meta": { <ImageMeta> },
"image_name": "Ubuntu 13.10 (Saucy Salamander)",
"image_ref_url": "http://127.0.0.1/images/my_image",
"instance_flavor_id": "instance_flavor_id",
"instance_id": "performance1-1",
"instance_type": "1 GB Performance",
"instance_type_id": "9",
"kernel_id": "",
"launched_at": "",
"memory_mb": 1024,
"metadata": {},
"node": null,
"os_type": "linux",
"progress": "",
"ramdisk_id": "",
"reservation_id": "r-abcdefg",
"root_gb": 20,
"state": "building",
"state_description": "",
"tenant_id": "123456",
"terminated_at": "",
"user_id": "123456789",
"vcpus": 1
} | 62598fd19f28863672818ab9 |
class GetComment(AuthenticatedMethod): <NEW_LINE> <INDENT> method_name = 'wp.getComment' <NEW_LINE> method_args = ('comment_id',) <NEW_LINE> results_class = WordPressComment | Retrieve an individual comment.
Parameters:
`comment_id`: ID of the comment to retrieve.
Returns: `WordPressPost` instance. | 62598fd17b180e01f3e4928c |
class SaltNewEdit(View): <NEW_LINE> <INDENT> def get(self, request, salt_new_id): <NEW_LINE> <INDENT> new_salt = _models.SaltNew.objects.filter(is_delete=False, id=salt_new_id).first() <NEW_LINE> salt_na_queryset = view_model.view_common(_models.SaltNA, "id", "name") <NEW_LINE> team_set = view_model.view_common(_models.Team, "id", "name") <NEW_LINE> inspector_set = view_model.view_common(_models.Inspector, "id", "name") <NEW_LINE> stove_number_set = view_model.view_common(_models.StoveNumber, "id", "number") <NEW_LINE> if new_salt: <NEW_LINE> <INDENT> return render(request, 'admin/salt/salt_new_edit.html', context={ "data": new_salt, "salt_na_queryset": salt_na_queryset, "team_set": team_set, "inspector_set": inspector_set, "stove_number_set": stove_number_set }) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return Http404("新盐数据不存在") <NEW_LINE> <DEDENT> <DEDENT> def delete(self, request, salt_new_id): <NEW_LINE> <INDENT> new_salt = _models.SaltNew.objects.only("id").filter(is_delete=False, id=salt_new_id).first() <NEW_LINE> if new_salt: <NEW_LINE> <INDENT> new_salt.is_delete = True <NEW_LINE> new_salt.save(update_fields=["is_delete"]) <NEW_LINE> return to_json_data(errmsg="新盐数据删除成功!") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return to_json_data(errno=Code.PARAMERR, errmsg=error_map[Code.PARAMERR]) <NEW_LINE> <DEDENT> <DEDENT> def put(self, request, salt_new_id): <NEW_LINE> <INDENT> new_salt = _models.SaltNew.objects.filter(is_delete=False, id=salt_new_id).first() <NEW_LINE> if not new_salt: <NEW_LINE> <INDENT> return to_json_data(errno=Code.PARAMERR, errmsg=error_map[Code.PARAMERR]) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> json_data = request.body <NEW_LINE> if not json_data: <NEW_LINE> <INDENT> return to_json_data(errno=Code.PARAMERR, errmsg=error_map[Code.PARAMERR]) <NEW_LINE> <DEDENT> dict_data = json.loads(json_data.decode()) <NEW_LINE> _check_time = datetime.strptime(dict_data.get("check_time"), "%Y-%m-%d") <NEW_LINE> dict_data["check_time"] = 
_check_time <NEW_LINE> _thaw_date = datetime.strptime(dict_data.get("thaw_date"), "%Y-%m-%d") <NEW_LINE> dict_data["thaw_date"] = _thaw_date <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> logger.info("新盐数据更新获取失败:{}".format(e)) <NEW_LINE> return to_json_data(errno=Code.UNKOWNERR, errmsg=error_map[Code.UNKOWNERR]) <NEW_LINE> <DEDENT> form = _forms.SaltNewEditForm(dict_data) <NEW_LINE> if form.is_valid(): <NEW_LINE> <INDENT> for key, value in form.cleaned_data.items(): <NEW_LINE> <INDENT> setattr(new_salt, key, value) <NEW_LINE> <DEDENT> new_salt.save() <NEW_LINE> return to_json_data(errmsg="新盐数据更新成功") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> err_str = error_msg.err_msg_list(form) <NEW_LINE> return to_json_data(errno=Code.PARAMERR, errmsg=err_str) | 新盐数据编辑 | 62598fd1d8ef3951e32c8098 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.