code stringlengths 4 4.48k | docstring stringlengths 1 6.45k | _id stringlengths 24 24 |
|---|---|---|
class CVEmailer(Messenger): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> print("[+] CVEmailer instance created. ") <NEW_LINE> <DEDENT> def compose(self, data_list): <NEW_LINE> <INDENT> self.content = info_data <NEW_LINE> print("compose()") <NEW_LINE> <DEDENT> def send(self): <NEW_LINE> <INDENT> API_AUTH = self.config['auth'] <NEW_LINE> API_ENDPOINT_URL = self.config['endpoint'] <NEW_LINE> headers = { 'Authorization': str(API_AUTH), 'Content-Type': 'application/json', 'Accept': 'application/json' } <NEW_LINE> data = { 'from': "", 'to': "", 'subject': "", 'text': self.content } <NEW_LINE> r=None <NEW_LINE> try: <NEW_LINE> <INDENT> r = requests.post(API_ENDPOINT_URL, data=data, headers=headers, timeout=10) <NEW_LINE> r.raise_for_status() <NEW_LINE> <DEDENT> except requests.exceptions.RequestException as e: <NEW_LINE> <INDENT> print("Request error", e) <NEW_LINE> return False <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> content = r.json() <NEW_LINE> if(res == "200"): <NEW_LINE> <INDENT> print("[+] API request success..") <NEW_LINE> return True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> res = content['requestError']['serviceException']['messageId'] <NEW_LINE> print("[-] API request failed.. Error message: " + str(res)) <NEW_LINE> return False | Send CVE notifications via email | 62598fbc7047854f4633f59b |
class TestDeletePlatformInteractor(itb.InteractorTestBase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> super().setUp() <NEW_LINE> def get_platforms(): <NEW_LINE> <INDENT> platform = {'id': 'id', 'name': 'a platform'} <NEW_LINE> return [p.Platform.from_dict(platform)] <NEW_LINE> <DEDENT> self.__target = pi.DeletePlatformInteractor() <NEW_LINE> self.persistence.get_platforms = Mock(side_effect=get_platforms) <NEW_LINE> self.__target.persistence = self.persistence <NEW_LINE> self.__target.validate_integer_field = self.validate_integer_field <NEW_LINE> self.__target.validate_string_field = self.validate_string_field <NEW_LINE> <DEDENT> def test_is_instance_of_interactor(self): <NEW_LINE> <INDENT> self.assertIsInstance(self.__target, i.Interactor) <NEW_LINE> <DEDENT> def test_execute_calls_delete_platform_persistence_method(self): <NEW_LINE> <INDENT> platform = p.Platform() <NEW_LINE> platform.id = "id" <NEW_LINE> self.__target.execute(platform=platform.id) <NEW_LINE> self.persistence.delete_platform.assert_called_with(platform.id) <NEW_LINE> <DEDENT> def test_execute_with_none_platform_raises_type_error(self): <NEW_LINE> <INDENT> self.assertRaises(TypeError, self.__target.execute, None) <NEW_LINE> <DEDENT> def test_execute_platform_does_not_exist_raises_platform_not_found_exception(self): <NEW_LINE> <INDENT> platform = p.Platform() <NEW_LINE> platform.id = 'non-existant' <NEW_LINE> self.assertRaises(pi.PlatformNotFoundException, self.__target.execute, platform.id) | Unit tests for DeletePlatformInteractor | 62598fbc57b8e32f525081ff |
class batch_manager(context_error_handler): <NEW_LINE> <INDENT> def __init__( self, *inputs, n_elem: int = 1e6, batch_size: Optional[int] = None, max_batch_size: int = 1024, ): <NEW_LINE> <INDENT> if not inputs: <NEW_LINE> <INDENT> raise ValueError("inputs should be provided in general_batch_manager") <NEW_LINE> <DEDENT> input_lengths = list(map(len, inputs)) <NEW_LINE> self._n, self._inputs = input_lengths[0], inputs <NEW_LINE> assert_msg = "inputs should be of same length" <NEW_LINE> assert all(length == self._n for length in input_lengths), assert_msg <NEW_LINE> if batch_size is not None: <NEW_LINE> <INDENT> self._batch_size = batch_size <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> n_elem = int(n_elem) <NEW_LINE> self._batch_size = int( n_elem / sum(map(lambda arr: prod(arr.shape[1:]), inputs)) ) <NEW_LINE> <DEDENT> self._batch_size = min(max_batch_size, min(self._n, self._batch_size)) <NEW_LINE> self._n_epoch = int(self._n / self._batch_size) <NEW_LINE> self._n_epoch += int(self._n_epoch * self._batch_size < self._n) <NEW_LINE> <DEDENT> def __enter__(self): <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> self._start, self._end = 0, self._batch_size <NEW_LINE> return self <NEW_LINE> <DEDENT> def __next__(self): <NEW_LINE> <INDENT> if self._start >= self._n: <NEW_LINE> <INDENT> raise StopIteration <NEW_LINE> <DEDENT> batched_data = tuple( map( lambda arr: arr[self._start : self._end], self._inputs, ) ) <NEW_LINE> self._start, self._end = self._end, self._end + self._batch_size <NEW_LINE> if len(batched_data) == 1: <NEW_LINE> <INDENT> return batched_data[0] <NEW_LINE> <DEDENT> return batched_data <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return self._n_epoch | Process data in batch.
Parameters
----------
inputs : tuple(np.ndarray), auxiliary array inputs.
n_elem : {int, float}, indicates how many elements will be processed in a batch.
batch_size : int, indicates the batch_size; if None, batch_size will be
calculated by `n_elem`.
Examples
--------
>>> with batch_manager(np.arange(5), np.arange(1, 6), batch_size=2) as manager:
>>> for arr, tensor in manager:
>>> print(arr, tensor)
>>> # Will print:
>>> # [0 1], [1 2]
>>> # [2 3], [3 4]
>>> # [4] , [5] | 62598fbcbf627c535bcb166a |
class UpdateProject(bpy.types.Operator): <NEW_LINE> <INDENT> bl_idname = 'panda_engine.update_project' <NEW_LINE> bl_label = 'Update Project Files' <NEW_LINE> def execute(self, _context): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> config = pman.get_config(os.path.dirname(bpy.data.filepath) if bpy.data.filepath else None) <NEW_LINE> pman.create_project(pman.get_abs_path(config, '')) <NEW_LINE> return {'FINISHED'} <NEW_LINE> <DEDENT> except pman.PManException as err: <NEW_LINE> <INDENT> self.report({'ERROR'}, str(err)) <NEW_LINE> return {'CANCELLED'} | Re-copies any missing project files | 62598fbca8370b77170f05a5 |
class TextTableCell(object): <NEW_LINE> <INDENT> def __init__(self, content, colspan=None, rowspan=None, bgColor=None, alignH=None, alignV=None): <NEW_LINE> <INDENT> self.__content=content <NEW_LINE> self.__colspan=colspan <NEW_LINE> self.__rowspan=rowspan <NEW_LINE> self.__bgColor=bgColor <NEW_LINE> self.__alignH=alignH <NEW_LINE> self.__alignV=alignV <NEW_LINE> <DEDENT> def content(self): <NEW_LINE> <INDENT> return self.__content <NEW_LINE> <DEDENT> def colspan(self): <NEW_LINE> <INDENT> return self.__colspan <NEW_LINE> <DEDENT> def rowspan(self): <NEW_LINE> <INDENT> return self.__rowspan <NEW_LINE> <DEDENT> def bgColor(self): <NEW_LINE> <INDENT> return self.__bgColor <NEW_LINE> <DEDENT> def alignH(self): <NEW_LINE> <INDENT> return self.__alignH <NEW_LINE> <DEDENT> def alignV(self): <NEW_LINE> <INDENT> return self.__alignV | A really basic HTML cell definition | 62598fbc50812a4eaa620cce |
class EdgeConv(MessagePassing): <NEW_LINE> <INDENT> def __init__(self, nn: Callable, aggr: str = 'max', **kwargs): <NEW_LINE> <INDENT> super().__init__(aggr=aggr, **kwargs) <NEW_LINE> self.nn = nn <NEW_LINE> self.reset_parameters() <NEW_LINE> <DEDENT> def reset_parameters(self): <NEW_LINE> <INDENT> reset(self.nn) <NEW_LINE> <DEDENT> def forward(self, x: Union[Tensor, PairTensor], edge_index: Adj) -> Tensor: <NEW_LINE> <INDENT> if isinstance(x, Tensor): <NEW_LINE> <INDENT> x: PairTensor = (x, x) <NEW_LINE> <DEDENT> return self.propagate(edge_index, x=x, size=None) <NEW_LINE> <DEDENT> def message(self, x_i: Tensor, x_j: Tensor) -> Tensor: <NEW_LINE> <INDENT> return self.nn(torch.cat([x_i, x_j - x_i], dim=-1)) <NEW_LINE> <DEDENT> def __repr__(self) -> str: <NEW_LINE> <INDENT> return f'{self.__class__.__name__}(nn={self.nn})' | The edge convolutional operator from the `"Dynamic Graph CNN for
Learning on Point Clouds" <https://arxiv.org/abs/1801.07829>`_ paper
.. math::
\mathbf{x}^{\prime}_i = \sum_{j \in \mathcal{N}(i)}
h_{\mathbf{\Theta}}(\mathbf{x}_i \, \Vert \,
\mathbf{x}_j - \mathbf{x}_i),
where :math:`h_{\mathbf{\Theta}}` denotes a neural network, *.i.e.* a MLP.
Args:
nn (torch.nn.Module): A neural network :math:`h_{\mathbf{\Theta}}` that
maps pair-wise concatenated node features :obj:`x` of shape
:obj:`[-1, 2 * in_channels]` to shape :obj:`[-1, out_channels]`,
*e.g.*, defined by :class:`torch.nn.Sequential`.
aggr (string, optional): The aggregation scheme to use
(:obj:`"add"`, :obj:`"mean"`, :obj:`"max"`).
(default: :obj:`"max"`)
**kwargs (optional): Additional arguments of
:class:`torch_geometric.nn.conv.MessagePassing`.
Shapes:
- **input:**
node features :math:`(|\mathcal{V}|, F_{in})` or
:math:`((|\mathcal{V}|, F_{in}), (|\mathcal{V}|, F_{in}))`
if bipartite,
edge indices :math:`(2, |\mathcal{E}|)`
- **output:** node features :math:`(|\mathcal{V}|, F_{out})` or
:math:`(|\mathcal{V}_t|, F_{out})` if bipartite | 62598fbc091ae35668704de9 |
class ListValueRule: <NEW_LINE> <INDENT> def __init__(self, *, marshal): <NEW_LINE> <INDENT> self._marshal = marshal <NEW_LINE> <DEDENT> def to_python(self, value, *, absent: bool = None): <NEW_LINE> <INDENT> return ( None if absent else repeated.RepeatedComposite(value.values, marshal=self._marshal) ) <NEW_LINE> <DEDENT> def to_proto(self, value) -> struct_pb2.ListValue: <NEW_LINE> <INDENT> if isinstance(value, struct_pb2.ListValue): <NEW_LINE> <INDENT> return value <NEW_LINE> <DEDENT> if isinstance(value, repeated.RepeatedComposite): <NEW_LINE> <INDENT> return struct_pb2.ListValue(values=[v for v in value.pb]) <NEW_LINE> <DEDENT> return struct_pb2.ListValue( values=[self._marshal.to_proto(struct_pb2.Value, v) for v in value] ) | A rule translating google.protobuf.ListValue and list-like objects. | 62598fbc97e22403b383b0ce |
class Answer(models.Model): <NEW_LINE> <INDENT> question = models.ForeignKey(Question, on_delete=models.CASCADE, related_name='answers') <NEW_LINE> mark = models.PositiveIntegerField(default=0) <NEW_LINE> text = models.CharField(max_length=60, blank=True) <NEW_LINE> img = models.ImageField(upload_to='answer', blank=True) | "Відповідь | 62598fbc92d797404e388c46 |
class SAPSSFSLKY(Packet): <NEW_LINE> <INDENT> name = "SAP SSFS LKY" <NEW_LINE> fields_desc = [ StrFixedLenField("preamble", "RSecSSFsLKY", 11), ] | SAP SSFS LKY file format packet.
| 62598fbcad47b63b2c5a7a1c |
class PasswordCredentialHash( OktaObject ): <NEW_LINE> <INDENT> def __init__(self, config=None): <NEW_LINE> <INDENT> super().__init__(config) <NEW_LINE> if config: <NEW_LINE> <INDENT> if "algorithm" in config: <NEW_LINE> <INDENT> if isinstance(config["algorithm"], password_credential_hash_algorithm.PasswordCredentialHashAlgorithm): <NEW_LINE> <INDENT> self.algorithm = config["algorithm"] <NEW_LINE> <DEDENT> elif config["algorithm"] is not None: <NEW_LINE> <INDENT> self.algorithm = password_credential_hash_algorithm.PasswordCredentialHashAlgorithm( config["algorithm"].upper() ) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.algorithm = None <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> self.algorithm = None <NEW_LINE> <DEDENT> self.salt = config["salt"] if "salt" in config else None <NEW_LINE> self.salt_order = config["saltOrder"] if "saltOrder" in config else None <NEW_LINE> self.value = config["value"] if "value" in config else None <NEW_LINE> self.work_factor = config["workFactor"] if "workFactor" in config else None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.algorithm = None <NEW_LINE> self.salt = None <NEW_LINE> self.salt_order = None <NEW_LINE> self.value = None <NEW_LINE> self.work_factor = None <NEW_LINE> <DEDENT> <DEDENT> def request_format(self): <NEW_LINE> <INDENT> parent_req_format = super().request_format() <NEW_LINE> current_obj_format = { "algorithm": self.algorithm, "salt": self.salt, "saltOrder": self.salt_order, "value": self.value, "workFactor": self.work_factor } <NEW_LINE> parent_req_format.update(current_obj_format) <NEW_LINE> return parent_req_format | A class for PasswordCredentialHash objects. | 62598fbc4428ac0f6e6586ea |
class TestCreateAndGetSingleCustomer(APITestCase): <NEW_LINE> <INDENT> fake = Factory.create() <NEW_LINE> customer_data = { 'first_name': fake.first_name(), 'last_name': fake.last_name(), 'email': fake.safe_email(), } <NEW_LINE> def setUp(self): <NEW_LINE> <INDENT> url = reverse('collect_opinions:customers') <NEW_LINE> self.assertEqual(url, '/collect_opinions/api/customers/') <NEW_LINE> response = self.client.post(url, self.customer_data, format='json') <NEW_LINE> self.assertEqual(response.status_code, status.HTTP_201_CREATED) <NEW_LINE> <DEDENT> def test_get_created_customer(self): <NEW_LINE> <INDENT> factory = APIRequestFactory() <NEW_LINE> url = reverse('collect_opinions:customer-detail', kwargs={'pk': 1}) <NEW_LINE> request = factory.get(url) <NEW_LINE> response = self.client.get(url) <NEW_LINE> self.assertEqual(response.status_code, status.HTTP_200_OK) <NEW_LINE> self.customer_data.update({ "customer_url": request.build_absolute_uri() }) <NEW_LINE> print('response.data:') <NEW_LINE> pprint(response.data) <NEW_LINE> print('customer_data:') <NEW_LINE> pprint(self.customer_data) <NEW_LINE> self.assertEqual(response.data, self.customer_data) | Test module for GET single customer API using endpoint api/customers/{pk} | 62598fbcdc8b845886d53781 |
class ProductionAgroindustrial(models.Model): <NEW_LINE> <INDENT> production = models.ForeignKey( "producer.Production", related_name="production_agroindustrial", on_delete=models.CASCADE ) <NEW_LINE> description = models.CharField(max_length=50, blank=True, null=True) <NEW_LINE> raw_material = models.CharField(max_length=20, blank=True, null=True) <NEW_LINE> is_mechanized = models.BooleanField(default=False) <NEW_LINE> knowledge = models.CharField(max_length=30, blank=True, null=True) | Produccion agroindustrial | 62598fbc57b8e32f52508200 |
class DiscoveryStartRequest(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.swagger_types = { 'start_ip': 'str', 'end_ip': 'str', 'use_agents': 'bool', 'connection_timeout': 'int', 'max_ports_to_use': 'int' } <NEW_LINE> self.attribute_map = { 'start_ip': 'startIP', 'end_ip': 'endIP', 'use_agents': 'useAgents', 'connection_timeout': 'connectionTimeout', 'max_ports_to_use': 'maxPortsToUse' } <NEW_LINE> self._start_ip = None <NEW_LINE> self._end_ip = None <NEW_LINE> self._use_agents = None <NEW_LINE> self._connection_timeout = None <NEW_LINE> self._max_ports_to_use = None <NEW_LINE> <DEDENT> @property <NEW_LINE> def start_ip(self): <NEW_LINE> <INDENT> return self._start_ip <NEW_LINE> <DEDENT> @start_ip.setter <NEW_LINE> def start_ip(self, start_ip): <NEW_LINE> <INDENT> self._start_ip = start_ip <NEW_LINE> <DEDENT> @property <NEW_LINE> def end_ip(self): <NEW_LINE> <INDENT> return self._end_ip <NEW_LINE> <DEDENT> @end_ip.setter <NEW_LINE> def end_ip(self, end_ip): <NEW_LINE> <INDENT> self._end_ip = end_ip <NEW_LINE> <DEDENT> @property <NEW_LINE> def use_agents(self): <NEW_LINE> <INDENT> return self._use_agents <NEW_LINE> <DEDENT> @use_agents.setter <NEW_LINE> def use_agents(self, use_agents): <NEW_LINE> <INDENT> self._use_agents = use_agents <NEW_LINE> <DEDENT> @property <NEW_LINE> def connection_timeout(self): <NEW_LINE> <INDENT> return self._connection_timeout <NEW_LINE> <DEDENT> @connection_timeout.setter <NEW_LINE> def connection_timeout(self, connection_timeout): <NEW_LINE> <INDENT> self._connection_timeout = connection_timeout <NEW_LINE> <DEDENT> @property <NEW_LINE> def max_ports_to_use(self): <NEW_LINE> <INDENT> return self._max_ports_to_use <NEW_LINE> <DEDENT> @max_ports_to_use.setter <NEW_LINE> def max_ports_to_use(self, max_ports_to_use): <NEW_LINE> <INDENT> self._max_ports_to_use = max_ports_to_use <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in iteritems(self.swagger_types): 
<NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> if self is None: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if self is None or other is None: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other | NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually. | 62598fbc63d6d428bbee2978 |
class HighWeekDaysTimerGenerator(CyclicTimerGenerator): <NEW_LINE> <INDENT> def __init__(self, clock, seed): <NEW_LINE> <INDENT> start_date = pd.Timestamp("6 June 2016 00:00:00") <NEW_LINE> CyclicTimerGenerator.__init__(self, clock=clock, seed=seed, config=CyclicTimerProfile( profile=[5., 5., 5., 5., 5., 3., 3.], profile_time_steps="1D", start_date=start_date), ) | Basic CyclicTimerGenerator with a one week period that allocates higher
probabilities to week-day vs week-ends | 62598fbcf548e778e596b76e |
class _Player: <NEW_LINE> <INDENT> def __init__(self, name, rating): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.rating = rating <NEW_LINE> <DEDENT> def compareRating(self, opponent): <NEW_LINE> <INDENT> return (1 + 10 ** ((opponent.rating - self.rating) / 400.0)) ** -1 | A class to represent a player in the Elo Rating System | 62598fbc9f2886367281895f |
class Solution(object): <NEW_LINE> <INDENT> def restoreIpAddresses(self, s): <NEW_LINE> <INDENT> ans = [] <NEW_LINE> self.helper(ans, s, 4, []) <NEW_LINE> return ['.'.join(x) for x in ans] <NEW_LINE> <DEDENT> def helper(self, ans, s, k, temp): <NEW_LINE> <INDENT> if len(s) > k* 3: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> if k == 0: <NEW_LINE> <INDENT> ans.append(temp[:]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> for i in range(min(3, len(s) - k + 1)): <NEW_LINE> <INDENT> if i == 2 and int(s[:3]) > 255 or i > 0 and s[0] == '0': <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> self.helper(ans, s[i + 1:], k - 1, temp + [s[:i + 1]]) | DFS回溯来实现,注意判断条件即可
Runtime: 20 ms, faster than 100.00% of Python online submissions for Restore IP Addresses.
Memory Usage: 11.9 MB, less than 5.09% of Python online submissions for Restore IP Addresses. | 62598fbc97e22403b383b0d0 |
class vggish_stat(): <NEW_LINE> <INDENT> def __init__(self,data): <NEW_LINE> <INDENT> self.data = data <NEW_LINE> <DEDENT> def mean(self): <NEW_LINE> <INDENT> MEAN = [] <NEW_LINE> for embedding in self.data: <NEW_LINE> <INDENT> mean = [] <NEW_LINE> samples = 128 <NEW_LINE> for idx in range(samples): <NEW_LINE> <INDENT> temp = np.mean(embedding[:,idx]) <NEW_LINE> mean.append(temp) <NEW_LINE> <DEDENT> MEAN.append(mean) <NEW_LINE> <DEDENT> return MEAN <NEW_LINE> <DEDENT> def std(self): <NEW_LINE> <INDENT> STD = [] <NEW_LINE> for embedding in self.data: <NEW_LINE> <INDENT> std = [] <NEW_LINE> samples = 128 <NEW_LINE> for idx in range(samples): <NEW_LINE> <INDENT> temp = np.std(embedding[:,idx]) <NEW_LINE> std.append(temp) <NEW_LINE> <DEDENT> STD.append(std) <NEW_LINE> <DEDENT> return STD | taking standard deviation and mean as final features from the entire segment | 62598fbcec188e330fdf8a5a |
class AssignmentPersonalInfo(PersonalInfo): <NEW_LINE> <INDENT> headline = ugettext_lazy('I am candidate for the following elections') <NEW_LINE> default_weight = 40 <NEW_LINE> def get_queryset(self): <NEW_LINE> <INDENT> return (Assignment.objects.filter(assignment_related_users__user=self.request.user) .exclude(assignment_related_users__status=AssignmentRelatedUser.STATUS_BLOCKED)) | Class for personal info block for the assignment app. | 62598fbcfff4ab517ebcd9ac |
class RedisHelper(object): <NEW_LINE> <INDENT> _client = None <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> if RedisHelper._client is None: <NEW_LINE> <INDENT> self._create_redis_client() <NEW_LINE> <DEDENT> <DEDENT> @classmethod <NEW_LINE> def get_client(cls): <NEW_LINE> <INDENT> if RedisHelper._client is None: <NEW_LINE> <INDENT> cls._create_redis_client() <NEW_LINE> <DEDENT> return RedisHelper._client <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def ping_redis(cls): <NEW_LINE> <INDENT> cls.get_client().ping() <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def _create_redis_client(cls): <NEW_LINE> <INDENT> RedisHelper._client = redis.StrictRedis( host=settings.REDIS_HOST, port=settings.REDIS_PORT, db=settings.REDIS_DB, password=settings.REDIS_PASSWORD) | redis 连接助手 | 62598fbce1aae11d1e7ce909 |
class Axes(): <NEW_LINE> <INDENT> def __init__(self, fig, xscale='linear', yscale='linear'): <NEW_LINE> <INDENT> self._fig = fig <NEW_LINE> self._xscale = xscale <NEW_LINE> self._yscale = yscale <NEW_LINE> <DEDENT> def axis(self, lims): <NEW_LINE> <INDENT> l = __last_fig()._graph.activeLayer() <NEW_LINE> if 4 != len(lims): <NEW_LINE> <INDENT> raise ValueError("Error: 4 real values are required for the x and y axes limits") <NEW_LINE> <DEDENT> l.setScale(*lims) <NEW_LINE> <DEDENT> def set_xlabel(self, lbl): <NEW_LINE> <INDENT> l = self.get_figure()._graph.activeLayer() <NEW_LINE> l.setXTitle(lbl) <NEW_LINE> <DEDENT> def set_ylabel(self, lbl): <NEW_LINE> <INDENT> l = self.get_figure()._graph.activeLayer() <NEW_LINE> l.setYTitle(lbl) <NEW_LINE> <DEDENT> def set_xlim(self, xmin, xmax): <NEW_LINE> <INDENT> l = self.get_figure()._graph.activeLayer() <NEW_LINE> l.setAxisScale(2, xmin, xmax) <NEW_LINE> <DEDENT> def set_ylim(self, ymin, ymax): <NEW_LINE> <INDENT> l = self.get_figure()._graph.activeLayer() <NEW_LINE> l.setAxisScale(0, ymin, ymax) <NEW_LINE> <DEDENT> def set_xscale(self, scale_str): <NEW_LINE> <INDENT> if 'log' != scale_str and 'linear' != scale_str: <NEW_LINE> <INDENT> raise ValueError("You need to specify either 'log' or 'linear' type of scale for the x axis." 
) <NEW_LINE> <DEDENT> l = self.get_figure()._graph.activeLayer() <NEW_LINE> if scale_str == 'log': <NEW_LINE> <INDENT> if 'log' == self._yscale: <NEW_LINE> <INDENT> l.logLogAxes() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> l.logXLinY() <NEW_LINE> <DEDENT> <DEDENT> elif scale_str == 'linear': <NEW_LINE> <INDENT> if 'log' == self._yscale: <NEW_LINE> <INDENT> l.logYlinX() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> l.linearAxes() <NEW_LINE> <DEDENT> <DEDENT> self._xscale = scale_str <NEW_LINE> <DEDENT> def set_yscale(self, scale_str): <NEW_LINE> <INDENT> if 'log' != scale_str and 'linear' != scale_str: <NEW_LINE> <INDENT> raise ValueError("You need to specify either 'log' or 'linear' type of scale for the y axis." ) <NEW_LINE> <DEDENT> l = self.get_figure()._graph.activeLayer() <NEW_LINE> if scale_str == 'log': <NEW_LINE> <INDENT> if 'log' == self._xscale: <NEW_LINE> <INDENT> l.logLogAxes() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> l.logYlinX() <NEW_LINE> <DEDENT> <DEDENT> elif scale_str == 'linear': <NEW_LINE> <INDENT> if 'log' == self._xscale: <NEW_LINE> <INDENT> l.logXLinY() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> l.linearAxes() <NEW_LINE> <DEDENT> <DEDENT> self._yscale = scale_str <NEW_LINE> <DEDENT> def get_figure(self, ): <NEW_LINE> <INDENT> return self._fig | A very minimal replica of matplotlib.axes.Axes. The true Axes is a
sublcass of matplotlib.artist and provides tons of functionality.
At the moment this just provides a few set methods for properties
such as labels and axis limits. | 62598fbcaad79263cf42e99d |
class StockForecastDiffView(TemplateView): <NEW_LINE> <INDENT> template_name = 'pages/forecast_diff.html' <NEW_LINE> def get_context_data(self, **kwargs): <NEW_LINE> <INDENT> context_data = super(StockForecastDiffView, self).get_context_data() <NEW_LINE> code = self.request.GET.get('stock_code') <NEW_LINE> name = self.request.GET.get('stock_name') <NEW_LINE> context_data.update({'stock_code': code, 'stock_name': name}) <NEW_LINE> return context_data | 股票对比页面 | 62598fbc23849d37ff85127c |
class AnalyticDriver(Driver): <NEW_LINE> <INDENT> def __init__(self, options, connection): <NEW_LINE> <INDENT> super(AnalyticDriver, self).__init__(options, connection) <NEW_LINE> self.commands += ['send'] <NEW_LINE> <DEDENT> def send(self, **kwargs): <NEW_LINE> <INDENT> raise Exception("This method needs to be implemented by a child class") | Driver containing basic commands used by all analytics drivers. | 62598fbc3d592f4c4edbb088 |
class ArcSummary(object): <NEW_LINE> <INDENT> GCOV_ARC_ON_TREE = 1 <NEW_LINE> GCOV_ARC_FAKE = 1 << 1 <NEW_LINE> GCOV_ARC_FALLTHROUGH = 1 << 2 <NEW_LINE> def __init__(self, src_block, dst_block, flag): <NEW_LINE> <INDENT> self.src_block = src_block <NEW_LINE> self.dst_block = dst_block <NEW_LINE> self.on_tree = bool(flag & self.GCOV_ARC_ON_TREE) <NEW_LINE> self.fake = bool(flag & self.GCOV_ARC_FAKE) <NEW_LINE> self.fallthrough = bool(flag & self.GCOV_ARC_FALLTHROUGH) <NEW_LINE> self.resolved = False <NEW_LINE> self.count = 0 <NEW_LINE> <DEDENT> def Resolve(self): <NEW_LINE> <INDENT> if self.fake and not self.fallthrough: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.src_block.exit_arcs.remove(self) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> elif (len(self.src_block.entry_arcs) > 0 and all(a.resolved for a in self.src_block.entry_arcs) and all(a.resolved for a in self.src_block.exit_arcs if a != self)): <NEW_LINE> <INDENT> in_flow = sum(a.count for a in self.src_block.entry_arcs) <NEW_LINE> out_flow = sum(a.count for a in self.src_block.exit_arcs if a != self) <NEW_LINE> self.count = in_flow - out_flow <NEW_LINE> self.resolved = True <NEW_LINE> <DEDENT> elif (len(self.dst_block.exit_arcs) > 0 and all(a.resolved for a in self.dst_block.exit_arcs) and all(a.resolved for a in self.dst_block.entry_arcs if a != self)): <NEW_LINE> <INDENT> out_flow = sum(a.count for a in self.dst_block.exit_arcs) <NEW_LINE> in_flow = sum(a.count for a in self.dst_block.entry_arcs if a != self) <NEW_LINE> self.count = out_flow - in_flow <NEW_LINE> self.resolved = True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return True | Summarizes an arc from a .gcno file.
Attributes:
src_block_index: integer index of the source basic block.
dstBlockIndex: integer index of the destination basic block.
on_tree: True iff arc has flag GCOV_ARC_ON_TREE.
fake: True iff arc has flag GCOV_ARC_FAKE.
fallthrough: True iff arc has flag GCOV_ARC_FALLTHROUGH.
resolved: True iff the arc's count has been resolved.
count: Integer number of times the arc was covered. | 62598fbc3539df3088ecc476 |
class SubAreaSerializer(serializers.ModelSerializer): <NEW_LINE> <INDENT> subs = AreaSerializer(many=True, read_only=True) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> model = Area <NEW_LINE> fields = ('id', 'name', 'subs') | 子行政区划信息序列化器 | 62598fbc091ae35668704ded |
@dataclass <NEW_LINE> class TestCase: <NEW_LINE> <INDENT> pipeline_func: Callable <NEW_LINE> mode: kfp.dsl.PipelineExecutionMode = kfp.dsl.PipelineExecutionMode.V2_COMPATIBLE <NEW_LINE> enable_caching: bool = False <NEW_LINE> arguments: Optional[Dict[str, str]] = None <NEW_LINE> verify_func: Callable[[ int, kfp_server_api.ApiRun, kfp_server_api. ApiRunDetail, metadata_store_pb2.MetadataStoreClientConfig ], None] = _default_verify_func | Test case for running a KFP sample | 62598fbcd7e4931a7ef3c25e |
class Gate(object): <NEW_LINE> <INDENT> def __init__(self, name, targets=None, controls=None, arg_value=None, arg_label=None): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.targets = None <NEW_LINE> self.controls = None <NEW_LINE> if not isinstance(targets, Iterable) and targets is not None: <NEW_LINE> <INDENT> self.targets = [targets] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.targets = targets <NEW_LINE> <DEDENT> if not isinstance(controls, Iterable) and controls is not None: <NEW_LINE> <INDENT> self.controls = [controls] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.controls = controls <NEW_LINE> <DEDENT> for ind_list in [self.targets, self.controls]: <NEW_LINE> <INDENT> if isinstance(ind_list, Iterable): <NEW_LINE> <INDENT> all_integer = all( [isinstance(ind, np.int) for ind in ind_list]) <NEW_LINE> if not all_integer: <NEW_LINE> <INDENT> raise ValueError("Index of a qubit must be an integer") <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> if name in ["SWAP", "ISWAP", "SQRTISWAP", "SQRTSWAP", "BERKELEY", "SWAPalpha"]: <NEW_LINE> <INDENT> if (self.targets is None) or (len(self.targets) != 2): <NEW_LINE> <INDENT> raise ValueError("Gate %s requires two targets" % name) <NEW_LINE> <DEDENT> if self.controls is not None: <NEW_LINE> <INDENT> raise ValueError("Gate %s cannot have a control" % name) <NEW_LINE> <DEDENT> <DEDENT> elif name in ["CNOT", "CSIGN", "CRX", "CRY", "CRZ"]: <NEW_LINE> <INDENT> if self.targets is None or len(self.targets) != 1: <NEW_LINE> <INDENT> raise ValueError("Gate %s requires one target" % name) <NEW_LINE> <DEDENT> if self.controls is None or len(self.controls) != 1: <NEW_LINE> <INDENT> raise ValueError("Gate %s requires one control" % name) <NEW_LINE> <DEDENT> <DEDENT> elif name in ["SNOT", "RX", "RY", "RZ", "PHASEGATE"]: <NEW_LINE> <INDENT> if self.controls is not None: <NEW_LINE> <INDENT> raise ValueError("Gate %s does not take controls" % name) <NEW_LINE> <DEDENT> <DEDENT> elif name in ["RX", "RY", "RZ", "CPHASE", "SWAPalpha", 
"PHASEGATE", "GLOBALPHASE", "CRX", "CRY", "CRZ"]: <NEW_LINE> <INDENT> if arg_value is None: <NEW_LINE> <INDENT> raise ValueError("Gate %s requires an argument value" % name) <NEW_LINE> <DEDENT> <DEDENT> self.arg_value = arg_value <NEW_LINE> self.arg_label = arg_label <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> s = "Gate(%s, targets=%s, controls=%s)" % (self.name, self.targets, self.controls) <NEW_LINE> return s <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return str(self) <NEW_LINE> <DEDENT> def _repr_latex_(self): <NEW_LINE> <INDENT> return str(self) | Representation of a quantum gate, with its required parametrs, and target
and control qubits.
Parameters
----------
name : string
Gate name.
targets : list or int
Gate targets.
controls : list or int
Gate controls.
arg_value : float
Argument value(phi).
arg_label : string
Label for gate representation. | 62598fbc377c676e912f6e56 |
class OnlyTrainSinkNode(NilSinkNode): <NEW_LINE> <INDENT> def __init__(self, **kwargs): <NEW_LINE> <INDENT> super(OnlyTrainSinkNode, self).__init__(**kwargs) <NEW_LINE> self.set_permanent_attributes(dummy_collection = DummyDataset()) <NEW_LINE> <DEDENT> def process_current_split(self): <NEW_LINE> <INDENT> for _,_ in self.input_node.request_data_for_training(False): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> if self.current_split + 1 > self.dummy_collection.meta_data["splits"]: <NEW_LINE> <INDENT> self.dummy_collection.meta_data["splits"] = self.current_split + 1 <NEW_LINE> <DEDENT> <DEDENT> def request_data_for_testing(self): <NEW_LINE> <INDENT> return self.input_node.request_data_for_testing() | Store only meta information and perform training but not testing
The node performs only training on the node chain,
so that the test procedure can be performed manually,
e.g. for debug and testing reasons.
The node is very similar to the NilSinkNode.
.. todo:: Merge the nil-nodes
.. todo:: Change name to more meaningful.
**Parameters**
**Exemplary Call**
.. code-block:: yaml
-
node: Only_Train_Sink
:Author: Hendrik Woehrle (hendrik.woehrle@dfki.de)
:Created: 2011/07/14 | 62598fbc71ff763f4b5e7944 |
class RedirectedStdio(object): <NEW_LINE> <INDENT> def __init__(self, connection): <NEW_LINE> <INDENT> self.connection = connection <NEW_LINE> self.redir = rpyc.classic.redirected_stdio(self.conn) <NEW_LINE> self.redir.__enter__() <NEW_LINE> <DEDENT> def __enter__(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __exit__(self, type, value, traceback): <NEW_LINE> <INDENT> self.redir.__exit__(type, value, traceback) <NEW_LINE> <DEDENT> def close(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.__exit__(None, None, None) <NEW_LINE> <DEDENT> except EOFError: <NEW_LINE> <INDENT> pass | redirect stdio of remote host to this local host | 62598fbc9f28863672818960 |
class PsicovManager(CorrMutGeneric): <NEW_LINE> <INDENT> def __init__(self, seqsManager, outPath): <NEW_LINE> <INDENT> CorrMutGeneric.__init__(self,seqsManager, outPath) <NEW_LINE> self.seqsManager= seqsManager <NEW_LINE> self.corrMutOutPath= myMakeDir(outPath,"corrMut") <NEW_LINE> self.featName="psicov" <NEW_LINE> <DEDENT> def lauchCorrMutProgram(self, aligFormatedName): <NEW_LINE> <INDENT> cmdArray=[self.psicovBin,"-p", "-z", str(self.psiBlastNThrs),"-d", "0.03", "-r", "0.001","-o", "-j", "0", aligFormatedName ] <NEW_LINE> print(" ".join(cmdArray)) <NEW_LINE> process= Popen(cmdArray, stdout=PIPE, stderr=PIPE) <NEW_LINE> processOut= process.communicate() <NEW_LINE> try: <NEW_LINE> <INDENT> iterOfCorrelatedRows= self.processOutput(processOut) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> iterOfCorrelatedRows=None <NEW_LINE> <DEDENT> return iterOfCorrelatedRows <NEW_LINE> <DEDENT> def processOutput(self, processOut): <NEW_LINE> <INDENT> if len(processOut[1])>0 or processOut[0]=="" or "*** Sorry" in processOut[0]: <NEW_LINE> <INDENT> print(processOut) <NEW_LINE> raise ValueError("Error processing data psicov") <NEW_LINE> <DEDENT> for line in processOut[0].split("\n")[1:]: <NEW_LINE> <INDENT> if len(line)==0: break <NEW_LINE> i, j, score= line.split() <NEW_LINE> i, j=int(i)-1, int(j)-1 <NEW_LINE> yield [i,j, float(score)] | Computes corrMut and processes their outputs. Extends class CorrMutGeneric | 62598fbc7d43ff24874274e9 |
class ObjectIdShuffler(SONManipulator): <NEW_LINE> <INDENT> def will_copy(self): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> def transform_incoming(self, son, collection): <NEW_LINE> <INDENT> if not "_id" in son: <NEW_LINE> <INDENT> return son <NEW_LINE> <DEDENT> transformed = SON({"_id": son["_id"]}) <NEW_LINE> transformed.update(son) <NEW_LINE> return transformed | A son manipulator that moves _id to the first position.
| 62598fbc099cdd3c636754c7 |
class MRDANGLE0Test(systemtesting.MantidSystemTest): <NEW_LINE> <INDENT> def runTest(self): <NEW_LINE> <INDENT> wsg = MRFilterCrossSections(Filename="REF_M_24949") <NEW_LINE> ws_norm = LoadEventNexus(Filename="REF_M_24945", NXentryName="entry-Off_Off", OutputWorkspace="r_24945") <NEW_LINE> theta = MRGetTheta(Workspace=wsg[0], UseSANGLE=False, SpecularPixel=127.9) <NEW_LINE> theta0 = MRGetTheta(Workspace=wsg[0], UseSANGLE=False, SpecularPixel=126.9) <NEW_LINE> dangle0 = wsg[0].getRun()['DANGLE0'].getStatistics().mean <NEW_LINE> dangle0 += (theta-theta0)*2.0*180./math.pi <NEW_LINE> MagnetismReflectometryReduction(InputWorkspace=wsg[0], NormalizationWorkspace=ws_norm, SignalPeakPixelRange=[125, 129], SubtractSignalBackground=True, SignalBackgroundPixelRange=[15, 105], ApplyNormalization=True, NormPeakPixelRange=[201, 205], SubtractNormBackground=True, NormBackgroundPixelRange=[10,127], CutLowResDataAxis=True, LowResDataAxisPixelRange=[91, 161], CutLowResNormAxis=True, LowResNormAxisPixelRange=[86, 174], CutTimeAxis=True, UseWLTimeAxis=False, QMin=0.005, QStep=-0.01, TimeAxisStep=40, TimeAxisRange=[25000, 54000], SpecularPixel=127.9, UseSANGLE=False, DAngle0Overwrite=dangle0, ConstantQBinning=False, OutputWorkspace="r_24949") <NEW_LINE> <DEDENT> def validate(self): <NEW_LINE> <INDENT> self.disableChecking.append('Instrument') <NEW_LINE> self.disableChecking.append('Sample') <NEW_LINE> self.disableChecking.append('SpectraMap') <NEW_LINE> return "r_24949", 'MagnetismReflectometryReductionTest.nxs' | Test data loading and cross-section extraction | 62598fbc2c8b7c6e89bd3990 |
class PluginScriptSubmissionMachine(generics.ListAPIView): <NEW_LINE> <INDENT> authentication_classes = (ApiKeyAuthentication,) <NEW_LINE> permission_classes = (HasRWPermission,) <NEW_LINE> serializer_class = PluginScriptSubmissionSerializer <NEW_LINE> def get_queryset(self): <NEW_LINE> <INDENT> serial = self.kwargs['serial'] <NEW_LINE> machine = Machine.objects.get(serial=serial) <NEW_LINE> return PluginScriptSubmission.objects.filter(machine=machine) | Get the plugin script submissions for a machine | 62598fbce1aae11d1e7ce90a |
@adapter(IPloneSiteRoot, IHTTPRequest) <NEW_LINE> @implementer(ITraversable) <NEW_LINE> class APITraverser(object): <NEW_LINE> <INDENT> def __init__(self, context, request=None): <NEW_LINE> <INDENT> self.context = context <NEW_LINE> self.request = request <NEW_LINE> <DEDENT> def traverse(self, name, postpath): <NEW_LINE> <INDENT> alsoProvides(self.request, IAPIRequest) <NEW_LINE> return self.context | The root API traverser
| 62598fbcbe7bc26dc9251f41 |
class RedSFTPFile(object): <NEW_LINE> <INDENT> def __init__(self,sftp,remote_path,sftp_flags,file_mode): <NEW_LINE> <INDENT> self.sftp = sftp <NEW_LINE> self.remote_path = remote_path <NEW_LINE> self.sftp_flags = sftp_flags <NEW_LINE> self.file_mode = file_mode <NEW_LINE> self.open() <NEW_LINE> <DEDENT> def __check_for_attr__(self,attr): <NEW_LINE> <INDENT> return(attr in self.__dict__) <NEW_LINE> <DEDENT> def __del__(self): <NEW_LINE> <INDENT> self.close() <NEW_LINE> <DEDENT> def open(self): <NEW_LINE> <INDENT> if self.__check_for_attr__('file_obj')==False: <NEW_LINE> <INDENT> self.file_obj = self.sftp.ssh_session._block(self.sftp.client.open,self.remote_path,self.sftp_flags,self.file_mode) <NEW_LINE> <DEDENT> <DEDENT> def fsetstat(self,*args,**kwargs): <NEW_LINE> <INDENT> return(self.sftp.fsetstat(self.file_obj,*args,**kwargs)) <NEW_LINE> <DEDENT> def fstat(self): <NEW_LINE> <INDENT> return(self.sftp.fstat(self.file_obj)) <NEW_LINE> <DEDENT> def fstatvfs(self): <NEW_LINE> <INDENT> return(self.sftp.fstatvfs(self.file_obj)) <NEW_LINE> <DEDENT> def fsync(self): <NEW_LINE> <INDENT> return(self.sftp.fsync(self.file_obj)) <NEW_LINE> <DEDENT> def read(self,*args,**kwargs): <NEW_LINE> <INDENT> return(self.sftp.read(self.file_obj,*args,**kwargs)) <NEW_LINE> <DEDENT> def rewind(self): <NEW_LINE> <INDENT> return(self.sftp.rewind(self.file_obj)) <NEW_LINE> <DEDENT> def seek(self,*args,**kwargs): <NEW_LINE> <INDENT> return(self.sftp.seek(self.file_obj,*args,**kwargs)) <NEW_LINE> <DEDENT> def write(self,*args,**kwargs): <NEW_LINE> <INDENT> return(self.sftp.write(self.file_obj,*args,**kwargs)) <NEW_LINE> <DEDENT> def close(self): <NEW_LINE> <INDENT> if self.__check_for_attr__('file_obj')==True: <NEW_LINE> <INDENT> self.sftp.close(self.file_obj) <NEW_LINE> del self.file_obj | Interact with files over SFTP using a class rather than passing a file handle around.
.. warning::
This class simply uses the functions from `redssh.sftp.RedSFTP` minus any requirement for the `file_obj` argument for calls.
:param sftp: `redssh.sftp.RedSFTP` object from the session you'd like to interact via.
:type sftp: `redssh.sftp.RedSFTP`
:param remote_path: Path that file is located at on the remote server.
:type remote_path: ``str``
:param sftp_flags: Flags for the SFTP session to understand what you are going to do with the file.
:type sftp_flags: ``int``
:param file_mode: File mode for the file being opened.
:type file_mode: ``int`` | 62598fbcad47b63b2c5a7a20 |
class ArrayIndependentMetropolis(ArrayMetropolis): <NEW_LINE> <INDENT> def __init__(self, scale=1.): <NEW_LINE> <INDENT> self.scale = scale <NEW_LINE> <DEDENT> def calibrate(self, W, x): <NEW_LINE> <INDENT> m, cov = rs.wmean_and_cov(W, view_2d_array(x.theta)) <NEW_LINE> x.shared['mean'] = m <NEW_LINE> x.shared['chol_cov'] = self.scale * linalg.cholesky(cov, lower=True) <NEW_LINE> <DEDENT> def proposal(self, x, xprop): <NEW_LINE> <INDENT> mu = x.shared['mean'] <NEW_LINE> L = x.shared['chol_cov'] <NEW_LINE> arr = view_2d_array(x.theta) <NEW_LINE> arr_prop = view_2d_array(xprop.theta) <NEW_LINE> z = stats.norm.rvs(size=arr.shape) <NEW_LINE> zx = linalg.solve_triangular(L, np.transpose(arr - mu), lower=True) <NEW_LINE> delta_lp = 0.5 * (np.sum(z * z, axis=1) - np.sum(zx * zx, axis=0)) <NEW_LINE> arr_prop[:, :] = mu + z @ L.T <NEW_LINE> return delta_lp | Independent Metropolis (Gaussian proposal).
| 62598fbc23849d37ff85127e |
class IFFT(_BaseIFFT): <NEW_LINE> <INDENT> def __init__(self, invec, outvec, nbatch=1, size=None): <NEW_LINE> <INDENT> super(IFFT, self).__init__(invec, outvec, nbatch, size) <NEW_LINE> logging.warning(WARN_MSG) <NEW_LINE> self.prec, self.itype, self.otype = _check_fft_args(invec, outvec) <NEW_LINE> <DEDENT> def execute(self): <NEW_LINE> <INDENT> ifft(self.invec, self.outvec, self.prec, self.itype, self.otype) | Class for performing IFFTs via the numpy interface. | 62598fbc44b2445a339b6a5b |
class BlackSwaptionPricerHelper(object): <NEW_LINE> <INDENT> def make_payers_swaption_wrt_strike( self, init_swap_rate, swap_annuity, option_maturity, vol): <NEW_LINE> <INDENT> function = mafipy.function <NEW_LINE> return lambda option_strike: function.black_payers_swaption_value( init_swap_rate=init_swap_rate, option_strike=option_strike, swap_annuity=swap_annuity, option_maturity=option_maturity, vol=vol) <NEW_LINE> <DEDENT> def make_receivers_swaption_wrt_strike( self, init_swap_rate, swap_annuity, option_maturity, vol): <NEW_LINE> <INDENT> function = mafipy.function <NEW_LINE> return lambda option_strike: function.black_receivers_swaption_value( init_swap_rate=init_swap_rate, option_strike=option_strike, swap_annuity=swap_annuity, option_maturity=option_maturity, vol=vol) | BlackSwaptionPricerHelper
Helper functions to generate a function with respect to a sigle variable.
For instance, black formula as a function of volatility is needed to
evaluate market smile by implied volatility. | 62598fbc21bff66bcd722e35 |
class TextItem(Item): <NEW_LINE> <INDENT> def __init__(self, arg): <NEW_LINE> <INDENT> super(TextItem, self).__init__() <NEW_LINE> self.arg = arg | docstring for TextItem | 62598fbc5fcc89381b266232 |
class NumEpisodesObserver(object): <NEW_LINE> <INDENT> def __init__(self, variable_scope='num_episodes_step_observer'): <NEW_LINE> <INDENT> with tf.compat.v1.variable_scope(variable_scope): <NEW_LINE> <INDENT> self._num_episodes = common.create_variable( 'num_episodes', 0, shape=[], dtype=tf.int32) <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def num_episodes(self): <NEW_LINE> <INDENT> return self._num_episodes <NEW_LINE> <DEDENT> @num_episodes.setter <NEW_LINE> def num_episodes(self, num_episodes): <NEW_LINE> <INDENT> self._num_episodes.assign(num_episodes) <NEW_LINE> <DEDENT> def __call__(self, traj): <NEW_LINE> <INDENT> with tf.control_dependencies([ self._num_episodes.assign_add( tf.cast(traj.is_last()[0], dtype=tf.int32)) ]): <NEW_LINE> <INDENT> return tf.nest.map_structure(tf.identity, traj) | Class to count number of episodes run by an observer. | 62598fbc3539df3088ecc478 |
class MyProfileForm(Form): <NEW_LINE> <INDENT> username = StringField( 'Username', validators=[ optional(), Regexp( r'^[a-zA-Z0-9_]+$', message=("Username should be one word, letters, " "numbers, and underscores only.") ), username_exists ]) <NEW_LINE> email = StringField( 'Email', validators=[ optional(), Email(), email_exists ]) | Edit user profile | 62598fbc60cbc95b0636450a |
class strLabelConverter(object): <NEW_LINE> <INDENT> def __init__(self, alphabet, ignore_case=True): <NEW_LINE> <INDENT> self._ignore_case = ignore_case <NEW_LINE> self.alphabet = alphabet + '-' <NEW_LINE> self.dict = {} <NEW_LINE> for i, char in enumerate(alphabet): <NEW_LINE> <INDENT> self.dict[char] = i + 1 <NEW_LINE> <DEDENT> <DEDENT> def encode(self, text): <NEW_LINE> <INDENT> if isinstance(text, str): <NEW_LINE> <INDENT> text = [ self.dict[char.lower() if self._ignore_case else char] for char in text ] <NEW_LINE> length = [len(text)] <NEW_LINE> <DEDENT> elif isinstance(text, collections.Iterable): <NEW_LINE> <INDENT> length = [len(s) for s in text] <NEW_LINE> text = ''.join(text) <NEW_LINE> text, _ = self.encode(text) <NEW_LINE> <DEDENT> return (torch.IntTensor(text), torch.IntTensor(length)) <NEW_LINE> <DEDENT> def decode(self, t, length, raw=False): <NEW_LINE> <INDENT> if length.numel() == 1: <NEW_LINE> <INDENT> length = length[0] <NEW_LINE> assert t.numel() == length, "text with length: {} does not match declared length: {}".format(t.numel(), length) <NEW_LINE> if raw: <NEW_LINE> <INDENT> return ''.join([self.alphabet[i - 1] for i in t]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> char_list = [] <NEW_LINE> for i in range(length): <NEW_LINE> <INDENT> if t[i] != 0 and (not (i > 0 and t[i - 1] == t[i])): <NEW_LINE> <INDENT> char_list.append(self.alphabet[t[i] - 1]) <NEW_LINE> <DEDENT> <DEDENT> return ''.join(char_list) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> assert t.numel() == length.sum(), "texts with length: {} does not match declared length: {}".format(t.numel(), length.sum()) <NEW_LINE> texts = [] <NEW_LINE> index = 0 <NEW_LINE> for i in range(length.numel()): <NEW_LINE> <INDENT> l = length[i] <NEW_LINE> texts.append( self.decode( t[index:index + l], torch.IntTensor([l]), raw=raw)) <NEW_LINE> index += l <NEW_LINE> <DEDENT> return texts | Convert between str and label.
NOTE:
Insert `blank` to the alphabet for CTC.
Args:
alphabet (str): set of the possible characters.
ignore_case (bool, default=True): whether or not to ignore all of the case. | 62598fbc63d6d428bbee297d |
class Device(base.Device): <NEW_LINE> <INDENT> def __init__(self, event_loop): <NEW_LINE> <INDENT> super().__init__(event_loop) <NEW_LINE> self.prepared_data = None <NEW_LINE> <DEDENT> def update(self, data): <NEW_LINE> <INDENT> self.prepared_data = data <NEW_LINE> for future in self.readers: <NEW_LINE> <INDENT> future.set_result(self.get_sim_data(self.prepared_data)) <NEW_LINE> <DEDENT> self.readers.clear() <NEW_LINE> <DEDENT> def get_sim_data(self, prepared): <NEW_LINE> <INDENT> return prepared <NEW_LINE> <DEDENT> def read(self): <NEW_LINE> <INDENT> if self.prepared_data is not None: <NEW_LINE> <INDENT> prepared = self.get_sim_data(self.prepared_data) <NEW_LINE> self.prepared_data = None <NEW_LINE> future = asyncio.Future() <NEW_LINE> future.set_result(prepared) <NEW_LINE> return future <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return super().read() | Sim device class | 62598fbc167d2b6e312b7142 |
class Token(namedtuple('Token', FIELD_NAMES_PLUS)): <NEW_LINE> <INDENT> def __lt__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, Token): <NEW_LINE> <INDENT> raise TypeError("unorderable types: %s < %s" % (self.__class__.__name__, other.__class__.__name__)) <NEW_LINE> <DEDENT> self_fields = self[:-1] <NEW_LINE> other_fields = other[:-1] <NEW_LINE> def get_extra(token): <NEW_LINE> <INDENT> if token.extra is None: <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return sorted(token.extra.items()) <NEW_LINE> <DEDENT> <DEDENT> if self_fields == other_fields: <NEW_LINE> <INDENT> return get_extra(self) < get_extra(other) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self_fields < other_fields <NEW_LINE> <DEDENT> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> items = [(field, getattr(self, field, None)) for field in FIELD_NAMES_PLUS] <NEW_LINE> fields = ['%s=%r' % (k, v) for k, v in items if v is not None] <NEW_LINE> return '%s(%s)' % (self.__class__.__name__, ', '.join(fields)) <NEW_LINE> <DEDENT> def as_conll(self): <NEW_LINE> <INDENT> def get(field): <NEW_LINE> <INDENT> value = getattr(self, field) <NEW_LINE> if value is None: <NEW_LINE> <INDENT> value = '_' <NEW_LINE> <DEDENT> elif field == 'feats': <NEW_LINE> <INDENT> value = '|'.join(value) <NEW_LINE> <DEDENT> return str(value) <NEW_LINE> <DEDENT> return '\t'.join([get(field) for field in FIELD_NAMES]) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_conll(this_class, text): <NEW_LINE> <INDENT> fields = text.split('\t') <NEW_LINE> fields[0] = int(fields[0]) <NEW_LINE> fields[6] = int(fields[6]) <NEW_LINE> if fields[5] != '_': <NEW_LINE> <INDENT> fields[5] = tuple(fields[5].split('|')) <NEW_LINE> <DEDENT> fields = [value if value != '_' else None for value in fields] <NEW_LINE> fields.append(None) <NEW_LINE> return this_class(**dict(zip(FIELD_NAMES_PLUS, fields))) | CoNLL-X style dependency token. Fields include:
- form (the word form)
- lemma (the word's base form or lemma) -- empty for SubprocessBackend
- pos (part of speech tag)
- index (index of the token in the sentence)
- head (index of the head of this token), and
- deprel (the dependency relation between this token and its head)
There are other fields but they typically won't be populated by
StanfordDependencies. Fields are immutable.
See CoNLL-X shared task on Multilingual Dependency Parsing by
Buchholz and Marsi(2006) (http://aclweb.org/anthology/W06-2920)
(Section 3: Data format, task definition)
for a complete description. | 62598fbcec188e330fdf8a5e |
class ReprovisionPolicy(Model): <NEW_LINE> <INDENT> _validation = { 'update_hub_assignment': {'required': True}, 'migrate_device_data': {'required': True}, } <NEW_LINE> _attribute_map = { 'update_hub_assignment': {'key': 'updateHubAssignment', 'type': 'bool'}, 'migrate_device_data': {'key': 'migrateDeviceData', 'type': 'bool'}, } <NEW_LINE> def __init__(self, update_hub_assignment=True, migrate_device_data=True): <NEW_LINE> <INDENT> super(ReprovisionPolicy, self).__init__() <NEW_LINE> self.update_hub_assignment = update_hub_assignment <NEW_LINE> self.migrate_device_data = migrate_device_data | The behavior of the service when a device is re-provisioned to an IoT hub.
:param update_hub_assignment: When set to true (default), the Device
Provisioning Service will evaluate the device's IoT Hub assignment and
update it if necessary for any provisioning requests beyond the first from
a given device. If set to false, the device will stay assigned to its
current IoT hub. Default value: True .
:type update_hub_assignment: bool
:param migrate_device_data: When set to true (default), the Device
Provisioning Service will migrate the device's data (twin, device
capabilities, and device ID) from one IoT hub to another during an IoT hub
assignment update. If set to false, the Device Provisioning Service will
reset the device's data to the initial desired configuration stored in the
corresponding enrollment list. Default value: True .
:type migrate_device_data: bool | 62598fbcff9c53063f51a81a |
class NDaysBeforeLastTradingDayOfWeek(TradingDayOfWeekRule): <NEW_LINE> <INDENT> def __init__(self, n): <NEW_LINE> <INDENT> super(NDaysBeforeLastTradingDayOfWeek, self).__init__(n, invert=True) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def get_last_trading_day_of_week(dt, cal): <NEW_LINE> <INDENT> prev = None <NEW_LINE> try: <NEW_LINE> <INDENT> while not prev or dt.weekday() > prev.weekday(): <NEW_LINE> <INDENT> prev = dt <NEW_LINE> dt = cal.next_trading_day(dt) <NEW_LINE> <DEDENT> <DEDENT> except NoFurtherDataError: <NEW_LINE> <INDENT> prev = dt <NEW_LINE> <DEDENT> if cal.is_open_on_day(prev): <NEW_LINE> <INDENT> return prev <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return cal.previous_trading_day(prev) <NEW_LINE> <DEDENT> <DEDENT> date_func = get_last_trading_day_of_week | A rule that triggers n days before the last trading day of the week. | 62598fbc2c8b7c6e89bd3992 |
class DevicesField(ObjectFieldRelatedSet): <NEW_LINE> <INDENT> def __init__(self, name): <NEW_LINE> <INDENT> super(ObjectFieldRelatedSet, self).__init__(name, default=[], readonly=True) <NEW_LINE> <DEDENT> def datum_to_value(self, instance, datum): <NEW_LINE> <INDENT> if datum is None: <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> if not isinstance(datum, Sequence): <NEW_LINE> <INDENT> raise TypeError("datum must be a sequence, not %s" % type(datum).__name__) <NEW_LINE> <DEDENT> bound = getattr(instance._origin, "FilesystemGroupDevices") <NEW_LINE> return bound((get_device_object(instance._origin, item) for item in datum)) | Field for `FilesystemGroupDevices`. | 62598fbc23849d37ff851280 |
class Seat: <NEW_LINE> <INDENT> def __init__(self, location: str, factor: int=8): <NEW_LINE> <INDENT> self.encodedlocation = location <NEW_LINE> self.rowfactor = factor <NEW_LINE> <DEDENT> @property <NEW_LINE> def encodedlocation(self): <NEW_LINE> <INDENT> return self.__encodedlocation <NEW_LINE> <DEDENT> @encodedlocation.setter <NEW_LINE> def encodedlocation(self, location): <NEW_LINE> <INDENT> self.__encodedlocation = location <NEW_LINE> <DEDENT> @property <NEW_LINE> def rowfactor(self): <NEW_LINE> <INDENT> return self.__rowfactor <NEW_LINE> <DEDENT> @rowfactor.setter <NEW_LINE> def rowfactor(self, factor: int): <NEW_LINE> <INDENT> self.__rowfactor = factor <NEW_LINE> <DEDENT> def decode_rowcol(self, rows: int=128, cols: int=8): <NEW_LINE> <INDENT> col = 0 <NEW_LINE> row = 0 <NEW_LINE> col_stride = int(cols / 2) <NEW_LINE> row_stride = int(rows / 2) <NEW_LINE> for code in self.encodedlocation: <NEW_LINE> <INDENT> if code in "FB": <NEW_LINE> <INDENT> if code == "B": <NEW_LINE> <INDENT> row += row_stride <NEW_LINE> <DEDENT> row_stride = int(row_stride/ 2) <NEW_LINE> <DEDENT> if code in "LR": <NEW_LINE> <INDENT> if code == "R": <NEW_LINE> <INDENT> col += col_stride <NEW_LINE> <DEDENT> col_stride = int(col_stride / 2) <NEW_LINE> <DEDENT> <DEDENT> return row, col <NEW_LINE> <DEDENT> def get_seatid(self, row: int, col: int): <NEW_LINE> <INDENT> return self.rowfactor * row + col | "binary space partitioning" machine system for encoding airplane seating assignment...
in the North Pole... | 62598fbc3d592f4c4edbb08b |
class IPTagger(object): <NEW_LINE> <INDENT> name='ip_tagger' <NEW_LINE> def __init__(self, nlp, pattern_id='IPTagger', attrs=('has_ipv4', 'is_ipv4', 'ipv4'), force_extension=False, subnets_to_keep=4): <NEW_LINE> <INDENT> self._has_ipv4, self._is_ipv4, self._ipv4 = attrs <NEW_LINE> self.matcher = Matcher(nlp.vocab) <NEW_LINE> if (subnets_to_keep < 1) or (subnets_to_keep > 4): <NEW_LINE> <INDENT> raise ValueError('Subnets_to_keep must be in the range 1-4') <NEW_LINE> <DEDENT> self.subnets_to_keep = subnets_to_keep <NEW_LINE> self._ipv4_re = re.compile(ipv4_expr, re.VERBOSE | re.I | re.UNICODE) <NEW_LINE> ipv4_mask = lambda text: bool(self._ipv4_re.match(text)) <NEW_LINE> ipv4_flag = nlp.vocab.add_flag(ipv4_mask) <NEW_LINE> self.matcher.add('IPV4', None, [{ipv4_flag: True}]) <NEW_LINE> Doc.set_extension(self._has_ipv4, getter=self.has_ipv4, force=force_extension) <NEW_LINE> Doc.set_extension(self._ipv4, getter=self.iter_ipv4, force=force_extension) <NEW_LINE> Span.set_extension(self._has_ipv4, getter=self.has_ipv4, force=force_extension) <NEW_LINE> Span.set_extension(self._ipv4, getter=self.iter_ipv4, force=force_extension) <NEW_LINE> Token.set_extension(self._is_ipv4, default=False, force=force_extension) <NEW_LINE> <DEDENT> def __call__(self, doc): <NEW_LINE> <INDENT> matches = self.matcher(doc) <NEW_LINE> spans = [] <NEW_LINE> for match_id, start, end in matches: <NEW_LINE> <INDENT> span = doc[start : end] <NEW_LINE> for token in span: <NEW_LINE> <INDENT> token._.set(self._is_ipv4, True) <NEW_LINE> token.lemma_ = stem_ip_addr(token.text, self.subnets_to_keep) <NEW_LINE> <DEDENT> spans.append(span) <NEW_LINE> <DEDENT> return doc <NEW_LINE> <DEDENT> def has_ipv4(self, tokens): <NEW_LINE> <INDENT> return any(token._.get(self._is_ipv4) for token in tokens) <NEW_LINE> <DEDENT> def iter_ipv4(self, tokens): <NEW_LINE> <INDENT> return [(i, t) for i, t in enumerate(tokens) if t._.get(self._is_ipv4)] | spaCy v2.0 pipeline component for adding IP meta data to `Doc` objects.
USAGE:
>>> import spacy
>>> from spacy.lang.en import English
>>> from cyberspacy import IPTagger
>>> nlp = English()
>>> ip_Tagger = IPTagger(nlp)
>>> nlp.add_pipe(ip_Tagger, first=True)
>>> doc = nlp(u'This is a sentence which contains 2.3.4.5 as an IP address')
>>> assert doc._.has_ipv4 == True
>>> assert doc[0]._.is_ipv4 == False
>>> assert doc[6]._.is_ipv4 == True
>>> assert len(doc._.ipv4) == 1
>>> idx, ipv4_token = doc._.ipv4[0]
>>> assert idx == 6
>>> assert ipv4_token.text == '2.3.4.5' | 62598fbc3317a56b869be635 |
class _ConfigSection(object): <NEW_LINE> <INDENT> def __getattr__(self, attr): <NEW_LINE> <INDENT> if attr in self.__dict__: <NEW_LINE> <INDENT> return self.__dict__[attr] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise AttributeError('No "%s" setting in section.' % attr) <NEW_LINE> <DEDENT> <DEDENT> def __getitem__(self, item): <NEW_LINE> <INDENT> return getattr(self, item) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return str(tuple(self.__dict__.keys())) | Hold settings for a section of the configuration file. | 62598fbc76e4537e8c3ef773 |
class AuthTokenSerializer(serializers.Serializer): <NEW_LINE> <INDENT> email = serializers.CharField() <NEW_LINE> password = serializers.CharField( style={'input_type': 'password'}, trim_whitespace=False ) <NEW_LINE> def validate(self, attrs): <NEW_LINE> <INDENT> email = attrs.get('email') <NEW_LINE> password = attrs.get('password') <NEW_LINE> user = authenticate( request=self.context.get('request'), username=email, password=password ) <NEW_LINE> if not user: <NEW_LINE> <INDENT> msg = _('Unable to authenticate with the provided credintials') <NEW_LINE> raise serializers.ValidationError(msg, code='authentication') <NEW_LINE> <DEDENT> attrs['user'] = user <NEW_LINE> return attrs | Serializer for the user authentication object | 62598fbc21bff66bcd722e37 |
class DateQueryState(QueryState): <NEW_LINE> <INDENT> def __init__(self, filter, date): <NEW_LINE> <INDENT> QueryState.__init__(self, filter) <NEW_LINE> self.date = date <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return '<DateQueryState date=%r>' % (self.date,) | Create a new DateQueryState object.
:cvar date: date | 62598fbca219f33f346c69d3 |
class Timeseries(object): <NEW_LINE> <INDENT> def __init__(self, hadm_id, data_id, timeseries_data): <NEW_LINE> <INDENT> self.hadm_id = hadm_id <NEW_LINE> self.data_id = data_id <NEW_LINE> self.series = timeseries_data.sort_index() if timeseries_data is not None else pd.Series() <NEW_LINE> self.series = self.series.rename(self.data_id.uniq_id) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> unformatted = "------\n{0}\n#Data Points:{1}\n------" <NEW_LINE> return unformatted.format(self.data_id, self.series.size) <NEW_LINE> <DEDENT> def time_split(self,start_dt,end_dt,duration_td,hadm_id_index=True): <NEW_LINE> <INDENT> split_list = [] <NEW_LINE> while start_dt < end_dt: <NEW_LINE> <INDENT> next_dt = start_dt + duration_td <NEW_LINE> split_ts = Timeseries(self.hadm_id,self.data_id,self.series[start_dt:next_dt]) <NEW_LINE> split_list.append(split_ts) <NEW_LINE> start_dt = next_dt <NEW_LINE> <DEDENT> return split_list | Wrapper for timeseries data, contains a time-indexed pandas.Series object as
well as a dataIDs.DataID object describing what the data is and the hadm_id
signifying where the data came from. | 62598fbc442bda511e95c62a |
class CalibrationMode(enum.IntEnum): <NEW_LINE> <INDENT> BootTareGyroAccel = 0 <NEW_LINE> Temperature = 1 <NEW_LINE> Magnetometer12Pt = 2 <NEW_LINE> Magnetometer360 = 3 <NEW_LINE> Accelerometer = 5 <NEW_LINE> Unknown = -1 | Various calibration modes supported by Pigeon. | 62598fbc3539df3088ecc47a |
class LinkedBag(object): <NEW_LINE> <INDENT> def __init__(self,sourceCollection = None): <NEW_LINE> <INDENT> self._items = None <NEW_LINE> self._size = None <NEW_LINE> if sourceCollection: <NEW_LINE> <INDENT> for item in sourceCollection: <NEW_LINE> <INDENT> self.add(item) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> cursor = self._items <NEW_LINE> while cursor is not None: <NEW_LINE> <INDENT> yield cursor._data <NEW_LINE> cursor = cursor._next <NEW_LINE> <DEDENT> <DEDENT> def clear(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def add(self,item): <NEW_LINE> <INDENT> self._items = Node(item,self._items) <NEW_LINE> self._size += 1 <NEW_LINE> <DEDENT> def remove(self,item): <NEW_LINE> <INDENT> if item not in self: <NEW_LINE> <INDENT> raise KeyError(str(item) + "not in bag.") <NEW_LINE> <DEDENT> probe = self._items <NEW_LINE> trailer = None <NEW_LINE> for targetItem in self: <NEW_LINE> <INDENT> if targetItem == item: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> trailer = probe <NEW_LINE> probe = probe._next <NEW_LINE> <DEDENT> if probe == self._items: <NEW_LINE> <INDENT> self._items = self._items.next <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> trailer._next = probe._next <NEW_LINE> <DEDENT> self._size -= 1 <NEW_LINE> <DEDENT> def isEmpty(self): <NEW_LINE> <INDENT> return len(self) == 0 <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return self._size <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "{" + ",".join(map(str, self)) + "}" <NEW_LINE> <DEDENT> def __add__(self, other): <NEW_LINE> <INDENT> result = LinkedBag(self) <NEW_LINE> for item in other: <NEW_LINE> <INDENT> result.add(item) <NEW_LINE> <DEDENT> return result <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if self is other: return True <NEW_LINE> if type(self) != type(other) or len(self) != len(other): return False <NEW_LINE> for item in self: <NEW_LINE> <INDENT> if not item in other: <NEW_LINE> <INDENT> return False 
<NEW_LINE> <DEDENT> <DEDENT> return True | A link-based bag implementation. | 62598fbc283ffb24f3cf3a50 |
class Numbers(object): <NEW_LINE> <INDENT> _precision = 0 <NEW_LINE> _near0 = 1.0 <NEW_LINE> _near100 = 99.0 <NEW_LINE> def __init__(self, n_files=0, n_statements=0, n_excluded=0, n_missing=0, n_branches=0, n_partial_branches=0, n_missing_branches=0 ): <NEW_LINE> <INDENT> self.n_files = n_files <NEW_LINE> self.n_statements = n_statements <NEW_LINE> self.n_excluded = n_excluded <NEW_LINE> self.n_missing = n_missing <NEW_LINE> self.n_branches = n_branches <NEW_LINE> self.n_partial_branches = n_partial_branches <NEW_LINE> self.n_missing_branches = n_missing_branches <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def set_precision(cls, precision): <NEW_LINE> <INDENT> assert 0 <= precision < 10 <NEW_LINE> cls._precision = precision <NEW_LINE> cls._near0 = 1.0 / 10**precision <NEW_LINE> cls._near100 = 100.0 - cls._near0 <NEW_LINE> <DEDENT> @property <NEW_LINE> def n_executed(self): <NEW_LINE> <INDENT> return self.n_statements - self.n_missing <NEW_LINE> <DEDENT> @property <NEW_LINE> def n_executed_branches(self): <NEW_LINE> <INDENT> return self.n_branches - self.n_missing_branches <NEW_LINE> <DEDENT> @property <NEW_LINE> def pc_covered(self): <NEW_LINE> <INDENT> if self.n_statements > 0: <NEW_LINE> <INDENT> pc_cov = (100.0 * (self.n_executed + self.n_executed_branches) / (self.n_statements + self.n_branches)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> pc_cov = 100.0 <NEW_LINE> <DEDENT> return pc_cov <NEW_LINE> <DEDENT> @property <NEW_LINE> def pc_covered_str(self): <NEW_LINE> <INDENT> pc = self.pc_covered <NEW_LINE> if 0 < pc < self._near0: <NEW_LINE> <INDENT> pc = self._near0 <NEW_LINE> <DEDENT> elif self._near100 < pc < 100: <NEW_LINE> <INDENT> pc = self._near100 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> pc = round(pc, self._precision) <NEW_LINE> <DEDENT> return "%.*f" % (self._precision, pc) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def pc_str_width(cls): <NEW_LINE> <INDENT> width = 3 <NEW_LINE> if cls._precision > 0: <NEW_LINE> <INDENT> width += 1 + 
cls._precision <NEW_LINE> <DEDENT> return width <NEW_LINE> <DEDENT> def __add__(self, other): <NEW_LINE> <INDENT> nums = Numbers() <NEW_LINE> nums.n_files = self.n_files + other.n_files <NEW_LINE> nums.n_statements = self.n_statements + other.n_statements <NEW_LINE> nums.n_excluded = self.n_excluded + other.n_excluded <NEW_LINE> nums.n_missing = self.n_missing + other.n_missing <NEW_LINE> nums.n_branches = self.n_branches + other.n_branches <NEW_LINE> nums.n_partial_branches = ( self.n_partial_branches + other.n_partial_branches ) <NEW_LINE> nums.n_missing_branches = ( self.n_missing_branches + other.n_missing_branches ) <NEW_LINE> return nums <NEW_LINE> <DEDENT> def __radd__(self, other): <NEW_LINE> <INDENT> if other == 0: <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> return NotImplemented | The numerical results of measuring coverage.
This holds the basic statistics from `Analysis`, and is used to roll
up statistics across files. | 62598fbc56ac1b37e63023bb |
class InfrastructureStorage: <NEW_LINE> <INDENT> def __init__(self, context): <NEW_LINE> <INDENT> self.__context = context <NEW_LINE> <DEDENT> def configure_tiers(self, datacenter, tier): <NEW_LINE> <INDENT> log.info("Enabling tier %s..." % tier) <NEW_LINE> tiers = datacenter.listTiers() <NEW_LINE> tiers[0].setName(tier) <NEW_LINE> tiers[0].update() <NEW_LINE> for tier in tiers[1:]: <NEW_LINE> <INDENT> tier.setEnabled(False) <NEW_LINE> tier.update() <NEW_LINE> <DEDENT> return tiers[0] <NEW_LINE> <DEDENT> def create_device(self, datacenter, devname, devtype, devaddress, devmanaddress, user, password): <NEW_LINE> <INDENT> log.info("Creating storage device %s at %s..." % (devname, devaddress)) <NEW_LINE> device = StorageDevice.builder(self.__context, datacenter) .name(devname) .type(devtype) .iscsiIp(devaddress) .managementIp(devmanaddress) .username(user) .password(password) .build() <NEW_LINE> device.save() <NEW_LINE> return device <NEW_LINE> <DEDENT> def create_pool(self, device, tier, poolname): <NEW_LINE> <INDENT> log.info("Adding pool %s..." % poolname) <NEW_LINE> pool = device.findRemoteStoragePool( StoragePoolPredicates.name(poolname)) <NEW_LINE> pool.setTier(tier) <NEW_LINE> pool.save() <NEW_LINE> return pool | Provides access to infrastructure storage features. | 62598fbc60cbc95b0636450c |
class InvalidTestRunError(Exception): <NEW_LINE> <INDENT> pass | Raised if test run is invalid. | 62598fbc091ae35668704df1 |
@urls.register <NEW_LINE> class FloatingIPs(generic.View): <NEW_LINE> <INDENT> url_regex = r'network/floatingips/$' <NEW_LINE> @rest_utils.ajax() <NEW_LINE> def get(self, request): <NEW_LINE> <INDENT> result = api.neutron.tenant_floating_ip_list(request) <NEW_LINE> return {'items': [ip.to_dict() for ip in result]} | API for floating IP addresses. | 62598fbc9f28863672818962 |
class TransactionsCommandsMixin: <NEW_LINE> <INDENT> def unwatch(self): <NEW_LINE> <INDENT> fut = self._pool_or_conn.execute(b'UNWATCH') <NEW_LINE> return wait_ok(fut) <NEW_LINE> <DEDENT> def watch(self, key, *keys): <NEW_LINE> <INDENT> fut = self._pool_or_conn.execute(b'WATCH', key, *keys) <NEW_LINE> return wait_ok(fut) <NEW_LINE> <DEDENT> def multi_exec(self): <NEW_LINE> <INDENT> return MultiExec(self._pool_or_conn, self.__class__, loop=self._pool_or_conn._loop) <NEW_LINE> <DEDENT> def pipeline(self): <NEW_LINE> <INDENT> return Pipeline(self._pool_or_conn, self.__class__, loop=self._pool_or_conn._loop) | Transaction commands mixin.
For commands details see: http://redis.io/commands/#transactions
Transactions HOWTO:
>>> tr = redis.multi_exec()
>>> result_future1 = tr.incr('foo')
>>> result_future2 = tr.incr('bar')
>>> try:
... result = await tr.execute()
... except MultiExecError:
... pass # check what happened
>>> result1 = await result_future1
>>> result2 = await result_future2
>>> assert result == [result1, result2] | 62598fbc7d43ff24874274eb |
class _Query(object): <NEW_LINE> <INDENT> __slots__ = ('flags', 'ns', 'ntoskip', 'ntoreturn', 'spec', 'fields', 'codec_options', 'read_preference', 'limit', 'batch_size') <NEW_LINE> name = 'find' <NEW_LINE> def __init__(self, flags, ns, ntoskip, ntoreturn, spec, fields, codec_options, read_preference, limit, batch_size): <NEW_LINE> <INDENT> self.flags = flags <NEW_LINE> self.ns = ns <NEW_LINE> self.ntoskip = ntoskip <NEW_LINE> self.ntoreturn = ntoreturn <NEW_LINE> self.spec = spec <NEW_LINE> self.fields = fields <NEW_LINE> self.codec_options = codec_options <NEW_LINE> self.read_preference = read_preference <NEW_LINE> self.limit = limit <NEW_LINE> self.batch_size = batch_size <NEW_LINE> <DEDENT> def as_command(self): <NEW_LINE> <INDENT> dbn, coll = self.ns.split('.', 1) <NEW_LINE> return _gen_find_command(coll, self.spec, self.fields, self.ntoskip, self.limit, self.batch_size, self.flags), dbn <NEW_LINE> <DEDENT> def get_message(self, set_slave_ok, is_mongos): <NEW_LINE> <INDENT> if is_mongos: <NEW_LINE> <INDENT> self.spec = _maybe_add_read_preference(self.spec, self.read_preference) <NEW_LINE> <DEDENT> if set_slave_ok: <NEW_LINE> <INDENT> flags = self.flags | 4 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> flags = self.flags <NEW_LINE> <DEDENT> return query(flags, self.ns, self.ntoskip, self.ntoreturn, self.spec, self.fields, self.codec_options) | A query operation. | 62598fbc377c676e912f6e58 |
class PersonalInformation(forms.Form): <NEW_LINE> <INDENT> nickname = forms.CharField(error_messages={ 'max_length': '昵称长度必须小于50位', }, required=False, max_length=50, ) <NEW_LINE> telephone = forms.CharField(validators=[ RegexValidator(r'^1[13456789]\d{9}$', "提示信息:手机号码格式错误"), ], required=False) <NEW_LINE> birthday = forms.DateField(required=False) <NEW_LINE> school = forms.CharField(max_length=50, required=False, error_messages={ 'max_length': '学校名字长度必须小于50位', }) <NEW_LINE> nativePlace = forms.CharField(max_length=50, required=False, error_messages={ 'max_length': '学校名字长度必须小于50位', }) <NEW_LINE> location = forms.CharField(max_length=50, required=False, error_messages={ 'max_length': '地址长度必须小于50位', }) <NEW_LINE> def clean_birthday(self): <NEW_LINE> <INDENT> now = date.today() <NEW_LINE> time = self.cleaned_data.get('birthday') <NEW_LINE> if time is None: <NEW_LINE> <INDENT> return time <NEW_LINE> <DEDENT> if time > now: <NEW_LINE> <INDENT> raise forms.ValidationError('请正确填写日期') <NEW_LINE> <DEDENT> return time | 验证个人信息合格性 | 62598fbc3346ee7daa33772f |
class TableClause(Immutable, FromClause): <NEW_LINE> <INDENT> __visit_name__ = "table" <NEW_LINE> named_with_column = True <NEW_LINE> implicit_returning = False <NEW_LINE> _autoincrement_column = None <NEW_LINE> def __init__(self, name, *columns, **kw): <NEW_LINE> <INDENT> super(TableClause, self).__init__() <NEW_LINE> self.name = self.fullname = name <NEW_LINE> self._columns = ColumnCollection() <NEW_LINE> self.primary_key = ColumnSet() <NEW_LINE> self.foreign_keys = set() <NEW_LINE> for c in columns: <NEW_LINE> <INDENT> self.append_column(c) <NEW_LINE> <DEDENT> schema = kw.pop("schema", None) <NEW_LINE> if schema is not None: <NEW_LINE> <INDENT> self.schema = schema <NEW_LINE> <DEDENT> if kw: <NEW_LINE> <INDENT> raise exc.ArgumentError("Unsupported argument(s): %s" % list(kw)) <NEW_LINE> <DEDENT> <DEDENT> def _init_collections(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @util.memoized_property <NEW_LINE> def description(self): <NEW_LINE> <INDENT> if util.py3k: <NEW_LINE> <INDENT> return self.name <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self.name.encode("ascii", "backslashreplace") <NEW_LINE> <DEDENT> <DEDENT> def append_column(self, c): <NEW_LINE> <INDENT> existing = c.table <NEW_LINE> if existing is not None and existing is not self: <NEW_LINE> <INDENT> raise exc.ArgumentError( "column object '%s' already assigned to table %r" % (c.key, getattr(existing, "description", existing)) ) <NEW_LINE> <DEDENT> self._columns[c.key] = c <NEW_LINE> c.table = self <NEW_LINE> <DEDENT> def get_children(self, column_collections=True, **kwargs): <NEW_LINE> <INDENT> if column_collections: <NEW_LINE> <INDENT> return [c for c in self.c] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> <DEDENT> @util.dependencies("sqlalchemy.sql.dml") <NEW_LINE> def insert(self, dml, values=None, inline=False, **kwargs): <NEW_LINE> <INDENT> return dml.Insert(self, values=values, inline=inline, **kwargs) <NEW_LINE> <DEDENT> 
@util.dependencies("sqlalchemy.sql.dml") <NEW_LINE> def update( self, dml, whereclause=None, values=None, inline=False, **kwargs ): <NEW_LINE> <INDENT> return dml.Update( self, whereclause=whereclause, values=values, inline=inline, **kwargs ) <NEW_LINE> <DEDENT> @util.dependencies("sqlalchemy.sql.dml") <NEW_LINE> def delete(self, dml, whereclause=None, **kwargs): <NEW_LINE> <INDENT> return dml.Delete(self, whereclause, **kwargs) <NEW_LINE> <DEDENT> @property <NEW_LINE> def _from_objects(self): <NEW_LINE> <INDENT> return [self] | Represents a minimal "table" construct.
This is a lightweight table object that has only a name, a
collection of columns, which are typically produced
by the :func:`_expression.column` function, and a schema::
from sqlalchemy import table, column
user = table("user",
column("id"),
column("name"),
column("description"),
)
The :class:`_expression.TableClause` construct serves as the base for
the more commonly used :class:`_schema.Table` object, providing
the usual set of :class:`_expression.FromClause` services including
the ``.c.`` collection and statement generation methods.
It does **not** provide all the additional schema-level services
of :class:`_schema.Table`, including constraints, references to other
tables, or support for :class:`_schema.MetaData`-level services.
It's useful
on its own as an ad-hoc construct used to generate quick SQL
statements when a more fully fledged :class:`_schema.Table`
is not on hand. | 62598fbc56ac1b37e63023bc |
class VolumeTypeManager(base.ManagerWithFind): <NEW_LINE> <INDENT> resource_class = VolumeType <NEW_LINE> def list(self): <NEW_LINE> <INDENT> warnings.warn('The novaclient.v2.volume_types module is deprecated ' 'and will be removed after Nova 2016.1 is released. Use ' 'python-cinderclient or python-openstacksdk instead.', DeprecationWarning) <NEW_LINE> with self.alternate_service_type('volume'): <NEW_LINE> <INDENT> return self._list("/types", "volume_types") <NEW_LINE> <DEDENT> <DEDENT> def get(self, volume_type): <NEW_LINE> <INDENT> warnings.warn('The novaclient.v2.volume_types module is deprecated ' 'and will be removed after Nova 2016.1 is released. Use ' 'python-cinderclient or python-openstacksdk instead.', DeprecationWarning) <NEW_LINE> with self.alternate_service_type('volume'): <NEW_LINE> <INDENT> return self._get("/types/%s" % base.getid(volume_type), "volume_type") <NEW_LINE> <DEDENT> <DEDENT> def delete(self, volume_type): <NEW_LINE> <INDENT> warnings.warn('The novaclient.v2.volume_types module is deprecated ' 'and will be removed after Nova 2016.1 is released. Use ' 'python-cinderclient or python-openstacksdk instead.', DeprecationWarning) <NEW_LINE> with self.alternate_service_type('volume'): <NEW_LINE> <INDENT> self._delete("/types/%s" % base.getid(volume_type)) <NEW_LINE> <DEDENT> <DEDENT> def create(self, name): <NEW_LINE> <INDENT> warnings.warn('The novaclient.v2.volume_types module is deprecated ' 'and will be removed after Nova 2016.1 is released. Use ' 'python-cinderclient or python-openstacksdk instead.', DeprecationWarning) <NEW_LINE> with self.alternate_service_type('volume'): <NEW_LINE> <INDENT> body = { "volume_type": { "name": name, } } <NEW_LINE> return self._create("/types", body, "volume_type") | DEPRECATED: Manage :class:`VolumeType` resources. | 62598fbc956e5f7376df5765 |
@core.off_by_default <NEW_LINE> @core.flake8ext <NEW_LINE> class MockAutospecCheck(object): <NEW_LINE> <INDENT> name = "mock_check" <NEW_LINE> version = "1.00" <NEW_LINE> def __init__(self, tree, filename): <NEW_LINE> <INDENT> self.filename = filename <NEW_LINE> self.tree = tree <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> mcv = MockCheckVisitor(self.filename) <NEW_LINE> mcv.visit(self.tree) <NEW_LINE> for message in mcv.messages: <NEW_LINE> <INDENT> yield message | Check for 'autospec' in mock.patch/mock.patch.object calls
Okay: mock.patch('target_module_1', autospec=True)
Okay: mock.patch('target_module_1', autospec=False)
Okay: mock.patch('target_module_1', autospec=None)
Okay: mock.patch('target_module_1', defined_mock)
Okay: mock.patch('target_module_1', new=defined_mock)
Okay: mock.patch('target_module_1', new_callable=SomeFunc)
Okay: mock.patch('target_module_1', defined_mock)
Okay: mock.patch('target_module_1', spec=1000)
Okay: mock.patch('target_module_1', spec_set=['data'])
H210: mock.patch('target_module_1')
Okay: mock.patch('target_module_1') # noqa
H210: mock.patch('target_module_1', somearg=23)
Okay: mock.patch('target_module_1', somearg=23) # noqa
Okay: mock.patch.object('target_module_2', 'attribute', autospec=True)
Okay: mock.patch.object('target_module_2', 'attribute', autospec=False)
Okay: mock.patch.object('target_module_2', 'attribute', autospec=None)
Okay: mock.patch.object('target_module_2', 'attribute', new=defined_mock)
Okay: mock.patch.object('target_module_2', 'attribute', defined_mock)
Okay: mock.patch.object('target_module_2', 'attribute', new_callable=AFunc)
Okay: mock.patch.object('target_module_2', 'attribute', spec=3)
Okay: mock.patch.object('target_module_2', 'attribute', spec_set=[3])
H210: mock.patch.object('target_module_2', 'attribute', somearg=2)
H210: mock.patch.object('target_module_2', 'attribute') | 62598fbdcc40096d6161a2c0 |
class PVPowerSim(Simulation): <NEW_LINE> <INDENT> settings = SimParameter( ID="Tuscon_SAPM", path="~/SimKit_Simulations", thresholds=None, interval=[1, "hour"], sim_length=[0, "hours"], write_frequency=0, write_fields={ "data": ["latitude", "longitude", "Tamb", "Uwind"], "outputs": ["monthly_energy", "annual_energy"] }, display_frequency=12, display_fields={ "data": ["latitude", "longitude", "Tamb", "Uwind"], "outputs": ["monthly_energy", "annual_energy"] }, commands=['start', 'pause'] ) | PV Power Demo Simulations | 62598fbdf548e778e596b775 |
class Namelist_Stmt(StmtBase): <NEW_LINE> <INDENT> subclass_names = [] <NEW_LINE> use_names = ['Namelist_Group_Name', 'Namelist_Group_Object_List'] <NEW_LINE> @staticmethod <NEW_LINE> def match(string): <NEW_LINE> <INDENT> if string[:8].upper()!='NAMELIST': <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> line = string[8:].lstrip() <NEW_LINE> parts = line.split('/') <NEW_LINE> items = [] <NEW_LINE> fst = parts.pop(0) <NEW_LINE> assert not fst,repr((fst, parts)) <NEW_LINE> while len(parts)>=2: <NEW_LINE> <INDENT> name,lst = parts[:2] <NEW_LINE> del parts[:2] <NEW_LINE> name = name.strip() <NEW_LINE> lst = lst.strip() <NEW_LINE> if lst.endswith(','): <NEW_LINE> <INDENT> lst = lst[:-1].rstrip() <NEW_LINE> <DEDENT> items.append((Namelist_Group_Name(name),Namelist_Group_Object_List(lst))) <NEW_LINE> <DEDENT> assert not parts,repr(parts) <NEW_LINE> return tuple(items) <NEW_LINE> <DEDENT> def tostr(self): <NEW_LINE> <INDENT> return 'NAMELIST ' + ', '.join('/%s/ %s' % (name_lst) for name_lst in self.items) | ::
<namelist-stmt> = NAMELIST / <namelist-group-name> / <namelist-group-object-list> [ [ , ] / <namelist-group-name> / <namelist-group-object-list> ]...
Attributes
----------
items : (Namelist_Group_Name, Namelist_Group_Object_List)-tuple
| 62598fbdf9cc0f698b1c53b6 |
class Wait(base.CreateCommand): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def Args(parser): <NEW_LINE> <INDENT> _AddWaitArgs(parser) <NEW_LINE> <DEDENT> def Run(self, args): <NEW_LINE> <INDENT> return operations_util.Wait(operations.OperationsClient(), args.operation) | Wait for a Cloud ML Engine operation to complete. | 62598fbd57b8e32f52508204 |
class Meta: <NEW_LINE> <INDENT> model = Relationtype | MetatypeAdminForm's Meta | 62598fbd4527f215b58ea09d |
class GM2MTgtQuerySet(query.QuerySet): <NEW_LINE> <INDENT> def __init__(self, model=None, query=None, using=None, hints=None): <NEW_LINE> <INDENT> super(GM2MTgtQuerySet, self).__init__(model, query, using, hints) <NEW_LINE> try: <NEW_LINE> <INDENT> if self._iterable_class is not query.ModelIterable: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> self._iterable_class = GM2MTgtQuerySetIterable <NEW_LINE> <DEDENT> def iterator(self): <NEW_LINE> <INDENT> return iter(self._iterable_class(self, chunked_fetch=True)) <NEW_LINE> <DEDENT> def filter(self, *args, **kwargs): <NEW_LINE> <INDENT> model = kwargs.pop('Model', None) <NEW_LINE> models = kwargs.pop('Model__in', set()) <NEW_LINE> if model: <NEW_LINE> <INDENT> models.add(model) <NEW_LINE> <DEDENT> ctypes = [] <NEW_LINE> for m in models: <NEW_LINE> <INDENT> if isinstance(m, six.string_types): <NEW_LINE> <INDENT> m = self.model._meta.apps.get_model(m) <NEW_LINE> <DEDENT> ctypes.append(get_content_type(m).pk) <NEW_LINE> <DEDENT> if ctypes: <NEW_LINE> <INDENT> kwargs[self.model._meta._field_names['tgt_ct'] + '__in'] = ctypes <NEW_LINE> <DEDENT> return super(GM2MTgtQuerySet, self).filter(*args, **kwargs) | A QuerySet for GM2M models which yields actual target generic objects
instead of GM2M objects when iterated over
It can also filter the output by model (= content type) | 62598fbd26068e7796d4cb2a |
class MirroredVariable(DistributedVariable, Mirrored, checkpointable.CheckpointableBase): <NEW_LINE> <INDENT> def __init__(self, index, primary_var, aggregation): <NEW_LINE> <INDENT> for v in six.itervalues(index): <NEW_LINE> <INDENT> v._mirrored_container = weakref.ref(self) <NEW_LINE> <DEDENT> self._primary_var = primary_var <NEW_LINE> self._keras_initialized = False <NEW_LINE> self._aggregation = aggregation <NEW_LINE> super(MirroredVariable, self).__init__(index) <NEW_LINE> <DEDENT> def _assign_func(self, *args, **kwargs): <NEW_LINE> <INDENT> f = kwargs.pop("f") <NEW_LINE> if distribute_lib.get_cross_tower_context(): <NEW_LINE> <INDENT> update_device = distribute_lib.get_update_device() <NEW_LINE> if update_device is not None: <NEW_LINE> <INDENT> v = self.get(device=update_device) <NEW_LINE> return f(v, *args, **kwargs) <NEW_LINE> <DEDENT> return distribute_lib.get_distribution_strategy().update( self, f, *args, **kwargs) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if self._aggregation == vs.VariableAggregation.NONE: <NEW_LINE> <INDENT> raise ValueError("You must specify an aggregation method to update a " "MirroredVariable in Tower Context.") <NEW_LINE> <DEDENT> def merge_fn(strategy, value): <NEW_LINE> <INDENT> return strategy.update( self, f, strategy.reduce( aggregation=self._aggregation, value=value, destinations=self)) <NEW_LINE> <DEDENT> return distribute_lib.get_tower_context().merge_call(merge_fn, *args, **kwargs) <NEW_LINE> <DEDENT> <DEDENT> def assign_sub(self, *args, **kwargs): <NEW_LINE> <INDENT> return self._assign_func(f=state_ops.assign_sub, *args, **kwargs) <NEW_LINE> <DEDENT> def assign_add(self, *args, **kwargs): <NEW_LINE> <INDENT> return self._assign_func(f=state_ops.assign_add, *args, **kwargs) <NEW_LINE> <DEDENT> def assign(self, *args, **kwargs): <NEW_LINE> <INDENT> return self._assign_func(f=state_ops.assign, *args, **kwargs) <NEW_LINE> <DEDENT> def is_initialized(self, name=None): <NEW_LINE> <INDENT> values_list = 
list(self._index.values()) <NEW_LINE> result = values_list[0].is_initialized() <NEW_LINE> for v in values_list[1:-1]: <NEW_LINE> <INDENT> result = math_ops.logical_and(result, v.is_initialized()) <NEW_LINE> <DEDENT> result = math_ops.logical_and(result, values_list[-1].is_initialized(), name=name) <NEW_LINE> return result <NEW_LINE> <DEDENT> @property <NEW_LINE> def initializer(self): <NEW_LINE> <INDENT> return control_flow_ops.group([v.initializer for v in self._index.values()]) <NEW_LINE> <DEDENT> @property <NEW_LINE> def aggregation(self): <NEW_LINE> <INDENT> return self._aggregation <NEW_LINE> <DEDENT> def _get_cross_tower(self): <NEW_LINE> <INDENT> device = device_util.canonicalize(device_util.current()) <NEW_LINE> if device in self._index: <NEW_LINE> <INDENT> return array_ops.identity(self._index[device]) <NEW_LINE> <DEDENT> return array_ops.identity(self._primary_var) <NEW_LINE> <DEDENT> def _as_graph_element(self): <NEW_LINE> <INDENT> if distribute_lib.get_cross_tower_context(): <NEW_LINE> <INDENT> return self._primary_var._as_graph_element() <NEW_LINE> <DEDENT> return self.get()._as_graph_element() <NEW_LINE> <DEDENT> def _gather_saveables_for_checkpoint(self): <NEW_LINE> <INDENT> def _saveable_factory(name=self._common_name): <NEW_LINE> <INDENT> return _MirroredSaveable(self, self._primary_var, name) <NEW_LINE> <DEDENT> return {checkpointable.VARIABLE_VALUE_KEY: _saveable_factory} | Holds a map from device to variables whose values are kept in sync. | 62598fbdcc0a2c111447b1dc |
class Task(models.Model): <NEW_LINE> <INDENT> user = models.ForeignKey("UserProfile") <NEW_LINE> task_type_choices = ((0, 'cmd'), (1, 'file_transfer')) <NEW_LINE> task_type = models.SmallIntegerField(choices=task_type_choices) <NEW_LINE> content = models.TextField(verbose_name="任务内容") <NEW_LINE> login_ip = models.CharField(verbose_name="操作IP", max_length=64, blank=True, null=True) <NEW_LINE> date = models.DateTimeField(auto_now_add=True) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return "%s %s" % (self.task_type, self.content) <NEW_LINE> <DEDENT> class Meta: <NEW_LINE> <INDENT> db_table = "ops_task" <NEW_LINE> verbose_name_plural = '任务记录' | 批量任务记录表 | 62598fbd97e22403b383b0d8 |
class EqualTo(BaseValidator): <NEW_LINE> <INDENT> NOT_EQUAL = 'notEqual' <NEW_LINE> error_messages = {NOT_EQUAL: "'$value' is not equal to '$comp_value'"} <NEW_LINE> def __init__(self, comp_value=None, *args, **kwargs): <NEW_LINE> <INDENT> super(EqualTo, self).__init__(*args, **kwargs) <NEW_LINE> self.comp_value = comp_value <NEW_LINE> self.message_values.update({'comp_value': self.comp_value}) <NEW_LINE> <DEDENT> def _internal_is_valid(self, value, *args, **kwargs): <NEW_LINE> <INDENT> if value != self.comp_value: <NEW_LINE> <INDENT> self.error(self.NOT_EQUAL, value) <NEW_LINE> return False <NEW_LINE> <DEDENT> return True | Compares value with a static value. | 62598fbd656771135c48983e |
class MySysLogHandler(logging.handlers.SysLogHandler): <NEW_LINE> <INDENT> def __init__(self, address, facility=logging.handlers.SysLogHandler.LOG_USER, socktype=socket.SOCK_DGRAM, ssl_enabled=False): <NEW_LINE> <INDENT> logging.Handler.__init__(self) <NEW_LINE> self.address = address <NEW_LINE> self.facility = facility <NEW_LINE> self.socktype = socktype <NEW_LINE> self.ssl_enabled = ssl_enabled <NEW_LINE> self.socket = socket.socket(socket.AF_INET, socktype) <NEW_LINE> if socktype == socket.SOCK_STREAM and ssl_enabled: <NEW_LINE> <INDENT> self.ssl_socket = ssl.wrap_socket( self.socket, ca_certs=certifi.where(), cert_reqs=ssl.CERT_REQUIRED) <NEW_LINE> self.ssl_socket.connect(address) <NEW_LINE> <DEDENT> elif socktype == socket.SOCK_STREAM: <NEW_LINE> <INDENT> self.socket.connect(address) <NEW_LINE> <DEDENT> self.socktype = socktype <NEW_LINE> self.formatter = None <NEW_LINE> self.ssl_enabled = ssl_enabled <NEW_LINE> <DEDENT> def close(self): <NEW_LINE> <INDENT> if self.socket: <NEW_LINE> <INDENT> self.socket.close() <NEW_LINE> <DEDENT> <DEDENT> def emit(self, record): <NEW_LINE> <INDENT> msg = self.format(record) + '\n' <NEW_LINE> prio = b'<%d>' % self.encodePriority(self.facility, self.mapPriority(record.levelname)) <NEW_LINE> if type(msg) is six.text_type: <NEW_LINE> <INDENT> msg = msg.encode('utf-8') <NEW_LINE> <DEDENT> msg = prio + msg <NEW_LINE> try: <NEW_LINE> <INDENT> if self.socktype == socket.SOCK_DGRAM: <NEW_LINE> <INDENT> self.socket.sendto(msg, self.address) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if self.ssl_enabled: <NEW_LINE> <INDENT> self.ssl_socket.sendall(msg) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.socket.sendall(msg) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> except (KeyboardInterrupt, SystemExit): <NEW_LINE> <INDENT> raise <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> self.handleError(record) | Custom Syslog logging handler that includes CEFEvent.
For some reason python SysLogHandler appends \x00 byte to every record sent,
This fixes it and replaces it with \n. | 62598fbd71ff763f4b5e794a |
class SlowNumbaPickler(pickle._Pickler): <NEW_LINE> <INDENT> dispatch = pickle._Pickler.dispatch.copy() <NEW_LINE> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super().__init__(*args, **kwargs) <NEW_LINE> self.__trace = [] <NEW_LINE> self.__memo = {} <NEW_LINE> <DEDENT> def save(self, obj): <NEW_LINE> <INDENT> self.__trace.append(f"{type(obj)}: {id(obj)}" ) <NEW_LINE> try: <NEW_LINE> <INDENT> return super().save(obj) <NEW_LINE> <DEDENT> except _TracedPicklingError: <NEW_LINE> <INDENT> raise <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> def perline(items): <NEW_LINE> <INDENT> return '\n'.join(f" [{depth}]: {it}" for depth, it in enumerate(items)) <NEW_LINE> <DEDENT> m = (f"Failed to pickle because of\n {type(e).__name__}: {e}" f"\ntracing... \n{perline(self.__trace)}") <NEW_LINE> raise _TracedPicklingError(m) <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> self.__trace.pop() <NEW_LINE> <DEDENT> <DEDENT> def _save_function(self, func): <NEW_LINE> <INDENT> if _is_importable(func): <NEW_LINE> <INDENT> return self.save_global(func) <NEW_LINE> <DEDENT> if id(func) in self.__memo: <NEW_LINE> <INDENT> msg = f"Recursive function reference on {func}" <NEW_LINE> raise pickle.PicklingError(msg) <NEW_LINE> <DEDENT> self.__memo[id(func)] = func <NEW_LINE> try: <NEW_LINE> <INDENT> gls = _get_function_globals_for_reduction(func) <NEW_LINE> args = _reduce_function(func, gls) <NEW_LINE> self.save_reduce(_rebuild_function, args, obj=func) <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> self.__memo.pop(id(func)) <NEW_LINE> <DEDENT> <DEDENT> dispatch[FunctionType] = _save_function <NEW_LINE> def _save_module(self, mod): <NEW_LINE> <INDENT> return self.save_reduce(_rebuild_module, (mod.__name__,)) <NEW_LINE> <DEDENT> dispatch[ModuleType] = _save_module <NEW_LINE> def _save_custom_pickled(self, cp): <NEW_LINE> <INDENT> return self.save_reduce(*cp._reduce()) <NEW_LINE> <DEDENT> dispatch[_CustomPickled] = _save_custom_pickled | Extends the pure-python Pickler 
to support the pickling need in Numba.
Adds pickling for closure functions, modules.
Adds customized pickling for _CustomPickled to avoid invoking a new
Pickler instance.
Note: this is used on Python < 3.8 unless `pickle5` is installed.
Note: This is good for debugging because the C-pickler hides the traceback | 62598fbd56ac1b37e63023be |
class Meta(BaseTable.Meta): <NEW_LINE> <INDENT> model = models.ConfigCompliance <NEW_LINE> fields = ( "pk", "device", ) | Metaclass attributes of ConfigComplianceTable. | 62598fbd55399d3f056266e5 |
class gates(element): <NEW_LINE> <INDENT> def __init__(self, width = 32, *inputs): <NEW_LINE> <INDENT> super(gates, self).__init__() <NEW_LINE> self.width = width <NEW_LINE> self.inputs = inputs <NEW_LINE> self.value = None <NEW_LINE> <DEDENT> def setInputs(*inputs): <NEW_LINE> <INDENT> self.inputs = inputs | docstring for element | 62598fbde1aae11d1e7ce90d |
class CachedSimpleResource(SimpleResource): <NEW_LINE> <INDENT> def __init__(self, uri, duration=datetime.timedelta(weeks=1), invalidateCache=False): <NEW_LINE> <INDENT> self.needsCaching = invalidateCache or self.isExpired(uri) <NEW_LINE> if self.needsCaching: <NEW_LINE> <INDENT> self.uri = uri <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.uri = self.getCachedURI(uri) <NEW_LINE> <DEDENT> self.expires = None <NEW_LINE> if isinstance(duration, datetime.timedelta): <NEW_LINE> <INDENT> self.expires = datetime.datetime.now() + duration <NEW_LINE> <DEDENT> elif duration > 0: <NEW_LINE> <INDENT> self.expires = datetime.datetime.now() + datetime.timedelta(seconds=duration) <NEW_LINE> <DEDENT> <DEDENT> def run(self, resource): <NEW_LINE> <INDENT> result = super(CachedSimpleResource, self).run(resource) <NEW_LINE> if self.needsCaching: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> log.debug("Caching resource %s to %s" % (self.uri, self.getCachedURI(self.uri))) <NEW_LINE> if not os.path.exists(os.path.join(tempfile.gettempdir(), "heimdall")): <NEW_LINE> <INDENT> os.makedirs(os.path.join(tempfile.gettempdir(), "heimdall")) <NEW_LINE> <DEDENT> open(self.getCachedURI(self.uri), "wb").write(result) <NEW_LINE> if self.expires: <NEW_LINE> <INDENT> open(self.getCachedURI(self.uri) + ".expires", "w").write(self.expires.strftime("%Y-%m-%d %H:%M:%S")) <NEW_LINE> <DEDENT> elif os.path.exists(self.getCachedURI(self.uri) + ".expires"): <NEW_LINE> <INDENT> os.remove(self.getCachedURI(self.uri) + ".expires") <NEW_LINE> <DEDENT> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> log.exception("Failed to cache resource: %s" % e) <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def getCachedURI(self, uri): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self.cachedURI <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> self.cachedURI = os.path.join(tempfile.gettempdir(), "heimdall", hashlib.md5(uri).hexdigest()) <NEW_LINE> self.cachedURI = 
urlparse.urlparse(self.cachedURI).path <NEW_LINE> return self.cachedURI <NEW_LINE> <DEDENT> <DEDENT> def isExpired(self, uri): <NEW_LINE> <INDENT> if os.path.exists(self.getCachedURI(uri) + ".expires"): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> timestring = open(self.getCachedURI(uri) + ".expires").read() <NEW_LINE> timeobject = datetime.datetime.strptime(timestring, "%Y-%m-%d %H:%M:%S") <NEW_LINE> return timeobject <= datetime.datetime.now() <NEW_LINE> <DEDENT> except ValueError as e: <NEW_LINE> <INDENT> log.error("\"%s\" can't be parsed by strptime(): %s" % (timestring, e)) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> log.error(e) <NEW_LINE> <DEDENT> <DEDENT> return not os.path.exists(self.getCachedURI(uri)) | Adds a caching layer on top of SimpleResource. duration is a timedelta or
number of seconds for which the cache is valid (default: one week). Setting
invalidateCache to True re-caches immediately. | 62598fbd3617ad0b5ee06317 |
class menu_create_result: <NEW_LINE> <INDENT> thrift_spec = ( (0, TType.I16, 'success', None, None, ), (1, TType.STRUCT, 'e', (MPError, MPError.thrift_spec), None, ), ) <NEW_LINE> def __init__(self, success=None, e=None,): <NEW_LINE> <INDENT> self.success = success <NEW_LINE> self.e = e <NEW_LINE> <DEDENT> def read(self, iprot): <NEW_LINE> <INDENT> if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None: <NEW_LINE> <INDENT> fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec)) <NEW_LINE> return <NEW_LINE> <DEDENT> iprot.readStructBegin() <NEW_LINE> while True: <NEW_LINE> <INDENT> (fname, ftype, fid) = iprot.readFieldBegin() <NEW_LINE> if ftype == TType.STOP: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> if fid == 0: <NEW_LINE> <INDENT> if ftype == TType.I16: <NEW_LINE> <INDENT> self.success = iprot.readI16() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> elif fid == 1: <NEW_LINE> <INDENT> if ftype == TType.STRUCT: <NEW_LINE> <INDENT> self.e = MPError() <NEW_LINE> self.e.read(iprot) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> iprot.readFieldEnd() <NEW_LINE> <DEDENT> iprot.readStructEnd() <NEW_LINE> <DEDENT> def write(self, oprot): <NEW_LINE> <INDENT> if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None: <NEW_LINE> <INDENT> oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec))) <NEW_LINE> return <NEW_LINE> <DEDENT> oprot.writeStructBegin('menu_create_result') <NEW_LINE> if self.success is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('success', TType.I16, 0) <NEW_LINE> oprot.writeI16(self.success) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> if 
self.e is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('e', TType.STRUCT, 1) <NEW_LINE> self.e.write(oprot) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> oprot.writeFieldStop() <NEW_LINE> oprot.writeStructEnd() <NEW_LINE> <DEDENT> def validate(self): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> def __hash__(self): <NEW_LINE> <INDENT> value = 17 <NEW_LINE> value = (value * 31) ^ hash(self.success) <NEW_LINE> value = (value * 31) ^ hash(self.e) <NEW_LINE> return value <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> L = ['%s=%r' % (key, value) for key, value in self.__dict__.iteritems()] <NEW_LINE> return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not (self == other) | Attributes:
- success
- e | 62598fbd5166f23b2e2435af |
class Type:
    """Descriptor for a (possibly parameterized) type.

    Attributes:
      - name : the type's name (a string)
      - tparam : for template types (list, dict, ...), a list of Type objects
        giving the template parameters
      - python_class : the original Python class implementing this type

    Two Type objects compare equal on ``name`` and ``tparam`` only;
    ``python_class`` takes no part in ``__eq__`` or ``__hash__``.
    """

    __slots__ = ["name", "tparam", "python_class"]

    def __init__(self, name, tparam=None, python_class=None):
        if tparam is None:
            tparam = []
        assert isinstance(name, _string_types)
        assert isinstance(tparam, list)
        self.name = name
        self.tparam = tparam
        self.python_class = python_class

    def __hash__(self):
        # Must stay consistent with __eq__: hash on name and tparam only.
        return hash((self.name, tuple(self.tparam)))

    def __eq__(self, other):
        # Fix: comparing against a non-Type used to raise AttributeError.
        # Return NotImplemented so Python falls back to the other operand's
        # __eq__ / identity, as the equality protocol requires.
        if not isinstance(other, Type):
            return NotImplemented
        return self.name == other.name and self.tparam == other.tparam

    def __ne__(self, other):
        result = self.__eq__(other)
        if result is NotImplemented:
            return result
        return not result

    def __repr__(self):
        ret = self.name
        if len(self.tparam) > 0:
            ret += "[" + ",".join(repr(x) for x in self.tparam) + "]"
        return ret

    def __str__(self):
        return self.__repr__()

    def sexp(self):
        """Return an s-expression form: the bare name for simple types, or
        [name, [param sexps...]] for parameterized types.  Parameters that do
        not expose a sexp() method are embedded as-is."""
        if len(self.tparam) == 0:
            return self.name
        ret = [self.name]
        ret.append([a.sexp() if hasattr(a, "sexp") else a for a in self.tparam])
        return ret
class SourceCheck(Check): <NEW_LINE> <INDENT> source = True <NEW_LINE> def check_target_unit_with_flag(self, sources, targets, unit): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> def check_single(self, source, target, unit): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> def check_source(self, source, unit): <NEW_LINE> <INDENT> raise NotImplementedError() | Basic class for source checks. | 62598fbde5267d203ee6bad0 |
class muli(CALExt): <NEW_LINE> <INDENT> def block(self, d, a, value): <NEW_LINE> <INDENT> code = self.get_active_code() <NEW_LINE> temp = code.prgm.acquire_register((value, value, value, value)) <NEW_LINE> code.add(cal.mul(d, a, temp)) <NEW_LINE> code.prgm.release_register(temp) <NEW_LINE> return | Floating point multiply immediate | 62598fbd4c3428357761a48d |
class PilImage(qrcode.image.base.BaseImage): <NEW_LINE> <INDENT> def __init__(self, border, width, box_size): <NEW_LINE> <INDENT> if Image is None and ImageDraw is None: <NEW_LINE> <INDENT> raise NotImplementedError("PIL not available") <NEW_LINE> <DEDENT> super(PilImage, self).__init__(border, width, box_size) <NEW_LINE> self.kind = "PNG" <NEW_LINE> pixelsize = (self.width + self.border * 2) * self.box_size <NEW_LINE> self._img = Image.new("1", (pixelsize, pixelsize), "white") <NEW_LINE> self._idr = ImageDraw.Draw(self._img) <NEW_LINE> <DEDENT> def drawrect(self, row, col): <NEW_LINE> <INDENT> x = (col + self.border) * self.box_size <NEW_LINE> y = (row + self.border) * self.box_size <NEW_LINE> box = [(x, y), (x + self.box_size - 1, y + self.box_size - 1)] <NEW_LINE> self._idr.rectangle(box, fill="black") <NEW_LINE> <DEDENT> def save(self, stream, kind=None): <NEW_LINE> <INDENT> if kind is None: <NEW_LINE> <INDENT> kind = self.kind <NEW_LINE> <DEDENT> self._img.save(stream, kind) <NEW_LINE> <DEDENT> def __getattr__(self, name): <NEW_LINE> <INDENT> return getattr(self._img, name) | PIL image builder, default format is PNG. | 62598fbd63d6d428bbee2982 |
class DSNfe(FrontEnd):
    """A generic DSN front end.

    Handles bands S, X and Ka.  A DSN front end has only one input but one or
    two outputs, for one or two polarizations (RCP/LCP).  If the band is not
    given via the 'band' keyword, it is parsed from the first valid band code
    appearing in the name (e.g. X-X/Ka, Ka-X/Ka).  The polarization of each
    output is given by the 'pols_out' dict (keyed by output port name); if
    that is absent, 'output_names' must embed a polarization code instead.
    """
    def __init__(self, name, inputs=None, band=None, pols_out=None, output_names=None, active=True):
        """Create the front end and wire its input to its outputs.

        @param name : front end name; may encode the band (e.g. 'X-X/Ka')
        @param inputs : dict of input ports
        @param band : band code; parsed from name if None
        @param pols_out : dict mapping output port name -> polarization
        @param output_names : output port names (used when pols_out is None)
        @param active : whether this device is in use
        """
        mylogger = logging.getLogger(module_logger.name+".DSNfe")
        mylogger.debug(" initializing FrontEnd %s", self)
        # Resolve band and per-output polarizations before the base class
        # creates the output ports.
        band, output_names, pols_out = get_FE_band_and_pols(inputs, band=band, pols_out=pols_out, output_names=output_names)
        FrontEnd.__init__(self, name, inputs=inputs, output_names=output_names, active=active)
        self.logger = mylogger
        # Connect the input signal(s) to the outputs for the selected
        # band/polarizations.
        self.outputs = connect_FE_inputs_and_outputs(self.inputs, band, self.outputs, pols_out)
        self.logger.debug(" initialized for %s",self)
class MergeCssInDocumentOneLineCommand(sublime_plugin.TextCommand): <NEW_LINE> <INDENT> def run(self, edit): <NEW_LINE> <INDENT> view = self.view <NEW_LINE> if ST2: <NEW_LINE> <INDENT> setlists = Lib.get_default_set() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> setlists = modeCSS.Lib.get_default_set() <NEW_LINE> <DEDENT> setlists["notSel"] = "all" <NEW_LINE> setlists["all_in_one"] = True <NEW_LINE> merge_css(self, edit, setlists) | 压缩整个文档为一行 | 62598fbd167d2b6e312b7148 |
class TestWriters(unittest.TestCase): <NEW_LINE> <INDENT> def test_writers(self): <NEW_LINE> <INDENT> test_urls = [ 'https://www.youtube.com/watch?v=5qap5aO4i9A', 'https://www.youtube.com/channel/UCSJ4gkVC6NrvII8umztf0Ow' ] <NEW_LINE> downloader = ChatDownloader() <NEW_LINE> with tempfile.TemporaryDirectory() as tmp: <NEW_LINE> <INDENT> for index, test_url in enumerate(test_urls): <NEW_LINE> <INDENT> for extension in ContinuousWriter._SUPPORTED_WRITERS: <NEW_LINE> <INDENT> path = os.path.join(tmp, f'test_{index}.{extension}') <NEW_LINE> chat = list(downloader.get_chat( test_url, max_messages=10, output=path)) <NEW_LINE> size = os.stat(path).st_size <NEW_LINE> self.assertFalse(size == 0) <NEW_LINE> chat = list(downloader.get_chat( test_url, max_messages=10, output=path, overwrite=False)) <NEW_LINE> self.assertGreater(os.stat(path).st_size, size) <NEW_LINE> formatting_path = os.path.join( tmp, f'{{id}}_{{title}}.{extension}') <NEW_LINE> chat = downloader.get_chat( test_url, max_messages=10, output=formatting_path) <NEW_LINE> list(chat) <NEW_LINE> self.assertTrue(os.path.exists( chat._output_writer.file_name)) | Class used to run unit tests for writers. | 62598fbdec188e330fdf8a64 |
class GetDebugConfigResponse(_messages.Message):
    """Response to a get debug configuration request.

    Fields:
      config: The encoded debug configuration for the requested component.
    """
    config = _messages.StringField(1)
class AndroidResources(AndroidTarget): <NEW_LINE> <INDENT> def __init__(self, resource_dir=None, **kwargs): <NEW_LINE> <INDENT> super(AndroidResources, self).__init__(**kwargs) <NEW_LINE> address = kwargs['address'] <NEW_LINE> try: <NEW_LINE> <INDENT> self.resource_dir = os.path.join(address.spec_path, resource_dir) <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> raise TargetDefinitionException(self, 'An android_resources target must specify a ' '\'resource_dir\' that contains the target\'s ' 'resource files.') <NEW_LINE> <DEDENT> <DEDENT> def globs_relative_to_buildroot(self): <NEW_LINE> <INDENT> return {'globs': os.path.join(self.resource_dir, '**')} | Android resources used to generate R.java. | 62598fbd7d43ff24874274ed |
class OrganizationUserTreeView(APIView): <NEW_LINE> <INDENT> authentication_classes = (JSONWebTokenAuthentication,) <NEW_LINE> permission_classes = (IsAuthenticated,) <NEW_LINE> def get(self, request, format=None): <NEW_LINE> <INDENT> organizations = Organization.objects.all() <NEW_LINE> serializer = OrganizationUserTreeSerializer(organizations, many=True) <NEW_LINE> tree_dict = {} <NEW_LINE> tree_data = [] <NEW_LINE> for item in serializer.data: <NEW_LINE> <INDENT> new_item = { 'id': 'o' + str(item['id']), 'label': item['label'], 'pid': item['pid'], 'children': item['children'] } <NEW_LINE> tree_dict[item['id']] = new_item <NEW_LINE> <DEDENT> for i in tree_dict: <NEW_LINE> <INDENT> if tree_dict[i]['pid']: <NEW_LINE> <INDENT> pid = tree_dict[i]['pid'] <NEW_LINE> parent = tree_dict[pid] <NEW_LINE> parent['children'].append(tree_dict[i]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> tree_data.append(tree_dict[i]) <NEW_LINE> <DEDENT> <DEDENT> return CassResponse(tree_data) | 组织架构关联用户树 | 62598fbd71ff763f4b5e794c |
class ExtendedManifestAE1(Component):
    """Extended manifest component (AE1 layout)."""

    def __init__(self):
        super(ExtendedManifestAE1, self).__init__('ext_mft', 'Extended Manifest', 0)

    def dump_info(self, pref, comp_filter):
        """Print a one-line summary, then the component details.

        A zero-length header means no extended manifest is present, so
        nothing is printed at all.
        """
        hdr = self.cdir['ext_mft_hdr']
        if hdr.adir['length'].val == 0:
            return
        print('{}{} ver {} entries {}'.format(
            pref, self.name, hdr.adir['ver'], hdr.adir['entries']))
        # The header itself was just summarized; exclude it from the detail
        # dump.
        self.dump_comp_info(pref, comp_filter + ['Header'])
class SecurityGroupViewResult(Model):
    """The information about security rules applied to the specified VM.

    :param network_interfaces: List of network interfaces on the specified VM.
    :type network_interfaces:
     list[~azure.mgmt.network.v2017_03_01.models.SecurityGroupNetworkInterface]
    """

    # Serialization map: attribute name -> wire key and type.
    _attribute_map = {
        'network_interfaces': {'key': 'networkInterfaces', 'type': '[SecurityGroupNetworkInterface]'},
    }

    def __init__(self, network_interfaces=None):
        super(SecurityGroupViewResult, self).__init__()
        self.network_interfaces = network_interfaces
class SchoolList(ObtainTokenBase): <NEW_LINE> <INDENT> def test_school_list(self): <NEW_LINE> <INDENT> token, university = self.obtain_token() <NEW_LINE> self.client.credentials(HTTP_AUTHORIZATION='Bearer {0}'.format(token)) <NEW_LINE> response = self.client.get(reverse('school_list'), data={'format': 'json'}) <NEW_LINE> self.assertEqual(response.status_code, status.HTTP_200_OK, response.content) | Test for School list api. | 62598fbde1aae11d1e7ce90e |
class OGPublication(Publication): <NEW_LINE> <INDENT> _name: str = '' <NEW_LINE> suggestedTime = ( 5 ) <NEW_LINE> def service(self) -> 'OGService': <NEW_LINE> <INDENT> return typing.cast('OGService', super().service()) <NEW_LINE> <DEDENT> def marshal(self) -> bytes: <NEW_LINE> <INDENT> return '\t'.join(['v1', self._name]).encode('utf8') <NEW_LINE> <DEDENT> def unmarshal(self, data: bytes) -> None: <NEW_LINE> <INDENT> vals = data.decode('utf8').split('\t') <NEW_LINE> if vals[0] == 'v1': <NEW_LINE> <INDENT> self._name = vals[1] <NEW_LINE> <DEDENT> <DEDENT> def publish(self) -> str: <NEW_LINE> <INDENT> self._name = 'Publication {}'.format(getSqlDatetime()) <NEW_LINE> return State.FINISHED <NEW_LINE> <DEDENT> def checkState(self) -> str: <NEW_LINE> <INDENT> return State.FINISHED <NEW_LINE> <DEDENT> def reasonOfError(self) -> str: <NEW_LINE> <INDENT> return 'No error possible :)' <NEW_LINE> <DEDENT> def destroy(self) -> str: <NEW_LINE> <INDENT> return State.FINISHED <NEW_LINE> <DEDENT> def cancel(self) -> str: <NEW_LINE> <INDENT> return self.destroy() | This class provides the publication of a oVirtLinkedService | 62598fbd4428ac0f6e6586f5 |
class StringGrid(BaseGrid): <NEW_LINE> <INDENT> def load_source(self): <NEW_LINE> <INDENT> return self.filename.translate(maketrans( '0%s' % self.free_char, self.mystery_char * 2)) | Grid loaded from a String | 62598fbd99fddb7c1ca62ed5 |
class DataFlowJavaOperator(BaseOperator):
    """Start a Java Cloud Dataflow batch job.

    The jar referenced by ``jar`` must be a self-executing jar; it is
    fetched from GCS to a local path if necessary, then launched with
    ``dataflow_default_options`` merged with ``options`` (the latter take
    precedence).

    It is good practice to define the dataflow_* parameters (project, zone,
    stagingLocation, ...) in the DAG's default_args::

        default_args = {
            'dataflow_default_options': {
                'project': 'my-gcp-project',
                'zone': 'europe-west1-d',
                'stagingLocation': 'gs://my-staging-bucket/staging/'
            }
        }

    Both ``jar`` and ``options`` are templated, so variables may be used
    in them.
    """

    # Fields rendered by the Airflow templating engine.
    template_fields = ['options', 'jar']
    ui_color = '#0273d4'

    @apply_defaults
    def __init__(
            self,
            jar,
            dataflow_default_options=None,
            options=None,
            gcp_conn_id='google_cloud_default',
            delegate_to=None,
            *args,
            **kwargs):
        """
        :param jar: local path or gs:// reference to the self-executing jar.
        :param dataflow_default_options: default job options.
        :param options: per-task job options; override the defaults.
        :param gcp_conn_id: Airflow connection id for Google Cloud Platform.
        :param delegate_to: account to impersonate via domain-wide delegation.
        """
        super(DataFlowJavaOperator, self).__init__(*args, **kwargs)
        # Normalize None to fresh dicts (avoids shared mutable defaults).
        dataflow_default_options = dataflow_default_options or {}
        options = options or {}
        self.gcp_conn_id = gcp_conn_id
        self.delegate_to = delegate_to
        self.jar = jar
        self.dataflow_default_options = dataflow_default_options
        self.options = options

    def execute(self, context):
        """Fetch the jar locally (if on GCS) and start the Dataflow job."""
        bucket_helper = GoogleCloudBucketHelper(
            self.gcp_conn_id, self.delegate_to)
        self.jar = bucket_helper.google_cloud_to_local(self.jar)
        hook = DataFlowHook(gcp_conn_id=self.gcp_conn_id,
                            delegate_to=self.delegate_to)
        # Task-level options take precedence over the DAG-level defaults.
        dataflow_options = copy.copy(self.dataflow_default_options)
        dataflow_options.update(self.options)
        hook.start_java_dataflow(self.task_id, dataflow_options, self.jar)
class APIResourceNotFoundError(APIError):
    """Indicate the requested resource was not found.

    The 'field' argument names the missing resource (carried in the error
    data); 'message' is an optional human-readable detail.
    """
    def __init__(self, field, message=''):
        super(APIResourceNotFoundError, self).__init__('value:notfound', field, message)
class TelegramPoll(BaseTelegramBotEntity):
    """Asyncio long-polling handler for incoming Telegram messages."""

    def __init__(self, bot, hass, allowed_chat_ids):
        """Set up polling for *bot*, accepting only *allowed_chat_ids*."""
        BaseTelegramBotEntity.__init__(self, hass, allowed_chat_ids)
        # Offset of the next update to fetch (last seen update_id + 1).
        self.update_id = 0
        self.websession = async_get_clientsession(hass)
        self.update_url = '{0}/getUpdates'.format(bot.base_url)
        self.polling_task = None
        # Our own request timeout is 15 s; Telegram is asked to hold the
        # long poll 5 s less so the server answers before we time out.
        self.timeout = 15
        self.post_data = {'timeout': self.timeout - 5}

    def start_polling(self):
        """Start the polling loop as a background task."""
        self.polling_task = self.hass.async_add_job(self.check_incoming())

    def stop_polling(self):
        """Cancel the background polling task."""
        self.polling_task.cancel()

    @asyncio.coroutine
    def get_updates(self, offset):
        """POST getUpdates (from *offset*) and return the decoded JSON.

        Raises WrongHttpStatus for any non-200 response; always releases
        the HTTP response back to the session pool.
        """
        resp = None
        if offset:
            self.post_data['offset'] = offset
        try:
            with async_timeout.timeout(self.timeout, loop=self.hass.loop):
                resp = yield from self.websession.post(
                    self.update_url, data=self.post_data,
                    headers={CONNECTION: KEEP_ALIVE}
                )
            if resp.status == 200:
                _json = yield from resp.json()
                return _json
            raise WrongHttpStatus('wrong status {}'.format(resp.status))
        finally:
            if resp is not None:
                yield from resp.release()

    @asyncio.coroutine
    def check_incoming(self):
        """Poll Telegram forever, dispatching each received update."""
        try:
            while True:
                try:
                    _updates = yield from self.get_updates(self.update_id)
                except (WrongHttpStatus, ClientError) as err:
                    # Server/network error: log and back off before retrying.
                    _LOGGER.error(err)
                    yield from asyncio.sleep(RETRY_SLEEP)
                except (asyncio.TimeoutError, ValueError):
                    # Timeout or undecodable JSON: poll again immediately.
                    pass
                else:
                    _updates = _updates.get('result')
                    if _updates is None:
                        _LOGGER.error("Incorrect result received.")
                    else:
                        for update in _updates:
                            # Advance the offset so this update is acked.
                            self.update_id = update['update_id'] + 1
                            self.process_message(update)
        except CancelledError:
            # Normal shutdown path triggered by stop_polling().
            _LOGGER.debug("Stopping Telegram polling bot")
class AsyncResponse(object):
    """Container for the outcome of an asynchronous HTTP request.

    Callers should first check whether ``error`` is None (no exception
    occurred); only then is ``status`` meaningful.  ``json_body`` holds the
    decoded JSON payload, or None when the body is not valid JSON.
    """

    def __init__(self):
        self.error = None       # exception captured during the request, if any
        self.version = None     # HTTP version string
        self.status = None      # HTTP status code
        self.reason = None      # HTTP reason phrase
        self.headers = None     # response headers
        self.body = None        # raw response body (bytes or str)
        self.json_body = None   # body parsed as JSON, or None
        self.request = None     # originating request object

    def __str__(self):
        return "e:%s v:%s s:%s r:%s h:%s b:%s" % (self.error, self.version,
                                                  self.status, self.reason,
                                                  self.headers, self.body)

    def set_resp(self, version, status, reason, headers, body):
        """Record a completed response and try to parse its body as JSON."""
        self.version = version
        self.status = status
        self.reason = reason
        self.headers = headers
        self.body = body
        try:
            # Fix: accept str bodies as well as bytes — on Python 3,
            # str.decode does not exist, so the original raised an uncaught
            # AttributeError for text bodies.  TypeError covers a None body.
            if isinstance(body, (bytes, bytearray)):
                text = body.decode('utf8')
            else:
                text = body
            self.json_body = json.loads(text)
        except (ValueError, TypeError):
            # Not JSON (or not decodable): leave json_body as None.
            self.json_body = None

    def set_error(self, error):
        """Record the exception raised while performing the request."""
        self.error = error

    def set_request(self, request):
        """Attach the request object this response belongs to."""
        self.request = request
class ModifyDDoSPolicyCaseResponse(AbstractModel):
    """ModifyDDoSPolicyCase response structure."""

    def __init__(self):
        # Success: SuccessCode describing the outcome of the API call.
        self.Success = None
        # RequestId: unique id of this request, used for troubleshooting.
        self.RequestId = None

    def _deserialize(self, params):
        """Populate this response's fields from the API result dict *params*."""
        if params.get("Success") is not None:
            self.Success = SuccessCode()
            self.Success._deserialize(params.get("Success"))
        self.RequestId = params.get("RequestId")
class AsyncWithWrapper: <NEW_LINE> <INDENT> def __init__(self, ctxmanager, *args, **kwargs): <NEW_LINE> <INDENT> self.manager = ctxmanager(*args, **kwargs) <NEW_LINE> <DEDENT> def __enter__(self): <NEW_LINE> <INDENT> return self.manager.__enter__() <NEW_LINE> <DEDENT> def __exit__(self, exc_type, exc_val, exc_tb): <NEW_LINE> <INDENT> return self.manager.__exit__(exc_type, exc_val, exc_tb) <NEW_LINE> <DEDENT> async def __aenter__(self): <NEW_LINE> <INDENT> return self.__enter__() <NEW_LINE> <DEDENT> async def __aexit__(self, exc_type, exc_val, exc_tb): <NEW_LINE> <INDENT> return self.__exit__(exc_type, exc_val, exc_tb) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def wrap(cls, meth): <NEW_LINE> <INDENT> return functools.partial(cls, meth) | A wrapper that allows using a ``with`` context manager with ``async with``. | 62598fbd55399d3f056266e9 |
class start_page(ProtectedPage): <NEW_LINE> <INDENT> def GET(self): <NEW_LINE> <INDENT> log.clear(NAME) <NEW_LINE> cmd = "sudo chkconfig watchdog on" <NEW_LINE> run_process(cmd) <NEW_LINE> cmd = "sudo /etc/init.d/watchdog start" <NEW_LINE> run_process(cmd) <NEW_LINE> restart(3) <NEW_LINE> return self.core_render.restarting(plugin_url(status_page)) | Start watchdog service page | 62598fbd92d797404e388c4d |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.