code stringlengths 4 4.48k | docstring stringlengths 1 6.45k | _id stringlengths 24 24 |
|---|---|---|
class Receiver(): <NEW_LINE> <INDENT> def __init__(self, client): <NEW_LINE> <INDENT> self.client = client <NEW_LINE> self.received = False <NEW_LINE> <DEDENT> def receive_notifications(self, private_key, sender_id, mtype, params, extra): <NEW_LINE> <INDENT> self.params = params <NEW_LINE> self.received = True <NEW_LINE> print ('Notification:', private_key, sender_id, mtype, params, extra) <NEW_LINE> <DEDENT> def receiver_call(self, private_key, sender_id, msg_id, mtype, params, extra): <NEW_LINE> <INDENT> self.params = params <NEW_LINE> self.received = True <NEW_LINE> print ('Call:', private_key, sender_id, msg_id, mtype, params, extra) <NEW_LINE> self.client.reply( msg_id, {'samp.status': 'samp.ok', 'samp.result': {}}) <NEW_LINE> <DEDENT> def point_select(self, private_key, send_id, mtype, params, extra): <NEW_LINE> <INDENT> self.params = params <NEW_LINE> self.received = True | SAMP listener | 62598fbf92d797404e388c73 |
class IosBaseCompiler(RouterCompiler): <NEW_LINE> <INDENT> lo_interface = "Loopback0" <NEW_LINE> def compile(self, node): <NEW_LINE> <INDENT> super(IosBaseCompiler, self).compile(node) <NEW_LINE> if node in self.anm['isis']: <NEW_LINE> <INDENT> self.isis(node) <NEW_LINE> <DEDENT> <DEDENT> def interfaces(self, node): <NEW_LINE> <INDENT> ip_node = self.anm.overlay.ip.node(node) <NEW_LINE> loopback_subnet = netaddr.IPNetwork("0.0.0.0/32") <NEW_LINE> super(IosBaseCompiler, self).interfaces(node) <NEW_LINE> G_ospf = self.anm['ospf'] <NEW_LINE> G_isis = self.anm['isis'] <NEW_LINE> for interface in node.interfaces: <NEW_LINE> <INDENT> ospf_link = G_ospf.edge(interface._edge_id) <NEW_LINE> if ospf_link: <NEW_LINE> <INDENT> interface['ospf_cost'] = ospf_link.cost <NEW_LINE> <DEDENT> isis_link = G_isis.edge(interface._edge_id) <NEW_LINE> if isis_link: <NEW_LINE> <INDENT> interface['isis'] = True <NEW_LINE> isis_node = G_isis.node(node) <NEW_LINE> interface['isis_process_id'] = isis_node.process_id <NEW_LINE> interface['isis_metric'] = isis_link.metric <NEW_LINE> <DEDENT> <DEDENT> is_isis_node = bool(G_isis.node(node)) <NEW_LINE> node.interfaces.append( id = self.lo_interface, description = "Loopback", ip_address = ip_node.loopback, subnet = loopback_subnet, isis = is_isis_node, physical = False, ) <NEW_LINE> <DEDENT> def bgp(self, node): <NEW_LINE> <INDENT> node.bgp.lo_interface = self.lo_interface <NEW_LINE> super(IosBaseCompiler, self).bgp(node) <NEW_LINE> <DEDENT> def isis(self, node): <NEW_LINE> <INDENT> G_isis = self.anm['isis'] <NEW_LINE> isis_node = self.anm['isis'].node(node) <NEW_LINE> node.isis.net = isis_node.net <NEW_LINE> node.isis.process_id = isis_node.process_id <NEW_LINE> node.isis.lo_interface = self.lo_interface <NEW_LINE> node.isis.isis_links = [] <NEW_LINE> for interface in node.interfaces: <NEW_LINE> <INDENT> isis_link = G_isis.edge(interface._edge_id) <NEW_LINE> if isis_link: <NEW_LINE> <INDENT> node.isis.isis_links.append( id = interface.id, metric = 
isis_link.metric, ) | Base IOS compiler | 62598fbf5166f23b2e243601 |
class PandasReadBigFile(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> os.mkdir("testdata") <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> df = list() <NEW_LINE> complexity = 10000 <NEW_LINE> for _ in range(complexity): <NEW_LINE> <INDENT> df.append( ( fmter.tpl.randstr(32), random.randint(1, 1000), random.random(), timewrapper.randdate("1980-01-01", "2015-04-28"). strftime("%b %m, %Y"), timewrapper.randdatetime("1980-01-01 00:00:00", "2015-04-28 23:59:59"). strftime("%m/%d/%Y %I:%M:%S %p") ) ) <NEW_LINE> <DEDENT> df = pd.DataFrame(df, columns=["ID", "INTEGER", "REAL", "CREATE_DATE", "CREATE_DATETIME"]) <NEW_LINE> df.to_csv(r"testdata\bigcsvfile.txt", index=False) <NEW_LINE> <DEDENT> def test_verify(self): <NEW_LINE> <INDENT> timer.start() <NEW_LINE> pd.read_csv(r"testdata\bigcsvfile.txt", nrows=2) <NEW_LINE> timer.stop() <NEW_LINE> self.assertLess(timer.elapse, 0.01) <NEW_LINE> <DEDENT> def test_pandas_builtin_datetime_parser(self): <NEW_LINE> <INDENT> timer.start() <NEW_LINE> df = pd.read_csv(r"testdata\bigcsvfile.txt", usecols=[3,4], parse_dates=[0,1]) <NEW_LINE> [i.to_datetime().date() for i in df["CREATE_DATE"]] <NEW_LINE> [i.to_datetime() for i in df["CREATE_DATETIME"]] <NEW_LINE> timer.timeup() <NEW_LINE> timer.start() <NEW_LINE> df = pd.read_csv(r"testdata\bigcsvfile.txt", usecols=[3,4]) <NEW_LINE> [timewrapper.str2date(i) for i in df["CREATE_DATE"]] <NEW_LINE> [timewrapper.str2datetime(i) for i in df["CREATE_DATETIME"]] <NEW_LINE> timer.timeup() | 验证pandas读取大文件时, 如果只读取前两行, 是否需要的时间极短 | 62598fbf2c8b7c6e89bd39e2 |
class MetaPayload(Payload): <NEW_LINE> <INDENT> name = 'meta' <NEW_LINE> @classmethod <NEW_LINE> def new(cls, version, id, protocol, gateway, client): <NEW_LINE> <INDENT> payload = cls() <NEW_LINE> payload.set('version', version) <NEW_LINE> payload.set('id', id) <NEW_LINE> payload.set('protocol', protocol) <NEW_LINE> payload.set('gateway', gateway) <NEW_LINE> payload.set('datetime', date_to_str(utcnow())) <NEW_LINE> payload.set('client', client) <NEW_LINE> return payload | Class definition for request/response meta payloads. | 62598fbf50812a4eaa620cfa |
class IPMIPacket: <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def createIPMIPacket(cls, packet): <NEW_LINE> <INDENT> auth_type = ord(packet[0]) <NEW_LINE> if auth_type == IPMI_SES_HDR_AUTH_TYPE_RMCPPLUS: <NEW_LINE> <INDENT> received = IPMI20Packet(packet=packet) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> received = IPMI15Packet(packet=packet) <NEW_LINE> <DEDENT> return received | IPMI packet factory class | 62598fbf4f88993c371f061a |
class Convert: <NEW_LINE> <INDENT> head = None <NEW_LINE> def sorted_list_to_bst(self, head): <NEW_LINE> <INDENT> current, length = head, 0 <NEW_LINE> while current is not None: <NEW_LINE> <INDENT> current, length = current.next, length + 1 <NEW_LINE> <DEDENT> self.head = head <NEW_LINE> return self.sorted_list_to_bst_recur(0, length) <NEW_LINE> <DEDENT> def sorted_list_to_bst_recur(self, start, end): <NEW_LINE> <INDENT> if start == end: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> mid = math.floor(start + (end - start) / 2) <NEW_LINE> left = self.sorted_list_to_bst_recur(start, mid) <NEW_LINE> current = TreeNode(self.head.val) <NEW_LINE> current.left = left <NEW_LINE> self.head = self.head.next <NEW_LINE> current.right = self.sorted_list_to_bst_recur(mid + 1, end) <NEW_LINE> return current | Convert linked list to bst. | 62598fbf99fddb7c1ca62efd |
class Config(BaseSampleReport.Config): <NEW_LINE> <INDENT> allow_population_by_field_name = True <NEW_LINE> orm_mode = True | Configure the sample report behavior. | 62598fbf23849d37ff8512d5 |
class Settings: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.screen_width = 1200 <NEW_LINE> self.screen_height = 800 <NEW_LINE> self.bg_color = (230, 230, 230) <NEW_LINE> self.ship_limit = 3 <NEW_LINE> self.bullet_width = 3 <NEW_LINE> self.bullet_height = 15 <NEW_LINE> self.bullet_color = (60, 60, 60) <NEW_LINE> self.bullets_allowed = 3 <NEW_LINE> self.fleet_drop_speed = 10 <NEW_LINE> self.speedup_scale = 1.1 <NEW_LINE> self.score_scale = 1.5 <NEW_LINE> self.initialize_dynamic_settings() <NEW_LINE> self.alien_points = 50 <NEW_LINE> <DEDENT> def initialize_dynamic_settings(self): <NEW_LINE> <INDENT> self.ship_speed = 1.5 <NEW_LINE> self.bullet_speed = 3.0 <NEW_LINE> self.alien_speed = 1.0 <NEW_LINE> self.fleet_direction = 1 <NEW_LINE> <DEDENT> def increase_speed(self): <NEW_LINE> <INDENT> self.ship_speed *= self.speedup_scale <NEW_LINE> self.bullet_speed *= self.speedup_scale <NEW_LINE> self.alien_speed *= self.speedup_scale <NEW_LINE> self.alien_points = int(self.alien_points * self.score_scale) | A class to store all settings for Alien Invasion
| 62598fbf3346ee7daa337759 |
class PythonexpTransform(BaseTransform): <NEW_LINE> <INDENT> supported_options = { 'python_expressions': {'type': str_list} } <NEW_LINE> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super(PythonexpTransform, self).__init__(*args, **kwargs) <NEW_LINE> self.python_expressions = self.read_option('python_expressions') <NEW_LINE> if not self.is_valid_python_expression(self.python_expressions): <NEW_LINE> <INDENT> raise ValueError('Python expression is not valid') <NEW_LINE> <DEDENT> self.interpreter = Interpreter() <NEW_LINE> self.logger.info('PythonexpTransform has been initiated. Expressions: {!r}'.format( self.python_expressions) ) <NEW_LINE> <DEDENT> def transform_batch(self, batch): <NEW_LINE> <INDENT> for item in batch: <NEW_LINE> <INDENT> context = create_context(item=item) <NEW_LINE> for expression in self.python_expressions: <NEW_LINE> <INDENT> self.interpreter.eval(expression, context=context) <NEW_LINE> <DEDENT> yield item <NEW_LINE> <DEDENT> self.logger.debug('Transformed items') <NEW_LINE> <DEDENT> def is_valid_python_expression(self, python_expressions): <NEW_LINE> <INDENT> return True | It applies python expressions to items.
- python_expression (str)
Valid python expression | 62598fbf56ac1b37e630240f |
class MediaAssetEvidenceListView(generics.ListAPIView): <NEW_LINE> <INDENT> serializer_class = CampaignMediaAssetEvidenceSerializer <NEW_LINE> def get_queryset(self): <NEW_LINE> <INDENT> return CampaignEvidence.objects.filter(campaign_media_asset=self.kwargs['campaign_media_asset_id'], start_date__lte=timezone.now(), is_active=True).order_by('id') | Media Asset For Evidence List View | 62598fbf4f6381625f1995d3 |
class IPResolver(IPResolverBase): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(IPResolver, self).__init__() <NEW_LINE> self.cache = utils.FastStore(max_size=100) <NEW_LINE> <DEDENT> def RetrieveIPInfo(self, ip): <NEW_LINE> <INDENT> if not ip: <NEW_LINE> <INDENT> return (IPInfo.UNKNOWN, "No ip information.") <NEW_LINE> <DEDENT> ip_str = utils.SmartStr(ip) <NEW_LINE> try: <NEW_LINE> <INDENT> return self.cache.Get(ip_str) <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> ip = ipaddr.IPAddress(ip_str) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> return (IPInfo.UNKNOWN, "No ip information.") <NEW_LINE> <DEDENT> if ip.version == 6: <NEW_LINE> <INDENT> res = self.RetrieveIP6Info(ip) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> res = self.RetrieveIP4Info(ip) <NEW_LINE> <DEDENT> self.cache.Put(ip_str, res) <NEW_LINE> return res <NEW_LINE> <DEDENT> def RetrieveIP4Info(self, ip): <NEW_LINE> <INDENT> if ip.is_private: <NEW_LINE> <INDENT> return (IPInfo.INTERNAL, "Internal IP address.") <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> res = socket.getnameinfo((str(ip), 0), socket.NI_NAMEREQD) <NEW_LINE> return (IPInfo.EXTERNAL, res[0]) <NEW_LINE> <DEDENT> except (socket.error, socket.herror, socket.gaierror): <NEW_LINE> <INDENT> return (IPInfo.EXTERNAL, "Unknown IP address.") <NEW_LINE> <DEDENT> <DEDENT> def RetrieveIP6Info(self, ip): <NEW_LINE> <INDENT> _ = ip <NEW_LINE> return (IPInfo.INTERNAL, "Internal IP6 address.") | Resolves IP addresses to hostnames. | 62598fbf4c3428357761a4df |
class DelayedTask(Environmentable): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super().__init__(Environment('DelayedTask')) <NEW_LINE> <DEDENT> def execute(self) -> None: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.run() <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> logger.error('Job %s raised an exception: %s', self.__class__, e) <NEW_LINE> <DEDENT> <DEDENT> def run(self) -> None: <NEW_LINE> <INDENT> logging.error("Base run of job called for class") <NEW_LINE> raise NotImplementedError <NEW_LINE> <DEDENT> def register(self, suggestedTime: int, tag: str = '', check: bool = True) -> None: <NEW_LINE> <INDENT> from .delayed_task_runner import DelayedTaskRunner <NEW_LINE> if check and DelayedTaskRunner.runner().checkExists(tag): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> DelayedTaskRunner.runner().insert(self, suggestedTime, tag) | This class represents a single delayed task object.
This is an object that represents an execution to be done "later" | 62598fbfad47b63b2c5a7a78 |
class CVExactPredictionParameters(PredictionParameters): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> PredictionParameters.__init__( self, 'constant velocity (direct exact computation)', None) <NEW_LINE> <DEDENT> def computeCrossingsCollisionsAtInstant(self, currentInstant, obj1, obj2, collisionDistanceThreshold, timeHorizon, computeCZ=False, debug=False, *kwargs): <NEW_LINE> <INDENT> p1 = obj1.getPositionAtInstant(currentInstant) <NEW_LINE> p2 = obj2.getPositionAtInstant(currentInstant) <NEW_LINE> v1 = obj1.getVelocityAtInstant(currentInstant) <NEW_LINE> v2 = obj2.getVelocityAtInstant(currentInstant) <NEW_LINE> intersection = moving.intersection(p1, p1 + v1, p2, p2 + v2) <NEW_LINE> if intersection is not None: <NEW_LINE> <INDENT> ttc = moving.Point.timeToCollision( p1, p2, v1, v2, collisionDistanceThreshold) <NEW_LINE> if ttc: <NEW_LINE> <INDENT> return currentInstant, [ SafetyPoint(intersection, 1., ttc)], [] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return currentInstant, [], [] | Prediction parameters of prediction at constant velocity
using direct computation of the intersecting point (solving the equation)
Warning: the computed time to collision may be higher than timeHorizon (not used) | 62598fbf1f5feb6acb162e43 |
class ResourcesMoveInfo(Model): <NEW_LINE> <INDENT> _attribute_map = { 'resources': {'key': 'resources', 'type': '[str]'}, 'target_resource_group': {'key': 'targetResourceGroup', 'type': 'str'}, } <NEW_LINE> def __init__(self, *, resources=None, target_resource_group: str=None, **kwargs) -> None: <NEW_LINE> <INDENT> super(ResourcesMoveInfo, self).__init__(**kwargs) <NEW_LINE> self.resources = resources <NEW_LINE> self.target_resource_group = target_resource_group | Parameters of move resources.
:param resources: The ids of the resources.
:type resources: list[str]
:param target_resource_group: The target resource group.
:type target_resource_group: str | 62598fbfaad79263cf42e9f8 |
class _ScatteredEmbeddingColumn( _FeatureColumn, fc_core._DenseColumn, collections.namedtuple("_ScatteredEmbeddingColumn", [ "column_name", "size", "dimension", "hash_key", "combiner", "initializer" ])): <NEW_LINE> <INDENT> def __new__(cls, column_name, size, dimension, hash_key, combiner="sqrtn", initializer=None): <NEW_LINE> <INDENT> if initializer is not None and not callable(initializer): <NEW_LINE> <INDENT> raise ValueError("initializer must be callable if specified. " "column_name: {}".format(column_name)) <NEW_LINE> <DEDENT> if initializer is None: <NEW_LINE> <INDENT> logging.warn("The default stddev value of initializer will change from " "\"0.1\" to \"1/sqrt(dimension)\" after 2017/02/25.") <NEW_LINE> stddev = 0.1 <NEW_LINE> initializer = init_ops.truncated_normal_initializer( mean=0.0, stddev=stddev) <NEW_LINE> <DEDENT> return super(_ScatteredEmbeddingColumn, cls).__new__(cls, column_name, size, dimension, hash_key, combiner, initializer) <NEW_LINE> <DEDENT> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return "{}_scattered_embedding".format(self.column_name) <NEW_LINE> <DEDENT> @property <NEW_LINE> def config(self): <NEW_LINE> <INDENT> return {self.column_name: parsing_ops.VarLenFeature(dtypes.string)} <NEW_LINE> <DEDENT> @property <NEW_LINE> def key(self): <NEW_LINE> <INDENT> return self._key_without_properties(["initializer"]) <NEW_LINE> <DEDENT> def insert_transformed_feature(self, columns_to_tensors): <NEW_LINE> <INDENT> columns_to_tensors[self] = columns_to_tensors[self.column_name] <NEW_LINE> <DEDENT> def _deep_embedding_lookup_arguments(self, input_tensor): <NEW_LINE> <INDENT> return _DeepEmbeddingLookupArguments( input_tensor=input_tensor, weight_tensor=None, vocab_size=self.size, initializer=self.initializer, combiner=self.combiner, dimension=self.dimension, shared_embedding_name=None, hash_key=self.hash_key, max_norm=None, trainable=True) <NEW_LINE> <DEDENT> @property <NEW_LINE> def _variable_shape(self): <NEW_LINE> <INDENT> return 
tensor_shape.TensorShape([self.dimension]) <NEW_LINE> <DEDENT> def _get_dense_tensor(self, inputs, weight_collections=None, trainable=None): <NEW_LINE> <INDENT> return _embeddings_from_arguments( self, self._deep_embedding_lookup_arguments(inputs.get(self)), weight_collections, trainable) <NEW_LINE> <DEDENT> def _transform_feature(self, inputs): <NEW_LINE> <INDENT> return inputs.get(self.column_name) <NEW_LINE> <DEDENT> @property <NEW_LINE> def _parse_example_spec(self): <NEW_LINE> <INDENT> return self.config | See `scattered_embedding_column`. | 62598fbf26068e7796d4cb7f |
class Role(db.Model): <NEW_LINE> <INDENT> user = db.StringProperty(required=True) <NEW_LINE> role = db.StringProperty(required=True) <NEW_LINE> @classmethod <NEW_LINE> def get_roles(cls, username): <NEW_LINE> <INDENT> key = 'acl.role.%s' % (username) <NEW_LINE> roles = memcache.get(key) <NEW_LINE> if roles is not None: <NEW_LINE> <INDENT> return roles <NEW_LINE> <DEDENT> entities = cls.all().filter('user =', username).fetch(100) <NEW_LINE> if entities: <NEW_LINE> <INDENT> roles = [entity.role for entity in entities] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> roles = [] <NEW_LINE> <DEDENT> if not memcache.add(key, roles): <NEW_LINE> <INDENT> logging.error('Memcache set failed.') <NEW_LINE> <DEDENT> return roles <NEW_LINE> <DEDENT> def put(self): <NEW_LINE> <INDENT> self.delete_cache() <NEW_LINE> super(Role, self).put() <NEW_LINE> <DEDENT> def delete(self): <NEW_LINE> <INDENT> self.delete_cache() <NEW_LINE> super(Role, self).delete() <NEW_LINE> <DEDENT> def delete_cache(self): <NEW_LINE> <INDENT> key = 'acl.role.%s' % (self.user) <NEW_LINE> memcache.delete(key) | Model to store user roles. | 62598fbf796e427e5384e9b9 |
class Meta: <NEW_LINE> <INDENT> model = Area <NEW_LINE> fields = ('pk', 'nombre', 'descripcion', 'img_area', 'estado') | campos | 62598fbfbe7bc26dc9251f6e |
@value.value_equality <NEW_LINE> class PauliStringExpectation(WaveFunctionDisplay): <NEW_LINE> <INDENT> def __init__(self, pauli_string: 'pauli_string.PauliString', key: Hashable=''): <NEW_LINE> <INDENT> self._pauli_string = pauli_string <NEW_LINE> self._key = key <NEW_LINE> <DEDENT> @property <NEW_LINE> def qubits(self) -> Tuple[raw_types.QubitId, ...]: <NEW_LINE> <INDENT> return self._pauli_string.qubits <NEW_LINE> <DEDENT> def with_qubits(self, *new_qubits: raw_types.QubitId ) -> 'PauliStringExpectation': <NEW_LINE> <INDENT> return PauliStringExpectation( self._pauli_string.with_qubits(*new_qubits), self._key ) <NEW_LINE> <DEDENT> @property <NEW_LINE> def key(self) -> Hashable: <NEW_LINE> <INDENT> return self._key <NEW_LINE> <DEDENT> def value_derived_from_wavefunction(self, state: np.ndarray, qubit_map: Dict[raw_types.QubitId, int] ) -> float: <NEW_LINE> <INDENT> num_qubits = state.shape[0].bit_length() - 1 <NEW_LINE> ket = np.reshape(np.copy(state), (2,) * num_qubits) <NEW_LINE> for qubit, pauli in self._pauli_string.items(): <NEW_LINE> <INDENT> buffer = np.empty(ket.shape, dtype=state.dtype) <NEW_LINE> args = protocols.ApplyUnitaryArgs( target_tensor=ket, available_buffer=buffer, axes=(qubit_map[qubit],) ) <NEW_LINE> ket = protocols.apply_unitary(pauli, args) <NEW_LINE> <DEDENT> ket = np.reshape(ket, state.shape) <NEW_LINE> return np.dot(state.conj(), ket) <NEW_LINE> <DEDENT> def _value_equality_values_(self): <NEW_LINE> <INDENT> return self._pauli_string, self._key | Expectation value of a Pauli string. | 62598fbf7047854f4633f5f8 |
class HostManager(object): <NEW_LINE> <INDENT> host_state_cls = HostState <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> self.service_states = {} <NEW_LINE> self.filter_classes = filters.get_filter_classes( FLAGS.scheduler_available_filters) <NEW_LINE> <DEDENT> def _choose_host_filters(self, filters): <NEW_LINE> <INDENT> if filters is None: <NEW_LINE> <INDENT> filters = FLAGS.scheduler_default_filters <NEW_LINE> <DEDENT> if not isinstance(filters, (list, tuple)): <NEW_LINE> <INDENT> filters = [filters] <NEW_LINE> <DEDENT> good_filters = [] <NEW_LINE> bad_filters = [] <NEW_LINE> for filter_name in filters: <NEW_LINE> <INDENT> found_class = False <NEW_LINE> for cls in self.filter_classes: <NEW_LINE> <INDENT> if cls.__name__ == filter_name: <NEW_LINE> <INDENT> found_class = True <NEW_LINE> filter_instance = cls() <NEW_LINE> filter_func = getattr(filter_instance, 'host_passes', None) <NEW_LINE> if filter_func: <NEW_LINE> <INDENT> good_filters.append(filter_func) <NEW_LINE> <DEDENT> break <NEW_LINE> <DEDENT> <DEDENT> if not found_class: <NEW_LINE> <INDENT> bad_filters.append(filter_name) <NEW_LINE> <DEDENT> <DEDENT> if bad_filters: <NEW_LINE> <INDENT> msg = ", ".join(bad_filters) <NEW_LINE> raise exception.SchedulerHostFilterNotFound(filter_name=msg) <NEW_LINE> <DEDENT> return good_filters <NEW_LINE> <DEDENT> def filter_hosts(self, hosts, filter_properties, filters=None): <NEW_LINE> <INDENT> filtered_hosts = [] <NEW_LINE> filter_fns = self._choose_host_filters(filters) <NEW_LINE> for host in hosts: <NEW_LINE> <INDENT> if host.passes_filters(filter_fns, filter_properties): <NEW_LINE> <INDENT> filtered_hosts.append(host) <NEW_LINE> <DEDENT> <DEDENT> return filtered_hosts <NEW_LINE> <DEDENT> def update_service_capabilities(self, service_name, host, capabilities): <NEW_LINE> <INDENT> node = capabilities.get('hypervisor_hostname') <NEW_LINE> if node is not None: <NEW_LINE> <INDENT> host = "%s/%s" % (host, node) <NEW_LINE> <DEDENT> LOG.debug(_("Received %(service_name)s 
service update from " "%(host)s.") % locals()) <NEW_LINE> service_caps = self.service_states.get(host, {}) <NEW_LINE> capab_copy = dict(capabilities) <NEW_LINE> capab_copy["timestamp"] = timeutils.utcnow() <NEW_LINE> service_caps[service_name] = capab_copy <NEW_LINE> self.service_states[host] = service_caps <NEW_LINE> <DEDENT> def get_all_host_states(self, context, topic): <NEW_LINE> <INDENT> if topic != 'compute': <NEW_LINE> <INDENT> raise NotImplementedError(_( "host_manager only implemented for 'compute'")) <NEW_LINE> <DEDENT> host_state_map = {} <NEW_LINE> compute_nodes = db.compute_node_get_all(context) <NEW_LINE> for compute in compute_nodes: <NEW_LINE> <INDENT> service = compute['service'] <NEW_LINE> if not service: <NEW_LINE> <INDENT> LOG.warn(_("No service for compute ID %s") % compute['id']) <NEW_LINE> continue <NEW_LINE> <DEDENT> host = service['host'] <NEW_LINE> nodename = compute.get('hypervisor_hostname') <NEW_LINE> if nodename is not None: <NEW_LINE> <INDENT> host_node = '%s/%s' % (host, nodename) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> host_node = host <NEW_LINE> <DEDENT> capabilities = self.service_states.get(host_node, None) <NEW_LINE> host_state = self.host_state_cls(host, topic, capabilities=capabilities, service=dict(service.iteritems()), nodename=nodename) <NEW_LINE> host_state.update_from_compute_node(compute) <NEW_LINE> host_state_map[host_node] = host_state <NEW_LINE> <DEDENT> return host_state_map | Base HostManager class. | 62598fbf63b5f9789fe85396 |
class VerifyEmailForm(forms.ModelForm): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> model = VerifyCode <NEW_LINE> fields = ['code'] | Provides form for verifying user email address. | 62598fbf66673b3332c305f7 |
class MaxVol(object): <NEW_LINE> <INDENT> def __init__(self, max_vol=None): <NEW_LINE> <INDENT> self.swagger_types = { 'max_vol': 'int' } <NEW_LINE> self.attribute_map = { 'max_vol': 'maxVol' } <NEW_LINE> self._max_vol = max_vol <NEW_LINE> <DEDENT> @property <NEW_LINE> def max_vol(self): <NEW_LINE> <INDENT> return self._max_vol <NEW_LINE> <DEDENT> @max_vol.setter <NEW_LINE> def max_vol(self, max_vol): <NEW_LINE> <INDENT> if max_vol is None: <NEW_LINE> <INDENT> raise ValueError("Invalid value for `max_vol`, must not be `None`") <NEW_LINE> <DEDENT> self._max_vol = max_vol <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in iteritems(self.swagger_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other | NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually. | 62598fbf656771135c489894 |
class AMQPLogSource(AMQPMessageConsumer): <NEW_LINE> <INDENT> def message_callback(self, record_data, msg): <NEW_LINE> <INDENT> record = object.__new__(logging.LogRecord) <NEW_LINE> record.__dict__.update(record_data) <NEW_LINE> logger = logging.getLogger(record.name) <NEW_LINE> if logger.isEnabledFor(record.levelno): <NEW_LINE> <INDENT> logger.handle(record) | Receiving part of logging-over-AMQP solution.
Works in pair with :class:`AMQPHandler`: receives its log messages
with respect to provided routing key -- logger name. Relogs all received
log records. | 62598fbf283ffb24f3cf3aa9 |
class Amenity(BaseModel): <NEW_LINE> <INDENT> name = "" | Represent an amenity.
Attributes:
name (str): The name of the amenity. | 62598fbf67a9b606de5461f0 |
class PoolMonitor(neutron.NeutronAPIDictWrapper): <NEW_LINE> <INDENT> def __init__(self, apiresource): <NEW_LINE> <INDENT> apiresource['admin_state'] = 'UP' if apiresource['admin_state_up'] else 'DOWN' <NEW_LINE> super(PoolMonitor, self).__init__(apiresource) | Wrapper for neutron load balancer pool health monitor. | 62598fbf63d6d428bbee29d7 |
class RunNotFoundError(ValueError): <NEW_LINE> <INDENT> def __init__(self, run_id: str) -> None: <NEW_LINE> <INDENT> super().__init__(f"Run {run_id} was not found.") | Error raised when a given Run ID is not found in the store. | 62598fbf851cf427c66b84db |
class CopyDirFile(object): <NEW_LINE> <INDENT> def copy_file(self, file_name, src, dest): <NEW_LINE> <INDENT> if os.path.exists(src): <NEW_LINE> <INDENT> src_file = open(src + "/" + file_name, "r", encoding="utf8") <NEW_LINE> dest_file = open(dest + "/" + file_name, "a+", encoding="utf8") <NEW_LINE> while True: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> data = src_file.readline() <NEW_LINE> dest_file.write(data) <NEW_LINE> dest_file.flush() <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> if dest_file != None: <NEW_LINE> <INDENT> dest_file.close() <NEW_LINE> <DEDENT> if src_file != None: <NEW_LINE> <INDENT> src_file.close() <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> print("源文件不存在.") <NEW_LINE> <DEDENT> <DEDENT> def main(self): <NEW_LINE> <INDENT> src_dir_name = "D:/python/script/PythonSenior/com/process" <NEW_LINE> names_list = os.listdir(src_dir_name) <NEW_LINE> dest_dir_name = src_dir_name + "mul_enhance_copy" <NEW_LINE> try: <NEW_LINE> <INDENT> os.mkdir(dest_dir_name) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> p = multiprocessing.Pool(5) <NEW_LINE> for file_name in names_list: <NEW_LINE> <INDENT> p.apply_async(self.copy_file, args=(file_name, src_dir_name, dest_dir_name)) <NEW_LINE> <DEDENT> p.close() <NEW_LINE> p.join() | 拷贝文件夹下的文件 | 62598fbf57b8e32f52508230 |
class HexBGCode(_HexCode): <NEW_LINE> <INDENT> ground = 'back' | A hex background colour. | 62598fbf4f88993c371f061c |
class ZMemoryBadStoryfileSize(ZMemoryError): <NEW_LINE> <INDENT> pass | Story is too large for Z-machine version. | 62598fbf66656f66f7d5a618 |
class AlteredSetting(object): <NEW_LINE> <INDENT> def __init__(self, name, value): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.value = value <NEW_LINE> self.settings = sublime.load_settings('RustEnhanced.sublime-settings') <NEW_LINE> <DEDENT> def __enter__(self): <NEW_LINE> <INDENT> self.orig = self.settings.get(self.name) <NEW_LINE> self.settings.set(self.name, self.value) <NEW_LINE> <DEDENT> def __exit__(self, type, value, traceback): <NEW_LINE> <INDENT> self.settings.set(self.name, self.orig) | Utility to help with temporarily changing a setting. | 62598fbf4527f215b58ea0f4 |
class NotifiableItemDelegate(QStyledItemDelegate): <NEW_LINE> <INDENT> editing_done = pyqtSignal() | The parent delegate class for providing common actions for all custom delegates. | 62598fbfff9c53063f51a874 |
class FindNode(BaseCommand): <NEW_LINE> <INDENT> name = Commands.FIND_NODE.value <NEW_LINE> description = 'Neo4j Find Node' <NEW_LINE> dependencies = BaseCommand.default_dependencies.union({'xep_0122', 'neo4j_wrapper'}) <NEW_LINE> def post_init(self): <NEW_LINE> <INDENT> super(FindNode, self).post_init() <NEW_LINE> self._command_handler = self.xmpp['neo4j_wrapper'] <NEW_LINE> <DEDENT> def command_start(self, iq, initial_session): <NEW_LINE> <INDENT> if not initial_session['payload']: <NEW_LINE> <INDENT> initial_session['notes'] = [('error', 'Cannot execute without a payload')] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> payload = StoragePayload(initial_session['payload']) <NEW_LINE> if logger.isEnabledFor(logging.DEBUG): <NEW_LINE> <INDENT> logger.debug('Find Node iq: %s' % iq) <NEW_LINE> logger.debug('Initial_session: %s' % initial_session) <NEW_LINE> logger.debug('about: %s' % payload.about) <NEW_LINE> logger.debug('relationships: %s' % payload.references) <NEW_LINE> logger.debug('properties: %s' % payload.properties) <NEW_LINE> logger.debug('types: %s' % payload.types) <NEW_LINE> <DEDENT> created = False <NEW_LINE> nodes = self._command_handler.find_nodes(payload.types, **payload.properties) <NEW_LINE> if not nodes and FindFlags.CREATE_IF_MISSING.fetch_from(payload.flags): <NEW_LINE> <INDENT> node = self._command_handler.create_node(types=payload.types, properties=payload.properties, relationships=payload.references) <NEW_LINE> created = True <NEW_LINE> nodes.append(node) <NEW_LINE> <DEDENT> result_collection_payload = ResultCollectionPayload() <NEW_LINE> for node in nodes: <NEW_LINE> <INDENT> payload = ResultPayload(about=node.uri, types=node.labels) <NEW_LINE> if created: <NEW_LINE> <INDENT> payload.add_flag(FindResults.CREATED, True) <NEW_LINE> <DEDENT> result_collection_payload.append(payload) <NEW_LINE> <DEDENT> initial_session['payload'] = result_collection_payload.populate_payload() <NEW_LINE> <DEDENT> return initial_session | Neo4j Storage plugin for 
finding data. | 62598fbf7047854f4633f5fa |
class block: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self._statements = [] <NEW_LINE> self._variables = [] <NEW_LINE> <DEDENT> def add_statement(self, s): <NEW_LINE> <INDENT> self._statements += [s] <NEW_LINE> <DEDENT> def declare_variable(self, n): <NEW_LINE> <INDENT> self._variables += [n] <NEW_LINE> <DEDENT> def emit(self, e): <NEW_LINE> <INDENT> e.add_line("{") <NEW_LINE> for v in self._variables: <NEW_LINE> <INDENT> init_value = "" if not isinstance(v,crep.cpp_variable) or not v.initial_value() else " ({0})".format(v.initial_value().as_cpp()) <NEW_LINE> e.add_line("{0} {1}{2};".format(v.cpp_type(), v.as_cpp(), init_value)) <NEW_LINE> <DEDENT> for s in self._statements: <NEW_LINE> <INDENT> s.emit(e) <NEW_LINE> <DEDENT> e.add_line("}") | This is a bock of statements surrounded by a scoping (like open close bracket, for loop, etc.) | 62598fbf56ac1b37e6302414 |
@unittest.skipIf(tf_version.is_tf2(), 'Skipping TF1.X only test.') <NEW_LINE> class SSDResnet152V1FeatureExtractorTest( ssd_resnet_v1_fpn_feature_extractor_testbase. SSDResnetFPNFeatureExtractorTestBase): <NEW_LINE> <INDENT> def _create_feature_extractor(self, depth_multiplier, pad_to_multiple, use_explicit_padding=False, min_depth=32, use_keras=False): <NEW_LINE> <INDENT> is_training = True <NEW_LINE> return ( ssd_resnet_v1_fpn_feature_extractor.SSDResnet152V1FpnFeatureExtractor( is_training, depth_multiplier, min_depth, pad_to_multiple, self.conv_hyperparams_fn, use_explicit_padding=use_explicit_padding)) <NEW_LINE> <DEDENT> def _resnet_scope_name(self): <NEW_LINE> <INDENT> return 'resnet_v1_152' | SSDResnet152v1Fpn feature extractor test. | 62598fbfa05bb46b3848aa93 |
class NeuralFactorizationMachineModel(torch.nn.Module): <NEW_LINE> <INDENT> def __init__(self, field_dims, embed_dim, mlp_dims, dropouts): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.embedding = FeaturesEmbedding(field_dims, embed_dim) <NEW_LINE> self.linear = FeaturesLinear(field_dims) <NEW_LINE> self.fm = torch.nn.Sequential( FactorizationMachine(reduce_sum=False), torch.nn.BatchNorm1d(embed_dim), torch.nn.Dropout(dropouts[0]) ) <NEW_LINE> self.mlp = MultiLayerPerceptron(embed_dim, mlp_dims, dropouts[1]) <NEW_LINE> <DEDENT> def forward(self, x): <NEW_LINE> <INDENT> cross_term = self.fm(self.embedding(x)) <NEW_LINE> x = self.linear(x) + self.mlp(cross_term) <NEW_LINE> return torch.sigmoid(x.squeeze(1)) | A pytorch implementation of Neural Factorization Machine.
Reference:
X He and TS Chua, Neural Factorization Machines for Sparse Predictive Analytics, 2017. | 62598fbf3d592f4c4edbb0e5 |
class TopicUpdateParameters(Model): <NEW_LINE> <INDENT> _attribute_map = { 'tags': {'key': 'tags', 'type': '{str}'}, } <NEW_LINE> def __init__(self, tags=None): <NEW_LINE> <INDENT> super(TopicUpdateParameters, self).__init__() <NEW_LINE> self.tags = tags | Properties of the Topic update.
:param tags: Tags of the resource
:type tags: dict[str, str] | 62598fbf4f88993c371f061d |
class InstanceActionAPI(base.Base): <NEW_LINE> <INDENT> def actions_get(self, context, instance): <NEW_LINE> <INDENT> return objects.InstanceActionList.get_by_instance_uuid( context, instance.uuid) <NEW_LINE> <DEDENT> def action_get_by_request_id(self, context, instance, request_id): <NEW_LINE> <INDENT> return objects.InstanceAction.get_by_request_id( context, instance.uuid, request_id) <NEW_LINE> <DEDENT> def action_events_get(self, context, instance, action_id): <NEW_LINE> <INDENT> return objects.InstanceActionEventList.get_by_action( context, action_id) | Sub-set of the Compute Manager API for managing instance actions. | 62598fbf7cff6e4e811b5c4b |
class ContainsKeyAndInstanceConfigValidator(AbstractConfigValidator): <NEW_LINE> <INDENT> def validate(self,config): <NEW_LINE> <INDENT> for key in self.config: <NEW_LINE> <INDENT> if not key in config: <NEW_LINE> <INDENT> logger.warn("Key " + str(key) + " is not in the configuration") <NEW_LINE> return False <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if not isinstance(config[key],self.config[key]): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return True | Checks if the configuration dict contains all keys, and values are instance of the ones provided as validator config
Sample config
{"list":type([])} : will check a list in a "list" key in the config dict | 62598fbf50812a4eaa620cfd |
class RegionCountries(ListAPIView): <NEW_LINE> <INDENT> serializer_class = CountrySerializer <NEW_LINE> fields = ('url', 'code', 'name') <NEW_LINE> def get_queryset(self): <NEW_LINE> <INDENT> pk = self.kwargs.get('pk') <NEW_LINE> region = geodata.models.Region.objects.get(pk=pk) <NEW_LINE> return region.countries | Returns a list of IATI Countries within region.
## URI Format
```
/api/regions/{region_id}/countries
```
### URI Parameters
- `region_id`: Desired region ID | 62598fbfa8370b77170f0609 |
class ApiStatus(object): <NEW_LINE> <INDENT> swagger_types = { 'version': 'str', 'ready': 'bool' } <NEW_LINE> attribute_map = { 'version': 'version', 'ready': 'ready' } <NEW_LINE> def __init__(self, version=None, ready=None): <NEW_LINE> <INDENT> self._version = None <NEW_LINE> self._ready = None <NEW_LINE> self.discriminator = None <NEW_LINE> self.version = version <NEW_LINE> self.ready = ready <NEW_LINE> <DEDENT> @property <NEW_LINE> def version(self): <NEW_LINE> <INDENT> return self._version <NEW_LINE> <DEDENT> @version.setter <NEW_LINE> def version(self, version): <NEW_LINE> <INDENT> if version is None: <NEW_LINE> <INDENT> raise ValueError("Invalid value for `version`, must not be `None`") <NEW_LINE> <DEDENT> self._version = version <NEW_LINE> <DEDENT> @property <NEW_LINE> def ready(self): <NEW_LINE> <INDENT> return self._ready <NEW_LINE> <DEDENT> @ready.setter <NEW_LINE> def ready(self, ready): <NEW_LINE> <INDENT> if ready is None: <NEW_LINE> <INDENT> raise ValueError("Invalid value for `ready`, must not be `None`") <NEW_LINE> <DEDENT> self._ready = ready <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in six.iteritems(self.swagger_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> if issubclass(ApiStatus, dict): <NEW_LINE> <INDENT> for key, value in self.items(): <NEW_LINE> <INDENT> result[key] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def 
to_str(self): <NEW_LINE> <INDENT> return pprint.pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, ApiStatus): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other | NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually. | 62598fbfdc8b845886d537e2 |
class ModifiedDom(object): <NEW_LINE> <INDENT> def __init__(self, txt): <NEW_LINE> <INDENT> if not isinstance(txt, unicode): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> txt = txt.decode('utf8') <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> self.txt = txt <NEW_LINE> self.match = BODY_RE.search(txt) <NEW_LINE> self.dom = None <NEW_LINE> self.group = None <NEW_LINE> if self.match: <NEW_LINE> <INDENT> group = self.match.group('body') <NEW_LINE> if not isinstance(group, unicode): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> group = group.decode('utf8') <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> html = parse(StringIO(self.txt)) <NEW_LINE> body = html.xpath('//body') <NEW_LINE> if body: <NEW_LINE> <INDENT> self.dom = body[0] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.dom = html <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def cssselect(self, what): <NEW_LINE> <INDENT> if self.dom is None: <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> expression = GenericTranslator().css_to_xpath(what, '//') <NEW_LINE> return self.dom.xpath(expression) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> if self.dom is None: <NEW_LINE> <INDENT> return safe_unicode(self.txt, 'utf-8').encode('utf-8') <NEW_LINE> <DEDENT> txt = self.txt.replace(self.match.group(), tostring(self.dom, method='html')) <NEW_LINE> return safe_unicode(txt, 'utf-8').encode('utf-8').replace('&#13;', '') | A class to be able to parse the body tag while
leaving the rest of the document intact | 62598fbf796e427e5384e9bd
class ProjectSettingView(DetailView): <NEW_LINE> <INDENT> template_name = "issue_tracker/project/project_settings.html" <NEW_LINE> model = Project <NEW_LINE> def get(self, request, *args, **kwargs): <NEW_LINE> <INDENT> if request.is_ajax(): <NEW_LINE> <INDENT> context = dict() <NEW_LINE> initial_date = self.request.GET.get('initial_date') <NEW_LINE> final_date = self.request.GET.get('final_date') <NEW_LINE> context = self.story(context, initial_date, final_date) <NEW_LINE> data = render_to_string('issue_tracker/story/table.html', context=context) <NEW_LINE> if self.request.GET.get('mail'): <NEW_LINE> <INDENT> tasks.email.delay(data, self.request.user.email) <NEW_LINE> <DEDENT> return HttpResponse(data, content_type="html") <NEW_LINE> <DEDENT> return super(ProjectSettingView, self).get(request, *args, **kwargs) <NEW_LINE> <DEDENT> def get_context_data(self, **kwargs): <NEW_LINE> <INDENT> id = self.kwargs['pk'] <NEW_LINE> context = super(ProjectSettingView, self).get_context_data(**kwargs) <NEW_LINE> initial = (Story.objects.filter(project_title=id)).order_by('date') <NEW_LINE> if initial: <NEW_LINE> <INDENT> initial_date = initial[0].date <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> initial_date = datetime.date.today() <NEW_LINE> <DEDENT> context = self.story(context, initial_date, datetime.date.today()) <NEW_LINE> result = render_to_string('issue_tracker/story/table.html', context=context) <NEW_LINE> context['result'] = result <NEW_LINE> context['init'] = initial_date <NEW_LINE> context['final'] = datetime.date.today() <NEW_LINE> return context <NEW_LINE> <DEDENT> def story(self, context, initial_date, final_date): <NEW_LINE> <INDENT> id = self.kwargs['pk'] <NEW_LINE> project = Project.objects.get(id=id) <NEW_LINE> assignees = project.assigned_to.all() <NEW_LINE> storys = (Story.objects.filter(project_title=id, visibility='ys', date__range=(initial_date, final_date))).order_by('assignee__email') <NEW_LINE> list = [] <NEW_LINE> i = 0 <NEW_LINE> for each in 
assignees: <NEW_LINE> <INDENT> list.append([]) <NEW_LINE> list[i] = (storys.filter(assignee=each).exclude(status='deliv')).values('assignee__email', 'status','estimate').annotate(d=Count('assignee'),d1=Sum('estimate')) <NEW_LINE> i += 1 <NEW_LINE> <DEDENT> un_assigned_story = storys.filter(assignee=None) <NEW_LINE> count_story = un_assigned_story.count() <NEW_LINE> count_estimate = un_assigned_story.aggregate(Sum('estimate')) <NEW_LINE> context['result']=izip(assignees, list) <NEW_LINE> context['count_story']=count_story <NEW_LINE> context['count_estimate']=count_estimate <NEW_LINE> return context | View to show the different member and their story.
Stories that are started, un-started, and finished,
and to show the unassigned stories of that project | 62598fbfff9c53063f51a876
class Settings: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.screen_width = 1200 <NEW_LINE> self.screen_height = 700 <NEW_LINE> self.bg_color = (230, 230, 230) <NEW_LINE> self.ship_limit = 3 <NEW_LINE> self.bullet_width = 3 <NEW_LINE> self.bullet_height = 15 <NEW_LINE> self.bullet_color = (60, 60, 60) <NEW_LINE> self.bullets_allowed = 3 <NEW_LINE> self.fleet_drop_speed = 10 <NEW_LINE> self.speedup_scale = 1.1 <NEW_LINE> self.score_scale = 1.5 <NEW_LINE> self.initialize_dynamic_settings() <NEW_LINE> <DEDENT> def initialize_dynamic_settings(self): <NEW_LINE> <INDENT> self.ship_speed = 1.5 <NEW_LINE> self.bullet_speed = 3.0 <NEW_LINE> self.alien_speed = 1.0 <NEW_LINE> self.fleet_direction = 1 <NEW_LINE> self.alien_points = 50 <NEW_LINE> <DEDENT> def increase_speed(self): <NEW_LINE> <INDENT> self.ship_speed *= self.speedup_scale <NEW_LINE> self.bullet_speed *= self.speedup_scale <NEW_LINE> self.alien_speed *= self.speedup_scale <NEW_LINE> self.alien_points = int(self.alien_points * self.score_scale) | A class to store all settings for Alien Invasion | 62598fbfe1aae11d1e7ce939 |
class Scene2D: <NEW_LINE> <INDENT> _x = 0 <NEW_LINE> _y = 0 <NEW_LINE> _width = 0 <NEW_LINE> _height = 0 <NEW_LINE> _objects = [] <NEW_LINE> def __init__(self, x, y, width, height): <NEW_LINE> <INDENT> self._x = x <NEW_LINE> self._y = y <NEW_LINE> self._width = width <NEW_LINE> self._height = height <NEW_LINE> <DEDENT> def add_object(self, obj): <NEW_LINE> <INDENT> if "2d" not in obj.dimension: <NEW_LINE> <INDENT> print('Only 2D objects can be added in a 2D scene') <NEW_LINE> return <NEW_LINE> <DEDENT> self._objects.append(obj) <NEW_LINE> return self <NEW_LINE> <DEDENT> def get_2D_info(self): <NEW_LINE> <INDENT> return {'x':self._x, 'y':self._y, 'width':self._width, 'height':self._height} <NEW_LINE> <DEDENT> def get_objects(self): <NEW_LINE> <INDENT> return self._objects <NEW_LINE> <DEDENT> def get_obj_names(self): <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> def visualise(self): <NEW_LINE> <INDENT> return self | Class defining the environment
Values of the abstract properties
* **_x,_y,_z,_width,_height,_depth** = "Dimensions of the scene"
* **_objects** = "a dictionary of objects of the 'physical object' class"
Members
* **_visualise** (): | 62598fbf5fdd1c0f98e5e1ba |
class DlgFromDict(Dlg): <NEW_LINE> <INDENT> def __init__(self, dictionary, title='',fixed=[]): <NEW_LINE> <INDENT> Dlg.__init__(self, title) <NEW_LINE> self.dictionary=dictionary <NEW_LINE> keys = self.dictionary.keys() <NEW_LINE> keys.sort() <NEW_LINE> types=dict([]) <NEW_LINE> for field in keys: <NEW_LINE> <INDENT> types[field] = type(self.dictionary[field]) <NEW_LINE> if field in fixed: <NEW_LINE> <INDENT> self.addFixedField(field,self.dictionary[field]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.addField(field,self.dictionary[field]) <NEW_LINE> <DEDENT> <DEDENT> self.show() <NEW_LINE> if self.OK: <NEW_LINE> <INDENT> for n,thisKey in enumerate(keys): <NEW_LINE> <INDENT> self.dictionary[thisKey]=self.data[n] | Creates a dialogue box that represents a dictionary of values.
Any values changed by the user are changed (in-place) by this
dialogue box.
e.g.:
::
info = {'Observer':'jwp', 'GratingOri':45, 'ExpVersion': 1.1}
infoDlg = gui.DlgFromDict(dictionary=info, title='TestExperiment', fixed=['ExpVersion'])
if infoDlg.OK:
print info
else: print 'User Cancelled'
In the code above, the contents of *info* will be updated to the values
returned by the dialogue box.
If the user cancels (rather than pressing OK),
then the dictionary remains unchanged. If you want to check whether
the user hit OK, then check whether DlgFromDict.OK equals
True or False | 62598fbf7047854f4633f5fc |
class IdGenerator(xblock.runtime.IdGenerator): <NEW_LINE> <INDENT> def create_usage(self, def_id): <NEW_LINE> <INDENT> definition_key = ndb.Key(store.DefinitionEntity, str(def_id)) <NEW_LINE> assert definition_key.get() is not None <NEW_LINE> usage_id = generate_id() <NEW_LINE> usage = store.UsageEntity(id=usage_id) <NEW_LINE> usage.definition_id = def_id <NEW_LINE> usage.put() <NEW_LINE> return usage_id <NEW_LINE> <DEDENT> def create_definition(self, block_type): <NEW_LINE> <INDENT> definition_id = generate_id() <NEW_LINE> definition = store.DefinitionEntity(id=definition_id) <NEW_LINE> definition.block_type = block_type <NEW_LINE> definition.put() <NEW_LINE> return definition_id | Implementation of XBlock IdGenerator using App Engine datastore.
This manages the graph of many-to-one relationships between
usages, definitions, and blocks. The schema is:
usage (n) -- (1) definition (n) -- (1) block_type | 62598fbf71ff763f4b5e79a4 |
class OverkizDescriptiveEntity(OverkizEntity): <NEW_LINE> <INDENT> def __init__( self, device_url: str, coordinator: OverkizDataUpdateCoordinator, description: OverkizSensorDescription | OverkizBinarySensorDescription, ): <NEW_LINE> <INDENT> super().__init__(device_url, coordinator) <NEW_LINE> self.entity_description = description <NEW_LINE> self._attr_name = f"{super().name} {self.entity_description.name}" <NEW_LINE> self._attr_unique_id = f"{super().unique_id}-{self.entity_description.key}" | Representation of a Overkiz device entity based on a description. | 62598fbf4f6381625f1995d6 |
class IFlyTekSTT(AbstractSTTEngine): <NEW_LINE> <INDENT> SLUG = "iflytek-stt" <NEW_LINE> def __init__(self, api_id, api_key, url, **kwargs): <NEW_LINE> <INDENT> self._logger = logging.getLogger(__name__) <NEW_LINE> self.api_id = api_id <NEW_LINE> self.api_key = api_key <NEW_LINE> self.url = url <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def get_config(cls): <NEW_LINE> <INDENT> return config.get('iflytek_yuyin', {}) <NEW_LINE> <DEDENT> def transcribe(self, fp): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> wav_file = wave.open(fp, 'rb') <NEW_LINE> <DEDENT> except IOError: <NEW_LINE> <INDENT> self._logger.critical('wav file not found: %s', fp, exc_info=True) <NEW_LINE> return [] <NEW_LINE> <DEDENT> n_frames = wav_file.getnframes() <NEW_LINE> frame_rate = wav_file.getframerate() <NEW_LINE> Param = str({ "auf": "16k", "aue": "raw", "scene": "main", "sample_rate": "%s" % str(frame_rate) }) <NEW_LINE> XParam = base64.b64encode(Param) <NEW_LINE> audio = wav_file.readframes(n_frames) <NEW_LINE> base_data = base64.b64encode(audio) <NEW_LINE> data = { 'voice_data': base_data, 'api_id': self.api_id, 'api_key': self.api_key, 'sample_rate': frame_rate, 'XParam': XParam } <NEW_LINE> r = requests.post(self.url, data=data) <NEW_LINE> try: <NEW_LINE> <INDENT> r.raise_for_status() <NEW_LINE> text = '' <NEW_LINE> if r.json()['code'] == '00000': <NEW_LINE> <INDENT> text = r.json()['data']['result'].encode('utf-8') <NEW_LINE> <DEDENT> <DEDENT> except requests.exceptions.HTTPError: <NEW_LINE> <INDENT> self._logger.critical('Request failed with response: %r', r.text, exc_info=True) <NEW_LINE> return [] <NEW_LINE> <DEDENT> except requests.exceptions.RequestException: <NEW_LINE> <INDENT> self._logger.critical('Request failed.', exc_info=True) <NEW_LINE> return [] <NEW_LINE> <DEDENT> except ValueError as e: <NEW_LINE> <INDENT> self._logger.critical('Cannot parse response: %s', e.args[0]) <NEW_LINE> return [] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> 
self._logger.critical('Cannot parse response.', exc_info=True) <NEW_LINE> return [] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._logger.warning('Cannot parse response.(code: %s)' % r.json()['code']) <NEW_LINE> transcribed = [] <NEW_LINE> if text: <NEW_LINE> <INDENT> transcribed.append(text.upper()) <NEW_LINE> <DEDENT> self._logger.info(u'讯飞语音识别到了: %s' % text) <NEW_LINE> return transcribed <NEW_LINE> <DEDENT> <DEDENT> @classmethod <NEW_LINE> def is_available(cls): <NEW_LINE> <INDENT> return diagnose.check_network_connection() | 科大讯飞的语音识别API.
要使用本模块, 首先到 http://aiui.xfyun.cn/default/index 注册一个开发者账号,
之后创建一个新应用, 然后在应用管理的那查看 API id 和 API Key
填入 profile.xml 中. | 62598fbf4c3428357761a4e5 |
class RPyRException(RuntimeError): <NEW_LINE> <INDENT> pass | Runtime error while running R code. | 62598fbf4a966d76dd5ef0fd |
class TrayIcon(object): <NEW_LINE> <INDENT> def __init__(self, icon_name="TestTrayIcon", icon_file=None, menu=None, activate=None): <NEW_LINE> <INDENT> if icon_file: <NEW_LINE> <INDENT> self.status_icon=Gtk.status_icon_new_from_file(icon_file) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.status_icon=Gtk.status_icon_new_from_icon_name(icon_name) <NEW_LINE> <DEDENT> if menu: <NEW_LINE> <INDENT> self.connect_popup_menu(menu) <NEW_LINE> <DEDENT> if activate: <NEW_LINE> <INDENT> self.connect_activate(activate) <NEW_LINE> <DEDENT> <DEDENT> def connect_activate(self, method): <NEW_LINE> <INDENT> def activate_callback(status_icon, *data): <NEW_LINE> <INDENT> method() <NEW_LINE> <DEDENT> self.status_icon.connect("activate", activate_callback) <NEW_LINE> <DEDENT> def connect_popup_menu(self, menu): <NEW_LINE> <INDENT> def popup_menu_callback(status_icon, button, activate_time, *data): <NEW_LINE> <INDENT> menu.popup(None, None, Gtk.status_icon_position_menu, button, activate_time, status_icon) <NEW_LINE> <DEDENT> self.status_icon.connect("popup-menu", popup_menu_callback) | This is possibly the thinnest wrapper class I've written. Ever.
It's a tray icon that you can parameterize during initialisation with the
name or file of an icon, with a menu and a simple callback
It will create such an Icon, will display the image, call back the callback
when left clicked and pop up the menu when right clicked. | 62598fbfec188e330fdf8abc |
class Student(object): <NEW_LINE> <INDENT> def __init__(self, name): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return 'student object (name: %s)'%self.name <NEW_LINE> <DEDENT> __repr__ = __str__ | __str__()是在print时调用的方法,重定义实现打印所需要的格式,类似C++的<<运算符重载
__repr__()是直接敲变量,打印出来的东西
operator<<(istream in, Student stu){
in << stu.name
return in;
} | 62598fbf5fcc89381b266261 |
class NoMatchError(Exception): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> Exception.__init__(self, *args, **kwargs) | Exception to be raised when no match can be found. | 62598fbf63d6d428bbee29db |
class MetricsAgeCache(ModelMixin, BaseModel): <NEW_LINE> <INDENT> __tablename__ = 'metrics_age_cache' <NEW_LINE> dateInserted = Column('date_inserted', UTCDateTime, default=datetime.utcnow(), nullable=False) <NEW_LINE> hpoId = Column('hpo_id', String(20), nullable=False) <NEW_LINE> hpoName = Column('hpo_name', String(255), nullable=False) <NEW_LINE> date = Column('date', Date, nullable=False) <NEW_LINE> ageRange = Column('age_range', String(255), nullable=False) <NEW_LINE> ageCount = Column('age_count', Integer, nullable=False) <NEW_LINE> __table_args__ = ( UniqueConstraint('date_inserted', 'hpo_id', 'hpo_name', 'date', 'age_range'), ) | Contains age range metrics data grouped by HPO ID and date.
| 62598fbf377c676e912f6e87 |
class Parser(Parser3): <NEW_LINE> <INDENT> pass | Parser to convert Python to JavaScript.
Instantiate this class with the Python code. Retrieve the JS code
using the dump() method.
In a subclass, you can implement methods called "function_x" or
"method_x", which will then be called during parsing when a
function/method with name "x" is encountered. Several methods and
functions are already implemented in this way.
While working on ast parsing, this resource is very helpful:
https://greentreesnakes.readthedocs.org
Parameters:
code (str): the Python source code.
module (str, optional): the module name. If given, produces an
AMD module.
indent (int): the base indentation level (default 0). One
indentation level means 4 spaces.
docstrings (bool): whether docstrings are included in JS
(default True).
inline_stdlib (bool): whether the used stdlib functions are inlined
(default True). Set to False if the stdlib is already loaded. | 62598fbf99fddb7c1ca62f01 |
class Extractor(): <NEW_LINE> <INDENT> def __init__(self, blockSize=3, image=False, rawPage=''): <NEW_LINE> <INDENT> self.blockSize = blockSize <NEW_LINE> self.saveImage = image <NEW_LINE> self.rawPage = rawPage <NEW_LINE> self.ctexts = [] <NEW_LINE> self.cblocks = [] <NEW_LINE> <DEDENT> def processTags(self): <NEW_LINE> <INDENT> self.body = re.sub(reCOMM, '', self.body) <NEW_LINE> self.body = re.sub(reTRIM.format('script'), '', re.sub(reTRIM.format('style'), '', self.body)) <NEW_LINE> self.body = re.sub(reTRIM.format('SCRIPT'), '', re.sub(reTRIM.format('STYLE'), '', self.body)) <NEW_LINE> self.body = re.sub(reTAG, '', self.body) <NEW_LINE> <DEDENT> def processBlocks(self): <NEW_LINE> <INDENT> self.ctexts = self.body.split('\n') <NEW_LINE> self.textLens = [len(text) for text in self.ctexts] <NEW_LINE> self.cblocks = [0]*(len(self.ctexts) - self.blockSize - 1) <NEW_LINE> lines = len(self.ctexts) <NEW_LINE> for i in range(self.blockSize): <NEW_LINE> <INDENT> self.cblocks = list(map(lambda x,y: x+y, self.textLens[i : lines-1-self.blockSize+i], self.cblocks)) <NEW_LINE> <DEDENT> maxTextLen = max(self.cblocks) <NEW_LINE> self.start = self.end = self.cblocks.index(maxTextLen) <NEW_LINE> while self.start > 0 and self.cblocks[self.start] > min(self.textLens): <NEW_LINE> <INDENT> self.start -= 1 <NEW_LINE> <DEDENT> while self.end < lines - self.blockSize and self.cblocks[self.end] > min(self.textLens): <NEW_LINE> <INDENT> self.end += 1 <NEW_LINE> <DEDENT> return ''.join(self.ctexts[self.start:self.end]) <NEW_LINE> <DEDENT> def processImages(self): <NEW_LINE> <INDENT> self.body = reIMG.sub(r'{{\1}}', self.body) <NEW_LINE> <DEDENT> def getContext(self): <NEW_LINE> <INDENT> body = re.findall(reBODY, self.rawPage) <NEW_LINE> self.body = '' <NEW_LINE> if body: <NEW_LINE> <INDENT> self.body = body[0] <NEW_LINE> <DEDENT> if self.saveImage: <NEW_LINE> <INDENT> self.processImages() <NEW_LINE> <DEDENT> self.processTags() <NEW_LINE> return self.processBlocks() | 根据文本密度提取正文区 | 
62598fbf26068e7796d4cb85 |
class StoragePropertyExpectedValueStoredEvent(StoragePropertyEvent): <NEW_LINE> <INDENT> EVENT_NAME: str = "storages.expectedPropertySaved" | Event fired by storage when property value is written to storage
@package FastyBird:MiniServer!
@module storage
@author Adam Kadlec <adam.kadlec@fastybird.com> | 62598fbf23849d37ff8512dd |
class IMERouteChanged(IMEValue, uint8_t): <NEW_LINE> <INDENT> pass | Route changed | 62598fbfa8370b77170f060b |
class TestDuration(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def testDuration(self): <NEW_LINE> <INDENT> pass | Duration unit test stubs | 62598fbfdc8b845886d537e4 |
class TestRunnerInterface: <NEW_LINE> <INDENT> def __init__(self, testrunner): <NEW_LINE> <INDENT> self.runner = testrunner <NEW_LINE> <DEDENT> def run(self, argv): <NEW_LINE> <INDENT> cf = self.runner.config <NEW_LINE> cf.flags.INTERACTIVE = True <NEW_LINE> cf.flags.DEBUG = 0 <NEW_LINE> cf.flags.VERBOSE = 0 <NEW_LINE> optlist, longopts, args = getopt.getopt(argv[1:], "h?dDvIc:f:m:e:r:") <NEW_LINE> for opt, optarg in optlist: <NEW_LINE> <INDENT> if opt in ("-h", "-?"): <NEW_LINE> <INDENT> print(TestRunnerInterfaceDoc.format( name=os.path.basename(argv[0]))) <NEW_LINE> return <NEW_LINE> <DEDENT> elif opt == "-d": <NEW_LINE> <INDENT> cf.flags.DEBUG += 1 <NEW_LINE> <DEDENT> elif opt == "-D": <NEW_LINE> <INDENT> from pycopia import autodebug <NEW_LINE> <DEDENT> elif opt == "-v": <NEW_LINE> <INDENT> cf.flags.VERBOSE += 1 <NEW_LINE> <DEDENT> elif opt == "-I": <NEW_LINE> <INDENT> cf.flags.INTERACTIVE = False <NEW_LINE> <DEDENT> elif opt == "-c" or opt == "-f": <NEW_LINE> <INDENT> cf.mergefile(optarg) <NEW_LINE> <DEDENT> elif opt == "-m": <NEW_LINE> <INDENT> cf.comment = optarg <NEW_LINE> <DEDENT> elif opt == "-r": <NEW_LINE> <INDENT> cf.reportname = optarg <NEW_LINE> <DEDENT> elif opt == "-e": <NEW_LINE> <INDENT> cf.environmentname = optarg <NEW_LINE> <DEDENT> <DEDENT> cf.evalupdate(longopts) <NEW_LINE> cf.arguments = [os.path.basename(argv[0])] + argv[1:] <NEW_LINE> ui = UI.get_userinterface(themename="ANSITheme") <NEW_LINE> if not args: <NEW_LINE> <INDENT> from . 
import simplechooser <NEW_LINE> args = simplechooser.choose_tests(ui) <NEW_LINE> <DEDENT> if not args: <NEW_LINE> <INDENT> return 10 <NEW_LINE> <DEDENT> objects, errors = module.get_objects(args) <NEW_LINE> if errors: <NEW_LINE> <INDENT> logging.warn("Errors found while loading test object:") <NEW_LINE> for error in errors: <NEW_LINE> <INDENT> logging.warn(error) <NEW_LINE> <DEDENT> <DEDENT> if objects: <NEW_LINE> <INDENT> cf.argv = args <NEW_LINE> rv = self.runner.run(objects, ui) <NEW_LINE> if rv is None: <NEW_LINE> <INDENT> return 11 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return int(rv) <NEW_LINE> <DEDENT> except TypeError: <NEW_LINE> <INDENT> return 12 <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> return len(errors) + 20 | A Basic CLI interface to a TestRunner object.
Instantiate with an instance of a TestRunner.
Call the instance of this with an argv list to instantiate and run the
given tests. | 62598fbfff9c53063f51a878 |
class Cacher: <NEW_LINE> <INDENT> def __init__(self, cache_dir): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> os.makedirs(cache_dir) <NEW_LINE> <DEDENT> except OSError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> self.cache_dir = cache_dir <NEW_LINE> <DEDENT> def add(self, data): <NEW_LINE> <INDENT> h = hashlib.sha256() <NEW_LINE> h.update(data) <NEW_LINE> key = h.hexdigest() <NEW_LINE> path = os.path.join(self.cache_dir, key) <NEW_LINE> with open(path, "wb") as fp: <NEW_LINE> <INDENT> fp.write(data) <NEW_LINE> <DEDENT> return key <NEW_LINE> <DEDENT> def remove(self, key): <NEW_LINE> <INDENT> path = os.path.join(self.cache_dir, key) <NEW_LINE> try: <NEW_LINE> <INDENT> os.remove(path) <NEW_LINE> <DEDENT> except OSError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> def get(self, key): <NEW_LINE> <INDENT> path = os.path.join(self.cache_dir, key) <NEW_LINE> if os.path.exists(path): <NEW_LINE> <INDENT> with open(path, "rb") as fp: <NEW_LINE> <INDENT> return fp.read() <NEW_LINE> <DEDENT> <DEDENT> return None | Simple on-disk cache.
Note that as entries are stored as
individual files, the data being stored should be of significant
size (several KB) or a lot of disk space will likely be wasted. | 62598fbfaad79263cf42e9ff |
class EventFormTestCase(TestCase): <NEW_LINE> <INDENT> longMessage = True <NEW_LINE> def test_validates_and_saves_input(self): <NEW_LINE> <INDENT> self.user = UserFactory() <NEW_LINE> data = { 'title': 'Foo', 'venue': 'Bar', 'start': timezone.now(), 'end': timezone.now() + timezone.timedelta(days=11), } <NEW_LINE> form = EventForm(data=data, created_by=self.user) <NEW_LINE> self.assertTrue(form.is_valid()) <NEW_LINE> instance = form.save() <NEW_LINE> self.assertEqual(Event.objects.all().count(), 1) <NEW_LINE> data.update({'street': 'Foostreet'}) <NEW_LINE> form = EventForm(data=data, instance=instance, created_by=self.user) <NEW_LINE> instance = form.save() <NEW_LINE> self.assertEqual(instance.street, 'Foostreet') <NEW_LINE> form = EventForm(data=data, instance=instance, created_by=self.user, create_from_template=True) <NEW_LINE> self.assertTrue(form.is_valid()) <NEW_LINE> instance = form.save() <NEW_LINE> self.assertEqual(Event.objects.all().count(), 2) <NEW_LINE> data.update({'template_name': 'Foo'}) <NEW_LINE> form = EventForm(data=data, created_by=self.user) <NEW_LINE> self.assertTrue(form.is_valid()) <NEW_LINE> form.save() <NEW_LINE> self.assertEqual( Event.objects.exclude(template_name__exact='').count(), 1) <NEW_LINE> data.update({'street': 'Barstreet'}) <NEW_LINE> instance = Event.objects.get(template_name='Foo') <NEW_LINE> form = EventForm(data=data, instance=instance, created_by=self.user) <NEW_LINE> self.assertTrue(form.is_valid()) <NEW_LINE> instance = form.save() <NEW_LINE> self.assertEqual(instance.street, 'Barstreet') | Tests for the ``EventForm`` form class. | 62598fbf63b5f9789fe8539c |
class CollectionForm(InvenioBaseForm): <NEW_LINE> <INDENT> id = HiddenField() <NEW_LINE> name = StringField(_('Name')) <NEW_LINE> dbquery = StringField(_('Query')) | Collecty form. | 62598fbf442bda511e95c689 |
class Utils: <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def location_matcher(cls, location_snippet, locations): <NEW_LINE> <INDENT> matched_location = '' <NEW_LINE> for location in locations: <NEW_LINE> <INDENT> if location_snippet in location: <NEW_LINE> <INDENT> print('%s setting location to: %s' % (location_snippet, location)) <NEW_LINE> matched_location = location <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> if not matched_location: <NEW_LINE> <INDENT> print('No visited location matches {}.'.format(location_snippet)) <NEW_LINE> raise LocationException('Missing location.') <NEW_LINE> <DEDENT> LOGGER.debug('Matched location=%s', matched_location) <NEW_LINE> return matched_location <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def read_csv(cls, location, csv_files): <NEW_LINE> <INDENT> if not location: <NEW_LINE> <INDENT> raise LocationException('Missing location.') <NEW_LINE> <DEDENT> for file in csv_files: <NEW_LINE> <INDENT> if location in file: <NEW_LINE> <INDENT> LOGGER.debug('Reading data from file: %s', file) <NEW_LINE> with open(file) as csvfile: <NEW_LINE> <INDENT> items = csvfile.readlines() <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return items[1:] | Utilities class. | 62598fbf3617ad0b5ee06372 |
@provides(IWindow) <NEW_LINE> class Window(MWindow, Widget): <NEW_LINE> <INDENT> position = Property(Tuple) <NEW_LINE> size = Property(Tuple) <NEW_LINE> title = Unicode <NEW_LINE> activated = Event <NEW_LINE> closed = Event <NEW_LINE> closing = Event <NEW_LINE> deactivated = Event <NEW_LINE> key_pressed = Event(KeyPressedEvent) <NEW_LINE> opened = Event <NEW_LINE> opening = Event <NEW_LINE> _position = Tuple((-1, -1)) <NEW_LINE> _size = Tuple((-1, -1)) <NEW_LINE> def show(self, visible): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def _add_event_listeners(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def _get_position(self): <NEW_LINE> <INDENT> return self._position <NEW_LINE> <DEDENT> def _set_position(self, position): <NEW_LINE> <INDENT> old = self._position <NEW_LINE> self._position = position <NEW_LINE> self.trait_property_changed('position', old, position) <NEW_LINE> <DEDENT> def _get_size(self): <NEW_LINE> <INDENT> return self._size <NEW_LINE> <DEDENT> def _set_size(self, size): <NEW_LINE> <INDENT> old = self._size <NEW_LINE> self._size = size <NEW_LINE> self.trait_property_changed('size', old, size) | The toolkit specific implementation of a Window. See the IWindow
interface for the API documentation. | 62598fbfad47b63b2c5a7a80 |
class Lightning(pygame.sprite.Sprite): <NEW_LINE> <INDENT> def __init__(self, difficulty): <NEW_LINE> <INDENT> pygame.sprite.Sprite.__init__(self) <NEW_LINE> self.image = pygame.image.load("attackOrb1.gif") <NEW_LINE> self.image = self.image.convert() <NEW_LINE> self.rect = self.image.get_rect() <NEW_LINE> self.difficulty = difficulty <NEW_LINE> self.playerPosX = 0 <NEW_LINE> self.playerPosY = 0 <NEW_LINE> self.attacking = False <NEW_LINE> self.attackElapsed = 0 <NEW_LINE> self.attackDelay = 60 <NEW_LINE> if(self.difficulty == "easy"): <NEW_LINE> <INDENT> self.attackDuration = PLAYER_ATTACK_DURATION_EASY <NEW_LINE> <DEDENT> elif(self.difficulty == "medium"): <NEW_LINE> <INDENT> self.attackDuration = PLAYER_ATTACK_DURATION_MEDIUM <NEW_LINE> <DEDENT> elif(self.difficulty == "hard"): <NEW_LINE> <INDENT> self.attackDuration = PLAYER_ATTACK_DURATION_HARD <NEW_LINE> <DEDENT> self.fullyCharged = True <NEW_LINE> <DEDENT> def update(self): <NEW_LINE> <INDENT> self.lightningXMax = self.playerPosX + LIGHTNING_X_MAX_FROM_PLAYER <NEW_LINE> self.lightningXMin = self.playerPosX - LIGHTNING_X_MAX_FROM_PLAYER <NEW_LINE> self.lightningYMax = self.playerPosY <NEW_LINE> self.lightningYMin = self.playerPosY <NEW_LINE> if(self.attacking and self.attackElapsed == 0): <NEW_LINE> <INDENT> self.fullyCharged = False <NEW_LINE> self.attack() <NEW_LINE> <DEDENT> if(self.attacking or not self.fullyCharged): <NEW_LINE> <INDENT> self.attackElapsed += 1 <NEW_LINE> <DEDENT> if(self.attackElapsed == self.attackDuration): <NEW_LINE> <INDENT> self.attacking = False <NEW_LINE> self.neutral() <NEW_LINE> <DEDENT> if(self.attackElapsed >= self.attackDelay): <NEW_LINE> <INDENT> self.fullyCharged = True <NEW_LINE> self.attackElapsed = 0 <NEW_LINE> <DEDENT> self.followMouse() <NEW_LINE> <DEDENT> def followMouse(self): <NEW_LINE> <INDENT> (mouseX, mouseY) = pygame.mouse.get_pos() <NEW_LINE> self.rect.center = (mouseX * MOUSE_SENSITIVITY_FACTOR, self.lightningYMax) <NEW_LINE> if( self.rect.centerx > 
self.lightningXMax): <NEW_LINE> <INDENT> self.rect.centerx = self.lightningXMax <NEW_LINE> <DEDENT> if( self.rect.centerx < self.lightningXMin): <NEW_LINE> <INDENT> self.rect.centerx = self.lightningXMin <NEW_LINE> <DEDENT> if( self.rect.centery > self.lightningYMax): <NEW_LINE> <INDENT> self.rect.centery = self.lightningYMax <NEW_LINE> <DEDENT> if( self.rect.centery < self.lightningYMin): <NEW_LINE> <INDENT> self.rect.centery = self.lightningYMin <NEW_LINE> <DEDENT> <DEDENT> def attack(self): <NEW_LINE> <INDENT> self.image = pygame.image.load("attackOrb2.gif") <NEW_LINE> self.attacking = False <NEW_LINE> <DEDENT> def neutral(self): <NEW_LINE> <INDENT> self.image = pygame.image.load("attackOrb1.gif") | The lightning class represents the player's attack. Lightning is used to destroy enemies. It is controlled
with the mouse. Some of its functions reflect the Player class' functions. | 62598fbf5fdd1c0f98e5e1bd |
class Channel(_Output, _Input): <NEW_LINE> <INDENT> def __init__(self, time, packet_loss, node_id, maximum_transmission_time): <NEW_LINE> <INDENT> if time is None: <NEW_LINE> <INDENT> raise ValueError('Parameter "time": a time abstraction object' ' expected but "None" value given!') <NEW_LINE> <DEDENT> if packet_loss is None: <NEW_LINE> <INDENT> raise ValueError('Parameter "packet_loss": an object' ' representing a packet loss model expected' ' but "None" value given!') <NEW_LINE> <DEDENT> self.__logger = logger.get_logger(Channel.__name__) <NEW_LINE> assert self.__logger is not None, 'A logger object expected but "None" value got!' <NEW_LINE> check_argument_type(Channel.__name__, 'node_id', int, node_id, self.__logger) <NEW_LINE> if node_id < 0: <NEW_LINE> <INDENT> raise ValueError('Parameter "node_id": a value of the identifier' ' cannot be less that zero but %d given!' % int(node_id)) <NEW_LINE> <DEDENT> check_argument_type(Channel.__name__, 'maximum_transmission_time', float, maximum_transmission_time, self.__logger) <NEW_LINE> if maximum_transmission_time < 0.0: <NEW_LINE> <INDENT> raise ValueError('Parameter "maximum_transmission_time": a value' ' of the maximum message transmission time' ' cannot be less that zero but %f given!' % float(maximum_transmission_time)) <NEW_LINE> <DEDENT> super(Channel, self).__init__(time, packet_loss, node_id, maximum_transmission_time) | This class implements bidirectional communication channels for each node in
the simulated network.
The class has no members and inherits all its methods from two classes:
:class:`_Input` and :class:`_Output`.
Application message passing is implemented here as follows. First, a
message is sent locally by the :meth:`_Output.send_message` method. Then,
it is transmitted in a packet to neighboring nodes by the
:meth:`_Output.transmit_packets` method. If the transmission is
successful, the packet leaves the output channel by calling the
:meth:`_Output.deliver_packet` method and will be transferred to receiving
nodes by calling the :meth:`_Input.capture_packet` methods. Finally, the
message can be received by the application by calling the
:meth:`_Input.receive_message` method. | 62598fbf5166f23b2e24360b |
class CarShopDetailSerializer(serializers.ModelSerializer): <NEW_LINE> <INDENT> district = serializers.SerializerMethodField() <NEW_LINE> @staticmethod <NEW_LINE> def get_district(car_shop): <NEW_LINE> <INDENT> return DistrictSimpleSerializer(car_shop.district).data <NEW_LINE> <DEDENT> class Meta: <NEW_LINE> <INDENT> model = CarShop <NEW_LINE> fields = '__all__' | 店铺详情序列化器 | 62598fbf76e4537e8c3ef7d1 |
class Card: <NEW_LINE> <INDENT> __slots__ = ('_rank', '_suit') <NEW_LINE> def __init__(self, rank: Rank, suit: Suit) -> None: <NEW_LINE> <INDENT> self._rank = rank <NEW_LINE> self._suit = suit <NEW_LINE> <DEDENT> def __str__(self) -> str: <NEW_LINE> <INDENT> return self.get_code() <NEW_LINE> <DEDENT> def __repr__(self) -> str: <NEW_LINE> <INDENT> return '<{}: {}>'.format(self.__class__.__name__, self.get_name()) <NEW_LINE> <DEDENT> def __hash__(self) -> int: <NEW_LINE> <INDENT> return hash(self.get_code()) <NEW_LINE> <DEDENT> def get_rank(self) -> Rank: <NEW_LINE> <INDENT> return self._rank <NEW_LINE> <DEDENT> def get_suit(self) -> Suit: <NEW_LINE> <INDENT> return self._suit <NEW_LINE> <DEDENT> def get_code(self) -> str: <NEW_LINE> <INDENT> return self._rank.get_code() + self._suit.get_code() <NEW_LINE> <DEDENT> def get_name(self) -> str: <NEW_LINE> <INDENT> return '{} of {}'.format(self._rank.get_name(), self._suit.get_name()) <NEW_LINE> <DEDENT> def serialize(self) -> str: <NEW_LINE> <INDENT> return self.get_code() <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def deserialize(cls, code: str) -> Card: <NEW_LINE> <INDENT> return cards.get(code=code) | A card
Parameters
----------
rank : Rank
A card rank
suit : Suit
A card suit
Methods
-------
__str__()
__repr__()
__hash__()
get_rank() : Rank
Returns a card rank
get_suit() : Suit
Returns a card suit
get_code() : str
Returns a card code
get_name() : str
Returns a card name | 62598fbf851cf427c66b84e1 |
class Calculator(object): <NEW_LINE> <INDENT> def calculate(self, **kwargs): <NEW_LINE> <INDENT> pass | Base class of a calculator hierarchy.
NOTE: DO NOT modify this class. | 62598fbfa05bb46b3848aa97 |
class ValueMap(metaclass=_ValueMapMeta): <NEW_LINE> <INDENT> def __new__(cls, *args: object, **kwargs: object) -> 'ValueMap': <NEW_LINE> <INDENT> raise TypeError('ValueMap or derivatives cannot be instantiated.') | An ABC for classes that contain values. They cannot be instantiated.
Allows for sort of a static, immutable dict, but with attribute access.
Also kind of like an Enum, but with direct access to the value. | 62598fbf956e5f7376df5794 |
class EvaluatedNetworkSecurityGroup(msrest.serialization.Model): <NEW_LINE> <INDENT> _validation = { 'rules_evaluation_result': {'readonly': True}, } <NEW_LINE> _attribute_map = { 'network_security_group_id': {'key': 'networkSecurityGroupId', 'type': 'str'}, 'applied_to': {'key': 'appliedTo', 'type': 'str'}, 'matched_rule': {'key': 'matchedRule', 'type': 'MatchedRule'}, 'rules_evaluation_result': {'key': 'rulesEvaluationResult', 'type': '[NetworkSecurityRulesEvaluationResult]'}, } <NEW_LINE> def __init__( self, **kwargs ): <NEW_LINE> <INDENT> super(EvaluatedNetworkSecurityGroup, self).__init__(**kwargs) <NEW_LINE> self.network_security_group_id = kwargs.get('network_security_group_id', None) <NEW_LINE> self.applied_to = kwargs.get('applied_to', None) <NEW_LINE> self.matched_rule = kwargs.get('matched_rule', None) <NEW_LINE> self.rules_evaluation_result = None | Results of network security group evaluation.
Variables are only populated by the server, and will be ignored when sending a request.
:param network_security_group_id: Network security group ID.
:type network_security_group_id: str
:param applied_to: Resource ID of nic or subnet to which network security group is applied.
:type applied_to: str
:param matched_rule: Matched network security rule.
:type matched_rule: ~azure.mgmt.network.v2020_08_01.models.MatchedRule
:ivar rules_evaluation_result: List of network security rules evaluation results.
:vartype rules_evaluation_result:
list[~azure.mgmt.network.v2020_08_01.models.NetworkSecurityRulesEvaluationResult] | 62598fbf7cff6e4e811b5c4f |
class CollapsingDispatcherMixin: <NEW_LINE> <INDENT> _event_delay = 0.2 <NEW_LINE> def dispatch_events(self, event_queue, timeout): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> event_buffer = self._event_buffer <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> event_buffer = self._event_buffer = {} <NEW_LINE> <DEDENT> now = time.time() <NEW_LINE> for (src_path, watch), buffered in tuple(event_buffer.items()): <NEW_LINE> <INDENT> while buffered: <NEW_LINE> <INDENT> event = buffered[0] <NEW_LINE> if getattr(event, '_cancelled', False): <NEW_LINE> <INDENT> buffered.pop() <NEW_LINE> event_queue.task_done() <NEW_LINE> continue <NEW_LINE> <DEDENT> event_time = getattr(event, '_time', 0) <NEW_LINE> if now >= event_time: <NEW_LINE> <INDENT> event_buffer.pop((src_path, watch)) <NEW_LINE> self._dispatch_event(event_queue, watch, event) <NEW_LINE> break <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> timeout = min(timeout, event_time - now) <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> event, watch = event_queue.get(block=True, timeout=timeout) <NEW_LINE> src_path = event.src_path <NEW_LINE> if event.is_directory: <NEW_LINE> <INDENT> self._dispatch_event(event_queue, watch, event) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> buffered = event_buffer[src_path, watch] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> buffered = event_buffer[src_path, watch] = deque() <NEW_LINE> <DEDENT> if not event.is_directory: <NEW_LINE> <INDENT> event_type = event.event_type <NEW_LINE> cancel = self._cancel <NEW_LINE> if event_type is EVENT_TYPE_MODIFIED: <NEW_LINE> <INDENT> cancel(buffered, (EVENT_TYPE_DELETED, EVENT_TYPE_MODIFIED)) <NEW_LINE> <DEDENT> elif event_type is EVENT_TYPE_CREATED: <NEW_LINE> <INDENT> if cancel(buffered, (EVENT_TYPE_DELETED,)): <NEW_LINE> <INDENT> event = None <NEW_LINE> <DEDENT> <DEDENT> elif event_type is EVENT_TYPE_DELETED: <NEW_LINE> <INDENT> if cancel(buffered, (EVENT_TYPE_CREATED,)): <NEW_LINE> <INDENT> 
event = None <NEW_LINE> <DEDENT> cancel(buffered, (EVENT_TYPE_MODIFIED,)) <NEW_LINE> <DEDENT> <DEDENT> if event: <NEW_LINE> <INDENT> event._time = now + self._event_delay <NEW_LINE> buffered.appendleft(event) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> event_queue.task_done() <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> @staticmethod <NEW_LINE> def _cancel(l, event_types): <NEW_LINE> <INDENT> n = 0 <NEW_LINE> for event in l: <NEW_LINE> <INDENT> if event.event_type in event_types: <NEW_LINE> <INDENT> event._cancelled = True <NEW_LINE> n += 1 <NEW_LINE> <DEDENT> <DEDENT> return n <NEW_LINE> <DEDENT> def _dispatch_event(self, event_queue, watch, event): <NEW_LINE> <INDENT> with self._lock: <NEW_LINE> <INDENT> for handler in list(self._handlers.get(watch, [])): <NEW_LINE> <INDENT> if handler in self._handlers.get(watch, []): <NEW_LINE> <INDENT> handler.dispatch(event) <NEW_LINE> <DEDENT> <DEDENT> event_queue.task_done() | This is a slight modification of watchdog's dispatch mechanism. It
buffers events in separate per-file queues. When an event is fired,
rather than being handled immediately, it is postponed for a brief
period (`_event_delay`). Within this window, further incoming events
associated with this file path are collapsed:
- MODIFIED events cancel all pending MODIFIED events, effectively
combining them into a single MODIFIED event. Any DELETED events are
cancelled as well. Collapsing multiple MODIFIED events is of special
interest to us because we only want to reload a module file once per
"atomic" write.
- CREATED and DELETED events cancel each other out. Some programs will
delete the old file before writing out its current contents, or
create temporary files which are deleted almost instantly. We don't
want to know about either of these cases. | 62598fbffff4ab517ebcda11 |
class CopyOperationResult(msrest.serialization.Model): <NEW_LINE> <INDENT> _validation = { 'status': {'required': True}, 'created_date_time': {'required': True}, 'last_updated_date_time': {'required': True}, } <NEW_LINE> _attribute_map = { 'status': {'key': 'status', 'type': 'str'}, 'created_date_time': {'key': 'createdDateTime', 'type': 'iso-8601'}, 'last_updated_date_time': {'key': 'lastUpdatedDateTime', 'type': 'iso-8601'}, 'copy_result': {'key': 'copyResult', 'type': 'CopyResult'}, } <NEW_LINE> def __init__( self, *, status: Union[str, "OperationStatus"], created_date_time: datetime.datetime, last_updated_date_time: datetime.datetime, copy_result: Optional["CopyResult"] = None, **kwargs ): <NEW_LINE> <INDENT> super(CopyOperationResult, self).__init__(**kwargs) <NEW_LINE> self.status = status <NEW_LINE> self.created_date_time = created_date_time <NEW_LINE> self.last_updated_date_time = last_updated_date_time <NEW_LINE> self.copy_result = copy_result | Status and result of the queued copy operation.
All required parameters must be populated in order to send to Azure.
:ivar status: Required. Operation status. Possible values include: "notStarted", "running",
"succeeded", "failed".
:vartype status: str or ~azure.ai.formrecognizer.v2_1.models.OperationStatus
:ivar created_date_time: Required. Date and time (UTC) when the copy operation was submitted.
:vartype created_date_time: ~datetime.datetime
:ivar last_updated_date_time: Required. Date and time (UTC) when the status was last updated.
:vartype last_updated_date_time: ~datetime.datetime
:ivar copy_result: Results of the copy operation.
:vartype copy_result: ~azure.ai.formrecognizer.v2_1.models.CopyResult | 62598fbf7b180e01f3e49165 |
class PhagocyteDestroyInternals(Destroy): <NEW_LINE> <INDENT> def __init__(self, node_types, probability, phagocyte_compartment, internal_compartment, healed_phagocyte_compartment=None): <NEW_LINE> <INDENT> self.phagocyte_compartment = phagocyte_compartment <NEW_LINE> self.healed_phagocyte_compartment = healed_phagocyte_compartment <NEW_LINE> Destroy.__init__(self, node_types, probability, internal_compartment) <NEW_LINE> <DEDENT> def increment_state_variable_from_node(self, node, network): <NEW_LINE> <INDENT> if node.subpopulations[self.compartment_destroyed] > 0: <NEW_LINE> <INDENT> return node.subpopulations[self.phagocyte_compartment] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> <DEDENT> def update_node(self, node, network): <NEW_LINE> <INDENT> Destroy.update_node(self, node, network) <NEW_LINE> if node.subpopulations[self.compartment_destroyed] < node.subpopulations[self.phagocyte_compartment]: <NEW_LINE> <INDENT> node.update_subpopulation(self.phagocyte_compartment, -1) <NEW_LINE> node.update_subpopulation(self.healed_phagocyte_compartment, 1) | Phagocyte destroys something that is inside it. | 62598fbf56ac1b37e6302419 |
@add_printer <NEW_LINE> class BoostSharedPtr: <NEW_LINE> <INDENT> printer_name = 'boost::shared/weak_ptr/array' <NEW_LINE> version = '1.40' <NEW_LINE> template_name = ['boost::shared_array', 'boost::shared_ptr', 'boost::weak_array', 'boost::weak_ptr'] <NEW_LINE> def __init__(self, value): <NEW_LINE> <INDENT> self.typename = value.type_name <NEW_LINE> self.value = value <NEW_LINE> <DEDENT> def to_string(self): <NEW_LINE> <INDENT> if self.value['px'] == 0x0: <NEW_LINE> <INDENT> return '(%s) %s' % (self.typename, self.value['px']) <NEW_LINE> <DEDENT> countobj = self.value['pn']['pi_'].dereference() <NEW_LINE> refcount = countobj['use_count_'] <NEW_LINE> weakcount = countobj['weak_count_'] <NEW_LINE> return '(%s) (count %d, weak count %d) %s' % (self.typename, refcount, weakcount, self.value['px']) | Pretty Printer for boost::shared/weak_ptr/array (Boost.SmartPtr) | 62598fbf5fdd1c0f98e5e1be |
class DummyAdapter(ApiAdapter): <NEW_LINE> <INDENT> def __init__(self, **kwargs): <NEW_LINE> <INDENT> super(DummyAdapter, self).__init__(**kwargs) <NEW_LINE> self.background_task_counter = 0 <NEW_LINE> if self.options.get('background_task_enable', False): <NEW_LINE> <INDENT> task_interval = float( self.options.get('background_task_interval', 1.0) ) <NEW_LINE> logging.debug( "Launching background task with interval %.2f secs", task_interval ) <NEW_LINE> self.background_task = PeriodicCallback( self.background_task_callback, task_interval * 1000 ) <NEW_LINE> self.background_task.start() <NEW_LINE> <DEDENT> logging.debug('DummyAdapter loaded') <NEW_LINE> <DEDENT> def background_task_callback(self): <NEW_LINE> <INDENT> logging.debug("%s: background task running, count = %d", self.name, self.background_task_counter) <NEW_LINE> self.background_task_counter += 1 <NEW_LINE> <DEDENT> @response_types('application/json', default='application/json') <NEW_LINE> def get(self, path, request): <NEW_LINE> <INDENT> if path == 'background_task_count': <NEW_LINE> <INDENT> response = {'response': { 'background_task_count': self.background_task_counter} } <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> response = {'response': 'DummyAdapter: GET on path {}'.format(path)} <NEW_LINE> <DEDENT> content_type = 'application/json' <NEW_LINE> status_code = 200 <NEW_LINE> logging.debug(response) <NEW_LINE> return ApiAdapterResponse(response, content_type=content_type, status_code=status_code) <NEW_LINE> <DEDENT> @request_types('application/json', 'application/vnd.odin-native') <NEW_LINE> @response_types('application/json', default='application/json') <NEW_LINE> def put(self, path, request): <NEW_LINE> <INDENT> response = {'response': 'DummyAdapter: PUT on path {}'.format(path)} <NEW_LINE> content_type = 'application/json' <NEW_LINE> status_code = 200 <NEW_LINE> logging.debug(response) <NEW_LINE> return ApiAdapterResponse(response, content_type=content_type, status_code=status_code) <NEW_LINE> 
<DEDENT> def delete(self, path, request): <NEW_LINE> <INDENT> response = 'DummyAdapter: DELETE on path {}'.format(path) <NEW_LINE> status_code = 200 <NEW_LINE> logging.debug(response) <NEW_LINE> return ApiAdapterResponse(response, status_code=status_code) <NEW_LINE> <DEDENT> def cleanup(self): <NEW_LINE> <INDENT> logging.debug("DummyAdapter cleanup: stopping background task") <NEW_LINE> self.background_task.stop() <NEW_LINE> self.background_task_counter = 0 | Dummy adapter class for the ODIN server.
This dummy adapter implements the basic operation of an adapter including initialisation
and HTTP verb methods (GET, PUT, DELETE) with various request and response types allowed. | 62598fbf71ff763f4b5e79a8 |
class DnsClient(): <NEW_LINE> <INDENT> def __init__(self, *args): <NEW_LINE> <INDENT> addresses = ArrayList(len(args)) <NEW_LINE> for item in args: <NEW_LINE> <INDENT> addresses.add(InetSocketAddress(item[0], item[1])) <NEW_LINE> <DEDENT> self.java_obj = org.vertx.java.platform.impl.JythonVerticleFactory.createDnsClient(addresses) <NEW_LINE> <DEDENT> def lookup(self, name, handler): <NEW_LINE> <INDENT> self.java_obj.lookup(name, AsyncHandler(handler, self.__address_converter)) <NEW_LINE> return self <NEW_LINE> <DEDENT> def lookup_4(self, name, handler): <NEW_LINE> <INDENT> self.java_obj.lookup4(name, AsyncHandler(handler, self.__address_converter)) <NEW_LINE> return self <NEW_LINE> <DEDENT> def lookup_6(self, name, handler): <NEW_LINE> <INDENT> self.java_obj.lookup6(name, AsyncHandler(handler, self.__address_converter)) <NEW_LINE> return self <NEW_LINE> <DEDENT> def resolve_a(self, name, handler): <NEW_LINE> <INDENT> self.java_obj.resolveA(name, AsyncHandler(handler, self.__address_array_converter)) <NEW_LINE> return self <NEW_LINE> <DEDENT> def resolve_aaaa(self, name, handler): <NEW_LINE> <INDENT> self.java_obj.resolveAAAA(name, AsyncHandler(handler, self.__address_array_converter)) <NEW_LINE> return self <NEW_LINE> <DEDENT> def resolve_cname(self, name, handler): <NEW_LINE> <INDENT> self.java_obj.resolveCNAME(name, AsyncHandler(handler)) <NEW_LINE> return self <NEW_LINE> <DEDENT> def resolve_txt(self, name, handler): <NEW_LINE> <INDENT> self.java_obj.resolveTXT(name, AsyncHandler(handler)) <NEW_LINE> return self <NEW_LINE> <DEDENT> def resolve_mx(self, name, handler): <NEW_LINE> <INDENT> def converter(array): <NEW_LINE> <INDENT> def record_converter(record): <NEW_LINE> <INDENT> return MxRecord(record) <NEW_LINE> <DEDENT> return map(record_converter, array) <NEW_LINE> <DEDENT> self.java_obj.resolveMX(name, AsyncHandler(handler, converter)) <NEW_LINE> return self <NEW_LINE> <DEDENT> def resolve_ptr(self, name, handler): <NEW_LINE> <INDENT> 
self.java_obj.resolvePTR(name, AsyncHandler(handler)) <NEW_LINE> return self <NEW_LINE> <DEDENT> def resolve_ns(self, name, handler): <NEW_LINE> <INDENT> self.java_obj.resolveNS(name, AsyncHandler(handler)) <NEW_LINE> return self <NEW_LINE> <DEDENT> def resolve_srv(self, name, handler): <NEW_LINE> <INDENT> def converter(array): <NEW_LINE> <INDENT> def record_converter(record): <NEW_LINE> <INDENT> return SrvRecord(record) <NEW_LINE> <DEDENT> return map(record_converter, array) <NEW_LINE> <DEDENT> self.java_obj.resolveSRV(name, AsyncHandler(handler, converter)) <NEW_LINE> return self <NEW_LINE> <DEDENT> def reverse_lookup(self, ip, handler): <NEW_LINE> <INDENT> self.java_obj.reverseLookup(ip, AsyncHandler(handler, self.__host_converter)) <NEW_LINE> return self <NEW_LINE> <DEDENT> def __address_converter(self, addr): <NEW_LINE> <INDENT> return addr.getHostAddress() <NEW_LINE> <DEDENT> def __host_converter(self, addr): <NEW_LINE> <INDENT> return addr.getHostName() <NEW_LINE> <DEDENT> def __address_array_converter(self, array): <NEW_LINE> <INDENT> return map(self.__address_converter, array) | Provides a way to asynchronous lookup informations from DNS-Servers. | 62598fbf4f6381625f1995d8 |
class OrderForm(forms.ModelForm): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> model = Order <NEW_LINE> fields = ('name', 'email', 'phone', 'street_address', 'suburb', 'city', 'post_code', 'country') <NEW_LINE> <DEDENT> def clean(self): <NEW_LINE> <INDENT> if not self.cleaned_data.get('email', None) and not self.cleaned_data.get('phone', None): <NEW_LINE> <INDENT> self._errors['email'] = forms.util.ErrorList(['Please enter an email or phone.']) <NEW_LINE> <DEDENT> return self.cleaned_data <NEW_LINE> <DEDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super(OrderForm, self).__init__(*args, **kwargs) <NEW_LINE> for f in ('name', 'email', 'street_address', 'city', 'post_code', 'country'): <NEW_LINE> <INDENT> if f in self.fields: <NEW_LINE> <INDENT> self.fields[f].required = True | Standard order information form. | 62598fbf656771135c48989c |
class StoryFindFrame(wx.Frame): <NEW_LINE> <INDENT> def __init__(self, storyPanel, app, parent = None): <NEW_LINE> <INDENT> self.storyPanel = storyPanel <NEW_LINE> self.app = app <NEW_LINE> wx.Frame.__init__(self, parent, wx.ID_ANY, title = 'Find in Story', style = wx.MINIMIZE_BOX | wx.CLOSE_BOX | wx.CAPTION | wx.SYSTEM_MENU) <NEW_LINE> sizer = wx.BoxSizer(wx.VERTICAL) <NEW_LINE> self.SetSizer(sizer) <NEW_LINE> findPanel = FindPanel(parent = self, onFind = self.onFind, onClose = self.onClose) <NEW_LINE> findPanel.focus() <NEW_LINE> sizer.Add(findPanel) <NEW_LINE> sizer.Fit(self) <NEW_LINE> self.SetIcon(self.app.icon) <NEW_LINE> self.Show() <NEW_LINE> <DEDENT> def onFind(self, regexp, flags): <NEW_LINE> <INDENT> self.storyPanel.findWidgetRegexp(regexp, flags) <NEW_LINE> <DEDENT> def onClose(self): <NEW_LINE> <INDENT> self.Close() | This allows the user to search a StoryPanel for a string of text.
This is just a front-end to method calls on StoryPanel. | 62598fbfec188e330fdf8ac0 |
class TextSpan(Renderable): <NEW_LINE> <INDENT> def __init__(self, innertext: str): <NEW_LINE> <INDENT> self.innertext = innertext <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return f"<{self.innertext}>" | A length of text. | 62598fbf5166f23b2e24360d |
class Pix2Sky_STG(Zenithal): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(Pix2Sky_STG, self).__init__(parnames=[]) <NEW_LINE> <DEDENT> def _compute_rtheta(self, x, y): <NEW_LINE> <INDENT> return np.sqrt(x**2 + y**2) <NEW_LINE> <DEDENT> def inverse(self): <NEW_LINE> <INDENT> return Sky2Pix_STG() <NEW_LINE> <DEDENT> def __call__(self, x, y): <NEW_LINE> <INDENT> x = np.asarray(x) + 0. <NEW_LINE> y = np.asarray(y) + 0. <NEW_LINE> phi = np.rad2deg(np.arctan2(x, -y)) <NEW_LINE> rtheta = self._compute_rtheta(x, y) <NEW_LINE> theta = 90 - np.rad2deg(2 * np.arctan(rtheta/(2*self.r0))) <NEW_LINE> return phi, theta | STG : Stereographic Projection - pixel to sky. | 62598fbf091ae35668704e53 |
class Source(object): <NEW_LINE> <INDENT> def __init__(self, directory, hashed_files, keyrings, require_signature=True): <NEW_LINE> <INDENT> self.hashed_files = hashed_files <NEW_LINE> self._dsc_file = None <NEW_LINE> for f in hashed_files: <NEW_LINE> <INDENT> if re_file_dsc.match(f.filename): <NEW_LINE> <INDENT> if self._dsc_file is not None: <NEW_LINE> <INDENT> raise InvalidSourceException("Multiple .dsc found ({0} and {1})".format(self._dsc_file.filename, f.filename)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._dsc_file = f <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> self._dsc_file.check(directory) <NEW_LINE> dsc_file_path = os.path.join(directory, self._dsc_file.filename) <NEW_LINE> data = open(dsc_file_path, 'r').read() <NEW_LINE> self._signed_file = SignedFile(data, keyrings, require_signature) <NEW_LINE> self.dsc = apt_pkg.TagSection(self._signed_file.contents) <NEW_LINE> self._files = None <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_file(cls, directory, filename, keyrings, require_signature=True): <NEW_LINE> <INDENT> hashed_file = HashedFile.from_file(directory, filename) <NEW_LINE> return cls(directory, [hashed_file], keyrings, require_signature) <NEW_LINE> <DEDENT> @property <NEW_LINE> def files(self): <NEW_LINE> <INDENT> if self._files is None: <NEW_LINE> <INDENT> self._files = parse_file_list(self.dsc, False) <NEW_LINE> <DEDENT> return self._files <NEW_LINE> <DEDENT> @property <NEW_LINE> def primary_fingerprint(self): <NEW_LINE> <INDENT> return self._signed_file.primary_fingerprint <NEW_LINE> <DEDENT> @property <NEW_LINE> def valid_signature(self): <NEW_LINE> <INDENT> return self._signed_file.valid <NEW_LINE> <DEDENT> @property <NEW_LINE> def component(self): <NEW_LINE> <INDENT> if 'Section' not in self.dsc: <NEW_LINE> <INDENT> return 'main' <NEW_LINE> <DEDENT> fields = self.dsc['Section'].split('/') <NEW_LINE> if len(fields) > 1: <NEW_LINE> <INDENT> return fields[0] <NEW_LINE> <DEDENT> return "main" <NEW_LINE> <DEDENT> @property <NEW_LINE> 
def filename(self): <NEW_LINE> <INDENT> return self._dsc_file.filename | Representation of a source package
| 62598fbf99fddb7c1ca62f03 |
class DiscontinuousElement(FiniteElementBase): <NEW_LINE> <INDENT> def __init__(self, element): <NEW_LINE> <INDENT> super(DiscontinuousElement, self).__init__() <NEW_LINE> self.element = element <NEW_LINE> <DEDENT> @property <NEW_LINE> def cell(self): <NEW_LINE> <INDENT> return self.element.cell <NEW_LINE> <DEDENT> @property <NEW_LINE> def degree(self): <NEW_LINE> <INDENT> return self.element.degree <NEW_LINE> <DEDENT> @cached_property <NEW_LINE> def formdegree(self): <NEW_LINE> <INDENT> return self.element.cell.get_spatial_dimension() <NEW_LINE> <DEDENT> @cached_property <NEW_LINE> def _entity_dofs(self): <NEW_LINE> <INDENT> result = {dim: {i: [] for i in entities} for dim, entities in self.cell.get_topology().items()} <NEW_LINE> cell_dimension = self.cell.get_dimension() <NEW_LINE> result[cell_dimension][0].extend(range(self.space_dimension())) <NEW_LINE> return result <NEW_LINE> <DEDENT> def entity_dofs(self): <NEW_LINE> <INDENT> return self._entity_dofs <NEW_LINE> <DEDENT> @cached_property <NEW_LINE> def entity_permutations(self): <NEW_LINE> <INDENT> if self.element.entity_dofs() == self.element.entity_closure_dofs(): <NEW_LINE> <INDENT> return self.element.entity_permutations <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise NotImplementedError(f"entity_permutations not yet implemented for a general {type(self)}") <NEW_LINE> <DEDENT> <DEDENT> def space_dimension(self): <NEW_LINE> <INDENT> return self.element.space_dimension() <NEW_LINE> <DEDENT> @property <NEW_LINE> def index_shape(self): <NEW_LINE> <INDENT> return self.element.index_shape <NEW_LINE> <DEDENT> @property <NEW_LINE> def value_shape(self): <NEW_LINE> <INDENT> return self.element.value_shape <NEW_LINE> <DEDENT> @cached_property <NEW_LINE> def fiat_equivalent(self): <NEW_LINE> <INDENT> return FIAT.DiscontinuousElement(self.element.fiat_equivalent) <NEW_LINE> <DEDENT> def basis_evaluation(self, order, ps, entity=None, coordinate_mapping=None): <NEW_LINE> <INDENT> return 
self.element.basis_evaluation(order, ps, entity, coordinate_mapping=coordinate_mapping) <NEW_LINE> <DEDENT> def point_evaluation(self, order, refcoords, entity=None): <NEW_LINE> <INDENT> return self.element.point_evaluation(order, refcoords, entity) <NEW_LINE> <DEDENT> @property <NEW_LINE> def dual_basis(self): <NEW_LINE> <INDENT> return self.element.dual_basis <NEW_LINE> <DEDENT> @property <NEW_LINE> def mapping(self): <NEW_LINE> <INDENT> return self.element.mapping | Element wrapper that makes a FInAT element discontinuous. | 62598fbf956e5f7376df5795 |
class Movie(Video): <NEW_LINE> <INDENT> def __init__(self, title, storyline, poster_image, trailer_youtube): <NEW_LINE> <INDENT> Video.__init__(self, title, storyline) <NEW_LINE> self.poster_image_url = poster_image <NEW_LINE> self.trailer_youtube_url = trailer_youtube <NEW_LINE> <DEDENT> def show_trailer(self): <NEW_LINE> <INDENT> webbrowser.open(self.trailer_youtube_url) | This class provides a way to store movie-related information.
Inherits from Video. | 62598fbf66656f66f7d5a620 |
class IPKeyBlind(IPBlind, HelperActionPress): <NEW_LINE> <INDENT> def __init__(self, device_description, proxy, resolveparamsets=False): <NEW_LINE> <INDENT> super().__init__(device_description, proxy, resolveparamsets) <NEW_LINE> self.EVENTNODE.update({"PRESS_SHORT": [1, 2], "PRESS_LONG": [1, 2]}) <NEW_LINE> <DEDENT> @property <NEW_LINE> def ELEMENT(self): <NEW_LINE> <INDENT> return [4] | Blind switch that raises and lowers homematic ip roller shutters or window blinds. | 62598fbfdc8b845886d537e8 |
@NS.route('/mostanswers') <NEW_LINE> class UserQuestionAnswer(Resource): <NEW_LINE> <INDENT> @cors.crossdomain(origin='*') <NEW_LINE> @jwt_required <NEW_LINE> @V2_API.doc('Question with most answers') <NEW_LINE> @V2_API.response(200, 'Success') <NEW_LINE> def get(self): <NEW_LINE> <INDENT> questions = Question.get_all() <NEW_LINE> myquestions = [question for question in questions if question['created_by'] == get_jwt_identity()] <NEW_LINE> list_num = [question['answers'] for question in questions if question['created_by'] == get_jwt_identity()] <NEW_LINE> if questions == []: <NEW_LINE> <INDENT> response = { 'message': 'There are no questions' } <NEW_LINE> return jsonify(response), 404 <NEW_LINE> <DEDENT> most_answer = heapq.nlargest(2, list_num) <NEW_LINE> print(most_answer) <NEW_LINE> for i in most_answer: <NEW_LINE> <INDENT> if i < 0: <NEW_LINE> <INDENT> response = { 'message': 'Your question has no answers' } <NEW_LINE> return jsonify(response) <NEW_LINE> <DEDENT> <DEDENT> all_quiz = [quiz for quiz in myquestions if quiz['answers'] in most_answer and quiz['answers'] > 0] <NEW_LINE> response = { 'total': len(all_quiz), 'data': all_quiz } <NEW_LINE> return jsonify(response), 200 | Most answered question | 62598fbf50812a4eaa620d00 |
class CorsItem(Item): <NEW_LINE> <INDENT> code = Field() <NEW_LINE> name = Field() <NEW_LINE> desc = Field() <NEW_LINE> mc = Field() <NEW_LINE> lecture_time_table = Field() <NEW_LINE> tutorial_time_table = Field() <NEW_LINE> exam = Field() <NEW_LINE> prerequisite = Field() <NEW_LINE> preclusion = Field() <NEW_LINE> workload = Field() | Scrapy data structure
| 62598fbf7d847024c075c5ea |
class AlignRecords(object): <NEW_LINE> <INDENT> qname="" <NEW_LINE> rname="" <NEW_LINE> start=1 <NEW_LINE> end=1 <NEW_LINE> strand=1 <NEW_LINE> def __init__(self, alignment_record=None): <NEW_LINE> <INDENT> if alignment_record is not None: <NEW_LINE> <INDENT> self.qname=alignment_record.query_name <NEW_LINE> self.rname=alignment_record.reference_name <NEW_LINE> self.start=alignment_record.reference_start <NEW_LINE> self.end=alignment_record.reference_end <NEW_LINE> if self.start > self.end: <NEW_LINE> <INDENT> self.start,self.end= self.end, self.start <NEW_LINE> <DEDENT> self.strand=1 if alignment_record.is_read1 else 2 <NEW_LINE> <DEDENT> <DEDENT> def get_length(self): <NEW_LINE> <INDENT> return self.end-self.start <NEW_LINE> <DEDENT> def is_valid(self, minlen): <NEW_LINE> <INDENT> return True if (self.start < self.end) and (self.end-self.start>minlen) else False <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "{0}_{1} {2} {3} {4}".format(self.qname, self.strand, self.rname, (self.start+1),(self.end+1)) | Tiny version of pysam.AlignedSegment | 62598fbfcc40096d6161a2f0 |
class BoxGUI(model.Element): <NEW_LINE> <INDENT> def __init__(self, drawers, x, y, w, h, batch, groups): <NEW_LINE> <INDENT> self.x = x <NEW_LINE> self.y = y <NEW_LINE> self.w = w <NEW_LINE> self.h = h <NEW_LINE> drawers_gui = [] <NEW_LINE> for drawer in drawers: <NEW_LINE> <INDENT> drawers_gui.append(DrawerGUI(drawer.subelems, self, batch, groups)) <NEW_LINE> <DEDENT> super().__init__(drawers_gui) <NEW_LINE> <DEDENT> def resize(self, x, y, w, h, bm, dm, hh, ht): <NEW_LINE> <INDENT> drawer_num = len(self.subelems) <NEW_LINE> module_h = (h - 2 * bm) / drawer_num <NEW_LINE> drawer_w = w - 2 * (bm + dm) <NEW_LINE> drawer_h = module_h - 2 * dm <NEW_LINE> drawer_x = x + bm + dm <NEW_LINE> module_y = y + bm + dm <NEW_LINE> for i,drawer in enumerate(self.subelems): <NEW_LINE> <INDENT> drawer_y = module_y + (drawer_num - 1 - i) * module_h <NEW_LINE> drawer.resize(drawer_x, drawer_y, drawer_w, drawer_h, hh, ht) | Class of box objects with resizeable GUI | 62598fbfadb09d7d5dc0a7ac |
class APIC(Frame): <NEW_LINE> <INDENT> _framespec = [ EncodingSpec('encoding'), Latin1TextSpec('mime'), PictureTypeSpec('type'), EncodedTextSpec('desc'), BinaryDataSpec('data'), ] <NEW_LINE> def __eq__(self, other): <NEW_LINE> <INDENT> return self.data == other <NEW_LINE> <DEDENT> __hash__ = Frame.__hash__ <NEW_LINE> @property <NEW_LINE> def HashKey(self): <NEW_LINE> <INDENT> return '%s:%s' % (self.FrameID, self.desc) <NEW_LINE> <DEDENT> def _merge_frame(self, other): <NEW_LINE> <INDENT> other.desc += u" " <NEW_LINE> return other <NEW_LINE> <DEDENT> def _pprint(self): <NEW_LINE> <INDENT> type_desc = str(self.type) <NEW_LINE> if hasattr(self.type, "_pprint"): <NEW_LINE> <INDENT> type_desc = self.type._pprint() <NEW_LINE> <DEDENT> return "%s, %s (%s, %d bytes)" % ( type_desc, self.desc, self.mime, len(self.data)) | Attached (or linked) Picture.
Attributes:
* encoding -- text encoding for the description
* mime -- a MIME type (e.g. image/jpeg) or '-->' if the data is a URI
* type -- the source of the image (3 is the album front cover)
* desc -- a text description of the image
* data -- raw image data, as a byte string
Mutagen will automatically compress large images when saving tags. | 62598fbf7d43ff248742751c |
class InitializerBase(abc.ABC): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __call__(self, img, band=0, dx=None, seed=None): <NEW_LINE> <INDENT> if dx is None: <NEW_LINE> <INDENT> dx = numpy.ones(img.ndim, dtype=numpy.float) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> dx = numpy.array(dx, dtype=numpy.float) <NEW_LINE> if len(dx) != img.ndim: <NEW_LINE> <INDENT> msg = "Number of dx terms ({}) doesn't match dimensions ({})" <NEW_LINE> raise ValueError(msg.format(len(dx), img.ndim)) <NEW_LINE> <DEDENT> <DEDENT> if seed is None: <NEW_LINE> <INDENT> seed = numpy.array(img.shape) / 2 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> seed = numpy.array(seed) <NEW_LINE> <DEDENT> init_mask = self.initialize(img=img, dx=dx, seed=seed) <NEW_LINE> if not isinstance(init_mask, numpy.ndarray): <NEW_LINE> <INDENT> msg = ("Returned initializer was type {} but " "should be numpy.ndarray") <NEW_LINE> raise TypeError(msg.format(type(init_mask))) <NEW_LINE> <DEDENT> if init_mask.dtype != numpy.bool: <NEW_LINE> <INDENT> msg = "Returned initializer was dtype {} but should be bool" <NEW_LINE> raise TypeError(msg.format(init_mask.dtype)) <NEW_LINE> <DEDENT> if init_mask.shape != img.shape: <NEW_LINE> <INDENT> msg = "Returned initializer was shape {} but should be {}" <NEW_LINE> raise ValueError(msg.format(init_mask.shape, img.shape)) <NEW_LINE> <DEDENT> u = 2 * init_mask.astype(numpy.float) - 1 <NEW_LINE> dist, mask = distance_transform(arr=u, band=band, dx=dx) <NEW_LINE> return u, dist, mask <NEW_LINE> <DEDENT> @abc.abstractmethod <NEW_LINE> def initialize(self, img, dx, seed): <NEW_LINE> <INDENT> raise NotImplementedError | The abstract base class for level set initializer functions.
| 62598fbf377c676e912f6e8a |
class ChildService(Service, object): <NEW_LINE> <INDENT> log = Logger() <NEW_LINE> def __init__(self, fd, protocolFactory): <NEW_LINE> <INDENT> self.fd = fd <NEW_LINE> self.protocolFactory = protocolFactory <NEW_LINE> <DEDENT> def startService(self): <NEW_LINE> <INDENT> factory = ReportingWrapperFactory( self.protocolFactory, self.fd, self.createTransport ) <NEW_LINE> self.wrappedProtocolFactory = factory <NEW_LINE> factory.inheritedPort.startReading() <NEW_LINE> factory.inheritedPort.reportStatus("0") <NEW_LINE> return super(ChildService, self).startService() <NEW_LINE> <DEDENT> def stopService(self): <NEW_LINE> <INDENT> factory = self.wrappedProtocolFactory <NEW_LINE> factory.inheritedPort.stopReading() <NEW_LINE> factory.allConnectionsClosed() <NEW_LINE> return super(ChildService, self).stopService() <NEW_LINE> <DEDENT> def createTransport(self, socket, peer, data, protocol): <NEW_LINE> <INDENT> from twisted.internet import reactor <NEW_LINE> factory = self.wrappedProtocolFactory <NEW_LINE> factory.inheritedPort.reportStatus("+") <NEW_LINE> self.log.info("{factory.inheritedPort.statusQueue}", factory=factory) <NEW_LINE> socketFD = socket.fileno() <NEW_LINE> transport = reactor.adoptStreamConnection( socketFD, getsockfam(socketFD), factory ) <NEW_LINE> transport.startReading() <NEW_LINE> return transport | Service for child processes. | 62598fbf57b8e32f52508235 |
class UpdateOwnStstus(permissions.BasePermission): <NEW_LINE> <INDENT> def has_object_permission(self, request, view, obj): <NEW_LINE> <INDENT> if request.method in permissions.SAFE_METHODS: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return obj.user_profile.id == request.user.id | Allow users to update their own profile | 62598fbf99fddb7c1ca62f04 |
class TimedBlockPublisher(BlockPublisherInterface): <NEW_LINE> <INDENT> def __init__(self, wait_time=20): <NEW_LINE> <INDENT> self._wait_time = wait_time <NEW_LINE> self._last_block_time = time.time() <NEW_LINE> <DEDENT> def initialize_block(self, block_header): <NEW_LINE> <INDENT> block_header.consensus = b"TimedDevmode" <NEW_LINE> <DEDENT> def check_publish_block(self, block): <NEW_LINE> <INDENT> if time.time() - self._last_block_time > self._wait_time: <NEW_LINE> <INDENT> self._last_block_time = time.time() <NEW_LINE> return True <NEW_LINE> <DEDENT> return False <NEW_LINE> <DEDENT> def finalize_block(self, block_header): <NEW_LINE> <INDENT> pass | Provides a timed block claim mechanism based on
the number of seconds since that validator last claimed
a block | 62598fc0a8370b77170f0611 |
class MssClamp(A10BaseClass): <NEW_LINE> <INDENT> def __init__(self, **kwargs): <NEW_LINE> <INDENT> self.ERROR_MSG = "" <NEW_LINE> self.required=[] <NEW_LINE> self.b_key = "mss-clamp" <NEW_LINE> self.a10_url="/axapi/v3/cgnv6/lsn/tcp/mss-clamp" <NEW_LINE> self.DeviceProxy = "" <NEW_LINE> self.mss_subtract = "" <NEW_LINE> self.mss_value = "" <NEW_LINE> self.mss_clamp_type = "" <NEW_LINE> self.A10WW_min = "" <NEW_LINE> for keys, value in kwargs.items(): <NEW_LINE> <INDENT> setattr(self,keys, value) | Class Description::
LSN TCP MSS Clamping.
Class mss-clamp supports CRUD Operations and inherits from `common/A10BaseClass`.
This class is the `"PARENT"` class for this module.`
:param mss_subtract: {"description": "Specify the value to subtract from the TCP MSS (default: not configured)", "format": "number", "type": "number", "maximum": 1460, "minimum": 0, "optional": true}
:param mss_value: {"description": "The max value allowed for the TCP MSS (default: not configured)},", "format": "number", "type": "number", "maximum": 1460, "minimum": 0, "optional": true}
:param mss_clamp_type: {"description": "'fixed': Specify a fixed max value for the TCP MSS; 'subtract': Specify the value to subtract from the TCP MSS; 'none': No TCP MSS clamping (default); ", "format": "enum", "default": "none", "type": "string", "enum": ["fixed", "subtract", "none"], "optional": true}
:param min: {"description": "Specify the min value allowed for the TCP MSS (Specify the min value allowed for the TCP MSS (default: ((576 - 60 - 60))))", "format": "number", "default": 456, "optional": true, "maximum": 1460, "minimum": 0, "type": "number"}
:param DeviceProxy: The device proxy for REST operations and session handling. Refer to `common/device_proxy.py`
URL for this object::
`https://<Hostname|Ip address>//axapi/v3/cgnv6/lsn/tcp/mss-clamp`. | 62598fc097e22403b383b138 |
class UserLoginForm(forms.Form): <NEW_LINE> <INDENT> user = forms.CharField( required=True, error_messages={'required': '用户名不能为空'} ) <NEW_LINE> pwd = forms.CharField( required=True, min_length=6, max_length=12, error_messages={'required': '密码不能为空', 'min_length': '密码长度不能小于6', 'max_length': '密码长度不能大于12'} ) | 登录表单验证 | 62598fc07b180e01f3e49167 |
class RegisterStatistics(object): <NEW_LINE> <INDENT> def __init__(self, op_type, statistic_type): <NEW_LINE> <INDENT> if not isinstance(op_type, six.string_types): <NEW_LINE> <INDENT> raise TypeError("op_type must be a string.") <NEW_LINE> <DEDENT> if "," in op_type: <NEW_LINE> <INDENT> raise TypeError("op_type must not contain a comma.") <NEW_LINE> <DEDENT> self._op_type = op_type <NEW_LINE> if not isinstance(statistic_type, six.string_types): <NEW_LINE> <INDENT> raise TypeError("statistic_type must be a string.") <NEW_LINE> <DEDENT> if "," in statistic_type: <NEW_LINE> <INDENT> raise TypeError("statistic_type must not contain a comma.") <NEW_LINE> <DEDENT> self._statistic_type = statistic_type <NEW_LINE> <DEDENT> def __call__(self, f): <NEW_LINE> <INDENT> _stats_registry.register(f, self._op_type + "," + self._statistic_type) <NEW_LINE> return f | A decorator for registering the statistics function for an op type.
This decorator is very similar to the RegisterShapes class, and can be defined
for an op type so that it gives a report on the resources used by an instance
of an operator, in the form of an OpStats object.
Well-known types of statistics include these so far:
- weight_parameters: For operations like MatMul, Conv, and BiasAdd that take
learned weights as inputs, this statistic captures how many numerical values
are used. This is good to know because the weights take up most of the size
of a typical serialized graph on disk.
- flops: When running a graph, the bulk of the computation happens doing
numerical calculations like matrix multiplications. This type allows a node
to return how many floating-point operations it takes to complete. The
total number of FLOPs for a graph is a good guide to its expected latency.
You can add your own statistics just by picking a new type string, registering
functions for the ops you care about, and then calling something like
python/tools/graph_metrics.py with the new type as an argument.
If a statistic for an op is registered multiple times, a KeyError will be
raised.
For example, you can define a new metric called doohickey for a Foo operation
by placing this in your code:
```python
@ops.RegisterStatistics("Foo", "doohickey")
def _calc_foo_bojangles(unused_graph, unused_node_def):
return ops.OpStats("doohickey", 20)
```
Then in client code you can retrieve the value by making this call:
```python
doohickey = ops.get_stats_for_node_def(graph, node_def, "doohickey")
```
If the NodeDef is for an op with a registered doohickey function, you'll get
back the calculated amount in doohickey.value, or None if it's not defined. | 62598fc05fdd1c0f98e5e1c2 |
class ProductDomain(RandomDomain): <NEW_LINE> <INDENT> is_ProductDomain = True <NEW_LINE> def __new__(cls, *domains): <NEW_LINE> <INDENT> symbols = sumsets([domain.symbols for domain in domains]) <NEW_LINE> domains2 = [] <NEW_LINE> for domain in domains: <NEW_LINE> <INDENT> if not domain.is_ProductDomain: <NEW_LINE> <INDENT> domains2.append(domain) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> domains2.extend(domain.domains) <NEW_LINE> <DEDENT> <DEDENT> domains2 = FiniteSet(*domains2) <NEW_LINE> if all(domain.is_Finite for domain in domains2): <NEW_LINE> <INDENT> from sympy.stats.frv import ProductFiniteDomain <NEW_LINE> cls = ProductFiniteDomain <NEW_LINE> <DEDENT> if all(domain.is_Continuous for domain in domains2): <NEW_LINE> <INDENT> from sympy.stats.crv import ProductContinuousDomain <NEW_LINE> cls = ProductContinuousDomain <NEW_LINE> <DEDENT> return Basic.__new__(cls, *domains2) <NEW_LINE> <DEDENT> @property <NEW_LINE> def sym_domain_dict(self): <NEW_LINE> <INDENT> return dict((symbol, domain) for domain in self.domains for symbol in domain.symbols) <NEW_LINE> <DEDENT> @property <NEW_LINE> def symbols(self): <NEW_LINE> <INDENT> return FiniteSet(*[sym for domain in self.domains for sym in domain.symbols]) <NEW_LINE> <DEDENT> @property <NEW_LINE> def domains(self): <NEW_LINE> <INDENT> return self.args <NEW_LINE> <DEDENT> @property <NEW_LINE> def set(self): <NEW_LINE> <INDENT> return ProductSet(domain.set for domain in self.domains) <NEW_LINE> <DEDENT> def __contains__(self, other): <NEW_LINE> <INDENT> for domain in self.domains: <NEW_LINE> <INDENT> elem = frozenset([item for item in other if sympify(domain.symbols.contains(item[0])) is S.true]) <NEW_LINE> if elem not in domain: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> return True <NEW_LINE> <DEDENT> def as_boolean(self): <NEW_LINE> <INDENT> return And(*[domain.as_boolean() for domain in self.domains]) | A domain resulting from the merger of two independent domains
See Also
========
sympy.stats.crv.ProductContinuousDomain
sympy.stats.frv.ProductFiniteDomain | 62598fc0cc40096d6161a2f1 |
class GetFileMetadataByExprResult(object): <NEW_LINE> <INDENT> def __init__(self, metadata=None, isSupported=None,): <NEW_LINE> <INDENT> self.metadata = metadata <NEW_LINE> self.isSupported = isSupported <NEW_LINE> <DEDENT> def read(self, iprot): <NEW_LINE> <INDENT> if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: <NEW_LINE> <INDENT> iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) <NEW_LINE> return <NEW_LINE> <DEDENT> iprot.readStructBegin() <NEW_LINE> while True: <NEW_LINE> <INDENT> (fname, ftype, fid) = iprot.readFieldBegin() <NEW_LINE> if ftype == TType.STOP: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> if fid == 1: <NEW_LINE> <INDENT> if ftype == TType.MAP: <NEW_LINE> <INDENT> self.metadata = {} <NEW_LINE> (_ktype659, _vtype660, _size658) = iprot.readMapBegin() <NEW_LINE> for _i662 in range(_size658): <NEW_LINE> <INDENT> _key663 = iprot.readI64() <NEW_LINE> _val664 = MetadataPpdResult() <NEW_LINE> _val664.read(iprot) <NEW_LINE> self.metadata[_key663] = _val664 <NEW_LINE> <DEDENT> iprot.readMapEnd() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> elif fid == 2: <NEW_LINE> <INDENT> if ftype == TType.BOOL: <NEW_LINE> <INDENT> self.isSupported = iprot.readBool() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> iprot.readFieldEnd() <NEW_LINE> <DEDENT> iprot.readStructEnd() <NEW_LINE> <DEDENT> def write(self, oprot): <NEW_LINE> <INDENT> if oprot._fast_encode is not None and self.thrift_spec is not None: <NEW_LINE> <INDENT> oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) <NEW_LINE> return <NEW_LINE> <DEDENT> oprot.writeStructBegin('GetFileMetadataByExprResult') <NEW_LINE> if self.metadata is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('metadata', TType.MAP, 1) <NEW_LINE> 
oprot.writeMapBegin(TType.I64, TType.STRUCT, len(self.metadata)) <NEW_LINE> for kiter665, viter666 in self.metadata.items(): <NEW_LINE> <INDENT> oprot.writeI64(kiter665) <NEW_LINE> viter666.write(oprot) <NEW_LINE> <DEDENT> oprot.writeMapEnd() <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> if self.isSupported is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('isSupported', TType.BOOL, 2) <NEW_LINE> oprot.writeBool(self.isSupported) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> oprot.writeFieldStop() <NEW_LINE> oprot.writeStructEnd() <NEW_LINE> <DEDENT> def validate(self): <NEW_LINE> <INDENT> if self.metadata is None: <NEW_LINE> <INDENT> raise TProtocolException(message='Required field metadata is unset!') <NEW_LINE> <DEDENT> if self.isSupported is None: <NEW_LINE> <INDENT> raise TProtocolException(message='Required field isSupported is unset!') <NEW_LINE> <DEDENT> return <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] <NEW_LINE> return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not (self == other) | Attributes:
- metadata
- isSupported | 62598fc097e22403b383b139 |
class ApiConfigRegistry(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.__registered_classes = set() <NEW_LINE> self.__api_configs = set() <NEW_LINE> self.__api_methods = {} <NEW_LINE> <DEDENT> def register_backend(self, config_contents): <NEW_LINE> <INDENT> if config_contents is None: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> parsed_config = json.loads(config_contents) <NEW_LINE> self.__register_class(parsed_config) <NEW_LINE> self.__api_configs.add(config_contents) <NEW_LINE> self.__register_methods(parsed_config) <NEW_LINE> <DEDENT> def __register_class(self, parsed_config): <NEW_LINE> <INDENT> methods = parsed_config.get('methods') <NEW_LINE> if not methods: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> service_classes = set() <NEW_LINE> for method in methods.itervalues(): <NEW_LINE> <INDENT> rosy_method = method.get('rosyMethod') <NEW_LINE> if rosy_method and '.' in rosy_method: <NEW_LINE> <INDENT> method_class = rosy_method.split('.', 1)[0] <NEW_LINE> service_classes.add(method_class) <NEW_LINE> <DEDENT> <DEDENT> for service_class in service_classes: <NEW_LINE> <INDENT> if service_class in self.__registered_classes: <NEW_LINE> <INDENT> raise api_exceptions.ApiConfigurationError( 'API class %s has already been registered.' 
% service_class) <NEW_LINE> <DEDENT> self.__registered_classes.add(service_class) <NEW_LINE> <DEDENT> <DEDENT> def __register_methods(self, parsed_config): <NEW_LINE> <INDENT> methods = parsed_config.get('methods') <NEW_LINE> if not methods: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> for method_name, method in methods.iteritems(): <NEW_LINE> <INDENT> self.__api_methods[method_name] = method.get('rosyMethod') <NEW_LINE> <DEDENT> <DEDENT> def lookup_api_method(self, api_method_name): <NEW_LINE> <INDENT> return self.__api_methods.get(api_method_name) <NEW_LINE> <DEDENT> def all_api_configs(self): <NEW_LINE> <INDENT> return list(self.__api_configs) | Registry of active APIs to be registered with Google API Server. | 62598fc063b5f9789fe853a2 |
class SyncManager: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.server_address = "" <NEW_LINE> self.username = "" <NEW_LINE> self.password = "" <NEW_LINE> self.certificate = "" <NEW_LINE> self.certificate_file = None <NEW_LINE> self.sync = None <NEW_LINE> <DEDENT> def __del__(self): <NEW_LINE> <INDENT> if self.certificate_file: <NEW_LINE> <INDENT> self.certificate_file.close() <NEW_LINE> <DEDENT> <DEDENT> def get_binary_sync_settings(self): <NEW_LINE> <INDENT> if self.sync: <NEW_LINE> <INDENT> return Packer.compress(json.dumps({ "server-address": self.server_address, "username": self.username, "password": self.password, "certificate": self.certificate }).encode('utf-8')) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return b'' <NEW_LINE> <DEDENT> <DEDENT> def load_binary_sync_settings(self, data): <NEW_LINE> <INDENT> settings_dict = json.loads(str(Packer.decompress(data), encoding='utf-8')) <NEW_LINE> if "server-address" in settings_dict and "username" in settings_dict and "password" in settings_dict and "certificate" in settings_dict: <NEW_LINE> <INDENT> self.server_address = settings_dict["server-address"] <NEW_LINE> self.username = settings_dict["username"] <NEW_LINE> self.password = settings_dict["password"] <NEW_LINE> self.certificate = settings_dict["certificate"] <NEW_LINE> if self.certificate_file: <NEW_LINE> <INDENT> self.certificate_file.close() <NEW_LINE> <DEDENT> self.certificate_file = NamedTemporaryFile() <NEW_LINE> self.certificate_file.write(self.certificate.encode('utf-8')) <NEW_LINE> self.certificate_file.seek(0) <NEW_LINE> self.create_sync() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print("Sync settings konnten nicht geladen werden.") <NEW_LINE> <DEDENT> <DEDENT> def ask_for_sync_settings(self): <NEW_LINE> <INDENT> print("Bitte geben Sie die Einstellungen für Ihren Synchronisations-Server an...") <NEW_LINE> self.server_address = input("URL: ") <NEW_LINE> self.username = input("Benutzername: ") <NEW_LINE> self.password = 
input("Passwort: ") <NEW_LINE> line = input("Zertifikat im .pem-Format (beenden mit einer Leerzeile): ") <NEW_LINE> while len(line) > 0: <NEW_LINE> <INDENT> self.certificate += line + "\n" <NEW_LINE> line = input("") <NEW_LINE> <DEDENT> self.certificate += line <NEW_LINE> if self.certificate_file: <NEW_LINE> <INDENT> self.certificate_file.close() <NEW_LINE> <DEDENT> self.certificate_file = NamedTemporaryFile() <NEW_LINE> self.certificate_file.write(self.certificate.encode('utf-8')) <NEW_LINE> self.certificate_file.seek(0) <NEW_LINE> self.create_sync() <NEW_LINE> print("Teste die Verbindung...") <NEW_LINE> if len(self.sync.pull()) > 0: <NEW_LINE> <INDENT> print("Verbindung erfolgreich getestet.") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print("Es konnte keine Verbindung aufgebaut werden.") <NEW_LINE> <DEDENT> <DEDENT> def create_sync(self): <NEW_LINE> <INDENT> self.sync = Sync(self.server_address, self.username, self.password, self.certificate_file.name) <NEW_LINE> <DEDENT> def pull(self): <NEW_LINE> <INDENT> if self.sync: <NEW_LINE> <INDENT> return self.sync.pull() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False, '' <NEW_LINE> <DEDENT> <DEDENT> def push(self, data): <NEW_LINE> <INDENT> if self.sync: <NEW_LINE> <INDENT> if not self.sync.push(data): <NEW_LINE> <INDENT> print("Synchronisation fehlgeschlagen.") <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> print("Sie haben keine gültigen Einstellungen für den sync server.") | Synchronization manager. This initializes and stores settings and handles the Sync object. | 62598fc04f6381625f1995da |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.