code stringlengths 4 4.48k | docstring stringlengths 1 6.45k | _id stringlengths 24 24 |
|---|---|---|
class ISO8601Interval(IntervalBase): <NEW_LINE> <INDENT> NULL_INTERVAL_STRING = "P0Y" <NEW_LINE> TYPE = CYCLER_TYPE_ISO8601 <NEW_LINE> TYPE_SORT_KEY = CYCLER_TYPE_SORT_KEY_ISO8601 <NEW_LINE> __slots__ = ('value') <NEW_LINE> @classmethod <NEW_LINE> def get_null(cls): <NEW_LINE> <INDENT> return ISO8601Interval("P0Y") <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def get_null_offset(cls): <NEW_LINE> <INDENT> return ISO8601Interval("+P0Y") <NEW_LINE> <DEDENT> def standardise(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.value = str(interval_parse(self.value)) <NEW_LINE> <DEDENT> except IsodatetimeError: <NEW_LINE> <INDENT> raise IntervalParsingError(type(self), self.value) <NEW_LINE> <DEDENT> return self <NEW_LINE> <DEDENT> def add(self, other): <NEW_LINE> <INDENT> if isinstance(other, ISO8601Interval): <NEW_LINE> <INDENT> return ISO8601Interval( self._iso_interval_add(self.value, other.value)) <NEW_LINE> <DEDENT> return other + self <NEW_LINE> <DEDENT> def _cmp(self, other: 'IntervalBase') -> int: <NEW_LINE> <INDENT> return self._iso_interval_cmp(self.value, other.value) <NEW_LINE> <DEDENT> def sub(self, other): <NEW_LINE> <INDENT> return ISO8601Interval( self._iso_interval_sub(self.value, other.value)) <NEW_LINE> <DEDENT> def __abs__(self): <NEW_LINE> <INDENT> return ISO8601Interval( self._iso_interval_abs(self.value, self.NULL_INTERVAL_STRING)) <NEW_LINE> <DEDENT> def __mul__(self, factor): <NEW_LINE> <INDENT> return ISO8601Interval(self._iso_interval_mul(self.value, factor)) <NEW_LINE> <DEDENT> def __bool__(self): <NEW_LINE> <INDENT> return self._iso_interval_nonzero(self.value) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> @lru_cache(10000) <NEW_LINE> def _iso_interval_abs(interval_string, other_interval_string): <NEW_LINE> <INDENT> interval = interval_parse(interval_string) <NEW_LINE> other = interval_parse(other_interval_string) <NEW_LINE> if interval < other: <NEW_LINE> <INDENT> return str(interval * -1) <NEW_LINE> <DEDENT> return interval_string 
<NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> @lru_cache(10000) <NEW_LINE> def _iso_interval_add(interval_string, other_interval_string): <NEW_LINE> <INDENT> interval = interval_parse(interval_string) <NEW_LINE> other = interval_parse(other_interval_string) <NEW_LINE> return str(interval + other) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> @lru_cache(10000) <NEW_LINE> def _iso_interval_cmp(interval_string, other_interval_string): <NEW_LINE> <INDENT> interval = interval_parse(interval_string) <NEW_LINE> other = interval_parse(other_interval_string) <NEW_LINE> return cmp(interval, other) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> @lru_cache(10000) <NEW_LINE> def _iso_interval_sub(interval_string, other_interval_string): <NEW_LINE> <INDENT> interval = interval_parse(interval_string) <NEW_LINE> other = interval_parse(other_interval_string) <NEW_LINE> return str(interval - other) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> @lru_cache(10000) <NEW_LINE> def _iso_interval_mul(interval_string, factor): <NEW_LINE> <INDENT> interval = interval_parse(interval_string) <NEW_LINE> return str(interval * factor) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> @lru_cache(10000) <NEW_LINE> def _iso_interval_nonzero(interval_string): <NEW_LINE> <INDENT> interval = interval_parse(interval_string) <NEW_LINE> return bool(interval) | The interval between points in an ISO8601 date time sequence. | 62598fd8c4546d3d9def7529 |
class YAMLParser(BaseParser): <NEW_LINE> <INDENT> media_type = "application/yaml" <NEW_LINE> def parse(self, stream, media_type=None, parser_context=None): <NEW_LINE> <INDENT> assert yaml, "YAMLParser requires pyyaml to be installed" <NEW_LINE> parser_context = parser_context or {} <NEW_LINE> encoding = parser_context.get("encoding", settings.DEFAULT_CHARSET) <NEW_LINE> try: <NEW_LINE> <INDENT> data = stream.read().decode(encoding) <NEW_LINE> return yaml.safe_load(data) <NEW_LINE> <DEDENT> except (ValueError, yaml.parser.ParserError) as exc: <NEW_LINE> <INDENT> raise ParseError("YAML parse error - %s" % force_str(exc)) | Parses YAML-serialized data. | 62598fd89f28863672818b24 |
class SCD30Conf(AbstractSCD30Conf): <NEW_LINE> <INDENT> def __init__(self, sample_interval, temperature_offset): <NEW_LINE> <INDENT> super().__init__(sample_interval, temperature_offset) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def scd30(): <NEW_LINE> <INDENT> baseline = SCD30Baseline.load(Host, skeleton=True) <NEW_LINE> return SCD30(baseline) <NEW_LINE> <DEDENT> def __str__(self, *args, **kwargs): <NEW_LINE> <INDENT> return "SCD30Conf(dfe):{sample_interval:%s, temperature_offset:%s}" % (self.sample_interval, self.temperature_offset) | classdocs | 62598fd826238365f5fad0b1 |
class Vulns(QWidget): <NEW_LINE> <INDENT> def __init__(self, args, parent): <NEW_LINE> <INDENT> super().__init__(parent) <NEW_LINE> self.lst = args[0] <NEW_LINE> self.database = args[1] <NEW_LINE> self.add_fct = args[2] <NEW_LINE> self.tabs = {} <NEW_LINE> self.init_tab() <NEW_LINE> self.grid = QGridLayout() <NEW_LINE> self.grid.setSpacing(5) <NEW_LINE> self.grid.setContentsMargins(5, 5, 5, 5) <NEW_LINE> self.grid.addWidget(self.tabw) <NEW_LINE> self.setLayout(self.grid) <NEW_LINE> <DEDENT> def init_tab(self): <NEW_LINE> <INDENT> self.tabw = QTabWidget() <NEW_LINE> self.tabw.setTabsClosable(True) <NEW_LINE> self.tabw.tabCloseRequested.connect(self.close_tab) <NEW_LINE> tab_lst = OrderedDict() <NEW_LINE> tab_lst["All"] = self.lst <NEW_LINE> for label, lst in tab_lst.items(): <NEW_LINE> <INDENT> self.add_tab(label, lst, self.database, self.add_fct) <NEW_LINE> <DEDENT> self.tabw.tabBar().setTabButton(0, QTabBar.RightSide, None) <NEW_LINE> self.tabw.tabBar().setTabButton(0, QTabBar.LeftSide, None) <NEW_LINE> self.tabs["All"].fields["categorySort"].init_sorts() <NEW_LINE> <DEDENT> def add_tab(self, label, lst, database, add_fct=None): <NEW_LINE> <INDENT> if label in self.tabs: <NEW_LINE> <INDENT> for i in range(self.tabw.count()): <NEW_LINE> <INDENT> if self.tabw.tabText(i) == label: <NEW_LINE> <INDENT> self.tabw.setCurrentWidget(self.tabw.widget(i)) <NEW_LINE> return <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> if label == "All" or len(LANGUAGES) == 1: <NEW_LINE> <INDENT> self.tabs[label] = Tab(self, lst, database, add_fct) <NEW_LINE> self.tabw.addTab(self.tabs[label], label) <NEW_LINE> self.tabw.setCurrentWidget(self.tabs[label]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> tabw = QTabWidget() <NEW_LINE> tabs = OrderedDict() <NEW_LINE> for lang in LANGUAGES: <NEW_LINE> <INDENT> tabs[lang] = Tab(self, lst[lang], database, add_fct) <NEW_LINE> tabw.addTab(tabs[lang], lang) <NEW_LINE> <DEDENT> self.tabs[label] = tabs <NEW_LINE> self.tabw.addTab(tabw, 
label) <NEW_LINE> self.tabw.setCurrentWidget(tabw) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def close_tab(self, index): <NEW_LINE> <INDENT> if len(LANGUAGES) == 1: <NEW_LINE> <INDENT> self.tabs[self.tabw.tabText(index)].save_histories() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> for lang in LANGUAGES: <NEW_LINE> <INDENT> self.tabs[self.tabw.tabText(index)][lang].save_histories() <NEW_LINE> <DEDENT> <DEDENT> del self.tabs[self.tabw.tabText(index)] <NEW_LINE> self.tabw.removeTab(index) <NEW_LINE> <DEDENT> def load(self, values): <NEW_LINE> <INDENT> return self.tabs["All"].load(values) <NEW_LINE> <DEDENT> def save(self): <NEW_LINE> <INDENT> return self.tabs["All"].save() | Class for the features of the "Vulns" tab. | 62598fd8a219f33f346c6d56 |
class TestCollector(BasicTestCollector): <NEW_LINE> <INDENT> handled_getters = [FunctionalDocTestGetter, UnitDocTestGetter, PythonTestGetter, SimpleDocTestGetter] | A TestCollector that wraps doctests and PythonTests.
| 62598fd88a349b6b43686790 |
class PycbcDarkVsBrightInjectionsExecutable(Executable): <NEW_LINE> <INDENT> current_retention_level = Executable.FINAL_RESULT <NEW_LINE> def __init__(self, cp, exe_name, universe=None, ifos=None, out_dir=None, tags=None): <NEW_LINE> <INDENT> if tags is None: <NEW_LINE> <INDENT> tags = [] <NEW_LINE> <DEDENT> Executable.__init__(self, cp, exe_name, universe, ifos, out_dir, tags=tags) <NEW_LINE> self.cp = cp <NEW_LINE> self.out_dir = out_dir <NEW_LINE> self.exe_name = exe_name <NEW_LINE> <DEDENT> def create_node(self, parent, segment, tags=None): <NEW_LINE> <INDENT> if tags is None: <NEW_LINE> <INDENT> tags = [] <NEW_LINE> <DEDENT> node = Node(self) <NEW_LINE> if not parent: <NEW_LINE> <INDENT> raise ValueError("Must provide an input file.") <NEW_LINE> <DEDENT> node = Node(self) <NEW_LINE> node.add_input_opt('-i', parent) <NEW_LINE> if self.has_opt('write-compress'): <NEW_LINE> <INDENT> ext = '.xml.gz' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> ext = '.xml' <NEW_LINE> <DEDENT> tag=['POTENTIALLY_BRIGHT'] <NEW_LINE> node.new_output_file_opt(segment, ext, '--output-bright', store_file=self.retain_files, tags=tags+tag) <NEW_LINE> tag=['DIM_ONLY'] <NEW_LINE> node.new_output_file_opt(segment, ext, '--output-dim', store_file=self.retain_files, tags=tags+tag) <NEW_LINE> return node | The clase used to create jobs for the pycbc_dark_vs_bright_injections Executable. | 62598fd8656771135c489bc2 |
@dataclass(frozen=True) <NEW_LINE> class ParseErrorJsonFile(ReportItemMessage): <NEW_LINE> <INDENT> file_type_code: file_type_codes.FileTypeCode <NEW_LINE> line_number: int <NEW_LINE> column_number: int <NEW_LINE> position: int <NEW_LINE> reason: str <NEW_LINE> full_msg: str <NEW_LINE> file_path: Optional[str] <NEW_LINE> _code = codes.PARSE_ERROR_JSON_FILE <NEW_LINE> @property <NEW_LINE> def message(self) -> str: <NEW_LINE> <INDENT> return ( "Unable to parse {_file_type} file{_file_path}: {full_msg}" ).format( _file_path=format_optional(self.file_path, " '{}'"), _file_type=_format_file_role(self.file_type_code), full_msg=self.full_msg, ) | Unable to parse a file with JSON data
file_type_code -- item from pcs.common.file_type_codes
line_number -- the line where parsing failed
column_number -- the column where parsing failed
position -- the start index of the file where parsing failed
reason -- the unformatted error message
full_msg -- full error message including above int attributes
file_path -- path to the parsed file if available | 62598fd8283ffb24f3cf3dd1 |
class MockClientWorker(object): <NEW_LINE> <INDENT> def __init__(self, client=None): <NEW_LINE> <INDENT> self.stats_collector = MockStatsCollector() <NEW_LINE> self.client = client <NEW_LINE> <DEDENT> def start(self): <NEW_LINE> <INDENT> pass | Mock client worker for GetClientStatsActionTest. | 62598fd8ad47b63b2c5a7da3 |
class PiJukeboxScreens(Screens): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> Screens.__init__(self) <NEW_LINE> self.screen_list.append(ScreenPlaying(SCREEN)) <NEW_LINE> self.screen_list.append(ScreenPlaylist(SCREEN)) <NEW_LINE> self.screen_list.append(ScreenLibrary(SCREEN)) <NEW_LINE> self.screen_list.append(ScreenDirectory(SCREEN)) <NEW_LINE> self.screen_list.append(ScreenRadio(SCREEN)) <NEW_LINE> <DEDENT> def mpd_updates(self): <NEW_LINE> <INDENT> self.screen_list[self.current_index].update() | Manages Pi Jukebox's main screens.
- Player screen
- Library screen
Handles screen switching, clicking and swiping and displaying mpd status
updates on screen(s) | 62598fd80fa83653e46f5439 |
class PictureDownloadHandler(PictureObjectHandler): <NEW_LINE> <INDENT> @asynchronous <NEW_LINE> def on_picture_found(self, picture, id): <NEW_LINE> <INDENT> self.picture = picture <NEW_LINE> data = dict() <NEW_LINE> data["picture"] = picture.toDict(localized=False) <NEW_LINE> data["contact"] = UserManager.getUser().asContact().toDict() <NEW_LINE> contact = ContactManager.getTrustedContact(picture.authorKey) <NEW_LINE> client = ContactClient() <NEW_LINE> body = json_encode(data) <NEW_LINE> try: <NEW_LINE> <INDENT> client.post(contact, u"pictures/contact/download/", body, self.on_download_finished) <NEW_LINE> <DEDENT> except HTTPError: <NEW_LINE> <INDENT> self.return_failure("Cannot download picture from contact.") <NEW_LINE> <DEDENT> <DEDENT> def on_download_finished(self, response): <NEW_LINE> <INDENT> logger.info(self.picture) <NEW_LINE> if response.code == 200: <NEW_LINE> <INDENT> self.picture.put_attachment(response.body, self.picture.path) <NEW_LINE> thumbnail = self.get_thumbnail( response.body, self.picture.path, (1000, 1000)) <NEW_LINE> thbuffer = thumbnail.read() <NEW_LINE> self.picture.put_attachment(thbuffer, "prev_" + self.picture.path) <NEW_LINE> os.remove("th_" + self.picture.path) <NEW_LINE> self.picture.isFile = True <NEW_LINE> self.picture.save() <NEW_LINE> self.return_success("Picture successfuly downloaded.") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.return_failure("Picture cannot be retrieved.") <NEW_LINE> <DEDENT> <DEDENT> def get_thumbnail(self, filebody, filename, size): <NEW_LINE> <INDENT> file = open(filename, "w") <NEW_LINE> file.write(filebody) <NEW_LINE> file.close() <NEW_LINE> image = Image.open(filename) <NEW_LINE> image.thumbnail(size, Image.ANTIALIAS) <NEW_LINE> image.save("th_" + filename) <NEW_LINE> file = open(filename) <NEW_LINE> os.remove(filename) <NEW_LINE> return open("th_" + filename) | Handler that allows newebe owner to download original file of the picture
inside its newebe to make it available through UI. | 62598fd8099cdd3c63675687 |
class TestLexer(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.parser = Parser() <NEW_LINE> <DEDENT> def test_keyValue(self): <NEW_LINE> <INDENT> archieml = 'key: This is a value' <NEW_LINE> expected = "[LexToken(IDENTIFIER,'key',1,0), LexToken(COLON,':',1,3), LexToken(TEXT,'This is a value',1,5)]" <NEW_LINE> output = str(self.parser.tokenize(archieml)) <NEW_LINE> self.assertEqual(expected, output) | Test lexing. This is an intermediate step to actually converting an
ArchieML string to json. | 62598fd8a219f33f346c6d58 |
class SecurityGroupServerRpcApiMixin(object): <NEW_LINE> <INDENT> def security_group_rules_for_devices(self, context, devices): <NEW_LINE> <INDENT> LOG.debug(_("Get security group rules " "for devices via rpc %r"), devices) <NEW_LINE> return self.call(context, self.make_msg('security_group_rules_for_devices', devices=devices), version=SG_RPC_VERSION, topic=self.topic) | A mix-in that enable SecurityGroup support in plugin rpc
| 62598fd8adb09d7d5dc0aacc |
class SockServer(ThreadingTCPServer): <NEW_LINE> <INDENT> def __init__(self, server_address, RequestHandlerClass = SocksHandler, version = V_SOCKS5, rmt_addr = [], user = None, pwd = None, verbose = False, log = None, log_fmt = None, time_fmt = None): <NEW_LINE> <INDENT> if not issubclass(RequestHandlerClass, SocksHandler): <NEW_LINE> <INDENT> msg = "{} is not a Sockshandler class" <NEW_LINE> raise ValueError(msg.format(RequestHandlerClass.__name__)) <NEW_LINE> <DEDENT> if ((user and not isinstance(user, str)) or (pwd and not isinstance(pwd, str)) or not isinstance(rmt_addr, list) or version not in _VERSIONS): <NEW_LINE> <INDENT> raise ValueError("Invalid arguments.") <NEW_LINE> <DEDENT> ThreadingTCPServer.__init__(self, server_address, RequestHandlerClass) <NEW_LINE> self.version = version <NEW_LINE> self.rmt_addr = rmt_addr <NEW_LINE> self.user = user <NEW_LINE> self.pwd = pwd <NEW_LINE> self.key_event = threading.Event() <NEW_LINE> self.logger = _set_logger(verbose, log, log_fmt, time_fmt) <NEW_LINE> <DEDENT> def verify_request(self, request, client_address): <NEW_LINE> <INDENT> if self.rmt_addr: <NEW_LINE> <INDENT> if client_address in rmt_addr: <NEW_LINE> <INDENT> log_msg = "{}:{} TCP connection granted" <NEW_LINE> self.logger.info(log_msg.format(*client_address)) <NEW_LINE> return True <NEW_LINE> <DEDENT> client_addr, client_port = client_address <NEW_LINE> for addr, port in rmt_addr: <NEW_LINE> <INDENT> if port == 0 and client_addr == addr: <NEW_LINE> <INDENT> log_msg = "{}:{} TCP connection granted" <NEW_LINE> self.logger.info(log_msg.format(*client_address)) <NEW_LINE> return True <NEW_LINE> <DEDENT> <DEDENT> log_msg = "{}:{} TCP connection refused" <NEW_LINE> self.logger.info(log_msg.format(*client_address)) <NEW_LINE> return False <NEW_LINE> <DEDENT> log_msg = "{}:{} TCP connection granted" <NEW_LINE> self.logger.info(log_msg.format(*client_address)) <NEW_LINE> return True <NEW_LINE> <DEDENT> def serve_forever(self, poll_interval=0.5): <NEW_LINE> 
<INDENT> self.key_event.clear() <NEW_LINE> try: <NEW_LINE> <INDENT> ThreadingTCPServer.serve_forever(self, poll_interval) <NEW_LINE> <DEDENT> except KeyboardInterrupt as e: <NEW_LINE> <INDENT> self.key_event.set() | SOCKS server that handle all SOCKS clients' requests. | 62598fd8ab23a570cc2d5017 |
class lazy_array_loader(object): <NEW_LINE> <INDENT> def __init__(self, path, data_type='data', mem_map=False, map_fn=None): <NEW_LINE> <INDENT> lazypath = get_lazy_path(path) <NEW_LINE> datapath = os.path.join(lazypath, data_type) <NEW_LINE> self._file = open(datapath, 'rb') <NEW_LINE> self.file = self._file <NEW_LINE> self.mem_map = mem_map <NEW_LINE> if self.mem_map: <NEW_LINE> <INDENT> self.file = mmap.mmap(self.file.fileno(), 0, prot=mmap.PROT_READ) <NEW_LINE> <DEDENT> lenpath = os.path.join(lazypath, data_type + '.len.pkl') <NEW_LINE> self.lens = pkl.load(open(lenpath, 'rb')) <NEW_LINE> self.ends = list(accumulate(self.lens)) <NEW_LINE> self.dumb_ends = list(self.ends) <NEW_LINE> self.read_lock = Lock() <NEW_LINE> self.process_fn = map_fn <NEW_LINE> self.map_fn = map_fn <NEW_LINE> self._tokenizer = None <NEW_LINE> <DEDENT> def SetTokenizer(self, tokenizer): <NEW_LINE> <INDENT> if tokenizer is None: <NEW_LINE> <INDENT> if not hasattr(self, '_tokenizer'): <NEW_LINE> <INDENT> self._tokenizer = tokenizer <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> self._tokenizer = tokenizer <NEW_LINE> <DEDENT> self.map_fn = ProcessorTokenizer(tokenizer, self.process_fn) <NEW_LINE> <DEDENT> def GetTokenizer(self): <NEW_LINE> <INDENT> return self._tokenizer <NEW_LINE> <DEDENT> def __getitem__(self, index): <NEW_LINE> <INDENT> if not isinstance(index, slice): <NEW_LINE> <INDENT> if index == 0: <NEW_LINE> <INDENT> start = 0 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> start = self.ends[index - 1] <NEW_LINE> <DEDENT> end = self.ends[index] <NEW_LINE> rtn = self.file_read(start, end) <NEW_LINE> if self.map_fn is not None: <NEW_LINE> <INDENT> return self.map_fn(rtn) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> chr_lens = self.ends[index] <NEW_LINE> if index.start == 0 or index.start is None: <NEW_LINE> <INDENT> start = 0 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> start = self.ends[index.start - 1] <NEW_LINE> <DEDENT> stop = chr_lens[-1] <NEW_LINE> strings = 
self.file_read(start, stop) <NEW_LINE> rtn = split_strings(strings, start, chr_lens) <NEW_LINE> if self.map_fn is not None: <NEW_LINE> <INDENT> return self.map_fn([s for s in rtn]) <NEW_LINE> <DEDENT> <DEDENT> return rtn <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return len(self.ends) <NEW_LINE> <DEDENT> def file_read(self, start=0, end=None): <NEW_LINE> <INDENT> self.read_lock.acquire() <NEW_LINE> self.file.seek(start) <NEW_LINE> if end is None: <NEW_LINE> <INDENT> rtn = self.file.read() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> rtn = self.file.read(end - start) <NEW_LINE> <DEDENT> self.read_lock.release() <NEW_LINE> rtn = rtn.decode('utf-8', 'ignore') <NEW_LINE> if self.mem_map: <NEW_LINE> <INDENT> rtn = rtn.decode('unicode_escape') <NEW_LINE> <DEDENT> return rtn | Arguments:
path: path to directory where array entries are concatenated into one big string file
and the .len file are located
data_type (str): Some datsets have multiple fields that are stored in different paths.
`data_type` specifies which of these fields to load in this class
mem_map (boolean): Specifies whether to memory map file `path`
map_fn (callable): Fetched strings are passed through map_fn before being returned.
Example of lazy loader directory structure:
file.json
file.lazy/
data_type1
data_type1.len.pkl
data_type2
data_type2.len.pkl | 62598fd8c4546d3d9def752c |
class EmploymentDetailsListSerializer(AbstractBaseSerializer): <NEW_LINE> <INDENT> department_name = serializers.StringRelatedField(source=DEPARTMENT, read_only=True) <NEW_LINE> designation_name = serializers.StringRelatedField(source=DESIGNATION, read_only=True) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> list_serializer_class = FilteredListSerializer <NEW_LINE> model = EmploymentDetails <NEW_LINE> fields = [ 'id', 'department', 'department_name', 'designation', 'designation_name', 'employment_start_date', 'employment_end_date', 'deleted_status' ] | Serializer for listing Employment Details only | 62598fd8377c676e912f7024 |
class CommentFilter(django_filters.rest_framework.FilterSet): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> model = Comments <NEW_LINE> fields = ['article'] | 文章的过滤类 | 62598fd8dc8b845886d53b12 |
class V2vKeystoneContext(base_wsgi.Middleware): <NEW_LINE> <INDENT> @webob.dec.wsgify(RequestClass=base_wsgi.Request) <NEW_LINE> def __call__(self, req): <NEW_LINE> <INDENT> user_id = req.headers.get('X_USER') <NEW_LINE> user_id = req.headers.get('X_USER_ID', user_id) <NEW_LINE> if user_id is None: <NEW_LINE> <INDENT> LOG.debug("Neither X_USER_ID nor X_USER found in request") <NEW_LINE> return webob.exc.HTTPUnauthorized() <NEW_LINE> <DEDENT> roles = [r.strip() for r in req.headers.get('X_ROLE', '').split(',')] <NEW_LINE> if 'X_TENANT_ID' in req.headers: <NEW_LINE> <INDENT> project_id = req.headers['X_TENANT_ID'] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> project_id = req.headers['X_TENANT'] <NEW_LINE> <DEDENT> project_name = req.headers.get('X_TENANT_NAME') <NEW_LINE> req_id = req.environ.get(request_id.ENV_REQUEST_ID) <NEW_LINE> auth_token = req.headers.get('X_AUTH_TOKEN', req.headers.get('X_STORAGE_TOKEN')) <NEW_LINE> remote_address = req.remote_addr <NEW_LINE> service_catalog = None <NEW_LINE> if req.headers.get('X_SERVICE_CATALOG') is not None: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> catalog_header = req.headers.get('X_SERVICE_CATALOG') <NEW_LINE> service_catalog = jsonutils.loads(catalog_header) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> raise webob.exc.HTTPInternalServerError( explanation=_('Invalid service catalog json.')) <NEW_LINE> <DEDENT> <DEDENT> if CONF.use_forwarded_for: <NEW_LINE> <INDENT> remote_address = req.headers.get('X-Forwarded-For', remote_address) <NEW_LINE> <DEDENT> token_info = req.environ['keystone.token_info'] <NEW_LINE> try: <NEW_LINE> <INDENT> user_name = token_info['access']['user']['username'] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> user_name = None <NEW_LINE> <DEDENT> ctx = context.RequestContext(user_id, project_id, project_name=project_name, roles=roles, auth_token=auth_token, remote_address=remote_address, service_catalog=service_catalog, request_id=req_id, auth_token_info=token_info, 
auth_url=CONF.keystone_authtoken.auth_url, tenant_id=project_id, user_name=user_name) <NEW_LINE> req.environ['conveyor.context'] = ctx <NEW_LINE> return self.application <NEW_LINE> <DEDENT> def _get_roles(self, req): <NEW_LINE> <INDENT> roles = req.headers.get('X_ROLES', '') <NEW_LINE> return [r.strip() for r in roles.split(',')] | Make a request context from keystone headers. | 62598fd87cff6e4e811b5f7e |
class CollectorThread(threading.Thread): <NEW_LINE> <INDENT> def __init__(self, client): <NEW_LINE> <INDENT> threading.Thread.__init__(self) <NEW_LINE> self.client = client <NEW_LINE> self.name = 'gw1000-collector' <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.client.collect_sensor_data() <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> log_traceback_critical(' **** ') | Class used to collect data via the GW1000 API in a thread. | 62598fd88a349b6b43686796 |
class IAddEditViewTitle(Interface): <NEW_LINE> <INDENT> pass | Demographics field add/edit view title. | 62598fd8956e5f7376df5928 |
class HashTable: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.size = 11 <NEW_LINE> self.slots = [None] * self.size <NEW_LINE> self.data = [None] * self.size <NEW_LINE> <DEDENT> def __getitem__(self, key): <NEW_LINE> <INDENT> return self.get(key) <NEW_LINE> <DEDENT> def __setitem__(self, key, value): <NEW_LINE> <INDENT> self.insert(key, value) <NEW_LINE> <DEDENT> def hash_function(self, item, size): <NEW_LINE> <INDENT> return item % size <NEW_LINE> <DEDENT> def insert(self, key, value): <NEW_LINE> <INDENT> hash_value = self.hash_function(key, self.size) <NEW_LINE> if self.slots[hash_value] is None: <NEW_LINE> <INDENT> self.slots[hash_value] = key <NEW_LINE> ul = UnorderedList() <NEW_LINE> ul.add(key, value) <NEW_LINE> self.data[hash_value] = ul <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> existing_ul = self.data[hash_value] <NEW_LINE> existing_ul.add(key, value) <NEW_LINE> <DEDENT> <DEDENT> def get(self, key): <NEW_LINE> <INDENT> hash_value = self.hash_function(key, self.size) <NEW_LINE> return self.data[hash_value].searchByKey(key).getData() | Hastable w/chaining | 62598fd8dc8b845886d53b14 |
class IsServiceOwner(permissions.BasePermission): <NEW_LINE> <INDENT> def has_permission(self, request, view): <NEW_LINE> <INDENT> service_uid = request.data.get('service_uid') <NEW_LINE> if service_uid is None: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return has_permission_to_service( request.user, service_uid, check_by_uid=True ) | Checks if given user is an owner of the service given by service_uid
that is coming in request's payload. Can easily be extended to get
service_uid from URL. | 62598fd8656771135c489bc8 |
class SC2GymEnv(gym.Env): <NEW_LINE> <INDENT> def __init__(self, map_name='MoveToBeacon', realtime=False, visualize=True, replay_dir='replays'): <NEW_LINE> <INDENT> super(SC2GymEnv, self).__init__() <NEW_LINE> self._env = sc2_env.SC2Env( map_name=map_name, players=[sc2_env.Agent(sc2_env.Race.terran, "Tergot"), sc2_env.Bot(sc2_env.Race.random, sc2_env.Difficulty.very_easy)], agent_interface_format=features.AgentInterfaceFormat( feature_dimensions=features.Dimensions( screen=96, minimap=64), use_feature_units=True), step_mul=16, game_steps_per_episode=0, realtime=realtime, save_replay_episodes=1, replay_dir=replay_dir, visualize=visualize) <NEW_LINE> self._episode = 0 <NEW_LINE> self._num_step = 0 <NEW_LINE> self._episode_reward = 0 <NEW_LINE> self._total_reward = 0 <NEW_LINE> <DEDENT> def step(self, action): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> obs = self._env.step(action)[0] <NEW_LINE> reward = obs.reward <NEW_LINE> self._episode_reward += reward <NEW_LINE> self._total_reward += reward <NEW_LINE> return obs, reward, obs.step_type == StepType.LAST, {} <NEW_LINE> <DEDENT> except KeyboardInterrupt: <NEW_LINE> <INDENT> logger.info("Interrupted. 
Quitting...") <NEW_LINE> return None, 0, True, {} <NEW_LINE> <DEDENT> <DEDENT> def reset(self): <NEW_LINE> <INDENT> if self._episode > 0: <NEW_LINE> <INDENT> logger.info("Episode %d ended with reward %d after %d steps.", self._episode, self._episode_reward, self._num_step) <NEW_LINE> logger.info("Got %d total reward so far, with an average reward of %g per episode", self._total_reward, float(self._total_reward) / self._episode) <NEW_LINE> <DEDENT> self._episode += 1 <NEW_LINE> self._num_step = 0 <NEW_LINE> self._episode_reward = 0 <NEW_LINE> logger.info("Episode %d starting...", self._episode) <NEW_LINE> obs = self._env.reset()[0] <NEW_LINE> self.available_actions = obs.observation['available_actions'] <NEW_LINE> return obs <NEW_LINE> <DEDENT> def save_replay(self, replay_dir): <NEW_LINE> <INDENT> self._env.save_replay(replay_dir) <NEW_LINE> <DEDENT> def close(self): <NEW_LINE> <INDENT> if self._episode > 0: <NEW_LINE> <INDENT> logger.info("Episode %d ended with reward %d after %d steps.", self._episode, self._episode_reward, self._num_step) <NEW_LINE> logger.info("Got %d total reward, with an average reward of %g per episode", self._total_reward, float(self._total_reward) / self._episode) <NEW_LINE> <DEDENT> if self._env is not None: <NEW_LINE> <INDENT> self._env.close() <NEW_LINE> <DEDENT> super().close() <NEW_LINE> <DEDENT> @property <NEW_LINE> def action_spec(self): <NEW_LINE> <INDENT> return self._env.action_spec() <NEW_LINE> <DEDENT> @property <NEW_LINE> def observation_spec(self): <NEW_LINE> <INDENT> return self._env.observation_spec() <NEW_LINE> <DEDENT> @property <NEW_LINE> def episode(self): <NEW_LINE> <INDENT> return self._episode <NEW_LINE> <DEDENT> @property <NEW_LINE> def num_step(self): <NEW_LINE> <INDENT> return self._num_step <NEW_LINE> <DEDENT> @property <NEW_LINE> def episode_reward(self): <NEW_LINE> <INDENT> return self._episode_reward <NEW_LINE> <DEDENT> @property <NEW_LINE> def total_reward(self): <NEW_LINE> <INDENT> return self._total_reward 
<NEW_LINE> <DEDENT> def render(self, mode='rgb_array', close=False): <NEW_LINE> <INDENT> pass | Custom Environment that follows gym interface | 62598fd8283ffb24f3cf3dd8 |
class Volunteer(models.Model): <NEW_LINE> <INDENT> name = models.CharField(max_length=255) <NEW_LINE> email = models.EmailField() <NEW_LINE> phone = models.CharField(max_length=15) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return self.name | Model for each volunteer
| 62598fd8ab23a570cc2d5019 |
class CNN(NeuralNetwork): <NEW_LINE> <INDENT> def __init__(self, state_shape, num_actions, hidden=20, lr=1e-4): <NEW_LINE> <INDENT> self._build_model(state_shape, num_actions, hidden, lr) <NEW_LINE> <DEDENT> def _build_model(self, state_shape, num_actions, hidden, lr): <NEW_LINE> <INDENT> self.states_ = tf.placeholder(tf.float32, shape=[None]+state_shape) <NEW_LINE> self.actions_ = tf.placeholder(tf.int32, shape=[None]) <NEW_LINE> self.targets_ = tf.placeholder(tf.float32, shape=[None]) <NEW_LINE> conv_layer1 = tf.layers.conv2d(self.states_, 16, 3, padding="same", activation=tf.nn.relu) <NEW_LINE> pool_layer1 = tf.layers.max_pooling2d(conv_layer1, 2, 1, padding = "same") <NEW_LINE> conv_layer2 = tf.layers.conv2d(pool_layer1, 16, 3, padding="same", activation=tf.nn.relu) <NEW_LINE> pool_layer2 = tf.layers.max_pooling2d(conv_layer2, 2, 1, padding = "same") <NEW_LINE> flat_layer1 = tf.layers.flatten(pool_layer2) <NEW_LINE> dense_layer1 = tf.layers.dense(flat_layer1, 128, activation=tf.nn.relu) <NEW_LINE> self.predictions = tf.layers.dense(dense_layer1, num_actions) <NEW_LINE> batch_size = tf.shape(self.states_)[0] <NEW_LINE> gather_indices = tf.range(batch_size) * tf.shape(self.predictions)[1] + self.actions_ <NEW_LINE> self.action_predictions = tf.gather(tf.reshape(self.predictions, [-1]), gather_indices) <NEW_LINE> self.losses = tf.squared_difference(self.targets_, self.action_predictions) <NEW_LINE> self.loss = tf.reduce_mean(self.losses) <NEW_LINE> self.optimizer = tf.train.AdamOptimizer(lr) <NEW_LINE> self.train_op = self.optimizer.minimize(self.loss) | Convolutional Network class based on TensorFlow. | 62598fd8adb09d7d5dc0aad0 |
class CaptureTarget(Enum): <NEW_LINE> <INDENT> STRING = -1 <NEW_LINE> STDOUT = -2 | Constants used for contextmanager captured.
Used similarly like the constants PIPE, STDOUT for stdlib's subprocess.Popen. | 62598fd8d8ef3951e32c8107 |
class MediaMiddleware(StaticMiddlewareBase, IMiddleware, SharedDataMiddleware): <NEW_LINE> <INDENT> name = 'media' <NEW_LINE> exports = {ctx.cfg['routing.urls.media'].split(':', 1)[1]: MEDIA_PATH} <NEW_LINE> url_rules = [ Rule('/', defaults={'file': '/'}, endpoint='media'), Rule('/<path:file>', endpoint='media') ] | Concrete media file serving middleware implementation | 62598fd8dc8b845886d53b16 |
class RestoreLibraryData(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.blend_file = 'T:\\Projects\\0043_Ozzy\\Shots\\140\\0010\\layout\\publish\\140.0010_publish.v0002.blend' <NEW_LINE> self.source_list = [] <NEW_LINE> self.taget_list = [] <NEW_LINE> self.base_path = 'C:\\Temp\\RenderData' <NEW_LINE> self.root_path = '%s\\Projects\\0043_Ozzy\\'%self.base_path <NEW_LINE> <DEDENT> def execute(self): <NEW_LINE> <INDENT> self.clear_list() <NEW_LINE> self.open_file() <NEW_LINE> self.remap_libraries() <NEW_LINE> self.clear_list() <NEW_LINE> self.save_file() <NEW_LINE> <DEDENT> def open_file(self): <NEW_LINE> <INDENT> bpy.ops.wm.open_mainfile(filepath=self.blend_file) <NEW_LINE> <DEDENT> def clear_list(self): <NEW_LINE> <INDENT> self.source_list = [] <NEW_LINE> self.taget_list = [] <NEW_LINE> self.source_list.clear() <NEW_LINE> self.taget_list.clear() <NEW_LINE> self.source_list = [] <NEW_LINE> self.taget_list = [] <NEW_LINE> <DEDENT> def remap_libraries(self): <NEW_LINE> <INDENT> import os <NEW_LINE> import shutil <NEW_LINE> if not os.path.exists('%sShots'%self.root_path): <NEW_LINE> <INDENT> os.makedirs('%sShots'%self.root_path) <NEW_LINE> <DEDENT> if not os.path.exists('%sAssets'%self.root_path): <NEW_LINE> <INDENT> os.makedirs('%sAssets'%self.root_path) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> for library in bpy.data.libraries: <NEW_LINE> <INDENT> source_path = "%s"%(library.filepath) <NEW_LINE> taget_path = "%s"%(self.base_path) <NEW_LINE> final_path = "%s%s"%(target_path,source_path[2:]) <NEW_LINE> self.source_list.append(source_path) <NEW_LINE> self.taget_list.append(final_path) <NEW_LINE> library.filepath = final_path <NEW_LINE> dir = final_path.rpartition('\\') <NEW_LINE> if not os.path.exists("%s"%dir[0]): <NEW_LINE> <INDENT> os.makedirs("%s"%dir[0]) <NEW_LINE> <DEDENT> shutil.copyfile(source_path,final_path) <NEW_LINE> <DEDENT> <DEDENT> except: <NEW_LINE> <INDENT> print("There are no linked assets or libraries") <NEW_LINE> 
<DEDENT> else: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> def save_file(self): <NEW_LINE> <INDENT> import os <NEW_LINE> source_file_path = bpy.data.filepath <NEW_LINE> taget_file_path = "%s"%(self.base_path) <NEW_LINE> final_file_path = "%s%s"%(taget_file_path,source_file_path[2:]) <NEW_LINE> file_dir = final_file_path.rpartition('\\') <NEW_LINE> if not os.path.exists("%s"%file_dir[0]): <NEW_LINE> <INDENT> os.makedirs("%s"%file_dir[0]) <NEW_LINE> <DEDENT> bpy.ops.wm.save_as_mainfile(filepath=final_file_path) | Remap libraries to new paths, move libraries to those paths, and save all to new, mobile, directory. | 62598fd8283ffb24f3cf3dda |
class TestNgt: <NEW_LINE> <INDENT> def test_int_float(self): <NEW_LINE> <INDENT> assert ngt(1, 1.000000001) == 0 <NEW_LINE> <DEDENT> def test_eq(self): <NEW_LINE> <INDENT> assert ngt(23, 22) == 1 <NEW_LINE> <DEDENT> def test_typeerror(self): <NEW_LINE> <INDENT> with pytest.raises(TypeError): <NEW_LINE> <INDENT> ngt("hello", 12) | Числовая операция БОЛЬШЕ (1 - больше, 0 - меньше)
!!! Необходимо уточнить (что возвращает) | 62598fd8ad47b63b2c5a7dab |
class Calendar(BoundedDate): <NEW_LINE> <INDENT> pass | A bounded date control which edits a Python datetime.date using
a widget which resembles a calendar. | 62598fd8091ae35668705174 |
class AffinityPropagationClustering(Pipeline): <NEW_LINE> <INDENT> def __init__(self, metric: Optional[str] = "cosine"): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.metric = metric <NEW_LINE> self.damping = Uniform(0.5, 1.0) <NEW_LINE> self.preference = Uniform(-10.0, 0.0) <NEW_LINE> <DEDENT> def initialize(self): <NEW_LINE> <INDENT> self.affinity_propagation_ = sklearn.cluster.AffinityPropagation( damping=self.damping, preference=self.preference, affinity="precomputed", max_iter=200, convergence_iter=50, ) <NEW_LINE> <DEDENT> def __call__(self, X: np.ndarray) -> np.ndarray: <NEW_LINE> <INDENT> n_samples, _ = X.shape <NEW_LINE> if n_samples < 1: <NEW_LINE> <INDENT> msg = "There should be at least one sample in `X`." <NEW_LINE> raise ValueError(msg) <NEW_LINE> <DEDENT> elif n_samples == 1: <NEW_LINE> <INDENT> return np.array([1], dtype=int) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> affinity = -squareform(pdist(X, metric=self.metric)) <NEW_LINE> clusters = self.affinity_propagation_.fit_predict(affinity) <NEW_LINE> <DEDENT> except MemoryError as e: <NEW_LINE> <INDENT> clusters = np.arange(n_samples) <NEW_LINE> <DEDENT> if np.any(clusters < 0): <NEW_LINE> <INDENT> clusters = np.arange(n_samples) <NEW_LINE> <DEDENT> clusters += 1 <NEW_LINE> return clusters | Clustering based on affinity propagation
Parameters
----------
metric : `str`, optional
Distance metric. Defaults to 'cosine'
Hyper-parameters
----------------
damping : `float`
preference : `float`
See `sklearn.cluster.AffinityPropagation` | 62598fd8956e5f7376df592a |
class EntryTestCase(TestCase): <NEW_LINE> <INDENT> def test_model(self): <NEW_LINE> <INDENT> obj = mixer.blend('frequently.Entry') <NEW_LINE> self.assertTrue(str(obj)) | Tests for the ``Entry`` model class. | 62598fd89f28863672818b2b |
class Pool(A10BaseClass): <NEW_LINE> <INDENT> def __init__(self, **kwargs): <NEW_LINE> <INDENT> self.ERROR_MSG = "" <NEW_LINE> self.required = [ "pool_name"] <NEW_LINE> self.b_key = "pool" <NEW_LINE> self.a10_url="/axapi/v3/ip/nat/pool/{pool_name}" <NEW_LINE> self.DeviceProxy = "" <NEW_LINE> self.use_if_ip = "" <NEW_LINE> self.uuid = "" <NEW_LINE> self.start_address = "" <NEW_LINE> self.vrid = "" <NEW_LINE> self.netmask = "" <NEW_LINE> self.end_address = "" <NEW_LINE> self.ip_rr = "" <NEW_LINE> self.ethernet = "" <NEW_LINE> self.scaleout_device_id = "" <NEW_LINE> self.gateway = "" <NEW_LINE> self.pool_name = "" <NEW_LINE> for keys, value in kwargs.items(): <NEW_LINE> <INDENT> setattr(self,keys, value) | Class Description::
Configure IP pool name.
Class pool supports CRUD Operations and inherits from `common/A10BaseClass`.
This class is the `"PARENT"` class for this module.`
:param use_if_ip: {"description": "Use Interface IP", "format": "flag", "default": 0, "optional": true, "plat-pos-list": ["soft-ax"], "not": "start-address", "type": "number"}
:param uuid: {"description": "uuid of the object", "format": "string", "minLength": 1, "modify-not-allowed": 1, "optional": true, "maxLength": 64, "type": "string"}
:param start_address: {"description": "Configure start IP address of NAT pool", "format": "ipv4-address", "type": "string", "modify-not-allowed": 1, "not": "use-if-ip", "optional": true}
:param vrid: {"description": "Configure VRRP-A vrid (Specify ha VRRP-A vrid)", "format": "number", "optional": true, "maximum": 31, "minimum": 1, "modify-not-allowed": 1, "type": "number"}
:param netmask: {"optional": true, "modify-not-allowed": 1, "type": "string", "description": "Configure mask for pool", "format": "ipv4-netmask-brief"}
:param end_address: {"optional": true, "modify-not-allowed": 1, "type": "string", "description": "Configure end IP address of NAT pool", "format": "ipv4-address"}
:param ip_rr: {"description": "Use IP address round-robin behavior", "format": "flag", "default": 0, "type": "number", "modify-not-allowed": 1, "optional": true}
:param ethernet: {"optional": true, "plat-pos-list": ["soft-ax"], "type": "number", "description": "Ethernet interface", "format": "interface"}
:param scaleout_device_id: {"description": "Configure Scaleout device id to which this NAT pool is to be bound (Specify Scaleout device id)", "format": "number", "optional": true, "maximum": 64, "minimum": 1, "modify-not-allowed": 1, "type": "number"}
:param gateway: {"optional": true, "modify-not-allowed": 1, "type": "string", "description": "Configure gateway IP", "format": "ipv4-address"}
:param pool_name: {"description": "Specify pool name or pool group", "format": "string-rlx", "minLength": 1, "optional": false, "maxLength": 63, "type": "string"}
:param DeviceProxy: The device proxy for REST operations and session handling. Refer to `common/device_proxy.py`
URL for this object::
`https://<Hostname|Ip address>//axapi/v3/ip/nat/pool/{pool_name}`. | 62598fd8656771135c489bce |
class ShowRessort(object): <NEW_LINE> <INDENT> def __call__(self, ressort): <NEW_LINE> <INDENT> last_view = zeit.calendar.browser.interfaces.ILastView( self.request.principal) <NEW_LINE> last_view.hidden_ressorts = last_view.hidden_ressorts.difference( [ressort]) | Show a ressort. | 62598fd8377c676e912f7029 |
@simple_serialization <NEW_LINE> class PoliticalParty(ElectionParty): <NEW_LINE> <INDENT> is_coalition = False <NEW_LINE> def __init__(self, name: str, number: Optional[int] = None, affiliations: Optional[List[PoliticalParty]] = None, lead: Optional[Person] = None, properties: Dict[str, Any] = None, withdrawn: bool = False, ): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.number = number <NEW_LINE> self.affiliations = affiliations <NEW_LINE> self.lead = lead <NEW_LINE> self.properties = properties if properties is not None else {} <NEW_LINE> self.withdrawn = withdrawn <NEW_LINE> <DEDENT> def __repr__(self) -> str: <NEW_LINE> <INDENT> return ( f'<PoliticalParty({self.name}' + (f',{self.number}' if self.number is not None else '') + ')>' ) | A political party or movement that is eligible to stand in elections.
:param name: Name of the party, in any customary text format.
:param number: Candidacy number assigned to the party for the purpose
of the election; usually drawn by lot.
:param affiliation: Other (e.g. national or supranational) parties this
party is affiliated with, if any.
:param lead: A person that leads the party into the elections. | 62598fd8c4546d3d9def7531 |
@customer_api.route('/reservation') <NEW_LINE> class CreateReservation(Resource): <NEW_LINE> <INDENT> @customer_token_required <NEW_LINE> @customer_api.doc('book_a_reservation') <NEW_LINE> @customer_api.response(201, 'Reservation created.') <NEW_LINE> @customer_api.expect(_reservation, validate=True) <NEW_LINE> def post(self): <NEW_LINE> <INDENT> data = request.json <NEW_LINE> return book_a_reservation(data=data) | Customer Reservation | 62598fd897e22403b383b467 |
class ReminderSchedule(object): <NEW_LINE> <INDENT> def __init__(self, days_before_starting = None, frequency_days = None, max_reminders = None): <NEW_LINE> <INDENT> self.wait_period = datetime.timedelta(days_before_starting) <NEW_LINE> self.end_cutoff_date = datetime.datetime.now() - self.wait_period <NEW_LINE> self.recurrence_delay = datetime.timedelta(frequency_days) <NEW_LINE> self.max_reminders = max_reminders <NEW_LINE> self.start_cutoff_date = self.end_cutoff_date - (self.max_reminders - 1)*self.recurrence_delay | class that given the three settings:
* days to wait before sending the reminders
* frequency of reminders
* maximum number of reminders
return dates when to start sending the reminders,
when to stop, and give friendly names to other
variables
These objects can be reused to all methods that
intend to remind of certain events periodically | 62598fd8dc8b845886d53b1c |
class ShopTestCase(unittest.TestCase): <NEW_LINE> <INDENT> def test_subclass(self): <NEW_LINE> <INDENT> from collective.cart.core.interfaces import IShoppingSiteRoot <NEW_LINE> from collective.cart.shopping.schema import ShopSchema <NEW_LINE> self.assertTrue(issubclass(Shop, Container)) <NEW_LINE> self.assertTrue(issubclass(ShopSchema, Schema)) <NEW_LINE> self.assertTrue(issubclass(IShop, (ShopSchema, IShoppingSiteRoot))) <NEW_LINE> <DEDENT> def test_verifyObject(self): <NEW_LINE> <INDENT> self.assertTrue(verifyObject(IShop, Shop())) | TestCase for content type: collective.cart.shopping.Shop | 62598fd8adb09d7d5dc0aad8 |
class Encoder(nn.Module): <NEW_LINE> <INDENT> def __init__(self, num_nodes, num_rels, embed_dim, device): <NEW_LINE> <INDENT> super(Encoder, self).__init__() <NEW_LINE> self.num_nodes = num_nodes <NEW_LINE> self.num_rels = num_rels <NEW_LINE> self.embed_dim = embed_dim <NEW_LINE> self.device = device <NEW_LINE> self.entity_embed_layer = nn.Linear(self.num_nodes, self.embed_dim, bias = True).to(self.device) <NEW_LINE> self.relation_embed_layer = nn.Linear(self.num_rels, self.embed_dim, bias = True).to(self.device) <NEW_LINE> nn.init.xavier_uniform_(self.entity_embed_layer.weight) <NEW_LINE> nn.init.constant_(self.entity_embed_layer.bias, 0.0) <NEW_LINE> nn.init.xavier_uniform_(self.relation_embed_layer.weight) <NEW_LINE> nn.init.constant_(self.relation_embed_layer.bias, 0.0) <NEW_LINE> self.attention = nn.TransformerEncoderLayer(d_model=(self.embed_dim + self.embed_dim), nhead=1).to(self.device) <NEW_LINE> self.leaky_relu = nn.ReLU().to(self.device) <NEW_LINE> self.dropout = nn.Dropout().to(self.device) <NEW_LINE> self.linear = nn.Linear(self.embed_dim + self.embed_dim, self.num_nodes,bias = True).to(self.device) <NEW_LINE> nn.init.xavier_uniform_(self.linear.weight.data) <NEW_LINE> nn.init.constant_(self.linear.bias.data, 0.0) <NEW_LINE> <DEDENT> def forward(self, entity, relation): <NEW_LINE> <INDENT> entity_embed = self.entity_embed_layer(entity) <NEW_LINE> entity_embed = self.leaky_relu(entity_embed) <NEW_LINE> relation_embed = self.relation_embed_layer(relation) <NEW_LINE> relation_embed = self.leaky_relu(relation_embed) <NEW_LINE> x = torch.cat((entity_embed, relation_embed), 1) <NEW_LINE> x = x.view(-1, 1, (self.embed_dim + self.embed_dim)) <NEW_LINE> x = self.attention(x) <NEW_LINE> x = x.view(-1, (self.embed_dim + self.embed_dim)) <NEW_LINE> x = self.linear(x) <NEW_LINE> x = self.leaky_relu(x) <NEW_LINE> x = F.gumbel_softmax(x, hard=True) <NEW_LINE> return x | docstring for Encoder | 62598fd83617ad0b5ee066a6 |
class EmployeeEditView(LoginRequiredMixin, UpdateView): <NEW_LINE> <INDENT> login_url = '/login' <NEW_LINE> model = Employees <NEW_LINE> fields = '__all__' <NEW_LINE> template_name_suffix = '_update_form' <NEW_LINE> def get_success_url(self): <NEW_LINE> <INDENT> return reverse('employee-site') | Edit employee's data | 62598fd8656771135c489bd2 |
class Edit(OpenShiftCLI): <NEW_LINE> <INDENT> def __init__(self, kind, namespace, resource_name=None, kubeconfig='/etc/origin/master/admin.kubeconfig', separator='.', verbose=False): <NEW_LINE> <INDENT> super(Edit, self).__init__(namespace, kubeconfig=kubeconfig, verbose=verbose) <NEW_LINE> self.kind = kind <NEW_LINE> self.name = resource_name <NEW_LINE> self.separator = separator <NEW_LINE> <DEDENT> def get(self): <NEW_LINE> <INDENT> return self._get(self.kind, self.name) <NEW_LINE> <DEDENT> def update(self, file_name, content, edits, force=False, content_type='yaml'): <NEW_LINE> <INDENT> if file_name: <NEW_LINE> <INDENT> if content_type == 'yaml': <NEW_LINE> <INDENT> data = yaml.load(open(file_name)) <NEW_LINE> <DEDENT> elif content_type == 'json': <NEW_LINE> <INDENT> data = json.loads(open(file_name).read()) <NEW_LINE> <DEDENT> yed = Yedit(filename=file_name, content=data, separator=self.separator) <NEW_LINE> if content is not None: <NEW_LINE> <INDENT> changes = [] <NEW_LINE> for key, value in content.items(): <NEW_LINE> <INDENT> changes.append(yed.put(key, value)) <NEW_LINE> <DEDENT> if any([not change[0] for change in changes]): <NEW_LINE> <INDENT> return {'returncode': 0, 'updated': False} <NEW_LINE> <DEDENT> <DEDENT> elif edits is not None: <NEW_LINE> <INDENT> results = Yedit.process_edits(edits, yed) <NEW_LINE> if not results['changed']: <NEW_LINE> <INDENT> return results <NEW_LINE> <DEDENT> <DEDENT> yed.write() <NEW_LINE> atexit.register(Utils.cleanup, [file_name]) <NEW_LINE> return self._replace(file_name, force=force) <NEW_LINE> <DEDENT> return self._replace_content(self.kind, self.name, content, edits, force=force, sep=self.separator) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def run_ansible(params, check_mode): <NEW_LINE> <INDENT> ocedit = Edit(params['kind'], params['namespace'], params['name'], kubeconfig=params['kubeconfig'], separator=params['separator'], verbose=params['debug']) <NEW_LINE> api_rval = ocedit.get() <NEW_LINE> if not 
Utils.exists(api_rval['results'], params['name']): <NEW_LINE> <INDENT> return {"failed": True, 'msg': api_rval} <NEW_LINE> <DEDENT> if check_mode: <NEW_LINE> <INDENT> return {'changed': True, 'msg': 'CHECK_MODE: Would have performed edit'} <NEW_LINE> <DEDENT> api_rval = ocedit.update(params['file_name'], params['content'], params['edits'], params['force'], params['file_format']) <NEW_LINE> if api_rval['returncode'] != 0: <NEW_LINE> <INDENT> return {"failed": True, 'msg': api_rval} <NEW_LINE> <DEDENT> if 'updated' in api_rval and not api_rval['updated']: <NEW_LINE> <INDENT> return {"changed": False, 'results': api_rval, 'state': 'present'} <NEW_LINE> <DEDENT> api_rval = ocedit.get() <NEW_LINE> if api_rval['returncode'] != 0: <NEW_LINE> <INDENT> return {"failed": True, 'msg': api_rval} <NEW_LINE> <DEDENT> return {"changed": True, 'results': api_rval, 'state': 'present'} | Class to wrap the oc command line tools
| 62598fd8dc8b845886d53b1e |
class Parents (weakref.WeakValueDictionary): <NEW_LINE> <INDENT> def __copy__ (self): <NEW_LINE> <INDENT> return self.__class__(self); <NEW_LINE> <DEDENT> def __deepcopy__ (self,memo): <NEW_LINE> <INDENT> return self.__class__(self); | The Parents class is used to manage a weakly-reffed dictionary of the
node's parents. We only redefine it as a class to implement __copy__
and __depcopy__ (which otherwise crashes and burns on weakrefs) | 62598fd87cff6e4e811b5f8a |
class NoticeLogging(logging.Handler): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> logging.Handler.__init__(self) <NEW_LINE> self.seen_message = False <NEW_LINE> <DEDENT> def emit(self, record): <NEW_LINE> <INDENT> self.seen_message = True | A log handler that, if asked to emit, will set
``self.seen_message`` to True. | 62598fd8ad47b63b2c5a7db3 |
class MacDocumentVersionsEventData(events.EventData): <NEW_LINE> <INDENT> DATA_TYPE = 'mac:document_versions:file' <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> super(MacDocumentVersionsEventData, self).__init__(data_type=self.DATA_TYPE) <NEW_LINE> self.last_time = None <NEW_LINE> self.name = None <NEW_LINE> self.path = None <NEW_LINE> self.query = None <NEW_LINE> self.user_sid = None <NEW_LINE> self.version_path = None | MacOS document revision event data.
Attributes:
last_time (str): the system user ID of the user that opened the file.
name (str): name of the original file.
path (str): path from the original file.
query (str): SQL query that was used to obtain the event data.
user_sid (str): identification user ID that open the file.
version_path (str): path to the version copy of the original file. | 62598fd8c4546d3d9def7533 |
class Spritesheet: <NEW_LINE> <INDENT> def __init__(self, filename): <NEW_LINE> <INDENT> self.sheet = pygame.image.load(filename).convert() <NEW_LINE> <DEDENT> def imgat(self, rect, colorkey = None): <NEW_LINE> <INDENT> rect = pygame.Rect(rect) <NEW_LINE> image = pygame.Surface(rect.size).convert() <NEW_LINE> image.blit(self.sheet, (0, 0), rect) <NEW_LINE> if colorkey is not None: <NEW_LINE> <INDENT> if colorkey is -1: <NEW_LINE> <INDENT> colorkey = image.get_at((0, 0)) <NEW_LINE> <DEDENT> image.set_colorkey(colorkey, pygame.RLEACCEL) <NEW_LINE> <DEDENT> return image <NEW_LINE> <DEDENT> def imgsat(self, rects, colorkey = None): <NEW_LINE> <INDENT> imgs = [] <NEW_LINE> for rect in rects: <NEW_LINE> <INDENT> imgs.append(self.imgat(rect, colorkey)) <NEW_LINE> <DEDENT> return imgs <NEW_LINE> <DEDENT> def img_extract( self, cols, rows, width, height, colorkey = -1 ): <NEW_LINE> <INDENT> rect_list = [] <NEW_LINE> for y in range(0, rows): <NEW_LINE> <INDENT> for x in range(0, cols): <NEW_LINE> <INDENT> rect_list.append( (width*x, height*y, width, height) ) <NEW_LINE> <DEDENT> <DEDENT> return self.imgsat( rect_list, colorkey ) | Class from http://www.scriptedfun.com/transcript-2-using-sprite-sheets-and-drawing-the-background/
This class can be used to seporate images from the sprite sheet | 62598fd9dc8b845886d53b20 |
class DT2NIfTI(CommandLine): <NEW_LINE> <INDENT> _cmd = 'dt2nii' <NEW_LINE> input_spec = DT2NIfTIInputSpec <NEW_LINE> output_spec = DT2NIfTIOutputSpec <NEW_LINE> def _list_outputs(self): <NEW_LINE> <INDENT> outputs = self.output_spec().get() <NEW_LINE> output_root = self._gen_outputroot() <NEW_LINE> outputs["dt"] = os.path.abspath(output_root + "dt.nii") <NEW_LINE> outputs["exitcode"] = os.path.abspath(output_root + "exitcode.nii") <NEW_LINE> outputs["lns0"] = os.path.abspath(output_root + "lns0.nii") <NEW_LINE> return outputs <NEW_LINE> <DEDENT> def _gen_outfilename(self): <NEW_LINE> <INDENT> return self._gen_outputroot() <NEW_LINE> <DEDENT> def _gen_outputroot(self): <NEW_LINE> <INDENT> output_root = self.inputs.output_root <NEW_LINE> if not isdefined(output_root): <NEW_LINE> <INDENT> output_root = self._gen_filename('output_root') <NEW_LINE> <DEDENT> return output_root <NEW_LINE> <DEDENT> def _gen_filename(self, name): <NEW_LINE> <INDENT> if name == 'output_root': <NEW_LINE> <INDENT> _, filename, _ = split_filename(self.inputs.in_file) <NEW_LINE> filename = filename + "_" <NEW_LINE> <DEDENT> return filename | Converts camino tensor data to NIfTI format
Reads Camino diffusion tensors, and converts them to NIFTI format as three .nii files. | 62598fd926238365f5fad0c6 |
class GotoRedirect(BrowserView): <NEW_LINE> <INDENT> def __call__(self, *args, **kwargs): <NEW_LINE> <INDENT> manager = IGUIDManager(self.context) <NEW_LINE> request = self.request <NEW_LINE> response = self.request.response <NEW_LINE> obj = None <NEW_LINE> guid = request.get('guid', None) <NEW_LINE> if not guid: <NEW_LINE> <INDENT> return response.write("The guid paramater is required") <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> if guid.startswith("/zport/dmd/"): <NEW_LINE> <INDENT> obj = self.context.unrestrictedTraverse(unquote(guid)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> obj = manager.getObject(guid) <NEW_LINE> <DEDENT> <DEDENT> except KeyError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> if not obj: <NEW_LINE> <INDENT> return response.write("Could not look up guid %s" % guid) <NEW_LINE> <DEDENT> path = obj.absolute_url_path() <NEW_LINE> return response.redirect(path) | Given a guid in the url request redirect to the correct page | 62598fd9ab23a570cc2d501f |
class ByteWriteable(Protocol): <NEW_LINE> <INDENT> def write(self, data: bytes) -> int: <NEW_LINE> <INDENT> pass | The type of object which must be passed into read_body_with_max_size.
Typically this is a file object. | 62598fd997e22403b383b46d |
class Breakpoints(BreakpointWidget, SpyderPluginMixin): <NEW_LINE> <INDENT> CONF_SECTION = 'breakpoints' <NEW_LINE> def __init__(self, parent=None): <NEW_LINE> <INDENT> BreakpointWidget.__init__(self, parent=parent) <NEW_LINE> SpyderPluginMixin.__init__(self, parent) <NEW_LINE> self.initialize_plugin() <NEW_LINE> self.set_data() <NEW_LINE> <DEDENT> def get_plugin_title(self): <NEW_LINE> <INDENT> return _("Breakpoints") <NEW_LINE> <DEDENT> def get_plugin_icon(self): <NEW_LINE> <INDENT> path = osp.join(self.PLUGIN_PATH, self.IMG_PATH) <NEW_LINE> return ima.icon('profiler', icon_path=path) <NEW_LINE> <DEDENT> def get_focus_widget(self): <NEW_LINE> <INDENT> return self.dictwidget <NEW_LINE> <DEDENT> def get_plugin_actions(self): <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> def on_first_registration(self): <NEW_LINE> <INDENT> self.main.tabify_plugins(self.main.help, self) <NEW_LINE> <DEDENT> def register_plugin(self): <NEW_LINE> <INDENT> self.edit_goto.connect(self.main.editor.load) <NEW_LINE> self.clear_all_breakpoints.connect( self.main.editor.clear_all_breakpoints) <NEW_LINE> self.clear_breakpoint.connect(self.main.editor.clear_breakpoint) <NEW_LINE> self.main.editor.breakpoints_saved.connect(self.set_data) <NEW_LINE> self.set_or_edit_conditional_breakpoint.connect( self.main.editor.set_or_edit_conditional_breakpoint) <NEW_LINE> self.main.add_dockwidget(self) <NEW_LINE> list_action = create_action(self, _("List breakpoints"), triggered=self.show) <NEW_LINE> list_action.setEnabled(True) <NEW_LINE> pos = self.main.debug_menu_actions.index('list_breakpoints') <NEW_LINE> self.main.debug_menu_actions.insert(pos, list_action) <NEW_LINE> self.main.editor.pythonfile_dependent_actions += [list_action] <NEW_LINE> <DEDENT> def refresh_plugin(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def closing_plugin(self, cancelable=False): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> def apply_plugin_settings(self, options): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> 
def show(self): <NEW_LINE> <INDENT> if self.dockwidget and not self.ismaximized: <NEW_LINE> <INDENT> self.dockwidget.setVisible(True) <NEW_LINE> self.dockwidget.setFocus() <NEW_LINE> self.dockwidget.raise_() | Breakpoint list | 62598fd9d8ef3951e32c810d |
class BorderChoiceView(ListAPIView, CreateAPIView): <NEW_LINE> <INDENT> serializer_class = ChoiceItemSeralizer <NEW_LINE> permissions = [permissions.IsAuthenticated] <NEW_LINE> queryset = BorderChoice.objects.all() | Список возможных вариантов границы | 62598fd98a349b6b436867a4 |
class QFUImage(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def make(self, header, infile): <NEW_LINE> <INDENT> infile.seek(0, os.SEEK_END) <NEW_LINE> size = infile.tell() <NEW_LINE> header.num_blocks = ((size-1) // header.block_size) + 2 <NEW_LINE> content = header.packed_qfu_header <NEW_LINE> infile.seek(0, os.SEEK_SET) <NEW_LINE> content += infile.read() <NEW_LINE> return content | Creates a QFU compatible file from a binary file. | 62598fd926238365f5fad0c8 |
class PyPascalTriangleIterators(PyPascalTriangleBase): <NEW_LINE> <INDENT> max_height = 900 <NEW_LINE> def build(self, height): <NEW_LINE> <INDENT> if height == 0: <NEW_LINE> <INDENT> self._print(self.ONE_LIST) <NEW_LINE> return self.ONE_LIST <NEW_LINE> <DEDENT> elif height < 0: <NEW_LINE> <INDENT> return self.EMPTY_LIST <NEW_LINE> <DEDENT> prev_line = self.build(height - 1) <NEW_LINE> iterator = itertools.chain(self.ZERO_LIST, prev_line) <NEW_LINE> ahead_iterator = itertools.chain(prev_line, self.ZERO_LIST) <NEW_LINE> line = [x + y for x, y in itertools.izip(iterator, ahead_iterator)] <NEW_LINE> self._print(line) <NEW_LINE> return line | Based on :py:class::`PascalTriangleConstantLists`.
Difference:
- Iterators are used instead of lists where it is possible | 62598fd9ad47b63b2c5a7db7 |
class IssueListCreateView(generics.ListCreateAPIView): <NEW_LINE> <INDENT> serializer_class = IssueSerializer <NEW_LINE> permission_classes = [IsAuthenticated, IsIssue] <NEW_LINE> def get_queryset(self, *args, **kwargs): <NEW_LINE> <INDENT> user = self.request.user <NEW_LINE> id_project = self.kwargs.get('id_project') <NEW_LINE> try: <NEW_LINE> <INDENT> return Issue.objects.filter(project__contributor=user).filter(project__id=id_project) <NEW_LINE> <DEDENT> except Issue.DoesNotExist: <NEW_LINE> <INDENT> raise Http404 <NEW_LINE> <DEDENT> <DEDENT> def perform_create(self, serializer, *args, **kwargs): <NEW_LINE> <INDENT> id_project = self.kwargs.get('id_project') <NEW_LINE> try: <NEW_LINE> <INDENT> projects = Project.objects.get(pk=id_project) <NEW_LINE> <DEDENT> except Project.DoesNotExist: <NEW_LINE> <INDENT> raise Http404 <NEW_LINE> <DEDENT> serializer.save(author=self.request.user, project=projects, user_assigner=self.request.user) | get, create les issues du projet. Il y a que les contributeurs du projet qui y on accés | 62598fd9377c676e912f702d |
class ConfigFile(_messages.Message): <NEW_LINE> <INDENT> class FileTypeValueValuesEnum(_messages.Enum): <NEW_LINE> <INDENT> FILE_TYPE_UNSPECIFIED = 0 <NEW_LINE> SERVICE_CONFIG_YAML = 1 <NEW_LINE> OPEN_API_JSON = 2 <NEW_LINE> OPEN_API_YAML = 3 <NEW_LINE> FILE_DESCRIPTOR_SET_PROTO = 4 <NEW_LINE> <DEDENT> contents = _messages.StringField(1) <NEW_LINE> fileContents = _messages.BytesField(2) <NEW_LINE> filePath = _messages.StringField(3) <NEW_LINE> fileType = _messages.EnumField('FileTypeValueValuesEnum', 4) | Generic specification of a source configuration file
Enums:
FileTypeValueValuesEnum: The kind of configuration file represented. This
is used to determine the method for generating `google.api.Service`
using this file.
Fields:
contents: DEPRECATED. The contents of the configuration file. Use
file_contents moving forward.
fileContents: The bytes that constitute the file.
filePath: The file name of the configuration file (full or relative path).
fileType: The kind of configuration file represented. This is used to
determine the method for generating `google.api.Service` using this
file. | 62598fd9ab23a570cc2d5020 |
class RefreshToken(models.Model): <NEW_LINE> <INDENT> access_token = models.OneToOneField(AccessToken, related_name='refresh_token', on_delete=models.CASCADE) <NEW_LINE> user = models.ForeignKey(oauth_pen_settings.AUTH_USER_MODEL, on_delete=models.CASCADE) <NEW_LINE> token = models.CharField(max_length=255, unique=True) <NEW_LINE> application = models.ForeignKey(oauth_pen_settings.APPLICATION_MODEL, on_delete=models.CASCADE) <NEW_LINE> def revoke(self): <NEW_LINE> <INDENT> AccessToken.objects.get(id=self.access_token.id).revoke() <NEW_LINE> self.delete() <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.token | 刷新token | 62598fd9adb09d7d5dc0aade |
class ClientCommand(object): <NEW_LINE> <INDENT> CONNECT, SEND, RECEIVE, CLOSE = range(4) <NEW_LINE> def __init__(self, type_, data=None): <NEW_LINE> <INDENT> self.type_of_data = type_ <NEW_LINE> self.data = data <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return f'Type: {self.type_of_data}\nData: {self.data}' | A command to the client thread.
Each command type has its associated data:
CONNECT: (host, port) tuple
SEND: Data string
RECEIVE: None
CLOSE: None | 62598fd9fbf16365ca794624 |
class MzXML(xml.ArrayConversionMixin, xml.IndexedXML): <NEW_LINE> <INDENT> _root_element = 'mzXML' <NEW_LINE> _default_iter_tag = 'scan' <NEW_LINE> _indexed_tags = {'scan'} <NEW_LINE> _indexed_tag_keys = {'scan': 'num'} <NEW_LINE> _default_version = None <NEW_LINE> _default_schema = xml._mzxml_schema_defaults <NEW_LINE> def _get_info_smart(self, element, **kw): <NEW_LINE> <INDENT> name = xml._local_name(element) <NEW_LINE> kwargs = dict(kw) <NEW_LINE> rec = kwargs.pop('recursive', None) <NEW_LINE> if name in {'mzXML'}: <NEW_LINE> <INDENT> info = self._get_info(element, recursive=( rec if rec is not None else False), **kwargs) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> info = self._get_info(element, recursive=(rec if rec is not None else True), **kwargs) <NEW_LINE> <DEDENT> if 'num' in info and isinstance(info, dict): <NEW_LINE> <INDENT> info['id'] = info['num'] <NEW_LINE> <DEDENT> if 'peaks' in info and isinstance(info, dict): <NEW_LINE> <INDENT> if not isinstance(info['peaks'], (dict, list)): <NEW_LINE> <INDENT> peak_data = _decode_peaks(info, info.pop('peaks')) <NEW_LINE> for k in self._array_keys: <NEW_LINE> <INDENT> info[k] = self._convert_array(k, peak_data[k]) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> peak_data = info.pop('peaks')[0] <NEW_LINE> for k in self._array_keys: <NEW_LINE> <INDENT> info[k] = self._convert_array(k, peak_data.get(k, np.array([]))) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return info <NEW_LINE> <DEDENT> def iterfind(self, path, **kwargs): <NEW_LINE> <INDENT> if path == 'scan': <NEW_LINE> <INDENT> generator = super(MzXML, self).iterfind(path, **kwargs) <NEW_LINE> for item in IteratorQueue(generator): <NEW_LINE> <INDENT> yield item <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> for item in super(MzXML, self).iterfind(path, **kwargs): <NEW_LINE> <INDENT> yield item | Parser class for mzXML files. | 62598fd950812a4eaa620e95 |
class DebPlugin(Plugin): <NEW_LINE> <INDENT> def get_cruft(self): <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> def post_cleanup(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.app.apt_cache.commit(apt.progress.text.AcquireProgress(), apt.progress.base.InstallProgress()) <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> self.app.refresh_apt_cache() | Plugin for post-cleanup processing with apt.
This plugin does not find any cruft of its own. Instead it centralizes
the post-cleanup handling for all packages that remove .deb packages. | 62598fd97cff6e4e811b5f90 |
class ConstantDefinitionTest(test_lib.BaseTestCase):
    """Constant data type definition tests."""

    def testInitialize(self):
        """Tests that a constant definition can be created."""
        definition = data_types.ConstantDefinition(
            'const', description='contant')
        self.assertIsNotNone(definition)
class _LoggingProjectsSinksRepository(
        repository_mixins.ListQueryMixin,
        _base_repository.GCPRepository):
    """Implementation of Logging Projects Sinks repository."""

    def __init__(self, **kwargs):
        """Initialize the repository for the projects.sinks component."""
        super(_LoggingProjectsSinksRepository, self).__init__(
            key_field='parent',
            max_results_field='pageSize',
            component='projects.sinks',
            **kwargs)

    @staticmethod
    def get_name(project_id):
        """Return *project_id* prefixed with 'projects/' unless already so
        (or falsy), in which case it is returned unchanged."""
        needs_prefix = project_id and not project_id.startswith('projects/')
        return 'projects/{}'.format(project_id) if needs_prefix else project_id
class Embeddings(nn.Module):
    """Construct the embeddings from word, position and token_type embeddings."""

    def __init__(self, config):
        """*config* must provide vocab_size, hidden_size,
        max_position_embeddings, type_vocab_size, dropout_prob and
        initializer_range."""
        super(Embeddings, self).__init__()
        # padding_idx=0: token id 0 embeds to (and stays) zero.
        self.word_embeddings = Embedding(config.vocab_size, config.hidden_size, padding_idx=0)
        self.position_embeddings = Embedding(config.max_position_embeddings, config.hidden_size)
        self.token_type_embeddings = Embedding(config.type_vocab_size, config.hidden_size)
        self.LayerNorm = LayerNorm(config.hidden_size, eps=1e-12)
        self.dropout = Dropout(config.dropout_prob)
        self.init_weights(config)

    def init_weights(self, config):
        """Initialize all embedding tables from N(0, initializer_range)."""
        self.word_embeddings.weight.data.normal_(mean=0.0, std=config.initializer_range)
        self.position_embeddings.weight.data.normal_(mean=0.0, std=config.initializer_range)
        self.token_type_embeddings.weight.data.normal_(mean=0.0, std=config.initializer_range)

    def forward(self, input_ids, token_type_ids=None):
        """Return summed, layer-normalized, dropout-regularized embeddings of
        shape (batch, seq_len, hidden_size)."""
        # BUGFIX: token_type_ids defaulted to None but was used directly,
        # crashing any call that omitted it; default to segment 0 (BERT-style).
        if token_type_ids is None:
            token_type_ids = torch.zeros_like(input_ids)
        seq_length = input_ids.size(1)
        position_ids = torch.arange(seq_length, dtype=torch.long, device=input_ids.device)
        position_ids = position_ids.unsqueeze(0).expand_as(input_ids)
        words_embeddings = self.word_embeddings(input_ids)
        position_embeddings = self.position_embeddings(position_ids)
        token_type_embeddings = self.token_type_embeddings(token_type_ids)
        embeddings = words_embeddings + position_embeddings + token_type_embeddings
        embeddings = self.LayerNorm(embeddings)
        embeddings = self.dropout(embeddings)
        return embeddings
class CreateGoodsView(CreateView):
    """Create a product (translated from: "создание товара")."""

    model = Product
    template_name = 'my_shop/create.html'
    form_class = ProdForm

    def form_valid(self, form):
        # Attach the current user as the product's author before saving.
        self.object = form.save(commit=False)
        self.object.author = self.request.user
        self.object.save()
        return super().form_valid(form)

    def get_success_url(self):
        # Redirect to the detail page of the newly created product.
        return reverse_lazy('post_detail', kwargs={'pk': self.object.id})
class Env:
    """Example 6.5: Windy Gridworld Env.

    The agent starts at ``start`` and must reach ``goal``; the wind column
    values shift the agent each step.
    """

    metadata = {'render.modes': ['rgb_array', 'human']}

    def __init__(self, seed=None, itype=numpy.int64):
        self.rng = numpy.random.RandomState(seed)
        self.start = numpy.array([0, 3], dtype=itype)
        self.wind = numpy.array([0, 0, 0, 1, 1, 1, 2, 2, 1, 0], dtype=itype)
        self.goal = numpy.array([0, 7], dtype=itype)
        self.bound = numpy.array([6, 9], dtype=itype)
        # BUGFIX: numpy basic slicing (``[:]``) returns a *view*, so the
        # in-place ``+=`` in step() silently mutated ``start``; use a copy.
        self.position = self.start.copy()

    def is_out(self):
        """Return True when the current position lies outside the grid."""
        x, y = self.position
        return x < 0 or self.bound[0] < x or y < 0 or self.bound[1] < y

    def step(self, action):
        """Advance one step; return (observation, reward, done, info)."""
        if self.is_out():
            return self.position.copy(), Reward.not_finish, True, dict()
        # NOTE(review): the wind is added to *both* coordinates here; the
        # classic windy gridworld shifts only the vertical axis -- confirm
        # this is intended (behavior preserved from the original).
        self.position += self.wind[self.position[0]]
        if action == Action.left:
            self.position[0] -= 1
        elif action == Action.right:
            self.position[0] += 1
        elif action == Action.up:
            self.position[1] += 1
        elif action == Action.down:
            self.position[1] -= 1
        if all(self.position == self.goal):
            reward = Reward.finish
            done = True
        else:
            reward = Reward.not_finish
            done = self.is_out()
        # Return a copy so callers cannot mutate internal state.
        observation = self.position.copy()
        info = dict()
        return observation, reward, done, info

    def reset(self):
        """Reset to the start position; return a copy of the observation."""
        # BUGFIX: previously ``self.position = self.start`` aliased the two
        # arrays, so stepping after a reset corrupted ``start``.
        self.position = self.start.copy()
        return self.position.copy()

    def seed(self, seed=None):
        """Re-seed the internal random number generator."""
        self.rng.seed(seed)
class SensorsTest(Tester):
    """Behavioral tests for ObstacleDectection."""

    def __init__(self):
        # Register this tester under the name "Sensors" with the base class.
        Tester.__init__(self, "Sensors")
        self.sensors = Sensors()

    def main(self):
        # NOTE(review): ``self.rate`` is presumably a rate object supplied by
        # Tester (e.g. a ROS Rate) -- confirm; this sleeps for one cycle.
        self.rate.sleep()
class WFDB_CError(WFDB_Error):
    """Exceptions that are raised as a result of catching error return codes
    from the C WFDB Library.

    Notes:
        The return code of this error should match that of the error code
        returned by the underlying C function.
    """

    def __init__(self, message, return_code=None):
        super().__init__(message)
        self.return_code = return_code
class MultiChoiceQuestion(models.Model):
    """Defines a multiple choice type question on the questionnaire."""

    # NOTE(review): no on_delete argument -- required in Django >= 2.0;
    # confirm the project's Django version before upgrading.
    questionnaire = models.ForeignKey(Questionnaire, blank=False)
    question_order = models.PositiveIntegerField(blank=False)
    question_text = models.CharField(max_length=255, blank=False)

    def __unicode__(self):
        # Python 2 string representation: show the question text.
        return self.question_text

    class Meta:
        # Stable display order: by questionnaire, then by question position.
        ordering = ('questionnaire', 'question_order',)
class Collection(object):
    """Base class for collection classes. May also be used for part
    collections that don't yet have any custom methods.

    Has the following characteristics:
        * Container (implements __contains__)
        * Iterable (delegates __iter__ to |list|)
        * Sized (implements __len__)
        * Sequence (delegates __getitem__ to |list|)
    """

    def __init__(self):
        super(Collection, self).__init__()
        self.__values = []

    @property
    def _values(self):
        """The underlying |list| holding the member items."""
        return self.__values

    def __contains__(self, item):
        return item in self.__values

    def __getitem__(self, key):
        return self.__values[key]

    def __iter__(self):
        return iter(self.__values)

    def __len__(self):
        return len(self.__values)

    def index(self, item):
        """Return the offset of *item* in this collection."""
        return self.__values.index(item)
class DDPG4KeyWords(DDPG):
    """DDPG agent specialised for the keyword task.

    Overrides the actor/critic network builders of the base ``DDPG`` class;
    training logic is inherited unchanged.
    """

    def __init__(self, **kwargs):
        super(DDPG4KeyWords, self).__init__(**kwargs)

    def _build_a_net(self, s, scope, trainable):
        # Actor network: s -> 512 -> 400 -> n_actions (tanh), then scaled
        # into the action bound.
        w_initializer, b_initializer = None, None
        with tf.variable_scope(scope):
            e1 = tf.layers.dense(inputs=s,
                                 units=512,
                                 bias_initializer=b_initializer,
                                 kernel_initializer=w_initializer,
                                 activation=tf.nn.relu,
                                 trainable=trainable)
            e2 = tf.layers.dense(inputs=e1,
                                 units=400,
                                 bias_initializer=b_initializer,
                                 kernel_initializer=w_initializer,
                                 activation=tf.nn.relu,
                                 trainable=trainable)
            a = tf.layers.dense(inputs=e2,
                                units=self.n_actions,
                                bias_initializer=b_initializer,
                                kernel_initializer=w_initializer,
                                activation=tf.nn.tanh,
                                trainable=trainable)
        # Map tanh output from [-1, 1] into [-a_bound, a_bound].
        return tf.multiply(a, self.a_bound, name='scaled_a')

    def _build_c_net(self, s, a, scope, trainable):
        # Critic network: Q(s, a) with one 512-unit hidden layer that mixes
        # state and action via separate weight matrices, then a scalar head.
        with tf.variable_scope(scope):
            n_l1 = 512
            w1_s = tf.get_variable('w1_s', [self.n_features, n_l1], trainable=trainable)
            w1_a = tf.get_variable('w1_a', [self.n_actions, n_l1], trainable=trainable)
            b1 = tf.get_variable('b1', [1, n_l1], trainable=trainable)
            net = tf.nn.relu(tf.matmul(s, w1_s) + tf.matmul(a, w1_a) + b1)
            return tf.layers.dense(net, 1, trainable=trainable)
class ObjectParser:
    """An object parser: wraps each value of a mapping in an Object."""

    def __init__(self, data):
        """Build an Object for every (key, value) pair in *data*."""
        self.objects = {key: Object(value) for key, value in data.items()}

    def __str__(self):
        body = "\n%%\n".join(
            name + ":" + str(obj) for name, obj in self.objects.items())
        return "objectParser:\n{0}".format(body)

    def getData(self, request):
        """Return the JSON serialization of the named object.

        Raises:
            IOError: if *request* is not a known object name.
        """
        if request in self.objects:
            return json.dumps(self.objects[request].json())
        # Previously a bare ``raise IOError`` with a dead, builtin-shadowing
        # local; include the offending name for debuggability.
        raise IOError("unknown object: {0}".format(request))
class VMs(i3pystatus.IntervalModule):
    """Virtualbox VMs count."""

    color_up = "#00F000"
    color_down = "#333333"
    interval = 120
    settings = (
        ("color_up", "Color when VMs are running"),
        ("color_down", "Color when VMs are stopped")
    )

    def run(self):
        # Build the i3bar block: coloured count when any VM runs, a plain
        # grey "VMs" label otherwise.
        response = {'full_text': '', 'name': 'vms'}
        num_vms = self.get_virtualbox_vms() + self.get_libvirt_vms()
        if num_vms > 0:
            response['color'] = self.color_up
            response['full_text'] = "VMs: %d" % num_vms
        else:
            response['color'] = self.color_down
            response['full_text'] = "VMs"
        self.output = response

    @staticmethod
    def get_virtualbox_vms():
        # NOTE(review): unlike get_libvirt_vms, failures of ``vboxmanage``
        # are not caught here -- confirm whether they should be.
        num_vms = subprocess.check_output(['vboxmanage', 'list', 'runningvms'])
        num_vms = len(num_vms.splitlines())
        return num_vms

    @staticmethod
    def get_libvirt_vms():
        try:
            num_vms = subprocess.check_output(
                ['virsh', '-q', '-c', 'qemu:///system', 'list'],
                stderr=subprocess.STDOUT
            )
            num_vms = len(num_vms.splitlines())
        except subprocess.CalledProcessError as e:
            # virsh unavailable or errored: report zero libvirt VMs.
            num_vms = 0
        return num_vms
class ZenpyCache(object):
    """Wrapper class for the various cachetools caches. Adds ability to change
    cache implementations on the fly and change the maxsize setting."""

    # Names of all concrete cache classes exposed by cachetools
    # (e.g. LRUCache, TTLCache); the abstract base 'Cache' is excluded.
    AVAILABLE_CACHES = [
        c for c in dir(cachetools) if c.endswith('Cache') and c != 'Cache'
    ]

    def __init__(self, cache_impl, maxsize, **kwargs):
        self.cache = self._get_cache_impl(cache_impl, maxsize, **kwargs)
        self.purge_lock = RLock()

    def set_cache_impl(self, cache_impl, maxsize, **kwargs):
        """Swap in a new cache implementation, carrying entries over."""
        new_cache = self._get_cache_impl(cache_impl, maxsize, **kwargs)
        self._populate_new_cache(new_cache)
        self.cache = new_cache

    def pop(self, key, default=None):
        return self.cache.pop(key, default)

    def items(self):
        return self.cache.items()

    @property
    def impl_name(self):
        """Name of the underlying cachetools class."""
        return self.cache.__class__.__name__

    @property
    def maxsize(self):
        return self.cache.maxsize

    def set_maxsize(self, maxsize, **kwargs):
        """Rebuild the cache with a new maxsize, carrying entries over."""
        new_cache = self._get_cache_impl(self.impl_name, maxsize, **kwargs)
        self._populate_new_cache(new_cache)
        self.cache = new_cache

    def purge(self):
        """Remove all entries (serialized via purge_lock)."""
        with self.purge_lock:
            self.cache.clear()

    @property
    def currsize(self):
        return len(self.cache)

    def _populate_new_cache(self, new_cache):
        # Copy entries over; the new cache may evict if it is smaller.
        for key, value in self.cache.items():
            new_cache[key] = value

    def _get_cache_impl(self, cache_impl, maxsize, **kwargs):
        # Resolve the named cachetools class, rejecting unknown names.
        if cache_impl not in self.AVAILABLE_CACHES:
            raise ZenpyCacheException(
                "No such cache: %s, available caches: %s" %
                (cache_impl, str(self.AVAILABLE_CACHES)))
        return getattr(cachetools, cache_impl)(maxsize, **kwargs)

    def __iter__(self):
        return self.cache.__iter__()

    def __getitem__(self, item):
        return self.cache[item]

    def __setitem__(self, key, value):
        # Only deserialized API objects (BaseObject subclasses) may be cached.
        if not issubclass(type(value), BaseObject):
            raise ZenpyCacheException(
                "{} is not a subclass of BaseObject!".format(type(value)))
        self.cache[key] = value

    def __delitem__(self, key):
        del self.cache[key]

    def __contains__(self, item):
        return item in self.cache

    def __len__(self):
        return len(self.cache)
class OnlyIfParser(DecoratorBeforeEvalBaseExtensionParser):
    """Handles the @only_if decorator::

        number_of_copies:
            creator:
                @only_if('BOOK' in self.get('collection.primary', []))
                get_number_of_copies(self.get('recid'))
    """

    __parsername__ = 'only_if'

    @classmethod
    def parse_element(cls, indent_stack):
        # Grammar: the literal "@only_if" (suppressed) followed by a
        # parenthesised expression captured verbatim; keep only its text.
        return (Keyword("@only_if").suppress() +
                originalTextFor(nestedExpr())
                ).setResultsName("only_if").setParseAction(lambda toks: toks[0])

    @classmethod
    def create_element(cls, rule, field_def, content, namespace):
        # Pre-compile the condition for fast repeated evaluation.
        return compile(content, '', 'eval')

    @classmethod
    def evaluate(cls, reader, args):
        """Evaluate the compiled condition; a list/tuple result is truthy
        only if every element is."""
        from invenio.modules.jsonalchemy.registry import functions
        evaluated = try_to_eval(args,
                                functions(reader._json.additional_info.namespace),
                                self=reader._json)
        if not isinstance(evaluated, (list, tuple)):
            return evaluated
        else:
            return all(evaluated)
class Annotations(msrest.serialization.Model):
    """Additional information provided through arbitrary metadata.

    Maps the standard OCI image annotation keys
    (``org.opencontainers.image.*``) onto friendly attribute names:
    ``created`` (build datetime), ``authors``, ``url``, ``documentation``,
    ``source``, ``version``, ``revision``, ``vendor``, ``licenses`` (SPDX
    expression), ``name`` (reference name), ``title`` and ``description``.
    Unmatched keys are collected in ``additional_properties``.
    """

    # Note the double-escaped dots: msrest treats '.' as a path separator,
    # so literal dots in annotation keys must be escaped.
    _attribute_map = {
        'additional_properties': {'key': '', 'type': '{object}'},
        'created': {'key': 'org\\.opencontainers\\.image\\.created', 'type': 'iso-8601'},
        'authors': {'key': 'org\\.opencontainers\\.image\\.authors', 'type': 'str'},
        'url': {'key': 'org\\.opencontainers\\.image\\.url', 'type': 'str'},
        'documentation': {'key': 'org\\.opencontainers\\.image\\.documentation', 'type': 'str'},
        'source': {'key': 'org\\.opencontainers\\.image\\.source', 'type': 'str'},
        'version': {'key': 'org\\.opencontainers\\.image\\.version', 'type': 'str'},
        'revision': {'key': 'org\\.opencontainers\\.image\\.revision', 'type': 'str'},
        'vendor': {'key': 'org\\.opencontainers\\.image\\.vendor', 'type': 'str'},
        'licenses': {'key': 'org\\.opencontainers\\.image\\.licenses', 'type': 'str'},
        'name': {'key': 'org\\.opencontainers\\.image\\.ref\\.name', 'type': 'str'},
        'title': {'key': 'org\\.opencontainers\\.image\\.title', 'type': 'str'},
        'description': {'key': 'org\\.opencontainers\\.image\\.description', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        additional_properties: Optional[Dict[str, Any]] = None,
        created: Optional[datetime.datetime] = None,
        authors: Optional[str] = None,
        url: Optional[str] = None,
        documentation: Optional[str] = None,
        source: Optional[str] = None,
        version: Optional[str] = None,
        revision: Optional[str] = None,
        vendor: Optional[str] = None,
        licenses: Optional[str] = None,
        name: Optional[str] = None,
        title: Optional[str] = None,
        description: Optional[str] = None,
        **kwargs
    ):
        super(Annotations, self).__init__(**kwargs)
        self.additional_properties = additional_properties
        self.created = created
        self.authors = authors
        self.url = url
        self.documentation = documentation
        self.source = source
        self.version = version
        self.revision = revision
        self.vendor = vendor
        self.licenses = licenses
        self.name = name
        self.title = title
        self.description = description
class Department(models.Model):
    """Department table (translated from: "部门表")."""

    title = models.CharField(verbose_name='部门名称', max_length=16)

    def __str__(self):
        return self.title
class SoupProcessorBase(abc.ABC):
    """Class for processing a parsed HTML page."""

    # ``abc.abstractclassmethod`` is deprecated (since Python 3.3) and was
    # wrong here anyway: ``process`` takes ``self`` and is an instance
    # method, not a classmethod.  The annotation is a string forward
    # reference so bs4 need not be imported at class-definition time.
    @abc.abstractmethod
    def process(self, soup: 'bs4.BeautifulSoup') -> str:
        """Process *soup* and return the resulting string."""
        raise NotImplementedError
@autohelp
class bytesIterator(Object):
    """An iterator on a bytestring, producing integers."""

    _immutable_fields_ = "s",
    # Cursor into the underlying bytestring.
    _index = 0

    def __init__(self, s):
        self.s = s

    @method("List", "Any")
    def next(self, ej):
        # Produce [index, byte-value] pairs until exhausted, then fire the
        # ejector per the Monte iteration protocol.
        if self._index < len(self.s):
            rv = [IntObject(self._index), IntObject(ord(self.s[self._index]))]
            self._index += 1
            return rv
        else:
            from typhon.objects.ejectors import throwStr
            throwStr(ej, u"next/1: Iterator exhausted")
class MyGlanceStubClient(glance_stubs.StubGlanceClient):
    """A client that fails the first time, then succeeds."""

    def get(self, image_id):
        # ``tries`` is a module-level one-element list used as shared mutable
        # state: the first call raises ServiceUnavailable, later calls
        # succeed with an empty dict.
        if tries[0] == 0:
            tries[0] = 1
            raise glanceclient.exc.ServiceUnavailable('')
        else:
            return {}
class CNN(nn.Module):
    """LeNet-style CNN with a 2-D bottleneck layer."""

    def __init__(self):
        super(CNN, self).__init__()
        self.num_classes = NUM_CLASSES
        # Two conv/pool stages, then flatten 320 -> 50 features.
        self.features = nn.Sequential(
            nn.Conv2d(1, 10, kernel_size=5),
            nn.MaxPool2d(kernel_size=2, stride=2),
            nn.ReLU(inplace=True),
            nn.Conv2d(10, 20, kernel_size=5),
            nn.MaxPool2d(kernel_size=2, stride=2),
            nn.ReLU(inplace=True),
            Flatten(),
            nn.Linear(320, 50),
            nn.ReLU(inplace=True),
        )
        # 2-D bottleneck (e.g. for embedding visualisation).
        self.bottleneck = nn.Sequential(
            nn.Linear(50, 2)
        )
        # NOTE(review): classifier outputs a hard-coded 10 classes while
        # num_classes is NUM_CLASSES -- confirm they agree.
        self.classifier = nn.Sequential(
            nn.Linear(2, 10)
        )

    def forward(self, x):
        # NOTE(review): forward stops at the 2-D bottleneck and never applies
        # self.classifier -- presumably callers apply it separately; confirm.
        x = self.features(x)
        x = self.bottleneck(x)
        return x

    def extract_features(self, x):
        # Return the 50-D representation before the bottleneck.
        x = self.features(x)
        return x
class InvoicingRequest(Request):
    """A class representing AGMS Invoicing Request objects."""

    def __init__(self, op):
        super(InvoicingRequest, self).__init__(op)
class KALiteTestCase(SecuresyncTestCase):
    """The base class for KA Lite test cases."""

    def reverse(self, url_name, args=None, kwargs=None):
        # Resolve the named URL via the module-level ``reverse`` (which this
        # method intentionally shadows) and prefix the live test server's
        # base URL so tests can issue absolute requests.
        return self.live_server_url + reverse(url_name, args=args, kwargs=kwargs)
class UserException(Exception):
    """User defined exception to raise specific error scenarios."""

    def __init__(self, err_message):
        # Pass the message to Exception so str(exc) and exc.args behave
        # normally; previously only ``self.message`` was set, leaving
        # str(exc) empty and args unset.
        super().__init__(err_message)
        self.message = err_message
class ParallelNodeTerminated(Record):
    """Parallel node terminated.

    A parallel universe program has completed on a node.

    Parameters
    ----------
    year : `str`
        the year to tag the job with
    lines : list
        the strings making up this record
    """

    def __init__(self, year, lines):
        super(ParallelNodeTerminated, self).__init__(year, lines)
class RegisterMixIn(IdChangeKeyMixIn):
    """Base class for classes that can change their list of supported fields
    dynamically."""

    __slots__ = ('__dict__',)

    # Field name after which registered extended-property fields are
    # inserted; concrete subclasses must set this.
    INSERT_AFTER_FIELD = None

    @classmethod
    def register(cls, attr_name, attr_cls):
        """Register *attr_cls* (an ExtendedProperty subclass) under the new
        field name *attr_name*.

        Raises ValueError if INSERT_AFTER_FIELD is unset, the name is taken,
        or *attr_cls* is not an ExtendedProperty subclass.
        """
        if not cls.INSERT_AFTER_FIELD:
            raise ValueError('Class %s is missing INSERT_AFTER_FIELD value' % cls)
        # EAFP: an InvalidField lookup failure means the name is free.
        try:
            cls.get_field_by_fieldname(attr_name)
        except InvalidField:
            pass
        else:
            raise ValueError("'%s' is already registered" % attr_name)
        if not issubclass(attr_cls, ExtendedProperty):
            raise ValueError("%r must be a subclass of ExtendedProperty" % attr_cls)
        # Let the property class validate its own configuration early.
        attr_cls.validate_cls()
        field = ExtendedPropertyField(attr_name, value_cls=attr_cls)
        cls.add_field(field, insert_after=cls.INSERT_AFTER_FIELD)

    @classmethod
    def deregister(cls, attr_name):
        """Remove a previously registered extended-property field."""
        try:
            field = cls.get_field_by_fieldname(attr_name)
        except InvalidField:
            raise ValueError("'%s' is not registered" % attr_name)
        if not isinstance(field, ExtendedPropertyField):
            raise ValueError("'%s' is not registered as an ExtendedProperty" % attr_name)
        cls.remove_field(field)
class Player:
    """Player with a name and a bounded inventory of items."""

    def __init__(self, name='', MAX_ITEMS=0, items=None):
        # BUGFIX: ``items`` previously defaulted to a mutable ``[]``, which
        # Python evaluates once -- every Player built with the default
        # shared (and mutated) the same list.
        self.name = name
        self.MAX_ITEMS = MAX_ITEMS
        self.items = items if items is not None else []

    def inventry(self):
        """Print each held item, or a notice when the inventory is empty."""
        if len(self.items) > 0:
            for item in self.items:
                print(item)
        else:
            print('There are no items in the items list')

    def take(self, new_item):
        """Add *new_item* unless the inventory already holds MAX_ITEMS."""
        if len(self.items) < self.MAX_ITEMS:
            self.items.append(new_item)
        else:
            print('The items for the player is full, cannot add anymore items')

    def drop(self, item):
        """Remove *item* if held, otherwise print a notice."""
        if len(self.items) > 0:
            if item in self.items:
                self.items.remove(item)
            else:
                print('Player doesnt have the item : {}'.format(item))
        else:
            print('Players dosnt have the item : {}'.format(item))
class Poisson1D(ProblemBase):
    """Implementation of the 1D Poission problem.

    Here we define the 1D Poisson problem :math:`-\\Delta u = 0` with
    Dirichlet-Zero boundary conditions. This is the homogeneous problem,
    derive from this class if you want to play around with different RHS.

    Attributes:
        dx (float): mesh size
    """

    def __init__(self, ndofs, *args, **kwargs):
        self.dx = 1.0 / (ndofs + 1)
        # Second-order finite-difference Laplacian, scaled by 1/dx^2.
        A = 1.0 / (self.dx ** 2) * self.__get_system_matrix(ndofs)
        rhs = self.__get_rhs(ndofs)
        super(Poisson1D, self).__init__(ndofs, A, rhs, *args, **kwargs)

    @staticmethod
    def __get_system_matrix(ndofs):
        # Tridiagonal [-1, 2, -1] stencil as a sparse CSC matrix.
        data = np.array([[2] * ndofs, [-1] * ndofs, [-1] * ndofs])
        diags = np.array([0, -1, 1])
        return sp.spdiags(data, diags, ndofs, ndofs, format='csc')

    @staticmethod
    def __get_rhs(ndofs):
        # Homogeneous problem: zero right-hand side.
        return np.zeros(ndofs)

    @property
    def u_exact(self):
        # Exact solution of the homogeneous Dirichlet problem is zero.
        return np.zeros(self.ndofs)

    @property
    def domain(self):
        # Interior grid points x_i = (i + 1) * dx for i = 0..ndofs-1.
        return np.array([(i + 1) * self.dx for i in range(self.ndofs)])

    @ProblemBase.ndofs.setter
    def ndofs(self, val):
        # Changing ndofs rebuilds mesh size, system matrix and RHS so the
        # problem stays consistent.
        ProblemBase.ndofs.fset(self, val)
        self.dx = 1.0 / (val + 1)
        self.A = 1.0 / (self.dx ** 2) * self.__get_system_matrix(val)
        self.rhs = self.__get_rhs(self._ndofs)
class Dice():
    """A die with a configurable number of sides (translated from:
    "定义一个几面的骰子")."""

    def __init__(self, side=6):
        self.side = side

    def roll_dice(self, roll_times):
        """Roll the die *roll_times* times, printing each result."""
        for _ in range(roll_times):
            print(randint(1, self.side))
@OFPMultipartReply.register_stats_type()
@_set_stats_type(ofproto.OFPMP_QUEUE, OFPQueueStats)
@_set_msg_type(ofproto.OFPT_MULTIPART_REPLY)
class OFPQueueStatsReply(OFPMultipartReply):
    """Queue statistics reply message.

    The switch responds with this message to a queue statistics request.

    ================ ======================================================
    Attribute        Description
    ================ ======================================================
    body             List of ``OFPQueueStats`` instance
    ================ ======================================================

    Example::

        @set_ev_cls(ofp_event.EventOFPQueueStatsReply, MAIN_DISPATCHER)
        def queue_stats_reply_handler(self, ev):
            queues = []
            for stat in ev.msg.body:
                queues.append('port_no=%d queue_id=%d '
                              'tx_bytes=%d tx_packets=%d tx_errors=%d '
                              'duration_sec=%d duration_nsec=%d' %
                              (stat.port_no, stat.queue_id,
                               stat.tx_bytes, stat.tx_packets, stat.tx_errors,
                               stat.duration_sec, stat.duration_nsec))
            self.logger.debug('QueueStats: %s', queues)
    """

    def __init__(self, datapath, type_=None, **kwargs):
        # type_ is accepted for API symmetry; the stats type is fixed by the
        # class decorators above.
        super(OFPQueueStatsReply, self).__init__(datapath, **kwargs)
class DaemonPanel(Panel, threading.Thread):
    """Panel that triggers its _update() method at a set rate."""

    def __init__(self, update_rate):
        Panel.__init__(self)
        threading.Thread.__init__(self)
        self.setDaemon(True)  # don't block interpreter shutdown
        self._halt = False  # set by stop() to end the run loop
        self._update_rate = update_rate  # seconds between _update() calls

    def _update(self):
        # Subclasses override with their periodic work.
        pass

    def run(self):
        last_ran = None
        while not self._halt:
            if last_ran and time.time() - last_ran < self._update_rate:
                # Not yet due: nap in small PAUSE_TIME slices so stop()
                # takes effect promptly instead of blocking a full period.
                sleep_until = last_ran + self._update_rate + 0.1
                while not self._halt and time.time() < sleep_until:
                    time.sleep(PAUSE_TIME)
                continue
            self._update()
            last_ran = time.time()

    def stop(self):
        # Request the run loop to exit after its current sleep slice.
        self._halt = True
class BankRegisterViewSet(viewsets.ModelViewSet):
    """API endpoint that allows BankRegisters to be viewed and edited."""

    queryset = BankRegister.objects.all()
    serializer_class = BankRegisterSerializer
class MonoLayerCrI3:
    """A monolayer CrI3 structure.

    INPUTS:
        a: float, lattice constant a, in angstrom
        disp: float, iodine displacement along z-axis, scaled by a
        vac: float, vacuum along z-axis, in angstrom

    ATTRIBUTES:
        a, disp, vac: same as the inputs
        symbols: str tuple, symbol of ions, order matters
        numbers: int tuple, number of ions, order matters
        cell: numpy array, basis vectors, in angstrom
        direct: numpy array, atom positions, in lattice coordinates
        cartesian: numpy array, atom positions, in cartesian coordinates
    """

    symbols = ('Cr', 'I')
    numbers = (2, 6)

    def __init__(self, a, disp=0.23, vac=20.0):
        self.a = a
        self.disp = disp
        self.vac = vac
        # Hexagonal basis in-plane; the c axis spans twice the vacuum.
        self.cell = np.array([
            [a, 0, 0],
            [-a / 2, a / 2 * np.sqrt(3), 0],
            [0, 0, vac * 2],
        ])
        # Fractional z-offset of the two iodine planes about the mid-plane.
        rz = disp * a / vac / 2
        mid, lo, hi = 1 / 2, 1 / 2 - rz, 1 / 2 + rz
        # Two Cr sites on the mid-plane, six I sites split above/below it.
        self.direct = np.array([
            [0, 0, mid],
            [1 / 3, 2 / 3, mid],
            [1 / 3, 0, lo],
            [0, 1 / 3, lo],
            [1 / 3, 1 / 3, hi],
            [2 / 3, 0, hi],
            [0, 2 / 3, hi],
            [2 / 3, 2 / 3, lo],
        ])
        self.cartesian = self.direct @ self.cell
class Rfc2307Config(LdapConfig):
    """An RFC2307 user database configuration."""

    # Marker subclass: all behavior is inherited from LdapConfig.
    pass
class OsfAuthHandler(auth.BaseAuthHandler):
    """Identity lookup via the Open Science Framework."""

    @asyncio.coroutine
    def fetch(self, request, bundle):
        """Resolve the caller's identity against the OSF API.

        Prefers a Bearer token from the incoming request's Authorization
        header, falling back to a ``token`` entry in *bundle*.  Raises
        AuthError for any non-200 response.
        """
        headers = {
            'Content-Type': 'application/json',
        }
        authorization = request.headers.get('Authorization')
        if authorization and authorization.startswith('Bearer '):
            headers['Authorization'] = authorization
        elif 'token' in bundle:
            headers['Authorization'] = 'Bearer ' + bundle['token']
        response = yield from aiohttp.request(
            'get',
            settings.API_URL,
            params=bundle,
            headers=headers
        )
        if response.status != 200:
            # Surface the API's JSON error body when possible, otherwise the
            # raw bytes, so the caller sees what the OSF reported.
            try:
                data = yield from response.json()
            except ValueError:
                data = yield from response.read()
            raise exceptions.AuthError(data, code=response.status)
        return (yield from response.json())
@dataclasses.dataclass <NEW_LINE> class DistrInfo: <NEW_LINE> <INDENT> xs: 'observation values' <NEW_LINE> J_dblocks: 'prior precision diagonal blocks' <NEW_LINE> J_offblocks: 'prior precision offdiagonal blocks' <NEW_LINE> JT_dblocks: 'posterior precision diagonal blocks' <NEW_LINE> JT_offblocks: 'posterior precision off-diagonal blocks' <NEW_LINE> JT_offset: 'posterior offset' <NEW_LINE> Sig: 'Sig' <NEW_LINE> Sigix: 'Sig multiplied by x' <NEW_LINE> def insample_posterior(self): <NEW_LINE> <INDENT> JT_decomp=cr.decompose(self.JT_dblocks,self.JT_offblocks) <NEW_LINE> posterior_mean=cr.solve(JT_decomp,self.JT_offset) <NEW_LINE> cov_dblocks,cov_offblocks=cr.inverse_blocks(JT_decomp) <NEW_LINE> return posterior_mean,cov_dblocks,cov_offblocks <NEW_LINE> <DEDENT> def log_likelihood(self): <NEW_LINE> <INDENT> J_decomp= cr.decompose(self.J_dblocks,self.J_offblocks) <NEW_LINE> Jdet = cr.det(J_decomp) <NEW_LINE> postmahal,JTdet = cr.mahal_and_det(self.JT_dblocks,self.JT_offblocks,self.JT_offset) <NEW_LINE> ldets = tf.reduce_mean(tf.linalg.slogdet((2*np.pi)*self.Sig)[1])*tf.cast(self.xs.shape[0],tf.float64) <NEW_LINE> fwdmahal = tf.reduce_sum(self.Sigix*self.xs) <NEW_LINE> return .5*(Jdet-JTdet - ldets - fwdmahal +postmahal) | Collates some useful information about a model of the form
Z ~ PEG(G)
X[i] ~ N(B[i] Z[ts[idxs[i]],Sig[i])
after observing X | 62598fd926238365f5fad0dc |
class UserExperience(db.Model): <NEW_LINE> <INDENT> __tablename__ = 'user_experience' <NEW_LINE> id = Column(Integer, primary_key=True) <NEW_LINE> content = Column(Text, nullable=False) <NEW_LINE> user_id = db.Column(db.Integer, db.ForeignKey("users.id")) <NEW_LINE> user = db.relation("User", backref=backref( 'experience', uselist=False, cascade='all, delete-orphan')) | 用户的经历 | 62598fd98a349b6b436867ba |
class Task(_messages.Message): <NEW_LINE> <INDENT> kind = _messages.StringField(1, default=u'fusiontables#task') <NEW_LINE> progress = _messages.StringField(2) <NEW_LINE> started = _messages.BooleanField(3) <NEW_LINE> taskId = _messages.IntegerField(4) <NEW_LINE> type = _messages.StringField(5) | Specifies the identifier, name, and type of a task in a table.
Fields:
kind: Type of the resource. This is always "fusiontables#task".
progress: An indication of task progress.
started: false while the table is busy with some other task. true if this
background task is currently running.
taskId: Identifier for the task.
type: Type of background task. One of DELETE_ROWS Deletes one or more
rows from the table. ADD_ROWS "Adds one or more rows to a table.
Includes importing data into a new table and importing more rows into an
existing table. ADD_COLUMN Adds a new column to the table. CHANGE_TYPE
Changes the type of a column. | 62598fd9ad47b63b2c5a7dcc |
class TabRenderer(object): <NEW_LINE> <INDENT> def __init__(self, contents): <NEW_LINE> <INDENT> self._contents = contents <NEW_LINE> <DEDENT> def __call__(self, handler): <NEW_LINE> <INDENT> return generate_display_html( handler, crypto.XsrfTokenManager, self._contents) | Convenience class for creating tabs for rendering in dashboard. | 62598fd9dc8b845886d53b38 |
class Number(RangeValidator): <NEW_LINE> <INDENT> messages = { 'number': _("Please enter a number") } <NEW_LINE> def _to_python(self, value, state): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> value = float(value) <NEW_LINE> try: <NEW_LINE> <INDENT> int_value = int(value) <NEW_LINE> <DEDENT> except OverflowError: <NEW_LINE> <INDENT> int_value = None <NEW_LINE> <DEDENT> if value == int_value: <NEW_LINE> <INDENT> return int_value <NEW_LINE> <DEDENT> return value <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> raise Invalid(self.message('number', state), value, state) | Convert a value to a float or integer.
Tries to convert it to an integer if no information is lost.
Example::
>>> Number.to_python('10')
10
>>> Number.to_python('10.5')
10.5
>>> Number.to_python('ten')
Traceback (most recent call last):
...
Invalid: Please enter a number
>>> Number(min=5).to_python('6.5')
6.5
>>> Number(max=10.5).to_python('11.5')
Traceback (most recent call last):
...
Invalid: Please enter a number that is 10.5 or smaller
>>> Number().to_python('infinity')
inf | 62598fd9ad47b63b2c5a7dcd |
class BaseGeometry: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def area(self): <NEW_LINE> <INDENT> raise Exception("area() is not implemented") | instance of geometry class | 62598fd9091ae35668705196 |
class QuitGanetiException(Exception): <NEW_LINE> <INDENT> pass | Signal Ganeti that it must quit.
This is not necessarily an error (and thus not a subclass of
GenericError), but it's an exceptional circumstance and it is thus
treated. This exception should be instantiated with two values. The
first one will specify the return code to the caller, and the second
one will be the returned result (either as an error or as a normal
result). Usually only the leave cluster rpc call should return
status True (as there it's expected we quit), every other call will
return status False (as a critical error was encountered).
Examples::
# Return a result of "True" to the caller, but quit ganeti afterwards
raise QuitGanetiException(True, None)
# Send an error to the caller, and quit ganeti
raise QuitGanetiException(False, "Fatal safety violation, shutting down") | 62598fd997e22403b383b485 |
class XinputParser(object): <NEW_LINE> <INDENT> _key_map = { "Buttons supported": "buttons_supported", "Button labels": "button_labels", "Button state": "button_state", "Class originated from": "device_class", "Keycodes supported": "keycodes_supported", "Touch mode": "touch_mode", "Max number of touches": "max_touch", } <NEW_LINE> def __init__(self, stream): <NEW_LINE> <INDENT> self.stream = stream <NEW_LINE> <DEDENT> def _parseKey(self, key): <NEW_LINE> <INDENT> if " " in key: <NEW_LINE> <INDENT> return self._key_map.get(key) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return key.lower() <NEW_LINE> <DEDENT> <DEDENT> def _parseValue(self, value): <NEW_LINE> <INDENT> if value is not None: <NEW_LINE> <INDENT> value = value.strip() <NEW_LINE> if not value: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> match = CLASS_VALUE_RE.match(value) <NEW_LINE> if match: <NEW_LINE> <INDENT> return match.group("class") <NEW_LINE> <DEDENT> if '"' in value: <NEW_LINE> <INDENT> return list(self._parseList(value)) <NEW_LINE> <DEDENT> <DEDENT> return value <NEW_LINE> <DEDENT> def _parseList(self, string): <NEW_LINE> <INDENT> for element in LIST_VALUE_RE.split(string)[1::2]: <NEW_LINE> <INDENT> if element.startswith('"') and element.endswith('"'): <NEW_LINE> <INDENT> yield element.strip('"') <NEW_LINE> <DEDENT> elif element == "None": <NEW_LINE> <INDENT> yield None <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def run(self, result): <NEW_LINE> <INDENT> output = self.stream.read() <NEW_LINE> for record in re.split(r"\n{2,}", output): <NEW_LINE> <INDENT> record = record.strip() <NEW_LINE> if not record: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> lines = record.split("\n") <NEW_LINE> line = lines.pop(0) <NEW_LINE> match = DEVICE_RE.match(line) <NEW_LINE> if not match: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> device = { "id": int(match.group("id")), "name": match.group("name"), } <NEW_LINE> result.addXinputDevice(device) <NEW_LINE> device_class = {} <NEW_LINE> prefix = "" 
<NEW_LINE> for line in lines: <NEW_LINE> <INDENT> line = line.strip() <NEW_LINE> match = ATTRIBUTE_RE.match(line) <NEW_LINE> if not match: <NEW_LINE> <INDENT> if line.startswith("Scroll"): <NEW_LINE> <INDENT> prefix = "scroll_" <NEW_LINE> <DEDENT> elif line.startswith("Detail"): <NEW_LINE> <INDENT> prefix = "detail_" <NEW_LINE> <DEDENT> continue <NEW_LINE> <DEDENT> key = self._parseKey(match.group("key")) <NEW_LINE> if not key: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> value = self._parseValue(match.group("value")) <NEW_LINE> if key == "device_class" and device_class: <NEW_LINE> <INDENT> result.addXinputDeviceClass(device, device_class) <NEW_LINE> device_class = {} <NEW_LINE> prefix = "" <NEW_LINE> <DEDENT> device_class[prefix + key] = value <NEW_LINE> <DEDENT> if device_class: <NEW_LINE> <INDENT> result.addXinputDeviceClass(device, device_class) <NEW_LINE> <DEDENT> <DEDENT> return result | Parser for the xinput command. | 62598fd9ad47b63b2c5a7dce |
class InvalidAuthorizationHeader(DaedalusError): <NEW_LINE> <INDENT> pass | Thrown when the authorization is invalid. | 62598fd9283ffb24f3cf3dfe |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.