code stringlengths 4 4.48k | docstring stringlengths 1 6.45k | _id stringlengths 24 24 |
|---|---|---|
class WPSite: <NEW_LINE> <INDENT> PROTOCOL = "https" <NEW_LINE> DEFAULT_TITLE = "New WordPress" <NEW_LINE> DEFAULT_TAGLINE = "EPFL" <NEW_LINE> WP_VERSION = Utils.get_mandatory_env(key="WP_VERSION") <NEW_LINE> def __init__(self, openshift_env, wp_site_url, wp_site_title=None, wp_tagline=None): <NEW_LINE> <INDENT> self.openshift_env = openshift_env.lower() <NEW_LINE> url = urlparse(wp_site_url.lower()) <NEW_LINE> validate_openshift_env(self.openshift_env) <NEW_LINE> if wp_site_title is not None: <NEW_LINE> <INDENT> validate_string(wp_site_title) <NEW_LINE> <DEDENT> if wp_tagline is not None: <NEW_LINE> <INDENT> validate_string(wp_tagline) <NEW_LINE> <DEDENT> self.domain = url.netloc.strip('/') <NEW_LINE> self.folder = url.path.strip('/') <NEW_LINE> self.wp_site_title = wp_site_title or self.DEFAULT_TITLE <NEW_LINE> self.wp_tagline = wp_tagline or self.DEFAULT_TAGLINE <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.url <NEW_LINE> <DEDENT> @property <NEW_LINE> def path(self): <NEW_LINE> <INDENT> if not self.folder: <NEW_LINE> <INDENT> return "/srv/{0.openshift_env}/{0.domain}/htdocs".format(self) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return "/srv/{0.openshift_env}/{0.domain}/htdocs/{0.folder}".format(self) <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def url(self): <NEW_LINE> <INDENT> return "{0.PROTOCOL}://{0.domain}/{0.folder}".format(self) <NEW_LINE> <DEDENT> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> if not self.folder: <NEW_LINE> <INDENT> return self.domain.split('.')[0] <NEW_LINE> <DEDENT> return self.folder.split('/')[-1] <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_path(cls, path): <NEW_LINE> <INDENT> given_path = os.path.abspath(path).rstrip('/') <NEW_LINE> env_match = re.match("/srv/([^/]+)", given_path) <NEW_LINE> if env_match is None or not env_match.groups(): <NEW_LINE> <INDENT> raise ValueError("given path '{}' should be included in a valid openshift_env".format(given_path)) <NEW_LINE> <DEDENT> 
openshift_env = env_match.groups()[0] <NEW_LINE> if not os.path.isdir(given_path): <NEW_LINE> <INDENT> logging.warning("given path '%s' is not a valid dir", given_path) <NEW_LINE> <DEDENT> if 'htdocs' not in given_path: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> regex = re.compile("/([^/]*)") <NEW_LINE> directories = regex.findall(os.path.abspath(path)) <NEW_LINE> htdocs_index = directories.index('htdocs') <NEW_LINE> domain = directories[htdocs_index-1] <NEW_LINE> folders = '/'.join(directories[htdocs_index+1:]) <NEW_LINE> url = "{}://{}/{}".format(cls.PROTOCOL, domain, folders) <NEW_LINE> return cls(openshift_env, url) | Pure python object that will define a WP site by its path & url
its title is optionnal, just to provide a default value to the final user | 62598fac8e7ae83300ee9055 |
class Test_OSCmdBase(_Test_OSCmd): <NEW_LINE> <INDENT> oscmdcls = oscmd.OSCmdBase <NEW_LINE> def test_oscmd_methods(self): <NEW_LINE> <INDENT> for meth, nargs in self.expected_methods: <NEW_LINE> <INDENT> self.assertRaises( NotImplementedError, getattr(self.instance, meth), *tuple(range(nargs))) | Tests for the OSCmdBase class. | 62598fac2ae34c7f260ab095 |
class Master(Base): <NEW_LINE> <INDENT> __tablename__ = 'master' <NEW_LINE> __table_args__ = {'autoload':True} | The Mapped class for the master table. | 62598fac7d847024c075c377 |
class Place(object): <NEW_LINE> <INDENT> def __init__(self, name, exit=None): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.exit = exit <NEW_LINE> self.bees = [] <NEW_LINE> self.ant = None <NEW_LINE> self.entrance = None <NEW_LINE> if self.exit: <NEW_LINE> <INDENT> exit.entrance = self <NEW_LINE> <DEDENT> <DEDENT> def add_insect(self, insect): <NEW_LINE> <INDENT> if insect.is_ant: <NEW_LINE> <INDENT> if self.ant is None: <NEW_LINE> <INDENT> self.ant = insect <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if Ant.can_contain(self.ant, insect): <NEW_LINE> <INDENT> insect.place = self <NEW_LINE> self.ant.ant = insect <NEW_LINE> <DEDENT> elif Ant.can_contain(insect, self.ant): <NEW_LINE> <INDENT> insect.place = self <NEW_LINE> insect.ant = self.ant <NEW_LINE> self.ant = insect <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> assert self.ant is None, 'Two ants in {0}'.format(self) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> self.bees.append(insect) <NEW_LINE> <DEDENT> insect.place = self <NEW_LINE> <DEDENT> def remove_insect(self, insect): <NEW_LINE> <INDENT> if insect.is_ant: <NEW_LINE> <INDENT> if isinstance(insect, QueenAnt) and insect.identity: <NEW_LINE> <INDENT> self.ant = insect <NEW_LINE> <DEDENT> elif insect.is_ant: <NEW_LINE> <INDENT> if self.ant is insect: <NEW_LINE> <INDENT> if hasattr(self.ant, 'container') and self.ant.container: <NEW_LINE> <INDENT> self.ant = self.ant.ant <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.ant = None <NEW_LINE> <DEDENT> insect.place = None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if hasattr(self.ant, 'container') and self.ant.container and self.ant.ant is insect: <NEW_LINE> <INDENT> self.ant = self.ant <NEW_LINE> self.ant.ant = None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> assert False, '{0} is not in {1}'.format(insect, self) <NEW_LINE> <DEDENT> insect.place = None <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> self.bees.remove(insect) <NEW_LINE> insect.place = None 
<NEW_LINE> <DEDENT> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.name | A Place holds insects and has an exit to another Place. | 62598fac10dbd63aa1c70b66 |
class UnitTest(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.pidfile = "PidFile" <NEW_LINE> self.argv_list = ["program", "arg1", "arg2"] <NEW_LINE> self.stdin = "/path/file1" <NEW_LINE> self.stdout = "/path/file2" <NEW_LINE> self.stderr = "/path/file3" <NEW_LINE> <DEDENT> def test_stderr_arg(self): <NEW_LINE> <INDENT> daemon_inst = gen_class.Daemon(self.pidfile, stderr=self.stderr) <NEW_LINE> self.assertEqual((daemon_inst.stderr, daemon_inst.pidfile), (self.stderr, self.pidfile)) <NEW_LINE> <DEDENT> def test_stdout_arg(self): <NEW_LINE> <INDENT> daemon_inst = gen_class.Daemon(self.pidfile, stdout=self.stdout) <NEW_LINE> self.assertEqual((daemon_inst.stdout, daemon_inst.pidfile), (self.stdout, self.pidfile)) <NEW_LINE> <DEDENT> def test_stdin_arg(self): <NEW_LINE> <INDENT> daemon_inst = gen_class.Daemon(self.pidfile, stdin=self.stdin) <NEW_LINE> self.assertEqual((daemon_inst.stdin, daemon_inst.pidfile), (self.stdin, self.pidfile)) <NEW_LINE> <DEDENT> def test_argv_list(self): <NEW_LINE> <INDENT> daemon_inst = gen_class.Daemon(self.pidfile, argv_list=self.argv_list) <NEW_LINE> self.assertEqual((daemon_inst.argv_list, daemon_inst.pidfile), (self.argv_list, self.pidfile)) <NEW_LINE> <DEDENT> def test_default_setting(self): <NEW_LINE> <INDENT> daemon_inst = gen_class.Daemon(self.pidfile) <NEW_LINE> self.assertEqual((daemon_inst.argv_list, daemon_inst.pidfile), ([], self.pidfile)) | Class: UnitTest
Description: Class which is a representation of a unit testing.
Methods:
setUp
test_stderr_arg
test_stdout_arg
test_stdin_arg
test_argv_list
test_default_setting | 62598facadb09d7d5dc0a53d |
class PhpcsFixThisDirectoryCommand(sublime_plugin.WindowCommand): <NEW_LINE> <INDENT> def run(self, paths=[]): <NEW_LINE> <INDENT> cmd = PhpcsCommand.instance(self.window.active_view()) <NEW_LINE> cmd.fix_standards_errors(os.path.normpath(paths[0])) <NEW_LINE> <DEDENT> def is_enabled(self): <NEW_LINE> <INDENT> if Pref.php_cs_fixer_executable_path != '': <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> def is_visible(self, paths=[]): <NEW_LINE> <INDENT> if Pref.php_cs_fixer_executable_path != '': <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> def description(self, paths=[]): <NEW_LINE> <INDENT> return 'Fix this directory (PHP-CS-Fixer)' | Command to use php-cs-fixer to 'fix' the directory | 62598fac8e71fb1e983bba66 |
class ApplicationLauncher(ftrack_connect.application.ApplicationLauncher): <NEW_LINE> <INDENT> def _getApplicationEnvironment(self, application, context): <NEW_LINE> <INDENT> environment = super( ApplicationLauncher, self )._getApplicationEnvironment( application, context ) <NEW_LINE> hiero_plugin_path = os.path.join( FTRACK_CONNECT_NUKE_STUDIO_PATH, 'plugin' ) <NEW_LINE> environment = ftrack_connect.application.appendPath( hiero_plugin_path, 'HIERO_PLUGIN_PATH', environment ) <NEW_LINE> processors_hooks_path = os.path.join( FTRACK_CONNECT_NUKE_STUDIO_PATH, 'processor' ) <NEW_LINE> environment = ftrack_connect.application.appendPath( processors_hooks_path, 'FTRACK_EVENT_PLUGIN_PATH', environment ) <NEW_LINE> application_hooks_path = os.path.join( FTRACK_CONNECT_NUKE_STUDIO_PATH, 'application_hook' ) <NEW_LINE> environment = ftrack_connect.application.appendPath( application_hooks_path, 'FTRACK_EVENT_PLUGIN_PATH', environment ) <NEW_LINE> return environment | Launch nuke studio. | 62598fac8c0ade5d55dc366b |
class ProductViewSet(CartMixin, viewsets.ModelViewSet): <NEW_LINE> <INDENT> queryset = Product.objects.all() <NEW_LINE> serializer_class = ProductSerializer <NEW_LINE> action_to_serializer = { 'list': ProductListRetrieveSerializer, 'retrieve': ProductListRetrieveSerializer } <NEW_LINE> def get_serializer_class(self): <NEW_LINE> <INDENT> return self.action_to_serializer.get( self.action, self.serializer_class ) | вывод списка товаров и конкретного товара | 62598fac4a966d76dd5eee94 |
class TestTeamEventStatusPlayoff(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def testTeamEventStatusPlayoff(self): <NEW_LINE> <INDENT> pass | TeamEventStatusPlayoff unit test stubs | 62598fac796e427e5384e747 |
class PlotSender(QObject): <NEW_LINE> <INDENT> done = pyqtSignal() <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> self.runner = None <NEW_LINE> <DEDENT> def plot(self, plottables=list(), callback=None): <NEW_LINE> <INDENT> from .project_model import ProjectModel <NEW_LINE> ofp = StringIO() <NEW_LINE> plottable = plottables[0] <NEW_LINE> plottable = plottable.transform(*ProjectModel.current.get_transform(plottable)) <NEW_LINE> ProjectModel.current.machine.post.write_lines_to_fp( plottable, ofp) <NEW_LINE> self.gcode = ofp.getvalue() <NEW_LINE> logger.debug("GCode is %s", len(self.gcode)) <NEW_LINE> logger.debug("Callback is %s", callback) <NEW_LINE> self.kill_plot = threading.Event() <NEW_LINE> self.runner = threading.Thread(target=self.plot_monitor, args=[callback, self.kill_plot], daemon=True) <NEW_LINE> self.runner.start() <NEW_LINE> return True <NEW_LINE> <DEDENT> def pause(self, paused=True): <NEW_LINE> <INDENT> from .project_model import ProjectModel <NEW_LINE> ProjectModel.current.machine.protocol.paused = paused <NEW_LINE> <DEDENT> def plot_monitor(self, callback): <NEW_LINE> <INDENT> from .project_model import ProjectModel <NEW_LINE> sem = threading.Semaphore() <NEW_LINE> def _safe_callback(*args, **kw): <NEW_LINE> <INDENT> logger.debug("Callback: %s", args) <NEW_LINE> with sem: <NEW_LINE> <INDENT> callback(*args, **kw) <NEW_LINE> <DEDENT> <DEDENT> logger.debug("The LM machine callback is %s", callback) <NEW_LINE> ProjectModel.current.machine.plot(self.gcode, callback) | slices and sends the plottables | 62598fac5fcc89381b266126 |
class AltoArchive(abc.ABCMeta('ABC', (object,), {})): <NEW_LINE> <INDENT> def __init__(self, filename): <NEW_LINE> <INDENT> self.filename = filename <NEW_LINE> if ".zip" in self.filename: <NEW_LINE> <INDENT> stream = open_stream(self.filename) <NEW_LINE> self.zip = zipfile.ZipFile(stream) <NEW_LINE> self.filenames = [entry.filename for entry in self.zip.infolist()] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.filenames = [entry for entry in listdir(self.filename) if isfile(join(self.filename, entry))] <NEW_LINE> <DEDENT> document_pattern = re.compile(self.get_document_pattern()) <NEW_LINE> page_pattern = re.compile(self.get_page_pattern()) <NEW_LINE> document_matches = [ _f for _f in [document_pattern.match(name) for name in self.filenames] if _f] <NEW_LINE> page_matches = [ _f for _f in [page_pattern.match(name) for name in self.filenames] if _f] <NEW_LINE> self.document_codes = {match.group(1): [] for match in document_matches} <NEW_LINE> for match in page_matches: <NEW_LINE> <INDENT> self.document_codes[match.group(1)].append(match.group(2)) <NEW_LINE> <DEDENT> <DEDENT> def __getitem__(self, index): <NEW_LINE> <INDENT> return Document(list(self.document_codes.keys())[index], self) <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> for document in self.document_codes: <NEW_LINE> <INDENT> yield Document(document, self) <NEW_LINE> <DEDENT> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return len(self.document_codes) <NEW_LINE> <DEDENT> @abc.abstractmethod <NEW_LINE> def get_document_pattern(self): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> @abc.abstractmethod <NEW_LINE> def get_page_pattern(self): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> @abc.abstractmethod <NEW_LINE> def get_document_info(self, document_code): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> @abc.abstractmethod <NEW_LINE> def get_page_info(self, document_code, page_code): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> @abc.abstractmethod <NEW_LINE> def open_document(self, 
document_code): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> @abc.abstractmethod <NEW_LINE> def open_page(self, document_code, page_code): <NEW_LINE> <INDENT> return | Abstract base class for object model representation of ZIP|UNZIP archive
of files in ALTO format. | 62598fac60cbc95b06364302 |
class KeyValueTag(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.swagger_types = { 'key_value_tag_ref': 'str', 'key': 'str', 'value': 'str' } <NEW_LINE> self.attribute_map = { 'key_value_tag_ref': 'keyValueTagRef', 'key': 'key', 'value': 'value' } <NEW_LINE> self._key_value_tag_ref = None <NEW_LINE> self._key = None <NEW_LINE> self._value = None <NEW_LINE> <DEDENT> @property <NEW_LINE> def key_value_tag_ref(self): <NEW_LINE> <INDENT> return self._key_value_tag_ref <NEW_LINE> <DEDENT> @key_value_tag_ref.setter <NEW_LINE> def key_value_tag_ref(self, key_value_tag_ref): <NEW_LINE> <INDENT> self._key_value_tag_ref = key_value_tag_ref <NEW_LINE> <DEDENT> @property <NEW_LINE> def key(self): <NEW_LINE> <INDENT> return self._key <NEW_LINE> <DEDENT> @key.setter <NEW_LINE> def key(self, key): <NEW_LINE> <INDENT> self._key = key <NEW_LINE> <DEDENT> @property <NEW_LINE> def value(self): <NEW_LINE> <INDENT> return self._value <NEW_LINE> <DEDENT> @value.setter <NEW_LINE> def value(self, value): <NEW_LINE> <INDENT> self._value = value <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in iteritems(self.swagger_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> if self is None: <NEW_LINE> <INDENT> return None 
<NEW_LINE> <DEDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if self is None or other is None: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other | NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually. | 62598fac7cff6e4e811b59df |
class VirtualButton: <NEW_LINE> <INDENT> FILE_NAME = 'VIRTUAL_BUTTON' <NEW_LINE> def read(self): <NEW_LINE> <INDENT> file_exists = os.path.exists(self.FILE_NAME) <NEW_LINE> if file_exists: <NEW_LINE> <INDENT> os.remove(self.FILE_NAME) <NEW_LINE> <DEDENT> return file_exists | The virtual button can be pressed by creating an empty file VIRTUAL_BUTTON
at the same place as this script. The file will be deleted automatically
after it has been read. | 62598fac8e7ae83300ee9056 |
class UserToGroup(Base): <NEW_LINE> <INDENT> __tablename__ = 'User_To_Group' <NEW_LINE> ID = Column(Integer, primary_key=True, autoincrement=True) <NEW_LINE> User_Id = Column(Integer, ForeignKey('User_Profile.ID')) <NEW_LINE> Group_Id = Column(Integer, ForeignKey('Server_Group.ID')) <NEW_LINE> __table_args__ =(UniqueConstraint('User_Id','Group_Id',name='uix_uid_gid')) | 关联 user 和 Group | 62598facf9cc0f698b1c52a3 |
class LatLng(Field): <NEW_LINE> <INDENT> VALUES_OUT_OF_RANGE = "All values must be numbers in the range -180.0 to 180.0" <NEW_LINE> WRONG_SIZE = "A point must have 2 values" <NEW_LINE> NOT_STRING_OR_LIST = "Expected a comma-separated list of values or a list or tuple object." <NEW_LINE> def _validate(self, value): <NEW_LINE> <INDENT> if not isinstance(value, (list, tuple)): <NEW_LINE> <INDENT> if not isinstance(value, basestring): <NEW_LINE> <INDENT> raise ValidationError(self.NOT_STRING_OR_LIST) <NEW_LINE> <DEDENT> value = [v.strip() for v in value.split(",")] <NEW_LINE> <DEDENT> if len(value) != 2: <NEW_LINE> <INDENT> raise ValidationError(self.WRONG_SIZE) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> value = [float(v) for v in value] <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> raise ValidationError(self.VALUES_OUT_OF_RANGE) <NEW_LINE> <DEDENT> for v in value: <NEW_LINE> <INDENT> if not (-180.0 <= v <= 180.0): <NEW_LINE> <INDENT> raise ValidationError(self.VALUES_OUT_OF_RANGE) <NEW_LINE> <DEDENT> <DEDENT> return tuple(value) | Passes a geographical point in for form of a list, tuple or comma-separated string::
v = LatLng()
v.validate("42.76066, -84.9929") # ok -> (42.76066, -84.9929)
v.validate((42.76066, -84.9929)) # ok
v.validate("234,56756.453") # oops | 62598fac7d43ff24874273dc |
class OutputFileForThreads: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.output = {} <NEW_LINE> self.lock = threading.Lock() <NEW_LINE> <DEDENT> def write(self, data): <NEW_LINE> <INDENT> thread_name = threading.currentThread().getName() <NEW_LINE> with self.lock: <NEW_LINE> <INDENT> if thread_name in self.output: <NEW_LINE> <INDENT> self.output[thread_name].append(data) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.output[thread_name] = [data] | Collates writes according to thread name. | 62598fac45492302aabfc485 |
class SQL: <NEW_LINE> <INDENT> def __init__(self, command, cursor=None, alchemy=False): <NEW_LINE> <INDENT> self.alchemy = alchemy <NEW_LINE> self.command = command <NEW_LINE> self.cursor = cursor <NEW_LINE> <DEDENT> def ex(self, p=False): <NEW_LINE> <INDENT> command = self.command <NEW_LINE> if p or self.cursor is None: <NEW_LINE> <INDENT> return command <NEW_LINE> <DEDENT> if self.alchemy: <NEW_LINE> <INDENT> import pandas as pd <NEW_LINE> df = pd.read_sql_query(command, self.cursor) <NEW_LINE> return df <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.cursor.execute(command) <NEW_LINE> return self.cursor.fetchall() <NEW_LINE> <DEDENT> <DEDENT> def where(self, condition): <NEW_LINE> <INDENT> command = self.command <NEW_LINE> command = command[:-1] + f'WHERE {condition}\n;' <NEW_LINE> self.command = command <NEW_LINE> return self | The Select object allows a select statement to be extended with methods
like where before being executed with .ex()
params:
alchemy when set to true, assumes that cursor is an sqlalchemy
engine. This results in some sql commands being
returned as a DataFrame. | 62598fac460517430c432037 |
class CommandImportPage(Command): <NEW_LINE> <INDENT> name = "import_page" <NEW_LINE> needs_config = False <NEW_LINE> doc_usage = "[options] page_url [page_url,...]" <NEW_LINE> doc_purpose = "import arbitrary web pages" <NEW_LINE> def _execute(self, options, args): <NEW_LINE> <INDENT> for url in args: <NEW_LINE> <INDENT> self._import_page(url) <NEW_LINE> <DEDENT> <DEDENT> def _import_page(self, url): <NEW_LINE> <INDENT> r = requests.get(url) <NEW_LINE> if 199 < r.status_code < 300: <NEW_LINE> <INDENT> doc = lxml.html.fromstring(r.content) <NEW_LINE> title = doc.find('*//title').text_content().decode('utf-8') <NEW_LINE> slug = utils.slugify(title) <NEW_LINE> nodes = list(libextract.api.extract(r.content)) <NEW_LINE> lengths = [len(n.text_content()) for n in nodes] <NEW_LINE> node = nodes[lengths.index(max(lengths))] <NEW_LINE> document = doc_template.format( title=title, slug=slug, content=lxml.html.tostring(node, encoding='utf8', method='html', pretty_print=True).decode('utf8') ) <NEW_LINE> with codecs.open(slug + '.html', 'w+', encoding='utf-8') as outf: <NEW_LINE> <INDENT> outf.write(document) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> LOGGER.error('Error fetching URL: {}'.format(url)) | Import a Page. | 62598facf7d966606f747f99 |
class SpecInstallRenderer(SpecSectionRenderer): <NEW_LINE> <INDENT> obj = SpecStInstall | %changelog renderer
@cvar obj: sections rendered by this renderer | 62598fac6aa9bd52df0d4e7c |
class UniformFlowEnvironment( Environment): <NEW_LINE> <INDENT> ma = Float(0.0, desc="flow mach number") <NEW_LINE> fdv = CArray( dtype=float64, shape=(3, ), value=array((1.0, 0, 0)), desc="flow direction") <NEW_LINE> digest = Property( depends_on=['c', 'ma', 'fdv'], ) <NEW_LINE> @cached_property <NEW_LINE> def _get_digest( self ): <NEW_LINE> <INDENT> return digest( self ) <NEW_LINE> <DEDENT> def _r( self, gpos, mpos=0.0): <NEW_LINE> <INDENT> if isscalar(mpos): <NEW_LINE> <INDENT> mpos = array((0, 0, 0), dtype = float32)[:, newaxis] <NEW_LINE> <DEDENT> fdv = self.fdv/sqrt((self.fdv*self.fdv).sum()) <NEW_LINE> mpos = mpos[:, newaxis, :] <NEW_LINE> rmv = gpos[:, :, newaxis]-mpos <NEW_LINE> rm = sqrt(sum(rmv*rmv, 0)) <NEW_LINE> macostheta = (self.ma*sum(rmv.reshape((3, -1))*fdv[:, newaxis], 0) /rm.reshape(-1)).reshape(rm.shape) <NEW_LINE> rm *= 1/(-macostheta + sqrt(macostheta*macostheta-self.ma*self.ma+1)) <NEW_LINE> if rm.shape[1] == 1: <NEW_LINE> <INDENT> rm = rm[:, 0] <NEW_LINE> <DEDENT> return rm | An acoustic environment with uniform flow.
This class provides the facilities to calculate the travel time (distances)
between grid point locations and microphone locations in a uniform flow
field. | 62598faceab8aa0e5d30bd40 |
class DBFChunkedUpload(ChunkedUpload): <NEW_LINE> <INDENT> pass | For now we need to create our own subclass of ChunkedUpload
because the chunked_upload package does not provide migrations.
As soon as
https://github.com/juliomalegria/django-chunked-upload/pull/21 is
merged, we can remove this. | 62598fac5fc7496912d4825c |
class PyTest(TestCommand): <NEW_LINE> <INDENT> def initialize_options(self): <NEW_LINE> <INDENT> TestCommand.initialize_options(self) <NEW_LINE> self.pytest_args = [ '-v', '--pylama', '--cov-report=term-missing', '--cov=bot_calendario_telegram', 'tests/' ] <NEW_LINE> <DEDENT> def run_tests(self): <NEW_LINE> <INDENT> import pytest <NEW_LINE> errno = pytest.main(self.pytest_args) <NEW_LINE> sys.exit(errno) | Run test suite. | 62598fac5fdd1c0f98e5df4a |
class BankAccount(FundingInstrument): <NEW_LINE> <INDENT> type = 'bank_accounts' <NEW_LINE> uri_gen = txwac.URIGen('/bank_accounts', '{bank_account}') <NEW_LINE> def verify(self): <NEW_LINE> <INDENT> return BankAccountVerification( href=self.bank_account_verifications.href ).save() | A BankAccount is both a source, and a destination of, funds. You may
create Debits and Credits to and from, this funding instrument. | 62598fac38b623060ffa904e |
class NsPackageCompliance(ExceptionMessage): <NEW_LINE> <INDENT> pass | Network Service package contents do not comply with the definition. | 62598fac009cb60464d014d5 |
class SvdRotationRateRest(SvdRotationRate): <NEW_LINE> <INDENT> def __init__(self, reload_ = False, training = True, rmnan = True): <NEW_LINE> <INDENT> SvdRotationRate.__init__(self, "rest", reload_, training, rmnan) | Raw rotationrate for rest phase | 62598fac3346ee7daa337623 |
class GeoDatasetCategoryListView(OrganizationViewMixin, ManageViewMixin, ListView): <NEW_LINE> <INDENT> model = GeoDatasetCategory <NEW_LINE> paginate_by = 10 <NEW_LINE> context_object_name = 'geodatasetcategories' | List all geodataset categories | 62598fac7047854f4633f38f |
class Index(_IndexBase, total=False): <NEW_LINE> <INDENT> name: typing.Optional[str] <NEW_LINE> unique: bool | Index schema. | 62598fac66656f66f7d5a3a5 |
class QuestionerDictionary(Questioner): <NEW_LINE> <INDENT> def __init__(self, dictionary=None): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> <DEDENT> except NotImplementedError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> self.dictionary = {} if dictionary is None else dictionary <NEW_LINE> <DEDENT> def _get(self, key): <NEW_LINE> <INDENT> return self.dictionary.get(key) | Stores settings in a dictionary, which should be provided in the constructor | 62598fac2ae34c7f260ab097 |
class BannerSerializer(serializers.ModelSerializer): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> model = Banner <NEW_LINE> fields = '__all__' | 首页轮播图序列化类 | 62598fac55399d3f056264d9 |
class DGFTreeSelectWidget(TextWidget): <NEW_LINE> <INDENT> klass = u'dgf-tree-select-widget' <NEW_LINE> def __init__(self, request): <NEW_LINE> <INDENT> super(DGFTreeSelectWidget, self).__init__(request) <NEW_LINE> self.terms = [] <NEW_LINE> <DEDENT> @property <NEW_LINE> def items(self): <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> def update(self): <NEW_LINE> <INDENT> super(DGFTreeSelectWidget, self).update() <NEW_LINE> initialize_url_provider(self) <NEW_LINE> <DEDENT> def render(self): <NEW_LINE> <INDENT> initialize_slave_chain(self) <NEW_LINE> return super(DGFTreeSelectWidget, self).render() | A data grid widget which does nested master-slave
drop down menus using <select>. | 62598fac7b180e01f3e4902b |
class Help(object): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def setup(ap): <NEW_LINE> <INDENT> ap.add_argument('command', help='The command to print help for', nargs='?') <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def run(ctx, args): <NEW_LINE> <INDENT> if args.command is None: <NEW_LINE> <INDENT> ctx.subcommand_parsers['help'].print_help() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> subcmd_parser = ctx.subcommand_parsers[args.command] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> ctx.error('Unknown sub-command: %s' % args.command) <NEW_LINE> <DEDENT> subcmd_parser.print_help() | Displays help about sub-commands | 62598facadb09d7d5dc0a53f |
@dataclass <NEW_LINE> class CodeSystemConceptDesignation(BackboneElement): <NEW_LINE> <INDENT> resource_type: ClassVar[str] = "CodeSystemConceptDesignation" <NEW_LINE> language: Optional[str] = None <NEW_LINE> use: Optional[Coding] = None <NEW_LINE> value: str = None | Additional representations for the concept.
Additional representations for the concept - other languages, aliases,
specialized purposes, used for particular purposes, etc. | 62598fac8c0ade5d55dc366c |
class DatabaseRecordError(Exception): <NEW_LINE> <INDENT> pass | Raised when mongodb document has wrong format or does not exists | 62598fac4a966d76dd5eee96 |
class GeneralDecoderRNN(nn.Module): <NEW_LINE> <INDENT> def __init__(self, input_size, hidden_size, output_size): <NEW_LINE> <INDENT> super(GeneralDecoderRNN, self).__init__() <NEW_LINE> self.name = 'GeneralDecoderRNN' <NEW_LINE> self.hidden_size = hidden_size <NEW_LINE> self.gru = nn.GRU(input_size, self.hidden_size) <NEW_LINE> self.out = nn.Linear(self.hidden_size, output_size) <NEW_LINE> self.softmax = nn.LogSoftmax(dim=1) <NEW_LINE> <DEDENT> def forward(self, input, hidden): <NEW_LINE> <INDENT> output = torch.tensor(input, dtype=torch.float32, device=DEVICE).view(1,1,-1) <NEW_LINE> output, hidden = self.gru(output, hidden) <NEW_LINE> output = self.softmax(self.out(output[0])) <NEW_LINE> return output, hidden <NEW_LINE> <DEDENT> def initHidden(self): <NEW_LINE> <INDENT> return torch.zeros(1, 1, self.hidden_size, device=DEVICE) | Vanilla decoder (WITH NO EMBEDDINGS) which decodes based on single context vector | 62598fac56b00c62f0fb286a |
class VirtualNetworkGatewayPaged(Paged): <NEW_LINE> <INDENT> _attribute_map = { 'next_link': {'key': 'nextLink', 'type': 'str'}, 'current_page': {'key': 'value', 'type': '[VirtualNetworkGateway]'} } <NEW_LINE> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super(VirtualNetworkGatewayPaged, self).__init__(*args, **kwargs) | A paging container for iterating over a list of :class:`VirtualNetworkGateway <azure.mgmt.network.v2016_09_01.models.VirtualNetworkGateway>` object | 62598fac99cbb53fe6830e8d |
class TemplateRecord(dict, DictRecord): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> date = datetime.datetime.utcnow().isoformat()[:-3] <NEW_LINE> self['ttl'] = int(time.mktime(time.strptime( date, "%Y-%m-%dT%H:%M:%S.%f"))) <NEW_LINE> self['date'] = date | Generic Lifoid message | 62598fac30dc7b766599f803 |
class PLNationalBusinessRegisterField(RegexField): <NEW_LINE> <INDENT> default_error_messages = { 'invalid': _(u'National Business Register Number (REGON) consists of 7 or 9 digits.'), 'checksum': _(u'Wrong checksum for the National Business Register Number (REGON).'), } <NEW_LINE> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super(PLNationalBusinessRegisterField, self).__init__(r'^\d{7,9}$', max_length=None, min_length=None, *args, **kwargs) <NEW_LINE> <DEDENT> def clean(self,value): <NEW_LINE> <INDENT> super(PLNationalBusinessRegisterField, self).clean(value) <NEW_LINE> if not self.has_valid_checksum(value): <NEW_LINE> <INDENT> raise ValidationError(self.error_messages['checksum']) <NEW_LINE> <DEDENT> return u'%s' % value <NEW_LINE> <DEDENT> def has_valid_checksum(self, number): <NEW_LINE> <INDENT> multiple_table_7 = (2, 3, 4, 5, 6, 7) <NEW_LINE> multiple_table_9 = (8, 9, 2, 3, 4, 5, 6, 7) <NEW_LINE> result = 0 <NEW_LINE> if len(number) == 7: <NEW_LINE> <INDENT> multiple_table = multiple_table_7 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> multiple_table = multiple_table_9 <NEW_LINE> <DEDENT> for i in range(len(number)-1): <NEW_LINE> <INDENT> result += int(number[i]) * multiple_table[i] <NEW_LINE> <DEDENT> result %= 11 <NEW_LINE> if result == 10: <NEW_LINE> <INDENT> result = 0 <NEW_LINE> <DEDENT> if result == int(number[-1]): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False | A form field that validated as Polish National Official Business Register Number (REGON)
Valid forms are: 7 or 9 digits number
More on the field: http://www.stat.gov.pl/bip/regon_ENG_HTML.htm
The checksum algorithm is documented at http://wipos.p.lodz.pl/zylla/ut/nip-rego.html | 62598fac4c3428357761a26f |
class TKCNet(BaseNet):
    """Tree-structured Kronecker Convolutional Network for semantic segmentation.

    In this PyTorch implementation, KConv(r_1, r_2) is approximated with
    AvgPool2d(kernel_size=r_2, stride=1) followed by
    Conv2d(kernel_size=3, dilation=r_1).

    Parameters
    ----------
    nclass : int
        Number of categories for the training dataset.
    backbone : string
        Pre-trained dilated backbone network type ('resnet50', 'resnet101'
        or 'resnet152').
    norm_layer : object
        Normalization layer class used throughout the network.
    """

    def __init__(self, nclass, backbone, aux=False, se_loss=False,
                 norm_layer=nn.BatchNorm2d, **kwargs):
        super(TKCNet, self).__init__(nclass, backbone, aux, se_loss,
                                     norm_layer=norm_layer, **kwargs)
        # TFA head over the 2048-channel backbone features: r1 are the
        # dilation rates, r2 the average-pooling kernel sizes.
        self.head = TFAHead(2048, nclass, norm_layer,
                            r1=[10, 20, 30], r2=[7, 15, 25])

    def forward(self, x):
        input_size = x.size()[2:]
        # Only the deepest feature map (c4) feeds the segmentation head.
        _, _, _, c4 = self.base_forward(x)
        features = self.head(c4)
        scored = upsample(features, input_size, **self._up_kwargs)
        return (scored,)
class GantLinksViewSet(viewsets.ModelViewSet):
    """CRUD API over links between Gantt tasks."""

    queryset = Gantt_links.objects.all()
    serializer_class = GanttLinksSerializer
    ordering_fields = '__all__'
class GlobalAction(pythics.libcontrol.Control):
    """Holds an action triggerable by a `GlobalTrigger` control in another app.

    The `id` parameter names this control and must match the `action_id` of
    the associated `GlobalTrigger`; the two controls may live in different
    apps.

    HTML parameters:
        label: text to show in the GUI, or None/'' for no widget.
        actions: mapping of signal name -> function to run when emitted;
            'triggered' fires when the associated GlobalTrigger.trigger()
            is called.
    """

    def __init__(self, parent, label=None, **kwargs):
        pythics.libcontrol.Control.__init__(self, parent, **kwargs)
        # Only create a label widget when there is visible text to show.
        self._widget = (None if label is None or label == ''
                        else QtWidgets.QLabel(label))

    def _register(self, process, element_id, proxy_key):
        # Record our identity, then announce the action to the process.
        self._element_id = element_id
        self._process = process
        self._proxy_key = proxy_key
        process.new_global_action(element_id, proxy_key)
class Filter(object):
    """Base text-transformation filter.

    Subclasses supply a compiled ``regex`` and a ``repl`` callable; ``apply``
    substitutes every match in the wrapped text and strips the result.
    """

    @property
    def regex(self):
        """Compiled pattern to search for. Must be overridden."""
        # BUG FIX: the original raised the NotImplemented singleton, which is
        # not an exception (raising it is a TypeError in Python 3).
        raise NotImplementedError

    @staticmethod
    def repl(match):
        """Replacement callback passed to ``Pattern.sub``. Must be overridden."""
        raise NotImplementedError

    def __init__(self, text):
        self.text = text

    def apply(self):
        """Return ``text`` with all matches replaced, stripped of whitespace."""
        return self.regex.sub(self.repl, self.text).strip()
class ServiceDataSubRecord(EGTSRecord):
    """Subrecord of a Service Data Record."""

    def __init__(self, *args, **kwargs):
        # srt: subrecord type, srl: payload length, srd: payload data.
        super(ServiceDataSubRecord, self).__init__(
            ('srt', Byte()),
            ('srl', UShort()),
            ('srd', None),
            *args, **kwargs)

    def set_fields(self):
        # Keep the declared length in sync with the actual payload.
        self['srl'] = len(self['srd'])
class InstallWithOptions(install):
    """``install`` command extended with --plugins and --web options.

    The extra flags trigger plugin and/or web client installation after the
    regular install step.
    """

    user_options = install.user_options + [
        ('plugins', None, 'Install default plugins.'),
        ('web', None, 'Install web client resources.'),
    ]
    boolean_options = install.boolean_options + ['plugins', 'web']

    def initialize_options(self, *arg, **kw):
        install.initialize_options(self, *arg, **kw)
        self.plugins = None
        self.web = None

    def run(self, *arg, **kw):
        install.run(self, *arg, **kw)
        if self.plugins:
            print('Installing plugins')
            self.girder_install('plugins')
        if self.web:
            print('Installing web components')
            self.girder_install('web')

    def girder_install(self, component):
        """Install the named girder component ('web' or 'plugins')."""
        try:
            # Imported lazily: girder only exists once the base install ran.
            from girder.utility import install
        except ImportError:
            sys.stderr.write(
                'Install {} failed. '.format(component) +
                'Could not import girder.\n'
            )
            return
        if component == 'web':
            install.install_web(force=True)
        elif component == 'plugins':
            install.install_plugin(force=True)
class ResPartner(orm.Model):
    """Extend ``res.partner`` with a production note field."""

    _inherit = 'res.partner'

    _columns = {
        'mrp_note': fields.char(
            'Production note', size=120,
            help='Production note for partner'),
    }
class ItemMetadata():
    """Helpers that attach display metadata to item dicts."""

    def get_item_type(item):
        """Return the item's type from '@type' or 'type', else False."""
        for key in ('@type', 'type'):
            if key in item:
                return item[key]
        return False

    def get_class_meta(item, class_type_metadata):
        """Merge the metadata registered for item['type'] into the item.

        'typelabel' and 'icon' are always reset to False first, then
        overwritten by the class metadata when available.
        """
        item['typelabel'] = False
        item['icon'] = False
        if 'type' in item and item['type'] in class_type_metadata:
            item.update(class_type_metadata[item['type']])
        return item
class TimeRangeEndpoint(Endpoint):
    """Return a datetime range starting "now" and ending now + duration.

    The start time is implied to be "now"; the end time is start plus the
    requested duration.  Deliberately contrived: it exists to illustrate and
    test nested resources.
    """

    _http_method = "GET"
    _uri = "/time/range"
    _route_name = "time_range_now_plus_duration"
    _returns = DateTimeRangeResource(
        "Information about the range specified, as well as the "
        "range's start and end datetimes.")

    duration = IntegerArgument(
        "The duration in milliseconds to be used.", required=True)

    def _handle(self, context):
        # DateTimeRangeModel takes microseconds; the argument is milliseconds.
        millis = context.args['duration']
        return {'range': DateTimeRangeModel(millis * 1000)}
class SumTree():
    """Sum-tree backed replay memory for prioritized experience replay.

    Leaf i holds the priority of the i-th stored experience; every internal
    node holds the sum of its children, so sampling by cumulative priority
    is O(log n).  Adapted from the MorvanZhou and rlcode PER references.
    """

    def __init__(self, buffer_size):
        self.memory_idx = 0           # next write slot (circular)
        self.n_entries = 0            # experiences stored so far, capped
        self.buffer_size = buffer_size
        # Heap layout: buffer_size - 1 internal nodes, then buffer_size leaves.
        self.tree = np.zeros(2 * self.buffer_size - 1)
        self.experience = namedtuple(
            'experience', ['state', 'action', 'reward', 'next_state', 'done'])
        self.memory = [None] * buffer_size

    def update(self, leaf_idx, priority):
        """Set a leaf's priority and propagate the delta up to the root."""
        delta = priority - self.tree[leaf_idx]
        self.tree[leaf_idx] = priority
        node = leaf_idx
        while node != 0:
            node = (node - 1) // 2
            self.tree[node] += delta

    def store(self, priority, state, action, reward, next_state, done):
        """Append an experience with the given priority, overwriting oldest."""
        leaf_idx = self.memory_idx + self.buffer_size - 1
        self.memory[self.memory_idx] = self.experience(
            state, action, reward, next_state, done)
        self.update(leaf_idx, priority)
        self.memory_idx = (self.memory_idx + 1) % self.buffer_size
        self.n_entries = min(self.n_entries + 1, self.buffer_size)

    def get_leaf(self, value):
        """Descend by cumulative priority; return (leaf_idx, priority, experience)."""
        parent = 0
        while True:
            left = 2 * parent + 1
            right = left + 1
            if left >= len(self.tree):
                # No children: 'parent' is the selected leaf.
                break
            if value <= self.tree[left]:
                parent = left
            else:
                value -= self.tree[left]
                parent = right
        leaf_idx = parent
        return (leaf_idx, self.tree[leaf_idx],
                self.memory[leaf_idx - self.buffer_size + 1])

    @property
    def total_priority(self):
        """Sum of all stored priorities (the root node)."""
        return self.tree[0]
class ShowPlatformSoftwareCpmSwitchB0ResourceSchema(MetaParser):
    """Schema for ``show platform software cpm switch {mode} B0 resource``."""

    schema = {
        'device_status': {
            'oobnd1': str,
            'leaba0_3': str,
            'leaba0_5': str,
        },
    }
class ParserI:
    """Interface for deriving trees ("parses") for a token sequence.

    Typically used for sentence syntax trees, but equally applicable to
    morphological trees, discourse structures, and similar.

    Subclasses must define at least one of ``parse()`` / ``parse_sents()``;
    they may also define ``grammar()``.
    """

    def grammar(self):
        """Return the grammar used by this parser."""
        raise NotImplementedError()

    def parse(self, sent, *args, **kwargs):
        """Return an iterator of parse trees for ``sent``.

        Delegates to whichever entry point the subclass actually overrode.
        """
        if overridden(self.parse_sents):
            return next(self.parse_sents([sent], *args, **kwargs))
        if overridden(self.parse_one):
            single = self.parse_one(sent, *args, **kwargs)
            return (tree for tree in [single] if tree is not None)
        if overridden(self.parse_all):
            return iter(self.parse_all(sent, *args, **kwargs))
        raise NotImplementedError()

    def parse_sents(self, sents, *args, **kwargs):
        """Apply ``parse`` to each sentence, yielding one iterator per sentence."""
        return (self.parse(sent, *args, **kwargs) for sent in sents)

    def parse_all(self, sent, *args, **kwargs):
        """Return all parses of ``sent`` as a list."""
        return list(self.parse(sent, *args, **kwargs))

    def parse_one(self, sent, *args, **kwargs):
        """Return the first parse of ``sent``, or None if there is none."""
        return next(self.parse(sent, *args, **kwargs), None)
class MESH_OT_connect(bpy.types.Operator):
    """Connect wire-mesh vertices by sliding them along the wire."""

    bl_idname = "mesh.connect"
    bl_label = "Connect"
    bl_options = {'REGISTER', 'UNDO'}

    connect_type: bpy.props.IntProperty(
        name='connect_type',
        description='connection type',
        default=1,
        min=1,
        max=2,
    )

    @classmethod
    def poll(cls, context):
        # Operator is only meaningful inside the 3D viewport.
        return context.area.type == 'VIEW_3D'

    def execute(self, context):
        wire = context.active_object
        b_wire = bmesh.from_edit_mesh(wire.data)
        # The walk starts at the (single expected) selected vertex.
        origin = [v for v in b_wire.verts if v.select][0]
        prev_vert = origin
        current_vert = prev_vert.link_edges[0].other_vert(prev_vert)
        # NOTE(review): self.subdiv_lvl is read here but only connect_type is
        # declared on this operator -- confirm where subdiv_lvl comes from.
        move_constant = (1 - 1 / self.subdiv_lvl)
        if move_constant > 0:
            move_percentage, last_vert, second_last_vert, line_vectors = slide_verts(
                len(b_wire.verts) - 2, prev_vert, current_vert, move_constant, -2)
            move_percentage += move_constant
            additional_vert_count = math.ceil(
                move_percentage / (1 - move_constant))
            second_last_vert, last_vert = add_additional_verts(
                last_vert, second_last_vert, additional_vert_count, b_wire)
            bmesh.update_edit_mesh(wire.data)
            slide_last_verts(additional_vert_count, second_last_vert, last_vert,
                             move_constant, move_percentage, -1, line_vectors)
        else:
            line_vectors = get_line_vectors(
                len(b_wire.verts) - 1, prev_vert, current_vert)
            out_slide_verts(line_vectors, prev_vert, current_vert, move_constant)
        bpy.ops.mesh.select_all(action='DESELECT')
        bmesh.update_edit_mesh(wire.data)
        return {'FINISHED'}
class Invitation(Resource):
    """Endpoints for a single invitation node."""

    @auth.login_required
    def get(self, invitation_id):
        """Return the serialized invitation, or 404 if it does not exist."""
        invitation = InvitationModel.query.get(invitation_id)
        if invitation is None:
            abort(404)
        return invitation.serialize, 200
class ReferenceDefinition(Base):
    """Definitions related to public law restrictions in a general way.

    A meta bucket for documents that belong to an extract as a whole (a
    topic, a whole canton or a whole municipality) rather than to one
    specific public law restriction.

    Attributes:
        id (str): identifier; managed by the database.
        topic (str): topic this definition may be related to.
        canton (str): canton this definition is related to.
        municipality (int): municipality this definition is related to.
        office_id (str): foreign key to the responsible office.
        responsible_office (Office): related office instance from the database.
        liefereinheit (int): delivery-unit number.
    """

    __table_args__ = {'schema': 'forest_perimeters'}
    __tablename__ = 'reference_definition'

    id = sa.Column(sa.String, primary_key=True, autoincrement=False)
    topic = sa.Column(sa.String, nullable=True)
    canton = sa.Column(sa.String(2), nullable=True)
    municipality = sa.Column(sa.Integer, nullable=True)
    office_id = sa.Column(sa.String, sa.ForeignKey(Office.id), nullable=False)
    responsible_office = relationship(Office)
    liefereinheit = sa.Column(sa.Integer, nullable=True)
class SyncSettingArg(bb.Union):
    """Tagged union: exactly one ``is_*`` method returns True.

    :ivar files.SyncSettingArg.default: On first sync to members' computers,
        the specified folder follows its parent folder's setting or otherwise
        follows default sync behavior.
    :ivar files.SyncSettingArg.not_synced: On first sync to members'
        computers, the specified folder is set to not sync with selective
        sync.
    """

    _catch_all = 'other'
    # Tag attributes; populated by the bb.Union machinery.
    default = None
    not_synced = None
    other = None

    def is_default(self):
        """True iff this instance carries the ``default`` tag."""
        return self._tag == 'default'

    def is_not_synced(self):
        """True iff this instance carries the ``not_synced`` tag."""
        return self._tag == 'not_synced'

    def is_other(self):
        """True iff this instance carries the ``other`` tag."""
        return self._tag == 'other'

    def _process_custom_annotations(self, annotation_type, field_path, processor):
        super(SyncSettingArg, self)._process_custom_annotations(
            annotation_type, field_path, processor)
class Grid:
    """A 9x9 Sudoku grid mapped onto its lines, columns and 3x3 boxes."""

    def __init__(self, grid_data=None):
        """Build the grid from a 9x9 list of values, or empty when None."""
        self.grid = []
        if grid_data and isinstance(grid_data, list):
            for i, line in enumerate(grid_data):
                self.grid.append(
                    [Cell(i, j, int(v)) for j, v in enumerate(line)])
        else:
            for i in range(0, 9):
                self.grid.append([Cell(i, j) for j in range(0, 9)])
        self.map_grid()
        self.register_validators()

    @property
    def data(self):
        """Return the grid as a 9x9 list of raw cell values."""
        return [[c.get_value() for c in line] for line in self.grid]

    @property
    def empty_values(self):
        """Number of cells that still have no value.

        BUG FIX: the original built ``[c in l for ...]`` -- a list of
        membership-test booleans whose length was only coincidentally the
        count of empty cells; now counted explicitly.
        """
        return sum(1 for line in self.grid for cell in line if cell.empty)

    def map_grid(self):
        """Build the Line, Column and Box views over the cell grid."""
        self.lines = [Line(i).fill(l) for i, l in enumerate(self.grid)]
        # Columns: transpose the grid.
        cols = []
        for j in range(0, 9):
            cols.append([self.grid[i][j] for i in range(0, 9)])
        self.columns = [Column(j).fill(c) for j, c in enumerate(cols)]
        # Boxes: nine 3x3 sub-squares, numbered left-to-right, top-to-bottom.
        box_list = []
        for box_id in range(0, 9):
            box = []
            for value in range(0, 9):
                start_i = (box_id // 3) * 3
                start_j = (box_id % 3) * 3
                i = start_i + value // 3
                j = start_j + value % 3
                box.append(self.grid[i][j])
            box_list.append(box)
        self.boxes = [Box(i).fill(b) for i, b in enumerate(box_list)]

    def register_validators(self):
        """Give every cell its line, column and box validators."""
        for i, line in enumerate(self.grid):
            for j, cell in enumerate(line):
                # Box index from the cell's (i, j) position.
                box_id = (i - i % 3) + j // 3
                cell.validators = [
                    self.lines[i],
                    self.columns[j],
                    self.boxes[box_id],
                ]

    def __str__(self):
        return '\n'.join(str(line) for line in self.lines)

    def __getitem__(self, key):
        return self.lines[key]

    def __lt__(self, other):
        # Grids compare by how many empty cells remain.
        return self.empty_values < other.empty_values

    def __gt__(self, other):
        return self.empty_values > other.empty_values
@abstract
class BuildingInformation(EObject, metaclass=MetaEClass):
    """Superclass of the various kinds of extra building information."""

    def __init__(self):
        super().__init__()
class Path(object):
    """A URL path of zero or more segments (RFC 3986, section 3.3).

    Path parameters are not supported.

    Attributes:
        segments: list of path segments.  A trailing '/' leaves a final ''
            segment (so ``isdir`` is True); an empty list is the empty path.
        isabsolute: True when the path starts with '/'; False for the empty
            path.
    """

    def __init__(self, path=''):
        self.segments = []
        self.isabsolute = False
        if path:
            self.parse(path)

    def parse(self, path=''):
        """Replace this path's state by parsing *path* (str or segment list)."""
        self.isabsolute = (path and path[0] == '/')
        if isinstance(path, list):
            segments = url_path_join(*path).split('/')
        else:
            segments = path.split('/')
        # An absolute path produces a leading empty segment; drop it.
        if len(segments) > 1 and segments[0] == '':
            segments.pop(0)
        # NOTE(review): urllib.unquote is Python 2 only; Python 3 would need
        # urllib.parse.unquote.
        self.segments = [urllib.unquote(segment) for segment in segments]

    def add(self, path=None):
        """Append *path* (str or list of segments) to this path; return self."""
        if path:
            if isinstance(path, list):
                # Joining onto a directory: drop the trailing '' segment.
                if self.segments[-1] == '':
                    self.segments.pop(-1)
                self.segments += path
            else:
                self.parse(url_path_join(str(self), path))
        return self

    def set(self, path=None):
        """Replace this path with *path*; return self."""
        if path:
            self.parse(path)
        return self

    def remove(self, path=None):
        """Remove *path* from this path (True clears it); return self."""
        if path:
            if path == True:
                self.parse('')
            elif isinstance(path, list):
                self.parse(url_path_remove(str(self), '/'.join(path)))
            else:
                self.parse(url_path_remove(str(self), path))
        return self

    @property
    def isdir(self):
        """True for the empty path or a path with a trailing '/'."""
        return self.segments == [] or (self.segments and self.segments[-1] == '')

    @property
    def isfile(self):
        """True when the path does not denote a directory."""
        return not self.isdir

    def __str__(self):
        if self.segments and self.isabsolute:
            # Re-add the leading '/' stripped during parsing.
            return '/'.join([''] + self.segments)
        return '/'.join(self.segments)

    def __repr__(self):
        return "%s('%s')" % (self.__class__.__name__, str(self))
@skip_server_tests
class GzipUdpTests(UdpTests, ManyTestCasesWithServerGzipMixin):
    """Repeat the UDP tests with an InfluxDBClient created with gzip=True."""

    pass
class ContainerProjectsLocationsClustersNodePoolsGetRequest(_messages.Message):
    """Request message for fetching a single node pool.

    Fields:
        clusterId: Deprecated; replaced by the name field.
        name: The name (project, location, cluster, node pool id) of the node
            pool to get, in the format
            'projects/*/locations/*/clusters/*/nodePools/*'.
        nodePoolId: Deprecated; replaced by the name field.
        projectId: Deprecated Google Developers Console project ID or project
            number; replaced by the name field.
        version: API request version that initiates this operation.
        zone: Deprecated Compute Engine zone in which the cluster resides;
            replaced by the name field.
    """

    # Field numbers are part of the wire format; do not renumber.
    clusterId = _messages.StringField(1)
    name = _messages.StringField(2, required=True)
    nodePoolId = _messages.StringField(3)
    projectId = _messages.StringField(4)
    version = _messages.StringField(5)
    zone = _messages.StringField(6)
class Validator(Base.Node):
    """Abstract base validator; subclass to define new validators."""

    __slots__ = ('control', 'required')

    # Message severity levels.
    ERROR = "error"
    INFO = "info"
    WARNING = "warning"
    SUCCESS = "success"

    messages = {'empty': 'A value is required for this field'}

    properties = Base.Node.properties.copy()
    properties['required'] = {'action': 'classAttribute'}

    class ClientSide(Display.Message.ClientSide):

        def validate(self):
            """Client-side validation hook; subclasses override."""
            pass

        def message(self, message):
            # Expand the named template with the field's associated data.
            return self.expandTemplate(self.serverSide.messages[message],
                                       self.associatedData())

        def error(self, message):
            return (ClientSide.MessageTypes.ERROR, self.message(message))

        def info(self, message):
            return (ClientSide.MessageTypes.INFO, self.message(message))

        def warning(self, message):
            return (ClientSide.MessageTypes.WARNING, self.message(message))

        def success(self, message):
            return (ClientSide.MessageTypes.SUCCESS, self.message(message))

        def associatedData(self):
            # NOTE: client-side 'value' is an attribute, not a call.
            return {'field': self.value}

    def _create(self, id=None, name=None, parent=None, **kwargs):
        Base.Node._create(self, id=id, name=name, parent=parent, **kwargs)
        self.required = False
        self.control = None

    def validate(self):
        """Server-side validation hook; subclasses override."""
        pass

    def error(self, message):
        return (ClientSide.MessageTypes.ERROR, self.message(message))

    def info(self, message):
        return (ClientSide.MessageTypes.INFO, self.message(message))

    def warning(self, message):
        return (ClientSide.MessageTypes.WARNING, self.message(message))

    def success(self, message):
        return (ClientSide.MessageTypes.SUCCESS, self.message(message))

    def associatedData(self):
        return {'field': self.value()}

    def message(self, message):
        # Server-side template expansion via string.Template.
        return string.Template(self.messages[message]).safe_substitute(
            self.associatedData())

    def value(self):
        return self.control.value()

    @property
    def forElement(self):
        return self.control and self.control.forElement
class AlphaBetaAgent(MultiAgentSearchAgent):
    """Minimax agent with alpha-beta pruning (question 3)."""

    def getAction(self, gameState):
        """Return the best action for pacman from the current game state."""
        best = self.Getvalue(gameState, 0, 0, float('-inf'), float('inf'))
        return best[0]

    def Getvalue(self, GameState, AgentIndex, curDepth, alpha, beta):
        """Dispatch to max (pacman) or min (ghost) value for this ply.

        Returns either a bare evaluation score (at a depth cutoff or
        terminal state) or an ``[action, score]`` pair.
        """
        if AgentIndex >= GameState.getNumAgents():
            # All agents moved: start the next ply with pacman.
            AgentIndex = 0
            curDepth += 1
        if curDepth == self.depth:
            return self.evaluationFunction(GameState)
        if AgentIndex == 0:
            return self.maxValue(GameState, AgentIndex, curDepth, alpha, beta)
        return self.minValue(GameState, AgentIndex, curDepth, alpha, beta)

    def minValue(self, GameState, AgentIndex, curDepth, alpha, beta):
        """Return ``[action, score]`` minimising over the ghost's moves."""
        v = ['None', float('inf')]
        actions = GameState.getLegalActions(AgentIndex)
        if not actions:
            return self.evaluationFunction(GameState)
        for action in actions:
            if action == 'Stop':
                continue
            val = self.Getvalue(GameState.generateSuccessor(AgentIndex, action),
                                AgentIndex + 1, curDepth, alpha, beta)
            # Inner nodes return [action, score]; leaves return a bare score.
            # (Was ``type(val) == list``; isinstance is the correct check.)
            if isinstance(val, list):
                val = val[1]
            # Direct comparison replaces the fragile ``min(...) is not v[1]``
            # identity test; semantics are identical (ties keep the old pick).
            if val < v[1]:
                v = [action, val]
            if v[1] < alpha:
                # Prune: the max node above will never choose this branch.
                return v
            beta = min(beta, v[1])
        return v

    def maxValue(self, GameState, AgentIndex, curDepth, alpha, beta):
        """Return ``[action, score]`` maximising over pacman's moves."""
        v = ['None', float('-inf')]
        actions = GameState.getLegalActions(AgentIndex)
        if not actions:
            return self.evaluationFunction(GameState)
        for action in actions:
            if action == 'Stop':
                continue
            val = self.Getvalue(GameState.generateSuccessor(AgentIndex, action),
                                AgentIndex + 1, curDepth, alpha, beta)
            if isinstance(val, list):
                val = val[1]
            if val > v[1]:
                v = [action, val]
            if v[1] > beta:
                # Prune: the min node above will never choose this branch.
                return v
            alpha = max(alpha, v[1])
        return v
class UpdateEntryInputSet(InputSet):
    """Inputs for the UpdateEntry Choreo.

    Used to specify input parameters when executing the Choreo.
    """

    def set_Entry(self, value):
        """Set the Entry input."""
        InputSet._set_input(self, 'Entry', value)

    def set_AccessToken(self, value):
        """Set the AccessToken input."""
        InputSet._set_input(self, 'AccessToken', value)

    def set_EntryID(self, value):
        """Set the EntryID input."""
        InputSet._set_input(self, 'EntryID', value)
class CompletionTarget(object):
    """Base class for command-line argument completion targets."""

    __metaclass__ = abc.ABCMeta  # Python-2 style ABC declaration

    def __init__(self):
        self.name = None
        self.path = None
        self.base_path = None
        self.modules = tuple()
        self.aliases = tuple()

    def __eq__(self, other):
        # Targets are equal when their reprs (name + modules) match.
        if not isinstance(other, CompletionTarget):
            return False
        return self.__repr__() == other.__repr__()

    def __ne__(self, other):
        return not self.__eq__(other)

    def __lt__(self, other):
        # Ordering is by name only.
        return self.name.__lt__(other.name)

    def __gt__(self, other):
        return self.name.__gt__(other.name)

    def __hash__(self):
        # Keep the hash consistent with repr-based equality.
        return hash(self.__repr__())

    def __repr__(self):
        if self.modules:
            return '%s (%s)' % (self.name, ', '.join(self.modules))
        return self.name
class AbstractThreadView(View):
    """Base class for Thread handler views with header-based authentication."""

    def authenticate_user(self, request, *args, **kwargs):
        """Authenticate via USERNAME/TOKEN headers; return the user or None."""
        if 'HTTP_USERNAME' not in request.META or 'HTTP_TOKEN' not in request.META:
            return None
        username = request.META['HTTP_USERNAME']
        token = request.META['HTTP_TOKEN']
        client_user = authenticate(username=username, password=token)
        if client_user is not None and client_user.is_active:
            return client_user
        # NOTE(review): this logs the raw token on failure -- consider
        # redacting credentials from log output.
        logger.info("AUTHENTICATE: User with USERNAME: " + username +
                    " TOKEN: " + token + " denied access." + '\n\n')
        return None
class Process(Module):
    """Test a unix process by name."""

    def __init__(self, name):
        self.name = name
        super(Process, self).__init__()

    @property
    def exists(self):
        """True when pidof finds at least one process with this name."""
        return self.run_expect([0], "/sbin/pidof %s", self.name).rc == 0

    def __repr__(self):
        return "<process %s>" % (self.name,)
class FuncCommand(setuptools.Command):
    """Run ``self.function`` on 'run' and nothing else.

    Option initialization and finalization are deliberate no-ops.
    """

    initialize_options = do_nothing
    finalize_options = do_nothing
    user_options = []

    def run(self):
        self.function(self.args)

    def __init__(self, *args, **kwargs):
        setuptools.Command.__init__(self, *args, **kwargs)
class AverageMeter(object):
    """Computes and stores the average and current value."""

    def __init__(self):
        self.count = 0  # total number of samples seen
        self.sum = 0    # weighted sum of all observed values
        self.avg = 0    # running average, sum / count
        self.val = 0    # most recently observed value

    def update(self, val, n=1):
        """Record *val* observed *n* times and refresh the running average.

        ``n`` now defaults to 1 (backward compatible) so the common
        single-sample case can omit it.
        """
        self.val = val
        self.sum += val * n
        self.count += n
        self.avg = self.sum / self.count
class JournalEntryMixin:
    """This class defines a journal entry template."""

    _logger = _module_logger.getChild('JournalEntryMixin')

    def __init__(self, description, imputation_datas):
        # Materialise the imputations, sorted by account number, and
        # partition them into debits and credits before validating.
        self._description = description
        self._imputations = [imputation.to_imputation(self) for imputation in imputation_datas]
        self._imputations.sort(key=lambda x: x.account)
        self._debits = [imputation for imputation in self._imputations if imputation.is_debit()]
        self._credits = [imputation for imputation in self._imputations if imputation.is_credit()]
        self._check()

    def _check(self):
        # Invariants: an account appears at most once per entry, and the
        # entry must balance (sum of debits == sum of credits).
        account_counter = {}
        account_numbers = [imputation.account for imputation in self._imputations]
        for account_number in account_numbers:
            if account_number in account_counter:
                self._logger.error(str(self))
                raise DuplicatedEntryError(account_number)
            else:
                account_counter[account_number] = 1
        sum_of_debits = self.sum_of_debits()
        sum_of_credits = self.sum_of_credits()
        if sum_of_debits != sum_of_credits:
            self._logger.error(str(self))
            message = "Journal Entry '{}' is not balanced D {} != C {}"
            raise UnbalancedEntryError(message.format(self._description, sum_of_debits, sum_of_credits))

    @property
    def description(self):
        # Free-text label for the entry.
        return self._description

    def __str__(self):
        message = 'Journal Entry: {}\n'.format(self._description)
        for imputations in (self.debits, self.credits):
            message += '\n'.join([str(imputation) for imputation in imputations])
            message += '\n'
        return message

    def _sum_of_imputations(self, imputations):
        # Rounded to currency precision to avoid float artefacts in the
        # balance comparison.
        return round_currency(sum([imputation.amount for imputation in imputations]))

    def sum_of_debits(self):
        return self._sum_of_imputations(self._debits)

    def sum_of_credits(self):
        return self._sum_of_imputations(self._credits)

    def __iter__(self):
        # Iterating the entry yields its imputations in account order.
        return iter(self._imputations)

    @property
    def imputations(self):
        return iter(self._imputations)

    @property
    def debits(self):
        return iter(self._debits)

    @property
    def credits(self):
        return iter(self._credits)
class Fourier(FourierDeterministicTerm):
    """Fourier series deterministic terms: one sin/cos pair per harmonic
    i = 1, ..., order, evaluated at t / period."""

    _is_dummy = False

    def __init__(self, period: float, order: int):
        super().__init__(order)
        self._period = float_like(period, "period")
        # The highest usable harmonic is bounded by the period length.
        if 2 * self._order > self._period:
            raise ValueError("2 * order must be <= period")

    @property
    def period(self) -> float:
        # The length of a full cycle.
        return self._period

    @property
    def _columns(self) -> List[str]:
        # Column labels such as "sin(1,12)" / "cos(1,12)", one pair per
        # harmonic.
        period = self._period
        fmt_period = d_or_f(period).strip()
        columns = []
        for i in range(1, self._order + 1):
            for typ in ("sin", "cos"):
                columns.append(f"{typ}({i},{fmt_period})")
        return columns

    @Appender(DeterministicTerm.in_sample.__doc__)
    def in_sample(
        self, index: Union[Sequence[Hashable], pd.Index]
    ) -> pd.DataFrame:
        index = self._index_like(index)
        nobs = index.shape[0]
        # In-sample time runs t = 0, ..., nobs - 1.
        terms = self._get_terms(np.arange(nobs) / self._period)
        return pd.DataFrame(terms, index=index, columns=self._columns)

    @Appender(DeterministicTerm.out_of_sample.__doc__)
    def out_of_sample(
        self,
        steps: int,
        index: Union[Sequence[Hashable], pd.Index],
        forecast_index: Optional[Sequence[Hashable]] = None,
    ) -> pd.DataFrame:
        index = self._index_like(index)
        fcast_index = self._extend_index(index, steps, forecast_index)
        nobs = index.shape[0]
        # Continue the time index past the sample: t = nobs, ..., nobs+steps-1.
        terms = self._get_terms(np.arange(nobs, nobs + steps) / self._period)
        return pd.DataFrame(terms, index=fcast_index, columns=self._columns)

    @property
    def _eq_attr(self) -> Tuple[Hashable, ...]:
        # Attributes that define equality between Fourier terms.
        return self._period, self._order

    def __str__(self) -> str:
        return f"Fourier(period={self._period}, order={self._order})"
Parameters
----------
period : float
The length of a full cycle. Must be >= 2.
order : int
The number of Fourier components to include. Must satisfy 2 * order <= period.
See Also
--------
DeterministicProcess
TimeTrend
Seasonality
CalendarFourier
Notes
-----
Both a sine and a cosine term are included for each i=1, ..., order
.. math::
f_{i,s,t} & = \sin\left(2 \pi i \times \frac{t}{m} \right) \\
f_{i,c,t} & = \cos\left(2 \pi i \times \frac{t}{m} \right)
where m is the length of the period.
Examples
--------
Solar data has an 11-year cycle
>>> from statsmodels.datasets import sunspots
>>> from statsmodels.tsa.deterministic import Seasonality
>>> data = sunspots.load_pandas().data
>>> fourier_gen = Fourier(11, order=2)
>>> fourier_gen.in_sample(data.index) | 62598facdd821e528d6d8eed |
class AppSource (Source, FilesystemWatchMixin):
    """Applications source

    This Source contains all user-visible applications (as given by
    the desktop files)
    """

    def __init__(self, name=None):
        super().__init__(name or _("Applications"))

    def initialize(self):
        # Rescan whenever a desktop-file directory changes or the relevant
        # plugin settings change.
        application_dirs = config.get_data_dirs("", "applications")
        self.monitor_token = self.monitor_directories(*application_dirs)
        gobject_connect_weakly(__kupfer_settings__, "plugin-setting-changed", self._on_setting_change)

    def _on_setting_change(self, *_args):
        self.mark_for_update()

    @classmethod
    def should_show(cls, app_info, desktop_type, use_filter):
        # NoDisplay entries are always hidden; otherwise honour the optional
        # desktop-environment filter.
        if app_info.get_nodisplay():
            return False
        if not use_filter:
            return True
        if desktop_type == "":
            return app_info.should_show()
        else:
            return app_info.get_show_in(desktop_type)

    def get_items(self):
        # Yield one AppLeaf per visible application. The whitelist overrides
        # filtering; the blacklist always wins over the filter result.
        use_filter = __kupfer_settings__["desktop_filter"]
        desktop_type = __kupfer_settings__["desktop_type"]
        for item in Gio.app_info_get_all():
            id_ = item.get_id()
            if id_ in WHITELIST_IDS or (
                    self.should_show(item, desktop_type, use_filter)
                    and not id_ in BLACKLIST_IDS):
                yield AppLeaf(item)

    def should_sort_lexically(self):
        return True

    def get_description(self):
        return _("All applications and preferences")

    def get_icon_name(self):
        return "applications-office"

    def provides(self):
        yield AppLeaf
This Source contains all user-visible applications (as given by
the desktop files) | 62598fac76e4537e8c3ef565 |
class AveragedPowerspectrum(AveragedCrossspectrum, Powerspectrum):
    """Averaged periodogram: segment a light curve, Fourier-transform each
    segment and average the resulting periodograms.
    """

    def __init__(self, lc=None, segment_size=None, norm="frac", gti=None):
        self.type = "powerspectrum"
        # segment_size is mandatory as soon as a light curve is supplied,
        # and must be a finite number.
        if segment_size is None and lc is not None:
            raise ValueError("segment_size must be specified")
        if segment_size is not None and not np.isfinite(segment_size):
            raise ValueError("segment_size must be finite!")
        self.segment_size = segment_size
        Powerspectrum.__init__(self, lc, norm, gti=gti)
        return

    def _make_segment_spectrum(self, lc, segment_size):
        # Split *lc* into GTI-aligned segments of *segment_size* and compute
        # one Powerspectrum per segment. Returns the list of spectra and the
        # photon count of each segment.
        if not isinstance(lc, lightcurve.Lightcurve):
            raise TypeError("lc must be a lightcurve.Lightcurve object")
        if self.gti is None:
            # Fall back to the light curve's own Good Time Intervals.
            self.gti = lc.gti
        check_gtis(self.gti)
        start_inds, end_inds = bin_intervals_from_gtis(self.gti, segment_size, lc.time)
        power_all = []
        nphots_all = []
        for start_ind, end_ind in zip(start_inds, end_inds):
            time = lc.time[start_ind:end_ind]
            counts = lc.counts[start_ind:end_ind]
            counts_err = lc.counts_err[start_ind: end_ind]
            lc_seg = lightcurve.Lightcurve(time, counts, err=counts_err, err_dist=lc.err_dist.lower())
            power_seg = Powerspectrum(lc_seg, norm=self.norm)
            power_all.append(power_seg)
            nphots_all.append(np.sum(lc_seg.counts))
        return power_all, nphots_all
curve, Fourier-transforming each segment and then averaging the
resulting periodograms.
Parameters
----------
lc: :class:`stingray.Lightcurve`object OR iterable of :class:`stingray.Lightcurve` objects
The light curve data to be Fourier-transformed.
segment_size: float
The size of each segment to average. Note that if the total
duration of each :class:`Lightcurve` object in lc is not an integer multiple
of the ``segment_size``, then any fraction left-over at the end of the
time series will be lost.
norm: {``leahy`` | ``frac`` | ``abs`` | ``none`` }, optional, default ``frac``
The normalization of the periodogram to be used.
Other Parameters
----------------
gti: 2-d float array
``[[gti0_0, gti0_1], [gti1_0, gti1_1], ...]`` -- Good Time intervals.
This choice overrides the GTIs in the single light curves. Use with
care!
Attributes
----------
norm: {``leahy`` | ``frac`` | ``abs`` | ``none`` }
the normalization of the periodogram
freq: numpy.ndarray
The array of mid-bin frequencies that the Fourier transform samples
power: numpy.ndarray
The array of normalized squared absolute values of Fourier
amplitudes
power_err: numpy.ndarray
The uncertainties of ``power``.
An approximation for each bin given by ``power_err= power/sqrt(m)``.
Where ``m`` is the number of power averaged in each bin (by frequency
binning, or averaging powerspectrum). Note that for a single
realization (``m=1``) the error is equal to the power.
df: float
The frequency resolution
m: int
The number of averaged periodograms
n: int
The number of data points in the light curve
nphots: float
The total number of photons in the light curve | 62598faccc0a2c111447afc9 |
class CyclonePipeline(object):
    """Cyclones pipeline for storing scraped items in the database."""

    def __init__(self):
        # Create the table (if needed) and bind a session factory to the
        # engine.
        engine = db_connect()
        create_cyclone_table(engine)
        self.Session = sessionmaker(bind=engine)

    def process_item(self, item, spider):
        """Persist *item* as a ``Cyclones`` row.

        Rolls back and re-raises on failure; the session is always closed.
        """
        session = self.Session()
        cyclone = Cyclones(**item)
        try:
            session.add(cyclone)
            session.commit()
        except Exception:
            # Narrowed from a bare `except:`; the error is still re-raised
            # after rolling the transaction back.
            session.rollback()
            raise
        finally:
            session.close()
        return item
class Leaderboard:
    """The leaderboard that can be used to store / sort the model performance
    automatically.

    Parameters
    ----------
    fields: list of `str`
        Field names that describe model performance. The first field is the
        initial major field used for sorting.
    is_higher_better: mapping of field name -> `bool`
        Whether a higher score is better for each field.
    """

    def __init__(self, fields, is_higher_better):
        assert isinstance(fields, list)
        self.keys = ["name"] + fields
        self.perform_dict = pd.DataFrame(columns=self.keys)
        self.is_higher_better = is_higher_better
        self.major_field = fields[0]

    def set_major_field(self, field) -> None:
        """Change the field used for ranking; unknown fields are ignored."""
        if field in self.keys and not field == "name":
            self.major_field = field
        else:
            LOGGER.warning(
                "do not find major field %s in current leaderboard, will ignore.",
                field
            )

    def insert_model_performance(self, name, performance) -> None:
        """Insert (or override) the performance record for model *name*."""
        # BUGFIX: the original tested `name not in self.perform_dict["name"]`,
        # which checks the Series *index*, not the stored model names.
        if name not in self.perform_dict["name"].values:
            # Copy so the caller's dict is not mutated.
            record = dict(performance, name=name)
            new = pd.DataFrame(record, index=[0])
            # pd.concat replaces DataFrame.append (removed in pandas 2.0).
            self.perform_dict = pd.concat(
                [self.perform_dict, new], ignore_index=True
            )
        else:
            LOGGER.warning(
                "model already in the leaderboard, will override current result."
            )
            self.remove_model_performance(name)
            self.insert_model_performance(name, performance)

    def remove_model_performance(self, name) -> None:
        """Drop the record for *name*; a warning is logged when absent."""
        if name not in self.perform_dict["name"].values:
            LOGGER.warning(
                "no model detected in current leaderboard, will ignore removing action."
            )
            return
        index = self.perform_dict.index[self.perform_dict["name"] == name]
        self.perform_dict.drop(index, inplace=True)
        return

    def get_best_model(self, index=0) -> str:
        """Return the *index*-th best model name.

        The synthetic 'ensemble' entry is excluded from the ranking.
        """
        sorted_df = self.perform_dict.sort_values(
            by=self.major_field, ascending=not self.is_higher_better[self.major_field]
        )
        name_list = sorted_df["name"].tolist()
        if "ensemble" in name_list:
            name_list.remove("ensemble")
        return name_list[index]

    def show(self, top_k=-1) -> None:
        """Print the top-*top_k* rows (all rows when top_k == -1)."""
        if top_k == -1:
            top_k = len(self.perform_dict["name"])
        print(
            self.perform_dict.sort_values(
                by=self.major_field,
                ascending=not self.is_higher_better[self.major_field],
            ).head(top_k)
        )
Parameters
----------
fields: list of `str`
A list of field name that shows the model performance. The first field is used as
the major field for sorting the model performances.
is_higher_better: list of `bool`
A list of indicator that whether the field score is higher better. | 62598facfff4ab517ebcd79d |
class I18NTest(MultipleCoursesTestBase):
    """Test courses running in different locales and containing I18N content."""

    def test_csv_supports_utf8(self):
        # Replace a course title with a UTF-8 (Russian) one in the CSV data
        # files and verify that the raw data and every rendered page handle
        # it correctly.
        title_ru = u'Найди факты быстрее'
        csv_file = os.path.join(self.course_ru.home, 'data/unit.csv')
        self.modify_file(
            csv_file, ',Find facts faster,', ',%s,' % title_ru)
        self.modify_file(
            os.path.join(self.course_ru.home, 'data/lesson.csv'),
            ',Find facts faster,', ',%s,' % title_ru)
        # The raw CSV must round-trip the UTF-8 title unchanged.
        rows = []
        for row in csv.reader(open(csv_file)):
            rows.append(row)
        assert title_ru == rows[6][3].decode('utf-8')
        response = self.get('/courses/%s/course' % self.course_ru.path)
        assert_contains(title_ru, response.body)
        # Walk the course as a new student, a returning student, and an
        # admin (staying logged in for the dashboard checks below).
        self.walk_the_course(self.course_ru, first_time=True)
        self.walk_the_course(self.course_ru, first_time=False)
        self.walk_the_course(
            self.course_ru, first_time=False, is_admin=True, logout=False)
        dashboard_url = '/courses/%s/dashboard' % self.course_ru.path

        def assert_page_contains(page_name, text_array):
            # Helper: fetch one dashboard page and assert all texts appear.
            response = self.get('%s?action=%s' % (dashboard_url, page_name))
            for text in text_array:
                assert_contains(text, response.body)

        assert_page_contains('', [
            title_ru, self.course_ru.unit_title, self.course_ru.lesson_title])
        assert_page_contains(
            'edit_questions', [self.course_ru.title])
        assert_page_contains(
            '', [self.course_ru.title])
        assert_contains(
            vfs.AbstractFileSystem.normpath(self.course_ru.home),
            self.get('%s?action=settings_about' % dashboard_url).body)
        actions.logout()

    def test_i18n(self):
        # Sanity-check that key localized strings render on the course page.
        response = self.get('/courses/%s/course' % self.course_ru.path)
        assert_contains_all_of(
            [u'Войти', u'Учебный план', u'Курс'], response.body)
| 62598fac38b623060ffa9052 |
class Amenity(BaseModel):
    """Amenity Class - Module"""

    # Amenity name; empty string until set by the caller or deserialization.
    name = ""
class AsyncResultFactory:
    """Factory for AsyncResult objects.

    Creates each result together with a unique key and registers the pair
    in ``async_reuslts``. (Docstring translated from Chinese; the attribute
    keeps its historical spelling because external code may rely on it.)
    """

    __metaclass__ = Singleton

    def __init__(self):
        # Mapping of unique key -> AsyncResult.
        self.async_reuslts = {}

    def createAsyncResult(self):
        """Create and register a new AsyncResult; return (key, result)."""
        _async_result = AsyncResult()
        _key = _makeUniqueKey(_async_result)
        self.async_reuslts[_key] = _async_result
        return _key, _async_result

    def dropAsyncResultByKey(self, _key):
        """Remove the result registered under *_key*, if any."""
        # BUGFIX: dict.has_key() does not exist on Python 3; use `in`.
        if _key in self.async_reuslts:
            del self.async_reuslts[_key]

    def popAsyncResult(self, _key):
        """Return and unregister the result under *_key* (None when absent)."""
        a = self.async_reuslts.get(_key)
        self.dropAsyncResultByKey(_key)
        return a
| 62598fac16aa5153ce4004bb |
class PySqMxObj:
    """Simple square matrix stored as a list of float rows."""

    # Matrix rows; None at class level, replaced by a list per instance.
    content = None

    @property
    def size(self):
        """Number of rows currently stored."""
        return len(self.content or '')

    def __init__(self, data: list = None):
        """Build a matrix from *data* (a square sequence of rows).

        BUGFIX: the default used to be a mutable list literal; it is now
        None (backward compatible -- the default was only iterated).
        """
        self.content = []
        for line in (data or []):
            # The input must be square: each row as long as the row count.
            assert len(line) == len(data)
            self.fill(line)

    def load(self, path: str) -> bool:
        """Read a whitespace-separated matrix from *path*.

        Returns True when the result is square; otherwise the partially
        loaded content is cleared and False is returned.
        """
        with open(path) as mx_data:
            line = True
            while line:
                line = mx_data.readline()
                if line:
                    self.fill(line.split())
        res = self.is_valid
        if not res:
            self.clear()
        return res

    @property
    def is_valid(self):
        """True when the stored content is a square matrix."""
        n = len(self.content)
        for line in self.content:
            if n != len(line):
                return False
        return True

    def clear(self):
        """Drop all rows."""
        self.content.clear()

    def show(self, size=0):
        """Print the top-left *size* x *size* sub-matrix (whole by default)."""
        size = size or self.size
        for x in range(size):
            line = " ".join(map("{:.6f}".format, self.content[x][:size]))
            print("|{}|".format(line))

    def save(self, path: str):
        """Write the matrix to *path*, one space-separated row per line."""
        with open(path, mode='w') as res:
            for line in self.content:
                line = " ".join(map("{:.6f}".format, line))
                res.write(line + '\n')

    def fill(self, line: list):
        """Append one row, converting every element to float."""
        self.content.append(list(map(float, line)))

    def compact(self, size: int):
        """Shrink the matrix to *size* x *size* by averaging overlapping
        windows of the original cells."""
        m_size = self.size
        step = m_size / size
        res = []
        for x in range(size):
            # Clamp the window [a, b) to the matrix bounds.
            a_x = max(int((x - 1) * step), 0)
            b_x = min(int((x + 1) * step), m_size)
            new_line = []
            for y in range(size):
                a_y = max(int((y - 1) * step), 0)
                b_y = min(int((y + 1) * step), m_size)
                volume = (b_x - a_x) * (b_y - a_y)
                new_line.append(
                    sum((
                        self.content[x_i][y_i]
                        for x_i in range(a_x, b_x)
                        for y_i in range(a_y, b_y)
                    )) / volume)
            res.append(new_line)
        self.content = res
| 62598fac01c39578d7f12d38 |
class AOAWithFlapMax(KeyPointValueNode, FlapOrConfigurationMaxOrMin):
    """Peak angle of attack recorded per flap-lever detent while airborne.

    FDS developed this KPV to support the UK CAA Significant Seven
    programme ("Loss of Control. Pitch/Angle of Attack vs stall angles").
    """

    NAME_FORMAT = 'AOA With Flap %(flap)s Max'
    NAME_VALUES = NAME_VALUES_LEVER
    name = 'AOA With Flap Max'
    units = ut.DEGREE

    @classmethod
    def can_operate(cls, available):
        # Requires AOA and Airborne plus at least one flap-lever source.
        return any_of(('Flap Lever', 'Flap Lever (Synthetic)'), available) and all_of(('AOA', 'Airborne'), available)

    def derive(self,
               flap_lever=M('Flap Lever'),
               flap_synth=M('Flap Lever (Synthetic)'),
               aoa=P('AOA'),
               scope=S('Airborne')):
        # Prefer the recorded lever; fall back to the synthetic one.
        flap = flap_lever or flap_synth
        data = self.flap_or_conf_max_or_min(flap, aoa, max_value, scope, include_zero=True)
        for index, value, detent in data:
            self.create_kpv(index, value, flap=detent)
"Loss of Control. Pitch/Angle of Attack vs stall angles"
This is an adaptation of the airspeed algorithm, used to determine peak
AOA vs flap. It may not be possible to obtain stalling angle of attack
figures to set event thresholds, but a threshold based on in-service data
may suffice. | 62598faca8370b77170f0395 |
class BaseTransform(object):
    """Base class for optimal transport transform methods.

    .. warning::
        This class should **not** be used directly. Use derived classes
        instead.
    """

    def __init__(self):
        # Fitted flag plus the artifacts a forward transform produces.
        self.is_fitted = False
        self.sig0_ = None
        self.displacements_ = None
        self.transport_map_ = None

    def _check_is_fitted(self):
        """Raise AssertionError unless ``forward`` has been called."""
        if self.is_fitted:
            return
        raise AssertionError("The forward transform of {0!s} has not been "
                             "called yet. Call 'forward' before using "
                             "this method".format(type(self).__name__))

    def forward(self):
        """Compute the forward transform (subclass responsibility)."""
        raise NotImplementedError

    def inverse(self):
        """Reconstruct the input by applying the inverse transport map."""
        self._check_is_fitted()
        return self.apply_inverse_map(self.transport_map_, self.sig0_)

    def apply_forward_map(self):
        raise NotImplementedError

    def apply_inverse_map(self):
        raise NotImplementedError
.. warning::
This class should **not** be used directly. Use derived classes instead. | 62598fac7d847024c075c37c |
class Ranges:
    """A class to keep track of a set of Range instances.

    Overlapping ranges are collapsed on insertion, and ranges can be
    removed (splitting partially covered ranges).
    """

    def __init__(self, ranges=[]):
        # NOTE: the default list is only iterated, never stored or mutated.
        self._ranges = []
        for r in ranges:
            self.add(r)

    def add(self, range):
        """Insert *range*, keeping the list sorted and merging overlaps."""
        for i, r in enumerate(self._ranges):
            if range.min is None or r.max is None or range.min <= r.max:
                self._ranges.insert(i, range)
                break
        else:
            i = len(self._ranges)
            self._ranges.append(range)
        # Merge the inserted range with any subsequent overlapping ranges.
        subsequent_ranges = self._ranges[i + 1:]
        del self._ranges[i + 1:]
        for j, r in enumerate(subsequent_ranges):
            if r.intersect(self._ranges[i]):
                self._ranges[i] = self._ranges[i].union(r)
            else:
                self._ranges.extend(subsequent_ranges[j:])
                break

    def remove(self, range):
        """Remove *range*, splitting any partially covered range."""
        for i, r in enumerate(self._ranges):
            if range.intersect(r):
                subsequent_ranges = self._ranges[i:]
                del self._ranges[i:]
                for j, r in enumerate(subsequent_ranges):
                    if r.min < range.min:
                        self._ranges.append(Range(r.min, range.min - 1))
                    if range.max is not None and (r.max is None or r.max > range.max):
                        self._ranges.append(Range(range.max + 1, r.max))
                        self._ranges.extend(subsequent_ranges[j + 1:])
                        break
                break

    def intersect(self, range):
        """Return a new Ranges holding the overlap of each range with *range*."""
        result = Ranges()
        for r in self:
            overlap = range.intersect(r)
            if overlap:
                result.add(overlap)
        return result

    def __iter__(self):
        return iter(self._ranges)

    def __getitem__(self, i):
        return self._ranges[i]

    def __len__(self):
        # BUGFIX: without __len__ (or __bool__) every Ranges instance was
        # truthy, so `if self.intersect(...)` in get_default never took the
        # empty-intersection branches and crashed with IndexError instead.
        return len(self._ranges)

    def get_ranges(self):
        return self._ranges

    def __str__(self):
        return ', '.join(str(r) for r in self.get_ranges())

    def get_default(self):
        """Pick a representative value: the smallest allowed non-negative
        one, else the largest allowed negative one, else 0."""
        positive = Range(0, None)
        negative = Range(None, 0)
        if self.intersect(positive):
            result = self.intersect(positive)[0].min
        elif self.intersect(negative):
            # BUGFIX: the original read the *positive* intersection here.
            result = self.intersect(negative)[-1].max
        else:
            result = 0
            # BUGFIX: the original referenced undefined names `name`/`value`.
            logging.warning('Unable to choose a good output for %s; using %s',
                            self, result)
        return result
Overlapping ranges will be collapsed, and ranges can be removed. | 62598fac10dbd63aa1c70b6c |
class InteractiveReplacer(Replacer):
    """Open for subclassing with hooks for interactive suggestion dialogs.

    Provides method `suggestion_dialog` which is called with suggestion data
    and expects a {'accept': True/False, 'patches': {..}} result.
    """

    def replace_suggestion(self, suggestion):
        # NOTE(review): the dialog result is unpacked as an (accept, patches)
        # tuple here although the class docstring describes a dict -- confirm
        # which contract `suggestion_dialog` implementations follow.
        accept, patches = self.suggestion_dialog(suggestion)
        # Copy so the caller's suggestion object is not mutated.
        suggestion = copy(suggestion)
        # NOTE(review): patches are applied when the suggestion is *not*
        # accepted as-is; verify this inverted-looking condition is intended.
        if not accept:
            suggestion.args.update(patches)
        self.apply_suggestion(suggestion)
Provides method `suggestion_dialog` which is called with suggestion data
and expects a {'accept': True/False, 'patches': {..}} result | 62598facdd821e528d6d8eee |
class MultiHasher(object):
    """An utility class that applies multiple hash algorithms at once.

    Removes the boilerplate of feeding several `hashlib` hashers in
    parallel and exposes an API similar to `hashlib` itself.

    Args:
      algorithms: Names of `hashlib` algorithms to apply (defaults to
        md5, sha1 and sha256 when empty or None).
      progress: Optional callback invoked while hashing buffers.
    """

    def __init__(self, algorithms=None, progress=None):
        names = algorithms or ["md5", "sha1", "sha256"]
        self._hashers = {name: hashlib.new(name) for name in names}
        self._bytes_read = 0
        self._progress = progress

    def HashFilePath(self, path, byte_count):
        """Hash up to *byte_count* bytes of the file at *path*."""
        with open(path, "rb") as fd:
            self.HashFile(fd, byte_count)

    def HashFile(self, fd, byte_count):
        """Hash up to *byte_count* bytes read from *fd* in bounded chunks."""
        remaining = byte_count
        while remaining > 0:
            chunk_size = min(remaining, constants.CLIENT_MAX_BUFFER_SIZE)
            chunk = fd.read(chunk_size)
            if not chunk:
                break
            self.HashBuffer(chunk)
            remaining -= chunk_size

    def HashBuffer(self, buf):
        """Feed *buf* to every hasher, firing the progress callback."""
        for hasher in self._hashers.values():
            hasher.update(buf)
            if self._progress:
                self._progress()
        self._bytes_read += len(buf)

    def GetHashObject(self):
        """Return an `rdf_crypto.Hash` holding every digest and the total
        number of bytes hashed."""
        hash_object = rdf_crypto.Hash()
        hash_object.num_bytes = self._bytes_read
        for algorithm in self._hashers:
            setattr(hash_object, algorithm, self._hashers[algorithm].digest())
        return hash_object
Objects that need to construct `Hash` object with multiple hash values need
to apply multiple hash algorithms to the given data. This class removes some
boilerplate associated with it and provides a readable API similar to the one
exposed by Python's `hashlib` module.
Args:
algorithms: List of names of the algorithms from the `hashlib` module that
need to be applied.
progress: An (optional) progress callback called when hashing functions are
applied to the data. | 62598fac63d6d428bbee2764 |
class RequestInfo(PhoxRequestContent):
    """Content for request type ``request-info``."""

    # Reference to the request being queried.
    request = RefField()
@ClassFactory.register(ClassType.METRIC)
class LaneMetric(MetricBase):
    """Save and summary metric for lane metric."""

    def __init__(self, *, method, eval_width, eval_height, iou_thresh,
                 lane_width, thresh_list=None):
        support_methods = ['f1_measure', 'precision', 'recall']
        if method not in support_methods:
            raise NotImplementedError(f'method should be one of {support_methods}')
        self.method = method
        self.eval_params = dict(
            eval_width=eval_width,
            eval_height=eval_height,
            iou_thresh=iou_thresh,
            lane_width=lane_width,
        )
        # One core handler per probability threshold; a single handler with
        # prob_thresh=None when no threshold list is supplied.
        thresholds = thresh_list if thresh_list is not None else [None]
        self.metric_handlers = [
            LaneMetricCore(**self.eval_params, prob_thresh=t)
            for t in thresholds
        ]

    def __call__(self, output, target, *args, **kwargs):
        """Feed every (prediction, ground-truth) pair to every handler."""
        for handler in self.metric_handlers:
            for pair_spec in output:
                handler(**pair_spec)

    def reset(self):
        """Clear the accumulated state of every handler."""
        for handler in self.metric_handlers:
            handler.reset()

    def summary(self):
        """Return the best score for the configured method across all
        threshold handlers."""
        return max(handlers.summary()[self.method]
                   for handlers in self.metric_handlers)
class Test(unittest.TestCase):
    """Unit tests for expression."""

    def test_implicit_multiplication(self):
        # Implicit multiplication between "(x^k)/k!" and "exp(-x)" must parse
        # and allow a Taylor-series expansion without raising.
        expr = Expression("(x^k)/k! exp(-x)")
        expr.taylor_series()

    def test_sine_to_javascript(self):
        pass  # TODO: not yet implemented

    def test_exponential_to_javascript(self):
        pass  # TODO: not yet implemented
class get_tables_result(object): <NEW_LINE> <INDENT> thrift_spec = ( (0, TType.LIST, 'success', (TType.STRING, 'UTF8', False), None, ), (1, TType.STRUCT, 'e', (TMapDException, TMapDException.thrift_spec), None, ), ) <NEW_LINE> def __init__(self, success=None, e=None,): <NEW_LINE> <INDENT> self.success = success <NEW_LINE> self.e = e <NEW_LINE> <DEDENT> def read(self, iprot): <NEW_LINE> <INDENT> if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: <NEW_LINE> <INDENT> iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) <NEW_LINE> return <NEW_LINE> <DEDENT> iprot.readStructBegin() <NEW_LINE> while True: <NEW_LINE> <INDENT> (fname, ftype, fid) = iprot.readFieldBegin() <NEW_LINE> if ftype == TType.STOP: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> if fid == 0: <NEW_LINE> <INDENT> if ftype == TType.LIST: <NEW_LINE> <INDENT> self.success = [] <NEW_LINE> (_etype159, _size156) = iprot.readListBegin() <NEW_LINE> for _i160 in range(_size156): <NEW_LINE> <INDENT> _elem161 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() <NEW_LINE> self.success.append(_elem161) <NEW_LINE> <DEDENT> iprot.readListEnd() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> elif fid == 1: <NEW_LINE> <INDENT> if ftype == TType.STRUCT: <NEW_LINE> <INDENT> self.e = TMapDException() <NEW_LINE> self.e.read(iprot) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> iprot.readFieldEnd() <NEW_LINE> <DEDENT> iprot.readStructEnd() <NEW_LINE> <DEDENT> def write(self, oprot): <NEW_LINE> <INDENT> if oprot._fast_encode is not None and self.thrift_spec is not None: <NEW_LINE> <INDENT> oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) <NEW_LINE> return <NEW_LINE> <DEDENT> oprot.writeStructBegin('get_tables_result') 
<NEW_LINE> if self.success is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('success', TType.LIST, 0) <NEW_LINE> oprot.writeListBegin(TType.STRING, len(self.success)) <NEW_LINE> for iter162 in self.success: <NEW_LINE> <INDENT> oprot.writeString(iter162.encode('utf-8') if sys.version_info[0] == 2 else iter162) <NEW_LINE> <DEDENT> oprot.writeListEnd() <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> if self.e is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('e', TType.STRUCT, 1) <NEW_LINE> self.e.write(oprot) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> oprot.writeFieldStop() <NEW_LINE> oprot.writeStructEnd() <NEW_LINE> <DEDENT> def validate(self): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] <NEW_LINE> return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not (self == other) | Attributes:
- success
- e | 62598facbe8e80087fbbf01d |
class BertModel(nn.Module): <NEW_LINE> <INDENT> def __init__(self, config: BertConfig): <NEW_LINE> <INDENT> super(BertModel, self).__init__() <NEW_LINE> self.embeddings = BERTEmbeddings(config) <NEW_LINE> self.encoder = BERTEncoder(config) <NEW_LINE> self.pooler = BERTPooler(config) <NEW_LINE> <DEDENT> def forward(self, input_ids, token_type_ids=None, attention_mask=None): <NEW_LINE> <INDENT> if attention_mask is None: <NEW_LINE> <INDENT> attention_mask = torch.ones_like(input_ids) <NEW_LINE> <DEDENT> if token_type_ids is None: <NEW_LINE> <INDENT> token_type_ids = torch.zeros_like(input_ids) <NEW_LINE> <DEDENT> extended_attention_mask = attention_mask.unsqueeze(1).unsqueeze(2) <NEW_LINE> extended_attention_mask = extended_attention_mask.float() <NEW_LINE> extended_attention_mask = (1.0 - extended_attention_mask) * -10000.0 <NEW_LINE> embedding_output = self.embeddings(input_ids, token_type_ids) <NEW_LINE> all_encoder_layers, all_encoder_attention_scores = self.encoder(embedding_output, extended_attention_mask) <NEW_LINE> sequence_output = all_encoder_layers[-1] <NEW_LINE> pooled_output = self.pooler(sequence_output, optional_attn_mask=attention_mask) <NEW_LINE> return all_encoder_layers, pooled_output, all_encoder_attention_scores, embedding_output <NEW_LINE> <DEDENT> def backward_lrp(self, relevance_score): <NEW_LINE> <INDENT> relevance_score = self.pooler.backward_lrp(relevance_score) <NEW_LINE> relevance_score = self.encoder.backward_lrp(relevance_score) <NEW_LINE> return relevance_score | BERT model ("Bidirectional Embedding Representations from a Transformer").
Example usage:
```python
# Already been converted into WordPiece token ids
input_ids = torch.LongTensor([[31, 51, 99], [15, 5, 0]])
input_mask = torch.LongTensor([[1, 1, 1], [1, 1, 0]])
token_type_ids = torch.LongTensor([[0, 0, 1], [0, 2, 0]])
config = modeling.BertConfig(vocab_size=32000, hidden_size=512,
num_hidden_layers=8, num_attention_heads=6, intermediate_size=1024)
model = modeling.BertModel(config=config)
all_encoder_layers, pooled_output = model(input_ids, token_type_ids, input_mask)
``` | 62598fac3317a56b869be527 |
class GCN_estimator_wrapper(BaseEstimator, ClassifierMixin): <NEW_LINE> <INDENT> def __init__(self, checkpoint_dir, logger, h1=None, h2=None, out=None, in_feat=90, batch_size=64, lr=0.001, nsteps=1000, reset=False): <NEW_LINE> <INDENT> self.gcn = GraphClassificationNet(90, h1, h2, out) <NEW_LINE> self.batch_size = batch_size <NEW_LINE> self.lr = lr <NEW_LINE> self.nsteps = nsteps <NEW_LINE> self.checkpoint_dir = checkpoint_dir <NEW_LINE> self.logger = logger <NEW_LINE> self.h1 = h1 <NEW_LINE> self.h2 = h2 <NEW_LINE> self.out = out <NEW_LINE> self.reset = reset <NEW_LINE> logger.info("Success init of GCN params {}-{}-{}" .format(self.h1, self.h2, self.out)) <NEW_LINE> logger.info("Training parameters {} steps and {} learning rate" .format(self.nsteps, self.lr)) <NEW_LINE> <DEDENT> def fit(self, X_train, Y_train, X_val=None, Y_val=None, filename=""): <NEW_LINE> <INDENT> if self.reset: <NEW_LINE> <INDENT> self.gcn = GraphClassificationNet(90, self.h1, self.h2, self.out) <NEW_LINE> <DEDENT> training_loop(self.gcn, X_train, Y_train, self.batch_size, self.lr, self.logger, self.checkpoint_dir, filename, X_val, Y_val, nsteps=self.nsteps) <NEW_LINE> <DEDENT> def predict(self, X_test): <NEW_LINE> <INDENT> self.gcn.eval() <NEW_LINE> test = ToTorchDataset(np.asarray(X_test)) <NEW_LINE> testloader = torch.utils.data.DataLoader(test, batch_size=self.batch_size, shuffle=False, num_workers=4) <NEW_LINE> y_pred = [] <NEW_LINE> with torch.no_grad(): <NEW_LINE> <INDENT> for data in testloader: <NEW_LINE> <INDENT> X, A1, A2, A3, A4, A5 = data_to_matrices(data) <NEW_LINE> outputs = self.gcn(X, A1, A2, A3, A4, A5) <NEW_LINE> _, predicted = torch.max(outputs.data, 1) <NEW_LINE> y_pred.append(predicted.cpu().numpy()) <NEW_LINE> <DEDENT> <DEDENT> labels = np.asarray(np.concatenate(y_pred)) <NEW_LINE> return(labels) <NEW_LINE> <DEDENT> def predict_proba(self, X_test): <NEW_LINE> <INDENT> self.gcn.eval() <NEW_LINE> test = ToTorchDataset(X_test, None) <NEW_LINE> testloader = 
torch.utils.data.DataLoader(test, batch_size=self.batch_size, shuffle=False, num_workers=4) <NEW_LINE> proba = [] <NEW_LINE> with torch.no_grad(): <NEW_LINE> <INDENT> for data in testloader: <NEW_LINE> <INDENT> X, A1, A2, A3, A4, A5 = data_to_matrices(data) <NEW_LINE> outputs = self.gcn(X, A1, A2, A3, A4, A5) <NEW_LINE> proba.append(outputs.data.cpu().numpy()) <NEW_LINE> <DEDENT> <DEDENT> out_proba = np.asarray(np.concatenate(proba, 0)) <NEW_LINE> return(out_proba) | Wrapper for the Graph Convolutional network.
| 62598fac236d856c2adc941a |
class TestRemoveCommonNameRouteWithDB: <NEW_LINE> <INDENT> def test_remove_common_name_does_not_exist(self, app, db): <NEW_LINE> <INDENT> with app.test_client() as tc: <NEW_LINE> <INDENT> rv = tc.get(url_for('seeds.remove_common_name', cn_id=42)) <NEW_LINE> <DEDENT> assert rv.location == url_for('seeds.select_common_name', dest='seeds.remove_common_name', _external=True) <NEW_LINE> <DEDENT> def test_remove_common_name_no_id(self, app, db): <NEW_LINE> <INDENT> with app.test_client() as tc: <NEW_LINE> <INDENT> rv = tc.get(url_for('seeds.remove_common_name')) <NEW_LINE> <DEDENT> assert rv.location == url_for('seeds.select_common_name', dest='seeds.remove_common_name', _external=True) <NEW_LINE> <DEDENT> def test_remove_common_name_not_verified(self, app, db): <NEW_LINE> <INDENT> cn = CommonName() <NEW_LINE> cn2 = CommonName() <NEW_LINE> db.session.add_all([cn, cn2]) <NEW_LINE> cn.name = 'Coleus' <NEW_LINE> cn2.name = 'Kingus' <NEW_LINE> db.session.commit() <NEW_LINE> assert cn in CommonName.query.all() <NEW_LINE> with app.test_client() as tc: <NEW_LINE> <INDENT> rv = tc.post(url_for('seeds.remove_common_name', cn_id=cn.id), data=dict(verify_removal='', move_to=cn2.id)) <NEW_LINE> <DEDENT> assert rv.location == url_for('seeds.remove_common_name', cn_id=cn.id, _external=True) <NEW_LINE> with app.test_client() as tc: <NEW_LINE> <INDENT> rv = tc.post(url_for('seeds.remove_common_name', cn_id=cn.id), data=dict(verify_removal='', move_to=cn2.id), follow_redirects=True) <NEW_LINE> <DEDENT> assert 'Common name was not removed' in str(rv.data) <NEW_LINE> assert cn in CommonName.query.all() <NEW_LINE> <DEDENT> def test_remove_common_name_renders_page(self, app, db): <NEW_LINE> <INDENT> cn = CommonName() <NEW_LINE> db.session.add(cn) <NEW_LINE> cn.name = 'Coleus' <NEW_LINE> db.session.commit() <NEW_LINE> with app.test_client() as tc: <NEW_LINE> <INDENT> rv = tc.get(url_for('seeds.remove_common_name', cn_id=cn.id)) <NEW_LINE> <DEDENT> assert rv.status_code == 200 <NEW_LINE> 
assert 'Remove Common Name' in str(rv.data) <NEW_LINE> <DEDENT> def test_remove_common_name_verified(self, app, db): <NEW_LINE> <INDENT> cn = CommonName() <NEW_LINE> cn2 = CommonName() <NEW_LINE> idx = Index(name='Perennial') <NEW_LINE> db.session.add_all([idx, cn, cn2]) <NEW_LINE> cn.name = 'Coleus' <NEW_LINE> cn.index = idx <NEW_LINE> cn2.name = 'Kingus' <NEW_LINE> cn2.index = idx <NEW_LINE> db.session.commit() <NEW_LINE> assert cn in CommonName.query.all() <NEW_LINE> with app.test_client() as tc: <NEW_LINE> <INDENT> tc.post(url_for('seeds.remove_common_name', cn_id=cn.id), data=dict(verify_removal=True, move_to=cn2.id), follow_redirects=True) <NEW_LINE> <DEDENT> assert cn not in CommonName.query.all() | Test seeds.remove_common_name. | 62598fac5166f23b2e243392 |
class OdontothemePlugin(plugins.OpalPlugin): <NEW_LINE> <INDENT> urls = urlpatterns <NEW_LINE> javascripts = { 'opal.odontotheme': [ ] } <NEW_LINE> stylesheets = [ "css/odonto.css" ] | Main entrypoint to expose this plugin to our Opal application. | 62598facbe383301e02537b3 |
class UnitTest(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.fname = "File_name" <NEW_LINE> <DEDENT> @mock.patch("gen_libs.subprocess.Popen") <NEW_LINE> def test_compress(self, mock_popen): <NEW_LINE> <INDENT> mock_popen.return_value = SubProcess() <NEW_LINE> self.assertFalse(gen_libs.compress(self.fname)) | Class: UnitTest
Description: Class which is a representation of a unit testing.
Methods:
setUp
test_compress | 62598facb7558d58954635e3 |
class Solution: <NEW_LINE> <INDENT> def maxEnvelopes(self, envelopes): <NEW_LINE> <INDENT> if not envelopes: <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> envelopes.sort(key=lambda x: (x[0], x[1])) <NEW_LINE> n = len(envelopes) <NEW_LINE> dp = [1] * n <NEW_LINE> for i in range(n): <NEW_LINE> <INDENT> for j in range(i): <NEW_LINE> <INDENT> if self.can_fit(envelopes, i, j): <NEW_LINE> <INDENT> print(i, j) <NEW_LINE> dp[i] = max(dp[i], dp[j] + 1) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return max(dp) <NEW_LINE> <DEDENT> def can_fit(self, envelopes, i, j): <NEW_LINE> <INDENT> return envelopes[j][0] < envelopes[i][0] and envelopes[j][1] < envelopes[i][1] | @param envelopes: a number of envelopes with widths and heights
@return: the maximum number of envelopes | 62598fac76e4537e8c3ef567 |
class DatabaseFailureException(Exception): <NEW_LINE> <INDENT> def __init__(self, msg: str, *args): <NEW_LINE> <INDENT> super().__init__(msg, *args) <NEW_LINE> self._msg: str = msg <NEW_LINE> <DEDENT> @property <NEW_LINE> def failure_reason_msg(self) -> str: <NEW_LINE> <INDENT> return self._msg | An exception raised to give more information about the failure. | 62598faca8370b77170f0396 |
class State: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self._sensor_state = {} <NEW_LINE> self._inout_state = {} <NEW_LINE> self._applianece_state = {} <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return str(self.dump_at_now()) <NEW_LINE> <DEDENT> def format_sensors(self, keys, value): <NEW_LINE> <INDENT> for key in keys: <NEW_LINE> <INDENT> self._sensor_state[key] = value <NEW_LINE> <DEDENT> <DEDENT> def format_appliances(self, keys,value): <NEW_LINE> <INDENT> for key in keys: <NEW_LINE> <INDENT> self._applianece_state[key] = value <NEW_LINE> <DEDENT> <DEDENT> def format_inout(self, keys, value): <NEW_LINE> <INDENT> for key in keys: <NEW_LINE> <INDENT> self._inout_state[key] = value <NEW_LINE> <DEDENT> <DEDENT> def update_sensor(self, key, value): <NEW_LINE> <INDENT> self._sensor_state[key] = value <NEW_LINE> <DEDENT> def update_inout(self, key, value): <NEW_LINE> <INDENT> self._inout_state[key] = value <NEW_LINE> <DEDENT> def update_appliance(self, key, value): <NEW_LINE> <INDENT> self._applianece_state[key] = value <NEW_LINE> <DEDENT> def get_sensor_keys(self): <NEW_LINE> <INDENT> return self._sensor_state.keys() <NEW_LINE> <DEDENT> def get_inout_keys(self): <NEW_LINE> <INDENT> return self._inout_state.keys() <NEW_LINE> <DEDENT> def get_appliance_keys(self): <NEW_LINE> <INDENT> return self._applianece_state.keys() <NEW_LINE> <DEDENT> def get_sensor_value(self, key): <NEW_LINE> <INDENT> return self._sensor_state[key] <NEW_LINE> <DEDENT> def get_inout_value(self, key): <NEW_LINE> <INDENT> return self._inout_state[key] <NEW_LINE> <DEDENT> def get_appliance_value(self, key): <NEW_LINE> <INDENT> return self._applianece_state[key] <NEW_LINE> <DEDENT> def dump(self): <NEW_LINE> <INDENT> return {"sensors": self._sensor_state, "inout": self._inout_state, "appliances": self._applianece_state} <NEW_LINE> <DEDENT> def dump_with_timestamp(self, time): <NEW_LINE> <INDENT> return {"timestamp": str(time), "sensors": self._sensor_state, "inout": 
self._inout_state, "appliances": self._applianece_state} <NEW_LINE> <DEDENT> def dump_at_now(self): <NEW_LINE> <INDENT> tz = pytz.timezone('Asia/Tokyo') <NEW_LINE> now = datetime.now(tz) <NEW_LINE> return self.dump_with_timestamp(now) | センサや家電の状態を保持するクラス | 62598fac627d3e7fe0e06e68 |
class Edit (models.Model) : <NEW_LINE> <INDENT> by = models.TextField (null = False) <NEW_LINE> timeOfApproval = models.DateTimeField (auto_now_add = True) <NEW_LINE> field = models.TextField (null = False); <NEW_LINE> edited = models.ForeignKey (SurveyAnswer, on_delete = models.PROTECT, default = DEFAULT_LINK) | Records the dits to the records made by a moderator or other privileged user. | 62598fac16aa5153ce4004bd |
class DependencyParserTests(unittest.TestCase): <NEW_LINE> <INDENT> def parse(self, str): <NEW_LINE> <INDENT> parser = piupartslib.dependencyparser.DependencyParser(str) <NEW_LINE> deps = parser.get_dependencies() <NEW_LINE> names = [] <NEW_LINE> for dep in deps: <NEW_LINE> <INDENT> names.append([]) <NEW_LINE> for simpledep in dep: <NEW_LINE> <INDENT> names[-1].append(simpledep.name) <NEW_LINE> <DEDENT> <DEDENT> return deps, names <NEW_LINE> <DEDENT> def testEmpty(self): <NEW_LINE> <INDENT> deps, names = self.parse("") <NEW_LINE> self.failUnlessEqual(deps, []) <NEW_LINE> <DEDENT> def testSingle(self): <NEW_LINE> <INDENT> deps, names = self.parse("foo") <NEW_LINE> self.failUnlessEqual(names, [["foo"]]) <NEW_LINE> <DEDENT> def testTwo(self): <NEW_LINE> <INDENT> deps, names = self.parse("foo, bar") <NEW_LINE> self.failUnlessEqual(names, [["foo"], ["bar"]]) <NEW_LINE> <DEDENT> def testAlternatives(self): <NEW_LINE> <INDENT> deps, names = self.parse("foo, bar | foobar") <NEW_LINE> self.failUnlessEqual(names, [["foo"], ["bar", "foobar"]]) | Tests for module dependencyparser. | 62598facf548e778e596b55f |
class DeviceUnreachable(_KwException): <NEW_LINE> <INDENT> pass | Raised when a request is made to an unreachable (turned off) device. | 62598fac1b99ca400228f50d |
class AlarmGroupsEnum(Enum): <NEW_LINE> <INDENT> unknown = 0 <NEW_LINE> environ = 1 <NEW_LINE> ethernet = 2 <NEW_LINE> fabric = 3 <NEW_LINE> power = 4 <NEW_LINE> software = 5 <NEW_LINE> slice = 6 <NEW_LINE> cpu = 7 <NEW_LINE> controller = 8 <NEW_LINE> sonet = 9 <NEW_LINE> otn = 10 <NEW_LINE> sdh_controller = 11 <NEW_LINE> asic = 12 <NEW_LINE> fpd_infra = 13 <NEW_LINE> shelf = 14 <NEW_LINE> mpa = 15 <NEW_LINE> ots = 16 <NEW_LINE> last = 17 <NEW_LINE> @staticmethod <NEW_LINE> def _meta_info(): <NEW_LINE> <INDENT> from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_alarmgr_server_oper as meta <NEW_LINE> return meta._meta_table['AlarmGroupsEnum'] | AlarmGroupsEnum
Alarm groups
.. data:: unknown = 0
An unknown alarm group
.. data:: environ = 1
Environomental alarm group
.. data:: ethernet = 2
Ethernet alarm group
.. data:: fabric = 3
Fabric related alarm group
.. data:: power = 4
Power and PEM group of alarms
.. data:: software = 5
Software group of alarms
.. data:: slice = 6
Slice group of alarms
.. data:: cpu = 7
CPU group of alarms
.. data:: controller = 8
Controller group of alarms
.. data:: sonet = 9
Sonet group of alarms
.. data:: otn = 10
OTN group of alarms
.. data:: sdh_controller = 11
SDH group of alarms
.. data:: asic = 12
ASIC group of alarms
.. data:: fpd_infra = 13
FPD group of alarms
.. data:: shelf = 14
Shelf group of alarms
.. data:: mpa = 15
MPA group of alarms
.. data:: ots = 16
OTS group of alarms
.. data:: last = 17
Last unused group | 62598fac2ae34c7f260ab09c |
class Track(object): <NEW_LINE> <INDENT> DEF_LAP = 10 <NEW_LINE> DEF_TS = 0.015 <NEW_LINE> def __init__(self, num_participants=0, model=None, lap_distance=DEF_LAP): <NEW_LINE> <INDENT> self.participants = [Car(i) for i in range(num_participants)] <NEW_LINE> self.lap_distance = lap_distance <NEW_LINE> self.model = model <NEW_LINE> self.track_0_points = Track.generate_track_points(0) <NEW_LINE> self.track_1_points = Track.generate_track_points(1) <NEW_LINE> <DEDENT> def add_participant(self, car, idx=-1): <NEW_LINE> <INDENT> if car not in self.participants: <NEW_LINE> <INDENT> if 0 <= idx and not any(map(lambda x: idx == x.id, self.participants)): <NEW_LINE> <INDENT> car.id = idx <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> car.id = len(self.participants) <NEW_LINE> <DEDENT> self.participants.append(car) <NEW_LINE> <DEDENT> return car.id <NEW_LINE> <DEDENT> def remove_participant(self, idx): <NEW_LINE> <INDENT> car = self.get_car_by_id(idx) <NEW_LINE> if car: <NEW_LINE> <INDENT> self.participants.remove(car) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError("Car #{} is not on the Track!".format(idx)) <NEW_LINE> <DEDENT> <DEDENT> def get_car_by_id(self, idx): <NEW_LINE> <INDENT> for car in self.participants: <NEW_LINE> <INDENT> if car.id == idx: <NEW_LINE> <INDENT> return car <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def check_winner(self): <NEW_LINE> <INDENT> winner = None <NEW_LINE> for car in self.participants: <NEW_LINE> <INDENT> if car.distance > self.DEF_LAP - 1: <NEW_LINE> <INDENT> if winner is None: <NEW_LINE> <INDENT> winner = car <NEW_LINE> <DEDENT> elif car.distance > winner.distance: <NEW_LINE> <INDENT> winner = car <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return winner <NEW_LINE> <DEDENT> def update_all(self, gametime): <NEW_LINE> <INDENT> if self.participants: <NEW_LINE> <INDENT> for car in self.participants: <NEW_LINE> <INDENT> car.update(gametime) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> raise Exception("There are no cars on the 
track!") <NEW_LINE> <DEDENT> <DEDENT> @staticmethod <NEW_LINE> def generate_track_points(car_id): <NEW_LINE> <INDENT> dummy = Car(car_id) <NEW_LINE> points = [] <NEW_LINE> for _ in range(600): <NEW_LINE> <INDENT> points.append(physics.calculate_posn(dummy)) <NEW_LINE> dummy.distance += (1 / 600) <NEW_LINE> <DEDENT> return points | A Track is what we will race our Cars on
An important invariant in our definition is that the id of each car will be
its index in our participants list
It is defined by the following attributes:
- participants: List of Cars participating in the race
NOTE: As this scales, we might consider migrating this to a dictionary
to allow for faster lookups and removals
- lap_distance: The length of a lap. Used to measure the performance of
different participants
- model: An image representing the Track. In the future, this could become
a resizable track using the renderer module
- track_0_points: Generates the different track points for the 0th car on
the track
- track_1_points: Generates the different track points for the 1st car on
the track
And by the following behaviours:
- add_participant(Car): Adds participants to the track and returns its
index in our participants list
- remove_participant(idx): Removes participants from the track
- get_car_by_id(idx): Returns the car corresponding to the entered id
- check_winner(): Returns the winner if there is one otherwise returns None
- update_all(gametime): Run an update on every car. This is to be called at
each timestep
- generate_track_points(car_id): Returns the points for the track in the
actual game visual (to be used by Renderer) | 62598fac66656f66f7d5a3ab |
class CommandArgsTestCase(TestCase): <NEW_LINE> <INDENT> shard = 4 <NEW_LINE> def _get_arg_parser(self): <NEW_LINE> <INDENT> cmd = resend_lti_scores.Command() <NEW_LINE> return cmd.create_parser('./manage.py', 'resend_lti_scores') <NEW_LINE> <DEDENT> def test_course_keys(self): <NEW_LINE> <INDENT> parser = self._get_arg_parser() <NEW_LINE> args = parser.parse_args(['course-v1:edX+test_course+2525_fall', 'UBC/Law281/2015_T1']) <NEW_LINE> self.assertEqual(len(args.course_keys), 2) <NEW_LINE> key = args.course_keys[0] <NEW_LINE> self.assertIsInstance(key, CourseKey) <NEW_LINE> self.assertEqual(unicode(key), 'course-v1:edX+test_course+2525_fall') <NEW_LINE> <DEDENT> def test_no_course_keys(self): <NEW_LINE> <INDENT> parser = self._get_arg_parser() <NEW_LINE> args = parser.parse_args([]) <NEW_LINE> self.assertEqual(args.course_keys, []) | Test management command parses arguments properly. | 62598fac7d847024c075c37e |
class BufferManager(lists.ListView): <NEW_LINE> <INDENT> LABEL = 'Buffers' <NEW_LINE> ICON = 'page_white_stack.png' <NEW_LINE> COLUMNS = [objectlist.Column('markup', use_markup=True, mappers=[background_mapper]), objectlist.Column('bufid', visible=False), objectlist.Column('basename', visible=False, searchable=True)] <NEW_LINE> def get_by_bufid(self, bufid): <NEW_LINE> <INDENT> for buf in self.items: <NEW_LINE> <INDENT> if buf.bufid == bufid: <NEW_LINE> <INDENT> return buf <NEW_LINE> <DEDENT> <DEDENT> return None <NEW_LINE> <DEDENT> def get_by_filename(self, filename): <NEW_LINE> <INDENT> for buf in self.items: <NEW_LINE> <INDENT> if buf.filename == filename: <NEW_LINE> <INDENT> return buf <NEW_LINE> <DEDENT> <DEDENT> return None <NEW_LINE> <DEDENT> def create_ui(self): <NEW_LINE> <INDENT> lists.ListView.create_ui(self) <NEW_LINE> if self.model.config['toolbar']: <NEW_LINE> <INDENT> self.stack.pack_start(self.model.shortcuts.create_tools(), expand=False) <NEW_LINE> <DEDENT> <DEDENT> def append(self, filename, bufid): <NEW_LINE> <INDENT> buf = self.get_by_bufid(bufid) <NEW_LINE> if buf is None: <NEW_LINE> <INDENT> buf = Buffer(self.model, filename, bufid) <NEW_LINE> self.items.append(buf) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> buf.filename = filename <NEW_LINE> <DEDENT> if (self.items.get_selection() is None or not self.items.selected_item or self.items.selected_item.filename != filename): <NEW_LINE> <INDENT> self.items.selected_item = buf <NEW_LINE> <DEDENT> self.refresh_activated_item() <NEW_LINE> <DEDENT> def remove(self, bufid): <NEW_LINE> <INDENT> buf = self.get_by_bufid(bufid) <NEW_LINE> if buf is not None: <NEW_LINE> <INDENT> self.items.remove(buf) <NEW_LINE> self.refresh_activated_item() <NEW_LINE> <DEDENT> <DEDENT> def refresh(self): <NEW_LINE> <INDENT> for item in self.items: <NEW_LINE> <INDENT> item.update_dispname() <NEW_LINE> <DEDENT> <DEDENT> def get_activated_item(self): <NEW_LINE> <INDENT> bufid = self.model.vim.get_current_buffer_id() 
<NEW_LINE> item = self.get_by_bufid(bufid) <NEW_LINE> return item <NEW_LINE> <DEDENT> def refresh_activated_item(self): <NEW_LINE> <INDENT> b = self.get_activated_item() <NEW_LINE> if b is not None: <NEW_LINE> <INDENT> title = '{0}/{1}'.format(b.dispname, b.basename) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> title = '' <NEW_LINE> <DEDENT> self.model.ui.set_title(title) <NEW_LINE> <DEDENT> def on_items__item_activated(self, items, item): <NEW_LINE> <INDENT> self.model.vim.open_file(item.filename) <NEW_LINE> self.refresh_activated_item() <NEW_LINE> self.model.vim.grab_focus() <NEW_LINE> <DEDENT> def on_items__item_right_clicked(self, items, item, event): <NEW_LINE> <INDENT> context = contexts.LocalContext(self.model, None, item.filename) <NEW_LINE> context.create_menu() <NEW_LINE> menu = context.create_file_menu() <NEW_LINE> if menu is not None: <NEW_LINE> <INDENT> menu.popup(None, None, None, event.button, event.time) <NEW_LINE> <DEDENT> <DEDENT> def on_items__item_middle_clicked(self, items, item, event): <NEW_LINE> <INDENT> self.model.vim.close(item.filename) | Buffer list. | 62598fac435de62698e9bdb1 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.