code
stringlengths
4
4.48k
docstring
stringlengths
1
6.45k
_id
stringlengths
24
24
class ExtensionLoader(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self._available_extensions = {} <NEW_LINE> <DEDENT> def load(self): <NEW_LINE> <INDENT> for entry_point in iter_entry_points('handroll.extensions'): <NEW_LINE> <INDENT> cls = entry_point.load() <NEW_LINE> self._available_extensions[entry_point.name] = cls <NEW_LINE> <DEDENT> <DEDENT> def get_active_extensions(self, config): <NEW_LINE> <INDENT> extensions = [] <NEW_LINE> for extension in config.active_extensions: <NEW_LINE> <INDENT> extension_cls = self._available_extensions.get(extension) <NEW_LINE> if extension_cls is not None: <NEW_LINE> <INDENT> extensions.append(extension_cls(config)) <NEW_LINE> <DEDENT> <DEDENT> return extensions
A loader for extensions from handroll's extension entry point.
62598fb17047854f4633f442
class IprouteNetConfig(os_net_config.NetConfig): <NEW_LINE> <INDENT> pass
Configure network interfaces using iproute2.
62598fb1498bea3a75a57b87
class DjangoMaintenance(object): <NEW_LINE> <INDENT> def process_request(self, request): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> json_data = open(JSON_FILE, 'r') <NEW_LINE> data = json.load(json_data) <NEW_LINE> json_data.close() <NEW_LINE> <DEDENT> except IOError: <NEW_LINE> <INDENT> data = None <NEW_LINE> <DEDENT> if data and data['DJANGO_MAINTENANCE'] == 'on': <NEW_LINE> <INDENT> return render_to_response('django_maintenance/maintenance.html')
Django Maintenance middleware.
62598fb199cbb53fe6830f41
class ParamPushConfiguration(object): <NEW_LINE> <INDENT> swagger_types = { 'piid': 'str' } <NEW_LINE> attribute_map = { 'piid': 'piid' } <NEW_LINE> def __init__(self, piid=None): <NEW_LINE> <INDENT> self._piid = None <NEW_LINE> if piid is not None: <NEW_LINE> <INDENT> self.piid = piid <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def piid(self): <NEW_LINE> <INDENT> return self._piid <NEW_LINE> <DEDENT> @piid.setter <NEW_LINE> def piid(self, piid): <NEW_LINE> <INDENT> self._piid = piid <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in iteritems(self.swagger_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, ParamPushConfiguration): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other
NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually.
62598fb1283ffb24f3cf38f5
class DjangoModuleManager(object): <NEW_LINE> <INDENT> def __init__(self, projectname, *modulename): <NEW_LINE> <INDENT> self.__path = os.path.join(projectname, *modulename) <NEW_LINE> self.__file = {} <NEW_LINE> self.__data = {} <NEW_LINE> if not os.path.exists(self.__path): <NEW_LINE> <INDENT> os.makedirs(self.__path) <NEW_LINE> <DEDENT> <DEDENT> def add_file(self, module, data=None, lines=None): <NEW_LINE> <INDENT> thefile = open(os.path.join(self.__path, module + '.py'), 'a+') <NEW_LINE> thefile.seek(0) <NEW_LINE> self.__file[module] = thefile <NEW_LINE> self.__data[module] = '' if data or lines else thefile.read() <NEW_LINE> if data: <NEW_LINE> <INDENT> self.set_data(module, data) <NEW_LINE> <DEDENT> if lines: <NEW_LINE> <INDENT> self.append_lines(module, *lines) <NEW_LINE> <DEDENT> <DEDENT> def get_file(self, module): <NEW_LINE> <INDENT> return self.__file[module] <NEW_LINE> <DEDENT> def save_files(self): <NEW_LINE> <INDENT> for module, thefile in self.__file.items(): <NEW_LINE> <INDENT> data = self.__data[module] <NEW_LINE> thefile.seek(0) <NEW_LINE> thefile.truncate() <NEW_LINE> thefile.write(data) <NEW_LINE> <DEDENT> <DEDENT> def get_data(self, module): <NEW_LINE> <INDENT> return self.__data[module] <NEW_LINE> <DEDENT> def set_data(self, module, data): <NEW_LINE> <INDENT> self.__data[module] = data <NEW_LINE> <DEDENT> def append_data(self, module, chunk): <NEW_LINE> <INDENT> self.__data[module] += chunk <NEW_LINE> <DEDENT> def append_lines(self, module, *lines): <NEW_LINE> <INDENT> if len(self.__data[module]) > 0: <NEW_LINE> <INDENT> self.append_data(module, os.linesep) <NEW_LINE> <DEDENT> for data in lines: <NEW_LINE> <INDENT> self.append_data(module, data + os.linesep) <NEW_LINE> <DEDENT> <DEDENT> def remove_line(self, module, line): <NEW_LINE> <INDENT> self.replace_line(module, line, None) <NEW_LINE> <DEDENT> def replace_line(self, module, old, new): <NEW_LINE> <INDENT> self.__data[module] = self.__data[module].replace(old + os.linesep, new + os.linesep 
if new else '')
Utility class to modify and write files in a Python module.
62598fb1fff4ab517ebcd84e
@public <NEW_LINE> @implementer(IChain, IChainIterator) <NEW_LINE> class TerminalChainBase: <NEW_LINE> <INDENT> def _process(self, mlist, msg, msgdata): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def get_links(self, mlist, msg, msgdata): <NEW_LINE> <INDENT> return iter(self) <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> yield Link('truth', LinkAction.run, function=self._process) <NEW_LINE> yield Link('truth', LinkAction.stop)
A base chain that always matches and executes a method. The method is called '_process()' and must be provided by the subclass.
62598fb1be383301e0253862
class BatchView(generics.ListCreateAPIView): <NEW_LINE> <INDENT> permission_classes = [permissions.IsAuthenticated] <NEW_LINE> serializer_class = BatchSerializer <NEW_LINE> def get_queryset(self): <NEW_LINE> <INDENT> product = Product.objects.get(owner=self.request.user, pk=self.kwargs['pk']) <NEW_LINE> return Batch.objects.filter(owner=self.request.user, product=product) <NEW_LINE> <DEDENT> def perform_create(self, serializer): <NEW_LINE> <INDENT> product = Product.objects.get(owner=self.request.user, pk=self.kwargs['pk']) <NEW_LINE> serializer.save(owner=self.request.user, product=product)
This class manages the view to create and list the products. Attributes: permission_classes (list(Permissions)): The options to access this resource. serializer_class (Serializer): The serializer to bind the request and the response object. Returns: 200: The list of products. 201: The product is created. 400: An error is detected in the request data. 401: The user must be connected to access this resource. 406: The response format is not acceptable by the server. 500: An error occurred while processing the request.
62598fb1aad79263cf42e83b
class User(db.Model): <NEW_LINE> <INDENT> __tablename__ = "info_user" <NEW_LINE> id = db.Column(db.Integer, primary_key=True) <NEW_LINE> name = db.Column(db.String(32), unique=True, nullable=False) <NEW_LINE> followers = db.relationship('User', secondary=tb_user_follows, primaryjoin=id == tb_user_follows.c.followed_id, secondaryjoin=id == tb_user_follows.c.follower_id, backref=db.backref('followed', lazy='dynamic'), lazy='dynamic')
用户表
62598fb13539df3088ecc31a
class MinusNumericExpression(NumericExpression): <NEW_LINE> <INDENT> def __init__(self, numericExpression): <NEW_LINE> <INDENT> NumericExpression.__init__(self) <NEW_LINE> self.numericExpression = numericExpression <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "MinusNE:" + "-(" + str(self.numericExpression) + ")" <NEW_LINE> <DEDENT> def getDependencies(self, codeGenerator): <NEW_LINE> <INDENT> return self.numericExpression.getDependencies(codeGenerator) <NEW_LINE> <DEDENT> def setupEnvironment(self, codeSetup): <NEW_LINE> <INDENT> codeSetup.setupEnvironment(self) <NEW_LINE> <DEDENT> def generateCode(self, codeGenerator): <NEW_LINE> <INDENT> return codeGenerator.generateCode(self)
Class representing a minus numeric expression node in the AST of a MLP
62598fb1a8370b77170f0444
class ProfileLinksForm(forms.ModelForm): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> models = dillo.models.profiles.ProfileLinks <NEW_LINE> exclude = ('social',)
Links Form, used to generate an inline forms set.
62598fb1090684286d593711
class HTMLSoupLinkScraper(BaseScraper): <NEW_LINE> <INDENT> content_types = [ "text/html", "application/xhtml+xml" ] <NEW_LINE> def derived_get_requests(self): <NEW_LINE> <INDENT> attributes = { "src": True, "href": True, "link": True, "script": True, "url": True } <NEW_LINE> host = self.queue_item.response.url <NEW_LINE> soup = self.queue_item.get_soup_response() <NEW_LINE> base_element = soup.find("base", href=True) <NEW_LINE> elements = soup.select("[{}]".format("],[".join(attributes.keys()))) <NEW_LINE> if base_element: <NEW_LINE> <INDENT> host = URLHelper.make_absolute(host, base_element["href"]) <NEW_LINE> <DEDENT> found_requests = [] <NEW_LINE> for element in elements: <NEW_LINE> <INDENT> for attribute in attributes.keys(): <NEW_LINE> <INDENT> if not element.has_attr(attribute): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> found_url = self.__trim_grave_accent(element[attribute]) <NEW_LINE> if URLHelper.is_mailto(found_url): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> absolute_url = URLHelper.make_absolute(host, found_url) <NEW_LINE> found_requests.append(Request(absolute_url)) <NEW_LINE> <DEDENT> <DEDENT> return found_requests <NEW_LINE> <DEDENT> def __trim_grave_accent(self, href): <NEW_LINE> <INDENT> if href.startswith("`"): <NEW_LINE> <INDENT> href = href[1:] <NEW_LINE> <DEDENT> if href.endswith("`"): <NEW_LINE> <INDENT> href = href[:-1] <NEW_LINE> <DEDENT> return href
The HTMLSoupLinkScraper finds URLs from href attributes in HTML using BeautifulSoup. Attributes: content_types list(str): The supported content types.
62598fb156ac1b37e6302253
class UploadCommand(Command): <NEW_LINE> <INDENT> description = 'Build and publish the package.' <NEW_LINE> user_options = [] <NEW_LINE> @staticmethod <NEW_LINE> def status(s): <NEW_LINE> <INDENT> print('\033[1m{0}\033[0m'.format(s)) <NEW_LINE> <DEDENT> def initialize_options(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def finalize_options(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.status('Removing previous builds…') <NEW_LINE> rmtree(os.path.join(here, 'dist')) <NEW_LINE> <DEDENT> except OSError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> self.status('Building Source and Wheel (universal) distribution…') <NEW_LINE> os.system('{0} setup.py sdist bdist_wheel --universal'.format(sys.executable)) <NEW_LINE> self.status('Uploading the package to PyPI via Twine…') <NEW_LINE> os.system('twine upload dist/*') <NEW_LINE> self.status('Pushing git tags…') <NEW_LINE> os.system('git tag v{0}'.format(about['__version__'])) <NEW_LINE> os.system('git push --tags') <NEW_LINE> sys.exit()
Support setup.py upload.
62598fb14e4d56256637248f
class SampleScript(PrometheusExporterScript): <NEW_LINE> <INDENT> name = "prometheus-aioexporter-sample" <NEW_LINE> default_port = 9091 <NEW_LINE> def configure(self, args: Namespace): <NEW_LINE> <INDENT> self.create_metrics( [ MetricConfig("a_gauge", "a gauge", "gauge", {"labels": ["foo", "bar"]}), MetricConfig("a_counter", "a counter", "counter", {"labels": ["baz"]}), ] ) <NEW_LINE> <DEDENT> async def on_application_startup(self, application: Application): <NEW_LINE> <INDENT> application["exporter"].set_metric_update_handler(self._update_handler) <NEW_LINE> <DEDENT> async def _update_handler(self, metrics): <NEW_LINE> <INDENT> metrics["a_gauge"].labels( foo=random.choice(["this-foo", "other-foo"]), bar=random.choice(["this-bar", "other-bar"]), ).set(random.uniform(0, 100)) <NEW_LINE> metrics["a_counter"].labels( baz=random.choice(["this-baz", "other-baz"]), ).inc(random.choice(range(10)))
A sample exporter.
62598fb1009cb60464d0158a
class Stateful(object): <NEW_LINE> <INDENT> def save(self, filepath, overwrite=False): <NEW_LINE> <INDENT> with gzip.open(create_filepath(filepath, overwrite), 'wb') as openf: <NEW_LINE> <INDENT> openf.write(pickle.dumps(self)) <NEW_LINE> <DEDENT> <DEDENT> @staticmethod <NEW_LINE> def load(filepath): <NEW_LINE> <INDENT> with gzip.open(filepath, 'rb') as openf: <NEW_LINE> <INDENT> return pickle.load(self, openf.read())
Generic class for a stateful object that needs save and load methods.
62598fb1aad79263cf42e83c
class Parser( baseparser.BaseParser ): <NEW_LINE> <INDENT> def __init__( self, declaration, root='root', prebuilts=(), definitionSources=common.SOURCES, ): <NEW_LINE> <INDENT> self._rootProduction = root <NEW_LINE> self._declaration = declaration <NEW_LINE> self._generator = simpleparsegrammar.Parser( declaration, prebuilts, definitionSources = definitionSources, ).generator <NEW_LINE> <DEDENT> def buildTagger( self, production=None, processor=None): <NEW_LINE> <INDENT> if production is None: <NEW_LINE> <INDENT> production = self._rootProduction <NEW_LINE> <DEDENT> if processor is None: <NEW_LINE> <INDENT> processor = self.buildProcessor() <NEW_LINE> <DEDENT> return self._generator.buildParser( production, methodSource=processor, )
EBNF-generated Parsers with results-handling The Parser is a two-stage object: Passed an EBNF definition during initialisation, it compiles the definition into a tagging table (which in turn requires creating a tagging table for parsing the EBNF). You then call the parser's parse method to perform the actual parsing of your data, with the parser passing the results to your processor object and then back to you.
62598fb15fc7496912d482b2
class GenerateRandomNoize: <NEW_LINE> <INDENT> def __init__(self, mean, var, n): <NEW_LINE> <INDENT> self.mean = mean <NEW_LINE> self.sd = np.sqrt(var) <NEW_LINE> self.var = var <NEW_LINE> self.n = n <NEW_LINE> <DEDENT> def generate(self): <NEW_LINE> <INDENT> self.value = np.random.normal(loc=self.mean, scale=np.sqrt(self.var), size=self.n)
1次元ガウス分布に従ってデータ点を生成するオブジェクト sd : 標準偏差
62598fb167a9b606de546037
class RomanNumeral(object): <NEW_LINE> <INDENT> values = [(1000, "M"), (900, "CM"), (500, "D"), (400, "CD"), (100, "C"), (90, "XC"), (50, "L"), (40, "XL"), (10, "X"), (9, "IX"), (5, "V"), (4, "IV"), (1, "I")] <NEW_LINE> rev_dict = [(v, k) for (k, v) in values] <NEW_LINE> def __init__(self, text): <NEW_LINE> <INDENT> self.text = text.strip() <NEW_LINE> <DEDENT> def __int__(self): <NEW_LINE> <INDENT> text = self.text <NEW_LINE> num = 0 <NEW_LINE> while text: <NEW_LINE> <INDENT> for v, k in self.rev_dict: <NEW_LINE> <INDENT> if text.find(v) == 0: <NEW_LINE> <INDENT> text = text[len(v):] <NEW_LINE> num += k <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return num <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return len(self.text) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "RomanNumeral({0})".format(self.text) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.text <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_int(cls, num): <NEW_LINE> <INDENT> text = "" <NEW_LINE> for val, roman_num in cls.values: <NEW_LINE> <INDENT> while num >= val: <NEW_LINE> <INDENT> text += roman_num <NEW_LINE> num -= val <NEW_LINE> <DEDENT> <DEDENT> return RomanNumeral(text) <NEW_LINE> <DEDENT> def minimize(self): <NEW_LINE> <INDENT> num = int(self) <NEW_LINE> return RomanNumeral.from_int(num).text
Store roman numerals. As per https://projecteuler.net/about=roman_numerals
62598fb14f6381625f1994f4
class SequenceProxy(Proxy[Sequence[T_co]], SequenceRole[T_co]): <NEW_LINE> <INDENT> pass
Proxy to :class:`typing.Sequence` object.
62598fb185dfad0860cbfaa8
class DatabaseException(AioRestException): <NEW_LINE> <INDENT> pass
All database related exceptions
62598fb1f7d966606f74804f
@dataclass(frozen=True) <NEW_LINE> class Aggregate(BaseModel): <NEW_LINE> <INDENT> __blurb__: ClassVar[str] = 'Aggregate' <NEW_LINE> name: str <NEW_LINE> description: str <NEW_LINE> variable: str <NEW_LINE> aggregate_type: str <NEW_LINE> interval_length: pd.Timedelta <NEW_LINE> interval_label: str <NEW_LINE> timezone: str <NEW_LINE> observations: Tuple[AggregateObservation, ...] <NEW_LINE> aggregate_id: str = '' <NEW_LINE> provider: str = '' <NEW_LINE> extra_parameters: str = '' <NEW_LINE> units: str = field(init=False) <NEW_LINE> interval_value_type: str = field(default='interval_mean') <NEW_LINE> def __post_init__(self): <NEW_LINE> <INDENT> __set_units__(self) <NEW_LINE> observations = [ ao.observation for ao in self.observations if ao.observation is not None] <NEW_LINE> __check_variable__( self.variable, *observations) <NEW_LINE> __check_aggregate_interval_compatibility__( self.interval_length, *observations) <NEW_LINE> __generic_oneof__(self, 'aggregate_type', ALLOWED_AGGREGATE_TYPES) <NEW_LINE> __generic_oneof__(self, 'interval_label', ('beginning', 'ending')) <NEW_LINE> object.__setattr__(self, 'interval_value_type', 'interval_mean')
Class for keeping track of Aggregate metadata. Aggregates always have interval_value_type of 'interval_mean'. Parameters ---------- name : str Name of the Aggregate, e.g. Utility X Solar PV description : str A description of what the aggregate is. variable : str Variable name, e.g. power, GHI. Each allowed variable has an associated pre-defined unit. All observations that make up the Aggregate must also have this variable. aggregate_type : str The aggregation function that will be applied to observations. Generally, this will be 'sum' although one might be interested, for example, in the 'mean' irradiance of some observations. May be an aggregate function string supported by Pandas. Common options include ('sum', 'mean', 'min', 'max', 'median', 'std'). interval_length : pandas.Timedelta The length of time between consecutive data points, e.g. 5 minutes, 1 hour. This must be >= the interval lengths of any Observations that will make up the Aggregate. interval_label : str Indicates if a time labels the beginning or the ending of an interval average. timezone : str IANA timezone of the Aggregate, e.g. Etc/GMT+8 aggregate_id : str, optional UUID of the Aggregate in the API provider : str, optional Provider of the Aggregate information. extra_parameters : str, optional Any extra parameters for the Aggregate. observations : tuple of AggregateObservation The Observations that contribute to the Aggregate See Also -------- :py:class:`solarforecastarbiter.datamodel.Observation`
62598fb18e7ae83300ee910d
class ScipyGaussianCopula(object): <NEW_LINE> <INDENT> implements(ICopula) <NEW_LINE> def __init__(self, portfolio): <NEW_LINE> <INDENT> from scipy import sparse <NEW_LINE> self.issuers = [i for i in portfolio.issuers()] <NEW_LINE> self.assets = [a for a in portfolio.assets] <NEW_LINE> self.asset_issuer_map = makeAssetIssuerIndexMap(self.issuers, self.assets) <NEW_LINE> def ppfGen(assets): <NEW_LINE> <INDENT> for ass in assets: <NEW_LINE> <INDENT> yield norm.ppf(ass.dp) <NEW_LINE> <DEDENT> <DEDENT> self.thresholds = np.fromiter(ppfGen(self.assets), np.double) <NEW_LINE> self.n_issuers = len(self.issuers) <NEW_LINE> self.n_assets = len(self.assets) <NEW_LINE> factor_indices = portfolio.factor_indices() <NEW_LINE> self.n_factors = len(factor_indices.keys()) <NEW_LINE> wm = sparse.dok_matrix((self.n_issuers, self.n_factors+self.n_issuers), dtype=np.float32) <NEW_LINE> for i, iss in enumerate(self.issuers): <NEW_LINE> <INDENT> wsum = 0.0 <NEW_LINE> for f in iss.factors: <NEW_LINE> <INDENT> j = factor_indices[f.name] <NEW_LINE> w = np.sqrt(max(f.weight, 0.0)) <NEW_LINE> wm[i, j] = w <NEW_LINE> wsum += w*w <NEW_LINE> <DEDENT> wm[i, self.n_factors+i] = np.sqrt(max(1.0 - wsum, 0.0)) <NEW_LINE> <DEDENT> self.weights = wm.tocsr() <NEW_LINE> <DEDENT> def copula(self, chunk, number_chunks, defaults): <NEW_LINE> <INDENT> n = self.n_factors+self.n_issuers <NEW_LINE> for outer in xrange(number_chunks): <NEW_LINE> <INDENT> corrValues = np.empty(shape=(self.n_issuers, chunk), dtype=np.double) <NEW_LINE> uncorrValues = norm.rvs(size=(n, chunk)) <NEW_LINE> for inner in xrange(chunk): <NEW_LINE> <INDENT> corr = corrValues[:,inner] <NEW_LINE> variates = uncorrValues[:,inner] <NEW_LINE> self.weights.matvec(variates, corr) <NEW_LINE> <DEDENT> self.defaultProcessor(defaults, corrValues) <NEW_LINE> log.msg('progress: [ %s/%s ]' % (outer*chunk, chunk*number_chunks)) <NEW_LINE> <DEDENT> <DEDENT> def defaultProcessor(self, defaults, corrValues): <NEW_LINE> <INDENT> num_runs = 
np.size(corrValues, 1) <NEW_LINE> raw_data = np.empty(shape=(self.n_assets, num_runs), dtype=np.double) <NEW_LINE> for i in xrange(self.n_assets): <NEW_LINE> <INDENT> issuer_index = self.asset_issuer_map[i] <NEW_LINE> raw_data[i, :] = corrValues[issuer_index, :] < self.thresholds[i] <NEW_LINE> <DEDENT> for i in xrange(num_runs): <NEW_LINE> <INDENT> num_defaults = int(sum(raw_data[:,i])) <NEW_LINE> defaults[num_defaults] += 1
Gaussian copula simulation of correlated defaults using scipy sparse matrix lib
62598fb14428ac0f6e658590
class BlinkController(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> rospy.init_node('blink_controller', anonymous=False) <NEW_LINE> rospy.loginfo( '[Blink Controller]: Waiting for gazebo color plugin service') <NEW_LINE> self.serviceName = str(sys.argv[1]) + '/hat_color' <NEW_LINE> rospy.loginfo(self.serviceName) <NEW_LINE> rospy.wait_for_service(self.serviceName) <NEW_LINE> self.model_color = rospy.ServiceProxy( self.serviceName, SetLightProperties) <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> rate = rospy.Rate(0.5) <NEW_LINE> rospy.loginfo("[Blink Controller]: Running!") <NEW_LINE> color = ColorRGBA() <NEW_LINE> while not rospy.is_shutdown(): <NEW_LINE> <INDENT> c = list(np.random.choice(range(256), size=3)) <NEW_LINE> color.r = c[0]/256.0 <NEW_LINE> color.g = c[1]/256.0 <NEW_LINE> color.b = c[2]/256.0 <NEW_LINE> color.a = 1.0 <NEW_LINE> try: <NEW_LINE> <INDENT> resp1 = self.model_color("", color, 0.0, 0.0, 0.0) <NEW_LINE> <DEDENT> except rospy.ServiceException as exc: <NEW_LINE> <INDENT> print( "[Blink Controller]: Service did not process request " + str(exc)) <NEW_LINE> <DEDENT> rate.sleep()
docstring for Blink
62598fb1a05bb46b3848a8d6
class PromoteUDBInstanceToHARequestSchema(schema.RequestSchema): <NEW_LINE> <INDENT> fields = { "DBId": fields.Str(required=True, dump_to="DBId"), "ProjectId": fields.Str(required=False, dump_to="ProjectId"), "Region": fields.Str(required=True, dump_to="Region"), }
PromoteUDBInstanceToHA - 普通db升级为高可用(只针对mysql5.5及以上版本)
62598fb138b623060ffa9106
class GRNN(object): <NEW_LINE> <INDENT> def __init__(self, training_data=[], standard_deviation=1.41, feature_mask =None, global_method=True, k=1): <NEW_LINE> <INDENT> self.training_data = training_data <NEW_LINE> if feature_mask == None: <NEW_LINE> <INDENT> self.feature_mask = [1 for _ in range(len(training_data[0][0]))] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.feature_mask = feature_mask <NEW_LINE> <DEDENT> self.standard_deviation = standard_deviation <NEW_LINE> if not global_method: <NEW_LINE> <INDENT> self.clusters = self._k_means_cluster_creator(training_data, k) <NEW_LINE> <DEDENT> <DEDENT> def _argmin(self, elements): <NEW_LINE> <INDENT> index = 0 <NEW_LINE> value = elements[0] <NEW_LINE> current_index = 0 <NEW_LINE> for element in elements: <NEW_LINE> <INDENT> if element < value: <NEW_LINE> <INDENT> index = current_index <NEW_LINE> value = element <NEW_LINE> <DEDENT> current_index = current_index + 1 <NEW_LINE> <DEDENT> return index <NEW_LINE> <DEDENT> def _k_means_cluster_creator(self, data_points, k): <NEW_LINE> <INDENT> clusters = [] <NEW_LINE> centroids = random.sample(data_points, k) <NEW_LINE> for i in range(5): <NEW_LINE> <INDENT> clusters = [[] for _ in range(k)] <NEW_LINE> for point in data_points: <NEW_LINE> <INDENT> distances = [] <NEW_LINE> for centroid in centroids: <NEW_LINE> <INDENT> distances.append(euclidian_distance(centroid[0], point[0])) <NEW_LINE> <DEDENT> min_index = self._argmin(distances) <NEW_LINE> clusters[min_index].append(point) <NEW_LINE> <DEDENT> for centroid_index in range(k): <NEW_LINE> <INDENT> new_center = [0 for _ in range(95)] <NEW_LINE> for point in clusters[centroid_index]: <NEW_LINE> <INDENT> for j in range(95): <NEW_LINE> <INDENT> new_center[j] = new_center[j] + point[0][j] <NEW_LINE> <DEDENT> for j in range(95): <NEW_LINE> <INDENT> new_center[j] = new_center[j]/len(clusters[centroid_index]) <NEW_LINE> <DEDENT> <DEDENT> centroids[centroid_index] = (tuple(new_center), 0) <NEW_LINE> <DEDENT> <DEDENT> return 
clusters <NEW_LINE> <DEDENT> def _h_function(self, t_q, t_i): <NEW_LINE> <INDENT> distance = euclidian_distance(t_i, t_q, self.feature_mask) <NEW_LINE> return math.e**(-(distance**2)/(2*self.standard_deviation**2)) <NEW_LINE> <DEDENT> def set_feature_mask(self, feature_mask): <NEW_LINE> <INDENT> self.feature_mask = feature_mask <NEW_LINE> <DEDENT> def load_data(self, training_data): <NEW_LINE> <INDENT> self.training_data = training_data <NEW_LINE> <DEDENT> def classify(self, instance): <NEW_LINE> <INDENT> numerator = 0 <NEW_LINE> denominator = 0 <NEW_LINE> for training_instance in self.training_data: <NEW_LINE> <INDENT> h_value = self._h_function(instance, training_instance[0]) <NEW_LINE> numerator = numerator + h_value*training_instance[1] <NEW_LINE> denominator = denominator + h_value <NEW_LINE> <DEDENT> return numerator/denominator
GRNN classifier
62598fb15fcc89381b266181
class TicketChannel(): <NEW_LINE> <INDENT> def __init__(self, minRepeat=1800): <NEW_LINE> <INDENT> self.providers = [] <NEW_LINE> self.minRepeat = minRepeat <NEW_LINE> self.lastSent = {} <NEW_LINE> <DEDENT> def addProvider(self, regex, provider): <NEW_LINE> <INDENT> self.providers.append( { 're': regex, 'provider': provider } ) <NEW_LINE> <DEDENT> def doPrivmsg(self, msg): <NEW_LINE> <INDENT> for p in self.providers: <NEW_LINE> <INDENT> matches = re.findall(p['re'], msg) <NEW_LINE> for m in matches: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> item = p['provider'][m] <NEW_LINE> <DEDENT> except IndexError: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> if m in self.lastSent and self.lastSent[m] >= time.time() - self.minRepeat: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> self.lastSent[m] = time.time() <NEW_LINE> yield item
Dispatcher and rate limiter for per-channel ticketing info
62598fb1fff4ab517ebcd850
class RepoPkgsUpgradeToSubCommandTest(support.ResultTestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> super(RepoPkgsUpgradeToSubCommandTest, self).setUp() <NEW_LINE> base = support.BaseCliStub('updates', 'third_party') <NEW_LINE> base.init_sack() <NEW_LINE> self.cli = base.mock_cli() <NEW_LINE> <DEDENT> def test_all(self): <NEW_LINE> <INDENT> cmd = dnf.cli.commands.RepoPkgsCommand(self.cli) <NEW_LINE> support.command_run(cmd, ['updates', 'upgrade', 'hole-1-2']) <NEW_LINE> self.assertResult(self.cli.base, itertools.chain( self.cli.base.sack.query().installed().filter(name__neq='hole'), dnf.subject.Subject('hole-1-2.x86_64').get_best_query(self.cli.base.sack) .filter(reponame='updates')))
Tests of ``dnf.cli.commands.RepoPkgsCommand.UpgradeToSubCommand`` class.
62598fb13539df3088ecc31c
class ServerAuthenticationMiddleware(object): <NEW_LINE> <INDENT> def process_request(self, request): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> if request.method == 'GET': <NEW_LINE> <INDENT> uTime = request.GET['uTime'] <NEW_LINE> sid, token = request.GET['sToken'].split('.') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> uTime = request.POST['uTime'] <NEW_LINE> sid, token = request.POST['sToken'].split('.') <NEW_LINE> <DEDENT> <DEDENT> except: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> server = Server.objects.get(id=sid) <NEW_LINE> assert authenticate(uTime, server.auth_key, token) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> raise PermissionDenied <NEW_LINE> <DEDENT> request.server = server <NEW_LINE> <DEDENT> def process_response(self, request, response): <NEW_LINE> <INDENT> return response
Authenticates a server from querystring parameters 'uTime' and 'sToken' If authentication is successful, a `request.server` will be set to the `Server` model object that authenticated
62598fb1090684286d593712
class FrequencyAverageForecaster(BaseForecaster): <NEW_LINE> <INDENT> def __init__(self, transform_timestamp): <NEW_LINE> <INDENT> self.transform_timestamp = transform_timestamp <NEW_LINE> self.averages_ = None <NEW_LINE> <DEDENT> def fit(self, series): <NEW_LINE> <INDENT> self.averages_ = series.groupby(self.transform_timestamp).mean() <NEW_LINE> return self <NEW_LINE> <DEDENT> def predict(self, timestamps): <NEW_LINE> <INDENT> forecasts = [ self.averages_[self.transform_timestamp(ts)] for ts in timestamps ] <NEW_LINE> return pd.Series(data=forecasts, index=timestamps)
Args: transform_timestamp (func): A function which converts a pandas.tslib.Timestamp to a value with which the data will be grouped by.
62598fb121bff66bcd722cd1
class ReauthAccessTokenRefreshError(ReauthError): <NEW_LINE> <INDENT> def __init__(self, message=None, status=None): <NEW_LINE> <INDENT> super(ReauthAccessTokenRefreshError, self).__init__( 'Failed to get an access token for reauthentication. {0}'.format( message)) <NEW_LINE> self.status = status
An exception for when we can't get an access token for reauth.
62598fb132920d7e50bc60be
class Feature3(Feature): <NEW_LINE> <INDENT> def __init__(self, **kargs): Feature.__init__(self, 0, 'fa-coffee', **kargs) <NEW_LINE> def get(self, request): <NEW_LINE> <INDENT> return Text('Single Feature 3 Action')
Single Feature 3
62598fb14f88993c371f0540
class Solution: <NEW_LINE> <INDENT> def permuteUnique(self, nums): <NEW_LINE> <INDENT> nums.sort() <NEW_LINE> rlt = self._permuteUnique(nums) <NEW_LINE> return rlt <NEW_LINE> <DEDENT> def _permuteUnique(self, nums): <NEW_LINE> <INDENT> rlt = [] <NEW_LINE> if len(nums) in [0, 1]: <NEW_LINE> <INDENT> rlt.append(nums) <NEW_LINE> return rlt <NEW_LINE> <DEDENT> for i, v in enumerate(nums): <NEW_LINE> <INDENT> if i != 0 and v == nums[i-1]: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> snums = nums[:i] + nums[i+1:] <NEW_LINE> srlt = self._permuteUnique(snums) <NEW_LINE> for vv in srlt: <NEW_LINE> <INDENT> vv.append(v) <NEW_LINE> rlt.append(vv) <NEW_LINE> <DEDENT> <DEDENT> return rlt
@param nums: A list of integers. @return: A list of unique permutations.
62598fb14e4d562566372491
class StdOutHandler(): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.oldStdout = None <NEW_LINE> <DEDENT> def hideStdOut(self): <NEW_LINE> <INDENT> self.oldStdout = sys.stdout <NEW_LINE> sys.stdout = open(os.devnull, 'w') <NEW_LINE> <DEDENT> def restoreStdOut(self): <NEW_LINE> <INDENT> if self.oldStdout: <NEW_LINE> <INDENT> sys.stdout.close() <NEW_LINE> sys.stdout = self.oldStdout
Class for managing stdout
62598fb1a219f33f346c687f
class Rectangle: <NEW_LINE> <INDENT> def __init__(self, width=0, height=0): <NEW_LINE> <INDENT> if type(width) != int: <NEW_LINE> <INDENT> raise TypeError('width must be an integer') <NEW_LINE> <DEDENT> if width < 0: <NEW_LINE> <INDENT> raise ValueError('width must be >= 0') <NEW_LINE> <DEDENT> if type(height) != int: <NEW_LINE> <INDENT> raise TypeError('height must be an integer') <NEW_LINE> <DEDENT> if height < 0: <NEW_LINE> <INDENT> raise ValueError('height must be >= 0') <NEW_LINE> <DEDENT> self.__width = width <NEW_LINE> self.__height = height <NEW_LINE> <DEDENT> @property <NEW_LINE> def width(self): <NEW_LINE> <INDENT> return self.__width <NEW_LINE> <DEDENT> @width.setter <NEW_LINE> def width(self, value): <NEW_LINE> <INDENT> if type(value) != int: <NEW_LINE> <INDENT> raise TypeError('width must be an integer') <NEW_LINE> <DEDENT> if value < 0: <NEW_LINE> <INDENT> raise ValueError('width must be >= 0') <NEW_LINE> <DEDENT> self.__width = value <NEW_LINE> <DEDENT> @property <NEW_LINE> def height(self): <NEW_LINE> <INDENT> return self.__height <NEW_LINE> <DEDENT> @height.setter <NEW_LINE> def height(self, value): <NEW_LINE> <INDENT> if type(value) != int: <NEW_LINE> <INDENT> raise TypeError('height must be an integer') <NEW_LINE> <DEDENT> if value < 0: <NEW_LINE> <INDENT> raise ValueError('height must be >= 0') <NEW_LINE> <DEDENT> self.__height = value <NEW_LINE> <DEDENT> def area(self): <NEW_LINE> <INDENT> return self.__width * self.__height <NEW_LINE> <DEDENT> def perimeter(self): <NEW_LINE> <INDENT> if self.__width == 0 or self.height == 0: <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> return (2 * self.__width) + (2 * self.__height) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> str = "" <NEW_LINE> if self.__width == 0 or self.__height == 0: <NEW_LINE> <INDENT> return str <NEW_LINE> <DEDENT> for a in range(self.__height): <NEW_LINE> <INDENT> for b in range(self.width): <NEW_LINE> <INDENT> str += '#' <NEW_LINE> <DEDENT> if a != self.height - 1: 
<NEW_LINE> <INDENT> str += '\n' <NEW_LINE> <DEDENT> <DEDENT> return str <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> w = str(self.__width) <NEW_LINE> h = str(self.__height) <NEW_LINE> return 'Rectangle(' + w + ', ' + h + ')' <NEW_LINE> <DEDENT> def __del__(self): <NEW_LINE> <INDENT> print('Bye rectangle...')
Real Rectangle.
62598fb15fdd1c0f98e5dff7
class canva_Mat(FigureCanvas): <NEW_LINE> <INDENT> N=25 <NEW_LINE> def __init__(self, parent=None): <NEW_LINE> <INDENT> self.start_time = time.time() <NEW_LINE> self.fig = Figure() <NEW_LINE> self.axes_y = self.fig.add_subplot(4,1,1) <NEW_LINE> self.axes_p = self.fig.add_subplot(4,1,3) <NEW_LINE> self.axes_r = self.fig.add_subplot(4,1,4) <NEW_LINE> self.data_y = [0] * self.N <NEW_LINE> self.data_p = [0] * self.N <NEW_LINE> self.data_r = [0] * self.N <NEW_LINE> self.time = [0] * self.N <NEW_LINE> FigureCanvas.__init__(self, self.fig) <NEW_LINE> self.setParent(parent) <NEW_LINE> FigureCanvas.setSizePolicy(self, QSizePolicy.Expanding, QSizePolicy.Expanding) <NEW_LINE> FigureCanvas.updateGeometry(self) <NEW_LINE> <DEDENT> def setDate_y(self, x): <NEW_LINE> <INDENT> self.data_y = self.data_y[1:self.N] <NEW_LINE> self.data_y.append(x) <NEW_LINE> self.paint() <NEW_LINE> <DEDENT> def setDate_p(self, x): <NEW_LINE> <INDENT> self.data_p = self.data_p[1:self.N] <NEW_LINE> self.data_p.append(x) <NEW_LINE> self.paint() <NEW_LINE> <DEDENT> def setDate_r(self, x): <NEW_LINE> <INDENT> self.data_r = self.data_r[1:self.N] <NEW_LINE> self.data_r.append(x) <NEW_LINE> print(x) <NEW_LINE> self.paint() <NEW_LINE> <DEDENT> def setTime(self): <NEW_LINE> <INDENT> self.time = self.time[1:len(self.time)] <NEW_LINE> self.time.append(time.time() - self.start_time) <NEW_LINE> <DEDENT> def setDate(self,yaw,pitch,roll): <NEW_LINE> <INDENT> self.data_y = self.data_y[1:self.N] <NEW_LINE> self.data_p = self.data_p[1:self.N] <NEW_LINE> self.data_r = self.data_r[1:self.N] <NEW_LINE> self.data_y.append(yaw) <NEW_LINE> self.data_p.append(pitch) <NEW_LINE> self.data_r.append(roll) <NEW_LINE> self.paint() <NEW_LINE> <DEDENT> def paint(self): <NEW_LINE> <INDENT> self.setTime() <NEW_LINE> self.axes_r.clear() <NEW_LINE> self.axes_y.clear() <NEW_LINE> self.axes_p.clear() <NEW_LINE> self.axes_y.plot(self.time, self.data_y) <NEW_LINE> self.axes_p.plot(self.time, self.data_p) <NEW_LINE> 
self.axes_r.plot(self.time, self.data_r) <NEW_LINE> FigureCanvas.draw(self)
This class pronting 3 graphic for acran
62598fb123849d37ff85111e
class Base64Decoder(object): <NEW_LINE> <INDENT> def __init__(self, underlying): <NEW_LINE> <INDENT> self.cache = bytearray() <NEW_LINE> self.underlying = underlying <NEW_LINE> <DEDENT> def write(self, data): <NEW_LINE> <INDENT> if len(self.cache) > 0: <NEW_LINE> <INDENT> data = self.cache + data <NEW_LINE> <DEDENT> decode_len = (len(data) // 4) * 4 <NEW_LINE> val = data[:decode_len] <NEW_LINE> if len(val) > 0: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> decoded = base64.b64decode(val) <NEW_LINE> <DEDENT> except Base64Error: <NEW_LINE> <INDENT> raise DecodeError('There was an error raised while decoding ' 'base64-encoded data.') <NEW_LINE> <DEDENT> self.underlying.write(decoded) <NEW_LINE> <DEDENT> remaining_len = len(data) % 4 <NEW_LINE> if remaining_len > 0: <NEW_LINE> <INDENT> self.cache = data[-remaining_len:] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.cache = b'' <NEW_LINE> <DEDENT> return len(data) <NEW_LINE> <DEDENT> def close(self): <NEW_LINE> <INDENT> if hasattr(self.underlying, 'close'): <NEW_LINE> <INDENT> self.underlying.close() <NEW_LINE> <DEDENT> <DEDENT> def finalize(self): <NEW_LINE> <INDENT> if len(self.cache) > 0: <NEW_LINE> <INDENT> raise DecodeError('There are %d bytes remaining in the ' 'Base64Decoder cache when finalize() is called' % len(self.cache)) <NEW_LINE> <DEDENT> if hasattr(self.underlying, 'finalize'): <NEW_LINE> <INDENT> self.underlying.finalize() <NEW_LINE> <DEDENT> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "%s(underlying=%r)" % (self.__class__.__name__, self.underlying)
This object provides an interface to decode a stream of Base64 data. It is instantiated with an "underlying object", and whenever a write() operation is performed, it will decode the incoming data as Base64, and call write() on the underlying object. This is primarily used for decoding form data encoded as Base64, but can be used for other purposes:: from multipart.decoders import Base64Decoder fd = open("notb64.txt", "wb") decoder = Base64Decoder(fd) try: decoder.write("Zm9vYmFy") # "foobar" in Base64 decoder.finalize() finally: decoder.close() # The contents of "notb64.txt" should be "foobar". This object will also pass all finalize() and close() calls to the underlying object, if the underlying object supports them. Note that this class maintains a cache of base64 chunks, so that a write of arbitrary size can be performed. You must call :meth:`finalize` on this object after all writes are completed to ensure that all data is flushed to the underlying object. :param underlying: the underlying object to pass writes to
62598fb1aad79263cf42e83e
class Compilation(data.Compilation): <NEW_LINE> <INDENT> def save(self): <NEW_LINE> <INDENT> data_storage = {} <NEW_LINE> for name, node in [(na, no) for na, no in self._subnodes.items() if not no.empty]: <NEW_LINE> <INDENT> data_storage[name] = node.save() <NEW_LINE> <DEDENT> return data_storage
Compilation-type data node for the npz backend.
62598fb167a9b606de546038
@TYPES.register("Outlet") <NEW_LINE> class Outlet(HomeAccessory): <NEW_LINE> <INDENT> def __init__(self, *args): <NEW_LINE> <INDENT> super().__init__(*args, category=CATEGORY_OUTLET) <NEW_LINE> self._flag_state = False <NEW_LINE> serv_outlet = self.add_preload_service(SERV_OUTLET) <NEW_LINE> self.char_on = serv_outlet.configure_char( CHAR_ON, value=False, setter_callback=self.set_state ) <NEW_LINE> self.char_outlet_in_use = serv_outlet.configure_char( CHAR_OUTLET_IN_USE, value=True ) <NEW_LINE> <DEDENT> def set_state(self, value): <NEW_LINE> <INDENT> _LOGGER.debug("%s: Set switch state to %s", self.entity_id, value) <NEW_LINE> self._flag_state = True <NEW_LINE> params = {ATTR_ENTITY_ID: self.entity_id} <NEW_LINE> service = SERVICE_TURN_ON if value else SERVICE_TURN_OFF <NEW_LINE> self.call_service(DOMAIN, service, params) <NEW_LINE> <DEDENT> def update_state(self, new_state): <NEW_LINE> <INDENT> current_state = new_state.state == STATE_ON <NEW_LINE> if not self._flag_state: <NEW_LINE> <INDENT> _LOGGER.debug("%s: Set current state to %s", self.entity_id, current_state) <NEW_LINE> self.char_on.set_value(current_state) <NEW_LINE> <DEDENT> self._flag_state = False
Generate an Outlet accessory.
62598fb101c39578d7f12de5
class DataLoader(object): <NEW_LINE> <INDENT> def __init__(self, reader, batch_size=1, collate_fn=default_collate, transform=None): <NEW_LINE> <INDENT> self.reader = reader <NEW_LINE> self.batch_size = batch_size <NEW_LINE> self.collate_fn = collate_fn <NEW_LINE> self.transform = transform <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> batch = [] <NEW_LINE> for row in self.reader: <NEW_LINE> <INDENT> row_as_dict = row._asdict() <NEW_LINE> batch.append(self.transform(row_as_dict) if self.transform else row_as_dict) <NEW_LINE> if len(batch) == self.batch_size: <NEW_LINE> <INDENT> yield self.collate_fn(batch) <NEW_LINE> batch = [] <NEW_LINE> <DEDENT> <DEDENT> if batch: <NEW_LINE> <INDENT> yield self.collate_fn(batch) <NEW_LINE> <DEDENT> <DEDENT> def __enter__(self): <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> def __exit__(self, exc_type, exc_val, exc_tb): <NEW_LINE> <INDENT> self.reader.stop() <NEW_LINE> self.reader.join()
A data loader adaptor for ``torch.utils.data.DataLoader``. This class iterates and returns items from the Reader in batches. This loader can be used as a context manager, but it will terminate at the end of an epoch. The context will invoke next_epoch() upon entry. If not used as context manager, invoke the next_epoch() function at the start of each epoch, and once more at the very end.
62598fb199cbb53fe6830f44
class StackTraceMapper(tf_stack.StackTraceMapper): <NEW_LINE> <INDENT> def __init__(self, converted_fn): <NEW_LINE> <INDENT> self._source_map = converted_fn.ag_source_map <NEW_LINE> <DEDENT> def get_effective_source_map(self): <NEW_LINE> <INDENT> effective_source_map = self._effective_source_map <NEW_LINE> if effective_source_map is None: <NEW_LINE> <INDENT> if self.parent is not None: <NEW_LINE> <INDENT> parent_map = self.parent.get_effective_source_map() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> parent_map = {} <NEW_LINE> <DEDENT> effective_source_map = {} <NEW_LINE> for loc, origin in self._source_map.items(): <NEW_LINE> <INDENT> effective_source_map[(loc.filename, loc.lineno)] = ( origin.loc.filename, origin.loc.lineno, origin.function_name) <NEW_LINE> <DEDENT> for key, value in parent_map.items(): <NEW_LINE> <INDENT> filename, lineno, _ = value <NEW_LINE> value_loc = origin_info.LineLocation(filename=filename, lineno=lineno) <NEW_LINE> if value_loc in self._source_map: <NEW_LINE> <INDENT> origin = self._source_map[value_loc] <NEW_LINE> effective_source_map[key] = ( origin.loc.filename, origin.loc.lineno, origin.function_name) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> effective_source_map[key] = value <NEW_LINE> <DEDENT> <DEDENT> self._effective_source_map = effective_source_map <NEW_LINE> <DEDENT> return effective_source_map
Remaps generated code to code it originated from.
62598fb199fddb7c1ca62e1f
class C3BinaryDataProcessor(DatasetGetter): <NEW_LINE> <INDENT> def __init__(self, tokenizer, max_length): <NEW_LINE> <INDENT> self.tokenizer = tokenizer <NEW_LINE> self.max_length = max_length <NEW_LINE> <DEDENT> def get_dataset(self, fn, with_label=True): <NEW_LINE> <INDENT> features = [] <NEW_LINE> df = pd.read_csv(fn) <NEW_LINE> for i in tqdm(df.index, desc='tokenizing'): <NEW_LINE> <INDENT> example = df.iloc[i] <NEW_LINE> features.append(self.convert_example_to_features(example)) <NEW_LINE> <DEDENT> all_input_ids = torch.cat([ torch.LongTensor([f.input_ids]) for f in features ]) <NEW_LINE> all_input_mask = torch.cat([ torch.LongTensor([f.input_mask]) for f in features ]) <NEW_LINE> all_segment_ids = torch.cat([ torch.LongTensor([f.segment_ids]) for f in features ]) <NEW_LINE> if with_label: <NEW_LINE> <INDENT> all_label_ids = torch.LongTensor(df.label.values) <NEW_LINE> return TensorDataset(all_input_ids, all_input_mask, all_segment_ids, all_label_ids) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return TensorDataset(all_input_ids, all_input_mask, all_segment_ids) <NEW_LINE> <DEDENT> <DEDENT> def convert_example_to_features(self, example): <NEW_LINE> <INDENT> tokens_a = self.tokenizer.tokenize(example.passage) <NEW_LINE> tokens_b = self.tokenizer.tokenize(example.question) <NEW_LINE> tokens_c = self.tokenizer.tokenize(example.choice_0) <NEW_LINE> tokens_d = self.tokenizer.tokenize(example.choice_1) <NEW_LINE> self._truncate_seq_tuple(tokens_a, tokens_b, tokens_c, tokens_d, max_length=self.max_length-5) <NEW_LINE> tokens = ['[CLS]'] + tokens_a + ['[SEP]'] + tokens_b + ['[SEP]'] + tokens_c + ['[SEP]'] + tokens_d + ['[SEP]'] <NEW_LINE> input_ids = self.tokenizer.convert_tokens_to_ids(tokens) <NEW_LINE> input_mask = [1] * len(input_ids) <NEW_LINE> segment_ids = [0]*(2+len(tokens_a)) + [1]*(2+len(tokens_b)+len(tokens_c)) <NEW_LINE> input_ids += [0] * (self.max_length-len(input_ids)) <NEW_LINE> input_mask += [0] * (self.max_length-len(input_mask)) <NEW_LINE> 
segment_ids += [0] * (self.max_length-len(segment_ids)) <NEW_LINE> assert len(input_ids) == self.max_length <NEW_LINE> assert len(input_mask) == self.max_length <NEW_LINE> assert len(segment_ids) == self.max_length <NEW_LINE> label = None <NEW_LINE> if hasattr(example, 'label'): <NEW_LINE> <INDENT> label = example.label <NEW_LINE> <DEDENT> return InputFeatures(input_ids, input_mask, segment_ids, label) <NEW_LINE> <DEDENT> def _truncate_seq_tuple(self, *tokenList, max_length): <NEW_LINE> <INDENT> while True: <NEW_LINE> <INDENT> lengthList = [len(a) for a in tokenList] <NEW_LINE> if sum(lengthList) <= max_length: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> i = np.argmax(lengthList) <NEW_LINE> tokenList[i].pop()
return C3 dataset as a binary classification problem via an implemented method `get_dataset`.
62598fb155399d3f05626585
class BasicModel(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def create_model(self, kfold_X_train, y_train, kfold_X_test, y_test, test): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def batch_iter(self, data, batch_size, num_epochs=1, shuffle=True): <NEW_LINE> <INDENT> data = np.array(data) <NEW_LINE> data_size = len(data) <NEW_LINE> num_batches_per_epoch = int((data_size-1)/batch_size) + 1 <NEW_LINE> for epoch in range(num_epochs): <NEW_LINE> <INDENT> if shuffle: <NEW_LINE> <INDENT> shuffle_indices = np.random.permutation(np.arange(data_size)) <NEW_LINE> shuffled_data = data[shuffle_indices] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> shuffled_data = data <NEW_LINE> <DEDENT> for batch_num in range(num_batches_per_epoch): <NEW_LINE> <INDENT> start_index = batch_num * batch_size <NEW_LINE> end_index = min((1 + batch_num) * batch_size, data_size) <NEW_LINE> yield shuffled_data[start_index:end_index] <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def get_f1_score(self, x, y, verbose=False): <NEW_LINE> <INDENT> tp = np.sum(np.logical_and(y > 0, x == y)) <NEW_LINE> fp = np.sum(np.logical_and(x > 0, y == 0)) + np.sum(np.logical_and(x * y > 0, y != x)) <NEW_LINE> fn = np.sum(np.logical_and(y > 0, x == 0)) <NEW_LINE> P = float(tp) / (float(tp + fp) + 1e-8) <NEW_LINE> R = float(tp) / (float(tp + fn) + 1e-8) <NEW_LINE> F = 2 * P * R / (P + R + 1e-8) <NEW_LINE> if verbose: <NEW_LINE> <INDENT> print('P->', P) <NEW_LINE> print('R->', R) <NEW_LINE> print('F->', F) <NEW_LINE> <DEDENT> return F
Docstring for BasicModel.
62598fb14527f215b58e9f40
class GA_Printer(StrPrinter): <NEW_LINE> <INDENT> function_names = ('acos', 'acosh', 'acot', 'acoth', 'arg', 'asin', 'asinh', 'atan', 'atan2', 'atanh', 'ceiling', 'conjugate', 'cos', 'cosh', 'cot', 'coth', 'exp', 'floor', 'im', 'log', 're', 'root', 'sin', 'sinh', 'sqrt', 'sign', 'tan', 'tanh') <NEW_LINE> def _print_Function(self, expr): <NEW_LINE> <INDENT> name = expr.func.__name__ <NEW_LINE> if expr.func.nargs is not None: <NEW_LINE> <INDENT> if name in GA_Printer.function_names: <NEW_LINE> <INDENT> return expr.func.__name__ + "(%s)" % self.stringify(expr.args, ", ") <NEW_LINE> <DEDENT> <DEDENT> return enhance_print.enhance_fct("%s" % (name, )) <NEW_LINE> <DEDENT> def _print_Derivative(self, expr): <NEW_LINE> <INDENT> diff_args = list(map(self._print, expr.args)) <NEW_LINE> return enhance_print.enhance_deriv('D{%s}' % (diff_args[1], )) + '%s' % (diff_args[0], ) <NEW_LINE> <DEDENT> def _print_MV(self, expr): <NEW_LINE> <INDENT> if expr.obj.is_zero: <NEW_LINE> <INDENT> return '0' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if expr.print_blades: <NEW_LINE> <INDENT> expr.base_to_blade() <NEW_LINE> <DEDENT> ostr = expr.get_normal_order_str() <NEW_LINE> return ostr <NEW_LINE> <DEDENT> <DEDENT> def _print_Vector(self, expr): <NEW_LINE> <INDENT> if expr.obj.is_zero: <NEW_LINE> <INDENT> return '0' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> ostr = GA_Printer().doprint(expr.obj) <NEW_LINE> ostr = ostr.replace(' ', '') <NEW_LINE> return ostr <NEW_LINE> <DEDENT> <DEDENT> @staticmethod <NEW_LINE> def _on(): <NEW_LINE> <INDENT> GA_Printer.Basic__str__ = Basic.__str__ <NEW_LINE> Basic.__str__ = lambda self: GA_Printer().doprint(self) <NEW_LINE> return <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def _off(): <NEW_LINE> <INDENT> Basic.__str__ = GA_Printer.Basic__str__ <NEW_LINE> return <NEW_LINE> <DEDENT> def __enter__ (self): <NEW_LINE> <INDENT> GA_Printer._on() <NEW_LINE> return self <NEW_LINE> <DEDENT> def __exit__ (self, type, value, traceback): <NEW_LINE> <INDENT> 
GA_Printer._off() <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> @deprecated(useinstead="with GA_Printer()", issue=4042, deprecated_since_version="0.7.4") <NEW_LINE> def on(): <NEW_LINE> <INDENT> GA_Printer._on() <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> @deprecated(useinstead="with GA_Printer()", issue=4042, deprecated_since_version="0.7.4") <NEW_LINE> def off(): <NEW_LINE> <INDENT> GA_Printer._off()
An enhanced string printer that is galgebra-aware.
62598fb1e1aae11d1e7ce859
class DaemonOpenError(DaemonError): <NEW_LINE> <INDENT> def __init__(self, e): <NEW_LINE> <INDENT> if hasattr(e, 'filename'): <NEW_LINE> <INDENT> _msg = 'open({})'.format(e.filename) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> _msg = 'dup2()' <NEW_LINE> <DEDENT> self.msg = 'Failed {}: [errno={}] {}'.format(_msg, e.errno, e.strerror) <NEW_LINE> super(DaemonOpenError, self).__init__(self.msg)
Failed open()/dup2() while becoming a daemon
62598fb1b7558d5895463696
class LikelihoodRatioTestResult(object): <NEW_LINE> <INDENT> def __init__(self, statistic, df, distribution, n): <NEW_LINE> <INDENT> self.statistic = statistic <NEW_LINE> self.df = df <NEW_LINE> self.distribution = distribution <NEW_LINE> self.n = n <NEW_LINE> <DEDENT> @property <NEW_LINE> def pvalue(self): <NEW_LINE> <INDENT> return self.distribution.sf(self.statistic, self.df) <NEW_LINE> <DEDENT> @property <NEW_LINE> def lod(self): <NEW_LINE> <INDENT> return self.statistic / (2.0 * log(10.0))
The result of a likelihood ratio test. :ivar statistic: test statistic :ivar df: degrees of freedom :ivar distribution: distribution of test statistic :ivar n: sample size :type statistic: float :type df: int :type distribution: scipy probability distribution :type n: int
62598fb1796e427e5384e800
class SerialExpectForSocket(SerialExpect): <NEW_LINE> <INDENT> def __init__(self, host='localhost', port=20000, logger=None): <NEW_LINE> <INDENT> url = 'socket://{host}:{port}'.format(host=host, port=port) <NEW_LINE> self.fd = self.try_connect(url, timeout=0.1) <NEW_LINE> self.logger = logger <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def try_connect(url, tries=10, step=0.5, *args, **kwargs): <NEW_LINE> <INDENT> for _ in range(0, tries - 1): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return serial.serial_for_url(url, *args, **kwargs) <NEW_LINE> <DEDENT> except serial.SerialException: <NEW_LINE> <INDENT> time.sleep(step) <NEW_LINE> <DEDENT> <DEDENT> return serial.serial_for_url(url, *args, **kwargs) <NEW_LINE> <DEDENT> def close(self): <NEW_LINE> <INDENT> super(SerialExpectForSocket, self).close() <NEW_LINE> time.sleep(1)
Simple Expect implementation for tcp connection adapter
62598fb11f5feb6acb162c8a
class Vote(UpdateCountsMixin, BaseDate): <NEW_LINE> <INDENT> VOTING_CHOICES = ( (1, 'Like'), (-1, 'Dislike'), ) <NEW_LINE> node = models.ForeignKey('nodes.Node') <NEW_LINE> user = models.ForeignKey(settings.AUTH_USER_MODEL) <NEW_LINE> vote = models.IntegerField(choices=VOTING_CHOICES) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> app_label = 'participation' <NEW_LINE> unique_together = (("node", "user"),) <NEW_LINE> <DEDENT> def __unicode__(self): <NEW_LINE> <INDENT> return _('vote #%d for node %s') % (self.pk, self.node.name) <NEW_LINE> <DEDENT> def update_count(self): <NEW_LINE> <INDENT> node_rating_count = self.node.rating_count <NEW_LINE> node_rating_count.likes = self.node.vote_set.filter(vote=1).count() <NEW_LINE> node_rating_count.dislikes = self.node.vote_set.filter(vote=-1).count() <NEW_LINE> node_rating_count.save() <NEW_LINE> <DEDENT> def clean(self , *args, **kwargs): <NEW_LINE> <INDENT> if not self.pk: <NEW_LINE> <INDENT> if self.node.participation_settings.voting_allowed is not True: <NEW_LINE> <INDENT> raise ValidationError("Voting not allowed for this node") <NEW_LINE> <DEDENT> if 'nodeshot.core.layers' in settings.INSTALLED_APPS: <NEW_LINE> <INDENT> layer = self.node.layer <NEW_LINE> if layer.participation_settings.voting_allowed is not True: <NEW_LINE> <INDENT> raise ValidationError("Voting not allowed for this layer")
Vote model Like or dislike feature
62598fb14c3428357761a324
class ACLsMerge(object): <NEW_LINE> <INDENT> def __init__(self, acls): <NEW_LINE> <INDENT> self.acls = acls <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> acls = config.get_cfg_storage(ID_ACL) <NEW_LINE> for aname in self.acls: <NEW_LINE> <INDENT> acl = acls.get(aname) <NEW_LINE> if acl is not None: <NEW_LINE> <INDENT> for rec in acl: <NEW_LINE> <INDENT> yield rec
Special class that merges different ACLs maps
62598fb116aa5153ce40056f
class BaseListViewPage(BaseDesktopPage): <NEW_LINE> <INDENT> def get_list_items(self): <NEW_LINE> <INDENT> list_items = self.driver.find_elements(*LIST_VIEW_ROW) <NEW_LINE> return [el for el in list_items if el.text.strip() != ''] <NEW_LINE> <DEDENT> def get_list_item_by_name(self, search_string): <NEW_LINE> <INDENT> list_items = self.get_list_items() <NEW_LINE> for list_item in list_items: <NEW_LINE> <INDENT> if search_string in list_item.text: <NEW_LINE> <INDENT> return list_item <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def open_list_item_by_name(self, search_string): <NEW_LINE> <INDENT> list_row = self.get_list_item_by_name(search_string) <NEW_LINE> self.open_list_item(list_row) <NEW_LINE> <DEDENT> def open_list_item(self, list_item): <NEW_LINE> <INDENT> item_id = list_item.get_attribute('data-id') <NEW_LINE> selector = LIST_VIEW_ROW <NEW_LINE> if item_id: <NEW_LINE> <INDENT> selector = ( By.CSS_SELECTOR, '.oe_webclient .oe_application .oe_view_manager_body ' '.oe_list_content tbody > tr[data-id=\'{}\']'.format(item_id) ) <NEW_LINE> <DEDENT> self.click_and_verify_change(list_item, selector, hidden=True) <NEW_LINE> <DEDENT> def wait_for_list_view_to_load(self): <NEW_LINE> <INDENT> self.wait_for_element(LIST_VIEW_ROW)
Common functionality for list view pages
62598fb166673b3332c30438
class ReadableUrlProcessor: <NEW_LINE> <INDENT> patterns = [ (r'/\w+/OL\d+M', '/type/edition', 'title', 'untitled'), (r'/\w+/ia:[a-zA-Z0-9_\.-]+', '/type/edition', 'title', 'untitled'), (r'/\w+/OL\d+A', '/type/author', 'name', 'noname'), (r'/\w+/OL\d+W', '/type/work', 'title', 'untitled'), (r'/[/\w]+/OL\d+L', '/type/list', 'name', 'unnamed') ] <NEW_LINE> def __call__(self, handler): <NEW_LINE> <INDENT> if web.ctx.path.startswith("/l/"): <NEW_LINE> <INDENT> raise web.seeother("/languages/" + web.ctx.path[len("/l/"):]) <NEW_LINE> <DEDENT> if web.ctx.path.startswith("/user/"): <NEW_LINE> <INDENT> if not web.ctx.site.get(web.ctx.path): <NEW_LINE> <INDENT> raise web.seeother("/people/" + web.ctx.path[len("/user/"):]) <NEW_LINE> <DEDENT> <DEDENT> real_path, readable_path = get_readable_path(web.ctx.site, web.ctx.path, self.patterns, encoding=web.ctx.encoding) <NEW_LINE> if readable_path != web.ctx.path and readable_path != urllib.quote(web.utf8(web.ctx.path)) and web.ctx.method == "GET": <NEW_LINE> <INDENT> raise web.redirect(web.safeunicode(readable_path) + web.safeunicode(web.ctx.query)) <NEW_LINE> <DEDENT> web.ctx.readable_path = readable_path <NEW_LINE> web.ctx.path = real_path <NEW_LINE> web.ctx.fullpath = web.ctx.path + web.ctx.query <NEW_LINE> return handler()
Open Library code works with urls like /books/OL1M and /books/OL1M/edit. This processor seemlessly changes the urls to /books/OL1M/title and /books/OL1M/title/edit. The changequery function is also customized to support this.
62598fb1498bea3a75a57b8b
class ApparentMagnitude(object): <NEW_LINE> <INDENT> def __init__(self, sed_name, max_mag=1000.): <NEW_LINE> <INDENT> self.bps = dict() <NEW_LINE> throughput_dir = lsstUtils.getPackageDir('throughputs') <NEW_LINE> for band in 'ugrizy': <NEW_LINE> <INDENT> self.bps[band] = photUtils.Bandpass() <NEW_LINE> self.bps[band].readThroughput(os.path.join(throughput_dir, 'baseline', 'total_%s.dat' % band)) <NEW_LINE> <DEDENT> self.control_bandpass = photUtils.Bandpass() <NEW_LINE> self.control_bandpass.imsimBandpass() <NEW_LINE> sed_dir = lsstUtils.getPackageDir('sims_sed_library') <NEW_LINE> self.sed_unnormed = photUtils.Sed() <NEW_LINE> self.sed_unnormed.readSED_flambda(os.path.join(sed_dir, sed_name)) <NEW_LINE> self.max_mag = max_mag <NEW_LINE> <DEDENT> def _sed_copy(self): <NEW_LINE> <INDENT> return copy.deepcopy(self.sed_unnormed) <NEW_LINE> <DEDENT> def __call__(self, pars, band): <NEW_LINE> <INDENT> spectrum = self._sed_copy() <NEW_LINE> fnorm = spectrum.calcFluxNorm(pars.magNorm, self.control_bandpass) <NEW_LINE> spectrum.multiplyFluxNorm(fnorm) <NEW_LINE> iA_v, iR_v = pars.internalAv, pars.internalRv <NEW_LINE> gA_v, gR_v = pars.galacticAv, pars.galacticRv <NEW_LINE> if iA_v != 0 or iR_v != 0: <NEW_LINE> <INDENT> a_int, b_int = spectrum.setupCCMab() <NEW_LINE> spectrum.addCCMDust(a_int, b_int, A_v=iA_v, R_v=iR_v) <NEW_LINE> <DEDENT> if pars.redshift > 0: <NEW_LINE> <INDENT> spectrum.redshiftSED(pars.redshift, dimming=True) <NEW_LINE> <DEDENT> if gA_v != 0 or gR_v != 0: <NEW_LINE> <INDENT> a_int, b_int = spectrum.setupCCMab() <NEW_LINE> spectrum.addCCMDust(a_int, b_int, A_v=gA_v, R_v=gR_v) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> mag = spectrum.calcMag(self.bps[band]) <NEW_LINE> <DEDENT> except Exception as eObj: <NEW_LINE> <INDENT> if str(eObj).startswith("This SED has no flux"): <NEW_LINE> <INDENT> mag = self.max_mag <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise eObj <NEW_LINE> <DEDENT> <DEDENT> return mag
Class to compute apparent magnitudes for a given rest frame SED. The SED normalization, internal extinction, redshift, and Galactic extinction are applied given the parameters in an instance catalog object line to produce the apparent magnitude in the desired band. Attributes ---------- bps : dict Dictionary of LSST bandpasses. control_bandpass : lsst.photUtils.Bandpass instance The "imsim bandpass" which is used to set magnorm of an object's spectrum. sed_unnormed : lsst.photUtils.Sed object The un-normalized SED. max_mag : float Sentinal value for underflows of Sed.calcMag
62598fb15166f23b2e243446
class NelJet(object): <NEW_LINE> <INDENT> def __init__(self, n0, r0, beta): <NEW_LINE> <INDENT> self._n0 = n0 <NEW_LINE> self._r0 = r0 <NEW_LINE> self._beta = beta <NEW_LINE> return <NEW_LINE> <DEDENT> @property <NEW_LINE> def n0(self): <NEW_LINE> <INDENT> return self._n0 <NEW_LINE> <DEDENT> @property <NEW_LINE> def r0(self): <NEW_LINE> <INDENT> return self._r0 <NEW_LINE> <DEDENT> @property <NEW_LINE> def beta(self): <NEW_LINE> <INDENT> return self._beta <NEW_LINE> <DEDENT> @n0.setter <NEW_LINE> def n0(self, n0): <NEW_LINE> <INDENT> if type(n0) == u.Quantity: <NEW_LINE> <INDENT> self._n0 = n0.to('cm**-3').value <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._n0 = n0 <NEW_LINE> <DEDENT> return <NEW_LINE> <DEDENT> @r0.setter <NEW_LINE> def r0(self, r0): <NEW_LINE> <INDENT> if type(r0) == u.Quantity: <NEW_LINE> <INDENT> self._r0 = r0 .to('pc').value <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._r0 = r0 <NEW_LINE> <DEDENT> return <NEW_LINE> <DEDENT> @beta.setter <NEW_LINE> def beta(self, beta): <NEW_LINE> <INDENT> self._beta = beta <NEW_LINE> return <NEW_LINE> <DEDENT> def __call__(self,r): <NEW_LINE> <INDENT> return self._n0 * np.power(r / self._r0, self._beta)
Class to set characteristics of electron density of AGN Jet
62598fb1fff4ab517ebcd851
class Study(models.Model): <NEW_LINE> <INDENT> userId = models.ForeignKey(User) <NEW_LINE> syllabus = models.TextField()
在校学习模型,包括课表
62598fb130dc7b766599f8b9
class Details(db.Document, MtimeMixin): <NEW_LINE> <INDENT> movieinfo = db.EmbeddedDocumentField(MovieInfo) <NEW_LINE> release = db.ListField(db.EmbeddedDocumentField(Release)) <NEW_LINE> detail = db.EmbeddedDocumentField(MovieDetail)
详细信息
62598fb11b99ca400228f566
class phantom(): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def shepp3d(sz = [256, 256, 256]): <NEW_LINE> <INDENT> dim = numpy.array(numpy.flipud(sz)) <NEW_LINE> space = odl.uniform_discr(min_pt = -dim / 2, max_pt = dim / 2, shape=dim, dtype='float32') <NEW_LINE> x = odl.phantom.transmission.shepp_logan(space) <NEW_LINE> vol = numpy.float32(x.asarray())[:,::-1,:] <NEW_LINE> vol = numpy.transpose(vol, [2, 1, 0]) <NEW_LINE> return vol <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def checkers(sz = [256, 256, 256], frequency = 8): <NEW_LINE> <INDENT> vol = numpy.zeros(sz, dtype='bool') <NEW_LINE> step = sz[1] // frequency <NEW_LINE> for ii in range(0, frequency): <NEW_LINE> <INDENT> sl = slice(ii*step, int((ii+0.5) * step)) <NEW_LINE> vol[sl, :, :] = ~vol[sl, :, :] <NEW_LINE> <DEDENT> for ii in range(0, frequency): <NEW_LINE> <INDENT> sl = slice(ii*step, int((ii+0.5) * step)) <NEW_LINE> vol[:, sl, :] = ~vol[:, sl, :] <NEW_LINE> <DEDENT> for ii in range(0, frequency): <NEW_LINE> <INDENT> sl = slice(ii*step, int((ii+0.5) * step)) <NEW_LINE> vol[:, :, sl] = ~vol[:, :, sl] <NEW_LINE> <DEDENT> vol = numpy.float32(vol) <NEW_LINE> return vol
Use tomopy phantom module for now
62598fb27b25080760ed751d
class HistoricalPricesParser(object): <NEW_LINE> <INDENT> SITE_URL = "http://info.finance.yahoo.co.jp/history/?code=%(ccode)s&sy=%(syear)s&sm=%(smon)s&sd=%(sday)s&ey=%(eyear)s&em=%(emon)s&ed=%(eday)s&tm=%(range_type)s&p=%(page)s" <NEW_LINE> DATA_FIELD_NUM = 7 <NEW_LINE> INDEX_DATA_FIELD_NUM = 5 <NEW_LINE> COLUMN_NUM = 50 <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> self._elms = [] <NEW_LINE> <DEDENT> def fetch(self, start_date, end_date, ccode, range_type, page=1): <NEW_LINE> <INDENT> siteurl = self.SITE_URL % {'syear': start_date.year, 'smon': start_date.month, 'sday': start_date.day, 'eyear': end_date.year, 'emon': end_date.month, 'eday': end_date.day, 'page': page, 'range_type':range_type, 'ccode':ccode} <NEW_LINE> fp = urlopen(siteurl) <NEW_LINE> html = fp.read() <NEW_LINE> fp.close() <NEW_LINE> soup = html_parser(html) <NEW_LINE> self._elms = soup.findAll("table", attrs={"class": "boardFin yjSt marB6"}) <NEW_LINE> if len(self._elms) == 0: <NEW_LINE> <INDENT> raise CCODENotFoundException("証券コードが見つかりません") <NEW_LINE> <DEDENT> self._elms = self._elms[0].findAll("tr")[1:] <NEW_LINE> debuglog(siteurl) <NEW_LINE> debuglog(len(self._elms)) <NEW_LINE> <DEDENT> def get(self, idx=0): <NEW_LINE> <INDENT> if self._elms: <NEW_LINE> <INDENT> if idx >= 0: <NEW_LINE> <INDENT> elm = self._elms[idx] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> tds = elm.findAll("td") <NEW_LINE> if len(tds) == self.DATA_FIELD_NUM: <NEW_LINE> <INDENT> data = [self._text(td) for td in tds] <NEW_LINE> data = PriceData(data[0], data[1], data[2], data[3], data[4], data[5], data[6]) <NEW_LINE> return data <NEW_LINE> <DEDENT> elif len(tds) == self.INDEX_DATA_FIELD_NUM: <NEW_LINE> <INDENT> data = [self._text(td) for td in tds] <NEW_LINE> data = PriceData(data[0], data[1], data[2], data[3], data[4], 0, data[4]) <NEW_LINE> return data <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> return None <NEW_LINE> 
<DEDENT> <DEDENT> def get_all(self): <NEW_LINE> <INDENT> res = [] <NEW_LINE> for i in range(len(self._elms)): <NEW_LINE> <INDENT> data = self.get(i) <NEW_LINE> if data: <NEW_LINE> <INDENT> res.append(data) <NEW_LINE> <DEDENT> <DEDENT> return res <NEW_LINE> <DEDENT> def _text(self, soup): <NEW_LINE> <INDENT> if sys.version_info.major < 3: <NEW_LINE> <INDENT> return soup.text.encode("utf-8") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return soup.text
過去の株価情報ページパーサ
62598fb2f548e778e596b610
class BoardElement( object ): <NEW_LINE> <INDENT> def getPos(self): <NEW_LINE> <INDENT> return self.pos <NEW_LINE> <DEDENT> def setPos(self,x,y): <NEW_LINE> <INDENT> self.pos=(int(x),int(y)) <NEW_LINE> <DEDENT> pass
Representacao interna de um tabuleiro de Ricochet Robots.
62598fb2d268445f26639bb9
class RbcpBusError(RbcpError): <NEW_LINE> <INDENT> def __init__(self, message=None): <NEW_LINE> <INDENT> if message is None: <NEW_LINE> <INDENT> message = "SiTCP RBCP Bus Error. Check Device Address and Length for read/write" <NEW_LINE> <DEDENT> super(RbcpBusError, self).__init__(message)
SiTCP RBCP Bus Error. This exception is raised when the RBCP Reply message with Bus Error Flag was set. Check Rbcp.read/write address and length value is valid.
62598fb20c0af96317c563e8
class ParserPlugin(Plugin): <NEW_LINE> <INDENT> requires = ('xdress.base',) <NEW_LINE> defaultrc = utils.RunControl( includes=['.'], defines=["XDRESS"], undefines=[], variables=(), functions=(), classes=(), parsers={'c': ['pycparser', 'gccxml', 'clang'], 'c++':['gccxml', 'clang', 'pycparser']}, clear_parser_cache_period=50, dumpast=NotSpecified, ) <NEW_LINE> rcupdaters = {'includes': lambda old, new: list(new) + list(old)} <NEW_LINE> rcdocs = { 'includes': "Additional include directories", 'defines': "Set additional macro definitions", 'undefines': "Unset additional macro definitions", 'variables': ("A list of variable names in sequence, mapping, " "or apiname format"), 'functions': ("A list of function names in sequence, mapping, " "or apiname format"), 'classes': ("A list of class names in sequence, mapping, " "or apiname format"), 'parsers': "Parser(s) name, list, or dict", 'clear_parser_cache_period': ("Number of parser calls to perform before " "clearing the internal cache. This prevents " "nasty memory overflow issues."), 'dumpast': "Prints the abstract syntax tree of a file.", } <NEW_LINE> def update_argparser(self, parser): <NEW_LINE> <INDENT> rcdocs = self.rcdocs() if callable(self.rcdocs) else self.rcdocs <NEW_LINE> parser.add_argument('-I', '--includes', action='store', dest='includes', nargs="+", help=rcdocs["includes"]) <NEW_LINE> parser.add_argument('-D', '--defines', action='append', dest='defines', nargs="+", help=rcdocs["defines"]) <NEW_LINE> parser.add_argument('-U', '--undefines', action='append', dest='undefines', nargs="+", type=str, help=rcdocs["undefines"]) <NEW_LINE> parser.add_argument('-p', action='store', dest='parsers', help=rcdocs["parsers"]) <NEW_LINE> parser.add_argument('--clear-parser-cache-period', action='store', dest='clear_parser_cache_period', type=int, help=rcdocs["clear_parser_cache_period"]) <NEW_LINE> parser.add_argument('--dumpast', action='store', dest='dumpast', metavar="FILE", help=rcdocs["dumpast"]) <NEW_LINE> <DEDENT> 
def setup(self, rc): <NEW_LINE> <INDENT> if isinstance(rc.parsers, basestring): <NEW_LINE> <INDENT> if '[' in rc.parsers or '{' in rc.parsers: <NEW_LINE> <INDENT> rc.parsers = eval(rc.parsers) <NEW_LINE> <DEDENT> <DEDENT> if rc.dumpast is not NotSpecified: <NEW_LINE> <INDENT> dumpast(rc.dumpast, rc.parsers, rc.sourcedir, includes=rc.includes, defines=rc.defines, undefines=rc.undefines, verbose=rc.verbose, debug=rc.debug, builddir=rc.builddir) <NEW_LINE> sys.exit() <NEW_LINE> <DEDENT> <DEDENT> def execute(self, rc): <NEW_LINE> <INDENT> raise TypeError("ParserPlugin is not a complete plugin. Do not use directly") <NEW_LINE> <DEDENT> def report_debug(self, rc): <NEW_LINE> <INDENT> msg = 'Autodescriber parsers available:\n\n{0}\n\n' <NEW_LINE> msg = msg.format(pformat(PARSERS_AVAILABLE)) <NEW_LINE> return msg
This is a base plugin for tools that wish to wrap parsing. It should not be used directly.
62598fb27b180e01f3e49086
class NameNotFoundError(ExistenceError): <NEW_LINE> <INDENT> pass
No declaration, assignment, or definition of the given name was found.
62598fb24e4d562566372493
class Alerts(Datapoint): <NEW_LINE> <INDENT> def __init__(self, forecast): <NEW_LINE> <INDENT> if not isinstance(forecast, f.Forecast): <NEW_LINE> <INDENT> raise TypeError("Not a Forecast object.") <NEW_LINE> <DEDENT> elif "alerts" not in forecast: <NEW_LINE> <INDENT> raise NoDataError("Alerts Array does not exist.") <NEW_LINE> <DEDENT> super().__init__(forecast.data.get("alerts")) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "<Alerts object for {}>".format(self.title)
Represents the Alerts object from the Forecast response. Refer to https://darksky.net/dev/docs/response under Alerts for documentation.
62598fb2e5267d203ee6b975
class GetVariable(_Action): <NEW_LINE> <INDENT> check_hangup = False <NEW_LINE> def __init__(self, variable): <NEW_LINE> <INDENT> _Action.__init__(self, 'GET VARIABLE', quote(variable)) <NEW_LINE> <DEDENT> def process_response(self, response): <NEW_LINE> <INDENT> result = response.items.get(_RESULT_KEY) <NEW_LINE> if result.value == '1': <NEW_LINE> <INDENT> return result.data <NEW_LINE> <DEDENT> return None
Returns a `variable` associated with this channel. The value of the requested variable is returned as a string. If the variable is undefined, `None` is returned. `AGIAppError` is raised on failure.
62598fb226068e7796d4c9c2
class BaseTest(TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> app = create_app(config.DevelopmentConfig) <NEW_LINE> self.app = app.test_client() <NEW_LINE> self.app_context = app.app_context <NEW_LINE> self.party_test_data = dict( id=1, name="kanu", hqAddress="Nakuru", logoUrl="gig.com/kanu.png" ) <NEW_LINE> self.office_test_data = dict( id=1, type="Federal", name="ambasodor", )
Base Test Class to every test class
62598fb223849d37ff851120
class OwnTracksEntity(TrackerEntity, RestoreEntity): <NEW_LINE> <INDENT> def __init__(self, dev_id, data=None): <NEW_LINE> <INDENT> self._dev_id = dev_id <NEW_LINE> self._data = data or {} <NEW_LINE> self.entity_id = f"{DOMAIN}.{dev_id}" <NEW_LINE> <DEDENT> @property <NEW_LINE> def unique_id(self): <NEW_LINE> <INDENT> return self._dev_id <NEW_LINE> <DEDENT> @property <NEW_LINE> def battery_level(self): <NEW_LINE> <INDENT> return self._data.get("battery") <NEW_LINE> <DEDENT> @property <NEW_LINE> def extra_state_attributes(self): <NEW_LINE> <INDENT> return self._data.get("attributes") <NEW_LINE> <DEDENT> @property <NEW_LINE> def location_accuracy(self): <NEW_LINE> <INDENT> return self._data.get("gps_accuracy") <NEW_LINE> <DEDENT> @property <NEW_LINE> def latitude(self): <NEW_LINE> <INDENT> if self._data.get("gps"): <NEW_LINE> <INDENT> return self._data["gps"][0] <NEW_LINE> <DEDENT> return None <NEW_LINE> <DEDENT> @property <NEW_LINE> def longitude(self): <NEW_LINE> <INDENT> if self._data.get("gps"): <NEW_LINE> <INDENT> return self._data["gps"][1] <NEW_LINE> <DEDENT> return None <NEW_LINE> <DEDENT> @property <NEW_LINE> def location_name(self): <NEW_LINE> <INDENT> return self._data.get("location_name") <NEW_LINE> <DEDENT> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return self._data.get("host_name") <NEW_LINE> <DEDENT> @property <NEW_LINE> def source_type(self): <NEW_LINE> <INDENT> return self._data.get("source_type", SOURCE_TYPE_GPS) <NEW_LINE> <DEDENT> @property <NEW_LINE> def device_info(self): <NEW_LINE> <INDENT> return {"name": self.name, "identifiers": {(OT_DOMAIN, self._dev_id)}} <NEW_LINE> <DEDENT> async def async_added_to_hass(self): <NEW_LINE> <INDENT> await super().async_added_to_hass() <NEW_LINE> if self._data: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> state = await self.async_get_last_state() <NEW_LINE> if state is None: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> attr = state.attributes <NEW_LINE> self._data = { "host_name": 
state.name, "gps": (attr.get(ATTR_LATITUDE), attr.get(ATTR_LONGITUDE)), "gps_accuracy": attr.get(ATTR_GPS_ACCURACY), "battery": attr.get(ATTR_BATTERY_LEVEL), "source_type": attr.get(ATTR_SOURCE_TYPE), } <NEW_LINE> <DEDENT> @callback <NEW_LINE> def update_data(self, data): <NEW_LINE> <INDENT> self._data = data <NEW_LINE> if self.hass: <NEW_LINE> <INDENT> self.async_write_ha_state()
Represent a tracked device.
62598fb271ff763f4b5e77df
class Links(models.Model): <NEW_LINE> <INDENT> title = models.CharField(max_length=50, verbose_name='标题') <NEW_LINE> description = models.CharField(max_length=200, verbose_name='友情链接描述') <NEW_LINE> callback_url = models.URLField(verbose_name='url地址') <NEW_LINE> date_publish = models.DateTimeField(auto_now_add=True, verbose_name='发布时间') <NEW_LINE> index = models.IntegerField(default=999, verbose_name='排列顺序(从小到大)') <NEW_LINE> class Meta: <NEW_LINE> <INDENT> verbose_name = '友情链接' <NEW_LINE> verbose_name_plural = verbose_name <NEW_LINE> ordering = ['index', 'id'] <NEW_LINE> <DEDENT> def __unicode__(self): <NEW_LINE> <INDENT> return self.title
友情链接
62598fb201c39578d7f12de6
class TestLibx264Recipe(BaseTestForMakeRecipe, unittest.TestCase): <NEW_LINE> <INDENT> recipe_name = "libx264" <NEW_LINE> sh_command_calls = ["./configure"]
An unittest for recipe :mod:`~pythonforandroid.recipes.libx264`
62598fb299cbb53fe6830f45
class ExpenseConfig(colander.MappingSchema): <NEW_LINE> <INDENT> id = colander.SchemaNode(colander.Integer(), widget=widget.HiddenWidget(), default=None, missing=None) <NEW_LINE> label = colander.SchemaNode(colander.String(), title=u"Libellé", validator=colander.Length(max=50)) <NEW_LINE> code = colander.SchemaNode(colander.String(), title=u"Code analytique", validator=colander.Length(max=15))
Schema for the configuration of different expense types
62598fb267a9b606de54603b
class TypedMeta(type): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def create_property(name, ptype): <NEW_LINE> <INDENT> pname = '_' + name <NEW_LINE> def getter(self): <NEW_LINE> <INDENT> if not hasattr(self, pname) and hasattr(self, f'{self._getter_prefix}{pname}'): <NEW_LINE> <INDENT> self[f'{self._getter_prefix}{pname}']() <NEW_LINE> <DEDENT> if not hasattr(self, pname): <NEW_LINE> <INDENT> raise AttributeError(f'Please compute or set {name} first.') <NEW_LINE> <DEDENT> return getattr(self, pname) <NEW_LINE> <DEDENT> def setter(self, obj): <NEW_LINE> <INDENT> if not isinstance(obj, ptype): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> obj = ptype(obj) <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> raise TypeError('Must be able to convert object {0} to {1} (or must be of type {1})'.format(name, ptype)) <NEW_LINE> <DEDENT> <DEDENT> setattr(self, pname, obj) <NEW_LINE> <DEDENT> def deleter(self): <NEW_LINE> <INDENT> del self[pname] <NEW_LINE> <DEDENT> return property(getter, setter, deleter) <NEW_LINE> <DEDENT> def __new__(mcs, name, bases, clsdict): <NEW_LINE> <INDENT> for k, v in vars(mcs).items(): <NEW_LINE> <INDENT> if isinstance(v, type) and not k.startswith('_'): <NEW_LINE> <INDENT> clsdict[k] = mcs.create_property(k, v) <NEW_LINE> <DEDENT> <DEDENT> return super(TypedMeta, mcs).__new__(mcs, name, bases, clsdict)
This metaclass creates statically typed class attributes using the property framework. .. code-block:: Python class TestMeta(TypedMeta): attr1 = (int, float) attr2 = DataFrame class TestClass(metaclass=TestMeta): def __init__(self, attr1, attr2): self.attr1 = attr1 self.attr2 = attr2 The above code dynamically creates code that looks like the following: .. code-block:: Python class TestClass: @property def attr1(self): return self._attr1 @attr1.setter def attr1(self, obj): if not isinstance(obj, (int, float)): raise TypeError('attr1 must be int') self._attr1 = obj @attr1.deleter def attr1(self): del self._attr1 @property def attr2(self): return self._attr2 @attr2.setter def attr2(self, obj): if not isinstance(obj, DataFrame): raise TypeError('attr2 must be DataFrame') self._attr2 = obj @attr2.deleter def attr2(self): del self._attr2 def __init__(self, attr1, attr2): self.attr1 = attr1 self.attr2 = attr2
62598fb24527f215b58e9f41
class _OneHotColumn(_FeatureColumn, collections.namedtuple("_OneHotColumn", ["sparse_id_column"])): <NEW_LINE> <INDENT> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return "{}_one_hot".format(self.sparse_id_column.name) <NEW_LINE> <DEDENT> @property <NEW_LINE> def length(self): <NEW_LINE> <INDENT> return self.sparse_id_column.length <NEW_LINE> <DEDENT> @property <NEW_LINE> def config(self): <NEW_LINE> <INDENT> return _get_feature_config(self.sparse_id_column) <NEW_LINE> <DEDENT> @property <NEW_LINE> def key(self): <NEW_LINE> <INDENT> return "{}".format(self) <NEW_LINE> <DEDENT> def insert_transformed_feature(self, columns_to_tensors): <NEW_LINE> <INDENT> if self.sparse_id_column not in columns_to_tensors: <NEW_LINE> <INDENT> self.sparse_id_column.insert_transformed_feature(columns_to_tensors) <NEW_LINE> <DEDENT> columns_to_tensors[self] = columns_to_tensors[self.sparse_id_column] <NEW_LINE> <DEDENT> def _to_dnn_input_layer(self, transformed_input_tensor, unused_weight_collections=None, unused_trainable=False, output_rank=2): <NEW_LINE> <INDENT> sparse_id_column = self.sparse_id_column.id_tensor(transformed_input_tensor) <NEW_LINE> sparse_id_column = layers._inner_flatten(sparse_id_column, output_rank) <NEW_LINE> weight_tensor = self.sparse_id_column.weight_tensor( transformed_input_tensor) <NEW_LINE> if weight_tensor is not None: <NEW_LINE> <INDENT> weighted_column = sparse_ops.sparse_merge(sp_ids=sparse_id_column, sp_values=weight_tensor, vocab_size=self.length) <NEW_LINE> return sparse_ops.sparse_tensor_to_dense(weighted_column) <NEW_LINE> <DEDENT> dense_id_tensor = sparse_ops.sparse_tensor_to_dense(sparse_id_column, default_value=-1) <NEW_LINE> one_hot_id_tensor = array_ops.one_hot( dense_id_tensor, depth=self.length, on_value=1.0, off_value=0.0) <NEW_LINE> return math_ops.reduce_sum( one_hot_id_tensor, reduction_indices=[output_rank - 1])
Represents a one-hot column for use in deep networks. Args: sparse_id_column: A _SparseColumn which is created by `sparse_column_with_*` function.
62598fb23d592f4c4edbaf2e
@base.vectorize <NEW_LINE> class setbit(base.Instruction): <NEW_LINE> <INDENT> __slots__ = ["code"] <NEW_LINE> code = base.opcodes['SETBIT'] <NEW_LINE> arg_format = ['srw', 'sb', 'int']
SETBIT i k n Assigns zero to sri, and then sets the n-th bit to be sb_k The assignment of zero, rather than take an existing register is to ensure we maintain SSA. This instruction is vectorizable
62598fb27c178a314d78d50a
class EarlyStop(object): <NEW_LINE> <INDENT> step_fitness_dict = {'CartPole-v0': [], 'CarRacing-v0': [], 'Breakout-ram-v0': [], 'BipedalWalker-v2': [(190, 15), (300, 30), (400, 40), (600, 50), (700, 65), (800, 80)], 'RoboschoolPong-v1': [], 'Acrobot-v1': []} <NEW_LINE> @classmethod <NEW_LINE> def check(cls, step, fitness, env_name): <NEW_LINE> <INDENT> for i in range( len(cls.step_fitness_dict[env_name]) ): <NEW_LINE> <INDENT> if ( step > cls.step_fitness_dict[env_name][i][0] ) and ( fitness < cls.step_fitness_dict[env_name][i][1] ): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> return False
Contains a method and dictionary that enable the controller.fitness evaluation to be prematurely terminated if a candidate controllers performance is poor. This reduces computational cost. If a given controller falls short of reaching a the specified cumulative reward within the corresponding number of timesteps, the evaluation in controller.fitness is prematurely terminated in order to reduce the runtime. The interface to the controller.fitness is given by the EarlyStop.check method.
62598fb285dfad0860cbfaaa
class MeanLayer(nn.Module): <NEW_LINE> <INDENT> def __init__(self, dim): <NEW_LINE> <INDENT> super(MeanLayer, self).__init__() <NEW_LINE> self.dim = dim <NEW_LINE> <DEDENT> def forward(self, x): <NEW_LINE> <INDENT> return x.mean(dim = self.dim, keepdim=True)
The mean layer: calculates the mean of the data along given 'dim'
62598fb256b00c62f0fb2924
class Timeout(BaseException): <NEW_LINE> <INDENT> def __init__(self, seconds=None, exception=None): <NEW_LINE> <INDENT> self.seconds = seconds <NEW_LINE> self.exception = exception <NEW_LINE> self.timer = None <NEW_LINE> self.start() <NEW_LINE> <DEDENT> def start(self): <NEW_LINE> <INDENT> assert not self.pending, '%r is already started; to restart it, cancel it first' % self <NEW_LINE> if self.seconds is None: <NEW_LINE> <INDENT> self.timer = None <NEW_LINE> <DEDENT> elif self.exception is None or isinstance(self.exception, bool): <NEW_LINE> <INDENT> self.timer = get_hub().schedule_call_global( self.seconds, greenlet.getcurrent().throw, self) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.timer = get_hub().schedule_call_global( self.seconds, greenlet.getcurrent().throw, self.exception) <NEW_LINE> <DEDENT> return self <NEW_LINE> <DEDENT> @property <NEW_LINE> def pending(self): <NEW_LINE> <INDENT> if self.timer is not None: <NEW_LINE> <INDENT> return self.timer.pending <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> def cancel(self): <NEW_LINE> <INDENT> if self.timer is not None: <NEW_LINE> <INDENT> self.timer.cancel() <NEW_LINE> self.timer = None <NEW_LINE> <DEDENT> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> classname = self.__class__.__name__ <NEW_LINE> if self.pending: <NEW_LINE> <INDENT> pending = ' pending' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> pending = '' <NEW_LINE> <DEDENT> if self.exception is None: <NEW_LINE> <INDENT> exception = '' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> exception = ' exception=%r' % self.exception <NEW_LINE> <DEDENT> return '<%s at %s seconds=%s%s%s>' % ( classname, hex(id(self)), self.seconds, exception, pending) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> if self.seconds is None: <NEW_LINE> <INDENT> return '' <NEW_LINE> <DEDENT> if self.seconds == 1: <NEW_LINE> <INDENT> suffix = '' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> suffix = 's' <NEW_LINE> 
<DEDENT> if self.exception is None or self.exception is True: <NEW_LINE> <INDENT> return '%s second%s' % (self.seconds, suffix) <NEW_LINE> <DEDENT> elif self.exception is False: <NEW_LINE> <INDENT> return '%s second%s (silent)' % (self.seconds, suffix) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return '%s second%s (%s)' % (self.seconds, suffix, self.exception) <NEW_LINE> <DEDENT> <DEDENT> def __enter__(self): <NEW_LINE> <INDENT> if self.timer is None: <NEW_LINE> <INDENT> self.start() <NEW_LINE> <DEDENT> return self <NEW_LINE> <DEDENT> def __exit__(self, typ, value, tb): <NEW_LINE> <INDENT> self.cancel() <NEW_LINE> if value is self and self.exception is False: <NEW_LINE> <INDENT> return True
Raises *exception* in the current greenthread after *timeout* seconds. When *exception* is omitted or ``None``, the :class:`Timeout` instance itself is raised. If *seconds* is None, the timer is not scheduled, and is only useful if you're planning to raise it directly. Timeout objects are context managers, and so can be used in with statements. When used in a with statement, if *exception* is ``False``, the timeout is still raised, but the context manager suppresses it, so the code outside the with-block won't see it.
62598fb255399d3f05626588
class HTTP429(HTTPError): <NEW_LINE> <INDENT> def __str__(self): <NEW_LINE> <INDENT> return repr('429 Too Many Requests')
Http429 Error Exception The user has sent too many requests in a given amount of time. Intended for use with rate limiting schemes.
62598fb2be8e80087fbbf0d3
class WebServiceError(AcoustidError): <NEW_LINE> <INDENT> pass
The Web service request failed.
62598fb2d58c6744b42dc30f
class HTTP(BaseHTTPRequestHandler): <NEW_LINE> <INDENT> def _set_headers(self): <NEW_LINE> <INDENT> self.send_response(200) <NEW_LINE> self.send_header("Content-type", "text/html") <NEW_LINE> self.end_headers() <NEW_LINE> <DEDENT> def do_GET(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> html_file = open("./index.html", "rb") <NEW_LINE> response = html_file.read() <NEW_LINE> html_file.close() <NEW_LINE> self._set_headers() <NEW_LINE> self.wfile.write(response) <NEW_LINE> return <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> def do_HEAD(self): <NEW_LINE> <INDENT> self._set_headers() <NEW_LINE> <DEDENT> def do_POST(self): <NEW_LINE> <INDENT> global jqueuer_lock <NEW_LINE> cur_thread = threading.current_thread() <NEW_LINE> logger.debug("Thread arrived => {0} ".format(cur_thread.name)) <NEW_LINE> jqueuer_lock.acquire() <NEW_LINE> cur_thread = threading.current_thread() <NEW_LINE> logger.debug("Thread entered into critical region => {0}".format(cur_thread.name)) <NEW_LINE> content_length = None <NEW_LINE> data_json = None <NEW_LINE> data = None <NEW_LINE> try: <NEW_LINE> <INDENT> content_length = int( self.headers["Content-Length"] ) <NEW_LINE> data = self.rfile.read(int(content_length)).decode("utf-8") <NEW_LINE> data_json = ast.literal_eval(data) <NEW_LINE> pass <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> print("Error in parsing the content_length and packet data") <NEW_LINE> logger.error("Error in parsing the content_length and packet data") <NEW_LINE> <DEDENT> data_back = "" <NEW_LINE> if self.path == "/experiment/result": <NEW_LINE> <INDENT> html_file = open("./" + data_json["id"] + ".html", "a") <NEW_LINE> text = "<hr>Received from {} at {}: Params: {} ".format( str(self.client_address), str(time.time()), str(data_json) ) <NEW_LINE> html_file.write(text) <NEW_LINE> html_file.close() <NEW_LINE> data_back = "received" <NEW_LINE> <DEDENT> if self.path == "/experiment/add": <NEW_LINE> <INDENT> 
data_back = add_experiment(data_json) <NEW_LINE> <DEDENT> elif self.path == "/experiment/del": <NEW_LINE> <INDENT> data_back = del_experiment(data_json) <NEW_LINE> <DEDENT> elif self.path == "/experiment/metrics": <NEW_LINE> <INDENT> data_back = record_worker_metrics(data_json) <NEW_LINE> <DEDENT> elif self.path == "/experiment/inform": <NEW_LINE> <INDENT> data_back = inform_event(data_json) <NEW_LINE> <DEDENT> self._set_headers() <NEW_LINE> self.wfile.write(bytes(str(data_back), "utf-8")) <NEW_LINE> cur_thread = threading.current_thread() <NEW_LINE> logger.debug("Thread left critical region => {0} ".format(cur_thread.name)) <NEW_LINE> jqueuer_lock.release()
HTTP class Serve HTTP
62598fb2283ffb24f3cf38fb
@python_2_unicode_compatible <NEW_LINE> class EncodeProfile(models.Model): <NEW_LINE> <INDENT> command = models.CharField(_('command'), max_length=1024) <NEW_LINE> container = models.CharField(_('container'), max_length=32) <NEW_LINE> name = models.CharField(_('name'), max_length=255) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return self.name <NEW_LINE> <DEDENT> def shell_command(self, input_path, output_path): <NEW_LINE> <INDENT> args = {'input': input_path, 'output': output_path} <NEW_LINE> return shlex.split(self.command % args)
Encoding profiles associated with ``MediaBase`` subclasses. Each media instance can have multiple encoding profiles associated with it. When a media instance is encoded, it will be encoded using all associated encoding profiles.
62598fb2a79ad1619776a0d6
class AddRecord(environment.CLIRunnable): <NEW_LINE> <INDENT> action = 'add' <NEW_LINE> def execute(self, args): <NEW_LINE> <INDENT> manager = SoftLayer.DNSManager(self.client) <NEW_LINE> zone_id = helpers.resolve_id(manager.resolve_ids, args['<zone>'], name='zone') <NEW_LINE> manager.create_record( zone_id, args['<record>'], args['<type>'], args['<data>'], ttl=args['--ttl'] or 7200)
usage: sl dns add <zone> <record> <type> <data> [--ttl=TTL] [options] Add resource record Arguments: <zone> Zone name (softlayer.com) <record> Resource record (www) <type> Record type. [Options: A, AAAA, CNAME, MX, NS, PTR, SPF, SRV, TXT] <data> Record data. NOTE: only minor validation is done Options: --ttl=TTL Time to live
62598fb23539df3088ecc320
class AnalyzeQueryResponse(object): <NEW_LINE> <INDENT> openapi_types = { 'errors': 'list[AnalyzeQueryResponseErrors]' } <NEW_LINE> attribute_map = { 'errors': 'errors' } <NEW_LINE> def __init__(self, errors=None): <NEW_LINE> <INDENT> self._errors = None <NEW_LINE> self.discriminator = None <NEW_LINE> if errors is not None: <NEW_LINE> <INDENT> self.errors = errors <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def errors(self): <NEW_LINE> <INDENT> return self._errors <NEW_LINE> <DEDENT> @errors.setter <NEW_LINE> def errors(self, errors): <NEW_LINE> <INDENT> self._errors = errors <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in six.iteritems(self.openapi_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pprint.pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, AnalyzeQueryResponse): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other
NOTE: This class is auto generated by OpenAPI Generator. Ref: https://openapi-generator.tech Do not edit the class manually.
62598fb2167d2b6e312b6fe1
class AgentGlobals(object): <NEW_LINE> <INDENT> _container_id = "00000000-0000-0000-0000-000000000000" <NEW_LINE> @staticmethod <NEW_LINE> def get_container_id(): <NEW_LINE> <INDENT> return AgentGlobals._container_id <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def update_container_id(container_id): <NEW_LINE> <INDENT> AgentGlobals._container_id = container_id
This class is used for setting AgentGlobals which can be used all throughout the Agent.
62598fb27d847024c075c430
class Builder(Command): <NEW_LINE> <INDENT> def __init__(self, build_engine, params): <NEW_LINE> <INDENT> self.params = params <NEW_LINE> self.build_engine = build_engine <NEW_LINE> self.target = self.params.target <NEW_LINE> self.generator = None <NEW_LINE> super(Builder, self).__init__(self.build_engine, Command.TYPE_BUILDER) <NEW_LINE> <DEDENT> def execute(self): <NEW_LINE> <INDENT> self.prepare() <NEW_LINE> self.before_generate() <NEW_LINE> if self.params.validate: <NEW_LINE> <INDENT> LOGGER.info( "The --validate parameter was specified, generation will not be performed, exiting" ) <NEW_LINE> return <NEW_LINE> <DEDENT> self.generate() <NEW_LINE> if self.params.dry_run: <NEW_LINE> <INDENT> LOGGER.info( "The --dry-run parameter was specified, build will not be executed, exiting" ) <NEW_LINE> return <NEW_LINE> <DEDENT> self.before_build() <NEW_LINE> self.run() <NEW_LINE> <DEDENT> def prepare(self): <NEW_LINE> <INDENT> if ( self.build_engine == "docker" or self.build_engine == "buildah" or self.build_engine == "podman" ): <NEW_LINE> <INDENT> from cekit.generator.docker import DockerGenerator as generator_impl <NEW_LINE> LOGGER.info("Generating files for {} engine".format(self.build_engine)) <NEW_LINE> <DEDENT> elif self.build_engine == "osbs": <NEW_LINE> <INDENT> from cekit.generator.osbs import OSBSGenerator as generator_impl <NEW_LINE> LOGGER.info("Generating files for OSBS engine") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise CekitError( "Unsupported generator type: '{}'".format(self.build_engine) ) <NEW_LINE> <DEDENT> self.generator = generator_impl( self.params.descriptor, self.params.target, self.params.overrides ) <NEW_LINE> if CONFIG.get("common", "redhat"): <NEW_LINE> <INDENT> self.generator.add_redhat_overrides() <NEW_LINE> <DEDENT> <DEDENT> def before_generate(self): <NEW_LINE> <INDENT> LOGGER.debug("Checking CEKit generate dependencies...") <NEW_LINE> self.dependency_handler.handle(self.generator, self.params) <NEW_LINE> self.generator.init() 
<NEW_LINE> <DEDENT> def generate(self): <NEW_LINE> <INDENT> self.generator.generate(self.build_engine) <NEW_LINE> <DEDENT> def before_build(self): <NEW_LINE> <INDENT> LOGGER.debug("Checking CEKit build dependencies...") <NEW_LINE> self.dependency_handler.handle(self, self.params)
Class representing generic builder - if it's instantiated it returns proper builder
62598fb2379a373c97d99083
class Dialogu_3(QDialog, Dialogu_3.Ui_Dialog): <NEW_LINE> <INDENT> def __init__(self, parent=None): <NEW_LINE> <INDENT> super(Dialogu_3, self).__init__(parent) <NEW_LINE> self.setupUi(self) <NEW_LINE> self.buttonBox_Ok_pilot.clicked.connect(self.setdata_pilot) <NEW_LINE> self.model = Model() <NEW_LINE> <DEDENT> def setdata_pilot(self): <NEW_LINE> <INDENT> query = QSqlQuery() <NEW_LINE> query.prepare("INSERT INTO Pilot(rank,first_name, last_name)" "VALUES (?,?,?)") <NEW_LINE> query.bindValue(0, self.comboBox_grade.currentText()) <NEW_LINE> query.bindValue(1, self.lineEdit_2.text()) <NEW_LINE> query.bindValue(2, self.lineEdit_3.text()) <NEW_LINE> query.exec_() <NEW_LINE> self.model.select()
Opens Dialogu box to insert new pilot in database
62598fb2a8370b77170f044b
class Slider(object): <NEW_LINE> <INDENT> def __init__(self, items=None): <NEW_LINE> <INDENT> self.items = items <NEW_LINE> self.menu = component.Component(Menu(self.items), model='slider') <NEW_LINE> self.menu.on_answer(self.select_slide) <NEW_LINE> self.content = component.Component(None) <NEW_LINE> self.select_slide(0) <NEW_LINE> <DEDENT> def select_slide(self, index): <NEW_LINE> <INDENT> self.content.becomes(self.items[index], model='slider') <NEW_LINE> self.menu().selected(index)
A simple Bar chart rendered with html5
62598fb2e5267d203ee6b977
class DietPlanNutritionInfo (NutritionInfo): <NEW_LINE> <INDENT> diet_plan = models.OneToOneField(DietPlan, related_name='nutrition_info') <NEW_LINE> class Meta: <NEW_LINE> <INDENT> verbose_name = 'Target nutrition' <NEW_LINE> verbose_name_plural = 'Target nutrition'
Nutritional information for a DietPlan.
62598fb232920d7e50bc60c3
class CustomPlugin(object): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def start(cls,urldata): <NEW_LINE> <INDENT> pass
a customed fetch plugin
62598fb25fc7496912d482b5
class TodoTxt: <NEW_LINE> <INDENT> def __init__(self, filename, encoding='utf-8', parser=None): <NEW_LINE> <INDENT> self.filename = pathlib.Path(filename) <NEW_LINE> self.encoding = encoding <NEW_LINE> self.linesep = os.linesep <NEW_LINE> self.tasks = [] <NEW_LINE> self.parser = parser or TodoTxtParser(self.encoding) <NEW_LINE> <DEDENT> def add(self, task): <NEW_LINE> <INDENT> self.tasks.append(task) <NEW_LINE> task.linenr = len(self.tasks) <NEW_LINE> task.todotxt = self <NEW_LINE> <DEDENT> def parse(self): <NEW_LINE> <INDENT> self.tasks = [] <NEW_LINE> for task in self.parser.parse(pathlib.Path(self.filename)): <NEW_LINE> <INDENT> task.todotxt = self <NEW_LINE> self.tasks.append(task) <NEW_LINE> <DEDENT> self.linesep = self.parser.linesep <NEW_LINE> return self.tasks <NEW_LINE> <DEDENT> def save(self, target=None, safe=True, linesep=None): <NEW_LINE> <INDENT> if target is None: <NEW_LINE> <INDENT> target = self.filename <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> target = pathlib.Path(target) <NEW_LINE> <DEDENT> if linesep is None: <NEW_LINE> <INDENT> linesep = self.linesep <NEW_LINE> <DEDENT> if safe: <NEW_LINE> <INDENT> tmpfile = tempfile.NamedTemporaryFile('wb', buffering=0, dir=self.filename.parent, delete=False, prefix=".tmp", suffix="~") <NEW_LINE> self.write_to_stream(tmpfile, linesep) <NEW_LINE> tmpfile.close() <NEW_LINE> os.replace(tmpfile.name, target) <NEW_LINE> try: <NEW_LINE> <INDENT> os.unlink(tmpfile.name) <NEW_LINE> <DEDENT> except OSError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> with open(target, 'wb', buffering=0) as fd: <NEW_LINE> <INDENT> self.write_to_stream(fd, linesep) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def write_to_stream(self, stream, linesep=None): <NEW_LINE> <INDENT> if linesep is None: <NEW_LINE> <INDENT> linesep = self.linesep <NEW_LINE> <DEDENT> stream.write(bytes(linesep.join(self.lines) + linesep, self.encoding)) <NEW_LINE> <DEDENT> @property <NEW_LINE> def lines(self): <NEW_LINE> 
<INDENT> return self.build_lines() <NEW_LINE> <DEDENT> def build_lines(self): <NEW_LINE> <INDENT> lines = [(task.linenr if task.linenr is not None else len(self.tasks), str(task)) for task in self.tasks] <NEW_LINE> lines.sort() <NEW_LINE> return [line for _, line in lines] <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return f'{self.__class__.__name__}(filename="{self.filename}")'
Convenience wrapper for a single todo.txt file The most common use is:: todotxt = TodoTxt("todo.txt") todotxt.parse() Use the ``tasks`` property to access the parsed entries.
62598fb201c39578d7f12de8
class ClickableCellRendererPixbuf(Gtk.CellRendererPixbuf): <NEW_LINE> <INDENT> __gsignals__ = { 'clicked': ( GObject.SignalFlags.RUN_LAST, GObject.TYPE_BOOLEAN, (GObject.TYPE_PYOBJECT,), GObject.signal_accumulator_true_handled, ) } <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> Gtk.CellRendererPixbuf.__init__(self) <NEW_LINE> self.props.mode = Gtk.CellRendererMode.ACTIVATABLE <NEW_LINE> <DEDENT> def do_activate(self, event, widget, path, background_area, cell_area, flags): <NEW_LINE> <INDENT> self.emit('clicked', path) <NEW_LINE> return
Custom :class:`Gtk.CellRendererPixbuf` emitting an *clicked* signal upon activation of the pixbuf
62598fb2d486a94d0ba2c03f
class Time: <NEW_LINE> <INDENT> def flies(self): <NEW_LINE> <INDENT> return River.flow(day, night)
逝者如斯夫,不舍昼夜。 -- 《论语》
62598fb292d797404e388b9b
class StratifiedMean(_ProtoInit): <NEW_LINE> <INDENT> def __call__(self, shape, dtype="float32"): <NEW_LINE> <INDENT> self.instantiate(dtype=dtype) <NEW_LINE> for label, num in zip(self.unique_labels, self.prototype_distribution): <NEW_LINE> <INDENT> x_label = self.x_train[self.y_train == label] <NEW_LINE> x_label_mean = np.mean(x_label, axis=0) <NEW_LINE> x_label_mean = x_label_mean.reshape(1, np.prod(self.x_train.shape[1:])) <NEW_LINE> for _ in range(num): <NEW_LINE> <INDENT> self.prototypes = np.append(self.prototypes, x_label_mean, axis=0) <NEW_LINE> <DEDENT> self.prototype_labels = np.append(self.prototype_labels, [label] * num) <NEW_LINE> <DEDENT> self.validate(shape=shape) <NEW_LINE> random_offset = self.epsilon * np.random.choice( [-1, 1], size=self.prototypes.shape) <NEW_LINE> return self.prototypes + random_offset, self.prototype_labels
Initializer that samples the mean data for each class.
62598fb24f6381625f1994f7
class ConfigManagerEntryIndexView(HomeAssistantView): <NEW_LINE> <INDENT> url = '/api/config/config_entries/entry' <NEW_LINE> name = 'api:config:config_entries:entry' <NEW_LINE> @asyncio.coroutine <NEW_LINE> def get(self, request): <NEW_LINE> <INDENT> hass = request.app['hass'] <NEW_LINE> return self.json([{ 'entry_id': entry.entry_id, 'domain': entry.domain, 'title': entry.title, 'source': entry.source, 'state': entry.state, } for entry in hass.config_entries.async_entries()])
View to get available config entries.
62598fb2a17c0f6771d5c2a4
class JobEndNode(graph.State): <NEW_LINE> <INDENT> def __init__(self, name, next_name, reentrance=False): <NEW_LINE> <INDENT> super(JobEndNode, self).__init__(name, reentrance) <NEW_LINE> self._next_name = next_name <NEW_LINE> <DEDENT> def process(self, session, current_node, nodes_process): <NEW_LINE> <INDENT> return self._next_name
任务结束节点
62598fb255399d3f05626589
class ElementGetter(object): <NEW_LINE> <INDENT> def __init__(self, locator_type, query_string, base_element=None, timeout=0, value=lambda el: el, only_if=lambda el: el is not None, facet=False): <NEW_LINE> <INDENT> self.query_string = query_string <NEW_LINE> self.locator_type = locator_type <NEW_LINE> self.timeout = timeout <NEW_LINE> self.driver = None <NEW_LINE> self.iframe = None <NEW_LINE> self.base_element = base_element <NEW_LINE> self.value_mapper = value <NEW_LINE> self.root_fn = lambda: Page.get_driver() <NEW_LINE> self.only_if = only_if <NEW_LINE> log.debug("locator:%s, query_string:%s, timeout:%d" % (locator_type, query_string, timeout)) <NEW_LINE> self.is_facet = facet <NEW_LINE> self.is_debug_facet = False <NEW_LINE> <DEDENT> @property <NEW_LINE> def root(self): <NEW_LINE> <INDENT> return self.root_fn() <NEW_LINE> <DEDENT> @root.setter <NEW_LINE> def root(self, root_fn): <NEW_LINE> <INDENT> self.root_fn = root_fn <NEW_LINE> <DEDENT> def _get_element(self, method=None): <NEW_LINE> <INDENT> if self.base_element: <NEW_LINE> <INDENT> if isinstance(self.base_element, types.LambdaType): <NEW_LINE> <INDENT> _ = self.base_element() <NEW_LINE> _meth = getattr(_, method.__name__) <NEW_LINE> <DEDENT> elif isinstance(self.base_element, Element): <NEW_LINE> <INDENT> _meth = getattr(self.base_element.__get__(self, self.__class__), method.__name__) <NEW_LINE> <DEDENT> elif isinstance(self.base_element, WebElement): <NEW_LINE> <INDENT> _meth = getattr(self.base_element, "find_element") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise TypeError("invalid base_element type (%s) used" % ( type(self.base_element))) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> _meth = method <NEW_LINE> <DEDENT> log.debug( "looking up locator:%s, query_string:%s, timeout:%d" % (self.locator_type, self.query_string, self.timeout)) <NEW_LINE> if self.iframe: <NEW_LINE> <INDENT> Page.local.driver.switch_to_default_content() <NEW_LINE> Page.local.driver.switch_to_frame(self.iframe) 
<NEW_LINE> <DEDENT> if self.timeout: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> def callback(_): <NEW_LINE> <INDENT> return _meth(self.locator_type, self.query_string) and (BaseCondition.get_current() or self.only_if)( _meth(self.locator_type, self.query_string) ) <NEW_LINE> <DEDENT> WebDriverWait(self.root, self.timeout, ignored_exceptions=[StaleElementReferenceException,]).until(callback) <NEW_LINE> <DEDENT> except TimeoutException: <NEW_LINE> <INDENT> log.debug( "unable to find element %s after waiting for %d seconds" % ( self.query_string, self.timeout) ) <NEW_LINE> raise <NEW_LINE> <DEDENT> <DEDENT> return _meth(self.locator_type, self.query_string) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def enhance(cls, element): <NEW_LINE> <INDENT> for enhancer in get_enhancers(): <NEW_LINE> <INDENT> if enhancer.matches(element): <NEW_LINE> <INDENT> return enhancer(element) <NEW_LINE> <DEDENT> <DEDENT> return element
internal class to encapsulate the logic used by :class:`holmium.core.Element` & :class:`holmium.core.Elements`
62598fb255399d3f0562658a
class Crash(namedtuple("C", "foo bar")): <NEW_LINE> <INDENT> def __str__(self): <NEW_LINE> <INDENT> return "{0.foo}: {0.bar}".format(self)
Looking for attributes in __str__ will crash, because EmptyNodes can't be infered.
62598fb238b623060ffa910c
class SitemapError(Exception): <NEW_LINE> <INDENT> pass
Base error class for Sitemap errors.
62598fb22ae34c7f260ab152
class SinglePointSet(QChemDictSet): <NEW_LINE> <INDENT> defaults = {"basis": "6-311++G*", "SCF_algorithm": "diis", "max_scf_cycles": 200} <NEW_LINE> def __init__(self, molecule, DFT_rung=4, PCM_solvent=None): <NEW_LINE> <INDENT> self.basis_set = defaults.get("basis") <NEW_LINE> self.SCF_algorithm = defaults.get("SCF_algorithm") <NEW_LINE> self.max_scf_cycles = defaults.get("max_scf_cycles") <NEW_LINE> super(SinglePointSet, self).__init__(molecule=molecule, job_type="sp", DFT_rung=DFT_rung, PCM_solvent=PCM_solvent, basis_set=self.basis_set, SCF_algorithm=self.SCF_algorithm, max_scf_cycles=self.max_scf_cycles)
QChemDictSet for a single point calculation
62598fb2a79ad1619776a0d8
class TestOdootilCommon(TestBaseCommon): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def setUpClass(cls): <NEW_LINE> <INDENT> super().setUpClass() <NEW_LINE> cls.Odootil = cls.env['odootil'] <NEW_LINE> cls.company = cls.env.ref('base.main_company') <NEW_LINE> cls.ResPartner = cls.env['res.partner'] <NEW_LINE> cls.partner_1 = cls.env.ref('base.res_partner_1') <NEW_LINE> cls.partner_2 = cls.env.ref('base.res_partner_2') <NEW_LINE> cls.partner_3 = cls.env.ref('base.res_partner_3') <NEW_LINE> cls.partner_4 = cls.env.ref('base.res_partner_4')
Common class for all odootil test cases.
62598fb2f548e778e596b614
class StructureKeeperLair(OwnedStructure): <NEW_LINE> <INDENT> def __init__(self, pos: RoomPosition, room: Room, structureType: str, _id: str, hits: int, hitsMax: int, my: bool, owner: _Owner, ticksToSpawn: int) -> None: <NEW_LINE> <INDENT> super().__init__(pos, room, structureType, _id, hits, hitsMax, my, owner) <NEW_LINE> self.ticksToSpawn = ticksToSpawn
:type ticksToSpawn: int
62598fb24e4d562566372496
class TestViewFeatureViewSet(TestBaseViewFeatureViewSet, NamespaceMixin): <NEW_LINE> <INDENT> pass
Test ViewFeaturesViewSet read operations.
62598fb2fff4ab517ebcd856
class ChoiceException(StandardOption, Exception): <NEW_LINE> <INDENT> def result(self, value): <NEW_LINE> <INDENT> return self
A choice for input_choice which result in this exception.
62598fb27047854f4633f44b
class HeatmiserV3Thermostat(ClimateDevice): <NEW_LINE> <INDENT> def __init__(self, heatmiser, device, name, serport): <NEW_LINE> <INDENT> self.heatmiser = heatmiser <NEW_LINE> self.device = device <NEW_LINE> self.serport = serport <NEW_LINE> self._current_temperature = None <NEW_LINE> self._name = name <NEW_LINE> self._id = device <NEW_LINE> self.dcb = None <NEW_LINE> self.update() <NEW_LINE> self._target_temperature = int(self.dcb.get('roomset')) <NEW_LINE> <DEDENT> @property <NEW_LINE> def supported_features(self): <NEW_LINE> <INDENT> return SUPPORT_TARGET_TEMPERATURE <NEW_LINE> <DEDENT> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return self._name <NEW_LINE> <DEDENT> @property <NEW_LINE> def temperature_unit(self): <NEW_LINE> <INDENT> return TEMP_CELSIUS <NEW_LINE> <DEDENT> @property <NEW_LINE> def current_temperature(self): <NEW_LINE> <INDENT> if self.dcb is not None: <NEW_LINE> <INDENT> low = self.dcb.get('floortemplow ') <NEW_LINE> high = self.dcb.get('floortemphigh') <NEW_LINE> temp = (high * 256 + low) / 10.0 <NEW_LINE> self._current_temperature = temp <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._current_temperature = None <NEW_LINE> <DEDENT> return self._current_temperature <NEW_LINE> <DEDENT> @property <NEW_LINE> def target_temperature(self): <NEW_LINE> <INDENT> return self._target_temperature <NEW_LINE> <DEDENT> def set_temperature(self, **kwargs): <NEW_LINE> <INDENT> temperature = kwargs.get(ATTR_TEMPERATURE) <NEW_LINE> if temperature is None: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> self.heatmiser.hmSendAddress( self._id, 18, temperature, 1, self.serport) <NEW_LINE> self._target_temperature = temperature <NEW_LINE> <DEDENT> def update(self): <NEW_LINE> <INDENT> self.dcb = self.heatmiser.hmReadAddress(self._id, 'prt', self.serport)
Representation of a HeatmiserV3 thermostat.
62598fb2aad79263cf42e843
class NetworkInterfaceListResult(msrest.serialization.Model): <NEW_LINE> <INDENT> _validation = { 'next_link': {'readonly': True}, } <NEW_LINE> _attribute_map = { 'value': {'key': 'value', 'type': '[NetworkInterface]'}, 'next_link': {'key': 'nextLink', 'type': 'str'}, } <NEW_LINE> def __init__( self, **kwargs ): <NEW_LINE> <INDENT> super(NetworkInterfaceListResult, self).__init__(**kwargs) <NEW_LINE> self.value = kwargs.get('value', None) <NEW_LINE> self.next_link = None
Response for the ListNetworkInterface API service call. Variables are only populated by the server, and will be ignored when sending a request. :param value: A list of network interfaces in a resource group. :type value: list[~azure.mgmt.network.v2019_06_01.models.NetworkInterface] :ivar next_link: The URL to get the next set of results. :vartype next_link: str
62598fb2090684286d593715