code stringlengths 4 4.48k | docstring stringlengths 1 6.45k | _id stringlengths 24 24 |
|---|---|---|
class MonitorManagementClient: <NEW_LINE> <INDENT> def __init__( self, credential: "TokenCredential", subscription_id: str, base_url: str = "https://management.azure.com", **kwargs: Any ) -> None: <NEW_LINE> <INDENT> self._config = MonitorManagementClientConfiguration(credential=credential, subscription_id=subscription_id, **kwargs) <NEW_LINE> self._client = ARMPipelineClient(base_url=base_url, config=self._config, **kwargs) <NEW_LINE> client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} <NEW_LINE> self._serialize = Serializer(client_models) <NEW_LINE> self._deserialize = Deserializer(client_models) <NEW_LINE> self._serialize.client_side_validation = False <NEW_LINE> self.activity_log_alerts = ActivityLogAlertsOperations(self._client, self._config, self._serialize, self._deserialize) <NEW_LINE> <DEDENT> def _send_request( self, request, **kwargs: Any ) -> HttpResponse: <NEW_LINE> <INDENT> request_copy = deepcopy(request) <NEW_LINE> request_copy.url = self._client.format_url(request_copy.url) <NEW_LINE> return self._client.send_request(request_copy, **kwargs) <NEW_LINE> <DEDENT> def close(self): <NEW_LINE> <INDENT> self._client.close() <NEW_LINE> <DEDENT> def __enter__(self): <NEW_LINE> <INDENT> self._client.__enter__() <NEW_LINE> return self <NEW_LINE> <DEDENT> def __exit__(self, *exc_details): <NEW_LINE> <INDENT> self._client.__exit__(*exc_details) | Monitor Management Client.
:ivar activity_log_alerts: ActivityLogAlertsOperations operations
:vartype activity_log_alerts:
$(python-base-namespace).v2020_10_01.operations.ActivityLogAlertsOperations
:param credential: Credential needed for the client to connect to Azure.
:type credential: ~azure.core.credentials.TokenCredential
:param subscription_id: The ID of the target subscription.
:type subscription_id: str
:param base_url: Service URL. Default value is 'https://management.azure.com'.
:type base_url: str | 62598fa992d797404e388b0c |
class OracleTablespace(Database): <NEW_LINE> <INDENT> ZENPACKID = 'ZenPacks.community.OracleMon' <NEW_LINE> statusmap ={1: ('grey', 3, 'Unknown'), 2: ('green', 0, 'ONLINE'), 3: ('yellow',3 ,'Available'), 4: ('orange', 4, 'OFFLINE'), 5: ('red', 5, 'INVALID'), } <NEW_LINE> def totalBytes(self): <NEW_LINE> <INDENT> return self.cacheRRDValue('sizeUsed_totalBytes', 0) <NEW_LINE> <DEDENT> def dsn(self): <NEW_LINE> <INDENT> inst = self.getDBSrvInst() <NEW_LINE> return getattr(inst, 'dsn', '') | Oracel Tablespace object | 62598fa9dd821e528d6d8e84 |
class NoMessages(IMAPClientError): <NEW_LINE> <INDENT> pass | Indicates there are no messages to operate on. | 62598fa94a966d76dd5eee31 |
class SnapshotMemorySwitch(Widget, ClickableMixin): <NEW_LINE> <INDENT> ROOT = '//div[contains(@class, "bootstrap-switch-snap_memory")]' <NEW_LINE> ON_LOCATOR = './/div[contains(@class, "bootstrap-switch-on")]' <NEW_LINE> @property <NEW_LINE> def is_set(self): <NEW_LINE> <INDENT> return bool(self.browser.elements(self.ON_LOCATOR)) <NEW_LINE> <DEDENT> def switch_on(self): <NEW_LINE> <INDENT> if not self.is_set: <NEW_LINE> <INDENT> self.click() <NEW_LINE> return True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> def switch_off(self): <NEW_LINE> <INDENT> if self.is_set: <NEW_LINE> <INDENT> self.click() <NEW_LINE> return True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> def fill(self, value): <NEW_LINE> <INDENT> return (self.switch_on if value else self.switch_off)() <NEW_LINE> <DEDENT> def read(self): <NEW_LINE> <INDENT> return self.is_set | This replaces the old-style checkbox when working with snapshot memory.
At first glance on the page it looks like a BootstrapSwitch but it really isn't.
Basic switch controls are implemented here along with fill and read methods,
so this should be usable as any other widget. | 62598fa9498bea3a75a57a6c |
class DBStoredFile(Base): <NEW_LINE> <INDENT> __tablename__ = "blobs" <NEW_LINE> id = Column(Integer(), primary_key=True) <NEW_LINE> file_id = Column(String(36), index=True) <NEW_LINE> filename = Column(Unicode(100)) <NEW_LINE> content_type = Column(String(100)) <NEW_LINE> content_length = Column(Integer()) <NEW_LINE> last_modified = Column(DateTime()) <NEW_LINE> data = deferred(Column('data', LargeBinary())) <NEW_LINE> _cursor = 0 <NEW_LINE> _data = _marker <NEW_LINE> public_url = None <NEW_LINE> def __init__(self, file_id, filename=None, content_type=None, last_modified=None, content_length=None, **kwds): <NEW_LINE> <INDENT> self.file_id = file_id <NEW_LINE> self.filename = filename <NEW_LINE> self.content_type = content_type <NEW_LINE> self.last_modified = last_modified or datetime.now() <NEW_LINE> self.content_length = content_length <NEW_LINE> for k, v in kwds.items(): <NEW_LINE> <INDENT> setattr(self, k, v) <NEW_LINE> <DEDENT> <DEDENT> def read(self, n=-1): <NEW_LINE> <INDENT> if self._data is _marker: <NEW_LINE> <INDENT> file_id = DBSession.merge(self).file_id <NEW_LINE> self._data = DBSession.query(DBStoredFile.data). 
filter_by(file_id=file_id).scalar() <NEW_LINE> <DEDENT> if n == -1: <NEW_LINE> <INDENT> result = self._data[self._cursor:] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result = self._data[self._cursor:self._cursor + n] <NEW_LINE> <DEDENT> self._cursor += len(result) <NEW_LINE> return result <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def close(*args, **kwargs): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def closed(): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def writable(): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def seekable(): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> def seek(self, offset, whence=0): <NEW_LINE> <INDENT> if whence == 0: <NEW_LINE> <INDENT> self._cursor = offset <NEW_LINE> <DEDENT> elif whence in (1, 2): <NEW_LINE> <INDENT> self._cursor = self._cursor + offset <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError('whence must be 0, 1 or 2') <NEW_LINE> <DEDENT> <DEDENT> def tell(self): <NEW_LINE> <INDENT> return self._cursor <NEW_LINE> <DEDENT> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return self.filename <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def __declare_last__(cls): <NEW_LINE> <INDENT> event.listen(DBStoredFile.data, 'set', handle_change_data) | :class:`depot.io.interfaces.StoredFile` implementation that stores
file data in SQL database.
Can be used together with :class:`kotti.filedepot.DBFileStorage` to
implement blobs storage in the database. | 62598fa9a79ad16197769fb3 |
class MetaGaussianNBayesClassifier(BaseClassifier): <NEW_LINE> <INDENT> def __init__(self, priors=None): <NEW_LINE> <INDENT> self.name = 'gaussian_nb' <NEW_LINE> self.max_n_iter = 0 <NEW_LINE> self.init_params = {} <NEW_LINE> self.init_params['priors'] = priors <NEW_LINE> self.estimator = self._get_clf() <NEW_LINE> self.cv_params = self._set_cv_params() <NEW_LINE> self.cv_params_to_tune = [] <NEW_LINE> <DEDENT> def _get_clf(self): <NEW_LINE> <INDENT> return GaussianNB(**self.init_params) <NEW_LINE> <DEDENT> def get_info(self): <NEW_LINE> <INDENT> return {'does_classification': True, 'does_multiclass': True, 'does_regression': False, 'predict_probas': hasattr(self.estimator, 'predict_proba')} <NEW_LINE> <DEDENT> def adjust_params(self, params): <NEW_LINE> <INDENT> return super().adjust_params(params) <NEW_LINE> <DEDENT> def set_tune_params(self, params, num_params=1, mode='random', keys=list()): <NEW_LINE> <INDENT> return super().set_tune_params(params, num_params, mode, keys) <NEW_LINE> <DEDENT> def _set_cv_params(self): <NEW_LINE> <INDENT> return [{}] | Docstring:
Gaussian naive bayes classifier | 62598fa98e71fb1e983bba01 |
class SignInViewSetTest(APITestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.password = 'testpassword' <NEW_LINE> self.user = User.objects.create( email='example@mail.com', login='testlogin', password=self.password ) <NEW_LINE> self.user.set_password(self.password) <NEW_LINE> self.user.save() <NEW_LINE> <DEDENT> def test_success_user_authentication(self): <NEW_LINE> <INDENT> response = self.client.post('/api/v1/sign_in/', { 'login': 'testlogin', 'password': self.password }, format='json') <NEW_LINE> self.assertEqual('token' in response.data, True) <NEW_LINE> <DEDENT> def test_failed_user_authentication(self): <NEW_LINE> <INDENT> response = self.client.post('/api/v1/sign_in/', { 'login': 'example@mail.com', 'password': '' }, format='json') <NEW_LINE> self.assertEqual('errors' in response.data, True) | Tests for sign in viewset class. | 62598fa95fdd1c0f98e5dee7 |
class CompositeCV(object): <NEW_LINE> <INDENT> def __init__(self, trainx, trainy, testx, cv=5): <NEW_LINE> <INDENT> super(CompositeCV, self).__init__() <NEW_LINE> assert len(trainx) == len(trainy) <NEW_LINE> assert len(trainx[0]) == len(testx[0]) <NEW_LINE> self.__trainx = trainx <NEW_LINE> self.__trainy = trainy <NEW_LINE> self.__testx = testx <NEW_LINE> self.__preds = np.array([0] * len(testx)) <NEW_LINE> self.clfs = list() <NEW_LINE> self.preds = np.array([0] * len(testx)) <NEW_LINE> self.__cv = list() <NEW_LINE> lendat = len(self.__trainx) <NEW_LINE> for i in range(cv): <NEW_LINE> <INDENT> trnx = self.__trainx[lendat // cv * 0: lendat // cv * i] <NEW_LINE> trnx = np.concatenate((trnx, self.__trainx[lendat // cv * (i+1):])) <NEW_LINE> trny = self.__trainy[lendat // cv * 0: lendat // cv * i] <NEW_LINE> trny = np.concatenate((trny, self.__trainy[lendat // cv * (i+1):])) <NEW_LINE> valx = self.__trainx[lendat // cv * i: lendat // cv * (i+1)] <NEW_LINE> self.__cv.append((trnx, trny, valx)) <NEW_LINE> <DEDENT> self.__cvpreds = np.array([0] * (lendat // cv * cv)) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def diff(x, y): <NEW_LINE> <INDENT> return sum([x[i] != y[i] for i in range(len(x))]) <NEW_LINE> <DEDENT> def append(self, clf): <NEW_LINE> <INDENT> cvpreds = list() <NEW_LINE> for i in range(len(self.__cv)): <NEW_LINE> <INDENT> clf.fit(self.__cv[i][0], self.__cv[i][1]) <NEW_LINE> cvpreds.extend(clf.predict(self.__cv[i][2])) <NEW_LINE> <DEDENT> cvpreds = np.array(cvpreds).astype(int) <NEW_LINE> old_preds = self.__cvpreds // (len(self.clfs) // 2 + 1) <NEW_LINE> new_preds = (self.__cvpreds + cvpreds) // ((len(self.clfs) + 1) // 2 + 1) <NEW_LINE> if self.diff(new_preds, self.__trainy) < self.diff(old_preds, self.__trainy): <NEW_LINE> <INDENT> print("\tNumber classifiers: " + str(len(self.clfs)) + "\t Last type: " + type(clf).__name__ + "\tCV: " + str(1 - self.diff(new_preds, self.__trainy) / len(self.__trainy))) <NEW_LINE> self.__cvpreds += cvpreds <NEW_LINE> 
clf.fit(self.__trainx, self.__trainy) <NEW_LINE> self.clfs.append(clf) <NEW_LINE> self.__preds += clf.predict(self.__testx) <NEW_LINE> self.preds = self.__preds // (len(self.clfs) // 2 + 1) <NEW_LINE> return self.diff(new_preds, self.__trainy) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print("\tRejected\t\tNumber classifiers: " + str(len(self.clfs)) + "\t Last type: " + type(clf).__name__) <NEW_LINE> return -1 <NEW_LINE> <DEDENT> <DEDENT> def predict(self): <NEW_LINE> <INDENT> assert min(self.preds) >= 0 and max(self.preds) <= 1 <NEW_LINE> return self.preds <NEW_LINE> <DEDENT> def score(self, datax, datay): <NEW_LINE> <INDENT> return np.mean([i.score(datax, datay) for i in self.clfs]) | Composite predictor
Only adds if improves cv score | 62598fa9f9cc0f698b1c5270 |
class TestStringTokenizerCase(TestCase): <NEW_LINE> <INDENT> def test_tokenizer_test(self): <NEW_LINE> <INDENT> text = "This is a test -This -is -NOT -a -test" <NEW_LINE> includes, excludes = get_text_tokenizer(text) <NEW_LINE> self.assertEquals('-'.join(includes), "This-is-a-test") <NEW_LINE> self.assertEquals('-'.join(excludes), "This-is-NOT-a-test") | Tokenizer Test | 62598fa9236d856c2adc93e4 |
class Campaign(Entity): <NEW_LINE> <INDENT> collection = 'campaigns' <NEW_LINE> resource = 'campaign' <NEW_LINE> _relations = { 'advertiser', 'ad_server', 'currency', 'merit_pixel', 'time_zone', } <NEW_LINE> _conv = Entity._enum({'every', 'one', 'variable'}, 'variable') <NEW_LINE> _freq_ints = Entity._enum({'hour', 'day', 'week', 'month', 'not-applicable'}, 'not-applicable') <NEW_LINE> _freq_types = Entity._enum({'even', 'asap', 'no-limit'}, 'no-limit') <NEW_LINE> _goal_cats = Entity._enum({'audience', 'engagement', 'response'}, None) <NEW_LINE> _goal_types = Entity._enum({'spend', 'reach', 'cpc', 'cpe', 'cpa', 'roi'}, None) <NEW_LINE> _serv_types = Entity._enum({'SELF', 'MANAGED'}, 'SELF') <NEW_LINE> _pull = { 'ad_server_fee': float, 'ad_server_id': int, 'ad_server_password': None, 'ad_server_username': None, 'advertiser_id': int, 'agency_fee_pct': float, 'conversion_type': None, 'conversion_variable_minutes': int, 'created_on': Entity._strpt, 'currency_code': None, 'dcs_data_is_campaign_level': Entity._int_to_bool, 'end_date': Entity._strpt, 'frequency_amount': int, 'frequency_interval': None, 'frequency_type': None, 'goal_alert': float, 'goal_category': None, 'goal_type': None, 'goal_value': float, 'has_custom_attribution': Entity._int_to_bool, 'id': int, 'io_name': None, 'io_reference_num': None, 'initial_start_date': Entity._strpt, 'margin_pct': float, 'merit_pixel_id': int, 'name': None, 'pacing_alert': float, 'pc_window_minutes': int, 'pv_pct': float, 'pv_window_minutes': int, 'service_type': None, 'spend_cap_amount': float, 'spend_cap_automatic': Entity._int_to_bool, 'spend_cap_enabled': Entity._int_to_bool, 'start_date': Entity._strpt, 'status': Entity._int_to_bool, 'total_budget': float, 'updated_on': Entity._strpt, 'use_default_ad_server': Entity._int_to_bool, 'use_mm_freq': Entity._int_to_bool, 'version': int, 'zone_name': None, } <NEW_LINE> _push = _pull.copy() <NEW_LINE> _push.update({ 'conversion_type': _conv, 'dcs_data_is_campaign_level': int, 
'end_date': Entity._strft, 'frequency_interval': _freq_ints, 'frequency_type': _freq_types, 'goal_category': _goal_cats, 'goal_type': _goal_types, 'has_custom_attribution': int, 'initial_start_date': Entity._strft, 'service_type': _serv_types, 'spend_cap_automatic': int, 'spend_cap_enabled': int, 'start_date': Entity._strft, 'status': int, 'use_default_ad_server': int, 'use_mm_freq': int, }) <NEW_LINE> def __init__(self, session, properties=None, **kwargs): <NEW_LINE> <INDENT> super(Campaign, self).__init__(session, properties, **kwargs) | Campaign entity.
When creating a new campaign, "zone_name" must be set to the name, such as
America/New_York, rather than the code. A list of time zone names can be
found on the developer portal. | 62598fa938b623060ffa8fe7 |
class User(object): <NEW_LINE> <INDENT> def __init__(self, id, known_value=None, neg_weight=1.0, pos_weight=1.5): <NEW_LINE> <INDENT> self.id = id <NEW_LINE> self.initial_pos_weight = pos_weight <NEW_LINE> self.initial_neg_weight = neg_weight <NEW_LINE> self.known_value = known_value <NEW_LINE> self.inferred_value = known_value <NEW_LINE> self.posts = [] <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return repr(dict( id=self.id, known_value=self.known_value, inferred_value=self.inferred_value, posts=[post.id for _, post in self.posts] )) <NEW_LINE> <DEDENT> def add_post(self, post, pol): <NEW_LINE> <INDENT> self.posts.append((pol, post)) <NEW_LINE> <DEDENT> def initialize(self): <NEW_LINE> <INDENT> self.inferred_value = None <NEW_LINE> <DEDENT> def compute_user_value(self): <NEW_LINE> <INDENT> pos_w = float(self.initial_pos_weight) <NEW_LINE> neg_w = float(self.initial_neg_weight) <NEW_LINE> for pol, post in self.posts: <NEW_LINE> <INDENT> if post.inferred_value is not None: <NEW_LINE> <INDENT> delta = pol * post.inferred_value <NEW_LINE> if delta > 0: <NEW_LINE> <INDENT> pos_w += delta <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> neg_w -= delta <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> self.inferred_value = (pos_w - neg_w) / (pos_w + neg_w) | Class representing a user. See VotingGraph for general comments. | 62598fa94e4d562566372374 |
class AddDealerForm(FlaskForm): <NEW_LINE> <INDENT> name = StringField('Название', validators=[DataRequired()]) <NEW_LINE> address = StringField('Адрес фермы', validators=[DataRequired()]) <NEW_LINE> submit = SubmitField('Добавить породу') | Добавление дилерского центра | 62598fa9e1aae11d1e7ce7cb |
class BlogHandler(webapp2.RequestHandler): <NEW_LINE> <INDENT> def get_posts(self, limit, offset): <NEW_LINE> <INDENT> query = Post.all().order('-created') <NEW_LINE> return query.fetch(limit=limit, offset=offset) <NEW_LINE> <DEDENT> def get_posts_by_user(self, user, limit, offset): <NEW_LINE> <INDENT> query = Post.all().filter('author =', user).order('-created') <NEW_LINE> return query.fetch(limit=limit, offset=offset) <NEW_LINE> return None <NEW_LINE> <DEDENT> def get_user_by_name(self, username): <NEW_LINE> <INDENT> user = db.GqlQuery("SELECT * FROM User WHERE username = '%s'" % username) <NEW_LINE> if user: <NEW_LINE> <INDENT> return user.get() <NEW_LINE> <DEDENT> <DEDENT> def login_user(self, user): <NEW_LINE> <INDENT> user_id = user.key().id() <NEW_LINE> self.set_secure_cookie('user_id', str(user_id)) <NEW_LINE> <DEDENT> def logout_user(self): <NEW_LINE> <INDENT> self.set_secure_cookie('user_id', '') <NEW_LINE> <DEDENT> def read_secure_cookie(self, name): <NEW_LINE> <INDENT> cookie_val = self.request.cookies.get(name) <NEW_LINE> if cookie_val: <NEW_LINE> <INDENT> return hashutils.check_secure_val(cookie_val) <NEW_LINE> <DEDENT> <DEDENT> def set_secure_cookie(self, name, val): <NEW_LINE> <INDENT> cookie_val = hashutils.make_secure_val(val) <NEW_LINE> self.response.headers.add_header('Set-Cookie', '%s=%s; Path=/' % (name, cookie_val)) <NEW_LINE> <DEDENT> def initialize(self, *a, **kw): <NEW_LINE> <INDENT> webapp2.RequestHandler.initialize(self, *a, **kw) <NEW_LINE> uid = self.read_secure_cookie('user_id') <NEW_LINE> self.user = uid and User.get_by_id(int(uid)) <NEW_LINE> if not self.user and self.request.path in auth_paths: <NEW_LINE> <INDENT> self.redirect('/login') | Utility class for gathering various useful methods that are used by most request handlers | 62598fa9435de62698e9bd45 |
class Plan(object): <NEW_LINE> <INDENT> def __init__(self, kwds={}): <NEW_LINE> <INDENT> self.do_py = kwds.pop('py', False) <NEW_LINE> self.do_json = kwds.pop('json', True) or self.do_py <NEW_LINE> self.do_icy = kwds.pop('icy', True) or self.do_json <NEW_LINE> self.enabled = [self.do_icy, self.do_json, self.do_py] <NEW_LINE> do_zip = kwds.get('zip', True) <NEW_LINE> self.stages = list(self.get_stages(do_zip)) <NEW_LINE> assert len(self.enabled) == len(self.stages) - 1 <NEW_LINE> self.n_steps = sum(1 for _ in itertools.takewhile(lambda a:a, self.enabled)) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def get_stages(zip_json): <NEW_LINE> <INDENT> yield Stage(['.curry'] , _curry2icurry.curry2icurry) <NEW_LINE> yield Stage(['.icy'] , _icurry2json.icurry2json) <NEW_LINE> if zip_json: <NEW_LINE> <INDENT> yield Stage(['.json.z'], _json2py.json2py) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> yield Stage(['.json'] , _json2py.json2py) <NEW_LINE> <DEDENT> yield Stage(['.py'] , None) <NEW_LINE> <DEDENT> @property <NEW_LINE> def suffixes(self): <NEW_LINE> <INDENT> def seq(): <NEW_LINE> <INDENT> for en, stage in zip([True] + self.enabled, self.stages): <NEW_LINE> <INDENT> if en: <NEW_LINE> <INDENT> for suffix in stage.suffixes: <NEW_LINE> <INDENT> yield suffix <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return list(seq()) <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return self.n_steps <NEW_LINE> <DEDENT> def position(self, filename): <NEW_LINE> <INDENT> for i,(suffixes,_) in enumerate(self.stages): <NEW_LINE> <INDENT> if any(filename.endswith(suffix) for suffix in suffixes): <NEW_LINE> <INDENT> return i <NEW_LINE> <DEDENT> <DEDENT> assert False | Represents a compilation plan.
Describes the sequence of steps that must be performed and the functions that
implement them. | 62598fa9bd1bec0571e1506b |
class Blacklist_Category(Resource): <NEW_LINE> <INDENT> def __init__(self, blacklist_categorys): <NEW_LINE> <INDENT> super(Blacklist_Category, self).__init__(blacklist_categorys) <NEW_LINE> self._meta_data['required_json_kind'] = 'tm:security:ip-intelligence:blacklist-category:blacklist-categorystate' <NEW_LINE> self._meta_data['required_creation_parameters'].update(('partition',)) <NEW_LINE> self._meta_data['required_load_parameters'].update(('partition',)) | BIG-IP® AFM® IP-Intelligence Blacklist Category resource | 62598fa9851cf427c66b8218 |
class UserLogin(generics.ListCreateAPIView): <NEW_LINE> <INDENT> def post(self, request, *args, **kwargs): <NEW_LINE> <INDENT> email = request.data['email'] <NEW_LINE> password = request.data['password'] <NEW_LINE> user = authenticate(username=email, password=password) <NEW_LINE> if not user: <NEW_LINE> <INDENT> return Response({ "status": "failed", "message": "Please enter a correct username and password." }, status=HTTP_401_UNAUTHORIZED) <NEW_LINE> <DEDENT> if user.is_active: <NEW_LINE> <INDENT> serialized = UserSerializer(user) <NEW_LINE> return Response({ "status": "success", "data":serialized.data}) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return Response({ 'status': "failed", 'message': "Your account is not active, Please contact to administrator." }, status=HTTP_401_UNAUTHORIZED) | Users Login api | 62598fa9b7558d589546357f |
class UpBow(Bowing): <NEW_LINE> <INDENT> pass | >>> a = articulations.UpBow() | 62598fa94f88993c371f04b2 |
class COLINSolver(OptSolver): <NEW_LINE> <INDENT> def __init__(self, **kwds): <NEW_LINE> <INDENT> OptSolver.__init__(self,**kwds) <NEW_LINE> self._valid_problem_formats=[ProblemFormat.colin_optproblem] <NEW_LINE> self._valid_result_formats = {} <NEW_LINE> self._valid_result_formats[ProblemFormat.colin_optproblem] = [ResultsFormat.osrl,ResultsFormat.results] | An optimizer that can optimize the pyomo.opt.colin.problem.OptProblem object | 62598fa94428ac0f6e658473 |
class Empty_Popup(ModalView): <NEW_LINE> <INDENT> popup_object = ObjectProperty(None) <NEW_LINE> def __init__(self, popup_object): <NEW_LINE> <INDENT> super(Empty_Popup, self).__init__() <NEW_LINE> self.popup_object = popup_object <NEW_LINE> self.add_widget(self.popup_object) <NEW_LINE> <DEDENT> def show(self): <NEW_LINE> <INDENT> self.open() <NEW_LINE> <DEDENT> def hide(self): <NEW_LINE> <INDENT> self.dismiss() | docstring for Empty_Popup | 62598fa9be8e80087fbbefb3 |
class DocTestMixedParser(doctest.DocTestParser): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.pyparser = doctest.DocTestParser() <NEW_LINE> self.jsparser = DocTestJSParser() <NEW_LINE> self.javascript_remote_session = JavascriptSession(JS_SESSION_ADDRESS) <NEW_LINE> <DEDENT> def get_doctest(self, string, globs, name, filename, lineno): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.javascript_remote_session.connect() <NEW_LINE> self.skip_javascript_tests = False <NEW_LINE> <DEDENT> except JavascriptSessionError as e: <NEW_LINE> <INDENT> self.skip_javascript_tests = True <NEW_LINE> ex = e <NEW_LINE> <DEDENT> globs = globs.copy() <NEW_LINE> globs["_js_test"] = self.javascript_remote_session.test <NEW_LINE> _doctest = doctest.DocTest(self.get_examples(string, name), globs, name, filename, lineno, string) <NEW_LINE> if self.skip_javascript_tests and self.has_javascript_tests: <NEW_LINE> <INDENT> print("[Warning] The javascript tests will BE SKIPPED! because the connection failed:\n %s" % str(ex)) <NEW_LINE> <DEDENT> return _doctest <NEW_LINE> <DEDENT> def get_examples(self, string, name): <NEW_LINE> <INDENT> self.type_of_source = {} <NEW_LINE> all_examples = [] <NEW_LINE> self.has_javascript_tests = False <NEW_LINE> for type, parser in [("py", self.pyparser), ("js", self.jsparser)]: <NEW_LINE> <INDENT> examples = parser.get_examples(string, name) <NEW_LINE> if self.skip_javascript_tests and type == "js": <NEW_LINE> <INDENT> for example in examples: <NEW_LINE> <INDENT> example.options[doctest.OPTIONFLAGS_BY_NAME["SKIP"]] = True <NEW_LINE> <DEDENT> <DEDENT> if type == "js": <NEW_LINE> <INDENT> self.has_javascript_tests = len(examples) > 0 <NEW_LINE> <DEDENT> for example in examples: <NEW_LINE> <INDENT> link = (example.lineno, type) <NEW_LINE> try: <NEW_LINE> <INDENT> self.type_of_source[example.source].append(link) <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> self.type_of_source[example.source] = [link] <NEW_LINE> <DEDENT> <DEDENT> 
all_examples.extend(examples) <NEW_LINE> <DEDENT> all_examples.sort(key=lambda this: this.lineno) <NEW_LINE> for source in self.type_of_source.keys(): <NEW_LINE> <INDENT> self.type_of_source[source].sort(key=lambda this: this[0], reverse=True) <NEW_LINE> <DEDENT> return all_examples <NEW_LINE> <DEDENT> def shutdown(self): <NEW_LINE> <INDENT> self.javascript_remote_session.shutdown() | This object will parse python and javascript code and will keep
track of which type is each source code.
Then, all the tests are mixed and sorted so their order match the
lexical order in which the tests were found during the parsing stage. | 62598fa97d43ff24874273aa |
class multipleChoice(Question): <NEW_LINE> <INDENT> def __init__(self, question_ID, module, topic, questionText, correctAnswer, marks, incorrectAnswerMarks, options): <NEW_LINE> <INDENT> super().__init__(question_ID, module, topic, questionText, correctAnswer, marks, incorrectAnswerMarks) <NEW_LINE> self._options = options <NEW_LINE> <DEDENT> def getOptions(self): <NEW_LINE> <INDENT> return self._options <NEW_LINE> <DEDENT> def isQuestionMultipleChoice(self): <NEW_LINE> <INDENT> return True | Class to represent a Multiple Choice Question | 62598fa999cbb53fe6830e26 |
class DoubleSidedSet(OrderedSet): <NEW_LINE> <INDENT> def add(self, key): <NEW_LINE> <INDENT> if key not in self._map: <NEW_LINE> <INDENT> self._map[key] = link = Link() <NEW_LINE> root = self._root <NEW_LINE> last = root.next <NEW_LINE> link.next, link.prev, link.key = last, root, key <NEW_LINE> last.prev = root.next = proxy(link) <NEW_LINE> return True <NEW_LINE> <DEDENT> return False | Set that remembers order, pops from the furthermost right, and appends to the most left | 62598fa9435de62698e9bd46 |
class StartMainFuelCurve(Curve): <NEW_LINE> <INDENT> def __init__(self, mainFuelType="lignite", StartupModel=None, *args, **kw_args): <NEW_LINE> <INDENT> self.mainFuelType = mainFuelType <NEW_LINE> self._StartupModel = None <NEW_LINE> self.StartupModel = StartupModel <NEW_LINE> super(StartMainFuelCurve, self).__init__(*args, **kw_args) <NEW_LINE> <DEDENT> _attrs = ["mainFuelType"] <NEW_LINE> _attr_types = {"mainFuelType": str} <NEW_LINE> _defaults = {"mainFuelType": "lignite"} <NEW_LINE> _enums = {"mainFuelType": "FuelType"} <NEW_LINE> _refs = ["StartupModel"] <NEW_LINE> _many_refs = [] <NEW_LINE> def getStartupModel(self): <NEW_LINE> <INDENT> return self._StartupModel <NEW_LINE> <DEDENT> def setStartupModel(self, value): <NEW_LINE> <INDENT> if self._StartupModel is not None: <NEW_LINE> <INDENT> self._StartupModel._StartMainFuelCurve = None <NEW_LINE> <DEDENT> self._StartupModel = value <NEW_LINE> if self._StartupModel is not None: <NEW_LINE> <INDENT> self._StartupModel.StartMainFuelCurve = None <NEW_LINE> self._StartupModel._StartMainFuelCurve = self <NEW_LINE> <DEDENT> <DEDENT> StartupModel = property(getStartupModel, setStartupModel) | The quantity of main fuel (Y-axis) used to restart and repay the auxiliary power consumed versus the number of hours (X-axis) the unit was off line
| 62598fa96e29344779b005ac |
class Security(object): <NEW_LINE> <INDENT> def __init__(self, ticker: str, intraday_prices: pd.DataFrame=None, daily_prices: pd.DataFrame=None, output: str='compact'): <NEW_LINE> <INDENT> self.ticker = ticker <NEW_LINE> self.intraday_prices = intraday_prices <NEW_LINE> self.daily_prices = daily_prices <NEW_LINE> <DEDENT> def get_intraday(self): <NEW_LINE> <INDENT> return self.intraday_prices <NEW_LINE> <DEDENT> def get_daily(self): <NEW_LINE> <INDENT> return self.daily_prices | security with historical intraday and daily price data | 62598fa9cc0a2c111447af61 |
class TranslatedAttribute(BaseDescriptor): <NEW_LINE> <INDENT> def __init__(self, opts, name): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> super(TranslatedAttribute, self).__init__(opts) <NEW_LINE> <DEDENT> def __get__(self, instance, instance_type=None): <NEW_LINE> <INDENT> if not instance: <NEW_LINE> <INDENT> if django.VERSION >= (1, 7) and not registry.apps.ready: <NEW_LINE> <INDENT> raise AttributeError('Attribute not available until registry is ready.') <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> return self.opts.translations_model._meta.get_field_by_name( self.name)[0].default <NEW_LINE> <DEDENT> except FieldDoesNotExist as e: <NEW_LINE> <INDENT> raise AttributeError(*e.args) <NEW_LINE> <DEDENT> <DEDENT> return getattr(self.translation(instance), self.name) <NEW_LINE> <DEDENT> def __set__(self, instance, value): <NEW_LINE> <INDENT> setattr(self.translation(instance), self.name, value) <NEW_LINE> <DEDENT> def __delete__(self, instance): <NEW_LINE> <INDENT> delattr(self.translation(instance), self.name) | Proxies attributes from the shared instance to the translated instance. | 62598fa9167d2b6e312b6ec1 |
class FilesRequest(APIRequest): <NEW_LINE> <INDENT> url = "https://cloud-api.yandex.net/v1/disk/resources/files" <NEW_LINE> method = "GET" <NEW_LINE> def __init__(self, session, offset=0, limit=20, media_type=None, preview_size=None, preview_crop=None, sort=None, fields=None, **kwargs): <NEW_LINE> <INDENT> APIRequest.__init__(self, session, {"offset": offset, "limit": limit, "media_type": media_type, "sort": sort, "preview_size": preview_size, "preview_crop": preview_crop, "fields": fields}, **kwargs) <NEW_LINE> <DEDENT> def process_args(self, offset, limit, media_type, sort, preview_size, preview_crop, fields): <NEW_LINE> <INDENT> self.params["offset"] = offset <NEW_LINE> self.params["limit"] = limit <NEW_LINE> if media_type is not None: <NEW_LINE> <INDENT> if not isinstance(media_type, collections.Iterable): <NEW_LINE> <INDENT> raise TypeError("media_type should be a string or an iterable") <NEW_LINE> <DEDENT> if isinstance(media_type, str): <NEW_LINE> <INDENT> self.params["media_type"] = media_type <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.params["media_type"] = ",".join(media_type) <NEW_LINE> <DEDENT> <DEDENT> if preview_size is not None: <NEW_LINE> <INDENT> self.params["preview_size"] = preview_size <NEW_LINE> <DEDENT> if preview_crop is not None: <NEW_LINE> <INDENT> self.params["preview_crop"] = preview_crop <NEW_LINE> <DEDENT> if sort is not None: <NEW_LINE> <INDENT> self.params["sort"] = sort <NEW_LINE> <DEDENT> if fields is not None: <NEW_LINE> <INDENT> self.params["fields"] = ",".join(fields) <NEW_LINE> <DEDENT> <DEDENT> def process_json(self, js): <NEW_LINE> <INDENT> return FilesResourceListObject(js) | A request to get a flat list of all files (that doesn't include directories).
:param session: an instance of :any:`requests.Session` with prepared headers
:param offset: offset from the beginning of the list
:param limit: number of list elements to be included
:param media_type: type of files to include in the list
:param sort: `str`, field to be used as a key to sort children resources
:param preview_size: size of the file preview
:param preview_crop: `bool`, cut the preview to the size specified in the `preview_size`
:param fields: list of keys to be included in the response
:returns: :any:`FilesResourceListObject` | 62598fa966673b3332c3031b |
class AuthSSH(object): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def login(username, password): <NEW_LINE> <INDENT> client = SSHClient() <NEW_LINE> client.load_system_host_keys() <NEW_LINE> client.set_missing_host_key_policy(WarningPolicy()) <NEW_LINE> try: <NEW_LINE> <INDENT> client.connect('localhost', username=username, password=password, port=22) <NEW_LINE> rs = RecordSession(user=username) <NEW_LINE> rs.token = os.urandom(64).encode('hex') <NEW_LINE> rs.date = datetime.datetime.now() <NEW_LINE> rs.token_end = datetime.datetime.now() + datetime.timedelta(0, 900) <NEW_LINE> rs.save() <NEW_LINE> return dict(token=rs.token, user=username) <NEW_LINE> <DEDENT> except AuthenticationException: <NEW_LINE> <INDENT> return dict(login=0) | Class for sign in the user with system user | 62598fa94a966d76dd5eee33 |
class MockedBackend(Backend): <NEW_LINE> <INDENT> version = '0.2.0' <NEW_LINE> CATEGORY = "mock_item" <NEW_LINE> ITEMS = 5 <NEW_LINE> def __init__(self, origin, tag=None, archive=None): <NEW_LINE> <INDENT> super().__init__(origin, tag=tag, archive=archive) <NEW_LINE> self._fetch_from_archive = False <NEW_LINE> <DEDENT> def fetch_items(self, **kwargs): <NEW_LINE> <INDENT> for x in range(MockedBackend.ITEMS): <NEW_LINE> <INDENT> if self._fetch_from_archive: <NEW_LINE> <INDENT> item = self.archive.retrieve(str(x), None, None) <NEW_LINE> yield item <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> item = {'item': x} <NEW_LINE> if self.archive: <NEW_LINE> <INDENT> self.archive.store(str(x), None, None, item) <NEW_LINE> <DEDENT> yield item <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def fetch(self): <NEW_LINE> <INDENT> return super().fetch(MockedBackend.CATEGORY) <NEW_LINE> <DEDENT> def _init_client(self, from_archive=False): <NEW_LINE> <INDENT> self._fetch_from_archive = from_archive <NEW_LINE> return None <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def metadata_id(item): <NEW_LINE> <INDENT> return str(item['item']) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def metadata_updated_on(item): <NEW_LINE> <INDENT> return '2016-01-01' <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def metadata_category(item): <NEW_LINE> <INDENT> return MockedBackend.CATEGORY | Mocked backend for testing | 62598fa96aa9bd52df0d4e19 |
class RoiPoolingConv(Layer): <NEW_LINE> <INDENT> def __init__(self, pool_size, num_rois, **kwargs): <NEW_LINE> <INDENT> self.dim_ordering = K.common.image_dim_ordering() <NEW_LINE> self.pool_size = pool_size <NEW_LINE> self.num_rois = num_rois <NEW_LINE> super(RoiPoolingConv, self).__init__(**kwargs) <NEW_LINE> <DEDENT> def build(self, input_shape): <NEW_LINE> <INDENT> self.nb_channels = input_shape[0][3] <NEW_LINE> <DEDENT> def compute_output_shape(self, input_shape): <NEW_LINE> <INDENT> return None, self.num_rois, self.pool_size, self.pool_size, self.nb_channels <NEW_LINE> <DEDENT> def call(self, x, mask=None): <NEW_LINE> <INDENT> assert(len(x) == 2) <NEW_LINE> img = x[0] <NEW_LINE> rois = x[1] <NEW_LINE> input_shape = K.shape(img) <NEW_LINE> outputs = [] <NEW_LINE> for roi_idx in range(self.num_rois): <NEW_LINE> <INDENT> x = rois[0, roi_idx, 0] <NEW_LINE> y = rois[0, roi_idx, 1] <NEW_LINE> w = rois[0, roi_idx, 2] <NEW_LINE> h = rois[0, roi_idx, 3] <NEW_LINE> x = K.cast(x, 'int32') <NEW_LINE> y = K.cast(y, 'int32') <NEW_LINE> w = K.cast(w, 'int32') <NEW_LINE> h = K.cast(h, 'int32') <NEW_LINE> rs = tf.image.resize_images(img[:, y:y+h, x:x+w, :], (self.pool_size, self.pool_size)) <NEW_LINE> outputs.append(rs) <NEW_LINE> <DEDENT> final_output = K.concatenate(outputs, axis=0) <NEW_LINE> final_output = K.reshape(final_output, (1, self.num_rois, self.pool_size, self.pool_size, self.nb_channels)) <NEW_LINE> final_output = K.permute_dimensions(final_output, (0, 1, 2, 3, 4)) <NEW_LINE> return final_output <NEW_LINE> <DEDENT> def get_config(self): <NEW_LINE> <INDENT> config = {'pool_size': self.pool_size, 'num_rois': self.num_rois} <NEW_LINE> base_config = super(RoiPoolingConv, self).get_config() <NEW_LINE> return dict(list(base_config.items()) + list(config.items())) | ROI pooling layer for 2D inputs.
See Spatial Pyramid Pooling in Deep Convolutional Networks for Visual Recognition,
K. He, X. Zhang, S. Ren, J. Sun
# Arguments
pool_size: int
Size of pooling region to use. pool_size = 7 will result in a 7x7 region.
num_rois: number of regions of interest to be used
# Input shape
list of two 4D tensors [X_img,X_roi] with shape:
X_img:
`(1, rows, cols, channels)`
X_roi:
`(1,num_rois,4)` list of rois, with ordering (x,y,w,h)
# Output shape
5D tensor with shape:
`(1, num_rois, pool_size, pool_size, channels)` | 62598fa9498bea3a75a57a6e |
class subMessageTypeType (pyxb.binding.datatypes.token): <NEW_LINE> <INDENT> _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'subMessageTypeType') <NEW_LINE> _XSDLocation = pyxb.utils.utility.Location('http://www.ech.ch/xmlns/eCH-0058/3/eCH-0058-3-0.xsd', 91, 1) <NEW_LINE> _Documentation = None | An atomic simple type. | 62598fa932920d7e50bc5fa6 |
class FlaskConfig(object): <NEW_LINE> <INDENT> DEBUG = True <NEW_LINE> SECRET_KEY = u'j,*()PL<NERTYSD@#$%^' | Flask app 配置 | 62598fa9462c4b4f79dbb95e |
class Normal(D.Normal): <NEW_LINE> <INDENT> def mode(self): <NEW_LINE> <INDENT> return self.mean <NEW_LINE> <DEDENT> def log_prob(self, ac): <NEW_LINE> <INDENT> return super().log_prob(ac).sum(-1) <NEW_LINE> <DEDENT> def entropy(self): <NEW_LINE> <INDENT> return super().entropy().sum(-1) <NEW_LINE> <DEDENT> def kl(self, other): <NEW_LINE> <INDENT> t1 = torch.log(other.stddev / self.stddev).sum(dim=1) <NEW_LINE> t2 = ((self.variance + (self.mean - other.mean).pow(2)) / (2.0 * other.variance)).sum(dim=1) <NEW_LINE> return t1 + t2 - 0.5 * self.mean.shape[1] <NEW_LINE> <DEDENT> def to_tensors(self): <NEW_LINE> <INDENT> return {'loc': self.mean, 'scale': self.stddev} <NEW_LINE> <DEDENT> def from_tensors(self, tensors): <NEW_LINE> <INDENT> return Normal(**tensors) | Normal Distribution. | 62598fa960cbc95b0636429e |
class DiscussionAddNoteCmd(Command): <NEW_LINE> <INDENT> def get_title(my): <NEW_LINE> <INDENT> return "Added a note" <NEW_LINE> <DEDENT> def execute(my): <NEW_LINE> <INDENT> search_key = my.kwargs.get("search_key") <NEW_LINE> sobject = Search.get_by_search_key(search_key) <NEW_LINE> ticket = my.kwargs.get('ticket') <NEW_LINE> note = my.kwargs.get("note") <NEW_LINE> mail_cc = my.kwargs.get("mail_cc") <NEW_LINE> mail_bcc = my.kwargs.get("mail_bcc") <NEW_LINE> if mail_cc: <NEW_LINE> <INDENT> mail_cc = mail_cc.split(',') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> mail_cc = [] <NEW_LINE> <DEDENT> if mail_bcc: <NEW_LINE> <INDENT> mail_bcc = mail_bcc.split(',') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> mail_bcc = [] <NEW_LINE> <DEDENT> process = my.kwargs.get("process") <NEW_LINE> context = my.kwargs.get("context") <NEW_LINE> if not context: <NEW_LINE> <INDENT> context = process <NEW_LINE> <DEDENT> from pyasm.biz import Note <NEW_LINE> note = Note.create(sobject, note, context=context, process=process) <NEW_LINE> subject = 'Added Note' <NEW_LINE> message = 'The following note has been added for [%s]:\n%s '%(sobject.get_code(), note.get_value('note')) <NEW_LINE> project_code = Project.get_project_code() <NEW_LINE> users = [] <NEW_LINE> users.extend(mail_cc) <NEW_LINE> users.extend(mail_bcc) <NEW_LINE> if len(users) > 0: <NEW_LINE> <INDENT> EmailTrigger2.add_notification(users, subject, message, project_code) <NEW_LINE> EmailTrigger2.send([],[],[], subject, message, cc_emails=mail_cc,bcc_emails=mail_bcc) <NEW_LINE> <DEDENT> from pyasm.checkin import FileCheckin <NEW_LINE> files = my.kwargs.get("files") <NEW_LINE> upload_dir = Environment.get_upload_dir(ticket) <NEW_LINE> for i, path in enumerate(files): <NEW_LINE> <INDENT> path = path.replace("\\", "/") <NEW_LINE> basename = os.path.basename(path) <NEW_LINE> basename = File.get_filesystem_name(basename) <NEW_LINE> new_path = "%s/%s" % (upload_dir, basename) <NEW_LINE> context = "publish" <NEW_LINE> file_paths = 
[new_path] <NEW_LINE> source_paths = [new_path] <NEW_LINE> file_types = ['main'] <NEW_LINE> if os.path.isfile(new_path): <NEW_LINE> <INDENT> icon_creator = IconCreator(new_path) <NEW_LINE> icon_creator.execute() <NEW_LINE> web_path = icon_creator.get_web_path() <NEW_LINE> icon_path = icon_creator.get_icon_path() <NEW_LINE> if web_path: <NEW_LINE> <INDENT> file_paths = [new_path, web_path, icon_path] <NEW_LINE> file_types = ['main', 'web', 'icon'] <NEW_LINE> source_paths.append(web_path) <NEW_LINE> source_paths.append(icon_path) <NEW_LINE> <DEDENT> <DEDENT> checkin = FileCheckin(note, file_paths= file_paths, file_types = file_types, source_paths=source_paths, context=context) <NEW_LINE> checkin.execute() | this is the UI to add note when someone clicks on the + button. It does not contain the + button | 62598fa9097d151d1a2c0f7a |
class Community(): <NEW_LINE> <INDENT> def __init__(self, bot): <NEW_LINE> <INDENT> self.bot = bot <NEW_LINE> self._source_url = "https://github.com/F4stZ4p/BadBot" <NEW_LINE> <DEDENT> @property <NEW_LINE> def source_url(self): <NEW_LINE> <INDENT> return self._source_url <NEW_LINE> <DEDENT> @commands.command() <NEW_LINE> async def source(self, ctx, *, command: str = None): <NEW_LINE> <INDENT> if command is None: <NEW_LINE> <INDENT> return await ctx.send(self.source_url) <NEW_LINE> <DEDENT> object = self.bot.get_command(command.replace('.', ' ')) <NEW_LINE> if object is None: <NEW_LINE> <INDENT> return await ctx.send('Command not found') <NEW_LINE> <DEDENT> src = object.callback.__code__ <NEW_LINE> lines, firstlineno = inspect.getsourcelines(src) <NEW_LINE> if not object.callback.__module__.startswith('discord'): <NEW_LINE> <INDENT> location = os.path.relpath(src.co_filename).replace('\\', '/') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> location = object.callback.__module__.replace('.', '/') + '.py' <NEW_LINE> <DEDENT> await ctx.send(f'<{self.source_url}/blob/master/{location}#L{firstlineno}-L{firstlineno + len(lines) - 1}>') | Commands for The Bad Server community | 62598fa944b2445a339b6918 |
class MockResponse: <NEW_LINE> <INDENT> def __init__(self, json_data): <NEW_LINE> <INDENT> self.json_data = json_data <NEW_LINE> <DEDENT> def json(self): <NEW_LINE> <INDENT> return self.json_data <NEW_LINE> <DEDENT> @property <NEW_LINE> def ok(self): <NEW_LINE> <INDENT> return True | Class to mock requests.Response objects,
feel free to adjust it to match the use case | 62598fa9925a0f43d25e7f90 |
class _DefinitionsMerger(object): <NEW_LINE> <INDENT> _MAX_NUM_FILE_NAMES = 5 <NEW_LINE> def merge(self, first, second): <NEW_LINE> <INDENT> if (not isinstance(first, VcfHeaderDefinitions) or not isinstance(first, VcfHeaderDefinitions)): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> self._merge_definitions(first.infos, second.infos) <NEW_LINE> self._merge_definitions(first.formats, second.formats) <NEW_LINE> <DEDENT> def _merge_definitions( self, first, second ): <NEW_LINE> <INDENT> for key, definitions_to_files_map in second.iteritems(): <NEW_LINE> <INDENT> for definition, file_names in definitions_to_files_map.iteritems(): <NEW_LINE> <INDENT> first[key].setdefault(definition, []) <NEW_LINE> first[key][definition].extend(str(s) for s in file_names) <NEW_LINE> first[key][definition] = ( first[key][definition][:self._MAX_NUM_FILE_NAMES]) | Class for merging two ``VcfHeaderDefinitions``s. | 62598fa97047854f4633f32a |
class Solution41: <NEW_LINE> <INDENT> def first_missing_positive(self, nums: List[int]) -> int: <NEW_LINE> <INDENT> size = len(nums) <NEW_LINE> for i in range(size): <NEW_LINE> <INDENT> while 1 <= nums[i] <= size and nums[i] != nums[nums[i] - 1]: <NEW_LINE> <INDENT> self.swap(nums, i, nums[i] - 1) <NEW_LINE> <DEDENT> <DEDENT> for i in range(size): <NEW_LINE> <INDENT> if i + 1 != nums[i]: <NEW_LINE> <INDENT> return i + 1 <NEW_LINE> <DEDENT> <DEDENT> return size + 1 <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def swap(nums, index1, index2): <NEW_LINE> <INDENT> nums[index1], nums[index2] = nums[index2], nums[index1] | 41. 缺失的第一个正数
https://leetcode-cn.com/problems/first-missing-positive/
给你一个未排序的整数数组 nums ,请你找出其中没有出现的最小的正整数。
请你实现时间复杂度为 O(n) 并且只使用常数级别额外空间的解决方案。
输入:nums = [1,2,0]
输出:3
输入:nums = [3,4,-1,1]
输出:2 | 62598fa9460517430c432005 |
class GerritAccessor(object): <NEW_LINE> <INDENT> def __init__(self, host): <NEW_LINE> <INDENT> self.host = host <NEW_LINE> self.cache = {} <NEW_LINE> <DEDENT> def _FetchChangeDetail(self, issue): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return gerrit_util.GetChangeDetail( self.host, str(issue), ['ALL_REVISIONS', 'DETAILED_LABELS', 'ALL_COMMITS']) <NEW_LINE> <DEDENT> except gerrit_util.GerritError as e: <NEW_LINE> <INDENT> if e.http_status == 404: <NEW_LINE> <INDENT> raise Exception('Either Gerrit issue %s doesn\'t exist, or ' 'no credentials to fetch issue details' % issue) <NEW_LINE> <DEDENT> raise <NEW_LINE> <DEDENT> <DEDENT> def GetChangeInfo(self, issue): <NEW_LINE> <INDENT> assert issue <NEW_LINE> cache_key = int(issue) <NEW_LINE> if cache_key not in self.cache: <NEW_LINE> <INDENT> self.cache[cache_key] = self._FetchChangeDetail(issue) <NEW_LINE> <DEDENT> return self.cache[cache_key] <NEW_LINE> <DEDENT> def GetChangeDescription(self, issue, patchset=None): <NEW_LINE> <INDENT> info = self.GetChangeInfo(issue) <NEW_LINE> if patchset is not None: <NEW_LINE> <INDENT> for rev, rev_info in info['revisions'].items(): <NEW_LINE> <INDENT> if str(rev_info['_number']) == str(patchset): <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> raise Exception('patchset %s doesn\'t exist in issue %s' % ( patchset, issue)) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> rev = info['current_revision'] <NEW_LINE> rev_info = info['revisions'][rev] <NEW_LINE> <DEDENT> return rev_info['commit']['message'] <NEW_LINE> <DEDENT> def GetDestRef(self, issue): <NEW_LINE> <INDENT> ref = self.GetChangeInfo(issue)['branch'] <NEW_LINE> if not ref.startswith('refs/'): <NEW_LINE> <INDENT> ref = 'refs/heads/%s' % ref <NEW_LINE> <DEDENT> return ref <NEW_LINE> <DEDENT> def _GetApproversForLabel(self, issue, label): <NEW_LINE> <INDENT> change_info = self.GetChangeInfo(issue) <NEW_LINE> label_info = change_info.get('labels', {}).get(label, {}) <NEW_LINE> values 
= label_info.get('values', {}).keys() <NEW_LINE> if not values: <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> max_value = max(int(v) for v in values) <NEW_LINE> return [v for v in label_info.get('all', []) if v.get('value', 0) == max_value] <NEW_LINE> <DEDENT> def IsBotCommitApproved(self, issue): <NEW_LINE> <INDENT> return bool(self._GetApproversForLabel(issue, 'Bot-Commit')) <NEW_LINE> <DEDENT> def IsOwnersOverrideApproved(self, issue): <NEW_LINE> <INDENT> return bool(self._GetApproversForLabel(issue, 'Owners-Override')) <NEW_LINE> <DEDENT> def GetChangeOwner(self, issue): <NEW_LINE> <INDENT> return self.GetChangeInfo(issue)['owner']['email'] <NEW_LINE> <DEDENT> def GetChangeReviewers(self, issue, approving_only=True): <NEW_LINE> <INDENT> changeinfo = self.GetChangeInfo(issue) <NEW_LINE> if approving_only: <NEW_LINE> <INDENT> reviewers = self._GetApproversForLabel(issue, 'Code-Review') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> reviewers = changeinfo.get('reviewers', {}).get('REVIEWER', []) <NEW_LINE> <DEDENT> return [r.get('email') for r in reviewers] <NEW_LINE> <DEDENT> def UpdateDescription(self, description, issue): <NEW_LINE> <INDENT> gerrit_util.SetCommitMessage(self.host, issue, description, notify='NONE') | Limited Gerrit functionality for canned presubmit checks to work.
To avoid excessive Gerrit calls, caches the results. | 62598fa92ae34c7f260ab033 |
class Subtree(TreeElement, TreeMixin): <NEW_LINE> <INDENT> def __init__(self, branch_length=None, name=None, clades=None): <NEW_LINE> <INDENT> self.clades = clades or [] <NEW_LINE> self.name = name <NEW_LINE> self.branch_length = branch_length <NEW_LINE> <DEDENT> @property <NEW_LINE> def root(self): <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> def is_terminal(self): <NEW_LINE> <INDENT> return (not self.clades) <NEW_LINE> <DEDENT> def __getitem__(self, index): <NEW_LINE> <INDENT> if isinstance(index, int) or isinstance(index, slice): <NEW_LINE> <INDENT> return self.clades[index] <NEW_LINE> <DEDENT> ref = self <NEW_LINE> for idx in index: <NEW_LINE> <INDENT> ref = ref[idx] <NEW_LINE> <DEDENT> return ref <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> return iter(self.clades) <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return len(self.clades) | A recursively defined subtree.
@param branch_length:
The length of the branch leading to the root node of this subtree.
@type branch_length: str
@param name: The name (label) of this node.
@type name: str
@param clades: Sub-trees rooted directly under this tree's root.
@type clades: list | 62598fa93317a56b869be4f3 |
class LoadManifestTest(cros_test_lib.TempDirTestCase): <NEW_LINE> <INDENT> manifest_content = ( '<?xml version="1.0" ?><manifest>' '<pending_commit branch="master" ' 'change_id="Ieeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee1" ' 'commit="1ddddddddddddddddddddddddddddddddddddddd" ' 'fail_count="2" gerrit_number="17000" owner_email="foo@chromium.org" ' 'pass_count="0" patch_number="2" project="chromiumos/taco/bar" ' 'project_url="https://base_url/chromiumos/taco/bar" ' 'ref="refs/changes/51/17000/2" remote="cros" total_fail_count="3"/>' '</manifest>') <NEW_LINE> def setUp(self): <NEW_LINE> <INDENT> self.pool = MakePool() <NEW_LINE> <DEDENT> def testAddPendingCommitsIntoPool(self): <NEW_LINE> <INDENT> with tempfile.NamedTemporaryFile() as f: <NEW_LINE> <INDENT> f.write(self.manifest_content) <NEW_LINE> f.flush() <NEW_LINE> self.pool.AddPendingCommitsIntoPool(f.name) <NEW_LINE> <DEDENT> self.assertEqual(self.pool.changes[0].owner_email, 'foo@chromium.org') <NEW_LINE> self.assertEqual(self.pool.changes[0].tracking_branch, 'master') <NEW_LINE> self.assertEqual(self.pool.changes[0].remote, 'cros') <NEW_LINE> self.assertEqual(self.pool.changes[0].gerrit_number, '17000') <NEW_LINE> self.assertEqual(self.pool.changes[0].project, 'chromiumos/taco/bar') <NEW_LINE> self.assertEqual(self.pool.changes[0].project_url, 'https://base_url/chromiumos/taco/bar') <NEW_LINE> self.assertEqual(self.pool.changes[0].change_id, 'Ieeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee1') <NEW_LINE> self.assertEqual(self.pool.changes[0].commit, '1ddddddddddddddddddddddddddddddddddddddd') <NEW_LINE> self.assertEqual(self.pool.changes[0].fail_count, 2) <NEW_LINE> self.assertEqual(self.pool.changes[0].pass_count, 0) <NEW_LINE> self.assertEqual(self.pool.changes[0].total_fail_count, 3) | Tests loading the manifest. | 62598fa94f6381625f199467 |
class Phase(Enum): <NEW_LINE> <INDENT> degrees = "DEG" <NEW_LINE> radians = "RAD" <NEW_LINE> group_delay = "GROUPD" | Enum containing valid phase units for a math channel on the
TekDPO70000 series oscilloscope. | 62598fa9cb5e8a47e493c121 |
class KrakenCrawler(CrawlerBase): <NEW_LINE> <INDENT> expected_name = 'Kraken' <NEW_LINE> def __init__(self, exchange): <NEW_LINE> <INDENT> super().__init__(exchange) <NEW_LINE> if self.exchange.name != KrakenCrawler.expected_name: <NEW_LINE> <INDENT> raise TypeError('Mismatched Exchange') <NEW_LINE> <DEDENT> self.code_mapping = { 'BTC': 'XBT', 'XBT': 'BTC' } <NEW_LINE> <DEDENT> def parse_pair_orderbook(self, response): <NEW_LINE> <INDENT> bids = [] <NEW_LINE> asks = [] <NEW_LINE> if response: <NEW_LINE> <INDENT> orderbook = json.loads(str(response).replace('\'', '"')) <NEW_LINE> if "result" in orderbook: <NEW_LINE> <INDENT> orderbook = orderbook["result"] <NEW_LINE> <DEDENT> if orderbook: <NEW_LINE> <INDENT> orderbook = orderbook[list(orderbook.keys())[0]] <NEW_LINE> <DEDENT> if "bids" in orderbook: <NEW_LINE> <INDENT> bids = [[float(bid[0]), float(bid[1])] for bid in orderbook["bids"]] <NEW_LINE> <DEDENT> if "asks" in orderbook: <NEW_LINE> <INDENT> asks = [[float(ask[0]), float(ask[1])] for ask in orderbook["asks"]] <NEW_LINE> <DEDENT> <DEDENT> return bids, asks <NEW_LINE> <DEDENT> def parse_pair_ticker(self, response): <NEW_LINE> <INDENT> last_bid = None <NEW_LINE> last_ask = None <NEW_LINE> if response: <NEW_LINE> <INDENT> ticker = json.loads(str(response).replace('\'', '"')) <NEW_LINE> if "result" in ticker: <NEW_LINE> <INDENT> ticker = ticker["result"] <NEW_LINE> <DEDENT> if ticker: <NEW_LINE> <INDENT> ticker = ticker[list(ticker.keys())[0]] <NEW_LINE> <DEDENT> if "b" in ticker: <NEW_LINE> <INDENT> last_bid = float(ticker["b"][0]) <NEW_LINE> <DEDENT> if "a" in ticker: <NEW_LINE> <INDENT> last_ask = float(ticker["a"][0]) <NEW_LINE> <DEDENT> <DEDENT> return last_bid, last_ask <NEW_LINE> <DEDENT> def fix_currency_code(self, code): <NEW_LINE> <INDENT> if self.code_mapping: <NEW_LINE> <INDENT> if code in self.code_mapping: <NEW_LINE> <INDENT> return self.code_mapping[code] <NEW_LINE> <DEDENT> <DEDENT> return code | Kraken exchange crawler.
Exchange url: https://www.kraken.com
Orderbook api: https://api.kraken.com/0/public/Depth?pair={}{}
Orderbook eg: {"error":[],"result":{"X{}Z{}":{"bids": [["18295.00000","0.011",1513689432]],
"asks": [["18328.30000","0.006",1513689450]]}}}
Ticker api: https://api.kraken.com/0/public/Ticker?pair={}{}
Ticker eg: {"error":[],"result":{"X{}Z{}":{"a":["18286.50000","1","1.000"],"b":["18225.10000","2","2.000"],
"c":["18286.50000","0.00314585"],"v":["2251.57215593","3533.43568912"],
"p":["18499.30405","18556.15103"],"t":[15947,27773],"l":["17712.90000","17712.90000"],
"h":["18946.80000","19000.00000"],"o":"18832.80000"}}} | 62598fa97d43ff24874273ab |
class MusicRepeatMode(str, Enum): <NEW_LINE> <INDENT> LOOP = 'loop' <NEW_LINE> PLAY_ONCE = 'play_once' | Available music repeat modes. | 62598fa92ae34c7f260ab034 |
@register.tag('form') <NEW_LINE> class FormNode(Node): <NEW_LINE> <INDENT> def __init__(self, parser, token): <NEW_LINE> <INDENT> bits = token.split_contents() <NEW_LINE> remaining_bits = bits[1:] <NEW_LINE> self.kwargs = token_kwargs(remaining_bits, parser) <NEW_LINE> if remaining_bits: <NEW_LINE> <INDENT> raise TemplateSyntaxError("%r received an invalid token: %r" % (bits[0], remaining_bits[0])) <NEW_LINE> <DEDENT> for key in self.kwargs: <NEW_LINE> <INDENT> if key not in ('form', 'layout', 'template'): <NEW_LINE> <INDENT> raise TemplateSyntaxError("%r received an invalid key: %r" % (bits[0], key)) <NEW_LINE> <DEDENT> self.kwargs[key] = self.kwargs[key] <NEW_LINE> <DEDENT> self.nodelist = parser.parse(('end{}'.format(bits[0]),)) <NEW_LINE> parser.delete_first_token() <NEW_LINE> <DEDENT> def render(self, context): <NEW_LINE> <INDENT> form = self.kwargs.get('form') <NEW_LINE> form = form.resolve(context) if form else context.get('form') <NEW_LINE> if form is None: <NEW_LINE> <INDENT> return '' <NEW_LINE> <DEDENT> layout = self.kwargs.get('layout') <NEW_LINE> if layout is not None: <NEW_LINE> <INDENT> layout = layout.resolve(context) <NEW_LINE> <DEDENT> if layout is None: <NEW_LINE> <INDENT> if 'view' in context: <NEW_LINE> <INDENT> view = context['view'] <NEW_LINE> if hasattr(view, 'layout'): <NEW_LINE> <INDENT> layout = view.layout <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> if layout is None: <NEW_LINE> <INDENT> if hasattr(form, 'layout'): <NEW_LINE> <INDENT> layout = form.layout <NEW_LINE> <DEDENT> <DEDENT> template_name = self.kwargs.get('template', 'material/form.html') <NEW_LINE> template = get_template(template_name) <NEW_LINE> parts = defaultdict(dict) <NEW_LINE> attrs = defaultdict(dict) <NEW_LINE> with context.push( form=form, layout=layout, form_template_pack=os.path.dirname(template_name), form_parts=parts, form_widget_attrs=attrs): <NEW_LINE> <INDENT> children = (node for node in self.nodelist if isinstance(node, FormPartNode)) <NEW_LINE> 
_render_parts(context, children) <NEW_LINE> attrs = (node for node in self.nodelist if isinstance(node, WidgetAttrNode)) <NEW_LINE> for attr in attrs: <NEW_LINE> <INDENT> attr.render(context) <NEW_LINE> <DEDENT> children = (node for node in self.nodelist if isinstance(node, IncludeNode)) <NEW_LINE> for included_list in children: <NEW_LINE> <INDENT> included = included_list.template.resolve(context) <NEW_LINE> children = (node for node in included.nodelist if isinstance(node, FormPartNode)) <NEW_LINE> _render_parts(context, children) <NEW_LINE> attrs = (node for node in self.nodelist if isinstance(node, WidgetAttrNode)) <NEW_LINE> for attr in attrs: <NEW_LINE> <INDENT> attr.render(context) <NEW_LINE> <DEDENT> <DEDENT> return template.render(context.flatten()) | Template based form rendering
Example::
{% form template='material/form.html' form=form layout=view.layout %}
{% part form.email prepend %}<span class="input-group-addon" id="basic-addon1">@</span>{% endpart %}
{% endform %} | 62598fa967a9b606de545f1e |
class Component(object): <NEW_LINE> <INDENT> def __init__(self, x, y, rot=0, name=''): <NEW_LINE> <INDENT> self.x = x <NEW_LINE> self.y = y <NEW_LINE> self.rot = rot <NEW_LINE> self.name = name <NEW_LINE> <DEDENT> def __getitem__(self, i): <NEW_LINE> <INDENT> if isinstance(i, str): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> pin = [p for p in self.pins if p.name == i][0] <NEW_LINE> <DEDENT> except IndexError: <NEW_LINE> <INDENT> raise IndexError("No pin with name %s" % i) <NEW_LINE> <DEDENT> <DEDENT> elif isinstance(i, int): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> pin = self.pins[i-1] <NEW_LINE> <DEDENT> except IndexError: <NEW_LINE> <INDENT> raise IndexError("Pin %i is not in array" %i) <NEW_LINE> <DEDENT> <DEDENT> return BoundPin(pin, self) <NEW_LINE> <DEDENT> @property <NEW_LINE> def pads(self): <NEW_LINE> <INDENT> pads = reduce(operator.add, [p.pad for p in self.pins]) <NEW_LINE> return s2d.move(s2d.rotate(pads, self.rot), self.x, self.y) <NEW_LINE> <DEDENT> @property <NEW_LINE> def pin_labels(self): <NEW_LINE> <INDENT> L = [] <NEW_LINE> for p in self.pins: <NEW_LINE> <INDENT> p = BoundPin(p, self) <NEW_LINE> if p.pin.name: <NEW_LINE> <INDENT> L.append(text(p.pin.name, p.x, p.y, 0.03)) <NEW_LINE> <DEDENT> <DEDENT> return reduce(operator.add, L) if L else None <NEW_LINE> <DEDENT> @property <NEW_LINE> def label(self): <NEW_LINE> <INDENT> return text(self.name, self.x, self.y, 0.03) | Generic PCB component.
| 62598fa9aad79263cf42e727 |
class VectorSensor(InputGadget): <NEW_LINE> <INDENT> def value_x(self): <NEW_LINE> <INDENT> raise NotImplementedError(NOT_IMPLEMENTED_MSG) <NEW_LINE> <DEDENT> def value_y(self): <NEW_LINE> <INDENT> raise NotImplementedError(NOT_IMPLEMENTED_MSG) <NEW_LINE> <DEDENT> def value_z(self): <NEW_LINE> <INDENT> raise NotImplementedError(NOT_IMPLEMENTED_MSG) | any gadget which value of interest is a physical vector quantity should inherit this. | 62598fa992d797404e388b0e |
class AVLTreeElement(BSTElement): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs) -> None: <NEW_LINE> <INDENT> if len(args) != 0: <NEW_LINE> <INDENT> if len(args) != 2: <NEW_LINE> <INDENT> raise ValueError("Must contain 0 or 2 positional parameters") <NEW_LINE> <DEDENT> super(AVLTreeElement, self).__init__(key=args[0], e=args[1]) <NEW_LINE> <DEDENT> if 'key' in kwargs and 'e' in kwargs: <NEW_LINE> <INDENT> super(AVLTreeElement, self).__init__(key=kwargs['key'], e=kwargs['e']) <NEW_LINE> <DEDENT> elif 'key' in kwargs: <NEW_LINE> <INDENT> super(AVLTreeElement, self).key = kwargs['key'] <NEW_LINE> <DEDENT> elif 'e' in kwargs: <NEW_LINE> <INDENT> super(AVLTreeElement, self).__init__(e=kwargs['e']) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> super(AVLTreeElement, self).__init__() <NEW_LINE> <DEDENT> self._height = self._bal_factor = 0 <NEW_LINE> <DEDENT> def get_data_structure_type(self) -> str: <NEW_LINE> <INDENT> return "AVLTree" <NEW_LINE> <DEDENT> @property <NEW_LINE> def key(self) -> str: <NEW_LINE> <INDENT> return super(AVLTreeElement, self).key <NEW_LINE> <DEDENT> @property <NEW_LINE> def height(self) -> int: <NEW_LINE> <INDENT> return self._height <NEW_LINE> <DEDENT> @height.setter <NEW_LINE> def height(self, value: int) -> None: <NEW_LINE> <INDENT> self._height = value <NEW_LINE> <DEDENT> @property <NEW_LINE> def balance_factor(self) -> int: <NEW_LINE> <INDENT> return self._bal_factor <NEW_LINE> <DEDENT> @balance_factor.setter <NEW_LINE> def balance_factor(self, value: int) -> None: <NEW_LINE> <INDENT> self._bal_factor = value <NEW_LINE> <DEDENT> @property <NEW_LINE> def left(self): <NEW_LINE> <INDENT> return super(AVLTreeElement, self).left <NEW_LINE> <DEDENT> @left.setter <NEW_LINE> def left(self, val) -> None: <NEW_LINE> <INDENT> self.set_child(0, val) <NEW_LINE> <DEDENT> @property <NEW_LINE> def right(self): <NEW_LINE> <INDENT> return super(AVLTreeElement, self).right <NEW_LINE> <DEDENT> @right.setter <NEW_LINE> def right(self, val) -> None: 
<NEW_LINE> <INDENT> self.set_child(1, val) <NEW_LINE> <DEDENT> def get_element_representation(self) -> dict: <NEW_LINE> <INDENT> orig_json_str = super(AVLTreeElement, self).get_element_representation() <NEW_LINE> avl_json = { "height": self.height, "balance_factor": self.balance_factor } <NEW_LINE> orig_json_str.update(avl_json) <NEW_LINE> return orig_json_str | @brief This class extends the BSTElement class by adding a height and balance factor fields that are useful in AVL trees.
AVL tree elements include a 'height' and a 'balance_factor' value,
representing the height and balance factor of the AVL tree at
that node, respectively. This is useful in representing
AVL trees.
AVLTree elements contain a visualizer (ElementVisualizer) object for setting visual
attributes (color, shape, opacity, size), necessary for displaying them in a
web browser.
AVLTree elements also have a LinkVisualizer object, that is used when they are
linked to another element, appropriate for setting link attributes, for instance,
between the current element and its left or right child
@author Kalpathi Subramanian, Mihai Mehedint, Matthew McQuaigue
@date 6/22/16, 1/7/17, 5/17/17, 6/09/19
Example tutorial using AVLTreeElement at: https://bridgesuncc.github.io/tutorials/AVL.html | 62598fa9b7558d5895463582 |
class ToTensor(object): <NEW_LINE> <INDENT> def __call__(self, sample): <NEW_LINE> <INDENT> return {'points': torch.from_numpy(sample['points']).type(torch.FloatTensor), 'seg': torch.tensor(sample['seg']).type(torch.LongTensor)} | Convert ndarrays in sample to Tensors. | 62598fa9d7e4931a7ef3bfee |
class User(AbstractUser): <NEW_LINE> <INDENT> id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False) <NEW_LINE> phone_number = models.CharField(max_length=250) <NEW_LINE> first_name = models.CharField(null=True, max_length=30) <NEW_LINE> last_name = models.CharField(null=True, max_length=150) <NEW_LINE> registered = models.DateTimeField(datetime.now, blank=True, null=True) <NEW_LINE> username = None <NEW_LINE> email = models.EmailField(_('email address'), unique=True) <NEW_LINE> USERNAME_FIELD = 'email' <NEW_LINE> REQUIRED_FIELDS = [] <NEW_LINE> objects = UserManager() | Define a user model. | 62598fa9adb09d7d5dc0a4de |
class SquareNumberApp(App): <NEW_LINE> <INDENT> def build(self): <NEW_LINE> <INDENT> Window.size = (800, 300) <NEW_LINE> self.title = "Square Number" <NEW_LINE> self.root = Builder.load_file('squaring.kv') <NEW_LINE> return self.root <NEW_LINE> <DEDENT> def handle_calculate(self): <NEW_LINE> <INDENT> value = float(self.root.ids.input_number.text) <NEW_LINE> result = value ** 2 <NEW_LINE> self.root.ids.output_label.text = str(result) | SquareNumberApp is a Kivy App for squaring a number | 62598fa94527f215b58e9e35 |
class CacheClearEvent(object): <NEW_LINE> <INDENT> swagger_types = { 'teardown': 'bool' } <NEW_LINE> attribute_map = { 'teardown': 'teardown' } <NEW_LINE> def __init__(self, teardown=None): <NEW_LINE> <INDENT> self._teardown = None <NEW_LINE> self.discriminator = None <NEW_LINE> if teardown is not None: <NEW_LINE> <INDENT> self.teardown = teardown <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def teardown(self): <NEW_LINE> <INDENT> return self._teardown <NEW_LINE> <DEDENT> @teardown.setter <NEW_LINE> def teardown(self, teardown): <NEW_LINE> <INDENT> self._teardown = teardown <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in iteritems(self.swagger_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, CacheClearEvent): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other | NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually. | 62598fa9097d151d1a2c0f7c |
class CowrieServer(object): <NEW_LINE> <INDENT> def __init__(self, realm): <NEW_LINE> <INDENT> self.cfg = realm.cfg <NEW_LINE> self.avatars = [] <NEW_LINE> self.hostname = self.cfg.get('honeypot', 'hostname') <NEW_LINE> self.fs = fs.HoneyPotFilesystem(copy.deepcopy(fs.PICKLE),self.cfg) | In traditional Kippo each connection gets its own simulated machine.
This is not always ideal, sometimes two connections come from the same
source IP address. we want to give them the same environment as well.
So files uploaded through SFTP are visible in the SSH session.
This class represents a 'virtual server' that can be shared between
multiple Cowrie connections | 62598fa93617ad0b5ee060a7 |
class InvalidParameterError(EncodingExceptionBase): <NEW_LINE> <INDENT> def __init__(self, param: str): <NEW_LINE> <INDENT> error = 'Specified parameter given is invalid: {0}'.format(param) <NEW_LINE> super().__init__(error) | Given designation is invalid in either parameter or settings.
Does not match encoding.com allows | 62598fa944b2445a339b6919 |
class SubClassCollector: <NEW_LINE> <INDENT> def __init__(self, namespace): <NEW_LINE> <INDENT> self.ontology_graphs = {} <NEW_LINE> self.namespace = namespace <NEW_LINE> self.init_skos() <NEW_LINE> <DEDENT> def init_skos(self): <NEW_LINE> <INDENT> self.subclasses = {} <NEW_LINE> self.subclasses[SKOS.Concept] = [ SKOS.Concept, GVP.Concept, GVP.PhysPlaceConcept, GVP.PhysAdminPlaceConcept, GVP.AdminPlaceConcept, GVP.PersonConcept, GVP.UnknownPersonConcept, GVP.GroupConcept ] <NEW_LINE> self.subclasses[SKOS.Collection] = [ SKOS.Collection, SKOS.OrderedCollection, ISO.ThesaurusArray, GVP.Hierarchy, GVP.Facet, GVP.GuideTerm ] <NEW_LINE> <DEDENT> def get_subclasses(self, clazz): <NEW_LINE> <INDENT> return self.subclasses[clazz] <NEW_LINE> <DEDENT> def collect_subclasses(self, clazz): <NEW_LINE> <INDENT> self.subclasses[clazz] = [clazz] <NEW_LINE> if self.namespace not in self.ontology_graphs: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> graph = rdflib.Graph() <NEW_LINE> graph.parse(str(self.namespace), format="application/rdf+xml") <NEW_LINE> self.ontology_graphs[self.namespace] = graph <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> self.ontology_graphs[self.namespace] = None <NEW_LINE> <DEDENT> <DEDENT> g = self.ontology_graphs[self.namespace] <NEW_LINE> if g is not None: <NEW_LINE> <INDENT> for sub, pred, obj in g.triples((None, RDFS.subClassOf, None)): <NEW_LINE> <INDENT> self._is_subclass_of(sub, clazz) <NEW_LINE> <DEDENT> <DEDENT> return self.subclasses[clazz] <NEW_LINE> <DEDENT> def _is_subclass_of(self, subject, clazz): <NEW_LINE> <INDENT> namespace = subject.split('#')[0] + "#" <NEW_LINE> if subject in self.subclasses[clazz]: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> if namespace not in self.ontology_graphs: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> graph = rdflib.Graph() <NEW_LINE> graph.parse(str(namespace), format="application/rdf+xml") <NEW_LINE> self.ontology_graphs[namespace] = graph <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> 
self.ontology_graphs[namespace] = None <NEW_LINE> <DEDENT> <DEDENT> g = self.ontology_graphs[namespace] <NEW_LINE> if g is not None: <NEW_LINE> <INDENT> for sub, pred, obj in g.triples((subject, RDFS.subClassOf, None)): <NEW_LINE> <INDENT> if obj in self.subclasses[clazz]: <NEW_LINE> <INDENT> self.subclasses[clazz].append(subject) <NEW_LINE> return True <NEW_LINE> <DEDENT> if obj == clazz: <NEW_LINE> <INDENT> self.subclasses[clazz].append(subject) <NEW_LINE> return True <NEW_LINE> <DEDENT> if self._is_subclass_of(obj, clazz): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return False | A utility class to collect all the subclasses of a certain Class from an ontology file. | 62598fa9d486a94d0ba2bf21 |
class DirectiveDefinitionNode(TypeSystemDefinitionNode): <NEW_LINE> <INDENT> __slots__ = ("description", "name", "arguments", "locations", "location") <NEW_LINE> def __init__( self, name: "NameNode", locations: List["NameNode"], description: Optional["DescriptionNode"] = None, arguments: Optional[List["InputValueDefinitionNode"]] = None, location: Optional["Location"] = None, ) -> None: <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.locations = locations <NEW_LINE> self.description = description <NEW_LINE> self.arguments = arguments <NEW_LINE> self.location = location <NEW_LINE> <DEDENT> def __eq__(self, other: Any) -> bool: <NEW_LINE> <INDENT> return self is other or ( isinstance(other, DirectiveDefinitionNode) and self.description == other.description and self.name == other.name and self.arguments == other.arguments and self.locations == other.locations and self.location == other.location ) <NEW_LINE> <DEDENT> def __repr__(self) -> str: <NEW_LINE> <INDENT> return ( "DirectiveDefinitionNode(description=%r, name=%r, arguments=%r, " "locations=%r, location=%r)" % ( self.description, self.name, self.arguments, self.locations, self.location, ) ) | AST node representing a GraphQL directive definition. | 62598fa901c39578d7f12cd3 |
class User(AbstractUser): <NEW_LINE> <INDENT> name = CharField( _("Name of User"), blank=True, max_length=255, help_text='The blog app shows this as the author: First Name and Last Initial', ) <NEW_LINE> email = models.EmailField(_('email address'), unique=True) <NEW_LINE> def get_absolute_url(self): <NEW_LINE> <INDENT> return reverse("users:detail", kwargs={"username": self.username}) | Custom User where email is required | 62598fa9d7e4931a7ef3bfef |
class commitSendMessagesToAll_args(object): <NEW_LINE> <INDENT> thrift_spec = ( None, (1, TType.LIST, 'requestIdList', (TType.STRING, 'UTF8', False), None, ), ) <NEW_LINE> def __init__(self, requestIdList=None,): <NEW_LINE> <INDENT> self.requestIdList = requestIdList <NEW_LINE> <DEDENT> def read(self, iprot): <NEW_LINE> <INDENT> if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: <NEW_LINE> <INDENT> iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) <NEW_LINE> return <NEW_LINE> <DEDENT> iprot.readStructBegin() <NEW_LINE> while True: <NEW_LINE> <INDENT> (fname, ftype, fid) = iprot.readFieldBegin() <NEW_LINE> if ftype == TType.STOP: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> if fid == 1: <NEW_LINE> <INDENT> if ftype == TType.LIST: <NEW_LINE> <INDENT> self.requestIdList = [] <NEW_LINE> (_etype393, _size390) = iprot.readListBegin() <NEW_LINE> for _i394 in range(_size390): <NEW_LINE> <INDENT> _elem395 = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString() <NEW_LINE> self.requestIdList.append(_elem395) <NEW_LINE> <DEDENT> iprot.readListEnd() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> iprot.readFieldEnd() <NEW_LINE> <DEDENT> iprot.readStructEnd() <NEW_LINE> <DEDENT> def write(self, oprot): <NEW_LINE> <INDENT> if oprot._fast_encode is not None and self.thrift_spec is not None: <NEW_LINE> <INDENT> oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) <NEW_LINE> return <NEW_LINE> <DEDENT> oprot.writeStructBegin('commitSendMessagesToAll_args') <NEW_LINE> if self.requestIdList is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('requestIdList', TType.LIST, 1) <NEW_LINE> oprot.writeListBegin(TType.STRING, len(self.requestIdList)) <NEW_LINE> for iter396 in self.requestIdList: <NEW_LINE> <INDENT> 
oprot.writeString(iter396.encode('utf-8') if sys.version_info[0] == 2 else iter396) <NEW_LINE> <DEDENT> oprot.writeListEnd() <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> oprot.writeFieldStop() <NEW_LINE> oprot.writeStructEnd() <NEW_LINE> <DEDENT> def validate(self): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] <NEW_LINE> return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not (self == other) | Attributes:
- requestIdList | 62598fa9bd1bec0571e1506d |
class SessionListing(wx.TextCtrl): <NEW_LINE> <INDENT> def __init__(self, parent=None, id=-1,ShellClassName='Shell'): <NEW_LINE> <INDENT> style = (wx.TE_MULTILINE | wx.TE_READONLY | wx.TE_RICH2 | wx.TE_DONTWRAP) <NEW_LINE> wx.TextCtrl.__init__(self, parent, id, style=style) <NEW_LINE> dispatcher.connect(receiver=self.addHistory, signal=ShellClassName+".addHistory") <NEW_LINE> dispatcher.connect(receiver=self.clearHistory, signal=ShellClassName+".clearHistory") <NEW_LINE> dispatcher.connect(receiver=self.loadHistory, signal=ShellClassName+".loadHistory") <NEW_LINE> df = self.GetFont() <NEW_LINE> font = wx.Font(df.GetPointSize(), wx.TELETYPE, wx.NORMAL, wx.NORMAL) <NEW_LINE> self.SetFont(font) <NEW_LINE> <DEDENT> def loadHistory(self, history): <NEW_LINE> <INDENT> hist = history[:] <NEW_LINE> hist.reverse() <NEW_LINE> self.SetValue('\n'.join(hist) + '\n') <NEW_LINE> self.SetInsertionPointEnd() <NEW_LINE> <DEDENT> def addHistory(self, command): <NEW_LINE> <INDENT> if command: <NEW_LINE> <INDENT> self.SetInsertionPointEnd() <NEW_LINE> self.AppendText(command + '\n') <NEW_LINE> <DEDENT> <DEDENT> def clearHistory(self): <NEW_LINE> <INDENT> self.SetValue("") | Text control containing all commands for session. | 62598fa97047854f4633f32d |
class ImageFolder(DatasetFolder): <NEW_LINE> <INDENT> def __init__(self, root, ann_file='', img_prefix='', transform=None, target_transform=None, loader=default_img_loader, cache_mode="no", dataset='ImageNet', two_crop=False, return_coord=False): <NEW_LINE> <INDENT> super(ImageFolder, self).__init__(root, loader, IMG_EXTENSIONS, ann_file=ann_file, img_prefix=img_prefix, transform=transform, target_transform=target_transform, cache_mode=cache_mode, dataset=dataset) <NEW_LINE> self.imgs = self.samples <NEW_LINE> self.two_crop = two_crop <NEW_LINE> self.return_coord = return_coord <NEW_LINE> <DEDENT> def __getitem__(self, index): <NEW_LINE> <INDENT> path, target = self.samples[index] <NEW_LINE> image = self.loader(path) <NEW_LINE> if self.transform is not None: <NEW_LINE> <INDENT> if isinstance(self.transform, tuple) and len(self.transform) == 2: <NEW_LINE> <INDENT> img = self.transform[0](image) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> img = self.transform(image) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> img = image <NEW_LINE> <DEDENT> if self.target_transform is not None: <NEW_LINE> <INDENT> target = self.target_transform(target) <NEW_LINE> <DEDENT> if self.two_crop: <NEW_LINE> <INDENT> if isinstance(self.transform, tuple) and len(self.transform) == 2: <NEW_LINE> <INDENT> img2 = self.transform[1](image) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> img2 = self.transform(image) <NEW_LINE> <DEDENT> <DEDENT> if self.return_coord: <NEW_LINE> <INDENT> assert isinstance(img, tuple) <NEW_LINE> img, coord = img <NEW_LINE> if self.two_crop: <NEW_LINE> <INDENT> img2, coord2 = img2 <NEW_LINE> return img, img2, coord, coord2, index, target <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return img, coord, index, target <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> if isinstance(img, tuple): <NEW_LINE> <INDENT> img, coord = img <NEW_LINE> <DEDENT> if self.two_crop: <NEW_LINE> <INDENT> if isinstance(img2, tuple): <NEW_LINE> <INDENT> img2, coord2 = img2 
<NEW_LINE> <DEDENT> return img, img2, index, target <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return img, index, target | A generic data loader where the images are arranged in this way: ::
root/dog/xxx.png
root/dog/xxy.png
root/dog/xxz.png
root/cat/123.png
root/cat/nsdf3.png
root/cat/asd932_.png
Args:
root (string): Root directory path.
transform (callable, optional): A function/transform that takes in an PIL image
and returns a transformed version. E.g, ``transforms.RandomCrop``
target_transform (callable, optional): A function/transform that takes in the
target and transforms it.
loader (callable, optional): A function to load an image given its path.
Attributes:
imgs (list): List of (image path, class_index) tuples | 62598fa945492302aabfc425 |
class EmptyElement(Element): <NEW_LINE> <INDENT> @property <NEW_LINE> def empty(self): <NEW_LINE> <INDENT> return True | Convenience class that defines an empty element. | 62598fa9a17c0f6771d5c189 |
class VirtualMachineExtensionsListResult(Model): <NEW_LINE> <INDENT> _attribute_map = { 'value': {'key': 'value', 'type': '[VirtualMachineExtension]'}, } <NEW_LINE> def __init__(self, *, value=None, **kwargs) -> None: <NEW_LINE> <INDENT> super(VirtualMachineExtensionsListResult, self).__init__(**kwargs) <NEW_LINE> self.value = value | The List Extension operation response.
:param value: The list of extensions
:type value:
list[~azure.mgmt.compute.v2017_12_01.models.VirtualMachineExtension] | 62598fa97d43ff24874273ac |
class SchemaEnforcingMixin(AllowedSchemaMixin, InvenioRecordMetadataSchemaV1Mixin): <NEW_LINE> <INDENT> ALLOWED_SCHEMAS = ('records/record-v1.0.0.json',) <NEW_LINE> PREFERRED_SCHEMA = 'records/record-v1.0.0.json' <NEW_LINE> @validates('schema') <NEW_LINE> def validate_schema(self, value): <NEW_LINE> <INDENT> self._prepare_schemas() <NEW_LINE> value = convert_relative_schema_to_absolute(value) <NEW_LINE> if value: <NEW_LINE> <INDENT> if value not in self.ALLOWED_SCHEMAS: <NEW_LINE> <INDENT> raise ValidationError('Schema %s not in allowed schemas %s' % (value, self.ALLOWED_SCHEMAS)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> @post_load <NEW_LINE> def add_schema(self, data, **kwargs): <NEW_LINE> <INDENT> self._prepare_schemas() <NEW_LINE> if '$schema' not in data: <NEW_LINE> <INDENT> data['$schema'] = convert_relative_schema_to_absolute(self.PREFERRED_SCHEMA) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._convert_and_get_schema(data) <NEW_LINE> <DEDENT> return data | A marshmallow mixin that enforces that record has only one of predefined schemas. | 62598fa90a50d4780f705331 |
class Character: <NEW_LINE> <INDENT> battle_queue: 'BattleQueue' <NEW_LINE> playstyle: 'Playstyle' <NEW_LINE> def __init__(self, name: str, bq: 'BattleQueue', ps: 'Playstyle') -> None: <NEW_LINE> <INDENT> self._name = name <NEW_LINE> self.battle_queue = bq <NEW_LINE> self.playstyle = ps <NEW_LINE> self._hp = 100 <NEW_LINE> self._sp = 100 <NEW_LINE> self._defense = 0 <NEW_LINE> self.enemy = None <NEW_LINE> self._character_type = '' <NEW_LINE> self._current_state = 'idle' <NEW_LINE> self._current_frame = 0 <NEW_LINE> self._skills = {'A': None, 'S': None } <NEW_LINE> <DEDENT> def get_name(self) -> str: <NEW_LINE> <INDENT> return self._name <NEW_LINE> <DEDENT> def get_hp(self) -> int: <NEW_LINE> <INDENT> return self._hp <NEW_LINE> <DEDENT> def get_sp(self) -> int: <NEW_LINE> <INDENT> return self._sp <NEW_LINE> <DEDENT> def get_next_sprite(self) -> str: <NEW_LINE> <INDENT> sprite_to_return = "{}_{}_{}".format(self._character_type, self._current_state, self._current_frame) <NEW_LINE> self._current_frame += 1 <NEW_LINE> if self._current_frame == 10: <NEW_LINE> <INDENT> self._current_state = 'idle' <NEW_LINE> self._current_frame = 0 <NEW_LINE> <DEDENT> return sprite_to_return <NEW_LINE> <DEDENT> def get_available_actions(self) -> List[str]: <NEW_LINE> <INDENT> available = [] <NEW_LINE> for skill in self._skills: <NEW_LINE> <INDENT> if self.is_valid_action(skill): <NEW_LINE> <INDENT> available.append(skill) <NEW_LINE> <DEDENT> <DEDENT> return available <NEW_LINE> <DEDENT> def is_valid_action(self, action: str) -> bool: <NEW_LINE> <INDENT> if action in self._skills: <NEW_LINE> <INDENT> return self._skills[action].get_sp_cost() <= self._sp <NEW_LINE> <DEDENT> return False <NEW_LINE> <DEDENT> def attack(self) -> None: <NEW_LINE> <INDENT> self._current_state = 'attack' <NEW_LINE> self._current_frame = 0 <NEW_LINE> self._skills['A'].use(self, self.enemy) <NEW_LINE> <DEDENT> def special_attack(self) -> None: <NEW_LINE> <INDENT> self._current_state = 'special' <NEW_LINE> 
self._current_frame = 0 <NEW_LINE> self._skills['S'].use(self, self.enemy) <NEW_LINE> <DEDENT> def reduce_sp(self, cost: int) -> None: <NEW_LINE> <INDENT> self._sp -= cost <NEW_LINE> <DEDENT> def apply_damage(self, damage: int) -> None: <NEW_LINE> <INDENT> damage -= self._defense <NEW_LINE> self._hp -= damage <NEW_LINE> self._hp = max(self._hp, 0) <NEW_LINE> <DEDENT> def set_sp(self, new_sp: int) -> None: <NEW_LINE> <INDENT> self._sp = new_sp <NEW_LINE> <DEDENT> def set_hp(self, new_hp: int) -> None: <NEW_LINE> <INDENT> self._hp = new_hp <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> class_name = self._character_type[0].upper() + self._character_type[1:] <NEW_LINE> return "{} ({}): {}/{}".format(self._name, class_name, self._hp, self._sp) <NEW_LINE> <DEDENT> def copy(self, new_battle_queue: 'BattleQueue') -> 'Character': <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def _set_copy_attributes(self, other: 'Character') -> None: <NEW_LINE> <INDENT> other.set_hp(self._hp) <NEW_LINE> other.set_sp(self._sp) | An abstract superclass for all Characters.
battle_queue - the BattleQueue that this Character will add to.
playstyle - the Playstyle that this Character uses to pick actions.
enemy - the Character that this Character attacks. | 62598fa92c8b7c6e89bd3719 |
class Template(_messages.Message): <NEW_LINE> <INDENT> automaticColumnNames = _messages.StringField(1, repeated=True) <NEW_LINE> body = _messages.StringField(2) <NEW_LINE> kind = _messages.StringField(3, default=u'fusiontables#template') <NEW_LINE> name = _messages.StringField(4) <NEW_LINE> tableId = _messages.StringField(5) <NEW_LINE> templateId = _messages.IntegerField(6, variant=_messages.Variant.INT32) | Represents the contents of InfoWindow templates.
Fields:
automaticColumnNames: List of columns from which the template is to be
automatically constructed. Only one of body or automaticColumns can be
specified.
body: Body of the template. It contains HTML with {column_name} to insert
values from a particular column. The body is sanitized to remove certain
tags, e.g., script. Only one of body or automaticColumns can be
specified.
kind: Type name: a template for the info window contents. The template can
either include an HTML body or a list of columns from which the template
is computed automatically.
name: Optional name assigned to a template.
tableId: Identifier for the table for which the template is defined.
templateId: Identifier for the template, unique within the context of a
particular table. | 62598fa9f548e778e596b4f9 |
class FileShare(models.Model): <NEW_LINE> <INDENT> username = LowerCaseCharField(max_length=255, db_index=True) <NEW_LINE> repo_id = models.CharField(max_length=36, db_index=True) <NEW_LINE> path = models.TextField() <NEW_LINE> token = models.CharField(max_length=10, unique=True) <NEW_LINE> ctime = models.DateTimeField(default=datetime.datetime.now) <NEW_LINE> view_cnt = models.IntegerField(default=0) <NEW_LINE> s_type = models.CharField(max_length=2, db_index=True, default='f') | Model used for file or dir shared link. | 62598fa92ae34c7f260ab036 |
class RideImportSettingAdded(RideImportSetting): <NEW_LINE> <INDENT> pass | Sent whenever an import setting is added.
``datafile`` is the suite or resource file whose imports have changed,
``type`` is either ``resource``, ``library``, or ``variables``. | 62598fa9aad79263cf42e729 |
class Usage(models.Model): <NEW_LINE> <INDENT> added = models.DateTimeField(auto_now_add=True) <NEW_LINE> app_version_target_config = models.ForeignKey( ApplicationVersionTargetConfig, on_delete=models.CASCADE, related_name="+", null=False) <NEW_LINE> app_deployment = models.ForeignKey( ApplicationDeployment, on_delete=models.SET_NULL, related_name="+", null=True) <NEW_LINE> app_config = models.TextField(max_length=1024 * 16, blank=True, null=True) <NEW_LINE> user = models.ForeignKey( settings.AUTH_USER_MODEL, on_delete=models.CASCADE, null=False) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> ordering = ['added'] <NEW_LINE> verbose_name_plural = 'Usage' | Keep some usage information about instances that are being launched. | 62598fa94a966d76dd5eee37 |
class RobustProteinAnalysis(ProteinAnalysis): <NEW_LINE> <INDENT> PROTEIN_LETTERS = set(Bio.Data.IUPACData.protein_letters) <NEW_LINE> def __init__(self, prot_sequence: str, monoisotopic: bool = False, ignore_invalid: bool = True) -> None: <NEW_LINE> <INDENT> if not isinstance(ignore_invalid, bool): <NEW_LINE> <INDENT> raise TypeError("ignore_invalid must be a boolean") <NEW_LINE> <DEDENT> self._ignore_invalid = ignore_invalid <NEW_LINE> prot_sequence = prot_sequence.upper() <NEW_LINE> self.original_sequence = prot_sequence <NEW_LINE> prot_sequence = "".join(filter(lambda x: x in RobustProteinAnalysis.PROTEIN_LETTERS, self.original_sequence)) <NEW_LINE> super(RobustProteinAnalysis, self).__init__(prot_sequence, monoisotopic) <NEW_LINE> <DEDENT> def molecular_weight(self) -> float: <NEW_LINE> <INDENT> weight = super(RobustProteinAnalysis, self).molecular_weight() <NEW_LINE> if not self._ignore_invalid: <NEW_LINE> <INDENT> aa_difference = len(self.original_sequence) - len(self.sequence) <NEW_LINE> weight += 110 * aa_difference <NEW_LINE> <DEDENT> return weight | A simple subclass of ProteinAnalysis that can deal with
a protein sequence containing invalid characters.
If ignoring invalid characters, the molecular weight is increased by
the average weight of an amino-acid (i.e. 110) for each invalid case. | 62598fa9d486a94d0ba2bf22 |
class ImportImageParameters(msrest.serialization.Model): <NEW_LINE> <INDENT> _validation = { 'source': {'required': True}, } <NEW_LINE> _attribute_map = { 'source': {'key': 'source', 'type': 'ImportSource'}, 'target_tags': {'key': 'targetTags', 'type': '[str]'}, 'untagged_target_repositories': {'key': 'untaggedTargetRepositories', 'type': '[str]'}, 'mode': {'key': 'mode', 'type': 'str'}, } <NEW_LINE> def __init__( self, *, source: "ImportSource", target_tags: Optional[List[str]] = None, untagged_target_repositories: Optional[List[str]] = None, mode: Optional[Union[str, "ImportMode"]] = "NoForce", **kwargs ): <NEW_LINE> <INDENT> super(ImportImageParameters, self).__init__(**kwargs) <NEW_LINE> self.source = source <NEW_LINE> self.target_tags = target_tags <NEW_LINE> self.untagged_target_repositories = untagged_target_repositories <NEW_LINE> self.mode = mode | ImportImageParameters.
All required parameters must be populated in order to send to Azure.
:ivar source: Required. The source of the image.
:vartype source: ~azure.mgmt.containerregistry.v2017_10_01.models.ImportSource
:ivar target_tags: List of strings of the form repo[:tag]. When tag is omitted the source will
be used (or 'latest' if source tag is also omitted).
:vartype target_tags: list[str]
:ivar untagged_target_repositories: List of strings of repository names to do a manifest only
copy. No tag will be created.
:vartype untagged_target_repositories: list[str]
:ivar mode: When Force, any existing target tags will be overwritten. When NoForce, any
existing target tags will fail the operation before any copying begins. Possible values
include: "NoForce", "Force". Default value: "NoForce".
:vartype mode: str or ~azure.mgmt.containerregistry.v2017_10_01.models.ImportMode | 62598fa932920d7e50bc5faa |
class UserSerializer(serializers.ModelSerializer): <NEW_LINE> <INDENT> def validate_email(self, value: str) -> str: <NEW_LINE> <INDENT> if not user_settings["EMAIL_VALIDATION"]: <NEW_LINE> <INDENT> return value <NEW_LINE> <DEDENT> if check_validation(value=value): <NEW_LINE> <INDENT> return value <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise serializers.ValidationError( "The email must be " "pre-validated via OTP." ) <NEW_LINE> <DEDENT> <DEDENT> def validate_mobile(self, value: str) -> str: <NEW_LINE> <INDENT> if not user_settings["MOBILE_VALIDATION"]: <NEW_LINE> <INDENT> return value <NEW_LINE> <DEDENT> if check_validation(value=value): <NEW_LINE> <INDENT> return value <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise serializers.ValidationError( "The mobile must be " "pre-validated via OTP." ) <NEW_LINE> <DEDENT> <DEDENT> def validate_password(self, value: str) -> str: <NEW_LINE> <INDENT> validate_password(value) <NEW_LINE> return value <NEW_LINE> <DEDENT> class Meta: <NEW_LINE> <INDENT> model = User <NEW_LINE> fields = ( "id", "username", "name", "email", "mobile", "password", "is_superuser", "is_staff", ) <NEW_LINE> read_only_fields = ("is_superuser", "is_staff") <NEW_LINE> extra_kwargs = {"password": {"write_only": True}} | UserRegisterSerializer is a model serializer which includes the
attributes that are required for registering a user. | 62598fa9a8370b77170f0330 |
class ElementSigned(ElementUnsigned): <NEW_LINE> <INDENT> signed = True | Signed integer. | 62598fa916aa5153ce400458 |
class Bracket: <NEW_LINE> <INDENT> def __init__(self, bracket_type, position): <NEW_LINE> <INDENT> self.bracket_type = bracket_type <NEW_LINE> self.position = position <NEW_LINE> <DEDENT> def Match(self, c): <NEW_LINE> <INDENT> if self.bracket_type == '[' and c == ']': <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> if self.bracket_type == '{' and c == '}': <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> if self.bracket_type == '(' and c == ')': <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return False | Class for storing 'brackets' | 62598fa9d486a94d0ba2bf23 |
class MinimisationFailedError(PacmanException): <NEW_LINE> <INDENT> pass | Raised when a routing table could not be minimised to reach a
specified target. | 62598fa94f6381625f199469 |
@swagger.tags('Category_Resource') <NEW_LINE> class CategorySHOP_routes(Resource): <NEW_LINE> <INDENT> @swagger.reorder_with(Category_listSHEMA, response_code=200, description="OK") <NEW_LINE> @swagger.reorder_with(MessageSHEMA, response_code=404, description="Shop does not exists") <NEW_LINE> @swagger.parameter(_in='query', name='shop_id', description = 'Filter by shop_id or give all categories', schema={'type': 'integer'}) <NEW_LINE> def get(self): <NEW_LINE> <INDENT> get_parser = RequestParser() <NEW_LINE> get_parser.add_argument('shop_id', type = int) <NEW_LINE> args = get_parser.parse_args() <NEW_LINE> filter_shop = (None == None) <NEW_LINE> if args['shop_id']: <NEW_LINE> <INDENT> shop = Shop.query.filter_by(shop_id = args['shop_id']).first_or_404(description='The shop_id {} does not exist '.format(args['shop_id'])) <NEW_LINE> filter_shop = (Shop.shop_id == args['shop_id']) <NEW_LINE> <DEDENT> result = db.session.query(Category).select_from(TableCategory).join(Shop).join(Category). 
filter(filter_shop).all() <NEW_LINE> return jsonify(to_dict(result,Category_listSHEMA, many = True)) <NEW_LINE> <DEDENT> @swagger.reorder_with(MessageSHEMA, response_code=200, description='OK') <NEW_LINE> @swagger.reorder_with(MessageSHEMA, response_code=409, description="Category exists in shop") <NEW_LINE> @swagger.reorder_with(MessageSHEMA, response_code=404, description="Shop does not exists") <NEW_LINE> @swagger.parameter(_in='query', name='shop_id', schema={'type': 'integer'}, required = True) <NEW_LINE> @swagger.parameter(_in='query', name='category_name', description = 'add category to shop', schema={'type': 'string'}, required = True) <NEW_LINE> def post(self): <NEW_LINE> <INDENT> post_parser = RequestParser() <NEW_LINE> post_parser.add_argument('category_name', type = str, required = True) <NEW_LINE> post_parser.add_argument('shop_id', type = int) <NEW_LINE> args = post_parser.parse_args() <NEW_LINE> shop = Shop.query.filter_by(shop_id = args['shop_id']).first_or_404(description='The shop_id {} does not exist '.format(args['shop_id'])) <NEW_LINE> category = shop.categories.filter_by(category_name = args['category_name']).first() <NEW_LINE> if category: <NEW_LINE> <INDENT> return {'message':'This is category is exists'}, 409 <NEW_LINE> <DEDENT> category = Category.query.filter_by(category_name = args['category_name']).first() <NEW_LINE> if not category: <NEW_LINE> <INDENT> category = Category(**args) <NEW_LINE> <DEDENT> shop.categories.append(category) <NEW_LINE> return jsonify({'message':'Сategory {} was added in {}'.format(args['category_name'], shop.shop_name)}) | Restfull class for category shop | 62598fa9cb5e8a47e493c123 |
class Function(Token): <NEW_LINE> <INDENT> concat = '.' <NEW_LINE> def __unicode__(self): <NEW_LINE> <INDENT> params = [] <NEW_LINE> if len(self.args): <NEW_LINE> <INDENT> for arg in self.args: <NEW_LINE> <INDENT> if issubclass(type(arg), Statement): <NEW_LINE> <INDENT> self.apply_statment(arg) <NEW_LINE> params.append(str(arg)) <NEW_LINE> <DEDENT> elif issubclass(type(arg), Gremlin): <NEW_LINE> <INDENT> arg.set_parent_gremlin(self.gremlin) <NEW_LINE> params.append(str(arg)) <NEW_LINE> <DEDENT> elif isinstance(arg, Param): <NEW_LINE> <INDENT> params.append(self.gremlin.bind_param(arg)[0]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> params.append(self.gremlin.bind_param(arg)[0]) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return '{}({})'.format(self.value, ', '.join(params)) | class used to create a Gremlin function
it assumes that the last argument passed to the function is the only thing
that will be bound
if you need more than the last argument bound, you can do:
g = Gremlin()
value1 = g.bind_param('value1')[0]
value2 = g.bind_param('value2')[0]
g.functionName('not_bound', value1, value2, ...) | 62598fa9b7558d5895463585 |
class LoginRoutes(msrest.serialization.Model): <NEW_LINE> <INDENT> _attribute_map = { 'logout_endpoint': {'key': 'logoutEndpoint', 'type': 'str'}, } <NEW_LINE> def __init__( self, *, logout_endpoint: Optional[str] = None, **kwargs ): <NEW_LINE> <INDENT> super(LoginRoutes, self).__init__(**kwargs) <NEW_LINE> self.logout_endpoint = logout_endpoint | The routes that specify the endpoints used for login and logout requests.
:ivar logout_endpoint: The endpoint at which a logout request should be made.
:vartype logout_endpoint: str | 62598fa9e5267d203ee6b861 |
class FavoritesPage(JsonPage): <NEW_LINE> <INDENT> @method <NEW_LINE> class iter_favorites(DictElement): <NEW_LINE> <INDENT> item_xpath = 'aaData' <NEW_LINE> ignore_duplicate = True <NEW_LINE> class item(ItemElement): <NEW_LINE> <INDENT> klass = Favorites <NEW_LINE> obj_id = Regexp(Dict('0'), '/([0-9]+)\\"') <NEW_LINE> obj_name = Regexp(Dict('0'), '>([^<]+)') <NEW_LINE> obj_band_url = Regexp(Dict('0'), 'href=\"([^"]+)') <NEW_LINE> obj_short_description = Format('Genre: %s - Country: %s', Dict('2'), Dict('1')) | Display a list of your favorite bands. | 62598fa92c8b7c6e89bd371c |
class gaussian_flat_layer(nn.Module): <NEW_LINE> <INDENT> def __init__(self, input_size, latent_z_size, latent_y_size): <NEW_LINE> <INDENT> super(gaussian_flat_layer, self).__init__() <NEW_LINE> self.input_size = input_size <NEW_LINE> self.latent_y_size = latent_y_size <NEW_LINE> self.latent_z_size = latent_z_size <NEW_LINE> self.q_mean_mlp = nn.Linear(input_size, latent_z_size) <NEW_LINE> self.q_logvar_mlp = nn.Linear(input_size, latent_z_size) <NEW_LINE> self.q_mean2_mlp = nn.Linear(input_size, latent_y_size) <NEW_LINE> self.q_logvar2_mlp = nn.Linear(input_size, latent_y_size) <NEW_LINE> <DEDENT> def forward(self, inputs, mask, sample): <NEW_LINE> <INDENT> batch_size, batch_len, _ = inputs.size() <NEW_LINE> mean_qs = self.q_mean_mlp(inputs) * mask.unsqueeze(-1) <NEW_LINE> logvar_qs = self.q_logvar_mlp(inputs) * mask.unsqueeze(-1) <NEW_LINE> mean2_qs = self.q_mean2_mlp(inputs) * mask.unsqueeze(-1) <NEW_LINE> logvar2_qs = self.q_logvar2_mlp(inputs) * mask.unsqueeze(-1) <NEW_LINE> if sample: <NEW_LINE> <INDENT> y = gaussian(mean2_qs, logvar2_qs) * mask.unsqueeze(-1) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> y = mean2_qs * mask.unsqueeze(-1) <NEW_LINE> <DEDENT> if sample: <NEW_LINE> <INDENT> z = gaussian(mean_qs, logvar_qs) * mask.unsqueeze(-1) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> z = mean_qs * mask.unsqueeze(-1) <NEW_LINE> <DEDENT> return z, y, mean_qs, logvar_qs, mean2_qs, logvar2_qs | h
/ y z
\ /
x | 62598fa92ae34c7f260ab038 |
class LineStickerAnimatedGifDownloadView(TemplateResponseMixin, View): <NEW_LINE> <INDENT> def get(self, request, pack_id, sticker_id): <NEW_LINE> <INDENT> binary_io = LineStickerUtils.get_downloaded_animated(pack_id, sticker_id) <NEW_LINE> if not binary_io: <NEW_LINE> <INDENT> return HttpResponse(status=404) <NEW_LINE> <DEDENT> return FileResponse(binary_io, as_attachment=True) | View to download an animated sticker as gif. | 62598fa94428ac0f6e65847a |
class IRevisionFile(Interface): <NEW_LINE> <INDENT> pass | The revision file marker interface | 62598fa90c0af96317c562d9 |
class JIDMalformedProtocolError(ProtocolError, JIDError): <NEW_LINE> <INDENT> def __init__(self, message): <NEW_LINE> <INDENT> JIDError.__init__(self) <NEW_LINE> ProtocolError.__init__(self, "jid-malformed", message) | Raised when invalid JID is encountered. | 62598fa9d486a94d0ba2bf24 |
class Motor: <NEW_LINE> <INDENT> def __init__(self, thrust_f, torque_f, pos, quat): <NEW_LINE> <INDENT> self.thrust_map = thrust_f <NEW_LINE> self.torque_map = torque_f <NEW_LINE> self.pos = pos <NEW_LINE> self.quat = quat <NEW_LINE> self.direction = quat_rotate(self.quat, np.array((1, 0, 0))) <NEW_LINE> self.torque_direction = np.cross(self.pos, self.direction) | thrust_f is from RPM to force
torque_f is from force to torque | 62598fa9627d3e7fe0e06e03 |
class MyClass: <NEW_LINE> <INDENT> def myFunc(self, x, y): <NEW_LINE> <INDENT> return x + y | A sample class | 62598fa901c39578d7f12cd6 |
class RequestedPhase(Phase): <NEW_LINE> <INDENT> startDT = time.time() <NEW_LINE> endDT = time.time() <NEW_LINE> isOptimized = 0 <NEW_LINE> def initFromPhase(self, cyclePhase): <NEW_LINE> <INDENT> self.componentName = cyclePhase.componentName <NEW_LINE> self.cycleName = cyclePhase.cycleName <NEW_LINE> self.name = cyclePhase.name <NEW_LINE> self.duration = cyclePhase.duration <NEW_LINE> self.delay = cyclePhase.delay <NEW_LINE> self.wfiConsumption = cyclePhase.wfiConsumption <NEW_LINE> <DEDENT> def toString(self): <NEW_LINE> <INDENT> mystr = "Component name: " + self.componentName + "\n" <NEW_LINE> mystr = mystr + "Cycle name: " + self.cycleName + "\n" <NEW_LINE> mystr = mystr + "Phase name: " + self.name + "\n" <NEW_LINE> mystr = mystr + "Duration: " + str(self.duration) + "\n" <NEW_LINE> mystr = mystr + "Delay: " + str(self.delay) + "\n" <NEW_LINE> mystr = mystr + "WFI consumption: " + str(self.wfiConsumption) + "\n" <NEW_LINE> mystr = mystr + "Start: " + str(datetime.datetime.fromtimestamp(self.startDT)) + "\n" <NEW_LINE> mystr = mystr + "End: " + str(datetime.datetime.fromtimestamp(self.endDT)) <NEW_LINE> return mystr <NEW_LINE> <DEDENT> def isEqual(self, anotherRequestedPhase): <NEW_LINE> <INDENT> return self.id == anotherRequestedPhase.id <NEW_LINE> <DEDENT> def getStartDateStr(self): <NEW_LINE> <INDENT> return str(datetime.datetime.fromtimestamp(self.startDT)) <NEW_LINE> <DEDENT> def getEndDateStr(self): <NEW_LINE> <INDENT> return str(datetime.datetime.fromtimestamp(self.endDT)) <NEW_LINE> <DEDENT> def resetStartDateFromString(self, dateAsString): <NEW_LINE> <INDENT> self.startDT = int(time.mktime(time.strptime(dateAsString, "%Y-%m-%d %H:%M:%S"))) <NEW_LINE> <DEDENT> def resetEndDateFromString(self, dateAsString): <NEW_LINE> <INDENT> self.endDT = int(time.mktime(time.strptime(dateAsString, "%Y-%m-%d %H:%M:%S"))) | Enriches CyclePhase with information related to requested scheduling and optimization.
Attributes:
startDT: cycle phase start date and time. If a cycle phase is optimally scheduled (isOptimized = 1), scheduled cycle phase start (scheduledStartDT)
will be chosen by an optimization algorithm to be not earlier than startDT.
If a cycle phase is NOT optimized, scheduled cycle phase start (scheduledStartDT) will coincide with startDT.
endDT: cycle phase end date and time. Provides an upper bound for cycle phase end. In case of optimization, cycle phase start is
chosen so that scheduledStartDT + duration <= endDT
isOptimized: denotes if cycle phase start is optimally scheduled by calls to CycleScheduler.scheduleCycleRequests or not | 62598fa9498bea3a75a57a74 |
class ChainedManyToManyField(IntrospectiveFieldMixin, ManyToManyField): <NEW_LINE> <INDENT> def __init__(self, to, chained_field=None, chained_model_field=None, auto_choose=False, horizontal=False, verbose_name='', **kwargs): <NEW_LINE> <INDENT> self.chain_field = chained_field <NEW_LINE> self.chained_model_field = chained_model_field <NEW_LINE> self.auto_choose = auto_choose <NEW_LINE> self.horizontal = horizontal <NEW_LINE> self.verbose_name = verbose_name <NEW_LINE> super(ChainedManyToManyField, self).__init__(to, **kwargs) <NEW_LINE> <DEDENT> def deconstruct(self): <NEW_LINE> <INDENT> field_name, path, args, kwargs = super( ChainedManyToManyField, self).deconstruct() <NEW_LINE> defaults = { 'chain_field': None, 'chained_model_field': None, 'auto_choose': False, 'horizontal': False } <NEW_LINE> attr_to_kwarg_names = { 'chain_field': 'chained_field', 'chained_model_field': 'chained_model_field', 'auto_choose': 'auto_choose', 'horizontal': 'horizontal', 'verbose_name': 'verbose_name' } <NEW_LINE> for name, default in defaults.items(): <NEW_LINE> <INDENT> value = getattr(self, name) <NEW_LINE> kwarg_name = attr_to_kwarg_names[name] <NEW_LINE> if value is not default: <NEW_LINE> <INDENT> kwargs[kwarg_name] = value <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if kwarg_name in kwargs: <NEW_LINE> <INDENT> del kwargs[kwarg_name] <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return field_name, path, args, kwargs <NEW_LINE> <DEDENT> def formfield(self, **kwargs): <NEW_LINE> <INDENT> foreign_key_app_name = self.model._meta.app_label <NEW_LINE> foreign_key_model_name = self.model._meta.object_name <NEW_LINE> foreign_key_field_name = self.name <NEW_LINE> defaults = { 'form_class': form_fields.ChainedManyToManyField, 'queryset': self.rel.to._default_manager.complex_filter( self.rel.limit_choices_to), 'to_app_name': self.to_app_name, 'to_model_name': self.to_model_name, 'chain_field': self.chain_field, 'chained_model_field': self.chained_model_field, 'auto_choose': self.auto_choose, 
'horizontal': self.horizontal, 'verbose_name': self.verbose_name, 'foreign_key_app_name': foreign_key_app_name, 'foreign_key_model_name': foreign_key_model_name, 'foreign_key_field_name': foreign_key_field_name, } <NEW_LINE> defaults.update(kwargs) <NEW_LINE> return super(ChainedManyToManyField, self).formfield(**defaults) | chains the choices of a previous combo box with this ManyToMany | 62598fa960cbc95b063642a4 |
class TaskView(tk.Toplevel): <NEW_LINE> <INDENT> def __init__(self, master): <NEW_LINE> <INDENT> tk.Toplevel.__init__(self, master) <NEW_LINE> if isinstance(master, tk.Tk) == False: <NEW_LINE> <INDENT> raise Exception("master is not a tk.Tk()") <NEW_LINE> <DEDENT> """Ensure widget instances are deleted.""" <NEW_LINE> self.protocol('WM_DELETE_WINDOW', self.master.destroy) <NEW_LINE> self.__title_var = tk.StringVar() <NEW_LINE> self.__title_label = tk.Label(self, textvariable = self.__title_var) <NEW_LINE> self.__title_label.pack(side = RIGHT, padx = 5, pady = 5) <NEW_LINE> self.toggle_button = tk.Button(self, text = "Reverse", width = 10) <NEW_LINE> self.toggle_button.pack(side = LEFT, padx = 5, pady = 5) <NEW_LINE> self.listbox = tk.Listbox(self) <NEW_LINE> self.listbox.pack(side = BOTTOM, padx = 5, pady = 5) <NEW_LINE> self.e = Entry(self) <NEW_LINE> self.e.pack() <NEW_LINE> self.e.focus_set() <NEW_LINE> self.add_button = Button(self, text="add item", width=10) <NEW_LINE> self.add_button.pack() <NEW_LINE> self.remove_button = Button(self, text="remove item", width=10) <NEW_LINE> self.remove_button.pack() <NEW_LINE> <DEDENT> def insert(self, item): <NEW_LINE> <INDENT> self.listbox.insert(END, item) <NEW_LINE> <DEDENT> def delete(self, item): <NEW_LINE> <INDENT> self.listbox.delete(item, END) <NEW_LINE> <DEDENT> def update_title(self, title): <NEW_LINE> <INDENT> if type(title) != str: <NEW_LINE> <INDENT> raise Exception("title is not a string") <NEW_LINE> <DEDENT> self.__title_var.set(title) | Close the root Tk() task window, and create a Toplevel window. This is a
child class of Toplevel.
Keyword arguments:
master -- A Tk constructor. | 62598fa95fc7496912d4822e |
class SingletonMF: <NEW_LINE> <INDENT> def __init__(self, name, position): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.position = position <NEW_LINE> <DEDENT> def calculate_membership(self, x): <NEW_LINE> <INDENT> if x == self.position: <NEW_LINE> <INDENT> return 1 <NEW_LINE> <DEDENT> return 0 | Singletom fuzzy logic membership function class.
:param name: name of MF
:param: position of MF | 62598fa938b623060ffa8fef |
class more_produce_line(models.TransientModel): <NEW_LINE> <INDENT> _name = 'more.produce.line' <NEW_LINE> _description = 'Wizard More Produce Line' <NEW_LINE> _rec_name = 'product_id' <NEW_LINE> @api.model <NEW_LINE> def _type_get(self): <NEW_LINE> <INDENT> return [ ('rm', 'RM'), ('fp', 'FP'), ] <NEW_LINE> <DEDENT> produce_id = fields.Many2one('more.produce', string='Produce', required=False, ondelete='cascade') <NEW_LINE> product_id = fields.Many2one('product.product', string='Product', required=True, ondelete='cascade') <NEW_LINE> move_id = fields.Many2one('stock.move', string='Move', required=False, ondelete='cascade') <NEW_LINE> qty = fields.Float(string='Qty', default=0.0, required=True) <NEW_LINE> type = fields.Selection('_type_get', string='Type', default='rm') <NEW_LINE> uom_id = fields.Many2one('product.uom', string='UoM', required=True, ondelete='cascade') <NEW_LINE> no_modif = fields.Boolean(string='No modif', default=False) <NEW_LINE> uom_categ_id = fields.Many2one('product.uom.category', string='UoM categ', required=False, ondelete='cascade') <NEW_LINE> @api.onchange('product_id') <NEW_LINE> def _onchange_product_id(self): <NEW_LINE> <INDENT> self.uom_categ_id = self.product_id and self.product_id.uom_id and self.product_id.uom_id.id and self.product_id.uom_id.category_id and self.product_id.uom_id.category_id.id or False <NEW_LINE> self.uom_id = self.product_id and self.product_id.uom_id and self.product_id.uom_id.id or False | Wizard More Produce Line | 62598fa9925a0f43d25e7f96 |
class RestorableSqlDatabasesOperations: <NEW_LINE> <INDENT> models = _models <NEW_LINE> def __init__(self, client, config, serializer, deserializer) -> None: <NEW_LINE> <INDENT> self._client = client <NEW_LINE> self._serialize = serializer <NEW_LINE> self._deserialize = deserializer <NEW_LINE> self._config = config <NEW_LINE> <DEDENT> @distributed_trace <NEW_LINE> def list( self, location: str, instance_id: str, **kwargs: Any ) -> AsyncIterable["_models.RestorableSqlDatabasesListResult"]: <NEW_LINE> <INDENT> cls = kwargs.pop('cls', None) <NEW_LINE> error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } <NEW_LINE> error_map.update(kwargs.pop('error_map', {})) <NEW_LINE> def prepare_request(next_link=None): <NEW_LINE> <INDENT> if not next_link: <NEW_LINE> <INDENT> request = build_list_request( subscription_id=self._config.subscription_id, location=location, instance_id=instance_id, template_url=self.list.metadata['url'], ) <NEW_LINE> request = _convert_request(request) <NEW_LINE> request.url = self._client.format_url(request.url) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> request = build_list_request( subscription_id=self._config.subscription_id, location=location, instance_id=instance_id, template_url=next_link, ) <NEW_LINE> request = _convert_request(request) <NEW_LINE> request.url = self._client.format_url(request.url) <NEW_LINE> request.method = "GET" <NEW_LINE> <DEDENT> return request <NEW_LINE> <DEDENT> async def extract_data(pipeline_response): <NEW_LINE> <INDENT> deserialized = self._deserialize("RestorableSqlDatabasesListResult", pipeline_response) <NEW_LINE> list_of_elem = deserialized.value <NEW_LINE> if cls: <NEW_LINE> <INDENT> list_of_elem = cls(list_of_elem) <NEW_LINE> <DEDENT> return None, AsyncList(list_of_elem) <NEW_LINE> <DEDENT> async def get_next(next_link=None): <NEW_LINE> <INDENT> request = prepare_request(next_link) <NEW_LINE> pipeline_response = await self._client._pipeline.run(request, 
stream=False, **kwargs) <NEW_LINE> response = pipeline_response.http_response <NEW_LINE> if response.status_code not in [200]: <NEW_LINE> <INDENT> map_error(status_code=response.status_code, response=response, error_map=error_map) <NEW_LINE> raise HttpResponseError(response=response, error_format=ARMErrorFormat) <NEW_LINE> <DEDENT> return pipeline_response <NEW_LINE> <DEDENT> return AsyncItemPaged( get_next, extract_data ) <NEW_LINE> <DEDENT> list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.DocumentDB/locations/{location}/restorableDatabaseAccounts/{instanceId}/restorableSqlDatabases'} | RestorableSqlDatabasesOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.cosmosdb.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer. | 62598fa97c178a314d78d3f4 |
class DlinkException(service.BasicException): <NEW_LINE> <INDENT> pass | Базовое исключение. | 62598fa963b5f9789fe850bd |
class RPCCoverage(): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.dir = tempfile.mkdtemp(prefix="coverage") <NEW_LINE> self.flag = '--coveragedir=%s' % self.dir <NEW_LINE> <DEDENT> def report_rpc_coverage(self): <NEW_LINE> <INDENT> uncovered = self._get_uncovered_rpc_commands() <NEW_LINE> if uncovered: <NEW_LINE> <INDENT> print("Uncovered RPC commands:") <NEW_LINE> print("".join((" - %s\n" % command) for command in sorted(uncovered))) <NEW_LINE> return False <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print("All RPC commands covered.") <NEW_LINE> return True <NEW_LINE> <DEDENT> <DEDENT> def cleanup(self): <NEW_LINE> <INDENT> return shutil.rmtree(self.dir) <NEW_LINE> <DEDENT> def _get_uncovered_rpc_commands(self): <NEW_LINE> <INDENT> reference_filename = 'rpc_interface.txt' <NEW_LINE> coverage_file_prefix = 'coverage.' <NEW_LINE> coverage_ref_filename = os.path.join(self.dir, reference_filename) <NEW_LINE> coverage_filenames = set() <NEW_LINE> all_cmds = set() <NEW_LINE> covered_cmds = set() <NEW_LINE> if not os.path.isfile(coverage_ref_filename): <NEW_LINE> <INDENT> raise RuntimeError("No coverage reference found") <NEW_LINE> <DEDENT> with open(coverage_ref_filename, 'r', encoding="utf8") as coverage_ref_file: <NEW_LINE> <INDENT> all_cmds.update([line.strip() for line in coverage_ref_file.readlines()]) <NEW_LINE> <DEDENT> for root, dirs, files in os.walk(self.dir): <NEW_LINE> <INDENT> for filename in files: <NEW_LINE> <INDENT> if filename.startswith(coverage_file_prefix): <NEW_LINE> <INDENT> coverage_filenames.add(os.path.join(root, filename)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> for filename in coverage_filenames: <NEW_LINE> <INDENT> with open(filename, 'r', encoding="utf8") as coverage_file: <NEW_LINE> <INDENT> covered_cmds.update([line.strip() for line in coverage_file.readlines()]) <NEW_LINE> <DEDENT> <DEDENT> return all_cmds - covered_cmds | Coverage reporting utilities for test_runner.
Coverage calculation works by having each test script subprocess write
coverage files into a particular directory. These files contain the RPC
commands invoked during testing, as well as a complete listing of RPC
commands per `dash-cli help` (`rpc_interface.txt`).
After all tests complete, the commands run are combined and diff'd against
the complete list to calculate uncovered RPC commands.
See also: test/functional/test_framework/coverage.py | 62598fa9435de62698e9bd4e |
class ReflectPanel(BoxLayout): <NEW_LINE> <INDENT> x_axis = ObjectProperty(None) <NEW_LINE> y_axis = ObjectProperty(None) <NEW_LINE> observer = None <NEW_LINE> def __init__(self,observer,**kw): <NEW_LINE> <INDENT> super(ReflectPanel,self).__init__(**kw) <NEW_LINE> self.observer = observer <NEW_LINE> <DEDENT> def update(self): <NEW_LINE> <INDENT> if not self.x_axis.active and not self.y_axis.active: <NEW_LINE> <INDENT> self.observer.transform = geometry.Transform.Identity() <NEW_LINE> <DEDENT> elif self.x_axis.active and not self.y_axis.active: <NEW_LINE> <INDENT> self.observer.transform = geometry.Transform.Reflection(1,0) <NEW_LINE> <DEDENT> elif not self.x_axis.active and self.y_axis.active: <NEW_LINE> <INDENT> self.observer.transform = geometry.Transform.Reflection(0,1) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.observer.transform = geometry.Transform.Rotation(math.pi) | Instances are panels for defining an Reflection transform.
x_axis is a Kivy property for a checkbox indicating a reflection
about the the x-axis
y_axis is a Kivy property for a checkbox indicating a reflection
about the the y-axis
When either x_axis or y_axis change, Kivy invokes the method update.
This method updates the transform property in the observer field. | 62598fa92ae34c7f260ab03a |
class StackUdpRaet(deeding.Deed): <NEW_LINE> <INDENT> Ioinits = odict( inode="raet.udp.stack.", stack='stack', txmsgs=odict(ipath='txmsgs', ival=deque()), rxmsgs=odict(ipath='rxmsgs', ival=deque()), local=odict(ipath='local', ival=odict( name='master', eid=0, host='0.0.0.0', port=raeting.RAET_PORT, sigkey=None, prikey=None)),) <NEW_LINE> def postinitio(self): <NEW_LINE> <INDENT> sigkey = self.local.data.sigkey <NEW_LINE> prikey = self.local.data.prikey <NEW_LINE> ha = (self.local.data.host, self.local.data.port) <NEW_LINE> name = self.local.data.name <NEW_LINE> eid = self.local.data.eid <NEW_LINE> estate = estating.LocalEstate( eid=eid, name=name, ha=ha, sigkey=sigkey, prikey=prikey,) <NEW_LINE> txMsgs = self.txmsgs.value <NEW_LINE> rxMsgs = self.rxmsgs.value <NEW_LINE> self.stack.value = stacking.StackUdp(estate=estate, store=self.store, name=name, txMsgs=txMsgs, rxMsgs=rxMsgs, ) <NEW_LINE> <DEDENT> def action(self, **kwa): <NEW_LINE> <INDENT> self.stack.value.serviceAll() | StackUdpRaet initialize and run raet udp stack | 62598fa955399d3f0562647c |
class ThreadlocalCassandraDataManager(object): <NEW_LINE> <INDENT> implements(IDataManager) <NEW_LINE> def __init__(self, tx): <NEW_LINE> <INDENT> self.tx = tx <NEW_LINE> <DEDENT> def abort(self, trans): <NEW_LINE> <INDENT> if self.tx is not None: <NEW_LINE> <INDENT> self.tx.rollback() <NEW_LINE> self.tx = None <NEW_LINE> <DEDENT> <DEDENT> def commit(self, trans): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tpc_begin(self, trans): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tpc_vote(self, trans): <NEW_LINE> <INDENT> self.tx.commit() <NEW_LINE> self.tx = None <NEW_LINE> <DEDENT> def tpc_finish(self, trans): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tpc_abort(self, trans): <NEW_LINE> <INDENT> self.abort(trans) <NEW_LINE> <DEDENT> def sortKey(self): <NEW_LINE> <INDENT> return "~catcasst:%d" % id(self.tx) | Instances of this class will be used to 'join' Zope's transactions. | 62598fa9aad79263cf42e72d |
class _RedirectStream: <NEW_LINE> <INDENT> _stream = None <NEW_LINE> def __init__(self, new_target=None): <NEW_LINE> <INDENT> self._new_target = new_target <NEW_LINE> self._old_targets = [] <NEW_LINE> <DEDENT> def open(self, new_target): <NEW_LINE> <INDENT> self._new_target = new_target <NEW_LINE> self._old_targets.append(getattr(sys, self._stream)) <NEW_LINE> setattr(sys, self._stream, self._new_target) <NEW_LINE> return self._new_target <NEW_LINE> <DEDENT> def close(self): <NEW_LINE> <INDENT> if len(self._old_targets) > 0: <NEW_LINE> <INDENT> setattr(sys, self._stream, self._old_targets.pop()) <NEW_LINE> self._new_target.close() <NEW_LINE> <DEDENT> return getattr(sys, self._stream) <NEW_LINE> <DEDENT> def __enter__(self): <NEW_LINE> <INDENT> if self._new_target is None: <NEW_LINE> <INDENT> return getattr(sys, self._stream) <NEW_LINE> <DEDENT> return self.open(self._new_target) <NEW_LINE> <DEDENT> def __exit__(self, exctype, excinst, exctb): <NEW_LINE> <INDENT> self.close() | A base class for a context manager to redirect streams from the sys module. | 62598fa9627d3e7fe0e06e05 |
class TenantServicePluginAttr(BaseModel): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> db_table = "tenant_service_plugin_attr" <NEW_LINE> <DEDENT> service_id = models.CharField(max_length=32, help_text=u"组件ID") <NEW_LINE> service_alias = models.CharField(max_length=32, help_text=u"主组件别名") <NEW_LINE> dest_service_id = models.CharField(max_length=32, help_text=u"组件ID") <NEW_LINE> dest_service_alias = models.CharField(max_length=32, help_text=u"组件别名") <NEW_LINE> plugin_id = models.CharField(max_length=32, help_text=u"插件ID") <NEW_LINE> service_meta_type = models.CharField(max_length=32, choices=data_type, help_text=u"依赖数据类型") <NEW_LINE> injection = models.CharField(max_length=32, help_text=u"注入方式 auto, env") <NEW_LINE> container_port = models.IntegerField(help_text=u"依赖端口") <NEW_LINE> protocol = models.CharField(max_length=16, help_text=u"端口协议", default="uneed") <NEW_LINE> attr_name = models.CharField(max_length=64, help_text=u"变量名") <NEW_LINE> attr_value = models.CharField(max_length=128, help_text=u"变量值") <NEW_LINE> attr_alt_value = models.CharField(max_length=128, help_text=u"可选值") <NEW_LINE> attr_type = models.CharField(max_length=16, help_text=u"属性类型") <NEW_LINE> attr_default_value = models.CharField(max_length=128, null=True, blank=True, help_text=u"默认值") <NEW_LINE> is_change = models.BooleanField(default=False, blank=True, help_text=u"是否可改变") <NEW_LINE> attr_info = models.CharField(max_length=32, null=True, blank=True, help_text=u"配置项说明") | 旧版组件插件属性 | 62598fa9b7558d5895463588 |
class TestApiPromotion(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def testApiPromotion(self): <NEW_LINE> <INDENT> pass | ApiPromotion unit test stubs | 62598fa9a8370b77170f0333 |
class DataVisualizer(object): <NEW_LINE> <INDENT> def __init__(self, data): <NEW_LINE> <INDENT> self.data = data <NEW_LINE> <DEDENT> def inspectDataFile(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def illustrate(self, xlabel, ylabel): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def saveArtwork(self, name='image', fmt='jpg'): <NEW_LINE> <INDENT> pass | for data visualization purposes, to be implemented
.. Author: Tong Zhang
.. Date : 2016-03-14 | 62598fa910dbd63aa1c70b0b |
class Parameter(AnnotatedObject): <NEW_LINE> <INDENT> def __init__(self, name: str, annotation: Any, description: str, kind: Any, default: Any = empty) -> None: <NEW_LINE> <INDENT> super().__init__(annotation, description) <NEW_LINE> self.name = name <NEW_LINE> self.kind = kind <NEW_LINE> self.default = default <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.name <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return f"<Parameter({self.name}, {self.annotation}, {self.description}, {self.kind}, {self.default})>" <NEW_LINE> <DEDENT> @property <NEW_LINE> def is_optional(self): <NEW_LINE> <INDENT> return self.default is not empty <NEW_LINE> <DEDENT> @property <NEW_LINE> def is_required(self): <NEW_LINE> <INDENT> return not self.is_optional <NEW_LINE> <DEDENT> @property <NEW_LINE> def is_args(self): <NEW_LINE> <INDENT> return self.kind is inspect.Parameter.VAR_POSITIONAL <NEW_LINE> <DEDENT> @property <NEW_LINE> def is_kwargs(self): <NEW_LINE> <INDENT> return self.kind is inspect.Parameter.VAR_KEYWORD <NEW_LINE> <DEDENT> @property <NEW_LINE> def default_string(self): <NEW_LINE> <INDENT> if self.is_kwargs: <NEW_LINE> <INDENT> return "{}" <NEW_LINE> <DEDENT> if self.is_args: <NEW_LINE> <INDENT> return "()" <NEW_LINE> <DEDENT> if self.is_required: <NEW_LINE> <INDENT> return "" <NEW_LINE> <DEDENT> return repr(self.default) | A helper class to store information about a signature parameter. | 62598fa93539df3088ecc20d |
@dataclass <NEW_LINE> class _YEAR(TimeDataType): <NEW_LINE> <INDENT> pass | YEAR
A year in four-digit format. Values allowed in four-digit format: 1901 to 2155, and 0000.
MySQL 8.0 does not support year in two-digit format. | 62598fa9d6c5a102081e20a1 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.