code stringlengths 4 4.48k | docstring stringlengths 1 6.45k | _id stringlengths 24 24 |
|---|---|---|
class ClassicAdapter(wrapt.AdapterFactory): <NEW_LINE> <INDENT> def __init__(self, reason="", version="", action='always', category=DeprecationWarning): <NEW_LINE> <INDENT> self.reason = reason or "" <NEW_LINE> self.version = version or "" <NEW_LINE> self.action = action <NEW_LINE> self.category = category <NEW_LINE> super(ClassicAdapter, self).__init__() <NEW_LINE> <DEDENT> def get_deprecated_msg(self, wrapped, instance): <NEW_LINE> <INDENT> if instance is None: <NEW_LINE> <INDENT> if inspect.isclass(wrapped): <NEW_LINE> <INDENT> fmt = "Call to deprecated class {name}." <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> fmt = "Call to deprecated function (or staticmethod) {name}." <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> if inspect.isclass(instance): <NEW_LINE> <INDENT> fmt = "Call to deprecated class method {name}." <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> fmt = "Call to deprecated method {name}." <NEW_LINE> <DEDENT> <DEDENT> if self.reason: <NEW_LINE> <INDENT> fmt += " ({reason})" <NEW_LINE> <DEDENT> if self.version: <NEW_LINE> <INDENT> fmt += " -- Deprecated since version {version}." <NEW_LINE> <DEDENT> return fmt.format(name=wrapped.__name__, reason=self.reason or "", version=self.version or "") <NEW_LINE> <DEDENT> def __call__(self, wrapped): <NEW_LINE> <INDENT> if inspect.isclass(wrapped): <NEW_LINE> <INDENT> old_new1 = wrapped.__new__ <NEW_LINE> def wrapped_cls(cls, *args, **kwargs): <NEW_LINE> <INDENT> msg = self.get_deprecated_msg(wrapped, None) <NEW_LINE> with warnings.catch_warnings(): <NEW_LINE> <INDENT> warnings.simplefilter(self.action, self.category) <NEW_LINE> warnings.warn(msg, category=self.category, stacklevel=2) <NEW_LINE> <DEDENT> if old_new1 is object.__new__: <NEW_LINE> <INDENT> return old_new1(cls) <NEW_LINE> <DEDENT> return old_new1(*args, **kwargs) <NEW_LINE> <DEDENT> wrapped.__new__ = classmethod(wrapped_cls) <NEW_LINE> <DEDENT> return wrapped | Classic adapter -- *for advanced usage only*
This adapter is used to get the deprecation message according to the wrapped object type:
class, function, standard method, static method, or class method.
This is the base class of the :class:`~deprecated.sphinx.SphinxAdapter` class
which is used to update the wrapped object docstring.
You can also inherit this class to change the deprecation message.
In the following example, we change the message into "The ... is deprecated.":
.. code-block:: python
import inspect
from deprecated.classic import ClassicAdapter
from deprecated.classic import deprecated
class MyClassicAdapter(ClassicAdapter):
def get_deprecated_msg(self, wrapped, instance):
if instance is None:
if inspect.isclass(wrapped):
fmt = "The class {name} is deprecated."
else:
fmt = "The function {name} is deprecated."
else:
if inspect.isclass(instance):
fmt = "The class method {name} is deprecated."
else:
fmt = "The method {name} is deprecated."
if self.reason:
fmt += " ({reason})"
if self.version:
fmt += " -- Deprecated since version {version}."
return fmt.format(name=wrapped.__name__,
reason=self.reason or "",
version=self.version or "")
Then, you can use your ``MyClassicAdapter`` class like this in your source code:
.. code-block:: python
@deprecated(reason="use another function", adapter_cls=MyClassicAdapter)
def some_old_function(x, y):
return x + y | 62598fc2a219f33f346c6a7e |
class TestPlaceholderMixin(TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.form = PlaceholderMixinForm() <NEW_LINE> <DEDENT> def test_placeholder_set_when_defiend(self): <NEW_LINE> <INDENT> self.assertIn('placeholder', self.form.fields['a'].widget.attrs.keys()) <NEW_LINE> self.assertEqual(self.form.fields['a'].widget.attrs['placeholder'], 'foobar') <NEW_LINE> <DEDENT> def test_placeholder_not_set_when_not_defiend(self): <NEW_LINE> <INDENT> self.assertNotIn('placeholder', self.form.fields['b'].widget.attrs.keys()) | Tests for the :py:class:`thecut.forms.forms.PlaceholderMixin` class. | 62598fc299fddb7c1ca62f28 |
class GoogleWifiSensor(SensorEntity): <NEW_LINE> <INDENT> def __init__(self, api, name, variable): <NEW_LINE> <INDENT> self._api = api <NEW_LINE> self._name = name <NEW_LINE> self._state = None <NEW_LINE> variable_info = MONITORED_CONDITIONS[variable] <NEW_LINE> self._var_name = variable <NEW_LINE> self._var_units = variable_info[1] <NEW_LINE> self._var_icon = variable_info[2] <NEW_LINE> <DEDENT> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return f"{self._name}_{self._var_name}" <NEW_LINE> <DEDENT> @property <NEW_LINE> def icon(self): <NEW_LINE> <INDENT> return self._var_icon <NEW_LINE> <DEDENT> @property <NEW_LINE> def unit_of_measurement(self): <NEW_LINE> <INDENT> return self._var_units <NEW_LINE> <DEDENT> @property <NEW_LINE> def available(self): <NEW_LINE> <INDENT> return self._api.available <NEW_LINE> <DEDENT> @property <NEW_LINE> def state(self): <NEW_LINE> <INDENT> return self._state <NEW_LINE> <DEDENT> def update(self): <NEW_LINE> <INDENT> self._api.update() <NEW_LINE> if self.available: <NEW_LINE> <INDENT> self._state = self._api.data[self._var_name] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._state = None | Representation of a Google Wifi sensor. | 62598fc255399d3f0562678e |
class ComponentError(Exception): <NEW_LINE> <INDENT> def __init__(self, reason): <NEW_LINE> <INDENT> self.reason = reason <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.reason | Exception type for internal component signalling. | 62598fc297e22403b383b181 |
class Highlight(object): <NEW_LINE> <INDENT> openapi_types = {"slug": "str", "title": "str", "icon_url": "str"} <NEW_LINE> attribute_map = {"slug": "slug", "title": "title", "icon_url": "icon_url"} <NEW_LINE> def __init__( self, slug=None, title=None, icon_url=None, local_vars_configuration=None ): <NEW_LINE> <INDENT> if local_vars_configuration is None: <NEW_LINE> <INDENT> local_vars_configuration = Configuration() <NEW_LINE> <DEDENT> self.local_vars_configuration = local_vars_configuration <NEW_LINE> self._slug = None <NEW_LINE> self._title = None <NEW_LINE> self._icon_url = None <NEW_LINE> self.discriminator = None <NEW_LINE> self.slug = slug <NEW_LINE> self.title = title <NEW_LINE> self.icon_url = icon_url <NEW_LINE> <DEDENT> @property <NEW_LINE> def slug(self): <NEW_LINE> <INDENT> return self._slug <NEW_LINE> <DEDENT> @slug.setter <NEW_LINE> def slug(self, slug): <NEW_LINE> <INDENT> if ( self.local_vars_configuration.client_side_validation and slug is None ): <NEW_LINE> <INDENT> raise ValueError( "Invalid value for `slug`, must not be `None`" ) <NEW_LINE> <DEDENT> self._slug = slug <NEW_LINE> <DEDENT> @property <NEW_LINE> def title(self): <NEW_LINE> <INDENT> return self._title <NEW_LINE> <DEDENT> @title.setter <NEW_LINE> def title(self, title): <NEW_LINE> <INDENT> if ( self.local_vars_configuration.client_side_validation and title is None ): <NEW_LINE> <INDENT> raise ValueError( "Invalid value for `title`, must not be `None`" ) <NEW_LINE> <DEDENT> self._title = title <NEW_LINE> <DEDENT> @property <NEW_LINE> def icon_url(self): <NEW_LINE> <INDENT> return self._icon_url <NEW_LINE> <DEDENT> @icon_url.setter <NEW_LINE> def icon_url(self, icon_url): <NEW_LINE> <INDENT> if ( self.local_vars_configuration.client_side_validation and icon_url is None ): <NEW_LINE> <INDENT> raise ValueError( "Invalid value for `icon_url`, must not be `None`" ) <NEW_LINE> <DEDENT> self._icon_url = icon_url <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> 
for attr, _ in six.iteritems(self.openapi_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list( map(lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value) ) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict( map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items(), ) ) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pprint.pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, Highlight): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.to_dict() == other.to_dict() <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, Highlight): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return self.to_dict() != other.to_dict() | NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually. | 62598fc27b180e01f3e4918b |
class SphinxStandaloneReader(SphinxBaseReader): <NEW_LINE> <INDENT> transforms = [ApplySourceWorkaround, ExtraTranslatableNodes, PreserveTranslatableMessages, Locale, CitationReferences, DefaultSubstitutions, MoveModuleTargets, HandleCodeBlocks, AutoNumbering, AutoIndexUpgrader, SortIds, RemoveTranslatableInline, PreserveTranslatableMessages, FilterSystemMessages, RefOnlyBulletListTransform, UnreferencedFootnotesDetector] | Add our own transforms. | 62598fc25166f23b2e243658 |
class SurfaceOver3D1RangeSeries(SurfaceBaseSeries): <NEW_LINE> <INDENT> def __init__(self, expr, var_start_end_x, var_start_end_z ,var_start_end_y, **kwargs): <NEW_LINE> <INDENT> super(SurfaceOver3D1RangeSeries, self).__init__() <NEW_LINE> self.expr = sympify(expr) <NEW_LINE> self.var_x = sympify(var_start_end_x[0]) <NEW_LINE> self.start_x = float(var_start_end_x[1]) <NEW_LINE> self.end_x = float(var_start_end_x[2]) <NEW_LINE> self.var_z = sympify(var_start_end_z[0]) <NEW_LINE> self.start_z = float(var_start_end_z[1]) <NEW_LINE> self.end_z = float(var_start_end_z[2]) <NEW_LINE> self.var_y = sympify(var_start_end_y[0]) <NEW_LINE> self.start_y = float(var_start_end_y[1]) <NEW_LINE> self.end_y = float(var_start_end_y[2]) <NEW_LINE> self.nb_of_points_x = kwargs.get('nb_of_points_x', 50) <NEW_LINE> self.nb_of_points_y = kwargs.get('nb_of_points_y', 50) <NEW_LINE> self.surface_color = kwargs.get('surface_color', None) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return ('cartesian surface: %s for' ' %s over %s and %s over %s') % ( str(self.expr), str(self.var_x), str((self.start_x, self.end_x)), str(self.var_y), str((self.start_y, self.end_y))) <NEW_LINE> <DEDENT> def get_meshes(self): <NEW_LINE> <INDENT> np = import_module('numpy') <NEW_LINE> zArr=np.linspace(self.start_x, self.end_x,num=50) <NEW_LINE> print (self.expr) <NEW_LINE> print ((self.var_z)) <NEW_LINE> f = lambdify((self.var_z), self.expr,"numpy" ) <NEW_LINE> zExpr = f(zArr) <NEW_LINE> xArrF=[] <NEW_LINE> zArrF=[] <NEW_LINE> yArrF=[] <NEW_LINE> for i in xrange(0,len(zExpr)): <NEW_LINE> <INDENT> xArrL,yArrL=sympyPlot_implicit.plot_implicit_3d(zExpr[i],show=False) <NEW_LINE> xArrF+=xArrL <NEW_LINE> yArrF+=yArrL <NEW_LINE> zArrL=[zArr[i]] * len(xArrL) <NEW_LINE> zArrF+=zArrL <NEW_LINE> <DEDENT> arrSrt=int(len(xArrF)**(1/2)) <NEW_LINE> xArrF=xArrF[:arrSrt*arrSrt] <NEW_LINE> yArrF=yArrF[:arrSrt*arrSrt] <NEW_LINE> zArrF=zArrF[:arrSrt*arrSrt] <NEW_LINE> print(arrSrt) <NEW_LINE> 
xArrF=np.array(xArrF,dtype=float) <NEW_LINE> yArrF=np.array(yArrF,dtype=float) <NEW_LINE> zArrF=np.array(zArrF,dtype=float) <NEW_LINE> mesh_x=np.reshape(xArrF, (-1, arrSrt)) <NEW_LINE> mesh_y=np.reshape(yArrF, (-1, arrSrt)) <NEW_LINE> mesh_z=np.reshape(zArrF, (-1, arrSrt)) <NEW_LINE> print (type(mesh_x[0])) <NEW_LINE> print (type(mesh_x[0][0])) <NEW_LINE> return (mesh_x,mesh_y,mesh_z) | Representation for a 3D surface consisting of an implicit sympy expression and 3D
range. | 62598fc27d847024c075c634 |
class BoostingQuery(Query): <NEW_LINE> <INDENT> _internal_name = "boosting" <NEW_LINE> def __init__(self, positive, negative, negative_boost=0.0, boost=1.0, **kwargs): <NEW_LINE> <INDENT> super(BoostingQuery, self).__init__(**kwargs) <NEW_LINE> self.positive = positive <NEW_LINE> self.negative = negative <NEW_LINE> self.negative_boost = negative_boost <NEW_LINE> self.boost = boost <NEW_LINE> <DEDENT> def _serialize(self): <NEW_LINE> <INDENT> return { 'positive': self.positive.serialize(), 'negative': self.negative.serialize(), 'negative_boost': self.negative_boost, 'boost': self.boost } | The boosting query can be used to effectively demote results that match a given query. Unlike the "NOT"
clause in bool query, this still selects documents that contain undesirable terms, but reduces their overall score.
Example:
t = TermQuery('name', 'john')
q = BoostingQuery(MatchAllQuery(), t, negative_boost=0.2)
results = conn.search(q)
reference :https://github.com/elastic/elasticsearch/blob/148265bd164cd5a614cd020fb480d5974f523d81/core/src/main/java/org/elasticsearch/index/query/BoostingQueryParser.java | 62598fc223849d37ff85132b |
class PrmEditer(Parametre): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> Parametre.__init__(self, "editer", "edit") <NEW_LINE> self.groupe = "administrateur" <NEW_LINE> self.schema = "<nombre> <message>" <NEW_LINE> self.aide_courte = "édite une annonce" <NEW_LINE> self.aide_longue = "Cette sous-commande prend en paramètre l'id d'une annonce " "ainsi qu'un message, et remplace le contenu de l'annonce en " "question par le message précisé." <NEW_LINE> <DEDENT> def interpreter(self, personnage, dic_masques): <NEW_LINE> <INDENT> id = dic_masques["nombre"].nombre <NEW_LINE> modif = dic_masques["message"].message <NEW_LINE> annonces = type(self).importeur.information.annonces <NEW_LINE> if id > len(annonces): <NEW_LINE> <INDENT> personnage << "|err|Aucune annonce ne correspond à l'id " "spécifiée.|ff|" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> id -= 1 <NEW_LINE> annonces[id] = modif <NEW_LINE> personnage << "|att|L'annonce a bien été éditée.|ff|" | Commande 'annonces editer'.
| 62598fc2283ffb24f3cf3afc |
class Rat: <NEW_LINE> <INDENT> def __init__(self, symbol, row, col): <NEW_LINE> <INDENT> self.symbol = symbol <NEW_LINE> self.row = row <NEW_LINE> self.col = col <NEW_LINE> self.num_sprouts_eaten = 0 <NEW_LINE> <DEDENT> def set_location(self, row, col): <NEW_LINE> <INDENT> self.row = row <NEW_LINE> self.col = col <NEW_LINE> <DEDENT> def eat_sprout(self): <NEW_LINE> <INDENT> self.num_sprouts_eaten += 1 <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.symbol+" at ("+str(self.row)+", "+str(self.col)+") ate "+str(self.num_sprouts_eaten)+" sprouts." | A rat caught in a maze. | 62598fc28a349b6b436864b6 |
class FollowupMessage(messages.Message): <NEW_LINE> <INDENT> id = messages.IntegerField(1) <NEW_LINE> anno_id = messages.IntegerField(2) <NEW_LINE> creator = messages.MessageField(UserMessage, 3) <NEW_LINE> created = message_types.DateTimeField(4) <NEW_LINE> comment = messages.StringField(5) | Represents follow up message. | 62598fc2ff9c53063f51a8c6 |
class PrologSearch(BackgroundTaskThread): <NEW_LINE> <INDENT> def __init__(self, view): <NEW_LINE> <INDENT> BackgroundTaskThread.__init__(self, "", True) <NEW_LINE> self.view = view <NEW_LINE> self.signatures = { 'Intel x86 function prologue' : ["\x55\x89\xE5\x83\xEC", "\x55\x89\xE5\x57\x56"], 'Intel x86 NOP Instructions' : ["\x90\x90\x90\x90\x90\x90\x90\x90",], 'ARM big-endian function prologue' : ["\xe9\x2d",], 'ARM little-endian function prologue' : ["\x2d\xe9"], } <NEW_LINE> self.max_sig_size = -8 <NEW_LINE> self.hits = {} <NEW_LINE> <DEDENT> def _search_for_func_prologues(self): <NEW_LINE> <INDENT> for desc, sigs in self.signatures.iteritems(): <NEW_LINE> <INDENT> for sig in sigs: <NEW_LINE> <INDENT> nextaddr = 0 <NEW_LINE> while True: <NEW_LINE> <INDENT> nextaddr = self.view.find_next_data(nextaddr, sig) <NEW_LINE> if nextaddr == None: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> self.hits[nextaddr] = desc <NEW_LINE> nextaddr = nextaddr + len(sig) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> def _display_report(self): <NEW_LINE> <INDENT> md = "" <NEW_LINE> for key, val in self.hits.iteritems(): <NEW_LINE> <INDENT> md += "**{:08x}** {}\n\n".format(key, val) <NEW_LINE> <DEDENT> self.view.show_markdown_report("Function Prologue Search", md) <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> self._search_for_func_prologues() <NEW_LINE> if self.hits != {}: <NEW_LINE> <INDENT> self._display_report() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> show_message_box( "binjago: Function Prologue Search", "Could not find any function prologues" ) | Class that assists in locating function prologues in flat files binaries such as firmware
| 62598fc263b5f9789fe853eb |
class AvailableProvidersList(msrest.serialization.Model): <NEW_LINE> <INDENT> _validation = { 'countries': {'required': True}, } <NEW_LINE> _attribute_map = { 'countries': {'key': 'countries', 'type': '[AvailableProvidersListCountry]'}, } <NEW_LINE> def __init__( self, *, countries: List["AvailableProvidersListCountry"], **kwargs ): <NEW_LINE> <INDENT> super(AvailableProvidersList, self).__init__(**kwargs) <NEW_LINE> self.countries = countries | List of available countries with details.
All required parameters must be populated in order to send to Azure.
:param countries: Required. List of available countries.
:type countries: list[~azure.mgmt.network.v2019_12_01.models.AvailableProvidersListCountry] | 62598fc27b180e01f3e4918c |
class ForwardModel(MongoDBModel): <NEW_LINE> <INDENT> coll_name = "forward_doc" <NEW_LINE> fields = ["forward_person_id", "article_id", "create_time", "is_forward"] <NEW_LINE> async def find_or_insert(self, valid_obj): <NEW_LINE> <INDENT> count_docs = await self.collection.count_documents({"forward_person_id": valid_obj["forward_person_id"], "article_id": valid_obj[ 'article_id']}) <NEW_LINE> if count_docs == 0: <NEW_LINE> <INDENT> doc = await self.collection.insert_one(valid_obj) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> doc = await self.collection.update_one({"forward_person_id": valid_obj["forward_person_id"], "article_id": valid_obj[ 'article_id']}, {'$set': {'is_forward': valid_obj['is_forward'], 'create_time': valid_obj['create_time']}}) <NEW_LINE> <DEDENT> doc = await self.collection.find_one({"forward_person_id": valid_obj["forward_person_id"], "article_id": valid_obj[ 'article_id']}) <NEW_LINE> return self.trans_obj_id_str(doc) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def trans_obj_id_str(docs): <NEW_LINE> <INDENT> if isinstance(docs, list): <NEW_LINE> <INDENT> for doc in docs: <NEW_LINE> <INDENT> doc_id = str(doc.pop("_id")) <NEW_LINE> doc['id'] = doc_id <NEW_LINE> <DEDENT> return docs <NEW_LINE> <DEDENT> elif isinstance(docs, dict): <NEW_LINE> <INDENT> doc_id = str(docs.pop('_id')) <NEW_LINE> docs['id'] = doc_id <NEW_LINE> return docs <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise Exception("500", "can shu cuo wu") | 转发
forward_person_id: 转发人id
article_id: 文章id
create_time: 创建时间
is_delete:是否删除 | 62598fc2e1aae11d1e7ce962 |
class PreferenceEditor(athena.LiveElement): <NEW_LINE> <INDENT> implements(ixmantissa.INavigableFragment) <NEW_LINE> title = 'Settings' <NEW_LINE> fragmentName = 'preference-editor' <NEW_LINE> def __init__(self, aggregator): <NEW_LINE> <INDENT> self.aggregator = aggregator <NEW_LINE> super(PreferenceEditor, self).__init__() <NEW_LINE> <DEDENT> def tabbedPane(self, req, tag): <NEW_LINE> <INDENT> navigation = webnav.getTabs(self.aggregator.getPreferenceCollections()) <NEW_LINE> pages = list() <NEW_LINE> for tab in navigation: <NEW_LINE> <INDENT> f = inevow.IRenderer( self.aggregator.store.getItemByID(tab.storeID)) <NEW_LINE> f.tab = tab <NEW_LINE> if hasattr(f, 'setFragmentParent'): <NEW_LINE> <INDENT> f.setFragmentParent(self) <NEW_LINE> <DEDENT> pages.append((tab.name, f)) <NEW_LINE> <DEDENT> f = tabbedPane.TabbedPaneFragment(pages, name='preference-editor') <NEW_LINE> f.setFragmentParent(self) <NEW_LINE> return f <NEW_LINE> <DEDENT> renderer(tabbedPane) <NEW_LINE> def head(self): <NEW_LINE> <INDENT> return tabbedPane.tabbedPaneGlue.inlineCSS | L{xmantissa.ixmantissa.INavigableFragment} adapter for
L{xmantissa.prefs.PreferenceAggregator}. Responsible for
rendering all installed L{xmantissa.ixmantissa.IPreferenceCollection}s | 62598fc25166f23b2e24365a |
class SimpleFactory: <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def product_cpu(name): <NEW_LINE> <INDENT> if name == 'Inter': <NEW_LINE> <INDENT> return Inter() <NEW_LINE> <DEDENT> elif name == 'Amd': <NEW_LINE> <INDENT> return Amd() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError("{} not in cpu list".format(name)) | 简单工厂.工厂根据不同条件生产不同功能的类. | 62598fc24a966d76dd5ef14e |
class TypeDetector(object): <NEW_LINE> <INDENT> def __init__( self, field_names=None, field_types=DEFAULT_TYPES, fallback_type=TextField, skip_indexes=None, ): <NEW_LINE> <INDENT> self.field_names = field_names or [] <NEW_LINE> self.field_types = list(field_types) <NEW_LINE> self.fallback_type = fallback_type <NEW_LINE> self._possible_types = defaultdict(lambda: list(self.field_types)) <NEW_LINE> self._samples = [] <NEW_LINE> self._skip = skip_indexes or tuple() <NEW_LINE> <DEDENT> def check_type(self, index, value): <NEW_LINE> <INDENT> for type_ in self._possible_types[index][:]: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> type_.deserialize(value) <NEW_LINE> <DEDENT> except (ValueError, TypeError): <NEW_LINE> <INDENT> self._possible_types[index].remove(type_) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def process_row(self, row): <NEW_LINE> <INDENT> for index, value in enumerate(row): <NEW_LINE> <INDENT> if index in self._skip: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> self.check_type(index, value) <NEW_LINE> <DEDENT> <DEDENT> def feed(self, data): <NEW_LINE> <INDENT> for row in data: <NEW_LINE> <INDENT> self.process_row(row) <NEW_LINE> <DEDENT> <DEDENT> def priority(self, *field_types): <NEW_LINE> <INDENT> return field_types[0] if field_types else self.fallback_type <NEW_LINE> <DEDENT> @property <NEW_LINE> def fields(self): <NEW_LINE> <INDENT> possible, skip = self._possible_types, self._skip <NEW_LINE> if possible: <NEW_LINE> <INDENT> placeholders = make_header(range(max(possible.keys()) + 1)) <NEW_LINE> header = [a or b for a, b in zip_longest(self.field_names, placeholders)] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> header = self.field_names <NEW_LINE> <DEDENT> return OrderedDict( [ ( field_name, self.priority(*(possible[index] if index in possible else [])), ) for index, field_name in enumerate(header) if index not in skip ] ) | Detect data types based on a list of Field classes | 62598fc266656f66f7d5a66c |
class DevConfig(Config): <NEW_LINE> <INDENT> ENV = "development" <NEW_LINE> DEBUG = True <NEW_LINE> TESTING = False <NEW_LINE> SQLALCHEMY_DATABASE_URI = environ.get("DATABASE_URI") | Dev config class for application factory | 62598fc2ec188e330fdf8b0e |
class E09(VOWarning, ValueError): <NEW_LINE> <INDENT> message_template = "'%s' must have a value attribute" <NEW_LINE> default_args = ('x',) | The ``MIN``, ``MAX`` and ``OPTION`` elements must always have a
``value`` attribute.
**References**: `1.1
<http://www.ivoa.net/Documents/VOTable/20040811/REC-VOTable-1.1-20040811.html#sec:values>`__,
`1.2
<http://www.ivoa.net/Documents/VOTable/20091130/REC-VOTable-1.2.html#sec:values>`__ | 62598fc221bff66bcd722ee5 |
class Expression25(Expression): <NEW_LINE> <INDENT> def get(self, instance): <NEW_LINE> <INDENT> raise NotImplementedError('%s not implemented' % ( self.__class__.__name__)) | Aliases->Get Index of Alias
Parameters:
0: Enter Name of Alias (String)
Return type: Int | 62598fc2656771135c4898eb |
class ReportCommentInline(admin.StackedInline): <NEW_LINE> <INDENT> model = ReportComment | ReportComment Inline. | 62598fc263b5f9789fe853ed |
class _FunctionHistogram(_NumpyHistogram): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> _NumpyHistogram.__init__(self) <NEW_LINE> self.y = [] <NEW_LINE> <DEDENT> def step(self,x,y,bins,xmin,xmax): <NEW_LINE> <INDENT> _NumpyHistogram.step(self,x,bins,xmin,xmax) <NEW_LINE> self.y.append(y) <NEW_LINE> <DEDENT> def finalize(self): <NEW_LINE> <INDENT> raise NotImplementedError("_FunctionHistogram must be inherited from.") | Baseclass for histogrammed functions.
A histogrammed function is created by applying a function
to all values y that have been accumulated in a bin x. | 62598fc24428ac0f6e6587a0 |
class TPAMConfigError(TPAMError): <NEW_LINE> <INDENT> def __init__(self, message): <NEW_LINE> <INDENT> TPAMError.__init__(self, message) | Exception for a fatal client configuration error | 62598fc25fdd1c0f98e5e20e |
class atomic_file(AtomicLocalFile): <NEW_LINE> <INDENT> def move_to_final_destination(self): <NEW_LINE> <INDENT> os.rename(self.tmp_path, self.path) <NEW_LINE> <DEDENT> def generate_tmp_path(self, path): <NEW_LINE> <INDENT> return path + '-finestrino-tmp-%09d' % random.randrange(0, 1e10) | Simple class that writes to a temp file and moves it on close().
Also cleans up the temp file if close is not invoked. | 62598fc2e1aae11d1e7ce963 |
class Conv2d_tf(nn.Conv2d): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super(Conv2d_tf, self).__init__(*args, **kwargs) <NEW_LINE> self.padding = kwargs.get("padding", "SAME") <NEW_LINE> <DEDENT> def _compute_padding(self, input, dim): <NEW_LINE> <INDENT> input_size = input.size(dim + 2) <NEW_LINE> filter_size = self.weight.size(dim + 2) <NEW_LINE> effective_filter_size = (filter_size - 1) * self.dilation[dim] + 1 <NEW_LINE> out_size = (input_size + self.stride[dim] - 1) // self.stride[dim] <NEW_LINE> total_padding = max( 0, (out_size - 1) * self.stride[dim] + effective_filter_size - input_size ) <NEW_LINE> additional_padding = int(total_padding % 2 != 0) <NEW_LINE> return additional_padding, total_padding <NEW_LINE> <DEDENT> def forward(self, input): <NEW_LINE> <INDENT> if self.padding == "VALID": <NEW_LINE> <INDENT> return F.conv2d( input, self.weight, self.bias, self.stride, padding=0, dilation=self.dilation, groups=self.groups, ) <NEW_LINE> <DEDENT> rows_odd, padding_rows = self._compute_padding(input, dim=0) <NEW_LINE> cols_odd, padding_cols = self._compute_padding(input, dim=1) <NEW_LINE> if rows_odd or cols_odd: <NEW_LINE> <INDENT> input = F.pad(input, [0, cols_odd, 0, rows_odd]) <NEW_LINE> <DEDENT> return F.conv2d( input, self.weight, self.bias, self.stride, padding=(padding_rows // 2, padding_cols // 2), dilation=self.dilation, groups=self.groups, ) | Conv2d with the padding behavior from TF. Used for the ResNet implementation | 62598fc2a8370b77170f065d |
class MyMessageView(LoginRequiredMixin, View): <NEW_LINE> <INDENT> def get(self, request): <NEW_LINE> <INDENT> all_message = UserMessage.objects.filter(user= request.user.id) <NEW_LINE> all_unread_messages = UserMessage.objects.filter(user=request.user.id , has_read= False) <NEW_LINE> for unread_message in all_unread_messages: <NEW_LINE> <INDENT> unread_message.has_read = True <NEW_LINE> unread_message.save() <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> page = request.GET.get('page', 1) <NEW_LINE> <DEDENT> except PageNotAnInteger: <NEW_LINE> <INDENT> page = 1 <NEW_LINE> <DEDENT> p = Paginator(all_message, 5,request=request) <NEW_LINE> messages = p.page(page) <NEW_LINE> return render(request, "usercenter-message.html", { "messages":messages, }) | 我的消息 | 62598fc257b8e32f5250825c |
class BaseHandler(tornado.web.RequestHandler): <NEW_LINE> <INDENT> def load_json(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.request.arguments = json.loads(self.request.body) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> msg = "Could not decode JSON: %s" % self.request.body <NEW_LINE> logger.debug(msg) <NEW_LINE> raise tornado.web.HTTPError(400, msg) <NEW_LINE> <DEDENT> <DEDENT> def get_json_argument(self, name, default=None): <NEW_LINE> <INDENT> if default is None: <NEW_LINE> <INDENT> default = self._ARG_DEFAULT <NEW_LINE> <DEDENT> if not self.request.arguments: <NEW_LINE> <INDENT> self.load_json() <NEW_LINE> <DEDENT> if name not in self.request.arguments: <NEW_LINE> <INDENT> if default is self._ARG_DEFAULT: <NEW_LINE> <INDENT> msg = "Missing argument '%s'" % name <NEW_LINE> logger.debug(msg) <NEW_LINE> raise tornado.web.HTTPError(400, msg) <NEW_LINE> <DEDENT> logger.debug("Returning default argument %s, as we couldn't find " "'%s' in %s" % (default, name, self.request.arguments)) <NEW_LINE> return default <NEW_LINE> <DEDENT> arg = self.request.arguments[name] <NEW_LINE> logger.debug("Found '%s': %s in JSON arguments" % (name, arg)) <NEW_LINE> return arg <NEW_LINE> <DEDENT> def get_current_user(self): <NEW_LINE> <INDENT> return self.get_secure_cookie('log_user') | A class to collect common handler methods - all other handlers should
subclass this one. | 62598fc2be7bc26dc9251f9a |
class ContourPlate(Part): <NEW_LINE> <INDENT> def AddContourPoint(self,contourPoint): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def Delete(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def Insert(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def Modify(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def Select(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> Contour=property(lambda self: object(),lambda self,v: None,lambda self: None) <NEW_LINE> Type=property(lambda self: object(),lambda self,v: None,lambda self: None) <NEW_LINE> ContourPlateTypeEnum=None | ContourPlate() | 62598fc2bf627c535bcb1723 |
class PrivateMessagesMixin(AuthenticatedReddit):
    """Adds methods requiring the 'privatemessages' scope (or login).

    You should **not** directly instantiate instances of this class. Use
    :class:`.Reddit` instead.
    """

    @decorators.restrict_access(scope='privatemessages')
    def _mark_as_read(self, thing_ids, unread=False):
        # Flip the read flag on the given fullnames, then evict every
        # cached listing whose contents may now be stale.
        key = 'unread_message' if unread else 'read_message'
        response = self.request_json(self.config[key],
                                     data={'id': ','.join(thing_ids)})
        self.evict([self.config[x]
                    for x in ['inbox', 'messages', 'mod_mail', 'unread']])
        return response

    @decorators.restrict_access(scope='privatemessages')
    def get_inbox(self, *args, **kwargs):
        """Return a listing of the logged-in user's inbox."""
        return self.get_content(self.config['inbox'], *args, **kwargs)

    @decorators.restrict_access(scope='privatemessages')
    def get_message(self, message_id, *args, **kwargs):
        """Return the Message object identified by ``message_id``."""
        return objects.Message.from_id(self, message_id, *args, **kwargs)

    @decorators.restrict_access(scope='privatemessages')
    def get_messages(self, *args, **kwargs):
        """Return a listing of direct messages."""
        return self.get_content(self.config['messages'], *args, **kwargs)

    @decorators.restrict_access(scope='privatemessages')
    def get_sent(self, *args, **kwargs):
        """Return a listing of messages the user has sent."""
        return self.get_content(self.config['sent'], *args, **kwargs)

    @decorators.restrict_access(scope='privatemessages')
    def get_unread(self, unset_has_mail=False, update_user=False, *args,
                   **kwargs):
        """Return a listing of unread items.

        ``update_user`` only takes effect together with ``unset_has_mail``.
        """
        params = kwargs.setdefault('params', {})
        if unset_has_mail:
            params['mark'] = 'true'
            if update_user:
                # Mirror the server-side flag change locally.
                setattr(self.user, 'has_mail', False)
        return self.get_content(self.config['unread'], *args, **kwargs)

    @decorators.restrict_access(scope='privatemessages')
    def get_mentions(self, *args, **kwargs):
        """Return a listing of username mentions."""
        return self.get_content(self.config['mentions'], *args, **kwargs)

    @decorators.restrict_access(scope='privatemessages')
    @decorators.require_captcha
    def send_message(self, recipient, subject, message, from_sr=None,
                     captcha=None):
        """Compose a message to a user or to a subreddit's modmail."""
        if isinstance(recipient, objects.Subreddit):
            recipient = '/r/%s' % six.text_type(recipient)
        else:
            recipient = six.text_type(recipient)
        data = {'text': message, 'subject': subject, 'to': recipient}
        if from_sr:
            data['from_sr'] = six.text_type(from_sr)
        if captcha:
            data.update(captcha)
        response = self.request_json(self.config['compose'], data=data,
                                     retry_on_error=False)
        self.evict(self.config['sent'])
        return response
class NGOProjectForm(forms.ModelForm): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> model = Organization <NEW_LINE> fields = ('projects', ) | Add project to the list of projects where given NGO is a mentor. | 62598fc24527f215b58ea14c |
class UnderscoreClassMethodUser: <NEW_LINE> <INDENT> def __init__(self, last_login): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def _new(cls): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def is_active(self): <NEW_LINE> <INDENT> pass | User domain model. | 62598fc263b5f9789fe853ef |
class Filter(object):
    """Builder for a single ffmpeg filter-graph chain.

    >>> f = Filter()
    >>> f.iv.append("[0:v]")
    >>> f.add_filter("scale", "600", "400")
    >>> f.add_filter("setsar", "1")
    >>> f.ov.append("[v0]")
    >>> print(f.to_str())
    [0:v]scale=600:400,setsar=1[v0]
    >>> #
    >>> f = Filter()
    >>> f.iv.append("[0:v]")
    >>> f.iv.append("[1:v]")
    >>> f.add_filter("concat")
    >>> f.ov.append("[vc0]")
    >>> print(f.to_str())
    [0:v][1:v]concat[vc0]
    """

    def __init__(self):
        self.iv = []        # input video labels
        self.ia = []        # input audio labels
        self._filters = []  # rendered filter bodies, joined with ','
        self.ov = []        # output video labels
        self.oa = []        # output audio labels

    def _labels_to_str(self, v, a):
        # Pad the shorter label list with empty strings so video and
        # audio labels can be interleaved pairwise.
        if not v and a:
            v = [""] * len(a)
        if not a and v:
            a = [""] * len(v)
        return "".join(chain.from_iterable(zip(v, a)))

    def add_filter(self, name, *args, **kwargs):
        """Append a filter to the chain (empty names are ignored)."""
        if name:
            self._filters.append(
                mk_single_filter_body(name, *args, **kwargs))

    def insert_filter(self, i, name, *args, **kwargs):
        """Insert a filter at position *i* (empty names are ignored)."""
        if name:
            self._filters.insert(
                i, mk_single_filter_body(name, *args, **kwargs))

    def append_outlabel_v(self, templ="[v%(counter)s]"):
        """Append a generated, globally-unique video output label."""
        global _olab_counter
        _olab_counter[templ] += 1
        self.ov.append(templ % dict(
            counter=np.base_repr(_olab_counter[templ], 36)))

    def append_outlabel_a(self, templ="[a%(counter)s]"):
        """Append a generated, globally-unique audio output label."""
        global _olab_counter
        _olab_counter[templ] += 1
        self.oa.append(templ % dict(
            counter=np.base_repr(_olab_counter[templ], 36)))

    def to_str(self):
        """Render the chain as ``<inlabels><filters><outlabels>``."""
        return (self._labels_to_str(self.iv, self.ia)
                + ",".join(self._filters)
                + self._labels_to_str(self.ov, self.oa))
class ProgressViewBase(object): <NEW_LINE> <INDENT> def __init__(self, out): <NEW_LINE> <INDENT> self.out = out <NEW_LINE> <DEDENT> def write(self, args): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def flush(self): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def clear(self): <NEW_LINE> <INDENT> pass | a view base for progress reporting | 62598fc24428ac0f6e6587a2 |
class Patch(object):
    """JSON Patch (RFC 6902): a JSON document structure expressing a
    sequence of operations to apply to a JSON document; suitable for use
    with the HTTP PATCH method and identified by the
    "application/json-patch+json" media type.
    """

    # Mapping of RFC 6902 operation names to their handler classes.
    operation_types = {
        'add': AddOperation,
        'copy': CopyOperation,
        'move': MoveOperation,
        'remove': RemoveOperation,
        'replace': ReplaceOperation,
        'test': TestOperation,
    }

    def __init__(self, patch):
        self.patch = patch

    def get_operations(self):
        """Build an operation object for every entry in the patch."""
        return [self.get_operation(entry) for entry in self.patch]

    def get_operation(self, operation):
        """Validate a raw operation dict and instantiate its handler."""
        if 'op' not in operation:
            raise PatchException('Missing operation type')
        if 'path' not in operation:
            raise PatchException('Missing operation path')
        operation_class = self.get_operation_class(operation['op'])
        return operation_class(self, operation['path'],
                               operation.get('value'))

    def get_operation_class(self, op):
        """Return the handler class for *op*, or raise ``PatchException``."""
        try:
            return self.operation_types[op]
        except KeyError:
            raise PatchException('Unsupported operation: {0}'.format(op))

    def apply(self, obj, save=True):
        """Apply every operation to *obj* in order and return it."""
        for operation in self.get_operations():
            operation.apply(obj)
        return obj
class Adviser(object): <NEW_LINE> <INDENT> name = "" <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __del__(self): <NEW_LINE> <INDENT> pass | Representation of an adviser | 62598fc260cbc95b063645ba |
class getVoteDetail_args:
    """Thrift argument struct for ``getVoteDetail``.

    Attributes:
     - params
    """

    thrift_spec = (
        None,  # field ids are 1-based; slot 0 is unused
        (1, TType.STRING, 'params', None, None, ),
    )

    def __init__(self, params=None,):
        self.params = params

    def read(self, iprot):
        """Deserialize this struct from *iprot*."""
        # Fast path: accelerated C decoder when available.
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRING:
                    self.params = iprot.readString()
                else:
                    iprot.skip(ftype)
            else:
                # Unknown field: skip to stay forward compatible.
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        """Serialize this struct to *oprot*."""
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('getVoteDetail_args')
        if self.params is not None:
            oprot.writeFieldBegin('params', TType.STRING, 1)
            oprot.writeString(self.params)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        return

    def __hash__(self):
        value = 17
        value = (value * 31) ^ hash(self.params)
        return value

    def __repr__(self):
        # BUGFIX: dict.iteritems() does not exist on Python 3 and raised
        # AttributeError here; use items(), which works on both versions.
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.items()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class StampsInfoResponse(object):
    """Swagger-generated response model for stamp information.

    NOTE: This class was auto generated by the swagger code generator
    program; the Python-2-only ``six.iteritems`` shim has been replaced
    with native ``dict.items`` so the model no longer needs ``six``.
    """

    # attribute name -> declared swagger type
    swagger_types = {
        'code': 'int',
        'status': 'str',
        'stamps': 'StampsInfo'
    }

    # python attribute -> JSON key on the wire
    attribute_map = {
        'code': 'Code',
        'status': 'Status',
        'stamps': 'Stamps'
    }

    def __init__(self, code=None, status=None, stamps=None):
        self._code = None
        self._status = None
        self._stamps = None
        self.code = code  # required; the setter raises on None
        if status is not None:
            self.status = status
        if stamps is not None:
            self.stamps = stamps

    @property
    def code(self):
        """Numeric result code (required)."""
        return self._code

    @code.setter
    def code(self, code):
        if code is None:
            raise ValueError("Invalid value for `code`, must not be `None`")
        self._code = code

    @property
    def status(self):
        """Status text accompanying the code."""
        return self._status

    @status.setter
    def status(self, status):
        self._status = status

    @property
    def stamps(self):
        """Nested ``StampsInfo`` payload."""
        return self._stamps

    @stamps.setter
    def stamps(self, stamps):
        self._stamps = stamps

    def to_dict(self):
        """Return the model's properties as a plain dict (recursively)."""
        result = {}
        # BUGFIX: use dict.items() instead of six's iteritems().
        for attr, _ in self.swagger_types.items():
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value
        return result

    def to_str(self):
        """Return the pretty-printed string representation of the model."""
        return pformat(self.to_dict())

    def __repr__(self):
        return self.to_str()

    def __eq__(self, other):
        if not isinstance(other, StampsInfoResponse):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not self == other
class TestFile:
    """Test File."""

    @pytest.fixture(autouse=True)
    def setup(self):
        # Fresh File type instance for every test.
        self.t = configtypes.File()

    def test_validate_empty(self):
        with pytest.raises(configexc.ValidationError):
            self.t.validate("")

    def test_validate_empty_none_ok(self):
        # With none_ok an empty value is accepted.
        configtypes.File(none_ok=True).validate("")

    def test_validate_does_not_exist(self, os_path):
        os_path.expanduser.side_effect = lambda x: x
        os_path.isfile.return_value = False
        with pytest.raises(configexc.ValidationError):
            self.t.validate('foobar')

    def test_validate_exists_abs(self, os_path):
        os_path.expanduser.side_effect = lambda x: x
        os_path.isfile.return_value = True
        os_path.isabs.return_value = True
        self.t.validate('foobar')

    def test_validate_exists_not_abs(self, os_path):
        # Existing but relative paths are rejected.
        os_path.expanduser.side_effect = lambda x: x
        os_path.isfile.return_value = True
        os_path.isabs.return_value = False
        with pytest.raises(configexc.ValidationError):
            self.t.validate('foobar')

    def test_validate_expanduser(self, os_path):
        os_path.expanduser.side_effect = lambda x: x.replace('~', '/home/foo')
        os_path.isfile.side_effect = lambda path: path == '/home/foo/foobar'
        os_path.isabs.return_value = True
        self.t.validate('~/foobar')
        os_path.expanduser.assert_called_once_with('~/foobar')

    def test_validate_invalid_encoding(self, os_path, unicode_encode_err):
        os_path.isfile.side_effect = unicode_encode_err
        os_path.isabs.side_effect = unicode_encode_err
        with pytest.raises(configexc.ValidationError):
            self.t.validate('foobar')

    def test_transform(self, os_path):
        os_path.expanduser.side_effect = lambda x: x.replace('~', '/home/foo')
        assert self.t.transform('~/foobar') == '/home/foo/foobar'
        os_path.expanduser.assert_called_once_with('~/foobar')

    def test_transform_empty(self):
        assert self.t.transform('') is None
class ToolkitEditorFactory(BaseColorToolkitEditorFactory): <NEW_LINE> <INDENT> def to_qt4_color(self, editor): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> color = getattr(editor.object, editor.name + "_") <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> color = getattr(editor.object, editor.name) <NEW_LINE> <DEDENT> c = QtGui.QColor() <NEW_LINE> c.setRgbF(color[0], color[1], color[2]) <NEW_LINE> return c <NEW_LINE> <DEDENT> def from_qt4_color(self, color): <NEW_LINE> <INDENT> return (color.redF(), color.greenF(), color.blueF()) <NEW_LINE> <DEDENT> def str_color(self, color): <NEW_LINE> <INDENT> if type(color) in SequenceTypes: <NEW_LINE> <INDENT> return "(%d,%d,%d)" % ( int(color[0] * 255.0), int(color[1] * 255.0), int(color[2] * 255.0), ) <NEW_LINE> <DEDENT> return color | PyQt editor factory for color editors. | 62598fc2a8370b77170f065f |
@requires_data
@requires_fits
class test_wstat_single_array(SherpaTestCase):
    """Single PHA file with an array of backscal values.

    This really should use a grating PHA dataset, but it's not obvious
    we have one (along with the necessary responses) in the
    sherpa-test-data/ repository, so one is "hacked" in: the scalar
    backscal is replaced by a per-channel array scaled by 0.9. The
    reference statistic values were calculated with the scalar form and,
    as the tests currently fail, have not been validated.
    """

    def setUp(self):
        # Silence sherpa chatter while loading; restored in tearDown.
        self._old_logger_level = logger.getEffectiveLevel()
        logger.setLevel(logging.ERROR)
        ui.set_stat('wstat')
        ui.load_pha(1, self.make_path('3c273.pi'))
        # Replace the scalar backscal with an array (scaled by 0.9).
        nbins = ui.get_data(1).get_dep(False).size
        ui.set_backscal(1, backscale=0.9 * np.ones(nbins) * ui.get_backscal(1))
        ui.set_source(1, ui.powlaw1d.pl)
        ui.set_par("pl.gamma", 1.7)
        ui.set_par("pl.ampl", 1.7e-4)

    def tearDown(self):
        ui.clean()
        try:
            logger.setLevel(self._old_logger_level)
        except AttributeError:
            # setUp may not have run far enough to store the level.
            pass

    def _filter_data(self):
        # Keep roughly the 0.5-3 and 4-7 keV ranges.
        ui.ignore(None, 0.5)
        ui.ignore(3, 4)
        ui.ignore(7, None)

    def _check_stat(self, nbins, expected):
        # Guard the bin count first so a filtering change is reported
        # separately from a statistic change.
        self.assertEqual(nbins, ui.get_data().get_dep(True).size)
        self.assertAlmostEqual(expected, ui.calc_stat(), places=7)

    def test_wstat_grouped_all(self):
        self._check_stat(46, 71.21845954979574)

    def test_wstat_grouped_filtered(self):
        self._filter_data()
        self._check_stat(35, 45.6311990089982)

    def test_wstat_ungrouped_all(self):
        ui.ungroup()
        self._check_stat(1024, 663.0160968458746)

    def test_wstat_ungrouped_filtered(self):
        ui.ungroup()
        self._filter_data()
        self._check_stat(375, 420.8390856766203)
class Get: <NEW_LINE> <INDENT> class Header(Schema): <NEW_LINE> <INDENT> X_GitHub_Media_Type = fields.String(data_key='X-GitHub-Media-Type', description='You can check the current version of media type in responses.\n') <NEW_LINE> Accept = fields.String(description='Is used to set specified media type.') <NEW_LINE> X_RateLimit_Limit = fields.Integer(data_key='X-RateLimit-Limit') <NEW_LINE> X_RateLimit_Remaining = fields.Integer(data_key='X-RateLimit-Remaining') <NEW_LINE> X_RateLimit_Reset = fields.Integer(data_key='X-RateLimit-Reset') <NEW_LINE> X_GitHub_Request_Id = fields.Integer(data_key='X-GitHub-Request-Id') <NEW_LINE> <DEDENT> class Path(Schema): <NEW_LINE> <INDENT> username = fields.String(required=True, description='Name of user.') | List a user's followers | 62598fc2adb09d7d5dc0a7fc |
class ExceptionsDirEntryData(DataContainer):
    """Holds the data related to SEH (and stack unwinding, in particular).

    struct      an instance of RUNTIME_FUNCTION
    unwindinfo  an instance of UNWIND_INFO
    """
class Command(BaseCommand): <NEW_LINE> <INDENT> def handle(self, *args, **options): <NEW_LINE> <INDENT> self.stdout.write('waiting for databse...') <NEW_LINE> db_conn = None <NEW_LINE> while not db_conn: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> db_conn = connections['default'] <NEW_LINE> <DEDENT> except OperationalError: <NEW_LINE> <INDENT> self.stdout.write('Database unavailable, waiting 1 second...') <NEW_LINE> time.sleep(1) <NEW_LINE> <DEDENT> <DEDENT> self.stdout.write(self.style.SUCCESS('Database available!')) | Django command to pause execution until database is available. | 62598fc2796e427e5384ea14 |
class ShapeFreeCallback(b2.queryCallback):
    """Box2D query callback for shape collision detection.

    Determines whether sweeping ``test_fixtures`` along the straight
    line from ``pose_a`` to ``pose_b`` (angles interpolated linearly, so
    large rotations are inaccurate) stays collision free against every
    fixture reported by an AABB query, except those in ``whitelist``.
    Each test fixture is cast (time-of-impact) against each reported
    fixture. This callback-class shape is an artifact of pybox2d's API.

    Attributes:
        test_fixtures (list): fixtures to check for collision
        whitelist (list): fixtures to skip during detection
        pose_a (tuple): start pose as an (x, y, theta) tuple
        pose_b (tuple): end pose as an (x, y, theta) tuple
        collision_free (bool): result; True after the query iff the
            path is collision free
    """

    def __init__(self, test_fixtures, pose_a, pose_b=None, whitelist=None):
        b2.queryCallback.__init__(self)
        self.test_fixtures = test_fixtures
        self.whitelist = whitelist if whitelist is not None else []
        self.pose_a = pose_a
        # A single pose means "test in place": start == end of the sweep.
        self.pose_b = pose_b if pose_b is not None else pose_a
        self.collision_free = True

    def ReportFixture(self, other_fixture):
        # Ignore whitelisted fixtures and the cast shapes themselves.
        if other_fixture in self.whitelist:
            return True
        if other_fixture in self.test_fixtures:
            return True
        # Sweep describing the moving test shapes ...
        moving = b2.sweep(a0=self.pose_a[2], a=self.pose_b[2],
                          c0=self.pose_a[0:2], c=self.pose_b[0:2])
        # ... and a degenerate (stationary) sweep for the obstacle.
        static = b2.sweep(a=other_fixture.body.angle,
                          a0=other_fixture.body.angle,
                          c=other_fixture.body.position,
                          c0=other_fixture.body.position)
        for fixture in self.test_fixtures:
            (state, impact_time) = b2.timeOfImpact(
                shapeA=fixture.shape,
                shapeB=other_fixture.shape,
                sweepA=moving, sweepB=static, tMax=1.)
            # Free only if the sweep completes (t == 1) while separated
            # or at most touching.
            self.collision_free &= (
                (impact_time == 1.) and
                (state == b2.TOIOutput.e_separated or
                 state == b2.TOIOutput.e_touching))
            if not self.collision_free:
                break
        return self.collision_free
class NamespaceUpdateSerializer(NamespaceSerializer): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> model = models.Namespace <NEW_LINE> fields = ( 'id', 'name', 'company', 'email', 'avatar_url', 'description', 'links', 'groups', 'resources' ) <NEW_LINE> read_only_fields = ('name', ) | NamespaceSerializer but read_only 'name'. | 62598fc276e4537e8c3ef825 |
class Operation(msrest.serialization.Model):
    """Network REST API operation definition.

    :param name: Operation name: {provider}/{resource}/{operation}.
    :type name: str
    :param display: Display metadata associated with the operation.
    :type display: ~azure.mgmt.network.v2021_05_01.models.OperationDisplay
    :param origin: Origin of the operation.
    :type origin: str
    :param service_specification: Specification of the service.
    :type service_specification:
     ~azure.mgmt.network.v2021_05_01.models.OperationPropertiesFormatServiceSpecification
    """

    # msrest serialization map: attribute -> wire key/type.
    _attribute_map = {
        'name': {'key': 'name', 'type': 'str'},
        'display': {'key': 'display', 'type': 'OperationDisplay'},
        'origin': {'key': 'origin', 'type': 'str'},
        'service_specification': {
            'key': 'properties.serviceSpecification',
            'type': 'OperationPropertiesFormatServiceSpecification'},
    }

    def __init__(self, **kwargs):
        super(Operation, self).__init__(**kwargs)
        # Every field is optional and defaults to None.
        self.name = kwargs.get('name', None)
        self.display = kwargs.get('display', None)
        self.origin = kwargs.get('origin', None)
        self.service_specification = kwargs.get('service_specification', None)
class Restful(View):
    """Used to collaborate with a restful Ext.data.Store."""

    def post(self, request, app_label=None, actor=None, pk=None):
        # Create a new row from the JSON payload in the POST body.
        table = requested_actor(app_label, actor)
        ar = table.request(request=request)
        instance = ar.create_instance()
        if ar.actor.handle_uploaded_files is not None:
            ar.actor.handle_uploaded_files(instance, request)
        data = json.loads(request.POST.get('rows'))
        ar.form2obj_and_save(data, instance, True)
        ar.set_response(rows=[ar.ah.store.row2dict(
            ar, instance, ar.ah.store.list_fields)])
        return json_response(ar.response)

    def delete(self, request, app_label=None, actor=None, pk=None):
        table = requested_actor(app_label, actor)
        ar = table.request(request=request)
        ar.set_selected_pks(pk)
        return delete_element(ar, ar.selected_rows[0])

    def get(self, request, app_label=None, actor=None, pk=None):
        table = requested_actor(app_label, actor)
        # Only collection GET is supported here.
        assert pk is None, 20120814
        ar = table.request(request=request)
        rh = ar.ah
        rows = [rh.store.row2dict(ar, row, rh.store.list_fields)
                for row in ar.sliced_data_iterator]
        kw = dict(count=ar.get_total_count(), rows=rows)
        kw.update(title=str(ar.get_title()))
        return json_response(kw)

    def put(self, request, app_label=None, actor=None, pk=None):
        table = requested_actor(app_label, actor)
        ar = table.request(request=request)
        ar.set_selected_pks(pk)
        elem = ar.selected_rows[0]
        rh = ar.ah
        # Django does not parse a PUT body; decode it manually.
        data = json.loads(http.QueryDict(request.body).get('rows'))
        action = table.get_url_action(table.default_list_action_name)
        ar = table.request(request=request, action=action)
        ar.renderer = settings.SITE.kernel.extjs_renderer
        ar.form2obj_and_save(data, elem, False)
        ar.set_response(rows=[rh.store.row2dict(
            ar, elem, rh.store.list_fields)])
        return json_response(ar.response)
class SineTerrainGenerator(TerrainGenerator):
    """TerrainGenerator implementation that creates wavy terrain using
    sine waves.

    Nothing is randomized, so ``seed`` is unused. This implementation is
    not recommended once a better one is available.
    """

    def __init__(
        self,
        seed: int,
        base_height: float = 20.0,
        period: float = 9.0,
        amplitude: float = 7.5,
        x_scale: float = 1.0,
        y_scale: float = 1.2,
    ):
        super().__init__(seed)
        # Convert the wave period into an angular frequency.
        self.frequency: float = math.pi * 2.0 / period
        self.amplitude: float = amplitude
        self.base_height: float = base_height
        self.x_scale: float = x_scale
        self.y_scale: float = y_scale

    def height_at(self, x_pos: int, y_pos: int) -> float:
        """Return the height at (x_pos, y_pos): base height plus the
        average of an x-axis and a y-axis sine wave."""
        wave_x = math.sin(x_pos * self.frequency * self.x_scale)
        wave_y = math.sin(y_pos * self.frequency * self.y_scale)
        return self.base_height + self.amplitude * (wave_x + wave_y) / 2
class DescribeProductsResponse(AbstractModel):
    """DescribeProducts response structure."""

    def __init__(self):
        # All fields are populated by _deserialize from the API response.
        self.TotalCount = None
        self.Products = None
        self.RequestId = None

    def _deserialize(self, params):
        self.TotalCount = params.get("TotalCount")
        if params.get("Products") is not None:
            # Rehydrate each entry into a ProductInfo model.
            self.Products = []
            for item in params.get("Products"):
                obj = ProductInfo()
                obj._deserialize(item)
                self.Products.append(obj)
        self.RequestId = params.get("RequestId")
class ListaCardapio(RestauranteMixin, View): <NEW_LINE> <INDENT> template = 'restaurante/cardapio/lista_cardapio.html' <NEW_LINE> form_class = Form_Cardapio_Default <NEW_LINE> def get(self, request): <NEW_LINE> <INDENT> cardapios = self.restaurante.getEvery_menu() <NEW_LINE> form = self.form_class() <NEW_LINE> return render(request, self.template, {'cardapios':cardapios, 'form':form}) | Mostra lista de cardapios em template | 62598fc271ff763f4b5e79fc |
class PianitzaTestCase(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.deck = initialize_deck() <NEW_LINE> self.hands = get_hands(self.deck) <NEW_LINE> self.all_active = list(range(4)) <NEW_LINE> self.suit_conflict = [(6,'♣'), (9,'♣'), (7,'♣'), (8,'♣')] <NEW_LINE> self.rare_conflict = [(9,'♣'), (9,'♥'), (9,'♠'), (9,'♦')] <NEW_LINE> <DEDENT> def test_initialize_deck(self): <NEW_LINE> <INDENT> self.assertTrue(len(self.deck) == len(set(self.deck)) == 36) <NEW_LINE> <DEDENT> def test_get_hands(self): <NEW_LINE> <INDENT> for hand in self.hands: <NEW_LINE> <INDENT> self.assertTrue(len(hand) == len(set(hand)) == 9) <NEW_LINE> <DEDENT> <DEDENT> def test_compare_cards(self): <NEW_LINE> <INDENT> self.assertTrue(compare_cards(self.suit_conflict, self.all_active) == [1]) <NEW_LINE> self.assertTrue(compare_cards(self.rare_conflict, self.all_active) == self.all_active) | Tests for 'jug_artist.py'. | 62598fc2091ae35668704ea7 |
class UserProfileValuesQuerySet(ValuesQuerySet):
    """Custom ValuesQuerySet to support privacy.

    Note that when you specify fields in values() you need to include
    the related privacy field in your query, e.g.
    ``.values('first_name', 'privacy_first_name')``.
    """

    def _clone(self, *args, **kwargs):
        clone = super(UserProfileValuesQuerySet, self)._clone(*args, **kwargs)
        # Carry the requested privacy level across clones.
        clone._privacy_level = getattr(self, '_privacy_level', None)
        return clone

    def iterator(self):
        extra_names = self.query.extra_select.keys()
        field_names = self.field_names
        aggregate_names = self.query.aggregate_select.keys()
        names = extra_names + field_names + aggregate_names
        # For every selected field that has privacy support, remember the
        # row positions of its privacy level and of the field itself.
        privacy_fields = [
            (names.index('privacy_%s' % field), names.index(field), field)
            for field in set(UserProfile._privacy_fields) & set(names)]
        for row in self.query.get_compiler(self.db).results_iter():
            row = list(row)
            for level_idx, field_idx, field in privacy_fields:
                # Mask values the requester is not allowed to see.
                if row[level_idx] < self._privacy_level:
                    row[field_idx] = UserProfile._privacy_fields[field]
            yield dict(zip(names, row))
class OneTimeKeyServlet(RestServlet):
    """POST /keys/claim HTTP/1.1
    {
      "one_time_keys": {
        "<user_id>": { "<device_id>": "<algorithm>" } } }

    HTTP/1.1 200 OK
    {
      "one_time_keys": {
        "<user_id>": {
          "<device_id>": {
            "<algorithm>:<key_id>": "<key_base64>"
          } } } }
    """

    PATTERNS = client_patterns("/keys/claim$")

    def __init__(self, hs):
        super().__init__()
        self.auth = hs.get_auth()
        self.e2e_keys_handler = hs.get_e2e_keys_handler()

    async def on_POST(self, request):
        # Guests are allowed to claim one-time keys.
        await self.auth.get_user_by_req(request, allow_guest=True)
        timeout = parse_integer(request, "timeout", 10 * 1000)
        body = parse_json_object_from_request(request)
        result = await self.e2e_keys_handler.claim_one_time_keys(body, timeout)
        return 200, result
class SandboxService(object): <NEW_LINE> <INDENT> def __init__(self, params, service_name, uri='lxc:///'): <NEW_LINE> <INDENT> self.virsh = virsh.Virsh(uri=uri, ignore_status=True) <NEW_LINE> self.command = lvsb_base.SandboxCommandBase(params, service_name) <NEW_LINE> self.command.BINARY_PATH_PARAM = 'virt_sandbox_service_binary' <NEW_LINE> self.command.add_optarg('--connect', uri) <NEW_LINE> self._run = utils.run <NEW_LINE> self.service = SpecificServiceManager(self.service_name, run=self._run) <NEW_LINE> self._bind_service_commands() <NEW_LINE> <DEDENT> def _bind_service_commands(self): <NEW_LINE> <INDENT> for command in COMMANDS: <NEW_LINE> <INDENT> setattr(self, command, getattr(self.service, command)) <NEW_LINE> <DEDENT> <DEDENT> def __getstate__(self): <NEW_LINE> <INDENT> return {'command': self.command, 'run': self._run, 'virsh': dict(virsh)} <NEW_LINE> <DEDENT> def __setstate__(self, state): <NEW_LINE> <INDENT> self.virsh = virsh.Virsh(**state['virsh']) <NEW_LINE> self.command = state['command'] <NEW_LINE> self._run = state['run'] <NEW_LINE> self.service = SpecificServiceManager(self.service_name, run=self._run) <NEW_LINE> self._bind_service_commands() <NEW_LINE> <DEDENT> @property <NEW_LINE> def service_name(self): <NEW_LINE> <INDENT> return self.command.name <NEW_LINE> <DEDENT> def __get_uri__(self): <NEW_LINE> <INDENT> return self.virsh.uri <NEW_LINE> <DEDENT> def __set_uri__(self, uri): <NEW_LINE> <INDENT> self.virsh.uri = uri <NEW_LINE> <DEDENT> def __del_uri__(self): <NEW_LINE> <INDENT> self.virsh.uri = None <NEW_LINE> <DEDENT> uri = property(__get_uri__, __set_uri__, __del_uri__) <NEW_LINE> def create(self): <NEW_LINE> <INDENT> return self.command.run(extra='create') <NEW_LINE> <DEDENT> def destroy(self): <NEW_LINE> <INDENT> return self.command.run(extra='destroy') <NEW_LINE> <DEDENT> @property <NEW_LINE> def list(self): <NEW_LINE> <INDENT> cmdresult = self.virsh.dom_list() <NEW_LINE> result = [] <NEW_LINE> column_names = None <NEW_LINE> for lineno, 
line in cmdresult.stdout.strip(): <NEW_LINE> <INDENT> if lineno == 0: <NEW_LINE> <INDENT> column_names = line.strip().split() <NEW_LINE> assert len(column_names) > 2 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> assert column_names is not None <NEW_LINE> items = [(column_names[index].lower(), value.lower()) for index, value in line.strip().split()] <NEW_LINE> result.append(dict(items)) <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> @property <NEW_LINE> def xmlstr(self): <NEW_LINE> <INDENT> return self.virsh.dumpxml(self.service_name).stdout.strip() | Management for a single new/existing sandboxed service | 62598fc27c178a314d78d71f |
class ModelStorageRelationDomain2Target(ModelSQL): <NEW_LINE> <INDENT> __name__ = 'test.modelstorage.relation_domain2.target' <NEW_LINE> relation2 = fields.Many2One( 'test.modelstorage.relation_domain.target', "Relation 2") | First Target of Model stored containing a relation field with a domain | 62598fc27047854f4633f653 |
class TestExternalProtosComponent(ExternalProtosMixin, TestProtobufLibraryComponent): <NEW_LINE> <INDENT> @property <NEW_LINE> def exists(self): <NEW_LINE> <INDENT> return self.is_external_protos <NEW_LINE> <DEDENT> def generate_target_arguments(self): <NEW_LINE> <INDENT> args = super(MainExternalProtosComponent, self).generate_target_arguments() <NEW_LINE> args.update(self.external_protos_arguments) <NEW_LINE> return args <NEW_LINE> <DEDENT> def generate_subdirectory_code(self): <NEW_LINE> <INDENT> code = super(MainExternalProtosComponent, self).generate_subdirectory_code() <NEW_LINE> return code + self.external_protos_contents | Generates src/test/proto for external-protos. | 62598fc25fdd1c0f98e5e213 |
class ShiftPlugin(plugin_object.GeneralPlugin): <NEW_LINE> <INDENT> implements = [interfaces.DecodingRoutineIdentifier] <NEW_LINE> version = 1.0 <NEW_LINE> def identify(self, vivisect_workspace, fvas): <NEW_LINE> <INDENT> candidate_functions = {} <NEW_LINE> for fva in fvas: <NEW_LINE> <INDENT> f = viv_utils.Function(vivisect_workspace, fva) <NEW_LINE> mnems = set([]) <NEW_LINE> shift_mnems = set(["shl", "shr", "sar", "sal", "rol", "ror"]) <NEW_LINE> for bb in f.basic_blocks: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> for i in bb.instructions: <NEW_LINE> <INDENT> mnems.add(i.mnem) <NEW_LINE> if i.mnem in shift_mnems: <NEW_LINE> <INDENT> self.d("shift instruction: %s va: 0x%x function: 0x%x", i, i.va, f.va) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> except envi.InvalidInstruction: <NEW_LINE> <INDENT> self.w("Invalid instruction encountered in basic block, skipping: 0x%x", bb.va) <NEW_LINE> continue <NEW_LINE> <DEDENT> <DEDENT> candidate_functions[fva] = 1 - (len(shift_mnems - mnems) / float(len(shift_mnems))) <NEW_LINE> self.d("0x%x %f", fva, candidate_functions[fva]) <NEW_LINE> <DEDENT> return candidate_functions <NEW_LINE> <DEDENT> def score(self, function_vas, vivisect_workspace=None): <NEW_LINE> <INDENT> return function_vas | Identify shift instructions. | 62598fc2ec188e330fdf8b14 |
class DnsPoliciesDeleteResponse(_messages.Message): <NEW_LINE> <INDENT> pass | An empty DnsPoliciesDelete response. | 62598fc2bf627c535bcb1727 |
class Meta: <NEW_LINE> <INDENT> app_label = 'sample_resource' | Meta class for VtPlanner model. | 62598fc2099cdd3c63675522 |
class IProtocol(IPlugin): <NEW_LINE> <INDENT> metadata = {} <NEW_LINE> @abc.abstractmethod <NEW_LINE> def protocol_parameters_definition(self): <NEW_LINE> <INDENT> return {} <NEW_LINE> <DEDENT> @abc.abstractmethod <NEW_LINE> def controller_parameters_definition(self): <NEW_LINE> <INDENT> return {} <NEW_LINE> <DEDENT> @abc.abstractmethod <NEW_LINE> def event_definition(self): <NEW_LINE> <INDENT> return {} <NEW_LINE> <DEDENT> @abc.abstractmethod <NEW_LINE> def stream_definition(self): <NEW_LINE> <INDENT> return {} <NEW_LINE> <DEDENT> @abc.abstractmethod <NEW_LINE> def trial_parameters(self): <NEW_LINE> <INDENT> return TrialParameters({},{}) <NEW_LINE> <DEDENT> @abc.abstractmethod <NEW_LINE> def start_of_trial(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @abc.abstractmethod <NEW_LINE> def process_event_request(self, event): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @abc.abstractmethod <NEW_LINE> def process_stream_request(self, stream): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @abc.abstractmethod <NEW_LINE> def end_of_trial(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @abc.abstractmethod <NEW_LINE> def trial_iti_milliseconds(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def protocol_description(self): <NEW_LINE> <INDENT> return self.__class__.__name__ + ' protocol' | Interface for plugins defining an experimental protocol
Usage
=====
User-provided protocol plugins should subclass IProtocol, provide traits-based UI
for configuration and override next_trial_parameters(), protocol_definition(), event_definition(),
data_values(), and process_completed_trial().
Example
======= | 62598fc2a219f33f346c6a88 |
class Child(Parent): <NEW_LINE> <INDENT> def childMethod(self): <NEW_LINE> <INDENT> print('calling child method') | childMethod. | 62598fc2d486a94d0ba2c252 |
class Gdm(Plugin, RedHatPlugin, DebianPlugin, UbuntuPlugin): <NEW_LINE> <INDENT> plugin_name = 'gdm' <NEW_LINE> profiles = ('desktop',) <NEW_LINE> def setup(self): <NEW_LINE> <INDENT> self.add_copy_spec("/etc/gdm/*") <NEW_LINE> self.add_journal(units="gdm") <NEW_LINE> self.add_cmd_output("systemctl status gdm.service") | GNOME display manager
| 62598fc2377c676e912f6eb4 |
class _RequestCallbackManager(_CallbackManager): <NEW_LINE> <INDENT> def validate_callback(self, callback): <NEW_LINE> <INDENT> super(_RequestCallbackManager, self).validate_callback(callback) <NEW_LINE> if isinstance(callback, (type, types.ClassType)): <NEW_LINE> <INDENT> if not issubclass(callback, RequestCallback): <NEW_LINE> <INDENT> raise ValueError("Type mismatch on callback argument") <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> if not issubclass(callback.__class__, RequestCallback): <NEW_LINE> <INDENT> raise ValueError("Type mismatch on callback argument") <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def handle_fire(self, request_callback, request): <NEW_LINE> <INDENT> if isinstance(request_callback, (type, types.ClassType)): <NEW_LINE> <INDENT> callback = request_callback() <NEW_LINE> callback.on_request(request) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> request_callback.on_request(request) | Manager for {@link RequestCallback} message callbacks. | 62598fc297e22403b383b18b |
class IntCompareFilter(DecimalCompareFilter): <NEW_LINE> <INDENT> fields = [ SimpleCompareField, forms.IntegerField(), ] | x greater, less, etc than int field | 62598fc2cc40096d6161a319 |
class PhysicalSensor(VirtualSensor): <NEW_LINE> <INDENT> def __init__(self, broker, device=None, **kwargs): <NEW_LINE> <INDENT> super(PhysicalSensor, self).__init__(broker, **kwargs) <NEW_LINE> if device is None: <NEW_LINE> <INDENT> device = DeviceDescriptor(self, path=self.path) <NEW_LINE> log.debug("Setting default device description for sensor %s to %s" % (self, device)) <NEW_LINE> <DEDENT> elif not isinstance(device, DeviceDescriptor): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> device = DeviceDescriptor.from_path(device) <NEW_LINE> <DEDENT> except (ValueError, TypeError): <NEW_LINE> <INDENT> device = DeviceDescriptor(device) <NEW_LINE> <DEDENT> <DEDENT> self.device = device | A PhysicalSensor directly manages a physical sensing device attached to (or on board) the
scale client's host. This is mostly a convention for establishing this contract, but in
the future it may serve as a point of optimizing physical resource management. | 62598fc23617ad0b5ee063c9 |
class TaxiiConnectorConfigurationException(CommonConfigException): <NEW_LINE> <INDENT> pass | Exception class for usage errors with TAXII configuration. | 62598fc27047854f4633f655 |
class TargetReports(object): <NEW_LINE> <INDENT> swagger_types = { 'reports': 'list[TargetReport]' } <NEW_LINE> attribute_map = { 'reports': 'reports' } <NEW_LINE> def __init__(self, reports=None): <NEW_LINE> <INDENT> self._reports = None <NEW_LINE> self.discriminator = None <NEW_LINE> if reports is not None: <NEW_LINE> <INDENT> self.reports = reports <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def reports(self): <NEW_LINE> <INDENT> return self._reports <NEW_LINE> <DEDENT> @reports.setter <NEW_LINE> def reports(self, reports): <NEW_LINE> <INDENT> self._reports = reports <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in six.iteritems(self.swagger_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pprint.pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, TargetReports): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other | NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually. | 62598fc250812a4eaa620d26 |
class TargetDetector(object): <NEW_LINE> <INDENT> def __init__(self, img): <NEW_LINE> <INDENT> self.img = img <NEW_LINE> self.img_hsv = cv2.cvtColor(self.img, cv2.COLOR_BGR2HSV) <NEW_LINE> <DEDENT> def threshold(self, min, max): <NEW_LINE> <INDENT> THRESHOLD_MIN = np.array(min, np.uint8) <NEW_LINE> THRESHOLD_MAX = np.array(max, np.uint8) <NEW_LINE> self.thresh = cv2.inRange(self.img_hsv, THRESHOLD_MIN, THRESHOLD_MAX) <NEW_LINE> <DEDENT> def findRect(self): <NEW_LINE> <INDENT> result = [] <NEW_LINE> img_thresh, contours, hierarchy = cv2.findContours(self.thresh, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE) <NEW_LINE> count = 0 <NEW_LINE> for cont in contours: <NEW_LINE> <INDENT> epsilon = 0.01 * cv2.arcLength(cont, True) <NEW_LINE> approx = cv2.approxPolyDP(cont, epsilon, True) <NEW_LINE> if len(approx) == 4 and cv2.contourArea(approx) > 5: <NEW_LINE> <INDENT> cv2.drawContours(self.img, contours, count, (255, 0, 0), 4) <NEW_LINE> result.append(approx) <NEW_LINE> <DEDENT> count += 1 <NEW_LINE> <DEDENT> return result | description of class | 62598fc2be7bc26dc9251f9d |
class ObservableFunction: <NEW_LINE> <INDENT> def __init__(self, func): <NEW_LINE> <INDENT> functools.update_wrapper(self, func) <NEW_LINE> self.func = func <NEW_LINE> self.observers = {} <NEW_LINE> <DEDENT> def add_observer(self, observer, identify_observed=False): <NEW_LINE> <INDENT> if hasattr(observer, "__self__"): <NEW_LINE> <INDENT> result = self._add_bound_method(observer, identify_observed) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result = self._add_function(observer, identify_observed) <NEW_LINE> <DEDENT> return result <NEW_LINE> <DEDENT> def _add_function(self, func, identify_observed): <NEW_LINE> <INDENT> key = self.make_key(func) <NEW_LINE> if key not in self.observers: <NEW_LINE> <INDENT> self.observers[key] = ObserverFunction( func, identify_observed, (key, self.observers)) <NEW_LINE> return True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> def _add_bound_method(self, bound_method, identify_observed): <NEW_LINE> <INDENT> inst = bound_method.__self__ <NEW_LINE> method_name = bound_method.__name__ <NEW_LINE> key = self.make_key(bound_method) <NEW_LINE> if key not in self.observers: <NEW_LINE> <INDENT> self.observers[key] = ObserverBoundMethod( inst, method_name, identify_observed, (key, self.observers)) <NEW_LINE> return True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> def discard_observer(self, observer): <NEW_LINE> <INDENT> discarded = False <NEW_LINE> key = self.make_key(observer) <NEW_LINE> if key in self.observers: <NEW_LINE> <INDENT> del self.observers[key] <NEW_LINE> discarded = True <NEW_LINE> <DEDENT> return discarded <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def make_key(observer): <NEW_LINE> <INDENT> if hasattr(observer, "__self__"): <NEW_LINE> <INDENT> inst = observer.__self__ <NEW_LINE> method_name = observer.__name__ <NEW_LINE> key = (id(inst), method_name) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> key = id(observer) <NEW_LINE> <DEDENT> 
return key <NEW_LINE> <DEDENT> def __call__(self, *arg, **kw): <NEW_LINE> <INDENT> result = self.func(*arg, **kw) <NEW_LINE> for key in self.observers: <NEW_LINE> <INDENT> self.observers[key](self, *arg, **kw) <NEW_LINE> <DEDENT> return result | A function which can be observed.
I wrap a function and allow other callables to register as observers of it.
If you have a function func, then ObservableFunction(func) is a wrapper
around func which can accept observers.
Add and remove observers using:
add_observer(observer)
registers observer to be called whenever I am called
discard_observer(observer)
Removes an observer from the set of observers.
Attributes:
func: The function I wrap.
observers: Dict mapping keys unique to each observer to that observer.
If this sounds like a job better served by a set, you're probably
right and making that change is planned. It's delicate because it
requires making sure the observer objects are hashable and have a
proper notion of equality. | 62598fc27cff6e4e811b5ca7 |
class RecipeImageSerializer(serializers.ModelSerializer): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> model = Recipe <NEW_LINE> fields = ('id', 'image') <NEW_LINE> read_only_fields = ('id',) | Serializer from uploading images to recipies | 62598fc2099cdd3c63675523 |
class DismodDBError(CascadeATError): <NEW_LINE> <INDENT> pass | Raised when there is an error with running the dismod_db script. | 62598fc2f548e778e596b821 |
class CachedEventEmitter(EventEmitter): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.use_cache = False <NEW_LINE> self.processed_events = None <NEW_LINE> <DEDENT> def init(self): <NEW_LINE> <INDENT> super().init() <NEW_LINE> self.processed_events = deque() <NEW_LINE> <DEDENT> def uninit(self): <NEW_LINE> <INDENT> super().uninit() <NEW_LINE> self.processed_events = None <NEW_LINE> <DEDENT> def process_event(self, event): <NEW_LINE> <INDENT> super().process_event(event) <NEW_LINE> if self.use_cache: <NEW_LINE> <INDENT> self.processed_events.append(event) <NEW_LINE> <DEDENT> <DEDENT> def event_loop(self, timeout=None, stop_on=None): <NEW_LINE> <INDENT> if timeout: <NEW_LINE> <INDENT> end_time = time.time() + timeout <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> end_time = None <NEW_LINE> <DEDENT> while self.processed_events and (timeout is None or timeout > 0): <NEW_LINE> <INDENT> event = self.processed_events.popleft() <NEW_LINE> if stop_on and stop_on(event): <NEW_LINE> <INDENT> return event <NEW_LINE> <DEDENT> if end_time: <NEW_LINE> <INDENT> timeout = end_time - time.time() <NEW_LINE> <DEDENT> <DEDENT> return super().event_loop(timeout, stop_on) | This class adds cache mechanics to EventEmitter, to allow suspended
emmiter buffing the incoming events. | 62598fc226068e7796d4cbde |
class LowRankMatrix(Base, LatentStructure): <NEW_LINE> <INDENT> def __init__(self, n_samples=100, n_features=100, effective_rank=10, tail_strength=0.5, random_state=None): <NEW_LINE> <INDENT> generator = check_random_state(random_state) <NEW_LINE> n = min(n_samples, n_features) <NEW_LINE> from .utils import qr_economic <NEW_LINE> u, _ = qr_economic(generator.randn(n_samples, n)) <NEW_LINE> v, _ = qr_economic(generator.randn(n_features, n)) <NEW_LINE> singular_ind = np.arange(n, dtype=np.float64) <NEW_LINE> low_rank = (1 - tail_strength) * np.exp(-1.0 * (singular_ind / effective_rank) ** 2) <NEW_LINE> tail = tail_strength * np.exp(-0.1 * singular_ind / effective_rank) <NEW_LINE> s = np.identity(n) * (low_rank + tail) <NEW_LINE> Base.__init__(self, np.dot(np.dot(u, s), v.T)) <NEW_LINE> self.descr['mask'] = generator.randint(3, size=self._X.shape) <NEW_LINE> <DEDENT> def matrix_completion_task(self): <NEW_LINE> <INDENT> X = sparse.csr_matrix(self._X * (self.descr['mask'] == 0)) <NEW_LINE> Y = sparse.csr_matrix(self._X * (self.descr['mask'] == 1)) <NEW_LINE> assert X.nnz == (self.descr['mask'] == 0).sum() <NEW_LINE> assert Y.nnz == (self.descr['mask'] == 1).sum() <NEW_LINE> return X, Y | Mostly low rank random matrix with bell-shaped singular values profile.
Most of the variance can be explained by a bell-shaped curve of width
effective_rank: the low rank part of the singular values profile is::
(1 - tail_strength) * exp(-1.0 * (i / effective_rank) ** 2)
The remaining singular values' tail is fat, decreasing as::
tail_strength * exp(-0.1 * i / effective_rank).
The low rank part of the profile can be considered the structured
signal part of the data while the tail can be considered the noisy
part of the data that cannot be summarized by a low number of linear
components (singular vectors).
This kind of singular profiles is often seen in practice, for instance:
- graw level pictures of faces
- TF-IDF vectors of text documents crawled from the web | 62598fc2377c676e912f6eb5 |
class KeyValueTable(Table): <NEW_LINE> <INDENT> def get_insert_defaults_query(self): <NEW_LINE> <INDENT> sql_method = "_executemany" <NEW_LINE> query = "INSERT OR IGNORE INTO {0} VALUES (?, ?)".format(self.name) <NEW_LINE> args = self.default_values.items() <NEW_LINE> return(sql_method, (query, args)) <NEW_LINE> <DEDENT> def get_update_values_query(self, data): <NEW_LINE> <INDENT> sql_method = "_executemany" <NEW_LINE> query = "UPDATE {0} SET value = ? WHERE key = ?".format(self.name) <NEW_LINE> args = zip(data.values(), data.keys()) <NEW_LINE> return(sql_method, (query, args)) <NEW_LINE> <DEDENT> def process_results(self, data): <NEW_LINE> <INDENT> data_dict = {} <NEW_LINE> for key, value in data: <NEW_LINE> <INDENT> if key in self.default_types: <NEW_LINE> <INDENT> if self.default_types[key] == bool: <NEW_LINE> <INDENT> if value == "0": value = False <NEW_LINE> elif value == "1": value = True <NEW_LINE> <DEDENT> if self.default_types[key] == int: value = int(value) <NEW_LINE> data_dict[key.encode()] = value <NEW_LINE> <DEDENT> <DEDENT> return(data_dict) | Use this class for a table that use key-value pairs
where the value for each key may be a different datatype (i.e. settings) | 62598fc25fdd1c0f98e5e216 |
class BRPostalCodeField(RegexValidator): <NEW_LINE> <INDENT> default_error_messages = { 'invalid': _('Enter a valid postal code in the format XXXXX-XXX.'), } <NEW_LINE> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super(BRPostalCodeField, self).__init__(r'^\d{5}-\d{3}$', *args, **kwargs) | A form field that validates input as a Brazilian zip code, with the format XXXXX-XXX. | 62598fc2cc40096d6161a31a |
class DeckScreengrid(BaseDeckGLViz): <NEW_LINE> <INDENT> viz_type = "deck_screengrid" <NEW_LINE> verbose_name = _("Deck.gl - Screen Grid") <NEW_LINE> spatial_control_keys = ["spatial"] <NEW_LINE> is_timeseries = True <NEW_LINE> def query_obj(self) -> QueryObjectDict: <NEW_LINE> <INDENT> self.is_timeseries = bool( self.form_data.get("time_grain_sqla") or self.form_data.get("granularity") ) <NEW_LINE> return super().query_obj() <NEW_LINE> <DEDENT> def get_properties(self, data: Dict[str, Any]) -> Dict[str, Any]: <NEW_LINE> <INDENT> return { "position": data.get("spatial"), "weight": (data.get(self.metric_label) if self.metric_label else None) or 1, "__timestamp": data.get(DTTM_ALIAS) or data.get("__time"), } <NEW_LINE> <DEDENT> def get_data(self, df: pd.DataFrame) -> VizData: <NEW_LINE> <INDENT> self.metric_label = ( utils.get_metric_name(self.metric) if self.metric else None ) <NEW_LINE> return super().get_data(df) | deck.gl's ScreenGridLayer | 62598fc25166f23b2e243664 |
class RestrictedMultiplyMixin(object): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super(RestrictedMultiplyMixin, self).__init__(*args, **kwargs) <NEW_LINE> <DEDENT> def create_equality_constraint_variable(self, size): <NEW_LINE> <INDENT> v = expression.Variable('', shape.Vector(size)) <NEW_LINE> self.varlength[v.value] = size <NEW_LINE> self.varstart[v.value] = self.num_vars <NEW_LINE> self.num_vars += size <NEW_LINE> return v <NEW_LINE> <DEDENT> def expand_param(self, left, right, node): <NEW_LINE> <INDENT> if left.is_matrix_param and right.is_matrix_param: <NEW_LINE> <INDENT> n = node.right.shape.size(abstractdim_rewriter=self.abstractdim_rewriter) <NEW_LINE> new_var = self.create_equality_constraint_variable(n) <NEW_LINE> stack = self.expr_stack <NEW_LINE> self.expr_stack = [] <NEW_LINE> eq_constraint = (new_var == node.right) <NEW_LINE> self.visit(eq_constraint) <NEW_LINE> self.expr_stack = stack <NEW_LINE> self.visit_Variable(new_var) <NEW_LINE> return True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> def visit_Mul(self, node): <NEW_LINE> <INDENT> assert(isconstant(node.left)) <NEW_LINE> self.generic_visit(node) <NEW_LINE> right = self.expr_stack.pop() <NEW_LINE> left = self.expr_stack.pop() <NEW_LINE> coeff = left['1'] <NEW_LINE> expr = right.values()[0] <NEW_LINE> if self.expand_param(coeff, expr, node): <NEW_LINE> <INDENT> right = self.expr_stack.pop() <NEW_LINE> <DEDENT> for k in right.keys(): <NEW_LINE> <INDENT> right[k] = coeff * right[k] <NEW_LINE> <DEDENT> self.expr_stack.append(right) <NEW_LINE> <DEDENT> def visit_Add(self, node): <NEW_LINE> <INDENT> self.generic_visit(node) <NEW_LINE> right = self.expr_stack.pop() <NEW_LINE> left = self.expr_stack.pop() <NEW_LINE> for k in right.keys(): <NEW_LINE> <INDENT> if left.get(k, None) is not None: <NEW_LINE> <INDENT> if self.expand_param(left[k], right[k], node): <NEW_LINE> <INDENT> new_elem = self.expr_stack.pop() <NEW_LINE> name = 
new_elem.keys()[0] <NEW_LINE> left[name] = new_elem[name] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> left[k] = left[k] + right[k] <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> left[k] = right[k] <NEW_LINE> <DEDENT> <DEDENT> self.expr_stack.append(left) | This implements the restricted multiplication behavior.
| 62598fc292d797404e388ca4 |
class MockCypher(object): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def encrypt(message): <NEW_LINE> <INDENT> return message <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def decrypt(message): <NEW_LINE> <INDENT> return message | This class mocks Crypto.Cipher objects and does nothing. | 62598fc25fdd1c0f98e5e217 |
class ResearchAuthorInline(admin.TabularInline): <NEW_LINE> <INDENT> model = ResearchAuthor | Easy editing of research author entry relations from the author entry page | 62598fc27047854f4633f657 |
class MyBaseHandler(MyOriginBaseHandler): <NEW_LINE> <INDENT> def set_default_headers(self): <NEW_LINE> <INDENT> self.set_header("Access-Control-Allow-Origin", "*") <NEW_LINE> self.set_header("Access-Control-Allow-Headers", "x-requested-with,access_token") <NEW_LINE> self.set_header("Access-Control-Allow-Methods", "POST, GET, OPTIONS, DELETE"); <NEW_LINE> self.set_header("Access-Control-Max-Age", "3600") <NEW_LINE> <DEDENT> def options(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super(MyBaseHandler, self).__init__(*args, **kwargs) | 自定义session的类,基于tornado的
- logsession是登录的token,使用mongodb来存储
- sessionid使用redis来存储,以后用token,不用session了 | 62598fc221bff66bcd722eef |
class Tests(IMP.test.TestCase): <NEW_LINE> <INDENT> def setup_filenames(self): <NEW_LINE> <INDENT> self.protein_fns = [ self.get_input_file_name("1z5s_A.pdb"), self.get_input_file_name("1z5s_B.pdb"), self.get_input_file_name("1z5s_C.pdb"), self.get_input_file_name("1z5s_D.pdb")] <NEW_LINE> self.fitting_solutions_fns = [ self.get_input_file_name("1z5s_A_fitting_solutions.txt")] <NEW_LINE> <DEDENT> def setUp(self): <NEW_LINE> <INDENT> IMP.test.TestCase.setUp(self) <NEW_LINE> IMP.base.set_log_level(IMP.base.SILENT) <NEW_LINE> self.setup_filenames() <NEW_LINE> self.mdl = IMP.kernel.Model() <NEW_LINE> self.mhs = IMP.atom.Hierarchies() <NEW_LINE> self.ca_sel = IMP.atom.CAlphaPDBSelector() <NEW_LINE> for prot_fn in self.protein_fns: <NEW_LINE> <INDENT> mh = IMP.atom.read_pdb(prot_fn, self.mdl, self.ca_sel) <NEW_LINE> self.mhs.append(mh) <NEW_LINE> IMP.atom.add_radii(mh) <NEW_LINE> IMP.atom.setup_as_rigid_body(mh) <NEW_LINE> <DEDENT> <DEDENT> def test_reading_fitting_solutions(self): <NEW_LINE> <INDENT> sols = IMP.multifit.read_fitting_solutions( self.fitting_solutions_fns[0]) <NEW_LINE> self.assertEqual(len(sols), 13) <NEW_LINE> self.assertEqual(sols[0].get_match_size(), 9) <NEW_LINE> self.assertAlmostEqual(sols[1].get_match_average_distance(), 3.2, delta=0.01) <NEW_LINE> self.assertAlmostEqual(sols[2].get_rmsd_to_reference(), 2.98, delta=0.01) <NEW_LINE> self.assertAlmostEqual(sols[2].get_envelope_penetration_score(), 4.5, delta=0.01) <NEW_LINE> <DEDENT> def test_writing_and_reading_fitting_solutions(self): <NEW_LINE> <INDENT> sols = IMP.multifit.read_fitting_solutions( self.fitting_solutions_fns[0]) <NEW_LINE> IMP.multifit.write_fitting_solutions("temp.txt", sols) <NEW_LINE> sols = IMP.multifit.read_fitting_solutions("temp.txt") <NEW_LINE> self.assertEqual(len(sols), 13) <NEW_LINE> self.assertEqual(sols[0].get_match_size(), 9) <NEW_LINE> self.assertAlmostEqual(sols[1].get_match_average_distance(), 3.2, delta=0.01) <NEW_LINE> 
self.assertAlmostEqual(sols[2].get_rmsd_to_reference(), 2.98, delta=0.01) <NEW_LINE> os.unlink("temp.txt") | Tests for reading fitting solutions | 62598fc2796e427e5384ea1a |
class ShowIpBgpTemplatePeerSession(ShowIpBgpTemplatePeerSession_iosxe): <NEW_LINE> <INDENT> pass | Parser for show ip bgp template peer-session <WORD> | 62598fc24c3428357761a540 |
class ERSTimeSeries(ASCATTimeSeries): <NEW_LINE> <INDENT> def __init__(self, gpi, lon, lat, cell, data, topo_complex=None, wetland_frac=None, porosity_gldas=None, porosity_hwsd=None): <NEW_LINE> <INDENT> super(ERSTimeSeries, self).__init__(gpi, lon, lat, cell, data, topo_complex=topo_complex, wetland_frac=wetland_frac, porosity_gldas=porosity_gldas, porosity_hwsd=porosity_hwsd) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "ERS time series gpi:%d lat:%2.3f lon:%3.3f" % (self.gpi, self.latitude, self.longitude) | Extends :class:`pytesmo.io.sat.ascat.ASCATTimeSeries` and
provides correct string representation for ERS data | 62598fc2d486a94d0ba2c256 |
class SwitcherBoolean(ToggleEntity): <NEW_LINE> <INDENT> def __init__(self, hass, slug_id, name, initial, entity_config): <NEW_LINE> <INDENT> self.hass = hass <NEW_LINE> self.entity_id = async_generate_entity_id(ENTITY_ID_FORMAT, slug_id, hass=hass) <NEW_LINE> self._name = name <NEW_LINE> self._state = initial <NEW_LINE> self._entity_config = entity_config <NEW_LINE> <DEDENT> @property <NEW_LINE> def should_poll(self): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return self._name <NEW_LINE> <DEDENT> @property <NEW_LINE> def icon(self): <NEW_LINE> <INDENT> return self._entity_config[CONF_ICON] <NEW_LINE> <DEDENT> @property <NEW_LINE> def is_on(self): <NEW_LINE> <INDENT> return self._state <NEW_LINE> <DEDENT> @property <NEW_LINE> def state_attributes(self): <NEW_LINE> <INDENT> return { CONF_STATE_CARD: self._entity_config[CONF_CARD] } <NEW_LINE> <DEDENT> @asyncio.coroutine <NEW_LINE> def async_turn_on(self, **kwargs): <NEW_LINE> <INDENT> _LOGGER.debug("received turn on request for " + self.entity_id) <NEW_LINE> self._state = True <NEW_LINE> yield from self.async_update_ha_state() <NEW_LINE> <DEDENT> @asyncio.coroutine <NEW_LINE> def async_turn_off(self, **kwargs): <NEW_LINE> <INDENT> _LOGGER.debug("received turn on request for " + self.entity_id) <NEW_LINE> self._state = False <NEW_LINE> yield from self.async_update_ha_state() | Representation of the input_boolean | 62598fc24428ac0f6e6587aa |
class AbstractRecognizer: <NEW_LINE> <INDENT> def __init__(self, recognition_endpoint): <NEW_LINE> <INDENT> self.recognition_endpoint = recognition_endpoint <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def recognize(self, image): <NEW_LINE> <INDENT> pass | A base class for facial recognition service interfaces
Args:
recognition_endpoint (string): The url of the recognition service (theoretically could be something other than a string...maybe an object) | 62598fc2aad79263cf42ea5b |
class Deck(Hand): <NEW_LINE> <INDENT> def populate(self): <NEW_LINE> <INDENT> for suit in Card.SUITS: <NEW_LINE> <INDENT> for rank in Card.RANKS: <NEW_LINE> <INDENT> self.add(Card(rank, suit)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def shuffle(self): <NEW_LINE> <INDENT> import random <NEW_LINE> random.shuffle(self.cards) <NEW_LINE> <DEDENT> def deal(self, hands, per_hand = 1): <NEW_LINE> <INDENT> for rounds in range(per_hand): <NEW_LINE> <INDENT> for hand in hands: <NEW_LINE> <INDENT> if self.cards: <NEW_LINE> <INDENT> top_card = self.cards[0] <NEW_LINE> self.give(top_card, hand) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print("He могу больше сдавать: карты закончились!") | Колода игральных карт. | 62598fc266673b3332c30659 |
class BackendAddressPool(SubResource): <NEW_LINE> <INDENT> _validation = { 'backend_ip_configurations': {'readonly': True}, 'load_balancing_rules': {'readonly': True}, 'outbound_nat_rule': {'readonly': True}, } <NEW_LINE> _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'backend_ip_configurations': {'key': 'properties.backendIPConfigurations', 'type': '[NetworkInterfaceIPConfiguration]'}, 'load_balancing_rules': {'key': 'properties.loadBalancingRules', 'type': '[SubResource]'}, 'outbound_nat_rule': {'key': 'properties.outboundNatRule', 'type': 'SubResource'}, 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'etag': {'key': 'etag', 'type': 'str'}, } <NEW_LINE> def __init__(self, *, id: str=None, provisioning_state: str=None, name: str=None, etag: str=None, **kwargs) -> None: <NEW_LINE> <INDENT> super(BackendAddressPool, self).__init__(id=id, **kwargs) <NEW_LINE> self.backend_ip_configurations = None <NEW_LINE> self.load_balancing_rules = None <NEW_LINE> self.outbound_nat_rule = None <NEW_LINE> self.provisioning_state = provisioning_state <NEW_LINE> self.name = name <NEW_LINE> self.etag = etag | Pool of backend IP addresses.
Variables are only populated by the server, and will be ignored when
sending a request.
:param id: Resource ID.
:type id: str
:ivar backend_ip_configurations: Gets collection of references to IP
addresses defined in network interfaces.
:vartype backend_ip_configurations:
list[~azure.mgmt.network.v2017_11_01.models.NetworkInterfaceIPConfiguration]
:ivar load_balancing_rules: Gets load balancing rules that use this
backend address pool.
:vartype load_balancing_rules:
list[~azure.mgmt.network.v2017_11_01.models.SubResource]
:ivar outbound_nat_rule: Gets outbound rules that use this backend address
pool.
:vartype outbound_nat_rule:
~azure.mgmt.network.v2017_11_01.models.SubResource
:param provisioning_state: Get provisioning state of the public IP
resource. Possible values are: 'Updating', 'Deleting', and 'Failed'.
:type provisioning_state: str
:param name: Gets name of the resource that is unique within a resource
group. This name can be used to access the resource.
:type name: str
:param etag: A unique read-only string that changes whenever the resource
is updated.
:type etag: str | 62598fc257b8e32f52508261 |
class GameQuerySet(models.QuerySet): <NEW_LINE> <INDENT> def add_silver(self, silver): <NEW_LINE> <INDENT> game = self._get_game() <NEW_LINE> game.silver += silver <NEW_LINE> game.save(update_fields=['silver']) <NEW_LINE> <DEDENT> def alter_xp(self, bonus_xp): <NEW_LINE> <INDENT> game = self._get_game() <NEW_LINE> game.bonus_xp = bonus_xp <NEW_LINE> game.save(update_fields=['bonus_xp']) <NEW_LINE> return ADMIN_DONE['bonus_xp'].format(self._get_game().bonus_xp) <NEW_LINE> <DEDENT> def _get_game(self): <NEW_LINE> <INDENT> return self.all().first() <NEW_LINE> <DEDENT> def get_silver(self): <NEW_LINE> <INDENT> return DONE['silver_global'].format(self._get_game().silver) <NEW_LINE> <DEDENT> def get_bonusxp(self): <NEW_LINE> <INDENT> return self._get_game().bonus_xp | Requête de base pour les informations de partie. | 62598fc25fdd1c0f98e5e219 |
class BertTokenizedDataset(Dataset): <NEW_LINE> <INDENT> def __init__(self, data_path_words: str, data_path_tags: str, tag_to_ix: Vocab, tokenizer: BertTokenizer) -> None: <NEW_LINE> <INDENT> self.tokenizer = tokenizer <NEW_LINE> self.original_sentences = [sent.strip() for sent in open(data_path_words).readlines()] <NEW_LINE> self.tags = open(data_path_tags).readlines() <NEW_LINE> input_ids = [] <NEW_LINE> self.tag_ids = [[tag_to_ix[tag] for tag in tags.strip().split()] for tags in self.tags] <NEW_LINE> self.attention_masks = [] <NEW_LINE> for sent in self.original_sentences: <NEW_LINE> <INDENT> encoded_dict = self.tokenizer.encode_plus( sent, add_special_tokens=True, max_length=512, pad_to_max_length=True, return_attention_mask=True, return_tensors='pt', ) <NEW_LINE> input_ids.append(encoded_dict['input_ids']) <NEW_LINE> self.attention_masks.append(encoded_dict['attention_mask']) <NEW_LINE> <DEDENT> self.token_start_idx = self.get_token_start_idxs() <NEW_LINE> self.input_ids = torch.cat(input_ids, dim=0) <NEW_LINE> self.attention_masks = torch.cat(self.attention_masks, dim=0) <NEW_LINE> <DEDENT> def get_token_start_idxs(self) -> List[int]: <NEW_LINE> <INDENT> token_start_idxs = [] <NEW_LINE> for sent in self.original_sentences: <NEW_LINE> <INDENT> words = sent.split() <NEW_LINE> subwords = list(map(self.tokenizer.tokenize, words)) <NEW_LINE> subword_lengths = list(map(len, subwords)) <NEW_LINE> token_start_idxs.append(list(np.cumsum([0] + subword_lengths))[1:]) <NEW_LINE> <DEDENT> return token_start_idxs <NEW_LINE> <DEDENT> def __len__(self) -> int: <NEW_LINE> <INDENT> return len(self.original_sentences) <NEW_LINE> <DEDENT> def __getitem__(self, idx) -> Dict: <NEW_LINE> <INDENT> sample = { 'original_sentence': self.original_sentences[idx], 'tags': self.tags[idx], 'input_ids': self.input_ids[idx], 'tag_ids': self.tag_ids[idx], 'attention_mask': self.attention_masks[idx], 'token_start_idx': self.token_start_idx[idx] } <NEW_LINE> return sample | The main class 
containing the Bert-tokenized dataset. The data is subsequently transferred into a DataLoader object
so that it can be iterated over without having to be loaded into memory all at once (see create_bert_datasets() below). | 62598fc20fa83653e46f516b |
class TestV1ObjectFieldSelector(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def testV1ObjectFieldSelector(self): <NEW_LINE> <INDENT> model = lib_openshift.models.v1_object_field_selector.V1ObjectFieldSelector() | V1ObjectFieldSelector unit test stubs | 62598fc2656771135c4898f5 |
class MockRequests: <NEW_LINE> <INDENT> flag = None <NEW_LINE> content = """{"message": "Invalid token", "errors": [{"type": "invalid_token", "subject": "token"}]}""" <NEW_LINE> status_code = None <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> self.url = None <NEW_LINE> self.data = None <NEW_LINE> self.kwargs = None <NEW_LINE> <DEDENT> def return_request(self, url, data=None, **kwargs): <NEW_LINE> <INDENT> self.url = url <NEW_LINE> self.data = data <NEW_LINE> self.kwargs = kwargs <NEW_LINE> requests = MockRequests() <NEW_LINE> if self.flag == 1: <NEW_LINE> <INDENT> requests.status_code = 401 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> requests.status_code = 200 <NEW_LINE> <DEDENT> return requests <NEW_LINE> <DEDENT> def post(self, url, data=None, **kwargs): <NEW_LINE> <INDENT> return self.return_request(url, data, **kwargs) <NEW_LINE> <DEDENT> def delete(self, url, **kwargs): <NEW_LINE> <INDENT> return self.return_request(url, **kwargs) <NEW_LINE> <DEDENT> def get(self, url, **kwargs): <NEW_LINE> <INDENT> return self.return_request(url, **kwargs) <NEW_LINE> <DEDENT> def put(self, url, data=None, **kwargs): <NEW_LINE> <INDENT> return self.return_request(url, data, **kwargs) | Mock smtplib class | 62598fc2283ffb24f3cf3b0a |
class CreateOperation(VDOOperation): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(CreateOperation, self).__init__(checkBinaries=True) <NEW_LINE> <DEDENT> def preflight(self, args): <NEW_LINE> <INDENT> super(CreateOperation, self).preflight(args) <NEW_LINE> if not args.name: <NEW_LINE> <INDENT> raise ArgumentError(_("Missing required argument '--name'")) <NEW_LINE> <DEDENT> if not args.device: <NEW_LINE> <INDENT> raise ArgumentError(_("Missing required argument '--device'")) <NEW_LINE> <DEDENT> <DEDENT> @exclusivelock <NEW_LINE> @transactional <NEW_LINE> def execute(self, args): <NEW_LINE> <INDENT> conf = Configuration.modifiableSingleton(self.confFile) <NEW_LINE> argsDict = vars(args).copy() <NEW_LINE> name = argsDict['name'] <NEW_LINE> del argsDict['name'] <NEW_LINE> vdo = VDOService(args.name, conf, **argsDict) <NEW_LINE> transaction = Transaction.transaction() <NEW_LINE> vdo.create(args.force) <NEW_LINE> transaction.addUndoStage(vdo.remove) <NEW_LINE> conf.persist() <NEW_LINE> vdo.announceReady() | Implements the create command. | 62598fc27047854f4633f65a |
class UserDetailSerializer(serializers.ModelSerializer): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> model = User <NEW_LINE> fields = ( "id", "username", "gender", "birthday", "email", "mobile" ) | 用户详情序列化类 | 62598fc2ff9c53063f51a8d4 |
class FilelistsXMLFileContext(FastForwardXmlFileContext): <NEW_LINE> <INDENT> def __init__(self, working_dir, num_units, checksum_type=None): <NEW_LINE> <INDENT> metadata_file_path = os.path.join(working_dir, REPO_DATA_DIR_NAME, FILE_LISTS_XML_FILE_NAME) <NEW_LINE> self.num_packages = num_units <NEW_LINE> attributes = {'xmlns': FILE_LISTS_NAMESPACE, 'packages': str(self.num_packages)} <NEW_LINE> super(FilelistsXMLFileContext, self).__init__(metadata_file_path, 'filelists', search_tag='package', root_attributes=attributes, checksum_type=checksum_type) <NEW_LINE> <DEDENT> def add_unit_metadata(self, unit): <NEW_LINE> <INDENT> metadata = unit.metadata['repodata']['filelists'] <NEW_LINE> self.metadata_file_handle.write(metadata) | Context manager for generating the filelists.xml.gz file. | 62598fc25fc7496912d483be |
class NumericCombo(wx.ComboBox): <NEW_LINE> <INDENT> def __init__(self, parent, choices, precision=3, init=0, width=80): <NEW_LINE> <INDENT> self.fmt = "%%.%if" % precision <NEW_LINE> self.choices = choices <NEW_LINE> schoices = [self.fmt % i for i in self.choices] <NEW_LINE> wx.ComboBox.__init__(self, parent, -1, '', (-1, -1), (width, -1), schoices, wx.CB_DROPDOWN|wx.TE_PROCESS_ENTER) <NEW_LINE> init = min(init, len(self.choices)) <NEW_LINE> self.SetStringSelection(schoices[init]) <NEW_LINE> self.Bind(wx.EVT_TEXT_ENTER, self.OnEnter) <NEW_LINE> <DEDENT> def OnEnter(self, event=None): <NEW_LINE> <INDENT> thisval = float(event.GetString()) <NEW_LINE> if thisval not in self.choices: <NEW_LINE> <INDENT> self.choices.append(thisval) <NEW_LINE> self.choices.sort() <NEW_LINE> <DEDENT> self.Clear() <NEW_LINE> self.AppendItems([self.fmt % i for i in self.choices]) <NEW_LINE> self.SetSelection(self.choices.index(thisval)) | Numeric Combo: ComboBox with numeric-only choices | 62598fc221bff66bcd722ef1 |
class TestCompareXLSXFiles(ExcelComparisonTest): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.maxDiff = None <NEW_LINE> filename = 'textbox17.xlsx' <NEW_LINE> test_dir = 'xlsxwriter/test/comparison/' <NEW_LINE> self.got_filename = test_dir + '_test_' + filename <NEW_LINE> self.exp_filename = test_dir + 'xlsx_files/' + filename <NEW_LINE> self.ignore_files = [] <NEW_LINE> self.ignore_elements = {} <NEW_LINE> <DEDENT> def test_create_file(self): <NEW_LINE> <INDENT> workbook = Workbook(self.got_filename) <NEW_LINE> worksheet = workbook.add_worksheet() <NEW_LINE> worksheet.insert_textbox('E9', 'This is some text', {'align': {'vertical': 'bottom'}}) <NEW_LINE> workbook.close() <NEW_LINE> self.assertExcelEqual() | Test file created by XlsxWriter against a file created by Excel. | 62598fc2a219f33f346c6a8e |
class Log4jAppenderTest(Test): <NEW_LINE> <INDENT> def __init__(self, test_context): <NEW_LINE> <INDENT> super(Log4jAppenderTest, self).__init__(test_context) <NEW_LINE> self.num_zk = 1 <NEW_LINE> self.num_brokers = 1 <NEW_LINE> self.messages_received_count = 0 <NEW_LINE> self.topics = { TOPIC: {'partitions': 1, 'replication-factor': 1} } <NEW_LINE> self.zk = ZookeeperService(test_context, self.num_zk) <NEW_LINE> <DEDENT> def setUp(self): <NEW_LINE> <INDENT> self.zk.start() <NEW_LINE> <DEDENT> def start_kafka(self, security_protocol, interbroker_security_protocol): <NEW_LINE> <INDENT> self.kafka = KafkaService( self.test_context, self.num_brokers, self.zk, security_protocol=security_protocol, interbroker_security_protocol=interbroker_security_protocol, topics=self.topics) <NEW_LINE> self.kafka.start() <NEW_LINE> <DEDENT> def start_appender(self, security_protocol): <NEW_LINE> <INDENT> self.appender = KafkaLog4jAppender(self.test_context, self.num_brokers, self.kafka, TOPIC, MAX_MESSAGES, security_protocol=security_protocol) <NEW_LINE> self.appender.start() <NEW_LINE> <DEDENT> def custom_message_validator(self, msg): <NEW_LINE> <INDENT> if msg and "INFO : org.apache.kafka.tools.VerifiableLog4jAppender" in msg: <NEW_LINE> <INDENT> self.logger.debug("Received message: %s" % msg) <NEW_LINE> self.messages_received_count += 1 <NEW_LINE> <DEDENT> <DEDENT> def start_consumer(self): <NEW_LINE> <INDENT> self.consumer = ConsoleConsumer(self.test_context, num_nodes=self.num_brokers, kafka=self.kafka, topic=TOPIC, consumer_timeout_ms=10000, message_validator=self.custom_message_validator) <NEW_LINE> self.consumer.start() <NEW_LINE> <DEDENT> @cluster(num_nodes=4) <NEW_LINE> @matrix(security_protocol=['PLAINTEXT', 'SSL']) <NEW_LINE> @cluster(num_nodes=5) <NEW_LINE> @matrix(security_protocol=['SASL_PLAINTEXT', 'SASL_SSL']) <NEW_LINE> def test_log4j_appender(self, security_protocol='PLAINTEXT'): <NEW_LINE> <INDENT> self.start_kafka(security_protocol, security_protocol) <NEW_LINE> 
self.start_appender(security_protocol) <NEW_LINE> self.appender.wait() <NEW_LINE> self.start_consumer() <NEW_LINE> node = self.consumer.nodes[0] <NEW_LINE> wait_until(lambda: self.consumer.alive(node), timeout_sec=10, backoff_sec=.2, err_msg="Consumer was too slow to start") <NEW_LINE> wait_until(lambda: self.messages_received_count == MAX_MESSAGES, timeout_sec=10, err_msg="Timed out waiting to consume expected number of messages.") <NEW_LINE> self.consumer.stop() | Tests KafkaLog4jAppender using VerifiableKafkaLog4jAppender that appends increasing ints to a Kafka topic | 62598fc2adb09d7d5dc0a803 |
@implementer(ISchemaField) <NEW_LINE> class SchemaField(Field): <NEW_LINE> <INDENT> def __init__(self, field): <NEW_LINE> <INDENT> super(SchemaField, self).__init__( field.title or None, field.__name__) <NEW_LINE> self.description = field.description <NEW_LINE> self.required = field.required <NEW_LINE> self.readonly = field.readonly <NEW_LINE> self._field = field <NEW_LINE> <DEDENT> def get_field(self): <NEW_LINE> <INDENT> return self._field <NEW_LINE> <DEDENT> def clone(self, new_identifier=None): <NEW_LINE> <INDENT> copy = self.__class__(self._field) <NEW_LINE> copy.__dict__.update(self.__dict__) <NEW_LINE> if new_identifier is not None: <NEW_LINE> <INDENT> copy.identifier = new_identifier <NEW_LINE> <DEDENT> return copy <NEW_LINE> <DEDENT> def validate(self, value, context=None): <NEW_LINE> <INDENT> error = super(SchemaField, self).validate(value) <NEW_LINE> if error is not None: <NEW_LINE> <INDENT> return error <NEW_LINE> <DEDENT> if value is not NO_VALUE: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> binded_field = self._field.bind(context) <NEW_LINE> binded_field.validate(value) <NEW_LINE> <DEDENT> except schema_interfaces.ValidationError as error: <NEW_LINE> <INDENT> return error.doc() <NEW_LINE> <DEDENT> except Invalid as error: <NEW_LINE> <INDENT> return error.args[0] <NEW_LINE> <DEDENT> <DEDENT> return None <NEW_LINE> <DEDENT> def fromUnicode(self, value): <NEW_LINE> <INDENT> if schema_interfaces.IFromUnicode.providedBy(self._field): <NEW_LINE> <INDENT> return self._field.fromUnicode(value) <NEW_LINE> <DEDENT> return value <NEW_LINE> <DEDENT> def getDefaultValue(self, form): <NEW_LINE> <INDENT> default = super(SchemaField, self).getDefaultValue(form) <NEW_LINE> if default is not NO_VALUE: <NEW_LINE> <INDENT> return default <NEW_LINE> <DEDENT> default = self._field.default <NEW_LINE> if default is None: <NEW_LINE> <INDENT> return NO_VALUE <NEW_LINE> <DEDENT> return default | A form field using a zope.schema field as settings.
| 62598fc2377c676e912f6eb7 |
class POP3Reader(BaseReader): <NEW_LINE> <INDENT> def __init__(self, args): <NEW_LINE> <INDENT> self.server = args['server'] <NEW_LINE> self.account = args['account'] <NEW_LINE> if args['ssl']: <NEW_LINE> <INDENT> self.pop3 = poplib.POP3_SSL <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.pop3 = poplib.POP3 <NEW_LINE> <DEDENT> self.data = None <NEW_LINE> self.moredata = None <NEW_LINE> self.identifier = None <NEW_LINE> self.password = getpass.getpass("Enter password for %s:" % self.account) <NEW_LINE> pop3 = self.pop3(self.server) <NEW_LINE> pop3.user(self.account) <NEW_LINE> try: <NEW_LINE> <INDENT> pop3.pass_(self.password) <NEW_LINE> self.state = pop3.stat() <NEW_LINE> <DEDENT> except poplib.error_proto: <NEW_LINE> <INDENT> raise Exception("Incorrect username/password for %s" % self.account) <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> pop3.quit() <NEW_LINE> <DEDENT> BaseReader.__init__(self) <NEW_LINE> self.log.log("Starting POP3 Reader for %s" % self.account) <NEW_LINE> <DEDENT> def checkUpdate(self): <NEW_LINE> <INDENT> pop3 = self.pop3(self.server) <NEW_LINE> pop3.user(self.account) <NEW_LINE> pop3.pass_(self.password) <NEW_LINE> newstate = pop3.stat() <NEW_LINE> if newstate != self.state: <NEW_LINE> <INDENT> self.state = newstate <NEW_LINE> self.items.append(POP3Item(self.state, {'account':self.account})) <NEW_LINE> <DEDENT> pop3.quit() | This reader is used to monitor an email inbox.
WARNING: This reader will retrieve all of your emails, and treat
each one as an item.
An update will be triggered every time a new email is recieved.
If you only want to retrieve meta info, and manually check your email,
please use IMAPReader instead (see private option) | 62598fc2f548e778e596b825 |
class Pupitre(Frame): <NEW_LINE> <INDENT> def __init__(self, boss, canon): <NEW_LINE> <INDENT> Frame.__init__(self, bd =3, relief =GROOVE) <NEW_LINE> self.score =0 <NEW_LINE> s =Scale(self, from_ =88, to =65, troughcolor ='dark grey', command =canon.orienter) <NEW_LINE> s.set(45) <NEW_LINE> s.pack(side =LEFT) <NEW_LINE> Label(self, text ='Hausse').pack(side =TOP, anchor =W, pady =5) <NEW_LINE> Button(self, text ='Feu !', command =canon.feu). pack(side =BOTTOM, padx =5, pady =5) <NEW_LINE> Label(self, text ="points").pack() <NEW_LINE> self.points =Label(self, text=' 0 ', bg ='white') <NEW_LINE> self.points.pack() <NEW_LINE> gd =(LEFT, RIGHT)[canon.sens == -1] <NEW_LINE> self.pack(padx =3, pady =5, side =gd) <NEW_LINE> <DEDENT> def attribuerPoint(self, p): <NEW_LINE> <INDENT> self.score += p <NEW_LINE> self.points.config(text = ' %s ' % self.score) | Pupitre de pointage associé à un canon | 62598fc2a05bb46b3848aaf3 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.