| code (string, lengths 4–4.48k) | docstring (string, lengths 1–6.45k) | _id (string, length 24) |
|---|---|---|
class ResolveSavingConflictOperation(object): <NEW_LINE> <INDENT> def execute(self, *args, **kwargs): <NEW_LINE> <INDENT> changelist_number = kwargs['changelist_number'] <NEW_LINE> perforce.resolve_save_conflict(changelist_number=changelist_number)
|
Resolve the saving conflict for the given Perforce changelist
|
6259902c925a0f43d25e905f
|
class IdentityAwareProxy(_messages.Message): <NEW_LINE> <INDENT> enabled = _messages.BooleanField(1) <NEW_LINE> oauth2ClientId = _messages.StringField(2) <NEW_LINE> oauth2ClientSecret = _messages.StringField(3) <NEW_LINE> oauth2ClientSecretSha256 = _messages.StringField(4)
|
Identity-Aware Proxy
Fields:
enabled: Whether the serving infrastructure will authenticate and
authorize all incoming requests. If true, the oauth2_client_id and
oauth2_client_secret fields must be non-empty.
oauth2ClientId: OAuth2 client ID to use for the authentication flow.
oauth2ClientSecret: OAuth2 client secret to use for the authentication
flow. For security reasons, this value cannot be retrieved via the API.
Instead, the SHA-256 hash of the value is returned in the
oauth2_client_secret_sha256 field. @InputOnly
oauth2ClientSecretSha256: Hex-encoded SHA-256 hash of the client
secret. @OutputOnly
|
6259902c5166f23b2e2443ee
|
class Logger(object): <NEW_LINE> <INDENT> _attrs_to_log = [ "time_this_iter_s", "mean_loss", "mean_accuracy", "episode_reward_mean", "episode_len_mean"] <NEW_LINE> def __init__(self, config, logdir, upload_uri=None): <NEW_LINE> <INDENT> self.config = config <NEW_LINE> self.logdir = logdir <NEW_LINE> self.uri = upload_uri <NEW_LINE> self._init() <NEW_LINE> <DEDENT> def _init(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def on_result(self, result): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def close(self): <NEW_LINE> <INDENT> pass
|
Logging interface for ray.tune; specialized implementations follow.
By default, the UnifiedLogger implementation is used which logs results in
multiple formats (TensorBoard, rllab/viskit, plain json) at once.
|
6259902cd53ae8145f91947b
|
class EVENt(SCPINode, SCPIQuery): <NEW_LINE> <INDENT> __slots__ = () <NEW_LINE> _cmd = "EVENt" <NEW_LINE> args = []
|
STATus:QUEStionable:BERT:EVENt
Arguments:
|
6259902c287bf620b6272bff
|
class AudioFile: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.swaggerTypes = { 'attributionUrl': 'str', 'commentCount': 'int', 'voteCount': 'int', 'fileUrl': 'str', 'audioType': 'str', 'id': 'int', 'duration': 'float', 'attributionText': 'str', 'createdBy': 'str', 'description': 'str', 'createdAt': 'str', 'voteWeightedAverage': 'float', 'voteAverage': 'float', 'word': 'str' } <NEW_LINE> self.attributionUrl = None <NEW_LINE> self.commentCount = None <NEW_LINE> self.voteCount = None <NEW_LINE> self.fileUrl = None <NEW_LINE> self.audioType = None <NEW_LINE> self.id = None <NEW_LINE> self.duration = None <NEW_LINE> self.attributionText = None <NEW_LINE> self.createdBy = None <NEW_LINE> self.description = None <NEW_LINE> self.createdAt = None <NEW_LINE> self.voteWeightedAverage = None <NEW_LINE> self.voteAverage = None <NEW_LINE> self.word = None
|
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
|
6259902c56b00c62f0fb38d9
|
class RemoveRedundantAssign(object): <NEW_LINE> <INDENT> def __init__(self, interp): <NEW_LINE> <INDENT> self.interp = interp <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> for blkid, blk in utils.iteritems(self.interp.blocks): <NEW_LINE> <INDENT> self.run_block(blk) <NEW_LINE> <DEDENT> <DEDENT> def run_block(self, blk): <NEW_LINE> <INDENT> tempassign = {} <NEW_LINE> removeset = set() <NEW_LINE> for offset, inst in enumerate(blk.body): <NEW_LINE> <INDENT> self.mark_asssignment(tempassign, offset, inst) <NEW_LINE> <DEDENT> for bag in utils.itervalues(tempassign): <NEW_LINE> <INDENT> if len(bag) == 2: <NEW_LINE> <INDENT> off1, off2 = bag <NEW_LINE> first = blk.body[off1] <NEW_LINE> second = blk.body[off2] <NEW_LINE> inst = ir.Assign(value=first.value, target=second.target, loc=first.loc) <NEW_LINE> blk.body[off2] = inst <NEW_LINE> removeset.add(off1) <NEW_LINE> <DEDENT> <DEDENT> for off in reversed(sorted(removeset)): <NEW_LINE> <INDENT> del blk.body[off] <NEW_LINE> <DEDENT> <DEDENT> def mark_asssignment(self, tempassign, offset, inst): <NEW_LINE> <INDENT> if isinstance(inst, ir.Assign): <NEW_LINE> <INDENT> if inst.target.is_temp: <NEW_LINE> <INDENT> tempassign[inst.target.name] = [offset] <NEW_LINE> <DEDENT> elif inst.value.name in tempassign: <NEW_LINE> <INDENT> bag = tempassign[inst.value.name] <NEW_LINE> if bag[0] == offset - 1: <NEW_LINE> <INDENT> bag.append(offset) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> del tempassign[inst.value.name]
|
Turn assignment pairs into one assignment
|
6259902c1d351010ab8f4b30
|
class ProberThread(threading.Thread): <NEW_LINE> <INDENT> def __init__(self, thread_requests, thread_responses, threads_busy_lock): <NEW_LINE> <INDENT> super(ProberThread, self).__init__() <NEW_LINE> self.daemon = True <NEW_LINE> self.thread_requests = thread_requests <NEW_LINE> self.thread_responses = thread_responses <NEW_LINE> self.threads_busy_lock = threads_busy_lock <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> global _THREADS_BUSY <NEW_LINE> while True: <NEW_LINE> <INDENT> probe = self.thread_requests.get() <NEW_LINE> if probe == None: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> with self.threads_busy_lock: <NEW_LINE> <INDENT> _THREADS_BUSY += 1 <NEW_LINE> <DEDENT> probe.run() <NEW_LINE> self.thread_responses.put(probe) <NEW_LINE> with self.threads_busy_lock: <NEW_LINE> <INDENT> _THREADS_BUSY -= 1
|
Prober thread: consumes probe requests from the thread requests queue,
runs the associated code, and puts the result onto the thread responses queue.
|
6259902cd6c5a102081e313f
|
class CFEExchangeCalendar(MarketCalendar): <NEW_LINE> <INDENT> aliases = ['CFE', "CBOE_Futures"] <NEW_LINE> regular_market_times = { "market_open": ((None, time(8, 30)),), "market_close": ((None, time(15, 15)),) } <NEW_LINE> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return "CFE" <NEW_LINE> <DEDENT> @property <NEW_LINE> def tz(self): <NEW_LINE> <INDENT> return timezone("America/Chicago") <NEW_LINE> <DEDENT> @property <NEW_LINE> def regular_holidays(self): <NEW_LINE> <INDENT> return AbstractHolidayCalendar(rules=[ USNewYearsDay, USMartinLutherKingJrAfter1998, USPresidentsDay, GoodFridayUnlessChristmasNYEFriday, USIndependenceDay, USMemorialDay, USLaborDay, USThanksgivingDay, Christmas ]) <NEW_LINE> <DEDENT> @property <NEW_LINE> def special_closes(self): <NEW_LINE> <INDENT> return [( time(12, 15), AbstractHolidayCalendar(rules=[ USBlackFridayInOrAfter1993, ]) )] <NEW_LINE> <DEDENT> @property <NEW_LINE> def adhoc_holidays(self): <NEW_LINE> <INDENT> return list(chain( HurricaneSandyClosings, USNationalDaysofMourning, ))
|
Exchange calendar for the CBOE Futures Exchange (CFE).
http://cfe.cboe.com/aboutcfe/expirationcalendar.aspx
Open Time: 8:30am, America/Chicago
Close Time: 3:15pm, America/Chicago
(We are ignoring extended trading hours for now)
|
6259902c6fece00bbaccc9c8
|
class Configuration(object): <NEW_LINE> <INDENT> DEV_CONFIG = "config/development.yaml" <NEW_LINE> PROD_CONFIG = "config/production.yaml" <NEW_LINE> @staticmethod <NEW_LINE> def for_env(environment_name): <NEW_LINE> <INDENT> if environment_name == "production": <NEW_LINE> <INDENT> file = Configuration.PROD_CONFIG <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> file = Configuration.DEV_CONFIG <NEW_LINE> <DEDENT> if os.path.exists(file): <NEW_LINE> <INDENT> with open(file, "rt") as f: <NEW_LINE> <INDENT> return Configuration(yaml.load(f.read())) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> raise Exception("Config file '%s' missing!" % file) <NEW_LINE> <DEDENT> <DEDENT> def __init__(self, config): <NEW_LINE> <INDENT> self.config = config <NEW_LINE> self.setup_logging() <NEW_LINE> <DEDENT> def setup_logging(self): <NEW_LINE> <INDENT> LogFactory.from_config(default_path=self.config["logging_config"]) <NEW_LINE> <DEDENT> def heartbeat_time(self): <NEW_LINE> <INDENT> return float(self.config["heartbeat_time"]) <NEW_LINE> <DEDENT> def feeds(self): <NEW_LINE> <INDENT> return map(lambda feed_config: self._create_feed_from_config(feed_config), self.config["feeds"]) <NEW_LINE> <DEDENT> def build_io_adapter(self): <NEW_LINE> <INDENT> io_adapter_config = self.config["io_adapter"] <NEW_LINE> if io_adapter_config == "RaspberryPi": <NEW_LINE> <INDENT> from io_adapters.raspberry_pi import RaspberryPi <NEW_LINE> return RaspberryPi() <NEW_LINE> <DEDENT> elif io_adapter_config == "LoggingIO": <NEW_LINE> <INDENT> from io_adapters.logging_io import LoggingIO <NEW_LINE> return LoggingIO() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise Exception("Unknown io_adapter: '%s'" % io_adapter_config) <NEW_LINE> <DEDENT> <DEDENT> def build_observers(self): <NEW_LINE> <INDENT> observers_config = self.config["observers"] <NEW_LINE> return map(self._create_observer_from_config, observers_config) <NEW_LINE> <DEDENT> def _create_feed_from_config(self, feed_config): <NEW_LINE> <INDENT> return Feed.build(feed_config) <NEW_LINE> <DEDENT> def _create_observer_from_config(self, observer_config): <NEW_LINE> <INDENT> if observer_config["klass"] == "SpreadsheetUpdater": <NEW_LINE> <INDENT> from observers.spreadsheet_updater import SpreadsheetUpdater <NEW_LINE> return SpreadsheetUpdater(observer_config["ifttt_api_key"], observer_config["ifttt_event_name"])
|
Configuration object that provides config loaded from a YAML file for the given environment
|
6259902c796e427e5384f796
|
class RegionGrowing(base.SegmentationFunction): <NEW_LINE> <INDENT> def __init__(self, input_image): <NEW_LINE> <INDENT> return super().__init__(input_image) <NEW_LINE> <DEDENT> def get_result(self, parameters_vector): <NEW_LINE> <INDENT> return region_growing(self._input_image, (parameters_vector[0], parameters_vector[1]))
|
Class that performs region growing, written in Python
|
6259902c8e05c05ec3f6f668
|
class _VariableCapturingScope(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.variables = {} <NEW_LINE> self.tf_variables = {} <NEW_LINE> <DEDENT> @contextlib.contextmanager <NEW_LINE> def capturing_scope(self): <NEW_LINE> <INDENT> def _custom_getter(getter=None, name=None, shape=None, dtype=dtypes.float32, initializer=None, regularizer=None, reuse=None, trainable=True, collections=None, caching_device=None, partitioner=None, validate_shape=True, use_resource=None): <NEW_LINE> <INDENT> del getter, regularizer, partitioner, validate_shape, use_resource <NEW_LINE> del collections, initializer, trainable, reuse, caching_device <NEW_LINE> assert name in self.variables <NEW_LINE> v = self.variables[name] <NEW_LINE> v.placeholder = array_ops.placeholder(dtype=dtypes.resource, shape=shape) <NEW_LINE> v.placeholder._handle_data = v.variable.handle._handle_data <NEW_LINE> return _VariableFromResource( v.placeholder, dtype=dtypes.as_dtype(dtype), name=name, shape=v.shape) <NEW_LINE> <DEDENT> scope = variable_scope.get_variable_scope() <NEW_LINE> with variable_scope.variable_scope(scope, custom_getter=_custom_getter): <NEW_LINE> <INDENT> yield <NEW_LINE> <DEDENT> <DEDENT> @contextlib.contextmanager <NEW_LINE> def initializing_scope(self): <NEW_LINE> <INDENT> def _custom_getter(getter=None, name=None, shape=None, dtype=dtypes.float32, initializer=None, regularizer=None, reuse=None, trainable=True, collections=None, caching_device=None, partitioner=None, validate_shape=True, use_resource=None): <NEW_LINE> <INDENT> del getter, regularizer, collections, caching_device, partitioner <NEW_LINE> del use_resource, validate_shape <NEW_LINE> if name in self.tf_variables: <NEW_LINE> <INDENT> if reuse: <NEW_LINE> <INDENT> return self.tf_variables[name].initialized_value() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError("Specified reuse=%s but tried to reuse variables." % reuse) <NEW_LINE> <DEDENT> <DEDENT> v = _CapturedVariable(name, initializer, shape, dtype, trainable) <NEW_LINE> self.variables[name] = v <NEW_LINE> graph_mode_resource = resource_variable_ops.var_handle_op( shared_name=name, shape=shape, dtype=dtype) <NEW_LINE> if initializer is None: <NEW_LINE> <INDENT> initializer = _default_initializer(name, shape, dtype) <NEW_LINE> <DEDENT> resource_variable_ops.assign_variable_op( graph_mode_resource, initializer(shape, dtype)) <NEW_LINE> return _VariableFromResource( graph_mode_resource, dtype, name, shape=v.shape) <NEW_LINE> <DEDENT> scope = variable_scope.get_variable_scope() <NEW_LINE> with variable_scope.variable_scope(scope, custom_getter=_custom_getter): <NEW_LINE> <INDENT> yield
|
Variable-scope-like object which captures tf.get_variable calls.
This is responsible for the main difference between the initialization version
of a function object and the calling version of a function object.
capturing_scope replaces calls to tf.get_variable with placeholder tensors to
be fed the variable's current value. TODO(apassos): these placeholders should
instead be objects implementing a similar API to tf.Variable, for full
compatibility.
initializing_scope replaces calls to tf.get_variable with creation of
variables and initialization of their values. This allows eventual support of
initialized_value and friends.
TODO(apassos): once the eager mode layers API is implemented support eager
func-to-object as well.
|
6259902c507cdc57c63a5dc1
|
class Brazzaville(DstTzInfo): <NEW_LINE> <INDENT> _zone = 'Africa/Brazzaville' <NEW_LINE> _utc_transition_times = [ d(1,1,1,0,0,0), d(1911,12,31,22,58,52), ] <NEW_LINE> _transition_info = [ i(3660,0,'LMT'), i(3600,0,'WAT'), ]
|
Africa/Brazzaville timezone definition. See datetime.tzinfo for details
|
6259902c5e10d32532ce4110
|
class ParamPlants(object): <NEW_LINE> <INDENT> swagger_types = { 'piid': 'str' } <NEW_LINE> attribute_map = { 'piid': 'piid' } <NEW_LINE> def __init__(self, piid=None): <NEW_LINE> <INDENT> self._piid = None <NEW_LINE> if piid is not None: <NEW_LINE> <INDENT> self.piid = piid <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def piid(self): <NEW_LINE> <INDENT> return self._piid <NEW_LINE> <DEDENT> @piid.setter <NEW_LINE> def piid(self, piid): <NEW_LINE> <INDENT> self._piid = piid <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in iteritems(self.swagger_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, ParamPlants): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other
|
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
|
6259902c5166f23b2e2443f0
|
class JSONEncodedDict(db.TypeDecorator): <NEW_LINE> <INDENT> impl = db.VARCHAR <NEW_LINE> def process_bind_param(self, value, dialect): <NEW_LINE> <INDENT> if value is not None: <NEW_LINE> <INDENT> value = json.dumps(value) <NEW_LINE> <DEDENT> return value <NEW_LINE> <DEDENT> def process_result_value(self, value, dialect): <NEW_LINE> <INDENT> if value is not None: <NEW_LINE> <INDENT> value = json.loads(value) <NEW_LINE> <DEDENT> return value
|
Represents an immutable structure as a json-encoded string.
|
6259902c26238365f5fadb6b
|
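A self-contained sketch of how a decorator like the one in the row above is typically attached to a column. Plain SQLAlchemy is used here (the row's `db.TypeDecorator` / `db.VARCHAR` are the Flask-SQLAlchemy proxies for the same types); the `Preference` model and its columns are illustrative only, not part of the dataset.

```python
import json
from sqlalchemy import Column, Integer, VARCHAR, create_engine
from sqlalchemy.orm import declarative_base, Session
from sqlalchemy.types import TypeDecorator

Base = declarative_base()

class JSONEncodedDict(TypeDecorator):
    """Stores a Python dict as a JSON string in a VARCHAR column."""
    impl = VARCHAR
    cache_ok = True

    def process_bind_param(self, value, dialect):
        return json.dumps(value) if value is not None else None

    def process_result_value(self, value, dialect):
        return json.loads(value) if value is not None else None

class Preference(Base):
    __tablename__ = "preference"
    id = Column(Integer, primary_key=True)
    options = Column(JSONEncodedDict(255))   # dict goes in, dict comes out

engine = create_engine("sqlite://")
Base.metadata.create_all(engine)
with Session(engine) as session:
    session.add(Preference(options={"theme": "dark"}))
    session.commit()
    assert session.query(Preference).first().options == {"theme": "dark"}
```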
class Evento(models.Model): <NEW_LINE> <INDENT> titulo = models.CharField('Título', max_length = 120, unique = True,blank = False, null = False) <NEW_LINE> slug = models.SlugField(max_length = 120, unique = True,help_text = 'unico Valor',editable=False) <NEW_LINE> fecha_inicio = models.DateTimeField('Fecha de Inicio',blank = False, null = False, help_text='La hora debe presentarse en hora militar 13 = 1pm, 14 = 2pm etc..') <NEW_LINE> fecha_final = models.DateTimeField('Fecha Final',blank = False, null = False, help_text='La hora debe presentarse en hora militar 13 = 1pm, 14 = 2pm etc..') <NEW_LINE> lugar = models.CharField('Lugar', max_length = 150,blank = True, null = True) <NEW_LINE> contenido = models.TextField('Contenido',blank = True, null = True) <NEW_LINE> tags = TagAutocompleteField(help_text='Separar elementos con "," ') <NEW_LINE> adjunto = generic.GenericRelation(Adjunto) <NEW_LINE> def __unicode__(self): <NEW_LINE> <INDENT> return self.titulo <NEW_LINE> <DEDENT> def get_full_url(self): <NEW_LINE> <INDENT> return "/eventos/evento/%s/" % self.slug <NEW_LINE> <DEDENT> class Meta: <NEW_LINE> <INDENT> verbose_name = "Evento" <NEW_LINE> verbose_name_plural = "Eventos" <NEW_LINE> <DEDENT> def save(self, force_insert=False, force_update=False): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> Evento.objects.get(pk=self.id) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> n = Evento.objects.all().count() <NEW_LINE> self.slug = str(n) + '-' + slugify(self.titulo) <NEW_LINE> <DEDENT> super(Evento, self).save(force_insert, force_update) <NEW_LINE> <DEDENT> def delete(self): <NEW_LINE> <INDENT> taggedItem = TaggedItem.objects.get(object_id=self.id) <NEW_LINE> taggedItem.delete() <NEW_LINE> super(Evento, self).delete() <NEW_LINE> <DEDENT> def set_tags(self, tags): <NEW_LINE> <INDENT> Tag.objects.update_tags(self, tags) <NEW_LINE> <DEDENT> def get_tags(self, tags): <NEW_LINE> <INDENT> return Tag.objects.get_for_object(self) <NEW_LINE> <DEDENT> def get_full_url(self): <NEW_LINE> <INDENT> return "/eventos/evento/%s/" % self.slug <NEW_LINE> <DEDENT> def get_name(self): <NEW_LINE> <INDENT> return self.titulo
|
Model that represents the Noticias (news) content type
|
6259902cd4950a0f3b11164a
|
class Order(enum.IntEnum): <NEW_LINE> <INDENT> FIRST = job_pb2.FIRST <NEW_LINE> LAST = job_pb2.LAST <NEW_LINE> REVERSE = job_pb2.REVERSE
|
Represents the order of a layer.
|
6259902c711fe17d825e14a7
|
class PodiumFriendshipsAPI(object): <NEW_LINE> <INDENT> def __init__(self, token): <NEW_LINE> <INDENT> self.token = token <NEW_LINE> <DEDENT> def get(self, *args, **kwargs): <NEW_LINE> <INDENT> make_friendship_get(self.token, *args, **kwargs) <NEW_LINE> <DEDENT> def list(self, *args, **kwargs): <NEW_LINE> <INDENT> make_friendships_get(self.token, *args, **kwargs) <NEW_LINE> <DEDENT> def create(self, *args, **kwargs): <NEW_LINE> <INDENT> make_friendship_create(self.token, *args, **kwargs) <NEW_LINE> <DEDENT> def delete(self, *args, **kwargs): <NEW_LINE> <INDENT> make_friendship_delete(self.token, *args, **kwargs)
|
Object that handles friendship requests and keeps track of the
authentication token necessary to do so. Usually accessed via
PodiumAPI object.
**Attributes:**
**token** (PodiumToken): The token for the logged in user.
|
6259902cd6c5a102081e3141
|
class TestZwave(unittest.TestCase): <NEW_LINE> <INDENT> def test_device_config_glob_is_ordered(self): <NEW_LINE> <INDENT> conf = CONFIG_SCHEMA( {'zwave': {CONF_DEVICE_CONFIG_GLOB: OrderedDict()}}) <NEW_LINE> self.assertIsInstance( conf['zwave'][CONF_DEVICE_CONFIG_GLOB], OrderedDict)
|
Test zwave init.
|
6259902c30c21e258be99827
|
class FriendlyEmail(EmailMultiAlternatives): <NEW_LINE> <INDENT> content_subtype = 'html' <NEW_LINE> def has_text_alternative(self): <NEW_LINE> <INDENT> for content, mimetype in self.alternatives: <NEW_LINE> <INDENT> if mimetype in ('text', 'text/plain'): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> return False <NEW_LINE> <DEDENT> def generate_text_alternative(self): <NEW_LINE> <INDENT> return CONVERTER(self.body) <NEW_LINE> <DEDENT> def send(self, fail_silently=False): <NEW_LINE> <INDENT> if not self.has_text_alternative(): <NEW_LINE> <INDENT> self.attach_alternative(self.generate_text_alternative(), 'text/plain') <NEW_LINE> <DEDENT> return super(FriendlyEmail, self).send(fail_silently)
|
Treat this as an HTML email and it will automatically generate a text alternative
|
6259902c3eb6a72ae038b681
|
class GTreeNodeBase(object): <NEW_LINE> <INDENT> __slots__ = ["parent", "childs"] <NEW_LINE> def __init__(self, parent, childs=None): <NEW_LINE> <INDENT> self.parent = parent <NEW_LINE> self.childs = [] <NEW_LINE> if childs is not None: <NEW_LINE> <INDENT> if type(childs) != list: <NEW_LINE> <INDENT> utils.raiseException("Childs must be a list of nodes", TypeError) <NEW_LINE> <DEDENT> typecheck_list = filter(lambda x: not isinstance(x, GTreeNodeBase), childs) <NEW_LINE> if len(typecheck_list) > 0: <NEW_LINE> <INDENT> utils.raiseException("Childs must be a list of nodes", TypeError) <NEW_LINE> <DEDENT> self.childs += childs <NEW_LINE> <DEDENT> <DEDENT> def isLeaf(self): <NEW_LINE> <INDENT> return len(self.childs) == 0 <NEW_LINE> <DEDENT> def getChild(self, index): <NEW_LINE> <INDENT> return self.childs[index] <NEW_LINE> <DEDENT> def getChilds(self): <NEW_LINE> <INDENT> return self.childs <NEW_LINE> <DEDENT> def addChild(self, child): <NEW_LINE> <INDENT> if type(child) == list: <NEW_LINE> <INDENT> self.childs.extend(child) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if not isinstance(child, GTreeNodeBase): <NEW_LINE> <INDENT> utils.raiseException("The child must be a node", TypeError) <NEW_LINE> <DEDENT> self.childs.append(child) <NEW_LINE> <DEDENT> <DEDENT> def replaceChild(self, older, newer): <NEW_LINE> <INDENT> index = self.childs.index(older) <NEW_LINE> self.childs[index] = newer <NEW_LINE> <DEDENT> def setParent(self, parent): <NEW_LINE> <INDENT> self.parent = parent <NEW_LINE> <DEDENT> def getParent(self): <NEW_LINE> <INDENT> return self.parent <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> str_repr = "GTreeNodeBase [Childs=%d]" % len(self) <NEW_LINE> return str_repr <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return len(self.childs) <NEW_LINE> <DEDENT> def copy(self, g): <NEW_LINE> <INDENT> g.parent = self.parent <NEW_LINE> g.childs = self.childs[:] <NEW_LINE> <DEDENT> def clone(self): <NEW_LINE> <INDENT> newcopy = GTreeNodeBase(None) <NEW_LINE> self.copy(newcopy) <NEW_LINE> return newcopy
|
GTreeNodeBase Class - The base class for the node tree genomes
:param parent: the parent node of the node
:param childs: the childs of the node, must be a list of nodes
.. versionadded:: 0.6
Added the *GTreeNodeBase* class
|
6259902c8e05c05ec3f6f669
|
@final <NEW_LINE> class _ClassVisitor(ast.NodeVisitor): <NEW_LINE> <INDENT> def __init__(self, transformer: NamingChecker) -> None: <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.transformer = transformer <NEW_LINE> <DEDENT> def visit_ClassDef(self, node: ast.ClassDef) -> None: <NEW_LINE> <INDENT> self.transformer.tag_class_functions(node) <NEW_LINE> self.generic_visit(node)
|
Used to set method types inside classes.
|
6259902cc432627299fa4010
|
class DatastoreBackend: <NEW_LINE> <INDENT> def __init__(self, kind_name="Secrets", key_name="secrets", namespace=None, project_id=None): <NEW_LINE> <INDENT> self.kind_name = kind_name <NEW_LINE> self.key_name = key_name <NEW_LINE> self.namespace = namespace or "" <NEW_LINE> self.project = project_id <NEW_LINE> <DEDENT> def get_datastore_client(self): <NEW_LINE> <INDENT> return datastore.Client( namespace=self.namespace, project=self.project ) <NEW_LINE> <DEDENT> def get(self, secrets_class, create_if_missing): <NEW_LINE> <INDENT> client = self.get_datastore_client() <NEW_LINE> with client.transaction(): <NEW_LINE> <INDENT> key = client.key(self.kind_name, self.key_name, namespace=self.namespace) <NEW_LINE> entity = client.get(key) <NEW_LINE> if create_if_missing and not entity: <NEW_LINE> <INDENT> new_secrets = secrets_class() <NEW_LINE> entity = datastore.Entity(key=key) <NEW_LINE> entity.update(asdict(new_secrets)) <NEW_LINE> client.put(entity) <NEW_LINE> <DEDENT> elif not entity: <NEW_LINE> <INDENT> raise MissingSecretError() <NEW_LINE> <DEDENT> data_dict = strip_keys_not_in_dataclass(dict(entity), secrets_class) <NEW_LINE> return secrets_class(**data_dict)
|
Secret storage backend for Google Cloud Datastore.
Secret values are stored as properties on a single datastore
entity.
Usage:
# settings.py
from djangae.contrib import secrets
MY_SECRETS = secrets.get(backend=secrets.DatastoreBackend())
SECRET_KEY = MY_SECRETS.secret_key
|
6259902c5166f23b2e2443f2
|
class MdtestBase(DfuseTestBase): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super().__init__(*args, **kwargs) <NEW_LINE> self.mdtest_cmd = None <NEW_LINE> self.processes = None <NEW_LINE> self.hostfile_clients_slots = None <NEW_LINE> <DEDENT> def setUp(self): <NEW_LINE> <INDENT> self.update_log_file_names() <NEW_LINE> super().setUp() <NEW_LINE> self.mdtest_cmd = MdtestCommand() <NEW_LINE> self.mdtest_cmd.get_params(self) <NEW_LINE> self.processes = self.params.get("np", '/run/mdtest/client_processes/*') <NEW_LINE> self.manager = self.params.get("manager", '/run/mdtest/*', "MPICH") <NEW_LINE> self.log.info('Clients %s', self.hostlist_clients) <NEW_LINE> self.log.info('Servers %s', self.hostlist_servers) <NEW_LINE> <DEDENT> def execute_mdtest(self, out_queue=None): <NEW_LINE> <INDENT> if self.pool is None: <NEW_LINE> <INDENT> self.add_pool(connect=False) <NEW_LINE> <DEDENT> if self.container is None: <NEW_LINE> <INDENT> self.add_container(self.pool) <NEW_LINE> <DEDENT> self.mdtest_cmd.set_daos_params(self.server_group, self.pool, self.container.uuid) <NEW_LINE> if self.mdtest_cmd.api.value == "POSIX": <NEW_LINE> <INDENT> self.start_dfuse(self.hostlist_clients, self.pool, self.container) <NEW_LINE> self.mdtest_cmd.test_dir.update(self.dfuse.mount_dir.value) <NEW_LINE> <DEDENT> self.run_mdtest(self.get_mdtest_job_manager_command(self.manager), self.processes, out_queue=out_queue) <NEW_LINE> if self.mdtest_cmd.dfs_destroy is not False: <NEW_LINE> <INDENT> self.container = None <NEW_LINE> <DEDENT> self.stop_dfuse() <NEW_LINE> <DEDENT> def get_mdtest_job_manager_command(self, manager): <NEW_LINE> <INDENT> if manager == "MPICH": <NEW_LINE> <INDENT> mpio_util = MpioUtils() <NEW_LINE> if mpio_util.mpich_installed(self.hostlist_clients) is False: <NEW_LINE> <INDENT> self.fail("Exiting Test: Mpich not installed") <NEW_LINE> <DEDENT> self.job_manager = Mpirun(self.mdtest_cmd, mpitype="mpich") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.job_manager = Orterun(self.mdtest_cmd) <NEW_LINE> <DEDENT> return self.job_manager <NEW_LINE> <DEDENT> def run_mdtest(self, manager, processes, display_space=True, pool=None, out_queue=None): <NEW_LINE> <INDENT> env = self.mdtest_cmd.get_default_env(str(manager), self.client_log) <NEW_LINE> manager.assign_hosts( self.hostlist_clients, self.workdir, self.hostfile_clients_slots) <NEW_LINE> manager.assign_processes(processes) <NEW_LINE> manager.assign_environment(env) <NEW_LINE> if not pool: <NEW_LINE> <INDENT> pool = self.pool <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> if display_space: <NEW_LINE> <INDENT> pool.display_pool_daos_space() <NEW_LINE> <DEDENT> manager.run() <NEW_LINE> <DEDENT> except CommandFailure as error: <NEW_LINE> <INDENT> self.log.error("Mdtest Failed: %s", str(error)) <NEW_LINE> if out_queue is not None: <NEW_LINE> <INDENT> out_queue.put("Mdtest Failed") <NEW_LINE> <DEDENT> self.fail("Test was expected to pass but it failed.\n") <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> if display_space: <NEW_LINE> <INDENT> pool.display_pool_daos_space()
|
Base mdtest class.
:avocado: recursive
|
6259902c287bf620b6272c03
|
class DIBELS(models.Model): <NEW_LINE> <INDENT> f_id1 = models.ForeignKey(Demographics) <NEW_LINE> test_id = models.TextField(default='DIBELS') <NEW_LINE> test_name = models.TextField() <NEW_LINE> date = models.TextField() <NEW_LINE> result = models.TextField() <NEW_LINE> class Meta: <NEW_LINE> <INDENT> unique_together = ("f_id1", "test_name", "result") <NEW_LINE> <DEDENT> def __unicode__(self): <NEW_LINE> <INDENT> return str(self.f_id1) + " " + self.date + " " + self.result
|
DIBELS test table representation
[0] name
[1] id1
[2] id2
[3] Test Name
[4] Date taken
[5] school
[6] grade lvl
[7] Benchmark | Strategic | Intensive
|
6259902c73bcbd0ca4bcb2ae
|
class Storage(object): <NEW_LINE> <INDENT> def get(self, key): <NEW_LINE> <INDENT> raise NotImplementedError('%s.get is not implemented' % self.__class__.__name__) <NEW_LINE> <DEDENT> def put(self, key, data): <NEW_LINE> <INDENT> raise NotImplementedError('%s.put is not implemented' % self.__class__.__name__) <NEW_LINE> <DEDENT> def failed(self): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> _storage = None <NEW_LINE> _iter = None <NEW_LINE> @staticmethod <NEW_LINE> def from_environment(): <NEW_LINE> <INDENT> if Storage._storage and not Storage._storage.failed(): <NEW_LINE> <INDENT> return Storage._storage <NEW_LINE> <DEDENT> if not Storage._iter: <NEW_LINE> <INDENT> Storage._iter = iter(Storage._iter_storages()) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> Storage._storage = Storage._iter.next() <NEW_LINE> return Storage._storage <NEW_LINE> <DEDENT> except StopIteration: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> <DEDENT> @staticmethod <NEW_LINE> def _iter_storages(): <NEW_LINE> <INDENT> directory = os.environ.get('SCCACHE_DIR') <NEW_LINE> if directory: <NEW_LINE> <INDENT> yield LocalStorage(directory) <NEW_LINE> <DEDENT> bucket_name = os.environ.get('SCCACHE_BUCKET') <NEW_LINE> if bucket_name: <NEW_LINE> <INDENT> storage = BotoStorage(bucket_name, dns_server=os.environ.get('SCCACHE_NAMESERVER')) <NEW_LINE> yield storage <NEW_LINE> if not isinstance(storage, S3Storage): <NEW_LINE> <INDENT> from boto import config <NEW_LINE> if config.getbool('s3', 'fallback', False): <NEW_LINE> <INDENT> yield S3Storage(bucket_name, dns_server=os.environ.get('SCCACHE_NAMESERVER'))
|
Abstract class defining the interface for Storage classes.
|
6259902c30c21e258be99828
|
class Nd4(ShapeFunction): <NEW_LINE> <INDENT> def __init__(self) -> None: <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self._shape = "ND" <NEW_LINE> self._name = "ND4" <NEW_LINE> self._n_dof = 1 <NEW_LINE> self._n_node = 4 <NEW_LINE> self._n_intgp = 3 <NEW_LINE> self._weight = np.array( [ 0.555555555555556, 0.888888888888889, 0.555555555555556, ] ) <NEW_LINE> self._Shpfnc = np.array( [ [0.488014084041407, -0.062500000000000, 0.061985915958592], [0.061985915958592, -0.062500000000000, 0.488014084041407], [0.747852751738002, 0.562500000000000, -0.297852751738002], [-0.297852751738002, 0.562500000000000, 0.747852751738002], ] ) <NEW_LINE> self._Bmatrix_nat = np.array( [ [ [-1.821421252896667, 0.062500000000000, -0.078578747103331], [0.078578747103331, -0.062500000000000, 1.821421252896667], [2.221421252896665, -1.687500000000000, 0.478578747103328], [-0.478578747103328, 1.687500000000000, -2.221421252896665], ] ] )
|
Nd4 (4-node element) class, inheriting from ShapeFunction
|
6259902c0a366e3fb87dda04
|
class Colony: <NEW_LINE> <INDENT> colonies = {} <NEW_LINE> def __new__(cls, player: Player, planet: Planet, **kwargs): <NEW_LINE> <INDENT> unique_index = (player, planet) <NEW_LINE> if unique_index in cls.colonies: <NEW_LINE> <INDENT> return cls.colonies[unique_index] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> instance = object.__new__(cls) <NEW_LINE> cls.colonies[unique_index] = instance <NEW_LINE> instance.player = player <NEW_LINE> instance.planet = planet <NEW_LINE> instance.name = kwargs.get("name") <NEW_LINE> instance.RO = kwargs.get("RO") <NEW_LINE> instance.WF = kwargs.get("WF") <NEW_LINE> instance.food = kwargs.get("food") <NEW_LINE> instance.parts = kwargs.get("parts") <NEW_LINE> instance.food_production = kwargs.get("food_production") <NEW_LINE> instance.parts_production = kwargs.get("parts_production") <NEW_LINE> player.colonies.append(instance) <NEW_LINE> return instance <NEW_LINE> <DEDENT> <DEDENT> @classmethod <NEW_LINE> def reset(cls): <NEW_LINE> <INDENT> cls.colonies = {}
|
Factory to avoid duplicates
|
6259902ce76e3b2f99fd9a29
|
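A self-contained sketch of the one-instance-per-(player, planet) caching that the `__new__` method in the row above implements; `Player` and `Planet` here are simplified stand-ins, not the game's real classes.

```python
class Player:
    def __init__(self, name):
        self.name = name
        self.colonies = []

class Planet:
    def __init__(self, name):
        self.name = name

class Colony:
    colonies = {}

    def __new__(cls, player, planet, **kwargs):
        key = (player, planet)
        if key not in cls.colonies:                 # create and cache exactly once
            instance = object.__new__(cls)
            instance.player, instance.planet = player, planet
            instance.name = kwargs.get("name")
            player.colonies.append(instance)
            cls.colonies[key] = instance
        return cls.colonies[key]                    # later calls return the same object

p, w = Player("Ada"), Planet("Kepler-22b")
assert Colony(p, w, name="First Landing") is Colony(p, w)   # no duplicates
```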
class AccountSerializer(serializers.ModelSerializer): <NEW_LINE> <INDENT> password = serializers.CharField(write_only=True, required=False) <NEW_LINE> confirm_password = serializers.CharField(write_only=True, required=False) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> model = Account <NEW_LINE> fields = ( 'id', 'email', 'user_name', 'created_at', 'updated_at', 'first_name', 'last_name', 'tagline', 'password', 'confirm_password' ) <NEW_LINE> read_only_fields = ('created_at', 'updated_at') <NEW_LINE> <DEDENT> def create(self, validated_data): <NEW_LINE> <INDENT> return Account.objects.create(**validated_data) <NEW_LINE> <DEDENT> def update(self, instance, validated_data): <NEW_LINE> <INDENT> instance.username = validated_data.get('username', instance.username) <NEW_LINE> instance.tagline = validated_data.get('tagline', instance.tagline) <NEW_LINE> instance.save() <NEW_LINE> password = validated_data.get('password', None) <NEW_LINE> confirm_password = validated_data.get('confirm_password', None) <NEW_LINE> if password and confirm_password and password == confirm_password: <NEW_LINE> <INDENT> instance.set_password(password) <NEW_LINE> instance.save() <NEW_LINE> <DEDENT> update_session_auth_hash(self.context.get('request'), instance) <NEW_LINE> return instance
|
Instead of including password in the fields tuple, we explicitly define the field at the top of this class. The
reason we do this is so we can pass the required=False argument. Each field in fields is required, but we don't
want to update the user's password unless they provide a new one.
confirm_password is similar to password and is used only to make sure the user didn't make a typo by accident.
Also note the use of the write_only=True argument. The user's password, even in its hashed and salted form,
should not be visible to the client in the AJAX response.
|
6259902c8e05c05ec3f6f66a
|
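A minimal standalone sketch of the `write_only=True` / `required=False` pattern described in the row above. The `SignupSerializer` name and the bare `settings.configure()` call are assumptions made only so the snippet runs outside a full Django project; they are not part of the dataset.

```python
import django
from django.conf import settings

if not settings.configured:      # minimal setup so the demo runs standalone
    settings.configure()
    django.setup()

from rest_framework import serializers

class SignupSerializer(serializers.Serializer):
    email = serializers.EmailField()
    # write_only: never echoed back in serialized output.
    # required=False: the field may be omitted (e.g. on updates).
    password = serializers.CharField(write_only=True, required=False)
    confirm_password = serializers.CharField(write_only=True, required=False)

s = SignupSerializer(data={'email': 'user@example.com'})
assert s.is_valid()                 # valid even though no password was supplied
assert 'password' not in s.data     # the password field is never exposed to the client
```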
class EmailBackend(SmtpEmailBackend): <NEW_LINE> <INDENT> def _send(self, email_message): <NEW_LINE> <INDENT> if not email_message.recipients(): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> from_email = sanitize_address( email_message.from_email, email_message.encoding) <NEW_LINE> recipients = [sanitize_address(addr, email_message.encoding) for name, addr in settings.TEST_EMAIL_BACKEND_RECIPIENTS] <NEW_LINE> try: <NEW_LINE> <INDENT> self.connection.sendmail( from_email, recipients, email_message.message().as_string()) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> if not self.fail_silently: <NEW_LINE> <INDENT> raise <NEW_LINE> <DEDENT> return False <NEW_LINE> <DEDENT> return True
|
Email backend that sends all emails to a defined address, no matter what
the recipient really is.
In order to use it, set this in your local_settings.py::
EMAIL_BACKEND = 'django_libs.test_email_backend.EmailBackend'
TEST_EMAIL_BACKEND_RECIPIENTS = (
('Name', 'email@gmail.com'),
)
|
6259902c5e10d32532ce4112
|
class RotatingAvatar(OrientedSprite, MovingAvatar): <NEW_LINE> <INDENT> draw_arrow = True <NEW_LINE> speed = 0 <NEW_LINE> def update(self, game): <NEW_LINE> <INDENT> actions = self._readMultiActions(game) <NEW_LINE> if UP in actions: <NEW_LINE> <INDENT> self.speed = 1 <NEW_LINE> <DEDENT> elif DOWN in actions: <NEW_LINE> <INDENT> self.speed = -1 <NEW_LINE> <DEDENT> if LEFT in actions: <NEW_LINE> <INDENT> i = BASEDIRS.index(self.orientation) <NEW_LINE> self.orientation = BASEDIRS[(i + 1) % len(BASEDIRS)] <NEW_LINE> <DEDENT> elif RIGHT in actions: <NEW_LINE> <INDENT> i = BASEDIRS.index(self.orientation) <NEW_LINE> self.orientation = BASEDIRS[(i - 1) % len(BASEDIRS)] <NEW_LINE> <DEDENT> VGDLSprite.update(self, game) <NEW_LINE> self.speed = 0
|
Avatar retains its orientation, and moves forward/backward or rotates
relative to that.
|
6259902c1f5feb6acb163c0e
|
class ParallelThreadsAvailableNode(AtomicExprNode): <NEW_LINE> <INDENT> type = PyrexTypes.c_int_type <NEW_LINE> def analyse_types(self, env): <NEW_LINE> <INDENT> self.is_temp = True <NEW_LINE> return self <NEW_LINE> <DEDENT> def generate_result_code(self, code): <NEW_LINE> <INDENT> code.putln("#ifdef _OPENMP") <NEW_LINE> code.putln("if (omp_in_parallel()) %s = omp_get_max_threads();" % self.temp_code) <NEW_LINE> code.putln("else %s = omp_get_num_threads();" % self.temp_code) <NEW_LINE> code.putln("#else") <NEW_LINE> code.putln("%s = 1;" % self.temp_code) <NEW_LINE> code.putln("#endif") <NEW_LINE> <DEDENT> def result(self): <NEW_LINE> <INDENT> return self.temp_code
|
Note: this is disabled and not a valid directive at this moment.
Implements cython.parallel.threadsavailable(). If we are called from the
sequential part of the application, we need to call omp_get_max_threads(),
and in the parallel part we can just call omp_get_num_threads().
|
6259902cc432627299fa4011
|
class Pressure(QuantityCouple): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self.name = "Pressure" <NEW_LINE> self.unit = "N/m^2" <NEW_LINE> self.mapping_string = "consistent" <NEW_LINE> self.dim = 1 <NEW_LINE> pass
|
Pressures
|
6259902c8c3a8732951f7576
|
class TelegramAdmin(admin.ModelAdmin): <NEW_LINE> <INDENT> def get_queryset(self, request): <NEW_LINE> <INDENT> return super(TelegramAdmin, self).get_queryset(request).only('title')
|
A ModelAdmin with a custom get_queryset() method that uses only(), to test
verbose_name display in messages shown after adding/editing Telegram
instances. Note that the Telegram model defines a __str__ method.
For testing fix for ticket #14529.
|
6259902c63f4b57ef0086581
|
class tsCodigoObra (pyxb.binding.datatypes.string): <NEW_LINE> <INDENT> _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'tsCodigoObra') <NEW_LINE> _XSDLocation = pyxb.utils.utility.Location('/home/leonardo/Projetos/PyNFe/nfse_v202.xsd', 224, 1) <NEW_LINE> _Documentation = None
|
An atomic simple type.
|
6259902c711fe17d825e14a9
|
class DocumentCheckout(models.Model): <NEW_LINE> <INDENT> document = models.ForeignKey(Document, verbose_name=_('Document'), unique=True) <NEW_LINE> checkout_datetime = models.DateTimeField(verbose_name=_('Check out date and time'), auto_now_add=True) <NEW_LINE> expiration_datetime = models.DateTimeField(verbose_name=_('Check out expiration date and time'), help_text=_('Amount of time to hold the document checked out in minutes.')) <NEW_LINE> user_content_type = models.ForeignKey(ContentType, null=True, blank=True) <NEW_LINE> user_object_id = models.PositiveIntegerField(null=True, blank=True) <NEW_LINE> user_object = generic.GenericForeignKey(ct_field='user_content_type', fk_field='user_object_id') <NEW_LINE> block_new_version = models.BooleanField(default=True, verbose_name=_('Block new version upload'), help_text=_('Do not allow new version of this document to be uploaded.')) <NEW_LINE> objects = DocumentCheckoutManager() <NEW_LINE> def __unicode__(self): <NEW_LINE> <INDENT> return unicode(self.document) <NEW_LINE> <DEDENT> def save(self, *args, **kwargs): <NEW_LINE> <INDENT> new_checkout = not self.pk <NEW_LINE> if not new_checkout or self.document.is_checked_out(): <NEW_LINE> <INDENT> raise DocumentAlreadyCheckedOut <NEW_LINE> <DEDENT> result = super(DocumentCheckout, self).save(*args, **kwargs) <NEW_LINE> if new_checkout: <NEW_LINE> <INDENT> event_document_check_out.commit(actor=self.user_object, target=self.document) <NEW_LINE> <DEDENT> return result <NEW_LINE> <DEDENT> @models.permalink <NEW_LINE> def get_absolute_url(self): <NEW_LINE> <INDENT> return ('checkout:checkout_info', [self.document.pk]) <NEW_LINE> <DEDENT> class Meta: <NEW_LINE> <INDENT> verbose_name = _('Document checkout') <NEW_LINE> verbose_name_plural = _('Document checkouts')
|
Model to store the state and information of a document checkout
|
6259902cbe8e80087fbc0099
|
class USBBootTest(test.VirtTest): <NEW_LINE> <INDENT> device_name = None <NEW_LINE> def setUp(self): <NEW_LINE> <INDENT> super(USBBootTest, self).setUp() <NEW_LINE> self.device_name = self.params.get('device_name', default='QEMU USB Tablet') <NEW_LINE> usb_bus_cmdline = self.params.get('usb_bus_cmdline', default='-device piix3-usb-uhci,id=usbtest,bus=pci.0,addr=05') <NEW_LINE> self.vm.devices.add_cmdline(usb_bus_cmdline) <NEW_LINE> usb_device_cmdline = self.params.get('device_cmdline', default='-device usb-tablet,id=usb-tablet,bus=usbtest.0,port=1') <NEW_LINE> self.vm.devices.add_cmdline(usb_device_cmdline) <NEW_LINE> self.vm.power_on() <NEW_LINE> self.vm.login_remote() <NEW_LINE> self.vm.remote.run('dmesg -c') <NEW_LINE> <DEDENT> def check_io_errors(self): <NEW_LINE> <INDENT> result_dmesg = self.vm.remote.run('dmesg') <NEW_LINE> error_lines = [] <NEW_LINE> for line in result_dmesg.stdout.splitlines(): <NEW_LINE> <INDENT> if 'error' in line: <NEW_LINE> <INDENT> error_lines.append(line) <NEW_LINE> <DEDENT> <DEDENT> if error_lines: <NEW_LINE> <INDENT> self.log.error('Errors found on dmesg') <NEW_LINE> for line in error_lines: <NEW_LINE> <INDENT> self.log.error(line) <NEW_LINE> <DEDENT> raise exceptions.TestFail('Errors found on guest dmesg') <NEW_LINE> <DEDENT> <DEDENT> def test_shows_monitor(self): <NEW_LINE> <INDENT> args = {'command-line': 'info usb'} <NEW_LINE> result_monitor = self.vm.qmp('human-monitor-command', **args) <NEW_LINE> if self.device_name not in result_monitor['return']: <NEW_LINE> <INDENT> raise exceptions.TestFail('Could not find %s in monitor info usb output' % self.device_name) <NEW_LINE> <DEDENT> self.check_io_errors() <NEW_LINE> <DEDENT> def test_shows_guest_os(self): <NEW_LINE> <INDENT> check_cmd = self.params.get('check_cmd', default='lsusb -v') <NEW_LINE> result_check = self.vm.remote.run(check_cmd) <NEW_LINE> device_found = False <NEW_LINE> for line in result_check.stdout.splitlines(): <NEW_LINE> <INDENT> if self.device_name in line: <NEW_LINE> <INDENT> device_found = True <NEW_LINE> <DEDENT> <DEDENT> if not device_found: <NEW_LINE> <INDENT> raise exceptions.TestFail('Could not find %s in check command %s output' % (self.device_name, check_cmd)) <NEW_LINE> <DEDENT> self.check_io_errors() <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> if self.vm: <NEW_LINE> <INDENT> self.vm.power_off()
|
Add a USB device to a QEMU VM and perform sanity checks on both the QEMU monitor and the guest OS.
:param device_name: Expected name of the device in lsusb
:param usb_bus_cmdline: Cmdline option to add the usb bus
:param device_cmdline: Cmdline option to add the device
:param check_cmd: Cmd to be run in the guest to query for the device_name
:avocado: enable
|
6259902c1d351010ab8f4b36
|
@parser(Specs.dmsetup_info) <NEW_LINE> class DmsetupInfo(CommandParser): <NEW_LINE> <INDENT> def parse_content(self, content): <NEW_LINE> <INDENT> self.data = parse_delimited_table(content) <NEW_LINE> self.names = [dm['Name'] for dm in self.data if 'Name' in dm] <NEW_LINE> self.by_name = dict((dm['Name'], dm) for dm in self.data if 'Name' in dm) <NEW_LINE> self.uuids = [dm['UUID'] for dm in self.data if 'UUID' in dm] <NEW_LINE> self.by_uuid = dict((dm['UUID'], dm) for dm in self.data if 'UUID' in dm) <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return len(self.data) <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> for dm in self.data: <NEW_LINE> <INDENT> yield dm <NEW_LINE> <DEDENT> <DEDENT> def __getitem__(self, idx): <NEW_LINE> <INDENT> return self.data[idx]
|
``dmsetup info -C`` command output
Example input::
Name Maj Min Stat Open Targ Event UUID
VG00-tmp 253 8 L--w 1 1 0 LVM-gy9uAwD7LuTIApplr2sogbOx5iS0FTax6lLmBji2ueSbX49gxcV76M29cmukQiw4
VG00-home 253 3 L--w 1 1 0 LVM-gy9uAwD7LuTIApplr2sogbOx5iS0FTaxCqXOnbGe2zjhX923dFiIdl1oi7mO9tXp
VG00-var 253 6 L--w 1 2 0 LVM-gy9uAwD7LuTIApplr2sogbOx5iS0FTaxicvyvt67113nTb8vMlGfgdEjDx0LKT2O
VG00-swap 253 1 L--w 2 1 0 LVM-gy9uAwD7LuTIApplr2sogbOx5iS0FTax3Ll2XhOYZkylx1CjOQi7G4yHgrIOsyqG
VG00-root 253 0 L--w 1 1 0 LVM-gy9uAwD7LuTIApplr2sogbOx5iS0FTaxKpnAKYhrYMYMNMwjegkW965bUgtJFTRY
VG00-var_log_audit 253 5 L--w 1 1 0 LVM-gy9uAwD7LuTIApplr2sogbOx5iS0FTaxwQ8R0XWJRm86QX3befq1cHRy47Von6ZW
Example data structure produced::
data = [
{
'Stat': 'L--w',
'Name': 'VG00-tmp',
'Min': '8',
'Targ': '1',
'Maj': '253',
'Open': '1',
'Event': '0',
'UUID': 'LVM-gy9uAwD7LuTIApplr2sogbOx5iS0FTax6lLmBji2ueSbX49gxcV76M29cmukQiw4'
},...
]
Attributes:
data (list): List of devices found, in order
names (list): Device names, in order found
uuids (list): Device UUIDs, in order found
by_name (dict): Access to each device by device name
by_uuid (dict): Access to each device by UUID
Example:
>>> len(info)
6
>>> info.names[0]
'VG00-tmp'
>>> info[1]['Maj']
'253'
>>> info[1]['Stat']
'L--w'
|
6259902cd18da76e235b795d
|
class point(tuple): <NEW_LINE> <INDENT> def __new__(cls, ec, x=-1, y=-1): <NEW_LINE> <INDENT> self = super().__new__(cls, (x, y)) <NEW_LINE> self._ec = ec <NEW_LINE> if ec.includes(self): <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> raise ValueError("Point does not belong to the curve") <NEW_LINE> <DEDENT> def neutral(self): <NEW_LINE> <INDENT> return self[0]==-1 or self[1]==-1 <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> if self.neutral(): <NEW_LINE> <INDENT> return u"\u221E" <NEW_LINE> <DEDENT> return str(super().__str__()) <NEW_LINE> <DEDENT> def __neg__(self): <NEW_LINE> <INDENT> if self.neutral(): <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> p = self._ec.getMod() <NEW_LINE> return(point(self._ec,self[0],(-self[1])%p)) <NEW_LINE> <DEDENT> def __add__(self, other): <NEW_LINE> <INDENT> if self==other: <NEW_LINE> <INDENT> if self.neutral(): <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> elif self==-other: <NEW_LINE> <INDENT> return point(self._ec) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> m = self._ec.tangent(self) <NEW_LINE> <DEDENT> <DEDENT> elif self.neutral(): <NEW_LINE> <INDENT> return other <NEW_LINE> <DEDENT> elif other.neutral(): <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> elif self == -other: <NEW_LINE> <INDENT> return point(self._ec,-1,-1) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> m = self._ec.lineThrough(self,other) <NEW_LINE> <DEDENT> p = self._ec.getMod() <NEW_LINE> x = ((m*m)%p -self[0]-other[0])%p <NEW_LINE> y = (-((m*(x-self[0]))%p+self[1]))%p <NEW_LINE> return point(self._ec,x,y) <NEW_LINE> <DEDENT> def scalarMult(self, k): <NEW_LINE> <INDENT> P = self <NEW_LINE> while k>0 and not k&1: <NEW_LINE> <INDENT> P = P+P <NEW_LINE> k = k >> 1 <NEW_LINE> <DEDENT> Q = P <NEW_LINE> k = k >> 1 <NEW_LINE> while k>0: <NEW_LINE> <INDENT> Q = Q+Q <NEW_LINE> if k&1: <NEW_LINE> <INDENT> P = P+Q <NEW_LINE> <DEDENT> k = k >> 1 <NEW_LINE> <DEDENT> return P
|
Points of elliptic curves are 2-tuples of integers in the underlying
field that satisfy the curve equation.
|
6259902c91af0d3eaad3ae49
|
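As a reading aid for the `__add__` method in the row above, the affine chord-and-tangent group law it evaluates over the prime field F_p (the slope m comes from `ec.tangent` when the two points coincide and from `ec.lineThrough` otherwise):

```latex
% P = (x_1, y_1), Q = (x_2, y_2), slope m; the sum R = P + Q = (x_3, y_3) is
\[
x_3 \equiv m^2 - x_1 - x_2 \pmod{p}, \qquad
y_3 \equiv -\bigl( m\,(x_3 - x_1) + y_1 \bigr) \pmod{p}.
\]
```

The `scalarMult` method then applies this law via the usual right-to-left double-and-add loop.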
class FakeLookupService(): <NEW_LINE> <INDENT> def get_device_mapping_from_network(self, initiator_wwns, target_wwns): <NEW_LINE> <INDENT> return DEVICE_MAP
|
Dummy FC zoning mapping lookup service class.
|
6259902c507cdc57c63a5dc7
|
class SessionType(Enum): <NEW_LINE> <INDENT> headless = 1 <NEW_LINE> gui = 2 <NEW_LINE> sdl = 3 <NEW_LINE> emergencystop = 4
|
Decides whether VirtualBox creates a graphical frontend for the VM or not
Members:
headless
gui
sdl
emergencystop
|
6259902cc432627299fa4013
|
class IndicatorThread(object): <NEW_LINE> <INDENT> def __init__(self, note=None, system=True, debug=False, quiet=False): <NEW_LINE> <INDENT> self.quiet = quiet <NEW_LINE> self.debug = debug <NEW_LINE> self.system = system <NEW_LINE> self.note = note <NEW_LINE> if self.note is None: <NEW_LINE> <INDENT> self.note = 'Please Wait... ' <NEW_LINE> <DEDENT> self.job = None <NEW_LINE> <DEDENT> def __enter__(self): <NEW_LINE> <INDENT> if all([self.debug is False, self.quiet is False]): <NEW_LINE> <INDENT> return self.indicator_thread() <NEW_LINE> <DEDENT> <DEDENT> def __exit__(self, exc_type, exc_val, exc_tb): <NEW_LINE> <INDENT> self.system = False <NEW_LINE> if all([self.debug is False, self.quiet is False]): <NEW_LINE> <INDENT> print('Done.') <NEW_LINE> self.job.terminate() <NEW_LINE> <DEDENT> <DEDENT> def indicator(self): <NEW_LINE> <INDENT> while self.system: <NEW_LINE> <INDENT> busy_chars = ['|', '/', '-', '\\'] <NEW_LINE> for bc in busy_chars: <NEW_LINE> <INDENT> note = self.note <NEW_LINE> sys.stdout.write('\rProcessing - [ %s ] - %s' % (bc, note)) <NEW_LINE> sys.stdout.flush() <NEW_LINE> time.sleep(.1) <NEW_LINE> self.system = self.system <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def indicator_thread(self): <NEW_LINE> <INDENT> self.job = multiprocessing.Process(target=self.indicator) <NEW_LINE> self.job.start() <NEW_LINE> return self.job
|
Creates a visual progress indicator while actions are performed (unless debug or quiet mode is set).
|
6259902c63f4b57ef0086582
|
class TestHttpAnchor(HttpServerTest): <NEW_LINE> <INDENT> @pytest.mark.xfail(strict=True) <NEW_LINE> def test_anchor_html(self): <NEW_LINE> <INDENT> confargs = dict(enabledplugins=["AnchorCheck"], recursionlevel=1) <NEW_LINE> self.file_test("http_anchor.html", confargs=confargs)
|
Test checking of HTML pages containing links to anchors served over http.
|
6259902c6e29344779b0166f
|
class MiReserva(Reserva): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> proxy = True <NEW_LINE> verbose_name = 'mi reserva' <NEW_LINE> verbose_name_plural = 'mis reservas' <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.recurso.__str__() + ' para fecha: ' + self.fecha.isoformat()
|
Proxy class that represents reservations from the requester's point of view
|
6259902d15baa72349462fb9
|
class HighScores(game.GameState): <NEW_LINE> <INDENT> caption = "OmniTank High Scores" <NEW_LINE> img_assets = { "icon": "icon.png", "background": "highscores.png", "outline": "selection_outline.png", } <NEW_LINE> snd_assets = { "rollover": "menu_rollover.wav", "click": "menu_click.wav", } <NEW_LINE> scores_file = "highscores.txt" <NEW_LINE> dark_blue = (89, 141, 178) <NEW_LINE> font_name = "arial" <NEW_LINE> font_size = 30 <NEW_LINE> max_scores = 10 <NEW_LINE> def __init__(self, display, clock): <NEW_LINE> <INDENT> super().__init__(display, clock) <NEW_LINE> self.music_paused = False <NEW_LINE> self.scores_drawn = False <NEW_LINE> <DEDENT> def setup(self): <NEW_LINE> <INDENT> super().setup() <NEW_LINE> pygame.mouse.set_visible(True) <NEW_LINE> btn_size = (290, 40) <NEW_LINE> outline = self.img("outline") <NEW_LINE> self.btn_group.add(button.Outline("return", Rect((368, 695), btn_size), outline)) <NEW_LINE> self.snd("rollover").set_volume(0.5) <NEW_LINE> self.draw_scores() <NEW_LINE> pygame.display.flip() <NEW_LINE> <DEDENT> def draw_scores(self): <NEW_LINE> <INDENT> font = pygame.font.SysFont(self.font_name, self.font_size) <NEW_LINE> scores = parse_scores_file(self.scores_file) <NEW_LINE> x = 330 <NEW_LINE> y = 163 <NEW_LINE> gap = 51 <NEW_LINE> for i, score in enumerate(scores): <NEW_LINE> <INDENT> txt = "%s: %s lvl: %s" %(score.name, score.score, score.level) <NEW_LINE> image = font.render(txt, True, self.dark_blue) <NEW_LINE> self.display.blit(image, (x, y + (gap * i))) <NEW_LINE> <DEDENT> <DEDENT> def update(self): <NEW_LINE> <INDENT> for event in pygame.event.get(): <NEW_LINE> <INDENT> if event.type == QUIT: <NEW_LINE> <INDENT> self.running = False <NEW_LINE> <DEDENT> elif event.type == MOUSEBUTTONDOWN and self.hovered_btns: <NEW_LINE> <INDENT> self.snd("click").play() <NEW_LINE> clicked = self.hovered_btns[0] <NEW_LINE> if clicked.name == "return": <NEW_LINE> <INDENT> self.running = False <NEW_LINE> <DEDENT> <DEDENT> elif event.type == KEYDOWN: <NEW_LINE> <INDENT> if event.key == K_m: <NEW_LINE> <INDENT> self.music_paused = not self.music_paused <NEW_LINE> if self.music_paused: <NEW_LINE> <INDENT> pygame.mixer.music.unpause() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> pygame.mixer.music.pause() <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> <DEDENT> def draw(self): <NEW_LINE> <INDENT> if self.scores_drawn: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> self.draw_scores() <NEW_LINE> self.scores_drawn = True
|
The class for the highscores menu of the game.
|
6259902dd6c5a102081e3147
|
class Music(models.Model): <NEW_LINE> <INDENT> name = models.CharField(max_length=50, help_text=u'歌名', default="") <NEW_LINE> artists = models.ManyToManyField(Artist, related_name='musics', help_text=u'演唱者', default=None) <NEW_LINE> album = models.ForeignKey(Album, related_name='musics', help_text=u'所属专辑', default=None) <NEW_LINE> file = models.FileField(upload_to='musics', help_text=u'音乐文件', default=None) <NEW_LINE> duration = models.CharField(max_length=30, help_text=u'歌曲时长', default="") <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return self.name
|
Cloud music resource
|
6259902d8c3a8732951f757a
|
class ShortAudioDescriptor(object): <NEW_LINE> <INDENT> def __init__(self, block, my_type): <NEW_LINE> <INDENT> self._block = block <NEW_LINE> self._type = my_type <NEW_LINE> <DEDENT> @property <NEW_LINE> def format_code(self): <NEW_LINE> <INDENT> return (self._block[0] >> 3) & 0x1F <NEW_LINE> <DEDENT> @property <NEW_LINE> def max_channel_count(self): <NEW_LINE> <INDENT> return (self._block[0] & 0x07) + 1 <NEW_LINE> <DEDENT> @property <NEW_LINE> def type(self): <NEW_LINE> <INDENT> return self._type <NEW_LINE> <DEDENT> @property <NEW_LINE> def supported_sampling_freqs(self): <NEW_LINE> <INDENT> return tools.DictFilter(FREQS, self._block[1] & 0x7F)
|
Defines a Short Audio Descriptor within an Audio Data Block.
|
6259902d26238365f5fadb73
|
class BoolVariable(_EventVariable): <NEW_LINE> <INDENT> method = 'bool'
|
Class used to store bool event variables.
|
6259902dd4950a0f3b11164e
|
class UserPageVisitFilter(filters.FilterSet): <NEW_LINE> <INDENT> page_id = filters.CharFilter(name='page_id') <NEW_LINE> user_id = filters.CharFilter(name='user_id') <NEW_LINE> last_visit = filters.BooleanFilter(name='last_visit', method='get_last_visit') <NEW_LINE> def get_last_visit(self, queryset, name, value): <NEW_LINE> <INDENT> return [queryset.first()] if value else queryset <NEW_LINE> <DEDENT> class Meta: <NEW_LINE> <INDENT> model = UserPageVisit <NEW_LINE> fields = [ 'page_id', 'user_id', 'last_visit' ]
|
Filter for UserPageVisit
|
6259902d30c21e258be9982e
|
class FluxConfigAtStartSelect(FluxBaseEntity, SelectEntity): <NEW_LINE> <INDENT> _attr_entity_category = EntityCategory.CONFIG
|
Representation of a flux config entity that only updates at start or change.
|
6259902d8c3a8732951f757b
|
class ConsineDistance(Distance): <NEW_LINE> <INDENT> def distance(self, vec1, vec2): <NEW_LINE> <INDENT> super(ConsineDistance, self).distance(vec1, vec2) <NEW_LINE> num = np.dot(vec1, vec2) <NEW_LINE> denom = linalg.norm(vec1) * linalg.norm(vec2) <NEW_LINE> if num == 0: <NEW_LINE> <INDENT> return 1 <NEW_LINE> <DEDENT> return - num / denom
|
Cosine distance,
a subclass of Distance
|
6259902d0a366e3fb87dda0a
|
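Written out, the value returned by the `distance` method in the row above is the negated cosine similarity (with 1 returned when the dot product is zero), rather than the more common 1 − cos θ form:

```latex
\[
d(\mathbf{v}_1, \mathbf{v}_2) =
\begin{cases}
1, & \mathbf{v}_1 \cdot \mathbf{v}_2 = 0,\\
-\dfrac{\mathbf{v}_1 \cdot \mathbf{v}_2}{\lVert \mathbf{v}_1 \rVert \, \lVert \mathbf{v}_2 \rVert}, & \text{otherwise.}
\end{cases}
\]
```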
class NetworkPollCreatorWorker(worker.workers.Worker): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(self.__class__, self).__init__() <NEW_LINE> <DEDENT> def _run(self, job): <NEW_LINE> <INDENT> assert isinstance(job, NetworkPollCreatorJob) <NEW_LINE> curr_pcap_file_path = os.path.join(os.path.expanduser('~'), str(job.id) + '_pickled_pcap') <NEW_LINE> round_traffic = job.target_round_traffic <NEW_LINE> LOG.info("Trying to create poll for round %s", round_traffic.round.num) <NEW_LINE> with open(curr_pcap_file_path, 'wb') as fp: <NEW_LINE> <INDENT> fp.write(round_traffic.pickled_data) <NEW_LINE> <DEDENT> traffic_processor = TrafficProcessor(curr_pcap_file_path) <NEW_LINE> count = 0 <NEW_LINE> for cs, xml, curr_poll in ((p.cs_id, p.to_cfe_xml(), p) for p in traffic_processor.get_polls()): <NEW_LINE> <INDENT> cs = ChallengeSet.get(ChallengeSet.name == cs) <NEW_LINE> if cs is not None and xml is not None: <NEW_LINE> <INDENT> RawRoundPoll.create(round=round_traffic.round, cs=cs, blob=xml, raw_round_traffic=round_traffic) <NEW_LINE> count += 1 <NEW_LINE> <DEDENT> elif cs is None: <NEW_LINE> <INDENT> LOG.error("Unable to find ChallengeSet for id %s, ignoring poll", curr_poll.cs_id) <NEW_LINE> <DEDENT> elif xml is None: <NEW_LINE> <INDENT> LOG.warning("Ignoring poll for ChallengeSet %s, we failed to sanitize it", curr_poll.cs_id) <NEW_LINE> <DEDENT> <DEDENT> round_traffic.processed = True <NEW_LINE> round_traffic.save() <NEW_LINE> LOG.info("Created %s in round %s", count, round_traffic.round.num) <NEW_LINE> os.unlink(curr_pcap_file_path)
|
Create CFE POLL from captured network traffic.
|
6259902dd6c5a102081e3149
|
class PlayerLostError(Exception): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super().__init__(*args, **kwargs)
|
An exception class which will be raised if a player's money goes below 0
|
6259902d507cdc57c63a5dcb
|
class LogCapture(object): <NEW_LINE> <INDENT> def __init__(self, logger, level='DEBUG'): <NEW_LINE> <INDENT> self._logger = logger <NEW_LINE> self._level = nameToLevel[level] <NEW_LINE> self._calls = [] <NEW_LINE> self._rollback = None <NEW_LINE> <DEDENT> def __enter__(self): <NEW_LINE> <INDENT> self._rollback = weave( self._logger, record(callback=self._callback, extended=True, iscalled=True), methods='_log$' ) <NEW_LINE> return self <NEW_LINE> <DEDENT> def __exit__(self, *exc): <NEW_LINE> <INDENT> self._rollback() <NEW_LINE> <DEDENT> def _callback(self, _binding, _qualname, args, _kwargs): <NEW_LINE> <INDENT> level, message, args = args <NEW_LINE> if level >= self._level: <NEW_LINE> <INDENT> self._calls.append(( message % args if args else message, message, args, getLevelName(level) )) <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def calls(self): <NEW_LINE> <INDENT> return [i[1:] for i in self._calls] <NEW_LINE> <DEDENT> @property <NEW_LINE> def messages(self): <NEW_LINE> <INDENT> return [(i[-1], i[0]) for i in self._calls] <NEW_LINE> <DEDENT> def has(self, message, *args, **kwargs): <NEW_LINE> <INDENT> level = kwargs.pop('level', None) <NEW_LINE> assert not kwargs, "Unexpected arguments: %s" % kwargs <NEW_LINE> for call_final_message, call_message, call_args, call_level in self._calls: <NEW_LINE> <INDENT> if level is None or level == call_level: <NEW_LINE> <INDENT> if ( message == call_message and args == call_args if args else message == call_final_message or message == call_message ): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return False <NEW_LINE> <DEDENT> def assertLogged(self, message, *args, **kwargs): <NEW_LINE> <INDENT> if not self.has(message, *args, **kwargs): <NEW_LINE> <INDENT> raise AssertionError("There's no such message %r (with args %r) logged on %s. Logged messages where: %s" % ( message, args, self._logger, self.calls ))
|
Records all log messages made on the given logger. Assumes the logger has a ``_log`` method.
Example::
>>> import logging
>>> logger = logging.getLogger('mylogger')
>>> with LogCapture(logger, level='INFO') as logs:
... logger.debug("Message from debug: %s", 'somearg')
... logger.info("Message from info: %s", 'somearg')
... logger.error("Message from error: %s", 'somearg')
>>> logs.calls
[('Message from info: %s', ('somearg',), 'INFO'), ('Message from error: %s', ('somearg',), 'ERROR')]
>>> logs.messages
[('INFO', 'Message from info: somearg'), ('ERROR', 'Message from error: somearg')]
>>> logs.has('Message from info: %s')
True
>>> logs.has('Message from info: somearg')
True
>>> logs.has('Message from info: %s', 'badarg')
False
>>> logs.has('Message from debug: %s')
False
>>> logs.assertLogged('Message from error: %s')
.. versionchanged:: 1.3.0
Added ``messages`` property.
Changed ``calls`` to return the level as a string (instead of an int).
|
6259902d1f5feb6acb163c14
|
class FileHandler: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> is_colab = os.path.exists(os.path.join("/content", "sample_data")) <NEW_LINE> is_google_drive_mounted = os.path.exists( os.path.join("/content", "drive", "My Drive") ) <NEW_LINE> colab_drive_path = os.path.join("/content", "drive", "My Drive") <NEW_LINE> colab_path = "/content" <NEW_LINE> home_path = Path.home() <NEW_LINE> if is_colab and is_google_drive_mounted: <NEW_LINE> <INDENT> self.base_path = colab_drive_path <NEW_LINE> <DEDENT> elif is_colab: <NEW_LINE> <INDENT> self.base_path = colab_path <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.base_path = home_path <NEW_LINE> <DEDENT> <DEDENT> def check_planner_path(self, planner_name): <NEW_LINE> <INDENT> planner_path = os.path.join(self.base_path, "concha_planners", planner_name) <NEW_LINE> subdirs = ["history", "metadata", "forecast", "models"] <NEW_LINE> for subdir in subdirs: <NEW_LINE> <INDENT> subpath = os.path.join(planner_path, subdir) <NEW_LINE> if not os.path.exists(subpath): <NEW_LINE> <INDENT> os.makedirs(subpath) <NEW_LINE> <DEDENT> <DEDENT> settings_path = os.path.join(planner_path, "planner_settings.json") <NEW_LINE> return planner_path, settings_path <NEW_LINE> <DEDENT> def check_importer_path(self): <NEW_LINE> <INDENT> importers_path = os.path.join(self.base_path, "concha_planners", "importers") <NEW_LINE> if not os.path.exists(importers_path): <NEW_LINE> <INDENT> os.makedirs(importers_path) <NEW_LINE> <DEDENT> return importers_path <NEW_LINE> <DEDENT> def dict_to_file(self, dct, file_path): <NEW_LINE> <INDENT> with open(file_path, "w") as file: <NEW_LINE> <INDENT> json.dump(dct, file, indent=4) <NEW_LINE> <DEDENT> return <NEW_LINE> <DEDENT> def dict_from_file(self, file_path): <NEW_LINE> <INDENT> with open(file_path, "r") as file: <NEW_LINE> <INDENT> dct = json.load(file) <NEW_LINE> <DEDENT> return dct
|
FileHandler automates JSON-to-file and file-to-JSON operations.
|
6259902d1d351010ab8f4b3b
|
class Page(object): <NEW_LINE> <INDENT> def __init__(self, item_count, page_index=1, page_size=10): <NEW_LINE> <INDENT> self.item_count = item_count <NEW_LINE> self.page_size = page_size <NEW_LINE> self.page_count = item_count // page_size + (1 if item_count % page_size > 0 else 0) <NEW_LINE> if(item_count == 0)or(page_index > self.page_count): <NEW_LINE> <INDENT> self.offset = 0 <NEW_LINE> self.limit = 0 <NEW_LINE> self.page_index = 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.page_index = page_index <NEW_LINE> self.offset = self.page_size*(page_index-1) <NEW_LINE> self.limit = self.page_size <NEW_LINE> <DEDENT> self.has_next = self.page_index < self.page_count <NEW_LINE> self.has_previous = self.page_index > 1 <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return 'item_count: %s, page_count:%s,page_index:%s,page_size:%s,offset:%s,limit:%s' % (self.item_count, self.page_count, self.page_index, self.page_size, self.offset, self.limit) <NEW_LINE> <DEDENT> __repr__ = __str__
|
Page object for displaying pages.
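A quick worked example of the pagination arithmetic (illustrative values)::
>>> p = Page(item_count=100, page_index=3, page_size=10)
>>> p.page_count, p.offset, p.limit
(10, 20, 10)
>>> p.has_next, p.has_previous
(True, True)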
|
6259902d8c3a8732951f757c
|
class HostResource(HTTPServerTransport): <NEW_LINE> <INDENT> VERSION_STRING = host_settings.HOST_VERSION_STRING <NEW_LINE> @exceptions_logged(logger) <NEW_LINE> def __init__(self, avatar): <NEW_LINE> <INDENT> HTTPServerTransport.__init__(self, avatar=avatar) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> @exceptions_logged(logger) <NEW_LINE> def init_environment_for_host_server(cls, app, known_peers, force): <NEW_LINE> <INDENT> HTTPServerTransport.init_environment( app=app, tr_manager=TransactionManagerInMemory( tr_classes=transactions.ALL_TRANSACTION_CLASSES, app=app), force=force) <NEW_LINE> cls.known_peers = known_peers <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def init_environment_perprocess(cls, listen_port): <NEW_LINE> <INDENT> logger.debug('Initializing perprocess http-server environment ' 'for %s at port %i', cls.app.host, listen_port) <NEW_LINE> <DEDENT> @exceptions_logged(logger) <NEW_LINE> def create_message_from_headers(self, headers, auth_peer=None): <NEW_LINE> <INDENT> if _DEBUG_EXIT_ON_INCOMING_CONNECTION: <NEW_LINE> <INDENT> logger.error('Breaking the connection') <NEW_LINE> os._exit(0) <NEW_LINE> <DEDENT> _peer_proto = partial(Node, urls=self.app.primary_node.urls) <NEW_LINE> return self._create_message_from_headers(headers=headers, auth_peer=auth_peer, me=self.app.host, known_inhs=self.known_peers, must_know_peer=False, peer_proto=_peer_proto) <NEW_LINE> <DEDENT> @exceptions_logged(logger) <NEW_LINE> def _accept_passthrough_message(self, message): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> @exceptions_logged(logger) <NEW_LINE> @contract_epydoc <NEW_LINE> def _handle_error(self, request, failure): <NEW_LINE> <INDENT> super(HostResource, self)._handle_error(request, failure) <NEW_LINE> logger.error('failure: %r', failure) <NEW_LINE> exc = failure.value <NEW_LINE> if isinstance(exc, MessageProcessingException): <NEW_LINE> <INDENT> level = exc.level <NEW_LINE> message = exc._message <NEW_LINE> assert isinstance(message, AbstractMessage), repr(message) <NEW_LINE> status_type = 'message_processing_error' <NEW_LINE> msg_pragma = message.get_pragma() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> level = logging.CRITICAL <NEW_LINE> message = str(exc) <NEW_LINE> status_type = 'unknown_error' <NEW_LINE> msg_pragma = None <NEW_LINE> <DEDENT> logger_status.log(level, message, extra={'_type': status_type}) <NEW_LINE> if isinstance(exc, MessageProcessingException): <NEW_LINE> <INDENT> _pragma = ','.join(ifilter(None, [msg_pragma, self.extra_pragma()])) <NEW_LINE> request.headers.update({'Pragma': _pragma}) <NEW_LINE> <DEDENT> if isinstance(exc, (MessageProcessingException, TransactionProcessingException)): <NEW_LINE> <INDENT> request.setResponseCode(http.BAD_REQUEST) <NEW_LINE> result_struct = {'error': {'text': str(exc), 'level': level}} <NEW_LINE> callFromThread(request.write, json.dumps(result_struct)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> failure.raiseException() <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> logger.exception('VERY BAD ERROR %s', e)
|
The host acting as an HTTP server; a new instance is created
for every request. Technically, this is a C{IResource}.
|
6259902d711fe17d825e14ac
|
class Response(ResponseBase): <NEW_LINE> <INDENT> default_mimetype = 'application/json; charset=utf-8' <NEW_LINE> default_headers = { 'Strict-Transport-Security': 'max-age=31536000; includeSubdomains; preload', 'Cache-Control': 'no-cache, no-store, must-revalidate', 'Pragma': 'no-cache', 'Expires': '0' } <NEW_LINE> def __init__(self, response=None, *args, **kwargs): <NEW_LINE> <INDENT> headers = kwargs.pop('headers', {}) <NEW_LINE> status = kwargs.pop('status', None) <NEW_LINE> limit = kwargs.pop('limit', None) <NEW_LINE> page = kwargs.pop('page', None) <NEW_LINE> total = kwargs.pop('total', None) <NEW_LINE> if all([limit, page, total]): <NEW_LINE> <INDENT> headers.update(Pagination(limit, total, page).headers()) <NEW_LINE> <DEDENT> headers.update(self.default_headers) <NEW_LINE> headers['Status'] = self.status <NEW_LINE> if response is None: <NEW_LINE> <INDENT> response = json.dumps({ 'message': self._status }) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> response = json.dumps(response) <NEW_LINE> <DEDENT> return super(ResponseBase, self).__init__( response, headers=headers, status=status, *args, **kwargs)
|
Base Response object. This overrides the default in Flask, and is set in the
application factory.
|
6259902dd4950a0f3b11164f
|
class BordaNauru(BaseVotingSystem): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def get_id(): <NEW_LINE> <INDENT> return 'borda-nauru' <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def get_description(): <NEW_LINE> <INDENT> return _('Nauru Borda Count voting') <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def create_tally(question, question_num): <NEW_LINE> <INDENT> return BordaNauruTally(question=question, question_num=question_num)
|
Defines the helper functions that allow agora to manage an OpenSTV-based
Nauru Borda voting system.
|
6259902d8a43f66fc4bf31a8
|
class Graph: <NEW_LINE> <INDENT> def __init__(self, edges=None): <NEW_LINE> <INDENT> self.adj_list = defaultdict(lambda: (list(), list())) <NEW_LINE> if edges: <NEW_LINE> <INDENT> for v1, v2 in edges: <NEW_LINE> <INDENT> self.add_edge(v1, v2) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def add_edge(self, v1, v2): <NEW_LINE> <INDENT> self.adj_list[v1][0].append(v2) <NEW_LINE> self.adj_list[v2][1].append(v1) <NEW_LINE> <DEDENT> def out(self, v): <NEW_LINE> <INDENT> return self.adj_list[v][0] <NEW_LINE> <DEDENT> def inc(self, v): <NEW_LINE> <INDENT> return self.adj_list[v][1] <NEW_LINE> <DEDENT> def topo_sort(self): <NEW_LINE> <INDENT> queue = list() <NEW_LINE> order = list() <NEW_LINE> inc_remaining = dict() <NEW_LINE> for v, (out, inc) in self.adj_list.items(): <NEW_LINE> <INDENT> if len(inc) == 0: <NEW_LINE> <INDENT> queue.append(v) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> inc_remaining[v] = len(inc) <NEW_LINE> <DEDENT> <DEDENT> while queue: <NEW_LINE> <INDENT> current = queue.pop() <NEW_LINE> order.append(current) <NEW_LINE> for v in self.out(current): <NEW_LINE> <INDENT> inc_remaining[v] -= 1 <NEW_LINE> if inc_remaining[v] == 0: <NEW_LINE> <INDENT> queue.append(v) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return order
|
A directed graph with adjacency list/dict storage. Each element in its
adjacency list contains a tuple of two lists, the first the outgoing edges
and the second the incoming edges. Each edge is stored as the vertex at its
other end.
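A small usage sketch (vertex 1 is the only source and 4 the only sink, so every valid topological order starts with 1 and ends with 4)::
>>> g = Graph([(1, 2), (1, 3), (2, 4), (3, 4)])
>>> g.out(1)
[2, 3]
>>> g.inc(4)
[2, 3]
>>> order = g.topo_sort()
>>> order[0], order[-1]
(1, 4)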
|
6259902d23e79379d538d52e
|
class OutputFile(BasicFile): <NEW_LINE> <INDENT> def __init__(self,name,titles=[],parent=None): <NEW_LINE> <INDENT> BasicFile.__init__(self,name) <NEW_LINE> self.parent=parent <NEW_LINE> self.setTitles(titles) <NEW_LINE> <DEDENT> def setTitles(self,titles): <NEW_LINE> <INDENT> self.titles=titles <NEW_LINE> <DEDENT> def outputAtStart(self): <NEW_LINE> <INDENT> if len(self.titles)>0: <NEW_LINE> <INDENT> fh=self.getHandle() <NEW_LINE> fh.write("# time") <NEW_LINE> for c in self.titles: <NEW_LINE> <INDENT> fh.write(" \t"+c) <NEW_LINE> <DEDENT> fh.write("\n") <NEW_LINE> <DEDENT> <DEDENT> def write(self,time,data): <NEW_LINE> <INDENT> self.writeLine( (time,)+data) <NEW_LINE> <DEDENT> def callAtOpen(self): <NEW_LINE> <INDENT> if self.parent: <NEW_LINE> <INDENT> self.parent.addToOpenList(path.basename(self.name)) <NEW_LINE> <DEDENT> <DEDENT> def callAtClose(self): <NEW_LINE> <INDENT> if self.parent: <NEW_LINE> <INDENT> self.parent.removeFromOpenList(path.basename(self.name)) <NEW_LINE> <DEDENT> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> result="Outfile:"+self.name <NEW_LINE> if self.isOpen: <NEW_LINE> <INDENT> result+=" OPEN" <NEW_LINE> <DEDENT> if self.append: <NEW_LINE> <INDENT> result+=" APPEND" <NEW_LINE> <DEDENT> if self.handle: <NEW_LINE> <INDENT> result+=" HANDLE" <NEW_LINE> <DEDENT> return result
|
Output of time-dependent data
|
6259902da8ecb03325872242
|
class Annotator: <NEW_LINE> <INDENT> def annotate(self, tokens): <NEW_LINE> <INDENT> return []
|
A base class for annotators.
|
6259902d15baa72349462fbd
|
class PyPsutil(PythonPackage): <NEW_LINE> <INDENT> homepage = "https://pypi.python.org/pypi/psutil" <NEW_LINE> url = "https://pypi.io/packages/source/p/psutil/psutil-5.4.5.tar.gz" <NEW_LINE> version('5.5.1', '81d6969ba8392cd3b6f5cba6c4e77caa') <NEW_LINE> version('5.4.5', '7d3d7954782bba4a400e106e66f10656') <NEW_LINE> version('5.0.1', '153dc8be94badc4072016ceeac7808dc') <NEW_LINE> depends_on('python@2.6:') <NEW_LINE> depends_on('py-setuptools', type='build')
|
psutil is a cross-platform library for retrieving information on
running processes and system utilization (CPU, memory, disks, network)
in Python.
|
6259902da4f1c619b294f61b
|
class AlchemyEngineUtility: <NEW_LINE> <INDENT> name = FieldProperty(IAlchemyEngineUtility['name']) <NEW_LINE> dsn = FieldProperty(IAlchemyEngineUtility['dsn']) <NEW_LINE> echo = FieldProperty(IAlchemyEngineUtility['echo']) <NEW_LINE> use_pool = FieldProperty(IAlchemyEngineUtility['use_pool']) <NEW_LINE> pool_size = FieldProperty(IAlchemyEngineUtility['pool_size']) <NEW_LINE> pool_recycle = FieldProperty(IAlchemyEngineUtility['pool_recycle']) <NEW_LINE> echo_pool = FieldProperty(IAlchemyEngineUtility['echo_pool']) <NEW_LINE> encoding = FieldProperty(IAlchemyEngineUtility['encoding']) <NEW_LINE> convert_unicode = FieldProperty(IAlchemyEngineUtility['convert_unicode']) <NEW_LINE> twophase = FieldProperty(IAlchemyEngineUtility['twophase']) <NEW_LINE> def __init__(self, name='', dsn='', echo=False, use_pool=True, pool_size=25, pool_recycle=-1, echo_pool=False, encoding='utf-8', convert_unicode=False, twophase=True, **kwargs): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.dsn = dsn <NEW_LINE> self.echo = echo <NEW_LINE> self.use_pool = use_pool <NEW_LINE> self.pool_size = pool_size <NEW_LINE> self.pool_recycle = pool_recycle <NEW_LINE> self.echo_pool = echo_pool <NEW_LINE> self.encoding = encoding <NEW_LINE> self.convert_unicode = convert_unicode <NEW_LINE> self.twophase = twophase <NEW_LINE> self.kw = PersistentMapping() <NEW_LINE> self.kw.update(kwargs) <NEW_LINE> <DEDENT> def __setattr__(self, key, value): <NEW_LINE> <INDENT> super().__setattr__(key, value) <NEW_LINE> if (key != '_v_engine') and hasattr(self, '_v_engine'): <NEW_LINE> <INDENT> delattr(self, '_v_engine') <NEW_LINE> <DEDENT> <DEDENT> def get_engine(self, use_pool=True): <NEW_LINE> <INDENT> kw = {} <NEW_LINE> kw.update(self.kw) <NEW_LINE> if not (use_pool and self.use_pool): <NEW_LINE> <INDENT> return create_engine(self.dsn, echo=self.echo, poolclass=NullPool, encoding=self.encoding, convert_unicode=self.convert_unicode, **kw) <NEW_LINE> <DEDENT> engine = getattr(self, '_v_engine', None) <NEW_LINE> if engine is None: <NEW_LINE> <INDENT> engine = self._v_engine = create_engine(self.dsn, echo=self.echo, pool_size=self.pool_size, pool_recycle=self.pool_recycle, echo_pool=self.echo_pool, encoding=self.encoding, convert_unicode=self.convert_unicode, **kw) <NEW_LINE> <DEDENT> return engine <NEW_LINE> <DEDENT> def clear_engine(self): <NEW_LINE> <INDENT> if hasattr(self, '_v_engine'): <NEW_LINE> <INDENT> delattr(self, '_v_engine')
|
SQLAlchemy engine utility
|
6259902d5166f23b2e2443fc
|
class IS3Resource(INamed): <NEW_LINE> <INDENT> buckets = zope.schema.Object( title="Dictionary of S3Bucket objects", schema=IS3Buckets, required=True, )
|
S3 Bucket
|
6259902db57a9660fecd2aa8
|
class ProtectedItemResourceList(ResourceList): <NEW_LINE> <INDENT> _attribute_map = { 'next_link': {'key': 'nextLink', 'type': 'str'}, 'value': {'key': 'value', 'type': '[ProtectedItemResource]'}, } <NEW_LINE> def __init__( self, *, next_link: Optional[str] = None, value: Optional[List["ProtectedItemResource"]] = None, **kwargs ): <NEW_LINE> <INDENT> super(ProtectedItemResourceList, self).__init__(next_link=next_link, **kwargs) <NEW_LINE> self.value = value
|
List of ProtectedItem resources.
:ivar next_link: The URI to fetch the next page of resources. Call ListNext() to fetch the next
page of resources.
:vartype next_link: str
:ivar value: List of resources.
:vartype value:
list[~azure.mgmt.recoveryservicesbackup.passivestamp.models.ProtectedItemResource]
|
6259902d8a349b6b43687261
|
class CreationLicenseWidget(forms.MultiWidget): <NEW_LINE> <INDENT> COPYRIGHTED = {1: _("Copyright")} <NEW_LINE> CREATIVE_COMMONS = {10: _("CC BY - Attribution"), 11: _("CC BY-SA - Share alike"), 12: _("CC BY-ND - Cannot edit"), 13: _("CC BY-NC - Non commercial"), 14: _("CC BY-NC-SA - SA+NC"), 15: _("CC BY-NC-ND - ND+NC"), 16: _("Public domain")} <NEW_LINE> def __init__(self, attrs=None, widget=None): <NEW_LINE> <INDENT> license_widget = widgets.Select if not widget else widget <NEW_LINE> license_widget = license_widget(choices=CreationLicenseWidget.get_choices(), attrs={'class': 'license-license', 'style': 'min-width: 24em;'}) <NEW_LINE> _widgets = (license_widget, widgets.TextInput(attrs={'class': 'license-author', 'placeholder': _("Creator or rights holder")})) <NEW_LINE> super(CreationLicenseWidget, self).__init__(_widgets, attrs) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def get_choices(): <NEW_LINE> <INDENT> cc_choices = [[0, pgettext_lazy('license', "None")], [_("Creative Commons"), list(CreationLicenseWidget.CREATIVE_COMMONS.items())], [_("Copyright"), list(CreationLicenseWidget.COPYRIGHTED.items())]] <NEW_LINE> return cc_choices <NEW_LINE> <DEDENT> def decompress(self, value): <NEW_LINE> <INDENT> if value: <NEW_LINE> <INDENT> licens, author = value.split(';', 1) <NEW_LINE> licens = licens or None <NEW_LINE> return [licens, author] <NEW_LINE> <DEDENT> return [None, ""] <NEW_LINE> <DEDENT> def value_from_datadict(self, data, files, name): <NEW_LINE> <INDENT> data = [widget.value_from_datadict(data, files, "{}_{}".format(name, i)) for i, widget in enumerate(self.widgets)] <NEW_LINE> licens = data[0] or '' <NEW_LINE> author = data[1] <NEW_LINE> return "{};{}".format(licens, author) <NEW_LINE> <DEDENT> def format_output(self, rendered_widgets): <NEW_LINE> <INDENT> return ' '.join(rendered_widgets)
|
License or copyright widget
|
6259902d15baa72349462fbf
|
class ElectionResult(ModelBase): <NEW_LINE> <INDENT> __versioned__ = {} <NEW_LINE> id = db.Column( evalg.database.types.UuidType, default=uuid.uuid4, primary_key=True) <NEW_LINE> election_id = db.Column( evalg.database.types.UuidType, db.ForeignKey('election.id')) <NEW_LINE> election = db.relationship( 'Election', back_populates='election_results', lazy='joined') <NEW_LINE> election_group_count_id = db.Column( evalg.database.types.UuidType, db.ForeignKey('election_group_count.id')) <NEW_LINE> election_group_count = db.relationship( 'ElectionGroupCount', back_populates='election_results', lazy='joined') <NEW_LINE> """ election group count that the result belongs to """ <NEW_LINE> election_protocol = deferred(db.Column(evalg.database.types.MutableJson)) <NEW_LINE> ballots = deferred(db.Column(evalg.database.types.NestedMutableJson)) <NEW_LINE> result = db.Column(evalg.database.types.MutableJson) <NEW_LINE> pollbook_stats = db.Column(evalg.database.types.MutableJson) <NEW_LINE> @property <NEW_LINE> def election_protocol_text(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> protcol_cls = count.PROTOCOL_MAPPINGS[self.election.type_str] <NEW_LINE> return protcol_cls.from_dict(self.election_protocol).render() <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> raise Exception('Unsupported counting method for protocol') <NEW_LINE> <DEDENT> return self.value
|
The ElectionResult class
|
6259902da8ecb03325872244
|
class Solution(object): <NEW_LINE> <INDENT> def findMedianSortedArrays(self, nums1, nums2): <NEW_LINE> <INDENT> n = len(nums1) + len(nums2) <NEW_LINE> if n % 2 == 1: <NEW_LINE> <INDENT> return self.findKth(nums1, nums2, n // 2 + 1) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> smaller = self.findKth(nums1, nums2, n // 2) <NEW_LINE> bigger = self.findKth(nums1, nums2, n // 2 + 1) <NEW_LINE> return (smaller + bigger) / 2.0 <NEW_LINE> <DEDENT> <DEDENT> def findKth(self, A, B, k): <NEW_LINE> <INDENT> if len(A) == 0: <NEW_LINE> <INDENT> return B[k - 1] <NEW_LINE> <DEDENT> if len(B) == 0: <NEW_LINE> <INDENT> return A[k - 1] <NEW_LINE> <DEDENT> if k == 1: <NEW_LINE> <INDENT> return min(A[0], B[0]) <NEW_LINE> <DEDENT> a = A[int(k // 2 - 1)] if len(A) >= k // 2 else None <NEW_LINE> b = B[int(k // 2 - 1)] if len(B) >= k // 2 else None <NEW_LINE> if b is None or (a is not None and a < b): <NEW_LINE> <INDENT> return self.findKth(A[int(k // 2):], B, int(k - k // 2)) <NEW_LINE> <DEDENT> return self.findKth(A, B[int(k // 2):], int(k - k // 2))
|
@param A: An integer array.
@param B: An integer array.
@return: a double whose format is *.5 or *.0
|
6259902dc432627299fa401b
|
class BranchUnsupported(Exception): <NEW_LINE> <INDENT> def __init__(self, url: str, description: str): <NEW_LINE> <INDENT> self.url = url <NEW_LINE> self.description = description <NEW_LINE> <DEDENT> def __str__(self) -> str: <NEW_LINE> <INDENT> return self.description
|
The branch uses a VCS or protocol that is unsupported.
|
6259902dd18da76e235b7962
|
class CreateLoadBalancersRequest(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.VpcId = None <NEW_LINE> self.LoadBalancerType = None <NEW_LINE> self.SubnetId = None <NEW_LINE> self.ProjectId = None <NEW_LINE> self.GoodsNum = None <NEW_LINE> self.PayMode = None <NEW_LINE> self.TgwSetType = None <NEW_LINE> self.Exclusive = None <NEW_LINE> self.SpecifiedVips = None <NEW_LINE> self.BzConf = None <NEW_LINE> self.IpProtocolType = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.VpcId = params.get("VpcId") <NEW_LINE> self.LoadBalancerType = params.get("LoadBalancerType") <NEW_LINE> self.SubnetId = params.get("SubnetId") <NEW_LINE> self.ProjectId = params.get("ProjectId") <NEW_LINE> self.GoodsNum = params.get("GoodsNum") <NEW_LINE> self.PayMode = params.get("PayMode") <NEW_LINE> self.TgwSetType = params.get("TgwSetType") <NEW_LINE> self.Exclusive = params.get("Exclusive") <NEW_LINE> self.SpecifiedVips = params.get("SpecifiedVips") <NEW_LINE> if params.get("BzConf") is not None: <NEW_LINE> <INDENT> self.BzConf = CreateLoadBalancerBzConf() <NEW_LINE> self.BzConf._deserialize(params.get("BzConf")) <NEW_LINE> <DEDENT> self.IpProtocolType = params.get("IpProtocolType") <NEW_LINE> memeber_set = set(params.keys()) <NEW_LINE> for name, value in vars(self).items(): <NEW_LINE> <INDENT> if name in memeber_set: <NEW_LINE> <INDENT> memeber_set.remove(name) <NEW_LINE> <DEDENT> <DEDENT> if len(memeber_set) > 0: <NEW_LINE> <INDENT> warnings.warn("%s fileds are useless." % ",".join(memeber_set))
|
CreateLoadBalancers request parameter structure
|
6259902d6fece00bbaccc9d8
|
class Fact(object): <NEW_LINE> <INDENT> def __init__(self, model=None): <NEW_LINE> <INDENT> self.model = model <NEW_LINE> self.id = genID() <NEW_LINE> if model: <NEW_LINE> <INDENT> for fm in model.fieldModels: <NEW_LINE> <INDENT> self.fields.append(Field(fm)) <NEW_LINE> <DEDENT> <DEDENT> self.new = True <NEW_LINE> <DEDENT> def isNew(self): <NEW_LINE> <INDENT> return getattr(self, 'new', False) <NEW_LINE> <DEDENT> def keys(self): <NEW_LINE> <INDENT> return [field.name for field in self.fields] <NEW_LINE> <DEDENT> def values(self): <NEW_LINE> <INDENT> return [field.value for field in self.fields] <NEW_LINE> <DEDENT> def __getitem__(self, key): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return [f.value for f in self.fields if f.name == key][0] <NEW_LINE> <DEDENT> except IndexError: <NEW_LINE> <INDENT> raise KeyError(key) <NEW_LINE> <DEDENT> <DEDENT> def __setitem__(self, key, value): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> [f for f in self.fields if f.name == key][0].value = value <NEW_LINE> <DEDENT> except IndexError: <NEW_LINE> <INDENT> raise KeyError <NEW_LINE> <DEDENT> <DEDENT> def get(self, key, default): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self[key] <NEW_LINE> <DEDENT> except (IndexError, KeyError): <NEW_LINE> <INDENT> return default <NEW_LINE> <DEDENT> <DEDENT> def assertValid(self): <NEW_LINE> <INDENT> for field in self.fields: <NEW_LINE> <INDENT> if not self.fieldValid(field): <NEW_LINE> <INDENT> raise FactInvalidError(type="fieldEmpty", field=field.name) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def fieldValid(self, field): <NEW_LINE> <INDENT> return not (field.fieldModel.required and not field.value.strip()) <NEW_LINE> <DEDENT> def assertUnique(self, s): <NEW_LINE> <INDENT> for field in self.fields: <NEW_LINE> <INDENT> if not self.fieldUnique(field, s): <NEW_LINE> <INDENT> raise FactInvalidError(type="fieldNotUnique", field=field.name) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def fieldUnique(self, field, s): <NEW_LINE> <INDENT> if not field.fieldModel.unique: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> req = ("select value from fields " "where fieldModelId = :fmid and value = :val") <NEW_LINE> if field.id: <NEW_LINE> <INDENT> req += " and id != %s" % field.id <NEW_LINE> <DEDENT> return not s.scalar(req, val=field.value, fmid=field.fieldModel.id) <NEW_LINE> <DEDENT> def focusLost(self, field): <NEW_LINE> <INDENT> runHook('fact.focusLost', self, field) <NEW_LINE> <DEDENT> def setModified(self, textChanged=False, deck=None, media=True): <NEW_LINE> <INDENT> self.modified = time.time() <NEW_LINE> if textChanged: <NEW_LINE> <INDENT> if not deck: <NEW_LINE> <INDENT> import ankiqt <NEW_LINE> if not getattr(ankiqt, 'setModWarningShown', None): <NEW_LINE> <INDENT> import sys; sys.stderr.write( "plugin needs to pass deck to fact.setModified()") <NEW_LINE> ankiqt.setModWarningShown = True <NEW_LINE> <DEDENT> deck = ankiqt.mw.deck <NEW_LINE> <DEDENT> assert deck <NEW_LINE> self.spaceUntil = stripHTMLMedia(u" ".join( self.values())) <NEW_LINE> for card in self.cards: <NEW_LINE> <INDENT> card.rebuildQA(deck)
|
A single fact. Fields are exposed through a dict interface.
|
6259902d0a366e3fb87dda10
|
class IterableEventQueue(queue.Queue): <NEW_LINE> <INDENT> def __init__(self, timeout=3600): <NEW_LINE> <INDENT> queue.Queue.__init__(self, maxsize=32) <NEW_LINE> self._timeout = timeout <NEW_LINE> <DEDENT> def offer(self, event): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.put(event, block=False) <NEW_LINE> <DEDENT> except queue.Full: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> def __next__(self): <NEW_LINE> <INDENT> return self.next() <NEW_LINE> <DEDENT> def next(self): <NEW_LINE> <INDENT> while True: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self.get(block=True, timeout=self._timeout) <NEW_LINE> <DEDENT> except KeyboardInterrupt: <NEW_LINE> <INDENT> raise StopIteration() <NEW_LINE> <DEDENT> except queue.Empty: <NEW_LINE> <INDENT> pass
|
Extends queue.Queue to add an __iter__ interface.
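A minimal sketch of the iterator interface (the short timeout is only to keep the example from blocking)::
>>> q = IterableEventQueue(timeout=1)
>>> q.offer('event-1')
>>> next(iter(q))
'event-1'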
|
6259902d796e427e5384f7a6
|
class StaffGradingService(GradingService): <NEW_LINE> <INDENT> METRIC_NAME = 'edxapp.open_ended_grading.staff_grading_service' <NEW_LINE> def __init__(self, config): <NEW_LINE> <INDENT> config['system'] = LmsModuleSystem( static_url='/static', track_function=None, get_module=None, render_template=render_to_string, replace_urls=None, descriptor_runtime=None, services={ 'i18n': ModuleI18nService(), }, ) <NEW_LINE> super(StaffGradingService, self).__init__(config) <NEW_LINE> self.url = config['url'] + config['staff_grading'] <NEW_LINE> self.login_url = self.url + '/login/' <NEW_LINE> self.get_next_url = self.url + '/get_next_submission/' <NEW_LINE> self.save_grade_url = self.url + '/save_grade/' <NEW_LINE> self.get_problem_list_url = self.url + '/get_problem_list/' <NEW_LINE> self.get_notifications_url = self.url + "/get_notifications/" <NEW_LINE> <DEDENT> def get_problem_list(self, course_id, grader_id): <NEW_LINE> <INDENT> params = {'course_id': course_id, 'grader_id': grader_id} <NEW_LINE> result = self.get(self.get_problem_list_url, params) <NEW_LINE> tags = [u'course_id:{}'.format(course_id)] <NEW_LINE> self._record_result('get_problem_list', result, tags) <NEW_LINE> dog_stats_api.histogram( self._metric_name('get_problem_list.result.length'), len(result.get('problem_list', [])) ) <NEW_LINE> return result <NEW_LINE> <DEDENT> def get_next(self, course_id, location, grader_id): <NEW_LINE> <INDENT> result = self._render_rubric( self.get( self.get_next_url, params={ 'location': location, 'grader_id': grader_id } ) ) <NEW_LINE> tags = [u'course_id:{}'.format(course_id)] <NEW_LINE> self._record_result('get_next', result, tags) <NEW_LINE> return result <NEW_LINE> <DEDENT> def save_grade(self, course_id, grader_id, submission_id, score, feedback, skipped, rubric_scores, submission_flagged): <NEW_LINE> <INDENT> data = {'course_id': course_id, 'submission_id': submission_id, 'score': score, 'feedback': feedback, 'grader_id': grader_id, 'skipped': skipped, 'rubric_scores': rubric_scores, 'rubric_scores_complete': True, 'submission_flagged': submission_flagged} <NEW_LINE> result = self._render_rubric(self.post(self.save_grade_url, data=data)) <NEW_LINE> tags = [u'course_id:{}'.format(course_id)] <NEW_LINE> self._record_result('save_grade', result, tags) <NEW_LINE> return result <NEW_LINE> <DEDENT> def get_notifications(self, course_id): <NEW_LINE> <INDENT> params = {'course_id': course_id} <NEW_LINE> result = self.get(self.get_notifications_url, params) <NEW_LINE> tags = [ u'course_id:{}'.format(course_id), u'staff_needs_to_grade:{}'.format(result.get('staff_needs_to_grade')) ] <NEW_LINE> self._record_result('get_notifications', result, tags) <NEW_LINE> return result
|
Interface to staff grading backend.
|
6259902dac7a0e7691f73512
|
class SentenceSelection(): <NEW_LINE> <INDENT> def __init__(self, stemmer=None, stoplist=None): <NEW_LINE> <INDENT> self.stemmer = stemmer if stemmer else lambda x: x <NEW_LINE> self.stoplist = stoplist if stoplist else lambda x: False <NEW_LINE> <DEDENT> def load_data(self, topics, sentence_data): <NEW_LINE> <INDENT> self.topics = _load('topics', topics) <NEW_LINE> self.sentence_data = _load('sentence data', sentence_data)
|
Context object for hosting data and resources
|
6259902d1d351010ab8f4b41
|
class Dropout(object): <NEW_LINE> <INDENT> def __init__(self, amount): <NEW_LINE> <INDENT> self.amount = amount <NEW_LINE> <DEDENT> def __call__(self, sample): <NEW_LINE> <INDENT> images = sample['images'] <NEW_LINE> if 0.5 > np.random.uniform(0.0, 1.0): <NEW_LINE> <INDENT> droped = [] <NEW_LINE> num_droped = np.ceil(np.random.uniform(0.0, self.amount) * 224*224) <NEW_LINE> coords = [np.random.randint(0, i - 1 , int(num_droped)) for i in images[0].shape] <NEW_LINE> for i in images: <NEW_LINE> <INDENT> i[coords[:-1]] = (0, 0, 0) <NEW_LINE> droped.append(i) <NEW_LINE> <DEDENT> droped = np.array(droped) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> droped = np.array(images) <NEW_LINE> <DEDENT> return {'images': droped, 'labels': sample['labels'], 'names':sample['names']}
|
Randomly drop out pixels of the images in a sample.
|
6259902d26238365f5fadb7b
|
class InvalidConstraint(MAASAPIBadRequest): <NEW_LINE> <INDENT> def __init__(self, constraint, value, err=None): <NEW_LINE> <INDENT> super(InvalidConstraint, self).__init__(constraint, value) <NEW_LINE> self.err = err <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> s = "Invalid '%s' constraint '%s'" % self.args <NEW_LINE> if self.err: <NEW_LINE> <INDENT> return "%s: %s" % (s, str(self.err)) <NEW_LINE> <DEDENT> return s
|
Node allocation constraint given cannot be interpreted.
|
6259902d63f4b57ef0086587
|
class Subprogram(AccessPath): <NEW_LINE> <INDENT> class CallInterface(object): <NEW_LINE> <INDENT> IGNORE_STACK = 0 <NEW_LINE> PASS_STACK = 1 <NEW_LINE> RETURN_STACK = 2 <NEW_LINE> def __init__(self, out_indices, stack_action, does_return): <NEW_LINE> <INDENT> self.out_indices = out_indices <NEW_LINE> self.stack_action = stack_action <NEW_LINE> self.does_return = does_return <NEW_LINE> <DEDENT> def takes_stack(self): <NEW_LINE> <INDENT> return self.stack_action != self.IGNORE_STACK <NEW_LINE> <DEDENT> def returns_stack(self): <NEW_LINE> <INDENT> return self.stack_action == self.RETURN_STACK <NEW_LINE> <DEDENT> <DEDENT> def __init__(self, subp_obj, interface): <NEW_LINE> <INDENT> self.subp_obj = subp_obj <NEW_LINE> self.interface = interface <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "Subp_{}".format(self.subp_obj)
|
Represents an access to a subprogram.
|
6259902d6e29344779b0167a
|
class MyNode(object): <NEW_LINE> <INDENT> def __init__(self,item,lchild=None,rchild=None): <NEW_LINE> <INDENT> self.item = item <NEW_LINE> self.lchild = lchild <NEW_LINE> self.rchild = rchild
|
Node
|
6259902dd6c5a102081e3151
|
class UsersHandler(BaseHandler): <NEW_LINE> <INDENT> methods = ['GET'] <NEW_LINE> path = '/api/users' <NEW_LINE> def run(self, *args, **kwargs): <NEW_LINE> <INDENT> def reduction(acc, location): <NEW_LINE> <INDENT> acc[location.user].append(location.device) <NEW_LINE> return acc <NEW_LINE> <DEDENT> locations = self.ctx.db.session.query(Location.user, Location.device).distinct().all() <NEW_LINE> user_devices = reduce(reduction, locations, collections.defaultdict(list)) <NEW_LINE> formatted_user_devices = [ {'user': user, 'devices': devices} for user, devices in user_devices.iteritems() ] <NEW_LINE> self.ctx.metrics_event.emit_event('query_users') <NEW_LINE> return self.success(data=formatted_user_devices, status=200)
|
Query all known users and associated devices.
|
6259902d30c21e258be99837
|
class ReplayBuffer: <NEW_LINE> <INDENT> def __init__(self, action_size, buffer_size, batch_size, seed): <NEW_LINE> <INDENT> self.action_size = action_size <NEW_LINE> self.memory = deque(maxlen=buffer_size) <NEW_LINE> self.batch_size = batch_size <NEW_LINE> self.experience = namedtuple("Experience", field_names=["state", "action", "reward", "next_state", "done"]) <NEW_LINE> self.seed = random.seed(seed) <NEW_LINE> <DEDENT> def add(self, state, action, reward, next_state, done): <NEW_LINE> <INDENT> e = self.experience(state, action, reward, next_state, done) <NEW_LINE> self.memory.append(e) <NEW_LINE> <DEDENT> def sample(self): <NEW_LINE> <INDENT> experiences = random.sample(self.memory, k=self.batch_size) <NEW_LINE> states = torch.from_numpy(np.vstack([e.state for e in experiences if e is not None])).float().to(device) <NEW_LINE> actions = torch.from_numpy(np.vstack([e.action for e in experiences if e is not None])).long().to(device) <NEW_LINE> rewards = torch.from_numpy(np.vstack([e.reward for e in experiences if e is not None])).float().to(device) <NEW_LINE> next_states = torch.from_numpy(np.vstack([e.next_state for e in experiences if e is not None])).float().to(device) <NEW_LINE> dones = torch.from_numpy(np.vstack([e.done for e in experiences if e is not None]).astype(np.uint8)).float().to(device) <NEW_LINE> return states, actions, rewards, next_states, dones <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return len(self.memory)
|
Fixed-size buffer to store experience objects.
|
6259902dd164cc6175821f9d
|
class Conv1d(nn.Conv1d): <NEW_LINE> <INDENT> def __init__(self, in_channels, out_channels, kernel_size, activation_fn=None, drop_rate=0., stride=1, padding='same', dilation=1, groups=1, bias=True): <NEW_LINE> <INDENT> self.activation_fn = activation_fn <NEW_LINE> self.drop_rate = drop_rate <NEW_LINE> if padding == 'same': <NEW_LINE> <INDENT> padding = kernel_size // 2 * dilation <NEW_LINE> <DEDENT> super(Conv1d, self).__init__(in_channels, out_channels, kernel_size, stride=stride, padding=padding, dilation=dilation, groups=groups, bias=bias) <NEW_LINE> self.drop_out = nn.Dropout(self.drop_rate) if self.drop_rate > 0 else None <NEW_LINE> <DEDENT> def forward(self, x): <NEW_LINE> <INDENT> y = super(Conv1d, self).forward(x) <NEW_LINE> y = self.activation_fn(y) if self.activation_fn is not None else y <NEW_LINE> y = self.drop_out(y) if self.drop_out is not None else y <NEW_LINE> return y
|
Highway Convolution 1d
Args:
x: (N, C_in, L)
Returns:
y: (N, C_out, L)
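A shape-check sketch (assumes PyTorch is available; the default 'same' padding keeps L unchanged)::
>>> import torch
>>> conv = Conv1d(in_channels=4, out_channels=8, kernel_size=3)
>>> conv(torch.randn(2, 4, 16)).shape
torch.Size([2, 8, 16])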
|
6259902d1f5feb6acb163c1c
|
class SpectrumExtractorMode(RoiMode): <NEW_LINE> <INDENT> persistent = True <NEW_LINE> def __init__(self, axes, **kwargs): <NEW_LINE> <INDENT> super(SpectrumExtractorMode, self).__init__(axes, **kwargs) <NEW_LINE> self.icon = get_icon('glue_spectrum') <NEW_LINE> self.mode_id = 'Spectrum' <NEW_LINE> self.action_text = 'Spectrum' <NEW_LINE> self.tool_tip = 'Extract a spectrum from the selection' <NEW_LINE> self._roi_tool = qt_roi.QtRectangularROI(self._axes) <NEW_LINE> self._roi_tool.plot_opts.update(edgecolor='#c51b7d', facecolor=None, edgewidth=3, alpha=1.0) <NEW_LINE> self.shortcut = 'S'
|
Lets the user select a region in an image and,
when connected to a SpectrumExtractorTool, uses this
to display spectra extracted from that position
|
6259902d9b70327d1c57fdae
|
class Transaction(object): <NEW_LINE> <INDENT> _tid = None <NEW_LINE> _msg_id = None <NEW_LINE> _oid_list = None <NEW_LINE> _prepared = False <NEW_LINE> _uuid_set = None <NEW_LINE> _lock_wait_uuid_set = None <NEW_LINE> def __init__(self, node, ttid): <NEW_LINE> <INDENT> self._node = node <NEW_LINE> self._ttid = ttid <NEW_LINE> self._birth = time() <NEW_LINE> self._notification_set = set() <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "<%s(client=%r, tid=%r, oids=%r, storages=%r, age=%.2fs) at %x>" % ( self.__class__.__name__, self._node, dump(self._tid), map(dump, self._oid_list or ()), map(uuid_str, self._uuid_set or ()), time() - self._birth, id(self), ) <NEW_LINE> <DEDENT> def getNode(self): <NEW_LINE> <INDENT> return self._node <NEW_LINE> <DEDENT> def getTTID(self): <NEW_LINE> <INDENT> return self._ttid <NEW_LINE> <DEDENT> def getTID(self): <NEW_LINE> <INDENT> return self._tid <NEW_LINE> <DEDENT> def getMessageId(self): <NEW_LINE> <INDENT> return self._msg_id <NEW_LINE> <DEDENT> def getUUIDList(self): <NEW_LINE> <INDENT> return list(self._uuid_set) <NEW_LINE> <DEDENT> def getOIDList(self): <NEW_LINE> <INDENT> return list(self._oid_list) <NEW_LINE> <DEDENT> def isPrepared(self): <NEW_LINE> <INDENT> return self._prepared <NEW_LINE> <DEDENT> def registerForNotification(self, uuid): <NEW_LINE> <INDENT> self._notification_set.add(uuid) <NEW_LINE> <DEDENT> def getNotificationUUIDList(self): <NEW_LINE> <INDENT> return list(self._notification_set) <NEW_LINE> <DEDENT> def prepare(self, tid, oid_list, uuid_list, msg_id): <NEW_LINE> <INDENT> self._tid = tid <NEW_LINE> self._oid_list = oid_list <NEW_LINE> self._msg_id = msg_id <NEW_LINE> self._uuid_set = set(uuid_list) <NEW_LINE> self._lock_wait_uuid_set = set(uuid_list) <NEW_LINE> self._prepared = True <NEW_LINE> <DEDENT> def storageLost(self, uuid): <NEW_LINE> <INDENT> self._notification_set.discard(uuid) <NEW_LINE> if self._prepared: <NEW_LINE> <INDENT> self._lock_wait_uuid_set.discard(uuid) <NEW_LINE> self._uuid_set.discard(uuid) <NEW_LINE> return self.locked() <NEW_LINE> <DEDENT> return False <NEW_LINE> <DEDENT> def clientLost(self, node): <NEW_LINE> <INDENT> if self._node is node: <NEW_LINE> <INDENT> if self._prepared: <NEW_LINE> <INDENT> self._node = None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> self._notification_set.discard(node.getUUID()) <NEW_LINE> <DEDENT> return False <NEW_LINE> <DEDENT> def lock(self, uuid): <NEW_LINE> <INDENT> self._lock_wait_uuid_set.remove(uuid) <NEW_LINE> return self.locked() <NEW_LINE> <DEDENT> def locked(self): <NEW_LINE> <INDENT> return not self._lock_wait_uuid_set
|
A pending transaction
|
6259902db57a9660fecd2aae
|
class aXe_DRZPREP(TaskWrapper): <NEW_LINE> <INDENT> def __init__(self, inlist, configs, **params): <NEW_LINE> <INDENT> super(aXe_DRZPREP, self).__init__('aXe_DRZPREP', 'drzprep') <NEW_LINE> if 'back' in params and params['back']: <NEW_LINE> <INDENT> self.bck = True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.bck = False <NEW_LINE> <DEDENT> self.command_list.append(inlist) <NEW_LINE> self.command_list.append(configs) <NEW_LINE> if 'opt_extr' in params and params['opt_extr']: <NEW_LINE> <INDENT> self.command_list.append('-opt_extr') <NEW_LINE> <DEDENT> <DEDENT> def runall(self, silent=False): <NEW_LINE> <INDENT> super(aXe_DRZPREP, self).runall(silent=silent) <NEW_LINE> if self.bck: <NEW_LINE> <INDENT> self.command_list.append('-bck') <NEW_LINE> super(aXe_DRZPREP, self).runall(silent=silent)
|
Wrapper around the aXe_DRZPREP task
|
6259902dd53ae8145f91948f
|
class Text(_String): <NEW_LINE> <INDENT> type = 'text' <NEW_LINE> def convert_to_cache(self, value, record, validate=True): <NEW_LINE> <INDENT> if value is None or value is False: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return ustr(value)
|
Very similar to :class:`~.Char` but used for longer contents, does not
have a size and usually displayed as a multiline text box.
:param translate: enable the translation of the field's values; use
``translate=True`` to translate field values as a whole; ``translate``
may also be a callable such that ``translate(callback, value)``
translates ``value`` by using ``callback(term)`` to retrieve the
translation of terms.
|
6259902d8c3a8732951f7584
|
class LocationListPluginModel(CMSPlugin): <NEW_LINE> <INDENT> template = models.CharField(verbose_name=_('Plugin template'),max_length=250,null=True,blank=True) <NEW_LINE> def get_short_description(self): <NEW_LINE> <INDENT> desc = self.id <NEW_LINE> choices = getattr(self.get_plugin_class(),'template_choices',[]) <NEW_LINE> choice_name = [x[1] for x in choices if x[0] == self.template] <NEW_LINE> if choice_name: <NEW_LINE> <INDENT> desc = choice_name[0] <NEW_LINE> <DEDENT> elif self.template: <NEW_LINE> <INDENT> desc = self.template <NEW_LINE> <DEDENT> return desc
|
A model for listing all active locations
|
6259902d63f4b57ef0086588
|
class IndexHandler(BaseHandler): <NEW_LINE> <INDENT> @auth.admin <NEW_LINE> def get(self): <NEW_LINE> <INDENT> args = dict() <NEW_LINE> self.jrender('dev_backstage_index.html', **args)
|
Backend management home page
|
6259902d287bf620b6272c13
|
class AndExpression(BaseExpression): <NEW_LINE> <INDENT> operator = ' ' <NEW_LINE> def __init__(self, *terms): <NEW_LINE> <INDENT> self._subterms = list(terms) <NEW_LINE> self._costs = 0 <NEW_LINE> for t in self._subterms: <NEW_LINE> <INDENT> self._costs += t.costs() <NEW_LINE> <DEDENT> self.negated = 0 <NEW_LINE> <DEDENT> def append(self, expression): <NEW_LINE> <INDENT> self._subterms.append(expression) <NEW_LINE> self._costs += expression.costs() <NEW_LINE> <DEDENT> def subterms(self): <NEW_LINE> <INDENT> return self._subterms <NEW_LINE> <DEDENT> def costs(self): <NEW_LINE> <INDENT> return self._costs <NEW_LINE> <DEDENT> def __unicode__(self): <NEW_LINE> <INDENT> result = '' <NEW_LINE> for t in self._subterms: <NEW_LINE> <INDENT> result += self.operator + t <NEW_LINE> <DEDENT> return u'[' + result[len(self.operator):] + u']' <NEW_LINE> <DEDENT> def pageFilter(self): <NEW_LINE> <INDENT> self.sortByCost() <NEW_LINE> terms = [term for term in self._subterms if isinstance(term, TitleSearch)] <NEW_LINE> if terms: <NEW_LINE> <INDENT> def filter(name): <NEW_LINE> <INDENT> for term in terms: <NEW_LINE> <INDENT> filter = term.pageFilter() <NEW_LINE> if not filter(name): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> return True <NEW_LINE> <DEDENT> return filter <NEW_LINE> <DEDENT> return None <NEW_LINE> <DEDENT> def sortByCost(self): <NEW_LINE> <INDENT> tmp = [(term.costs(), term) for term in self._subterms] <NEW_LINE> tmp.sort() <NEW_LINE> self._subterms = [item[1] for item in tmp] <NEW_LINE> <DEDENT> def search(self, page): <NEW_LINE> <INDENT> self.sortByCost() <NEW_LINE> matches = [] <NEW_LINE> for term in self._subterms: <NEW_LINE> <INDENT> result = term.search(page) <NEW_LINE> if not result: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> matches.extend(result) <NEW_LINE> <DEDENT> return matches <NEW_LINE> <DEDENT> def highlight_re(self): <NEW_LINE> <INDENT> result = [] <NEW_LINE> for s in self._subterms: <NEW_LINE> <INDENT> highlight_re = s.highlight_re() <NEW_LINE> if highlight_re: result.append(highlight_re) <NEW_LINE> <DEDENT> return '|'.join(result) <NEW_LINE> <DEDENT> def lupy_term(self): <NEW_LINE> <INDENT> required = self.operator== " " <NEW_LINE> lupy_term = BooleanQuery() <NEW_LINE> for term in self._subterms: <NEW_LINE> <INDENT> lupy_term.add(term.lupy_term(), required, term.negated) <NEW_LINE> <DEDENT> return lupy_term
|
A term connecting several sub terms with a logical AND
|
6259902d6fece00bbaccc9dc
|
class ShowSecurityGroupRule(common.NetworkAndComputeShowOne): <NEW_LINE> <INDENT> def update_parser_common(self, parser): <NEW_LINE> <INDENT> parser.add_argument( 'rule', metavar="<rule>", help=_("Security group rule to display (ID only)") ) <NEW_LINE> return parser <NEW_LINE> <DEDENT> def take_action_network(self, client, parsed_args): <NEW_LINE> <INDENT> obj = client.find_security_group_rule(parsed_args.rule, ignore_missing=False) <NEW_LINE> columns = _get_columns(obj) <NEW_LINE> data = utils.get_item_properties(obj, columns) <NEW_LINE> return (columns, data) <NEW_LINE> <DEDENT> def take_action_compute(self, client, parsed_args): <NEW_LINE> <INDENT> obj = None <NEW_LINE> security_group_rules = [] <NEW_LINE> for security_group in client.security_groups.list(): <NEW_LINE> <INDENT> security_group_rules.extend(security_group.rules) <NEW_LINE> <DEDENT> for security_group_rule in security_group_rules: <NEW_LINE> <INDENT> if parsed_args.rule == str(security_group_rule.get('id')): <NEW_LINE> <INDENT> obj = security_group_rule <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> if obj is None: <NEW_LINE> <INDENT> msg = _("Could not find security group rule " "with ID '%s'") % parsed_args.rule <NEW_LINE> raise exceptions.CommandError(msg) <NEW_LINE> <DEDENT> return _format_security_group_rule_show(obj)
|
Display security group rule details
|
6259902dd164cc6175821f9f
|
class ProjectsView(APIView): <NEW_LINE> <INDENT> render_classes = (JSONRenderer, ) <NEW_LINE> def get(self, request, hpc=None, project_name=None): <NEW_LINE> <INDENT> logger.debug('ProjectsView--->GET: called.') <NEW_LINE> user = get_user(request) <NEW_LINE> if not isinstance(user, User): <NEW_LINE> <INDENT> logger.warning('JobsView--->GET: User not recognized.\n' + ' ================= USER ERRORS ====================\n' + user + ' ==================================================') <NEW_LINE> return Response(user, status=status.HTTP_403_FORBIDDEN) <NEW_LINE> <DEDENT> if hpc: <NEW_LINE> <INDENT> hpc = hpc.upper() <NEW_LINE> if hpc not in [i[0] for i in HPC]: <NEW_LINE> <INDENT> return Response('HPC not found!', status=status.HTTP_404_NOT_FOUND) <NEW_LINE> <DEDENT> if project_name: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> project = Project.objects.get(name=project_name, hpc=hpc) <NEW_LINE> serializer = ProjectSerializer(project) <NEW_LINE> return Response(serializer.data, status=status.HTTP_200_OK) <NEW_LINE> <DEDENT> except Project.DoesNotExsist: <NEW_LINE> <INDENT> return Response('Project not found!', status=status.HTTP_404_NOT_FOUND) <NEW_LINE> <DEDENT> <DEDENT> try: <NEW_LINE> <INDENT> projects = Project.objects.filter(hpc=hpc) <NEW_LINE> serializer = ProjectSerializer(projects, many=True) <NEW_LINE> return Response(serializer.data, status=status.HTTP_200_OK) <NEW_LINE> <DEDENT> except Project.DoesNotExist: <NEW_LINE> <INDENT> return Response('Projects not found!', status=status.HTTP_404_NOT_FOUND) <NEW_LINE> <DEDENT> <DEDENT> try: <NEW_LINE> <INDENT> projects = Project.objects.all() <NEW_LINE> serializer = ProjectSerializer(projects, many=True) <NEW_LINE> return Response(serializer.data, status=status.HTTP_200_OK) <NEW_LINE> <DEDENT> except Project.DoesNotExist: <NEW_LINE> <INDENT> return Response('No projects', status=status.HTTP_204_NO_CONTENT)
|
ProjectsView is used to create, retrieve and delete projects.
A project is intended as a virtual space in which users can run jobs.
Allowed methods are:
GET: retrieve a specified project.
POST: create a new project.
DELETE: delete a specified project.
|
6259902db57a9660fecd2ab0
|
class DocumentError(msrest.serialization.Model): <NEW_LINE> <INDENT> _validation = { 'id': {'required': True}, 'error': {'required': True}, } <NEW_LINE> _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'error': {'key': 'error', 'type': 'TextAnalyticsError'}, } <NEW_LINE> def __init__( self, *, id: str, error: "TextAnalyticsError", **kwargs ): <NEW_LINE> <INDENT> super(DocumentError, self).__init__(**kwargs) <NEW_LINE> self.id = id <NEW_LINE> self.error = error
|
DocumentError.
All required parameters must be populated in order to send to Azure.
:ivar id: Required. Document Id.
:vartype id: str
:ivar error: Required. Document Error.
:vartype error: ~azure.ai.textanalytics.v3_2_preview_2.models.TextAnalyticsError
|
6259902d4e696a045264e639
|
class BNLayer(TrainableLayer): <NEW_LINE> <INDENT> def __init__(self, regularizer=None, moving_decay=0.9, eps=1e-5, name='batch_norm'): <NEW_LINE> <INDENT> super(BNLayer, self).__init__(name=name) <NEW_LINE> self.eps = eps <NEW_LINE> self.moving_decay = moving_decay <NEW_LINE> self.initializers = {'beta': tf.constant_initializer(0.0), 'gamma': tf.constant_initializer(1.0), 'moving_mean': tf.constant_initializer(0.0), 'moving_variance': tf.constant_initializer(1.0)} <NEW_LINE> self.regularizers = {'beta': regularizer, 'gamma': regularizer} <NEW_LINE> <DEDENT> def layer_op(self, inputs, is_training, use_local_stats=False): <NEW_LINE> <INDENT> input_shape = inputs.get_shape() <NEW_LINE> params_shape = input_shape[-1:] <NEW_LINE> axes = list(range(input_shape.ndims - 1)) <NEW_LINE> beta = tf.get_variable( 'beta', shape=params_shape, initializer=self.initializers['beta'], regularizer=self.regularizers['beta'], dtype=tf.float32, trainable=True) <NEW_LINE> gamma = tf.get_variable( 'gamma', shape=params_shape, initializer=self.initializers['gamma'], regularizer=self.regularizers['gamma'], dtype=tf.float32, trainable=True) <NEW_LINE> collections = [tf.GraphKeys.GLOBAL_VARIABLES] <NEW_LINE> moving_mean = tf.get_variable( 'moving_mean', shape=params_shape, initializer=self.initializers['moving_mean'], dtype=tf.float32, trainable=False, collections=collections) <NEW_LINE> moving_variance = tf.get_variable( 'moving_variance', shape=params_shape, initializer=self.initializers['moving_variance'], dtype=tf.float32, trainable=False, collections=collections) <NEW_LINE> mean, variance = tf.nn.moments(inputs, axes) <NEW_LINE> update_moving_mean = moving_averages.assign_moving_average( moving_mean, mean, self.moving_decay).op <NEW_LINE> update_moving_variance = moving_averages.assign_moving_average( moving_variance, variance, self.moving_decay).op <NEW_LINE> tf.add_to_collection(BN_COLLECTION, update_moving_mean) <NEW_LINE> tf.add_to_collection(BN_COLLECTION, update_moving_variance) <NEW_LINE> if is_training or use_local_stats: <NEW_LINE> <INDENT> outputs = tf.nn.batch_normalization( inputs, mean, variance, beta, gamma, self.eps, name='batch_norm') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> outputs = tf.nn.batch_normalization( inputs, moving_mean, moving_variance, beta, gamma, self.eps, name='batch_norm') <NEW_LINE> <DEDENT> outputs.set_shape(inputs.get_shape()) <NEW_LINE> return outputs
|
Batch normalisation layer, with trainable mean value 'beta' and
std 'gamma'. 'beta' is initialised to 0.0 and 'gamma' is initialised
to 1.0. This class assumes 'beta' and 'gamma' share the same type of
regulariser.
|
6259902d6fece00bbaccc9dd
|
class Runner: <NEW_LINE> <INDENT> def __init__(self, runner_dic = None): <NEW_LINE> <INDENT> self.name = "" <NEW_LINE> self.last_name = "" <NEW_LINE> self.weight = 0 <NEW_LINE> self.height = 0 <NEW_LINE> self.runner_id = "" <NEW_LINE> self.firstbeat_file_read = False <NEW_LINE> if runner_dic != None: <NEW_LINE> <INDENT> self.name = runner_dic["name"] <NEW_LINE> self.last_name = runner_dic["last_name"] <NEW_LINE> self.weight = runner_dic["weight"] <NEW_LINE> self.height = runner_dic["height"] <NEW_LINE> self.runner_id = runner_dic["runner_id"] <NEW_LINE> self.firstbeat_file_read = False <NEW_LINE> <DEDENT> <DEDENT> def set_runner_data(self, name, last_name, weight, height, runner_id): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.last_name = last_name <NEW_LINE> self.weight = weight <NEW_LINE> self.height = height <NEW_LINE> self.runner_id = runner_id <NEW_LINE> <DEDENT> def get_dict(self): <NEW_LINE> <INDENT> dict_r = {'name': self.name, 'last_name': self.last_name, 'weight': self.weight, 'height': self.height, 'runner_id': self.runner_id} <NEW_LINE> return dict_r <NEW_LINE> <DEDENT> def get_firstbeat_file(self,filename = None): <NEW_LINE> <INDENT> if filename is None: <NEW_LINE> <INDENT> print("Insert the complete path of the firstbeat filename in .sdf format\n") <NEW_LINE> filename_path = raw_input("Filename: ") <NEW_LINE> <DEDENT> rr_time_series_metadata = {'runner': "", 'date': None, 'start_time': None, 'filename': None} <NEW_LINE> filename_data = basename(filename) <NEW_LINE> idx_ = [n for n in xrange(len(filename_data)) if filename_data.find('_',n) == n] <NEW_LINE> idx_point = filename_data.index('.') <NEW_LINE> runner_name = filename_data[0:idx_[1]] <NEW_LINE> dni = filename_data[idx_[2]+1:idx_[3]] <NEW_LINE> date = filename_data[idx_[3]+1:idx_[6]] <NEW_LINE> start_time = filename_data[idx_[6]+1:idx_point] <NEW_LINE> rr_time_series_metadata["runner"] = runner_name <NEW_LINE> rr_time_series_metadata["filename"] = filename <NEW_LINE> rr_time_series_metadata["date"] = date <NEW_LINE> rr_time_series_metadata["start_time"] = start_time <NEW_LINE> self.firstbeat_file_read = True <NEW_LINE> return rr_time_series_metadata, filename
|
This class controls the runner element in the application
|
6259902d30c21e258be9983b
|
class ProductImporterMessage(object): <NEW_LINE> <INDENT> openapi_types = { 'entries': 'list[CatalogProduct]' } <NEW_LINE> attribute_map = { 'entries': 'entries' } <NEW_LINE> def __init__(self, entries=None): <NEW_LINE> <INDENT> self._entries = None <NEW_LINE> self.discriminator = None <NEW_LINE> if entries is not None: <NEW_LINE> <INDENT> self.entries = entries <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def entries(self): <NEW_LINE> <INDENT> return self._entries <NEW_LINE> <DEDENT> @entries.setter <NEW_LINE> def entries(self, entries): <NEW_LINE> <INDENT> self._entries = entries <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in six.iteritems(self.openapi_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pprint.pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, ProductImporterMessage): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other
|
NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
|
6259902dd164cc6175821fa1
|
@dataclass <NEW_LINE> class TransactionIndex: <NEW_LINE> <INDENT> withdraw = TransactionWithdrawIndex <NEW_LINE> datetime = TransactionDatetimeIndex
|
Secondary indexes helper
|
6259902d6fece00bbaccc9de
|
class User(BaseModel, Base): <NEW_LINE> <INDENT> if os.getenv('HBNB_TYPE_STORAGE') == 'db': <NEW_LINE> <INDENT> __tablename__ = "users" <NEW_LINE> email = Column(String(128), nullable=False) <NEW_LINE> password = Column(String(128), nullable=False) <NEW_LINE> first_name = Column(String(128)) <NEW_LINE> last_name = Column(String(128)) <NEW_LINE> places = relationship("Place", backref="user", cascade="delete") <NEW_LINE> reviews = relationship("Review", cascade="delete", backref="user") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> email = "" <NEW_LINE> password = "" <NEW_LINE> first_name = "" <NEW_LINE> last_name = ""
|
Definition of the User class
|
6259902d287bf620b6272c16
|
class Wildcard(m21Base.Music21Object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> m21Base.Music21Object.__init__(self) <NEW_LINE> self.duration = WildcardDuration()
|
An object that may have some properties defined, but others not, that
matches a single object in a music21 stream. Equivalent to the
regular expression "."
>>> wc1 = search.Wildcard()
>>> wc1.pitch = pitch.Pitch("C")
>>> st1 = stream.Stream()
>>> st1.append(note.HalfNote("D"))
>>> st1.append(wc1)
|
6259902d1f5feb6acb163c20
|
class Development(Resource): <NEW_LINE> <INDENT> endpoint = 'developments' <NEW_LINE> def __init__(self, api, data): <NEW_LINE> <INDENT> super(Development, self).__init__(api, data) <NEW_LINE> self.__class_agency__ = Agency <NEW_LINE> self.__class_agent__ = Agent <NEW_LINE> self.__class_group__ = PropertyGroup <NEW_LINE> self.__class_phase__ = DevelopmentPhase <NEW_LINE> <DEDENT> def agency(self): <NEW_LINE> <INDENT> value = self._data.get('agency', None) <NEW_LINE> if value is not None: <NEW_LINE> <INDENT> data = { 'response': value, } <NEW_LINE> return self.__class_agency__(self._api, data) <NEW_LINE> <DEDENT> <DEDENT> def agent(self): <NEW_LINE> <INDENT> value = self._data.get('agent', None) <NEW_LINE> if value is not None: <NEW_LINE> <INDENT> data = { 'response': value, } <NEW_LINE> return self.__class_agent__(self._api, data) <NEW_LINE> <DEDENT> <DEDENT> def groups(self, params=None): <NEW_LINE> <INDENT> url = self._data.get('groups') <NEW_LINE> data = self._api.request(url, 'GET', params=params) <NEW_LINE> return self.__class_group__(self._api, data).get_items() <NEW_LINE> <DEDENT> def listings(self, params=None): <NEW_LINE> <INDENT> if params is None: <NEW_LINE> <INDENT> params = {} <NEW_LINE> <DEDENT> url = self._data.get('listing_url', None) <NEW_LINE> if url is None: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> url_params = url.split('?') <NEW_LINE> if len(url_params) < 2: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> url_params = dict(urlparse.parse_qsl(url_params[1])) <NEW_LINE> params.update(url_params) <NEW_LINE> listing_resource = client.ListingResource( self._api.base_url, api_key=self._api.api_key, ) <NEW_LINE> return listing_resource.search(params=params) <NEW_LINE> <DEDENT> def phases(self, params=None): <NEW_LINE> <INDENT> url = self._data.get('phases') <NEW_LINE> data = self._api.request(url, 'GET', params=params) <NEW_LINE> return self.__class_phase__(self._api, data).get_items() <NEW_LINE> <DEDENT> def pictures(self): <NEW_LINE> <INDENT> result = [] <NEW_LINE> items = self._data.get('pictures', []) <NEW_LINE> if items is None: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> for item in items: <NEW_LINE> <INDENT> result.append(Image(item)) <NEW_LINE> <DEDENT> return result <NEW_LINE> <DEDENT> def representatives(self): <NEW_LINE> <INDENT> result = [] <NEW_LINE> items = self._data.get('representatives', []) <NEW_LINE> if items is None: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> for item in items: <NEW_LINE> <INDENT> data = { 'response': item, } <NEW_LINE> result.append(self.__class_agent__(self._api, data)) <NEW_LINE> <DEDENT> return result
|
'Development Project' entity resource class.
|
6259902d9b70327d1c57fdb2
|
class Function(Element): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> self.has_input = True <NEW_LINE> self.has_output = True <NEW_LINE> super(Function, self).__init__(*args, **kwargs) <NEW_LINE> <DEDENT> def worker(self, records): <NEW_LINE> <INDENT> return self.process(records) <NEW_LINE> <DEDENT> def process(self, records): <NEW_LINE> <INDENT> raise NotImplementedError( """class "{0}" needs a process() method""".format( self.__class__.__name__ ) )
|
Function() base class: a Function() takes records from the pipe,
applies a function to them and passes them further down the pipe
|
6259902d1d351010ab8f4b47
|
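A short sketch of the contract the Function docstring above describes: subclasses implement process(), and the base class's worker() forwards records to it. The surrounding Element/pipe machinery is assumed from context (including that Element() can be constructed without arguments); UppercaseRecords is a hypothetical name.

class UppercaseRecords(Function):
    """Hypothetical Function subclass: upper-cases each record it receives."""
    def process(self, records):
        # records is whatever iterable the pipe hands to worker()
        return [r.upper() for r in records]

stage = UppercaseRecords()
print(stage.worker(["alpha", "beta"]))  # ['ALPHA', 'BETA']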
class UpdateCheck(models.Model): <NEW_LINE> <INDENT> STATUSES = ( ("E", "Error"), ("R", "Running"), ("A", "Available"), ("N", "Not available") ) <NEW_LINE> state = models.CharField(max_length=1, choices=STATUSES, default="R", editable=False, null=False, blank=False) <NEW_LINE> created_at = models.DateTimeField(auto_now_add=True, editable=False, db_index=True) <NEW_LINE> @staticmethod <NEW_LINE> def should_check(): <NEW_LINE> <INDENT> if UpdateCheck.objects.filter(created_at__gte=datetime.now()-timedelta(days=1)).order_by("-created_at").count(): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return True
|
Collection of image analysis.
|
6259902d8c3a8732951f7588
|
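An illustrative sketch (not from the dataset) of how should_check() gates work to at most once per day: it returns False while an UpdateCheck created in the last 24 hours exists, True otherwise. It assumes a standard Django ORM setup for this model; run_update_lookup is a hypothetical helper standing in for the actual check.

if UpdateCheck.should_check():
    # No check recorded in the last 24 hours: record a new one (state "R", Running)
    check = UpdateCheck.objects.create()
    available = run_update_lookup()  # hypothetical helper performing the real lookup
    check.state = "A" if available else "N"
    check.save()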