| code (string, length 4 to 4.48k) | docstring (string, length 1 to 6.45k) | _id (string, length 24) |
|---|---|---|
class build_py_binary(_build_py): <NEW_LINE> <INDENT> def find_package_modules(self, package, package_dir): <NEW_LINE> <INDENT> module_files = glob(os.path.join(package_dir, "*.py")) <NEW_LINE> module_files += glob(os.path.join(package_dir, "*.pyc")) <NEW_LINE> setup_script = os.path.abspath(self.distribution.script_name) <NEW_LINE> modules = [] <NEW_LINE> for f in module_files: <NEW_LINE> <INDENT> abs_f = os.path.abspath(f) <NEW_LINE> if abs_f != setup_script: <NEW_LINE> <INDENT> module = os.path.splitext(os.path.basename(f))[0] <NEW_LINE> modules.append((package, module, f)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.debug_print("excluding %s" % setup_script) <NEW_LINE> <DEDENT> <DEDENT> return modules <NEW_LINE> <DEDENT> def get_module_outfile(self, build_dir, package, module): <NEW_LINE> <INDENT> f = _build_py.get_module_outfile(self, build_dir, package, module) <NEW_LINE> if not f.endswith('__init__.py'): <NEW_LINE> <INDENT> if f.endswith('.py'): <NEW_LINE> <INDENT> f += 'c' <NEW_LINE> <DEDENT> <DEDENT> return f <NEW_LINE> <DEDENT> def byte_compile(self, files): <NEW_LINE> <INDENT> path = files[0] <NEW_LINE> path = path[:path.index('onep')] <NEW_LINE> for path, subdir, files in os.walk(path): <NEW_LINE> <INDENT> for name in files: <NEW_LINE> <INDENT> if name.startswith('__init__'): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> if name.endswith('.py'): <NEW_LINE> <INDENT> os.remove(os.path.join(path, name))
|
Copy .pyc files in addition to .py
|
6259901b507cdc57c63a5b7f
|
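A minimal sketch of how a custom build command like `build_py_binary` above is typically wired into a setup script; the project and package names are illustrative assumptions, not taken from the source.

```python
# Hypothetical setup.py wiring; name and packages are placeholders.
from setuptools import setup

setup(
    name="onep-binary-dist",                  # assumed project name
    packages=["onep"],                        # assumed package layout
    cmdclass={"build_py": build_py_binary},   # use the .pyc-copying build step above
)
```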
class Fileset: <NEW_LINE> <INDENT> def __init__(self, directory): <NEW_LINE> <INDENT> self.directory = directory <NEW_LINE> self.dates = {} <NEW_LINE> self.weeks = defaultdict(list) <NEW_LINE> self.months = defaultdict(list) <NEW_LINE> self.years = defaultdict(list) <NEW_LINE> if not os.path.isdir(directory): <NEW_LINE> <INDENT> raise ValueError("Input argument is not a directory") <NEW_LINE> <DEDENT> self.files = glob.glob(directory + '/*_group*.txt') <NEW_LINE> for f in self.files: <NEW_LINE> <INDENT> g = shortname(f) <NEW_LINE> d = arrow.get(g, 'YYYYMMDD') <NEW_LINE> self.dates[g] = d <NEW_LINE> self.weeks[self.getWeek(d)].append(f) <NEW_LINE> self.months[self.getMonth(d)].append(f) <NEW_LINE> self.years[self.getYear(d)].append(f) <NEW_LINE> <DEDENT> <DEDENT> def getAll(self): <NEW_LINE> <INDENT> return self.files <NEW_LINE> <DEDENT> def between(self, start, stop): <NEW_LINE> <INDENT> a = arrow.get(start, 'YYYYMMDD') <NEW_LINE> b = arrow.get(stop, 'YYYYMMDD') <NEW_LINE> result = [] <NEW_LINE> for f in self.files: <NEW_LINE> <INDENT> g = shortname(f) <NEW_LINE> d = arrow.get(g, 'YYYYMMDD') <NEW_LINE> if (d >= a) and (d <= b): <NEW_LINE> <INDENT> result.append(f) <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def getDay(self, day): <NEW_LINE> <INDENT> a = arrow.get(day, 'YYYYMMDD') <NEW_LINE> result = [] <NEW_LINE> for f in self.files: <NEW_LINE> <INDENT> g = shortname(f) <NEW_LINE> d = arrow.get(g, 'YYYYMMDD') <NEW_LINE> if (d == a): <NEW_LINE> <INDENT> result.append(f) <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def getWeek(self, a): <NEW_LINE> <INDENT> return a.isocalendar()[1] <NEW_LINE> <DEDENT> def getMonth(self, a): <NEW_LINE> <INDENT> return a.format('M') <NEW_LINE> <DEDENT> def getYear(self, a): <NEW_LINE> <INDENT> return a.format('YYYY')
|
Read a directory of NAME files and
extract the subset corresponding to a given time period.
|
6259901b91af0d3eaad3ac00
|
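A short usage sketch for the `Fileset` class above, assuming a directory of files whose names start with a YYYYMMDD date and contain `_group`; the path and dates are made up.

```python
# Hypothetical directory and date range, for illustration only.
fs = Fileset("/data/name_runs")                 # scans /data/name_runs/*_group*.txt
january = fs.between("20170101", "20170131")    # files dated within January 2017
first_day = fs.getDay("20170101")               # files for a single day
print(len(fs.getAll()), len(january), len(first_day))
```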
class BaseDataSource(object): <NEW_LINE> <INDENT> def __init__(self, uri, columns): <NEW_LINE> <INDENT> self._pretty_to_raw_columns = columns <NEW_LINE> self._raw_to_pretty_columns = dict(x[::-1] for x in columns.items()) <NEW_LINE> if len(self._pretty_to_raw_columns) != len(self._raw_to_pretty_columns): <NEW_LINE> <INDENT> raise ValueError('Column mapping must be 1-to-1!') <NEW_LINE> <DEDENT> if columns: <NEW_LINE> <INDENT> query_string = '&'.join(['%s=%s' % (urllib.quote(x), urllib.quote(y)) for x, y in columns.items()]) <NEW_LINE> self.uri = uri + '?' + query_string <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.uri = uri <NEW_LINE> <DEDENT> <DEDENT> def columns(self, row): <NEW_LINE> <INDENT> return (self._raw_to_pretty_columns[x] for x in self._columns(row)) <NEW_LINE> <DEDENT> def column_values(self, row, columns=None): <NEW_LINE> <INDENT> if columns is None: <NEW_LINE> <INDENT> columns = self._raw_columns <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> columns = [self._pretty_to_raw_columns[column] for column in columns] <NEW_LINE> <DEDENT> return ((self._raw_to_pretty_columns[x], y) for x, y in self._column_values(row, columns)) <NEW_LINE> <DEDENT> def rows(self): <NEW_LINE> <INDENT> return self._rows() <NEW_LINE> <DEDENT> def row_columns(self): <NEW_LINE> <INDENT> for row, columns in self._row_columns(): <NEW_LINE> <INDENT> pretty_columns = (self._raw_to_pretty_columns[column] for column in columns) <NEW_LINE> yield row, pretty_columns <NEW_LINE> <DEDENT> <DEDENT> def row_column_values(self, columns=None): <NEW_LINE> <INDENT> if columns is None: <NEW_LINE> <INDENT> columns = self._raw_columns <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> columns = [self._pretty_to_raw_columns[column] for column in columns] <NEW_LINE> <DEDENT> for row, column_values in self._row_column_values(columns=columns): <NEW_LINE> <INDENT> pretty_column_values = ((self._raw_to_pretty_columns[column], value) for column, value in column_values) <NEW_LINE> yield row, pretty_column_values <NEW_LINE> <DEDENT> <DEDENT> def value(self, row, column): <NEW_LINE> <INDENT> return self._value(row, self._pretty_to_raw_columns[column])
|
Base DataSource; subclasses must extend at least _columns, _row_column_values, and _value
|
6259901bd18da76e235b783c
|
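A hedged sketch of a `BaseDataSource` subclass backed by an in-memory dict. Only the three hooks named in the docstring are implemented; a real subclass would likely need the other underscore-prefixed hooks the base class calls as well.

```python
class DictDataSource(BaseDataSource):
    """Illustrative subclass; the data layout {row_id: {raw_column: value}} is assumed."""

    def __init__(self, uri, columns, data):
        super(DictDataSource, self).__init__(uri, columns)
        self._data = data

    def _columns(self, row):
        # Raw column names present for a given row.
        return self._data[row].keys()

    def _row_column_values(self, columns):
        # Yield (row, [(raw_column, value), ...]) pairs for the requested columns.
        for row, values in self._data.items():
            yield row, [(c, values[c]) for c in columns if c in values]

    def _value(self, row, column):
        return self._data[row][column]
```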
class TwitterCredential(Credential): <NEW_LINE> <INDENT> id = Column(Integer, ForeignKey('credential.id'), primary_key=True) <NEW_LINE> identifier = Column(BigInteger, nullable=False, unique=True) <NEW_LINE> token = Column(String) <NEW_LINE> token_secret = Column(String) <NEW_LINE> __tablename__ = 'twitter_credential' <NEW_LINE> __mapper_args__ = {'polymorphic_identity': 'twitter'} <NEW_LINE> __repr_columns__ = id, identifier
|
Information about a Twitter user
|
6259901b796e427e5384f55d
|
class FailedShowsDb(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self._query_get_all = 'SELECT path FROM failed_shows' <NEW_LINE> self._query_get = 'SELECT path FROM failed_shows WHERE path=?' <NEW_LINE> self._query_set = 'INSERT INTO failed_shows VALUES (?)' <NEW_LINE> self._query_delete = 'DELETE FROM failed_shows WHERE path=?' <NEW_LINE> self._query_flush = 'DELETE FROM failed_shows' <NEW_LINE> <DEDENT> def get_failed_shows(self): <NEW_LINE> <INDENT> shows = [] <NEW_LINE> connection = sqlite3.connect(autosubliminal.DBFILE) <NEW_LINE> cursor = connection.cursor() <NEW_LINE> cursor.execute(self._query_get_all) <NEW_LINE> for row in cursor: <NEW_LINE> <INDENT> shows.append(row[0]) <NEW_LINE> <DEDENT> connection.close() <NEW_LINE> return shows <NEW_LINE> <DEDENT> def get_failed_show(self, show_path): <NEW_LINE> <INDENT> show = None <NEW_LINE> connection = sqlite3.connect(autosubliminal.DBFILE) <NEW_LINE> cursor = connection.cursor() <NEW_LINE> cursor.execute(self._query_get, [show_path]) <NEW_LINE> for row in cursor: <NEW_LINE> <INDENT> show = row[0] <NEW_LINE> <DEDENT> connection.close() <NEW_LINE> return show <NEW_LINE> <DEDENT> def set_failed_show(self, show_path): <NEW_LINE> <INDENT> connection = sqlite3.connect(autosubliminal.DBFILE) <NEW_LINE> cursor = connection.cursor() <NEW_LINE> cursor.execute(self._query_set, [show_path]) <NEW_LINE> connection.commit() <NEW_LINE> connection.close() <NEW_LINE> <DEDENT> def delete_failed_show(self, show_path): <NEW_LINE> <INDENT> connection = sqlite3.connect(autosubliminal.DBFILE) <NEW_LINE> cursor = connection.cursor() <NEW_LINE> cursor.execute(self._query_delete, [show_path]) <NEW_LINE> connection.commit() <NEW_LINE> connection.close() <NEW_LINE> <DEDENT> def flush_failed_shows(self): <NEW_LINE> <INDENT> connection = sqlite3.connect(autosubliminal.DBFILE) <NEW_LINE> cursor = connection.cursor() <NEW_LINE> cursor.execute(self._query_flush) <NEW_LINE> connection.commit() <NEW_LINE> connection.close()
|
Failed shows db.
|
6259901b30c21e258be995f2
|
class Interval: <NEW_LINE> <INDENT> def __init__(self, chrom: str, start: int, end: int): <NEW_LINE> <INDENT> self.chrom = chrom <NEW_LINE> assert (start <= end) <NEW_LINE> self.start = start <NEW_LINE> self.end = end <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return (self.chrom == other.chrom) and (self.start == other.start) and (self.end == other.end) <NEW_LINE> <DEDENT> def __hash__(self): <NEW_LINE> <INDENT> return hash((self.chrom, self.start, self.end))
|
Stores a Genomic interval
|
6259901b462c4b4f79dbc7e9
|
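A quick usage sketch for the `Interval` class above; coordinates are arbitrary. Because `__eq__` and `__hash__` agree, equal intervals collapse in sets.

```python
a = Interval("chr1", 100, 200)
b = Interval("chr1", 100, 200)
c = Interval("chr2", 100, 200)
assert a == b and a != c
assert len({a, b, c}) == 2   # the duplicate interval is deduplicated
```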
class Com(object): <NEW_LINE> <INDENT> def __init__(self, comPort='COM1',baudrate=9600): <NEW_LINE> <INDENT> self.ser = serial.Serial(str(comPort),baudrate) <NEW_LINE> <DEDENT> def connect(self): <NEW_LINE> <INDENT> if not self.ser.is_open: <NEW_LINE> <INDENT> self.ser.open() <NEW_LINE> <DEDENT> return self.ser.is_open <NEW_LINE> <DEDENT> def close(self): <NEW_LINE> <INDENT> if self.ser.is_open: <NEW_LINE> <INDENT> self.ser.close() <NEW_LINE> <DEDENT> return self.ser.closed <NEW_LINE> <DEDENT> def getPortStr(self): <NEW_LINE> <INDENT> if self.ser.is_open: <NEW_LINE> <INDENT> return self.ser.portstr <NEW_LINE> <DEDENT> raise Exception("Serise is not open!") <NEW_LINE> <DEDENT> def sendMsg(self,msg): <NEW_LINE> <INDENT> if not self.ser.is_open: <NEW_LINE> <INDENT> self.connect() <NEW_LINE> <DEDENT> if not msg: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> listMsg=[] <NEW_LINE> try: <NEW_LINE> <INDENT> listMsg=list(msg) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> print ("Format input msg error") <NEW_LINE> return False <NEW_LINE> <DEDENT> self.ser.write((','.join(listMsg)).encode()) <NEW_LINE> return True
|
Simple serial (COM) port wrapper built on pyserial.
|
6259901b287bf620b62729c8
|
class LTLfLast(LTLfFormula): <NEW_LINE> <INDENT> def to_nnf(self) -> LTLfFormula: <NEW_LINE> <INDENT> return LTLfAnd([LTLfWeakNext(LTLfFalse()), LTLfNot(LTLfEnd())]).to_nnf() <NEW_LINE> <DEDENT> def negate(self) -> LTLfFormula: <NEW_LINE> <INDENT> return self.to_nnf().negate() <NEW_LINE> <DEDENT> def find_labels(self) -> List[AtomSymbol]: <NEW_LINE> <INDENT> return list() <NEW_LINE> <DEDENT> def _members(self): <NEW_LINE> <INDENT> return (Symbols.LAST.value,) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return Symbols.LAST.value <NEW_LINE> <DEDENT> def to_mona(self, v="0") -> str: <NEW_LINE> <INDENT> return LTLfWeakNext(LTLfFalse()).to_mona(v)
|
Class for the LTLf Last formula.
|
6259901b63f4b57ef0086462
|
class BitbakeController(object): <NEW_LINE> <INDENT> def __init__(self, connection): <NEW_LINE> <INDENT> self.connection = connection <NEW_LINE> <DEDENT> def _runCommand(self, command): <NEW_LINE> <INDENT> result, error = self.connection.connection.runCommand(command) <NEW_LINE> if error: <NEW_LINE> <INDENT> raise Exception(error) <NEW_LINE> <DEDENT> return result <NEW_LINE> <DEDENT> def disconnect(self): <NEW_LINE> <INDENT> return self.connection.removeClient() <NEW_LINE> <DEDENT> def setVariable(self, name, value): <NEW_LINE> <INDENT> return self._runCommand(["setVariable", name, value]) <NEW_LINE> <DEDENT> def build(self, targets, task = None): <NEW_LINE> <INDENT> if task is None: <NEW_LINE> <INDENT> task = "build" <NEW_LINE> <DEDENT> return self._runCommand(["buildTargets", targets, task])
|
This is the basic class that controls a bitbake server.
How the server is started and acquired is outside the scope of this class.
|
6259901bbf627c535bcb2293
|
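A hedged usage sketch for `BitbakeController` above; obtaining the underlying server connection is outside the scope of the class (and of this sketch), so `connection` is simply assumed.

```python
# `connection` is assumed to be an already-established bitbake server connection.
controller = BitbakeController(connection)
controller.setVariable("MACHINE", "qemux86-64")   # illustrative variable and value
controller.build("core-image-minimal")            # task defaults to "build"
controller.disconnect()
```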
class UnversionedSigner(UnversionedVerifier): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def Read(location): <NEW_LINE> <INDENT> return UnversionedSigner(readers.FileReader(location)) <NEW_LINE> <DEDENT> def IsAcceptablePurpose(self, purpose): <NEW_LINE> <INDENT> return purpose == keyinfo.SIGN_AND_VERIFY <NEW_LINE> <DEDENT> def Sign(self, data): <NEW_LINE> <INDENT> signing_key = self.primary_key <NEW_LINE> if signing_key is None: <NEW_LINE> <INDENT> raise errors.NoPrimaryKeyError() <NEW_LINE> <DEDENT> return util.Encode(signing_key.Sign(data))
|
Capable of both signing and verifying. This outputs standard signatures
(i.e. HMAC-SHA1, DSA-SHA1, RSA-SHA1) that contain no key versioning.
|
6259901b507cdc57c63a5b85
|
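A short, hedged sketch of using the `UnversionedSigner` above; the key-set location is a placeholder and the surrounding keyczar-style key material is assumed to exist.

```python
signer = UnversionedSigner.Read("/path/to/keyset")   # wraps the path in a FileReader
signature = signer.Sign("message to sign")           # NoPrimaryKeyError if no primary key
print(signature)
```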
class TestLoopbackContext(unittest.TestCase): <NEW_LINE> <INDENT> def _check_loopback_context_manager(self, cm, guard, items): <NEW_LINE> <INDENT> with cm: <NEW_LINE> <INDENT> self.assertItemsEqual(guard.locked_items, items) <NEW_LINE> for item in items: <NEW_LINE> <INDENT> self.assertIn(item, guard) <NEW_LINE> <DEDENT> <DEDENT> self.assertIsNone(guard.locked_items) <NEW_LINE> for item in items: <NEW_LINE> <INDENT> self.assertNotIn(item, guard) <NEW_LINE> <DEDENT> <DEDENT> def test_loopback_context_direct(self): <NEW_LINE> <INDENT> items = ['a', 'b', 'c'] <NEW_LINE> guard = LoopbackGuard() <NEW_LINE> cm = LoopbackContext(guard, items) <NEW_LINE> self._check_loopback_context_manager(cm, guard, items) <NEW_LINE> <DEDENT> def test_loopback_context_call(self): <NEW_LINE> <INDENT> items = ['a', 'b', 'c'] <NEW_LINE> guard = LoopbackGuard() <NEW_LINE> cm = guard(*items) <NEW_LINE> self._check_loopback_context_manager(cm, guard, items)
|
Unit tests that exercise the LoopbackGuard class through
the LoopbackContext context manager.
|
6259901b287bf620b62729cc
|
class SampleGroup(db.Model): <NEW_LINE> <INDENT> __tablename__ = 'sample_groups' <NEW_LINE> id = db.Column(UUID(as_uuid=True), primary_key=True, server_default=db.text('uuid_generate_v4()')) <NEW_LINE> organization_id = db.Column(UUID(as_uuid=True), db.ForeignKey('organizations.id')) <NEW_LINE> name = db.Column(db.String(128), unique=True, nullable=False) <NEW_LINE> description = db.Column(db.String(300), nullable=False, default='') <NEW_LINE> access_scheme = db.Column(db.String(128), default='public', nullable=False) <NEW_LINE> theme = db.Column(db.String(16), nullable=False, default='') <NEW_LINE> created_at = db.Column(db.DateTime, nullable=False) <NEW_LINE> sample_placeholders = db.relationship(SamplePlaceholder) <NEW_LINE> sample_ids = association_proxy('sample_placeholders', 'sample_id') <NEW_LINE> analysis_result_uuid = db.Column(UUID(as_uuid=True), nullable=False) <NEW_LINE> def __init__( self, name, analysis_result, description='', access_scheme='public', theme='', created_at=datetime.datetime.utcnow()): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.description = description <NEW_LINE> self.access_scheme = access_scheme <NEW_LINE> self.theme = theme <NEW_LINE> self.created_at = created_at <NEW_LINE> self.analysis_result_uuid = analysis_result.uuid <NEW_LINE> <DEDENT> @property <NEW_LINE> def samples(self): <NEW_LINE> <INDENT> return Sample.objects(uuid__in=self.sample_ids) <NEW_LINE> <DEDENT> @samples.setter <NEW_LINE> def samples(self, value): <NEW_LINE> <INDENT> self.sample_ids = [sample.uuid for sample in value] <NEW_LINE> <DEDENT> @samples.deleter <NEW_LINE> def samples(self): <NEW_LINE> <INDENT> self.sample_ids = [] <NEW_LINE> <DEDENT> @property <NEW_LINE> def tools_present(self): <NEW_LINE> <INDENT> samples = self.samples <NEW_LINE> tools_present_in_all = set([]) <NEW_LINE> for i, sample in enumerate(samples): <NEW_LINE> <INDENT> tool_results = set(sample.tool_result_names) <NEW_LINE> if i == 0: <NEW_LINE> <INDENT> tools_present_in_all |= tool_results <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> tools_present_in_all &= tool_results <NEW_LINE> <DEDENT> <DEDENT> return list(tools_present_in_all) <NEW_LINE> <DEDENT> @property <NEW_LINE> def analysis_result(self): <NEW_LINE> <INDENT> return AnalysisResultMeta.objects.get(uuid=self.analysis_result_uuid) <NEW_LINE> <DEDENT> @analysis_result.setter <NEW_LINE> def analysis_result(self, new_analysis_result): <NEW_LINE> <INDENT> self.analysis_result_uuid = new_analysis_result.uuid
|
MetaGenScope Sample Group model.
|
6259901bd18da76e235b7840
|
class TestArchiveBotFunctions(TestCase): <NEW_LINE> <INDENT> net = False <NEW_LINE> def test_str2time(self): <NEW_LINE> <INDENT> date = datetime(2017, 1, 1) <NEW_LINE> self.assertEqual(archivebot.str2time('0d'), timedelta(0)) <NEW_LINE> self.assertEqual(archivebot.str2time('4000s'), timedelta(seconds=4000)) <NEW_LINE> self.assertEqual(archivebot.str2time('4000h'), timedelta(hours=4000)) <NEW_LINE> self.assertEqual(archivebot.str2time('7d'), archivebot.str2time('1w')) <NEW_LINE> self.assertEqual(archivebot.str2time('3y'), timedelta(1096)) <NEW_LINE> self.assertEqual(archivebot.str2time('3y', date), timedelta(1095)) <NEW_LINE> self.assertRaises(archivebot.MalformedConfigError, archivebot.str2time, '4000@') <NEW_LINE> self.assertRaises(archivebot.MalformedConfigError, archivebot.str2time, '$1') <NEW_LINE> <DEDENT> def test_checkstr(self): <NEW_LINE> <INDENT> self.assertEqual(archivebot.checkstr('400s'), ('s', '400')) <NEW_LINE> with suppress_warnings(): <NEW_LINE> <INDENT> self.assertEqual(archivebot.checkstr('3000'), ('s', '3000')) <NEW_LINE> <DEDENT> self.assertEqual(archivebot.checkstr('7d'), ('d', '7')) <NEW_LINE> self.assertEqual(archivebot.checkstr('3y'), ('y', '3')) <NEW_LINE> self.assertEqual(archivebot.checkstr('4000@'), ('@', '4000')) <NEW_LINE> <DEDENT> def test_str2size(self): <NEW_LINE> <INDENT> self.assertEqual(archivebot.str2size('0'), (0, 'B')) <NEW_LINE> self.assertEqual(archivebot.str2size('3000'), (3000, 'B')) <NEW_LINE> self.assertEqual(archivebot.str2size('4 K'), (4096, 'B')) <NEW_LINE> self.assertEqual(archivebot.str2size('2T'), (2, 'T')) <NEW_LINE> self.assertEqual(archivebot.str2size('2 000'), (2, 'B'))
|
Test functions in archivebot.
|
6259901b8c3a8732951f7348
|
class InvalidDependencyError(spack.error.SpecError): <NEW_LINE> <INDENT> def __init__(self, pkg, deps): <NEW_LINE> <INDENT> self.invalid_deps = deps <NEW_LINE> super(InvalidDependencyError, self).__init__( 'Package {0} does not depend on {1}'.format( pkg, spack.util.string.comma_or(deps)))
|
Raised when a dependency in a spec is not actually a dependency
of the package.
|
6259901bd164cc6175821d65
|
class IDCCutInfoSchema(schema.ResponseSchema): <NEW_LINE> <INDENT> fields = { "City": fields.Str(required=False, load_from="City"), "CutType": fields.Str(required=False, load_from="CutType"), "EndTime": fields.Int(required=False, load_from="EndTime"), "IDCName": fields.Str(required=False, load_from="IDCName"), "Province": fields.Str(required=False, load_from="Province"), "ResourceSet": fields.List(ResourceSetSchema()), "StartTime": fields.Int(required=False, load_from="StartTime"), }
|
IDCCutInfo - data center (IDC) cut-over information
|
6259901bbe8e80087fbbfe5a
|
class SMSKingErrorCodeResponse(SMSKingErrorResponse): <NEW_LINE> <INDENT> def __init__(self, url, status_code, response, error_code): <NEW_LINE> <INDENT> super(SMSKingErrorCodeResponse, self).__init__(url, status_code, response) <NEW_LINE> self.error_code = error_code <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return 'SMSKingErrorCodeResponse(response={0}, error_code={1})'.format(self.response, self.error_code)
|
Known error, carrying an error code
|
6259901b462c4b4f79dbc7f1
|
class UnnamedIntegerParameter(Rule): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> Rule.__init__(self, name='unnamed-int', description='Unnamed integer parameter', suggestion='Provide a name for this parameter.') <NEW_LINE> <DEDENT> pattern = re.compile(FUNC_PROT_PATTERN) <NEW_LINE> int_types = 'uint8_t|uint16_t|uint32_t|uint64_t|int8_t|int16_t|int32_t|int64_t|int|short|long' <NEW_LINE> unnamed_int_pattern = re.compile(r'\b({types})(?:\s*?(?:,|$))'.format( types=int_types)) <NEW_LINE> def augment_by_color(self, violation: RuleViolation): <NEW_LINE> <INDENT> line_index = violation.index_of_starting_line() <NEW_LINE> function_line_number, function_line = violation.lines[line_index] <NEW_LINE> insertion_index = violation.meta['insertion_index'] <NEW_LINE> violation.lines[line_index] = (function_line_number, function_line[:insertion_index] + Colors.GOOD + ' name' + Colors.RESET + function_line[insertion_index:]) <NEW_LINE> <DEDENT> def collect(self, file: CheckFile): <NEW_LINE> <INDENT> offenders = [] <NEW_LINE> text = file.collapsed <NEW_LINE> for function_match in self.pattern.finditer(text): <NEW_LINE> <INDENT> function_parameters = function_match.group('params') <NEW_LINE> function_parameters_starting_index = function_match.start('params') <NEW_LINE> for unnamed_match in self.unnamed_int_pattern.finditer(function_parameters): <NEW_LINE> <INDENT> offending_index = (function_parameters_starting_index + unnamed_match.start(1)) <NEW_LINE> offending_line_number, offending_column = file.line_number_at(offending_index) <NEW_LINE> character_range = (function_match.start(), function_match.end()) <NEW_LINE> offending_lines = file.lines_in_character_range(character_range) <NEW_LINE> _, insertion_column = file.line_number_at( function_parameters_starting_index + unnamed_match.end(1)) <NEW_LINE> offender = self.violate(at=(offending_line_number, offending_column), lines=offending_lines, meta={'insertion_index': insertion_column - 1}) <NEW_LINE> offenders.append(offender) <NEW_LINE> <DEDENT> <DEDENT> return offenders <NEW_LINE> <DEDENT> @property <NEW_LINE> def severity(self): <NEW_LINE> <INDENT> return RuleViolation.ALLOW <NEW_LINE> <DEDENT> @property <NEW_LINE> def triggers(self): <NEW_LINE> <INDENT> return [ 'void func(↓int);', 'void func( ↓int);', 'void func(↓int );', 'void func( ↓int );', ('void func(↓int,\n' ' unsigned ↓int);') ] <NEW_LINE> <DEDENT> @property <NEW_LINE> def nontriggers(self): <NEW_LINE> <INDENT> return [ 'void func(int a);', 'void func(int a);', ('void func(int\n' ' a);'), 'void func(struct point);' ]
|
Provide meaningful names for integer parameters if able.
Most function prototypes suffer from having unnamed integer parameters,
as their meaning can be difficult to derive without a name.
<br/><br/>
There are exceptions, of course; a good example is a math function such as `max(int, int)`
where adding parameter names (e.g. `int a` and `int b`) would not add value or make it any
easier to understand.
<br/><br/>
In general, however, it is almost always preferable to provide parameter names.
|
6259901b507cdc57c63a5b89
|
class RestMissingParameter(RestDispException404): <NEW_LINE> <INDENT> pass
|
Missing parameter.
|
6259901b5e10d32532ce3ff9
|
class Segment(Base, PtmBase): <NEW_LINE> <INDENT> __tablename__ = 'segments' <NEW_LINE> plan_id = Column(Integer, ForeignKey('activity_plans.id'), nullable=False) <NEW_LINE> pace_id = Column(Integer, ForeignKey('paces.id'), nullable=False) <NEW_LINE> position = Column(Integer) <NEW_LINE> length = Column(Integer, nullable=False) <NEW_LINE> plan = relationship('ActivityPlan') <NEW_LINE> pace = relationship('Pace') <NEW_LINE> def __repr__(self): <NEW_LINE> <INDENT> return '<Segment(pace="%s", length=%d)>' % ( self.pace.speed, self.length, ) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def remove_orphans(cls): <NEW_LINE> <INDENT> cls.query.filter(cls.plan_id == None).delete(synchronize_session='fetch')
|
Segment object containing both a Pace and a time in seconds, tied to an
ActivityPlan.
This functions as a join table which maps ActivityPlans, using a doubly
linked list structure to enforce an ordering of Segments in an ActivityPlan.
|
6259901bac7a0e7691f732d2
|
class FlickrObject(object): <NEW_LINE> <INDENT> __converters__ = [] <NEW_LINE> __display__ = [] <NEW_LINE> __metaclass__ = FlickrAutoDoc <NEW_LINE> def __init__(self, **params): <NEW_LINE> <INDENT> params["loaded"] = False <NEW_LINE> self._set_properties(**params) <NEW_LINE> <DEDENT> def _set_properties(self, **params): <NEW_LINE> <INDENT> for c in self.__class__.__converters__: <NEW_LINE> <INDENT> c(params) <NEW_LINE> <DEDENT> self.__dict__.update(params) <NEW_LINE> <DEDENT> def setToken(self, filename=None, token=None, token_key=None, token_secret=None): <NEW_LINE> <INDENT> if token is None: <NEW_LINE> <INDENT> token = auth.token_factory(filename=filename, token_key=token_key, token_secret=token_secret) <NEW_LINE> <DEDENT> self.__dict__["token"] = token <NEW_LINE> <DEDENT> def getToken(self): <NEW_LINE> <INDENT> return self.__dict__.get("token", None) <NEW_LINE> <DEDENT> def __getattr__(self, name): <NEW_LINE> <INDENT> if name == 'id' and name not in self.__dict__: <NEW_LINE> <INDENT> raise AttributeError( "'%s' object has no attribute '%s'" % ( self.__class__.__name__, name) ) <NEW_LINE> <DEDENT> if name not in self.__dict__: <NEW_LINE> <INDENT> if not self.loaded: <NEW_LINE> <INDENT> self.load() <NEW_LINE> <DEDENT> <DEDENT> try: <NEW_LINE> <INDENT> return self.__dict__[name] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> raise AttributeError( "'%s' object has no attribute '%s'" % ( self.__class__.__name__, name ) ) <NEW_LINE> <DEDENT> <DEDENT> def __setattr__(self, name, values): <NEW_LINE> <INDENT> raise FlickrError("Readonly attribute") <NEW_LINE> <DEDENT> def get(self, key, *args, **kwargs): <NEW_LINE> <INDENT> return self.__dict__.get(key, *args, **kwargs) <NEW_LINE> <DEDENT> def __getitem__(self, key): <NEW_LINE> <INDENT> return self.__dict__[key] <NEW_LINE> <DEDENT> def __setitem__(self, key, value): <NEW_LINE> <INDENT> raise FlickrError("Read-only attribute") <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> vals = [] <NEW_LINE> for k in self.__class__.__display__: <NEW_LINE> <INDENT> val_found = False <NEW_LINE> try: <NEW_LINE> <INDENT> value = self.__dict__[k] <NEW_LINE> val_found = True <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> self.load() <NEW_LINE> try: <NEW_LINE> <INDENT> value = self.__dict__[k] <NEW_LINE> val_found = True <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> if not val_found: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> if isinstance(value, unicode): <NEW_LINE> <INDENT> value = value.encode("utf8") <NEW_LINE> <DEDENT> if isinstance(value, str): <NEW_LINE> <INDENT> value = "'%s'" % value <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> value = str(value) <NEW_LINE> <DEDENT> if len(value) > 20: <NEW_LINE> <INDENT> value = value[:20] + "..." <NEW_LINE> <DEDENT> vals.append("%s=%s" % (k, value)) <NEW_LINE> <DEDENT> return "%s(%s)" % (self.__class__.__name__, ", ".join(vals)) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return str(self) <NEW_LINE> <DEDENT> def getInfo(self): <NEW_LINE> <INDENT> return {} <NEW_LINE> <DEDENT> def load(self): <NEW_LINE> <INDENT> props = self.getInfo() <NEW_LINE> self.__dict__["loaded"] = True <NEW_LINE> self._set_properties(**props)
|
Base Object for Flickr API Objects.
Flickr Objects are dynamically created from the
named arguments given to the constructor.
|
6259901bd18da76e235b7842
|
class Network: <NEW_LINE> <INDENT> def __init__(self, root_factory=None): <NEW_LINE> <INDENT> self.root = root_factory(self) if root_factory else None <NEW_LINE> <DEDENT> @property <NEW_LINE> def root(self): <NEW_LINE> <INDENT> return self.__root <NEW_LINE> <DEDENT> @root.setter <NEW_LINE> def root(self, value): <NEW_LINE> <INDENT> if value is not None and not isinstance(value, NetworkNode): <NEW_LINE> <INDENT> raise TypeError('root must be NetworkNode') <NEW_LINE> <DEDENT> if value is not None and value.network != self: <NEW_LINE> <INDENT> raise ValueError('root network must match with self') <NEW_LINE> <DEDENT> self.__root = value <NEW_LINE> <DEDENT> def on_choose_name(self, node, name): <NEW_LINE> <INDENT> if not re.match('[A-z0-9_]+$', name): <NEW_LINE> <INDENT> raise NodeNameInvalidError('invalid node name {!r}'.format(name)) <NEW_LINE> <DEDENT> if node.parent: <NEW_LINE> <INDENT> for child in node.parent.children: <NEW_LINE> <INDENT> if child != node and child.name == name: <NEW_LINE> <INDENT> raise NodeNameConflictError( 'can not rename to {!r}, another node in the same hierarchy ' 'level occupies that name ({})'.format(name, child.path)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return name <NEW_LINE> <DEDENT> def on_attach_to(self, node, parent): <NEW_LINE> <INDENT> for child in parent.children: <NEW_LINE> <INDENT> if child != node and child.name == node.name: <NEW_LINE> <INDENT> raise NodeNameConflictError( 'can not attach to this parent node as another child node has ' 'the same name {!r}'.format(node.name)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def on_node_enters_network(self, node): <NEW_LINE> <INDENT> pass
|
Represents a node network constructed of #NetworkNode objects. Its main
purpose is to contain the root node. When a network is constructed, you
may specify a factory function that returns a new #NetworkNode, or you
may bind a root node after the network is constructed.
A network may place constraints on node naming and insertion. The
#NetworkNode will invoke the respective callbacks on the network to
ask for permission before an operation that changes the node's name or
location in the tree.
|
6259901b5e10d32532ce3ffa
|
class Square(Rectangle): <NEW_LINE> <INDENT> def __init__(self, size, x=0, y=0, id=None): <NEW_LINE> <INDENT> super().__init__(size, size, x, y, id) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "[Square] ({}) {}/{} - {}".format( self.id, self.x, self.y, self.width) <NEW_LINE> <DEDENT> @property <NEW_LINE> def size(self): <NEW_LINE> <INDENT> return self.width <NEW_LINE> <DEDENT> @size.setter <NEW_LINE> def size(self, size): <NEW_LINE> <INDENT> self.width = size <NEW_LINE> self.height = size <NEW_LINE> <DEDENT> def update(self, *args, **kwargs): <NEW_LINE> <INDENT> if args: <NEW_LINE> <INDENT> attrs = ["id", "size", "x", "y"] <NEW_LINE> for i, e in enumerate(args): <NEW_LINE> <INDENT> setattr(self, attrs[i], e) <NEW_LINE> <DEDENT> return <NEW_LINE> <DEDENT> for key, val in kwargs.items(): <NEW_LINE> <INDENT> if hasattr(self, key): <NEW_LINE> <INDENT> setattr(self, key, val) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def to_dictionary(self): <NEW_LINE> <INDENT> dict = {} <NEW_LINE> for key, val in vars(self).items(): <NEW_LINE> <INDENT> if key.startswith("_"): <NEW_LINE> <INDENT> if not key.endswith("width") and not key.endswith("height"): <NEW_LINE> <INDENT> idx = key.index("__") <NEW_LINE> dict[key[idx + 2:]] = val <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> dict["size"] = val <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> dict[key] = val <NEW_LINE> <DEDENT> <DEDENT> return dict
|
Represents a Square class that inherits from Rectangle,
with private instance attributes size, x, y,
and id (from Base).
|
6259901bbf627c535bcb229b
|
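A usage sketch for the `Square` class above, assuming the `Rectangle`/`Base` parents it inherits from store `id`, `x`, `y`, `width`, and `height` in the usual way; the exact `to_dictionary` output depends on how those parents name their attributes.

```python
s = Square(5, x=1, y=2, id=7)
print(s)                    # [Square] (7) 1/2 - 5
s.update(8, 10)             # positional form: id=8, size=10
s.update(size=3, y=4)       # keyword form
print(s.to_dictionary())    # roughly {'id': 8, 'size': 3, 'x': 1, 'y': 4}
```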
class WordsList(models.Model): <NEW_LINE> <INDENT> name = models.CharField(max_length=256) <NEW_LINE> description = models.CharField(max_length=256) <NEW_LINE> owner = models.ForeignKey(User, blank=True, null=True, on_delete=models.SET_NULL) <NEW_LINE> public = models.BooleanField(default=False) <NEW_LINE> order = models.IntegerField(default=0) <NEW_LINE> words = models.ManyToManyField(Word) <NEW_LINE> deleted = models.BooleanField(default=False) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return self.name
|
A list of words. The list is owned by a drawer, if it was created
by one.
Public lists are displayed as a choice for everyone on the server, while
private ones are visible only to the authenticated users who created them.
|
6259901b21a7993f00c66d66
|
class ThreeLevelCaseSplitter(BaseSplitter): <NEW_LINE> <INDENT> def __init__(self, data_path, img_to_mask, train_name='train', val_name='val', test_name='test', middle_folder='Image', img_filter=None, cache_path=None, force_cache=False): <NEW_LINE> <INDENT> self.middle_folder = middle_folder <NEW_LINE> super(ThreeLevelCaseSplitter, self).__init__(data_path=data_path, train_name=train_name, img_to_mask=img_to_mask, val_name=val_name, test_name=test_name, img_filter=img_filter, cache_path=cache_path, force_cache=force_cache) <NEW_LINE> <DEDENT> def split_file(self): <NEW_LINE> <INDENT> root = Path(self.data_path) <NEW_LINE> for folder in (root / self.train_name / self.middle_folder).iterdir(): <NEW_LINE> <INDENT> self.add_folder_to_data(folder, self.train_data_path) <NEW_LINE> <DEDENT> for folder in (root / self.val_name / self.middle_folder).iterdir(): <NEW_LINE> <INDENT> self.add_folder_to_data(folder, self.dev_data_path) <NEW_LINE> <DEDENT> for folder in (root / self.test_name / self.middle_folder).iterdir(): <NEW_LINE> <INDENT> self.add_folder_to_data(folder, self.test_data_path)
|
If the folder is two levels deep, meaning the structure is:
Root => train => Image/Mask => cases => imgs
=> dev => Image/Mask => cases => imgs
=> test => Image/Mask => cases => imgs
|
6259901b796e427e5384f569
|
class GxKMLSOEPage(CoClass): <NEW_LINE> <INDENT> _reg_clsid_ = GUID('{F2F7A0A5-EBAA-440D-972A-AA0BF7E821BB}') <NEW_LINE> _idlflags_ = [] <NEW_LINE> _typelib_path_ = typelib_path <NEW_LINE> _reg_typelib_ = ('{C0FC1503-7E6F-11D2-AABF-00C04FA375F1}', 10, 2)
|
KML SOE properties page.
|
6259901b8c3a8732951f734c
|
class BencodeParser(interface.FileObjectParser): <NEW_LINE> <INDENT> _BENCODE_RE = re.compile(b'd[0-9]') <NEW_LINE> NAME = 'bencode' <NEW_LINE> DATA_FORMAT = 'Bencoded file' <NEW_LINE> _plugin_classes = {} <NEW_LINE> def ParseFileObject(self, parser_mediator, file_object): <NEW_LINE> <INDENT> header_data = file_object.read(2) <NEW_LINE> if not self._BENCODE_RE.match(header_data): <NEW_LINE> <INDENT> raise errors.WrongParser('Not a valid Bencoded file.') <NEW_LINE> <DEDENT> bencode_file = BencodeFile() <NEW_LINE> try: <NEW_LINE> <INDENT> bencode_file.Open(file_object) <NEW_LINE> <DEDENT> except IOError as exception: <NEW_LINE> <INDENT> display_name = parser_mediator.GetDisplayName() <NEW_LINE> raise errors.WrongParser( '[{0:s}] unable to parse file: {1:s} with error: {2!s}'.format( self.NAME, display_name, exception)) <NEW_LINE> <DEDENT> if not bencode_file.decoded_values: <NEW_LINE> <INDENT> parser_mediator.ProduceExtractionWarning('missing decoded Bencode values') <NEW_LINE> return <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> for plugin in self._plugins: <NEW_LINE> <INDENT> if parser_mediator.abort: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> file_entry = parser_mediator.GetFileEntry() <NEW_LINE> display_name = parser_mediator.GetDisplayName(file_entry) <NEW_LINE> if not plugin.CheckRequiredKeys(bencode_file): <NEW_LINE> <INDENT> logger.debug('Skipped parsing file: {0:s} with plugin: {1:s}'.format( display_name, plugin.NAME)) <NEW_LINE> continue <NEW_LINE> <DEDENT> logger.debug('Parsing file: {0:s} with plugin: {1:s}'.format( display_name, plugin.NAME)) <NEW_LINE> try: <NEW_LINE> <INDENT> plugin.UpdateChainAndProcess( parser_mediator, bencode_file=bencode_file) <NEW_LINE> <DEDENT> except Exception as exception: <NEW_LINE> <INDENT> parser_mediator.ProduceExtractionWarning(( 'plugin: {0:s} unable to parse Bencode file with error: ' '{1!s}').format(plugin.NAME, exception)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> finally: <NEW_LINE> <INDENT> bencode_file.Close()
|
Parser for bencoded files.
|
6259901bac7a0e7691f732d4
|
class AnimatedDecorator(decorator.Decorator): <NEW_LINE> <INDENT> default_message = 'loading' <NEW_LINE> spinner = SpinnerController() <NEW_LINE> animation = AnimationController() <NEW_LINE> _enabled = True <NEW_LINE> def __init__(self, message=None, fpadding=space_wave): <NEW_LINE> <INDENT> super(AnimatedDecorator, self).__init__() <NEW_LINE> self.message = message <NEW_LINE> self.spinner.fpadding = fpadding <NEW_LINE> <DEDENT> @property <NEW_LINE> def enabled(self): <NEW_LINE> <INDENT> return AnimatedDecorator._enabled <NEW_LINE> <DEDENT> @enabled.setter <NEW_LINE> def enabled(self, state): <NEW_LINE> <INDENT> AnimatedDecorator._enabled = state <NEW_LINE> <DEDENT> def start(self, autopush=True): <NEW_LINE> <INDENT> if self.enabled: <NEW_LINE> <INDENT> if autopush: <NEW_LINE> <INDENT> self.push_message(self.message) <NEW_LINE> self.spinner.message = ' - '.join(self.animation.messages) <NEW_LINE> <DEDENT> if not self.spinner.running: <NEW_LINE> <INDENT> self.animation.thread = threading.Thread(target=_spinner, args=(self.spinner,)) <NEW_LINE> self.spinner.running = True <NEW_LINE> self.animation.thread.start() <NEW_LINE> sys.stdout = stream.Clean(sys.stdout, self.spinner.stream) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> @classmethod <NEW_LINE> def stop(cls): <NEW_LINE> <INDENT> if AnimatedDecorator._enabled: <NEW_LINE> <INDENT> if cls.spinner.running: <NEW_LINE> <INDENT> cls.spinner.running = False <NEW_LINE> cls.animation.thread.join() <NEW_LINE> <DEDENT> if any(cls.animation.messages): <NEW_LINE> <INDENT> cls.pop_message() <NEW_LINE> <DEDENT> sys.stdout = sys.__stdout__ <NEW_LINE> <DEDENT> <DEDENT> def __enter__(self): <NEW_LINE> <INDENT> if self.enabled: <NEW_LINE> <INDENT> self.animation.context += 1 <NEW_LINE> self.start() <NEW_LINE> <DEDENT> <DEDENT> def __exit__(self, *args): <NEW_LINE> <INDENT> if self.enabled: <NEW_LINE> <INDENT> self.animation.context -= 1 <NEW_LINE> self.pop_message() <NEW_LINE> if self.animation.context == 0: <NEW_LINE> <INDENT> self.stop() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.start(autopush=False) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> @classmethod <NEW_LINE> def push_message(cls, message): <NEW_LINE> <INDENT> return cls.animation.messages.append(message) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def pop_message(cls): <NEW_LINE> <INDENT> return cls.animation.messages.pop(-1) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def __call__(cls, *args, **kwargs): <NEW_LINE> <INDENT> obj = super(AnimatedDecorator, cls).__call__(*args, **kwargs) <NEW_LINE> if any(cls.instances): <NEW_LINE> <INDENT> last_instance = cls.instances[-1] <NEW_LINE> last_instance.message = last_instance.auto_message(args) <NEW_LINE> <DEDENT> elif isinstance(obj, cls): <NEW_LINE> <INDENT> obj.message = obj.auto_message(args) <NEW_LINE> <DEDENT> return obj <NEW_LINE> <DEDENT> def auto_message(self, args): <NEW_LINE> <INDENT> if any(args) and callable(args[0]) and not self.message: <NEW_LINE> <INDENT> return args[0].__name__ <NEW_LINE> <DEDENT> elif not self.message: <NEW_LINE> <INDENT> return self.default_message <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self.message
|
The animated decorator from hell.
You can use it in these ways:
@animated
def slow():
    heavy_stuff()
As well as with custom messages:
@animated('WOOOOW')
def download_the_universe():
    while True:
        pass
with animated('loool'):
    stuff_from_hell()
|
6259901bbe8e80087fbbfe5e
|
class PickupRequest(request.Request): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def get_url(cls): <NEW_LINE> <INDENT> return "/pickup" <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def get_usages(cls): <NEW_LINE> <INDENT> return [ apidocs.UsageDoc('?boxid=<i>boxid</i>&token=<i>token</i>', 'pick up a dropped box', '?boxid=aghib3hlc2FwaXIJCxIDQm94GEMM&token=1234') ] <NEW_LINE> <DEDENT> def find_my_dropped_box(self, boxes_by_location, boxid): <NEW_LINE> <INDENT> for dropped_box in boxes_by_location: <NEW_LINE> <INDENT> if dropped_box.boxid == boxid: <NEW_LINE> <INDENT> return dropped_box <NEW_LINE> <DEDENT> <DEDENT> return None <NEW_LINE> <DEDENT> def get(self): <NEW_LINE> <INDENT> if not self.required_field('boxid'): return <NEW_LINE> userid = self.userid_from_token() <NEW_LINE> if not userid: return <NEW_LINE> boxid = self.request.get('boxid') <NEW_LINE> last_drop = model.History.get_last_drop(boxid) <NEW_LINE> if last_drop.next_picker_timestamp: <NEW_LINE> <INDENT> self.conflict("box was already picked up") <NEW_LINE> return <NEW_LINE> <DEDENT> last_drop.next_picker_timestamp = datetime.datetime.utcnow() <NEW_LINE> last_drop.next_picker = userid <NEW_LINE> last_drop.put() <NEW_LINE> mybox = model.MyBox(userid = userid, boxid = boxid, picked_at = last_drop.drop_location, drop_message = last_drop.drop_message) <NEW_LINE> mybox.put() <NEW_LINE> boxes_by_location = model.DroppedBox.query_by_location(last_drop.drop_location.lat, last_drop.drop_location.lon, 18) <NEW_LINE> logging.info(boxes_by_location) <NEW_LINE> if boxes_by_location.count() == 0: <NEW_LINE> <INDENT> self.conflict("unable to find box in map") <NEW_LINE> return <NEW_LINE> <DEDENT> my_dropped_box = self.find_my_dropped_box(boxes_by_location, boxid) <NEW_LINE> my_dropped_box.delete() <NEW_LINE> self.emit_text("box picked up successfuly")
|
Pick up a dropped box
|
6259901bd18da76e235b7843
|
class CheckingAccount(Account): <NEW_LINE> <INDENT> def __init__(self, id, name): <NEW_LINE> <INDENT> super(CheckingAccount, self).__init__(id, name) <NEW_LINE> self.overdrafitlimit = 30000 <NEW_LINE> <DEDENT> def withdraw(self, amount): <NEW_LINE> <INDENT> if amount <= self.balance + self.overdrafitlimit: <NEW_LINE> <INDENT> self.balance -= amount <NEW_LINE> return amount <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError('You cannot withdraw so much') <NEW_LINE> <DEDENT> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return ('CheckingAccount[id={id}, name={name}, balance={balance}, overdrafitlimit={overdrafitlimit}]' .format(id=self.id, name=self.name, balance=self.balance, overdrafitlimit=self.overdrafitlimit))
|
This is a CheckingAccount derived from the Account class.
|
6259901b91af0d3eaad3ac10
|
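A usage sketch for `CheckingAccount` above; it assumes the `Account` base class stores `id`, `name`, and a numeric `balance` (treated as 0 here and set directly, since Account's API is not shown).

```python
acct = CheckingAccount(1, "alice")
acct.balance = 10000
acct.withdraw(25000)        # allowed: 25000 <= 10000 + 30000 overdraft limit
try:
    acct.withdraw(20000)    # exceeds the remaining overdraft headroom
except ValueError as exc:
    print(exc)              # "You cannot withdraw so much"
print(acct)
```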
class PointMatcher(object): <NEW_LINE> <INDENT> def __init__(self, patt): <NEW_LINE> <INDENT> self.re_patt = re.compile(r"([^#@]+)(#\d+|@[\d\.:]+)?") <NEW_LINE> self.set_patt(patt) <NEW_LINE> <DEDENT> def set_patt(self, patt): <NEW_LINE> <INDENT> self.patt = None <NEW_LINE> self.path = None <NEW_LINE> self.indextype = None <NEW_LINE> self.index = None <NEW_LINE> if not patt: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> patt = re.sub(r"(^|\s+)#.*", "", patt) <NEW_LINE> self.patt = patt.strip() <NEW_LINE> match = self.re_patt.match(self.patt) <NEW_LINE> if match: <NEW_LINE> <INDENT> self.path = re.compile(match.group(1)) <NEW_LINE> if match.group(2): <NEW_LINE> <INDENT> self.indextype, indexstr = match.group(2)[0], match.group(2)[1:] <NEW_LINE> if self.indextype: <NEW_LINE> <INDENT> if not ":" in indexstr: <NEW_LINE> <INDENT> self.index = float(indexstr) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> indexstr2 = indexstr.split(":", 1) <NEW_LINE> if not indexstr2[0]: indexstr2[0] = "-inf" <NEW_LINE> if not indexstr2[1]: indexstr2[1] = "inf" <NEW_LINE> self.index = [float(istr) for istr in indexstr2] <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> <DEDENT> def match_path(self, path): <NEW_LINE> <INDENT> return self.path.match(path) is not None <NEW_LINE> <DEDENT> def search_path(self, path): <NEW_LINE> <INDENT> return self.path.search(path) is not None <NEW_LINE> <DEDENT> def match_pos(self, p): <NEW_LINE> <INDENT> if not self.indextype: <NEW_LINE> <INDENT> accept = True <NEW_LINE> <DEDENT> elif self.indextype == "#": <NEW_LINE> <INDENT> if type(self.index) is float: <NEW_LINE> <INDENT> accept = (p.n == int(self.index)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> accept = (p.n >= self.index[0] and p.n < self.index[1]) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> if type(self.index) is float: <NEW_LINE> <INDENT> accept = (self.index >= p.xmin and self.index < p.xmax) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> accept = (p.xmax > self.index[0] and p.xmin <= self.index[1]) <NEW_LINE> <DEDENT> <DEDENT> return accept <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> s = "PointMatcher('%s' %s %s %s)" % (self.patt, self.path, self.indextype, self.index) <NEW_LINE> return s
|
System for selecting subsets of bins based on a search range
syntax extended from Professor weight files:
Path structure: /path/parts/to/histo[syst_variation]@xmin:xmax
or: /path/parts/to/histo[syst_variation]#nmin:nmax
TODO: Extend to multi-dimensional ranges i.e. @xmin:xmax,#nymin:nymax,...
|
6259901bbf627c535bcb229f
|
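A usage sketch for `PointMatcher` above; the histogram path and the bin object are made up, with `SimpleNamespace` standing in for whatever point/bin type carries `n`, `xmin`, and `xmax`.

```python
from types import SimpleNamespace

pm = PointMatcher("/analysis/.*/pt_histo@0:50")     # select bins with x in [0, 50]
print(pm.match_path("/analysis/signal/pt_histo"))   # True: the path regex matches
point = SimpleNamespace(n=3, xmin=20.0, xmax=30.0)  # stand-in bin object
print(pm.match_pos(point))                          # True: [20, 30) overlaps [0, 50]
```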
class Policy(object): <NEW_LINE> <INDENT> __metaclass__ = PolicyType <NEW_LINE> default = False <NEW_LINE> name = None <NEW_LINE> resource = None <NEW_LINE> providers = [] <NEW_LINE> signature = () <NEW_LINE> def __init__(self, resource): <NEW_LINE> <INDENT> self.resource = resource <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def conforms(self, resource): <NEW_LINE> <INDENT> return AND(*self.signature).test(resource) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def validate(self, resource): <NEW_LINE> <INDENT> a = AND(*self.signature) <NEW_LINE> if a.test(resource): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> msg = [ "The resource '%s' is using the policy '%s' but doesn't confirm to that policy" % (resource, self.name), ""] <NEW_LINE> msg.extend(a.describe(resource)) <NEW_LINE> raise error.NonConformingPolicy("\n".join(msg)) <NEW_LINE> <DEDENT> def get_provider(self, context): <NEW_LINE> <INDENT> valid = [p.isvalid(self, self.resource, context) for p in self.providers] <NEW_LINE> if valid.count(True) > 1: <NEW_LINE> <INDENT> raise error.TooManyProviders() <NEW_LINE> <DEDENT> if valid.count(True) == 0: <NEW_LINE> <INDENT> raise error.NoSuitableProviders() <NEW_LINE> <DEDENT> return self.providers[valid.index(True)]
|
A policy is a representation of a resource. A policy requires a
certain argument signature to be present before it can be used. There may
be multiple policies selected for a resource, in which case all argument
signatures must be conformant.
Providers must provide all selected policies to be a valid provider for
the resource.
|
6259901b796e427e5384f56d
|
class ReportedDebugContextBase(DebugContextBase): <NEW_LINE> <INDENT> def report_trigger(self, source_ref, target_ref): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def report_pretrigger(self, source_ref, target_ref): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def report_push_out(self, source_ref, target_ref, data): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def report_pull_in(self, source_ref, target_ref, data): <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> <DEDENT> def build_connection(self, source, target): <NEW_LINE> <INDENT> if isinstance(source, PushOut): <NEW_LINE> <INDENT> target = DebugPushOutTarget(self, ref(source), ref(target)) <NEW_LINE> <DEDENT> elif isinstance(target, PullIn): <NEW_LINE> <INDENT> source = DebugPullInSource(self, ref(source), ref(target)) <NEW_LINE> <DEDENT> target._hive_connect_target(source) <NEW_LINE> source._hive_connect_source(target) <NEW_LINE> <DEDENT> def build_trigger(self, source, target, pre): <NEW_LINE> <INDENT> target_func = target._hive_trigger_target() <NEW_LINE> if pre: <NEW_LINE> <INDENT> callable_target = DebugPretriggerTarget(self, ref(source), ref(target)) <NEW_LINE> source._hive_pretrigger_source(callable_target) <NEW_LINE> source._hive_pretrigger_source(target_func) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> callable_target = DebugTriggerTarget(self, ref(source), ref(target)) <NEW_LINE> source._hive_trigger_source(callable_target) <NEW_LINE> source._hive_trigger_source(target_func)
|
Base class for connection and trigger listener callbacks
|
6259901bd18da76e235b7845
|
class ToolbarGrabar(Gtk.Toolbar): <NEW_LINE> <INDENT> __gtype_name__ = 'ToolbarGrabar' <NEW_LINE> __gsignals__ = { "stop": (GObject.SIGNAL_RUN_FIRST, GObject.TYPE_NONE, [])} <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> Gtk.Toolbar.__init__(self) <NEW_LINE> self.modify_bg(0, get_colors("drawingplayer")) <NEW_LINE> self.colors = [get_color("BLANCO"), get_color("NARANJA")] <NEW_LINE> self.color = self.colors[0] <NEW_LINE> self.insert(get_separador(draw=False, ancho=3, expand=False), -1) <NEW_LINE> archivo = os.path.join(BASE_PATH, "Iconos", "stop.svg") <NEW_LINE> boton = get_boton(archivo, flip=False, pixels=24) <NEW_LINE> boton.set_tooltip_text("Detener") <NEW_LINE> self.insert(boton, -1) <NEW_LINE> self.insert(get_separador(draw=False, ancho=3, expand=False), -1) <NEW_LINE> item = Gtk.ToolItem() <NEW_LINE> self.label = Gtk.Label("Grabador Detenido.") <NEW_LINE> self.label.show() <NEW_LINE> item.add(self.label) <NEW_LINE> self.insert(item, -1) <NEW_LINE> self.show_all() <NEW_LINE> boton.connect("clicked", self.__emit_stop) <NEW_LINE> <DEDENT> def __emit_stop(self, widget=None, event=None): <NEW_LINE> <INDENT> self.stop() <NEW_LINE> self.emit("stop") <NEW_LINE> <DEDENT> def stop(self): <NEW_LINE> <INDENT> self.color = self.colors[0] <NEW_LINE> self.label.modify_fg(0, self.color) <NEW_LINE> self.label.set_text("Grabador Detenido.") <NEW_LINE> if self.get_visible(): <NEW_LINE> <INDENT> self.hide() <NEW_LINE> <DEDENT> <DEDENT> def set_info(self, datos): <NEW_LINE> <INDENT> self.label.set_text(datos) <NEW_LINE> self.__update() <NEW_LINE> <DEDENT> def __update(self): <NEW_LINE> <INDENT> if self.color == self.colors[0]: <NEW_LINE> <INDENT> self.color = self.colors[1] <NEW_LINE> <DEDENT> elif self.color == self.colors[1]: <NEW_LINE> <INDENT> self.color = self.colors[0] <NEW_LINE> <DEDENT> self.label.modify_fg(0, self.color) <NEW_LINE> if not self.get_visible(): <NEW_LINE> <INDENT> self.show()
|
Informs the user when recording
from a stream is in progress.
|
6259901b507cdc57c63a5b91
|
class Oauth2RequestAuthorizer(requests.auth.AuthBase): <NEW_LINE> <INDENT> def __init__(self, access_token): <NEW_LINE> <INDENT> self.access_token = access_token <NEW_LINE> <DEDENT> def __call__(self, request): <NEW_LINE> <INDENT> request.headers['Authorization'] = 'Bearer {}'.format( self.access_token) <NEW_LINE> return request
|
OAuth2 Bearer authorization header for requests
|
6259901b462c4b4f79dbc7f9
|
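A usage sketch for `Oauth2RequestAuthorizer` above with the `requests` library; the URL and token are placeholders.

```python
import requests

auth = Oauth2RequestAuthorizer(access_token="example-token")
response = requests.get("https://api.example.com/me", auth=auth)
# Every request sent with this authorizer carries: Authorization: Bearer example-token
```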
class CheckQosScripts(rootfs_boot.RootFSBootTest): <NEW_LINE> <INDENT> def runTest(self): <NEW_LINE> <INDENT> board = self.dev.board <NEW_LINE> board.sendline('\nopkg list | grep qos-scripts') <NEW_LINE> try: <NEW_LINE> <INDENT> board.expect('qos-scripts - ', timeout=4) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> assert False
|
Package "qos-scripts" is not installed.
|
6259901b5e10d32532ce3ffd
|
class WinkBinarySensorDevice(WinkDevice, BinarySensorDevice, Entity): <NEW_LINE> <INDENT> def __init__(self, wink, hass): <NEW_LINE> <INDENT> super().__init__(wink, hass) <NEW_LINE> if hasattr(self.wink, 'unit'): <NEW_LINE> <INDENT> self._unit_of_measurement = self.wink.unit() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._unit_of_measurement = None <NEW_LINE> <DEDENT> if hasattr(self.wink, 'capability'): <NEW_LINE> <INDENT> self.capability = self.wink.capability() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.capability = None <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def is_on(self): <NEW_LINE> <INDENT> return self.wink.state() <NEW_LINE> <DEDENT> @property <NEW_LINE> def device_class(self): <NEW_LINE> <INDENT> return SENSOR_TYPES.get(self.capability)
|
Representation of a Wink binary sensor.
|
6259901b5166f23b2e2441c3
|
@admin.register(User) <NEW_LINE> class UserAdmin(DjangoUserAdmin): <NEW_LINE> <INDENT> fieldsets = ( (None, {'fields': ('email', 'password')}), (_('Personal info'), {'fields': ('first_name', 'last_name')}), (_('Permissions'), {'fields': ('is_active', 'is_staff', 'is_superuser', 'groups', 'user_permissions')}), (_('Important dates'), {'fields': ('last_login', 'date_joined')}), ) <NEW_LINE> add_fieldsets = ( (None, { 'classes': ('wide',), 'fields': ('email', 'password1', 'password2'), }), ) <NEW_LINE> list_display = ('email', 'first_name', 'last_name', 'is_staff') <NEW_LINE> search_fields = ('email', 'first_name', 'last_name') <NEW_LINE> ordering = ('email',)
|
Define admin model for custom User model with no username field.
|
6259901b30c21e258be99604
|
@public <NEW_LINE> class WampRawSocketServerFactory(WampRawSocketFactory): <NEW_LINE> <INDENT> protocol = WampRawSocketServerProtocol <NEW_LINE> def __init__(self, factory, serializers=None): <NEW_LINE> <INDENT> if callable(factory): <NEW_LINE> <INDENT> self._factory = factory <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._factory = lambda: factory <NEW_LINE> <DEDENT> if serializers is None: <NEW_LINE> <INDENT> serializers = [] <NEW_LINE> try: <NEW_LINE> <INDENT> from autobahn.wamp.serializer import CBORSerializer <NEW_LINE> serializers.append(CBORSerializer(batched=True)) <NEW_LINE> serializers.append(CBORSerializer()) <NEW_LINE> <DEDENT> except ImportError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> from autobahn.wamp.serializer import MsgPackSerializer <NEW_LINE> serializers.append(MsgPackSerializer(batched=True)) <NEW_LINE> serializers.append(MsgPackSerializer()) <NEW_LINE> <DEDENT> except ImportError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> from autobahn.wamp.serializer import UBJSONSerializer <NEW_LINE> serializers.append(UBJSONSerializer(batched=True)) <NEW_LINE> serializers.append(UBJSONSerializer()) <NEW_LINE> <DEDENT> except ImportError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> from autobahn.wamp.serializer import JsonSerializer <NEW_LINE> serializers.append(JsonSerializer(batched=True)) <NEW_LINE> serializers.append(JsonSerializer()) <NEW_LINE> <DEDENT> except ImportError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> if not serializers: <NEW_LINE> <INDENT> raise Exception("could not import any WAMP serializers") <NEW_LINE> <DEDENT> <DEDENT> self._serializers = {} <NEW_LINE> for ser in serializers: <NEW_LINE> <INDENT> self._serializers[ser.RAWSOCKET_SERIALIZER_ID] = ser
|
Twisted-based WAMP-over-RawSocket server protocol factory.
|
6259901bd18da76e235b7846
|
class MobilePlans(db.Model): <NEW_LINE> <INDENT> __tablename__ = tablename <NEW_LINE> id = db.Column(db.Integer, Sequence('mobileplan_seq'), primary_key=True) <NEW_LINE> name = db.Column(db.String(250)) <NEW_LINE> sms = db.Column(db.String(250)) <NEW_LINE> data = db.Column(db.String(250)) <NEW_LINE> voice = db.Column(db.String(250)) <NEW_LINE> price = db.Column(db.String(250))
|
Database model describing mobile plans (name, sms, data, voice, price).
|
6259901bbf627c535bcb22a3
|
class NonNegativeInteger(Integer): <NEW_LINE> <INDENT> errormsg = _('Value must be a non-negative integer, not %r.') <NEW_LINE> def setValue(self, v): <NEW_LINE> <INDENT> if v < 0: <NEW_LINE> <INDENT> self.error(v) <NEW_LINE> <DEDENT> super(NonNegativeInteger, self).setValue(v)
|
Value must be a non-negative integer.
|
6259901b6fece00bbaccc7a8
|
class Number(BuiltinField): <NEW_LINE> <INDENT> @property <NEW_LINE> def _model_type(self): <NEW_LINE> <INDENT> return six.integer_types + (float,)
|
Combined Integer and Float field
|
6259901b287bf620b62729db
|
class Application: <NEW_LINE> <INDENT> url = "https://bilibili.com" <NEW_LINE> cookie_file = "cookie.txt" <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> cookie = Path(self.cookie_file).read_text().rstrip() <NEW_LINE> uid = dict([ item.split("=") for item in cookie.split("; ") ]).get("DedeUserID") <NEW_LINE> self.__session = r.Session() <NEW_LINE> self.__session.headers.update({ "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:67.0) Gecko/20100101 Firefox/67.0", "Cookie": cookie, }) <NEW_LINE> self.__uid = uid <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> package = { "pn": 1, "ps": 100, "up_mid": self.__uid, "is_space": 0, "jsonp": "jsonp", } <NEW_LINE> response = self.__session.get( "http://api.bilibili.com/medialist/gateway/base/created", headers=HEADERS, params=package) <NEW_LINE> favlists = response.json().get("data").get("list") <NEW_LINE> stack_fav = [] <NEW_LINE> for d in favlists: <NEW_LINE> <INDENT> stack_fav.append( Favorate(d) ) <NEW_LINE> <DEDENT> for fav in stack_fav: <NEW_LINE> <INDENT> fav.get_videos(self.__session, fav.media_count, fav.id) <NEW_LINE> <DEDENT> for fav in stack_fav: <NEW_LINE> <INDENT> fav.save()
|
Bilibili Client
|
6259901b30c21e258be99606
|
class DeploysRequest(Request, Additive): <NEW_LINE> <INDENT> dirname: str = "deploys" <NEW_LINE> endpoint: str = "deployments" <NEW_LINE> def mkdeploy(self, data): <NEW_LINE> <INDENT> deploy = Deploy(data) <NEW_LINE> deploy.statuses_request = StatusRequest(deploy) <NEW_LINE> deploy.commit_request = CommitRequest(deploy.sha) <NEW_LINE> return deploy <NEW_LINE> <DEDENT> @property <NEW_LINE> def data(self): <NEW_LINE> <INDENT> return [ self.mkdeploy(x) for x in self.request_data ]
|
Class for requesting a list of deployments.
https://docs.github.com/en/free-pro-team@latest/rest/reference/repos#deployments
|
6259901b507cdc57c63a5b95
|
class TenantProperties(): <NEW_LINE> <INDENT> def __init__(self, credentials): <NEW_LINE> <INDENT> self.manager = clients.Manager(credentials) <NEW_LINE> self.creds = self.manager.credentials <NEW_LINE> self.network = None <NEW_LINE> self.subnet = None <NEW_LINE> self.router = None <NEW_LINE> self.security_groups = {} <NEW_LINE> self.servers = list() <NEW_LINE> <DEDENT> def set_network(self, network, subnet, router): <NEW_LINE> <INDENT> self.network = network <NEW_LINE> self.subnet = subnet <NEW_LINE> self.router = router
|
helper class to save tenant details
id
credentials
network
subnet
security groups
servers
access point
|
6259901b56b00c62f0fb36b2
|
class Scene(namedtuple('Scene', ['scene'])): <NEW_LINE> <INDENT> TYPE = 'scene' <NEW_LINE> def __repr__(self): <NEW_LINE> <INDENT> return self.scene
|
Signifies a new scene
Parameters
----------
act : int
act number
|
6259901c21a7993f00c66d72
|
class PKCS5PaddingTestCase(T.TestCase): <NEW_LINE> <INDENT> VECTORS = ( ("", 1, "\x01"), ("abcd", 8, "abcd\x04\x04\x04\x04"), ("abcdefg\x00", 16, "abcdefg\x00\x08\x08\x08\x08\x08\x08\x08\x08"), ) <NEW_LINE> def test_pad(self): <NEW_LINE> <INDENT> for unpadded, bs, padded in self.VECTORS: <NEW_LINE> <INDENT> T.assert_equal(padding.pkcs5_pad(unpadded, bs), padded) <NEW_LINE> <DEDENT> <DEDENT> def test_unpad(self): <NEW_LINE> <INDENT> for unpadded, _, padded in self.VECTORS: <NEW_LINE> <INDENT> T.assert_equal(padding.pkcs5_unpad(padded), unpadded) <NEW_LINE> <DEDENT> T.assert_equal(padding.pkcs5_unpad(""), "")
|
Test our PKCS#5 padding
|
6259901c8c3a8732951f7359
|
class PlatformNotSupported(PlatformError): <NEW_LINE> <INDENT> pass
|
Base class for the unsupported platform errors.
|
6259901cac7a0e7691f732e0
|
class IpFilter(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.Switch = None <NEW_LINE> self.FilterType = None <NEW_LINE> self.Filters = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.Switch = params.get("Switch") <NEW_LINE> self.FilterType = params.get("FilterType") <NEW_LINE> self.Filters = params.get("Filters")
|
IP blacklist/whitelist.
|
6259901c0a366e3fb87dd7ee
|
class StravaRedirect(generic.RedirectView): <NEW_LINE> <INDENT> def get_redirect_url(self, approval_prompt="auto", scope="write", *args, **kwargs): <NEW_LINE> <INDENT> from website.apps.stravauth.utils import get_stravauth_url <NEW_LINE> return get_stravauth_url(approval_prompt, scope)
|
Redirects to the Strava oauth page
|
6259901c796e427e5384f577
|
class CGPCholCache(CParamObject): <NEW_LINE> <INDENT> __swig_setmethods__ = {} <NEW_LINE> for _s in [CParamObject]: <NEW_LINE> <INDENT> __swig_setmethods__.update(getattr(_s, '__swig_setmethods__', {})) <NEW_LINE> <DEDENT> __setattr__ = lambda self, name, value: _swig_setattr(self, CGPCholCache, name, value) <NEW_LINE> __swig_getmethods__ = {} <NEW_LINE> for _s in [CParamObject]: <NEW_LINE> <INDENT> __swig_getmethods__.update(getattr(_s, '__swig_getmethods__', {})) <NEW_LINE> <DEDENT> __getattr__ = lambda self, name: _swig_getattr(self, CGPCholCache, name) <NEW_LINE> __repr__ = _swig_repr <NEW_LINE> def __init__(self, gp): <NEW_LINE> <INDENT> this = _core.new_CGPCholCache(gp) <NEW_LINE> try: <NEW_LINE> <INDENT> self.this.append(this) <NEW_LINE> <DEDENT> except Exception: <NEW_LINE> <INDENT> self.this = this <NEW_LINE> <DEDENT> <DEDENT> __swig_destroy__ = _core.delete_CGPCholCache <NEW_LINE> __del__ = lambda self: None <NEW_LINE> def getCovar(self): <NEW_LINE> <INDENT> return _core.CGPCholCache_getCovar(self) <NEW_LINE> <DEDENT> def setCovar(self, covar): <NEW_LINE> <INDENT> return _core.CGPCholCache_setCovar(self, covar) <NEW_LINE> <DEDENT> def rgetKEff(self): <NEW_LINE> <INDENT> return _core.CGPCholCache_rgetKEff(self) <NEW_LINE> <DEDENT> def rgetKEffChol(self): <NEW_LINE> <INDENT> return _core.CGPCholCache_rgetKEffChol(self) <NEW_LINE> <DEDENT> def rgetKEffInv(self): <NEW_LINE> <INDENT> return _core.CGPCholCache_rgetKEffInv(self) <NEW_LINE> <DEDENT> def rgetYeffective(self): <NEW_LINE> <INDENT> return _core.CGPCholCache_rgetYeffective(self) <NEW_LINE> <DEDENT> def rgetKEffInvY(self): <NEW_LINE> <INDENT> return _core.CGPCholCache_rgetKEffInvY(self) <NEW_LINE> <DEDENT> def getDKEffInv_KEffInvYYKinv(self): <NEW_LINE> <INDENT> return _core.CGPCholCache_getDKEffInv_KEffInvYYKinv(self)
|
Proxy of C++ limix::CGPCholCache class.
|
6259901c30c21e258be9960c
|
class NetworkUpdateSelfConnection(NetworkUpdateConnection): <NEW_LINE> <INDENT> def __init__(self, timestamp, newConn, networkId): <NEW_LINE> <INDENT> NetworkUpdateConnection.__init__(self, timestamp, None, newConn, networkId) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "You are now connected to %s" % (str(self.newConnection))
|
The user has made a new connection
|
6259901cd164cc6175821d77
|
class MPBlockConnector(object): <NEW_LINE> <INDENT> def __init__(self, send_array, recv_array, send_ev, recv_ev, conf_ev, remote_conf_ev): <NEW_LINE> <INDENT> self._send_array = send_array <NEW_LINE> self._recv_array = recv_array <NEW_LINE> self._send_ev = send_ev <NEW_LINE> self._recv_ev = recv_ev <NEW_LINE> self._conf_ev = conf_ev <NEW_LINE> self._remote_conf_ev = remote_conf_ev <NEW_LINE> <DEDENT> def send(self, data): <NEW_LINE> <INDENT> self._remote_conf_ev.wait() <NEW_LINE> self._send_array[:] = data <NEW_LINE> self._remote_conf_ev.clear() <NEW_LINE> self._send_ev.set() <NEW_LINE> <DEDENT> def recv(self, data, quit_ev): <NEW_LINE> <INDENT> while self._recv_ev.wait(0.01) != True: <NEW_LINE> <INDENT> if self._recv_ev.is_set(): <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> if quit_ev.is_set(): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> data[:] = self._recv_array[:] <NEW_LINE> self._recv_ev.clear() <NEW_LINE> self._conf_ev.set() <NEW_LINE> return True <NEW_LINE> <DEDENT> def init_runner(self, ctx): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def make_pair(self, ctype, sizes, ids): <NEW_LINE> <INDENT> array1 = Array(ctype, sizes[0]) <NEW_LINE> array2 = Array(ctype, sizes[1]) <NEW_LINE> ev1 = Event() <NEW_LINE> ev2 = Event() <NEW_LINE> ev3 = Event() <NEW_LINE> ev4 = Event() <NEW_LINE> ev3.set() <NEW_LINE> ev4.set() <NEW_LINE> return (MPBlockConnector(array1, array2, ev1, ev2, ev3, ev4), MPBlockConnector(array2, array1, ev2, ev1, ev4, ev3))
|
Handles directed data exchange between two blocks using the
multiprocessing module.
|
6259901cd18da76e235b784a
|
class Queue(BaseQueue): <NEW_LINE> <INDENT> prefix = "entry-" <NEW_LINE> def __init__(self, client, path): <NEW_LINE> <INDENT> super(Queue, self).__init__(client, path) <NEW_LINE> self._children = [] <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return super(Queue, self).__len__() <NEW_LINE> <DEDENT> def get(self): <NEW_LINE> <INDENT> self._ensure_paths() <NEW_LINE> return self.client.retry(self._inner_get) <NEW_LINE> <DEDENT> def _inner_get(self): <NEW_LINE> <INDENT> if not self._children: <NEW_LINE> <INDENT> self._children = self.client.retry( self.client.get_children, self.path) <NEW_LINE> self._children = sorted(self._children) <NEW_LINE> <DEDENT> if not self._children: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> name = self._children[0] <NEW_LINE> try: <NEW_LINE> <INDENT> data, stat = self.client.get(self.path + "/" + name) <NEW_LINE> <DEDENT> except NoNodeError: <NEW_LINE> <INDENT> raise ForceRetryError() <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> self.client.delete(self.path + "/" + name) <NEW_LINE> <DEDENT> except NoNodeError: <NEW_LINE> <INDENT> raise ForceRetryError() <NEW_LINE> <DEDENT> self._children.pop(0) <NEW_LINE> return data <NEW_LINE> <DEDENT> def put(self, value, priority=100): <NEW_LINE> <INDENT> self._check_put_arguments(value, priority) <NEW_LINE> self._ensure_paths() <NEW_LINE> path = '{path}/{prefix}{priority:03d}-'.format( path=self.path, prefix=self.prefix, priority=priority) <NEW_LINE> self.client.create(path, value, sequence=True)
|
A distributed queue with optional priority support.
This queue does not offer reliable consumption. An entry is removed
from the queue prior to being processed. So if an error occurs, the
consumer has to re-queue the item or it will be lost.
|
6259901cbf627c535bcb22ab
|
class WormHoleFile(OffsetedFile): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> if 'wormhole' in kwargs: <NEW_LINE> <INDENT> self.target, self.source, self.wormlen = kwargs.pop('wormhole') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.target, self.source, self.wormlen = [0, 0, 0] <NEW_LINE> <DEDENT> super(WormHoleFile, self).__init__(*args, **kwargs) <NEW_LINE> <DEDENT> def read(self, length=None): <NEW_LINE> <INDENT> if length == None: <NEW_LINE> <INDENT> length = self.offset + self.length - self.pointer <NEW_LINE> <DEDENT> tmp = self.pointer <NEW_LINE> FutureOffset = self.pointer + length <NEW_LINE> if (tmp >= self.target + self.wormlen) or (FutureOffset < self.target): <NEW_LINE> <INDENT> data = super(WormHoleFile, self).read(length) <NEW_LINE> <DEDENT> elif tmp >= self.target: <NEW_LINE> <INDENT> self.seek(tmp - self.target + self.source) <NEW_LINE> if FutureOffset < self.target + self.wormlen: <NEW_LINE> <INDENT> data = super(WormHoleFile, self).read( length) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> inWorm_len = self.target + self.wormlen - tmp <NEW_LINE> outWorm_len = FutureOffset - self.target - self.wormlen <NEW_LINE> inWorm = super(WormHoleFile, self).read(inWorm_len) <NEW_LINE> self.seek(self.target + self.wormlen) <NEW_LINE> outWorm = super(WormHoleFile, self).read(outWorm_len) <NEW_LINE> inWorm_bytes = inWorm if isinstance( inWorm, bytes) else inWorm.encode() <NEW_LINE> outWorm_bytes = outWorm if isinstance( outWorm, bytes) else outWorm.encode() <NEW_LINE> data = inWorm_bytes + outWorm_bytes <NEW_LINE> <DEDENT> <DEDENT> elif FutureOffset < self.target + self.wormlen: <NEW_LINE> <INDENT> preWorm_len = self.target - tmp <NEW_LINE> inWorm_len = FutureOffset - self.target <NEW_LINE> preWorm = super(WormHoleFile, self).read(preWorm_len) <NEW_LINE> self.seek(self.source) <NEW_LINE> inWorm = super(WormHoleFile, self).read(inWorm_len) <NEW_LINE> data = preWorm + inWorm <NEW_LINE> <DEDENT> elif FutureOffset > self.target + self.wormlen: <NEW_LINE> <INDENT> preWorm_len = self.target - tmp <NEW_LINE> inWorm_len = self.wormlen <NEW_LINE> postWorm_len = FutureOffset - self.target - self.wormlen <NEW_LINE> preWorm = super(WormHoleFile, self).read(preWorm_len) <NEW_LINE> self.seek(self.source) <NEW_LINE> inWorm = super(WormHoleFile, self).read(inWorm_len) <NEW_LINE> self.seek(self.target + inWorm_len) <NEW_LINE> postWorm = super(WormHoleFile, self).read(postWorm_len) <NEW_LINE> data = preWorm + inWorm + postWorm <NEW_LINE> <DEDENT> self.seek(FutureOffset) <NEW_LINE> return data
|
Redirects an offset-range to another offset in a file. Because
everybody likes wormholes.
I even chose that name before wormholes were mainstream (Interstellar)
|
6259901c6fece00bbaccc7b0
|
class Boolean(Argument): <NEW_LINE> <INDENT> serializer = serializers.Boolean() <NEW_LINE> def clean(self, instance, value): <NEW_LINE> <INDENT> if isinstance(value, six.string_types): <NEW_LINE> <INDENT> if value.lower() in ("1", "yes", "on", "true"): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> return bool(value)
|
Represents a boolean. "1", "yes", "on" and "true" are all considered
to be True boolean values. Anything else is False.
|
6259901c5166f23b2e2441cd
|
class KnownInstrumentOptimizationProblem: <NEW_LINE> <INDENT> def __init__(self, inst_array, start_dt, end_dt): <NEW_LINE> <INDENT> prices = [] <NEW_LINE> for iname in inst_array: <NEW_LINE> <INDENT> i = Instrument.get_instrument(iname) <NEW_LINE> price_i = i.get_bar_collection_timeframe(Period.get_period("H1"), start_dt, end_dt) <NEW_LINE> price_x = map(lambda x: x.close, price_i) <NEW_LINE> prices.append(price_x) <NEW_LINE> <DEDENT> z = [] <NEW_LINE> for i in prices: <NEW_LINE> <INDENT> z.append(np.matrix(i).T) <NEW_LINE> <DEDENT> self._p_matrix = np.hstack(z) <NEW_LINE> <DEDENT> def prep_optimization_config(self): <NEW_LINE> <INDENT> A = self._p_matrix <NEW_LINE> retmx = A / np.vstack((A[0].astype(float), A[:-1].astype(float))) -1 <NEW_LINE> ret_vc = np.array(np.mean(retmx, axis=0))[0] <NEW_LINE> cov_mx = np.cov(retmx, rowvar=0) <NEW_LINE> self._config = OptimizerConfiguration(cov_mx, ret_vc) <NEW_LINE> <DEDENT> def optimize(self, **kwargs): <NEW_LINE> <INDENT> opt = Optimizer(self._config, ConstrainedReturnOptimizationPolicy()) <NEW_LINE> return opt.optimize(**kwargs)
|
The KnownInstrumentOptimizationProblem class encapsulates an optimization problem where the
associated financial securities are already known to the system and some price data is
available in the database.
|
6259901c9b70327d1c57fb7b
|
class DatasetRenderer(DatasetAction): <NEW_LINE> <INDENT> def __init__(self, dest, size=256, room_size=6.05): <NEW_LINE> <INDENT> self.dest = dest <NEW_LINE> self.size = size <NEW_LINE> self.count = 0 <NEW_LINE> self.renderer = TopDownView(size=self.size, length_cap=room_size) <NEW_LINE> data_dir = utils.get_data_root_dir() <NEW_LINE> self.dest_dir = f"{data_dir}/{dest}" <NEW_LINE> if not os.path.exists(self.dest_dir): <NEW_LINE> <INDENT> os.makedirs(self.dest_dir) <NEW_LINE> <DEDENT> <DEDENT> def step(self, houses, num_threads=1): <NEW_LINE> <INDENT> rooms = [] <NEW_LINE> def render_and_save(room, dest): <NEW_LINE> <INDENT> img, data = self.renderer.render(room) <NEW_LINE> with open(f"{self.dest_dir}/{dest}.pkl","wb") as f: <NEW_LINE> <INDENT> pickle.dump((data, room), f, pickle.HIGHEST_PROTOCOL) <NEW_LINE> <DEDENT> img = m.toimage(img, cmin=0, cmax=1) <NEW_LINE> img.save(f"{self.dest_dir}/{dest}.jpg") <NEW_LINE> <DEDENT> for house in houses: <NEW_LINE> <INDENT> if house.rooms: <NEW_LINE> <INDENT> for room in house.rooms: <NEW_LINE> <INDENT> if num_threads==1: <NEW_LINE> <INDENT> render_and_save(room, self.count) <NEW_LINE> print(f"Rendering room {self.count}...", end="\r") <NEW_LINE> if hasattr(room, 'augmented'): <NEW_LINE> <INDENT> for (i, room_a) in enumerate(room.augmented): <NEW_LINE> <INDENT> self.count += 1 <NEW_LINE> render_and_save(room_a, self.count) <NEW_LINE> <DEDENT> <DEDENT> self.count += 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> rooms.append(room) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> yield house <NEW_LINE> <DEDENT> def render(i): <NEW_LINE> <INDENT> room = rooms[i] <NEW_LINE> img, data = self.renderer.render(room) <NEW_LINE> with open(f"{self.dest_dir}/{self.count+i}.pkl","wb") as f: <NEW_LINE> <INDENT> pickle.dump((data, room), f, pickle.HIGHEST_PROTOCOL) <NEW_LINE> <DEDENT> img = m.toimage(img, cmin=0, cmax=1) <NEW_LINE> img.save(f"{self.dest_dir}/{self.count+i}.jpg") <NEW_LINE> print(f"Rendering room {self.count+i}...") <NEW_LINE> <DEDENT> if num_threads > 1: <NEW_LINE> <INDENT> raise NotImplementedError <NEW_LINE> pool = ThreadPool(num_threads) <NEW_LINE> _ = pool.map(render, range(len(rooms))) <NEW_LINE> self.count += len(rooms)
|
Pre-render top-down view of
each room in the house (floor, walls and objects)
|
6259901c796e427e5384f57b
|
class Queue(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.__front = None <NEW_LINE> self.__rear = None <NEW_LINE> self.__size = 0 <NEW_LINE> <DEDENT> def enqueue(self, newItem): <NEW_LINE> <INDENT> newNode = Node(newItem,None) <NEW_LINE> if self.isEmpty(): <NEW_LINE> <INDENT> self.__front = newNode <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.__rear.next = newNode <NEW_LINE> <DEDENT> self.__rear = newNode <NEW_LINE> self.__size += 1 <NEW_LINE> <DEDENT> def dequeue(self): <NEW_LINE> <INDENT> if self.isEmpty(): <NEW_LINE> <INDENT> print("Queue is empty. Abort operation!!") <NEW_LINE> return "" <NEW_LINE> <DEDENT> elif self.__size == 1: <NEW_LINE> <INDENT> oldItem = self.__front.data <NEW_LINE> self.__front = None <NEW_LINE> self.__rear = None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> oldItem = self.__front.data <NEW_LINE> self.__front = self.__front.next <NEW_LINE> <DEDENT> self.__size-=1 <NEW_LINE> return oldItem <NEW_LINE> <DEDENT> def peek(self): <NEW_LINE> <INDENT> if self.isEmpty(): <NEW_LINE> <INDENT> print("Queue is empty. Abort operation!!") <NEW_LINE> return "" <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self.__front.data <NEW_LINE> <DEDENT> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return(self.__size) <NEW_LINE> <DEDENT> def isEmpty(self): <NEW_LINE> <INDENT> return(self.__size == 0) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> result = '' <NEW_LINE> probe = self.__front <NEW_LINE> while probe != None: <NEW_LINE> <INDENT> result += str(probe.data)+"" <NEW_LINE> probe = probe.next <NEW_LINE> <DEDENT> return result
|
Link-based queue implementation.
|
6259901ca8ecb0332587201a
|
class FlavorError(ValueError): <NEW_LINE> <INDENT> pass
|
Unsupported or unavailable flavor or flavor conversion.
This exception is raised when an unsupported or unavailable flavor
is given to a dataset, or when a conversion of data between two
given flavors is not supported nor available.
|
6259901cbe8e80087fbbfe70
|
class ServerResponse(object): <NEW_LINE> <INDENT> def __init__(self, line): <NEW_LINE> <INDENT> self.code = None <NEW_LINE> self.message = [] <NEW_LINE> self.will_continue=False <NEW_LINE> line = line.split(' ') <NEW_LINE> if len(line) > 0: <NEW_LINE> <INDENT> self.code = line[0] <NEW_LINE> l = self.code.split("-") <NEW_LINE> if (len(l) > 1): <NEW_LINE> <INDENT> self.will_continue = True <NEW_LINE> self.code = l[0] <NEW_LINE> self.message.append("-".join(l[1:])) <NEW_LINE> <DEDENT> <DEDENT> if len(line) > 1: <NEW_LINE> <INDENT> self.message.extend(line[1:]) <NEW_LINE> <DEDENT> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return "[{}] {} {}".format(self.will_continue, self.code, self.comment()) <NEW_LINE> <DEDENT> def comment(self): <NEW_LINE> <INDENT> return " ".join(self.message)
|
SMTP Server message.
It always begins with a code
(sometimes followed by a dash (-),
which means the server will send another message),
and possibly server's comment.
|
6259901c5e10d32532ce4004
|
class MetaBasicParser(type): <NEW_LINE> <INDENT> def __new__(metacls, name, bases, namespace): <NEW_LINE> <INDENT> global _MetaBasicParser <NEW_LINE> cls = type.__new__(metacls, name, bases, namespace) <NEW_LINE> if len(bases) > 1: <NEW_LINE> <INDENT> raise TypeError("%s must inherit from an unique parent," " use Grammar for aggregation" % name) <NEW_LINE> <DEDENT> if len(bases) == 1: <NEW_LINE> <INDENT> strbase = bases[0].__name__ <NEW_LINE> if strbase not in _MetaBasicParser: <NEW_LINE> <INDENT> raise TypeError("metaclass of %s not found" % bases[0].__name__) <NEW_LINE> <DEDENT> clsbase = _MetaBasicParser[strbase] <NEW_LINE> if hasattr(clsbase, '_rules'): <NEW_LINE> <INDENT> cls._rules = clsbase._rules.new_child() <NEW_LINE> <DEDENT> if hasattr(clsbase, '_hooks'): <NEW_LINE> <INDENT> cls._hooks = clsbase._hooks.new_child() <NEW_LINE> <DEDENT> <DEDENT> if '_rules' in namespace: <NEW_LINE> <INDENT> cls._rules.update(namespace['_rules']) <NEW_LINE> <DEDENT> if '_hooks' in namespace: <NEW_LINE> <INDENT> cls._hooks.update(namespace['_hooks']) <NEW_LINE> <DEDENT> _MetaBasicParser[name] = cls <NEW_LINE> return cls
|
Metaclass for all parser.
|
6259901c6fece00bbaccc7b4
|
@parser(Specs.swift_proxy_server_conf) <NEW_LINE> class SwiftProxyServerConf(IniConfigFile): <NEW_LINE> <INDENT> pass
|
This class parses the content of ``/etc/swift/proxy-server.conf``.
The swift proxy-server configuration file
``/etc/swift/proxy-server.conf`` is in the standard 'ini' format and is
read by the :py:class:`insights.core.IniConfigFile` parser class.
Sample configuration file::
[DEFAULT]
bind_port = 8080
bind_ip = 172.20.15.20
workers = 0
[pipeline:main]
pipeline = catch_errors healthcheck proxy-logging cache ratelimit
[app:proxy-server]
use = egg:swift # proxy
set log_name = proxy-server
set log_facility = LOG_LOCAL1
[filter:catch_errors]
use = egg:swift # catch_errors
Examples:
>>> proxy_server_conf = shared[SwiftProxyServerConf]
>>> 'app:proxy-server' in proxy_server_conf
True
>>> proxy_server_conf.get('filter:catch_errors', 'use')
'egg:swift#catch_errors'
>>> proxy_server_conf.getint('DEFAULT', 'bind_port')
8080
|
6259901cbf627c535bcb22b1
|
class Paginator(object): <NEW_LINE> <INDENT> def __init__(self, data): <NEW_LINE> <INDENT> self.data = deepcopy(data) <NEW_LINE> self.data_text = 'This text will appear on the page.' <NEW_LINE> self.first_page_number = 1 <NEW_LINE> <DEDENT> def get_data(self, sort_info=None): <NEW_LINE> <INDENT> if not self.data: <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> if sort_info is not None: <NEW_LINE> <INDENT> stripped_info = self._strip_sort_info(sort_info) <NEW_LINE> self._sort_data(stripped_info) <NEW_LINE> <DEDENT> return self.data <NEW_LINE> <DEDENT> def _strip_sort_info(self, sort_info): <NEW_LINE> <INDENT> text = r'Random text to show\n' <NEW_LINE> desc_direction = 'desc' <NEW_LINE> first_number = 145 <NEW_LINE> visited = set()
|
Calculates a result set, possibly sorted and paginated.
|
6259901c287bf620b62729e9
|
class Clients(db.Model): <NEW_LINE> <INDENT> __clients__ = "clients" <NEW_LINE> id = db.Column( db.Integer, primary_key=True, autoincrement=True, comment="ID клиента" ) <NEW_LINE> name = db.Column(db.String, nullable=False, comment="Имя клиента") <NEW_LINE> is_vip = db.Column(db.Boolean, nullable=False, comment="Флаг VIP-клиента") <NEW_LINE> orders = relationship("Orders", cascade='all', backref="client") <NEW_LINE> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super(Clients, self).__init__(*args, **kwargs)
|
Table of clients.
|
6259901c8c3a8732951f7363
|
class Book(BaseModel): <NEW_LINE> <INDENT> id = models.CharField( max_length=30, primary_key=True, help_text=( "The primary identifier of this title, we get this value " "from publishers." ) ) <NEW_LINE> def __unicode__(self): <NEW_LINE> <INDENT> return u"Book %s" % self.id <NEW_LINE> <DEDENT> class Meta: <NEW_LINE> <INDENT> ordering = ['id']
|
Main storage for a Book object.
|
6259901cbe8e80087fbbfe74
|
class SpeechProjectsLocationsLogDataStatsListRequest(_messages.Message): <NEW_LINE> <INDENT> parent = _messages.StringField(1, required=True)
|
A SpeechProjectsLocationsLogDataStatsListRequest object.
Fields:
parent: Required. Resource name of the parent. Has the format :-
"projects/{project_id}/locations/{location_id}"
|
6259901c925a0f43d25e8e43
|
class Process(object): <NEW_LINE> <INDENT> def __init__(self, name, exe, args, desc=None, proc_type=None, expand=True, bank_env=None, log_dir=None): <NEW_LINE> <INDENT> if args: <NEW_LINE> <INDENT> for key, value in bank_env.items(): <NEW_LINE> <INDENT> if value is not None: <NEW_LINE> <INDENT> args = args.replace('${' + key + '}', value) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> self.name = name <NEW_LINE> self.exe = exe <NEW_LINE> self.desc = desc <NEW_LINE> if args is not None: <NEW_LINE> <INDENT> self.args = args.split() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.args = [] <NEW_LINE> <DEDENT> self.bank_env = bank_env <NEW_LINE> self.type = proc_type <NEW_LINE> self.expand = expand <NEW_LINE> self.log_dir = log_dir <NEW_LINE> if log_dir is not None: <NEW_LINE> <INDENT> self.output_file = os.path.join(log_dir, name + '.out') <NEW_LINE> self.error_file = os.path.join(log_dir, name + '.err') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.output_file = name + '.out' <NEW_LINE> self.error_file = name + '.err' <NEW_LINE> <DEDENT> self.types = '' <NEW_LINE> self.format = '' <NEW_LINE> self.tags = '' <NEW_LINE> self.files = '' <NEW_LINE> self.exitcode = -1 <NEW_LINE> self.exec_time = 0 <NEW_LINE> self.proc_type = proc_type <NEW_LINE> self.trace_id = None <NEW_LINE> self.parent_id = None <NEW_LINE> <DEDENT> def set_trace(self, trace_id, parent_id): <NEW_LINE> <INDENT> self.trace_id = trace_id <NEW_LINE> self.parent_id = parent_id <NEW_LINE> <DEDENT> def run(self, simulate=False): <NEW_LINE> <INDENT> args = [self.exe] + self.args <NEW_LINE> logging.debug('PROCESS:EXEC:' + str(self.args)) <NEW_LINE> err = False <NEW_LINE> if not simulate: <NEW_LINE> <INDENT> logging.info('PROCESS:RUN:' + self.name) <NEW_LINE> with open(self.output_file, 'w') as fout: <NEW_LINE> <INDENT> with open(self.error_file, 'w') as ferr: <NEW_LINE> <INDENT> start_time = datetime.datetime.now() <NEW_LINE> start_time = time.mktime(start_time.timetuple()) <NEW_LINE> if self.expand: <NEW_LINE> <INDENT> args = " ".join(args) <NEW_LINE> proc = subprocess.Popen(args, stdout=fout, stderr=ferr, env=self.bank_env, shell=True) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> proc = subprocess.Popen(args, stdout=fout, stderr=ferr, env=self.bank_env, shell=False) <NEW_LINE> <DEDENT> proc.wait() <NEW_LINE> end_time = datetime.datetime.now() <NEW_LINE> end_time = time.mktime(end_time.timetuple()) <NEW_LINE> self.exec_time = end_time - start_time <NEW_LINE> self.exitcode = proc.returncode <NEW_LINE> if proc.returncode == 0: <NEW_LINE> <INDENT> err = True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> logging.error('PROCESS:ERROR:' + self.name) <NEW_LINE> <DEDENT> fout.flush() <NEW_LINE> ferr.flush() <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> err = True <NEW_LINE> <DEDENT> logging.info('PROCESS:EXEC:' + self.name + ':' + str(err)) <NEW_LINE> return err
|
Define a process to execute
|
6259901c21a7993f00c66d7e
|
class Genre(models.Model): <NEW_LINE> <INDENT> name = models.CharField(max_length=200, help_text='Enter a book genre(e.g. Science Fiction)') <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return self.name
|
Model representing book genre.
|
6259901c6fece00bbaccc7b8
|
class Sunward(Vej): <NEW_LINE> <INDENT> def axis(self, init): <NEW_LINE> <INDENT> from .util import mhat <NEW_LINE> if np.iterable(init): <NEW_LINE> <INDENT> r = np.array(list((i.r for i in init))) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> r = init.r <NEW_LINE> <DEDENT> m, hat = mhat(r) <NEW_LINE> i = m == 0 <NEW_LINE> if np.any(i): <NEW_LINE> <INDENT> if r.ndim == 1: <NEW_LINE> <INDENT> hat = np.zeros(3) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> hat[i] = 0 <NEW_LINE> <DEDENT> <DEDENT> return -hat <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return ("Sunward(body_basis={}, w={}, distribution='{}'," " theta_dist={}, phi_dist={})".format( np.array2string(self.body_basis, separator=','), self._w, self._distribution, self.theta_dist, self.phi_dist)) <NEW_LINE> <DEDENT> i = Vej.__doc__.find('Parameters') <NEW_LINE> __doc__ += Vej.__doc__[i:] <NEW_LINE> del i
|
Ejection velocity cone centered on the sunward vector.
|
6259901c9b70327d1c57fb83
|
class TestCountDatasets(unittest.TestCase): <NEW_LINE> <INDENT> def test(self): <NEW_LINE> <INDENT> with open("auth/nyc-open-data.json", "r") as f: <NEW_LINE> <INDENT> auth = json.load(f) <NEW_LINE> <DEDENT> result = pysocrata.count_resources(**auth) <NEW_LINE> assert {'dataset', 'href', 'file', 'map'}.issuperset(set(result.keys()))
|
Tests that counting datasets works as expected. This is a networked test against the New York City Open Data
Portal.
|
6259901c925a0f43d25e8e45
|
class Topic: <NEW_LINE> <INDENT> def __init__(self, name: str, words: List): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.words = words
|
A topic containing words
|
6259901c21a7993f00c66d80
|
class LagrangeInterpolation: <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def lagrange_polynomial(i, x_points, x): <NEW_LINE> <INDENT> num, dem = 1, 1 <NEW_LINE> for j in range(len(x_points)): <NEW_LINE> <INDENT> if x_points[j] != i: <NEW_LINE> <INDENT> num *= x - x_points[j] <NEW_LINE> dem *= (i-x_points[j]) <NEW_LINE> <DEDENT> <DEDENT> return field.division(num, dem) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def reconstruct_secret(shares, x): <NEW_LINE> <INDENT> res = 0 <NEW_LINE> x_points, y_points = zip(*shares) <NEW_LINE> for i in range(len(x_points)): <NEW_LINE> <INDENT> poly = LagrangeInterpolation.lagrange_polynomial(x_points[i], x_points, x) <NEW_LINE> product = (poly * y_points[i]) % field.get_prime() <NEW_LINE> res += product <NEW_LINE> <DEDENT> return res % field.get_prime()
|
Class that provides static methods for work that should be done without
creating the same objects that Encryption needs
|
6259901c507cdc57c63a5ba7
|
class PartName(Enum): <NEW_LINE> <INDENT> VALUES = ( "night", "morning", "day", "evening", )
|
Name of the time of day. Possible values:
|
6259901cbf627c535bcb22b7
|
class LogCaptureTests(TestCase): <NEW_LINE> <INDENT> log = Logger() <NEW_LINE> def test_capture(self): <NEW_LINE> <INDENT> foo = object() <NEW_LINE> with capturedLogs() as captured: <NEW_LINE> <INDENT> self.log.debug("Capture this, please", foo=foo) <NEW_LINE> self.log.info("Capture this too, please", foo=foo) <NEW_LINE> <DEDENT> self.assertTrue(len(captured) == 2) <NEW_LINE> self.assertEqual(captured[0]["log_format"], "Capture this, please") <NEW_LINE> self.assertEqual(captured[0]["log_level"], LogLevel.debug) <NEW_LINE> self.assertEqual(captured[0]["foo"], foo) <NEW_LINE> self.assertEqual(captured[1]["log_format"], "Capture this too, please") <NEW_LINE> self.assertEqual(captured[1]["log_level"], LogLevel.info) <NEW_LINE> self.assertEqual(captured[1]["foo"], foo)
|
Tests for L{LogCaptureTests}.
|
6259901c925a0f43d25e8e47
|
class AreAllWellFormatted(object): <NEW_LINE> <INDENT> def match(self, actual): <NEW_LINE> <INDENT> for key, value in actual.iteritems(): <NEW_LINE> <INDENT> if key == 'content-length' and not value.isdigit(): <NEW_LINE> <INDENT> return InvalidFormat(key, value) <NEW_LINE> <DEDENT> elif key == 'x-timestamp' and not re.match("^\d+\.?\d*\Z", value): <NEW_LINE> <INDENT> return InvalidFormat(key, value) <NEW_LINE> <DEDENT> elif key == 'x-account-bytes-used' and not value.isdigit(): <NEW_LINE> <INDENT> return InvalidFormat(key, value) <NEW_LINE> <DEDENT> elif key == 'x-account-container-count' and not value.isdigit(): <NEW_LINE> <INDENT> return InvalidFormat(key, value) <NEW_LINE> <DEDENT> elif key == 'x-account-object-count' and not value.isdigit(): <NEW_LINE> <INDENT> return InvalidFormat(key, value) <NEW_LINE> <DEDENT> elif key == 'x-container-bytes-used' and not value.isdigit(): <NEW_LINE> <INDENT> return InvalidFormat(key, value) <NEW_LINE> <DEDENT> elif key == 'x-container-object-count' and not value.isdigit(): <NEW_LINE> <INDENT> return InvalidFormat(key, value) <NEW_LINE> <DEDENT> elif key == 'content-type' and not value: <NEW_LINE> <INDENT> return InvalidFormat(key, value) <NEW_LINE> <DEDENT> elif key == 'x-trans-id' and not re.match("^tx[0-9a-f]{21}-[0-9a-f]{10}.*", value): <NEW_LINE> <INDENT> return InvalidFormat(key, value) <NEW_LINE> <DEDENT> elif key == 'date' and not value: <NEW_LINE> <INDENT> return InvalidFormat(key, value) <NEW_LINE> <DEDENT> elif key == 'accept-ranges' and not value == 'bytes': <NEW_LINE> <INDENT> return InvalidFormat(key, value) <NEW_LINE> <DEDENT> elif key == 'etag' and not value.isalnum(): <NEW_LINE> <INDENT> return InvalidFormat(key, value) <NEW_LINE> <DEDENT> elif key == 'transfer-encoding' and not value == 'chunked': <NEW_LINE> <INDENT> return InvalidFormat(key, value) <NEW_LINE> <DEDENT> <DEDENT> return None
|
Specific matcher to check the correctness of formats of values of Swift's
response headers
This matcher checks the format of values of response headers.
When checking the format of values of 'specific' headers such as
X-Account-Meta-* or X-Object-Manifest, for example, those values must be
checked in each test's code.
|
6259901cac7a0e7691f732ef
|
class PluginMixin(APIMixin): <NEW_LINE> <INDENT> pass
|
Used to bridge between a Core API and a Plugin, via Python's awesome support for multi-inheritance.
|
6259901c5166f23b2e2441d9
|
class HelloWorldTest(unittest.TestCase): <NEW_LINE> <INDENT> def test_hello_world(self): <NEW_LINE> <INDENT> self.assertEqual("Hello World!", solutions.hello_world.hello_world())
|
Example test suite
|
6259901ca8ecb03325872026
|
class Message(object): <NEW_LINE> <INDENT> def __init__( self, address_from: Address, address_to: Address ): <NEW_LINE> <INDENT> super(Message, self).__init__() <NEW_LINE> self._address_from = address_from <NEW_LINE> self._address_to = address_to <NEW_LINE> <DEDENT> @property <NEW_LINE> def address_from(self) -> Address: <NEW_LINE> <INDENT> return self._address_from <NEW_LINE> <DEDENT> @property <NEW_LINE> def address_to(self) -> Address: <NEW_LINE> <INDENT> return self._address_to <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return 'from {a_from} to {a_to}'.format(a_from=self.address_from, a_to=self.address_to)
|
A message transmitted over the network.
|
6259901c507cdc57c63a5bab
|
class OryxTransientZip(object): <NEW_LINE> <INDENT> def __init__(self, zfilename): <NEW_LINE> <INDENT> self.zipcreated = '' <NEW_LINE> if os.path.exists(zfilename): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> elif os.path.exists(zfilename[:-4]): <NEW_LINE> <INDENT> zfile = ZipFile(zfilename, 'w') <NEW_LINE> for filename in glob.glob(zfilename[:-4] + '/*'): <NEW_LINE> <INDENT> zfile.write(filename, os.path.basename(filename)) <NEW_LINE> <DEDENT> zfile.close() <NEW_LINE> self.zipcreated = zfilename <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.zipcreated = 'No transient data' <NEW_LINE> raise IOError("NoTransientFiles") <NEW_LINE> <DEDENT> self.zfile = ZipFile(zfilename) <NEW_LINE> <DEDENT> def __del__(self): <NEW_LINE> <INDENT> if self.zipcreated != 'No transient data': <NEW_LINE> <INDENT> self.zfile.close() <NEW_LINE> if self.zipcreated != '': <NEW_LINE> <INDENT> os.remove(self.zipcreated) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def data_from_transient_file(self, filename): <NEW_LINE> <INDENT> full_file = self.zfile.read(filename).decode() <NEW_LINE> data_string = '\n'.join(full_file.split('\r\n')[13:-2]) <NEW_LINE> return np.loadtxt(string2file(data_string), delimiter=',').T <NEW_LINE> <DEDENT> @property <NEW_LINE> def list_transient_file(self): <NEW_LINE> <INDENT> file_list = self.zfile.namelist() <NEW_LINE> return file_list <NEW_LINE> <DEDENT> @property <NEW_LINE> def supply_voltage_list(self): <NEW_LINE> <INDENT> voltages_dict = {'TlpCurr': [], 'TlpVolt': [], 'TlpVMonCh3': [], 'TlpVMonCh4': [], 'TlpVoltCh3': [], 'TlpVoltCh4': []} <NEW_LINE> for filename in self.zfile.namelist(): <NEW_LINE> <INDENT> if filename[-4:] == ".wfm": <NEW_LINE> <INDENT> elems = filename[:-5].split('_') <NEW_LINE> voltages_dict[elems[3]].append(elems[4]) <NEW_LINE> <DEDENT> <DEDENT> voltages_dict['TlpCurr'].sort(key=float) <NEW_LINE> voltages_dict['TlpVolt'].sort(key=float) <NEW_LINE> if voltages_dict['TlpCurr'] != voltages_dict['TlpVolt']: <NEW_LINE> <INDENT> log = logging.getLogger('thunderstorm.thunder.importers') <NEW_LINE> log.warn("Current and Voltage waveform mismatch") <NEW_LINE> <DEDENT> return voltages_dict['TlpCurr'] <NEW_LINE> <DEDENT> @property <NEW_LINE> def filecontents(self): <NEW_LINE> <INDENT> volt_list = self.supply_voltage_list <NEW_LINE> basename = '_'.join(self.zfile.namelist()[0].split('_')[0:3]) <NEW_LINE> return (basename, volt_list)
|
Utils to extract data from oryx zip files
Waveforms can be stored in either a zip file with the same name as
the .tsr file, or it can be stored in a subfolder with the same name
as the .tsr file.
Note: Oryx deprecated saving as zip
|
6259901cd18da76e235b7852
|
class PacketPokerPlayerArrive(PacketPokerPlayerInfo): <NEW_LINE> <INDENT> info = PacketPokerPlayerInfo.info + ( ('blind', 'late', 'bs'), ('remove_next_turn', False, 'bool'), ('sit_out', True, 'bool'), ('sit_out_next_turn', False, 'bool'), ('auto', False, 'bool'), ('auto_blind_ante', False, 'bool'), ('wait_for', False, 'bool'), ('buy_in_payed', False, 'bool'), ('seat', None, 'Bnone'), )
|
Semantics: the player "serial" is seated at the game "game_id".
Descriptive information for the player such as "name" and "outfit"
is provided.
Direction: server => client
Context: this packet is the server's answer to a successful
:class:`PACKET_POKER_SEAT <pokerpackets.networkpackets.PacketPokerSeat>` request. The actual seat allocated to the player
will be specified in the next :class:`PACKET_POKER_SEATS <pokerpackets.networkpackets.PacketPokerSeats>` packet.
name: login name of the player.
outfit: name of the player outfit, usually referring to the organization the player belongs to
serial: integer uniquely identifying a player.
game_id: integer uniquely identifying a game.
|
6259901c91af0d3eaad3ac2c
|
class ValidationError(Exception): <NEW_LINE> <INDENT> pass
|
Something is generally not valid
|
6259901cac7a0e7691f732f3
|
class ImageSource(metaclass=abc.ABCMeta): <NEW_LINE> <INDENT> def __enter__(self): <NEW_LINE> <INDENT> self.begin() <NEW_LINE> <DEDENT> def __exit__(self, exc_type, exc_val, exc_tb): <NEW_LINE> <INDENT> self.shutdown() <NEW_LINE> <DEDENT> @property <NEW_LINE> @abc.abstractmethod <NEW_LINE> def sequence_type(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @property <NEW_LINE> @abc.abstractmethod <NEW_LINE> def supports_random_access(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @property <NEW_LINE> @abc.abstractmethod <NEW_LINE> def is_depth_available(self): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> @property <NEW_LINE> @abc.abstractmethod <NEW_LINE> def is_per_pixel_labels_available(self): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> @property <NEW_LINE> @abc.abstractmethod <NEW_LINE> def is_labels_available(self): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> @property <NEW_LINE> @abc.abstractmethod <NEW_LINE> def is_normals_available(self): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> @property <NEW_LINE> @abc.abstractmethod <NEW_LINE> def is_stereo_available(self): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> @property <NEW_LINE> @abc.abstractmethod <NEW_LINE> def is_stored_in_database(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @abc.abstractmethod <NEW_LINE> def get_camera_intrinsics(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def get_stereo_baseline(self): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> @abc.abstractmethod <NEW_LINE> def begin(self): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> @abc.abstractmethod <NEW_LINE> def get(self, index): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @abc.abstractmethod <NEW_LINE> def get_next_image(self): <NEW_LINE> <INDENT> return None, None <NEW_LINE> <DEDENT> @abc.abstractmethod <NEW_LINE> def is_complete(self): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> def shutdown(self): <NEW_LINE> <INDENT> pass
|
An abstract class representing a place to get images from.
This generalizes datasets from previous versions,
and simulators, the big addition of this iteration.
The new usage structure for image sources is the with statement, i.e.:
```
with image_source:
while not image_source.is_complete():
image, timestamp = image_source.get_next_image()
```
This allows us to cleanly startup and shutdown the image source (for simulators).
TODO: We need more ways to interrogate the image source for information about it.
|
6259901c0a366e3fb87dd802
|
class ObjectExtractionApplet(StandardApplet): <NEW_LINE> <INDENT> def __init__(self, name="Object Extraction", workflow=None, projectFileGroupName="ObjectExtraction", interactive=True): <NEW_LINE> <INDENT> super(ObjectExtractionApplet, self).__init__(name=name, workflow=workflow) <NEW_LINE> self._serializableItems = [ ObjectExtractionSerializer(self.topLevelOperator, projectFileGroupName) ] <NEW_LINE> self._interactive = interactive <NEW_LINE> <DEDENT> @property <NEW_LINE> def singleLaneOperatorClass(self): <NEW_LINE> <INDENT> return OpObjectExtraction <NEW_LINE> <DEDENT> @property <NEW_LINE> def broadcastingSlots(self): <NEW_LINE> <INDENT> return ['Features'] <NEW_LINE> <DEDENT> @property <NEW_LINE> def singleLaneGuiClass(self): <NEW_LINE> <INDENT> from ilastik.applets.objectExtraction.objectExtractionGui import ObjectExtractionGui <NEW_LINE> from ilastik.applets.objectExtraction.objectExtractionGui import ObjectExtractionGuiNonInteractive <NEW_LINE> if self._interactive: <NEW_LINE> <INDENT> return ObjectExtractionGui <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return ObjectExtractionGuiNonInteractive <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def dataSerializers(self): <NEW_LINE> <INDENT> return self._serializableItems
|
Calculates object features for each object in an image.
Features are provided by plugins, which are responsible for
performing the actual computation.
|
6259901c796e427e5384f58b
|
class ShapeDetector: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def detect(self, c): <NEW_LINE> <INDENT> shape = 'unidentified' <NEW_LINE> peri = cv2.arcLength(c, True) <NEW_LINE> approx = cv2.approxPolyDP(c, 0.03 * peri, True) <NEW_LINE> if len(approx) <= 6: <NEW_LINE> <INDENT> shape = 'shell' <NEW_LINE> <DEDENT> elif len(approx) >= 10: <NEW_LINE> <INDENT> shape = 'star' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> shape = 'circle' <NEW_LINE> <DEDENT> return shape
|
Takes a shape/contourgroup and returns the nearest shape based on rules set in this class.
|
6259901cbe8e80087fbbfe81
|
class EntryDetailView(FormMixin, DetailView): <NEW_LINE> <INDENT> model = Entry <NEW_LINE> template_name = 'entradas/ver.html' <NEW_LINE> form_class = ComentaryForm <NEW_LINE> success_url = '.' <NEW_LINE> def get_queryset(self): <NEW_LINE> <INDENT> qs = super(EntryDetailView, self).get_queryset().filter(anulate=False) <NEW_LINE> return qs <NEW_LINE> <DEDENT> def get_context_data(self, **kwargs): <NEW_LINE> <INDENT> context = super(EntryDetailView, self).get_context_data(**kwargs) <NEW_LINE> entrada = self.get_object() <NEW_LINE> entrada.vists = entrada.vists + 1 <NEW_LINE> entrada.save() <NEW_LINE> comentarios = [] <NEW_LINE> if entrada.theme.tipo == '0': <NEW_LINE> <INDENT> comentarios = Comentary.objects.filter(entry__pk=entrada.pk)[:20] <NEW_LINE> <DEDENT> context['comentarios'] = comentarios <NEW_LINE> return context <NEW_LINE> <DEDENT> def post(self, request, *args, **kwargs): <NEW_LINE> <INDENT> self.object = self.get_object() <NEW_LINE> form = self.get_form() <NEW_LINE> if form.is_valid(): <NEW_LINE> <INDENT> return self.form_valid(form) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self.form_invalid(form) <NEW_LINE> <DEDENT> <DEDENT> def form_valid(self, form): <NEW_LINE> <INDENT> entrada = self.get_object() <NEW_LINE> usuario = self.request.user <NEW_LINE> comentario = form.cleaned_data['comentario'] <NEW_LINE> Comentary( user=usuario, entry=entrada, content=comentario, calification=1, ).save() <NEW_LINE> return HttpResponseRedirect(self.get_success_url())
|
View for displaying an entry
|
6259901c287bf620b62729f7
|
class ParseError(EntropyException): <NEW_LINE> <INDENT> pass
|
Parse error.
|
6259901c8c3a8732951f7371
|
class ConfigSection(Config): <NEW_LINE> <INDENT> def __init__(self, key=_ANONYMOUS, members=None, **kwargs): <NEW_LINE> <INDENT> super(ConfigSection, self).__init__(key, default={}, **kwargs) <NEW_LINE> self.members = members or {} <NEW_LINE> for member in members.values(): <NEW_LINE> <INDENT> assert member.key is not _ANONYMOUS <NEW_LINE> <DEDENT> <DEDENT> def update_members(self, new_members, overwrite=True): <NEW_LINE> <INDENT> for member in new_members.values(): <NEW_LINE> <INDENT> assert member.key is not _ANONYMOUS <NEW_LINE> <DEDENT> if not overwrite: <NEW_LINE> <INDENT> new_members = new_members.copy() <NEW_LINE> for k in self.members.keys(): <NEW_LINE> <INDENT> if k in new_members: <NEW_LINE> <INDENT> del new_members[k] <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> self.members.update(new_members) <NEW_LINE> <DEDENT> def bind(self, config, prefix): <NEW_LINE> <INDENT> return BoundContainerWithGetAttr(self, bind_to=config, grab_key=self.key, prefix=prefix) <NEW_LINE> <DEDENT> def _coerce_type(self, raw, prefix=''): <NEW_LINE> <INDENT> return dict([(key, self.get_member(raw, key, prefix)) for key in self.members.keys()]) <NEW_LINE> <DEDENT> def get_member(self, data, attr, prefix): <NEW_LINE> <INDENT> if self.key is not _ANONYMOUS: <NEW_LINE> <INDENT> prefix += prefix and '.' + self.key or self.key <NEW_LINE> <DEDENT> return self.members[attr].bind(data, prefix) <NEW_LINE> <DEDENT> def print_help(self, out=sys.stdout, indent=0, skip_header=False): <NEW_LINE> <INDENT> if self.private: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> if not skip_header: <NEW_LINE> <INDENT> print((" " * indent) + "[%s]" % self.get_presentable_key(), file=out) <NEW_LINE> print(self.get_presentable_help_text(indent=indent), file=out) <NEW_LINE> print(file=out) <NEW_LINE> new_indent = indent + 2 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> new_indent = indent <NEW_LINE> <DEDENT> for programmer_key, config in sorted(iter(self.members.items()), key=lambda x: x[1].key): <NEW_LINE> <INDENT> config.print_help(out=out, indent=new_indent)
|
A section of configuration variables whose names are known
a priori. For example, this can be used to group configuration
for a cluster.
|
6259901c91af0d3eaad3ac32
|
class use_cut: <NEW_LINE> <INDENT> force = _internal._constants.CPX_USECUT_FORCE <NEW_LINE> purge = _internal._constants.CPX_USECUT_PURGE <NEW_LINE> filter = _internal._constants.CPX_USECUT_FILTER
|
Constants to specify when to use the added cut.
|
6259901cbf627c535bcb22c1
|
class Member(models.Model): <NEW_LINE> <INDENT> name = models.CharField(max_length=200) <NEW_LINE> catagory = models.ForeignKey(Catagory) <NEW_LINE> gender = models.CharField(choices=GENDER_CHOICES, max_length=50) <NEW_LINE> description = models.TextField() <NEW_LINE> def __unicode__(self): <NEW_LINE> <INDENT> return self.name
|
This model stores information about a member and the category to which they belong
|
6259901cac7a0e7691f732f9
|
class DiceLoss(nn.Module): <NEW_LINE> <INDENT> def __init__(self, background=False): <NEW_LINE> <INDENT> super(DiceLoss, self).__init__() <NEW_LINE> self.SMOOTH = 0.0001 <NEW_LINE> self.background = background <NEW_LINE> <DEDENT> def forward (self, pred, gt): <NEW_LINE> <INDENT> nclasses = gt.size()[1] <NEW_LINE> if (self.background): <NEW_LINE> <INDENT> prediction = pred.contiguous() <NEW_LINE> groundtruth = gt.contiguous() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> prediction = pred[:,1:,...].contiguous() <NEW_LINE> groundtruth = gt[:,1:,...].contiguous() <NEW_LINE> <DEDENT> iflat = prediction.view(-1) <NEW_LINE> tflat = groundtruth.view(-1) <NEW_LINE> intersection = (iflat * tflat).sum() <NEW_LINE> union = iflat.sum() + tflat.sum() <NEW_LINE> dsc = ((2. * intersection + self.SMOOTH) / (union + self.SMOOTH)) <NEW_LINE> loss_dsc = 1. - dsc <NEW_LINE> return loss_dsc
|
Dice Loss (Ignore background - channel 0)
Arguments:
@param prediction: tensor with predictions classes
@param groundtruth: tensor with ground truth mask
|
6259901c287bf620b62729f9
|
class IGuestProcess(IProcess): <NEW_LINE> <INDENT> __uuid__ = 'dfa39a36-5d43-4840-a025-67ea956b3111' <NEW_LINE> __wsmap__ = 'managed'
|
Implementation of the :py:class:`IProcess` object
for processes on the guest.
|
6259901c30c21e258be99624
|
class Value(): <NEW_LINE> <INDENT> def __init__(self, value): <NEW_LINE> <INDENT> self.possible_values = [] <NEW_LINE> self.value = int(value) <NEW_LINE> <DEDENT> def remove_value(self, value): <NEW_LINE> <INDENT> if int(value) in self.possible_values: <NEW_LINE> <INDENT> self.possible_values.remove(int(value)) <NEW_LINE> return True <NEW_LINE> <DEDENT> return False <NEW_LINE> <DEDENT> def set_value(self, value=-1): <NEW_LINE> <INDENT> if value == -1: <NEW_LINE> <INDENT> self.value = self.possible_values[0] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.value = int(value) <NEW_LINE> self.possible_values = [self.value] <NEW_LINE> <DEDENT> <DEDENT> def setup(self, row, column): <NEW_LINE> <INDENT> self.row = row <NEW_LINE> self.column = column <NEW_LINE> self.square = compute_square(row, column) <NEW_LINE> if self.value > 0: <NEW_LINE> <INDENT> self.possible_values = [self.value] <NEW_LINE> return True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.possible_values = numbers[:] <NEW_LINE> return False
|
A Value in a square
Stores its value, its possible values, its location,
and whether it has been propagated or not
|
6259901c91af0d3eaad3ac35
|
class CreateHeartbeatPayloadOwnerTeam(object): <NEW_LINE> <INDENT> swagger_types = { 'name': 'str', 'id': 'str' } <NEW_LINE> attribute_map = { 'name': 'name', 'id': 'id' } <NEW_LINE> def __init__(self, name=None, id=None): <NEW_LINE> <INDENT> self._name = None <NEW_LINE> self._id = None <NEW_LINE> self.discriminator = None <NEW_LINE> if name is not None: <NEW_LINE> <INDENT> self.name = name <NEW_LINE> <DEDENT> if id is not None: <NEW_LINE> <INDENT> self.id = id <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return self._name <NEW_LINE> <DEDENT> @name.setter <NEW_LINE> def name(self, name): <NEW_LINE> <INDENT> self._name = name <NEW_LINE> <DEDENT> @property <NEW_LINE> def id(self): <NEW_LINE> <INDENT> return self._id <NEW_LINE> <DEDENT> @id.setter <NEW_LINE> def id(self, id): <NEW_LINE> <INDENT> self._id = id <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in six.iteritems(self.swagger_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> if issubclass(CreateHeartbeatPayloadOwnerTeam, dict): <NEW_LINE> <INDENT> for key, value in self.items(): <NEW_LINE> <INDENT> result[key] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pprint.pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, CreateHeartbeatPayloadOwnerTeam): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other
|
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
|
6259901c21a7993f00c66d90
|
class UpdateHandler(InboundMailHandler): <NEW_LINE> <INDENT> with open('config.yaml', 'r') as f: <NEW_LINE> <INDENT> doc = yaml.load(f); <NEW_LINE> <DEDENT> appname = doc["appname"] <NEW_LINE> @classmethod <NEW_LINE> def get_update(cls, body): <NEW_LINE> <INDENT> def _cleaner(s, break_list): <NEW_LINE> <INDENT> clean_text = s <NEW_LINE> for b in break_list: <NEW_LINE> <INDENT> clean_text = clean_text.split(b)[0] <NEW_LINE> <DEDENT> return clean_text.strip() <NEW_LINE> <DEDENT> dt = datetime.now() <NEW_LINE> breaks = [ '[DONE]', '[done]', '[Done]', '-----Original Message-----', '________________________________________', 'From: %s' % cls.appname, 'Sent from my iPhone', 'On Mon, {0:%b} {0.day}, {0:%Y} at 10:00 AM, '.format(dt), 'On Mon, {0:%b} {0.day}, {0:%Y}, at 10:00 AM, '.format(dt), 'On {0:%b} {0.day}, {0:%Y} 10:00 AM'.format(dt), 'On {0:%b} {0.day}, {0:%Y}, at 10:00 AM'.format(dt), 'Just reply with a few brief bullets starting with' ] <NEW_LINE> good_msg = _cleaner(body, breaks) <NEW_LINE> updates = good_msg.split('*') <NEW_LINE> cleaned = [x.strip('[\n ]') for x in updates] <NEW_LINE> filtered = filter(lambda x: (x != '' and x != '*'), cleaned) <NEW_LINE> message = '* ' + '\n* '.join(filtered) <NEW_LINE> return message <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def get_urlsafe(cls, address): <NEW_LINE> <INDENT> if address.find('<') > -1: <NEW_LINE> <INDENT> urlsafe = address.split('<')[1].split('+')[1].split('@')[0] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> urlsafe = address.split('+')[1].split('@')[0] <NEW_LINE> <DEDENT> return urlsafe <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def process_update(cls, address, body): <NEW_LINE> <INDENT> urlsafe = cls.get_urlsafe(address) <NEW_LINE> if not urlsafe: <NEW_LINE> <INDENT> logging.error('Unable to extract urlsafe from %s' % address) <NEW_LINE> return <NEW_LINE> <DEDENT> subscriber_update = ndb.Key(urlsafe=urlsafe).get() <NEW_LINE> subscriber_update.message = cls.get_update(body) <NEW_LINE> subscriber_update.put() <NEW_LINE> return subscriber_update <NEW_LINE> <DEDENT> def receive(self, message): <NEW_LINE> <INDENT> body = [b.decode() for t, b in message.bodies('text/plain')][0] <NEW_LINE> self.process_update(message.to, body)
|
Handler for incoming update emails from subscribers.
|
6259901c56b00c62f0fb36d2
|
class SocketEndpoint(Model): <NEW_LINE> <INDENT> name = fields.StringField(max_length=64, primary_key=True) <NEW_LINE> allowed_methods = fields.JSONField() <NEW_LINE> links = fields.LinksField() <NEW_LINE> class Meta: <NEW_LINE> <INDENT> parent = CustomSocket <NEW_LINE> endpoints = { 'detail': { 'methods': ['get'], 'path': '/endpoints/{name}/' }, 'list': { 'methods': ['get'], 'path': '/endpoints/' } } <NEW_LINE> <DEDENT> def run(self, method='GET', data=None): <NEW_LINE> <INDENT> endpoint_path = self.links.self <NEW_LINE> connection = self._get_connection() <NEW_LINE> if not self._validate_method(method): <NEW_LINE> <INDENT> raise SyncanoValueError('Method: {} not specified in calls for this custom socket.'.format(method)) <NEW_LINE> <DEDENT> method = method.lower() <NEW_LINE> if method in ['get', 'delete']: <NEW_LINE> <INDENT> response = connection.request(method, endpoint_path) <NEW_LINE> <DEDENT> elif method in ['post', 'put', 'patch']: <NEW_LINE> <INDENT> response = connection.request(method, endpoint_path, data=data or {}) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise SyncanoValueError('Method: {} not supported.'.format(method)) <NEW_LINE> <DEDENT> return response <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def get_all_endpoints(cls, instance_name=None): <NEW_LINE> <INDENT> connection = cls._meta.connection <NEW_LINE> all_endpoints_path = Instance._meta.resolve_endpoint( 'endpoints', {'name': cls.please.properties.get('instance_name') or instance_name} ) <NEW_LINE> response = connection.request('GET', all_endpoints_path) <NEW_LINE> return [cls(**endpoint) for endpoint in response['objects']] <NEW_LINE> <DEDENT> def _validate_method(self, method): <NEW_LINE> <INDENT> if '*' in self.allowed_methods or method in self.allowed_methods: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return False
|
OO wrapper around endpoints defined in CustomSocket instance.
Look at the custom socket documentation for more details.
:ivar name: :class:`~syncano.models.fields.StringField`
:ivar calls: :class:`~syncano.models.fields.JSONField`
:ivar links: :class:`~syncano.models.fields.LinksField`
|
6259901c30c21e258be99627
|
class AvatarHash(Row): <NEW_LINE> <INDENT> _TABLE_ = 'avatar_hash' <NEW_LINE> _PRIMARY_KEYS_ = ['hash_id'] <NEW_LINE> _COLUMNS_ = [ 'hash_id', 'hashalgo', 'hashdata' ] <NEW_LINE> @property <NEW_LINE> def hashstr(self): <NEW_LINE> <INDENT> return base64.a85encode(self.hashdata) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return '<%s #%d %s %s>' % ( self.__class__.__name__, self.hash_id, self.hashalgo, self.hashstr)
|
A hash of users' avatars and their scores.
|
6259901dac7a0e7691f732ff
|
class TestCreate(BaseGroup): <NEW_LINE> <INDENT> def test_create(self): <NEW_LINE> <INDENT> grp = self.f.create_group('foo') <NEW_LINE> self.assertIsInstance(grp, Group) <NEW_LINE> <DEDENT> def test_create_intermediate(self): <NEW_LINE> <INDENT> grp = self.f.create_group('foo/bar/baz') <NEW_LINE> self.assertEqual(grp.name, '/foo/bar/baz') <NEW_LINE> <DEDENT> def test_create_exception(self): <NEW_LINE> <INDENT> self.f.create_group('foo') <NEW_LINE> with self.assertRaises(ValueError): <NEW_LINE> <INDENT> self.f.create_group('foo') <NEW_LINE> <DEDENT> <DEDENT> def test_unicode(self): <NEW_LINE> <INDENT> name = u"/Name\u4500" <NEW_LINE> group = self.f.create_group(name) <NEW_LINE> self.assertEqual(group.name, name) <NEW_LINE> self.assertEqual(group.id.links.get_info(name.encode('utf8')).cset, h5t.CSET_UTF8)
|
Feature: New groups can be created via .create_group method
|
6259901d56b00c62f0fb36d4
|
class DescribeMaliciousRequestsResponse(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.TotalCount = None <NEW_LINE> self.MaliciousRequests = None <NEW_LINE> self.RequestId = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.TotalCount = params.get("TotalCount") <NEW_LINE> if params.get("MaliciousRequests") is not None: <NEW_LINE> <INDENT> self.MaliciousRequests = [] <NEW_LINE> for item in params.get("MaliciousRequests"): <NEW_LINE> <INDENT> obj = MaliciousRequest() <NEW_LINE> obj._deserialize(item) <NEW_LINE> self.MaliciousRequests.append(obj) <NEW_LINE> <DEDENT> <DEDENT> self.RequestId = params.get("RequestId")
|
DescribeMaliciousRequests response structure
|
6259901dd164cc6175821d96
|
class ServerAction(models.Model): <NEW_LINE> <INDENT> pass
|
TODO: Track server actions like timing out players after a given length of inactivity
|
6259901d5e10d32532ce4011
|
class RecipeRetrieveSerializer(BaseRecipeSerializer): <NEW_LINE> <INDENT> categories = CategorySerializer(many=True, read_only=True) <NEW_LINE> tags = TagSerializer(many=True, read_only=True) <NEW_LINE> class Meta(BaseRecipeSerializer.Meta): <NEW_LINE> <INDENT> fields = BaseRecipeSerializer.Meta.fields + ( 'comments_count', 'main_picture_thumbs', 'secondary_picture_thumbs', )
|
This serializer is used to retrieve Recipe instance.
|
6259901d91af0d3eaad3ac3b
|