code
stringlengths
4
4.48k
docstring
stringlengths
1
6.45k
_id
stringlengths
24
24
class TestInferPrefsByRatioDiffDepth(TestInferPrefsByRatio):
    """Test `dms_tools2.inferPrefsByRatio` when samples differ in depth.

    With unequal depths pseudocount scaling applies; the expected values
    are computed without that scaling, so the comparison tolerances are
    relaxed while the sampled depths are kept close together.
    """

    # Per-sample read depths are drawn from this (narrow) range so the
    # unscaled expected values stay a good approximation.
    Nr_RANGE = (300000, 400000)
    # Relaxed absolute / relative tolerances for the comparison.
    ATOL = 1e-3
    RTOL = 1e-3
Tests `dms_tools2.inferPrefsByRatio` with different depths. There is pseudocount scaling in this case. We handle this by calculating expected results without scaling and just relaxing tolerance while keeping depth close among samples.
62598fb3be383301e0253888
class BackwardsCompatibleMeta(type):
    """Metaclass that keeps removed static members "working" for
    backward compatibility.

    Declare on the class body:
      * ``_bcsv`` -- dict of deprecated static variables (name -> value).
      * ``_bcsm`` -- dict of deprecated static methods (name -> callable).
      * ``_bcim`` -- dict of deprecated instance members (recorded in the
        deprecated-name set, but class-level lookup falls through to
        normal attribute resolution for them).

    Reads and writes of these names are redirected into the backing
    dicts instead of the normal class namespace.
    """

    def __new__(mcs, clsname, bases, dct):
        # Pop the declarations out of the class body so they do not
        # become ordinary class attributes.
        bcsv = dct.pop("_bcsv", {})
        bcsm = dct.pop("_bcsm", {})
        bcim = dct.pop("_bcim", {})
        bca = set(bcsv) | set(bcsm) | set(bcim)
        dct["_bc"] = {"sv": bcsv, "sm": bcsm, "im": bcim, "a": bca}
        return super(BackwardsCompatibleMeta, mcs).__new__(mcs, clsname, bases, dct)

    def __getattribute__(cls, name):
        # Use type.__getattribute__ to avoid recursing into ourselves.
        bc = type.__getattribute__(cls, "_bc")
        if name in bc["a"]:
            if name in bc["sv"]:
                return bc["sv"][name]
            if name in bc["sm"]:
                return bc["sm"][name]
        return type.__getattribute__(cls, name)

    def __setattr__(cls, name, value):
        bc = cls.__dict__["_bc"]
        if name in bc["sv"]:
            bc["sv"][name] = value
        else:
            # BUG FIX: ``cls.__dict__`` is a read-only mappingproxy, so
            # the original ``cls.__dict__[name] = value`` raised
            # TypeError for every non-deprecated attribute.  Delegate to
            # type.__setattr__ instead.
            type.__setattr__(cls, name, value)
Use this meta class if you have any static methods that need to be removed from the class, but at the same time need to still "work" to ensure backward compatibility. Declare `_bcsv` dictionary with variables that should be static and deprecated. Example: class A(backwards_compatible()): _bcsv = {"foo": 100, "nay": -1} _bcsm = {"getfoo": lambda: A.foo} def __init__(self): super(A, self).__init__()
62598fb3379a373c97d990a3
class RemindersApiListView(HeliumAPIView, CreateModelMixin, ListModelMixin):
    """List the authenticated user's reminders (GET) or create one (POST)."""

    serializer_class = ReminderSerializer
    permission_classes = (IsAuthenticated,)
    filter_class = ReminderFilter

    def get_queryset(self):
        # Guard against requests that carry no user (e.g. schema
        # generation) by returning an empty queryset.
        if hasattr(self.request, 'user'):
            user = self.request.user
            return user.reminders.all()
        else:
            return Reminder.objects.none()

    def get(self, request, *args, **kwargs):
        # Reads use the extended (deeper) serializer so clients avoid
        # redundant follow-up API calls.
        self.serializer_class = ReminderExtendedSerializer
        response = self.list(request, *args, **kwargs)
        return response

    def perform_create(self, serializer):
        # Always attach the authenticated user as the reminder's owner.
        serializer.save(user=self.request.user)

    def post(self, request, *args, **kwargs):
        # Verify the caller may attach the reminder to the referenced
        # event / homework before creating anything.
        if 'event' in request.data:
            permissions.check_event_permission(request.user.pk, request.data['event'])
        if 'homework' in request.data:
            permissions.check_homework_permission(request.user.pk, request.data['homework'])
        response = self.create(request, *args, **kwargs)
        logger.info(f"Reminder {response.data['id']} created for user {request.user.get_username()}")
        return response
get: Return a list of all reminder instances for the authenticated user. For convenience, reminder instances on a GET are serialized to a depth of two to avoid the need for redundant API calls. post: Create a new reminder instance for the authenticated user. For more details pertaining to choice field values, [see here](https://github.com/HeliumEdu/platform/wiki#choices).
62598fb3fff4ab517ebcd874
class Mul(Instruction):
    """`mul X Y`: multiply register X in place by the value of Y."""

    def __init__(self, *operands, machine):
        # Defect fixed: a dead ``pass`` statement preceded __init__.
        super().__init__('mul', *operands, machine=machine)

    def exec(self):
        """Log the instruction, advance the pc, then apply the multiply."""
        logexec(self)
        # NOTE(review): ``pc`` and ``rm1`` are presumably provided by the
        # Instruction base class (rm1 = resolved value of operand 1) --
        # confirm against its definition.
        self.pc += 1
        self.machine.registers[self.operands[0]] *= self.rm1
`mul X Y` sets register X to the result of multiplying the value contained in register X by the value of Y.
62598fb34e4d5625663724b5
class TestPack(PackTests):
    """Compatibility test: packs we write must verify with the C git tool."""

    def setUp(self):
        # The verify-pack -v behaviour relied on below needs git >= 1.5.0.
        require_git_version((1, 5, 0))
        super(TestPack, self).setUp()
        self._tempdir = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, self._tempdir)

    def test_copy(self):
        with self.get_pack(pack1_sha) as origpack:
            self.assertSucceeds(origpack.index.check)
            pack_path = os.path.join(self._tempdir, "Elch")
            write_pack(pack_path, origpack.pack_tuples())
            # Ask C git to verify the pack we just wrote and collect the
            # object SHAs it reports.
            output = run_git_or_fail(['verify-pack', '-v', pack_path])
            pack_shas = set()
            for line in output.splitlines():
                sha = line[:40]
                try:
                    binascii.unhexlify(sha)
                    pack_shas.add(Sha1Sum(sha))
                except (ObjectFormatException, TypeError, binascii.Error):
                    # Non-object lines (headers, statistics) are skipped.
                    continue
            orig_shas = set(o.id for o in origpack.iterobjects())
            self.assertEqual(orig_shas, pack_shas)
Compatibility tests for reading and writing pack files.
62598fb3d7e4931a7ef3c122
class FusedRingsTestSystem(SmallMoleculeLibraryTestSystem):
    """Test system with fused rings (benzene <-> naphthalene) in explicit solvent."""

    def __init__(self, **kwargs):
        # SMILES strings for benzene and naphthalene; must be assigned
        # before the base-class initializer runs.
        self.molecules = ['c1ccccc1', 'c1ccc2ccccc2c1']
        super(FusedRingsTestSystem, self).__init__(**kwargs)
Simple test system containing fused rings (benzene <--> naphthalene) in explicit solvent.
62598fb37d847024c075c451
class ShellExecuteInfoW(Structure):
    """ctypes mirror of the Win32 SHELLEXECUTEINFOW structure.

    See https://docs.microsoft.com/en-us/windows/win32/api/shellapi/ns-shellapi-shellexecuteinfow
    """

    # Field names, order and types must match the C layout exactly.
    _fields_ = [
        ("cbSize", DWORD),
        ("fMask", ULONG),
        ("hwnd", HWND),
        ("lpVerb", LPWSTR),
        ("lpFile", LPWSTR),
        ("lpParameters", LPWSTR),
        ("lpDirectory", LPWSTR),
        ("nShow", INT),
        ("hInstApp", HINSTANCE),
        ("lpIDList", LPVOID),
        ("lpClass", LPWSTR),
        ("hKeyClass", HKEY),
        ("dwHotKey", DWORD),
        ("hIcon", HANDLE),
        ("hProcess", HANDLE)
    ]
https://docs.microsoft.com/en-us/windows/win32/api/shellapi/ns-shellapi-shellexecuteinfow
62598fb37c178a314d78d52b
class Screen(Gtk.DrawingArea):
    """Gtk drawing area that forces a redraw roughly every 50 ms."""

    def __init__(self):
        super(Screen,self).__init__()
        self.connect("draw", self.on_draw)
        # Schedule tick() on a 50 ms timer.
        GObject.timeout_add(50, self.tick)

    def tick(self):
        # Invalidate the whole allocation so Gtk emits a "draw" signal.
        rect = self.get_allocation()
        self.get_window().invalidate_rect(rect, True)
        # Returning True keeps the timeout source alive.
        return True

    def on_draw(self, widget, event):
        self.cr = self.get_window().cairo_create()
        geom = self.get_window().get_geometry()
        # NOTE(review): subclasses are presumably expected to provide
        # draw(width, height); no such method is defined here -- confirm.
        self.draw(geom.width, geom.height)
This class is a Drawing Area
62598fb399cbb53fe6830f62
class Application(Frame):
    """Main Tkinter window: two labelled entry fields plus Quit/Convert.

    ``multiply`` reads the first entry and transforms its text
    according to the selection variable ``self.r`` (1=upper, 2=lower,
    3=title); a missing or unknown selection yields ``"***ERROR***"``.
    """

    def __init__(self, master=None):
        Frame.__init__(self, master)
        self.pack()
        self.createWidgets()

    def createWidgets(self):
        # Left column: entry plus label.
        top_frame = Frame(self)
        self.text_in = Entry(top_frame)
        self.label = Label(top_frame, text="Number")
        self.text_in.pack()
        self.label.pack()
        top_frame.pack(side=LEFT)
        # Right column: a second entry plus label.
        # BUG FIX: the original rebound self.text_in here, losing the
        # reference to the first entry; keep a separate name.
        top_frame = Frame(self)
        self.text_in2 = Entry(top_frame)
        self.labela = Label(top_frame, text="Number")
        self.text_in2.pack()
        self.labela.pack()
        top_frame.pack(side=RIGHT)
        bottom_frame = Frame(self)
        bottom_frame.pack(side=BOTTOM)
        # BUG FIX: Label() accepts no ``command`` option -- the original
        # raised an error at startup.  It is a passive status label.
        self.labelb = Label(bottom_frame, text="***ERROR***")
        self.labelb.pack(side=BOTTOM)
        self.QUIT = Button(bottom_frame, text="Quit", command=self.quit)
        self.QUIT.pack(side=LEFT)
        self.handleb = Button(bottom_frame, text="Convert", command=self.multiply)
        self.handleb.pack(side=LEFT)

    def multiply(self):
        text = self.text_in.get()
        # BUG FIX: ``self.r`` was never created, so every click raised
        # AttributeError; fall back to the error branch when no
        # selection variable exists.
        r = getattr(self, 'r', None)
        operation = r.get() if r is not None else None
        if operation == 1:
            output = text.upper()
        elif operation == 2:
            output = text.lower()
        elif operation == 3:
            output = text.title()
        else:
            output = "***ERROR***"
        self.label.config(text=output)
Application main window class.
62598fb3bd1bec0571e15109
class Message(db.Model):
    """Message-board entry attached to a movie."""

    id = db.Column(db.Integer, primary_key=True)
    # ID of the movie the message belongs to (no FK constraint declared).
    movie_id = db.Column(db.Integer)
    # Display name of the commenter.
    talker = db.Column(db.String(20))
    # The comment body itself, capped at 200 characters.
    message = db.Column(db.String(200))
留言板 (message board)
62598fb3e1aae11d1e7ce86b
class Conference(db.Model):
    """A conference (ConfBridge room) and its Asterisk-backed live state."""

    id = db.Column(db.Integer, primary_key=True)
    # The number dialled to join; unique across conferences.
    number = db.Column(db.String(16), unique=True)
    name = db.Column(db.Unicode(128))
    is_public = db.Column(db.Boolean)
    conference_profile_id = db.Column(db.Integer, db.ForeignKey('conference_profile.id'))
    conference_profile = db.relationship('ConferenceProfile')
    # Participant profile applied to public (anonymous) joiners.
    public_participant_profile_id = db.Column(
        db.Integer, db.ForeignKey('participant_profile.id'))
    public_participant_profile = db.relationship('ParticipantProfile')
    user_id = db.Column(db.Integer, db.ForeignKey('user.id'))
    user = db.relationship('User', backref='conferences')

    def __str__(self):
        return '%s <%s>' % (self.name, self.number)

    def _online_participant_count(self):
        # Live count queried from Asterisk; a falsy reply (bridge not
        # running) is reported as 0.
        return asterisk.confbridge_get_user_count(self.number) or 0
    online_participant_count = property(_online_participant_count)

    def _invited_participant_count(self):
        return Participant.query.filter_by(conference=self, is_invited=True).count()
    invited_participant_count = property(_invited_participant_count)

    def _participant_count(self):
        return len(self.participants)
    participant_count = property(_participant_count)

    def _is_locked(self):
        return asterisk.confbridge_is_locked(self.number)
    is_locked = property(_is_locked)

    def log(self, message):
        """Persist a log line for this conference and push it over SSE."""
        post = ConferenceLog(conference=self, message=message)
        db.session.add(post)
        db.session.commit()
        sse_notify(self.id, 'log_message', message)

    def invite_participants(self):
        """Originate calls to invited participants who are not yet online."""
        # Caller IDs currently in the bridge, so we do not re-dial them.
        online_participants = [
            k['callerid'] for k in asterisk.confbridge_list_participants(
                self.number)]
        gen = (p for p in self.participants
               if p.is_invited and p.phone not in online_participants)
        for p in gen:
            asterisk.originate(self.number, p.phone,
                               name=p.name,
                               bridge_options=self.conference_profile.get_confbridge_options(),
                               user_options=p.profile.get_confbridge_options()
                               )
Conference is an event held in a Room
62598fb33317a56b869be593
class SlicerPythonLzmaTests(unittest.TestCase):
    """Verifies that the Python build has lzma support enabled."""

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def test_compressionDecompressionRoundtrip(self):
        # Importing lzma inside the test makes a missing module the
        # test's own failure rather than an import-time error.
        import lzma
        payload = "something...".encode()
        compressor = lzma.LZMACompressor()
        packed = compressor.compress(payload) + compressor.flush()
        restored = lzma.LZMADecompressor().decompress(packed)
        self.assertEqual(payload, restored)
This test verifies that Python is built with lzma enabled.
62598fb31f5feb6acb162cae
class HiddenLayer(object):
    """Fully-connected MLP hidden layer: output = activation(W.x + b)."""

    def __init__(self, rng, input, n_in, n_out, W=None, b=None, activation=T.nnet.sigmoid):
        self.input = input
        if W is None:
            # Glorot-style uniform init in +/- sqrt(6/(fan_in+fan_out)),
            # the recommended interval for tanh units.
            W_values = np.asarray(rng.uniform(low=-np.sqrt(6. / (n_in + n_out)), high=np.sqrt(6. / (n_in + n_out)), size=(n_in, n_out)), dtype=theano.config.floatX)
            if activation == theano.tensor.nnet.sigmoid:
                # Sigmoid units use a 4x wider interval than tanh.
                W_values *= 4
            W = theano.shared(value=W_values, name='W', borrow=True)
        if b is None:
            b_values = np.zeros((n_out,), dtype=theano.config.floatX)
            b = theano.shared(value=b_values, name='b', borrow=True)
        self.W = W
        self.b = b
        lin_output = T.dot(input, self.W) + self.b
        # activation=None makes this a plain linear layer.
        self.output = (lin_output if activation is None else activation(lin_output))
        self.params = [self.W, self.b]
Hidden layer class for a Multi-layer Perceptron.
62598fb3097d151d1a2c10bd
class InternationalMelonOrder(AbstractMelonOrder):
    """An international (non-US) melon order (17% fee, plus a country code)."""

    def __init__(self, species, qty, country_code):
        # BUG FIX: the original called ``super.__init__(self, ...)`` --
        # ``super`` without parentheses is the builtin type itself, so
        # the base initializer was never invoked correctly.  Use
        # super() and drop the explicit self argument.
        super().__init__(species, qty, "international", 0.17)
        self.country_code = country_code

    def get_country_code(self):
        """Return the country code supplied at creation."""
        return self.country_code
An international (non-US) melon order.
62598fb3e5267d203ee6b994
class Card:
    """A single playing card, identified by rank and suit."""

    # Valid ranks (ace-low ordering) and suits (clubs, diamonds,
    # hearts, spades).
    RANKS = ["A", "2", "3", "4", "5", "6", "7", "8", "9", "10", "J", "Q", "K"]
    SUITS = ["c", "d", "h", "s"]

    def __init__(self, rank, suit):
        self.rank = rank
        self.suit = suit

    def __str__(self):
        # e.g. Card("A", "s") prints as "As".
        return self.rank + self.suit
A playing card.
62598fb338b623060ffa912c
class ActivateRuleRequest(AbstractModel):
    """Request payload for the ActivateRule API call.

    Attributes:
        RuleId: identifier of the rule to activate (None until
            deserialized).
    """

    def __init__(self):
        self.RuleId = None

    def _deserialize(self, params):
        # Missing key leaves RuleId as None via dict.get.
        self.RuleId = params.get("RuleId")
ActivateRule请求参数结构体 (ActivateRule request parameter structure)
62598fb3f548e778e596b634
class CodeGenerator(object):
    """Simple code-generation buffer with indent/dedent handling and
    wrapping of over-long lines with a continuation symbol.

    Parameters
    ----------
    indent : `str`
        The indentation unit, e.g. ``" " * 4``.
    max_length : `int`
        Maximum allowed line length before wrapping.
    continuation : `str`
        Continuation symbol of the target language (e.g. '&' in Fortran).
    comment : `str`
        Comment marker of the target language ('!' in Fortran, '#' in
        Python); comment lines are never split.
    """

    def __init__(self, indent, max_length, continuation, comment):
        self._indent = indent
        self.max_length = max_length
        self.continuation = continuation
        self.comment = comment
        self.level = 0   # current indentation depth
        self.code = []   # generated lines, each ending in '\n'

    def indent(self):
        """Increase the indentation level by one unit."""
        self.level += 1

    def dedent(self):
        """Decrease the indentation level by one unit."""
        self.level -= 1

    def write(self, *args):
        """Append the given tokens as one or more indented lines.

        Called with no arguments, writes a blank (indented) line.
        """
        # BUG FIX: the original used ``args is ()`` -- an identity check
        # against a tuple literal, which is implementation-dependent and
        # a SyntaxWarning on modern CPython.  Test emptiness instead.
        if not args:
            args = ('\n',)
        args = ' '.join(args).rstrip() + '\n'
        lines = args.splitlines(True)
        self.code.extend([self._indent * self.level + line for line in lines])

    def writelines(self, items, insert=None, level=None):
        """Append (or insert at index ``insert``) several possibly
        multi-line items, indented at ``level`` (default: the current
        level).  Returns the index just past the written lines."""
        if level is None:
            level = self.level
        lines = []
        for item in items:
            item_lines = item.splitlines(True)
            # Ensure every item ends with a newline.
            if not item_lines[-1].endswith('\n'):
                item_lines[-1] += '\n'
            lines.extend(item_lines)
        lines = [self._indent * level + line for line in lines]
        if insert is not None:
            self.code = self.code[:insert] + lines + self.code[insert:]
            return insert + len(lines)
        else:
            self.code.extend(lines)
            return len(self.code)

    def split_long_lines(self):
        """Return the buffered lines with non-comment lines longer than
        ``max_length`` wrapped using the continuation symbol."""
        out = []
        for line in self.code:
            if len(line) > self.max_length and not line.strip().startswith(self.comment):
                # Preserve the original leading indentation on the
                # first wrapped line.
                indent = line[:len(line) - len(line.lstrip())]
                tokens = line.split()
                split_lines = [[]]
                while tokens:
                    token = tokens.pop(0)
                    current_line = ' '.join(split_lines[-1])
                    if len(current_line) + len(token) < self.max_length:
                        split_lines[-1].append(token)
                    else:
                        split_lines[-1].append(self.continuation)
                        split_lines.append([self._indent + token])
                out.extend([indent + ' '.join(line) + '\n' for line in split_lines])
            else:
                out.append(line)
        return out

    def __str__(self):
        return "".join(self.split_long_lines())
Simple class to handle code generation. Handles simple tasks such as indent/dedent and continuation symbols. Parameters ---------- indent : `str` Specification of the indent size/type. Typical choices may be ``" "*4`` or ``" "``. max_length : `int` Maximum length of a code line. continuation : `str` Symbol to represent continuation in the desired language (eg. '&' in Fortran) comment : `str` Character used to define comments (e.g. '!' in Fortran, '#' in Python)
62598fb3a8370b77170f046c
class Cookbook(object):
    """A Chef cookbook: a collection of resources plus the files and
    supporting objects needed to run it.

    Parameters
    ----------
    name
        Cookbook name (also the output directory name); coerced to str.
    comment : str, optional
        Header text emitted before the resources when dumping.
    """

    def __init__(self, name, comment=None):
        self.name = str(name)
        self.comment = comment
        self.resources = []
        self.files = {}

    def add(self, resource):
        """Append an already-built resource."""
        self.resources.append(resource)

    def apt_package(self, name, **kwargs):
        self.add(Resource('apt_package', name, **kwargs))

    def gem_package(self, name, **kwargs):
        self.add(Resource('gem_package', name, **kwargs))

    def execute(self, name, **kwargs):
        self.add(Resource('execute', name, **kwargs))

    def directory(self, name, **kwargs):
        self.add(Resource('directory', name, **kwargs))

    def link(self, name, **kwargs):
        self.add(Resource('link', name, **kwargs))

    def file(self, name, content, **kwargs):
        self.add(File(name, content, **kwargs))

    def _dump(self, w, inline=False):
        # Emit the optional header comment, then every resource,
        # through the supplied writer callable.
        if self.comment is not None:
            w(self.comment)
        for resource in self.resources:
            w(resource.dumps(inline))

    def dumps(self):
        """Return the cookbook's default recipe as one string.

        BUG FIX: the original joined an always-empty list and returned
        '' unconditionally; it never invoked _dump.
        """
        out = []
        self._dump(out.append, inline=True)
        return ''.join(out)

    def dumpf(self, gzip=False):
        """Write the cookbook tree to disk.

        Returns the default recipe path, or the tarball name when
        ``gzip`` is true.
        """
        os.mkdir(self.name)
        # metadata.rb is required by Chef but may be empty (touch it).
        with open(os.path.join(self.name, 'metadata.rb'), 'w'):
            pass
        os.mkdir(os.path.join(self.name, 'recipes'))
        filename = os.path.join(self.name, 'recipes/default.rb')
        with open(filename, 'w') as f:
            self._dump(f.write, inline=False)
        # Materialize cookbook_file resources under files/default/.
        for resource in self.resources:
            if 'cookbook_file' != resource.type:
                continue
            pathname = os.path.join(self.name, 'files/default',
                                    resource.name[1:])
            try:
                os.makedirs(os.path.dirname(pathname))
            except OSError as e:
                # Pre-existing directories are fine; re-raise anything else.
                if errno.EEXIST != e.errno:
                    raise
            with open(pathname, 'w') as f:
                f.write(resource.content)
        if gzip:
            filename = 'chef-{0}.tar.gz'.format(self.name)
            tarball = tarfile.open(filename, 'w:gz')
            tarball.add(self.name)
            tarball.close()
            return filename
        return filename
A cookbook is a collection of Chef resources plus the files and other supporting objects needed to run it.
62598fb31b99ca400228f578
class JsgfRule(_object):
    """Proxy of C JsgfRule struct (SWIG-generated wrapper)."""

    # SWIG attribute plumbing: reads/writes are routed through the
    # generated getter/setter tables.
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, JsgfRule, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, JsgfRule, name)
    __repr__ = _swig_repr

    def __init__(self):
        # Wrap a freshly allocated C JsgfRule.
        this = _sphinxbase.new_JsgfRule()
        try:
            self.this.append(this)
        except __builtin__.Exception:
            self.this = this

    __swig_destroy__ = _sphinxbase.delete_JsgfRule
    __del__ = lambda self: None

    def fromIter(itor):
        """Construct a JsgfRule from a rule iterator (delegates to C)."""
        return _sphinxbase.JsgfRule_fromIter(itor)
    fromIter = staticmethod(fromIter)

    def name(self):
        """Delegate to the C API's JsgfRule_name."""
        return _sphinxbase.JsgfRule_name(self)

    def public(self):
        """Delegate to the C API's JsgfRule_public."""
        return _sphinxbase.JsgfRule_public(self)
Proxy of C JsgfRule struct.
62598fb363d6d428bbee283c
@_tag
class Object(HtmlEmbedded, HtmlFlow, HtmlInteractive, HtmlPalpable, HtmlPhrasing):
    """The HTML <object> element: an external resource treated as an
    image, a nested browsing context, or a plugin-handled resource.
    Behaviour comes entirely from the mixins and the @_tag decorator.
    """

    pass
Represents an external resource, which can be treated as an image, a nested browsing context, or a resource to be handled by a plugin. Categories: Interactive: if the element has a usemap attribute. Listed and submittable form-associated element: None. Content model: Zero or more param elements, then, transparent: None. Contexts for use: Embedded.
62598fb3a05bb46b3848a8fb
class sweep50(parameter):
    """Horizontal tail plane sweep angle at half chord.

    :Unit: [deg]
    """

    pass
Horizontal tail plane sweep angle at half chord :Unit: [deg]
62598fb356ac1b37e630227a
class Auth:
    """Manages authentication (login/logout) and authorization checks.

    All state is class-level, so there is at most one logged-in user
    per process.  ``access_level`` is a ``(role, privilege)`` tuple, or
    None when nobody is logged in.
    """

    access_level = None
    last_auth_time = None
    last_wrong_access_time = None
    delay_get_access = 30   # seconds get_access stays locked after a denial
    delay_auth = 30         # seconds login stays throttled
    access_map = None       # lazily-loaded {module: {name: privilege}}

    @classmethod
    def login(cls, login, password):
        """Attempt a login.

        Returns 0 on success, -1 on unknown user / bad password,
        -2 when throttled.
        """
        current_time = datetime.datetime.today()
        # NOTE(review): last_auth_time is only ever assigned inside this
        # branch, so the throttle can never engage after a normal login
        # attempt -- confirm whether it should also be set on failure.
        if (cls.last_auth_time is not None
                and (current_time - cls.last_auth_time).seconds < cls.delay_auth):
            cls.last_auth_time = current_time
            return -2
        query = AuthUsers.select().where(AuthUsers.login == login)
        if len(query) == 0:
            logging.info('AU Auth.login( ): fail <%s>', login)
            return -1
        elif len(query) == 1:
            entry = query.get()
            if bcrypt.checkpw(str(password).encode(), entry.password.encode()):
                logging.info('AU Auth.login( ): success <%s>', login)
                if entry.auth_user_id == 1:
                    # User id 1 is the built-in administrator.
                    cls.access_level = ('admin', 99)
                    return 0
                else:
                    cls.access_level = ('librarian', entry.privilege)
                    return 0
            logging.info('AU Auth.login( ): fail <%s>', login)
            return -1
        else:
            logging.error('AU Auth.login( ), too much users with same id!')
            return -1

    @classmethod
    def load_access_map(cls):
        """Lazily load the access map from ./data/access_map.json."""
        if cls.access_map is None:
            access_map_file = './data/access_map.json'
            with open(access_map_file, 'r') as json_file:
                cls.access_map = json.load(json_file)

    @classmethod
    def get_access(cls, module, name):
        """Return True when the logged-in user may access module/name."""
        current_time = datetime.datetime.today()
        if (cls.last_wrong_access_time is not None
                and (current_time - cls.last_wrong_access_time).seconds < cls.delay_get_access):
            cls.last_wrong_access_time = current_time
            return False
        if cls.access_level is not None:
            cls.load_access_map()
            if name in cls.access_map[module].keys():
                allowed = cls.access_map[module][name]
            else:
                # Names absent from the map are unrestricted.
                return True
            # BUG FIX: the original compared roles with ``is`` against
            # string literals, which only works via CPython's string
            # interning.  Use equality.
            if cls.access_level[0] == 'admin':
                return True
            if cls.access_level[0] == 'librarian':
                if allowed <= cls.access_level[1]:
                    return True
            return False
        # Not logged in: deny and start the lock-out window.
        cls.last_wrong_access_time = current_time
        return False

    @classmethod
    def get_access_level(cls):
        """Return (role, privilege) of the current user.

        Raises TypeError when nobody is logged in (access_level is None).
        """
        return cls.access_level[0], cls.access_level[1]

    @classmethod
    def log_out(cls):
        """Clear the session.  Returns False when nobody was logged in."""
        if cls.access_level is None:
            logging.info('AU Auth.log_out( ): fail')
            return False
        logging.info('AU Auth.log_out( ): success')
        cls.access_level = None
        return True
Class that manages authentication and authorization
62598fb37047854f4633f46b
class UserAdminFormTestCase(TestCase):
    """Tests for the UserAdminCreation form."""

    def setUp(self):
        # A persisted user with a known username/password to test against.
        self.user = mommy.prepare(settings.AUTH_USER_MODEL)
        self.user.username = "usuario"
        self.user.set_password('123456')
        self.user.save()

    def test_valid_form(self):
        # A complete payload (matching passwords) must validate.
        data = {'username': 'teste', 'name': 'teste', 'email': 'teste@teste.com', 'password1': '123456', 'password2': '123456'}
        form = UserAdminForm(data=data)
        self.assertTrue(form.is_valid(), 'error validating form with invalid user')

    def test_invalid_form(self):
        # Missing required fields must fail validation.
        data = {'username': 'blablabla', 'password': '123456'}
        form = UserAdminForm(data=data)
        self.assertFalse(form.is_valid(), 'error validating form with invalid user')

    def test_fields_form(self):
        # The form must expose exactly these user fields.
        form = UserAdminForm()
        filds = ['username', 'name', 'email', 'is_active', 'is_staff']
        for fild in filds:
            self.assertTrue(fild in form.fields, 'UserAdminCreation form does not have the field {}'.format(fild))
Class Testing Form UserAdminCreation
62598fb34e4d5625663724b6
class Development(Common):
    """In-development settings; also the default configuration."""

    DEBUG = True
    ALLOWED_HOSTS = []
    INSTALLED_APPS = Common.INSTALLED_APPS + (
        'debug_toolbar',
    )
    # Print outgoing mail to the console instead of sending it.
    EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
    DEBUG_TOOLBAR_PATCH_SETTINGS = values.BooleanValue(True)
    # Allow cross-origin requests from anywhere while developing.
    CORS_ORIGIN_ALLOW_ALL = values.BooleanValue(True)
The in-development settings and the default configuration.
62598fb3009cb60464d015b2
class StringResponseData(ResponseData):
    """`ResponseData` backed by an in-memory string.

    The string must be latin-1 encodable; reads come back as bytes.
    """

    def __init__(self, string):
        # Size in encoded bytes, not characters.
        self._size = len(string.encode("latin-1"))
        self._reader = io.StringIO(string)

    def read(self, n):
        """Read up to ``n`` characters, returned latin-1 encoded."""
        chunk = self._reader.read(n)
        return bytes(chunk.encode("latin-1"))

    def size(self):
        """Total byte size of the underlying string."""
        return self._size

    def close(self):
        # Nothing to release for an in-memory buffer.
        pass
A convenience subclass of `ResponseData` that transforms an input String into a file-like object.
62598fb37d847024c075c452
class Spider():
    """Scrapes douyu.com's game page and prints streamers ranked by
    viewer count.

    ``url`` is the page address; ``root_pattern``, ``name_pattern`` and
    ``number_pattern`` are the regexes used to pull each streamer's name
    and viewer count out of the HTML.
    """

    url = 'https://www.douyu.com/g_jdqs'
    root_pattern = '<div class="DyListCover-info">([\s\S]*?)</div>'
    name_pattern = '</svg>([\s\S]*?)</svg>([\s\S]*?)</h2>'
    number_pattern = '</svg>([\s\S]*?)</span>'

    def __fetch_content(self):
        """Download the page and gunzip it to text."""
        r = request.urlopen(Spider.url)
        htmls = r.read()
        buff = BytesIO(htmls)
        f = gzip.GzipFile(fileobj=buff)
        htmls = f.read().decode('utf-8')
        return htmls

    def __analysis(self, htmls):
        """Extract [{'name': ..., 'number': [...]}] from the raw HTML.

        Only every second matched block carries the data we want,
        hence the even-index filter.
        """
        root_html = re.findall(Spider.root_pattern, htmls)
        n = 1
        anchors = []
        for html in root_html:
            if n % 2 == 0:
                name = re.findall(Spider.name_pattern, html)
                name = name[0][1]
                number = re.findall(Spider.number_pattern, html)
                anchors.append({'name': name, 'number': number})
            n += 1
        return anchors

    def __refine(self, anchors):
        """Flatten each anchor's number list to its first element.

        BUG FIX: the original built a lazy ``map`` object, never
        consumed it, and implicitly returned None, which made
        ``__sort`` crash.  Return the refined list.
        """
        func = lambda anchor: {'name': anchor['name'],
                               'number': anchor['number'][0]}
        return list(map(func, anchors))

    def __sort(self, anchors):
        anchors = sorted(anchors, key=self.__sort_seed, reverse=True)
        return anchors

    def __sort_seed(self, anchor):
        # Leading digits only; '万' (10,000) scales the count.
        # NOTE(review): '\d*' drops any fractional part ('12.3万' counts
        # as 120000, not 123000) -- original behaviour, confirm intent.
        r = re.findall('\d*', anchor['number'])
        number = float(r[0])
        if '万' in anchor['number']:
            number *= 10000
        return number

    def __show(self, anchors):
        for rank in range(0, len(anchors)):
            print(' rank ', rank + 1, ' : ', anchors[rank]['name'],
                  ' : ', anchors[rank]['number'])

    def go(self):
        """Entry point: fetch, parse, refine, sort and print."""
        htmls = self.__fetch_content()
        anchors = self.__analysis(htmls)
        anchors = self.__refine(anchors)
        anchors = self.__sort(anchors)
        # BUG FIX: the original printed __show's return value, emitting
        # a stray "None" line after the ranking.
        self.__show(anchors)
url为网页地址,root_pattern、name_pattern、number_pattern为获取信息的正则表达式规则 (url is the page address; root_pattern, name_pattern and number_pattern are the regular expressions used to extract the information)
62598fb3167d2b6e312b7003
class VideoUnlockCb(ctypes.c_void_p):
    """Opaque C function-pointer type for the libvlc video "unlock"
    callback (see libvlc_video_set_callbacks); carries no Python body.
    """

    pass
Callback prototype to unlock a picture buffer. When the video frame decoding is complete, the unlock callback is invoked. This callback might not be needed at all. It is only an indication that the application can now read the pixel values if it needs to. @warning: A picture buffer is unlocked after the picture is decoded, but before the picture is displayed. @param opaque: private pointer as passed to L{libvlc_video_set_callbacks}() [IN]. @param picture: private pointer returned from the @ref libvlc_video_lock_cb callback [IN]. @param planes: pixel planes as defined by the @ref libvlc_video_lock_cb callback (this parameter is only for convenience) [IN].
62598fb3bf627c535bcb1530
class MoleculeBoxModel(models.Model):
    """Django model wrapping a pickled MoleculeBox plus its SVG rendering."""

    # The pickled MoleculeBox object itself.
    moleculeBox = PickledObjectField(null=True)
    svg = models.TextField(null=True)
    equalsTarget = models.BooleanField()
    isStartingMaterial = models.BooleanField()

    @classmethod
    def create(cls, moleculeBoxObject, isStartingMaterial=False):
        """Build (without saving) a model instance from a MoleculeBox."""
        # NOTE(review): svg is populated from stringList() -- presumably
        # the serialized rendering; confirm against MoleculeBox.
        x = cls(moleculeBox=moleculeBoxObject, svg=moleculeBoxObject.stringList(), equalsTarget=False, isStartingMaterial=isStartingMaterial)
        return x

    def checkIfEqualsTarget(self, target):
        """Compare against the target's box; cache and return the result."""
        self.equalsTarget = boxEqualityChecker(self.moleculeBox, target.moleculeBox)
        return self.equalsTarget
MoleculeBoxModel Contains: foreignkey to a SynthesisProblemModel Contains: pickled moleculebox Contains: SVG representation
62598fb332920d7e50bc60e4
class FileLockTest(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self._tempFileID = tempfile.TemporaryFile() <NEW_LINE> <DEDENT> def testSharedLock(self): <NEW_LINE> <INDENT> flock = file_lock.SharedFileLock(self._tempFileID) <NEW_LINE> self.assertEqual(type(flock), file_lock._FileLock) <NEW_LINE> <DEDENT> def testExclusiveLock(self): <NEW_LINE> <INDENT> flock = file_lock.ExclusiveFileLock(self._tempFileID) <NEW_LINE> self.assertEqual(type(flock), file_lock._FileLock) <NEW_LINE> <DEDENT> def testContextMgrWithGoodFile(self): <NEW_LINE> <INDENT> with file_lock._FileLock(self._tempFileID, fcntl.LOCK_SH) as flock: <NEW_LINE> <INDENT> self.assertEqual(type(flock), file_lock._FileLock) <NEW_LINE> <DEDENT> <DEDENT> def testFileLockAcquireException(self): <NEW_LINE> <INDENT> self._tempFileID.close() <NEW_LINE> with self.assertRaises(file_lock.FileLockAcquireException): <NEW_LINE> <INDENT> with file_lock._FileLock(self._tempFileID, fcntl.LOCK_SH) as f_err: <NEW_LINE> <INDENT> self.assertIn("FileLock acquire failed on", f_err.exception.args[0]) <NEW_LINE> <DEDENT> <DEDENT> with self.assertRaises(file_lock.FileLockAcquireException): <NEW_LINE> <INDENT> with file_lock._FileLock([], fcntl.LOCK_SH) as f_err: <NEW_LINE> <INDENT> self.assertIn("FileLock acquire failed on", f_err.exception.args[0]) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> """To do: test ReleaseException""" <NEW_LINE> def testValidLockOperation(self): <NEW_LINE> <INDENT> with self.assertRaises(AttributeError): <NEW_LINE> <INDENT> file_lock._FileLock(self._tempFileID, fcntl.LOCK_S)
Unit tests for the file-lock utilities
62598fb3d7e4931a7ef3c124
class BarrierTaskContext(TaskContext): <NEW_LINE> <INDENT> _port = None <NEW_LINE> _secret = None <NEW_LINE> @classmethod <NEW_LINE> def _getOrCreate(cls): <NEW_LINE> <INDENT> if not isinstance(cls._taskContext, BarrierTaskContext): <NEW_LINE> <INDENT> cls._taskContext = object.__new__(cls) <NEW_LINE> <DEDENT> return cls._taskContext <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def get(cls): <NEW_LINE> <INDENT> if not isinstance(cls._taskContext, BarrierTaskContext): <NEW_LINE> <INDENT> raise Exception('It is not in a barrier stage') <NEW_LINE> <DEDENT> return cls._taskContext <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def _initialize(cls, port, secret): <NEW_LINE> <INDENT> cls._port = port <NEW_LINE> cls._secret = secret <NEW_LINE> <DEDENT> def barrier(self): <NEW_LINE> <INDENT> if self._port is None or self._secret is None: <NEW_LINE> <INDENT> raise Exception("Not supported to call barrier() before initialize " + "BarrierTaskContext.") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> _load_from_socket(self._port, self._secret, BARRIER_FUNCTION) <NEW_LINE> <DEDENT> <DEDENT> def allGather(self, message=""): <NEW_LINE> <INDENT> if not isinstance(message, str): <NEW_LINE> <INDENT> raise ValueError("Argument `message` must be of type `str`") <NEW_LINE> <DEDENT> elif self._port is None or self._secret is None: <NEW_LINE> <INDENT> raise Exception("Not supported to call barrier() before initialize " + "BarrierTaskContext.") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return _load_from_socket(self._port, self._secret, ALL_GATHER_FUNCTION, message) <NEW_LINE> <DEDENT> <DEDENT> def getTaskInfos(self): <NEW_LINE> <INDENT> if self._port is None or self._secret is None: <NEW_LINE> <INDENT> raise Exception("Not supported to call getTaskInfos() before initialize " + "BarrierTaskContext.") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> addresses = self._localProperties.get("addresses", "") <NEW_LINE> return [BarrierTaskInfo(h.strip()) for h in addresses.split(",")]
.. note:: Experimental A :class:`TaskContext` with extra contextual info and tooling for tasks in a barrier stage. Use :func:`BarrierTaskContext.get` to obtain the barrier context for a running barrier task. .. versionadded:: 2.4.0
62598fb38a349b6b436862cd
class MutateJobServiceStub(object): <NEW_LINE> <INDENT> def __init__(self, channel): <NEW_LINE> <INDENT> self.CreateMutateJob = channel.unary_unary( '/google.ads.googleads.v1.services.MutateJobService/CreateMutateJob', request_serializer=google_dot_ads_dot_googleads__v1_dot_proto_dot_services_dot_mutate__job__service__pb2.CreateMutateJobRequest.SerializeToString, response_deserializer=google_dot_ads_dot_googleads__v1_dot_proto_dot_services_dot_mutate__job__service__pb2.CreateMutateJobResponse.FromString, ) <NEW_LINE> self.GetMutateJob = channel.unary_unary( '/google.ads.googleads.v1.services.MutateJobService/GetMutateJob', request_serializer=google_dot_ads_dot_googleads__v1_dot_proto_dot_services_dot_mutate__job__service__pb2.GetMutateJobRequest.SerializeToString, response_deserializer=google_dot_ads_dot_googleads__v1_dot_proto_dot_resources_dot_mutate__job__pb2.MutateJob.FromString, ) <NEW_LINE> self.ListMutateJobResults = channel.unary_unary( '/google.ads.googleads.v1.services.MutateJobService/ListMutateJobResults', request_serializer=google_dot_ads_dot_googleads__v1_dot_proto_dot_services_dot_mutate__job__service__pb2.ListMutateJobResultsRequest.SerializeToString, response_deserializer=google_dot_ads_dot_googleads__v1_dot_proto_dot_services_dot_mutate__job__service__pb2.ListMutateJobResultsResponse.FromString, ) <NEW_LINE> self.RunMutateJob = channel.unary_unary( '/google.ads.googleads.v1.services.MutateJobService/RunMutateJob', request_serializer=google_dot_ads_dot_googleads__v1_dot_proto_dot_services_dot_mutate__job__service__pb2.RunMutateJobRequest.SerializeToString, response_deserializer=google_dot_longrunning_dot_operations__pb2.Operation.FromString, ) <NEW_LINE> self.AddMutateJobOperations = channel.unary_unary( '/google.ads.googleads.v1.services.MutateJobService/AddMutateJobOperations', request_serializer=google_dot_ads_dot_googleads__v1_dot_proto_dot_services_dot_mutate__job__service__pb2.AddMutateJobOperationsRequest.SerializeToString, 
response_deserializer=google_dot_ads_dot_googleads__v1_dot_proto_dot_services_dot_mutate__job__service__pb2.AddMutateJobOperationsResponse.FromString, )
Proto file describing the MutateJobService. Service to manage mutate jobs.
62598fb3aad79263cf42e864
class ImageStreamParser(StreamParser): <NEW_LINE> <INDENT> def get_frame(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> frame = urllib.request.urlopen(self.url, timeout=5) <NEW_LINE> def handler(signum): <NEW_LINE> <INDENT> print('Signal handler called with signal', signum) <NEW_LINE> raise OSError("Couldn't open device!") <NEW_LINE> <DEDENT> signal.signal(signal.SIGALRM, handler) <NEW_LINE> signal.alarm(5) <NEW_LINE> frame = frame.read() <NEW_LINE> signal.alarm(0) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> print("Possible Cause of error: {}".format(e)) <NEW_LINE> raise UnreachableCameraError() <NEW_LINE> <DEDENT> if frame == '': <NEW_LINE> <INDENT> raise CorruptedFrameError() <NEW_LINE> <DEDENT> frame_size = len(frame) <NEW_LINE> frame = cv2.imdecode(np.fromstring(frame, dtype=np.uint8), -1) <NEW_LINE> if frame is None: <NEW_LINE> <INDENT> raise CorruptedFrameError() <NEW_LINE> <DEDENT> return frame, frame_size
Represent a parser for a camera image stream. This class subclasses the StreamParser class and inherits its attributes and constructor. Notes ----- A camera that provides an image stream is a camera that provides a URL to get the most recent frame (regardless of how recent it is). Hence, Parsing an image stream is as simple as downloading the most recent frame from the given URL whenever requested. There is no need to call open_stream or close_stream since they do nothing.
62598fb330dc7b766599f8dd
class Register(generics.GenericAPIView): <NEW_LINE> <INDENT> permission_classes = (AllowAny,) <NEW_LINE> serializer_class = UserRegisterSerializer <NEW_LINE> allowed_methods = ('POST', 'OPTIONS', 'HEAD') <NEW_LINE> def post_register(self): <NEW_LINE> <INDENT> auth_login(self.request, self.user) <NEW_LINE> <DEDENT> def get_response(self): <NEW_LINE> <INDENT> token = Token.objects.create(user=self.user) <NEW_LINE> return Response({"key": token.key}, status=status.HTTP_201_CREATED) <NEW_LINE> <DEDENT> def get_error_response(self): <NEW_LINE> <INDENT> return Response(self.serializer.errors, status=status.HTTP_400_BAD_REQUEST) <NEW_LINE> <DEDENT> def post(self, request, *args, **kwargs): <NEW_LINE> <INDENT> self.serializer = self.get_serializer(data=self.request.data) <NEW_LINE> if not self.serializer.is_valid(): <NEW_LINE> <INDENT> return self.get_error_response() <NEW_LINE> <DEDENT> self.user = self.serializer.save() <NEW_LINE> self.post_register() <NEW_LINE> return self.get_response()
Creates User Account Returns: SESSION cookie on success and 201 status; Errors and 4xx status code on failure
62598fb363b5f9789fe851fa
class GetFile(function.Function): <NEW_LINE> <INDENT> def __init__(self, stack, fn_name, args): <NEW_LINE> <INDENT> super(GetFile, self).__init__(stack, fn_name, args) <NEW_LINE> self.files = self.stack.t.files if self.stack is not None else None <NEW_LINE> <DEDENT> def result(self): <NEW_LINE> <INDENT> assert self.files is not None, "No stack definition in Function" <NEW_LINE> args = function.resolve(self.args) <NEW_LINE> if not (isinstance(args, str)): <NEW_LINE> <INDENT> raise TypeError(_('Argument to "%s" must be a string') % self.fn_name) <NEW_LINE> <DEDENT> f = self.files.get(args) <NEW_LINE> if f is None: <NEW_LINE> <INDENT> fmt_data = {'fn_name': self.fn_name, 'file_key': args} <NEW_LINE> raise ValueError(_('No content found in the "files" section for ' '%(fn_name)s path: %(file_key)s') % fmt_data) <NEW_LINE> <DEDENT> return f
A function for including a file inline. Takes the form:: get_file: <file_key> And resolves to the content stored in the files dictionary under the given key.
62598fb355399d3f056265ab
class ObjectFactory: <NEW_LINE> <INDENT> __type1Value1 = None <NEW_LINE> __type1Value2 = None <NEW_LINE> __type2Value1 = None <NEW_LINE> __type2Value2 = None <NEW_LINE> @staticmethod <NEW_LINE> def initialize(): <NEW_LINE> <INDENT> ObjectFactory.__type1Value1 = Type1(1) <NEW_LINE> ObjectFactory.__type1Value2 = Type1(2) <NEW_LINE> ObjectFactory.__type2Value1 = Type2(1) <NEW_LINE> ObjectFactory.__type2Value2 = Type2(2) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def getType1Value1(): <NEW_LINE> <INDENT> return ObjectFactory.__type1Value1.clone() <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def getType1Value2(): <NEW_LINE> <INDENT> return ObjectFactory.__type1Value2.clone() <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def getType2Value1(): <NEW_LINE> <INDENT> return ObjectFactory.__type2Value1.clone() <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def getType2Value2(): <NEW_LINE> <INDENT> return ObjectFactory.__type2Value2.clone()
Manages prototypes. Static factory, that encapsulates prototype initialization and then allows instatiation of the classes from these prototypes.
62598fb3e5267d203ee6b996
class RicercaAziende(Ricerca): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> Ricerca.__init__(self, 'Promogest - Ricerca aziende', RicercaAziendeFilter(self)) <NEW_LINE> self.inserimento_togglebutton.set_sensitive(False) <NEW_LINE> <DEDENT> def insert(self, toggleButton, returnWindow): <NEW_LINE> <INDENT> pass
Ricerca azienda
62598fb3f548e778e596b635
class PredictionConfidenceScore(ConfidenceQuantifier): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def takes_samples(cls) -> bool: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def problem_type(cls) -> ProblemType: <NEW_LINE> <INDENT> return ProblemType.CLASSIFICATION <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def aliases(cls) -> List[str]: <NEW_LINE> <INDENT> return ["pcs", "prediction_confidence_score", "PredictionConfidenceScore"] <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def calculate(cls, nn_outputs: np.ndarray): <NEW_LINE> <INDENT> _check_inputs_array(nn_outputs, quantifier_name="prediction_confidence_score") <NEW_LINE> num_samples = nn_outputs.shape[0] <NEW_LINE> calculated_predictions = np.argmax(nn_outputs, axis=1) <NEW_LINE> max_values = nn_outputs[np.arange(num_samples), calculated_predictions] <NEW_LINE> values_copy = nn_outputs.copy() <NEW_LINE> values_copy[np.arange(num_samples), calculated_predictions] = -np.inf <NEW_LINE> second_highest_values = np.max(values_copy, axis=1) <NEW_LINE> pcs = max_values - second_highest_values <NEW_LINE> return calculated_predictions, pcs
The Prediction Confidence Score is a confidence metric in one-shot classification. Inputs/activations have to be normalized using the softmax function over all classes. The class with the highest activation is chosen as prediction, the difference between the two highest activations is used as confidence quantification.
62598fb366673b3332c3045e
class Male(Agent): <NEW_LINE> <INDENT> __slots__=() <NEW_LINE> def __init__(self, params, attributes, timestepper, stats): <NEW_LINE> <INDENT> Agent.__init__(self, params, attributes, timestepper, stats) <NEW_LINE> <DEDENT> def step_activity(self, sim): <NEW_LINE> <INDENT> super(Male, self).step_activity(sim) <NEW_LINE> if self.params["log_wages"] and self.employment.have_job(): <NEW_LINE> <INDENT> wage = self.employment.get_wage(sim.pop) <NEW_LINE> logger.debug("event:wage,date:{},agent:{},age:{},wage:{},skill:{}," "experience:{}".format(self.timestepper.date, self.ident, self.age_years, wage, self.skill, self.experience)) <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def isfemale(self): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> def get_marriage_market(self, pop): <NEW_LINE> <INDENT> return pop.marriage_market_males
subclass of agent corresponding to male agents
62598fb3cc0a2c111447b0a4
class TestCurveChecker(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.reference = bezier.CubicBezierCurve( (10, 30), (30, 30), (10, 10), (30, 10) ) <NEW_LINE> <DEDENT> def test_assert_continuous(self): <NEW_LINE> <INDENT> discountinuous_curve = bezier.CubicBezierCurve( (30, 40), (10, 60), (30, 60), (30, 60) ) <NEW_LINE> self.assertFalse( bezier.assert_continuous(self.reference, discountinuous_curve) ) <NEW_LINE> continuous_curve = bezier.CubicBezierCurve( (30, 30), (50, 10), (50, 30), (50, 30) ) <NEW_LINE> self.assertTrue( bezier.assert_continuous(self.reference, continuous_curve) ) <NEW_LINE> <DEDENT> def test_assert_collinear(self): <NEW_LINE> <INDENT> collinear = [ (1, 1), (2, 5), (4, 13) ] <NEW_LINE> self.assertTrue( bezier.assert_collinear(*collinear) ) <NEW_LINE> noncollinear = [ (1, 1), (2, 5), (11, 43) ] <NEW_LINE> self.assertFalse( bezier.assert_collinear(*noncollinear) ) <NEW_LINE> <DEDENT> def test_assert_differentiable(self): <NEW_LINE> <INDENT> nondifferentiable_curve = bezier.CubicBezierCurve( (30, 30), (50, 10), (50, 30), (50, 30) ) <NEW_LINE> self.assertFalse( bezier.assert_differentiable(self.reference, nondifferentiable_curve) ) <NEW_LINE> differentiable_curve = bezier.CubicBezierCurve( (30, 30), (10, 50), (30, 50), (30, 50) ) <NEW_LINE> self.assertTrue( bezier.assert_differentiable(self.reference, differentiable_curve) )
CurveChecker tests
62598fb356ac1b37e630227d
class City(models.Model): <NEW_LINE> <INDENT> city = models.CharField(_('city'), max_length=100) <NEW_LINE> state = models.CharField(_('state'), max_length=100) <NEW_LINE> slug = models.SlugField(_('slug'), unique=True) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> verbose_name = _('city') <NEW_LINE> verbose_name_plural = _('cities') <NEW_LINE> db_table = 'place_cities' <NEW_LINE> unique_together = (('city', 'state',),) <NEW_LINE> ordering = ('state', 'city',) <NEW_LINE> <DEDENT> class Admin: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __unicode__(self): <NEW_LINE> <INDENT> return u'%s, %s' % (self.city, self.state) <NEW_LINE> <DEDENT> @permalink <NEW_LINE> def get_absolute_url(self): <NEW_LINE> <INDENT> return ('place_city_detail', None, {'slug': self.slug})
City model.
62598fb3cc0a2c111447b0a5
class NetgroupParser(parser.FileParser): <NEW_LINE> <INDENT> output_types = ["User"] <NEW_LINE> supported_artifacts = ["NetgroupConfiguration"] <NEW_LINE> USERNAME_REGEX = r"^[a-z_][a-z0-9_-]{0,30}[$]?$" <NEW_LINE> @classmethod <NEW_LINE> def ParseLines(cls, lines): <NEW_LINE> <INDENT> users = set() <NEW_LINE> filter_regexes = [ re.compile(x) for x in config.CONFIG["Artifacts.netgroup_filter_regexes"] ] <NEW_LINE> username_regex = re.compile(cls.USERNAME_REGEX) <NEW_LINE> blacklist = config.CONFIG["Artifacts.netgroup_user_blacklist"] <NEW_LINE> for index, line in enumerate(lines): <NEW_LINE> <INDENT> if line.startswith("#"): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> splitline = line.split(" ") <NEW_LINE> group_name = splitline[0] <NEW_LINE> if filter_regexes: <NEW_LINE> <INDENT> filter_match = False <NEW_LINE> for regex in filter_regexes: <NEW_LINE> <INDENT> if regex.search(group_name): <NEW_LINE> <INDENT> filter_match = True <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> if not filter_match: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> <DEDENT> for member in splitline[1:]: <NEW_LINE> <INDENT> if member.startswith("("): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> _, user, _ = member.split(",") <NEW_LINE> if user not in users and user not in blacklist: <NEW_LINE> <INDENT> if not username_regex.match(user): <NEW_LINE> <INDENT> yield rdf_anomaly.Anomaly( type="PARSER_ANOMALY", symptom="Invalid username: %s" % user) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> users.add(user) <NEW_LINE> yield rdf_client.User(username=utils.SmartUnicode(user)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> except ValueError: <NEW_LINE> <INDENT> raise parser.ParseError( "Invalid netgroup file at line %d: %s" % (index + 1, line)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> <DEDENT> def Parse(self, stat, file_object, knowledge_base): <NEW_LINE> <INDENT> _, _ = stat, knowledge_base <NEW_LINE> lines = [ l.strip() for l in utils.ReadFileBytesAsUnicode(file_object).splitlines() ] 
<NEW_LINE> return self.ParseLines(lines)
Parser that extracts users from a netgroup file.
62598fb3d7e4931a7ef3c127
class FixedBoundedFloatStrategy(FloatStrategy): <NEW_LINE> <INDENT> Parameter = namedtuple( 'Parameter', ('cut', 'leftwards') ) <NEW_LINE> def __init__(self, lower_bound, upper_bound): <NEW_LINE> <INDENT> SearchStrategy.__init__(self) <NEW_LINE> self.lower_bound = float(lower_bound) <NEW_LINE> self.upper_bound = float(upper_bound) <NEW_LINE> <DEDENT> def produce_parameter(self, random): <NEW_LINE> <INDENT> return self.Parameter( cut=random.random(), leftwards=dist.biased_coin(random, 0.5) ) <NEW_LINE> <DEDENT> def produce_template(self, context, pv): <NEW_LINE> <INDENT> random = context.random <NEW_LINE> cut = self.lower_bound + pv.cut * (self.upper_bound - self.lower_bound) <NEW_LINE> if pv.leftwards: <NEW_LINE> <INDENT> left = self.lower_bound <NEW_LINE> right = cut <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> left = cut <NEW_LINE> right = self.upper_bound <NEW_LINE> <DEDENT> return left + random.random() * (right - left) <NEW_LINE> <DEDENT> def basic_simplify(self, random, value): <NEW_LINE> <INDENT> if value == self.lower_bound: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> yield self.lower_bound <NEW_LINE> yield self.upper_bound <NEW_LINE> mid = (self.lower_bound + self.upper_bound) * 0.5 <NEW_LINE> yield mid
A strategy for floats distributed between two endpoints. The conditional distribution tries to produce values clustered closer to one of the ends.
62598fb34a966d76dd5eef69
class DaemonError(Exception): <NEW_LINE> <INDENT> def __init__(self, value): <NEW_LINE> <INDENT> self.msg = value <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return repr(self.msg)
Standard exception for a Daemon error. Very simplistic at the moment in that it only caters for situations where functionality is requested without a valid PID file specified. .. attribute:: msg An explanation of the error.
62598fb3d486a94d0ba2c063
class ProxyContainer: <NEW_LINE> <INDENT> def __init__(self) -> None: <NEW_LINE> <INDENT> self._handle_manager: HandleManager = HandleManager() <NEW_LINE> self._object_proxies: ProxyList = [] <NEW_LINE> <DEDENT> def add_object(self, obj: Any) -> Handle: <NEW_LINE> <INDENT> handle: Handle = self._handle_manager.issue_handle() <NEW_LINE> if len(self._object_proxies) == handle.index: <NEW_LINE> <INDENT> self._object_proxies.append(None) <NEW_LINE> <DEDENT> self._object_proxies[handle.index] = proxy(obj) <NEW_LINE> return handle <NEW_LINE> <DEDENT> def get(self, handle: Handle) -> proxy: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self._handle_manager.validate_handle(handle) <NEW_LINE> <DEDENT> except HandleValidationException: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> return self._object_proxies[handle.index] <NEW_LINE> <DEDENT> def remove(self, handle: Handle) -> None: <NEW_LINE> <INDENT> self._object_proxies[handle.index] = None <NEW_LINE> self._handle_manager.remove_handle(handle) <NEW_LINE> <DEDENT> def clear(self) -> None: <NEW_LINE> <INDENT> self._handle_manager.remove_all_handles() <NEW_LINE> self._object_proxies.clear()
Handle Managed Container
62598fb3dd821e528d6d8fc1
class Meta: <NEW_LINE> <INDENT> model = ProviderAuthentication <NEW_LINE> fields = ('uuid', 'provider_resource_name', 'credentials')
Metadata for the serializer.
62598fb38e7ae83300ee9136
class SelectorDIC(ModelSelector): <NEW_LINE> <INDENT> def select(self): <NEW_LINE> <INDENT> warnings.filterwarnings("ignore", category=DeprecationWarning) <NEW_LINE> highest_dic = float('-inf') <NEW_LINE> best_model = None <NEW_LINE> for i in range(self.min_n_components, self.max_n_components + 1): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> model = self.base_model(i) <NEW_LINE> scores = [] <NEW_LINE> for word, (X, lengths) in self.hwords.items(): <NEW_LINE> <INDENT> if word != self.this_word: <NEW_LINE> <INDENT> scores.append(model.score(X, lengths)) <NEW_LINE> <DEDENT> <DEDENT> dic = model.score(self.X, self.lengths) - np.mean(scores) <NEW_LINE> if dic > highest_dic: <NEW_LINE> <INDENT> highest_dic = dic <NEW_LINE> best_model = model <NEW_LINE> <DEDENT> <DEDENT> except: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> <DEDENT> return best_model if best_model else self.base_model(self.n_constant)
select best model based on Discriminative Information Criterion Biem, Alain. "A model selection criterion for classification: Application to hmm topology optimization." Document Analysis and Recognition, 2003. Proceedings. Seventh International Conference on. IEEE, 2003. http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.58.6208&rep=rep1&type=pdf https://pdfs.semanticscholar.org/ed3d/7c4a5f607201f3848d4c02dd9ba17c791fc2.pdf DIC = log(P(X(i)) - 1/(M-1)SUM(log(P(X(all but i))
62598fb330bbd722464699c2
class Disassembler(LLVMObject): <NEW_LINE> <INDENT> def __init__(self, triple): <NEW_LINE> <INDENT> ptr = lib.LLVMCreateDisasm(c_char_p(triple), c_void_p(None), c_int(0), callbacks['op_info'](0), callbacks['symbol_lookup'](0)) <NEW_LINE> if not ptr.contents: <NEW_LINE> <INDENT> raise Exception('Could not obtain disassembler for triple: %s' % triple) <NEW_LINE> <DEDENT> LLVMObject.__init__(self, ptr, disposer=lib.LLVMDisasmDispose) <NEW_LINE> <DEDENT> def get_instruction(self, source, pc=0): <NEW_LINE> <INDENT> buf = cast(c_char_p(source), POINTER(c_ubyte)) <NEW_LINE> out_str = cast((c_byte * 255)(), c_char_p) <NEW_LINE> result = lib.LLVMDisasmInstruction(self, buf, c_uint64(len(source)), c_uint64(pc), out_str, 255) <NEW_LINE> return (result, out_str.value) <NEW_LINE> <DEDENT> def get_instructions(self, source, pc=0): <NEW_LINE> <INDENT> source_bytes = c_char_p(source) <NEW_LINE> out_str = cast((c_byte * 255)(), c_char_p) <NEW_LINE> buf = cast(source_bytes, POINTER(c_ubyte * len(source))).contents <NEW_LINE> offset = 0 <NEW_LINE> address = pc <NEW_LINE> end_address = pc + len(source) <NEW_LINE> while address < end_address: <NEW_LINE> <INDENT> b = cast(addressof(buf) + offset, POINTER(c_ubyte)) <NEW_LINE> result = lib.LLVMDisasmInstruction(self, b, c_uint64(len(source) - offset), c_uint64(address), out_str, 255) <NEW_LINE> if result == 0: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> yield (address, result, out_str.value) <NEW_LINE> address += result <NEW_LINE> offset += result
Represents a disassembler instance. Disassembler instances are tied to specific "triple," which must be defined at creation time. Disassembler instances can disassemble instructions from multiple sources.
62598fb3236d856c2adc9488
class MyfileAdmin(admin.ModelAdmin): <NEW_LINE> <INDENT> list_display = ('filename','filenumber','created_at','user_name','file_path',)
docstring for MMyfileAdmin
62598fb357b8e32f52508165
class SheetsReadException(SheetsLibException): <NEW_LINE> <INDENT> pass
Error Reading Sheets.
62598fb330dc7b766599f8df
class EasierToAskForgiveness1(X): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> try: X.__init__(self) <NEW_LINE> except AttributeError: pass
easier to ask forgiveness idiom for call-if-exists
62598fb371ff763f4b5e7806
class ExportW(bpy.types.Operator, ExportHelper): <NEW_LINE> <INDENT> bl_idname = "export_scene.w" <NEW_LINE> bl_label = 'Export W' <NEW_LINE> filename_ext = "" <NEW_LINE> filter_glob = StringProperty( default="*.w", options={'HIDDEN'}, ) <NEW_LINE> scale = FloatProperty(default=0.01, name = "Scale", min = 0.0005, max = 1, step = 0.01) <NEW_LINE> up_axis = EnumProperty(default = "-Y", name = "Up axis", items = (("X", "X", "X"), ("Y", "Y", "Y"), ("Z", "Z", "Z"), ("-X", "-X", "-X"), ("-Y", "-Y", "-Y"), ("-Z", "-Z", "-Z"))) <NEW_LINE> forward_axis = EnumProperty(default = "Z", name = "Forward axis", items = (("X", "X", "X"), ("Y", "Y", "Y"), ("Z", "Z", "Z"), ("-X", "-X", "-X"), ("-Y", "-Y", "-Y"), ("-Z", "-Z", "-Z"))) <NEW_LINE> def execute(self, context): <NEW_LINE> <INDENT> from . import export_w <NEW_LINE> return export_w.save( self, self.properties.filepath, context, axis_conversion(from_up = self.up_axis, from_forward = self.forward_axis).to_4x4() * (1 / self.scale))
Export to W file format (.w)
62598fb35fc7496912d482c5
class StructuredPacket(serial.threaded.Protocol): <NEW_LINE> <INDENT> HEADER = b'\x01\x02\x03\x04\x05' <NEW_LINE> def __init__(self, data_size): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.data_size = int(data_size) <NEW_LINE> <DEDENT> except ValueError as exc: <NEW_LINE> <INDENT> raise ValueError("Exepected arg 'size' to be int: " + str(exc)) <NEW_LINE> <DEDENT> self.packet = bytearray() <NEW_LINE> self.in_data = False <NEW_LINE> self.header_pos = 0 <NEW_LINE> self.transport = None <NEW_LINE> <DEDENT> def connection_made(self, transport): <NEW_LINE> <INDENT> self.transport = transport <NEW_LINE> <DEDENT> def conneciton_lost(self, exc): <NEW_LINE> <INDENT> self.transport = None <NEW_LINE> del self.packet[:] <NEW_LINE> super(StructuredPacket, self).connection_lost(exc) <NEW_LINE> <DEDENT> def data_received(self, data): <NEW_LINE> <INDENT> for byte in serial.iterbytes(data): <NEW_LINE> <INDENT> if self.in_data and (len(self.packet) < self.data_size): <NEW_LINE> <INDENT> self.packet.extend(byte) <NEW_LINE> if len(self.packet) == self.data_size: <NEW_LINE> <INDENT> self.in_data = False <NEW_LINE> self.handle_packet(bytes(self.packet)) <NEW_LINE> del self.packet[:] <NEW_LINE> <DEDENT> <DEDENT> elif byte == self.HEADER[self.header_pos:self.header_pos+1]: <NEW_LINE> <INDENT> self.header_pos += 1 <NEW_LINE> if self.header_pos == len(self.HEADER): <NEW_LINE> <INDENT> self.header_pos = 0 <NEW_LINE> self.in_data = True <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> self.header_pos = 0 <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def handle_packet(self, packet): <NEW_LINE> <INDENT> raise serial.threaded.NotImplementedError( 'please implement functionality in handle_packet')
Read binary packets. Packets are expected to be fixed length have a header to mark its beginning.
62598fb3a17c0f6771d5c2c9
class Bucket(object): <NEW_LINE> <INDENT> def __init__(self, name, type_obj, bucket_id=None, alg='straw', crush_hash='rjenkins1'): <NEW_LINE> <INDENT> if bucket_id is not None and bucket_id >= 0: <NEW_LINE> <INDENT> raise ValueError('Expecting bucket_id to be a negative integer') <NEW_LINE> <DEDENT> if alg not in ('uniform', 'list', 'tree', 'straw'): <NEW_LINE> <INDENT> raise ValueError("{} is not a valid algorithm".format(alg)) <NEW_LINE> <DEDENT> if crush_hash not in ('rjenkins1',): <NEW_LINE> <INDENT> raise ValueError("{} is not a valid hash".format(crush_hash)) <NEW_LINE> <DEDENT> self.name = name <NEW_LINE> self.id = bucket_id <NEW_LINE> self.type = type_obj <NEW_LINE> self.alg = alg <NEW_LINE> self.hash = crush_hash <NEW_LINE> self.items = OrderedDict() <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "<Bucket type={} id={} name={} n_items={}>".format( self.type.name, self.id, self.name, len(self.items)) <NEW_LINE> <DEDENT> def __hash__(self): <NEW_LINE> <INDENT> return hash(self.id) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> hash_id = 0 <NEW_LINE> out = '{} {} {{\n'.format(self.type.name, self.name) <NEW_LINE> out += '\tid {}\t\t# do not change unnecessarily\n'.format(self.id) <NEW_LINE> out += '\t# weight {:.3f}\n'.format(self.weight()) <NEW_LINE> out += '\talg {}\n'.format(self.alg) <NEW_LINE> out += '\thash {}\t# {}\n'.format(hash_id, self.hash) <NEW_LINE> for item, w in self.items.items(): <NEW_LINE> <INDENT> if isinstance(item, Device): <NEW_LINE> <INDENT> weight = w <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> weight = item.weight() <NEW_LINE> <DEDENT> out += '\titem {} weight {:.3f}\n'.format(item.name, weight) <NEW_LINE> <DEDENT> out += '}\n' <NEW_LINE> return out <NEW_LINE> <DEDENT> def add_item(self, obj, weight=0.0): <NEW_LINE> <INDENT> if not isinstance(obj, (Bucket, Device)): <NEW_LINE> <INDENT> raise TypeError("item must be a Bucket or a Device") <NEW_LINE> <DEDENT> self.items[obj] = weight <NEW_LINE> <DEDENT> 
def weight(self, traversed=None): <NEW_LINE> <INDENT> if traversed is None: <NEW_LINE> <INDENT> traversed = [] <NEW_LINE> <DEDENT> if self.name in traversed: <NEW_LINE> <INDENT> raise ValueError("There is a loop in the bucket hierarchy!") <NEW_LINE> <DEDENT> traversed.append(self.name) <NEW_LINE> weight = 0.0 <NEW_LINE> for obj, w in self.items.items(): <NEW_LINE> <INDENT> if isinstance(obj, Device): <NEW_LINE> <INDENT> weight += w <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> weight += obj.weight(traversed) <NEW_LINE> <DEDENT> <DEDENT> return weight <NEW_LINE> <DEDENT> def reweight_devices(self, weight): <NEW_LINE> <INDENT> own_weight = 0.0 <NEW_LINE> for obj in self.items: <NEW_LINE> <INDENT> if isinstance(obj, Device): <NEW_LINE> <INDENT> item_weight = weight <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> item_weight = obj.reweight_devices(weight) <NEW_LINE> <DEDENT> own_weight += item_weight <NEW_LINE> self.items[obj] = item_weight <NEW_LINE> <DEDENT> return own_weight
Represents a single bucket, its properties and items. Also keeps track of any parent buckets. Arguments: - name: Unique name for this bucket - id: Unique integer ID for this bucket - type_obj: Type object referring to the bucket's type - alg: CRUSH algorith (default: straw) - hash_name: Name of the hash to use (default: rjenkins1)
62598fb3e5267d203ee6b998
class CreateSubscriptionRequest(object): <NEW_LINE> <INDENT> deserialized_types = { 'name': 'str', 'events': 'list[ask_smapi_model.v0.development_events.subscription.event.Event]', 'vendor_id': 'str', 'subscriber_id': 'str' } <NEW_LINE> attribute_map = { 'name': 'name', 'events': 'events', 'vendor_id': 'vendorId', 'subscriber_id': 'subscriberId' } <NEW_LINE> supports_multiple_types = False <NEW_LINE> def __init__(self, name=None, events=None, vendor_id=None, subscriber_id=None): <NEW_LINE> <INDENT> self.__discriminator_value = None <NEW_LINE> self.name = name <NEW_LINE> self.events = events <NEW_LINE> self.vendor_id = vendor_id <NEW_LINE> self.subscriber_id = subscriber_id <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in six.iteritems(self.deserialized_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x.value if isinstance(x, Enum) else x, value )) <NEW_LINE> <DEDENT> elif isinstance(value, Enum): <NEW_LINE> <INDENT> result[attr] = value.value <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else (item[0], item[1].value) if isinstance(item[1], Enum) else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pprint.pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, CreateSubscriptionRequest): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.__dict__ == 
other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other
:param name: Name of the subscription. :type name: (optional) str :param events: The list of events that the subscriber should be notified for. :type events: (optional) list[ask_smapi_model.v0.development_events.subscription.event.Event] :param vendor_id: The vendorId of the event publisher. :type vendor_id: (optional) str :param subscriber_id: The id of the subscriber that would receive the events. :type subscriber_id: (optional) str
62598fb3f548e778e596b638
class SpellCleanRefLists(pyffi.spells.nif.NifSpell): <NEW_LINE> <INDENT> SPELLNAME = "opt_cleanreflists" <NEW_LINE> READONLY = False <NEW_LINE> def datainspect(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> if self.data.header.has_block_type(NifFormat.NiPSysMeshEmitter): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> except ValueError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> return self.inspectblocktype(NifFormat.NiObjectNET) <NEW_LINE> <DEDENT> def dataentry(self): <NEW_LINE> <INDENT> self.data.roots = self.cleanreflist(self.data.roots, "root") <NEW_LINE> return True <NEW_LINE> <DEDENT> def branchinspect(self, branch): <NEW_LINE> <INDENT> return isinstance(branch, NifFormat.NiObjectNET) <NEW_LINE> <DEDENT> def cleanreflist(self, reflist, category): <NEW_LINE> <INDENT> cleanlist = [] <NEW_LINE> for ref in reflist: <NEW_LINE> <INDENT> if ref is None: <NEW_LINE> <INDENT> self.toaster.msg("removing empty %s reference" % category) <NEW_LINE> self.changed = True <NEW_LINE> <DEDENT> elif ref in cleanlist: <NEW_LINE> <INDENT> self.toaster.msg("removing duplicate %s reference" % category) <NEW_LINE> self.changed = True <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> cleanlist.append(ref) <NEW_LINE> <DEDENT> <DEDENT> return cleanlist <NEW_LINE> <DEDENT> def branchentry(self, branch): <NEW_LINE> <INDENT> if isinstance(branch, NifFormat.NiObjectNET): <NEW_LINE> <INDENT> branch.set_extra_datas( self.cleanreflist(branch.get_extra_datas(), "extra")) <NEW_LINE> <DEDENT> if isinstance(branch, NifFormat.NiAVObject): <NEW_LINE> <INDENT> branch.set_properties( self.cleanreflist(branch.get_properties(), "property")) <NEW_LINE> <DEDENT> if isinstance(branch, NifFormat.NiNode): <NEW_LINE> <INDENT> branch.set_children( self.cleanreflist(branch.get_children(), "child")) <NEW_LINE> branch.set_effects( self.cleanreflist(branch.get_effects(), "effect")) <NEW_LINE> <DEDENT> return True
Remove empty and duplicate entries in reference lists.
62598fb37047854f4633f46e
class Grade(BaseCurricularAlignmentModel): <NEW_LINE> <INDENT> level = models.ForeignKey( Level, related_name='grades' ) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> verbose_name = u'Grado' <NEW_LINE> verbose_name_plural = u'Grados'
Defines model for school grade in curricular alignment
62598fb360cbc95b063643da
class GridState(np.ndarray): <NEW_LINE> <INDENT> def __new__(cls, width, height): <NEW_LINE> <INDENT> obj = super().__new__(cls, (height, width), dtype=bool) <NEW_LINE> obj.fill(False) <NEW_LINE> return obj <NEW_LINE> <DEDENT> @property <NEW_LINE> def min_width(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return np.where(self.sum(axis=0) > 0)[0].max() + 1 <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def min_height(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return np.where(self.sum(axis=1) > 0)[0].max() + 1 <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> <DEDENT> def empty_copy(self): <NEW_LINE> <INDENT> return GridState(*self.shape) <NEW_LINE> <DEDENT> def conflicts(self, cell, col_span, row_span): <NEW_LINE> <INDENT> col, row = cell <NEW_LINE> return self[row:row+row_span, col:col+col_span].any() <NEW_LINE> <DEDENT> def conflicts_where(self, cell, col_span, row_span): <NEW_LINE> <INDENT> return self.empty_copy().populate(cell, col_span, row_span) & self <NEW_LINE> <DEDENT> def populate(self, cell, col_span, row_span): <NEW_LINE> <INDENT> self._set(cell, col_span, row_span, True) <NEW_LINE> <DEDENT> def unpopulate(self, cell, col_span, row_span): <NEW_LINE> <INDENT> self._set(cell, col_span, row_span, False) <NEW_LINE> <DEDENT> def _set(self, cell, col_span, row_span, val): <NEW_LINE> <INDENT> col, row = cell <NEW_LINE> self[row:row+row_span, col:col+col_span] = val
A wrapper around a numpy array of booleans for storing the state of a ``Grid`` instance. That is, each element in the ``GridState`` stores whether or not the corresponding grid cell is populated (i.e. contains part of a ``DockableMixin`` widget). A numpy array in particular is used for this purpose chiefly for the convenience provided by fancy indexing. This makes checking for grid conflicts with a prospect dockable placement rather simple (and fast).
62598fb35fcc89381b266196
class CSVGenerator(ReportGenerator): <NEW_LINE> <INDENT> writer = None <NEW_LINE> writer_function = csv.writer <NEW_LINE> first_row_with_column_names = False <NEW_LINE> mimetype = 'text/csv' <NEW_LINE> def __init__(self, report, cache_enabled=None, writer=None, first_row_with_column_names=None, **kwargs): <NEW_LINE> <INDENT> super(CSVGenerator, self).__init__(report, **kwargs) <NEW_LINE> if cache_enabled is not None: <NEW_LINE> <INDENT> self.cache_enabled = cache_enabled <NEW_LINE> <DEDENT> elif self.cache_enabled is None: <NEW_LINE> <INDENT> self.cache_enabled = bool(self.report.cache_status) <NEW_LINE> <DEDENT> self.writer = writer or self.writer <NEW_LINE> if first_row_with_column_names is not None: <NEW_LINE> <INDENT> self.first_row_with_column_names = first_row_with_column_names <NEW_LINE> <DEDENT> for k,v in kwargs.items(): <NEW_LINE> <INDENT> setattr(self, k, v) <NEW_LINE> <DEDENT> <DEDENT> def start_writer(self, filename=None): <NEW_LINE> <INDENT> if self.writer: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> filename = filename or self.filename <NEW_LINE> if isinstance(filename, basestring): <NEW_LINE> <INDENT> filename = file(filename, 'w') <NEW_LINE> <DEDENT> self.writer = self.writer_function(filename, quoting=csv.QUOTE_MINIMAL) <NEW_LINE> <DEDENT> def execute(self): <NEW_LINE> <INDENT> super(CSVGenerator, self).execute() <NEW_LINE> self.report.do_before_print(generator=self) <NEW_LINE> self.generate_csv() <NEW_LINE> self.report.do_after_print(generator=self) <NEW_LINE> <DEDENT> def get_hash_key(self, objects): <NEW_LINE> <INDENT> return super(CSVGenerator, self).get_hash_key(objects) + '.csv' <NEW_LINE> <DEDENT> def generate_csv(self): <NEW_LINE> <INDENT> self._current_object_index = 0 <NEW_LINE> objects = self.report.get_objects_list() <NEW_LINE> self.start_writer() <NEW_LINE> columns = [el for el in self.report.band_detail.elements if isinstance(el, ObjectValue)] <NEW_LINE> columns.sort(lambda a,b: cmp(a.left, b.left) or cmp(a.width, b.width)) 
<NEW_LINE> if self.first_row_with_column_names: <NEW_LINE> <INDENT> cells = [(col.name or col.expression or col.attribute_name) for col in columns] <NEW_LINE> self.writer.writerow(cells) <NEW_LINE> <DEDENT> while self._current_object_index < len(objects): <NEW_LINE> <INDENT> self._current_object = objects[self._current_object_index] <NEW_LINE> cells = [] <NEW_LINE> for element in columns: <NEW_LINE> <INDENT> widget = element.clone() <NEW_LINE> widget.font_color = self.report.default_font_color <NEW_LINE> widget.instance = self._current_object <NEW_LINE> widget.generator = self <NEW_LINE> widget.report = self.report <NEW_LINE> widget.band = self.report.band_detail <NEW_LINE> widget.page = None <NEW_LINE> cells.append(widget.text) <NEW_LINE> <DEDENT> self._current_object_index += 1 <NEW_LINE> self.writer.writerow(cells)
This is a generator to output data in CSV format. This format can be imported as a spreadsheet to Excel, OpenOffice Calc, Google Docs Spreadsheet, and others. Attributes: * 'filename' - is the file path you can inform optionally to save text to. * 'writer' - is csv.writer function you can inform manually to make it customizable. This function must expects a first argument to receive a file object and returns a csv.writer object.
62598fb3283ffb24f3cf3920
class ParsedAdditiveExpressionList(ListRedirect): <NEW_LINE> <INDENT> def __init__(self,multiplicativeExprList): <NEW_LINE> <INDENT> if isinstance(multiplicativeExprList,list): <NEW_LINE> <INDENT> self._list = multiplicativeExprList <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._list = [multiplicativeExprList] <NEW_LINE> <DEDENT> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "<AdditiveExpressionList: %s>"%self._list
A list of MultiplicativeExpressions, joined by '+' or '-' s
62598fb39c8ee823130401bc
class Endpoint(BasicEndpoint): <NEW_LINE> <INDENT> def __init__(self, name: str, server_handler: Callable): <NEW_LINE> <INDENT> super().__init__(name, server_handler) <NEW_LINE> self.vertex = None <NEW_LINE> self.connections = {} <NEW_LINE> <DEDENT> def connect(self, endpoint_name: str): <NEW_LINE> <INDENT> self.connections[endpoint_name] = None <NEW_LINE> <DEDENT> def send(self, action_type: Union[str, ActionType], data) -> Any: <NEW_LINE> <INDENT> action_type = ActionType.force_cast(action_type) <NEW_LINE> endpoint = action_type.endpoint <NEW_LINE> if endpoint not in self.connections: <NEW_LINE> <INDENT> raise Exception("Unknown connection to '{}'".format(endpoint)) <NEW_LINE> <DEDENT> connection = self.connections[endpoint] <NEW_LINE> if connection is None: <NEW_LINE> <INDENT> message = "Connection to '{}' has not been created, have you run start() on {}?" <NEW_LINE> raise Exception(message.format(endpoint, self.name)) <NEW_LINE> <DEDENT> return connection.send(action_type, data) <NEW_LINE> <DEDENT> def setup(self, vertex_addr): <NEW_LINE> <INDENT> super().start() <NEW_LINE> self.vertex = self.client.connect(vertex_addr) <NEW_LINE> <DEDENT> def start(self): <NEW_LINE> <INDENT> for endpoint_name in self.connections.keys(): <NEW_LINE> <INDENT> response = self.vertex.send(ActionType("vertex", "lookup"), {"endpoint_name": endpoint_name}) <NEW_LINE> if not response: <NEW_LINE> <INDENT> raise NoEndpointError(endpoint_name) <NEW_LINE> <DEDENT> host = response["host"] <NEW_LINE> port = response["port"] <NEW_LINE> endpoint_addr = (host, port) <NEW_LINE> connection = self.client.connect(endpoint_addr) <NEW_LINE> self.connections[endpoint_name] = connection <NEW_LINE> <DEDENT> <DEDENT> def vertex_send(self, action_type: Union[str, ActionType], data): <NEW_LINE> <INDENT> return self.vertex.send(action_type, data)
Generic Endpoint, has all basic features including connection management, and endpoint resolution. This class can be inherited by any Endpoint that uses a connection to a Vertex.
62598fb34e4d5625663724ba
class AdvicesViewSet(BaseViewSet): <NEW_LINE> <INDENT> permission_code = 'advices' <NEW_LINE> queryset = Advices.objects.all().select_related('created_by','updated_by') <NEW_LINE> serializer_class = AdvicesSerializer <NEW_LINE> filter_class = AdvicesFilter <NEW_LINE> filter_backends = (OrderingFilter, DjangoFilterBackend) <NEW_LINE> def perform_create(self, serializer): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> advice = Advices.objects.filter(description=self.request.data['description'], type_diagnostic=self.request.data['type_diagnostic'], deleted=0) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> advice = None <NEW_LINE> <DEDENT> if advice: <NEW_LINE> <INDENT> raise ValidationError({'description': ['Ya se registró esta recomendación.']}) <NEW_LINE> <DEDENT> serializer.save() <NEW_LINE> <DEDENT> def perform_update(self, serializer): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> advice = Advices.objects.filter(description=self.request.data['description'], type_diagnostic=self.request.data['type_diagnostic'], deleted=0).exclude(id=self.kwargs['pk']) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> advice = None <NEW_LINE> <DEDENT> if advice: <NEW_LINE> <INDENT> raise ValidationError({'description': ['Ya se registró esta recomendación.']}) <NEW_LINE> <DEDENT> serializer.save()
Type diagnostic view FILTERS: 'id': ['exact'], 'description':['exact', 'icontains'], 'type_diagnostic':['exact',], 'created_at': ['exact', 'year', 'year__gte', 'year__lte', 'month', 'month__lte', 'month__gte', 'day', 'day__lte', 'day__gte', 'year__in', 'month__in', 'day__in'], 'created_by': ['exact'],
62598fb356ac1b37e630227e
class LegacySyndicationFeed(AtomFeed): <NEW_LINE> <INDENT> def __init__(self, title, link, description, language=None, author_email=None, author_name=None, author_link=None, subtitle=None, categories=[], feed_url=None, feed_copyright=None): <NEW_LINE> <INDENT> atom_id = link <NEW_LINE> title = title <NEW_LINE> updated = None <NEW_LINE> rights = feed_copyright <NEW_LINE> subtitle = subtitle <NEW_LINE> author_dict = {'name': author_name} <NEW_LINE> if author_link: <NEW_LINE> <INDENT> author_dict['uri'] = author_uri <NEW_LINE> <DEDENT> if author_email: <NEW_LINE> <INDENT> author_dict['email'] = author_email <NEW_LINE> <DEDENT> authors = [author_dict] <NEW_LINE> if categories: <NEW_LINE> <INDENT> categories = [{'term': term} for term in categories] <NEW_LINE> <DEDENT> links = [{'rel': 'alternate', 'href': link}] <NEW_LINE> if feed_url: <NEW_LINE> <INDENT> links.append({'rel': 'self', 'href': feed_url}) <NEW_LINE> <DEDENT> if language: <NEW_LINE> <INDENT> extra_attrs = {'xml:lang': language} <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> extra_attrs = {} <NEW_LINE> <DEDENT> AtomFeed.__init__(self, atom_id, title, updated, rights=rights, subtitle=subtitle, authors=authors, categories=categories, links=links, extra_attrs=extra_attrs) <NEW_LINE> <DEDENT> def add_item(self, title, link, description, author_email=None, author_name=None, author_link=None, pubdate=None, comments=None, unique_id=None, enclosure=None, categories=[], item_copyright=None): <NEW_LINE> <INDENT> if unique_id: <NEW_LINE> <INDENT> atom_id = unique_id <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> atom_id = get_tag_uri(link, pubdate) <NEW_LINE> <DEDENT> title = title <NEW_LINE> updated = pubdate <NEW_LINE> if item_copyright: <NEW_LINE> <INDENT> rights = item_copyright <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> rights = None <NEW_LINE> <DEDENT> if description: <NEW_LINE> <INDENT> summary = 'html', description <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> summary = None <NEW_LINE> <DEDENT> author_dict = 
{'name': author_name} <NEW_LINE> if author_link: <NEW_LINE> <INDENT> author_dict['uri'] = author_uri <NEW_LINE> <DEDENT> if author_email: <NEW_LINE> <INDENT> author_dict['email'] = author_email <NEW_LINE> <DEDENT> authors = [author_dict] <NEW_LINE> categories = [{'term': term} for term in categories] <NEW_LINE> links = [{'rel': 'alternate', 'href': link}] <NEW_LINE> if enclosure: <NEW_LINE> <INDENT> links.append({'rel': 'enclosure', 'href': enclosure.url, 'length': enclosure.length, 'type': enclosure.mime_type}) <NEW_LINE> <DEDENT> AtomFeed.add_item(self, atom_id, title, updated, rights=rights, summary=summary, authors=authors, categories=categories, links=links)
Provides an SyndicationFeed-compatible interface in its __init__ and add_item but is really a new AtomFeed object.
62598fb37047854f4633f46f
class TeamStanding(models.Model): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> unique_together = ( 'source_timestamp', 'team_id', 'competition_id', 'season_id',) <NEW_LINE> <DEDENT> source_timestamp = models.DateTimeField() <NEW_LINE> team_id = models.IntegerField() <NEW_LINE> competition_id = models.IntegerField() <NEW_LINE> season_id = models.IntegerField() <NEW_LINE> against = models.IntegerField() <NEW_LINE> drawn = models.IntegerField() <NEW_LINE> goals_for = models.IntegerField() <NEW_LINE> lost = models.IntegerField() <NEW_LINE> played = models.IntegerField() <NEW_LINE> points = models.IntegerField() <NEW_LINE> start_day_position = models.IntegerField() <NEW_LINE> won = models.IntegerField() <NEW_LINE> away_against = models.IntegerField() <NEW_LINE> away_drawn = models.IntegerField() <NEW_LINE> away_for = models.IntegerField() <NEW_LINE> away_lost = models.IntegerField() <NEW_LINE> away_played = models.IntegerField() <NEW_LINE> away_points = models.IntegerField() <NEW_LINE> away_position = models.IntegerField() <NEW_LINE> away_won = models.IntegerField() <NEW_LINE> home_against = models.IntegerField() <NEW_LINE> home_drawn = models.IntegerField() <NEW_LINE> home_for = models.IntegerField() <NEW_LINE> home_lost = models.IntegerField() <NEW_LINE> home_played = models.IntegerField() <NEW_LINE> home_points = models.IntegerField() <NEW_LINE> home_position = models.IntegerField() <NEW_LINE> home_won = models.IntegerField()
Represents a team's standing.
62598fb356ac1b37e630227f
class DownBlock(BaseBlock): <NEW_LINE> <INDENT> def __init__(self, in_channels, out_channels, bn_name=NormalizationType.BatchNormalize2d, activation_name=ActivationType.ReLU): <NEW_LINE> <INDENT> super().__init__(UNetBlockName.DownBlock) <NEW_LINE> self.maxpool_conv = nn.Sequential( nn.MaxPool2d(2), DoubleConv2d(in_channels, out_channels, bn_name, activation_name) ) <NEW_LINE> <DEDENT> def forward(self, x): <NEW_LINE> <INDENT> return self.maxpool_conv(x)
Downscaling with maxpool then double conv
62598fb367a9b606de546062
class Test49(unittest.TestCase): <NEW_LINE> <INDENT> def test_v1_message_validation(self) -> None: <NEW_LINE> <INDENT> from_id = "me" <NEW_LINE> client = m49.ClientV1(from_id) <NEW_LINE> server = m49.ServerV1() <NEW_LINE> parsed_message = {"to_id": "you", "amount": 1000} <NEW_LINE> request = client.send(**parsed_message) <NEW_LINE> self.assertTrue(server.validate(request)) <NEW_LINE> <DEDENT> def test_v1_fail_to_validate(self) -> None: <NEW_LINE> <INDENT> client = m49.ClientV1("0001") <NEW_LINE> server = m49.ServerV1() <NEW_LINE> request = client.send(to_id="0002", amount=100) <NEW_LINE> request += b"\x02" <NEW_LINE> self.assertFalse(server.validate(request)) <NEW_LINE> with self.assertRaises(Exception): <NEW_LINE> <INDENT> server.process(request) <NEW_LINE> <DEDENT> <DEDENT> def test_v1_attack_variable_iv(self) -> None: <NEW_LINE> <INDENT> attacker_id = "0001" <NEW_LINE> victim_id = "0002" <NEW_LINE> forgery = m49.forge_via_variable_iv(attacker_id, victim_id) <NEW_LINE> tx = m49.ServerV1.process(forgery) <NEW_LINE> self.assertEqual(tx["to"], attacker_id) <NEW_LINE> self.assertEqual(tx["from"], victim_id) <NEW_LINE> self.assertEqual(tx["amount"], "1000000") <NEW_LINE> <DEDENT> def test_v2_fail_to_validate(self) -> None: <NEW_LINE> <INDENT> client = m49.ClientV2("1") <NEW_LINE> server = m49.ServerV2() <NEW_LINE> request = client.send({"2": 100}) <NEW_LINE> request += b"\x02" <NEW_LINE> self.assertFalse(server.validate(request)) <NEW_LINE> with self.assertRaises(Exception): <NEW_LINE> <INDENT> server.process(request) <NEW_LINE> <DEDENT> <DEDENT> def test_v2_attack_via_length_extension(self) -> None: <NEW_LINE> <INDENT> attacker_id = "1" <NEW_LINE> victim_id = "2" <NEW_LINE> forgery = m49.forge_via_length_extension(attacker_id, victim_id) <NEW_LINE> txs = m49.ServerV2.process(forgery) <NEW_LINE> self.assertEqual(txs["from"], victim_id) <NEW_LINE> self.assertIn({"to": attacker_id, "amount": 1000000}, txs["tx_list"])
CBC-MAC Message Forgery
62598fb33539df3088ecc346
class Client(object): <NEW_LINE> <INDENT> def __init__(self, robot_name): <NEW_LINE> <INDENT> action_name = "/" + robot_name + "/action_server/task" <NEW_LINE> self._action_client = actionlib.SimpleActionClient(action_name, action_server_msgs.msg.TaskAction) <NEW_LINE> rospy.loginfo("Waiting for task action server to come online...") <NEW_LINE> self._action_client.wait_for_server() <NEW_LINE> rospy.loginfo("Connected to task action server") <NEW_LINE> self.get_actions_proxy = rospy.ServiceProxy('get_actions', action_server_msgs.srv.GetActions) <NEW_LINE> <DEDENT> def get_actions(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> res = self.get_actions_proxy() <NEW_LINE> <DEDENT> except rospy.ServiceException: <NEW_LINE> <INDENT> rospy.logerr("Failed to get actions from the action server.") <NEW_LINE> res = [] <NEW_LINE> <DEDENT> return res.actions <NEW_LINE> <DEDENT> def send_async_task(self, semantics, done_cb=None, feedback_cb=None): <NEW_LINE> <INDENT> def _wrapped_done_cb(_, result): <NEW_LINE> <INDENT> taskoutcome = task_outcome_from_result(result=result) <NEW_LINE> return done_cb(taskoutcome) <NEW_LINE> <DEDENT> _done_cb = _wrapped_done_cb if callable(done_cb) else None <NEW_LINE> goal = action_server_msgs.msg.TaskGoal(recipe=semantics) <NEW_LINE> self._action_client.send_goal(goal=goal, done_cb=_done_cb, feedback_cb=feedback_cb) <NEW_LINE> <DEDENT> def send_task(self, semantics) -> TaskOutcome: <NEW_LINE> <INDENT> goal = action_server_msgs.msg.TaskGoal(recipe=semantics) <NEW_LINE> self._action_client.send_goal(goal) <NEW_LINE> try: <NEW_LINE> <INDENT> self._action_client.wait_for_result() <NEW_LINE> result = self._action_client.get_result() <NEW_LINE> <DEDENT> except KeyboardInterrupt: <NEW_LINE> <INDENT> self.cancel_all() <NEW_LINE> raise KeyboardInterrupt <NEW_LINE> <DEDENT> if not isinstance(result, action_server_msgs.msg.TaskResult): <NEW_LINE> <INDENT> msg = "Result not instance of 'action_server_msgs.msg.TaskResult', but {}".format(type(result)) 
<NEW_LINE> if result is None: <NEW_LINE> <INDENT> rospy.logerr(msg) <NEW_LINE> <DEDENT> return TaskOutcome(messages=[msg]) <NEW_LINE> <DEDENT> return task_outcome_from_result(result=result) <NEW_LINE> <DEDENT> def cancel_all(self): <NEW_LINE> <INDENT> rospy.logdebug("cancelling all goals...") <NEW_LINE> self._action_client.cancel_all_goals() <NEW_LINE> self._action_client.wait_for_result() <NEW_LINE> rospy.logdebug("... all goals cancelled!") <NEW_LINE> <DEDENT> def cancel_all_async(self): <NEW_LINE> <INDENT> rospy.logdebug("cancelling all goals async...") <NEW_LINE> self._action_client.cancel_all_goals()
A client for the action server Wraps the client side of the actionlib interface so that it can be easily used in client side applications. Example: client = Client('amigo') semantics = "{'actions': [{'action': 'say', 'sentence': 'ROBOT_NAME'}]}" client.send_task(semantics)
62598fb3cc0a2c111447b0a7
class ConversionImpossible(Exception): <NEW_LINE> <INDENT> pass
Utility exception class used by conversion methods to signal that this object cannot be converted
62598fb3379a373c97d990a9
class Entity(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.position = None <NEW_LINE> self.inventory = None <NEW_LINE> self.in_inventory = None <NEW_LINE> self.is_solid = False <NEW_LINE> self.appearance = "?" <NEW_LINE> self.description = ""
An entity is a player, a mob, an item, a fireball, etc.
62598fb37d847024c075c456
@skip_check_grad_ci( reason="reduce_min is discontinuous non-derivable function," " its gradient check is not supported by unittest framework.") <NEW_LINE> class TestReduceMinOpMultiAxises(OpTest): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.op_type = "reduce_min" <NEW_LINE> self.inputs = {'X': np.random.random((5, 6, 10)).astype("float64")} <NEW_LINE> self.attrs = {'dim': [1, 2]} <NEW_LINE> self.outputs = { 'Out': self.inputs['X'].min(axis=tuple(self.attrs['dim'])) } <NEW_LINE> <DEDENT> def test_check_output(self): <NEW_LINE> <INDENT> self.check_output()
Remove Min with subgradient from gradient check to confirm the success of CI.
62598fb326068e7796d4c9ea
class TestPad(QiskitTestCase): <NEW_LINE> <INDENT> def test_padding_empty_schedule(self): <NEW_LINE> <INDENT> self.assertEqual(pulse.Schedule(), pad(pulse.Schedule())) <NEW_LINE> <DEDENT> def test_padding_schedule(self): <NEW_LINE> <INDENT> delay = 10 <NEW_LINE> sched = (Delay(delay, DriveChannel(0)).shift(10) + Delay(delay, DriveChannel(0)).shift(10) + Delay(delay, DriveChannel(1)).shift(10)) <NEW_LINE> ref_sched = (sched | Delay(delay, DriveChannel(0)) | Delay(delay, DriveChannel(0)).shift(20) | Delay(delay, DriveChannel(1)) | Delay(2 * delay, DriveChannel(1)).shift(20)) <NEW_LINE> self.assertEqual(pad(sched), ref_sched) <NEW_LINE> <DEDENT> def test_padding_schedule_inverse_order(self): <NEW_LINE> <INDENT> delay = 10 <NEW_LINE> sched = (Delay(delay, DriveChannel(1)).shift(10) + Delay(delay, DriveChannel(0)).shift(10) + Delay(delay, DriveChannel(0)).shift(10)) <NEW_LINE> ref_sched = (sched | Delay(delay, DriveChannel(0)) | Delay(delay, DriveChannel(0)).shift(20) | Delay(delay, DriveChannel(1)) | Delay(2 * delay, DriveChannel(1)).shift(20)) <NEW_LINE> self.assertEqual(pad(sched), ref_sched) <NEW_LINE> <DEDENT> def test_padding_until_less(self): <NEW_LINE> <INDENT> delay = 10 <NEW_LINE> sched = (Delay(delay, DriveChannel(0)).shift(10) + Delay(delay, DriveChannel(1))) <NEW_LINE> ref_sched = (sched | Delay(delay, DriveChannel(0)) | Delay(5, DriveChannel(1)).shift(10)) <NEW_LINE> self.assertEqual(pad(sched, until=15), ref_sched) <NEW_LINE> <DEDENT> def test_padding_until_greater(self): <NEW_LINE> <INDENT> delay = 10 <NEW_LINE> sched = (Delay(delay, DriveChannel(0)).shift(10) + Delay(delay, DriveChannel(1))) <NEW_LINE> ref_sched = (sched | Delay(delay, DriveChannel(0)) | Delay(30, DriveChannel(0)).shift(20) | Delay(40, DriveChannel(1)).shift(10)) <NEW_LINE> self.assertEqual(pad(sched, until=50), ref_sched) <NEW_LINE> <DEDENT> def test_padding_supplied_channels(self): <NEW_LINE> <INDENT> delay = 10 <NEW_LINE> sched = (Delay(delay, DriveChannel(0)).shift(10) + 
Delay(delay, DriveChannel(1))) <NEW_LINE> ref_sched = (sched | Delay(delay, DriveChannel(0)) | Delay(2 * delay, DriveChannel(2))) <NEW_LINE> channels = [DriveChannel(0), DriveChannel(2)] <NEW_LINE> self.assertEqual(pad(sched, channels=channels), ref_sched)
Test padding of schedule with delays.
62598fb316aa5153ce400597
class VerifyVersionCommand(install): <NEW_LINE> <INDENT> def run(self): <NEW_LINE> <INDENT> tag = os.getenv("CIRCLE_TAG") <NEW_LINE> if tag != VERSION: <NEW_LINE> <INDENT> info = f"The git tag: '{tag}' does not match the package ver: '{VERSION}'" <NEW_LINE> sys.exit(info)
Custom command to verify that the git tag matches our VERSION.
62598fb399fddb7c1ca62e34
class HostManager(crud.CRUDClient): <NEW_LINE> <INDENT> key = 'host' <NEW_LINE> base_path = '/hosts' <NEW_LINE> resource_class = Host <NEW_LINE> def list(self, project_id, **kwargs): <NEW_LINE> <INDENT> kwargs['project'] = str(project_id) <NEW_LINE> super(HostManager, self).list(**kwargs)
A manager for hosts.
62598fb3cc40096d6161a223
class ParseMatcher(Matcher): <NEW_LINE> <INDENT> custom_types = {} <NEW_LINE> parser_class = parse.Parser <NEW_LINE> def __init__(self, func, pattern, step_type=None): <NEW_LINE> <INDENT> super(ParseMatcher, self).__init__(func, pattern, step_type) <NEW_LINE> self.parser = self.parser_class(pattern, self.custom_types) <NEW_LINE> <DEDENT> @property <NEW_LINE> def regex_pattern(self): <NEW_LINE> <INDENT> return self.parser._expression <NEW_LINE> <DEDENT> def check_match(self, step): <NEW_LINE> <INDENT> result = self.parser.parse(step) <NEW_LINE> if not result: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> args = [] <NEW_LINE> for index, value in enumerate(result.fixed): <NEW_LINE> <INDENT> start, end = result.spans[index] <NEW_LINE> args.append(Argument(start, end, step[start:end], value)) <NEW_LINE> <DEDENT> for name, value in list(result.named.items()): <NEW_LINE> <INDENT> start, end = result.spans[name] <NEW_LINE> args.append(Argument(start, end, step[start:end], value, name)) <NEW_LINE> <DEDENT> args.sort(key=lambda x: x.start) <NEW_LINE> return args
Uses :class:`~parse.Parser` class to be able to use simpler parse expressions compared to normal regular expressions.
62598fb34c3428357761a34e
class ExpressRouteCircuitConnection(SubResource): <NEW_LINE> <INDENT> _validation = { 'etag': {'readonly': True}, 'type': {'readonly': True}, 'circuit_connection_status': {'readonly': True}, 'provisioning_state': {'readonly': True}, } <NEW_LINE> _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'etag': {'key': 'etag', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'express_route_circuit_peering': {'key': 'properties.expressRouteCircuitPeering', 'type': 'SubResource'}, 'peer_express_route_circuit_peering': {'key': 'properties.peerExpressRouteCircuitPeering', 'type': 'SubResource'}, 'address_prefix': {'key': 'properties.addressPrefix', 'type': 'str'}, 'authorization_key': {'key': 'properties.authorizationKey', 'type': 'str'}, 'ipv6_circuit_connection_config': {'key': 'properties.ipv6CircuitConnectionConfig', 'type': 'Ipv6CircuitConnectionConfig'}, 'circuit_connection_status': {'key': 'properties.circuitConnectionStatus', 'type': 'str'}, 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, } <NEW_LINE> def __init__( self, **kwargs ): <NEW_LINE> <INDENT> super(ExpressRouteCircuitConnection, self).__init__(**kwargs) <NEW_LINE> self.name = kwargs.get('name', None) <NEW_LINE> self.etag = None <NEW_LINE> self.type = None <NEW_LINE> self.express_route_circuit_peering = kwargs.get('express_route_circuit_peering', None) <NEW_LINE> self.peer_express_route_circuit_peering = kwargs.get('peer_express_route_circuit_peering', None) <NEW_LINE> self.address_prefix = kwargs.get('address_prefix', None) <NEW_LINE> self.authorization_key = kwargs.get('authorization_key', None) <NEW_LINE> self.ipv6_circuit_connection_config = kwargs.get('ipv6_circuit_connection_config', None) <NEW_LINE> self.circuit_connection_status = None <NEW_LINE> self.provisioning_state = None
Express Route Circuit Connection in an ExpressRouteCircuitPeering resource. Variables are only populated by the server, and will be ignored when sending a request. :param id: Resource ID. :type id: str :param name: The name of the resource that is unique within a resource group. This name can be used to access the resource. :type name: str :ivar etag: A unique read-only string that changes whenever the resource is updated. :vartype etag: str :ivar type: Type of the resource. :vartype type: str :param express_route_circuit_peering: Reference to Express Route Circuit Private Peering Resource of the circuit initiating connection. :type express_route_circuit_peering: ~azure.mgmt.network.v2020_08_01.models.SubResource :param peer_express_route_circuit_peering: Reference to Express Route Circuit Private Peering Resource of the peered circuit. :type peer_express_route_circuit_peering: ~azure.mgmt.network.v2020_08_01.models.SubResource :param address_prefix: /29 IP address space to carve out Customer addresses for tunnels. :type address_prefix: str :param authorization_key: The authorization key. :type authorization_key: str :param ipv6_circuit_connection_config: IPv6 Address PrefixProperties of the express route circuit connection. :type ipv6_circuit_connection_config: ~azure.mgmt.network.v2020_08_01.models.Ipv6CircuitConnectionConfig :ivar circuit_connection_status: Express Route Circuit connection state. Possible values include: "Connected", "Connecting", "Disconnected". :vartype circuit_connection_status: str or ~azure.mgmt.network.v2020_08_01.models.CircuitConnectionStatus :ivar provisioning_state: The provisioning state of the express route circuit connection resource. Possible values include: "Succeeded", "Updating", "Deleting", "Failed". :vartype provisioning_state: str or ~azure.mgmt.network.v2020_08_01.models.ProvisioningState
62598fb33d592f4c4edbaf55
class Meta: <NEW_LINE> <INDENT> icon = "Message" <NEW_LINE> references = {'user': 'user.email'} <NEW_LINE> model = Message <NEW_LINE> filters = 'user',
Tune the handler.
62598fb323849d37ff851148
class ToolbarOptionGreyedOrUnavailable(CFMEException): <NEW_LINE> <INDENT> pass
Raised when toolbar wants to click item that is greyed or unavailable
62598fb344b2445a339b69bc
class Dummy(): <NEW_LINE> <INDENT> def __init__(self, Class): <NEW_LINE> <INDENT> self.Class = Class <NEW_LINE> self.num_returns = None <NEW_LINE> <DEDENT> def dumb(self, *args, **kwargs): <NEW_LINE> <INDENT> if self.num_returns == 0: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> if self.num_returns == 1: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return (None,)*self.num_returns <NEW_LINE> <DEDENT> <DEDENT> def __getattr__(self, attr): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.num_returns = getattr(self.Class, attr).__code__.co_stacksize <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> return self.dumb
Make a dummy object with Dummy(Class). You can replace any object of class Class with a Dummy object, and then any functions or variables in that class will do absolutely nothing. This is useful, for example, if you want to run a procedure without actually using any instruments. Expected uses: debugging or replotting data.
62598fb35fc7496912d482c6
class EpicsAuthorityNameValidator(TaurusAuthorityNameValidator): <NEW_LINE> <INDENT> scheme = '(ca|epics)' <NEW_LINE> authority = '//' <NEW_LINE> path = '(?!)' <NEW_LINE> query = '(?!)' <NEW_LINE> fragment = '(?!)' <NEW_LINE> def getNames(self, fullname, factory=None): <NEW_LINE> <INDENT> if self.isValid(fullname): <NEW_LINE> <INDENT> return 'ca://', '//', '' <NEW_LINE> <DEDENT> return None
Validator for Epics authority names. For now, the only supported authority is "//":
62598fb391f36d47f2230ef2
class CajaPropertyPage: <NEW_LINE> <INDENT> def __init__(self, git_uri): <NEW_LINE> <INDENT> self._git = git.Git(git_uri) <NEW_LINE> self._watchdog = watchdog.WatchDog(self._git.dir) <NEW_LINE> self._watchdog.connect("refresh", self._refresh) <NEW_LINE> self._builder = Gtk.Builder() <NEW_LINE> self._builder.add_from_resource('/com/caja/git/ui/property.ui') <NEW_LINE> self._build_widgets() <NEW_LINE> <DEDENT> @property <NEW_LINE> def main(self): <NEW_LINE> <INDENT> return self._builder.get_object("main") <NEW_LINE> <DEDENT> def _build_widgets(self): <NEW_LINE> <INDENT> self._builder.get_object("branch").set_text(self._git.get_branch()) <NEW_LINE> status = self._git.get_status() <NEW_LINE> status_widgets = ["added", "removed", "modified"] <NEW_LINE> for widget_name in status_widgets: <NEW_LINE> <INDENT> count = str(len(status[widget_name])) <NEW_LINE> widget = self._builder.get_object(widget_name) <NEW_LINE> widget.set_text(_("{0} file.").format(count)) <NEW_LINE> <DEDENT> <DEDENT> def _refresh(self, event): <NEW_LINE> <INDENT> branch = self._builder.get_object("branch") <NEW_LINE> branch.set_text(self._git.get_branch()) <NEW_LINE> branch.show()
Property page main widget class.
62598fb38e7ae83300ee9139
class CalendarTodoTestCase(ModuleTestCase): <NEW_LINE> <INDENT> module = 'calendar_todo'
Test Calendar Todo module
62598fb3097d151d1a2c10c3
class WXAuthUserNotLoggedAction(generics.GenericAPIView): <NEW_LINE> <INDENT> def get_object_by_openid(self, out_open_id): <NEW_LINE> <INDENT> return ConsumerUser.get_object(**{'out_open_id': out_open_id}) <NEW_LINE> <DEDENT> def post(self, request, *args, **kwargs): <NEW_LINE> <INDENT> form = WXAuthCreateUserForm(request.data) <NEW_LINE> if not form.is_valid(): <NEW_LINE> <INDENT> return Response({'Detail': form.errors}, status=status.HTTP_400_BAD_REQUEST) <NEW_LINE> <DEDENT> cld = form.cleaned_data <NEW_LINE> result = verify_identifying_code(cld) <NEW_LINE> if isinstance(result, Exception): <NEW_LINE> <INDENT> return Response({'Detail': result.args}, status=status.HTTP_400_BAD_REQUEST) <NEW_LINE> <DEDENT> if request.user.is_binding: <NEW_LINE> <INDENT> return Response({'Detail': 'The phone is already binded'}, status=status.HTTP_400_BAD_REQUEST) <NEW_LINE> <DEDENT> serializer = UserSerializer(request.user) <NEW_LINE> try: <NEW_LINE> <INDENT> serializer.binding_phone_to_user(request, request.user, cld) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> return Response({'Detail': e.args}, status=status.HTTP_400_BAD_REQUEST) <NEW_LINE> <DEDENT> _token_dict = Oauth2AccessToken().get_token(request.user) <NEW_LINE> if isinstance(_token_dict, Exception): <NEW_LINE> <INDENT> return Response({'Detail': _token_dict.args}, status=status.HTTP_400_BAD_REQUEST) <NEW_LINE> <DEDENT> return Response(_token_dict, status=status.HTTP_201_CREATED)
微信用户注册(处于登录状态)
62598fb338b623060ffa9132
class StrEnum(enum.Enum): <NEW_LINE> <INDENT> foo = 'foo' <NEW_LINE> bar = 'bar'
string based enum class for testing message pack/unpack
62598fb37047854f4633f470
class GXChargeTable: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.index = None <NEW_LINE> self.chargePerUnit = 0
Online help: http://www.gurux.fi/Gurux.DLMS.Objects.GXDLMSCharge
62598fb33539df3088ecc348
class PoolIdentityType(with_metaclass(CaseInsensitiveEnumMeta, str, Enum)): <NEW_LINE> <INDENT> USER_ASSIGNED = "UserAssigned" <NEW_LINE> NONE = "None"
The type of identity used for the Batch Pool.
62598fb3009cb60464d015b8
class Time(attr.Attr): <NEW_LINE> <INDENT> def __init__(self, start, end): <NEW_LINE> <INDENT> attr.Attr.__init__(self) <NEW_LINE> self.start = start <NEW_LINE> self.end = end <NEW_LINE> <DEDENT> def collides(self, other): <NEW_LINE> <INDENT> return isinstance(other, Time) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, self.__class__): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return vars(self) == vars(other) <NEW_LINE> <DEDENT> def __hash__(self): <NEW_LINE> <INDENT> return hash(tuple(vars(self).items())) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def dt(cls, start, end): <NEW_LINE> <INDENT> return cls(datetime(*start), datetime(*end))
Restrict query to time range between start and end.
62598fb3bf627c535bcb1536
class Cargo(Interface): <NEW_LINE> <INDENT> def __init__(self, token=None, key=None, secret=None, config=None): <NEW_LINE> <INDENT> super(Cargo, self).__init__(key=key, secret=secret, token=token, config=config) <NEW_LINE> self.address += 'cargo/v1/' <NEW_LINE> <DEDENT> def request(self, endpoint, verb=None, **req_kwargs): <NEW_LINE> <INDENT> req_kwargs['headers'] = {'Authorization': 'Bearer ' + self.token, 'Accept': 'application/json'} <NEW_LINE> resp = super(Cargo, self).request(endpoint, verb=verb, **req_kwargs) <NEW_LINE> resp.raise_for_status() <NEW_LINE> return resp.json() <NEW_LINE> <DEDENT> def delays(self, by_name=None, by_id=None, by_lat_long=None, **endpoint_kwargs): <NEW_LINE> <INDENT> c = sum([1 for v in (by_name, by_id, by_lat_long) if v]) <NEW_LINE> if c > 1: <NEW_LINE> <INDENT> raise ValueError("by_name, by_id and by_long_lat are " "mutually exclusive!") <NEW_LINE> <DEDENT> if not c: <NEW_LINE> <INDENT> raise ValueError("Must pass one of kwargs: by_name, by_id " "and by_long_lat") <NEW_LINE> <DEDENT> endpoint = 'delays' <NEW_LINE> if by_name: <NEW_LINE> <INDENT> payload = {'name': by_name} <NEW_LINE> payload.update(endpoint_kwargs) <NEW_LINE> return self.request(endpoint, params=payload) <NEW_LINE> <DEDENT> elif by_id: <NEW_LINE> <INDENT> endpoint += '/' + by_id <NEW_LINE> <DEDENT> elif by_lat_long: <NEW_LINE> <INDENT> lat, long = by_lat_long <NEW_LINE> endpoint += '/loc/' + str(lat) + '/' + str(long) <NEW_LINE> <DEDENT> return self.request(endpoint, params=endpoint_kwargs)
Wrapper for Deutsche Bahn's Cargo Delay Statistics API. Documentation at: https://developer.deutschebahn.com/store/apis/info?name=Fahrplan&version=v1&provider=DBOpenData
62598fb3460517430c4320a9
class MessageSendBreaker(object): <NEW_LINE> <INDENT> def __init__(self, app, allow): <NEW_LINE> <INDENT> self.app = app <NEW_LINE> self.allow = allow <NEW_LINE> self._send_message_via_window = self.app.send_message_via_window <NEW_LINE> self._messages_sent = 0 <NEW_LINE> <DEDENT> def patch_app(self): <NEW_LINE> <INDENT> self.app.send_message_via_window = self._broken_send <NEW_LINE> <DEDENT> def _broken_send(self, *args, **kw): <NEW_LINE> <INDENT> if self._messages_sent >= self.allow: <NEW_LINE> <INDENT> raise BreakerError("oops") <NEW_LINE> <DEDENT> self._messages_sent += 1 <NEW_LINE> return self._send_message_via_window(*args, **kw)
A helper to break message sending during a bulk send.
62598fb326068e7796d4c9ec
class CustomerFollowUp(models.Model): <NEW_LINE> <INDENT> customer = models.ForeignKey('CustomerInfo') <NEW_LINE> content = models.TextField(verbose_name='跟踪内容') <NEW_LINE> user = models.ForeignKey('UserProfile', verbose_name='跟进人') <NEW_LINE> status_choices = ((0, '近期无报名计划'), (1, '一个月内报名'), (2, '2周内报名'), (3, '已报名')) <NEW_LINE> status = models.SmallIntegerField(choices=status_choices, default=0) <NEW_LINE> date = models.DateField(auto_now_add=True) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return self.content <NEW_LINE> <DEDENT> class Meta: <NEW_LINE> <INDENT> verbose_name = '客户跟踪记录表' <NEW_LINE> verbose_name_plural = '客户跟踪记录表'
客户跟踪记录表
62598fb37d847024c075c459
class Solution: <NEW_LINE> <INDENT> def longestCommonPrefix(self, strs: list[str]) -> str: <NEW_LINE> <INDENT> if len(strs) == 0: <NEW_LINE> <INDENT> return "" <NEW_LINE> <DEDENT> s = strs[0] <NEW_LINE> for i in range(1, len(strs)): <NEW_LINE> <INDENT> while strs[i].find(s) != 0: <NEW_LINE> <INDENT> s = s[:-1] <NEW_LINE> <DEDENT> <DEDENT> return s
1、本题目求最长公共前缀,我们可以采用字符串A和B相比较求出最长前缀,再与C比较 以此类推,求出最终的最长前缀 2、现在问题变成了求两个字符串的最长公共前缀,这样如果我们从第一个字符开始比较 的话,与C比较时候会再次从第一个字符比较增加不必要的开销,所以我们选择整体比较, 如果整体不同则去掉末尾一个字符。
62598fb38a43f66fc4bf2211
class ReadExcel(object): <NEW_LINE> <INDENT> def __init__(self, file_name, sheet_name): <NEW_LINE> <INDENT> self.file_name = file_name <NEW_LINE> self.sheet_name = sheet_name <NEW_LINE> <DEDENT> def open(self): <NEW_LINE> <INDENT> self.wb = openpyxl.load_workbook(self.file_name) <NEW_LINE> self.sh = self.wb[self.sheet_name] <NEW_LINE> <DEDENT> def read_data(self): <NEW_LINE> <INDENT> self.open() <NEW_LINE> rows = list(self.sh.rows) <NEW_LINE> cases = [] <NEW_LINE> titles1 = [r.value for r in rows[0]] <NEW_LINE> for row in rows[1:]: <NEW_LINE> <INDENT> data = [r.value for r in row] <NEW_LINE> case = dict(zip(titles1, data)) <NEW_LINE> cases.append(case) <NEW_LINE> <DEDENT> return cases <NEW_LINE> <DEDENT> def read_data_obj(self): <NEW_LINE> <INDENT> self.open() <NEW_LINE> cases = [] <NEW_LINE> rows = list(self.sh.rows) <NEW_LINE> titles = [r.value for r in rows[0]] <NEW_LINE> for row in rows[1:]: <NEW_LINE> <INDENT> data = [r.value for r in row] <NEW_LINE> zip_obj = zip(titles, data) <NEW_LINE> case_data = CaseData(zip_obj) <NEW_LINE> cases.append(case_data) <NEW_LINE> <DEDENT> return cases <NEW_LINE> <DEDENT> def write_data(self, row, column, value): <NEW_LINE> <INDENT> self.open() <NEW_LINE> self.sh.cell(row=row, column=column, value=value) <NEW_LINE> self.wb.save(self.file_name)
读取excel中的用例数据
62598fb3851cf427c66b834d
class SupportedLanguagesVocabulary(object): <NEW_LINE> <INDENT> implements(IVocabularyFactory) <NEW_LINE> def __call__(self, context): <NEW_LINE> <INDENT> self.context = context <NEW_LINE> canonical_language = context.getCanonical().Language() <NEW_LINE> portal_languages = getToolByName(context, 'portal_languages') <NEW_LINE> terms = [SimpleTerm(id, title=title) for id, title in portal_languages.listSupportedLanguages() if id != canonical_language] <NEW_LINE> return SimpleVocabulary(terms)
Vocabulary that returns all supported languages of the site except for the canonical language
62598fb3f9cc0f698b1c5316
class JobSchema(_dao_utils.TimestampedSchemaMixin, _dao_utils.Schema): <NEW_LINE> <INDENT> key = {'type': 'TEXT', 'primary_key': True, 'default': _dao_utils.generate_key} <NEW_LINE> status = {'type': 'TEXT'}
Fields for job records.
62598fb3dd821e528d6d8fc5
class TransmissionViewSet(viewsets.ModelViewSet): <NEW_LINE> <INDENT> queryset = Transmission.objects.all() <NEW_LINE> serializer_class = TransmissionSerializer <NEW_LINE> def perform_create(self, serializer): <NEW_LINE> <INDENT> serializer.save(created_by=self.request.user, updated_by=self.request.user) <NEW_LINE> <DEDENT> def perform_update(self, serializer): <NEW_LINE> <INDENT> serializer.save(updated_by=self.request.user)
This viewset automatically provides `list` and `detail` actions.
62598fb323849d37ff85114a
class GmfSet(object): <NEW_LINE> <INDENT> def __init__(self, gmfset, investigation_time): <NEW_LINE> <INDENT> self.gmfset = gmfset <NEW_LINE> self.investigation_time = investigation_time <NEW_LINE> self.stochastic_event_set_id = 1 <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> return iter(self.gmfset) <NEW_LINE> <DEDENT> def __nonzero__(self): <NEW_LINE> <INDENT> return bool(self.gmfset) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return ( 'GMFsPerSES(investigation_time=%f, ' 'stochastic_event_set_id=%s,\n%s)' % ( self.investigation_time, self.stochastic_event_set_id, '\n'.join( sorted(str(g) for g in self.gmfset))))
Small wrapper around the list of Gmf objects associated to the given SES.
62598fb363b5f9789fe85200
class SystemSize(object): <NEW_LINE> <INDENT> def __init__(self, source_dir): <NEW_LINE> <INDENT> self.source_dir = source_dir <NEW_LINE> <DEDENT> def customize(self, size, requested_filesystem): <NEW_LINE> <INDENT> if requested_filesystem: <NEW_LINE> <INDENT> if requested_filesystem.startswith('ext'): <NEW_LINE> <INDENT> size *= 1.5 <NEW_LINE> file_count = self.accumulate_files() <NEW_LINE> inode_mbytes = file_count * Defaults.get_default_inode_size() / 1048576 <NEW_LINE> size += 2 * inode_mbytes <NEW_LINE> <DEDENT> elif requested_filesystem == 'btrfs': <NEW_LINE> <INDENT> size *= 1.5 <NEW_LINE> <DEDENT> elif requested_filesystem == 'xfs': <NEW_LINE> <INDENT> size *= 1.5 <NEW_LINE> <DEDENT> <DEDENT> return int(size) <NEW_LINE> <DEDENT> def accumulate_mbyte_file_sizes(self): <NEW_LINE> <INDENT> du_call = Command.run( [ 'du', '-s', '--apparent-size', '--block-size', '1', self.source_dir ] ) <NEW_LINE> return int(du_call.output.split('\t')[0]) / 1048576 <NEW_LINE> <DEDENT> def accumulate_files(self): <NEW_LINE> <INDENT> bash_comand = [ 'find', self.source_dir, '|', 'wc', '-l' ] <NEW_LINE> wc_call = Command.run( [ 'bash', '-c', ' '.join(bash_comand) ] ) <NEW_LINE> return int(wc_call.output.rstrip('\n'))
Provide source tree size information Attributes * :attr:`source_dir` source directory path name
62598fb355399d3f056265b1
class RPCManager(object): <NEW_LINE> <INDENT> def __init__(self, freqtrade) -> None: <NEW_LINE> <INDENT> self.registered_modules: List[RPC] = [] <NEW_LINE> if freqtrade.config['telegram'].get('enabled', False): <NEW_LINE> <INDENT> logger.info('Enabling rpc.telegram ...') <NEW_LINE> from freqtrade.rpc.telegram import Telegram <NEW_LINE> self.registered_modules.append(Telegram(freqtrade)) <NEW_LINE> <DEDENT> if freqtrade.config.get('webhook', {}).get('enabled', False): <NEW_LINE> <INDENT> logger.info('Enabling rpc.webhook ...') <NEW_LINE> from freqtrade.rpc.webhook import Webhook <NEW_LINE> self.registered_modules.append(Webhook(freqtrade)) <NEW_LINE> <DEDENT> if freqtrade.config.get('api_server', {}).get('enabled', False): <NEW_LINE> <INDENT> logger.info('Enabling rpc.api_server') <NEW_LINE> from freqtrade.rpc.api_server import ApiServer <NEW_LINE> self.registered_modules.append(ApiServer(freqtrade)) <NEW_LINE> <DEDENT> <DEDENT> def cleanup(self) -> None: <NEW_LINE> <INDENT> logger.info('Cleaning up rpc modules ...') <NEW_LINE> while self.registered_modules: <NEW_LINE> <INDENT> mod = self.registered_modules.pop() <NEW_LINE> logger.debug('Cleaning up rpc.%s ...', mod.name) <NEW_LINE> mod.cleanup() <NEW_LINE> del mod <NEW_LINE> <DEDENT> <DEDENT> def send_msg(self, msg: Dict[str, Any]) -> None: <NEW_LINE> <INDENT> logger.info('Sending rpc message: %s', msg) <NEW_LINE> for mod in self.registered_modules: <NEW_LINE> <INDENT> logger.debug('Forwarding message to rpc.%s', mod.name) <NEW_LINE> try: <NEW_LINE> <INDENT> mod.send_msg(msg) <NEW_LINE> <DEDENT> except NotImplementedError: <NEW_LINE> <INDENT> logger.error(f"Message type {msg['type']} not implemented by handler {mod.name}.") <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def startup_messages(self, config, pairlist) -> None: <NEW_LINE> <INDENT> if config.get('dry_run', False): <NEW_LINE> <INDENT> self.send_msg({ 'type': RPCMessageType.WARNING_NOTIFICATION, 'status': 'Dry run is enabled. All trades are simulated.' 
}) <NEW_LINE> <DEDENT> stake_currency = config['stake_currency'] <NEW_LINE> stake_amount = config['stake_amount'] <NEW_LINE> minimal_roi = config['minimal_roi'] <NEW_LINE> stoploss = config['stoploss'] <NEW_LINE> trailing_stop = config['trailing_stop'] <NEW_LINE> ticker_interval = config['ticker_interval'] <NEW_LINE> exchange_name = config['exchange']['name'] <NEW_LINE> strategy_name = config.get('strategy', '') <NEW_LINE> self.send_msg({ 'type': RPCMessageType.CUSTOM_NOTIFICATION, 'status': f'*Exchange:* `{exchange_name}`\n' f'*Stake per trade:* `{stake_amount} {stake_currency}`\n' f'*Minimum ROI:* `{minimal_roi}`\n' f'*{"Trailing " if trailing_stop else ""}Stoploss:* `{stoploss}`\n' f'*Ticker Interval:* `{ticker_interval}`\n' f'*Strategy:* `{strategy_name}`' }) <NEW_LINE> self.send_msg({ 'type': RPCMessageType.STATUS_NOTIFICATION, 'status': f'Searching for {stake_currency} pairs to buy and sell ' f'based on {pairlist.short_desc()}' })
Class to manage RPC objects (Telegram, Slack, ...)
62598fb3097d151d1a2c10c5
class Or(object): <NEW_LINE> <INDENT> def __init__(self, *args): <NEW_LINE> <INDENT> self.fields = args <NEW_LINE> <DEDENT> def inside(self, entries): <NEW_LINE> <INDENT> content = map(entries.get, self.fields) <NEW_LINE> assert any(content), "Or({}) not found in {}".format(self.fields, entries) <NEW_LINE> return filter(None, content)[0]
>>> _or = Or('foo', 'bar') >>> _or.inside({'foo': 'foo', 'baz': 'baz'}) 'foo' >>> _or.inside({'foo': 'foo', 'bar': 'baz'}) 'foo' >>> _or.inside({'buz': 'buz', 'baz': 'baz'}) Traceback (most recent call last): ... AssertionError: Or(('foo', 'bar')) not found in {'buz': 'buz', 'baz': 'baz'}
62598fb3a17c0f6771d5c2cd
class add_result(object): <NEW_LINE> <INDENT> thrift_spec = ( None, (1, TType.STRUCT, 'ire', (InvalidRequestException, InvalidRequestException.thrift_spec), None, ), (2, TType.STRUCT, 'ue', (UnavailableException, UnavailableException.thrift_spec), None, ), (3, TType.STRUCT, 'te', (TimedOutException, TimedOutException.thrift_spec), None, ), ) <NEW_LINE> def __init__(self, ire=None, ue=None, te=None,): <NEW_LINE> <INDENT> self.ire = ire <NEW_LINE> self.ue = ue <NEW_LINE> self.te = te <NEW_LINE> <DEDENT> def read(self, iprot): <NEW_LINE> <INDENT> if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: <NEW_LINE> <INDENT> iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) <NEW_LINE> return <NEW_LINE> <DEDENT> iprot.readStructBegin() <NEW_LINE> while True: <NEW_LINE> <INDENT> (fname, ftype, fid) = iprot.readFieldBegin() <NEW_LINE> if ftype == TType.STOP: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> if fid == 1: <NEW_LINE> <INDENT> if ftype == TType.STRUCT: <NEW_LINE> <INDENT> self.ire = InvalidRequestException() <NEW_LINE> self.ire.read(iprot) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> elif fid == 2: <NEW_LINE> <INDENT> if ftype == TType.STRUCT: <NEW_LINE> <INDENT> self.ue = UnavailableException() <NEW_LINE> self.ue.read(iprot) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> elif fid == 3: <NEW_LINE> <INDENT> if ftype == TType.STRUCT: <NEW_LINE> <INDENT> self.te = TimedOutException() <NEW_LINE> self.te.read(iprot) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> iprot.readFieldEnd() <NEW_LINE> <DEDENT> iprot.readStructEnd() <NEW_LINE> <DEDENT> def write(self, oprot): <NEW_LINE> <INDENT> if oprot._fast_encode is not None and self.thrift_spec is not None: <NEW_LINE> <INDENT> 
oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) <NEW_LINE> return <NEW_LINE> <DEDENT> oprot.writeStructBegin('add_result') <NEW_LINE> if self.ire is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('ire', TType.STRUCT, 1) <NEW_LINE> self.ire.write(oprot) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> if self.ue is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('ue', TType.STRUCT, 2) <NEW_LINE> self.ue.write(oprot) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> if self.te is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('te', TType.STRUCT, 3) <NEW_LINE> self.te.write(oprot) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> oprot.writeFieldStop() <NEW_LINE> oprot.writeStructEnd() <NEW_LINE> <DEDENT> def validate(self): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] <NEW_LINE> return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not (self == other)
Attributes: - ire - ue - te
62598fb3fff4ab517ebcd87d
class Stack(): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.st = [] <NEW_LINE> self.top = -1 <NEW_LINE> <DEDENT> def push(self,data): <NEW_LINE> <INDENT> self.st.append(data) <NEW_LINE> self.top += 1 <NEW_LINE> <DEDENT> def pop(self): <NEW_LINE> <INDENT> self.top -= 1 <NEW_LINE> if self.top < 0: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> return self.st.pop() <NEW_LINE> <DEDENT> def peek(self): <NEW_LINE> <INDENT> if self.top < 0: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> return self.st[self.top]
An ADT for stact
62598fb32ae34c7f260ab17a
class TagModelTests(TestCase): <NEW_LINE> <INDENT> def tearDown(self): <NEW_LINE> <INDENT> Tag.objects.all().delete() <NEW_LINE> <DEDENT> def test_tag(self): <NEW_LINE> <INDENT> tag = Tag(text='foo_tag') <NEW_LINE> tag.save() <NEW_LINE> self.assertEqual(str(tag), 'foo_tag') <NEW_LINE> <DEDENT> def test_tag_no_text(self): <NEW_LINE> <INDENT> tag1 = Tag() <NEW_LINE> with self.assertRaises(ValidationError): <NEW_LINE> <INDENT> tag1.full_clean() <NEW_LINE> <DEDENT> <DEDENT> def test_tag_dup(self): <NEW_LINE> <INDENT> tag1 = Tag(text='bar_tag') <NEW_LINE> tag1.save() <NEW_LINE> tag2 = Tag(text='bar_tag') <NEW_LINE> with self.assertRaises(ValidationError): <NEW_LINE> <INDENT> tag2.full_clean() <NEW_LINE> <DEDENT> <DEDENT> def test_tag_case(self): <NEW_LINE> <INDENT> tag1 = Tag(text='foo') <NEW_LINE> tag1.save() <NEW_LINE> tag2 = Tag(text='FOO') <NEW_LINE> with self.assertRaises(ValidationError): <NEW_LINE> <INDENT> tag2.full_clean()
Tag Model test cases.
62598fb366673b3332c30464