code
stringlengths
4
4.48k
docstring
stringlengths
1
6.45k
_id
stringlengths
24
24
class Account(object):
    """Simple savings account: deposits earn interest, withdrawals reduce it.

    "rent" is the per-step interest rate; "whitdrawal" keeps its original
    (misspelled) name for backward compatibility with existing callers.
    """

    def __init__(self, intake, whitdrawal, rent=0.05, amount=100):
        # intake: amount deposited on each increase step
        # whitdrawal: amount withdrawn on each decrease step
        # rent: per-step interest rate (default 5%)
        # amount: starting balance (default 100)
        self.intake = intake
        self.rent = rent
        self.amount = amount
        self.whitdrawal = whitdrawal

    def amount_increase(self):
        """Apply one interest period plus a deposit; return the new balance."""
        # BUG FIX: the original referenced bare names `rent` and `intake`
        # (NameError at runtime) instead of the instance attributes.
        self.amount = self.amount + self.amount * self.rent + self.intake
        return self.amount

    def amount_decrease(self):
        """Apply one interest period minus a withdrawal; return the new balance."""
        self.amount = self.amount + self.amount * self.rent - self.whitdrawal
        return self.amount
Put in and take out money from the bank. The amount in the bank will increase, thanks to the rent (interest). The default beginning amount is 100.
62598faf7047854f4633f3e5
class LoginRequiredMiddleware:
    """Require an authenticated user for every view except exempt URLs.

    Relies on a module-level EXEMPT_URLS list of compiled regexes
    (presumably built from settings.LOGIN_EXEMPT_URLS — confirm upstream).
    """

    def process_view(self, request, vfunc, vargs, vkwargs):
        # Authenticated users pass straight through.
        # NOTE(review): is_authenticated() called as a method is the
        # pre-Django-1.10 API; confirm the Django version in use.
        if request.user.is_authenticated():
            return None
        path = request.path_info.lstrip('/')
        if not any(m.match(path) for m in EXEMPT_URLS):
            # Delegate to the stock login_required decorator, redirecting
            # to settings.LOGIN_URL with no ?next= parameter.
            return login_required(function=vfunc, login_url=settings.LOGIN_URL, redirect_field_name=None)(request, *vargs, **vkwargs)
        return None
Middleware that requires a user to be authenticated to view any page other than LOGIN_URL. Exemptions to this requirement can optionally be specified in settings via a list of regular expressions in LOGIN_EXEMPT_URLS (which you can copy from your urls.py). Requires authentication middleware and template context processors to be loaded. You'll get an error if they aren't. The Login Required middleware requires authentication middleware to be installed. Edit your MIDDLEWARE_CLASSES setting to insert 'django.contrib.auth.middleware.AuthenticationMiddleware'. If that doesn't work, ensure your TEMPLATE_CONTEXT_PROCESSORS setting includes 'django.contrib.auth.context_processors.auth'.
62598faf2c8b7c6e89bd37d0
class CGCType1RopExploit(CGCType1Exploit):
    """A CGC type1 exploit object which sets a register via ROP."""

    def __init__(self, crash, register, reg_bitmask, ip_bitmask, ch_mem, value_var, ip_var):
        # ROP-based exploits bypass both NX and ASLR by construction.
        super(CGCType1RopExploit, self).__init__(crash, register, bypasses_nx=True, bypasses_aslr=True, reg_bitmask=reg_bitmask, ip_bitmask=ip_bitmask)
        self.method_name = 'rop'
        # ch_mem: challenge memory used when generating the formula.
        self._mem = ch_mem
        # Variables constraining the controlled register value and the
        # instruction pointer, consumed by _generate_formula().
        self._arg_vars = [value_var, ip_var]
        self._generate_formula()

    def __str__(self):
        return "<%s> rop type1" % self.register
A CGC type1 exploit object, which sets a register via Rop.
62598faf32920d7e50bc605f
class PolarionTestcases(object):
    """Load and access Polarion test cases from a local work-item cache."""

    def __init__(self, repo_dir):
        self.repo_dir = os.path.expanduser(repo_dir)
        self.wi_cache = WorkItemCache(self.repo_dir)
        # Maps case title -> case id; filled by load_active_testcases().
        self.available_testcases = {}

    def load_active_testcases(self):
        """Scan the cache directory and index active, titled cases by title."""
        found = {}
        for dirpath, _dirnames, filenames in os.walk(self.wi_cache.test_case_dir):
            if 'workitem.xml' not in filenames:
                continue
            case_id = os.path.split(dirpath)[-1]
            if not case_id or '*' in case_id:
                continue
            cached = self.wi_cache[case_id]
            if not cached:
                continue
            status = cached.get('status')
            if not status or status == 'inactive':
                continue
            title = cached.get('title')
            if not title:
                continue
            found[title] = case_id
        self.available_testcases = found

    def get_by_name(self, testcase_name):
        """Return the cached work item for a case title."""
        return self.wi_cache[self.available_testcases[testcase_name]]

    def get_by_id(self, testcase_id):
        """Return the cached work item for a case id."""
        return self.wi_cache[testcase_id]

    def __iter__(self):
        return iter(self.available_testcases)

    def __len__(self):
        return len(self.available_testcases)

    def __contains__(self, item):
        return item in self.available_testcases

    def __repr__(self):
        return '<Testcases {}>'.format(self.available_testcases)
Loads and access Polarion testcases.
62598faf26068e7796d4c960
class PartitionType_Enum (pyxb.binding.datatypes.string, pyxb.binding.basis.enumeration_mixin):
    """An atomic simple type: PyXB binding for the PartitionType.Enum XSD enumeration."""

    _ExpandedName = pyxb.namespace.ExpandedName(Namespace, 'PartitionType.Enum')
    # Location of the defining element in the source schema (path reflects
    # the machine the bindings were generated on).
    _XSDLocation = pyxb.utils.utility.Location('/home/afcastel/hmc_rest_api/code/pmc.schema.pcm-8.8.5.0/schema/common/inc/CommonEnumerations.xsd', 851, 4)
    _Documentation = None
An atomic simple type.
62598faf627d3e7fe0e06eb8
class Solution:
    """Find a majority number in a list of integers."""

    def majorityNumber(self, nums):
        """Return the middle element of the sorted list.

        @param nums: a list of integers
        @return: the majority number (an element occupying more than half
                 the list is always at the middle of the sorted order)
        """
        ordered = sorted(nums)
        return ordered[len(nums) // 2]
@param: nums: a list of integers @return: find a majority number
62598faf76e4537e8c3ef5b8
class BackupResourceConfig(Model):
    """The resource storage details.

    :param storage_type: Storage type as a string (per the wire schema,
        e.g. 'GeoRedundant' or 'LocallyRedundant').
    :param storage_type_state: Locked/Unlocked state as a string.
    """

    # msrest serialization map: python attribute -> wire key and type.
    _attribute_map = {
        'storage_type': {'key': 'storageType', 'type': 'str'},
        'storage_type_state': {'key': 'storageTypeState', 'type': 'str'},
    }

    def __init__(self, storage_type=None, storage_type_state=None):
        self.storage_type = storage_type
        self.storage_type_state = storage_type_state
The resource storage details. :param storage_type: Storage type. Possible values include: 'Invalid', 'GeoRedundant', 'LocallyRedundant' :type storage_type: str or :class:`StorageType <azure.mgmt.recoveryservicesbackup.models.StorageType>` :param storage_type_state: Locked or Unlocked. Once a machine is registered against a resource, the storageTypeState is always Locked. Possible values include: 'Invalid', 'Locked', 'Unlocked' :type storage_type_state: str or :class:`StorageTypeState <azure.mgmt.recoveryservicesbackup.models.StorageTypeState>`
62598faf99cbb53fe6830ee3
class RandomMask(TokenResource):
    """GET endpoint returning `size` random avatar (mask) ids from MongoDB."""

    def get(self):
        parser = reqparse.RequestParser()
        # 'size' is optional; defaults to a single avatar id.
        parser.add_argument('size', type=int, help='可不填, 默认返回一个头像id')
        args = parser.parse_args()
        mask_size = args['size'] if args['size'] else 1
        # $sample draws `mask_size` random documents from the masks collection.
        sample = connection[MongoConfig.DB][CollectionName.MASKS].aggregate(
            [{"$sample": {"size": mask_size}}]
        )
        # NOTE(review): indexing sample['result'] matches the legacy pymongo
        # dict-style aggregate return; modern pymongo returns a cursor —
        # confirm the driver version in use.
        return {
            "status": "ok",
            "message": "成功生成随机头像列表",
            "data": {
                "masks": [str(i['_id']) for i in sample['result']]
            }
        }
随机取一个 mask_id 放在原列表第一位, 删掉原列表最末位
62598faf4e4d562566372431
class StopBaseClient(object):
    """Proxy class for making stop_base service calls."""

    def __init__(self, srv_name='stop_base'):
        # Block until the service is advertised, then bind the proxy.
        rospy.wait_for_service(srv_name)
        self.proxy = rospy.ServiceProxy(srv_name, StopBase)

    def stop_base(self, status, requester):
        """Send a StopBase request built from status/requester; return the response."""
        request = make_request(status, requester)
        return self.proxy(request)
Proxy class for making stop_base calls.
62598fafe76e3b2f99fd8a42
class GetNormalsNode(bpy.types.Node, SverchCustomTreeNode):
    """Sverchok node: calculate normals of faces and vertices."""

    bl_idname = 'GetNormalsNode'
    bl_label = 'Calc Normals'
    bl_icon = 'OUTLINER_OB_EMPTY'

    def sv_init(self, context):
        # Declare sockets: mesh components in, two normal streams out.
        self.inputs.new('VerticesSocket', "Vertices")
        self.inputs.new('StringsSocket', "Edges")
        self.inputs.new('StringsSocket', "Polygons")
        self.outputs.new('VerticesSocket', "FaceNormals")
        self.outputs.new('VerticesSocket', "VertexNormals")

    def process(self):
        # Skip all work when nothing downstream consumes either output.
        if not (self.outputs['VertexNormals'].is_linked or self.outputs['FaceNormals'].is_linked):
            return
        vertices_s = self.inputs['Vertices'].sv_get(default=[[]])
        edges_s = self.inputs['Edges'].sv_get(default=[[]])
        faces_s = self.inputs['Polygons'].sv_get(default=[[]])
        result_vertex_normals = []
        result_face_normals = []
        # Equalize the three input lists' lengths before iterating in lockstep.
        meshes = match_long_repeat([vertices_s, edges_s, faces_s])
        for vertices, edges, faces in zip(*meshes):
            vertex_normals, face_normals = calc_mesh_normals(vertices, edges, faces)
            result_vertex_normals.append(vertex_normals)
            result_face_normals.append(face_normals)
        # Write only the outputs that are actually connected.
        if self.outputs['FaceNormals'].is_linked:
            self.outputs['FaceNormals'].sv_set(result_face_normals)
        if self.outputs['VertexNormals'].is_linked:
            self.outputs['VertexNormals'].sv_set(result_vertex_normals)
Calculate normals of faces and vertices
62598faf7b25080760ed74bb
class PluginMeta(type):
    """Self-registering plugin metaclass.

    Based on http://martyalchin.com/2008/jan/10/simple-plugin-framework/
    The first class created with this metaclass becomes the registry root
    (it gains `plugins`, `aliases`, `source`); every later subclass is
    appended to `plugins` and has its ALIASES entries registered.
    """

    def __init__(cls, name, bases, attrs):
        if not hasattr(cls, 'plugins'):
            # Registry root: create the shared class-level registries.
            cls.plugins = []
            cls.aliases = {}
            cls.source = {}
            cls.source[cls.__name__] = inspect.getsource(cls)
        else:
            # Plugin subclass: register it and record its source text.
            cls.plugins.append(cls)
            cls.source[cls.__name__] = inspect.getsource(cls)
            if hasattr(cls, 'ALIASES'):
                for alias in cls.ALIASES:
                    # Refuse silently-shadowed aliases across plugins.
                    if alias in cls.aliases:
                        raise Exception("duplicate alias %s found in %s, already present in %s" % (alias, cls.__name__, cls.aliases[alias].__name__))
                    cls.aliases[alias] = cls
Based on http://martyalchin.com/2008/jan/10/simple-plugin-framework/
62598faf2ae34c7f260ab0ed
class RadioSettingGroup(object):
    """A group of settings: an ordered, name-addressable container.

    NOTE: Python 2 style — the iterator defines next() (not __next__) and
    indexes self.keys() as a list, so this class is not Python 3 ready.
    """

    def _validate(self, element):
        # Only other RadioSettingGroup instances may be nested.
        if not isinstance(element, RadioSettingGroup):
            raise InternalError("Incorrect type %s" % type(element))

    def __init__(self, name, shortname, *elements):
        self._name = name
        self._shortname = shortname
        self.__doc__ = name
        self._elements = {}
        # Parallel list preserving insertion order of element names.
        self._element_order = []
        for element in elements:
            self._validate(element)
            self.append(element)

    def get_name(self):
        return self._name

    def get_shortname(self):
        return self._shortname

    def set_doc(self, doc):
        # Replace the docstring used for display purposes.
        self.__doc__ = doc

    def __str__(self):
        string = "group '%s': {\n" % self._name
        for element in sorted(self._elements.values()):
            string += "\t" + str(element) + "\n"
        string += "}"
        return string

    def append(self, element):
        # Keyed by the element's own name.
        self[element.get_name()] = element

    def __iter__(self):
        class RSGIterator:
            # Iterates elements in insertion order (Python 2 protocol).
            def __init__(self, rsg):
                self.__rsg = rsg
                self.__i = 0

            def __iter__(self):
                return self

            def next(self):
                if self.__i >= len(self.__rsg.keys()):
                    raise StopIteration()
                e = self.__rsg[self.__rsg.keys()[self.__i]]
                self.__i += 1
                return e
        return RSGIterator(self)

    def __len__(self):
        return len(self._elements)

    def __getitem__(self, name):
        return self._elements[name]

    def __setitem__(self, name, value):
        # Names must be unique within the group.
        if name in self._element_order:
            raise KeyError("Duplicate item %s" % name)
        self._elements[name] = value
        self._element_order.append(name)

    def items(self):
        return [(name, self._elements[name]) for name in self._element_order]

    def keys(self):
        return self._element_order

    def values(self):
        return [self._elements[name] for name in self._element_order]
A group of settings
62598faf3346ee7daa33764d
class DocumentList(list):
    """A list of documents plus remote-paging metadata.

    Carries the owning package name, this page's offset within the remote
    result set, and whether more elements remain on the remote service.
    """

    def __init__(self, package_name, has_more, offset):
        super(DocumentList, self).__init__()
        self._package_name = package_name
        self._has_more = has_more
        self._offset = offset

    def package_name(self):
        """Name of the package these documents belong to."""
        return self._package_name

    def offset(self):
        """Offset of this page in the remote collection."""
        return self._offset

    def has_more(self):
        """Whether the remote service holds more elements."""
        return self._has_more
DocumentList is a list object providing extra methods for obtaining extra document list statuses, such as the number of elements found, the current elements list offset, and if there are more elements on the remote service.
62598fafd486a94d0ba2bfda
class FileProxySource(BaseProxySource):
    """Proxy source that loads the proxy list from a local file."""

    def __init__(self, path, **kwargs):
        # Remember the file path before delegating to the base source.
        self.path = path
        super(FileProxySource, self).__init__(**kwargs)

    def load_raw_data(self):
        """Read and return the whole proxy-list file as text."""
        with open(self.path) as inp:
            data = inp.read()
        return data
Proxy source that loads list from the file
62598faf99cbb53fe6830ee4
class Result(PrintableResultMixin):
    """Placeholder for analysis results.

    Bundles the originating group, provider, and checker with a result
    code and its associated messages.
    """

    def __init__(self, group, provider, checker, code, messages):
        self.group = group
        self.provider = provider
        self.checker = checker
        self.code = code
        self.messages = messages
Placeholder for analysis results.
62598fafbe8e80087fbbf070
class Email(object):
    """Build and send the HTML test report by email.

    Reads SMTP settings from ReadConfig, scrapes the overall result text
    from the rendered HTML report via Selenium, and mails the report both
    inline and as an attachment.
    """

    def __init__(self):
        self.Rc = RC.ReadConfig()
        self.server = self.Rc.getMail('Smtp_Server')
        self.sender = self.Rc.getMail('Smtp_Sender')
        self.password = self.Rc.getMail('Password')
        self.LReceiver = self.Rc.getMail('OnLine_Receiver')
        self.TReceiver = self.Rc.getMail('Test_Receiver')
        self.Msg_Title = self.Rc.getMail('Msg_Title')
        self.Content_Type = self.Rc.getMail('Content_Type')
        self.Content_Disposition = self.Rc.getMail('Content_Disposition')
        self.resultPath = self.Rc.getMail('resultPath')
        self.log = MyLog.getLog('SendEmail')
        self.logger = self.log.logger
        self.msg = MIMEMultipart()

    def get_Result(self, reportFile):
        # Render the HTML report in Chrome and scrape the summary text at
        # the configured XPath; the last ':'-separated field is the result.
        self.driver = webdriver.Chrome()
        self.driver.maximize_window()
        self.result_url = "file://%s" % reportFile
        self.driver.get(self.result_url)
        time.sleep(3)
        resultPath = self.resultPath
        self.result = self.driver.find_element_by_xpath(resultPath).text
        self.result = self.result.split(':')
        self.driver.quit()
        self.setHeader(self.result[-1])

    def setHeader(self, result):
        # Subject: [执行结果:<result>]<configured title><timestamp>
        now = time.strftime("%Y-%m-%d-%H_%M_%S")
        self.msg['subject'] = Header('[执行结果:' + result + ']' + self.Msg_Title + now, 'utf-8')

    def setContent(self, reportFile):
        # Attach the report HTML as the message body, then again as an
        # attachment via setAccessory().
        f = open(reportFile, 'rb')
        self.mail_body = f.read()
        f.close()
        self.contentText = MIMEText(self.mail_body, 'html', 'UTF-8')
        self.msg.attach(self.contentText)
        self.setAccessory(self.mail_body)

    def setAccessory(self, mail_body):
        self.accessory = MIMEText(mail_body, 'html', 'utf-8')
        self.accessory['Content-Type'] = self.Content_Type
        self.accessory["Content-Disposition"] = self.Content_Disposition
        self.msg.attach(self.accessory)

    def sendEMail(self, reportFile):
        """Scrape the result, build the message, and send it; True on success."""
        self.get_Result(reportFile)
        self.setContent(reportFile)
        try:
            self.smtp = smtplib.SMTP(self.server, 25)
            self.smtp.login(self.sender, self.password)
            # NOTE(review): self.receiver and self.PReceiver are never
            # defined in this class (only LReceiver/TReceiver are) — this
            # line will raise AttributeError at runtime; confirm intent.
            self.receiver.append(self.PReceiver)
            print(self.receiver)
            self.msg['From'] = self.sender
            self.msg['To'] = ",".join(self.receiver)
            self.smtp.sendmail(self.sender, self.receiver, self.msg.as_string())
            self.smtp.quit()
            self.logger.info("The test report has send to developer by email.")
            return True
        except smtplib.SMTPException as e:
            self.logger.error(str(e))
            print(str(e))
            return False
创建一个邮件类
62598faf4a966d76dd5eeee3
class DistortedColormap2(object):
    """Colormap distorted through a two-control-point piecewise remapping.

    Example:
        dcm = DistortedColormap2('jet', xmid1=0.5, ymid1=0.5,
                                 xmid2=0.5, ymid2=0.5)
        dcm.distorted
    """

    # Number of samples taken per unit segment of the original colormap.
    n = 100

    def __init__(self, name='jet', vmin=0.0, vmax=1.0, xmid1=0.25, ymid1=0.25, xmid2=0.75, ymid2=0.75):
        self.name = name
        self.original = cm.get_cmap(name)
        self.set_lim(vmin, vmax)
        self.set_mid(xmid1, ymid1, xmid2, ymid2)

    def set_lim(self, vmin, vmax):
        """Set the value range mapped onto the colormap ends."""
        self.vmin = vmin
        self.vmax = vmax

    def set_mid(self, xmid1, ymid1, xmid2, ymid2):
        """Set the two (x, y) control points of the distortion."""
        self.xmid1 = xmid1
        self.ymid1 = ymid1
        self.xmid2 = xmid2
        self.ymid2 = ymid2

    @property
    def space(self):
        """Sampling positions: three linear segments through the control points."""
        # BUG FIX: np.linspace requires an integer sample count; the
        # original passed floats (e.g. self.n * self.xmid1), which raises
        # TypeError on modern NumPy. Truncation preserves the old values.
        n1 = int(self.n * self.xmid1)
        n2 = int(self.n - self.n * (self.xmid2 - self.xmid1))
        n3 = int(self.n - self.n * (self.vmax - self.xmid2))
        # NOTE(review): the counts for segments 2 and 3 (n - n*(...)) do
        # not partition n for general control points — confirm intended.
        space1 = np.linspace(self.vmin, self.ymid1, n1, endpoint=False)
        space2 = np.linspace(self.ymid1, self.ymid2, n2, endpoint=False)
        space3 = np.linspace(self.ymid2, self.vmax, n3)
        return np.concatenate((space1, space2, space3))

    @property
    def distorted(self):
        """The remapped colormap as a LinearSegmentedColormap."""
        return LinearSegmentedColormap.from_list(
            repr(self), self.original(self.space))

    def __repr__(self):
        # BUG FIX: the original format string had a stray ')' after ymid1
        # and was missing the comma separating ymid1 from xmid2.
        return ('{}(name={s.name!r}, vmin={s.vmin}, vmax={s.vmax}, '
                'xmid1={s.xmid1}, ymid1={s.ymid1}, '
                'xmid2={s.xmid2}, ymid2={s.ymid2})'
                ).format(type(self).__name__, s=self)
dcm = DistortedColormap2('jet', xmid1=0.5, ymid1=0.5, xmid2=0.5, ymid2=0.5) dcm.distorted
62598faf7d847024c075c3cf
class CDPlayer:
    """A CDDA player facade; playback runs in a separate worker thread.

    Commands are posted to a queue consumed by CDPlayerThread, so every
    method here returns immediately.

    NOTE: uses the Python 2 `Queue` module name.
    """

    def __init__(self, cdda, audio_output, next_track_callback=lambda: None):
        self.command_queue = Queue.Queue()
        self.worker = CDPlayerThread(cdda, audio_output, self.command_queue)
        self.thread = threading.Thread(target=self.worker.run, args=(next_track_callback,))
        # Daemon thread: do not block interpreter exit on playback.
        self.thread.daemon = True
        self.thread.start()

    def open(self, track_number):
        # Queue a request to load the given track.
        self.command_queue.put(("open", [track_number]))

    def play(self):
        self.command_queue.put(("play", []))

    def pause(self):
        self.command_queue.put(("pause", []))

    def toggle_play_pause(self):
        self.command_queue.put(("toggle_play_pause", []))

    def stop(self):
        self.command_queue.put(("stop", []))

    def close(self):
        # "exit" tells the worker thread to terminate.
        self.command_queue.put(("exit", []))

    def progress(self):
        """Return (frames_played, total_frames) as reported by the worker."""
        return (self.worker.frames_played, self.worker.total_frames)
A class for operating a CDDA player. The player itself runs in a seperate thread, which this sends commands to.
62598faf851cf427c66b82c8
class BaseError(Exception):
    """Base error class carrying a code, message, status, and optional field."""

    def __init__(self, code=400, message='', status='', field=None):
        super(BaseError, self).__init__()
        self.code = code
        self.message = message
        self.status = status
        self.field = field

    def to_dict(self):
        """Serialize the error into a plain dict (e.g. for a JSON response)."""
        return {
            'code': self.code,
            'message': self.message,
            'status': self.status,
            'field': self.field,
        }

    def __str__(self):
        return self.message
Base Error Class
62598faf71ff763f4b5e777d
class AbstractView(FloatLayout):
    """View using an :class:`~kivy.adapters.adapter.Adapter` as a data provider."""

    # The adapter supplying data to this view; set by the concrete view.
    adapter = ObjectProperty(None)
View using an :class:`~kivy.adapters.adapter.Adapter` as a data provider.
62598fafa8370b77170f03e7
class WithSeededRandomPipelineEngine(WithTradingSessions, WithAssetFinder):
    """ZiplineTestCase mixin providing class-level fixtures for running
    pipelines against deterministically-generated random data.

    SEEDED_RANDOM_PIPELINE_SEED seeds the random state loader;
    seeded_random_engine is a SimplePipelineEngine whose only data
    provider is seeded_random_loader.
    """

    SEEDED_RANDOM_PIPELINE_SEED = 42

    @classmethod
    def init_class_fixtures(cls):
        super(WithSeededRandomPipelineEngine, cls).init_class_fixtures()
        cls._sids = cls.asset_finder.sids
        cls.seeded_random_loader = loader = make_seeded_random_loader(
            cls.SEEDED_RANDOM_PIPELINE_SEED,
            cls.trading_days,
            cls._sids,
        )
        # Every column resolves to the same seeded loader.
        cls.seeded_random_engine = SimplePipelineEngine(
            get_loader=lambda column: loader,
            calendar=cls.trading_days,
            asset_finder=cls.asset_finder,
        )

    def raw_expected_values(self, column, start_date, end_date):
        """Return the loader's raw values for [start_date, end_date]."""
        all_values = self.seeded_random_loader.values(
            column.dtype,
            self.trading_days,
            self._sids,
        )
        row_slice = self.trading_days.slice_indexer(start_date, end_date)
        return all_values[row_slice]

    def run_pipeline(self, pipeline, start_date, end_date):
        """Run `pipeline` with self.seeded_random_engine, validating dates."""
        if start_date not in self.trading_days:
            raise AssertionError("Start date not in calendar: %s" % start_date)
        # BUG FIX: the original raised "Start date not in calendar" with
        # start_date here, despite checking end_date.
        if end_date not in self.trading_days:
            raise AssertionError("End date not in calendar: %s" % end_date)
        return self.seeded_random_engine.run_pipeline(
            pipeline,
            start_date,
            end_date,
        )
ZiplineTestCase mixin providing class-level fixtures for running pipelines against deterministically-generated random data. Attributes ---------- SEEDED_RANDOM_PIPELINE_SEED : int Fixture input. Random seed used to initialize the random state loader. seeded_random_loader : SeededRandomLoader Fixture output. Loader capable of providing columns for zipline.pipeline.data.testing.TestingDataSet. seeded_random_engine : SimplePipelineEngine Fixture output. A pipeline engine that will use seeded_random_loader as its only data provider. Methods ------- run_pipeline(start_date, end_date) Run a pipeline with self.seeded_random_engine. See Also -------- zipline.pipeline.loaders.synthetic.SeededRandomLoader zipline.pipeline.loaders.testing.make_seeded_random_loader zipline.pipeline.engine.SimplePipelineEngine
62598faffff4ab517ebcd7f1
class SendUSMSMessageRequestSchema(schema.RequestSchema):
    """SendUSMSMessage - 调用接口SendUSMSMessage发送短信 (request schema).

    Declares the marshalling fields for the SendUSMSMessage SMS API;
    ProjectId, SigContent, and TemplateId are mandatory.
    """

    fields = {
        "ExtendCode": fields.Str(required=False, dump_to="ExtendCode"),
        "PhoneNumbers": fields.List(fields.Str()),
        "ProjectId": fields.Str(required=True, dump_to="ProjectId"),
        "Region": fields.Str(
            required=False, dump_to="Region"
        ),
        "SigContent": fields.Str(required=True, dump_to="SigContent"),
        "TemplateId": fields.Str(required=True, dump_to="TemplateId"),
        "TemplateParams": fields.List(fields.Str()),
        "UserId": fields.Str(required=False, dump_to="UserId"),
        "Zone": fields.Str(
            required=False, dump_to="Zone"
        ),
    }
SendUSMSMessage - 调用接口SendUSMSMessage发送短信
62598fafff9c53063f51a659
class TestGatherWater(unittest.TestCase):
    """Tests for the gather_water function from rain_terrace.py."""

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def test_that_functions_exist(self):
        # An empty terrace fixture: gather_water must at least return a
        # non-None result for it.
        terrace = {
            'total_water': 0,
            'left_idx': 0,
            'right_idx': 0,
            'left_wall': 0,
            'right_wall': 0,
            'bottom_vals': [],
        }
        self.assertIsNotNone(gather_water(terrace))
Tests Gather Water function from rain_terrace.py
62598fafbe383301e0253805
class accept2:
    """Decorator class to handle parsing the HTTP Accept header.

    Aborts with 406 when the Accept header matches neither the configured
    mimetype nor '*/*'; when the matching media type carries a 'depth'
    parameter, forwards it into the wrapped handler's `depth` argument.
    """

    def __init__(self, mimetype):
        self.mimetype = mimetype

    def __call__(self, next):
        def inner(*args, **kwargs):
            # Parsed Accept header: mapping of mimetype -> parameter dict.
            accepted = parse_accept_header()

            def set_depth(mimetype):
                # Copy the media type's 'depth' parameter (if present)
                # into the handler call, rebinding the enclosing args.
                if 'depth' not in accepted[mimetype]:
                    return
                nonlocal args
                try:
                    depth = int(accepted[mimetype]['depth'])
                except Exception as exc:
                    # Non-integer depth is a client error.
                    bottle.abort(400, "Invalid depth value")
                argspec = inspect.getfullargspec(next)[0]
                if 'depth' in argspec and 'depth' not in kwargs:
                    index = argspec.index('depth')
                    # If a positional value already occupies the 'depth'
                    # slot, overwrite it in place; otherwise pass keyword.
                    if args and len(args) > index and args[index] is not None:
                        new_args = list(args)
                        new_args[index] = depth
                        args = tuple(new_args)
                    else:
                        kwargs['depth'] = depth
                else:
                    kwargs['depth'] = depth
            if self.mimetype in accepted:
                set_depth(self.mimetype)
            elif '*/*' in accepted:
                set_depth('*/*')
            else:
                bottle.abort(406, 'Expected application/json')
            return next(*args, **kwargs)
        return inner
Decorator class to handle parsing the HTTP Accept header.
62598faf63d6d428bbee27b8
@skipUnless(getattr(settings, 'SELENIUM_TESTS', False), 'Selenium tests disabled. Set SELENIUM_TESTS = True in your settings.py to enable.')
class ProjectSeleniumTests(ProjectTestsMixin, SeleniumTestCase):
    """Selenium tests for Projects: list-page navigation and content."""

    def setUp(self):
        # Create three campaign projects with zero donations.
        self.projects = dict([(slugify(title), title) for title in [
            u'Women first 2',
            u'Mobile payments for everyone 2!',
            u'Schools for children 2'
        ]])
        for slug, title in self.projects.items():
            project = self.create_project(title=title, slug=slug, money_asked=100000)
            project.projectcampaign.money_donated = 0
            project.projectcampaign.save()

    def visit_project_list_page(self, lang_code=None):
        self.visit_path('/projects', lang_code)
        self.assertTrue(self.browser.is_element_present_by_css('.item.item-project'), 'Cannot load the project list page.')

    def test_navigate_to_project_list_page(self):
        # Click through from the homepage and verify URL and title.
        self.visit_homepage()
        self.browser.find_link_by_text('1%Projects').first.click()
        self.assertTrue(self.browser.is_element_present_by_css('.item.item-project'), 'Cannot load the project list page.')
        self.assertEqual(self.browser.url, '%s/en/#!/projects' % self.live_server_url)
        self.assertEqual(self.browser.title, '1%Club - Share a little. Change the world')

    def test_view_project_list_page(self):
        self.visit_project_list_page()
        # Give the client-side app time to render the list.
        time.sleep(2)

        def convert_money_to_int(money_text):
            # Strip the euro sign and thousands/decimal punctuation.
            return int(money_text.strip(u'€ ').replace('.', '').replace(',', ''))
        web_projects = []
        for p in self.browser.find_by_css('.item.item-project'):
            # NOTE(review): the scraped '.donated' amount is stored under
            # the 'money_needed' key — confirm this is intentional.
            donated = convert_money_to_int(p.find_by_css('.donated').first.text)
            web_projects.append({
                'title': p.find_by_css('h3').first.text,
                'money_needed': donated,
            })
        self.assertTrue(len(web_projects) > 0)
        expected_projects = []
        for p in Project.objects.filter(phase=ProjectPhases.campaign).order_by('popularity')[:len(web_projects)]:
            expected_projects.append({
                'title': p.title.upper(),
                'money_needed': int(round(p.projectcampaign.money_needed / 100.0)),
            })
        self.assertListEqual(web_projects, expected_projects)
Selenium tests for Projects.
62598faf4f6381625f1994c5
class TestRLCount(unittest.TestCase):
    """Test rlCount() function of tu.ResData."""

    def test_tpc(self):
        # The 2016 .tpc fixture is expected to yield rlCount() == 3.
        dir = os.path.dirname(__file__)
        tpc = os.path.join(dir, '2016', 'M04_5m_001.tpc')
        res = tu.ResData(tpc)
        t = res.rlCount()
        self.assertEqual(t, 3)

    def test_info(self):
        # The 2013 .info fixture is expected to yield rlCount() == 0.
        dir = os.path.dirname(__file__)
        info = os.path.join(dir, '2013', 'M04_5m_001_1d.info')
        res = tu.ResData(info)
        t = res.rlCount()
        self.assertEqual(t, 0)
Test rlCount() function
62598faffff4ab517ebcd7f2
class ExampleTop0(Unit):
    """Lorem Ipsum component to have something to compile (hwt HDL unit)."""

    def _config(self):
        # Shared data width for the handshaked interfaces.
        self.DATA_WIDTH = Param(2)

    def _declr(self):
        addClkRstn(self)
        with self._paramsShared():
            self.a = Handshaked()
            self.b = Handshaked()._m()
        r = self.ram_port = BramPort_withoutClk()
        r.ADDR_WIDTH = 10
        r.DATA_WIDTH = 8

    def _impl(self):
        a, b = self.a, self.b
        # One-cycle delayed valid flag driving the output handshake.
        vld = self._reg("vld_delayed", def_val=0)
        vld(a.vld)
        b.vld(vld)
        b.data(a.data + 1)
        a.rd(b.rd & ~vld)
        ram_port = self.ram_port
        ram = self._sig("ram", Bits(8)[1024])
        # Synchronous RAM: read when enabled, write when enabled+we.
        If(self.clk._onRisingEdge(),
           If(ram_port.en,
              ram_port.dout(ram[ram_port.addr])
           ),
           If(ram_port.en & ram_port.we,
              ram[ram_port.addr](ram_port.din)
           ),
        )

        def r(row, start, last):
            # Build pin names row<start>..row<last> inclusive.
            a = []
            for x in range(start, last + 1):
                a.append(row + ("%d" % x))
            return a

        def p(intf, pinMap, ioStd=XdcIoStandard.LVCMOS18):
            # Attach pin/IO-standard constraints to an interface.
            ConstrainIo(intf, pinMap, ioStd)
Lorem Ipsum componet to have something to compile
62598faf2c8b7c6e89bd37d2
class SubjectGuideView(SubjectBaseView):
    """Extends and adds functionality for the main subject guide view.

    @author: David Hietpas
    @version: 1.1
    """

    # Daily-rotated featured database (empty dict when none remain).
    featured_database = {}

    def __init__(self, context, request):
        super(SubjectGuideView, self).__init__(context, request)
        # Headline picks: first 3 research databases + first book +
        # first primary source.
        self.databases = SubjectFactory._safe_pop(self.research_databases, 0, 3) + SubjectFactory._safe_pop(self.books, 0, 1) + SubjectFactory._safe_pop(self.primary_sources, 0, 1)
        remaining_databases = SubjectFactory._safe_pop(self.research_databases, 3, 999) + SubjectFactory._safe_pop(self.books, 1, 999) + SubjectFactory._safe_pop(self.primary_sources, 1, 999)
        if remaining_databases:
            # Rotate the featured pick by the day of the year.
            self.featured_database = remaining_databases[(DateTime().dayOfYear() % len(remaining_databases))]

    def getNewNewsItems(self):
        """Return up to 3 recent published news entries for this subject."""
        # Window length in days comes from site_properties (default 90).
        timespan = getToolByName(self.context, 'portal_properties').get('site_properties').getProperty('news_limit_days', 90)
        from_start = DateTime() - timespan
        brains = getToolByName(self.context, 'portal_catalog').searchResults(portal_type='WeblogEntry',
            Subject=self.context.getNewsTopic(),
            sort_on='created',
            sort_order='descending',
            review_state='published',
            created={'query': (from_start, DateTime('2045-11-19 11:59:00')),
                     'range': 'min:max'}
        )
        return brains[:3]

    def isOnCampus(self):
        """True when the requesting IP string starts with the campus IP range prefix."""
        ip_range = getToolByName(self.context, 'portal_properties').get('library_ip_ranges').getProperty('campus_ip_range', '0.0.0.0')
        if 'HTTP_X_FORWARDED_FOR' in self.request.environ:
            ip = self.request.environ['HTTP_X_FORWARDED_FOR']
        elif 'HTTP_HOST' in self.request.environ:
            # NOTE(review): branch tests HTTP_HOST but reads REMOTE_ADDR —
            # confirm this is intentional.
            ip = self.request.environ['REMOTE_ADDR']
        else:
            ip = "0.0.0.0"
        # X-Forwarded-For may carry a chain; take the first hop.
        ip = ip.split(',')[0].strip()
        return ip.startswith(ip_range)
This controller class extends and adds functionality for the main subject guide view. @author: David Hietpas @version: 1.1
62598faf442bda511e95c464
class cancelJob_result(object): <NEW_LINE> <INDENT> thrift_spec = ( (0, TType.BOOL, 'success', None, None, ), ) <NEW_LINE> def __init__(self, success=None,): <NEW_LINE> <INDENT> self.success = success <NEW_LINE> <DEDENT> def read(self, iprot): <NEW_LINE> <INDENT> if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: <NEW_LINE> <INDENT> iprot._fast_decode(self, iprot, (self.__class__, self.thrift_spec)) <NEW_LINE> return <NEW_LINE> <DEDENT> iprot.readStructBegin() <NEW_LINE> while True: <NEW_LINE> <INDENT> (fname, ftype, fid) = iprot.readFieldBegin() <NEW_LINE> if ftype == TType.STOP: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> if fid == 0: <NEW_LINE> <INDENT> if ftype == TType.BOOL: <NEW_LINE> <INDENT> self.success = iprot.readBool() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> iprot.readFieldEnd() <NEW_LINE> <DEDENT> iprot.readStructEnd() <NEW_LINE> <DEDENT> def write(self, oprot): <NEW_LINE> <INDENT> if oprot._fast_encode is not None and self.thrift_spec is not None: <NEW_LINE> <INDENT> oprot.trans.write(oprot._fast_encode(self, (self.__class__, self.thrift_spec))) <NEW_LINE> return <NEW_LINE> <DEDENT> oprot.writeStructBegin('cancelJob_result') <NEW_LINE> if self.success is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('success', TType.BOOL, 0) <NEW_LINE> oprot.writeBool(self.success) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> oprot.writeFieldStop() <NEW_LINE> oprot.writeStructEnd() <NEW_LINE> <DEDENT> def validate(self): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] <NEW_LINE> return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return isinstance(other, self.__class__) and self.__dict__ 
== other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not (self == other)
Attributes: - success
62598fafcc0a2c111447b01e
class ExportAction(BaseAction): <NEW_LINE> <INDENT> _cmd_ = 'export' <NEW_LINE> _help_ = 'export apk file(s)' <NEW_LINE> path = Argument('--path', '-p', default='/app', action='store', help='app source path') <NEW_LINE> def handler(self, path='/app'): <NEW_LINE> <INDENT> options = { 'path': path } <NEW_LINE> project = builds.from_path(path) <NEW_LINE> print(project.get_export_path())
Export action that inherits from BaseAction to show the apk(s) path to be exported from the container.
62598faf3d592f4c4edbaecd
class LeaveController(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.base_url = "http://qa.eipsev.com//ApplicationForPay/SubmitApplicationForPay" <NEW_LINE> self.cookie = get_cookie.get_cookies() <NEW_LINE> <DEDENT> def test_leave_all_true(self): <NEW_LINE> <INDENT> payload = {"WorkFlowCC":[], "ImagesList":[], "Data":{ "DeptID":"96f75a51-779b-491a-9773-cb5f90cef11e", "DeptName":"技术研发部", "PayDetailList":[ {"Amount":"2", "Type":"差旅费", "Statement":"报销" } ] } } <NEW_LINE> r = requests.post(self.base_url,json = payload,cookies = self.cookie) <NEW_LINE> self.result = r.json() <NEW_LINE> self.assertEqual(self.result['Success'],True) <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> print(self.result)
报销
62598faf76e4537e8c3ef5ba
class BlogPageTag(TaggedItemBase): <NEW_LINE> <INDENT> content_object = ParentalKey( 'BlogPage', related_name='tagged_items', on_delete=models.CASCADE )
Support for tagging posts.
62598faf91f36d47f2230ead
class OLDWorkflowField(with_metaclass(models.SubfieldBase, models.CharField)): <NEW_LINE> <INDENT> description = "Workflow field" <NEW_LINE> def __init__(self, **kwargs): <NEW_LINE> <INDENT> defaults = { 'max_length': 32, 'db_index': True, } <NEW_LINE> defaults.update(kwargs) <NEW_LINE> defaults.update({ 'null': True, 'blank': True, 'editable': False, }) <NEW_LINE> super(OLDWorkflowField, self).__init__(**defaults)
OLD DONT USE
62598faf16aa5153ce400510
class Stacked(Transducer): <NEW_LINE> <INDENT> def __init__(self, *layers): <NEW_LINE> <INDENT> self.layers = layers <NEW_LINE> <DEDENT> def start(self): <NEW_LINE> <INDENT> for layer in self.layers: <NEW_LINE> <INDENT> layer.start() <NEW_LINE> <DEDENT> <DEDENT> def start_from(self, other): <NEW_LINE> <INDENT> for layer, other_layer in zip(self.layers, other.layers): <NEW_LINE> <INDENT> layer.start_from(other_layer) <NEW_LINE> <DEDENT> <DEDENT> def step(self, inp): <NEW_LINE> <INDENT> val = inp <NEW_LINE> for layer in self.layers: <NEW_LINE> <INDENT> val = layer.step(val) <NEW_LINE> <DEDENT> return val
Several stacked recurrent networks, or, the composition of several FSTs.
62598faf5fcc89381b266153
class AppInfoAction(Action): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> Action.__init__(self, "app_info", Action.TK_INSTANCE, "Shows a breakdown of your installed apps.", "Developer") <NEW_LINE> self.supports_api = True <NEW_LINE> <DEDENT> def run_noninteractive(self, log, parameters): <NEW_LINE> <INDENT> return self._run(log) <NEW_LINE> <DEDENT> def run_interactive(self, log, args): <NEW_LINE> <INDENT> if len(args) != 0: <NEW_LINE> <INDENT> raise TankError("This command takes no arguments!") <NEW_LINE> <DEDENT> return self._run(log) <NEW_LINE> <DEDENT> def _run(self, log): <NEW_LINE> <INDENT> log.info("This command lists details about Apps and Engines") <NEW_LINE> log.info("--------------------------------------------------") <NEW_LINE> log.info("") <NEW_LINE> log.info("Your current configuration is located here:") <NEW_LINE> log.info(self.tk.pipeline_configuration.get_path()) <NEW_LINE> log.info("") <NEW_LINE> log.info("This command will list all apps in all environments.") <NEW_LINE> log.info("The following environments exist:") <NEW_LINE> for env_name in self.tk.pipeline_configuration.get_environments(): <NEW_LINE> <INDENT> log.info(" - %s" % env_name) <NEW_LINE> <DEDENT> log.info("") <NEW_LINE> log.info("") <NEW_LINE> for env_name in self.tk.pipeline_configuration.get_environments(): <NEW_LINE> <INDENT> self._env_breakdown(log, env_name) <NEW_LINE> <DEDENT> log.info("") <NEW_LINE> log.info("") <NEW_LINE> log.info("") <NEW_LINE> log.info("- To install a new app, use the command tank install_app") <NEW_LINE> log.info("- To switch an app location, use the command tank switch") <NEW_LINE> log.info("") <NEW_LINE> <DEDENT> def _env_breakdown(self, log, env_name): <NEW_LINE> <INDENT> env = self.tk.pipeline_configuration.get_environment(env_name) <NEW_LINE> log.info("") <NEW_LINE> log.info("") <NEW_LINE> log.info("=" * 70) <NEW_LINE> log.info("Environment: %s" % env.name) <NEW_LINE> log.info("Location: %s" % env.disk_location) <NEW_LINE> 
log.info("Description: %s" % env.description) <NEW_LINE> log.info("=" * 70) <NEW_LINE> for eng in env.get_engines(): <NEW_LINE> <INDENT> log.info("") <NEW_LINE> log.info("-" * 70) <NEW_LINE> log.info("Engine %s" % eng) <NEW_LINE> log.info("-" * 70) <NEW_LINE> log.info("") <NEW_LINE> for app in env.get_apps(eng): <NEW_LINE> <INDENT> descriptor = env.get_app_descriptor(eng, app) <NEW_LINE> log.info("App %s" % app) <NEW_LINE> log.info("-" * (4+len(app))) <NEW_LINE> for (k,v) in descriptor.get_dict().items(): <NEW_LINE> <INDENT> log.info(" %s: %s" % (k.capitalize(), v) ) <NEW_LINE> <DEDENT> log.info(" Docs: %s" % descriptor.documentation_url) <NEW_LINE> log.info("") <NEW_LINE> <DEDENT> <DEDENT> log.info("") <NEW_LINE> log.info("")
Action that gives a breakdown of all engines and apps in an environment
62598fafcc0a2c111447b01f
class Enum(_messages.Message): <NEW_LINE> <INDENT> class SyntaxValueValuesEnum(_messages.Enum): <NEW_LINE> <INDENT> SYNTAX_PROTO2 = 0 <NEW_LINE> SYNTAX_PROTO3 = 1 <NEW_LINE> <DEDENT> enumvalue = _messages.MessageField('EnumValue', 1, repeated=True) <NEW_LINE> name = _messages.StringField(2) <NEW_LINE> options = _messages.MessageField('Option', 3, repeated=True) <NEW_LINE> sourceContext = _messages.MessageField('SourceContext', 4) <NEW_LINE> syntax = _messages.EnumField('SyntaxValueValuesEnum', 5)
Enum type definition. Enums: SyntaxValueValuesEnum: The source syntax. Fields: enumvalue: Enum value definitions. name: Enum type name. options: Protocol buffer options. sourceContext: The source context. syntax: The source syntax.
62598fafeab8aa0e5d30bd9a
class FlaskDefaults(Enum): <NEW_LINE> <INDENT> SECRET_KEY = Config.get_random_secret_key() <NEW_LINE> LOGGER_NAME = 'netify'
Default values for the Flask section of the config file.
62598faf2c8b7c6e89bd37d3
class UserGrantedNodeAssetsApi(UserPermissionCacheMixin, AssetsFilterMixin, ListAPIView): <NEW_LINE> <INDENT> permission_classes = (IsOrgAdminOrAppUser,) <NEW_LINE> serializer_class = AssetGrantedSerializer <NEW_LINE> pagination_class = LimitOffsetPagination <NEW_LINE> def get_object(self): <NEW_LINE> <INDENT> user_id = self.kwargs.get('pk', '') <NEW_LINE> if user_id: <NEW_LINE> <INDENT> user = get_object_or_404(User, id=user_id) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> user = self.request.user <NEW_LINE> <DEDENT> return user <NEW_LINE> <DEDENT> def get_queryset(self): <NEW_LINE> <INDENT> user = self.get_object() <NEW_LINE> node_id = self.kwargs.get('node_id') <NEW_LINE> util = AssetPermissionUtil(user, cache_policy=self.cache_policy) <NEW_LINE> node = get_object_or_404(Node, id=node_id) <NEW_LINE> nodes = util.get_nodes_with_assets() <NEW_LINE> assets = nodes.get(node, []) <NEW_LINE> for asset, system_users in assets.items(): <NEW_LINE> <INDENT> asset.system_users_granted = system_users <NEW_LINE> <DEDENT> assets = list(assets.keys()) <NEW_LINE> return assets <NEW_LINE> <DEDENT> def get_permissions(self): <NEW_LINE> <INDENT> if self.kwargs.get('pk') is None: <NEW_LINE> <INDENT> self.permission_classes = (IsValidUser,) <NEW_LINE> <DEDENT> return super().get_permissions()
查询用户授权的节点下的资产的api, 与上面api不同的是,只返回某个节点下的资产
62598fafaad79263cf42e7e1
class CentralBank: <NEW_LINE> <INDENT> def __init__(self, variables, parameters): <NEW_LINE> <INDENT> self.var = variables <NEW_LINE> self.par = parameters <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return 'central_bank' <NEW_LINE> <DEDENT> def sell(self, amount, price, t): <NEW_LINE> <INDENT> if self.var.assets[t] < amount: <NEW_LINE> <INDENT> raise ValueError("not enough stocks to sell this amount") <NEW_LINE> <DEDENT> self.var.assets[t] -= amount <NEW_LINE> self.var.currency[t] += price <NEW_LINE> <DEDENT> def buy(self, amount, price, t): <NEW_LINE> <INDENT> self.var.assets[t] += amount <NEW_LINE> self.var.currency[t] -= price
Class holding central bank properties
62598faf379a373c97d99022
class Profiler(object): <NEW_LINE> <INDENT> def __init__(self, sortby="tottime"): <NEW_LINE> <INDENT> self.sortby = sortby <NEW_LINE> <DEDENT> def __enter__(self): <NEW_LINE> <INDENT> self.pr = cProfile.Profile() <NEW_LINE> self.pr.enable() <NEW_LINE> <DEDENT> def __exit__(self, *args): <NEW_LINE> <INDENT> self.pr.disable() <NEW_LINE> s = StringIO() <NEW_LINE> ps = pstats.Stats(self.pr, stream=s).sort_stats(self.sortby) <NEW_LINE> ps.print_stats() <NEW_LINE> print(s.getvalue())
Allows to profile code running in the context of the Profiler. Usage: from time import sleep with Profiler(): for _ in range(10): sleep(0.1)
62598faf3d592f4c4edbaecf
class BaseHandler(RequestHandler): <NEW_LINE> <INDENT> HTTP_error = 406 <NEW_LINE> @property <NEW_LINE> def db(self)->any: <NEW_LINE> <INDENT> return self.application.db <NEW_LINE> <DEDENT> @property <NEW_LINE> def redis(self)->any: <NEW_LINE> <INDENT> return self.application.redis <NEW_LINE> <DEDENT> def initialize(self): <NEW_LINE> <INDENT> if self.request.headers.get("content-type").endswith("application/json"): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.json_obj = json.loads(self.request.body) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> self.json_obj = None <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> self.json_obj = None <NEW_LINE> <DEDENT> <DEDENT> def set_default_headers(self)->None: <NEW_LINE> <INDENT> self.set_header('Access-Control-Allow-Origin', '*') <NEW_LINE> self.set_header('Access-Control-Allow-Headers', '*') <NEW_LINE> <DEDENT> def send_error(self, info:str)->None: <NEW_LINE> <INDENT> self.write(info) <NEW_LINE> self.set_status(self.HTTP_error) <NEW_LINE> self.finish()
基础 Handler
62598faf3d592f4c4edbaed0
class TwoTDCM(TuringMachine): <NEW_LINE> <INDENT> outSymbols = ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9'] <NEW_LINE> maxSteps = 200 <NEW_LINE> def writeSymbol(self, symbol): <NEW_LINE> <INDENT> if symbol in TwoTDCM.outSymbols: <NEW_LINE> <INDENT> self.outTape.append(symbol) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> TuringMachine.writeSymbol(self, symbol) <NEW_LINE> <DEDENT> <DEDENT> def getOutput(self): <NEW_LINE> <INDENT> return ''.join(self.outTape) <NEW_LINE> <DEDENT> def getMaxSteps(self): <NEW_LINE> <INDENT> return TwoTDCM.maxSteps <NEW_LINE> <DEDENT> def reset(self, tapeStr = '', state = None, headPos = 0, steps = 0, resetHistory = True): <NEW_LINE> <INDENT> self.outTape = [ ] <NEW_LINE> state = TuringMachine.startState if state is None else state <NEW_LINE> TuringMachine.reset(self, tapeStr, state, headPos, steps, resetHistory)
A TwoTDCM object models a 2TDCM as described in the textbook. It does not support blocks or nondeterminism.
62598faf26068e7796d4c964
class Vars(object): <NEW_LINE> <INDENT> Bench = 8 <NEW_LINE> Squat = 4 <NEW_LINE> Shoulder_Press = 2 <NEW_LINE> Deadlift = 1
Class to store the enum values for compound lifts.
62598faf99cbb53fe6830ee7
class ChatSessionMessage(TrackableDateModel): <NEW_LINE> <INDENT> user = models.ForeignKey(User, on_delete=models.PROTECT) <NEW_LINE> chat_session = models.ForeignKey( ChatSession, related_name='messages', on_delete=models.PROTECT ) <NEW_LINE> message = models.TextField(max_length=2000) <NEW_LINE> def to_json(self): <NEW_LINE> <INDENT> return {'user': deserialize_user(self.user), 'message': self.message}
Store messages for a session
62598faf3317a56b869be552
class LinkedList(): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.tail = LinkedListElement(None) <NEW_LINE> self.head = LinkedListElement(None) <NEW_LINE> self.head.next = self.tail <NEW_LINE> self.tail.prev = self.head <NEW_LINE> self.index = {} <NEW_LINE> self.lock = threading.RLock() <NEW_LINE> <DEDENT> def append(self, value): <NEW_LINE> <INDENT> with self.lock: <NEW_LINE> <INDENT> if value not in self.index: <NEW_LINE> <INDENT> new = LinkedListElement(value, self.tail) <NEW_LINE> self.index[value] = new <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def popleft(self): <NEW_LINE> <INDENT> with self.lock: <NEW_LINE> <INDENT> if self.head.next != self.tail: <NEW_LINE> <INDENT> value = self.head.next.delete() <NEW_LINE> del self.index[value] <NEW_LINE> return value <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def delete(self, value): <NEW_LINE> <INDENT> with self.lock: <NEW_LINE> <INDENT> if value in self.index: <NEW_LINE> <INDENT> self.index[value].delete() <NEW_LINE> del self.index[value] <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def move_to_the_tail(self, value): <NEW_LINE> <INDENT> with self.lock: <NEW_LINE> <INDENT> if value in self.index: <NEW_LINE> <INDENT> old = self.index[value] <NEW_LINE> old.delete() <NEW_LINE> old.append(self.tail)
A linked list that is used by yas3fs as a LRU index for the file system cache.
62598faf4527f215b58e9ee5
class rustTool(RunEnvTool): <NEW_LINE> <INDENT> __slots__ = () <NEW_LINE> def getDeps(self): <NEW_LINE> <INDENT> if self._isGlobalRust(): <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> return ['rustup'] <NEW_LINE> <DEDENT> def getVersionParts(self): <NEW_LINE> <INDENT> return 3 <NEW_LINE> <DEDENT> def _isGlobalRust(self): <NEW_LINE> <INDENT> return self._detect.isAlpineLinux() <NEW_LINE> <DEDENT> def _installTool(self, env): <NEW_LINE> <INDENT> if self._isGlobalRust(): <NEW_LINE> <INDENT> self._install.apk('rust') <NEW_LINE> return <NEW_LINE> <DEDENT> self._executil.callExternal([ env['rustupBin'], 'toolchain', 'install', env['rustVer'] ]) <NEW_LINE> <DEDENT> def _updateTool(self, env): <NEW_LINE> <INDENT> self._installTool(env) <NEW_LINE> <DEDENT> def uninstallTool(self, env): <NEW_LINE> <INDENT> if self._isGlobalRust(): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> self._executil.callExternal([ env['rustupBin'], 'toolchain', 'uninstall', env['rustVer'] ]) <NEW_LINE> self._have_tool = False <NEW_LINE> <DEDENT> def envNames(self): <NEW_LINE> <INDENT> return ['rustBin', 'rustVer'] <NEW_LINE> <DEDENT> def initEnv(self, env): <NEW_LINE> <INDENT> if not self._isGlobalRust(): <NEW_LINE> <INDENT> ver = env.setdefault('rustVer', 'stable') <NEW_LINE> self._environ['RUSTUP_TOOLCHAIN'] = ver <NEW_LINE> try: <NEW_LINE> <INDENT> res = self._executil.callExternal([ env['rustupBin'], 'which', 'rustc' ], verbose=False) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> <DEDENT> super(rustTool, self).initEnv(env, 'rustc')
Rust is a systems programming language. Home: https://www.rust-lang.org
62598faf460517430c432065
class OutboxEvent(BaseModel): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> table_name = OUTBOX_TABLE_NAME <NEW_LINE> region = "eu-west-1" <NEW_LINE> <DEDENT> event_id = UnicodeAttribute(hash_key=True) <NEW_LINE> event_content = UnicodeAttribute()
Outbox event data model
62598faf4428ac0f6e658534
class Version(db.Model): <NEW_LINE> <INDENT> __tablename__ = 'version' <NEW_LINE> id = db.Column(db.Integer, primary_key=True) <NEW_LINE> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super(Version, self).__init__(*args, **kwargs) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return '<{0}(id={1})>'.format(type(self).__name__, self.id)
Schema version for the search-index database
62598faff548e778e596b5b3
class rule_203(Rule): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> Rule.__init__(self, 'function', '203') <NEW_LINE> self.message.append('Rule ' + self.unique_id + ' has been moved to rule subprogram_body_203.')
This rule has been moved to rule `subprogram_body_203 <subprogram_rules.html#subprogram-body-203>`_.
62598faf38b623060ffa90aa
class MessagePool(object): <NEW_LINE> <INDENT> pass
global = query = session =
62598faf8e7ae83300ee90b1
class Diary(db.Model, Utility): <NEW_LINE> <INDENT> __tablename__ = "Diaries" <NEW_LINE> id = db.Column(UUID(as_uuid=True), unique=True, nullable=False, default=lambda: uuid4().hex, primary_key=True) <NEW_LINE> user_id = db.Column(UUID(as_uuid=True), ForeignKey("Users.id")) <NEW_LINE> title = db.Column(db.String(255), nullable=False) <NEW_LINE> content = db.Column(db.Text, nullable=False) <NEW_LINE> created_on = db.Column(db.DateTime, server_default=func.now()) <NEW_LINE> updated_on = db.Column(db.DateTime, onupdate=func.now()) <NEW_LINE> deleted = db.Column(db.Boolean, default=False, nullable=False) <NEW_LINE> user = relationship("User", back_populates="diaries", lazy=True) <NEW_LINE> def __init__(self, **kwargs): <NEW_LINE> <INDENT> for field in list(kwargs.keys()): <NEW_LINE> <INDENT> self.__dict__[field] = kwargs[field] <NEW_LINE> <DEDENT> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return f"<Diary >>> {self.__dict__}>"
Diary model for storing user's diary information
62598faf7cff6e4e811b5a3c
class MotionEncoder(abstractcodec.Encoder): <NEW_LINE> <INDENT> __metaclass__ = abc.ABCMeta <NEW_LINE> @abc.abstractmethod <NEW_LINE> def _compress(self, image): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> def encode(self, image): <NEW_LINE> <INDENT> return self._compress(image)
A simple video encoder. This class represents a simple video encoder that only compresses a single video frame using a spatial compressor.
62598faf55399d3f05626533
class QuoteDecoder(json.JSONDecoder): <NEW_LINE> <INDENT> def default(self, json_obj): <NEW_LINE> <INDENT> if json_obj['mimetype'] == Quote.json_mimetype: <NEW_LINE> <INDENT> q = Quote() <NEW_LINE> q.id = int(json_obj['id']) <NEW_LINE> q.up_votes = int(json_obj['up']) <NEW_LINE> q.down_votes = int(json_obj['down']) <NEW_LINE> q.body = json_obj['body'] <NEW_LINE> q.tags = [QuoteDecoder.default(self, t) for t in json_obj['tags']] <NEW_LINE> return q <NEW_LINE> <DEDENT> elif json_obj['mimetype'] == Tag.json_mimetype: <NEW_LINE> <INDENT> t = Tag() <NEW_LINE> t.id = int(json_obj['id']) <NEW_LINE> t.tag = json_obj['tag'] <NEW_LINE> return tag <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return json.JSONDecoder.default(self, json_obj)
Custom decoder class. Can throw TypeErrors
62598faf2ae34c7f260ab0f1
class NewDBPerTestUnitTest(BaseDBUnitTest): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def setup_db(cls): <NEW_LINE> <INDENT> print('No-op for db setup in class init') <NEW_LINE> <DEDENT> def setUp(self): <NEW_LINE> <INDENT> print('Setting up new db connection') <NEW_LINE> init_db() <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> print('Tearing down db connection') <NEW_LINE> self.disconnect() <NEW_LINE> <DEDENT> def disconnect(self): <NEW_LINE> <INDENT> global ThreadLocalSession, Session, engine <NEW_LINE> end_session() <NEW_LINE> if ThreadLocalSession: <NEW_LINE> <INDENT> ThreadLocalSession.close_all() <NEW_LINE> ThreadLocalSession = None <NEW_LINE> <DEDENT> if Session: <NEW_LINE> <INDENT> Session.close_all() <NEW_LINE> Session = None <NEW_LINE> <DEDENT> if engine: <NEW_LINE> <INDENT> engine.dispose() <NEW_LINE> engine = None
Unit test where each test gets a freshly initialized database in-memory to ensure no overlap between tests.
62598faff548e778e596b5b4
class FlockerScriptRunner(object): <NEW_LINE> <INDENT> _react = staticmethod(task.react) <NEW_LINE> def __init__(self, script, options, logging=True, reactor=None, sys_module=None): <NEW_LINE> <INDENT> self.script = script <NEW_LINE> self.options = options <NEW_LINE> self.logging = logging <NEW_LINE> if reactor is None: <NEW_LINE> <INDENT> reactor = global_reactor <NEW_LINE> <DEDENT> self._reactor = reactor <NEW_LINE> if sys_module is None: <NEW_LINE> <INDENT> sys_module = sys <NEW_LINE> <DEDENT> self.sys_module = sys_module <NEW_LINE> <DEDENT> def _parse_options(self, arguments): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.options.parseOptions(arguments) <NEW_LINE> <DEDENT> except usage.UsageError as e: <NEW_LINE> <INDENT> self.sys_module.stderr.write(unicode(self.options).encode('utf-8')) <NEW_LINE> self.sys_module.stderr.write( b'ERROR: ' + e.message.encode('utf-8') + b'\n') <NEW_LINE> raise SystemExit(1) <NEW_LINE> <DEDENT> return self.options <NEW_LINE> <DEDENT> def main(self): <NEW_LINE> <INDENT> options = self._parse_options(self.sys_module.argv[1:]) <NEW_LINE> if self.logging: <NEW_LINE> <INDENT> log_writer = eliot_logging_service( options.eliot_destination, self._reactor, True ) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> log_writer = Service() <NEW_LINE> <DEDENT> log_writer.startService() <NEW_LINE> def run_and_log(reactor): <NEW_LINE> <INDENT> d = maybeDeferred(self.script.main, reactor, options) <NEW_LINE> def got_error(failure): <NEW_LINE> <INDENT> if failure.check(UsageError): <NEW_LINE> <INDENT> err(failure.value.args) <NEW_LINE> raise SystemExit(1) <NEW_LINE> <DEDENT> elif not failure.check(SystemExit): <NEW_LINE> <INDENT> err(failure) <NEW_LINE> <DEDENT> return failure <NEW_LINE> <DEDENT> d.addErrback(got_error) <NEW_LINE> return d <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> self._react(run_and_log, [], _reactor=self._reactor) <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> log_writer.stopService()
An API for running standard flocker scripts. :ivar ICommandLineScript script: See ``script`` of ``__init__``. :ivar _react: A reference to ``task.react`` which can be overridden for testing purposes.
62598faf97e22403b383af1d
class _PostsListView(ListView): <NEW_LINE> <INDENT> template_name = '{0}/blog_posts_list.html'.format(settings.CURRENT_SKIN) <NEW_LINE> context_object_name = 'posts' <NEW_LINE> paginate_by = settings.BLOG_POSTS_PAGINATE_BY <NEW_LINE> def get_queryset(self): <NEW_LINE> <INDENT> return super().get_queryset().prefetch_related('categories')
Base class for displaying post lists
62598fafd268445f26639b8b
class hello: <NEW_LINE> <INDENT> def print(self, what): <NEW_LINE> <INDENT> if what == 1: <NEW_LINE> <INDENT> printString = 'world' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> printString = 'BSB' <NEW_LINE> <DEDENT> print('hello ' + printString)
an example class
62598faf5166f23b2e2433e9
class Tests(IMP.test.TestCase): <NEW_LINE> <INDENT> def test_printing(self): <NEW_LINE> <INDENT> m = IMP.Model() <NEW_LINE> sf = IMP._ConstRestraint(m, [], 1).create_scoring_function() <NEW_LINE> IMP.set_log_level(IMP.MEMORY) <NEW_LINE> m.update() <NEW_LINE> sf.evaluate(False)
Test RestraintSets
62598faf85dfad0860cbfa7b
class APITestCase(TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> super(APITestCase, self).setUp() <NEW_LINE> def fake_keystoneclient(request, admin=False): <NEW_LINE> <INDENT> return self.stub_keystoneclient() <NEW_LINE> <DEDENT> self._original_glanceclient = api.glance.glanceclient <NEW_LINE> self._original_keystoneclient = api.keystone.keystoneclient <NEW_LINE> self._original_novaclient = api.nova.novaclient <NEW_LINE> self._original_quantumclient = api.quantum.quantumclient <NEW_LINE> self._original_cinderclient = api.nova.cinderclient <NEW_LINE> api.glance.glanceclient = lambda request: self.stub_glanceclient() <NEW_LINE> api.keystone.keystoneclient = fake_keystoneclient <NEW_LINE> api.nova.novaclient = lambda request: self.stub_novaclient() <NEW_LINE> api.quantum.quantumclient = lambda request: self.stub_quantumclient() <NEW_LINE> api.nova.cinderclient = lambda request: self.stub_cinderclient() <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> super(APITestCase, self).tearDown() <NEW_LINE> api.glance.glanceclient = self._original_glanceclient <NEW_LINE> api.nova.novaclient = self._original_novaclient <NEW_LINE> api.keystone.keystoneclient = self._original_keystoneclient <NEW_LINE> api.quantum.quantumclient = self._original_quantumclient <NEW_LINE> api.nova.cinderclient = self._original_cinderclient <NEW_LINE> <DEDENT> def stub_novaclient(self): <NEW_LINE> <INDENT> if not hasattr(self, "novaclient"): <NEW_LINE> <INDENT> self.mox.StubOutWithMock(nova_client, 'Client') <NEW_LINE> self.novaclient = self.mox.CreateMock(nova_client.Client) <NEW_LINE> <DEDENT> return self.novaclient <NEW_LINE> <DEDENT> def stub_cinderclient(self): <NEW_LINE> <INDENT> if not hasattr(self, "cinderclient"): <NEW_LINE> <INDENT> self.mox.StubOutWithMock(cinder_client, 'Client') <NEW_LINE> self.cinderclient = self.mox.CreateMock(cinder_client.Client) <NEW_LINE> <DEDENT> return self.cinderclient <NEW_LINE> <DEDENT> def stub_keystoneclient(self): <NEW_LINE> 
<INDENT> if not hasattr(self, "keystoneclient"): <NEW_LINE> <INDENT> self.mox.StubOutWithMock(keystone_client, 'Client') <NEW_LINE> self.keystoneclient = self.mox.CreateMock(keystone_client.Client) <NEW_LINE> <DEDENT> return self.keystoneclient <NEW_LINE> <DEDENT> def stub_glanceclient(self): <NEW_LINE> <INDENT> if not hasattr(self, "glanceclient"): <NEW_LINE> <INDENT> self.mox.StubOutWithMock(glanceclient, 'Client') <NEW_LINE> self.glanceclient = self.mox.CreateMock(glanceclient.Client) <NEW_LINE> <DEDENT> return self.glanceclient <NEW_LINE> <DEDENT> def stub_quantumclient(self): <NEW_LINE> <INDENT> if not hasattr(self, "quantumclient"): <NEW_LINE> <INDENT> self.mox.StubOutWithMock(quantum_client, 'Client') <NEW_LINE> self.quantumclient = self.mox.CreateMock(quantum_client.Client) <NEW_LINE> <DEDENT> return self.quantumclient <NEW_LINE> <DEDENT> def stub_swiftclient(self, expected_calls=1): <NEW_LINE> <INDENT> if not hasattr(self, "swiftclient"): <NEW_LINE> <INDENT> self.mox.StubOutWithMock(swift_client, 'Connection') <NEW_LINE> self.swiftclient = self.mox.CreateMock(swift_client.Connection) <NEW_LINE> while expected_calls: <NEW_LINE> <INDENT> swift_client.Connection(None, mox.IgnoreArg(), None, preauthtoken=mox.IgnoreArg(), preauthurl=mox.IgnoreArg(), auth_version="2.0") .AndReturn(self.swiftclient) <NEW_LINE> expected_calls -= 1 <NEW_LINE> <DEDENT> <DEDENT> return self.swiftclient
The ``APITestCase`` class is for use with tests which deal with the underlying clients rather than stubbing out the horizon.api.* methods.
62598faf0c0af96317c56392
class OSUServer(TestbedDevice): <NEW_LINE> <INDENT> def __init__(self, prog_name): <NEW_LINE> <INDENT> TestbedDevice.__init__(self, prog_name) <NEW_LINE> self.dev_type = "OSUSERVER"
The class of OSU server device.
62598faf8a43f66fc4bf218a
class SubscriberDBStreamerCallbackTests(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> store = SqliteStore('file::memory:') <NEW_LINE> self._streamer_callback = SubscriberDBStreamerCallback(store, loop=asyncio.new_event_loop()) <NEW_LINE> ServiceRegistry.add_service('test', '0.0.0.0', 0) <NEW_LINE> ServiceRegistry._PROXY_CONFIG = {'local_port': 1234, 'cloud_address': '', 'proxy_cloud_connections': False} <NEW_LINE> ServiceRegistry._REGISTRY = {"services": {"s6a_service": {"ip_address": "0.0.0.0", "port": 2345}} } <NEW_LINE> <DEDENT> @unittest.mock.patch('magma.subscriberdb.streamer_callback.S6aServiceStub') <NEW_LINE> def test_detach_deleted_subscribers(self, s6a_service_mock_stub): <NEW_LINE> <INDENT> mock = unittest.mock.Mock() <NEW_LINE> mock.DeleteSubscriber.future.side_effect = [unittest.mock.Mock()] <NEW_LINE> s6a_service_mock_stub.side_effect = [mock] <NEW_LINE> old_sub_ids = ["IMSI202", "IMSI101"] <NEW_LINE> new_sub_ids = ["IMSI101", "IMSI202"] <NEW_LINE> self._streamer_callback.detach_deleted_subscribers(old_sub_ids, new_sub_ids) <NEW_LINE> s6a_service_mock_stub.DeleteSubscriber.future.assert_not_called() <NEW_LINE> self._streamer_callback._loop.stop() <NEW_LINE> old_sub_ids = ["IMSI202", "IMSI101", "IMSI303"] <NEW_LINE> new_sub_ids = ["IMSI202"] <NEW_LINE> self._streamer_callback.detach_deleted_subscribers(old_sub_ids, new_sub_ids) <NEW_LINE> mock.DeleteSubscriber.future.assert_called_once_with( DeleteSubscriberRequest( imsi_list=["IMSI101", "IMSI303"] ))
Tests for the SubscriberDBStreamerCallback detach_deleted_subscribers
62598fafbe8e80087fbbf074
class TradeListAPIView(generics.ListAPIView): <NEW_LINE> <INDENT> permission_classes = (IsFromUser,) <NEW_LINE> serializer_class = TradeSerializerComplete <NEW_LINE> def get_queryset(self): <NEW_LINE> <INDENT> return Trade.objects.filter(portfolio=self.request.user.portfolio)
Vue qui permet de recuperer les trades
62598faf4f88993c371f0512
class HealthDict(PrintableDict): <NEW_LINE> <INDENT> def _get_health(self): <NEW_LINE> <INDENT> health = True <NEW_LINE> for item in self.keys(): <NEW_LINE> <INDENT> if self[item] != []: <NEW_LINE> <INDENT> health = False <NEW_LINE> <DEDENT> <DEDENT> return health <NEW_LINE> <DEDENT> health = property(fget=_get_health)
This class adds a 'health' check to a standard dictionary. This check looks into the dict values, and considers empty lists as healthy and all else as unhealthy. If one or more entries is 'unhealthy' the health method returns False.
62598fafac7a0e7691f72519
class FileSourceInfo(SourceInfo): <NEW_LINE> <INDENT> def is_my_business(self, action, **keywords): <NEW_LINE> <INDENT> status = SourceInfo.is_my_business(self, action, **keywords) <NEW_LINE> if status: <NEW_LINE> <INDENT> file_name = keywords.get("file_name", None) <NEW_LINE> if file_name: <NEW_LINE> <INDENT> if is_string(type(file_name)): <NEW_LINE> <INDENT> file_type = _find_file_type_from_file_name(file_name, action) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise IOError("Wrong file name") <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> file_type = keywords.get("file_type") <NEW_LINE> <DEDENT> status = self.can_i_handle(action, file_type) <NEW_LINE> <DEDENT> return status <NEW_LINE> <DEDENT> def can_i_handle(self, action, file_type): <NEW_LINE> <INDENT> raise NotImplementedError("")
Plugin description for a file source
62598faf7d43ff248742740a
class UnknownValue(Exception): <NEW_LINE> <INDENT> pass
Raised when a *Config element has a valid type and inappropriate value
62598faf23849d37ff8510c4
class PhaseReassignment(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def reassign(self, spec_t, spec_f, spec): <NEW_LINE> <INDENT> phase = np.angle(spec) <NEW_LINE> dt = spec_t[1] - spec_t[0] <NEW_LINE> df = spec_f[1] - spec_f[0] <NEW_LINE> ps_df,ps_dt = np.gradient(phase) <NEW_LINE> ps_df /= df <NEW_LINE> ps_dt /= dt <NEW_LINE> plt.figure() <NEW_LINE> plt.subplot(2, 1, 1) <NEW_LINE> plt.hist(ps_df.ravel(), bins=15) <NEW_LINE> plt.title('ps_df') <NEW_LINE> plt.axis('tight') <NEW_LINE> plt.subplot(2, 1, 2) <NEW_LINE> plt.hist(ps_dt.ravel(), bins=15) <NEW_LINE> plt.title('ps_dt') <NEW_LINE> plt.axis('tight') <NEW_LINE> ps_r = np.zeros_like(phase) <NEW_LINE> for k,freq in enumerate(spec_f): <NEW_LINE> <INDENT> for j,t in enumerate(spec_t): <NEW_LINE> <INDENT> tnew = max(0, t - (ps_df[k, j] / (2*np.pi))) <NEW_LINE> fnew = max(0, ps_dt[k, j] / (2*np.pi)) <NEW_LINE> print('fnew=%0.0f, tnew=%0.0f' % (fnew, tnew)) <NEW_LINE> row = np.array(np.nonzero(spec_f <= fnew)).max() <NEW_LINE> col = np.array(np.nonzero(spec_t <= tnew)).max() <NEW_LINE> print('row=',row) <NEW_LINE> print('col=',col) <NEW_LINE> ps_r[row, col] += 1.0 <NEW_LINE> <DEDENT> <DEDENT> ps_r /= len(spec_t)*len(spec_f) <NEW_LINE> return ps_r
NOTE: doesn't work...
62598faf1b99ca400228f538
class Game: <NEW_LINE> <INDENT> def __init__(self, log_level=logging.DEBUG): <NEW_LINE> <INDENT> self.turn_number = 0 <NEW_LINE> raw_constants = read_input() <NEW_LINE> constants.load_constants(json.loads(raw_constants)) <NEW_LINE> num_players, self.my_id = map(int, read_input().split()) <NEW_LINE> logging.basicConfig( filename="bot-{}.log".format(self.my_id), filemode="w", level=log_level, ) <NEW_LINE> self.players = {} <NEW_LINE> for player in range(num_players): <NEW_LINE> <INDENT> self.players[player] = Player._generate() <NEW_LINE> <DEDENT> self.me = self.players[self.my_id] <NEW_LINE> self.game_map = GameMap._generate() <NEW_LINE> constants.set_dimensions(self.game_map.width, self.game_map.height) <NEW_LINE> <DEDENT> def ready(self, name): <NEW_LINE> <INDENT> send_commands([name]) <NEW_LINE> <DEDENT> def update_frame(self): <NEW_LINE> <INDENT> self.turn_number = int(read_input()) <NEW_LINE> logging.info("=============== TURN {:03} ================".format(self.turn_number)) <NEW_LINE> for _ in range(len(self.players)): <NEW_LINE> <INDENT> player, num_ships, num_dropoffs, halite = map(int, read_input().split()) <NEW_LINE> self.players[player]._update(num_ships, num_dropoffs, halite) <NEW_LINE> <DEDENT> self.game_map._update() <NEW_LINE> for player in self.players.values(): <NEW_LINE> <INDENT> for ship in player.get_ships(): <NEW_LINE> <INDENT> self.game_map[ship.position].mark_unsafe(ship) <NEW_LINE> <DEDENT> self.game_map[player.shipyard.position].structure = player.shipyard <NEW_LINE> for dropoff in player.get_dropoffs(): <NEW_LINE> <INDENT> self.game_map[dropoff.position].structure = dropoff <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> @staticmethod <NEW_LINE> def end_turn(commands): <NEW_LINE> <INDENT> send_commands(commands)
The game object holds all metadata pertinent to the game and all its contents
62598faf7047854f4633f3eb
class Golimar: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.client = golimar.client.Client() <NEW_LINE> <DEDENT> def open(self): <NEW_LINE> <INDENT> self.client.open() <NEW_LINE> <DEDENT> def send(self): <NEW_LINE> <INDENT> self.client.send() <NEW_LINE> <DEDENT> def chatWith(self, username): <NEW_LINE> <INDENT> self.client.chatWith(username) <NEW_LINE> <DEDENT> def searchChat(self, chatname): <NEW_LINE> <INDENT> self.client.searchChat(chatname) <NEW_LINE> <DEDENT> def render(self): <NEW_LINE> <INDENT> self.client.render() <NEW_LINE> <DEDENT> def openSelectedFriend(self): <NEW_LINE> <INDENT> self.client.openSelectedFriend() <NEW_LINE> <DEDENT> def openSelectedChat(self): <NEW_LINE> <INDENT> self.client.openSelectedChat()
Acts as a facade layer to the skype client.
62598faffff4ab517ebcd7f5
class SecretAttributes(Attributes): <NEW_LINE> <INDENT> _validation = { 'created': {'readonly': True}, 'updated': {'readonly': True}, 'recovery_level': {'readonly': True}, } <NEW_LINE> _attribute_map = { 'enabled': {'key': 'enabled', 'type': 'bool'}, 'not_before': {'key': 'nbf', 'type': 'unix-time'}, 'expires': {'key': 'exp', 'type': 'unix-time'}, 'created': {'key': 'created', 'type': 'unix-time'}, 'updated': {'key': 'updated', 'type': 'unix-time'}, 'recovery_level': {'key': 'recoveryLevel', 'type': 'str'}, } <NEW_LINE> def __init__( self, *, enabled: Optional[bool] = None, not_before: Optional[datetime.datetime] = None, expires: Optional[datetime.datetime] = None, **kwargs ): <NEW_LINE> <INDENT> super(SecretAttributes, self).__init__(enabled=enabled, not_before=not_before, expires=expires, **kwargs) <NEW_LINE> self.recovery_level = None
The secret management attributes. Variables are only populated by the server, and will be ignored when sending a request. :ivar enabled: Determines whether the object is enabled. :vartype enabled: bool :ivar not_before: Not before date in UTC. :vartype not_before: ~datetime.datetime :ivar expires: Expiry date in UTC. :vartype expires: ~datetime.datetime :ivar created: Creation time in UTC. :vartype created: ~datetime.datetime :ivar updated: Last updated time in UTC. :vartype updated: ~datetime.datetime :ivar recovery_level: Reflects the deletion recovery level currently in effect for secrets in the current vault. If it contains 'Purgeable', the secret can be permanently deleted by a privileged user; otherwise, only the system can purge the secret, at the end of the retention interval. Possible values include: "Purgeable", "Recoverable+Purgeable", "Recoverable", "Recoverable+ProtectedSubscription", "CustomizedRecoverable+Purgeable", "CustomizedRecoverable", "CustomizedRecoverable+ProtectedSubscription". :vartype recovery_level: str or ~azure.keyvault.v7_0.models.DeletionRecoveryLevel
62598fafcc40096d6161a1e2
class BaseBenchmarkLogger(object): <NEW_LINE> <INDENT> def log_evaluation_result(self, eval_results): <NEW_LINE> <INDENT> if not isinstance(eval_results, dict): <NEW_LINE> <INDENT> tf.logging.warning("eval_results should be dictionary for logging. " "Got %s", type(eval_results)) <NEW_LINE> return <NEW_LINE> <DEDENT> global_step = eval_results[tf.GraphKeys.GLOBAL_STEP] <NEW_LINE> for key in sorted(eval_results): <NEW_LINE> <INDENT> if key != tf.GraphKeys.GLOBAL_STEP: <NEW_LINE> <INDENT> self.log_metric(key, eval_results[key], global_step=global_step) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def log_metric(self, name, value, unit=None, global_step=None, extras=None): <NEW_LINE> <INDENT> if not isinstance(value, numbers.Number): <NEW_LINE> <INDENT> tf.logging.warning( "Metric value to log should be a number. Got %s", type(value)) <NEW_LINE> return <NEW_LINE> <DEDENT> extras = _convert_to_json_dict(extras) <NEW_LINE> tf.logging.info("Benchmark metric: " "Name %s, value %d, unit %s, global_step %d, extras %s", name, value, unit, global_step, extras) <NEW_LINE> <DEDENT> def log_run_info(self, model_name): <NEW_LINE> <INDENT> tf.logging.info("Benchmark run: %s", _gather_run_info(model_name))
Class to log the benchmark information to STDOUT.
62598faf32920d7e50bc6065
class ApplicationGatewaySslPredefinedPolicy(SubResource): <NEW_LINE> <INDENT> _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'cipher_suites': {'key': 'properties.cipherSuites', 'type': '[str]'}, 'min_protocol_version': {'key': 'properties.minProtocolVersion', 'type': 'str'}, } <NEW_LINE> def __init__( self, *, id: Optional[str] = None, name: Optional[str] = None, cipher_suites: Optional[List[Union[str, "ApplicationGatewaySslCipherSuite"]]] = None, min_protocol_version: Optional[Union[str, "ApplicationGatewaySslProtocol"]] = None, **kwargs ): <NEW_LINE> <INDENT> super(ApplicationGatewaySslPredefinedPolicy, self).__init__(id=id, **kwargs) <NEW_LINE> self.name = name <NEW_LINE> self.cipher_suites = cipher_suites <NEW_LINE> self.min_protocol_version = min_protocol_version
An Ssl predefined policy. :param id: Resource ID. :type id: str :param name: Name of the Ssl predefined policy. :type name: str :param cipher_suites: Ssl cipher suites to be enabled in the specified order for application gateway. :type cipher_suites: list[str or ~azure.mgmt.network.v2019_07_01.models.ApplicationGatewaySslCipherSuite] :param min_protocol_version: Minimum version of Ssl protocol to be supported on application gateway. Possible values include: "TLSv1_0", "TLSv1_1", "TLSv1_2". :type min_protocol_version: str or ~azure.mgmt.network.v2019_07_01.models.ApplicationGatewaySslProtocol
62598fafa17c0f6771d5c246
class ImageRepoTargetsRoleTypeMismatchUptane(Uptane): <NEW_LINE> <INDENT> class ImageStep(Step): <NEW_LINE> <INDENT> TARGETS_KEYS_IDX = [1] <NEW_LINE> SNAPSHOT_KEYS_IDX = [2] <NEW_LINE> TIMESTAMP_KEYS_IDX = [3] <NEW_LINE> UPDATE_ERROR = 'SecurityException::Targets' <NEW_LINE> ROOT_KWARGS = { 'root_keys_idx': [0], 'targets_keys_idx': TARGETS_KEYS_IDX, 'snapshot_keys_idx': SNAPSHOT_KEYS_IDX, 'timestamp_keys_idx': TIMESTAMP_KEYS_IDX, } <NEW_LINE> TARGETS_KWARGS = { 'targets_keys_idx': TARGETS_KEYS_IDX, '_type': 'invalidrole', } <NEW_LINE> SNAPSHOT_KWARGS = { 'snapshot_keys_idx': SNAPSHOT_KEYS_IDX, } <NEW_LINE> TIMESTAMP_KWARGS = { 'timestamp_keys_idx': TIMESTAMP_KEYS_IDX, } <NEW_LINE> <DEDENT> class DirectorStep(Step): <NEW_LINE> <INDENT> TARGETS_KEYS_IDX = [5] <NEW_LINE> ROOT_KWARGS = { 'root_keys_idx': [4], 'targets_keys_idx': TARGETS_KEYS_IDX, } <NEW_LINE> TARGETS_KWARGS = { 'targets_keys_idx': TARGETS_KEYS_IDX, } <NEW_LINE> <DEDENT> STEPS = [ (DirectorStep, ImageStep), ]
The type of role must have an appropriate name in the metadata file. ImageRepo role Targets: _type = "Targets"
62598faf5fc7496912d48289
class NetworkWatcherListResult(msrest.serialization.Model): <NEW_LINE> <INDENT> _attribute_map = { 'value': {'key': 'value', 'type': '[NetworkWatcher]'}, } <NEW_LINE> def __init__( self, *, value: Optional[List["NetworkWatcher"]] = None, **kwargs ): <NEW_LINE> <INDENT> super(NetworkWatcherListResult, self).__init__(**kwargs) <NEW_LINE> self.value = value
Response for ListNetworkWatchers API service call. :param value: List of network watcher resources. :type value: list[~azure.mgmt.network.v2019_07_01.models.NetworkWatcher]
62598faf99cbb53fe6830ee9
class DataTableClientRenderedColoum(DataTableColoum): <NEW_LINE> <INDENT> pass
This class is still under planning.
62598faf38b623060ffa90ac
class FakeQuantizePerTensorBaseOpBenchmark(op_bench.TorchBenchmarkBase): <NEW_LINE> <INDENT> def init(self, N, C, H, W, nbits, device, op_func): <NEW_LINE> <INDENT> self.quant_min = 0 <NEW_LINE> self.quant_max = 2 ** nbits - 1 <NEW_LINE> self.quant_range = 2 ** nbits <NEW_LINE> self.input = torch.rand(N, C, H, W, dtype=torch.float, device=device, requires_grad=self.auto_set()) <NEW_LINE> self.scale = torch.tensor([1.], requires_grad=self.auto_set()).to(device) <NEW_LINE> self.zero_point = torch.tensor([0.], requires_grad=self.auto_set()).to(device) <NEW_LINE> self.inputs = { "input": self.input, "scale": self.scale, "zero_point": self.zero_point, "quant_min": self.quant_min, "quant_max": self.quant_max, } <NEW_LINE> self.op_func = op_func <NEW_LINE> <DEDENT> def forward( self, input, scale, zero_point, quant_min: int, quant_max: int ): <NEW_LINE> <INDENT> return self.op_func(input, scale, zero_point, quant_min, quant_max)
Benchmarks 3 different fake quantize per tensor operators.
62598fafeab8aa0e5d30bd9e
class Agent(): <NEW_LINE> <INDENT> def __init__(self, state_size, action_size, seed, double_dqn=False, duel_q=False): <NEW_LINE> <INDENT> self.state_size = state_size <NEW_LINE> self.action_size = action_size <NEW_LINE> self.seed = random.seed(seed) <NEW_LINE> if duel_q: <NEW_LINE> <INDENT> self.qnetwork_local = DuelQNetwork(state_size, action_size, seed).to(device) <NEW_LINE> self.qnetwork_target = DuelQNetwork(state_size, action_size, seed).to(device) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.qnetwork_local = QNetwork(state_size, action_size, seed).to(device) <NEW_LINE> self.qnetwork_target = QNetwork(state_size, action_size, seed).to(device) <NEW_LINE> <DEDENT> self.optimizer = optim.Adam(self.qnetwork_local.parameters(), lr=LR) <NEW_LINE> self.memory = ReplayBuffer(action_size, BUFFER_SIZE, BATCH_SIZE, seed) <NEW_LINE> self.t_step = 0 <NEW_LINE> self.double_dqn = double_dqn <NEW_LINE> <DEDENT> def step(self, state, action, reward, next_state, done): <NEW_LINE> <INDENT> self.memory.add(state, action, reward, next_state, done) <NEW_LINE> self.t_step = (self.t_step + 1) % UPDATE_EVERY <NEW_LINE> if self.t_step == 0: <NEW_LINE> <INDENT> if len(self.memory) > BATCH_SIZE: <NEW_LINE> <INDENT> experiences = self.memory.sample() <NEW_LINE> self.learn(experiences, GAMMA) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def act(self, state, eps=0.): <NEW_LINE> <INDENT> state = torch.from_numpy(state).float().unsqueeze(0).to(device) <NEW_LINE> self.qnetwork_local.eval() <NEW_LINE> with torch.no_grad(): <NEW_LINE> <INDENT> action_values = self.qnetwork_local(state) <NEW_LINE> <DEDENT> self.qnetwork_local.train() <NEW_LINE> if random.random() > eps: <NEW_LINE> <INDENT> return np.argmax(action_values.cpu().data.numpy()) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return random.choice(np.arange(self.action_size)) <NEW_LINE> <DEDENT> <DEDENT> def learn(self, experiences, gamma): <NEW_LINE> <INDENT> states, actions, rewards, next_states, dones = experiences <NEW_LINE> Q_expected 
= self.qnetwork_local(states).gather(1, actions) <NEW_LINE> if self.double_dqn: <NEW_LINE> <INDENT> next_actions = self.qnetwork_local(next_states).detach().argmax(1).unsqueeze(1) <NEW_LINE> Q_targets_next = self.qnetwork_target(next_states).gather(1, next_actions).detach() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> Q_targets_next = self.qnetwork_target(next_states).detach().max(1)[0].unsqueeze(1) <NEW_LINE> <DEDENT> Q_targets = rewards + (gamma * (Q_targets_next) * (1 - dones)) <NEW_LINE> loss = F.mse_loss(Q_expected, Q_targets) <NEW_LINE> self.optimizer.zero_grad() <NEW_LINE> loss.backward() <NEW_LINE> self.optimizer.step() <NEW_LINE> self.soft_update(self.qnetwork_local, self.qnetwork_target, TAU) <NEW_LINE> <DEDENT> def soft_update(self, local_model, target_model, tau): <NEW_LINE> <INDENT> for target_param, local_param in zip(target_model.parameters(), local_model.parameters()): <NEW_LINE> <INDENT> target_param.data.copy_(tau*local_param.data + (1.0-tau)*target_param.data)
Interacts with and learns from the environment.
62598faff7d966606f747ff6
class ReadStatsCounts(AnnotationsAssociated): <NEW_LINE> <INDENT> pattern = "(.*)_readmap$" <NEW_LINE> mTable = "readmap" <NEW_LINE> mColumns = "COUNT(DISTINCT read_id)" <NEW_LINE> def __call__(self, track, slice=None): <NEW_LINE> <INDENT> data = [] <NEW_LINE> data.append( ("known", self.getValue(self.getStatement(track, slice="known")))) <NEW_LINE> data.append( ("ambiguous", self.getValue(self.getStatement(track, slice="ambiguous")))) <NEW_LINE> data.append( ("novel", self.getValue(self.getStatement(track, slice="unknown")))) <NEW_LINE> return odict(data)
simple join between a data table and table defining slices. The join works from transcripts to reads. :attr:`mTable` table to join with :attr:`mColums` columns to output Note: the default slices have been disabled, only known, ambiguous and unknown are returned.
62598faf009cb60464d01532
class Producer(Object): <NEW_LINE> <INDENT> id = Field(String) <NEW_LINE> name = Field(String) <NEW_LINE> cat = Field(Array(String)) <NEW_LINE> domain = Field(String)
This object describes the content of a site or app, depending on which object its parent is embedded in. The producer is useful when content where the ad is shown is syndicated, and may appear on a completely different publisher. The producer object itself and all of its parameters are optional, so default values are not provided. If an optional parameter is not specified, it should be considered unknown. This object is optional, but useful if the content producer is different from the site publisher.
62598fafe1aae11d1e7ce82c
class DECLGROUP_WITHPATH(CIMElement): <NEW_LINE> <INDENT> def __init__(self, data): <NEW_LINE> <INDENT> CIMElement.__init__(self, 'DECLGROUP.WITHPATH') <NEW_LINE> self.appendChildren(data)
The DECLGROUP.WITHPATH element defines a logical set of CIM Class and Instance declarations. Each object is declared with its own independent naming and location information. :: <!ELEMENT DECLGROUP.WITHPATH (VALUE.OBJECTWITHPATH | VALUE.OBJECTWITHLOCALPATH)*>
62598faf2ae34c7f260ab0f3
class Solution: <NEW_LINE> <INDENT> def computeLastDigit(self, A, B): <NEW_LINE> <INDENT> factor=1 <NEW_LINE> value = B <NEW_LINE> while value> A: <NEW_LINE> <INDENT> factor=((value%10)*factor)%10 <NEW_LINE> if int(factor)==0: <NEW_LINE> <INDENT> return int(factor) <NEW_LINE> <DEDENT> value-=1 <NEW_LINE> <DEDENT> return int(factor)
@param A: the given number @param B: another number @return: the last digit of B! / A!
62598faf99cbb53fe6830eea
class Course(Base): <NEW_LINE> <INDENT> offering = ndb.StringProperty() <NEW_LINE> institution = ndb.StringProperty() <NEW_LINE> display_name = ndb.StringProperty() <NEW_LINE> instructor = ndb.KeyProperty(User, repeated=True) <NEW_LINE> active = ndb.BooleanProperty(default=True) <NEW_LINE> @property <NEW_LINE> def staff(self): <NEW_LINE> <INDENT> return [part.user for part in Participant.query( Participant.course == self.key, Participant.role == STAFF_ROLE).fetch()] <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def _can(cls, user, need, course, query): <NEW_LINE> <INDENT> action = need.action <NEW_LINE> if action == "get": <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> elif action == "index": <NEW_LINE> <INDENT> return query <NEW_LINE> <DEDENT> elif action == "modify": <NEW_LINE> <INDENT> return bool(course) and user.key in course.staff <NEW_LINE> <DEDENT> elif action == "staff": <NEW_LINE> <INDENT> if user.is_admin: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return user.key in course.staff <NEW_LINE> <DEDENT> elif action == "create": <NEW_LINE> <INDENT> return user.is_admin <NEW_LINE> <DEDENT> return False <NEW_LINE> <DEDENT> @property <NEW_LINE> def assignments(self): <NEW_LINE> <INDENT> return Assignment.query(Assignment.course == self.key) <NEW_LINE> <DEDENT> def get_students(self, user): <NEW_LINE> <INDENT> query = Participant.query( Participant.course == self.key, Participant.role == 'student') <NEW_LINE> return list(query.fetch())
Courses are expected to have a unique offering.
62598faf4a966d76dd5eeee9
class BM_coverageType(object): <NEW_LINE> <INDENT> def __init__(self, cType, cUpper, cLower): <NEW_LINE> <INDENT> self.cType = cType <NEW_LINE> self.cUpper = cUpper <NEW_LINE> self.cLower = cLower
Container class for storing the type of coverage to calculate
62598faf851cf427c66b82cd
class _OVHLexiconClient(dns_common_lexicon.LexiconClient): <NEW_LINE> <INDENT> def __init__(self, endpoint, application_key, application_secret, consumer_key, ttl): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> config = dns_common_lexicon.build_lexicon_config('ovh', { 'ttl': ttl, }, { 'auth_entrypoint': endpoint, 'auth_application_key': application_key, 'auth_application_secret': application_secret, 'auth_consumer_key': consumer_key, }) <NEW_LINE> self.provider = ovh.Provider(config) <NEW_LINE> <DEDENT> def _handle_http_error(self, e, domain_name): <NEW_LINE> <INDENT> hint = None <NEW_LINE> if str(e).startswith('400 Client Error:'): <NEW_LINE> <INDENT> hint = 'Is your Application Secret value correct?' <NEW_LINE> <DEDENT> if str(e).startswith('403 Client Error:'): <NEW_LINE> <INDENT> hint = 'Are your Application Key and Consumer Key values correct?' <NEW_LINE> <DEDENT> return errors.PluginError('Error determining zone identifier for {0}: {1}.{2}' .format(domain_name, e, ' ({0})'.format(hint) if hint else '')) <NEW_LINE> <DEDENT> def _handle_general_error(self, e, domain_name): <NEW_LINE> <INDENT> if domain_name in str(e) and str(e).endswith('not found'): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> super()._handle_general_error(e, domain_name)
Encapsulates all communication with the OVH API via Lexicon.
62598fafd58c6744b42dc2e0
class TestDonationAnalytics: <NEW_LINE> <INDENT> def setup_class(self): <NEW_LINE> <INDENT> self.donation_anaytics = DonationAnalytics(30,"/tmp/repeat_donors.txt") <NEW_LINE> <DEDENT> def test_process_data(self): <NEW_LINE> <INDENT> record1 = RecipientRecord("C00384516","SABOURIN, JAMES","02895","01262016","230") <NEW_LINE> record2 = RecipientRecord("C00384517", "SABOURIN, JOE", "02892", "01152017", "330") <NEW_LINE> record3 = RecipientRecord("C00384516", "SABOURIN, JAMES", "02895", "01202017", "430") <NEW_LINE> record4 = RecipientRecord("C00384517", "SABOURIN, JAMES", "02895", "01312017", "130") <NEW_LINE> self.donation_anaytics.process_data(record1) <NEW_LINE> self.donation_anaytics.process_data(record2) <NEW_LINE> self.donation_anaytics.process_data(record3) <NEW_LINE> self.donation_anaytics.process_data(record4) <NEW_LINE> self.donation_anaytics.close_outfile() <NEW_LINE> with open("/tmp/repeat_donors.txt") as fp: <NEW_LINE> <INDENT> line1 = fp.readline() <NEW_LINE> print("line1 :", line1) <NEW_LINE> data = line1.split("|") <NEW_LINE> assert data[0] == "C00384516" <NEW_LINE> assert data[1] == "02895" <NEW_LINE> assert data[2] == "2017" <NEW_LINE> assert data[3] == "430" <NEW_LINE> assert data[4] == "430" <NEW_LINE> assert data[5] == "1\n" <NEW_LINE> line2 = fp.readline() <NEW_LINE> data = line2.split("|") <NEW_LINE> assert data[0] == "C00384517" <NEW_LINE> assert data[1] == "02895" <NEW_LINE> assert data[2] == "2017" <NEW_LINE> assert data[3] == "130" <NEW_LINE> assert data[4] == "130" <NEW_LINE> assert data[5] == "1\n"
This class is to test the process data method of DonationAnalytics class
62598faf283ffb24f3cf389e
class SmartPointerTransformation(typehandlers.TypeTransformation): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(SmartPointerTransformation, self).__init__() <NEW_LINE> self.rx = re.compile(r'(ns3::|::ns3::|)Ptr<([^>]+)>\s*$') <NEW_LINE> print("{0!r}".format(self), file=sys.stderr) <NEW_LINE> <DEDENT> def _get_untransformed_type_traits(self, name): <NEW_LINE> <INDENT> m = self.rx.match(name) <NEW_LINE> is_const = False <NEW_LINE> if m is None: <NEW_LINE> <INDENT> print("{0!r} did not match".format(name), file=sys.stderr) <NEW_LINE> return None, False <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> name1 = m.group(2).strip() <NEW_LINE> if name1.startswith('const '): <NEW_LINE> <INDENT> name1 = name1[len('const '):] <NEW_LINE> is_const = True <NEW_LINE> <DEDENT> if name1.endswith(' const'): <NEW_LINE> <INDENT> name1 = name1[:-len(' const')] <NEW_LINE> is_const = True <NEW_LINE> <DEDENT> new_name = name1+' *' <NEW_LINE> if new_name.startswith('::'): <NEW_LINE> <INDENT> new_name = new_name[2:] <NEW_LINE> <DEDENT> return new_name, is_const <NEW_LINE> <DEDENT> <DEDENT> def get_untransformed_name(self, name): <NEW_LINE> <INDENT> new_name, dummy_is_const = self._get_untransformed_type_traits(name) <NEW_LINE> return new_name <NEW_LINE> <DEDENT> def create_type_handler(self, type_handler, *args, **kwargs): <NEW_LINE> <INDENT> if issubclass(type_handler, Parameter): <NEW_LINE> <INDENT> kwargs['transfer_ownership'] = False <NEW_LINE> <DEDENT> elif issubclass(type_handler, ReturnValue): <NEW_LINE> <INDENT> kwargs['caller_owns_return'] = False <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise AssertionError <NEW_LINE> <DEDENT> orig_ctype, is_const = self._get_untransformed_type_traits(args[0]) <NEW_LINE> if is_const: <NEW_LINE> <INDENT> correct_ctype = 'ns3::Ptr< {0} const >'.format(orig_ctype[:-2]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> correct_ctype = 'ns3::Ptr< {0} >'.format(orig_ctype[:-2]) <NEW_LINE> <DEDENT> args = tuple([correct_ctype] + 
list(args[1:])) <NEW_LINE> handler = type_handler(*args, **kwargs) <NEW_LINE> handler.set_transformation(self, orig_ctype) <NEW_LINE> return handler <NEW_LINE> <DEDENT> def untransform(self, type_handler, declarations, code_block, expression): <NEW_LINE> <INDENT> return 'const_cast<%s> (ns3::PeekPointer (%s))' % (type_handler.untransformed_ctype, expression) <NEW_LINE> <DEDENT> def transform(self, type_handler, declarations, code_block, expression): <NEW_LINE> <INDENT> assert type_handler.untransformed_ctype[-1] == '*' <NEW_LINE> return 'ns3::Ptr< %s > (%s)' % (type_handler.untransformed_ctype[:-1], expression)
This class provides a "type transformation" that tends to support NS-3 smart pointers. Parameters such as "Ptr<Foo> foo" are transformed into something like Parameter.new("Foo*", "foo", transfer_ownership=False). Return values such as Ptr<Foo> are transformed into ReturnValue.new("Foo*", caller_owns_return=False). Since the underlying objects have reference counting, PyBindGen does the right thing.
62598faf7b180e01f3e49059
class ACLQueueError(TriggerError): <NEW_LINE> <INDENT> pass
Raised when we encounter errors communicating with the Queue.
62598faf32920d7e50bc6066
class BackupLog(Base): <NEW_LINE> <INDENT> __tablename__ = 'backup' <NEW_LINE> event = db.Column(db.String(256)) <NEW_LINE> level = db.Column(db.String(128)) <NEW_LINE> admin = db.Column(db.String(128)) <NEW_LINE> msg = db.Column(db.Text()) <NEW_LINE> ip = db.Column(db.String(128)) <NEW_LINE> def __unicode__(self): <NEW_LINE> <INDENT> return self.user_id
数据备份表
62598faf63b5f9789fe85179
class StudyDirection(enum.Enum): <NEW_LINE> <INDENT> NOT_SET = 0 <NEW_LINE> MINIMIZE = 1 <NEW_LINE> MAXIMIZE = 2
Direction of a :class:`~diego.study.Study`. Attributes: NOT_SET: Direction has not been set. MNIMIZE: :class:`~diego.study.Study` minimizes the objective function. MAXIMIZE: :class:`~diego.study.Study` maximizes the objective function.
62598faf71ff763f4b5e7783
class Movie(object): <NEW_LINE> <INDENT> SINGULAR = "movie" <NEW_LINE> PLURAL = "movies" <NEW_LINE> ID = "id" <NEW_LINE> ACTIVE = "is_active" <NEW_LINE> TITLE = "title" <NEW_LINE> CREATED_AT = "created_at" <NEW_LINE> RELEASED_AT = "released_at" <NEW_LINE> class Release(object): <NEW_LINE> <INDENT> YEAR = "year" <NEW_LINE> DATE = "date" <NEW_LINE> ROMAN = "roman"
Movie constants.
62598fafdd821e528d6d8f47
class RemovedInFlaskBB3(FlaskBBDeprecation): <NEW_LINE> <INDENT> version = (3, 0, 0)
warning for features removed in FlaskBB3
62598faf6e29344779b0066e
class IPMPToolListDescriptor(BaseDescriptor, object): <NEW_LINE> <INDENT> def __init__(self, offset=0, descr_tag=DescrTag_IPMP_ToolsListDescrTag): <NEW_LINE> <INDENT> super(IPMPToolListDescriptor, self).__init__(offset, descr_tag) <NEW_LINE> self.ipmpTool = [] <NEW_LINE> <DEDENT> def decode(self, file_strm): <NEW_LINE> <INDENT> file_strm = super(IPMPToolListDescriptor, self).decode(file_strm) <NEW_LINE> if file_strm is None: <NEW_LINE> <INDENT> return file_strm <NEW_LINE> <DEDENT> return file_strm <NEW_LINE> <DEDENT> def dump(self): <NEW_LINE> <INDENT> dump_info = super(IPMPToolListDescriptor, self).dump() <NEW_LINE> return dump_info <NEW_LINE> <DEDENT> def size(self): <NEW_LINE> <INDENT> return super(IPMPToolListDescriptor, self).size() <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> log_str = super(IPMPToolListDescriptor, self).__str__() <NEW_LINE> return log_str
7.2.6.14.3.1.1 Syntax class IPMP_ToolListDescriptor extends BaseDescriptor : bit(8) tag=IPMP_ToolsListDescrTag { IPMP_Tool ipmpTool[0 .. 255]; } 7.2.6.14.3.1.2 Semantics IPMP_Tool – a class describing a logical IPMP Tool required to access the content.
62598fafaad79263cf42e7e6
class CrewStatus(enum.Enum): <NEW_LINE> <INDENT> healthy = 0 <NEW_LINE> damaged = 1
Indicators used by crew members to describe what happened during the execution of the most recent task that was assigned to them
62598faf442bda511e95c46a
class PhotosListApiView(ListAPIView): <NEW_LINE> <INDENT> serializer_class = PhotoListSerializer <NEW_LINE> permission_classes = [AllowAny] <NEW_LINE> pagination_class = MyPageNumberPagination <NEW_LINE> queryset = Photo.objects.all()
Endpoint to view photos list
62598faf0c0af96317c56395
class CheckNameAvailabilityOperations: <NEW_LINE> <INDENT> models = _models <NEW_LINE> def __init__(self, client, config, serializer, deserializer) -> None: <NEW_LINE> <INDENT> self._client = client <NEW_LINE> self._serialize = serializer <NEW_LINE> self._deserialize = deserializer <NEW_LINE> self._config = config <NEW_LINE> <DEDENT> @distributed_trace_async <NEW_LINE> async def execute( self, location_name: str, name_availability_request: "_models.NameAvailabilityRequest", **kwargs: Any ) -> "_models.NameAvailability": <NEW_LINE> <INDENT> cls = kwargs.pop('cls', None) <NEW_LINE> error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } <NEW_LINE> error_map.update(kwargs.pop('error_map', {})) <NEW_LINE> content_type = kwargs.pop('content_type', "application/json") <NEW_LINE> _json = self._serialize.body(name_availability_request, 'NameAvailabilityRequest') <NEW_LINE> request = build_execute_request( subscription_id=self._config.subscription_id, location_name=location_name, content_type=content_type, json=_json, template_url=self.execute.metadata['url'], ) <NEW_LINE> request = _convert_request(request) <NEW_LINE> request.url = self._client.format_url(request.url) <NEW_LINE> pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs) <NEW_LINE> response = pipeline_response.http_response <NEW_LINE> if response.status_code not in [200]: <NEW_LINE> <INDENT> map_error(status_code=response.status_code, response=response, error_map=error_map) <NEW_LINE> raise HttpResponseError(response=response, error_format=ARMErrorFormat) <NEW_LINE> <DEDENT> deserialized = self._deserialize('NameAvailability', pipeline_response) <NEW_LINE> if cls: <NEW_LINE> <INDENT> return cls(pipeline_response, deserialized, {}) <NEW_LINE> <DEDENT> return deserialized <NEW_LINE> <DEDENT> execute.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.DBforMySQL/locations/{locationName}/checkNameAvailability'}
CheckNameAvailabilityOperations async operations. You should not instantiate this class directly. Instead, you should create a Client instance that instantiates it for you and attaches it as an attribute. :ivar models: Alias to model classes used in this operation group. :type models: ~azure.mgmt.rdbms.mysql_flexibleservers.models :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. :param deserializer: An object model deserializer.
62598fafcc40096d6161a1e3
class BearerTokenAuth(requests.auth.AuthBase): <NEW_LINE> <INDENT> def __init__(self, access_token): <NEW_LINE> <INDENT> self.access_token = access_token <NEW_LINE> <DEDENT> def __call__(self, r): <NEW_LINE> <INDENT> r.headers['Authorization'] = "Bearer %s" % self.access_token <NEW_LINE> return r
Authentication using the protocol: Bearer <access_token>
62598faf26068e7796d4c968
class IChooseMyOwnDamnName(IHTTPRequest): <NEW_LINE> <INDENT> pass
We need to be able to adapt the request for PloneRelease objects to get to our own IUserPreferredURLNormalizer.
62598fafa17c0f6771d5c248
class PublicTagsAPITests(TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> self.client = APIClient() <NEW_LINE> <DEDENT> def test_login_required(self): <NEW_LINE> <INDENT> res = self.client.get(TAGS_URL) <NEW_LINE> self.assertEqual(res.status_code, status.HTTP_401_UNAUTHORIZED)
Test the publicly available tags API
62598faf92d797404e388b6d
class UnrewardedCondition(stimuli.StimulusConditionWav):
    """Stimulus condition for unrewarded ("Go") stimuli.

    Responses to these stimuli are expected (``response=True``) but are
    neither rewarded nor punished, i.e. the subject should peck through them.
    """

    def __init__(self, file_path="", recursive=False):
        # Condition parameters are fixed; only the stimulus location varies.
        super(UnrewardedCondition, self).__init__(
            name="Unrewarded",
            response=True,
            is_rewarded=False,
            is_punished=False,
            file_path=file_path,
            recursive=recursive,
        )
Unrewarded stimuli are not consequated and should be pecked through (i.e. Go stimuli)
62598faf4428ac0f6e658538
class UserTokenRequestInnerUserField(object):
    """Inner "user" field of a user-token request (swagger-codegen model shape).

    Attributes:
        _id: numeric identifier of the user (``int`` per the swagger types),
            ``None`` until assigned.
    """

    def __init__(self):
        # Metadata used by the swagger (de)serializer:
        # attribute name -> declared type, and attribute name -> wire/JSON key.
        self.swagger_types = {
            '_id': 'int'
        }
        self.attribute_map = {
            '_id': '_id'
        }
        self._id = None

    def __repr__(self):
        # Show only the data attributes. The original filtered on the
        # camelCase names 'swaggerTypes'/'attributeMap', which never exist
        # here (the attrs are swagger_types/attribute_map), so the metadata
        # dicts leaked into the repr; it also interpolated the *module*
        # __name__ instead of the class name. Both fixed below.
        properties = [
            '{prop}={val!r}'.format(prop=name, val=value)
            for name, value in self.__dict__.items()
            if name not in ('swagger_types', 'attribute_map')
        ]
        return '<{name} {props}>'.format(
            name=type(self).__name__,
            props=' '.join(properties),
        )
NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually.
62598fafbaa26c4b54d4f2c7
class Requester( object ):
    """Enables easy calls to the BorrowDirect 'RequestItem' webservice.

    Called by ``BorrowDirect.run_request_item()``.
    """

    def __init__( self, logger ):
        """
        :param logger: logger used for debug tracing of requests and responses.
        """
        self.logger = logger
        # Search-key types accepted by the RequestItem service.
        self.valid_search_keys = [ 'ISBN', 'ISSN', 'LCCN', 'OCLC', 'PHRASE' ]

    def request_item( self, patron_barcode, search_key, search_value, pickup_location, api_url_root, api_key, partnership_id, university_code ):
        """Authenticate the patron, then POST the item request.

        :raises ValueError: if ``search_key`` is not one of the supported types.
        :returns: dict parsed from the service's JSON response.
        """
        # Validate explicitly rather than with `assert`, which is stripped
        # when Python runs with -O and would silently let bad keys through.
        if search_key not in self.valid_search_keys:
            raise ValueError(
                'search_key must be one of %s; got %r' % (self.valid_search_keys, search_key) )
        authorization_id = self.get_authorization_id( patron_barcode, api_url_root, api_key, partnership_id, university_code )
        params = self.build_params( partnership_id, authorization_id, pickup_location, search_key, search_value )
        url = '%s/dws/item/add?aid=%s' % ( api_url_root, authorization_id )
        headers = { 'Content-type': 'application/json' }
        r = requests.post( url, data=json.dumps(params), headers=headers )
        self.logger.debug( 'request r.url, `%s`' % r.url )
        self.logger.debug( 'request r.content, `%s`' % r.content.decode('utf-8') )
        result_dct = r.json()
        return result_dct

    def get_authorization_id( self, patron_barcode, api_url_root, api_key, partnership_id, university_code ):
        """Obtain an authorization-id for the patron via the Authenticator."""
        authr = Authenticator( self.logger )
        authorization_id = authr.authenticate( patron_barcode, api_url_root, api_key, partnership_id, university_code )
        return authorization_id

    def build_params( self, partnership_id, authorization_id, pickup_location, search_key, search_value ):
        """Build the JSON-serializable payload for the RequestItem call."""
        params = {
            'PartnershipId': partnership_id,
            'PickupLocation': pickup_location,
            'Notes': '',
            'ExactSearch': [ {
                'Type': search_key, 'Value': search_value } ]
            }
        self.logger.debug( 'params, `%s`' % pprint.pformat(params) )
        return params
Enables easy calls to the BorrowDirect request webservice. BorrowDirect 'RequestItem Web Service' docs: <http://borrowdirect.pbworks.com/w/page/90133541/RequestItem%20Web%20Service> (login required) Called by BorrowDirect.run_request_item()
62598faf97e22403b383af21