code stringlengths 4 4.48k | docstring stringlengths 1 6.45k | _id stringlengths 24 24 |
|---|---|---|
class SgUnit(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.SgId = None <NEW_LINE> self.SgName = None <NEW_LINE> self.SgRemark = None <NEW_LINE> self.CreateTime = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.SgId = params.get("SgId") <NEW_LINE> self.SgName = params.get("SgName") <NEW_LINE> self.SgRemark = params.get("SgRemark") <NEW_LINE> self.CreateTime = params.get("CreateTime") | 安全组基础信息
| 62598fd155399d3f05626993 |
class TraceSelection(bpy.types.Operator): <NEW_LINE> <INDENT> bl_idname = "object.trace_selection" <NEW_LINE> bl_label = "Setup Mirror Canvas" <NEW_LINE> bl_options = { 'REGISTER', 'UNDO' } <NEW_LINE> def execute(self, context): <NEW_LINE> <INDENT> scene = context.scene <NEW_LINE> bpy.ops.gpencil.convert(type='CURVE', use_timing_data=True) <NEW_LINE> bpy.ops.gpencil.data_unlink() <NEW_LINE> bpy.ops.paint.texture_paint_toggle() <NEW_LINE> bpy.ops.object.select_by_type(type = 'CURVE') <NEW_LINE> bpy.context.scene.objects.active = bpy.data.objects["GP_Layer"] <NEW_LINE> bpy.ops.object.origin_set(type='ORIGIN_GEOMETRY') <NEW_LINE> bpy.ops.object.editmode_toggle() <NEW_LINE> bpy.ops.curve.cyclic_toggle() <NEW_LINE> bpy.context.object.data.dimensions = '2D' <NEW_LINE> bpy.ops.object.editmode_toggle() <NEW_LINE> bpy.ops.object.convert(target='MESH') <NEW_LINE> bpy.ops.object.editmode_toggle() <NEW_LINE> bpy.ops.mesh.select_all(action='TOGGLE') <NEW_LINE> bpy.ops.mesh.dissolve_faces() <NEW_LINE> bpy.ops.uv.project_from_view(camera_bounds=True, correct_aspect=False, scale_to_bounds=False) <NEW_LINE> bpy.ops.object.editmode_toggle() <NEW_LINE> bpy.ops.paint.texture_paint_toggle() <NEW_LINE> bpy.context.scene.tool_settings.image_paint.use_occlude = False <NEW_LINE> bpy.context.scene.tool_settings.image_paint.use_backface_culling = False <NEW_LINE> bpy.context.scene.tool_settings.image_paint.use_normal_falloff = False <NEW_LINE> bpy.context.scene.tool_settings.image_paint.seam_bleed = 0 <NEW_LINE> return {'FINISHED'} | Convert gpencil to CURVE | 62598fd13d592f4c4edbb330 |
class DeviceNotFoundError(DeviceError): <NEW_LINE> <INDENT> __metaclass__ = abc.ABCMeta | An exception indicating that no :class:`Device` was found.
.. versionchanged:: 0.5
Rename from ``NoSuchDeviceError`` to its current name. | 62598fd1cc40096d6161a414 |
class DevelopementConfig(Config): <NEW_LINE> <INDENT> DEBUG = True <NEW_LINE> DEBUG_TB_INTERCEPT_REDIRECTS = False | This class configures the development
environment properties | 62598fd160cbc95b063647b7 |
class Protocol_Dns(Resource, CheckExistenceMixin): <NEW_LINE> <INDENT> def __init__(self, protocol_dns_s): <NEW_LINE> <INDENT> super(Protocol_Dns, self).__init__(protocol_dns_s) <NEW_LINE> self._meta_data['required_json_kind'] = 'tm:security:dos:profile:protocol-dns:protocol-dnsstate' <NEW_LINE> self.tmos_ver = self._meta_data['bigip']._meta_data['tmos_version'] <NEW_LINE> <DEDENT> def load(self, **kwargs): <NEW_LINE> <INDENT> if LooseVersion(self.tmos_ver) == LooseVersion('11.6.0'): <NEW_LINE> <INDENT> return self._load_11_6(**kwargs) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return super(Protocol_Dns, self)._load(**kwargs) <NEW_LINE> <DEDENT> <DEDENT> def _load_11_6(self, **kwargs): <NEW_LINE> <INDENT> if self._check_existence_by_collection(self._meta_data['container'], kwargs['name']): <NEW_LINE> <INDENT> return super(Protocol_Dns, self)._load(**kwargs) <NEW_LINE> <DEDENT> msg = 'The application resource named, {}, does not exist on the ' 'device.'.format(kwargs['name']) <NEW_LINE> raise NonExtantApplication(msg) <NEW_LINE> <DEDENT> def exists(self, **kwargs): <NEW_LINE> <INDENT> if LooseVersion(self.tmos_ver) == LooseVersion('11.6.0'): <NEW_LINE> <INDENT> return self._exists_11_6(**kwargs) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return super(Protocol_Dns, self)._load(**kwargs) <NEW_LINE> <DEDENT> <DEDENT> def _exists_11_6(self, **kwargs): <NEW_LINE> <INDENT> return self._check_existence_by_collection( self._meta_data['container'], kwargs['name']) | BIG-IP® Dos Profile Protocol Dns resource | 62598fd1adb09d7d5dc0a9f5 |
class CookieStorage(BaseStorage): <NEW_LINE> <INDENT> cookie_name = 'messages' <NEW_LINE> max_cookie_size = 2048 <NEW_LINE> not_finished = '__messagesnotfinished__' <NEW_LINE> key_salt = 'django.contrib.messages' <NEW_LINE> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super().__init__(*args, **kwargs) <NEW_LINE> self.signer = signing.get_cookie_signer(salt=self.key_salt) <NEW_LINE> <DEDENT> def _get(self, *args, **kwargs): <NEW_LINE> <INDENT> data = self.request.COOKIES.get(self.cookie_name) <NEW_LINE> messages = self._decode(data) <NEW_LINE> all_retrieved = not (messages and messages[-1] == self.not_finished) <NEW_LINE> if messages and not all_retrieved: <NEW_LINE> <INDENT> messages.pop() <NEW_LINE> <DEDENT> return messages, all_retrieved <NEW_LINE> <DEDENT> def _update_cookie(self, encoded_data, response): <NEW_LINE> <INDENT> if encoded_data: <NEW_LINE> <INDENT> response.set_cookie( self.cookie_name, encoded_data, domain=settings.SESSION_COOKIE_DOMAIN, secure=settings.SESSION_COOKIE_SECURE or None, httponly=settings.SESSION_COOKIE_HTTPONLY or None, samesite=settings.SESSION_COOKIE_SAMESITE, ) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> response.delete_cookie( self.cookie_name, domain=settings.SESSION_COOKIE_DOMAIN, samesite=settings.SESSION_COOKIE_SAMESITE, ) <NEW_LINE> <DEDENT> <DEDENT> def _store(self, messages, response, remove_oldest=True, *args, **kwargs): <NEW_LINE> <INDENT> unstored_messages = [] <NEW_LINE> encoded_data = self._encode(messages) <NEW_LINE> if self.max_cookie_size: <NEW_LINE> <INDENT> cookie = SimpleCookie() <NEW_LINE> def stored_length(val): <NEW_LINE> <INDENT> return len(cookie.value_encode(val)[1]) <NEW_LINE> <DEDENT> while encoded_data and stored_length(encoded_data) > self.max_cookie_size: <NEW_LINE> <INDENT> if remove_oldest: <NEW_LINE> <INDENT> unstored_messages.append(messages.pop(0)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> unstored_messages.insert(0, messages.pop()) <NEW_LINE> <DEDENT> encoded_data = 
self._encode(messages + [self.not_finished], encode_empty=unstored_messages) <NEW_LINE> <DEDENT> <DEDENT> self._update_cookie(encoded_data, response) <NEW_LINE> return unstored_messages <NEW_LINE> <DEDENT> def _encode(self, messages, encode_empty=False): <NEW_LINE> <INDENT> if messages or encode_empty: <NEW_LINE> <INDENT> return self.signer.sign_object(messages, serializer=MessageSerializer, compress=True) <NEW_LINE> <DEDENT> <DEDENT> def _decode(self, data): <NEW_LINE> <INDENT> if not data: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> return self.signer.unsign_object(data, serializer=MessageSerializer) <NEW_LINE> <DEDENT> except signing.BadSignature: <NEW_LINE> <INDENT> decoded = None <NEW_LINE> <DEDENT> except json.JSONDecodeError: <NEW_LINE> <INDENT> decoded = self.signer.unsign(data) <NEW_LINE> <DEDENT> if decoded: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return json.loads(decoded, cls=MessageDecoder) <NEW_LINE> <DEDENT> except json.JSONDecodeError: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> <DEDENT> self.used = True <NEW_LINE> return None | Store messages in a cookie. | 62598fd1dc8b845886d53a38 |
class ModelExport(object): <NEW_LINE> <INDENT> def __init__(self, model, input_names, output_names): <NEW_LINE> <INDENT> assert isinstance(input_names, list) <NEW_LINE> assert isinstance(output_names, list) <NEW_LINE> assert isinstance(model, ModelDescBase) <NEW_LINE> self.model = model <NEW_LINE> self.output_names = output_names <NEW_LINE> self.input_names = input_names <NEW_LINE> <DEDENT> def export(self, checkpoint, export_path, tags=[tf.saved_model.tag_constants.SERVING], signature_name='prediction_pipeline'): <NEW_LINE> <INDENT> logger.info('[export] build model for %s' % checkpoint) <NEW_LINE> with TowerContext('', is_training=False): <NEW_LINE> <INDENT> input = PlaceholderInput() <NEW_LINE> input.setup(self.model.get_inputs_desc()) <NEW_LINE> self.model.build_graph(*input.get_input_tensors()) <NEW_LINE> <DEDENT> self.sess = tf.Session(config=tf.ConfigProto(allow_soft_placement=True)) <NEW_LINE> init = sessinit.SaverRestore(checkpoint) <NEW_LINE> self.sess.run(tf.global_variables_initializer()) <NEW_LINE> init.init(self.sess) <NEW_LINE> self.inputs = [] <NEW_LINE> for n in self.input_names: <NEW_LINE> <INDENT> tensor = tf.get_default_graph().get_tensor_by_name('%s:0' % n) <NEW_LINE> logger.info('[export] add input-tensor "%s"' % tensor.name) <NEW_LINE> self.inputs.append(tensor) <NEW_LINE> <DEDENT> self.outputs = [] <NEW_LINE> for n in self.output_names: <NEW_LINE> <INDENT> tensor = tf.get_default_graph().get_tensor_by_name('%s:0' % n) <NEW_LINE> logger.info('[export] add output-tensor "%s"' % tensor.name) <NEW_LINE> self.outputs.append(tensor) <NEW_LINE> <DEDENT> logger.info('[export] exporting trained model to %s' % export_path) <NEW_LINE> builder = tf.saved_model.builder.SavedModelBuilder(export_path) <NEW_LINE> logger.info('[export] build signatures') <NEW_LINE> inputs_signature = dict() <NEW_LINE> for n, v in zip(self.input_names, self.inputs): <NEW_LINE> <INDENT> logger.info('[export] add input signature: %s' % v) <NEW_LINE> inputs_signature[n] = 
tf.saved_model.utils.build_tensor_info(v) <NEW_LINE> <DEDENT> outputs_signature = dict() <NEW_LINE> for n, v in zip(self.output_names, self.outputs): <NEW_LINE> <INDENT> logger.info('[export] add output signature: %s' % v) <NEW_LINE> outputs_signature[n] = tf.saved_model.utils.build_tensor_info(v) <NEW_LINE> <DEDENT> prediction_signature = tf.saved_model.signature_def_utils.build_signature_def( inputs=inputs_signature, outputs=outputs_signature, method_name=tf.saved_model.signature_constants.PREDICT_METHOD_NAME) <NEW_LINE> builder.add_meta_graph_and_variables( self.sess, tags, signature_def_map={signature_name: prediction_signature}) <NEW_LINE> builder.save() | Wrapper for tf.saved_model | 62598fd150812a4eaa620e21 |
class ILazyableBehaviorLayer(Interface): <NEW_LINE> <INDENT> pass | mark items so they can be lazyloaded | 62598fd17b180e01f3e4928d |
class MultiPeerConnectivityLink(threading.Thread, VirtualLink): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> raise NotImplementedError() | This link sends traffic over Bluetooth to Apple devices using the MultiPeerConnectivity framework introduced in iOS 7.
| 62598fd1d8ef3951e32c8099 |
class TAB_Visual(wx.PyPanel): <NEW_LINE> <INDENT> def __init__(self, parent): <NEW_LINE> <INDENT> wx.PyPanel.__init__(self, parent, id=-1) <NEW_LINE> self.cfg = RConfig() <NEW_LINE> self._layout() <NEW_LINE> self.SetInitialSize() <NEW_LINE> self.cancel.Bind(wx.EVT_BUTTON, self.OnCancel) <NEW_LINE> self.save.Bind(wx.EVT_BUTTON, self.OnSave) <NEW_LINE> <DEDENT> def _layout(self): <NEW_LINE> <INDENT> self.cb1 = wx.CheckBox(self, label='View linenumber \t\t\t (default: yes)') <NEW_LINE> self.cb2 = wx.CheckBox(self, label='Wrap text \t\t\t\t (default: no)') <NEW_LINE> self.cb3 = wx.CheckBox(self, label='Highlight matching braces \t (default: yes)') <NEW_LINE> self.cb4 = wx.CheckBox(self, label='View indentation guides \t\t (default: yes)') <NEW_LINE> self.cb5 = wx.CheckBox(self, label='Use syntax-highliting \t\t (-not implemented-)') <NEW_LINE> self.cb1.SetValue( self.cfg.getValue('VISUAL', 'linenumber', bool) ) <NEW_LINE> self.cb2.SetValue( self.cfg.getValue('VISUAL', 'wraptext', bool) ) <NEW_LINE> self.cb3.SetValue( self.cfg.getValue('VISUAL', 'highlightbraces', bool) ) <NEW_LINE> self.cb4.SetValue( self.cfg.getValue('VISUAL', 'indentguides', bool) ) <NEW_LINE> self.cb5.SetValue( self.cfg.getValue('VISUAL', 'syntaxhighlighing', bool) ) <NEW_LINE> hbox1 = wx.BoxSizer(wx.VERTICAL) <NEW_LINE> hbox1.Add(self.cb1, flag=wx.ALL, border=3) <NEW_LINE> hbox1.Add(self.cb2, flag=wx.ALL, border=3) <NEW_LINE> hbox1.Add(self.cb3, flag=wx.ALL, border=3) <NEW_LINE> hbox1.Add(self.cb4, flag=wx.ALL, border=3) <NEW_LINE> hbox1.Add(self.cb5, flag=wx.ALL, border=3) <NEW_LINE> self.save = wx.Button(self, label='Save', size=(70, 30)) <NEW_LINE> self.cancel = wx.Button(self, label='Cancel', size=(70, 30)) <NEW_LINE> hbox2 = wx.BoxSizer(wx.HORIZONTAL) <NEW_LINE> hbox2.Add(self.save) <NEW_LINE> hbox2.Add(self.cancel, flag=wx.LEFT|wx.BOTTOM, border=5) <NEW_LINE> mainbox = wx.BoxSizer(wx.VERTICAL) <NEW_LINE> mainbox.Add(hbox1, flag=wx.LEFT|wx.TOP|wx.EXPAND|wx.ALL, border=6) <NEW_LINE> 
mainbox.Add((-1, 40)) <NEW_LINE> mainbox.Add(hbox2, flag=wx.ALIGN_RIGHT|wx.RIGHT|wx.ALL, border=6) <NEW_LINE> self.SetSizer(mainbox) <NEW_LINE> <DEDENT> def OnSave(self, evt): <NEW_LINE> <INDENT> self.cfg.setValue('VISUAL', 'linenumber', self.cb1.GetValue()) <NEW_LINE> self.cfg.setValue('VISUAL', 'wraptext', self.cb2.GetValue()) <NEW_LINE> self.cfg.setValue('VISUAL', 'highlightbraces', self.cb3.GetValue()) <NEW_LINE> self.cfg.setValue('VISUAL', 'indentguides', self.cb4.GetValue()) <NEW_LINE> self.cfg.setValue('VISUAL', 'syntaxhighlighing', self.cb5.GetValue()) <NEW_LINE> evt.Skip() <NEW_LINE> <DEDENT> def OnCancel(self, evt): <NEW_LINE> <INDENT> evt.Skip() | Creates the "Visual" option menu
@todo: Prettify | 62598fd14527f215b58ea34a |
class TRexRunFailedError(TRexException): <NEW_LINE> <INDENT> code = -14 <NEW_LINE> _default_message = '' | Indicates that TRex has failed due to some reason. This Exception is used when TRex process itself terminates due to unknown reason | 62598fd2ad47b63b2c5a7cd8 |
class StatusBase(Exception): <NEW_LINE> <INDENT> def __init__(self, status_code, error_code, log_message): <NEW_LINE> <INDENT> self.status_code = status_code <NEW_LINE> self.error_code = error_code <NEW_LINE> self.log_message = log_message <NEW_LINE> <DEDENT> def emit(self, request_handler): <NEW_LINE> <INDENT> request_handler.set_header('Content-Type', 'application/json; charset = UTF-8') <NEW_LINE> request_handler.set_header(self.status_code) <NEW_LINE> request_handler.write(escape.json_encode(self.message)) | Error base function
| 62598fd2377c676e912f6fb7 |
class ForgetPwdView(View): <NEW_LINE> <INDENT> def get(self, request): <NEW_LINE> <INDENT> return render(request, get_temp('forgetpwd.html', temp_dir_p)) <NEW_LINE> <DEDENT> def post(self, request): <NEW_LINE> <INDENT> forget_form = users_forms.ForgetForm(request.POST) <NEW_LINE> if forget_form.is_valid(): <NEW_LINE> <INDENT> email = request.POST.get("email", "") <NEW_LINE> email = email.strip() <NEW_LINE> if not modelhelp.authuser(email): <NEW_LINE> <INDENT> return render( request, get_temp( 'email_msg.html', temp_dir_p), { 'msg': '邮箱不存在'}) <NEW_LINE> <DEDENT> send_register_email(email, "forget") <NEW_LINE> return render( request, get_temp( 'email_msg.html', temp_dir_p), { 'msg': '邮件已经发送 ,请到邮箱查看'}) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return render( request, get_temp('forgetpwd.html', temp_dir_p), { "forget_form": forget_form}) | 忘记密码发送邮件 | 62598fd2ec188e330fdf8d13 |
class EuclideanDistance(SquaredEuclideanDistance): <NEW_LINE> <INDENT> def __init__(self, metric: String = "Euclidean Distance") -> Void: <NEW_LINE> <INDENT> super().__init__(metric) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def compute(self, P: NumpyArray, Q: NumpyArray) -> NumpyArray: <NEW_LINE> <INDENT> return np.sqrt(super().compute(P, Q)) | The L2 norm distance, aka the euclidean Distance. | 62598fd297e22403b383b387 |
class Result(object): <NEW_LINE> <INDENT> def __init__(self, result, status, message=None): <NEW_LINE> <INDENT> self.result = result <NEW_LINE> self.status = status <NEW_LINE> self.message = message | Result that store result and some info about it. | 62598fd2377c676e912f6fb8 |
class PspAdapter(models.Model): <NEW_LINE> <INDENT> psp = models.ForeignKey(PaymentServiceProvider, on_delete=models.PROTECT) <NEW_LINE> port = models.IntegerField(null=True) <NEW_LINE> local = models.BooleanField(default=True) <NEW_LINE> up = models.BooleanField(default=False) | PspAdapter contains the id, the PSP it is used for, the port on which it
is currently listening for incoming payment data and a state flag. | 62598fd2fbf16365ca79453c |
class L2(Penalty): <NEW_LINE> <INDENT> def __init__(self, weights=1.): <NEW_LINE> <INDENT> super().__init__(weights) <NEW_LINE> <DEDENT> def func(self, params): <NEW_LINE> <INDENT> return np.sum(self.weights * self.alpha * params**2) <NEW_LINE> <DEDENT> def deriv(self, params): <NEW_LINE> <INDENT> return 2 * self.weights * self.alpha * params <NEW_LINE> <DEDENT> def deriv2(self, params): <NEW_LINE> <INDENT> return 2 * self.weights * self.alpha * np.ones(len(params)) | The L2 (ridge) penalty. | 62598fd2be7bc26dc9252098 |
class AssemblageSampledName(str): <NEW_LINE> <INDENT> def __init__(self, o=""): <NEW_LINE> <INDENT> if len(o) > 50: <NEW_LINE> <INDENT> raise ValueError( "AssemblageSampledName must be between 0 and 50 " "characters." ) | An association of interacting populations of organisms in a given waterbody. | 62598fd28a349b6b436866c0 |
class NotificationException(DiplomacyException): <NEW_LINE> <INDENT> pass | Unknown notification. | 62598fd24a966d76dd5ef358 |
class TestLandingPageView(TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> file_mock = mock.MagicMock(spec=File, name='FileMock') <NEW_LINE> file_mock.name = 'test1.png' <NEW_LINE> self.user = ProfileUser.objects.create( username="username", password="password", email="somemail@o2.pl", avatar=file_mock ) <NEW_LINE> <DEDENT> def test_not_logged_user(self): <NEW_LINE> <INDENT> response = self.client.get(LANDING_URL, follow=True) <NEW_LINE> self.assertEqual(response.status_code, 200) <NEW_LINE> self.assertTemplateUsed(response, "exercise/index.html") <NEW_LINE> <DEDENT> def test_logged_user(self): <NEW_LINE> <INDENT> self.client.force_login(self.user) <NEW_LINE> response = self.client.get(LANDING_URL, follow=True) <NEW_LINE> self.assertEqual(response.status_code, 200) <NEW_LINE> self.assertRedirects(response, HOME_URL) <NEW_LINE> self.assertTemplateUsed(response, "exercise/home.html") | Tests for LandingPageView | 62598fd250812a4eaa620e24 |
class OutputFormat(object): <NEW_LINE> <INDENT> JSON = 'json' <NEW_LINE> CSV = 'csv' | An enum used to list the valid output formats for API calls. | 62598fd23346ee7daa337887 |
class BodyguardAnt(Ant): <NEW_LINE> <INDENT> name = 'Bodyguard' <NEW_LINE> implemented = False <NEW_LINE> container = True <NEW_LINE> food_cost = 4 <NEW_LINE> damage = 0 <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> Ant.__init__(self, 2) <NEW_LINE> self.ant = None <NEW_LINE> <DEDENT> def contain_ant(self, ant): <NEW_LINE> <INDENT> self.ant = ant <NEW_LINE> <DEDENT> def action(self, colony): <NEW_LINE> <INDENT> if self.ant: <NEW_LINE> <INDENT> self.ant.action(colony) | BodyguardAnt provides protection to other Ants. | 62598fd2956e5f7376df58be |
class DadoPoker(Dado): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super().__init__("Negro", "Rojo", "J", "Q", "K", "As") | Simulará un dado de póquer | 62598fd20fa83653e46f5369 |
class IsindexControl(ScalarControl): <NEW_LINE> <INDENT> def __init__(self, type, name, attrs, index=None): <NEW_LINE> <INDENT> ScalarControl.__init__(self, type, name, attrs, index) <NEW_LINE> if self._value is None: <NEW_LINE> <INDENT> self._value = "" <NEW_LINE> <DEDENT> <DEDENT> def is_of_kind(self, kind): return kind in ["text", "clickable"] <NEW_LINE> def _totally_ordered_pairs(self): <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> def _click(self, form, coord, return_type, request_class=_urllib.request.Request): <NEW_LINE> <INDENT> parts = self._urlparse(form.action) <NEW_LINE> rest, (query, frag) = parts[:-2], parts[-2:] <NEW_LINE> parts = rest + (_urllib.parse.quote_plus(self.value), None) <NEW_LINE> url = self._urlunparse(parts) <NEW_LINE> req_data = url, None, [] <NEW_LINE> if return_type == "pairs": <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> elif return_type == "request_data": <NEW_LINE> <INDENT> return req_data <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return request_class(url) <NEW_LINE> <DEDENT> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> value = self.value <NEW_LINE> if value is None: value = "<None>" <NEW_LINE> infos = [] <NEW_LINE> if self.disabled: infos.append("disabled") <NEW_LINE> if self.readonly: infos.append("readonly") <NEW_LINE> info = ", ".join(infos) <NEW_LINE> if info: info = " (%s)" % info <NEW_LINE> return "<%s(%s)%s>" % (self.__class__.__name__, value, info) | ISINDEX control.
ISINDEX is the odd-one-out of HTML form controls. In fact, it isn't really
part of regular HTML forms at all, and predates it. You're only allowed
one ISINDEX per HTML document. ISINDEX and regular form submission are
mutually exclusive -- either submit a form, or the ISINDEX.
Having said this, since ISINDEX controls may appear in forms (which is
probably bad HTML), ParseFile / ParseResponse will include them in the
HTMLForm instances it returns. You can set the ISINDEX's value, as with
any other control (but note that ISINDEX controls have no name, so you'll
need to use the type argument of set_value!). When you submit the form,
the ISINDEX will not be successful (ie., no data will get returned to the
server as a result of its presence), unless you click on the ISINDEX
control, in which case the ISINDEX gets submitted instead of the form:
form.set_value("my isindex value", type="isindex")
urllib2.urlopen(form.click(type="isindex"))
ISINDEX elements outside of FORMs are ignored. If you want to submit one
by hand, do it like so:
url = _urllib.parse.urljoin(page_uri, "?"+_urllib.parse.quote_plus("my isindex value"))
result = urllib2.urlopen(url) | 62598fd29f28863672818abe |
class TicketCreateView(CreateView): <NEW_LINE> <INDENT> model = Ticket <NEW_LINE> fields = ["title", "description"] <NEW_LINE> @method_decorator(login_required) <NEW_LINE> def dispatch(self, *args, **kwargs): <NEW_LINE> <INDENT> return super(TicketCreateView, self).dispatch(*args, **kwargs) <NEW_LINE> <DEDENT> def form_valid(self, form): <NEW_LINE> <INDENT> form.instance.created_by = self.request.user <NEW_LINE> return super(TicketCreateView, self).form_valid(form) | Create ticket | 62598fd2377c676e912f6fba |
class FundingRateRecord(object): <NEW_LINE> <INDENT> openapi_types = {'t': 'int', 'r': 'str'} <NEW_LINE> attribute_map = {'t': 't', 'r': 'r'} <NEW_LINE> def __init__(self, t=None, r=None, local_vars_configuration=None): <NEW_LINE> <INDENT> if local_vars_configuration is None: <NEW_LINE> <INDENT> local_vars_configuration = Configuration() <NEW_LINE> <DEDENT> self.local_vars_configuration = local_vars_configuration <NEW_LINE> self._t = None <NEW_LINE> self._r = None <NEW_LINE> self.discriminator = None <NEW_LINE> if t is not None: <NEW_LINE> <INDENT> self.t = t <NEW_LINE> <DEDENT> if r is not None: <NEW_LINE> <INDENT> self.r = r <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def t(self): <NEW_LINE> <INDENT> return self._t <NEW_LINE> <DEDENT> @t.setter <NEW_LINE> def t(self, t): <NEW_LINE> <INDENT> self._t = t <NEW_LINE> <DEDENT> @property <NEW_LINE> def r(self): <NEW_LINE> <INDENT> return self._r <NEW_LINE> <DEDENT> @r.setter <NEW_LINE> def r(self, r): <NEW_LINE> <INDENT> self._r = r <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in six.iteritems(self.openapi_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map(lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value)) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict( map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items(), ) ) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pprint.pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not 
isinstance(other, FundingRateRecord): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> return self.to_dict() == other.to_dict() <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, FundingRateRecord): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return self.to_dict() != other.to_dict() | NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually. | 62598fd2fbf16365ca794540 |
class NetworkAlreadyExistsException(MCVirtException): <NEW_LINE> <INDENT> pass | Network already exists with the same name. | 62598fd2956e5f7376df58bf |
class AceQLExecUpdateApi(object): <NEW_LINE> <INDENT> __debug = False <NEW_LINE> def __init__(self, aceQLHttpApi: 'AceQLHttpApi'): <NEW_LINE> <INDENT> if aceQLHttpApi is None: <NEW_LINE> <INDENT> raise TypeError("aceQLHttpApi is null!") <NEW_LINE> <DEDENT> self.__aceQLHttpApi = aceQLHttpApi <NEW_LINE> self.__url = aceQLHttpApi.get_url() <NEW_LINE> <DEDENT> def execute_update(self, sql: str, is_prepared_statement: bool, statement_parameters: dict): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> action = "execute_update" <NEW_LINE> AceQLExecutionUtil.check_values(is_prepared_statement, sql) <NEW_LINE> dict_params: dict = {"sql": sql} <NEW_LINE> AceQLExecutionUtil.set_is_prepared_statement(dict_params, is_prepared_statement) <NEW_LINE> url_withaction = self.__url + action <NEW_LINE> AceQLDebug.debug("url_withaction: " + url_withaction) <NEW_LINE> AceQLDebug.debug("dict_params 1: " + str(dict_params)) <NEW_LINE> if statement_parameters is not None: <NEW_LINE> <INDENT> if not isinstance(statement_parameters, dict): <NEW_LINE> <INDENT> raise TypeError("statement_parameters is not a dictionary!") <NEW_LINE> <DEDENT> dict_params.update(statement_parameters) <NEW_LINE> <DEDENT> AceQLDebug.debug("dictParams 2: " + str(dict_params)) <NEW_LINE> if self.__aceQLHttpApi.get_timeout() is None: <NEW_LINE> <INDENT> AceQLDebug.debug("UPDATE HERE 1") <NEW_LINE> response: Request = requests.post(url_withaction, headers=self.__aceQLHttpApi.get_headers(), data=dict_params, proxies=self.__aceQLHttpApi.get_proxies(), auth=self.__aceQLHttpApi.get_auth()) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> AceQLDebug.debug("UPDATE HERE 2") <NEW_LINE> response: Request = requests.post(url_withaction, headers=self.__aceQLHttpApi.get_headers(), data=dict_params, proxies=self.__aceQLHttpApi.get_proxies(), auth=self.__aceQLHttpApi.get_auth(), timeout=self.__aceQLHttpApi.get_timeout()) <NEW_LINE> <DEDENT> self.__aceQLHttpApi.set_http_status_code(response.status_code) <NEW_LINE> result = response.text 
<NEW_LINE> AceQLDebug.debug("result: " + result) <NEW_LINE> result_analyzer = ResultAnalyzer(result, self.__aceQLHttpApi.get_http_status_code()) <NEW_LINE> if not result_analyzer.is_status_ok(): <NEW_LINE> <INDENT> raise Error(result_analyzer.get_error_message(), result_analyzer.get_error_type(), None, None, self.__aceQLHttpApi.get_http_status_code()) <NEW_LINE> <DEDENT> row_count = result_analyzer.get_int_value("row_count") <NEW_LINE> return row_count <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> if isinstance(e, Error): <NEW_LINE> <INDENT> raise <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise Error(str(e), 0, e, None, self.__aceQLHttpApi.get_http_status_code()) | AceQL HTTP wrapper for /execute_update API. Takes care of all
HTTP calls and operations. | 62598fd2ad47b63b2c5a7ce0 |
class GarbageCollectionCasesPageSet(page_set_module.PageSet): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(GarbageCollectionCasesPageSet, self).__init__( archive_data_file='data/garbage_collection_cases.json', bucket=page_set_module.PARTNER_BUCKET) <NEW_LINE> self.AddPage(SpinningBallsPage(self)) | Description: GC test cases | 62598fd29f28863672818abf |
class update_until_result(object): <NEW_LINE> <INDENT> def __init__(self, error=None,): <NEW_LINE> <INDENT> self.error = error <NEW_LINE> <DEDENT> def read(self, iprot): <NEW_LINE> <INDENT> if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None: <NEW_LINE> <INDENT> iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec]) <NEW_LINE> return <NEW_LINE> <DEDENT> iprot.readStructBegin() <NEW_LINE> while True: <NEW_LINE> <INDENT> (fname, ftype, fid) = iprot.readFieldBegin() <NEW_LINE> if ftype == TType.STOP: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> if fid == 1: <NEW_LINE> <INDENT> if ftype == TType.STRUCT: <NEW_LINE> <INDENT> self.error = ModelException() <NEW_LINE> self.error.read(iprot) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> iprot.skip(ftype) <NEW_LINE> <DEDENT> iprot.readFieldEnd() <NEW_LINE> <DEDENT> iprot.readStructEnd() <NEW_LINE> <DEDENT> def write(self, oprot): <NEW_LINE> <INDENT> if oprot._fast_encode is not None and self.thrift_spec is not None: <NEW_LINE> <INDENT> oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec])) <NEW_LINE> return <NEW_LINE> <DEDENT> oprot.writeStructBegin('update_until_result') <NEW_LINE> if self.error is not None: <NEW_LINE> <INDENT> oprot.writeFieldBegin('error', TType.STRUCT, 1) <NEW_LINE> self.error.write(oprot) <NEW_LINE> oprot.writeFieldEnd() <NEW_LINE> <DEDENT> oprot.writeFieldStop() <NEW_LINE> oprot.writeStructEnd() <NEW_LINE> <DEDENT> def validate(self): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> L = ['%s=%r' % (key, value) for key, value in self.__dict__.items()] <NEW_LINE> return '%s(%s)' % (self.__class__.__name__, ', '.join(L)) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return isinstance(other, self.__class__) and self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def 
__ne__(self, other): <NEW_LINE> <INDENT> return not (self == other) | Attributes:
- error | 62598fd2adb09d7d5dc0a9ff |
@admin.register(BankAccount) <NEW_LINE> class BankAccountAdmin( child_redirect_mixin("bankaccount"), unit_admin_mixin_generator("administrative_unit"), MoneyAccountChildAdmin, ): <NEW_LINE> <INDENT> base_model = BankAccount <NEW_LINE> show_in_index = True <NEW_LINE> list_display = ( "__str__", "id", "bank_account", "bank_account_number", "administrative_unit", ) | bank account polymorphic admin model child class | 62598fd2a219f33f346c6c8b |
class RF(Classifier): <NEW_LINE> <INDENT> def __init__(self, n, d): <NEW_LINE> <INDENT> self.estimator_ = RandomForestClassifier() <NEW_LINE> self.param_grid_ = {"n_estimators": np.arange(1,52,10), "max_depth": np.arange(1,min(12,n),2), "max_features": np.arange(1,min(12,d),2)} | A Random Forest classifier. | 62598fd24527f215b58ea354 |
class TagList(models.Model): <NEW_LINE> <INDENT> pass | This dummy model is used to allow exporting a /module/taglist as a ViewSet,
without having to write a metric ton of custom code. No instances of it are
ever created. | 62598fd2283ffb24f3cf3d06 |
class MidiController: <NEW_LINE> <INDENT> def __init__(self, channel: int): <NEW_LINE> <INDENT> self.channel = channel <NEW_LINE> self.port = None <NEW_LINE> self.note_tasks = {} <NEW_LINE> <DEDENT> def _set_port(self, port: mido.ports.BaseOutput): <NEW_LINE> <INDENT> self.port = port <NEW_LINE> <DEDENT> def reset(self): <NEW_LINE> <INDENT> for note, task in self.note_tasks.items(): <NEW_LINE> <INDENT> task.cancel() <NEW_LINE> self.note_off(note) <NEW_LINE> <DEDENT> <DEDENT> def note_off(self, note: int): <NEW_LINE> <INDENT> self.port.send(mido.Message("note_off", note=note, channel=self.channel)) <NEW_LINE> <DEDENT> def note_on(self, note: int, velocity: int): <NEW_LINE> <INDENT> self.port.send( mido.Message("note_on", note=note, velocity=velocity, channel=self.channel) ) <NEW_LINE> <DEDENT> def set_cc(self, control: int, value: int): <NEW_LINE> <INDENT> self.port.send( mido.Message( "control_change", control=control, value=value, channel=self.channel ) ) <NEW_LINE> <DEDENT> async def play_note(self, note: int, velocity: int, duration: float): <NEW_LINE> <INDENT> if note in self.note_tasks: <NEW_LINE> <INDENT> self.note_tasks[note].cancel() <NEW_LINE> self.note_off(note) <NEW_LINE> <DEDENT> self.note_on(note, velocity) <NEW_LINE> await asyncio.sleep(duration) <NEW_LINE> self.note_off(note) | Trigger note and other CC parameter events on a particular MIDI channel.
Use as a standalone or connect to a `Sequencer` instance via the `Sequencer.register` method.
Args:
channel: the MIDI channel to send messages on. | 62598fd2cc40096d6161a41a |
class Action(object): <NEW_LINE> <INDENT> def __init__(self, project, name, index, *arg, **kwarg): <NEW_LINE> <INDENT> self.project = Project.objects.get(name=project) <NEW_LINE> self.name = name <NEW_LINE> self.index = index <NEW_LINE> self.outputs = {} <NEW_LINE> <DEDENT> def __unicode__(self): <NEW_LINE> <INDENT> return "Action: %s" % self.name <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return self.__unicode__() <NEW_LINE> <DEDENT> def send_event_hooks(self, status): <NEW_LINE> <INDENT> event = { 'type': 'action', 'name': self.name, 'status': status } <NEW_LINE> gearman_client = gearman.GearmanClient([settings.GEARMAN_SERVER]) <NEW_LINE> gearman_client.submit_job(settings.GEARMAN_JOB_LABEL, json.dumps({'event': event}), background=True) <NEW_LINE> <DEDENT> @property <NEW_LINE> def statsd_name(self): <NEW_LINE> <INDENT> return statsd_label_converter(self.name) <NEW_LINE> <DEDENT> def run(self, job): <NEW_LINE> <INDENT> self.job = job <NEW_LINE> log.info("Job %s - doing action: '%s - %s'" % (unicode(job.id), self.project.name, self.name)) <NEW_LINE> out_f, err_f = job.get_live_job_filenames() <NEW_LINE> with closing(open(out_f, 'w')) as stdoutlog, closing(open(err_f, 'w')) as stderrlog: <NEW_LINE> <INDENT> stdoutlog.write("Project=%s Action=%s\n" % (self.project.name, self.name)) <NEW_LINE> stdoutlog.flush() <NEW_LINE> try: <NEW_LINE> <INDENT> action_result = job.record_action_start(self) <NEW_LINE> response = self.execute(stdoutlog, stderrlog, action_result) <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> tb_str = traceback.format_exc(sys.exc_info()[2]) <NEW_LINE> log.error("Got an exception %s: %s" % (str(e), tb_str)) <NEW_LINE> job.record_action_response(self, { 'success': False, 'message': str(e), 'detail': tb_str }) <NEW_LINE> raise ActionFailure(e) <NEW_LINE> <DEDENT> <DEDENT> log.info("Job %s - completed action: '%s - %s' with code %s" % ( unicode(job.id), self.project.name, self.name, str(response.get('code', 'missing')) )) 
<NEW_LINE> job.record_action_response(self, response) <NEW_LINE> return response <NEW_LINE> <DEDENT> def execute(self, stdoutlog, stderrlog, action_result): <NEW_LINE> <INDENT> return NotImplemented <NEW_LINE> <DEDENT> def failure_message(self, action_result): <NEW_LINE> <INDENT> return 'Action "%s" failed.' % (self.name,) <NEW_LINE> <DEDENT> def as_form_data(self): <NEW_LINE> <INDENT> data = { 'id': self.id, 'name': self.name, 'project': self.project.id, 'index': self.index, } <NEW_LINE> return data <NEW_LINE> <DEDENT> def values_without_keys(self): <NEW_LINE> <INDENT> data = dict(self.values) <NEW_LINE> for k in ['public_key', 'private_key', 'response']: <NEW_LINE> <INDENT> if k in data: <NEW_LINE> <INDENT> del data[k] <NEW_LINE> <DEDENT> <DEDENT> return data | Represents one of a series of actions that may be performed after a commit.
| 62598fd2a05bb46b3848acf0 |
class Quota(dbmodels.DatabaseModelBase): <NEW_LINE> <INDENT> _data_fields = ['created', 'updated', 'tenant_id', 'resource', 'hard_limit'] <NEW_LINE> _table_name = 'quotas' <NEW_LINE> def __init__(self, tenant_id, resource, hard_limit, id=utils.generate_uuid(), created=timeutils.utcnow(), update=timeutils.utcnow()): <NEW_LINE> <INDENT> self.tenant_id = tenant_id <NEW_LINE> self.resource = resource <NEW_LINE> self.hard_limit = hard_limit <NEW_LINE> self.id = id <NEW_LINE> self.created = created <NEW_LINE> self.update = update | Defines the base model class for a quota. | 62598fd2099cdd3c63675622 |
class ReturnEnvelope: <NEW_LINE> <INDENT> __slots__ = ('reply_code', 'reply_text', 'exchange_name', 'routing_key') <NEW_LINE> def __init__(self, reply_code, reply_text, exchange_name, routing_key): <NEW_LINE> <INDENT> self.reply_code = reply_code <NEW_LINE> self.reply_text = reply_text <NEW_LINE> self.exchange_name = exchange_name <NEW_LINE> self.routing_key = routing_key | Class for basic return message fields | 62598fd27b180e01f3e49293 |
class LogitSplittedSampler(Sampler): <NEW_LINE> <INDENT> def __init__(self, keys, executed_iterations, weights=None): <NEW_LINE> <INDENT> self.keys = keys <NEW_LINE> if weights is None: <NEW_LINE> <INDENT> self.weights = torch.tensor([1.0/float(len(self.keys))]*len(self.keys), dtype=torch.double) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.weights = torch.from_numpy(weights) <NEW_LINE> <DEDENT> self.weights = Variable(self.weights, requires_grad=True) <NEW_LINE> assert len(self.weights) == len(self.keys), "Number of weights and keys should be the same" <NEW_LINE> self.iterations_to_execute = g_conf.NUMBER_ITERATIONS * g_conf.BATCH_SIZE - executed_iterations + g_conf.BATCH_SIZE <NEW_LINE> self.replacement = True <NEW_LINE> self.optim = optim.Adam([self.weights,], lr=0.01) <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> weights = F.softmax(self.weights) <NEW_LINE> idx = torch.multinomial(weights, self.iterations_to_execute, True) <NEW_LINE> idx = idx.tolist() <NEW_LINE> return iter([random.choice(self.keys[i]) for i in idx]) <NEW_LINE> <DEDENT> def update_weights(self, advantage, perturb=False): <NEW_LINE> <INDENT> self.optim.zero_grad() <NEW_LINE> obj = torch.sum(-self.weights * advantage) <NEW_LINE> obj.backward() <NEW_LINE> self.optim.step() <NEW_LINE> if perturb: <NEW_LINE> <INDENT> N = len(self.weights) <NEW_LINE> self.weights = self.weights + torch.normal(torch.zeros(N), perturb * torch.ones(N)).double() <NEW_LINE> <DEDENT> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return self.iterations_to_execute | Sample on a list of keys that was previously splitted
weights for sampling are logits and have to be softmaxed | 62598fd2377c676e912f6fbc |
class ToTensor(object): <NEW_LINE> <INDENT> def __call__(self, sample): <NEW_LINE> <INDENT> new_sample = {} <NEW_LINE> for key in sample.keys(): <NEW_LINE> <INDENT> new_sample[key] = torch.from_numpy(sample[key]) <NEW_LINE> <DEDENT> return new_sample | Convert ndarrays in sample to Tensors. | 62598fd23617ad0b5ee065cf |
class ShowMribVrfRouteSummarySchema(MetaParser): <NEW_LINE> <INDENT> schema = { 'vrf': { Any(): { 'address_family': { Any(): { 'no_group_ranges': int, 'no_g_routes': int, 'no_s_g_routes': int, 'no_route_x_interfaces': int, 'total_no_interfaces': int, } } }, }, } | Schema for show mrib vrf <vrf> <address-family> route summary | 62598fd29f28863672818ac1 |
class Datasheet(object): <NEW_LINE> <INDENT> def __init__(self, instance=None): <NEW_LINE> <INDENT> if instance is None: <NEW_LINE> <INDENT> data = {} <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> data = instance.__datasheets__.get(self.__id__) <NEW_LINE> if data is None: <NEW_LINE> <INDENT> data = OOBTree() <NEW_LINE> instance.__datasheets__[self.__id__] = data <NEW_LINE> <DEDENT> <DEDENT> self.__data__ = data <NEW_LINE> self.__instance__ = instance <NEW_LINE> <DEDENT> def __load__(self, datasheet): <NEW_LINE> <INDENT> if self.__schema__ is not datasheet.__schema__: <NEW_LINE> <INDENT> raise DatasheetException( "Can't load data from incompatible datasheet") <NEW_LINE> <DEDENT> for fieldId in getFields(self.__schema__): <NEW_LINE> <INDENT> field = self.__schema__[fieldId] <NEW_LINE> value = getattr(datasheet, fieldId, field.default) <NEW_LINE> if value is not field.default: <NEW_LINE> <INDENT> setattr(self, fieldId, value) | >>> from zope import interface, schema
>>> class IMyDatasheet1(interface.Interface):
... title = schema.TextLine(title = u'Title', default=u'Unset')
...
>>> class IMyDatasheet2(interface.Interface):
... title = schema.TextLine(title = u'Title')
>>> DatasheetClass1 = DatasheetType(
... 'mydatasheet1', IMyDatasheet1, title='MyDatasheet1')
>>> DatasheetClass2 = DatasheetType(
... 'mydatasheet2', IMyDatasheet2, title='MyDatasheet2')
>>> ds = DatasheetClass1()
>>> ds.title = u'test'
>>> ds1 = DatasheetClass1()
>>> ds1.title
u'Unset'
>>> ds1.__load__(ds)
>>> ds1.title
u'test'
>>> ds2 = DatasheetClass2()
>>> ds2.__load__(ds)
Traceback (most recent call last):
...
DatasheetException: Can't load data from incompatible datasheet | 62598fd2ff9c53063f51aad4 |
class UrlMapping(models.Model): <NEW_LINE> <INDENT> url = models.URLField(unique=True) <NEW_LINE> sha_hash = models.CharField(max_length=40, unique=True) <NEW_LINE> created = models.DateTimeField(auto_now_add=True) <NEW_LINE> def get_absolute_url(self): <NEW_LINE> <INDENT> return 'http://sha1.us/%s' % self.sha_hash <NEW_LINE> <DEDENT> class Meta: <NEW_LINE> <INDENT> ordering = ['-created'] | maps from a SHA hash to a URL | 62598fd27b180e01f3e49294 |
class IscsiCableDiagChannelResults(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.swagger_types = { 'channel_status': 'int', 'cable_length': 'int' } <NEW_LINE> self.attribute_map = { 'channel_status': 'channelStatus', 'cable_length': 'cableLength' } <NEW_LINE> self._channel_status = None <NEW_LINE> self._cable_length = None <NEW_LINE> <DEDENT> @property <NEW_LINE> def channel_status(self): <NEW_LINE> <INDENT> return self._channel_status <NEW_LINE> <DEDENT> @channel_status.setter <NEW_LINE> def channel_status(self, channel_status): <NEW_LINE> <INDENT> self._channel_status = channel_status <NEW_LINE> <DEDENT> @property <NEW_LINE> def cable_length(self): <NEW_LINE> <INDENT> return self._cable_length <NEW_LINE> <DEDENT> @cable_length.setter <NEW_LINE> def cable_length(self, cable_length): <NEW_LINE> <INDENT> self._cable_length = cable_length <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in iteritems(self.swagger_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> if self is None: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if self is None or other is None: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> 
return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other | NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually. | 62598fd2656771135c489afa |
class Answer(models.Model): <NEW_LINE> <INDENT> question = models.ForeignKey(Question, on_delete=models.CASCADE,related_name='answers') <NEW_LINE> description = models.TextField(blank=True, null=False, default=None) <NEW_LINE> is_right = models.BooleanField(default=False) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> verbose_name = "Answer" <NEW_LINE> verbose_name_plural = "Answers" | Answers for Questions | 62598fd255399d3f056269a3 |
class RegistroK230(Registro): <NEW_LINE> <INDENT> campos = [ CampoFixo(1, 'REG', 'K230'), CampoData(2, 'DT_INI_OP'), CampoData(3, 'DT_FIN_OP'), Campo(4, 'COD_DOC_OP'), Campo(5, 'COD_ITEM'), CampoNumerico(6, 'QTD_ENC'), ] <NEW_LINE> nivel = 3 | ITENS PRODUZIDOS | 62598fd27cff6e4e811b5eb2 |
class CommandsTest(TestCase): <NEW_LINE> <INDENT> def test_build(self): <NEW_LINE> <INDENT> s = ShellProtocol() <NEW_LINE> build_response = {'uid': 'something'} <NEW_LINE> s.hub = FakeHub(build=build_response) <NEW_LINE> sendLine_called = [] <NEW_LINE> s.sendLine = sendLine_called.append <NEW_LINE> s.cmd_build('project', 'version') <NEW_LINE> self.assertNotEqual(sendLine_called, [], "Should have sent something back") <NEW_LINE> self.assertTrue('something' in sendLine_called[0], "Should include the build request uid in the response") <NEW_LINE> self.assertEqual(s.hub.called, [ ('build', dict(project='project', version='version', test_path=None)), ]) <NEW_LINE> <DEDENT> def test_start(self): <NEW_LINE> <INDENT> shell = ShellProtocol() <NEW_LINE> factory = object() <NEW_LINE> server = defer.Deferred() <NEW_LINE> shell.hub = FakeHub(getPBServerFactory=factory, startServer=server) <NEW_LINE> sendLine = [] <NEW_LINE> shell.sendLine = sendLine.append <NEW_LINE> shell.cmd_start('tcp:8080') <NEW_LINE> while sendLine: <NEW_LINE> <INDENT> sendLine.pop() <NEW_LINE> <DEDENT> self.assertIn(('startServer', factory, 'tcp:8080'), shell.hub.called) <NEW_LINE> server.callback('foo') <NEW_LINE> self.assertNotEqual(sendLine, [], "Once server starts, connectee " "should be notified.") <NEW_LINE> <DEDENT> def test_stop(self): <NEW_LINE> <INDENT> shell = ShellProtocol() <NEW_LINE> stop_d = defer.Deferred() <NEW_LINE> shell.hub = FakeHub(stopServer=stop_d) <NEW_LINE> sendLine = [] <NEW_LINE> shell.sendLine = sendLine.append <NEW_LINE> shell.cmd_stop('tcp:8080') <NEW_LINE> while sendLine: <NEW_LINE> <INDENT> sendLine.pop() <NEW_LINE> <DEDENT> self.assertIn(('stopServer', 'tcp:8080'), shell.hub.called) <NEW_LINE> stop_d.callback('foo') <NEW_LINE> self.assertNotEqual(sendLine, [], "Once server stops, connectee " "should be notified.") <NEW_LINE> <DEDENT> def test_connect(self): <NEW_LINE> <INDENT> shell = ShellProtocol() <NEW_LINE> client = defer.Deferred() <NEW_LINE> shell.hub = 
FakeHub(connect=client) <NEW_LINE> sendLine = [] <NEW_LINE> shell.sendLine = sendLine.append <NEW_LINE> shell.cmd_connect('my endpoint') <NEW_LINE> while sendLine: <NEW_LINE> <INDENT> sendLine.pop() <NEW_LINE> <DEDENT> self.assertIn(('connect', 'my endpoint'), shell.hub.called) <NEW_LINE> client.callback('foo') <NEW_LINE> self.assertNotEqual(sendLine, [], "Once client connects, connectee " "should be notified.") <NEW_LINE> <DEDENT> def test_disconnect(self): <NEW_LINE> <INDENT> shell = ShellProtocol() <NEW_LINE> stop_d = defer.Deferred() <NEW_LINE> shell.hub = FakeHub(disconnect=stop_d) <NEW_LINE> sendLine = [] <NEW_LINE> shell.sendLine = sendLine.append <NEW_LINE> shell.cmd_disconnect('tcp:8080') <NEW_LINE> self.assertIn(('disconnect', 'tcp:8080'), shell.hub.called) | I test specific commands | 62598fd2ab23a570cc2d4fb2 |
class RunningMeanStd(): <NEW_LINE> <INDENT> def __init__(self, epsilon=1e-4, shape=(), norm_dim=(0,), a_min=-5., a_max=5.): <NEW_LINE> <INDENT> assert epsilon > 0. <NEW_LINE> self.shape = shape <NEW_LINE> self.mean = torch.zeros(shape, dtype=torch.float) <NEW_LINE> self.var = torch.ones(shape, dtype=torch.float) <NEW_LINE> self.epsilon = epsilon <NEW_LINE> self.count = epsilon <NEW_LINE> self.a_min = a_min <NEW_LINE> self.a_max = a_max <NEW_LINE> self.norm_dim = norm_dim <NEW_LINE> <DEDENT> def update(self, x): <NEW_LINE> <INDENT> batch_mean = torch.mean(x, dim=self.norm_dim) <NEW_LINE> batch_var = torch.var(x, dim=self.norm_dim) <NEW_LINE> batch_count = int(torch.prod(torch.tensor( [x.shape[dim] for dim in self.norm_dim]))) <NEW_LINE> if batch_count == 1: <NEW_LINE> <INDENT> batch_var.fill_(0.) <NEW_LINE> <DEDENT> self.update_from_moments(batch_mean, batch_var, batch_count) <NEW_LINE> <DEDENT> def update_from_moments(self, batch_mean, batch_var, batch_count): <NEW_LINE> <INDENT> delta = batch_mean - self.mean <NEW_LINE> tot_count = self.count + batch_count <NEW_LINE> new_mean = self.mean + delta * batch_count / tot_count <NEW_LINE> m_a = self.var * self.count <NEW_LINE> m_b = batch_var * batch_count <NEW_LINE> M2 = m_a + m_b + (delta ** 2) * self.count * batch_count / tot_count <NEW_LINE> new_var = M2 / tot_count <NEW_LINE> new_count = tot_count <NEW_LINE> self.mean = new_mean <NEW_LINE> self.var = new_var <NEW_LINE> self.count = new_count <NEW_LINE> <DEDENT> def normalize(self, x): <NEW_LINE> <INDENT> x_shape = x.shape <NEW_LINE> x = x.reshape(-1, *self.shape) <NEW_LINE> normalized_x = torch.clamp((x - self.mean) / torch.sqrt(self.var + self.epsilon), min=self.a_min, max=self.a_max) <NEW_LINE> normalized_x[normalized_x != normalized_x] = 0. 
<NEW_LINE> normalized_x = normalized_x.reshape(x_shape) <NEW_LINE> return normalized_x <NEW_LINE> <DEDENT> def unnormalize(self, x): <NEW_LINE> <INDENT> return x * torch.sqrt(self.var + self.epsilon) + self.mean | Modified from Baseline
Assumes shape to be (number of inputs, input_shape) | 62598fd297e22403b383b392 |
class FilterReviewListSerializer(serializers.ListSerializer): <NEW_LINE> <INDENT> def to_representation(self, data): <NEW_LINE> <INDENT> data = data.filter(parent=None) <NEW_LINE> return super().to_representation(data) | Фильтр комментариев без родителей | 62598fd23617ad0b5ee065d1 |
class CirrOSImageProvider(ImageProviderBase): <NEW_LINE> <INDENT> name = 'CirrOS' <NEW_LINE> def __init__(self, version=r'[0-9]+\.[0-9]+\.[0-9]+', build=None, arch=DEFAULT_ARCH): <NEW_LINE> <INDENT> super().__init__(version=version, build=build, arch=arch) <NEW_LINE> self.url_versions = 'https://download.cirros-cloud.net/' <NEW_LINE> self.url_images = self.url_versions + '{version}/' <NEW_LINE> self.image_pattern = 'cirros-{version}-{arch}-disk.img$' | CirrOS Image Provider
CirrOS is a Tiny OS that specializes in running on a cloud. | 62598fd29f28863672818ac2 |
class TeamPinkBasePredictor(IPredictor): <NEW_LINE> <INDENT> def __init__(self, nn_filename: str): <NEW_LINE> <INDENT> self.model = load_keras_sequential(RELATIVE_PATH, nn_filename) <NEW_LINE> assert self.model is not None <NEW_LINE> <DEDENT> def doPredict(self, data: StockData) -> float: <NEW_LINE> <INDENT> return 0.0 | Predictor based on an already trained neural network. | 62598fd2ff9c53063f51aad6 |
class VirtualMachineSizeListResult(msrest.serialization.Model): <NEW_LINE> <INDENT> _attribute_map = { 'aml_compute': {'key': 'amlCompute', 'type': '[VirtualMachineSize]'}, } <NEW_LINE> def __init__( self, *, aml_compute: Optional[List["VirtualMachineSize"]] = None, **kwargs ): <NEW_LINE> <INDENT> super(VirtualMachineSizeListResult, self).__init__(**kwargs) <NEW_LINE> self.aml_compute = aml_compute | The List Virtual Machine size operation response.
:param aml_compute: The list of virtual machine sizes supported by AmlCompute.
:type aml_compute: list[~azure.mgmt.machinelearningservices.models.VirtualMachineSize] | 62598fd297e22403b383b394 |
class DeepCopyMagicMock(mock.MagicMock): <NEW_LINE> <INDENT> def _mock_call(self, *args, **kwargs): <NEW_LINE> <INDENT> return super()._mock_call(*copy.deepcopy(args), **copy.deepcopy(kwargs)) | A magic mock class that deep-copies the method arguments to check the state of mutable objects at call time | 62598fd23617ad0b5ee065d3 |
class ListMonitoredResourceDescriptorsResponse(_messages.Message): <NEW_LINE> <INDENT> nextPageToken = _messages.StringField(1) <NEW_LINE> resourceDescriptors = _messages.MessageField('MonitoredResourceDescriptor', 2, repeated=True) | Result returned from ListMonitoredResourceDescriptors.
Fields:
nextPageToken: If there might be more results than those appearing in this
response, then nextPageToken is included. To get the next set of
results, call this method again using the value of nextPageToken as
pageToken.
resourceDescriptors: A list of resource descriptors. | 62598fd260cbc95b063647ca |
class TagFilter(WorkflowProcessor): <NEW_LINE> <INDENT> def __init__(self, tag): <NEW_LINE> <INDENT> self.tag = tag <NEW_LINE> <DEDENT> def process(self, workflow): <NEW_LINE> <INDENT> foreigners = set() <NEW_LINE> for task in reversed(DFSLinearizer().linearize(workflow)): <NEW_LINE> <INDENT> tag = workflow[task].annotation.get("tag") <NEW_LINE> if self.tag != tag and workflow[task].children <= foreigners: <NEW_LINE> <INDENT> foreigners.add(task) <NEW_LINE> <DEDENT> <DEDENT> workflow.remove(*foreigners) | A workflow processor that filters out tasks that do not affect tasks with the given tag.
A task gets removed from the workflow iff its tag differs from the given one and it has
no followers with the given tag.
By default every task has a $None tag.
Attributes:
tag (Optional[String]) - the tag value to filter by | 62598fd2377c676e912f6fbf |
class UnsupportedOnNativeFieldError(FieldError): <NEW_LINE> <INDENT> pass | Exception raised whenever someone tries to perform an operation that is not supported on a "native" field. | 62598fd23d592f4c4edbb344 |
class NodeAccessorMixin(object): <NEW_LINE> <INDENT> def node_accessor(self, root, root_label): <NEW_LINE> <INDENT> self.assertEqual(root.label, root_label) <NEW_LINE> return NodeAccessor(root) | Mix in to tests to setup and test a root | 62598fd2ff9c53063f51aada |
class XMLParser(BaseParser): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> BaseParser.__init__(self) <NEW_LINE> <DEDENT> @try_except <NEW_LINE> def serialise(self, fileinput): <NEW_LINE> <INDENT> self.parse_data(fileinput) <NEW_LINE> xwriter = XMLWriter() <NEW_LINE> root = xwriter.create_element(name="root") <NEW_LINE> for d in self.data: <NEW_LINE> <INDENT> doc = xwriter.create_subelement(parent=root, name="doc") <NEW_LINE> name = xwriter.create_subelement(parent=doc, name=KEY_NAME, value=d.name) <NEW_LINE> address = xwriter.create_subelement(parent=doc, name=KEY_ADDRESS, value=d.address) <NEW_LINE> phone = xwriter.create_subelement(parent=doc, name=KEY_PHONE, value=d.phone) <NEW_LINE> <DEDENT> return xwriter.to_string(root) <NEW_LINE> <DEDENT> @try_except <NEW_LINE> def deserialise(self, fileinput): <NEW_LINE> <INDENT> xreader = XMLReader() <NEW_LINE> root = xreader.get_root_from_file(fileinput) <NEW_LINE> for doc in xreader.get_element_children(root): <NEW_LINE> <INDENT> self.data.append(Document( xreader.get_subelement_value(doc, KEY_NAME), xreader.get_subelement_value(doc, KEY_ADDRESS), xreader.get_subelement_value(doc, KEY_PHONE) ) ) <NEW_LINE> <DEDENT> return self.data | XMLParser
Given a file path, parse the file for serialisation and deserialisation. | 62598fd250812a4eaa620e2b |
class LAParams: <NEW_LINE> <INDENT> def __init__(self, line_overlap=0.5, char_margin=2.0, line_margin=0.5, word_margin=0.1, boxes_flow=0.5, detect_vertical=False, all_texts=False): <NEW_LINE> <INDENT> self.line_overlap = line_overlap <NEW_LINE> self.char_margin = char_margin <NEW_LINE> self.line_margin = line_margin <NEW_LINE> self.word_margin = word_margin <NEW_LINE> self.boxes_flow = boxes_flow <NEW_LINE> self.detect_vertical = detect_vertical <NEW_LINE> self.all_texts = all_texts <NEW_LINE> return <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return '<LAParams: char_margin=%.1f, line_margin=%.1f, ' 'word_margin=%.1f all_texts=%r>' % (self.char_margin, self.line_margin, self.word_margin, self.all_texts) | Parameters for layout analysis
:param line_overlap: If two characters have more overlap than this they
are considered to be on the same line. The overlap is specified
relative to the minimum height of both characters.
:param char_margin: If two characters are closer together than this
margin they are considered to be part of the same word. If
characters are on the same line but not part of the same word, an
intermediate space is inserted. The margin is specified relative to
the width of the character.
:param word_margin: If two words are are closer together than this
margin they are considered to be part of the same line. A space is
added in between for readability. The margin is specified relative
to the width of the word.
:param line_margin: If two lines are are close together they are
considered to be part of the same paragraph. The margin is
specified relative to the height of a line.
:param boxes_flow: Specifies how much a horizontal and vertical position
of a text matters when determining the order of text boxes. The value
should be within the range of -1.0 (only horizontal position
matters) to +1.0 (only vertical position matters).
:param detect_vertical: If vertical text should be considered during
layout analysis
:param all_texts: If layout analysis should be performed on text in
figures. | 62598fd2ab23a570cc2d4fb5 |
class GameObject(metaclass=ABCMeta): <NEW_LINE> <INDENT> def __init__(self, classname = "none", *args, **kwargs): <NEW_LINE> <INDENT> super(GameObject, self).__init__(*args, **kwargs) <NEW_LINE> self._classname = classname <NEW_LINE> self._dirty = False <NEW_LINE> <DEDENT> def dirty(self): <NEW_LINE> <INDENT> self._dirty = True <NEW_LINE> <DEDENT> @property <NEW_LINE> def classname(self): <NEW_LINE> <INDENT> return self._classname <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def storage_location(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def symlinks(self): <NEW_LINE> <INDENT> pass | represents a game object which is written to disk by the storage class | 62598fd20fa83653e46f5377 |
class lfunc(gdb.Command): <NEW_LINE> <INDENT> def __init__ (self): <NEW_LINE> <INDENT> super (lfunc, self).__init__("lfunc", gdb.COMMAND_USER) <NEW_LINE> <DEDENT> def invoke (self, args, from_tty): <NEW_LINE> <INDENT> argv = gdb.string_to_argv(args) <NEW_LINE> if len(argv) != 2: <NEW_LINE> <INDENT> raise gdb.GdbError("Usage: lfunc file lineno") <NEW_LINE> <DEDENT> L = get_cur_L() <NEW_LINE> fname = str(argv[0]) <NEW_LINE> lineno = int(argv[1]) <NEW_LINE> g = G(L) <NEW_LINE> p = g['gc']['root'].address <NEW_LINE> while p: <NEW_LINE> <INDENT> o = gcref(p) <NEW_LINE> if not o: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> if o['gch']['gct'] == ~LJ_TFUNC(): <NEW_LINE> <INDENT> fn = o['fn'].address <NEW_LINE> pt = funcproto(fn) <NEW_LINE> if pt and pt['firstline'] == lineno: <NEW_LINE> <INDENT> name = proto_chunkname(pt) <NEW_LINE> if name: <NEW_LINE> <INDENT> path = lstr2str(name) <NEW_LINE> if string.find(path, fname) >= 0: <NEW_LINE> <INDENT> out("Found Lua function (GCfunc*)0x%x at %s:%d\n" % (ptr2int(fn), path, lineno)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> p = o['gch']['nextgc'].address | This command prints out all the Lua functions (the GCfunc* pointers) filtered by the file name and file line number where the function is defined.
Usage: lfunc file lineno | 62598fd2d8ef3951e32c80a3 |
class PrintH(Print): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> Print.__init__(self, *args, **kwargs) <NEW_LINE> <DEDENT> def process(self, x): <NEW_LINE> <INDENT> print >>self.file_, self.prefix, round(x.posterior_score,3), round(x.prior,3), round(x.likelihood,3), qq(x) <NEW_LINE> return x | Fancier printing for hypotheses | 62598fd2ad47b63b2c5a7cec |
class KittiOdometryVelodyneData(RNGDataFlow): <NEW_LINE> <INDENT> def __init__(self, base_path: str, sequence: str, shuffle: bool = False): <NEW_LINE> <INDENT> self.data = pykitti.odometry(base_path, sequence) <NEW_LINE> self.calib = self.data.calib.T_cam0_velo <NEW_LINE> self.shuffle = shuffle <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return len(self.data) <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> idxs = list(range(len(self.data))) <NEW_LINE> if self.shuffle: <NEW_LINE> <INDENT> self.rng.shuffle(idxs) <NEW_LINE> <DEDENT> for k in idxs: <NEW_LINE> <INDENT> timestamp_us = _timedelta_to_us(self.data.timestamps[k]) <NEW_LINE> if len(self.data.poses) == 0: <NEW_LINE> <INDENT> pose = np.eye(4) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> pose = cam2velo(self.data.poses[k], self.calib) <NEW_LINE> <DEDENT> cloud = self.data.get_velo(k) <NEW_LINE> yield {'idx': k, 'timestamp': timestamp_us, 'pose': pose, 'cloud': cloud} | Read velodyne clouds and poses directly from KITTI odometry data. | 62598fd2dc8b845886d53a4e |
class product_product(oe_lx, osv.osv): <NEW_LINE> <INDENT> _inherit = 'product.product' <NEW_LINE> def write(self, cr, uid, ids, values, context=None): <NEW_LINE> <INDENT> res = super(product_product, self).write(cr, uid, ids, values, context=context) <NEW_LINE> if any([field for field in lx_product.required_fields if field in values.keys()]): <NEW_LINE> <INDENT> self.lx_upload(cr, 1, ids, context=context) <NEW_LINE> <DEDENT> return res <NEW_LINE> <DEDENT> def create(self, cr, uid, values, context=None): <NEW_LINE> <INDENT> res = super(product_product, self).create(cr, uid, values, context=context) <NEW_LINE> self.lx_upload(cr, 1, [res], context=context) <NEW_LINE> return res <NEW_LINE> <DEDENT> def lx_upload(self, cr, uid, ids, log=False, context=None): <NEW_LINE> <INDENT> uploaded_file_names = [] <NEW_LINE> for product in self.browse(cr, uid, ids): <NEW_LINE> <INDENT> uploaded_file_names.append(self.upload(cr, uid, product, lx_product)) <NEW_LINE> if log: <NEW_LINE> <INDENT> _logger.info("Uploaded product with id %d" % product.id) <NEW_LINE> <DEDENT> <DEDENT> return uploaded_file_names <NEW_LINE> <DEDENT> def lx_upload_all(self, cr, uid, ids=[], context=None): <NEW_LINE> <INDENT> ids = self.search(cr, uid, []) <NEW_LINE> _logger.info("Starting upload of %d products" % len(ids)) <NEW_LINE> return self.lx_upload_one(cr, uid, self.browse(cr, uid, ids, context=context), log=True, context=context) <NEW_LINE> <DEDENT> def is_delivery_method(self, cr, uid, ids, context=None): <NEW_LINE> <INDENT> if not isinstance(ids, (list, tuple)): <NEW_LINE> <INDENT> ids = [ids] <NEW_LINE> <DEDENT> is_delivery_map = dict.fromkeys(ids, False) <NEW_LINE> carrier_obj = self.pool['delivery.carrier'] <NEW_LINE> for product_id in ids: <NEW_LINE> <INDENT> delivery_method_ids = carrier_obj.search(cr, 1, [('product_id','=',product_id)]) <NEW_LINE> if delivery_method_ids: <NEW_LINE> <INDENT> is_delivery_map[product_id] = delivery_method_ids <NEW_LINE> <DEDENT> <DEDENT> return is_delivery_map | 
Trigger upload on create, and on write if fields we are interested in have been touched
Also provide upload all functionality, and helper method to determine if product is a delivery product | 62598fd2a219f33f346c6c97 |
class CompanyRepCreateView(CreateView): <NEW_LINE> <INDENT> form_class = CompanyRepCreationForm <NEW_LINE> success_url = reverse_lazy('companies:list') <NEW_LINE> template_name = 'companies/create_rep.html' <NEW_LINE> object = None <NEW_LINE> @method_decorator(login_required) <NEW_LINE> @method_decorator( permission_required('companies.add_companyrep', raise_exception=True)) <NEW_LINE> def dispatch(self, *args, **kwargs): <NEW_LINE> <INDENT> return super(CompanyRepCreateView, self).dispatch(*args, **kwargs) <NEW_LINE> <DEDENT> def form_valid(self, form): <NEW_LINE> <INDENT> self.object = form.save() <NEW_LINE> username = self.object.get_username() <NEW_LINE> company = form.cleaned_data['company'] <NEW_LINE> msg = ('Successfully created a new company rep account with the ' 'username {} for the company {}.'.format(username, company)) <NEW_LINE> messages.success(self.request, msg) <NEW_LINE> pw_reset_form = PasswordResetForm({'username_or_email': username}) <NEW_LINE> if pw_reset_form.is_valid(): <NEW_LINE> <INDENT> email_template = 'accounts/password_set_initial_email.html' <NEW_LINE> subject_template = 'accounts/password_set_initial_subject.txt' <NEW_LINE> pw_reset_form.save( email_template_name=email_template, subject_template_name=subject_template) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> error_msg = ( u'Failed to send new company rep their password reset email. \n' 'Form data: {data} \nForm Errors: {errors}'.format( data=pw_reset_form.data, errors=pw_reset_form.errors.items()) ) <NEW_LINE> raise ValidationError(error_msg) <NEW_LINE> <DEDENT> return HttpResponseRedirect(self.get_success_url()) | View for creating a new account for a company representative.
Upon successful completion of the creation form, an email is sent to the
company representative to allow them to set the password for their account. | 62598fd2fbf16365ca79454e |
class ARMA: <NEW_LINE> <INDENT> def __init__(self, P, RHO, ma_weights, T=None, epsilon=None): <NEW_LINE> <INDENT> if epsilon is None: <NEW_LINE> <INDENT> epsilon = normal(0, 1, (T,)) <NEW_LINE> <DEDENT> elif T is None: <NEW_LINE> <INDENT> raise ValueError("You must provide T or epsilon.") <NEW_LINE> <DEDENT> self.epsilon = epsilon <NEW_LINE> if T is None: <NEW_LINE> <INDENT> T = shape(epsilon)[0] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> assert T == shape(epsilon)[0], "T and shape(epsilon) are incoherent." <NEW_LINE> <DEDENT> self.T = T <NEW_LINE> Q = len(ma_weights) <NEW_LINE> X = array(typecode=Float32, shape=(T,)) <NEW_LINE> for t in range(T): <NEW_LINE> <INDENT> X[t] = epsilon[t] <NEW_LINE> for k in range(1, min(t, Q)+1): <NEW_LINE> <INDENT> alpha_k = ma_weights[k-1] <NEW_LINE> X[t] += alpha_k * epsilon[t-k] <NEW_LINE> <DEDENT> for p in range(1, min(t, P)+1): <NEW_LINE> <INDENT> X[t] += RHO**p * X[t-p] <NEW_LINE> <DEDENT> <DEDENT> self.x = X <NEW_LINE> self.p = P <NEW_LINE> self.q = Q <NEW_LINE> self.rho = RHO <NEW_LINE> self.ma_weights = ma_weights <NEW_LINE> <DEDENT> def __getitem__(self, t): <NEW_LINE> <INDENT> return self.x[t] <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> return iter(self.x) | Generates and represents the evoluation of a ARMA(P, Q) through time.
Returns $X$ such that, for $t \in {0, \ldots, T-1}$,
\[
X_t = \sum_{k=1}^P \rho_k X_{t-k} +
\sum_{k=1}^Q \alpha_k \epsilon_{t-k} + \epsilon_t
\]
where $\rho_k = \rho^k$ (for now) and $\alpha_k = ma\_weights[k-1]$ and
$\epsilon$ denotes innovations. Hence, $Q = len(ma\_weights)$. Of
course, the summations are truncated whenever $t < P$ or $t < Q$.
Use the getitem protocol to access the value of the ARMA process at
time $t$, i.e.
a = ARMA(1, 0.94, T=100)
print a[97] # Will print the value at t=97
Note that iteration on an instance of this class is equivalent to
iterating the internal 'x' array containing the generated process. | 62598fd27cff6e4e811b5eba |
class HingeLoss(Loss): <NEW_LINE> <INDENT> __slots__ = ['penalty_type', 'reduction', 'penalty_direction'] <NEW_LINE> fancy_name = "Hinge Loss" <NEW_LINE> def __init__(self, reduction=DEFAULT["hingeloss"]['reduction'], penalty_type=DEFAULT["hingeloss"]['penalty_type'], penalty_direction=DEFAULT["hingeloss"]['penalty_direction']): <NEW_LINE> <INDENT> self.reduction = reduction <NEW_LINE> self.penalty_type = penalty_type <NEW_LINE> self.penalty_direction = penalty_direction <NEW_LINE> <DEDENT> def forward(self, input, target): <NEW_LINE> <INDENT> if self.penalty_direction == 'positive': <NEW_LINE> <INDENT> if self.penalty_type == 'linear': <NEW_LINE> <INDENT> temp = torch.clamp((input-target), min=0) <NEW_LINE> <DEDENT> elif self.penalty_type == 'squared': <NEW_LINE> <INDENT> temp = torch.clamp((input-target), min=0)**2 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError <NEW_LINE> <DEDENT> <DEDENT> elif self.penalty_direction == 'negative': <NEW_LINE> <INDENT> if self.penalty_type == 'linear': <NEW_LINE> <INDENT> temp = torch.clamp((input-target), max=0).abs() <NEW_LINE> <DEDENT> elif self.penalty_type == 'squared': <NEW_LINE> <INDENT> temp = torch.clamp((input-target), max=0)**2 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError <NEW_LINE> <DEDENT> if self.reduction == 'mean': <NEW_LINE> <INDENT> return torch.mean(temp) <NEW_LINE> <DEDENT> elif self.reduction == None: <NEW_LINE> <INDENT> return torch.sum(temp) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError | Hinge Loss | 62598fd2ab23a570cc2d4fb6 |
class DataSet(object): <NEW_LINE> <INDENT> def __init__(self, data): <NEW_LINE> <INDENT> self._d_size = len(data) <NEW_LINE> self._data = data <NEW_LINE> self._epochs_completed = 0 <NEW_LINE> self._i_in_epoch = 0 <NEW_LINE> <DEDENT> @property <NEW_LINE> def size(self): <NEW_LINE> <INDENT> return self._d_size <NEW_LINE> <DEDENT> def nb_raw(self, batch_size, batch_wrap=False, shuffle=False): <NEW_LINE> <INDENT> start = self._i_in_epoch <NEW_LINE> if self._epochs_completed == 0 and start == 0 and shuffle: <NEW_LINE> <INDENT> np.random.shuffle(self._data) <NEW_LINE> <DEDENT> if start + batch_size >= self._d_size: <NEW_LINE> <INDENT> self._epochs_completed += 1 <NEW_LINE> self._i_in_epoch = 0 <NEW_LINE> data_batch = self._data[start:self._d_size] <NEW_LINE> if shuffle: <NEW_LINE> <INDENT> np.random.shuffle(self._data) <NEW_LINE> <DEDENT> if batch_wrap: <NEW_LINE> <INDENT> self._i_in_epoch = batch_size - (self._d_size - start) <NEW_LINE> end = self._i_in_epoch <NEW_LINE> data_new_part = self._data[0:end] <NEW_LINE> data_batch.extend(data_new_part) <NEW_LINE> <DEDENT> return data_batch <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._i_in_epoch += batch_size <NEW_LINE> end = self._i_in_epoch <NEW_LINE> return self._data[start:end] | Basic DataSet class.
| 62598fd2956e5f7376df58c6 |
class BookmarkSummaryView(Adapter): <NEW_LINE> <INDENT> implements(inevow.IRenderer, ISummaryView) <NEW_LINE> def rend(self, data): <NEW_LINE> <INDENT> return T.div(_class="summaryView bookmark")[ T.a(href=self.original.url)[self.original.name] ] | Render a summary of a Bookmark.
| 62598fd2be7bc26dc92520a1 |
class DeviceNameTestCase(unittest.TestCase): <NEW_LINE> <INDENT> @patch.object(StorageDevice, "status", return_value=True) <NEW_LINE> @patch.object(StorageDevice, "update_sysfs_path", return_value=None) <NEW_LINE> @patch.object(StorageDevice, "read_current_size", return_value=None) <NEW_LINE> def test_storage_device(self, *patches): <NEW_LINE> <INDENT> good_names = ['sda1', '1sda', 'good-name', 'cciss/c0d0'] <NEW_LINE> bad_names = ['sda/1', 'sda\x00', '.', '..', 'cciss/..'] <NEW_LINE> sd = StorageDevice("tester") <NEW_LINE> for name in good_names: <NEW_LINE> <INDENT> self.assertTrue(sd.is_name_valid(name)) <NEW_LINE> <DEDENT> for name in bad_names: <NEW_LINE> <INDENT> self.assertFalse(sd.is_name_valid(name)) <NEW_LINE> <DEDENT> for name in good_names: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> StorageDevice(name, exists=True) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> self.fail("Name check should not be performed nor failing") <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> StorageDevice(name, exists=False) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> self.fail("Device name check failed when it shouldn't") <NEW_LINE> <DEDENT> <DEDENT> for name in bad_names: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> StorageDevice(name, exists=True) <NEW_LINE> <DEDENT> except ValueError as e: <NEW_LINE> <INDENT> if ' is not a valid name for this device' in str(e): <NEW_LINE> <INDENT> self.fail("Device name checked on already existing device") <NEW_LINE> <DEDENT> <DEDENT> with six.assertRaisesRegex(self, ValueError, ' is not a valid name for this device'): <NEW_LINE> <INDENT> StorageDevice(name, exists=False) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def test_volume_group(self): <NEW_LINE> <INDENT> good_names = ['vg00', 'group-name', 'groupname-'] <NEW_LINE> bad_names = ['-leading-hyphen', 'únicode', 'sp aces'] <NEW_LINE> pv = StorageDevice("pv1", fmt=blivet.formats.get_format("lvmpv"), size=Size("1 GiB")) <NEW_LINE> vg = LVMVolumeGroupDevice("testvg", 
parents=[pv]) <NEW_LINE> for name in good_names: <NEW_LINE> <INDENT> self.assertTrue(vg.is_name_valid(name)) <NEW_LINE> <DEDENT> for name in bad_names: <NEW_LINE> <INDENT> self.assertFalse(vg.is_name_valid(name)) <NEW_LINE> <DEDENT> <DEDENT> def test_logical_volume(self): <NEW_LINE> <INDENT> good_names = ['lv00', 'volume-name', 'volumename-'] <NEW_LINE> bad_names = ['-leading-hyphen', 'únicode', 'sp aces', 'snapshot47', 'pvmove0', 'sub_tmetastring'] <NEW_LINE> pv = StorageDevice("pv1", fmt=blivet.formats.get_format("lvmpv"), size=Size("1 GiB")) <NEW_LINE> vg = LVMVolumeGroupDevice("testvg", parents=[pv]) <NEW_LINE> lv = LVMLogicalVolumeDevice("testlv", parents=[vg], fmt=blivet.formats.get_format("xfs")) <NEW_LINE> for name in good_names: <NEW_LINE> <INDENT> self.assertTrue(lv.is_name_valid(name)) <NEW_LINE> <DEDENT> for name in bad_names: <NEW_LINE> <INDENT> self.assertFalse(lv.is_name_valid(name)) | Test device name validation | 62598fd2283ffb24f3cf3d12 |
class Logger: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.successfulPackets = 0 <NEW_LINE> self.timeOuts = 0 <NEW_LINE> self.totalPackets = 0 <NEW_LINE> self.packetToSuccess = [] <NEW_LINE> <DEDENT> def resetTimeOutCounter(self): <NEW_LINE> <INDENT> self.timeOuts = 0 | Records relevant information, used to print information about the run. | 62598fd2dc8b845886d53a50 |
class TimeoutException(Exception): <NEW_LINE> <INDENT> pass | Exception, if a timeout happens | 62598fd255399d3f056269ad |
class TestDrain(TestCase): <NEW_LINE> <INDENT> def test_consumes_all_items(self): <NEW_LINE> <INDENT> iterations = [] <NEW_LINE> inputs = make_list() <NEW_LINE> Stream(generate(inputs, iterations)).drain() <NEW_LINE> self.assertEqual(iterations, inputs) | Tests for `drain`. | 62598fd24527f215b58ea362 |
class Trace(base.Trace): <NEW_LINE> <INDENT> def _initialize(self, chain, length): <NEW_LINE> <INDENT> if self._getfunc is None: <NEW_LINE> <INDENT> self._getfunc = self.db.model._funs_to_tally[self.name] <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> self._shape = np.shape(self._getfunc()) <NEW_LINE> <DEDENT> except TypeError: <NEW_LINE> <INDENT> self._shape = None <NEW_LINE> <DEDENT> self._vstr = ', '.join(var_str(self._shape)) <NEW_LINE> if chain != 0: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> vstr = ', '.join(v + ' FLOAT' for v in var_str(self._shape)) <NEW_LINE> try: <NEW_LINE> <INDENT> query = "create table [%s] (recid INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT, trace int(5), %s )" % (self.name, vstr) <NEW_LINE> self.db.cur.execute(query) <NEW_LINE> <DEDENT> except OperationalError: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> <DEDENT> def tally(self, chain): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> valstring = ', '.join(['%f'%x for x in np.ravel(self._getfunc())]) <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> valstring = str(self._getfunc()) <NEW_LINE> <DEDENT> query = "INSERT INTO [%s] (recid, trace, %s) values (NULL, %s, %s)" % (self.name, self._vstr, chain, valstring) <NEW_LINE> self.db.cur.execute(query) <NEW_LINE> <DEDENT> def gettrace(self, burn=0, thin=1, chain=-1, slicing=None): <NEW_LINE> <INDENT> if not slicing: <NEW_LINE> <INDENT> slicing = slice(burn, None, thin) <NEW_LINE> <DEDENT> if chain is None: <NEW_LINE> <INDENT> self.db.cur.execute('SELECT * FROM [%s]' % self.name) <NEW_LINE> trace = self.db.cur.fetchall() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if chain < 0: <NEW_LINE> <INDENT> chain = range(self.db.chains)[chain] <NEW_LINE> <DEDENT> self.db.cur.execute('SELECT * FROM [%s] WHERE trace=%s' % (self.name, chain)) <NEW_LINE> trace = self.db.cur.fetchall() <NEW_LINE> <DEDENT> trace = np.array(trace)[:,2:] <NEW_LINE> if len(self._shape) > 1: <NEW_LINE> <INDENT> trace = trace.reshape(-1, *self._shape) <NEW_LINE> <DEDENT> return 
squeeze(trace[slicing]) <NEW_LINE> <DEDENT> def __getitem__(self, index): <NEW_LINE> <INDENT> chain = self._chain <NEW_LINE> if chain is None: <NEW_LINE> <INDENT> self.db.cur.execute('SELECT * FROM [%s]' % self.name) <NEW_LINE> trace = self.db.cur.fetchall() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> if chain < 0: <NEW_LINE> <INDENT> chain = range(self.db.chains)[chain] <NEW_LINE> <DEDENT> self.db.cur.execute('SELECT * FROM [%s] WHERE trace=%s' % (self.name, chain)) <NEW_LINE> trace = self.db.cur.fetchall() <NEW_LINE> <DEDENT> trace = np.array(trace)[:,2:] <NEW_LINE> if len(self._shape) > 1: <NEW_LINE> <INDENT> trace = trace.reshape(-1, *self._shape) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> trace = np.squeeze(trace) <NEW_LINE> <DEDENT> return trace[index] <NEW_LINE> <DEDENT> __call__ = gettrace <NEW_LINE> def length(self, chain=-1): <NEW_LINE> <INDENT> return len(self.gettrace(chain=chain)) | SQLite Trace class. | 62598fd297e22403b383b39c |
class BaseCliModule(object): <NEW_LINE> <INDENT> def __init__(self, usage): <NEW_LINE> <INDENT> self.parser = OptionParser(usage) <NEW_LINE> self.config = None <NEW_LINE> self.options = None <NEW_LINE> self.user_config = read_user_config() <NEW_LINE> self._add_common_options() <NEW_LINE> <DEDENT> def _add_common_options(self): <NEW_LINE> <INDENT> self.parser.add_option("--debug", dest="debug", action="store_true", help="print debug messages", default=False) <NEW_LINE> self.parser.add_option("--offline", dest="offline", action="store_true", help="do not attempt any remote communication (avoid using " + "this please)", default=False) <NEW_LINE> default_output_dir = lookup_build_dir(self.user_config) <NEW_LINE> if not os.path.exists(default_output_dir): <NEW_LINE> <INDENT> print("Creating output directory: %s" % default_output_dir) <NEW_LINE> run_command("mkdir %s" % default_output_dir) <NEW_LINE> <DEDENT> self.parser.add_option("-o", "--output", dest="output_dir", metavar="OUTPUTDIR", default=default_output_dir, help="Path to write temp files, tarballs and rpms to. " "(default %s)" % default_output_dir) <NEW_LINE> <DEDENT> def main(self, argv): <NEW_LINE> <INDENT> (self.options, self.args) = self.parser.parse_args(argv) <NEW_LINE> self._validate_options() <NEW_LINE> if len(argv) < 1: <NEW_LINE> <INDENT> print(self.parser.error("Must supply an argument. 
" "Try -h for help.")) <NEW_LINE> <DEDENT> <DEDENT> def load_config(self, package_name, build_dir, tag): <NEW_LINE> <INDENT> self.config = ConfigLoader(package_name, build_dir, tag).load() <NEW_LINE> if self.config.has_option(BUILDCONFIG_SECTION, "offline"): <NEW_LINE> <INDENT> self.options.offline = True <NEW_LINE> <DEDENT> if self.options.debug: <NEW_LINE> <INDENT> os.environ['DEBUG'] = "true" <NEW_LINE> <DEDENT> if self.config.has_option(BUILDCONFIG_SECTION, "lib_dir"): <NEW_LINE> <INDENT> lib_dir = self.config.get(BUILDCONFIG_SECTION, "lib_dir") <NEW_LINE> if lib_dir[0] != '/': <NEW_LINE> <INDENT> lib_dir = os.path.join(find_git_root(), lib_dir) <NEW_LINE> <DEDENT> if os.path.exists(lib_dir): <NEW_LINE> <INDENT> sys.path.append(lib_dir) <NEW_LINE> debug("Added lib dir to PYTHONPATH: %s" % lib_dir) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> warn_out("lib_dir specified but does not exist: %s" % lib_dir) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def _validate_options(self): <NEW_LINE> <INDENT> pass | Common code used amongst all CLI modules. | 62598fd29f28863672818ac7 |
class Student: <NEW_LINE> <INDENT> def __init__(self, first_name, last_name, age): <NEW_LINE> <INDENT> self.first_name = first_name <NEW_LINE> self.last_name = last_name <NEW_LINE> self.age = age <NEW_LINE> <DEDENT> def to_json(self, attrs=None): <NEW_LINE> <INDENT> if attrs is not None and all(isinstance(x, str) for x in attrs): <NEW_LINE> <INDENT> d = {} <NEW_LINE> for k, v in self.__dict__.items(): <NEW_LINE> <INDENT> if k in attrs: <NEW_LINE> <INDENT> d[k] = v <NEW_LINE> <DEDENT> <DEDENT> return d <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self.__dict__ | Student class | 62598fd2adb09d7d5dc0aa0f |
class ShearY(Affine): <NEW_LINE> <INDENT> def __init__(self, shear=(-30, 30), order=1, cval=0, mode="constant", fit_output=False, backend="auto", seed=None, name=None, random_state="deprecated", deterministic="deprecated"): <NEW_LINE> <INDENT> super(ShearY, self).__init__( shear={"y": shear}, order=order, cval=cval, mode=mode, fit_output=fit_output, backend=backend, seed=seed, name=name, random_state=random_state, deterministic=deterministic) | Apply affine shear on the y-axis to input data.
This is a wrapper around :class:`Affine`.
Added in 0.4.0.
**Supported dtypes**:
See :class:`~imgaug.augmenters.geometric.Affine`.
Parameters
----------
shear : number or tuple of number or list of number or imgaug.parameters.StochasticParameter, optional
Analogous to ``shear`` in :class:`Affine`, except that this shear
value only affects the y-axis. No dictionary input is allowed.
order : int or iterable of int or imgaug.ALL or imgaug.parameters.StochasticParameter, optional
See :class:`Affine`.
cval : number or tuple of number or list of number or imgaug.ALL or imgaug.parameters.StochasticParameter, optional
See :class:`Affine`.
mode : str or list of str or imgaug.ALL or imgaug.parameters.StochasticParameter, optional
See :class:`Affine`.
fit_output : bool, optional
See :class:`Affine`.
backend : str, optional
See :class:`Affine`.
seed : None or int or imgaug.random.RNG or numpy.random.Generator or numpy.random.BitGenerator or numpy.random.SeedSequence or numpy.random.RandomState, optional
See :func:`~imgaug.augmenters.meta.Augmenter.__init__`.
name : None or str, optional
See :func:`~imgaug.augmenters.meta.Augmenter.__init__`.
random_state : None or int or imgaug.random.RNG or numpy.random.Generator or numpy.random.BitGenerator or numpy.random.SeedSequence or numpy.random.RandomState, optional
Old name for parameter `seed`.
Its usage will not yet cause a deprecation warning,
but it is still recommended to use `seed` now.
Outdated since 0.4.0.
deterministic : bool, optional
Deprecated since 0.4.0.
See method ``to_deterministic()`` for an alternative and for
details about what the "deterministic mode" actually does.
Examples
--------
>>> import imgaug.augmenters as iaa
>>> aug = iaa.ShearY((-20, 20))
Create an augmenter that shears images along the y-axis by random amounts
between ``-20`` and ``20`` degrees. | 62598fd260cbc95b063647d2 |
class Round3Score(RoboticsScore): <NEW_LINE> <INDENT> items = RoboticsScore.items + ('passengers',) <NEW_LINE> passengers = 0 <NEW_LINE> def __init__(self, total_time=0, passengers=0): <NEW_LINE> <INDENT> super(Round3Score, self).__init__(total_time) <NEW_LINE> self.passengers = passengers <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def max_action_credits(cls): <NEW_LINE> <INDENT> return sys.maxint <NEW_LINE> <DEDENT> def evaluate_action_credits(self): <NEW_LINE> <INDENT> return self.passengers | Concrete score sub-class for round 3.
It counts:
- the number of passengers correctly transported | 62598fd2a219f33f346c6c9b |
class PageLayoutElement(models.Model): <NEW_LINE> <INDENT> page_type = models.ForeignKey(PageType, verbose_name=_("page type")) <NEW_LINE> region = models.CharField( max_length=50, db_index=True, verbose_name=_("region"), help_text=_('A hard coded region name that is rendered in template index and also used in ' '<a href="https://github.com/Wtower/django-ninecms#theme-suggestions" target="_blank">' 'theme suggestions</a>.'), ) <NEW_LINE> block = models.ForeignKey(ContentBlock, verbose_name=_("content block")) <NEW_LINE> weight = models.IntegerField( default=0, db_index=True, verbose_name=_("order weight"), help_text=_("Elements with greater number in the same region sink to the bottom of the page."), ) <NEW_LINE> hidden = models.BooleanField(default=False, db_index=True, verbose_name=_("hidden")) <NEW_LINE> def __str__(self): <NEW_LINE> <INDENT> return ' '.join((str(self.page_type), self.region)) <NEW_LINE> <DEDENT> class Meta: <NEW_LINE> <INDENT> verbose_name = _("page layout element") <NEW_LINE> verbose_name_plural = _("page layout elements") | Page Layout Element Model: a set of these records define the layout for each page type | 62598fd2656771135c489b06 |
class TokenFinish(Parsing.Token): <NEW_LINE> <INDENT> pass | %token finish | 62598fd24527f215b58ea364 |
class SymbioticTool(BaseTool, SymbioticBaseTool): <NEW_LINE> <INDENT> def __init__(self, opts): <NEW_LINE> <INDENT> SymbioticBaseTool.__init__(self, opts) <NEW_LINE> <DEDENT> def executable(self): <NEW_LINE> <INDENT> return util.find_executable('ikos') <NEW_LINE> <DEDENT> def version(self, executable): <NEW_LINE> <INDENT> stdout = self._version_from_tool(executable, '--version') <NEW_LINE> line = next(l for l in stdout.splitlines() if l.startswith('ikos')) <NEW_LINE> line = line.replace('ikos' , '') <NEW_LINE> return line.strip() <NEW_LINE> <DEDENT> def name(self): <NEW_LINE> <INDENT> return 'ikos' <NEW_LINE> <DEDENT> def cmdline(self, executable, options, tasks, propertyfile=None, rlimits={}): <NEW_LINE> <INDENT> opts = ['-d=dbm'] <NEW_LINE> if self._options.property.assertions(): <NEW_LINE> <INDENT> opts.append('-a=prover') <NEW_LINE> <DEDENT> elif self._options.property.memsafety(): <NEW_LINE> <INDENT> opts.append('-a=boa') <NEW_LINE> opts.append('-a=nullity') <NEW_LINE> opts.append('-a=dfa') <NEW_LINE> <DEDENT> elif self._options.property.signedoverflow(): <NEW_LINE> <INDENT> opts.append('-a=sio') <NEW_LINE> <DEDENT> return [executable] + options + opts + tasks <NEW_LINE> <DEDENT> def determine_result(self, returncode, returnsignal, output, isTimeout): <NEW_LINE> <INDENT> for line in output: <NEW_LINE> <INDENT> if 'error: double free' in line: <NEW_LINE> <INDENT> return result.RESULT_FALSE_FREE <NEW_LINE> <DEDENT> elif 'error: buffer overflow' in line: <NEW_LINE> <INDENT> return result.RESULT_FALSE_DEREF <NEW_LINE> <DEDENT> elif 'error: assertion never holds' in line: <NEW_LINE> <INDENT> return result.RESULT_FALSE_REACH <NEW_LINE> <DEDENT> elif 'The program is SAFE' in line: <NEW_LINE> <INDENT> return result.RESULT_TRUE_PROP <NEW_LINE> <DEDENT> elif 'The program is potentially UNSAFE' in line: <NEW_LINE> <INDENT> return result.RESULT_UNKNOWN <NEW_LINE> <DEDENT> <DEDENT> return result.RESULT_ERROR <NEW_LINE> <DEDENT> def llvm_version(self): <NEW_LINE> <INDENT> 
return '7.0.1' | Tool info for CPAchecker.
It has additional features such as building CPAchecker before running it
if executed within a source checkout.
It also supports extracting data from the statistics output of CPAchecker
for adding it to the result tables. | 62598fd27cff6e4e811b5ebe |
class LinkSearch(BaseFieldSearch): <NEW_LINE> <INDENT> _tag = 'linkto:' <NEW_LINE> _field_to_search = 'linkto' <NEW_LINE> costs = 5000 <NEW_LINE> def __init__(self, pattern, use_re=False, case=True): <NEW_LINE> <INDENT> super(LinkSearch, self).__init__(pattern, use_re, case) <NEW_LINE> self._textpattern = '(' + pattern.replace('/', '|') + ')' <NEW_LINE> self.textsearch = TextSearch(self._textpattern, use_re=True, case=case) <NEW_LINE> <DEDENT> def highlight_re(self): <NEW_LINE> <INDENT> return u"(%s)" % self._textpattern <NEW_LINE> <DEDENT> def _get_matches(self, page): <NEW_LINE> <INDENT> matches = [] <NEW_LINE> found = False <NEW_LINE> for link in page.getPageLinks(page.request): <NEW_LINE> <INDENT> if self.search_re.match(link): <NEW_LINE> <INDENT> found = True <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> if found: <NEW_LINE> <INDENT> results = self.textsearch.search(page) <NEW_LINE> if results: <NEW_LINE> <INDENT> matches.extend(results) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> matches.append(TextMatch(0, 0)) <NEW_LINE> <DEDENT> <DEDENT> return matches | Search the term in the pagelinks | 62598fd2091ae356687050b1 |
class SearchTaskAPI(APIView): <NEW_LINE> <INDENT> renderer_classes = (JSONRenderer, XMLRenderer) <NEW_LINE> parser_classes = (JSONParser, XMLParser) <NEW_LINE> def get(self, request): <NEW_LINE> <INDENT> if 'q' not in request.query_params: <NEW_LINE> <INDENT> return Response({'error': 'Parámetros insuficientes', 'q': 'Requerido'}, status=status.HTTP_400_BAD_REQUEST) <NEW_LINE> <DEDENT> q = request.query_params.get('q') <NEW_LINE> order_by = 'id' <NEW_LINE> if 'orderby' in request.query_params: <NEW_LINE> <INDENT> order_by = request.query_params.get('orderby') <NEW_LINE> order_choices = ['creacion', 'actualizacion', 'descripcion', 'tiempo_estimado', 'tiempo_registrado', 'estatus'] <NEW_LINE> if not order_by in order_choices: <NEW_LINE> <INDENT> return Response({'error': f'[{order_by}] Valor inválido', 'orderby': 'opciones: creacion, actualizacion, descripcion, tiempo_estimado, tiempo_registrado, estatus'}, status=status.HTTP_400_BAD_REQUEST) <NEW_LINE> <DEDENT> <DEDENT> tasks = Task.objects.search(q, order_by) <NEW_LINE> serialized_data = TaskSerializer(tasks, many=True) <NEW_LINE> return Response({'results': serialized_data.data}) | API para la petición de búsqueda de tareas por descripción y/o estatus | 62598fd2ab23a570cc2d4fb8 |
class DynamicDocument(Document): <NEW_LINE> <INDENT> my_metaclass = TopLevelDocumentMetaclass <NEW_LINE> __metaclass__ = TopLevelDocumentMetaclass <NEW_LINE> _dynamic = True <NEW_LINE> def __delattr__(self, *args, **kwargs): <NEW_LINE> <INDENT> field_name = args[0] <NEW_LINE> if field_name in self._dynamic_fields: <NEW_LINE> <INDENT> setattr(self, field_name, None) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> super(DynamicDocument, self).__delattr__(*args, **kwargs) | A Dynamic Document class allowing flexible, expandable and uncontrolled
schemas. As a :class:`~mongoengine.Document` subclass, acts in the same
way as an ordinary document but has expando style properties. Any data
passed or set against the :class:`~mongoengine.DynamicDocument` that is
not a field is automatically converted into a
:class:`~mongoengine.fields.DynamicField` and data can be attributed to that
field.
.. note::
There is one caveat on Dynamic Documents: fields cannot start with `_` | 62598fd29f28863672818ac8 |
class LoginView(BaseView, TemplateView): <NEW_LINE> <INDENT> template_name = 'home/login.html' <NEW_LINE> def get_context_data(self, **kwargs): <NEW_LINE> <INDENT> context = super().get_context_data(**kwargs) <NEW_LINE> info = { 'info': { 'title': 'Login Page - NMS', }, } <NEW_LINE> context.update(info) <NEW_LINE> return context | docstring for LoginView | 62598fd23d592f4c4edbb34c |
class Photo(models.Model): <NEW_LINE> <INDENT> image = models.ImageField(upload_to='images/photomodels', verbose_name='Изображение') <NEW_LINE> photomodel = models.ForeignKey(Photomodel, verbose_name='Фотомодель') <NEW_LINE> def image_tag(self): <NEW_LINE> <INDENT> return u'<img height="100px" src="%s" />' % (settings.MEDIA_URL + str(self.image)) <NEW_LINE> <DEDENT> image_tag.short_description = 'Image' <NEW_LINE> image_tag.allow_tags = True <NEW_LINE> def __unicode__(self): <NEW_LINE> <INDENT> return self.image <NEW_LINE> <DEDENT> class Meta: <NEW_LINE> <INDENT> verbose_name = 'Фотография фотомодели' <NEW_LINE> verbose_name_plural = 'Фотографии фотомодели' | Фотографии фотомодели | 62598fd2dc8b845886d53a54 |
class Host: <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> self._host = jenkinsapi.jenkins.Jenkins(*args, **kwargs) <NEW_LINE> <DEDENT> def list_jobs(self): <NEW_LINE> <INDENT> return self._host.get_jobs() <NEW_LINE> <DEDENT> def change_job(self, name, xml): <NEW_LINE> <INDENT> job = self._host.get_job(name) <NEW_LINE> return job.update_config(xml) <NEW_LINE> <DEDENT> def create_job(self, name, xml): <NEW_LINE> <INDENT> return self._host.create_job(name, xml) <NEW_LINE> <DEDENT> def erase_job(self, name): <NEW_LINE> <INDENT> return self._host.delete_job(name) | A Jenkins host to operate on. | 62598fd250812a4eaa620e2f |
class Posts(db.Model): <NEW_LINE> <INDENT> url = db.StringProperty(required=True) <NEW_LINE> title = db.StringProperty(required=True) <NEW_LINE> created = db.DateTimeProperty(auto_now_add=True) | Database Model to store each update/notification. | 62598fd2a219f33f346c6c9d |
class NodeHandler(tornado.websocket.WebSocketHandler): <NEW_LINE> <INDENT> node_dict = {} <NEW_LINE> def initialize(self, comms_handler,verbose=True): <NEW_LINE> <INDENT> self.__comms_handler = comms_handler <NEW_LINE> self.verbose = verbose <NEW_LINE> <DEDENT> def open(self): <NEW_LINE> <INDENT> self.id = uuid.uuid4().hex <NEW_LINE> NodeHandler.node_dict[self.id] = self <NEW_LINE> ip = self.request.remote_ip <NEW_LINE> print('(NDH {}) New NODE {} ({}). (out of {}) ' .format(time.strftime(TFORMAT), socket.getfqdn(ip), ip, len(NodeHandler.node_dict))) <NEW_LINE> print('(NDH) UUID: {}'.format(self.id)) <NEW_LINE> <DEDENT> def on_message(self, message): <NEW_LINE> <INDENT> message_dict = json.loads(message) <NEW_LINE> if METAKEYWORD not in message_dict: <NEW_LINE> <INDENT> if self.verbose: <NEW_LINE> <INDENT> if not message_dict['error']: <NEW_LINE> <INDENT> print('(NDH) time: {0:.3f}, user: {1}, error: {2}, ch0: {3}' .format(message_dict["x"], message_dict["user"], message_dict["error"], message_dict["ch0"])) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print('(NDH) time: {0:.3f}, user: {1}, error: {2}' .format(message_dict["x"], message_dict["user"], message_dict["error"])) <NEW_LINE> <DEDENT> <DEDENT> self.__comms_handler.last_data[self.id] = message_dict <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.user = message_dict['user'] <NEW_LINE> self.__comms_handler.add_metadata(self.id,message_dict) <NEW_LINE> <DEDENT> <DEDENT> def on_close(self): <NEW_LINE> <INDENT> self.__comms_handler.add_metadata(self.id,CONNCLOSEDSTR) <NEW_LINE> self.__comms_handler.remove_key(self.id) <NEW_LINE> NodeHandler.node_dict.pop(self.id, None) <NEW_LINE> ip = self.request.remote_ip <NEW_LINE> user = self.user <NEW_LINE> print('(NDH {}) Connection with {} ({}) closed ' .format(time.strftime(TFORMAT), ip, user)) <NEW_LINE> <DEDENT> def check_origin(self, origin): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def broadcast_to_nodes(cls,msg=DEFAULTMESSAGE): 
<NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> broadcast(cls.node_dict, msg) <NEW_LINE> <DEDENT> except KeyboardInterrupt: <NEW_LINE> <INDENT> raise | Class that handles the communication via websockets with the slave nodes.
| 62598fd2656771135c489b08 |
class Proxy:
    """Proxy provider, to pass through and cache tiles from other places.

    This provider is identified by the name "proxy" in the TileStache config.

    Additional arguments:

    - url (optional)
        URL template for remote tiles, for example:
        "http://tile.openstreetmap.org/{Z}/{X}/{Y}.png"
    - provider (optional)
        Provider name string from Modest Maps built-ins.
        See ModestMaps.builtinProviders.keys() for a list.
        Example: "OPENSTREETMAP".

    One of the above is required. When both are present, url wins.

    Example configuration:

        {
            "name": "proxy",
            "url": "http://tile.openstreetmap.org/{Z}/{X}/{Y}.png"
        }
    """

    def __init__(self, layer, url=None, provider_name=None):
        """Build the underlying Modest Maps provider from a URL template or
        a built-in provider name.  Raises when neither (or an unknown
        provider name) is given.
        """
        if url:
            self.provider = ModestMaps.Providers.TemplatedMercatorProvider(url)
        elif provider_name:
            if provider_name in ModestMaps.builtinProviders:
                self.provider = ModestMaps.builtinProviders[provider_name]()
            else:
                # Fixed typo in the error message ("Unkown" -> "Unknown").
                raise Exception('Unknown Modest Maps provider: "%s"' % provider_name)
        else:
            raise Exception('Missing required url or provider parameter to Proxy provider')

    @staticmethod
    def prepareKeywordArgs(config_dict):
        """Convert the configured parameters to keyword args for __init__().

        Maps the config key "provider" to the constructor parameter
        "provider_name"; "url" passes through unchanged.
        """
        kwargs = dict()
        if 'url' in config_dict:
            kwargs['url'] = config_dict['url']
        if 'provider' in config_dict:
            kwargs['provider_name'] = config_dict['provider']
        return kwargs

    def renderTile(self, width, height, srs, coord):
        """Fetch and return the tile for ``coord``.

        Only spherical-mercator (EPSG:900913) requests are supported.  For a
        single source URL the raw bytes are returned verbatim; for multiple
        layered URLs the tiles are composited onto one RGBA image.
        """
        if srs != Geography.SphericalMercator.srs:
            raise Exception('Projection doesn\'t match EPSG:900913: "%(srs)s"' % locals())
        img = None
        urls = self.provider.getTileUrls(coord)
        for url in urls:
            # NOTE(review): urllib.urlopen is the Python 2 API; this module
            # presumably targets Python 2 -- confirm before porting.
            body = urllib.urlopen(url).read()
            tile = Verbatim(body)
            if len(urls) == 1:
                # Fast path: pass the single tile through without re-encoding.
                return tile
            elif img is None:
                img = Image.new('RGBA', (width, height))
            img.paste(tile, (0, 0), tile)
        return img
class Gaussian_UCB_5_Continuous_Policy(Policy):
    """must have array_rewards

    Continuous-arm UCB-style bandit policy.  Each call returns one arm: a
    tuple of real values sampled around per-dimension centres described by
    ``mab.space``.  Entries of ``mab.space`` are 4-tuples unpacked below as
    ``(p, c, std, lg)`` -- centre ``c``, sampling spread ``std`` and a flag
    ``lg`` selecting log-scale sampling; ``p`` is not used here.
    NOTE(review): this reading of the tuple fields is inferred from usage in
    __call__ -- confirm against the MAB implementation.
    """

    def __init__(self, exploitation_over_exploration_ratio=None):
        # None means "use the default of 0.5", resolved lazily on first call.
        super(Gaussian_UCB_5_Continuous_Policy, self).__init__()
        self.exploitation_over_exploration_ratio = exploitation_over_exploration_ratio

    def __call__(self, mab):
        """Pick the next arm for ``mab`` and return it as a tuple of floats."""
        i_arm = []
        if (self.exploitation_over_exploration_ratio is None):
            self.exploitation_over_exploration_ratio = 0.5
        # Default relative spread used while still in the pure-exploration phase.
        gstd = 0.3
        mi = None
        # Only start exploiting once we have >10 samples per dimension.
        if (len(mab.mean_rewards) > 10*len(mab.space)):
            # Rows of array_rewards are (arm..., reward); split into X and y.
            x = np.asarray(mab.array_rewards)[:,:-1]
            y = np.asarray(mab.array_rewards)[:,-1]
            sh = x.shape[0]
            # Number of candidate centres and KNN neighbourhood size, both
            # growing with the sample count but bounded below.
            m = np.maximum(sh/4, 10)
            # NOTE(review): m may be a float here; recent numpy versions
            # reject float sizes in np.random.randn -- confirm target numpy.
            sq = np.maximum(int(2*np.sqrt(sh)), 10)
            # Shrinking relative spread as more samples accumulate.
            gstd = np.maximum(2.,1.8+x.shape[0]/1000.)/x.shape[0]**0.25
            # Map log-scale dimensions into log space for the estimator.
            x_lg = np.zeros(x.shape)
            for i,(_,_,_,lg) in enumerate(mab.space):
                if lg:
                    x_lg[:,i] = np.log(x[:,i])
                else:
                    x_lg[:,i] = x[:,i]
            lstd = [std for (_,_,std,_) in mab.space]
            # Gaussian cloud of candidate arms, scaled per-dimension by std.
            rs_std = np.random.randn(m, len(mab.space))*lstd
            if (x.shape[0] < 300):
                # Few samples: centre candidates on the configured centres.
                xc_ = [[(np.log(i[1]) if i[3] else i[1]) for i in mab.space]]
            else:
                # Many samples: centre candidates on the empirical mean.
                xc_ = np.mean(x_lg, axis=0)[np.newaxis,:]
            x_lg_ = rs_std + xc_
            # Predict the reward at each candidate from its nearest observed
            # arms; rewards are normalised (distances are std-scaled).
            g = Gaussian_Average_KNN_Estimator(Train_Test([(x_lg/lstd,y),(x_lg_/lstd,0)]), k=sq)
            mi = g.test()
            mi = mi/np.std(mi)*0.3
            # UCB-style exploration bonus: candidates far from observed data
            # (small weight) get a larger bonus subtracted, and argmin below
            # therefore prefers them.
            coef = self.exploitation_over_exploration_ratio
            weights = coef/np.mean(g.sorted_distance[:,:3]**2, axis=1)
            mi -= np.sqrt(2*np.log(sh)/weights)
            mi = tuple(x_lg_[np.argmin(mi),:])
        for i,(p,c,std,lg) in enumerate(mab.space):
            cc = c
            sstd = std
            if (mi is not None):
                # Exploitation: centre on the chosen candidate with zero
                # additional jitter (sstd multiplied by 0.).
                if lg:
                    cc = np.exp(mi[i])
                else:
                    cc = mi[i]
                sstd = std*gstd*0.
            # NOTE(review): np.log(cc) is recorded even for non-log
            # dimensions, and fails for cc <= 0 -- confirm intended.
            self.all_centers.append(np.log(cc))
            if lg:
                r = np.exp(np.random.randn()*sstd + np.log(cc))
            else:
                r = np.random.randn()*sstd + cc
            i_arm.append(r)
        return tuple(i_arm)
class CharacterConfig(models.Model): <NEW_LINE> <INDENT> character = models.OneToOneField(Character, unique=True, primary_key=True, related_name='config') <NEW_LINE> is_public = models.BooleanField(default=False) <NEW_LINE> show_implants = models.BooleanField(default=False) <NEW_LINE> show_skill_queue = models.BooleanField(default=False) <NEW_LINE> show_standings = models.BooleanField(default=False) <NEW_LINE> show_wallet = models.BooleanField(default=False) <NEW_LINE> anon_key = models.CharField(max_length=16, default='') <NEW_LINE> class Meta: <NEW_LINE> <INDENT> app_label = 'thing' <NEW_LINE> <DEDENT> def __unicode__(self): <NEW_LINE> <INDENT> return self.character.name | Character configuration information | 62598fd25fdd1c0f98e5e424 |
class FiniteStateMachineLoggingTests(TestCase):
    """Tests for logging behavior of the L{IFiniteStateMachine} returned by
    L{constructFiniteStateMachine}.
    """

    def test_logger(self):
        # A machine constructed without an explicit logger gets a default
        # eliot ``Logger`` instance.
        fsm = constructFiniteStateMachine(
            Input, Output, MoreState, TRANSITIONS, MoreState.amber,
            [Gravenstein], {Output.aardvark: IFood},
            MethodSuffixOutputer(AnimalWorld([])))
        self.assertIsInstance(fsm.logger, Logger)

    @validateLogging(None)
    def test_loggerOverride(self, logger):
        # A logger passed to the constructor is used as-is.
        fsm = constructFiniteStateMachine(
            Input, Output, MoreState, TRANSITIONS, MoreState.amber,
            [Gravenstein], {Output.aardvark: IFood},
            MethodSuffixOutputer(AnimalWorld([])), logger)
        self.assertIs(logger, fsm.logger)

    @validateLogging(None)
    def test_initializationLogging(self, logger):
        # Construction starts an "fsm:initialize" action recording the
        # machine's identifier and initial state.
        constructFiniteStateMachine(
            Input, Output, MoreState, TRANSITIONS, MoreState.amber,
            [Gravenstein], {Output.aardvark: IFood},
            MethodSuffixOutputer(AnimalWorld([])), logger)
        self.assertTrue(
            issuperset(logger.messages[0], {
                u"fsm_identifier": u"<AnimalWorld>",
                u"fsm_state": u"<MoreState=amber>",
                u"action_status": u"started",
                u"action_type": u"fsm:initialize",
            }))

    @validateLogging(None)
    def test_terminalLogging(self, logger):
        # Reaching a terminal state finishes the "fsm:initialize" action,
        # logging the terminal state.  Message index 3 is the action-end
        # message for this transition sequence.
        fsm = constructFiniteStateMachine(
            Input, Output, MoreState, TRANSITIONS, MoreState.amber,
            [Gravenstein], {Output.aardvark: IFood},
            MethodSuffixOutputer(AnimalWorld([])), logger)
        fsm.receive(Gravenstein())
        self.assertTrue(
            issuperset(logger.messages[3], {
                u"fsm_terminal_state": u"<MoreState=blue>",
                u"action_type": u"fsm:initialize",
                u"action_status": u"succeeded",
                u"task_uuid": logger.messages[0][u"task_uuid"],
                u"task_level": u"/",
            }))

    @validateLogging(None)
    def test_noRepeatedTerminalLogging(self, logger):
        # With a self-transition on the terminal state, receiving further
        # inputs must not emit another "fsm:initialize" completion message.
        transitions = TRANSITIONS.addTransition(
            MoreState.blue, Input.apple, [], MoreState.blue)
        fsm = constructFiniteStateMachine(
            Input, Output, MoreState, transitions, MoreState.amber,
            [Gravenstein], {Output.aardvark: IFood},
            MethodSuffixOutputer(AnimalWorld([])), logger)
        fsm.receive(Gravenstein())
        howMany = len(logger.messages)
        fsm.receive(Gravenstein())
        self.assertEqual([], [
            msg for msg in logger.messages[howMany:]
            if msg[u"action_type"] == u"fsm:initialize"])
class Sunacoop(CachedModel):
    """Sunacoop registration record.

    Holds the National Superintendency of Cooperatives (Sunacoop) registry
    entry for each tourism service provider (PST) that is a legal entity or
    a cooperative.  (Translated from the original Spanish docstring.)
    """

    # The tourism service provider this registration belongs to.
    pst = models.ForeignKey('Pst')
    # Registry number assigned by Sunacoop (may be absent).
    numero = models.IntegerField(null=True)
    # Registration date.
    fecha = models.DateField(blank=True)
    # Supporting document; restricted to PDF with a size cap.
    archivo_comprobante = ContentTypeRestrictedFileField(
        upload_to=RUTA_DOCUMENTOS,
        content_types='application/pdf',
        blank=True,
        max_upload_size=TAMANO_MAXIMO_ARCHIVO
    )

    @classmethod
    def create(cls, data):
        """Build an unsaved Sunacoop instance from a Storage-like object.

        Copies pst, numero, fecha and archivo_comprobante from ``data``.
        Raises NameError when ``data`` is not a Storage instance.
        NOTE(review): TypeError would be the conventional exception here,
        but changing it could break existing callers.
        """
        if isinstance(data, Storage):
            obj = cls()
            obj.pst = data.pst
            obj.numero = data.numero
            obj.fecha = data.fecha
            obj.archivo_comprobante = data.archivo_comprobante
            return obj
        else:
            raise NameError('Error de Tipo. Los parametros enviados no son correctos')
class Semester:
    """Represents a semester in which the university offers a course.

    semester = 1 -> First semester
    semester = 2 -> Second semester
    semester = 3 -> Summer semester
    semester > 3 -> Other (eg. trimesters etc.)

    Semesters order chronologically: first by year, then by semester number.
    Comparing against a non-Semester raises TypeError; equality against a
    non-Semester is simply False.
    """

    def __init__(self, year: int, semester: int):
        self.year = year
        self.semester = semester

    def _key(self):
        # (year, semester) tuples compare lexicographically, which matches
        # chronological ordering of semesters.
        return (self.year, self.semester)

    def __lt__(self, other):
        if not isinstance(other, Semester):
            raise TypeError("Cannot compare Semester to a non Semester type.")
        return self._key() < other._key()

    def __le__(self, other):
        if not isinstance(other, Semester):
            raise TypeError("Cannot compare Semester to a non Semester type.")
        return self._key() <= other._key()

    def __eq__(self, other):
        if isinstance(other, Semester):
            return self._key() == other._key()
        return False

    def __hash__(self):
        # BUG FIX: defining __eq__ without __hash__ made instances
        # unhashable in Python 3; hash on the same key used for equality so
        # Semesters can be set members and dict keys.
        return hash(self._key())

    def __repr__(self):
        return str(self.year) + " Sem: " + str(self.semester)
class SearchQueryBuilder(object):
    """Builder for advanced Clarifai image-search queries.

    This builder is for advanced search use ONLY.  For simple concept or
    similarity searches prefer the dedicated helpers
    ``search_by_annotated_concepts``, ``search_by_predicted_concepts``,
    ``search_by_image`` or ``search_by_metadata``.

    Currently only a list of query terms combined with AND is supported, and
    InputSearchTerm / OutputSearchTerm are the only accepted term types.

    Examples:
      >>> qb = SearchQueryBuilder()
      >>> qb.add_term(term1)
      >>> qb.add_term(term2)
      >>> app.inputs.search(qb)

      >>> # for search over translated output concepts
      >>> qb = SearchQueryBuilder(language='zh')
      >>> qb.add_term(term1)
      >>> qb.add_term(term2)
      >>> app.inputs.search(qb)
    """

    def __init__(self, language=None):
        self.terms = []
        self.language = language

    def add_term(self, term):
        """Append one search term; only Input/Output search terms are valid."""
        if not isinstance(term, (InputSearchTerm, OutputSearchTerm)):
            raise UserError('first level search term could be only InputSearchTerm, OutputSearchTerm')
        self.terms.append(term)

    def dict(self):
        """Render the query as the JSON-serialisable dict the API expects."""
        payload = {"ands": [term.dict() for term in self.terms]}
        if self.language is not None:
            payload['language'] = self.language
        return payload
class ColorGame(Base):
    """Represents a color game that is stored in the database.

    A game is linked to a pattern that is the correct solution to the game.
    """
    __tablename__ = 'game'

    id = Column(Integer, primary_key=True)
    # True once the game has been finished.
    done = Column(Boolean, default=False)
    # Foreign key to the pattern that is this game's correct solution.
    pattern_id = Column(Integer, ForeignKey('pattern.id'))
    # Relationship to the solution pattern; the backref exposes the games
    # for a pattern as ``pattern.game``, ordered by game id.
    pattern = relationship('ColorPattern', backref=backref('game', order_by=id))
class Fetcher: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.adapter = HTTPAdapter(max_retries=5) <NEW_LINE> self.fetchers = [] <NEW_LINE> <DEDENT> def fetch(self) -> dict: <NEW_LINE> <INDENT> fetcher_results = [fetcher.fetch() for fetcher in self.fetchers] <NEW_LINE> results = {key: val for result_dct in fetcher_results for key, val in result_dct.items()} <NEW_LINE> return results <NEW_LINE> <DEDENT> def register(self, fetcher_instance): <NEW_LINE> <INDENT> self.fetchers.append(fetcher_instance) | All data fetchers inherit from this class. | 62598fd2fbf16365ca794558 |
class Meta: <NEW_LINE> <INDENT> abstract = True | SettingsMixin Meta class | 62598fd2bf627c535bcb1949 |
class APIRequestError(Exception): <NEW_LINE> <INDENT> pass | Exception for handling error during HTTP requests | 62598fd3656771135c489b0e |
class UpdateCacheMiddleware(object):
    """Response-phase cache middleware that updates the cache if the response is
    cacheable.

    Must be used as part of the two-part update/fetch cache middleware.
    UpdateCacheMiddleware must be the first piece of middleware in
    MIDDLEWARE_CLASSES so that it'll get called last during the response phase.
    """

    def __init__(self):
        self.cache_timeout = settings.CACHE_MIDDLEWARE_SECONDS
        self.key_prefix = settings.CACHE_MIDDLEWARE_KEY_PREFIX
        self.cache_anonymous_only = getattr(settings, 'CACHE_MIDDLEWARE_ANONYMOUS_ONLY', False)
        self.cache = get_cache(settings.CACHE_MIDDLEWARE_ALIAS)

    def process_response(self, request, response):
        """Store a successful, cacheable response in the cache.

        Skips responses the request-phase middleware marked as
        non-cacheable, non-200 responses, and responses whose max-age
        forbids caching.
        """
        if not hasattr(request, '_cache_update_cache') or not request._cache_update_cache:
            # The request-phase middleware decided this request was not cacheable.
            return response
        if not response.status_code == 200:
            return response
        # Prefer the "max-age" from the Cache-Control header over the
        # configured default timeout.
        timeout = get_max_age(response)
        if timeout is None:  # BUG FIX: idiom -- was "timeout == None"
            timeout = self.cache_timeout
        elif timeout == 0:
            # max-age of 0 forbids caching altogether.
            return response
        patch_response_headers(response, timeout)
        if timeout:
            cache_key = learn_cache_key(request, response, timeout, self.key_prefix)
            self.cache.set(cache_key, response, timeout)
        return response
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.