code
string
signature
string
docstring
string
loss_without_docstring
float64
loss_with_docstring
float64
factor
float64
return "freshmen" if (self._number and self._number == 9) else "{}s".format(self._name) if self._name else ""
def name_plural(self)
Return the grade's plural name (e.g. freshmen)
12.552982
6.805085
1.844647
def in_admin_group(user): return user.is_authenticated and user.has_admin_permission(group) return user_passes_test(in_admin_group)
def admin_required(group)
Decorator that requires the user to be in a certain admin group. For example, @admin_required("polls") would check whether a user is in the "admin_polls" group or in the "admin_all" group.
3.869001
3.702536
1.04496
# change this to not use other_phones num_phones = len(user.phones.all() or []) num_emails = len(user.emails.all() or []) num_websites = len(user.websites.all() or []) personal_info = {} for i in range(num_phones): personal_info["phone_{}".format(i)] = user.phones.all()[i] for i in range(num_emails): personal_info["email_{}".format(i)] = user.emails.all()[i] for i in range(num_websites): personal_info["website_{}".format(i)] = user.websites.all()[i] num_fields = {"phones": num_phones, "emails": num_emails, "websites": num_websites} return personal_info, num_fields
def get_personal_info(user)
Get a user's personal info attributes to pass as an initial value to a PersonalInformationForm.
2.17402
2.138873
1.016432
# FIXME: remove this hardcoded junk preferred_pic = {"preferred_photo": "AUTO"} if user.preferred_photo: preferred_pic["preferred_photo"] = user.preferred_photo.grade_number return preferred_pic
def get_preferred_pic(user)
Get a user's preferred picture attributes to pass as an initial value to a PreferredPictureForm.
7.690824
7.008366
1.097378
privacy_options = {} for ptype in user.permissions: for field in user.permissions[ptype]: if ptype == "self": privacy_options["{}-{}".format(field, ptype)] = user.permissions[ptype][field] else: privacy_options[field] = user.permissions[ptype][field] return privacy_options
def get_privacy_options(user)
Get a user's privacy options to pass as an initial value to a PrivacyOptionsForm.
2.887494
2.862257
1.008817
notification_options = {} notification_options["receive_news_emails"] = user.receive_news_emails notification_options["receive_eighth_emails"] = user.receive_eighth_emails try: notification_options["primary_email"] = user.primary_email except Email.DoesNotExist: user.primary_email = None user.save() notification_options["primary_email"] = None return notification_options
def get_notification_options(user)
Get a user's notification options to pass as an initial value to a NotificationOptionsForm.
2.659305
2.683732
0.990898
user = request.user if request.method == "POST": logger.debug(dict(request.POST)) phone_formset, email_formset, website_formset, errors = save_personal_info(request, user) if user.is_student: preferred_pic_form = save_preferred_pic(request, user) bus_route_form = save_bus_route(request, user) else: preferred_pic_form = None bus_route_form = None privacy_options_form = save_privacy_options(request, user) notification_options_form = save_notification_options(request, user) for error in errors: messages.error(request, error) try: save_gcm_options(request, user) except AttributeError: pass return redirect("preferences") else: phone_formset = PhoneFormset(instance=user, prefix='pf') email_formset = EmailFormset(instance=user, prefix='ef') website_formset = WebsiteFormset(instance=user, prefix='wf') if user.is_student: preferred_pic = get_preferred_pic(user) bus_route = get_bus_route(user) logger.debug(preferred_pic) preferred_pic_form = PreferredPictureForm(user, initial=preferred_pic) bus_route_form = BusRouteForm(user, initial=bus_route) else: bus_route_form = None preferred_pic = None preferred_pic_form = None privacy_options = get_privacy_options(user) logger.debug(privacy_options) privacy_options_form = PrivacyOptionsForm(user, initial=privacy_options) notification_options = get_notification_options(user) logger.debug(notification_options) notification_options_form = NotificationOptionsForm(user, initial=notification_options) context = { "phone_formset": phone_formset, "email_formset": email_formset, "website_formset": website_formset, "preferred_pic_form": preferred_pic_form, "privacy_options_form": privacy_options_form, "notification_options_form": notification_options_form, "bus_route_form": bus_route_form if settings.ENABLE_BUS_APP else None } return render(request, "preferences/preferences.html", context)
def preferences_view(request)
View and process updates to the preferences page.
1.951962
1.934681
1.008932
if "user" in request.GET: user = User.objects.user_with_ion_id(request.GET.get("user")) elif "student_id" in request.GET: user = User.objects.user_with_student_id(request.GET.get("student_id")) else: user = request.user if not user: messages.error(request, "Invalid user.") user = request.user if user.is_eighthoffice: user = None if user: if request.method == "POST": privacy_options_form = save_privacy_options(request, user) else: privacy_options = get_privacy_options(user) privacy_options_form = PrivacyOptionsForm(user, initial=privacy_options) context = {"privacy_options_form": privacy_options_form, "profile_user": user} else: context = {"profile_user": user} return render(request, "preferences/privacy_options.html", context)
def privacy_options_view(request)
View and edit privacy options for a user.
2.720208
2.670377
1.018661
app = AndroidApplication.instance() r = app.create_future() #: Initiate a scan pkg = BarcodePackage.instance() pkg.setBarcodeResultListener(pkg.getId()) pkg.onBarcodeResult.connect(r.set_result) intent = cls(app) if formats: intent.setDesiredBarcodeFormats(formats) if camera != -1: intent.setCameraId(camera) intent.initiateScan() return r
def scan(cls, formats=ALL_CODE_TYPES, camera=-1)
Shortcut only one at a time will work...
8.581172
8.436328
1.017169
super(AndroidBarcodeView, self).init_widget() d = self.declaration #: Observe activity state changes app = self.get_context() app.observe('state', self.on_activity_lifecycle_changed) if d.active: self.set_active(d.active) if d.light: self.set_light(d.light) self.widget.barcodeResult.connect(self.on_barcode_result) if d.scanning: self.set_scanning(d.scanning)
def init_widget(self)
Initialize the underlying widget.
4.775822
4.477264
1.066683
d = self.declaration if d.active: if change['value'] == 'paused': self.widget.pause(now=True) elif change['value'] == 'resumed': self.widget.resume()
def on_activity_lifecycle_changed(self, change)
If the app pauses without pausing the barcode scanner the camera can't be reopened. So we must do it here.
5.190529
5.012053
1.035609
if self.widget: self.set_active(False) super(AndroidBarcodeView, self).destroy()
def destroy(self)
Cleanup the activty lifecycle listener
11.203561
9.870759
1.135025
headers = {'Accept': 'application/json'} response = requests.get(urljoin(INAT_NODE_API_BASE_URL, endpoint), params, headers=headers, **kwargs) return response
def make_inaturalist_api_get_call(endpoint: str, params: Dict, **kwargs) -> requests.Response
Make an API call to iNaturalist. endpoint is a string such as 'observations' !! do not put / in front method: 'GET', 'HEAD', 'POST', 'PUT', 'PATCH', 'DELETE' kwargs are passed to requests.request Returns a requests.Response object
3.774199
4.501555
0.838421
r = get_observations(params={'id': observation_id}) if r['results']: return r['results'][0] raise ObservationNotFound()
def get_observation(observation_id: int) -> Dict[str, Any]
Get details about an observation. :param observation_id: :returns: a dict with details on the observation :raises: ObservationNotFound
5.853145
5.693261
1.028083
r = make_inaturalist_api_get_call('observations', params=params) return r.json()
def get_observations(params: Dict) -> Dict[str, Any]
Search observations, see: http://api.inaturalist.org/v1/docs/#!/Observations/get_observations. Returns the parsed JSON returned by iNaturalist (observations in r['results'], a list of dicts)
10.023524
6.719895
1.491619
# According to the doc: "The large size of the observations index prevents us from supporting the page parameter # when retrieving records from large result sets. If you need to retrieve large numbers of records, use the # per_page and id_above or id_below parameters instead. results = [] # type: List[Dict[str, Any]] id_above = 0 while True: iteration_params = merge_two_dicts(params, { 'order_by': 'id', 'order': 'asc', 'per_page': PER_PAGE_RESULTS, 'id_above': id_above }) page_obs = get_observations(params=iteration_params) results = results + page_obs['results'] if page_obs['total_results'] <= PER_PAGE_RESULTS: return results sleep(THROTTLING_DELAY) id_above = results[-1]['id']
def get_all_observations(params: Dict) -> List[Dict[str, Any]]
Like get_observations() but handles pagination so you get all the results in one shot. Some params will be overwritten: order_by, order, per_page, id_above (do NOT specify page when using this). Returns a list of dicts (one entry per observation)
4.811954
4.578864
1.050906
payload = { 'q': search_query, 'page': page } # type: Dict[str, Union[int, str]] response = requests.get("{base_url}/observation_fields.json".format(base_url=INAT_BASE_URL), params=payload) return response.json()
def get_observation_fields(search_query: str="", page: int=1) -> List[Dict[str, Any]]
Search the (globally available) observation :param search_query: :param page: :return:
3.316757
3.712963
0.893291
results = [] # type: List[Dict[str, Any]] page = 1 while True: r = get_observation_fields(search_query=search_query, page=page) if not r: return results results = results + r page = page + 1 sleep(THROTTLING_DELAY)
def get_all_observation_fields(search_query: str="") -> List[Dict[str, Any]]
Like get_observation_fields(), but handles pagination for you. :param search_query: a string to search
2.905384
3.027065
0.959802
# TODO: Also implement a put_or_update_observation_field_values() that deletes then recreates the field_value? # TODO: Write example use in docstring. # TODO: Return some meaningful exception if it fails because the field is already set. # TODO: Also show in example to obtain the observation_field_id? # TODO: What happens when parameters are invalid # TODO: It appears pushing the same value/pair twice in a row (but deleting it meanwhile via the UI)... # TODO: ...triggers an error 404 the second time (report to iNaturalist?) payload = { 'observation_field_value': { 'observation_id': observation_id, 'observation_field_id': observation_field_id, 'value': value } } response = requests.put("{base_url}/observation_field_values/{id}".format(base_url=INAT_BASE_URL, id=observation_field_id), headers=_build_auth_header(access_token), json=payload) response.raise_for_status() return response.json()
def put_observation_field_values(observation_id: int, observation_field_id: int, value: Any, access_token: str) -> Dict[str, Any]
Sets an observation field (value) on an observation. :param observation_id: :param observation_field_id: :param value :param access_token: access_token: the access token, as returned by :func:`get_access_token()` :returns: iNaturalist's response as a dict, for example: {'id': 31, 'observation_id': 18166477, 'observation_field_id': 31, 'value': 'fouraging', 'created_at': '2012-09-29T11:05:44.935+02:00', 'updated_at': '2018-11-13T10:49:47.985+01:00', 'user_id': 1, 'updater_id': 1263313, 'uuid': 'b404b654-1bf0-4299-9288-52eeda7ac0db', 'created_at_utc': '2012-09-29T09:05:44.935Z', 'updated_at_utc': '2018-11-13T09:49:47.985Z'} Will fail if this observation_field is already set for this observation.
6.928912
7.068284
0.980282
payload = { 'client_id': app_id, 'client_secret': app_secret, 'grant_type': "password", 'username': username, 'password': password } response = requests.post("{base_url}/oauth/token".format(base_url=INAT_BASE_URL), payload) try: return response.json()["access_token"] except KeyError: raise AuthenticationError("Authentication error, please check credentials.")
def get_access_token(username: str, password: str, app_id: str, app_secret: str) -> str
Get an access token using the user's iNaturalist username and password. (you still need an iNaturalist app to do this) :param username: :param password: :param app_id: :param app_secret: :return: the access token, example use: headers = {"Authorization": "Bearer %s" % access_token}
2.640991
2.409868
1.095907
data = {'observation_photo[observation_id]': observation_id} file_data = {'file': file_object} response = requests.post(url="{base_url}/observation_photos".format(base_url=INAT_BASE_URL), headers=_build_auth_header(access_token), data=data, files=file_data) return response.json()
def add_photo_to_observation(observation_id: int, file_object: BinaryIO, access_token: str)
Upload a picture and assign it to an existing observation. :param observation_id: the ID of the observation :param file_object: a file-like object for the picture. Example: open('/Users/nicolasnoe/vespa.jpg', 'rb') :param access_token: the access token, as returned by :func:`get_access_token()`
3.276493
3.41202
0.960279
response = requests.post(url="{base_url}/observations.json".format(base_url=INAT_BASE_URL), json=params, headers=_build_auth_header(access_token)) response.raise_for_status() return response.json()
def create_observations(params: Dict[str, Dict[str, Any]], access_token: str) -> List[Dict[str, Any]]
Create a single or several (if passed an array) observations). :param params: :param access_token: the access token, as returned by :func:`get_access_token()` :return: iNaturalist's JSON response, as a Python object :raise: requests.HTTPError, if the call is not successful. iNaturalist returns an error 422 (unprocessable entity) if it rejects the observation data (for example an observation date in the future or a latitude > 90. In that case the exception's `response` attribute give details about the errors. allowed params: see https://www.inaturalist.org/pages/api+reference#post-observations Example: params = {'observation': {'species_guess': 'Pieris rapae'}, } TODO investigate: according to the doc, we should be able to pass multiple observations (in an array, and in renaming observation to observations, but as far as I saw they are not created (while a status of 200 is returned)
3.322124
2.925768
1.135471
response = requests.put(url="{base_url}/observations/{id}.json".format(base_url=INAT_BASE_URL, id=observation_id), json=params, headers=_build_auth_header(access_token)) response.raise_for_status() return response.json()
def update_observation(observation_id: int, params: Dict[str, Any], access_token: str) -> List[Dict[str, Any]]
Update a single observation. See https://www.inaturalist.org/pages/api+reference#put-observations-id :param observation_id: the ID of the observation to update :param params: to be passed to iNaturalist API :param access_token: the access token, as returned by :func:`get_access_token()` :return: iNaturalist's JSON response, as a Python object :raise: requests.HTTPError, if the call is not successful. iNaturalist returns an error 410 if the observation doesn't exists or belongs to another user (as of November 2018).
2.896599
2.427588
1.1932
headers = _build_auth_header(access_token) headers['Content-type'] = 'application/json' response = requests.delete(url="{base_url}/observations/{id}.json".format(base_url=INAT_BASE_URL, id=observation_id), headers=headers) response.raise_for_status() # According to iNaturalist documentation, proper JSON should be returned. It seems however that the response is # currently empty (while the requests succeed), so you may receive a JSONDecode exception. # TODO: report to iNaturalist team if the issue persists return response.json()
def delete_observation(observation_id: int, access_token: str) -> List[Dict[str, Any]]
Delete an observation. :param observation_id: :param access_token: :return:
6.388655
6.482351
0.985546
self.kwargs['instantiate'] = True self.kwargs['parent'] = parent instance = self.cls(*self.args, **self.kwargs) instance._field_seqno = self._field_seqno return instance
def create_instance(self, parent)
Create an instance based off this placeholder with some parent
4.948251
4.685879
1.055992
if issubclass(placeholder.cls, FieldAccessor): return placeholder.cls.access(self._parent, placeholder) return self._parent.lookup_field_by_placeholder(placeholder)
def _ph2f(self, placeholder)
Lookup a field given a field placeholder
7.631084
6.153845
1.240051
recorded_checksum = self.field.getval() # convert negative offset to positive if offset < 0: offset += len(data) # replace checksum region with zero data = b''.join((data[:offset], b"\x00" * self.bytes_required, data[offset + self.bytes_required:])) actual_checksum = self.algo(data[self.start:self.end]) if recorded_checksum != actual_checksum: raise SuitcaseChecksumException( "recorded checksum %r did not match actual %r. full data: %r", recorded_checksum, actual_checksum, data)
def validate(self, data, offset)
Raises :class:`SuitcaseChecksumException` if not valid
5.470691
4.426862
1.235794
self.field.setval(self.algo(data[self.start:self.end])) sio = BytesIO() self.field.pack(sio) return sio.getvalue()
def packed_checksum(self, data)
Given the data of the entire packet return the checksum bytes
6.008844
6.008597
1.000041
try: return parent.lookup_field_by_placeholder(placeholder) except KeyError: field_placeholder, name = placeholder.args # Find the field whose attribute is being accessed. field = parent.lookup_field_by_placeholder(field_placeholder) # Instantiate the real FieldAccessor that wraps the attribute. accessor = cls(field, name, parent=parent, instantiate=True) # Keep this instance alive, and reuse it for future references. parent._placeholder_to_field[placeholder] = accessor return accessor
def access(cls, parent, placeholder)
Resolve the deferred field attribute access. :param cls: the FieldAccessor class :param parent: owning structure of the field being accessed :param placeholder: FieldPlaceholder object which holds our info :returns: FieldAccessor instance for that field's attribute
6.287016
5.387796
1.1669
tab = CRC16_KERMIT_TAB # minor optimization (now in locals) for byte in six.iterbytes(data): tbl_idx = (crc ^ byte) & 0xff crc = (tab[tbl_idx] ^ (crc >> 8)) & 0xffff return crc & 0xffff
def crc16_kermit(data, crc=0)
Calculate/Update the Kermit CRC16 checksum for some data
4.600915
4.759445
0.966691
tab = CRC16_CCITT_TAB # minor optimization (now in locals) for byte in six.iterbytes(data): crc = (((crc << 8) & 0xff00) ^ tab[((crc >> 8) & 0xff) ^ byte]) return crc & 0xffff
def crc16_ccitt(data, crc=0)
Calculate the crc16 ccitt checksum of some data A starting crc value may be specified if desired. The input data is expected to be a sequence of bytes (string) and the output is an integer in the range (0, 0xFFFF). No packing is done to the resultant crc value. To check the value a checksum, just pass in the data byes and checksum value. If the data matches the checksum, then the resultant checksum from this function should be 0.
4.225977
4.504866
0.938092
self._available_bytes += new_bytes callbacks = [] try: while True: packet = six.next(self._packet_generator) if packet is None: break else: callbacks.append(partial(self.packet_callback, packet)) except Exception: # When we receive an exception, we assume that the _available_bytes # has already been updated and we just choked on a field. That # is, unless the number of _available_bytes has not changed. In # that case, we reset the buffered entirely # TODO: black hole may not be the best. What should the logging # behavior be? self.reset() # callbacks are partials that are bound to packet already. We do # this in order to separate out parsing activity (and error handling) # from the execution of callbacks. Callbacks should not in any way # rely on the parsers position in the byte stream. for callback in callbacks: callback()
def feed(self, new_bytes)
Feed a new set of bytes into the protocol handler These bytes will be immediately fed into the parsing state machine and if new packets are found, the ``packet_callback`` will be executed with the fully-formed message. :param new_bytes: The new bytes to be fed into the stream protocol handler.
9.244908
8.850583
1.044554
crc_fields = [] greedy_field = None # go through the fields from first to last. If we hit a greedy # field, break out of the loop for i, (name, field) in enumerate(self.ordered_fields): if isinstance(field, CRCField): crc_fields.append((field, stream.tell())) length = field.bytes_required if field.is_substructure(): remaining_data = stream.getvalue()[stream.tell():] returned_stream = field.unpack(remaining_data, trailing=True) if returned_stream is not None: consumed = returned_stream.tell() else: consumed = 0 # We need to fast forward by as much as was consumed by the structure stream.seek(stream.tell() + consumed) continue elif length is None: if isinstance(field, FieldArray) and field.num_elements is not None: # Read the data greedily now, and we'll backtrack after enough elements have been read. data = stream.read() else: greedy_field = field break else: data = stream.read(length) if len(data) != length: raise SuitcaseParseError("While attempting to parse field " "%r we tried to read %s bytes but " "we were only able to read %s." 
% (name, length, len(data))) try: unused_data = field.unpack(data) stream.seek(-len(unused_data or ""), os.SEEK_CUR) except SuitcaseException: raise # just re-raise these except Exception: exc_type = SuitcaseParseError _, exc_value, exc_traceback = sys.exc_info() exc_value = exc_type("Unexpected exception while unpacking field %r: %s" % (name, str(exc_value))) six.reraise(exc_type, exc_value, exc_traceback) if greedy_field is not None: remaining_data = stream.read() inverted_stream = BytesIO(remaining_data[::-1]) # work through the remaining fields in reverse order in order # to narrow in on the right bytes for the greedy field reversed_remaining_fields = self.ordered_fields[(i + 1):][::-1] for _name, field in reversed_remaining_fields: if isinstance(field, CRCField): crc_fields.append( (field, -inverted_stream.tell() - field.bytes_required)) length = field.bytes_required data = inverted_stream.read(length)[::-1] if len(data) != length: raise SuitcaseParseError("While attempting to parse field " "%r we tried to read %s bytes but " "we were only able to read %s." % (_name, length, len(data))) try: field.unpack(data) except SuitcaseException: raise # just re-raise these except Exception: exc_type = SuitcaseParseError _, exc_value, exc_traceback = sys.exc_info() exc_value = exc_type("Unexpected exception while unpacking field %r: %s" % (name, str(exc_value))) six.reraise(exc_type, exc_value, exc_traceback) greedy_data_chunk = inverted_stream.read()[::-1] greedy_field.unpack(greedy_data_chunk) if crc_fields: data = stream.getvalue() for (crc_field, offset) in crc_fields: crc_field.validate(data, offset)
def unpack_stream(self, stream)
Unpack bytes from a stream of data field-by-field In the most basic case, the basic algorithm here is as follows:: for _name, field in self.ordered_fields: length = field.bytes_required data = stream.read(length) field.unpack(data) This logic is complicated somewhat by the handling of variable length greedy fields (there may only be one). The logic when we see a greedy field (bytes_required returns None) in the stream is to pivot and parse the remaining fields starting from the last and moving through the stream backwards. There is also some special logic present for dealing with checksum fields.
2.885664
2.668988
1.081183
value = super(URL, self).validate(instance, value) parsed_url = urlparse(value) if not parsed_url.scheme or not parsed_url.netloc: self.error(instance, value, extra='URL needs scheme and netloc.') parse_result = ParseResult( scheme=parsed_url.scheme, netloc=parsed_url.netloc, path=parsed_url.path, params='' if self.remove_parameters else parsed_url.params, query='' if self.remove_parameters else parsed_url.query, fragment='' if self.remove_fragment else parsed_url.fragment, ) parse_result = parse_result.geturl() return parse_result
def validate(self, instance, value)
Check if input is valid URL
2.255178
2.182803
1.033157
json_dict = super(Singleton, self).serialize( include_class=include_class, save_dynamic=save_dynamic, **kwargs ) json_dict['_singleton_id'] = self._singleton_id return json_dict
def serialize(self, include_class=True, save_dynamic=False, **kwargs)
Serialize Singleton instance to a dictionary. This behaves identically to HasProperties.serialize, except it also saves the identifying name in the dictionary as well.
2.710472
2.497156
1.085423
if not isinstance(value, dict): raise ValueError('HasProperties must deserialize from dictionary') identifier = value.pop('_singleton_id', value.get('name')) if identifier is None: raise ValueError('Singleton classes must contain identifying name') if identifier in cls._SINGLETONS: return cls._SINGLETONS[identifier] value = value.copy() name = value.get('name', None) value.update({'name': identifier}) newinst = super(Singleton, cls).deserialize( value, trusted=trusted, strict=strict, assert_valid=assert_valid, **kwargs ) if name: newinst.name = name return newinst
def deserialize(cls, value, trusted=False, strict=False, assert_valid=False, **kwargs)
Create a Singleton instance from a serialized dictionary. This behaves identically to HasProperties.deserialize, except if the singleton is already found in the singleton registry the existing value is used. .. note:: If property values differ from the existing singleton and the input dictionary, the new values from the input dictionary will be ignored
3.628622
3.408231
1.064664
for name in cls._mutators: #pylint: disable=protected-access if not hasattr(cls, name): continue setattr(cls, name, properties_mutator(cls, name)) for name in cls._operators: #pylint: disable=protected-access if not hasattr(cls, name): continue setattr(cls, name, properties_operator(cls, name)) for name in cls._ioperators: #pylint: disable=protected-access if not hasattr(cls, name): continue setattr(cls, name, properties_mutator(cls, name, True)) return cls
def add_properties_callbacks(cls)
Class decorator to add change notifications to builtin containers
2.374013
2.34528
1.012251
def wrapper(self, *args, **kwargs): if ( getattr(self, '_instance', None) is None or getattr(self, '_name', '') == '' or self is not getattr(self._instance, self._name) ): return getattr(super(cls, self), name)(*args, **kwargs) copy = cls(self) val = getattr(copy, name)(*args, **kwargs) if not ioper: setattr(self._instance, self._name, copy) self._instance = None self._name = '' return val wrapped = getattr(cls, name) wrapper.__name__ = wrapped.__name__ wrapper.__doc__ = wrapped.__doc__ return wrapper
def properties_mutator(cls, name, ioper=False)
Wraps a mutating container method to add HasProperties notifications If the container is not part of a HasProperties instance, behavior is unchanged. However, if it is part of a HasProperties instance the new method calls set, triggering change notifications.
3.006451
2.932717
1.025142
def wrapper(self, *args, **kwargs): output = getattr(super(cls, self), name)(*args, **kwargs) return cls(output) wrapped = getattr(cls, name) wrapper.__name__ = wrapped.__name__ wrapper.__doc__ = wrapped.__doc__ return wrapper
def properties_operator(cls, name)
Wraps a container operator to ensure container class is maintained
3.240168
2.866895
1.130201
container_class = value.__class__ if container_class in OBSERVABLE_REGISTRY: observable_class = OBSERVABLE_REGISTRY[container_class] elif container_class in OBSERVABLE_REGISTRY.values(): observable_class = container_class else: observable_class = add_properties_callbacks( type(container_class)( str('Observable{}'.format(container_class.__name__)), (container_class,), MUTATOR_CATEGORIES, ) ) OBSERVABLE_REGISTRY[container_class] = observable_class value = observable_class(value) value._name = name value._instance = instance return value
def observable_copy(value, name, instance)
Return an observable container for HasProperties notifications This method creates a new container class to allow HasProperties instances to :code:`observe_mutations`. It returns a copy of the input value as this new class. The output class behaves identically to the input value's original class, except when it is used as a property on a HasProperties instance. In that case, it notifies the HasProperties instance of any mutations or operations.
3.284345
3.014899
1.089372
if ( isinstance(value, CLASS_TYPES) and issubclass(value, HasProperties) ): value = Instance('', value) if not isinstance(value, basic.Property): raise TypeError('Contained prop must be a Property instance or ' 'HasProperties class') if value.default is not utils.undefined: warn('Contained prop default ignored: {}'.format(value.default), RuntimeWarning) return value
def validate_prop(value)
Validate Property instance for container items
6.980271
6.253002
1.116307
itext = self.class_info if self.prop.info: itext += ' (each item is {})'.format(self.prop.info) if self.max_length is None and self.min_length is None: return itext if self.max_length is None: lentext = 'length >= {}'.format(self.min_length) elif self.max_length == self.min_length: lentext = 'length of {}'.format(self.min_length) else: lentext = 'length between {mn} and {mx}'.format( mn='0' if self.min_length is None else self.min_length, mx=self.max_length, ) return '{} with {}'.format(itext, lentext)
def info(self)
Supplemental description of the list, with length and type
2.892304
2.761474
1.047377
if not self.coerce and not isinstance(value, self._class_container): self.error(instance, value) if self.coerce and not isinstance(value, CONTAINERS): value = [value] if not isinstance(value, self._class_container): out_class = self._class_container else: out_class = value.__class__ out = [] for val in value: try: out += [self.prop.validate(instance, val)] except ValueError: self.error(instance, val, extra='This item is invalid.') return out_class(out)
def validate(self, instance, value)
Check the class of the container and validate each element This returns a copy of the container to prevent unwanted sharing of pointers.
3.509772
3.356172
1.045766
valid = super(Tuple, self).assert_valid(instance, value) if not valid: return False if value is None: value = instance._get(self.name) if value is None: return True if ( (self.min_length is not None and len(value) < self.min_length) or (self.max_length is not None and len(value) > self.max_length) ): self.error( instance=instance, value=value, extra='(Length is {})'.format(len(value)), ) for val in value: if not self.prop.assert_valid(instance, val): return False return True
def assert_valid(self, instance, value=None)
Check if tuple and contained properties are valid
2.507269
2.286807
1.096406
kwargs.update({'include_class': kwargs.get('include_class', True)}) if self.serializer is not None: return self.serializer(value, **kwargs) if value is None: return None serial_list = [self.prop.serialize(val, **kwargs) for val in value] return serial_list
def serialize(self, value, **kwargs)
Return a serialized copy of the tuple
3.551071
3.375387
1.052049
kwargs.update({'trusted': kwargs.get('trusted', False)}) if self.deserializer is not None: return self.deserializer(value, **kwargs) if value is None: return None output_list = [self.prop.deserialize(val, **kwargs) for val in value] return self._class_container(output_list)
def deserialize(self, value, **kwargs)
Return a deserialized copy of the tuple
4.152282
4.155841
0.999144
def to_json(value, **kwargs):
    """Return a copy of the tuple as a list

    If the tuple contains HasProperties instances, they are serialized.
    """
    out = []
    for item in value:
        if isinstance(item, HasProperties):
            out.append(item.serialize(**kwargs))
        else:
            out.append(item)
    return out
7.401935
4.551094
1.626408
def sphinx_class(self):
    """Redefine sphinx class to point to prop class"""
    # Inject the container description in front of the element class link
    template = self.prop.sphinx_class().replace(
        ':class:`', '{info} of :class:`'
    )
    return template.format(info=self.class_info)
11.099962
10.181638
1.090194
def info(self):
    """Supplemental description of the list, with length and type"""
    text = self.class_info
    key_info = self.key_prop.info
    val_info = self.value_prop.info
    if key_info and val_info:
        text += ' (keys: {}; values: {})'.format(key_info, val_info)
    elif key_info:
        text += ' (keys: {})'.format(key_info)
    elif val_info:
        text += ' (values: {})'.format(val_info)
    return text
2.373452
2.312552
1.026335
def assert_valid(self, instance, value=None):
    """Check if dict and contained properties are valid"""
    if not super(Dictionary, self).assert_valid(instance, value):
        return False
    if value is None:
        value = instance._get(self.name)
    if value is None:
        return True
    key_prop, value_prop = self.key_prop, self.value_prop
    if key_prop or value_prop:
        for key, val in iteritems(value):
            if key_prop:
                key_prop.assert_valid(instance, key)
            if value_prop:
                value_prop.assert_valid(instance, val)
    return True
2.256915
2.104882
1.072229
def serialize(self, value, **kwargs):
    """Return a serialized copy of the dict"""
    kwargs.setdefault('include_class', True)
    if self.serializer is not None:
        return self.serializer(value, **kwargs)
    if value is None:
        return None
    # Serialize pairs first so only dict construction (unhashable keys)
    # can raise the TypeError we translate below.
    pairs = [
        (self.key_prop.serialize(key, **kwargs),
         self.value_prop.serialize(val, **kwargs))
        for key, val in iteritems(value)
    ]
    try:
        return dict(pairs)
    except TypeError as err:
        raise TypeError('Dictionary property {} cannot be serialized - '
                        'keys contain {}'.format(self.name, err))
3.438232
3.211594
1.070569
def deserialize(self, value, **kwargs):
    """Return a deserialized copy of the dict"""
    kwargs.setdefault('trusted', False)
    if self.deserializer is not None:
        return self.deserializer(value, **kwargs)
    if value is None:
        return None
    pairs = [
        (self.key_prop.deserialize(key, **kwargs),
         self.value_prop.deserialize(val, **kwargs))
        for key, val in iteritems(value)
    ]
    try:
        output = dict(pairs)
    except TypeError as err:
        raise TypeError('Dictionary property {} cannot be deserialized - '
                        'keys contain {}'.format(self.name, err))
    return self._class_container(output)
3.783292
3.650862
1.036274
def to_json(value, **kwargs):
    """Return a copy of the dictionary

    If the values are HasProperties instances, they are serialized.
    """
    out = {}
    for key, val in iteritems(value):
        if isinstance(val, HasProperties):
            out[key] = val.serialize(**kwargs)
        else:
            out[key] = val
    return out
5.095697
3.80192
1.340296
def filter_props(has_props_cls, input_dict, include_immutable=True):
    """Split a dictionary based on keys that correspond to Properties

    Returns:

    **(props_dict, others_dict)** - Tuple of two dictionaries. The first
    contains key/value pairs from the input dictionary that correspond
    to the Properties of the input HasProperties class. The second
    contains the remaining key/value pairs.

    **Parameters**:

    * **has_props_cls** - HasProperties class or instance used to filter
      the dictionary
    * **input_dict** - Dictionary to filter
    * **include_immutable** - If True (the default), immutable properties
      (i.e. Properties that inherit from GettableProperty but not
      Property) are included in props_dict. If False, immutable
      properties are excluded from props_dict.

    For example

    .. code::

        class Profile(properties.HasProperties):
            name = properties.String('First and last name')
            age = properties.Integer('Age, years')

        bio_dict = {
            'name': 'Bill',
            'age': 65,
            'hometown': 'Bakersfield',
            'email': 'bill@gmail.com',
        }

        (props, others) = properties.filter_props(Profile, bio_dict)
        assert set(props) == {'name', 'age'}
        assert set(others) == {'hometown', 'email'}
    """
    registry = has_props_cls._props

    def _is_prop_key(key):
        # Mutable Properties are recognized by 'required'/'new_name'
        if key not in registry:
            return False
        if include_immutable:
            return True
        return any(
            hasattr(registry[key], att) for att in ('required', 'new_name')
        )

    props_dict = {k: v for k, v in input_dict.items() if _is_prop_key(k)}
    others_dict = {k: v for k, v in input_dict.items() if k not in props_dict}
    return (props_dict, others_dict)
2.813226
2.977041
0.944974
def default(self):
    """Default value of the property"""
    prop_def = getattr(self, '_default', utils.undefined)
    if prop_def is utils.undefined:
        # Fall back to the first underlying prop that defines a default
        for prop in self.props:
            if prop.default is not utils.undefined:
                prop_def = prop.default
                break
    return prop_def
3.915759
3.733598
1.04879
def _try_prop_method(self, instance, value, method_name):
    """Helper method to perform a method on each of the union props

    This method gathers all errors and returns them at the end if the
    method on each of the props fails.
    """
    messages = []
    for prop in self.props:
        try:
            return getattr(prop, method_name)(instance, value)
        except GENERIC_ERRORS as err:
            if hasattr(err, 'error_tuples'):
                messages.extend(tup.message for tup in err.error_tuples)
    if messages:
        extra = 'Possible explanation:'
        for msg in messages:
            extra += '\n - {}'.format(msg)
    else:
        extra = ''
    self.error(instance, value, extra=extra)
3.85114
3.654415
1.053832
def assert_valid(self, instance, value=None):
    """Check if the Union has a valid value"""
    if not super(Union, self).assert_valid(instance, value):
        return False
    if value is None:
        value = instance._get(self.name)
    if value is None:
        return True
    return self._try_prop_method(instance, value, 'assert_valid')
3.995696
3.525724
1.133298
def serialize(self, value, **kwargs):
    """Return a serialized value

    If no serializer is provided, it uses the serialize method of the
    prop corresponding to the value.
    """
    kwargs.setdefault('include_class', True)
    if self.serializer is not None:
        return self.serializer(value, **kwargs)
    if value is None:
        return None
    # First prop that validates the value wins
    for prop in self.props:
        try:
            prop.validate(None, value)
        except GENERIC_ERRORS:
            continue
        else:
            return prop.serialize(value, **kwargs)
    return self.to_json(value, **kwargs)
3.470629
3.542449
0.979726
def deserialize(self, value, **kwargs):
    """Return a deserialized value

    If no deserializer is provided, it uses the deserialize method of
    the prop corresponding to the value.
    """
    kwargs.setdefault('trusted', False)
    if self.deserializer is not None:
        return self.deserializer(value, **kwargs)
    if value is None:
        return None
    instance_props = [p for p in self.props if isinstance(p, Instance)]
    kwargs = kwargs.copy()
    kwargs.update({
        'strict': kwargs.get('strict') or self.strict_instances,
        'assert_valid': self.strict_instances,
    })
    # A serialized __class__ marker lets us dispatch directly
    if isinstance(value, dict) and value.get('__class__'):
        clsname = value.get('__class__')
        for prop in instance_props:
            if clsname == prop.instance_class.__name__:
                return prop.deserialize(value, **kwargs)
    for prop in self.props:
        try:
            result = prop.deserialize(value, **kwargs)
            prop.validate(None, result)
            return result
        except GENERIC_ERRORS:
            continue
    return self.from_json(value, **kwargs)
3.122062
3.073422
1.015826
def to_json(value, **kwargs):
    """Return value, serialized if value is a HasProperties instance"""
    if not isinstance(value, HasProperties):
        return value
    return value.serialize(**kwargs)
8.919294
3.765685
2.368571
def accept_kwargs(func):
    """Wrap a function that may not accept kwargs so they are accepted

    The output function will always have call signature of
    :code:`func(val, **kwargs)`, whereas the original function may have
    call signatures of :code:`func(val)` or :code:`func(val, **kwargs)`.
    In the case of the former, rather than erroring, kwargs are just
    ignored. This method is called on serializer/deserializer functions;
    these functions always receive kwargs from serialize, but by using
    this, the original functions may simply take a single value.
    """
    def _flexible(val, **kwargs):
        try:
            return func(val, **kwargs)
        except TypeError:
            # func likely does not accept kwargs; retry with val only
            return func(val)
    return _flexible
3.688937
4.344003
0.849202
if ( (prop.min is not None and value < prop.min) or (prop.max is not None and value > prop.max) ): prop.error(instance, value, extra='Not within allowed range.')
def _in_bounds(prop, instance, value)
Checks if the value is in the range (min, max)
3.79356
3.288867
1.153455
def terms(self):
    """Initialization terms and options for Property"""
    return PropertyTerms(
        self.name, self.__class__, self._args, self._kwargs, self.meta
    )
8.98317
6.092323
1.474506
def tag(self, *tag, **kwtags):
    """Tag a Property instance with metadata dictionary"""
    if tag:
        if len(tag) != 1 or not isinstance(tag[0], dict):
            raise TypeError('Tags must be provided as key-word arguments or '
                            'a dictionary')
        self._meta.update(tag[0])
    self._meta.update(kwtags)
    return self
3.481207
3.111923
1.118667
def assert_valid(self, instance, value=None):
    """Returns True if the Property is valid on a HasProperties instance

    Raises a ValueError if the value is invalid.
    """
    if value is None:
        value = instance._get(self.name)
    if value is not None:
        # Round-trip through validate and compare; mismatch means invalid
        if not self.equal(value, self.validate(instance, value)):
            message = 'Invalid value for property: {}: {}'.format(
                self.name, value
            )
            raise ValidationError(message, 'invalid', self.name, instance)
    return True
3.736288
3.627931
1.029868
def equal(self, value_a, value_b):  #pylint: disable=no-self-use
    """Check if two valid Property values are equal

    .. note::

        This method assumes that :code:`None` and
        :code:`properties.undefined` are never passed in as values
    """
    result = value_a == value_b
    # Array-like comparisons (e.g. numpy) return an iterable of bools
    if hasattr(result, '__iter__'):
        return all(result)
    return result
14.475471
140.182343
0.103262
def get_property(self):
    """Establishes access of GettableProperty values"""
    prop = self  # captured so the descriptor can reach the Property

    def fget(self):
        return self._get(prop.name)

    return property(fget=fget, doc=prop.sphinx())
10.256699
8.740591
1.173456
def deserialize(self, value, **kwargs):  #pylint: disable=unused-argument
    """Deserialize input value to valid Property value

    This method uses the Property :code:`deserializer` if available.
    Otherwise, it uses :code:`from_json`. Any keyword arguments are
    passed through to these methods.
    """
    kwargs.setdefault('trusted', False)
    if self.deserializer is not None:
        return self.deserializer(value, **kwargs)
    if value is None:
        return None
    return self.from_json(value, **kwargs)
3.904754
3.300376
1.183124
def error(self, instance, value, error_class=None, extra=''):
    """Generate a :code:`ValueError` for invalid value assignment

    The instance is the containing HasProperties instance, but it may be
    None if the error is raised outside a HasProperties class.
    """
    error_class = error_class or ValidationError
    prefix = 'The {} property'.format(self.__class__.__name__)
    if self.name != '':
        prefix += " '{}'".format(self.name)
    if instance is not None:
        prefix += ' of a {cls} instance'.format(
            cls=instance.__class__.__name__,
        )
    # Truncate very long reprs so the message stays readable
    print_value = repr(value)
    if len(print_value) > 107:
        print_value = '{} ... {}'.format(
            print_value[:50], print_value[-50:]
        )
    message = (
        '{prefix} must be {info}. An invalid value of {val} {vtype} was '
        'specified.{extra}'.format(
            prefix=prefix,
            info=self.info or 'corrected',
            val=print_value,
            vtype=type(value),
            extra=' {}'.format(extra) if extra else '',
        )
    )
    if issubclass(error_class, ValidationError):
        raise error_class(message, 'invalid', self.name, instance)
    raise error_class(message)
3.30059
3.265547
1.010731
def sphinx(self):
    """Generate Sphinx-formatted documentation for the Property"""
    try:
        # Inside IPython, skip the class link entirely
        assert __IPYTHON__
        classdoc = ''
    except (NameError, AssertionError):
        scls = self.sphinx_class()
        classdoc = ' ({})'.format(scls) if scls else ''
    return '**{name}**{cls}: {doc}{info}'.format(
        name=self.name,
        cls=classdoc,
        doc=self.doc,
        info=', {}'.format(self.info) if self.info else '',
    )
4.261243
4.218251
1.010192
def sphinx_class(self):
    """Property class name formatted for Sphinx doc linking"""
    if self.__module__.split('.')[0] == 'properties':
        pref = 'properties'
    else:
        pref = text_type(self.__module__)
    return ':class:`{cls} <{pref}.{cls}>`'.format(
        cls=self.__class__.__name__, pref=pref
    )
5.25286
4.452349
1.179795
def setter(self, func):
    """Register a set function for the DynamicProperty

    This function must take two arguments, self and the new value. Input
    value to the function is validated with prop validation prior to
    execution.
    """
    if not callable(func):
        raise TypeError('setter must be callable function')
    code = getattr(func, '__code__', None)
    if code is not None and code.co_argcount != 2:
        raise TypeError('setter must be a function with two arguments')
    if func.__name__ != self.name:
        raise TypeError('setter function must have same name as getter')
    self._set_func = func
    return self
2.776761
2.954503
0.93984
def deleter(self, func):
    """Register a delete function for the DynamicProperty

    This function may only take one argument, self.
    """
    if not callable(func):
        raise TypeError('deleter must be callable function')
    # co_argcount of 1 means the function takes only *self*; the original
    # error message said "two arguments" which contradicted both this
    # check and the docstring.
    if hasattr(func, '__code__') and func.__code__.co_argcount != 1:
        raise TypeError('deleter must be a function with one argument')
    if func.__name__ != self.name:
        raise TypeError('deleter function must have same name as getter')
    self._del_func = func
    return self
3.116201
3.169741
0.983109
def get_property(self):
    """Establishes the dynamic behavior of Property values"""
    prop = self  # captured for the descriptor closures

    def _getter(self):
        value = prop.func(self)
        if value is None or value is undefined:
            return None
        return prop.validate(self, value)

    def _setter(self, value):
        if prop.set_func is None:
            raise AttributeError('cannot set attribute')
        prop.set_func(self, prop.validate(self, value))

    def _deleter(self):
        if prop.del_func is None:
            raise AttributeError('cannot delete attribute')
        prop.del_func(self)

    return property(fget=_getter, fset=_setter, fdel=_deleter,
                    doc=prop.sphinx())
2.704075
2.529016
1.06922
def assert_valid(self, instance, value=None):
    """Returns True if the Property is valid on a HasProperties instance

    Raises a ValueError if the value is required and not set, not valid,
    not correctly coerced, etc.

    .. note::

        Unlike :code:`validate`, this method requires instance to be
        a HasProperties instance; it cannot be None.
    """
    if value is None:
        value = instance._get(self.name)
    if value is None and self.required:
        message = (
            "The '{name}' property of a {cls} instance is required "
            "and has not been set.".format(
                name=self.name, cls=instance.__class__.__name__
            )
        )
        raise ValidationError(message, 'missing', self.name, instance)
    return super(Property, self).assert_valid(instance, value)
2.830689
2.818432
1.004349
def get_property(self):
    """Establishes access of Property values"""
    prop = self  # captured for the descriptor closures

    def _getter(self):
        return self._get(prop.name)

    def _setter(self, value):
        if value is not undefined:
            value = prop.validate(self, value)
        self._set(prop.name, value)

    def _deleter(self):
        self._set(prop.name, undefined)

    return property(fget=_getter, fset=_setter, fdel=_deleter,
                    doc=prop.sphinx())
3.269409
3.037071
1.076501
def sphinx(self):
    """Basic docstring formatted for Sphinx docs"""
    if callable(self.default):
        dval = self.default()
        dstr = 'new instance of {}'.format(dval.__class__.__name__)
    else:
        dval = self.default
        dstr = '{}'.format(self.default)
    try:
        if dval is None or dval is undefined:
            dstr = ''
        elif len(dval) == 0:  #pylint: disable=len-as-condition
            dstr = ''
        else:
            dstr = ', Default: {}'.format(dstr)
    except TypeError:
        # Default has no len(); still display it
        dstr = ', Default: {}'.format(dstr)
    return '{doc}{default}'.format(
        doc=super(Property, self).sphinx(), default=dstr
    )
2.831217
2.723477
1.039559
def validate(self, instance, value):
    """Checks if value is a boolean"""
    if self.cast:
        value = bool(value)
    if not isinstance(value, BOOLEAN_TYPES):
        self.error(instance, value)
    return value
5.060241
3.552391
1.424461
def from_json(value, **kwargs):
    """Coerces JSON string to boolean"""
    if isinstance(value, string_types):
        value = value.upper()
        if value in ('TRUE', 'Y', 'YES', 'ON'):
            return True
        if value in ('FALSE', 'N', 'NO', 'OFF'):
            return False
    # ints (and bools) pass through unchanged
    if isinstance(value, int):
        return value
    raise ValueError('Could not load boolean from JSON: {}'.format(value))
2.644061
2.484102
1.064393
def validate(self, instance, value):
    """Checks that value is an integer and in min/max bounds"""
    try:
        as_int = int(value)
        # Without cast, only values already within TOL of an int pass
        if not self.cast and abs(value - as_int) > TOL:
            self.error(
                instance=instance,
                value=value,
                extra='Not within tolerance range of {}.'.format(TOL),
            )
    except (TypeError, ValueError):
        self.error(instance, value, extra='Cannot cast to integer.')
    _in_bounds(self, instance, as_int)
    return as_int
5.605909
5.414555
1.035341
def validate(self, instance, value):
    """Checks that value is a float and in min/max bounds

    Non-float numbers are coerced to floats.
    """
    try:
        as_float = float(value)
        if not self.cast and abs(value - as_float) > TOL:
            self.error(
                instance=instance,
                value=value,
                extra='Not within tolerance range of {}.'.format(TOL),
            )
    except (TypeError, ValueError):
        self.error(instance, value, extra='Cannot cast to float.')
    _in_bounds(self, instance, as_float)
    return as_float
4.861944
4.853462
1.001748
def validate(self, instance, value):
    """Checks that value is a complex number

    Floats and Integers are coerced to complex numbers.
    """
    try:
        as_complex = complex(value)
        if not self.cast and (
                abs(value.real - as_complex.real) > TOL or
                abs(value.imag - as_complex.imag) > TOL
        ):
            self.error(
                instance=instance,
                value=value,
                extra='Not within tolerance range of {}.'.format(TOL),
            )
    except (TypeError, ValueError, AttributeError):
        self.error(instance, value)
    return as_complex
4.16721
3.937744
1.058273
def validate(self, instance, value):
    """Check if value is a string, and strips it and changes case"""
    original_type = type(value)
    if not isinstance(value, string_types):
        self.error(instance, value)
    if self.regex is not None and self.regex.search(value) is None:  #pylint: disable=no-member
        self.error(instance, value, extra='Regex does not match.')
    value = value.strip(self.strip)
    if self.change_case == 'upper':
        value = value.upper()
    elif self.change_case == 'lower':
        value = value.lower()
    # Preserve unicode-ness (or the original subtype) of the input
    if self.unicode:
        value = text_type(value)
    else:
        value = original_type(value)
    return value
2.764745
2.55675
1.081351
def info(self):
    """Formatted string to display the available choices"""
    if self.descriptions is None:
        display = ['"{}"'.format(choice) for choice in self.choices]
    else:
        display = [
            '"{}" ({})'.format(choice, self.descriptions[choice])
            for choice in self.choices
        ]
    if len(self.choices) == 2:
        return 'either {} or {}'.format(display[0], display[1])
    return 'any of {}'.format(', '.join(display))
2.647371
2.358539
1.122462
def validate(self, instance, value):  #pylint: disable=inconsistent-return-statements
    """Check if input is a valid string based on the choices"""
    if not isinstance(value, string_types):
        self.error(instance, value)
    # Normalize the input once; it does not change across choices
    test_value = value if self.case_sensitive else value.upper()
    for key, val in self.choices.items():
        test_key = key if self.case_sensitive else key.upper()
        test_val = val if self.case_sensitive else [item.upper() for item in val]
        if test_value == test_key or test_value in test_val:
            return key
    self.error(instance, value, extra='Not an available choice.')
3.201343
2.9305
1.092422
def validate(self, instance, value):
    """Check if input is valid color and converts to RGB"""
    if isinstance(value, string_types):
        value = COLORS_NAMED.get(value, value)
        if value.upper() == 'RANDOM':
            value = random.choice(COLORS_20)
        value = value.upper().lstrip('#')
        # Expand shorthand hex (e.g. 'F00' -> 'FF0000')
        if len(value) == 3:
            value = ''.join(ch * 2 for ch in value)
        if len(value) != 6:
            self.error(instance, value, extra='Color must be known name '
                       'or a hex with 6 digits. e.g. "#FF0000"')
        try:
            value = [int(value[i:i + 2], 16) for i in range(0, 6, 2)]
        except ValueError:
            self.error(instance, value,
                       extra='Hex color must be base 16 (0-F)')
    if not isinstance(value, (list, tuple)):
        self.error(instance, value,
                   extra='Color must be a list or tuple of length 3')
    if len(value) != 3:
        self.error(instance, value, extra='Color must be length 3')
    for val in value:
        if not isinstance(val, integer_types) or not 0 <= val <= 255:
            self.error(instance, value,
                       extra='Color values must be ints 0-255.')
    return tuple(value)
2.539015
2.434873
1.042771
def validate(self, instance, value):
    """Check if value is a valid datetime object or JSON datetime string"""
    if isinstance(value, datetime.datetime):
        return value
    if not isinstance(value, string_types):
        self.error(
            instance=instance,
            value=value,
            extra='Cannot convert non-strings to datetime.',
        )
    try:
        return self.from_json(value)
    except ValueError:
        self.error(
            instance=instance,
            value=value,
            extra='Invalid format for converting to datetime.',
        )
3.174809
2.853313
1.112675
if not isinstance(value, uuid.UUID): self.error(instance, value) return value
def validate(self, instance, value)
Check that value is a valid UUID instance
4.522349
2.892602
1.563419
def valid_modes(self):
    """Valid modes of an open file"""
    if self.mode is None:
        fallback = None
    else:
        fallback = (self.mode,)
    return getattr(self, '_valid_mode', fallback)
5.697187
5.70002
0.999503
def get_property(self):
    """Establishes access of Property values"""
    base_prop = super(File, self).get_property()
    prop = self  # the Property instance, captured for the closure

    def fdel(self):
        # Close the underlying file before clearing the value
        open_file = self._get(prop.name)
        if open_file is not None:
            open_file.close()
        self._set(prop.name, undefined)

    return property(fget=base_prop.fget, fset=base_prop.fset, fdel=fdel,
                    doc=prop.sphinx())
5.941183
5.73824
1.035367
def validate(self, instance, value):
    """Checks that the value is a valid file open in the correct mode

    If value is a string, it attempts to open it with the given mode.
    """
    if isinstance(value, string_types) and self.mode is not None:
        try:
            value = open(value, self.mode)
        except (IOError, TypeError):
            self.error(instance, value,
                       extra='Cannot open file: {}'.format(value))
    if not all(hasattr(value, attr) for attr in ('read', 'seek')):
        self.error(instance, value, extra='Not a file-like object')
    if hasattr(value, 'mode') and self.valid_modes is not None:
        if value.mode not in self.valid_modes:
            self.error(instance, value,
                       extra='Invalid mode: {}'.format(value.mode))
    if getattr(value, 'closed', False):
        self.error(instance, value, extra='File is closed.')
    return value
2.601071
2.323742
1.119346
def display_warning(self):
    """Display a FutureWarning about using a Renamed Property"""
    if not self.warn:
        return
    warnings.warn(
        "\nProperty '{}' is deprecated and may be removed in the "
        "future. Please use '{}'.".format(self.name, self.new_name),
        FutureWarning, stacklevel=3
    )
4.453189
3.504948
1.270543
def get_property(self):
    """Establishes the dynamic behavior of Property values"""
    prop = self  # captured for the descriptor closures

    def _getter(self):
        prop.display_warning()
        return getattr(self, prop.new_name)

    def _setter(self, value):
        prop.display_warning()
        setattr(self, prop.new_name, value)

    def _deleter(self):
        prop.display_warning()
        delattr(self, prop.new_name)

    return property(fget=_getter, fset=_setter, fdel=_deleter,
                    doc=prop.sphinx())
2.800702
2.616706
1.070316
def validate(self, instance, value):
    """Check if value is valid type of instance_class

    If value is an instance of instance_class, it is returned unmodified.
    If value is either (1) a keyword dictionary with valid parameters to
    construct an instance of instance_class or (2) a valid input argument
    to construct instance_class, then a new instance is created and
    returned.
    """
    try:
        if isinstance(value, self.instance_class):
            return value
        if isinstance(value, dict):
            return self.instance_class(**value)
        return self.instance_class(value)
    except GENERIC_ERRORS as err:
        if hasattr(err, 'error_tuples'):
            extra = '({})'.format(' & '.join(
                tup.message for tup in err.error_tuples
            ))
        else:
            extra = ''
        self.error(instance, value, extra=extra)
3.602149
3.327232
1.082626
def assert_valid(self, instance, value=None):
    """Checks if valid, including HasProperty instances pass validation"""
    if not super(Instance, self).assert_valid(instance, value):
        return False
    if value is None:
        value = instance._get(self.name)
    if isinstance(value, HasProperties):
        value.validate()
    return True
3.837647
3.063793
1.25258
def serialize(self, value, **kwargs):
    """Serialize instance to JSON

    If the value is a HasProperties instance, it is serialized with the
    include_class argument passed along. Otherwise, to_json is called.
    """
    kwargs.setdefault('include_class', True)
    if self.serializer is not None:
        return self.serializer(value, **kwargs)
    if value is None:
        return None
    if isinstance(value, HasProperties):
        return value.serialize(**kwargs)
    return self.to_json(value, **kwargs)
3.132258
2.398305
1.30603
def to_json(value, **kwargs):
    """Convert instance to JSON"""
    if isinstance(value, HasProperties):
        return value.serialize(**kwargs)
    try:
        # Round-trip through JSON to verify serializability
        return json.loads(json.dumps(value))
    except TypeError:
        raise TypeError(
            "Cannot convert type {} to JSON without calling 'serialize' "
            "on an instance of Instance Property and registering a custom "
            "serializer".format(value.__class__.__name__)
        )
5.847172
5.619497
1.040515
def sphinx_class(self):
    """Redefine sphinx class so documentation links to instance_class"""
    return ':class:`{cls} <{pref}.{cls}>`'.format(
        cls=self.instance_class.__name__,
        pref=self.instance_class.__module__,
    )
5.330386
4.574029
1.165359
def properties_observer(instance, prop, callback, **kwargs):
    """Adds properties callback handler"""
    observer(instance, prop, callback,
             change_only=kwargs.get('change_only', True))
4.130173
4.114298
1.003858
if getattr(self, '_unlinked', False): return if getattr(self, '_updating', False): return self._updating = True try: setattr(self.target[0], self.target[1], self.transform( getattr(self.source[0], self.source[1]) )) finally: self._updating = False
def _update(self, *_)
Set target value to source value
3.142531
2.772813
1.133337