code
string
signature
string
docstring
string
loss_without_docstring
float64
loss_with_docstring
float64
factor
float64
super(FlaskRestyPlugin, self).path_helper( path=path, view=view, **kwargs ) resource = self.get_state().views[view] rule = self._rules[resource.rule] operations = defaultdict(Operation) view_instance = view() view_instance.spec_declaration(view, operations, self) # add path arguments parameters = [] for arg in rule.arguments: parameters.append({ 'name': arg, 'in': 'path', 'required': True, 'type': 'string', }) if parameters: operations['parameters'] = parameters path.path = FlaskPlugin.flaskpath2openapi(resource.rule) path.operations = dict(**operations)
def path_helper(self, path, view, **kwargs)
Path helper for Flask-RESTy views. :param view: An `ApiView` object.
5.467659
5.680279
0.962569
if axis == "wght": # 600.0 => 600, 250 => 250 return int(ufo_class) elif axis == "wdth": return WIDTH_CLASS_TO_VALUE[int(ufo_class)] raise NotImplementedError
def class_to_value(axis, ufo_class)
>>> class_to_value('wdth', 7) 125
4.095663
4.125328
0.992809
if axis_tag == "wght": try: value = _nospace_lookup(WEIGHT_CODES, user_loc) except KeyError: return None return class_to_value("wght", value) elif axis_tag == "wdth": try: value = _nospace_lookup(WIDTH_CODES, user_loc) except KeyError: return None return class_to_value("wdth", value) # Currently this function should only be called with a width or weight raise NotImplementedError
def user_loc_string_to_value(axis_tag, user_loc)
Go from Glyphs UI strings to user space location. Returns None if the string is invalid. >>> user_loc_string_to_value('wght', 'ExtraLight') 200 >>> user_loc_string_to_value('wdth', 'SemiCondensed') 87.5 >>> user_loc_string_to_value('wdth', 'Clearly Not From Glyphs UI')
3.853342
4.166656
0.924804
if axis_tag == "wght": return int(user_loc) elif axis_tag == "wdth": return min( sorted(WIDTH_CLASS_TO_VALUE.items()), key=lambda item: abs(item[1] - user_loc), )[0] raise NotImplementedError
def user_loc_value_to_class(axis_tag, user_loc)
Return the OS/2 weight or width class that is closest to the provided user location. For weight the user location is between 0 and 1000 and for width it is a percentage. >>> user_loc_value_to_class('wght', 310) 310 >>> user_loc_value_to_class('wdth', 62) 2
4.244338
4.072358
1.042231
codes = {} if axis_tag == "wght": codes = WEIGHT_CODES elif axis_tag == "wdth": codes = WIDTH_CODES else: raise NotImplementedError class_ = user_loc_value_to_class(axis_tag, user_loc) return min( sorted((code, class_) for code, class_ in codes.items() if code is not None), key=lambda item: abs(item[1] - class_), )[0]
def user_loc_value_to_instance_string(axis_tag, user_loc)
Return the Glyphs UI string (from the instance dropdown) that is closest to the provided user location. >>> user_loc_value_to_instance_string('wght', 430) 'Normal' >>> user_loc_value_to_instance_string('wdth', 150) 'Extra Expanded'
3.486826
3.072795
1.134741
if not font.masters: return None regular_name = font.customParameters["Variation Font Origin"] if regular_name is not None: for master in font.masters: if master.name == regular_name: return master base_style = find_base_style(font.masters) if not base_style: base_style = "Regular" for master in font.masters: if master.name == base_style: return master # Second try: maybe the base style has regular in it as well for master in font.masters: name_without_regular = " ".join( n for n in master.name.split(" ") if n != "Regular" ) if name_without_regular == base_style: return master return font.masters[0]
def get_regular_master(font)
Find the "regular" master among the GSFontMasters. Tries to find the master with the passed 'regularName'. If there is no such master or if regularName is None, tries to find a base style shared between all masters (defaulting to "Regular"), and then tries to find a master with that style name. If there is no master with that name, returns the first master in the list.
2.901001
2.671683
1.085833
if not masters: return "" base_style = (masters[0].name or "").split() for master in masters: style = master.name.split() base_style = [s for s in style if s in base_style] base_style = " ".join(base_style) return base_style
def find_base_style(masters)
Find a base style shared between all masters. Return empty string if none is found.
2.512999
2.397667
1.048102
mapping = sorted(mapping) if len(mapping) == 1: xa, ya = mapping[0] if xa == x: return ya return x for (xa, ya), (xb, yb) in zip(mapping[:-1], mapping[1:]): if xa <= x <= xb: return ya + float(x - xa) / (xb - xa) * (yb - ya) return x
def interp(mapping, x)
Compute the piecewise linear interpolation given by mapping for input x. >>> interp(((1, 1), (2, 4)), 1.5) 2.5
2.28175
2.497909
0.913464
user_loc = self.default_user_loc if self.tag != "wght": # The user location is by default the same as the design location. user_loc = self.get_design_loc(master_or_instance) # Try to guess the user location by looking at the OS/2 weightClass # and widthClass. If a weightClass is found, it translates directly # to a user location in 0..1000. If a widthClass is found, it # translate to a percentage of extension according to the spec, see # the mapping named `WIDTH_CLASS_TO_VALUE` at the top. if self.user_loc_key is not None and hasattr( master_or_instance, self.user_loc_key ): # Instances have special ways to specify a user location. # Only weight and with have a custom user location via a key. # The `user_loc_key` gives a "location code" = Glyphs UI string user_loc_str = getattr(master_or_instance, self.user_loc_key) new_user_loc = user_loc_string_to_value(self.tag, user_loc_str) if new_user_loc is not None: user_loc = new_user_loc # The custom param takes over the key if it exists # e.g. for weight: # key = "weight" -> "Bold" -> 700 # but param = "weightClass" -> 600 => 600 wins if self.user_loc_param is not None: class_ = master_or_instance.customParameters[self.user_loc_param] if class_ is not None: user_loc = class_to_value(self.tag, class_) # Masters have a customParameter that specifies a user location # along custom axes. If this is present it takes precedence over # everything else. loc_param = master_or_instance.customParameters["Axis Location"] try: for location in loc_param: if location.get("Axis") == self.name: user_loc = int(location["Location"]) except (TypeError, KeyError): pass return user_loc
def get_user_loc(self, master_or_instance)
Get the user location of a Glyphs master or instance. Masters in Glyphs can have a user location in the "Axis Location" custom parameter. The user location is what the user sees on the slider in his variable-font-enabled UI. For weight it is a value between 0 and 1000, 400 being Regular and 700 Bold. For width it's a percentage of extension with respect to the normal width, 100 being normal, 200 Ultra-expanded = twice as wide. It may or may not match the design location.
5.510863
4.820515
1.14321
if hasattr(master_or_instance, "instanceInterpolations"): # The following code is only valid for instances. # Masters also the keys `weight` and `width` but they should not be # used, they are deprecated and should only be used to store # (parts of) the master's name, but not its location. # Try to set the key if possible, i.e. if there is a key, and # if there exists a code that can represent the given value, e.g. # for "weight": 600 can be represented by SemiBold so we use that, # but for 550 there is no code so we will have to set the custom # parameter as well. if self.user_loc_key is not None and hasattr( master_or_instance, self.user_loc_key ): code = user_loc_value_to_instance_string(self.tag, value) value_for_code = user_loc_string_to_value(self.tag, code) setattr(master_or_instance, self.user_loc_key, code) if self.user_loc_param is not None and value != value_for_code: try: class_ = user_loc_value_to_class(self.tag, value) master_or_instance.customParameters[ self.user_loc_param ] = class_ except NotImplementedError: # user_loc_value_to_class only works for weight & width pass return # For masters, set directly the custom parameter (old way) # and also the Axis Location (new way). # Only masters can have an 'Axis Location' parameter. if self.user_loc_param is not None: try: class_ = user_loc_value_to_class(self.tag, value) master_or_instance.customParameters[self.user_loc_param] = class_ except NotImplementedError: pass loc_param = master_or_instance.customParameters["Axis Location"] if loc_param is None: loc_param = [] master_or_instance.customParameters["Axis Location"] = loc_param location = None for loc in loc_param: if loc.get("Axis") == self.name: location = loc if location is None: loc_param.append({"Axis": self.name, "Location": value}) else: location["Location"] = value
def set_user_loc(self, master_or_instance, value)
Set the user location of a Glyphs master or instance.
4.060067
3.913574
1.037432
kwargs.setdefault('in', location) if kwargs['in'] != 'body': kwargs.setdefault('type', 'string') self['parameters'].append(kwargs)
def add_parameter(self, location='query', **kwargs)
Adds a new parameter to the request :param location: the 'in' field of the parameter (e.g: 'query', 'body', 'path')
4.161942
4.0986
1.015455
self['responses'] \ .setdefault(str(code), self._new_operation()) \ .setdefault('schema', {'type': 'object'}) \ .setdefault('properties', {}) \ .setdefault(prop_name, {}) \ .update(**kwargs)
def add_property_to_response(self, code='200', prop_name='data', **kwargs)
Add a property (http://json-schema.org/latest/json-schema-validation.html#anchor64) # noqa: E501 to the schema of the response identified by the code
3.73337
4.078497
0.915379
# noqa: E501 self['responses'][str(code)] = self._new_operation(**kwargs)
def declare_response(self, code='200', **kwargs)
Declare a response for the specified code https://github.com/swagger-api/swagger-spec/blob/master/versions/2.0.md#responseObject
8.058552
7.238389
1.113307
value = super(SanitizedHTML, self)._deserialize(value, attr, data) return bleach.clean( value, tags=self.tags, attributes=self.attrs, strip=True, ).strip()
def _deserialize(self, value, attr, data)
Deserialize string by sanitizing HTML.
3.218501
2.434012
1.322303
pid_types = set() guessed = set() endpoint_prefixes = {} for key, endpoint in records_rest_endpoints.items(): pid_type = endpoint['pid_type'] pid_types.add(pid_type) is_guessed = key == pid_type is_default = endpoint.get('default_endpoint_prefix', False) if is_default: if pid_type in endpoint_prefixes and pid_type not in guessed: raise ValueError('More than one "{0}" defined.'.format( pid_type )) endpoint_prefixes[pid_type] = key guessed -= {pid_type} elif is_guessed and pid_type not in endpoint_prefixes: endpoint_prefixes[pid_type] = key guessed |= {pid_type} not_found = pid_types - set(endpoint_prefixes.keys()) if not_found: raise ValueError('No endpoint-prefix for {0}.'.format( ', '.join(not_found) )) return endpoint_prefixes
def build_default_endpoint_prefixes(records_rest_endpoints)
Build the default_endpoint_prefixes map.
2.652871
2.534551
1.046683
if isinstance(value, six.string_types): return import_string(value) elif value: return value return default
def obj_or_import_string(value, default=None)
Import string or return object. :params value: Import path or class object to instantiate. :params default: Default object to return if the import fails. :returns: The imported object.
2.787763
3.968532
0.702467
app = app or current_app imp = app.config.get(key) return obj_or_import_string(imp, default=default)
def load_or_import_from_config(key, app=None, default=None)
Load or import value from config. :returns: The loaded value.
4.068351
7.474287
0.544313
def can(self): search = request._methodview.search_class() search = search.get_record(str(record.id)) return search.count() == 1 return type('CheckES', (), {'can': can})()
def check_elasticsearch(record, *args, **kwargs)
Return permission that check if the record exists in ES index. :params record: A record object. :returns: A object instance with a ``can()`` method.
12.861825
9.611732
1.338138
try: return self.resolver.resolve(self.value) except PIDDoesNotExistError as pid_error: raise PIDDoesNotExistRESTError(pid_error=pid_error) except PIDUnregistered as pid_error: raise PIDUnregisteredRESTError(pid_error=pid_error) except PIDDeletedError as pid_error: raise PIDDeletedRESTError(pid_error=pid_error) except PIDMissingObjectError as pid_error: current_app.logger.exception( 'No object assigned to {0}.'.format(pid_error.pid), extra={'pid': pid_error.pid}) raise PIDMissingObjectRESTError(pid_error.pid, pid_error=pid_error) except PIDRedirectedError as pid_error: try: location = url_for( '.{0}_item'.format( current_records_rest.default_endpoint_prefixes[ pid_error.destination_pid.pid_type]), pid_value=pid_error.destination_pid.pid_value) data = dict( status=301, message='Moved Permanently', location=location, ) response = make_response(jsonify(data), data['status']) response.headers['Location'] = location abort(response) except (BuildError, KeyError): current_app.logger.exception( 'Invalid redirect - pid_type "{0}" ' 'endpoint missing.'.format( pid_error.destination_pid.pid_type), extra={ 'pid': pid_error.pid, 'destination_pid': pid_error.destination_pid, }) raise PIDRedirectedRESTError( pid_error.destination_pid.pid_type, pid_error=pid_error)
def data(self)
Resolve PID from a value and return a tuple with PID and the record. :returns: A tuple with the PID and the record resolved.
2.93593
2.833419
1.036179
if request and request.args.get('prettyprint'): return dict( indent=2, separators=(', ', ': '), ) else: return dict( indent=None, separators=(',', ':'), )
def _format_args()
Get JSON dump indentation and separates.
3.91978
3.179503
1.232828
return json.dumps( self.transform_record(pid, record, links_factory, **kwargs), **self._format_args())
def serialize(self, pid, record, links_factory=None, **kwargs)
Serialize a single record and persistent identifier. :param pid: Persistent identifier instance. :param record: Record instance. :param links_factory: Factory function for record links.
6.028668
8.429403
0.715195
return json.dumps(dict( hits=dict( hits=[self.transform_search_hit( pid_fetcher(hit['_id'], hit['_source']), hit, links_factory=item_links_factory, **kwargs ) for hit in search_result['hits']['hits']], total=search_result['hits']['total'], ), links=links or {}, aggregations=search_result.get('aggregations', dict()), ), **self._format_args())
def serialize_search(self, pid_fetcher, search_result, links=None, item_links_factory=None, **kwargs)
Serialize a search result. :param pid_fetcher: Persistent identifier fetcher. :param search_result: Elasticsearch search result. :param links: Dictionary of links to add to response.
3.268605
3.564814
0.916908
error_handlers_registry = error_handlers_registry or {} # Catch record validation errors @blueprint.errorhandler(ValidationError) def validation_error(error): return JSONSchemaValidationError(error=error).get_response() @blueprint.errorhandler(RequestError) def elasticsearch_badrequest_error(error): handlers = current_app.config[ 'RECORDS_REST_ELASTICSEARCH_ERROR_HANDLERS'] cause_types = {c['type'] for c in error.info['error']['root_cause']} for cause_type, handler in handlers.items(): if cause_type in cause_types: return handler(error) # Default exception for unhandled errors exception = UnhandledElasticsearchError() current_app.logger.exception(error) # Log the original stacktrace return exception.get_response() for exc_or_code, handlers in error_handlers_registry.items(): # Build full endpoint names and resolve handlers handlers = { '.'.join([blueprint.name, view_name]): obj_or_import_string(func) for view_name, func in handlers.items() } def dispatch_handler(error): def default_handler(e): raise e return handlers.get(request.endpoint, default_handler)(error) blueprint.register_error_handler(exc_or_code, dispatch_handler) return blueprint
def create_error_handlers(blueprint, error_handlers_registry=None)
Create error handlers on blueprint. :params blueprint: Records API blueprint. :params error_handlers_registry: Configuration of error handlers per exception or HTTP status code and view name. The dictionary has the following structure: .. code-block:: python { SomeExceptionClass: { 'recid_list': 'path.to.error_handler_function_foo', 'recid_item': 'path.to.error_handler_function_foo', }, 410: { 'custom_pid_list': 'path.to.error_handler_function_bar', 'custom_pid_item': 'path.to.error_handler_function_bar', 'recid_item': 'path.to.error_handler_function_baz', 'recid_list': 'path.to.error_handler_function_baz', }, } :returns: Configured blueprint.
4.11522
4.276735
0.962234
endpoints = endpoints or {} blueprint = Blueprint( 'invenio_records_rest', __name__, url_prefix='', ) error_handlers_registry = defaultdict(dict) for endpoint, options in endpoints.items(): error_handlers = options.pop('error_handlers', {}) for rule in create_url_rules(endpoint, **options): for exc_or_code, handler in error_handlers.items(): view_name = rule['view_func'].__name__ error_handlers_registry[exc_or_code][view_name] = handler blueprint.add_url_rule(**rule) return create_error_handlers(blueprint, error_handlers_registry)
def create_blueprint(endpoints)
Create Invenio-Records-REST blueprint. :params endpoints: Dictionary representing the endpoints configuration. :returns: Configured blueprint.
3.088479
3.235679
0.954507
@wraps(f) def inner(self, pid_value, *args, **kwargs): try: pid, record = request.view_args['pid_value'].data return f(self, pid=pid, record=record, *args, **kwargs) except SQLAlchemyError: raise PIDResolveRESTError(pid) return inner
def pass_record(f)
Decorator to retrieve persistent identifier and record. This decorator will resolve the ``pid_value`` parameter from the route pattern and resolve it to a PID and a record, which are then available in the decorated function as ``pid`` and ``record`` kwargs respectively.
4.556181
3.876156
1.175438
# Note, cannot be done in one line due overloading of boolean # operations permission object. if not permission_factory(record=record).can(): from flask_login import current_user if not current_user.is_authenticated: abort(401) abort(403)
def verify_record_permission(permission_factory, record)
Check that the current user has the required permissions on record. In case the permission check fails, an Flask abort is launched. If the user was previously logged-in, a HTTP error 403 is returned. Otherwise, is returned a HTTP error 401. :param permission_factory: permission factory used to check permissions. :param record: record whose access is limited.
7.82936
7.755728
1.009494
def need_record_permission_builder(f): @wraps(f) def need_record_permission_decorator(self, record=None, *args, **kwargs): permission_factory = ( getattr(self, factory_name) or getattr(current_records_rest, factory_name) ) # FIXME use context instead request._methodview = self if permission_factory: verify_record_permission(permission_factory, record) return f(self, record=record, *args, **kwargs) return need_record_permission_decorator return need_record_permission_builder
def need_record_permission(factory_name)
Decorator checking that the user has the required permissions on record. :param factory_name: name of the permission factory.
3.938981
3.910102
1.007386
opts = current_app.config['RECORDS_REST_SORT_OPTIONS'].get( self.search_index) sort_fields = [] if opts: for key, item in sorted(opts.items(), key=lambda x: x[1]['order']): sort_fields.append( {key: dict( title=item['title'], default_order=item.get('default_order', 'asc'))} ) return jsonify(dict( sort_fields=sort_fields, max_result_window=self.max_result_window, default_media_type=self.default_media_type, search_media_types=sorted(self.search_media_types), item_media_types=sorted(self.item_media_types), ))
def get(self)
Get options.
3.15165
3.068583
1.02707
default_results_size = current_app.config.get( 'RECORDS_REST_DEFAULT_RESULTS_SIZE', 10) page = request.values.get('page', 1, type=int) size = request.values.get('size', default_results_size, type=int) if page * size >= self.max_result_window: raise MaxResultWindowRESTError() # Arguments that must be added in prev/next links urlkwargs = dict() search_obj = self.search_class() search = search_obj.with_preference_param().params(version=True) search = search[(page - 1) * size:page * size] search, qs_kwargs = self.search_factory(search) urlkwargs.update(qs_kwargs) # Execute search search_result = search.execute() # Generate links for prev/next urlkwargs.update( size=size, _external=True, ) endpoint = '.{0}_list'.format( current_records_rest.default_endpoint_prefixes[self.pid_type]) links = dict(self=url_for(endpoint, page=page, **urlkwargs)) if page > 1: links['prev'] = url_for(endpoint, page=page - 1, **urlkwargs) if size * page < search_result.hits.total and \ size * page < self.max_result_window: links['next'] = url_for(endpoint, page=page + 1, **urlkwargs) return self.make_response( pid_fetcher=self.pid_fetcher, search_result=search_result.to_dict(), links=links, item_links_factory=self.item_links_factory, )
def get(self, **kwargs)
Search records. Permissions: the `list_permission_factory` permissions are checked. :returns: Search result containing hits and aggregations as returned by invenio-search.
3.53645
3.266557
1.082623
if request.mimetype not in self.loaders: raise UnsupportedMediaRESTError(request.mimetype) data = self.loaders[request.mimetype]() if data is None: raise InvalidDataRESTError() # Check permissions permission_factory = self.create_permission_factory if permission_factory: verify_record_permission(permission_factory, data) # Create uuid for record record_uuid = uuid.uuid4() # Create persistent identifier pid = self.minter(record_uuid, data=data) # Create record record = self.record_class.create(data, id_=record_uuid) db.session.commit() # Index the record if self.indexer_class: self.indexer_class().index(record) response = self.make_response( pid, record, 201, links_factory=self.item_links_factory) # Add location headers endpoint = '.{0}_item'.format( current_records_rest.default_endpoint_prefixes[pid.pid_type]) location = url_for(endpoint, pid_value=pid.pid_value, _external=True) response.headers.extend(dict(location=location)) return response
def post(self, **kwargs)
Create a record. Permissions: ``create_permission_factory`` Procedure description: #. First of all, the `create_permission_factory` permissions are checked. #. Then, the record is deserialized by the proper loader. #. A second call to the `create_permission_factory` factory is done: it differs from the previous call because this time the record is passed as parameter. #. A `uuid` is generated for the record and the minter is called. #. The record class is called to create the record. #. The HTTP response is built with the help of the item link factory. :returns: The created record.
4.038048
3.539136
1.14097
self.check_etag(str(record.model.version_id)) record.delete() # mark all PIDs as DELETED all_pids = PersistentIdentifier.query.filter( PersistentIdentifier.object_type == pid.object_type, PersistentIdentifier.object_uuid == pid.object_uuid, ).all() for rec_pid in all_pids: if not rec_pid.is_deleted(): rec_pid.delete() db.session.commit() if self.indexer_class: self.indexer_class().delete(record) return '', 204
def delete(self, pid, record, **kwargs)
Delete a record. Permissions: ``delete_permission_factory`` Procedure description: #. The record is resolved reading the pid value from the url. #. The ETag is checked. #. The record is deleted. #. All PIDs are marked as DELETED. :param pid: Persistent identifier for record. :param record: Record object.
3.45171
3.136493
1.1005
etag = str(record.revision_id) self.check_etag(str(record.revision_id)) self.check_if_modified_since(record.updated, etag=etag) return self.make_response( pid, record, links_factory=self.links_factory )
def get(self, pid, record, **kwargs)
Get a record. Permissions: ``read_permission_factory`` Procedure description: #. The record is resolved reading the pid value from the url. #. The ETag and If-Modifed-Since is checked. #. The HTTP response is built with the help of the link factory. :param pid: Persistent identifier for record. :param record: Record object. :returns: The requested record.
4.99497
4.385554
1.13896
data = self.loaders[request.mimetype]() if data is None: raise InvalidDataRESTError() self.check_etag(str(record.revision_id)) try: record = record.patch(data) except (JsonPatchException, JsonPointerException): raise PatchJSONFailureRESTError() record.commit() db.session.commit() if self.indexer_class: self.indexer_class().index(record) return self.make_response( pid, record, links_factory=self.links_factory)
def patch(self, pid, record, **kwargs)
Modify a record. Permissions: ``update_permission_factory`` The data should be a JSON-patch, which will be applied to the record. Requires header ``Content-Type: application/json-patch+json``. Procedure description: #. The record is deserialized using the proper loader. #. The ETag is checked. #. The record is patched. #. The HTTP response is built with the help of the link factory. :param pid: Persistent identifier for record. :param record: Record object. :returns: The modified record.
5.740952
4.960565
1.157318
if request.mimetype not in self.loaders: raise UnsupportedMediaRESTError(request.mimetype) data = self.loaders[request.mimetype]() if data is None: raise InvalidDataRESTError() self.check_etag(str(record.revision_id)) record.clear() record.update(data) record.commit() db.session.commit() if self.indexer_class: self.indexer_class().index(record) return self.make_response( pid, record, links_factory=self.links_factory)
def put(self, pid, record, **kwargs)
Replace a record. Permissions: ``update_permission_factory`` The body should be a JSON object, which will fully replace the current record metadata. Procedure description: #. The ETag is checked. #. The record is updated by calling the record API `clear()`, `update()` and then `commit()`. #. The HTTP response is built with the help of the link factory. :param pid: Persistent identifier for record. :param record: Record object. :returns: The modified record.
4.549123
3.950987
1.151389
completions = [] size = request.values.get('size', type=int) for k in self.suggesters.keys(): val = request.values.get(k) if val: # Get completion suggestions opts = copy.deepcopy(self.suggesters[k]) if 'context' in opts.get('completion', {}): ctx_field = opts['completion']['context'] ctx_val = request.values.get(ctx_field) if not ctx_val: raise SuggestMissingContextRESTError opts['completion']['context'] = { ctx_field: ctx_val } if size: opts['completion']['size'] = size completions.append((k, val, opts)) if not completions: raise SuggestNoCompletionsRESTError( ', '.join(sorted(self.suggesters.keys()))) # Add completions s = self.search_class() for field, val, opts in completions: source = opts.pop('_source', None) if source is not None and ES_VERSION[0] >= 5: s = s.source(source).suggest(field, val, **opts) else: s = s.suggest(field, val, **opts) if ES_VERSION[0] == 2: # Execute search response = s.execute_suggest().to_dict() for field, _, _ in completions: for resp in response[field]: for op in resp['options']: if 'payload' in op: op['_source'] = copy.deepcopy(op['payload']) elif ES_VERSION[0] >= 5: response = s.execute().to_dict()['suggest'] result = dict() for field, val, opts in completions: result[field] = response[field] return make_response(jsonify(result))
def get(self, **kwargs)
Get suggestions.
3.264138
3.169005
1.03002
try: return super(DateString, self)._serialize( arrow.get(value).date(), attr, obj) except ParserError: return missing
def _serialize(self, value, attr, obj)
Serialize an ISO8601-formatted date.
7.985527
6.524373
1.223953
return super(DateString, self)._deserialize(value, attr, data).isoformat()
def _deserialize(self, value, attr, data)
Deserialize an ISO8601-formatted date.
8.293075
6.082599
1.36341
import uuid from invenio_records.api import Record from invenio_pidstore.models import PersistentIdentifier, PIDStatus indexer = RecordIndexer() index_queue = [] # Record 1 - Live record with db.session.begin_nested(): rec_uuid = uuid.uuid4() pid1 = PersistentIdentifier.create( 'recid', '1', object_type='rec', object_uuid=rec_uuid, status=PIDStatus.REGISTERED) Record.create({ 'title': 'Registered', 'description': 'This is an awesome description', # "mint" the record as recid minter does 'control_number': '1', }, id_=rec_uuid) index_queue.append(pid1.object_uuid) # Record 2 - Deleted PID with record rec_uuid = uuid.uuid4() pid = PersistentIdentifier.create( 'recid', '2', object_type='rec', object_uuid=rec_uuid, status=PIDStatus.REGISTERED) Record.create({ 'title': 'Live ', 'control_number': '2', }, id_=rec_uuid) pid.delete() # Record 3 - Deleted PID without a record PersistentIdentifier.create( 'recid', '3', status=PIDStatus.DELETED) # Record 4 - Registered PID without a record PersistentIdentifier.create( 'recid', '4', status=PIDStatus.REGISTERED) # Record 5 - Redirected PID pid = PersistentIdentifier.create( 'recid', '5', status=PIDStatus.REGISTERED) pid.redirect(pid1) # Record 6 - Redirected non existing endpoint doi = PersistentIdentifier.create( 'doi', '10.1234/foo', status=PIDStatus.REGISTERED) pid = PersistentIdentifier.create( 'recid', '6', status=PIDStatus.REGISTERED) pid.redirect(doi) # Record 7 - Unregistered PID PersistentIdentifier.create( 'recid', '7', status=PIDStatus.RESERVED) for rec_idx in range(len(record_examples)): rec_uuid = uuid.uuid4() rec_pid = 8 + rec_idx pid1 = PersistentIdentifier.create( 'recid', str(rec_pid), object_type='rec', object_uuid=rec_uuid, status=PIDStatus.REGISTERED) # "mint" the record as recid minter does record = dict(record_examples[rec_idx]) record['control_number'] = str(rec_pid) # create the record Record.create(record, id_=rec_uuid) index_queue.append(rec_uuid) db.session.commit() for i in index_queue: indexer.index_by_id(i)
def records()
Load test data fixture.
2.34344
2.321666
1.009379
links_factory = links_factory or (lambda x, record=None, **k: dict()) metadata = copy.deepcopy(record.replace_refs()) if self.replace_refs \ else record.dumps() return dict( pid=pid, metadata=metadata, links=links_factory(pid, record=record, **kwargs), revision=record.revision_id, created=(pytz.utc.localize(record.created).isoformat() if record.created else None), updated=(pytz.utc.localize(record.updated).isoformat() if record.updated else None), )
def preprocess_record(self, pid, record, links_factory=None, **kwargs)
Prepare a record and persistent identifier for serialization.
3.215471
3.186822
1.00899
links_factory = links_factory or (lambda x, **k: dict()) record = dict( pid=pid, metadata=record_hit['_source'], links=links_factory(pid, record_hit=record_hit, **kwargs), revision=record_hit['_version'], created=None, updated=None, ) # Move created/updated attrs from source to object. for key in ['_created', '_updated']: if key in record['metadata']: record[key[1:]] = record['metadata'][key] del record['metadata'][key] return record
def preprocess_search_hit(pid, record_hit, links_factory=None, **kwargs)
Prepare a record hit from Elasticsearch for serialization.
3.251674
3.151866
1.031666
res = [] for field, error in errors.items(): if isinstance(error, list): res.append( dict(field=field, message=' '.join([str(x) for x in error]))) elif isinstance(error, dict): res.extend(_flatten_marshmallow_errors(error)) return res
def _flatten_marshmallow_errors(errors)
Flatten marshmallow errors.
2.534254
2.408931
1.052025
def json_loader(): request_json = request.get_json() context = {} pid_data = request.view_args.get('pid_value') if pid_data: pid, _ = pid_data.data context['pid'] = pid result = schema_class(context=context).load(request_json) if result.errors: raise MarshmallowErrors(result.errors) return result.data return json_loader
def marshmallow_loader(schema_class)
Marshmallow loader for JSON requests.
3.443595
3.349236
1.028173
body = dict( status=self.code, message=self.get_description(environ), ) if self.errors: body['errors'] = self.errors return json.dumps(body)
def get_body(self, environ=None)
Get the request body.
3.64439
3.535288
1.030861
for key in ('read', 'create', 'update', 'delete'): full_key = '{0}_permission_factory'.format(key) if full_key in self.__dict__: del self.__dict__[full_key]
def reset_permission_factories(self)
Remove cached permission factories.
2.865792
2.646613
1.082815
self.init_config(app) app.extensions['invenio-records-rest'] = _RecordRESTState(app)
def init_app(self, app)
Flask application initialization.
7.233972
7.317701
0.988558
# Set up API endpoints for records. for k in dir(config): if k.startswith('RECORDS_REST_'): app.config.setdefault(k, getattr(config, k)) # Resolve the Elasticsearch error handlers handlers = app.config['RECORDS_REST_ELASTICSEARCH_ERROR_HANDLERS'] for k, v in handlers.items(): handlers[k] = obj_or_import_string(v)
def init_config(self, app)
Initialize configuration.
4.683634
4.619136
1.013963
def inner(values): if len(values) != 1 or values[0].count('--') != 1 or values[0] == '--': raise RESTValidationError( errors=[FieldError(field, 'Invalid range format.')]) range_ends = values[0].split('--') range_args = dict() ineq_opers = [{'strict': 'gt', 'nonstrict': 'gte'}, {'strict': 'lt', 'nonstrict': 'lte'}] date_maths = [start_date_math, end_date_math] # Add the proper values to the dict for (range_end, strict, opers, date_math) in zip(range_ends, ['>', '<'], ineq_opers, date_maths): if range_end != '': # If first char is '>' for start or '<' for end if range_end[0] == strict: dict_key = opers['strict'] range_end = range_end[1:] else: dict_key = opers['nonstrict'] if date_math: range_end = '{0}||{1}'.format(range_end, date_math) range_args[dict_key] = range_end args = kwargs.copy() args.update(range_args) return Range(**{field: args}) return inner
def range_filter(field, start_date_math=None, end_date_math=None, **kwargs)
Create a range filter. :param field: Field name. :param start_date_math: Starting date. :param end_date_math: Ending date. :param kwargs: Addition arguments passed to the Range query. :returns: Function that returns the Range query.
3.636199
3.618983
1.004757
filters = [] for name, filter_factory in definitions.items(): values = request.values.getlist(name, type=text_type) if values: filters.append(filter_factory(values)) for v in values: urlkwargs.add(name, v) return (filters, urlkwargs)
def _create_filter_dsl(urlkwargs, definitions)
Create a filter DSL expression.
3.323678
3.18536
1.043423
filters, urlkwargs = _create_filter_dsl(urlkwargs, definitions) for filter_ in filters: search = search.post_filter(filter_) return (search, urlkwargs)
def _post_filter(search, urlkwargs, definitions)
Ingest post filter in query.
5.304158
4.928616
1.076196
filters, urlkwargs = _create_filter_dsl(urlkwargs, definitions) for filter_ in filters: search = search.filter(filter_) return (search, urlkwargs)
def _query_filter(search, urlkwargs, definitions)
Ingest query filter in query.
4.748719
4.838391
0.981467
if definitions: for name, agg in definitions.items(): search.aggs[name] = agg if not callable(agg) else agg() return search
def _aggregations(search, definitions)
Add aggregations to query.
4.335849
3.936745
1.101379
urlkwargs = MultiDict() facets = current_app.config['RECORDS_REST_FACETS'].get(index) if facets is not None: # Aggregations. search = _aggregations(search, facets.get("aggs", {})) # Query filter search, urlkwargs = _query_filter( search, urlkwargs, facets.get("filters", {})) # Post filter search, urlkwargs = _post_filter( search, urlkwargs, facets.get("post_filters", {})) return (search, urlkwargs)
def default_facets_factory(search, index)
Add a default facets to query. :param search: Basic search object. :param index: Index name. :returns: A tuple containing the new search object and a dictionary with all fields and values used.
3.462844
3.529548
0.981101
return self.schema_class(context=context).dump(obj).data
def dump(self, obj, context=None)
Serialize object with schema.
9.455564
5.752015
1.64387
context = kwargs.get('marshmallow_context', {}) context.setdefault('pid', pid) return self.dump(self.preprocess_record(pid, record, links_factory=links_factory, **kwargs), context)
def transform_record(self, pid, record, links_factory=None, **kwargs)
Transform record into an intermediate representation.
5.390502
5.225496
1.031577
context = kwargs.get('marshmallow_context', {}) context.setdefault('pid', pid) return self.dump(self.preprocess_search_hit(pid, record_hit, links_factory=links_factory, **kwargs), context)
def transform_search_hit(self, pid, record_hit, links_factory=None, **kwargs)
Transform search result hit into an intermediate representation.
4.884786
4.939527
0.988918
pid = (context or {}).get('pid') return pid.pid_value if pid else missing
def pid_from_context(_, context)
Get PID from marshmallow context.
12.389859
7.185184
1.724362
def view(pid, record, code=200, headers=None, links_factory=None): response = current_app.response_class( serializer.serialize(pid, record, links_factory=links_factory), mimetype=mimetype) response.status_code = code response.set_etag(str(record.revision_id)) response.last_modified = record.updated if headers is not None: response.headers.extend(headers) if links_factory is not None: add_link_header(response, links_factory(pid)) return response return view
def record_responsify(serializer, mimetype)
Create a Records-REST response serializer. :param serializer: Serializer instance. :param mimetype: MIME type of response. :returns: Function that generates a record HTTP response.
2.990037
3.118857
0.958696
def view(pid_fetcher, search_result, code=200, headers=None, links=None, item_links_factory=None): response = current_app.response_class( serializer.serialize_search(pid_fetcher, search_result, links=links, item_links_factory=item_links_factory), mimetype=mimetype) response.status_code = code if headers is not None: response.headers.extend(headers) if links is not None: add_link_header(response, links) return response return view
def search_responsify(serializer, mimetype)
Create a Records-REST search result response serializer. :param serializer: Serializer instance. :param mimetype: MIME type of response. :returns: Function that generates a record HTTP response.
3.284582
3.317281
0.990143
if links is not None: response.headers.extend({ 'Link': ', '.join([ '<{0}>; rel="{1}"'.format(l, r) for r, l in links.items()]) })
def add_link_header(response, links)
Add a Link HTTP header to a REST response. :param response: REST response instance :param links: Dictionary of links
2.638566
3.281752
0.804011
csl_args = { 'style': cls._default_style, 'locale': cls._default_locale } if has_request_context(): parser = FlaskParser(locations=('view_args', 'query')) csl_args.update(parser.parse(cls._user_args, request)) csl_args.update({k: kwargs[k] for k in ('style', 'locale') if k in kwargs}) try: csl_args['style'] = get_style_filepath(csl_args['style'].lower()) except StyleNotFoundError: if has_request_context(): raise StyleNotFoundRESTError(csl_args['style']) raise return csl_args
def _get_args(cls, **kwargs)
Parse style and locale. Argument location precedence: kwargs > view_args > query
4.424746
3.768914
1.174011
if self.record_format == 'csl': return CiteProcJSON([json.loads(data)]) elif self.record_format == 'bibtex': return BibTeX(data)
def _get_source(self, data)
Get source data object for citeproc-py.
7.174124
4.762493
1.50638
text = re.sub('\s\s+', ' ', text) text = re.sub('\.\.+', '.', text) text = text.replace("'", "\\'") return text
def _clean_result(self, text)
Remove double spaces, punctuation and escapes apostrophes.
3.557109
2.474039
1.437774
data = self.serializer.serialize(pid, record, links_factory) source = self._get_source(data) style = CitationStylesStyle(validate=False, **self._get_args(**kwargs)) bib = CitationStylesBibliography(style, source, formatter.plain) citation = Citation([CitationItem(pid.pid_value)]) bib.register(citation) return self._clean_result(''.join(bib.bibliography()[0]))
def serialize(self, pid, record, links_factory=None, **kwargs)
Serialize a single record. :param pid: Persistent identifier instance. :param record: Record instance. :param links_factory: Factory function for record links.
7.577976
8.15646
0.929077
codepoint = ord(char) return (0x20 <= codepoint <= 0xD7FF or codepoint in (0x9, 0xA, 0xD) or 0xE000 <= codepoint <= 0xFFFD or 0x10000 <= codepoint <= 0x10FFFF)
def is_valid_xml_char(self, char)
Check if a character is valid based on the XML specification.
1.732049
1.650462
1.049433
value = super(SanitizedUnicode, self)._deserialize(value, attr, data) value = fix_text(value) # NOTE: This `join` might be ineffiecient... There's a solution with a # large compiled regex lying around, but needs a lot of tweaking. value = ''.join(filter(self.is_valid_xml_char, value)) for char in self.UNWANTED_CHARACTERS: value = value.replace(char, '') return value
def _deserialize(self, value, attr, data)
Deserialize sanitized string value.
6.999733
6.721572
1.041383
return self.schema.tostring( self.transform_record(pid, record, links_factory))
def serialize(self, pid, record, links_factory=None)
Serialize a single record and persistent identifier. :param pid: Persistent identifier instance. :param record: Record instance. :param links_factory: Factory function for record links.
8.927658
12.732621
0.701164
records = [] for hit in search_result['hits']['hits']: records.append(self.schema.tostring(self.transform_search_hit( pid_fetcher(hit['_id'], hit['_source']), hit, links_factory=item_links_factory, ))) return "\n".join(records)
def serialize_search(self, pid_fetcher, search_result, links=None, item_links_factory=None)
Serialize a search result. :param pid_fetcher: Persistent identifier fetcher. :param search_result: Elasticsearch search result. :param links: Dictionary of links to add to response.
4.248312
4.790125
0.88689
obj = self.transform_record(pid, record['_source']) \ if isinstance(record['_source'], Record) \ else self.transform_search_hit(pid, record) return self.schema.dump_etree(obj)
def serialize_oaipmh(self, pid, record)
Serialize a single record for OAI-PMH.
7.167181
6.442283
1.112522
root = etree.Element( 'oai_datacite', nsmap={ None: 'http://schema.datacite.org/oai/oai-1.0/', 'xsi': 'http://www.w3.org/2001/XMLSchema-instance', 'xml': 'xml', }, attrib={ '{http://www.w3.org/2001/XMLSchema-instance}schemaLocation': 'http://schema.datacite.org/oai/oai-1.0/ oai_datacite.xsd', } ) root.append(E.isReferenceQuality(self.is_reference_quality)) root.append(E.schemaVersion(self.serializer.version)) root.append(E.datacentreSymbol(self.datacentre)) root.append(E.payload( self.serializer.serialize_oaipmh(pid, record) )) return root
def serialize_oaipmh(self, pid, record)
Serialize a single record for OAI-PMH.
2.574441
2.551144
1.009132
value = super(TrimmedString, self)._deserialize(value, attr, data) return value.strip()
def _deserialize(self, value, attr, data)
Deserialize string value.
3.796607
3.160031
1.201446
def _default_parser(qstr=None): if qstr: return Q('query_string', query=qstr) return Q() from .facets import default_facets_factory from .sorter import default_sorter_factory query_string = request.values.get('q') query_parser = query_parser or _default_parser try: search = search.query(query_parser(query_string)) except SyntaxError: current_app.logger.debug( "Failed parsing query: {0}".format( request.values.get('q', '')), exc_info=True) raise InvalidQueryRESTError() search_index = search._index[0] search, urlkwargs = default_facets_factory(search, search_index) search, sortkwargs = default_sorter_factory(search, search_index) for key, value in sortkwargs.items(): urlkwargs.add(key, value) urlkwargs.add('q', query_string) return search, urlkwargs
def default_search_factory(self, search, query_parser=None)
Parse query using elasticsearch DSL query. :param self: REST view. :param search: Elastic search DSL search instance. :returns: Tuple with search instance and URL arguments.
3.593017
3.344612
1.07427
if isinstance(func, functools.partial): return _get_func_args(func.func) if inspect.isfunction(func) or inspect.ismethod(func): return list(inspect.getargspec(func).args) if callable(func): return list(inspect.getargspec(func.__call__).args)
def _get_func_args(func)
Get a list of the arguments a function or method has.
1.912545
1.808537
1.05751
return simpledc.tostring( self.transform_record(pid, record, links_factory))
def serialize(self, pid, record, links_factory=None)
Serialize a single record and persistent identifier. :param pid: Persistent identifier instance. :param record: Record instance. :param links_factory: Factory function for record links.
15.826517
23.411119
0.676026
# Ensure we can run outside a application/request context. if request: if 'expanded' in request.args: return True elif 'compacted' in request.args: return False return self._expanded
def expanded(self)
Get JSON-LD expanded state.
10.380226
9.708156
1.069227
rec = copy.deepcopy(obj) rec.update(self.context) compacted = jsonld.compact(rec, self.context) if not self.expanded: return compacted else: return jsonld.expand(compacted)[0]
def transform_jsonld(self, obj)
Compact JSON according to context.
4.169566
3.401344
1.225858
result = super(JSONLDTransformerMixin, self).transform_record( pid, record, links_factory, **kwargs ) return self.transform_jsonld(result)
def transform_record(self, pid, record, links_factory=None, **kwargs)
Transform record into an intermediate representation.
3.675777
3.7256
0.986627
result = super(JSONLDTransformerMixin, self).transform_search_hit( pid, record_hit, links_factory, **kwargs ) return self.transform_jsonld(result)
def transform_search_hit(self, pid, record_hit, links_factory=None, **kwargs)
Transform search result hit into an intermediate representation.
3.351201
3.455591
0.969791
endpoint = '.{0}_item'.format( current_records_rest.default_endpoint_prefixes[pid.pid_type]) links = dict(self=url_for(endpoint, pid_value=pid.pid_value, _external=True)) return links
def default_links_factory(pid, record=None, **kwargs)
Factory for record links generation. :param pid: A Persistent Identifier instance. :returns: Dictionary containing a list of useful links for the record.
5.93134
6.91778
0.857405
def factory(pid, **kwargs): links = default_links_factory(pid) for link in additional_links: links[link] = additional_links[link].format(pid=pid, scheme=request.scheme, host=request.host) return links return factory
def default_links_factory_with_additional(additional_links)
Generate a links generation factory with the specified additional links. :param additional_links: A dict of link names to links to be added to the returned object. :returns: A link generation factory.
3.327774
4.205247
0.791339
def inner(asc): locations = request.values.getlist(argument, type=str) field = { '_geo_distance': { field_name: locations, 'order': 'asc' if asc else 'desc', 'unit': unit, } } if mode: field['_geo_distance']['mode'] = mode if distance_type: field['_geo_distance']['distance_type'] = distance_type return field return inner
def geolocation_sort(field_name, argument, unit, mode=None, distance_type=None)
Sort field factory for geo-location based sorting. :param argument: Name of URL query string field to parse pin location from. Multiple locations can be provided. Each location can be either a string "latitude,longitude" or a geohash. :param unit: Distance unit (e.g. km). :param mode: Sort mode (avg, min, max). :param distance_type: Distance calculation mode. :returns: Function that returns geolocation sort field.
2.713187
2.680089
1.012349
if isinstance(field, dict): if asc: return field else: # Field should only have one key and must have an order subkey. field = copy.deepcopy(field) key = list(field.keys())[0] field[key]['order'] = reverse_order(field[key]['order']) return field elif callable(field): return field(asc) else: key, key_asc = parse_sort_field(field) if not asc: key_asc = not key_asc return {key: {'order': 'asc' if key_asc else 'desc'}}
def eval_field(field, asc)
Evaluate a field for sorting purpose. :param field: Field definition (string, dict or callable). :param asc: ``True`` if order is ascending, ``False`` if descending. :returns: Dictionary with the sort field query.
3.364033
3.256023
1.033172
sort_arg_name = 'sort' urlfield = request.values.get(sort_arg_name, '', type=str) # Get default sorting if sort is not specified. if not urlfield: # cast to six.text_type to handle unicodes in Python 2 has_query = request.values.get('q', type=six.text_type) urlfield = current_app.config['RECORDS_REST_DEFAULT_SORT'].get( index, {}).get('query' if has_query else 'noquery', '') # Parse sort argument key, asc = parse_sort_field(urlfield) # Get sort options sort_options = current_app.config['RECORDS_REST_SORT_OPTIONS'].get( index, {}).get(key) if sort_options is None: return (search, {}) # Get fields to sort query by search = search.sort( *[eval_field(f, asc) for f in sort_options['fields']] ) return (search, {sort_arg_name: urlfield})
def default_sorter_factory(search, index)
Default sort query factory. :param query: Search query. :param index: Index to search in. :returns: Tuple of (query, URL arguments).
3.985606
3.973722
1.002991
if isinstance(original_data, list): for elem in original_data: self.check_unknown_fields(data, elem) else: for key in original_data: if key not in [ self.fields[field].attribute or field for field in self.fields ]: raise ValidationError( 'Unknown field name {}'.format(key), field_names=[key])
def check_unknown_fields(self, data, original_data)
Check for unknown keys.
3.050575
2.811833
1.084906
if isinstance(original_data, list): for elem in original_data: self.load_unknown_fields(data, elem) else: for key, value in original_data.items(): if key not in data: data[key] = value return data
def load_unknown_fields(self, data, original_data)
Check for unknown keys.
2.005402
1.812078
1.106687
# Remove already deserialized "pid" field pid_value = data.pop('pid', None) if pid_value: pid_field = current_app.config['PIDSTORE_RECID_FIELD'] data.setdefault(pid_field, pid_value) return data
def inject_pid(self, data)
Inject context PID in the RECID field.
4.475611
4.391768
1.019091
if isinstance(form, BaseFormSet): if settings.DEBUG: template = get_template('uni_form/uni_formset.html') else: template = uni_formset_template c = Context({'formset': form}) else: if settings.DEBUG: template = get_template('uni_form/uni_form.html') else: template = uni_form_template c = Context({'form': form}) return template.render(c)
def as_uni_form(form)
The original and still very useful way to generate a uni-form form/formset:: {% load uni_form_tags %} <form class="uniForm" action="post"> {% csrf_token %} {{ myform|as_uni_form }} </form>
2.154189
2.149029
1.002401
if isinstance(form, BaseFormSet): template = get_template('uni_form/errors_formset.html') c = Context({'formset': form}) else: template = get_template('uni_form/errors.html') c = Context({'form':form}) return template.render(c)
def as_uni_errors(form)
Renders only form errors like django-uni-form:: {% load uni_form_tags %} {{ form|as_uni_errors }}
2.604157
2.615817
0.995543
template = get_template('uni_form/field.html') c = Context({'field':field}) return template.render(c)
def as_uni_field(field)
Renders a form field like a django-uni-form field:: {% load uni_form_tags %} {{ form.field|as_uni_field }}
5.12032
4.936332
1.037272
return render_to_string(self.template, Context({'input': self}))
def render(self, form, form_style, context)
Renders an `<input />` if container is used as a Layout object
8.62542
7.232333
1.192619
FAIL_SILENTLY = getattr(settings, 'UNIFORM_FAIL_SILENTLY', True) if hasattr(field, 'render'): return field.render(form, form_style, context) else: # This allows fields to be unicode strings, always they don't use non ASCII try: if isinstance(field, unicode): field = str(field) # If `field` is not unicode then we turn it into a unicode string, otherwise doing # str(field) would give no error and the field would not be resolved, causing confusion else: field = str(unicode(field)) except (UnicodeEncodeError, UnicodeDecodeError): raise Exception("Field '%s' is using forbidden unicode characters" % field) try: field_instance = form.fields[field] except KeyError: if not FAIL_SILENTLY: raise Exception("Could not resolve form field '%s'." % field) else: field_instance = None logging.warning("Could not resolve form field '%s'." % field, exc_info=sys.exc_info()) if not field in form.rendered_fields: form.rendered_fields.append(field) else: if not FAIL_SILENTLY: raise Exception("A field should only be rendered once: %s" % field) else: logging.warning("A field should only be rendered once: %s" % field, exc_info=sys.exc_info()) if field_instance is None: html = '' else: bound_field = BoundField(form, field_instance, field) if template is None: template = default_field_template else: template = get_template(template) # We save the Layout object's bound fields in the layout object's `bound_fields` list if layout_object is not None: layout_object.bound_fields.append(bound_field) html = template.render(Context({'field': bound_field, 'labelclass': labelclass})) return html
def render_field(field, form, form_style, context, template=None, labelclass=None, layout_object=None)
Renders a django-uni-form field :param field: Can be a string or a Layout object like `Row`. If it's a layout object, we call its render method, otherwise we instantiate a BoundField and render it using default template 'uni_form/field.html' The field is added to a list that the form holds called `rendered_fields` to avoid double rendering fields. :param form: The form/formset to which that field belongs to. :param form_style: We need this to render uni-form divs using helper's chosen style. :template: Template used for rendering the field. :layout_object: If passed, it points to the Layout object that is being rendered. We use it to store its bound fields in a list called `layout_object.bound_fields`
3.271797
3.086381
1.060076
token = token.split_contents() form = token.pop(1) try: helper = token.pop(1) except IndexError: helper = None return UniFormNode(form, helper)
def do_uni_form(parser, token)
You need to pass in at least the form/formset object, and can also pass in the optional `uni_form.helpers.FormHelper` object. helper (optional): A `uni_form.helpers.FormHelper` object. Usage:: {% include uni_form_tags %} {% uni_form my-form my_helper %}
2.818368
3.044396
0.925756
actual_form = self.form.resolve(context) attrs = {} if self.helper is not None: helper = self.helper.resolve(context) if not isinstance(helper, FormHelper): raise TypeError('helper object provided to uni_form tag must be a uni_form.helpers.FormHelper object.') attrs = helper.get_attributes() else: helper = None # We get the response dictionary is_formset = isinstance(actual_form, BaseFormSet) response_dict = self.get_response_dict(attrs, context, is_formset) # If we have a helper's layout we use it, for the form or the formset's forms if helper and helper.layout: if not is_formset: actual_form.form_html = helper.render_layout(actual_form, context) else: forloop = ForLoopSimulator(actual_form) for form in actual_form.forms: context.update({'forloop': forloop}) form.form_html = helper.render_layout(form, context) forloop.iterate() if is_formset: response_dict.update({'formset': actual_form}) else: response_dict.update({'form': actual_form}) return Context(response_dict)
def get_render(self, context)
Returns a `Context` object with all the necesarry stuff for rendering the form :param context: `django.template.Context` variable holding the context for the node `self.form` and `self.helper` are resolved into real Python objects resolving them from the `context`. The `actual_form` can be a form or a formset. If it's a formset `is_formset` is set to True. If the helper has a layout we use it, for rendering the form or the formset's forms.
4.018039
3.254434
1.234635
form_type = "form" if is_formset: form_type = "formset" # We take form/formset parameters from attrs if they are set, otherwise we use defaults response_dict = { '%s_action' % form_type: attrs.get("form_action", ''), '%s_method' % form_type: attrs.get("form_method", 'post'), '%s_tag' % form_type: attrs.get("form_tag", True), '%s_class' % form_type: attrs.get("class", ''), '%s_id' % form_type: attrs.get("id", ""), '%s_style' % form_type: attrs.get("form_style", None), 'form_error_title': attrs.get("form_error_title", None), 'formset_error_title': attrs.get("formset_error_title", None), 'inputs': attrs.get('inputs', []), 'is_formset': is_formset, } if context.has_key('csrf_token'): response_dict['csrf_token'] = context['csrf_token'] return response_dict
def get_response_dict(self, attrs, context, is_formset)
Returns a dictionary with all the parameters necessary to render the form/formset in a template. :param attrs: Dictionary with the helper's attributes used for rendering the form/formset :param context: `django.template.Context` for the node :param is_formset: Boolean value. If set to True, indicates we are working with a formset.
2.215454
2.216444
0.999553
form.rendered_fields = [] html = self.layout.render(form, self.form_style, context) for field in form.fields.keys(): if not field in form.rendered_fields: html += render_field(field, form, self.form_style, context) return mark_safe(html)
def render_layout(self, form, context)
Returns safe html of the rendering of the layout
3.385013
3.31337
1.021623
items = {} items['form_method'] = self.form_method.strip() items['form_tag'] = self.form_tag items['form_style'] = self.form_style.strip() if self.form_action: items['form_action'] = self.form_action.strip() if self.form_id: items['id'] = self.form_id.strip() if self.form_class: items['class'] = self.form_class.strip() if self.inputs: items['inputs'] = self.inputs if self.form_error_title: items['form_error_title'] = self.form_error_title.strip() if self.formset_error_title: items['formset_error_title'] = self.formset_error_title.strip() return items
def get_attributes(self)
Used by the uni_form_tags to get helper attributes
1.976387
1.817976
1.087136
self.group[gr] = exp self.total = sum(self.group.values())
def add_exp(self,gr,exp)
Function to add the counts for each sample :param gr: name of the sample :param exp: counts of sample **gr** :returns: dict with key,values equally to name,counts.
5.487694
7.885586
0.695915
joint = Joint() for v1, p1 in pmf1.Items(): for v2, p2 in pmf2.Items(): joint.Set((v1, v2), p1 * p2) return joint
def MakeJoint(pmf1, pmf2)
Joint distribution of values from pmf1 and pmf2. Args: pmf1: Pmf object pmf2: Pmf object Returns: Joint pmf of value pairs
2.452446
3.175814
0.772226
hist = Hist(name=name) [hist.Incr(x) for x in t] return hist
def MakeHistFromList(t, name='')
Makes a histogram from an unsorted sequence of values. Args: t: sequence of numbers name: string name for this histogram Returns: Hist object
5.689939
8.134429
0.699488
hist = MakeHistFromList(t) d = hist.GetDict() pmf = Pmf(d, name) pmf.Normalize() return pmf
def MakePmfFromList(t, name='')
Makes a PMF from an unsorted sequence of values. Args: t: sequence of numbers name: string name for this PMF Returns: Pmf object
4.941692
5.233616
0.944221
pmf = Pmf(d, name) pmf.Normalize() return pmf
def MakePmfFromDict(d, name='')
Makes a PMF from a map from values to probabilities. Args: d: dictionary that maps values to probabilities name: string name for this PMF Returns: Pmf object
3.893511
7.951514
0.489657