idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
58,300
def json_to_file ( data , filename , pretty = False ) : kwargs = dict ( indent = 4 ) if pretty else { } dirname = os . path . dirname ( filename ) if not os . path . exists ( dirname ) : os . makedirs ( dirname ) dump = json . dumps ( api . __schema__ , ** kwargs ) with open ( filename , 'wb' ) as f : f . write ( dump ...
Dump JSON data to a file
58,301
def postman ( filename , pretty , urlvars , swagger ) : data = api . as_postman ( urlvars = urlvars , swagger = swagger ) json_to_file ( data , filename , pretty )
Dump the API as a Postman collection
58,302
def notify_badge_added_certified ( sender , kind = '' ) : if kind == CERTIFIED and isinstance ( sender , Organization ) : recipients = [ member . user for member in sender . members ] subject = _ ( 'Your organization "%(name)s" has been certified' , name = sender . name ) mail . send ( subject , recipients , 'badge_add...
Send an email when a CERTIFIED badge is added to an Organization
58,303
def discussions_notifications ( user ) : notifications = [ ] qs = discussions_for ( user ) . only ( 'id' , 'created' , 'title' , 'subject' ) for discussion in qs . no_dereference ( ) : notifications . append ( ( discussion . created , { 'id' : discussion . id , 'title' : discussion . title , 'subject' : { 'id' : discus...
Notify user about open discussions
58,304
def send_signal ( signal , request , user , ** kwargs ) : params = { 'user_ip' : request . remote_addr } params . update ( kwargs ) if user . is_authenticated : params [ 'uid' ] = user . id signal . send ( request . url , ** params )
Generic method to send signals to Piwik
58,305
def membership_request_notifications ( user ) : orgs = [ o for o in user . organizations if o . is_admin ( user ) ] notifications = [ ] for org in orgs : for request in org . pending_requests : notifications . append ( ( request . created , { 'id' : request . id , 'organization' : org . id , 'user' : { 'id' : request ....
Notify user about pending membership requests
58,306
def validate ( identifier ) : source = actions . validate_source ( identifier ) log . info ( 'Source %s (%s) has been validated' , source . slug , str ( source . id ) )
Validate a source given its identifier
58,307
def delete ( identifier ) : log . info ( 'Deleting source "%s"' , identifier ) actions . delete_source ( identifier ) log . info ( 'Deleted source "%s"' , identifier )
Delete a harvest source
58,308
def sources ( scheduled = False ) : sources = actions . list_sources ( ) if scheduled : sources = [ s for s in sources if s . periodic_task ] if sources : for source in sources : msg = '{source.name} ({source.backend}): {cron}' if source . periodic_task : cron = source . periodic_task . schedule_display else : cron = '...
List all harvest sources
58,309
def backends ( ) : log . info ( 'Available backends:' ) for backend in actions . list_backends ( ) : log . info ( '%s (%s)' , backend . name , backend . display_name or backend . name )
List available backends
58,310
def schedule ( identifier , ** kwargs ) : source = actions . schedule ( identifier , ** kwargs ) msg = 'Scheduled {source.name} with the following crontab: {cron}' log . info ( msg . format ( source = source , cron = source . periodic_task . crontab ) )
Schedule a harvest job to run periodically
58,311
def unschedule ( identifier ) : source = actions . unschedule ( identifier ) log . info ( 'Unscheduled harvest source "%s"' , source . name )
Unschedule a periodical harvest job
58,312
def attach ( domain , filename ) : log . info ( 'Attaching datasets for domain %s' , domain ) result = actions . attach ( domain , filename ) log . info ( 'Attached %s datasets to %s' , result . success , domain )
Attach existing datasets to their harvest remote id
58,313
def request_transfer ( subject , recipient , comment ) : TransferPermission ( subject ) . test ( ) if recipient == ( subject . organization or subject . owner ) : raise ValueError ( 'Recipient should be different than the current owner' ) transfer = Transfer . objects . create ( owner = subject . organization or subjec...
Initiate a transfer request
58,314
def accept_transfer ( transfer , comment = None ) : TransferResponsePermission ( transfer ) . test ( ) transfer . responded = datetime . now ( ) transfer . responder = current_user . _get_current_object ( ) transfer . status = 'accepted' transfer . response_comment = comment transfer . save ( ) subject = transfer . sub...
Accept an incoming transfer request
58,315
def refuse_transfer ( transfer , comment = None ) : TransferResponsePermission ( transfer ) . test ( ) transfer . responded = datetime . now ( ) transfer . responder = current_user . _get_current_object ( ) transfer . status = 'refused' transfer . response_comment = comment transfer . save ( ) return transfer
Refuse an incoming transfer request
58,316
def clean ( self ) : if not self . metrics : self . metrics = dict ( ( name , spec . default ) for name , spec in ( metric_catalog . get ( self . __class__ , { } ) . items ( ) ) ) return super ( WithMetrics , self ) . clean ( )
Fill metrics with defaults on create
58,317
def build_catalog ( site , datasets , format = None ) : site_url = url_for ( 'site.home_redirect' , _external = True ) catalog_url = url_for ( 'site.rdf_catalog' , _external = True ) graph = Graph ( namespace_manager = namespace_manager ) catalog = graph . resource ( URIRef ( catalog_url ) ) catalog . set ( RDF . type ...
Build the DCAT catalog for this site
58,318
def sendmail_proxy ( subject , email , template , ** context ) : sendmail . delay ( subject . value , email , template , ** context )
Cast the lazy_gettext ed subject to string before passing to Celery
58,319
def collect ( path , no_input ) : if exists ( path ) : msg = '"%s" directory already exists and will be erased' log . warning ( msg , path ) if not no_input : click . confirm ( 'Are you sure?' , abort = True ) log . info ( 'Deleting static directory "%s"' , path ) shutil . rmtree ( path ) prefix = current_app . static_...
Collect static files
58,320
def validate_harvester_notifications ( user ) : if not user . sysadmin : return [ ] notifications = [ ] qs = HarvestSource . objects ( validation__state = VALIDATION_PENDING ) qs = qs . only ( 'id' , 'created_at' , 'name' ) for source in qs : notifications . append ( ( source . created_at , { 'id' : source . id , 'name...
Notify admins about pending harvester validation
58,321
def get ( app , name ) : backend = get_all ( app ) . get ( name ) if not backend : msg = 'Harvest backend "{0}" is not registered' . format ( name ) raise EntrypointError ( msg ) return backend
Get a backend given its name
58,322
def search ( self ) : s = super ( TopicSearchMixin , self ) . search ( ) s = s . filter ( 'bool' , should = [ Q ( 'term' , tags = tag ) for tag in self . topic . tags ] ) return s
Override search to match on topic tags
58,323
def clean ( self ) : if not self . urlhash or 'url' in self . _get_changed_fields ( ) : self . urlhash = hash_url ( self . url ) super ( Reuse , self ) . clean ( )
Auto populate urlhash from url
58,324
def serve ( info , host , port , reload , debugger , eager_loading , with_threads ) : logger = logging . getLogger ( 'werkzeug' ) logger . setLevel ( logging . INFO ) logger . handlers = [ ] debug = current_app . config [ 'DEBUG' ] if reload is None : reload = bool ( debug ) if debugger is None : debugger = bool ( debu...
Runs a local udata development server .
58,325
def enforce_filetype_file ( form , field ) : if form . _fields . get ( 'filetype' ) . data != RESOURCE_FILETYPE_FILE : return domain = urlparse ( field . data ) . netloc allowed_domains = current_app . config [ 'RESOURCES_FILE_ALLOWED_DOMAINS' ] allowed_domains += [ current_app . config . get ( 'SERVER_NAME' ) ] if cur...
Only allowed domains in resource . url when filetype is file
58,326
def map_legacy_frequencies ( form , field ) : if field . data in LEGACY_FREQUENCIES : field . data = LEGACY_FREQUENCIES [ field . data ]
Map legacy frequencies to new ones
58,327
def resources_availability ( self ) : availabilities = list ( chain ( * [ org . check_availability ( ) for org in self . organizations ] ) ) availabilities = [ a for a in availabilities if type ( a ) is bool ] if availabilities : return round ( 100. * sum ( availabilities ) / len ( availabilities ) , 2 ) return 100
Return the percentage of availability for resources .
58,328
def datasets_org_count ( self ) : from udata . models import Dataset return sum ( Dataset . objects ( organization = org ) . visible ( ) . count ( ) for org in self . organizations )
Return the number of datasets of user s organizations .
58,329
def followers_org_count ( self ) : from udata . models import Follow return sum ( Follow . objects ( following = org ) . count ( ) for org in self . organizations )
Return the number of followers of user s organizations .
58,330
def get_badge ( self , kind ) : candidates = [ b for b in self . badges if b . kind == kind ] return candidates [ 0 ] if candidates else None
Get a badge given its kind if present
58,331
def add_badge ( self , kind ) : badge = self . get_badge ( kind ) if badge : return badge if kind not in getattr ( self , '__badges__' , { } ) : msg = 'Unknown badge type for {model}: {kind}' raise db . ValidationError ( msg . format ( model = self . __class__ . __name__ , kind = kind ) ) badge = Badge ( kind = kind ) ...
Perform an atomic prepend for a new badge
58,332
def remove_badge ( self , kind ) : self . update ( __raw__ = { '$pull' : { 'badges' : { 'kind' : kind } } } ) self . reload ( ) on_badge_removed . send ( self , kind = kind ) post_save . send ( self . __class__ , document = self )
Perform an atomic removal for a given badge
58,333
def toggle_badge ( self , kind ) : badge = self . get_badge ( kind ) if badge : return self . remove_badge ( kind ) else : return self . add_badge ( kind )
Toggle a badge given its kind
58,334
def badge_label ( self , badge ) : kind = badge . kind if isinstance ( badge , Badge ) else badge return self . __badges__ [ kind ]
Display the badge label for a given kind
58,335
def discussions_for ( user , only_open = True ) : datasets = Dataset . objects . owned_by ( user . id , * user . organizations ) . only ( 'id' , 'slug' ) reuses = Reuse . objects . owned_by ( user . id , * user . organizations ) . only ( 'id' , 'slug' ) qs = Discussion . objects ( subject__in = list ( datasets ) + list...
Build a queryset to query discussions related to a given user s assets .
58,336
def nofollow_callback ( attrs , new = False ) : parsed_url = urlparse ( attrs [ ( None , 'href' ) ] ) if parsed_url . netloc in ( '' , current_app . config [ 'SERVER_NAME' ] ) : attrs [ ( None , 'href' ) ] = '{scheme}://{netloc}{path}' . format ( scheme = 'https' if request . is_secure else 'http' , netloc = current_ap...
Turn relative links into external ones and avoid nofollow for us
58,337
def bleach_clean ( stream ) : return bleach . clean ( stream , tags = current_app . config [ 'MD_ALLOWED_TAGS' ] , attributes = current_app . config [ 'MD_ALLOWED_ATTRIBUTES' ] , styles = current_app . config [ 'MD_ALLOWED_STYLES' ] , strip_comments = False )
Sanitize malicious attempts but keep the EXCERPT_TOKEN . By default only keeps bleach . ALLOWED_TAGS .
58,338
def toggle ( path_or_id , badge_kind ) : if exists ( path_or_id ) : with open ( path_or_id ) as open_file : for id_or_slug in open_file . readlines ( ) : toggle_badge ( id_or_slug . strip ( ) , badge_kind ) else : toggle_badge ( path_or_id , badge_kind )
Toggle a badge_kind for a given path_or_id
58,339
def upload ( name ) : storage = fs . by_name ( name ) return jsonify ( success = True , ** handle_upload ( storage ) )
Handle upload on POST if authorized .
58,340
def unindex_model_on_delete ( sender , document , ** kwargs ) : if current_app . config . get ( 'AUTO_INDEX' ) : unindex . delay ( document )
Unindex Mongo document on post_delete
58,341
def register ( adapter ) : if adapter . model and adapter . model not in adapter_catalog : adapter_catalog [ adapter . model ] = adapter post_save . connect ( reindex_model_on_save , sender = adapter . model ) post_delete . connect ( unindex_model_on_delete , sender = adapter . model ) return adapter
Register a search adapter
58,342
def process ( self , formdata = None , obj = None , data = None , ** kwargs ) : self . _obj = obj super ( CommonFormMixin , self ) . process ( formdata , obj , data , ** kwargs )
Wrap the process method to store the current object instance
58,343
def get ( name ) : linkcheckers = get_enabled ( ENTRYPOINT , current_app ) linkcheckers . update ( no_check = NoCheckLinkchecker ) selected_linkchecker = linkcheckers . get ( name ) if not selected_linkchecker : default_linkchecker = current_app . config . get ( 'LINKCHECKING_DEFAULT_LINKCHECKER' ) selected_linkchecker...
Get a linkchecker given its name or fallback on default
58,344
def get_notifications ( user ) : notifications = [ ] for name , func in _providers . items ( ) : notifications . extend ( [ { 'type' : name , 'created_on' : dt , 'details' : details } for dt , details in func ( user ) ] ) return notifications
List notification for a given user
58,345
def count_tags ( self ) : for key , model in TAGGED . items ( ) : collection = '{0}_tags' . format ( key ) results = ( model . objects ( tags__exists = True ) . map_reduce ( map_tags , reduce_tags , collection ) ) for result in results : tag , created = Tag . objects . get_or_create ( name = result . key , auto_save = ...
Count tag occurrences by type and update the tag collection
58,346
def from_model ( cls , document ) : return cls ( meta = { 'id' : document . id } , ** cls . serialize ( document ) )
By default use the to_dict method
58,347
def completer_tokenize ( cls , value , min_length = 3 ) : tokens = list ( itertools . chain ( * [ [ m for m in n . split ( "'" ) if len ( m ) > min_length ] for n in value . split ( ' ' ) ] ) ) return list ( set ( [ value ] + tokens + [ ' ' . join ( tokens ) ] ) )
Quick and dirty tokenizer for completion suggester
58,348
def facet_search ( cls , * facets ) : f = dict ( ( k , v ) for k , v in cls . facets . items ( ) if k in facets ) class TempSearch ( SearchQuery ) : adapter = cls analyzer = cls . analyzer boosters = cls . boosters doc_types = cls facets = f fields = cls . fields fuzzy = cls . fuzzy match_type = cls . match_type model ...
Build a FacetSearch for a given list of facets
58,349
def populate_slug ( instance , field ) : value = getattr ( instance , field . db_field ) try : previous = instance . __class__ . objects . get ( id = instance . id ) except Exception : previous = None changed = field . db_field in instance . _get_changed_fields ( ) manual = not previous and value or changed if not manu...
Populate a slug field if needed .
58,350
def slugify ( self , value ) : if value is None : return return slugify . slugify ( value , max_length = self . max_length , separator = self . separator , to_lower = self . lower_case )
Apply slugification according to specified field rules
58,351
def cleanup_on_delete ( self , sender , document , ** kwargs ) : if not self . follow or sender is not self . owner_document : return slug = getattr ( document , self . db_field ) namespace = self . owner_document . __name__ SlugFollow . objects ( namespace = namespace , new_slug = slug ) . delete ( )
Clean up slug redirections on object deletion
58,352
def badge_form ( model ) : class BadgeForm ( ModelForm ) : model_class = Badge kind = fields . RadioField ( _ ( 'Kind' ) , [ validators . DataRequired ( ) ] , choices = model . __badges__ . items ( ) , description = _ ( 'Kind of badge (certified, etc)' ) ) return BadgeForm
A form factory for a given model badges
58,353
def delay ( name , args , kwargs ) : args = args or [ ] kwargs = dict ( k . split ( ) for k in kwargs ) if kwargs else { } if name not in celery . tasks : log . error ( 'Job %s not found' , name ) job = celery . tasks [ name ] log . info ( 'Sending job %s' , name ) async_result = job . delay ( * args , ** kwargs ) log ...
Run a job asynchronously
58,354
def is_url ( default_scheme = 'http' , ** kwargs ) : def converter ( value ) : if value is None : return value if '://' not in value and default_scheme : value = '://' . join ( ( default_scheme , value . strip ( ) ) ) try : return uris . validate ( value ) except uris . ValidationError as e : raise Invalid ( e . messag...
Return a converter that converts a clean string to an URL .
58,355
def hash ( value ) : if not value : return elif len ( value ) == 32 : type = 'md5' elif len ( value ) == 40 : type = 'sha1' elif len ( value ) == 64 : type = 'sha256' else : return None return { 'type' : type , 'value' : value }
Detect a hash type
58,356
def iter_adapters ( ) : adapters = adapter_catalog . values ( ) return sorted ( adapters , key = lambda a : a . model . __name__ )
Iter over adapter in predictable way
58,357
def iter_qs ( qs , adapter ) : for obj in qs . no_cache ( ) . no_dereference ( ) . timeout ( False ) : if adapter . is_indexable ( obj ) : try : doc = adapter . from_model ( obj ) . to_dict ( include_meta = True ) yield doc except Exception as e : model = adapter . model . __name__ log . error ( 'Unable to index %s "%s...
Safely iterate over a DB QuerySet yielding ES documents
58,358
def index_model ( index_name , adapter ) : model = adapter . model log . info ( 'Indexing {0} objects' . format ( model . __name__ ) ) qs = model . objects if hasattr ( model . objects , 'visible' ) : qs = qs . visible ( ) if adapter . exclude_fields : qs = qs . exclude ( * adapter . exclude_fields ) docs = iter_qs ( q...
Index all objects given a model
58,359
def enable_refresh ( index_name ) : refresh_interval = current_app . config [ 'ELASTICSEARCH_REFRESH_INTERVAL' ] es . indices . put_settings ( index = index_name , body = { 'index' : { 'refresh_interval' : refresh_interval } } ) es . indices . forcemerge ( index = index_name , request_timeout = 30 )
Enable refresh and force merge . To be used after indexing .
58,360
def set_alias ( index_name , delete = True ) : log . info ( 'Creating alias "{0}" on index "{1}"' . format ( es . index_name , index_name ) ) if es . indices . exists_alias ( name = es . index_name ) : alias = es . indices . get_alias ( name = es . index_name ) previous_indices = alias . keys ( ) if index_name not in p...
Properly end an indexation by creating an alias . Previous alias is deleted if needed .
58,361
def handle_error ( index_name , keep = False ) : signal . signal ( signal . SIGINT , signal . default_int_handler ) signal . signal ( signal . SIGTERM , signal . default_int_handler ) has_error = False try : yield except KeyboardInterrupt : print ( '' ) log . warning ( 'Interrupted by signal' ) has_error = True except ...
Handle errors while indexing . In case of error properly log it remove the index and exit . If keep is True index is not deleted .
58,362
def index ( models = None , name = None , force = False , keep = False ) : index_name = name or default_index_name ( ) doc_types_names = [ m . __name__ . lower ( ) for m in adapter_catalog . keys ( ) ] models = [ model . lower ( ) . rstrip ( 's' ) for model in ( models or [ ] ) ] for model in models : if model not in d...
Initialize or rebuild the search index
58,363
def create_app ( config = 'udata.settings.Defaults' , override = None , init_logging = init_logging ) : app = UDataApp ( APP_NAME ) app . config . from_object ( config ) settings = os . environ . get ( 'UDATA_SETTINGS' , join ( os . getcwd ( ) , 'udata.cfg' ) ) if exists ( settings ) : app . settings_file = settings ap...
Factory for a minimal application
58,364
def standalone ( app ) : from udata import api , core , frontend core . init_app ( app ) frontend . init_app ( app ) api . init_app ( app ) register_features ( app ) return app
Factory for an all in one application
58,365
def get_migration ( plugin , filename ) : db = get_db ( ) return db . migrations . find_one ( { 'plugin' : plugin , 'filename' : filename } )
Get an existing migration record if exists
58,366
def record_migration ( plugin , filename , script , ** kwargs ) : db = get_db ( ) db . eval ( RECORD_WRAPPER , plugin , filename , script ) return True
Only record a migration without applying it
58,367
def available_migrations ( ) : migrations = [ ] for filename in resource_listdir ( 'udata' , 'migrations' ) : if filename . endswith ( '.js' ) : migrations . append ( ( 'udata' , 'udata' , filename ) ) plugins = entrypoints . get_enabled ( 'udata.models' , current_app ) for plugin , module in plugins . items ( ) : if r...
List available migrations for udata and enabled plugins
58,368
def log_status ( plugin , filename , status ) : display = ':' . join ( ( plugin , filename ) ) + ' ' log . info ( '%s [%s]' , '{:.<70}' . format ( display ) , status )
Properly display a migration status line
58,369
def status ( ) : for plugin , package , filename in available_migrations ( ) : migration = get_migration ( plugin , filename ) if migration : status = green ( migration [ 'date' ] . strftime ( DATE_FORMAT ) ) else : status = yellow ( 'Not applied' ) log_status ( plugin , filename , status )
Display the database migrations status
58,370
def migrate ( record , dry_run = False ) : handler = record_migration if record else execute_migration success = True for plugin , package , filename in available_migrations ( ) : migration = get_migration ( plugin , filename ) if migration or not success : log_status ( plugin , filename , cyan ( 'Skipped' ) ) else : s...
Perform database migrations
58,371
def unrecord ( plugin_or_specs , filename ) : plugin , filename = normalize_migration ( plugin_or_specs , filename ) migration = get_migration ( plugin , filename ) if migration : log . info ( 'Removing migration %s:%s' , plugin , filename ) db = get_db ( ) db . eval ( UNRECORD_WRAPPER , migration [ '_id' ] ) else : lo...
Remove a database migration record .
58,372
def validate ( url , schemes = None , tlds = None , private = None , local = None , credentials = None ) : url = url . strip ( ) private = config_for ( private , 'URLS_ALLOW_PRIVATE' ) local = config_for ( local , 'URLS_ALLOW_LOCAL' ) credentials = config_for ( credentials , 'URLS_ALLOW_CREDENTIALS' ) schemes = config_...
Validate and normalize an URL
58,373
def get_json_ld_extra ( key , value ) : value = value . serialize ( ) if hasattr ( value , 'serialize' ) else value return { '@type' : 'http://schema.org/PropertyValue' , 'name' : key , 'value' : value , }
Serialize an extras key value pair into JSON - LD
58,374
def get_resource ( id ) : dataset = Dataset . objects ( resources__id = id ) . first ( ) if dataset : return get_by ( dataset . resources , 'id' , id ) else : return CommunityResource . objects ( id = id ) . first ( )
Fetch a resource given its UUID
58,375
def guess ( cls , * strings , ** kwargs ) : license = None for string in strings : license = cls . guess_one ( string ) if license : break return license or kwargs . get ( 'default' )
Try to guess a license from a list of strings .
58,376
def guess_one ( cls , text ) : if not text : return qs = cls . objects text = text . strip ( ) . lower ( ) slug = cls . slug . slugify ( text ) license = qs ( db . Q ( id = text ) | db . Q ( slug = slug ) | db . Q ( url = text ) | db . Q ( alternate_urls = text ) ) . first ( ) if license is None : computed = ( ( l , rd...
Try to guess license from a string .
58,377
def need_check ( self ) : min_cache_duration , max_cache_duration , ko_threshold = [ current_app . config . get ( k ) for k in ( 'LINKCHECKING_MIN_CACHE_DURATION' , 'LINKCHECKING_MAX_CACHE_DURATION' , 'LINKCHECKING_UNAVAILABLE_THRESHOLD' , ) ] count_availability = self . extras . get ( 'check:count-availability' , 1 ) ...
Does the resource needs to be checked against its linkchecker?
58,378
def check_availability ( self ) : remote_resources = [ resource for resource in self . resources if resource . filetype == 'remote' ] if not remote_resources : return [ ] return [ resource . check_availability ( ) for resource in remote_resources ]
Check if resources from that dataset are available .
58,379
def next_update ( self ) : delta = None if self . frequency == 'daily' : delta = timedelta ( days = 1 ) elif self . frequency == 'weekly' : delta = timedelta ( weeks = 1 ) elif self . frequency == 'fortnighly' : delta = timedelta ( weeks = 2 ) elif self . frequency == 'monthly' : delta = timedelta ( weeks = 4 ) elif se...
Compute the next expected update date
58,380
def quality ( self ) : from udata . models import Discussion result = { } if not self . id : return result if self . next_update : result [ 'frequency' ] = self . frequency result [ 'update_in' ] = - ( self . next_update - datetime . now ( ) ) . days if self . tags : result [ 'tags_count' ] = len ( self . tags ) if sel...
Return a dict filled with metrics related to the inner
58,381
def compute_quality_score ( self , quality ) : score = 0 UNIT = 2 if 'frequency' in quality : if quality [ 'update_in' ] < 0 : score += UNIT else : score -= UNIT if 'tags_count' in quality : if quality [ 'tags_count' ] > 3 : score += UNIT if 'description_length' in quality : if quality [ 'description_length' ] > 100 : ...
Compute the score related to the quality of that dataset .
58,382
def add_resource ( self , resource ) : resource . validate ( ) self . update ( __raw__ = { '$push' : { 'resources' : { '$each' : [ resource . to_mongo ( ) ] , '$position' : 0 } } } ) self . reload ( ) post_save . send ( self . __class__ , document = self , resource_added = resource . id )
Perform an atomic prepend for a new resource
58,383
def update_resource ( self , resource ) : index = self . resources . index ( resource ) data = { 'resources__{index}' . format ( index = index ) : resource } self . update ( ** data ) self . reload ( ) post_save . send ( self . __class__ , document = self )
Perform an atomic update for an existing resource
58,384
def get_aggregation ( self , name ) : agg = self . aggregations [ name ] if 'buckets' in agg : return agg [ 'buckets' ] else : return agg
Fetch an aggregation result given its name
58,385
def language ( lang_code ) : ctx = None if not request : ctx = current_app . test_request_context ( ) ctx . push ( ) backup = g . get ( 'lang_code' ) g . lang_code = lang_code refresh ( ) yield g . lang_code = backup if ctx : ctx . pop ( ) refresh ( )
Force a given language
58,386
def redirect_to_lang ( * args , ** kwargs ) : endpoint = request . endpoint . replace ( '_redirect' , '' ) kwargs = multi_to_dict ( request . args ) kwargs . update ( request . view_args ) kwargs [ 'lang_code' ] = default_lang return redirect ( url_for ( endpoint , ** kwargs ) )
Redirect non lang - prefixed urls to default language .
58,387
def redirect_to_unlocalized ( * args , ** kwargs ) : endpoint = request . endpoint . replace ( '_redirect' , '' ) kwargs = multi_to_dict ( request . args ) kwargs . update ( request . view_args ) kwargs . pop ( 'lang_code' , None ) return redirect ( url_for ( endpoint , ** kwargs ) )
Redirect lang - prefixed urls to no prefixed URL .
58,388
def get_translations ( self ) : ctx = stack . top if ctx is None : return NullTranslations ( ) locale = get_locale ( ) cache = self . get_translations_cache ( ctx ) translations = cache . get ( str ( locale ) ) if translations is None : translations_dir = self . get_translations_path ( ctx ) translations = Translations...
Returns the correct gettext translations that should be used for this request . This will never fail and return a dummy translation object if used outside of the request or if a translation cannot be found .
58,389
def person_involved ( self , person ) : return any ( message . posted_by == person for message in self . discussion )
Return True if the given person has been involved in the
58,390
def is_ignored ( resource ) : ignored_domains = current_app . config [ 'LINKCHECKING_IGNORE_DOMAINS' ] url = resource . url if url : parsed_url = urlparse ( url ) return parsed_url . netloc in ignored_domains return True
Check if the resource s URL is part of LINKCHECKING_IGNORE_DOMAINS
58,391
def check_resource ( resource ) : linkchecker_type = resource . extras . get ( 'check:checker' ) LinkChecker = get_linkchecker ( linkchecker_type ) if not LinkChecker : return { 'error' : 'No linkchecker configured.' } , 503 if is_ignored ( resource ) : return dummy_check_response ( ) result = LinkChecker ( ) . check (...
Check a resource availability against a linkchecker backend
58,392
def owned_pre_save ( sender , document , ** kwargs ) : if not isinstance ( document , Owned ) : return changed_fields = getattr ( document , '_changed_fields' , [ ] ) if 'organization' in changed_fields : if document . owner : document . _previous_owner = document . owner document . owner = None else : original = sende...
Owned mongoengine . pre_save signal handler Need to fetch original owner before the new one erase it .
58,393
def owned_post_save ( sender , document , ** kwargs ) : if isinstance ( document , Owned ) and hasattr ( document , '_previous_owner' ) : Owned . on_owner_change . send ( document , previous = document . _previous_owner )
Owned mongoengine . post_save signal handler Dispatch the Owned . on_owner_change signal once the document has been saved including the previous owner .
58,394
def get_enabled_plugins ( ) : plugins = entrypoints . get_enabled ( 'udata.preview' , current_app ) . values ( ) valid = [ p for p in plugins if issubclass ( p , PreviewPlugin ) ] for plugin in plugins : if plugin not in valid : clsname = plugin . __name__ msg = '{0} is not a valid preview plugin' . format ( clsname ) ...
Returns enabled preview plugins .
58,395
def get_preview_url ( resource ) : candidates = ( p . preview_url ( resource ) for p in get_enabled_plugins ( ) if p . can_preview ( resource ) ) return next ( iter ( candidates ) , None )
Returns the most pertinent preview URL associated to the resource if any .
58,396
def get_by ( lst , field , value ) : for row in lst : if ( ( isinstance ( row , dict ) and row . get ( field ) == value ) or ( getattr ( row , field , None ) == value ) ) : return row
Find an object in a list given a field value
58,397
def multi_to_dict ( multi ) : return dict ( ( key , value [ 0 ] if len ( value ) == 1 else value ) for key , value in multi . to_dict ( False ) . items ( ) )
Transform a Werkzeug multidictionary into a flat dictionary
58,398
def daterange_start ( value ) : if not value : return None elif isinstance ( value , datetime ) : return value . date ( ) elif isinstance ( value , date ) : return value result = parse_dt ( value ) . date ( ) dashes = value . count ( '-' ) if dashes >= 2 : return result elif dashes == 1 : return result . replace ( day ...
Parse a date range start boundary
58,399
def daterange_end ( value ) : if not value : return None elif isinstance ( value , datetime ) : return value . date ( ) elif isinstance ( value , date ) : return value result = parse_dt ( value ) . date ( ) dashes = value . count ( '-' ) if dashes >= 2 : return result elif dashes == 1 : return result + relativedelta ( ...
Parse a date range end boundary