idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
58,300
def json_to_file(data, filename, pretty=False):
    """Dump JSON data to a file.

    Creates the parent directory when needed. ``pretty`` enables a
    4-space indented output.
    """
    kwargs = dict(indent=4) if pretty else {}
    dirname = os.path.dirname(filename)
    # Guard against filenames without a directory component: makedirs('')
    # raises, and the directory may already exist.
    if dirname and not os.path.exists(dirname):
        os.makedirs(dirname)
    # BUG FIX: serialize the provided data; the original serialized the
    # global `api.__schema__` and ignored its `data` argument entirely.
    dump = json.dumps(data, **kwargs)
    with open(filename, 'wb') as f:
        f.write(dump.encode('utf-8'))
Dump JSON data to a file
58,301
def postman(filename, pretty, urlvars, swagger):
    """Dump the API as a Postman collection."""
    collection = api.as_postman(urlvars=urlvars, swagger=swagger)
    json_to_file(collection, filename, pretty)
Dump the API as a Postman collection
58,302
def notify_badge_added_certified(sender, kind=''):
    """Send an email when a CERTIFIED badge is added to an Organization."""
    # Guard clause: only certified badges on organizations are notified.
    if kind != CERTIFIED or not isinstance(sender, Organization):
        return
    recipients = [member.user for member in sender.members]
    subject = _('Your organization "%(name)s" has been certified',
                name=sender.name)
    mail.send(subject, recipients, 'badge_added_certified',
              organization=sender, badge=sender.get_badge(kind))
Send an email when a CERTIFIED badge is added to an Organization
58,303
def discussions_notifications(user):
    """Notify user about open discussions."""
    queryset = discussions_for(user).only('id', 'created', 'title', 'subject')
    return [
        (discussion.created, {
            'id': discussion.id,
            'title': discussion.title,
            'subject': {
                'id': discussion.subject['_ref'].id,
                'type': discussion.subject['_cls'].lower(),
            },
        })
        for discussion in queryset.no_dereference()
    ]
Notify user about open discussions
58,304
def send_signal(signal, request, user, **kwargs):
    """Generic method to send signals to Piwik.

    Extra keyword arguments are forwarded and may override ``user_ip``.
    """
    payload = {'user_ip': request.remote_addr}
    payload.update(kwargs)
    if user.is_authenticated:
        payload['uid'] = user.id
    signal.send(request.url, **payload)
Generic method to send signals to Piwik
58,305
def membership_request_notifications(user):
    """Notify user about pending membership requests."""
    admin_orgs = [org for org in user.organizations if org.is_admin(user)]
    notifications = []
    for org in admin_orgs:
        for pending in org.pending_requests:
            notifications.append((pending.created, {
                'id': pending.id,
                'organization': org.id,
                'user': {
                    'id': pending.user.id,
                    'fullname': pending.user.fullname,
                    'avatar': str(pending.user.avatar),
                },
            }))
    return notifications
Notify user about pending membership requests
58,306
def validate(identifier):
    """Validate a source given its identifier."""
    source = actions.validate_source(identifier)
    log.info('Source %s (%s) has been validated',
             source.slug, str(source.id))
Validate a source given its identifier
58,307
def delete(identifier):
    """Delete a harvest source."""
    log.info('Deleting source "%s"', identifier)
    actions.delete_source(identifier)
    log.info('Deleted source "%s"', identifier)
Delete a harvest source
58,308
def sources(scheduled=False):
    """List all harvest sources (optionally only the scheduled ones)."""
    candidates = actions.list_sources()
    if scheduled:
        candidates = [s for s in candidates if s.periodic_task]
    if not candidates:
        log.info('No sources scheduled yet' if scheduled
                 else 'No sources defined yet')
        return
    template = '{source.name} ({source.backend}): {cron}'
    for source in candidates:
        if source.periodic_task:
            cron = source.periodic_task.schedule_display
        else:
            cron = 'not scheduled'
        log.info(template.format(source=source, cron=cron))
List all harvest sources
58,309
def backends():
    """List available backends."""
    log.info('Available backends:')
    for backend in actions.list_backends():
        log.info('%s (%s)', backend.name,
                 backend.display_name or backend.name)
List available backends
58,310
def schedule(identifier, **kwargs):
    """Schedule a harvest job to run periodically."""
    source = actions.schedule(identifier, **kwargs)
    msg = 'Scheduled {source.name} with the following crontab: {cron}'
    log.info(msg.format(source=source, cron=source.periodic_task.crontab))
Schedule a harvest job to run periodically
58,311
def unschedule(identifier):
    """Unschedule a periodical harvest job."""
    source = actions.unschedule(identifier)
    log.info('Unscheduled harvest source "%s"', source.name)
Unschedule a periodical harvest job
58,312
def attach(domain, filename):
    """Attach existing datasets to their harvest remote id."""
    log.info('Attaching datasets for domain %s', domain)
    result = actions.attach(domain, filename)
    log.info('Attached %s datasets to %s', result.success, domain)
Attach existing datasets to their harvest remote id
58,313
def request_transfer(subject, recipient, comment):
    """Initiate a transfer request.

    Raises ValueError when the recipient already owns the subject.
    """
    TransferPermission(subject).test()
    current_owner = subject.organization or subject.owner
    if recipient == current_owner:
        raise ValueError('Recipient should be different than the current owner')
    return Transfer.objects.create(
        owner=current_owner,
        recipient=recipient,
        subject=subject,
        comment=comment,
    )
Initiate a transfer request
58,314
def accept_transfer(transfer, comment=None):
    """Accept an incoming transfer request and reassign ownership."""
    TransferResponsePermission(transfer).test()
    transfer.responded = datetime.now()
    transfer.responder = current_user._get_current_object()
    transfer.status = 'accepted'
    transfer.response_comment = comment
    transfer.save()
    subject = transfer.subject
    recipient = transfer.recipient
    # Ownership target depends on the recipient type.
    if isinstance(recipient, Organization):
        subject.organization = recipient
    elif isinstance(recipient, User):
        subject.owner = recipient
    subject.save()
    return transfer
Accept an incoming transfer request
58,315
def refuse_transfer(transfer, comment=None):
    """Refuse an incoming transfer request."""
    TransferResponsePermission(transfer).test()
    transfer.responded = datetime.now()
    transfer.responder = current_user._get_current_object()
    transfer.status = 'refused'
    transfer.response_comment = comment
    transfer.save()
    return transfer
Refuse an incoming transfer request
58,316
def clean(self):
    """Fill metrics with defaults on create."""
    if not self.metrics:
        specs = metric_catalog.get(self.__class__, {})
        self.metrics = dict(
            (name, spec.default) for name, spec in specs.items())
    return super(WithMetrics, self).clean()
Fill metrics with defaults on create
58,317
def build_catalog(site, datasets, format=None):
    """Build the DCAT catalog for this site.

    When ``datasets`` is a ``Paginable``, the catalog is exposed as a
    HYDRA partial collection view and ``format`` is required to build
    the pagination URLs.
    """
    site_url = url_for('site.home_redirect', _external=True)
    catalog_url = url_for('site.rdf_catalog', _external=True)
    graph = Graph(namespace_manager=namespace_manager)

    catalog = graph.resource(URIRef(catalog_url))
    catalog.set(RDF.type, DCAT.Catalog)
    catalog.set(DCT.title, Literal(site.title))
    catalog.set(DCT.language,
                Literal(current_app.config['DEFAULT_LANGUAGE']))
    catalog.set(FOAF.homepage, URIRef(site_url))

    publisher = graph.resource(BNode())
    publisher.set(RDF.type, FOAF.Organization)
    publisher.set(FOAF.name, Literal(current_app.config['SITE_AUTHOR']))
    catalog.set(DCT.publisher, publisher)

    for dataset in datasets:
        catalog.add(DCAT.dataset, dataset_to_rdf(dataset, graph))

    if isinstance(datasets, Paginable):
        if not format:
            raise ValueError('Pagination requires format')
        catalog.add(RDF.type, HYDRA.Collection)
        catalog.set(HYDRA.totalItems, Literal(datasets.total))
        kwargs = {
            'format': format,
            'page_size': datasets.page_size,
            '_external': True,
        }
        first_url = url_for('site.rdf_catalog_format', page=1, **kwargs)
        page_url = url_for('site.rdf_catalog_format',
                           page=datasets.page, **kwargs)
        last_url = url_for('site.rdf_catalog_format',
                           page=datasets.pages, **kwargs)
        pagination = graph.resource(URIRef(page_url))
        pagination.set(RDF.type, HYDRA.PartialCollectionView)
        pagination.set(HYDRA.first, URIRef(first_url))
        pagination.set(HYDRA.last, URIRef(last_url))
        if datasets.has_next:
            next_url = url_for('site.rdf_catalog_format',
                               page=datasets.page + 1, **kwargs)
            pagination.set(HYDRA.next, URIRef(next_url))
        if datasets.has_prev:
            prev_url = url_for('site.rdf_catalog_format',
                               page=datasets.page - 1, **kwargs)
            pagination.set(HYDRA.previous, URIRef(prev_url))
        catalog.set(HYDRA.view, pagination)

    return catalog
Build the DCAT catalog for this site
58,318
def sendmail_proxy(subject, email, template, **context):
    """Cast the lazy_gettext'ed subject to string before passing to Celery."""
    sendmail.delay(subject.value, email, template, **context)
Cast the lazy_gettext'ed subject to string before passing it to Celery
58,319
def collect(path, no_input):
    """Collect static files into the given path."""
    if exists(path):
        msg = '"%s" directory already exists and will be erased'
        log.warning(msg, path)
        if not no_input:
            click.confirm('Are you sure?', abort=True)
        log.info('Deleting static directory "%s"', path)
        shutil.rmtree(path)

    # Application-level assets.
    prefix = current_app.static_url_path or current_app.static_folder
    if prefix.startswith('/'):
        prefix = prefix[1:]
    destination = join(path, prefix)
    log.info('Copying application assets into "%s"', destination)
    shutil.copytree(current_app.static_folder, destination)

    # Blueprint assets, under their (possibly overridden) prefix.
    for blueprint in current_app.blueprints.values():
        if blueprint.has_static_folder:
            prefix = current_app.static_prefixes.get(blueprint.name)
            prefix = prefix or blueprint.url_prefix or ''
            prefix += blueprint.static_url_path or ''
            if prefix.startswith('/'):
                prefix = prefix[1:]
            log.info('Copying %s assets to %s', blueprint.name, prefix)
            destination = join(path, prefix)
            copy_recursive(blueprint.static_folder, destination)

    # Extra configured static directories.
    for prefix, source in current_app.config['STATIC_DIRS']:
        log.info('Copying %s to %s', source, prefix)
        destination = join(path, prefix)
        copy_recursive(source, destination)

    log.info('Done')
Collect static files
58,320
def validate_harvester_notifications(user):
    """Notify admins about pending harvester validation."""
    if not user.sysadmin:
        return []
    pending = HarvestSource.objects(validation__state=VALIDATION_PENDING)
    pending = pending.only('id', 'created_at', 'name')
    return [(source.created_at, {'id': source.id, 'name': source.name})
            for source in pending]
Notify admins about pending harvester validation
58,321
def get(app, name):
    """Get a backend given its name.

    Raises EntrypointError when the backend is not registered.
    """
    backend = get_all(app).get(name)
    if not backend:
        msg = 'Harvest backend "{0}" is not registered'.format(name)
        raise EntrypointError(msg)
    return backend
Get a backend given its name
58,322
def search(self):
    """Override search to match on topic tags."""
    base = super(TopicSearchMixin, self).search()
    tag_queries = [Q('term', tags=tag) for tag in self.topic.tags]
    return base.filter('bool', should=tag_queries)
Override search to match on topic tags
58,323
def clean(self):
    """Auto populate urlhash from url."""
    if not self.urlhash or 'url' in self._get_changed_fields():
        self.urlhash = hash_url(self.url)
    super(Reuse, self).clean()
Auto populate urlhash from url
58,324
def serve(info, host, port, reload, debugger, eager_loading, with_threads):
    """Run a local udata development server."""
    # Keep werkzeug logging at INFO but drop pre-installed handlers.
    logger = logging.getLogger('werkzeug')
    logger.setLevel(logging.INFO)
    logger.handlers = []

    debug = current_app.config['DEBUG']
    if reload is None:
        reload = bool(debug)
    if debugger is None:
        debugger = bool(debug)
    if eager_loading is None:
        eager_loading = not reload

    app = DispatchingApp(info.load_app, use_eager_loading=eager_loading)

    settings = os.environ.get('UDATA_SETTINGS',
                              os.path.join(os.getcwd(), 'udata.cfg'))
    extra_files = [settings]
    if reload:
        extra_files.extend(assets.manifests_paths())

    run_simple(host, port, app,
               use_reloader=reload,
               use_debugger=debugger,
               threaded=with_threads,
               extra_files=extra_files)
Runs a local udata development server .
58,325
def enforce_filetype_file(form, field):
    """Only allow configured domains in resource.url when filetype is file."""
    if form._fields.get('filetype').data != RESOURCE_FILETYPE_FILE:
        return
    domain = urlparse(field.data).netloc
    # BUG FIX: copy the configured sequence before extending it. The
    # original `allowed_domains += [...]` mutated the config value in
    # place (when it is a list), growing it on every validation call.
    allowed_domains = list(current_app.config['RESOURCES_FILE_ALLOWED_DOMAINS'])
    allowed_domains.append(current_app.config.get('SERVER_NAME'))
    if current_app.config.get('CDN_DOMAIN'):
        allowed_domains.append(current_app.config['CDN_DOMAIN'])
    if '*' in allowed_domains:
        return
    if domain and domain not in allowed_domains:
        message = _('Domain "{domain}" not allowed for filetype "{filetype}"')
        raise validators.ValidationError(
            message.format(domain=domain, filetype=RESOURCE_FILETYPE_FILE))
Only allowed domains in resource . url when filetype is file
58,326
def map_legacy_frequencies(form, field):
    """Map legacy frequencies to new ones."""
    if field.data in LEGACY_FREQUENCIES:
        field.data = LEGACY_FREQUENCIES[field.data]
Map legacy frequencies to new ones
58,327
def resources_availability(self):
    """Return the percentage of availability for resources."""
    checks = chain(*[org.check_availability()
                     for org in self.organizations])
    # Only strict booleans count; other check results are ignored.
    booleans = [result for result in checks if type(result) is bool]
    if not booleans:
        return 100
    return round(100. * sum(booleans) / len(booleans), 2)
Return the percentage of availability for resources .
58,328
def datasets_org_count(self):
    """Return the number of datasets of the user's organizations."""
    from udata.models import Dataset
    return sum(Dataset.objects(organization=org).visible().count()
               for org in self.organizations)
Return the number of datasets of the user's organizations.
58,329
def followers_org_count(self):
    """Return the number of followers of the user's organizations."""
    from udata.models import Follow
    return sum(Follow.objects(following=org).count()
               for org in self.organizations)
Return the number of followers of the user's organizations.
58,330
def get_badge(self, kind):
    """Get a badge given its kind, or None if absent."""
    for badge in self.badges:
        if badge.kind == kind:
            return badge
    return None
Get a badge given its kind if present
58,331
def add_badge(self, kind):
    """Perform an atomic prepend for a new badge."""
    badge = self.get_badge(kind)
    if badge:
        return badge
    if kind not in getattr(self, '__badges__', {}):
        msg = 'Unknown badge type for {model}: {kind}'
        raise db.ValidationError(msg.format(model=self.__class__.__name__,
                                            kind=kind))
    badge = Badge(kind=kind)
    if current_user.is_authenticated:
        badge.created_by = current_user.id
    # Atomic $push at position 0 keeps the newest badge first.
    self.update(__raw__={
        '$push': {
            'badges': {'$each': [badge.to_mongo()], '$position': 0}
        }
    })
    self.reload()
    post_save.send(self.__class__, document=self)
    on_badge_added.send(self, kind=kind)
    return self.get_badge(kind)
Perform an atomic prepend for a new badge
58,332
def remove_badge(self, kind):
    """Perform an atomic removal for a given badge."""
    self.update(__raw__={'$pull': {'badges': {'kind': kind}}})
    self.reload()
    on_badge_removed.send(self, kind=kind)
    post_save.send(self.__class__, document=self)
Perform an atomic removal for a given badge
58,333
def toggle_badge(self, kind):
    """Toggle a badge given its kind."""
    if self.get_badge(kind):
        return self.remove_badge(kind)
    return self.add_badge(kind)
Toggle a badge given its kind
58,334
def badge_label(self, badge):
    """Display the badge label for a given kind or Badge instance."""
    if isinstance(badge, Badge):
        badge = badge.kind
    return self.__badges__[badge]
Display the badge label for a given kind
58,335
def discussions_for(user, only_open=True):
    """Build a queryset to query discussions related to a given user's assets."""
    datasets = Dataset.objects.owned_by(
        user.id, *user.organizations).only('id', 'slug')
    reuses = Reuse.objects.owned_by(
        user.id, *user.organizations).only('id', 'slug')
    qs = Discussion.objects(subject__in=list(datasets) + list(reuses))
    if only_open:
        qs = qs(closed__exists=False)
    return qs
Build a queryset to query discussions related to a given user's assets.
58,336
def nofollow_callback(attrs, new=False):
    """Turn relative links into external ones and avoid nofollow for us."""
    parsed_url = urlparse(attrs[(None, 'href')])
    if parsed_url.netloc in ('', current_app.config['SERVER_NAME']):
        # Internal link: rebuild an absolute URL on our own domain.
        attrs[(None, 'href')] = '{scheme}://{netloc}{path}'.format(
            scheme='https' if request.is_secure else 'http',
            netloc=current_app.config['SERVER_NAME'],
            path=parsed_url.path)
        return attrs
    # External link: make sure rel contains nofollow.
    rel = [x for x in attrs.get((None, 'rel'), '').split(' ') if x]
    if 'nofollow' not in [x.lower() for x in rel]:
        rel.append('nofollow')
    attrs[(None, 'rel')] = ' '.join(rel)
    return attrs
Turn relative links into external ones and avoid nofollow for us
58,337
def bleach_clean(stream):
    """Sanitize malicious attempts while keeping comments intact."""
    return bleach.clean(
        stream,
        tags=current_app.config['MD_ALLOWED_TAGS'],
        attributes=current_app.config['MD_ALLOWED_ATTRIBUTES'],
        styles=current_app.config['MD_ALLOWED_STYLES'],
        strip_comments=False)
Sanitize malicious attempts but keep the EXCERPT_TOKEN . By default only keeps bleach . ALLOWED_TAGS .
58,338
def toggle(path_or_id, badge_kind):
    """Toggle a badge_kind for a given path_or_id.

    When the argument is an existing file path, toggle the badge for
    every id/slug listed in it (one per line).
    """
    if not exists(path_or_id):
        toggle_badge(path_or_id, badge_kind)
        return
    with open(path_or_id) as open_file:
        for id_or_slug in open_file.readlines():
            toggle_badge(id_or_slug.strip(), badge_kind)
Toggle a badge_kind for a given path_or_id
58,339
def upload(name):
    """Handle upload on POST if authorized."""
    storage = fs.by_name(name)
    return jsonify(success=True, **handle_upload(storage))
Handle upload on POST if authorized .
58,340
def unindex_model_on_delete(sender, document, **kwargs):
    """Unindex Mongo document on post_delete."""
    if current_app.config.get('AUTO_INDEX'):
        unindex.delay(document)
Unindex Mongo document on post_delete
58,341
def register(adapter):
    """Register a search adapter."""
    # Only register new adapters exposing a model.
    if adapter.model and adapter.model not in adapter_catalog:
        adapter_catalog[adapter.model] = adapter
        # Automatically (un)index objects on save/delete.
        post_save.connect(reindex_model_on_save, sender=adapter.model)
        post_delete.connect(unindex_model_on_delete, sender=adapter.model)
    return adapter
Register a search adapter
58,342
def process(self, formdata=None, obj=None, data=None, **kwargs):
    """Wrap the process method to store the current object instance."""
    self._obj = obj
    super(CommonFormMixin, self).process(formdata, obj, data, **kwargs)
Wrap the process method to store the current object instance
58,343
def get(name):
    """Get a linkchecker given its name or fallback on the default one."""
    linkcheckers = get_enabled(ENTRYPOINT, current_app)
    linkcheckers.update(no_check=NoCheckLinkchecker)
    selected = linkcheckers.get(name)
    if not selected:
        default_name = current_app.config.get(
            'LINKCHECKING_DEFAULT_LINKCHECKER')
        selected = linkcheckers.get(default_name)
    if not selected:
        log.error('No linkchecker found ({} requested and no fallback)'
                  .format(name))
    return selected
Get a linkchecker given its name or fallback on default
58,344
def get_notifications(user):
    """List notifications for a given user."""
    notifications = []
    for name, provider in _providers.items():
        notifications.extend({
            'type': name,
            'created_on': created,
            'details': details,
        } for created, details in provider(user))
    return notifications
List notification for a given user
58,345
def count_tags(self):
    """Count tag occurrences by type and update the tag collection."""
    for key, model in TAGGED.items():
        collection = '{0}_tags'.format(key)
        results = (model.objects(tags__exists=True)
                   .map_reduce(map_tags, reduce_tags, collection))
        for result in results:
            tag, created = Tag.objects.get_or_create(name=result.key,
                                                     auto_save=False)
            tag.counts[key] = int(result.value) if result.value else 0
            tag.save()
Count tag occurrences by type and update the tag collection
58,346
def from_model(cls, document):
    """Build the search document from a DB model instance."""
    return cls(meta={'id': document.id}, **cls.serialize(document))
By default use the to_dict method
58,347
def completer_tokenize(cls, value, min_length=3):
    """Quick and dirty tokenizer for the completion suggester."""
    # Split on spaces then apostrophes, keeping tokens longer than
    # min_length characters.
    tokens = [part
              for word in value.split(' ')
              for part in word.split("'")
              if len(part) > min_length]
    return list({value, ' '.join(tokens)} | set(tokens))
Quick and dirty tokenizer for completion suggester
58,348
def facet_search(cls, *facets):
    """Build a FacetSearch for a given list of facets."""
    selected = dict((key, value) for key, value in cls.facets.items()
                    if key in facets)

    class TempSearch(SearchQuery):
        adapter = cls
        analyzer = cls.analyzer
        boosters = cls.boosters
        doc_types = cls
        facets = selected
        fields = cls.fields
        fuzzy = cls.fuzzy
        match_type = cls.match_type
        model = cls.model

    return TempSearch
Build a FacetSearch for a given list of facets
58,349
def populate_slug(instance, field):
    """Populate a slug field if needed.

    Returns the (possibly deduplicated) slug and maintains
    ``SlugFollow`` redirections when the field follows slug changes.
    """
    value = getattr(instance, field.db_field)
    try:
        previous = instance.__class__.objects.get(id=instance.id)
    except Exception:
        previous = None
    # Has the field value been changed on this instance?
    changed = field.db_field in instance._get_changed_fields()
    # Manual slug: set on a new document, or explicitly modified.
    manual = not previous and value or changed
    if not manual and field.populate_from:
        # The value to slugify comes from the populate_from field.
        value = getattr(instance, field.populate_from)
        if previous and value == getattr(previous, field.populate_from):
            return value
    if previous and getattr(previous, field.db_field) == value:
        return value
    if previous and not changed and not field.update:
        return value
    slug = field.slugify(value)
    if slug is None:
        return
    old_slug = getattr(previous, field.db_field, None)
    if slug == old_slug:
        return slug
    # Ensure uniqueness by suffixing an incrementing index.
    if field.unique:
        base_slug = slug
        index = 1
        qs = instance.__class__.objects
        if previous:
            qs = qs(id__ne=previous.id)

        def exists(s):
            return qs(class_check=False,
                      **{field.db_field: s}).limit(1).count(True) > 0

        while exists(slug):
            slug = '{0}-{1}'.format(base_slug, index)
            index += 1
    # Track old slugs for this class.
    if field.follow and old_slug != slug:
        ns = instance.__class__.__name__
        # Destroy redirections pointing from this new slug.
        SlugFollow.objects(namespace=ns, old_slug=slug).delete()
        if old_slug:
            # Create a redirection from the previous slug.
            slug_follower, created = SlugFollow.objects.get_or_create(
                namespace=ns,
                old_slug=old_slug,
                auto_save=False,
            )
            slug_follower.new_slug = slug
            slug_follower.save()
            # Keep previous redirections up-to-date.
            SlugFollow.objects(namespace=ns,
                               new_slug=old_slug).update(new_slug=slug)
    setattr(instance, field.db_field, slug)
    return slug
Populate a slug field if needed .
58,350
def slugify(self, value):
    """Apply slugification according to the specified field rules."""
    if value is None:
        return None
    return slugify.slugify(value,
                           max_length=self.max_length,
                           separator=self.separator,
                           to_lower=self.lower_case)
Apply slugification according to specified field rules
58,351
def cleanup_on_delete(self, sender, document, **kwargs):
    """Clean up slug redirections on object deletion."""
    if not self.follow or sender is not self.owner_document:
        return
    slug = getattr(document, self.db_field)
    namespace = self.owner_document.__name__
    SlugFollow.objects(namespace=namespace, new_slug=slug).delete()
Clean up slug redirections on object deletion
58,352
def badge_form(model):
    """A form factory for a given model's badges."""
    class BadgeForm(ModelForm):
        model_class = Badge
        kind = fields.RadioField(
            _('Kind'), [validators.DataRequired()],
            choices=model.__badges__.items(),
            description=_('Kind of badge (certified, etc)'))

    return BadgeForm
A form factory for a given model badges
58,353
def delay(name, args, kwargs):
    """Run a job asynchronously.

    ``kwargs`` is an iterable of whitespace-separated "key value"
    strings turned into a keyword mapping.
    """
    args = args or []
    kwargs = dict(k.split() for k in kwargs) if kwargs else {}
    if name not in celery.tasks:
        # BUG FIX: bail out after logging; the original fell through to
        # `celery.tasks[name]` and crashed with a KeyError.
        log.error('Job %s not found', name)
        return
    job = celery.tasks[name]
    log.info('Sending job %s', name)
    async_result = job.delay(*args, **kwargs)
    log.info('Job %s sended to workers', async_result.id)
Run a job asynchronously
58,354
def is_url(default_scheme='http', **kwargs):
    """Return a converter that converts a clean string to an URL."""
    def converter(value):
        if value is None:
            return None
        # Prepend the default scheme when none is provided.
        if default_scheme and '://' not in value:
            value = '://'.join((default_scheme, value.strip()))
        try:
            return uris.validate(value)
        except uris.ValidationError as e:
            raise Invalid(e.message)
    return converter
Return a converter that converts a clean string to an URL .
58,355
def hash(value):
    """Detect a hash type from the digest length.

    NOTE: intentionally shadows the builtin ``hash`` to keep the
    public name used by callers.
    """
    # Map hexadecimal digest lengths to their hash type.
    types_by_length = {32: 'md5', 40: 'sha1', 64: 'sha256'}
    if not value or len(value) not in types_by_length:
        return None
    return {'type': types_by_length[len(value)], 'value': value}
Detect a hash type
58,356
def iter_adapters():
    """Iterate over adapters in a predictable (model name) order."""
    return sorted(adapter_catalog.values(),
                  key=lambda adapter: adapter.model.__name__)
Iter over adapter in predictable way
58,357
def iter_qs(qs, adapter):
    """Safely iterate over a DB QuerySet yielding ES documents."""
    for obj in qs.no_cache().no_dereference().timeout(False):
        if not adapter.is_indexable(obj):
            continue
        try:
            yield adapter.from_model(obj).to_dict(include_meta=True)
        except Exception as e:
            model = adapter.model.__name__
            log.error('Unable to index %s "%s": %s',
                      model, str(obj.id), str(e), exc_info=True)
Safely iterate over a DB QuerySet yielding ES documents
58,358
def index_model(index_name, adapter):
    """Index all objects for a given model."""
    model = adapter.model
    log.info('Indexing {0} objects'.format(model.__name__))
    qs = model.objects
    if hasattr(model.objects, 'visible'):
        qs = qs.visible()
    if adapter.exclude_fields:
        qs = qs.exclude(*adapter.exclude_fields)
    docs = iter_qs(qs, adapter)
    docs = iter_for_index(docs, index_name)
    for ok, info in streaming_bulk(es.client, docs, raise_on_error=False):
        if not ok:
            log.error('Unable to index %s "%s": %s', model.__name__,
                      info['index']['_id'], info['index']['error'])
Index all objects for a given model
58,359
def enable_refresh(index_name):
    """Enable refresh and force merge. To be used after indexing."""
    refresh_interval = current_app.config['ELASTICSEARCH_REFRESH_INTERVAL']
    es.indices.put_settings(index=index_name, body={
        'index': {'refresh_interval': refresh_interval}
    })
    es.indices.forcemerge(index=index_name, request_timeout=30)
Enable refresh and force merge . To be used after indexing .
58,360
def set_alias(index_name, delete=True):
    """Properly end an indexation by creating an alias.

    Previous indices and aliases are deleted if needed.
    """
    log.info('Creating alias "{0}" on index "{1}"'.format(
        es.index_name, index_name))
    if es.indices.exists_alias(name=es.index_name):
        alias = es.indices.get_alias(name=es.index_name)
        previous_indices = alias.keys()
        if index_name not in previous_indices:
            es.indices.put_alias(index=index_name, name=es.index_name)
        for index in previous_indices:
            if index != index_name:
                es.indices.delete_alias(index=index, name=es.index_name)
                if delete:
                    es.indices.delete(index=index)
    else:
        es.indices.put_alias(index=index_name, name=es.index_name)
Properly end an indexation by creating an alias . Previous alias is deleted if needed .
58,361
def handle_error(index_name, keep=False):
    """Handle errors while indexing.

    In case of error, log it, remove the index (unless ``keep`` is
    True) and exit with a non-zero status.
    """
    # Restore default handling so signals raise KeyboardInterrupt.
    signal.signal(signal.SIGINT, signal.default_int_handler)
    signal.signal(signal.SIGTERM, signal.default_int_handler)
    has_error = False
    try:
        yield
    except KeyboardInterrupt:
        print('')
        log.warning('Interrupted by signal')
        has_error = True
    except Exception as e:
        log.error(e)
        has_error = True
    if has_error:
        if not keep:
            log.info('Removing index %s', index_name)
            es.indices.delete(index=index_name)
        sys.exit(-1)
Handle errors while indexing . In case of error properly log it remove the index and exit . If keep is True index is not deleted .
58,362
def index(models=None, name=None, force=False, keep=False):
    """Initialize or rebuild the search index."""
    index_name = name or default_index_name()
    doc_types_names = [m.__name__.lower() for m in adapter_catalog.keys()]
    # Accept plural model names on the command line.
    models = [model.lower().rstrip('s') for model in (models or [])]
    for model in models:
        if model not in doc_types_names:
            log.error('Unknown model %s', model)
            sys.exit(-1)
    log.info('Initiliazing index "{0}"'.format(index_name))
    if es.indices.exists(index_name):
        if IS_TTY and not force:
            msg = 'Index {0} will be deleted, are you sure?'
            click.confirm(msg.format(index_name), abort=True)
        es.indices.delete(index_name)
    es.initialize(index_name)
    with handle_error(index_name, keep):
        disable_refresh(index_name)
        for adapter in iter_adapters():
            if not models or adapter.doc_type().lower() in models:
                index_model(index_name, adapter)
            else:
                log.info('Copying {0} objects to the new index'.format(
                    adapter.model.__name__))
                es_reindex(es.client, es.index_name, index_name,
                           scan_kwargs={'doc_type': adapter.doc_type()})
        enable_refresh(index_name)
        set_alias(index_name, delete=not keep)
Initialize or rebuild the search index
58,363
def create_app(config='udata.settings.Defaults', override=None,
               init_logging=init_logging):
    """Factory for a minimal application."""
    app = UDataApp(APP_NAME)
    app.config.from_object(config)
    settings = os.environ.get('UDATA_SETTINGS',
                              join(os.getcwd(), 'udata.cfg'))
    if exists(settings):
        app.settings_file = settings
        app.config.from_pyfile(settings)
    if override:
        app.config.from_object(override)
    # Load config defaults exposed by plugins.
    for pkg in entrypoints.get_roots(app):
        if pkg == 'udata':
            continue
        module = '{}.settings'.format(pkg)
        if pkgutil.find_loader(module):
            settings = pkgutil.get_loader(module)
            for key, default in settings.__dict__.items():
                app.config.setdefault(key, default)
    app.json_encoder = UDataJsonEncoder
    app.debug = app.config['DEBUG'] and not app.config['TESTING']
    app.wsgi_app = ProxyFix(app.wsgi_app)
    init_logging(app)
    register_extensions(app)
    return app
Factory for a minimal application
58,364
def standalone(app):
    """Factory for an all-in-one application."""
    from udata import api, core, frontend
    core.init_app(app)
    frontend.init_app(app)
    api.init_app(app)
    register_features(app)
    return app
Factory for an all in one application
58,365
def get_migration(plugin, filename):
    """Get an existing migration record if it exists."""
    db = get_db()
    return db.migrations.find_one({'plugin': plugin, 'filename': filename})
Get an existing migration record if exists
58,366
def record_migration(plugin, filename, script, **kwargs):
    """Only record a migration without applying it."""
    db = get_db()
    db.eval(RECORD_WRAPPER, plugin, filename, script)
    return True
Only record a migration without applying it
58,367
def available_migrations():
    """List available migrations for udata and enabled plugins.

    Each entry is a ``(plugin, package, filename)`` tuple, sorted by
    filename.
    """
    migrations = []
    for filename in resource_listdir('udata', 'migrations'):
        if filename.endswith('.js'):
            migrations.append(('udata', 'udata', filename))
    plugins = entrypoints.get_enabled('udata.models', current_app)
    for plugin, module in plugins.items():
        if resource_isdir(module.__name__, 'migrations'):
            for filename in resource_listdir(module.__name__, 'migrations'):
                if filename.endswith('.js'):
                    migrations.append((plugin, module.__name__, filename))
    return sorted(migrations, key=lambda row: row[2])
List available migrations for udata and enabled plugins
58,368
def log_status(plugin, filename, status):
    """Properly display a migration status line."""
    display = ':'.join((plugin, filename)) + ' '
    log.info('%s [%s]', '{:.<70}'.format(display), status)
Properly display a migration status line
58,369
def status():
    """Display the database migrations status."""
    for plugin, package, filename in available_migrations():
        migration = get_migration(plugin, filename)
        if migration:
            status = green(migration['date'].strftime(DATE_FORMAT))
        else:
            status = yellow('Not applied')
        log_status(plugin, filename, status)
Display the database migrations status
58,370
def migrate(record, dry_run=False):
    """Perform database migrations.

    Already applied migrations are skipped, and the remaining ones are
    skipped as soon as one fails.
    """
    handler = record_migration if record else execute_migration
    success = True
    for plugin, package, filename in available_migrations():
        migration = get_migration(plugin, filename)
        if migration or not success:
            log_status(plugin, filename, cyan('Skipped'))
        else:
            status = magenta('Recorded') if record else yellow('Apply')
            log_status(plugin, filename, status)
            script = resource_string(package, join('migrations', filename))
            success &= handler(plugin, filename, script, dryrun=dry_run)
Perform database migrations
58,371
def unrecord(plugin_or_specs, filename):
    """Remove a database migration record."""
    plugin, filename = normalize_migration(plugin_or_specs, filename)
    migration = get_migration(plugin, filename)
    if not migration:
        log.error('Migration not found %s:%s', plugin, filename)
        return
    log.info('Removing migration %s:%s', plugin, filename)
    db = get_db()
    db.eval(UNRECORD_WRAPPER, migration['_id'])
Remove a database migration record .
58,372
def validate(url, schemes=None, tlds=None, private=None, local=None,
             credentials=None):
    """Validate and normalize a URL against the configured policy.

    Each policy knob falls back to the matching application setting when
    left to ``None``. Violations are reported through :func:`error`;
    otherwise the stripped URL is returned.
    """
    url = url.strip()
    schemes = config_for(schemes, 'URLS_ALLOWED_SCHEMES')
    tlds = config_for(tlds, 'URLS_ALLOWED_TLDS')
    private = config_for(private, 'URLS_ALLOW_PRIVATE')
    local = config_for(local, 'URLS_ALLOW_LOCAL')
    credentials = config_for(credentials, 'URLS_ALLOW_CREDENTIALS')
    parsed = URL_REGEX.match(url)
    if not parsed:
        error(url)
    scheme = (parsed.group('scheme') or '').lower()
    if scheme and scheme not in schemes:
        error(url, 'Invalid scheme {0}'.format(scheme))
    if parsed.group('credentials') and not credentials:
        error(url, 'Credentials in URL are not allowed')
    tld = parsed.group('tld')
    if tld and tld not in tlds and tld.encode('idna') not in tlds:
        error(url, 'Invalid TLD {0}'.format(tld))
    ip = parsed.group('ipv6') or parsed.group('ipv4')
    if ip:
        try:
            ip = IPAddress(ip)
        except AddrFormatError:
            error(url)
        if ip.is_multicast():
            error(url, '{0} is a multicast IP'.format(ip))
        # Parentheses mirror Python's and/or precedence in the original.
        elif (not ip.is_loopback() and ip.is_hostmask()) or ip.is_netmask():
            error(url, '{0} is a mask IP'.format(ip))
    if not local:
        if (ip and ip.is_loopback()) or parsed.group('localhost'):
            error(url, 'is a local URL')
    if not private and ip and ip.is_private():
        error(url, 'is a private URL')
    return url
Validate and normalize a URL
58,373
def get_json_ld_extra(key, value):
    """Serialize an extras key/value pair into a JSON-LD PropertyValue."""
    if hasattr(value, 'serialize'):
        value = value.serialize()
    return {
        '@type': 'http://schema.org/PropertyValue',
        'name': key,
        'value': value,
    }
Serialize an extras key value pair into JSON - LD
58,374
def get_resource(id):
    """Fetch a resource (dataset-owned or community) given its UUID."""
    dataset = Dataset.objects(resources__id=id).first()
    if not dataset:
        return CommunityResource.objects(id=id).first()
    return get_by(dataset.resources, 'id', id)
Fetch a resource given its UUID
58,375
def guess(cls, *strings, **kwargs):
    """Return the first license guessed from *strings*.

    Falls back to the ``default`` keyword argument (or None) when no
    string yields a match.
    """
    for candidate in strings:
        guessed = cls.guess_one(candidate)
        if guessed:
            return guessed
    return kwargs.get('default')
Try to guess a license from a list of strings .
58,376
def guess_one(cls, text):
    """Try to guess a single license from a free-form string.

    Matching order: exact id/slug/url/alternate_urls lookup, then a
    Levenshtein match on slugs, then on slugified alternate titles.
    A fuzzy match is only kept when it is unambiguous (one candidate).
    """
    if not text:
        return
    cleaned = text.strip().lower()
    slug = cls.slug.slugify(cleaned)
    found = cls.objects(
        db.Q(id=cleaned) | db.Q(slug=slug) |
        db.Q(url=cleaned) | db.Q(alternate_urls=cleaned)
    ).first()
    if found is None:
        close = [l for l in cls.objects
                 if rdlevenshtein(l.slug, slug) <= MAX_DISTANCE]
        if len(close) == 1:
            found = close[0]
    if found is None:
        close = [l for l in cls.objects for t in l.alternate_titles
                 if rdlevenshtein(cls.slug.slugify(t), slug) <= MAX_DISTANCE]
        if len(close) == 1:
            found = close[0]
    return found
Try to guess license from a string .
58,377
def need_check(self):
    """Tell whether this resource is due for a new linkchecker pass."""
    conf = current_app.config
    min_cache = conf.get('LINKCHECKING_MIN_CACHE_DURATION')
    max_cache = conf.get('LINKCHECKING_MAX_CACHE_DURATION')
    ko_threshold = conf.get('LINKCHECKING_UNAVAILABLE_THRESHOLD')
    count = self.extras.get('check:count-availability', 1)
    availability = self.check_availability()
    if availability == 'unknown':
        return True
    if availability or count > ko_threshold:
        # Stable resources (or long-broken ones past the threshold) are
        # re-checked less and less often, capped by the max duration.
        delta = min(min_cache * count, max_cache)
    else:
        delta = min_cache
    last_check = self.extras.get('check:date')
    if last_check:
        if not isinstance(last_check, datetime):
            try:
                last_check = parse_dt(last_check)
            except (ValueError, TypeError):
                # Unparseable timestamp: check again to refresh it.
                return True
        if last_check >= datetime.now() - timedelta(minutes=delta):
            return False
    return True
Does the resource need to be checked against its linkchecker?
58,378
def check_availability(self):
    """Run the availability check on every remote resource of the dataset.

    Returns a list of per-resource results (empty when the dataset has
    no remote resource).
    """
    return [
        resource.check_availability()
        for resource in self.resources
        if resource.filetype == 'remote'
    ]
Check if resources from that dataset are available .
58,379
def next_update(self):
    """Compute the next expected update date from the declared frequency.

    Returns ``None`` when the frequency is unknown or unset.
    """
    deltas = {
        'daily': timedelta(days=1),
        'weekly': timedelta(weeks=1),
        'fortnighly': timedelta(weeks=2),  # (sic) historical key spelling
        'monthly': timedelta(weeks=4),
        'bimonthly': timedelta(weeks=4 * 2),
        'quarterly': timedelta(weeks=52 / 4),
        'biannual': timedelta(weeks=52 / 2),
        'annual': timedelta(weeks=52),
        'biennial': timedelta(weeks=52 * 2),
        'triennial': timedelta(weeks=52 * 3),
        'quinquennial': timedelta(weeks=52 * 5),
    }
    delta = deltas.get(self.frequency)
    if delta is None:
        return
    return self.last_update + delta
Compute the next expected update date
58,380
def quality(self):
    """Gather quality-related metrics for this dataset into a dict."""
    from udata.models import Discussion
    metrics = {}
    if not self.id:
        # Not saved yet: nothing meaningful to measure.
        return metrics
    if self.next_update:
        metrics['frequency'] = self.frequency
        metrics['update_in'] = -(self.next_update - datetime.now()).days
    if self.tags:
        metrics['tags_count'] = len(self.tags)
    if self.description:
        metrics['description_length'] = len(self.description)
    if self.resources:
        metrics['has_resources'] = True
        metrics['has_only_closed_or_no_formats'] = all(
            resource.closed_or_no_format for resource in self.resources)
        metrics['has_unavailable_resources'] = not all(
            self.check_availability())
    discussions = Discussion.objects(subject=self)
    if discussions:
        metrics['discussions'] = len(discussions)
        metrics['has_untreated_discussions'] = not all(
            discussion.person_involved(self.owner)
            for discussion in discussions)
    metrics['score'] = self.compute_quality_score(metrics)
    return metrics
Return a dict filled with metrics related to the inner
58,381
def compute_quality_score(self, quality):
    """Derive a non-negative score from the quality metrics dict."""
    UNIT = 2
    score = 0
    if 'frequency' in quality:
        # Reward datasets updated on schedule, penalize overdue ones.
        score += UNIT if quality['update_in'] < 0 else -UNIT
    if quality.get('tags_count', 0) > 3:
        score += UNIT
    if quality.get('description_length', 0) > 100:
        score += UNIT
    if 'has_resources' in quality:
        score += -UNIT if quality['has_only_closed_or_no_formats'] else UNIT
        score += -UNIT if quality['has_unavailable_resources'] else UNIT
    if 'discussions' in quality:
        score += -UNIT if quality['has_untreated_discussions'] else UNIT
    return max(score, 0)
Compute the score related to the quality of that dataset .
58,382
def add_resource(self, resource):
    """Atomically prepend a new resource to the dataset's resource list."""
    resource.validate()
    push = {'$each': [resource.to_mongo()], '$position': 0}
    self.update(__raw__={'$push': {'resources': push}})
    self.reload()
    post_save.send(self.__class__, document=self, resource_added=resource.id)
Perform an atomic prepend for a new resource
58,383
def update_resource(self, resource):
    """Atomically overwrite an existing resource at its current index."""
    position = self.resources.index(resource)
    self.update(**{'resources__{0}'.format(position): resource})
    self.reload()
    post_save.send(self.__class__, document=self)
Perform an atomic update for an existing resource
58,384
def get_aggregation(self, name):
    """Return an aggregation result by name, unwrapping bucket lists."""
    aggregation = self.aggregations[name]
    return aggregation['buckets'] if 'buckets' in aggregation else aggregation
Fetch an aggregation result given its name
58,385
def language(lang_code):
    """Force a given language for the duration of the context.

    Pushes a test request context when called outside of a request, and
    restores the previous language (and pops the context) on exit.

    Bug fix: the original restored state only on normal exit, so an
    exception raised inside the ``with`` body leaked the forced language
    and the pushed request context. Cleanup is now done in a ``finally``.
    """
    ctx = None
    if not request:
        ctx = current_app.test_request_context()
        ctx.push()
    backup = g.get('lang_code')
    g.lang_code = lang_code
    refresh()
    try:
        yield
    finally:
        g.lang_code = backup
        if ctx:
            ctx.pop()
        refresh()
Force a given language
58,386
def redirect_to_lang(*args, **kwargs):
    """Redirect a non language-prefixed URL to its default-language variant."""
    endpoint = request.endpoint.replace('_redirect', '')
    params = multi_to_dict(request.args)
    params.update(request.view_args)
    params['lang_code'] = default_lang
    return redirect(url_for(endpoint, **params))
Redirect non lang - prefixed urls to default language .
58,387
def redirect_to_unlocalized(*args, **kwargs):
    """Redirect a language-prefixed URL to its unprefixed variant."""
    endpoint = request.endpoint.replace('_redirect', '')
    params = multi_to_dict(request.args)
    params.update(request.view_args)
    params.pop('lang_code', None)
    return redirect(url_for(endpoint, **params))
Redirect lang - prefixed urls to no prefixed URL .
58,388
def get_translations(self):
    """Return the gettext translations for the current request.

    Translations are cached per request context and lazily built by
    merging, in order: the application domain, wtforms, flask_security,
    every enabled plugin root's catalogs, and the current theme's
    catalog. Returns a NullTranslations object outside of a request
    context, so this never fails.
    """
    ctx = stack.top
    if ctx is None:
        return NullTranslations()
    locale = get_locale()
    cache = self.get_translations_cache(ctx)
    key = str(locale)
    translations = cache.get(key)
    if translations is None:
        translations_dir = self.get_translations_path(ctx)
        translations = Translations.load(translations_dir, locale,
                                         domain=self.domain)
        # Translations.load may return NullTranslations when nothing is
        # found; only merge extra catalogs into a real Translations.
        if isinstance(translations, Translations):
            # wtforms' own catalog
            from wtforms.i18n import messages_path
            translations.merge(Translations.load(
                messages_path(), locale, domain='wtforms'))
            # flask_security's own catalog
            import flask_security
            translations.merge(Translations.load(
                join(flask_security.__path__[0], 'translations'),
                locale, domain='flask_security'))
            # catalogs exposed by every enabled plugin root (.pot files
            # give the list of domains to load)
            for pkg in entrypoints.get_roots(current_app):
                package = pkgutil.get_loader(pkg)
                path = join(package.filename, 'translations')
                domains = [
                    f.replace(path, '').replace('.pot', '')[1:]
                    for f in iglob(join(path, '*.pot'))
                ]
                for domain in domains:
                    translations.merge(
                        Translations.load(path, locale, domain=domain))
            # current theme's catalog
            from . import theme
            theme_translations_dir = join(theme.current.path, 'translations')
            if exists(theme_translations_dir):
                translations.merge(Translations.load(
                    theme_translations_dir, locale,
                    domain=theme.current.identifier))
        cache[key] = translations
    return translations
Returns the correct gettext translations that should be used for this request . This will never fail and return a dummy translation object if used outside of the request or if a translation cannot be found .
58,389
def person_involved(self, person):
    """Return True when *person* authored at least one message of the
    discussion thread."""
    for message in self.discussion:
        if message.posted_by == person:
            return True
    return False
Return True if the given person has been involved in the
58,390
def is_ignored(resource):
    """Tell whether the resource URL belongs to an ignored domain.

    Resources without a URL are always considered ignored.
    """
    ignored_domains = current_app.config['LINKCHECKING_IGNORE_DOMAINS']
    url = resource.url
    if not url:
        return True
    return urlparse(url).netloc in ignored_domains
Check whether the resource's URL belongs to one of the LINKCHECKING_IGNORE_DOMAINS
58,391
def check_resource(resource):
    """Check a resource's availability against the configured linkchecker.

    Returns the linkchecker result dict on success, or an
    ``(error dict, HTTP status)`` tuple on failure.
    """
    LinkChecker = get_linkchecker(resource.extras.get('check:checker'))
    if not LinkChecker:
        return {'error': 'No linkchecker configured.'}, 503
    if is_ignored(resource):
        return dummy_check_response()
    result = LinkChecker().check(resource)
    if not result:
        return {'error': 'No response from linkchecker'}, 503
    if result.get('check:error'):
        return {'error': result['check:error']}, 500
    if not result.get('check:status'):
        return {'error': 'No status in response from linkchecker'}, 503
    previous_status = resource.extras.get('check:available')
    resource.extras.update(_get_check_keys(result, resource, previous_status))
    # Save without emitting the post_save signal (explicitly ignored).
    resource.save(signal_kwargs={'ignores': ['post_save']})
    return result
Check a resource availability against a linkchecker backend
58,392
def owned_pre_save(sender, document, **kwargs):
    """``Owned`` mongoengine pre_save signal handler.

    Captures the previous owner (user or organization) on the document
    before the new one overwrites it, and keeps ``owner`` and
    ``organization`` mutually exclusive.
    """
    if not isinstance(document, Owned):
        return
    changed = getattr(document, '_changed_fields', [])
    if 'organization' in changed:
        if document.owner:
            # Ownership moved from a user to an organization.
            document._previous_owner = document.owner
            document.owner = None
        else:
            # Organization changed: fetch the stored one before overwrite.
            original = sender.objects.only('organization').get(pk=document.pk)
            document._previous_owner = original.organization
    elif 'owner' in changed:
        if document.organization:
            # Ownership moved from an organization to a user.
            document._previous_owner = document.organization
            document.organization = None
        else:
            # Owner changed: fetch the stored one before overwrite.
            original = sender.objects.only('owner').get(pk=document.pk)
            document._previous_owner = original.owner
Owned mongoengine . pre_save signal handler Need to fetch original owner before the new one erase it .
58,393
def owned_post_save(sender, document, **kwargs):
    """``Owned`` mongoengine post_save signal handler.

    Dispatches ``Owned.on_owner_change`` once the document has been
    saved, passing the previously recorded owner.
    """
    if not isinstance(document, Owned):
        return
    if hasattr(document, '_previous_owner'):
        Owned.on_owner_change.send(document,
                                   previous=document._previous_owner)
Owned mongoengine . post_save signal handler Dispatch the Owned . on_owner_change signal once the document has been saved including the previous owner .
58,394
def get_enabled_plugins():
    """Instantiate enabled preview plugins, fallback plugins sorted last.

    Entry points that are not PreviewPlugin subclasses trigger a
    PreviewWarning and are skipped.
    """
    plugins = entrypoints.get_enabled('udata.preview', current_app).values()
    valid = []
    for plugin in plugins:
        if issubclass(plugin, PreviewPlugin):
            valid.append(plugin)
        else:
            warnings.warn(
                '{0} is not a valid preview plugin'.format(plugin.__name__),
                PreviewWarning)
    ordered = sorted(valid, key=lambda p: 1 if p.fallback else 0)
    return [plugin() for plugin in ordered]
Returns enabled preview plugins .
58,395
def get_preview_url(resource):
    """Return the most relevant preview URL for the resource, or None."""
    for plugin in get_enabled_plugins():
        if plugin.can_preview(resource):
            return plugin.preview_url(resource)
    return None
Returns the most pertinent preview URL associated to the resource if any .
58,396
def get_by(lst, field, value):
    """Find the first item in *lst* whose *field* (dict key or attribute)
    equals *value*; return None when nothing matches."""
    for item in lst:
        dict_match = isinstance(item, dict) and item.get(field) == value
        if dict_match or getattr(item, field, None) == value:
            return item
Find an object in a list given a field value
58,397
def multi_to_dict(multi):
    """Flatten a Werkzeug MultiDict: single-valued keys map to their bare
    value, multi-valued keys keep their list of values."""
    flat = {}
    for key, values in multi.to_dict(False).items():
        flat[key] = values[0] if len(values) == 1 else values
    return flat
Transform a Werkzeug MultiDict into a flat dictionary
58,398
def daterange_start(value):
    """Parse a date range start boundary, snapping partial dates down.

    '2014' -> Jan. 1st, '2014-05' -> May 1st; full dates, date and
    datetime instances are kept as-is (datetimes are truncated to date).
    """
    if not value:
        return None
    if isinstance(value, datetime):
        return value.date()
    if isinstance(value, date):
        return value
    parsed = parse_dt(value).date()
    precision = value.count('-')  # 0: year only, 1: year-month, 2+: full
    if precision >= 2:
        return parsed
    if precision == 1:
        return parsed.replace(day=1)
    return parsed.replace(day=1, month=1)
Parse a date range start boundary
58,399
def daterange_end(value):
    """Parse a date range end boundary, snapping partial dates up.

    '2014' -> Dec. 31st, '2014-05' -> last day of May; full dates, date
    and datetime instances are kept as-is (datetimes truncated to date).
    """
    if not value:
        return None
    if isinstance(value, datetime):
        return value.date()
    if isinstance(value, date):
        return value
    parsed = parse_dt(value).date()
    precision = value.count('-')  # 0: year only, 1: year-month, 2+: full
    if precision >= 2:
        return parsed
    if precision == 1:
        # Last day of the month: first of next month minus one day.
        return parsed + relativedelta(months=+1, days=-1, day=1)
    return parsed.replace(month=12, day=31)
Parse a date range end boundary