idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
6,200
def placeFavicon ( context ) : fav = Favicon . objects . filter ( isFavicon = True ) . first ( ) if not fav : return mark_safe ( '<!-- no favicon ) html = '' for rel in config : for size in sorted ( config [ rel ] , reverse = True ) : n = fav . get_favicon ( size = size , rel = rel ) html += '<link rel="%s" sizes="%sx...
Gets Favicon - URL for the Model .
6,201
def set_default_theme ( theme ) : pref_init ( ) parser = cp . ConfigParser ( ) parser . read ( PREFS_FILE ) if not parser . has_section ( "theme" ) : parser . add_section ( "theme" ) parser . set ( "theme" , "default" , theme ) with open ( "%s.2" % PREFS_FILE , "w" ) as fp : parser . write ( fp ) copy ( "%s.2" % PREFS_...
Set default theme name based on config file .
6,202
def pick_theme ( manual ) : if manual : return manual pref_init ( ) parser = cp . ConfigParser ( ) parser . read ( PREFS_FILE ) try : theme = parser . get ( "theme" , "default" ) except ( cp . NoSectionError , cp . NoOptionError ) : theme = "plain" return theme
Return theme name based on manual input prefs file or default to plain .
6,203
def install_theme ( path_to_theme ) : pref_init ( ) filename = basename ( path_to_theme ) dest = join ( THEMES_DIR , filename ) copy ( path_to_theme , dest ) zf = zipfile . ZipFile ( dest ) zf . extractall ( THEMES_DIR ) unlink ( dest )
Pass a path to a theme file which will be extracted to the themes directory .
6,204
def main ( ) : args = docopt ( __doc__ , version = "cdk" ) if args [ 'FILE' ] : out = output_file ( args [ 'FILE' ] ) theme = pick_theme ( args [ '--theme' ] ) if theme not in listdir ( THEMES_DIR ) : exit ( 'Selected theme "%s" not found. Check ~/.cdk/prefs' % theme ) cmd = create_command ( theme , args [ '--bare' ] ,...
Entry point for choosing what subcommand to run . Really should be using asciidocapi
6,205
def separate_resources ( self ) : self . _separate_hdxobjects ( self . resources , 'resources' , 'name' , hdx . data . resource . Resource )
Move contents of resources key in internal dictionary into self . resources
6,206
def add_update_resources ( self , resources , ignore_datasetid = False ) : if not isinstance ( resources , list ) : raise HDXError ( 'Resources should be a list!' ) for resource in resources : self . add_update_resource ( resource , ignore_datasetid )
Add new or update existing resources with new metadata to the dataset
6,207
def delete_resource ( self , resource , delete = True ) : if isinstance ( resource , str ) : if is_valid_uuid ( resource ) is False : raise HDXError ( '%s is not a valid resource id!' % resource ) return self . _remove_hdxobject ( self . resources , resource , delete = delete )
Delete a resource from the dataset and also from HDX by default
6,208
def reorder_resources ( self , resource_ids , hxl_update = True ) : dataset_id = self . data . get ( 'id' ) if not dataset_id : raise HDXError ( 'Dataset has no id! It must be read, created or updated first.' ) data = { 'id' : dataset_id , 'order' : resource_ids } self . _write_to_hdx ( 'reorder' , data , 'package_id' ...
Reorder resources in dataset according to provided list . If only some resource ids are supplied then these are assumed to be first and the other resources will stay in their original order .
6,209
def update_from_yaml ( self , path = join ( 'config' , 'hdx_dataset_static.yml' ) ) : super ( Dataset , self ) . update_from_yaml ( path ) self . separate_resources ( )
Update dataset metadata with static metadata from YAML file
6,210
def update_from_json ( self , path = join ( 'config' , 'hdx_dataset_static.json' ) ) : super ( Dataset , self ) . update_from_json ( path ) self . separate_resources ( )
Update dataset metadata with static metadata from JSON file
6,211
def read_from_hdx ( identifier , configuration = None ) : dataset = Dataset ( configuration = configuration ) result = dataset . _dataset_load_from_hdx ( identifier ) if result : return dataset return None
Reads the dataset given by identifier from HDX and returns Dataset object
6,212
def _dataset_create_resources ( self ) : if 'resources' in self . data : self . old_data [ 'resources' ] = self . _copy_hdxobjects ( self . resources , hdx . data . resource . Resource , 'file_to_upload' ) self . init_resources ( ) self . separate_resources ( )
Creates resource objects in dataset
6,213
def _dataset_load_from_hdx ( self , id_or_name ) : if not self . _load_from_hdx ( 'dataset' , id_or_name ) : return False self . _dataset_create_resources ( ) return True
Loads the dataset given by either id or name from HDX
6,214
def check_required_fields ( self , ignore_fields = list ( ) , allow_no_resources = False ) : if self . is_requestable ( ) : self . _check_required_fields ( 'dataset-requestable' , ignore_fields ) else : self . _check_required_fields ( 'dataset' , ignore_fields ) if len ( self . resources ) == 0 and not allow_no_resourc...
Check that metadata for dataset and its resources is complete . The parameter ignore_fields should be set if required to any fields that should be ignored for the particular operation .
6,215
def _dataset_merge_filestore_resource ( self , resource , updated_resource , filestore_resources , ignore_fields ) : if updated_resource . get_file_to_upload ( ) : resource . set_file_to_upload ( updated_resource . get_file_to_upload ( ) ) filestore_resources . append ( resource ) merge_two_dictionaries ( resource , up...
Helper method to merge updated resource from dataset into HDX resource read from HDX including filestore .
6,216
def _dataset_merge_filestore_newresource ( self , new_resource , ignore_fields , filestore_resources ) : new_resource . check_required_fields ( ignore_fields = ignore_fields ) self . resources . append ( new_resource ) if new_resource . get_file_to_upload ( ) : filestore_resources . append ( new_resource ) new_resource...
Helper method to add new resource from dataset including filestore .
6,217
def _add_filestore_resources ( self , filestore_resources , create_default_views , hxl_update ) : for resource in filestore_resources : for created_resource in self . data [ 'resources' ] : if resource [ 'name' ] == created_resource [ 'name' ] : merge_two_dictionaries ( resource . data , created_resource ) del resource...
Helper method to create files in filestore by updating resources .
6,218
def _dataset_merge_hdx_update ( self , update_resources , update_resources_by_name , remove_additional_resources , create_default_views , hxl_update ) : merge_two_dictionaries ( self . data , self . old_data ) if 'resources' in self . data : del self . data [ 'resources' ] updated_resources = self . old_data . get ( 'r...
Helper method to check if dataset or its resources exist and update them
6,219
def update_in_hdx ( self , update_resources = True , update_resources_by_name = True , remove_additional_resources = False , create_default_views = True , hxl_update = True ) : loaded = False if 'id' in self . data : self . _check_existing_object ( 'dataset' , 'id' ) if self . _dataset_load_from_hdx ( self . data [ 'id...
Check if dataset exists in HDX and if so update it
6,220
def create_in_hdx ( self , allow_no_resources = False , update_resources = True , update_resources_by_name = True , remove_additional_resources = False , create_default_views = True , hxl_update = True ) : self . check_required_fields ( allow_no_resources = allow_no_resources ) loadedid = None if 'id' in self . data : ...
Check if dataset exists in HDX and if so update it otherwise create it
6,221
def search_in_hdx ( cls , query = '*:*' , configuration = None , page_size = 1000 , ** kwargs ) : dataset = Dataset ( configuration = configuration ) total_rows = kwargs . get ( 'rows' , cls . max_int ) start = kwargs . get ( 'start' , 0 ) all_datasets = None attempts = 0 while attempts < cls . max_attempts and all_dat...
Searches for datasets in HDX
6,222
def get_all_dataset_names ( configuration = None , ** kwargs ) : dataset = Dataset ( configuration = configuration ) dataset [ 'id' ] = 'all dataset names' return dataset . _write_to_hdx ( 'list' , kwargs , 'id' )
Get all dataset names in HDX
6,223
def get_all_datasets ( cls , configuration = None , page_size = 1000 , check_duplicates = True , ** kwargs ) : dataset = Dataset ( configuration = configuration ) dataset [ 'id' ] = 'all datasets' total_rows = kwargs . get ( 'limit' , cls . max_int ) start = kwargs . get ( 'offset' , 0 ) all_datasets = None attempts = ...
Get all datasets in HDX
6,224
def get_dataset_date_as_datetime ( self ) : dataset_date = self . data . get ( 'dataset_date' , None ) if dataset_date : if '-' in dataset_date : dataset_date = dataset_date . split ( '-' ) [ 0 ] return datetime . strptime ( dataset_date , '%m/%d/%Y' ) else : return None
Get dataset date as datetime . datetime object . For range returns start date .
6,225
def get_dataset_end_date_as_datetime ( self ) : dataset_date = self . data . get ( 'dataset_date' , None ) if dataset_date : if '-' in dataset_date : dataset_date = dataset_date . split ( '-' ) [ 1 ] return datetime . strptime ( dataset_date , '%m/%d/%Y' ) return None
Get dataset end date as datetime . datetime object .
6,226
def _get_formatted_date ( dataset_date , date_format = None ) : if dataset_date : if date_format : return dataset_date . strftime ( date_format ) else : return dataset_date . date ( ) . isoformat ( ) else : return None
Get supplied dataset date as string in specified format . If no format is supplied an ISO 8601 string is returned .
6,227
def set_dataset_date_from_datetime ( self , dataset_date , dataset_end_date = None ) : start_date = dataset_date . strftime ( '%m/%d/%Y' ) if dataset_end_date is None : self . data [ 'dataset_date' ] = start_date else : end_date = dataset_end_date . strftime ( '%m/%d/%Y' ) self . data [ 'dataset_date' ] = '%s-%s' % ( s...
Set dataset date from datetime . datetime object
6,228
def _parse_date ( dataset_date , date_format ) : if date_format is None : try : return parser . parse ( dataset_date ) except ( ValueError , OverflowError ) as e : raisefrom ( HDXError , 'Invalid dataset date!' , e ) else : try : return datetime . strptime ( dataset_date , date_format ) except ValueError as e : raisefr...
Parse dataset date from string using specified format . If no format is supplied the function will guess . For unambiguous formats this should be fine .
6,229
def set_dataset_date ( self , dataset_date , dataset_end_date = None , date_format = None ) : parsed_date = self . _parse_date ( dataset_date , date_format ) if dataset_end_date is None : self . set_dataset_date_from_datetime ( parsed_date ) else : parsed_end_date = self . _parse_date ( dataset_end_date , date_format )...
Set dataset date from string using specified format . If no format is supplied the function will guess . For unambiguous formats this should be fine .
6,230
def set_dataset_year_range ( self , dataset_year , dataset_end_year = None ) : if isinstance ( dataset_year , int ) : dataset_date = '01/01/%d' % dataset_year elif isinstance ( dataset_year , str ) : dataset_date = '01/01/%s' % dataset_year else : raise hdx . data . hdxobject . HDXError ( 'dataset_year has type %s whic...
Set dataset date as a range from year or start and end year .
6,231
def set_expected_update_frequency ( self , update_frequency ) : try : int ( update_frequency ) except ValueError : update_frequency = Dataset . transform_update_frequency ( update_frequency ) if not update_frequency : raise HDXError ( 'Invalid update frequency supplied!' ) self . data [ 'data_update_frequency' ] = upda...
Set expected update frequency
6,232
def remove_tag ( self , tag ) : return self . _remove_hdxobject ( self . data . get ( 'tags' ) , tag , matchon = 'name' )
Remove a tag
6,233
def get_location ( self , locations = None ) : countries = self . data . get ( 'groups' , None ) if not countries : return list ( ) return [ Locations . get_location_from_HDX_code ( x [ 'name' ] , locations = locations , configuration = self . configuration ) for x in countries ]
Return the dataset's location
6,234
def add_country_location ( self , country , exact = True , locations = None , use_live = True ) : iso3 , match = Country . get_iso3_country_code_fuzzy ( country , use_live = use_live ) if iso3 is None : raise HDXError ( 'Country: %s - cannot find iso3 code!' % country ) return self . add_other_location ( iso3 , exact =...
Add a country . If an iso 3 code is not provided value is parsed and if it is a valid country name converted to an iso 3 code . If the country is already added it is ignored .
6,235
def add_country_locations ( self , countries , locations = None , use_live = True ) : allcountriesadded = True for country in countries : if not self . add_country_location ( country , locations = locations , use_live = use_live ) : allcountriesadded = False return allcountriesadded
Add a list of countries . If iso 3 codes are not provided values are parsed and where they are valid country names converted to iso 3 codes . If any country is already added it is ignored .
6,236
def add_region_location ( self , region , locations = None , use_live = True ) : return self . add_country_locations ( Country . get_countries_in_region ( region , exception = HDXError , use_live = use_live ) , locations = locations )
Add all countries in a region . If a 3 digit UNStats M49 region code is not provided value is parsed as a region name . If any country is already added it is ignored .
6,237
def add_other_location ( self , location , exact = True , alterror = None , locations = None ) : hdx_code , match = Locations . get_HDX_code_from_location_partial ( location , locations = locations , configuration = self . configuration ) if hdx_code is None or ( exact is True and match is False ) : if alterror is None...
Add a location which is not a country or region . Value is parsed and compared to existing locations in HDX . If the location is already added it is ignored .
6,238
def remove_location ( self , location ) : res = self . _remove_hdxobject ( self . data . get ( 'groups' ) , location , matchon = 'name' ) if not res : res = self . _remove_hdxobject ( self . data . get ( 'groups' ) , location . upper ( ) , matchon = 'name' ) if not res : res = self . _remove_hdxobject ( self . data . g...
Remove a location . If the location is already added it is ignored .
6,239
def get_maintainer ( self ) : return hdx . data . user . User . read_from_hdx ( self . data [ 'maintainer' ] , configuration = self . configuration )
Get the dataset's maintainer .
6,240
def set_maintainer ( self , maintainer ) : if isinstance ( maintainer , hdx . data . user . User ) or isinstance ( maintainer , dict ) : if 'id' not in maintainer : maintainer = hdx . data . user . User . read_from_hdx ( maintainer [ 'name' ] , configuration = self . configuration ) maintainer = maintainer [ 'id' ] eli...
Set the dataset's maintainer .
6,241
def get_organization ( self ) : return hdx . data . organization . Organization . read_from_hdx ( self . data [ 'owner_org' ] , configuration = self . configuration )
Get the dataset's organization .
6,242
def set_organization ( self , organization ) : if isinstance ( organization , hdx . data . organization . Organization ) or isinstance ( organization , dict ) : if 'id' not in organization : organization = hdx . data . organization . Organization . read_from_hdx ( organization [ 'name' ] , configuration = self . config...
Set the dataset's organization .
6,243
def get_showcases ( self ) : assoc_result , showcases_dicts = self . _read_from_hdx ( 'showcase' , self . data [ 'id' ] , fieldname = 'package_id' , action = hdx . data . showcase . Showcase . actions ( ) [ 'list_showcases' ] ) showcases = list ( ) if assoc_result : for showcase_dict in showcases_dicts : showcase = hdx...
Get any showcases the dataset is in
6,244
def _get_dataset_showcase_dict ( self , showcase ) : if isinstance ( showcase , hdx . data . showcase . Showcase ) or isinstance ( showcase , dict ) : if 'id' not in showcase : showcase = hdx . data . showcase . Showcase . read_from_hdx ( showcase [ 'name' ] ) showcase = showcase [ 'id' ] elif not isinstance ( showcase...
Get dataset showcase dict
6,245
def add_showcase ( self , showcase , showcases_to_check = None ) : dataset_showcase = self . _get_dataset_showcase_dict ( showcase ) if showcases_to_check is None : showcases_to_check = self . get_showcases ( ) for showcase in showcases_to_check : if dataset_showcase [ 'showcase_id' ] == showcase [ 'id' ] : return Fals...
Add dataset to showcase
6,246
def add_showcases ( self , showcases , showcases_to_check = None ) : if showcases_to_check is None : showcases_to_check = self . get_showcases ( ) allshowcasesadded = True for showcase in showcases : if not self . add_showcase ( showcase , showcases_to_check = showcases_to_check ) : allshowcasesadded = False return all...
Add dataset to multiple showcases
6,247
def remove_showcase ( self , showcase ) : dataset_showcase = self . _get_dataset_showcase_dict ( showcase ) showcase = hdx . data . showcase . Showcase ( { 'id' : dataset_showcase [ 'showcase_id' ] } , configuration = self . configuration ) showcase . _write_to_hdx ( 'disassociate' , dataset_showcase , 'package_id' )
Remove dataset from showcase
6,248
def set_requestable ( self , requestable = True ) : self . data [ 'is_requestdata_type' ] = requestable if requestable : self . data [ 'private' ] = False
Set the dataset to be of type requestable or not
6,249
def get_filetypes ( self ) : if not self . is_requestable ( ) : return [ resource . get_file_type ( ) for resource in self . get_resources ( ) ] return self . _get_stringlist_from_commastring ( 'file_types' )
Return list of filetypes in your data
6,250
def clean_dataset_tags ( self ) : tags_dict , wildcard_tags = Tags . tagscleanupdicts ( ) def delete_tag ( tag ) : logger . info ( '%s - Deleting tag %s!' % ( self . data [ 'name' ] , tag ) ) return self . remove_tag ( tag ) , False def update_tag ( tag , final_tags , wording , remove_existing = True ) : text = '%s - %...
Clean dataset tags according to tags cleanup spreadsheet and return if any changes occurred
6,251
def set_quickchart_resource ( self , resource ) : if isinstance ( resource , int ) and not isinstance ( resource , bool ) : resource = self . get_resources ( ) [ resource ] if isinstance ( resource , hdx . data . resource . Resource ) or isinstance ( resource , dict ) : res = resource . get ( 'id' ) if res is None : re...
Set the resource that will be used for displaying QuickCharts in dataset preview
6,252
def create_default_views ( self , create_datastore_views = False ) : package = deepcopy ( self . data ) if self . resources : package [ 'resources' ] = self . _convert_hdxobjects ( self . resources ) data = { 'package' : package , 'create_datastore_views' : create_datastore_views } self . _write_to_hdx ( 'create_defaul...
Create default resource views for all resources in dataset
6,253
def _get_credentials ( self ) : site = self . data [ self . hdx_site ] username = site . get ( 'username' ) if username : return b64decode ( username ) . decode ( 'utf-8' ) , b64decode ( site [ 'password' ] ) . decode ( 'utf-8' ) else : return None
Return HDX site username and password
6,254
def call_remoteckan ( self , * args , ** kwargs ) : requests_kwargs = kwargs . get ( 'requests_kwargs' , dict ( ) ) credentials = self . _get_credentials ( ) if credentials : requests_kwargs [ 'auth' ] = credentials kwargs [ 'requests_kwargs' ] = requests_kwargs apikey = kwargs . get ( 'apikey' , self . get_api_key ( )...
Calls the remote CKAN
6,255
def create_remoteckan ( cls , site_url , user_agent = None , user_agent_config_yaml = None , user_agent_lookup = None , session = None , ** kwargs ) : if not session : session = get_session ( user_agent , user_agent_config_yaml , user_agent_lookup , prefix = Configuration . prefix , method_whitelist = frozenset ( [ 'HE...
Create remote CKAN instance from configuration
6,256
def setup_remoteckan ( self , remoteckan = None , ** kwargs ) : if remoteckan is None : self . _remoteckan = self . create_remoteckan ( self . get_hdx_site_url ( ) , full_agent = self . get_user_agent ( ) , ** kwargs ) else : self . _remoteckan = remoteckan
Set up remote CKAN from provided CKAN or by creating from configuration
6,257
def setup ( cls , configuration = None , ** kwargs ) : if configuration is None : cls . _configuration = Configuration ( ** kwargs ) else : cls . _configuration = configuration
Set up the HDX configuration
6,258
def _create ( cls , configuration = None , remoteckan = None , ** kwargs ) : kwargs = cls . _environment_variables ( ** kwargs ) cls . setup ( configuration , ** kwargs ) cls . _configuration . setup_remoteckan ( remoteckan , ** kwargs ) return cls . _configuration . get_hdx_site_url ( )
Create HDX configuration
6,259
def kwargs_to_variable_assignment ( kwargs : dict , value_representation = repr , assignment_operator : str = ' = ' , statement_separator : str = '\n' , statement_per_line : bool = False ) -> str : code = [ ] join_str = '\n' if statement_per_line else '' for key , value in kwargs . items ( ) : code . append ( key + ass...
Convert a dictionary into a string with assignments
6,260
def decode_json ( json_input : Union [ str , None ] = None ) : if json_input is None : return { } else : if isinstance ( json_input , str ) is False : raise TypeError ( ) elif json_input [ - 5 : ] == ".json" : with open ( json_input ) as f : decoded_json = json . load ( f ) else : decoded_json = json . loads ( json_inp...
Simple wrapper of json . load and json . loads .
6,261
def is_jsonable ( obj ) -> bool : try : return obj == json . loads ( json . dumps ( obj ) ) except TypeError : return False except : raise
Check if an object is jsonable .
6,262
def is_literal_eval ( node_or_string ) -> tuple : try : obj = ast . literal_eval ( node_or_string ) return ( True , obj ) except : return ( False , None )
Check if an expression can be literal_eval .
6,263
def find_duplicates ( l : list ) -> set : return set ( [ x for x in l if l . count ( x ) > 1 ] )
Return the duplicates in a list .
6,264
def sort_dict ( d : dict , by : str = 'key' , allow_duplicates : bool = True ) -> collections . OrderedDict : if by == 'key' : i = 0 elif by == 'value' : values = list ( d . values ( ) ) if len ( values ) != len ( set ( values ) ) and not allow_duplicates : duplicates = find_duplicates ( values ) raise ValueError ( "Th...
Sort a dictionary by key or value .
6,265
def group_dict_by_value ( d : dict ) -> dict : d_out = { } for k , v in d . items ( ) : if v in d_out : d_out [ v ] . append ( k ) else : d_out [ v ] = [ k ] return d_out
Group a dictionary by values .
6,266
def variable_status ( code : str , exclude_variable : Union [ set , None ] = None , jsonable_parameter : bool = True ) -> tuple : if exclude_variable is None : exclude_variable = set ( ) else : exclude_variable = copy . deepcopy ( exclude_variable ) root = ast . parse ( code ) store_variable_name = set ( ) assign_only ...
Find the possible parameters and global variables from a python code .
6,267
def increment_name ( name : str , start_marker : str = " (" , end_marker : str = ")" ) -> str : if start_marker == '' : raise ValueError ( "start_marker can not be the empty string." ) a = name start = len ( a ) - a [ : : - 1 ] . find ( start_marker [ : : - 1 ] ) if ( a [ len ( a ) - len ( end_marker ) : len ( a ) ] ==...
Increment the name where the incremental part is given by parameters .
6,268
def read_from_hdx ( identifier , configuration = None ) : resourceview = ResourceView ( configuration = configuration ) result = resourceview . _load_from_hdx ( 'resource view' , identifier ) if result : return resourceview return None
Reads the resource view given by identifier from HDX and returns ResourceView object
6,269
def get_all_for_resource ( identifier , configuration = None ) : resourceview = ResourceView ( configuration = configuration ) success , result = resourceview . _read_from_hdx ( 'resource view' , identifier , 'id' , ResourceView . actions ( ) [ 'list' ] ) resourceviews = list ( ) if success : for resourceviewdict in re...
Read all resource views for a resource given by identifier from HDX and returns list of ResourceView objects
6,270
def _update_resource_view ( self , log = False ) : update = False if 'id' in self . data and self . _load_from_hdx ( 'resource view' , self . data [ 'id' ] ) : update = True else : if 'resource_id' in self . data : resource_views = self . get_all_for_resource ( self . data [ 'resource_id' ] ) for resource_view in resou...
Check if resource view exists in HDX and if so update resource view
6,271
def create_in_hdx ( self ) : self . check_required_fields ( ) if not self . _update_resource_view ( log = True ) : self . _save_to_hdx ( 'create' , 'title' )
Check if resource view exists in HDX and if so update it otherwise create resource view
6,272
def copy ( self , resource_view ) : if isinstance ( resource_view , str ) : if is_valid_uuid ( resource_view ) is False : raise HDXError ( '%s is not a valid resource view id!' % resource_view ) resource_view = ResourceView . read_from_hdx ( resource_view ) if not isinstance ( resource_view , dict ) and not isinstance ...
Copies all fields except id resource_id and package_id from another resource view .
6,273
def tagscleanupdicts ( configuration = None , url = None , keycolumn = 5 , failchained = True ) : if not Tags . _tags_dict : if configuration is None : configuration = Configuration . read ( ) with Download ( full_agent = configuration . get_user_agent ( ) ) as downloader : if url is None : url = configuration [ 'tags_...
Get tags cleanup dictionaries
6,274
def read_from_hdx ( identifier , configuration = None ) : user = User ( configuration = configuration ) result = user . _load_from_hdx ( 'user' , identifier ) if result : return user return None
Reads the user given by identifier from HDX and returns User object
6,275
def update_in_hdx ( self ) : capacity = self . data . get ( 'capacity' ) if capacity is not None : del self . data [ 'capacity' ] self . _update_in_hdx ( 'user' , 'id' ) if capacity is not None : self . data [ 'capacity' ] = capacity
Check if user exists in HDX and if so update user
6,276
def create_in_hdx ( self ) : capacity = self . data . get ( 'capacity' ) if capacity is not None : del self . data [ 'capacity' ] self . _create_in_hdx ( 'user' , 'id' , 'name' ) if capacity is not None : self . data [ 'capacity' ] = capacity
Check if user exists in HDX and if so update it otherwise create user
6,277
def email ( self , subject , text_body , html_body = None , sender = None , ** kwargs ) : self . configuration . emailer ( ) . send ( [ self . data [ 'email' ] ] , subject , text_body , html_body = html_body , sender = sender , ** kwargs )
Emails a user .
6,278
def get_all_users ( configuration = None , ** kwargs ) : user = User ( configuration = configuration ) user [ 'id' ] = 'all users' result = user . _write_to_hdx ( 'list' , kwargs , 'id' ) users = list ( ) if result : for userdict in result : user = User ( userdict , configuration = configuration ) users . append ( user...
Get all users in HDX
6,279
def email_users ( users , subject , text_body , html_body = None , sender = None , configuration = None , ** kwargs ) : if not users : raise ValueError ( 'No users supplied' ) recipients = list ( ) for user in users : recipients . append ( user . data [ 'email' ] ) if configuration is None : configuration = users [ 0 ]...
Email a list of users
6,280
def get_organizations ( self , permission = 'read' ) : success , result = self . _read_from_hdx ( 'user' , self . data [ 'name' ] , 'id' , self . actions ( ) [ 'listorgs' ] , permission = permission ) organizations = list ( ) if success : for organizationdict in result : organization = hdx . data . organization . Organ...
Get organizations in HDX that this user is a member of .
6,281
def facade ( projectmainfn , ** kwargs ) : site_url = Configuration . _create ( ** kwargs ) logger . info ( '--------------------------------------------------' ) logger . info ( '> Using HDX Python API Library %s' % Configuration . apiversion ) logger . info ( '> HDX Site: %s' % site_url ) UserAgent . user_agent = Con...
Facade to simplify project setup that calls project main function
6,282
def get_lint_config ( config_path = None ) : if config_path : config = LintConfig . load_from_file ( config_path ) click . echo ( "Using config from {0}" . format ( config_path ) ) elif os . path . exists ( DEFAULT_CONFIG_FILE ) : config = LintConfig . load_from_file ( DEFAULT_CONFIG_FILE ) click . echo ( "Using config...
Tries loading the config from the given path . If no path is specified the default config path is tried and if that does not exist the default config is returned .
6,283
def cli ( list_files , config , ignore , path ) : files = MarkdownFileFinder . find_files ( path ) if list_files : echo_files ( files ) lint_config = get_lint_config ( config ) lint_config . apply_on_csv_string ( ignore , lint_config . disable_rule ) linter = MarkdownLinter ( lint_config ) error_count = linter . lint_f...
Markdown lint tool checks your markdown for styling issues
6,284
def run ( self , check_interval = 300 ) : while True : if args . config : config = config_file_parser . get_configuration ( args . config ) access_key_id = config [ 'access-key-id' ] secret_access_key = config [ 'secret-access-key' ] region = config [ 'region' ] else : access_key_id = args . access_key_id secret_access...
Run the daemon
6,285
def _apply_line_rules ( self , markdown_string ) : all_violations = [ ] lines = markdown_string . split ( "\n" ) line_rules = self . line_rules line_nr = 1 ignoring = False for line in lines : if ignoring : if line . strip ( ) == '<!-- markdownlint:enable : ignoring = False else : if line . strip ( ) == '<!-- markdown...
Iterates over the lines in a given markdown string and applies all the enabled line rules to each line
6,286
def ReadFrom ( self , byte_stream ) : try : return self . _struct . unpack_from ( byte_stream ) except ( TypeError , struct . error ) as exception : raise IOError ( 'Unable to read byte stream with error: {0!s}' . format ( exception ) )
Read values from a byte stream .
6,287
def WriteTo ( self , values ) : try : return self . _struct . pack ( * values ) except ( TypeError , struct . error ) as exception : raise IOError ( 'Unable to write stream with error: {0!s}' . format ( exception ) )
Writes values to a byte stream .
6,288
def run ( connection ) : volumes = volume_manager . get_watched_volumes ( connection ) for volume in volumes : _ensure_snapshot ( connection , volume ) _remove_old_snapshots ( connection , volume )
Ensure that we have snapshots for a given volume
6,289
def _create_snapshot ( volume ) : logger . info ( 'Creating new snapshot for {}' . format ( volume . id ) ) snapshot = volume . create_snapshot ( description = "Automatic snapshot by Automated EBS Snapshots" ) logger . info ( 'Created snapshot {} for volume {}' . format ( snapshot . id , volume . id ) ) return snapshot
Create a new snapshot
6,290
def _ensure_snapshot ( connection , volume ) : if 'AutomatedEBSSnapshots' not in volume . tags : logger . warning ( 'Missing tag AutomatedEBSSnapshots for volume {}' . format ( volume . id ) ) return interval = volume . tags [ 'AutomatedEBSSnapshots' ] if volume . tags [ 'AutomatedEBSSnapshots' ] not in VALID_INTERVALS...
Ensure that a given volume has an appropriate snapshot
6,291
def _remove_old_snapshots ( connection , volume ) : if 'AutomatedEBSSnapshotsRetention' not in volume . tags : logger . warning ( 'Missing tag AutomatedEBSSnapshotsRetention for volume {}' . format ( volume . id ) ) return retention = int ( volume . tags [ 'AutomatedEBSSnapshotsRetention' ] ) snapshots = connection . g...
Remove old snapshots
6,292
def list ( connection ) : volumes = get_watched_volumes ( connection ) if not volumes : logger . info ( 'No watched volumes found' ) return logger . info ( '+-----------------------' '+----------------------' '+--------------' '+------------+' ) logger . info ( '| {volume:<21} ' '| {volume_name:<20.20} ' '| {interval:<...
List watched EBS volumes
6,293
def unwatch ( connection , volume_id ) : try : volume = connection . get_all_volumes ( volume_ids = [ volume_id ] ) [ 0 ] volume . remove_tag ( 'AutomatedEBSSnapshots' ) except EC2ResponseError : pass logger . info ( 'Removed {} from the watchlist' . format ( volume_id ) ) return True
Remove watching of a volume
6,294
def get_volume_id ( connection , volume ) : volume_id_pattern = re . compile ( 'vol-\w{8}' ) if volume_id_pattern . match ( volume ) : try : connection . get_all_volumes ( volume_ids = [ volume ] ) volume_id = volume except EC2ResponseError : logger . warning ( 'Volume {} not found' . format ( volume ) ) return None el...
Get Volume ID from the given volume . Input can be volume id or its Name tag .
6,295
def list_snapshots ( connection , volume ) : logger . info ( '+----------------' '+----------------------' '+---------------------------+' ) logger . info ( '| {snapshot:<14} ' '| {snapshot_name:<20.20} ' '| {created:<25} |' . format ( snapshot = 'Snapshot ID' , snapshot_name = 'Snapshot name' , created = 'Created' ) )...
List all snapshots for the volume
6,296
def stem ( self , words , parser , ** kwargs ) : output = self . _run_morfologik ( words ) return parser . parse ( output , ** kwargs )
Get stems for the words using a given parser
6,297
def _run_morfologik ( self , words ) : p = subprocess . Popen ( [ 'java' , '-jar' , self . jar_path , 'plstem' , '-ie' , 'UTF-8' , '-oe' , 'UTF-8' ] , bufsize = - 1 , stdin = subprocess . PIPE , stdout = subprocess . PIPE , stderr = subprocess . STDOUT ) out , _ = p . communicate ( input = bytes ( "\n" . join ( words )...
Runs morfologik java jar and assumes that input and output is UTF - 8 encoded .
6,298
def read_from_hdx ( identifier , configuration = None ) : showcase = Showcase ( configuration = configuration ) result = showcase . _load_from_hdx ( 'showcase' , identifier ) if result : return showcase return None
Reads the showcase given by identifier from HDX and returns Showcase object
6,299
def get_datasets ( self ) : assoc_result , datasets_dicts = self . _read_from_hdx ( 'showcase' , self . data [ 'id' ] , fieldname = 'showcase_id' , action = self . actions ( ) [ 'list_datasets' ] ) datasets = list ( ) if assoc_result : for dataset_dict in datasets_dicts : dataset = hdx . data . dataset . Dataset ( data...
Get any datasets in the showcase