idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
6,100
def prepareInsert(self, oself, store):
    """Prepare for insertion into the database.

    When this attribute is relative, rewrite the item's dirty entry so the
    underlying db value is the store-filtered (relative) form of the
    current path value rather than an absolute pathname.
    """
    if not self.relative:
        return
    fspath = self.__get__(oself)
    oself.__dirty__[self.attrname] = self, self.infilter(fspath, oself, store)
Prepare for insertion into the database by making the dbunderlying attribute of the item a relative pathname with respect to the store rather than an absolute pathname .
6,101
def restore(self, time=None):
    """Undelete the object.

    Returns True if it was undeleted, False if it was already not deleted
    or the supplied time does not match the deletion timestamp.
    """
    if not self.deleted:
        return False
    when = time or self.deleted_at
    if when != self.deleted_at:
        return False
    self.deleted = False
    self.save()
    return True
Undeletes the object. Returns True if undeleted, False if it was already not deleted.
6,102
def full_restore ( self , using = None ) : using = using or router . db_for_write ( self . __class__ , instance = self ) restore_counter = Counter ( ) if self . deleted : time = self . deleted_at else : return restore_counter self . collector = models . deletion . Collector ( using = using ) self . collector . collect ...
Restores itself as well as objects that might have been deleted along with it if cascade is the deletion strategy
6,103
def connect_to_ec2 ( region = 'us-east-1' , access_key = None , secret_key = None ) : if access_key : logger . info ( 'Connecting to AWS EC2 in {}' . format ( region ) ) connection = ec2 . connect_to_region ( region , aws_access_key_id = access_key , aws_secret_access_key = secret_key ) else : metadata = get_instance_m...
Connect to AWS ec2
6,104
def parse(self, output):
    """Find stems for a given text.

    Pipeline: extract the stem lines, deduplicate the words, then parse
    them for simple stems.
    """
    stem_lines = self._get_lines_with_stems(output)
    unique_words = self._make_unique(stem_lines)
    return self._parse_for_simple_stems(unique_words)
Find stems for a given text .
6,105
def validlocations(configuration=None):
    """Read valid locations from HDX, caching the result on Locations."""
    cached = Locations._validlocations
    if cached is not None:
        return cached
    config = Configuration.read() if configuration is None else configuration
    Locations._validlocations = config.call_remoteckan(
        'group_list', {'all_fields': True})
    return Locations._validlocations
Read valid locations from HDX
6,106
def get_location_from_HDX_code(code, locations=None, configuration=None):
    """Get the title of the location matching an HDX location code.

    Comparison is case-insensitive; returns None when no location matches.
    """
    if locations is None:
        locations = Locations.validlocations(configuration)
    wanted = code.upper()
    for location in locations:
        if location['name'].upper() == wanted:
            return location['title']
    return None
Get location from HDX location code
6,107
def CheckDirectory ( self , path , extension = 'yaml' ) : result = True if extension : glob_spec = os . path . join ( path , '*.{0:s}' . format ( extension ) ) else : glob_spec = os . path . join ( path , '*' ) for definition_file in sorted ( glob . glob ( glob_spec ) ) : if not self . CheckFile ( definition_file ) : r...
Validates definition files in a directory .
6,108
def CheckFile ( self , path ) : print ( 'Checking: {0:s}' . format ( path ) ) definitions_registry = registry . DataTypeDefinitionsRegistry ( ) definitions_reader = reader . YAMLDataTypeDefinitionsFileReader ( ) result = False try : definitions_reader . ReadFile ( definitions_registry , path ) result = True except KeyE...
Validates the definition in a file .
6,109
def inline_css(html_message, encoding='unicode'):
    """Inline all CSS rules of an HTML string and return the converted HTML."""
    tree = etree.HTML(html_message)
    conversion = Conversion()
    conversion.perform(tree, html_message, '', encoding=encoding)
    return conversion.convertedHTML
Inlines all CSS in an HTML string
6,110
def _CheckByteStreamSize ( self , byte_stream , byte_offset , data_type_size ) : try : byte_stream_size = len ( byte_stream ) except Exception as exception : raise errors . MappingError ( exception ) if byte_stream_size - byte_offset < data_type_size : raise errors . ByteStreamTooSmallError ( 'Byte stream too small req...
Checks if the byte stream is large enough for the data type .
6,111
def _GetByteStreamOperation ( self ) : byte_order_string = self . GetStructByteOrderString ( ) format_string = self . GetStructFormatString ( ) if not format_string : return None format_string = '' . join ( [ byte_order_string , format_string ] ) return byte_operations . StructOperation ( format_string )
Retrieves the byte stream operation .
6,112
def FoldValue ( self , value ) : if value is False and self . _data_type_definition . false_value is not None : return self . _data_type_definition . false_value if value is True and self . _data_type_definition . true_value is not None : return self . _data_type_definition . true_value raise ValueError ( 'No matching ...
Folds the data type into a value .
6,113
def _CalculateElementsDataSize ( self , context ) : elements_data_size = None if self . _HasElementsDataSize ( ) : elements_data_size = self . _EvaluateElementsDataSize ( context ) elif self . _HasNumberOfElements ( ) : element_byte_size = self . _element_data_type_definition . GetByteSize ( ) if element_byte_size is n...
Calculates the elements data size .
6,114
def _EvaluateElementsDataSize ( self , context ) : elements_data_size = None if self . _data_type_definition . elements_data_size : elements_data_size = self . _data_type_definition . elements_data_size elif self . _data_type_definition . elements_data_size_expression : expression = self . _data_type_definition . eleme...
Evaluates elements data size .
6,115
def _EvaluateNumberOfElements ( self , context ) : number_of_elements = None if self . _data_type_definition . number_of_elements : number_of_elements = self . _data_type_definition . number_of_elements elif self . _data_type_definition . number_of_elements_expression : expression = self . _data_type_definition . numbe...
Evaluates number of elements .
6,116
def _GetElementDataTypeDefinition ( self , data_type_definition ) : if not data_type_definition : raise errors . FormatError ( 'Missing data type definition' ) element_data_type_definition = getattr ( data_type_definition , 'element_data_type_definition' , None ) if not element_data_type_definition : raise errors . For...
Retrieves the element data type definition .
6,117
def _CheckCompositeMap ( self , data_type_definition ) : if not data_type_definition : raise errors . FormatError ( 'Missing data type definition' ) members = getattr ( data_type_definition , 'members' , None ) if not members : raise errors . FormatError ( 'Invalid data type definition missing members' ) is_composite_m...
Determines if the data type definition needs a composite map .
6,118
def _GetMemberDataTypeMaps ( self , data_type_definition , data_type_map_cache ) : if not data_type_definition : raise errors . FormatError ( 'Missing data type definition' ) members = getattr ( data_type_definition , 'members' , None ) if not members : raise errors . FormatError ( 'Invalid data type definition missing...
Retrieves the member data type maps .
6,119
def GetName(self, number):
    """Retrieve the name of an enumeration value by number, or None."""
    value = self._data_type_definition.values_per_number.get(number, None)
    return value.name if value else None
Retrieves the name of an enumeration value by number .
6,120
def CreateDataTypeMap(self, definition_name):
    """Create a specific data type map by name.

    Returns None when the registry has no definition with that name.
    """
    definition = self._definitions_registry.GetDefinitionByName(definition_name)
    if not definition:
        return None
    return DataTypeMapFactory.CreateDataTypeMapByType(definition)
Creates a specific data type map by name .
6,121
def CreateDataTypeMapByType(cls, data_type_definition):
    """Create a specific data type map by the definition's type indicator.

    Returns None when no map class is registered for the indicator.
    """
    indicator = data_type_definition.TYPE_INDICATOR
    map_class = cls._MAP_PER_DEFINITION.get(indicator, None)
    return map_class(data_type_definition) if map_class else None
Creates a specific data type map by type indicator .
6,122
def IsComposite(self):
    """Determine if the data type is composite.

    Composite means it has a condition or a member data type that is
    itself composite.
    """
    member = self.member_data_type_definition
    return bool(self.condition) or (member and member.IsComposite())
Determines if the data type is composite .
6,123
def AddValue ( self , name , number , aliases = None , description = None ) : if name in self . values_per_name : raise KeyError ( 'Value with name: {0:s} already exists.' . format ( name ) ) if number in self . values_per_number : raise KeyError ( 'Value with number: {0!s} already exists.' . format ( number ) ) for al...
Adds an enumeration value .
6,124
def _ReadBooleanDataTypeDefinition ( self , definitions_registry , definition_values , definition_name , is_member = False ) : return self . _ReadFixedSizeDataTypeDefinition ( definitions_registry , definition_values , data_types . BooleanDefinition , definition_name , self . _SUPPORTED_ATTRIBUTES_BOOLEAN , is_member =...
Reads a boolean data type definition .
6,125
def _ReadCharacterDataTypeDefinition ( self , definitions_registry , definition_values , definition_name , is_member = False ) : return self . _ReadFixedSizeDataTypeDefinition ( definitions_registry , definition_values , data_types . CharacterDefinition , definition_name , self . _SUPPORTED_ATTRIBUTES_FIXED_SIZE_DATA_T...
Reads a character data type definition .
6,126
def _ReadConstantDataTypeDefinition ( self , definitions_registry , definition_values , definition_name , is_member = False ) : if is_member : error_message = 'data type not supported as member' raise errors . DefinitionReaderError ( definition_name , error_message ) value = definition_values . get ( 'value' , None ) i...
Reads a constant data type definition .
6,127
def _ReadDataTypeDefinitionWithMembers ( self , definitions_registry , definition_values , data_type_definition_class , definition_name , supports_conditions = False ) : members = definition_values . get ( 'members' , None ) if not members : error_message = 'missing members' raise errors . DefinitionReaderError ( defin...
Reads a data type definition with members .
6,128
def _ReadEnumerationDataTypeDefinition ( self , definitions_registry , definition_values , definition_name , is_member = False ) : if is_member : error_message = 'data type not supported as member' raise errors . DefinitionReaderError ( definition_name , error_message ) values = definition_values . get ( 'values' ) if ...
Reads an enumeration data type definition .
6,129
def _ReadElementSequenceDataTypeDefinition ( self , definitions_registry , definition_values , data_type_definition_class , definition_name , supported_definition_values ) : unsupported_definition_values = set ( definition_values . keys ( ) ) . difference ( supported_definition_values ) if unsupported_definition_values...
Reads an element sequence data type definition .
6,130
def _ReadFixedSizeDataTypeDefinition ( self , definitions_registry , definition_values , data_type_definition_class , definition_name , supported_attributes , default_size = definitions . SIZE_NATIVE , default_units = 'bytes' , is_member = False , supported_size_values = None ) : definition_object = self . _ReadStorage...
Reads a fixed - size data type definition .
6,131
def _ReadFloatingPointDataTypeDefinition ( self , definitions_registry , definition_values , definition_name , is_member = False ) : return self . _ReadFixedSizeDataTypeDefinition ( definitions_registry , definition_values , data_types . FloatingPointDefinition , definition_name , self . _SUPPORTED_ATTRIBUTES_FIXED_SIZ...
Reads a floating - point data type definition .
6,132
def _ReadFormatDataTypeDefinition ( self , definitions_registry , definition_values , definition_name , is_member = False ) : if is_member : error_message = 'data type not supported as member' raise errors . DefinitionReaderError ( definition_name , error_message ) definition_object = self . _ReadLayoutDataTypeDefiniti...
Reads a format data type definition .
6,133
def _ReadIntegerDataTypeDefinition ( self , definitions_registry , definition_values , definition_name , is_member = False ) : definition_object = self . _ReadFixedSizeDataTypeDefinition ( definitions_registry , definition_values , data_types . IntegerDefinition , definition_name , self . _SUPPORTED_ATTRIBUTES_INTEGER ...
Reads an integer data type definition .
6,134
def _ReadLayoutDataTypeDefinition ( self , definitions_registry , definition_values , data_type_definition_class , definition_name , supported_definition_values ) : return self . _ReadDataTypeDefinition ( definitions_registry , definition_values , data_type_definition_class , definition_name , supported_definition_valu...
Reads a layout data type definition .
6,135
def _ReadPaddingDataTypeDefinition ( self , definitions_registry , definition_values , definition_name , is_member = False ) : if not is_member : error_message = 'data type only supported as member' raise errors . DefinitionReaderError ( definition_name , error_message ) definition_object = self . _ReadDataTypeDefiniti...
Reads a padding data type definition .
6,136
def _ReadSemanticDataTypeDefinition ( self , definitions_registry , definition_values , data_type_definition_class , definition_name , supported_definition_values ) : return self . _ReadDataTypeDefinition ( definitions_registry , definition_values , data_type_definition_class , definition_name , supported_definition_va...
Reads a semantic data type definition .
6,137
def _ReadSequenceDataTypeDefinition ( self , definitions_registry , definition_values , definition_name , is_member = False ) : if is_member : supported_definition_values = ( self . _SUPPORTED_DEFINITION_VALUES_ELEMENTS_MEMBER_DATA_TYPE ) else : supported_definition_values = ( self . _SUPPORTED_DEFINITION_VALUES_ELEMEN...
Reads a sequence data type definition .
6,138
def _ReadStorageDataTypeDefinition ( self , definitions_registry , definition_values , data_type_definition_class , definition_name , supported_attributes , is_member = False ) : if is_member : supported_definition_values = ( self . _SUPPORTED_DEFINITION_VALUES_MEMBER_DATA_TYPE ) else : supported_definition_values = ( ...
Reads a storage data type definition .
6,139
def _ReadStreamDataTypeDefinition ( self , definitions_registry , definition_values , definition_name , is_member = False ) : if is_member : supported_definition_values = ( self . _SUPPORTED_DEFINITION_VALUES_ELEMENTS_MEMBER_DATA_TYPE ) else : supported_definition_values = ( self . _SUPPORTED_DEFINITION_VALUES_ELEMENTS...
Reads a stream data type definition .
6,140
def _ReadStringDataTypeDefinition ( self , definitions_registry , definition_values , definition_name , is_member = False ) : if is_member : supported_definition_values = ( self . _SUPPORTED_DEFINITION_VALUES_STRING_MEMBER ) else : supported_definition_values = self . _SUPPORTED_DEFINITION_VALUES_STRING definition_obje...
Reads a string data type definition .
6,141
def _ReadStructureDataTypeDefinition ( self , definitions_registry , definition_values , definition_name , is_member = False ) : if is_member : error_message = 'data type not supported as member' raise errors . DefinitionReaderError ( definition_name , error_message ) return self . _ReadDataTypeDefinitionWithMembers ( ...
Reads a structure data type definition .
6,142
def _ReadStructureFamilyDataTypeDefinition ( self , definitions_registry , definition_values , definition_name , is_member = False ) : if is_member : error_message = 'data type not supported as member' raise errors . DefinitionReaderError ( definition_name , error_message ) definition_object = self . _ReadLayoutDataTyp...
Reads a structure family data type definition .
6,143
def _ReadUnionDataTypeDefinition(
        self, definitions_registry, definition_values, definition_name,
        is_member=False):
    """Read a union data type definition (conditions are not supported)."""
    return self._ReadDataTypeDefinitionWithMembers(
        definitions_registry, definition_values, data_types.UnionDefinition,
        definition_name, supports_conditions=False)
Reads a union data type definition.
6,144
def _ReadUUIDDataTypeDefinition ( self , definitions_registry , definition_values , definition_name , is_member = False ) : return self . _ReadFixedSizeDataTypeDefinition ( definitions_registry , definition_values , data_types . UUIDDefinition , definition_name , self . _SUPPORTED_ATTRIBUTES_FIXED_SIZE_DATA_TYPE , defa...
Reads a UUID data type definition.
6,145
def ReadFile(self, definitions_registry, path):
    """Read data type definitions from the file at path into the registry."""
    with open(path, 'r') as definitions_file:
        self.ReadFileObject(definitions_registry, definitions_file)
Reads data type definitions from a file into the registry .
6,146
def _GetFormatErrorLocation ( self , yaml_definition , last_definition_object ) : name = yaml_definition . get ( 'name' , None ) if name : error_location = 'in: {0:s}' . format ( name or '<NAMELESS>' ) elif last_definition_object : error_location = 'after: {0:s}' . format ( last_definition_object . name ) else : error_...
Retrieves a format error location .
6,147
def ReadFileObject ( self , definitions_registry , file_object ) : last_definition_object = None error_location = None error_message = None try : yaml_generator = yaml . safe_load_all ( file_object ) for yaml_definition in yaml_generator : definition_object = self . _ReadDefinition ( definitions_registry , yaml_definit...
Reads data type definitions from a file - like object into the registry .
6,148
def read_from_hdx(identifier, configuration=None):
    """Read the organization given by identifier from HDX.

    Returns an Organization object, or None when it does not exist.
    """
    organization = Organization(configuration=configuration)
    if organization._load_from_hdx('organization', identifier):
        return organization
    return None
Reads the organization given by identifier from HDX and returns Organization object
6,149
def get_users ( self , capacity = None ) : users = list ( ) usersdicts = self . data . get ( 'users' ) if usersdicts is not None : for userdata in usersdicts : if capacity is not None and userdata [ 'capacity' ] != capacity : continue id = userdata . get ( 'id' ) if id is None : id = userdata [ 'name' ] user = hdx . da...
Returns the organization's users.
6,150
def get_datasets(self, query='*:*', **kwargs):
    """Get the list of datasets in the organization."""
    filter_query = 'organization:%s' % self.data['name']
    return hdx.data.dataset.Dataset.search_in_hdx(
        query=query, configuration=self.configuration, fq=filter_query,
        **kwargs)
Get list of datasets in organization
6,151
def get_all_organization_names(configuration=None, **kwargs):
    """Get all organization names in HDX."""
    organization = Organization(configuration=configuration)
    # placeholder id required by the generic write helper for list calls
    organization['id'] = 'all organizations'
    return organization._write_to_hdx('list', kwargs, 'id')
Get all organization names in HDX
6,152
def _read_from_hdx ( self , object_type , value , fieldname = 'id' , action = None , ** kwargs ) : if not fieldname : raise HDXError ( 'Empty %s field name!' % object_type ) if action is None : action = self . actions ( ) [ 'show' ] data = { fieldname : value } data . update ( kwargs ) try : result = self . configurati...
Makes a read call to HDX passing in given parameter .
6,153
def _load_from_hdx ( self , object_type , id_field ) : success , result = self . _read_from_hdx ( object_type , id_field ) if success : self . old_data = self . data self . data = result return True logger . debug ( result ) return False
Helper method to load the HDX object given by identifier from HDX
6,154
def _check_load_existing_object ( self , object_type , id_field_name , operation = 'update' ) : self . _check_existing_object ( object_type , id_field_name ) if not self . _load_from_hdx ( object_type , self . data [ id_field_name ] ) : raise HDXError ( 'No existing %s to %s!' % ( object_type , operation ) )
Check metadata exists and contains HDX object identifier and if so load HDX object
6,155
def _check_required_fields ( self , object_type , ignore_fields ) : for field in self . configuration [ object_type ] [ 'required_fields' ] : if field not in self . data and field not in ignore_fields : raise HDXError ( 'Field %s is missing in %s!' % ( field , object_type ) )
Helper method to check that metadata for HDX object is complete
6,156
def _merge_hdx_update ( self , object_type , id_field_name , file_to_upload = None , ** kwargs ) : merge_two_dictionaries ( self . data , self . old_data ) if 'batch_mode' in kwargs : self . data [ 'batch_mode' ] = kwargs [ 'batch_mode' ] if 'skip_validation' in kwargs : self . data [ 'skip_validation' ] = kwargs [ 'sk...
Helper method to check if HDX object exists and update it
6,157
def _update_in_hdx ( self , object_type , id_field_name , file_to_upload = None , ** kwargs ) : self . _check_load_existing_object ( object_type , id_field_name ) self . _merge_hdx_update ( object_type , id_field_name , file_to_upload , ** kwargs )
Helper method to check if HDX object exists in HDX and if so update it
6,158
def _write_to_hdx ( self , action , data , id_field_name , file_to_upload = None ) : file = None try : if file_to_upload : file = open ( file_to_upload , 'rb' ) files = [ ( 'upload' , file ) ] else : files = None return self . configuration . call_remoteckan ( self . actions ( ) [ action ] , data , files = files ) exce...
Creates or updates an HDX object in HDX and return HDX object metadata dict
6,159
def _save_to_hdx ( self , action , id_field_name , file_to_upload = None ) : result = self . _write_to_hdx ( action , self . data , id_field_name , file_to_upload ) self . old_data = self . data self . data = result
Creates or updates an HDX object in HDX saving current data and replacing with returned HDX object data from HDX
6,160
def _create_in_hdx ( self , object_type , id_field_name , name_field_name , file_to_upload = None ) : self . check_required_fields ( ) if id_field_name in self . data and self . _load_from_hdx ( object_type , self . data [ id_field_name ] ) : logger . warning ( '%s exists. Updating %s' % ( object_type , self . data [ i...
Helper method to check if resource exists in HDX and if so update it otherwise create it
6,161
def _delete_from_hdx ( self , object_type , id_field_name ) : if id_field_name not in self . data : raise HDXError ( 'No %s field (mandatory) in %s!' % ( id_field_name , object_type ) ) self . _save_to_hdx ( 'delete' , id_field_name )
Helper method to delete a resource from HDX.
6,162
def _addupdate_hdxobject ( self , hdxobjects , id_field , new_hdxobject ) : for hdxobject in hdxobjects : if hdxobject [ id_field ] == new_hdxobject [ id_field ] : merge_two_dictionaries ( hdxobject , new_hdxobject ) return hdxobject hdxobjects . append ( new_hdxobject ) return new_hdxobject
Helper function to add a new HDX object to a supplied list of HDX objects or update existing metadata if the object already exists in the list
6,163
def _remove_hdxobject ( self , objlist , obj , matchon = 'id' , delete = False ) : if objlist is None : return False if isinstance ( obj , six . string_types ) : obj_id = obj elif isinstance ( obj , dict ) or isinstance ( obj , HDXObject ) : obj_id = obj . get ( matchon ) else : raise HDXError ( 'Type of object not a s...
Remove an HDX object from a list within the parent HDX object
6,164
def _convert_hdxobjects ( self , hdxobjects ) : newhdxobjects = list ( ) for hdxobject in hdxobjects : newhdxobjects . append ( hdxobject . data ) return newhdxobjects
Helper function to convert supplied list of HDX objects to a list of dict
6,165
def _copy_hdxobjects ( self , hdxobjects , hdxobjectclass , attribute_to_copy = None ) : newhdxobjects = list ( ) for hdxobject in hdxobjects : newhdxobjectdata = copy . deepcopy ( hdxobject . data ) newhdxobject = hdxobjectclass ( newhdxobjectdata , configuration = self . configuration ) if attribute_to_copy : value =...
Helper function to make a deep copy of a supplied list of HDX objects
6,166
def _separate_hdxobjects ( self , hdxobjects , hdxobjects_name , id_field , hdxobjectclass ) : new_hdxobjects = self . data . get ( hdxobjects_name , list ( ) ) if new_hdxobjects : hdxobject_names = set ( ) for hdxobject in hdxobjects : hdxobject_name = hdxobject [ id_field ] hdxobject_names . add ( hdxobject_name ) fo...
Helper function to take a list of HDX objects contained in the internal dictionary and add them to a supplied list of HDX objects or update existing metadata if any objects already exist in the list . The list in the internal dictionary is then deleted .
6,167
def _get_tags ( self ) : tags = self . data . get ( 'tags' , None ) if not tags : return list ( ) return [ x [ 'name' ] for x in tags ]
Return the dataset's list of tags.
6,168
def _add_tag ( self , tag ) : tags = self . data . get ( 'tags' , None ) if tags : if tag in [ x [ 'name' ] for x in tags ] : return False else : tags = list ( ) tags . append ( { 'name' : tag } ) self . data [ 'tags' ] = tags return True
Add a tag
6,169
def _add_tags ( self , tags ) : alltagsadded = True for tag in tags : if not self . _add_tag ( tag ) : alltagsadded = False return alltagsadded
Add a list of tags.
6,170
def _get_stringlist_from_commastring ( self , field ) : strings = self . data . get ( field ) if strings : return strings . split ( ',' ) else : return list ( )
Return list of strings from comma separated list
6,171
def _add_string_to_commastring ( self , field , string ) : if string in self . _get_stringlist_from_commastring ( field ) : return False strings = '%s,%s' % ( self . data . get ( field , '' ) , string ) if strings [ 0 ] == ',' : strings = strings [ 1 : ] self . data [ field ] = strings return True
Add a string to a comma separated list of strings
6,172
def _add_strings_to_commastring ( self , field , strings ) : allstringsadded = True for string in strings : if not self . _add_string_to_commastring ( field , string ) : allstringsadded = False return allstringsadded
Add a list of strings to a comma separated list of strings
6,173
def _remove_string_from_commastring ( self , field , string ) : commastring = self . data . get ( field , '' ) if string in commastring : self . data [ field ] = commastring . replace ( string , '' ) return True return False
Remove a string from a comma separated list of strings
6,174
def read_from_hdx ( identifier , configuration = None ) : if is_valid_uuid ( identifier ) is False : raise HDXError ( '%s is not a valid resource id!' % identifier ) resource = Resource ( configuration = configuration ) result = resource . _load_from_hdx ( 'resource' , identifier ) if result : return resource return No...
Reads the resource given by identifier from HDX and returns Resource object
6,175
def set_file_to_upload(self, file_to_upload):
    """Delete any existing url and remember the local file path to upload."""
    self.data.pop('url', None)
    self.file_to_upload = file_to_upload
Delete any existing url and set the file uploaded to the local path provided
6,176
def check_url_filetoupload ( self ) : if self . file_to_upload is None : if 'url' in self . data : if 'resource_type' not in self . data : self . data [ 'resource_type' ] = 'api' if 'url_type' not in self . data : self . data [ 'url_type' ] = 'api' else : raise HDXError ( 'Either a url or a file to upload must be suppl...
Check if url or file to upload provided for resource and add resource_type and url_type if not supplied
6,177
def update_in_hdx(self, **kwargs):
    """Check if the resource exists in HDX and, if so, update it."""
    self._check_load_existing_object('resource', 'id')
    if self.file_to_upload:
        # uploading a file supersedes any previously stored url
        self.data.pop('url', None)
    self._merge_hdx_update('resource', 'id', self.file_to_upload, **kwargs)
Check if resource exists in HDX and if so update it
6,178
def create_in_hdx ( self ) : self . check_required_fields ( ) id = self . data . get ( 'id' ) if id and self . _load_from_hdx ( 'resource' , id ) : logger . warning ( '%s exists. Updating %s' % ( 'resource' , id ) ) if self . file_to_upload and 'url' in self . data : del self . data [ 'url' ] self . _merge_hdx_update (...
Check if resource exists in HDX and if so update it otherwise create it
6,179
def get_dataset(self):
    """Return the dataset containing this resource.

    Raises:
      HDXError: when the resource has no package id.
    """
    package_id = self.data.get('package_id')
    if package_id is None:
        raise HDXError('Resource has no package id!')
    return hdx.data.dataset.Dataset.read_from_hdx(package_id)
Return dataset containing this resource
6,180
def download ( self , folder = None ) : url = self . data . get ( 'url' , None ) if not url : raise HDXError ( 'No URL to download!' ) logger . debug ( 'Downloading %s' % url ) filename = self . data [ 'name' ] format = '.%s' % self . data [ 'format' ] if format not in filename : filename = '%s%s' % ( filename , format...
Download resource store to provided folder or temporary folder if no folder supplied
6,181
def get_all_resource_ids_in_datastore ( configuration = None ) : resource = Resource ( configuration = configuration ) success , result = resource . _read_from_hdx ( 'datastore' , '_table_metadata' , 'resource_id' , Resource . actions ( ) [ 'datastore_search' ] , limit = 10000 ) resource_ids = list ( ) if not success :...
Get list of resources that have a datastore returning their ids .
6,182
def has_datastore(self):
    """Check if the resource has a datastore; True when one exists."""
    success, result = self._read_from_hdx(
        'datastore', self.data['id'], 'resource_id',
        self.actions()['datastore_search'])
    if not success:
        logger.debug(result)
        return False
    return bool(result)
Check if the resource has a datastore .
6,183
def delete_datastore(self):
    """Delete the resource from the HDX datastore (best effort: failures
    are logged, not raised)."""
    success, result = self._read_from_hdx(
        'datastore', self.data['id'], 'resource_id',
        self.actions()['datastore_delete'], force=True)
    if not success:
        logger.debug(result)
Delete a resource from the HDX datastore
6,184
def create_datastore ( self , schema = None , primary_key = None , delete_first = 0 , path = None ) : if delete_first == 0 : pass elif delete_first == 1 : self . delete_datastore ( ) elif delete_first == 2 : if primary_key is None : self . delete_datastore ( ) else : raise HDXError ( 'delete_first must be 0, 1 or 2! (0...
For tabular data create a resource in the HDX datastore which enables data preview in HDX . If no schema is provided all fields are assumed to be text . If path is not supplied the file is first downloaded from HDX .
6,185
def create_datastore_for_topline(self, delete_first=0, path=None):
    """Create a datastore using the built-in YAML definition for a topline.

    If path is not supplied the file is first downloaded from HDX.
    """
    schema = load_yaml(
        script_dir_plus_file(join('..', 'hdx_datasource_topline.yml'),
                             Resource))
    self.create_datastore_from_dict_schema(schema, delete_first, path=path)
For tabular data create a resource in the HDX datastore which enables data preview in HDX using the built in YAML definition for a topline . If path is not supplied the file is first downloaded from HDX .
6,186
def update_datastore(self, schema=None, primary_key=None, path=None):
    """Update a resource in the HDX datastore.

    Delegates to create_datastore with delete_first=2 (delete only when
    no primary key is supplied).
    """
    self.create_datastore(schema, primary_key, 2, path=path)
For tabular data update a resource in the HDX datastore which enables data preview in HDX . If no schema is provided all fields are assumed to be text . If path is not supplied the file is first downloaded from HDX .
6,187
def _get_resource_view ( self , resource_view ) : if isinstance ( resource_view , dict ) : resource_view = ResourceView ( resource_view , configuration = self . configuration ) if isinstance ( resource_view , ResourceView ) : return resource_view raise HDXError ( 'Type %s is not a valid resource view!' % type ( resourc...
Get resource view id
6,188
def add_update_resource_views(self, resource_views):
    """Add new or update existing resource views with new metadata.

    Raises:
      HDXError: when resource_views is not a list.
    """
    if not isinstance(resource_views, list):
        raise HDXError('ResourceViews should be a list!')
    for view in resource_views:
        self.add_update_resource_view(view)
Add new or update existing resource views in resource with new metadata .
6,189
def reorder_resource_views ( self , resource_views ) : if not isinstance ( resource_views , list ) : raise HDXError ( 'ResourceViews should be a list!' ) ids = list ( ) for resource_view in resource_views : if isinstance ( resource_view , str ) : resource_view_id = resource_view else : resource_view_id = resource_view ...
Order resource views in resource .
6,190
def delete_resource_view ( self , resource_view ) : if isinstance ( resource_view , str ) : if is_valid_uuid ( resource_view ) is False : raise HDXError ( '%s is not a valid resource view id!' % resource_view ) resource_view = ResourceView ( { 'id' : resource_view } , configuration = self . configuration ) else : resou...
Delete a resource view from the resource and HDX
6,191
def parse_for_simple_stems ( output , skip_empty = False , skip_same_stems = True ) : lines_with_stems = _get_lines_with_stems ( output ) stems = list ( ) last_word = None for line in lines_with_stems : word , stem , _ = line . split ( "\t" ) stem = stem if stem != '-' else None if skip_empty and ( stem is None ) : con...
Parses the output stem lines to produce a list with possible stems for each word in the output .
6,192
def _CreateClassTemplate ( cls , data_type_definition ) : type_name = data_type_definition . name type_description = data_type_definition . description or type_name while type_description . endswith ( '.' ) : type_description = type_description [ : - 1 ] class_attributes_description = [ ] init_arguments = [ ] instance_...
Creates the class template .
6,193
def _IsIdentifier ( cls , string ) : return ( string and not string [ 0 ] . isdigit ( ) and all ( character . isalnum ( ) or character == '_' for character in string ) )
Checks if a string contains an identifier .
6,194
def _ValidateDataTypeDefinition ( cls , data_type_definition ) : if not cls . _IsIdentifier ( data_type_definition . name ) : raise ValueError ( 'Data type definition name: {0!s} not a valid identifier' . format ( data_type_definition . name ) ) if keyword . iskeyword ( data_type_definition . name ) : raise ValueError ...
Validates the data type definition .
6,195
def CreateClass ( cls , data_type_definition ) : cls . _ValidateDataTypeDefinition ( data_type_definition ) class_definition = cls . _CreateClassTemplate ( data_type_definition ) namespace = { '__builtins__' : { 'object' : builtins . object , 'super' : builtins . super } , '__name__' : '{0:s}' . format ( data_type_defi...
Creates a new structure values class .
6,196
def DeregisterDefinition(self, data_type_definition):
    """Deregister a data type definition (lookup is case-insensitive).

    Raises:
      KeyError: when no definition is set for the name.
    """
    lookup_name = data_type_definition.name.lower()
    if lookup_name not in self._definitions:
        raise KeyError('Definition not set for name: {0:s}.'.format(
            data_type_definition.name))
    del self._definitions[lookup_name]
Deregisters a data type definition .
6,197
def GetDefinitionByName(self, name):
    """Retrieve a specific data type definition by name, or None.

    The primary lookup is case-insensitive; when it fails, the alias
    table is consulted with the name as given (not lower-cased).
    """
    lookup_name = name.lower()
    if lookup_name not in self._definitions:
        lookup_name = self._aliases.get(name, None)
    return self._definitions.get(lookup_name, None)
Retrieves a specific data type definition by name .
6,198
def RegisterDefinition ( self , data_type_definition ) : name_lower = data_type_definition . name . lower ( ) if name_lower in self . _definitions : raise KeyError ( 'Definition already set for name: {0:s}.' . format ( data_type_definition . name ) ) if data_type_definition . name in self . _aliases : raise KeyError ( ...
Registers a data type definition .
6,199
def apply_on_csv_string(rules_str, func):
    """Split rules_str on commas, trim whitespace on each resulting string
    and apply func to each item.

    The loop variable is named ``item`` rather than shadowing the builtin
    ``str`` as before.
    """
    for item in rules_str.split(','):
        func(item.strip())
Splits a given string by comma, trims whitespace on the resulting strings, and applies a given func to each item.