idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
6,100
def prepareInsert ( self , oself , store ) : if self . relative : fspath = self . __get__ ( oself ) oself . __dirty__ [ self . attrname ] = self , self . infilter ( fspath , oself , store )
Prepare for insertion into the database by making the dbunderlying attribute of the item a relative pathname with respect to the store rather than an absolute pathname .
6,101
def restore ( self , time = None ) : if self . deleted : time = time if time else self . deleted_at if time == self . deleted_at : self . deleted = False self . save ( ) return True else : return False return False
Undeletes the object . Returns True if undeleted False if it was already not deleted
6,102
def full_restore ( self , using = None ) : using = using or router . db_for_write ( self . __class__ , instance = self ) restore_counter = Counter ( ) if self . deleted : time = self . deleted_at else : return restore_counter self . collector = models . deletion . Collector ( using = using ) self . collector . collect ( [ self ] ) for model , instances in self . collector . data . items ( ) : instances_to_delete = sorted ( instances , key = attrgetter ( "pk" ) ) self . sort ( ) for qs in self . collector . fast_deletes : for qs_instance in qs : restore_counter . update ( [ qs_instance . _meta . model_name ] ) qs_instance . restore ( time = time ) for model , instances in self . collector . data . items ( ) : for instance in instances : restore_counter . update ( [ instance . _meta . model_name ] ) instance . restore ( time = time ) return sum ( restore_counter . values ( ) ) , dict ( restore_counter ) self . collector = models . deletion . Collector ( using = using ) self . collector . collect ( [ self ] , keep_parents = keep_parents )
Restores itself as well as objects that might have been deleted along with it if cascade is the deletion strategy
6,103
def connect_to_ec2 ( region = 'us-east-1' , access_key = None , secret_key = None ) : if access_key : logger . info ( 'Connecting to AWS EC2 in {}' . format ( region ) ) connection = ec2 . connect_to_region ( region , aws_access_key_id = access_key , aws_secret_access_key = secret_key ) else : metadata = get_instance_metadata ( timeout = 1 , num_retries = 1 ) if metadata : try : region = metadata [ 'placement' ] [ 'availability-zone' ] [ : - 1 ] except KeyError : pass logger . info ( 'Connecting to AWS EC2 in {}' . format ( region ) ) connection = ec2 . connect_to_region ( region ) if not connection : logger . error ( 'An error occurred when connecting to EC2' ) sys . exit ( 1 ) return connection
Connect to AWS ec2
6,104
def parse ( self , output ) : output = self . _get_lines_with_stems ( output ) words = self . _make_unique ( output ) return self . _parse_for_simple_stems ( words )
Find stems for a given text .
6,105
def validlocations ( configuration = None ) : if Locations . _validlocations is None : if configuration is None : configuration = Configuration . read ( ) Locations . _validlocations = configuration . call_remoteckan ( 'group_list' , { 'all_fields' : True } ) return Locations . _validlocations
Read valid locations from HDX
6,106
def get_location_from_HDX_code ( code , locations = None , configuration = None ) : if locations is None : locations = Locations . validlocations ( configuration ) for locdict in locations : if code . upper ( ) == locdict [ 'name' ] . upper ( ) : return locdict [ 'title' ] return None
Get location from HDX location code
6,107
def CheckDirectory ( self , path , extension = 'yaml' ) : result = True if extension : glob_spec = os . path . join ( path , '*.{0:s}' . format ( extension ) ) else : glob_spec = os . path . join ( path , '*' ) for definition_file in sorted ( glob . glob ( glob_spec ) ) : if not self . CheckFile ( definition_file ) : result = False return result
Validates definition files in a directory .
6,108
def CheckFile ( self , path ) : print ( 'Checking: {0:s}' . format ( path ) ) definitions_registry = registry . DataTypeDefinitionsRegistry ( ) definitions_reader = reader . YAMLDataTypeDefinitionsFileReader ( ) result = False try : definitions_reader . ReadFile ( definitions_registry , path ) result = True except KeyError as exception : logging . warning ( ( 'Unable to register data type definition in file: {0:s} with ' 'error: {1:s}' ) . format ( path , exception ) ) except errors . FormatError as exception : logging . warning ( 'Unable to validate file: {0:s} with error: {1:s}' . format ( path , exception ) ) return result
Validates the definition in a file .
6,109
def inline_css ( html_message , encoding = 'unicode' ) : document = etree . HTML ( html_message ) converter = Conversion ( ) converter . perform ( document , html_message , '' , encoding = encoding ) return converter . convertedHTML
Inlines all CSS in an HTML string
6,110
def _CheckByteStreamSize ( self , byte_stream , byte_offset , data_type_size ) : try : byte_stream_size = len ( byte_stream ) except Exception as exception : raise errors . MappingError ( exception ) if byte_stream_size - byte_offset < data_type_size : raise errors . ByteStreamTooSmallError ( 'Byte stream too small requested: {0:d} available: {1:d}' . format ( data_type_size , byte_stream_size ) )
Checks if the byte stream is large enough for the data type .
6,111
def _GetByteStreamOperation ( self ) : byte_order_string = self . GetStructByteOrderString ( ) format_string = self . GetStructFormatString ( ) if not format_string : return None format_string = '' . join ( [ byte_order_string , format_string ] ) return byte_operations . StructOperation ( format_string )
Retrieves the byte stream operation .
6,112
def FoldValue ( self , value ) : if value is False and self . _data_type_definition . false_value is not None : return self . _data_type_definition . false_value if value is True and self . _data_type_definition . true_value is not None : return self . _data_type_definition . true_value raise ValueError ( 'No matching True and False values' )
Folds the data type into a value .
6,113
def _CalculateElementsDataSize ( self , context ) : elements_data_size = None if self . _HasElementsDataSize ( ) : elements_data_size = self . _EvaluateElementsDataSize ( context ) elif self . _HasNumberOfElements ( ) : element_byte_size = self . _element_data_type_definition . GetByteSize ( ) if element_byte_size is not None : number_of_elements = self . _EvaluateNumberOfElements ( context ) elements_data_size = number_of_elements * element_byte_size return elements_data_size
Calculates the elements data size .
6,114
def _EvaluateElementsDataSize ( self , context ) : elements_data_size = None if self . _data_type_definition . elements_data_size : elements_data_size = self . _data_type_definition . elements_data_size elif self . _data_type_definition . elements_data_size_expression : expression = self . _data_type_definition . elements_data_size_expression namespace = { } if context and context . values : namespace . update ( context . values ) namespace [ '__builtins__' ] = { } try : elements_data_size = eval ( expression , namespace ) except Exception as exception : raise errors . MappingError ( 'Unable to determine elements data size with error: {0!s}' . format ( exception ) ) if elements_data_size is None or elements_data_size < 0 : raise errors . MappingError ( 'Invalid elements data size: {0!s}' . format ( elements_data_size ) ) return elements_data_size
Evaluates elements data size .
6,115
def _EvaluateNumberOfElements ( self , context ) : number_of_elements = None if self . _data_type_definition . number_of_elements : number_of_elements = self . _data_type_definition . number_of_elements elif self . _data_type_definition . number_of_elements_expression : expression = self . _data_type_definition . number_of_elements_expression namespace = { } if context and context . values : namespace . update ( context . values ) namespace [ '__builtins__' ] = { } try : number_of_elements = eval ( expression , namespace ) except Exception as exception : raise errors . MappingError ( 'Unable to determine number of elements with error: {0!s}' . format ( exception ) ) if number_of_elements is None or number_of_elements < 0 : raise errors . MappingError ( 'Invalid number of elements: {0!s}' . format ( number_of_elements ) ) return number_of_elements
Evaluates number of elements .
6,116
def _GetElementDataTypeDefinition ( self , data_type_definition ) : if not data_type_definition : raise errors . FormatError ( 'Missing data type definition' ) element_data_type_definition = getattr ( data_type_definition , 'element_data_type_definition' , None ) if not element_data_type_definition : raise errors . FormatError ( 'Invalid data type definition missing element' ) return element_data_type_definition
Retrieves the element data type definition .
6,117
def _CheckCompositeMap ( self , data_type_definition ) : if not data_type_definition : raise errors . FormatError ( 'Missing data type definition' ) members = getattr ( data_type_definition , 'members' , None ) if not members : raise errors . FormatError ( 'Invalid data type definition missing members' ) is_composite_map = False last_member_byte_order = data_type_definition . byte_order for member_definition in members : if member_definition . IsComposite ( ) : is_composite_map = True break if ( last_member_byte_order != definitions . BYTE_ORDER_NATIVE and member_definition . byte_order != definitions . BYTE_ORDER_NATIVE and last_member_byte_order != member_definition . byte_order ) : is_composite_map = True break last_member_byte_order = member_definition . byte_order return is_composite_map
Determines if the data type definition needs a composite map .
6,118
def _GetMemberDataTypeMaps ( self , data_type_definition , data_type_map_cache ) : if not data_type_definition : raise errors . FormatError ( 'Missing data type definition' ) members = getattr ( data_type_definition , 'members' , None ) if not members : raise errors . FormatError ( 'Invalid data type definition missing members' ) data_type_maps = [ ] members_data_size = 0 for member_definition in members : if isinstance ( member_definition , data_types . MemberDataTypeDefinition ) : member_definition = member_definition . member_data_type_definition if ( data_type_definition . byte_order != definitions . BYTE_ORDER_NATIVE and member_definition . byte_order == definitions . BYTE_ORDER_NATIVE ) : member_definition = copy . copy ( member_definition ) member_definition . name = '_{0:s}_{1:s}' . format ( data_type_definition . name , member_definition . name ) member_definition . byte_order = data_type_definition . byte_order if member_definition . name not in data_type_map_cache : data_type_map = DataTypeMapFactory . CreateDataTypeMapByType ( member_definition ) data_type_map_cache [ member_definition . name ] = data_type_map data_type_map = data_type_map_cache [ member_definition . name ] if members_data_size is not None : if not isinstance ( member_definition , data_types . PaddingDefinition ) : byte_size = member_definition . GetByteSize ( ) else : _ , byte_size = divmod ( members_data_size , member_definition . alignment_size ) if byte_size > 0 : byte_size = member_definition . alignment_size - byte_size data_type_map . byte_size = byte_size if byte_size is None : members_data_size = None else : members_data_size += byte_size data_type_maps . append ( data_type_map ) return data_type_maps
Retrieves the member data type maps .
6,119
def GetName ( self , number ) : value = self . _data_type_definition . values_per_number . get ( number , None ) if not value : return None return value . name
Retrieves the name of an enumeration value by number .
6,120
def CreateDataTypeMap ( self , definition_name ) : data_type_definition = self . _definitions_registry . GetDefinitionByName ( definition_name ) if not data_type_definition : return None return DataTypeMapFactory . CreateDataTypeMapByType ( data_type_definition )
Creates a specific data type map by name .
6,121
def CreateDataTypeMapByType ( cls , data_type_definition ) : data_type_map_class = cls . _MAP_PER_DEFINITION . get ( data_type_definition . TYPE_INDICATOR , None ) if not data_type_map_class : return None return data_type_map_class ( data_type_definition )
Creates a specific data type map by type indicator .
6,122
def IsComposite ( self ) : return bool ( self . condition ) or ( self . member_data_type_definition and self . member_data_type_definition . IsComposite ( ) )
Determines if the data type is composite .
6,123
def AddValue ( self , name , number , aliases = None , description = None ) : if name in self . values_per_name : raise KeyError ( 'Value with name: {0:s} already exists.' . format ( name ) ) if number in self . values_per_number : raise KeyError ( 'Value with number: {0!s} already exists.' . format ( number ) ) for alias in aliases or [ ] : if alias in self . values_per_alias : raise KeyError ( 'Value with alias: {0:s} already exists.' . format ( alias ) ) enumeration_value = EnumerationValue ( name , number , aliases = aliases , description = description ) self . values . append ( enumeration_value ) self . values_per_name [ name ] = enumeration_value self . values_per_number [ number ] = enumeration_value for alias in aliases or [ ] : self . values_per_alias [ alias ] = enumeration_value
Adds an enumeration value .
6,124
def _ReadBooleanDataTypeDefinition ( self , definitions_registry , definition_values , definition_name , is_member = False ) : return self . _ReadFixedSizeDataTypeDefinition ( definitions_registry , definition_values , data_types . BooleanDefinition , definition_name , self . _SUPPORTED_ATTRIBUTES_BOOLEAN , is_member = is_member , supported_size_values = ( 1 , 2 , 4 ) )
Reads a boolean data type definition .
6,125
def _ReadCharacterDataTypeDefinition ( self , definitions_registry , definition_values , definition_name , is_member = False ) : return self . _ReadFixedSizeDataTypeDefinition ( definitions_registry , definition_values , data_types . CharacterDefinition , definition_name , self . _SUPPORTED_ATTRIBUTES_FIXED_SIZE_DATA_TYPE , is_member = is_member , supported_size_values = ( 1 , 2 , 4 ) )
Reads a character data type definition .
6,126
def _ReadConstantDataTypeDefinition ( self , definitions_registry , definition_values , definition_name , is_member = False ) : if is_member : error_message = 'data type not supported as member' raise errors . DefinitionReaderError ( definition_name , error_message ) value = definition_values . get ( 'value' , None ) if value is None : error_message = 'missing value' raise errors . DefinitionReaderError ( definition_name , error_message ) definition_object = self . _ReadSemanticDataTypeDefinition ( definitions_registry , definition_values , data_types . ConstantDefinition , definition_name , self . _SUPPORTED_DEFINITION_VALUES_CONSTANT ) definition_object . value = value return definition_object
Reads a constant data type definition .
6,127
def _ReadDataTypeDefinitionWithMembers ( self , definitions_registry , definition_values , data_type_definition_class , definition_name , supports_conditions = False ) : members = definition_values . get ( 'members' , None ) if not members : error_message = 'missing members' raise errors . DefinitionReaderError ( definition_name , error_message ) supported_definition_values = ( self . _SUPPORTED_DEFINITION_VALUES_STORAGE_DATA_TYPE_WITH_MEMBERS ) definition_object = self . _ReadDataTypeDefinition ( definitions_registry , definition_values , data_type_definition_class , definition_name , supported_definition_values ) attributes = definition_values . get ( 'attributes' , None ) if attributes : unsupported_attributes = set ( attributes . keys ( ) ) . difference ( self . _SUPPORTED_ATTRIBUTES_STORAGE_DATA_TYPE ) if unsupported_attributes : error_message = 'unsupported attributes: {0:s}' . format ( ', ' . join ( unsupported_attributes ) ) raise errors . DefinitionReaderError ( definition_name , error_message ) byte_order = attributes . get ( 'byte_order' , definitions . BYTE_ORDER_NATIVE ) if byte_order not in definitions . BYTE_ORDERS : error_message = 'unsupported byte-order attribute: {0!s}' . format ( byte_order ) raise errors . DefinitionReaderError ( definition_name , error_message ) definition_object . byte_order = byte_order for member in members : section = member . get ( 'section' , None ) if section : member_section_definition = data_types . MemberSectionDefinition ( section ) definition_object . AddSectionDefinition ( member_section_definition ) else : member_data_type_definition = self . _ReadMemberDataTypeDefinitionMember ( definitions_registry , member , definition_object . name , supports_conditions = supports_conditions ) definition_object . AddMemberDefinition ( member_data_type_definition ) return definition_object
Reads a data type definition with members .
6,128
def _ReadEnumerationDataTypeDefinition ( self , definitions_registry , definition_values , definition_name , is_member = False ) : if is_member : error_message = 'data type not supported as member' raise errors . DefinitionReaderError ( definition_name , error_message ) values = definition_values . get ( 'values' ) if not values : error_message = 'missing values' raise errors . DefinitionReaderError ( definition_name , error_message ) definition_object = self . _ReadSemanticDataTypeDefinition ( definitions_registry , definition_values , data_types . EnumerationDefinition , definition_name , self . _SUPPORTED_DEFINITION_VALUES_ENUMERATION ) last_name = None for enumeration_value in values : aliases = enumeration_value . get ( 'aliases' , None ) description = enumeration_value . get ( 'description' , None ) name = enumeration_value . get ( 'name' , None ) number = enumeration_value . get ( 'number' , None ) if not name or number is None : if last_name : error_location = 'after: {0:s}' . format ( last_name ) else : error_location = 'at start' error_message = '{0:s} missing name or number' . format ( error_location ) raise errors . DefinitionReaderError ( definition_name , error_message ) else : try : definition_object . AddValue ( name , number , aliases = aliases , description = description ) except KeyError as exception : error_message = '{0!s}' . format ( exception ) raise errors . DefinitionReaderError ( definition_name , error_message ) last_name = name return definition_object
Reads an enumeration data type definition .
6,129
def _ReadElementSequenceDataTypeDefinition ( self , definitions_registry , definition_values , data_type_definition_class , definition_name , supported_definition_values ) : unsupported_definition_values = set ( definition_values . keys ( ) ) . difference ( supported_definition_values ) if unsupported_definition_values : error_message = 'unsupported definition values: {0:s}' . format ( ', ' . join ( unsupported_definition_values ) ) raise errors . DefinitionReaderError ( definition_name , error_message ) element_data_type = definition_values . get ( 'element_data_type' , None ) if not element_data_type : error_message = 'missing element data type' raise errors . DefinitionReaderError ( definition_name , error_message ) elements_data_size = definition_values . get ( 'elements_data_size' , None ) elements_terminator = definition_values . get ( 'elements_terminator' , None ) number_of_elements = definition_values . get ( 'number_of_elements' , None ) size_values = ( elements_data_size , elements_terminator , number_of_elements ) size_values = [ value for value in size_values if value is not None ] if not size_values : error_message = ( 'missing element data size, elements terminator and number of ' 'elements' ) raise errors . DefinitionReaderError ( definition_name , error_message ) if len ( size_values ) > 1 : error_message = ( 'element data size, elements terminator and number of elements ' 'not allowed to be set at the same time' ) raise errors . DefinitionReaderError ( definition_name , error_message ) element_data_type_definition = definitions_registry . GetDefinitionByName ( element_data_type ) if not element_data_type_definition : error_message = 'undefined element data type: {0:s}.' . format ( element_data_type ) raise errors . DefinitionReaderError ( definition_name , error_message ) element_byte_size = element_data_type_definition . GetByteSize ( ) element_type_indicator = element_data_type_definition . 
TYPE_INDICATOR if not element_byte_size and element_type_indicator != ( definitions . TYPE_INDICATOR_STRING ) : error_message = ( 'unsupported variable size element data type: {0:s}' . format ( element_data_type ) ) raise errors . DefinitionReaderError ( definition_name , error_message ) aliases = definition_values . get ( 'aliases' , None ) description = definition_values . get ( 'description' , None ) urls = definition_values . get ( 'urls' , None ) definition_object = data_type_definition_class ( definition_name , element_data_type_definition , aliases = aliases , data_type = element_data_type , description = description , urls = urls ) if elements_data_size is not None : try : definition_object . elements_data_size = int ( elements_data_size ) except ValueError : definition_object . elements_data_size_expression = elements_data_size elif elements_terminator is not None : if isinstance ( elements_terminator , py2to3 . UNICODE_TYPE ) : elements_terminator = elements_terminator . encode ( 'ascii' ) definition_object . elements_terminator = elements_terminator elif number_of_elements is not None : try : definition_object . number_of_elements = int ( number_of_elements ) except ValueError : definition_object . number_of_elements_expression = number_of_elements return definition_object
Reads an element sequence data type definition .
6,130
def _ReadFixedSizeDataTypeDefinition ( self , definitions_registry , definition_values , data_type_definition_class , definition_name , supported_attributes , default_size = definitions . SIZE_NATIVE , default_units = 'bytes' , is_member = False , supported_size_values = None ) : definition_object = self . _ReadStorageDataTypeDefinition ( definitions_registry , definition_values , data_type_definition_class , definition_name , supported_attributes , is_member = is_member ) attributes = definition_values . get ( 'attributes' , None ) if attributes : size = attributes . get ( 'size' , default_size ) if size != definitions . SIZE_NATIVE : try : int ( size ) except ValueError : error_message = 'unuspported size attribute: {0!s}' . format ( size ) raise errors . DefinitionReaderError ( definition_name , error_message ) if supported_size_values and size not in supported_size_values : error_message = 'unuspported size value: {0!s}' . format ( size ) raise errors . DefinitionReaderError ( definition_name , error_message ) definition_object . size = size definition_object . units = attributes . get ( 'units' , default_units ) return definition_object
Reads a fixed - size data type definition .
6,131
def _ReadFloatingPointDataTypeDefinition ( self , definitions_registry , definition_values , definition_name , is_member = False ) : return self . _ReadFixedSizeDataTypeDefinition ( definitions_registry , definition_values , data_types . FloatingPointDefinition , definition_name , self . _SUPPORTED_ATTRIBUTES_FIXED_SIZE_DATA_TYPE , is_member = is_member , supported_size_values = ( 4 , 8 ) )
Reads a floating - point data type definition .
6,132
def _ReadFormatDataTypeDefinition ( self , definitions_registry , definition_values , definition_name , is_member = False ) : if is_member : error_message = 'data type not supported as member' raise errors . DefinitionReaderError ( definition_name , error_message ) definition_object = self . _ReadLayoutDataTypeDefinition ( definitions_registry , definition_values , data_types . FormatDefinition , definition_name , self . _SUPPORTED_DEFINITION_VALUES_FORMAT ) definition_object . metadata = definition_values . get ( 'metadata' , { } ) attributes = definition_values . get ( 'attributes' , None ) if attributes : unsupported_attributes = set ( attributes . keys ( ) ) . difference ( self . _SUPPORTED_ATTRIBUTES_FORMAT ) if unsupported_attributes : error_message = 'unsupported attributes: {0:s}' . format ( ', ' . join ( unsupported_attributes ) ) raise errors . DefinitionReaderError ( definition_name , error_message ) byte_order = attributes . get ( 'byte_order' , definitions . BYTE_ORDER_NATIVE ) if byte_order not in definitions . BYTE_ORDERS : error_message = 'unsupported byte-order attribute: {0!s}' . format ( byte_order ) raise errors . DefinitionReaderError ( definition_name , error_message ) definition_object . byte_order = byte_order return definition_object
Reads a format data type definition .
6,133
def _ReadIntegerDataTypeDefinition ( self , definitions_registry , definition_values , definition_name , is_member = False ) : definition_object = self . _ReadFixedSizeDataTypeDefinition ( definitions_registry , definition_values , data_types . IntegerDefinition , definition_name , self . _SUPPORTED_ATTRIBUTES_INTEGER , is_member = is_member , supported_size_values = ( 1 , 2 , 4 , 8 ) ) attributes = definition_values . get ( 'attributes' , None ) if attributes : format_attribute = attributes . get ( 'format' , definitions . FORMAT_SIGNED ) if format_attribute not in self . _INTEGER_FORMAT_ATTRIBUTES : error_message = 'unsupported format attribute: {0!s}' . format ( format_attribute ) raise errors . DefinitionReaderError ( definition_name , error_message ) definition_object . format = format_attribute return definition_object
Reads an integer data type definition .
6,134
def _ReadLayoutDataTypeDefinition ( self , definitions_registry , definition_values , data_type_definition_class , definition_name , supported_definition_values ) : return self . _ReadDataTypeDefinition ( definitions_registry , definition_values , data_type_definition_class , definition_name , supported_definition_values )
Reads a layout data type definition .
6,135
def _ReadPaddingDataTypeDefinition ( self , definitions_registry , definition_values , definition_name , is_member = False ) : if not is_member : error_message = 'data type only supported as member' raise errors . DefinitionReaderError ( definition_name , error_message ) definition_object = self . _ReadDataTypeDefinition ( definitions_registry , definition_values , data_types . PaddingDefinition , definition_name , self . _SUPPORTED_DEFINITION_VALUES_PADDING ) alignment_size = definition_values . get ( 'alignment_size' , None ) if not alignment_size : error_message = 'missing alignment_size' raise errors . DefinitionReaderError ( definition_name , error_message ) try : int ( alignment_size ) except ValueError : error_message = 'unuspported alignment size attribute: {0!s}' . format ( alignment_size ) raise errors . DefinitionReaderError ( definition_name , error_message ) if alignment_size not in ( 2 , 4 , 8 , 16 ) : error_message = 'unuspported alignment size value: {0!s}' . format ( alignment_size ) raise errors . DefinitionReaderError ( definition_name , error_message ) definition_object . alignment_size = alignment_size return definition_object
Reads a padding data type definition .
6,136
def _ReadSemanticDataTypeDefinition ( self , definitions_registry , definition_values , data_type_definition_class , definition_name , supported_definition_values ) : return self . _ReadDataTypeDefinition ( definitions_registry , definition_values , data_type_definition_class , definition_name , supported_definition_values )
Reads a semantic data type definition .
6,137
def _ReadSequenceDataTypeDefinition ( self , definitions_registry , definition_values , definition_name , is_member = False ) : if is_member : supported_definition_values = ( self . _SUPPORTED_DEFINITION_VALUES_ELEMENTS_MEMBER_DATA_TYPE ) else : supported_definition_values = ( self . _SUPPORTED_DEFINITION_VALUES_ELEMENTS_DATA_TYPE ) return self . _ReadElementSequenceDataTypeDefinition ( definitions_registry , definition_values , data_types . SequenceDefinition , definition_name , supported_definition_values )
Reads a sequence data type definition .
6,138
def _ReadStorageDataTypeDefinition ( self , definitions_registry , definition_values , data_type_definition_class , definition_name , supported_attributes , is_member = False ) : if is_member : supported_definition_values = ( self . _SUPPORTED_DEFINITION_VALUES_MEMBER_DATA_TYPE ) else : supported_definition_values = ( self . _SUPPORTED_DEFINITION_VALUES_STORAGE_DATA_TYPE ) definition_object = self . _ReadDataTypeDefinition ( definitions_registry , definition_values , data_type_definition_class , definition_name , supported_definition_values ) attributes = definition_values . get ( 'attributes' , None ) if attributes : unsupported_attributes = set ( attributes . keys ( ) ) . difference ( supported_attributes ) if unsupported_attributes : error_message = 'unsupported attributes: {0:s}' . format ( ', ' . join ( unsupported_attributes ) ) raise errors . DefinitionReaderError ( definition_name , error_message ) byte_order = attributes . get ( 'byte_order' , definitions . BYTE_ORDER_NATIVE ) if byte_order not in definitions . BYTE_ORDERS : error_message = 'unsupported byte-order attribute: {0!s}' . format ( byte_order ) raise errors . DefinitionReaderError ( definition_name , error_message ) definition_object . byte_order = byte_order return definition_object
Reads a storage data type definition .
6,139
def _ReadStreamDataTypeDefinition ( self , definitions_registry , definition_values , definition_name , is_member = False ) : if is_member : supported_definition_values = ( self . _SUPPORTED_DEFINITION_VALUES_ELEMENTS_MEMBER_DATA_TYPE ) else : supported_definition_values = ( self . _SUPPORTED_DEFINITION_VALUES_ELEMENTS_DATA_TYPE ) return self . _ReadElementSequenceDataTypeDefinition ( definitions_registry , definition_values , data_types . StreamDefinition , definition_name , supported_definition_values )
Reads a stream data type definition .
6,140
def _ReadStringDataTypeDefinition ( self , definitions_registry , definition_values , definition_name , is_member = False ) : if is_member : supported_definition_values = ( self . _SUPPORTED_DEFINITION_VALUES_STRING_MEMBER ) else : supported_definition_values = self . _SUPPORTED_DEFINITION_VALUES_STRING definition_object = self . _ReadElementSequenceDataTypeDefinition ( definitions_registry , definition_values , data_types . StringDefinition , definition_name , supported_definition_values ) encoding = definition_values . get ( 'encoding' , None ) if not encoding : error_message = 'missing encoding' raise errors . DefinitionReaderError ( definition_name , error_message ) definition_object . encoding = encoding return definition_object
Reads a string data type definition .
6,141
def _ReadStructureDataTypeDefinition ( self , definitions_registry , definition_values , definition_name , is_member = False ) : if is_member : error_message = 'data type not supported as member' raise errors . DefinitionReaderError ( definition_name , error_message ) return self . _ReadDataTypeDefinitionWithMembers ( definitions_registry , definition_values , data_types . StructureDefinition , definition_name , supports_conditions = True )
Reads a structure data type definition .
6,142
def _ReadStructureFamilyDataTypeDefinition(
        self, definitions_registry, definition_values, definition_name,
        is_member=False):
    """Reads a structure family data type definition.

    Resolves the runtime and member structure definitions by name and wires
    them into the family definition.

    Args:
        definitions_registry: registry used to resolve referenced definitions.
        definition_values (dict): values parsed from the definition source.
        definition_name (str): name of the definition.
        is_member (bool): True when the definition is a structure member.

    Returns:
        The structure family definition.

    Raises:
        errors.DefinitionReaderError: if used as a member, if runtime or
            members are missing or undefined, or if a referenced structure
            already belongs to another family.
    """
    if is_member:
        error_message = 'data type not supported as member'
        raise errors.DefinitionReaderError(definition_name, error_message)
    definition_object = self._ReadLayoutDataTypeDefinition(
        definitions_registry, definition_values,
        data_types.StructureFamilyDefinition, definition_name,
        self._SUPPORTED_DEFINITION_VALUES_STRUCTURE_FAMILY)
    runtime = definition_values.get('runtime', None)
    if not runtime:
        error_message = 'missing runtime'
        raise errors.DefinitionReaderError(definition_name, error_message)
    runtime_data_type_definition = definitions_registry.GetDefinitionByName(
        runtime)
    if not runtime_data_type_definition:
        error_message = 'undefined runtime: {0:s}.'.format(runtime)
        raise errors.DefinitionReaderError(definition_name, error_message)
    if runtime_data_type_definition.family_definition:
        # A structure can belong to at most one family.
        error_message = 'runtime: {0:s} already part of a family.'.format(
            runtime)
        raise errors.DefinitionReaderError(definition_name, error_message)
    definition_object.AddRuntimeDefinition(runtime_data_type_definition)
    members = definition_values.get('members', None)
    if not members:
        error_message = 'missing members'
        raise errors.DefinitionReaderError(definition_name, error_message)
    for member in members:
        member_data_type_definition = definitions_registry.GetDefinitionByName(
            member)
        if not member_data_type_definition:
            error_message = 'undefined member: {0:s}.'.format(member)
            raise errors.DefinitionReaderError(definition_name, error_message)
        if member_data_type_definition.family_definition:
            error_message = 'member: {0:s} already part of a family.'.format(
                member)
            raise errors.DefinitionReaderError(definition_name, error_message)
        definition_object.AddMemberDefinition(member_data_type_definition)
    return definition_object
Reads a structure family data type definition .
6,143
def _ReadUnionDataTypeDefinition(
        self, definitions_registry, definition_values, definition_name,
        is_member=False):
    """Reads a union data type definition.

    Unions are read as member-bearing data types without support for
    member conditions.
    """
    return self._ReadDataTypeDefinitionWithMembers(
        definitions_registry, definition_values, data_types.UnionDefinition,
        definition_name, supports_conditions=False)
Reads a union data type definition.
6,144
def _ReadUUIDDataTypeDefinition(
        self, definitions_registry, definition_values, definition_name,
        is_member=False):
    """Reads a UUID data type definition.

    A UUID is a fixed-size data type; only a size of 16 bytes is valid.
    """
    return self._ReadFixedSizeDataTypeDefinition(
        definitions_registry, definition_values, data_types.UUIDDefinition,
        definition_name, self._SUPPORTED_ATTRIBUTES_FIXED_SIZE_DATA_TYPE,
        default_size=16, is_member=is_member, supported_size_values=(16,))
Reads a UUID data type definition.
6,145
def ReadFile(self, definitions_registry, path):
    """Reads data type definitions from a file into the registry.

    Args:
        definitions_registry: registry the definitions are added to.
        path (str): path of the definitions file.
    """
    with open(path, 'r') as stream:
        self.ReadFileObject(definitions_registry, stream)
Reads data type definitions from a file into the registry .
6,146
def _GetFormatErrorLocation ( self , yaml_definition , last_definition_object ) : name = yaml_definition . get ( 'name' , None ) if name : error_location = 'in: {0:s}' . format ( name or '<NAMELESS>' ) elif last_definition_object : error_location = 'after: {0:s}' . format ( last_definition_object . name ) else : error_location = 'at start' return error_location
Retrieves a format error location .
6,147
def ReadFileObject(self, definitions_registry, file_object):
    """Reads data type definitions from a file-like object into the registry.

    Each YAML document in the stream is read as one definition and
    registered; lower level errors are re-raised as FormatError with a
    location hint.

    Args:
        definitions_registry: registry the definitions are added to.
        file_object: file-like object containing the YAML definitions.

    Raises:
        errors.FormatError: on a missing definition object, a definition
            reader error or a YAML parsing error.
    """
    last_definition_object = None
    error_location = None
    error_message = None
    try:
        yaml_generator = yaml.safe_load_all(file_object)
        for yaml_definition in yaml_generator:
            definition_object = self._ReadDefinition(
                definitions_registry, yaml_definition)
            if not definition_object:
                error_location = self._GetFormatErrorLocation(
                    yaml_definition, last_definition_object)
                error_message = '{0:s} Missing definition object.'.format(
                    error_location)
                raise errors.FormatError(error_message)
            definitions_registry.RegisterDefinition(definition_object)
            last_definition_object = definition_object
    except errors.DefinitionReaderError as exception:
        error_message = 'in: {0:s} {1:s}'.format(
            exception.name or '<NAMELESS>', exception.message)
        raise errors.FormatError(error_message)
    except (yaml.reader.ReaderError, yaml.scanner.ScannerError) as exception:
        # The YAML parser failed outside a specific definition; report the
        # position relative to the last definition read successfully.
        error_location = self._GetFormatErrorLocation(
            {}, last_definition_object)
        error_message = '{0:s} {1!s}'.format(error_location, exception)
        raise errors.FormatError(error_message)
Reads data type definitions from a file - like object into the registry .
6,148
def read_from_hdx(identifier, configuration=None):
    """Reads the organization given by identifier from HDX.

    Args:
        identifier (str): identifier of the organization.
        configuration: optional HDX configuration.

    Returns:
        Organization or None if the organization was not found.
    """
    organization = Organization(configuration=configuration)
    if organization._load_from_hdx('organization', identifier):
        return organization
    return None
Reads the organization given by identifier from HDX and returns Organization object
6,149
def get_users(self, capacity=None):
    """Returns the organization's users.

    Args:
        capacity (str): optional filter on the user's capacity within the
            organization (e.g. 'admin'); None returns all users.

    Returns:
        list: User objects, each with its 'capacity' value set.
    """
    users = list()
    usersdicts = self.data.get('users')
    if usersdicts is not None:
        for userdata in usersdicts:
            if capacity is not None and userdata['capacity'] != capacity:
                continue
            # Renamed local from 'id' to avoid shadowing the builtin;
            # fall back to the user name when no id is present.
            user_id = userdata.get('id')
            if user_id is None:
                user_id = userdata['name']
            user = hdx.data.user.User.read_from_hdx(
                user_id, configuration=self.configuration)
            user['capacity'] = userdata['capacity']
            users.append(user)
    return users
Returns the organization s users .
6,150
def get_datasets(self, query='*:*', **kwargs):
    """Get the list of datasets in this organization matching *query*.

    Args:
        query (str): search query. Defaults to all datasets.
        **kwargs: additional search parameters passed through.

    Returns:
        list: matching datasets.
    """
    organization_filter = 'organization:%s' % self.data['name']
    return hdx.data.dataset.Dataset.search_in_hdx(
        query=query, configuration=self.configuration,
        fq=organization_filter, **kwargs)
Get list of datasets in organization
6,151
def get_all_organization_names(configuration=None, **kwargs):
    """Get all organization names in HDX.

    Args:
        configuration: optional HDX configuration.
        **kwargs: additional parameters passed to the list action.

    Returns:
        list: names of all organizations in HDX.
    """
    organization = Organization(configuration=configuration)
    # The id is only used for error reporting by the write helper.
    organization['id'] = 'all organizations'
    return organization._write_to_hdx('list', kwargs, 'id')
Get all organization names in HDX
6,152
def _read_from_hdx(self, object_type, value, fieldname='id', action=None,
                   **kwargs):
    """Makes a read call to HDX passing in the given parameter.

    Args:
        object_type (str): type of object, used in error messages.
        value (str): value of the field to look up.
        fieldname (str): field to look up by. Defaults to 'id'.
        action (str): CKAN action to call. Defaults to this object's
            'show' action.
        **kwargs: additional parameters merged into the request data.

    Returns:
        tuple: (True, result) on success, or (False, error string) when
        the object was not found.

    Raises:
        HDXError: if the field name is empty or if the remote call fails
            for a reason other than NotFound.
    """
    if not fieldname:
        raise HDXError('Empty %s field name!' % object_type)
    if action is None:
        action = self.actions()['show']
    data = {fieldname: value}
    data.update(kwargs)
    try:
        result = self.configuration.call_remoteckan(action, data)
        return True, result
    except NotFound:
        # Not found is an expected outcome: reported, not raised.
        return False, '%s=%s: not found!' % (fieldname, value)
    except Exception as e:
        raisefrom(
            HDXError,
            'Failed when trying to read: %s=%s! (POST)' % (fieldname, value),
            e)
Makes a read call to HDX passing in given parameter .
6,153
def _load_from_hdx ( self , object_type , id_field ) : success , result = self . _read_from_hdx ( object_type , id_field ) if success : self . old_data = self . data self . data = result return True logger . debug ( result ) return False
Helper method to load the HDX object given by identifier from HDX
6,154
def _check_load_existing_object ( self , object_type , id_field_name , operation = 'update' ) : self . _check_existing_object ( object_type , id_field_name ) if not self . _load_from_hdx ( object_type , self . data [ id_field_name ] ) : raise HDXError ( 'No existing %s to %s!' % ( object_type , operation ) )
Check metadata exists and contains HDX object identifier and if so load HDX object
6,155
def _check_required_fields ( self , object_type , ignore_fields ) : for field in self . configuration [ object_type ] [ 'required_fields' ] : if field not in self . data and field not in ignore_fields : raise HDXError ( 'Field %s is missing in %s!' % ( field , object_type ) )
Helper method to check that metadata for HDX object is complete
6,156
def _merge_hdx_update(self, object_type, id_field_name, file_to_upload=None,
                      **kwargs):
    """Merge new metadata into previously loaded data and push the update.

    Args:
        object_type (str): type of object eg. 'resource'.
        id_field_name (str): name of the identifier field.
        file_to_upload (str): optional local file path to upload.
        **kwargs: may contain batch_mode, skip_validation and operation.
    """
    # self.data holds the new values; fill in any gaps from the previously
    # loaded old_data without overwriting the new values.
    merge_two_dictionaries(self.data, self.old_data)
    # Pass through optional CKAN flags if supplied by the caller.
    if 'batch_mode' in kwargs:
        self.data['batch_mode'] = kwargs['batch_mode']
    if 'skip_validation' in kwargs:
        self.data['skip_validation'] = kwargs['skip_validation']
    # Fields that HDX ignores on update need not be present.
    ignore_field = self.configuration['%s' % object_type].get(
        'ignore_on_update')
    self.check_required_fields(ignore_fields=[ignore_field])
    operation = kwargs.get('operation', 'update')
    self._save_to_hdx(operation, id_field_name, file_to_upload)
Helper method to check if HDX object exists and update it
6,157
def _update_in_hdx ( self , object_type , id_field_name , file_to_upload = None , ** kwargs ) : self . _check_load_existing_object ( object_type , id_field_name ) self . _merge_hdx_update ( object_type , id_field_name , file_to_upload , ** kwargs )
Helper method to check if HDX object exists in HDX and if so update it
6,158
def _write_to_hdx ( self , action , data , id_field_name , file_to_upload = None ) : file = None try : if file_to_upload : file = open ( file_to_upload , 'rb' ) files = [ ( 'upload' , file ) ] else : files = None return self . configuration . call_remoteckan ( self . actions ( ) [ action ] , data , files = files ) except Exception as e : raisefrom ( HDXError , 'Failed when trying to %s %s! (POST)' % ( action , data [ id_field_name ] ) , e ) finally : if file_to_upload and file : file . close ( )
Creates or updates an HDX object in HDX and return HDX object metadata dict
6,159
def _save_to_hdx ( self , action , id_field_name , file_to_upload = None ) : result = self . _write_to_hdx ( action , self . data , id_field_name , file_to_upload ) self . old_data = self . data self . data = result
Creates or updates an HDX object in HDX saving current data and replacing with returned HDX object data from HDX
6,160
def _create_in_hdx(self, object_type, id_field_name, name_field_name,
                   file_to_upload=None):
    """Check if the object exists in HDX; update it if so, otherwise create.

    Args:
        object_type (str): type of object eg. 'resource'.
        id_field_name (str): name of the identifier field.
        name_field_name (str): field used as identifier on create.
        file_to_upload (str): optional local file path to upload.
    """
    self.check_required_fields()
    # Only attempt an update when an id is supplied and resolves in HDX.
    if id_field_name in self.data and self._load_from_hdx(
            object_type, self.data[id_field_name]):
        logger.warning(
            '%s exists. Updating %s' % (object_type,
                                        self.data[id_field_name]))
        self._merge_hdx_update(object_type, id_field_name, file_to_upload)
    else:
        self._save_to_hdx('create', name_field_name, file_to_upload)
Helper method to check if resource exists in HDX and if so update it otherwise create it
6,161
def _delete_from_hdx ( self , object_type , id_field_name ) : if id_field_name not in self . data : raise HDXError ( 'No %s field (mandatory) in %s!' % ( id_field_name , object_type ) ) self . _save_to_hdx ( 'delete' , id_field_name )
Helper method to deletes a resource from HDX
6,162
def _addupdate_hdxobject ( self , hdxobjects , id_field , new_hdxobject ) : for hdxobject in hdxobjects : if hdxobject [ id_field ] == new_hdxobject [ id_field ] : merge_two_dictionaries ( hdxobject , new_hdxobject ) return hdxobject hdxobjects . append ( new_hdxobject ) return new_hdxobject
Helper function to add a new HDX object to a supplied list of HDX objects or update existing metadata if the object already exists in the list
6,163
def _remove_hdxobject(self, objlist, obj, matchon='id', delete=False):
    """Remove an HDX object from a list within the parent HDX object.

    Args:
        objlist (list): list of HDX objects or dictionaries to search.
        obj: object to remove; an id string, a dict or an HDXObject.
        matchon (str): key to match entries on. Defaults to 'id'.
        delete (bool): if True also delete the object from HDX itself.

    Returns:
        bool: True if an entry was removed, False otherwise.

    Raises:
        HDXError: if obj is not a string, dict or HDXObject.
    """
    if objlist is None:
        return False
    # Accept either an id string or an object carrying the match key.
    if isinstance(obj, six.string_types):
        obj_id = obj
    elif isinstance(obj, dict) or isinstance(obj, HDXObject):
        obj_id = obj.get(matchon)
    else:
        raise HDXError('Type of object not a string, dict or T<=HDXObject')
    if not obj_id:
        return False
    for i, objdata in enumerate(objlist):
        objid = objdata.get(matchon)
        if objid and objid == obj_id:
            if delete:
                # Also remove the object from HDX, not just from the list.
                objlist[i].delete_from_hdx()
            del objlist[i]
            return True
    return False
Remove an HDX object from a list within the parent HDX object
6,164
def _convert_hdxobjects ( self , hdxobjects ) : newhdxobjects = list ( ) for hdxobject in hdxobjects : newhdxobjects . append ( hdxobject . data ) return newhdxobjects
Helper function to convert supplied list of HDX objects to a list of dict
6,165
def _copy_hdxobjects ( self , hdxobjects , hdxobjectclass , attribute_to_copy = None ) : newhdxobjects = list ( ) for hdxobject in hdxobjects : newhdxobjectdata = copy . deepcopy ( hdxobject . data ) newhdxobject = hdxobjectclass ( newhdxobjectdata , configuration = self . configuration ) if attribute_to_copy : value = getattr ( hdxobject , attribute_to_copy ) setattr ( newhdxobject , attribute_to_copy , value ) newhdxobjects . append ( newhdxobject ) return newhdxobjects
Helper function to make a deep copy of a supplied list of HDX objects
6,166
def _separate_hdxobjects(self, hdxobjects, hdxobjects_name, id_field,
                         hdxobjectclass):
    """Move HDX objects from the internal dictionary into a supplied list.

    Entries in the list that match by id_field are updated with the new
    metadata; unmatched entries from the internal dictionary are appended
    as new objects. The list in the internal dictionary is then deleted.

    Args:
        hdxobjects (list): existing HDX objects, updated in place.
        hdxobjects_name (str): key of the list in the internal dictionary.
        id_field (str): field the objects are matched on.
        hdxobjectclass: class used to construct appended objects.
    """
    new_hdxobjects = self.data.get(hdxobjects_name, list())
    if new_hdxobjects:
        hdxobject_names = set()
        for hdxobject in hdxobjects:
            hdxobject_name = hdxobject[id_field]
            hdxobject_names.add(hdxobject_name)
            # Merge metadata from the first new object with the same id.
            for new_hdxobject in new_hdxobjects:
                if hdxobject_name == new_hdxobject[id_field]:
                    merge_two_dictionaries(hdxobject, new_hdxobject)
                    break
        # Anything not matched above is appended as a new object.
        for new_hdxobject in new_hdxobjects:
            if not new_hdxobject[id_field] in hdxobject_names:
                hdxobjects.append(hdxobjectclass(
                    new_hdxobject, configuration=self.configuration))
        del self.data[hdxobjects_name]
Helper function to take a list of HDX objects contained in the internal dictionary and add them to a supplied list of HDX objects or update existing metadata if any objects already exist in the list . The list in the internal dictionary is then deleted .
6,167
def _get_tags ( self ) : tags = self . data . get ( 'tags' , None ) if not tags : return list ( ) return [ x [ 'name' ] for x in tags ]
Return the dataset s list of tags
6,168
def _add_tag ( self , tag ) : tags = self . data . get ( 'tags' , None ) if tags : if tag in [ x [ 'name' ] for x in tags ] : return False else : tags = list ( ) tags . append ( { 'name' : tag } ) self . data [ 'tags' ] = tags return True
Add a tag
6,169
def _add_tags ( self , tags ) : alltagsadded = True for tag in tags : if not self . _add_tag ( tag ) : alltagsadded = False return alltagsadded
Add a list of tag
6,170
def _get_stringlist_from_commastring ( self , field ) : strings = self . data . get ( field ) if strings : return strings . split ( ',' ) else : return list ( )
Return list of strings from comma separated list
6,171
def _add_string_to_commastring ( self , field , string ) : if string in self . _get_stringlist_from_commastring ( field ) : return False strings = '%s,%s' % ( self . data . get ( field , '' ) , string ) if strings [ 0 ] == ',' : strings = strings [ 1 : ] self . data [ field ] = strings return True
Add a string to a comma separated list of strings
6,172
def _add_strings_to_commastring ( self , field , strings ) : allstringsadded = True for string in strings : if not self . _add_string_to_commastring ( field , string ) : allstringsadded = False return allstringsadded
Add a list of strings to a comma separated list of strings
6,173
def _remove_string_from_commastring ( self , field , string ) : commastring = self . data . get ( field , '' ) if string in commastring : self . data [ field ] = commastring . replace ( string , '' ) return True return False
Remove a string from a comma separated list of strings
6,174
def read_from_hdx(identifier, configuration=None):
    """Reads the resource given by identifier from HDX.

    Args:
        identifier (str): uuid of the resource.
        configuration: optional HDX configuration.

    Returns:
        Resource or None if the resource was not found.

    Raises:
        HDXError: if identifier is not a valid uuid.
    """
    if is_valid_uuid(identifier) is False:
        raise HDXError('%s is not a valid resource id!' % identifier)
    resource = Resource(configuration=configuration)
    if resource._load_from_hdx('resource', identifier):
        return resource
    return None
Reads the resource given by identifier from HDX and returns Resource object
6,175
def set_file_to_upload(self, file_to_upload):
    """Set the file to upload, discarding any existing url.

    Args:
        file_to_upload (str): local path of the file to upload.
    """
    # A local upload supersedes a remote url; the two are mutually
    # exclusive.
    self.data.pop('url', None)
    self.file_to_upload = file_to_upload
Delete any existing url and set the file uploaded to the local path provided
6,176
def check_url_filetoupload(self):
    """Check that either a url or a file to upload is provided.

    Also fills in default resource_type and url_type values and strips the
    server generated tracking_summary field from upload metadata.

    Raises:
        HDXError: if neither a url nor a file to upload is supplied, or if
            both are supplied.
    """
    if self.file_to_upload is None:
        if 'url' in self.data:
            if 'resource_type' not in self.data:
                self.data['resource_type'] = 'api'
            if 'url_type' not in self.data:
                self.data['url_type'] = 'api'
        else:
            raise HDXError(
                'Either a url or a file to upload must be supplied!')
    else:
        if 'url' in self.data:
            # A temporary placeholder url is allowed alongside an upload.
            if self.data['url'] != hdx.data.dataset.Dataset.temporary_url:
                raise HDXError(
                    'Either a url or a file to upload must be supplied not both!')
        if 'resource_type' not in self.data:
            self.data['resource_type'] = 'file.upload'
        if 'url_type' not in self.data:
            self.data['url_type'] = 'upload'
        # tracking_summary is generated server side and must not be sent
        # back on upload. NOTE(review): placed in the upload branch per the
        # apparent original grouping — confirm against upstream.
        if 'tracking_summary' in self.data:
            del self.data['tracking_summary']
Check if url or file to upload provided for resource and add resource_type and url_type if not supplied
6,177
def update_in_hdx(self, **kwargs):
    """Check if the resource exists in HDX and if so update it.

    Args:
        **kwargs: passed through to the merge/update step.
    """
    self._check_load_existing_object('resource', 'id')
    if self.file_to_upload:
        # A freshly uploaded file supersedes any stored url.
        self.data.pop('url', None)
    self._merge_hdx_update('resource', 'id', self.file_to_upload, **kwargs)
Check if resource exists in HDX and if so update it
6,178
def create_in_hdx(self):
    """Check if the resource exists in HDX; update it if so, else create it.

    The local variable was renamed from 'id' to avoid shadowing the
    builtin.
    """
    self.check_required_fields()
    resource_id = self.data.get('id')
    if resource_id and self._load_from_hdx('resource', resource_id):
        logger.warning('%s exists. Updating %s' % ('resource', resource_id))
        if self.file_to_upload and 'url' in self.data:
            # A freshly uploaded file supersedes any stored url.
            del self.data['url']
        self._merge_hdx_update('resource', 'id', self.file_to_upload)
    else:
        self._save_to_hdx('create', 'name', self.file_to_upload)
Check if resource exists in HDX and if so update it otherwise create it
6,179
def get_dataset(self):
    """Return the dataset containing this resource.

    Returns:
        Dataset: the parent dataset.

    Raises:
        HDXError: if the resource has no package id.
    """
    package_id = self.data.get('package_id')
    if package_id is None:
        raise HDXError('Resource has no package id!')
    return hdx.data.dataset.Dataset.read_from_hdx(package_id)
Return dataset containing this resource
6,180
def download(self, folder=None):
    """Download the resource to *folder* (or a temporary folder).

    Args:
        folder (str): optional folder to download to.

    Returns:
        tuple: (url downloaded, local path of the downloaded file).

    Raises:
        HDXError: if the resource has no url.
    """
    url = self.data.get('url', None)
    if not url:
        raise HDXError('No URL to download!')
    logger.debug('Downloading %s' % url)
    filename = self.data['name']
    format = '.%s' % self.data['format']
    # Append the format extension when the name does not already contain
    # it (substring check, not a suffix check).
    if format not in filename:
        filename = '%s%s' % (filename, format)
    with Download(full_agent=self.configuration.get_user_agent()) as downloader:
        path = downloader.download_file(url, folder, filename)
        return url, path
Download resource store to provided folder or temporary folder if no folder supplied
6,181
def get_all_resource_ids_in_datastore(configuration=None):
    """Get the ids of all resources that have a datastore.

    Args:
        configuration: optional HDX configuration.

    Returns:
        list: resource ids; empty when the lookup fails.
    """
    resource = Resource(configuration=configuration)
    success, result = resource._read_from_hdx(
        'datastore', '_table_metadata', 'resource_id',
        Resource.actions()['datastore_search'], limit=10000)
    if not success:
        logger.debug(result)
        return list()
    # The datastore metadata table stores the resource id in 'name'.
    return [record['name'] for record in result['records']]
Get list of resources that have a datastore returning their ids .
6,182
def has_datastore(self):
    """Check whether the resource has a datastore.

    Returns:
        bool: True if a datastore exists for this resource.
    """
    success, result = self._read_from_hdx(
        'datastore', self.data['id'], 'resource_id',
        self.actions()['datastore_search'])
    if not success:
        logger.debug(result)
        return False
    return bool(result)
Check if the resource has a datastore .
6,183
def delete_datastore(self):
    """Delete the resource's datastore from HDX.

    Failures are logged rather than raised.
    """
    success, result = self._read_from_hdx(
        'datastore', self.data['id'], 'resource_id',
        self.actions()['datastore_delete'], force=True)
    if not success:
        logger.debug(result)
Delete a resource from the HDX datastore
6,184
def create_datastore(self, schema=None, primary_key=None, delete_first=0,
                     path=None):
    """Create a datastore for a tabular resource, enabling data preview.

    If no schema is provided, all fields are assumed to be text. If path is
    not supplied, the file is first downloaded from HDX.

    Args:
        schema (list): optional list of {'id': name, 'type': type} fields.
        primary_key (str): optional primary key; selects upsert over insert.
        delete_first (int): 0 = do not delete, 1 = always delete,
            2 = delete only when there is no primary key.
        path (str): optional local path of the file; downloaded otherwise.

    Raises:
        HDXError: on an invalid delete_first value or an upload failure.
    """
    if delete_first == 0:
        pass
    elif delete_first == 1:
        self.delete_datastore()
    elif delete_first == 2:
        if primary_key is None:
            self.delete_datastore()
    else:
        raise HDXError(
            'delete_first must be 0, 1 or 2! (0 = No, 1 = Yes, 2 = Delete if no primary key)')
    if path is None:
        # No local file supplied: download from HDX and clean up afterwards.
        url, path = self.download()
        delete_after_download = True
    else:
        url = path
        delete_after_download = False

    def convert_to_text(extended_rows):
        # Post-parse hook: coerce every cell to text so it matches the
        # default text schema.
        for number, headers, row in extended_rows:
            for i, val in enumerate(row):
                row[i] = str(val)
            yield (number, headers, row)

    with Download(full_agent=self.configuration.get_user_agent()) as downloader:
        try:
            stream = downloader.get_tabular_stream(
                path, headers=1, post_parse=[convert_to_text],
                bytes_sample_size=1000000)
            nonefieldname = False
            if schema is None:
                # Derive a text-only schema from the header row; a None
                # header marks a column to drop from every row.
                schema = list()
                for fieldname in stream.headers:
                    if fieldname is not None:
                        schema.append({'id': fieldname, 'type': 'text'})
                    else:
                        nonefieldname = True
            data = {'resource_id': self.data['id'], 'force': True,
                    'fields': schema, 'primary_key': primary_key}
            self._write_to_hdx('datastore_create', data, 'resource_id')
            # With a primary key rows must be upserted; otherwise inserted.
            if primary_key is None:
                method = 'insert'
            else:
                method = 'upsert'
            logger.debug('Uploading data from %s to datastore' % url)
            offset = 0
            chunksize = 100
            rowset = stream.read(keyed=True, limit=chunksize)
            while len(rowset) != 0:
                if nonefieldname:
                    for row in rowset:
                        del row[None]
                data = {'resource_id': self.data['id'], 'force': True,
                        'method': method, 'records': rowset}
                self._write_to_hdx('datastore_upsert', data, 'resource_id')
                rowset = stream.read(keyed=True, limit=chunksize)
                logger.debug('Uploading: %s' % offset)
                offset += chunksize
        except Exception as e:
            raisefrom(HDXError, 'Upload to datastore of %s failed!' % url, e)
        finally:
            if delete_after_download:
                remove(path)
For tabular data create a resource in the HDX datastore which enables data preview in HDX . If no schema is provided all fields are assumed to be text . If path is not supplied the file is first downloaded from HDX .
6,185
def create_datastore_for_topline(self, delete_first=0, path=None):
    """Create a datastore using the built-in topline YAML schema.

    Args:
        delete_first (int): 0 = do not delete, 1 = always delete,
            2 = delete only when there is no primary key.
        path (str): optional local path; downloaded from HDX otherwise.
    """
    schema_file = script_dir_plus_file(
        join('..', 'hdx_datasource_topline.yml'), Resource)
    self.create_datastore_from_dict_schema(
        load_yaml(schema_file), delete_first, path=path)
For tabular data create a resource in the HDX datastore which enables data preview in HDX using the built in YAML definition for a topline . If path is not supplied the file is first downloaded from HDX .
6,186
def update_datastore(self, schema=None, primary_key=None, path=None):
    """Update the resource's datastore for tabular data preview.

    Args:
        schema (list): optional field schema; text fields assumed if None.
        primary_key (str): optional primary key.
        path (str): optional local path; downloaded from HDX otherwise.
    """
    # delete_first=2: delete the existing datastore only when there is no
    # primary key.
    self.create_datastore(schema, primary_key, 2, path=path)
For tabular data update a resource in the HDX datastore which enables data preview in HDX . If no schema is provided all fields are assumed to be text . If path is not supplied the file is first downloaded from HDX .
6,187
def _get_resource_view(self, resource_view):
    """Coerce a dict or ResourceView into a ResourceView.

    Args:
        resource_view: dict or ResourceView.

    Returns:
        ResourceView: the resource view object.

    Raises:
        HDXError: if the argument is of any other type.
    """
    if isinstance(resource_view, dict):
        resource_view = ResourceView(
            resource_view, configuration=self.configuration)
    if not isinstance(resource_view, ResourceView):
        raise HDXError('Type %s is not a valid resource view!'
                       % type(resource_view).__name__)
    return resource_view
Get resource view id
6,188
def add_update_resource_views(self, resource_views):
    """Add new or update existing resource views with new metadata.

    Args:
        resource_views (list): resource views to add or update.

    Raises:
        HDXError: if resource_views is not a list.
    """
    if not isinstance(resource_views, list):
        raise HDXError('ResourceViews should be a list!')
    for view in resource_views:
        self.add_update_resource_view(view)
Add new or update existing resource views in resource with new metadata .
6,189
def reorder_resource_views(self, resource_views):
    """Order resource views in resource.

    Args:
        resource_views (list): resource views (dict-like with 'id') or id
            strings, in the desired order.

    Raises:
        HDXError: if resource_views is not a list or contains an invalid
            uuid.
    """
    if not isinstance(resource_views, list):
        raise HDXError('ResourceViews should be a list!')
    ids = list()
    for resource_view in resource_views:
        if isinstance(resource_view, str):
            resource_view_id = resource_view
        else:
            resource_view_id = resource_view['id']
        if is_valid_uuid(resource_view_id) is False:
            raise HDXError('%s is not a valid resource view id!'
                           % resource_view)
        ids.append(resource_view_id)
    # The result of the reorder call is intentionally discarded.
    _, result = self._read_from_hdx(
        'resource view', self.data['id'], 'id',
        ResourceView.actions()['reorder'], order=ids)
Order resource views in resource .
6,190
def delete_resource_view(self, resource_view):
    """Delete a resource view from the resource and HDX.

    Args:
        resource_view: id string, dict or ResourceView to delete. When no
            id is present, views are matched by title against the
            resource's existing views.

    Raises:
        HDXError: on an invalid id, an unsupported type, or when no view
            matches by title.
    """
    if isinstance(resource_view, str):
        if is_valid_uuid(resource_view) is False:
            raise HDXError('%s is not a valid resource view id!'
                           % resource_view)
        resource_view = ResourceView(
            {'id': resource_view}, configuration=self.configuration)
    else:
        resource_view = self._get_resource_view(resource_view)
    if 'id' not in resource_view:
        # No id supplied: fall back to matching on title among the
        # resource's existing views.
        found = False
        title = resource_view.get('title')
        for rv in self.get_resource_views():
            if resource_view['title'] == rv['title']:
                resource_view = rv
                found = True
                break
        if not found:
            raise HDXError('No resource views have title %s in this resource!'
                           % title)
    resource_view.delete_from_hdx()
Delete a resource view from the resource and HDX
6,191
def parse_for_simple_stems(output, skip_empty=False, skip_same_stems=True):
    """Parse stemmer output into a list of (word, [stems]) pairs.

    Args:
        output: raw stemmer output; passed to _get_lines_with_stems, which
            presumably yields tab separated "word<TAB>stem<TAB>..." lines
            — TODO confirm against the helper.
        skip_empty (bool): if True, lines whose stem is '-' (no stem) are
            skipped entirely.
        skip_same_stems (bool): if True, duplicate stems for the same word
            are not repeated.

    Returns:
        list: (word, stems) tuples in first-seen order; consecutive lines
        with the same word are merged into one entry.
    """
    lines_with_stems = _get_lines_with_stems(output)
    stems = list()
    last_word = None
    for line in lines_with_stems:
        word, stem, _ = line.split("\t")
        # '-' marks the absence of a stem.
        stem = stem if stem != '-' else None
        if skip_empty and (stem is None):
            continue
        # Start a new entry whenever the word changes.
        if last_word != word:
            stems.append((word, []))
        # Drop the stem if it is already recorded for the current word.
        stem = None if skip_same_stems and stem in stems[-1][1] else stem
        if stem is not None:
            stems[-1][1].append(stem)
        last_word = word
    return stems
Parses the output stem lines to produce a list with possible stems for each word in the output .
6,192
def _CreateClassTemplate(cls, data_type_definition):
    """Creates the class template.

    Builds the source text of a values class by filling
    cls._CLASS_TEMPLATE with a description, __init__ arguments and
    instance attribute assignments derived from the definition's members.

    Args:
        data_type_definition: data type definition with name, description
            and members.

    Returns:
        str: the formatted class source.
    """
    type_name = data_type_definition.name
    type_description = data_type_definition.description or type_name
    # Strip trailing periods so the description composes cleanly in the
    # generated docstring.
    while type_description.endswith('.'):
        type_description = type_description[:-1]
    class_attributes_description = []
    init_arguments = []
    instance_attributes = []
    for member_definition in data_type_definition.members:
        attribute_name = member_definition.name
        description = member_definition.description or attribute_name
        while description.endswith('.'):
            description = description[:-1]
        member_data_type = getattr(member_definition, 'member_data_type', '')
        # Unwrap member wrappers to reach the actual data type definition.
        if isinstance(member_definition, data_types.MemberDataTypeDefinition):
            member_definition = member_definition.member_data_type_definition
        member_type_indicator = member_definition.TYPE_INDICATOR
        if member_type_indicator == definitions.TYPE_INDICATOR_SEQUENCE:
            element_type_indicator = member_definition.element_data_type
            member_type_indicator = 'tuple[{0:s}]'.format(
                element_type_indicator)
        else:
            # Map to a Python native type name where one exists.
            member_type_indicator = cls._PYTHON_NATIVE_TYPES.get(
                member_type_indicator, member_data_type)
        argument = '{0:s}=None'.format(attribute_name)
        definition = ' self.{0:s} = {0:s}'.format(attribute_name)
        description = ' {0:s} ({1:s}): {2:s}.'.format(
            attribute_name, member_type_indicator, description)
        class_attributes_description.append(description)
        init_arguments.append(argument)
        instance_attributes.append(definition)
    # Attribute documentation and assignments are emitted sorted;
    # __init__ arguments keep definition order.
    class_attributes_description = '\n'.join(
        sorted(class_attributes_description))
    init_arguments = ', '.join(init_arguments)
    instance_attributes = '\n'.join(sorted(instance_attributes))
    template_values = {
        'class_attributes_description': class_attributes_description,
        'init_arguments': init_arguments,
        'instance_attributes': instance_attributes,
        'type_description': type_description,
        'type_name': type_name}
    return cls._CLASS_TEMPLATE.format(**template_values)
Creates the class template .
6,193
def _IsIdentifier ( cls , string ) : return ( string and not string [ 0 ] . isdigit ( ) and all ( character . isalnum ( ) or character == '_' for character in string ) )
Checks if a string contains an identifier .
6,194
def _ValidateDataTypeDefinition(cls, data_type_definition):
    """Validates the data type definition.

    Ensures the definition name and every member name can safely be used
    as identifiers in a generated Python class.

    Raises:
        ValueError: if the definition name or an attribute name is not a
            valid identifier, is a Python keyword, starts with an
            underscore, is duplicated, or if members are missing.
    """
    if not cls._IsIdentifier(data_type_definition.name):
        raise ValueError(
            'Data type definition name: {0!s} not a valid identifier'.format(
                data_type_definition.name))
    if keyword.iskeyword(data_type_definition.name):
        raise ValueError(
            'Data type definition name: {0!s} matches keyword'.format(
                data_type_definition.name))
    members = getattr(data_type_definition, 'members', None)
    if not members:
        raise ValueError(
            'Data type definition name: {0!s} missing members'.format(
                data_type_definition.name))
    defined_attribute_names = set()
    for member_definition in members:
        attribute_name = member_definition.name
        if not cls._IsIdentifier(attribute_name):
            raise ValueError(
                'Attribute name: {0!s} not a valid identifier'.format(
                    attribute_name))
        # Leading underscores are reserved for non-public attributes.
        if attribute_name.startswith('_'):
            raise ValueError(
                'Attribute name: {0!s} starts with underscore'.format(
                    attribute_name))
        if keyword.iskeyword(attribute_name):
            raise ValueError(
                'Attribute name: {0!s} matches keyword'.format(
                    attribute_name))
        if attribute_name in defined_attribute_names:
            raise ValueError(
                'Attribute name: {0!s} already defined'.format(
                    attribute_name))
        defined_attribute_names.add(attribute_name)
Validates the data type definition .
6,195
def CreateClass(cls, data_type_definition):
    """Creates a new structure values class.

    Validates the definition, renders the class source from the template
    and executes it in a minimal, restricted namespace.

    Args:
        data_type_definition: data type definition to create the class for.

    Returns:
        type: the newly created class.

    Raises:
        ValueError: if the data type definition is not valid.
    """
    cls._ValidateDataTypeDefinition(data_type_definition)
    class_definition = cls._CreateClassTemplate(data_type_definition)
    # Restrict the execution namespace to the builtins the generated class
    # body needs.
    namespace = {
        '__builtins__': {
            'object': builtins.object,
            'super': builtins.super},
        '__name__': '{0:s}'.format(data_type_definition.name)}
    if sys.version_info[0] >= 3:
        # Python 3 class statements require __build_class__.
        namespace['__builtins__']['__build_class__'] = builtins.__build_class__
    # The source is generated above from a validated definition, not from
    # external input.
    exec(class_definition, namespace)
    return namespace[data_type_definition.name]
Creates a new structure values class .
6,196
def DeregisterDefinition(self, data_type_definition):
    """Deregisters a data type definition.

    Definitions are keyed by their lowercased name.

    Raises:
        KeyError: if no definition is registered under the given name.
    """
    lookup_name = data_type_definition.name.lower()
    if lookup_name not in self._definitions:
        raise KeyError('Definition not set for name: {0:s}.'.format(
            data_type_definition.name))
    del self._definitions[lookup_name]
Deregisters a data type definition .
6,197
def GetDefinitionByName(self, name):
    """Retrieves a specific data type definition by name.

    Falls back to the alias table when the lowercased name is unknown.

    Returns:
        The data type definition, or None if not found.
    """
    key = name.lower()
    if key in self._definitions:
        return self._definitions[key]
    alias_key = self._aliases.get(name, None)
    return self._definitions.get(alias_key, None)
Retrieves a specific data type definition by name .
6,198
def RegisterDefinition(self, data_type_definition):
    """Registers a data type definition.

    Definitions are keyed by their lowercased name; aliases map back to
    the lowercased name. Format definitions are additionally tracked in a
    separate list.

    Raises:
        KeyError: if the name or one of its aliases is already registered.
    """
    name_lower = data_type_definition.name.lower()
    if name_lower in self._definitions:
        raise KeyError('Definition already set for name: {0:s}.'.format(
            data_type_definition.name))
    if data_type_definition.name in self._aliases:
        raise KeyError('Alias already set for name: {0:s}.'.format(
            data_type_definition.name))
    # Validate every alias before mutating any state.
    for alias in data_type_definition.aliases:
        if alias in self._aliases:
            raise KeyError('Alias already set for name: {0:s}.'.format(alias))
    self._definitions[name_lower] = data_type_definition
    for alias in data_type_definition.aliases:
        self._aliases[alias] = name_lower
    if data_type_definition.TYPE_INDICATOR == (
            definitions.TYPE_INDICATOR_FORMAT):
        self._format_definitions.append(name_lower)
Registers a data type definition .
6,199
def apply_on_csv_string(rules_str, func):
    """Split *rules_str* on commas, trim whitespace and apply *func*.

    Args:
        rules_str (str): comma separated items.
        func (callable): called once with each trimmed item.
    """
    # Loop variable renamed: the original shadowed the builtin ``str``.
    for item in rules_str.split(","):
        func(item.strip())
Splits a given string by comma trims whitespace on the resulting strings and applies a given func to each item .