idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
59,100
def exportpub(self, format="PEM"):
    """Serialize the public key to PEM (default) or DER via a memory BIO."""
    bio = Membio()
    writer = (libcrypto.PEM_write_bio_PUBKEY if format == "PEM"
              else libcrypto.i2d_PUBKEY_bio)
    if writer(bio.bio, self.key) == 0:
        raise PKeyError("error serializing public key")
    return str(bio)
Returns public key as PEM or DER structure .
59,101
def exportpriv ( self , format = "PEM" , password = None , cipher = None ) : bio = Membio ( ) if cipher is None : evp_cipher = None else : evp_cipher = cipher . cipher if format == "PEM" : ret = libcrypto . PEM_write_bio_PrivateKey ( bio . bio , self . key , evp_cipher , None , 0 , _password_callback ( password ) , Non...
Returns private key as PEM or DER Structure . If password and cipher are specified encrypts key on given password using given algorithm . Cipher must be an ctypescrypto . cipher . CipherType object
59,102
def _configure_context ( ctx , opts , skip = ( ) ) : for oper in opts : if oper in skip : continue if isinstance ( oper , chartype ) : op = oper . encode ( "ascii" ) else : op = oper if isinstance ( opts [ oper ] , chartype ) : value = opts [ oper ] . encode ( "ascii" ) elif isinstance ( opts [ oper ] , bintype ) : val...
Configures context of public key operations
59,103
def read ( self , length = None ) : if not length is None : if not isinstance ( length , inttype ) : raise TypeError ( "length to read should be number" ) buf = create_string_buffer ( length ) readbytes = libcrypto . BIO_read ( self . bio , buf , length ) if readbytes == - 2 : raise NotImplementedError ( "Function is n...
Reads data from a readable BIO. For test purposes.
59,104
def write ( self , data ) : if pyver == 2 : if isinstance ( data , unicode ) : data = data . encode ( "utf-8" ) else : data = str ( data ) else : if not isinstance ( data , bytes ) : data = str ( data ) . encode ( "utf-8" ) written = libcrypto . BIO_write ( self . bio , data , len ( data ) ) if written == - 2 : raise N...
Writes data to writable bio . For test purposes
59,105
def CMS ( data , format = "PEM" ) : bio = Membio ( data ) if format == "PEM" : ptr = libcrypto . PEM_read_bio_CMS ( bio . bio , None , None , None ) else : ptr = libcrypto . d2i_CMS_bio ( bio . bio , None ) if ptr is None : raise CMSError ( "Error parsing CMS data" ) typeoid = Oid ( libcrypto . OBJ_obj2nid ( libcrypto ...
Factory function to create CMS objects from received messages . Parses CMS data and returns either SignedData or EnvelopedData object . format argument can be either PEM or DER .
59,106
def pem(self):
    """Serialize this CMS structure in PEM format."""
    out = Membio()
    rc = libcrypto.PEM_write_bio_CMS(out.bio, self.ptr)
    if not rc:
        raise CMSError("writing CMS to PEM")
    return str(out)
Serialize in PEM format
59,107
def create ( data , cert , pkey , flags = Flags . BINARY , certs = None ) : if not pkey . cansign : raise ValueError ( "Specified keypair has no private part" ) if cert . pubkey != pkey : raise ValueError ( "Certificate doesn't match public key" ) bio = Membio ( data ) if certs is not None and len ( certs ) > 0 : certs...
Creates SignedData message by signing data with pkey and certificate .
59,108
def sign ( self , cert , pkey , digest_type = None , data = None , flags = Flags . BINARY ) : if not pkey . cansign : raise ValueError ( "Specified keypair has no private part" ) if cert . pubkey != pkey : raise ValueError ( "Certificate doesn't match public key" ) if libcrypto . CMS_add1_signer ( self . ptr , cert . c...
Adds another signer to already signed message
59,109
def verify ( self , store , flags , data = None , certs = None ) : bio = None if data != None : bio_obj = Membio ( data ) bio = bio_obj . bio if certs is not None and len ( certs ) > 0 : certstack_obj = StackOfX509 ( certs ) certstack = certstack_obj . ptr else : certstack = None res = libcrypto . CMS_verify ( self . p...
Verifies signature under CMS message using trusted cert store
59,110
def signers(self):
    """Return the signers' certificates as a read-only StackOfX509."""
    raw_stack = libcrypto.CMS_get0_signers(self.ptr)
    if raw_stack is None:
        raise CMSError("Cannot get signers")
    # get0 => OpenSSL keeps ownership, so the stack must not be freed by us.
    return StackOfX509(ptr=raw_stack, disposable=False)
Return list of signers' certificates.
59,111
def data(self):
    """Return the signed content, or None for a detached signature."""
    if self.detached:
        return None
    out = Membio()
    ok = libcrypto.CMS_verify(self.ptr, None, None, None, out.bio,
                              Flags.NO_VERIFY)
    if not ok:
        raise CMSError("extract data")
    return str(out)
Returns signed data if present in the message
59,112
def certs(self):
    """Return the certificates embedded in the CMS structure."""
    stack = libcrypto.CMS_get1_certs(self.ptr)
    if stack is None:
        raise CMSError("getting certs")
    # get1 => we own the stack, so mark it disposable for freeing.
    return StackOfX509(ptr=stack, disposable=True)
List of the certificates contained in the structure
59,113
def create(recipients, data, cipher, flags=0):
    """Encrypt *data* for *recipients*, returning an EnvelopedData message."""
    recipient_stack = StackOfX509(recipients)
    source = Membio(data)
    ptr = libcrypto.CMS_encrypt(recipient_stack.ptr, source.bio,
                                cipher.cipher, flags)
    if ptr is None:
        raise CMSError("encrypt EnvelopedData")
    return EnvelopedData(ptr)
Creates and encrypts message
59,114
def create(data, cipher, key, flags=0):
    """Symmetrically encrypt *data* with *key*, returning an EncryptedData."""
    source = Membio(data)
    ptr = libcrypto.CMS_EncryptedData_encrypt(source.bio, cipher.cipher,
                                              key, len(key), flags)
    if ptr is None:
        raise CMSError("encrypt data")
    return EncryptedData(ptr)
Creates an EncryptedData message .
59,115
def decrypt(self, key, flags=0):
    """Decrypt this EncryptedData message with *key* and return the payload."""
    out = Membio()
    rc = libcrypto.CMS_EncryptedData_decrypt(self.ptr, key, len(key),
                                             None, out.bio, flags)
    if rc <= 0:
        raise CMSError("decrypt data")
    return str(out)
Decrypts encrypted data message
59,116
def name(self):
    """Return (and lazily cache) the long name of this digest algorithm."""
    if not hasattr(self, 'digest_name'):
        nid = libcrypto.EVP_MD_type(self.digest)
        self.digest_name = Oid(nid).longname()
    return self.digest_name
Returns name of the digest
59,117
def update ( self , data , length = None ) : if self . digest_finalized : raise DigestError ( "No updates allowed" ) if not isinstance ( data , bintype ) : raise TypeError ( "A byte string is expected" ) if length is None : length = len ( data ) elif length > len ( data ) : raise ValueError ( "Specified length is great...
Hashes given byte string
59,118
def digest ( self , data = None ) : if self . digest_finalized : return self . digest_out . raw [ : self . digest_size ] if data is not None : self . update ( data ) self . digest_out = create_string_buffer ( 256 ) length = c_long ( 0 ) result = libcrypto . EVP_DigestFinal_ex ( self . ctx , self . digest_out , byref ( ...
Finalizes digest operation and return digest value Optionally hashes more data before finalizing
59,119
def copy(self):
    """Clone the digest context so hashing can continue on both copies."""
    clone = Digest(self.digest_type)
    libcrypto.EVP_MD_CTX_copy(clone.ctx, self.ctx)
    return clone
Creates copy of the digest CTX to allow to compute digest while being able to hash more data
59,120
def _clean_ctx(self):
    """Free the digest context (if any) and reset bookkeeping state."""
    try:
        if self.ctx is not None:
            libcrypto.EVP_MD_CTX_free(self.ctx)
            del self.ctx
    except AttributeError:
        # ctx was never created or is already gone - nothing to free.
        pass
    self.digest_out = None
    self.digest_finalized = False
Clears and deallocates context
59,121
def hexdigest(self, data=None):
    """Return the digest as a hex string (hashlib-compatible)."""
    from base64 import b16encode
    raw = b16encode(self.digest(data))
    # Python 2 returns a str; Python 3 must decode the bytes result.
    return raw if pyver == 2 else raw.decode('us-ascii')
Returns digest in the hexadecimal form . For compatibility with hashlib
59,122
def _X509__asn1date_to_datetime(asn1date):
    """Convert an OpenSSL ASN1_TIME into a timezone-aware datetime (UTC)."""
    bio = Membio()
    libcrypto.ASN1_TIME_print(bio.bio, asn1date)
    parsed = datetime.strptime(str(bio), "%b %d %H:%M:%S %Y %Z")
    return parsed.replace(tzinfo=utc)
Converts openssl ASN1_TIME object to python datetime . datetime
59,123
def find ( self , oid ) : if not isinstance ( oid , Oid ) : raise TypeError ( "Need crytypescrypto.oid.Oid as argument" ) found = [ ] index = - 1 end = len ( self ) while True : index = libcrypto . X509_get_ext_by_NID ( self . cert . cert , oid . nid , index ) if index >= end or index < 0 : break found . append ( self ...
Return list of extensions with given Oid
59,124
def find_critical(self, crit=True):
    """Return extensions matching the given criticality flag."""
    flag = 1 if crit else 0
    matches = []
    end = len(self)
    index = -1
    while True:
        index = libcrypto.X509_get_ext_by_critical(self.cert.cert, flag,
                                                   index)
        if index >= end or index < 0:
            break
        matches.append(self[index])
    return matches
Return list of critical extensions (or list of non-critical extensions if the optional second argument is False).
59,125
def pem(self):
    """Return the PEM representation of the certificate."""
    out = Membio()
    if libcrypto.PEM_write_bio_X509(out.bio, self.cert) == 0:
        raise X509Error("error serializing certificate")
    return str(out)
Returns PEM representation of the certificate.
59,126
def serial(self):
    """Return the certificate serial number as a Python int."""
    asnint = libcrypto.X509_get_serialNumber(self.cert)
    out = Membio()
    # i2a_ writes the ASN1 integer as hexadecimal text into the BIO.
    libcrypto.i2a_ASN1_INTEGER(out.bio, asnint)
    return int(str(out), 16)
Serial number of certificate as integer
59,127
def add_cert(self, cert):
    """Explicitly add *cert* to the store's set of trusted certificates."""
    if not isinstance(cert, X509):
        raise TypeError("cert should be X509")
    libcrypto.X509_STORE_add_cert(self.store, cert.cert)
Explicitly adds a certificate to the set of trusted certificates in the store.
59,128
def setpurpose ( self , purpose ) : if isinstance ( purpose , str ) : purp_no = libcrypto . X509_PURPOSE_get_by_sname ( purpose ) if purp_no <= 0 : raise X509Error ( "Invalid certificate purpose '%s'" % purpose ) elif isinstance ( purpose , int ) : purp_no = purpose if libcrypto . X509_STORE_set_purpose ( self . store ...
Sets certificate purpose which verified certificate should match
59,129
def settime ( self , time ) : if isinstance ( time , datetime ) or isinstance ( time , datetime . date ) : seconds = int ( time . strftime ( "%s" ) ) elif isinstance ( time , int ) : seconds = time else : raise TypeError ( "datetime.date, datetime.datetime or integer " + "is required as time argument" ) raise NotImplem...
Set the point in time used to check the validity of certificates. Time can be either a Python datetime object or a number of seconds since the epoch.
59,130
def append(self, value):
    """Append a copy of *value* (an X509) to a writable stack."""
    if not self.need_free:
        raise ValueError("Stack is read-only")
    if not isinstance(value, X509):
        raise TypeError('StackOfX509 can contain only X509 objects')
    # Push a duplicate so the stack owns its own reference.
    sk_push(self.ptr, libcrypto.X509_dup(value.cert))
Adds certificate to stack
59,131
def create ( curve , data ) : ec_key = libcrypto . EC_KEY_new_by_curve_name ( curve . nid ) if ec_key is None : raise PKeyError ( "EC_KEY_new_by_curvename" ) group = libcrypto . EC_KEY_get0_group ( ec_key ) if group is None : raise PKeyError ( "EC_KEY_get0_group" ) libcrypto . EC_GROUP_set_asn1_flag ( group , 1 ) raw_k...
Creates EC keypair from the just secret key and curve name
59,132
def new(algname, key, encrypt=True, iv=None):
    """Return a Cipher ready to encrypt (default) or decrypt data."""
    return Cipher(CipherType(algname), key, iv, encrypt)
Returns new cipher object ready to encrypt - decrypt data
59,133
def padding(self, padding=True):
    """Enable or disable the cipher's padding mode."""
    libcrypto.EVP_CIPHER_CTX_set_padding(self.ctx, 1 if padding else 0)
Sets padding mode of the cipher
59,134
def finish ( self ) : if self . cipher_finalized : raise CipherError ( "Cipher operation is already completed" ) outbuf = create_string_buffer ( self . block_size ) self . cipher_finalized = True outlen = c_int ( 0 ) result = libcrypto . EVP_CipherFinal_ex ( self . ctx , outbuf , byref ( outlen ) ) if result == 0 : sel...
Finalizes processing . If some data are kept in the internal state they would be processed and returned .
59,135
def _clean_ctx(self):
    """Clean up and deallocate the cipher context, if present."""
    try:
        if self.ctx is not None:
            self.__ctxcleanup(self.ctx)
            libcrypto.EVP_CIPHER_CTX_free(self.ctx)
            del self.ctx
    except AttributeError:
        # ctx never existed or was already deleted.
        pass
    self.cipher_finalized = True
Cleans up cipher ctx and deallocates it
59,136
def set_default(eng, algorithms=0xFFFF):
    """Make *eng* the default engine for all algorithms it supports."""
    if not isinstance(eng, Engine):
        eng = Engine(eng)
    global default
    libcrypto.ENGINE_set_default(eng.ptr, c_int(algorithms))
    # Keep a module-level reference so the engine object stays alive.
    default = eng
Sets specified engine as default for all algorithms supported by it
59,137
def from_keyed_iterable ( iterable , key , filter_func = None ) : generated = { } for element in iterable : try : k = getattr ( element , key ) except AttributeError : raise RuntimeError ( "{} does not have the keyed attribute: {}" . format ( element , key ) ) if filter_func is None or filter_func ( element ) : if k in...
Construct a dictionary out of an iterable using an attribute name as the key . Optionally provide a filter function to determine what should be kept in the dictionary .
59,138
def subtract_by_key(dict_a, dict_b):
    """Return c = a - b: the entries of *dict_a* whose keys are absent from *dict_b*."""
    return {key: val for key, val in dict_a.items() if key not in dict_b}
given two dicts a and b this function returns c = a - b where a - b is defined as the key difference between a and b .
59,139
def winnow_by_keys ( dct , keys = None , filter_func = None ) : has = { } has_not = { } for key in dct : key_passes_check = False if keys is not None : key_passes_check = key in keys elif filter_func is not None : key_passes_check = filter_func ( key ) if key_passes_check : has [ key ] = dct [ key ] else : has_not [ ke...
separates a dict into has - keys and not - has - keys pairs using either a list of keys or a filtering function .
59,140
def flat_map(iterable, func):
    """Apply *func* to each element and concatenate the resulting sequences."""
    flattened = []
    for item in iterable:
        mapped = func(item)
        # Skip empty results to mirror the original's len() > 0 guard.
        if len(mapped) > 0:
            flattened.extend(mapped)
    return flattened
func must take an item and return an iterable that contains that item. This is flatmap in the classic mode.
59,141
def product(sequence, initial=1):
    """Multiply the elements of *sequence* together (like sum(), but for *).

    Raises TypeError when *sequence* is not iterable.  Returns *initial*
    for an empty sequence.
    """
    # collections.Iterable was deprecated in 3.3 and removed in Python
    # 3.10; collections.abc is the correct home for the ABC everywhere.
    import collections.abc
    if not isinstance(sequence, collections.abc.Iterable):
        raise TypeError(
            "'{}' object is not iterable".format(type(sequence).__name__))
    return reduce(operator.mul, sequence, initial)
like the built - in sum but for multiplication .
59,142
def date_from_string ( string , format_string = None ) : if isinstance ( format_string , str ) : return datetime . datetime . strptime ( string , format_string ) . date ( ) elif format_string is None : format_string = [ "%Y-%m-%d" , "%m-%d-%Y" , "%m/%d/%Y" , "%d/%m/%Y" , ] for format in format_string : try : return dat...
Runs through a few common string formats for datetimes and attempts to coerce them into a datetime . Alternatively format_string can provide either a single string to attempt or an iterable of strings to attempt .
59,143
def to_datetime(plain_date, hours=0, minutes=0, seconds=0, ms=0):
    """Promote a datetime.date to a datetime.datetime; pass datetimes through."""
    if isinstance(plain_date, datetime.datetime):
        return plain_date
    return datetime.datetime(
        plain_date.year,
        plain_date.month,
        plain_date.day,
        hours,
        minutes,
        seconds,
        ms,
    )
given a datetime . date gives back a datetime . datetime
59,144
def get_containing_period ( cls , * periods ) : if any ( not isinstance ( period , TimePeriod ) for period in periods ) : raise TypeError ( "periods must all be TimePeriods: {}" . format ( periods ) ) latest = datetime . datetime . min earliest = datetime . datetime . max for period in periods : if period . _latest is ...
Given a bunch of TimePeriods return a TimePeriod that most closely contains them .
59,145
def get_user_password(env, param, force=False):
    """Show the stored credential for a keyring entry.

    Returns (username, password) on success, None when the user declines
    the display confirmation, and False when no password is stored.
    """
    username = utils.assemble_username(env, param)
    if not utils.confirm_credential_display(force):
        return
    password = password_get(username)
    if not password:
        return False
    return (username, password)
Allows the user to print the credential for a particular keyring entry to the screen
59,146
def password_get ( username = None ) : password = keyring . get_password ( 'supernova' , username ) if password is None : split_username = tuple ( username . split ( ':' ) ) msg = ( "Couldn't find a credential for {0}:{1}. You need to set one " "with: supernova-keyring -s {0} {1}" ) . format ( * split_username ) raise ...
Retrieves a password from the keychain based on the environment and configuration parameter pair .
59,147
def set_user_password(environment, parameter, password):
    """Store a user's password in the keyring under 'environment:parameter'."""
    keyring_username = '%s:%s' % (environment, parameter)
    return password_set(keyring_username, password)
Sets a user s password in the keyring storage
59,148
def password_set(username=None, password=None):
    """Store *password* in the keychain; True on success, False otherwise."""
    # keyring.set_password returns None on success.
    return keyring.set_password('supernova', username, password) is None
Stores a password in a keychain for a particular environment and configuration parameter pair .
59,149
def prep_shell_environment(nova_env, nova_creds):
    """Build the environment-variable dict for the selected environment."""
    # Decode bytes values so the mapping is safe to merge into os.environ.
    return {key: (value.decode() if type(value) == six.binary_type else value)
            for key, value in prep_nova_creds(nova_env, nova_creds)}
Appends new variables to the current shell environment temporarily .
59,150
def prep_nova_creds ( nova_env , nova_creds ) : try : raw_creds = dict ( nova_creds . get ( 'DEFAULT' , { } ) , ** nova_creds [ nova_env ] ) except KeyError : msg = "{0} was not found in your supernova configuration " "file" . format ( nova_env ) raise KeyError ( msg ) proxy_re = re . compile ( r"(^http_proxy|^https_pr...
Finds relevant config options in the supernova config and cleans them up for novaclient .
59,151
def load_config ( config_file_override = False ) : supernova_config = get_config_file ( config_file_override ) supernova_config_dir = get_config_directory ( config_file_override ) if not supernova_config and not supernova_config_dir : raise Exception ( "Couldn't find a valid configuration file to parse" ) nova_creds = ...
Pulls the supernova configuration file and reads it
59,152
def get_config_file ( override_files = False ) : if override_files : if isinstance ( override_files , six . string_types ) : possible_configs = [ override_files ] else : raise Exception ( "Config file override must be a string" ) else : xdg_config_home = os . environ . get ( 'XDG_CONFIG_HOME' ) or os . path . expanduse...
Looks for the most specific configuration file available . An override can be provided as a string if needed .
59,153
def get_config_directory ( override_files = False ) : if override_files : possible_dirs = [ override_files ] else : xdg_config_home = os . environ . get ( 'XDG_CONFIG_HOME' ) or os . path . expanduser ( '~/.config' ) possible_dirs = [ os . path . join ( xdg_config_home , "supernova.d/" ) , os . path . expanduser ( "~/....
Looks for the most specific configuration directory possible in order to load individual configuration files .
59,154
def execute_executable(nova_args, env_vars):
    """Run the user's executable with *env_vars*, wait, and return the process."""
    proc = subprocess.Popen(nova_args,
                            stdout=sys.stdout,
                            stderr=subprocess.PIPE,
                            env=env_vars)
    proc.wait()
    return proc
Executes the executable given by the user .
59,155
def check_for_debug(supernova_args, nova_args):
    """Prepend the right debug flag for the chosen executable, if requested."""
    if supernova_args['debug']:
        # heat uses '-d'; every other client uses '--debug'.
        flag = '-d ' if supernova_args['executable'] == 'heat' else '--debug '
        nova_args.insert(0, flag)
    return nova_args
If the user wanted to run the executable with debugging enabled we need to apply the correct arguments to the executable .
59,156
def check_for_executable ( supernova_args , env_vars ) : exe = supernova_args . get ( 'executable' , 'default' ) if exe != 'default' : return supernova_args if 'OS_EXECUTABLE' in env_vars . keys ( ) : supernova_args [ 'executable' ] = env_vars [ 'OS_EXECUTABLE' ] return supernova_args supernova_args [ 'executable' ] = ...
It's possible that a user might set their custom executable via an environment variable. If we detect one, we should add it to supernova's arguments ONLY IF an executable wasn't set on the command line. The command-line executable must take priority.
59,157
def check_for_bypass_url(raw_creds, nova_args):
    """Prepend '--bypass-url <url>' when the config supplies BYPASS_URL."""
    if 'BYPASS_URL' in raw_creds.keys():
        nova_args = ['--bypass-url', raw_creds['BYPASS_URL']] + nova_args
    return nova_args
Return a list of extra args that need to be passed on cmdline to nova .
59,158
def run_command ( nova_creds , nova_args , supernova_args ) : nova_env = supernova_args [ 'nova_env' ] nova_args = copy . copy ( nova_args ) env_vars = os . environ . copy ( ) env_vars . update ( credentials . prep_shell_environment ( nova_env , nova_creds ) ) nova_args = check_for_bypass_url ( nova_creds [ nova_env ] ...
Sets the environment variables for the executable runs the executable and handles the output .
59,159
def check_environment_presets ( ) : presets = [ x for x in os . environ . copy ( ) . keys ( ) if x . startswith ( 'NOVA_' ) or x . startswith ( 'OS_' ) ] if len ( presets ) < 1 : return True else : click . echo ( "_" * 80 ) click . echo ( "*WARNING* Found existing environment variables that may " "cause conflicts:" ) f...
Checks for environment variables that can cause problems with supernova
59,160
def get_envs_in_group ( group_name , nova_creds ) : envs = [ ] for key , value in nova_creds . items ( ) : supernova_groups = value . get ( 'SUPERNOVA_GROUP' , [ ] ) if hasattr ( supernova_groups , 'startswith' ) : supernova_groups = [ supernova_groups ] if group_name in supernova_groups : envs . append ( key ) elif gr...
Takes a group_name and finds any environments that have a SUPERNOVA_GROUP configuration line that matches the group_name .
59,161
def is_valid_group ( group_name , nova_creds ) : valid_groups = [ ] for key , value in nova_creds . items ( ) : supernova_groups = value . get ( 'SUPERNOVA_GROUP' , [ ] ) if hasattr ( supernova_groups , 'startswith' ) : supernova_groups = [ supernova_groups ] valid_groups . extend ( supernova_groups ) valid_groups . ap...
Checks to see if the configuration file contains a SUPERNOVA_GROUP configuration option .
59,162
def rm_prefix(name):
    """Strip a leading 'nova_', 'novaclient_' or 'os_' prefix from *name*."""
    for prefix in ('nova_', 'novaclient_', 'os_'):
        if name.startswith(prefix):
            return name[len(prefix):]
    return name
Removes nova_ os_ novaclient_ prefix from string .
59,163
def __pad(strdata):
    """Wrap *strdata* in a JSONP callback when the request specifies one."""
    callback = request.args.get('callback')
    if callback:
        return "%s(%s);" % (callback, strdata)
    return strdata
Pads strdata with a Request s callback argument if specified or does nothing .
59,164
def __dumps(*args, **kwargs):
    """Serialize args/kwargs as JSON.

    A single positional argument is dumped as-is (supports a top-level
    array); otherwise args/kwargs are combined into a dict.  Pretty-prints
    when JSONIFY_PRETTYPRINT_REGULAR is set and the request is not an XHR.
    """
    indent = None
    if (current_app.config.get('JSONIFY_PRETTYPRINT_REGULAR', False)
            and not request.is_xhr):
        indent = 2
    # The original used 'len(args) is 1' - an identity comparison on an
    # int that only works via CPython small-int interning and raises a
    # SyntaxWarning on 3.8+.  '==' is the correct comparison.
    payload = args[0] if len(args) == 1 else dict(*args, **kwargs)
    return json.dumps(payload, indent=indent)
Serializes args and kwargs as JSON . Supports serializing an array as the top - level object if it is the only argument .
59,165
def update_type_lookups(self):
    """Rebuild the type <-> typestring lookup tables from the parallel lists."""
    forward = zip(self.types, self.python_type_strings)
    backward = zip(self.python_type_strings, self.types)
    self.type_to_typestring = dict(forward)
    self.typestring_to_type = dict(backward)
Update type and typestring lookup dicts .
59,166
def get_type_string(self, data, type_string):
    """Return *type_string* if given, else look up the string for type(data).

    Falls back to the fully-qualified 'module.name' key when the bare
    type is not registered.
    """
    if type_string is not None:
        return type_string
    cls = type(data)
    table = self.type_to_typestring
    try:
        return table[cls]
    except KeyError:
        qualified = cls.__module__ + '.' + cls.__name__
        return table[qualified]
Gets type string .
59,167
def write(self, f, grp, name, data, type_string, options):
    """Base write hook: always refuses; subclasses must override.

    Raises NotImplementedError naming the unsupported data type.
    """
    # The original message was "'Can' 't write data type: '" - two
    # adjacent string literals that rendered as "Cant write...";
    # restore the intended apostrophe.
    raise NotImplementedError("Can't write data type: " + str(type(data)))
Writes an object s metadata to file .
59,168
def write_metadata ( self , f , dsetgrp , data , type_string , options , attributes = None ) : if attributes is None : attributes = dict ( ) if options . store_python_metadata and 'Python.Type' not in attributes : attributes [ 'Python.Type' ] = ( 'string' , self . get_type_string ( data , type_string ) ) set_attributes...
Writes an object to file .
59,169
def process_path ( pth ) : if isinstance ( pth , bytes ) : p = pth . decode ( 'utf-8' ) elif ( sys . hexversion >= 0x03000000 and isinstance ( pth , str ) ) or ( sys . hexversion < 0x03000000 and isinstance ( pth , unicode ) ) : p = pth elif not isinstance ( pth , collections . Iterable ) : raise TypeError ( 'p must be...
Processes paths .
59,170
def write_object_array ( f , data , options ) : ref_dtype = h5py . special_dtype ( ref = h5py . Reference ) data_refs = np . zeros ( shape = data . shape , dtype = 'object' ) if options . group_for_references not in f : f . create_group ( options . group_for_references ) grp2 = f [ options . group_for_references ] if n...
Writes an array of objects recursively .
59,171
def read_object_array(f, data, options):
    """Dereference each HDF5 reference in *data*, reading objects recursively."""
    dereferenced = np.zeros(shape=data.shape, dtype='object')
    for index, ref in np.ndenumerate(data):
        dereferenced[index] = read_data(f, None, None, options,
                                        dsetgrp=f[ref])
    return dereferenced
Reads an array of objects recursively .
59,172
def next_unused_name_in_group(grp, length):
    """Return a random hex name of *length* chars not already present in *grp*."""
    fmt = '%0{0}x'.format(length)
    while True:
        # length hex digits = length * 4 random bits.
        candidate = fmt % random.getrandbits(length * 4)
        if candidate not in grp:
            return candidate
Gives a name that isn t used in a Group .
59,173
def convert_numpy_str_to_uint16 ( data ) : if data . nbytes == 0 : return np . uint16 ( [ ] ) if sys . byteorder == 'little' : codec = 'UTF-16LE' else : codec = 'UTF-16BE' cdata = np . char . encode ( np . atleast_1d ( data ) , codec ) shape = list ( cdata . shape ) shape [ - 1 ] *= ( cdata . dtype . itemsize // 2 ) re...
Converts a numpy . unicode \ _ to UTF - 16 in numpy . uint16 form .
59,174
def convert_numpy_str_to_uint32(data):
    """View a numpy unicode array as its raw UTF-32 code units (uint32)."""
    if data.nbytes == 0:
        # Nothing to view for empty input.
        return np.uint32([])
    new_shape = list(np.atleast_1d(data).shape)
    # Each unicode character occupies itemsize bytes = itemsize // 4 uint32s.
    new_shape[-1] *= data.dtype.itemsize // 4
    return data.flatten().view(np.uint32).reshape(tuple(new_shape))
Converts a numpy . unicode \ _ to its numpy . uint32 representation .
59,175
def decode_complex ( data , complex_names = ( None , None ) ) : if data . dtype . fields is None : return data fields = list ( data . dtype . fields ) if len ( fields ) != 2 : return data real_fields = [ 'r' , 're' , 'real' ] imag_fields = [ 'i' , 'im' , 'imag' , 'imaginary' ] cnames = list ( complex_names ) for s in f...
Decodes possibly complex data read from an HDF5 file .
59,176
def encode_complex ( data , complex_names ) : dtype_name = data . dtype . name if dtype_name [ 0 : 7 ] == 'complex' : dtype_name = 'float' + str ( int ( float ( dtype_name [ 7 : ] ) / 2 ) ) dt = np . dtype ( [ ( complex_names [ 0 ] , dtype_name ) , ( complex_names [ 1 ] , dtype_name ) ] ) return data . view ( dt ) . co...
Encodes complex data to having arbitrary complex field names .
59,177
def convert_attribute_to_string ( value ) : if value is None : return value elif ( sys . hexversion >= 0x03000000 and isinstance ( value , str ) ) or ( sys . hexversion < 0x03000000 and isinstance ( value , unicode ) ) : return value elif isinstance ( value , bytes ) : return value . decode ( ) elif isinstance ( value ...
Convert an attribute value to a string .
59,178
def set_attribute(target, name, value):
    """Set attribute *name* on a Dataset or Group, creating it if needed."""
    attrs = target.attrs
    try:
        # modify() keeps the existing dtype/shape when the attribute exists.
        attrs.modify(name, value)
    except:
        attrs.create(name, value)
Sets an attribute on a Dataset or Group .
59,179
def set_attribute_string(target, name, value):
    """Set attribute *name* on a Dataset or Group to a bytes string."""
    encoded = np.bytes_(value)
    set_attribute(target, name, encoded)
Sets an attribute to a string on a Dataset or Group .
59,180
def set_attribute_string_array ( target , name , string_list ) : s_list = [ convert_to_str ( s ) for s in string_list ] if sys . hexversion >= 0x03000000 : target . attrs . create ( name , s_list , dtype = h5py . special_dtype ( vlen = str ) ) else : target . attrs . create ( name , s_list , dtype = h5py . special_dtyp...
Sets an attribute to an array of string on a Dataset or Group .
59,181
def set_attributes_all ( target , attributes , discard_others = True ) : attrs = target . attrs existing = dict ( attrs . items ( ) ) if sys . hexversion >= 0x03000000 : str_arr_dtype = h5py . special_dtype ( vlen = str ) else : str_arr_dtype = dtype = h5py . special_dtype ( vlen = unicode ) for k , ( kind , value ) in...
Set Attributes in bulk and optionally discard others .
59,182
def find_thirdparty_marshaller_plugins():
    """Locate (without loading) third-party marshaller plugins, keyed by API version."""
    entry_points = tuple(pkg_resources.iter_entry_points(
        'hdf5storage.marshallers.plugins'))
    return {version: {plugin.module_name: plugin
                      for plugin in entry_points
                      if plugin.name == version}
            for version in supported_marshaller_api_versions()}
Find but don t load all third party marshaller plugins .
59,183
def savemat ( file_name , mdict , appendmat = True , format = '7.3' , oned_as = 'row' , store_python_metadata = True , action_for_matlab_incompatible = 'error' , marshaller_collection = None , truncate_existing = False , truncate_invalid_matlab = False , ** keywords ) : if float ( format ) < 7.3 : import scipy . io sci...
Save a dictionary of python types to a MATLAB MAT file .
59,184
def loadmat ( file_name , mdict = None , appendmat = True , variable_names = None , marshaller_collection = None , ** keywords ) : try : options = Options ( marshaller_collection = marshaller_collection ) if appendmat and not file_name . endswith ( '.mat' ) : filename = file_name + '.mat' else : filename = file_name if...
Loads data from a MATLAB MAT file.
59,185
def _update_marshallers ( self ) : self . _marshallers = [ ] for v in self . _priority : if v == 'builtin' : self . _marshallers . extend ( self . _builtin_marshallers ) elif v == 'plugin' : self . _marshallers . extend ( self . _plugin_marshallers ) elif v == 'user' : self . _marshallers . extend ( self . _user_marsha...
Update the full marshaller list and other data structures .
59,186
def _import_marshaller_modules(self, m):
    """Import the modules required by marshaller *m*; True on success."""
    try:
        for name in m.required_modules:
            if name in sys.modules:
                continue
            if _has_importlib:
                importlib.import_module(name)
            else:
                # Very old interpreters without importlib.
                __import__(name)
    except ImportError:
        # A required module is unavailable; other exceptions propagate.
        return False
    return True
Imports the modules required by the marshaller .
59,187
def get_marshaller_for_type ( self , tp ) : if not isinstance ( tp , str ) : tp = tp . __module__ + '.' + tp . __name__ if tp in self . _types : index = self . _types [ tp ] else : return None , False m = self . _marshallers [ index ] if self . _imported_required_modules [ index ] : return m , True if not self . _has_r...
Gets the appropriate marshaller for a type .
59,188
def get_marshaller_for_type_string ( self , type_string ) : if type_string in self . _type_strings : index = self . _type_strings [ type_string ] m = self . _marshallers [ index ] if self . _imported_required_modules [ index ] : return m , True if not self . _has_required_modules [ index ] : return m , False success = ...
Gets the appropriate marshaller for a type string .
59,189
def get_marshaller_for_matlab_class ( self , matlab_class ) : if matlab_class in self . _matlab_classes : index = self . _matlab_classes [ matlab_class ] m = self . _marshallers [ index ] if self . _imported_required_modules [ index ] : return m , True if not self . _has_required_modules [ index ] : return m , False su...
Gets the appropriate marshaller for a MATLAB class string .
59,190
def new_node(self):
    """Add a blank node to the graph and return its id."""
    node_id = self.generate_node_id()
    self.nodes[node_id] = {
        'id': node_id,
        'edges': [],
        'data': {},
    }
    self._num_nodes += 1
    return node_id
Adds a new blank node to the graph . Returns the node id of the new node .
59,191
def new_edge ( self , node_a , node_b , cost = 1 ) : try : self . nodes [ node_a ] except KeyError : raise NonexistentNodeError ( node_a ) try : self . nodes [ node_b ] except KeyError : raise NonexistentNodeError ( node_b ) edge_id = self . generate_edge_id ( ) edge = { 'id' : edge_id , 'vertices' : ( node_a , node_b ...
Adds a new edge from node_a to node_b that has a cost . Returns the edge id of the new edge .
59,192
def adjacent(self, node_a, node_b):
    """Return True when an edge runs from node_a to node_b."""
    return node_b in self.neighbors(node_a)
Determines whether there is an edge from node_a to node_b . Returns True if such an edge exists otherwise returns False .
59,193
def edge_cost(self, node_a, node_b):
    """Return the cost of the edge node_a -> node_b, or +inf when absent."""
    wanted = (node_a, node_b)
    for edge_id in self.get_node(node_a)['edges']:
        edge = self.get_edge(edge_id)
        if edge['vertices'] == wanted:
            return edge['cost']
    return float('inf')
Returns the cost of moving between the edge that connects node_a to node_b . Returns + inf if no such edge exists .
59,194
def get_node(self, node_id):
    """Return the node object for *node_id*; raise NonexistentNodeError if absent."""
    try:
        return self.nodes[node_id]
    except KeyError:
        raise NonexistentNodeError(node_id)
Returns the node object identified by node_id .
59,195
def get_edge(self, edge_id):
    """Return the edge object for *edge_id*; raise NonexistentEdgeError if absent."""
    try:
        return self.edges[edge_id]
    except KeyError:
        raise NonexistentEdgeError(edge_id)
Returns the edge object identified by edge_id .
59,196
def delete_edge_by_nodes(self, node_a, node_b):
    """Delete every edge running from node_a to node_b."""
    source = self.get_node(node_a)
    # Collect first: deleting while iterating the edge list would skip items.
    doomed = [eid for eid in source['edges']
              if self.get_edge(eid)['vertices'][1] == node_b]
    for eid in doomed:
        self.delete_edge_by_id(eid)
Removes all the edges from node_a to node_b from the graph .
59,197
def delete_node ( self , node_id ) : node = self . get_node ( node_id ) for e in node [ 'edges' ] : self . delete_edge_by_id ( e ) edges = [ edge_id for edge_id , edge in list ( self . edges . items ( ) ) if edge [ 'vertices' ] [ 1 ] == node_id ] for e in edges : self . delete_edge_by_id ( e ) del self . nodes [ node_i...
Removes the node identified by node_id from the graph .
59,198
def move_edge_source(self, edge_id, node_a, node_b):
    """Re-anchor *edge_id* so it originates from node_b instead of node_a."""
    edge = self.get_edge(edge_id)
    _, destination = edge['vertices']
    edge['vertices'] = (node_b, destination)
    self.get_node(node_a)['edges'].remove(edge_id)
    self.get_node(node_b)['edges'].append(edge_id)
Moves an edge originating from node_a so that it originates from node_b .
59,199
def get_edge_ids_by_node_ids(self, node_a, node_b):
    """List the ids of edges connecting node_a to node_b ([] when not adjacent)."""
    if not self.adjacent(node_a, node_b):
        return []
    candidates = self.get_node(node_a)['edges']
    return [edge_id for edge_id in candidates
            if self.get_edge(edge_id)['vertices'][1] == node_b]
Returns a list of edge ids connecting node_a to node_b .