idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
59,100
def exportpub(self, format="PEM"):
    """Serialize the public key to PEM (default) or DER via a memory BIO."""
    out = Membio()
    if format == "PEM":
        status = libcrypto.PEM_write_bio_PUBKEY(out.bio, self.key)
    else:
        status = libcrypto.i2d_PUBKEY_bio(out.bio, self.key)
    if not status:
        raise PKeyError("error serializing public key")
    return str(out)
Returns public key as PEM or DER structure .
59,101
def exportpriv(self, format="PEM", password=None, cipher=None):
    """Serialize the private key to PEM (default) or DER (PKCS#8).

    When both password and cipher are given, the key is encrypted with
    that cipher using the password.
    """
    out = Membio()
    evp_cipher = cipher.cipher if cipher is not None else None
    if format == "PEM":
        ok = libcrypto.PEM_write_bio_PrivateKey(out.bio, self.key, evp_cipher,
                                                None, 0,
                                                _password_callback(password),
                                                None)
        if not ok:
            raise PKeyError("error serializing private key")
        return str(out)
    ok = libcrypto.i2d_PKCS8PrivateKey_bio(out.bio, self.key, evp_cipher,
                                           None, 0,
                                           _password_callback(password), None)
    if not ok:
        raise PKeyError("error serializing private key")
    return bintype(out)
Returns private key as PEM or DER structure. If password and cipher are specified, encrypts the key with the given password using the given algorithm. Cipher must be a ctypescrypto.cipher.CipherType object.
59,102
def _configure_context(ctx, opts, skip=()):
    """Apply string options from *opts* to an EVP_PKEY_CTX, skipping *skip*."""
    for name in opts:
        if name in skip:
            continue
        key = name.encode("ascii") if isinstance(name, chartype) else name
        raw = opts[name]
        if isinstance(raw, chartype):
            value = raw.encode("ascii")
        elif isinstance(raw, bintype):
            value = raw
        elif pyver == 2:
            value = str(raw)
        else:
            value = str(raw).encode('ascii')
        status = libcrypto.EVP_PKEY_CTX_ctrl_str(ctx, key, value)
        if status == -2:
            raise PKeyError("Parameter %s is not supported by key" % name)
        if status < 1:
            raise PKeyError("Error setting parameter %s" % name)
Configures context of public key operations
59,103
def read(self, length=None):
    """Read from a readable BIO.

    With *length* given, reads at most that many bytes in one call;
    otherwise reads in 1024-byte chunks until EOF.
    Raises NotImplementedError when the BIO does not support reading,
    IOError on a read error.
    """
    if length is not None:
        if not isinstance(length, inttype):
            raise TypeError("length to read should be number")
        buf = create_string_buffer(length)
        readbytes = libcrypto.BIO_read(self.bio, buf, length)
        if readbytes == -2:
            # BUG FIX: message previously rendered "...supported bythis BIO"
            # (missing space between the concatenated halves)
            raise NotImplementedError("Function is not supported by " +
                                      "this BIO")
        if readbytes == -1:
            raise IOError
        if readbytes == 0:
            return b""
        return buf.raw[:readbytes]
    buf = create_string_buffer(1024)
    out = b""
    readbytes = 1
    while readbytes > 0:
        readbytes = libcrypto.BIO_read(self.bio, buf, 1024)
        if readbytes == -2:
            raise NotImplementedError("Function is not supported by " +
                                      "this BIO")
        if readbytes == -1:
            raise IOError
        if readbytes > 0:
            out += buf.raw[:readbytes]
    return out
Reads data from a readable BIO. For test purposes.
59,104
def write(self, data):
    """Write *data* to a writable BIO, coercing it to bytes first."""
    if pyver == 2:
        payload = data.encode("utf-8") if isinstance(data, unicode) else str(data)
    elif isinstance(data, bytes):
        payload = data
    else:
        payload = str(data).encode("utf-8")
    written = libcrypto.BIO_write(self.bio, payload, len(payload))
    if written == -2:
        raise NotImplementedError("Function not supported by this BIO")
    if written < len(payload):
        raise IOError("Not all data were successfully written")
Writes data to writable bio . For test purposes
59,105
def CMS(data, format="PEM"):
    """Parse CMS data (PEM or DER) and return the matching content-type object.

    Returns SignedData, EnvelopedData or EncryptedData depending on the
    message type; raises CMSError on parse failure.
    """
    bio = Membio(data)
    if format == "PEM":
        ptr = libcrypto.PEM_read_bio_CMS(bio.bio, None, None, None)
    else:
        ptr = libcrypto.d2i_CMS_bio(bio.bio, None)
    if ptr is None:
        raise CMSError("Error parsing CMS data")
    typeoid = Oid(libcrypto.OBJ_obj2nid(libcrypto.CMS_get0_type(ptr)))
    handlers = {
        "pkcs7-signedData": SignedData,
        "pkcs7-envelopedData": EnvelopedData,
        "pkcs7-encryptedData": EncryptedData,
    }
    shortname = typeoid.shortname()
    if shortname not in handlers:
        raise NotImplementedError("cannot handle " + shortname)
    return handlers[shortname](ptr)
Factory function to create CMS objects from received messages . Parses CMS data and returns either SignedData or EnvelopedData object . format argument can be either PEM or DER .
59,106
def pem(self):
    """Serialize this CMS structure in PEM format."""
    out = Membio()
    if not libcrypto.PEM_write_bio_CMS(out.bio, self.ptr):
        raise CMSError("writing CMS to PEM")
    return str(out)
Serialize in PEM format
59,107
def create(data, cert, pkey, flags=Flags.BINARY, certs=None):
    """Create a SignedData message by signing *data* with *pkey* and *cert*.

    Optional *certs* are embedded in the message alongside the signer cert.
    """
    if not pkey.cansign:
        raise ValueError("Specified keypair has no private part")
    if cert.pubkey != pkey:
        raise ValueError("Certificate doesn't match public key")
    bio = Membio(data)
    if certs is not None and len(certs) > 0:
        # keep the stack object alive while CMS_sign uses its pointer
        stack = StackOfX509(certs)
        certstack = stack.ptr
    else:
        certstack = None
    ptr = libcrypto.CMS_sign(cert.cert, pkey.key, certstack, bio.bio, flags)
    if ptr is None:
        raise CMSError("signing message")
    return SignedData(ptr)
Creates SignedData message by signing data with pkey and certificate .
59,108
def sign(self, cert, pkey, digest_type=None, data=None, flags=Flags.BINARY):
    """Add another signer to an already signed message and finalize it
    (unless REUSE_DIGEST is set in *flags*)."""
    if not pkey.cansign:
        raise ValueError("Specified keypair has no private part")
    if cert.pubkey != pkey:
        raise ValueError("Certificate doesn't match public key")
    signer = libcrypto.CMS_add1_signer(self.ptr, cert.cert, pkey.key,
                                       digest_type.digest, flags)
    if signer is None:
        raise CMSError("adding signer")
    if flags & Flags.REUSE_DIGEST == 0:
        biodata = None
        if data is not None:
            bio = Membio(data)
            biodata = bio.bio
        if libcrypto.CMS_final(self.ptr, biodata, None, flags) <= 0:
            raise CMSError("Cannot finalize CMS")
Adds another signer to already signed message
59,109
def verify(self, store, flags, data=None, certs=None):
    """Verify the signature on this CMS message against a trusted cert store.

    *data* supplies detached content; *certs* are extra untrusted certs.
    Returns True on successful verification.
    """
    bio = None
    if data is not None:
        bio_obj = Membio(data)
        bio = bio_obj.bio
    if certs is not None and len(certs) > 0:
        certstack_obj = StackOfX509(certs)
        certstack = certstack_obj.ptr
    else:
        certstack = None
    status = libcrypto.CMS_verify(self.ptr, certstack, store.store, bio,
                                  None, flags)
    return status > 0
Verifies signature under CMS message using trusted cert store
59,110
def signers(self):
    """Return the signers' certificates as a non-owning stack."""
    stack = libcrypto.CMS_get0_signers(self.ptr)
    if stack is None:
        raise CMSError("Cannot get signers")
    return StackOfX509(ptr=stack, disposable=False)
Return list of signers' certificates.
59,111
def data(self):
    """Return the signed content, or None when the signature is detached."""
    if self.detached:
        return None
    out = Membio()
    if not libcrypto.CMS_verify(self.ptr, None, None, None, out.bio,
                                Flags.NO_VERIFY):
        raise CMSError("extract data")
    return str(out)
Returns signed data if present in the message
59,112
def certs(self):
    """Return the certificates embedded in the structure (caller-owned stack)."""
    stack = libcrypto.CMS_get1_certs(self.ptr)
    if stack is None:
        raise CMSError("getting certs")
    return StackOfX509(ptr=stack, disposable=True)
List of the certificates contained in the structure
59,113
def create(recipients, data, cipher, flags=0):
    """Create an EnvelopedData message encrypted for *recipients*."""
    recipient_stack = StackOfX509(recipients)
    source = Membio(data)
    handle = libcrypto.CMS_encrypt(recipient_stack.ptr, source.bio,
                                   cipher.cipher, flags)
    if handle is None:
        raise CMSError("encrypt EnvelopedData")
    return EnvelopedData(handle)
Creates and encrypts message
59,114
def create(data, cipher, key, flags=0):
    """Create an EncryptedData message from *data* using a symmetric *key*."""
    source = Membio(data)
    handle = libcrypto.CMS_EncryptedData_encrypt(source.bio, cipher.cipher,
                                                 key, len(key), flags)
    if handle is None:
        raise CMSError("encrypt data")
    return EncryptedData(handle)
Creates an EncryptedData message .
59,115
def decrypt(self, key, flags=0):
    """Decrypt this EncryptedData message with the symmetric *key*."""
    out = Membio()
    status = libcrypto.CMS_EncryptedData_decrypt(self.ptr, key, len(key),
                                                 None, out.bio, flags)
    if status <= 0:
        raise CMSError("decrypt data")
    return str(out)
Decrypts encrypted data message
59,116
def name(self):
    """Return (and lazily cache) the digest algorithm's long name."""
    try:
        return self.digest_name
    except AttributeError:
        self.digest_name = Oid(libcrypto.EVP_MD_type(self.digest)).longname()
        return self.digest_name
Returns name of the digest
59,117
def update(self, data, length=None):
    """Feed a byte string (or its first *length* bytes) into the digest."""
    if self.digest_finalized:
        raise DigestError("No updates allowed")
    if not isinstance(data, bintype):
        raise TypeError("A byte string is expected")
    if length is None:
        length = len(data)
    elif length > len(data):
        raise ValueError("Specified length is greater than length of data")
    if libcrypto.EVP_DigestUpdate(self.ctx, c_char_p(data), length) != 1:
        raise DigestError("Unable to update digest")
Hashes given byte string
59,118
def digest(self, data=None):
    """Finalize (optionally after hashing *data*) and return the digest value.

    Repeated calls after finalization return the cached value.
    """
    if self.digest_finalized:
        return self.digest_out.raw[:self.digest_size]
    if data is not None:
        self.update(data)
    self.digest_out = create_string_buffer(256)
    length = c_long(0)
    if libcrypto.EVP_DigestFinal_ex(self.ctx, self.digest_out,
                                    byref(length)) != 1:
        raise DigestError("Unable to finalize digest")
    self.digest_finalized = True
    return self.digest_out.raw[:self.digest_size]
Finalizes digest operation and return digest value Optionally hashes more data before finalizing
59,119
def copy(self):
    """Clone the digest context so more data can be hashed independently."""
    clone = Digest(self.digest_type)
    libcrypto.EVP_MD_CTX_copy(clone.ctx, self.ctx)
    return clone
Creates copy of the digest CTX to allow to compute digest while being able to hash more data
59,120
def _clean_ctx(self):
    """Free the digest context (if any) and reset finalization state."""
    try:
        if self.ctx is not None:
            libcrypto.EVP_MD_CTX_free(self.ctx)
            del self.ctx
    except AttributeError:
        # ctx already removed — nothing to free
        pass
    self.digest_out = None
    self.digest_finalized = False
Clears and deallocates context
59,121
def hexdigest(self, data=None):
    """Return the digest as a hex string (hashlib compatibility)."""
    from base64 import b16encode
    encoded = b16encode(self.digest(data))
    if pyver == 2:
        return encoded
    return encoded.decode('us-ascii')
Returns digest in the hexadecimal form . For compatibility with hashlib
59,122
def _X509__asn1date_to_datetime(asn1date):
    """Convert an OpenSSL ASN1_TIME to a timezone-aware datetime (UTC)."""
    bio = Membio()
    libcrypto.ASN1_TIME_print(bio.bio, asn1date)
    parsed = datetime.strptime(str(bio), "%b %d %H:%M:%S %Y %Z")
    return parsed.replace(tzinfo=utc)
Converts openssl ASN1_TIME object to python datetime . datetime
59,123
def find(self, oid):
    """Return all extensions of the certificate whose OID equals *oid*.

    Raises TypeError when *oid* is not a ctypescrypto.oid.Oid.
    """
    if not isinstance(oid, Oid):
        # BUG FIX: error message previously misspelled "crytypescrypto"
        raise TypeError("Need ctypescrypto.oid.Oid as argument")
    found = []
    index = -1
    end = len(self)
    while True:
        index = libcrypto.X509_get_ext_by_NID(self.cert.cert, oid.nid, index)
        if index >= end or index < 0:
            break
        found.append(self[index])
    return found
Return list of extensions with given Oid
59,124
def find_critical(self, crit=True):
    """Return all critical (or, with crit=False, non-critical) extensions."""
    flag = 1 if crit else 0
    found = []
    end = len(self)
    index = -1
    while True:
        index = libcrypto.X509_get_ext_by_critical(self.cert.cert, flag, index)
        if index >= end or index < 0:
            break
        found.append(self[index])
    return found
Return list of critical extensions (or list of non-critical ones if the optional second argument is False).
59,125
def pem(self):
    """Return the PEM representation of the certificate."""
    out = Membio()
    if libcrypto.PEM_write_bio_X509(out.bio, self.cert) == 0:
        raise X509Error("error serializing certificate")
    return str(out)
Returns PEM representation of the certificate.
59,126
def serial(self):
    """Return the certificate serial number as an integer."""
    asnint = libcrypto.X509_get_serialNumber(self.cert)
    out = Membio()
    # i2a prints the ASN1 integer as hexadecimal text
    libcrypto.i2a_ASN1_INTEGER(out.bio, asnint)
    return int(str(out), 16)
Serial number of certificate as integer
59,127
def add_cert(self, cert):
    """Explicitly add an X509 certificate to the store's trusted set."""
    if not isinstance(cert, X509):
        raise TypeError("cert should be X509")
    libcrypto.X509_STORE_add_cert(self.store, cert.cert)
Explicitly adds certificate to the set of trusted certificates in the store.
59,128
def setpurpose(self, purpose):
    """Set the certificate purpose that verified certificates must match.

    *purpose* is either an OpenSSL purpose short name (str) or a numeric
    purpose id (int). Raises X509Error for unknown names or when the
    purpose cannot be applied, TypeError for other argument types.
    """
    if isinstance(purpose, str):
        purp_no = libcrypto.X509_PURPOSE_get_by_sname(purpose)
        if purp_no <= 0:
            raise X509Error("Invalid certificate purpose '%s'" % purpose)
    elif isinstance(purpose, int):
        purp_no = purpose
    else:
        # BUG FIX: previously fell through with purp_no unbound and
        # raised UnboundLocalError instead of a meaningful TypeError
        raise TypeError("purpose must be a string name or an integer id")
    if libcrypto.X509_STORE_set_purpose(self.store, purp_no) <= 0:
        raise X509Error("cannot set purpose")
Sets certificate purpose which verified certificate should match
59,129
def settime(self, time):
    """Set the verification time for certificate validity checks.

    *time* can be a datetime/date or a number of seconds since the epoch.
    NOTE: applying the time to the store is not implemented yet — after
    validating the argument this always raises NotImplementedError.
    """
    # NOTE(review): if `datetime` here is the class (from `from datetime
    # import datetime`), `datetime.date` is a method and this isinstance
    # check would raise TypeError — verify the module's imports.
    if isinstance(time, datetime) or isinstance(time, datetime.date):
        epoch_seconds = int(time.strftime("%s"))
    elif isinstance(time, int):
        epoch_seconds = time
    else:
        raise TypeError("datetime.date, datetime.datetime or integer " +
                        "is required as time argument")
    raise NotImplementedError
Set the point in time used to check validity of certificates. Time can be either a python datetime object or a number of seconds since the epoch.
59,130
def append(self, value):
    """Push a duplicated X509 certificate onto a writable stack."""
    if not self.need_free:
        raise ValueError("Stack is read-only")
    if not isinstance(value, X509):
        raise TypeError('StackOfX509 can contain only X509 objects')
    # duplicate so the stack owns its own reference
    sk_push(self.ptr, libcrypto.X509_dup(value.cert))
Adds certificate to stack
59,131
def create(curve, data):
    """Build an EC keypair (PKey) from a raw secret key and a curve.

    *data* may be an int or a big-endian byte string; the secret is
    reduced modulo the group order before being installed.
    """
    ec_key = libcrypto.EC_KEY_new_by_curve_name(curve.nid)
    if ec_key is None:
        raise PKeyError("EC_KEY_new_by_curvename")
    group = libcrypto.EC_KEY_get0_group(ec_key)
    if group is None:
        raise PKeyError("EC_KEY_get0_group")
    libcrypto.EC_GROUP_set_asn1_flag(group, 1)
    raw_key = libcrypto.BN_new()
    if isinstance(data, int):
        libcrypto.BN_hex2bn(byref(raw_key), hex(data))
    else:
        if raw_key is None:
            raise PKeyError("BN_new")
        if libcrypto.BN_bin2bn(data, len(data), raw_key) is None:
            raise PKeyError("BN_bin2bn")
    ctx = libcrypto.BN_CTX_new()
    if ctx is None:
        raise PKeyError("BN_CTX_new")
    order = libcrypto.BN_new()
    if order is None:
        raise PKeyError("BN_new")
    priv_key = libcrypto.BN_new()
    if priv_key is None:
        raise PKeyError("BN_new")
    if libcrypto.EC_GROUP_get_order(group, order, ctx) <= 0:
        raise PKeyError("EC_GROUP_get_order")
    # reduce the supplied secret modulo the group order
    if libcrypto.BN_nnmod(priv_key, raw_key, order, ctx) <= 0:
        raise PKeyError("BN_nnmod")
    if libcrypto.EC_KEY_set_private_key(ec_key, priv_key) <= 0:
        raise PKeyError("EC_KEY_set_private_key")
    pub_key = libcrypto.EC_POINT_new(group)
    if pub_key is None:
        raise PKeyError("EC_POINT_new")
    # derive the public point as priv_key * generator
    if libcrypto.EC_POINT_mul(group, pub_key, priv_key, None, None, ctx) <= 0:
        raise PKeyError("EC_POINT_mul")
    if libcrypto.EC_KEY_set_public_key(ec_key, pub_key) <= 0:
        raise PKeyError("EC_KEY_set_public_key")
    libcrypto.BN_free(raw_key)
    libcrypto.BN_free(order)
    libcrypto.BN_free(priv_key)
    libcrypto.BN_CTX_free(ctx)
    pkey = libcrypto.EVP_PKEY_new()
    if pkey is None:
        raise PKeyError("EVP_PKEY_new")
    if libcrypto.EVP_PKEY_set1_EC_KEY(pkey, ec_key) <= 0:
        raise PKeyError("EVP_PKEY_set1_EC_KEY")
    libcrypto.EC_KEY_free(ec_key)
    return PKey(ptr=pkey, cansign=True)
Creates EC keypair from the just secret key and curve name
59,132
def new(algname, key, encrypt=True, iv=None):
    """Return a Cipher ready to encrypt or decrypt with algorithm *algname*."""
    return Cipher(CipherType(algname), key, iv, encrypt)
Returns new cipher object ready to encrypt - decrypt data
59,133
def padding(self, padding=True):
    """Enable or disable the cipher's padding mode."""
    libcrypto.EVP_CIPHER_CTX_set_padding(self.ctx, 1 if padding else 0)
Sets padding mode of the cipher
59,134
def finish(self):
    """Finalize the cipher and return any remaining buffered output."""
    if self.cipher_finalized:
        raise CipherError("Cipher operation is already completed")
    outbuf = create_string_buffer(self.block_size)
    self.cipher_finalized = True
    outlen = c_int(0)
    if libcrypto.EVP_CipherFinal_ex(self.ctx, outbuf, byref(outlen)) == 0:
        self._clean_ctx()
        raise CipherError("Unable to finalize cipher")
    return outbuf.raw[:int(outlen.value)] if outlen.value > 0 else b""
Finalizes processing . If some data are kept in the internal state they would be processed and returned .
59,135
def _clean_ctx(self):
    """Clean up and free the cipher context, marking the cipher finished."""
    try:
        if self.ctx is not None:
            self.__ctxcleanup(self.ctx)
            libcrypto.EVP_CIPHER_CTX_free(self.ctx)
            del self.ctx
    except AttributeError:
        # ctx already gone — nothing to release
        pass
    self.cipher_finalized = True
Cleans up cipher ctx and deallocates it
59,136
def set_default(eng, algorithms=0xFFFF):
    """Make *eng* the default engine for all algorithms it supports."""
    global default
    if not isinstance(eng, Engine):
        eng = Engine(eng)
    libcrypto.ENGINE_set_default(eng.ptr, c_int(algorithms))
    default = eng
Sets specified engine as default for all algorithms supported by it
59,137
def from_keyed_iterable(iterable, key, filter_func=None):
    """Group *iterable*'s elements into a dict keyed by attribute *key*.

    Elements missing the attribute raise RuntimeError; when *filter_func*
    is given, only elements it accepts are kept.
    """
    grouped = {}
    for item in iterable:
        try:
            attr_value = getattr(item, key)
        except AttributeError:
            raise RuntimeError("{} does not have the keyed attribute: {}"
                               .format(item, key))
        if filter_func is None or filter_func(item):
            grouped.setdefault(attr_value, []).append(item)
    return grouped
Construct a dictionary out of an iterable using an attribute name as the key . Optionally provide a filter function to determine what should be kept in the dictionary .
59,138
def subtract_by_key(dict_a, dict_b):
    """Return the entries of *dict_a* whose keys are absent from *dict_b*."""
    return {key: value for key, value in dict_a.items() if key not in dict_b}
given two dicts a and b this function returns c = a - b where a - b is defined as the key difference between a and b .
59,139
def winnow_by_keys(dct, keys=None, filter_func=None):
    """Split *dct* into (has, has_not) using key membership or *filter_func*.

    With neither selector given, everything lands in the has_not side.
    """
    accepted = {}
    rejected = {}
    for key, value in dct.items():
        if keys is not None:
            passes = key in keys
        elif filter_func is not None:
            passes = filter_func(key)
        else:
            passes = False
        (accepted if passes else rejected)[key] = value
    return WinnowedResult(accepted, rejected)
separates a dict into has - keys and not - has - keys pairs using either a list of keys or a filtering function .
59,140
def flat_map(iterable, func):
    """Apply *func* to each item and concatenate the resulting iterables."""
    out = []
    for item in iterable:
        chunk = func(item)
        if len(chunk) > 0:
            out.extend(chunk)
    return out
func must take an item and return an iterable that contains that item. This is flatmap in the classic sense.
59,141
def product(sequence, initial=1):
    """Multiply all items of *sequence* together, starting from *initial*.

    The multiplicative analogue of the built-in sum(). Raises TypeError
    for non-iterable input.
    """
    # BUG FIX: collections.Iterable was removed in Python 3.10; use the
    # ABC from collections.abc (falling back for very old interpreters).
    abc = getattr(collections, "abc", collections)
    if not isinstance(sequence, abc.Iterable):
        raise TypeError("'{}' object is not iterable".format(
            type(sequence).__name__))
    return reduce(operator.mul, sequence, initial)
like the built - in sum but for multiplication .
59,142
def date_from_string(string, format_string=None):
    """Parse *string* into a datetime.date.

    *format_string* may be a single strptime format or an iterable of
    candidate formats; by default several common formats are tried in
    order. Raises ValueError when nothing matches.
    """
    if isinstance(format_string, str):
        return datetime.datetime.strptime(string, format_string).date()
    if format_string is None:
        format_string = ["%Y-%m-%d", "%m-%d-%Y", "%m/%d/%Y", "%d/%m/%Y"]
    for candidate in format_string:
        try:
            return datetime.datetime.strptime(string, candidate).date()
        except ValueError:
            continue
    raise ValueError("Could not produce date from string: {}".format(string))
Runs through a few common string formats for datetimes and attempts to coerce them into a datetime . Alternatively format_string can provide either a single string to attempt or an iterable of strings to attempt .
59,143
def to_datetime(plain_date, hours=0, minutes=0, seconds=0, ms=0):
    """Promote a datetime.date to datetime.datetime at the given time of day.

    A datetime.datetime input is returned unchanged.
    """
    if isinstance(plain_date, datetime.datetime):
        return plain_date
    return datetime.datetime(plain_date.year, plain_date.month,
                             plain_date.day, hours, minutes, seconds, ms)
given a datetime . date gives back a datetime . datetime
59,144
def get_containing_period(cls, *periods):
    """Return a TimePeriod that most closely contains all *periods*.

    A None bound means "unbounded" on that side and propagates to the
    result.
    """
    if not all(isinstance(p, TimePeriod) for p in periods):
        raise TypeError("periods must all be TimePeriods: {}".format(periods))
    upper = datetime.datetime.min
    lower = datetime.datetime.max
    for p in periods:
        if p._latest is None:
            upper = None
        elif upper is not None and p._latest > upper:
            upper = p._latest
        if p._earliest is None:
            lower = None
        elif lower is not None and p._earliest < lower:
            lower = p._earliest
    return TimePeriod(lower, upper)
Given a bunch of TimePeriods return a TimePeriod that most closely contains them .
59,145
def get_user_password(env, param, force=False):
    """Retrieve the stored credential for a keyring entry for display.

    Returns (username, password) on success, False when no password is
    stored, or None when the user declines the display confirmation.
    """
    username = utils.assemble_username(env, param)
    if not utils.confirm_credential_display(force):
        return
    password = password_get(username)
    if not password:
        return False
    return (username, password)
Allows the user to print the credential for a particular keyring entry to the screen
59,146
def password_get(username=None):
    """Fetch the named credential from the 'supernova' keyring.

    Raises LookupError with setup instructions when no entry exists.
    """
    password = keyring.get_password('supernova', username)
    if password is None:
        parts = tuple(username.split(':'))
        msg = ("Couldn't find a credential for {0}:{1}. You need to set one "
               "with: supernova-keyring -s {0} {1}").format(*parts)
        raise LookupError(msg)
    return password.encode('ascii')
Retrieves a password from the keychain based on the environment and configuration parameter pair .
59,147
def set_user_password(environment, parameter, password):
    """Store *password* for the environment:parameter pair in the keyring."""
    return password_set('%s:%s' % (environment, parameter), password)
Sets a user's password in the keyring storage.
59,148
def password_set(username=None, password=None):
    """Store a credential in the 'supernova' keychain; True on success."""
    # keyring.set_password returns None on success
    return keyring.set_password('supernova', username, password) is None
Stores a password in a keychain for a particular environment and configuration parameter pair .
59,149
def prep_shell_environment(nova_env, nova_creds):
    """Build a dict of decoded credential variables for the shell env."""
    fresh = {}
    for var, val in prep_nova_creds(nova_env, nova_creds):
        if type(val) == six.binary_type:
            val = val.decode()
        fresh[var] = val
    return fresh
Appends new variables to the current shell environment temporarily .
59,150
def prep_nova_creds(nova_env, nova_creds):
    """Collect and normalize novaclient config options for *nova_env*.

    Returns a list of (PARAM, credential) tuples; USE_KEYRING values are
    resolved via pull_env_credential(). Raises KeyError for an unknown
    environment and LookupError for an empty credential.
    """
    try:
        raw_creds = dict(nova_creds.get('DEFAULT', {}), **nova_creds[nova_env])
    except KeyError:
        raise KeyError("{0} was not found in your supernova configuration "
                       "file".format(nova_env))
    proxy_re = re.compile(r"(^http_proxy|^https_proxy)")
    creds = []
    for param, value in raw_creds.items():
        # proxy variables keep their lowercase names
        if not proxy_re.match(param):
            param = param.upper()
        if not hasattr(value, 'startswith'):
            continue
        if value.startswith("USE_KEYRING"):
            username, credential = pull_env_credential(nova_env, param, value)
        else:
            credential = value.strip("\"'")
        if not credential:
            raise LookupError("No matching credentials found in keyring")
        creds.append((param, credential))
    return creds
Finds relevant config options in the supernova config and cleans them up for novaclient .
59,151
def load_config(config_file_override=False):
    """Load and merge supernova configuration from file and directory sources.

    Raises Exception when no configuration source can be found or when the
    main configuration file cannot be parsed; unparsable files in the
    config directory are skipped with a warning.
    """
    supernova_config = get_config_file(config_file_override)
    supernova_config_dir = get_config_directory(config_file_override)
    if not supernova_config and not supernova_config_dir:
        raise Exception("Couldn't find a valid configuration file to parse")
    nova_creds = ConfigObj()
    if supernova_config:
        try:
            nova_creds.merge(ConfigObj(supernova_config))
        except Exception:
            # BUG FIX: previously raised a bare string, which is itself a
            # TypeError at runtime ("exceptions must derive from BaseException")
            raise Exception("There's an error in your configuration file")
    if supernova_config_dir:
        for dir_file in os.listdir(supernova_config_dir):
            full_path = ''.join((supernova_config_dir, dir_file))
            try:
                nova_creds.merge(ConfigObj(full_path))
            except Exception:
                # BUG FIX: message used a %s placeholder with str.format(),
                # so the file name was never substituted
                print("Skipping '{0}', Parsing Error.".format(full_path))
    create_dynamic_configs(nova_creds)
    return nova_creds
Pulls the supernova configuration file and reads it
59,152
def get_config_file(override_files=False):
    """Return the most specific existing config file path, or False.

    *override_files*, when given, must be a single path string.
    """
    if override_files:
        if not isinstance(override_files, six.string_types):
            raise Exception("Config file override must be a string")
        possible_configs = [override_files]
    else:
        xdg_config_home = (os.environ.get('XDG_CONFIG_HOME') or
                           os.path.expanduser('~/.config'))
        possible_configs = [os.path.join(xdg_config_home, "supernova"),
                            os.path.expanduser("~/.supernova"),
                            ".supernova"]
    # iterate from least to most specific; last match wins via early return
    for config_file in reversed(possible_configs):
        if os.path.isfile(config_file):
            return config_file
    return False
Looks for the most specific configuration file available . An override can be provided as a string if needed .
59,153
def get_config_directory(override_files=False):
    """Return the most specific existing config directory path, or False."""
    if override_files:
        candidates = [override_files]
    else:
        xdg_config_home = (os.environ.get('XDG_CONFIG_HOME') or
                           os.path.expanduser('~/.config'))
        candidates = [os.path.join(xdg_config_home, "supernova.d/"),
                      os.path.expanduser("~/.supernova.d/"),
                      ".supernova.d/"]
    for config_dir in reversed(candidates):
        if os.path.isdir(config_dir):
            return config_dir
    return False
Looks for the most specific configuration directory possible in order to load individual configuration files .
59,154
def execute_executable(nova_args, env_vars):
    """Run the user's executable, wait for it, and return the process.

    stdout passes straight through; stderr is captured for later handling.
    """
    process = subprocess.Popen(nova_args,
                               stdout=sys.stdout,
                               stderr=subprocess.PIPE,
                               env=env_vars)
    process.wait()
    return process
Executes the executable given by the user .
59,155
def check_for_debug(supernova_args, nova_args):
    """Prepend the appropriate debug flag when debugging is requested."""
    if supernova_args['debug']:
        # heat takes -d rather than --debug
        flag = '-d ' if supernova_args['executable'] == 'heat' else '--debug '
        nova_args.insert(0, flag)
    return nova_args
If the user wanted to run the executable with debugging enabled we need to apply the correct arguments to the executable .
59,156
def check_for_executable(supernova_args, env_vars):
    """Resolve the executable: CLI choice wins, then OS_EXECUTABLE, then nova."""
    if supernova_args.get('executable', 'default') != 'default':
        return supernova_args
    supernova_args['executable'] = env_vars.get('OS_EXECUTABLE', 'nova')
    return supernova_args
It's possible that a user might set their custom executable via an environment variable. If we detect one, we should add it to supernova's arguments ONLY IF an executable wasn't set on the command line. The command line executable must take priority.
59,157
def check_for_bypass_url(raw_creds, nova_args):
    """Prepend --bypass-url arguments when the config provides BYPASS_URL."""
    if 'BYPASS_URL' in raw_creds:
        nova_args = ['--bypass-url', raw_creds['BYPASS_URL']] + nova_args
    return nova_args
Return a list of extra args that need to be passed on cmdline to nova .
59,158
def run_command(nova_creds, nova_args, supernova_args):
    """Prepare the environment, run the executable, and return its exit code."""
    nova_env = supernova_args['nova_env']
    nova_args = copy.copy(nova_args)
    env_vars = os.environ.copy()
    env_vars.update(credentials.prep_shell_environment(nova_env, nova_creds))
    nova_args = check_for_bypass_url(nova_creds[nova_env], nova_args)
    supernova_args = check_for_executable(supernova_args, env_vars)
    nova_args = check_for_debug(supernova_args, nova_args)
    if not supernova_args.get('quiet'):
        msg = "Running %s against %s..." % (supernova_args.get('executable'),
                                            nova_env)
        click.echo("[%s] %s " % (click.style('SUPERNOVA', fg='green'), msg))
    nova_args.insert(0, supernova_args['executable'])
    nova_args = [arg.strip() for arg in nova_args]
    process = execute_executable(nova_args, env_vars)
    if not supernova_args.get('quiet'):
        handle_stderr(process.stderr)
    return process.returncode
Sets the environment variables for the executable runs the executable and handles the output .
59,159
def check_environment_presets():
    """Warn about NOVA_/OS_ variables already set in the environment.

    Returns True when none are present, False otherwise.
    """
    presets = [name for name in os.environ.copy().keys()
               if name.startswith(('NOVA_', 'OS_'))]
    if not presets:
        return True
    click.echo("_" * 80)
    click.echo("*WARNING* Found existing environment variables that may "
               "cause conflicts:")
    for preset in presets:
        click.echo("  - %s" % preset)
    click.echo("_" * 80)
    return False
Checks for environment variables that can cause problems with supernova
59,160
def get_envs_in_group(group_name, nova_creds):
    """List environments whose SUPERNOVA_GROUP matches *group_name*.

    The special group name 'all' matches every environment.
    """
    matches = []
    for env, settings in nova_creds.items():
        groups = settings.get('SUPERNOVA_GROUP', [])
        if hasattr(groups, 'startswith'):  # a bare string means one group
            groups = [groups]
        if group_name in groups or group_name == 'all':
            matches.append(env)
    return matches
Takes a group_name and finds any environments that have a SUPERNOVA_GROUP configuration line that matches the group_name .
59,161
def is_valid_group(group_name, nova_creds):
    """Return True when *group_name* appears in any SUPERNOVA_GROUP option
    (the pseudo-group 'all' is always valid)."""
    known = ['all']
    for settings in nova_creds.values():
        groups = settings.get('SUPERNOVA_GROUP', [])
        if hasattr(groups, 'startswith'):  # bare string → single group
            groups = [groups]
        known.extend(groups)
    return group_name in known
Checks to see if the configuration file contains a SUPERNOVA_GROUP configuration option .
59,162
def rm_prefix(name):
    """Strip a leading 'nova_', 'novaclient_' or 'os_' prefix from *name*."""
    for prefix in ('nova_', 'novaclient_', 'os_'):
        if name.startswith(prefix):
            return name[len(prefix):]
    return name
Removes nova_ os_ novaclient_ prefix from string .
59,163
def __pad(strdata):
    """Wrap *strdata* in a JSONP callback when the request provides one."""
    callback = request.args.get('callback')
    if callback:
        return "%s(%s);" % (callback, strdata)
    return strdata
Pads strdata with a Request s callback argument if specified or does nothing .
59,164
def __dumps(*args, **kwargs):
    """Serialize args/kwargs as JSON.

    A single positional argument is serialized as-is (allowing a top-level
    array); otherwise the arguments build a dict. Output is pretty-printed
    for non-XHR requests when JSONIFY_PRETTYPRINT_REGULAR is enabled.
    """
    indent = None
    if (current_app.config.get('JSONIFY_PRETTYPRINT_REGULAR', False)
            and not request.is_xhr):
        indent = 2
    # BUG FIX: compared the length with 'is' (identity) instead of '=='
    payload = args[0] if len(args) == 1 else dict(*args, **kwargs)
    return json.dumps(payload, indent=indent)
Serializes args and kwargs as JSON . Supports serializing an array as the top - level object if it is the only argument .
59,165
def update_type_lookups(self):
    """Rebuild the type <-> type-string lookup tables from parallel lists."""
    self.type_to_typestring = {tp: ts for tp, ts in
                               zip(self.types, self.python_type_strings)}
    self.typestring_to_type = {ts: tp for tp, ts in
                               zip(self.types, self.python_type_strings)}
Update type and typestring lookup dicts .
59,166
def get_type_string(self, data, type_string):
    """Return *type_string* if given, else look up the string for type(data).

    Falls back to the fully qualified 'module.name' key when the bare type
    is not in the lookup table.
    """
    if type_string is not None:
        return type_string
    tp = type(data)
    try:
        return self.type_to_typestring[tp]
    except KeyError:
        return self.type_to_typestring[tp.__module__ + '.' + tp.__name__]
Gets type string .
59,167
def write(self, f, grp, name, data, type_string, options):
    """Base writer: this type cannot be written, so always raise.

    Raises NotImplementedError naming the unsupported type of *data*.
    """
    # BUG FIX: the message previously lost its apostrophe through
    # accidental string concatenation and rendered "Cant write data type"
    raise NotImplementedError("Can't write data type: " + str(type(data)))
Writes an object s metadata to file .
59,168
def write_metadata(self, f, dsetgrp, data, type_string, options,
                   attributes=None):
    """Write Python type metadata attributes onto *dsetgrp*."""
    attrs = dict() if attributes is None else attributes
    if options.store_python_metadata and 'Python.Type' not in attrs:
        attrs['Python.Type'] = ('string',
                                self.get_type_string(data, type_string))
    set_attributes_all(dsetgrp, attrs, discard_others=True)
Writes an object to file .
59,169
def process_path(pth):
    """Process a path into a (groupname, targetname) pair.

    *pth* may be str, bytes (decoded as UTF-8), or an iterable of
    str/bytes parts (each part is escaped and joined). The result is
    POSIX-normalized; an empty group maps to '/' and an empty target
    to '.'.

    Raises TypeError for any other input.
    """
    if isinstance(pth, bytes):
        p = pth.decode('utf-8')
    elif (sys.hexversion >= 0x03000000 and isinstance(pth, str)) \
            or (sys.hexversion < 0x03000000 and isinstance(pth, unicode)):
        p = pth
    # Bug fix: collections.Iterable was removed in Python 3.10; use
    # collections.abc.Iterable (getattr keeps the Python 2 fallback).
    elif not isinstance(pth, getattr(collections, 'abc',
                                     collections).Iterable):
        raise TypeError('p must be str, bytes, or an iterable '
                        'solely of one of those two.')
    else:
        if sys.hexversion >= 0x03000000:
            if not all(isinstance(s, (bytes, str)) for s in pth):
                raise TypeError('Elements of p must be str or bytes.')
        else:
            if not all(isinstance(s, (str, unicode)) for s in pth):
                raise TypeError('Elements of p must be str or '
                                'unicode.')
        parts = []
        for s in pth:
            if isinstance(s, bytes):
                s = s.decode('utf-8')
            parts.append(escape_path(s))
        p = posixpath.join(*parts)
    path = posixpath.normpath(p)
    groupname = posixpath.dirname(path)
    targetname = posixpath.basename(path)
    if len(groupname) == 0:
        groupname = '/'
    if len(targetname) == 0:
        targetname = '.'
    return groupname, targetname
Processes paths .
59,170
def write_object_array(f, data, options):
    """Write an array of objects recursively into the references group.

    Each element of *data* is written under a fresh random name inside
    ``options.group_for_references``, and an array of HDF5 object
    references to those datasets (same shape as *data*) is returned.
    Elements that fail to write get a reference to the canonical empty
    dataset 'a' instead.
    """
    ref_dtype = h5py.special_dtype(ref=h5py.Reference)
    # One reference per element, filled in below.
    data_refs = np.zeros(shape=data.shape, dtype='object')
    # Make sure the references group exists and actually is a Group;
    # anything else with that name is deleted and replaced.
    if options.group_for_references not in f:
        f.create_group(options.group_for_references)
    grp2 = f[options.group_for_references]
    if not isinstance(grp2, h5py.Group):
        del f[options.group_for_references]
        f.create_group(options.group_for_references)
        grp2 = f[options.group_for_references]
    # Ensure the canonical empty dataset 'a' exists: uint64 [0, 0],
    # tagged MATLAB_class='canonical empty' and MATLAB_empty=1. Rebuild
    # it if it is missing or malformed in any way.
    try:
        dset_a = grp2['a']
        if dset_a.shape != (2,) \
                or not dset_a.dtype.name.startswith('uint') \
                or np.any(dset_a[...] != np.uint64([0, 0])) \
                or get_attribute_string(dset_a, 'MATLAB_class') != \
                'canonical empty' \
                or get_attribute(dset_a, 'MATLAB_empty') != 1:
            del grp2['a']
            dset_a = grp2.create_dataset('a', data=np.uint64([0, 0]))
            set_attribute_string(dset_a, 'MATLAB_class', 'canonical empty')
            set_attribute(dset_a, 'MATLAB_empty', np.uint8(1))
    except:
        # NOTE(review): bare except -- any failure above (including a
        # missing 'a') falls through to recreating the dataset.
        dset_a = grp2.create_dataset('a', data=np.uint64([0, 0]))
        set_attribute_string(dset_a, 'MATLAB_class', 'canonical empty')
        set_attribute(dset_a, 'MATLAB_empty', np.uint8(1))
    grp2name = grp2.name
    for index, x in np.ndenumerate(data):
        # Write each element under a fresh random 16-hex-digit name.
        name_for_ref = next_unused_name_in_group(grp2, 16)
        write_data(f, grp2, name_for_ref, x, None, options)
        try:
            dset = grp2[name_for_ref]
            data_refs[index] = dset.ref
            if options.matlab_compatible:
                set_attribute_string(dset, 'H5PATH', grp2name)
            else:
                del_attribute(dset, 'H5PATH')
        except:
            # Element could not be written/read back: point its slot at
            # the canonical empty dataset instead.
            data_refs[index] = dset_a.ref
    return data_refs.astype(ref_dtype).copy()
Writes an array of objects recursively .
59,171
def read_object_array(f, data, options):
    """Dereference and recursively read an array of object references.

    Returns an object array of the same shape as *data*, each element
    read from the dataset/group its reference points at.
    """
    out = np.zeros(shape=data.shape, dtype='object')
    for idx, ref in np.ndenumerate(data):
        out[idx] = read_data(f, None, None, options, dsetgrp=f[ref])
    return out
Reads an array of objects recursively .
59,172
def next_unused_name_in_group(grp, length):
    """Return a random hex name of *length* characters not already in
    *grp*, regenerating until an unused one is found."""
    fmt = '%0{0}x'.format(length)
    while True:
        candidate = fmt % random.getrandbits(length * 4)
        if candidate not in grp:
            return candidate
Gives a name that isn t used in a Group .
59,173
def convert_numpy_str_to_uint16(data):
    """Convert a numpy unicode string (scalar or array) to UTF-16,
    returned as a numpy.uint16 array.

    Each element's UTF-16 code units (native byte order) become uint16
    values along the last axis. Empty input yields an empty array.
    """
    if data.nbytes == 0:
        return np.uint16([])
    # Encode in the native byte order so the uint16 reinterpretation
    # below is valid.
    if sys.byteorder == 'little':
        codec = 'UTF-16LE'
    else:
        codec = 'UTF-16BE'
    cdata = np.char.encode(np.atleast_1d(data), codec)
    shape = list(cdata.shape)
    shape[-1] *= (cdata.dtype.itemsize // 2)
    # Bug fix: ndarray.tostring() was deprecated and removed in
    # NumPy 2.0; tobytes() is the exact replacement.
    return np.ndarray(shape=shape, dtype='uint16',
                      buffer=cdata.tobytes())
Converts a ``numpy.unicode_`` to UTF-16 in ``numpy.uint16`` form.
59,174
def convert_numpy_str_to_uint32(data):
    """Reinterpret a numpy unicode string (scalar or array) as its
    UTF-32 code points in numpy.uint32 form.

    The last axis expands by the number of characters per element;
    empty input yields an empty array.
    """
    if data.nbytes == 0:
        return np.uint32([])
    new_shape = list(np.atleast_1d(data).shape)
    new_shape[-1] = new_shape[-1] * (data.dtype.itemsize // 4)
    flat = data.flatten()
    return flat.view(np.uint32).reshape(tuple(new_shape))
Converts a ``numpy.unicode_`` to its ``numpy.uint32`` representation.
59,175
def decode_complex(data, complex_names=(None, None)):
    """Decode a two-field structured array into a complex array.

    Recognizes common real/imag field-name aliases (or the explicit
    names in *complex_names*). Input that is not a two-field structured
    array, or whose fields can't both be identified, is returned as-is.
    """
    if data.dtype.fields is None:
        return data
    field_names = list(data.dtype.fields)
    if len(field_names) != 2:
        return data
    real_aliases = ('r', 're', 'real')
    imag_aliases = ('i', 'im', 'imag', 'imaginary')
    real_name, imag_name = complex_names
    for name in field_names:
        low = name.lower()
        if low in real_aliases:
            real_name = name
        elif low in imag_aliases:
            imag_name = name
    if real_name is None or imag_name is None:
        return data
    # Promote to at least complex64, keeping the field precision.
    ctype = np.result_type(data[real_name].dtype,
                           data[imag_name].dtype, 'complex64').type
    cdata = ctype(data[real_name])
    cdata.imag = data[imag_name]
    return cdata
Decodes possibly complex data read from an HDF5 file .
59,176
def encode_complex(data, complex_names):
    """View complex data as a two-field structured array whose fields
    carry the names given in *complex_names* (real, imag)."""
    base_name = data.dtype.name
    if base_name.startswith('complex'):
        # complexN stores two floatN/2 halves.
        base_name = 'float' + str(int(float(base_name[7:]) / 2))
    paired = np.dtype([(complex_names[0], base_name),
                       (complex_names[1], base_name)])
    return data.view(paired).copy()
Encodes complex data to having arbitrary complex field names .
59,177
def convert_attribute_to_string(value):
    """Convert an attribute value to a string.

    Returns None for None and for unsupported types; bytes are decoded
    with the default codec.
    """
    if value is None:
        return value
    if (sys.hexversion >= 0x03000000 and isinstance(value, str)) \
            or (sys.hexversion < 0x03000000 and isinstance(value, unicode)):
        # np.str_ / np.unicode_ are str subclasses and land here too.
        return value
    if isinstance(value, bytes):
        # np.bytes_ is a bytes subclass and lands here too.
        return value.decode()
    # Bug fix: the original also tested isinstance(value, np.unicode_),
    # which raises AttributeError on NumPy 2.0 whenever an unsupported
    # type reached it; those numpy branches were unreachable anyway
    # (subclasses are caught by the str/bytes checks above).
    return None
Convert an attribute value to a string .
59,178
def set_attribute(target, name, value):
    """Set attribute *name* on a Dataset or Group to *value*.

    Tries an in-place modify first and falls back to (re-)creating the
    attribute if that fails (e.g. the attribute does not exist yet or
    is incompatible).
    """
    try:
        target.attrs.modify(name, value)
    except Exception:
        # Bug fix: narrowed the original bare `except:` so that
        # KeyboardInterrupt/SystemExit are no longer swallowed.
        target.attrs.create(name, value)
Sets an attribute on a Dataset or Group .
59,179
def set_attribute_string(target, name, value):
    """Set attribute *name* on a Dataset or Group to the bytes form of
    the string *value*."""
    encoded = np.bytes_(value)
    set_attribute(target, name, encoded)
Sets an attribute to a string on a Dataset or Group .
59,180
def set_attribute_string_array(target, name, string_list):
    """Set attribute *name* to a variable-length string array built
    from *string_list*."""
    converted = [convert_to_str(s) for s in string_list]
    if sys.hexversion >= 0x03000000:
        vlen_type = h5py.special_dtype(vlen=str)
    else:
        vlen_type = h5py.special_dtype(vlen=unicode)
    target.attrs.create(name, converted, dtype=vlen_type)
Sets an attribute to an array of string on a Dataset or Group .
59,181
def set_attributes_all(target, attributes, discard_others=True):
    """Set attributes in bulk and optionally discard others.

    *attributes* maps each name to a ``(kind, value)`` pair, where kind
    is 'string_array', 'string', or anything else for a plain value.
    With *discard_others*, attributes of *target* not named in
    *attributes* are deleted.
    """
    attrs = target.attrs
    existing = dict(attrs.items())
    # Variable-length string dtype used for string arrays.
    # (Cleanup: dropped the stray `dtype =` dead alias the original
    # bound on this line.)
    if sys.hexversion >= 0x03000000:
        str_arr_dtype = h5py.special_dtype(vlen=str)
    else:
        str_arr_dtype = h5py.special_dtype(vlen=unicode)
    for k, (kind, value) in attributes.items():
        if kind == 'string_array':
            attrs.create(k, [convert_to_str(s) for s in value],
                         dtype=str_arr_dtype)
        else:
            if kind == 'string':
                value = np.bytes_(value)
            if k not in existing:
                attrs.create(k, value)
            else:
                try:
                    if value.dtype == existing[k].dtype \
                            and value.shape == existing[k].shape:
                        attrs.modify(k, value)
                    # NOTE(review): when dtype or shape differ, the old
                    # value is left in place -- confirm this is intended.
                except Exception:
                    # Bug fix: narrowed the original bare `except:`.
                    attrs.create(k, value)
    if discard_others:
        for k in set(existing) - set(attributes):
            del attrs[k]
Set Attributes in bulk and optionally discard others .
59,182
def find_thirdparty_marshaller_plugins():
    """Find, but don't load, all third-party marshaller plugins.

    Returns a dict mapping each supported marshaller API version to a
    dict of matching entry points keyed by module name.
    """
    all_plugins = tuple(pkg_resources.iter_entry_points(
        'hdf5storage.marshallers.plugins'))
    found = dict()
    for ver in supported_marshaller_api_versions():
        found[ver] = {p.module_name: p
                      for p in all_plugins if p.name == ver}
    return found
Find but don t load all third party marshaller plugins .
59,183
def savemat(file_name, mdict, appendmat=True, format='7.3',
            oned_as='row', store_python_metadata=True,
            action_for_matlab_incompatible='error',
            marshaller_collection=None, truncate_existing=False,
            truncate_invalid_matlab=False, **keywords):
    """Save a dictionary of python types to a MATLAB MAT file.

    Formats older than 7.3 are delegated to scipy.io.savemat; 7.3 files
    are written through this package with MATLAB compatibility enabled.
    With *appendmat*, '.mat' is appended to *file_name* if missing.
    """
    if float(format) < 7.3:
        import scipy.io
        scipy.io.savemat(file_name, mdict, appendmat=appendmat,
                         format=format, oned_as=oned_as, **keywords)
        return
    target = file_name
    if appendmat and not target.endswith('.mat'):
        target = target + '.mat'
    opts = Options(
        store_python_metadata=store_python_metadata,
        matlab_compatible=True,
        oned_as=oned_as,
        action_for_matlab_incompatible=action_for_matlab_incompatible,
        marshaller_collection=marshaller_collection)
    writes(mdict=mdict, filename=target,
           truncate_existing=truncate_existing,
           truncate_invalid_matlab=truncate_invalid_matlab,
           options=opts)
Save a dictionary of python types to a MATLAB MAT file .
59,184
def loadmat(file_name, mdict=None, appendmat=True, variable_names=None,
            marshaller_collection=None, **keywords):
    """Load data from a MATLAB MAT file.

    v7.3 files are read with h5py; anything h5py cannot open falls
    through to scipy.io.loadmat for the older formats. When
    *variable_names* is given, only those variables are read. Results
    are merged into *mdict* (a new dict if None) and returned.
    """
    try:
        options = Options(marshaller_collection=marshaller_collection)
        # Add the '.mat' extension unless it is already present or
        # appendmat is disabled.
        if appendmat and not file_name.endswith('.mat'):
            filename = file_name + '.mat'
        else:
            filename = file_name
        if variable_names is None:
            # Read every top-level entry except the references group.
            data = dict()
            with h5py.File(filename, mode='r') as f:
                for k in f:
                    if f[k].name != options.group_for_references:
                        try:
                            data[utilities.unescape_path(k)] = \
                                utilities.read_data(f, f, k, options)
                        except:
                            # NOTE(review): unreadable variables are
                            # silently skipped -- deliberate best effort.
                            pass
        else:
            values = reads(paths=variable_names, filename=filename,
                           options=options)
            data = dict()
            for i, name in enumerate(variable_names):
                data[name] = values[i]
        if mdict is None:
            mdict = dict()
        for k, v in data.items():
            mdict[k] = v
        return mdict
    except OSError:
        # h5py raises OSError for non-HDF5 files: delegate to scipy,
        # which handles the pre-7.3 MAT formats.
        import scipy.io
        return scipy.io.loadmat(file_name, mdict, appendmat=appendmat,
                                variable_names=variable_names,
                                **keywords)
Loads data from a MATLAB MAT file.
59,185
def _update_marshallers(self):
    """Update the full marshaller list and the lookup tables.

    Rebuilds ``self._marshallers`` from the builtin/plugin/user lists in
    priority order, works out which marshallers have their required
    modules available and already imported, and rebuilds the type,
    type-string, and MATLAB-class lookup dicts (the first marshaller to
    claim a key wins).
    """
    # Combine the marshaller lists in priority order.
    self._marshallers = []
    for v in self._priority:
        if v == 'builtin':
            self._marshallers.extend(self._builtin_marshallers)
        elif v == 'plugin':
            self._marshallers.extend(self._plugin_marshallers)
        elif v == 'user':
            self._marshallers.extend(self._user_marshallers)
        else:
            raise ValueError('priority attribute has an illegal '
                             'element value.')
    # For each marshaller, record whether its required modules are
    # present on the system, and whether they are already imported.
    self._has_required_modules = len(self._marshallers) * [False]
    self._imported_required_modules = len(self._marshallers) * [False]
    for i, m in enumerate(self._marshallers):
        try:
            for name in m.required_parent_modules:
                # NOTE(review): pkgutil.find_loader is deprecated since
                # Python 3.12; importlib.util.find_spec is the
                # replacement.
                if name not in sys.modules \
                        and pkgutil.find_loader(name) is None:
                    raise ImportError('module not present')
        except ImportError:
            self._has_required_modules[i] = False
        except:
            raise
        else:
            self._has_required_modules[i] = True
        # Modules that are unavailable can never be imported.
        if not self._has_required_modules[i]:
            self._imported_required_modules[i] = False
            continue
        try:
            for name in m.required_modules:
                if name not in sys.modules:
                    raise ImportError('module not loaded yet.')
        except ImportError:
            # Either defer importing (lazy loading) or import now.
            if self._lazy_loading:
                self._imported_required_modules[i] = False
            else:
                success = self._import_marshaller_modules(m)
                self._has_required_modules[i] = success
                self._imported_required_modules[i] = success
        except:
            raise
        else:
            self._imported_required_modules[i] = True
    # Rebuild the lookup tables; earlier (higher-priority) marshallers
    # keep any type / type string / MATLAB class they claim first.
    types_as_str = set()
    self._types = dict()
    self._type_strings = dict()
    self._matlab_classes = dict()
    for i, m in enumerate(self._marshallers):
        for tp in m.types:
            if isinstance(tp, str):
                tp_as_str = tp
            else:
                tp_as_str = tp.__module__ + '.' + tp.__name__
            if tp_as_str not in types_as_str:
                self._types[tp_as_str] = i
                types_as_str.add(tp_as_str)
        for type_string in m.python_type_strings:
            if type_string not in self._type_strings:
                self._type_strings[type_string] = i
        for matlab_class in m.matlab_classes:
            if matlab_class not in self._matlab_classes:
                self._matlab_classes[matlab_class] = i
Update the full marshaller list and other data structures .
59,186
def _import_marshaller_modules ( self , m ) : try : for name in m . required_modules : if name not in sys . modules : if _has_importlib : importlib . import_module ( name ) else : __import__ ( name ) except ImportError : return False except : raise else : return True
Imports the modules required by the marshaller .
59,187
def get_marshaller_for_type(self, tp):
    """Get the marshaller handling type *tp* (class or dotted name).

    Returns (marshaller, imported) where imported says whether its
    required modules are loaded (importing them on demand); returns
    (None, False) when no marshaller handles *tp*.
    """
    key = tp if isinstance(tp, str) \
        else tp.__module__ + '.' + tp.__name__
    if key not in self._types:
        return None, False
    index = self._types[key]
    m = self._marshallers[index]
    if self._imported_required_modules[index]:
        return m, True
    if not self._has_required_modules[index]:
        return m, False
    ok = self._import_marshaller_modules(m)
    self._has_required_modules[index] = ok
    self._imported_required_modules[index] = ok
    return m, ok
Gets the appropriate marshaller for a type .
59,188
def get_marshaller_for_type_string(self, type_string):
    """Get the marshaller registered for *type_string*.

    Returns (marshaller, imported), importing required modules on
    demand; (None, False) when no marshaller is registered.
    """
    if type_string not in self._type_strings:
        return None, False
    index = self._type_strings[type_string]
    m = self._marshallers[index]
    if self._imported_required_modules[index]:
        return m, True
    if not self._has_required_modules[index]:
        return m, False
    ok = self._import_marshaller_modules(m)
    self._has_required_modules[index] = ok
    self._imported_required_modules[index] = ok
    return m, ok
Gets the appropriate marshaller for a type string .
59,189
def get_marshaller_for_matlab_class(self, matlab_class):
    """Get the marshaller registered for the MATLAB class string.

    Returns (marshaller, imported), importing required modules on
    demand; (None, False) when no marshaller is registered.
    """
    if matlab_class not in self._matlab_classes:
        return None, False
    index = self._matlab_classes[matlab_class]
    m = self._marshallers[index]
    if self._imported_required_modules[index]:
        return m, True
    if not self._has_required_modules[index]:
        return m, False
    ok = self._import_marshaller_modules(m)
    self._has_required_modules[index] = ok
    self._imported_required_modules[index] = ok
    return m, ok
Gets the appropriate marshaller for a MATLAB class string .
59,190
def new_node(self):
    """Create a blank node, register it, and return its id."""
    node_id = self.generate_node_id()
    self.nodes[node_id] = {'id': node_id, 'edges': [], 'data': {}}
    self._num_nodes += 1
    return node_id
Adds a new blank node to the graph . Returns the node id of the new node .
59,191
def new_edge(self, node_a, node_b, cost=1):
    """Add a directed edge from node_a to node_b with the given cost.

    Both endpoints must exist (NonexistentNodeError otherwise). The
    edge id is registered on node_a only, and is returned.
    """
    for node_id in (node_a, node_b):
        if node_id not in self.nodes:
            raise NonexistentNodeError(node_id)
    edge_id = self.generate_edge_id()
    self.edges[edge_id] = {'id': edge_id,
                           'vertices': (node_a, node_b),
                           'cost': cost,
                           'data': {}}
    self.nodes[node_a]['edges'].append(edge_id)
    self._num_edges += 1
    return edge_id
Adds a new edge from node_a to node_b that has a cost . Returns the edge id of the new edge .
59,192
def adjacent(self, node_a, node_b):
    """Return True iff an edge runs from node_a to node_b."""
    return node_b in self.neighbors(node_a)
Determines whether there is an edge from node_a to node_b . Returns True if such an edge exists otherwise returns False .
59,193
def edge_cost(self, node_a, node_b):
    """Return the cost of the first edge found from node_a to node_b,
    or +inf when no such edge exists."""
    target = (node_a, node_b)
    for edge_id in self.get_node(node_a)['edges']:
        edge = self.get_edge(edge_id)
        if edge['vertices'] == target:
            return edge['cost']
    return float('inf')
Returns the cost of moving between the edge that connects node_a to node_b . Returns + inf if no such edge exists .
59,194
def get_node(self, node_id):
    """Return the node object for *node_id*, raising
    NonexistentNodeError when it is unknown."""
    if node_id not in self.nodes:
        raise NonexistentNodeError(node_id)
    return self.nodes[node_id]
Returns the node object identified by node_id .
59,195
def get_edge(self, edge_id):
    """Return the edge object for *edge_id*, raising
    NonexistentEdgeError when it is unknown."""
    if edge_id not in self.edges:
        raise NonexistentEdgeError(edge_id)
    return self.edges[edge_id]
Returns the edge object identified by edge_id .
59,196
def delete_edge_by_nodes(self, node_a, node_b):
    """Remove every edge running from node_a to node_b."""
    node = self.get_node(node_a)
    # Collect first, then delete, so we never mutate the list while
    # scanning it.
    doomed = [eid for eid in node['edges']
              if self.get_edge(eid)['vertices'][1] == node_b]
    for eid in doomed:
        self.delete_edge_by_id(eid)
Removes all the edges from node_a to node_b from the graph .
59,197
def delete_node(self, node_id):
    """Remove node *node_id* and every edge incident to it."""
    node = self.get_node(node_id)
    # Bug fix: iterate over a copy. delete_edge_by_id appears to remove
    # the id from this same list, and mutating a list while iterating it
    # skips elements, orphaning some outgoing edges.
    # (TODO confirm delete_edge_by_id's exact mutation behavior.)
    for eid in list(node['edges']):
        self.delete_edge_by_id(eid)
    # Incoming edges: any edge whose destination is this node.
    incoming = [eid for eid, edge in list(self.edges.items())
                if edge['vertices'][1] == node_id]
    for eid in incoming:
        self.delete_edge_by_id(eid)
    del self.nodes[node_id]
    self._num_nodes -= 1
Removes the node identified by node_id from the graph .
59,198
def move_edge_source(self, edge_id, node_a, node_b):
    """Re-anchor edge *edge_id* so it originates at node_b instead of
    node_a (the destination is unchanged)."""
    edge = self.get_edge(edge_id)
    _, destination = edge['vertices']
    edge['vertices'] = (node_b, destination)
    self.get_node(node_a)['edges'].remove(edge_id)
    self.get_node(node_b)['edges'].append(edge_id)
Moves an edge originating from node_a so that it originates from node_b .
59,199
def get_edge_ids_by_node_ids(self, node_a, node_b):
    """Return the ids of all edges running from node_a to node_b
    (empty list when the nodes are not adjacent)."""
    if not self.adjacent(node_a, node_b):
        return []
    matches = []
    for eid in self.get_node(node_a)['edges']:
        if self.get_edge(eid)['vertices'][1] == node_b:
            matches.append(eid)
    return matches
Returns a list of edge ids connecting node_a to node_b .