idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
14,000
def get_log_events(self, user_id, page=0, per_page=50, sort=None,
                   include_totals=False):
    """Retrieve every log event for a specific user id."""
    request_params = {
        'per_page': per_page,
        'page': page,
        'include_totals': str(include_totals).lower(),
        'sort': sort,
    }
    return self.client.get(self._url('{}/logs'.format(user_id)),
                           params=request_params)
Retrieve every log event for a specific user id
14,001
def all(self, stage='login_success', enabled=True, fields=None,
        include_fields=True, page=None, per_page=None,
        include_totals=False):
    """Retrieves a list of all rules."""
    joined_fields = ','.join(fields) if fields else ''
    params = {
        'stage': stage,
        # Empty join result is normalized to None, as before.
        'fields': joined_fields or None,
        'include_fields': str(include_fields).lower(),
        'page': page,
        'per_page': per_page,
        'include_totals': str(include_totals).lower(),
    }
    if enabled is not None:
        params['enabled'] = str(enabled).lower()
    return self.client.get(self._url(), params=params)
Retrieves a list of all rules .
14,002
def get_failed_job(self, id):
    """Get failed job error details."""
    return self.client.get(self._url('{}/errors'.format(id)))
Get failed job error details
14,003
def get_results(self, job_id):
    """Get results of a job.

    Args:
        job_id: id of the job whose results are fetched.
    """
    # Consistency: use str.format for URL building like the sibling
    # endpoints in this module (was %-formatting).
    url = self._url('{}/results'.format(job_id))
    return self.client.get(url)
Get results of a job
14,004
def export_users(self, body):
    """Export all users to a file using a long running job."""
    url = self._url('users-exports')
    return self.client.post(url, data=body)
Export all users to a file using a long running job .
14,005
def import_users(self, connection_id, file_obj, upsert=False):
    """Imports users to a connection from a file."""
    payload = {'connection_id': connection_id,
               'upsert': str(upsert).lower()}
    return self.client.file_post(self._url('users-imports'),
                                 data=payload,
                                 files={'users': file_obj})
Imports users to a connection from a file .
14,006
def send_verification_email(self, body):
    """Send verification email."""
    url = self._url('verification-email')
    return self.client.post(url, data=body)
Send verification email .
14,007
def config(self, body):
    """Configure the email provider."""
    return self.client.post(self._url(), data=body)
Configure the email provider .
14,008
def userinfo(self, access_token):
    """Returns the user information based on the Auth0 access token.

    This endpoint only works if 'openid' was granted as a scope for
    the access_token.
    """
    headers = {'Authorization': 'Bearer {}'.format(access_token)}
    return self.get(url='https://{}/userinfo'.format(self.domain),
                    headers=headers)
Returns the user information based on the Auth0 access token . This endpoint will work only if openid was granted as a scope for the access_token .
14,009
def tokeninfo(self, jwt):
    """Returns the user profile based on the user's JWT (deprecated)."""
    warnings.warn("/tokeninfo will be deprecated in future releases",
                  DeprecationWarning)
    return self.post(url='https://{}/tokeninfo'.format(self.domain),
                     data={'id_token': jwt},
                     headers={'Content-Type': 'application/json'})
Returns the user profile based on the user's JWT.
14,010
def all(self, page=None, per_page=None, include_totals=False,
        extra_params=None):
    """Retrieves all grants."""
    # NOTE: when extra_params is given it is updated in place, matching
    # the original behavior.
    params = extra_params or {}
    params['page'] = page
    params['per_page'] = per_page
    params['include_totals'] = str(include_totals).lower()
    return self.client.get(self._url(), params=params)
Retrieves all grants .
14,011
def change_password(self, client_id, email, connection, password=None):
    """Asks to change a password for a given user."""
    body = {
        'client_id': client_id,
        'email': email,
        'password': password,
        'connection': connection,
    }
    return self.post(
        'https://{}/dbconnections/change_password'.format(self.domain),
        data=body,
        headers={'Content-Type': 'application/json'})
Asks to change a password for a given user .
14,012
def encrypt(self, k, a, m):
    """Encrypt according to the selected encryption and hashing functions.

    Splits ``k`` into a MAC key and an AES key, CBC-encrypts the
    PKCS#7-padded message ``m`` and tags it together with the
    additional data ``a``. Returns ``(iv, ciphertext, tag)``.
    """
    mac_key = k[:_inbytes(self.keysize)]
    enc_key = k[_inbytes(self.keysize):]
    iv = _randombits(self.blocksize)
    encryptor = Cipher(algorithms.AES(enc_key), modes.CBC(iv),
                       backend=self.backend).encryptor()
    padder = PKCS7(self.blocksize).padder()
    padded = padder.update(m) + padder.finalize()
    ciphertext = encryptor.update(padded) + encryptor.finalize()
    tag = self._mac(mac_key, a, iv, ciphertext)
    return (iv, ciphertext, tag)
Encrypt according to the selected encryption and hashing functions .
14,013
def encrypt(self, k, a, m):
    """Encrypt according to the selected encryption and hashing functions.

    AES-GCM with a 96-bit random IV; ``a`` is authenticated but not
    encrypted. Returns ``(iv, ciphertext, tag)``.
    """
    iv = _randombits(96)
    encryptor = Cipher(algorithms.AES(k), modes.GCM(iv),
                       backend=self.backend).encryptor()
    encryptor.authenticate_additional_data(a)
    ciphertext = encryptor.update(m) + encryptor.finalize()
    return (iv, ciphertext, encryptor.tag)
Encrypt according to the selected encryption and hashing functions .
14,014
def from_json(cls, key):
    """Creates a RFC 7517 JWK from the standard JSON format."""
    obj = cls()
    try:
        parsed = json_decode(key)
    except Exception as e:
        raise InvalidJWKValue(e)
    obj.import_key(**parsed)
    return obj
Creates a RFC 7517 JWK from the standard JSON format .
14,015
def export(self, private_key=True):
    """Exports the key in the standard JSON format.

    With private_key=True the full key material is exported, otherwise
    only the public part.
    """
    if private_key is True:
        return self._export_all()
    return self.export_public()
Exports the key in the standard JSON format . Exports the key regardless of type ; if private_key is False and the key is symmetric , an exception is raised .
14,016
def has_public(self):
    """Whether this JWK has an asymmetric public key.

    Returns:
        bool: False for symmetric keys or when no public value is
        present, True when at least one public value of the key type
        is set.
    """
    if self.is_symmetric:
        return False
    reg = JWKValuesRegistry[self._params['kty']]
    for value in reg:
        if reg[value].public and value in self._key:
            return True
    # Bug fix: the original fell off the end and implicitly returned
    # None when no public value was found; return an explicit bool.
    return False
Whether this JWK has an asymmetric Public key .
14,017
def get_curve(self, arg):
    """Gets the Elliptic Curve associated with the key.

    Raises InvalidJWKType for non-EC/OKP keys and InvalidJWKValue when
    ``arg`` names a different curve than the key's.
    """
    if self._params['kty'] not in ['EC', 'OKP']:
        raise InvalidJWKType('Not an EC or OKP key')
    crv = self._key['crv']
    if arg and crv != arg:
        raise InvalidJWKValue('Curve requested is "%s", but '
                              'key curve is "%s"' % (arg, crv))
    return self._get_curve_by_name(crv)
Gets the Elliptic Curve associated with the key .
14,018
def get_op_key(self, operation=None, arg=None):
    """Get the key object associated to the requested operation.

    For example the public RSA key for the 'verify' operation or the
    private EC key for the 'decrypt' operation.
    """
    validops = self._params.get('key_ops',
                                list(JWKOperationsRegistry.keys()))
    # Bug fix: the original tested `validops is not list` — an identity
    # comparison against the `list` type that is always True — so a
    # list-valued key_ops parameter got wrapped into a nested list.
    if not isinstance(validops, list):
        validops = [validops]
    if operation is None:
        if self._params['kty'] == 'oct':
            return self._key['k']
        raise InvalidJWKOperation(operation, validops)
    elif operation == 'sign':
        self._check_constraints('sig', operation)
        return self._get_private_key(arg)
    elif operation == 'verify':
        self._check_constraints('sig', operation)
        return self._get_public_key(arg)
    elif operation in ('encrypt', 'wrapKey'):
        self._check_constraints('enc', operation)
        return self._get_public_key(arg)
    elif operation in ('decrypt', 'unwrapKey'):
        self._check_constraints('enc', operation)
        return self._get_private_key(arg)
    else:
        raise NotImplementedError
Get the key object associated to the requested operation . For example the public RSA key for the verify operation or the private EC key for the decrypt operation .
14,019
def thumbprint(self, hashalg=hashes.SHA256()):
    """Returns the key thumbprint as specified by RFC 7638."""
    required = {'kty': self._params['kty']}
    # Collect only the required members of this key type.
    for name, val in iteritems(JWKValuesRegistry[required['kty']]):
        if val.required:
            required[name] = self._key[name]
    digest = hashes.Hash(hashalg, backend=default_backend())
    digest.update(bytes(json_encode(required).encode('utf8')))
    return base64url_encode(digest.finalize())
Returns the key thumbprint as specified by RFC 7638 .
14,020
def add(self, elem):
    """Adds a JWK object to the set."""
    if not isinstance(elem, JWK):
        raise TypeError('Only JWK objects are valid elements')
    set.add(self, elem)
Adds a JWK object to the set
14,021
def export(self, private_keys=True):
    """Exports a RFC 7517 keyset using the standard JSON format."""
    exported = {}
    for name, value in iteritems(self):
        if name == 'keys':
            value = [json_decode(jwk.export(private_keys))
                     for jwk in value]
        exported[name] = value
    return json_encode(exported)
Exports a RFC 7517 keyset using the standard JSON format
14,022
def import_keyset(self, keyset):
    """Imports a RFC 7517 keyset using the standard JSON format."""
    try:
        parsed = json_decode(keyset)
    except Exception:
        raise InvalidJWKValue()
    if 'keys' not in parsed:
        raise InvalidJWKValue()
    for name, value in iteritems(parsed):
        if name == 'keys':
            for jwk in value:
                self['keys'].add(JWK(**jwk))
        else:
            self[name] = value
Imports a RFC 7517 keyset using the standard JSON format .
14,023
def add_recipient(self, key, header=None):
    """Encrypt the plaintext with the given key.

    Wraps the CEK for this recipient and appends it to the object,
    promoting the flattened single-recipient layout to a 'recipients'
    list when a second recipient is added.
    """
    if self.plaintext is None:
        raise ValueError('Missing plaintext')
    if not isinstance(self.plaintext, bytes):
        raise ValueError("Plaintext must be 'bytes'")
    if isinstance(header, dict):
        header = json_encode(header)
    jh = self._get_jose_header(header)
    alg, enc = self._get_alg_enc_from_headers(jh)

    rec = dict()
    if header:
        rec['header'] = header
    wrapped = alg.wrap(key, enc.wrap_key_size, self.cek, jh)
    self.cek = wrapped['cek']
    if 'ek' in wrapped:
        rec['encrypted_key'] = wrapped['ek']
    if 'header' in wrapped:
        # Fold algorithm-produced header values into this recipient's.
        h = json_decode(rec.get('header', '{}'))
        nh = self._merge_headers(h, wrapped['header'])
        rec['header'] = json_encode(nh)

    if 'ciphertext' not in self.objects:
        self._encrypt(alg, enc, jh)

    if 'recipients' in self.objects:
        self.objects['recipients'].append(rec)
    elif 'encrypted_key' in self.objects or 'header' in self.objects:
        # Second recipient: convert the flattened layout to a list.
        self.objects['recipients'] = list()
        n = dict()
        if 'encrypted_key' in self.objects:
            n['encrypted_key'] = self.objects.pop('encrypted_key')
        if 'header' in self.objects:
            n['header'] = self.objects.pop('header')
        self.objects['recipients'].append(n)
        self.objects['recipients'].append(rec)
    else:
        self.objects.update(rec)
Encrypt the plaintext with the given key .
14,024
def serialize(self, compact=False):
    """Serializes the object into a JWE token.

    With compact=True the 5-part dotted form is produced (single
    recipient, no aad/unprotected headers); otherwise the JSON
    serialization is returned.
    """
    if 'ciphertext' not in self.objects:
        raise InvalidJWEOperation("No available ciphertext")
    if compact:
        for invalid in 'aad', 'unprotected':
            if invalid in self.objects:
                raise InvalidJWEOperation(
                    "Can't use compact encoding when the '%s' parameter"
                    "is set" % invalid)
        if 'protected' not in self.objects:
            raise InvalidJWEOperation(
                "Can't use compat encoding without protected headers")
        else:
            ph = json_decode(self.objects['protected'])
            for required in 'alg', 'enc':
                if required not in ph:
                    raise InvalidJWEOperation(
                        "Can't use compat encoding, '%s' must be in the "
                        "protected header" % required)
        if 'recipients' in self.objects:
            if len(self.objects['recipients']) != 1:
                raise InvalidJWEOperation("Invalid number of recipients")
            rec = self.objects['recipients'][0]
        else:
            rec = self.objects
        if 'header' in rec:
            # Per-recipient headers cannot be expressed in compact
            # form: merge them into the protected header and
            # re-encrypt so the AAD stays consistent.
            h = json_decode(rec['header'])
            ph = json_decode(self.objects['protected'])
            nph = self._merge_headers(h, ph)
            self.objects['protected'] = json_encode(nph)
            jh = self._get_jose_header()
            alg, enc = self._get_alg_enc_from_headers(jh)
            self._encrypt(alg, enc, jh)
            del rec['header']
        return '.'.join([
            base64url_encode(self.objects['protected']),
            base64url_encode(rec.get('encrypted_key', '')),
            base64url_encode(self.objects['iv']),
            base64url_encode(self.objects['ciphertext']),
            base64url_encode(self.objects['tag'])])
    else:
        obj = self.objects
        enc = {'ciphertext': base64url_encode(obj['ciphertext']),
               'iv': base64url_encode(obj['iv']),
               'tag': base64url_encode(self.objects['tag'])}
        if 'protected' in obj:
            enc['protected'] = base64url_encode(obj['protected'])
        if 'unprotected' in obj:
            enc['unprotected'] = json_decode(obj['unprotected'])
        if 'aad' in obj:
            enc['aad'] = base64url_encode(obj['aad'])
        if 'recipients' in obj:
            enc['recipients'] = list()
            for rec in obj['recipients']:
                e = dict()
                if 'encrypted_key' in rec:
                    e['encrypted_key'] = base64url_encode(
                        rec['encrypted_key'])
                if 'header' in rec:
                    e['header'] = json_decode(rec['header'])
                enc['recipients'].append(e)
        else:
            if 'encrypted_key' in obj:
                enc['encrypted_key'] = base64url_encode(
                    obj['encrypted_key'])
            if 'header' in obj:
                enc['header'] = json_decode(obj['header'])
        return json_encode(enc)
Serializes the object into a JWE token .
14,025
def decrypt(self, key):
    """Decrypt a JWE token.

    Tries every recipient entry with the given key; failures are
    collected in ``decryptlog`` and InvalidJWEData is raised when no
    recipient matched.
    """
    if 'ciphertext' not in self.objects:
        raise InvalidJWEOperation("No available ciphertext")
    self.decryptlog = list()
    if 'recipients' in self.objects:
        candidates = self.objects['recipients']
    else:
        candidates = [self.objects]
    for rec in candidates:
        try:
            self._decrypt(key, rec)
        except Exception as e:
            self.decryptlog.append('Failed: [%s]' % repr(e))
    if not self.plaintext:
        raise InvalidJWEData('No recipient matched the provided '
                             'key' + repr(self.decryptlog))
Decrypt a JWE token .
14,026
def deserialize(self, raw_jwe, key=None):
    """Deserialize a JWE token.

    Accepts either the JSON serialization or the 5-part compact form;
    optionally decrypts immediately when ``key`` is given.
    """
    self.objects = dict()
    self.plaintext = None
    self.cek = None
    parsed = dict()
    try:
        try:
            djwe = json_decode(raw_jwe)
            parsed['iv'] = base64url_decode(djwe['iv'])
            parsed['ciphertext'] = base64url_decode(djwe['ciphertext'])
            parsed['tag'] = base64url_decode(djwe['tag'])
            if 'protected' in djwe:
                p = base64url_decode(djwe['protected'])
                parsed['protected'] = p.decode('utf-8')
            if 'unprotected' in djwe:
                parsed['unprotected'] = json_encode(djwe['unprotected'])
            if 'aad' in djwe:
                parsed['aad'] = base64url_decode(djwe['aad'])
            if 'recipients' in djwe:
                parsed['recipients'] = list()
                for rec in djwe['recipients']:
                    e = dict()
                    if 'encrypted_key' in rec:
                        e['encrypted_key'] = base64url_decode(
                            rec['encrypted_key'])
                    if 'header' in rec:
                        e['header'] = json_encode(rec['header'])
                    parsed['recipients'].append(e)
            else:
                if 'encrypted_key' in djwe:
                    parsed['encrypted_key'] = base64url_decode(
                        djwe['encrypted_key'])
                if 'header' in djwe:
                    parsed['header'] = json_encode(djwe['header'])
        except ValueError:
            # Not JSON: fall back to the compact representation.
            c = raw_jwe.split('.')
            if len(c) != 5:
                raise InvalidJWEData()
            p = base64url_decode(c[0])
            parsed['protected'] = p.decode('utf-8')
            ekey = base64url_decode(c[1])
            if ekey != b'':
                parsed['encrypted_key'] = base64url_decode(c[1])
            parsed['iv'] = base64url_decode(c[2])
            parsed['ciphertext'] = base64url_decode(c[3])
            parsed['tag'] = base64url_decode(c[4])
        self.objects = parsed
    except Exception as e:
        raise InvalidJWEData('Invalid format', repr(e))
    if key:
        self.decrypt(key)
Deserialize a JWE token .
14,027
def sign(self):
    """Generates a signature over the protected header and payload."""
    payload = self._payload()
    signing_input = b'.'.join([self.protected.encode('utf-8'), payload])
    signature = self.engine.sign(self.key, signing_input)
    return {'protected': self.protected,
            'payload': payload,
            'signature': base64url_encode(signature)}
Generates a signature
14,028
def verify(self, signature):
    """Verifies a signature.

    Raises InvalidJWSSignature on any failure; returns True on success.
    """
    try:
        payload = self._payload()
        signing_input = b'.'.join([self.protected.encode('utf-8'),
                                   payload])
        self.engine.verify(self.key, signing_input, signature)
    except Exception as e:
        raise InvalidJWSSignature('Verification failed', repr(e))
    return True
Verifies a signature
14,029
def verify(self, key, alg=None):
    """Verifies a JWS token.

    Every signature entry is tried; failures are collected in
    ``verifylog`` and InvalidJWSSignature is raised when none verified.
    """
    self.verifylog = list()
    self.objects['valid'] = False
    obj = self.objects
    if 'signature' in obj:
        try:
            self._verify(alg, key,
                         obj['payload'],
                         obj['signature'],
                         obj.get('protected', None),
                         obj.get('header', None))
            obj['valid'] = True
        except Exception as e:
            self.verifylog.append('Failed: [%s]' % repr(e))
    elif 'signatures' in obj:
        for o in obj['signatures']:
            try:
                self._verify(alg, key,
                             obj['payload'],
                             o['signature'],
                             o.get('protected', None),
                             o.get('header', None))
                obj['valid'] = True
            except Exception as e:
                self.verifylog.append('Failed: [%s]' % repr(e))
    else:
        # NOTE: 'availble' typo kept verbatim — it is a runtime string.
        raise InvalidJWSSignature('No signatures availble')
    if not self.is_valid:
        raise InvalidJWSSignature('Verification failed for all '
                                  'signatures' + repr(self.verifylog))
Verifies a JWS token .
14,030
def deserialize(self, raw_jws, key=None, alg=None):
    """Deserialize a JWS token.

    Accepts the JSON serialization or the 3-part compact form;
    optionally verifies immediately when ``key`` is given.
    """
    self.objects = dict()
    o = dict()
    try:
        try:
            djws = json_decode(raw_jws)
            if 'signatures' in djws:
                o['signatures'] = list()
                for s in djws['signatures']:
                    os = self._deserialize_signature(s)
                    o['signatures'].append(os)
                    self._deserialize_b64(o, os.get('protected'))
            else:
                o = self._deserialize_signature(djws)
                self._deserialize_b64(o, o.get('protected'))
            if 'payload' in djws:
                if o.get('b64', True):
                    o['payload'] = base64url_decode(str(djws['payload']))
                else:
                    o['payload'] = djws['payload']
        except ValueError:
            # Not JSON: fall back to the compact representation.
            c = raw_jws.split('.')
            if len(c) != 3:
                raise InvalidJWSObject('Unrecognized representation')
            p = base64url_decode(str(c[0]))
            if len(p) > 0:
                o['protected'] = p.decode('utf-8')
                self._deserialize_b64(o, o['protected'])
            o['payload'] = base64url_decode(str(c[1]))
            o['signature'] = base64url_decode(str(c[2]))
        self.objects = o
    except Exception as e:
        raise InvalidJWSObject('Invalid format', repr(e))
    if key:
        self.verify(key, alg)
Deserialize a JWS token .
14,031
def add_signature(self, key, alg=None, protected=None, header=None):
    """Adds a new signature to the object.

    The 'alg' value may come from the protected/unprotected headers or
    the ``alg`` argument; a mismatch between the two raises ValueError.
    """
    if not self.objects.get('payload', None):
        raise InvalidJWSObject('Missing Payload')
    b64 = True
    p = dict()
    if protected:
        if isinstance(protected, dict):
            p = protected
            protected = json_encode(p)
        else:
            p = json_decode(protected)
        # A 'b64' header is only valid when marked critical.
        if 'b64' in list(p.keys()):
            crit = p.get('crit', [])
            if 'b64' not in crit:
                raise InvalidJWSObject('b64 header must always be critical')
            b64 = p['b64']
    if 'b64' in self.objects:
        if b64 != self.objects['b64']:
            raise InvalidJWSObject('Mixed b64 headers on signatures')
    h = None
    if header:
        if isinstance(header, dict):
            h = header
            header = json_encode(header)
        else:
            h = json_decode(header)
        p = self._merge_check_headers(p, h)
    if 'alg' in p:
        if alg is None:
            alg = p['alg']
        elif alg != p['alg']:
            raise ValueError('"alg" value mismatch, specified "alg" '
                             'does not match JOSE header value')
    if alg is None:
        raise ValueError('"alg" not specified')
    c = JWSCore(alg, key, protected, self.objects['payload'])
    sig = c.sign()
    o = dict()
    o['signature'] = base64url_decode(sig['signature'])
    if protected:
        o['protected'] = protected
    if header:
        o['header'] = h
    o['valid'] = True
    if 'signatures' in self.objects:
        self.objects['signatures'].append(o)
    elif 'signature' in self.objects:
        # Second signature: promote the flattened layout to a list.
        self.objects['signatures'] = list()
        n = dict()
        n['signature'] = self.objects.pop('signature')
        for field in ('protected', 'header', 'valid'):
            if field in self.objects:
                n[field] = self.objects.pop(field)
        self.objects['signatures'].append(n)
        self.objects['signatures'].append(o)
    else:
        self.objects.update(o)
    self.objects['b64'] = b64
Adds a new signature to the object .
14,032
def serialize(self, compact=False):
    """Serializes the object into a JWS token.

    With compact=True the 3-part dotted form is produced (single
    signature only); otherwise the JSON serialization is returned.
    """
    if compact:
        if 'signatures' in self.objects:
            raise InvalidJWSOperation("Can't use compact encoding with "
                                      "multiple signatures")
        if 'signature' not in self.objects:
            raise InvalidJWSSignature("No available signature")
        if not self.objects.get('valid', False):
            raise InvalidJWSSignature("No valid signature found")
        if 'protected' in self.objects:
            protected = base64url_encode(self.objects['protected'])
        else:
            protected = ''
        if self.objects.get('payload', False):
            if self.objects.get('b64', True):
                payload = base64url_encode(self.objects['payload'])
            else:
                if isinstance(self.objects['payload'], bytes):
                    payload = self.objects['payload'].decode('utf-8')
                else:
                    payload = self.objects['payload']
                if '.' in payload:
                    raise InvalidJWSOperation(
                        "Can't use compact encoding with unencoded "
                        "payload that uses the . character")
        else:
            payload = ''
        return '.'.join([protected, payload,
                         base64url_encode(self.objects['signature'])])
    else:
        obj = self.objects
        sig = dict()
        if self.objects.get('payload', False):
            if self.objects.get('b64', True):
                sig['payload'] = base64url_encode(self.objects['payload'])
            else:
                sig['payload'] = self.objects['payload']
        if 'signature' in obj:
            if not obj.get('valid', False):
                raise InvalidJWSSignature("No valid signature found")
            sig['signature'] = base64url_encode(obj['signature'])
            if 'protected' in obj:
                sig['protected'] = base64url_encode(obj['protected'])
            if 'header' in obj:
                sig['header'] = obj['header']
        elif 'signatures' in obj:
            sig['signatures'] = list()
            for o in obj['signatures']:
                # Only signatures that verified (or were just created)
                # are serialized.
                if not o.get('valid', False):
                    continue
                s = {'signature': base64url_encode(o['signature'])}
                if 'protected' in o:
                    s['protected'] = base64url_encode(o['protected'])
                if 'header' in o:
                    s['header'] = o['header']
                sig['signatures'].append(s)
            if len(sig['signatures']) == 0:
                raise InvalidJWSSignature("No valid signature found")
        else:
            raise InvalidJWSSignature("No available signature")
        return json_encode(sig)
Serializes the object into a JWS token .
14,033
def make_signed_token(self, key):
    """Signs the payload."""
    token = JWS(self.claims)
    token.add_signature(key, protected=self.header)
    self.token = token
Signs the payload .
14,034
def make_encrypted_token(self, key):
    """Encrypts the payload."""
    token = JWE(self.claims, self.header)
    token.add_recipient(key)
    self.token = token
Encrypts the payload .
14,035
def deserialize(self, jwt, key=None):
    """Deserialize a JWT token.

    The dot count selects JWS (2) vs JWE (4). ``key`` may be a single
    JWK or a JWKSet; with a set, the 'kid' header selects the key,
    otherwise every key is tried in turn.
    """
    dots = jwt.count('.')
    if dots == 2:
        self.token = JWS()
    elif dots == 4:
        self.token = JWE()
    else:
        raise ValueError("Token format unrecognized")
    if self._algs:
        self.token.allowed_algs = self._algs
    self.deserializelog = list()
    if key is None:
        self.token.deserialize(jwt, None)
    elif isinstance(key, JWK):
        self.token.deserialize(jwt, key)
        self.deserializelog.append("Success")
    elif isinstance(key, JWKSet):
        self.token.deserialize(jwt, None)
        if 'kid' in self.token.jose_header:
            kid_key = key.get_key(self.token.jose_header['kid'])
            if not kid_key:
                raise JWTMissingKey('Key ID %s not in key set'
                                    % self.token.jose_header['kid'])
            self.token.deserialize(jwt, kid_key)
        else:
            for k in key:
                try:
                    self.token.deserialize(jwt, k)
                    self.deserializelog.append("Success")
                    break
                except Exception as e:
                    keyid = k.key_id
                    if keyid is None:
                        keyid = k.thumbprint()
                    self.deserializelog.append(
                        'Key [%s] failed: [%s]' % (keyid, repr(e)))
                    continue
            if "Success" not in self.deserializelog:
                raise JWTMissingKey('No working key found in key set')
    else:
        raise ValueError("Unrecognized Key Type")
    if key is not None:
        self.header = self.token.jose_header
        self.claims = self.token.payload.decode('utf-8')
        self._check_provided_claims()
Deserialize a JWT token .
14,036
def validate(self, n_viz=9):
    """Validate current model using validation dataset.

    Evaluates loss and accuracy over the validation iterator, saves a
    tiled visualization of up to ``n_viz`` samples, logs the metrics
    and saves the model.
    """
    iter_valid = copy.copy(self.iter_valid)
    losses, lbl_trues, lbl_preds = [], [], []
    vizs = []
    dataset = iter_valid.dataset
    desc = 'valid [iteration=%08d]' % self.iteration
    for batch in tqdm.tqdm(iter_valid, desc=desc, total=len(dataset),
                           ncols=80, leave=False):
        img, lbl_true = zip(*batch)
        batch = map(datasets.transform_lsvrc2012_vgg16, batch)
        with chainer.no_backprop_mode(), \
                chainer.using_config('train', False):
            in_vars = utils.batch_to_vars(batch, device=self.device)
            loss = self.model(*in_vars)
        losses.append(float(loss.data))
        score = self.model.score
        lbl_pred = chainer.functions.argmax(score, axis=1)
        lbl_pred = chainer.cuda.to_cpu(lbl_pred.data)
        for im, lt, lp in zip(img, lbl_true, lbl_pred):
            lbl_trues.append(lt)
            lbl_preds.append(lp)
            if len(vizs) < n_viz:
                viz = utils.visualize_segmentation(
                    lbl_pred=lp, lbl_true=lt, img=im,
                    n_class=self.model.n_class)
                vizs.append(viz)
    out_viz = osp.join(self.out, 'visualizations_valid',
                       'iter%08d.jpg' % self.iteration)
    if not osp.exists(osp.dirname(out_viz)):
        os.makedirs(osp.dirname(out_viz))
    viz = utils.get_tile_image(vizs)
    skimage.io.imsave(out_viz, viz)
    acc = utils.label_accuracy_score(lbl_trues, lbl_preds,
                                     self.model.n_class)
    self._write_log(**{
        'epoch': self.epoch,
        'iteration': self.iteration,
        'elapsed_time': time.time() - self.stamp_start,
        'valid/loss': np.mean(losses),
        'valid/acc': acc[0],
        'valid/acc_cls': acc[1],
        'valid/mean_iu': acc[2],
        'valid/fwavacc': acc[3],
    })
    self._save_model()
Validate current model using validation dataset .
14,037
def train(self):
    """Train the network using the training dataset.

    Runs validation every ``interval_validate`` iterations, logs the
    training metrics, and saves the model once ``max_iter`` is reached.
    """
    self.stamp_start = time.time()
    for iteration, batch in tqdm.tqdm(enumerate(self.iter_train),
                                      desc='train', total=self.max_iter,
                                      ncols=80):
        self.epoch = self.iter_train.epoch
        self.iteration = iteration

        if self.interval_validate and \
                self.iteration % self.interval_validate == 0:
            self.validate()

        # Bug fix (Python 3): `map` returns a one-shot iterator, so
        # the original exhausted `batch` inside batch_to_vars and then
        # indexed `zip(*batch)[1]` — a TypeError on Python 3 (zip is
        # not subscriptable) and an empty sequence besides.
        batch = list(map(datasets.transform_lsvrc2012_vgg16, batch))
        in_vars = utils.batch_to_vars(batch, device=self.device)
        self.model.zerograds()
        loss = self.model(*in_vars)

        if loss is not None:
            loss.backward()
            self.optimizer.update()
            lbl_true = list(zip(*batch))[1]
            lbl_pred = chainer.functions.argmax(self.model.score, axis=1)
            lbl_pred = chainer.cuda.to_cpu(lbl_pred.data)
            acc = utils.label_accuracy_score(lbl_true, lbl_pred,
                                             self.model.n_class)
            self._write_log(**{
                'epoch': self.epoch,
                'iteration': self.iteration,
                'elapsed_time': time.time() - self.stamp_start,
                'train/loss': float(loss.data),
                'train/acc': acc[0],
                'train/acc_cls': acc[1],
                'train/mean_iu': acc[2],
                'train/fwavacc': acc[3],
            })

        if iteration >= self.max_iter:
            self._save_model()
            break
Train the network using the training dataset .
14,038
def centerize(src, dst_shape, margin_color=None):
    """Centerize image for specified image size.

    Args:
        src: source image array.
        dst_shape: shape of the destination canvas.
        margin_color: fill value for the margin; None leaves zeros.

    Returns:
        An array of ``dst_shape`` with ``src`` centered in it, or
        ``src`` itself when the spatial sizes already match.
    """
    if src.shape[:2] == dst_shape[:2]:
        return src
    centerized = np.zeros(dst_shape, dtype=src.dtype)
    # Bug fix: `if margin_color:` silently ignored falsy colors such
    # as 0 (black); compare against None so any explicit color is used.
    if margin_color is not None:
        centerized[:, :] = margin_color
    pad_vertical, pad_horizontal = 0, 0
    h, w = src.shape[:2]
    dst_h, dst_w = dst_shape[:2]
    if h < dst_h:
        pad_vertical = (dst_h - h) // 2
    if w < dst_w:
        pad_horizontal = (dst_w - w) // 2
    centerized[pad_vertical:pad_vertical + h,
               pad_horizontal:pad_horizontal + w] = src
    return centerized
Centerize image for specified image size
14,039
def _tile_images ( imgs , tile_shape , concatenated_image ) : y_num , x_num = tile_shape one_width = imgs [ 0 ] . shape [ 1 ] one_height = imgs [ 0 ] . shape [ 0 ] if concatenated_image is None : if len ( imgs [ 0 ] . shape ) == 3 : n_channels = imgs [ 0 ] . shape [ 2 ] assert all ( im . shape [ 2 ] == n_channels for im in imgs ) concatenated_image = np . zeros ( ( one_height * y_num , one_width * x_num , n_channels ) , dtype = np . uint8 , ) else : concatenated_image = np . zeros ( ( one_height * y_num , one_width * x_num ) , dtype = np . uint8 ) for y in six . moves . range ( y_num ) : for x in six . moves . range ( x_num ) : i = x + y * x_num if i >= len ( imgs ) : pass else : concatenated_image [ y * one_height : ( y + 1 ) * one_height , x * one_width : ( x + 1 ) * one_width ] = imgs [ i ] return concatenated_image
Concatenate images whose sizes are same .
14,040
def get_tile_image(imgs, tile_shape=None, result_img=None,
                   margin_color=None):
    """Concatenate images whose sizes are different.

    Each image is scaled to the smallest height/width present, padded
    to a common cell size and laid out on a near-square grid.
    """

    def resize(*args, **kwargs):
        # skimage < 0.14 does not accept the anti_aliasing keyword.
        if LooseVersion(skimage.__version__) < LooseVersion('0.14'):
            kwargs.pop('anti_aliasing', None)
        return skimage.transform.resize(*args, **kwargs)

    def get_tile_shape(img_num):
        x_num = 0
        y_num = int(math.sqrt(img_num))
        while x_num * y_num < img_num:
            x_num += 1
        return y_num, x_num

    if tile_shape is None:
        tile_shape = get_tile_shape(len(imgs))

    max_height, max_width = np.inf, np.inf
    for img in imgs:
        max_height = min([max_height, img.shape[0]])
        max_width = min([max_width, img.shape[1]])

    for i, img in enumerate(imgs):
        h, w = img.shape[:2]
        dtype = img.dtype
        h_scale, w_scale = max_height / h, max_width / w
        scale = min([h_scale, w_scale])
        h, w = int(scale * h), int(scale * w)
        img = resize(image=img, output_shape=(h, w), mode='reflect',
                     preserve_range=True,
                     anti_aliasing=True).astype(dtype)
        if len(img.shape) == 3:
            img = centerize(img, (max_height, max_width, 3),
                            margin_color)
        else:
            img = centerize(img, (max_height, max_width), margin_color)
        imgs[i] = img
    return _tile_images(imgs, tile_shape, result_img)
Concatenate images whose sizes are different .
14,041
def visualize_segmentation(**kwargs):
    """Visualize segmentation.

    Keyword Args:
        img: input image.
        lbl_true: ground-truth label map; -1 marks unlabeled pixels.
        lbl_pred: predicted label map.
        n_class: number of classes.
        label_names: optional class names.

    Returns:
        A tiled visualization image.
    """
    img = kwargs.pop('img', None)
    lbl_true = kwargs.pop('lbl_true', None)
    lbl_pred = kwargs.pop('lbl_pred', None)
    n_class = kwargs.pop('n_class', None)
    label_names = kwargs.pop('label_names', None)
    if kwargs:
        raise RuntimeError(
            'Unexpected keys in kwargs: {}'.format(kwargs.keys()))
    if lbl_true is None and lbl_pred is None:
        raise ValueError('lbl_true or lbl_pred must be not None.')

    # Work on copies: unlabeled pixels are zeroed below.
    lbl_true = copy.deepcopy(lbl_true)
    lbl_pred = copy.deepcopy(lbl_pred)

    mask_unlabeled = None
    viz_unlabeled = None
    if lbl_true is not None:
        mask_unlabeled = lbl_true == -1
        lbl_true[mask_unlabeled] = 0
        viz_unlabeled = (
            np.random.random(
                (lbl_true.shape[0], lbl_true.shape[1], 3)) * 255
        ).astype(np.uint8)
        if lbl_pred is not None:
            lbl_pred[mask_unlabeled] = 0

    vizs = []
    if lbl_true is not None:
        viz_trues = [
            img,
            label2rgb(lbl_true, label_names=label_names,
                      n_labels=n_class),
            label2rgb(lbl_true, img, label_names=label_names,
                      n_labels=n_class),
        ]
        viz_trues[1][mask_unlabeled] = viz_unlabeled[mask_unlabeled]
        viz_trues[2][mask_unlabeled] = viz_unlabeled[mask_unlabeled]
        vizs.append(get_tile_image(viz_trues, (1, 3)))
    if lbl_pred is not None:
        viz_preds = [
            img,
            label2rgb(lbl_pred, label_names=label_names,
                      n_labels=n_class),
            label2rgb(lbl_pred, img, label_names=label_names,
                      n_labels=n_class),
        ]
        if mask_unlabeled is not None and viz_unlabeled is not None:
            viz_preds[1][mask_unlabeled] = viz_unlabeled[mask_unlabeled]
            viz_preds[2][mask_unlabeled] = viz_unlabeled[mask_unlabeled]
        vizs.append(get_tile_image(viz_preds, (1, 3)))

    if len(vizs) == 1:
        return vizs[0]
    elif len(vizs) == 2:
        return get_tile_image(vizs, (2, 1))
    else:
        raise RuntimeError
Visualize segmentation .
14,042
def create(self, output_path, dry_run=False, output_format=None, compresslevel=None):
    """Create the archive at output_path.

    @param output_path: Output file path.
    @param dry_run: When True only log what would be archived, create nothing.
    @param output_format: Archive format; guessed from the output_path
        extension when None.
    @param compresslevel: Optional compression level; zip requires
        Python 3.7+, tar formats accept it only when compressed.
    @raise ValueError: On unknown format or unsupported compresslevel.
    """
    if output_format is None:
        file_name, file_ext = path.splitext(output_path)
        output_format = file_ext[len(extsep):].lower()
        self.LOG.debug("Output format is not explicitly set, determined format is {0}.".format(output_format))

    if not dry_run:
        if output_format in self.ZIPFILE_FORMATS:
            from zipfile import ZipFile, ZipInfo, ZIP_DEFLATED

            if compresslevel is not None:
                # ZipFile gained the ``compresslevel`` kwarg in Python 3.7.
                if sys.version_info > (3, 7):
                    archive = ZipFile(path.abspath(output_path), 'w', compresslevel=compresslevel)
                else:
                    raise ValueError("Compression level for zip archives requires Python 3.7+")
            else:
                archive = ZipFile(path.abspath(output_path), 'w')

            def add_file(file_path, arcname):
                if not path.islink(file_path):
                    archive.write(file_path, arcname, ZIP_DEFLATED)
                else:
                    # Store symlinks as symlinks: mark the entry as coming
                    # from a UNIX host (create_system=3) and set
                    # S_IFLNK | 0755 in the external attributes.
                    i = ZipInfo(arcname)
                    i.create_system = 3
                    i.external_attr = 0xA1ED0000
                    archive.writestr(i, readlink(file_path))
        elif output_format in self.TARFILE_FORMATS:
            import tarfile

            mode = self.TARFILE_FORMATS[output_format]

            if compresslevel is not None:
                try:
                    archive = tarfile.open(path.abspath(output_path), mode, compresslevel=compresslevel)
                except TypeError:
                    # Uncompressed tar modes reject ``compresslevel``.
                    raise ValueError("{0} cannot be compressed".format(output_format))
            else:
                archive = tarfile.open(path.abspath(output_path), mode)

            def add_file(file_path, arcname):
                archive.add(file_path, arcname)
        else:
            raise ValueError("unknown format: {0}".format(output_format))

        def archiver(file_path, arcname):
            self.LOG.debug("{0} => {1}".format(file_path, arcname))
            add_file(file_path, arcname)
    else:
        # Dry run: no archive object, just log the would-be entries.
        archive = None

        def archiver(file_path, arcname):
            self.LOG.info("{0} => {1}".format(file_path, arcname))

    self.archive_all_files(archiver)

    if archive is not None:
        archive.close()
Create the archive at output_file_path .
14,043
def is_file_excluded(self, repo_abspath, repo_file_path):
    """Checks whether the file at a given path is excluded from the archive.

    Consults the long-lived ``git check-attr`` generator registered for
    *repo_abspath*: a file is excluded when its ``export-ignore``
    attribute is set.

    @param repo_abspath: Absolute path to the git repository.
    @param repo_file_path: Path to the file within the repository.
    @return: True when the file should be excluded.
    """
    attr_gen = self._check_attr_gens[repo_abspath]
    next(attr_gen)
    file_attrs = attr_gen.send(repo_file_path)
    return file_attrs['export-ignore'] == 'set'
Checks whether file at a given path is excluded .
14,044
def archive_all_files(self, archiver):
    """Archive all files using *archiver*.

    Feeds every extra file and every tracked git file to the supplied
    callable as ``(absolute_source_path, archive_name)`` pairs, with the
    archive name prefixed by ``self.prefix``.

    @param archiver: Callable(file_path, arcname) that stores one file.
    """
    for extra_path in self.extra:
        archiver(path.abspath(extra_path),
                 path.join(self.prefix, extra_path))
    for tracked_path in self.walk_git_files():
        archiver(path.join(self.main_repo_abspath, tracked_path),
                 path.join(self.prefix, tracked_path))
Archive all files using archiver .
14,045
def walk_git_files(self, repo_path=''):
    """An iterator method that yields a file path relative to
    main_repo_abspath for each file that should be included in the
    archive. Skips those that match the exclusion patterns found in any
    discovered .gitattributes files along the way.

    @param repo_path: Path to the (sub)repository relative to
        main_repo_abspath; '' for the main repository.
    """
    repo_abspath = path.join(self.main_repo_abspath, repo_path)
    # Each repository gets exactly one long-lived check-attr generator.
    assert repo_abspath not in self._check_attr_gens
    self._check_attr_gens[repo_abspath] = self.check_attr(repo_abspath, ['export-ignore'])

    try:
        repo_file_paths = self.run_git_shell(
            'git ls-files -z --cached --full-name --no-empty-directory',
            repo_abspath
        ).split('\0')[:-1]

        for repo_file_path in repo_file_paths:
            repo_file_abspath = path.join(repo_abspath, repo_file_path)
            main_repo_file_path = path.join(repo_path, repo_file_path)

            # Only submodule directories appear as cached directories;
            # they are walked recursively below, so skip them here.
            if not path.islink(repo_file_abspath) and path.isdir(repo_file_abspath):
                continue

            if self.is_file_excluded(repo_abspath, repo_file_path):
                continue

            yield main_repo_file_path

        if self.force_sub:
            self.run_git_shell('git submodule init', repo_abspath)
            self.run_git_shell('git submodule update', repo_abspath)

        try:
            repo_gitmodules_abspath = path.join(repo_abspath, ".gitmodules")

            with open(repo_gitmodules_abspath) as f:
                lines = f.readlines()

            for l in lines:
                m = re.match("^\\s*path\\s*=\\s*(.*)\\s*$", l)

                if m:
                    repo_submodule_path = m.group(1)
                    main_repo_submodule_path = path.join(repo_path, repo_submodule_path)

                    if self.is_file_excluded(repo_abspath, repo_submodule_path):
                        continue

                    # Recurse into the submodule; its files are checked
                    # against both the submodule's and this repo's attrs.
                    for main_repo_submodule_file_path in self.walk_git_files(main_repo_submodule_path):
                        repo_submodule_file_path = path.relpath(main_repo_submodule_file_path, repo_path)

                        if self.is_file_excluded(repo_abspath, repo_submodule_file_path):
                            continue

                        yield main_repo_submodule_file_path
        except IOError:
            # No .gitmodules file — the repository has no submodules.
            pass
    finally:
        self._check_attr_gens[repo_abspath].close()
        del self._check_attr_gens[repo_abspath]
An iterator method that yields a file path relative to main_repo_abspath for each file that should be included in the archive . Skips those that match the exclusion patterns found in any discovered . gitattributes files along the way .
14,046
def check_attr(self, repo_abspath, attrs):
    """Generator that returns attributes for given paths relative to repo_abspath.

    Runs a single long-lived ``git check-attr --stdin -z`` subprocess
    and feeds it one path at a time.  Usage per path:

        next(gen)               # advance to the receiving yield
        attrs = gen.send(path)  # dict of attr name -> value

    @param repo_abspath: Absolute repository path.
    @param attrs: Attribute names to check.
    """
    def make_process():
        # GIT_FLUSH=1 makes git flush stdout after every answer so we
        # can stream results without waiting for the process to exit.
        env = dict(environ, GIT_FLUSH='1')
        cmd = 'git check-attr --stdin -z {0}'.format(' '.join(attrs))
        return Popen(cmd, shell=True, stdin=PIPE, stdout=PIPE, cwd=repo_abspath, env=env)

    def read_attrs(process, repo_file_path):
        # Modern git (> 1.8.5): output is NUL-separated triplets
        # <path> NUL <attr> NUL <info> NUL — three NULs per attribute.
        process.stdin.write(repo_file_path.encode('utf-8') + b'\0')
        process.stdin.flush()

        path, attr, info = b'', b'', b''
        nuls_count = 0
        nuls_expected = 3 * len(attrs)

        while nuls_count != nuls_expected:
            b = process.stdout.read(1)

            if b == b'' and process.poll() is not None:
                raise RuntimeError("check-attr exited prematurely")
            elif b == b'\0':
                nuls_count += 1
                if nuls_count % 3 == 0:
                    yield map(self.decode_git_output, (path, attr, info))
                    path, attr, info = b'', b'', b''
            elif nuls_count % 3 == 0:
                path += b
            elif nuls_count % 3 == 1:
                attr += b
            elif nuls_count % 3 == 2:
                info += b

    def read_attrs_old(process, repo_file_path):
        # Older git (<= 1.8.5): one text line per attribute of the form
        # "<path>: <attr>: <info>\n"; parse from the right so paths may
        # contain ": " themselves.
        process.stdin.write(repo_file_path.encode('utf-8') + b'\0')
        process.stdin.flush()

        path, attr, info = b'', b'', b''
        lines_count = 0
        lines_expected = len(attrs)

        while lines_count != lines_expected:
            line = process.stdout.readline()
            info_start = line.rfind(b': ')
            if info_start == -1:
                raise RuntimeError("unexpected output of check-attr: {0}".format(line))

            attr_start = line.rfind(b': ', 0, info_start)
            if attr_start == -1:
                raise RuntimeError("unexpected output of check-attr: {0}".format(line))

            info = line[info_start + 2:len(line) - 1]
            attr = line[attr_start + 2:info_start]
            path = line[:attr_start]

            yield map(self.decode_git_output, (path, attr, info))
            lines_count += 1

    if not attrs:
        return

    process = make_process()

    try:
        while True:
            repo_file_path = yield
            repo_file_attrs = {}

            if self.git_version is None or self.git_version > (1, 8, 5):
                reader = read_attrs
            else:
                reader = read_attrs_old

            for path, attr, value in reader(process, repo_file_path):
                repo_file_attrs[attr] = value

            yield repo_file_attrs
    finally:
        process.stdin.close()
        process.wait()
Generator that returns attributes for given paths relative to repo_abspath .
14,047
def run_git_shell(cls, cmd, cwd=None):
    """Runs git shell command, reads output and decodes it into a unicode string.

    @param cmd: Command to be executed.
    @param cwd: Working directory.
    @return: Decoded output of the command.
    @raise CalledProcessError: If the command's return code is non-zero.
    """
    proc = Popen(cmd, shell=True, stdout=PIPE, cwd=cwd)
    raw_output, _ = proc.communicate()
    decoded = cls.decode_git_output(raw_output)

    if proc.returncode:
        # The ``output`` keyword of CalledProcessError appeared in 2.7.
        if sys.version_info > (2, 6):
            raise CalledProcessError(returncode=proc.returncode, cmd=cmd, output=decoded)
        raise CalledProcessError(returncode=proc.returncode, cmd=cmd)

    return decoded
Runs git shell command reads output and decodes it into unicode string .
14,048
def get_git_version(cls):
    """Return version of git the current shell points to.

    Parses the third token of ``git version`` output into an int tuple.
    Returns None (after logging a warning) if the command fails or the
    version cannot be parsed.
    """
    try:
        raw = cls.run_git_shell('git version')
    except CalledProcessError:
        cls.LOG.warning("Unable to get Git version.")
        return None

    try:
        version_token = raw.split()[2]
    except IndexError:
        cls.LOG.warning("Unable to parse Git version \"%s\".", raw)
        return None

    try:
        return tuple(int(part) for part in version_token.split('.'))
    except ValueError:
        cls.LOG.warning("Unable to parse Git version \"%s\".", version_token)
        return None
Return version of git current shell points to .
14,049
async def request(method, uri, **kwargs):
    """Base function for one-time http requests.

    Spins up a throwaway Session, performs the single request with it,
    and returns the response.

    Args:
        method (str): The http method to use, e.g. 'GET'.
        uri (str): The url of the resource.
        kwargs: Forwarded to ``Session.request``; ``persist_cookies``
            and ``ssl_context`` are popped and given to the Session.

    Returns:
        The response object.
    """
    cookie_tracker = kwargs.pop('persist_cookies', None)
    ssl_context = kwargs.pop('ssl_context', None)
    async with Session(persist_cookies=cookie_tracker,
                       ssl_context=ssl_context) as session:
        return await session.request(method, url=uri, **kwargs)
Base function for one time http requests .
14,050
async def make_request(self, redirect=False):
    """Acts as the central hub for preparing requests to be sent, and
    returning them upon completion. Generally just pokes through self's
    attribs and makes decisions about what to do.

    Returns:
        sock: The socket to be returned to the calling session's pool
            (None while streaming, so the socket stays checked out).
        response_obj: The response object.
    """
    h11_connection = h11.Connection(our_role=h11.CLIENT)
    (self.scheme, self.host, self.path, self.uri_parameters, self.query, _) = urlparse(self.uri)

    if not redirect:
        # Remember the original scheme/netloc so later redirects can be
        # compared against where we started.
        self.initial_scheme = self.scheme
        self.initial_netloc = self.host

    # Plain host on default ports; host:port otherwise.
    host = (self.host if (self.port == '80' or self.port == '443')
            else self.host.split(':')[0] + ':' + self.port)

    # Default header set; entries may be overridden below.
    asks_headers = c_i_dict([
        ('Host', host),
        ('Connection', 'keep-alive'),
        ('Accept-Encoding', 'gzip, deflate'),
        ('Accept', '*/*'),
        ('Content-Length', '0'),
        ('User-Agent', 'python-asks/2.2.2')
    ])

    # Inject tracked cookies for this host/path into the request.
    if self.persist_cookies is not None:
        self.cookies.update(
            self.persist_cookies.get_additional_cookies(self.host, self.path))

    # Formulate path / query for the request URI.
    self._build_path()

    # Build the request body, if any of data/files/json is supplied.
    body = ''
    if any((self.data, self.files, self.json is not None)):
        content_type, content_len, body = await self._formulate_body()
        asks_headers['Content-Type'] = content_type
        asks_headers['Content-Length'] = content_len

    # User-supplied headers override the defaults.
    if self.headers is not None:
        asks_headers.update(self.headers)

    # Auth headers (pre-request and post/get-auth flavors).
    if self.auth is not None:
        asks_headers.update(await self._auth_handler_pre())
        asks_headers.update(await self._auth_handler_post_get_auth())

    # Serialize cookies as "k=v; k=v" (trailing "; " trimmed).
    if self.cookies:
        cookie_str = ''
        for k, v in self.cookies.items():
            cookie_str += '{}={}; '.format(k, v)
        asks_headers['Cookie'] = cookie_str[:-1]

    # Wrap the body in an h11 Data event, fixing Content-Length to the
    # final encoded byte count.
    if body:
        if not isinstance(body, bytes):
            body = bytes(body, self.encoding)
        asks_headers['Content-Length'] = str(len(body))
        req_body = h11.Data(data=body)
    else:
        req_body = None

    req = h11.Request(method=self.method,
                      target=self.path,
                      headers=asks_headers.items())

    response_obj = await self._request_io(req, req_body, h11_connection)

    # A cross-origin redirect means this socket must not be returned to
    # the pool for the original origin.
    if redirect:
        if not (self.scheme == self.initial_scheme and
                self.host == self.initial_netloc):
            self.sock._active = False

    if self.streaming:
        return None, response_obj

    return self.sock, response_obj
Acts as the central hub for preparing requests to be sent and returning them upon completion. Generally it just pokes through self's attributes and makes decisions about what to do.
14,051
def _build_path(self):
    """Constructs the actual request URL with accompanying query if any.

    Mutates ``self.path`` step by step (uri parameters, query string,
    extra params) and finally stores the full URL in ``self.req_url``;
    statement order matters here.
    """
    if not self.path:
        self.path = '/'

    if self.uri_parameters:
        self.path = self.path + ';' + requote_uri(self.uri_parameters)

    if self.query:
        self.path = (self.path + '?' + self.query)

    if self.params:
        try:
            if self.query:
                # A query already exists: append params to it.
                self.path = self.path + self._dict_to_query(
                    self.params, base_query=True)
            else:
                self.path = self.path + self._dict_to_query(self.params)
        except AttributeError:
            # ``params`` was not a dict; assume a pre-built query string.
            self.path = self.path + '?' + self.params

    self.path = requote_uri(self.path)

    self.req_url = urlunparse(
        (self.scheme, self.host, (self.path or ''), '', '', ''))
Constructs the actual request URL with accompanying query if any .
14,052
async def _catch_response(self, h11_connection):
    """Instantiates the parser which manages incoming data, first getting
    the headers, storing cookies, and then parsing the response's body,
    if any.

    Args:
        h11_connection: The h11 connection for the current request.

    Returns:
        Response or StreamResponse: The response object.
    """
    response = await self._recv_event(h11_connection)

    resp_data = {
        'encoding': self.encoding,
        'method': self.method,
        'status_code': response.status_code,
        'reason_phrase': str(response.reason, 'utf-8'),
        'http_version': str(response.http_version, 'utf-8'),
        'headers': c_i_dict([
            (str(name, 'utf-8'), str(value, 'utf-8'))
            for name, value in response.headers
        ]),
        'body': b'',
        'url': self.req_url
    }

    # 'set-cookie' may legitimately repeat; collect all values into a
    # list instead of letting later headers clobber earlier ones.
    for header in response.headers:
        if header[0] == b'set-cookie':
            try:
                resp_data['headers']['set-cookie'].append(
                    str(header[1], 'utf-8'))
            except (KeyError, AttributeError):
                resp_data['headers']['set-cookie'] = [str(header[1], 'utf-8')]

    # Decide whether a body is expected: explicit content-length,
    # chunked transfer-encoding, or "connection: close" framing.
    get_body = False
    try:
        if int(resp_data['headers']['content-length']) > 0:
            get_body = True
    except KeyError:
        try:
            if 'chunked' in resp_data['headers']['transfer-encoding'].lower():
                get_body = True
        except KeyError:
            if resp_data['headers'].get('connection', '').lower() == 'close':
                get_body = True

    if get_body:
        if self.callback is not None:
            # Feed each body chunk to the user-supplied callback.
            endof = await self._body_callback(h11_connection)
        elif self.stream:
            # Hand the caller a StreamBody; keep the socket checked out
            # unless the origin changed or the server will close it.
            if not ((self.scheme == self.initial_scheme and
                     self.host == self.initial_netloc) or
                    resp_data['headers']['connection'].lower() == 'close'):
                self.sock._active = False

            resp_data['body'] = StreamBody(
                h11_connection,
                self.sock,
                resp_data['headers'].get('content-encoding', None),
                resp_data['encoding']
            )

            self.streaming = True
        else:
            # Buffer the whole body in memory.
            while True:
                data = await self._recv_event(h11_connection)
                if isinstance(data, h11.Data):
                    resp_data['body'] += data.data
                elif isinstance(data, h11.EndOfMessage):
                    break
    else:
        # No body expected; just consume the EndOfMessage event.
        endof = await self._recv_event(h11_connection)
        assert isinstance(endof, h11.EndOfMessage)

    if self.streaming:
        return StreamResponse(**resp_data)

    return Response(**resp_data)
Instantiates the parser which manages incoming data: first getting the headers, storing cookies, and then parsing the response's body, if any.
14,053
async def _send(self, request_bytes, body_bytes, h11_connection):
    """Takes a request package and body, combines them, then sends them
    off into the ether.

    Args:
        request_bytes (h11.Request): The h11 request object.
        body_bytes (h11.Data): The h11 body object, or None.
        h11_connection (h11.Connection): The h11 connection for the request.
    """
    send_all = self.sock.send_all
    await send_all(h11_connection.send(request_bytes))
    if body_bytes is not None:
        await send_all(h11_connection.send(body_bytes))
    await send_all(h11_connection.send(h11.EndOfMessage()))
Takes a request package and body, combines them, then sends them off into the ether.
14,054
async def _location_auth_protect(self, location):
    """Checks to see if the new location is
       1. On the same top level domain, and
       2. As secure as, or more secure than, the current connection type.

    Args:
        location (str): Redirect location returned by the server.

    Returns:
        bool: True when auth may be forwarded to ``location``.
    """
    def strip_www_prefix(host):
        # BUGFIX: the original did ``host.replace(re.match(...)[0], '')``
        # unconditionally, which raised TypeError (None is not
        # subscriptable) for hosts that don't match the www-pattern, and
        # ``replace`` removed the match everywhere in the host, not just
        # the prefix. Strip only a matching leading prefix.
        m = re.match(_WWX_MATCH, host)
        return host[len(m[0]):] if m else host

    netloc_sans_port = strip_www_prefix(self.host.split(':')[0])
    base_domain = '.'.join(netloc_sans_port.split('.')[-2:])

    l_scheme, l_netloc, _, _, _, _ = urlparse(location)
    location_sans_port = strip_www_prefix(l_netloc.split(':')[0])
    location_domain = '.'.join(location_sans_port.split('.')[-2:])

    if base_domain == location_domain:
        # 'http' < 'https' lexicographically, so this checks whether the
        # redirect scheme is less secure than the current one.
        return not (l_scheme < self.scheme)

    # Different top-level domain: never forward auth (the original fell
    # through and returned None here; False keeps the same truthiness).
    return False
Checks to see if the new location is (1) on the same top-level domain and (2) as secure as, or more secure than, the current connection type.
14,055
async def _body_callback(self, h11_connection):
    """A callback func to be supplied if the user wants to do something
    directly with the response body's stream.

    Feeds each h11.Data chunk to ``self.callback``; returns the first
    non-Data event (normally EndOfMessage).
    """
    while True:
        event = await self._recv_event(h11_connection)
        if not isinstance(event, h11.Data):
            return event
        await self.callback(event.data)
A callback func to be supplied if the user wants to do something directly with the response body s stream .
14,056
async def _connect(self, host_loc):
    """Simple enough stuff to figure out where we should connect, and
    creates the appropriate connection.

    Returns a ``(connection, port)`` tuple; the port stays a string.
    """
    scheme, host, path, parameters, query, fragment = urlparse(host_loc)
    if parameters or query or fragment:
        raise ValueError('Supplied info beyond scheme, host.' +
                         ' Host should be top level only: ', path)

    host, port = get_netloc_port(scheme, host)

    target = (host, int(port))
    if scheme == 'http':
        connection = await self._open_connection_http(target)
    else:
        connection = await self._open_connection_https(target)
    return connection, port
Simple enough stuff to figure out where we should connect and creates the appropriate connection .
14,057
async def request(self, method, url=None, *, path='', retries=1, connection_timeout=60, **kwargs):
    """This is the template for all of the http method methods for the Session.

    Args:
        method (str): An http method, such as 'GET' or 'POST'.
        url (str): The url the request should be made to; built from the
            session's base location plus *path* when None.
        path (str): Optional path appended to the session's base url.
        retries (int): Number of times to retry after a ConnectionError.
        connection_timeout (int): Seconds allowed for establishing the
            connection.
        kwargs: Forwarded to the RequestProcessor (data, params, headers,
            json, files, cookies, auth, timeout, ...).

    Returns:
        The response object.
    """
    timeout = kwargs.get('timeout', None)
    req_headers = kwargs.pop('headers', None)

    if self.headers is not None:
        headers = copy(self.headers)
        if req_headers is not None:
            headers.update(req_headers)
        req_headers = headers

    async with self.sema:
        if url is None:
            url = self._make_url() + path

        retry = False
        sock = None
        try:
            sock = await timeout_manager(
                connection_timeout, self._grab_connection, url)
            port = sock.port

            req_obj = RequestProcessor(
                self,
                method,
                url,
                port,
                headers=req_headers,
                encoding=self.encoding,
                sock=sock,
                persist_cookies=self._cookie_tracker,
                **kwargs
            )

            try:
                if timeout is None:
                    sock, r = await req_obj.make_request()
                else:
                    sock, r = await timeout_manager(
                        timeout, req_obj.make_request)
            except BadHttpResponse:
                # One immediate retry on a malformed response.
                if timeout is None:
                    sock, r = await req_obj.make_request()
                else:
                    sock, r = await timeout_manager(
                        timeout, req_obj.make_request)

            if sock is not None:
                try:
                    if r.headers['connection'].lower() == 'close':
                        sock._active = False
                        await sock.close()
                except KeyError:
                    pass
                await self.return_to_pool(sock)

        except ConnectionError as e:
            if retries > 0:
                retry = True
                retries -= 1
            else:
                raise e

        except Exception as e:
            if sock:
                await self._handle_exception(e, sock)
            raise

        # KeyboardInterrupt and friends: just close the socket, re-raise.
        except BaseException as e:
            if sock:
                await sock.close()
            raise e

        if retry:
            # BUGFIX: pass the original request headers (req_headers)
            # back in. The old code passed the local ``headers`` name,
            # which is unbound (NameError) whenever ``self.headers`` is
            # None; the recursive call re-merges session headers itself.
            return (await self.request(method,
                                       url,
                                       path=path,
                                       retries=retries,
                                       headers=req_headers,
                                       **kwargs))

        return r
This is the template for all of the http method methods for the Session .
14,058
async def _handle_exception(self, e, sock):
    """Given an exception, we want to handle it appropriately. Some
    exceptions we prefer to shadow with an asks exception, and some we
    want to raise directly. In all cases we clean up the underlying
    socket.

    Protocol-level failures (RemoteProtocolError / AssertionError) are
    re-raised as BadHttpResponse; other Exceptions propagate unchanged.
    Non-Exception BaseExceptions pass through untouched.
    """
    if isinstance(e, (RemoteProtocolError, AssertionError)):
        await sock.close()
        raise BadHttpResponse('Invalid HTTP response from server.') from e

    if isinstance(e, Exception):
        await sock.close()
        raise e
Given an exception we want to handle it appropriately . Some exceptions we prefer to shadow with an asks exception and some we want to raise directly . In all cases we clean up the underlying socket .
14,059
async def _grab_connection(self, url):
    """The connection pool handler. Returns a connection to the caller.
    If there are no connections ready, and as many connections checked
    out as there are available total, we yield control to the event loop.

    Args:
        url (str): The url to connect to.
    """
    scheme, host, _, _, _, _ = urlparse(url)
    host_loc = urlunparse((scheme, host, '', '', '', ''))

    pooled = self._checkout_connection(host_loc)
    if pooled is not None:
        return pooled
    return await self._make_connection(host_loc)
The connection pool handler . Returns a connection to the caller . If there are no connections ready and as many connections checked out as there are available total we yield control to the event loop .
14,060
def json(self, **kwargs):
    """If the response's body is valid json, we load it as a python dict
    and return it.

    Keyword arguments are forwarded to ``json.loads``.
    """
    decompressed = self._decompress(self.encoding)
    return _json.loads(decompressed, **kwargs)
If the response s body is valid json we load it as a python dict and return it .
14,061
def raise_for_status(self):
    """Raise BadStatus if one occurred.

    4xx raises a client-error BadStatus, 5xx a server-error BadStatus;
    any other status returns None silently.
    """
    code = self.status_code
    if 400 <= code < 500:
        raise BadStatus(
            '{} Client Error: {} for url: {}'.format(
                code, self.reason_phrase, self.url),
            code
        )
    if 500 <= code < 600:
        raise BadStatus(
            '{} Server Error: {} for url: {}'.format(
                code, self.reason_phrase, self.url),
            code
        )
Raise BadStatus if one occurred .
14,062
def recent_photos(request):
    """Return all images from the database, newest first, rendered with
    the dashboard browse template.
    """
    images = []
    queryset = Image_File.objects.filter(is_image=True).order_by("-date_created")
    for record in queryset:
        thumb = "/" + record.thumbnail.url if record.thumbnail else ""
        images.append({
            'src': "/" + record.upload.url,
            'thumb': thumb,
            'is_image': True,
        })
    return render_to_response('dashboard/browse.html', {'files': images})
returns all the images from the data base
14,063
def marshal(self, v):
    """Turn this value into API format.

    Performs a reverse lookup in ``self.choices``; falsy input yields
    None. Exactly one key must map to *v*, otherwise NotImplementedError
    is raised.
    """
    if not v:
        return None
    matches = [key for key, mapped in self.choices.items() if mapped == v]
    if len(matches) == 1:
        return matches[0]
    if not matches:
        raise NotImplementedError(
            "No such reverse choice {0} for field {1}.".format(v, self))
    raise NotImplementedError(
        "Too many reverse choices {0} for value {1} for field {2}".format(
            matches, v, self))
Turn this value into API format .
14,064
def unmarshal(self, v):
    """Convert the value from Strava API format to a useful python
    representation.

    Unknown choices are logged as a warning and passed through unchanged.
    """
    if v in self.choices:
        return self.choices[v]
    self.log.warning("No such choice {0} for field {1}.".format(v, self))
    # Fall back to the raw API value when it is not a known choice.
    return v
Convert the value from Strava API format to useful python representation .
14,065
def unmarshal(self, value, bind_client=None):
    """Cast the specified value to the entity type.

    Values already of the right type pass through untouched. Dicts are
    expanded attribute-by-attribute onto a fresh instance of
    ``self.type``; unknown keys are logged and skipped. Anything else
    raises.
    """
    if isinstance(value, self.type):
        return value

    entity = self.type()
    if bind_client is not None and hasattr(entity.__class__, 'bind_client'):
        entity.bind_client = bind_client

    if not isinstance(value, dict):
        raise Exception("Unable to unmarshall object {0!r}".format(value))

    for (k, v) in value.items():
        if not hasattr(entity.__class__, k):
            self.log.warning(
                "Unable to set attribute {0} on entity {1!r}".format(k, entity))
        else:
            setattr(entity, k, v)
    return entity
Cast the specified value to the entity type .
14,066
def marshal(self, values):
    """Turn a list of entities into a list of dictionaries.

    :param values: The entities to serialize, or None.
    :return: List of marshalled entities, or None when *values* is None.
    """
    if values is None:
        return None
    parent = super(EntityCollection, self)
    return [parent.marshal(v) for v in values]
Turn a list of entities into a list of dictionaries .
14,067
def unmarshal(self, values, bind_client=None):
    """Cast the list.

    :param values: The list of API dicts to unmarshal, or None.
    :param bind_client: Client to bind onto each entity.
    :return: List of entity objects, or None when *values* is None.
    """
    if values is None:
        return None
    parent = super(EntityCollection, self)
    return [parent.unmarshal(v, bind_client=bind_client) for v in values]
Cast the list .
14,068
def authorization_url(self, client_id, redirect_uri, approval_prompt='auto', scope=None, state=None):
    """Get the URL needed to authorize your application to access a
    Strava user's information.

    Delegates straight to the underlying protocol layer.

    :param client_id: The numeric developer client id.
    :param redirect_uri: The URL the user will be redirected to after
        authorization.
    :param approval_prompt: Whether to prompt for approval even if
        already authorized ('auto' or 'force').
    :param scope: The access scope(s) requested.
    :param state: Opaque value returned to your application.
    :return: The authorization URL.
    """
    auth_params = dict(
        client_id=client_id,
        redirect_uri=redirect_uri,
        approval_prompt=approval_prompt,
        scope=scope,
        state=state,
    )
    return self.protocol.authorization_url(**auth_params)
Get the URL needed to authorize your application to access a Strava user s information .
14,069
def get_activities(self, before=None, after=None, limit=None):
    """Get activities for authenticated user sorted by newest first.

    :param before: Only activities started before this UTC datetime.
    :param after: Only activities started after this UTC datetime.
    :param limit: Cap on number of activities fetched (None = no cap).
    :return: An iterator of :class:`model.Activity` objects.
    """
    epoch_before = self._utc_datetime_to_epoch(before) if before else None
    epoch_after = self._utc_datetime_to_epoch(after) if after else None

    result_fetcher = functools.partial(
        self.protocol.get,
        '/athlete/activities',
        before=epoch_before,
        after=epoch_after,
    )
    return BatchedResultsIterator(
        entity=model.Activity,
        bind_client=self,
        result_fetcher=result_fetcher,
        limit=limit,
    )
Get activities for authenticated user sorted by newest first .
14,070
def get_athlete(self, athlete_id=None):
    """Gets the currently authenticated athlete (detail-level).

    :param athlete_id: Must be None; fetching other athletes by id was
        removed by Strava's January 2018 API update.
    :raise NotImplementedError: When *athlete_id* is given.
    :return: The authenticated :class:`model.Athlete`.
    """
    if athlete_id is not None:
        raise NotImplementedError(
            "The /athletes/{id} endpoint was removed by Strava. "
            "See https://developers.strava.com/docs/january-2018-update/")
    raw = self.protocol.get('/athlete')
    return model.Athlete.deserialize(raw, bind_client=self)
Gets the specified athlete ; if athlete_id is None then retrieves a detail - level representation of currently authenticated athlete ; otherwise summary - level representation returned of athlete .
14,071
def update_athlete(self, city=None, state=None, country=None, sex=None, weight=None):
    """Updates the properties of the authorized athlete.

    Only parameters that are not None are sent; *weight* is coerced to
    float before sending.

    :return: The updated :class:`model.Athlete`.
    """
    candidates = dict(city=city, state=state, country=country, sex=sex)
    params = {name: value for name, value in candidates.items()
              if value is not None}
    if weight is not None:
        params['weight'] = float(weight)

    raw_athlete = self.protocol.put('/athlete', **params)
    return model.Athlete.deserialize(raw_athlete, bind_client=self)
Updates the properties of the authorized athlete .
14,072
def get_athlete_stats(self, athlete_id=None):
    """Returns Statistics for the athlete.

    athlete_id must be the id of the authenticated athlete or left
    blank. If left blank two requests will be made — first to get the
    authenticated athlete's id and second to get the Stats.

    :return: A :class:`model.AthleteStats` object.
    """
    resolved_id = (athlete_id if athlete_id is not None
                   else self.get_athlete().id)
    raw = self.protocol.get('/athletes/{id}/stats', id=resolved_id)
    return model.AthleteStats.deserialize(raw)
Returns Statistics for the athlete . athlete_id must be the id of the authenticated athlete or left blank . If it is left blank two requests will be made - first to get the authenticated athlete s id and second to get the Stats .
14,073
def get_athlete_clubs(self):
    """List the clubs for the currently authenticated athlete.

    :return: A list of :class:`model.Club` objects.
    """
    raw_clubs = self.protocol.get('/athlete/clubs')
    return [model.Club.deserialize(raw_club, bind_client=self)
            for raw_club in raw_clubs]
List the clubs for the currently authenticated athlete .
14,074
def get_club(self, club_id):
    """Return a specific club object.

    :param club_id: The identifier of the club to fetch.
    :return: The :class:`model.Club` for the matching id.
    """
    club_data = self.protocol.get("/clubs/{id}", id=club_id)
    return model.Club.deserialize(club_data, bind_client=self)
Return a specific club object .
14,075
def get_club_members(self, club_id, limit=None):
    """Gets the member objects for specified club ID.

    :param club_id: The numeric ID for the club.
    :param limit: Maximum number of athletes to return (None for all).
    :return: An iterator of :class:`model.Athlete` objects.
    """
    fetch_page = functools.partial(
        self.protocol.get, '/clubs/{id}/members', id=club_id)
    return BatchedResultsIterator(entity=model.Athlete,
                                  bind_client=self,
                                  result_fetcher=fetch_page,
                                  limit=limit)
Gets the member objects for specified club ID .
14,076
def get_club_activities(self, club_id, limit=None):
    """Gets the activities associated with specified club.

    :param club_id: The numeric ID for the club.
    :param limit: Maximum number of activities to return (None for all).
    :return: An iterator of :class:`model.Activity` objects.
    """
    fetch_page = functools.partial(
        self.protocol.get, '/clubs/{id}/activities', id=club_id)
    return BatchedResultsIterator(entity=model.Activity,
                                  bind_client=self,
                                  result_fetcher=fetch_page,
                                  limit=limit)
Gets the activities associated with specified club .
14,077
def get_activity(self, activity_id, include_all_efforts=False):
    """Gets specified activity.

    :param activity_id: The ID of the activity to fetch.
    :param include_all_efforts: Whether to include segment efforts.
    :return: The :class:`model.Activity` object.
    """
    activity_data = self.protocol.get(
        '/activities/{id}',
        id=activity_id,
        include_all_efforts=include_all_efforts)
    return model.Activity.deserialize(activity_data, bind_client=self)
Gets specified activity .
14,078
def create_activity(self, name, activity_type, start_date_local, elapsed_time, description=None, distance=None):
    """Create a new manual activity.

    :param name: The name of the activity.
    :param activity_type: The activity type (case-insensitive; must be
        one of model.Activity.TYPES).
    :param start_date_local: Local start time; datetime or pre-formatted
        ISO-8601 string.
    :param elapsed_time: Seconds (int) or a :class:`datetime.timedelta`.
    :param description: Optional description for the activity.
    :param distance: Meters (float) or a units Quantity instance.
    :raise ValueError: On an unrecognized activity type.
    :return: The created :class:`model.Activity`.
    """
    # Normalize flexible argument types into what the API expects.
    if isinstance(elapsed_time, timedelta):
        elapsed_time = unithelper.timedelta_to_seconds(elapsed_time)

    if isinstance(distance, Quantity):
        distance = float(unithelper.meters(distance))

    if isinstance(start_date_local, datetime):
        start_date_local = start_date_local.strftime("%Y-%m-%dT%H:%M:%SZ")

    # Case-insensitive validation against the known activity types.
    if not activity_type.lower() in [t.lower() for t in model.Activity.TYPES]:
        raise ValueError("Invalid activity type: {0}. Possible values: {1!r}".format(activity_type, model.Activity.TYPES))

    params = dict(name=name, type=activity_type, start_date_local=start_date_local, elapsed_time=elapsed_time)

    if description is not None:
        params['description'] = description

    if distance is not None:
        params['distance'] = distance

    raw_activity = self.protocol.post('/activities', **params)

    return model.Activity.deserialize(raw_activity, bind_client=self)
Create a new manual activity .
14,079
def update_activity(self, activity_id, name=None, activity_type=None, private=None, commute=None, trainer=None, gear_id=None, description=None, device_name=None):
    """Updates the properties of a specific activity.

    Only non-None parameters are sent; boolean flags are coerced to 0/1
    integers as the API expects.

    :param activity_id: The ID of the activity to update.
    :param name: The name of the activity.
    :param activity_type: The activity type (case-insensitive; one of
        model.Activity.TYPES).
    :param private: Whether the activity is private.
    :param commute: Whether the activity is a commute.
    :param trainer: Whether this is a trainer activity.
    :param gear_id: Alphanumeric ID of gear used on this activity.
    :param description: Description for the activity.
    :param device_name: Device name for the activity.
    :raise ValueError: On an unrecognized activity type.
    :return: The updated :class:`model.Activity`.
    """
    params = {}

    if name is not None:
        params['name'] = name

    if activity_type is not None:
        if not activity_type.lower() in [t.lower() for t in model.Activity.TYPES]:
            raise ValueError("Invalid activity type: {0}. Possible values: {1!r}".format(activity_type, model.Activity.TYPES))
        params['type'] = activity_type

    if private is not None:
        params['private'] = int(private)

    if commute is not None:
        params['commute'] = int(commute)

    if trainer is not None:
        params['trainer'] = int(trainer)

    if gear_id is not None:
        params['gear_id'] = gear_id

    if description is not None:
        params['description'] = description

    if device_name is not None:
        params['device_name'] = device_name

    raw_activity = self.protocol.put('/activities/{activity_id}', activity_id=activity_id, **params)

    return model.Activity.deserialize(raw_activity, bind_client=self)
Updates the properties of a specific activity .
14,080
def get_activity_zones(self, activity_id):
    """Gets zones for activity.

    :param activity_id: The activity for which to retrieve zones.
    :return: A list of :class:`model.BaseActivityZone` objects.
    """
    zone_structs = self.protocol.get('/activities/{id}/zones', id=activity_id)
    return [model.BaseActivityZone.deserialize(struct, bind_client=self)
            for struct in zone_structs]
Gets zones for activity .
14,081
def get_activity_comments(self, activity_id, markdown=False, limit=None):
    """Gets the comments for an activity.

    :param activity_id: The activity for which to fetch comments.
    :param markdown: Whether to include markdown in comments.
    :param limit: Maximum number of comments to return (None for all).
    :return: An iterator of :class:`model.ActivityComment` objects.
    """
    fetch_page = functools.partial(
        self.protocol.get,
        '/activities/{id}/comments',
        id=activity_id,
        markdown=int(markdown))
    return BatchedResultsIterator(entity=model.ActivityComment,
                                  bind_client=self,
                                  result_fetcher=fetch_page,
                                  limit=limit)
Gets the comments for an activity .
14,082
def get_activity_kudos(self, activity_id, limit=None):
    """Gets the kudos for an activity.

    :param activity_id: The activity for which to fetch kudos.
    :param limit: Maximum number of kudos to return (None for all).
    :return: An iterator of :class:`model.ActivityKudos` objects.
    """
    fetch_page = functools.partial(
        self.protocol.get, '/activities/{id}/kudos', id=activity_id)
    return BatchedResultsIterator(entity=model.ActivityKudos,
                                  bind_client=self,
                                  result_fetcher=fetch_page,
                                  limit=limit)
Gets the kudos for an activity .
14,083
def get_activity_photos(self, activity_id, size=None, only_instagram=False):
    """Gets the photos from an activity.

    :param activity_id: The activity for which to fetch photos.
    :param size: The requested size of the activity's photos.
    :param only_instagram: When True, skip native photo sources.
    :return: An iterator of :class:`model.ActivityPhoto` objects.
    """
    params = {}
    if not only_instagram:
        # Also include Strava-native photo sources, not only Instagram.
        params['photo_sources'] = 'true'
    if size is not None:
        params['size'] = size

    fetch_page = functools.partial(
        self.protocol.get,
        '/activities/{id}/photos',
        id=activity_id,
        **params)
    return BatchedResultsIterator(entity=model.ActivityPhoto,
                                  bind_client=self,
                                  result_fetcher=fetch_page)
Gets the photos from an activity .
14,084
def get_activity_laps(self, activity_id):
    """Gets the laps from an activity.

    :param activity_id: The activity for which to fetch laps.
    :return: An iterator of :class:`model.ActivityLap` objects.
    """
    fetch_page = functools.partial(
        self.protocol.get, '/activities/{id}/laps', id=activity_id)
    return BatchedResultsIterator(entity=model.ActivityLap,
                                  bind_client=self,
                                  result_fetcher=fetch_page)
Gets the laps from an activity .
14,085
def get_gear(self, gear_id):
    """Get details for an item of gear.

    :param gear_id: The gear id.
    :return: The :class:`model.Gear` object.
    """
    raw_gear = self.protocol.get('/gear/{id}', id=gear_id)
    return model.Gear.deserialize(raw_gear)
Get details for an item of gear .
14,086
def get_segment_effort(self, effort_id):
    """Return a specific segment effort by ID.

    :param effort_id: The id of associated effort to fetch.
    :return: The :class:`model.SegmentEffort` object.
    """
    raw_effort = self.protocol.get('/segment_efforts/{id}', id=effort_id)
    return model.SegmentEffort.deserialize(raw_effort)
Return a specific segment effort by ID .
14,087
def get_segment(self, segment_id):
    """Gets a specific segment by ID.

    :param segment_id: The segment to fetch.
    :return: The :class:`model.Segment` object.
    """
    raw_segment = self.protocol.get('/segments/{id}', id=segment_id)
    return model.Segment.deserialize(raw_segment, bind_client=self)
Gets a specific segment by ID .
14,088
def get_starred_segments(self, limit=None):
    """Returns a summary representation of the segments starred by the
    authenticated user. Pagination is supported.

    :param limit: Limit on the number of starred segments returned;
        enforced client-side by the results iterator.
    :return: An iterator of :class:`model.Segment` objects.
    """
    # BUGFIX: the previous implementation built a ``params`` dict with
    # the limit but never passed it anywhere (dead code). The limit is,
    # and always was, applied only by BatchedResultsIterator.
    result_fetcher = functools.partial(self.protocol.get, '/segments/starred')
    return BatchedResultsIterator(
        entity=model.Segment,
        bind_client=self,
        result_fetcher=result_fetcher,
        limit=limit,
    )
Returns a summary representation of the segments starred by the authenticated user . Pagination is supported .
14,089
def get_athlete_starred_segments(self, athlete_id, limit=None):
    """Returns a summary representation of the segments starred by the
    specified athlete. Pagination is supported.

    :param athlete_id: The ID of the athlete.
    :param limit: Maximum number of segments to return (None for all).
    :return: An iterator of :class:`model.Segment` objects.
    """
    fetch_page = functools.partial(
        self.protocol.get,
        '/athletes/{id}/segments/starred',
        id=athlete_id)
    return BatchedResultsIterator(entity=model.Segment,
                                  bind_client=self,
                                  result_fetcher=fetch_page,
                                  limit=limit)
Returns a summary representation of the segments starred by the specified athlete . Pagination is supported .
14,090
def get_segment_leaderboard(self, segment_id, gender=None, age_group=None,
                            weight_class=None, following=None, club_id=None,
                            timeframe=None, top_results_limit=None, page=None,
                            context_entries=None):
    """Fetch the leaderboard for a segment, applying optional filters.

    :param segment_id: ID of the segment.
    :param gender: Optional gender filter; must be 'M' or 'F'.
    :param age_group: Optional age-group bucket (e.g. '25_34').
    :param weight_class: Optional weight-class bucket (lbs or kg buckets).
    :param following: Optional; restrict to athletes the user follows.
    :param club_id: Optional club filter.
    :param timeframe: Optional date range ('this_year', 'this_month',
        'this_week', 'today').
    :param top_results_limit: Optional number of top results per page.
    :param page: Optional page number.
    :param context_entries: Optional number of context entries around the
        authenticated athlete.
    :return: A deserialized :class:`model.SegmentLeaderboard`.
    :raises ValueError: If any filter value is not one of the allowed values.
    """
    # Idiom fix: use ``x not in y`` rather than ``not x in y``; error
    # messages and validation behavior are unchanged.
    params = {}
    if gender is not None:
        if gender.upper() not in ('M', 'F'):
            raise ValueError("Invalid gender: {0}. Possible values: 'M' or 'F'".format(gender))
        params['gender'] = gender
    valid_age_groups = ('0_24', '25_34', '35_44', '45_54', '55_64', '65_plus')
    if age_group is not None:
        if age_group not in valid_age_groups:
            raise ValueError("Invalid age group: {0}. Possible values: {1!r}".format(age_group, valid_age_groups))
        params['age_group'] = age_group
    valid_weight_classes = ('0_124', '125_149', '150_164', '165_179',
                            '180_199', '200_plus', '0_54', '55_64', '65_74',
                            '75_84', '85_94', '95_plus')
    if weight_class is not None:
        if weight_class not in valid_weight_classes:
            raise ValueError("Invalid weight class: {0}. Possible values: {1!r}".format(weight_class, valid_weight_classes))
        params['weight_class'] = weight_class
    if following is not None:
        params['following'] = int(following)
    if club_id is not None:
        params['club_id'] = club_id
    if timeframe is not None:
        valid_timeframes = ('this_year', 'this_month', 'this_week', 'today')
        if timeframe not in valid_timeframes:
            raise ValueError("Invalid timeframe: {0}. Possible values: {1!r}".format(timeframe, valid_timeframes))
        # API parameter name differs from the Python argument name.
        params['date_range'] = timeframe
    if top_results_limit is not None:
        params['per_page'] = top_results_limit
    if page is not None:
        params['page'] = page
    if context_entries is not None:
        params['context_entries'] = context_entries
    return model.SegmentLeaderboard.deserialize(
        self.protocol.get('/segments/{id}/leaderboard',
                          id=segment_id, **params),
        bind_client=self)
Gets the leaderboard for a segment .
14,091
def get_segment_efforts(self, segment_id, athlete_id=None,
                        start_date_local=None, end_date_local=None,
                        limit=None):
    """Iterate all efforts on a segment, sorted by start_date_local.

    :param segment_id: ID of the segment.
    :param athlete_id: Optional athlete filter.
    :param start_date_local: Optional lower bound (datetime or ISO string).
    :param end_date_local: Optional upper bound (datetime or ISO string).
    :param limit: Optional maximum number of efforts to yield.
    :return: An iterator of :class:`model.BaseEffort` objects.
    """
    def _iso(value):
        # Accept either a datetime or an ISO-format string.  The "Z"
        # suffix presumes the naive datetime is UTC -- TODO confirm the
        # API's expectation.
        if isinstance(value, six.string_types):
            value = arrow.get(value).naive
        return value.strftime("%Y-%m-%dT%H:%M:%SZ")

    params = {"segment_id": segment_id}
    if athlete_id is not None:
        params['athlete_id'] = athlete_id
    if start_date_local:
        params["start_date_local"] = _iso(start_date_local)
    if end_date_local:
        params["end_date_local"] = _iso(end_date_local)
    if limit is not None:
        params["limit"] = limit
    fetcher = functools.partial(self.protocol.get,
                                '/segments/{segment_id}/all_efforts',
                                **params)
    return BatchedResultsIterator(entity=model.BaseEffort,
                                  bind_client=self,
                                  result_fetcher=fetcher,
                                  limit=limit)
Gets all efforts on a particular segment sorted by start_date_local
14,092
def explore_segments(self, bounds, activity_type=None, min_cat=None,
                     max_cat=None):
    """Return an array of up to 10 segments within the given bounds.

    :param bounds: Either four floats (sw_lat, sw_lng, ne_lat, ne_lng) or
        two (lat, lon) pairs for the SW and NE corners.
    :param activity_type: Optional; 'riding' or 'running'.
    :param min_cat: Optional minimum climb category filter.
    :param max_cat: Optional maximum climb category filter.
    :return: A list of :class:`model.SegmentExplorerResult` objects.
    :raises ValueError: If bounds or activity_type are invalid.
    """
    if len(bounds) == 2:
        # Flatten a pair of (lat, lon) tuples into four scalars.
        bounds = (bounds[0][0], bounds[0][1], bounds[1][0], bounds[1][1])
    elif len(bounds) != 4:
        # Fix: the original raised with a bare "{0!r}" placeholder because
        # .format(bounds) was never called.
        raise ValueError("Invalid bounds specified: {0!r}. Must be list of 4 float values or list of 2 (lat,lon) tuples.".format(bounds))
    params = {'bounds': ','.join(str(b) for b in bounds)}
    valid_activity_types = ('riding', 'running')
    if activity_type is not None:
        # Fix: validate against the named tuple instead of a duplicated
        # literal, so the check and the error message cannot drift apart.
        if activity_type not in valid_activity_types:
            raise ValueError('Invalid activity type: {0}. Possible values: {1!r}'.format(activity_type, valid_activity_types))
        params['activity_type'] = activity_type
    if min_cat is not None:
        params['min_cat'] = min_cat
    if max_cat is not None:
        params['max_cat'] = max_cat
    raw = self.protocol.get('/segments/explore', **params)
    return [model.SegmentExplorerResult.deserialize(v, bind_client=self)
            for v in raw['segments']]
Returns an array of up to 10 segments .
14,093
def get_activity_streams(self, activity_id, types=None, resolution=None,
                         series_type=None):
    """Return the streams for an activity, keyed by stream type.

    :param activity_id: ID of the activity.
    :param types: Optional iterable of stream type names to request.
    :param resolution: Optional sampling resolution.
    :param series_type: Optional series type.
    :return: A dict mapping stream type -> :class:`model.Stream`, or
        ``None`` if the activity has no streams (ObjectNotFound).
    """
    if types is not None:
        types = ",".join(types)
    extra = {}
    if resolution is not None:
        extra["resolution"] = resolution
    if series_type is not None:
        extra["series_type"] = series_type
    fetcher = functools.partial(
        self.protocol.get,
        '/activities/{id}/streams/{types}'.format(id=activity_id,
                                                  types=types),
        **extra)
    stream_iter = BatchedResultsIterator(entity=model.Stream,
                                         bind_client=self,
                                         result_fetcher=fetcher)
    try:
        return {stream.type: stream for stream in stream_iter}
    except exc.ObjectNotFound:
        # No streams exist for this activity.
        return None
Returns the streams for an activity.
14,094
def get_effort_streams(self, effort_id, types=None, resolution=None,
                       series_type=None):
    """Return the streams for a segment effort, keyed by stream type.

    :param effort_id: ID of the segment effort.
    :param types: Optional iterable of stream type names to request.
    :param resolution: Optional sampling resolution.
    :param series_type: Optional series type.
    :return: A dict mapping stream type -> :class:`model.Stream`.
    """
    if types is not None:
        types = ",".join(types)
    extra = {}
    if resolution is not None:
        extra["resolution"] = resolution
    if series_type is not None:
        extra["series_type"] = series_type
    fetcher = functools.partial(
        self.protocol.get,
        '/segment_efforts/{id}/streams/{types}'.format(id=effort_id,
                                                       types=types),
        **extra)
    stream_iter = BatchedResultsIterator(entity=model.Stream,
                                         bind_client=self,
                                         result_fetcher=fetcher)
    return {stream.type: stream for stream in stream_iter}
Returns the streams for an effort.
14,095
def get_running_race(self, race_id):
    """Fetch a single running race by its identifier.

    :param race_id: ID of the running race.
    :return: A deserialized :class:`model.RunningRace`, bound to this client.
    """
    return model.RunningRace.deserialize(
        self.protocol.get('/running_races/{id}', id=race_id),
        bind_client=self)
Gets a running race for a given identifier.
14,096
def get_running_races(self, year=None):
    """Iterate the running races for a given year.

    :param year: Year to query; defaults to the current year.
    :return: An iterator of :class:`model.RunningRace` objects.
    """
    if year is None:
        year = datetime.datetime.now().year
    fetcher = functools.partial(self.protocol.get, '/running_races',
                                year=year)
    return BatchedResultsIterator(entity=model.RunningRace,
                                  bind_client=self,
                                  result_fetcher=fetcher)
Gets a running races for a given year .
14,097
def get_routes(self, athlete_id=None, limit=None):
    """List the routes for an athlete.

    :param athlete_id: Optional athlete ID; defaults to the authenticated
        user's own ID (looked up via ``get_athlete``).
    :param limit: Optional maximum number of routes to yield.
    :return: An iterator of :class:`model.Route` objects.
    """
    if athlete_id is None:
        athlete_id = self.get_athlete().id
    fetcher = functools.partial(
        self.protocol.get,
        '/athletes/{id}/routes'.format(id=athlete_id))
    return BatchedResultsIterator(entity=model.Route,
                                  bind_client=self,
                                  result_fetcher=fetcher,
                                  limit=limit)
Gets the routes list for an authenticated user .
14,098
def get_route(self, route_id):
    """Fetch a specific route by its ID.

    :param route_id: ID of the route.
    :return: A deserialized :class:`model.Route`, bound to this client.
    """
    return model.Route.deserialize(
        self.protocol.get('/routes/{id}', id=route_id),
        bind_client=self)
Gets specified route .
14,099
def get_route_streams(self, route_id):
    """Return the streams for a route, keyed by stream type.

    :param route_id: ID of the route.
    :return: A dict mapping stream type -> :class:`model.Stream`.
    """
    fetcher = functools.partial(
        self.protocol.get,
        '/routes/{id}/streams/'.format(id=route_id))
    stream_iter = BatchedResultsIterator(entity=model.Stream,
                                         bind_client=self,
                                         result_fetcher=fetcher)
    return {stream.type: stream for stream in stream_iter}
Returns streams for a route .