| idx (int64, 0-63k) | question (stringlengths 61-4.03k) | target (stringlengths 6-1.23k) |
|---|---|---|
8,500
|
def put_info ( self , key , value ) : return self . instance . put_task_info ( self . name , key , value )
|
Put associated information of the task .
|
8,501
|
def append_little_endian32 ( self , unsigned_value ) : if not 0 <= unsigned_value <= wire_format . UINT32_MAX : raise errors . EncodeError ( 'Unsigned 32-bit out of range: %d' % unsigned_value ) self . append_raw_bytes ( struct . pack ( wire_format . FORMAT_UINT32_LITTLE_ENDIAN , unsigned_value ) )
|
Appends an unsigned 32 - bit integer to the internal buffer in little - endian byte order .
|
8,502
|
def append_little_endian64 ( self , unsigned_value ) : if not 0 <= unsigned_value <= wire_format . UINT64_MAX : raise errors . EncodeError ( 'Unsigned 64-bit out of range: %d' % unsigned_value ) self . append_raw_bytes ( struct . pack ( wire_format . FORMAT_UINT64_LITTLE_ENDIAN , unsigned_value ) )
|
Appends an unsigned 64 - bit integer to the internal buffer in little - endian byte order .
|
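The two rows above pack fixed-width integers with `struct.pack` via `wire_format` format constants. As a hedged aside, those constants are assumed here to be the standard little-endian format strings `'<I'` and `'<Q'`; a minimal standalone check:

```python
import struct

# Little-endian fixed-width packing as in the two rows above, assuming
# FORMAT_UINT32_LITTLE_ENDIAN == '<I' and FORMAT_UINT64_LITTLE_ENDIAN == '<Q'.
assert struct.pack('<I', 1) == b'\x01\x00\x00\x00'                    # 32-bit
assert struct.pack('<Q', 258) == b'\x02\x01\x00\x00\x00\x00\x00\x00'  # 64-bit, 258 = 0x0102
```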
8,503
|
def append_var_uint32 ( self , value ) : if not 0 <= value <= wire_format . UINT32_MAX : raise errors . EncodeError ( 'Value out of range: %d' % value ) self . append_var_uint64 ( value )
|
Appends an unsigned 32 - bit integer to the internal buffer encoded as a varint .
|
8,504
|
def append_varint64 ( self , value ) : if not wire_format . INT64_MIN <= value <= wire_format . INT64_MAX : raise errors . EncodeError ( 'Value out of range: %d' % value ) if value < 0 : value += ( 1 << 64 ) self . append_var_uint64 ( value )
|
Appends a signed 64 - bit integer to the internal buffer encoded as a varint .
|
8,505
|
def append_var_uint64 ( self , unsigned_value ) : if not 0 <= unsigned_value <= wire_format . UINT64_MAX : raise errors . EncodeError ( 'Value out of range: %d' % unsigned_value ) while True : bits = unsigned_value & 0x7f unsigned_value >>= 7 if unsigned_value : bits |= 0x80 self . _buffer . append ( bits ) if not unsigned_value : break
|
Appends an unsigned 64 - bit integer to the internal buffer encoded as a varint .
|
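For readability, here is a minimal standalone sketch of the base-128 varint loop the row above implements; the function name `encode_varuint64` and the plain `bytearray` buffer are illustrative, not part of the dataset sample.

```python
def encode_varuint64(value: int) -> bytes:
    """Encode an unsigned 64-bit integer as a base-128 varint."""
    if not 0 <= value < (1 << 64):
        raise ValueError('value out of range: %d' % value)
    out = bytearray()
    while True:
        bits = value & 0x7F        # low 7 bits of the remaining value
        value >>= 7
        if value:
            bits |= 0x80           # continuation bit: more bytes follow
        out.append(bits)
        if not value:
            return bytes(out)

# 300 -> 0xAC 0x02 (low 7-bit group first, continuation bit set on the first byte)
assert encode_varuint64(300) == b'\xac\x02'
assert encode_varuint64(1) == b'\x01'
```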
8,506
|
def groupby ( expr , by , * bys ) : if not isinstance ( by , list ) : by = [ by , ] if len ( bys ) > 0 : by = by + list ( bys ) return GroupBy ( _input = expr , _by = by )
|
Group collection by a series of sequences .
|
8,507
|
def value_counts ( expr , sort = True , ascending = False , dropna = False ) : names = [ expr . name , 'count' ] typos = [ expr . dtype , types . int64 ] return ValueCounts ( _input = expr , _schema = Schema . from_lists ( names , typos ) , _sort = sort , _ascending = ascending , _dropna = dropna )
|
Return object containing counts of unique values .
|
8,508
|
def _hashes ( self , item ) : item = self . _binary ( item ) m = hashlib . sha1 ( ) m . update ( item ) digits = m . hexdigest ( ) for i in range ( int ( self . num_hashes // 8 ) ) : m . update ( self . _binary ( str ( i ) ) ) digits += m . hexdigest ( ) hashes = [ int ( digits [ i * 5 : i * 5 + 5 ] , 16 ) % self . hashbits for i in range ( self . num_hashes ) ] return hashes
|
To create the hash functions we use the SHA - 1 hash of the string and chop that up into 20 bit values and then mod down to the length of the Bloom filter .
|
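The target text above describes the hashing scheme in prose; a hedged standalone sketch of the same idea follows. The names `bloom_hashes` and `filter_bits` are illustrative, and the digest-extension loop is simplified relative to the sample's `num_hashes // 8` re-hashing.

```python
import hashlib

def bloom_hashes(item: str, num_hashes: int, filter_bits: int):
    """Derive bit positions from SHA-1: each 5 hex digits (20 bits) of the digest,
    reduced modulo the filter size; re-hash to extend the digest when needed."""
    m = hashlib.sha1(item.encode('utf-8'))
    digits = m.hexdigest()                      # 40 hex chars = eight 20-bit chunks
    round_no = 0
    while len(digits) < 5 * num_hashes:         # not enough chunks yet: extend the stream
        m.update(str(round_no).encode('utf-8'))
        digits += m.hexdigest()
        round_no += 1
    return [int(digits[i * 5:i * 5 + 5], 16) % filter_bits
            for i in range(num_hashes)]

print(bloom_hashes('example', num_hashes=4, filter_bits=1024))  # four positions in [0, 1024)
```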
8,509
|
def join ( left , right , on = None , how = 'inner' , suffixes = ( '_x' , '_y' ) , mapjoin = False ) : if on is None and not mapjoin : on = [ name for name in left . schema . names if name in right . schema . _name_indexes ] if isinstance ( suffixes , ( tuple , list ) ) and len ( suffixes ) == 2 : left_suffix , right_suffix = suffixes else : raise ValueError ( 'suffixes must be a tuple or list with two elements, got %s' % suffixes ) if not isinstance ( on , list ) : on = [ on , ] for i in range ( len ( on ) ) : it = on [ i ] if inspect . isfunction ( it ) : on [ i ] = it ( left , right ) left , right = _make_different_sources ( left , right , on ) try : return _join_dict [ how . upper ( ) ] ( _lhs = left , _rhs = right , _predicate = on , _left_suffix = left_suffix , _right_suffix = right_suffix , _mapjoin = mapjoin ) except KeyError : return JoinCollectionExpr ( _lhs = left , _rhs = right , _predicate = on , _how = how , _left_suffix = left_suffix , _right_suffix = right_suffix , _mapjoin = mapjoin )
|
Join two collections .
|
8,510
|
def inner_join ( left , right , on = None , suffixes = ( '_x' , '_y' ) , mapjoin = False ) : return join ( left , right , on , suffixes = suffixes , mapjoin = mapjoin )
|
Inner join two collections .
|
8,511
|
def left_join ( left , right , on = None , suffixes = ( '_x' , '_y' ) , mapjoin = False , merge_columns = None ) : joined = join ( left , right , on , how = 'left' , suffixes = suffixes , mapjoin = mapjoin ) return joined . _merge_joined_fields ( merge_columns )
|
Left join two collections .
|
8,512
|
def union ( left , right , distinct = False ) : left , right = _make_different_sources ( left , right ) return UnionCollectionExpr ( _lhs = left , _rhs = right , _distinct = distinct )
|
Union two collections .
|
8,513
|
def concat ( left , rights , distinct = False , axis = 0 ) : from . . utils import to_collection if isinstance ( rights , Node ) : rights = [ rights , ] if not rights : raise ValueError ( 'At least one DataFrame should be provided.' ) if axis == 0 : for right in rights : left = union ( left , right , distinct = distinct ) return left else : rights = [ to_collection ( r ) for r in rights ] ConcatCollectionExpr . validate_input ( left , * rights ) if hasattr ( left , '_xflow_concat' ) : return left . _xflow_concat ( rights ) else : return __horz_concat ( left , rights )
|
Concat collections .
|
8,514
|
def _drop ( expr , data , axis = 0 , columns = None ) : from . . utils import to_collection expr = to_collection ( expr ) if axis == 0 : if not isinstance ( data , ( CollectionExpr , SequenceExpr ) ) : raise ExpressionError ( 'data should be a collection or sequence when axis == 1.' ) data = to_collection ( data ) if columns is None : columns = [ n for n in data . schema . names ] if isinstance ( columns , six . string_types ) : columns = [ columns , ] data = data . select ( * columns ) . distinct ( ) drop_predicates = [ data [ n ] . isnull ( ) for n in data . schema . names ] return expr . left_join ( data , on = columns , suffixes = ( '' , '_dp' ) ) . filter ( * drop_predicates ) . select ( * expr . schema . names ) else : if isinstance ( data , ( CollectionExpr , SequenceExpr ) ) : data = to_collection ( data ) . schema . names return expr . exclude ( data )
|
Drop data from a DataFrame .
|
8,515
|
def setdiff ( left , * rights , ** kwargs ) : import time from . . utils import output distinct = kwargs . get ( 'distinct' , False ) if isinstance ( rights [ 0 ] , list ) : rights = rights [ 0 ] cols = [ n for n in left . schema . names ] types = [ n for n in left . schema . types ] counter_col_name = 'exc_counter_%d' % int ( time . time ( ) ) left = left [ left , Scalar ( 1 ) . rename ( counter_col_name ) ] rights = [ r [ r , Scalar ( - 1 ) . rename ( counter_col_name ) ] for r in rights ] unioned = left for r in rights : unioned = unioned . union ( r ) if distinct : aggregated = unioned . groupby ( * cols ) . agg ( ** { counter_col_name : unioned [ counter_col_name ] . min ( ) } ) return aggregated . filter ( aggregated [ counter_col_name ] == 1 ) . select ( * cols ) else : aggregated = unioned . groupby ( * cols ) . agg ( ** { counter_col_name : unioned [ counter_col_name ] . sum ( ) } ) @ output ( cols , types ) def exploder ( row ) : import sys irange = xrange if sys . version_info [ 0 ] < 3 else range for _ in irange ( getattr ( row , counter_col_name ) ) : yield row [ : - 1 ] return aggregated . map_reduce ( mapper = exploder ) . select ( * cols )
|
Exclude data from a collection , like the EXCEPT clause in SQL . All collections involved should have the same schema .
|
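The counter-column trick in the row above (tag left rows +1, right rows -1, union, aggregate per group, then explode) can be illustrated in plain Python with a `Counter`. This is only a sketch of the bag EXCEPT ALL semantics, not the pyodps DataFrame implementation, and the name `except_all` is made up.

```python
from collections import Counter

def except_all(left_rows, right_rows):
    """Each row survives as many times as it appears in left_rows beyond its
    occurrences in right_rows (the +1/-1 counter summed per group, then exploded)."""
    counts = Counter(left_rows)
    counts.subtract(Counter(right_rows))   # +1 per left row, -1 per right row
    result = []
    for row, n in counts.items():
        result.extend([row] * max(n, 0))   # explode rows with a positive counter
    return result

left = [('a', 1), ('a', 1), ('b', 2)]
right = [('a', 1)]
print(except_all(left, right))   # [('a', 1), ('b', 2)]
```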
8,516
|
def intersect ( left , * rights , ** kwargs ) : import time from . . utils import output distinct = kwargs . get ( 'distinct' , False ) if isinstance ( rights [ 0 ] , list ) : rights = rights [ 0 ] cols = [ n for n in left . schema . names ] types = [ n for n in left . schema . types ] collections = ( left , ) + rights idx_col_name = 'idx_%d' % int ( time . time ( ) ) counter_col_name = 'exc_counter_%d' % int ( time . time ( ) ) collections = [ c [ c , Scalar ( idx ) . rename ( idx_col_name ) ] for idx , c in enumerate ( collections ) ] unioned = reduce ( lambda a , b : a . union ( b ) , collections ) src_agg = unioned . groupby ( * ( cols + [ idx_col_name ] ) ) . agg ( ** { counter_col_name : unioned . count ( ) } ) aggregators = { idx_col_name : src_agg [ idx_col_name ] . nunique ( ) , counter_col_name : src_agg [ counter_col_name ] . min ( ) , } final_agg = src_agg . groupby ( * cols ) . agg ( ** aggregators ) final_agg = final_agg . filter ( final_agg [ idx_col_name ] == len ( collections ) ) if distinct : return final_agg . filter ( final_agg [ counter_col_name ] > 0 ) . select ( * cols ) else : @ output ( cols , types ) def exploder ( row ) : import sys irange = xrange if sys . version_info [ 0 ] < 3 else range for _ in irange ( getattr ( row , counter_col_name ) ) : yield row [ : - 2 ] return final_agg . map_reduce ( mapper = exploder ) . select ( * cols )
|
Calculate the intersection among datasets .
|
8,517
|
def _verify_and_add_jwt ( ) : if not app_context_has_jwt_data ( ) : guard = current_guard ( ) token = guard . read_token_from_header ( ) jwt_data = guard . extract_jwt_token ( token ) add_jwt_data_to_app_context ( jwt_data )
|
This helper method just checks and adds jwt data to the app context . Will not add jwt data if it is already present . Only use in this module
|
8,518
|
def auth_required ( method ) : @ functools . wraps ( method ) def wrapper ( * args , ** kwargs ) : _verify_and_add_jwt ( ) try : return method ( * args , ** kwargs ) finally : remove_jwt_data_from_app_context ( ) return wrapper
|
This decorator is used to ensure that a user is authenticated before being able to access a flask route . It also adds the current user to the current flask context .
|
8,519
|
def roles_required ( * required_rolenames ) : def decorator ( method ) : @ functools . wraps ( method ) def wrapper ( * args , ** kwargs ) : role_set = set ( [ str ( n ) for n in required_rolenames ] ) _verify_and_add_jwt ( ) try : MissingRoleError . require_condition ( current_rolenames ( ) . issuperset ( role_set ) , "This endpoint requires all the following roles: {}" , [ ', ' . join ( role_set ) ] , ) return method ( * args , ** kwargs ) finally : remove_jwt_data_from_app_context ( ) return wrapper return decorator
|
This decorator ensures that any users accessing the decorated route have all the needed roles to access it . If an
|
8,520
|
def roles_accepted ( * accepted_rolenames ) : def decorator ( method ) : @ functools . wraps ( method ) def wrapper ( * args , ** kwargs ) : role_set = set ( [ str ( n ) for n in accepted_rolenames ] ) _verify_and_add_jwt ( ) try : MissingRoleError . require_condition ( not current_rolenames ( ) . isdisjoint ( role_set ) , "This endpoint requires one of the following roles: {}" , [ ', ' . join ( role_set ) ] , ) return method ( * args , ** kwargs ) finally : remove_jwt_data_from_app_context ( ) return wrapper return decorator
|
This decorator ensures that any users accessing the decorated route have one of the needed roles to access it . If an
|
8,521
|
def init_app ( self , app , user_class , is_blacklisted = None ) : PraetorianError . require_condition ( app . config . get ( 'SECRET_KEY' ) is not None , "There must be a SECRET_KEY app config setting set" , ) possible_schemes = [ 'argon2' , 'bcrypt' , 'pbkdf2_sha512' , ] self . pwd_ctx = CryptContext ( default = 'pbkdf2_sha512' , schemes = possible_schemes + [ 'plaintext' ] , deprecated = [ ] , ) self . hash_scheme = app . config . get ( 'PRAETORIAN_HASH_SCHEME' ) valid_schemes = self . pwd_ctx . schemes ( ) PraetorianError . require_condition ( self . hash_scheme in valid_schemes or self . hash_scheme is None , "If {} is set, it must be one of the following schemes: {}" , 'PRAETORIAN_HASH_SCHEME' , valid_schemes , ) self . user_class = self . _validate_user_class ( user_class ) self . is_blacklisted = is_blacklisted or ( lambda t : False ) self . encode_key = app . config [ 'SECRET_KEY' ] self . allowed_algorithms = app . config . get ( 'JWT_ALLOWED_ALGORITHMS' , DEFAULT_JWT_ALLOWED_ALGORITHMS , ) self . encode_algorithm = app . config . get ( 'JWT_ALGORITHM' , DEFAULT_JWT_ALGORITHM , ) self . access_lifespan = pendulum . Duration ( ** app . config . get ( 'JWT_ACCESS_LIFESPAN' , DEFAULT_JWT_ACCESS_LIFESPAN , ) ) self . refresh_lifespan = pendulum . Duration ( ** app . config . get ( 'JWT_REFRESH_LIFESPAN' , DEFAULT_JWT_REFRESH_LIFESPAN , ) ) self . header_name = app . config . get ( 'JWT_HEADER_NAME' , DEFAULT_JWT_HEADER_NAME , ) self . header_type = app . config . get ( 'JWT_HEADER_TYPE' , DEFAULT_JWT_HEADER_TYPE , ) self . user_class_validation_method = app . config . get ( 'USER_CLASS_VALIDATION_METHOD' , DEFAULT_USER_CLASS_VALIDATION_METHOD , ) if not app . config . get ( 'DISABLE_PRAETORIAN_ERROR_HANDLER' ) : app . register_error_handler ( PraetorianError , PraetorianError . build_error_handler ( ) , ) self . is_testing = app . config . get ( 'TESTING' , False ) if not hasattr ( app , 'extensions' ) : app . extensions = { } app . extensions [ 'praetorian' ] = self
|
Initializes the Praetorian extension
|
8,522
|
def _validate_user_class ( cls , user_class ) : PraetorianError . require_condition ( getattr ( user_class , 'lookup' , None ) is not None , textwrap . dedent ( ) , ) PraetorianError . require_condition ( getattr ( user_class , 'identify' , None ) is not None , textwrap . dedent ( ) , ) return user_class
|
Validates the supplied user_class to make sure that it has the class methods necessary to function correctly .
|
8,523
|
def authenticate ( self , username , password ) : PraetorianError . require_condition ( self . user_class is not None , "Praetorian must be initialized before this method is available" , ) user = self . user_class . lookup ( username ) MissingUserError . require_condition ( user is not None , 'Could not find the requested user' , ) AuthenticationError . require_condition ( self . _verify_password ( password , user . password ) , 'The password is incorrect' , ) return user
|
Verifies that a password matches the stored password for that username . If verification passes , the matching user instance is returned
|
8,524
|
def _verify_password ( self , raw_password , hashed_password ) : PraetorianError . require_condition ( self . pwd_ctx is not None , "Praetorian must be initialized before this method is available" , ) return self . pwd_ctx . verify ( raw_password , hashed_password )
|
Verifies that a plaintext password matches the hashed version of that password using the stored passlib password context
|
8,525
|
def encrypt_password ( self , raw_password ) : PraetorianError . require_condition ( self . pwd_ctx is not None , "Praetorian must be initialized before this method is available" , ) return self . pwd_ctx . encrypt ( raw_password , scheme = self . hash_scheme )
|
Encrypts a plaintext password using the stored passlib password context
|
8,526
|
def _check_user ( self , user ) : MissingUserError . require_condition ( user is not None , 'Could not find the requested user' , ) user_validate_method = getattr ( user , self . user_class_validation_method , None ) if user_validate_method is None : return InvalidUserError . require_condition ( user_validate_method ( ) , "The user is not valid or has had access revoked" , )
|
Checks to make sure that a user is valid . First checks that the user is not None . If this check fails a MissingUserError is raised . Next checks if the user has a validation method . If the method does not exist the check passes . If the method exists it is called . If the result of the call is not truthy an InvalidUserError is raised
|
8,527
|
def encode_jwt_token ( self , user , override_access_lifespan = None , override_refresh_lifespan = None , ** custom_claims ) : ClaimCollisionError . require_condition ( set ( custom_claims . keys ( ) ) . isdisjoint ( RESERVED_CLAIMS ) , "The custom claims collide with required claims" , ) self . _check_user ( user ) moment = pendulum . now ( 'UTC' ) if override_refresh_lifespan is None : refresh_lifespan = self . refresh_lifespan else : refresh_lifespan = override_refresh_lifespan refresh_expiration = ( moment + refresh_lifespan ) . int_timestamp if override_access_lifespan is None : access_lifespan = self . access_lifespan else : access_lifespan = override_access_lifespan access_expiration = min ( ( moment + access_lifespan ) . int_timestamp , refresh_expiration , ) payload_parts = dict ( iat = moment . int_timestamp , exp = access_expiration , rf_exp = refresh_expiration , jti = str ( uuid . uuid4 ( ) ) , id = user . identity , rls = ',' . join ( user . rolenames ) , ** custom_claims ) return jwt . encode ( payload_parts , self . encode_key , self . encode_algorithm , ) . decode ( 'utf-8' )
|
Encodes user data into a jwt token that can be used for authorization at protected endpoints
|
8,528
|
def encode_eternal_jwt_token ( self , user , ** custom_claims ) : return self . encode_jwt_token ( user , override_access_lifespan = VITAM_AETERNUM , override_refresh_lifespan = VITAM_AETERNUM , ** custom_claims )
|
This utility function encodes a jwt token that never expires
|
8,529
|
def refresh_jwt_token ( self , token , override_access_lifespan = None ) : moment = pendulum . now ( 'UTC' ) with InvalidTokenHeader . handle_errors ( 'failed to decode JWT token' ) : data = jwt . decode ( token , self . encode_key , algorithms = self . allowed_algorithms , options = { 'verify_exp' : False } , ) self . _validate_jwt_data ( data , access_type = AccessType . refresh ) user = self . user_class . identify ( data [ 'id' ] ) self . _check_user ( user ) if override_access_lifespan is None : access_lifespan = self . access_lifespan else : access_lifespan = override_access_lifespan refresh_expiration = data [ 'rf_exp' ] access_expiration = min ( ( moment + access_lifespan ) . int_timestamp , refresh_expiration , ) custom_claims = { k : v for ( k , v ) in data . items ( ) if k not in RESERVED_CLAIMS } payload_parts = dict ( iat = moment . int_timestamp , exp = access_expiration , rf_exp = refresh_expiration , jti = data [ 'jti' ] , id = data [ 'id' ] , rls = ',' . join ( user . rolenames ) , ** custom_claims ) return jwt . encode ( payload_parts , self . encode_key , self . encode_algorithm , ) . decode ( 'utf-8' )
|
Creates a new token for a user if and only if the old token's access permission is expired but its refresh permission is not yet expired . The new token's refresh expiration moment is the same as the old token's , but the new token's access expiration is refreshed .
|
8,530
|
def extract_jwt_token ( self , token ) : with InvalidTokenHeader . handle_errors ( 'failed to decode JWT token' ) : data = jwt . decode ( token , self . encode_key , algorithms = self . allowed_algorithms , options = { 'verify_exp' : False } , ) self . _validate_jwt_data ( data , access_type = AccessType . access ) return data
|
Extracts a data dictionary from a jwt token
|
8,531
|
def _validate_jwt_data ( self , data , access_type ) : MissingClaimError . require_condition ( 'jti' in data , 'Token is missing jti claim' , ) BlacklistedError . require_condition ( not self . is_blacklisted ( data [ 'jti' ] ) , 'Token has a blacklisted jti' , ) MissingClaimError . require_condition ( 'id' in data , 'Token is missing id field' , ) MissingClaimError . require_condition ( 'exp' in data , 'Token is missing exp claim' , ) MissingClaimError . require_condition ( 'rf_exp' in data , 'Token is missing rf_exp claim' , ) moment = pendulum . now ( 'UTC' ) . int_timestamp if access_type == AccessType . access : ExpiredAccessError . require_condition ( moment <= data [ 'exp' ] , 'access permission has expired' , ) elif access_type == AccessType . refresh : EarlyRefreshError . require_condition ( moment > data [ 'exp' ] , 'access permission for token has not expired. may not refresh' , ) ExpiredRefreshError . require_condition ( moment <= data [ 'rf_exp' ] , 'refresh permission for token has expired' , )
|
Validates that the data for a jwt token is valid
|
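The expiration logic in the row above reduces to two timestamp comparisons; a hedged sketch of just that arithmetic follows (the function name and `ValueError`, standing in for the library's error classes, are illustrative).

```python
import time

def check_token_times(exp, rf_exp, access_type, now=None):
    """Access use needs an unexpired exp; refresh needs exp already past
    but rf_exp still in the future."""
    now = int(time.time()) if now is None else now
    if access_type == 'access':
        if now > exp:
            raise ValueError('access permission has expired')
    elif access_type == 'refresh':
        if now <= exp:
            raise ValueError('access permission has not expired; may not refresh')
        if now > rf_exp:
            raise ValueError('refresh permission has expired')

# Access window lapsed 10 seconds ago, refresh window still open: refresh is allowed.
now = int(time.time())
check_token_times(exp=now - 10, rf_exp=now + 3600, access_type='refresh', now=now)
```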
8,532
|
def _unpack_header ( self , headers ) : jwt_header = headers . get ( self . header_name ) MissingTokenHeader . require_condition ( jwt_header is not None , "JWT token not found in headers under '{}'" , self . header_name , ) match = re . match ( self . header_type + r'\s*([\w\.-]+)' , jwt_header ) InvalidTokenHeader . require_condition ( match is not None , "JWT header structure is invalid" , ) token = match . group ( 1 ) return token
|
Unpacks a jwt token from a request header
|
8,533
|
def pack_header_for_user ( self , user , override_access_lifespan = None , override_refresh_lifespan = None , ** custom_claims ) : token = self . encode_jwt_token ( user , override_access_lifespan = override_access_lifespan , override_refresh_lifespan = override_refresh_lifespan , ** custom_claims ) return { self . header_name : self . header_type + ' ' + token }
|
Encodes a jwt token and packages it into a header dict for a given user
|
8,534
|
def refresh ( ) : old_token = guard . read_token_from_header ( ) new_token = guard . refresh_jwt_token ( old_token ) ret = { 'access_token' : new_token } return flask . jsonify ( ret ) , 200
|
Refreshes an existing JWT by creating a new one that is a copy of the old except that it has a refreshed access expiration .
|
8,535
|
def disable_user ( ) : req = flask . request . get_json ( force = True ) usr = User . query . filter_by ( username = req . get ( 'username' , None ) ) . one ( ) usr . is_active = False db . session . commit ( ) return flask . jsonify ( message = 'disabled user {}' . format ( usr . username ) )
|
Disables a user in the data store
|
8,536
|
def blacklist_token ( ) : req = flask . request . get_json ( force = True ) data = guard . extract_jwt_token ( req [ 'token' ] ) blacklist . add ( data [ 'jti' ] ) return flask . jsonify ( message = 'token blacklisted ({})' . format ( req [ 'token' ] ) )
|
Blacklists an existing JWT by registering its jti claim in the blacklist .
|
8,537
|
def protected ( ) : custom_claims = flask_praetorian . current_custom_claims ( ) firstname = custom_claims . pop ( 'firstname' , None ) nickname = custom_claims . pop ( 'nickname' , None ) surname = custom_claims . pop ( 'surname' , None ) if nickname is None : user_string = "{} {}" . format ( firstname , surname ) else : user_string = "{} '{}' {}" . format ( firstname , nickname , surname ) return flask . jsonify ( message = "protected endpoint (allowed user {u})" . format ( u = user_string ) , )
|
A protected endpoint . The auth_required decorator will require a header containing a valid JWT
|
8,538
|
def current_guard ( ) : guard = flask . current_app . extensions . get ( 'praetorian' , None ) PraetorianError . require_condition ( guard is not None , "No current guard found; Praetorian must be initialized first" , ) return guard
|
Fetches the current instance of flask - praetorian that is attached to the current flask app
|
8,539
|
def current_user_id ( ) : jwt_data = get_jwt_data_from_app_context ( ) user_id = jwt_data . get ( 'id' ) PraetorianError . require_condition ( user_id is not None , "Could not fetch an id for the current user" , ) return user_id
|
This method returns the user id retrieved from jwt token data attached to the current flask app's context
|
8,540
|
def current_user ( ) : user_id = current_user_id ( ) guard = current_guard ( ) user = guard . user_class . identify ( user_id ) PraetorianError . require_condition ( user is not None , "Could not identify the current user from the current id" , ) return user
|
This method returns a user instance for jwt token data attached to the current flask app's context
|
8,541
|
def current_rolenames ( ) : jwt_data = get_jwt_data_from_app_context ( ) if 'rls' not in jwt_data : return set ( [ 'non-empty-but-definitely-not-matching-subset' ] ) else : return set ( r . strip ( ) for r in jwt_data [ 'rls' ] . split ( ',' ) )
|
This method returns the names of all roles associated with the current user
|
8,542
|
def current_custom_claims ( ) : jwt_data = get_jwt_data_from_app_context ( ) return { k : v for ( k , v ) in jwt_data . items ( ) if k not in RESERVED_CLAIMS }
|
This method returns any custom claims in the current jwt
|
8,543
|
def check_redirect_uris ( uris , client_type = None ) : if client_type not in [ None , 'native' , 'web' ] : raise ValueError ( 'Invalid client type indicator used' ) if not isinstance ( uris , list ) : raise ValueError ( 'uris needs to be a list of strings' ) if len ( uris ) < 1 : raise ValueError ( 'At least one return URI needs to be provided' ) for uri in uris : if uri . startswith ( 'https://' ) : if client_type == 'native' : raise ValueError ( 'https url with native client' ) client_type = 'web' elif uri . startswith ( 'http://localhost' ) : if client_type == 'web' : raise ValueError ( 'http://localhost url with web client' ) client_type = 'native' else : if ( uri . startswith ( 'http://' ) and not uri . startswith ( 'http://localhost' ) ) : raise ValueError ( 'http:// url with non-localhost is illegal' ) else : raise ValueError ( 'Invalid uri provided: %s' % uri ) return client_type
|
This function checks all return uris provided and tries to deduce what type of client we should register as .
|
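A short usage illustration of the deduction rules in the row above, assuming the `check_redirect_uris` function is importable; the example URLs are made up.

```python
# Assuming check_redirect_uris from the row above is in scope:
print(check_redirect_uris(['https://app.example.com/callback']))    # 'web'
print(check_redirect_uris(['http://localhost:8080/callback']))      # 'native'

# Mixing the two raises, since a client cannot be both native and web:
try:
    check_redirect_uris(['https://app.example.com/cb', 'http://localhost/cb'])
except ValueError as exc:
    print(exc)   # http://localhost url with web client
```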
8,544
|
def register_client ( provider_info , redirect_uris ) : client_type = check_redirect_uris ( redirect_uris ) submit_info = { 'redirect_uris' : redirect_uris , 'application_type' : client_type , 'token_endpoint_auth_method' : 'client_secret_post' } headers = { 'Content-type' : 'application/json' } resp , content = httplib2 . Http ( ) . request ( provider_info [ 'registration_endpoint' ] , 'POST' , json . dumps ( submit_info ) , headers = headers ) if int ( resp [ 'status' ] ) >= 400 : raise Exception ( 'Error: the server returned HTTP ' + resp [ 'status' ] ) client_info = _json_loads ( content ) if 'error' in client_info : raise Exception ( 'Error occured during registration: %s (%s)' % ( client_info [ 'error' ] , client_info . get ( 'error_description' ) ) ) json_file = { 'web' : { 'client_id' : client_info [ 'client_id' ] , 'client_secret' : client_info [ 'client_secret' ] , 'auth_uri' : provider_info [ 'authorization_endpoint' ] , 'token_uri' : provider_info [ 'token_endpoint' ] , 'userinfo_uri' : provider_info [ 'userinfo_endpoint' ] , 'redirect_uris' : redirect_uris , 'issuer' : provider_info [ 'issuer' ] , } } return json_file
|
This function registers a new client with the specified OpenID Provider and then returns the registered client ID and other information .
|
8,545
|
def discover_OP_information ( OP_uri ) : _ , content = httplib2 . Http ( ) . request ( '%s/.well-known/openid-configuration' % OP_uri ) return _json_loads ( content )
|
Discovers information about the provided OpenID Provider .
|
8,546
|
def init_app ( self , app ) : secrets = self . load_secrets ( app ) self . client_secrets = list ( secrets . values ( ) ) [ 0 ] secrets_cache = DummySecretsCache ( secrets ) app . config . setdefault ( 'OIDC_SCOPES' , [ 'openid' , 'email' ] ) app . config . setdefault ( 'OIDC_GOOGLE_APPS_DOMAIN' , None ) app . config . setdefault ( 'OIDC_ID_TOKEN_COOKIE_NAME' , 'oidc_id_token' ) app . config . setdefault ( 'OIDC_ID_TOKEN_COOKIE_PATH' , '/' ) app . config . setdefault ( 'OIDC_ID_TOKEN_COOKIE_TTL' , 7 * 86400 ) app . config . setdefault ( 'OIDC_COOKIE_SECURE' , True ) app . config . setdefault ( 'OIDC_VALID_ISSUERS' , ( self . client_secrets . get ( 'issuer' ) or GOOGLE_ISSUERS ) ) app . config . setdefault ( 'OIDC_CLOCK_SKEW' , 60 ) app . config . setdefault ( 'OIDC_REQUIRE_VERIFIED_EMAIL' , False ) app . config . setdefault ( 'OIDC_OPENID_REALM' , None ) app . config . setdefault ( 'OIDC_USER_INFO_ENABLED' , True ) app . config . setdefault ( 'OIDC_CALLBACK_ROUTE' , '/oidc_callback' ) app . config . setdefault ( 'OVERWRITE_REDIRECT_URI' , False ) app . config . setdefault ( "OIDC_EXTRA_REQUEST_AUTH_PARAMS" , { } ) app . config . setdefault ( 'OIDC_RESOURCE_SERVER_ONLY' , False ) app . config . setdefault ( 'OIDC_RESOURCE_CHECK_AUD' , False ) app . config . setdefault ( 'OIDC_INTROSPECTION_AUTH_METHOD' , 'client_secret_post' ) app . config . setdefault ( 'OIDC_TOKEN_TYPE_HINT' , 'access_token' ) if not 'openid' in app . config [ 'OIDC_SCOPES' ] : raise ValueError ( 'The value "openid" must be in the OIDC_SCOPES' ) if not app . config [ 'OIDC_RESOURCE_SERVER_ONLY' ] : app . route ( app . config [ 'OIDC_CALLBACK_ROUTE' ] ) ( self . _oidc_callback ) app . before_request ( self . _before_request ) app . after_request ( self . _after_request ) self . flow = flow_from_clientsecrets ( app . config [ 'OIDC_CLIENT_SECRETS' ] , scope = app . config [ 'OIDC_SCOPES' ] , cache = secrets_cache ) assert isinstance ( self . flow , OAuth2WebServerFlow ) self . extra_data_serializer = JSONWebSignatureSerializer ( app . config [ 'SECRET_KEY' ] , salt = 'flask-oidc-extra-data' ) self . cookie_serializer = JSONWebSignatureSerializer ( app . config [ 'SECRET_KEY' ] , salt = 'flask-oidc-cookie' ) try : self . credentials_store = app . config [ 'OIDC_CREDENTIALS_STORE' ] except KeyError : pass
|
Do setup that requires a Flask app .
|
8,547
|
def user_getfield ( self , field , access_token = None ) : info = self . user_getinfo ( [ field ] , access_token ) return info . get ( field )
|
Request a single field of information about the user .
|
8,548
|
def user_getinfo ( self , fields , access_token = None ) : if g . oidc_id_token is None and access_token is None : raise Exception ( 'User was not authenticated' ) info = { } all_info = None for field in fields : if access_token is None and field in g . oidc_id_token : info [ field ] = g . oidc_id_token [ field ] elif current_app . config [ 'OIDC_USER_INFO_ENABLED' ] : if all_info is None : all_info = self . _retrieve_userinfo ( access_token ) if all_info is None : all_info = { } if field in all_info : info [ field ] = all_info [ field ] else : pass return info
|
Request multiple fields of information about the user .
|
8,549
|
def get_access_token ( self ) : try : credentials = OAuth2Credentials . from_json ( self . credentials_store [ g . oidc_id_token [ 'sub' ] ] ) return credentials . access_token except KeyError : logger . debug ( "Expired ID token, credentials missing" , exc_info = True ) return None
|
Method to return the current request's access_token .
|
8,550
|
def get_refresh_token ( self ) : try : credentials = OAuth2Credentials . from_json ( self . credentials_store [ g . oidc_id_token [ 'sub' ] ] ) return credentials . refresh_token except KeyError : logger . debug ( "Expired ID token, credentials missing" , exc_info = True ) return None
|
Method to return the current request's refresh_token .
|
8,551
|
def _retrieve_userinfo ( self , access_token = None ) : if 'userinfo_uri' not in self . client_secrets : logger . debug ( 'Userinfo uri not specified' ) raise AssertionError ( 'UserInfo URI not specified' ) if '_oidc_userinfo' in g : return g . _oidc_userinfo http = httplib2 . Http ( ) if access_token is None : try : credentials = OAuth2Credentials . from_json ( self . credentials_store [ g . oidc_id_token [ 'sub' ] ] ) except KeyError : logger . debug ( "Expired ID token, credentials missing" , exc_info = True ) return None credentials . authorize ( http ) resp , content = http . request ( self . client_secrets [ 'userinfo_uri' ] ) else : resp , content = http . request ( self . client_secrets [ 'userinfo_uri' ] , "POST" , body = urlencode ( { "access_token" : access_token } ) , headers = { 'Content-Type' : 'application/x-www-form-urlencoded' } ) logger . debug ( 'Retrieved user info: %s' % content ) info = _json_loads ( content ) g . _oidc_userinfo = info return info
|
Requests extra user information from the Provider's UserInfo endpoint and returns the result .
|
8,552
|
def _after_request ( self , response ) : cookie_secure = ( current_app . config [ 'OIDC_COOKIE_SECURE' ] and current_app . config . get ( 'OIDC_ID_TOKEN_COOKIE_SECURE' , True ) ) if getattr ( g , 'oidc_id_token_dirty' , False ) : if g . oidc_id_token : signed_id_token = self . cookie_serializer . dumps ( g . oidc_id_token ) response . set_cookie ( current_app . config [ 'OIDC_ID_TOKEN_COOKIE_NAME' ] , signed_id_token , secure = cookie_secure , httponly = True , max_age = current_app . config [ 'OIDC_ID_TOKEN_COOKIE_TTL' ] ) else : response . set_cookie ( current_app . config [ 'OIDC_ID_TOKEN_COOKIE_NAME' ] , '' , path = current_app . config [ 'OIDC_ID_TOKEN_COOKIE_PATH' ] , secure = cookie_secure , httponly = True , expires = 0 ) return response
|
Set a new ID token cookie if the ID token has changed .
|
8,553
|
def require_login ( self , view_func ) : @ wraps ( view_func ) def decorated ( * args , ** kwargs ) : if g . oidc_id_token is None : return self . redirect_to_auth_server ( request . url ) return view_func ( * args , ** kwargs ) return decorated
|
Use this to decorate view functions that require a user to be logged in . If the user is not already logged in , they will be sent to the Provider to log in , after which they will be returned .
|
8,554
|
def require_keycloak_role ( self , client , role ) : def wrapper ( view_func ) : @ wraps ( view_func ) def decorated ( * args , ** kwargs ) : pre , tkn , post = self . get_access_token ( ) . split ( '.' ) access_token = json . loads ( b64decode ( tkn ) ) if role in access_token [ 'resource_access' ] [ client ] [ 'roles' ] : return view_func ( * args , ** kwargs ) else : return abort ( 403 ) return decorated return wrapper
|
Function to check for a KeyCloak client role in JWT access token .
|
8,555
|
def redirect_to_auth_server ( self , destination = None , customstate = None ) : if not self . _custom_callback and customstate : raise ValueError ( 'Custom State is only avilable with a custom ' 'handler' ) if 'oidc_csrf_token' not in session : csrf_token = urlsafe_b64encode ( os . urandom ( 24 ) ) . decode ( 'utf-8' ) session [ 'oidc_csrf_token' ] = csrf_token state = { 'csrf_token' : session [ 'oidc_csrf_token' ] , } statefield = 'destination' statevalue = destination if customstate is not None : statefield = 'custom' statevalue = customstate state [ statefield ] = self . extra_data_serializer . dumps ( statevalue ) . decode ( 'utf-8' ) extra_params = { 'state' : urlsafe_b64encode ( json . dumps ( state ) . encode ( 'utf-8' ) ) , } extra_params . update ( current_app . config [ 'OIDC_EXTRA_REQUEST_AUTH_PARAMS' ] ) if current_app . config [ 'OIDC_GOOGLE_APPS_DOMAIN' ] : extra_params [ 'hd' ] = current_app . config [ 'OIDC_GOOGLE_APPS_DOMAIN' ] if current_app . config [ 'OIDC_OPENID_REALM' ] : extra_params [ 'openid.realm' ] = current_app . config [ 'OIDC_OPENID_REALM' ] flow = self . _flow_for_request ( ) auth_url = '{url}&{extra_params}' . format ( url = flow . step1_get_authorize_url ( ) , extra_params = urlencode ( extra_params ) ) self . _set_cookie_id_token ( None ) return redirect ( auth_url )
|
Set a CSRF token in the session and redirect to the IdP .
|
8,556
|
def _is_id_token_valid ( self , id_token ) : if not id_token : return False if id_token [ 'iss' ] not in current_app . config [ 'OIDC_VALID_ISSUERS' ] : logger . error ( 'id_token issued by non-trusted issuer: %s' % id_token [ 'iss' ] ) return False if isinstance ( id_token [ 'aud' ] , list ) : if self . flow . client_id not in id_token [ 'aud' ] : logger . error ( 'We are not a valid audience' ) return False if 'azp' not in id_token and len ( id_token [ 'aud' ] ) > 1 : logger . error ( 'Multiple audiences and not authorized party' ) return False else : if id_token [ 'aud' ] != self . flow . client_id : logger . error ( 'We are not the audience' ) return False if 'azp' in id_token and id_token [ 'azp' ] != self . flow . client_id : logger . error ( 'Authorized Party is not us' ) return False if int ( time . time ( ) ) >= int ( id_token [ 'exp' ] ) : logger . error ( 'Token has expired' ) return False if id_token [ 'iat' ] < ( time . time ( ) - current_app . config [ 'OIDC_CLOCK_SKEW' ] ) : logger . error ( 'Token issued in the past' ) return False if current_app . config [ 'OIDC_GOOGLE_APPS_DOMAIN' ] and id_token . get ( 'hd' ) != current_app . config [ 'OIDC_GOOGLE_APPS_DOMAIN' ] : logger . error ( 'Invalid google apps domain' ) return False if not id_token . get ( 'email_verified' , False ) and current_app . config [ 'OIDC_REQUIRE_VERIFIED_EMAIL' ] : logger . error ( 'Email not verified' ) return False return True
|
Check that id_token is a current ID token for this application , was issued by the Apps domain we expected , and that the email address has been verified .
|
8,557
|
def custom_callback ( self , view_func ) : @ wraps ( view_func ) def decorated ( * args , ** kwargs ) : plainreturn , data = self . _process_callback ( 'custom' ) if plainreturn : return data else : return view_func ( data , * args , ** kwargs ) self . _custom_callback = decorated return decorated
|
Wrapper function to use a custom callback . The custom OIDC callback will get the custom state field passed in with redirect_to_auth_server .
|
8,558
|
def _process_callback ( self , statefield ) : try : session_csrf_token = session . get ( 'oidc_csrf_token' ) state = _json_loads ( urlsafe_b64decode ( request . args [ 'state' ] . encode ( 'utf-8' ) ) ) csrf_token = state [ 'csrf_token' ] code = request . args [ 'code' ] except ( KeyError , ValueError ) : logger . debug ( "Can't retrieve CSRF token, state, or code" , exc_info = True ) return True , self . _oidc_error ( ) if csrf_token != session_csrf_token : logger . debug ( "CSRF token mismatch" ) return True , self . _oidc_error ( ) flow = self . _flow_for_request ( ) credentials = flow . step2_exchange ( code ) id_token = credentials . id_token if not self . _is_id_token_valid ( id_token ) : logger . debug ( "Invalid ID token" ) if id_token . get ( 'hd' ) != current_app . config [ 'OIDC_GOOGLE_APPS_DOMAIN' ] : return True , self . _oidc_error ( "You must log in with an account from the {0} domain." . format ( current_app . config [ 'OIDC_GOOGLE_APPS_DOMAIN' ] ) , self . WRONG_GOOGLE_APPS_DOMAIN ) return True , self . _oidc_error ( ) self . credentials_store [ id_token [ 'sub' ] ] = credentials . to_json ( ) try : response = self . extra_data_serializer . loads ( state [ statefield ] ) except BadSignature : logger . error ( 'State field was invalid' ) return True , self . _oidc_error ( ) self . _set_cookie_id_token ( id_token ) return False , response
|
Exchange the auth code for actual credentials then redirect to the originally requested page .
|
8,559
|
def validate_token ( self , token , scopes_required = None ) : valid = self . _validate_token ( token , scopes_required ) if valid is True : return True else : return ErrStr ( valid )
|
This function can be used to validate tokens .
|
8,560
|
def _validate_token ( self , token , scopes_required = None ) : if scopes_required is None : scopes_required = [ ] scopes_required = set ( scopes_required ) token_info = None valid_token = False has_required_scopes = False if token : try : token_info = self . _get_token_info ( token ) except Exception as ex : token_info = { 'active' : False } logger . error ( 'ERROR: Unable to get token info' ) logger . error ( str ( ex ) ) valid_token = token_info . get ( 'active' , False ) if 'aud' in token_info and current_app . config [ 'OIDC_RESOURCE_CHECK_AUD' ] : valid_audience = False aud = token_info [ 'aud' ] clid = self . client_secrets [ 'client_id' ] if isinstance ( aud , list ) : valid_audience = clid in aud else : valid_audience = clid == aud if not valid_audience : logger . error ( 'Refused token because of invalid ' 'audience' ) valid_token = False if valid_token : token_scopes = token_info . get ( 'scope' , '' ) . split ( ' ' ) else : token_scopes = [ ] has_required_scopes = scopes_required . issubset ( set ( token_scopes ) ) if not has_required_scopes : logger . debug ( 'Token missed required scopes' ) if ( valid_token and has_required_scopes ) : g . oidc_token_info = token_info return True if not valid_token : return 'Token required but invalid' elif not has_required_scopes : return 'Token does not have required scopes' else : return 'Something went wrong checking your token'
|
The actual implementation of validate_token .
|
8,561
|
def accept_token ( self , require_token = False , scopes_required = None , render_errors = True ) : def wrapper ( view_func ) : @ wraps ( view_func ) def decorated ( * args , ** kwargs ) : token = None if 'Authorization' in request . headers and request . headers [ 'Authorization' ] . startswith ( 'Bearer ' ) : token = request . headers [ 'Authorization' ] . split ( None , 1 ) [ 1 ] . strip ( ) if 'access_token' in request . form : token = request . form [ 'access_token' ] elif 'access_token' in request . args : token = request . args [ 'access_token' ] validity = self . validate_token ( token , scopes_required ) if ( validity is True ) or ( not require_token ) : return view_func ( * args , ** kwargs ) else : response_body = { 'error' : 'invalid_token' , 'error_description' : validity } if render_errors : response_body = json . dumps ( response_body ) return response_body , 401 , { 'WWW-Authenticate' : 'Bearer' } return decorated return wrapper
|
Use this to decorate view functions that should accept OAuth2 tokens this will most likely apply to API functions .
|
8,562
|
def delete_rows_csr ( mat , indices ) : if not isinstance ( mat , scipy . sparse . csr_matrix ) : raise ValueError ( "works only for CSR format -- use .tocsr() first" ) indices = list ( indices ) mask = np . ones ( mat . shape [ 0 ] , dtype = bool ) mask [ indices ] = False return mat [ mask ]
|
Remove the rows denoted by indices from the CSR sparse matrix mat .
|
8,563
|
def restrict ( self , support ) : if self . has_been_restricted == True : return self new_numerical_cols = [ ] new_categorical_cols = [ ] new_additional_numerical_cols = [ ] new_feature_names = [ ] new_vocab = { } for idx , val in enumerate ( support ) : if val == True : feature_name = self . feature_names_ [ idx ] if self . separator in feature_name : base_feature_name = feature_name [ : feature_name . rfind ( self . separator ) ] else : base_feature_name = feature_name new_feature_names . append ( feature_name ) new_vocab [ feature_name ] = len ( new_vocab ) if feature_name in self . numerical_columns : new_numerical_cols . append ( feature_name ) elif base_feature_name in self . categorical_columns and base_feature_name not in new_categorical_cols : new_categorical_cols . append ( base_feature_name ) elif feature_name in self . additional_numerical_cols : new_additional_numerical_cols . append ( feature_name ) self . feature_names_ = new_feature_names self . vocabulary_ = new_vocab self . numerical_columns = new_numerical_cols self . categorical_columns = new_categorical_cols self . additional_numerical_cols = new_additional_numerical_cols self . has_been_restricted = True return self
|
Restrict the features to those in support using feature selection .
|
8,564
|
def check_type ( self , zenpy_objects ) : expected_type = self . api . _object_mapping . class_for_type ( self . api . object_type ) if not isinstance ( zenpy_objects , collections . Iterable ) : zenpy_objects = [ zenpy_objects ] for zenpy_object in zenpy_objects : if type ( zenpy_object ) is not expected_type : raise ZenpyException ( "Invalid type - expected {} found {}" . format ( expected_type , type ( zenpy_object ) ) )
|
Ensure the passed type matches this API's object_type .
|
8,565
|
def to_snake_case ( name ) : s1 = FIRST_CAP_REGEX . sub ( r'\1_\2' , name ) return ALL_CAP_REGEX . sub ( r'\1_\2' , s1 ) . lower ( )
|
Given a name in camelCase , return it in snake_case .
|
8,566
|
def to_unix_ts ( start_time ) : if isinstance ( start_time , datetime ) : if is_timezone_aware ( start_time ) : start_time = start_time . astimezone ( pytz . utc ) else : log . warning ( "Non timezone-aware datetime object passed to IncrementalEndpoint. " "The Zendesk API expects UTC time, if this is not the case results will be incorrect!" ) unix_time = calendar . timegm ( start_time . timetuple ( ) ) else : unix_time = start_time return int ( unix_time )
|
Given a datetime object , return its value as a unix timestamp .
|
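The row above relies on `calendar.timegm`, which interprets a `struct_time` as UTC (unlike `time.mktime`, which assumes local time); a quick check of that behaviour:

```python
import calendar
from datetime import datetime

# Naive datetime treated as UTC: 2020-01-01T00:00:00Z is 1577836800 seconds since the epoch.
print(calendar.timegm(datetime(2020, 1, 1).timetuple()))   # 1577836800
```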
8,567
|
def as_singular ( result_key ) : if result_key . endswith ( 'ies' ) : return re . sub ( 'ies$' , 'y' , result_key ) elif result_key . endswith ( 'uses' ) : return re . sub ( "uses$" , "us" , result_key ) elif result_key . endswith ( 'addresses' ) : return result_key [ : - 2 ] elif result_key . endswith ( 's' ) : return result_key [ : - 1 ] else : return result_key
|
Given a result key , return it in the singular form .
|
8,568
|
def as_plural ( result_key ) : if result_key . endswith ( 'y' ) : return re . sub ( "y$" , "ies" , result_key ) elif result_key . endswith ( 'address' ) : return result_key + 'es' elif result_key . endswith ( 'us' ) : return re . sub ( "us$" , "uses" , result_key ) elif not result_key . endswith ( 's' ) : return result_key + 's' else : return result_key
|
Given a result key , return it in the plural form .
|
8,569
|
def extract_id ( * object_types ) : def outer ( func ) : def inner ( * args , ** kwargs ) : def id_of ( x ) : return x . id if type ( x ) in object_types else x new_args = [ id_of ( arg ) for arg in args ] new_kwargs = { k : id_of ( v ) for k , v in kwargs . items ( ) } return func ( * new_args , ** new_kwargs ) return inner return outer
|
Decorator for extracting id from passed parameters for specific types .
|
8,570
|
def json_encode ( obj , serialize ) : if hasattr ( obj , 'to_dict' ) : return obj . to_dict ( serialize = serialize ) elif isinstance ( obj , datetime ) : return obj . date ( ) . isoformat ( ) elif isinstance ( obj , date ) : return obj . isoformat ( ) elif isinstance ( obj , ProxyDict ) : return dict ( obj ) elif isinstance ( obj , ProxyList ) : return list ( obj ) elif is_iterable_but_not_string ( obj ) : return list ( obj )
|
Handle encoding complex types .
|
8,571
|
def _call_api ( self , http_method , url , ** kwargs ) : log . debug ( "{}: {} - {}" . format ( http_method . __name__ . upper ( ) , url , kwargs ) ) if self . ratelimit is not None : response = self . _ratelimit ( http_method = http_method , url = url , ** kwargs ) else : response = http_method ( url , ** kwargs ) if response . status_code == 429 : while 'retry-after' in response . headers and int ( response . headers [ 'retry-after' ] ) > 0 : retry_after_seconds = int ( response . headers [ 'retry-after' ] ) log . warning ( "Waiting for requested retry-after period: %s seconds" % retry_after_seconds ) while retry_after_seconds > 0 : retry_after_seconds -= 1 self . check_ratelimit_budget ( 1 ) log . debug ( " -> sleeping: %s more seconds" % retry_after_seconds ) sleep ( 1 ) response = http_method ( url , ** kwargs ) self . _check_response ( response ) self . _update_callsafety ( response ) return response
|
Execute a call to the Zendesk API . Handles rate limiting , checking the response from Zendesk , and deserialization of the Zendesk response . All communication with Zendesk should go through this method .
|
8,572
|
def check_ratelimit_budget ( self , seconds_waited ) : if self . ratelimit_budget is not None : self . ratelimit_budget -= seconds_waited if self . ratelimit_budget < 1 : raise RatelimitBudgetExceeded ( "Rate limit budget exceeded!" )
|
If we have a ratelimit_budget , ensure it is not exceeded .
|
8,573
|
def _ratelimit ( self , http_method , url , ** kwargs ) : def time_since_last_call ( ) : if self . callsafety [ 'lastcalltime' ] is not None : return int ( time ( ) - self . callsafety [ 'lastcalltime' ] ) else : return None lastlimitremaining = self . callsafety [ 'lastlimitremaining' ] if time_since_last_call ( ) is None or time_since_last_call ( ) >= self . ratelimit_request_interval or lastlimitremaining >= self . ratelimit : response = http_method ( url , ** kwargs ) else : log . warning ( "Safety Limit Reached of %s remaining calls and time since last call is under %s seconds" % ( self . ratelimit , self . ratelimit_request_interval ) ) while time_since_last_call ( ) < self . ratelimit_request_interval : remaining_sleep = int ( self . ratelimit_request_interval - time_since_last_call ( ) ) log . debug ( " -> sleeping: %s more seconds" % remaining_sleep ) self . check_ratelimit_budget ( 1 ) sleep ( 1 ) response = http_method ( url , ** kwargs ) self . callsafety [ 'lastcalltime' ] = time ( ) self . callsafety [ 'lastlimitremaining' ] = int ( response . headers . get ( 'X-Rate-Limit-Remaining' , 0 ) ) return response
|
Ensure we do not hit the rate limit .
|
8,574
|
def _update_callsafety ( self , response ) : if self . ratelimit is not None : self . callsafety [ 'lastcalltime' ] = time ( ) self . callsafety [ 'lastlimitremaining' ] = int ( response . headers . get ( 'X-Rate-Limit-Remaining' , 0 ) )
|
Update the callsafety data structure
|
8,575
|
def _process_response ( self , response , object_mapping = None ) : try : pretty_response = response . json ( ) except ValueError : pretty_response = response for handler in self . _response_handlers : if handler . applies_to ( self , response ) : log . debug ( "{} matched: {}" . format ( handler . __name__ , pretty_response ) ) r = handler ( self , object_mapping ) . build ( response ) self . _clean_dirty_objects ( ) return r raise ZenpyException ( "Could not handle response: {}" . format ( pretty_response ) )
|
Attempt to find a ResponseHandler that knows how to process this response . If no handler can be found raise an Exception .
|
8,576
|
def _clean_dirty_objects ( self ) : if self . _dirty_object is None : return if not is_iterable_but_not_string ( self . _dirty_object ) : self . _dirty_object = [ self . _dirty_object ] log . debug ( "Cleaning objects: {}" . format ( self . _dirty_object ) ) for o in self . _dirty_object : if isinstance ( o , BaseObject ) : o . _clean_dirty ( ) self . _dirty_object = None
|
Clear all dirty attributes for the last object or list of objects successfully submitted to Zendesk .
|
8,577
|
def _serialize ( self , zenpy_object ) : if not type ( zenpy_object ) == dict : log . debug ( "Setting dirty object: {}" . format ( zenpy_object ) ) self . _dirty_object = zenpy_object return json . loads ( json . dumps ( zenpy_object , default = json_encode_for_zendesk ) )
|
Serialize a Zenpy object to JSON
|
8,578
|
def _query_zendesk ( self , endpoint , object_type , * endpoint_args , ** endpoint_kwargs ) : _id = endpoint_kwargs . get ( 'id' , None ) if _id : item = self . cache . get ( object_type , _id ) if item : return item else : return self . _get ( url = self . _build_url ( endpoint ( * endpoint_args , ** endpoint_kwargs ) ) ) elif 'ids' in endpoint_kwargs : cached_objects = [ ] for _id in endpoint_kwargs [ 'ids' ] : obj = self . cache . get ( object_type , _id ) if not obj : return self . _get ( self . _build_url ( endpoint = endpoint ( * endpoint_args , ** endpoint_kwargs ) ) ) cached_objects . append ( obj ) return ZendeskResultGenerator ( self , { } , response_objects = cached_objects , object_type = object_type ) else : return self . _get ( self . _build_url ( endpoint = endpoint ( * endpoint_args , ** endpoint_kwargs ) ) )
|
Query Zendesk for items . If an id or list of ids are passed , attempt to locate these items in the relevant cache . If they cannot be found , or no ids are passed , execute a call to Zendesk to retrieve the items .
|
8,579
|
def _check_response ( self , response ) : if response . status_code > 299 or response . status_code < 200 : log . debug ( "Received response code [%s] - headers: %s" % ( response . status_code , str ( response . headers ) ) ) try : _json = response . json ( ) err_type = _json . get ( "error" , '' ) if err_type == 'RecordNotFound' : raise RecordNotFoundException ( json . dumps ( _json ) , response = response ) elif err_type == "TooManyValues" : raise TooManyValuesException ( json . dumps ( _json ) , response = response ) else : raise APIException ( json . dumps ( _json ) , response = response ) except ValueError : response . raise_for_status ( )
|
Check the response code returned by Zendesk . If it is outside the 200 range , raise an exception of the correct type .
|
8,580
|
def _build_url ( self , endpoint ) : if not issubclass ( type ( self ) , ChatApiBase ) and not self . subdomain : raise ZenpyException ( "subdomain is required when accessing the Zendesk API!" ) if self . subdomain : endpoint . netloc = '{}.{}' . format ( self . subdomain , self . domain ) else : endpoint . netloc = self . domain endpoint . prefix_path ( self . api_prefix ) return endpoint . build ( )
|
Build complete URL
|
8,581
|
def tags ( self , ticket_id ) : return self . _query_zendesk ( self . endpoint . tags , 'tag' , id = ticket_id )
|
Lists the most popular recent tags in decreasing popularity from a specific ticket .
|
8,582
|
def incremental ( self , start_time , include = None ) : return self . _query_zendesk ( self . endpoint . incremental , self . object_type , start_time = start_time , include = include )
|
Retrieve bulk data from the incremental API .
|
8,583
|
def incremental ( self , start_time , ** kwargs ) : return self . _query_zendesk ( self . endpoint . incremental , self . object_type , start_time = start_time , ** kwargs )
|
Retrieve bulk data from the chat incremental API .
|
8,584
|
def show ( self , user , identity ) : url = self . _build_url ( self . endpoint . show ( user , identity ) ) return self . _get ( url )
|
Show the specified identity for the specified user .
|
8,585
|
def update ( self , user , identity ) : return UserIdentityRequest ( self ) . put ( self . endpoint . update , user , identity )
|
Update specified identity for the specified user
|
8,586
|
def make_primary ( self , user , identity ) : return UserIdentityRequest ( self ) . put ( self . endpoint . make_primary , user , identity )
|
Set the specified identity as primary for the specified user .
|
8,587
|
def request_verification ( self , user , identity ) : return UserIdentityRequest ( self ) . put ( self . endpoint . request_verification , user , identity )
|
Sends the user a verification email with a link to verify ownership of the email address .
|
8,588
|
def verify ( self , user , identity ) : return UserIdentityRequest ( self ) . put ( self . endpoint . verify , user , identity )
|
Verify an identity for a user
|
8,589
|
def groups ( self , user , include = None ) : return self . _query_zendesk ( self . endpoint . groups , 'group' , id = user , include = include )
|
Retrieve the groups for this user .
|
8,590
|
def organizations ( self , user , include = None ) : return self . _query_zendesk ( self . endpoint . organizations , 'organization' , id = user , include = include )
|
Retrieve the organizations for this user .
|
8,591
|
def requested ( self , user , include = None ) : return self . _query_zendesk ( self . endpoint . requested , 'ticket' , id = user , include = include )
|
Retrieve the requested tickets for this user .
|
8,592
|
def cced ( self , user , include = None ) : return self . _query_zendesk ( self . endpoint . cced , 'ticket' , id = user , include = include )
|
Retrieve the tickets this user is cc'd into .
|
8,593
|
def assigned ( self , user , include = None ) : return self . _query_zendesk ( self . endpoint . assigned , 'ticket' , id = user , include = include )
|
Retrieve the assigned tickets for this user .
|
8,594
|
def group_memberships ( self , user , include = None ) : return self . _query_zendesk ( self . endpoint . group_memberships , 'group_membership' , id = user , include = include )
|
Retrieve the group memberships for this user .
|
8,595
|
def related ( self , user ) : return self . _query_zendesk ( self . endpoint . related , 'user_related' , id = user )
|
Returns the UserRelated information for the requested User
|
8,596
|
def me ( self , include = None ) : return self . _query_zendesk ( self . endpoint . me , 'user' , include = include )
|
Return the logged in user
|
8,597
|
def user_fields ( self , user ) : return self . _query_zendesk ( self . endpoint . user_fields , 'user_field' , id = user )
|
Retrieve the user fields for this user .
|
8,598
|
def organization_memberships ( self , user ) : return self . _query_zendesk ( self . endpoint . organization_memberships , 'organization_membership' , id = user )
|
Retrieve the organization memberships for this user .
|
8,599
|
def upload ( self , fp , token = None , target_name = None , content_type = None ) : return UploadRequest ( self ) . post ( fp , token = token , target_name = target_name , content_type = content_type )
|
Upload a file to Zendesk .
|