idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
53,900
def remove_file ( path , remove = os . remove , exists = os . path . exists ) : try : remove ( path ) except OSError : if exists ( path ) : raise
Remove file and raise OSError if still exists .
53,901
def unix_domain_socket_server(sock_path):
    """Create a UNIX-domain socket on the specified path.

    Yields the listening socket; the socket file is removed on exit.
    NOTE(review): generator-style context manager — presumably wrapped with
    @contextlib.contextmanager outside this view.
    """
    log.debug('serving on %s', sock_path)
    remove_file(sock_path)  # clear a stale socket file from a previous run
    server = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
    server.bind(sock_path)
    server.listen(1)
    try:
        yield server
    finally:
        remove_file(sock_path)
Create UNIX - domain socket on specified path .
53,902
def handle_connection(conn, handler, mutex):
    """Handle a single connection using the specified protocol handler, in a loop.

    Reads framed messages until EOF; each request is handled under `mutex`
    so only one request at a time reaches the shared handler.
    """
    try:
        log.debug('welcome agent')
        with contextlib.closing(conn):
            while True:
                msg = util.read_frame(conn)
                with mutex:
                    reply = handler.handle(msg=msg)
                util.send(conn, reply)
    except EOFError:
        log.debug('goodbye agent')  # client disconnected normally
    except Exception as e:
        log.warning('error: %s', e, exc_info=True)
Handle a single connection using the specified protocol handler in a loop .
53,903
def retry(func, exception_type, quit_event):
    """Run `func` repeatedly, retrying whenever `exception_type` occurs.

    Raises StopIteration as soon as `quit_event` is set.
    """
    while not quit_event.is_set():
        try:
            return func()
        except exception_type:
            continue
    raise StopIteration
Run the function retrying when the specified exception_type occurs .
53,904
def spawn(func, kwargs):
    """Spawn a thread running `func(**kwargs)` and join it after the context is over.

    Generator-style context manager (presumably wrapped with
    @contextlib.contextmanager outside this view).
    """
    t = threading.Thread(target=func, kwargs=kwargs)
    t.start()
    try:
        yield
    finally:
        # BUGFIX: the original did `yield t.join()`, which joined the thread
        # BEFORE yielding — blocking the caller until the thread exited
        # instead of joining "after the context is over" as documented.
        # The yielded value (None) is unchanged, so callers are unaffected.
        t.join()
Spawn a thread and join it after the context is over .
53,905
def run_process(command, environ):
    """Run the specified process and wait until it finishes.

    `environ` entries are overlaid on top of the current os.environ.
    Returns the subprocess exit code; raises OSError if it cannot start.
    """
    log.info('running %r with %r', command, environ)
    env = dict(os.environ)
    env.update(environ)
    try:
        proc = subprocess.Popen(args=command, env=env)
    except OSError as e:
        raise OSError('cannot run %r: %s' % (command, e))
    log.debug('subprocess %d is running', proc.pid)
    exit_code = proc.wait()
    log.debug('subprocess %d exited: %d', proc.pid, exit_code)
    return exit_code
Run the specified process and wait until it finishes .
53,906
def check_output(args, env=None, sp=subprocess):
    """Call an external binary and return its stdout."""
    log.debug('calling %s with env %s', args, env)
    out = sp.check_output(args=args, env=env)
    log.debug('output: %r', out)
    return out
Call an external binary and return its stdout .
53,907
def get_agent_sock_path(env=None, sp=subprocess):
    """Find the GPG agent UNIX socket path by parsing `gpgconf --list-dirs`."""
    args = [util.which('gpgconf'), '--list-dirs']
    output = check_output(args=args, env=env, sp=sp)
    # Each line is `name:value`; build a mapping of directory entries.
    dirs = dict(line.split(b':', 1) for line in output.strip().split(b'\n'))
    log.debug('%s: %s', args, dirs)
    return dirs[b'agent-socket']
Parse gpgconf output to find out GPG agent UNIX socket path .
53,908
def connect_to_agent(env=None, sp=subprocess):
    """Connect to the GPG agent's UNIX socket and return the socket."""
    sock_path = get_agent_sock_path(sp=sp, env=env)
    # Make sure the agent is actually running before connecting.
    check_output(args=['gpg-connect-agent', '/bye'], sp=sp)
    conn = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
    conn.connect(sock_path)
    return conn
Connect to the GPG agent's UNIX socket.
53,909
def sendline(sock, msg, confidential=False):
    """Send a binary message followed by EOL."""
    shown = '<snip>' if confidential else msg  # avoid logging secrets
    log.debug('<- %r', shown)
    sock.sendall(msg + b'\n')
Send a binary message followed by EOL .
53,910
def recvline(sock):
    """Receive a single newline-terminated line from the socket.

    Returns the line without the trailing newline, or None if the
    connection is closed before a newline arrives.
    """
    buf = io.BytesIO()
    while True:
        byte = sock.recv(1)
        if not byte:
            return None  # EOF before end-of-line
        if byte == b'\n':
            break
        buf.write(byte)
    line = buf.getvalue()
    log.debug('-> %r', line)
    return line
Receive a single line from the socket .
53,911
def parse_term(s):
    """Parse a single length-prefixed S-expression term from bytes.

    The format is `<size>:<data>`; returns (term, remainder).
    """
    size_str, rest = s.split(b':', 1)
    n = int(size_str)
    return rest[:n], rest[n:]
Parse single s - expr term from bytes .
53,912
def parse(s):
    """Parse a full S-expression from bytes.

    Returns (value, leftover): a nested list for `(...)` groups,
    or a plain term otherwise.
    """
    if not s.startswith(b'('):
        return parse_term(s)
    rest = s[1:]
    name, rest = parse_term(rest)
    values = [name]
    while not rest.startswith(b')'):
        value, rest = parse(rest)
        values.append(value)
    return values, rest[1:]  # skip the closing parenthesis
Parse full s - expr from bytes .
53,913
def parse_sig(sig):
    """Parse signature integer values from a `sig-val` S-expression."""
    label, body = sig
    assert label == b'sig-val'
    algo_name = body[0]
    parsers = {
        b'rsa': _parse_rsa_sig,
        b'ecdsa': _parse_ecdsa_sig,
        b'eddsa': _parse_eddsa_sig,
        b'dsa': _parse_dsa_sig,
    }
    return parsers[algo_name](args=body[1:])
Parse signature integer values from s - expr .
53,914
def sign_digest(sock, keygrip, digest, sp=subprocess, environ=None):
    """Sign a 32-byte digest using the specified key, via the GPG agent.

    Drives the Assuan protocol dialogue over `sock` and returns the
    parsed signature integers (via parse_sig).
    """
    # NOTE(review): 8 is the SHA-256 hash algorithm ID in RFC 4880, matching
    # the 32-byte digest length asserted below — confirm against the spec.
    hash_algo = 8
    assert len(digest) == 32
    assert communicate(sock, 'RESET').startswith(b'OK')
    # Pass terminal/display info so pinentry can prompt the user.
    ttyname = check_output(args=['tty'], sp=sp).strip()
    options = ['ttyname={}'.format(ttyname)]
    display = (environ or os.environ).get('DISPLAY')
    if display is not None:
        options.append('display={}'.format(display))
    for opt in options:
        assert communicate(sock, 'OPTION {}'.format(opt)) == b'OK'
    assert communicate(sock, 'SIGKEY {}'.format(keygrip)) == b'OK'
    hex_digest = binascii.hexlify(digest).upper().decode('ascii')
    assert communicate(sock, 'SETHASH {} {}'.format(hash_algo,
                                                    hex_digest)) == b'OK'
    assert communicate(sock, 'SETKEYDESC '
                       'Sign+a+new+TREZOR-based+subkey') == b'OK'
    assert communicate(sock, 'PKSIGN') == b'OK'
    while True:
        line = recvline(sock).strip()
        if line.startswith(b'S PROGRESS'):
            continue  # skip progress notifications
        else:
            break
    line = unescape(line)
    log.debug('unescaped: %r', line)
    # The signature arrives as a data line: `D <s-expression>`.
    prefix, sig = line.split(b' ', 1)
    if prefix != b'D':
        raise ValueError(prefix)
    sig, leftover = parse(sig)
    assert not leftover, leftover
    return parse_sig(sig)
Sign a digest using specified key using GPG agent .
53,915
def get_gnupg_components(sp=subprocess):
    """Parse GnuPG component paths from `gpgconf --list-components`."""
    args = [util.which('gpgconf'), '--list-components']
    output = check_output(args=args, sp=sp)
    pairs = re.findall('(.*):.*:(.*)', output.decode('utf-8'))
    components = dict(pairs)
    log.debug('gpgconf --list-components: %s', components)
    return components
Parse GnuPG components paths .
53,916
def gpg_command(args, env=None):
    """Prepare the common GPG command line, honoring $NEOPG_BINARY."""
    env = os.environ if env is None else env
    cmd = get_gnupg_binary(neopg_binary=env.get('NEOPG_BINARY'))
    return [cmd] + args
Prepare common GPG command line arguments .
53,917
def export_public_key(user_id, env=None, sp=subprocess):
    """Export the GPG public key for `user_id`; raise KeyError if missing."""
    args = gpg_command(['--export', user_id])
    result = check_output(args=args, env=env, sp=sp)
    if result:
        return result
    log.error('could not find public key %r in local GPG keyring', user_id)
    raise KeyError(user_id)
Export GPG public key for specified user_id .
53,918
def export_public_keys(env=None, sp=subprocess):
    """Export all GPG public keys; raise KeyError if none are found."""
    result = check_output(args=gpg_command(['--export']), env=env, sp=sp)
    if not result:
        raise KeyError('No GPG public keys found at env: {!r}'.format(env))
    return result
Export all GPG public keys .
53,919
def create_agent_signer(user_id):
    """Return a closure that signs digests with existing GPG keys via gpg-agent."""
    sock = connect_to_agent(env=os.environ)
    keygrip = get_keygrip(user_id)

    def sign(digest):
        # Socket and keygrip are captured by this closure.
        return sign_digest(sock=sock, keygrip=keygrip, digest=digest)

    return sign
Sign digest with existing GPG keys using gpg - agent tool .
53,920
def msg_name(code):
    """Convert an integer message code into its string name."""
    by_code = {value: key for key, value in COMMANDS.items()}
    return by_code[code]
Convert integer message code into a string name .
53,921
def _legacy_pubs(buf):
    """Reply with an empty key list: SSH v1 public keys are not supported."""
    leftover = buf.read()
    if leftover:
        log.warning('skipping leftover: %r', leftover)
    code = util.pack('B', msg_code('SSH_AGENT_RSA_IDENTITIES_ANSWER'))
    count = util.pack('L', 0)  # zero keys available
    return util.frame(code, count)
SSH v1 public keys are not supported .
53,922
def handle(self, msg):
    """Handle an SSH message from the SSH client and return the response.

    Dispatches on the leading message-code byte via self.methods;
    unsupported codes get a generic failure() reply.
    """
    debug_msg = ': {!r}'.format(msg) if self.debug else ''
    log.debug('request: %d bytes%s', len(msg), debug_msg)
    buf = io.BytesIO(msg)
    code, = util.recv(buf, '>B')
    if code not in self.methods:
        # NOTE(review): msg_name() raises KeyError for codes absent from
        # COMMANDS, which would replace this warning with an exception.
        log.warning('Unsupported command: %s (%d)', msg_name(code), code)
        return failure()
    method = self.methods[code]
    log.debug('calling %s()', method.__name__)
    reply = method(buf=buf)
    debug_reply = ': {!r}'.format(reply) if self.debug else ''
    log.debug('reply: %d bytes%s', len(reply), debug_reply)
    return reply
Handle SSH message from the SSH client and return the response .
53,923
def list_pubs(self, buf):
    """Serialize and return the available SSH v2 public keys."""
    assert not buf.read()  # this request carries no payload
    keys = self.conn.parse_public_keys()
    code = util.pack('B', msg_code('SSH2_AGENT_IDENTITIES_ANSWER'))
    num = util.pack('L', len(keys))
    log.debug('available keys: %s', [k['name'] for k in keys])
    for index, key in enumerate(keys):
        log.debug('%2d) %s', index + 1, key['fingerprint'])
    pubs = [util.frame(key['blob']) + util.frame(key['name'])
            for key in keys]
    return util.frame(code, num, *pubs)
SSH v2 public keys are serialized and returned .
53,924
def sign_message(self, buf):
    """Perform SSH v2 public-key authentication.

    Looks up the requested key by fingerprint, signs the challenge blob
    on the device, verifies the signature locally, and returns a framed
    SSH2_AGENT_SIGN_RESPONSE.
    """
    key = formats.parse_pubkey(util.read_frame(buf))
    log.debug('looking for %s', key['fingerprint'])
    blob = util.read_frame(buf)
    # Third frame must be empty (NOTE(review): presumably agent flags — confirm).
    assert util.read_frame(buf) == b''
    assert not buf.read()
    for k in self.conn.parse_public_keys():
        if (k['fingerprint']) == (key['fingerprint']):
            log.debug('using key %r (%s)', k['name'], k['fingerprint'])
            key = k
            break
    else:
        raise KeyError('key not found')
    label = key['name'].decode('utf-8')
    log.debug('signing %d-byte blob with "%s" key', len(blob), label)
    try:
        signature = self.conn.sign(blob=blob, identity=key['identity'])
    except IOError:
        return failure()  # signing failed or was cancelled
    log.debug('signature: %r', signature)
    try:
        # Sanity-check the device's signature before replying to the client.
        sig_bytes = key['verifier'](sig=signature, msg=blob)
        log.info('signature status: OK')
    except formats.ecdsa.BadSignatureError:
        log.exception('signature status: ERROR')
        raise ValueError('invalid ECDSA signature')
    log.debug('signature size: %d bytes', len(sig_bytes))
    data = util.frame(util.frame(key['type']), util.frame(sig_bytes))
    code = util.pack('B', msg_code('SSH2_AGENT_SIGN_RESPONSE'))
    return util.frame(code, data)
SSH v2 public key authentication is performed .
53,925
def recv(conn, size):
    """Receive bytes from a connection socket or stream.

    `size` may be an int (read exactly that many bytes and return them)
    or a struct format string (read calcsize(fmt) bytes and return the
    unpacked tuple). Raises EOFError if the stream ends early.
    """
    try:
        fmt = size
        size = struct.calcsize(fmt)  # TypeError here means `size` was an int
    except TypeError:
        fmt = None
    # Support both socket-like (.recv) and file-like (.read) objects.
    try:
        _read = conn.recv
    except AttributeError:
        _read = conn.read
    res = io.BytesIO()
    while size > 0:
        buf = _read(size)
        if not buf:
            raise EOFError
        size = size - len(buf)
        res.write(buf)
    res = res.getvalue()
    if fmt:
        return struct.unpack(fmt, res)
    else:
        return res
Receive bytes from connection socket or stream .
53,926
def bytes2num(s):
    """Convert MSB-first bytes to an unsigned integer."""
    result = 0
    for byte in bytearray(s):  # iterate in MSB-first order
        result = (result << 8) | byte
    return result
Convert MSB - first bytes to an unsigned integer .
53,927
def num2bytes(value, size):
    """Convert an unsigned integer to MSB-first bytes with the given size.

    AssertionError if the value does not fit into `size` bytes.
    """
    out = bytearray()
    for _ in range(size):
        out.insert(0, value & 0xFF)  # prepend the least-significant byte
        value = value >> 8
    assert value == 0
    return bytes(out)
Convert an unsigned integer to MSB - first bytes with specified size .
53,928
def frame(*msgs):
    """Serialize an MSB-first length-prefixed frame."""
    msg = b''.join(msgs)
    return pack('L', len(msg)) + msg
Serialize MSB - first length - prefixed frame .
53,929
def split_bits(value, *bits):
    """Split an integer into a list of fields, MSB-first, per the bit widths.

    AssertionError if `value` does not fit into sum(bits) bits.
    """
    fields = []
    for width in reversed(bits):
        mask = (1 << width) - 1
        fields.insert(0, value & mask)  # prepend the least-significant field
        value = value >> width
    assert value == 0
    return fields
Split integer value into list of ints according to bits list .
53,930
def readfmt(stream, fmt):
    """Read and unpack an object from the stream using a struct format string."""
    blob = stream.read(struct.calcsize(fmt))
    return struct.unpack(fmt, blob)
Read and unpack an object from stream using a struct format string .
53,931
def setup_logging(verbosity, filename=None):
    """Configure logging for this tool: stderr plus an optional log file."""
    levels = [logging.WARNING, logging.INFO, logging.DEBUG]
    level = levels[min(verbosity, len(levels) - 1)]
    logging.root.setLevel(level)
    fmt = logging.Formatter('%(asctime)s %(levelname)-12s %(message)-100s '
                            '[%(filename)s:%(lineno)d]')
    handlers = [logging.StreamHandler()]
    if filename:
        handlers.append(logging.FileHandler(filename, 'a'))
    for handler in handlers:
        handler.setFormatter(fmt)
        logging.root.addHandler(handler)
Configure logging for this tool .
53,932
def which(cmd):
    """Return the full path to `cmd`, raising OSError when it is missing."""
    try:
        from shutil import which as _which
    except ImportError:
        # Python 2 fallback package.
        from backports.shutil_which import which as _which
    full_path = _which(cmd)
    if full_path is None:
        raise OSError('Cannot find {!r} in $PATH'.format(cmd))
    log.debug('which %r => %r', cmd, full_path)
    return full_path
Return full path to specified command or raise OSError if missing .
53,933
def readfmt(self, fmt):
    """Read a single object from the stream using a struct format string."""
    size = struct.calcsize(fmt)
    obj, = struct.unpack(fmt, self.read(size))
    return obj
Read a specified object using a struct format string .
53,934
def read(self, size=None):
    """Read `size` bytes from the underlying stream.

    Raises EOFError on a short read; mirrors the data into the
    capture buffer when one is set.
    """
    data = self.s.read(size)
    if size is not None and len(data) < size:
        raise EOFError
    if self._captured:
        self._captured.write(data)
    return data
Read size bytes from stream .
53,935
def get(self):
    """Return the existing value, or None if the deadline has expired."""
    expired = self.timer() > self.deadline
    if expired:
        self.value = None  # drop the stale value
    return self.value
Returns existing value or None if deadline has expired .
53,936
def set(self, value):
    """Store a new value and push the expiration deadline forward."""
    self.value = value
    self.deadline = self.timer() + self.duration
Set new value and reset the deadline for expiration .
53,937
def sig_encode(r, s):
    """Serialize ECDSA signature data (r, s) into a GPG S-expression."""
    r_blob, s_blob = [util.assuan_serialize(util.num2bytes(x, 32))
                      for x in (r, s)]
    return (b'(7:sig-val(5:ecdsa(1:r32:' + r_blob +
            b')(1:s32:' + s_blob + b')))')
Serialize ECDSA signature data into GPG S - expression .
53,938
def parse_ecdh(line):
    """Parse an ECDH request line and return the remote public key."""
    prefix, rest = line.split(b' ', 1)
    assert prefix == b'D'  # data line expected
    exp, leftover = keyring.parse(keyring.unescape(rest))
    log.debug('ECDH s-exp: %r', exp)
    assert not leftover
    label, body = exp
    assert label == b'enc-val'
    assert body[0] == b'ecdh'
    items = body[1:]
    log.debug('ECDH parameters: %r', items)
    return dict(items)[b'e']
Parse ECDH request and return remote public key .
53,939
def handle_getinfo(self, conn, args):
    """Handle a subset of the GETINFO messages."""
    topic = args[0]
    result = None
    if topic == b'version':
        result = self.version
    elif topic == b's2k_count':
        result = '{}'.format(64 << 20).encode('ascii')
    else:
        log.warning('Unknown GETINFO command: %s', args)
    if result:
        keyring.sendline(conn, b'D ' + result)
Handle some of the GETINFO messages .
53,940
def handle_scd(self, conn, args):
    """Reject smart-card device requests (only `GETINFO version` is answered)."""
    known_replies = {(b'GETINFO', b'version'): self.version}
    reply = known_replies.get(args)
    if reply is None:
        raise AgentError(b'ERR 100696144 No such device <SCD>')
    keyring.sendline(conn, b'D ' + reply)
No support for smart - card device protocol .
53,941
def get_identity(self, keygrip):
    """Return the device.interface.Identity matching the specified keygrip.

    Reconstructs the public key via the device and sanity-checks that its
    key ID and keygrip match the entry loaded from self.pubkey_bytes.
    """
    keygrip_bytes = binascii.unhexlify(keygrip)
    pubkey_dict, user_ids = decode.load_by_keygrip(
        pubkey_bytes=self.pubkey_bytes, keygrip=keygrip_bytes)
    # Use the first user ID associated with this key.
    user_id = user_ids[0]['value'].decode('utf-8')
    curve_name = protocol.get_curve_name_by_oid(pubkey_dict['curve_oid'])
    ecdh = (pubkey_dict['algo'] == protocol.ECDH_ALGO_ID)
    identity = client.create_identity(user_id=user_id,
                                      curve_name=curve_name)
    verifying_key = self.client.pubkey(identity=identity, ecdh=ecdh)
    pubkey = protocol.PublicKey(
        curve_name=curve_name, created=pubkey_dict['created'],
        verifying_key=verifying_key, ecdh=ecdh)
    # Verify that the device-derived key matches the keyring entry.
    assert pubkey.key_id() == pubkey_dict['key_id']
    assert pubkey.keygrip() == keygrip_bytes
    return identity
Returns device . interface . Identity that matches specified keygrip .
53,942
def pksign(self, conn):
    """Sign a message digest using a private EC key on the device."""
    log.debug('signing %r digest (algo #%s)', self.digest, self.algo)
    identity = self.get_identity(keygrip=self.keygrip)
    digest_bytes = binascii.unhexlify(self.digest)
    r, s = self.client.sign(identity=identity, digest=digest_bytes)
    result = sig_encode(r, s)
    log.debug('result: %r', result)
    keyring.sendline(conn, b'D ' + result)
Sign a message digest using a private EC key .
53,943
def pkdecrypt(self, conn):
    """Handle decryption using ECDH.

    Asks the GPG client for the ciphertext, extracts the remote public
    key from it and replies with the derived EC point.
    """
    for msg in [b'S INQUIRE_MAXLEN 4096', b'INQUIRE CIPHERTEXT']:
        keyring.sendline(conn, msg)
    line = keyring.recvline(conn)
    assert keyring.recvline(conn) == b'END'  # data is followed by END
    remote_pubkey = parse_ecdh(line)
    identity = self.get_identity(keygrip=self.keygrip)
    ec_point = self.client.ecdh(identity=identity, pubkey=remote_pubkey)
    keyring.sendline(conn, b'D ' + _serialize_point(ec_point))
Handle decryption using ECDH .
53,944
def have_key(self, *keygrips):
    """Check if any keygrip corresponds to a device-backed key.

    Raises AgentError when none of the keygrips matches.
    """
    for keygrip in keygrips:
        try:
            self.get_identity(keygrip=keygrip)
            break  # found a matching key: stop searching
        except KeyError as e:
            log.warning('HAVEKEY(%s) failed: %s', keygrip, e)
    else:
        # Loop finished without a break -> no key was found.
        raise AgentError(b'ERR 67108881 No secret key <GPG Agent>')
Check if any keygrip corresponds to a TREZOR - based key .
53,945
def set_hash(self, algo, digest):
    """Remember the algorithm ID and hexadecimal digest for the next operation."""
    self.digest = digest
    self.algo = algo
Set algorithm ID and hexadecimal digest for next operation .
53,946
def handle(self, conn):
    """Handle a connection from the GPG binary using the Assuan protocol.

    Reads commands line by line and dispatches via self.handlers;
    BYE ends the session, KILLAGENT raises AgentStop to shut down.
    """
    keyring.sendline(conn, b'OK')  # greet the client
    for line in keyring.iterlines(conn):
        parts = line.split(b' ')
        command = parts[0]
        args = tuple(parts[1:])
        if command == b'BYE':
            return
        elif command == b'KILLAGENT':
            keyring.sendline(conn, b'OK')
            raise AgentStop()
        if command not in self.handlers:
            log.error('unknown request: %r', line)
            continue
        handler = self.handlers[command]
        if handler:
            try:
                handler(conn, args)
            except AgentError as e:
                msg, = e.args
                keyring.sendline(conn, msg)
                continue
        # Default acknowledgement (also used for commands mapped to None).
        keyring.sendline(conn, b'OK')
Handle connection from GPG binary using the ASSUAN protocol .
53,947
def connect(self):
    """Return a dummy connection with a fixed secret exponent.

    The secret exponent is hard-coded to 1, so the key is deterministic
    and publicly known — debugging/testing only.
    """
    log.critical('NEVER USE THIS CODE FOR REAL-LIFE USE-CASES!!!')
    log.critical('ONLY FOR DEBUGGING AND TESTING!!!')
    self.secexp = 1
    self.sk = ecdsa.SigningKey.from_secret_exponent(
        secexp=self.secexp, curve=ecdsa.curves.NIST256p,
        hashfunc=hashlib.sha256)
    self.vk = self.sk.get_verifying_key()
    return self
Return dummy connection .
53,948
def create_identity(user_id, curve_name):
    """Create a GPG identity object for the hardware device."""
    identity = interface.Identity(identity_str='gpg://',
                                  curve_name=curve_name)
    identity.identity_dict['host'] = user_id
    return identity
Create GPG identity for hardware device .
53,949
def pubkey(self, identity, ecdh=False):
    """Return the public key as a VerifyingKey object."""
    with self.device:
        raw = self.device.pubkey(ecdh=ecdh, identity=identity)
        return formats.decompress_pubkey(
            pubkey=raw, curve_name=identity.curve_name)
Return public key as VerifyingKey object .
53,950
def sign(self, identity, digest):
    """Sign the digest on the device and return (r, s) as integers."""
    log.info('please confirm GPG signature on %s for "%s"...',
             self.device, identity.to_string())
    if identity.curve_name == formats.CURVE_NIST256:
        # NOTE(review): truncated to 32 bytes for NIST256 — presumably the
        # device expects a SHA-256-sized digest; confirm against device API.
        digest = digest[:32]
    log.debug('signing digest: %s', util.hexlify(digest))
    with self.device:
        sig = self.device.sign(blob=digest, identity=identity)
    # The device returns r || s, 32 bytes each.
    return (util.bytes2num(sig[:32]), util.bytes2num(sig[32:]))
Sign the digest and return a serialized signature .
53,951
def ecdh(self, identity, pubkey):
    """Derive a shared secret using ECDH from the remote public key."""
    log.info('please confirm GPG decryption on %s for "%s"...',
             self.device, identity.to_string())
    with self.device:
        return self.device.ecdh(pubkey=pubkey, identity=identity)
Derive shared secret using ECDH from remote public key .
53,952
def connect(self):
    """Enumerate and connect to the first available device interface.

    Retries the PIN-protected ping up to 5 times on PIN failures; any
    other ping failure closes the connection and re-raises.
    NOTE(review): returns None implicitly if all 5 attempts fail.
    """
    transport = self._defs.find_device()
    if not transport:
        raise interface.NotFoundError('{} not connected'.format(self))
    log.debug('using transport: %s', transport)
    for _ in range(5):  # retry a few times in case of PIN failures
        connection = self._defs.Client(transport=transport,
                                       ui=self.ui,
                                       state=self.__class__.cached_state)
        self._verify_version(connection)
        try:
            connection.ping(msg='', pin_protection=True)
            return connection
        except (self._defs.PinException, ValueError) as e:
            log.error('Invalid PIN: %s, retrying...', e)
            continue
        except Exception as e:
            log.exception('ping failed: %s', e)
            connection.close()  # release the transport before propagating
            raise
Enumerate and connect to the first available interface .
53,953
def string_to_identity(identity_str):
    """Parse an identity string into its dictionary form."""
    match = _identity_regexp.match(identity_str)
    groups = match.groupdict()
    log.debug('parsed identity: %s', groups)
    # Drop the components that did not match.
    return {key: value for key, value in groups.items() if value}
Parse string into Identity dictionary .
53,954
def identity_to_string(identity_dict):
    """Dump an Identity dictionary into its string representation."""
    parts = []
    proto = identity_dict.get('proto')
    if proto:
        parts.append(proto + '://')
    user = identity_dict.get('user')
    if user:
        parts.append(user + '@')
    parts.append(identity_dict['host'])
    port = identity_dict.get('port')
    if port:
        parts.append(':' + port)
    path = identity_dict.get('path')
    if path:
        parts.append(path)
    log.debug('identity parts: %s', parts)
    return ''.join(parts)
Dump Identity dictionary into its string representation .
53,955
def items(self):
    """Return identity_dict items with values transliterated to ASCII."""
    return [(key, unidecode.unidecode(value))
            for key, value in self.identity_dict.items()]
Return a copy of identity_dict items .
53,956
def to_bytes(self):
    """Return the identity string, transliterated into ASCII bytes."""
    identity_str = identity_to_string(self.identity_dict)
    return unidecode.unidecode(identity_str).encode('ascii')
Transliterate Unicode into ASCII .
53,957
def get_curve_name(self, ecdh=False):
    """Return the correct curve name for device operations."""
    if not ecdh:
        return self.curve_name
    return formats.get_ecdh_curve_name(self.curve_name)
Return correct curve name for device operations .
53,958
def serve(handler, sock_path, timeout=UNIX_SOCKET_TIMEOUT):
    """Start the ssh-agent server on a UNIX-domain socket.

    Yields the environment (SSH_AUTH_SOCK/SSH_AGENT_PID) that clients
    need; the server thread is signalled to stop when the context exits.
    """
    ssh_version = subprocess.check_output(['ssh', '-V'],
                                          stderr=subprocess.STDOUT)
    log.debug('local SSH version: %r', ssh_version)
    environ = {'SSH_AUTH_SOCK': sock_path,
               'SSH_AGENT_PID': str(os.getpid())}
    # Serialize device access across concurrent agent connections.
    device_mutex = threading.Lock()
    with server.unix_domain_socket_server(sock_path) as sock:
        sock.settimeout(timeout)
        quit_event = threading.Event()
        handle_conn = functools.partial(server.handle_connection,
                                        handler=handler,
                                        mutex=device_mutex)
        kwargs = dict(sock=sock, handle_conn=handle_conn,
                      quit_event=quit_event)
        with server.spawn(server.server_thread, kwargs):
            try:
                yield environ
            finally:
                log.debug('closing server')
                quit_event.set()
Start the ssh - agent server on a UNIX - domain socket .
53,959
def run_server(conn, command, sock_path, debug, timeout):
    """Common code for run_agent and run_git below.

    Serves the SSH agent and either runs `command` under it, or blocks
    until interrupted. Returns the subprocess exit code (0 otherwise).
    """
    ret = 0
    try:
        handler = protocol.Handler(conn=conn, debug=debug)
        with serve(handler=handler, sock_path=sock_path,
                   timeout=timeout) as env:
            if command:
                ret = server.run_process(command=command, environ=env)
            else:
                signal.pause()  # wait for a signal (e.g. Ctrl-C)
    except KeyboardInterrupt:
        log.info('server stopped')
    return ret
Common code for run_agent and run_git below .
53,960
def handle_connection_error(func):
    """Decorator: fail with exit code 1 on device connection errors."""
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except device.interface.NotFoundError as e:
            log.error('Connection error (try unplugging and replugging'
                      ' your device): %s', e)
            return 1
    return wrapper
Fail with non - zero exit code .
53,961
def parse_config(contents):
    """Parse the config file contents, yielding Identity objects."""
    pairs = re.findall(r'\<(.*?)\|(.*?)\>', contents)
    for identity_str, curve_name in pairs:
        yield device.interface.Identity(identity_str=identity_str,
                                        curve_name=curve_name)
Parse config file into a list of Identity objects .
53,962
def main(device_type):
    """Run ssh-agent using the given hardware client factory."""
    args = create_agent_parser(device_type=device_type).parse_args()
    util.setup_logging(verbosity=args.verbose, filename=args.log_file)
    public_keys = None
    filename = None
    if args.identity.startswith('/'):
        # Identity looks like a path: load identities (and, for `.pub`
        # files, the public keys too) from a config file.
        filename = args.identity
        contents = open(filename, 'rb').read().decode('utf-8')
        if filename.endswith('.pub'):
            public_keys = list(import_public_keys(contents))
        identities = list(parse_config(contents))
    else:
        identities = [device.interface.Identity(
            identity_str=args.identity,
            curve_name=args.ecdsa_curve_name)]
    for index, identity in enumerate(identities):
        identity.identity_dict['proto'] = u'ssh'
        log.info('identity #%d: %s', index, identity.to_string())
    device_type.ui = device.ui.UI(device_type=device_type,
                                  config=vars(args))
    device_type.ui.cached_passphrase_ack = util.ExpiringCache(
        args.cache_expiry_seconds)
    conn = JustInTimeConnection(
        conn_factory=lambda: client.Client(device_type()),
        identities=identities, public_keys=public_keys)
    sock_path = _get_sock_path(args)
    command = args.command
    context = _dummy_context()
    if args.connect:
        command = ['ssh'] + ssh_args(conn) + args.command
    elif args.mosh:
        command = ['mosh'] + mosh_args(conn) + args.command
    elif args.daemonize:
        # Print the socket path so the shell can export it, then detach.
        out = 'SSH_AUTH_SOCK={0}; export SSH_AUTH_SOCK;\n'.format(sock_path)
        sys.stdout.write(out)
        sys.stdout.flush()
        context = daemon.DaemonContext()
        log.info('running the agent as a daemon on %s', sock_path)
    elif args.foreground:
        log.info('running the agent on %s', sock_path)
    use_shell = bool(args.shell)
    if use_shell:
        command = os.environ['SHELL']
        sys.stdin.close()
    if command or args.daemonize or args.foreground:
        with context:
            return run_server(conn=conn, command=command,
                              sock_path=sock_path, debug=args.debug,
                              timeout=args.timeout)
    else:
        # No command/daemon requested: just print the public keys.
        for pk in conn.public_keys():
            sys.stdout.write(pk)
        return 0
Run ssh - agent using given hardware client factory .
53,963
def parse_public_keys(self):
    """Parse SSH public keys into dictionaries, attaching each identity."""
    parsed = [formats.import_public_key(pk) for pk in self.public_keys()]
    for entry, identity in zip(parsed, self.identities):
        entry['identity'] = identity
    return parsed
Parse SSH public keys into dictionaries .
53,964
def public_keys_as_files(self):
    """Store public keys as temporary SSH identity files (memoized).

    The NamedTemporaryFile objects are kept on self so the files are
    not deleted while this object is alive.
    """
    if not self.public_keys_tempfiles:
        for pk in self.public_keys():
            f = tempfile.NamedTemporaryFile(prefix='trezor-ssh-pubkey-',
                                            mode='w')
            f.write(pk)
            f.flush()  # make the contents visible to other processes
            self.public_keys_tempfiles.append(f)
    return self.public_keys_tempfiles
Store public keys as temporary SSH identity files .
53,965
def sign(self, blob, identity):
    """Sign the given blob using the specified identity on the device."""
    connection = self.conn_factory()
    return connection.sign_ssh_challenge(blob=blob, identity=identity)
Sign a given blob using the specified identity on the device .
53,966
def packet(tag, blob):
    """Create a small GPG packet with the smallest length encoding that fits."""
    size = len(blob)
    assert size < 2 ** 32
    if size < 2 ** 8:
        length_type = 0
    elif size < 2 ** 16:
        length_type = 1
    else:
        length_type = 2
    fmt = ('>B', '>H', '>L')[length_type]
    leading_byte = 0x80 | (tag << 2) | length_type
    return struct.pack('>B', leading_byte) + util.prefix_len(fmt, blob)
Create small GPG packet .
53,967
def subpacket(subpacket_type, fmt, *values):
    """Create a GPG subpacket.

    `fmt` is a struct format when `values` are given; otherwise it is
    used directly as the raw payload bytes.
    """
    if values:
        blob = struct.pack(fmt, *values)
    else:
        blob = fmt
    return struct.pack('>B', subpacket_type) + blob
Create GPG subpacket .
53,968
def subpacket_prefix_len(item):
    """Prefix a subpacket with its length per RFC 4880, section 5.2.3.1."""
    n = len(item)
    if n < 192:
        prefix = struct.pack('B', n)  # one-octet length
    elif n < 8384:
        m = n - 192
        prefix = struct.pack('BB', (m // 256) + 192, m % 256)  # two-octet
    else:
        prefix = b'\xFF' + struct.pack('>L', n)  # five-octet
    return prefix + item
Prefix subpacket length according to RFC 4880, section 5.2.3.1.
53,969
def subpackets(*items):
    """Serialize several GPG subpackets with a total-length prefix."""
    blob = b''.join(subpacket_prefix_len(item) for item in items)
    return util.prefix_len('>H', blob)
Serialize several GPG subpackets .
53,970
def mpi(value):
    """Serialize a multiprecision integer (MPI) using the GPG format."""
    bits = value.bit_length()
    data = bytearray()
    for _ in range((bits + 7) // 8):
        data.insert(0, value & 0xFF)  # prepend the least-significant byte
        value = value >> 8
    return struct.pack('>H', bits) + bytes(data)
Serialize a multiprecision integer using the GPG format.
53,971
def keygrip_nist256(vk):
    """Compute the GPG keygrip for NIST P-256 public keys.

    Curve parameters and points are serialized as (name, bytes) pairs;
    points use the 65-byte uncompressed form 0x04 || X || Y.
    """
    curve = vk.curve.curve
    gen = vk.curve.generator
    g = (4 << 512) | (gen.x() << 256) | gen.y()  # uncompressed generator point
    point = vk.pubkey.point
    q = (4 << 512) | (point.x() << 256) | point.y()  # uncompressed public point
    return _compute_keygrip([
        ['p', util.num2bytes(curve.p(), size=32)],
        ['a', util.num2bytes(curve.a() % curve.p(), size=32)],
        ['b', util.num2bytes(curve.b() % curve.p(), size=32)],
        ['g', util.num2bytes(g, size=65)],
        ['n', util.num2bytes(vk.curve.order, size=32)],
        ['q', util.num2bytes(q, size=65)],
    ])
Compute keygrip for NIST256 curve public keys .
53,972
def keygrip_ed25519(vk):
    """Compute the GPG keygrip for Ed25519 public keys.

    The curve parameters (p, a, b, g, n) are hard-coded constants;
    `q` is the raw public-key byte string.
    """
    return _compute_keygrip([
        ['p', util.num2bytes(0x7FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFED, size=32)],
        ['a', b'\x01'],
        ['b', util.num2bytes(0x2DFC9311D490018C7338BF8688861767FF8FF5B2BEBE27548A14B235ECA6874A, size=32)],
        ['g', util.num2bytes(0x04216936D3CD6E53FEC0A4E231FDD6DC5C692CC7609525A7B2C9562D608F25D51A6666666666666666666666666666666666666666666666666666666666666658, size=65)],
        ['n', util.num2bytes(0x1000000000000000000000000000000014DEF9DEA2F79CD65812631A5CF5D3ED, size=32)],
        ['q', vk.to_bytes()],
    ])
Compute keygrip for Ed25519 public keys .
53,973
def keygrip_curve25519(vk):
    """Compute the GPG keygrip for Curve25519 public keys.

    The curve parameters (p, a, b, g, n) are hard-coded constants;
    `q` is the raw public-key byte string.
    """
    return _compute_keygrip([
        ['p', util.num2bytes(0x7FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFED, size=32)],
        ['a', b'\x01\xDB\x41'],
        ['b', b'\x01'],
        ['g', util.num2bytes(0x04000000000000000000000000000000000000000000000000000000000000000920ae19a1b8a086b4e01edd2c7748d14c923d4d7e6d7c61b229e9c5a27eced3d9, size=65)],
        ['n', util.num2bytes(0x1000000000000000000000000000000014DEF9DEA2F79CD65812631A5CF5D3ED, size=32)],
        ['q', vk.to_bytes()],
    ])
Compute keygrip for Curve25519 public keys .
53,974
def get_curve_name_by_oid(oid):
    """Return the curve name matching the specified OID, or raise KeyError."""
    for name, info in SUPPORTED_CURVES.items():
        if info['oid'] == oid:
            return name
    raise KeyError('Unknown OID: {!r}'.format(oid))
Return curve name matching specified OID or raise KeyError .
53,975
def make_signature(signer_func, data_to_sign, public_algo,
                   hashed_subpackets, unhashed_subpackets, sig_type=0):
    """Create a new GPG signature.

    Hashes `data_to_sign` with the v4 signature header, the hashed
    subpackets and the v4 trailer; signs the SHA-256 digest via
    `signer_func`; and returns the serialized signature body.
    """
    # v4 header: version, signature type, public-key algo, hash algo.
    # NOTE(review): the trailing 8 is the hash-algorithm ID (SHA-256 in
    # RFC 4880, matching the hashlib.sha256 call below) — confirm.
    header = struct.pack('>BBBB', 4, sig_type, public_algo, 8)
    hashed = subpackets(*hashed_subpackets)
    unhashed = subpackets(*unhashed_subpackets)
    # v4 trailer: 0x04 0xFF followed by the length of the hashed portion.
    tail = b'\x04\xff' + struct.pack('>L', len(header) + len(hashed))
    data_to_hash = data_to_sign + header + hashed + tail
    log.debug('hashing %d bytes', len(data_to_hash))
    digest = hashlib.sha256(data_to_hash).digest()
    log.debug('signing digest: %s', util.hexlify(digest))
    params = signer_func(digest=digest)
    sig = b''.join(mpi(p) for p in params)
    # The first two digest bytes serve as a quick-check value.
    return bytes(header + hashed + unhashed + digest[:2] + sig)
Create new GPG signature .
53,976
def data(self):
    """Data for packet creation: version, creation time, algo ID, curve OID, key."""
    header = struct.pack('>BLB', 4, self.created, self.algo_id)
    oid = util.prefix_len('>B', self.curve_info['oid'])
    blob = self.curve_info['serialize'](self.verifying_key)
    return header + oid + blob + self.ecdh_packet
Data for packet creation .
53,977
def create_subkey(primary_bytes, subkey, signer_func, secret_bytes=b''):
    """Export new subkey to GPG primary key.

    Appends a subkey packet and a subkey-binding signature packet to the
    serialized primary key, and returns the combined byte string.

    :param primary_bytes: serialized primary key (with user-id and signature packets)
    :param subkey: subkey object providing data(), data_to_hash(), key_id(), etc.
    :param signer_func: callable used to produce the binding signature(s)
    :param secret_bytes: if non-empty, a secret subkey packet is created instead
    """
    # tag 7 = secret subkey packet, tag 14 = public subkey packet
    subkey_packet = protocol.packet(tag=(7 if secret_bytes else 14),
                                    blob=(subkey.data() + secret_bytes))
    packets = list(decode.parse_packets(io.BytesIO(primary_bytes)))
    # first three packets are assumed to be: primary key, user id, signature
    primary, user_id, signature = packets[:3]
    data_to_sign = primary['_to_hash'] + subkey.data_to_hash()
    if subkey.ecdh:
        # encryption-only subkeys carry no embedded cross-certification
        embedded_sig = None
    else:
        # sig_type 0x19: primary-key-binding ("cross-certification") signature,
        # made by the subkey itself over the primary+subkey hash data
        hashed_subpackets = [protocol.subpacket_time(subkey.created)]
        unhashed_subpackets = [protocol.subpacket(16, subkey.key_id())]
        embedded_sig = protocol.make_signature(
            signer_func=signer_func,
            data_to_sign=data_to_sign,
            public_algo=subkey.algo_id,
            sig_type=0x19,
            hashed_subpackets=hashed_subpackets,
            unhashed_subpackets=unhashed_subpackets)
    # key flags subpacket (0x1B): 2 = signing, 4|8 = encrypt communications/storage
    flags = (2) if (not subkey.ecdh) else (4 | 8)
    hashed_subpackets = [
        protocol.subpacket_time(subkey.created),
        protocol.subpacket_byte(0x1B, flags)]
    unhashed_subpackets = []
    # issuer key-id subpacket (type 16) of the primary key
    unhashed_subpackets.append(protocol.subpacket(16, primary['key_id']))
    if embedded_sig is not None:
        # embedded-signature subpacket (type 32)
        unhashed_subpackets.append(protocol.subpacket(32, embedded_sig))
    unhashed_subpackets.append(protocol.CUSTOM_SUBPACKET)
    if not decode.has_custom_subpacket(signature):
        # primary key is not managed by this agent: delegate the binding
        # signature to the system gpg-agent for the given user id
        signer_func = keyring.create_agent_signer(user_id['value'])
    # sig_type 0x18: subkey-binding signature, made by the primary key
    signature = protocol.make_signature(
        signer_func=signer_func,
        data_to_sign=data_to_sign,
        public_algo=primary['algo'],
        sig_type=0x18,
        hashed_subpackets=hashed_subpackets,
        unhashed_subpackets=unhashed_subpackets)
    sign_packet = protocol.packet(tag=2, blob=signature)
    return primary_bytes + subkey_packet + sign_packet
Export new subkey to GPG primary key .
53,978
def verify_gpg_version():
    """Make sure that the installed GnuPG is not too old.

    Logs an error (does not raise) when the version requirement is unmet.
    """
    existing_gpg = keyring.gpg_version().decode('ascii')
    required_gpg = '>=2.1.11'
    if not semver.match(existing_gpg, required_gpg):
        log.error('Existing GnuPG has version "{}" ({} required)'.format(
            existing_gpg, required_gpg))
Make sure that the installed GnuPG is not too old .
53,979
def check_output(args):
    """Run the command and return its stdout decoded as UTF-8."""
    log.debug('run: %s', args)
    output = subprocess.check_output(args=args).decode('utf-8')
    log.debug('out: %r', output)
    return output
Runs command and returns the output as string .
53,980
def check_call(args, stdin=None, env=None):
    """Run the command and verify its success (raises CalledProcessError)."""
    env_suffix = ' {}'.format(env) if env else ''
    log.debug('run: %s%s', args, env_suffix)
    subprocess.check_call(args=args, stdin=stdin, env=env)
Runs command and verifies its success .
53,981
def write_file(path, data):
    """Write ``data`` to the specified path.

    Returns the file object (already closed by the context manager);
    callers can still read attributes such as ``f.name``.
    """
    with open(path, 'w') as f:
        log.debug('setting %s contents:\n%s', path, data)
        f.write(data)
        return f
Writes data to specified path .
53,982
def run_agent(device_type):
    """Run a simple GPG-agent server for the specified hardware device type.

    Parses command-line options, sets up logging inside the GnuPG home
    directory, then serves agent connections until stopped or on error.
    All failures are logged rather than propagated.
    """
    p = argparse.ArgumentParser()
    p.add_argument('--homedir', default=os.environ.get('GNUPGHOME'))
    p.add_argument('-v', '--verbose', default=0, action='count')
    p.add_argument('--server', default=False, action='store_true',
                   help='Use stdin/stdout for communication with GPG.')
    p.add_argument('--pin-entry-binary', type=str, default='pinentry',
                   help='Path to PIN entry UI helper.')
    p.add_argument('--passphrase-entry-binary', type=str, default='pinentry',
                   help='Path to passphrase entry UI helper.')
    p.add_argument('--cache-expiry-seconds', type=float, default=float('inf'),
                   help='Expire passphrase from cache after this duration.')
    # parse_known_args: GPG may pass extra options we deliberately ignore
    args, _ = p.parse_known_args()
    assert args.homedir
    log_file = os.path.join(args.homedir, 'gpg-agent.log')
    util.setup_logging(verbosity=args.verbose, filename=log_file)
    log.debug('sys.argv: %s', sys.argv)
    log.debug('os.environ: %s', os.environ)
    log.debug('pid: %d, parent pid: %d', os.getpid(), os.getppid())
    try:
        env = {'GNUPGHOME': args.homedir, 'PATH': os.environ['PATH']}
        pubkey_bytes = keyring.export_public_keys(env=env)
        device_type.ui = device.ui.UI(device_type=device_type,
                                      config=vars(args))
        device_type.ui.cached_passphrase_ack = util.ExpiringCache(
            seconds=float(args.cache_expiry_seconds))
        handler = agent.Handler(device=device_type(),
                                pubkey_bytes=pubkey_bytes)
        # prefer a socket inherited via the Assuan file descriptor;
        # fall back to creating one at the standard socket path
        sock_server = _server_from_assuan_fd(os.environ)
        if sock_server is None:
            sock_server = _server_from_sock_path(env)
        with sock_server as sock:
            for conn in agent.yield_connections(sock):
                with contextlib.closing(conn):
                    try:
                        handler.handle(conn)
                    except agent.AgentStop:
                        # explicit shutdown requested by the client
                        log.info('stopping gpg-agent')
                        return
                    except IOError as e:
                        log.info('connection closed: %s', e)
                        return
                    except Exception as e:  # pylint: disable=broad-except
                        # keep serving other connections after a handler error
                        log.exception('handler failed: %s', e)
    except Exception as e:  # pylint: disable=broad-except
        log.exception('gpg-agent failed: %s', e)
Run a simple GPG - agent server .
53,983
def find_device():
    """Select a transport based on the TREZOR_PATH environment variable.

    Returns None (after logging at debug level) when no device is found.
    """
    path = os.environ.get("TREZOR_PATH")
    try:
        return get_transport(path)
    except Exception as e:  # pylint: disable=broad-except
        log.debug("Failed to find a Trezor device: %s", e)
Selects a transport based on TREZOR_PATH environment variable .
53,984
def _convert_public_key ( ecdsa_curve_name , result ) : if ecdsa_curve_name == 'nist256p1' : if ( result [ 64 ] & 1 ) != 0 : result = bytearray ( [ 0x03 ] ) + result [ 1 : 33 ] else : result = bytearray ( [ 0x02 ] ) + result [ 1 : 33 ] else : result = result [ 1 : ] keyX = bytearray ( result [ 0 : 32 ] ) keyY = bytearray ( result [ 32 : ] [ : : - 1 ] ) if ( keyX [ 31 ] & 1 ) != 0 : keyY [ 31 ] |= 0x80 result = b'\x00' + bytes ( keyY ) return bytes ( result )
Convert Ledger reply into PublicKey object .
53,985
def connect(self):
    """Enumerate and connect to the first USB HID interface.

    Raises:
        interface.NotFoundError: when no dongle is connected.
    """
    try:
        dongle = comm.getDongle()
    except comm.CommException as e:
        raise interface.NotFoundError(
            '{} not connected: "{}"'.format(self, e))
    return dongle
Enumerate and connect to the first USB HID interface .
53,986
def pubkey(self, identity, ecdh=False):
    """Get public key for the identity's BIP32 address and elliptic curve."""
    curve_name = identity.get_curve_name(ecdh)
    path = _expand_path(identity.get_bip32_address(ecdh))
    # P2 byte selects the curve on the device: 01 = NIST P-256, 02 = otherwise
    p2 = '01' if curve_name == 'nist256p1' else '02'
    apdu = binascii.unhexlify('800200' + p2)
    apdu += bytearray([len(path) + 1, len(path) // 4])
    apdu += path
    log.debug('apdu: %r', apdu)
    result = bytearray(self.conn.exchange(bytes(apdu)))
    log.debug('result: %r', result)
    # skip leading reply byte (presumably a length prefix — confirm
    # against the Ledger app protocol) before converting the key
    return _convert_public_key(curve_name, result[1:])
Get PublicKey object for specified BIP32 address and elliptic curve .
53,987
def download_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL,
                        to_dir=os.curdir, delay=15):
    """Download distribute from a specified location and return its filename.

    Skips the download if the tarball already exists in ``to_dir``.
    ``delay`` is accepted for interface compatibility and is unused here.
    """
    to_dir = os.path.abspath(to_dir)
    try:
        from urllib.request import urlopen
    except ImportError:
        from urllib2 import urlopen  # Python 2 fallback
    tgz_name = "distribute-%s.tar.gz" % version
    saveto = os.path.join(to_dir, tgz_name)
    if not os.path.exists(saveto):
        src = dst = None
        try:
            url = download_base + tgz_name
            log.warn("Downloading %s", url)
            src = urlopen(url)
            payload = src.read()
            dst = open(saveto, "wb")
            dst.write(payload)
        finally:
            if src:
                src.close()
            if dst:
                dst.close()
    return os.path.realpath(saveto)
Download distribute from a specified location and return its filename
53,988
def tokenize(stream, separator):
    """Tokenize and yield query parameter values.

    Each value in ``stream`` is split on ``separator``; non-empty tokens
    are stripped and yielded. Note the emptiness check happens *before*
    stripping, so an all-whitespace token is yielded as an empty string.
    """
    for chunk in stream:
        for piece in chunk.split(separator):
            if piece:
                yield piece.strip()
Tokenize and yield query parameter values .
53,989
def build_query(self, **filters):
    """Creates a single SQ filter from querystring parameters that
    correspond to the SearchIndex fields that have been "registered"
    in `view.fields`.

    Returns a two-tuple ``(applicable_filters, applicable_exclusions)``,
    each either a reduced query term or an empty list.
    """
    applicable_filters = []
    applicable_exclusions = []
    for param, value in filters.items():
        excluding_term = False
        param_parts = param.split("__")
        # only the field name itself is validated against the serializer
        base_param = param_parts[0]
        negation_keyword = constants.DRF_HAYSTACK_NEGATION_KEYWORD
        if len(param_parts) > 1 and param_parts[1] == negation_keyword:
            excluding_term = True
            # strip the negation keyword — haystack would not understand it
            param = param.replace("__%s" % negation_keyword, "")
        if self.view.serializer_class:
            if hasattr(self.view.serializer_class.Meta, 'field_aliases'):
                old_base = base_param
                base_param = self.view.serializer_class.Meta.field_aliases.get(
                    base_param, base_param)
                # rewrite the alias into the real field name
                param = param.replace(old_base, base_param)
            fields = getattr(self.view.serializer_class.Meta, 'fields', [])
            exclude = getattr(self.view.serializer_class.Meta, 'exclude', [])
            search_fields = getattr(self.view.serializer_class.Meta, 'search_fields', [])
            # skip parameters that are not searchable, are excluded, or have no value
            if ((fields or search_fields) and base_param not in
                    chain(fields, search_fields)) or base_param in exclude or not value:
                continue
        field_queries = []
        if len(param_parts) > 1 and param_parts[-1] in ('in', 'range'):
            # `in` and `range` lookups expect a list of values
            field_queries.append(self.view.query_object(
                (param, list(self.tokenize(value, self.view.lookup_sep)))))
        else:
            # multiple tokens for the same field are OR-ed together
            for token in self.tokenize(value, self.view.lookup_sep):
                field_queries.append(self.view.query_object((param, token)))
        field_queries = [fq for fq in field_queries if fq]
        if len(field_queries) > 0:
            term = six.moves.reduce(operator.or_, field_queries)
            if excluding_term:
                applicable_exclusions.append(term)
            else:
                applicable_filters.append(term)
    # combine all per-field terms using the configured default operator
    applicable_filters = six.moves.reduce(
        self.default_operator,
        filter(lambda x: x, applicable_filters)) if applicable_filters else []
    applicable_exclusions = six.moves.reduce(
        self.default_operator,
        filter(lambda x: x, applicable_exclusions)) if applicable_exclusions else []
    return applicable_filters, applicable_exclusions
Creates a single SQ filter from querystring parameters that correspond to the SearchIndex fields that have been registered in view . fields .
53,990
def build_query(self, **filters):
    """Creates a dict of dictionaries suitable for passing to the
    SearchQuerySet ``facet``, ``date_facet`` or ``query_facet`` method.
    All query parameter values should be wrapped in a list.

    Returns a dict with "date_facets", "field_facets" and "query_facets" keys.

    Raises:
        AttributeError: if the view's lookup_sep is ":" (conflicts with parsing).
        ValueError: if date-facet options are incomplete or invalid.
    """
    field_facets = {}
    date_facets = {}
    query_facets = {}
    facet_serializer_cls = self.view.get_facet_serializer_class()
    if self.view.lookup_sep == ":":
        raise AttributeError("The %(cls)s.lookup_sep attribute conflicts with the HaystackFacetFilter "
                             "query parameter parser. Please choose another `lookup_sep` attribute "
                             "for %(cls)s." % {"cls": self.view.__class__.__name__})
    fields = facet_serializer_cls.Meta.fields
    exclude = facet_serializer_cls.Meta.exclude
    field_options = facet_serializer_cls.Meta.field_options
    for field, options in filters.items():
        if field not in fields or field in exclude:
            continue
        # query-string options override the serializer's static field_options
        field_options = merge_dict(
            field_options,
            {field: self.parse_field_options(self.view.lookup_sep, *options)})
    valid_gap = ("year", "month", "day", "hour", "minute", "second")
    for field, options in field_options.items():
        if any([k in options for k in ("start_date", "end_date", "gap_by", "gap_amount")]):
            # BUGFIX: the previous check `all(("start_date", "end_date",
            # "gap_by" in options))` evaluated a tuple of two truthy string
            # literals plus one membership test, so only "gap_by" was ever
            # validated. Check membership of all three mandatory keys.
            if not all(k in options for k in ("start_date", "end_date", "gap_by")):
                raise ValueError("Date faceting requires at least 'start_date', 'end_date' "
                                 "and 'gap_by' to be set.")
            if not options["gap_by"] in valid_gap:
                raise ValueError("The 'gap_by' parameter must be one of %s." % ", ".join(valid_gap))
            options.setdefault("gap_amount", 1)
            date_facets[field] = field_options[field]
        else:
            field_facets[field] = field_options[field]
    return {
        "date_facets": date_facets,
        "field_facets": field_facets,
        "query_facets": query_facets
    }
Creates a dict of dictionaries suitable for passing to the SearchQuerySet `facet`, `date_facet` or `query_facet` method. All keyword argument values should be wrapped in a list.
53,991
def parse_field_options(self, *options):
    """Parse the field options query string and return it as a dictionary.

    Each option string is split on the view's lookup separator into
    'param:value' tokens; malformed tokens are skipped with a warning.
    Date values are parsed to datetimes and 'gap_amount' is cast to int.
    """
    parsed = {}
    for option in options:
        if not isinstance(option, six.text_type):
            continue
        for token in (t.strip() for t in option.split(self.view.lookup_sep)):
            pair = token.split(":")
            if len(pair) != 2:
                warnings.warn("The %s token is not properly formatted. Tokens need to be "
                              "formatted as 'token:value' pairs." % token)
                continue
            param, value = pair
            if param in ("start_date", "end_date"):
                value = parser.parse(value)
            elif param == "gap_amount":
                value = int(value)
            parsed[param] = value
    return parsed
Parse the field options query string and return it as a dictionary .
53,992
def build_query(self, **filters):
    """Build queries for geo-spatial filtering.

    Expects a 'from=latitude,longitude' query parameter plus exactly one
    distance unit parameter (e.g. 'km=10'). Returns a dict with 'dwithin'
    and 'distance' filter specs, or None when no spatial query is present.

    Raises:
        ValueError: on non-numeric coordinates or multiple unit values.
    """
    applicable_filters = None
    # keep only the spatial query parameter and recognized distance units
    filters = dict((k, filters[k]) for k in chain(
        self.D.UNITS.keys(),
        [constants.DRF_HAYSTACK_SPATIAL_QUERY_PARAM]) if k in filters)
    distance = dict((k, v) for k, v in filters.items()
                    if k in self.D.UNITS.keys())
    try:
        latitude, longitude = map(float, self.tokenize(
            filters[constants.DRF_HAYSTACK_SPATIAL_QUERY_PARAM],
            self.view.lookup_sep))
        # note: Point takes (longitude, latitude) order
        point = self.Point(longitude, latitude, srid=constants.GEO_SRID)
    except ValueError:
        raise ValueError("Cannot convert `from=latitude,longitude` query parameter to "
                         "float values. Make sure to provide numerical values only!")
    except KeyError:
        # no spatial query parameter — nothing to filter on
        pass
    else:
        for unit in distance.keys():
            if not len(distance[unit]) == 1:
                raise ValueError("Each unit must have exactly one value.")
            distance[unit] = float(distance[unit][0])
        if point and distance:
            applicable_filters = {
                "dwithin": {
                    "field": self.backend.point_field,
                    "point": point,
                    "distance": self.D(**distance)
                },
                "distance": {
                    "field": self.backend.point_field,
                    "point": point
                }
            }
    return applicable_filters
Build queries for geo spatial filtering .
53,993
def merge_dict(a, b):
    """Recursively merge dict ``b`` into a deep copy of dict ``a`` and
    return the result. List values are combined and returned sorted with
    duplicates removed. If ``b`` is not a dict, it is returned unchanged.

    Neither input is mutated.
    """
    if not isinstance(b, dict):
        return b
    result = deepcopy(a)
    # plain dict.items() works on both Python 2 and 3 — the six.iteritems
    # compatibility shim is unnecessary here
    for key, val in b.items():
        if key in result and isinstance(result[key], dict):
            result[key] = merge_dict(result[key], val)
        elif key in result and isinstance(result[key], list):
            result[key] = sorted(set(val) | set(result[key]))
        else:
            result[key] = deepcopy(val)
    return result
Recursively merges and returns dict a with dict b . Any list values will be combined and returned sorted .
53,994
def get_queryset(self, index_models=()):
    """Get the list of items for this view.

    Returns ``self.queryset`` if defined and an instance of
    ``self.object_class``; otherwise a fresh clone, optionally
    restricted to the given index models (falling back to the
    view's ``index_models``).

    Note: the default was changed from a mutable ``[]`` to an
    immutable ``()`` — behavior is identical since the argument
    is never mutated here.
    """
    if self.queryset is not None and isinstance(self.queryset, self.object_class):
        queryset = self.queryset.all()
    else:
        queryset = self.object_class()._clone()
    if len(index_models):
        queryset = queryset.models(*index_models)
    elif len(self.index_models):
        queryset = queryset.models(*self.index_models)
    return queryset
Get the list of items for this view . Returns self . queryset if defined and is a self . object_class instance .
53,995
def get_object(self):
    """Fetch a single document from the data store according to whatever
    unique identifier is available for that document in the SearchIndex.

    An optional 'model' query parameter ('app_label.model') restricts the
    lookup to a single index model.

    Raises:
        Http404: on invalid model parameter, zero or multiple matches.
        AttributeError: when the lookup URL kwarg is missing.
    """
    queryset = self.get_queryset()
    if "model" in self.request.query_params:
        try:
            app_label, model = map(
                six.text_type.lower,
                self.request.query_params["model"].split(".", 1))
            ctype = ContentType.objects.get(app_label=app_label, model=model)
            # re-fetch the queryset restricted to the requested model
            queryset = self.get_queryset(index_models=[ctype.model_class()])
        except (ValueError, ContentType.DoesNotExist):
            raise Http404("Could not find any models matching '%s'. Make sure to use a valid "
                          "'app_label.model' name for the 'model' query parameter."
                          % self.request.query_params["model"])
    lookup_url_kwarg = self.lookup_url_kwarg or self.lookup_field
    if lookup_url_kwarg not in self.kwargs:
        raise AttributeError("Expected view %s to be called with a URL keyword argument "
                             "named '%s'. Fix your URL conf, or set the `.lookup_field` "
                             "attribute on the view correctly."
                             % (self.__class__.__name__, lookup_url_kwarg))
    queryset = queryset.filter(self.query_object(
        (self.document_uid_field, self.kwargs[lookup_url_kwarg])))
    count = queryset.count()
    if count == 1:
        return queryset[0]
    elif count > 1:
        raise Http404("Multiple results matches the given query. Expected a single result.")
    raise Http404("No result matches the given query.")
Fetch a single document from the data store according to whatever unique identifier is available for that document in the SearchIndex .
53,996
def more_like_this(self, request, pk=None):
    """Detail route returning "more-like-this" results.

    Note that backend support is required in order to take advantage
    of this.
    """
    reference = self.get_object().object
    queryset = self.filter_queryset(self.get_queryset()).more_like_this(reference)
    page = self.paginate_queryset(queryset)
    if page is None:
        return Response(self.get_serializer(queryset, many=True).data)
    return self.get_paginated_response(self.get_serializer(page, many=True).data)
Sets up a detail route for more-like-this results. Note that you'll need backend support in order to take advantage of this.
53,997
def filter_facet_queryset(self, queryset):
    """Given a search queryset, filter it through each configured facet
    filter backend, then apply ``load_all`` if enabled on the view."""
    for backend_cls in list(self.facet_filter_backends):
        queryset = backend_cls().filter_queryset(self.request, queryset, self)
    if self.load_all:
        queryset = queryset.load_all()
    return queryset
Given a search queryset, filter it with whichever facet filter backends are in use.
53,998
def get_facet_serializer(self, *args, **kwargs):
    """Return the facet serializer instance that should be used for
    serializing faceted output.

    Requires an ``objects`` keyword argument, which is moved into the
    serializer context alongside the facet query-param text.
    """
    assert "objects" in kwargs, "`objects` is a required argument to `get_facet_serializer()`"
    serializer_cls = self.get_facet_serializer_class()
    context = self.get_serializer_context()
    context.update({
        "objects": kwargs.pop("objects"),
        "facet_query_params_text": self.facet_query_params_text,
    })
    kwargs["context"] = context
    return serializer_cls(*args, **kwargs)
Return the facet serializer instance that should be used for serializing faceted output .
53,999
def get_facet_serializer_class(self):
    """Return the class to use for serializing facets.

    Defaults to ``self.facet_serializer_class``; raises AttributeError
    when it is not set.
    """
    serializer_cls = self.facet_serializer_class
    if serializer_cls is None:
        raise AttributeError("%(cls)s should either include a `facet_serializer_class` attribute, "
                             "or override %(cls)s.get_facet_serializer_class() method."
                             % {"cls": self.__class__.__name__})
    return serializer_cls
Return the class to use for serializing facets . Defaults to using self . facet_serializer_class .