idx: int64 (0 to 63k); question: string (lengths 61 to 4.03k); target: string (lengths 6 to 1.23k)
7,000
def passed ( self ) : self . set_status ( Report . PASSED ) if 'reason' in self . _data_fields : del self . _data_fields [ 'reason' ]
Set the report status to PASSED and clear any previously recorded failure reason
7,001
def failed ( self , reason = None ) : self . set_status ( Report . FAILED ) if reason : self . add ( 'reason' , reason )
Set the test status to Report . FAILED and set the failure reason
7,002
def error ( self , reason = None ) : self . set_status ( Report . ERROR ) if reason : self . add ( 'reason' , reason )
Set the test status to Report . ERROR and set the error reason
7,003
def set_status ( self , new_status ) : if new_status not in Report . allowed_statuses : raise Exception ( 'status must be one of: %s' % ( ', ' . join ( Report . allowed_statuses ) ) ) self . _data_fields [ 'status' ] = new_status . lower ( )
Set the status of the report .
7,004
def add ( self , key , value ) : if key in self . reserved_keys : raise Exception ( 'You cannot add the key %s directly, use %s' % ( key , self . reserved_keys [ key ] ) ) if isinstance ( value , Report ) : self . _sub_reports [ key ] = value self . _data_fields [ 'sub_reports' ] . append ( key ) else : self . _data_fields [ key ] = value
Add an entry to the report
7,005
def get ( self , key ) : if key in self . _data_fields : return self . _data_fields [ key ] if key in self . _sub_reports : return self . _sub_reports [ key ] return None
Get a value for a given key
7,006
def to_dict ( self , encoding = 'base64' ) : res = { } for k , v in self . _data_fields . items ( ) : if isinstance ( v , ( bytes , bytearray , six . string_types ) ) : v = StrEncodeEncoder ( encoding ) . encode ( v ) . tobytes ( ) . decode ( ) res [ k ] = v for k , v in self . _sub_reports . items ( ) : res [ k ] = v . to_dict ( encoding ) return res
Return a dictionary version of the report
7,007
def from_dict ( cls , d , encoding = 'base64' ) : report = Report ( Report . _decode ( d [ 'name' ] , encoding ) ) report . set_status ( Report . _decode ( d [ 'status' ] , encoding ) ) sub_reports = Report . _decode ( d [ 'sub_reports' ] , encoding ) del d [ 'sub_reports' ] for k , v in d . items ( ) : if k in sub_reports : report . add ( k , Report . from_dict ( v ) ) else : if k . lower ( ) == 'status' : report . set_status ( Report . _decode ( v , encoding ) ) else : report . add ( k , Report . _decode ( v , encoding ) ) return report
Construct a Report object from dictionary .
7,008
def get_status ( self ) : status = self . get ( 'status' ) if status == Report . PASSED : for sr_name in self . _sub_reports : sr = self . _sub_reports [ sr_name ] sr_status = sr . get_status ( ) reason = sr . get ( 'reason' ) if sr_status == Report . ERROR : self . error ( reason ) break if sr_status == Report . FAILED : self . failed ( reason ) break status = self . get ( 'status' ) return status
Get the status of the report and its sub - reports .
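A minimal usage sketch of the Report API shown above (the import path assumes the kitty fuzzing framework and is an assumption; the method calls mirror the snippets above):
from kitty.data.report import Report   # assumed import path
parent = Report('send_stage')
child = Report('response_check')
child.failed('unexpected status code')   # mark the sub-report as failed
parent.add('response_check', child)      # attach it as a sub-report
parent.passed()                          # the parent itself passed ...
print(parent.get_status())               # ... but get_status() propagates 'failed' from the sub-report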
7,009
def setup ( self ) : super ( BaseMonitor , self ) . setup ( ) self . monitor_thread = LoopFuncThread ( self . _monitor_func ) self . monitor_thread . start ( )
Make sure the monitor is ready for fuzzing
7,010
def teardown ( self ) : self . monitor_thread . stop ( ) self . monitor_thread = None super ( BaseMonitor , self ) . teardown ( )
Clean up the monitor: stop the monitor thread, discard it and run the base class teardown
7,011
def set_offset ( self , offset ) : super ( Container , self ) . set_offset ( offset ) if self . is_default ( ) : for field in self . _fields : field . set_offset ( offset ) offset += len ( field . _current_rendered )
Set the absolute offset of the current field; if the container is in its default form, also set the offsets of the enclosed fields, advancing the offset by each field's rendered length.
7,012
def scan_for_field ( self , field_key ) : if field_key == self . get_name ( ) : return self if field_key in self . _fields_dict : return self . _fields_dict [ field_key ] for field in self . _fields : if isinstance ( field , Container ) : resolved = field . scan_for_field ( field_key ) if resolved : return resolved return None
Scan for a field in the container and its enclosed fields
7,013
def set_session_data ( self , session_data ) : if session_data : for field in self . _fields : if isinstance ( field , ( Container , Dynamic ) ) : field . set_session_data ( session_data )
Set session data in the container enclosed fields
7,014
def is_default ( self ) : for field in self . _fields : if not field . is_default ( ) : return False return super ( Container , self ) . is_default ( )
Checks if the field is in its default form
7,015
def _mutate ( self ) : for i in range ( self . _field_idx , len ( self . _fields ) ) : self . _field_idx = i if self . _current_field ( ) . mutate ( ) : return True self . _current_field ( ) . reset ( ) return False
Mutate enclosed fields
7,016
def append_fields ( self , new_fields ) : for field in new_fields : self . push ( field ) if isinstance ( field , Container ) : self . pop ( )
Add fields to the container
7,017
def get_info ( self ) : field = self . _current_field ( ) if field : info = field . get_info ( ) info [ 'path' ] = '%s/%s' % ( self . name if self . name else '<no name>' , info [ 'path' ] ) else : info = super ( Container , self ) . get_info ( ) return info
Get info regarding the current fuzzed enclosed node
7,018
def pop ( self ) : if not self . _containers : raise KittyException ( 'no container to pop' ) self . _containers . pop ( ) if self . _container ( ) : self . _container ( ) . pop ( )
Remove the top container from the container stack
7,019
def copy ( self ) : dup = super ( Conditional , self ) . copy ( ) condition = self . _condition . copy ( ) condition . invalidate ( self ) dup . _condition = condition return dup
Copy the container and put an invalidated copy of the condition in the new container
7,020
def render ( self , ctx = None ) : if ctx is None : ctx = RenderContext ( ) self . _initialize ( ) if self in ctx : self . _current_rendered = self . _in_render_value ( ) else : ctx . push ( self ) if self . _evaluate_condition ( ctx ) : super ( Conditional , self ) . render ( ctx ) else : self . set_current_value ( empty_bits ) ctx . pop ( ) return self . _current_rendered
Only render if condition applies
7,021
def _check_times ( self , min_times , max_times , step ) : kassert . is_int ( min_times ) kassert . is_int ( max_times ) kassert . is_int ( step ) if not ( ( min_times >= 0 ) and ( max_times > 0 ) and ( max_times >= min_times ) and ( step > 0 ) ) : raise KittyException ( 'one of the checks failed: min_times(%d)>=0, max_times(%d)>0, max_times>=min_times, step > 0' % ( min_times , max_times ) )
Make sure that the arguments are valid
7,022
def _rebuild_fields ( self ) : new_field_lists = [ ] field_list_len = self . min_elements while not field_list_len > self . max_elements : how_many = self . max_elements + 1 - field_list_len i = 0 while i < how_many : current = self . random . sample ( self . _fields , field_list_len ) if current not in new_field_lists : new_field_lists . append ( current ) i += 1 field_list_len += 1 new_containers = [ ] for i , fields in enumerate ( new_field_lists ) : dup_fields = [ field . copy ( ) for field in fields ] if self . get_name ( ) : name = '%s_sublist_%d' % ( self . get_name ( ) , i ) else : name = 'sublist_%d' % ( i ) new_containers . append ( Container ( fields = dup_fields , encoder = self . subcontainer_encoder , name = name ) ) self . replace_fields ( new_containers )
Take the original fields and create subsets of them; each subset is placed into a container, and all the resulting containers then replace the original _fields. Since we inherit from OneOf, only one of them is mutated and used at a time. This is ugly and dangerous; any idea for a better implementation is welcome.
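A standalone sketch of the subset-sampling idea described above, with hypothetical field names; it mirrors the original loop, including its order-sensitive duplicate check:
import random
fields = ['f1', 'f2', 'f3', 'f4']                  # hypothetical field list
min_elements, max_elements = 1, 3
subsets = []
for length in range(min_elements, max_elements + 1):
    for _ in range(max_elements + 1 - length):     # same count heuristic as the original
        current = random.sample(fields, length)
        if current not in subsets:                 # random.sample returns a list, so this check is order-sensitive
            subsets.append(current)
print(subsets)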
7,023
def get_info ( self ) : self . render ( ) info = super ( Template , self ) . get_info ( ) res = { } res [ 'name' ] = self . get_name ( ) res [ 'mutation' ] = { 'current_index' : self . _current_index , 'total_number' : self . num_mutations ( ) } res [ 'value' ] = { 'rendered' : { 'base64' : b64encode ( self . _current_rendered . tobytes ( ) ) . decode ( ) , 'length_in_bytes' : len ( self . _current_rendered . tobytes ( ) ) , } } res [ 'hash' ] = self . hash ( ) res [ 'field' ] = info return res
Get info regarding the current template state
7,024
def applies ( self , container , ctx ) : self . _get_ready ( container ) return self . _applies ( container , ctx )
Subclasses should not override applies; instead they should override _applies, which has the same signature. Inside _applies the condition is guaranteed to have a reference to the desired field as self._field.
7,025
def generate_keypair ( keysize , id , output ) : log ( "Generating a paillier keypair with keysize of {}" . format ( keysize ) ) pub , priv = phe . generate_paillier_keypair ( n_length = keysize ) log ( "Keys generated" ) date = datetime . datetime . now ( ) . strftime ( "%Y-%m-%d %H:%M:%S" ) jwk_public = { 'kty' : "DAJ" , 'alg' : "PAI-GN1" , "key_ops" : [ "encrypt" ] , 'n' : phe . util . int_to_base64 ( pub . n ) , 'kid' : "Paillier public key generated by pheutil on {}" . format ( date ) } jwk_private = { 'kty' : "DAJ" , 'key_ops' : [ "decrypt" ] , 'p' : phe . util . int_to_base64 ( priv . p ) , 'q' : phe . util . int_to_base64 ( priv . q ) , 'pub' : jwk_public , 'kid' : "Paillier private key generated by pheutil on {}" . format ( date ) } json . dump ( jwk_private , output ) output . write ( '\n' ) log ( "Private key written to {}" . format ( output . name ) )
Generate a Paillier keypair and write the private key (with the embedded public key) to the output as JSON.
7,026
def extract ( input , output ) : log ( "Loading paillier keypair" ) priv = json . load ( input ) error_msg = "Invalid private key" assert 'pub' in priv , error_msg assert priv [ 'kty' ] == 'DAJ' , error_msg json . dump ( priv [ 'pub' ] , output ) output . write ( '\n' ) log ( "Public key written to {}" . format ( output . name ) )
Extract public key from private key .
7,027
def encrypt ( public , plaintext , output = None ) : num = float ( plaintext ) log ( "Loading public key" ) publickeydata = json . load ( public ) pub = load_public_key ( publickeydata ) log ( "Encrypting: {:+.16f}" . format ( num ) ) enc = pub . encrypt ( num ) serialised = serialise_encrypted ( enc ) print ( serialised , file = output )
Encrypt a number with public key .
7,028
def decrypt ( private , ciphertext , output ) : privatekeydata = json . load ( private ) assert 'pub' in privatekeydata pub = load_public_key ( privatekeydata [ 'pub' ] ) log ( "Loading private key" ) private_key_error = "Invalid private key" assert 'key_ops' in privatekeydata , private_key_error assert "decrypt" in privatekeydata [ 'key_ops' ] , private_key_error assert 'p' in privatekeydata , private_key_error assert 'q' in privatekeydata , private_key_error assert privatekeydata [ 'kty' ] == 'DAJ' , private_key_error _p = phe . util . base64_to_int ( privatekeydata [ 'p' ] ) _q = phe . util . base64_to_int ( privatekeydata [ 'q' ] ) private_key = phe . PaillierPrivateKey ( pub , _p , _q ) log ( "Decrypting ciphertext" ) enc = load_encrypted_number ( ciphertext , pub ) out = private_key . decrypt ( enc ) print ( out , file = output )
Decrypt ciphertext with private key .
7,029
def add_encrypted ( public , encrypted_a , encrypted_b , output ) : log ( "Loading public key" ) publickeydata = json . load ( public ) pub = load_public_key ( publickeydata ) log ( "Loading first encrypted number" ) enc_a = load_encrypted_number ( encrypted_a , pub ) log ( "Loading second encrypted number" ) enc_b = load_encrypted_number ( encrypted_b , pub ) log ( "Adding encrypted numbers together" ) enc_result = enc_a + enc_b serialised_result = serialise_encrypted ( enc_result ) print ( serialised_result , file = output )
Add two encrypted numbers together .
7,030
def multiply_encrypted_to_plaintext ( public , encrypted , plaintext , output ) : log ( "Loading public key" ) publickeydata = json . load ( public ) pub = load_public_key ( publickeydata ) log ( "Loading encrypted number" ) enc = load_encrypted_number ( encrypted , pub ) log ( "Loading unencrypted number" ) num = float ( plaintext ) log ( "Multiplying" ) enc_result = enc * num serialised_result = serialise_encrypted ( enc_result ) print ( serialised_result , file = output )
Multiply an encrypted number by an unencrypted (plaintext) number.
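The helpers above rely on Paillier's additive homomorphism; a minimal sketch using the python-paillier (phe) API directly, without the CLI's serialisation helpers (key size chosen only for the example):
import phe
pub, priv = phe.generate_paillier_keypair(n_length=2048)
enc_a = pub.encrypt(3.5)
enc_b = pub.encrypt(1.25)
enc_sum = enc_a + enc_b            # add two ciphertexts
enc_scaled = enc_a * 4             # multiply a ciphertext by a plaintext scalar
print(priv.decrypt(enc_sum))       # -> 4.75
print(priv.decrypt(enc_scaled))    # -> 14.0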
7,031
def encrypted_score ( self , x ) : score = self . intercept _ , idx = x . nonzero ( ) for i in idx : score += x [ 0 , i ] * self . weights [ i ] return score
Compute the score of x as a dot product with the encrypted model, which is a vector of paillier.EncryptedNumber.
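A toy version of the encrypted scoring above, with a dense plaintext feature vector and Paillier-encrypted weights (the keypair, key size and values are illustrative assumptions):
import phe
pub, priv = phe.generate_paillier_keypair(n_length=1024)
weights = [pub.encrypt(w) for w in (0.4, -1.2, 0.7)]   # encrypted model
intercept = pub.encrypt(0.1)
x = [1.0, 0.5, 2.0]                                    # plaintext features
score = intercept
for xi, wi in zip(x, weights):
    score = score + wi * xi                            # ciphertext * plaintext, then ciphertext + ciphertext
print(priv.decrypt(score))                             # -> approximately 1.3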
7,032
def fit ( self , n_iter , eta = 0.01 ) : for _ in range ( n_iter ) : gradient = self . compute_gradient ( ) self . gradient_step ( gradient , eta )
Fit the linear regression model for n_iter gradient-descent iterations
7,033
def compute_gradient ( self ) : delta = self . predict ( self . X ) - self . y return delta . dot ( self . X ) / len ( self . X )
Compute the gradient of the current model using the training set
7,034
def encrypted_gradient ( self , sum_to = None ) : gradient = self . compute_gradient ( ) encrypted_gradient = encrypt_vector ( self . pubkey , gradient ) if sum_to is not None : return sum_encrypted_vectors ( sum_to , encrypted_gradient ) else : return encrypted_gradient
Compute and encrypt gradient .
7,035
def encrypt_encoded ( self , encoding , r_value ) : obfuscator = r_value or 1 ciphertext = self . raw_encrypt ( encoding . encoding , r_value = obfuscator ) encrypted_number = EncryptedNumber ( self , ciphertext , encoding . exponent ) if r_value is None : encrypted_number . obfuscate ( ) return encrypted_number
Paillier encrypt an encoded value .
7,036
def from_totient ( public_key , totient ) : p_plus_q = public_key . n - totient + 1 p_minus_q = isqrt ( p_plus_q * p_plus_q - public_key . n * 4 ) q = ( p_plus_q - p_minus_q ) // 2 p = p_plus_q - q if not p * q == public_key . n : raise ValueError ( 'given public key and totient do not match.' ) return PaillierPrivateKey ( public_key , p , q )
Given the totient, one can factor the modulus and reconstruct the private key
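A worked example of the factorisation above with toy numbers: p + q = n - totient + 1 and (p - q)^2 = (p + q)^2 - 4n (math.isqrt stands in for the module's own isqrt helper):
from math import isqrt
n, totient = 77, 60                               # 77 = 7 * 11, phi(77) = 6 * 10 = 60
p_plus_q = n - totient + 1                        # 18
p_minus_q = isqrt(p_plus_q * p_plus_q - 4 * n)    # isqrt(324 - 308) = 4
q = (p_plus_q - p_minus_q) // 2                   # 7
p = p_plus_q - q                                  # 11
assert p * q == n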
7,037
def raw_decrypt ( self , ciphertext ) : if not isinstance ( ciphertext , int ) : raise TypeError ( 'Expected ciphertext to be an int, not: %s' % type ( ciphertext ) ) decrypt_to_p = self . l_function ( powmod ( ciphertext , self . p - 1 , self . psquare ) , self . p ) * self . hp % self . p decrypt_to_q = self . l_function ( powmod ( ciphertext , self . q - 1 , self . qsquare ) , self . q ) * self . hq % self . q return self . crt ( decrypt_to_p , decrypt_to_q )
Decrypt raw ciphertext and return raw plaintext .
7,038
def h_function ( self , x , xsquare ) : return invert ( self . l_function ( powmod ( self . public_key . g , x - 1 , xsquare ) , x ) , x )
Compute the h-function as defined in Paillier's paper, page 12: 'Decryption using Chinese-remaindering'.
7,039
def crt ( self , mp , mq ) : u = ( mq - mp ) * self . p_inverse % self . q return mp + ( u * self . p )
The Chinese Remainder Theorem as needed for decryption . Returns the solution modulo n = pq .
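A toy check of the CRT recombination above, with p = 7, q = 11 and m = 38 recovered from its residues (pow(p, -1, q) needs Python 3.8+):
p, q = 7, 11
p_inverse = pow(p, -1, q)          # modular inverse of p mod q
m = 38
mp, mq = m % p, m % q              # 3 and 5
u = (mq - mp) * p_inverse % q
assert mp + u * p == m             # the solution modulo p * q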
7,040
def add ( self , private_key ) : if not isinstance ( private_key , PaillierPrivateKey ) : raise TypeError ( "private_key should be of type PaillierPrivateKey, " "not %s" % type ( private_key ) ) self . __keyring [ private_key . public_key ] = private_key
Add a key to the keyring .
7,041
def ciphertext ( self , be_secure = True ) : if be_secure and not self . __is_obfuscated : self . obfuscate ( ) return self . __ciphertext
Return the ciphertext of the EncryptedNumber .
7,042
def decrease_exponent_to ( self , new_exp ) : if new_exp > self . exponent : raise ValueError ( 'New exponent %i should be more negative than ' 'old exponent %i' % ( new_exp , self . exponent ) ) multiplied = self * pow ( EncodedNumber . BASE , self . exponent - new_exp ) multiplied . exponent = new_exp return multiplied
Return an EncryptedNumber with same value but lower exponent .
7,043
def encode ( cls , public_key , scalar , precision = None , max_exponent = None ) : if precision is None : if isinstance ( scalar , int ) : prec_exponent = 0 elif isinstance ( scalar , float ) : bin_flt_exponent = math . frexp ( scalar ) [ 1 ] bin_lsb_exponent = bin_flt_exponent - cls . FLOAT_MANTISSA_BITS prec_exponent = math . floor ( bin_lsb_exponent / cls . LOG2_BASE ) else : raise TypeError ( "Don't know the precision of type %s." % type ( scalar ) ) else : prec_exponent = math . floor ( math . log ( precision , cls . BASE ) ) if max_exponent is None : exponent = prec_exponent else : exponent = min ( max_exponent , prec_exponent ) int_rep = round ( fractions . Fraction ( scalar ) * fractions . Fraction ( cls . BASE ) ** - exponent ) if abs ( int_rep ) > public_key . max_int : raise ValueError ( 'Integer needs to be within +/- %d but got %d' % ( public_key . max_int , int_rep ) ) return cls ( public_key , int_rep % public_key . n , exponent )
Return an encoding of an int or float .
7,044
def decode ( self ) : if self . encoding >= self . public_key . n : raise ValueError ( 'Attempted to decode corrupted number' ) elif self . encoding <= self . public_key . max_int : mantissa = self . encoding elif self . encoding >= self . public_key . n - self . public_key . max_int : mantissa = self . encoding - self . public_key . n else : raise OverflowError ( 'Overflow detected in decrypted number' ) if self . exponent >= 0 : return mantissa * self . BASE ** self . exponent else : try : return mantissa / self . BASE ** - self . exponent except OverflowError as e : raise OverflowError ( 'decoded result too large for a float' ) from e
Decode plaintext and return the result .
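A round-trip sketch of the fixed-point encoding used above, via python-paillier's EncodedNumber (the phe.paillier import path and key size are assumptions about the library in use):
from phe import paillier
pub, priv = paillier.generate_paillier_keypair(n_length=1024)
enc = paillier.EncodedNumber.encode(pub, 3.141592653589793)
print(enc.exponent)                # negative exponent chosen from the float's precision
print(enc.decode())                # recovers 3.141592653589793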
7,045
def decrease_exponent_to ( self , new_exp ) : if new_exp > self . exponent : raise ValueError ( 'New exponent %i should be more negative than' 'old exponent %i' % ( new_exp , self . exponent ) ) factor = pow ( self . BASE , self . exponent - new_exp ) new_enc = self . encoding * factor % self . public_key . n return self . __class__ ( self . public_key , new_enc , new_exp )
Return an EncodedNumber with same value but lower exponent .
7,046
def powmod ( a , b , c ) : if a == 1 : return 1 if not HAVE_GMP or max ( a , b , c ) < _USE_MOD_FROM_GMP_SIZE : return pow ( a , b , c ) else : return int ( gmpy2 . powmod ( a , b , c ) )
Use GMP, if available, to compute a^b mod c, where a, b and c are integers.
7,047
def extended_euclidean_algorithm ( a , b ) : r0 , r1 = a , b s0 , s1 = 1 , 0 t0 , t1 = 0 , 1 while r1 != 0 : q = r0 // r1 r0 , r1 = r1 , r0 - q * r1 s0 , s1 = s1 , s0 - q * s1 t0 , t1 = t1 , t0 - q * t1 return r0 , s0 , t0
Extended Euclidean algorithm
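A quick check of the Bezout identity returned above: r is gcd(a, b) and a*s + b*t == r.
a, b = 240, 46
r, s, t = extended_euclidean_algorithm(a, b)
assert r == 2                      # gcd(240, 46)
assert a * s + b * t == r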
7,048
def invert ( a , b ) : if HAVE_GMP : s = int ( gmpy2 . invert ( a , b ) ) if s == 0 : raise ZeroDivisionError ( 'invert() no inverse exists' ) return s else : r , s , _ = extended_euclidean_algorithm ( a , b ) if r != 1 : raise ZeroDivisionError ( 'invert() no inverse exists' ) return s % b
The multiplicative inverse of a in the integers modulo b.
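For example, invert(3, 7) is 5, since 3 * 5 = 15 ≡ 1 (mod 7); it can be cross-checked against Python's built-in modular inverse (Python 3.8+):
assert invert(3, 7) == 5
assert invert(3, 7) == pow(3, -1, 7)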
7,049
def getprimeover ( N ) : if HAVE_GMP : randfunc = random . SystemRandom ( ) r = gmpy2 . mpz ( randfunc . getrandbits ( N ) ) r = gmpy2 . bit_set ( r , N - 1 ) return int ( gmpy2 . next_prime ( r ) ) elif HAVE_CRYPTO : return number . getPrime ( N , os . urandom ) else : randfunc = random . SystemRandom ( ) n = randfunc . randrange ( 2 ** ( N - 1 ) , 2 ** N ) | 1 while not is_prime ( n ) : n += 2 return n
Return a random N-bit prime number using the system's best cryptographic random source.
7,050
def miller_rabin ( n , k ) : assert n > 3 d = n - 1 r = 0 while d % 2 == 0 : d //= 2 r += 1 assert n - 1 == d * 2 ** r assert d % 2 == 1 for _ in range ( k ) : a = random . randint ( 2 , n - 2 ) x = pow ( a , d , n ) if x == 1 or x == n - 1 : continue for _ in range ( 1 , r ) : x = x * x % n if x == n - 1 : break else : return False return True
Run the Miller - Rabin test on n with at most k iterations
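A small sanity check of miller_rabin as defined above (the test is probabilistic, so composites are only rejected with overwhelming probability at k = 25):
assert miller_rabin(97, 25)            # prime
assert not miller_rabin(561, 25)       # Carmichael number, 3 * 11 * 17
assert miller_rabin(7919, 25)          # the 1000th prime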
7,051
def is_prime ( n , mr_rounds = 25 ) : if n <= first_primes [ - 1 ] : return n in first_primes for p in first_primes : if n % p == 0 : return False return miller_rabin ( n , mr_rounds )
Test whether n is probably prime
7,052
def findCommunities ( G ) : infomapWrapper = infomap . Infomap ( "--two-level" ) print ( "Building Infomap network from a NetworkX graph..." ) for e in G . edges ( ) : infomapWrapper . addLink ( * e ) print ( "Find communities with Infomap..." ) infomapWrapper . run ( ) tree = infomapWrapper . tree print ( "Found %d top modules with codelength: %f" % ( tree . numTopModules ( ) , tree . codelength ( ) ) ) communities = { } for node in tree . leafIter ( ) : communities [ node . originalLeafIndex ] = node . moduleIndex ( ) nx . set_node_attributes ( G , name = 'community' , values = communities ) return tree . numTopModules ( )
Partition the network with the Infomap algorithm. Annotate nodes with their community id and return the number of communities found.
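A usage sketch for findCommunities, assuming the older infomap Python bindings used above (tree.leafIter and moduleIndex; newer releases of the infomap package expose a different API):
import networkx as nx
G = nx.karate_club_graph()                      # small test graph with known community structure
num_modules = findCommunities(G)                # annotates G in place and returns the module count
print(num_modules)
print(nx.get_node_attributes(G, 'community'))   # node -> community id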
7,053
def setup ( self ) : try : subprocess . check_output ( [ "hcitool" , "clock" ] , stderr = subprocess . STDOUT ) except subprocess . CalledProcessError : raise BackendError ( "'hcitool clock' returned error. Make sure " "your bluetooth device is powered up with " "'hciconfig hciX up'." ) except OSError : raise BackendError ( "'hcitool' could not be found, make sure you " "have bluez-utils installed." )
Check if the bluetooth controller is available .
7,054
def find_device ( self ) : for bdaddr , name in self . scan ( ) : if name == "Wireless Controller" : self . logger . info ( "Found device {0}" , bdaddr ) return BluetoothDS4Device . connect ( bdaddr )
Scan for bluetooth devices and return a DS4 device if found .
7,055
def add_watcher ( self , fd , callback ) : if not isinstance ( fd , int ) : fd = fd . fileno ( ) self . callbacks [ fd ] = callback self . epoll . register ( fd , EPOLLIN )
Starts watching a non - blocking fd for data .
7,056
def remove_watcher ( self , fd ) : if not isinstance ( fd , int ) : fd = fd . fileno ( ) if fd not in self . callbacks : return self . callbacks . pop ( fd , None ) self . epoll . unregister ( fd )
Stops watching a fd .
7,057
def fire_event ( self , event , * args , ** kwargs ) : self . event_queue . append ( ( event , args ) ) self . process_events ( )
Fire an event.
7,058
def process_events ( self ) : for event , args in iter_except ( self . event_queue . popleft , IndexError ) : for callback in self . event_callbacks [ event ] : callback ( * args )
Processes any events in the queue .
7,059
def run ( self ) : self . running = True while self . running : for fd , event in self . epoll . poll ( self . epoll_timeout ) : callback = self . callbacks . get ( fd ) if callback : callback ( )
Starts the loop .
7,060
def stop ( self ) : self . running = False self . callbacks = { } self . epoll = epoll ( ) self . event_queue = deque ( ) self . event_callbacks = defaultdict ( set )
Stops the loop .
7,061
def _get_future_devices ( self , context ) : monitor = Monitor . from_netlink ( context ) monitor . filter_by ( "hidraw" ) monitor . start ( ) self . _scanning_log_message ( ) for device in iter ( monitor . poll , None ) : if device . action == "add" : sleep ( 1 ) yield device self . _scanning_log_message ( )
Return a generator yielding new devices .
7,062
def rumble ( self , small = 0 , big = 0 ) : self . _control ( small_rumble = small , big_rumble = big )
Sets the intensity of the rumble motors . Valid range is 0 - 255 .
7,063
def set_led ( self , red = 0 , green = 0 , blue = 0 ) : self . _led = ( red , green , blue ) self . _control ( )
Sets the LED color . Values are RGB between 0 - 255 .
7,064
def start_led_flash ( self , on , off ) : if not self . _led_flashing : self . _led_flash = ( on , off ) self . _led_flashing = True self . _control ( )
Starts flashing the LED .
7,065
def stop_led_flash ( self ) : if self . _led_flashing : self . _led_flash = ( 0 , 0 ) self . _led_flashing = False self . _control ( ) self . _control ( )
Stops flashing the LED .
7,066
def parse_report ( self , buf ) : dpad = buf [ 5 ] % 16 return DS4Report ( buf [ 1 ] , buf [ 2 ] , buf [ 3 ] , buf [ 4 ] , buf [ 8 ] , buf [ 9 ] , ( dpad in ( 0 , 1 , 7 ) ) , ( dpad in ( 3 , 4 , 5 ) ) , ( dpad in ( 5 , 6 , 7 ) ) , ( dpad in ( 1 , 2 , 3 ) ) , ( buf [ 5 ] & 32 ) != 0 , ( buf [ 5 ] & 64 ) != 0 , ( buf [ 5 ] & 16 ) != 0 , ( buf [ 5 ] & 128 ) != 0 , ( buf [ 6 ] & 1 ) != 0 , ( buf [ 6 ] & 4 ) != 0 , ( buf [ 6 ] & 64 ) != 0 , ( buf [ 6 ] & 2 ) != 0 , ( buf [ 6 ] & 8 ) != 0 , ( buf [ 6 ] & 128 ) != 0 , ( buf [ 6 ] & 16 ) != 0 , ( buf [ 6 ] & 32 ) != 0 , ( buf [ 7 ] & 2 ) != 0 , ( buf [ 7 ] & 1 ) != 0 , S16LE . unpack_from ( buf , 13 ) [ 0 ] , S16LE . unpack_from ( buf , 15 ) [ 0 ] , S16LE . unpack_from ( buf , 17 ) [ 0 ] , - ( S16LE . unpack_from ( buf , 19 ) [ 0 ] ) , S16LE . unpack_from ( buf , 21 ) [ 0 ] , S16LE . unpack_from ( buf , 23 ) [ 0 ] , buf [ 35 ] & 0x7f , ( buf [ 35 ] >> 7 ) == 0 , ( ( buf [ 37 ] & 0x0f ) << 8 ) | buf [ 36 ] , buf [ 38 ] << 4 | ( ( buf [ 37 ] & 0xf0 ) >> 4 ) , buf [ 39 ] & 0x7f , ( buf [ 39 ] >> 7 ) == 0 , ( ( buf [ 41 ] & 0x0f ) << 8 ) | buf [ 40 ] , buf [ 42 ] << 4 | ( ( buf [ 41 ] & 0xf0 ) >> 4 ) , buf [ 7 ] >> 2 , buf [ 30 ] % 16 , ( buf [ 30 ] & 16 ) != 0 , ( buf [ 30 ] & 32 ) != 0 , ( buf [ 30 ] & 64 ) != 0 )
Parse a buffer containing a HID report .
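The dpad and face buttons above are packed into single bytes; a small illustration of the masking on a made-up value of buf[5] (the button names follow ds4drv's field order and are assumptions):
byte5 = 0b00110010
dpad = byte5 % 16                 # low nibble: hat direction 0-7, 8 = released -> 2 (right)
cross    = (byte5 & 32) != 0      # -> True
circle   = (byte5 & 64) != 0      # -> False
square   = (byte5 & 16) != 0      # -> True
triangle = (byte5 & 128) != 0     # -> False
print(dpad, cross, circle, square, triangle)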
7,067
def exec_ ( controller , cmd , * args ) : controller . logger . info ( "Executing: {0} {1}" , cmd , " " . join ( args ) ) try : subprocess . check_call ( [ cmd ] + list ( args ) ) except ( OSError , subprocess . CalledProcessError ) as err : controller . logger . error ( "Failed to execute process: {0}" , err )
Execute a subprocess in the foreground, blocking until it returns.
7,068
def exec_background ( controller , cmd , * args ) : controller . logger . info ( "Executing in the background: {0} {1}" , cmd , " " . join ( args ) ) try : subprocess . Popen ( [ cmd ] + list ( args ) , stdout = open ( os . devnull , "wb" ) , stderr = open ( os . devnull , "wb" ) ) except OSError as err : controller . logger . error ( "Failed to execute process: {0}" , err )
Executes a subprocess in the background .
7,069
def create_uinput_device ( mapping ) : if mapping not in _mappings : raise DeviceError ( "Unknown device mapping: {0}" . format ( mapping ) ) try : mapping = _mappings [ mapping ] device = UInputDevice ( mapping ) except UInputError as err : raise DeviceError ( err ) return device
Creates a uinput device .
7,070
def parse_uinput_mapping ( name , mapping ) : axes , buttons , mouse , mouse_options = { } , { } , { } , { } description = "ds4drv custom mapping ({0})" . format ( name ) for key , attr in mapping . items ( ) : key = key . upper ( ) if key . startswith ( "BTN_" ) or key . startswith ( "KEY_" ) : buttons [ key ] = attr elif key . startswith ( "ABS_" ) : axes [ key ] = attr elif key . startswith ( "REL_" ) : mouse [ key ] = attr elif key . startswith ( "MOUSE_" ) : mouse_options [ key ] = attr create_mapping ( name , description , axes = axes , buttons = buttons , mouse = mouse , mouse_options = mouse_options )
Parses a dict of mapping options .
7,071
def next_joystick_device ( ) : for i in range ( 100 ) : dev = "/dev/input/js{0}" . format ( i ) if not os . path . exists ( dev ) : return dev
Finds the next available js device name .
7,072
def create_device ( self , layout ) : events = { ecodes . EV_ABS : [ ] , ecodes . EV_KEY : [ ] , ecodes . EV_REL : [ ] } if layout . axes or layout . buttons or layout . hats : self . joystick_dev = next_joystick_device ( ) for name in layout . axes : params = layout . axes_options . get ( name , DEFAULT_AXIS_OPTIONS ) if not absInfoUsesValue : params = params [ 1 : ] events [ ecodes . EV_ABS ] . append ( ( name , params ) ) for name in layout . hats : params = ( 0 , - 1 , 1 , 0 , 0 ) if not absInfoUsesValue : params = params [ 1 : ] events [ ecodes . EV_ABS ] . append ( ( name , params ) ) for name in layout . buttons : events [ ecodes . EV_KEY ] . append ( name ) if layout . mouse : self . mouse_pos = { } self . mouse_rel = { } self . mouse_analog_sensitivity = float ( layout . mouse_options . get ( "MOUSE_SENSITIVITY" , DEFAULT_MOUSE_SENSITIVTY ) ) self . mouse_analog_deadzone = int ( layout . mouse_options . get ( "MOUSE_DEADZONE" , DEFAULT_MOUSE_DEADZONE ) ) self . scroll_repeat_delay = float ( layout . mouse_options . get ( "MOUSE_SCROLL_REPEAT_DELAY" , DEFAULT_SCROLL_REPEAT_DELAY ) ) self . scroll_delay = float ( layout . mouse_options . get ( "MOUSE_SCROLL_DELAY" , DEFAULT_SCROLL_DELAY ) ) for name in layout . mouse : if name in ( ecodes . REL_WHEELUP , ecodes . REL_WHEELDOWN ) : if ecodes . REL_WHEEL not in events [ ecodes . EV_REL ] : events [ ecodes . EV_REL ] . append ( ecodes . REL_WHEEL ) else : events [ ecodes . EV_REL ] . append ( name ) self . mouse_rel [ name ] = 0.0 self . device = UInput ( name = layout . name , events = events , bustype = layout . bustype , vendor = layout . vendor , product = layout . product , version = layout . version ) self . layout = layout
Creates a uinput device using the specified layout .
7,073
def write_event ( self , etype , code , value ) : last_value = self . _write_cache . get ( code ) if last_value != value : self . device . write ( etype , code , value ) self . _write_cache [ code ] = value
Write an event to the device if its value has changed.
7,074
def emit ( self , report ) : for name , attr in self . layout . axes . items ( ) : value = getattr ( report , attr ) self . write_event ( ecodes . EV_ABS , name , value ) for name , attr in self . layout . buttons . items ( ) : attr , modifier = attr if attr in self . ignored_buttons : value = False else : value = getattr ( report , attr ) if modifier and "analog" in attr : if modifier == "+" : value = value > ( 128 + DEFAULT_A2D_DEADZONE ) elif modifier == "-" : value = value < ( 128 - DEFAULT_A2D_DEADZONE ) self . write_event ( ecodes . EV_KEY , name , value ) for name , attr in self . layout . hats . items ( ) : if getattr ( report , attr [ 0 ] ) : value = - 1 elif getattr ( report , attr [ 1 ] ) : value = 1 else : value = 0 self . write_event ( ecodes . EV_ABS , name , value ) self . device . syn ( )
Write axes, buttons and hats with values from the report to the device.
7,075
def emit_reset ( self ) : for name in self . layout . axes : params = self . layout . axes_options . get ( name , DEFAULT_AXIS_OPTIONS ) self . write_event ( ecodes . EV_ABS , name , int ( sum ( params [ 1 : 3 ] ) / 2 ) ) for name in self . layout . buttons : self . write_event ( ecodes . EV_KEY , name , False ) for name in self . layout . hats : self . write_event ( ecodes . EV_ABS , name , 0 ) self . device . syn ( )
Resets the device to a blank state .
7,076
def emit_mouse ( self , report ) : for name , attr in self . layout . mouse . items ( ) : attr , modifier = attr if attr . startswith ( "trackpad_touch" ) : active_attr = attr [ : 16 ] + "active" if not getattr ( report , active_attr ) : self . mouse_pos . pop ( name , None ) continue pos = getattr ( report , attr ) if name not in self . mouse_pos : self . mouse_pos [ name ] = pos sensitivity = 0.5 self . mouse_rel [ name ] += ( pos - self . mouse_pos [ name ] ) * sensitivity self . mouse_pos [ name ] = pos elif "analog" in attr : pos = getattr ( report , attr ) if ( pos > ( 128 + self . mouse_analog_deadzone ) or pos < ( 128 - self . mouse_analog_deadzone ) ) : accel = ( pos - 128 ) / 10 else : continue if ( modifier and modifier == "-" ) : accel = - accel sensitivity = self . mouse_analog_sensitivity self . mouse_rel [ name ] += accel * sensitivity if name in ( ecodes . REL_WHEELUP , ecodes . REL_WHEELDOWN ) : ecode = ecodes . REL_WHEEL write = False if getattr ( report , attr ) : self . _scroll_details [ 'direction' ] = name now = time . time ( ) last_write = self . _scroll_details . get ( 'last_write' ) if not last_write : write = True self . _scroll_details [ 'count' ] = 0 if name == ecodes . REL_WHEELUP : value = 1 elif name == ecodes . REL_WHEELDOWN : value = - 1 if last_write : if self . _scroll_details [ 'count' ] > 1 : if now - last_write > self . scroll_delay : write = True elif now - last_write > self . scroll_repeat_delay : write = True if write : self . device . write ( ecodes . EV_REL , ecode , value ) self . _scroll_details [ 'last_write' ] = now self . _scroll_details [ 'count' ] += 1 continue else : if self . _scroll_details . get ( 'direction' ) == name : self . _scroll_details [ 'last_write' ] = 0 self . _scroll_details [ 'count' ] = 0 rel = int ( self . mouse_rel [ name ] ) self . mouse_rel [ name ] = self . mouse_rel [ name ] - rel self . device . write ( ecodes . EV_REL , name , rel ) self . device . syn ( )
Calculates relative mouse values from a report and writes them .
7,077
def start_output ( self ) : super ( GMLLogger , self ) . start_output ( ) if self . has_part ( "intro" ) : self . write_intro ( ) self . writeln ( ) self . writeln ( u"graph [" ) self . writeln ( u" directed 1" ) self . flush ( )
Write start of checking info as gml comment .
7,078
def comment ( self , s , ** args ) : self . writeln ( s = u'comment "%s"' % s , ** args )
Write GML comment .
7,079
def getaddresses ( addr ) : parsed = [ mail for name , mail in AddressList ( addr ) . addresslist if mail ] if parsed : addresses = parsed elif addr : addresses = [ addr ] else : addresses = [ ] return addresses
Return list of email addresses from given field value .
7,080
def parse_addresses ( self ) : url = urllib . unquote ( self . base_url [ 7 : ] ) mode = 0 quote = None i = 0 for i , c in enumerate ( url ) : if mode == 0 : if c == '?' : break elif c in '<"' : quote = c mode = 1 elif c == '\\' : mode = 2 elif mode == 1 : if c == '"' and quote == '"' : mode = 0 elif c == '>' and quote == '<' : mode = 0 elif mode == 2 : mode = 0 if i < ( len ( url ) - 1 ) : self . addresses . update ( getaddresses ( url [ : i ] ) ) try : headers = urlparse . parse_qs ( url [ ( i + 1 ) : ] , strict_parsing = True ) for key , vals in headers . items ( ) : if key . lower ( ) in EMAIL_CGI_ADDRESS : self . addresses . update ( getaddresses ( urllib . unquote ( vals [ 0 ] ) ) ) if key . lower ( ) == EMAIL_CGI_SUBJECT : self . subject = vals [ 0 ] except ValueError as err : self . add_warning ( _ ( "Error parsing CGI values: %s" ) % str ( err ) ) else : self . addresses . update ( getaddresses ( url ) ) log . debug ( LOG_CHECK , "addresses: %s" , self . addresses )
Parse all mail addresses out of the URL target. Also parse optional CGI headers such as ?to=foo.
7,081
def check_connection ( self ) : for mail in sorted ( self . addresses ) : self . check_smtp_domain ( mail ) if not self . valid : break
Verify a list of email addresses . If one address fails the whole list will fail .
7,082
def check_smtp_domain ( self , mail ) : from dns . exception import DNSException log . debug ( LOG_CHECK , "checking mail address %r" , mail ) mail = strformat . ascii_safe ( mail ) username , domain = mail . rsplit ( '@' , 1 ) log . debug ( LOG_CHECK , "looking up MX mailhost %r" , domain ) try : answers = resolver . query ( domain , 'MX' ) except DNSException : answers = [ ] if len ( answers ) == 0 : self . add_warning ( _ ( "No MX mail host for %(domain)s found." ) % { 'domain' : domain } , tag = WARN_MAIL_NO_MX_HOST ) try : answers = resolver . query ( domain , 'A' ) except DNSException : answers = [ ] if len ( answers ) == 0 : self . set_result ( _ ( "No host for %(domain)s found." ) % { 'domain' : domain } , valid = False , overwrite = True ) return mxdata = [ ( 0 , rdata . to_text ( omit_final_dot = True ) ) for rdata in answers ] else : from dns . rdtypes . mxbase import MXBase mxdata = [ ( rdata . preference , rdata . exchange . to_text ( omit_final_dot = True ) ) for rdata in answers if isinstance ( rdata , MXBase ) ] if not mxdata : self . set_result ( _ ( "Got invalid DNS answer %(answer)s for %(domain)s." ) % { 'answer' : answers , 'domain' : domain } , valid = False , overwrite = True ) return mxdata . sort ( ) log . debug ( LOG_CHECK , "found %d MX mailhosts:" , len ( answers ) ) for preference , host in mxdata : log . debug ( LOG_CHECK , "MX host %r, preference %d" , host , preference ) pass self . set_result ( _ ( "Valid mail address syntax" ) )
Check a single mail address .
7,083
def set_cache_url ( self ) : emails = u"," . join ( sorted ( self . addresses ) ) self . cache_url = u"%s:%s" % ( self . scheme , emails )
The cache url is a comma separated list of emails .
7,084
def normpath ( path ) : expanded = os . path . expanduser ( os . path . expandvars ( path ) ) return os . path . normcase ( os . path . normpath ( expanded ) )
Normalize the given system path with all available normalization and expansion functions in os.path.
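For example, on a POSIX system (actual output depends on the environment and home directory):
import os
os.environ['MYAPP_DIR'] = 'myapp'              # hypothetical variable for the example
print(normpath('~/data/$MYAPP_DIR/../cache//logs'))
# e.g. /home/user/data/cache/logs  (variable expansion, user expansion, then normalisation)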
7,085
def get_modules_info ( ) : module_infos = [ ] for ( mod , name , version_attr ) in Modules : if not fileutil . has_module ( mod ) : continue if hasattr ( mod , version_attr ) : attr = getattr ( mod , version_attr ) version = attr ( ) if callable ( attr ) else attr module_infos . append ( "%s %s" % ( name , version ) ) else : module_infos . append ( name ) return u"Modules: %s" % ( u", " . join ( module_infos ) )
Return unicode string with detected module info .
7,086
def get_share_file ( filename , devel_dir = None ) : paths = [ get_share_dir ( ) ] if devel_dir is not None : paths . insert ( 0 , devel_dir ) for path in paths : fullpath = os . path . join ( path , filename ) if os . path . isfile ( fullpath ) : return fullpath msg = "%s not found in %s; check your installation" % ( filename , paths ) raise ValueError ( msg )
Return a filename in the share directory .
7,087
def get_system_cert_file ( ) : if os . name == 'posix' : filename = "/etc/ssl/certs/ca-certificates.crt" if os . path . isfile ( filename ) : return filename msg = "no system certificate file found" raise ValueError ( msg )
Try to find a system - wide SSL certificate file .
7,088
def get_certifi_file ( ) : import certifi filename = certifi . where ( ) if os . path . isfile ( filename ) : return filename msg = "%s not found; check your certifi installation" % filename raise ValueError ( msg )
Get the SSL certifications installed by the certifi package .
7,089
def make_userdir ( child ) : userdir = os . path . dirname ( child ) if not os . path . isdir ( userdir ) : if os . name == 'nt' : userdir += "." os . mkdir ( userdir , 0700 )
Create the user directory that will contain the given child path, if it does not already exist.
7,090
def get_kde_home_dir ( ) : if os . environ . get ( "KDEHOME" ) : kde_home = os . path . abspath ( os . environ [ "KDEHOME" ] ) else : home = os . environ . get ( "HOME" ) if not home : return kde3_home = os . path . join ( home , ".kde" ) kde4_home = os . path . join ( home , ".kde4" ) if fileutil . find_executable ( "kde4-config" ) : kde3_file = kde_home_to_config ( kde3_home ) kde4_file = kde_home_to_config ( kde4_home ) if os . path . exists ( kde4_file ) and os . path . exists ( kde3_file ) : if fileutil . get_mtime ( kde4_file ) >= fileutil . get_mtime ( kde3_file ) : kde_home = kde4_home else : kde_home = kde3_home else : kde_home = kde4_home else : kde_home = kde3_home return kde_home if os . path . exists ( kde_home ) else None
Return KDE home directory or None if not found .
7,091
def read_kioslaverc ( kde_config_dir ) : data = { } filename = os . path . join ( kde_config_dir , "kioslaverc" ) with open ( filename ) as fd : for line in fd : line = line . rstrip ( ) if line . startswith ( '[' ) : in_proxy_settings = line . startswith ( "[Proxy Settings]" ) elif in_proxy_settings : if '=' not in line : continue key , value = line . split ( '=' , 1 ) key = key . strip ( ) value = value . strip ( ) if not key : continue key = loc_ro . sub ( "" , key ) . strip ( ) if not key : continue add_kde_setting ( key , value , data ) resolve_kde_settings ( data ) return data
Read kioslaverc into data dictionary .
7,092
def add_kde_setting ( key , value , data ) : if key == "ProxyType" : mode = None int_value = int ( value ) if int_value == 1 : mode = "manual" elif int_value == 2 : mode = "pac" elif int_value == 3 : mode = "wpad" elif int_value == 4 : mode = "indirect" data [ "mode" ] = mode elif key == "Proxy Config Script" : data [ "autoconfig_url" ] = value elif key == "httpProxy" : add_kde_proxy ( "http_proxy" , value , data ) elif key == "httpsProxy" : add_kde_proxy ( "https_proxy" , value , data ) elif key == "ftpProxy" : add_kde_proxy ( "ftp_proxy" , value , data ) elif key == "ReversedException" : data [ "reversed_bypass" ] = bool ( value == "true" or int ( value ) ) elif key == "NoProxyFor" : data [ "ignore_hosts" ] = split_hosts ( value ) elif key == "AuthMode" : mode = int ( value )
Add a KDE proxy setting value to data dictionary .
7,093
def resolve_indirect ( data , key , splithosts = False ) : value = data [ key ] env_value = os . environ . get ( value ) if env_value : if splithosts : data [ key ] = split_hosts ( env_value ) else : data [ key ] = env_value else : del data [ key ]
Replace name of environment variable with its value .
7,094
def resolve_kde_settings ( data ) : if "mode" not in data : return if data [ "mode" ] == "indirect" : for key in ( "http_proxy" , "https_proxy" , "ftp_proxy" ) : if key in data : resolve_indirect ( data , key ) if "ignore_hosts" in data : resolve_indirect ( data , "ignore_hosts" , splithosts = True ) elif data [ "mode" ] != "manual" : for key in ( "http_proxy" , "https_proxy" , "ftp_proxy" ) : if key in data : del data [ key ]
Write final proxy configuration values in data dictionary .
7,095
def logger_new ( self , loggername , ** kwargs ) : args = self [ loggername ] args . update ( kwargs ) return self . loggers [ loggername ] ( ** args )
Instantiate new logger and return it .
7,096
def logger_add ( self , loggerclass ) : self . loggers [ loggerclass . LoggerName ] = loggerclass self [ loggerclass . LoggerName ] = { }
Add a new logger type to the known loggers .
7,097
def add_auth ( self , user = None , password = None , pattern = None ) : if not user or not pattern : log . warn ( LOG_CHECK , _ ( "missing user or URL pattern in authentication data." ) ) return entry = dict ( user = user , password = password , pattern = re . compile ( pattern ) , ) self [ "authentication" ] . append ( entry )
Add given authentication data .
7,098
def sanitize ( self ) : "Make sure the configuration is consistent." if self [ 'logger' ] is None : self . sanitize_logger ( ) if self [ 'loginurl' ] : self . sanitize_loginurl ( ) self . sanitize_proxies ( ) self . sanitize_plugins ( ) self . sanitize_ssl ( ) socket . setdefaulttimeout ( self [ 'timeout' ] )
Make sure the configuration is consistent .
7,099
def sanitize_logger ( self ) : if not self [ 'output' ] : log . warn ( LOG_CHECK , _ ( "activating text logger output." ) ) self [ 'output' ] = 'text' self [ 'logger' ] = self . logger_new ( self [ 'output' ] )
Make logger configuration consistent .