idx
int64 0
63k
| question
stringlengths 61
4.03k
| target
stringlengths 6
1.23k
|
|---|---|---|
10,200
|
def _close ( self ) : self . _usb_handle . releaseInterface ( ) try : self . _usb_handle . dev . attach_kernel_driver ( 0 ) except : pass self . _usb_int = None self . _usb_handle = None return True
|
Release the USB interface again .
|
10,201
|
def _get_usb_device ( self , skip = 0 ) : try : import usb . core import usb . legacy devices = [ usb . legacy . Device ( d ) for d in usb . core . find ( find_all = True , idVendor = YUBICO_VID ) ] except ImportError : import usb devices = [ d for bus in usb . busses ( ) for d in bus . devices ] for device in devices : if device . idVendor == YUBICO_VID : if device . idProduct in PID . all ( otp = True ) : if skip == 0 : return device skip -= 1 return None
|
Get YubiKey USB device .
|
10,202
|
def _debug ( self , out , print_prefix = True ) : if self . debug : if print_prefix : pre = self . __class__ . __name__ if hasattr ( self , 'debug_prefix' ) : pre = getattr ( self , 'debug_prefix' ) sys . stderr . write ( "%s: " % pre ) sys . stderr . write ( out )
|
Print out to stderr if debugging is enabled .
|
10,203
|
def init_config ( self , ** kw ) : return YubiKeyConfigUSBHID ( ykver = self . version_num ( ) , capabilities = self . capabilities , ** kw )
|
Get a configuration object for this type of YubiKey .
|
10,204
|
def write_config ( self , cfg , slot = 1 ) : cfg_req_ver = cfg . version_required ( ) if cfg_req_ver > self . version_num ( ) : raise yubikey_base . YubiKeyVersionError ( 'Configuration requires YubiKey version %i.%i (this is %s)' % ( cfg_req_ver [ 0 ] , cfg_req_ver [ 1 ] , self . version ( ) ) ) if not self . capabilities . have_configuration_slot ( slot ) : raise YubiKeyUSBHIDError ( "Can't write configuration to slot %i" % ( slot ) ) return self . _device . _write_config ( cfg , slot )
|
Write a configuration to the YubiKey .
|
10,205
|
def _read_serial ( self , may_block ) : frame = yubikey_frame . YubiKeyFrame ( command = SLOT . DEVICE_SERIAL ) self . _device . _write ( frame ) response = self . _device . _read_response ( may_block = may_block ) if not yubico_util . validate_crc16 ( response [ : 6 ] ) : raise YubiKeyUSBHIDError ( "Read from device failed CRC check" ) serial = struct . unpack ( '>lxxx' , response ) return serial [ 0 ]
|
Read the serial number from a YubiKey > 2 . 2 .
|
10,206
|
def _challenge_response ( self , challenge , mode , slot , variable , may_block ) : if mode == 'HMAC' : if len ( challenge ) > yubikey_defs . SHA1_MAX_BLOCK_SIZE : raise yubico_exception . InputError ( 'Mode HMAC challenge too big (%i/%i)' % ( yubikey_defs . SHA1_MAX_BLOCK_SIZE , len ( challenge ) ) ) if len ( challenge ) < yubikey_defs . SHA1_MAX_BLOCK_SIZE : pad_with = b'\0' if variable and challenge [ - 1 : ] == pad_with : pad_with = b'\xff' challenge = challenge . ljust ( yubikey_defs . SHA1_MAX_BLOCK_SIZE , pad_with ) response_len = yubikey_defs . SHA1_DIGEST_SIZE elif mode == 'OTP' : if len ( challenge ) != yubikey_defs . UID_SIZE : raise yubico_exception . InputError ( 'Mode OTP challenge must be %i bytes (got %i)' % ( yubikey_defs . UID_SIZE , len ( challenge ) ) ) challenge = challenge . ljust ( yubikey_defs . SHA1_MAX_BLOCK_SIZE , b'\0' ) response_len = 16 else : raise yubico_exception . InputError ( 'Invalid mode supplied (%s, valid values are HMAC and OTP)' % ( mode ) ) try : command = _CMD_CHALLENGE [ mode ] [ slot ] except : raise yubico_exception . InputError ( 'Invalid slot specified (%s)' % ( slot ) ) frame = yubikey_frame . YubiKeyFrame ( command = command , payload = challenge ) self . _device . _write ( frame ) response = self . _device . _read_response ( may_block = may_block ) if not yubico_util . validate_crc16 ( response [ : response_len + 2 ] ) : raise YubiKeyUSBHIDError ( "Read from device failed CRC check" ) return response [ : response_len ]
|
Do challenge - response with a YubiKey > 2 . 0 .
|
10,207
|
def valid_configs ( self ) : if self . ykver ( ) < ( 2 , 1 , 0 ) : raise YubiKeyUSBHIDError ( 'Valid configs unsupported in firmware %s' % ( self . version ( ) ) ) res = [ ] if self . touch_level & self . CONFIG1_VALID == self . CONFIG1_VALID : res . append ( 1 ) if self . touch_level & self . CONFIG2_VALID == self . CONFIG2_VALID : res . append ( 2 ) return res
|
Return a list of slots having a valid configuration . Requires firmware 2 . 1 .
|
10,208
|
def command2str ( num ) : for attr in SLOT . __dict__ . keys ( ) : if not attr . startswith ( '_' ) and attr == attr . upper ( ) : if getattr ( SLOT , attr ) == num : return 'SLOT_%s' % attr return "0x%02x" % ( num )
|
Turn command number into name
|
10,209
|
def _get_flag ( which , flags ) : res = [ this for this in flags if this . is_equal ( which ) ] if len ( res ) == 0 : return None if len ( res ) == 1 : return res [ 0 ] assert ( )
|
Find which entry in flags .
|
10,210
|
def fixed_string ( self , data = None ) : old = self . fixed if data != None : new = self . _decode_input_string ( data ) if len ( new ) <= 16 : self . fixed = new else : raise yubico_exception . InputError ( 'The "fixed" string must be 0..16 bytes' ) return old
|
The fixed string is used to identify a particular Yubikey device .
|
10,211
|
def enable_extended_scan_code_mode ( self ) : if not self . capabilities . have_extended_scan_code_mode ( ) : raise self . _require_version ( major = 2 ) self . config_flag ( 'SHORT_TICKET' , True ) self . config_flag ( 'STATIC_TICKET' , False )
|
Extended scan code mode means the Yubikey will output the bytes in the fixed string as scan codes without modhex encoding the data .
|
10,212
|
def aes_key ( self , data ) : old = self . key if data : new = self . _decode_input_string ( data ) if len ( new ) == 16 : self . key = new else : raise yubico_exception . InputError ( 'AES128 key must be exactly 16 bytes' ) return old
|
AES128 key to program into YubiKey .
|
10,213
|
def unlock_key ( self , data ) : if data . startswith ( b'h:' ) : new = binascii . unhexlify ( data [ 2 : ] ) else : new = data if len ( new ) == 6 : self . unlock_code = new if not self . access_code : self . access_code = new else : raise yubico_exception . InputError ( 'Unlock key must be exactly 6 bytes' )
|
Access code to allow re - programming of your YubiKey .
|
10,214
|
def access_key ( self , data ) : if data . startswith ( b'h:' ) : new = binascii . unhexlify ( data [ 2 : ] ) else : new = data if len ( new ) == 6 : self . access_code = new else : raise yubico_exception . InputError ( 'Access key must be exactly 6 bytes' )
|
Set a new access code which will be required for future re - programmings of your YubiKey .
|
10,215
|
def mode_yubikey_otp ( self , private_uid , aes_key ) : if not self . capabilities . have_yubico_OTP ( ) : raise yubikey_base . YubiKeyVersionError ( 'Yubico OTP not available in %s version %d.%d' % ( self . capabilities . model , self . ykver [ 0 ] , self . ykver [ 1 ] ) ) if private_uid . startswith ( b'h:' ) : private_uid = binascii . unhexlify ( private_uid [ 2 : ] ) if len ( private_uid ) != yubikey_defs . UID_SIZE : raise yubico_exception . InputError ( 'Private UID must be %i bytes' % ( yubikey_defs . UID_SIZE ) ) self . _change_mode ( 'YUBIKEY_OTP' , major = 0 , minor = 9 ) self . uid = private_uid self . aes_key ( aes_key )
|
Set the YubiKey up for standard OTP validation .
|
10,216
|
def mode_oath_hotp ( self , secret , digits = 6 , factor_seed = None , omp = 0x0 , tt = 0x0 , mui = '' ) : if not self . capabilities . have_OATH ( 'HOTP' ) : raise yubikey_base . YubiKeyVersionError ( 'OATH HOTP not available in %s version %d.%d' % ( self . capabilities . model , self . ykver [ 0 ] , self . ykver [ 1 ] ) ) if digits != 6 and digits != 8 : raise yubico_exception . InputError ( 'OATH-HOTP digits must be 6 or 8' ) self . _change_mode ( 'OATH_HOTP' , major = 2 , minor = 1 ) self . _set_20_bytes_key ( secret ) if digits == 8 : self . config_flag ( 'OATH_HOTP8' , True ) if omp or tt or mui : decoded_mui = self . _decode_input_string ( mui ) fixed = yubico_util . chr_byte ( omp ) + yubico_util . chr_byte ( tt ) + decoded_mui self . fixed_string ( fixed ) if factor_seed : self . uid = self . uid + struct . pack ( '<H' , factor_seed )
|
Set the YubiKey up for OATH - HOTP operation .
|
10,217
|
def mode_challenge_response ( self , secret , type = 'HMAC' , variable = True , require_button = False ) : if not type . upper ( ) in [ 'HMAC' , 'OTP' ] : raise yubico_exception . InputError ( 'Invalid \'type\' (%s)' % type ) if not self . capabilities . have_challenge_response ( type . upper ( ) ) : raise yubikey_base . YubiKeyVersionError ( '%s Challenge-Response not available in %s version %d.%d' % ( type . upper ( ) , self . capabilities . model , self . ykver [ 0 ] , self . ykver [ 1 ] ) ) self . _change_mode ( 'CHAL_RESP' , major = 2 , minor = 2 ) if type . upper ( ) == 'HMAC' : self . config_flag ( 'CHAL_HMAC' , True ) self . config_flag ( 'HMAC_LT64' , variable ) self . _set_20_bytes_key ( secret ) else : self . config_flag ( 'CHAL_YUBICO' , True ) self . aes_key ( secret ) self . config_flag ( 'CHAL_BTN_TRIG' , require_button )
|
Set the YubiKey up for challenge - response operation .
|
10,218
|
def ticket_flag ( self , which , new = None ) : flag = _get_flag ( which , TicketFlags ) if flag : if not self . capabilities . have_ticket_flag ( flag ) : raise yubikey_base . YubiKeyVersionError ( 'Ticket flag %s requires %s, and this is %s %d.%d' % ( which , flag . req_string ( self . capabilities . model ) , self . capabilities . model , self . ykver [ 0 ] , self . ykver [ 1 ] ) ) req_major , req_minor = flag . req_version ( ) self . _require_version ( major = req_major , minor = req_minor ) value = flag . to_integer ( ) else : if type ( which ) is not int : raise yubico_exception . InputError ( 'Unknown non-integer TicketFlag (%s)' % which ) value = which return self . ticket_flags . get_set ( value , new )
|
Get or set a ticket flag .
|
10,219
|
def config_flag ( self , which , new = None ) : flag = _get_flag ( which , ConfigFlags ) if flag : if not self . capabilities . have_config_flag ( flag ) : raise yubikey_base . YubiKeyVersionError ( 'Config flag %s requires %s, and this is %s %d.%d' % ( which , flag . req_string ( self . capabilities . model ) , self . capabilities . model , self . ykver [ 0 ] , self . ykver [ 1 ] ) ) req_major , req_minor = flag . req_version ( ) self . _require_version ( major = req_major , minor = req_minor ) value = flag . to_integer ( ) else : if type ( which ) is not int : raise yubico_exception . InputError ( 'Unknown non-integer ConfigFlag (%s)' % which ) value = which return self . config_flags . get_set ( value , new )
|
Get or set a config flag .
|
10,220
|
def extended_flag ( self , which , new = None ) : flag = _get_flag ( which , ExtendedFlags ) if flag : if not self . capabilities . have_extended_flag ( flag ) : raise yubikey_base . YubiKeyVersionError ( 'Extended flag %s requires %s, and this is %s %d.%d' % ( which , flag . req_string ( self . capabilities . model ) , self . capabilities . model , self . ykver [ 0 ] , self . ykver [ 1 ] ) ) req_major , req_minor = flag . req_version ( ) self . _require_version ( major = req_major , minor = req_minor ) value = flag . to_integer ( ) else : if type ( which ) is not int : raise yubico_exception . InputError ( 'Unknown non-integer ExtendedFlag (%s)' % which ) value = which return self . extended_flags . get_set ( value , new )
|
Get or set a extended flag .
|
10,221
|
def _require_version ( self , major , minor = 0 ) : new_ver = ( major , minor ) if self . ykver and new_ver > self . ykver : raise yubikey_base . YubiKeyVersionError ( 'Configuration requires YubiKey %d.%d, and this is %d.%d' % ( major , minor , self . ykver [ 0 ] , self . ykver [ 1 ] ) ) if new_ver > self . yk_req_version : self . yk_req_version = new_ver
|
Update the minimum version of YubiKey this configuration can be applied to .
|
10,222
|
def _change_mode ( self , mode , major , minor ) : if self . _mode : if self . _mode != mode : raise RuntimeError ( 'Can\'t change mode (from %s to %s)' % ( self . _mode , mode ) ) self . _require_version ( major = major , minor = minor ) self . _mode = mode self . ticket_flags = YubiKeyConfigBits ( 0x0 ) self . config_flags = YubiKeyConfigBits ( 0x0 ) self . extended_flags = YubiKeyConfigBits ( 0x0 ) if mode != 'YUBIKEY_OTP' : self . ticket_flag ( mode , True )
|
Change mode of operation with some sanity checks .
|
10,223
|
def _set_20_bytes_key ( self , data ) : if data . startswith ( b'h:' ) : new = binascii . unhexlify ( data [ 2 : ] ) else : new = data if len ( new ) == 20 : self . key = new [ : 16 ] self . uid = new [ 16 : ] else : raise yubico_exception . InputError ( 'HMAC key must be exactly 20 bytes' )
|
Set a 20 bytes key . This is used in CHAL_HMAC and OATH_HOTP mode .
|
10,224
|
def modhex_decode ( data ) : try : maketrans = string . maketrans except AttributeError : maketrans = bytes . maketrans t_map = maketrans ( b"cbdefghijklnrtuv" , b"0123456789abcdef" ) return data . translate ( t_map )
|
Convert a modhex bytestring to ordinary hex .
|
10,225
|
def hotp_truncate ( hmac_result , length = 6 ) : if len ( hmac_result ) != 20 : raise yubico_exception . YubicoError ( "HMAC-SHA-1 not 20 bytes long" ) offset = ord_byte ( hmac_result [ 19 ] ) & 0xf bin_code = ( ord_byte ( hmac_result [ offset ] ) & 0x7f ) << 24 | ( ord_byte ( hmac_result [ offset + 1 ] ) & 0xff ) << 16 | ( ord_byte ( hmac_result [ offset + 2 ] ) & 0xff ) << 8 | ( ord_byte ( hmac_result [ offset + 3 ] ) & 0xff ) return bin_code % ( 10 ** length )
|
Perform the HOTP Algorithm truncating .
|
10,226
|
def tlv_parse ( data ) : parsed = { } while data : t , l , data = ord_byte ( data [ 0 ] ) , ord_byte ( data [ 1 ] ) , data [ 2 : ] parsed [ t ] , data = data [ : l ] , data [ l : ] return parsed
|
Parses a bytestring of TLV values into a dict with the tags as keys .
|
10,227
|
def get ( self , what ) : if self . enabled : if what in self . colors : return self . colors [ what ] return ''
|
Get the ANSI code for what
|
10,228
|
def all ( cls , otp = False , ccid = False , u2f = False ) : modes = set ( [ cls . OTP , cls . CCID , cls . OTP_CCID , cls . U2F , cls . OTP_U2F , cls . U2F_CCID , cls . OTP_U2F_CCID ] ) if otp : modes . difference_update ( set ( [ cls . CCID , cls . U2F , cls . U2F_CCID ] ) ) if ccid : modes . difference_update ( set ( [ cls . OTP , cls . U2F , cls . OTP_U2F ] ) ) if u2f : modes . difference_update ( set ( [ cls . OTP , cls . CCID , cls . OTP_CCID ] ) ) return modes
|
Returns a set of all USB modes with optional filtering
|
10,229
|
def all ( cls , otp = False , ccid = False , u2f = False ) : pids = set ( [ cls . YUBIKEY , cls . NEO_OTP , cls . NEO_OTP_CCID , cls . NEO_CCID , cls . NEO_U2F , cls . NEO_OTP_U2F , cls . NEO_U2F_CCID , cls . NEO_OTP_U2F_CCID , cls . NEO_SKY , cls . YK4_OTP , cls . YK4_U2F , cls . YK4_OTP_U2F , cls . YK4_CCID , cls . YK4_OTP_CCID , cls . YK4_U2F_CCID , cls . YK4_OTP_U2F_CCID , cls . PLUS_U2F_OTP ] ) if otp : pids . difference_update ( set ( [ cls . NEO_CCID , cls . NEO_U2F , cls . NEO_U2F_CCID , cls . NEO_SKY , cls . YK4_U2F , cls . YK4_CCID , cls . YK4_U2F_CCID ] ) ) if ccid : pids . difference_update ( set ( [ cls . YUBIKEY , cls . NEO_OTP , cls . NEO_U2F , cls . NEO_OTP_U2F , cls . NEO_SKY , cls . YK4_OTP , cls . YK4_U2F , cls . YK4_OTP_U2F , cls . PLUS_U2F_OTP ] ) ) if u2f : pids . difference_update ( set ( [ cls . YUBIKEY , cls . NEO_OTP , cls . NEO_OTP_CCID , cls . NEO_CCID , cls . YK4_OTP , cls . YK4_CCID , cls . YK4_OTP_CCID ] ) ) return pids
|
Returns a set of all PIDs with optional filtering
|
10,230
|
def to_string ( self ) : filler = b'' return struct . pack ( '<64sBH3s' , self . payload , self . command , self . crc , filler )
|
Return the frame as a 70 byte string .
|
10,231
|
def to_feature_reports ( self , debug = False ) : rest = self . to_string ( ) seq = 0 out = [ ] while rest : this , rest = rest [ : 7 ] , rest [ 7 : ] if seq > 0 and rest : if this != b'\x00\x00\x00\x00\x00\x00\x00' : this += yubico_util . chr_byte ( yubikey_defs . SLOT_WRITE_FLAG + seq ) out . append ( self . _debug_string ( debug , this ) ) else : this += yubico_util . chr_byte ( yubikey_defs . SLOT_WRITE_FLAG + seq ) out . append ( self . _debug_string ( debug , this ) ) seq += 1 return out
|
Return the frame as an array of 8 - byte parts ready to be sent to a YubiKey .
|
10,232
|
def _debug_string ( self , debug , data ) : if not debug : return data if self . command in [ SLOT . CONFIG , SLOT . CONFIG2 , SLOT . UPDATE1 , SLOT . UPDATE2 , SLOT . SWAP , ] : if yubico_util . ord_byte ( data [ - 1 ] ) == 0x80 : return ( data , "FFFFFFF" ) if yubico_util . ord_byte ( data [ - 1 ] ) == 0x81 : return ( data , "FFFFFFF" ) if yubico_util . ord_byte ( data [ - 1 ] ) == 0x82 : return ( data , "FFUUUUU" ) if yubico_util . ord_byte ( data [ - 1 ] ) == 0x83 : return ( data , "UKKKKKK" ) if yubico_util . ord_byte ( data [ - 1 ] ) == 0x84 : return ( data , "KKKKKKK" ) if yubico_util . ord_byte ( data [ - 1 ] ) == 0x85 : return ( data , "KKKAAAA" ) if yubico_util . ord_byte ( data [ - 1 ] ) == 0x86 : return ( data , "AAlETCr" ) if yubico_util . ord_byte ( data [ - 1 ] ) == 0x87 : return ( data , "rCRaaaa" ) if yubico_util . ord_byte ( data [ - 1 ] ) == 0x88 : return ( data , 'aa' ) if yubico_util . ord_byte ( data [ - 1 ] ) == 0x89 : return ( data , " Scr" ) return ( data , '' )
|
Annotate a frames data if debug is True .
|
10,233
|
def write_ndef ( self , ndef , slot = 1 ) : if not self . capabilities . have_nfc_ndef ( slot ) : raise yubikey_base . YubiKeyVersionError ( "NDEF slot %i unsupported in %s" % ( slot , self ) ) return self . _device . _write_config ( ndef , _NDEF_SLOTS [ slot ] )
|
Write an NDEF tag configuration to the YubiKey NEO .
|
10,234
|
def write_device_config ( self , device_config ) : if not self . capabilities . have_usb_mode ( device_config . _mode ) : raise yubikey_base . YubiKeyVersionError ( "USB mode: %02x not supported for %s" % ( device_config . _mode , self ) ) return self . _device . _write_config ( device_config , SLOT . DEVICE_CONFIG )
|
Write a DEVICE_CONFIG to the YubiKey NEO .
|
10,235
|
def text ( self , encoding = 'UTF-8' , language = 'en' ) : self . ndef_type = _NDEF_TEXT_TYPE self . ndef_text_lang = language self . ndef_text_enc = encoding return self
|
Configure parameters for NDEF type TEXT .
|
10,236
|
def type ( self , url = False , text = False , other = None ) : if ( url , text , other ) == ( True , False , None ) : self . ndef_type = _NDEF_URI_TYPE elif ( url , text , other ) == ( False , True , None ) : self . ndef_type = _NDEF_TEXT_TYPE elif ( url , text , type ( other ) ) == ( False , False , int ) : self . ndef_type = other else : raise YubiKeyNEO_USBHIDError ( "Bad or conflicting NDEF type specified" ) return self
|
Change the NDEF type .
|
10,237
|
def _encode_ndef_uri_type ( self , data ) : t = 0x0 for ( code , prefix ) in uri_identifiers : if data [ : len ( prefix ) ] . decode ( 'latin-1' ) . lower ( ) == prefix : t = code data = data [ len ( prefix ) : ] break data = yubico_util . chr_byte ( t ) + data return data
|
Implement NDEF URI Identifier Code .
|
10,238
|
def _encode_ndef_text_params ( self , data ) : status = len ( self . ndef_text_lang ) if self . ndef_text_enc == 'UTF16' : status = status & 0b10000000 return yubico_util . chr_byte ( status ) + self . ndef_text_lang + data
|
Prepend language and encoding information to data according to nfcforum - ts - rtd - text - 1 - 0 . pdf
|
10,239
|
def find_key ( debug = False , skip = 0 ) : try : hid_device = YubiKeyHIDDevice ( debug , skip ) yk_version = hid_device . status ( ) . ykver ( ) if ( 2 , 1 , 4 ) <= yk_version <= ( 2 , 1 , 9 ) : return YubiKeyNEO_USBHID ( debug , skip , hid_device ) if yk_version < ( 3 , 0 , 0 ) : return YubiKeyUSBHID ( debug , skip , hid_device ) if yk_version < ( 4 , 0 , 0 ) : return YubiKeyNEO_USBHID ( debug , skip , hid_device ) return YubiKey4_USBHID ( debug , skip , hid_device ) except YubiKeyUSBHIDError as inst : if 'No USB YubiKey found' in str ( inst ) : raise YubiKeyError ( 'No YubiKey found' ) else : raise
|
Locate a connected YubiKey . Throws an exception if none is found .
|
10,240
|
def norm_package_version ( version ) : if version : version = ',' . join ( v . strip ( ) for v in version . split ( ',' ) ) . strip ( ) if version . startswith ( '(' ) and version . endswith ( ')' ) : version = version [ 1 : - 1 ] version = '' . join ( v for v in version if v . strip ( ) ) else : version = '' return version
|
Normalize a version by removing extra spaces and parentheses .
|
10,241
|
def split_spec ( spec , sep ) : parts = spec . rsplit ( sep , 1 ) spec_start = parts [ 0 ] . strip ( ) spec_end = '' if len ( parts ) == 2 : spec_end = parts [ - 1 ] . strip ( ) return spec_start , spec_end
|
Split a spec by separator and return stripped start and end parts .
|
10,242
|
def parse_specification ( spec ) : name , extras , const = spec , [ ] , '' spec = ' ' . join ( p for p in spec . split ( ' ' ) if p ) . strip ( ) spec , marker = split_spec ( spec , ';' ) spec , url = split_spec ( spec , '@' ) r = PARTIAL_PYPI_SPEC_PATTERN . match ( spec ) if r : name = r . group ( 'name' ) extras = r . group ( 'extras' ) extras = [ e . strip ( ) for e in extras . split ( ',' ) if e ] if extras else [ ] const = r . group ( 'constraints' ) const = '' . join ( c for c in const . split ( ' ' ) if c ) . strip ( ) if const . startswith ( '(' ) and const . endswith ( ')' ) : const = const [ 1 : - 1 ] return name , extras , const , marker , url
|
Parse a requirement from a python distribution metadata and return a tuple with name extras constraints marker and url components .
|
10,243
|
def get_header_description ( filedata ) : python_version = sys . version_info . major if python_version == 3 : filedata = Parser ( ) . parsestr ( filedata ) else : filedata = Parser ( ) . parsestr ( filedata . encode ( "UTF-8" , "replace" ) ) payload = filedata . get_payload ( ) lines = payload . split ( '\n' ) while True : if lines and lines [ - 1 ] == '' : lines . pop ( ) else : break return '\n' . join ( lines )
|
Get description from metadata file and remove any empty lines at end .
|
10,244
|
def check_server ( self ) : msg = 'API server not found. Please check your API url configuration.' try : response = self . session . head ( self . domain ) except Exception as e : raise_from ( errors . ServerError ( msg ) , e ) try : self . _check_response ( response ) except errors . NotFound as e : raise raise_from ( errors . ServerError ( msg ) , e )
|
Checks if the server is reachable and throws an exception if it isn t
|
10,245
|
def _authenticate ( self , auth , application , application_url = None , for_user = None , scopes = None , created_with = None , max_age = None , strength = 'strong' , fail_if_already_exists = False , hostname = platform . node ( ) ) : url = '%s/authentications' % ( self . domain ) payload = { "scopes" : scopes , "note" : application , "note_url" : application_url , 'hostname' : hostname , 'user' : for_user , 'max-age' : max_age , 'created_with' : None , 'strength' : strength , 'fail-if-exists' : fail_if_already_exists } data , headers = jencode ( payload ) res = self . session . post ( url , auth = auth , data = data , headers = headers ) self . _check_response ( res ) res = res . json ( ) token = res [ 'token' ] self . session . headers . update ( { 'Authorization' : 'token %s' % ( token ) } ) return token
|
Use basic authentication to create an authentication token using the interface below . With this technique a username and password need not be stored permanently and the user can revoke access at any time .
|
10,246
|
def authentication ( self ) : url = '%s/authentication' % ( self . domain ) res = self . session . get ( url ) self . _check_response ( res ) return res . json ( )
|
Retrieve information on the current authentication token
|
10,247
|
def remove_authentication ( self , auth_name = None , organization = None ) : if auth_name : if organization : url = '%s/authentications/org/%s/name/%s' % ( self . domain , organization , auth_name ) else : url = '%s/authentications/name/%s' % ( self . domain , auth_name ) else : url = '%s/authentications' % ( self . domain , ) res = self . session . delete ( url ) self . _check_response ( res , [ 201 ] )
|
Remove the current authentication or the one given by auth_name
|
10,248
|
def user_packages ( self , login = None , platform = None , package_type = None , type_ = None , access = None ) : if login : url = '{0}/packages/{1}' . format ( self . domain , login ) else : url = '{0}/packages' . format ( self . domain ) arguments = collections . OrderedDict ( ) if platform : arguments [ 'platform' ] = platform if package_type : arguments [ 'package_type' ] = package_type if type_ : arguments [ 'type' ] = type_ if access : arguments [ 'access' ] = access res = self . session . get ( url , params = arguments ) self . _check_response ( res ) return res . json ( )
|
Returns a list of packages for a given user and optionally filter by platform package_type and type_ .
|
10,249
|
def package ( self , login , package_name ) : url = '%s/package/%s/%s' % ( self . domain , login , package_name ) res = self . session . get ( url ) self . _check_response ( res ) return res . json ( )
|
Get information about a specific package
|
10,250
|
def add_package ( self , login , package_name , summary = None , license = None , public = True , license_url = None , license_family = None , attrs = None , package_type = None ) : url = '%s/package/%s/%s' % ( self . domain , login , package_name ) attrs = attrs or { } attrs [ 'summary' ] = summary attrs [ 'package_types' ] = [ package_type ] attrs [ 'license' ] = { 'name' : license , 'url' : license_url , 'family' : license_family , } payload = dict ( public = bool ( public ) , publish = False , public_attrs = dict ( attrs or { } ) ) data , headers = jencode ( payload ) res = self . session . post ( url , data = data , headers = headers ) self . _check_response ( res ) return res . json ( )
|
Add a new package to a users account
|
10,251
|
def release ( self , login , package_name , version ) : url = '%s/release/%s/%s/%s' % ( self . domain , login , package_name , version ) res = self . session . get ( url ) self . _check_response ( res ) return res . json ( )
|
Get information about a specific release
|
10,252
|
def remove_release ( self , username , package_name , version ) : url = '%s/release/%s/%s/%s' % ( self . domain , username , package_name , version ) res = self . session . delete ( url ) self . _check_response ( res , [ 201 ] ) return
|
remove a release and all files under it
|
10,253
|
def add_release ( self , login , package_name , version , requirements , announce , release_attrs ) : url = '%s/release/%s/%s/%s' % ( self . domain , login , package_name , version ) if not release_attrs : release_attrs = { } payload = { 'requirements' : requirements , 'announce' : announce , 'description' : None , } payload . update ( release_attrs ) data , headers = jencode ( payload ) res = self . session . post ( url , data = data , headers = headers ) self . _check_response ( res ) return res . json ( )
|
Add a new release to a package .
|
10,254
|
def download ( self , login , package_name , release , basename , md5 = None ) : url = '%s/download/%s/%s/%s/%s' % ( self . domain , login , package_name , release , basename ) if md5 : headers = { 'ETag' : md5 , } else : headers = { } res = self . session . get ( url , headers = headers , allow_redirects = False ) self . _check_response ( res , allowed = [ 200 , 302 , 304 ] ) if res . status_code == 200 : return res elif res . status_code == 304 : return None elif res . status_code == 302 : res2 = requests . get ( res . headers [ 'location' ] , stream = True ) return res2
|
Download a package distribution
|
10,255
|
def upload ( self , login , package_name , release , basename , fd , distribution_type , description = '' , md5 = None , size = None , dependencies = None , attrs = None , channels = ( 'main' , ) , callback = None ) : url = '%s/stage/%s/%s/%s/%s' % ( self . domain , login , package_name , release , quote ( basename ) ) if attrs is None : attrs = { } if not isinstance ( attrs , dict ) : raise TypeError ( 'argument attrs must be a dictionary' ) payload = dict ( distribution_type = distribution_type , description = description , attrs = attrs , dependencies = dependencies , channels = channels ) data , headers = jencode ( payload ) res = self . session . post ( url , data = data , headers = headers ) self . _check_response ( res ) obj = res . json ( ) s3url = obj [ 'post_url' ] s3data = obj [ 'form_data' ] if md5 is None : _hexmd5 , b64md5 , size = compute_hash ( fd , size = size ) elif size is None : spos = fd . tell ( ) fd . seek ( 0 , os . SEEK_END ) size = fd . tell ( ) - spos fd . seek ( spos ) s3data [ 'Content-Length' ] = size s3data [ 'Content-MD5' ] = b64md5 data_stream , headers = stream_multipart ( s3data , files = { 'file' : ( basename , fd ) } , callback = callback ) request_method = self . session if s3url . startswith ( self . domain ) else requests s3res = request_method . post ( s3url , data = data_stream , verify = self . session . verify , timeout = 10 * 60 * 60 , headers = headers ) if s3res . status_code != 201 : logger . info ( s3res . text ) logger . info ( '' ) logger . info ( '' ) raise errors . BinstarError ( 'Error uploading package' , s3res . status_code ) url = '%s/commit/%s/%s/%s/%s' % ( self . domain , login , package_name , release , quote ( basename ) ) payload = dict ( dist_id = obj [ 'dist_id' ] ) data , headers = jencode ( payload ) res = self . session . post ( url , data = data , headers = headers ) self . _check_response ( res ) return res . json ( )
|
Upload a new distribution to a package release .
|
10,256
|
def transform_conda_deps ( deps ) : depends = [ ] for dep in deps : dep = dep . strip ( ) name_spec = dep . split ( ' ' , 1 ) if len ( name_spec ) == 1 : name , = name_spec depends . append ( { 'name' : name , 'specs' : [ ] } ) elif len ( name_spec ) == 2 : name , spec = name_spec if spec . endswith ( '*' ) : spec = spec [ : - 1 ] match = specs_re . match ( spec ) if match : op , spec = match . groups ( ) else : op = '==' depends . append ( { 'name' : name , 'specs' : [ [ op , spec ] ] } ) elif len ( name_spec ) == 3 : name , spec , build_str = name_spec if spec . endswith ( '*' ) : spec = spec [ : - 1 ] match = specs_re . match ( spec ) if match : op , spec = match . groups ( ) else : op = '==' depends . append ( { 'name' : name , 'specs' : [ [ '==' , '%s+%s' % ( spec , build_str ) ] ] } ) return { 'depends' : depends }
|
Format dependencies into a common binstar format
|
10,257
|
def file_or_token ( value ) : if isfile ( value ) : with open ( value ) as fd : return fd . read ( ) . strip ( ) if any ( char in value for char in '/\\.' ) : raise ValueError ( ) return value
|
If value is a file path and the file exists its contents are stripped and returned otherwise value is returned .
|
10,258
|
def get_server_api(token=None, site=None, cls=None, config=None, **kwargs):
    """Build and return the anaconda server API client.

    The token is resolved in priority order: explicit argument, then the
    BINSTAR_API_TOKEN / ANACONDA_API_TOKEN environment variables, then the
    token store for the configured URL.
    """
    if not cls:
        from binstar_client import Binstar
        cls = Binstar

    config = get_config(site=site) if config is None else config
    url = config.get('url', DEFAULT_URL)
    logger.info("Using Anaconda API: %s", url)

    if token:
        logger.debug("Using token from command line args")
    elif 'BINSTAR_API_TOKEN' in os.environ:
        logger.debug("Using token from environment variable BINSTAR_API_TOKEN")
        token = os.environ['BINSTAR_API_TOKEN']
    elif 'ANACONDA_API_TOKEN' in os.environ:
        logger.debug("Using token from environment variable ANACONDA_API_TOKEN")
        token = os.environ['ANACONDA_API_TOKEN']
    else:
        token = load_token(url)

    # 'ssl_verify' is the current key; 'verify_ssl' is the legacy spelling.
    verify = config.get('ssl_verify', config.get('verify_ssl', True))
    return cls(token, domain=url, verify=verify, **kwargs)
|
Get the anaconda server api class
|
10,259
|
def get_conda_root():
    """Locate the root PREFIX of the conda installation.

    Prefers asking conda itself; when conda is not importable, infers the
    root from the layout of CONDA_PREFIX, falling back to `conda info`.
    """
    try:
        return _import_conda_root()
    except ImportError:
        envs_dir = dirname(CONDA_PREFIX)
        # An environment prefix lives at <root>/envs/<name>.
        if basename(envs_dir) == 'envs':
            return dirname(envs_dir)
        return _conda_root_from_conda_info()
|
Get the PREFIX of the conda installation .
|
10,260
|
def download(self, dist):
    """Download one distribution file into ``self.output``.

    :param dist: dict with at least 'basename' and 'version' keys.

    Fix: the original created ``dirname(filename)`` relative to the current
    working directory, yet wrote to ``self.output/filename`` — so a nested
    basename failed when the output tree did not already exist. The parent
    directory of the actual destination is created instead.
    """
    filename = dist['basename']
    requests_handle = self.aserver_api.download(
        self.username, self.notebook, dist['version'], filename)
    destination = os.path.join(self.output, filename)
    parent = os.path.dirname(destination)
    if parent and not os.path.exists(parent):
        try:
            os.makedirs(parent)
        except OSError:
            # Best effort (e.g. concurrent creation); open() below will
            # raise if the directory truly could not be created.
            pass
    with open(destination, 'wb') as fdout:
        for chunk in requests_handle.iter_content(4096):
            fdout.write(chunk)
|
Download file into location .
|
10,261
|
def ensure_output(self):
    """Create ``self.output`` (including parents) if it does not exist.

    Uses ``exist_ok=True`` instead of the original exists-then-makedirs
    pair, which raced if the directory appeared between the two calls.
    """
    os.makedirs(self.output, exist_ok=True)
|
Ensure the output directory exists.
|
10,262
|
def merge_segments(segments, exif=b""):
    """Merge *exif* into a list of JPEG segments, handling APP0/APP1.

    *exif* semantics: truthy bytes replace (or insert) the Exif APP1
    segment; ``None`` strips any existing Exif segment; the default ``b""``
    keeps an existing Exif segment but drops APP0 when both are present.
    Mutates *segments* in place and returns the joined bytes.
    """
    def is_app0(seg):
        return seg[0:2] == b"\xff\xe0"

    def is_exif_app1(seg):
        return seg[0:2] == b"\xff\xe1" and seg[4:10] == b"Exif\x00\x00"

    if is_app0(segments[1]) and is_exif_app1(segments[2]):
        # APP0 immediately followed by an Exif APP1.
        if exif:
            segments[2] = exif
            segments.pop(1)
        elif exif is None:
            segments.pop(2)
        else:
            segments.pop(1)
    elif is_app0(segments[1]):
        if exif:
            segments[1] = exif
    elif is_exif_app1(segments[1]):
        if exif:
            segments[1] = exif
        elif exif is None:
            segments.pop(1)
    elif exif:
        # No APP0/APP1 present: insert the Exif segment after SOI.
        segments.insert(1, exif)
    return b"".join(segments)
|
Merges Exif with APP0 and APP1 manipulations .
|
10,263
|
def load(cls, data):
    """Decode an exif-format UserComment payload into ``str``.

    The first ``_PREFIX_SIZE`` bytes select the character encoding; the
    remainder is the comment body, decoded with ``errors='replace'``.

    :raises ValueError: payload too short, UNDEFINED prefix, or an
        unrecognized encoding prefix.
    """
    if len(data) < cls._PREFIX_SIZE:
        raise ValueError('not enough data to decode UserComment')

    prefix, body = data[:cls._PREFIX_SIZE], data[cls._PREFIX_SIZE:]

    if prefix == cls._UNDEFINED_PREFIX:
        raise ValueError('prefix is UNDEFINED, unable to decode UserComment')

    known_prefixes = {
        cls._ASCII_PREFIX: cls.ASCII,
        cls._JIS_PREFIX: cls._JIS,
        cls._UNICODE_PREFIX: cls._UNICODE,
    }
    if prefix not in known_prefixes:
        raise ValueError('unable to determine appropriate encoding')
    return body.decode(known_prefixes[prefix], errors='replace')
|
Convert UserComment value in exif format to str .
|
10,264
|
def dump(cls, data, encoding="ascii"):
    """Encode ``str`` *data* into the exif UserComment byte format.

    Prepends the encoding-identifying prefix and encodes the body with
    ``errors='replace'``.

    :raises ValueError: when *encoding* is not one of ``cls.ENCODINGS``.
    """
    if encoding not in cls.ENCODINGS:
        raise ValueError(
            'encoding {!r} must be one of {!r}'.format(encoding, cls.ENCODINGS))

    if encoding == cls.UNICODE:
        prefix, codec = cls._UNICODE_PREFIX, cls._UNICODE
    elif encoding == cls.JIS:
        prefix, codec = cls._JIS_PREFIX, cls._JIS
    elif encoding == cls.ASCII:
        prefix, codec = cls._ASCII_PREFIX, encoding
    else:
        # Mirrors the original dict lookup failure for unmapped encodings.
        raise KeyError(encoding)
    return prefix + data.encode(codec, errors='replace')
|
Convert str to appropriate format for UserComment .
|
10,265
|
def get_fs(path):
    """Return the file-system implementation class for *path*.

    Matches the URL scheme (the part before '://', empty for plain paths)
    against the registered FILE_EXTENSIONS; defaults to FileSystem.
    """
    scheme = path.partition('://')[0] if '://' in path else ''
    for schemes, fs_class in FILE_EXTENSIONS:
        if scheme in schemes:
            return fs_class
    return FileSystem
|
Find the file system implementation for this path .
|
10,266
|
def awaitTermination(self, timeout=None):
    """Block until the streaming context stops.

    :param timeout: optional seconds after which ``self.stop`` is invoked.
    """
    loop = IOLoop.current()
    if timeout is not None:
        loop.call_later(timeout, self.stop)
    loop.start()
    IOLoop.clear_current()
|
Wait for context to stop .
|
10,267
|
def binaryRecordsStream(self, directory, recordLength=None, process_all=False):
    """Monitor *directory* and process all binary files found there.

    :param recordLength: fixed record size passed to the deserializer.
    :param process_all: also process files already present at start.
    """
    stream = FileStream(directory, process_all)
    self._on_stop_cb.append(stream.stop)
    deserializer = FileBinaryStreamDeserializer(self._context, recordLength)
    return DStream(stream, self, deserializer)
|
Monitor a directory and process all binary files .
|
10,268
|
def queueStream(self, rdds, oneAtATime=True, default=None):
    """Create a DStream that iterates over the given RDDs.

    :param rdds: iterable of RDDs (or plain values) to emit.
    :param oneAtATime: emit one queued RDD per batch interval.
    :param default: value to emit when the queue is empty.
    :raises ImportError: when tornado (providing Queue) is unavailable.

    Fix: the original logged the missing-tornado error but then fell
    through and called ``Queue()`` anyway, crashing with an unrelated
    TypeError; it now raises a clear ImportError up front.
    """
    if Queue is False:
        log.error('Run "pip install tornado" to install tornado.')
        raise ImportError('tornado is required for queueStream()')
    deserializer = QueueStreamDeserializer(self._context)
    if default is not None:
        default = deserializer(default)
    q = Queue()
    for i in rdds:
        q.put(i)
    return DStream(QueueStream(q, oneAtATime, default), self, deserializer)
|
Create stream iterable over RDDs .
|
10,269
|
def socketBinaryStream(self, hostname, port, length):
    """Create a TCP socket server emitting fixed-length binary records.

    :param length: record length passed to the binary stream.
    """
    server = TCPBinaryStream(length)
    server.listen(port, hostname)
    self._on_stop_cb.append(server.stop)
    return DStream(server, self, TCPDeserializer(self._context))
|
Create a TCP socket server for binary input .
|
10,270
|
def socketTextStream(self, hostname, port):
    """Create a TCP socket server emitting lines of text."""
    server = TCPTextStream()
    server.listen(port, hostname)
    self._on_stop_cb.append(server.stop)
    return DStream(server, self, TCPDeserializer(self._context))
|
Create a TCP socket server .
|
10,271
|
def start(self):
    """Start periodic processing of all registered streams.

    Steps every DStream once per batch interval and records this context
    as the active one.
    """
    def step_all():
        now = time.time()
        log.debug('Step {}'.format(now))
        for stream in self._dstreams:
            stream._step(now)

    # PeriodicCallback takes the interval in milliseconds.
    self._pcb = PeriodicCallback(step_all, self.batch_duration * 1000.0)
    self._pcb.start()
    self._on_stop_cb.append(self._pcb.stop)
    StreamingContext._activeContext = self
|
Start processing streams .
|
10,272
|
def stop(self, stopSparkContext=True, stopGraceFully=False):
    """Stop processing streams.

    Runs every registered stop callback (most recent first), stops the
    IO loop, and clears the active context.
    """
    while self._on_stop_cb:
        callback = self._on_stop_cb.pop()
        log.debug('calling on_stop_cb {}'.format(callback))
        callback()
    IOLoop.current().stop()
    StreamingContext._activeContext = None
|
Stop processing streams .
|
10,273
|
def textFileStream(self, directory, process_all=False):
    """Monitor *directory* and process all text files found there.

    :param process_all: also process files already present at start.
    """
    stream = FileStream(directory, process_all)
    self._on_stop_cb.append(stream.stop)
    return DStream(stream, self, FileTextStreamDeserializer(self._context))
|
Monitor a directory and process all text files .
|
10,274
|
def get_codec(path):
    """Return the codec class for *path* based on its file ending.

    Paths without an extension (no '.', or the last '.' belongs to a
    directory component) use the base Codec; unmatched endings get NoCodec.
    """
    has_extension = '.' in path and path.rfind('/') <= path.rfind('.')
    if not has_extension:
        return Codec
    for endings, codec_class in FILE_ENDINGS:
        if any(path.endswith(e) for e in endings):
            log.debug('Using {0} codec: {1}'.format(endings, path))
            return codec_class
    return NoCodec
|
Find the codec implementation for this path .
|
10,275
|
def count(self):
    """Count the number of elements in each RDD of this DStream."""
    per_partition = self.mapPartitions(lambda p: [sum(1 for _ in p)])
    return per_partition.reduce(operator.add)
|
Count elements per RDD .
|
10,276
|
def countByValue(self):
    """Apply ``countByValue`` to every RDD, yielding (value, count) RDDs."""
    def to_counts(rdd):
        return self._context._context.parallelize(rdd.countByValue().items())
    return self.transform(to_counts)
|
Apply countByValue to every RDD.
|
10,277
|
def flatMap(self, f, preservesPartitioning=False):
    """Apply *f* to every element and flatten the results."""
    def flatten(partition):
        for element in partition:
            for item in f(element):
                yield item
    return self.mapPartitions(flatten, preservesPartitioning)
|
Apply function f and flatten .
|
10,278
|
def map(self, f, preservesPartitioning=False):
    """Apply function *f* to every element."""
    mapped = self.mapPartitions(
        lambda partition: (f(element) for element in partition),
        preservesPartitioning,
    )
    # Name the resulting RDDs after the source RDD and the function.
    return mapped.transform(
        lambda rdd: rdd.setName('{}:{}'.format(rdd.prev.name(), f)))
|
Apply function f
|
10,279
|
def mapPartitions(self, f, preservesPartitioning=False):
    """Apply *f* to each whole partition."""
    mapped = self.mapPartitionsWithIndex(
        lambda index, partition: f(partition),
        preservesPartitioning,
    )
    # Name the resulting RDDs after the source RDD and the function.
    return mapped.transform(
        lambda rdd: rdd.setName('{}:{}'.format(rdd.prev.name(), f)))
|
Map partitions .
|
10,280
|
def pprint(self, num=10):
    """Pretty-print up to *num* elements of every RDD.

    Takes num+1 elements so it can show '...' when more are available.
    """
    def print_rdd(time_, rdd):
        print('>>> Time: {}'.format(time_))
        head = rdd.take(num + 1)
        for element in head[:num]:
            py_pprint.pprint(element)
        if len(head) > num:
            print('...')
        print('')

    self.foreachRDD(print_rdd)
|
Print the first num elements of each RDD .
|
10,281
|
def reduce(self, func):
    """Reduce every RDD with *func* into a single-element RDD.

    Implemented by keying all values under None, reducing by key, and
    stripping the key again.
    """
    def reduce_rdd(rdd):
        keyed = rdd.map(lambda value: (None, value))
        return keyed.reduceByKey(func).map(lambda pair: pair[1])
    return self.transform(reduce_rdd)
|
Return a new DStream where each RDD was reduced with func .
|
10,282
|
def reduceByKey(self, func, numPartitions=None):
    """Apply ``reduceByKey(func)`` to every RDD in this DStream.

    :param func: associative binary function merging the values per key.
    :param numPartitions: NOTE(review): accepted for API compatibility but
        currently ignored -- the underlying ``rdd.reduceByKey`` is called
        without it; confirm whether it should be forwarded.
    """
    return self.transform(lambda rdd: rdd.reduceByKey(func))
|
Apply reduceByKey to every RDD .
|
10,283
|
def repartition(self, numPartitions):
    """Repartition every RDD to *numPartitions* partitions."""
    def repartition_rdd(rdd):
        # Empty RDDs cannot be repartitioned; pass them through unchanged.
        if isinstance(rdd, EmptyRDD):
            return rdd
        return rdd.repartition(numPartitions)
    return self.transform(repartition_rdd)
|
Repartition every RDD .
|
10,284
|
def slice(self, begin, end):
    """Keep only RDDs whose batch time falls within [begin, end].

    Batches outside the window are replaced with an empty RDD.
    """
    def in_window(time_, rdd):
        if begin <= time_ <= end:
            return rdd
        return EmptyRDD(self._context._context)
    return self.transform(in_window)
|
Filter RDDs to between begin and end .
|
10,285
|
def union(self, other):
    """Return the union of this DStream with *other*, batch by batch."""
    return self.transformWith(
        lambda rdd_a, rdd_b: self._context._context.union((rdd_a, rdd_b)),
        other,
    )
|
Union of two DStreams .
|
10,286
|
def resolve_filenames(all_expr):
    """Resolve a comma-separated list of filename expressions.

    Each expression is dispatched to its file-system implementation; the
    resolved names are concatenated in order.
    """
    files = []
    for expr in (part.strip() for part in all_expr.split(',')):
        files.extend(fs.get_fs(expr).resolve_filenames(expr))
    log.debug('Filenames: {0}'.format(files))
    return files
|
Resolve a comma-separated filename expression into a list of files.
|
10,287
|
def database_caller_creator(self, host, port, name=None):
    """Create/open a mongodb database and return it.

    A random 'mongodb_*' name is generated when *name* is not given.
    """
    client = pymongo.MongoClient(host, port)
    db_name = name if name else 'mongodb_' + str_generator(self)
    return client[db_name]
|
creates a mongodb database returns the related connection object which will be later used to spawn the cursor
|
10,288
|
def database_caller_creator(self, host, port, name=None):
    """Open a redis connection and a non-transactional pipeline.

    :param name: redis db index; defaults to 0.
    :return: (client, pipeline) tuple.
    """
    db_index = name if name else 0
    client = redis.StrictRedis(host=host, port=port, db=db_index)
    return client, client.pipeline(transaction=False)
|
creates a redis connection object which will be later used to modify the db
|
10,289
|
def data_filler_simple_registration(self, number_of_rows, pipe):
    """Queue *number_of_rows* simple-registration hashes and execute the pipeline."""
    try:
        for row in range(number_of_rows):
            fields = {
                'id': rnd_id_generator(self),
                'email': self.faker.safe_email(),
                'password': self.faker.md5(raw_output=False),
            }
            pipe.hmset('simple_registration:%s' % row, fields)
        pipe.execute()
        logger.warning('simple_registration Commits are successful after write job!', extra=d)
    except Exception as e:
        logger.error(e, extra=d)
|
creates keys with simple registration information
|
10,290
|
def data_filler_detailed_registration(self, number_of_rows, pipe):
    """Queue *number_of_rows* detailed-registration hashes and execute the pipeline."""
    try:
        for row in range(number_of_rows):
            fields = {
                'id': rnd_id_generator(self),
                'email': self.faker.safe_email(),
                'password': self.faker.md5(raw_output=False),
                'lastname': self.faker.last_name(),
                'name': self.faker.first_name(),
                'address': self.faker.address(),
                'phone': self.faker.phone_number(),
            }
            pipe.hmset('detailed_registration:%s' % row, fields)
        pipe.execute()
        logger.warning('detailed_registration Commits are successful after write job!', extra=d)
    except Exception as e:
        logger.error(e, extra=d)
|
creates keys with detailed registration information
|
10,291
|
def data_filler_user_agent(self, number_of_rows, pipe):
    """Queue *number_of_rows* user-agent hashes and execute the pipeline."""
    try:
        for row in range(number_of_rows):
            fields = {
                'id': rnd_id_generator(self),
                'ip': self.faker.ipv4(),
                'countrycode': self.faker.country_code(),
                'useragent': self.faker.user_agent(),
            }
            pipe.hmset('user_agent:%s' % row, fields)
        pipe.execute()
        logger.warning('user_agent Commits are successful after write job!', extra=d)
    except Exception as e:
        logger.error(e, extra=d)
|
creates keys with user agent data
|
10,292
|
def data_filler_company(self, number_of_rows, pipe):
    """Queue *number_of_rows* company hashes and execute the pipeline."""
    try:
        for row in range(number_of_rows):
            fields = {
                'id': rnd_id_generator(self),
                'name': self.faker.company(),
                'date': self.faker.date(pattern="%d-%m-%Y"),
                'email': self.faker.company_email(),
                'domain': self.faker.safe_email(),
                'city': self.faker.city(),
            }
            pipe.hmset('company:%s' % row, fields)
        pipe.execute()
        logger.warning('companies Commits are successful after write job!', extra=d)
    except Exception as e:
        logger.error(e, extra=d)
|
creates keys with company data
|
10,293
|
def data_filler_customer(self, number_of_rows, pipe):
    """Queue *number_of_rows* customer hashes and execute the pipeline."""
    try:
        for row in range(number_of_rows):
            fields = {
                'id': rnd_id_generator(self),
                'name': self.faker.first_name(),
                'lastname': self.faker.last_name(),
                'address': self.faker.address(),
                'country': self.faker.country(),
                'city': self.faker.city(),
                'registry_date': self.faker.date(pattern="%d-%m-%Y"),
                'birthdate': self.faker.date(pattern="%d-%m-%Y"),
                'email': self.faker.safe_email(),
                'phone_number': self.faker.phone_number(),
                'locale': self.faker.locale(),
            }
            pipe.hmset('customer:%s' % row, fields)
        pipe.execute()
        logger.warning('customer Commits are successful after write job!', extra=d)
    except Exception as e:
        logger.error(e, extra=d)
|
creates keys with customer data
|
10,294
|
def database_caller_creator(self, number_of_rows, username, password, host, port, name=None, custom=None):
    """Create a postgresql database and return (cursor, connection) for it.

    Connects once without a database to issue CREATE DATABASE (autocommit
    is required for DDL), then reconnects against the new database. When
    *custom* is given, delegates to ``custom_db_creator`` and exits the
    process.

    :param name: database name; a random 'postgresql_*' name is generated
        when omitted.
    :raises: re-raises any connection/creation error after logging it.
    """
    cursor = None
    conn = None
    if name:
        dbname = name
    else:
        dbname = 'postgresql_' + str_generator(self).lower()
    try:
        conn = psycopg2.connect(user=username, password=password, host=host, port=port)
        # CREATE DATABASE cannot run inside a transaction block.
        conn.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)
        cur = conn.cursor()
        # NOTE(review): the identifier is %-interpolated into the DDL;
        # dbname is internally generated or caller-supplied, but
        # psycopg2.sql.Identifier would be the safe way to compose this.
        cur.execute('CREATE DATABASE %s;' % dbname)
        cur.close()
        conn.close()
        # Reconnect, this time against the freshly created database.
        conn = psycopg2.connect(user=username, password=password, host=host, port=port, database=dbname)
        cursor = conn.cursor()
        logger.warning('Database created and opened succesfully: %s' % dbname, extra=d)
    except Exception as err:
        logger.error(err, extra=d)
        raise
    if custom:
        # NOTE(review): the custom path terminates the whole process with
        # sys.exit(0) after cleanup -- surprising for library code; confirm
        # this is intentional before reusing this function elsewhere.
        self.custom_db_creator(number_of_rows, cursor, conn, custom)
        cursor.close()
        conn.close()
        sys.exit(0)
    return cursor, conn
|
creates a postgresql db returns the related connection object which will be later used to spawn the cursor
|
10,295
|
def data_filler_customer(self, number_of_rows, cursor, conn):
    """Insert *number_of_rows* fake customer rows and commit."""
    rows = []
    try:
        for _ in range(number_of_rows):
            rows.append((
                rnd_id_generator(self),
                self.faker.first_name(),
                self.faker.last_name(),
                self.faker.address(),
                self.faker.country(),
                self.faker.city(),
                self.faker.date(pattern="%d-%m-%Y"),
                self.faker.date(pattern="%d-%m-%Y"),
                self.faker.safe_email(),
                self.faker.phone_number(),
                self.faker.locale(),
            ))
        insert_sql = ("INSERT INTO customer "
                      "(id, name, lastname, address, country, city, registry_date, birthdate, email, "
                      "phone_number, locale)"
                      "VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)")
        cursor.executemany(insert_sql, rows)
        conn.commit()
        logger.warning('detailed_registration Commits are successful after write job!', extra=extra_information)
    except Exception as e:
        logger.error(e, extra=extra_information)
|
creates and fills the table with customer
|
10,296
|
def _mysqld_process_checkpoint():
    """Ensure a mysqld process is running; launch one if it is not."""
    try:
        subprocess.check_output("pgrep mysqld", shell=True)
        return  # server already running
    except Exception:
        pass
    logger.warning('Your mysql server is offline, fake2db will try to launch it now!', extra=extra_information)
    subprocess.Popen("mysqld", close_fds=True, shell=True)
    # Give the freshly spawned server a moment to come up.
    time.sleep(3)
|
this helper method checks if mysql server is available in the sys if not fires up one
|
10,297
|
def _redis_process_checkpoint(host, port):
    """Ensure a redis server is running; launch one bound to host:port if not.

    Fix: *host* and *port* were %-interpolated into a ``shell=True``
    command string, allowing shell injection from configuration values;
    the server is now spawned with a list argv and no shell.
    """
    try:
        subprocess.check_output("pgrep redis", shell=True)
        return  # server already running
    except Exception:
        pass
    logger.warning('Your redis server is offline, fake2db will try to launch it now!', extra=extra_information)
    subprocess.Popen(
        ["redis-server", "--bind", str(host), "--port", str(port)],
        close_fds=True,
    )
    # Give the freshly spawned server a moment to come up.
    time.sleep(3)
|
this helper method checks if redis server is available in the sys if not fires up one
|
10,298
|
def database_caller_creator(self, name=None):
    """Create and open a sqlite3 database; return the connection.

    A random 'sqlite_*.db' filename is generated when *name* is omitted.

    :raises DbConnException: when the database cannot be created/opened.
    """
    try:
        database = (name + '.db') if name else ('sqlite_' + str_generator(self) + '.db')
        conn = sqlite3.connect(database)
        logger.warning('Database created and opened succesfully: %s' % database, extra=d)
    except Exception:
        logger.error('Failed to connect or create database / sqlite3', extra=d)
        raise DbConnException
    return conn
|
creates a sqlite3 db returns the related connection object which will be later used to spawn the cursor
|
10,299
|
def database_caller_creator(self, name=None):
    """Create a couchdb database and return it.

    A random 'couchdb_*' name is generated (and logged) when *name* is
    omitted.
    """
    couch = couchdb.Server()
    if name:
        return couch.create(name)
    generated = 'couchdb_' + lower_str_generator(self)
    db = couch.create(generated)
    logger.warning('couchdb database created with the name: %s', generated, extra=d)
    return db
|
creates a couchdb database returns the related connection object which will be later used to spawn the cursor
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.