idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
49,900
def connect(self):
    """Open the serial port and launch the background read thread as a daemon."""
    self.serial = serial.Serial(port=self.port, baudrate=self.baudrate, timeout=self.timeout)
    self.alive = True
    # Daemon thread so it never blocks interpreter shutdown.
    reader = threading.Thread(target=self._readLoop)
    reader.daemon = True
    self.rxThread = reader
    reader.start()
Connects to the device and starts the read thread
49,901
def close(self):
    """Stop the read thread, wait for it to exit cleanly, then close the port."""
    self.alive = False          # signals _readLoop to finish its current pass
    self.rxThread.join()        # wait for the reader to drain and exit
    self.serial.close()
Stops the read thread, waits for it to exit cleanly, then closes the underlying serial port
49,902
def _readLoop(self):
    """Read thread main loop.

    Reads one byte at a time from the serial port, accumulates bytes in a
    buffer, and dispatches complete lines to self._handleLineRead(). Runs
    until self.alive is cleared; on a serial error it closes the port and
    invokes the fatal-error callback.
    """
    try:
        readTermSeq = list(self.RX_EOL_SEQ)
        readTermLen = len(readTermSeq)
        rxBuffer = []
        while self.alive:
            data = self.serial.read(1)
            if data != '':  # empty read means the port timeout expired
                rxBuffer.append(data)
                if rxBuffer[-readTermLen:] == readTermSeq:
                    # Complete EOL-terminated line received; strip the terminator.
                    line = ''.join(rxBuffer[:-readTermLen])
                    rxBuffer = []
                    if len(line) > 0:
                        self._handleLineRead(line)
                elif self._expectResponseTermSeq:
                    # A non-EOL response terminator is expected (e.g. a prompt);
                    # deliver the buffer as-is without response-term checking.
                    if rxBuffer[-len(self._expectResponseTermSeq):] == self._expectResponseTermSeq:
                        line = ''.join(rxBuffer)
                        rxBuffer = []
                        self._handleLineRead(line, checkForResponseTerm=False)
    except serial.SerialException as e:
        self.alive = False
        try:
            self.serial.close()
        except Exception:
            pass  # best-effort close: the port may already be unusable
        # NOTE(review): assumes fatalErrorCallback is always set -- confirm at call sites.
        self.fatalErrorCallback(e)
Read thread main loop Reads lines from the connected device
49,903
def _decodeTimestamp(byteIter):
    """Decode a 7-octet SMS PDU timestamp into a timezone-aware datetime."""
    raw = decodeSemiOctets(byteIter, 7)
    # Last two characters carry the timezone; the rest is YYMMDDhhmmss.
    tz = SmsPduTzInfo(raw[-2:])
    return datetime.strptime(raw[:-2], '%y%m%d%H%M%S').replace(tzinfo=tz)
Decodes a 7 - octet timestamp
49,904
def decodeUcs2(byteIter, numBytes):
    """Decode UCS2-encoded text from byteIter, up to a maximum of numBytes.

    Each character is built from a big-endian 16-bit pair. Stops early
    (without error) if the iterator is exhausted.

    Fix: the original used the Python 2-only builtin ``unichr``, which raises
    NameError on Python 3; fall back to ``chr`` there (it accepts the full
    Unicode range on Python 3).
    """
    try:
        _toChar = unichr  # Python 2
    except NameError:
        _toChar = chr     # Python 3
    userData = []
    i = 0
    try:
        while i < numBytes:
            userData.append(_toChar((next(byteIter) << 8) | next(byteIter)))
            i += 2
    except StopIteration:
        pass  # truncated input: return what was decoded so far
    return ''.join(userData)
Decodes UCS2 - encoded text from the specified byte iterator up to a maximum of numBytes
49,905
def encode(self):
    """Serialise this information element as bytes: [id, dataLength, data...]."""
    encoded = bytearray((self.id, self.dataLength))
    encoded.extend(self.data)
    return encoded
Encodes this IE and returns the resulting bytes
49,906
def parseArgsPy26():
    """Build and run the optparse-based command line parser (Python 2.6 fallback)."""
    from gsmtermlib.posoptparse import PosOptionParser, Option
    parser = PosOptionParser(description='Simple script for sending SMS messages')
    parser.add_option('-i', '--port', metavar='PORT', help='port to which the GSM modem is connected; a number or a device name.')
    parser.add_option('-b', '--baud', metavar='BAUDRATE', default=115200, help='set baud rate')
    parser.add_option('-p', '--pin', metavar='PIN', default=None, help='SIM card PIN')
    parser.add_option('-d', '--deliver', action='store_true', help='wait for SMS delivery report')
    parser.add_positional_argument(Option('--destination', metavar='DESTINATION', help='destination mobile number'))
    options, args = parser.parse_args()
    # Exactly one positional argument (the destination number) is required;
    # parser.error() exits, so falling through implies success.
    if len(args) != 1:
        parser.error('Incorrect number of arguments - please specify a DESTINATION to send to, e.g. {0} 012789456'.format(sys.argv[0]))
    options.destination = args[0]
    return options
Argument parser for Python 2 . 6
49,907
def lineMatching(regexStr, lines):
    """Return the regex match for the first line matching regexStr, or None."""
    compiled = re.compile(regexStr)
    for candidate in lines:
        match = compiled.match(candidate)
        if match:
            return match
    return None
Searches through the specified list of strings and returns the regular expression match for the first line that matches the specified regex string or None if no match was found
49,908
def lineMatchingPattern(pattern, lines):
    """Return the match for the first line matching the pre-compiled pattern, or None."""
    return next((m for m in (pattern.match(line) for line in lines) if m), None)
Searches through the specified list of strings and returns the regular expression match for the first line that matches the specified pre - compiled regex pattern or None if no match was found
49,909
def allLinesMatchingPattern(pattern, lines):
    """Return regex match objects for every line matching the pre-compiled pattern."""
    candidates = (pattern.match(line) for line in lines)
    return [m for m in candidates if m]
Like lineMatchingPattern but returns all lines that match the specified pattern
49,910
def clean(decrypted: bytes) -> str:
    r"""Strip trailing padding from *decrypted* and decode it as UTF-8.

    The final byte gives the pad length (PKCS#7-style). On Python 3 indexing
    bytes yields an int; on Python 2 it yields a 1-char str, hence the branch.

    Fix: the original contained a stray ``r`` token (the残 remnant of a
    mangled raw-docstring prefix), which made the function a syntax error;
    restored as a proper raw docstring.
    """
    last = decrypted[-1]
    pad_length = last if isinstance(last, int) else ord(last)
    return decrypted[:-pad_length].decode('utf8')
Strip padding from decrypted value.
49,911
def set_pattern_step_setpoint(self, patternnumber, stepnumber, setpointvalue):
    """Write the setpoint value for one step of a pattern (1 decimal place)."""
    _checkPatternNumber(patternnumber)
    _checkStepNumber(stepnumber)
    _checkSetpointValue(setpointvalue, self.setpoint_max)
    register = _calculateRegisterAddress('setpoint', patternnumber, stepnumber)
    self.write_register(register, setpointvalue, 1)
Set the setpoint value for a step .
49,912
def get_pattern_step_time(self, patternnumber, stepnumber):
    """Read the time value for one step of a pattern (integer)."""
    _checkPatternNumber(patternnumber)
    _checkStepNumber(stepnumber)
    register = _calculateRegisterAddress('time', patternnumber, stepnumber)
    return self.read_register(register, 0)
Get the step time .
49,913
def set_pattern_step_time(self, patternnumber, stepnumber, timevalue):
    """Write the time value for one step of a pattern (integer)."""
    _checkPatternNumber(patternnumber)
    _checkStepNumber(stepnumber)
    _checkTimeValue(timevalue, self.time_max)
    register = _calculateRegisterAddress('time', patternnumber, stepnumber)
    self.write_register(register, timevalue, 0)
Set the step time .
49,914
def get_pattern_actual_step(self, patternnumber):
    """Read the 'actual step' parameter for a given pattern."""
    _checkPatternNumber(patternnumber)
    register = _calculateRegisterAddress('actualstep', patternnumber)
    return self.read_register(register, 0)
Get the actual step parameter for a given pattern .
49,915
def set_pattern_actual_step(self, patternnumber, value):
    """Write the 'actual step' parameter for a given pattern."""
    _checkPatternNumber(patternnumber)
    _checkStepNumber(value)  # the value is itself a step number
    register = _calculateRegisterAddress('actualstep', patternnumber)
    self.write_register(register, value, 0)
Set the actual step parameter for a given pattern .
49,916
def get_pattern_additional_cycles(self, patternnumber):
    """Read the number of additional cycles for a given pattern.

    Consistency fix: pass numberOfDecimals=0 explicitly, matching the sibling
    getters (get_pattern_step_time, get_pattern_actual_step). Behavior is
    unchanged -- 0 is read_register()'s default.
    """
    _checkPatternNumber(patternnumber)
    address = _calculateRegisterAddress('cycles', patternnumber)
    return self.read_register(address, 0)
Get the number of additional cycles for a given pattern .
49,917
def set_pattern_additional_cycles(self, patternnumber, value):
    """Write the number of additional cycles (0-99) for a given pattern."""
    _checkPatternNumber(patternnumber)
    minimalmodbus._checkInt(value, minvalue=0, maxvalue=99, description='number of additional cycles')
    register = _calculateRegisterAddress('cycles', patternnumber)
    self.write_register(register, value, 0)
Set the number of additional cycles for a given pattern .
49,918
def get_pattern_link_topattern(self, patternnumber):
    """Read the linked-pattern value for a given pattern.

    Consistency fix: pass numberOfDecimals=0 explicitly, matching the sibling
    getters. Behavior is unchanged -- 0 is read_register()'s default.
    """
    _checkPatternNumber(patternnumber)
    address = _calculateRegisterAddress('linkpattern', patternnumber)
    return self.read_register(address, 0)
Get the linked pattern value for a given pattern .
49,919
def get_all_pattern_variables(self, patternnumber):
    """Return a human-readable summary of every variable in a pattern."""
    _checkPatternNumber(patternnumber)
    pieces = []
    for step in range(8):
        pieces.append('SP{0}: {1} Time{0}: {2}\n'.format(
            step,
            self.get_pattern_step_setpoint(patternnumber, step),
            self.get_pattern_step_time(patternnumber, step)))
    pieces.append('Actual step: {0}\n'.format(self.get_pattern_actual_step(patternnumber)))
    pieces.append('Additional cycles: {0}\n'.format(self.get_pattern_additional_cycles(patternnumber)))
    pieces.append('Linked pattern: {0}\n'.format(self.get_pattern_link_topattern(patternnumber)))
    return ''.join(pieces)
Get all variables for a given pattern at one time .
49,920
def set_all_pattern_variables(self, patternnumber, sp0, ti0, sp1, ti1, sp2, ti2, sp3, ti3, sp4, ti4, sp5, ti5, sp6, ti6, sp7, ti7, actual_step, additional_cycles, link_pattern):
    """Write all step setpoints and times plus the pattern-level settings in one call."""
    _checkPatternNumber(patternnumber)
    setpoints = (sp0, sp1, sp2, sp3, sp4, sp5, sp6, sp7)
    times = (ti0, ti1, ti2, ti3, ti4, ti5, ti6, ti7)
    # Same register-write order as before: all setpoints, then all times,
    # then cycles, linked pattern and finally the actual step.
    for step, sp in enumerate(setpoints):
        self.set_pattern_step_setpoint(patternnumber, step, sp)
    for step, ti in enumerate(times):
        self.set_pattern_step_time(patternnumber, step, ti)
    self.set_pattern_additional_cycles(patternnumber, additional_cycles)
    self.set_pattern_link_topattern(patternnumber, link_pattern)
    self.set_pattern_actual_step(patternnumber, actual_step)
Set all variables for a given pattern at one time .
49,921
def close(self):
    """Close this dummy serial port; raise IOError if it is already closed."""
    if VERBOSE:
        _print_out('\nDummy_serial: Closing port\n')
    if not self._isOpen:
        raise IOError('Dummy_serial: The port is already closed')
    self._isOpen = False
    self.port = None
Close a port on dummy_serial .
49,922
def write(self, inputdata):
    """Write to the dummy port: look up and store the canned response for inputdata.

    Raises TypeError (Python 3, non-bytes input) or IOError (port closed).

    Fix: the response lookup used a bare ``except:``, which would also
    swallow KeyboardInterrupt/SystemExit; only KeyError is expected here.
    """
    if VERBOSE:
        _print_out('\nDummy_serial: Writing to port. Given:' + repr(inputdata) + '\n')
    if sys.version_info[0] > 2:
        if not type(inputdata) == bytes:
            raise TypeError('The input must be type bytes. Given:' + repr(inputdata))
        inputstring = str(inputdata, encoding='latin1')
    else:
        inputstring = inputdata
    if not self._isOpen:
        raise IOError('Dummy_serial: Trying to write, but the port is not open. Given:' + repr(inputdata))
    # Unknown messages fall back to the module-wide default response.
    try:
        response = RESPONSES[inputstring]
    except KeyError:
        response = DEFAULT_RESPONSE
    self._waiting_data = response
Write to a port on dummy_serial .
49,923
def read(self, numberOfBytes):
    """Read from the dummy port, simulating pyserial timeout semantics.

    Returns up to numberOfBytes from the stored waiting data. If more bytes
    are requested than are available, sleeps for the port timeout first
    (mimicking a real serial read). Returns bytes on Python 3, str on Python 2.
    """
    if VERBOSE:
        _print_out('\nDummy_serial: Reading from port (max length {!r} bytes)'.format(numberOfBytes))
    if numberOfBytes < 0:
        raise IOError('Dummy_serial: The numberOfBytes to read must not be negative. Given: {!r}'.format(numberOfBytes))
    if not self._isOpen:
        raise IOError('Dummy_serial: Trying to read, but the port is not open.')
    if self._waiting_data == DEFAULT_RESPONSE:
        # The default response is repeatable: it is NOT consumed by reading.
        returnstring = self._waiting_data
    elif numberOfBytes == len(self._waiting_data):
        # Exact match: hand over everything and clear the buffer.
        returnstring = self._waiting_data
        self._waiting_data = NO_DATA_PRESENT
    elif numberOfBytes < len(self._waiting_data):
        # Partial read: keep the remainder for subsequent read() calls.
        if VERBOSE:
            _print_out('Dummy_serial: The numberOfBytes to read is smaller than the available data. ' + 'Some bytes will be kept for later. Available data: {!r} (length = {}), numberOfBytes: {}'.format(self._waiting_data, len(self._waiting_data), numberOfBytes))
        returnstring = self._waiting_data[:numberOfBytes]
        self._waiting_data = self._waiting_data[numberOfBytes:]
    else:
        # More requested than available: emulate a blocking read that times out.
        if VERBOSE:
            _print_out('Dummy_serial: The numberOfBytes to read is larger than the available data. ' + 'Will sleep until timeout. Available data: {!r} (length = {}), numberOfBytes: {}'.format(self._waiting_data, len(self._waiting_data), numberOfBytes))
        time.sleep(self.timeout)
        returnstring = self._waiting_data
        self._waiting_data = NO_DATA_PRESENT
    if VERBOSE:
        _print_out('Dummy_serial read return data: {!r} (has length {})\n'.format(returnstring, len(returnstring)))
    if sys.version_info[0] > 2:
        return bytes(returnstring, encoding='latin1')
    else:
        return returnstring
Read from a port on dummy_serial .
49,924
def _embedPayload(slaveaddress, mode, functioncode, payloaddata):
    """Build a request frame from the slave address, function code and payload data."""
    _checkSlaveaddress(slaveaddress)
    _checkMode(mode)
    _checkFunctioncode(functioncode, None)
    _checkString(payloaddata, description='payload')
    core = _numToOneByteString(slaveaddress) + _numToOneByteString(functioncode) + payloaddata
    if mode == MODE_ASCII:
        # ASCII mode: hex-encode the frame plus its LRC, wrapped in header/footer.
        return _ASCII_HEADER + _hexencode(core) + _hexencode(_calculateLrcString(core)) + _ASCII_FOOTER
    # RTU mode: raw frame followed by its CRC-16.
    return core + _calculateCrcString(core)
Build a request from the slaveaddress the function code and the payload data .
49,925
def _extractPayload(response, slaveaddress, mode, functioncode):
    """Extract the payload data part from the slave's response.

    Validates the frame length, the ASCII header/footer (ASCII mode only),
    the checksum (LRC for ASCII, CRC for RTU), the echoed slave address and
    the echoed function code, then slices out the payload bytes.
    Raises ValueError on any mismatch or slave-reported error.
    """
    BYTEPOSITION_FOR_ASCII_HEADER = 0
    BYTEPOSITION_FOR_SLAVEADDRESS = 0
    BYTEPOSITION_FOR_FUNCTIONCODE = 1
    NUMBER_OF_RESPONSE_STARTBYTES = 2
    NUMBER_OF_CRC_BYTES = 2
    NUMBER_OF_LRC_BYTES = 1
    BITNUMBER_FUNCTIONCODE_ERRORINDICATION = 7
    MINIMAL_RESPONSE_LENGTH_RTU = NUMBER_OF_RESPONSE_STARTBYTES + NUMBER_OF_CRC_BYTES
    MINIMAL_RESPONSE_LENGTH_ASCII = 9
    # Argument validation
    _checkString(response, description='response')
    _checkSlaveaddress(slaveaddress)
    _checkMode(mode)
    _checkFunctioncode(functioncode, None)
    plainresponse = response  # keep the raw frame for error messages
    # Validate the response length
    if mode == MODE_ASCII:
        if len(response) < MINIMAL_RESPONSE_LENGTH_ASCII:
            raise ValueError('Too short Modbus ASCII response (minimum length {} bytes). Response: {!r}'.format(MINIMAL_RESPONSE_LENGTH_ASCII, response))
    elif len(response) < MINIMAL_RESPONSE_LENGTH_RTU:
        raise ValueError('Too short Modbus RTU response (minimum length {} bytes). Response: {!r}'.format(MINIMAL_RESPONSE_LENGTH_RTU, response))
    if mode == MODE_ASCII:
        # Validate and strip the ASCII framing, then hex-decode to raw bytes.
        if response[BYTEPOSITION_FOR_ASCII_HEADER] != _ASCII_HEADER:
            raise ValueError('Did not find header ({!r}) as start of ASCII response. The plain response is: {!r}'.format(_ASCII_HEADER, response))
        elif response[-len(_ASCII_FOOTER):] != _ASCII_FOOTER:
            raise ValueError('Did not find footer ({!r}) as end of ASCII response. The plain response is: {!r}'.format(_ASCII_FOOTER, response))
        response = response[1:-2]  # strip header (1 char) and footer (2 chars)
        if len(response) % 2 != 0:
            template = 'Stripped ASCII frames should have an even number of bytes, but is {} bytes. ' + 'The stripped response is: {!r} (plain response: {!r})'
            raise ValueError(template.format(len(response), response, plainresponse))
        response = _hexdecode(response)
    # Validate the checksum (LRC in ASCII mode, CRC-16 in RTU mode)
    if mode == MODE_ASCII:
        calculateChecksum = _calculateLrcString
        numberOfChecksumBytes = NUMBER_OF_LRC_BYTES
    else:
        calculateChecksum = _calculateCrcString
        numberOfChecksumBytes = NUMBER_OF_CRC_BYTES
    receivedChecksum = response[-numberOfChecksumBytes:]
    responseWithoutChecksum = response[0:len(response) - numberOfChecksumBytes]
    calculatedChecksum = calculateChecksum(responseWithoutChecksum)
    if receivedChecksum != calculatedChecksum:
        template = 'Checksum error in {} mode: {!r} instead of {!r} . The response is: {!r} (plain response: {!r})'
        text = template.format(mode, receivedChecksum, calculatedChecksum, response, plainresponse)
        raise ValueError(text)
    # Check the echoed slave address
    responseaddress = ord(response[BYTEPOSITION_FOR_SLAVEADDRESS])
    if responseaddress != slaveaddress:
        raise ValueError('Wrong return slave address: {} instead of {}. The response is: {!r}'.format(responseaddress, slaveaddress, response))
    # Check the echoed function code; the high bit set means a slave error.
    receivedFunctioncode = ord(response[BYTEPOSITION_FOR_FUNCTIONCODE])
    if receivedFunctioncode == _setBitOn(functioncode, BITNUMBER_FUNCTIONCODE_ERRORINDICATION):
        raise ValueError('The slave is indicating an error. The response is: {!r}'.format(response))
    elif receivedFunctioncode != functioncode:
        raise ValueError('Wrong functioncode: {} instead of {}. The response is: {!r}'.format(receivedFunctioncode, functioncode, response))
    # Slice out the payload (between the start bytes and the checksum)
    firstDatabyteNumber = NUMBER_OF_RESPONSE_STARTBYTES
    if mode == MODE_ASCII:
        lastDatabyteNumber = len(response) - NUMBER_OF_LRC_BYTES
    else:
        lastDatabyteNumber = len(response) - NUMBER_OF_CRC_BYTES
    payload = response[firstDatabyteNumber:lastDatabyteNumber]
    return payload
Extract the payload data part from the slave's response.
49,926
def _predictResponseSize(mode, functioncode, payloadToSlave):
    """Calculate the number of bytes that should be received from the slave.

    Uses the function code and the request payload (which encodes how many
    bits/registers were asked for) to predict the full response frame size,
    including start bytes and checksum, for either RTU or ASCII mode.
    """
    MIN_PAYLOAD_LENGTH = 4
    BYTERANGE_FOR_GIVEN_SIZE = slice(2, 4)  # bytes 3-4 of the request payload hold the count
    NUMBER_OF_PAYLOAD_BYTES_IN_WRITE_CONFIRMATION = 4
    NUMBER_OF_PAYLOAD_BYTES_FOR_BYTECOUNTFIELD = 1
    RTU_TO_ASCII_PAYLOAD_FACTOR = 2  # each payload byte becomes two hex chars in ASCII mode
    NUMBER_OF_RTU_RESPONSE_STARTBYTES = 2
    NUMBER_OF_RTU_RESPONSE_ENDBYTES = 2
    NUMBER_OF_ASCII_RESPONSE_STARTBYTES = 5
    NUMBER_OF_ASCII_RESPONSE_ENDBYTES = 4
    _checkMode(mode)
    _checkFunctioncode(functioncode, None)
    _checkString(payloadToSlave, description='payload', minlength=MIN_PAYLOAD_LENGTH)
    if functioncode in [5, 6, 15, 16]:
        # Write confirmations echo address and value/count: fixed-size payload.
        response_payload_size = NUMBER_OF_PAYLOAD_BYTES_IN_WRITE_CONFIRMATION
    elif functioncode in [1, 2, 3, 4]:
        given_size = _twoByteStringToNum(payloadToSlave[BYTERANGE_FOR_GIVEN_SIZE])
        if functioncode == 1 or functioncode == 2:
            # Bit reads: bits are packed eight per byte, plus the bytecount field.
            number_of_inputs = given_size
            response_payload_size = NUMBER_OF_PAYLOAD_BYTES_FOR_BYTECOUNTFIELD + number_of_inputs // 8 + (1 if number_of_inputs % 8 else 0)
        elif functioncode == 3 or functioncode == 4:
            # Register reads: two bytes per register, plus the bytecount field.
            number_of_registers = given_size
            response_payload_size = NUMBER_OF_PAYLOAD_BYTES_FOR_BYTECOUNTFIELD + number_of_registers * _NUMBER_OF_BYTES_PER_REGISTER
    else:
        raise ValueError('Wrong functioncode: {}. The payload is: {!r}'.format(functioncode, payloadToSlave))
    if mode == MODE_ASCII:
        return NUMBER_OF_ASCII_RESPONSE_STARTBYTES + response_payload_size * RTU_TO_ASCII_PAYLOAD_FACTOR + NUMBER_OF_ASCII_RESPONSE_ENDBYTES
    else:
        return NUMBER_OF_RTU_RESPONSE_STARTBYTES + response_payload_size + NUMBER_OF_RTU_RESPONSE_ENDBYTES
Calculate the number of bytes that should be received from the slave .
49,927
def _calculate_minimum_silent_period(baudrate):
    """Return the seconds of bus silence required between messages (3.5 character times)."""
    _checkNumerical(baudrate, minvalue=1, description='baudrate')
    # One character on the wire occupies 11 bit-times.
    BITTIMES_PER_CHARACTERTIME = 11
    MINIMUM_SILENT_CHARACTERTIMES = 3.5
    one_bit_seconds = 1 / float(baudrate)
    return one_bit_seconds * BITTIMES_PER_CHARACTERTIME * MINIMUM_SILENT_CHARACTERTIMES
Calculate the silent period length to comply with the 3 . 5 character silence between messages .
49,928
def _numToTwoByteString(value, numberOfDecimals=0, LsbFirst=False, signed=False):
    """Pack a (possibly scaled) numerical value into a two-byte string."""
    _checkNumerical(value, description='inputvalue')
    _checkInt(numberOfDecimals, minvalue=0, description='number of decimals')
    _checkBool(LsbFirst, description='LsbFirst')
    _checkBool(signed, description='signed parameter')
    # Scale and truncate to an integer before packing.
    integer = int(float(value) * 10 ** numberOfDecimals)
    formatcode = ('<' if LsbFirst else '>') + ('h' if signed else 'H')
    packed = _pack(formatcode, integer)
    assert len(packed) == 2
    return packed
Convert a numerical value to a two - byte string possibly scaling it .
49,929
def _twoByteStringToNum(bytestring, numberOfDecimals=0, signed=False):
    """Unpack a two-byte string into a number, optionally scaling it down."""
    _checkString(bytestring, minlength=2, maxlength=2, description='bytestring')
    _checkInt(numberOfDecimals, minvalue=0, description='number of decimals')
    _checkBool(signed, description='signed parameter')
    raw = _unpack('>' + ('h' if signed else 'H'), bytestring)
    if numberOfDecimals == 0:
        return raw  # stays an int when no scaling is requested
    return raw / float(10 ** numberOfDecimals)
Convert a two - byte string to a numerical value possibly scaling it .
49,930
def _pack(formatstring, value):
    """Pack a value into a bytestring via struct, re-raising failures as ValueError.

    Returns a str (latin1-decoded on Python 3) so the rest of the module can
    treat frames uniformly as strings on both Python 2 and 3.

    Fix: the original used a bare ``except:``, which also intercepted
    SystemExit/KeyboardInterrupt; narrowed to ``except Exception``.
    """
    _checkString(formatstring, description='formatstring', minlength=1)
    try:
        result = struct.pack(formatstring, value)
    except Exception:
        errortext = 'The value to send is probably out of range, as the num-to-bytestring conversion failed.'
        errortext += ' Value: {0!r} Struct format code is: {1}'
        raise ValueError(errortext.format(value, formatstring))
    if sys.version_info[0] > 2:
        return str(result, encoding='latin1')
    return result
Pack a value into a bytestring .
49,931
def _unpack(formatstring, packed):
    """Unpack a bytestring into a value via struct, re-raising failures as ValueError.

    On Python 3 the incoming str is first latin1-encoded back to bytes,
    matching _pack()'s representation.

    Fix: the original used a bare ``except:``, which also intercepted
    SystemExit/KeyboardInterrupt; narrowed to ``except Exception``.
    """
    _checkString(formatstring, description='formatstring', minlength=1)
    _checkString(packed, description='packed string', minlength=1)
    if sys.version_info[0] > 2:
        packed = bytes(packed, encoding='latin1')
    try:
        value = struct.unpack(formatstring, packed)[0]
    except Exception:
        errortext = 'The received bytestring is probably wrong, as the bytestring-to-num conversion failed.'
        errortext += ' Bytestring: {0!r} Struct format code is: {1}'
        raise ValueError(errortext.format(packed, formatstring))
    return value
Unpack a bytestring into a value .
49,932
def _hexencode(bytestring, insert_spaces=False):
    """Hex-encode a byte string to uppercase hex characters, optionally space-separated."""
    _checkString(bytestring, description='byte string')
    separator = ' ' if insert_spaces else ''
    return separator.join('{0:02X}'.format(ord(ch)) for ch in bytestring).strip()
Convert a byte string to a hex encoded string .
49,933
def _hexdecode(hexstring):
    """Decode a hex-encoded string back to a byte string (str on both Python 2 and 3)."""
    _checkString(hexstring, description='hexstring')
    if len(hexstring) % 2 != 0:
        raise ValueError('The input hexstring must be of even length. Given: {!r}'.format(hexstring))
    if sys.version_info[0] > 2:
        # Python 3: unhexlify bytes, then re-expose as a latin1 str.
        try:
            converted = binascii.unhexlify(bytes(hexstring, 'latin1'))
        except binascii.Error as err:
            new_error_message = 'Hexdecode reported an error: {!s}. Input hexstring: {}'.format(err.args[0], hexstring)
            raise TypeError(new_error_message)
        return str(converted, encoding='latin1')
    # Python 2: the built-in 'hex' codec does the work.
    try:
        return hexstring.decode('hex')
    except TypeError as err:
        raise TypeError('Hexdecode reported an error: {}. Input hexstring: {}'.format(err.message, hexstring))
Convert a hex encoded string to a byte string .
49,934
def _bitResponseToValue(bytestring):
    """Map a single-byte bit response to the integer 0 or 1."""
    _checkString(bytestring, description='bytestring', minlength=1, maxlength=1)
    if bytestring == '\x01':
        return 1
    if bytestring == '\x00':
        return 0
    raise ValueError('Could not convert bit response to a value. Input: {0!r}'.format(bytestring))
Convert a response string to a numerical value .
49,935
def _createBitpattern(functioncode, value):
    """Return the wire bit pattern used when writing a single bit.

    Function code 5 uses a two-byte pattern; function code 15 a single byte.
    """
    _checkFunctioncode(functioncode, [5, 15])
    _checkInt(value, minvalue=0, maxvalue=1, description='inputvalue')
    if functioncode == 5:
        return '\xff\x00' if value else '\x00\x00'
    # functioncode == 15 (only other value allowed by the check above)
    return '\x01' if value else '\x00'
Create the bit pattern that is used for writing single bits .
49,936
def _twosComplement(x, bits=16):
    """Return the two's complement representation of integer x using the given bit width."""
    _checkInt(bits, minvalue=0, description='number of bits')
    _checkInt(x, description='input')
    upperlimit = 2 ** (bits - 1) - 1
    lowerlimit = -2 ** (bits - 1)
    if not lowerlimit <= x <= upperlimit:
        raise ValueError('The input value is out of range. Given value is {0}, but allowed range is {1} to {2} when using {3} bits.'.format(x, lowerlimit, upperlimit, bits))
    return x if x >= 0 else x + 2 ** bits
Calculate the two's complement of an integer.
49,937
def _setBitOn(x, bitNum):
    """Return x with bit number bitNum set to 1."""
    _checkInt(x, minvalue=0, description='input value')
    _checkInt(bitNum, minvalue=0, description='bitnumber')
    mask = 1 << bitNum
    return x | mask
Set bit bitNum to True .
49,938
def _calculateCrcString(inputstring):
    """Calculate the Modbus RTU CRC-16 of a byte string; returned LSB first."""
    _checkString(inputstring, description='input CRC string')
    crc = 0xFFFF
    for ch in inputstring:
        # Table-driven update: shift out a byte, fold in the next one.
        crc = (crc >> 8) ^ _CRC16TABLE[(crc ^ ord(ch)) & 0xFF]
    return _numToTwoByteString(crc, LsbFirst=True)
Calculate CRC - 16 for Modbus .
49,939
def _calculateLrcString(inputstring):
    """Calculate the Modbus ASCII LRC of a byte string (one byte)."""
    _checkString(inputstring, description='input LRC string')
    total = sum(ord(ch) for ch in inputstring)
    # LRC is the two's complement of the byte sum, truncated to 8 bits.
    lrc = ((total ^ 0xFF) + 1) & 0xFF
    return _numToOneByteString(lrc)
Calculate LRC for Modbus .
49,940
def _checkMode(mode):
    """Check that the Modbus mode is valid ('rtu' or 'ascii').

    Raises TypeError for non-strings and ValueError for unknown modes.

    Fix: corrected the misspelling "Unreconized" in the error message.
    """
    if not isinstance(mode, str):
        raise TypeError('The {0} should be a string. Given: {1!r}'.format("mode", mode))
    if mode not in [MODE_RTU, MODE_ASCII]:
        raise ValueError("Unrecognized Modbus mode given. Must be 'rtu' or 'ascii' but {0!r} was given.".format(mode))
Check that the Modbus mode is valid.
49,941
def _checkFunctioncode(functioncode, listOfAllowedValues=[]):
    """Validate a functioncode, optionally against a list of allowed values.

    Passing None as listOfAllowedValues skips the allowed-values check.
    NOTE(review): the mutable default [] is never mutated here, but with the
    default every functioncode is rejected -- callers appear to always pass
    an explicit list or None. Confirm before changing the default.
    """
    FUNCTIONCODE_MIN = 1
    FUNCTIONCODE_MAX = 127
    _checkInt(functioncode, FUNCTIONCODE_MIN, FUNCTIONCODE_MAX, description='functioncode')
    if listOfAllowedValues is None:
        return
    if not isinstance(listOfAllowedValues, list):
        raise TypeError('The listOfAllowedValues should be a list. Given: {0!r}'.format(listOfAllowedValues))
    for allowed in listOfAllowedValues:
        _checkInt(allowed, FUNCTIONCODE_MIN, FUNCTIONCODE_MAX, description='functioncode inside listOfAllowedValues')
    if functioncode not in listOfAllowedValues:
        raise ValueError('Wrong function code: {0}, allowed values are {1!r}'.format(functioncode, listOfAllowedValues))
Check that the given functioncode is in the listOfAllowedValues .
49,942
def _checkResponseByteCount(payload):
    """Verify that the byte count claimed in the response matches the payload length."""
    POSITION_FOR_GIVEN_NUMBER = 0
    NUMBER_OF_BYTES_TO_SKIP = 1
    _checkString(payload, minlength=1, description='payload')
    given = ord(payload[POSITION_FOR_GIVEN_NUMBER])
    counted = len(payload) - NUMBER_OF_BYTES_TO_SKIP
    if given != counted:
        errortemplate = 'Wrong given number of bytes in the response: {0}, but counted is {1} as data payload length is {2}.' + ' The data payload is: {3!r}'
        raise ValueError(errortemplate.format(given, counted, len(payload), payload))
Check that the number of bytes as given in the response is correct .
49,943
def _checkResponseRegisterAddress(payload, registeraddress):
    """Verify that the start address echoed in the response matches the commanded one.

    Fix: corrected the misspelling "adress" in the error message.
    """
    _checkString(payload, minlength=2, description='payload')
    _checkRegisteraddress(registeraddress)
    BYTERANGE_FOR_STARTADDRESS = slice(0, 2)
    bytesForStartAddress = payload[BYTERANGE_FOR_STARTADDRESS]
    receivedStartAddress = _twoByteStringToNum(bytesForStartAddress)
    if receivedStartAddress != registeraddress:
        raise ValueError('Wrong given write start address: {0}, but commanded is {1}. The data payload is: {2!r}'.format(receivedStartAddress, registeraddress, payload))
Check that the start address as given in the response is correct.
49,944
def _checkResponseNumberOfRegisters(payload, numberOfRegisters):
    """Verify that the register count echoed in a write response matches the commanded one."""
    _checkString(payload, minlength=4, description='payload')
    _checkInt(numberOfRegisters, minvalue=1, maxvalue=0xFFFF, description='numberOfRegisters')
    BYTERANGE_FOR_NUMBER_OF_REGISTERS = slice(2, 4)
    receivedCount = _twoByteStringToNum(payload[BYTERANGE_FOR_NUMBER_OF_REGISTERS])
    if receivedCount != numberOfRegisters:
        raise ValueError('Wrong number of registers to write in the response: {0}, but commanded is {1}. The data payload is: {2!r}'.format(receivedCount, numberOfRegisters, payload))
Check that the number of written registers as given in the response is correct .
49,945
def _checkResponseWriteData(payload, writedata):
    """Verify that the write data echoed in the response matches what was sent."""
    _checkString(payload, minlength=4, description='payload')
    _checkString(writedata, minlength=2, maxlength=2, description='writedata')
    BYTERANGE_FOR_WRITEDATA = slice(2, 4)
    echoed = payload[BYTERANGE_FOR_WRITEDATA]
    if echoed != writedata:
        raise ValueError('Wrong write data in the response: {0!r}, but commanded is {1!r}. The data payload is: {2!r}'.format(echoed, writedata, payload))
Check that the write data as given in the response is correct .
49,946
def _checkString(inputstring, description, minlength=0, maxlength=None):
    """Validate that inputstring is a str whose length lies within the given bounds."""
    # Validate the validator's own arguments first so error messages stay sane.
    if not isinstance(description, str):
        raise TypeError('The description should be a string. Given: {0!r}'.format(description))
    if not isinstance(inputstring, str):
        raise TypeError('The {0} should be a string. Given: {1!r}'.format(description, inputstring))
    if not isinstance(maxlength, (int, type(None))):
        raise TypeError('The maxlength must be an integer or None. Given: {0!r}'.format(maxlength))
    _checkInt(minlength, minvalue=0, maxvalue=None, description='minlength')
    if len(inputstring) < minlength:
        raise ValueError('The {0} is too short: {1}, but minimum value is {2}. Given: {3!r}'.format(description, len(inputstring), minlength, inputstring))
    if maxlength is not None:
        if maxlength < 0:
            raise ValueError('The maxlength must be positive. Given: {0}'.format(maxlength))
        if maxlength < minlength:
            raise ValueError('The maxlength must not be smaller than minlength. Given: {0} and {1}'.format(maxlength, minlength))
        if len(inputstring) > maxlength:
            raise ValueError('The {0} is too long: {1}, but maximum value is {2}. Given: {3!r}'.format(description, len(inputstring), maxlength, inputstring))
Check that the given string is valid .
49,947
def _checkInt(inputvalue, minvalue=None, maxvalue=None, description='inputvalue'):
    """Validate that inputvalue is an integer within the optional bounds."""
    if not isinstance(description, str):
        raise TypeError('The description should be a string. Given: {0!r}'.format(description))
    # 'long' exists on Python 2 only; this module targets both interpreters.
    if not isinstance(inputvalue, (int, long)):
        raise TypeError('The {0} must be an integer. Given: {1!r}'.format(description, inputvalue))
    if not isinstance(minvalue, (int, long, type(None))):
        raise TypeError('The minvalue must be an integer or None. Given: {0!r}'.format(minvalue))
    if not isinstance(maxvalue, (int, long, type(None))):
        raise TypeError('The maxvalue must be an integer or None. Given: {0!r}'.format(maxvalue))
    # Range checking is shared with the float validator.
    _checkNumerical(inputvalue, minvalue, maxvalue, description)
Check that the given integer is valid .
49,948
def _checkNumerical(inputvalue, minvalue=None, maxvalue=None, description='inputvalue'):
    """Validate that inputvalue is numeric and within the optional bounds."""
    if not isinstance(description, str):
        raise TypeError('The description should be a string. Given: {0!r}'.format(description))
    # 'long' exists on Python 2 only; this module targets both interpreters.
    if not isinstance(inputvalue, (int, long, float)):
        raise TypeError('The {0} must be numerical. Given: {1!r}'.format(description, inputvalue))
    if not isinstance(minvalue, (int, float, long, type(None))):
        raise TypeError('The minvalue must be numeric or None. Given: {0!r}'.format(minvalue))
    if not isinstance(maxvalue, (int, float, long, type(None))):
        raise TypeError('The maxvalue must be numeric or None. Given: {0!r}'.format(maxvalue))
    if minvalue is not None and maxvalue is not None:
        if maxvalue < minvalue:
            raise ValueError('The maxvalue must not be smaller than minvalue. Given: {0} and {1}, respectively.'.format(maxvalue, minvalue))
    if minvalue is not None and inputvalue < minvalue:
        raise ValueError('The {0} is too small: {1}, but minimum value is {2}.'.format(description, inputvalue, minvalue))
    if maxvalue is not None and inputvalue > maxvalue:
        raise ValueError('The {0} is too large: {1}, but maximum value is {2}.'.format(description, inputvalue, maxvalue))
Check that the given numerical value is valid .
49,949
def _checkBool(inputvalue, description='inputvalue'):
    """Validate that inputvalue is a boolean."""
    _checkString(description, minlength=1, description='description string')
    if not isinstance(inputvalue, bool):
        raise TypeError('The {0} must be boolean. Given: {1!r}'.format(description, inputvalue))
Check that the given inputvalue is a boolean .
49,950
def _getDiagnosticString():
    """Generate a diagnostic string, showing the module version, platform,
    current directory etc.

    Returns:
        A multiline string with environment information, useful for bug reports.
    """
    text = '\n## Diagnostic output from minimalmodbus ## \n\n'
    text += 'Minimalmodbus version: ' + __version__ + '\n'
    text += 'Minimalmodbus status: ' + __status__ + '\n'
    text += 'File name (with relative path): ' + __file__ + '\n'
    text += 'Full file path: ' + os.path.abspath(__file__) + '\n\n'
    text += 'pySerial version: ' + serial.VERSION + '\n'
    text += 'pySerial full file path: ' + os.path.abspath(serial.__file__) + '\n\n'
    text += 'Platform: ' + sys.platform + '\n'
    text += 'Filesystem encoding: ' + repr(sys.getfilesystemencoding()) + '\n'
    text += 'Byteorder: ' + sys.byteorder + '\n'
    text += 'Python version: ' + sys.version + '\n'
    text += 'Python version info: ' + repr(sys.version_info) + '\n'
    text += 'Python flags: ' + repr(sys.flags) + '\n'
    text += 'Python argv: ' + repr(sys.argv) + '\n'
    text += 'Python prefix: ' + repr(sys.prefix) + '\n'
    text += 'Python exec prefix: ' + repr(sys.exec_prefix) + '\n'
    text += 'Python executable: ' + repr(sys.executable) + '\n'
    # sys.long_info exists on Python 2 only; sys.float_repr_style appeared in 2.7/3.1.
    # The bare excepts are deliberate best-effort: diagnostics must never fail.
    try:
        text += 'Long info: ' + repr(sys.long_info) + '\n'
    except:
        text += 'Long info: (none)\n'
    try:
        text += 'Float repr style: ' + repr(sys.float_repr_style) + '\n\n'
    except:
        text += 'Float repr style: (none) \n\n'
    text += 'Variable __name__: ' + __name__ + '\n'
    text += 'Current directory: ' + os.getcwd() + '\n\n'
    text += 'Python path: \n'
    text += '\n'.join(sys.path) + '\n'
    text += '\n## End of diagnostic output ## \n'
    return text
Generate a diagnostic string showing the module version the platform current directory etc .
49,951
def read_bit(self, registeraddress, functioncode=2):
    """Read one bit from the slave.

    Args:
        registeraddress (int): slave register address.
        functioncode (int): Modbus function code, 1 or 2.

    Returns:
        The bit value, 0 or 1.
    """
    _checkFunctioncode(functioncode, [1, 2])
    return self._genericCommand(functioncode, registeraddress)
Read one bit from the slave .
49,952
def write_bit(self, registeraddress, value, functioncode=5):
    """Write one bit to the slave.

    Args:
        registeraddress (int): slave register address.
        value (int): 0 or 1.
        functioncode (int): Modbus function code, 5 or 15.

    Returns:
        None.
    """
    _checkFunctioncode(functioncode, [5, 15])
    _checkInt(value, minvalue=0, maxvalue=1, description='input value')
    self._genericCommand(functioncode, registeraddress, value)
Write one bit to the slave .
49,953
def read_register(self, registeraddress, numberOfDecimals=0, functioncode=3, signed=False):
    """Read an integer from one 16-bit register in the slave, possibly scaling it.

    Args:
        registeraddress (int): slave register address.
        numberOfDecimals (int): 0-10; the raw value is divided by 10**numberOfDecimals.
        functioncode (int): Modbus function code, 3 or 4.
        signed (bool): interpret the register content as two's complement.

    Returns:
        The (possibly scaled) register value (int or float).
    """
    _checkFunctioncode(functioncode, [3, 4])
    _checkInt(numberOfDecimals, minvalue=0, maxvalue=10, description='number of decimals')
    _checkBool(signed, description='signed')
    return self._genericCommand(functioncode, registeraddress, numberOfDecimals=numberOfDecimals, signed=signed)
Read an integer from one 16 - bit register in the slave possibly scaling it .
49,954
def write_register(self, registeraddress, value, numberOfDecimals=0, functioncode=16, signed=False):
    """Write an integer to one 16-bit register in the slave, possibly scaling it.

    Args:
        registeraddress (int): slave register address.
        value (numeric): value to write; multiplied by 10**numberOfDecimals before sending.
        numberOfDecimals (int): 0-10.
        functioncode (int): Modbus function code, 6 or 16.
        signed (bool): encode the value as two's complement.

    Returns:
        None.
    """
    _checkFunctioncode(functioncode, [6, 16])
    _checkInt(numberOfDecimals, minvalue=0, maxvalue=10, description='number of decimals')
    _checkBool(signed, description='signed')
    _checkNumerical(value, description='input value')
    self._genericCommand(functioncode, registeraddress, value, numberOfDecimals, signed=signed)
Write an integer to one 16 - bit register in the slave possibly scaling it .
49,955
def read_float(self, registeraddress, functioncode=3, numberOfRegisters=2):
    """Read a floating point number from the slave.

    Args:
        registeraddress (int): slave register address.
        functioncode (int): Modbus function code, 3 or 4.
        numberOfRegisters (int): 2-4 registers (a float spans several 16-bit registers).

    Returns:
        The float value.
    """
    _checkFunctioncode(functioncode, [3, 4])
    _checkInt(numberOfRegisters, minvalue=2, maxvalue=4, description='number of registers')
    return self._genericCommand(functioncode, registeraddress, numberOfRegisters=numberOfRegisters, payloadformat='float')
Read a floating point number from the slave .
49,956
def write_float(self, registeraddress, value, numberOfRegisters=2):
    """Write a floating point number to the slave.

    Uses Modbus function code 16 (write multiple registers).

    Args:
        registeraddress (int): slave register address.
        value (numeric): value to write.
        numberOfRegisters (int): 2-4 registers.

    Returns:
        None.
    """
    _checkNumerical(value, description='input value')
    _checkInt(numberOfRegisters, minvalue=2, maxvalue=4, description='number of registers')
    self._genericCommand(16, registeraddress, value, numberOfRegisters=numberOfRegisters, payloadformat='float')
Write a floating point number to the slave .
49,957
def read_string(self, registeraddress, numberOfRegisters=16, functioncode=3):
    """Read a string from the slave.

    Each 16-bit register holds two characters.

    Args:
        registeraddress (int): slave register address.
        numberOfRegisters (int): number of registers to read (>= 1).
        functioncode (int): Modbus function code, 3 or 4.

    Returns:
        The string read from the slave.
    """
    _checkFunctioncode(functioncode, [3, 4])
    _checkInt(numberOfRegisters, minvalue=1, description='number of registers for read string')
    return self._genericCommand(functioncode, registeraddress, numberOfRegisters=numberOfRegisters, payloadformat='string')
Read a string from the slave .
49,958
def write_string(self, registeraddress, textstring, numberOfRegisters=16):
    """Write a string to the slave.

    Uses Modbus function code 16. Each 16-bit register holds two characters,
    hence the maxlength of 2 * numberOfRegisters.

    Args:
        registeraddress (int): slave register address.
        textstring (str): string to write.
        numberOfRegisters (int): number of registers to use (>= 1).

    Returns:
        None.
    """
    _checkInt(numberOfRegisters, minvalue=1, description='number of registers for write string')
    _checkString(textstring, 'input string', minlength=1, maxlength=2 * numberOfRegisters)
    self._genericCommand(16, registeraddress, textstring, numberOfRegisters=numberOfRegisters, payloadformat='string')
Write a string to the slave .
49,959
def write_registers(self, registeraddress, values):
    """Write integers to 16-bit registers in the slave.

    Uses Modbus function code 16 (write multiple registers).

    Args:
        registeraddress (int): slave start register address.
        values (list of int): one value per register.

    Raises:
        TypeError: if values is not a list.

    Returns:
        None.
    """
    if not isinstance(values, list):
        raise TypeError('The "values parameter" must be a list. Given: {0!r}'.format(values))
    _checkInt(len(values), minvalue=1, description='length of input list')
    self._genericCommand(16, registeraddress, values, numberOfRegisters=len(values), payloadformat='registers')
Write integers to 16 - bit registers in the slave .
49,960
def _communicate(self, request, number_of_bytes_to_read):
    """Talk to the slave via a serial port.

    Writes the request, enforcing the Modbus RTU inter-message silent period
    since the previous read on this port, then reads back the response.

    Args:
        request (str): raw request bytes (latin1-encodable string).
        number_of_bytes_to_read (int): expected response length.

    Returns:
        The response (str, latin1-decoded on Python 3).

    Raises:
        IOError: on local-echo mismatch or when no answer is received.
    """
    _checkString(request, minlength=1, description='request')
    _checkInt(number_of_bytes_to_read)
    if self.debug:
        _print_out('\nMinimalModbus debug mode. Writing to instrument (expecting {} bytes back): {!r} ({})'.format(number_of_bytes_to_read, request, _hexlify(request)))
    if self.close_port_after_each_call:
        self.serial.open()
    # latin1 maps byte values 0-255 one-to-one, so the str round-trips losslessly.
    if sys.version_info[0] > 2:
        request = bytes(request, encoding='latin1')
    # Enforce the RTU silent period (3.5 character times) between messages.
    minimum_silent_period = _calculate_minimum_silent_period(self.serial.baudrate)
    time_since_read = time.time() - _LATEST_READ_TIMES.get(self.serial.port, 0)
    if time_since_read < minimum_silent_period:
        sleep_time = minimum_silent_period - time_since_read
        if self.debug:
            template = 'MinimalModbus debug mode. Sleeping for {:.1f} ms. ' + 'Minimum silent period: {:.1f} ms, time since read: {:.1f} ms.'
            text = template.format(sleep_time * _SECONDS_TO_MILLISECONDS, minimum_silent_period * _SECONDS_TO_MILLISECONDS, time_since_read * _SECONDS_TO_MILLISECONDS)
            _print_out(text)
        time.sleep(sleep_time)
    elif self.debug:
        template = 'MinimalModbus debug mode. No sleep required before write. ' + 'Time since previous read: {:.1f} ms, minimum silent period: {:.2f} ms.'
        text = template.format(time_since_read * _SECONDS_TO_MILLISECONDS, minimum_silent_period * _SECONDS_TO_MILLISECONDS)
        _print_out(text)
    latest_write_time = time.time()
    self.serial.write(request)
    # On RS-485 adapters without echo suppression the sent bytes appear on the
    # receive line; read and discard them before reading the real answer.
    if self.handle_local_echo:
        localEchoToDiscard = self.serial.read(len(request))
        if self.debug:
            template = 'MinimalModbus debug mode. Discarding this local echo: {!r} ({} bytes).'
            text = template.format(localEchoToDiscard, len(localEchoToDiscard))
            _print_out(text)
        if localEchoToDiscard != request:
            template = 'Local echo handling is enabled, but the local echo does not match the sent request. ' + 'Request: {!r} ({} bytes), local echo: {!r} ({} bytes).'
            text = template.format(request, len(request), localEchoToDiscard, len(localEchoToDiscard))
            raise IOError(text)
    answer = self.serial.read(number_of_bytes_to_read)
    _LATEST_READ_TIMES[self.serial.port] = time.time()
    if self.close_port_after_each_call:
        self.serial.close()
    if sys.version_info[0] > 2:
        answer = str(answer, encoding='latin1')
    if self.debug:
        template = 'MinimalModbus debug mode. Response from instrument: {!r} ({}) ({} bytes), ' + 'roundtrip time: {:.1f} ms. Timeout setting: {:.1f} ms.\n'
        text = template.format(answer, _hexlify(answer), len(answer), (_LATEST_READ_TIMES.get(self.serial.port, 0) - latest_write_time) * _SECONDS_TO_MILLISECONDS, self.serial.timeout * _SECONDS_TO_MILLISECONDS)
        _print_out(text)
    if len(answer) == 0:
        raise IOError('No communication with the instrument (no answer)')
    return answer
Talk to the slave via a serial port .
49,961
def _playsoundWin(sound, block=True):
    """Play a sound on Windows via the winmm MCI interface.

    Known to work with MP3 and WAVE files.

    Args:
        sound: path to the sound file.
        block (bool): if True, sleep until playback has finished.

    Raises:
        PlaysoundException: when an MCI command returns a non-zero error code.
    """
    from ctypes import c_buffer, windll
    from random import random
    from time import sleep
    from sys import getfilesystemencoding

    def winCommand(*command):
        # Send a single MCI command string; raise with the decoded MCI error
        # text on any non-zero return code.
        buf = c_buffer(255)
        command = ' '.join(command).encode(getfilesystemencoding())
        errorCode = int(windll.winmm.mciSendStringA(command, buf, 254, 0))
        if errorCode:
            errorBuffer = c_buffer(255)
            windll.winmm.mciGetErrorStringA(errorCode, errorBuffer, 254)
            exceptionMessage = ('\n    Error ' + str(errorCode) + ' for command:'
                                '\n        ' + command.decode() +
                                '\n    ' + errorBuffer.value.decode())
            raise PlaysoundException(exceptionMessage)
        return buf.value

    # A random alias avoids collisions when several sounds play concurrently.
    alias = 'playsound_' + str(random())
    winCommand('open "' + sound + '" alias', alias)
    winCommand('set', alias, 'time format milliseconds')
    durationInMS = winCommand('status', alias, 'length')
    winCommand('play', alias, 'from 0 to', durationInMS.decode())
    if block:
        # BUGFIX: winCommand returns bytes on Python 3, and float(bytes)
        # raises TypeError — decode before converting to float.
        sleep(float(durationInMS.decode()) / 1000.0)
Utilizes windll . winmm . Tested and known to work with MP3 and WAVE on Windows 7 with Python 2 . 7 . Probably works with more file formats . Probably works on Windows XP thru Windows 10 . Probably works with all versions of Python .
49,962
def _playsoundOSX(sound, block=True):
    """Play a sound on macOS via AppKit.NSSound.

    Known to work with MP3 and WAVE files; probably anything QuickTime supports.

    Args:
        sound: a file path or a URL (anything containing '://' is treated as a URL).
        block (bool): if True, sleep for the sound's duration.

    Raises:
        IOError: if the sound cannot be loaded.
    """
    from AppKit import NSSound
    from Foundation import NSURL
    from time import sleep
    # NSSound wants a URL; turn a bare (possibly relative) path into file://...
    if '://' not in sound:
        if not sound.startswith('/'):
            from os import getcwd
            sound = getcwd() + '/' + sound
        sound = 'file://' + sound
    url = NSURL.URLWithString_(sound)
    nssound = NSSound.alloc().initWithContentsOfURL_byReference_(url, True)
    if not nssound:
        raise IOError('Unable to load sound named: ' + sound)
    nssound.play()
    if block:
        sleep(nssound.duration())
Utilizes AppKit . NSSound . Tested and known to work with MP3 and WAVE on OS X 10 . 11 with Python 2 . 7 . Probably works with anything QuickTime supports . Probably works on OS X 10 . 5 and newer . Probably works with all versions of Python .
49,963
def _playsoundNix(sound, block=True):
    """Play a sound on Linux using GStreamer (via PyGObject).

    Args:
        sound: a file path or an http(s) URL.
        block (bool): must be True; non-blocking playback is not implemented.

    Raises:
        NotImplementedError: when block=False.
        PlaysoundException: if the GStreamer pipeline refuses to start.
    """
    if not block:
        raise NotImplementedError("block=False cannot be used on this platform yet")
    import os
    try:
        from urllib.request import pathname2url
    except ImportError:
        # Python 2 fallback.
        from urllib import pathname2url
    import gi
    gi.require_version('Gst', '1.0')
    from gi.repository import Gst
    Gst.init(None)
    playbin = Gst.ElementFactory.make('playbin', 'playbin')
    if sound.startswith(('http://', 'https://')):
        playbin.props.uri = sound
    else:
        playbin.props.uri = 'file://' + pathname2url(os.path.abspath(sound))
    set_result = playbin.set_state(Gst.State.PLAYING)
    if set_result != Gst.StateChangeReturn.ASYNC:
        raise PlaysoundException("playbin.set_state returned " + repr(set_result))
    # Block until end-of-stream, then tear the pipeline down.
    bus = playbin.get_bus()
    bus.poll(Gst.MessageType.EOS, Gst.CLOCK_TIME_NONE)
    playbin.set_state(Gst.State.NULL)
Play a sound using GStreamer .
49,964
def remove_rows_matching(df, column, match):
    """Return a copy of ``df`` without the rows whose ``column`` value equals ``match``.

    :param df: source DataFrame (not modified)
    :param column: column label to test
    :param match: value to remove
    :return: filtered DataFrame copy
    """
    keep = df[column].values != match
    return df.copy().iloc[keep, :]
Return a copy of the DataFrame in which rows whose column value equals match have been removed.
49,965
def remove_rows_containing(df, column, match):
    """Return a copy of ``df`` without the rows whose ``column`` value contains ``match``.

    Values are stringified before the substring test.

    :param df: source DataFrame (not modified)
    :param column: column label to test
    :param match: substring to search for
    :return: filtered DataFrame copy
    """
    keep = [match not in str(value) for value in df[column].values]
    return df.copy().iloc[keep, :]
Return a copy of the DataFrame in which rows whose column value contains match have been removed.
49,966
def filter_localization_probability(df, threshold=0.75):
    """Keep only rows whose 'Localization prob' is at or above ``threshold``.

    :param df: source DataFrame (not modified)
    :param threshold: minimum localization probability (default 0.75, "Class I")
    :return: filtered DataFrame copy
    """
    confident = df['Localization prob'].values >= threshold
    return df.copy().iloc[confident, :]
Remove rows with a localization probability below 0 . 75
49,967
def minimum_valid_values_in_any_group(df, levels=None, n=1, invalid=np.nan):
    """Filter DataFrame rows by requiring at least n valid values in at least one group.

    Groups are defined by the column MultiIndex levels in ``levels``; when None,
    the level named 'Group' is used if present.

    :param df: source DataFrame (not modified)
    :param levels: list of column-index level positions defining the groups
    :param n: minimum number of valid values required within one group
    :param invalid: the marker for invalid entries (default np.nan)
    :return: filtered DataFrame copy
    """
    df = df.copy()
    if levels is None:
        if 'Group' in df.columns.names:
            levels = [df.columns.names.index('Group')]
    # np.nan is a singleton in practice, so identity comparison distinguishes
    # the nan default from any user-supplied invalid marker.
    if invalid is np.nan:
        dfx = ~np.isnan(df)
    else:
        dfx = df != invalid
    # Count valid entries per group. NOTE(review): sum(axis=1, level=...) was
    # removed in pandas 2.0 — confirm the pandas version this targets.
    dfc = dfx.astype(int).sum(axis=1, level=levels)
    # Keep rows where the best group reaches the required count.
    dfm = dfc.max(axis=1) >= n
    mask = dfm.values
    return df.iloc[mask, :]
Filter DataFrame by at least n valid values in at least one group .
49,968
def search(df, match, columns=['Proteins', 'Protein names', 'Gene names']):
    """Search for a given string in a set of columns of a processed DataFrame.

    A row is kept when ``match`` occurs (as a substring, after str()) in any of
    the requested columns; columns absent from the DataFrame are skipped.

    :param df: source DataFrame (not modified)
    :param match: substring to search for
    :param columns: columns to search in (read-only; never mutated)
    :return: filtered DataFrame copy

    BUGFIX: the ``columns`` parameter was previously ignored in favour of a
    hard-coded list; it is now honoured.
    """
    df = df.copy()
    dft = df.reset_index()
    mask = np.zeros((dft.shape[0],), dtype=bool)
    for c in columns:
        if c in dft.columns:
            mask = mask | np.array([match in str(l) for l in dft[c].values])
    return df.iloc[mask]
Search for a given string in a set of columns in a processed DataFrame .
49,969
def filter_select_columns_intensity(df, prefix, columns):
    """Restrict ``df`` to the named ``columns`` plus every column starting with ``prefix``.

    :param df: source DataFrame
    :param prefix: prefix of intensity columns to retain (e.g. 'Intensity ')
    :param columns: additional column labels to retain verbatim
    :return: filtered DataFrame
    """
    pattern = '^(%s.+|%s)$' % (prefix, '|'.join(columns))
    return df.filter(regex=pattern)
Filter dataframe to include specified columns retaining any Intensity columns .
49,970
def filter_intensity(df, label="", with_multiplicity=False):
    """Keep only the Intensity columns matching ``label`` (plus all non-Intensity columns).

    :param df: source DataFrame
    :param label: optional label substring the Intensity column must contain
    :param with_multiplicity: if True, also require a multiplicity suffix in the label part
    :return: DataFrame with non-Intensity columns followed by the matching Intensity columns

    FIX: regex patterns are now raw strings; '\\d' in a non-raw string is an
    invalid escape sequence (SyntaxWarning since Python 3.12).
    """
    label += r".*__\d" if with_multiplicity else ""
    # Everything that is not an Intensity measurement is always retained.
    dft = df.filter(regex=r"^(?!Intensity).*$")
    dfi = df.filter(regex=r'^(.*Intensity.*%s.*__\d)$' % label)
    return pd.concat([dft, dfi], axis=1)
Filter to include only the Intensity values with optional specified label excluding other Intensity measurements but retaining all other columns .
49,971
def filter_ratio(df, label="", with_multiplicity=False):
    """Keep only the Ratio columns matching ``label`` (plus all non-Ratio columns).

    :param df: source DataFrame
    :param label: optional label substring the Ratio column must contain
    :param with_multiplicity: if True, also require a multiplicity suffix in the label part
    :return: DataFrame with non-Ratio columns followed by the matching Ratio columns

    FIX: regex patterns are now raw strings; '\\d' in a non-raw string is an
    invalid escape sequence (SyntaxWarning since Python 3.12).
    """
    label += r".*__\d" if with_multiplicity else ""
    # Everything that is not a Ratio measurement is always retained.
    dft = df.filter(regex=r"^(?!Ratio).*$")
    dfr = df.filter(regex=r'^(.*Ratio.*%s)$' % label)
    return pd.concat([dft, dfr], axis=1)
Filter to include only the Ratio values with optional specified label excluding other Intensity measurements but retaining all other columns .
49,972
def read_perseus(f):
    """Load a Perseus-processed data table.

    Perseus files carry four header rows; only the first (the label row) is
    kept, wrapped in a single-level MultiIndex for downstream compatibility.

    :param f: file path or file-like object
    :return: DataFrame with one-level MultiIndex columns
    """
    table = pd.read_csv(f, delimiter='\t', header=[0, 1, 2, 3], low_memory=False)
    labels = table.columns.get_level_values(0)
    table.columns = pd.MultiIndex.from_tuples([(label,) for label in labels])
    return table
Load a Perseus processed data table
49,973
def write_perseus(f, df):
    """Export a DataFrame to a Perseus-compatible file, recreating its format.

    Perseus expects a second header row of field types; the first type cell is
    prefixed with the '#!{Type}' marker. Known column labels map to fixed
    types; unknown labels get type 'E'.

    :param f: file path or file-like object to write to
    :param df: DataFrame to export (not modified)
    """
    FIELD_TYPE_MAP = {
        'Amino acid': 'C',
        'Charge': 'C',
        'Reverse': 'C',
        'Potential contaminant': 'C',
        'Multiplicity': 'C',
        'Localization prob': 'N',
        'PEP': 'N',
        'Score': 'N',
        'Delta score': 'N',
        'Score for localization': 'N',
        'Mass error [ppm]': 'N',
        'Intensity': 'N',
        'Position': 'N',
        'Proteins': 'T',
        'Positions within proteins': 'T',
        'Leading proteins': 'T',
        'Protein names': 'T',
        'Gene names': 'T',
        'Sequence window': 'T',
        'Unique identifier': 'T',
    }

    def map_field_type(n, c):
        # FIX: catch only the KeyError the dict lookup can raise, instead of a
        # bare except that would also swallow e.g. KeyboardInterrupt.
        try:
            t = FIELD_TYPE_MAP[c]
        except KeyError:
            t = "E"
        # Perseus requires the type-row marker on the first column only.
        if n == 0:
            t = "#!{Type}%s" % t
        return t

    df = df.copy()
    df.columns = pd.MultiIndex.from_tuples([(k, map_field_type(n, k)) for n, k in enumerate(df.columns)], names=["Label", "Type"])
    # Transpose trickery turns the two column-index levels into the first two
    # output rows (labels, then types), followed by the data rows.
    df = df.transpose().reset_index().transpose()
    df.to_csv(f, index=False, header=False)
Export a dataframe to Perseus ; recreating the format
49,974
def write_phosphopath_ratio(df, f, a, *args, **kwargs):
    """Write the ratio between group ``a`` and each group in ``args`` in
    Phosphopath format: one 'protein-Rsite-multiplicity-timepoint' ID column
    and one Ratio column, tab-separated.

    :param df: source DataFrame with a MultiIndex containing 'Proteins',
        'Amino acid' and 'Multiplicity' levels
    :param f: output file path or file-like object
    :param a: column group used as the ratio numerator
    :param args: column groups used as denominators (one output block each)
    :param kwargs: ``timepoint_idx`` — position within each group tuple that
        identifies the timepoint (None disables timepoint numbering)
    """
    timepoint_idx = kwargs.get('timepoint_idx', None)
    proteins = [get_protein_id(k) for k in df.index.get_level_values('Proteins')]
    amino_acids = df.index.get_level_values('Amino acid')
    positions = _get_positions(df)
    # The multiplicity level is e.g. '___1'; the final character is the count.
    multiplicity = [int(k[-1]) for k in df.index.get_level_values('Multiplicity')]
    apos = ["%s%s" % x for x in zip(amino_acids, positions)]
    phdfs = []
    # Collect and sort the distinct timepoint labels so each maps to a stable
    # 1-based number.
    tp_map = set()
    for c in args:
        tp_map.add(c[timepoint_idx])
    tp_map = sorted(tp_map)
    for c in args:
        # Ratio of the mean of group a over the mean of group c, row-wise.
        v = df[a].mean(axis=1).values / df[c].mean(axis=1).values
        tp = [1 + tp_map.index(c[timepoint_idx])]
        tps = tp * len(proteins) if timepoint_idx else [1] * len(proteins)
        prar = ["%s-%s-%d-%d" % x for x in zip(proteins, apos, multiplicity, tps)]
        phdf = pd.DataFrame(np.array(list(zip(prar, v))))
        phdf.columns = ["ID", "Ratio"]
        phdfs.append(phdf)
    pd.concat(phdfs).to_csv(f, sep='\t', index=None)
Write out the data frame ratio between two groups protein - Rsite - multiplicity - timepoint ID Ratio Q13619 - S10 - 1 - 1 0 . 5 Q9H3Z4 - S10 - 1 - 1 0 . 502 Q6GQQ9 - S100 - 1 - 1 0 . 504 Q86YP4 - S100 - 1 - 1 0 . 506 Q9H307 - S100 - 1 - 1 0 . 508 Q8NEY1 - S1000 - 1 - 1 0 . 51 Q13541 - S101 - 1 - 1 0 . 512 O95785 - S1012 - 2 - 1 0 . 514 O95785 - S1017 - 2 - 1 0 . 516 Q9Y4G8 - S1022 - 1 - 1 0 . 518 P35658 - S1023 - 1 - 1 0 . 52
49,975
def write_r(df, f, sep=",", index_join="@", columns_join="."):
    """Export a DataFrame in a format easily importable to R.

    MultiIndex row/column labels are flattened to single strings by joining
    the level values with ``index_join`` / ``columns_join``.

    :param df: source DataFrame (not modified)
    :param f: output file path or file-like object
    :param sep: CSV field separator
    :param index_join: separator used when flattening the row index
    :param columns_join: separator used when flattening the column index

    BUGFIX: the flattened column labels were previously built from the *row*
    index, and the ``index_join``/``columns_join`` parameters were ignored.
    """
    def _flatten(values, joiner):
        # Tuples come from a MultiIndex; plain labels are just stringified.
        return [joiner.join(str(s) for s in v) if isinstance(v, tuple) else str(v) for v in values]

    df = df.copy()
    df.index = _flatten(df.index.values, index_join)
    df.columns = _flatten(df.columns.values, columns_join)
    df.to_csv(f, sep=sep)
Export dataframe in a format easily importable to R
49,976
def gaussian(df, width=0.3, downshift=-1.8, prefix=None):
    """Impute missing values by drawing from a down-shifted normal distribution.

    For each selected column, NaNs are replaced with draws from
    N(mean + downshift*std, (width*std)**2) computed on that column.

    :param df: source DataFrame (not modified)
    :param width: std-dev multiplier, scalar or one value per selected column
    :param downshift: mean shift in std-devs, scalar or one value per selected column
    :param prefix: if given, only columns whose label starts with this prefix are imputed
    :return: (imputed DataFrame copy, boolean DataFrame marking what was imputed)

    BUGFIX: ``width``/``downshift`` lists were previously indexed by the
    absolute column position rather than the position within the selected
    columns, causing IndexError (or the wrong value) when ``prefix`` filtered
    the columns.
    """
    df = df.copy()
    imputed = df.isnull()
    if prefix:
        mask = np.array([l.startswith(prefix) for l in df.columns.values])
        mycols = np.arange(0, df.shape[1])[mask]
    else:
        mycols = np.arange(0, df.shape[1])
    if type(width) is not list:
        width = [width] * len(mycols)
    elif len(mycols) != len(width):
        raise ValueError("Length of iterable 'width' does not match # of columns")
    if type(downshift) is not list:
        downshift = [downshift] * len(mycols)
    elif len(mycols) != len(downshift):
        raise ValueError("Length of iterable 'downshift' does not match # of columns")
    for n, i in enumerate(mycols):
        data = df.iloc[:, i]
        mask = data.isnull().values
        mean = data.mean(axis=0)
        stddev = data.std(axis=0)
        # n indexes the per-selected-column parameters; i is the frame position.
        m = mean + downshift[n] * stddev
        s = stddev * width[n]
        values = np.random.normal(loc=m, scale=s, size=df.shape[0])
        df.iloc[mask, i] = values[mask]
    return df, imputed
Impute missing values by drawing from a normal distribution
49,977
def _pca_scores(scores, pc1=0, pc2=1, fcol=None, ecol=None, marker='o', markersize=30, label_scores=None, show_covariance_ellipse=True, optimize_label_iter=OPTIMIZE_LABEL_ITER_DEFAULT, **kwargs):
    """Plot a scores plot for two principal components as an AxB scatter plot.

    :param scores: DataFrame of scores (components in rows, samples in columns)
    :param pc1, pc2: row positions of the components to plot on x and y
    :param fcol, ecol: hierarchical face/edge colour lookups (see hierarchical_match)
    :param marker: matplotlib marker style
    :param markersize: a number, a column-level name to read the size from,
        or a callable mapping a column key to a size
    :param label_scores: column-level spec for labelling individual points
    :param show_covariance_ellipse: draw a 2-sigma ellipse per group (needs >2 points)
    :param optimize_label_iter: iteration budget for adjust_text label placement
    :return: the matplotlib Axes
    """
    fig = plt.figure(figsize=(8, 8))
    ax = fig.add_subplot(1, 1, 1)
    levels = [0, 1]
    # One scatter per distinct column key (i.e. per sample group).
    for c in set(scores.columns.values):
        try:
            data = scores[c].values.reshape(2, -1)
        except:
            # Key selects something that cannot be reshaped to (2, n); skip it.
            continue
        fc = hierarchical_match(fcol, c, 'k')
        ec = hierarchical_match(ecol, c)
        if ec is None:
            ec = fc
        if type(markersize) == str:
            # markersize names a column-index level; use that level's value.
            idx = scores.columns.names.index(markersize)
            s = c[idx]
        elif callable(markersize):
            s = markersize(c)
        else:
            s = markersize
        ax.scatter(data[pc1, :], data[pc2, :], s=s, marker=marker, edgecolors=ec, c=fc)
        if show_covariance_ellipse and data.shape[1] > 2:
            cov = data[[pc1, pc2], :].T
            ellip = plot_point_cov(cov, nstd=2, linestyle='dashed', linewidth=0.5, edgecolor=ec or fc, alpha=0.8)
            ax.add_artist(ellip)
    if label_scores:
        scores_f = scores.iloc[[pc1, pc2]]
        idxs = get_index_list(scores_f.columns.names, label_scores)
        texts = []
        for n, (x, y) in enumerate(scores_f.T.values):
            t = ax.text(x, y, build_combined_label(scores_f.columns.values[n], idxs, ', '), bbox=dict(boxstyle='round,pad=0.3', fc='#ffffff', ec='none', alpha=0.6))
            texts.append(t)
        if texts and optimize_label_iter:
            # Nudge overlapping labels apart.
            adjust_text(texts, lim=optimize_label_iter)
    ax.set_xlabel(scores.index[pc1], fontsize=16)
    ax.set_ylabel(scores.index[pc2], fontsize=16)
    fig.tight_layout()
    return ax
Plot a scores plot for two principal components as AxB scatter plot .
49,978
def modifiedaminoacids(df, kind='pie'):
    """Generate a plot of relative numbers of modified amino acids in the source DataFrame.

    :param df: processed DataFrame with an 'Amino acid' index level
    :param kind: 'pie', 'bar' or 'both'
    :return: the Axes for 'pie' or 'bar'; a (bar_axes, pie_axes) tuple for 'both'

    BUGFIX: the bar branch previously ended with ``return ax`` where ``ax``
    was never defined (NameError), which also made kind='both' unreachable.
    """
    colors = ['#6baed6', '#c6dbef', '#bdbdbd']
    total_aas, quants = analysis.modifiedaminoacids(df)
    df = pd.DataFrame()
    for a, n in quants.items():
        df[a] = [n]
    df.sort_index(axis=1, inplace=True)
    if kind == 'bar' or kind == 'both':
        ax1 = df.plot(kind='bar', figsize=(7, 7), color=colors)
        ax1.set_ylabel('Number of phosphorylated amino acids')
        ax1.set_xlabel('Amino acid')
        ax1.set_xticks([])
        ylim = np.max(df.values) + 1000
        ax1.set_ylim(0, ylim)
        _bartoplabel(ax1, 100 * df.values[0], total_aas, ylim)
        ax1.set_xlim((-0.3, 0.3))
        if kind == 'bar':
            return ax1
    if kind == 'pie' or kind == 'both':
        dfp = df.T
        residues = dfp.index.values
        # Relabel slices with their percentage and absolute count.
        dfp.index = ["%.2f%% (%d)" % (100 * df[i].values[0] / total_aas, df[i].values[0]) for i in dfp.index.values]
        ax2 = dfp.plot(kind='pie', y=0, colors=colors)
        ax2.legend(residues, loc='upper left', bbox_to_anchor=(1.0, 1.0))
        ax2.set_ylabel('')
        ax2.set_xlabel('')
        ax2.figure.set_size_inches(6, 6)
        for t in ax2.texts:
            t.set_fontsize(15)
        if kind == 'pie':
            return ax2
    return ax1, ax2
Generate a plot of relative numbers of modified amino acids in source DataFrame .
49,979
def venn(df1, df2, df3=None, labels=None, ix1=None, ix2=None, ix3=None, return_intersection=False, fcols=None):
    """Plot a 2- or 3-part Venn diagram showing the overlap between 2 or 3
    pandas DataFrames (compared by their index values).

    :param df1, df2, df3: DataFrames to compare (df3 optional)
    :param labels: set labels; defaults to ["A", "B", "C"]
    :param ix1, ix2, ix3: per-frame index specs passed to _process_ix
    :param return_intersection: also return the common elements as a list
    :param fcols: mapping from label to fill colour
    :return: the Axes, or (Axes, intersection list) when return_intersection

    Raises ImportError when matplotlib-venn is not installed.
    """
    try:
        import matplotlib_venn as mplv
    except ImportError:
        raise ImportError("To plot venn diagrams, install matplotlib-venn package: pip install matplotlib-venn")
    plt.gcf().clear()
    if labels is None:
        labels = ["A", "B", "C"]
    s1 = _process_ix(df1.index, ix1)
    s2 = _process_ix(df2.index, ix2)
    if df3 is not None:
        s3 = _process_ix(df3.index, ix3)
    kwargs = {}
    if fcols:
        kwargs['set_colors'] = [fcols[l] for l in labels]
    if df3 is not None:
        vn = mplv.venn3([s1, s2, s3], set_labels=labels, **kwargs)
        intersection = s1 & s2 & s3
    else:
        vn = mplv.venn2([s1, s2], set_labels=labels, **kwargs)
        intersection = s1 & s2
    ax = plt.gca()
    if return_intersection:
        return ax, list(intersection)
    else:
        return ax
Plot a 2 or 3 - part venn diagram showing the overlap between 2 or 3 pandas DataFrames .
49,980
def sitespeptidesproteins(df, labels=None, colors=None, site_localization_probability=0.75):
    """Plot the number of sites, peptides and proteins in the dataset as a
    stacked funnel-style figure.

    :param df: processed DataFrame
    :param labels: three labels, defaults to Sites/Peptides/Proteins
    :param colors: three fill colours
    :param site_localization_probability: threshold for counting Class I sites
    :return: the matplotlib Axes
    """
    fig = plt.figure(figsize=(4, 6))
    ax = fig.add_subplot(1, 1, 1)
    shift = 0.5
    # The actual counting lives in the analysis module.
    values = analysis.sitespeptidesproteins(df, site_localization_probability)
    if labels is None:
        labels = ['Sites (Class I)', 'Peptides', 'Proteins']
    if colors is None:
        colors = ['#756bb1', '#bcbddc', '#dadaeb']
    for n, (c, l, v) in enumerate(zip(colors, labels, values)):
        # Each band is a trapezoid stacked above the previous one.
        ax.fill_between([0, 1, 2], np.array([shift, 0, shift]) + n, np.array([1 + shift, 1, 1 + shift]) + n, color=c, alpha=0.5)
        ax.text(1, 0.5 + n, "{}\n{:,}".format(l, v), ha='center', color='k', fontsize=16)
    ax.set_xticks([])
    ax.set_yticks([])
    ax.set_axis_off()
    return ax
Plot the number of sites peptides and proteins in the dataset .
49,981
def _areadist ( ax , v , xr , c , bins = 100 , by = None , alpha = 1 , label = None ) : y , x = np . histogram ( v [ ~ np . isnan ( v ) ] , bins ) x = x [ : - 1 ] if by is None : by = np . zeros ( ( bins , ) ) ax . fill_between ( x , y , by , facecolor = c , alpha = alpha , label = label ) return y
Plot the histogram distribution but as an area plot
49,982
def hierarchical_timecourse(df, cluster_cols=True, cluster_rows=False, n_col_clusters=False, n_row_clusters=False, fcol=None, z_score=0, method='ward', cmap=cm.PuOr_r, return_clusters=False, rdistance_fn=distance.pdist, cdistance_fn=distance.pdist, xlabel='Timepoint', ylabel='log$_2$ Fold Change'):
    """Hierarchical clustering of samples across a timecourse experiment,
    drawing one line-plot panel per column cluster (2x4 grid).

    :param df: source DataFrame
    :param cluster_cols, cluster_rows: cluster along each axis
    :param n_col_clusters, n_row_clusters: requested cluster counts
    :param z_score: axis for z-scoring prior to clustering
    :param cmap: unused here; kept for interface compatibility
    :param return_clusters: also return the clustered frame and cluster edges
    :return: the Figure, or (Figure, clustered DataFrame, edges)

    NOTE(review): ``method`` is accepted but 'ward' is passed to _cluster
    regardless — confirm whether that is intentional.
    """
    dfc, row_clusters, row_denD, col_clusters, col_denD, edges = _cluster(df, cluster_cols=cluster_cols, cluster_rows=cluster_rows, n_col_clusters=n_col_clusters, n_row_clusters=n_row_clusters, z_score=z_score, method='ward', rdistance_fn=rdistance_fn, cdistance_fn=cdistance_fn)
    # Reorder to dendrogram leaf order, then average replicates (levels 0,1).
    dfh = dfc.iloc[row_denD['leaves'], col_denD['leaves']]
    dfh = dfh.mean(axis=0, level=[0, 1])
    vmax = np.max(dfh.values)
    color = ScalarMappable(norm=Normalize(vmin=0, vmax=vmax), cmap=viridis)
    fig = plt.figure(figsize=(12, 6))
    edges = [0] + edges + [dfh.shape[1]]
    for n in range(len(edges) - 1):
        ax = fig.add_subplot(2, 4, n + 1)
        dfhf = dfh.iloc[:, edges[n]:edges[n + 1]]
        xpos = dfhf.index.get_level_values(1)
        mv = dfhf.mean(axis=1)
        # Colour each trace by its distance from the cluster mean profile.
        # (The comprehension variable shadows the outer loop's n.)
        distances = [distance.euclidean(mv, dfhf.values[:, n]) for n in range(dfhf.shape[1])]
        colors = [color.to_rgba(v) for v in distances]
        # Draw the most-distant traces first so typical ones sit on top.
        order = np.argsort(distances)[::-1]
        for y in order:
            ax.plot(xpos, dfhf.values[:, y], c=colors[y], alpha=0.5, lw=1)
        ax.set_xticks(xpos)
        # Only the bottom row gets x labels; only the left column gets y labels.
        if n > 3:
            ax.set_xticklabels(xpos)
            ax.set_xlabel(xlabel)
        else:
            ax.set_xticklabels([])
        if n % 4 != 0:
            ax.set_yticklabels([])
        else:
            ax.set_ylabel(ylabel)
        ax.set_ylim((-3, +3))
    fig.subplots_adjust(hspace=0.15, wspace=0.15)
    if return_clusters:
        return fig, dfh, edges
    else:
        return fig
Hierarchical clustering of samples across timecourse experiment .
49,983
def subtract_column_median(df, prefix='Intensity '):
    """Apply column-wise median normalisation to the expression columns.

    Infinities are first converted to NaN; then, for every column whose label
    starts with ``prefix``, the column median is subtracted.

    :param df: source DataFrame (not modified)
    :param prefix: label prefix selecting the expression columns
    :return: normalised DataFrame copy
    """
    df = df.copy().replace([np.inf, -np.inf], np.nan)
    selected = [col.startswith(prefix) for col in df.columns.values]
    expression = df.iloc[:, selected]
    df.iloc[:, selected] = expression - expression.median(axis=0)
    return df
Apply column - wise normalisation to expression columns .
49,984
def get_protein_id_list(df, level=0):
    """Return the complete de-duplicated list of shortform protein IDs found
    at the given index ``level`` of the DataFrame.

    :param df: source DataFrame
    :param level: index level holding the protein identifiers
    :return: list of unique shortform IDs (order not guaranteed)
    """
    ids = []
    for entry in df.index.get_level_values(level):
        ids.extend(get_protein_ids(entry))
    return list(set(ids))
Return a complete list of shortform IDs from a DataFrame
49,985
def hierarchical_match(d, k, default=None):
    """Match a key against a dict, simplifying the key one element at a time.

    The full tuple key is tried first, then progressively shorter prefixes;
    a single-element key is looked up as a scalar. Returns ``default`` when
    nothing matches (or when ``d`` is None).

    :param d: lookup dict (or None)
    :param k: key — scalar, list or tuple
    :param default: value returned when no prefix matches
    """
    if d is None:
        return default
    if type(k) != list and type(k) != tuple:
        k = [k]
    for length in range(len(k), 0, -1):
        key = tuple(k[:length])
        if len(key) == 1:
            key = key[0]
        try:
            return d[key]
        except Exception:
            continue
    return default
Match a key against a dict, simplifying the key one element at a time until a match is found.
49,986
def calculate_s0_curve(s0, minpval, maxpval, minratio, maxratio, curve_interval=0.1):
    """Calculate the s0 significance curve for a volcano plot.

    :param s0: curve stiffness parameter
    :param minpval: p-value at the curve's baseline
    :param maxpval: p-value at the curve's top
    :param minratio: ratio asymptote
    :param maxratio: maximum ratio to draw up to
    :param curve_interval: x sampling step
    :return: (x sample points, y = fn(x), the curve function fn)
    """
    log_min = -np.log10(minpval)
    log_max = -np.log10(maxpval)
    pval_span = log_max - log_min
    # x position where the curve reaches the maximum p-value.
    ax0 = (s0 + pval_span * minratio) / pval_span
    # Snap the right edge onto the sampling grid.
    edge_offset = (maxratio - ax0) % curve_interval
    max_x = maxratio - edge_offset
    if max_x > ax0:
        x = np.arange(ax0, max_x, curve_interval)
    else:
        x = np.arange(max_x, ax0, curve_interval)
    fn = lambda x: 10 ** (-s0 / (x - minratio) - log_min)
    y = fn(x)
    return x, y, fn
Calculate s0 curve for volcano plot .
49,987
def correlation(df, rowvar=False):
    """Calculate pairwise Pearson correlations using ``np.ma.corrcoef``.

    NaNs are masked out, so incomplete observations still contribute to the
    pairs where both values are present.

    :param df: source DataFrame (not modified)
    :param rowvar: if False (default) correlate columns; if True correlate rows
    :return: square correlation DataFrame labelled with the corresponding axis

    BUGFIX: the ``rowvar`` parameter was previously ignored (a literal False
    was always passed); it is now honoured, with labels taken from the
    matching axis.
    """
    df = df.copy()
    maskv = np.ma.masked_where(np.isnan(df.values), df.values)
    cdf = np.ma.corrcoef(maskv, rowvar=rowvar)
    cdf = pd.DataFrame(np.array(cdf))
    labels = df.index if rowvar else df.columns
    cdf.columns = labels
    cdf.index = labels
    cdf = cdf.sort_index(level=0, axis=1)
    cdf = cdf.sort_index(level=0)
    return cdf
Calculate column - wise Pearson correlations using numpy . ma . corrcoef
49,988
def pca(df, n_components=2, mean_center=False, **kwargs):
    """Principal Component Analysis, based on ``sklearn.decomposition.PCA``.

    Samples are taken from the columns (the frame is transposed before fitting);
    NaNs are replaced with 0.

    :param df: source DataFrame (not modified)
    :param n_components: number of components to compute
    :param mean_center: subtract the per-row mean before fitting
    :param kwargs: forwarded to sklearn's PCA
    :return: (scores DataFrame with explained variance in the labels,
              weights DataFrame indexed like df)

    BUGFIX: the missing-dependency guard used ``assert('...')``, which is a
    no-op on a truthy string; it now raises ImportError.
    """
    if not sklearn:
        raise ImportError('This library depends on scikit-learn (sklearn) to perform PCA analysis')
    from sklearn.decomposition import PCA
    df = df.copy()
    df[np.isnan(df)] = 0
    if mean_center:
        mean = np.mean(df.values, axis=0)
        df = df - mean
    pca = PCA(n_components=n_components, **kwargs)
    pca.fit(df.values.T)
    scores = pd.DataFrame(pca.transform(df.values.T)).T
    scores.index = ['Principal Component %d (%.2f%%)' % ((n + 1), pca.explained_variance_ratio_[n] * 100) for n in range(0, scores.shape[0])]
    scores.columns = df.columns
    weights = pd.DataFrame(pca.components_).T
    weights.index = df.index
    weights.columns = ['Weights on Principal Component %d' % (n + 1) for n in range(0, weights.shape[1])]
    return scores, weights
Principal Component Analysis based on sklearn . decomposition . PCA
49,989
def plsda(df, a, b, n_components=2, mean_center=False, scale=True, **kwargs):
    """Partial Least Squares Discriminant Analysis, based on
    ``sklearn.cross_decomposition.PLSRegression``.

    A binary response (0 for group ``a``, 1 for group ``b``) is regressed
    against the samples from the two column groups; NaNs are replaced with 0.

    :param df: source DataFrame (not modified)
    :param a, b: column-index keys selecting the two sample groups
    :param n_components: number of latent variables
    :param mean_center: subtract the per-row mean before fitting
    :param scale: forwarded to PLSRegression
    :param kwargs: forwarded to PLSRegression
    :return: (scores, weights, loadings) DataFrames

    BUGFIX: the missing-dependency guard used ``assert('...')``, which is a
    no-op on a truthy string; it now raises ImportError.
    """
    if not sklearn:
        raise ImportError('This library depends on scikit-learn (sklearn) to perform PLS-DA')
    from sklearn.cross_decomposition import PLSRegression
    df = df.copy()
    df[np.isnan(df)] = 0
    if mean_center:
        mean = np.mean(df.values, axis=0)
        df = df - mean
    sxa, _ = df.columns.get_loc_level(a)
    sxb, _ = df.columns.get_loc_level(b)
    dfa = df.iloc[:, sxa]
    dfb = df.iloc[:, sxb]
    dff = pd.concat([dfa, dfb], axis=1)
    # Binary class vector: 0 for group a columns, 1 for group b columns.
    y = np.ones(dff.shape[1])
    y[np.arange(dfa.shape[1])] = 0
    plsr = PLSRegression(n_components=n_components, scale=scale, **kwargs)
    plsr.fit(dff.values.T, y)
    # Scores are computed for ALL columns of df, not only the two groups.
    x_scores = plsr.transform(df.values.T)
    scores = pd.DataFrame(x_scores.T)
    scores.index = ['Latent Variable %d' % (n + 1) for n in range(0, scores.shape[0])]
    scores.columns = df.columns
    weights = pd.DataFrame(plsr.x_weights_)
    weights.index = df.index
    weights.columns = ['Weights on Latent Variable %d' % (n + 1) for n in range(0, weights.shape[1])]
    loadings = pd.DataFrame(plsr.x_loadings_)
    loadings.index = df.index
    loadings.columns = ['Loadings on Latent Variable %d' % (n + 1) for n in range(0, loadings.shape[1])]
    return scores, weights, loadings
Partial Least Squares Discriminant Analysis based on sklearn . cross_decomposition . PLSRegression
49,990
def enrichment_from_evidence(dfe, modification="Phospho (STY)"):
    """Calculate relative enrichment of peptide modifications from evidence.txt.

    :param dfe: evidence DataFrame with 'Experiment' and 'Modifications' columns
    :param modification: modification name to test for (substring match)
    :return: single-row DataFrame '% Enrichment' per experiment

    NOTE(review): sum(axis=..., level=...) was removed in pandas 2.0 —
    confirm the pandas version this targets.
    """
    dfe = dfe.reset_index().set_index('Experiment')
    # Reduce Modifications to a boolean: does the row carry the modification?
    dfe['Modifications'] = np.array([modification in m for m in dfe['Modifications']])
    dfe = dfe.set_index('Modifications', append=True)
    dfes = dfe.sum(axis=0, level=[0, 1]).T
    columns = dfes.sum(axis=1, level=0).columns
    total = dfes.sum(axis=1, level=0).values.flatten()
    modified = dfes.iloc[0, dfes.columns.get_level_values('Modifications').values].values
    enrichment = modified / total
    return pd.DataFrame([enrichment], columns=columns, index=['% Enrichment'])
Calculate relative enrichment of peptide modifications from evidence . txt .
49,991
def enrichment_from_msp(dfmsp, modification="Phospho (STY)"):
    """Calculate relative enrichment of peptide modifications from
    modificationSpecificPeptides.txt.

    :param dfmsp: modificationSpecificPeptides DataFrame. NOTE(review): the
        'Modifications' column of the caller's frame is overwritten in place.
    :param modification: modification name to test for (substring match)
    :return: single-row DataFrame '% Enrichment' per Intensity column
    """
    # Reduce Modifications to a boolean: does the row carry the modification?
    dfmsp['Modifications'] = np.array([modification in m for m in dfmsp['Modifications']])
    dfmsp = dfmsp.set_index(['Modifications'])
    dfmsp = dfmsp.filter(regex='Intensity ')
    # Zero intensities are treated as missing so they don't dilute the sums.
    dfmsp[dfmsp == 0] = np.nan
    df_r = dfmsp.sum(axis=0, level=0)
    modified = df_r.loc[True].values
    total = df_r.sum(axis=0).values
    enrichment = modified / total
    return pd.DataFrame([enrichment], columns=dfmsp.columns, index=['% Enrichment'])
Calculate relative enrichment of peptide modifications from modificationSpecificPeptides . txt .
49,992
def sitespeptidesproteins(df, site_localization_probability=0.75):
    """Generate summary counts of modified sites, peptides and proteins.

    :param df: processed dataset DataFrame with 'Sequence window' and
        'Proteins' columns
    :param site_localization_probability: minimum localization probability
        for a site to count as localized
    :return: tuple ``(n_sites, n_peptides, n_proteins)``
    """
    # Sites passing the localization-probability cutoff.
    localized_sites = filters.filter_localization_probability(
        df, site_localization_probability)['Sequence window']
    # Unique peptide sequence windows across the whole dataset.
    unique_peptides = set(df['Sequence window'])
    # Leading (first-listed) protein for each entry.
    leading_proteins = {str(entry).split(';')[0] for entry in df['Proteins']}
    return len(localized_sites), len(unique_peptides), len(leading_proteins)
Generate a summary count of modified sites, peptides, and proteins in a processed dataset DataFrame.
49,993
def modifiedaminoacids(df):
    """Calculate the number of modified amino acids in the supplied DataFrame.

    :param df: DataFrame with an 'Amino acid' column (one row per modified
        site)
    :return: tuple ``(total_aas, quants)`` — total number of rows and a dict
        mapping each amino acid to its occurrence count
    """
    # Local import keeps the file-level import block untouched.
    from collections import Counter
    amino_acids = list(df['Amino acid'].values)
    # Counter makes one O(n) pass instead of the original O(n*k)
    # `list.count` loop over the set of distinct amino acids.
    quants = dict(Counter(amino_acids))
    total_aas = len(amino_acids)
    return total_aas, quants
Calculate the number of modified amino acids in supplied DataFrame .
49,994
def build_index_from_design(df, design, remove_prefix=None, types=None, axis=1,
                            auto_convert_numeric=True, unmatched_columns='index'):
    """Build a MultiIndex from a design table.

    Each label on `axis` of `df` (after stripping `remove_prefix` strings) is
    looked up in `design` (indexed by 'Label'); the matching design row
    becomes that label's index tuple.

    :param df: DataFrame to re-index
    :param design: design table; either indexed by 'Label' or containing a
        'Label' column
    :param remove_prefix: str or list of strs stripped from each label before
        lookup
    :param types: optional dict mapping design column name -> dtype to cast to
    :param axis: 0 to rebuild the row index, 1 (default) for the columns
    :param auto_convert_numeric: attempt numeric conversion of design values
    :param unmatched_columns: what to do (axis=1 only) with labels not found
        in the design: 'index' moves them into the row index, 'drop' removes
        them, falsy keeps them with an all-None index tuple
    :return: re-indexed copy of `df`
    """
    df = df.copy()
    if 'Label' not in design.index.names:
        design = design.set_index('Label')
    if remove_prefix is None:
        remove_prefix = []
    if type(remove_prefix) is str:
        remove_prefix = [remove_prefix]
    unmatched_for_index = []
    names = design.columns.values
    idx_levels = len(names)
    indexes = []
    if auto_convert_numeric:
        # NOTE: errors="ignore" is deprecated in recent pandas; kept for
        # behavioral compatibility (leaves non-numeric columns untouched).
        design = design.apply(pd.to_numeric, errors="ignore")
        # Ensure string lookups below still match a numeric index.
        design.index = design.index.astype(str)
    if types:
        for n, t in types.items():
            if n in design.columns.values:
                design[n] = design[n].astype(t)
    for lo in df.columns.values:
        l = copy(lo)
        for s in remove_prefix:
            l = l.replace(s, '')
        l = l.strip()
        l = numeric(l)
        try:
            idx = design.loc[str(l)]
        except KeyError:
            # BUGFIX: was a bare `except:` (which also swallowed
            # KeyboardInterrupt/SystemExit); only a failed label lookup is
            # expected here.
            if unmatched_columns:
                unmatched_for_index.append(lo)
            else:
                idx = tuple([None] * idx_levels)
                indexes.append(idx)
        else:
            idx = tuple(idx.values)
            indexes.append(idx)
    if axis == 0:
        df.index = pd.MultiIndex.from_tuples(indexes, names=names)
    else:
        # Handle unmatched labels before assigning the new column index.
        if unmatched_columns == 'index':
            df = df.set_index(unmatched_for_index, append=True)
        elif unmatched_columns == 'drop':
            df = df.drop(unmatched_for_index, axis=1)
        df.columns = pd.MultiIndex.from_tuples(indexes, names=names)
        df = df.sort_index(axis=1)
    return df
Build a MultiIndex from a design table .
49,995
def build_index_from_labels(df, indices, remove_prefix=None, types=None, axis=1):
    """Build a MultiIndex from a list of labels and matching regexes.

    :param df: DataFrame to re-index
    :param indices: list of ``(name, regex)`` pairs; group 1 of each regex,
        matched against the label, supplies that level's value (None if the
        regex does not match)
    :param remove_prefix: list of prefixes; each ``prefix + " "`` is stripped
        from the label before matching
    :param types: optional dict mapping level name -> callable used to
        convert the matched string (e.g. ``int``)
    :param axis: 0 to rebuild the row index, 1 (default) for the columns
    :return: re-indexed copy of `df`
    """
    df = df.copy()
    prefixes = remove_prefix if remove_prefix is not None else []
    casts = types if types is not None else {}
    source = df.columns if axis else df.index
    tuples = []
    for label in source.get_level_values(0):
        for prefix in prefixes:
            label = label.replace(prefix + " ", '')
        entry = []
        for field, pattern in indices:
            hit = re.search(pattern, label)
            if hit:
                value = hit.group(1)
                if field in casts:
                    value = casts[field](value)
            else:
                value = None
            entry.append(value)
        tuples.append(tuple(entry))
    new_index = pd.MultiIndex.from_tuples(tuples, names=[field for field, _ in indices])
    if axis == 0:
        df.index = new_index
    else:
        df.columns = new_index
    return df
Build a MultiIndex from a list of labels and matching regex
49,996
def combine_expression_columns(df, columns_to_combine, remove_combined=True):
    """Combine pairs of expression columns, calculating the mean of each pair.

    :param df: source DataFrame (not modified)
    :param columns_to_combine: list of ``(col_a, col_b)`` label pairs
    :param remove_combined: drop the original columns after combining
    :return: DataFrame with one new mean column per pair
    """
    result = df.copy()
    for first, second in columns_to_combine:
        combined_label = "%s_(x+y)/2_%s" % (first, second)
        result[combined_label] = (result[first] + result[second]) / 2
    if remove_combined:
        originals = [label for pair in columns_to_combine for label in pair]
        result = result.drop(originals, axis=1)
    return result
Combine expression columns calculating the mean for 2 columns
49,997
def expand_side_table(df):
    """Perform the equivalent of Perseus's "expand side table": fold the
    per-multiplicity columns (suffix 1/2/3) down onto duplicate rows, adding
    a 'Multiplicity' column and a multiplicity-suffixed 'id'.

    :param df: Phospho (STY)Sites-style DataFrame with multiplicity-suffixed
        columns
    :return: expanded DataFrame with one row per (site, multiplicity)
    """
    df = df.copy()
    idx = df.index.names
    df.reset_index(inplace=True)

    def strip_multiplicity(df):
        # Drop the 4-char multiplicity suffix from every column label.
        df.columns = [c[:-4] for c in df.columns]
        return df

    def strip_multiple(s):
        # NOTE(review): the endswith test uses a 2-char suffix (' 1') but
        # strips 4 chars — these only agree if the real suffix is 4 chars
        # (e.g. MaxQuant's '___1'); confirm against the raw column labels.
        for sr in [' 1', ' 2', ' 3']:
            if s.endswith(sr):
                s = s[:-4]
        return s

    # Columns NOT ending in a multiplicity suffix form the shared base.
    base = df.filter(regex='.*(?<! \d)$')
    # Remove from the base any column that also exists in suffixed form.
    for c in df.columns.values:
        if strip_multiple(c) != c and strip_multiple(c) in list(base.columns.values):
            base.drop(strip_multiple(c), axis=1, inplace=True)
    # One copy of the base per multiplicity, tagged with its suffix.
    multi1 = df.filter(regex='^.* 1$')
    multi1 = strip_multiplicity(multi1)
    multi1['Multiplicity'] = ' 1'
    multi1 = pd.concat([multi1, base], axis=1)
    multi2 = df.filter(regex='^.* 2$')
    multi2 = strip_multiplicity(multi2)
    multi2['Multiplicity'] = ' 2'
    multi2 = pd.concat([multi2, base], axis=1)
    multi3 = df.filter(regex='^.* 3$')
    multi3 = strip_multiplicity(multi3)
    multi3['Multiplicity'] = ' 3'
    multi3 = pd.concat([multi3, base], axis=1)
    # Stack the three multiplicity frames row-wise.
    df = pd.concat([multi1, multi2, multi3], axis=0)
    # Make ids unique per multiplicity by appending the suffix.
    df['id'] = ["%s%s" % (a, b) for a, b in zip(df['id'], df['Multiplicity'])]
    if idx[0] is not None:
        df.set_index(idx, inplace=True)
    return df
Perform equivalent of expand side table in Perseus by folding Multiplicity columns down onto duplicate rows
49,998
def apply_experimental_design(df, f, prefix='Intensity '):
    """Load the MaxQuant experimental design template and use it to rename
    the data columns.

    :param df: source DataFrame (not modified)
    :param f: path to (or file object for) the tab-separated design template
        with 'Experiment' and 'Name' columns
    :param prefix: prefix stripped from each column label before lookup
    :return: DataFrame with columns renamed where a design entry matched
    """
    result = df.copy()
    design = pd.read_csv(f, sep='\t', header=0)
    design.set_index('Experiment', inplace=True)
    renamed = []
    for label in result.columns.values:
        try:
            # Labels without a design entry keep their original name.
            label = design.loc[label.replace(prefix, '')]['Name']
        except (IndexError, KeyError):
            pass
        renamed.append(label)
    result.columns = renamed
    return result
Load the experimental design template from MaxQuant and use it to apply the label names to the data columns .
49,999
def transform_expression_columns(df, fn=np.log2, prefix='Intensity '):
    """Apply a transformation to the expression columns.

    Columns whose label starts with `prefix` are passed through `fn`
    (default log2); any resulting +/- infinities are replaced with NaN.

    :param df: source DataFrame (not modified)
    :param fn: vectorized transformation applied to the selected columns
    :param prefix: column-label prefix selecting the expression columns
    :return: transformed DataFrame
    """
    result = df.copy()
    selected = np.array([label.startswith(prefix) for label in result.columns.values])
    result.iloc[:, selected] = fn(result.iloc[:, selected])
    # e.g. log2(0) yields -inf; normalise to NaN across the whole frame.
    return result.replace([np.inf, -np.inf], np.nan)
Apply transformation to expression columns .