idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
56,000
def _parse(data, obj_name, attr_map):
    """Parse XML data into a list of dicts.

    data: raw XML text/bytes; obj_name: tag name of the elements to
    extract; attr_map: mapping of python attribute name -> XML tag name.
    Returns one dict per matching element.
    """
    document = minidom.parseString(data)
    results = []
    for node in document.getElementsByTagName(obj_name):
        results.append({
            py_name: _get_minidom_tag_value(node, xml_name)
            for py_name, xml_name in attr_map.items()
        })
    return results
parse xml data into a python map
56,001
def get_all_stations(self, station_type=None):
    """Return information for all stations.

    A recognised station_type selects the typed endpoint; otherwise the
    full station list is fetched.  Returns [] on a non-200 response.
    """
    query = None
    if station_type and station_type in STATION_TYPE_TO_CODE_DICT:
        url = self.api_base_url + 'getAllStationsXML_WithStationType'
        query = {'stationType': STATION_TYPE_TO_CODE_DICT[station_type]}
    else:
        url = self.api_base_url + 'getAllStationsXML'
    response = requests.get(url, params=query, timeout=10)
    if response.status_code != 200:
        return []
    return self._parse_station_list(response.content)
Returns information of all stations .
56,002
def get_all_current_trains(self, train_type=None, direction=None):
    """Return all trains due to start in the next 10 minutes.

    train_type selects the typed endpoint (code looked up in
    STATION_TYPE_TO_CODE_DICT); direction optionally prunes the result.
    Returns [] on a non-200 response.
    """
    query = None
    if train_type:
        url = self.api_base_url + 'getCurrentTrainsXML_WithTrainType'
        query = {'TrainType': STATION_TYPE_TO_CODE_DICT[train_type]}
    else:
        url = self.api_base_url + 'getCurrentTrainsXML'
    response = requests.get(url, params=query, timeout=10)
    if response.status_code != 200:
        return []
    trains = self._parse_all_train_data(response.content)
    if direction is not None:
        return self._prune_trains(trains, direction=direction)
    return trains
Returns all trains that are due to start in the next 10 minutes
56,003
def get_station_by_name(self, station_name, num_minutes=None, direction=None,
                        destination=None, stops_at=None):
    """Return all trains due to serve station station_name.

    num_minutes restricts the look-ahead window; direction, destination
    and stops_at prune the parsed results.  Returns [] on a non-200
    response.
    """
    url = self.api_base_url + 'getStationDataByNameXML'
    query = {'StationDesc': station_name}
    if num_minutes:
        url += '_withNumMins'
        query['NumMins'] = num_minutes
    response = requests.get(url, params=query, timeout=10)
    if response.status_code != 200:
        return []
    trains = self._parse_station_data(response.content)
    if direction is not None or destination is not None:
        return self._prune_trains(trains, direction=direction,
                                  destination=destination,
                                  stops_at=stops_at)
    return trains
Returns all trains due to serve station station_name .
56,004
def get_train_stops(self, train_code, date=None):
    """Return movement details for one train.

    date defaults to today formatted as '%d %B %Y'.  Returns [] on a
    non-200 response.
    """
    if date is None:
        date = datetime.date.today().strftime("%d %B %Y")
    query = {'TrainId': train_code, 'TrainDate': date}
    url = self.api_base_url + 'getTrainMovementsXML'
    response = requests.get(url, params=query, timeout=10)
    if response.status_code != 200:
        return []
    return self._parse_train_movement_data(response.content)
Get details for a train .
56,005
def fill_fields(self, **kwargs):
    """Fill each field named by a keyword with its value.

    Looks up every attribute by keyword name on self and sends the
    value to it via send_keys().
    """
    for field_name, text in kwargs.items():
        getattr(self, field_name).send_keys(text)
Fills the fields referenced by kwargs keys and fill them with the value
56,006
def authorize_url(client_id=None, redirect_uri=None, state=None, scopes=None,
                  show_dialog=False, http_client=None):
    """Build the Spotify authorization-dialog URL.

    Missing client_id / redirect_uri fall back to the SPOTIFY_CLIENT_ID
    and SPOTIFY_REDIRECT_URI environment variables; a random state is
    generated when none is supplied.  Returns the full URL string.
    """
    params = {
        'client_id': client_id or os.environ.get('SPOTIFY_CLIENT_ID'),
        'redirect_uri': redirect_uri or os.environ.get('SPOTIFY_REDIRECT_URI'),
        'state': state or str(uuid.uuid4()).replace('-', ''),
        'scope': ' '.join(scopes) if scopes else '',
        'show_dialog': show_dialog,
        'response_type': 'code',
    }
    query = '&'.join('{}={}'.format(key, value)
                     for key, value in params.items())
    return '{}?{}'.format('https://accounts.spotify.com/authorize', query)
Trigger authorization dialog
56,007
def refresh(self):
    """Refresh the access token using the stored refresh token."""
    payload = {
        'grant_type': 'refresh_token',
        'refresh_token': self._token.refresh_token,
    }
    response = self.http_client.post(
        self.URL, data=payload,
        auth=(self.client_id, self.client_secret))
    response.raise_for_status()
    self._token = Token.from_json(response.json())
Refresh the access token
56,008
def instance_of(cls):
    """Create an invariant requiring the value to be an instance of cls.

    The returned check(value) yields (ok, message) where message names
    the actual and required fully-qualified types.
    """
    def check(value):
        message = u"{value!r} is instance of {actual!s}, required {required!s}".format(
            value=value,
            actual=fullyQualifiedName(type(value)),
            required=fullyQualifiedName(cls),
        )
        return (isinstance(value, cls), message)
    return check
Create an invariant requiring the value is an instance of cls .
56,009
def provider_of(iface):
    """Create an invariant requiring the value to provide the
    zope.interface iface.

    The returned check(value) yields (ok, message).
    """
    def check(value):
        message = u"{value!r} does not provide {interface!s}".format(
            value=value,
            interface=fullyQualifiedName(iface),
        )
        return (iface.providedBy(value), message)
    return check
Create an invariant requiring the value provides the zope . interface iface .
56,010
def temp_dir(suffix='', prefix='tmp', parent_dir=None, make_cwd=False):
    """Create a temporary directory and optionally chdir into it.

    Yields the resolved pathlib.Path of the new directory.  On exit the
    previous working directory is restored (when changed) and the
    directory tree is removed.
    """
    original_cwd = os.getcwd()
    if parent_dir is not None:
        parent_dir = str(parent_dir)
    created = tempfile.mkdtemp(suffix, prefix, parent_dir)
    created_path = pathlib.Path(created)
    try:
        if make_cwd:
            os.chdir(str(created))
        yield created_path.resolve()
    finally:
        if make_cwd:
            os.chdir(original_cwd)
        # tolerate the directory having been removed inside the block
        with temporary.util.allow_missing_file():
            shutil.rmtree(str(created))
Create a temporary directory and optionally change the current working directory to it . The directory is deleted when the context exits .
56,011
def openSafeReplace(filepath, mode='w+b'):
    """Yield a temporary file; move it over filepath when the block
    finishes.

    Writability of filepath is checked both before and after the block;
    IOError is raised when it is not writable.
    """
    if not _isFileAccessible(filepath):
        raise IOError('File %s is not writtable' % (filepath, ))
    with tempfile.NamedTemporaryFile(delete=False, mode=mode) as tmpf:
        tempName = tmpf.name
        yield tmpf
    if not _isFileAccessible(filepath):
        raise IOError('File %s is not writtable' % (filepath, ))
    shutil.move(tempName, filepath)
Context manager to open a temporary file and replace the original file on closing .
56,012
def _isFileAccessible ( filepath ) : directory = os . path . dirname ( filepath ) if not os . access ( directory , os . W_OK ) : return False if os . path . exists ( filepath ) : if not os . access ( filepath , os . W_OK ) : return False try : openfile = os . open ( filepath , os . O_WRONLY ) os . close ( openfile ) except IOError : return False return True
Returns True if the specified filepath is writable .
56,013
def writeJsonZipfile(filelike, data, compress=True, mode='w', name='data'):
    """Dump data as JSON (via MaspyJsonEncoder) into a zip archive.

    compress selects ZIP_DEFLATED vs ZIP_STORED; name is the archive
    member name.
    """
    method = zipfile.ZIP_DEFLATED if compress else zipfile.ZIP_STORED
    serialized = json.dumps(data, cls=MaspyJsonEncoder)
    with zipfile.ZipFile(filelike, mode, allowZip64=True) as archive:
        archive.writestr(name, serialized, method)
Serializes the objects contained in data to a JSON formatted string and writes it to a zipfile.
56,014
def writeBinaryItemContainer(filelike, binaryItemContainer, compress=True):
    """Serialize binaryItems into a two-member zip archive.

    Member 'metadata' holds JSON describing every item; member
    'binarydata' holds the concatenated raw array bytes.
    """
    allMetadata = {}
    arrayBuffer = io.BytesIO()
    for index, item in enumerate(viewvalues(binaryItemContainer)):
        arrayMetadata = _dumpArrayDictToFile(arrayBuffer, item.arrays)
        allMetadata[index] = [item._reprJSON(), arrayMetadata]
    arrayBuffer.seek(0)
    method = zipfile.ZIP_DEFLATED if compress else zipfile.ZIP_STORED
    with zipfile.ZipFile(filelike, 'w', allowZip64=True) as archive:
        archive.writestr('metadata',
                         json.dumps(allMetadata, cls=MaspyJsonEncoder),
                         method)
        archive.writestr('binarydata', arrayBuffer.getvalue(), method)
Serializes the binaryItems contained in binaryItemContainer and writes them into a zipfile archive .
56,015
def _dumpArrayToFile ( filelike , array ) : bytedata = array . tobytes ( 'C' ) start = filelike . tell ( ) end = start + len ( bytedata ) metadata = { 'start' : start , 'end' : end , 'size' : array . size , 'dtype' : array . dtype . name } filelike . write ( bytedata ) return metadata
Serializes a 1 - dimensional numpy . array to bytes writes the bytes to the filelike object and returns a dictionary with metadata necessary to restore the numpy . array from the file .
56,016
def _dumpNdarrayToFile ( filelike , ndarray ) : bytedata = ndarray . tobytes ( 'C' ) start = filelike . tell ( ) end = start + len ( bytedata ) metadata = { 'start' : start , 'end' : end , 'size' : ndarray . size , 'dtype' : ndarray . dtype . name , 'shape' : ndarray . shape } filelike . write ( bytedata ) return metadata
Serializes an N - dimensional numpy . array to bytes writes the bytes to the filelike object and returns a dictionary with metadata necessary to restore the numpy . array from the file .
56,017
def _arrayFromBytes ( dataBytes , metadata ) : array = numpy . fromstring ( dataBytes , dtype = numpy . typeDict [ metadata [ 'dtype' ] ] ) if 'shape' in metadata : array = array . reshape ( metadata [ 'shape' ] ) return array
Generates and returns a numpy array from raw data bytes .
56,018
def searchFileLocation(targetFileName, targetFileExtension, rootDirectory,
                       recursive=True):
    """Find the first file named <basename>.<targetFileExtension>.

    The basename is everything before the first '.' of targetFileName.
    Searches rootDirectory, descending into subfolders when recursive
    is True.  Returns the matching path or None.
    """
    expectedName = targetFileName.split('.')[0] + '.' + targetFileExtension
    if recursive:
        for dirpath, dirnames, filenames in os.walk(rootDirectory):
            if expectedName in filenames:
                return joinpath(dirpath, expectedName)
        return None
    for entry in os.listdir(rootDirectory):
        entryPath = joinpath(rootDirectory, entry)
        if os.path.isfile(entryPath) and entry == expectedName:
            return entryPath
    return None
Search for a filename with a specified file extension in all subfolders of specified rootDirectory returns first matching instance .
56,019
def matchingFilePaths(targetfilename, directory, targetFileExtension=None,
                      selector=None):
    """Search all subfolders of directory for matching files.

    targetfilename is compared without its extension.  When
    targetFileExtension is given, only files ending with that extension
    (leading dots ignored) are kept.  selector may be a predicate on
    the extensionless filename; the default is exact equality with
    targetfilename.  Returns a list of matching file paths.

    Fix: the original called .replace() on targetFileExtension before
    checking for None, so the documented default raised AttributeError.
    """
    targetFilePaths = []
    targetfilename = os.path.splitext(targetfilename)[0]
    matchExtensions = targetFileExtension is not None
    if matchExtensions:
        targetFileExtension = targetFileExtension.replace('.', '')
    if selector is None:
        selector = functools.partial(operator.eq, targetfilename)
    for dirpath, dirnames, filenames in os.walk(directory):
        for filename in filenames:
            basename = os.path.splitext(filename)[0]
            if not selector(basename):
                continue
            if matchExtensions and not filename.endswith('.' + targetFileExtension):
                continue
            targetFilePaths.append(joinpath(dirpath, filename))
    return targetFilePaths
Search for files in all subfolders of specified directory return filepaths of all matching instances .
56,020
def listFiletypes(targetfilename, directory):
    """Return the extensions of every file in directory whose basename
    (text before the first '.') equals targetfilename.
    """
    extensions = []
    for entry in os.listdir(directory):
        if not os.path.isfile(joinpath(directory, entry)):
            continue
        basename, _, extension = entry.partition('.')
        if basename == targetfilename:
            extensions.append(extension)
    return extensions
Looks for all occurrences of a specified filename in a directory and returns a list of all present file extensions of this filename.
56,021
def findAllSubstrings(string, substring):
    """Return the start positions of every (possibly overlapping)
    occurrence of substring in string; [] when absent.
    """
    positions = []
    offset = string.find(substring)
    while offset != -1:
        positions.append(offset)
        offset = string.find(substring, offset + 1)
    return positions
Returns a list of all substring starting positions in string or an empty list if substring is not present in string .
56,022
def toList(variable, types=(basestring, int, float, )):
    """Wrap a scalar (string/int/float by default) in a list.

    Values of any other type (e.g. lists) are returned unchanged.
    Note: uses the Python 2 `basestring` builtin in the default.
    """
    return [variable] if isinstance(variable, types) else variable
Converts a variable of type string int float to a list containing the variable as the only element .
56,023
def calcDeviationLimits(value, tolerance, mode):
    """Return (lowerLimit, upperLimit) for value(s) and a tolerance.

    mode 'relative' scales min/max by (1 -/+ tolerance); mode
    'absolute' offsets them by the tolerance.  value may be a scalar or
    a list.  Raises Exception for an unknown mode.

    Fix: the original error path referenced an undefined name
    `filepath`, so a bad mode raised NameError instead of the intended
    message.
    """
    values = toList(value)
    if mode == 'relative':
        lowerLimit = min(values) * (1 - tolerance)
        upperLimit = max(values) * (1 + tolerance)
    elif mode == 'absolute':
        lowerLimit = min(values) - tolerance
        upperLimit = max(values) + tolerance
    else:
        raise Exception('mode %s not specified' % (mode, ))
    return lowerLimit, upperLimit
Returns the upper and lower deviation limits for a value and a given tolerance, as either a relative or an absolute difference.
56,024
def open(self, filepath, mode='w+b'):
    """Yield a temporary file standing in for filepath.

    Re-opens the previously created temp file when filepath was opened
    before; otherwise creates a new named temp file and records it so
    the original can be replaced when the context closes.  Raises
    IOError when filepath is not writable.
    """
    if not _isFileAccessible(filepath):
        raise IOError('File %s is not writable' % (filepath, ))
    if filepath in self._files:
        with open(self._files[filepath], mode=mode) as tmpf:
            yield tmpf
    else:
        with tempfile.NamedTemporaryFile(delete=False, mode=mode) as tmpf:
            tempfilepath = tmpf.name
            yield tmpf
        self._files[filepath] = tempfilepath
Opens a file - will actually return a temporary file but replace the original file when the context is closed .
56,025
def add_state(self):
    """Append a new SFAState whose id is the next free index."""
    new_id = len(self.states)
    self.states.append(SFAState(new_id))
This function adds a new state
56,026
def _initAddons(cls, recurse=True):
    """Import every addon module registered for this manager."""
    for module in cls.addonModules(recurse):
        projex.importmodules(module)
Initializes the addons for this manager .
56,027
def get_field_label_css_class(self, bound_field):
    """Return 'form-check-label' for CheckboxInput widgets; defer to
    the parent implementation for every other field.
    """
    widget = bound_field.field.widget
    if isinstance(widget, forms.CheckboxInput):
        return 'form-check-label'
    return super().get_field_label_css_class(bound_field)
Returns form - check - label if widget is CheckboxInput . For all other fields no css class is added .
56,028
def create_textfile_with_contents(filename, contents, encoding='utf-8'):
    """Create a text file with the given contents, overwriting any
    existing file.  A trailing newline is appended when missing.

    Improvement: the file handle is managed with a `with` block so it
    is closed even if a write fails (the original leaked the handle on
    error).
    """
    ensure_directory_exists(os.path.dirname(filename))
    if os.path.exists(filename):
        os.remove(filename)
    with codecs.open(filename, "w", encoding) as outstream:
        outstream.write(contents)
        if contents and not contents.endswith("\n"):
            outstream.write("\n")
        outstream.flush()
    assert os.path.exists(filename), "ENSURE file exists: %s" % filename
Creates a textual file with the provided contents in the workdir . Overwrites an existing file .
56,029
def ensure_directory_exists(dirname, context=None):
    """Create dirname (and parents) when missing.

    When a context is given, the path is first resolved through
    realpath_with_context().
    """
    real_dirname = realpath_with_context(dirname, context) if context else dirname
    if not os.path.exists(real_dirname):
        os.makedirs(real_dirname)
    assert os.path.exists(real_dirname), "ENSURE dir exists: %s" % dirname
    assert os.path.isdir(real_dirname), "ENSURE isa dir: %s" % dirname
Ensures that a directory exists. If it does not exist it is automatically created.
56,030
def deserialize(self, msg):
    """Deserialize JSON text into the corresponding Python object."""
    self.logger.debug('deserializing %s', msg)
    parsed = json.loads(msg)
    return parsed
deserialize output to a Python object
56,031
def append_request_id(req, resp, resource, params):
    """Record the request id from resp onto resource.req_ids.

    The id comes from the 'x-request-id' response header when resp is a
    Response (or exposes headers/_headers); otherwise resp itself is
    treated as the id.  Duplicate ids are not appended.
    """
    def extract_headers(response):
        if hasattr(response, 'headers'):
            return response.headers
        if hasattr(response, '_headers'):
            return response._headers
        return None

    headers = extract_headers(resp)
    if isinstance(resp, Response) or headers is not None:
        request_id = headers.get('x-request-id')
    else:
        request_id = resp
    if resource.req_ids is None:
        resource.req_ids = []
    if request_id not in resource.req_ids:
        resource.req_ids.append(request_id)
Append request id which got from response header to resource . req_ids list .
56,032
def _sanitizer ( self , obj ) : if isinstance ( obj , datetime . datetime ) : return obj . isoformat ( ) if hasattr ( obj , "to_dict" ) : return obj . to_dict ( ) return obj
Sanitizer method that will be passed to json . dumps .
56,033
def make_uniq_for_step(ctx, ukeys, step, stage, full_data,
                       clean_missing_after_seconds, to_uniq):
    """Create unique identifiers for one step's information.

    With no ukeys the input is returned untouched.  Otherwise the
    previously stored uniq data is loaded, merged with to_uniq via
    make_uniq(), dumped back to storage, and the newest uniq mapping is
    returned.
    """
    if not ukeys:
        return to_uniq
    uniq_data = bubble_lod_load(ctx, step, stage)
    ctx.say('Creating uniq identifiers for [' + step + '] information', 0)
    ctx.gbc.say('uniq_data:', stuff=uniq_data, verbosity=1000)
    uniq_step_res = make_uniq(
        ctx=ctx,
        ldict=to_uniq,
        keyed=uniq_data,
        uniqstr=ukeys,
        tag=step,
        full_data=full_data,
        remove_missing_after_seconds=clean_missing_after_seconds)
    ctx.gbc.say('uniq_step_res:', stuff=uniq_step_res, verbosity=1000)
    to_uniq = get_newest_uniq(ctx.gbc, uniq_step_res)
    uniq_res_list = get_uniq_list(ctx.gbc, uniq_step_res)
    pfr = bubble_lod_dump(ctx=ctx,
                          step=step,
                          stage=stage,
                          full_data=full_data,
                          reset=True,
                          data_gen=uniq_res_list)
    ctx.gbc.say('saved uniq ' + step + ' data res:', stuff=pfr, verbosity=700)
    return to_uniq
initially just a copy from UNIQ_PULL
56,034
def list_ip(self, instance_id):
    """Return the private and public IPs of an EC2 instance as a dict
    with keys 'PrivateIp' and 'PublicIp'.
    """
    reply = self.client.describe_instances(InstanceIds=[instance_id])
    instance = reply.get("Reservations")[0].get("Instances")[0]
    return {
        'PrivateIp': instance.get("PrivateIpAddress"),
        'PublicIp': instance.get("PublicIpAddress"),
    }
Add all IPs
56,035
def main():
    """CLI test driver for Flex regular expressions to FST DFA.

    Usage: prog fst_file [save_file] -- parses the flex file, minimizes
    the resulting automaton, prints it, and optionally saves it.

    Improvement: Python 2-only print statements converted to print()
    calls, which behave identically here under both Python 2 and 3.
    """
    if len(argv) < 2:
        print('Usage: %s fst_file [optional: save_file]' % argv[0])
        return
    flex_a = Flexparser()
    mma = flex_a.yyparse(argv[1])
    mma.minimize()
    print(mma)
    if len(argv) == 3:
        mma.save(argv[2])
Testing function for Flex Regular Expressions to FST DFA
56,036
def has_permission(self):
    """Permission check for plain Django views.

    Gathers the objects to check (get_perms_objects(), get_object() or
    the queryset).  For filterable GET requests the queryset is pruned
    and access granted; otherwise check_perms() decides.
    """
    objs = [None]
    if hasattr(self, 'get_perms_objects'):
        objs = self.get_perms_objects()
    else:
        if hasattr(self, 'get_object'):
            try:
                objs = [self.get_object()]
            except Http404:
                raise
            except:
                # best-effort: fall through to the queryset below
                pass
        if objs == [None]:
            objs = self.get_queryset()
    filterable = (hasattr(self, 'permission_filter_queryset')
                  and self.permission_filter_queryset is not False
                  and self.request.method == 'GET')
    if filterable:
        if objs != [None]:
            self.perms_filter_queryset(objs)
        return True
    return check_perms(self.request.user,
                       self.get_permission_required(),
                       objs, self.request.method)
Permission checking for normal Django .
56,037
def check_permissions(self, request):
    """Permission check for DRF views.

    Gathers the objects to check (get_perms_objects(), get_object() or
    the queryset).  For filterable GET requests the queryset is pruned;
    otherwise check_perms() decides and permission_denied() is invoked
    with the first denial message on failure.
    """
    objs = [None]
    if hasattr(self, 'get_perms_objects'):
        objs = self.get_perms_objects()
    else:
        if hasattr(self, 'get_object'):
            try:
                objs = [self.get_object()]
            except Http404:
                raise
            except:
                # best-effort: fall through to the queryset below
                pass
        if objs == [None]:
            objs = self.get_queryset()
        if len(objs) == 0:
            objs = [None]
    filterable = (hasattr(self, 'permission_filter_queryset')
                  and self.permission_filter_queryset is not False
                  and self.request.method == 'GET')
    if filterable:
        if objs != [None]:
            self.perms_filter_queryset(objs)
    else:
        has_perm = check_perms(self.request.user,
                               self.get_permission_required(),
                               objs, self.request.method)
        if not has_perm:
            msg = self.get_permission_denied_message(
                default="Permission denied.")
            if isinstance(msg, Sequence):
                msg = msg[0]
            self.permission_denied(request, message=msg)
Permission checking for DRF .
56,038
def _hashed_key ( self ) : return abs ( int ( hashlib . md5 ( self . key_prefix . encode ( 'utf8' ) ) . hexdigest ( ) , 16 ) ) % ( 10 ** ( self . _size_mod if hasattr ( self , '_size_mod' ) else 5 ) )
Returns 16 - digit numeric hash of the redis key
56,039
def update(self, data):
    """Set each key in data to its serialized value via a single MSET.

    Empty/None input is a no-op.  Returns None.
    """
    if not data:
        return
    payload = {self.get_key(key): self._dumps(value)
               for key, value in data.items()}
    self._client.mset(payload)
Set given keys to their respective values
56,040
def expire_at(self, key, _time):
    """Set an absolute expiration (unix time, rounded) on key."""
    full_key = self.get_key(key)
    return self._client.expireat(full_key, round(_time))
Sets the expiration time of
56,041
def _bucket_key ( self ) : return "{}.size.{}" . format ( self . prefix , ( self . _hashed_key // 1000 ) if self . _hashed_key > 1000 else self . _hashed_key )
Returns hash bucket key for the redis key
56,042
def reverse_iter(self, start=None, stop=None, count=2000):
    """Yield the list's items in reverse order, fetching `count` items
    per LRANGE round-trip.

    start/stop are redis-style (negative) indices; by default the scan
    begins with the last `count` items and walks backwards until an
    empty range is returned.

    Fix: the original immediately overwrote the `count` parameter with
    a hard-coded 1000, silently ignoring the caller's chunk size.
    """
    start = start if start is not None else (-1 * count)
    stop = stop if stop is not None else -1
    _loads = self._loads
    chunk = self._client.lrange(self.key_prefix, start, stop)
    while chunk:
        for item in reversed(chunk):
            yield _loads(item)
        start -= count
        stop -= count
        chunk = self._client.lrange(self.key_prefix, start, stop)
- > yields items of the list in reverse
56,043
def pop(self, index=None):
    """Remove and return an item: the tail by default, the head for
    index 0, otherwise the item at `index` (replaced with a random
    placeholder which is then removed).
    """
    if index is None:
        return self._loads(self._client.rpop(self.key_prefix))
    if index == 0:
        return self._loads(self._client.lpop(self.key_prefix))
    placeholder = gen_rand_str(16, 24)
    item = self[index]
    self[index] = placeholder
    self.remove(placeholder)
    return item
Removes and returns the item at
56,044
def count(self, value):
    """Count occurrences of value by scanning the whole list.

    O(n) over the backing store -- use with caution on large lists.
    """
    return sum(1 for item in self if item == value)
Not recommended for use on large lists due to time complexity but it works . Use with caution .
56,045
def push(self, *items):
    """LPUSH items (serialized first when self.serialized) onto the
    list; returns the client's reply.
    """
    values = list(map(self._dumps, items)) if self.serialized else items
    return self._client.lpush(self.key_prefix, *values)
Prepends the list with
56,046
def index(self, item):
    """Return the position of the first element equal to item, or None.

    Linear scan -- slow on large lists.
    """
    for position, element in enumerate(self.iter()):
        if element == item:
            return position
    return None
Not recommended for use on large lists due to time complexity but it works
56,047
def intersection(self, *others):
    """Return the members present in this set and all `others`,
    deserialized into a Python set.
    """
    other_keys = self._typesafe_others(others)
    members = self._client.sinter(self.key_prefix, *other_keys)
    return set(map(self._loads, members))
Calculates the intersection of all the given sets that is members which are present in all given sets .
56,048
def rank(self, member):
    """Return member's rank in the sorted set: descending when
    self.reversed is set, ascending otherwise.
    """
    serialized = self._dumps(member)
    if self.reversed:
        return self._client.zrevrank(self.key_prefix, serialized)
    return self._client.zrank(self.key_prefix, serialized)
Gets the ASC rank of
56,049
def recv_blocking(conn, msglen):
    """Receive exactly msglen bytes from conn, reading at most 4096 at
    a time.  Raises RuntimeError if the connection closes early.
    """
    msg = b''
    while len(msg) < msglen:
        chunk = conn.recv(min(msglen - len(msg), 4096))
        if not chunk:
            raise RuntimeError("socket connection broken")
        msg += chunk
        logging.debug("Msglen: %d of %d", len(msg), msglen)
        logging.debug("Message: %s", msg)
    return msg
Receive data until msglen bytes have been received.
56,050
def compare_password(expected, actual):
    """Compare two 64-byte encoded passwords.

    Returns (True, "OK") on an exact match; otherwise (False, message)
    where the message reports a password and/or version mismatch (the
    trailing 8 characters carry the version).
    """
    if expected == actual:
        return True, "OK"
    problems = []
    expected_ver = expected[-8:].rstrip()
    actual_ver = actual[-8:].rstrip()
    if expected[:-8] != actual[:-8]:
        problems.append("Password mismatch")
    if expected_ver != actual_ver:
        problems.append("asterisk_mbox version mismatch. Client: '"
                        + actual_ver + "', Server: '" + expected_ver + "'")
    return False, ". ".join(problems)
Compare two 64byte encoded passwords .
56,051
def encode_to_sha(msg):
    """Hex-encode msg (str input is UTF-8 encoded first), pad with '0'
    characters, and trim to a 64-byte sha-looking value.
    """
    if isinstance(msg, str):
        msg = msg.encode('utf-8')
    padded = codecs.encode(msg, "hex_codec") + (b'00' * 32)
    return padded[:64]
coerce a message into a sha-looking bytearray
56,052
def decode_from_sha(sha):
    """Strip trailing '00' padding from a sha-like value and hex-decode
    it back to the original bytes.
    """
    if isinstance(sha, str):
        sha = sha.encode('utf-8')
    trimmed = re.sub(rb'(00)*$', b'', sha)
    return codecs.decode(trimmed, "hex_codec")
convert coerced sha back into numeric list
56,053
def _api_path ( self , item ) : if self . base_url is None : raise NotImplementedError ( "base_url not set" ) path = "/" . join ( [ x . blob [ "id" ] for x in item . path ] ) return "/" . join ( [ self . base_url , path ] )
Get the API path for the current cursor position .
56,054
def register_references(kb_app: kb,
                        sphinx_app: Sphinx,
                        sphinx_env: BuildEnvironment,
                        docnames: List[str]):
    """Walk the resource registry and register an empty reference dict
    for every resource class flagged with is_reference.
    """
    references: ReferencesContainer = sphinx_app.env.references
    for resource_name, resource_class in kb_app.config.resources.items():
        if getattr(resource_class, 'is_reference', False):
            references[resource_name] = dict()
Walk the registry and add sphinx directives
56,055
def register_handlers(self, handler_classes):
    """Instantiate each discovered handler class with our client, add
    it to self.handlers, and log the registration.
    """
    for handler_class in handler_classes:
        self.handlers.append(handler_class(client=self.client))
        class_name = getattr(handler_class, '__name__', str(handler_class))
        logging.info('Successfully registered {handler_class}'.format(
            handler_class=class_name))
Create handlers from discovered handler classes
56,056
def get_merged_filter(self):
    """Merge every handler's track/follow terms into one TweetFilter."""
    tracked = set()
    followed = set()
    for handler in self.handlers:
        tracked |= set(handler.filter.track)
        followed |= set(handler.filter.follow)
    return TweetFilter(track=list(tracked), follow=list(followed))
Return merged filter from list of handlers
56,057
def get_domain(url):
    """Return the scheme+netloc part of url, e.g.
    'https://example.com' for 'https://example.com/a?b=1'.
    """
    parts = urlparse(url)
    return "{schema}://{netloc}".format(schema=parts.scheme,
                                        netloc=parts.netloc)
Get domain part of an url .
56,058
def join_all(domain, *parts):
    """Join url components onto domain with single slashes, skipping
    empty segments.
    """
    segments = [domain[:-1] if domain.endswith("/") else domain]
    for part in parts:
        segments.extend(piece for piece in part.split("/") if piece.strip())
    return "/".join(segments)
Join all url components .
56,059
# NOTE(review): `command` is read before any value is assigned to it --
# the original command-string literal appears to have been lost during
# extraction, so as written this raises NameError on first use.
# TODO: restore the remote listing command before relying on this.
def _list_remote ( store , maildir , verbose = False ) : command = stdout = store . cmd ( command , verbose ) lines = stdout . split ( "\n" ) for line in lines : parts = line . split ( " " ) if len ( parts ) >= 3 : yield parts [ 0 : 3 ]
List the contents of a maildir.
56,060
def sshpull(host, maildir, localmaildir, noop=False, verbose=False,
            filterfile=None):
    """Pull a remote maildir (over ssh) into the local maildir."""
    _pull(_SSHStore(host, maildir), localmaildir, noop, verbose, filterfile)
Pull a remote maildir to the local one .
56,061
def filepull(maildir, localmaildir, noop=False, verbose=False,
             filterfile=None):
    """Pull one local maildir into another."""
    _pull(_Store(maildir), localmaildir, noop, verbose, filterfile)
Pull one local maildir into another .
56,062
def _filter ( msgdata , mailparser , mdfolder , mailfilters ) : if mailfilters : for f in mailfilters : msg = mailparser . parse ( StringIO ( msgdata ) ) rule = f ( msg , folder = mdfolder ) if rule : yield rule return
Filter msgdata by mailfilters
56,063
def cmd(self, cmd, verbose=False):
    """Run cmd (formatted with the store's maildir) on the remote host
    via ssh and return its stdout.
    """
    command = cmd.format(maildir=self.directory)
    if verbose:
        print(command)
    proc = Popen(["ssh", "-T", self.host, command],
                 stdin=PIPE, stdout=PIPE, stderr=PIPE)
    stdout, _ = proc.communicate()
    return stdout
Executes the specified command on the remote host .
56,064
def fetch_result(self):
    """Return the page for the first matching search result.

    Scans result containers until _filter_results() accepts one, then
    fetches that result's page.  Raises PageNotFoundError when nothing
    matches or the page cannot be fetched.
    """
    containers = self.soup.find_all('div',
                                    {'class': 'container container-small'})
    href = None
    matched = False
    position = 0
    while position < len(containers) and not matched:
        container = containers[position]
        anchor = container.find('a', {'rel': 'bookmark'})
        matched = self._filter_results(container, anchor)
        href = anchor['href']
        position += 1
    try:
        page = get_soup(href)
    except (Exception):
        page = None
    if href and page:
        return page
    raise PageNotFoundError(PAGE_ERROR)
Return a list of urls for each search result .
56,065
def _filter_results(self, result, anchor):
    """Check one search result's category tag, date and title against
    the requested date/category; return True on a match.
    """
    try:
        category_tag = result.find('a', {'rel': 'category tag'}).string
        title = anchor.string.lower()
        date_tag = result.find('time').string
    except (AttributeError, TypeError):
        return False
    if category_tag != "Daily Ratings":
        return False
    if not date_in_range(self.date, date_tag, 5):
        return False
    # cable searches must mention 'cable' in the title; others must not
    if self.category == 'cable':
        return 'cable' in title
    return 'cable' not in title
Filter search results by checking category titles and dates
56,066
def _build_url(self):
    """Format the search URL from the category, day, year and month."""
    pieces = [BASE_URL, self.category + ' ratings',
              self.day, self.year, self.month]
    return SEARCH_URL.format(*pieces)
Build url based on searching by date or by show .
56,067
def _assert_category ( self , category ) : category = category . lower ( ) valid_categories = [ 'cable' , 'broadcast' , 'final' , 'tv' ] assert_msg = "%s is not a valid category." % ( category ) assert ( category in valid_categories ) , assert_msg
Validate category argument
56,068
def get_data(self, path, **params):
    """Fetch a service path and return the XML response parsed into a
    dict structure.

    On a parse failure the path, params and error are printed before
    the exception is re-raised.
    """
    xml = self.get_response(path, **params)
    try:
        return parse(xml)
    except Exception as error:
        print(path)
        print(params)
        print(error)
        raise
Giving a service path and optional specific arguments returns the XML data from the API parsed as a dict structure .
56,069
def run(self, port):
    """Parse standard command-line options and serve this application
    on the given port.
    """
    tornado.options.parse_command_line()
    server = tornado.httpserver.HTTPServer(self)
    server.listen(port)
    tornado.ioloop.IOLoop.instance().start()
Run on given port . Parse standard options and start the http server .
56,070
def log_request(self, handler):
    """Send a JSON packet describing the request to the service-log
    collector, emit a per-status timing metric, then defer to the base
    class implementation.
    """
    request = handler.request
    packet = {
        'method': request.method,
        'uri': request.uri,
        'remote_ip': request.remote_ip,
        'status': handler.get_status(),
        'request_time_ms': request.request_time() * 1000.0,
        'service_id': self.service_id,
        'request_id': request.headers.get(REQUEST_ID_HEADER, 'undefined'),
    }
    if hasattr(handler, 'logvalues'):
        for key, value in handler.logvalues.iteritems():
            packet[key] = value
    servicelog.log(packet)
    metric = "requests." + str(handler.get_status())
    metrics.timing(metric, request.request_time() * 1000.0)
    super(LoggingApplication, self).log_request(handler)
Override base method to log requests to JSON UDP collector and emit a metric .
56,071
def logvalue(self, key, value):
    """Record a key/value pair for this request's log packet,
    creating the logvalues dict on first use.
    """
    try:
        self.logvalues[key] = value
    except AttributeError:
        self.logvalues = {key: value}
Add log entry to request log info
56,072
def write_error(self, status_code, **kwargs):
    """Log the halt reason and render a minimal HTML error page.

    Uses the raised exception's log_message when available, falling
    back to the standard reason phrase for status_code.
    """
    default_message = httplib.responses.get(status_code, '')
    message = default_message
    if 'exc_info' in kwargs:
        (_, exc, _) = kwargs['exc_info']
        if hasattr(exc, 'log_message'):
            message = str(exc.log_message) or default_message
    self.logvalue('halt_reason', message)
    title = "{}: {}".format(status_code, default_message)
    body = "{}: {}".format(status_code, message)
    self.finish("<html><title>" + title + "</title>"
                "<body>" + body + "</body></html>")
Log halt_reason in service log and output error page
56,073
def timeit(self, metric, func, *args, **kwargs):
    """Time func(*args, **kwargs), emit the timing under metric, and
    return func's result (delegates to metrics.timeit).
    """
    return metrics.timeit(metric, func, *args, **kwargs)
Time execution of callable and emit metric then return result .
56,074
def require_content_type(self, content_type):
    """Halt with 400 unless the request's content-type header equals
    content_type exactly.
    """
    actual = self.request.headers.get('content-type', '')
    if actual != content_type:
        self.halt(400, 'Content type must be ' + content_type)
Raises a 400 if request content type is not as specified .
56,075
def _ensure_request_id_header(self):
    """Ensure request headers carry a request ID, adding a uuid1-based
    hex one when absent.
    """
    headers = self.request.headers
    if REQUEST_ID_HEADER not in headers:
        headers.add(REQUEST_ID_HEADER, uuid.uuid1().hex)
Ensure request headers have a request ID . Set one if needed .
56,076
def main ( ) : if len ( argv ) < 2 : targetfile = 'target.y' else : targetfile = argv [ 1 ] print 'Parsing ruleset: ' + targetfile , flex_a = Flexparser ( ) mma = flex_a . yyparse ( targetfile ) print 'OK' print 'Perform minimization on initial automaton:' , mma . minimize ( ) print 'OK' print 'Perform StateRemoval on minimal automaton:' , state_removal = StateRemoval ( mma ) mma_regex = state_removal . get_regex ( ) print mma_regex
Testing function for DFA _Brzozowski Operation
56,077
def _state_removal_init(self):
    """Initialize the transition-label table for the state-removal operation.

    Every (src, dst) state pair starts as epsilon (self-loop) or the empty
    label; labels of arcs from src to dst are then accumulated on top.
    """
    find_label = self.mma.isyms.find
    for src in self.mma.states:
        for dst in self.mma.states:
            key = (src.stateid, dst.stateid)
            # Diagonal entries begin as epsilon, all others as empty.
            self.l_transitions[key] = (
                self.epsilon if src.stateid == dst.stateid else self.empty)
            for arc in src.arcs:
                if arc.nextstate != dst.stateid:
                    continue
                label = find_label(arc.ilabel)
                if self.l_transitions[key] != self.empty:
                    self.l_transitions[key] += label
                else:
                    self.l_transitions[key] = label
State Removal Operation Initialization
56,078
def _state_removal_solve(self):
    """Run the state-removal operation: eliminate every state that is
    neither final nor the initial state, then return the label table.
    """
    # The initial state is the one whose 'initial' attribute sorts first
    # in descending order.
    initial = sorted(self.mma.states, key=attrgetter('initial'), reverse=True)[0].stateid
    for state_k in self.mma.states:
        if state_k.final:
            continue
        if state_k.stateid == initial:
            continue
        self._state_removal_remove(state_k.stateid)
    print self.l_transitions  # Python 2 print statement; debug output
    return self.l_transitions
The State Removal Operation
56,079
def request(self, url, method, body="", headers={}, retry=True):
    """Execute an HTTP request and return a dict describing the response.

    :param url: path appended to ``self.base_url``.
    :param method: HTTP method name.
    :param body: request body.
    :param headers: extra headers, merged with ``self.headers``.  The
        mutable default is safe here because the dict is never mutated.
    :param retry: when True, retry up to 5 times with exponential backoff
        on 503/504 responses.
    :returns: dict with keys ``body`` (JSON-decoded when the response is
        ``application/json``), ``status``, ``resp`` and ``content-type``.
    :raises: whatever ``raise_for_status`` raises on error statuses.
    """
    if headers:
        headers = dict(list(headers.items()) + list(self.headers.items()))
    else:
        headers = self.headers
    # Python 2 only: header names/values must be byte strings.
    if not sys.version_info >= (3,) and headers:
        headers = dict(
            (k.encode('ascii') if isinstance(k, unicode) else k,
             v.encode('ascii') if isinstance(v, unicode) else v)
            for k, v in headers.items())
    url = self.base_url + url
    if not sys.version_info >= (3,):
        if isinstance(url, unicode):
            url = url.encode('ascii')
    r = self._doRequest(url, method, body, headers)
    retry_http_codes = [503, 504]
    if r.status_code in retry_http_codes and retry:
        tries = 5
        delay = .5
        backoff = 2
        while r.status_code in retry_http_codes and tries > 0:
            tries -= 1
            time.sleep(delay)
            delay *= backoff
            r = self._doRequest(url, method, body, headers)
    r.raise_for_status()
    result = {}
    contentType = r.headers["Content-Type"]
    if contentType is None:
        contentType = "text/plain"
    else:
        contentType = contentType.split(";")[0]
    if contentType.lower() == "application/json":
        try:
            result["body"] = json.loads(r.text)
        # Fixed: was a bare "except:" that swallowed every exception,
        # including KeyboardInterrupt/SystemExit. json.loads signals a
        # decode failure with ValueError (JSONDecodeError subclass).
        except ValueError:
            result["body"] = r.text
    else:
        result["body"] = r.text
    result["status"] = r.status_code
    result["resp"] = r
    result["content-type"] = contentType
    return result
Execute an HTTP request and return a dict containing the response and the response status code .
56,080
def get(self, url, headers={}, retry=True):
    """Execute an HTTP GET request and return the response dict produced
    by :meth:`request`.
    """
    kwargs = {"url": url, "method": "GET", "headers": headers, "retry": retry}
    return self.request(**kwargs)
Execute an HTTP GET request and return a dict containing the response and the response status code .
56,081
def post(self, url, body="", headers={}, retry=True):
    """Execute an HTTP POST request and return the response dict produced
    by :meth:`request`.

    A ``Content-Length`` header is derived from *body*.

    Fixed: the original assigned Content-Length into the *headers* argument
    in place, which both polluted the shared mutable default dict across
    calls and mutated any dict the caller passed in.  We now work on a copy.
    """
    headers = dict(headers)
    headers["Content-Length"] = str(len(body))
    return self.request(url=url, method="POST", body=body, headers=headers,
                        retry=retry)
Execute an HTTP POST request and return a dict containing the response and the response status code .
56,082
def patch(self, url, body="", headers={}, retry=True):
    """Execute an HTTP PATCH request and return the response dict produced
    by :meth:`request`.
    """
    kwargs = {"url": url, "method": "PATCH", "body": body,
              "headers": headers, "retry": retry}
    return self.request(**kwargs)
Execute an HTTP PATCH request and return a dict containing the response and the response status code .
56,083
def clone(cls, srcpath, destpath):
    """Copy a repository to a new location via ``svnadmin dump`` piped into
    a freshly created repository.

    :param srcpath: path of the repository to copy.
    :param destpath: destination directory; created if it does not exist.
    :returns: the newly created repository object.
    :raises subprocess.CalledProcessError: if the dump exits non-zero.
    """
    try:
        os.makedirs(destpath)
    except OSError as e:
        # Only tolerate "already exists"; re-raise anything else.
        if not e.errno == errno.EEXIST:
            raise
    cmd = [SVNADMIN, 'dump', '--quiet', '.']
    dump = subprocess.Popen(
        cmd,
        cwd=srcpath,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
    )
    repo = cls.create(destpath)
    # load() consumes the dump's stdout pipe directly; stderr is read
    # afterwards so it can be attached to the error on failure.
    repo.load(dump.stdout)
    stderr = dump.stderr.read()
    dump.stdout.close()
    dump.stderr.close()
    dump.wait()
    if dump.returncode != 0:
        raise subprocess.CalledProcessError(dump.returncode, cmd, stderr)
    return repo
Copy a main repository to a new location .
56,084
def proplist(self, rev, path=None):
    """List Subversion properties of *path* at revision *rev*.

    When *path* is None, properties of the revision itself are listed.
    """
    rev, prefix = self._maprev(rev)
    if path is not None:
        path = type(self).cleanPath(_join(prefix, path))
    return self._proplist(str(rev), path)
List Subversion properties of the path
56,085
def propget(self, prop, rev, path=None):
    """Get the value of Subversion property *prop* on *path* at *rev*.

    When *path* is None, the property is read from the revision itself.
    """
    rev, prefix = self._maprev(rev)
    if path is not None:
        path = type(self).cleanPath(_join(prefix, path))
    return self._propget(prop, str(rev), path)
Get Subversion property value of the path
56,086
def dump(self, stream, progress=None, lower=None, upper=None,
         incremental=False, deltas=False):
    """Dump the repository to a dumpfile *stream* via ``svnadmin dump``.

    :param stream: writable file object receiving the dump.
    :param progress: stream for svnadmin progress output; quiet when None.
    :param lower: first revision to dump; whole history when None.
        NOTE(review): *upper* is silently ignored when *lower* is None.
    :param upper: last revision to dump.
    :raises subprocess.CalledProcessError: if svnadmin exits non-zero.
    """
    cmd = [SVNADMIN, 'dump', '.']
    if progress is None:
        cmd.append('-q')
    if lower is not None:
        cmd.append('-r')
        rev_spec = (str(int(lower)) if upper is None
                    else '%d:%d' % (int(lower), int(upper)))
        cmd.append(rev_spec)
    if incremental:
        cmd.append('--incremental')
    if deltas:
        cmd.append('--deltas')
    proc = subprocess.Popen(cmd, cwd=self.path, stdout=stream, stderr=progress)
    proc.wait()
    if proc.returncode != 0:
        raise subprocess.CalledProcessError(proc.returncode, cmd)
Dump the repository to a dumpfile stream .
56,087
def load(self, stream, progress=None, ignore_uuid=False, force_uuid=False,
         use_pre_commit_hook=False, use_post_commit_hook=False,
         parent_dir=None):
    """Load a dumpfile *stream* into the repository via ``svnadmin load``.

    :param stream: readable file object supplying the dump.
    :param progress: stream for svnadmin progress output; quiet when None.
    :raises subprocess.CalledProcessError: if svnadmin exits non-zero; the
        captured stderr is attached to the exception.
    """
    cmd = [SVNADMIN, 'load', '.']
    if progress is None:
        cmd.append('-q')
    # Optional flags, in svnadmin's expected order.
    optional_flags = (('--ignore-uuid', ignore_uuid),
                      ('--force-uuid', force_uuid),
                      ('--use-pre-commit-hook', use_pre_commit_hook),
                      ('--use-post-commit-hook', use_post_commit_hook))
    cmd.extend(flag for flag, wanted in optional_flags if wanted)
    if parent_dir:
        cmd.extend(['--parent-dir', parent_dir])
    proc = subprocess.Popen(cmd, cwd=self.path, stdin=stream,
                            stdout=progress, stderr=subprocess.PIPE)
    stderr = proc.stderr.read()
    proc.stderr.close()
    proc.wait()
    if proc.returncode != 0:
        raise subprocess.CalledProcessError(proc.returncode, cmd, stderr)
Load a dumpfile stream into the repository .
56,088
def temp_file(content=None, suffix='', prefix='tmp', parent_dir=None):
    """Create a temporary file, optionally populated with *content*, and
    delete it when the context exits.

    :param content: str or bytes to write into the file; nothing written
        when falsy.
    :yields: resolved pathlib.Path of the temporary file.
    """
    is_binary = isinstance(content, (bytes, bytearray))
    dir_arg = None if parent_dir is None else str(parent_dir)
    fd, raw_path = tempfile.mkstemp(suffix, prefix, dir_arg, text=False)
    path = pathlib.Path(raw_path)
    try:
        try:
            if content:
                payload = content if is_binary else content.encode()
                os.write(fd, payload)
        finally:
            # The descriptor is closed no matter whether the write succeeded.
            os.close(fd)
        yield path.resolve()
    finally:
        # Tolerate the file already having been removed by the caller.
        with temporary.util.allow_missing_file():
            path.unlink()
Create a temporary file and optionally populate it with content . The file is deleted when the context exits .
56,089
def load_content(self):
    """Load the book content.

    Resolves the NCX table-of-contents file relative to the root file, then
    reads every part listed there into ``self.content``.
    """
    # Directory of the root file inside the archive (root_file_url with its
    # basename stripped).
    rel_path = self.root_file_url.replace(os.path.basename(self.root_file_url), '')
    self.toc_file_url = rel_path + self.root_file.find(id="ncx")['href']
    self.toc_file_soup = bs(self.book_file.read(self.toc_file_url), 'xml')
    # NOTE(review): cross() presumably pairs each navLabel with its matching
    # content element -- confirm it is zip-like, not a cartesian product.
    for n, c in cross(self.toc_file_soup.find_all('navLabel'),
                      self.toc_file_soup.find_all('content')):
        content_soup = bs(self.book_file.read(rel_path + c.get('src')))
        # NOTE(review): 'part_name' is taken from the content element (c),
        # not the navLabel (n); looks suspicious -- confirm intent.
        self.content.append({'part_name': c.text,
                             'source_url': c.get('src'),
                             'content_source': content_soup,
                             'content_source_body': content_soup.body,
                             'content_source_text': content_soup.body.text})
Load the book content
56,090
def UninstallTrump(RemoveDataTables=True, RemoveOverrides=True,
                   RemoveFailsafes=True):
    """Drop all tables associated with Trump.

    Written for PostgreSQL, but should be very easy to adapt to other
    databases.

    :param RemoveDataTables: also drop every per-symbol data table whose
        name is listed in ``_symbols.name``.
    :param RemoveOverrides: also drop the ``_overrides`` table.
    :param RemoveFailsafes: also drop the ``_failsafes`` table.
    """
    ts = ['_symbols', '_symbol_validity', '_symbol_tags', '_symbol_aliases',
          '_feeds', '_feed_munging', '_feed_munging_args', '_feed_sourcing',
          '_feed_validity', '_feed_meta', '_feed_tags', '_feed_handle',
          '_index_kwargs', '_indicies', '_symbol_handle', '_symboldatadef']
    if RemoveOverrides:
        ts.append('_overrides')
    if RemoveFailsafes:
        ts.append('_failsafes')
    engine = create_engine(ENGINE_STR)
    if RemoveDataTables:
        results = engine.execute("SELECT name FROM _symbols;")
        datatables = [row['name'] for row in results]
        ts = ts + datatables
    # NOTE(review): table names read from _symbols are interpolated directly
    # into the DROP statements; acceptable for trusted local data, but not
    # injection-safe against arbitrary symbol names.
    drops = "".join(['DROP TABLE IF EXISTS "{}" CASCADE;'.format(t)
                     for t in ts])
    engine.execute(drops)
This script removes all tables associated with Trump. It's written for PostgreSQL but should be very easy to adapt to other databases.
56,091
def digestInSilico(proteinSequence, cleavageRule='[KR]', missedCleavage=0,
                   removeNtermM=True, minLength=5, maxLength=55):
    """Return peptide sequences and cleavage information derived from an in
    silico digestion of a polypeptide.

    :param proteinSequence: amino acid sequence of the protein.
    :param cleavageRule: regex; each match's end offset is a cleavage site.
    :param missedCleavage: maximum number of missed cleavage sites.
    :param removeNtermM: also generate peptides with the N-terminal
        methionine removed.
    :param minLength: minimum peptide length to report.
    :param maxLength: maximum peptide length to report.
    :returns: list of ``(sequence, info)`` tuples; info contains
        1-based 'startPos', 'endPos' and 'missedCleavage'.
    """
    def passFilter(startPos, endPos):
        # Keep only peptides inside the allowed length window.
        return minLength <= endPos - startPos <= maxLength

    # Cleavage positions are the end offsets of every rule match, plus the
    # C-terminus of the protein.
    cleavagePosList = {match.end()
                       for match in re.finditer(cleavageRule, proteinSequence)}
    cleavagePosList.add(len(proteinSequence))
    cleavagePosList = sorted(cleavagePosList)
    numCleavageSites = len(cleavagePosList)
    if missedCleavage >= numCleavageSites:
        missedCleavage = numCleavageSites - 1

    digestionResults = []

    def addPeptide(startPos, endPos, numMissed):
        # Record one peptide (1-based start position) if it passes the filter.
        if passFilter(startPos, endPos):
            info = {'startPos': startPos + 1, 'endPos': endPos,
                    'missedCleavage': numMissed}
            digestionResults.append((proteinSequence[startPos:endPos], info))

    # Peptides starting after a removed N-terminal methionine.
    # Fixed: [:1] avoids an IndexError on an empty sequence.
    if removeNtermM and proteinSequence[:1] == 'M':
        for cleavagePos in range(missedCleavage + 1):
            addPeptide(1, cleavagePosList[cleavagePos], cleavagePos)

    # Peptides starting at the protein N-terminus.
    if cleavagePosList[0] != 0:
        for cleavagePos in range(missedCleavage + 1):
            addPeptide(0, cleavagePosList[cleavagePos], cleavagePos)

    # Internal peptides between cleavage sites.  Fixed: the original inner
    # loop reused the name "missedCleavage" for its counter, shadowing the
    # parameter; a distinct name removes that hazard (same iteration space).
    for lastCleavagePos in range(numCleavageSites):
        for numMissed in range(missedCleavage + 1):
            nextCleavagePos = lastCleavagePos + numMissed + 1
            if nextCleavagePos < numCleavageSites:
                addPeptide(cleavagePosList[lastCleavagePos],
                           cleavagePosList[nextCleavagePos], numMissed)
    return digestionResults
Returns a list of peptide sequences and cleavage information derived from an in silico digestion of a polypeptide .
56,092
def calcPeptideMass(peptide, **kwargs):
    """Calculate the mass of a peptide.

    :param peptide: peptide sequence; modifications are written as
        ``[modId]`` tags inside the sequence.
    :param aaMass: optional mapping of amino acid -> mass
        (default maspy.constants.aaMass).
    :param aaModMass: optional mapping of modification id -> mass delta
        (default maspy.constants.aaModMass).
    :param elementMass: optional element mass table
        (default pyteomics.mass.nist_mass).
    :returns: peptide mass including modifications and H2O.
    :raises Exception: if the peptide contains a modification tag absent
        from *aaModMass*.
    """
    aaMass = kwargs.get('aaMass', maspy.constants.aaMass)
    aaModMass = kwargs.get('aaModMass', maspy.constants.aaModMass)
    elementMass = kwargs.get('elementMass', pyteomics.mass.nist_mass)

    addModMass = 0.0
    unmodPeptide = peptide
    # Strip every known modification tag, accumulating its mass delta.
    for modId, modMass in viewitems(aaModMass):
        modSymbol = '[' + modId + ']'
        numMod = peptide.count(modSymbol)
        if numMod > 0:
            unmodPeptide = unmodPeptide.replace(modSymbol, '')
            addModMass += modMass * numMod

    if unmodPeptide.find('[') != -1:
        # Fixed: the offending sequence is reported in the exception message
        # instead of a stray debug print() left in library code.
        raise Exception('The peptide "' + unmodPeptide + '" contains a '
                        'modification not present in maspy.constants.aaModMass')

    # Residue masses plus one water (2 H + 1 O) for the termini.
    unmodPeptideMass = sum(aaMass[i] for i in unmodPeptide)
    unmodPeptideMass += elementMass['H'][0][0] * 2 + elementMass['O'][0][0]
    return unmodPeptideMass + addModMass
Calculate the mass of a peptide .
56,093
def removeModifications(peptide):
    """Remove all ``[...]`` modification tags from a peptide string and
    return the plain amino acid sequence.
    """
    while '[' in peptide:
        # Drop everything from the first '[' through the first ']'
        # (both located in the current string).
        prefix = peptide.split('[', 1)[0]
        remainder = peptide.split(']', 1)[1]
        peptide = prefix + remainder
    return peptide
Removes all modifications from a peptide string and return the plain amino acid sequence .
56,094
def returnModPositions(peptide, indexStart=1, removeModString='UNIMOD:'):
    """Determine the amino acid positions of all modifications present in
    *peptide*.

    :param peptide: sequence with modifications written as ``[...]`` tags.
    :param indexStart: position of the first amino acid (1 by default).
    :param removeModString: prefix stripped from each modification name.
    :returns: dict mapping modification name to a list of positions.
    """
    modPositions = dict()
    while '[' in peptide:
        # Name between the first '[' and the following ']'.
        modName = peptide.split('[')[1].split(']')[0]
        position = peptide.find('[') - 1
        if position == -1:
            # An N-terminal tag counts as the first residue.
            position = 0
        position += indexStart
        # Remove only this occurrence, then normalize the name.
        peptide = peptide.replace('[' + modName + ']', '', 1)
        if removeModString:
            modName = modName.replace(removeModString, '')
        modPositions.setdefault(modName, list()).append(position)
    return modPositions
Determines the amino acid positions of all present modifications .
56,095
def calcMhFromMz(mz, charge):
    """Calculate the MH+ value from mz and charge.

    Subtracts the mass of the extra protons beyond the first.
    """
    proton = maspy.constants.atomicMassProton
    return (mz * charge) - (proton * (charge - 1))
Calculate the MH + value from mz and charge .
56,096
def calcMzFromMh(mh, charge):
    """Calculate the mz value from MH+ and charge.

    Adds the mass of the extra protons beyond the first, then divides by
    the charge.
    """
    proton = maspy.constants.atomicMassProton
    return (mh + (proton * (charge - 1))) / charge
Calculate the mz value from MH + and charge .
56,097
def calcMzFromMass(mass, charge):
    """Calculate the mz value of a peptide from its (neutral) mass and
    charge.
    """
    proton = maspy.constants.atomicMassProton
    return (mass + (proton * charge)) / charge
Calculate the mz value of a peptide from its mass and charge .
56,098
def calcMassFromMz(mz, charge):
    """Calculate the (neutral) mass of a peptide from its mz and charge."""
    proton = maspy.constants.atomicMassProton
    return (mz - proton) * charge
Calculate the mass of a peptide from its mz and charge .
56,099
def execute(self, processProtocol, command, env={}, path=None, uid=None,
            gid=None, usePTY=0, childFDs=None):
    """Execute a process on the remote machine using SSH.

    :param processProtocol: protocol instance to connect to the process.
    :param command: command to run; wrapped in an SSHCommand with this
        object's precursor and *path* unless it already is one.
    :param env: environment for the process (mutable default argument --
        not mutated here, so benign, but worth confirming upstream use).
    :param uid: forwarded to getConnection to select the connection.
    :returns: Deferred that fires once the process is connected.

    NOTE(review): *gid* is accepted but never used in this body.
    """
    sshCommand = (command if isinstance(command, SSHCommand)
                  else SSHCommand(command, self.precursor, path))
    commandLine = sshCommand.getCommandLine()
    connectionDeferred = self.getConnection(uid)
    # connectProcess receives the established connection as its first
    # argument through the Deferred callback chain.
    connectionDeferred.addCallback(connectProcess, processProtocol,
                                   commandLine, env, usePTY, childFDs)
    return connectionDeferred
Execute a process on the remote machine using SSH