idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
16,200
def get_model_queries(self, query_obj, model_queries_config):
    """Apply each configured model query to *query_obj* and return the result.

    *model_queries_config* is an iterable of (search_term, model_attribute)
    pairs; pairs whose search term is None are skipped.
    """
    for search4, model_attrib in model_queries_config:
        if search4 is None:
            continue
        query_obj = self._model_query(query_obj, search4, model_attrib)
    return query_obj
Use this if you are searching for a field in the same model.
16,201
def _login(session):
    """Login to UPS.

    Fetches the login page to scrape a CSRF token, posts the credentials,
    then persists the session cookies. Raises UPSError on HTTP 403 or when
    the response page contains an error element with text.
    """
    resp = session.get(LOGIN_URL, params=_get_params(session.auth.locale))
    parsed = BeautifulSoup(resp.text, HTML_PARSER)
    # CSRF token is embedded in the login form markup
    csrf = parsed.find(CSRF_FIND_TAG, CSRF_FIND_ATTR).get(VALUE_ATTR)
    resp = session.post(LOGIN_URL, {'userID': session.auth.username, 'password': session.auth.password, 'loginAction': 'X', 'CSRFToken': csrf, 'loc': session.auth.locale})
    if resp.status_code == 403:
        raise UPSError('login failure')
    parsed = BeautifulSoup(resp.text, HTML_PARSER)
    # the site reports login errors inline rather than via status codes
    error = parsed.find(ERROR_FIND_TAG, ERROR_FIND_ATTR)
    if error and error.string:
        raise UPSError(error.string.strip())
    _save_cookies(session.cookies, session.auth.cookie_path)
Login to UPS .
16,202
def get_packages(session):
    """Get deliveries in progress and completed.

    Scrapes token/tid from the deliveries page, calls the UPS JSON service,
    and returns a list of package dicts. Raises UPSError when the tokens
    cannot be found or the JSON response cannot be parsed.
    """
    resp = session.get(DELIVERIES_URL, params=_get_params(session.auth.locale))
    parsed = BeautifulSoup(resp.text, HTML_PARSER)
    token_elem = parsed.find(TOKEN_FIND_TAG, TOKEN_FIND_ATTR)
    tid_elem = parsed.find(TID_FIND_TAG, TID_FIND_ATTR)
    if not token_elem or not tid_elem:
        raise UPSError('failed to find token or tid')
    token = token_elem.get(VALUE_ATTR)
    tid = tid_elem.get(VALUE_ATTR)
    resp = session.post(SERVICE_URL, {'token': token, 'uid': session.auth.username, 'callType': 'allShipments', 'tid': tid, 'loc': session.auth.locale})
    try:
        packages = []
        # the service prefixes the JSON body with a fixed-size preamble
        data = json.loads(resp.text[UPS_JSON_PREAMBLE_SIZE:])
        shipments = data['shipmentContainer']['inboundShipments'] + data['shipmentContainer']['historyShipments']
        for shipment in shipments:
            # short keys are the service's own field names (sfc/sfs/sfcn etc.)
            from_location = '{}, {}, {}'.format(shipment['sfc'], shipment['sfs'], shipment['sfcn'])
            estimated_date = _parsed_date(shipment['sddfd'])
            actual_date = _parsed_date(shipment['dd'])
            packages.append({'tracking_number': shipment['tn'], 'status': shipment['sts'], 'from': shipment['sfn'], 'from_location': from_location, 'estimated_delivery_date': estimated_date, 'estimated_delivery_timeframe': shipment['sdtfd'], 'delivery_date': actual_date})
        return packages
    except JSONDecodeError:
        raise UPSError('failed to parse json')
Get deliveries in progress and completed .
16,203
def get_session(username, password, locale=DEFAULT_LOCALE, cookie_path=COOKIE_PATH):
    """Get UPS HTTP session.

    Reuses cookies from *cookie_path* when the file exists; otherwise
    performs a fresh login. Credentials are stashed on session.auth so
    later calls can reach them.
    """
    class UPSAuth(AuthBase):
        # carries credentials/locale/cookie path; does not modify requests
        def __init__(self, username, password, locale, cookie_path):
            self.username = username
            self.password = password
            self.locale = locale
            self.cookie_path = cookie_path

        def __call__(self, r):
            return r
    session = requests.session()
    session.auth = UPSAuth(username, password, locale, cookie_path)
    if os.path.exists(cookie_path):
        session.cookies = _load_cookies(cookie_path)
    else:
        _login(session)
    return session
Get UPS HTTP session .
16,204
def hide(self, event):
    """Toggle the visibility of the content widget.

    Shows the content (restoring the expandable height) when it is hidden;
    otherwise collapses the widget to a 30px header. *event* is unused.
    """
    content_is_visible = not self.content.isHidden()
    if content_is_visible:
        self.content.hide()
        self.hideBtn.setIcon(self.showIcon)
        self.setFixedHeight(30)
    else:
        self.content.show()
        self.hideBtn.setIcon(self.hideIcon)
        self.setMaximumHeight(16777215)
Toggles the visibility of the content widget
16,205
def _next_rotation_id ( rotated_files ) : if not rotated_files : return 0 else : highest_rotated_file = max ( rotated_files , key = lambda x : x [ 1 ] ) return highest_rotated_file [ 1 ] + 1
Given the hanoi_rotator generated files in the output directory returns the rotation_id that will be given to the current file . If there are no existing rotated files return 0 .
16,206
def _locate_files_to_delete ( algorithm , rotated_files , next_rotation_id ) : rotation_slot = algorithm . id_to_slot ( next_rotation_id ) for a_path , a_rotation_id in rotated_files : if rotation_slot == algorithm . id_to_slot ( a_rotation_id ) : yield a_path
Looks for hanoi_rotator generated files that occupy the same slot that will be given to rotation_id .
16,207
def rotate(algorithm, path, ext="", destination_dir=None, verbose=False):
    """Programmatic access to the archive rotator.

    Builds the Paths descriptor for *path* and delegates the actual file
    moves to _move_files.
    """
    paths = Paths(path, ext, destination_dir)
    _move_files(algorithm, paths, verbose)
Programmatic access to the archive rotator
16,208
def update_or_create_candidate(self, candidate, aggregable=True, uncontested=False):
    """Create or update the CandidateElection linking *candidate* to this election.

    Returns the CandidateElection instance; the created flag from
    update_or_create is discarded.
    """
    candidate_election, c = CandidateElection.objects.update_or_create(candidate=candidate, election=self, defaults={"aggregable": aggregable, "uncontested": uncontested},)
    return candidate_election
Create a CandidateElection .
16,209
def delete_candidate(self, candidate):
    """Delete the CandidateElection linking *candidate* to this election."""
    matching = CandidateElection.objects.filter(candidate=candidate, election=self)
    matching.delete()
Delete a CandidateElection .
16,210
def get_candidates(self):
    """Return the list of Candidates that have a CandidateElection for this election."""
    return [
        ce.candidate
        for ce in CandidateElection.objects.filter(election=self)
    ]
Get all CandidateElections for this election .
16,211
def get_candidates_by_party(self):
    """Map party slug -> Candidate for every CandidateElection of this election."""
    return {
        ce.candidate.party.slug: ce.candidate
        for ce in CandidateElection.objects.filter(election=self)
    }
Get CandidateElections serialized into an object with party - slug keys .
16,212
def get_candidate_election(self, candidate):
    """Return the CandidateElection for *candidate* in this election (via .get)."""
    return CandidateElection.objects.get(candidate=candidate, election=self)
Get CandidateElection for a Candidate in this election .
16,213
def get_candidate_votes(self, candidate):
    """Get all votes attached to the CandidateElection for *candidate* in this election."""
    candidate_election = CandidateElection.objects.get(candidate=candidate, election=self)
    return candidate_election.votes.all()
Get all votes attached to a CandidateElection for a Candidate in this election .
16,214
def get_votes(self):
    """Get all votes for this election.

    Unions the vote querysets of every CandidateElection. Returns None when
    this election has no CandidateElections (preserving the original contract).
    """
    candidate_elections = CandidateElection.objects.filter(election=self)
    votes = None
    for ce in candidate_elections:
        ce_votes = ce.votes.all()
        # BUG FIX: the original computed `None | ce_votes` on the first
        # iteration, which raises TypeError; seed with the first queryset.
        votes = ce_votes if votes is None else votes | ce_votes
    return votes
Get all votes for this election .
16,215
def get_candidate_electoral_votes(self, candidate):
    """Get all electoral votes for *candidate* in this election."""
    candidate_election = CandidateElection.objects.get(candidate=candidate, election=self)
    return candidate_election.electoral_votes.all()
Get all electoral votes for a candidate in this election .
16,216
def get_electoral_votes(self):
    """Get all electoral votes for all candidates in this election.

    Unions the electoral-vote querysets of every CandidateElection. Returns
    None when this election has no CandidateElections.
    """
    candidate_elections = CandidateElection.objects.filter(election=self)
    electoral_votes = None
    for ce in candidate_elections:
        ce_ev = ce.electoral_votes.all()
        # BUG FIX: `None | queryset` raises TypeError on the first
        # iteration; seed with the first queryset instead.
        electoral_votes = ce_ev if electoral_votes is None else electoral_votes | ce_ev
    return electoral_votes
Get all electoral votes for all candidates in this election .
16,217
def get_candidate_delegates(self, candidate):
    """Get all pledged delegates for *candidate* in this election."""
    candidate_election = CandidateElection.objects.get(candidate=candidate, election=self)
    return candidate_election.delegates.all()
Get all pledged delegates for a candidate in this election .
16,218
def get_delegates(self):
    """Get all pledged delegates for any candidate in this election.

    Unions the delegate querysets of every CandidateElection. Returns None
    when this election has no CandidateElections.
    """
    candidate_elections = CandidateElection.objects.filter(election=self)
    delegates = None
    for ce in candidate_elections:
        ce_delegates = ce.delegates.all()
        # BUG FIX: `None | queryset` raises TypeError on the first
        # iteration; seed with the first queryset instead.
        delegates = ce_delegates if delegates is None else delegates | ce_delegates
    return delegates
Get all pledged delegates for any candidate in this election .
16,219
def list_packet_names(self):
    """Return an iterator over the existing packet names."""
    url = '/archive/{}/packet-names'.format(self._instance)
    response = self._client.get_proto(path=url)
    message = archive_pb2.GetPacketNamesResponse()
    message.ParseFromString(response.content)
    return iter(message.name)
Returns the existing packet names .
16,220
def list_processed_parameter_groups(self):
    """Return an iterator over the existing parameter groups."""
    url = '/archive/{}/parameter-groups'.format(self._instance)
    response = self._client.get_proto(path=url)
    info = archive_pb2.ParameterGroupInfo()
    info.ParseFromString(response.content)
    return iter(info.group)
Returns the existing parameter groups .
16,221
def list_processed_parameter_group_histogram(self, group=None, start=None, stop=None, merge_time=20):
    """Reads index records related to processed parameter groups between start and stop.

    :param group: optional group-name filter
    :param start: optional lower time bound
    :param stop: optional upper time bound
    :param merge_time: merge gap in seconds (sent to the server as ms)
    :return: paginated iterator of IndexGroup items
    """
    params = {}
    if group is not None:
        params['group'] = group
    if start is not None:
        params['start'] = to_isostring(start)
    if stop is not None:
        params['stop'] = to_isostring(stop)
    if merge_time is not None:
        params['mergeTime'] = int(merge_time * 1000)
    return pagination.Iterator(client=self._client, path='/archive/{}/parameter-index'.format(self._instance), params=params, response_class=archive_pb2.IndexResponse, items_key='group', item_mapper=IndexGroup,)
Reads index records related to processed parameter groups between the specified start and stop time .
16,222
def list_event_sources(self):
    """Return an iterator over the existing event sources."""
    url = '/archive/{}/events/sources'.format(self._instance)
    response = self._client.get_proto(path=url)
    info = archive_pb2.EventSourceInfo()
    info.ParseFromString(response.content)
    return iter(info.source)
Returns the existing event sources .
16,223
def list_completeness_index(self, start=None, stop=None):
    """Reads completeness index records between the specified start and stop time.

    :return: paginated iterator of IndexGroup items
    """
    params = {}
    if start is not None:
        params['start'] = to_isostring(start)
    if stop is not None:
        params['stop'] = to_isostring(stop)
    return pagination.Iterator(client=self._client, path='/archive/{}/completeness-index'.format(self._instance), params=params, response_class=archive_pb2.IndexResponse, items_key='group', item_mapper=IndexGroup,)
Reads completeness index records between the specified start and stop time .
16,224
def list_packets(self, name=None, start=None, stop=None, page_size=500, descending=False):
    """Reads packet information between the specified start and stop time.

    :param name: optional packet-name filter
    :param page_size: server page limit per request
    :param descending: newest-first ordering when True
    :return: paginated iterator of Packet items
    """
    params = {'order': 'desc' if descending else 'asc',}
    if name is not None:
        params['name'] = name
    if page_size is not None:
        params['limit'] = page_size
    if start is not None:
        params['start'] = to_isostring(start)
    if stop is not None:
        params['stop'] = to_isostring(stop)
    return pagination.Iterator(client=self._client, path='/archive/{}/packets'.format(self._instance), params=params, response_class=rest_pb2.ListPacketsResponse, items_key='packet', item_mapper=Packet,)
Reads packet information between the specified start and stop time .
16,225
def list_events(self, source=None, severity=None, text_filter=None, start=None, stop=None, page_size=500, descending=False):
    """Reads events between the specified start and stop time.

    :param source: optional event-source filter
    :param severity: optional severity filter
    :param text_filter: optional free-text query (sent as 'q')
    :return: paginated iterator of Event items
    """
    params = {'order': 'desc' if descending else 'asc',}
    if source is not None:
        params['source'] = source
    if page_size is not None:
        params['limit'] = page_size
    if severity is not None:
        params['severity'] = severity
    if start is not None:
        params['start'] = to_isostring(start)
    if stop is not None:
        params['stop'] = to_isostring(stop)
    if text_filter is not None:
        params['q'] = text_filter
    return pagination.Iterator(client=self._client, path='/archive/{}/events'.format(self._instance), params=params, response_class=rest_pb2.ListEventsResponse, items_key='event', item_mapper=Event,)
Reads events between the specified start and stop time .
16,226
def sample_parameter_values(self, parameter, start=None, stop=None, sample_count=500, parameter_cache='realtime', source='ParameterArchive'):
    """Returns downsampled parameter samples (about *sample_count* buckets).

    When start/stop are omitted the window defaults to the last hour.
    NOTE(review): the default window uses naive datetime.utcnow() — confirm
    the server expects naive UTC timestamps.
    """
    path = '/archive/{}/parameters{}/samples'.format(self._instance, parameter)
    now = datetime.utcnow()
    params = {'count': sample_count, 'source': source, 'start': to_isostring(now - timedelta(hours=1)), 'stop': to_isostring(now),}
    if start is not None:
        params['start'] = to_isostring(start)
    if stop is not None:
        params['stop'] = to_isostring(stop)
    # a truthy parameter_cache selects a processor; falsy disables realtime
    if parameter_cache:
        params['processor'] = parameter_cache
    else:
        params['norealtime'] = True
    response = self._client.get_proto(path=path, params=params)
    message = pvalue_pb2.TimeSeries()
    message.ParseFromString(response.content)
    samples = getattr(message, 'sample')
    return [Sample(s) for s in samples]
Returns parameter samples .
16,227
def list_parameter_ranges(self, parameter, start=None, stop=None, min_gap=None, max_gap=None, parameter_cache='realtime'):
    """Returns parameter ranges between the specified start and stop time.

    :param min_gap: minimum gap in seconds (sent as ms)
    :param max_gap: maximum gap in seconds (sent as ms)
    """
    path = '/archive/{}/parameters{}/ranges'.format(self._instance, parameter)
    params = {}
    if start is not None:
        params['start'] = to_isostring(start)
    if stop is not None:
        params['stop'] = to_isostring(stop)
    if min_gap is not None:
        params['minGap'] = int(min_gap * 1000)
    if max_gap is not None:
        params['maxGap'] = int(max_gap * 1000)
    # a truthy parameter_cache selects a processor; falsy disables realtime
    if parameter_cache:
        params['processor'] = parameter_cache
    else:
        params['norealtime'] = True
    response = self._client.get_proto(path=path, params=params)
    message = pvalue_pb2.Ranges()
    message.ParseFromString(response.content)
    ranges = getattr(message, 'range')
    return [ParameterRange(r) for r in ranges]
Returns parameter ranges between the specified start and stop time .
16,228
def list_parameter_values(self, parameter, start=None, stop=None, page_size=500, descending=False, parameter_cache='realtime', source='ParameterArchive'):
    """Reads parameter values between the specified start and stop time.

    :return: paginated iterator of ParameterValue items
    """
    params = {'source': source, 'order': 'desc' if descending else 'asc',}
    if page_size is not None:
        params['limit'] = page_size
    if start is not None:
        params['start'] = to_isostring(start)
    if stop is not None:
        params['stop'] = to_isostring(stop)
    # a truthy parameter_cache selects a processor; falsy disables realtime
    if parameter_cache:
        params['processor'] = parameter_cache
    else:
        params['norealtime'] = True
    return pagination.Iterator(client=self._client, path='/archive/{}/parameters{}'.format(self._instance, parameter), params=params, response_class=rest_pb2.ListParameterValuesResponse, items_key='parameter', item_mapper=ParameterValue,)
Reads parameter values between the specified start and stop time .
16,229
def list_command_history(self, command=None, start=None, stop=None, page_size=500, descending=False):
    """Reads command history entries between the specified start and stop time.

    :param command: optional command name to narrow the query
    :return: paginated iterator of CommandHistory items
    """
    params = {'order': 'desc' if descending else 'asc',}
    if page_size is not None:
        params['limit'] = page_size
    if start is not None:
        params['start'] = to_isostring(start)
    if stop is not None:
        params['stop'] = to_isostring(stop)
    # a specific command narrows the path; otherwise list all commands
    if command:
        path = '/archive/{}/commands{}'.format(self._instance, command)
    else:
        path = '/archive/{}/commands'.format(self._instance)
    return pagination.Iterator(client=self._client, path=path, params=params, response_class=rest_pb2.ListCommandsResponse, items_key='entry', item_mapper=CommandHistory,)
Reads command history entries between the specified start and stop time .
16,230
def list_tables(self):
    """Return an iterator over the existing tables."""
    url = '/archive/{}/tables'.format(self._instance)
    response = self._client.get_proto(path=url)
    listing = rest_pb2.ListTablesResponse()
    listing.ParseFromString(response.content)
    return iter([Table(t) for t in listing.table])
Returns the existing tables .
16,231
def get_table(self, table):
    """Gets a single table by name."""
    path = '/archive/{}/tables/{}'.format(self._instance, table)
    response = self._client.get_proto(path=path)
    message = archive_pb2.TableInfo()
    message.ParseFromString(response.content)
    return Table(message)
Gets a single table .
16,232
def list_streams(self):
    """Return an iterator over the existing streams."""
    url = '/archive/{}/streams'.format(self._instance)
    response = self._client.get_proto(path=url)
    listing = rest_pb2.ListStreamsResponse()
    listing.ParseFromString(response.content)
    return iter([Stream(s) for s in listing.stream])
Returns the existing streams .
16,233
def get_stream(self, stream):
    """Gets a single stream by name."""
    path = '/archive/{}/streams/{}'.format(self._instance, stream)
    response = self._client.get_proto(path=path)
    message = archive_pb2.StreamInfo()
    message.ParseFromString(response.content)
    return Stream(message)
Gets a single stream .
16,234
def create_stream_subscription(self, stream, on_data, timeout=60):
    """Create a new stream subscription.

    Opens a websocket subscription for *stream*, routing data through
    *on_data* via a parsing wrapper; blocks up to *timeout* seconds waiting
    for the server to confirm.
    """
    options = rest_pb2.StreamSubscribeRequest()
    options.stream = stream
    manager = WebSocketSubscriptionManager(self._client, resource='stream', options=options)
    subscription = WebSocketSubscriptionFuture(manager)
    wrapped_callback = functools.partial(_wrap_callback_parse_stream_data, subscription, on_data)
    manager.open(wrapped_callback, instance=self._instance)
    # wait for the subscription confirmation (or timeout)
    subscription.reply(timeout=timeout)
    return subscription
Create a new stream subscription .
16,235
def execute_sql(self, statement):
    """Executes a single SQL statement.

    :return: the result message when the response carries one, else None
    """
    path = '/archive/{}/sql'.format(self._instance)
    req = archive_pb2.ExecuteSqlRequest()
    req.statement = statement
    response = self._client.post_proto(path=path, data=req.SerializeToString())
    message = archive_pb2.ExecuteSqlResponse()
    message.ParseFromString(response.content)
    if message.HasField('result'):
        return message.result
    return None
Executes a single SQL statement .
16,236
def copy_file(source, destination, unique=False, sort=False, case_sensitive=True, create_path=False):
    """Copy *source* to *destination*, delegating all options to _File.copy."""
    _File.copy(source, destination, unique, sort, case_sensitive, create_path)
Python utility to copy a file
16,237
def get_details(self):
    """Return the song's details as a dict of stripped strings."""
    raw = {
        "title": self.get_title(),
        "artist": self.get_artist(),
        "album": self.get_album(),
        "year": self.get_year(),
    }
    return {field: str(value).strip() for field, value in raw.items()}
Finds songs details
16,238
def _set_attr(self, attribute):
    """Add an ID3 frame to self.tags and persist the song file."""
    self.tags.add(attribute)
    self.song.save()
Sets attribute of song
16,239
def set_title(self, name):
    """Set the song's title (ID3 TIT2 frame).

    NOTE(review): name.decode('utf-8') implies *name* is bytes — confirm
    callers pass bytes, not str.
    """
    self._set_attr(TIT2(encoding=3, text=name.decode('utf-8')))
Sets the song's title
16,240
def set_artist(self, artist):
    """Set the song's artist (ID3 TPE1 frame); expects bytes (decoded as UTF-8)."""
    self._set_attr(TPE1(encoding=3, text=artist.decode('utf-8')))
Sets song s artist
16,241
def set_album(self, album):
    """Set the song's album (ID3 TALB frame); expects bytes (decoded as UTF-8)."""
    self._set_attr(TALB(encoding=3, text=album.decode('utf-8')))
Sets song s album
16,242
def set_nr_track(self, nr_track):
    """Set the song's track number (ID3 TRCK frame)."""
    self._set_attr(TRCK(encoding=3, text=str(nr_track)))
Sets the song's track number
16,243
def set_year(self, year):
    """Set the song's year (ID3 TDRC frame)."""
    self._set_attr(TDRC(encoding=3, text=str(year)))
Sets song s year
16,244
def set_genre(self, genre):
    """Set the song's genre (ID3 TCON frame)."""
    self._set_attr(TCON(encoding=3, text=str(genre)))
Sets song s genre
16,245
def updateTraceCount(self):
    """Update the trace-count label from the track view model's traceCount()."""
    self.ui.ntracesLbl.setNum(self.ui.trackview.model().traceCount())
Updates the trace count label with the data from the model
16,246
def preview(self):
    """Assemble the current stimulus components and show a spectrogram preview window.

    Pops a warning dialog and aborts when model verification reports a problem.
    """
    msg = self.ui.trackview.model().verify()
    if msg:
        answer = QtGui.QMessageBox.warning(self, "Bummer", 'Problem: {}.'.format(msg))
        return
    stim_signal, atten, ovld = self.ui.trackview.model().signal()
    fig = SpecWidget()
    # 2 — presumably Qt.ApplicationModal; confirm against the Qt enum
    fig.setWindowModality(2)
    fig.updateData(stim_signal, self.ui.trackview.model().samplerate())
    fig.setTitle('Stimulus Preview')
    fig.show()
    # keep a reference so the window is not garbage collected
    self.previewFig = fig
Assemble the current components in the QStimulusModel and generate a spectrogram plot in a separate window
16,247
def assertpathsandfiles(self):
    """Assertions to make sure that arguments are at least mostly valid.

    Resolves self.miseqfolder (latest run folder under miseqpath when
    unset), then self.samplesheet (custom sheet when provided, otherwise
    SampleSheet.csv in the run folder).
    NOTE(review): uses assert for input validation — stripped under -O.
    """
    assert os.path.isdir(self.miseqpath), u'MiSeqPath is not a valid directory {0!r:s}'.format(self.miseqpath)
    if not self.miseqfolder:
        # default to the lexicographically latest run folder
        miseqfolders = glob('{}*/'.format(self.miseqpath))
        self.miseqfolder = sorted(miseqfolders)[-1]
        self.miseqfoldername = self.miseqfolder.split("/")[-2]
    else:
        self.miseqfoldername = self.miseqfolder
        self.miseqfolder = self.miseqpath + self.miseqfolder + "/"
    assert os.path.isdir(self.miseqfolder), u'MiSeqFolder is not a valid directory {0!r:s}'.format(self.miseqfolder)
    if self.customsamplesheet:
        self.samplesheet = self.customsamplesheet
        assert os.path.isfile(self.customsamplesheet), u'Could not find CustomSampleSheet as entered: {0!r:s}'.format(self.customsamplesheet)
    else:
        self.samplesheet = self.miseqfolder + "SampleSheet.csv"
Assertions to make sure that arguments are at least mostly valid
16,248
def numberofsamples(self):
    """Count the samples in the samplesheet, log the run parameters, then link fastqs.

    The count is the number of lines after the last "Sample_ID" header row.
    NOTE(review): the file is opened 'rb' but lines are tested against a str
    ("Sample_ID" in entry) — Python 2 semantics; this raises TypeError on
    Python 3. Confirm the target interpreter.
    """
    idline = 0
    linenumber = 0
    with open(self.samplesheet, "rb") as ssheet:
        for linenumber, entry in enumerate(ssheet):
            if "Sample_ID" in entry:
                idline = linenumber
    self.samplecount = linenumber - idline
    printtime('There are {} samples in this run. ' 'Running off-hours module with the following parameters:\n' 'MiSeqPath: {},\n' 'MiSeqFolder: {},\n' 'SampleSheet: {}'.format(self.samplecount, self.miseqpath, self.miseqfolder, self.samplesheet), self.start)
    self.fastqlinker()
Count the number of samples in the samplesheet
16,249
def print_packet_range():
    """Print the first and last archived packet and the timespan between them."""
    first_packet = next(iter(archive.list_packets()))
    last_packet = next(iter(archive.list_packets(descending=True)))
    print('First packet:', first_packet)
    print('Last packet:', last_packet)
    print('Timespan:', last_packet.generation_time - first_packet.generation_time)
Print the range of archived packets .
16,250
def iterate_specific_packet_range():
    """Count the packets received in the last hour."""
    now = datetime.utcnow()
    one_hour_ago = now - timedelta(hours=1)
    total = sum(1 for _ in archive.list_packets(start=one_hour_ago, stop=now))
    print('Found', total, 'packets in range')
Count the number of packets in a specific range .
16,251
def iterate_specific_event_range():
    """Count the events received in the last hour."""
    now = datetime.utcnow()
    one_hour_ago = now - timedelta(hours=1)
    total = sum(1 for _ in archive.list_events(start=one_hour_ago, stop=now))
    print('Found', total, 'events in range')
Count the number of events in a specific range .
16,252
def print_last_values():
    """Print the last 10 values of BatteryVoltage1."""
    recent = archive.list_parameter_values('/YSS/SIMULATOR/BatteryVoltage1', descending=True)
    for pval in islice(recent, 10):
        print(pval)
Print the last 10 values .
16,253
def iterate_specific_parameter_range():
    """Count the BatteryVoltage1 values received in the last hour."""
    now = datetime.utcnow()
    one_hour_ago = now - timedelta(hours=1)
    values = archive.list_parameter_values('/YSS/SIMULATOR/BatteryVoltage1', start=one_hour_ago, stop=now)
    total = sum(1 for _ in values)
    print('Found', total, 'parameter values in range')
Count the number of parameter values in a specific range .
16,254
def print_last_commands():
    """Print the last 10 commands."""
    recent = archive.list_command_history(descending=True)
    for entry in islice(recent, 10):
        print(entry)
Print the last 10 commands .
16,255
def transmogrify(l):
    """Fit a flat list into a nested ("treeable") dict.

    Each element becomes a key one level deeper than the previous one,
    e.g. ['a', 'b'] -> {'a': {'b': {}}}. Returns {} for an empty list
    (the original raised IndexError on l[0]).
    """
    d = {}
    tmp = d
    for c in l:
        tmp[c] = {}
        tmp = tmp[c]
    return d
Fit a flat list into a treeable object .
16,256
def tree(node, formatter=None, prefix=None, postfix=None, _depth=1):
    """Yield lines of a textual tree rendering of nested dict *node*.

    formatter/prefix/postfix are optional callables applied per key.
    NOTE(review): uses dict.iteritems and byte-escaped UTF-8 box-drawing
    characters — Python 2 code; do not run under Python 3 unmodified.
    """
    current = 0
    length = len(node.keys())
    # UTF-8 byte sequences for the box-drawing tee and elbow joints
    tee_joint = '\xe2\x94\x9c\xe2\x94\x80\xe2\x94\x80'
    elbow_joint = '\xe2\x94\x94\xe2\x94\x80\xe2\x94\x80'
    for key, value in node.iteritems():
        current += 1
        k = formatter(key) if formatter else key
        pre = prefix(key) if prefix else ''
        post = postfix(key) if postfix else ''
        # elbow on the last entry, tee otherwise
        space = elbow_joint if current == length else tee_joint
        yield ' {space} {prefix}{key}{postfix}'.format(space=space, key=k, prefix=pre, postfix=post)
        if value:
            # children of non-last entries keep a vertical rule prefix
            for e in tree(value, formatter=formatter, prefix=prefix, postfix=postfix, _depth=_depth + 1):
                yield (' | ' if current != length else ' ') + e
Print a tree .
16,257
def assemble_chain(leaf, store):
    """Assemble the trust chain from *leaf* up through the certs in *store*.

    Walks issuer CNs until a self-signed cert (issuer == subject) is found.
    When an issuer is missing from the store, a patched placeholder X509
    carrying that issuer's subject is appended. Returns the chain root-first.
    """
    store_dict = {}
    for cert in store:
        store_dict[cert.get_subject().CN] = cert
    chain = [leaf]
    current = leaf
    try:
        # a self-signed certificate terminates the walk
        while current.get_issuer().CN != current.get_subject().CN:
            chain.append(store_dict[current.get_issuer().CN])
            current = store_dict[current.get_issuer().CN]
    except KeyError:
        # issuer not found in store: append a placeholder marking the gap
        invalid = crypto.X509()
        patch_certificate(invalid)
        invalid.set_subject(current.get_issuer())
        chain.append(invalid)
    chain.reverse()
    return chain
Assemble the trust chain .
16,258
def _get_api_content(self):
    """Update self.api_content with the parsed JSON from the Github API.

    Appends the access token to the URL when GITHUB_TOKEN is configured.
    """
    if GITHUB_TOKEN is not None:
        self.add_params_to_url({"access_token": GITHUB_TOKEN})
    api_content_response = requests.get(self.api_url)
    self.api_content = json.loads(api_content_response.text)
Updates class api content by calling Github api and storing result
16,259
def get_trending_daily(lang=""):
    """Fetch repos from the Github Trending Daily page (optionally per language).

    Scrapes the HTML repo list and returns a list of GithubUserRepository.
    """
    url = "https://github.com/trending/"
    url += str(lang).lower().replace(" ", "") + "?since=daily"
    api_content_request = urllib.request.Request(url)
    api_content_response = urllib.request.urlopen(api_content_request).read().decode("utf-8")
    soup = BeautifulSoup(api_content_response, "lxml")
    raw_repo_list = soup.find("ol", {"class": "repo-list"}).find_all("li")
    repos_list = []
    for repo in raw_repo_list:
        # entries render as "owner / name" inside the first div's anchor
        details = repo.find_all("div")[0].a.text.split("/")
        repo_owner = details[0].strip()
        repo_name = details[1].strip()
        repos_list.append(GithubUserRepository(repo_owner, repo_name))
    return repos_list
Fetches repos in Trending Daily Github section
16,260
def _get_repos(url):
    """Page through *url*, collecting GithubUserRepository entries until an empty page."""
    current_page = 1
    there_is_something_left = True
    repos_list = []
    while there_is_something_left:
        api_driver = GithubRawApi(url, url_params={"page": current_page}, get_api_content_now=True)
        for repo in api_driver.api_content:
            repo_name = repo["name"]
            repo_user = repo["owner"]["login"]
            repos_list.append(GithubUserRepository(repo_user, repo_name))
        # an empty page means every repo has been consumed
        there_is_something_left = bool(api_driver.api_content)
        current_page += 1
    return repos_list
Gets repos in url
16,261
def _dict_of_vars_to_vcf_file(variants, outfile):
    """Write a variants dict (from vcf_file_read.vcf_file_to_dict_of_vars) to a bare-bones VCF file.

    Positions are written 1-based; alts are comma-joined; INFO is fixed to
    SVTYPE=MERGED and most columns are left empty ('.').
    """
    header_lines = ['##fileformat=VCFv4.2', '##source=cluster_vcf_records, version ' + cluster_vcf_records_version, '##fileDate=' + str(datetime.date.today()), '\t'.join(['#CHROM', 'POS', 'ID', 'REF', 'ALT', 'QUAL', 'FILTER', 'INFO'])]
    with open(outfile, 'w') as f:
        print(*header_lines, sep='\n', file=f)
        # deterministic output: sort by ref name, then position, then ref allele
        for ref_name in sorted(variants):
            for pos in sorted(variants[ref_name]):
                for ref_string in sorted(variants[ref_name][pos]):
                    alts = sorted(list(variants[ref_name][pos][ref_string]))
                    print(ref_name, pos + 1, '.', ref_string, ','.join(alts), '.', 'PASS', 'SVTYPE=MERGED', sep='\t', file=f)
Input is a dict made by vcf_file_read.vcf_file_to_dict_of_vars. Output is a bare-bones VCF file (columns empty wherever possible).
16,262
def collect_commands(package_name=None, in_place=False, level=1):
    """Collect commands from a package and its subpackages.

    When *package_name* is None it is derived from the caller's module
    (stack frame at *level*). Every .py module under the package path(s) is
    imported and its Command instances gathered. When *in_place* is true the
    commands are also injected into the caller's globals. Returns an
    OrderedDict sorted by command name.
    """
    commands = {}
    frame = inspect.stack()[level][0]
    f_globals = frame.f_globals
    if package_name is None:
        # derive package from the calling module's name and file location
        package_name = f_globals['__name__'].rsplit('.', 1)[0]
        package_paths = [os.path.dirname(f_globals['__file__'])]
    else:
        package = importlib.import_module(package_name)
        package_name = package.__name__
        package_paths = package.__path__
    for package_path in package_paths:
        package_path = pathlib.Path(package_path)
        for file in package_path.rglob('*.py'):
            rel_path = str(file.relative_to(package_path))
            # strip the .py suffix before building the dotted module name
            rel_path = rel_path[:-3]
            module_name = rel_path.replace(os.sep, '.')
            module_name = '.'.join((package_name, module_name))
            module = importlib.import_module(module_name)
            module_commands = get_commands_in_namespace(module)
            commands.update(module_commands)
    commands = OrderedDict((name, commands[name]) for name in sorted(commands))
    if in_place:
        f_globals.update(commands)
    return commands
Collect commands from package and its subpackages .
16,263
def get_commands_in_namespace(namespace=None, level=1):
    """Get Command instances found in *namespace*.

    *namespace* may be a module, a mapping, or None (in which case the
    calling frame's globals at *level* are used). Returns an OrderedDict
    sorted by name.
    """
    from ..command import Command
    commands = {}
    if namespace is None:
        # default to the calling frame's globals
        frame = inspect.stack()[level][0]
        namespace = frame.f_globals
    elif inspect.ismodule(namespace):
        namespace = vars(namespace)
    for name in namespace:
        obj = namespace[name]
        if isinstance(obj, Command):
            commands[name] = obj
    return OrderedDict((name, commands[name]) for name in sorted(commands))
Get commands in namespace .
16,264
def selectedIndexes(self):
    """Return the QModelIndexes for the currently selected components.

    Components no longer present in the model are pruned from
    self._selectedComponents as a side effect.
    """
    model = self.model()
    indexes = []
    # BUG FIX: iterate over a copy — the original removed items from
    # self._selectedComponents while iterating it, which skips elements.
    for comp in list(self._selectedComponents):
        index = model.indexByComponent(comp)
        if index is None:
            self._selectedComponents.remove(comp)
        else:
            indexes.append(index)
    return indexes
Returns a list of QModelIndex currently in the model
16,265
def selection(self):
    """Return the currently selected indexes as a QItemSelection object."""
    sel = QtGui.QItemSelection()
    for index in self.selectedIndexes():
        sel.select(index, index)
    return sel
Returns items in selection as a QItemSelection object
16,266
def selectionComponents(self):
    """Return the selected components that are still present in the model."""
    model = self.model()
    return [
        comp
        for comp in self._selectedComponents
        if model.indexByComponent(comp) is not None
    ]
Returns the names of the component types in this selection
16,267
def expose_content(self):
    """Return all current content of this page joined into one string.

    Iterates every language and every placeholder of the page template,
    collecting non-empty content blocks, CRLF-separated.
    """
    placeholders = get_placeholders(self.get_template())
    placeholder_names = [p.name for p in placeholders]
    chunks = []
    for lang in self.get_languages():
        for ctype in placeholder_names:
            block = self.get_content(lang, ctype, False)
            if block:
                chunks.append(block)
    return u"\r\n".join(chunks)
Return all the current content of this page into a string .
16,268
def read_http_header(sock):
    """Read an HTTP header from *sock*; return (header, leftover_data).

    Blocks until the blank-line terminator is received.

    BUG FIX: the original looped forever when the peer closed the
    connection before a complete header arrived (recv returning '' just
    re-scanned the same buffer); now raises ConnectionError instead.
    """
    buf = []
    hdr_end = '\r\n\r\n'
    while True:
        chunk = sock.recv(bufsize)
        if not chunk:
            raise ConnectionError('connection closed before end of HTTP header')
        buf.append(chunk.decode('utf-8'))
        data = ''.join(buf)
        i = data.find(hdr_end)
        if i != -1:
            return data[:i], data[i + len(hdr_end):]
Read HTTP header from socket return header and rest of data .
16,269
def connect(url):
    """Connect to a UNIX or TCP socket named by *url* (tcp:// or ipc:// scheme).

    Returns (socket, hostname); raises ValueError for any other scheme.
    NOTE(review): for tcp, netloc becomes a (host, port) tuple of *strings* —
    most socket APIs require an int port; confirm against the socket type
    used. Also, `socket()` is called as a constructor while
    `socket.gethostname()` is used as a module attribute — the import style
    is not visible here; verify both resolve.
    """
    url = urlparse(url)
    if url.scheme == 'tcp':
        sock = socket()
        netloc = tuple(url.netloc.rsplit(':', 1))
        hostname = socket.gethostname()
    elif url.scheme == 'ipc':
        sock = socket(AF_UNIX)
        netloc = url.path
        hostname = 'localhost'
    else:
        raise ValueError('unknown socket type: %s' % url.scheme)
    sock.connect(netloc)
    return sock, hostname
Connect to UNIX or TCP socket .
16,270
def print_callback(msg):
    """Serialize *msg* to stdout as one line of JSON and flush."""
    stdout.write(json.dumps(msg))
    stdout.write('\n')
    stdout.flush()
Print callback prints message to stdout as JSON in one line .
16,271
def prog_callback(prog, msg):
    """Program callback: spawn *prog* and write *msg* as UTF-8 JSON to its stdin."""
    pipe = Popen(prog, stdin=PIPE)
    data = json.dumps(msg)
    pipe.stdin.write(data.encode('utf-8'))
    pipe.stdin.close()
Program callback calls prog with message in stdin
16,272
def git_tag2eups_tag(git_tag):
    """Convert a git tag to an acceptable eups tag.

    A tag starting with a digit is prefixed with 'v'; '.' and '-' are
    replaced with '_'.
    """
    eups_tag = 'v' + git_tag if re.match(r'\d', git_tag) else git_tag
    return eups_tag.translate(str.maketrans('.-', '__'))
Convert git tag to an acceptable eups tag format
16,273
def sqrt(n):
    """Return the square root of *n* in an exact (Constructible) representation.

    Rationals are lifted to Constructible first; other types raise
    ValueError. When the root lies in n's own field it is returned
    directly, otherwise a new Constructible is built over the extension
    field (n, n.field).
    """
    if isinstance(n, Rational):
        n = Constructible(n)
    elif not isinstance(n, Constructible):
        raise ValueError('the square root is not implemented for the type %s' % type(n))
    r = n._try_sqrt()
    if r is not None:
        return r
    # root not in n's field: represent it as 0 + 1*sqrt(n) over (n, n.field)
    return Constructible(Constructible.lift_rational_field(0, n.field), Constructible.lift_rational_field(1, n.field), (n, n.field))
return the square root of n in an exact representation
16,274
def _try_sqrt(self):
    """Try to compute the square root within the field itself.

    Returns the square root as a Constructible lying in self.field if
    one exists there, otherwise None.  Raises ValueError when self is
    negative.
    """
    # Base case: no field extension -- self is a plain rational (b == 0).
    if not self.field:
        assert self.b == 0
        root, remainder = fsqrt(self.a)
        # fsqrt gives an exact root only when the remainder is 1;
        # otherwise the rational has no rational square root.
        if remainder == 1:
            return Constructible(root)
        else:
            return None
    if self._sign() < 0:
        raise ValueError('math domain error %s' % self)
    # For self = a + b*sqrt(r): nn = a^2 - b^2*r (the field norm).
    # If nn is negative, no root exists in this field.
    nn = self.a * self.a - self.b * self.b * self.r
    if nn._sign() < 0:
        return None
    n = nn._try_sqrt()
    if n is None:
        return None
    # Candidate with the root carried by the 'a' component.
    a = ((self.a + n) * Fraction(1, 2))._try_sqrt()
    if a is not None:
        result = Constructible(a, self.b / a * Fraction(1, 2), self.field)
        assert result.field == self.field
        return result
    # Otherwise try the candidate carried by the 'b' component.
    b = ((self.a + n) / self.r * Fraction(1, 2))._try_sqrt()
    if b is not None:
        result = Constructible(self.b / b * Fraction(1, 2), b, self.field)
        assert result.field == self.field
        return result
    return None
try to compute the square root in the field itself .
16,275
def print_packet_count():
    """Print the number of packets grouped by packet name."""
    for name in archive.list_packet_names():
        # Sum per-interval record counts over the whole histogram.
        total = sum(rec.count
                    for group in archive.list_packet_histogram(name)
                    for rec in group.records)
        print(' {: <40} {: >20}'.format(name, total))
Print the number of packets grouped by packet name .
16,276
def print_pp_groups():
    """Print the number of processed parameter frames by group name."""
    for group in archive.list_processed_parameter_groups():
        # Sum per-interval record counts over the whole histogram.
        frames = sum(
            rec.count
            for pp_group in archive.list_processed_parameter_group_histogram(group)
            for rec in pp_group.records)
        print(' {: <40} {: >20}'.format(group, frames))
Print the number of processed parameter frames by group name .
16,277
def print_event_count():
    """Print the number of events grouped by source."""
    for source in archive.list_event_sources():
        # Sum per-interval record counts over the whole histogram.
        total = sum(rec.count
                    for group in archive.list_event_histogram(source)
                    for rec in group.records)
        print(' {: <40} {: >20}'.format(source, total))
Print the number of events grouped by source .
16,278
def print_command_count():
    """Print the number of commands grouped by command name."""
    mdb = client.get_mdb(instance='simulator')
    for command in mdb.list_commands():
        total = 0
        for group in archive.list_command_histogram(command.qualified_name):
            for rec in group.records:
                total += rec.count
        # Print the command's qualified name -- formatting the command
        # object itself (as the original did) does not yield the name
        # and is inconsistent with the histogram lookup above.
        print(' {: <40} {: >20}'.format(command.qualified_name, total))
Print the number of commands grouped by name .
16,279
def cmp_dict(d1, d2, ignore_keys=()):
    """Compare two dicts for equality, ignoring selected keys.

    *ignore_keys* may be any iterable of keys.  The default was changed
    from a mutable ``[]`` to an immutable ``()`` (classic mutable
    default-argument pitfall); behavior is unchanged for callers.
    """
    def _strip(d):
        # Drop the ignored keys before comparing.
        return {k: v for k, v in d.items() if k not in ignore_keys}
    return _strip(d1) == _strip(d2)
Compare dicts ignoring select keys
16,280
def cross_reference_products(
    eups_products,
    manifest_products,
    ignore_manifest_versions=False,
    fail_fast=False,
):
    """Cross-reference EupsTag and Manifest data and return a merged result.

    Parameters
    ----------
    eups_products : dict
        Mapping of product name -> eups data (must carry 'eups_version').
    manifest_products : dict
        Mapping of product name -> manifest data.
    ignore_manifest_versions : bool
        When True, the manifest's eups_version is overwritten with the
        eups value so the version comparison below never triggers.
    fail_fast : bool
        Raise on the first problem instead of collecting all of them.

    Returns
    -------
    (products, problems)
        Merged per-product dicts (manifest values win on key collisions)
        and the list of collected errors.
    """
    products = {}
    problems = []
    for name, eups_data in eups_products.items():
        try:
            manifest_data = manifest_products[name]
        except KeyError:
            # NOTE(review): textwrap.dedent() is called without its
            # template argument here and below -- the multi-line message
            # strings appear to have been lost; confirm against upstream.
            yikes = RuntimeError(textwrap.dedent().format(
                product=name,
                eups_version=eups_data['eups_version'],
            ))
            if fail_fast:
                raise yikes from None
            problems.append(yikes)
            error(yikes)
            # NOTE(review): there is no 'continue' here, so after a
            # KeyError the code below uses manifest_data from a previous
            # iteration (or an unbound name on the first) -- verify.
        if ignore_manifest_versions:
            # Copy so the caller's manifest data is not mutated.
            manifest_data = manifest_data.copy()
            manifest_data['eups_version'] = eups_data['eups_version']
        if eups_data['eups_version'] != manifest_data['eups_version']:
            yikes = RuntimeError(textwrap.dedent().format(
                product=name,
                eups_eups_version=eups_data['eups_version'],
                manifest_eups_version=manifest_data['eups_version'],
            ))
            if fail_fast:
                raise yikes
            problems.append(yikes)
            error(yikes)
        # Merge: start from eups data, overlay manifest data.
        products[name] = eups_data.copy()
        products[name].update(manifest_data)
    if problems:
        error("{n} product(s) have error(s)".format(n=len(problems)))
    return products, problems
Cross reference EupsTag and Manifest data and return a merged result
16,281
def check_existing_git_tag(repo, t_tag, **kwargs):
    """Check for a pre-existing tag in the github repo.

    Returns False when no tag of that name exists, True when an
    equivalent tag already exists, and raises GitTagExistsError when a
    tag of the same name but different content is present.
    """
    assert isinstance(repo, github.Repository.Repository), type(repo)
    assert isinstance(t_tag, codekit.pygithub.TargetTag), type(t_tag)
    debug("looking for existing tag: {tag} in repo: {repo}".format(
        repo=repo.full_name,
        tag=t_tag.name,
    ))
    e_ref = pygithub.find_tag_by_name(repo, t_tag.name)
    if not e_ref:
        debug(" not found: {tag}".format(tag=t_tag.name))
        return False
    try:
        e_tag = repo.get_git_tag(e_ref.object.sha)
    except github.RateLimitExceededException:
        raise
    except github.GithubException as e:
        # Bug fix: the original formatted this message with e_tag, which
        # is unbound when get_git_tag() itself failed (NameError masking
        # the real error); use the data actually available.
        msg = "error getting tag: {tag} [{sha}]".format(
            tag=t_tag.name,
            sha=e_ref.object.sha,
        )
        raise pygithub.CaughtRepositoryError(repo, e, msg) from None
    debug(" found existing: {tag} [{sha}]".format(
        tag=e_tag.tag,
        sha=e_tag.sha,
    ))
    if cmp_existing_git_tag(t_tag, e_tag, **kwargs):
        return True
    # NOTE(review): textwrap.dedent() is missing its template argument;
    # the multi-line message string appears to have been lost in
    # extraction -- confirm against upstream.
    yikes = GitTagExistsError(textwrap.dedent().format(
        tag=t_tag.name,
        repo=repo.full_name,
        e_sha=e_tag.object.sha,
        e_message=e_tag.message,
        e_tagger=e_tag.tagger,
        t_sha=t_tag.sha,
        t_message=t_tag.message,
        t_tagger=t_tag.tagger,
    ))
    raise yikes
Check for a pre-existing tag in the GitHub repo.
16,282
def run():
    """Create the tag.

    Driver: resolves the eups tag and manifest, builds the tag message
    and tagger identity, logs in to GitHub, cross-references products,
    checks existing tags, and finally tags everything -- collecting
    pre-flight problems and raising DogpileError if any were found.
    """
    args = parse_args()
    codetools.setup_logging(args.debug)
    git_tag = args.tag
    git_email = codetools.lookup_email(args)
    git_user = codetools.lookup_user(args)
    # Derive the eups tag from the git tag unless given explicitly.
    if not args.manifest_only:
        eups_tag = args.eups_tag
        if not eups_tag:
            eups_tag = eups.git_tag2eups_tag(git_tag)
        debug("using eups tag: {eups_tag}".format(eups_tag=eups_tag))
    manifest = args.manifest
    debug("using manifest: {manifest}".format(manifest=manifest))
    # Template still carries {git_tag} as a placeholder (doubled braces).
    if not args.manifest_only:
        message_template = "Version {{git_tag}}" \
            " release from {eups_tag}/{manifest}".format(
                eups_tag=eups_tag,
                manifest=manifest,
            )
    else:
        message_template = "Version {{git_tag}}" \
            " release from manifest {manifest}".format(
                manifest=manifest,
            )
    debug("using tag message: {msg}".format(msg=message_template))
    tagger = github.InputGitAuthor(
        git_user,
        git_email,
        codetools.current_timestamp(),
    )
    debug("using taggger: {tagger}".format(tagger=tagger))
    global g
    g = pygithub.login_github(token_path=args.token_path, token=args.token)
    org = g.get_organization(args.org)
    info("tagging repos in org: {org}".format(org=org.login))
    problems = []
    manifest_products = versiondb.Manifest(
        manifest, base_url=args.versiondb_base_url).products
    # Cross-reference against the eups tag unless running manifest-only.
    if not args.manifest_only:
        eups_products = eups.EupsTag(
            eups_tag, base_url=args.eupstag_base_url).products
        products, err = cross_reference_products(
            eups_products,
            manifest_products,
            ignore_manifest_versions=args.ignore_manifest_versions,
            fail_fast=False,
        )
        problems += err
    else:
        products = manifest_products
    # Optionally restrict to the first N products (debugging aid).
    if args.limit:
        products = dict(itertools.islice(products.items(), args.limit))
    products, err = get_repo_for_products(
        org=org,
        products=products,
        allow_teams=args.allow_team,
        ext_teams=args.external_team,
        deny_teams=args.deny_team,
        fail_fast=False,
    )
    problems += err
    products_to_tag, err = check_product_tags(
        products,
        git_tag,
        tag_message_template=message_template,
        tagger=tagger,
        force_tag=args.force_tag,
        fail_fast=False,
        ignore_git_message=args.ignore_git_message,
        ignore_git_tagger=args.ignore_git_tagger,
    )
    problems += err
    if args.verify:
        err = identify_products_missing_tags(products_to_tag)
        problems += err
    # Abort before tagging anything if any pre-flight check failed.
    if problems:
        msg = "{n} pre-flight error(s)".format(n=len(problems))
        raise codetools.DogpileError(problems, msg)
    tag_products(
        products_to_tag,
        fail_fast=args.fail_fast,
        dry_run=args.dry_run,
    )
Create the tag
16,283
def can_proceed(self):
    """Return True when the update interval has elapsed since the last update."""
    next_allowed = self.last_update + datetime.timedelta(days=self.update_interval)
    return datetime.datetime.now() >= next_allowed
Checks whether app can proceed
16,284
def parse_lock(self):
    """Parse the app lock file, populating self.last_update.

    If the lock file is missing, unreadable, or malformed, it is reset
    to the epoch and re-parsed.  The original used a bare ``except:``
    that also swallowed programming errors; this catches only the
    failures the read path can actually produce.
    """
    try:
        with open(self.lock_file, "r") as reader:
            data = json.loads(reader.read())
        self.last_update = datetime.datetime.strptime(
            data["last_update"], AppCronLock.DATETIME_FORMAT)
    except (OSError, ValueError, KeyError):
        # OSError: file missing/unreadable; ValueError: bad JSON or a
        # timestamp that does not match the format; KeyError: field absent.
        self.write_lock(last_update=datetime.datetime.fromtimestamp(0))
        self.parse_lock()
Parses app lock file
16,285
def write_lock(self, last_update=None):
    """Write the lock file.

    *last_update* defaults to the current time.  The original used
    ``last_update=datetime.datetime.now()`` as the default, which is
    evaluated once at import time and therefore records a stale
    timestamp on every later call.
    """
    if last_update is None:
        last_update = datetime.datetime.now()
    data = {"last_update": last_update.strftime(AppCronLock.DATETIME_FORMAT)}
    with open(self.lock_file, "w") as writer:
        json.dump(data, writer)
Writes lock file
16,286
def filter_seq(seq):
    """Examine an unreserved sequence to see if it is prone to mutation.

    Returns (n, guide) for candidate sequences, or None when the
    sequence is reserved or judged not mutation-prone.  This currently
    ignores solely-power-of-2 guides with class > 3.
    """
    # Reserved sequences are never candidates.
    if seq.res:
        return None
    n = nt.Factors(seq.factors)
    guide, s, t = aq.canonical_form(n)
    seq.guide = guide
    cls = aq.get_class(guide=guide)
    # Count of 'P' markers ("larges") in the factor list -- presumably
    # large unfactored composites/primes; confirm against nt/aq docs.
    num_larges = seq.factors.count('P')
    upper_bound_tau = cls - num_larges - len(t)
    if cls < 2 or upper_bound_tau < 2:
        return None
    # v: the guide with all powers of 2 stripped out (its odd part).
    v = nt.Factors({p: a for p, a in guide.items() if p != 2 and a > 0})
    # Skip solely-power-of-2 guides (v == 1) when the class exceeds 3.
    if int(v) == 1 and cls > 3:
        return None
    if not aq.is_driver(guide=guide):
        return None
    return n, guide
Examines unreserved sequences to see if they are prone to mutation . This currently ignores solely - power - of - 2 guides with b > 3
16,287
def get_token(self, appname, username, password):
    """Get a security token by connecting to the TouchWorks API.

    Note: *appname* is accepted but not used in the request body here;
    only username/password are posted.  Returns a SecurityToken wrapping
    the token string.  Raises TouchWorksException on any failure.
    """
    ext_exception = TouchWorksException(
        TouchWorksErrorMessages.GET_TOKEN_FAILED_ERROR)
    data = {'Username': username, 'Password': password}
    resp = self._http_request(TouchWorksEndPoints.GET_TOKEN, data)
    try:
        logger.debug('token : %s' % resp)
        # An empty response body means no token was issued.
        if not resp.text:
            raise ext_exception
        try:
            # A valid token must parse as a version-4 UUID string.
            uuid.UUID(resp.text, version=4)
            return SecurityToken(resp.text)
        except ValueError:
            logger.error('response was not valid uuid string. %s' % resp.text)
            raise ext_exception
    except Exception as ex:
        # NOTE(review): this broad handler also re-catches the
        # ext_exception raised above, logging it a second time before
        # re-raising -- apparently intentional catch-all behavior.
        logger.exception(ex)
        raise ext_exception
get the security token by connecting to TouchWorks API
16,288
def _http_request ( self , api , data , headers = None ) : if not headers : headers = { 'Content-Type' : 'application/json' } if not self . _token_valid : self . _token = self . get_token ( self . _app_name , self . _username , self . _password ) response = requests . post ( self . _base_url + '/' + api , data = json . dumps ( data ) , headers = headers ) logger . debug ( json . dumps ( data ) ) logger . debug ( response . text ) response . raise_for_status ( ) return response
internal method for handling request and response and raising an exception is http return status code is not success
16,289
def query_item(self, key, abis):
    """Query items by system call number (decimal or hex) or by name."""
    field = 'name'
    try:
        # Decimal number?
        key = int(key)
        field = 'number'
    except ValueError:
        try:
            # Hexadecimal number?
            key = int(key, 16)
            field = 'number'
        except ValueError:
            # Neither: treat the key as a syscall name.
            pass
    criterion = and_(
        getattr(Item, field) == key,
        or_(Item.abi == abi for abi in abis),
    )
    return self.session.query(Item).filter(criterion).all()
Query items based on system call number or name .
16,290
def query_decl(self, **kwargs):
    """Return every declaration row matching the given column=value filters."""
    matches = self.session.query(Decl).filter_by(**kwargs)
    return matches.all()
Query declarations .
16,291
def add_data(self, filenames):
    """Add data parsed from the given open files to the session.

    Files ending in '.tbl' are parsed as syscall tables into Item rows;
    anything else is treated as grep output over SYSCALL_DEFINE macros
    and parsed into Decl rows.  Commits the session at the end.
    """
    def _parse_table(table):
        # Split a .tbl file into tab-separated fields, keeping only
        # lines that start with a digit (the actual table rows).
        def _parse_line(line):
            return line.split('\t')
        lines = (_parse_line(one) for one in table.splitlines()
                 if re.match(r'^\d', one))
        # remove_false presumably drops empty/falsy fields -- confirm.
        return (remove_false(one) for one in lines)

    def _parse_decl(decl):
        # decl looks like "SYSCALL_DEFINEn(name, args...)".
        index = len('SYSCALL_DEFINE')
        argc = decl[index]
        rest = decl[index + 1:][1:-1].split(',')
        name = rest[0]
        args = ','.join(rest[1:])
        return name, argc, args

    def _parse_line(line):
        # grep output format: "<filename>:<declaration>".
        index = line.find(':')
        if index == -1:
            # NOTE(review): '%s' here is never interpolated -- the line
            # is passed as a second argument instead of formatted.
            raise RuntimeError('This is unexpected: %s', line)
        filename = line[:index]
        decl = line[index + 1:]
        return filename, _parse_decl(decl)

    def _split_into_lines(grep_output):
        # Re-join declarations that were split across physical lines.
        lines = grep_output.replace('\n\n', '\n')
        lines = lines.replace('\n\t', '').replace('\t', ' ')
        return lines.strip().splitlines()

    for one in filenames:
        if one.name.endswith('.tbl'):
            for item in _parse_table(one.read()):
                args = list(item)
                # Pad to five columns: number, abi, name, entry, compat.
                if len(args) != 5:
                    args += [''] * (5 - len(args))
                self.session.add(Item(name=args[2], abi=args[1],
                                      number=args[0], entry=args[3],
                                      compat=args[4]))
        else:
            for line in _split_into_lines(one.read()):
                filename, rest = (_parse_line(line))
                self.session.add(Decl(name=rest[0], filename=filename,
                                      argc=rest[1], args=rest[2]))
    self.session.commit()
Add data .
16,292
def start(self, on_exit_callback=None):
    """Instantiate every registered service, then start the server loop."""
    # Each registered entry is a factory; replace it with an instance.
    for name in self.services.keys():
        factory = self.services[name]
        self.services[name] = factory()
    self.server.start(on_exit_callback)
Start the Engel application by initializing all registered services and starting an Autobahn IOLoop .
16,293
def register(self, event, callback, selector=None):
    """Register an event that you want to monitor.

    :param event: the event to watch for.
    :param callback: callable invoked when the event fires.
    :param selector: optional filter restricting which elements trigger
        the callback (semantics defined by the processor).
    """
    self.processor.register(event, callback, selector)
Resister an event that you want to monitor .
16,294
def unregister(self, event, callback, selector=None):
    """Unregister an event that was being monitored.

    Arguments must match the ones used in the corresponding
    ``register`` call so the processor can locate the subscription.
    """
    self.processor.unregister(event, callback, selector)
Unregisters an event that was being monitored .
16,295
def from_api(cls, api):
    """Create an application description for the todo app.

    Based on the api it wires up all three interaction channels: the
    raw API, a UX wrapper, and a pseudo-RPC layer.
    """
    ux = TodoUX(api)
    # Imported here rather than at module level -- presumably to avoid
    # a circular import; confirm before hoisting.
    from .pseudorpc import PseudoRpc
    rpc = PseudoRpc(api)
    return cls({ViaAPI: api, ViaUX: ux, ViaRPC: rpc})
Create an application description for the todo app that, based on the api, can use either the api or the ux for interaction.
16,296
def consequence_level(self):
    """One of NONE, WATCH, WARNING, DISTRESS, CRITICAL or SEVERE.

    Returns None when the underlying proto does not carry the field.
    """
    if not self._proto.HasField('consequenceLevel'):
        return None
    level = self._proto.consequenceLevel
    return mdb_pb2.SignificanceInfo.SignificanceLevelType.Name(level)
One of NONE WATCH WARNING DISTRESS CRITICAL or SEVERE .
16,297
def get_documents_count(self):
    """Return the total number of documents across all collections."""
    return sum(self.database[name].count()
               for name in self.get_collection_names())
Counts documents in database
16,298
def get_documents_in_collection(self, collection_name, with_id=True):
    """Return all documents in the named collection.

    When *with_id* is False the "_id" key is stripped from each
    document before returning.
    """
    documents = list(self.database[collection_name].find())
    if not with_id:
        for document in documents:
            document.pop("_id")
    return documents
Gets all documents in collection
16,299
def get_documents_in_database(self, with_id=True):
    """Return all documents in the database, collection by collection."""
    docs = []
    for name in self.get_collection_names():
        docs.extend(self.get_documents_in_collection(name, with_id=with_id))
    return docs
Gets all documents in database