idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
16,600
def locknode(node, lock=True):
    """Context manager body: lock (or unlock) `node`, then restore the
    original lock state on exit.

    `node` may be a single node name or a sequence of names; `oldstatus`
    from cmds.lockNode is a list either way.
    """
    previous = cmds.lockNode(node, q=1)
    cmds.lockNode(node, lock=lock)
    try:
        yield
    finally:
        # Only restore nodes that still exist in the scene.
        if isinstance(node, basestring):
            if cmds.objExists(node):
                cmds.lockNode(node, lock=previous[0])
        else:
            for name, was_locked in zip(node, previous):
                if cmds.objExists(name):
                    cmds.lockNode(name, lock=was_locked)
Contextmanager that will lock or unlock the given node and afterwards restore the original status
16,601
def get_top_namespace(node):
    """Return the top-level namespace of the given node name.

    Strips any DAG path prefix and a leading ':'; returns ':' (the root
    namespace) when the short name carries no namespace at all.
    """
    short = node.rsplit("|", 1)[-1].lstrip(":")
    return short.partition(":")[0] if ":" in short else ":"
Return the top namespace of the given node
16,602
def disconnect_node(node, src=True, dst=True):
    """Break all incoming (src) and/or outgoing (dst) connections on node.

    listConnections with connections=True returns a flat [plugOnNode,
    otherPlug, ...] pair list, hence the stride-2 pairing below.
    """
    if dst:
        pairs = cmds.listConnections(node, connections=True, plugs=True,
                                     source=False) or []
        for out_plug, in_plug in zip(pairs[::2], pairs[1::2]):
            cmds.disconnectAttr(out_plug, in_plug)
    if src:
        pairs = cmds.listConnections(node, connections=True, plugs=True,
                                     destination=False) or []
        # Here the first of each pair is the destination plug on `node`.
        for in_plug, out_plug in zip(pairs[::2], pairs[1::2]):
            cmds.disconnectAttr(out_plug, in_plug)
Disconnect all connections from node
16,603
def fuzzy_match(self, other):
    """Return True when either other's magic equals this alias, or this
    alias' major part (before the first '.') equals other's alias."""
    try:
        magic_hit = self.alias == other.magic
    except AttributeError:
        # `other` has no magic attribute; fall through to the alias check.
        magic_hit = False
    alias_hit = False
    if '.' in self.alias:
        alias_hit = self.alias.split('.')[0] == other.alias
    return magic_hit or alias_hit
Given another token, see if either the major alias identifier matches the other token's alias, or if magic matches the alias.
16,604
def eval(self):
    """Evaluate this input token.

    For an 'or' token, return a list of independent 'and' Input tokens,
    one per matched file; otherwise return the filenames joined into a
    single space-separated string.
    """
    if self.and_or == 'or':
        return [Input(self.alias, f, self.cwd, 'and') for f in self.files]
    return ' '.join(self.files)
Evaluates the given input and returns a string containing the actual filenames represented . If the input token represents multiple independent files then eval will return a list of all the input files needed otherwise it returns the filenames in a string .
16,605
def files(self):
    """Return the list of files matching this input token.

    Tries, in order: the literal path, the magic path (only when the
    token is a glob), then the bare alias.

    Raises:
        ValueError: when no candidate pattern matches anything on disk.
    """
    # Fix: the original initialised `res = None` and then guarded the
    # first glob with `if not res:` — a check that was always true.
    matches = glob.glob(self.path)
    if not matches and self.is_glob:
        matches = glob.glob(self.magic_path)
    if not matches:
        matches = glob.glob(self.alias)
    if not matches:
        raise ValueError('No files match. %s' % self)
    return matches
Returns a list of all the files that match the given input token .
16,606
def from_string(string, _or=''):
    """Parse a raw string into an Input token.

    A truthy `_or` marks the token as an 'or' group; otherwise the
    and/or mode is left empty.
    """
    return Input(string, and_or='or' if _or else '')
Parse a given string and turn it into an input token .
16,607
def eval(self):
    """Return the filename to be used for script output.

    Precedence: an explicit magic value wins; with no filename a name is
    synthesised from the alias and extension; otherwise the stored path.
    """
    if self.magic:
        return self.magic
    return self.path if self.filename else file_pattern.format(self.alias, self.ext)
Returns a filename to be used for script output .
16,608
def _clean ( self , magic ) : if magic . lower ( ) == 'o' : self . magic = '' elif magic [ : 2 ] . lower ( ) == 'o:' : self . magic = magic [ 2 : ] elif magic [ : 2 ] . lower ( ) == 'o.' : self . ext = magic [ 1 : ]
Given a magic string remove the output tag designator .
16,609
def get_candidate_election(self, election):
    """Return the CandidateElection joining this candidate to `election`."""
    return CandidateElection.objects.get(election=election, candidate=self)
Get a CandidateElection .
16,610
def get_election_votes(self, election):
    """Return all votes recorded for this candidate in `election`."""
    ce = CandidateElection.objects.get(candidate=self, election=election)
    return ce.votes.all()
Get all votes for this candidate in an election .
16,611
def get_election_electoral_votes(self, election):
    """Return all electoral votes for this candidate in `election`."""
    ce = CandidateElection.objects.get(candidate=self, election=election)
    return ce.electoral_votes.all()
Get all electoral votes for this candidate in an election .
16,612
def get_election_delegates(self, election):
    """Return all pledged delegates for this candidate in `election`."""
    ce = CandidateElection.objects.get(candidate=self, election=election)
    return ce.delegates.all()
Get all pledged delegates for this candidate in an election .
16,613
def load_config(self, config_file=None):
    """Load the Ellis configuration and return self (fluent style).

    With no explicit `config_file`, the standard locations are searched:
    system-wide paths first, then the file bundled next to this module.
    Unreadable files are silently skipped by ConfigParser.read.
    """
    if config_file is None:
        config_file = [
            '/etc/ellis.conf',
            '/etc/ellis/ellis.conf',
            os.path.join(os.path.dirname(__file__), 'ellis.conf'),
        ]
    self.config.read(config_file, encoding='utf-8')
    return self
If config_file is not None, tries to load the Ellis configuration from the given location. If for some reason the file can't be read, Ellis will not start.
16,614
def load_rules(self):
    """Load the Rules from the config file.

    Each config section becomes one Rule.  'limit' is optional (default 1);
    'filter' and 'action' are mandatory — sections missing them, or whose
    values Rule() rejects, are skipped with a warning.

    Returns:
        self, for call chaining.

    Raises:
        NoRuleError: when no section yields a valid rule.
    """
    for rule_name in self.config.sections():
        # Default used whenever 'limit' is absent or not an integer.
        limit = 1
        try:
            limit = self.config.getint(rule_name, 'limit')
        except ValueError:
            warnings.warn("Rule '{0}': invalid value for 'limit' option. "
                          "Limit must be an integer > 0. "
                          "Going on with the default value of 1.".format(rule_name))
        except configparser.NoOptionError:
            warnings.warn("Rule '{0}': no value specified for 'limit' "
                          "option. Going on with the default value of 1.".format(rule_name))
        try:
            filter_str = self.config.get(rule_name, 'filter')
            action_str = self.config.get(rule_name, 'action')
        except configparser.NoOptionError as e:
            # Mandatory option missing: drop the whole section.
            warnings.warn("Ignoring '{0}' rule: {1}.".format(rule_name, e))
        else:
            try:
                rule = Rule(rule_name, filter_str, limit, action_str)
            except ValueError as e:
                # Rule's own validation failed: drop the section too.
                warnings.warn("Ignoring '{0}' rule: {1}.".format(rule_name, e))
            else:
                self.rules.append(rule)
    if not self.rules:
        raise NoRuleError()
    return self
Loads the Rules from the config file .
16,615
def load_units(self):
    """Build the set of systemd units Ellis will watch; returns self.

    If any rule lacks a `systemd_unit` option the whole unit filter is
    abandoned (cleared) so every journald entry gets checked instead.
    """
    for rule in self.rules:
        try:
            unit = self.config.get(rule.name, 'systemd_unit')
        except configparser.NoOptionError:
            warnings.warn("Rule '{0}' doesn't have a `systemd_unit` "
                          "option set.\nThe filters will be checked "
                          "against all journald entries, which will "
                          "probably result in poor performance.".format(rule.name))
            self.units.clear()
            break
        else:
            # Normalise bare unit names to full .service units.
            if not unit.endswith(".service"):
                unit += ".service"
            self.units.add(unit)
    return self
Build a set of systemd units that Ellis will watch .
16,616
def find_commons(lists):
    """Return the values of the first list that appear in every other list.

    Order and duplicates of the first list are preserved.
    """
    remaining = lists[1:]
    return [item for item in lists[0] if is_in_all(item, remaining)]
Finds common values
16,617
def send_query(self, query):
    """Forward `query` to the solr connector submodule.

    Raises:
        esgfpid.exceptions.SolrSwitchedOff: when solr access is disabled.
    """
    if not self.__switched_on:
        msg = 'Not sending query'
        LOGGER.debug(msg)
        raise esgfpid.exceptions.SolrSwitchedOff(msg)
    return self.__solr_server_connector.send_query(query)
This method is called by the tasks . It is redirected to the submodule .
16,618
def strainer(sequencepath):
    """Locate all FASTA files in `sequencepath` and create a basic metadata
    object for each sample.

    Returns:
        tuple: (sorted list of FASTA paths, list of MetadataObject).
    """
    metadata_list = list()
    # NOTE(review): assert-based validation is stripped under `python -O`.
    assert os.path.isdir(sequencepath), 'Cannot locate sequence path as specified: {}'.format(sequencepath)
    # Any extension beginning with ".fa" (.fa, .fasta, .fas, ...) is accepted.
    strains = sorted(glob.glob(os.path.join(sequencepath, '*.fa*')))
    assert strains, 'Could not find any files with an extension starting with "fa" in {}. Please check ' 'to ensure that your sequence path is correct'.format(sequencepath)
    for sample in strains:
        metadata = MetadataObject()
        # Sample name: file name without directory or extension.
        filename = os.path.splitext(os.path.split(sample)[1])[0]
        metadata.name = filename
        metadata.general = GenObject()
        metadata.commands = GenObject()
        # Each sample gets its own output directory with a symlink back to
        # the source FASTA and dedicated stdout/stderr log paths.
        metadata.general.outputdirectory = os.path.join(sequencepath, filename)
        metadata.general.bestassemblyfile = os.path.join(metadata.general.outputdirectory, '{sn}.fasta'.format(sn=filename))
        make_path(metadata.general.outputdirectory)
        relative_symlink(sample, metadata.general.outputdirectory)
        metadata.general.logout = os.path.join(metadata.general.outputdirectory, 'out')
        metadata.general.logerr = os.path.join(metadata.general.outputdirectory, 'err')
        metadata_list.append(metadata)
    return strains, metadata_list
Locate all the FASTA files in the supplied sequence path . Create basic metadata objects for each sample
16,619
def render(self, model, color, num_turtles):
    """Render all turtle instances of this shape in one instanced draw call.

    `model` and `color` are per-instance buffers exposing .data/.byte_size.
    """
    self.program.bind()
    glBindVertexArray(self.vao)
    # Upload per-instance transform and colour data to the GPU buffers.
    self.model_buffer.load(model.data, model.byte_size)
    self.color_buffer.load(color.data, color.byte_size)
    # 7 floats per vertex in the edge buffer — presumably position+colour
    # interleaved; TODO confirm against the geometry layout.
    glDrawArraysInstanced(GL_TRIANGLES, 0, len(self.geometry.edges) // 7, num_turtles)
    glBindVertexArray(0)
    self.program.unbind()
Renders all turtles of a given shape
16,620
def renew_connection(password):
    """Renew the Tor session by requesting a new circuit (NEWNYM signal).

    Connects to the Tor control port (9051) and authenticates with the
    given control password.
    """
    with Controller.from_port(port=9051) as controller:
        controller.authenticate(password=password)
        controller.signal(Signal.NEWNYM)
Renews TOR session
16,621
def parse_url(url):
    """Normalise `url` to an http:// URL and drop any trailing fragment."""
    if url.startswith("https://"):
        # Downgrade https to http by swapping the scheme.
        normalised = "http://" + url[8:]
    elif not url.startswith("http://"):
        normalised = "http://" + url
    else:
        normalised = url
    frag = normalised.rfind("#")
    # Only treat '#' as a fragment marker when it follows the last '/'.
    if frag > normalised.rfind("/"):
        normalised = normalised[0:frag]
    return normalised
Parses correctly url
16,622
def get_links(self, recall, timeout):
    """Collect and sort all <a>/<link> href targets on the page.

    Retries up to `recall` times, sleeping `timeout` seconds after each
    failure.  Returns None when every attempt fails (preserved from the
    original behaviour).
    """
    for _ in range(recall):
        try:
            soup = BeautifulSoup(self.source)
            out_links = []
            for tag in soup.findAll(["a", "link"], href=True):
                # Resolve relative links against the page URL.
                tag["href"] = urljoin(self.url, tag["href"])
                out_links.append(tag["href"])
            return sorted(out_links)
        except Exception:
            # Fix: was a bare `except:`, which also swallowed
            # KeyboardInterrupt/SystemExit.
            time.sleep(timeout)
Gets links in page
16,623
def open_in_browser(self, n_times):
    """Open this page's URL in the default web browser `n_times` times."""
    for _ in range(n_times):
        webbrowser.open(self.url)
Opens page in browser
16,624
def download_url(self, local_file):
    """Download this object's URL to `local_file`.

    Fix: `urllib.request.URLopener` has been deprecated since Python 3.3;
    use the module-level `urlretrieve` helper instead, which performs the
    same fetch-to-file operation.
    """
    urllib.request.urlretrieve(self.url, local_file)
Downloads url to local file
16,625
def download_to_file(self, local_file, headers=None, cookies=None, chunk_size=1024):
    """Stream this link's content into `local_file`.

    Falls back to the module-level HEADERS and empty cookies when none
    are supplied; downloads in `chunk_size`-byte chunks.
    """
    request_headers = headers if headers else HEADERS
    request_cookies = cookies if cookies else {}
    req = requests.get(self.url, headers=request_headers,
                       cookies=request_cookies, stream=True)
    with open(local_file, "wb") as local_download:
        for chunk in req.iter_content(chunk_size):
            # Skip keep-alive chunks, which arrive empty.
            if chunk:
                local_download.write(chunk)
Downloads link to local file
16,626
def getLogicalLines(fp, allowQP=True, findBegin=False):
    """Iterate through a stream, yielding one logical (unfolded) line at a
    time as (text, lineNumber) tuples.

    iCalendar-style folding is undone: continuation lines starting with a
    space or tab are appended to the previous line.  When `allowQP` is
    false a single regex pass over the whole stream is used instead.

    NOTE(review): `findBegin` is accepted but never used here — presumably
    kept for API compatibility with callers; confirm before removing.
    """
    if not allowQP:
        # Fast path: read everything and let the precompiled regexes do
        # the unfolding in one pass.
        val = fp.read(-1)
        lineNumber = 1
        for match in logical_lines_re.finditer(val):
            line, n = wrap_re.subn('', match.group())
            if line != '':
                yield line, lineNumber
            lineNumber += n
    else:
        quotedPrintable = False
        newbuffer = six.StringIO
        logicalLine = newbuffer()
        lineNumber = 0
        lineStartNumber = 0
        while True:
            line = fp.readline()
            if line == '':
                break
            else:
                line = line.rstrip(CRLF)
                lineNumber += 1
            if line.rstrip() == '':
                # Blank line terminates the current logical line.
                if logicalLine.tell() > 0:
                    yield logicalLine.getvalue(), lineStartNumber
                lineStartNumber = lineNumber
                logicalLine = newbuffer()
                quotedPrintable = False
                continue
            if quotedPrintable and allowQP:
                # Soft line break inside a quoted-printable value: keep
                # accumulating with an embedded newline.
                logicalLine.write('\n')
                logicalLine.write(line)
                quotedPrintable = False
            elif line[0] in SPACEORTAB:
                # Folded continuation: drop the leading whitespace char.
                logicalLine.write(line[1:])
            elif logicalLine.tell() > 0:
                # New property starts: emit the previous logical line.
                yield logicalLine.getvalue(), lineStartNumber
                lineStartNumber = lineNumber
                logicalLine = newbuffer()
                logicalLine.write(line)
            else:
                logicalLine = newbuffer()
                logicalLine.write(line)
            # A trailing '=' on a quoted-printable property signals a
            # soft break continuing on the next physical line.
            val = logicalLine.getvalue()
            if val[-1] == '=' and val.lower().find('quoted-printable') >= 0:
                quotedPrintable = True
        if logicalLine.tell() > 0:
            yield logicalLine.getvalue(), lineStartNumber
Iterate through a stream yielding one logical line at a time .
16,627
def newFromBehavior(name, id=None):
    """Return a behaviored ContentLine or Component for `name`.

    Raises:
        VObjectError: when no behavior is registered for the name.
    """
    name = name.upper()
    behavior = getBehavior(name, id)
    if behavior is None:
        raise VObjectError("No behavior found named %s" % name)
    # Components get a Component wrapper; everything else a ContentLine.
    obj = Component(name) if behavior.isComponent else ContentLine(name, [], '')
    obj.behavior = behavior
    obj.isNative = False
    return obj
Given a name return a behaviored ContentLine or Component .
16,628
def transformFromNative(self):
    """Return self transformed into a ContentLine or Component, if needed.

    Delegates to the behavior's transformFromNative; objects that are not
    native (or lack an applicable behavior) are returned unchanged.

    Raises:
        NativeError: wrapping any error raised during transformation,
        annotated with the source line number when known.
    """
    if self.isNative and self.behavior and self.behavior.hasNative:
        try:
            return self.behavior.transformFromNative(self)
        except Exception as e:
            # Wrap arbitrary errors in NativeError; existing NativeErrors
            # are re-raised with the line number attached.
            lineNumber = getattr(self, 'lineNumber', None)
            if isinstance(e, NativeError):
                if lineNumber is not None:
                    e.lineNumber = lineNumber
                raise
            else:
                msg = "In transformFromNative, unhandled exception on line %s %s: %s"
                msg = msg % (lineNumber, sys.exc_info()[0], sys.exc_info()[1])
                raise NativeError(msg, lineNumber)
    else:
        return self
Return self transformed into a ContentLine or Component if needed .
16,629
def _wrap_callback_parse_time_info(subscription, on_data, message):
    """Parse TimeInfo out of a WebSocket message and fan it out to the
    subscription and the optional user callback."""
    if message.type == message.REPLY:
        # Initial reply carries the time inside a TimeSubscriptionResponse.
        reply = web_pb2.TimeSubscriptionResponse()
        reply.ParseFromString(message.reply.data)
        time = parse_isostring(reply.timeInfo.currentTimeUTC)
    elif message.type == message.DATA and message.data.type == yamcs_pb2.TIME_INFO:
        time = parse_isostring(message.data.timeInfo.currentTimeUTC)
    else:
        return
    subscription._process(time)
    if on_data:
        on_data(time)
Wraps a user callback to parse TimeInfo from a WebSocket data message
16,630
def _wrap_callback_parse_event(on_data, message):
    """Parse an Event from a WebSocket data message and hand it to the
    user callback."""
    if message.type == message.DATA and message.data.type == yamcs_pb2.EVENT:
        on_data(Event(message.data.event))
Wraps a user callback to parse Events from a WebSocket data message
16,631
def _wrap_callback_parse_link_event(subscription, on_data, message):
    """Parse a LinkEvent from a WebSocket data message and fan it out."""
    if message.type == message.DATA and message.data.type == yamcs_pb2.LINK_EVENT:
        link_event = LinkEvent(message.data.linkEvent)
        # Let the subscription update its internal link cache first.
        subscription._process(link_event)
        if on_data:
            on_data(link_event)
Wraps a user callback to parse LinkEvents from a WebSocket data message
16,632
def get_time(self, instance):
    """Return the current mission time for `instance`, or None when the
    instance reports no mission time."""
    response = self.get_proto('/instances/{}'.format(instance))
    message = yamcsManagement_pb2.YamcsInstance()
    message.ParseFromString(response.content)
    if not message.HasField('missionTime'):
        return None
    return parse_isostring(message.missionTime)
Return the current mission time for the specified instance .
16,633
def get_server_info(self):
    """Return general information about the connected server."""
    response = self.get_proto(path='')
    overview = rest_pb2.GetApiOverviewResponse()
    overview.ParseFromString(response.content)
    return ServerInfo(overview)
Return general server info .
16,634
def get_auth_info(self):
    """Return general authentication information.

    Does not itself require authentication, so it can be used to probe
    whether the server needs credentials at all.

    Raises:
        ConnectionFailure: when the server refuses the connection.
    """
    try:
        response = self.session.get(self.auth_root,
                                    headers={'Accept': 'application/protobuf'})
    except requests.exceptions.ConnectionError:
        raise ConnectionFailure('Connection to {} refused'.format(self.address))
    message = web_pb2.AuthInfo()
    message.ParseFromString(response.content)
    return AuthInfo(message)
Returns general authentication information . This operation does not require authenticating and is useful to test if a server requires authentication or not .
16,635
def get_user_info(self):
    """Return information on the currently authenticated user."""
    response = self.get_proto(path='/user')
    info = yamcsManagement_pb2.UserInfo()
    info.ParseFromString(response.content)
    return UserInfo(info)
Get information on the authenticated user .
16,636
def create_instance(self, name, template, args=None, labels=None):
    """Create a new instance from an existing template.

    Optional `args` fill the template's variables; `labels` attach
    key/value metadata to the instance.
    """
    req = rest_pb2.CreateInstanceRequest()
    req.name = name
    req.template = template
    if args:
        for key in args:
            req.templateArgs[key] = args[key]
    if labels:
        for key in labels:
            req.labels[key] = labels[key]
    self.post_proto('/instances', data=req.SerializeToString())
Create a new instance based on an existing template . This method blocks until the instance is fully started .
16,637
def list_instance_templates(self):
    """List the instance templates available on the server."""
    response = self.get_proto(path='/instance-templates')
    message = rest_pb2.ListInstanceTemplatesResponse()
    message.ParseFromString(response.content)
    return iter([InstanceTemplate(t) for t in message.template])
List the available instance templates .
16,638
def list_services(self, instance):
    """List the services configured for `instance`."""
    response = self.get_proto(path='/services/{}'.format(instance))
    message = rest_pb2.ListServiceInfoResponse()
    message.ParseFromString(response.content)
    return iter([Service(s) for s in message.service])
List the services for an instance .
16,639
def stop_service(self, instance, service):
    """Stop a single service of an instance."""
    req = rest_pb2.EditServiceRequest()
    req.state = 'stopped'
    self.patch_proto('/services/{}/{}'.format(instance, service),
                     data=req.SerializeToString())
Stops a single service .
16,640
def list_processors(self, instance=None):
    """List processors, optionally restricted to a single instance."""
    path = '/processors/{}'.format(instance) if instance else '/processors'
    response = self.get_proto(path=path)
    message = rest_pb2.ListProcessorsResponse()
    message.ParseFromString(response.content)
    return iter([Processor(p) for p in message.processor])
Lists the processors .
16,641
def list_clients(self, instance=None):
    """List connected clients, optionally for a single instance."""
    path = '/instances/{}/clients'.format(instance) if instance else '/clients'
    response = self.get_proto(path=path)
    message = rest_pb2.ListClientsResponse()
    message.ParseFromString(response.content)
    return iter([Client(c) for c in message.client])
Lists the clients .
16,642
def list_instances(self):
    """List all instances known to the server."""
    response = self.get_proto(path='/instances')
    message = rest_pb2.ListInstancesResponse()
    message.ParseFromString(response.content)
    return iter([Instance(i) for i in message.instance])
Lists the instances .
16,643
def start_instance(self, instance):
    """Start a single instance."""
    self.patch_proto('/instances/{}'.format(instance),
                     params={'state': 'running'})
Starts a single instance .
16,644
def stop_instance(self, instance):
    """Stop a single instance."""
    self.patch_proto('/instances/{}'.format(instance),
                     params={'state': 'stopped'})
Stops a single instance .
16,645
def restart_instance(self, instance):
    """Restart a single instance."""
    self.patch_proto('/instances/{}'.format(instance),
                     params={'state': 'restarted'})
Restarts a single instance .
16,646
def list_data_links(self, instance):
    """List the data links of `instance` visible to this client."""
    response = self.get_proto(path='/links/' + instance)
    message = rest_pb2.ListLinkInfoResponse()
    message.ParseFromString(response.content)
    return iter([Link(l) for l in message.link])
Lists the data links visible to this client .
16,647
def send_event(self, instance, message, event_type=None, time=None,
               severity='info', source=None, sequence_number=None):
    """Post a new event to the instance's event archive.

    Only the provided optional fields are set on the request; `time` is
    serialised to an ISO string.
    """
    req = rest_pb2.CreateEventRequest()
    req.message = message
    req.severity = severity
    if event_type:
        req.type = event_type
    if source:
        req.source = source
    if time:
        req.time = to_isostring(time)
    # 0 is a legitimate sequence number, hence the explicit None check.
    if sequence_number is not None:
        req.sequence_number = sequence_number
    self.post_proto('/archive/{}/events'.format(instance),
                    data=req.SerializeToString())
Post a new event .
16,648
def get_data_link(self, instance, link):
    """Return a single data link of `instance` by name."""
    response = self.get_proto('/links/{}/{}'.format(instance, link))
    info = yamcsManagement_pb2.LinkInfo()
    info.ParseFromString(response.content)
    return Link(info)
Gets a single data link .
16,649
def enable_data_link(self, instance, link):
    """Enable a data link."""
    req = rest_pb2.EditLinkRequest()
    req.state = 'enabled'
    self.patch_proto('/links/{}/{}'.format(instance, link),
                     data=req.SerializeToString())
Enables a data link .
16,650
def create_data_link_subscription(self, instance, on_data=None, timeout=60):
    """Subscribe to data link updates of an instance; returns the
    subscription object once the server acknowledges it."""
    manager = WebSocketSubscriptionManager(self, resource='links')
    subscription = DataLinkSubscription(manager)
    callback = functools.partial(_wrap_callback_parse_link_event,
                                 subscription, on_data)
    manager.open(callback, instance)
    # Block until the server confirms the subscription (or timeout).
    subscription.reply(timeout=timeout)
    return subscription
Create a new subscription for receiving data link updates of an instance .
16,651
def create_time_subscription(self, instance, on_data=None, timeout=60):
    """Subscribe to 1Hz time updates of an instance; returns the
    subscription object once the server acknowledges it."""
    manager = WebSocketSubscriptionManager(self, resource='time')
    subscription = TimeSubscription(manager)
    callback = functools.partial(_wrap_callback_parse_time_info,
                                 subscription, on_data)
    manager.open(callback, instance)
    # Block until the server confirms the subscription (or timeout).
    subscription.reply(timeout=timeout)
    return subscription
Create a new subscription for receiving time updates of an instance . Time updates are emitted at 1Hz .
16,652
def create_event_subscription(self, instance, on_data, timeout=60):
    """Subscribe to events of an instance; returns the subscription
    future once the server acknowledges it."""
    manager = WebSocketSubscriptionManager(self, resource='events')
    subscription = WebSocketSubscriptionFuture(manager)
    callback = functools.partial(_wrap_callback_parse_event, on_data)
    manager.open(callback, instance)
    # Block until the server confirms the subscription (or timeout).
    subscription.reply(timeout=timeout)
    return subscription
Create a new subscription for receiving events of an instance .
16,653
def remove_group(self, group):
    """Remove `group`, its entries, and all child groups from the database.

    Raises:
        TypeError: when `group` is not a Group.
        ValueError: when the group is not bound to this database.
    """
    if not isinstance(group, Group):
        raise TypeError("group must be Group")
    if group not in self.groups:
        raise ValueError("Group doesn't exist / is not bound to this database.")
    # Removal mutates the underlying lists, so delete index 0 a fixed
    # number of times rather than iterating the (shrinking) list.
    for _ in xrange(len(group.entries)):
        self.remove_entry(group.entries[0])
    for _ in xrange(len(group.children)):
        self.remove_group(group.children[0])
    group.parent.children.remove(group)
    self.groups.remove(group)
Remove the specified group .
16,654
def create_entry(self, group, **kwargs):
    """Create a new Entry bound to `group` and register it in the database.

    Raises:
        ValueError: when `group` is not bound to this database.
    """
    if group not in self.groups:
        raise ValueError("Group doesn't exist / is not bound to this database.")
    uuid = binascii.hexlify(get_random_bytes(16))
    entry = Entry(uuid=uuid,
                  group_id=group.id,
                  created=util.now(),
                  modified=util.now(),
                  accessed=util.now(),
                  **kwargs)
    self.entries.append(entry)
    group.entries.append(entry)
    return entry
Create a new Entry object . The group which should hold the entry is needed .
16,655
def _bind_model(self):
    """Bind the flat group/entry lists into the correct tree hierarchy and
    add references to this database object in the groups.

    Groups arrive as a depth-first flat list where `level` encodes depth;
    the parent stack reconstructs the tree from those levels.

    Raises:
        ValueError: when the first group is not at level 0.
        NotImplementedError: when an entry references no known group.
    """
    if self.groups[0].level != 0:
        self.log.info("Got invalid first group: {0}".format(self.groups[0]))
        raise ValueError("Invalid group tree: first group must have level of 0 (got {0})".format(self.groups[0].level))

    # Tiny helper so the parent bookkeeping below reads as stack ops.
    class Stack(list):
        def push(self, el):
            self.append(el)

    parent_stack = Stack([self.root])
    current_parent = self.root
    prev_group = None
    for g in self.groups:
        g.db = self
        if prev_group is not None:
            if g.level > prev_group.level:
                # Descended one level: the previous group is the parent.
                current_parent = prev_group
                parent_stack.push(current_parent)
            elif g.level < prev_group.level:
                # Ascended: pop parents until one shallower than g remains.
                while g.level <= current_parent.level:
                    current_parent = parent_stack.pop()
                parent_stack.push(current_parent)
        g.parent = current_parent
        current_parent.children.append(g)
        prev_group = g

    # Attach each entry to its group by id (linear scan per entry).
    for entry in self.entries:
        for group in self.groups:
            if entry.group_id == group.id:
                group.entries.append(entry)
                entry.group = group
                break
        else:
            raise NotImplementedError("Orphaned entries not (yet) supported.")
This method binds the various model objects together in the correct hierarchy and adds references to this database object in the groups.
16,656
def filepath(self, value):
    """Set the current filepath.

    For writable databases, changing the path releases any held lock
    file and acquires a lock on the new path (when it is not None).
    """
    if self.readonly or self._filepath == value:
        # Read-only databases (or a no-op change) never touch the lock.
        self._filepath = value
        return
    if self._locked:
        self.log.debug("Releasing previously-held lock file: {0}".format(self.lockfile))
        self.release_lock()
    self._filepath = value
    if self._filepath is not None:
        self.acquire_lock()
Property for setting current filepath automatically takes out lock on new file if not readonly db .
16,657
def close(self):
    """Close the database, releasing the lock file when writable."""
    super(LockingDatabase, self).close()
    if not self.readonly:
        self.release_lock()
Closes the database releasing lock .
16,658
def get_dates_file(path):
    """Parse a file of 'date probability' lines.

    Returns a list of (parsed_date, probability) tuples, one per line.
    """
    with open(path) as handle:
        lines = handle.readlines()
    parsed = []
    for entry in lines:
        fields = entry.split(" ")
        parsed.append((convert_time_string(fields[0]), float(fields[1])))
    return parsed
parse dates file of dates and probability of choosing
16,659
def get_dates_link(url):
    """Download a dates file from `url`, parse it, and clean up the
    temporary download."""
    urllib.request.urlretrieve(url, "temp.txt")
    parsed = get_dates_file("temp.txt")
    os.remove("temp.txt")
    return parsed
download the dates file from the internet and parse it as a dates file
16,660
def parse_filename_meta(filename):
    """Parse the metadata from a product filename, either L1b or L2.
    (Taken from SUVI code by vhsu.)

    Returns:
        tuple: (filename, start datetime, end datetime, platform, product).

    Raises:
        ValueError: when no timestamp pattern is found in the filename.
    """
    # Shared prefix matching the product and platform (e.g. gNN) fields.
    common_pattern = "_%s_%s" % (
        "(?P<product>[a-zA-Z]{3}[a-zA-Z]?-[a-zA-Z0-9]{2}[a-zA-Z0-9]?-[a-zA-Z0-9]{4}[a-zA-Z0-9]?)",
        "(?P<platform>[gG][1-9]{2})")
    patterns = {  # all the patterns used in the NOAA database
        "l2_pattern": re.compile("%s_s(?P<start>[0-9]{8}T[0-9]{6})Z_e(?P<end>[0-9]{8}T[0-9]{6})Z" % common_pattern),
        "l1b_pattern": re.compile('%s_s(?P<start>[0-9]{14})_e(?P<end>[0-9]{14})' % common_pattern),
        "dayfile_pattern": re.compile("%s_d(?P<start>[0-9]{8})" % common_pattern),
        "monthfile_pattern": re.compile("%s_m(?P<start>[0-9]{6})" % common_pattern),
        "yearfile_pattern": re.compile("%s_y(?P<start>[0-9]{4})" % common_pattern),
    }
    match, dt_start, dt_end = None, None, None
    for pat_type, pat in patterns.items():
        match = pat.search(filename)
        if match is not None:
            if pat_type == "l2_pattern":
                # parse l2
                dt_start = datetime.strptime(match.group("start"), '%Y%m%dT%H%M%S')
                dt_end = datetime.strptime(match.group("end"), '%Y%m%dT%H%M%S')
            elif pat_type == "l1b_pattern":
                # parse l1b (day-of-year timestamps)
                dt_start = datetime.strptime(match.group("start"), '%Y%j%H%M%S%f')
                dt_end = datetime.strptime(match.group("end"), '%Y%j%H%M%S%f')
            elif pat_type == "dayfile_pattern":
                dt_start = datetime.strptime(match.group("start"), "%Y%m%d")
                dt_end = dt_start + timedelta(hours=24)
            elif pat_type == "monthfile_pattern":
                dt_start = datetime.strptime(match.group("start"), "%Y%m")
                # NOTE(review): rolls over incorrectly for December
                # (month + 1 == 13) — would raise ValueError; confirm
                # December month files never occur upstream.
                dt_end = datetime(dt_start.year, dt_start.month + 1, 1)
            elif pat_type == "yearfile_pattern":
                dt_start = datetime.strptime(match.group("start"), "%Y")
                dt_end = datetime(dt_start.year + 1, 1, 1)
            break
    if match is None:
        # Fallback: old NCEI FITS naming scheme with underscore-separated
        # date/time fields and the wavelength in field 2.
        if "NCEI" in filename and ".fits" in filename:
            dt_start = datetime.strptime("T".join(filename.split("_")[4:6]), "%Y%m%dT%H%M%S")
            dt_end = dt_start
            angstroms = int(filename.split("_")[2])
            atom = "Fe" if angstroms != 304 else "He"
            product = "SUVI-L1b-{}{}".format(atom, angstroms)
            return filename, dt_start, dt_end, "g16", product
        else:
            raise ValueError("Timestamps not detected in filename: %s" % filename)
    else:
        return filename, dt_start, dt_end, match.group("platform"), match.group("product")
Taken from SUVI code by vhsu. Parse the metadata from a product filename, either L1b or L2, yielding: file start, file end, platform, and product.
16,661
def get_request_mock():
    """Build a mock WSGIRequest used to render templates in an environment
    as faithful to a real request as possible.

    Runs all request middleware over the mock except LocaleMiddleware,
    which would interfere with the desired language handling.
    """
    basehandler = BaseHandler()
    basehandler.load_middleware()
    # Minimal but complete WSGI environ for a GET / request.
    request = WSGIRequest({
        'HTTP_COOKIE': '',
        'PATH_INFO': '/',
        'QUERY_STRING': '',
        'REMOTE_ADDR': '127.0.0.1',
        'REQUEST_METHOD': 'GET',
        'SERVER_NAME': 'page-request-mock',
        'SCRIPT_NAME': '',
        'SERVER_PORT': '80',
        'SERVER_PROTOCOL': 'HTTP/1.1',
        'HTTP_HOST': 'page-request-host',
        'CONTENT_TYPE': 'text/html; charset=utf-8',
        'wsgi.version': (1, 0),
        'wsgi.url_scheme': 'http',
        'wsgi.multiprocess': True,
        'wsgi.multithread': False,
        'wsgi.run_once': False,
        'wsgi.input': StringIO()
    })
    for middleware_method in basehandler._request_middleware:
        # Skip LocaleMiddleware; everything else processes the mock.
        if 'LocaleMiddleware' not in str(middleware_method.__class__):
            middleware_method(request)
    return request
Build a request mock up that is used in to render the templates in the most fidel environement as possible .
16,662
def pages_view(view):
    """Decorator that makes sure the decorated view gets the essential
    pages context variables.

    When the URL kwargs carry a `path`, the CMS `details` view is invoked
    in context-only mode and its context is merged into the kwargs —
    either under `extra_context_var` when given, or flattened in.
    """
    def pages_view_decorator(request, *args, **kwargs):
        # Context already resolved upstream: pass straight through.
        if (kwargs.get('current_page', False)
                or kwargs.get('pages_navigation', False)):
            return view(request, *args, **kwargs)
        path = kwargs.pop('path', None)
        lang = kwargs.pop('lang', None)
        if path:
            # Imported lazily to avoid a circular import at module load.
            from basic_cms.views import details
            response = details(request, path=path, lang=lang,
                               only_context=True, delegation=False)
            context = response
            extra_context_var = kwargs.pop('extra_context_var', None)
            if extra_context_var:
                kwargs.update({extra_context_var: context})
            else:
                kwargs.update(context)
        return view(request, *args, **kwargs)
    return pages_view_decorator
Make sure the decorated view gets the essential pages variables .
16,663
def true_neg_rate(self):
    """Return the true negative rate, TN / (TN + FP), from the confusion
    matrix (row 1 holds the actual-negative counts)."""
    fp = self.matrix[1][0]
    tn = self.matrix[1][1]
    return divide(1.0 * tn, tn + fp)
Calculates true negative rate
16,664
def f1_score(self):
    """Return the F1 score: the harmonic mean of precision and recall."""
    precision = self.precision()
    recall = self.recall()
    return divide(2.0, 1.0 / precision + 1.0 / recall)
Calculates F1 score
16,665
def from_columns(columns):
    """Build a Matrix from the given raw columns.

    NOTE(review): no transpose is performed — each input column becomes
    a row of the Matrix verbatim; confirm that is the intended layout.
    """
    rows = [list(column) for column in columns]
    return Matrix(rows)
Parses raw columns
16,666
def get_version_details(path):
    """Parse a dunder-style version file into a {name: value} dict.

    Each line must look like ``__key__ = 'value'``; the dunder markers
    and quotes are stripped.
    """
    with open(path, "r") as reader:
        lines = reader.readlines()
    details = {}
    for line in lines:
        parts = line.split(" = ")
        details[parts[0].replace("__", "")] = parts[1].strip().replace("'", "")
    return details
Parses version file
16,667
def _get_record(self, name):
    """Look up an A record by name; return its integer id, or None when
    no record exists.

    Raises:
        RuntimeError: on a failed search or malformed response JSON.
    """
    response = self._session.get(self._baseurl,
                                 params={'name': name, 'type': 'A'})
    if not response.ok:
        raise RuntimeError('Failed to search record: %s - %s'
                           % (self._format_hostname(name), response.json()))
    records = response.json()
    if len(records) == 0:
        return
    first = records[0]
    if 'record' not in first or 'id' not in first['record']:
        raise RuntimeError('Invalid record JSON format: %s - %s'
                           % (self._format_hostname(name), response.json()))
    return int(first['record']['id'])
Returns the id of a record if it exists .
16,668
def _create_record(self, name, address, ttl):
    """Create a new A record and return the created record dict.

    Raises:
        RuntimeError: on a failed request or malformed response JSON.
    """
    payload = json.dumps({'record': {'name': name,
                                     'record_type': 'A',
                                     'content': address,
                                     'ttl': ttl}})
    response = self._session.post(self._baseurl, data=payload,
                                  headers={'Content-Type': 'application/json'})
    if not response.ok:
        raise RuntimeError('Failed to create new record: %s - %s'
                           % (self._format_hostname(name), response.json()))
    record = response.json()
    if 'record' not in record or 'id' not in record['record']:
        raise RuntimeError('Invalid record JSON format: %s - %s'
                           % (self._format_hostname(name), response.json()))
    return record['record']
Creates a new record .
16,669
def _update_record ( self , record_id , name , address , ttl ) : data = json . dumps ( { 'record' : { 'name' : name , 'content' : address , 'ttl' : ttl } } ) headers = { 'Content-Type' : 'application/json' } request = self . _session . put ( self . _baseurl + '/%d' % record_id , data = data , headers = headers ) if not request . ok : raise RuntimeError ( 'Failed to update record: %s - %s' % ( self . _format_hostname ( name ) , request . json ( ) ) ) record = request . json ( ) if 'record' not in record or 'id' not in record [ 'record' ] : raise RuntimeError ( 'Invalid record JSON format: %s - %s' % ( self . _format_hostname ( name ) , request . json ( ) ) ) return record [ 'record' ]
Updates an existing record .
16,670
def update_record(self, name, address, ttl=60):
    """Create the A record if absent, otherwise update it in place.

    :param name: record name
    :param address: IPv4 address
    :param ttl: time-to-live in seconds (default 60)
    :return: the created or updated record dict
    """
    existing_id = self._get_record(name)
    if existing_id is None:
        return self._create_record(name, address, ttl)
    return self._update_record(existing_id, name, address, ttl)
Updates a record creating it if not exists .
16,671
def wheelEvent(self, ev, axis=None):
    """Handle mouse-wheel zooming with two custom behaviours.

    While Ctrl is held, the axes that respond to the mouse are inverted
    for the duration of the event, switching which axis the wheel zooms.
    If ``self._zeroWheel`` is set, the event position is faked so the
    zoom locus is the view origin instead of the cursor.
    """
    state = None
    if ev.modifiers() == QtCore.Qt.ControlModifier:
        # Remember the current (x, y) mouse-enabled pair, then invert it
        # so the wheel acts on the other axis while Ctrl is down.
        state = self.mouseEnabled()
        self.setMouseEnabled(not state[0], not state[1])
    if self._zeroWheel:
        # Monkey-patch the event's pos() so the base-class zoom centres
        # on (0, 0) in view coordinates rather than the cursor location.
        ev.pos = lambda: self.mapViewToScene(QtCore.QPoint(0, 0))
    super(SpikeyViewBox, self).wheelEvent(ev, axis)
    if state is not None:
        # Restore the original axis-enable state after the zoom.
        self.setMouseEnabled(*state)
Reacts to mouse wheel movement. Custom behaviour: switches the zoom axis when Ctrl is pressed, and pins the locus of zoom to the view origin if zeroWheel is set.
16,672
def copy(self):
    """Return a new QMenu mirroring this menu (title, submenus, actions).

    Required by ViewBox, which expects menus it can take ownership of.
    """
    duplicate = QtGui.QMenu()
    for entry in self.subMenus():
        # subMenus() mixes QMenu instances and plain actions; each kind
        # must be attached with the matching method.
        if isinstance(entry, QtGui.QMenu):
            duplicate.addMenu(entry)
        else:
            duplicate.addAction(entry)
    duplicate.setTitle(self.title())
    return duplicate
Returns a copy of this menu, re-adding its submenus and actions; required by ViewBox.
16,673
def select_k_best(self, k):
    """Reduce the training features to the *k* best by the chi2 test.

    :param k: number of features to keep
    :return: transformed feature matrix containing only the k best columns
    """
    selector = SelectKBest(chi2, k=k)
    return selector.fit_transform(self.x_train, self.y_train)
Selects k best features in dataset
16,674
def main(self):
    """Run the qualimap preparation pipeline stages in order."""
    logging.info('Aligning reads with bowtie2 for Qualimap')
    # Order matters: mapping -> indexing -> polishing -> filtering -> cleanup.
    for stage in (self.bowtie, self.indexing, self.pilon,
                  self.filter, self.clear):
        stage()
Run the methods in the correct order
16,675
def bowtie(self):
    """Build bowtie2 index/align command lines for every sample and queue
    them for the daemon worker threads consuming ``self.bowqueue``.

    Samples whose assembly is "NA" are marked with NA placeholders
    instead of being queued.
    """
    # Spin up one daemon worker per CPU; self.align drains self.bowqueue.
    for i in range(self.cpus):
        threads = Thread(target=self.align, args=())
        threads.setDaemon(True)
        threads.start()
    with progressbar(self.metadata) as bar:
        for sample in bar:
            sample.mapping = GenObject()
            sagen = sample.general
            if sagen.bestassemblyfile != "NA":
                # Per-sample qualimap output directory and derived paths.
                sagen.QualimapResults = os.path.join(sagen.outputdirectory, 'qualimap_results')
                make_path(sagen.QualimapResults)
                sagen.sortedbam = os.path.join(sagen.QualimapResults, '{}_sorted.bam'.format(sample.name))
                filenoext = os.path.splitext(sagen.filteredfile)[0]
                sagen.filenoext = filenoext
                sagen.bowtie2results = os.path.join(sagen.QualimapResults, sample.name)
                # Index-build command for the best assembly.
                bowtie2build = Bowtie2BuildCommandLine(reference=sagen.bestassemblyfile,
                                                      bt2=sagen.bowtie2results)
                sample.mapping.BamFile = sagen.bowtie2results + "_sorted.bam"
                # Pipe SAM output through samtools view | sort to stdout ("-").
                samsort = SamtoolsSortCommandline(input=sample.mapping.BamFile,
                                                  o=True, out_prefix="-")
                samtools = [SamtoolsViewCommandline(b=True, S=True, input_file="-"),
                            samsort]
                # Bowtie2 sensitivity parameters; presumably tuned for this
                # pipeline -- TODO confirm against bowtie2 presets.
                indict = {'D': 5, 'R': 1, 'num_mismatches': 0,
                          'seed_length': 22, 'i_func': "S,0,2.50"}
                try:
                    # Presence of .mergedreads selects the trimmed/corrected
                    # reads; its absence (AttributeError) falls back to the
                    # raw assembly fastq files.
                    _ = sample.general.mergedreads
                    if len(sample.general.trimmedcorrectedfastqfiles) == 2:
                        indict.update({'m1': sample.general.trimmedcorrectedfastqfiles[0],
                                       'm2': sample.general.trimmedcorrectedfastqfiles[1]})
                    else:
                        indict.update({'U': sample.general.trimmedcorrectedfastqfiles[0]})
                except AttributeError:
                    if len(sample.general.assemblyfastq) == 2:
                        indict.update({'m1': sample.general.assemblyfastq[0],
                                       'm2': sample.general.assemblyfastq[1]})
                    else:
                        indict.update({'U': sample.general.assemblyfastq[0]})
                bowtie2align = Bowtie2CommandLine(bt2=sagen.bowtie2results,
                                                  threads=self.threads,
                                                  samtools=samtools,
                                                  **indict)
                # Store the rendered command strings and queue the work item.
                sample.commands.bowtie2align = str(bowtie2align)
                sample.commands.bowtie2build = str(bowtie2build)
                self.bowqueue.put((sample, sample.commands.bowtie2build,
                                   sample.commands.bowtie2align))
            else:
                # No usable assembly: record NA placeholders instead.
                sample.commands.samtools = "NA"
                sample.mapping.MeanInsertSize = 'NA'
                sample.mapping.MeanCoveragedata = 'NA'
    # Block until every queued sample has been processed by the workers.
    self.bowqueue.join()
Create threads and commands for performing reference mapping for qualimap analyses
16,676
def pilon(self):
    """Queue pilon polishing jobs to fix misassemblies (SNPs/indels).

    Samples with ``general.polish`` set get a pilon command built and
    queued; all other assembled samples only have ``contigsfile`` set.
    """
    logging.info('Improving quality of assembly with pilon')
    # One daemon worker per CPU; pilonthreads drains self.pilonqueue.
    for i in range(self.cpus):
        threads = Thread(target=self.pilonthreads, args=())
        threads.setDaemon(True)
        threads.start()
    with progressbar(self.metadata) as bar:
        for sample in bar:
            if sample.general.bestassemblyfile != 'NA':
                if sample.general.polish:
                    sample.general.contigsfile = sample.general.assemblyfile
                    # Dedicated output directory for pilon results.
                    sample.mapping.pilondir = os.path.join(sample.general.QualimapResults, 'pilon')
                    make_path(sample.mapping.pilondir)
                    # Note: two adjacent string literals are concatenated.
                    sample.mapping.piloncmd = 'pilon --genome {} --bam {} --fix bases --threads {} ' \
                                              '--outdir {} --changes --mindepth 0.25'\
                        .format(sample.general.contigsfile,
                                sample.mapping.BamFile,
                                self.threads,
                                sample.mapping.pilondir)
                    self.pilonqueue.put(sample)
                else:
                    # No polishing requested: use the assembly as-is.
                    sample.general.contigsfile = sample.general.assemblyfile
    # Wait for all queued pilon jobs to finish.
    self.pilonqueue.join()
Run pilon to fix any misassemblies in the contigs - will look for SNPs and indels
16,677
def filter(self):
    """Queue every assembled sample for depth-based contig filtering."""
    logging.info('Filtering contigs')
    # One daemon worker per CPU; filterthreads drains self.filterqueue.
    for i in range(self.cpus):
        threads = Thread(target=self.filterthreads, args=())
        threads.setDaemon(True)
        threads.start()
    with progressbar(self.metadata) as bar:
        for sample in bar:
            if sample.general.bestassemblyfile != 'NA':
                sample.general.contigsfile = sample.general.assemblyfile
                self.filterqueue.put(sample)
    # Block until all queued samples have been filtered.
    self.filterqueue.join()
Filter contigs based on depth
16,678
def clear(self):
    """Drop the large per-base depth attributes from every sample to keep
    the metadata objects light.

    Each attribute is deleted independently; the original wrapped all
    four ``delattr`` calls in one try block, so the first missing
    attribute aborted the remaining deletions.
    """
    for sample in self.metadata:
        for attr in ('bases', 'coverage', 'length', 'stddev'):
            try:
                delattr(sample.depth, attr)
            except AttributeError:
                # Attribute (or sample.depth itself) absent: nothing to do.
                pass
Clear out large attributes from the metadata objects
16,679
def pages_to_json(queryset):
    """Return a JSON string export of the pages in *queryset*.

    Pages are serialised in tree order and wrapped with an export-format
    version marker so imports can validate compatibility.
    """
    ordered = queryset.order_by('tree_id', 'lft')
    payload = {
        JSON_PAGE_EXPORT_NAME: JSON_PAGE_EXPORT_VERSION,
        'pages': [page.dump_json_data() for page in ordered],
    }
    return simplejson.dumps(payload, indent=JSON_PAGE_EXPORT_INDENT,
                            sort_keys=True)
Return a JSON string export of the pages in queryset .
16,680
def validate_pages_json_data(d, preferred_lang):
    """Check whether importing the page-export dict *d* would succeed.

    Validates the export version, that every page shares a language with
    this site, that its parent is either in the export or already on the
    site, that its template exists here, and that its content fields
    match that template's placeholders. Returns the list of error
    messages (empty means the import should succeed).
    """
    from .models import Page
    errors = []
    # Track slugs seen so far per site language, so a page can satisfy the
    # parent requirement via an earlier page in the same export.
    seen_complete_slugs = dict((lang[0], set()) for lang in settings.PAGE_LANGUAGES)
    valid_templates = set(t[0] for t in settings.get_page_templates())
    valid_templates.add(settings.PAGE_DEFAULT_TEMPLATE)
    if d[JSON_PAGE_EXPORT_NAME] != JSON_PAGE_EXPORT_VERSION:
        # NOTE(review): this branch returns a 2-tuple while the normal path
        # returns just the errors list -- confirm callers handle both.
        return [_('Unsupported file version: %s') % repr(d[JSON_PAGE_EXPORT_NAME])], []
    pages = d['pages']
    for p in pages:
        # Preferred-language slug, falling back below to any shared language.
        slug = p['complete_slug'].get(preferred_lang, None)
        seen_parent = False
        for lang, s in p['complete_slug'].items():
            if lang not in seen_complete_slugs:
                # Language not configured on this site: ignore.
                continue
            seen_complete_slugs[lang].add(s)
            if '/' not in s:
                # Root-level page: no parent needed.
                seen_parent = True
            if not seen_parent:
                parent_slug, ignore = s.rsplit('/', 1)
                if parent_slug in seen_complete_slugs[lang]:
                    # Parent appeared earlier in this export.
                    seen_parent = True
                else:
                    # Otherwise the parent must already exist on this site.
                    parent = Page.objects.from_path(parent_slug, lang,
                                                    exclude_drafts=False)
                    if parent and parent.get_complete_slug(lang) == parent_slug:
                        seen_parent = True
            if not slug:
                slug = s
        if not slug:
            errors.append(_("%s has no common language with this site")
                          % (p['complete_slug'].values()[0],))
            continue
        if not seen_parent:
            errors.append(_("%s did not include its parent page and a matching"
                            " one was not found on this site") % (slug,))
        if p['template'] not in valid_templates:
            errors.append(_("%s uses a template not found on this site: %s")
                          % (slug, p['template']))
            continue
        # The exported content fields must exactly match the placeholders of
        # the local template (plus the fixed meta/fb fields added to both).
        import_fields = set(p['content'].keys())
        import_fields |= set(('meta_title', 'meta_description', 'meta_keywords',
                              'meta_author', 'fb_page_type', 'fb_image'))
        template_fields = set(p.name for p in get_placeholders(p['template'])
                              if p.name not in ('title', 'slug'))
        template_fields |= set(('meta_title', 'meta_description', 'meta_keywords',
                                'meta_author', 'fb_page_type', 'fb_image'))
        if template_fields != import_fields:
            errors.append(_("%s template contents are different than our "
                            "template: %s") % (slug, p['template']))
            continue
    return errors
Check if an import of d will succeed and return errors .
16,681
def import_po_files(path='poexport', stdout=None):
    """Import translated page content from .po files back into the pages.

    For every non-default site language, reads ``<path>/<lang>.po`` and
    writes each changed translation into the corresponding page
    placeholder, invalidating touched pages at the end. Progress is
    reported on *stdout* (defaults to ``sys.stdout``).
    """
    import polib
    from basic_cms.models import Page, Content
    source_language = settings.PAGE_DEFAULT_LANGUAGE
    source_list = []
    pages_to_invalidate = []
    # NOTE(review): source_list is filled but never read afterwards --
    # presumably a leftover from the matching export routine.
    for page in Page.objects.published():
        source_list.extend(page.content_by_language(source_language))
    if stdout is None:
        import sys
        stdout = sys.stdout
    if not path.endswith('/'):
        path += '/'
    for lang in settings.PAGE_LANGUAGES:
        if lang[0] != settings.PAGE_DEFAULT_LANGUAGE:
            stdout.write("Update language %s.\n" % lang[0])
            po_path = path + lang[0] + '.po'
            po = polib.pofile(po_path)
            for entry in po:
                # The translator comment encodes the target placeholder and
                # page id after the do_not_msg marker; assumes the exporter
                # wrote it in that exact format -- TODO confirm.
                meta_data = entry.tcomment.split(do_not_msg)[1].split("\n")
                placeholder_name = meta_data[1].split('=')[1]
                page_id = int(meta_data[2].split('=')[1])
                page = Page.objects.get(id=page_id)
                current_content = Content.objects.get_content(
                    page, lang[0], placeholder_name)
                if current_content != entry.msgstr:
                    stdout.write("Update page %d placeholder %s.\n"
                                 % (page_id, placeholder_name))
                    Content.objects.create_content_if_changed(
                        page, lang[0], placeholder_name, entry.msgstr)
                    if page not in pages_to_invalidate:
                        pages_to_invalidate.append(page)
    # Invalidate caches only once per touched page.
    for page in pages_to_invalidate:
        page.invalidate()
    stdout.write("Import finished from %s.\n" % path)
Import all the content updates from the po files into the pages .
16,682
def setCalibration(self, db_boost_array, frequencies, frange):
    """Store the calibration and propagate it to every contained test.

    :param db_boost_array: per-frequency dB attenuation/boost values
    :param frequencies: frequencies matching *db_boost_array*
    :param frange: calibration frequency range
    """
    self.calibrationVector = db_boost_array
    self.calibrationFrequencies = frequencies
    self.calibrationFrange = frange
    for contained_test in self._tests:
        contained_test.setCalibration(db_boost_array, frequencies, frange)
Sets calibration for all tests
16,683
def insert(self, stim, position):
    """Insert a stimulus into the test list at *position*.

    A position of -1 appends. The stimulus is first given the model's
    reference voltage and current calibration.
    """
    slot = self.rowCount() if position == -1 else position
    stim.setReferenceVoltage(self.caldb, self.calv)
    stim.setCalibration(self.calibrationVector,
                        self.calibrationFrequencies,
                        self.calibrationFrange)
    self._tests.insert(slot, stim)
Inserts a new stimulus into the list at the given position
16,684
def verify(self, windowSize=None):
    """Verify this protocol model is runnable.

    :param windowSize: optional window size forwarded to each test
    :return: 0 on success, otherwise a failure message string
    """
    if self.rowCount() == 0:
        return "Protocol must have at least one test"
    if self.caldb is None or self.calv is None:
        return "Protocol reference voltage not set"
    # Return the first failure reported by any contained test.
    for failure in (t.verify(windowSize) for t in self._tests):
        if failure:
            return failure
    return 0
Verify that this protocol model is valid. Return 0 if successful, or a failure message otherwise.
16,685
def open_any(filename):
    """Return the open callable appropriate for *filename*.

    Picks gzip or bz2 openers by file extension, defaulting to the
    builtin ``open``. Note this returns the function itself, not an
    opened file object.
    """
    openers = {".gz": gzip.open, ".bz2": bz2.BZ2File}
    for suffix, opener in openers.items():
        if filename.endswith(suffix):
            return opener
    return open
Helper returning the appropriate open function for plain, gzip- or bz2-compressed files.
16,686
def _get_group_no ( self , tag_name ) : if tag_name in self . full : return self . groups . index ( self . full [ tag_name ] [ "parent" ] ) else : return len ( self . groups )
Takes tag name and returns the number of the group to which tag belongs
16,687
async def add(self, setname, ip, timeout=0):
    """Add *ip* to ipset *setname* (idempotent via ``-exist``).

    :param timeout: seconds the entry stays in the set; 0 means forever
    """
    cmd_args = ['add', '-exist', setname, ip, 'timeout', timeout]
    return await self.start(__class__.CMD, *cmd_args)
Adds the given IP address to the given ipset. If a timeout is given, the IP will stay in the ipset for the given duration; otherwise it is added permanently.
16,688
async def list(self, setname=None):
    """Run ``ipset list``, optionally restricted to a single set."""
    cmd_args = ['list'] if setname is None else ['list', setname]
    return await self.start(__class__.CMD, *cmd_args)
Lists the existing ipsets .
16,689
def setup ( self , interval ) : self . trace_counter = 0 self . _halt = False self . interval = interval
Prepares the tests for execution; interval is in milliseconds.
16,690
def run(self):
    """Start the acquisition on a background thread and return it.

    Records calibration metadata when saving is enabled, and primes the
    interval timing so the first trace fires immediately.
    """
    self._initialize_run()
    stimuli = self.protocol_model.allTests()
    self.acq_thread = threading.Thread(target=self._worker,
                                       args=(stimuli,),)
    if self.save_data:
        # Record which calibration produced this dataset.
        info = {'calibration_used': self.calname,
                'calibration_range': self.cal_frange}
        self.datafile.set_metadata(self.current_dataset_name, info)
    self.start_time = time.time()
    # Back-date the last tick by one interval (ms -> s) so the worker's
    # first iteration does not wait.
    self.last_tick = self.start_time - (self.interval / 1000)
    self.acq_thread.start()
    return self.acq_thread
Runs the acquisition
16,691
def train(self, x_data, y_data):
    """Fit the model on a random 33% training split of the given data.

    :param x_data: feature matrix
    :param y_data: target values
    """
    split = train_test_split(x_data, y_data, test_size=0.67,
                             random_state=None)
    x_train, y_train = split[0], split[2]
    self.model.fit(x_train, y_train)
Trains model on inputs
16,692
def get_max_similar(string, lst):
    """Return ``(similarity, index)`` of the list entry most similar to
    *string*, or ``(0.0, -1)`` when nothing scores above zero."""
    best_score = 0.0
    best_index = -1
    for idx, candidate in enumerate(lst):
        score = how_similar_are(str(string), str(candidate))
        if score > best_score:
            best_score = score
            best_index = idx
    return best_score, best_index
Finds most similar string in list
16,693
def get_average_length_of_string(strings):
    """Return the mean length of the given strings (0 for an empty input)."""
    if not strings:
        return 0
    total_length = 0
    for word in strings:
        total_length += len(word)
    return total_length / len(strings)
Computes average length of words
16,694
def true_false_returns(func):
    """Decorator: call *func* and report success as a boolean.

    Returns True when the call completes, False when it raises. The
    original used a bare ``except``, which also swallowed
    KeyboardInterrupt/SystemExit; narrowed to ``Exception``.
    """
    @functools.wraps(func)
    def _execute(*args, **kwargs):
        try:
            func(*args, **kwargs)
            return True
        except Exception:
            return False
    return _execute
Decorator that executes the function and returns False if it raises an error, True otherwise.
16,695
def none_returns(func):
    """Decorator: call *func* and return its value, or None on error.

    The original used a bare ``except``, which also swallowed
    KeyboardInterrupt/SystemExit; narrowed to ``Exception``.
    """
    @functools.wraps(func)
    def _execute(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except Exception:
            return None
    return _execute
Decorator that executes the function and returns None if it raises an error, otherwise the function's return value.
16,696
def select_dag_nodes(reftrack):
    """Select all DAG nodes living in the given reftrack's namespace."""
    refobj = reftrack.get_refobj()
    if not refobj:
        return
    # Combine the refobj's parent namespace with its own namespace attr.
    parent_ns = common.get_namespace(refobj)
    child_ns = cmds.getAttr("%s.namespace" % refobj)
    full_ns = ":".join((parent_ns.rstrip(":"), child_ns.lstrip(":")))
    members = cmds.namespaceInfo(full_ns, listOnlyDependencyNodes=True,
                                 dagPath=True, recurse=True)
    dag_nodes = cmds.ls(members, dag=True, ap=True)
    cmds.select(dag_nodes, replace=True)
Select all dag nodes of the given reftrack
16,697
def get_scenenode(self, nodes):
    """Return the alphabetically first ``jb_sceneNode`` among *nodes*.

    :raises AssertionError: when no scene node is found
    """
    scene_nodes = cmds.ls(nodes, type='jb_sceneNode')
    assert scene_nodes, "Found no scene nodes!"
    # min() is equivalent to sorted(...)[0] for a non-empty list.
    return min(scene_nodes)
Get the scenenode in the given nodes
16,698
def reference(self, refobj, taskfileinfo):
    """Reference the given taskfileinfo into the scene.

    Creates the reference under a suggested namespace, links the new
    scene node to *refobj*, groups the referenced DAG content, and
    returns the created reference node.
    """
    # Make sure the reference is created relative to the root namespace.
    with common.preserve_namespace(":"):
        jbfile = JB_File(taskfileinfo)
        filepath = jbfile.get_fullpath()
        ns_suggestion = reftrack.get_namespace(taskfileinfo)
        newnodes = cmds.file(filepath, reference=True,
                             namespace=ns_suggestion,
                             returnNewNodes=True)
        # Find the top-level reference node among the created nodes (the
        # one that is not itself referenced by another file).
        # NOTE(review): `node` is unbound if no such node exists -- the
        # following referenceQuery would then raise NameError.
        for refnode in cmds.ls(newnodes, type='reference'):
            if not cmds.referenceQuery(refnode, isNodeReferenced=True):
                node = refnode
                break
        # Maya may have altered the suggested namespace; query the real one.
        ns = cmds.referenceQuery(node, namespace=True)
        content = cmds.namespaceInfo(ns, listOnlyDependencyNodes=True,
                                     dagPath=True)
        scenenode = self.get_scenenode(content)
        self.get_refobjinter().connect_reftrack_scenenode(refobj, scenenode)
        # Group all top-level DAG content of the namespace, if any.
        reccontent = cmds.namespaceInfo(ns, listOnlyDependencyNodes=True,
                                        dagPath=True, recurse=True)
        dagcontent = cmds.ls(reccontent, ap=True, assemblies=True)
        if not dagcontent:
            return node
        grpname = reftrack.get_groupname(taskfileinfo)
        reftrack.group_content(dagcontent, ns, grpname, "jb_asset")
        return node
Reference the given taskfileinfo into the scene and return the created reference node
16,699
def replace(self, refobj, reference, taskfileinfo):
    """Swap the file loaded by *reference* for the one described by
    *taskfileinfo* and reconnect the reftrack to the new scene node."""
    filepath = JB_File(taskfileinfo).get_fullpath()
    cmds.file(filepath, loadReference=reference)
    namespace = cmds.referenceQuery(reference, namespace=True)
    members = cmds.namespaceInfo(namespace, listOnlyDependencyNodes=True,
                                 dagPath=True)
    scenenode = self.get_scenenode(members)
    self.get_refobjinter().connect_reftrack_scenenode(refobj, scenenode)
Replace the given reference with the given taskfileinfo