idx int64 0 63k | question stringlengths 53 5.28k | target stringlengths 5 805 |
|---|---|---|
57,200 | def get_table_info ( self , tablename ) : conn = self . __get_conn ( ) ret = a99 . get_table_info ( conn , tablename ) if len ( ret ) == 0 : raise RuntimeError ( "Cannot get info for table '{}'" . format ( tablename ) ) more = self . gui_info . get ( tablename ) for row in ret . values ( ) : caption , tooltip = None , None if more : info = more . get ( row [ "name" ] ) if info : caption , tooltip = info row [ "caption" ] = caption row [ "tooltip" ] = tooltip return ret | Returns information about fields of a specific table |
57,201 | def __get_conn ( self , flag_force_new = False , filename = None ) : flag_open_new = flag_force_new or not self . _conn_is_open ( ) if flag_open_new : if filename is None : filename = self . filename conn = self . _get_conn ( filename ) self . _conn = conn else : conn = self . _conn return conn | Returns connection to database . Tries to return existing connection unless flag_force_new |
57,202 | def flatten_multidict ( multidict ) : return dict ( [ ( key , value if len ( value ) > 1 else value [ 0 ] ) for ( key , value ) in multidict . iterlists ( ) ] ) | Return flattened dictionary from MultiDict . |
57,203 | def __setitem ( self , chunk , key , keys , value , extend = False ) : def setitem ( chunk ) : if keys : return self . __setitem ( chunk , keys [ 0 ] , keys [ 1 : ] , value , extend ) else : return value if key in [ '.' , ']' ] : chunk [ key ] = value elif ']' in key : key = int ( key [ : - 1 ] . replace ( 'n' , '-1' ) ) if extend : if chunk is None : chunk = [ None , ] else : if not isinstance ( chunk , list ) : chunk = [ chunk , ] if key != - 1 : chunk . insert ( key , None ) else : chunk . append ( None ) else : if chunk is None : chunk = [ None , ] chunk [ key ] = setitem ( chunk [ key ] ) else : if extend : if chunk is None : chunk = { } chunk [ key ] = None chunk [ key ] = setitem ( chunk [ key ] ) elif key not in chunk : chunk [ key ] = None chunk [ key ] = setitem ( chunk [ key ] ) else : if keys : chunk [ key ] = setitem ( chunk [ key ] ) else : if not isinstance ( chunk [ key ] , list ) : chunk [ key ] = [ chunk [ key ] , ] chunk [ key ] . append ( None ) chunk [ key ] [ - 1 ] = setitem ( chunk [ key ] [ - 1 ] ) else : if chunk is None : chunk = { } if key not in chunk : chunk [ key ] = None chunk [ key ] = setitem ( chunk [ key ] ) return chunk | Helper function to fill up the dictionary . |
57,204 | def set ( self , key , value , extend = False , ** kwargs ) : self . __setitem__ ( key , value , extend , ** kwargs ) | Extended standard set function . |
57,205 | def create_default_database ( reset : bool = False ) -> GraphDatabaseInterface : import sqlalchemy from sqlalchemy . ext . declarative import declarative_base from sqlalchemy . orm import sessionmaker from sqlalchemy . pool import StaticPool Base = declarative_base ( ) engine = sqlalchemy . create_engine ( "sqlite:///SpotifyArtistGraph.db" , poolclass = StaticPool ) Session = sessionmaker ( bind = engine ) dbi : GraphDatabaseInterface = create_graph_database_interface ( sqlalchemy , Session ( ) , Base , sqlalchemy . orm . relationship ) if reset : Base . metadata . drop_all ( engine ) Base . metadata . create_all ( engine ) return dbi | Creates and returns a default SQLAlchemy database interface to use . |
57,206 | def _create_node ( self , index : int , name : str , external_id : Optional [ str ] = None ) -> SpotifyArtistNode : if external_id is None : graph : SpotifyArtistGraph = self . _graph items : List [ NameExternalIDPair ] = graph . client . search_artists_by_name ( name ) for item in items : if item . name == name : external_id = item . external_id break return SpotifyArtistNode ( graph = self . _graph , index = index , name = name , external_id = external_id ) | Returns a new SpotifyArtistNode instance with the given index and name . |
57,207 | def access_token ( self ) -> str : if self . _token_expires_at < time . time ( ) + self . _REFRESH_THRESHOLD : self . request_token ( ) return self . _token [ "access_token" ] | The access token stored within the requested token . |
57,208 | def request_token ( self ) -> None : response : requests . Response = requests . post ( self . _TOKEN_URL , auth = HTTPBasicAuth ( self . _client_id , self . _client_key ) , data = { "grant_type" : self . _GRANT_TYPE } , verify = True ) response . raise_for_status ( ) self . _token = response . json ( ) self . _token_expires_at = time . time ( ) + self . _token [ "expires_in" ] | Requests a new Client Credentials Flow authentication token from the Spotify API and stores it in the token property of the object . |
57,209 | def search_artists_by_name ( self , artist_name : str , limit : int = 5 ) -> List [ NameExternalIDPair ] : response : requests . Response = requests . get ( self . _API_URL_TEMPLATE . format ( "search" ) , params = { "q" : artist_name , "type" : "artist" , "limit" : limit } , headers = { "Authorization" : "Bearer {}" . format ( self . _token . access_token ) } ) response . raise_for_status ( ) if not response . text : return [ ] result : List [ NameExternalIDPair ] = [ ] data : List [ Dict ] = response . json ( ) [ "artists" ] [ "items" ] for artist in data : artist = NameExternalIDPair ( artist [ "name" ] . strip ( ) , artist [ "id" ] . strip ( ) ) if not artist . name or not artist . external_id : raise SpotifyClientError ( "Name or ID is missing" ) result . append ( artist ) return result | Returns zero or more artist name - external ID pairs that match the specified artist name . |
57,210 | def colors ( lang = "en" ) : cache_name = "colors.%s.json" % lang data = get_cached ( "colors.json" , cache_name , params = dict ( lang = lang ) ) return data [ "colors" ] | This resource returns all dyes in the game including localized names and their color component information . |
57,211 | def event_names ( lang = "en" ) : cache_name = "event_names.%s.json" % lang data = get_cached ( "event_names.json" , cache_name , params = dict ( lang = lang ) ) return dict ( [ ( event [ "id" ] , event [ "name" ] ) for event in data ] ) | This resource returns an unordered list of the localized event names for the specified language . |
57,212 | def event_details ( event_id = None , lang = "en" ) : if event_id : cache_name = "event_details.%s.%s.json" % ( event_id , lang ) params = { "event_id" : event_id , "lang" : lang } else : cache_name = "event_details.%s.json" % lang params = { "lang" : lang } data = get_cached ( "event_details.json" , cache_name , params = params ) events = data [ "events" ] return events . get ( event_id ) if event_id else events | This resource returns static details about available events . |
57,213 | def PhenomModel ( self , r ) : if r <= 0 : raise ValueError field = self . B0 + self . B1 * G4 . m / r + self . B2 * math . exp ( - 1 * self . H * r / G4 . m ) return field | Fit to field map |
57,214 | def set_dir ( self , dir_ ) : self . __lock_set_dir ( dir_ ) self . __lock_auto_load ( ) self . __lock_update_table ( ) self . __update_info ( ) self . __update_window_title ( ) | Sets directory auto - loads updates all GUI contents . |
57,215 | def __update_info ( self ) : from f311 import explorer as ex import f311 t = self . tableWidget z = self . listWidgetVis z . clear ( ) classes = self . __vis_classes = [ ] propss = self . __lock_get_current_propss ( ) npp = len ( propss ) s0 , s1 = "" , "" if npp == 1 : p = propss [ 0 ] if p . flag_scanned : if isinstance ( p . f , f311 . DataFile ) : classes . extend ( f311 . get_suitable_vis_classes ( p . f ) ) if ex . VisPrint in classes : classes . remove ( ex . VisPrint ) if p . flag_text : classes . append ( "txt" ) for x in classes : if x == "txt" : text = "View plain text" else : text = x . action text += " (" + x . __name__ + ")" item = QListWidgetItem ( text ) z . addItem ( item ) s0 = p . get_summary ( ) s1 = p . get_info ( ) elif npp >= 2 : s0 = "{0:d} selected" . format ( npp ) ff = [ p . f for p in propss ] flag_spectra = all ( [ isinstance ( f , f311 . FileSpectrum ) for f in ff ] ) has_pyfant = False try : import pyfant has_pyfant = True except : pass flag_mod = False if has_pyfant : flag_mod = all ( [ isinstance ( f , pyfant . FileModBin ) and len ( f . records ) > 1 for f in ff ] ) if flag_spectra : z . addItem ( QListWidgetItem ( "Plot spectra stacked" ) ) classes . append ( "sta" ) z . addItem ( QListWidgetItem ( "Plot spectra overlapped" ) ) classes . append ( "ovl" ) elif flag_mod : z . addItem ( QListWidgetItem ( "View model grid" ) ) classes . append ( "modgrid" ) self . labelSummary . setText ( s0 ) self . textEditInfo . setPlainText ( s1 ) | Updates visualization options and file info areas . |
57,216 | def validate ( self , value ) : if value in self . empty_values and self . required : raise ValidationError ( self . error_messages [ 'required' ] ) | This was overridden to have our own empty_values . |
57,217 | def clean ( self , value ) : obj = self . factory . create ( value ) if obj : del obj . fields del obj . alias del obj . validators del obj . required del obj . factory self . _validate_existence ( obj ) self . _run_validators ( obj ) return obj | Clean the data and validate the nested spec . |
57,218 | def serialize ( self , value , entity , request ) : self . _validate_existence ( value ) self . _run_validators ( value ) if not value : return value return self . factory . serialize ( value , request ) | Propagate to nested fields . |
57,219 | def _run_validators ( self , value ) : errors = [ ] for v in self . validators : try : v ( value ) except ValidationError , e : errors . extend ( e . messages ) if errors : raise ValidationError ( errors ) | Execute all associated validators . |
57,220 | def get_all_active ( self ) : now = timezone . now ( ) return self . select_related ( ) . filter ( active_datetime__lte = now , inactive_datetime__gte = now ) . order_by ( 'active_datetime' ) | Get all of the active messages ordered by the active_datetime . |
57,221 | def render_word ( self , word , size , color ) : pygame . font . init ( ) font = pygame . font . Font ( None , size ) self . rendered_word = font . render ( word , 0 , color ) self . word_size = font . size ( word ) | Creates a surface that contains a word . |
57,222 | def plot_word ( self , position ) : posrectangle = pygame . Rect ( position , self . word_size ) self . used_pos . append ( posrectangle ) self . cloud . blit ( self . rendered_word , position ) | Blits a rendered word on to the main display surface |
57,223 | def collides ( self , position , size ) : word_rect = pygame . Rect ( position , self . word_size ) if word_rect . collidelistall ( self . used_pos ) == [ ] : return False else : return True | Returns True if the word collides with another plotted word . |
57,224 | def expand ( self , delta_width , delta_height ) : temp_surface = pygame . Surface ( ( self . width + delta_width , self . height + delta_height ) ) ( self . width , self . height ) = ( self . width + delta_width , self . height + delta_height ) temp_surface . blit ( self . cloud , ( 0 , 0 ) ) self . cloud = temp_surface | Makes the cloud surface bigger . Maintains all word positions . |
57,225 | def smart_cloud ( self , input , max_text_size = 72 , min_text_size = 12 , exclude_words = True ) : self . exclude_words = exclude_words if isdir ( input ) : self . directory_cloud ( input , max_text_size , min_text_size ) elif isfile ( input ) : text = read_file ( input ) self . text_cloud ( text , max_text_size , min_text_size ) elif isinstance ( input , basestring ) : self . text_cloud ( input , max_text_size , min_text_size ) else : print 'Input type not supported.' print 'Supported types: String, Directory, .txt file' | Creates a word cloud using the input . Input can be a file directory or text . Set exclude_words to true if you want to eliminate words that only occur once . |
57,226 | def directory_cloud ( self , directory , max_text_size = 72 , min_text_size = 12 , expand_width = 50 , expand_height = 50 , max_count = 100000 ) : worddict = assign_fonts ( tuplecount ( read_dir ( directory ) ) , max_text_size , min_text_size , self . exclude_words ) sorted_worddict = list ( reversed ( sorted ( worddict . keys ( ) , key = lambda x : worddict [ x ] ) ) ) colordict = assign_colors ( dir_freq ( directory ) ) num_words = 0 for word in sorted_worddict : self . render_word ( word , worddict [ word ] , colordict [ word ] ) if self . width < self . word_size [ 0 ] : self . expand ( self . word_size [ 0 ] - self . width , 0 ) elif self . height < self . word_size [ 1 ] : self . expand ( 0 , self . word_size [ 1 ] - self . height ) position = [ randint ( 0 , self . width - self . word_size [ 0 ] ) , randint ( 0 , self . height - self . word_size [ 1 ] ) ] loopcount = 0 while self . collides ( position , self . word_size ) : if loopcount > max_count : self . expand ( expand_width , expand_height ) loopcount = 0 position = [ randint ( 0 , self . width - self . word_size [ 0 ] ) , randint ( 0 , self . height - self . word_size [ 1 ] ) ] loopcount += 1 self . plot_word ( position ) num_words += 1 | Creates a word cloud using files from a directory . The color of the words correspond to the amount of documents the word occurs in . |
57,227 | def text_cloud ( self , text , max_text_size = 72 , min_text_size = 12 , expand_width = 50 , expand_height = 50 , max_count = 100000 ) : worddict = assign_fonts ( tuplecount ( text ) , max_text_size , min_text_size , self . exclude_words ) sorted_worddict = list ( reversed ( sorted ( worddict . keys ( ) , key = lambda x : worddict [ x ] ) ) ) for word in sorted_worddict : self . render_word ( word , worddict [ word ] , ( randint ( 0 , 255 ) , randint ( 0 , 255 ) , randint ( 0 , 255 ) ) ) if self . width < self . word_size [ 0 ] : self . expand ( self . word_size [ 0 ] - self . width , 0 ) elif self . height < self . word_size [ 1 ] : self . expand ( 0 , self . word_size [ 1 ] - self . height ) position = [ randint ( 0 , self . width - self . word_size [ 0 ] ) , randint ( 0 , self . height - self . word_size [ 1 ] ) ] loopcount = 0 while self . collides ( position , self . word_size ) : if loopcount > max_count : self . expand ( expand_width , expand_height ) loopcount = 0 position = [ randint ( 0 , self . width - self . word_size [ 0 ] ) , randint ( 0 , self . height - self . word_size [ 1 ] ) ] loopcount += 1 self . plot_word ( position ) | Creates a word cloud using plain text . |
57,228 | def display ( self ) : pygame . init ( ) self . display = pygame . display . set_mode ( ( self . width , self . height ) ) self . display . blit ( self . cloud , ( 0 , 0 ) ) pygame . display . update ( ) while True : for event in pygame . event . get ( ) : if event . type == pygame . QUIT : pygame . quit ( ) return | Displays the word cloud to the screen . |
57,229 | def fermi_dist ( energy , beta ) : exponent = np . asarray ( beta * energy ) . clip ( - 600 , 600 ) return 1. / ( np . exp ( exponent ) + 1 ) | Fermi Dirac distribution |
57,230 | def diagonalize ( operator ) : eig_values , eig_vecs = LA . eigh ( operator ) emin = np . amin ( eig_values ) eig_values -= emin return eig_values , eig_vecs | diagonalizes single site Spin Hamiltonian |
57,231 | def gf_lehmann ( eig_e , eig_states , d_dag , beta , omega , d = None ) : ew = np . exp ( - beta * eig_e ) zet = ew . sum ( ) G = np . zeros_like ( omega ) basis_create = np . dot ( eig_states . T , d_dag . dot ( eig_states ) ) if d is None : tmat = np . square ( basis_create ) else : tmat = np . dot ( eig_states . T , d . T . dot ( eig_states ) ) * basis_create tmat *= np . add . outer ( ew , ew ) gap = np . add . outer ( - eig_e , eig_e ) N = eig_e . size for i , j in product ( range ( N ) , range ( N ) ) : G += tmat [ i , j ] / ( omega + gap [ i , j ] ) return G / zet | Outputs the lehmann representation of the greens function omega has to be given as matsubara or real frequencies |
57,232 | def expected_value ( operator , eig_values , eig_states , beta ) : aux = np . einsum ( 'i,ji,ji' , np . exp ( - beta * eig_values ) , eig_states , operator . dot ( eig_states ) ) return aux / partition_func ( beta , eig_values ) | Calculates the average value of an observable it requires that states and operators have the same base |
57,233 | def get_plain_text ( self ) : _msg = self . message if self . message is not None else [ "" ] msg = _msg if isinstance ( _msg , list ) else [ _msg ] line = "" if not self . line else ", line {}" . format ( self . line ) ret = [ "{} found in file '{}'{}::" . format ( self . type . capitalize ( ) , self . filename , line ) , " <<" ] + [ " " + x for x in msg ] + [ " >>" ] return ret | Returns a list |
57,234 | def get_plain_text ( self ) : ret = [ ] for occ in self . occurrences : ret . extend ( occ . get_plain_text ( ) ) return ret | Returns a list of strings |
57,235 | def crunch_dir ( name , n = 50 ) : if len ( name ) > n + 3 : name = "..." + name [ - n : ] return name | Puts ... in the middle of a directory name if lengh > n . |
57,236 | def _add_log_tab ( self ) : text_tab = "Log" self . pages . append ( MyPage ( text_tab = text_tab ) ) te = self . textEdit_log = self . keep_ref ( QTextEdit ( ) ) te . setReadOnly ( True ) self . tabWidget . addTab ( te , text_tab ) | Adds element to pages and new tab |
57,237 | def keyPressEvent ( self , evt ) : incr = 0 if evt . modifiers ( ) == Qt . ControlModifier : n = self . tabWidget . count ( ) if evt . key ( ) in [ Qt . Key_PageUp , Qt . Key_Backtab ] : incr = - 1 elif evt . key ( ) in [ Qt . Key_PageDown , Qt . Key_Tab ] : incr = 1 if incr != 0 : new_index = self . _get_tab_index ( ) + incr if new_index < 0 : new_index = n - 1 elif new_index >= n : new_index = 0 self . tabWidget . setCurrentIndex ( new_index ) | This handles Ctrl + PageUp Ctrl + PageDown Ctrl + Tab Ctrl + Shift + Tab |
57,238 | def _on_changed ( self ) : page = self . _get_page ( ) if not page . flag_autosave : page . flag_changed = True self . _update_gui_text_tabs ( ) | Slot for changed events |
57,239 | def _update_gui_text_tabs ( self ) : for index , page in enumerate ( self . pages ) : self . tabWidget . setTabText ( index , "{} (Alt+&{}){}" . format ( page . text_tab , index + 1 , ( " (changed)" if page . flag_changed else "" ) ) ) | Iterates through pages to update tab texts |
57,240 | def __generic_save ( self ) : page = self . _get_page ( ) f = page . editor . f if not f : return True if not page . editor . flag_valid : a99 . show_error ( "Cannot save, {0!s} has error(s)!" . format ( f . description ) ) return True if f . filename : f . save_as ( ) self . add_log ( "Saved '{}'" . format ( f . filename ) ) page . flag_changed = False self . _update_gui_text_tabs ( ) if hasattr ( page . editor , "update_gui_label_fn" ) : page . editor . update_gui_label_fn ( ) return True else : return self . __generic_save_as ( ) | Returns False if user has cancelled a save as operation otherwise True . |
57,241 | def __generic_save_as ( self ) : page = self . _get_page ( ) if not page . editor . f : return True if page . editor . f . filename : d = page . editor . f . filename else : d = os . path . join ( self . save_dir if self . save_dir is not None else self . load_dir if self . load_dir is not None else "." , page . editor . f . default_filename ) new_filename = QFileDialog . getSaveFileName ( self , page . make_text_saveas ( ) , d , page . wild ) [ 0 ] if new_filename : self . save_dir , _ = os . path . split ( str ( new_filename ) ) page . editor . f . save_as ( str ( new_filename ) ) page . flag_changed = False self . _update_gui_text_tabs ( ) page . editor . update_gui_label_fn ( ) return True return False | Returns False if user has cancelled operation otherwise True . |
57,242 | def _start_nodes_sequentially ( self , nodes ) : started_nodes = set ( ) for node in copy ( nodes ) : started = self . _start_node ( node ) if started : started_nodes . add ( node ) self . repository . save_or_update ( self ) return started_nodes | Start the nodes sequentially without forking . |
57,243 | def _start_nodes_parallel ( self , nodes , max_thread_pool_size ) : thread_pool_size = min ( len ( nodes ) , max_thread_pool_size ) thread_pool = Pool ( processes = thread_pool_size ) log . debug ( "Created pool of %d threads" , thread_pool_size ) keep_running = True def sigint_handler ( signal , frame ) : log . error ( "Interrupted: will save cluster state and exit" " after all nodes have started." ) keep_running = False with sighandler ( signal . SIGINT , sigint_handler ) : result = thread_pool . map_async ( self . _start_node , nodes ) while not result . ready ( ) : result . wait ( 1 ) if not keep_running : log . error ( "Aborting upon user interruption ..." ) thread_pool . close ( ) thread_pool . join ( ) self . repository . save_or_update ( self ) sys . exit ( 1 ) return set ( node for node , ok in itertools . izip ( nodes , result . get ( ) ) if ok ) | Start the nodes using a pool of multiprocessing threads for speed - up . |
57,244 | def _start_node ( node ) : log . debug ( "_start_node: working on node `%s`" , node . name ) if node . is_alive ( ) : log . info ( "Not starting node `%s` which is " "already up&running." , node . name ) return True else : try : node . start ( ) log . info ( "Node `%s` has been started." , node . name ) return True except Exception as err : log . exception ( "Could not start node `%s`: %s -- %s" , node . name , err , err . __class__ ) return False | Start the given node VM . |
57,245 | def get_all_nodes ( self ) : nodes = self . nodes . values ( ) if nodes : return reduce ( operator . add , nodes , list ( ) ) else : return [ ] | Returns a list of all nodes in this cluster as a mixed list of different node kinds . |
57,246 | def get_frontend_node ( self ) : if self . ssh_to : if self . ssh_to in self . nodes : cls = self . nodes [ self . ssh_to ] if cls : return cls [ 0 ] else : log . warning ( "preferred `ssh_to` `%s` is empty: unable to " "get the choosen frontend node from that class." , self . ssh_to ) else : raise NodeNotFound ( "Invalid ssh_to `%s`. Please check your " "configuration file." % self . ssh_to ) for cls in sorted ( self . nodes . keys ( ) ) : if self . nodes [ cls ] : return self . nodes [ cls ] [ 0 ] raise NodeNotFound ( "Unable to find a valid frontend: " "cluster has no nodes!" ) | Returns the first node of the class specified in the configuration file as ssh_to or the first node of the first class in alphabetic order . |
57,247 | def new ( self , kind , ** extra ) : if self . _free [ kind ] : index = self . _free [ kind ] . pop ( ) else : self . _top [ kind ] += 1 index = self . _top [ kind ] return self . _format ( self . pattern , kind = kind , index = index , ** extra ) | Return a host name for a new node of the given kind . |
57,248 | def use ( self , kind , name ) : try : params = self . _parse ( name ) index = int ( params [ 'index' ] , 10 ) if index in self . _free [ kind ] : self . _free [ kind ] . remove ( index ) top = self . _top [ kind ] if index > top : self . _free [ kind ] . update ( range ( top + 1 , index ) ) self . _top [ kind ] = index except ValueError : log . warning ( "Cannot extract numerical index" " from node name `%s`!" , name ) | Mark a node name as used . |
57,249 | def free ( self , kind , name ) : try : params = self . _parse ( name ) index = int ( params [ 'index' ] , 10 ) self . _free [ kind ] . add ( index ) assert index <= self . _top [ kind ] if index == self . _top [ kind ] : self . _top [ kind ] -= 1 except ValueError : pass | Mark a node name as no longer in use . |
57,250 | def is_alive ( self ) : running = False if not self . instance_id : return False try : log . debug ( "Getting information for instance %s" , self . instance_id ) running = self . _cloud_provider . is_instance_running ( self . instance_id ) except Exception as ex : log . debug ( "Ignoring error while looking for vm id %s: %s" , self . instance_id , str ( ex ) ) if running : log . debug ( "node `%s` (instance id %s) is up and running" , self . name , self . instance_id ) self . update_ips ( ) else : log . debug ( "node `%s` (instance id `%s`) still building..." , self . name , self . instance_id ) return running | Checks if the current node is up and running in the cloud . It only checks the status provided by the cloud interface . Therefore a node might be running but not yet ready to ssh into it . |
57,251 | def connect ( self , keyfile = None ) : ssh = paramiko . SSHClient ( ) ssh . set_missing_host_key_policy ( paramiko . AutoAddPolicy ( ) ) if keyfile and os . path . exists ( keyfile ) : ssh . load_host_keys ( keyfile ) ips = self . ips [ : ] if self . preferred_ip : if self . preferred_ip in ips : ips . remove ( self . preferred_ip ) else : log . debug ( "IP %s does not seem to belong to %s anymore. Ignoring!" , self . preferred_ip , self . name ) self . preferred_ip = ips [ 0 ] for ip in itertools . chain ( [ self . preferred_ip ] , ips ) : if not ip : continue try : log . debug ( "Trying to connect to host %s (%s)" , self . name , ip ) addr , port = parse_ip_address_and_port ( ip , SSH_PORT ) ssh . connect ( str ( addr ) , username = self . image_user , allow_agent = True , key_filename = self . user_key_private , timeout = Node . connection_timeout , port = port ) log . debug ( "Connection to %s succeeded on port %d!" , ip , port ) if ip != self . preferred_ip : log . debug ( "Setting `preferred_ip` to %s" , ip ) self . preferred_ip = ip return ssh except socket . error as ex : log . debug ( "Host %s (%s) not reachable: %s." , self . name , ip , ex ) except paramiko . BadHostKeyException as ex : log . error ( "Invalid host key: host %s (%s); check keyfile: %s" , self . name , ip , keyfile ) except paramiko . SSHException as ex : log . debug ( "Ignoring error %s connecting to %s" , str ( ex ) , self . name ) return None | Connect to the node via ssh using the paramiko library . |
57,252 | def set_item ( self , path_ , value ) : section , path_ = self . _get_section ( path_ ) section [ path_ [ - 1 ] ] = value self . write ( ) | Sets item and automatically saves file |
57,253 | def all_subslices ( itr ) : assert iterable ( itr ) , 'generators.all_subslices only accepts iterable arguments, not {}' . format ( itr ) if not hasattr ( itr , '__len__' ) : itr = deque ( itr ) len_itr = len ( itr ) for start , _ in enumerate ( itr ) : d = deque ( ) for i in islice ( itr , start , len_itr ) : d . append ( i ) yield tuple ( d ) | generates every possible slice that can be generated from an iterable |
57,254 | def data_read_write ( data_path_in , data_path_out , format_type , ** kwargs ) : if format_type == "dense" : kwargs = _set_dense_defaults_and_eval ( kwargs ) try : nlc = [ nm . strip ( ) for nm in kwargs [ 'non_label_cols' ] . split ( "," ) ] kwargs . pop ( 'non_label_cols' , None ) except KeyError : raise KeyError ( "'non_label_cols' is a required keyword dense data" ) arch_data = pd . read_csv ( data_path_in , sep = kwargs [ 'delimiter' ] , na_values = kwargs [ 'na_values' ] ) form_data = format_dense ( arch_data , nlc , ** kwargs ) elif format_type == "grid" : pass elif format_type == "stacked" : pass elif format_type == "transect" : pass else : raise NameError ( "%s is not a supported data format" % format_type ) form_data . to_csv ( data_path_out , index = False ) | General function to read format and write data . |
57,255 | def format_dense ( base_data , non_label_cols , ** kwargs ) : kwargs = _set_dense_defaults_and_eval ( kwargs ) indexed_data = base_data . set_index ( keys = non_label_cols ) columnar_data = indexed_data . stack ( dropna = False ) columnar_data = columnar_data . reset_index ( ) num = len ( non_label_cols ) columnar_data . rename ( columns = { 0 : kwargs [ 'count_col' ] , 'level_%i' % num : kwargs [ 'label_col' ] } , inplace = True ) if kwargs [ 'nan_to_zero' ] : ind = np . isnan ( columnar_data [ kwargs [ 'count_col' ] ] ) columnar_data . loc [ ind , kwargs [ 'count_col' ] ] = 0 columnar_data . reset_index ( inplace = True , drop = True ) if kwargs [ 'drop_na' ] : columnar_data = columnar_data . dropna ( how = "any" ) columnar_data . reset_index ( inplace = True , drop = True ) return columnar_data | Formats dense data type to stacked data type . |
57,256 | def _set_dense_defaults_and_eval ( kwargs ) : kwargs [ 'delimiter' ] = kwargs . get ( 'delimiter' , ',' ) kwargs [ 'na_values' ] = kwargs . get ( 'na_values' , '' ) kwargs [ 'nan_to_zero' ] = kwargs . get ( 'nan_to_zero' , False ) kwargs [ 'drop_na' ] = kwargs . get ( 'drop_na' , False ) kwargs [ 'label_col' ] = kwargs . get ( 'label_col' , 'label' ) kwargs [ 'count_col' ] = kwargs . get ( 'count_col' , 'count' ) for key , val in kwargs . iteritems ( ) : try : kwargs [ key ] = eval ( val ) except : kwargs [ key ] = val return kwargs | Sets default values in kwargs if kwargs are not already given . |
57,257 | def plot_spectra_stacked ( ss , title = None , num_rows = None , setup = _default_setup ) : draw_spectra_stacked ( ss , title , num_rows , setup ) plt . show ( ) | Plots one or more stacked in subplots sharing same x - axis . |
57,258 | def plot_spectra_overlapped ( ss , title = None , setup = _default_setup ) : plt . figure ( ) draw_spectra_overlapped ( ss , title , setup ) plt . show ( ) | Plots one or more spectra in the same plot . |
57,259 | def plot_spectra_pieces_pdf ( ss , aint = 10 , pdf_filename = 'pieces.pdf' , setup = _default_setup ) : import f311 . explorer as ex xmin , xmax , ymin_ , ymax , _ , yspan = calc_max_min ( ss ) ymin = ymin_ if setup . ymin is None else setup . ymin num_pages = int ( math . ceil ( ( xmax - xmin ) / aint ) ) a99 . format_BLB ( ) pdf = matplotlib . backends . backend_pdf . PdfPages ( pdf_filename ) logger = a99 . get_python_logger ( ) for h in range ( num_pages ) : fig = plt . figure ( ) lambda0 = xmin + h * aint lambda1 = lambda0 + aint logger . info ( "Printing page {0:d}/{1:d} ([{2:g}, {3:g}])" . format ( h + 1 , num_pages , lambda0 , lambda1 ) ) for i , s in enumerate ( ss ) : s_cut = ex . cut_spectrum ( s , lambda0 , lambda1 ) ax = plt . gca ( ) ax . plot ( s_cut . x , s_cut . y , label = s . title ) if setup . flag_xlabel and setup . fmt_xlabel : plt . xlabel ( 'Wavelength (interval: [{0:g}, {1:g}])' . format ( lambda0 , lambda1 ) ) xspan = lambda1 - lambda0 ax . set_xlim ( [ lambda0 - xspan * _T , lambda1 + xspan * _T ] ) ax . set_ylim ( [ ymin - yspan * _T , ymax + yspan * _T ] ) if setup . flag_legend : leg = plt . legend ( loc = 0 ) a99 . format_legend ( leg ) plt . tight_layout ( ) pdf . savefig ( fig ) plt . close ( ) pdf . close ( ) logger . info ( "File {0!s} successfully created." . format ( pdf_filename ) ) | Plots spectra overlapped in small wavelength intervals into a PDF file one interval per page of the PDF file . |
57,260 | def plot_spectra_pages_pdf ( ss , pdf_filename = 'pages.pdf' , setup = _default_setup ) : logger = a99 . get_python_logger ( ) xmin , xmax , ymin_ , ymax , xspan , yspan = calc_max_min ( ss ) ymin = ymin_ if setup . ymin is None else setup . ymin num_pages = len ( ss ) a99 . format_BLB ( ) pdf = matplotlib . backends . backend_pdf . PdfPages ( pdf_filename ) for i , s in enumerate ( ss ) : title = s . title fig = plt . figure ( ) plt . plot ( s . x , s . y , c = _FAV_COLOR ) if setup . flag_xlabel and setup . fmt_xlabel : _set_plot ( plt . xlabel , setup . fmt_xlabel , s ) if setup . flag_ylabel and setup . fmt_ylabel : _set_plot ( plt . ylabel , setup . fmt_ylabel , s ) _set_plot ( plt . title , setup . fmt_title , s ) plt . xlim ( [ xmin - xspan * _T , xmax + xspan * _T ] ) plt . ylim ( [ ymin - yspan * _T , ymax + yspan * _T ] ) plt . tight_layout ( ) plt . subplots_adjust ( top = 0.94 ) logger . info ( "Printing page {0:d}/{1:d} ('{2!s}')" . format ( i + 1 , num_pages , title ) ) pdf . savefig ( fig ) plt . close ( ) pdf . close ( ) logger . info ( "File {0!s} successfully created." . format ( pdf_filename ) ) | Plots spectra into a PDF file one spectrum per page . |
def repeal_target(self):
    """The resolution this resolution has repealed, or is attempting to repeal.

    Raises:
        TypeError: if this resolution's category is not 'Repeal'.
    """
    if not self.category == 'Repeal':
        raise TypeError("This resolution doesn't repeal anything")
    # ``option`` holds the target's index; the +1 suggests the WA API is
    # 1-based while ``option`` is 0-based — TODO confirm against the API.
    return wa.resolution(int(self.option) + 1)
def resolution(self, index):
    """Resolution with a given index.

    Returns an awaitable API query yielding a :class:`Resolution`.

    Raises:
        NotFound: if no resolution exists at ``index``.
    """
    @api_query('resolution', id=str(index))
    async def result(_, root):
        elem = root.find('RESOLUTION')
        # Bug fix: ``Element.find`` returns None when the tag is missing,
        # but truth-testing an Element checks its *child count*, so the
        # original ``if not elem`` also fired for a found-but-childless
        # element. Compare against None explicitly.
        if elem is None:
            raise NotFound(f'No resolution found with index {index}')
        return Resolution(elem)
    return result(self)
async def resolution_at_vote(self, root):
    """The proposal currently being voted on.

    Returns a :class:`ResolutionAtVote`, or None when nothing is at vote.
    """
    elem = root.find('RESOLUTION')
    # Bug fix: truth-testing an Element reflects its child count, not its
    # presence; the original ``if elem:`` silently returned None for a
    # found-but-childless element. Test against None instead.
    if elem is not None:
        resolution = ResolutionAtVote(elem)
        resolution._council_id = self._council_id
        return resolution
def indent(self, levels, first_line=None):
    """Increase indentation by *levels* levels.

    Pushes one entry onto each of the parallel indentation stacks;
    *first_line* optionally overrides indentation for the first line.
    """
    self._indent_first_line.append(first_line)
    self._indentation_levels.append(levels)
def wrap(self, text, width=None, indent=None):
    """Return *text* wrapped to *width* and indented with *indent*.

    Defaults come from ``self.options.wrap_length`` and ``self.indentation``;
    the first line is indented with ``self.initial_indentation``.
    """
    if width is None:
        width = self.options.wrap_length
    if indent is None:
        indent = self.indentation
    return textwrap.fill(
        text,
        width=width,
        initial_indent=self.initial_indentation,
        subsequent_indent=indent,
    )
def Construct(self):
    """Construct a cuboid from a GDML file, without a sensitive detector.

    Parses ``self.filename`` with the GDML parser, logs the Geant4
    material table, and returns the world volume.
    """
    self.gdml_parser.Read(self.filename)
    self.world = self.gdml_parser.GetWorldVolume()
    self.log.info("Materials:")
    self.log.info(G4.G4Material.GetMaterialTable())
    return self.world
def Construct(self):
    """Construct nuSTORM from a GDML file.

    Attaches a scintillator sensitive detector to the two scintillator-bar
    logical volumes and a toroidal magnetic field to the steel plane, then
    returns the world volume. Assumes the GDML was already parsed (the world
    volume is fetched directly, unlike the cuboid variant).
    """
    self.world = self.gdml_parser.GetWorldVolume()
    self.sensitive_detector = ScintSD()
    # Volume IDs 1/2/0 are assumed to map to X bars, Y bars, and the steel
    # plane respectively; the asserts guard that assumption.
    my_lv = G4.G4LogicalVolumeStore.GetInstance().GetVolumeID(1)
    assert my_lv.GetName() == "ScintillatorBarX"
    my_lv.SetSensitiveDetector(self.sensitive_detector)
    my_lv = G4.G4LogicalVolumeStore.GetInstance().GetVolumeID(2)
    assert my_lv.GetName() == "ScintillatorBarY"
    my_lv.SetSensitiveDetector(self.sensitive_detector)
    my_lv = G4.G4LogicalVolumeStore.GetInstance().GetVolumeID(0)
    assert my_lv.GetName() == "SteelPlane"
    # Field applies to the steel plane only (last my_lv); the False flag is
    # passed straight to Geant4's SetFieldManager.
    self.field_manager = G4.G4FieldManager()
    self.my_field = MagneticField.WandsToroidField(self.field_polarity)
    self.field_manager.SetDetectorField(self.my_field)
    self.field_manager.CreateChordFinder(self.my_field)
    my_lv.SetFieldManager(self.field_manager, False)
    self.log.info("Materials:")
    self.log.info(G4.G4Material.GetMaterialTable())
    return self.world
def validate(bbllines: iter, *, profiling=False):
    """Yield lines of warnings and errors about input bbl lines.

    ``bbllines`` may be an iterable of lines, a filename, or a raw bubble
    string; strings are disambiguated heuristically below.

    Args:
        bbllines: bubble input (iterable of lines / path / raw text).
        profiling: when True, also yield INFO lines with counts.

    Yields:
        str: 'INFO ...', 'WARNING ...' or 'ERROR ...' messages.
    """
    if isinstance(bbllines, str):
        if os.path.exists(bbllines):
            # It's an existing path: read the file.
            bbllines = utils.file_lines(bbllines)
        elif '\n' not in bbllines or '\t' not in bbllines:
            # No newline or no tab: presumably still a filename — confirm.
            bbllines = utils.file_lines(bbllines)
        else:
            # Raw multi-line bubble text.
            bbllines = bbllines.split('\n')
    bubble = tuple(bbllines)
    data = tuple(utils.line_data(line) for line in bubble)
    types = tuple(utils.line_type(line) for line in bubble)
    if profiling:
        ltype_counts = Counter(types)
        for ltype, count in ltype_counts.items():
            yield 'INFO {} lines of type {}'.format(count, ltype)
        yield 'INFO {} lines of payload'.format(
            ltype_counts['EDGE'] + ltype_counts['IN'] +
            ltype_counts['NODE'] + ltype_counts['SET'])
    # Unparseable lines are reported but do not stop validation.
    for errline in (l for l, t in zip(bubble, types) if t == 'ERROR'):
        yield 'ERROR line is not bubble: "{}"'.format(errline)
    tree = BubbleTree.from_bubble_data(data)
    cc, subroots = tree.connected_components()
    if profiling:
        yield 'INFO {} top (power)nodes'.format(len(tree.roots))
        yield 'INFO {} connected components'.format(len(cc))
        yield 'INFO {} nodes are defined, {} are used'.format(
            ltype_counts['NODE'], len(tuple(tree.nodes())))
        yield 'INFO {} powernodes are defined, {} are used'.format(
            ltype_counts['SET'], len(tuple(tree.powernodes())))
    yield from inclusions_validation(tree)
    yield from mergeability_validation(tree)
def inclusions_validation(tree: BubbleTree) -> iter:
    """Yield messages about inclusion inconsistencies.

    Checks pairwise powernode overlap, empty/singleton powernodes,
    and inclusion cycles.
    """
    for one, two in it.combinations(tree.inclusions, 2):
        assert len(one) == len(one.strip())
        assert len(two) == len(two.strip())
        one_inc = set(included(one, tree.inclusions))
        two_inc = set(included(two, tree.inclusions))
        common_inc = one_inc & two_inc
        # Bug fix: the original compared ``len(common_inc)`` (an int) to a
        # *set* (``one_inc``/``two_inc``), which is always False, so these
        # two inconsistency checks could never fire. Compare lengths.
        if len(common_inc) == len(one_inc):
            if two not in one_inc:
                yield ("ERROR inconsistency in inclusions: {} is both"
                       " included and not included in {}.".format(two, one))
        if len(common_inc) == len(two_inc):
            if one not in two_inc:
                yield ("ERROR inconsistency in inclusions: {} is both"
                       " included and not included in {}.".format(one, two))
        if len(common_inc) > 0:
            # Partial overlap (neither side fully contained) is an error.
            if len(common_inc) == len(one_inc) or len(common_inc) == len(two_inc):
                pass
            else:
                yield ("ERROR overlapping powernodes:"
                       " {} nodes are shared by {} and {},"
                       " which are not in inclusion."
                       " Shared nodes are {}".format(len(common_inc), one, two, common_inc))
    for pwn in tree.powernodes():
        if len(tree.inclusions[pwn]) == 0:
            yield ("WARNING empty powernode: {} is defined,"
                   " but contains nothing".format(pwn))
        if len(tree.inclusions[pwn]) == 1:
            yield ("WARNING singleton powernode: {} is defined,"
                   " but contains only {}".format(pwn, tree.inclusions[pwn]))
    nodes_in_cycles = utils.have_cycle(tree.inclusions)
    if nodes_in_cycles:
        yield ("ERROR inclusion cycle: the following {}"
               " nodes are involved: {}".format(
                   len(nodes_in_cycles), set(nodes_in_cycles)))
def mergeability_validation(tree: BubbleTree) -> iter:
    """Yield messages about mergeable powernodes.

    Two (power)nodes are flagged when they live at the same level (both
    roots, or children of the same parent) and share at least one neighbor.
    """
    def gen_warnings(one, two, inc_message: str) -> [str]:
        "Yield the warning for given (power)nodes if necessary"
        nodetype = ''
        if tree.inclusions[one] and tree.inclusions[two]:
            nodetype = 'power'
        elif tree.inclusions[one] or tree.inclusions[two]:
            nodetype = '(power)'
        # Canonical ordering so the pair is reported deterministically.
        if one > two:
            one, two = two, one
        shared = set(tree.edges.get(one, ())) & set(tree.edges.get(two, ()))
        if shared:
            # NOTE(review): "neigbor" typo is part of the emitted message;
            # fixing it would change observable output.
            yield (f"WARNING mergeable {nodetype}nodes: {one} and {two}"
                   f" are {inc_message}, and share"
                   f" {len(shared)} neigbor{'s' if len(shared) > 1 else ''}")
    for one, two in it.combinations(tree.roots, 2):
        yield from gen_warnings(one, two, inc_message='both roots')
    for parent, childs in tree.inclusions.items():
        for one, two in it.combinations(childs, 2):
            yield from gen_warnings(one, two, inc_message=f'in the same level (under {parent})')
def guild_details(guild_id=None, name=None):
    """Return details about a guild, looked up by id or by name.

    When both are supplied, *guild_id* wins and a warning is emitted.

    Raises:
        Exception: when neither *guild_id* nor *name* is given.
    """
    if guild_id and name:
        warnings.warn("both guild_id and name are specified, "
                      "name will be ignored")
    if not guild_id and not name:
        raise Exception("specify either guild_id or name")
    if guild_id:
        params = {"guild_id": guild_id}
        lookup_key = guild_id
    else:
        params = {"guild_name": name}
        lookup_key = name
    cache_name = "guild_details.%s.json" % lookup_key
    return get_cached("guild_details.json", cache_name, params=params)
def chunks(stream, chunk_size, output_type=tuple):
    """Yield chunks of a stream.

    Args:
        stream: any iterable to be chunked.
        chunk_size: a positive int (fixed-size chunks), or a callable
            (delegates to the sibling ``chunk_on`` helper).
        output_type: callable applied to each chunk (default tuple).

    Yields:
        Chunks of *stream*, each converted with *output_type*; the final
        chunk may be shorter than *chunk_size*.
    """
    assert iterable(stream), 'chunks needs stream to be iterable'
    assert (isinstance(chunk_size, int) and chunk_size > 0) or callable(chunk_size), 'chunks needs chunk_size to be a positive int or callable'
    assert callable(output_type), 'chunks needs output_type to be callable'
    if callable(chunk_size):
        for chunk in chunk_on(stream, chunk_size, output_type):
            yield chunk
    else:
        it = iter(stream)
        # Unique sentinel so zip_longest padding can be stripped from the
        # final, possibly-short chunk without colliding with real values.
        marker = object()
        iters = [it] * chunk_size
        pipeline = apply_to_last(
            zip_longest(*iters, fillvalue=marker),
            lambda last_chunk: tuple(i for i in last_chunk if i is not marker))
        if output_type is not tuple:
            pipeline = map(output_type, pipeline)
        for chunk in pipeline:
            yield chunk
def get_charset(request):
    """Extract the charset from the request's Content-Type header.

    Args:
        request: object exposing a ``META`` mapping (Django-style request).

    Returns:
        The charset extracted by ``extract_charset``, or None when the
        CONTENT_TYPE header is absent or empty.
    """
    content_type = request.META.get('CONTENT_TYPE', None)
    # Simplification: the original re-tested ``content_type`` in a ternary
    # inside this branch, which is redundant — it is already truthy here.
    if content_type:
        return extract_charset(content_type)
    return None
def parse_accept_header(accept):
    """Parse an HTTP Accept header.

    Args:
        accept: the raw header value, or a falsy value.

    Returns:
        List of ``(media_type, params_tuple, quality)`` triples sorted by
        descending quality, with wildcard ranges ordered after concrete
        ones; empty list when *accept* is falsy.
    """
    from functools import cmp_to_key

    def parse_media_range(accept_item):
        return accept_item.split('/', 1)

    def comparator(a, b):
        # Bug fix: the original relied on the Python-2-only ``cmp`` builtin,
        # ``result is not 0`` (an identity test on an int), and the
        # Python-2-only positional-cmp form ``list.sort(comparator)``.
        # Descending quality first:
        result = (b[2] > a[2]) - (b[2] < a[2])
        if result != 0:
            return result
        mtype_a, subtype_a = parse_media_range(a[0])
        mtype_b, subtype_b = parse_media_range(b[0])
        # Wildcards sort after concrete media ranges.
        if mtype_a == '*' and subtype_a == '*':
            return 1
        if mtype_b == '*' and subtype_b == '*':
            return -1
        if subtype_a == '*':
            return 1
        if subtype_b == '*':
            return -1
        return 0

    if not accept:
        return []
    result = []
    for media_range in accept.split(","):
        parts = media_range.split(";")
        media_type = parts.pop(0).strip()
        media_params = []
        q = 1.0
        for part in parts:
            (key, value) = part.lstrip().split("=", 1)
            if key == "q":
                q = float(value)
            else:
                media_params.append((key, value))
        result.append((media_type, tuple(media_params), q))
    result.sort(key=cmp_to_key(comparator))
    return result
def in_session(self):
    """Provide a session scope around a series of operations.

    Yields a session, commits on success, and maps low-level SQLAlchemy
    errors to the project's domain exceptions. The session is always
    closed, even on failure.

    Raises:
        DuplicateError: on a unique-constraint violation (rolled back).
        DatabaseConnectionError: on a lost/broken connection (rolled back,
            connection pool closed).
    """
    session = self.get_session()
    try:
        yield session
        session.commit()
    except IntegrityError:
        session.rollback()
        raise DuplicateError("Duplicate unique value detected!")
    except (OperationalError, DisconnectionError):
        session.rollback()
        self.close()
        logger.warn("Database Connection Lost!")
        raise DatabaseConnectionError()
    except Exception:
        # Unknown failure: roll back and let the original exception surface.
        session.rollback()
        raise
    finally:
        session.close()
def info(self, req) -> ResponseInfo:
    """ABCI Info handler.

    Always reports height 0 and an empty app hash, so Tendermint will
    replay (resync) this app from the beginning on every restart.
    """
    r = ResponseInfo()
    r.version = "1.0"
    r.last_block_height = 0
    r.last_block_app_hash = b''
    return r
def check_tx(self, tx) -> ResponseCheckTx:
    """Validate a tx before it enters the mempool.

    Txs must arrive strictly in order (1, 2, 3, ...); an out-of-order
    value gets a non-zero code and the tx is dropped by Tendermint.
    """
    value = decode_number(tx)
    if not value == (self.txCount + 1):
        # Out of sequence: reject with a non-zero code.
        return ResponseCheckTx(code=1)
    return ResponseCheckTx(code=CodeTypeOk)
def query(self, req) -> ResponseQuery:
    """ABCI Query handler: return the last tx count.

    The count is encoded with the module's ``encode_number`` helper and
    reported at the last committed block height.
    """
    v = encode_number(self.txCount)
    return ResponseQuery(code=CodeTypeOk, value=v, height=self.last_block_height)
def commit(self) -> ResponseCommit:
    """ABCI Commit handler: return the current encoded state to Tendermint.

    The app hash is the tx count packed as a big-endian unsigned 64-bit
    integer.
    """
    # Renamed from ``hash``, which shadowed the builtin of the same name.
    app_hash = struct.pack('>Q', self.txCount)
    return ResponseCommit(data=app_hash)
def track(context, file_names):
    """Keep track of (add) each file in the list *file_names*."""
    repo = context.obj
    repo.find_repo_type()
    for path in file_names:
        repo.call([repo.vc_name, 'add', path])
def untrack(context, file_names):
    """Forget about tracking each file in the list *file_names*.

    Uses ``git rm --cached`` or ``hg forget`` depending on the repo type.
    """
    repo = context.obj
    repo.find_repo_type()
    prefixes = {'git': ['git', 'rm', '--cached'], 'hg': ['hg', 'forget']}
    prefix = prefixes.get(repo.vc_name)
    for path in file_names:
        if prefix is not None:
            repo.call(prefix + [path])
def commit(context, message, name):
    """Commit saved changes to the repository.

    message -- commit message
    name -- tag name; an empty string means "do not tag"
    """
    repo = context.obj
    repo.find_repo_type()
    if repo.vc_name == 'git':
        repo.call(['git', 'commit', '-a', '-m', message])
    elif repo.vc_name == 'hg':
        repo.call(['hg', 'commit', '-m', message])
    if name != '':
        if repo.vc_name == 'git':
            repo.call(['git', 'tag', '-a', name, '-m', message])
        elif repo.vc_name == 'hg':
            repo.call(['hg', 'tag', '-m', message, name])
def revert(context, file_names):
    """Revert each file in *file_names* back to the version in the repo."""
    repo = context.obj
    repo.find_repo_type()
    if len(file_names) == 0:
        # Nothing specified: explain, then show current status instead.
        click.echo('No file names to checkout specified.')
        click.echo('The following have changed since the last check in.')
        context.invoke(status)
    for path in file_names:
        if repo.vc_name == 'git':
            repo.call(['git', 'checkout', '--', path])
        elif repo.vc_name == 'hg':
            repo.call(['hg', 'revert', '--no-backup', path])
def status(context):
    """See which files have changed, been checked in, and uploaded."""
    repo = context.obj
    repo.find_repo_type()
    repo.call([repo.vc_name, 'status'])
def diff(context, file_name):
    """See changes to *file_name* that occurred since the last check in."""
    repo = context.obj
    repo.find_repo_type()
    if repo.vc_name == 'git':
        repo.call(['git', 'diff', '--color-words', '--ignore-space-change', file_name])
    elif repo.vc_name == 'hg':
        repo.call(['hg', 'diff', file_name])
def find_repo_type(self):
    """Detect whether the working directory is a git or hg repository.

    Sets ``self.vc_name`` to 'git' or 'hg'; exits the process with
    status 1 when neither VCS reports a repository.
    """
    is_git = self.call(['git', 'rev-parse', '--is-inside-work-tree'], devnull=True)
    if is_git != 0:
        if self.debug:
            click.echo('not git')
        is_hg = self.call(['hg', '-q', 'stat'], devnull=True)
        if is_hg != 0:
            if self.debug:
                click.echo('not hg')
            exit(1)
        else:
            self.vc_name = 'hg'
    else:
        # Bug fix: the original never recorded a successful git detection,
        # relying on an implicit default set elsewhere; set it explicitly.
        self.vc_name = 'git'
def main():
    """The main entry point of the program.

    Parses CLI args, collects documentation from the given path, renders
    it with a template inferred from the output filename (JSON when no
    output file is given), and writes to stdout or the output file.
    Exits with status 1 on rendering errors.
    """
    argp = _cli_argument_parser()
    args = argp.parse_args()
    logging.basicConfig(level=args.loglevel, format="%(levelname)s %(message)s")
    console.display("Collecting documentation from files")
    collector_metrics = metrics.Metrics()
    docs = collector.parse(args.path, args.trace_parser, metrics=collector_metrics)
    collector_metrics.display()
    console.display("Rendering documentation")
    try:
        if args.output:
            # Template is chosen from the output file's extension.
            template = renderer.template_from_filename(args.output)
        else:
            template = "json"
        out = renderer.render(docs, template)
    except ValueError as err:
        logging.error(err)
        sys.exit(1)
    except TemplateNotFound as err:
        logging.error("Template `{}` not found. Available templates are: {}".format(
            err.name, renderer.list_templates()))
        sys.exit(1)
    if not args.output:
        print(out)
    else:
        console.display("Writing documentation to", args.output)
        with io.open(args.output, "w", encoding="utf-8") as fp:
            fp.write(out)
def process_ioc(args):
    """Process actions related to the IOC switch.

    Depending on the parsed args: list indicators (``--get``), submit a
    single indicator (``--single``), or submit every non-empty line of a
    file (``--file``).

    Raises:
        Exception: when ``args.file`` is not an existing file.
    """
    client = IndicatorClient.from_config()
    client.set_debug(True)
    if args.get:
        response = client.get_indicators()
    elif args.single:
        response = client.add_indicators(indicators=[args.single],
                                         private=args.private, tags=args.tags)
    else:
        if not os.path.isfile(args.file):
            raise Exception("File path isn't valid!")
        indicators = list()
        with open(args.file, 'r') as handle:
            for line in handle:
                line = line.strip()
                # Skip blank lines; everything else is taken verbatim.
                if line == '':
                    continue
                indicators.append(line)
        response = client.add_indicators(indicators=indicators,
                                         private=args.private, tags=args.tags)
    return response
def process_events(args):
    """Process actions related to the events switch.

    Fetches recent events (``--get``) or flushes them (``--flush``).
    """
    client = EventsClient.from_config()
    client.set_debug(True)
    if args.get:
        response = client.get_events()
    elif args.flush:
        response = client.flush_events()
    # NOTE(review): if neither flag is set, ``response`` is unbound and the
    # return raises UnboundLocalError; the CLI main() in this file appears
    # to guard against that case before calling — confirm.
    return response
def main():
    """Run the Blockade Analyst Bench CLI.

    Builds the argument parser with 'ioc' and 'events' subcommands,
    validates flag combinations, dispatches to process_ioc/process_events,
    and prints the response message. Exits 1 on usage errors.
    """
    parser = ArgumentParser(description="Blockade Analyst Bench")
    subs = parser.add_subparsers(dest='cmd')
    ioc = subs.add_parser('ioc', help="Perform actions with IOCs")
    ioc.add_argument('--single', '-s', help="Send a single IOC")
    ioc.add_argument('--file', '-f', help="Parse a file of IOCs")
    ioc.add_argument('--private', '-p', action="store_true",
                     help="Submit the IOCs to the node hashed, \
                        instead of in clear")
    ioc.add_argument('--tags', '-t',
                     help="Add a comma-separated list of tags to store \
                        with the indicators")
    ioc.add_argument('--get', '-g', action="store_true",
                     help="List indicators on the remote node")
    events = subs.add_parser('events', help="Perform actions with Events")
    events.add_argument('--get', '-g', action='store_true',
                        help="Get recent events")
    events.add_argument('--flush', '-f', action='store_true',
                        help="Flush all events from cloud node")
    args, unknown = parser.parse_known_args()
    try:
        if args.cmd == 'ioc':
            # --single and --file are mutually exclusive; at least one
            # action flag must be present.
            if (args.single and args.file):
                raise Exception("Can't use single and file together!")
            if (not args.single and not args.file and not args.get):
                ioc.print_help()
                sys.exit(1)
            response = process_ioc(args)
        elif args.cmd == 'events':
            if (not args.get and not args.flush):
                events.print_help()
                sys.exit(1)
            response = process_events(args)
        else:
            parser.print_usage()
            sys.exit(1)
    except ValueError as e:
        parser.print_usage()
        sys.stderr.write('{}\n'.format(str(e)))
        sys.exit(1)
    print(response.get('message', ''))
def window(iterable, size=2):
    """Yield sliding windows of *size* consecutive items as tuples.

    Produces nothing when the iterable has fewer than *size* items.
    """
    it = iter(iterable)
    # Prime the buffer with the first size-1 items; each subsequent item
    # completes one window (maxlen evicts the oldest automatically).
    buf = deque(islice(it, size - 1), maxlen=size)
    for item in it:
        buf.append(item)
        yield tuple(buf)
def payment_mode(self, payment_mode):
    """Sets the payment_mode of this CreditCardPayment.

    Raises:
        ValueError: unless the value is None, "authorize" or "capture".
    """
    allowed_values = ["authorize", "capture"]
    is_valid = payment_mode is None or payment_mode in allowed_values
    if not is_valid:
        raise ValueError(
            "Invalid value for `payment_mode` ({0}), must be one of {1}"
            .format(payment_mode, allowed_values))
    self._payment_mode = payment_mode
def match_via_correlation_coefficient(image, template, raw_tolerance=1, normed_tolerance=0.9):
    """Matching algorithm based on a 2-D version of the Pearson
    product-moment correlation coefficient.

    Args:
        image: 2-D array to search in.
        template: 2-D array to search for.
        raw_tolerance: threshold passed to the candidate-region helper.
        normed_tolerance: threshold for the normalised coefficient.

    Returns:
        Whatever ``normalise_correlation_coefficient`` returns for the
        candidate regions (sibling helpers in this module).
    """
    h, w = image.shape
    th, tw = template.shape
    temp_mean = np.mean(template)
    temp_minus_mean = template - temp_mean
    # Cross-correlation via FFT: convolving with the flipped, mean-centred
    # template is equivalent to correlating with the template.
    convolution = fftconvolve(image, temp_minus_mean[::-1, ::-1])
    # Keep only positions where the template fits entirely inside the image.
    convolution = convolution[th - 1:h, tw - 1:w]
    match_position_dict = get_tiles_at_potential_match_regions(
        image, template, convolution,
        method='correlation coefficient', raw_tolerance=raw_tolerance)
    results = normalise_correlation_coefficient(
        match_position_dict, convolution, template,
        normed_tolerance=normed_tolerance)
    return results
def match_positions(shape, list_of_coords):
    """Separate regions of match coordinates and return the mean of each.

    In cases where multiple matches are each highlighted by a region of
    coordinates, connected regions are labelled and the (x, y) centre of
    each is returned.

    Args:
        shape: shape of the image array the coordinates index into.
        list_of_coords: (N, 2) array of (row, col) match coordinates.

    Returns:
        List of (x, y) integer positions, one per connected region;
        empty list when there are no matches.
    """
    match_array = np.zeros(shape)
    try:
        # Mark every match coordinate, then group them into connected regions.
        match_array[list_of_coords[:, 0], list_of_coords[:, 1]] = 1
        labelled = label(match_array)
        objects = find_objects(labelled[0])
        coords = [{'x': (slice_x.start, slice_x.stop),
                   'y': (slice_y.start, slice_y.stop)}
                  for (slice_y, slice_x) in objects]
        final_positions = [(int(np.mean(coords[i]['x'])), int(np.mean(coords[i]['y'])))
                           for i in range(len(coords))]
        return final_positions
    except IndexError:
        # An empty coordinate array cannot be fancy-indexed: no matches.
        # Bug fix: the original used a Python 2 print *statement*, which is
        # a SyntaxError on Python 3; use the print function.
        print('no matches found')
        return []
def is_empty(self):
    """Return True if the form is valid and contains an empty lookup."""
    if not self.is_valid():
        return False
    has_content = (self.simple_lookups
                   or self.complex_conditions
                   or self.extra_conditions)
    return not has_content
def load_inventory(hosts_file=HOSTS_FILE):
    """Load the Ansible inventory from *hosts_file*.

    Returns a parser (empty when the file does not exist; a warning is
    logged in that case). ``allow_no_value=True`` because inventory host
    lines are keys without values.
    """
    inventory = SafeConfigParser(allow_no_value=True)
    if os.path.exists(hosts_file):
        inventory.read(hosts_file)
    else:
        logger.warn('inventory file doesn\'t exist: %s', hosts_file)
    return inventory
def save_inventory(inventory, hosts_file=HOSTS_FILE):
    """Save the Ansible inventory to *hosts_file*."""
    out = open(hosts_file, 'w')
    try:
        inventory.write(out)
    finally:
        out.close()
57,298 | def _init_config ( self , width , height , spi = None , spiMosi = None , spiDC = None , spiCS = None , spiReset = None , spiClk = None ) : self . _spi = spi self . _spi_mosi = spiMosi self . _spi_dc = spiDC self . _spi_cs = spiCS self . _spi_reset = spiReset self . _spi_clk = spiClk self . width = width self . height = height | ! SPI hardware and display width height initialization . |
def _init_io(self):
    """!GPIO initialization.

    Puts GPIO into BCM (Broadcom pin-number) mode and configures the
    data/command pin as an output. Warnings are silenced because the pin
    may already be in use from a previous run.
    """
    GPIO.setwarnings(False)
    GPIO.setmode(GPIO.BCM)
    pins = [self._spi_dc]
    for pin in pins:
        GPIO.setup(pin, GPIO.OUT)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.