idx (int64, range 0–63k) | question (string, 53–5.28k chars) | target (string, 5–805 chars) |
|---|---|---|
12,700 | def login ( config , api_key = "" ) : if not api_key : info_out ( "If you don't have an API Key, go to:\n" "https://bugzilla.mozilla.org/userprefs.cgi?tab=apikey\n" ) api_key = getpass . getpass ( "API Key: " ) url = urllib . parse . urljoin ( config . bugzilla_url , "/rest/whoami" ) assert url . startswith ( "https://... | Store your Bugzilla API Key |
12,701 | def logout ( config ) : state = read ( config . configfile ) if state . get ( "BUGZILLA" ) : remove ( config . configfile , "BUGZILLA" ) success_out ( "Forgotten" ) else : error_out ( "No stored Bugzilla credentials" ) | Remove and forget your Bugzilla credentials |
12,702 | def get_hypergeometric_stats ( N , indices ) : assert isinstance ( N , ( int , np . integer ) ) assert isinstance ( indices , np . ndarray ) and np . issubdtype ( indices . dtype , np . uint16 ) K = indices . size pvals = np . empty ( N + 1 , dtype = np . float64 ) folds = np . empty ( N + 1 , dtype = np . float64 ) pv... | Calculates hypergeom . p - values and fold enrichments for all cutoffs . |
12,703 | def parse ( self , prefix ) : self . _prefix = "" url = re . sub ( r'http://' , '' , prefix ) url = re . sub ( r'https://' , '' , url ) custom_prefix = self . detectCustomImportPaths ( url ) if custom_prefix != { } : url = custom_prefix [ "provider_prefix" ] info = self . _parsePrefix ( url ) self . _signature = info [... | Parse import path into provider project repository and other recognizable parts |
12,704 | def detectKnownRepo ( self , url ) : if url . startswith ( 'github.com' ) : return GITHUB if url . startswith ( 'code.google.com/p' ) : return GOOGLECODE if url . startswith ( 'golang.org/x' ) : return GOLANGORG if url . startswith ( 'gopkg.in' ) : return GOPKG if url . startswith ( 'bitbucket.org' ) : return BITBUCKET... | For given import path detect provider . |
12,705 | def get_qualifier_dict ( vocabularies , qualifier_vocab ) : if vocabularies . get ( qualifier_vocab , None ) is None : raise UNTLFormException ( 'Could not retrieve qualifier vocabulary "%s" for the form.' % ( qualifier_vocab ) ) else : return vocabularies . get ( qualifier_vocab ) | Get the qualifier dictionary based on the element s qualifier vocabulary . |
12,706 | def get_content_dict ( vocabularies , content_vocab ) : if vocabularies . get ( content_vocab , None ) is None : raise UNTLFormException ( 'Could not retrieve content vocabulary "%s" for the form.' % ( content_vocab ) ) else : return vocabularies . get ( content_vocab ) | Get the content dictionary based on the element s content vocabulary . |
12,707 | def get_group_usage_link ( self ) : first_element = self . group_list [ 0 ] usage_link = getattr ( first_element . form , 'usage_link' , None ) return usage_link | Get the usage link for the group element . |
12,708 | def get_adjustable_form ( self , element_dispatch ) : adjustable_form = { } for key in element_dispatch . keys ( ) : adjustable_form [ key ] = element_dispatch [ key ] ( ) return adjustable_form | Create an adjustable form from an element dispatch table . |
12,709 | def set_coverage_placeName ( self ) : if ( self . solr_response and self . solr_response != 'error' and self . solr_response . response != 'error' ) : location_list = self . solr_response . get_location_list_facet ( ) . facet_list else : location_list = [ ] form_dict = { 'view_type' : 'prefill' , 'value_json' : json . ... | Determine the properties for the placeName coverage field . |
12,710 | def get_meta_attributes ( self , ** kwargs ) : superuser = kwargs . get ( 'superuser' , False ) if ( self . untl_object . qualifier == 'recordStatus' or self . untl_object . qualifier == 'system' ) : if superuser : self . editable = True self . repeatable = True else : self . editable = False self . view_type = 'qualif... | Determine the form attributes for the meta field . |
12,711 | def _bit_mismatch ( int1 : int , int2 : int ) -> int : for i in range ( max ( int1 . bit_length ( ) , int2 . bit_length ( ) ) ) : if ( int1 >> i ) & 1 != ( int2 >> i ) & 1 : return i return - 1 | Returns the index of the first different bit or - 1 if the values are the same . |
12,712 | def searchRootOfTree ( reducibleChildren : Set [ LNode ] , nodeFromTree : LNode ) : while True : out_e = nodeFromTree . east [ 0 ] . outgoingEdges if not out_e : return nodeFromTree nextNode = out_e [ 0 ] . dsts [ 0 ] . parentNode if nextNode in reducibleChildren : nodeFromTree = nextNode else : return nodeFromTree | Walk tree of nodes to root |
12,713 | def collectNodesInTree ( treeRoot : LNode , reducibleChildren : Set [ LNode ] ) : inputEdges = [ ] reducedNodes = [ ] reducedNodesSet = set ( ) nodeStack = [ ] nodeStack . append ( ( treeRoot , None , None ) ) while nodeStack : node , p , e = nodeStack . pop ( ) if node in reducibleChildren and node not in reducedNodes... | Collect nodes which will be reduced and input nodes of tree for tree of nodes . |
12,714 | def __initLock ( self ) : self . _isLocked = False self . _timer = 0 self . _operation = False | Init lock for sending request to projector when it is busy . |
12,715 | def __setLock ( self , command ) : if command in ( TURN_ON , TURN_OFF ) : self . _operation = command elif command in INV_SOURCES : self . _operation = SOURCE else : self . _operation = ALL self . _isLocked = True self . _timer = time . time ( ) | Set lock on requests . |
12,716 | def __unLock ( self ) : self . _operation = False self . _timer = 0 self . _isLocked = False | Unlock sending requests to projector . |
12,717 | def __checkLock ( self ) : if self . _isLocked : if ( time . time ( ) - self . _timer ) > TIMEOUT_TIMES [ self . _operation ] : self . __unLock ( ) return False return True return False | Lock checking . |
12,718 | async def get_property ( self , command ) : _LOGGER . debug ( "Getting property %s" , command ) if self . __checkLock ( ) : return BUSY timeout = self . __get_timeout ( command ) response = await self . send_request ( timeout = timeout , params = EPSON_KEY_COMMANDS [ command ] , type = 'json_query' ) if not response : ... | Get property state from device . |
12,719 | async def send_command ( self , command ) : _LOGGER . debug ( "Sending command to projector %s" , command ) if self . __checkLock ( ) : return False self . __setLock ( command ) response = await self . send_request ( timeout = self . __get_timeout ( command ) , params = EPSON_KEY_COMMANDS [ command ] , type = 'directse... | Send command to Epson . |
12,720 | async def send_request ( self , params , timeout , type = 'json_query' , command = False ) : try : with async_timeout . timeout ( timeout ) : url = '{url}{type}' . format ( url = self . _http_url , type = type ) async with self . websession . get ( url = url , params = params , headers = self . _headers ) as response :... | Send request to Epson . |
12,721 | def remove_instances_by_prefix ( nova_api , prefix ) : for server in nova_api . servers . list ( ) : if server . name . startswith ( prefix ) : LOG . info ( "Remove instance '%s'" % server . name ) server . delete ( ) | Remove all the instances on which their name start by a prefix . |
12,722 | def purge_existing_ovb ( nova_api , neutron ) : LOG . info ( 'Cleaning up OVB environment from the tenant.' ) for server in nova_api . servers . list ( ) : if server . name in ( 'bmc' , 'undercloud' ) : server . delete ( ) if server . name . startswith ( 'baremetal_' ) : server . delete ( ) for router in neutron . list... | Purge any trace of an existing OVB deployment . |
12,723 | def initialize_network ( neutron ) : body_sample = { "network" : { "name" : 'provision_bob' , "admin_state_up" : True , } } netw = neutron . create_network ( body = body_sample ) [ 'network' ] body_create_subnet = { 'subnets' : [ { 'name' : 'rdo-m-subnet' , 'cidr' : '192.0.2.0/24' , 'ip_version' : 4 , 'network_id' : ne... | Initialize an OVB network called provision_bob . |
12,724 | def description_director ( ** kwargs ) : description_type = { 'physical' : DCFormat } qualifier = kwargs . get ( 'qualifier' ) element_class = description_type . get ( qualifier , DCDescription ) element = element_class ( qualifier = qualifier , content = kwargs . get ( 'content' ) , ) return element | Direct which class should be used based on the director qualifier . |
12,725 | def add_child ( self , child ) : if child : if child . tag in self . contained_children : self . children . append ( child ) else : raise DC_StructureException ( 'Invalid child "%s" for parent "%s"' % ( child . tag , self . tag ) ) | This adds a child object to the current one . It will check the contained_children list to make sure that the object is allowable and throw an exception if not . |
12,726 | def determine_vocab ( self , qualifier ) : vocab_value = VOCAB_INDEX . get ( self . tag , None ) if isinstance ( vocab_value , dict ) : if qualifier is None : qualifier = 'None' return vocab_value . get ( qualifier , None ) elif vocab_value is not None : return vocab_value else : return None | Determine the vocab from the qualifier . |
12,727 | def resolver ( self , vocab_data , attribute ) : term_list = vocab_data . get ( self . content_vocab , [ ] ) for term_dict in term_list : if term_dict [ 'name' ] == self . content : return term_dict [ attribute ] return self . content | Pull the requested attribute based on the given vocabulary and content . |
12,728 | def check_separator ( self , data ) : sep_list = [ r'\t' , r';' , r',' , r'|' , r'\s+' ] data_copy = data for sep in sep_list : splitted = data_copy . split ( "\n" ) parts = [ len ( re . split ( sep , line ) ) for line in splitted ] if sum ( parts ) == len ( splitted ) : continue diff = 0 for i in range ( len ( parts [... | THis method evaluates a list of separators on the input data to check which one is correct . This is done by first splitting the input by newline and then checking if the split by separator is equal for each input row except the last that might be incomplete due to the limited input data |
12,729 | def head ( self , file_path ) : processor = lambda path , node , tail_only = True , append = False : self . _handle_head ( path , node ) for item in self . _client . _find_items ( [ file_path ] , processor , include_toplevel = True , include_children = False , recurse = False ) : if item : return item | Onlye read the first packets that come try to max out at 1024kb |
12,730 | def packageExists ( self , package ) : url = "%s/packages" % self . base_url params = { "pattern" : package } response = requests . get ( url , params = params ) if response . status_code != requests . codes . ok : return False return True | Check if the package already exists |
12,731 | def getGolangPackages ( self ) : packages = { } url = "%s/packages" % self . base_url params = { "pattern" : "golang-*" , "limit" : 200 } response = requests . get ( url , params = params ) if response . status_code != requests . codes . ok : return { } data = response . json ( ) for package in data [ "packages" ] : pa... | Get a list of all golang packages for all available branches |
12,732 | def onClose ( self , wasClean ) : self . log . error ( 'lost connection to crossbar on session %' + str ( self . session_id ) ) for task in asyncio . Task . all_tasks ( ) : task . cancel ( ) asyncio . get_event_loop ( ) . stop ( ) | Disconnect when connection to message broker is lost |
12,733 | def onUserError ( self , fail , message ) : self . log . error ( fail ) self . log . error ( message ) | Handle user errors |
12,734 | async def show_sessions ( self ) : res = await self . call ( "wamp.session.list" ) for session_id in res : session = await self . call ( "wamp.session.get" , session_id ) self . log . info ( session ) | Returns an object with a lists of the session IDs for all sessions currently attached to the realm |
12,735 | async def lookup_session ( self , topic_name ) : res = await self . call ( "wamp.subscription.lookup" , topic_name ) self . log . info ( res ) | Attempts to find the session id for a given topic |
12,736 | def setup_runner ( self ) : runner = ApplicationRunner ( url = self . config [ 'transport_host' ] , realm = u'realm1' , extra = { 'config' : self . config , 'handlers' : self . handlers , } ) return runner | Setup instance of runner var |
12,737 | def reconnect ( self ) : connect_attempt = 0 max_retries = self . config [ 'max_reconnect_retries' ] logging . info ( 'attempting to reconnect to crossbar' ) runner = self . setup_runner ( ) while True : if connect_attempt == max_retries : logging . info ( 'max retries reached; stopping service' ) sys . exit ( 1 ) self... | Handle reconnect logic if connection to crossbar is lost |
12,738 | def reduceUselessAssignments ( root : LNode ) : for n in root . children : if n . children : reduceUselessAssignments ( n ) do_update = False for n in root . children : if isinstance ( n . originObj , Assignment ) and not n . originObj . indexes and len ( n . west ) == 1 : src = n . originObj . src if isinstance ( src ... | Remove assignments if it is only a direct connection and can be replaced with direct link |
12,739 | def _constructTypeQualifiedName ( self , type , full = False ) : t = type [ "type" ] if t == TYPE_IDENT : return type [ "def" ] elif t == TYPE_POINTER : return self . _constructTypeQualifiedName ( type [ "def" ] ) elif t == TYPE_SELECTOR : if full : return "%s.%s" % ( type [ "prefix" ] , type [ "item" ] ) else : return... | For given type construct its full qualified name . |
12,740 | def crop_to_bounding_box ( image , offset_height , offset_width , target_height , target_width , dynamic_shape = False ) : image = ops . convert_to_tensor ( image , name = 'image' ) _Check3DImage ( image , require_static = ( not dynamic_shape ) ) height , width , _ = _ImageDimensions ( image , dynamic_shape = dynamic_s... | Crops an image to a specified bounding box . |
12,741 | def pad_to_bounding_box ( image , offset_height , offset_width , target_height , target_width , dynamic_shape = False ) : image = ops . convert_to_tensor ( image , name = 'image' ) _Check3DImage ( image , require_static = ( not dynamic_shape ) ) height , width , depth = _ImageDimensions ( image , dynamic_shape = dynami... | Pad image with zeros to the specified height and width . |
12,742 | def determine_completeness ( py_untl ) : completeness_dict = { 'title' : { 'present' : False , 'weight' : 10 , } , 'description' : { 'present' : False , 'weight' : 1 , } , 'language' : { 'present' : False , 'weight' : 1 , } , 'collection' : { 'present' : False , 'weight' : 10 , } , 'institution' : { 'present' : False ,... | Take a Python untl and calculate the completeness . |
12,743 | def init_app ( self , app , config_prefix = None ) : self . kill_session = self . original_kill_session config_prefix = ( config_prefix or 'JIRA' ) . rstrip ( '_' ) . upper ( ) if not hasattr ( app , 'extensions' ) : app . extensions = dict ( ) if config_prefix . lower ( ) in app . extensions : raise ValueError ( 'Alre... | Actual method to read JIRA settings from app configuration and initialize the JIRA instance . |
12,744 | def zip_dict ( a : Dict [ str , A ] , b : Dict [ str , B ] ) -> Dict [ str , Tuple [ Optional [ A ] , Optional [ B ] ] ] : return { key : ( a . get ( key ) , b . get ( key ) ) for key in a . keys ( ) | b . keys ( ) } | Combine the values within two dictionaries by key . |
12,745 | def flattenPort ( port : LPort ) : yield port if port . children : for ch in port . children : yield from flattenPort ( ch ) port . children . clear ( ) | Flatten hierarchical ports |
12,746 | def _flattenPortsSide ( side : List [ LNode ] ) -> List [ LNode ] : new_side = [ ] for i in side : for new_p in flattenPort ( i ) : new_side . append ( new_p ) return new_side | Flatten hierarchical ports on node side |
12,747 | def flattenPorts ( root : LNode ) : for u in root . children : u . west = _flattenPortsSide ( u . west ) u . east = _flattenPortsSide ( u . east ) u . north = _flattenPortsSide ( u . north ) u . south = _flattenPortsSide ( u . south ) | Flatten ports to simplify layout generation |
12,748 | def set_missing_defaults ( self ) : if 'pub_options' not in self . config : self . config [ 'pub_options' ] = { 'acknowledge' : True , 'retain' : True } if 'sub_options' not in self . config : self . config [ 'sub_options' ] = { 'get_retained' : False } if 'subscribed_topics' not in self . config : self . config [ 'sub... | Ensure that minimal configuration is setup and set defaults for missing values |
12,749 | def config_sanity_check ( self ) : if 'name' not in self . config : raise EventifyConfigError ( ) if 'publish_topic' not in self . config : raise EventifyConfigError ( ) if 'topic' not in self . config [ 'publish_topic' ] : raise EventifyConfigError ( ) | Base configuration sanity checks |
12,750 | def load_config ( self ) : logger . debug ( 'loading config file: %s' , self . config_file ) if os . path . exists ( self . config_file ) : with open ( self . config_file ) as file_handle : return json . load ( file_handle ) else : logger . error ( 'configuration file is required for eventify' ) logger . error ( 'unabl... | Load configuration for the service |
12,751 | def check_event_loop ( ) : loop = asyncio . get_event_loop ( ) if loop . is_closed ( ) : asyncio . set_event_loop ( asyncio . new_event_loop ( ) ) | Check if event loop is closed and create a new event loop |
12,752 | def is_tomodir ( subdirectories ) : required = ( 'exe' , 'config' , 'rho' , 'mod' , 'inv' ) is_tomodir = True for subdir in required : if subdir not in subdirectories : is_tomodir = False return is_tomodir | provided with the subdirectories of a given directory check if this is a tomodir |
12,753 | def check_if_needs_modeling ( tomodir ) : print ( 'check for modeling' , tomodir ) required_files = ( 'config' + os . sep + 'config.dat' , 'rho' + os . sep + 'rho.dat' , 'grid' + os . sep + 'elem.dat' , 'grid' + os . sep + 'elec.dat' , 'exe' + os . sep + 'crmod.cfg' , ) not_allowed = ( 'mod' + os . sep + 'volt.dat' , )... | check of we need to run CRMod in a given tomodir |
12,754 | def check_if_needs_inversion ( tomodir ) : required_files = ( 'grid' + os . sep + 'elem.dat' , 'grid' + os . sep + 'elec.dat' , 'exe' + os . sep + 'crtomo.cfg' , ) needs_inversion = True for filename in required_files : if not os . path . isfile ( tomodir + os . sep + filename ) : needs_inversion = False if not os . pa... | check of we need to run CRTomo in a given tomodir |
12,755 | def add_boundary ( self , p1 , p2 , btype ) : index = self . add_line ( p1 , p2 , self . char_lengths [ 'boundary' ] ) self . BoundaryIndices . append ( index ) self . Boundaries . append ( ( p1 , p2 , btype ) ) | Add a boundary line |
12,756 | def add_line ( self , p1 , p2 , char_length ) : p1_id = self . get_point_id ( p1 , char_length ) p2_id = self . get_point_id ( p2 , char_length ) self . Lines . append ( ( p1_id , p2_id ) ) return len ( self . Lines ) | Add a line to the list . Check if the nodes already exist and add them if not . |
12,757 | def is_in ( self , search_list , pair ) : index = - 1 for nr , i in enumerate ( search_list ) : if ( np . all ( i == pair ) ) : return nr return index | If pair is in search_list return the index . Otherwise return - 1 |
12,758 | def read_electrodes ( self , electrodes ) : for nr , electrode in enumerate ( electrodes ) : index = self . get_point_id ( electrode , self . char_lengths [ 'electrode' ] ) self . Electrodes . append ( index ) | Read in electrodes check if points already exist |
12,759 | def write_electrodes ( self , filename ) : fid = open ( filename , 'w' ) for i in self . Electrodes : fid . write ( '{0} {1}\n' . format ( self . Points [ i ] [ 0 ] , self . Points [ i ] [ 1 ] ) ) fid . close ( ) | Write X Y coordinates of electrodes |
12,760 | def write_boundaries ( self , filename ) : fid = open ( filename , 'w' ) for i in self . Boundaries : print ( i ) fid . write ( '{0} {1} {2} {3} {4}\n' . format ( i [ 0 ] [ 0 ] , i [ 0 ] [ 1 ] , i [ 1 ] [ 0 ] , i [ 1 ] [ 1 ] , i [ 2 ] ) ) fid . close ( ) | Write boundary lines X1 Y1 X2 Y2 TYPE to file |
12,761 | def read_char_lengths ( self , filename , electrode_filename ) : if os . path . isfile ( filename ) : data = np . atleast_1d ( np . loadtxt ( filename ) ) if data . size == 4 : characteristic_length = data if characteristic_length [ 0 ] < 0 : try : elec_positions = np . loadtxt ( electrode_filename ) except : raise IOE... | Read characteristic lengths from the given file . |
12,762 | def write_points ( self , fid ) : for nr , point in enumerate ( self . Points ) : fid . write ( 'Point({0}) = {{{1}, {2}, 0, {3}}};\n' . format ( nr + 1 , point [ 0 ] , point [ 1 ] , self . Charlengths [ nr ] ) ) | Write the grid points to the GMSH - command file . |
12,763 | def get_output ( cls , response : requests . Response ) -> str : output = response . headers [ 'X-Lizzy-Output' ] output = output . replace ( '\\n' , '\n' ) lines = ( '[AGENT] {}' . format ( line ) for line in output . splitlines ( ) ) return '\n' . join ( lines ) | Extracts the senza cli output from the response |
12,764 | def new_stack ( self , keep_stacks : int , new_traffic : int , senza_yaml : dict , stack_version : str , disable_rollback : bool , parameters : List [ str ] , region : Optional [ str ] , dry_run : bool , tags : List [ str ] ) -> ( Dict [ str , str ] , str ) : header = make_header ( self . access_token ) data = { 'senza... | Requests a new stack . |
12,765 | def pot_ana ( r , rho ) : I = 1.0 sigma = 1.0 / rho phi = np . divide ( I , ( 2.0 * np . pi * sigma * r ) ) return phi | Return the analytical potential in distance r over a homogeneous half - space |
12,766 | def compute_potentials_analytical_hs ( grid , configs_raw , rho ) : potentials = [ ] nodes_sorted = grid . nodes [ 'sorted' ] nodes_raw = grid . nodes [ 'sorted' ] for config in configs_raw : print ( 'potential configs' , config ) e1_node = grid . get_electrode_node ( config [ 0 ] ) print ( 'e1_node' , e1_node ) electr... | Compute the potential superpositions of each current dipole in the configurations using the provided resistivity |
12,767 | def compute_voltages ( grid , configs_raw , potentials_raw ) : voltages = [ ] for config , potentials in zip ( configs_raw , potentials_raw ) : print ( 'config' , config ) e3_node = grid . get_electrode_node ( config [ 2 ] ) e4_node = grid . get_electrode_node ( config [ 3 ] ) print ( e3_node , e4_node ) print ( 'pot1'... | Given a list of potential distribution and corresponding four - point spreads compute the voltages |
12,768 | def vcf_writer ( parser , keep , extract , args ) : output = sys . stdout if args . output == "-" else open ( args . output , "w" ) try : samples = np . array ( parser . get_samples ( ) , dtype = str ) k = _get_sample_select ( samples = samples , keep = keep ) output . write ( _VCF_HEADER . format ( date = datetime . t... | Writes the data in VCF format . |
12,769 | def csv_writer ( parser , keep , extract , args ) : output = sys . stdout if args . output == "-" else open ( args . output , "w" ) try : samples = np . array ( parser . get_samples ( ) , dtype = str ) k = _get_sample_select ( samples = samples , keep = keep ) print ( "sample_id" , "variant_id" , "chromosome" , "positi... | Writes the data in CSV format . |
12,770 | def _get_generator ( parser , extract , keep , check_maf ) : if extract is not None : parser = Extractor ( parser , names = extract ) for data in parser . iter_genotypes ( ) : data . genotypes = data . genotypes [ keep ] if check_maf : data . code_minor ( ) yield data | Generates the data ( with extract markers and keep if required . |
12,771 | def bitterness ( self , ibu_method , early_og , batch_size ) : "Calculate bitterness based on chosen method" if ibu_method == "tinseth" : bitterness = 1.65 * math . pow ( 0.000125 , early_og - 1.0 ) * ( ( 1 - math . pow ( math . e , - 0.04 * self . time ) ) / 4.15 ) * ( ( self . alpha / 100.0 * self . amount * 1000000 ... | Calculate bitterness based on chosen method |
12,772 | def _check_error ( response ) : if ( not response . ok ) or ( response . status_code != 200 ) : raise Exception ( response . json ( ) [ 'error' ] + ': ' + response . json ( ) [ 'error_description' ] ) | Raises an exception if the Spark Cloud returned an error . |
12,773 | def _login ( self , username , password ) : data = { 'username' : username , 'password' : password , 'grant_type' : 'password' } r = self . spark_api . oauth . token . POST ( auth = ( 'spark' , 'spark' ) , data = data , timeout = self . timeout ) self . _check_error ( r ) return r . json ( ) [ 'access_token' ] | Proceed to login to the Spark Cloud and returns an access token . |
12,774 | def devices ( self ) : params = { 'access_token' : self . access_token } r = self . spark_api . GET ( params = params , timeout = self . timeout ) self . _check_error ( r ) json_list = r . json ( ) devices_dict = { } if json_list : allKeys = { 'functions' , 'variables' , 'api' , 'requires_deep_update' , 'status' } for ... | Create a dictionary of devices known to the user account . |
12,775 | def _get_device_info ( self , device_id ) : params = { 'access_token' : self . access_token } r = self . spark_api ( device_id ) . GET ( params = params , timeout = 30 ) self . _check_error ( r ) return r . json ( ) | Queries the Spark Cloud for detailed information about a device . |
12,776 | def make_device_class ( spark_cloud , entries , timeout = 30 ) : attrs = list ( set ( list ( entries ) + [ 'requires_deep_update' , 'functions' , 'variables' , 'api' , 'status' ] ) ) return type ( 'Device' , ( _BaseDevice , namedtuple ( 'Device' , attrs ) ) , { '__slots__' : ( ) , 'spark_cloud' : spark_cloud , 'timeout... | Returns a dynamic Device class based on what a GET device list from the Spark Cloud returns . spark_cloud parameter should be the caller instance of SparkCloud . entries parameter should be the list of fields the Spark Cloud API is returning . |
12,777 | def report_metric ( metric_name : str , value : int , fail_silently : bool = True ) : if metricz is None : return configuration = Configuration ( ) try : lizzy_domain = urlparse ( configuration . lizzy_url ) . netloc lizzy_name , _ = lizzy_domain . split ( '.' , 1 ) except Exception : lizzy_name = 'UNKNOWN' tags = { 'v... | Tries to report a metric ignoring all errors |
12,778 | def get_form_bound_field ( form , field_name ) : field = form . fields [ field_name ] field = field . get_bound_field ( form , field_name ) return field | Intends to get the bound field from the form regarding the field name |
12,779 | def read ( self , module_name ) : self . parser . read ( "{}/{}.ini" . format ( self . path , module_name . split ( "." ) [ - 1 ] ) ) | Read a particular config file |
12,780 | def get_for_nearest_ancestor ( self , cls , attribute_name ) : for family_cls in family ( cls ) : if self . has ( family_cls . __module__ , family_cls . __name__ , attribute_name ) : return self . get ( family_cls . __module__ , family_cls . __name__ , attribute_name ) ini_filename = cls . __module__ . split ( "." ) [ ... | Find a prior with the attribute analysis_path from the config for this class or one of its ancestors |
12,781 | def fib ( number : int ) -> int : if number < 2 : return number return fib ( number - 1 ) + fib ( number - 2 ) | Simple Fibonacci function . |
12,782 | def add_data ( self , data ) : subdata = np . atleast_2d ( data ) if subdata . shape [ 1 ] != self . grid . nr_of_nodes : if subdata . shape [ 0 ] == self . grid . nr_of_nodes : subdata = subdata . T else : raise Exception ( 'Number of values does not match the number of ' + 'nodes in the grid {0} grid nodes vs {1} dat... | Add data to the node value sets |
12,783 | def instance ( cls , * args , ** kwgs ) : if not hasattr ( cls , "_instance" ) : cls . _instance = cls ( * args , ** kwgs ) return cls . _instance | Will be the only instance |
12,784 | def configure_logger ( logger , filename , folder , log_level ) : fmt = logging . Formatter ( '%(asctime)s %(levelname)s: %(message)s' ) if folder is not None : log_file = os . path . join ( folder , filename ) hdl = logging . FileHandler ( log_file ) hdl . setFormatter ( fmt ) hdl . setLevel ( log_level ) logger . add... | Configure logging behvior for the simulations . |
12,785 | def _nargs ( f ) -> Optional [ int ] : if isinstance ( f , Function ) : return f . nargs spec = inspect . getfullargspec ( f ) if spec . varargs is not None : return None return len ( spec . args ) | number of positional arguments values . Dynamically computed from the arguments attribute . |
12,786 | def _ndefs ( f ) : if isinstance ( f , Function ) : return f . ndefs spec = inspect . getfullargspec ( f ) if spec . defaults is None : return 0 return len ( spec . defaults ) | number of any default values for positional or keyword parameters |
12,787 | def singledispatch ( * , nargs = None , nouts = None , ndefs = None ) : def wrapper ( f ) : return wraps ( f ) ( SingleDispatchFunction ( f , nargs = nargs , nouts = nouts , ndefs = ndefs ) ) return wrapper | singledispatch decorate of both functools . singledispatch and func |
12,788 | def multidispatch ( * , nargs = None , nouts = None ) : def wrapper ( f ) : return wraps ( f ) ( MultiDispatchFunction ( f , nargs = nargs , nouts = nouts ) ) return wrapper | multidispatch decorate of both functools . singledispatch and func |
12,789 | def flip ( f : Callable ) -> Function : nargs_ , nouts_ , ndefs_ = nargs ( f ) , nouts ( f ) , ndefs ( f ) return WrappedFunction ( lambda * args , ** kwargs : f ( args [ 1 ] , args [ 0 ] , * args [ 2 : ] , ** kwargs ) , nargs = nargs_ , nouts = nouts_ , ndefs = ndefs_ ) | flip order of first two arguments to function . |
12,790 | def tagfunc ( nargs = None , ndefs = None , nouts = None ) : def wrapper ( f ) : return wraps ( f ) ( FunctionWithTag ( f , nargs = nargs , nouts = nouts , ndefs = ndefs ) ) return wrapper | decorate of tagged function |
12,791 | def fmap ( self , f : 'WrappedFunction' ) -> 'WrappedFunction' : if not isinstance ( f , WrappedFunction ) : f = WrappedFunction ( f ) return WrappedFunction ( lambda * args , ** kwargs : self ( f ( * args , ** kwargs ) ) , nargs = f . nargs , nouts = self . nouts ) | function map for Wrapped Function . A forced transfermation to WrappedFunction would be applied . async def |
12,792 | def parse_atoms ( self ) : atom_site_header_tag = self . main_tag . getElementsByTagName ( "PDBx:atom_siteCategory" ) assert ( len ( atom_site_header_tag ) == 1 ) atom_site_header_tag = atom_site_header_tag [ 0 ] atom_site_tags = atom_site_header_tag . getElementsByTagName ( "PDBx:atom_site" ) residue_map = { } residue... | All ATOM lines are parsed even though only one per residue needs to be parsed . The reason for parsing all the lines is just to sanity - checks that the ATOMs within one residue are consistent with each other . |
12,793 | def parse_atom_site ( self , name , attributes ) : if name == "PDBx:pdbx_PDB_ins_code" : assert ( not ( self . current_atom_site . ATOMResidueiCodeIsNull ) ) if attributes . get ( 'xsi:nil' ) == 'true' : self . current_atom_site . ATOMResidueiCodeIsNull = True if name == "PDBx:auth_asym_id" : assert ( not ( self . curr... | Parse the atom tag attributes . Most atom tags do not have attributes . |
12,794 | def parse_atom_tag_data ( self , name , tag_content ) : current_atom_site = self . current_atom_site if current_atom_site . IsHETATM : return elif name == 'PDBx:atom_site' : self . _BLOCK = None current_atom_site = self . current_atom_site current_atom_site . validate ( ) if current_atom_site . IsATOM : r , seqres , Re... | Parse the atom tag data . |
12,795 | def create_atom_data ( self ) : current_atom_site = self . current_atom_site if current_atom_site . IsHETATM : return None , None , None , None elif current_atom_site . IsATOM : return current_atom_site . convert_to_residue ( self . modified_residues ) else : raise Exception ( 'current_atom_site' ) | The atom site work is split into two parts . This function type - converts the tags . |
12,796 | def import_source ( module , path , pass_errors = False ) : try : m = imp . load_source ( module , path ) return m except Exception as e : return None | Function imports a module given full path |
12,797 | def import_module ( module , pass_errors = False ) : frm = module . split ( '.' ) try : m = __import__ ( module , fromlist = [ frm [ 1 ] ] ) return m except ImportError as e : if pass_errors : return None else : print ( traceback . format_exc ( ) ) return None except Exception as e : print ( traceback . format_exc ( ) ... | Function imports a module given module name |
12,798 | def copytree ( src , dst , symlinks = False , ignore = None ) : if not os . path . exists ( dst ) : os . mkdir ( dst ) try : for item in os . listdir ( src ) : s = os . path . join ( src , item ) d = os . path . join ( dst , item ) if os . path . isdir ( s ) : shutil . copytree ( s , d , symlinks , ignore ) else : shut... | Function recursively copies from directory to directory . |
12,799 | def empty ( key , dict ) : if key in dict . keys ( ) : if dict [ key ] : return False return True | Function determines if the dict key exists or it is empty |
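
Each `question` cell stores the function source as a flat, whitespace-tokenized string (newlines and indentation are stripped), and `target` holds its docstring. As a minimal sketch of what one row corresponds to, the complete pair at idx 12,781 can be reconstructed by hand into ordinary runnable Python; the layout below is inferred, since the tokenization discards it:

```python
# Hand reconstruction of row idx 12,781; the dataset stores this function
# as one flat token string, so the indentation here is an assumption.
def fib(number: int) -> int:
    """Simple Fibonacci function."""
    if number < 2:
        return number
    return fib(number - 1) + fib(number - 2)


# Quick sanity check that the reconstruction behaves as expected.
assert [fib(n) for n in range(7)] == [0, 1, 1, 2, 3, 5, 8]
```

Most other rows are truncated by the viewer (trailing "..."), so they cannot be reconstructed the same way from this preview alone.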