idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
12,300
def read_by(cls, removed=False, **kwargs):
    """filter_by query helper that handles soft-delete logic.

    Unless ``removed`` is True, only rows whose ``time_removed`` is 0
    (i.e. not soft-deleted) are returned. If your query conditions
    require expressions, use ``read`` instead.
    """
    filters = dict(kwargs)
    if not removed:
        # Soft-deleted rows carry a non-zero removal timestamp.
        filters['time_removed'] = 0
    return cls.query.filter_by(**filters)
filter_by query helper that handles soft delete logic . If your query conditions require expressions use read .
12,301
def read(cls, *criteria, **kwargs):
    """filter query helper that handles soft-delete logic.

    If your query conditions do not require expressions, consider
    using ``read_by`` instead.
    """
    if kwargs.get('removed', False):
        # Caller explicitly wants soft-deleted rows included.
        return cls.query.filter(*criteria)
    return cls.query.filter(cls.time_removed == 0, *criteria)
filter query helper that handles soft delete logic . If your query conditions do not require expressions consider using read_by .
12,302
def delete(self, session, commit=True, soft=True):
    """Delete a row from the DB.

    A soft delete only stamps ``time_removed``; a hard delete removes
    the row from the session. Commits unless ``commit`` is False.
    """
    if not soft:
        session.delete(self)
    else:
        # Soft delete: mark with the DB-side unix timestamp.
        self.time_removed = sqlalchemy.func.unix_timestamp()
    if commit:
        session.commit()
Delete a row from the DB .
12,303
def walk_paths(self, base: pathlib.PurePath = pathlib.PurePath()) -> Iterator[pathlib.PurePath]:
    """Recursively traverse all paths inside this entity, including the entity itself.

    Abstract: concrete subclasses must override this.

    Fix: the annotation was ``Optional[pathlib.PurePath]`` although the
    default is ``PurePath()`` and None is never a meaningful value, so
    the ``Optional`` wrapper was misleading and has been removed.
    (``PurePath()`` is immutable, so a default instance is safe.)
    """
    raise NotImplementedError()
Recursively traverse all paths inside this entity including the entity itself .
12,304
def _walk_paths ( self , base : pathlib . PurePath ) -> Iterator [ pathlib . PurePath ] : return self . walk_paths ( base )
Internal helper for walking paths . This is required to exclude the name of the root entity from the walk .
12,305
def from_path(cls, path: pathlib.Path) -> 'Entity':
    """Create an entity from a local path.

    Files become ``File`` entities; everything else is treated as a
    ``Directory``.
    """
    return File.from_path(path) if path.is_file() else Directory.from_path(path)
Create an entity from a local path .
12,306
def _md5 ( path : pathlib . PurePath ) : hash_ = hashlib . md5 ( ) with open ( path , 'rb' ) as f : for chunk in iter ( lambda : f . read ( 4096 ) , b'' ) : hash_ . update ( chunk ) return hash_ . hexdigest ( )
Calculate the MD5 checksum of a file .
12,307
def from_path(cls, path: pathlib.Path) -> 'File':
    """Create a file entity from a file path.

    Raises ValueError when the path is not a regular file.
    """
    if not path.is_file():
        raise ValueError('Path does not point to a file')
    stat_result = path.stat()
    return File(path.name, stat_result.st_size, cls._md5(path))
Create a file entity from a file path .
12,308
def from_path(cls, path: pathlib.Path) -> 'Directory':
    """Create a directory entity from a directory path.

    Raises ValueError when the path is not a directory.
    """
    if not path.is_dir():
        raise ValueError('Path does not point to a directory')
    children = {child.name: Entity.from_path(child) for child in path.iterdir()}
    return Directory(path.name, children)
Create a directory entity from a directory path .
12,309
def best_result(self):
    """The best result of the grid search.

    That is, the result output by the non-linear search that had the
    highest maximum figure of merit. Returns None when there are no
    results.

    Improvement: the manual best-so-far scan loop is replaced by the
    idiomatic ``max(..., key=..., default=None)``, which keeps the
    first result on ties just like the original strict ``>`` compare.
    """
    return max(self.results, key=lambda result: result.figure_of_merit, default=None)
The best result of the grid search . That is the result output by the non linear search that had the highest maximum figure of merit .
12,310
def make_lists(self, grid_priors):
    """Produce a list of lists of floats, one inner list per grid-search step.

    Each inner list holds the per-dimension values for that step.
    """
    dimensions = len(grid_priors)
    return optimizer.make_lists(
        dimensions,
        step_size=self.hyper_step_size,
        centre_steps=False,
    )
Produces a list of lists of floats where each list of floats represents the values in each dimension for one step of the grid search .
12,311
def fit ( self , analysis , grid_priors ) : grid_priors = list ( set ( grid_priors ) ) results = [ ] lists = self . make_lists ( grid_priors ) results_list = [ list ( map ( self . variable . name_for_prior , grid_priors ) ) + [ "figure_of_merit" ] ] def write_results ( ) : with open ( "{}/results" . format ( self . pha...
Fit an analysis with a set of grid priors . The grid priors are priors associated with the model mapper of this instance that are replaced by uniform priors for each step of the grid search .
12,312
def portTryReduce ( root : LNode , port : LPort ) : if not port . children : return for p in port . children : portTryReduce ( root , p ) target_nodes = { } ch_cnt = countDirectlyConnected ( port , target_nodes ) if not target_nodes : return new_target , children_edge_to_destroy = max ( target_nodes . items ( ) , key =...
Check if the majority of children are connected to the same port; if so, reduce the children and connect this port instead of the children.
12,313
def resolveSharedConnections(root: LNode):
    """Walk all ports on all nodes and collapse sub-interface connections
    into a single parent-interface connection where possible.
    """
    # Reduce bottom-up: children first, then this level's ports.
    for child in root.children:
        resolveSharedConnections(child)
    for child in root.children:
        for port in child.iterPorts():
            portTryReduce(root, port)
Walk all ports on all nodes and group subinterface connections to only parent interface connection if it is possible
12,314
def countDirectlyConnected ( port : LPort , result : dict ) -> int : inEdges = port . incomingEdges outEdges = port . outgoingEdges if port . children : ch_cnt = 0 for ch in port . children : ch_cnt += countDirectlyConnected ( ch , result ) return ch_cnt elif not inEdges and not outEdges : if port . direction == PortTy...
Count how many ports are directly connected to other nodes
12,315
def deploy ( self , image_name , ip , flavor = 'm1.small' ) : body_value = { "port" : { "admin_state_up" : True , "name" : self . name + '_provision' , "network_id" : os_utils . get_network_id ( self . nova_api , 'provision_bob' ) , 'fixed_ips' : [ { 'ip_address' : ip } ] } } response = self . neutron . create_port ( b...
Create the node .
12,316
def pxe_netboot ( self , filename ) : new_port = { 'extra_dhcp_opts' : [ { 'opt_name' : 'bootfile-name' , 'opt_value' : 'http://192.0.2.240:8088/' + filename , 'ip_version' : 4 , } , { 'opt_name' : 'tftp-server' , 'opt_value' : '192.0.2.240' , 'ip_version' : '4' } , { 'opt_name' : 'server-ip-address' , 'opt_value' : '1...
Specify which file ipxe should load during the netboot .
12,317
def initialize ( self , size = 2 ) : for i in range ( 0 , size ) : self . nodes . append ( Baremetal ( self . nova_api , self . neutron , self . _keypair , self . _key_filename , self . _security_groups , name = 'baremetal_%d' % i ) ) with concurrent . futures . ThreadPoolExecutor ( max_workers = 5 ) as executor : for ...
Populate the node poll .
12,318
def create_bmc ( self , os_username , os_password , os_project_id , os_auth_url ) : bmc = ovb_bmc . OvbBmc ( nova_api = self . nova_api , neutron = self . neutron , keypair = self . _keypair , key_filename = self . _key_filename , security_groups = self . _security_groups , image_name = 'Fedora 23 x86_64' , ip = '192.0...
Deploy the BMC machine .
12,319
def untlxml2py ( untl_filename ) : parent_stack = [ ] for event , element in iterparse ( untl_filename , events = ( 'start' , 'end' ) ) : if NAMESPACE_REGEX . search ( element . tag , 0 ) : element_tag = NAMESPACE_REGEX . search ( element . tag , 0 ) . group ( 1 ) else : element_tag = element . tag if element_tag in PY...
Parse a UNTL XML file object into a pyuntl element tree .
12,320
def untldict2py ( untl_dict ) : untl_root = PYUNTL_DISPATCH [ 'metadata' ] ( ) untl_py_list = [ ] for element_name , element_list in untl_dict . items ( ) : for element_dict in element_list : qualifier = element_dict . get ( 'qualifier' , None ) content = element_dict . get ( 'content' , None ) child_list = [ ] if isin...
Convert a UNTL dictionary into a Python object .
12,321
def post2pydict ( post , ignore_list ) : root_element = PYUNTL_DISPATCH [ 'metadata' ] ( ) untl_form_dict = { } form_post = dict ( post . copy ( ) ) for key , value_list in form_post . items ( ) : if key not in ignore_list : ( element_tag , element_attribute ) = key . split ( '-' , 1 ) if element_tag not in untl_form_d...
Convert the UNTL posted data to a Python dictionary .
12,322
def untlpy2dcpy ( untl_elements , ** kwargs ) : sDate = None eDate = None ark = kwargs . get ( 'ark' , None ) domain_name = kwargs . get ( 'domain_name' , None ) scheme = kwargs . get ( 'scheme' , 'http' ) resolve_values = kwargs . get ( 'resolve_values' , None ) resolve_urls = kwargs . get ( 'resolve_urls' , None ) ve...
Convert the UNTL elements structure into a DC structure .
12,323
def untlpy2highwirepy ( untl_elements , ** kwargs ) : highwire_list = [ ] title = None publisher = None creation = None escape = kwargs . get ( 'escape' , False ) for element in untl_elements . children : if element . tag in HIGHWIRE_CONVERSION_DISPATCH : highwire_element = HIGHWIRE_CONVERSION_DISPATCH [ element . tag ...
Convert a UNTL Python object to a highwire Python object .
12,324
def untlpydict2dcformatteddict ( untl_dict , ** kwargs ) : ark = kwargs . get ( 'ark' , None ) domain_name = kwargs . get ( 'domain_name' , None ) scheme = kwargs . get ( 'scheme' , 'http' ) resolve_values = kwargs . get ( 'resolve_values' , None ) resolve_urls = kwargs . get ( 'resolve_urls' , None ) verbose_vocabular...
Convert a UNTL data dictionary to a formatted DC data dictionary .
12,325
def formatted_dc_dict(dc_dict):
    """Change the formatting of the DC data dictionary.

    Each value list of ``{'content': ...}`` dicts is flattened, in
    place, to a plain list of the ``content`` values. Returns the same
    (mutated) dictionary.

    Improvement: the append loop is replaced by a list comprehension.
    """
    for key, element_list in dc_dict.items():
        dc_dict[key] = [element['content'] for element in element_list]
    return dc_dict
Change the formatting of the DC data dictionary .
12,326
def generate_dc_xml ( dc_dict ) : root_namespace = '{%s}' % DC_NAMESPACES [ 'oai_dc' ] elements_namespace = '{%s}' % DC_NAMESPACES [ 'dc' ] schema_location = ( 'http://www.openarchives.org/OAI/2.0/oai_dc/ ' 'http://www.openarchives.org/OAI/2.0/oai_dc.xsd' ) root_attributes = { '{%s}schemaLocation' % XSI : schema_locati...
Generate a DC XML string .
12,327
def generate_dc_json(dc_dict):
    """Generate DC JSON data from a DC data dictionary."""
    flattened = formatted_dc_dict(dc_dict)
    return json.dumps(flattened, sort_keys=True, indent=4)
Generate DC JSON data .
12,328
def highwirepy2dict(highwire_elements):
    """Convert a list of highwire elements into a dictionary.

    Keys are element names; values are lists of ``{'content': ...}``
    dicts, in input order.

    Improvement: the membership test plus manual list creation is
    replaced by ``dict.setdefault``.
    """
    highwire_dict = {}
    for element in highwire_elements:
        highwire_dict.setdefault(element.name, []).append({'content': element.content})
    return highwire_dict
Convert a list of highwire elements into a dictionary .
12,329
def generate_highwire_json(highwire_elements):
    """Convert highwire elements into a JSON structure."""
    as_dict = highwirepy2dict(highwire_elements)
    return json.dumps(as_dict, sort_keys=True, indent=4)
Convert highwire elements into a JSON structure .
12,330
def dcdict2rdfpy ( dc_dict ) : ark_prefix = 'ark: ark:' uri = URIRef ( '' ) rdf_py = ConjunctiveGraph ( ) DC = Namespace ( 'http://purl.org/dc/elements/1.1/' ) for element_value in dc_dict [ 'identifier' ] : if element_value [ 'content' ] . startswith ( ark_prefix ) : uri = URIRef ( element_value [ 'content' ] . replac...
Convert a DC dictionary into an RDF Python object .
12,331
def add_empty_fields ( untl_dict ) : for element in UNTL_XML_ORDER : if element not in untl_dict : try : py_object = PYUNTL_DISPATCH [ element ] ( content = '' , qualifier = '' , ) except : try : py_object = PYUNTL_DISPATCH [ element ] ( content = '' ) except : try : py_object = PYUNTL_DISPATCH [ element ] ( ) except :...
Add empty values if UNTL fields don't have values.
12,332
def add_empty_etd_ms_fields ( etd_ms_dict ) : for element in ETD_MS_ORDER : if element not in etd_ms_dict : try : py_object = ETD_MS_CONVERSION_DISPATCH [ element ] ( content = '' , qualifier = '' , ) except : try : py_object = ETD_MS_CONVERSION_DISPATCH [ element ] ( content = '' ) except : try : py_object = ETD_MS_CO...
Add empty values for ETD_MS fields that don't have values.
12,333
def find_untl_errors ( untl_dict , ** kwargs ) : fix_errors = kwargs . get ( 'fix_errors' , False ) error_dict = { } for element_name in REQUIRES_QUALIFIER : for element in untl_dict . get ( element_name , [ ] ) : error_dict [ element_name ] = 'no_qualifier' if fix_errors : element . setdefault ( 'qualifier' , '' ) fou...
Add empty required qualifiers to create valid UNTL .
12,334
def untlpy2etd_ms ( untl_elements , ** kwargs ) : degree_children = { } date_exists = False seen_creation = False etd_ms_root = ETD_MS_CONVERSION_DISPATCH [ 'thesis' ] ( ) for element in untl_elements . children : etd_ms_element = None if element . tag in ETD_MS_CONVERSION_DISPATCH : if element . children : etd_ms_elem...
Convert the UNTL elements structure into an ETD_MS structure .
12,335
def etd_ms_dict2xmlfile(filename, metadata_dict):
    """Create an ETD MS XML file from a metadata dictionary.

    Raises MetadataGeneratorException if the file cannot be written.

    Fixes: the original opened the file in text mode ('w') but wrote
    ``.encode("utf-8")`` bytes, which raises TypeError on Python 3;
    it also used a bare ``except:`` and never closed the file on
    error. Now uses a ``with`` block, writes the string with an
    explicit utf-8 encoding, and catches only ``Exception``.
    """
    try:
        with open(filename, 'w', encoding='utf-8') as xml_file:
            xml_file.write(generate_etd_ms_xml(metadata_dict))
    except Exception:
        raise MetadataGeneratorException(
            'Failed to create an XML file. Filename: %s' % (filename)
        )
Create an ETD MS XML file .
12,336
def signal_to_noise_map(self):
    """The signal-to-noise map of the data and noise map which are fitted.

    Computed as data / noise_map, with negative ratios clipped to zero.
    """
    ratio = np.divide(self.data, self.noise_map)
    return np.where(ratio < 0, 0, ratio)
The signal-to-noise map of the data and noise map which are fitted.
12,337
def structure ( cls ) : if cls . signature is NotImplemented : raise NotImplementedError ( "no signature defined" ) up = cls . cutter . elucidate ( ) down = str ( Seq ( up ) . reverse_complement ( ) ) ovhg = cls . cutter . ovhgseq upsig , downsig = cls . signature if cls . cutter . is_5overhang ( ) : upsite = "^{}_" . ...
Get the part structure as a DNA regex pattern .
12,338
def characterize(cls, record):
    """Load the record in a concrete subclass of this type.

    Tries every subclass (and this class itself when concrete) and
    returns the first instance that reports itself valid.
    Raises RuntimeError when no candidate accepts the record.
    """
    candidates = list(cls.__subclasses__())
    if not isabstract(cls):
        candidates.append(cls)
    for candidate in candidates:
        entity = candidate(record)
        if entity.is_valid():
            return entity
    raise RuntimeError("could not find the type for '{}'".format(record.id))
Load the record in a concrete subclass of this type .
12,339
def global_request ( self , kind , data = None , wait = True ) : if wait : self . completion_event = threading . Event ( ) m = Message ( ) m . add_byte ( cMSG_GLOBAL_REQUEST ) m . add_string ( kind ) m . add_boolean ( wait ) if data is not None : m . add ( * data ) self . _log ( DEBUG , 'Sending global request "%s"' % ...
Make a global request to the remote host . These are normally extensions to the SSH2 protocol .
12,340
def _activate_inbound ( self ) : block_size = self . _cipher_info [ self . remote_cipher ] [ 'block-size' ] if self . server_mode : IV_in = self . _compute_key ( 'A' , block_size ) key_in = self . _compute_key ( 'C' , self . _cipher_info [ self . remote_cipher ] [ 'key-size' ] ) else : IV_in = self . _compute_key ( 'B'...
switch on newly negotiated encryption parameters for inbound traffic
12,341
def enable_user ( self , user ) : if user in self . ssh_pool . _ssh_clients : return if user == 'root' : _root_ssh_client = ssh . SshClient ( hostname = self . hostname , user = 'root' , key_filename = self . _key_filename , via_ip = self . via_ip ) _root_ssh_client . start ( ) result , _ = _root_ssh_client . run ( 'un...
Enable the root account on the remote host .
12,342
def send_file(self, local_path, remote_path, user='root', unix_mode=None):
    """Upload a local file to the remote host as the given user."""
    # Make sure the target account is usable before transferring.
    self.enable_user(user)
    return self.ssh_pool.send_file(
        user, local_path, remote_path, unix_mode=unix_mode)
Upload a local file on the remote host .
12,343
def send_dir(self, local_path, remote_path, user='root'):
    """Upload a directory to the remote host as the given user."""
    # Make sure the target account is usable before transferring.
    self.enable_user(user)
    return self.ssh_pool.send_dir(user, local_path, remote_path)
Upload a directory on the remote host .
12,344
def create_file(self, path, content, mode='w', user='root'):
    """Create a file on the remote host as the given user."""
    # Make sure the target account is usable before writing.
    self.enable_user(user)
    return self.ssh_pool.create_file(user, path, content, mode)
Create a file on the remote host .
12,345
def yum_install(self, packages, ignore_error=False):
    """Install some packages on the remote host, retrying up to 5 times."""
    command = 'yum install -y --quiet ' + ' '.join(packages)
    return self.run(command, ignore_error=ignore_error, retry=5)
Install some packages on the remote host .
12,346
def rhsm_register ( self , rhsm ) : login = rhsm . get ( 'login' ) password = rhsm . get ( 'password' , os . environ . get ( 'RHN_PW' ) ) pool_id = rhsm . get ( 'pool_id' ) self . run ( 'rm /etc/pki/product/69.pem' , ignore_error = True ) custom_log = 'subscription-manager register --username %s --password *******' % l...
Register the host on the RHSM .
12,347
def enable_repositories ( self , repositories ) : for r in repositories : if r [ 'type' ] != 'rhsm_channel' : continue if r [ 'name' ] not in self . rhsm_channels : self . rhsm_channels . append ( r [ 'name' ] ) if self . rhsm_active : subscription_cmd = "subscription-manager repos '--disable=*' --enable=" + ' --enable...
Enable a list of RHSM repositories .
12,348
def create_stack_user ( self ) : self . run ( 'adduser -m stack' , success_status = ( 0 , 9 ) ) self . create_file ( '/etc/sudoers.d/stack' , 'stack ALL=(root) NOPASSWD:ALL\n' ) self . run ( 'mkdir -p /home/stack/.ssh' ) self . run ( 'cp /root/.ssh/authorized_keys /home/stack/.ssh/authorized_keys' ) self . run ( 'chown...
Create the stack user on the machine .
12,349
def fetch_image(self, path, dest, user='root'):
    """Download an image from a remote location into the user's home.

    Skips the download when the destination file already exists.
    """
    command = 'test -f %s || curl -L -s -o %s %s' % (dest, dest, path)
    self.run(command, user=user, ignore_error=True)
Store in the user home directory an image from a remote location .
12,350
def clean_system ( self ) : self . run ( 'systemctl disable NetworkManager' , success_status = ( 0 , 1 ) ) self . run ( 'systemctl stop NetworkManager' , success_status = ( 0 , 5 ) ) self . run ( 'pkill -9 dhclient' , success_status = ( 0 , 1 ) ) self . yum_remove ( [ 'cloud-init' , 'NetworkManager' ] ) self . run ( 's...
Clean up unnecessary packages from the system .
12,351
def yum_update ( self , allow_reboot = False ) : self . run ( 'yum clean all' ) self . run ( 'test -f /usr/bin/subscription-manager && subscription-manager repos --list-enabled' , ignore_error = True ) self . run ( 'yum repolist' ) self . run ( 'yum update -y --quiet' , retry = 3 ) if allow_reboot : self . run ( 'grubb...
Do a yum update on the system .
12,352
def get_by_range ( model_cls , * args , ** kwargs ) : start_timestamp = kwargs . get ( 'start_timestamp' ) end_timestamp = kwargs . get ( 'end_timestamp' ) if ( start_timestamp is not None ) and ( end_timestamp is not None ) and ( start_timestamp > end_timestamp ) : raise InvalidTimestampRange models = model_cls . read...
Get ordered list of models for the specified time range . The timestamp on the earliest model will likely occur before start_timestamp . This is to ensure that we return the models for the entire range .
12,353
def read_time_range(cls, *args, **kwargs):
    """Get all rows set within a given time range. Uses time_dsc_index.

    Optional ``start_timestamp`` / ``end_timestamp`` kwargs bound the
    range; ``time_order`` stores negated timestamps, hence the sign
    flips below.
    """
    start = kwargs.get('start_timestamp')
    end = kwargs.get('end_timestamp')
    bounds = []
    if start is not None:
        bounds.append(cls.time_order <= -start)
    if end is not None:
        bounds.append(cls.time_order >= -end)
    return cls.read(*(list(args) + bounds), **kwargs)
Get all timezones set within a given time . Uses time_dsc_index
12,354
def add_data ( self , data , metadata = None ) : subdata = np . atleast_2d ( data ) if subdata . shape [ 1 ] != self . grid . nr_of_elements : if subdata . shape [ 0 ] == self . grid . nr_of_elements : subdata = subdata . T else : raise Exception ( 'Number of values does not match the number of ' + 'elements in the gri...
Add data to the parameter set
12,355
def load_model_from_file(self, filename):
    """Load one parameter set from a file containing one value per line.

    Returns the id assigned by ``add_data``.
    """
    assert os.path.isfile(filename)
    values = np.loadtxt(filename).squeeze()
    # Only a flat 1D parameter vector is accepted.
    assert values.ndim == 1
    return self.add_data(values)
Load one parameter set from a file which contains one value per line
12,356
def load_from_sens_file(self, filename):
    """Load real and imaginary parts from a sens.dat file generated by CRMod.

    Returns the (real, imaginary) parameter-set ids from ``add_data``.
    """
    # First line is a header; columns 2/3 hold real/imaginary parts.
    sens_data = np.loadtxt(filename, skiprows=1)
    real_part = sens_data[:, 2]
    imag_part = sens_data[:, 3]
    return self.add_data(real_part), self.add_data(imag_part)
Load real and imaginary parts from a sens . dat file generated by CRMod
12,357
def save_to_rho_file ( self , filename , cid_mag , cid_pha = None ) : mag_data = self . parsets [ cid_mag ] if cid_pha is None : pha_data = np . zeros ( mag_data . shape ) else : pha_data = self . parsets [ cid_pha ] with open ( filename , 'wb' ) as fid : fid . write ( bytes ( '{0}\n' . format ( self . grid . nr_of_ele...
Save one or two parameter sets in the rho . dat forward model format
12,358
def _clean_pid ( self , pid ) : if isinstance ( pid , ( list , tuple ) ) : if len ( pid ) == 1 : return pid [ 0 ] else : return pid return pid
If pid is a number, don't do anything. If pid is a list with one entry, strip the list and return the number. If pid contains more than one entry, do nothing.
12,359
def modify_area(self, pid, xmin, xmax, zmin, zmax, value):
    """Assign ``value`` to all parameters of dataset ``pid`` inside the
    rectangle spanned by (xmin, xmax) x (zmin, zmax).
    """
    corners = (
        (xmin, zmax),
        (xmax, zmax),
        (xmax, zmin),
        (xmin, zmin),
    )
    self.modify_polygon(pid, shapgeo.Polygon(corners), value)
Modify the given dataset in the rectangular area given by the parameters and assign all parameters inside this area the given value .
12,360
def extract_points(self, pid, points):
    """Extract values at certain points from a given parameter set.

    Cells are selected by nearest-neighbour interpolation of the cell
    centroids towards the requested points.
    """
    centroids = self.grid.get_element_centroids()
    interpolator = spi.NearestNDInterpolator(centroids, self.parsets[pid])
    return interpolator(points)
Extract values at certain points in the grid from a given parameter set . Cells are selected by interpolating the centroids of the cells towards the line using a nearest scheme .
12,361
def extract_along_line ( self , pid , xy0 , xy1 , N = 10 ) : assert N >= 2 xy0 = np . array ( xy0 ) . squeeze ( ) xy1 = np . array ( xy1 ) . squeeze ( ) assert xy0 . size == 2 assert xy1 . size == 2 points = [ ( x , y ) for x , y in zip ( np . linspace ( xy0 [ 0 ] , xy1 [ 0 ] , N ) , np . linspace ( xy0 [ 1 ] , xy1 [ 1...
Extract parameter values along a given line .
12,362
def extract_polygon_area ( self , pid , polygon_points ) : polygon = shapgeo . Polygon ( polygon_points ) xy = self . grid . get_element_centroids ( ) in_poly = [ ] for nr , point in enumerate ( xy ) : if shapgeo . Point ( point ) . within ( polygon ) : in_poly . append ( nr ) values = self . parsets [ pid ] [ in_poly ...
Extract all data points whose element centroid lies within the given polygon .
12,363
def rotate_point(xorigin, yorigin, x, y, angle):
    """Rotate the point (x, y) about (xorigin, yorigin) by ``angle`` (radians).

    Returns the rotated coordinates RELATIVE to the rotation origin
    (the origin offset is not added back — same contract as before).

    Bug fix: ``roty`` previously used ``(x - yorigin)`` instead of
    ``(x - xorigin)``, producing wrong results whenever the origin
    components differed.
    """
    dx = x - xorigin
    dy = y - yorigin
    rotx = dx * np.cos(angle) - dy * np.sin(angle)
    roty = dx * np.sin(angle) + dy * np.cos(angle)
    return rotx, roty
Rotate the given point by angle
12,364
def get_R_mod(options, rho0):
    """Compute synthetic measurements over a homogeneous half-space
    of resistivity ``rho0``.
    """
    tomodir = tdManager.tdMan(
        elem_file=options.elem_file,
        elec_file=options.elec_file,
        config_file=options.config_file,
    )
    tomodir.add_homogeneous_model(magnitude=rho0)
    # First column of the measurement array holds the impedances.
    return tomodir.measurements()[:, 0]
Compute synthetic measurements over a homogeneous half - space
12,365
def make_and_return_path_from_path_and_folder_names(path, folder_names):
    """For a given path, create a directory structure composed of a set of
    folders and return the path to the inner-most folder.

    Improvement: the ``try: os.makedirs(...) except FileExistsError:
    pass`` dance is replaced by ``os.makedirs(..., exist_ok=True)``.
    """
    for folder_name in folder_names:
        path += folder_name + '/'
        os.makedirs(path, exist_ok=True)
    return path
For a given path, create a directory structure composed of a set of folders and return the path to the inner-most folder.
12,366
def register_host ( self , bm_instance ) : bmc_ip = '10.130.%d.100' % ( self . _bmc_range_start + self . _nic_cpt ) bmc_net = '10.130.%d.0' % ( self . _bmc_range_start + self . _nic_cpt ) bmc_gw = '10.130.%d.1' % ( self . _bmc_range_start + self . _nic_cpt ) device = 'eth%d' % ( 2 + self . _nic_cpt ) body_create_subnet...
Register an existing nova VM .
12,367
def Godeps(self):
    """Return the snapshot in Godeps.json form.

    Improvements: the local variable no longer shadows the builtin
    ``dict``, and the append loop is replaced by a comprehension.
    """
    return [
        {"ImportPath": str(package), "Rev": str(self._packages[package])}
        for package in sorted(self._packages.keys())
    ]
Return the snapshot in Godeps . json form
12,368
def GLOGFILE(self):
    """Return the snapshot in GLOGFILE form (one "package revision" per line)."""
    return "\n".join(
        "%s %s" % (str(package), str(self._packages[package]))
        for package in sorted(self._packages.keys())
    )
Return the snapshot in GLOGFILE form
12,369
def Glide ( self ) : dict = { "hash" : "???" , "updated" : str ( datetime . datetime . now ( tz = pytz . utc ) . isoformat ( ) ) , "imports" : [ ] , } decomposer = ImportPathsDecomposerBuilder ( ) . buildLocalDecomposer ( ) decomposer . decompose ( self . _packages . keys ( ) ) classes = decomposer . classes ( ) for ip...
Return the snapshot in glide . lock form
12,370
def render ( self , trajectories : Tuple [ NonFluents , Fluents , Fluents , Fluents , np . array ] , batch : Optional [ int ] = None ) -> None : non_fluents , initial_state , states , actions , interms , rewards = trajectories non_fluents = dict ( non_fluents ) states = dict ( ( name , fluent [ 0 ] ) for name , fluent ...
Render the simulated state - action trajectories for Navigation domain .
12,371
def persistent_timer ( func ) : @ functools . wraps ( func ) def timed_function ( optimizer_instance , * args , ** kwargs ) : start_time_path = "{}/.start_time" . format ( optimizer_instance . phase_output_path ) try : with open ( start_time_path ) as f : start = float ( f . read ( ) ) except FileNotFoundError : start ...
Times the execution of a function . If the process is stopped and restarted then timing is continued using saved files .
12,372
def backup_path(self) -> str:
    """The path to the backed-up optimizer folder."""
    return "{}/{}/{}{}/optimizer_backup".format(
        conf.instance.output_path,
        self.phase_path,
        self.phase_name,
        self.phase_tag,
    )
The path to the backed up optimizer folder .
12,373
def backup(self):
    """Copy files from the sym-linked optimizer folder to the backup
    folder in the workspace, replacing any previous backup.
    """
    destination = self.backup_path
    try:
        shutil.rmtree(destination)
    except FileNotFoundError:
        # No previous backup to clear — fine.
        pass
    try:
        shutil.copytree(self.opt_path, destination)
    except shutil.Error as copy_error:
        logger.exception(copy_error)
Copy files from the sym - linked optimizer folder to the backup folder in the workspace .
12,374
def restore(self):
    """Copy files from the backup folder to the sym-linked optimizer folder.

    Does nothing when no backup exists.
    """
    if not os.path.exists(self.backup_path):
        return
    for backed_up_file in glob.glob(self.backup_path + "/*"):
        shutil.copy(backed_up_file, self.path)
Copy files from the backup folder to the sym - linked optimizer folder .
12,375
def config(self, attribute_name, attribute_type=str):
    """Get a config field from this optimizer's section in non_linear.ini
    by a key and value type.
    """
    section = self.__class__.__name__
    return self.named_config.get(section, attribute_name, attribute_type)
Get a config field from this optimizer s section in non_linear . ini by a key and value type .
12,376
def weighted_sample_instance_from_weighted_samples(self, index):
    """Set up a model instance of a weighted sample, including its
    weight and likelihood.
    """
    model, weight, likelihood = \
        self.weighted_sample_model_from_weighted_samples(index)
    # Cache the raw model vector for later inspection.
    self._weighted_sample_model = model
    instance = self.variable.instance_from_physical_vector(model)
    return instance, weight, likelihood
Setup a model instance of a weighted sample including its weight and likelihood .
12,377
def weighted_sample_model_from_weighted_samples(self, index):
    """From a weighted sample, return the model, weight, and likelihood.

    The stored loglikes are -2*ln(L), hence the -0.5 factor.
    """
    model = list(self.pdf.samples[index])
    weight = self.pdf.weights[index]
    likelihood = -0.5 * self.pdf.loglikes[index]
    return model, weight, likelihood
From a weighted sample, return the model, weight, and likelihood.
12,378
def compare_digest(a, b):
    """Compare two hash digests, dispatching on the Python major version."""
    if sys.version_info[0] >= 3:
        return _compare_digest_py3(a, b)
    return _compare_digest_py2(a, b)
Compare 2 hash digest .
12,379
def _render_trajectories ( self , trajectories : Tuple [ NonFluents , Fluents , Fluents , Fluents , np . array ] ) -> None : if self . _verbose : non_fluents , initial_state , states , actions , interms , rewards = trajectories shape = states [ 0 ] [ 1 ] . shape batch_size , horizon , = shape [ 0 ] , shape [ 1 ] states...
Prints the first batch of simulated trajectories .
12,380
def _render_batch ( self , non_fluents : NonFluents , states : Fluents , actions : Fluents , interms : Fluents , rewards : np . array , horizon : Optional [ int ] = None ) -> None : if horizon is None : horizon = len ( states [ 0 ] [ 1 ] ) self . _render_round_init ( horizon , non_fluents ) for t in range ( horizon ) :...
Prints non_fluents states actions interms and rewards for given horizon .
12,381
def _render_timestep ( self , t : int , s : Fluents , a : Fluents , f : Fluents , r : np . float32 ) -> None : print ( "============================" ) print ( "TIME = {}" . format ( t ) ) print ( "============================" ) fluent_variables = self . _compiler . rddl . action_fluent_variables self . _render_fluent...
Prints fluents and rewards for the given timestep t .
12,382
def _render_fluent_timestep ( self , fluent_type : str , fluents : Sequence [ Tuple [ str , np . array ] ] , fluent_variables : Sequence [ Tuple [ str , List [ str ] ] ] ) -> None : for fluent_pair , variable_list in zip ( fluents , fluent_variables ) : name , fluent = fluent_pair _ , variables = variable_list print ( ...
Prints fluents of given fluent_type as list of instantiated variables with corresponding values .
12,383
def _render_reward ( self , r : np . float32 ) -> None : print ( "reward = {:.4f}" . format ( float ( r ) ) ) print ( )
Prints reward r .
12,384
def _render_round_init ( self , horizon : int , non_fluents : NonFluents ) -> None : print ( '*********************************************************' ) print ( '>>> ROUND INIT, horizon = {}' . format ( horizon ) ) print ( '*********************************************************' ) fluent_variables = self . _compil...
Prints round init information about horizon and non_fluents .
12,385
def _render_round_end ( self , rewards : np . array ) -> None : print ( "*********************************************************" ) print ( ">>> ROUND END" ) print ( "*********************************************************" ) total_reward = np . sum ( rewards ) print ( "==> Objective value = {}" . format ( total_re...
Prints round end information about rewards .
12,386
def _truncate_to_field ( model , field_name , value ) : field = model . _meta . get_field ( field_name ) if len ( value ) > field . max_length : midpoint = field . max_length // 2 len_after_midpoint = field . max_length - midpoint first = value [ : midpoint ] sep = '...' last = value [ len ( value ) - len_after_midpoin...
Shorten data to fit in the specified model field .
12,387
def on_failure ( self , exc , task_id , args , kwargs , einfo ) : if not FailedTask . objects . filter ( task_id = task_id , datetime_resolved = None ) . exists ( ) : FailedTask . objects . create ( task_name = _truncate_to_field ( FailedTask , 'task_name' , self . name ) , task_id = task_id , args = args , kwargs = kw...
If the task fails persist a record of the task .
12,388
def render(self,
           trajectories: Tuple[NonFluents, Fluents, Fluents, Fluents, np.array],
           batch: Optional[int] = None) -> None:
    """Render the simulated trajectories for the given batch.

    Abstract hook: concrete renderer subclasses must override this.
    """
    raise NotImplementedError
Renders the simulated trajectories for the given batch .
12,389
def distribution ( self , limit = 1024 ) : res = self . _qexec ( "%s, count(*) as __cnt" % self . name ( ) , group = "%s" % self . name ( ) , order = "__cnt DESC LIMIT %d" % limit ) dist = [ ] cnt = self . _table . size ( ) for i , r in enumerate ( res ) : dist . append ( list ( r ) + [ i , r [ 1 ] / float ( cnt ) ] ) ...
Build the distribution of distinct values
12,390
def parse(self, name):
    """Parse a distribution string, storing its signature on success.

    Returns self for chaining; raises ValueError for unrecognized names.
    """
    cleaned = name.strip()
    match = self._parseFedora(cleaned)
    if match:
        self._signature = DistributionNameSignature("Fedora", match.group(1))
        return self
    raise ValueError("Distribution name '%s' not recognized" % cleaned)
Parse distribution string
12,391
def get_token(url: str, scopes: str, credentials_dir: str) -> dict:
    """Get access token info for the 'lizzy' token manager."""
    tokens.configure(url=url, dir=credentials_dir)
    # Single managed token covering the requested scopes.
    tokens.manage('lizzy', [scopes])
    tokens.start()
    return tokens.get('lizzy')
Get access token info .
12,392
def config ( config , fork_name = "" , origin_name = "" ) : state = read ( config . configfile ) any_set = False if fork_name : update ( config . configfile , { "FORK_NAME" : fork_name } ) success_out ( "fork-name set to: {}" . format ( fork_name ) ) any_set = True if origin_name : update ( config . configfile , { "ORI...
Setting various configuration options
12,393
def set_area_to_sip_signature ( self , xmin , xmax , zmin , zmax , spectrum ) : assert isinstance ( spectrum , ( sip_response , sip_response2 ) ) assert np . all ( self . frequencies == spectrum . frequencies ) for frequency , rmag , rpha in zip ( self . frequencies , spectrum . rmag , spectrum . rpha ) : td = self . t...
Parameterize the eit instance by supplying one SIP spectrum and the area to apply to .
12,394
def add_homogeneous_model ( self , magnitude , phase = 0 , frequency = None ) : if frequency is None : frequencies = self . frequencies else : assert isinstance ( frequency , Number ) frequencies = [ frequency , ] for freq in frequencies : pidm , pidp = self . tds [ freq ] . add_homogeneous_model ( magnitude , phase ) ...
Add homogeneous models to one or all tomodirs . Register those as forward models
12,395
def apply_crtomo_cfg(self):
    """Set the global crtomo_cfg for all frequencies.

    Each tomodir gets its own copy so later per-frequency edits do not
    leak into the shared configuration.
    """
    for frequency_key in sorted(self.tds.keys()):
        self.tds[frequency_key].crtomo_cfg = self.crtomo_cfg.copy()
Set the global crtomo_cfg for all frequencies
12,396
def apply_noise_models(self):
    """Set the global noise_model for all frequencies."""
    for frequency_key in sorted(self.tds.keys()):
        self.tds[frequency_key].noise_model = self.noise_model
Set the global noise_model for all frequencies
12,397
def load_inversion_results ( self , sipdir ) : frequency_file = sipdir + os . sep + 'frequencies.dat' frequencies = np . loadtxt ( frequency_file ) self . _init_frequencies ( frequencies ) for nr , ( frequency_key , item ) in enumerate ( sorted ( self . tds . items ( ) ) ) : for label in ( 'rmag' , 'rpha' , 'cre' , 'ci...
Given an sEIT inversion directory load inversion results and store the corresponding parameter ids in self . assignments
12,398
def plot_forward_models ( self , maglim = None , phalim = None , ** kwargs ) : return_dict = { } N = len ( self . frequencies ) nrx = min ( N , 4 ) nrz = int ( np . ceil ( N / nrx ) ) for index , key , limits in zip ( ( 0 , 1 ) , ( 'rmag' , 'rpha' ) , ( maglim , phalim ) ) : if limits is None : cbmin = None cbmax = Non...
Create plots of the forward models
12,399
def add_to_configs(self, configs):
    """Add configurations to all tomodirs.

    Improvement: the original iterated ``self.tds.items()`` but never
    used the frequency key, so this iterates ``values()`` instead.
    """
    for td in self.tds.values():
        td.configs.add_to_configs(configs)
Add configurations to all tomodirs