idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
12,200
def _tensors(cls, fluents: Sequence[FluentPair]) -> Iterable[tf.Tensor]:
    """Yields the fluents' tensors.

    NOTE(review): delegating to ``cls._output_size`` looks suspicious for a
    method documented to yield tensors — confirm against the contract of
    ``_output_size`` (it may be expected to return the tensor itself).
    """
    for _name, fluent in fluents:
        yield cls._output_size(fluent.tensor)
Yields the fluents tensors .
12,201
def _dtype(cls, tensor: tf.Tensor) -> tf.Tensor:
    """Converts ``tensor`` to the tf.float32 datatype if needed."""
    if tensor.dtype == tf.float32:
        return tensor
    return tf.cast(tensor, tf.float32)
Converts tensor to tf . float32 datatype if needed .
12,202
def _output(cls, fluents: Sequence[FluentPair]) -> Sequence[tf.Tensor]:
    """Returns the output tensors for ``fluents`` (cast via ``_dtype``)."""
    converted = [cls._dtype(tensor) for tensor in cls._tensors(fluents)]
    return tuple(converted)
Returns output tensors for fluents .
12,203
def output_size(self) -> Tuple[Sequence[Shape], Sequence[Shape], Sequence[Shape], int]:
    """Returns the simulation output size (delegated to the underlying cell)."""
    cell = self._cell
    return cell.output_size
Returns the simulation output size .
12,204
def timesteps ( self , horizon : int ) -> tf . Tensor : start , limit , delta = horizon - 1 , - 1 , - 1 timesteps_range = tf . range ( start , limit , delta , dtype = tf . float32 ) timesteps_range = tf . expand_dims ( timesteps_range , - 1 ) batch_timesteps = tf . stack ( [ timesteps_range ] * self . batch_size ) retu...
Returns the input tensor for the given horizon .
12,205
def trajectory ( self , horizon : int , initial_state : Optional [ StateTensor ] = None ) -> TrajectoryOutput : if initial_state is None : initial_state = self . _cell . initial_state ( ) with self . graph . as_default ( ) : self . inputs = self . timesteps ( horizon ) outputs , _ = tf . nn . dynamic_rnn ( self . _cell...
Returns the ops for the trajectory generation with given horizon and initial_state .
12,206
def run ( self , horizon : int , initial_state : Optional [ StateTensor ] = None ) -> SimulationOutput : trajectory = self . trajectory ( horizon , initial_state ) with tf . Session ( graph = self . graph ) as sess : sess . run ( tf . global_variables_initializer ( ) ) non_fluents = sess . run ( self . _non_fluents ) i...
Builds the MDP graph and simulates the trajectories in batch for the given horizon. Returns the non-fluents, states, actions, interms and rewards. Fluents and non-fluents are returned in factored form.
12,207
def _output ( cls , tensors : Sequence [ tf . Tensor ] , dtypes : Sequence [ tf . DType ] ) -> Sequence [ tf . Tensor ] : outputs = [ ] for tensor , dtype in zip ( tensors , dtypes ) : tensor = tensor [ 0 ] if tensor . dtype != dtype : tensor = tf . cast ( tensor , dtype ) outputs . append ( tensor ) return tuple ( out...
Converts tensors to the corresponding dtypes .
12,208
def _get_biallelic_variant ( self , variant , info , _check_alleles = True ) : info = info . iloc [ 0 , : ] assert not info . multiallelic self . _impute2_file . seek ( info . seek ) genotypes = self . _parse_impute2_line ( self . _impute2_file . readline ( ) ) variant_alleles = variant . _encode_alleles ( [ genotypes ...
Creates a bi - allelic variant .
12,209
def _fix_genotypes_object ( self , genotypes , variant_info ) : if self . has_index and variant_info . name != genotypes . variant . name : if not variant_info . name . startswith ( genotypes . variant . name ) : raise ValueError ( "Index file not synced with IMPUTE2 file" ) genotypes . variant . name = variant_info . ...
Fixes a genotypes object (variant name, multi-allelic value).
12,210
def _normalize_missing ( g ) : g = g . astype ( float ) g [ g == - 1.0 ] = np . nan return g
Normalize a plink genotype vector .
12,211
def maybe_download_and_extract ( ) : dest_directory = "/tmp/cifar" if not os . path . exists ( dest_directory ) : os . makedirs ( dest_directory ) filename = DATA_URL . split ( '/' ) [ - 1 ] filepath = os . path . join ( dest_directory , filename ) if not os . path . exists ( filepath ) : def _progress ( count , block_...
Download and extract the tarball from Alex's website.
12,212
def plot(config, image, file):
    """Save a single CIFAR image to ``file``.

    ``config`` is unused here; the image is squeezed before saving.
    NOTE(review): ``imsave`` is presumably scipy.misc/imageio — confirm.
    """
    squeezed = np.squeeze(image)
    print(file, squeezed.shape)
    imsave(file, squeezed)
Plot a single CIFAR image .
12,213
def _get_seqtype_from_ext ( handle ) : if isinstance ( handle , basestring ) : name = handle elif hasattr ( handle , 'filename' ) : name = handle . filename elif hasattr ( handle , 'name' ) : name = handle . name else : raise ValueError ( "Unknown datatype for handle!" ) modifier = '' dummy , ext = path . splitext ( na...
Predict the filetype from a handle's name.
12,214
def _guess_seqtype_from_file ( handle ) : "Guess the sequence type from the file's contents" if isinstance ( handle , basestring ) : handle = StringIO ( handle ) for line in handle : if not line . strip ( ) : continue if line . lstrip ( ) . split ( ) [ 0 ] in ( 'LOCUS' , 'FEATURES' , 'source' , 'CDS' , 'gene' ) : retur...
Guess the sequence type from the file's contents.
12,215
def _unzip_handle(handle):
    """Transparently unzip the file handle (path string or open handle).

    NOTE(review): ``basestring`` is Python 2 only — confirm target version.
    """
    if isinstance(handle, basestring):
        return _gzip_open_filename(handle)
    return _gzip_open_handle(handle)
Transparently unzip the file handle
12,216
def sanity_check_insdcio ( handle , id_marker , fake_id_line ) : found_id = False found_end_marker = False for line in handle : line = line . strip ( ) if not line : continue if line . startswith ( id_marker ) : found_id = True break if line . startswith ( '//' ) : found_end_marker = True break handle . seek ( 0 ) if f...
Sanity check for insdcio style files
12,217
def sanity_check_fasta ( handle ) : header_found = False for line in handle : if line . startswith ( '>' ) : header_found = True break handle . seek ( 0 ) if header_found : return handle fake_header_line = ">DUMMY" new_handle = StringIO ( ) new_handle . write ( "%s\n" % fake_header_line ) new_handle . write ( handle . ...
Sanity check FASTA files .
12,218
def parse ( handle , seqtype = None , robust = False ) : if seqtype is None : seqtype = _get_seqtype_from_ext ( handle ) if seqtype . startswith ( 'gz-' ) : handle = _unzip_handle ( handle ) seqtype = seqtype [ 3 : ] if robust : if seqtype == "embl" : handle = sanity_check_embl ( handle ) elif seqtype == "genbank" : ha...
Wrap SeqIO . parse
12,219
def isOrderFixed(self):
    """Returns whether the order of ports is fixed."""
    fixed_kinds = (
        PortConstraints.FIXED_ORDER,
        PortConstraints.FIXED_RATIO,
        PortConstraints.FIXED_POS,
    )
    return self in fixed_kinds
Returns whether the order of ports is fixed .
12,220
def _dicts_to_columns ( dicts ) : keys = dicts [ 0 ] . keys ( ) result = dict ( ( k , [ ] ) for k in keys ) for d in dicts : for k , v in d . items ( ) : result [ k ] += [ v ] return result
Given a List of Dictionaries with uniform keys returns a single Dictionary with keys holding a List of values matching the key in the original List .
12,221
def from_vertices_and_edges ( vertices , edges , vertex_name_key = 'name' , vertex_id_key = 'id' , edge_foreign_keys = ( 'source' , 'target' ) , directed = True ) : vertex_data = _dicts_to_columns ( vertices ) edge_data = _dicts_to_columns ( edges ) n = len ( vertices ) vertex_index = dict ( zip ( vertex_data [ vertex_...
This representation assumes that vertices and edges are encoded in two lists each list containing a Python dict for each vertex and each edge respectively . A distinguished element of the vertex dicts contain a vertex ID which is used in the edge dicts to refer to source and target vertices . All the remaining elements...
12,222
def from_edges ( edges , source_key = 'source' , target_key = 'target' , weight_key = 'weight' , directed = True ) : raw = list ( map ( lambda x : [ x [ source_key ] , x [ target_key ] , int ( x [ weight_key ] ) ] , edges ) ) g = IGraph . TupleList ( raw , weights = True , directed = directed ) g . vs [ 'indegree' ] = ...
Given a List of Dictionaries with source target and weight attributes return a weighted directed graph .
12,223
def flip_alleles(genotypes):
    """Flip the alleles of a Genotypes instance in place and return it.

    Swaps reference/coded and recodes dosages as ``2 - g``.
    Deprecated: use Genotypes.flip_coded instead.
    """
    warnings.warn("deprecated: use 'Genotypes.flip_coded'", DeprecationWarning)
    old_reference = genotypes.reference
    genotypes.reference = genotypes.coded
    genotypes.coded = old_reference
    genotypes.genotypes = 2 - genotypes.genotypes
    return genotypes
Flip the alleles of an Genotypes instance .
12,224
def code_minor(genotypes):
    """Encode the genotypes with respect to the minor allele.

    Deprecated: use Genotypes.code_minor instead.
    """
    warnings.warn("deprecated: use 'Genotypes.code_minor'", DeprecationWarning)
    minor_is_coded = maf(genotypes)[1]
    if minor_is_coded:
        return genotypes
    return flip_alleles(genotypes)
Encode the genotypes with respect to the minor allele .
12,225
def maf(genotypes):
    """Compute the minor allele frequency.

    Returns ``(maf, is_coded_minor)``: the frequency and a bool indicating
    whether the currently coded allele is the minor one.
    Deprecated: use Genotypes.maf instead.
    """
    warnings.warn("deprecated: use 'Genotypes.maf'", DeprecationWarning)
    dosages = genotypes.genotypes
    freq = np.nansum(dosages) / (2 * np.sum(~np.isnan(dosages)))
    if freq > 0.5:
        return 1 - freq, False
    return freq, True
Computes the MAF and returns a boolean indicating if the minor allele is currently the coded allele .
12,226
def genotype_to_df ( g , samples , as_string = False ) : name = g . variant . name if g . variant . name else "genotypes" df = pd . DataFrame ( g . genotypes , index = samples , columns = [ name ] ) if as_string : df [ "alleles" ] = None hard_calls = df [ name ] . round ( ) df . loc [ hard_calls == 0 , "alleles" ] = "{...
Convert a genotype object to a pandas dataframe .
12,227
def compute_ld ( cur_geno , other_genotypes , r2 = False ) : norm_cur = normalize_genotypes ( cur_geno ) norm_others = np . stack ( tuple ( normalize_genotypes ( g ) for g in other_genotypes ) , axis = 1 , ) assert norm_cur . shape [ 0 ] == norm_others . shape [ 0 ] n = ( ~ np . isnan ( norm_cur . reshape ( norm_cur . ...
Compute LD between a marker and a list of markers .
12,228
def normalize_genotypes(genotypes):
    """Normalize the genotypes (zero mean, unit variance, NaN-aware)."""
    values = genotypes.genotypes
    centred = values - np.nanmean(values)
    return centred / np.nanstd(values)
Normalize the genotypes .
12,229
def _get_tdm ( self , m ) : m = np . atleast_2d ( m ) assert len ( m . shape ) == 2 tdm = crtomo . tdMan ( grid = self . grid , tempdir = self . tempdir ) tdm . configs . add_to_configs ( self . configs ) pid_mag = tdm . parman . add_data ( m [ 0 , : ] ) tdm . register_magnitude_model ( pid_mag ) if m . shape [ 0 ] == ...
For a given model return a tdMan instance
12,230
def J ( self , log_sigma ) : m = 1.0 / np . exp ( log_sigma ) tdm = self . _get_tdm ( m ) tdm . model ( sensitivities = True , ) measurements = tdm . measurements ( ) sens_list = [ ] for config_nr , cids in sorted ( tdm . assignments [ 'sensitivities' ] . items ( ) ) : sens_list . append ( tdm . parman . parsets [ cids...
Return the sensitivity matrix
12,231
def set_ironic_uuid(self, uuid_list):
    """Map a list of Ironic UUIDs onto the BM nodes, positionally.

    Uses zip instead of the original manual ``next(iter(...))`` pattern;
    extra UUIDs beyond the number of nodes are now ignored instead of
    surfacing as a confusing StopIteration.
    """
    for node, uuid in zip(self.nodes, uuid_list):
        node.uuid = uuid
Map a list of Ironic UUID to BM nodes .
12,232
def find_resistance ( record ) : for feature in record . features : labels = set ( feature . qualifiers . get ( "label" , [ ] ) ) cassettes = labels . intersection ( _ANTIBIOTICS ) if len ( cassettes ) > 1 : raise RuntimeError ( "multiple resistance cassettes detected" ) elif len ( cassettes ) == 1 : return _ANTIBIOTIC...
Infer the antibiotics resistance of the given record .
12,233
def shell_cmd ( args , cwd = None ) : if cwd is None : cwd = os . path . abspath ( '.' ) if not isinstance ( args , ( list , tuple ) ) : args = [ args ] ps = Popen ( args , shell = True , cwd = cwd , stdout = PIPE , stderr = PIPE , close_fds = True ) stdout , stderr = ps . communicate ( ) if ps . returncode != 0 : if s...
Returns stdout as string or None on failure
12,234
def reverse_complement ( self , id = False , name = False , description = False , features = True , annotations = False , letter_annotations = True , dbxrefs = False , ) : return type ( self ) ( super ( CircularRecord , self ) . reverse_complement ( id = id , name = name , description = description , features = feature...
Return a new CircularRecord with reverse complement sequence .
12,235
def load_private_key(self, priv_key):
    """Register the SSH private key.

    ``priv_key`` is the path to an RSA private key file.
    NOTE(review): only RSA keys are supported here — confirm that is intended.
    """
    with open(priv_key) as key_file:
        self._private_key = paramiko.RSAKey.from_private_key(key_file)
Register the SSH private key .
12,236
def start ( self ) : if self . via_ip : connect_to = self . via_ip self . description = '[%s@%s via %s]' % ( self . _user , self . _hostname , self . via_ip ) else : connect_to = self . _hostname self . description = '[%s@%s]' % ( self . _user , self . _hostname ) exception = None for i in range ( 60 ) : try : self . _...
Start the ssh client and connect to the host .
12,237
def _get_channel ( self ) : channel = self . _transport . open_session ( ) channel . set_combine_stderr ( True ) channel . get_pty ( ) return channel
Returns a channel according to if there is a redirection to do or not .
12,238
def print_fields(bf, *args, **kwargs):
    """Print all the fields of a Bitfield object (values in hex) to stdout.

    Primarily a diagnostic aid during debugging; extra args/kwargs are
    forwarded to ``print``.
    """
    hex_fields = {name: hex(value) for name, value in bf.items()}
    print(bf.base, hex_fields, *args, **kwargs)
Print all the fields of a Bitfield object to stdout. This is primarily a diagnostic aid during debugging.
12,239
def clone(self):
    """Return a new bitfield with the same value.

    The returned object is a detached copy, no longer linked to the original
    bitfield's (possibly slow or side-effecting) backing storage.
    """
    copy = self.__class__()
    copy.base = self.base
    return copy
Return a new bitfield with the same value . The returned value is a copy and so is no longer linked to the original bitfield . This is important when the original is located at anything other than normal memory with accesses to it either slow or having side effects . Creating a clone and working against that clone mean...
12,240
def new(self, base: pathlib.PurePath = pathlib.PurePath(), include_intermediates: bool = True) -> Iterator[str]:
    """Yield the path of this entity if it is new in the comparison.

    NOTE(review): ``include_intermediates`` is not used in this body.
    """
    if not self.is_new:
        return
    yield str(base / self.right.name)
Find the list of new paths in this comparison .
12,241
def modified(self, base: pathlib.PurePath = pathlib.PurePath()) -> Iterator[str]:
    """Yield the path of this file if it was modified in the comparison."""
    if not self.is_modified:
        return
    yield str(base / self.right.name)
Find the paths of modified files . There is no option to include intermediate directories as all files and directories exist in both the left and right trees .
12,242
def deleted(self, base: pathlib.PurePath = pathlib.PurePath(), include_children: bool = True, include_directories: bool = True) -> Iterator[str]:
    """Yield the path of this entity if it was deleted between left and right.

    NOTE(review): ``include_children``/``include_directories`` are unused here.
    """
    if not self.is_deleted:
        return
    yield str(base / self.left.name)
Find the paths of entities deleted between the left and right entities in this comparison .
12,243
def compare ( left : Optional [ L ] , right : Optional [ R ] ) -> 'Comparison[L, R]' : if isinstance ( left , File ) and isinstance ( right , Directory ) : return FileDirectoryComparison ( left , right ) if isinstance ( left , Directory ) and isinstance ( right , File ) : return DirectoryFileComparison ( left , right )...
Calculate the comparison of two entities .
12,244
def print_hierarchy(self, level: int = 0, file: IO[str] = sys.stdout) -> None:
    """Print this comparison with indentation proportional to ``level``.

    (The visible body prints only this node; recursion over children is not
    shown here.)
    """
    indent = ' ' * (self._INDENT_SIZE * level)
    print(indent + str(self), file=file)
Print this comparison and its children with indentation to represent nesting .
12,245
def is_modified(self) -> bool:
    """Whether the file contents differ between left and right (by md5).

    Always False when the file is new or deleted, since "modified" requires
    the file to exist on both sides.
    """
    if self.is_new:
        return False
    if self.is_deleted:
        return False
    return self.left.md5 != self.right.md5
Find whether the files on the left and right are different . Note modified implies the contents of the file have changed which is predicated on the file existing on both the left and right . Therefore this will be false if the file on the left has been deleted or the file on the right is new .
12,246
def generate_index ( fn , cols = None , names = None , sep = " " ) : assert cols is not None , "'cols' was not set" assert names is not None , "'names' was not set" assert len ( cols ) == len ( names ) bgzip , open_func = get_open_func ( fn , return_fmt = True ) data = pd . read_csv ( fn , sep = sep , engine = "c" , us...
Build a index for the given file .
12,247
def get_open_func ( fn , return_fmt = False ) : bgzip = None with open ( fn , "rb" ) as i_file : bgzip = i_file . read ( 3 ) == b"\x1f\x8b\x08" if bgzip and not HAS_BIOPYTHON : raise ValueError ( "needs BioPython to index a bgzip file" ) open_func = open if bgzip : open_func = BgzfReader try : with open_func ( fn , "r"...
Get the opening function .
12,248
def get_index ( fn , cols , names , sep ) : if not has_index ( fn ) : return generate_index ( fn , cols , names , sep ) file_index = read_index ( get_index_fn ( fn ) ) if len ( set ( names ) - ( set ( file_index . columns ) - { 'seek' } ) ) != 0 : raise ValueError ( "{}: missing index columns: reindex" . format ( fn ) ...
Restores the index for a given file .
12,249
def write_index(fn, index):
    """Write the index to file.

    Layout: the magic check string, then the zlib-compressed UTF-8 CSV dump
    of the index DataFrame.
    """
    csv_bytes = bytes(
        index.to_csv(None, index=False, encoding="utf-8"),
        encoding="utf-8",
    )
    with open(fn, "wb") as o_file:
        o_file.write(_CHECK_STRING)
        o_file.write(zlib.compress(csv_bytes))
Writes the index to file .
12,250
def read_index(fn):
    """Read an index back from file (inverse of ``write_index``).

    Raises ValueError when the file does not start with the magic check
    string.
    """
    with open(fn, "rb") as i_file:
        header = i_file.read(len(_CHECK_STRING))
        if header != _CHECK_STRING:
            raise ValueError("{}: not a valid index file".format(fn))
        decompressed = zlib.decompress(i_file.read()).decode(encoding="utf-8")
    return pd.read_csv(io.StringIO(decompressed))
Reads index from file .
12,251
def make_path(phase) -> str:
    """Create the folder path where the phase's metadata and optimizer pickle
    are saved: output_path/phase_path + phase_name + phase_tag."""
    return f"{conf.instance.output_path}/{phase.phase_path}{phase.phase_name}{phase.phase_tag}"
Create the path to the folder at which the metadata and optimizer pickle should be saved
12,252
def save_optimizer_for_phase(phase):
    """Save the optimizer associated with the phase as a pickle."""
    with open(make_optimizer_pickle_path(phase), "w+b") as f:
        # pickle.dump writes directly to the handle (same bytes as dumps+write)
        pickle.dump(phase.optimizer, f)
Save the optimizer associated with the phase as a pickle
12,253
def assert_optimizer_pickle_matches_for_phase ( phase ) : path = make_optimizer_pickle_path ( phase ) if os . path . exists ( path ) : with open ( path , "r+b" ) as f : loaded_optimizer = pickle . loads ( f . read ( ) ) if phase . optimizer != loaded_optimizer : raise exc . PipelineException ( f"Can't restart phase at ...
Assert that the previously saved optimizer is equal to the phase s optimizer if a saved optimizer is found .
12,254
def add(self, phase_name, result):
    """Add the result of a phase.

    Raises PipelineException when a result for ``phase_name`` already exists.
    """
    if phase_name in self.__result_dict:
        message = "Results from a phase called {} already exist in the pipeline".format(phase_name)
        raise exc.PipelineException(message)
    self.__result_list.append(result)
    self.__result_dict[phase_name] = result
Add the result of a phase .
12,255
def from_phase(self, phase_name):
    """Returns the result of a previous phase by its name.

    Raises PipelineException (listing the known phases) when not found.
    """
    if phase_name in self.__result_dict:
        return self.__result_dict[phase_name]
    raise exc.PipelineException(
        "No previous phase named {} found in results ({})".format(
            phase_name, ", ".join(self.__result_dict.keys())
        )
    )
Returns the result of a previous phase by its name
12,256
def save_metadata(self, phase, data_name):
    """Save a .metadata file in the phase folder recording the pipeline name,
    the phase name, and the name of the data being fit."""
    metadata = "pipeline={}\nphase={}\ndata={}".format(
        self.pipeline_name, phase.phase_name, data_name
    )
    with open("{}/.metadata".format(make_path(phase)), "w+") as f:
        f.write(metadata)
Save metadata associated with the phase such as the name of the pipeline the name of the phase and the name of the data being fit
12,257
def run_function ( self , func , data_name = None , assert_optimizer_pickle_matches = True ) : results = ResultsCollection ( ) for i , phase in enumerate ( self . phases ) : logger . info ( "Running Phase {} (Number {})" . format ( phase . optimizer . phase_name , i ) ) if assert_optimizer_pickle_matches : assert_optim...
Run the function for each phase in the pipeline .
12,258
def strtobytes(input, encoding):
    """Take a str and transform it into a byte array (py2/py3 dispatch)."""
    if sys.version_info[0] >= 3:
        return _strtobytes_py3(input, encoding)
    return _strtobytes_py2(input, encoding)
Take a str and transform it into a byte array .
12,259
def index_impute2(fn):
    """Index an IMPUTE2 file (chrom/name/pos taken from columns 0-2)."""
    logger.info("Indexing {} (IMPUTE2)".format(fn))
    impute2_index(
        fn,
        cols=[0, 1, 2],
        names=["chrom", "name", "pos"],
        sep=" ",
    )
    logger.info("Index generated")
Indexes an IMPUTE2 file .
12,260
def index_bgen ( fn , legacy = False ) : logger . info ( "Indexing {} (BGEN) using 'bgenix'{}" . format ( fn , " (legacy mode)" if legacy else "" , ) ) command = [ "bgenix" , "-g" , fn , "-index" ] if legacy : command . append ( "-with-rowid" ) try : logger . info ( "Executing '{}'" . format ( " " . join ( command ) ) ...
Indexes a BGEN file .
12,261
def create_untl_xml_subelement ( parent , element , prefix = '' ) : subelement = SubElement ( parent , prefix + element . tag ) if element . content is not None : subelement . text = element . content if element . qualifier is not None : subelement . attrib [ "qualifier" ] = element . qualifier if element . children > ...
Create a UNTL XML subelement .
12,262
def add_missing_children ( required_children , element_children ) : element_tags = [ element . tag for element in element_children ] for contained_element in required_children : if contained_element not in element_tags : try : added_child = PYUNTL_DISPATCH [ contained_element ] ( content = '' ) except : added_child = P...
Determine if there are elements not in the children that need to be included as blank elements in the form .
12,263
def set_qualifier(self, value):
    """Set the (stripped) qualifier for the element.

    Raises UNTLStructureException if the element does not allow a qualifier.
    """
    if not self.allows_qualifier:
        raise UNTLStructureException(
            'Element "%s" does not allow a qualifier' % (self.tag,)
        )
    self.qualifier = value.strip()
Set the qualifier for the element .
12,264
def add_form ( self , ** kwargs ) : vocabularies = kwargs . get ( 'vocabularies' , None ) qualifier = kwargs . get ( 'qualifier' , None ) content = kwargs . get ( 'content' , None ) parent_tag = kwargs . get ( 'parent_tag' , None ) superuser = kwargs . get ( 'superuser' , False ) if qualifier is not None and content is...
Add the form attribute to the UNTL Python object .
12,265
def record_content_length(self):
    """Calculate the length of the record's string form, excluding metadata."""
    record = py2dict(self)
    record.pop('meta', None)
    return len(str(record))
Calculate length of record excluding metadata .
12,266
def create_form_data ( self , ** kwargs ) : children = kwargs . get ( 'children' , [ ] ) sort_order = kwargs . get ( 'sort_order' , None ) solr_response = kwargs . get ( 'solr_response' , None ) superuser = kwargs . get ( 'superuser' , False ) vocabularies = self . get_vocabularies ( ) for element in children : element...
Create groupings of form elements .
12,267
def create_form_groupings ( self , vocabularies , solr_response , element_group_dict , sort_order ) : element_list = [ ] for group_name , group_list in element_group_dict . items ( ) : element_group = UNTL_GROUP_DISPATCH [ group_name ] ( vocabularies = vocabularies , solr_response = solr_response , group_name = group_n...
Create a group object from groupings of element objects .
12,268
def get_vocabularies(self):
    """Get the vocabularies to pull the qualifiers from.

    Fetches the verbose vocabulary listing over HTTP with a 15-second
    socket timeout; raises UNTLStructureException on any failure.
    """
    socket.setdefaulttimeout(15)
    vocab_url = VOCABULARIES_URL.replace('all', 'all-verbose')
    try:
        # SECURITY: eval() of a remote HTTP response executes arbitrary code
        # coming from the network. This should be replaced with json.loads
        # once the service emits JSON — flagged here, not silently changed.
        vocab_dict = eval(urllib2.urlopen(vocab_url).read())
    except Exception:
        # was a bare `except:`, which also swallowed SystemExit/KeyboardInterrupt
        raise UNTLStructureException('Could not retrieve the vocabularies')
    return vocab_dict
Get the vocabularies to pull the qualifiers from .
12,269
def create_xml_string(self):
    """Create a UNTL document string from a UNTL metadata root object.

    NOTE(review): under Python 3, lxml's tostring returns bytes and this
    concatenation would fail — this code appears to target Python 2.
    """
    root = self.create_xml()
    header = '<?xml version="1.0" encoding="UTF-8"?>\n'
    return header + tostring(root, pretty_print=True)
Create a UNTL document in a string from a UNTL metadata root object .
12,270
def create_xml ( self , useNamespace = False ) : UNTL_NAMESPACE = 'http://digital2.library.unt.edu/untl/' UNTL = '{%s}' % UNTL_NAMESPACE NSMAP = { 'untl' : UNTL_NAMESPACE } if useNamespace : root = Element ( UNTL + self . tag , nsmap = NSMAP ) else : root = Element ( self . tag ) self . sort_untl ( UNTL_XML_ORDER ) for...
Create an ElementTree representation of the object .
12,271
def create_element_dict ( self ) : untl_dict = { } for element in self . children : if element . tag not in untl_dict : untl_dict [ element . tag ] = [ ] element_dict = { } if element . qualifier is not None : element_dict [ 'qualifier' ] = element . qualifier if len ( element . contained_children ) > 0 : child_dict = ...
Convert a UNTL Python object into a UNTL Python dictionary .
12,272
def create_xml_file(self, untl_filename):
    """Create a UNTL XML file at ``untl_filename``.

    Raises UNTLStructureException on any failure.
    """
    try:
        # 'with' guarantees the handle is closed even if write() raises
        # (the original leaked the handle on a write error), and the bare
        # `except:` is narrowed to Exception.
        with open(untl_filename, 'w') as f:
            f.write(self.create_xml_string().encode('utf-8'))
    except Exception:
        raise UNTLStructureException(
            'Failed to create UNTL XML file. File: %s' % (untl_filename)
        )
Create a UNTL file .
12,273
def sort_untl(self, sort_structure):
    """Sort the children in place by their tag's position in
    ``sort_structure`` (a pre-ordered list of tags)."""
    def position(child):
        return sort_structure.index(child.tag)
    self.children.sort(key=position)
Sort the UNTL Python object by the index of a sort structure pre - ordered list .
12,274
def generate_form_data(self, **kwargs):
    """Create a FormGenerator for this element.

    Pads ``self.children`` with blank placeholders for any required-but-missing
    children first, then hands the (possibly caller-supplied) kwargs, with
    'children' overwritten, to FormGenerator.
    """
    self.children = add_missing_children(self.contained_children, self.children)
    kwargs['children'] = self.children
    return FormGenerator(**kwargs)
Create a form dictionary with the key being the element name and the value being a list of form element objects .
12,275
def contributor_director(**kwargs):
    """Create an ETD_MSContributor with the expanded role name, or None when
    the qualifier has no known expansion."""
    qualifier = kwargs.get('qualifier')
    if qualifier not in ETD_MS_CONTRIBUTOR_EXPANSION:
        return None
    return ETD_MSContributor(
        role=ETD_MS_CONTRIBUTOR_EXPANSION[qualifier],
        **kwargs
    )
Define the expanded qualifier name .
12,276
def date_director(**kwargs):
    """Direct which class should be used based on the date qualifier, or
    return None if the date should not be converted at all.

    'digitized' dates are dropped; every other qualifier yields an
    ETD_MSDate with stripped content. (The original special-cased
    'creation' redundantly — that branch produced the identical result.)
    """
    if kwargs.get('qualifier') == 'digitized':
        return None
    return ETD_MSDate(content=kwargs.get('content').strip())
Direct which class should be used based on the date qualifier or if the date should be converted at all .
12,277
def subject_director(**kwargs):
    """Direct how to handle a subject element: keyword ('KWD') or blank
    qualifiers become a plain subject; anything else is used as a scheme."""
    qualifier = kwargs.get('qualifier')
    if qualifier in ('KWD', ''):
        return ETD_MSSubject(content=kwargs.get('content'))
    return ETD_MSSubject(scheme=qualifier, **kwargs)
Direct how to handle a subject element .
12,278
def get_child_content(self, children, element_name):
    """Get the first matching child's content from ``children``; '' if none."""
    matches = (child.content for child in children if child.tag == element_name)
    return next(matches, '')
Get the requested element content from a list of children .
12,279
def shiftedColorMap ( cmap , start = 0 , midpoint = 0.5 , stop = 1.0 , name = 'shiftedcmap' ) : cdict = { 'red' : [ ] , 'green' : [ ] , 'blue' : [ ] , 'alpha' : [ ] } reg_index = np . linspace ( start , stop , 257 ) shift_index = np . hstack ( [ np . linspace ( 0.0 , midpoint , 128 , endpoint = False ) , np . linspace ...
Function to offset the center of a colormap . Useful for data with a negative min and positive max and you want the middle of the colormap s dynamic range to be at zero
12,280
def read_lastmodfile ( directory ) : filename = '{0}/exe/inv.lastmod' . format ( directory ) if ( not os . path . isfile ( filename ) ) : return None linestring = open ( filename , 'r' ) . readline ( ) . strip ( ) linestring = linestring . replace ( "\n" , '' ) linestring = linestring . replace ( ".mag" , '' ) linestri...
Return the number of the final inversion result .
12,281
def setHandler(self, event_name, callback):
    """Set a handler for the given event.

    Raises ValueError for an unknown event and TypeError when ``callback``
    is not callable. Bug fix: the original tested ``callable(event_name)``
    (wrong variable, wrong polarity), so the not-callable check never worked.
    """
    if event_name not in self.handlers:
        raise ValueError('{} is not a valid event'.format(event_name))
    if not callable(callback):
        raise TypeError('{} is not callable'.format(callback))
    self.handlers[event_name] = callback
Set an handler for given event .
12,282
def isHandlerPresent(self, event_name):
    """Check whether the given event currently has a handler.

    Raises ValueError for an unknown event name.
    """
    try:
        return self.handlers[event_name] is not None
    except KeyError:
        raise ValueError('{} is not a valid event'.format(event_name))
Check if an event has an handler .
12,283
def removeHandler(self, event_name):
    """Remove (clear to None) the handler for the given event.

    Raises ValueError for an unknown event name.
    """
    known = event_name in self.handlers
    if not known:
        raise ValueError('{} is not a valid event'.format(event_name))
    self.handlers[event_name] = None
Remove handler for given event .
12,284
def _get_fct_number_of_arg ( self , fct ) : py_version = sys . version_info [ 0 ] if py_version >= 3 : return len ( inspect . signature ( fct ) . parameters ) return len ( inspect . getargspec ( fct ) [ 0 ] )
Get the number of argument of a fuction .
12,285
def event_tracker ( func ) : @ wraps ( func ) async def wrapper ( * args , ** kwargs ) : event = Event ( args [ 0 ] ) session = kwargs [ 'session' ] service_name = session . name await track_event ( event , EventState . started , service_name ) await func ( * args , ** kwargs ) await track_event ( event , EventState . ...
Event tracking handler
12,286
def ensure_ajax ( valid_request_methods , error_response_context = None ) : def real_decorator ( view_func ) : def wrap_func ( request , * args , ** kwargs ) : if not isinstance ( request , HttpRequest ) : return generate_error_json_response ( "Invalid request!" , error_response_context ) elif not request . is_ajax ( )...
Intends to ensure the received the request is ajax request and it is included in the valid request methods
12,287
def generate_error_json_response ( error_dict , error_response_context = None ) : response = error_dict if isinstance ( error_dict , str ) : response = { "error" : response } if error_response_context is None : error_response_context = { 'draw' : 0 , 'recordsTotal' : 0 , 'recordsFiltered' : 0 , 'data' : [ ] } response ...
Intends to build an error json response . If the error_response_context is None then we generate this response using data tables format
12,288
def _mergeGoSymbols ( self , jsons = [ ] ) : symbols = { } symbols [ "types" ] = [ ] symbols [ "funcs" ] = [ ] symbols [ "vars" ] = [ ] for file_json in jsons : symbols [ "types" ] += file_json [ "types" ] symbols [ "funcs" ] += file_json [ "funcs" ] symbols [ "vars" ] += file_json [ "vars" ] return symbols
Exported symbols for a given package does not have any prefix . So I can drop all import paths that are file specific and merge all symbols . Assuming all files in the given package has mutual exclusive symbols .
12,289
def read(self, n):
    """Read ``n`` bytes from the mapped view at the current position and
    advance the position (Windows-only: uses kernel32.RtlMoveMemory)."""
    buf = ctypes.create_string_buffer(n)
    # copy from the mapped view at the current offset into our buffer
    ctypes.windll.kernel32.RtlMoveMemory(buf, self.view + self.pos, n)
    self.pos += n
    return buf.raw
Read n bytes from mapped view .
12,290
def _output(cls, fluents: Sequence[FluentPair]) -> Sequence[tf.Tensor]:
    """Converts fluents to tensors with datatype tf.float32."""
    def as_float32(tensor):
        if tensor.dtype == tf.float32:
            return tensor
        return tf.cast(tensor, tf.float32)
    return tuple(as_float32(fluent.tensor) for _, fluent in fluents)
Converts fluents to tensors with datatype tf . float32 .
12,291
def set(self, key, value):
    """Sets a hyperparameter and returns the whole store.

    Can be used to set an array of hyperparameters (pass a list as value).
    NOTE(review): the store is returned (presumably a dict) — confirm
    callers rely on this return value.
    """
    self.store[key] = value
    return self.store
Sets a hyperparameter . Can be used to set an array of hyperparameters .
12,292
def config_at(self, i):
    """Gets the i-th Config from the cartesian product of the list-valued
    hyperparameters; non-list values are passed through unchanged.

    ``i`` is decoded as a mixed-radix number over the list lengths, in the
    store's iteration order.
    """
    selections = {}
    remaining = i
    for key, value in self.store.items():
        if isinstance(value, list):
            selections[key] = value[remaining % len(value)]
            remaining //= len(value)
        else:
            selections[key] = value
    return Config(selections)
Gets the ith config
12,293
def top(self, sort_by):
    """Return the results sorted by the given key function (the caller's
    ordering decides what "best" means)."""
    return sorted(self.results, key=sort_by)
Get the best results according to your custom sort method .
12,294
def load_or_create_config(self, filename, config=None):
    """Load a config from disk, creating and saving one when the file is
    missing (a random config is generated if none is supplied).

    Idiom fix: ``config is None`` (identity) replaces ``config == None``.
    """
    os.makedirs(os.path.dirname(os.path.expanduser(filename)), exist_ok=True)
    if os.path.exists(filename):
        return self.load(filename)
    if config is None:
        config = self.random_config()
    self.save(filename, config)
    return config
Loads a config from disk . Defaults to a random config if none is specified
12,295
def configure(self, repositories):
    """Prepare the system to be ready for an undercloud installation.

    Runs the host-preparation steps in order: enable the given package
    repositories, create the 'stack' user, install base packages, clean
    the system, yum-update (allowing a reboot), install OSP, set SELinux
    to permissive, and fix the hostname. The order is significant.
    """
    self.enable_repositories(repositories)
    self.create_stack_user()
    self.install_base_packages()
    self.clean_system()
    self.yum_update(allow_reboot=True)
    self.install_osp()
    self.set_selinux('permissive')
    self.fix_hostname()
Prepare the system to be ready for an undercloud installation .
12,296
def openstack_undercloud_install ( self ) : instack_undercloud_ver , _ = self . run ( 'repoquery --whatprovides /usr/share/instack-undercloud/puppet-stack-config/puppet-stack-config.pp' ) if instack_undercloud_ver . rstrip ( '\n' ) == 'instack-undercloud-0:2.2.0-1.el7ost.noarch' : LOG . warn ( 'Workaround for BZ1298189...
Deploy an undercloud on the host .
12,297
def create_flavor ( self , name ) : self . add_environment_file ( user = 'stack' , filename = 'stackrc' ) self . run ( 'openstack flavor create --id auto --ram 4096 --disk 40 --vcpus 1 baremetal' , user = 'stack' , success_status = ( 0 , 1 ) ) self . run ( 'openstack flavor set --property "cpu_arch"="x86_64" --property...
Create a new baremetal flavor .
12,298
def list_nodes(self):
    """List the Ironic nodes' UUIDs by running 'ironic node-list' on the
    host (as the stack user, with stackrc sourced)."""
    self.add_environment_file(user='stack', filename='stackrc')
    output, _ = self.run(
        "ironic node-list --fields uuid|awk '/-.*-/ {print $2}'",
        user='stack')
    return output.split()
List the Ironic nodes UUID .
12,299
def set_flavor ( self , node , flavor ) : command = ( 'ironic node-update {uuid} add ' 'properties/capabilities=profile:{flavor},boot_option:local' ) . format ( uuid = node . uuid , flavor = flavor ) node . flavor = flavor self . add_environment_file ( user = 'stack' , filename = 'stackrc' ) self . run ( command , user...
Set a flavor to a given ironic node .