idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
53,800
def parse(self, resource=None):
    """Parse resources and rebuild the inventory from the dispatcher.

    :param resource: Resources to parse; defaults to ``self.__resources__``.
    :return: ``self.inventory`` (the dispatcher's collection).
    :raises UndispatchedTextError: when ``RAISE_ON_UNDISPATCHED`` is True
        and a text could not be dispatched.
    """
    if resource is None:
        resource = self.__resources__
    self.inventory = self.dispatcher.collection
    try:
        self._parse(resource)
    except MyCapytain.errors.UndispatchedTextError as E:
        if self.RAISE_ON_UNDISPATCHED is True:
            # Re-wrap the library error in the local exception type.
            raise UndispatchedTextError(E)
    # Refresh from the dispatcher after parsing.
    self.inventory = self.dispatcher.collection
    return self.inventory
Parse a list of directories and dispatch the texts they contain.
53,801
def _dispatch(self, textgroup, directory):
    """Dispatch *textgroup* through the dispatcher.

    The SPARQL dispatcher does not need to dispatch works: the link is
    stored in the database through the textgroup.
    """
    self.dispatcher.dispatch(textgroup, path=directory)
Sparql dispatcher do not need to dispatch works as the link is DB stored through Textgroup
53,802
def ask(question, default_answer=False, default_answer_str="no"):
    """Ask for user input.

    :param question: prompt shown to the user.
    :param default_answer: bool returned when no usable TTY is present.
    :param default_answer_str: default shown by the interactive prompt.
    :return: the user's (or default) boolean answer.
    """
    response = default_answer

    def should_ignore_tty():
        # Treat CI/TEST environments as non-interactive even when stdin
        # is a TTY. NOTE(review): the lookup is case-sensitive, so e.g.
        # CI=True would NOT be recognized — confirm that is intended.
        ret_to_bool = {"yes": True, "no": False, "true": True, "false": False}
        envs = [os.getenv("CI", default="no"), os.getenv("TEST", default="no")]
        vals = [ret_to_bool[val] for val in envs if val in ret_to_bool]
        return any(vals)

    ignore_stdin_istty = should_ignore_tty()
    has_tty = sys.stdin.isatty() and not ignore_stdin_istty
    if has_tty:
        response = query_yes_no(question, default_answer_str)
    else:
        LOG.debug("NoTTY: %s -> %s", question, response)
    return response
Ask for user input .
53,803
def load_csv(path, delimiter=','):
    """Load a CSV file from *path* and yield its rows as dictionaries.

    :param path: filesystem path of the CSV file.
    :param delimiter: field delimiter passed to DictReader.
    :raises ClientException: when the file cannot be opened.
    """
    try:
        # BUG FIX: the file was opened in 'rb' (a Python-2 idiom); on
        # Python 3 csv.DictReader requires text mode, and the csv docs
        # mandate newline='' to avoid newline translation issues.
        with open(path, 'r', newline='') as csvfile:
            reader = DictReader(csvfile, delimiter=delimiter)
            for row in reader:
                yield row
    except (OSError, IOError):
        raise ClientException("File not found: {}".format(path))
Load CSV file from path and yield CSV rows
53,804
def partition_payload(data, key, thresh):
    """Yield partitions of a payload.

    Splits ``data[key]`` into chunks of at most *thresh* elements, each
    wrapped back into a dict under the same key.
    """
    chunks = data[key]
    total = len(chunks)
    start = 0
    while start < total:
        yield {key: chunks[start:start + thresh]}
        start += thresh
Yield partitions of a payload
53,805
def commands(self):
    """Yield the shell-quoted command line of every benchmark child.

    Reads the 'command' entry from each child's YAML report file.
    """
    for child in self._children:
        with open(osp.join(child, YAML_REPORT_FILE)) as istr:
            command = yaml.safe_load(istr)['command']
        # Quote each argument so the line is safe to paste into a shell.
        yield ' '.join(map(six.moves.shlex_quote, command))
Get all commands of the benchmark category
53,806
def _module_env(self, execution):
    """Set the process environment per the execution's modules/env vars.

    Generator-style context manager body: loads every module listed
    under 'modules', overlays 'environment' onto os.environ, yields,
    and restores the previous environment afterwards.
    """
    env = copy.copy(os.environ)
    try:
        for mod in execution.get('modules') or []:
            Module.load(mod)
        os.environ.update(execution.get('environment') or {})
        yield
    finally:
        # NOTE(review): rebinding os.environ to a copy does not call
        # putenv()/unsetenv() for keys added meanwhile — confirm child
        # processes spawned later see the intended environment.
        os.environ = env
Set current process environment according to execution environment and modules
53,807
def gather_metrics(self, runs):
    """Write a JSON file with the result of every run.

    :param runs: mapping whose values are lists of run directories.
    """
    for run_dirs in runs.values():
        # NOTE(review): the metrics file is re-opened with 'w' for every
        # value of *runs*, so only the last group's data survives —
        # confirm *runs* is expected to hold a single group here.
        with open(JSON_METRICS_FILE, 'w') as ostr:
            ostr.write('[\n')
            for i in range(len(run_dirs)):
                with open(osp.join(run_dirs[i], YAML_REPORT_FILE)) as istr:
                    data = yaml.safe_load(istr)
                # Drop bookkeeping fields; tag each record with its dir.
                data.pop('category', None)
                data.pop('command', None)
                data['id'] = run_dirs[i]
                json.dump(data, ostr, indent=2)
                if i != len(run_dirs) - 1:
                    ostr.write(',')
                ostr.write('\n')
            ostr.write(']\n')
Write a JSON file with the result of every run
53,808
def _check_metrics(cls, schema, metrics):
    """Ensure that returned metrics are declared in *schema*.

    :param schema: mapping of metric name -> metric specification.
    :param metrics: mapping of metric name -> value to validate.
    :raises Exception: when a metric name is absent from the schema.
    """
    for name, value in metrics.items():
        metric = schema.get(name)
        if not metric:
            message = "Unexpected metric '{}' returned".format(name)
            raise Exception(message)
        # Delegate per-metric validation.
        cls._check_metric(schema, metric, name, value)
Ensure that returned metrics are properly exposed
53,809
def load_frame_building_sample_data():
    """Build a sample 6-storey, 3-bay BuildingFrame for examples/tests.

    :return: a populated models.BuildingFrame with geometry, masses,
        beam/column section properties and frame counts set.
    """
    number_of_storeys = 6
    interstorey_height = 3.4
    masses = 40.0e3  # per storey
    n_bays = 3
    fb = models.BuildingFrame(number_of_storeys, n_bays)
    fb.interstorey_heights = interstorey_height * np.ones(number_of_storeys)
    fb.floor_length = 18.0
    fb.floor_width = 16.0
    fb.storey_masses = masses * np.ones(number_of_storeys)
    fb.bay_lengths = [6., 6.0, 6.0]
    # Identical sections are repeated up the height of the frame.
    fb.set_beam_prop("depth", [0.5, 0.5, 0.5], repeat="up")
    fb.set_beam_prop("width", [0.4, 0.4, 0.4], repeat="up")
    fb.set_column_prop("width", [0.5, 0.5, 0.5, 0.5], repeat="up")
    fb.set_column_prop("depth", [0.5, 0.5, 0.5, 0.5], repeat="up")
    fb.n_seismic_frames = 3
    fb.n_gravity_frames = 0
    return fb
Sample data for the BuildingFrame object
53,810
def match_subgroup(sequence, pattern):
    """Yield the sub-group dictionary of each element matching *pattern*."""
    for item in sequence:
        hit = re.match(pattern, item)
        if hit is None:
            continue
        yield hit.groupdict()
Yield the sub-group element dictionary for each element that matches a regex pattern.
53,811
def add_regex_start_end(pattern_function):
    """Decorator that anchors a pattern function's result with ^ and $."""
    @wraps(pattern_function)
    def func_wrapper(*args, **kwargs):
        inner = pattern_function(*args, **kwargs)
        return r'^{}$'.format(inner)
    return func_wrapper
Decorator for adding regex pattern start and end characters .
53,812
def convert_stress_to_mass(q, width, length, gravity):
    """Convert a foundation stress to an equivalent mass.

    :param q: stress applied over the footprint.
    :param width: footprint width.
    :param length: footprint length.
    :param gravity: gravitational acceleration.
    :return: equivalent mass (q * area / g).
    """
    area = width * length
    return q * area / gravity
Converts a foundation stress to an equivalent mass .
53,813
def add_to_obj(obj, dictionary, objs=None, exceptions=None, verbose=0):
    """Cycle through a dictionary and set its key-value pairs on *obj*.

    :param obj: target object receiving attributes.
    :param dictionary: mapping of attribute name -> raw value.
    :param objs: passed to get_key_value to resolve object references.
    :param exceptions: keys to skip.
    :param verbose: when truthy, print each processed/assigned pair.
    :raises AttributeError: when an attribute cannot be set on *obj*.
    """
    if exceptions is None:
        exceptions = []
    for item in dictionary:
        if item in exceptions:
            continue
        # None values are skipped rather than assigned.
        if dictionary[item] is not None:
            if verbose:
                print("process: ", item, dictionary[item])
            key, value = get_key_value(dictionary[item], objs, key=item)
            if verbose:
                print("assign: ", key, value)
            try:
                setattr(obj, key, value)
            except AttributeError:
                raise AttributeError(
                    "Can't set {0}={1} on object: {2}".format(key, value, obj))
Cycles through a dictionary and adds the key - value pairs to an object .
53,814
def create_quantiles(items: Sequence, lower_bound, upper_bound):
    """Create quantile (start, end) boundaries for *items*.

    Divides [lower_bound, upper_bound] into len(items) equal intervals
    and pairs each item with its interval.

    :return: generator of (item, (start, end)) tuples.
    """
    interval = (upper_bound - lower_bound) / len(items)
    # BUG FIX: the original accumulated from zero, so the first quantile
    # always started at 0 regardless of lower_bound. Offset every
    # boundary by lower_bound instead.
    return (
        (item, (lower_bound + i * interval, lower_bound + (i + 1) * interval))
        for i, item in enumerate(items))
Create quantile start and end boundaries .
53,815
def tupleize(element, ignore_types=(str, bytes)):
    """Cast a single element to a tuple.

    Non-string iterables are returned untouched; anything else (including
    strings/bytes) is wrapped in a one-element tuple.
    """
    wrap_anyway = isinstance(element, ignore_types)
    if hasattr(element, '__iter__') and not wrap_anyway:
        return element
    return (element,)
Cast a single element to a tuple .
53,816
def dictionize(fields: Sequence, records: Sequence) -> Generator:
    """Create dictionaries mapping *fields* onto each record's values."""
    for rec in records:
        yield dict(zip(fields, rec))
Create dictionaries mapping fields to record data .
53,817
def flip_iterable_dict(d: dict) -> dict:
    """Transform a dict of iterables so each value maps back to its key.

    Relies on the helpers cartesian_product and disjoint_union to expand
    every (values, key) pair into individual (value, key) tuples.
    """
    value_keys = disjoint_union(
        (cartesian_product((v, k)) for k, v in d.items()))
    return dict(value_keys)
Transform dictionary to unpack values to map to respective key .
53,818
def call_next(self, *args, **kwargs) -> t.List[run.RunInfo]:
    """Call all child extensions with the given arguments.

    :return: flat list of results from all children; extensions that
        return None are skipped with a warning.
    """
    all_results = []
    for ext in self.next_extensions:
        LOG.debug(" %s ", ext)
        results = ext(*args, **kwargs)
        LOG.debug(" %s => %s", ext, results)
        if results is None:
            LOG.warning("No result from: %s", ext)
            continue
        # Normalize scalar results into a one-element list.
        result_list = []
        if isinstance(results, c.Iterable):
            result_list.extend(results)
        else:
            result_list.append(results)
        all_results.extend(result_list)
    return all_results
Call all child extensions with the given arguments .
53,819
def print(self, indent=0):
    """Print a structural view of the registered extensions.

    :param indent: current indentation depth; children are indented by
        two additional spaces.
    """
    LOG.info("%s:: %s", indent * " ", self.__class__)
    for ext in self.next_extensions:
        ext.print(indent=indent + 2)
Print a structural view of the registered extensions .
53,820
def cached(func):
    """Memoize a function's first result (call-once cache).

    The wrapped function is executed once; all subsequent calls return
    the first result regardless of arguments.
    """
    # BUG FIX: the original used `ret is None` as the "not yet computed"
    # test, so a function legitimately returning None was re-invoked on
    # every call. A private sentinel distinguishes "unset" from None.
    _missing = object()
    ret = _missing

    def call_or_cache(*args, **kwargs):
        nonlocal ret
        if ret is _missing:
            ret = func(*args, **kwargs)
        return ret

    return call_or_cache
Memoize a function result .
53,821
def is_valid(container, path):
    """Check whether a container exists at *path* and is unpacked.

    Compares the hash stored next to the container image against the
    hash file inside the unpacked directory.

    :return: True when both hash files exist and match, else False.
    """
    try:
        tmp_hash_path = container.filename + ".hash"
        with open(tmp_hash_path, 'r') as tmp_file:
            tmp_hash = tmp_file.readline()
    except IOError:
        LOG.info("No .hash-file in the tmp-directory.")
        # BUG FIX: the original fell through here; if the unpacked hash
        # file existed, the comparison below raised NameError because
        # tmp_hash was never bound. Without a hash there is nothing to
        # compare against.
        return False

    container_hash_path = local.path(path) / "gentoo.tar.bz2.hash"
    if container_hash_path.exists():
        with open(container_hash_path, 'r') as hash_file:
            container_hash = hash_file.readline()
        return container_hash == tmp_hash
    return False
Checks if a container exists and is unpacked .
53,822
def unpack(container, path):
    """Unpack a container image into *path* so it is usable by uchroot.

    Downloads the remote image, then untars it — through uchroot when
    the archive carries an '.erlent' marker — and copies the hash file
    next to the unpacked tree.
    """
    from benchbuild.utils.run import run
    from benchbuild.utils.uchroot import no_args

    path = local.path(path)
    c_filename = local.path(container.filename)
    name = c_filename.basename
    if not path.exists():
        path.mkdir()
    with local.cwd(path):
        Wget(container.remote, name)

        uchroot = no_args()
        uchroot = uchroot["-E", "-A", "-C", "-r", "/", "-w",
                          os.path.abspath("."), "--"]

        # Images packed inside a uchroot carry an '.erlent' marker and
        # can be untarred directly; plain images go through uchroot.
        has_erlent = bash[
            "-c",
            "tar --list -f './{0}' | grep --silent '.erlent'".format(name)]
        has_erlent = (has_erlent & TF)

        untar = local["/bin/tar"]["xf", "./" + name]
        if not has_erlent:
            untar = uchroot[untar]

        run(untar["--exclude=dev/*"])
        if not os.path.samefile(name, container.filename):
            rm(name)
        else:
            LOG.warning("File contents do not match: %s != %s", name,
                        container.filename)
        cp(container.filename + ".hash", path)
Unpack a container usable by uchroot .
53,823
def local(self):
    """Find the current location of the container image.

    Unpacks the image into the tmp directory when it is missing or its
    hash no longer matches (see is_valid).

    :return: path of the unpacked container directory.
    """
    assert self.name in CFG["container"]["images"].value
    # The method name shadows plumbum's `local` only in the class
    # namespace; the body still resolves the module-level `local`.
    tmp_dir = local.path(str(CFG["tmp_dir"]))
    target_dir = tmp_dir / self.name
    if not target_dir.exists() or not is_valid(self, target_dir):
        unpack(self, target_dir)
    return target_dir
Finds the current location of a container . Also unpacks the project if necessary .
53,824
def src_file(self):
    """Get the latest stage3 tarball src URI from Gentoo's latest.txt.

    :return: the URI string, or "NOT-FOUND" when the download fails.
    """
    try:
        src_uri = (curl[Gentoo._LATEST_TXT] | tail["-n", "+3"] |
                   cut["-f1", "-d "])().strip()
    except ProcessExecutionError as proc_ex:
        src_uri = "NOT-FOUND"
        LOG.error("Could not determine latest stage3 src uri: %s",
                  str(proc_ex))
    return src_uri
Get the latest src_uri for a stage 3 tarball .
53,825
def version(self):
    """Return the build date of the gentoo container as YYYY-MM-DD.

    Parses the Unix timestamp on line 2 of latest.txt; returns
    "unknown" when the download fails.
    """
    try:
        _version = (curl[Gentoo._LATEST_TXT] | awk['NR==2{print}'] |
                    cut["-f2", "-d="])().strip()
        _version = datetime.utcfromtimestamp(
            int(_version)).strftime("%Y-%m-%d")
    except ProcessExecutionError as proc_ex:
        _version = "unknown"
        LOG.error("Could not determine timestamp: %s", str(proc_ex))
    return _version
Return the build date of the gentoo container .
53,826
def main(argv=None):
    """ben-csv entry point.

    Exports campaign data to CSV, or lists available fields when
    --peek is given.

    :return: the CSVExporter when *argv* was supplied (testing hook).
    """
    arguments = cli_common(__doc__, argv=argv)
    csv_export = CSVExporter(arguments['CAMPAIGN-DIR'], arguments['--output'])
    if arguments['--peek']:
        csv_export.peek()
    else:
        fieldsstr = arguments.get('--fields')
        fields = fieldsstr.split(',') if fieldsstr else None
        csv_export.export(fields)
    if argv is not None:
        return csv_export
ben - csv entry point
53,827
def time_indices(npts, dt, start, end, index):
    """Determine the new start and end indices of the time series.

    :param npts: number of points in the series.
    :param dt: time step.
    :param start: start time, or start index when *index* is truthy.
    :param end: end time (or index); -1 means "to the end".
    :param index: when truthy, *start*/*end* are already indices.
    :raises exceptions.ModelWarning: when the end index exceeds *npts*.
    :return: (start_index, end_index) tuple.
    """
    if index is False:
        # Convert times to sample indices.
        if end != -1:
            e_index = int(end / dt) + 1
        else:
            e_index = end
        s_index = int(start / dt)
    else:
        s_index = start
        e_index = end
    if e_index > npts:
        raise exceptions.ModelWarning(
            "Cut point is greater than time series length")
    return s_index, e_index
Determine the new start and end indices of the time series .
53,828
def finished(cls, jobid):
    """Check whether a SLURM job is finished.

    Queries sacct for the job's end time; a job is finished when the
    end field is neither 'Unknown' nor empty.
    """
    sacct_cmd = [SACCT, '-n', '-X', '-o', "end", '-j', str(jobid)]
    raw = subprocess.check_output(sacct_cmd)
    end_time = raw.strip().decode()
    return end_time not in {'Unknown', ''}
Check whether a SLURM job is finished or not
53,829
def begin_run_group(project):
    """Begin a run_group in the database.

    Creates the group, stamps its begin time and marks it 'running'.

    :return: (group, session) tuple for later commit/rollback.
    """
    from benchbuild.utils.db import create_run_group
    from datetime import datetime

    group, session = create_run_group(project)
    group.begin = datetime.now()
    group.status = 'running'
    session.commit()
    return group, session
Begin a run_group in the database .
53,830
def end_run_group(group, session):
    """End the run_group successfully.

    Stamps the group's end time, marks it 'completed' and commits.
    """
    from datetime import datetime

    group.status = 'completed'
    group.end = datetime.now()
    session.commit()
End the run_group successfully .
53,831
def fail_run_group(group, session):
    """End the run_group unsuccessfully.

    Stamps the group's end time, marks it 'failed' and commits.
    """
    from datetime import datetime

    group.status = 'failed'
    group.end = datetime.now()
    session.commit()
End the run_group unsuccessfully .
53,832
def exit_code_from_run_infos(run_infos: t.List[RunInfo]) -> int:
    """Generate a single exit code from a list of RunInfo objects.

    A scalar RunInfo yields its own retcode. For a list, the maximum
    (worst) retcode wins; when every retcode is <= 0 the minimum is
    returned instead.
    """
    assert run_infos is not None
    if not hasattr(run_infos, "__iter__"):
        return run_infos.retcode
    retcodes = [info.retcode for info in run_infos]
    worst = max(retcodes)
    if worst == 0:
        return min(retcodes)
    return worst
Generate a single exit code from a list of RunInfo objects .
53,833
def track_execution(cmd, project, experiment, **kwargs):
    """Guard the execution of the given command.

    Generator-style context manager body: yields a RunInfo wrapper for
    *cmd* and commits the run information after the with-block.
    """
    runner = RunInfo(cmd=cmd, project=project, experiment=experiment, **kwargs)
    yield runner
    runner.commit()
Guard the execution of the given command .
53,834
def with_env_recursive(cmd, **envvars):
    """Recursively update the environment of cmd and all its subcommands.

    :param cmd: plumbum BoundCommand/BoundEnvCommand chain to modify.
    :return: the same command object, mutated in place.
    """
    from plumbum.commands.base import BoundCommand, BoundEnvCommand
    if isinstance(cmd, BoundCommand):
        cmd.cmd = with_env_recursive(cmd.cmd, **envvars)
    elif isinstance(cmd, BoundEnvCommand):
        # NOTE(review): if BoundEnvCommand were a subclass of
        # BoundCommand this branch would be unreachable — verify the
        # isinstance ordering against plumbum's class hierarchy.
        cmd.envvars.update(envvars)
        cmd.cmd = with_env_recursive(cmd.cmd, **envvars)
    return cmd
Recursively updates the environment of cmd and all its subcommands .
53,835
def in_builddir(sub='.'):
    """Decorate a project phase with a local working directory change.

    :param sub: subdirectory of the project's builddir to switch into.
    :return: decorator wrapping the phase function.
    """
    from functools import wraps

    def wrap_in_builddir(func):
        @wraps(func)
        def wrap_in_builddir_func(self, *args, **kwargs):
            p = local.path(self.builddir) / sub
            if not p.exists():
                LOG.error("%s does not exist.", p)
            if p == local.cwd:
                LOG.debug("CWD already is %s", p)
                # BUG FIX: was `func(self, *args, *kwargs)` — the single
                # star unpacked the keyword dict's KEYS as positional
                # arguments instead of forwarding the keywords.
                return func(self, *args, **kwargs)
            with local.cwd(p):
                return func(self, *args, **kwargs)

        return wrap_in_builddir_func

    return wrap_in_builddir
Decorate a project phase with a local working directory change .
53,836
def store_config(func):
    """Decorator for storing the configuration in the project's builddir.

    Writes .benchbuild.yml into the builddir before invoking the
    wrapped phase.
    """
    from functools import wraps

    @wraps(func)
    def wrap_store_config(self, *args, **kwargs):
        CFG.store(local.path(self.builddir) / ".benchbuild.yml")
        return func(self, *args, **kwargs)

    return wrap_store_config
Decorator for storing the configuration in the project's builddir.
53,837
def clean_directories(builddir, in_dir=True, out_dir=True):
    """Remove the container-in/out directories, if the user confirms.

    :param builddir: root build directory.
    :param in_dir: consider deleting the container-in directory.
    :param out_dir: consider deleting the container-out directory.
    """
    container_in = local.path(builddir) / "container-in"
    container_out = local.path(builddir) / "container-out"
    if in_dir and container_in.exists():
        if ui.ask("Should I delete '{0}'?".format(container_in)):
            container_in.delete()
    if out_dir and container_out.exists():
        if ui.ask("Should I delete '{0}'?".format(container_out)):
            container_out.delete()
Remove the in and out of the container if confirmed by the user .
53,838
def setup_directories(builddir):
    """Create the container-in and container-out directories."""
    root = local.path(builddir)
    for child in ("container-in", "container-out"):
        sub = root / child
        if not sub.exists():
            sub.mkdir()
Create the in and out directories of the container .
53,839
def setup_container(builddir, _container):
    """Prepare the container and return the path where it can be found.

    Copies the image into container-in and untars it — through uchroot
    unless the archive carries an '.erlent' marker.
    """
    build_dir = local.path(builddir)
    in_dir = build_dir / "container-in"
    container_path = local.path(_container)
    with local.cwd(builddir):
        container_bin = container_path.basename
        container_in = in_dir / container_bin
        download.Copy(_container, container_in)
        uchrt = uchroot.no_args()
        with local.cwd("container-in"):
            uchrt = uchrt["-E", "-A", "-u", "0", "-g", "0", "-C", "-r", "/",
                          "-w", os.path.abspath("."), "--"]
        # Detect images packed inside a uchroot ('.erlent' marker).
        has_erlent = bash[
            "-c",
            "tar --list -f './{0}' | grep --silent '.erlent'".format(
                container_in)]
        has_erlent = (has_erlent & TF)
        if not has_erlent:
            cmd = local["/bin/tar"]["xf"]
            cmd = uchrt[cmd[container_bin]]
        else:
            cmd = tar["xf"]
            cmd = cmd[container_in]
        with local.cwd("container-in"):
            cmd("--exclude=dev/*")
        rm(container_in)
    return in_dir
Prepare the container and returns the path where it can be found .
53,840
def run_in_container(command, container_dir):
    """Run a given command inside a container.

    Spawns a uchroot rooted at *container_dir* and runs *command* in
    the foreground. Logs an error and returns None when the command
    does not exist inside the container.
    """
    container_p = local.path(container_dir)
    with local.cwd(container_p):
        uchrt = uchroot.with_mounts()
        uchrt = uchrt["-E", "-A", "-u", "0", "-g", "0", "-C", "-w", "/",
                      "-r", container_p]
        uchrt = uchrt["--"]
        # Verify the target binary exists inside the chroot first.
        cmd_path = container_p / command[0].lstrip('/')
        if not cmd_path.exists():
            LOG.error("The command does not exist inside the container! %s",
                      cmd_path)
            return
        cmd = uchrt[command]
        return cmd & FG
Run a given command inside a container .
53,841
def pack_container(in_container, out_file):
    """Pack a container image into a .tar.bz2 archive.

    Tars *in_container* into the container-out staging area, updates
    the hash file, moves both to *out_file*, and registers the new
    container in the configuration.
    """
    container_filename = local.path(out_file).basename
    out_container = local.cwd / "container-out" / container_filename
    out_dir = out_container.dirname

    # Pack and hash the container image.
    with local.cwd(in_container):
        tar("cjf", out_container, ".")
    c_hash = download.update_hash(out_container)
    # BUG FIX: the original created the directory only when it already
    # existed (`if out_dir.exists()`), which is inverted.
    if not out_dir.exists():
        mkdir("-p", out_dir)
    mv(out_container, out_file)
    mv(out_container + ".hash", out_file + ".hash")

    new_container = {"path": out_file, "hash": str(c_hash)}
    CFG["container"]["known"] += new_container
Pack a container image into a . tar . bz2 archive .
53,842
def setup_bash_in_container(builddir, _container, outfile, shell):
    """Set up a bash environment inside a container.

    Drops the user into an interactive shell inside the container; when
    the shell exits successfully the container is repacked to *outfile*
    and the configuration is stored.
    """
    with local.cwd(builddir):
        # BUG FIX: the adjacent string literals were missing a space,
        # printing "non-zeroexit code".
        print("Entering bash inside User-Chroot. Prepare your image and "
              "type 'exit' when you are done. If bash exits with a non-zero "
              "exit code, no new container will be stored.")
        store_new_container = True
        try:
            run_in_container(shell, _container)
        except ProcessExecutionError:
            store_new_container = False

        if store_new_container:
            print("Packing new container image.")
            pack_container(_container, outfile)
            config_path = str(CFG["config_file"])
            CFG.store(config_path)
            print("Storing config in {0}".format(os.path.abspath(config_path)))
Setup a bash environment inside a container .
53,843
def set_input_container(_container, cfg):
    """Save the input path for the container in the configuration.

    :return: True when the path exists and was stored, else False.
    """
    if not _container:
        return False
    if not _container.exists():
        return False
    cfg["container"]["input"] = str(_container)
    return True
Save the input for the container in the configurations .
53,844
def run(self, context):
    """Set up a gentoo container suitable for PolyJIT (interactive only).

    Configures networking and portage inside the chroot, optionally
    syncs/upgrades, installs the configured packages, and repacks the
    container image.
    """
    if not sys.stdout.isatty():
        return
    with local.cwd(context.in_container):
        from benchbuild.projects.gentoo import gentoo
        gentoo.setup_networking()
        gentoo.configure_portage()
        sed_in_chroot = uchroot.uchroot()["/bin/sed"]
        emerge_in_chroot = uchroot.uchroot()["/usr/bin/emerge"]
        has_pkg = uchroot.uchroot()["/usr/bin/qlist", "-I"]
        # Drop any compiler overrides from make.conf.
        run.run(sed_in_chroot["-i", '/CC=/d', "/etc/portage/make.conf"])
        run.run(sed_in_chroot["-i", '/CXX=/d', "/etc/portage/make.conf"])
        want_sync = bool(CFG["container"]["strategy"]["polyjit"]["sync"])
        want_upgrade = bool(
            CFG["container"]["strategy"]["polyjit"]["upgrade"])
        packages = CFG["container"]["strategy"]["polyjit"]["packages"].value
        with local.env(MAKEOPTS="-j{0}".format(int(CFG["jobs"]))):
            if want_sync:
                LOG.debug("Synchronizing portage.")
                run.run(emerge_in_chroot["--sync"])
            if want_upgrade:
                LOG.debug("Upgrading world.")
                run.run(emerge_in_chroot["--autounmask-only=y", "-uUDN",
                                         "--with-bdeps=y", "@world"])
            for pkg in packages:
                # Skip packages already installed in the image.
                if has_pkg[pkg["name"]] & TF:
                    continue
                env = pkg["env"]
                with local.env(**env):
                    run.run(emerge_in_chroot[pkg["name"]])
        gentoo.setup_benchbuild()
    print("Packing new container image.")
    with local.cwd(context.builddir):
        pack_container(context.in_container, context.out_container)
Setup a gentoo container suitable for PolyJIT .
53,845
def input_file(self, _container):
    """Find the input path of a uchroot container.

    Stores the path in the configuration; when the literal path does
    not exist, falls back to looking up the known-container hash list.

    :raises ValueError: when no matching path can be found.
    """
    p = local.path(_container)
    if set_input_container(p, CFG):
        return
    # BUG FIX: the original passed the undefined name `container` here,
    # raising NameError instead of performing the known-hash lookup.
    p = find_hash(CFG["container"]["known"].value, _container)
    if set_input_container(p, CFG):
        return
    raise ValueError("The path '{0}' does not exist.".format(p))
Find the input path of a uchroot container .
53,846
def output_file(self, _container):
    """Find and write the output path of a chroot container.

    Asks before overwriting an existing path and exits when declined.
    """
    p = local.path(_container)
    if p.exists():
        if not ui.ask("Path '{0}' already exists."
                      " Overwrite?".format(p)):
            sys.exit(0)
    CFG["container"]["output"] = str(p)
Find and writes the output path of a chroot container .
53,847
def discretize_soil_profile(sp, incs=None, target=1.0):
    """Split the soil profile into slices and store as a dictionary.

    :param sp: soil profile object.
    :param incs: per-layer target slice thickness; defaults to *target*
        for every layer.
    :param target: default slice thickness.
    :return: dict with "thickness", "unit_mass" and "shear_vel" arrays.
    """
    if incs is None:
        incs = np.ones(sp.n_layers) * target
    dd = {}
    dd["thickness"] = []
    dd["unit_mass"] = []
    dd["shear_vel"] = []
    cum_thickness = 0
    for i in range(sp.n_layers):
        # Layers are 1-indexed on the profile object.
        sl = sp.layer(i + 1)
        thickness = sp.layer_height(i + 1)
        n_slices = max(int(thickness / incs[i]), 1)
        slice_thickness = float(thickness) / n_slices
        for j in range(n_slices):
            cum_thickness += slice_thickness
            # Below the ground water level the saturated mass applies.
            if cum_thickness >= sp.gwl:
                rho = sl.unit_sat_mass
                saturation = True
            else:
                rho = sl.unit_dry_mass
                saturation = False
            # Prefer a stress-dependent velocity when the soil supports it.
            if hasattr(sl, "get_shear_vel_at_v_eff_stress"):
                v_eff = sp.vertical_effective_stress(cum_thickness)
                vs = sl.get_shear_vel_at_v_eff_stress(v_eff, saturation)
            else:
                vs = sl.calc_shear_vel(saturation)
            dd["shear_vel"].append(vs)
            dd["unit_mass"].append(rho)
            dd["thickness"].append(slice_thickness)
    for item in dd:
        dd[item] = np.array(dd[item])
    return dd
Splits the soil profile into slices and stores as dictionary
53,848
def override(self, item, value):
    """Set a parameter even when inconsistent with existing values.

    Attempts a normal set first; on ModelError, replays the parameter
    stack with *item* forced first and collects conflicting parameters.

    :return: list of parameter names that conflicted with the override.
    :raises KeyError: when the object has no such property.
    """
    if not hasattr(self, item):
        # BUG FIX: the original passed ("...: %s", item) as two KeyError
        # arguments, so the name was never interpolated.
        raise KeyError("Soil Object does not have property: %s" % item)
    try:
        setattr(self, item, value)
        return []
    except ModelError:
        pass
    # Replay the stack with the overridden item forced to the front.
    temp_stack = list(self.stack)
    temp_stack[:] = (value for value in temp_stack if value[0] != item)
    temp_stack.insert(0, (item, value))
    self.reset_all()
    conflicts = []
    for item, value in temp_stack:
        try:
            setattr(self, item, value)
        except ModelError:
            conflicts.append(item)
    return conflicts
Can set a parameter to a value that is inconsistent with existing values .
53,849
def reset_all(self):
    """Reset every input parameter to None and clear the stack."""
    for name in self.inputs:
        setattr(self, "_%s" % name, None)
    self.stack = []
Resets all parameters to None
53,850
def get_shear_vel(self, saturated):
    """Calculate the shear wave velocity from G and unit mass.

    :param saturated: use the saturated (True) or dry (False) unit mass.
    :return: sqrt(g_mod / mass), or None when inputs are missing.
    """
    try:
        if saturated:
            mass = self.unit_sat_mass
        else:
            mass = self.unit_dry_mass
        return np.sqrt(self.g_mod / mass)
    except TypeError:
        # A missing (None) modulus or mass makes the division fail.
        return None
Calculate the shear wave velocity
53,851
def saturation(self, value):
    """Set the saturation (volume of water / volume of voids).

    Cross-checks against the saturation derived from the moist and dry
    weights when those are available, and rolls back on failure.

    :raises ModelError: when inconsistent with the computed saturation
        or with dependent quantities.
    """
    value = clean_float(value)
    if value is None:
        return
    try:
        unit_moisture_weight = self.unit_moist_weight - self.unit_dry_weight
        unit_moisture_volume = unit_moisture_weight / self._pw
        saturation = unit_moisture_volume / self._calc_unit_void_volume()
        if saturation is not None and not ct.isclose(
                saturation, value, rel_tol=self._tolerance):
            raise ModelError("New saturation (%.3f) is inconsistent "
                             "with calculated value (%.3f)" %
                             (value, saturation))
    except TypeError:
        # Required inputs missing -> cross-check not possible.
        pass
    old_value = self.saturation
    self._saturation = value
    try:
        self.recompute_all_weights_and_void()
        self._add_to_stack("saturation", value)
    except ModelError as e:
        # Roll back so the object stays consistent.
        self._saturation = old_value
        raise ModelError(e)
Volume of water to volume of voids
53,852
def specific_gravity(self, value):
    """Set the relative weight of the solid (specific gravity).

    :raises ModelError: when inconsistent with the already-set
        unit_dry_weight and void_ratio.
    """
    value = clean_float(value)
    if value is None:
        return
    specific_gravity = self._calc_specific_gravity()
    if specific_gravity is not None and not ct.isclose(
            specific_gravity, value, rel_tol=self._tolerance):
        raise ModelError(
            "specific gravity is inconsistent with set unit_dry_weight and void_ratio")
    self._specific_gravity = float(value)
    self.stack.append(("specific_gravity", float(value)))
    self.recompute_all_weights_and_void()
Set the relative weight of the solid
53,853
def add_layer(self, depth, soil):
    """Add a soil to the SoilProfile at a set depth.

    When the profile is hydrostatic, the layer's saturation is derived
    from its position relative to the ground water level.
    """
    self._layers[depth] = soil
    self._sort_layers()
    if self.hydrostatic:
        if depth >= self.gwl:
            # Entirely below the water table.
            soil.saturation = 1.0
        else:
            li = self.get_layer_index_by_depth(depth)
            layer_height = self.layer_height(li)
            if layer_height is None:
                soil.saturation = 0.0
            elif depth + layer_height <= self.gwl:
                # Entirely above the water table.
                soil.saturation = 0.0
            else:
                # Straddles the water table: partial saturation.
                sat_height = depth + self.layer_height(li) - self.gwl
                soil.saturation = sat_height / self.layer_height(li)
Adds a soil to the SoilProfile at a set depth .
53,854
def _sort_layers(self):
    """Sort the layers dictionary by depth (ascending keys)."""
    self._layers = OrderedDict(
        sorted(self._layers.items(), key=lambda t: t[0]))
Sort the layers by depth .
53,855
def layer_height(self, layer_int):
    """Get the layer thickness by 1-based layer number.

    :return: thickness of the layer, or None for the bottom layer when
        the profile height is unknown.
    """
    if layer_int != self.n_layers:
        return self.layer_depth(layer_int + 1) - self.layer_depth(layer_int)
    # Bottom layer: thickness runs down to the profile height.
    if self.height is None:
        return None
    return self.height - self.layer_depth(layer_int)
Get the layer height by layer id number .
53,856
def equivalent_crust_cohesion(self):
    """Calculate the equivalent crust cohesion strength.

    According to Karamitros et al. (2013), eq. 14.

    :return: equivalent cohesion, or implicitly None for single-layer
        profiles.
    """
    deprecation("Will be moved to a function")
    if len(self.layers) > 1:
        # NOTE(review): other methods address layers 1-based; confirm
        # layer(0) really selects the crust here.
        crust = self.layer(0)
        crust_phi_r = np.radians(crust.phi)
        equivalent_cohesion = crust.cohesion + crust.k_0 * \
            self.crust_effective_unit_weight * self.layer_depth(1) / 2 * \
            np.tan(crust_phi_r)
        return equivalent_cohesion
Calculate the equivalent crust cohesion strength according to Karamitros et al. (2013), pg. 8, eq. 14.
53,857
def get_v_total_stress_at_depth(self, z):
    """Vertical total stress at depth(s) *z* (scalar or sequence).

    :return: a float for a scalar depth, or an np.array for a sequence.
    """
    if hasattr(z, "__len__"):
        stresses = [self.one_vertical_total_stress(depth) for depth in z]
        return np.array(stresses)
    return self.one_vertical_total_stress(z)
Determine the vertical total stress at depth z where z can be a number or an array of numbers .
53,858
def one_vertical_total_stress(self, z_c):
    """Determine the vertical total stress at a single depth *z_c*.

    Accumulates dry and saturated unit-weight contributions layer by
    layer down to z_c, splitting at the ground water level.

    :raises AnalysisError: when a submerged layer lacks a saturated
        unit weight.
    """
    total_stress = 0.0
    depths = self.depths
    end = 0
    for layer_int in range(1, len(depths) + 1):
        l_index = layer_int - 1
        if z_c > depths[layer_int - 1]:
            if l_index < len(depths) - 1 and z_c > depths[l_index + 1]:
                # z_c lies below this layer: use its full height.
                height = depths[l_index + 1] - depths[l_index]
                bottom_depth = depths[l_index + 1]
            else:
                # z_c lies within this layer: last contribution.
                end = 1
                height = z_c - depths[l_index]
                bottom_depth = z_c
            if bottom_depth <= self.gwl:
                total_stress += height * self.layer(layer_int).unit_dry_weight
            else:
                if self.layer(layer_int).unit_sat_weight is None:
                    raise AnalysisError(
                        "Saturated unit weight not defined for layer %i." %
                        layer_int)
                # Split the contribution at the water table.
                sat_height = bottom_depth - max(self.gwl, depths[l_index])
                dry_height = height - sat_height
                total_stress += dry_height * \
                    self.layer(layer_int).unit_dry_weight + \
                    sat_height * self.layer(layer_int).unit_sat_weight
        else:
            end = 1
        if end:
            break
    return total_stress
Determine the vertical total stress at a single depth z_c .
53,859
def get_v_eff_stress_at_depth(self, y_c):
    """Vertical effective stress at depth *y_c*.

    Effective stress = total stress - hydrostatic pore pressure.
    """
    total = self.get_v_total_stress_at_depth(y_c)
    pore_pressure = self.get_hydrostatic_pressure_at_depth(y_c)
    return total - pore_pressure
Determine the vertical effective stress at a single depth z_c .
53,860
def shear_vel_at_depth(self, y_c):
    """Get the shear wave velocity at depth *y_c*.

    Uses the stress-dependent velocity when the soil supports it,
    otherwise the plain saturated/dry velocity.
    """
    sl = self.get_soil_at_depth(y_c)
    # Below the water table the soil is treated as saturated.
    saturation = y_c > self.gwl
    if hasattr(sl, "get_shear_vel_at_v_eff_stress"):
        v_eff = self.get_v_eff_stress_at_depth(y_c)
        return sl.get_shear_vel_at_v_eff_stress(v_eff, saturation)
    return sl.get_shear_vel(saturation)
Get the shear wave velocity at a depth .
53,861
def path(self):
    """Return the path to the file if the ref is a file.

    :return: the path string, or None when the ref is not a string or
        not a file-scheme URL.
    """
    if not isinstance(self.ref, str):
        return None
    u = parse_app_url(self.ref)
    if u.inner.proto != 'file':
        return None
    return u.path
Return the path to the file if the ref is a file
53,862
def doc_dir(self):
    """The absolute directory of the document.

    :return: absolute dirname of the referenced path, or None when the
        document has no reference.
    """
    from os.path import abspath

    if not self.ref:
        return None
    u = parse_app_url(self.ref)
    return abspath(dirname(u.path))
The absolute directory of the document
53,863
def remove_term(self, t):
    """Remove a top-level term from the document.

    Child terms must be removed through their parent. Also detaches the
    term from its section and from its parent's children.
    """
    try:
        self.terms.remove(t)
    except ValueError:
        pass
    if t.section and t.parent_term_lc == 'root':
        t.section = self.add_section(t.section)
        t.section.remove_term(t, remove_from_doc=False)
    if t.parent:
        try:
            t.parent.remove_child(t)
        except ValueError:
            pass
Only removes top - level terms . Child terms can be removed at the parent .
53,864
def new_section(self, name, params=None):
    """Return a new section, replacing any existing one of that name.

    Section args are taken from the declared sections when available.
    """
    self.sections[name.lower()] = SectionTerm(None, name, term_args=params,
                                              doc=self)
    s = self.sections[name.lower()]
    if name.lower() in self.decl_sections:
        s.args = self.decl_sections[name.lower()]['args']
    return s
Return a new section
53,865
def get_or_new_section(self, name, params=None):
    """Create a new section or return an existing one of the same name.

    :param name: section name (sections are keyed by lower-case name).
    :param params: term args forwarded to SectionTerm when creating.
    """
    # BUG FIX: membership was tested with the raw name while sections
    # are stored under the lower-cased name, so e.g. "Foo" would
    # overwrite an existing "foo" section.
    key = name.lower()
    if key not in self.sections:
        self.sections[key] = SectionTerm(None, name, term_args=params,
                                         doc=self)
    return self.sections[key]
Create a new section or return an existing one of the same name
53,866
def sort_sections(self, order):
    """Sort sections per the names in *order* (case-insensitive).

    Sections not named in *order* are appended afterwards in their
    original order.
    """
    wanted = [entry.lower() for entry in order]
    reordered = OrderedDict()
    for key in wanted:
        if key in self.sections:
            reordered[key] = self.sections[key]
    for key in self.sections:
        if key not in wanted:
            reordered[key] = self.sections[key]
    assert len(self.sections) == len(reordered)
    self.sections = reordered
Sort sections according to the section names in the order list . All remaining sections are added to the end in their original order
53,867
def find(self, term, value=False, section=None, _expand_derived=True,
         **kwargs):
    """Return a list of terms, possibly restricted to a section.

    Uses joined term notation such as 'Root.Name'. Extra kwargs filter
    on term properties: name='foobar' matches terms whose 'name'
    property value is 'foobar'.

    :param term: joined term name, or a list/tuple of them.
    :param value: when not False, only terms with this value match.
    :param section: section name, or list/tuple of section names.
    :param _expand_derived: also search terms derived from *term*.
    """
    import itertools

    if kwargs:
        # Filter the unfiltered result set on term properties.
        terms = self.find(term, value, section)
        found_terms = []
        for t in terms:
            if all(t.get_value(k) == v for k, v in kwargs.items()):
                found_terms.append(t)
        return found_terms

    def in_section(term, section):
        if section is None:
            return True
        if term.section is None:
            return False
        if isinstance(section, (list, tuple)):
            # NOTE(review): this recurses with `t`, which resolves via
            # the enclosing loop variable rather than `term` — confirm
            # that is intended.
            return any(in_section(t, e) for e in section)
        else:
            return section.lower() == term.section.name.lower()

    if _expand_derived:
        try:
            try:
                term = list(self.derived_terms[term.lower()]) + [term]
            except AttributeError:
                # *term* is a list of terms.
                terms = []
                for t in term:
                    # NOTE(review): this appends the whole list `term`
                    # instead of the element `t` — confirm intended.
                    terms.append(term)
                    for dt in self.derived_terms[t.lower()]:
                        terms.append(dt)
        except KeyError as e:
            pass

    if isinstance(term, (list, tuple)):
        return list(itertools.chain(
            *[self.find(e, value=value, section=section,
                        _expand_derived=False) for e in term]))
    else:
        term = term.lower()
        found = []
        if not '.' in term:
            term = 'root.' + term
        # Root-level terms can be searched directly; others require the
        # full term tree.
        if term.startswith('root.'):
            term_gen = self.terms
        else:
            term_gen = self.all_terms
        for t in term_gen:
            if t.join_lc == 'root.root':
                continue
            assert t.section or t.join_lc == 'root.root' or \
                t.join_lc == 'root.section', t
            if (t.term_is(term) and in_section(t, section) and
                    (value is False or value == t.value)):
                found.append(t)
        return found
Return a list of terms possibly in a particular section . Use joined term notation such as Root . Name The kwargs arg is used to set term properties all of which match returned terms so name = foobar will match terms that have a name property of foobar
53,868
def get(self, term, default=None):
    """Return the first matching term, or *default* when none is found."""
    found = self.find_first(term)
    return found if found else default
Return the first term returning the default if no term is found
53,869
def get_value(self, term, default=None, section=None):
    """Return the first term's value, or *default* when none is found."""
    found = self.find_first(term, value=False, section=section)
    if found is None:
        return default
    return found.value
Return the first value returning the default if no term is found
53,870
def load_terms(self, terms):
    """Create a builder from a sequence of terms, usually a TermInterpreter.

    Routes each term: the first 'root.root' becomes the document root,
    'root.section' terms create sections, other root-level terms are added
    directly, and child terms are assumed to already be linked to a parent.
    Also copies declaration dicts, derived-term maps and parse errors from
    the term source when it provides them.
    """
    for t in terms:
        t.doc = self

        if t.term_is('root.root'):
            if not self.root:
                self.root = t
                self.add_section(t)
            continue

        if t.term_is('root.section'):
            self.add_section(t)
        elif t.parent_term_lc == 'root':
            self.add_term(t)
        else:
            # Child terms are attached to their parents by the term source.
            assert t.parent is not None

    try:
        dd = terms.declare_dict

        self.decl_terms.update(dd['terms'])
        self.decl_sections.update(dd['sections'])

        self.super_terms.update(terms.super_terms())

        # Invert the term -> super-term map into super-term -> {derived terms}.
        kf = lambda e: e[1]  # Sort on the value
        self.derived_terms = {k: set(e[0] for e in g)
                              for k, g in groupby(sorted(self.super_terms.items(), key=kf), kf)}

    except AttributeError as e:
        # Term source doesn't carry declarations; that's fine.
        pass

    try:
        self.errors = terms.errors_as_dict()
    except AttributeError:
        self.errors = {}

    return self
Create a builder from a sequence of terms usually a TermInterpreter
53,871
def cleanse(self):
    """Clean up some terms, like ensuring that the name is a slug."""
    from .util import slugify

    self.ensure_identifier()

    try:
        self.update_name()
    except MetatabError:
        # Couldn't generate a name from the identity terms, so fall back to
        # slugifying whatever Name/Identifier already exists.
        identifier = self['Root'].find_first('Root.Identifier')

        name = self['Root'].find_first('Root.Name')

        if name and name.value:
            name.value = slugify(name.value)
        elif name:
            name.value = slugify(identifier.value)
        else:
            self['Root'].get_or_new_term('Root.Name').value = slugify(identifier.value)
Clean up some terms like ensuring that the name is a slug
53,872
def update_name(self, force=False, create_term=False, report_unchanged=True):
    """Generate the Root.Name term from Dataset, Version, Origin, Time and Space.

    Args:
        force: regenerate the name even when it hasn't changed.
        create_term: create a Root.Name term when one doesn't exist.
        report_unchanged: include "did not change" messages in the result.

    Returns:
        A list of human-readable strings describing what was updated.
    """
    updates = []

    self.ensure_identifier()

    name_term = self.find_first('Root.Name')

    if not name_term:
        if create_term:
            name_term = self['Root'].new_term('Root.Name', '')
        else:
            updates.append("No Root.Name, can't update name")
            return updates

    orig_name = name_term.value
    identifier = self.get_value('Root.Identifier')
    datasetname = self.get_value('Root.Dataset')

    if datasetname:
        # Preferred path: build the name from the identity terms.
        name = self._generate_identity_name()

        if name != orig_name or force:
            name_term.value = name
            updates.append("Changed Name")
        else:
            if report_unchanged:
                updates.append("Name did not change")

    elif not orig_name:
        # No name at all: fall back to the identifier when present.
        if not identifier:
            updates.append("Failed to find DatasetName term or Identity term. Giving up")
        else:
            updates.append("Setting the name to the identifier")
            name_term.value = identifier

    elif orig_name == identifier:
        if report_unchanged:
            updates.append("Name did not change")

    else:
        updates.append("No Root.Dataset, so can't update the name")

    return updates
Generate the Root.Name term from DatasetName, Version, Origin, Time and Space.
53,873
def as_dict(self, replace_value_names=True):
    """Collect every term from every section under a synthetic root and return it as a dict."""
    root = RootSectionTerm(doc=self)

    for section in self:
        for term in section:
            root.terms.append(term)

    return root.as_dict(replace_value_names)
Iterate over all terms and convert them to a dict.
53,874
def rows(self):
    """Iterate over all of the rows of the document, yielding each row as a list of cells."""
    for s_name, s in self.sections.items():

        # Emit a section header (preceded by a blank spacer row) for every
        # section except Root.
        if s.name != 'Root':
            yield ['']
            yield ['Section', s.value] + s.property_names

        for row in s.rows:
            term, value = row
            term = term.replace('root.', '').title()
            try:
                yield [term] + value
            except TypeError:
                # BUGFIX: was a bare `except:`; the only expected failure is
                # `value` being a scalar rather than a list of cells.
                yield [term] + [value]
Iterate over all of the rows
53,875
def all_terms(self):
    """Iterate over all of the terms. The self.terms property has only root-level
    terms; this iterator yields every term, including children.

    NOTE(review): nesting reconstructed from flattened source — the section
    object itself is yielded for non-Root sections, while its terms and their
    descendents are yielded for every section. Confirm against upstream.
    """
    for s_name, s in self.sections.items():
        if s.name != 'Root':
            yield s
        for rterm in s:
            yield rterm
            for d in rterm.descendents:
                yield d
Iterate over all of the terms . The self . terms property has only root level terms . This iterator iterates over all terms
53,876
def as_csv(self):
    """Return a CSV representation of the document as a string."""
    from io import StringIO

    buffer = StringIO()
    writer = csv.writer(buffer)
    writer.writerows(self.rows)

    return buffer.getvalue()
Return a CSV representation as a string
53,877
def as_lines(self):
    """Return a Lines representation of the document as a string."""
    rendered = []
    for tag, val in self.lines:
        if tag == 'Section':
            # Blank line before each new section, for readability.
            rendered.append('')
        rendered.append('{}: {}'.format(tag, val if val is not None else ''))
    return '\n'.join(rendered)
Return a Lines representation as a string
53,878
def _restructure_if_volume_follows_journal(left, right):
    """Remove the volume node if it follows a journal logically in the tree hierarchy.

    When a journal KeywordOp (left) is conjoined with a volume KeywordOp found
    in the right subtree, the volume value is folded into the journal value as
    "journal,volume" and the volume node is dropped.

    Returns:
        The restructured tree, or None when no restructuring took place
        (e.g. the volume keyword is negated, or there is no volume keyword).
    """
    def _get_volume_keyword_op_and_remaining_subtree(right_subtree):
        # Returns (volume_node, remaining_subtree). (None, None) / (None, rest)
        # signal a negated volume that must not be folded; falling off the end
        # (implicit None) means no volume keyword was found at all.
        if isinstance(right_subtree, NotOp) and isinstance(right_subtree.op, KeywordOp) and right_subtree.op.left == Keyword('volume'):
            return None, None
        elif isinstance(right_subtree, AndOp) and isinstance(right_subtree.left, NotOp) and isinstance(right_subtree.left.op, KeywordOp) and right_subtree.left.op.left == Keyword('volume'):
            return None, right_subtree.right
        elif isinstance(right_subtree, KeywordOp) and right_subtree.left == Keyword('volume'):
            return right_subtree, None
        elif isinstance(right_subtree, AndOp) and right_subtree.left.left == Keyword('volume'):
            return right_subtree.left, right_subtree.right

    journal_value = left.right.value

    volume_and_remaining_subtree = _get_volume_keyword_op_and_remaining_subtree(right)
    if not volume_and_remaining_subtree:
        return

    volume_node, remaining_subtree = volume_and_remaining_subtree

    if volume_node:
        # Fold the volume value into the journal keyword's value.
        left.right.value = ','.join([journal_value, volume_node.right.value])

        return AndOp(left, remaining_subtree) if remaining_subtree else left
Remove volume node if it follows a journal logically in the tree hierarchy .
53,879
def _convert_simple_value_boolean_query_to_and_boolean_queries(tree, keyword):
    """Chain SimpleValueBooleanQuery values into nested boolean queries with the given keyword.

    Walks the right-leaning chain of SimpleValueBooleanQuery nodes and rebuilds
    it as AndOp/OrOp nodes whose leaves are KeywordOp nodes (or plain ValueOp
    nodes when no keyword is given), preserving negation via NotOp.
    """
    def _create_operator_node(value_node):
        # Wrap a leaf value with the keyword (or as a bare value), re-applying
        # NotOp when the original leaf was negated.
        base_node = value_node.op if isinstance(value_node, NotOp) else value_node
        updated_base_node = KeywordOp(keyword, base_node) if keyword else ValueOp(base_node)

        return NotOp(updated_base_node) if isinstance(value_node, NotOp) else updated_base_node

    def _get_bool_op_type(bool_op):
        return AndOp if isinstance(bool_op, And) else OrOp

    new_tree_root = _get_bool_op_type(tree.bool_op)(None, None)
    current_tree = new_tree_root
    previous_tree = tree

    while True:
        # Copy the left leaf of the current chain node into the node being built.
        current_tree.left = _create_operator_node(previous_tree.left)

        if not isinstance(previous_tree.right, SimpleValueBooleanQuery):
            # End of the chain: attach the final right leaf and stop.
            current_tree.right = _create_operator_node(previous_tree.right)
            break

        # Descend the chain, allocating the next boolean node as we go.
        previous_tree = previous_tree.right
        current_tree.right = _get_bool_op_type(previous_tree.bool_op)(None, None)
        current_tree = current_tree.right

    return new_tree_root
Chain SimpleValueBooleanQuery values into chained AndOp queries with the given current Keyword .
53,880
def visit_boolean_query(self, node):
    """Convert BooleanRule into AndOp or OrOp nodes.

    Additionally restructures "journal AND volume" conjunctions so that the
    volume is folded into the journal keyword (see
    _restructure_if_volume_follows_journal).
    """
    left = node.left.accept(self)
    right = node.right.accept(self)

    is_journal_keyword_op = isinstance(left, KeywordOp) and left.left == Keyword('journal')

    if is_journal_keyword_op:
        journal_and_volume_conjunction = _restructure_if_volume_follows_journal(left, right)

        if journal_and_volume_conjunction:
            return journal_and_volume_conjunction

    return AndOp(left, right) if isinstance(node.bool_op, And) else OrOp(left, right)
Convert BooleanRule into AndOp or OrOp nodes .
53,881
async def stop(self, force: bool = False) -> None:
    """Cancel the task if it hasn't yet started, or tell it to gracefully
    stop running if it has."""
    Log.debug('stopping task %s', self.name)
    # Cooperative tasks are expected to poll this flag and exit cleanly.
    self.running = False
    if force:
        # Hard-cancel the underlying asyncio task.
        self.task.cancel()
Cancel the task if it hasn't yet started, or tell it to gracefully stop running if it has.
53,882
def author_name_contains_fullnames(author_name):
    """Recognize whether the name contains full name parts, rather than
    initials or only a lastname."""
    def _is_initial(name_part):
        return len(name_part) == 1 or u'.' in name_part

    parsed_name = ParsedName(author_name)

    if len(parsed_name) == 1:
        # Only a lastname: not considered a full name.
        return False
    return not any(_is_initial(part) for part in parsed_name)
Recognizes whether the name contains full name parts and not initials or only lastname .
53,883
def _name_variation_has_only_initials(name):
    """Detect whether the name variation consists only of initials."""
    def _is_initial(part):
        return len(part) == 1 or u'.' in part

    return all(_is_initial(part) for part in ParsedName.loads(name))
Detects whether the name variation consists only from initials .
53,884
def generate_minimal_name_variations(author_name):
    """Generate a small number of name variations.

    The name is unidecoded first, and dashes are flattened to spaces, so the
    variations match transliterated/hyphenated forms. Variations that consist
    only of initials are filtered out. For single-part names (lastname only),
    the lowercased name itself is returned.
    """
    parsed_name = ParsedName.loads(unidecode(author_name))
    if len(parsed_name) > 1:
        lastnames = parsed_name.last.replace('-', ' ')

        non_lastnames = ' '.join(parsed_name.first_list + parsed_name.suffix_list)
        # Strip and flatten dashes so hyphenated first names match, too.
        non_lastnames = non_lastnames.strip().replace('-', ' ')

        # Combinations of "lastname firstnames" and "firstnames lastname",
        # each also abbreviated to the first character of the trailing part.
        return list({
            name_variation.lower()
            for name_variation
            in [
                lastnames + ' ' + non_lastnames,
                lastnames + ' ' + non_lastnames[0],
                non_lastnames + ' ' + lastnames,
                non_lastnames + ' ' + lastnames[0],
            ]
            if not _name_variation_has_only_initials(name_variation)
        })
    else:
        return [parsed_name.dumps().replace('-', ' ').lower()]
Generate a small number of name variations .
53,885
def register_date_conversion_handler(date_specifier_patterns):
    """Decorator for registering handlers that convert text dates to dates.

    Args:
        date_specifier_patterns: the date specifier pattern whose compiled
            regex keys the handler in DATE_SPECIFIERS_CONVERSION_HANDLERS.
    """
    def _decorator(func):
        global DATE_SPECIFIERS_CONVERSION_HANDLERS
        regex = DATE_SPECIFIERS_REGEXES[date_specifier_patterns]
        DATE_SPECIFIERS_CONVERSION_HANDLERS[regex] = func
        return func

    return _decorator
Decorator for registering handlers that convert text dates to dates .
53,886
def _truncate_wildcard_from_date(date_value):
    """Truncate a trailing wildcard component from a date string.

    Args:
        date_value: a date string whose parts are separated by spaces or hyphens.

    Returns:
        The date re-joined with hyphens, minus a trailing wildcard part.

    Raises:
        ValueError: if the date has no recognizable separator.
    """
    if ' ' in date_value:
        date_parts = date_value.split(' ')
    elif '-' in date_value:
        date_parts = date_value.split('-')
    else:
        # BUGFIX: the value was passed as a second positional argument to
        # ValueError instead of being %-formatted into the message.
        raise ValueError("Erroneous date value: %s." % date_value)

    if GenericValue.WILDCARD_TOKEN in date_parts[-1]:
        del date_parts[-1]

    return '-'.join(date_parts)
Truncate wildcard from date parts .
53,887
def _get_next_date_from_partial_date(partial_date):
    """Calculate the next date from the given partial date.

    Adds one unit at the date's finest resolution (year, month or day) and
    returns a PartialDate with the same resolution as the input.
    """
    relativedelta_arg = 'years'

    if partial_date.month:
        relativedelta_arg = 'months'
    if partial_date.day:
        relativedelta_arg = 'days'

    next_date = parse(partial_date.dumps()) + relativedelta(**{relativedelta_arg: 1})

    return PartialDate.from_parts(
        next_date.year,
        next_date.month if partial_date.month else None,
        next_date.day if partial_date.day else None
    )
Calculates the next date from the given partial date .
53,888
def _get_proper_elastic_search_date_rounding_format(partial_date):
    """Return the ES date-math rounding unit matching the resolution of partial_date.

    A full date rounds per day, year+month per month, and a bare year per year.
    """
    if partial_date.day:
        return ES_DATE_MATH_ROUNDING_DAY
    if partial_date.month:
        return ES_DATE_MATH_ROUNDING_MONTH
    return ES_DATE_MATH_ROUNDING_YEAR
Returns the proper ES date math unit according to the resolution of the partial_date .
53,889
def generate_match_query(field, value, with_operator_and):
    """Helper for generating a match query.

    Args:
        field: the ES field to be queried.
        value: the value of the query; may be a bool, or a string that
            JSON-decodes to a bool (e.g. "TRUE"), in which case the operator
            clause is omitted.
        with_operator_and: whether to generate an 'and' operator match query.
    """
    decoded = None
    try:
        decoded = json.loads(value.lower())
    except (ValueError, TypeError, AttributeError):
        pass

    if isinstance(value, bool):
        return {'match': {field: value}}
    if isinstance(decoded, bool):
        # Boolean-like string: normalize to lowercase, no operator clause.
        return {'match': {field: value.lower()}}

    if with_operator_and:
        return {'match': {field: {'query': value, 'operator': 'and'}}}

    return {'match': {field: value}}
Helper for generating a match query .
53,890
def wrap_query_in_nested_if_field_is_nested(query, field, nested_fields):
    """Helper for wrapping a query into a nested query if the field is nested.

    Args:
        query: the query to be wrapped.
        field: the field upon which the query is defined.
        nested_fields: list of ES nested-object path prefixes.

    Returns:
        The nested query, or the original query when the field is not nested.
    """
    for element in nested_fields:
        # BUGFIX: the '.' was previously unescaped in the pattern, so it
        # matched any character (e.g. field 'authorsX...' wrongly matched
        # prefix 'authors'). Escape the whole prefix to match it literally.
        match_pattern = r'^{}\.'.format(re.escape(element))
        if re.match(match_pattern, field):
            return generate_nested_query(element, query)

    return query
Helper for wrapping a query into a nested query if the fields within the query are nested.
53,891
def is_within_limits(self, limit, date, dates):
    """Return True if the difference between `date` and any value in `dates`
    is less than or equal to `limit` (in seconds, per self.second_diff)."""
    for other in dates:
        if self.second_diff(date, other) <= limit:
            return True
    return False
Returns True if the difference between date and any value in dates is less than or equal to limit .
53,892
def emit_tree_format(tree, verbose=False):
    """Return a string tree representation of a parse tree.

    Args:
        tree: the parse tree to be formatted.
        verbose: when True, also print the repr of the tree being converted.
    """
    if verbose:
        print("Converting: " + repr(tree))
    return __recursive_formatter(tree)
Returns a tree representation of a parse tree .
53,893
def calculate_retry_delay(attempt, max_delay=300):
    """Calculate an exponential backoff for retry attempts, with a small
    amount of jitter.

    Args:
        attempt: the retry attempt number (used as the backoff exponent).
        max_delay: cap; past it, a +/-20 second jitter band around the cap
            is used instead of growing further.

    Returns:
        int: the number of seconds to wait before the next attempt.
    """
    delay = int(random.uniform(2, 4) ** attempt)
    if delay <= max_delay:
        return delay
    # Past the cap: jitter around the cap instead of growing exponentially.
    return int(random.uniform(max_delay - 20, max_delay + 20))
Calculates an exponential backoff for retry attempts with a small amount of jitter .
53,894
def parse_query(query_str):
    """Drive the whole logic by parsing, restructuring and finally generating
    an ElasticSearch query.

    Args:
        query_str: the query to be translated to an ElasticSearch query.

    Returns:
        dict: the generated ElasticSearch query body.

    Notes:
        On any parse or visitor failure, a `multi_match` query over `_all`
        with the (colon-stripped) original query string is returned instead,
        so search never hard-fails for the user.
    """
    def _generate_match_all_fields_query():
        # Strip colons so field-like tokens don't leak into the _all match.
        stripped_query_str = ' '.join(query_str.replace(':', ' ').split())
        return {'multi_match': {'query': stripped_query_str, 'fields': ['_all'], 'zero_terms_query': 'all'}}

    if not isinstance(query_str, six.text_type):
        query_str = six.text_type(query_str.decode('utf-8'))

    logger.info('Parsing: "' + query_str + '\".')

    parser = StatefulParser()
    rst_visitor = RestructuringVisitor()
    es_visitor = ElasticSearchVisitor()

    try:
        unrecognized_text, parse_tree = parser.parse(query_str, Query)

        if unrecognized_text:
            msg = 'Parser returned unrecognized text: "' + unrecognized_text + \
                  '" for query: "' + query_str + '".'

            if query_str == unrecognized_text and parse_tree is None:
                # Nothing at all was recognized: fall back to match-all.
                logger.warn(msg)
                return _generate_match_all_fields_query()
            else:
                msg += 'Continuing with recognized parse tree.'

            logger.warn(msg)

    except SyntaxError as e:
        logger.warn('Parser syntax error (' + six.text_type(e) + ') with query: "' +
                    query_str + '". Continuing with a match_all with the given query.')
        return _generate_match_all_fields_query()

    try:
        restructured_parse_tree = parse_tree.accept(rst_visitor)
        logger.debug('Parse tree: \n' + emit_tree_format(restructured_parse_tree))

    except Exception as e:
        # NOTE(review): due to operator precedence, when the exception has no
        # message this logs just '.' — likely missing parentheses upstream.
        logger.exception(
            RestructuringVisitor.__name__ + " crashed" + (": " + six.text_type(e) + ".") if six.text_type(e) else '.'
        )
        return _generate_match_all_fields_query()

    try:
        es_query = restructured_parse_tree.accept(es_visitor)

    except Exception as e:
        logger.exception(
            ElasticSearchVisitor.__name__ + " crashed" + (": " + six.text_type(e) + ".") if six.text_type(e) else '.'
        )
        return _generate_match_all_fields_query()

    if not es_query:
        # An empty query was generated (e.g. a date query with a malformed date).
        return _generate_match_all_fields_query()

    return es_query
Drives the whole logic by parsing restructuring and finally generating an ElasticSearch query .
53,895
def task(self, name_or_class: Any) -> Task:
    """Return a running Task object matching the given name or class."""
    tasks = self.all_tasks

    # Direct hit: the argument is itself a registered task name/key.
    if name_or_class in tasks:
        return tasks[name_or_class]

    # Otherwise look the task up by the argument's class name.
    try:
        return tasks.get(name_or_class.__class__.__name__, None)
    except AttributeError:
        return None
Return a running Task object matching the given name or class .
53,896
async def init(self) -> None:
    """Initialize configuration and start tasks."""
    # stats/configuration are task-like objects; insert() returns the
    # initialized instance.
    self.stats = await self.insert(self.stats)
    self.configuration = await self.insert(self.configuration)

    if not self.executor:
        try:
            max_workers = self.config.get('executor_workers')
        except Exception:
            # Fall back to the ThreadPoolExecutor default worker count.
            max_workers = None

        self.executor = ThreadPoolExecutor(max_workers=max_workers)

    for task in self.initial_tasks:
        await self.insert(task)

    # Background monitor that watches over running tasks.
    self.monitor = asyncio.ensure_future(self.monitor_tasks())
    self.counters['alive_since'] = time.time()
Initialize configuration and start tasks .
53,897
async def insert(self, task: Task) -> None:
    """Insert the given task class (or instance) into the Tasky event loop.

    Registers the task by name, initializes it once, and schedules it when
    enabled. Note: despite the -> None annotation, the task is returned.

    Raises:
        Exception: when a different task with the same name is already registered.
    """
    if not isinstance(task, Task):
        # A Task subclass was passed; instantiate it.
        task = task()

    if task.name not in self.all_tasks:
        task.tasky = self
        self.all_tasks[task.name] = task
        await task.init()

    elif task != self.all_tasks[task.name]:
        raise Exception('Duplicate task %s' % task.name)

    if task.enabled:
        # start_task() tracks running state and completion counters.
        task.task = asyncio.ensure_future(self.start_task(task))
        self.running_tasks.add(task)
    else:
        task.task = None

    return task
Insert the given task class into the Tasky event loop .
53,898
async def execute(self, fn, *args, **kwargs) -> None:
    """Execute an arbitrary function outside the event loop using
    a shared Executor, returning its result."""
    bound = functools.partial(fn, *args, **kwargs)
    return await self.loop.run_in_executor(self.executor, bound)
Execute an arbitrary function outside the event loop using a shared Executor .
53,899
async def start_task(self, task: Task) -> None:
    """Initialize the task, queue it for execution, and keep track of it for
    when tasks need to be stopped. Always records timing counters, even when
    the task is cancelled or crashes."""
    try:
        Log.debug('task %s starting', task.name)
        before = time.time()
        task.counters['last_run'] = before
        task.running = True
        self.running_tasks.add(task)

        await task.run_task()
        Log.debug('task %s completed', task.name)

    except CancelledError:
        Log.debug('task %s cancelled', task.name)

    except Exception:
        # Tasks must never take down the event loop; log and clean up.
        Log.exception('unhandled exception in task %s', task.name)

    finally:
        self.running_tasks.discard(task)
        task.running = False
        task.task = None

        after = time.time()
        total = after - before
        task.counters['last_completed'] = after
        task.counters['duration'] = total
Initialize the task queue it for execution add the done callback and keep track of it for when tasks need to be stopped .