idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
52,200
def add_to_package_numpy(self, root, ndarray, node_path, target, source_path, transform, custom_meta):
    """Save a Numpy array to the store and record it in the package contents.

    The array and its user metadata are persisted first; the resulting
    hashes are then registered under ``node_path`` in ``root``.
    """
    data_hash = self.save_numpy(ndarray)
    meta_hash = self.save_metadata(custom_meta)
    self._add_to_package_contents(root, node_path, [data_hash], target,
                                  source_path, transform, meta_hash)
Save a Numpy array to the store .
52,201
def add_to_package_package_tree(self, root, node_path, pkgnode):
    """Graft a package or sub-package tree from an existing package into this one.

    When ``node_path`` is empty, ``pkgnode``'s children replace the root's
    children — but only if the root is currently empty.
    """
    if not node_path:
        if root.children:
            raise PackageException("Attempting to overwrite root node of a non-empty package.")
        root.children = pkgnode.children.copy()
        return
    cursor = root
    for part in node_path[:-1]:
        # Create intermediate groups on demand while walking down.
        cursor = cursor.children.setdefault(part, GroupNode(dict()))
    cursor.children[node_path[-1]] = pkgnode
Adds a package or sub-package tree from an existing package to this package's contents.
52,202
def _install_interrupt_handler():
    """Suppress KeyboardInterrupt traceback display when quilt runs as a console script.

    Installs a SIGINT handler that exits quietly, unless dev mode is active
    (``--dev`` flag or ``QUILT_DEV_MODE=true``), in which case the normal
    KeyboardInterrupt is raised so the traceback is shown.
    """
    import os
    import sys
    import signal
    import pkg_resources
    from .tools import const

    quilt = pkg_resources.get_distribution('quilt')
    executable = os.path.basename(sys.argv[0])
    entry_points = quilt.get_entry_map().get('console_scripts', [])

    # Special-case `python -c` used by the test suite.
    if executable == '-c':
        if len(sys.argv) > 1 and sys.argv[1] == 'quilt testing':
            entry_points['-c'] = 'blah'
            sys.argv.pop(1)

    if executable not in entry_points:
        return

    def handle_interrupt(signum, stack):
        if _DEV_MODE is None:
            # CLI flag first, then the environment variable wins if set.
            dev_mode = len(sys.argv) > 1 and sys.argv[1] == '--dev'
            if os.environ.get('QUILT_DEV_MODE', '').strip().lower() == 'true':
                dev_mode = True
        else:
            dev_mode = _DEV_MODE
        if dev_mode:
            raise KeyboardInterrupt()
        print()
        exit(const.EXIT_KB_INTERRUPT)

    return signal.signal(signal.SIGINT, handle_interrupt)
Suppress KeyboardInterrupt traceback display in specific situations
52,203
def _data_keys(self):
    """Return every child key that references a data node (not a group)."""
    return [key for key, node in iteritems(self._children)
            if not isinstance(node, GroupNode)]
every child key referencing a dataframe
52,204
def _group_keys(self):
    """Return every child key that references a group node."""
    return [key for key, node in iteritems(self._children)
            if isinstance(node, GroupNode)]
every child key referencing a group that is not a dataframe
52,205
def _data(self, asa=None):
    """Merge all child dataframes (depth-first, in sorted child order).

    Only works for dataframes stored on disk — not in memory.  With
    ``asa`` given, instead calls ``asa(self, object_paths)`` on the
    fragments' on-disk paths.
    """
    hashes = []
    pending = [self]
    all_dataframes = True
    store = None
    while pending:
        node = pending.pop()
        if isinstance(node, GroupNode):
            # Reverse-sorted push so children pop in sorted order.
            pending.extend(child for _, child in sorted(node._items(), reverse=True))
            continue
        if node._target() != TargetType.PANDAS:
            all_dataframes = False
        if node._store is None or node._hashes is None:
            msg = "Can only merge built dataframes. Build this package and try again."
            raise NotImplementedError(msg)
        node_store = node._store
        if store is None:
            store = node_store
        if node_store != store:
            raise NotImplementedError("Can only merge dataframes from the same store")
        hashes += node._hashes

    if asa is None:
        if not hashes:
            return None
        if not all_dataframes:
            raise ValueError("Group contains non-dataframe nodes")
        return store.load_dataframe(hashes)
    if hashes:
        assert store is not None
        return asa(self, [store.object_path(obj) for obj in hashes])
    return asa(self, [])
Merges all child dataframes . Only works for dataframes stored on disk - not in memory .
52,206
def _set(self, path, value, build_dir=''):
    """Create and set a node at ``path``, inferring the target type from ``value``.

    Accepts a DataFrame, a numpy array, or a (relative) file path; group
    nodes along the path are created as needed.
    """
    assert isinstance(path, list) and len(path) > 0

    if isinstance(value, pd.DataFrame):
        metadata = {SYSTEM_METADATA: {'target': TargetType.PANDAS.value}}
    elif isinstance(value, np.ndarray):
        metadata = {SYSTEM_METADATA: {'target': TargetType.NUMPY.value}}
    elif isinstance(value, string_types + (bytes,)):
        if isinstance(value, bytes):
            value = value.decode()
        if os.path.isabs(value):
            raise ValueError("Invalid path: expected a relative path, but received {!r}".format(value))
        metadata = {SYSTEM_METADATA: {'filepath': value, 'transform': 'id'}}
        if build_dir:
            value = os.path.join(build_dir, value)
    else:
        accepted_types = tuple(set((pd.DataFrame, np.ndarray, bytes) + string_types))
        raise TypeError(
            "Bad value type: Expected instance of any type {!r}, but received type {!r}".format(
                accepted_types, type(value)),
            repr(value)[0:100])

    for key in path:
        if not is_nodename(key):
            raise ValueError("Invalid name for node: {}".format(key))

    # Walk/create intermediate groups, overwriting non-group nodes.
    node = self
    for key in path[:-1]:
        child = node._get(key)
        if not isinstance(child, GroupNode):
            child = GroupNode({})
            node[key] = child
        node = child
    node[path[-1]] = DataNode(None, None, value, metadata)
Create and set a node by path
52,207
def handle_api_exception(error):
    """Convert an API exception into a JSON error response (and track it)."""
    _mp_track(
        type="exception",
        status_code=error.status_code,
        message=error.message,
    )
    resp = jsonify(dict(message=error.message))
    resp.status_code = error.status_code
    return resp
Converts an API exception into an error response .
52,208
def api(require_login=True, schema=None, enabled=True, require_admin=False,
        require_anonymous=False):
    """Decorator for API requests: validates JSON input and handles auth.

    Populates ``g.auth`` (and ``g.user``/``g.auth_token`` when a token is
    supplied), enforcing login/admin/anonymous requirements as configured.
    """
    if require_admin:
        require_login = True
    if schema is not None:
        Draft4Validator.check_schema(schema)
        validator = Draft4Validator(schema)
    else:
        validator = None

    assert not (require_login and require_anonymous), (
        "Can't both require login and require anonymous access.")

    def innerdec(f):
        @wraps(f)
        def wrapper(*args, **kwargs):
            # Default to an anonymous auth context.
            g.auth = Auth(user=None, email=None, is_logged_in=False,
                          is_admin=False, is_active=True)
            user_agent_str = request.headers.get('user-agent', '')
            g.user_agent = httpagentparser.detect(user_agent_str, fill_none=True)

            if not enabled:
                raise ApiException(requests.codes.bad_request,
                                   "This endpoint is not enabled.")

            if validator is not None:
                try:
                    validator.validate(request.get_json(cache=True))
                except ValidationError as ex:
                    raise ApiException(requests.codes.bad_request, ex.message)

            auth = request.headers.get(AUTHORIZATION_HEADER)
            g.auth_header = auth
            if auth is None:
                if not require_anonymous:
                    if require_login or not ALLOW_ANONYMOUS_ACCESS:
                        raise ApiException(requests.codes.unauthorized, "Not logged in")
            else:
                token = auth
                if token.startswith("Bearer "):
                    token = token[7:]
                try:
                    user = verify_token_string(token)
                except AuthException:
                    raise ApiException(requests.codes.unauthorized, "Token invalid.")
                g.user = user
                g.auth = Auth(user=user.name, email=user.email, is_logged_in=True,
                              is_admin=user.is_admin, is_active=user.is_active)
                g.auth_token = token
                if not g.auth.is_active:
                    raise ApiException(
                        requests.codes.forbidden,
                        "Account is inactive. Must have an active account.")

            if require_admin and not g.auth.is_admin:
                raise ApiException(
                    requests.codes.forbidden,
                    "Must be authenticated as an admin to use this endpoint.")

            return f(*args, **kwargs)
        return wrapper
    return innerdec
Decorator for API requests . Handles auth and adds the username as the first argument .
52,209
def _private_packages_allowed():
    """Check whether the current user may create private packages.

    Always true without payments or on a team deployment; otherwise the
    customer must be on a paid plan.
    """
    if not HAVE_PAYMENTS or TEAM_ID:
        return True
    customer = _get_or_create_customer()
    return _get_customer_plan(customer) != PaymentPlan.FREE
Checks if the current user is allowed to create private packages .
52,210
def _create_auth(team, timeout=None):
    """Read stored credentials, refreshing the access token if it expires soon.

    Returns the (possibly refreshed) auth dict for the team's registry, or
    None when no credentials are stored.
    """
    url = get_registry_url(team)
    contents = _load_auth()
    auth = contents.get(url)
    if auth is not None:
        # Refresh when the token expires within the next minute.
        if auth['expires_at'] < time.time() + 60:
            try:
                auth = _update_auth(team, auth['refresh_token'], timeout)
            except CommandException as ex:
                raise CommandException(
                    "Failed to update the access token (%s). Run `quilt login%s` again." %
                    (ex, ' ' + team if team else ''))
            contents[url] = auth
            _save_auth(contents)
    return auth
Reads the credentials, updates the access token if necessary, and returns it.
52,211
def _create_session(team, auth):
    """Create a requests session (for push, install, etc.) with quilt headers."""
    session = requests.Session()
    session.hooks.update(dict(response=partial(_handle_response, team)))
    session.headers.update({
        "Content-Type": "application/json",
        "Accept": "application/json",
        "User-Agent": "quilt-cli/%s (%s %s) %s/%s" % (
            VERSION, platform.system(), platform.release(),
            platform.python_implementation(), platform.python_version()),
    })
    if auth is not None:
        session.headers["Authorization"] = "Bearer %s" % auth['access_token']
    return session
Creates a session object to be used for push install etc .
52,212
def _get_session(team, timeout=None):
    """Return the cached session for ``team``, creating one if needed."""
    global _sessions
    session = _sessions.get(team)
    if session is None:
        auth = _create_auth(team, timeout)
        session = _create_session(team, auth)
        _sessions[team] = session
    assert session is not None
    return session
Creates a session or returns an existing session .
52,213
def _check_team_login(team):
    """Disallow simultaneous public-cloud and team logins."""
    contents = _load_auth()
    for auth in itervalues(contents):
        existing_team = auth.get('team')
        if team and team != existing_team:
            raise CommandException(
                "Can't log in as team %r; log out first." % team)
        elif not team and existing_team:
            raise CommandException(
                "Can't log in as a public user; log out from team %r first." % existing_team)
Disallow simultaneous public cloud and team logins .
52,214
def _check_team_exists(team):
    """Verify that the team registry hostname resolves in DNS."""
    if team is None:
        return
    hostname = urlparse(get_registry_url(team)).hostname
    try:
        socket.gethostbyname(hostname)
    except IOError:
        # Distinguish "no internet" from "bad team name" by probing a known host.
        try:
            socket.gethostbyname('quiltdata.com')
        except IOError:
            message = "Can't find quiltdata.com. Check your internet connection."
        else:
            message = "Unable to connect to registry. Is the team name %r correct?" % team
        raise CommandException(message)
Check that the team registry actually exists .
52,215
def login_with_token(refresh_token, team=None):
    """Authenticate using an existing refresh token and persist the credentials."""
    _check_team_id(team)
    auth = _update_auth(team, refresh_token)
    registry_url = get_registry_url(team)
    contents = _load_auth()
    contents[registry_url] = auth
    _save_auth(contents)
    # Drop any cached session so the new credentials take effect.
    _clear_session(team)
Authenticate using an existing token .
52,216
def generate(directory, outfilename=DEFAULT_BUILDFILE):
    """Generate a build file (for `quilt build`) from a directory of source files."""
    try:
        buildfilepath = generate_build_file(directory, outfilename=outfilename)
    except BuildException as builderror:
        raise CommandException(str(builderror))
    print("Generated build-file %s." % (buildfilepath))
Generate a build file for `quilt build` from a directory of source files.
52,217
def build(package, path=None, dry_run=False, env='default', force=False, build_file=False):
    """Compile a Quilt data package from a build file or an existing package node.

    Warns (unless ``force``) when a team member builds a package without a
    team prefix; logs success/failure telemetry either way.
    """
    team, _, _, subpath = parse_package(package, allow_subpath=True)
    _check_team_id(team)

    logged_in_team = _find_logged_in_team()
    if logged_in_team is not None and team is None and force is False:
        answer = input(
            "You're logged in as a team member, but you aren't specifying "
            "a team for the package you're currently building. Maybe you meant:\n"
            "quilt build {team}:{package}\n"
            "Are you sure you want to continue? (y/N) ".format(
                team=logged_in_team, package=package))
        if answer.lower() != 'y':
            return

    if not subpath:
        build_file = True

    package_hash = hashlib.md5(package.encode('utf-8')).hexdigest()
    try:
        _build_internal(package, path, dry_run, env, build_file)
    except Exception as ex:
        _log(team, type='build', package=package_hash, dry_run=dry_run, env=env, error=str(ex))
        raise
    _log(team, type='build', package=package_hash, dry_run=dry_run, env=env)
Compile a Quilt data package either from a build file or an existing package node .
52,218
def build_from_node(package, node):
    """Compile a Quilt data package from an existing package node tree."""
    team, owner, pkg, subpath = parse_package(package, allow_subpath=True)
    _check_team_id(team)
    store = PackageStore()
    pkg_root = get_or_create_package(store, team, owner, pkg, subpath)
    if not subpath and not isinstance(node, nodes.GroupNode):
        raise CommandException("Top-level node must be a group")

    def _process_node(node, path):
        # Validate and split metadata; SYSTEM_METADATA is reserved.
        if not isinstance(node._meta, dict):
            raise CommandException(
                "Error in %s: value must be a dictionary" % '.'.join(path + ['_meta']))
        meta = dict(node._meta)
        system_meta = meta.pop(SYSTEM_METADATA, {})
        if not isinstance(system_meta, dict):
            raise CommandException(
                "Error in %s: %s overwritten. %s is a reserved metadata key. Try a different key." %
                ('.'.join(path + ['_meta']), SYSTEM_METADATA, SYSTEM_METADATA))

        if isinstance(node, nodes.GroupNode):
            store.add_to_package_group(pkg_root, path, meta)
            for key, child in node._items():
                _process_node(child, path + [key])
        elif isinstance(node, nodes.DataNode):
            data = node._data()
            filepath = system_meta.get('filepath')
            transform = system_meta.get('transform')
            if isinstance(data, pd.DataFrame):
                store.add_to_package_df(pkg_root, data, path, TargetType.PANDAS,
                                        filepath, transform, meta)
            elif isinstance(data, np.ndarray):
                store.add_to_package_numpy(pkg_root, data, path, TargetType.NUMPY,
                                           filepath, transform, meta)
            elif isinstance(data, string_types):
                store.add_to_package_file(pkg_root, data, path, TargetType.FILE,
                                          filepath, transform, meta)
            else:
                assert False, "Unexpected data type: %r" % data
        else:
            assert False, "Unexpected node type: %r" % node

    try:
        _process_node(node, subpath)
    except StoreException as ex:
        raise CommandException("Failed to build the package: %s" % ex)
    store.save_package_contents(pkg_root, team, owner, pkg)
Compile a Quilt data package from an existing package node .
52,219
def build_from_path(package, path, dry_run=False, env='default',
                    outfilename=DEFAULT_BUILDFILE):
    """Compile a Quilt data package from a build file.

    ``path`` may be a directory, in which case a build file is generated
    automatically (unless one already exists there).
    """
    team, owner, pkg, subpath = parse_package(package, allow_subpath=True)

    if not os.path.exists(path):
        raise CommandException("%s does not exist." % path)

    try:
        if os.path.isdir(path):
            buildpath = os.path.join(path, outfilename)
            if os.path.exists(buildpath):
                raise CommandException(
                    "Build file already exists. Run `quilt build %r` instead." % buildpath)
            contents = generate_contents(path, outfilename)
            build_package_from_contents(team, owner, pkg, subpath, path, contents,
                                        dry_run=dry_run, env=env)
        else:
            build_package(team, owner, pkg, subpath, path, dry_run=dry_run, env=env)
        if not dry_run:
            print("Built %s successfully." % package)
    except BuildException as ex:
        raise CommandException("Failed to build the package: %s" % ex)
Compile a Quilt data package from a build file . Path can be a directory in which case the build file will be generated automatically .
52,220
def log(package):
    """List all of the changes to a package on the server, newest first."""
    team, owner, pkg = parse_package(package)
    session = _get_session(team)
    response = session.get(
        "{url}/api/log/{owner}/{pkg}/".format(
            url=get_registry_url(team), owner=owner, pkg=pkg))

    table = [("Hash", "Pushed", "Author", "Tags", "Versions")]
    for entry in reversed(response.json()['logs']):
        timestamp = datetime.fromtimestamp(entry['created'])
        pushed = timestamp.strftime("%Y-%m-%d %H:%M:%S")
        table.append((entry['hash'], pushed, entry['author'],
                      str(entry.get('tags', [])), str(entry.get('versions', []))))
    _print_table(table)
List all of the changes to a package on the server .
52,221
def push(package, is_public=False, is_team=False, reupload=False, hash=None):
    """Push a Quilt data package to the server.

    Performs a dry-run to obtain upload URLs, uploads the data fragments,
    then uploads the package metadata and (for full-package pushes)
    updates the 'latest' tag.
    """
    team, owner, pkg, subpath = parse_package(package, allow_subpath=True)
    _check_team_id(team)
    session = _get_session(team)

    store, pkgroot = PackageStore.find_package(team, owner, pkg, pkghash=hash)
    if pkgroot is None:
        raise CommandException("Package {package} not found.".format(package=package))

    pkghash = hash_contents(pkgroot)
    if hash is not None:
        assert pkghash == hash

    contents = pkgroot
    for component in subpath:
        try:
            contents = contents.children[component]
        except (AttributeError, KeyError):
            raise CommandException("Invalid subpath: %r" % component)

    def _push_package(dry_run=False, sizes=dict()):
        data = json.dumps(dict(
            dry_run=dry_run,
            is_public=is_public,
            is_team=is_team,
            contents=contents,
            description="",
            sizes=sizes,
        ), default=encode_node)
        compressed_data = gzip_compress(data.encode('utf-8'))
        if subpath:
            return session.post(
                "{url}/api/package_update/{owner}/{pkg}/{subpath}".format(
                    url=get_registry_url(team), owner=owner, pkg=pkg,
                    subpath='/'.join(subpath)),
                data=compressed_data,
                headers={'Content-Encoding': 'gzip'})
        else:
            return session.put(
                "{url}/api/package/{owner}/{pkg}/{hash}".format(
                    url=get_registry_url(team), owner=owner, pkg=pkg, hash=pkghash),
                data=compressed_data,
                headers={'Content-Encoding': 'gzip'})

    print("Fetching upload URLs from the registry...")
    resp = _push_package(dry_run=True)
    obj_urls = resp.json()['upload_urls']
    assert set(obj_urls) == set(find_object_hashes(contents))

    obj_sizes = {obj_hash: os.path.getsize(store.object_path(obj_hash))
                 for obj_hash in obj_urls}

    success = upload_fragments(store, obj_urls, obj_sizes, reupload=reupload)
    if not success:
        raise CommandException("Failed to upload fragments")

    print("Uploading package metadata...")
    resp = _push_package(sizes=obj_sizes)
    package_url = resp.json()['package_url']

    if not subpath:
        print("Updating the 'latest' tag...")
        session.put(
            "{url}/api/tag/{owner}/{pkg}/{tag}".format(
                url=get_registry_url(team), owner=owner, pkg=pkg, tag=LATEST_TAG),
            data=json.dumps(dict(hash=pkghash)))

    print("Push complete. %s is live:\n%s" % (package, package_url))
Push a Quilt data package to the server
52,222
def version_list(package):
    """List the versions of a package."""
    team, owner, pkg = parse_package(package)
    session = _get_session(team)
    response = session.get(
        "{url}/api/version/{owner}/{pkg}/".format(
            url=get_registry_url(team), owner=owner, pkg=pkg))
    for entry in response.json()['versions']:
        print("%s: %s" % (entry['version'], entry['hash']))
List the versions of a package .
52,223
def version_add(package, version, pkghash, force=False):
    """Register a new (immutable) version for a given package hash."""
    team, owner, pkg = parse_package(package)
    session = _get_session(team)

    try:
        Version(version)
    except ValueError:
        url = "https://www.python.org/dev/peps/pep-0440/#examples-of-compliant-version-schemes"
        raise CommandException("Invalid version format; see %s" % url)

    if not force:
        answer = input("Versions cannot be modified or deleted; are you sure? (y/n) ")
        if answer.lower() != 'y':
            return

    session.put(
        "{url}/api/version/{owner}/{pkg}/{version}".format(
            url=get_registry_url(team), owner=owner, pkg=pkg, version=version),
        data=json.dumps(dict(hash=_match_hash(package, pkghash))))
Add a new version for a given package hash .
52,224
def tag_list(package):
    """List the tags of a package."""
    team, owner, pkg = parse_package(package)
    session = _get_session(team)
    response = session.get(
        "{url}/api/tag/{owner}/{pkg}/".format(
            url=get_registry_url(team), owner=owner, pkg=pkg))
    for entry in response.json()['tags']:
        print("%s: %s" % (entry['tag'], entry['hash']))
List the tags of a package .
52,225
def tag_add(package, tag, pkghash):
    """Add a new tag pointing at a given package hash."""
    team, owner, pkg = parse_package(package)
    session = _get_session(team)
    session.put(
        "{url}/api/tag/{owner}/{pkg}/{tag}".format(
            url=get_registry_url(team), owner=owner, pkg=pkg, tag=tag),
        data=json.dumps(dict(hash=_match_hash(package, pkghash))))
Add a new tag for a given package hash .
52,226
def install_via_requirements(requirements_str, force=False):
    """Download multiple Quilt data packages via a quilt.yml requirements file.

    ``requirements_str`` is either literal YAML, or ``'@<path>'`` to read
    the YAML from a file.  Each entry under ``packages`` is installed.

    Raises CommandException when the file is missing or lacks a
    ``packages`` node.
    """
    if requirements_str[0] == '@':
        path = requirements_str[1:]
        if os.path.isfile(path):
            yaml_data = load_yaml(path)
            # Fix: messages previously had no '{filename}' placeholder, so
            # .format(filename=path) never inserted the actual file name.
            if 'packages' not in yaml_data.keys():
                raise CommandException(
                    'Error in {filename}: missing "packages" node'.format(filename=path))
        else:
            raise CommandException(
                "Requirements file not found: {filename}".format(filename=path))
    else:
        yaml_data = yaml.safe_load(requirements_str)
    for pkginfo in yaml_data['packages']:
        info = parse_package_extended(pkginfo)
        install(info.full_name, info.hash, info.version, info.tag, force=force)
Download multiple Quilt data packages via quilt . xml requirements file .
52,227
def access_list(package):
    """Print the list of users who can access a package."""
    team, owner, pkg = parse_package(package)
    session = _get_session(team)
    lookup_url = "{url}/api/access/{owner}/{pkg}/".format(
        url=get_registry_url(team), owner=owner, pkg=pkg)
    data = session.get(lookup_url).json()
    print('\n'.join(data['users']))
Print list of users who can access a package .
52,228
def delete(package):
    """Delete a package and its entire history from the server, after confirmation."""
    team, owner, pkg = parse_package(package)

    answer = input(
        "Are you sure you want to delete this package and its entire history? "
        "Type '%s' to confirm: " % package)

    if answer != package:
        print("Not deleting.")
        return 1

    session = _get_session(team)
    session.delete("%s/api/package/%s/%s/" % (get_registry_url(team), owner, pkg))
    print("Deleted.")
Delete a package from the server .
52,229
def search(query, team=None):
    """Search for packages — in the logged-in team (if any) and in the public cloud."""
    if team is None:
        team = _find_logged_in_team()

    if team is not None:
        session = _get_session(team)
        response = session.get("%s/api/search/" % get_registry_url(team),
                               params=dict(q=query))
        print("* Packages in team %s" % team)
        packages = response.json()['packages']
        for pkg in packages:
            print(("%s:" % team) + ("%(owner)s/%(name)s" % pkg))
        if len(packages) == 0:
            print("(No results)")
        print("* Packages in public cloud")

    public_session = _get_session(None)
    response = public_session.get("%s/api/search/" % get_registry_url(None),
                                  params=dict(q=query))
    packages = response.json()['packages']
    for pkg in packages:
        print("%(owner)s/%(name)s" % pkg)
    if len(packages) == 0:
        print("(No results)")
Search for packages
52,230
def ls():
    """List all installed Quilt data packages, grouped by store directory."""
    for pkg_dir in PackageStore.find_store_dirs():
        print("%s" % pkg_dir)
        packages = PackageStore(pkg_dir).ls_packages()
        for package, tag, pkghash in sorted(packages):
            print("{0:30} {1:20} {2}".format(package, tag, pkghash))
List all installed Quilt data packages
52,231
def inspect(package):
    """Pretty-print a package's node tree, with shape/dtype info for dataframes."""
    team, owner, pkg = parse_package(package)
    store, pkgroot = PackageStore.find_package(team, owner, pkg)
    if pkgroot is None:
        raise CommandException("Package {package} not found.".format(package=package))

    def _print_children(children, prefix, path):
        for idx, (name, child) in enumerate(children):
            if idx == len(children) - 1:
                new_prefix = u"└─"
                new_child_prefix = u"  "
            else:
                new_prefix = u"├─"
                new_child_prefix = u"│ "
            _print_node(child, prefix + new_prefix, prefix + new_child_prefix, name, path)

    def _print_node(node, prefix, child_prefix, name, path):
        name_prefix = u"─ "
        if isinstance(node, GroupNode):
            children = list(node.children.items())
            if children:
                name_prefix = u"┬ "
            print(prefix + name_prefix + name)
            _print_children(children, child_prefix, path + name)
        elif node.metadata['q_target'] == TargetType.PANDAS.value:
            df = store.load_dataframe(node.hashes)
            assert isinstance(df, pd.DataFrame)
            types = ", ".join("%r: %s" % (col, dtype) for col, dtype in df.dtypes.items())
            if len(types) > 64:
                # Truncate long dtype listings with an ellipsis.
                types = types[:63] + u"…"
            info = "shape %s, types %s" % (df.shape, types)
            print(prefix + name_prefix + name + ": " + info)
        else:
            print(prefix + name_prefix + name)

    print(store.package_path(team, owner, pkg))
    _print_children(children=pkgroot.children.items(), prefix='', path='')
Inspect package details
52,232
def load(pkginfo, hash=None):
    """Functional equivalent of `from quilt.data.USER import PKG`."""
    node, pkgroot, info = _load(pkginfo, hash)
    # Descend into the requested subpath, if any.
    for subnode_name in info.subpath:
        node = node[subnode_name]
    return node
Functional interface equivalent to `from quilt.data.USER import PKG`.
52,233
def c_ideal_gas(T, k, MW):
    r'''Calculate the speed of sound `c` [m/s] in an ideal gas at temperature T.

    Uses the specific gas constant R*1000/MW (MW in g/mol).
    '''
    Rspecific = R*1000./MW
    return (k*Rspecific*T)**0.5
r Calculates speed of sound c in an ideal gas at temperature T .
52,234
def Reynolds(V, D, rho=None, mu=None, nu=None):
    r'''Calculate the Reynolds number Re = V*D/nu.

    Provide either kinematic viscosity `nu`, or both `rho` and `mu`.
    '''
    if rho and mu:
        nu = mu/rho
    elif not nu:
        raise Exception('Either density and viscosity, or dynamic viscosity, \
is needed')
    return V*D/nu
r Calculates Reynolds number or Re for a fluid with the given properties for the specified velocity and diameter .
52,235
def Peclet_heat(V, L, rho=None, Cp=None, k=None, alpha=None):
    r'''Calculate the heat-transfer Peclet number Pe = V*L/alpha.

    Provide either thermal diffusivity `alpha`, or all of `rho`, `Cp`, `k`.
    '''
    if rho and Cp and k:
        alpha = k/(rho*Cp)
    elif not alpha:
        raise Exception('Either heat capacity and thermal conductivity and\
 density, or thermal diffusivity is needed')
    return V*L/alpha
r Calculates heat transfer Peclet number or Pe for a specified velocity V characteristic length L and specified properties for the given fluid .
52,236
def Fourier_heat(t, L, rho=None, Cp=None, k=None, alpha=None):
    r'''Calculate the heat-transfer Fourier number Fo = t*alpha/L^2.

    Provide either thermal diffusivity `alpha`, or all of `rho`, `Cp`, `k`.
    '''
    if rho and Cp and k:
        alpha = k/(rho*Cp)
    elif not alpha:
        raise Exception('Either heat capacity and thermal conductivity and \
density, or thermal diffusivity is needed')
    return t*alpha/L**2
r Calculates heat transfer Fourier number or Fo for a specified time t characteristic length L and specified properties for the given fluid .
52,237
def Graetz_heat(V, D, x, rho=None, Cp=None, k=None, alpha=None):
    r'''Calculate the Graetz number Gz = V*D^2/(x*alpha).

    Provide either thermal diffusivity `alpha`, or all of `rho`, `Cp`, `k`.
    '''
    if rho and Cp and k:
        alpha = k/(rho*Cp)
    elif not alpha:
        raise Exception('Either heat capacity and thermal conductivity and\
 density, or thermal diffusivity is needed')
    return V*D**2/(x*alpha)
r Calculates Graetz number or Gz for a specified velocity V diameter D axial distance x and specified properties for the given fluid .
52,238
def Schmidt(D, mu=None, nu=None, rho=None):
    r'''Calculate the Schmidt number Sc = nu/D = mu/(rho*D).

    `D` is the mass diffusivity; provide either `nu`, or both `mu` and `rho`.
    '''
    if rho and mu:
        return mu/(rho*D)
    elif nu:
        return nu/D
    else:
        raise Exception('Insufficient information provided for Schmidt number calculation')
r Calculates Schmidt number or Sc for a fluid with the given parameters .
52,239
def Lewis(D=None, alpha=None, Cp=None, k=None, rho=None):
    r'''Calculate the Lewis number Le = alpha/D.

    Provide either thermal diffusivity `alpha`, or all of `k`, `Cp`, `rho`.
    '''
    if k and Cp and rho:
        alpha = k/(rho*Cp)
    elif alpha:
        pass
    else:
        raise Exception('Insufficient information provided for Le calculation')
    return alpha/D
r Calculates Lewis number or Le for a fluid with the given parameters .
52,240
def Confinement(D, rhol, rhog, sigma, g=g):
    r'''Calculate the Confinement number Co for a channel of diameter D.

    Co = sqrt(sigma/(g*(rhol - rhog)))/D, for liquid/gas densities
    `rhol`/`rhog` and surface tension `sigma` under gravity `g`.
    '''
    capillary_length = (sigma/(g*(rhol - rhog)))**0.5
    return capillary_length/D
r Calculates Confinement number or Co for a fluid in a channel of diameter D with liquid and gas densities rhol and rhog and surface tension sigma under the influence of gravitational force g .
52,241
def Morton(rhol, rhog, mul, sigma, g=g):
    r'''Calculate the Morton number Mo = g*mul^4*(rhol - rhog)/(rhol^2*sigma^3).'''
    mul2 = mul*mul
    return g*mul2*mul2*(rhol - rhog)/(rhol*rhol*sigma*sigma*sigma)
r Calculates Morton number or Mo for a liquid and vapor with the specified properties under the influence of gravitational force g .
52,242
def Prandtl(Cp=None, k=None, mu=None, nu=None, rho=None, alpha=None):
    r'''Calculate the Prandtl number Pr from any sufficient property set.

    Pr = Cp*mu/k = nu*rho*Cp/k = nu/alpha.
    '''
    if k and Cp and mu:
        return Cp*mu/k
    elif nu and rho and Cp and k:
        return nu*rho*Cp/k
    elif nu and alpha:
        return nu/alpha
    else:
        raise Exception('Insufficient information provided for Pr calculation')
r Calculates Prandtl number or Pr for a fluid with the given parameters .
52,243
def Grashof(L, beta, T1, T2=0, rho=None, mu=None, nu=None, g=g):
    r'''Calculate the Grashof number Gr = g*beta*|T2 - T1|*L^3/nu^2.

    Provide either kinematic viscosity `nu`, or both `rho` and `mu`.
    '''
    if rho and mu:
        nu = mu/rho
    elif not nu:
        raise Exception('Either density and viscosity, or dynamic viscosity, \
is needed')
    return g*beta*abs(T2 - T1)*L**3/nu**2
r Calculates Grashof number or Gr for a fluid with the given properties temperature difference and characteristic length .
52,244
def Froude(V, L, g=g, squared=False):
    r'''Calculate the Froude number Fr = V/sqrt(L*g).

    With ``squared=True``, return Fr^2 (the alternative definition).
    '''
    Fr = V/(L*g)**0.5
    return Fr*Fr if squared else Fr
r Calculates Froude number Fr for velocity V and geometric length L . If desired gravity can be specified as well . Normally the function returns the result of the equation below ; Froude number is also often said to be defined as the square of the equation below .
52,245
def Stokes_number(V, Dp, D, rhop, mu):
    r'''Calculate the Stokes number Stk = rhop*V*Dp^2/(18*mu*D).'''
    return rhop*V*(Dp*Dp)/(18.0*mu*D)
r Calculates Stokes Number for a given characteristic velocity V particle diameter Dp characteristic diameter D particle density rhop and fluid viscosity mu .
52,246
def Suratman(L, rho, mu, sigma):
    r'''Calculate the Suratman number Su = rho*sigma*L/mu^2.'''
    return rho*sigma*L/(mu*mu)
r Calculates Suratman number Su for a fluid with the given characteristic length density viscosity and surface tension .
52,247
def nu_mu_converter(rho, mu=None, nu=None):
    r'''Convert between kinematic and dynamic viscosity using density.

    Exactly one of `mu` or `nu` must be given alongside a nonzero `rho`;
    otherwise an exception is raised.
    '''
    if (nu and mu) or not rho or (not nu and not mu):
        raise Exception('Inputs must be rho and one of mu and nu.')
    if mu:
        return mu/rho
    elif nu:
        return nu*rho
r Calculates either kinematic or dynamic viscosity depending on inputs . Used when one type of viscosity is known as well as density to obtain the other type . Raises an error if both types of viscosity or neither type of viscosity is provided .
52,248
def Engauge_2d_parser(lines, flat=False):
    """Parse a 2D file produced by engauge-digitizer (internal; for curve fitting).

    Returns (z_values, x_lists, y_lists); with ``flat=True``, returns
    three parallel flat lists instead.
    """
    z_values = []
    x_lists = []
    y_lists = []
    working_xs = []
    working_ys = []
    new_curve = True
    for line in lines:
        if line.strip() == '':
            # Blank line separates curves.
            new_curve = True
        elif new_curve:
            # Header line: "name, z-value".
            z_values.append(float(line.split(',')[1]))
            if working_xs and working_ys:
                x_lists.append(working_xs)
                y_lists.append(working_ys)
                working_xs = []
                working_ys = []
            new_curve = False
        else:
            x, y = [float(i) for i in line.strip().split(',')]
            working_xs.append(x)
            working_ys.append(y)
    x_lists.append(working_xs)
    y_lists.append(working_ys)

    if flat:
        all_zs = []
        all_xs = []
        all_ys = []
        for z, xs, ys in zip(z_values, x_lists, y_lists):
            for x, y in zip(xs, ys):
                all_zs.append(z)
                all_xs.append(x)
                all_ys.append(y)
        return all_zs, all_xs, all_ys
    return z_values, x_lists, y_lists
Not exposed function to read a 2D file generated by engauge - digitizer ; for curve fitting .
52,249
def isothermal_work_compression(P1, P2, T, Z=1):
    r'''Calculate the work of isothermal compression/expansion of a gas.

    W = Z*R*T*ln(P2/P1).
    '''
    return Z*R*T*log(P2/P1)
r Calculates the work of compression or expansion of a gas going through an isothermal process .
52,250
def isentropic_T_rise_compression(T1, P1, P2, k, eta=1):
    r'''Calculate outlet temperature after isentropic compression/expansion.

    Assumes constant Cp and Cv; the polytropic model is the same equation
    with `n` in place of `k` and a polytropic efficiency for `eta`.
    '''
    dT = T1*((P2/P1)**((k - 1.0)/k) - 1.0)/eta
    return T1 + dT
r Calculates the increase in temperature of a fluid which is compressed or expanded under isentropic adiabatic conditions assuming constant Cp and Cv . The polytropic model is the same equation ; just provide n instead of k and use a polytropic efficienty for eta instead of a isentropic efficiency .
52,251
def isentropic_efficiency(P1, P2, k, eta_s=None, eta_p=None):
    r'''Convert between isentropic and polytropic compression efficiency.

    Given `eta_s`, returns `eta_p`; given `eta_p`, returns `eta_s`.
    '''
    ratio = P2/P1
    expo = (k - 1.0)/k
    if eta_s is None and eta_p:
        return (ratio**expo - 1.0)/(ratio**((k - 1.0)/(k*eta_p)) - 1.0)
    elif eta_p is None and eta_s:
        return (k - 1.0)*log(ratio)/(k*log((eta_s + ratio**expo - 1.0)/eta_s))
    else:
        raise Exception('Either eta_s or eta_p is required')
r Calculates either isentropic or polytropic efficiency from the other type of efficiency .
52,252
def P_isothermal_critical_flow(P, fd, D, L):
    r'''Critical (choking) outlet pressure for isothermal gas flow at
    upstream pressure `P` through a pipe of diameter `D`, length `L`
    and Darcy friction factor `fd`.  Uses the -1 branch of the Lambert
    W function for the analytical solution.
    '''
    # Argument of W_{-1}; lies in (-1/e, 0) for physical inputs.
    arg = -exp((-D - L*fd)/D)
    W = float(lambertw(arg, -1).real)
    return P*exp((D*(W + 1.0) + L*fd)/(2.0*D))
r Calculates critical flow pressure Pcf for a fluid flowing isothermally and suffering pressure drop caused by a pipe s friction factor .
52,253
def P_upstream_isothermal_critical_flow(P, fd, D, L):
    '''Inverse of `P_isothermal_critical_flow`: given the critical
    outlet pressure `P`, recover the upstream pressure for the same
    pipe (`fd`, `D`, `L`).  Not part of the public API.
    '''
    W = float(lambertw(-exp(-(fd*L + D)/D), -1).real)
    return P*exp(-0.5*(D*W + fd*L + D)/D)
Not part of the public API . Reverses P_isothermal_critical_flow .
52,254
def is_critical_flow(P1, P2, k):
    r'''True when gas flow driven from `P1` down to `P2` is choked
    (critical) for isentropic exponent `k`: the flow is critical when
    the critical-flow pressure computed from `P1` exceeds the
    downstream pressure `P2`.
    '''
    return P_critical_flow(P1, k) > P2
r Determines if a flow of a fluid driven by pressure gradient P1 - P2 is critical for a fluid with the given isentropic coefficient . This function calculates critical flow pressure and checks if this is larger than P2 . If so the flow is critical and choked .
52,255
def one_phase_dP(m, rho, mu, D, roughness=0, L=1, Method=None):
    r'''Single-phase frictional pressure drop [Pa] for mass flow `m` of
    a fluid with density `rho` and viscosity `mu` through a circular
    pipe of diameter `D`, length `L` and absolute `roughness`.
    `Method` is forwarded to `friction_factor`.
    '''
    D2 = D*D
    # Mean velocity from the mass flow and the circular flow area.
    V = m/(0.25*pi*D2*rho)
    Re = Reynolds(V=V, rho=rho, mu=mu, D=D)
    fd = friction_factor(Re=Re, eD=roughness/D, Method=Method)
    # Darcy-Weisbach equation.
    return fd*L/D*(0.5*rho*V*V)
r Calculates single - phase pressure drop . This is a wrapper around other methods .
52,256
def discharge_coefficient_to_K(D, Do, C):
    r'''Convert an orifice discharge coefficient `C` into the standard
    loss coefficient `K` usable for the actual pressure drop of an
    orifice (or similar device), based on pipe diameter `D` and orifice
    diameter `Do`.
    '''
    beta2 = (Do/D)**2
    beta4 = beta2*beta2
    root = (1.0 - beta4*(1.0 - C*C))**0.5
    term = root/(C*beta2) - 1.0
    return term*term
r Converts a discharge coefficient to a standard loss coefficient for use in computation of the actual pressure drop of an orifice or other device .
52,257
def dn(self, fraction, n=None):
    r'''Diameter below which `fraction` of the distribution lies, found
    by numerically inverting ``self.cdf`` with a bounded solver between
    ``self.d_minimum`` and ``self.d_excessive``.
    '''
    if fraction == 1.0:
        # Exactly 1.0 is unreachable numerically; back off by epsilon.
        fraction = 1.0 - epsilon
    if fraction < 0:
        raise ValueError('Fraction must be more than 0')
    elif fraction == 0:
        if self.truncated:
            return self.d_min
        return 0.0
    elif fraction > 1:
        raise ValueError('Fraction less than 1')
    err = lambda d: self.cdf(d, n=n) - fraction
    return brenth(err, self.d_minimum, self.d_excessive,
                  maxiter=1000, xtol=1E-200)
r Computes the diameter at which a specified fraction of the distribution falls under . Utilizes a bounded solver to search for the desired diameter .
52,258
def fit(self, x0=None, distribution='lognormal', n=None, **kwargs):
    '''Fit the measured size distribution to an analytical PSD model
    ('lognormal', 'GGS' or 'RR') by minimizing `_fit_obj_function`.
    Initial guesses are crude and hard to get right; this API is
    expected to change.
    '''
    models = {'lognormal': PSDLognormal,
              'GGS': PSDGatesGaudinSchuhman,
              'RR': PSDRosinRammler}
    dist = models[distribution]
    if x0 is None:
        if distribution == 'RR':
            x0 = [5E-6, 1e-2]
        else:
            # Mass-weighted mean diameter as the characteristic size.
            d_characteristic = sum([fi*di for fi, di
                                    in zip(self.fractions, self.Dis)])
            spread = 0.4 if distribution == 'lognormal' else 1.5
            x0 = [d_characteristic, spread]
    from scipy.optimize import minimize
    return minimize(self._fit_obj_function, x0, args=(dist, n), **kwargs)
Incomplete method to fit experimental values to a curve . It is very hard to get good initial guesses which are really required for this . Differential evolution is promising . This API is likely to change in the future .
52,259
def Dis(self):
    '''Representative diameters of each size bin, one per class,
    computed as ``di_power(i, power=1)`` for each bin index.'''
    return [self.di_power(i, power=1) for i in range(self.N)]
Representative diameters of each bin .
52,260
def SA_tank(D, L, sideA=None, sideB=None, sideA_a=0, sideB_a=0,
            sideA_f=None, sideA_k=None, sideB_f=None, sideB_k=None,
            full_output=False):
    r'''Surface area of a cylindrical tank of diameter `D` and length
    `L` with optional heads on each side ('conical', 'ellipsoidal',
    'guppy', 'spherical', 'torispherical'; anything else is treated as
    a flat circular end).  With `full_output`, also returns the
    per-component areas ``(sideA_SA, sideB_SA, lateral_SA)``.
    '''
    def head_SA(shape, a, f, k):
        # Dispatch on the head shape; flat/unrecognized -> plain circle.
        if shape == 'conical':
            return SA_conical_head(D=D, a=a)
        if shape == 'ellipsoidal':
            return SA_ellipsoidal_head(D=D, a=a)
        if shape == 'guppy':
            return SA_guppy_head(D=D, a=a)
        if shape == 'spherical':
            return SA_partial_sphere(D=D, h=a)
        if shape == 'torispherical':
            return SA_torispheroidal(D=D, fd=f, fk=k)
        return pi/4*D**2
    sideA_SA = head_SA(sideA, sideA_a, sideA_f, sideA_k)
    sideB_SA = head_SA(sideB, sideB_a, sideB_f, sideB_k)
    lateral_SA = pi*D*L
    SA = sideA_SA + sideB_SA + lateral_SA
    if full_output:
        return SA, (sideA_SA, sideB_SA, lateral_SA)
    return SA
r Calculates the surface area of a cylindrical tank with optional heads . In the degenerate case of being provided with only D and L provides the surface area of a cylinder .
52,261
def pitch_angle_solver(angle=None, pitch=None, pitch_parallel=None,
                       pitch_normal=None):
    r'''Given any two of `angle` [degrees], `pitch`, `pitch_parallel`
    and `pitch_normal` for a tube bank, compute the remaining two and
    return ``(angle, pitch, pitch_parallel, pitch_normal)``.  Raises an
    Exception unless two inputs are supplied.  Useful for shell-and-tube
    exchangers and air coolers.
    '''
    have_pitch = pitch is not None
    if angle is not None and have_pitch:
        pitch_normal = pitch*sin(radians(angle))
        pitch_parallel = pitch*cos(radians(angle))
    elif angle is not None and pitch_normal is not None:
        pitch = pitch_normal/sin(radians(angle))
        pitch_parallel = pitch*cos(radians(angle))
    elif angle is not None and pitch_parallel is not None:
        pitch = pitch_parallel/cos(radians(angle))
        pitch_normal = pitch*sin(radians(angle))
    elif pitch_normal is not None and have_pitch:
        angle = degrees(asin(pitch_normal/pitch))
        pitch_parallel = pitch*cos(radians(angle))
    elif pitch_parallel is not None and have_pitch:
        angle = degrees(acos(pitch_parallel/pitch))
        pitch_normal = pitch*sin(radians(angle))
    elif pitch_parallel is not None and pitch_normal is not None:
        # Both components known: recover the hypotenuse and the angle.
        angle = degrees(asin(pitch_normal/(pitch_normal**2
                                           + pitch_parallel**2)**0.5))
        pitch = (pitch_normal**2 + pitch_parallel**2)**0.5
    else:
        raise Exception('Two of the arguments are required')
    return angle, pitch, pitch_parallel, pitch_normal
r Utility to take any two of angle pitch pitch_parallel and pitch_normal and calculate the other two . This is useful for applications with tube banks as in shell and tube heat exchangers or air coolers and allows for a wider range of user input .
52,262
def A_hollow_cylinder(Di, Do, L):
    r'''Total surface area of a hollow cylinder with inner diameter
    `Di`, outer diameter `Do` and length `L`: outer wall, inner wall,
    and the two annular end faces.
    '''
    outer_wall = pi*Do*L
    inner_wall = pi*Di*L
    # Two full end circles, minus the two bored-out circles.
    end_faces = pi*Do**2/4*2
    end_bores = pi*Di**2/4*2
    return outer_wall + inner_wall + end_faces - end_bores
r Returns the surface area of a hollow cylinder .
52,263
def A_multiple_hole_cylinder(Do, L, holes):
    r'''Surface area of a solid cylinder (diameter `Do`, length `L`)
    pierced by axial holes; `holes` is an iterable of
    ``(diameter, count)`` pairs.  A physically impossible hole layout
    naively yields a negative or otherwise invalid area.
    '''
    # Solid cylinder: two end circles plus the outer wall.
    A = pi*Do**2/4*2 + pi*Do*L
    for Di, count in holes:
        # Each hole adds its bore wall and removes two end circles.
        A = A + pi*Di*L*count - pi*Di**2/4*2*count
    return A
r Returns the surface area of a cylinder with multiple holes . Calculation will naively return a negative value or other impossible result if the number of cylinders added is physically impossible . Holes may be of different shapes but must be perpendicular to the axis of the cylinder .
52,264
def V_from_h(self, h, method='full'):
    r'''Volume of liquid in this fully defined tank at liquid height
    `h` (must not exceed the tank height).  ``method='full'`` evaluates
    the analytical expression via the module-level ``V_from_h``;
    ``'chebyshev'`` uses (and lazily builds) the Chebyshev
    approximators.
    '''
    if method == 'full':
        # Delegate to the module-level V_from_h with this tank's geometry.
        return V_from_h(h, self.D, self.L, self.horizontal, self.sideA,
                        self.sideB, self.sideA_a, self.sideB_a,
                        self.sideA_f, self.sideA_k, self.sideB_f,
                        self.sideB_k)
    if method == 'chebyshev':
        if not self.chebyshev:
            self.set_chebyshev_approximators()
        return self.V_from_h_cheb(h)
    raise Exception("Allowable methods are 'full' or 'chebyshev'.")
r Method to calculate the volume of liquid in a fully defined tank given a specified height h . h must be under the maximum height . If the method is chebyshev and the coefficients have not yet been calculated they are created by calling set_chebyshev_approximators .
52,265
def h_from_V(self, V, method='spline'):
    r'''Height of liquid corresponding to liquid volume `V` in this
    fully defined tank (`V` must not exceed the total volume).
    ``'spline'`` interpolates from a lazily-built table, ``'chebyshev'``
    uses the lazily-built Chebyshev approximator, and ``'brenth'``
    solves the full analytical expression numerically.
    '''
    if method == 'spline':
        if not self.table:
            self.set_table()
        return float(self.interp_h_from_V(V))
    elif method == 'chebyshev':
        if not self.chebyshev:
            self.set_chebyshev_approximators()
        return self.h_from_V_cheb(V)
    elif method == 'brenth':
        err = lambda h: self.V_from_h(h, method='full') - V
        # NOTE(review): the bracket is passed as (h_max, 0), i.e.
        # reversed; confirm the solver in use accepts a reversed interval.
        return brenth(err, self.h_max, 0)
    else:
        raise Exception("Allowable methods are 'full' or 'chebyshev', "
                        "or 'brenth'.")
r Method to calculate the height of liquid in a fully defined tank given a specified volume of liquid in it V . V must be under the maximum volume . If the method is spline and the interpolation table is not yet defined creates it by calling the method set_table . If the method is chebyshev and the coefficients have not yet been calculated they are created by calling set_chebyshev_approximators .
52,266
def set_table(self, n=100, dx=None):
    r'''Build the height->volume table (and the inverse spline
    ``interp_h_from_V``) used by `h_from_V`.  Specify either the number
    of points `n` or the vertical step `dx` between points.
    '''
    if dx:
        count = int(self.h_max/dx) + 1
        self.heights = np.linspace(0, self.h_max, count)
    else:
        self.heights = np.linspace(0, self.h_max, n)
    self.volumes = [self.V_from_h(h) for h in self.heights]
    from scipy.interpolate import UnivariateSpline
    # ext=3 clamps extrapolation to the boundary values; s=0 forces the
    # spline through every tabulated point.
    self.interp_h_from_V = UnivariateSpline(self.volumes, self.heights,
                                            ext=3, s=0.0)
    self.table = True
r Method to set an interpolation table of liquids levels versus volumes in the tank for a fully defined tank . Normally run by the h_from_V method this may be run prior to its use with a custom specification . Either the number of points on the table or the vertical distance between steps may be specified .
52,267
def _V_solver_error(self, Vtarget, D, L, horizontal, sideA, sideB,
                    sideA_a, sideB_a, sideA_f, sideA_k, sideB_f,
                    sideB_k, sideA_a_ratio, sideB_a_ratio):
    '''Objective for `solve_tank_for_V`: absolute difference between
    `Vtarget` and the total volume of a TANK instance built from the
    given geometry.
    '''
    trial = TANK(D=float(D), L=float(L), horizontal=horizontal,
                 sideA=sideA, sideB=sideB, sideA_a=sideA_a,
                 sideB_a=sideB_a, sideA_f=sideA_f, sideA_k=sideA_k,
                 sideB_f=sideB_f, sideB_k=sideB_k,
                 sideA_a_ratio=sideA_a_ratio,
                 sideB_a_ratio=sideB_a_ratio)
    return abs(Vtarget - trial.V_total)
Function which uses only the variables given and the TANK class itself to determine how far from the desired volume Vtarget the volume produced by the specified parameters in a new TANK instance is . Should only be used by solve_tank_for_V method .
52,268
def plate_exchanger_identifier(self):
    '''Identifying string of the form
    ``L<wavelength>A<amplitude>B<angle>-<angle>`` with wavelength and
    amplitude converted to mm and rounded to two decimal places.
    '''
    wavelength_mm = round(self.wavelength*1000, 2)
    amplitude_mm = round(self.amplitude*1000, 2)
    angles = '-'.join(str(angle) for angle in self.chevron_angles)
    return 'L%sA%sB%s' % (wavelength_mm, amplitude_mm, angles)
Method to create an identifying string in format L + wavelength + A + amplitude + B + chevron angle - chevron angle . Wavelength and amplitude are specified in units of mm and rounded to two decimal places .
52,269
def linspace(start, stop, num=50, endpoint=True, retstep=False, dtype=None):
    '''Pure-Python port of numpy.linspace returning a list of floats.

    Values are generated by repeated addition of the step (matching the
    accumulation behavior of the original port).  `dtype` is accepted
    for API compatibility but ignored.  With `retstep`, returns
    ``(values, step)``.
    '''
    num = int(num)
    start = start*1.
    stop = stop*1.
    if num <= 0:
        return []
    if endpoint:
        if num == 1:
            # Single-point edge case returns immediately; retstep is not
            # applied on this path.
            return [start]
        step = (stop - start)/float(num - 1)
        out = [start]
        for _ in range(num - 2):
            out.append(out[-1] + step)
        # The endpoint is appended exactly rather than accumulated.
        out.append(stop)
    else:
        step = (stop - start)/float(num)
        if num == 1:
            step = nan
        out = [start]
        for _ in range(num - 1):
            out.append(out[-1] + step)
    return (out, step) if retstep else out
Port of numpy s linspace to pure python . Does not support dtype and returns lists of floats .
52,270
def derivative(func, x0, dx=1.0, n=1, args=(), order=3):
    '''n-th derivative of `func` at `x0` by central finite differences
    of odd `order` (which must exceed `n`), using cached difference
    weights where available.
    '''
    if order < n + 1:
        raise ValueError
    if order % 2 == 0:
        raise ValueError
    weights = central_diff_weights(order, n)
    half = order >> 1
    acc = 0.0
    for idx in range(order):
        # Sample symmetrically about x0 at spacing dx.
        acc += weights[idx]*func(x0 + (idx - half)*dx, *args)
    return acc/product([dx]*n)
Reimplementation of SciPy s derivative function with more cached coefficients and without using numpy . If new coefficients not cached are needed they are only calculated once and are remembered .
52,271
def polyder(c, m=1, scl=1, axis=0):
    '''Differentiate a polynomial `m` times.

    Coefficients in `c` are ordered from low to high degree, matching
    ``numpy.polynomial.polynomial.polyder``.  Each differentiation also
    multiplies the coefficients by `scl` (the chain-rule factor for a
    linearly scaled domain).  `axis` is accepted for numpy API
    compatibility but ignored.

    Fixes vs. the prior version: ``c *= scl`` was list repetition (a
    TypeError for float `scl`, garbage for int `scl` != 1) and is now a
    per-coefficient multiplication, and differentiating to or past
    degree zero now returns the zero polynomial ``[0.0]`` instead of an
    empty list.
    '''
    c = list(c)
    cnt = int(m)
    if cnt == 0:
        return c
    n = len(c)
    if cnt >= n:
        # Derivative of order >= degree+1 is the zero polynomial.
        return [0.0]
    for _ in range(cnt):
        n -= 1
        # Chain-rule scaling, applied elementwise.
        c = [scl*coeff for coeff in c]
        # d/dx sum(c[j] x^j) = sum(j*c[j] x^(j-1)).
        c = [j*c[j] for j in range(1, n + 1)]
    return c
not quite a copy of numpy s version because this was faster to implement .
52,272
def horner_log(coeffs, log_coeff, x):
    '''Evaluate a polynomial (coefficients high-to-low in `coeffs`)
    plus a ``log_coeff*log(x)`` term, using Horner's scheme for the
    polynomial part.
    '''
    total = 0.0
    for coeff in coeffs:
        total = total*x + coeff
    return total + log_coeff*log(x)
Technically possible to save one addition if the last term of coeffs is removed but benchmarks said nothing was saved
52,273
def implementation_optimize_tck(tck):
    '''Convert a 1-d (length 3) or 2-d (length 5) spline `tck` from
    splrep/bisplrep into the fastest representation for the running
    interpreter: left as lists on PyPy, knot/coefficient entries turned
    into numpy arrays otherwise.  Mutates and returns `tck`; other
    lengths raise NotImplementedError.
    '''
    if IS_PYPY:
        return tck
    if len(tck) == 3:
        array_count = 2
    elif len(tck) == 5:
        array_count = 3
    else:
        raise NotImplementedError
    for i in range(array_count):
        tck[i] = np.array(tck[i])
    return tck
Converts 1 - d or 2 - d splines calculated with SciPy s splrep or bisplrep to a format for fastest computation - lists in PyPy and numpy arrays otherwise . Only implemented for 3 and 5 length tck s .
52,274
def py_bisect(f, a, b, args=(), xtol=_xtol, rtol=_rtol, maxiter=_iter,
              ytol=None, full_output=False, disp=True):
    '''Pure-Python port of SciPy's C bisection routine.

    Finds a root of `f` bracketed by [a, b].  When `ytol` is given,
    convergence additionally requires ``|f(xm)| < ytol``.
    `full_output` and `disp` are accepted for API compatibility but are
    not used here; raises UnconvergedError after `maxiter` iterations.
    '''
    fa = f(a, *args)
    fb = f(b, *args)
    if fa*fb > 0.0:
        raise ValueError("f(a) and f(b) must have different signs")
    if fa == 0.0:
        return a
    if fb == 0.0:
        return b
    dm = b - a
    for _ in range(maxiter):
        # Halve the bracket and probe its midpoint.
        dm *= 0.5
        xm = a + dm
        fm = f(xm, *args)
        if fm*fa >= 0.0:
            a = xm
        abs_dm = fabs(dm)
        if fm == 0.0:
            return xm
        if ytol is not None:
            if abs_dm < xtol + rtol*abs_dm and abs(fm) < ytol:
                return xm
        elif abs_dm < xtol + rtol*abs_dm:
            return xm
    raise UnconvergedError("Failed to converge after %d iterations" % maxiter)
Port of SciPy s C bisect routine .
52,275
def is_choked_turbulent_l(dP, P1, Psat, FF, FL=None, FLP=None, FP=None):
    r'''True when a liquid flow is choked (critical) per IEC 60534
    valve sizing.  Supply either `FL`, or both `FLP` and `FP`,
    depending on the calculation path; otherwise an Exception is
    raised.
    '''
    if FLP and FP:
        limit = (FLP/FP)**2*(P1 - FF*Psat)
    elif FL:
        limit = FL**2*(P1 - FF*Psat)
    else:
        raise Exception('Either (FLP and FP) or FL is needed')
    return dP >= limit
r Calculates if a liquid flow in IEC 60534 calculations is critical or not for use in IEC 60534 liquid valve sizing calculations . Either FL may be provided or FLP and FP depending on the calculation process .
52,276
def is_choked_turbulent_g(x, Fgamma, xT=None, xTP=None):
    r'''True when a gas flow is choked (critical) per IEC 60534 valve
    sizing.  Supply either `xT` or `xTP` depending on the calculation
    path; otherwise an Exception is raised.
    '''
    if xT:
        return x >= Fgamma*xT
    if xTP:
        return x >= Fgamma*xTP
    raise Exception('Either xT or xTP is needed')
r Calculates if a gas flow in IEC 60534 calculations is critical or not for use in IEC 60534 gas valve sizing calculations . Either xT or xTP must be provided depending on the calculation process .
52,277
def Reynolds_valve(nu, Q, D1, FL, Fd, C):
    r'''Valve Reynolds number per IEC 60534 for volumetric flow `Q` of
    a fluid with kinematic viscosity `nu` through a valve of inlet
    diameter `D1`, pressure recovery factor `FL`, style modifier `Fd`
    and flow coefficient `C`.  Relies on the module constants N2, N4.
    '''
    geometry = (FL**2*C**2/(N2*D1**4) + 1)**0.25
    return N4*Fd*Q/nu/(C*FL)**0.5*geometry
r Calculates Reynolds number of a control valve for a liquid or gas flowing through it at a specified Q for a specified D1 FL Fd C and with kinematic viscosity nu according to IEC 60534 calculations .
52,278
def Reynolds_factor(FL, C, d, Rev, full_trim=True):
    r'''Reynolds number factor FR per IEC 60534 for a valve of flow
    coefficient `C`, diameter `d`, pressure recovery factor `FL` and
    valve Reynolds number `Rev`, with full or reduced trim.  Relies on
    the module constants N2 and N32.
    '''
    if full_trim:
        n1 = N2/(min(C/d**2, 0.04))**2
        laminar = 0.026/FL*(n1*Rev)**0.5
        transitional = 1 + (0.33*FL**0.5)/n1**0.25*log10(Rev/10000.)
        # Below Rev = 10 only the laminar branch applies.
        return laminar if Rev < 10 else min(transitional, laminar)
    n2 = 1 + N32*(C/d**2)**(2/3.)
    transitional = 1 + (0.33*FL**0.5)/n2**0.25*log10(Rev/10000.)
    # The reduced-trim laminar branch is capped at 1.
    laminar = min(0.026/FL*(n2*Rev)**0.5, 1)
    return laminar if Rev < 10 else min(transitional, laminar)
r Calculates the Reynolds number factor FR for a valve with a Reynolds number Rev diameter d flow coefficient C liquid pressure recovery factor FL and with either full or reduced trim all according to IEC 60534 calculations .
52,279
def func_args(func):
    '''Tuple of the argument names of a function or method, via
    `inspect.signature`, with a legacy `getargspec` fallback for very
    old interpreters.
    '''
    try:
        params = inspect.signature(func).parameters
        return tuple(params)
    except:  # pragma: no cover - legacy interpreter fallback
        return tuple(inspect.getargspec(func).args)
Basic function which returns a tuple of arguments of a function or method .
52,280
def cast_scalar(method):
    '''Decorator: promote a scalar `other` argument to a constant
    interpolating object (same class as `self`, built over
    ``self.domain()``) before invoking the wrapped binary method.
    '''
    @wraps(method)
    def wrapper(self, other):
        if np.isscalar(other):
            other = type(self)([other], self.domain())
        return method(self, other)
    return wrapper
Cast scalars to constant interpolating objects
52,281
def dct(data):
    '''Compute a DCT of `data` via an FFT: keep the first N+1 FFT terms
    (N = len(data)//2), normalize by N and halve the two endpoint
    terms.  The real part is returned for real input; complex input
    returns the complex spectrum.
    '''
    half = len(data)//2
    spectrum = fftpack.fft(data, axis=0)[:half + 1]
    spectrum /= half
    # Endpoint terms carry half weight in the DCT.
    spectrum[0] /= 2.
    spectrum[-1] /= 2.
    if np.isrealobj(data):
        return np.real(spectrum)
    return spectrum
Compute DCT using FFT
52,282
def _cutoff(self, coeffs, vscale):
    '''Index after which `coeffs` fall below the noise threshold
    ``self._threshold(vscale)``; always at least 1.
    '''
    bound = self._threshold(vscale)
    significant = np.nonzero(abs(coeffs) >= bound)[0]
    # Keep everything up to the last significant coefficient.
    last = significant[-1] if len(significant) else 0
    return last + 1
Compute cutoff index after which the coefficients are deemed negligible .
52,283
def same_domain(self, fun2):
    '''True when `fun2` is defined on (numerically) the same domain as
    self, to tight tolerances.'''
    mine = self.domain()
    theirs = fun2.domain()
    return np.allclose(mine, theirs, rtol=1e-14, atol=1e-14)
Returns True if the domains of two objects are the same .
52,284
def restrict(self, subinterval):
    '''Return an object matching self on `subinterval`, which must lie
    within the current domain.'''
    lo = self._domain[0]
    hi = self._domain[1]
    if subinterval[0] < lo or subinterval[1] > hi:
        raise ValueError("Can only restrict to subinterval")
    return self.from_function(self, subinterval)
Return a Polyfun that matches self on subinterval .
52,285
def basis(self, n):
    '''n-th Chebyshev basis polynomial T_n, represented by its values
    at the n+1 Chebyshev points, where T_n alternates +1, -1, +1, ...
    (just [1.] for n = 0).
    '''
    if n == 0:
        return self(np.array([1.]))
    # Alternating-sign values of T_n at the Chebyshev points.
    return self((-1.0)**np.arange(n + 1))
Chebyshev basis functions T_n .
52,286
def sum(self):
    '''Definite integral over the domain via Clenshaw-Curtis
    quadrature: only even Chebyshev coefficients contribute, each
    weighted by the integral of T_{2k} over [-1, 1], then scaled by
    half the domain width.
    '''
    ak = self.coefficients()
    even_ak = ak[::2]
    count = len(even_ak)
    # Integral of T_{2k} over [-1, 1] is 2/(1 - 4k^2).
    weights = 2/(1 - (2*np.arange(count))**2)
    raw = np.sum((weights*even_ak.T).T, axis=0)
    lo, hi = self.domain()
    return 0.5*(hi - lo)*raw
Evaluate the integral over the given interval using Clenshaw - Curtis quadrature .
52,287
def integrate(self):
    '''Primitive (antiderivative) of self over its domain, normalized
    to be zero at the left endpoint.'''
    coeffs = self.coefficients()
    lo, hi = self.domain()
    # Chebyshev antiderivative coefficients, scaled for the domain width.
    int_coeffs = 0.5*(hi - lo)*poly.chebyshev.chebint(coeffs)
    primitive = self.from_coeff(int_coeffs, domain=self.domain())
    # Shift so the primitive vanishes at the left endpoint.
    return primitive - primitive(lo)
Return the object representing the primitive of self over the domain . The output starts at zero on the left - hand side of the domain .
52,288
def differentiate(self, n=1):
    '''n-th derivative (default 1): apply the coefficient-space
    differentiation operator n times, then rescale for the domain
    width.'''
    ak = self.coefficients()
    lo, hi = self.domain()
    for _ in range(n):
        ak = self.differentiator(ak)
    scale = (2./(hi - lo))**n
    return self.from_coeff(scale*ak, domain=self.domain())
n - th derivative default 1 .
52,289
def sample_function(self, f, N):
    '''Values of `f` at the N+1 Chebyshev interpolation points.'''
    pts = self.interpolation_points(N + 1)
    return f(pts)
Sample a function on N + 1 Chebyshev points .
52,290
def interpolator(self, x, values):
    '''Barycentric interpolant (with vector coefficients) through
    `values` at the Chebyshev points `x`, using the closed-form
    Chebyshev barycentric weights: alternating +-1, halved at both
    endpoints.'''
    p = Bary([0.])
    count = len(values)
    w = np.ones(count)
    w[1::2] = -1
    w[0] *= .5
    w[-1] *= .5
    p.wi = w
    p.xi = x
    p.set_yi(values)
    return p
Returns a polynomial with vector coefficients which interpolates the values at the Chebyshev points x
52,291
def drag_sphere(Re, Method=None, AvailableMethods=False):
    r'''Drag coefficient of a smooth sphere at Reynolds number `Re`.

    With no `Method`, a correlation is chosen automatically by Re range
    (Stokes at low Re, blended into Barati over 0.01-0.1, then Barati /
    Barati_high up to Re = 1E6).  `AvailableMethods` returns the names
    of all correlations applicable at this `Re` instead of a value.
    '''
    if AvailableMethods:
        applicable = []
        for name, (func, Re_min, Re_max) in drag_sphere_correlations.items():
            if (Re_min is None or Re > Re_min) and (Re_max is None or Re < Re_max):
                applicable.append(name)
        return applicable
    if not Method:
        if Re > 0.1:
            if Re <= 212963.26847812787:
                return Barati(Re)
            if Re <= 1E6:
                return Barati_high(Re)
            raise ValueError('No models implement a solution for Re > 1E6')
        if Re >= 0.01:
            # Smooth blend between Stokes and Barati over 0.01 < Re < 0.1.
            weight = (Re - 0.01)/(0.1 - 0.01)
            return weight*Barati(Re) + (1 - weight)*Stokes(Re)
        return Stokes(Re)
    if Method in drag_sphere_correlations:
        return drag_sphere_correlations[Method][0](Re)
    raise Exception('Failure in in function')
r This function handles calculation of drag coefficient on spheres . Twenty methods are available all requiring only the Reynolds number of the sphere . Most methods are valid from Re = 0 to Re = 200 000 . A correlation will be automatically selected if none is specified . The full list of correlations valid for a given Reynolds number can be obtained with the AvailableMethods flag .
52,292
def v_terminal(D, rhop, rho, mu, Method=None):
    r'''Terminal settling velocity of a sphere of diameter `D` and
    density `rhop` in a fluid of density `rho` and viscosity `mu`,
    using any drag correlation supported by `drag_sphere`.  The Stokes
    solution is returned directly when it is self-consistently laminar
    (Re < 0.01) or explicitly requested; otherwise the force balance is
    solved numerically.
    '''
    v_stokes = g*D*D*(rhop - rho)/(18*mu)
    Re_stokes = Reynolds(V=v_stokes, D=D, rho=rho, mu=mu)
    if Re_stokes < 0.01 or Method == 'Stokes':
        return v_stokes
    Re_per_V = rho*D/mu
    # Right-hand side of V = sqrt(4 g D (rhop-rho)/(3 rho Cd)).
    rhs_const = 4/3.*g*D*(rhop - rho)/rho
    # Velocity at which Re would reach 1e6; scaled down as the guess.
    V_max = 1E6/rho/D*mu
    def err(V):
        Cd = drag_sphere(Re_per_V*V, Method=Method)
        return V - (rhs_const/Cd)**0.5
    return float(newton(err, V_max/100, tol=1E-12))
r Calculates terminal velocity of a falling sphere using any drag coefficient method supported by drag_sphere . The laminar solution for Re < 0 . 01 is first tried ; if the resulting terminal velocity does not put it in the laminar regime a numerical solution is used .
52,293
def integrate_drag_sphere(D, rhop, rho, mu, t, V=0, Method=None,
                          distance=False):
    r'''Integrate the velocity (and, with `distance`, the distance
    traveled) of a sphere of diameter `D` and density `rhop` moving
    through a fluid (`rho`, `mu`) for time `t`, starting at velocity
    `V`; the speed converges toward the terminal velocity.  Uses the
    analytical Stokes-law solution when both the initial and the
    assumed final Reynolds numbers are laminar (or Method == 'Stokes'),
    and numerical ODE integration otherwise.
    '''
    # Check whether both the start and the (terminal-velocity) end of the
    # trajectory lie in the Stokes regime, Re < 0.01.
    laminar_initial = Reynolds(V=V, rho=rho, D=D, mu=mu) < 0.01
    v_laminar_end_assumed = v_terminal(D=D, rhop=rhop, rho=rho, mu=mu,
                                       Method=Method)
    laminar_end = Reynolds(V=v_laminar_end_assumed, rho=rho, D=D,
                           mu=mu) < 0.01
    if Method == 'Stokes' or (laminar_initial and laminar_end
                              and Method is None):
        try:
            # Closed-form solution of dV/dt = t2 - t1*V.
            t1 = 18.0*mu/(D*D*rhop)
            t2 = g*(rhop - rho)/rhop
            V_end = exp(-t1*t)*(t1*V + t2*(exp(t1*t) - 1.0))/t1
            x_end = exp(-t1*t)*(V*t1*(exp(t1*t) - 1.0)
                                + t2*exp(t1*t)*(t1*t - 1.0) + t2)/(t1*t1)
            if distance:
                return V_end, x_end
            else:
                return V_end
        except OverflowError:
            # exp(t1*t) overflowed: terminal velocity was reached long
            # before t.  Integrate analytically up to terminal velocity,
            # then coast at constant speed for the remaining time.
            t_to_terminal = time_v_terminal_Stokes(D, rhop, rho, mu, V0=V,
                                                   tol=1e-9)
            if t_to_terminal > t:
                # The overflow can only occur past terminal velocity.
                raise Exception('Should never happen')
            V_end, x_end = integrate_drag_sphere(D=D, rhop=rhop, rho=rho,
                                                 mu=mu, t=t_to_terminal,
                                                 V=V, Method='Stokes',
                                                 distance=True)
            if distance:
                return V_end, x_end + V_end*(t - t_to_terminal)
            else:
                return V_end
    # General case: integrate dV/dt numerically with the selected drag
    # correlation; Re = Re_ish*V.
    Re_ish = rho*D/mu
    c1 = g*(rhop - rho)/rhop
    c2 = -0.75*rho/(D*rhop)
    def dv_dt(V, t):
        # Drag vanishes at zero velocity (drag_sphere is undefined there).
        if V == 0:
            t2 = 0.0
        else:
            t2 = c2*V*V*drag_sphere(Re_ish*V, Method=Method)
        return c1 + t2
    # Many sample points are needed only when the distance integral is
    # requested; two suffice for the endpoint velocity.
    pts = 1000 if distance else 2
    ts = np.linspace(0, t, pts)
    from scipy.integrate import odeint, cumtrapz
    Vs = odeint(dv_dt, [V], ts)
    V_end = float(Vs[-1])
    if distance:
        x = float(cumtrapz(np.ravel(Vs), ts)[-1])
        return V_end, x
    else:
        return V_end
r Integrates the velocity and distance traveled by a particle moving at a speed which will converge to its terminal velocity .
52,294
def bend_rounded_Ito(Di, angle, Re, rc=None, bend_diameters=None,
                     roughness=0.0):
    '''Loss coefficient K of a rounded pipe bend by the Ito method as
    shown in Blevins; the curved friction factor follows Blevins with
    minor tweaks toward the original methods.  `angle` is in degrees;
    when `rc` (centerline radius) is not given it defaults to
    `bend_diameters` (default 5) pipe diameters.
    '''
    if not rc:
        if bend_diameters is None:
            bend_diameters = 5.0
        rc = Di*bend_diameters
    radius_ratio = rc/Di  # computed but not used below
    angle_rad = radians(angle)  # computed but not used below
    # Dean-number-like group that selects the correlation regime.
    De2 = Re*(Di/rc)**2.0
    if rc > 50.0*Di:
        # Very gentle bends need no curvature correction.
        alpha = 1.0
    else:
        # Interpolate the curvature correction between the tabulated
        # 45, 90 and 180 degree expressions.
        alpha_45 = 1.0 + 5.13*(Di/rc)**1.47
        alpha_90 = 0.95 + 4.42*(Di/rc)**1.96 if rc/Di < 9.85 else 1.0
        alpha_180 = 1.0 + 5.06*(Di/rc)**4.52
        alpha = interp(angle, _Ito_angles, [alpha_45, alpha_90, alpha_180])
    if De2 <= 360.0:
        # Low regime: use the curved-pipe friction factor.
        fc = friction_factor_curved(Re=Re, Di=Di, Dc=2.0*rc,
                                    roughness=roughness,
                                    Rec_method='Srinivasan',
                                    laminar_method='White',
                                    turbulent_method='Srinivasan turbulent')
        K = 0.0175*alpha*fc*angle*rc/Di
    else:
        K = 0.00431*alpha*angle*Re**-0.17*(rc/Di)**0.84
    return K
Ito method as shown in Blevins . Curved friction factor as given in Blevins with minor tweaks to be more accurate to the original methods .
52,295
def geopy_geolocator():
    '''Lazily construct and cache the module-level geopy Nominatim
    geolocator instance; returns None when geopy is not installed.'''
    global geolocator
    if geolocator is None:
        try:
            from geopy.geocoders import Nominatim
        except ImportError:
            # geopy is an optional dependency.
            return None
        geolocator = Nominatim(user_agent=geolocator_user_agent)
        return geolocator
    # Cached instance from a previous call.
    return geolocator
Lazy loader for geocoder from geopy . This currently loads the Nominatim geocode and returns an instance of it taking ~2 us .
52,296
def heating_degree_days(T, T_base=F2K(65), truncate=True):
    r'''Degree days for average temperature `T` against the base
    temperature `T_base` (default 65 deg F), computed in this library's
    convention as ``T - T_base``; negative values are clipped to zero
    unless `truncate` is False.
    '''
    dd = T - T_base
    if truncate and dd < 0.0:
        return 0.0
    return dd
r Calculates the heating degree days for a period of time .
52,297
def cooling_degree_days(T, T_base=283.15, truncate=True):
    r'''Degree days for average temperature `T` against the base
    temperature `T_base` (default 283.15 K), computed in this library's
    convention as ``T_base - T``; negative values are clipped to zero
    unless `truncate` is False.
    '''
    dd = T_base - T
    if truncate and dd < 0.0:
        return 0.0
    return dd
r Calculates the cooling degree days for a period of time .
52,298
def get_station_year_text(WMO, WBAN, year):
    '''Download (or load from the local cache under ``data_dir``) one
    year of GSOD data for the station identified by `WMO`/`WBAN`,
    returning the raw .op file text.  Failed downloads are cached as
    the literal text 'Exception' so they are not retried; raises when
    the data cannot be obtained.
    '''
    # Missing identifiers fall back to the GSOD sentinel values.
    if WMO is None:
        WMO = 999999
    if WBAN is None:
        WBAN = 99999
    station = str(int(WMO)) + '-' + str(int(WBAN))
    gsod_year_dir = os.path.join(data_dir, 'gsod', str(year))
    path = os.path.join(gsod_year_dir, station + '.op')
    if os.path.exists(path):
        data = open(path).read()
        # 'Exception' marks a previously failed download attempt.
        if data and data != 'Exception':
            return data
        else:
            raise Exception(data)
    toget = ('ftp://ftp.ncdc.noaa.gov/pub/data/gsod/' + str(year) + '/'
             + station + '-' + str(year) + '.op.gz')
    try:
        data = urlopen(toget, timeout=5)
    except Exception as e:
        # Cache the failure so the slow network call is not repeated.
        if not os.path.exists(gsod_year_dir):
            os.makedirs(gsod_year_dir)
        open(path, 'w').write('Exception')
        raise Exception('Could not obtain desired data; check '
                        'if the year has data published for the '
                        'specified station and the station was specified '
                        'in the correct form. The full error is %s' % (e))
    data = data.read()
    data_thing = StringIO(data)
    f = gzip.GzipFile(fileobj=data_thing, mode="r")
    year_station_data = f.read()
    try:
        # Decode to text when possible (Python 3 bytes).
        year_station_data = year_station_data.decode('utf-8')
    except:
        pass
    if not os.path.exists(gsod_year_dir):
        os.makedirs(gsod_year_dir)
    # Cache the successful download for future calls.
    open(path, 'w').write(year_station_data)
    return year_station_data
Basic method to download data from the GSOD database given a station identifier and year .
52,299
def _Stichlmair_flood_f(inputs, Vl, rhog, rhol, mug, voidage,
                        specific_area, C1, C2, C3, H):
    '''Residuals of the two coupled Stichlmair flooding equations for
    the unknowns ``inputs = (Vg, dP_irr)``; internal helper for the
    flooding-point solver.  Returns ``(err1, err2)``.
    '''
    Vg, dP_irr = float(inputs[0]), float(inputs[1])
    # Characteristic packing particle diameter.
    dp = 6.0*(1.0 - voidage)/specific_area
    Re = Vg*rhog*dp/mug
    # Dry-bed friction factor and dry pressure drop.
    f0 = C1/Re + C2/Re**0.5 + C3
    dP_dry = 0.75*f0*(1.0 - voidage)/voidage**4.65*rhog*H/dp*Vg*Vg
    # Exponent c from the Reynolds sensitivity of f0.
    c = (-C1/Re - 0.5*C2*Re**-0.5)/f0
    # Liquid holdup from the liquid Froude number.
    Frl = Vl*Vl*specific_area/(g*voidage**4.65)
    h0 = 0.555*Frl**(1/3.)
    # Total holdup, increased by the irrigated pressure drop.
    hT = h0*(1.0 + 20.0*(dP_irr/H/rhol/g)**2)
    # Residual of the irrigated pressure-drop equation.
    err1 = dP_dry/H*((1.0 - voidage + hT)/(1.0
        - voidage))**((2.0 + c)/3.)*(voidage/(voidage - hT))**4.65 - dP_irr/H
    term = (dP_irr/(rhol*g*H))**2
    # Residual of the flooding condition.
    err2 = (1./term - 40.0*((2.0 + c)/3.)*h0/(1.0 - voidage
        + h0*(1.0 + 20.0*term)) - 186.0*h0/(voidage - h0*(1.0 + 20.0*term)))
    return err1, err2
Internal function which calculates the errors of the two Stichlmair objective functions and their jacobian .