| idx (int64, 0–63k) | question (string, 61–4.03k chars) | target (string, 6–1.23k chars) |
|---|---|---|
5,000
|
def generate_feature_matrix ( self , mode = 'tfidf' ) : result = [ ] for doc in self . _documents : result . append ( self . generate_document_vector ( doc , mode ) ) return result
|
Returns a feature matrix in the form of a list of lists, representing the terms and documents in this Inverted Index, using tf-idf weighting by default. The term counts in each document can alternatively be used by specifying mode='count'.
|
5,001
|
def find_class_in_list ( klass , lst ) : filtered = list ( filter ( lambda x : x . __class__ == klass , lst ) ) if filtered : return filtered [ 0 ] return None
|
Returns the first occurrence of an instance of type klass in the given list or None if no such instance is present .
|
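A small usage sketch of row 5,001 above, with the function reformatted for readability; the example values are invented for illustration. Note that the comparison is on the exact class, so subclasses do not match:

```python
def find_class_in_list(klass, lst):
    # Keep only elements whose exact class matches, then return the first one (if any).
    filtered = list(filter(lambda x: x.__class__ == klass, lst))
    if filtered:
        return filtered[0]
    return None

print(find_class_in_list(int, ["a", 3, 4.5, 7]))   # -> 3
print(find_class_in_list(bool, ["a", 3, 4.5, 7]))  # -> None (no bool instance present)
```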
5,002
|
def _build_parmlist ( self , parameters ) : args = [ ] for key , value in parameters . items ( ) : if not value is None : try : classinfo = unicode except NameError : classinfo = str if isinstance ( value , classinfo ) : key = '%s[%d]' % ( key . upper ( ) , len ( value . encode ( 'utf-8' ) ) ) else : key = '%s[%d]' % ( key . upper ( ) , len ( str ( value ) ) ) args . append ( '%s=%s' % ( key , value ) ) args . sort ( ) parmlist = '&' . join ( args ) return parmlist
|
Converts a dictionary of name and value pairs into a PARMLIST string value acceptable to the Payflow Pro API .
|
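A standalone sketch of the PARMLIST logic from row 5,002, with invented parameter values; the unicode/str branch in the original exists only for Python 2/3 compatibility and is collapsed here:

```python
def build_parmlist(parameters):
    # Each key becomes NAME[byte-length-of-value]=value; pairs are sorted and joined with '&'.
    args = []
    for key, value in parameters.items():
        if value is not None:
            key = '%s[%d]' % (key.upper(), len(str(value).encode('utf-8')))
            args.append('%s=%s' % (key, value))
    args.sort()
    return '&'.join(args)

print(build_parmlist({'amt': '10.00', 'user': 'demo', 'comment1': None}))
# -> AMT[5]=10.00&USER[4]=demo
```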
5,003
|
def from_model ( cls , model_name , ** kwargs ) : settings = _get_model_info ( model_name ) model = settings . pop ( 'model_name' ) for k , v in list ( kwargs . items ( ) ) : if k in ( 'resolution' , 'Psurf' ) : settings [ k ] = v return cls ( model , ** settings )
|
Define a grid using the specifications of a given model .
|
5,004
|
def copy_from_model ( cls , model_name , reference , ** kwargs ) : if isinstance ( reference , cls ) : settings = reference . __dict__ . copy ( ) settings . pop ( 'model' ) else : settings = _get_model_info ( reference ) settings . pop ( 'model_name' ) settings . update ( kwargs ) settings [ 'reference' ] = reference return cls ( model_name , ** settings )
|
Set up a user-defined grid using the specifications of a reference grid model.
|
5,005
|
def get_layers ( self , Psurf = 1013.25 , Ptop = 0.01 , ** kwargs ) : Psurf = np . asarray ( Psurf ) output_ndims = Psurf . ndim + 1 if output_ndims > 3 : raise ValueError ( "`Psurf` argument must be a float or an array" " with <= 2 dimensions (or None)" ) SIGe = None SIGc = None ETAe = None ETAc = None if self . hybrid : try : Ap = broadcast_1d_array ( self . Ap , output_ndims ) Bp = broadcast_1d_array ( self . Bp , output_ndims ) except KeyError : raise ValueError ( "Impossible to compute vertical levels," " data is missing (Ap, Bp)" ) Cp = 0. else : try : Bp = SIGe = broadcast_1d_array ( self . esig , output_ndims ) SIGc = broadcast_1d_array ( self . csig , output_ndims ) except KeyError : raise ValueError ( "Impossible to compute vertical levels," " data is missing (esig, csig)" ) Ap = Cp = Ptop Pe = Ap + Bp * ( Psurf - Cp ) Pc = 0.5 * ( Pe [ 0 : - 1 ] + Pe [ 1 : ] ) if self . hybrid : ETAe = ( Pe - Ptop ) / ( Psurf - Ptop ) ETAc = ( Pc - Ptop ) / ( Psurf - Ptop ) else : SIGe = SIGe * np . ones_like ( Psurf ) SIGc = SIGc * np . ones_like ( Psurf ) Ze = prof_altitude ( Pe , ** kwargs ) Zc = prof_altitude ( Pc , ** kwargs ) all_vars = { 'eta_edges' : ETAe , 'eta_centers' : ETAc , 'sigma_edges' : SIGe , 'sigma_centers' : SIGc , 'pressure_edges' : Pe , 'pressure_centers' : Pc , 'altitude_edges' : Ze , 'altitude_centers' : Zc } return all_vars
|
Compute scalars or coordinates associated with the vertical layers.
|
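The vertical-level arithmetic in row 5,005 reduces to two relations (read directly off the code, so the symbols mirror the variable names): edge and center pressures

$$P_e = A_p + B_p\,(P_{surf} - C_p), \qquad P_c = \tfrac{1}{2}\,\bigl(P_e^{(k)} + P_e^{(k+1)}\bigr),$$

and, for hybrid grids, the dimensionless eta coordinate

$$\eta = \frac{P - P_{top}}{P_{surf} - P_{top}}.$$

For pure sigma grids, $A_p = C_p = P_{top}$ and $B_p$ holds the sigma edge values.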
5,006
|
def _get_template_dirs ( ) : return filter ( lambda x : os . path . exists ( x ) , [ os . path . join ( os . path . expanduser ( '~' ) , '.py2pack' , 'templates' ) , os . path . join ( '/' , 'usr' , 'share' , 'py2pack' , 'templates' ) , os . path . join ( os . path . dirname ( os . path . abspath ( __file__ ) ) , 'templates' ) , ] )
|
Existing directories in which to search for jinja2 templates. The order is important: the first template found in the first existing directory wins!
|
5,007
|
def _license_from_classifiers ( data ) : classifiers = data . get ( 'classifiers' , [ ] ) found_license = None for c in classifiers : if c . startswith ( "License :: OSI Approved :: " ) : found_license = c . replace ( "License :: OSI Approved :: " , "" ) return found_license
|
try to get a license from the classifiers
|
5,008
|
def _normalize_license ( data ) : license = data . get ( 'license' , None ) if not license : license = _license_from_classifiers ( data ) if license : if license in SDPX_LICENSES . keys ( ) : data [ 'license' ] = SDPX_LICENSES [ license ] else : data [ 'license' ] = "%s (FIXME:No SPDX)" % ( license ) else : data [ 'license' ] = ""
|
Try to get an SPDX license.
|
5,009
|
def wrap_prompts_class ( Klass ) : try : from prompt_toolkit . token import ZeroWidthEscape except ImportError : return Klass class ITerm2IPythonPrompt ( Klass ) : def in_prompt_tokens ( self , cli = None ) : return [ ( ZeroWidthEscape , last_status ( self . shell ) + BEFORE_PROMPT ) , ] + super ( ITerm2IPythonPrompt , self ) . in_prompt_tokens ( cli ) + [ ( ZeroWidthEscape , AFTER_PROMPT ) ] return ITerm2IPythonPrompt
|
Wrap an IPython Prompt class.
|
5,010
|
def get_all_keys ( self , start = None ) : s = self . stream if not start : start = HEADER_SIZE + self . block_size * self . root_block s . seek ( start ) block_type = s . read ( 2 ) if block_type == LEAF : reader = LeafReader ( self ) num_keys = struct . unpack ( '>i' , reader . read ( 4 ) ) [ 0 ] for _ in range ( num_keys ) : cur_key = reader . read ( self . key_size ) cur_pos = s . tell ( ) yield cur_key s . seek ( cur_pos ) length = sbon . read_varint ( reader ) reader . seek ( length , 1 ) elif block_type == INDEX : ( _ , num_keys , first_child ) = struct . unpack ( '>Bii' , s . read ( 9 ) ) children = [ first_child ] for _ in range ( num_keys ) : _ = s . read ( self . key_size ) next_child = struct . unpack ( '>i' , s . read ( 4 ) ) [ 0 ] children . append ( next_child ) for child_loc in children : for key in self . get_all_keys ( HEADER_SIZE + self . block_size * child_loc ) : yield key elif block_type == FREE : pass else : raise Exception ( 'Unhandled block type: {}' . format ( block_type ) )
|
A generator that yields all valid keys, starting at the given start offset. If start is None, iteration starts from the root of the tree.
|
5,011
|
def readline ( self , fmt = None ) : prefix_size = self . _fix ( ) if fmt is None : content = self . read ( prefix_size ) else : fmt = self . endian + fmt fmt = _replace_star ( fmt , prefix_size ) content = struct . unpack ( fmt , self . read ( prefix_size ) ) try : suffix_size = self . _fix ( ) except EOFError : suffix_size = - 1 if prefix_size != suffix_size : raise IOError ( _FIX_ERROR ) return content
|
Return the next unformatted line. If a format is given, unpack the content; otherwise return a byte string.
|
5,012
|
def skipline ( self ) : position = self . tell ( ) prefix = self . _fix ( ) self . seek ( prefix , 1 ) suffix = self . _fix ( ) if prefix != suffix : raise IOError ( _FIX_ERROR ) return position , prefix
|
Skip the next line and return the position and size of the line. Raises IOError if the prefix and suffix of the line do not match.
|
5,013
|
def writelines ( self , lines , fmt ) : if isinstance ( fmt , basestring ) : fmt = [ fmt ] * len ( lines ) for f , line in zip ( fmt , lines ) : self . writeline ( f , line , self . endian )
|
Write lines with given format .
|
5,014
|
def read_varint ( stream ) : value = 0 while True : byte = ord ( stream . read ( 1 ) ) if not byte & 0b10000000 : return value << 7 | byte value = value << 7 | ( byte & 0b01111111 )
|
Read bytes while the most significant bit is set, then join the 7 least significant bits of all bytes read to form a number.
|
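A runnable sketch of the varint format handled by row 5,014 (function body copied from the row and reformatted; the example byte strings are invented):

```python
import io

def read_varint(stream):
    # Big-endian base-128 varint: the high bit of each byte is a continuation flag;
    # the final byte (high bit clear) contributes the last 7 bits of the value.
    value = 0
    while True:
        byte = ord(stream.read(1))
        if not byte & 0b10000000:
            return value << 7 | byte
        value = value << 7 | (byte & 0b01111111)

print(read_varint(io.BytesIO(b"\x05")))      # -> 5
print(read_varint(io.BytesIO(b"\x81\x00")))  # -> 128 (1 * 2**7 + 0)
```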
5,015
|
def open_bpchdataset ( filename , fields = [ ] , categories = [ ] , tracerinfo_file = 'tracerinfo.dat' , diaginfo_file = 'diaginfo.dat' , endian = ">" , decode_cf = True , memmap = True , dask = True , return_store = False ) : store = BPCHDataStore ( filename , fields = fields , categories = categories , tracerinfo_file = tracerinfo_file , diaginfo_file = diaginfo_file , endian = endian , use_mmap = memmap , dask_delayed = dask ) ds = xr . Dataset . load_store ( store ) ds . _file_obj = store . _bpch if decode_cf : decoded_vars = OrderedDict ( ) rename_dict = { } for v in ds . variables : cf_name = cf . get_valid_varname ( v ) rename_dict [ v ] = cf_name new_var = cf . enforce_cf_variable ( ds [ v ] ) decoded_vars [ cf_name ] = new_var ds = xr . Dataset ( decoded_vars , attrs = ds . attrs . copy ( ) ) ts = get_timestamp ( ) ds . attrs . update ( dict ( Conventions = 'CF1.6' , source = filename , tracerinfo = tracerinfo_file , diaginfo = diaginfo_file , filetype = store . _bpch . filetype , filetitle = store . _bpch . filetitle , history = ( "{}: Processed/loaded by xbpch-{} from {}" . format ( ts , ver , filename ) ) , ) ) if return_store : return ds , store else : return ds
|
Open GEOS-Chem BPCH file output as an xarray Dataset.
|
5,016
|
def open_mfbpchdataset ( paths , concat_dim = 'time' , compat = 'no_conflicts' , preprocess = None , lock = None , ** kwargs ) : from xarray . backends . api import _MultiFileCloser dask = kwargs . pop ( 'dask' , False ) if not dask : raise ValueError ( "Reading multiple files without dask is not supported" ) kwargs [ 'dask' ] = True if isinstance ( paths , basestring ) : paths = sorted ( glob ( paths ) ) if not paths : raise IOError ( "No paths to files were passed into open_mfbpchdataset" ) datasets = [ open_bpchdataset ( filename , ** kwargs ) for filename in paths ] bpch_objs = [ ds . _file_obj for ds in datasets ] if preprocess is not None : datasets = [ preprocess ( ds ) for ds in datasets ] combined = xr . auto_combine ( datasets , compat = compat , concat_dim = concat_dim ) combined . _file_obj = _MultiFileCloser ( bpch_objs ) combined . attrs = datasets [ 0 ] . attrs ts = get_timestamp ( ) fns_str = " " . join ( paths ) combined . attrs [ 'history' ] = ( "{}: Processed/loaded by xbpch-{} from {}" . format ( ts , ver , fns_str ) ) return combined
|
Open multiple bpch files as a single dataset .
|
5,017
|
def image_bytes ( b , filename = None , inline = 1 , width = 'auto' , height = 'auto' , preserve_aspect_ratio = None ) : if preserve_aspect_ratio is None : if width != 'auto' and height != 'auto' : preserve_aspect_ratio = False else : preserve_aspect_ratio = True data = { 'name' : base64 . b64encode ( ( filename or 'Unnamed file' ) . encode ( 'utf-8' ) ) . decode ( 'ascii' ) , 'inline' : inline , 'size' : len ( b ) , 'base64_img' : base64 . b64encode ( b ) . decode ( 'ascii' ) , 'width' : width , 'height' : height , 'preserve_aspect_ratio' : int ( preserve_aspect_ratio ) , } return IMAGE_CODE . format ( ** data ) . encode ( 'ascii' )
|
Return a byte string that, when written to the terminal, displays the image given by bytes b.
|
5,018
|
def display_image_bytes ( b , filename = None , inline = 1 , width = 'auto' , height = 'auto' , preserve_aspect_ratio = None ) : sys . stdout . buffer . write ( image_bytes ( b , filename = filename , inline = inline , width = width , height = height , preserve_aspect_ratio = preserve_aspect_ratio ) ) sys . stdout . write ( '\n' )
|
Display the image given by the bytes b in the terminal .
|
5,019
|
def display_image_file ( fn , width = 'auto' , height = 'auto' , preserve_aspect_ratio = None ) : with open ( os . path . realpath ( os . path . expanduser ( fn ) ) , 'rb' ) as f : sys . stdout . buffer . write ( image_bytes ( f . read ( ) , filename = fn , width = width , height = height , preserve_aspect_ratio = preserve_aspect_ratio ) )
|
Display an image in the terminal .
|
5,020
|
def get_entity_uuid_coords ( self , uuid ) : if uuid in self . _entity_to_region_map : coords = self . _entity_to_region_map [ uuid ] entities = self . get_entities ( * coords ) for entity in entities : if 'uniqueId' in entity . data and entity . data [ 'uniqueId' ] == uuid : return tuple ( entity . data [ 'tilePosition' ] ) return None
|
Returns the coordinates of the given entity UUID inside this world or None if the UUID is not found .
|
5,021
|
def create_fuzzy_pattern ( pattern ) : return re . compile ( ".*" . join ( map ( re . escape , pattern ) ) , re . IGNORECASE )
|
Convert a string into a fuzzy regular expression pattern .
|
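What the fuzzy pattern from row 5,021 looks like in practice (reformatted copy of the function plus invented sample strings):

```python
import re

def create_fuzzy_pattern(pattern):
    # Escape every character and allow arbitrary text between them, ignoring case.
    return re.compile(".*".join(map(re.escape, pattern)), re.IGNORECASE)

p = create_fuzzy_pattern("pgk")
print(p.pattern)                         # -> p.*g.*k
print(bool(p.search("PostgreSQL key")))  # -> True  (p, g, k appear in this order)
print(bool(p.search("mysql")))           # -> False
```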
5,022
|
def fuzzy_search ( self , * filters ) : matches = [ ] logger . verbose ( "Performing fuzzy search on %s (%s) .." , pluralize ( len ( filters ) , "pattern" ) , concatenate ( map ( repr , filters ) ) ) patterns = list ( map ( create_fuzzy_pattern , filters ) ) for entry in self . filtered_entries : if all ( p . search ( entry . name ) for p in patterns ) : matches . append ( entry ) logger . log ( logging . INFO if matches else logging . VERBOSE , "Matched %s using fuzzy search." , pluralize ( len ( matches ) , "password" ) , ) return matches
|
Perform a fuzzy search that matches the given characters in the given order .
|
5,023
|
def select_entry ( self , * arguments ) : matches = self . smart_search ( * arguments ) if len ( matches ) > 1 : logger . info ( "More than one match, prompting for choice .." ) labels = [ entry . name for entry in matches ] return matches [ labels . index ( prompt_for_choice ( labels ) ) ] else : logger . info ( "Matched one entry: %s" , matches [ 0 ] . name ) return matches [ 0 ]
|
Select a password from the available choices .
|
5,024
|
def simple_search ( self , * keywords ) : matches = [ ] keywords = [ kw . lower ( ) for kw in keywords ] logger . verbose ( "Performing simple search on %s (%s) .." , pluralize ( len ( keywords ) , "keyword" ) , concatenate ( map ( repr , keywords ) ) , ) for entry in self . filtered_entries : normalized = entry . name . lower ( ) if all ( kw in normalized for kw in keywords ) : matches . append ( entry ) logger . log ( logging . INFO if matches else logging . VERBOSE , "Matched %s using simple search." , pluralize ( len ( matches ) , "password" ) , ) return matches
|
Perform a simple search for case-insensitive substring matches.
|
5,025
|
def smart_search ( self , * arguments ) : matches = self . simple_search ( * arguments ) if not matches : logger . verbose ( "Falling back from substring search to fuzzy search .." ) matches = self . fuzzy_search ( * arguments ) if not matches : if len ( self . filtered_entries ) > 0 : raise NoMatchingPasswordError ( format ( "No passwords matched the given arguments! (%s)" , concatenate ( map ( repr , arguments ) ) ) ) else : msg = "You don't have any passwords yet! (no *.gpg files found)" raise EmptyPasswordStoreError ( msg ) return matches
|
Perform a smart search on the given keywords or patterns .
|
5,026
|
def get_diaginfo ( diaginfo_file ) : widths = [ rec . width for rec in diag_recs ] col_names = [ rec . name for rec in diag_recs ] dtypes = [ rec . type for rec in diag_recs ] usecols = [ name for name in col_names if not name . startswith ( '-' ) ] diag_df = pd . read_fwf ( diaginfo_file , widths = widths , names = col_names , dtypes = dtypes , comment = "#" , header = None , usecols = usecols ) diag_desc = { diag . name : diag . desc for diag in diag_recs if not diag . name . startswith ( '-' ) } return diag_df , diag_desc
|
Read an output's diaginfo.dat file and parse it into a DataFrame for use in selecting and parsing categories.
|
5,027
|
def get_tracerinfo ( tracerinfo_file ) : widths = [ rec . width for rec in tracer_recs ] col_names = [ rec . name for rec in tracer_recs ] dtypes = [ rec . type for rec in tracer_recs ] usecols = [ name for name in col_names if not name . startswith ( '-' ) ] tracer_df = pd . read_fwf ( tracerinfo_file , widths = widths , names = col_names , dtypes = dtypes , comment = "#" , header = None , usecols = usecols ) na_free = tracer_df . dropna ( subset = [ 'tracer' , 'scale' ] ) only_na = tracer_df [ ~ tracer_df . index . isin ( na_free . index ) ] if len ( only_na ) > 0 : warn ( "At least one row in {} wasn't decoded correctly; we strongly" " recommend you manually check that file to see that all" " tracers are properly recorded." . format ( tracerinfo_file ) ) tracer_desc = { tracer . name : tracer . desc for tracer in tracer_recs if not tracer . name . startswith ( '-' ) } def _assign_hydrocarbon ( row ) : if row [ 'C' ] != 1 : row [ 'hydrocarbon' ] = True row [ 'molwt' ] = C_MOLECULAR_WEIGHT else : row [ 'hydrocarbon' ] = False return row tracer_df = ( tracer_df . apply ( _assign_hydrocarbon , axis = 1 ) . assign ( chemical = lambda x : x [ 'molwt' ] . astype ( bool ) ) ) return tracer_df , tracer_desc
|
Read an output's tracerinfo.dat file and parse it into a DataFrame for use in selecting and parsing categories.
|
5,028
|
def read_from_bpch ( filename , file_position , shape , dtype , endian , use_mmap = False ) : offset = file_position + 4 if use_mmap : d = np . memmap ( filename , dtype = dtype , mode = 'r' , shape = shape , offset = offset , order = 'F' ) else : with FortranFile ( filename , 'rb' , endian ) as ff : ff . seek ( file_position ) d = np . array ( ff . readline ( '*f' ) ) d = d . reshape ( shape , order = 'F' ) if ( d . shape != shape ) : raise IOError ( "Data chunk read from {} does not have the right shape," " (expected {} but got {})" . format ( filename , shape , d . shape ) ) return d
|
Read a chunk of data from a bpch output file .
|
5,029
|
def _read ( self ) : if self . _dask : d = da . from_delayed ( delayed ( read_from_bpch , ) ( self . filename , self . file_position , self . shape , self . dtype , self . endian , use_mmap = self . _mmap ) , self . shape , self . dtype ) else : d = read_from_bpch ( self . filename , self . file_position , self . shape , self . dtype , self . endian , use_mmap = self . _mmap ) return d
|
Helper function to load the data referenced by this bundle .
|
5,030
|
def close ( self ) : if not self . fp . closed : for v in list ( self . var_data ) : del self . var_data [ v ] self . fp . close ( )
|
Close this bpch file .
|
5,031
|
def _read_metadata ( self ) : filetype = self . fp . readline ( ) . strip ( ) filetitle = self . fp . readline ( ) . strip ( ) try : filetype = str ( filetype , 'utf-8' ) filetitle = str ( filetitle , 'utf-8' ) except : pass self . __setattr__ ( 'filetype' , filetype ) self . __setattr__ ( 'filetitle' , filetitle )
|
Read the main metadata packaged within a bpch file indicating the output filetype and its title .
|
5,032
|
def _read_var_data ( self ) : var_bundles = OrderedDict ( ) var_attrs = OrderedDict ( ) n_vars = 0 while self . fp . tell ( ) < self . fsize : var_attr = OrderedDict ( ) line = self . fp . readline ( '20sffii' ) modelname , res0 , res1 , halfpolar , center180 = line line = self . fp . readline ( '40si40sdd40s7i' ) category_name , number , unit , tau0 , tau1 , reserved = line [ : 6 ] dim0 , dim1 , dim2 , dim3 , dim4 , dim5 , skip = line [ 6 : ] var_attr [ 'number' ] = number category_name = str ( category_name , 'utf-8' ) var_attr [ 'category' ] = category_name . strip ( ) unit = str ( unit , 'utf-8' ) try : cat_df = self . diaginfo_df [ self . diaginfo_df . name == category_name . strip ( ) ] cat = cat_df . T . squeeze ( ) tracer_num = int ( cat . offset ) + int ( number ) diag_df = self . tracerinfo_df [ self . tracerinfo_df . tracer == tracer_num ] diag = diag_df . T . squeeze ( ) diag_attr = diag . to_dict ( ) if not unit . strip ( ) : unit = diag_attr [ 'unit' ] var_attr . update ( diag_attr ) except : diag = { 'name' : '' , 'scale' : 1 } var_attr . update ( diag ) var_attr [ 'unit' ] = unit vname = diag [ 'name' ] fullname = category_name . strip ( ) + "_" + vname if dim2 == 1 : data_shape = ( dim0 , dim1 ) else : data_shape = ( dim0 , dim1 , dim2 ) var_attr [ 'original_shape' ] = data_shape data_shape = tuple ( [ 1 , ] + list ( data_shape ) ) origin = ( dim3 , dim4 , dim5 ) var_attr [ 'origin' ] = origin timelo , timehi = cf . tau2time ( tau0 ) , cf . tau2time ( tau1 ) pos = self . fp . tell ( ) var_bundle = BPCHDataBundle ( data_shape , self . endian , self . filename , pos , [ timelo , timehi ] , metadata = var_attr , use_mmap = self . use_mmap , dask_delayed = self . dask_delayed ) self . fp . skipline ( ) if fullname in var_bundles : var_bundles [ fullname ] . append ( var_bundle ) else : var_bundles [ fullname ] = [ var_bundle , ] var_attrs [ fullname ] = var_attr n_vars += 1 self . var_data = var_bundles self . var_attrs = var_attrs
|
Iterate over the blocks of this bpch file and return handlers, in the form of BPCHDataBundles, for access to the data contained therein.
|
5,033
|
def get_timestamp ( time = True , date = True , fmt = None ) : time_format = "%H:%M:%S" date_format = "%m-%d-%Y" if fmt is None : if time and date : fmt = time_format + " " + date_format elif time : fmt = time_format elif date : fmt = date_format else : raise ValueError ( "One of `date` or `time` must be True!" ) return datetime . now ( ) . strftime ( fmt )
|
Return the current timestamp in machine local time .
|
5,034
|
def fix_attr_encoding ( ds ) : def _maybe_del_attr ( da , attr ) : if attr in da . attrs : del da . attrs [ attr ] return da def _maybe_decode_attr ( da , attr ) : if ( attr in da . attrs ) and ( type ( da . attrs [ attr ] == bool ) ) : da . attrs [ attr ] = int ( da . attrs [ attr ] ) return da for v in ds . data_vars : da = ds [ v ] da = _maybe_del_attr ( da , 'scale_factor' ) da = _maybe_del_attr ( da , 'units' ) da = _maybe_decode_attr ( da , 'hydrocarbon' ) da = _maybe_decode_attr ( da , 'chemical' ) if hasattr ( ds , 'time' ) : times = ds . time times = _maybe_del_attr ( times , 'units' ) return ds
|
This is a temporary hot-fix to handle the way metadata is encoded when we read data directly from bpch files. It removes the scale_factor and units attributes we encode with the data we ingest, converts the hydrocarbon and chemical attributes to binary integers instead of booleans, and removes the units attribute from the time dimension, since that too is implicitly encoded.
|
5,035
|
def after_output ( command_status ) : if command_status not in range ( 256 ) : raise ValueError ( "command_status must be an integer in the range 0-255" ) sys . stdout . write ( AFTER_OUTPUT . format ( command_status = command_status ) ) sys . stdout . flush ( )
|
Shell sequence to be run after the command output .
|
5,036
|
def enforce_cf_variable ( var , mask_and_scale = True ) : var = as_variable ( var ) data = var . _data dims = var . dims attrs = var . attrs . copy ( ) encoding = var . encoding . copy ( ) orig_dtype = data . dtype if 'scale' in attrs : scale = attrs . pop ( 'scale' ) attrs [ 'scale_factor' ] = scale encoding [ 'scale_factor' ] = scale if mask_and_scale : data = scale * data if 'unit' in attrs : unit = attrs . pop ( 'unit' ) unit = get_cfcompliant_units ( unit ) attrs [ 'units' ] = unit return Variable ( dims , data , attrs , encoding = encoding )
|
Given a Variable constructed from GEOS - Chem output enforce CF - compliant metadata and formatting .
|
5,037
|
def published ( self , check_language = True , language = None , kwargs = None , exclude_kwargs = None ) : if check_language : qs = NewsEntry . objects . language ( language or get_language ( ) ) . filter ( is_published = True ) else : qs = self . get_queryset ( ) qs = qs . filter ( models . Q ( pub_date__lte = now ( ) ) | models . Q ( pub_date__isnull = True ) ) if kwargs is not None : qs = qs . filter ( ** kwargs ) if exclude_kwargs is not None : qs = qs . exclude ( ** exclude_kwargs ) return qs . distinct ( ) . order_by ( '-pub_date' )
|
Returns all entries whose publication date has passed or which have no date, and whose language matches the current language.
|
5,038
|
def recent ( self , check_language = True , language = None , limit = 3 , exclude = None , kwargs = None , category = None ) : if category : if not kwargs : kwargs = { } kwargs [ 'categories__in' ] = [ category ] qs = self . published ( check_language = check_language , language = language , kwargs = kwargs ) if exclude : qs = qs . exclude ( pk = exclude . pk ) return qs [ : limit ]
|
Returns recently published news entries.
|
5,039
|
def get_newsentry_meta_description ( newsentry ) : if newsentry . meta_description : return newsentry . meta_description text = newsentry . get_description ( ) if len ( text ) > 160 : return u'{}...' . format ( text [ : 160 ] ) return text
|
Returns the meta description for the given entry .
|
5,040
|
def _requirement_filter_by_marker ( req ) : if hasattr ( req , 'marker' ) and req . marker : marker_env = { 'python_version' : '.' . join ( map ( str , sys . version_info [ : 2 ] ) ) , 'sys_platform' : sys . platform } if not req . marker . evaluate ( environment = marker_env ) : return False return True
|
Check whether the requirement's environment marker (if any) is satisfied by the current Python version and platform.
|
5,041
|
def _requirement_find_lowest_possible ( req ) : version_dep = None version_comp = None for dep in req . specs : version = pkg_resources . parse_version ( dep [ 1 ] ) if dep [ 0 ] == '!=' : continue if ( not version_dep or version < pkg_resources . parse_version ( version_dep ) ) : version_dep = dep [ 1 ] version_comp = dep [ 0 ] assert ( version_dep is None and version_comp is None ) or ( version_dep is not None and version_comp is not None ) return [ x for x in ( req . unsafe_name , version_comp , version_dep ) if x is not None ]
|
Find lowest required version .
|
5,042
|
def _ensure_coroutine_function ( func ) : if asyncio . iscoroutinefunction ( func ) : return func else : @ asyncio . coroutine def coroutine_function ( evt ) : func ( evt ) yield return coroutine_function
|
Return func if it is already a coroutine function; otherwise wrap it in one.
|
5,043
|
def location ( self ) : if self . _location is None : self . _location = "{}/{}-{}" . format ( self . stream , self . type , self . sequence , ) return self . _location
|
Return a string uniquely identifying the event .
|
5,044
|
async def find_backwards ( self , stream_name , predicate , predicate_label = 'predicate' ) : logger = self . _logger . getChild ( predicate_label ) logger . info ( 'Fetching first matching event' ) uri = self . _head_uri try : page = await self . _fetcher . fetch ( uri ) except HttpNotFoundError as e : raise StreamNotFoundError ( ) from e while True : evt = next ( page . iter_events_matching ( predicate ) , None ) if evt is not None : return evt uri = page . get_link ( "next" ) if uri is None : logger . warning ( "No matching event found" ) return None page = await self . _fetcher . fetch ( uri )
|
Return first event matching predicate or None if none exists .
|
5,045
|
def main ( ) : coloredlogs . install ( ) action = show_matching_entry program_opts = dict ( exclude_list = [ ] ) show_opts = dict ( filters = [ ] , use_clipboard = is_clipboard_supported ( ) ) verbosity = 0 try : options , arguments = getopt . gnu_getopt ( sys . argv [ 1 : ] , "elnp:f:x:vqh" , [ "edit" , "list" , "no-clipboard" , "password-store=" , "filter=" , "exclude=" , "verbose" , "quiet" , "help" ] , ) for option , value in options : if option in ( "-e" , "--edit" ) : action = edit_matching_entry elif option in ( "-l" , "--list" ) : action = list_matching_entries elif option in ( "-n" , "--no-clipboard" ) : show_opts [ "use_clipboard" ] = False elif option in ( "-p" , "--password-store" ) : stores = program_opts . setdefault ( "stores" , [ ] ) stores . append ( PasswordStore ( directory = value ) ) elif option in ( "-f" , "--filter" ) : show_opts [ "filters" ] . append ( value ) elif option in ( "-x" , "--exclude" ) : program_opts [ "exclude_list" ] . append ( value ) elif option in ( "-v" , "--verbose" ) : coloredlogs . increase_verbosity ( ) verbosity += 1 elif option in ( "-q" , "--quiet" ) : coloredlogs . decrease_verbosity ( ) verbosity -= 1 elif option in ( "-h" , "--help" ) : usage ( __doc__ ) return else : raise Exception ( "Unhandled option! (programming error)" ) if not ( arguments or action == list_matching_entries ) : usage ( __doc__ ) return except Exception as e : warning ( "Error: %s" , e ) sys . exit ( 1 ) try : show_opts [ "quiet" ] = verbosity < 0 kw = show_opts if action == show_matching_entry else { } action ( QuickPass ( ** program_opts ) , arguments , ** kw ) except PasswordStoreError as e : logger . error ( "%s" , e ) sys . exit ( 1 ) except KeyboardInterrupt : sys . exit ( 1 )
|
Command line interface for the qpass program .
|
5,046
|
def edit_matching_entry ( program , arguments ) : entry = program . select_entry ( * arguments ) entry . context . execute ( "pass" , "edit" , entry . name )
|
Edit the matching entry .
|
5,047
|
def SVGdocument ( ) : "Create default SVG document" import xml . dom . minidom implementation = xml . dom . minidom . getDOMImplementation ( ) doctype = implementation . createDocumentType ( "svg" , "-//W3C//DTD SVG 1.1//EN" , "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd" ) document = implementation . createDocument ( None , "svg" , doctype ) document . documentElement . setAttribute ( 'xmlns' , 'http://www.w3.org/2000/svg' ) return document
|
Create default SVG document
|
5,048
|
def polyline ( document , coords ) : "polyline with more then 2 vertices" points = [ ] for i in range ( 0 , len ( coords ) , 2 ) : points . append ( "%s,%s" % ( coords [ i ] , coords [ i + 1 ] ) ) return setattribs ( document . createElement ( 'polyline' ) , points = ' ' . join ( points ) , )
|
Polyline with more than 2 vertices.
|
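A quick sketch combining rows 5,047 and 5,048: SVGdocument is assumed to be in scope as shown above, and the original setattribs helper (not part of this excerpt) is replaced by a direct setAttribute call:

```python
def polyline(document, coords):
    # Pair the flat coordinate list into "x,y" points and build a <polyline> element.
    points = ["%s,%s" % (coords[i], coords[i + 1]) for i in range(0, len(coords), 2)]
    element = document.createElement('polyline')
    element.setAttribute('points', ' '.join(points))
    return element

doc = SVGdocument()  # from row 5,047
doc.documentElement.appendChild(polyline(doc, [0, 0, 10, 20, 30, 5]))
print(doc.documentElement.toxml())
# -> <svg xmlns="http://www.w3.org/2000/svg"><polyline points="0,0 10,20 30,5"/></svg>
```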
5,049
|
def cubic_bezier ( document , coords ) : "cubic bezier polyline" element = document . createElement ( 'path' ) points = [ ( coords [ i ] , coords [ i + 1 ] ) for i in range ( 0 , len ( coords ) , 2 ) ] path = [ "M%s %s" % points [ 0 ] ] for n in xrange ( 1 , len ( points ) , 3 ) : A , B , C = points [ n : n + 3 ] path . append ( "C%s,%s %s,%s %s,%s" % ( A [ 0 ] , A [ 1 ] , B [ 0 ] , B [ 1 ] , C [ 0 ] , C [ 1 ] ) ) element . setAttribute ( 'd' , ' ' . join ( path ) ) return element
|
cubic bezier polyline
|
5,050
|
def smoothpolygon ( document , coords ) : "smoothed filled polygon" element = document . createElement ( 'path' ) path = [ ] points = [ ( coords [ i ] , coords [ i + 1 ] ) for i in range ( 0 , len ( coords ) , 2 ) ] def pt ( points ) : p = points n = len ( points ) for i in range ( 0 , len ( points ) ) : a = p [ ( i - 1 ) % n ] b = p [ i ] c = p [ ( i + 1 ) % n ] yield lerp ( a , b , 0.5 ) , b , lerp ( b , c , 0.5 ) for i , ( A , B , C ) in enumerate ( pt ( points ) ) : if i == 0 : path . append ( "M%s,%s Q%s,%s %s,%s" % ( A [ 0 ] , A [ 1 ] , B [ 0 ] , B [ 1 ] , C [ 0 ] , C [ 1 ] ) ) else : path . append ( "T%s,%s" % ( C [ 0 ] , C [ 1 ] ) ) path . append ( "z" ) element . setAttribute ( 'd' , ' ' . join ( path ) ) return element
|
smoothed filled polygon
|
5,051
|
def font_actual ( tkapp , font ) : "actual font parameters" tmp = tkapp . call ( 'font' , 'actual' , font ) return dict ( ( tmp [ i ] [ 1 : ] , tmp [ i + 1 ] ) for i in range ( 0 , len ( tmp ) , 2 ) )
|
actual font parameters
|
5,052
|
def parse_dash ( string , width ) : "parse dash pattern specified with string" w = max ( 1 , int ( width + 0.5 ) ) n = len ( string ) result = [ ] for i , c in enumerate ( string ) : if c == " " and len ( result ) : result [ - 1 ] += w + 1 elif c == "_" : result . append ( 8 * w ) result . append ( 4 * w ) elif c == "-" : result . append ( 6 * w ) result . append ( 4 * w ) elif c == "," : result . append ( 4 * w ) result . append ( 4 * w ) elif c == "." : result . append ( 2 * w ) result . append ( 4 * w ) return result
|
parse dash pattern specified with string
|
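The dash parser in row 5,052 appears to translate a Tk-style dash specification into stroke-dasharray-like on/off lengths scaled by the line width; a reformatted, slightly simplified sketch with invented inputs:

```python
def parse_dash(string, width):
    # "_" -> long dash, "-" -> dash, "," -> medium dot, "." -> dot; " " widens the previous gap.
    w = max(1, int(width + 0.5))
    result = []
    for c in string:
        if c == " " and result:
            result[-1] += w + 1
        elif c == "_":
            result += [8 * w, 4 * w]
        elif c == "-":
            result += [6 * w, 4 * w]
        elif c == ",":
            result += [4 * w, 4 * w]
        elif c == ".":
            result += [2 * w, 4 * w]
    return result

print(parse_dash("-.", 2))  # -> [12, 8, 4, 8]
print(parse_dash("- ", 2))  # -> [12, 11]  (trailing space widens the last gap)
```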
5,053
|
def prof_altitude ( pressure , p_coef = ( - 0.028389 , - 0.0493698 , 0.485718 , 0.278656 , - 17.5703 , 48.0926 ) ) : pressure = np . asarray ( pressure ) altitude = np . polyval ( p_coef , np . log10 ( pressure . flatten ( ) ) ) return altitude . reshape ( pressure . shape )
|
Return altitude for given pressure .
|
5,054
|
def prof_pressure ( altitude , z_coef = ( 1.94170e-9 , - 5.14580e-7 , 4.57018e-5 , - 1.55620e-3 , - 4.61994e-2 , 2.99955 ) ) : altitude = np . asarray ( altitude ) pressure = np . power ( 10 , np . polyval ( z_coef , altitude . flatten ( ) ) ) return pressure . reshape ( altitude . shape )
|
Return pressure for given altitude .
|
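Rows 5,053 and 5,054 are independent polynomial fits between pressure and altitude (the units are presumably hPa and km); a brief round-trip sketch, assuming numpy and the two functions above are available in scope:

```python
import numpy as np

pressures = np.array([1000.0, 500.0, 100.0, 10.0])  # presumably hPa
altitudes = prof_altitude(pressures)                 # polynomial in log10(pressure)
recovered = prof_pressure(altitudes)                 # 10 ** polynomial(altitude)
# The two fits were made independently, so the round trip is only approximate.
print(np.round(recovered, 1))
```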
5,055
|
def _get_model_info ( model_name ) : split_name = re . split ( r'[\-_\s]' , model_name . strip ( ) . upper ( ) ) sep_chars = ( '' , ' ' , '-' , '_' ) gen_seps = itertools . combinations_with_replacement ( sep_chars , len ( split_name ) - 1 ) test_names = ( "" . join ( ( n for n in itertools . chain ( * list ( zip ( split_name , s + ( '' , ) ) ) ) ) ) for s in gen_seps ) match_names = list ( [ name for name in test_names if name in _get_supported_models ( ) ] ) if not len ( match_names ) : raise ValueError ( "Model '{0}' is not supported" . format ( model_name ) ) elif len ( match_names ) > 1 : raise ValueError ( "Multiple matched models for given model name '{0}'" . format ( model_name ) ) valid_model_name = match_names [ 0 ] parent_models = _find_references ( valid_model_name ) model_spec = dict ( ) for m in parent_models : model_spec . update ( MODELS [ m ] ) model_spec . pop ( 'reference' ) model_spec [ 'model_family' ] = parent_models [ 0 ] model_spec [ 'model_name' ] = valid_model_name return model_spec
|
Get the grid specifications for a given model .
|
5,056
|
def _get_archive_filelist ( filename ) : names = [ ] if tarfile . is_tarfile ( filename ) : with tarfile . open ( filename ) as tar_file : names = sorted ( tar_file . getnames ( ) ) elif zipfile . is_zipfile ( filename ) : with zipfile . ZipFile ( filename ) as zip_file : names = sorted ( zip_file . namelist ( ) ) else : raise ValueError ( "Can not get filenames from '{!s}'. " "Not a tar or zip file" . format ( filename ) ) if "./" in names : names . remove ( "./" ) return names
|
Extract the list of files from a tar or zip archive .
|
5,057
|
def _augment_book ( self , uuid , event ) : try : if not isbnmeta : self . log ( "No isbntools found! Install it to get full " "functionality!" , lvl = warn ) return new_book = objectmodels [ 'book' ] . find_one ( { 'uuid' : uuid } ) try : if len ( new_book . isbn ) != 0 : self . log ( 'Got a lookup candidate: ' , new_book . _fields ) try : meta = isbnmeta ( new_book . isbn , service = self . config . isbnservice ) mapping = libraryfieldmapping [ self . config . isbnservice ] new_meta = { } for key in meta . keys ( ) : if key in mapping : if isinstance ( mapping [ key ] , tuple ) : name , conv = mapping [ key ] try : new_meta [ name ] = conv ( meta [ key ] ) except ValueError : self . log ( 'Bad value from lookup:' , name , conv , key ) else : new_meta [ mapping [ key ] ] = meta [ key ] new_book . update ( new_meta ) new_book . save ( ) self . _notify_result ( event , new_book ) self . log ( "Book successfully augmented from " , self . config . isbnservice ) except Exception as e : self . log ( "Error during meta lookup: " , e , type ( e ) , new_book . isbn , lvl = error , exc = True ) error_response = { 'component' : 'hfos.alert.manager' , 'action' : 'notify' , 'data' : { 'type' : 'error' , 'message' : 'Could not look up metadata, sorry:' + str ( e ) } } self . log ( event , event . client , pretty = True ) self . fireEvent ( send ( event . client . uuid , error_response ) ) except Exception as e : self . log ( "Error during book update." , e , type ( e ) , exc = True , lvl = error ) except Exception as e : self . log ( "Book creation notification error: " , uuid , e , type ( e ) , lvl = error , exc = True )
|
Checks if the newly created object is a book that has only an ISBN. If so, tries to fetch the book's data from the internet.
|
5,058
|
def opened ( self , * args ) : self . _serial_open = True self . log ( "Opened: " , args , lvl = debug ) self . _send_command ( b'l,1' ) self . log ( "Turning off engine, pump and neutralizing rudder" ) self . _send_command ( b'v' ) self . _handle_servo ( self . _machine_channel , 0 ) self . _handle_servo ( self . _rudder_channel , 127 ) self . _set_digital_pin ( self . _pump_channel , 0 ) self . _send_command ( b'l,0' ) self . _send_command ( b'm,HFOS Control' )
|
Initiates communication with the remote controlled device .
|
5,059
|
def on_machinerequest ( self , event ) : self . log ( "Updating new machine power: " , event . controlvalue ) self . _handle_servo ( self . _machine_channel , event . controlvalue )
|
Sets a new machine speed .
|
5,060
|
def on_rudderrequest ( self , event ) : self . log ( "Updating new rudder angle: " , event . controlvalue ) self . _handle_servo ( self . _rudder_channel , event . controlvalue )
|
Sets a new rudder angle .
|
5,061
|
def on_pumprequest ( self , event ) : self . log ( "Updating pump status: " , event . controlvalue ) self . _set_digital_pin ( self . _pump_channel , event . controlvalue )
|
Activates or deactivates a connected pump .
|
5,062
|
def provisionList ( items , database_name , overwrite = False , clear = False , skip_user_check = False ) : log ( 'Provisioning' , items , database_name , lvl = debug ) system_user = None def get_system_user ( ) : user = objectmodels [ 'user' ] . find_one ( { 'name' : 'System' } ) try : log ( 'System user uuid: ' , user . uuid , lvl = verbose ) return user . uuid except AttributeError as e : log ( 'No system user found:' , e , lvl = warn ) log ( 'Please install the user provision to setup a system user or check your database configuration' , lvl = error ) return False def needs_owner ( obj ) : for privilege in obj . _fields . get ( 'perms' , None ) : if 'owner' in obj . _fields [ 'perms' ] [ privilege ] : return True return False import pymongo from hfos . database import objectmodels , dbhost , dbport , dbname database_object = objectmodels [ database_name ] log ( dbhost , dbname ) client = pymongo . MongoClient ( dbhost , dbport ) db = client [ dbname ] if not skip_user_check : system_user = get_system_user ( ) if not system_user : return else : system_user = '0ba87daa-d315-462e-9f2e-6091d768fd36' col_name = database_object . collection_name ( ) if clear is True : log ( "Clearing collection for" , col_name , lvl = warn ) db . drop_collection ( col_name ) counter = 0 for no , item in enumerate ( items ) : new_object = None item_uuid = item [ 'uuid' ] log ( "Validating object (%i/%i):" % ( no + 1 , len ( items ) ) , item_uuid , lvl = debug ) if database_object . count ( { 'uuid' : item_uuid } ) > 0 : log ( 'Object already present' , lvl = warn ) if overwrite is False : log ( "Not updating item" , item , lvl = warn ) else : log ( "Overwriting item: " , item_uuid , lvl = warn ) new_object = database_object . find_one ( { 'uuid' : item_uuid } ) new_object . _fields . update ( item ) else : new_object = database_object ( item ) if new_object is not None : try : if needs_owner ( new_object ) : if not hasattr ( new_object , 'owner' ) : log ( 'Adding system owner to object.' , lvl = verbose ) new_object . owner = system_user except Exception as e : log ( 'Error during ownership test:' , e , type ( e ) , exc = True , lvl = error ) try : new_object . validate ( ) new_object . save ( ) counter += 1 except ValidationError as e : raise ValidationError ( "Could not provision object: " + str ( item_uuid ) , e ) log ( "Provisioned %i out of %i items successfully." % ( counter , len ( items ) ) )
|
Provisions a list of items according to their schema
|
5,063
|
def DefaultExtension ( schema_obj , form_obj , schemata = None ) : if schemata is None : schemata = [ 'systemconfig' , 'profile' , 'client' ] DefaultExtends = { 'schema' : { "properties/modules" : [ schema_obj ] } , 'form' : { 'modules' : { 'items/' : form_obj } } } output = { } for schema in schemata : output [ schema ] = DefaultExtends return output
|
Create a default schema and form extension for the given schemata.
|
5,064
|
def copytree ( root_src_dir , root_dst_dir , hardlink = True ) : for src_dir , dirs , files in os . walk ( root_src_dir ) : dst_dir = src_dir . replace ( root_src_dir , root_dst_dir , 1 ) if not os . path . exists ( dst_dir ) : os . makedirs ( dst_dir ) for file_ in files : src_file = os . path . join ( src_dir , file_ ) dst_file = os . path . join ( dst_dir , file_ ) try : if os . path . exists ( dst_file ) : if hardlink : hfoslog ( 'Removing frontend link:' , dst_file , emitter = 'BUILDER' , lvl = verbose ) os . remove ( dst_file ) else : hfoslog ( 'Overwriting frontend file:' , dst_file , emitter = 'BUILDER' , lvl = verbose ) hfoslog ( 'Hardlinking ' , src_file , dst_dir , emitter = 'BUILDER' , lvl = verbose ) if hardlink : os . link ( src_file , dst_file ) else : copy ( src_file , dst_dir ) except PermissionError as e : hfoslog ( " No permission to remove/create target %s for " "frontend:" % ( 'link' if hardlink else 'copy' ) , dst_dir , e , emitter = 'BUILDER' , lvl = error ) except Exception as e : hfoslog ( "Error during" , 'link' if hardlink else 'copy' , "creation:" , type ( e ) , e , emitter = 'BUILDER' , lvl = error ) hfoslog ( 'Done linking' , root_dst_dir , emitter = 'BUILDER' , lvl = verbose )
|
Copies a whole directory tree
|
5,065
|
def delete ( ctx , componentname ) : col = ctx . obj [ 'col' ] if col . count ( { 'name' : componentname } ) > 1 : log ( 'More than one component configuration of this name! Try ' 'one of the uuids as argument. Get a list with "config ' 'list"' ) return log ( 'Deleting component configuration' , componentname , emitter = 'MANAGE' ) configuration = col . find_one ( { 'name' : componentname } ) if configuration is None : configuration = col . find_one ( { 'uuid' : componentname } ) if configuration is None : log ( 'Component configuration not found:' , componentname , emitter = 'MANAGE' ) return configuration . delete ( ) log ( 'Done' )
|
Delete an existing component configuration . This will trigger the creation of its default configuration upon next restart .
|
5,066
|
def show ( ctx , component ) : col = ctx . obj [ 'col' ] if col . count ( { 'name' : component } ) > 1 : log ( 'More than one component configuration of this name! Try ' 'one of the uuids as argument. Get a list with "config ' 'list"' ) return if component is None : configurations = col . find ( ) for configuration in configurations : log ( "%-15s : %s" % ( configuration . name , configuration . uuid ) , emitter = 'MANAGE' ) else : configuration = col . find_one ( { 'name' : component } ) if configuration is None : configuration = col . find_one ( { 'uuid' : component } ) if configuration is None : log ( 'No component with that name or uuid found.' ) return print ( json . dumps ( configuration . serializablefields ( ) , indent = 4 ) )
|
Show the stored active configuration of a component .
|
5,067
|
def debugrequest ( self , event ) : try : self . log ( "Event: " , event . __dict__ , lvl = critical ) if event . data == "storejson" : self . log ( "Storing received object to /tmp" , lvl = critical ) fp = open ( '/tmp/hfosdebugger_' + str ( event . user . useruuid ) + "_" + str ( uuid4 ( ) ) , "w" ) json . dump ( event . data , fp , indent = True ) fp . close ( ) if event . data == "memdebug" : self . log ( "Memory hogs:" , lvl = critical ) objgraph . show_most_common_types ( limit = 20 ) if event . data == "growth" : self . log ( "Memory growth since last call:" , lvl = critical ) objgraph . show_growth ( ) if event . data == "graph" : self . _drawgraph ( ) if event . data == "exception" : class TestException ( BaseException ) : pass raise TestException if event . data == "heap" : self . log ( "Heap log:" , self . heapy . heap ( ) , lvl = critical ) if event . data == "buildfrontend" : self . log ( "Sending frontend build command" ) self . fireEvent ( frontendbuildrequest ( force = True ) , "setup" ) if event . data == "logtail" : self . fireEvent ( logtailrequest ( event . user , None , None , event . client ) , "logger" ) if event . data == "trigger_anchorwatch" : from hfos . anchor . anchorwatcher import cli_trigger_anchorwatch self . fireEvent ( cli_trigger_anchorwatch ( ) ) except Exception as e : self . log ( "Exception during debug handling:" , e , type ( e ) , lvl = critical )
|
Handler for client-side debug requests.
|
5,068
|
def register_event ( self , event ) : self . log ( 'Registering event hook:' , event . cmd , event . thing , pretty = True , lvl = verbose ) self . hooks [ event . cmd ] = event . thing
|
Registers a new command-line interface event hook as a command.
|
5,069
|
def populate_user_events ( ) : global AuthorizedEvents global AnonymousEvents def inheritors ( klass ) : subclasses = { } subclasses_set = set ( ) work = [ klass ] while work : parent = work . pop ( ) for child in parent . __subclasses__ ( ) : if child not in subclasses_set : name = child . __module__ + "." + child . __name__ if name . startswith ( 'hfos' ) : subclasses_set . add ( child ) event = { 'event' : child , 'name' : name , 'doc' : child . __doc__ , 'args' : [ ] } if child . __module__ in subclasses : subclasses [ child . __module__ ] [ child . __name__ ] = event else : subclasses [ child . __module__ ] = { child . __name__ : event } work . append ( child ) return subclasses AuthorizedEvents = inheritors ( authorizedevent ) AnonymousEvents = inheritors ( anonymousevent )
|
Generate a list of all registered authorized and anonymous events
|
5,070
|
def clear ( ctx , schema ) : response = _ask ( 'Are you sure you want to delete the collection "%s"' % ( schema ) , default = 'N' , data_type = 'bool' ) if response is True : host , port = ctx . obj [ 'dbhost' ] . split ( ':' ) client = pymongo . MongoClient ( host = host , port = int ( port ) ) database = client [ ctx . obj [ 'dbname' ] ] log ( "Clearing collection for" , schema , lvl = warn , emitter = 'MANAGE' ) result = database . drop_collection ( schema ) if not result [ 'ok' ] : log ( "Could not drop collection:" , lvl = error ) log ( result , pretty = True , lvl = error ) else : log ( "Done" )
|
Clears an entire database collection irrevocably . Use with caution!
|
5,071
|
def provision_system_config ( items , database_name , overwrite = False , clear = False , skip_user_check = False ) : from hfos . provisions . base import provisionList from hfos . database import objectmodels default_system_config_count = objectmodels [ 'systemconfig' ] . count ( { 'name' : 'Default System Configuration' } ) if default_system_config_count == 0 or ( clear or overwrite ) : provisionList ( [ SystemConfiguration ] , 'systemconfig' , overwrite , clear , skip_user_check ) hfoslog ( 'Provisioning: System: Done.' , emitter = 'PROVISIONS' ) else : hfoslog ( 'Default system configuration already present.' , lvl = warn , emitter = 'PROVISIONS' )
|
Provision a basic system configuration
|
5,072
|
def userlogin ( self , event ) : try : user_uuid = event . useruuid user = objectmodels [ 'user' ] . find_one ( { 'uuid' : user_uuid } ) if user_uuid not in self . lastlogs : self . log ( 'Setting up lastlog for a new user.' , lvl = debug ) lastlog = objectmodels [ 'chatlastlog' ] ( { 'owner' : user_uuid , 'uuid' : std_uuid ( ) , 'channels' : { } } ) lastlog . save ( ) self . lastlogs [ user_uuid ] = lastlog self . users [ user_uuid ] = user self . user_attention [ user_uuid ] = None self . _send_status ( user_uuid , event . clientuuid ) except Exception as e : self . log ( 'Error during chat setup of user:' , e , type ( e ) , exc = True )
|
Provides the newly authenticated user with a backlog and general channel status information
|
5,073
|
def install_docs ( instance , clear_target ) : _check_root ( ) def make_docs ( ) : log ( "Generating HTML documentation" ) try : build = Popen ( [ 'make' , 'html' ] , cwd = 'docs/' ) build . wait ( ) except Exception as e : log ( "Problem during documentation building: " , e , type ( e ) , exc = True , lvl = error ) return False return True make_docs ( ) target = os . path . join ( '/var/lib/hfos' , instance , 'frontend/docs' ) source = 'docs/build/html' log ( "Updating documentation directory:" , target ) if not os . path . exists ( os . path . join ( os . path . curdir , source ) ) : log ( "Documentation not existing yet. Run python setup.py " "build_sphinx first." , lvl = error ) return if os . path . exists ( target ) : log ( "Path already exists: " + target ) if clear_target : log ( "Cleaning up " + target , lvl = warn ) shutil . rmtree ( target ) log ( "Copying docs to " + target ) copy_tree ( source , target ) log ( "Done: Install Docs" )
|
Builds and installs the complete HFOS documentation .
|
5,074
|
def install_modules ( wip ) : def install_module ( hfos_module ) : try : setup = Popen ( [ sys . executable , 'setup.py' , 'develop' ] , cwd = 'modules/' + hfos_module + "/" ) setup . wait ( ) except Exception as e : log ( "Problem during module installation: " , hfos_module , e , type ( e ) , exc = True , lvl = error ) return False return True modules_production = [ 'navdata' , 'alert' , 'automat' , 'busrepeater' , 'calendar' , 'countables' , 'dash' , 'enrol' , 'mail' , 'maps' , 'nmea' , 'nodestate' , 'project' , 'webguides' , 'wiki' ] modules_wip = [ 'calc' , 'camera' , 'chat' , 'comms' , 'contacts' , 'crew' , 'equipment' , 'filemanager' , 'garden' , 'heroic' , 'ldap' , 'library' , 'logbook' , 'protocols' , 'polls' , 'mesh' , 'robot' , 'switchboard' , 'shareables' , ] installables = modules_production if wip : installables . extend ( modules_wip ) success = [ ] failed = [ ] for installable in installables : log ( 'Installing module ' , installable ) if install_module ( installable ) : success . append ( installable ) else : failed . append ( installable ) log ( 'Installed modules: ' , success ) if len ( failed ) > 0 : log ( 'Failed modules: ' , failed ) log ( 'Done: Install Modules' )
|
Install the plugin modules
|
5,075
|
def install_cert ( selfsigned ) : _check_root ( ) if selfsigned : log ( 'Generating self signed (insecure) certificate/key ' 'combination' ) try : os . mkdir ( '/etc/ssl/certs/hfos' ) except FileExistsError : pass except PermissionError : log ( "Need root (e.g. via sudo) to generate ssl certificate" ) sys . exit ( 1 ) def create_self_signed_cert ( ) : k = crypto . PKey ( ) k . generate_key ( crypto . TYPE_RSA , 1024 ) if os . path . exists ( cert_file ) : try : certificate = open ( cert_file , "rb" ) . read ( ) old_cert = crypto . load_certificate ( crypto . FILETYPE_PEM , certificate ) serial = old_cert . get_serial_number ( ) + 1 except ( crypto . Error , OSError ) as e : log ( 'Could not read old certificate to increment ' 'serial:' , type ( e ) , e , exc = True , lvl = warn ) serial = 1 else : serial = 1 certificate = crypto . X509 ( ) certificate . get_subject ( ) . C = "DE" certificate . get_subject ( ) . ST = "Berlin" certificate . get_subject ( ) . L = "Berlin" certificate . get_subject ( ) . O = "Hackerfleet" certificate . get_subject ( ) . OU = "Hackerfleet" certificate . get_subject ( ) . CN = gethostname ( ) certificate . set_serial_number ( serial ) certificate . gmtime_adj_notBefore ( 0 ) certificate . gmtime_adj_notAfter ( 10 * 365 * 24 * 60 * 60 ) certificate . set_issuer ( certificate . get_subject ( ) ) certificate . set_pubkey ( k ) certificate . sign ( k , b'sha512' ) open ( key_file , "wt" ) . write ( str ( crypto . dump_privatekey ( crypto . FILETYPE_PEM , k ) , encoding = "ASCII" ) ) open ( cert_file , "wt" ) . write ( str ( crypto . dump_certificate ( crypto . FILETYPE_PEM , certificate ) , encoding = "ASCII" ) ) open ( combined_file , "wt" ) . write ( str ( crypto . dump_certificate ( crypto . FILETYPE_PEM , certificate ) , encoding = "ASCII" ) + str ( crypto . dump_privatekey ( crypto . FILETYPE_PEM , k ) , encoding = "ASCII" ) ) create_self_signed_cert ( ) log ( 'Done: Install Cert' ) else : log ( 'Not implemented yet. You can build your own certificate and ' 'store it in /etc/ssl/certs/hfos/server-cert.pem - it should ' 'be a certificate with key, as this is used server side and ' 'there is no way to enter a separate key.' , lvl = error )
|
Install a local SSL certificate
|
5,076
|
def frontend ( ctx , dev , rebuild , no_install , build_type ) : install_frontend ( instance = ctx . obj [ 'instance' ] , forcerebuild = rebuild , development = dev , install = not no_install , build_type = build_type )
|
Build and install frontend
|
5,077
|
def install_all ( ctx , clear_all ) : _check_root ( ) instance = ctx . obj [ 'instance' ] dbhost = ctx . obj [ 'dbhost' ] dbname = ctx . obj [ 'dbname' ] port = ctx . obj [ 'port' ] install_system_user ( ) install_cert ( selfsigned = True ) install_var ( instance , clear_target = clear_all , clear_all = clear_all ) install_modules ( wip = False ) install_provisions ( provision = None , clear_provisions = clear_all ) install_docs ( instance , clear_target = clear_all ) install_service ( instance , dbhost , dbname , port ) install_nginx ( instance , dbhost , dbname , port ) log ( 'Done' )
|
Default: install everything installable.
|
5,078
|
def uninstall ( ) : _check_root ( ) response = _ask ( "This will delete all data of your HFOS installations! Type" "YES to continue:" , default = "N" , show_hint = False ) if response == 'YES' : shutil . rmtree ( '/var/lib/hfos' ) shutil . rmtree ( '/var/cache/hfos' )
|
Uninstall data and resource locations
|
5,079
|
def update ( ctx , no_restart , no_rebuild ) : instance = ctx . obj [ 'instance' ] log ( 'Pulling github updates' ) run_process ( '.' , [ 'git' , 'pull' , 'origin' , 'master' ] ) run_process ( './frontend' , [ 'git' , 'pull' , 'origin' , 'master' ] ) if not no_rebuild : log ( 'Rebuilding frontend' ) install_frontend ( instance , forcerebuild = True , install = False , development = True ) if not no_restart : log ( 'Restaring service' ) if instance != 'hfos' : instance = 'hfos-' + instance run_process ( '.' , [ 'sudo' , 'systemctl' , 'restart' , instance ] ) log ( 'Done' )
|
Update a HFOS node
|
5,080
|
def _build_model_factories ( store ) : result = { } for schemaname in store : schema = None try : schema = store [ schemaname ] [ 'schema' ] except KeyError : schemata_log ( "No schema found for " , schemaname , lvl = critical , exc = True ) try : result [ schemaname ] = warmongo . model_factory ( schema ) except Exception as e : schemata_log ( "Could not create factory for schema " , schemaname , schema , lvl = critical , exc = True ) return result
|
Generate factories to construct objects from schemata
|
5,081
|
def _build_collections ( store ) : result = { } client = pymongo . MongoClient ( host = dbhost , port = dbport ) db = client [ dbname ] for schemaname in store : schema = None indices = None try : schema = store [ schemaname ] [ 'schema' ] indices = store [ schemaname ] . get ( 'indices' , None ) except KeyError : db_log ( "No schema found for " , schemaname , lvl = critical ) try : result [ schemaname ] = db [ schemaname ] except Exception : db_log ( "Could not get collection for schema " , schemaname , schema , lvl = critical , exc = True ) if indices is not None : col = db [ schemaname ] db_log ( 'Adding indices to' , schemaname , lvl = debug ) i = 0 keys = list ( indices . keys ( ) ) while i < len ( indices ) : index_name = keys [ i ] index = indices [ index_name ] index_type = index . get ( 'type' , None ) index_unique = index . get ( 'unique' , False ) index_sparse = index . get ( 'sparse' , True ) index_reindex = index . get ( 'reindex' , False ) if index_type in ( None , 'text' ) : index_type = pymongo . TEXT elif index_type == '2dsphere' : index_type = pymongo . GEOSPHERE def do_index ( ) : col . ensure_index ( [ ( index_name , index_type ) ] , unique = index_unique , sparse = index_sparse ) db_log ( 'Enabling index of type' , index_type , 'on' , index_name , lvl = debug ) try : do_index ( ) i += 1 except pymongo . errors . OperationFailure : db_log ( col . list_indexes ( ) . __dict__ , pretty = True , lvl = verbose ) if not index_reindex : db_log ( 'Index was not created!' , lvl = warn ) i += 1 else : try : col . drop_index ( index_name ) do_index ( ) i += 1 except pymongo . errors . OperationFailure as e : db_log ( 'Index recreation problem:' , exc = True , lvl = error ) col . drop_indexes ( ) i = 0 return result
|
Generate database collections with indices from the schemastore
|
5,082
|
def initialize ( address = '127.0.0.1:27017' , database_name = 'hfos' , instance_name = "default" , reload = False ) : global schemastore global l10n_schemastore global objectmodels global collections global dbhost global dbport global dbname global instance global initialized if initialized and not reload : hfoslog ( 'Already initialized and not reloading.' , lvl = warn , emitter = "DB" , frame_ref = 2 ) return dbhost = address . split ( ':' ) [ 0 ] dbport = int ( address . split ( ":" ) [ 1 ] ) if ":" in address else 27017 dbname = database_name db_log ( "Using database:" , dbname , '@' , dbhost , ':' , dbport ) try : client = pymongo . MongoClient ( host = dbhost , port = dbport ) db = client [ dbname ] db_log ( "Database: " , db . command ( 'buildinfo' ) , lvl = debug ) except Exception as e : db_log ( "No database available! Check if you have mongodb > 3.0 " "installed and running as well as listening on port 27017 " "of localhost. (Error: %s) -> EXIT" % e , lvl = critical ) sys . exit ( 5 ) warmongo . connect ( database_name ) schemastore = _build_schemastore_new ( ) l10n_schemastore = _build_l10n_schemastore ( schemastore ) objectmodels = _build_model_factories ( schemastore ) collections = _build_collections ( schemastore ) instance = instance_name initialized = True
|
Initializes the database connectivity schemata and finally object models
|
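A stand-alone sketch of the address handling and connectivity check performed above; the address and database name are placeholders.

import pymongo

address = "127.0.0.1:27017"                                    # placeholder
host = address.split(":")[0]
port = int(address.split(":")[1]) if ":" in address else 27017

client = pymongo.MongoClient(host=host, port=port)
build_info = client["hfos"].command("buildinfo")               # same sanity check as above
print("Connected to MongoDB", build_info.get("version"))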
5,083
|
def profile ( schemaname = 'sensordata' , profiletype = 'pjs' ) : db_log ( "Profiling " , schemaname ) schema = schemastore [ schemaname ] [ 'schema' ] db_log ( "Schema: " , schema , lvl = debug ) testclass = None if profiletype == 'warmongo' : db_log ( "Running Warmongo benchmark" ) testclass = warmongo . model_factory ( schema ) elif profiletype == 'pjs' : db_log ( "Running PJS benchmark" ) try : import python_jsonschema_objects as pjs except ImportError : db_log ( "PJS benchmark selected but not available. Install " "python_jsonschema_objects (PJS)" ) return db_log ( ) builder = pjs . ObjectBuilder ( schema ) ns = builder . build_classes ( ) pprint ( ns ) testclass = ns [ schemaname ] db_log ( "ns: " , ns , lvl = warn ) if testclass is not None : db_log ( "Instantiating elements..." ) for i in range ( 100 ) : testclass ( ) else : db_log ( "No Profiletype available!" ) db_log ( "Profiling done" )
|
Profiles object model handling with a very simple benchmarking test
|
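A minimal sketch of the PJS branch, assuming python_jsonschema_objects is installed; the toy schema and its title are placeholders rather than a real HFOS schema.

import python_jsonschema_objects as pjs

schema = {
    "title": "Sensordata",                        # placeholder schema
    "type": "object",
    "properties": {"value": {"type": "number"}},
}

ns = pjs.ObjectBuilder(schema).build_classes()
Sensordata = ns.Sensordata
obj = Sensordata(value=1.5)                       # instantiation, as benchmarked above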
5,084
|
def _check_collections ( self ) : self . collection_sizes = { } self . collection_total = 0 for col in self . db . collection_names ( include_system_collections = False ) : self . collection_sizes [ col ] = self . db . command ( 'collstats' , col ) . get ( 'storageSize' , 0 ) self . collection_total += self . collection_sizes [ col ] sorted_x = sorted ( self . collection_sizes . items ( ) , key = operator . itemgetter ( 1 ) ) for item in sorted_x : self . log ( "Collection size (%s): %.2f MB" % ( item [ 0 ] , item [ 1 ] / 1024.0 / 1024 ) , lvl = verbose ) self . log ( "Total collection sizes: %.2f MB" % ( self . collection_total / 1024.0 / 1024 ) )
|
Checks node local collection storage sizes
|
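A stand-alone sketch of the same collstats-based size check, assuming a reachable MongoDB; list_collection_names is the modern replacement for collection_names.

import pymongo

db = pymongo.MongoClient(host="127.0.0.1", port=27017)["hfos"]   # placeholder database

total = 0
for name in db.list_collection_names():
    size = db.command("collstats", name).get("storageSize", 0)
    total += size
    print("%-30s %10.2f MB" % (name, size / 1024.0 / 1024))

print("Total collection sizes: %.2f MB" % (total / 1024.0 / 1024))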
5,085
|
def _check_free_space ( self ) : def get_folder_size ( path ) : total_size = 0 for item in walk ( path ) : for file in item [ 2 ] : try : total_size = total_size + getsize ( join ( item [ 0 ] , file ) ) except ( OSError , PermissionError ) as e : self . log ( "error with file: " + join ( item [ 0 ] , file ) , e ) return total_size for name , checkpoint in self . config . locations . items ( ) : try : stats = statvfs ( checkpoint [ 'location' ] ) except ( OSError , PermissionError ) as e : self . log ( 'Location unavailable:' , name , e , type ( e ) , lvl = error , exc = True ) continue free_space = stats . f_frsize * stats . f_bavail used_space = get_folder_size ( checkpoint [ 'location' ] ) / 1024.0 / 1024 self . log ( 'Location %s uses %.2f MB' % ( name , used_space ) ) if free_space < checkpoint [ 'minimum' ] : self . log ( 'Short of free space on %s: %.2f GB left' % ( name , free_space / 1024.0 / 1024 / 1024 ) , lvl = warn )
|
Checks used filesystem storage sizes
|
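A stdlib-only sketch of the two measurements above (statvfs is Unix-only); the path is a placeholder.

from os import statvfs, walk
from os.path import getsize, join

path = "/var/lib/hfos"                        # placeholder location

stats = statvfs(path)
free_bytes = stats.f_frsize * stats.f_bavail  # free space in bytes

used_bytes = 0
for root, _dirs, files in walk(path):         # recursive folder size
    for name in files:
        try:
            used_bytes += getsize(join(root, name))
        except OSError:
            pass                              # unreadable files are skipped

print("free: %.2f GB, used: %.2f MB" % (free_bytes / 1024.0 ** 3, used_bytes / 1024.0 ** 2))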
5,086
|
def send_mail_worker ( config , mail , event ) : log = "" try : if config . mail_ssl : server = SMTP_SSL ( config . mail_server , port = config . mail_server_port , timeout = 30 ) else : server = SMTP ( config . mail_server , port = config . mail_server_port , timeout = 30 ) if config . mail_tls : log += 'Starting TLS\n' server . starttls ( ) if config . mail_username != '' : log += 'Logging in with ' + str ( config . mail_username ) + "\n" server . login ( config . mail_username , config . mail_password ) else : log += 'No username, trying anonymous access\n' log += 'Sending Mail\n' response_send = server . send_message ( mail ) server . quit ( ) except timeout as e : log += 'Could not send email to enrollee, mailserver timeout: ' + str ( e ) + "\n" return False , log , event log += 'Server response:' + str ( response_send ) return True , log , event
|
Worker task to send out an email which blocks the process unless it is threaded
|
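A sketch of the STARTTLS path taken above; server, port, credentials and addresses are placeholders.

from email.message import EmailMessage
from smtplib import SMTP

mail = EmailMessage()
mail["From"] = "noreply@example.org"
mail["To"] = "enrollee@example.org"
mail["Subject"] = "Enrollment"
mail.set_content("Welcome aboard!")

with SMTP("mail.example.org", port=587, timeout=30) as server:
    server.starttls()                          # TLS upgrade, as when mail_tls is set
    server.login("username", "password")       # skipped when no username is configured
    response = server.send_message(mail)       # dict of refused recipients, empty on success
    print("Server response:", response)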
5,087
|
def reload_configuration ( self , event ) : super ( EnrolManager , self ) . reload_configuration ( event ) self . log ( 'Reloaded configuration.' ) self . _setup ( )
|
Reload the current configuration and set up everything depending on it
|
5,088
|
def change ( self , event ) : uuid = event . data [ 'uuid' ] status = event . data [ 'status' ] if status not in [ 'Open' , 'Pending' , 'Accepted' , 'Denied' , 'Resend' ] : self . log ( 'Erroneous status for enrollment requested!' , lvl = warn ) return self . log ( 'Changing status of an enrollment' , uuid , 'to' , status ) enrollment = objectmodels [ 'enrollment' ] . find_one ( { 'uuid' : uuid } ) if enrollment is not None : self . log ( 'Enrollment found' , lvl = debug ) else : return if status == 'Resend' : enrollment . timestamp = std_now ( ) enrollment . save ( ) self . _send_invitation ( enrollment , event ) reply = { True : 'Resent' } else : enrollment . status = status enrollment . save ( ) reply = { True : enrollment . serializablefields ( ) } if status == 'Accepted' and enrollment . method == 'Enrolled' : self . _create_user ( enrollment . name , enrollment . password , enrollment . email , 'Invited' , event . client . uuid ) self . _send_acceptance ( enrollment , None , event ) packet = { 'component' : 'hfos.enrol.enrolmanager' , 'action' : 'change' , 'data' : reply } self . log ( 'packet:' , packet , lvl = verbose ) self . fireEvent ( send ( event . client . uuid , packet ) ) self . log ( 'Enrollment changed' , lvl = debug )
|
An admin user requests a change to an enrolment
|
5,089
|
def changepassword ( self , event ) : old = event . data [ 'old' ] new = event . data [ 'new' ] uuid = event . user . uuid user = objectmodels [ 'user' ] . find_one ( { 'uuid' : uuid } ) if std_hash ( old , self . salt ) == user . passhash : user . passhash = std_hash ( new , self . salt ) user . save ( ) packet = { 'component' : 'hfos.enrol.enrolmanager' , 'action' : 'changepassword' , 'data' : True } self . fireEvent ( send ( event . client . uuid , packet ) ) self . log ( 'Successfully changed password for user' , uuid ) else : packet = { 'component' : 'hfos.enrol.enrolmanager' , 'action' : 'changepassword' , 'data' : False } self . fireEvent ( send ( event . client . uuid , packet ) ) self . log ( 'User tried to change password without supplying old one' , lvl = warn )
|
An enrolled user wants to change their password
|
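std_hash above is a project helper whose implementation is not shown here; the stand-in below only illustrates the salted-hash comparison pattern and is not the actual function.

import hashlib

def salted_hash(password, salt):
    # illustrative stand-in, not the project's std_hash
    return hashlib.sha512((password + salt).encode("utf-8")).hexdigest()

salt = "pepper"                                    # placeholder salt
stored = salted_hash("old-secret", salt)

assert salted_hash("old-secret", salt) == stored   # old password verifies
stored = salted_hash("new-secret", salt)           # store the new hash on success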
5,090
|
def invite ( self , event ) : self . log ( 'Inviting new user to enrol' ) name = event . data [ 'name' ] email = event . data [ 'email' ] method = event . data [ 'method' ] self . _invite ( name , method , email , event . client . uuid , event )
|
A new user has been invited to enrol by an admin user
|
5,091
|
def enrol ( self , event ) : if self . config . allow_registration is False : self . log ( 'Someone tried to register although enrolment is closed.' ) return self . log ( 'Client trying to register a new account:' , event , pretty = True ) uuid = event . client . uuid if uuid in self . captchas and event . data . get ( 'captcha' , None ) == self . captchas [ uuid ] [ 'text' ] : self . log ( 'Captcha solved!' ) else : self . log ( 'Captcha failed!' ) self . _fail ( event , _ ( 'You did not solve the captcha correctly.' , event ) ) self . _generate_captcha ( event ) return mail = event . data . get ( 'mail' , None ) if mail is None : self . _fail ( event , _ ( 'You have to supply all required fields.' , event ) ) return elif not validate_email ( mail ) : self . _fail ( event , _ ( 'The supplied email address seems invalid' , event ) ) return if objectmodels [ 'user' ] . count ( { 'mail' : mail } ) > 0 : self . _fail ( event , _ ( 'Your mail address cannot be used.' , event ) ) return password = event . data . get ( 'password' , None ) if password is None or len ( password ) < 5 : self . _fail ( event , _ ( 'Your password is not long enough.' , event ) ) return username = event . data . get ( 'username' , None ) if username is None or len ( username ) < 1 : self . _fail ( event , _ ( 'Your username is not long enough.' , event ) ) return elif ( objectmodels [ 'user' ] . count ( { 'name' : username } ) > 0 ) or ( objectmodels [ 'enrollment' ] . count ( { 'name' : username } ) > 0 ) : self . _fail ( event , _ ( 'The username you supplied is not available.' , event ) ) return self . log ( 'Provided data is good to enrol.' ) if self . config . no_verify : self . _create_user ( username , password , mail , 'Enrolled' , uuid ) else : self . _invite ( username , 'Enrolled' , mail , uuid , event , password )
|
A user tries to self - enrol with the enrolment form
|
5,092
|
def status ( self , event ) : self . log ( 'Registration status requested' ) response = { 'component' : 'hfos.enrol.enrolmanager' , 'action' : 'status' , 'data' : self . config . allow_registration } self . fire ( send ( event . client . uuid , response ) )
|
An anonymous client wants to know if we're open for enrollment
|
5,093
|
def request_reset ( self , event ) : self . log ( 'Password reset request received:' , event . __dict__ , lvl = hilight ) user_object = objectmodels [ 'user' ] email = event . data . get ( 'email' , None ) email_user = None if email is not None and user_object . count ( { 'mail' : email } ) > 0 : email_user = user_object . find_one ( { 'mail' : email } ) if email_user is None : self . _fail ( event , msg = "Mail address unknown" ) return
|
An anonymous client requests a password reset
|
5,094
|
def captcha_transmit ( self , captcha , uuid ) : self . log ( 'Transmitting captcha' ) response = { 'component' : 'hfos.enrol.enrolmanager' , 'action' : 'captcha' , 'data' : b64encode ( captcha [ 'image' ] . getvalue ( ) ) . decode ( 'utf-8' ) } self . fire ( send ( uuid , response ) )
|
Delayed transmission of a requested captcha
|
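A sketch of the payload encoding used above; the image bytes are a placeholder for the rendered captcha.

from base64 import b64encode
from io import BytesIO

image = BytesIO(b"\x89PNG\r\n\x1a\n...")               # placeholder image bytes
payload = b64encode(image.getvalue()).decode("utf-8")  # same encoding as the response above
print(payload)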
5,095
|
def _invite ( self , name , method , email , uuid , event , password = "" ) : props = { 'uuid' : std_uuid ( ) , 'status' : 'Open' , 'name' : name , 'method' : method , 'email' : email , 'password' : password , 'timestamp' : std_now ( ) } enrollment = objectmodels [ 'enrollment' ] ( props ) enrollment . save ( ) self . log ( 'Enrollment stored' , lvl = debug ) self . _send_invitation ( enrollment , event ) packet = { 'component' : 'hfos.enrol.enrolmanager' , 'action' : 'invite' , 'data' : [ True , email ] } self . fireEvent ( send ( uuid , packet ) )
|
Actually invite a given user
|
5,096
|
def _create_user ( self , username , password , mail , method , uuid ) : try : if method == 'Invited' : config_role = self . config . group_accept_invited else : config_role = self . config . group_accept_enrolled roles = [ ] if ',' in config_role : for item in config_role . split ( ',' ) : roles . append ( item . lstrip ( ) . rstrip ( ) ) else : roles = [ config_role ] newuser = objectmodels [ 'user' ] ( { 'name' : username , 'passhash' : std_hash ( password , self . salt ) , 'mail' : mail , 'uuid' : std_uuid ( ) , 'roles' : roles , 'created' : std_now ( ) } ) if method == 'Invited' : newuser . needs_password_change = True newuser . save ( ) except Exception as e : self . log ( "Problem creating new user: " , type ( e ) , e , lvl = error ) return try : newprofile = objectmodels [ 'profile' ] ( { 'uuid' : std_uuid ( ) , 'owner' : newuser . uuid } ) self . log ( "New profile uuid: " , newprofile . uuid , lvl = verbose ) newprofile . save ( ) packet = { 'component' : 'hfos.enrol.enrolmanager' , 'action' : 'enrol' , 'data' : [ True , mail ] } self . fireEvent ( send ( uuid , packet ) ) except Exception as e : self . log ( "Problem creating new profile: " , type ( e ) , e , lvl = error )
|
Create a new user and all initial data
|
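The role parsing above in isolation: a comma-separated config value becomes a list of stripped role names; the config value is a placeholder.

config_role = "crew, navigator"   # placeholder config value

if "," in config_role:
    roles = [item.strip() for item in config_role.split(",")]
else:
    roles = [config_role]

print(roles)                      # ['crew', 'navigator']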
5,097
|
def _send_invitation ( self , enrollment , event ) : self . log ( 'Sending enrollment status mail to user' ) self . _send_mail ( self . config . invitation_subject , self . config . invitation_mail , enrollment , event )
|
Send an invitation mail to an open enrolment
|
5,098
|
def _send_acceptance ( self , enrollment , password , event ) : self . log ( 'Sending acceptance status mail to user' ) if password : password_hint = '\n\nPS: Your new password is ' + password + ' - please change it after your first login!' acceptance_text = self . config . acceptance_mail + password_hint else : acceptance_text = self . config . acceptance_mail self . _send_mail ( self . config . acceptance_subject , acceptance_text , enrollment , event )
|
Send an acceptance mail to an open enrolment
|
5,099
|
def add_auth_hook ( self , event ) : self . log ( 'Adding authentication hook for' , event . authenticator_name ) self . auth_hooks [ event . authenticator_name ] = event . event
|
Register an event hook on reception of an add_auth_hook event
|