idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
5,000
def generate_feature_matrix(self, mode='tfidf'):
    """Return a feature matrix for this inverted index.

    The matrix is a list of per-document vectors, one per entry in
    ``self._documents`` (in document order), each built by
    ``self.generate_document_vector``.

    :param mode: weighting scheme forwarded to
        ``generate_document_vector`` ('tfidf' by default).
    :returns: list of document feature vectors.
    """
    # Comprehension instead of a manual append loop (same order, same result).
    return [self.generate_document_vector(doc, mode) for doc in self._documents]
Returns a feature matrix in the form of a list of lists which represents the terms and documents in this Inverted Index, using the tf-idf weighting by default. The term counts in each document can alternatively be used by specifying mode = count.
5,001
def find_class_in_list(klass, lst):
    """Return the first element of *lst* whose exact type is *klass*.

    Uses exact class comparison (``x.__class__ == klass``), so instances
    of subclasses do NOT match. Returns None when no element matches.

    Short-circuits on the first hit instead of filtering the whole list
    (the original built the full filtered list just to take element 0).
    """
    return next((x for x in lst if x.__class__ == klass), None)
Returns the first occurrence of an instance of type klass in the given list or None if no such instance is present .
5,002
def _build_parmlist ( self , parameters ) : args = [ ] for key , value in parameters . items ( ) : if not value is None : try : classinfo = unicode except NameError : classinfo = str if isinstance ( value , classinfo ) : key = '%s[%d]' % ( key . upper ( ) , len ( value . encode ( 'utf-8' ) ) ) else : key = '%s[%d]' % (...
Converts a dictionary of name and value pairs into a PARMLIST string value acceptable to the Payflow Pro API .
5,003
def from_model(cls, model_name, **kwargs):
    """Define a grid using the specifications of a given model.

    Only 'resolution' and 'Psurf' may be overridden through keyword
    arguments; every other setting comes from the model definition.
    """
    settings = _get_model_info(model_name)
    model = settings.pop('model_name')
    # Apply only the whitelisted overrides; ignore any other kwargs.
    for key in ('resolution', 'Psurf'):
        if key in kwargs:
            settings[key] = kwargs[key]
    return cls(model, **settings)
Define a grid using the specifications of a given model .
5,004
def copy_from_model ( cls , model_name , reference , ** kwargs ) : if isinstance ( reference , cls ) : settings = reference . __dict__ . copy ( ) settings . pop ( 'model' ) else : settings = _get_model_info ( reference ) settings . pop ( 'model_name' ) settings . update ( kwargs ) settings [ 'reference' ] = reference r...
Set - up a user - defined grid using specifications of a reference grid model .
5,005
def get_layers ( self , Psurf = 1013.25 , Ptop = 0.01 , ** kwargs ) : Psurf = np . asarray ( Psurf ) output_ndims = Psurf . ndim + 1 if output_ndims > 3 : raise ValueError ( "`Psurf` argument must be a float or an array" " with <= 2 dimensions (or None)" ) SIGe = None SIGc = None ETAe = None ETAc = None if self . hybri...
Compute scalars or coordinates associated to the vertical layers .
5,006
def _get_template_dirs ( ) : return filter ( lambda x : os . path . exists ( x ) , [ os . path . join ( os . path . expanduser ( '~' ) , '.py2pack' , 'templates' ) , os . path . join ( '/' , 'usr' , 'share' , 'py2pack' , 'templates' ) , os . path . join ( os . path . dirname ( os . path . abspath ( __file__ ) ) , 'temp...
Existing directories to search for jinja2 templates. The order is important: the first template found in the first matching directory wins!
5,007
def _license_from_classifiers ( data ) : classifiers = data . get ( 'classifiers' , [ ] ) found_license = None for c in classifiers : if c . startswith ( "License :: OSI Approved :: " ) : found_license = c . replace ( "License :: OSI Approved :: " , "" ) return found_license
try to get a license from the classifiers
5,008
def _normalize_license ( data ) : license = data . get ( 'license' , None ) if not license : license = _license_from_classifiers ( data ) if license : if license in SDPX_LICENSES . keys ( ) : data [ 'license' ] = SDPX_LICENSES [ license ] else : data [ 'license' ] = "%s (FIXME:No SPDX)" % ( license ) else : data [ 'lic...
try to get SDPX license
5,009
def wrap_prompts_class ( Klass ) : try : from prompt_toolkit . token import ZeroWidthEscape except ImportError : return Klass class ITerm2IPythonPrompt ( Klass ) : def in_prompt_tokens ( self , cli = None ) : return [ ( ZeroWidthEscape , last_status ( self . shell ) + BEFORE_PROMPT ) , ] + super ( ITerm2IPythonPrompt ,...
Wrap an IPython Prompt class
5,010
def get_all_keys ( self , start = None ) : s = self . stream if not start : start = HEADER_SIZE + self . block_size * self . root_block s . seek ( start ) block_type = s . read ( 2 ) if block_type == LEAF : reader = LeafReader ( self ) num_keys = struct . unpack ( '>i' , reader . read ( 4 ) ) [ 0 ] for _ in range ( num...
A generator which yields a list of all valid keys starting at the given start offset . If start is None we will start from the root of the tree .
5,011
def readline ( self , fmt = None ) : prefix_size = self . _fix ( ) if fmt is None : content = self . read ( prefix_size ) else : fmt = self . endian + fmt fmt = _replace_star ( fmt , prefix_size ) content = struct . unpack ( fmt , self . read ( prefix_size ) ) try : suffix_size = self . _fix ( ) except EOFError : suffi...
Return next unformatted line . If format is given unpack content otherwise return byte string .
5,012
def skipline(self):
    """Skip the next unformatted line.

    Returns the (position, size) of the skipped line. Raises IOError
    when the Fortran record's size prefix and suffix disagree.
    """
    start = self.tell()
    size = self._fix()
    # Jump over the record payload, relative to the current position.
    self.seek(size, 1)
    if size != self._fix():
        raise IOError(_FIX_ERROR)
    return start, size
Skip the next line and returns position and size of line . Raises IOError if pre - and suffix of line do not match .
5,013
def writelines(self, lines, fmt):
    """Write multiple unformatted lines with the given format(s).

    *fmt* may be a single format string (applied to every line) or a
    sequence of format strings, one per line.

    NOTE(review): uses ``basestring`` — Python 2 only; confirm the
    surrounding module still targets Python 2.
    """
    if isinstance(fmt, basestring):
        # Replicate the single format once per line.
        fmt = [fmt] * len(lines)
    for line_fmt, line in zip(fmt, lines):
        self.writeline(line_fmt, line, self.endian)
Write lines with given format .
5,014
def read_varint(stream):
    """Decode a base-128 varint from *stream*.

    Bytes are consumed while their most significant bit is set; the 7
    low-order bits of every byte are concatenated (first byte most
    significant) into the returned integer.
    """
    CONT, PAYLOAD = 0x80, 0x7F
    result = 0
    while True:
        byte = ord(stream.read(1))
        if byte & CONT:
            # Continuation byte: fold in its payload and keep reading.
            result = (result << 7) | (byte & PAYLOAD)
        else:
            # Final byte (MSB clear) terminates the number.
            return (result << 7) | byte
Read while the most significant bit is set then put the 7 least significant bits of all read bytes together to create a number .
5,015
def open_bpchdataset ( filename , fields = [ ] , categories = [ ] , tracerinfo_file = 'tracerinfo.dat' , diaginfo_file = 'diaginfo.dat' , endian = ">" , decode_cf = True , memmap = True , dask = True , return_store = False ) : store = BPCHDataStore ( filename , fields = fields , categories = categories , tracerinfo_fil...
Open a GEOS - Chem BPCH file output as an xarray Dataset .
5,016
def open_mfbpchdataset ( paths , concat_dim = 'time' , compat = 'no_conflicts' , preprocess = None , lock = None , ** kwargs ) : from xarray . backends . api import _MultiFileCloser dask = kwargs . pop ( 'dask' , False ) if not dask : raise ValueError ( "Reading multiple files without dask is not supported" ) kwargs [ ...
Open multiple bpch files as a single dataset .
5,017
def image_bytes ( b , filename = None , inline = 1 , width = 'auto' , height = 'auto' , preserve_aspect_ratio = None ) : if preserve_aspect_ratio is None : if width != 'auto' and height != 'auto' : preserve_aspect_ratio = False else : preserve_aspect_ratio = True data = { 'name' : base64 . b64encode ( ( filename or 'Un...
Return a bytes string that displays image given by bytes b in the terminal
5,018
def display_image_bytes ( b , filename = None , inline = 1 , width = 'auto' , height = 'auto' , preserve_aspect_ratio = None ) : sys . stdout . buffer . write ( image_bytes ( b , filename = filename , inline = inline , width = width , height = height , preserve_aspect_ratio = preserve_aspect_ratio ) ) sys . stdout . wr...
Display the image given by the bytes b in the terminal .
5,019
def display_image_file ( fn , width = 'auto' , height = 'auto' , preserve_aspect_ratio = None ) : with open ( os . path . realpath ( os . path . expanduser ( fn ) ) , 'rb' ) as f : sys . stdout . buffer . write ( image_bytes ( f . read ( ) , filename = fn , width = width , height = height , preserve_aspect_ratio = pres...
Display an image in the terminal .
5,020
def get_entity_uuid_coords ( self , uuid ) : if uuid in self . _entity_to_region_map : coords = self . _entity_to_region_map [ uuid ] entities = self . get_entities ( * coords ) for entity in entities : if 'uniqueId' in entity . data and entity . data [ 'uniqueId' ] == uuid : return tuple ( entity . data [ 'tilePositio...
Returns the coordinates of the given entity UUID inside this world or None if the UUID is not found .
5,021
def create_fuzzy_pattern(pattern):
    """Build a case-insensitive regex matching the characters of
    *pattern* in order, with anything allowed in between.

    Each character is escaped, so regex metacharacters in *pattern*
    are matched literally.
    """
    escaped_chars = [re.escape(ch) for ch in pattern]
    return re.compile(".*".join(escaped_chars), re.IGNORECASE)
Convert a string into a fuzzy regular expression pattern .
5,022
def fuzzy_search ( self , * filters ) : matches = [ ] logger . verbose ( "Performing fuzzy search on %s (%s) .." , pluralize ( len ( filters ) , "pattern" ) , concatenate ( map ( repr , filters ) ) ) patterns = list ( map ( create_fuzzy_pattern , filters ) ) for entry in self . filtered_entries : if all ( p . search ( ...
Perform a fuzzy search that matches the given characters in the given order .
5,023
def select_entry ( self , * arguments ) : matches = self . smart_search ( * arguments ) if len ( matches ) > 1 : logger . info ( "More than one match, prompting for choice .." ) labels = [ entry . name for entry in matches ] return matches [ labels . index ( prompt_for_choice ( labels ) ) ] else : logger . info ( "Matc...
Select a password from the available choices .
5,024
def simple_search ( self , * keywords ) : matches = [ ] keywords = [ kw . lower ( ) for kw in keywords ] logger . verbose ( "Performing simple search on %s (%s) .." , pluralize ( len ( keywords ) , "keyword" ) , concatenate ( map ( repr , keywords ) ) , ) for entry in self . filtered_entries : normalized = entry . name...
Perform a simple search for case insensitive substring matches .
5,025
def smart_search ( self , * arguments ) : matches = self . simple_search ( * arguments ) if not matches : logger . verbose ( "Falling back from substring search to fuzzy search .." ) matches = self . fuzzy_search ( * arguments ) if not matches : if len ( self . filtered_entries ) > 0 : raise NoMatchingPasswordError ( f...
Perform a smart search on the given keywords or patterns .
5,026
def get_diaginfo ( diaginfo_file ) : widths = [ rec . width for rec in diag_recs ] col_names = [ rec . name for rec in diag_recs ] dtypes = [ rec . type for rec in diag_recs ] usecols = [ name for name in col_names if not name . startswith ( '-' ) ] diag_df = pd . read_fwf ( diaginfo_file , widths = widths , names = co...
Read an output's diaginfo.dat file and parse it into a DataFrame for use in selecting and parsing categories.
5,027
def get_tracerinfo ( tracerinfo_file ) : widths = [ rec . width for rec in tracer_recs ] col_names = [ rec . name for rec in tracer_recs ] dtypes = [ rec . type for rec in tracer_recs ] usecols = [ name for name in col_names if not name . startswith ( '-' ) ] tracer_df = pd . read_fwf ( tracerinfo_file , widths = width...
Read an output's tracerinfo.dat file and parse it into a DataFrame for use in selecting and parsing categories.
5,028
def read_from_bpch ( filename , file_position , shape , dtype , endian , use_mmap = False ) : offset = file_position + 4 if use_mmap : d = np . memmap ( filename , dtype = dtype , mode = 'r' , shape = shape , offset = offset , order = 'F' ) else : with FortranFile ( filename , 'rb' , endian ) as ff : ff . seek ( file_p...
Read a chunk of data from a bpch output file .
5,029
def _read ( self ) : if self . _dask : d = da . from_delayed ( delayed ( read_from_bpch , ) ( self . filename , self . file_position , self . shape , self . dtype , self . endian , use_mmap = self . _mmap ) , self . shape , self . dtype ) else : d = read_from_bpch ( self . filename , self . file_position , self . shape...
Helper function to load the data referenced by this bundle .
5,030
def close(self):
    """Close this bpch file, dropping all cached variable data first."""
    if self.fp.closed:
        return
    # Release every cached variable before closing the handle.
    for name in list(self.var_data):
        del self.var_data[name]
    self.fp.close()
Close this bpch file .
5,031
def _read_metadata ( self ) : filetype = self . fp . readline ( ) . strip ( ) filetitle = self . fp . readline ( ) . strip ( ) try : filetype = str ( filetype , 'utf-8' ) filetitle = str ( filetitle , 'utf-8' ) except : pass self . __setattr__ ( 'filetype' , filetype ) self . __setattr__ ( 'filetitle' , filetitle )
Read the main metadata packaged within a bpch file indicating the output filetype and its title .
5,032
def _read_var_data ( self ) : var_bundles = OrderedDict ( ) var_attrs = OrderedDict ( ) n_vars = 0 while self . fp . tell ( ) < self . fsize : var_attr = OrderedDict ( ) line = self . fp . readline ( '20sffii' ) modelname , res0 , res1 , halfpolar , center180 = line line = self . fp . readline ( '40si40sdd40s7i' ) cate...
Iterate over the block of this bpch file and return handlers in the form of BPCHDataBundle s for access to the data contained therein .
5,033
def get_timestamp ( time = True , date = True , fmt = None ) : time_format = "%H:%M:%S" date_format = "%m-%d-%Y" if fmt is None : if time and date : fmt = time_format + " " + date_format elif time : fmt = time_format elif date : fmt = date_format else : raise ValueError ( "One of `date` or `time` must be True!" ) retur...
Return the current timestamp in machine local time .
5,034
def fix_attr_encoding ( ds ) : def _maybe_del_attr ( da , attr ) : if attr in da . attrs : del da . attrs [ attr ] return da def _maybe_decode_attr ( da , attr ) : if ( attr in da . attrs ) and ( type ( da . attrs [ attr ] == bool ) ) : da . attrs [ attr ] = int ( da . attrs [ attr ] ) return da for v in ds . data_vars...
This is a temporary hot - fix to handle the way metadata is encoded when we read data directly from bpch files . It removes the scale_factor and units attributes we encode with the data we ingest converts the hydrocarbon and chemical attribute to a binary integer instead of a boolean and removes the units attribute fro...
5,035
def after_output(command_status):
    """Emit the shell escape sequence that follows command output.

    :param command_status: the command's exit status; must be an
        integer in 0-255, otherwise ValueError is raised.
    """
    # Membership in range() also rejects non-integral values.
    if command_status not in range(256):
        raise ValueError("command_status must be an integer in the range 0-255")
    out = sys.stdout
    out.write(AFTER_OUTPUT.format(command_status=command_status))
    out.flush()
Shell sequence to be run after the command output .
5,036
def enforce_cf_variable ( var , mask_and_scale = True ) : var = as_variable ( var ) data = var . _data dims = var . dims attrs = var . attrs . copy ( ) encoding = var . encoding . copy ( ) orig_dtype = data . dtype if 'scale' in attrs : scale = attrs . pop ( 'scale' ) attrs [ 'scale_factor' ] = scale encoding [ 'scale_...
Given a Variable constructed from GEOS - Chem output enforce CF - compliant metadata and formatting .
5,037
def published ( self , check_language = True , language = None , kwargs = None , exclude_kwargs = None ) : if check_language : qs = NewsEntry . objects . language ( language or get_language ( ) ) . filter ( is_published = True ) else : qs = self . get_queryset ( ) qs = qs . filter ( models . Q ( pub_date__lte = now ( )...
Returns all entries which publication date has been hit or which have no date and which language matches the current language .
5,038
def recent ( self , check_language = True , language = None , limit = 3 , exclude = None , kwargs = None , category = None ) : if category : if not kwargs : kwargs = { } kwargs [ 'categories__in' ] = [ category ] qs = self . published ( check_language = check_language , language = language , kwargs = kwargs ) if exclud...
Returns recently published new entries .
5,039
def get_newsentry_meta_description(newsentry):
    """Return the meta description for the given news entry.

    Prefers an explicit ``meta_description``; otherwise falls back to
    the entry's description, truncated to 160 characters with an
    ellipsis appended.
    """
    if newsentry.meta_description:
        return newsentry.meta_description
    description = newsentry.get_description()
    if len(description) <= 160:
        return description
    return u'{}...'.format(description[:160])
Returns the meta description for the given entry .
5,040
def _requirement_filter_by_marker ( req ) : if hasattr ( req , 'marker' ) and req . marker : marker_env = { 'python_version' : '.' . join ( map ( str , sys . version_info [ : 2 ] ) ) , 'sys_platform' : sys . platform } if not req . marker . evaluate ( environment = marker_env ) : return False return True
Check if the requirement is satisfied by the marker .
5,041
def _requirement_find_lowest_possible ( req ) : version_dep = None version_comp = None for dep in req . specs : version = pkg_resources . parse_version ( dep [ 1 ] ) if dep [ 0 ] == '!=' : continue if ( not version_dep or version < pkg_resources . parse_version ( version_dep ) ) : version_dep = dep [ 1 ] version_comp =...
Find lowest required version .
5,042
def _ensure_coroutine_function ( func ) : if asyncio . iscoroutinefunction ( func ) : return func else : @ asyncio . coroutine def coroutine_function ( evt ) : func ( evt ) yield return coroutine_function
Return a coroutine function .
5,043
def location(self):
    """Return a string uniquely identifying the event.

    The value is built lazily as "<stream>/<type>-<sequence>" and
    memoized in ``self._location``.
    """
    cached = self._location
    if cached is None:
        cached = "{}/{}-{}".format(self.stream, self.type, self.sequence)
        self._location = cached
    return cached
Return a string uniquely identifying the event .
5,044
async def find_backwards ( self , stream_name , predicate , predicate_label = 'predicate' ) : logger = self . _logger . getChild ( predicate_label ) logger . info ( 'Fetching first matching event' ) uri = self . _head_uri try : page = await self . _fetcher . fetch ( uri ) except HttpNotFoundError as e : raise StreamNot...
Return first event matching predicate or None if none exists .
5,045
def main ( ) : coloredlogs . install ( ) action = show_matching_entry program_opts = dict ( exclude_list = [ ] ) show_opts = dict ( filters = [ ] , use_clipboard = is_clipboard_supported ( ) ) verbosity = 0 try : options , arguments = getopt . gnu_getopt ( sys . argv [ 1 : ] , "elnp:f:x:vqh" , [ "edit" , "list" , "no-c...
Command line interface for the qpass program .
5,046
def edit_matching_entry(program, arguments):
    """Select the entry matching *arguments* and open it in ``pass edit``."""
    matched = program.select_entry(*arguments)
    matched.context.execute("pass", "edit", matched.name)
Edit the matching entry .
5,047
def SVGdocument ( ) : "Create default SVG document" import xml . dom . minidom implementation = xml . dom . minidom . getDOMImplementation ( ) doctype = implementation . createDocumentType ( "svg" , "-//W3C//DTD SVG 1.1//EN" , "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd" ) document = implementation . createDocume...
Create default SVG document
5,048
def polyline(document, coords):
    """Create an SVG <polyline> element from a flat coordinate list
    [x0, y0, x1, y1, ...] (intended for more than 2 vertices)."""
    # Pair up consecutive (x, y) values into "x,y" tokens.
    points = [
        "%s,%s" % (coords[i], coords[i + 1])
        for i in range(0, len(coords), 2)
    ]
    return setattribs(
        document.createElement('polyline'),
        points=' '.join(points),
    )
polyline with more than 2 vertices
5,049
def cubic_bezier ( document , coords ) : "cubic bezier polyline" element = document . createElement ( 'path' ) points = [ ( coords [ i ] , coords [ i + 1 ] ) for i in range ( 0 , len ( coords ) , 2 ) ] path = [ "M%s %s" % points [ 0 ] ] for n in xrange ( 1 , len ( points ) , 3 ) : A , B , C = points [ n : n + 3 ] path ...
cubic bezier polyline
5,050
def smoothpolygon ( document , coords ) : "smoothed filled polygon" element = document . createElement ( 'path' ) path = [ ] points = [ ( coords [ i ] , coords [ i + 1 ] ) for i in range ( 0 , len ( coords ) , 2 ) ] def pt ( points ) : p = points n = len ( points ) for i in range ( 0 , len ( points ) ) : a = p [ ( i - ...
smoothed filled polygon
5,051
def font_actual(tkapp, font):
    """Query Tk for the actual parameters of *font*.

    Returns a dict mapping option names (leading dash stripped) to
    their values, e.g. {'family': ..., 'size': ...}.
    """
    raw = tkapp.call('font', 'actual', font)
    params = {}
    # Tk returns a flat tuple alternating '-option', value, ...
    for i in range(0, len(raw), 2):
        params[raw[i][1:]] = raw[i + 1]
    return params
actual font parameters
5,052
def parse_dash ( string , width ) : "parse dash pattern specified with string" w = max ( 1 , int ( width + 0.5 ) ) n = len ( string ) result = [ ] for i , c in enumerate ( string ) : if c == " " and len ( result ) : result [ - 1 ] += w + 1 elif c == "_" : result . append ( 8 * w ) result . append ( 4 * w ) elif c == "-...
parse dash pattern specified with string
5,053
def prof_altitude(pressure,
                  p_coef=(-0.028389, -0.0493698, 0.485718, 0.278656,
                          -17.5703, 48.0926)):
    """Return altitude for the given pressure.

    Evaluates a 5th-order polynomial in log10(pressure); the result has
    the same shape as the input array (or scalar).
    """
    pressure = np.asarray(pressure)
    log_pressure = np.log10(pressure.ravel())
    altitude = np.polyval(p_coef, log_pressure)
    return altitude.reshape(pressure.shape)
Return altitude for given pressure .
5,054
def prof_pressure(altitude,
                  z_coef=(1.94170e-9, -5.14580e-7, 4.57018e-5,
                          -1.55620e-3, -4.61994e-2, 2.99955)):
    """Return pressure for the given altitude.

    Evaluates a 5th-order polynomial in altitude and raises 10 to that
    power; the result has the same shape as the input.
    """
    altitude = np.asarray(altitude)
    log_pressure = np.polyval(z_coef, altitude.ravel())
    pressure = np.power(10, log_pressure)
    return pressure.reshape(altitude.shape)
Return pressure for given altitude .
5,055
def _get_model_info ( model_name ) : split_name = re . split ( r'[\-_\s]' , model_name . strip ( ) . upper ( ) ) sep_chars = ( '' , ' ' , '-' , '_' ) gen_seps = itertools . combinations_with_replacement ( sep_chars , len ( split_name ) - 1 ) test_names = ( "" . join ( ( n for n in itertools . chain ( * list ( zip ( spl...
Get the grid specifications for a given model .
5,056
def _get_archive_filelist ( filename ) : names = [ ] if tarfile . is_tarfile ( filename ) : with tarfile . open ( filename ) as tar_file : names = sorted ( tar_file . getnames ( ) ) elif zipfile . is_zipfile ( filename ) : with zipfile . ZipFile ( filename ) as zip_file : names = sorted ( zip_file . namelist ( ) ) else...
Extract the list of files from a tar or zip archive .
5,057
def _augment_book ( self , uuid , event ) : try : if not isbnmeta : self . log ( "No isbntools found! Install it to get full " "functionality!" , lvl = warn ) return new_book = objectmodels [ 'book' ] . find_one ( { 'uuid' : uuid } ) try : if len ( new_book . isbn ) != 0 : self . log ( 'Got a lookup candidate: ' , new_...
Checks if the newly created object is a book and only has an ISBN . If so tries to fetch the book data off the internet .
5,058
def opened ( self , * args ) : self . _serial_open = True self . log ( "Opened: " , args , lvl = debug ) self . _send_command ( b'l,1' ) self . log ( "Turning off engine, pump and neutralizing rudder" ) self . _send_command ( b'v' ) self . _handle_servo ( self . _machine_channel , 0 ) self . _handle_servo ( self . _rud...
Initiates communication with the remote controlled device .
5,059
def on_machinerequest(self, event):
    """Set a new machine (engine) power from the event's control value."""
    power = event.controlvalue
    self.log("Updating new machine power: ", power)
    self._handle_servo(self._machine_channel, power)
Sets a new machine speed .
5,060
def on_rudderrequest(self, event):
    """Set a new rudder angle from the event's control value."""
    angle = event.controlvalue
    self.log("Updating new rudder angle: ", angle)
    self._handle_servo(self._rudder_channel, angle)
Sets a new rudder angle .
5,061
def on_pumprequest(self, event):
    """Activate or deactivate the connected pump per the event value."""
    state = event.controlvalue
    self.log("Updating pump status: ", state)
    self._set_digital_pin(self._pump_channel, state)
Activates or deactivates a connected pump .
5,062
def provisionList ( items , database_name , overwrite = False , clear = False , skip_user_check = False ) : log ( 'Provisioning' , items , database_name , lvl = debug ) system_user = None def get_system_user ( ) : user = objectmodels [ 'user' ] . find_one ( { 'name' : 'System' } ) try : log ( 'System user uuid: ' , use...
Provisions a list of items according to their schema
5,063
def DefaultExtension ( schema_obj , form_obj , schemata = None ) : if schemata is None : schemata = [ 'systemconfig' , 'profile' , 'client' ] DefaultExtends = { 'schema' : { "properties/modules" : [ schema_obj ] } , 'form' : { 'modules' : { 'items/' : form_obj } } } output = { } for schema in schemata : output [ schema...
Create a default field
5,064
def copytree ( root_src_dir , root_dst_dir , hardlink = True ) : for src_dir , dirs , files in os . walk ( root_src_dir ) : dst_dir = src_dir . replace ( root_src_dir , root_dst_dir , 1 ) if not os . path . exists ( dst_dir ) : os . makedirs ( dst_dir ) for file_ in files : src_file = os . path . join ( src_dir , file_...
Copies a whole directory tree
5,065
def delete ( ctx , componentname ) : col = ctx . obj [ 'col' ] if col . count ( { 'name' : componentname } ) > 1 : log ( 'More than one component configuration of this name! Try ' 'one of the uuids as argument. Get a list with "config ' 'list"' ) return log ( 'Deleting component configuration' , componentname , emitter...
Delete an existing component configuration . This will trigger the creation of its default configuration upon next restart .
5,066
def show ( ctx , component ) : col = ctx . obj [ 'col' ] if col . count ( { 'name' : component } ) > 1 : log ( 'More than one component configuration of this name! Try ' 'one of the uuids as argument. Get a list with "config ' 'list"' ) return if component is None : configurations = col . find ( ) for configuration in ...
Show the stored active configuration of a component .
5,067
def debugrequest ( self , event ) : try : self . log ( "Event: " , event . __dict__ , lvl = critical ) if event . data == "storejson" : self . log ( "Storing received object to /tmp" , lvl = critical ) fp = open ( '/tmp/hfosdebugger_' + str ( event . user . useruuid ) + "_" + str ( uuid4 ( ) ) , "w" ) json . dump ( eve...
Handler for client - side debug requests
5,068
def register_event(self, event):
    """Register a command line interface event hook as a command."""
    self.log('Registering event hook:', event.cmd, event.thing,
             pretty=True, lvl=verbose)
    # Map the command name to the handler ("thing") it should invoke.
    self.hooks[event.cmd] = event.thing
Registers a new command line interface event hook as command
5,069
def populate_user_events ( ) : global AuthorizedEvents global AnonymousEvents def inheritors ( klass ) : subclasses = { } subclasses_set = set ( ) work = [ klass ] while work : parent = work . pop ( ) for child in parent . __subclasses__ ( ) : if child not in subclasses_set : name = child . __module__ + "." + child . _...
Generate a list of all registered authorized and anonymous events
5,070
def clear ( ctx , schema ) : response = _ask ( 'Are you sure you want to delete the collection "%s"' % ( schema ) , default = 'N' , data_type = 'bool' ) if response is True : host , port = ctx . obj [ 'dbhost' ] . split ( ':' ) client = pymongo . MongoClient ( host = host , port = int ( port ) ) database = client [ ctx...
Clears an entire database collection irrevocably . Use with caution!
5,071
def provision_system_config ( items , database_name , overwrite = False , clear = False , skip_user_check = False ) : from hfos . provisions . base import provisionList from hfos . database import objectmodels default_system_config_count = objectmodels [ 'systemconfig' ] . count ( { 'name' : 'Default System Configurati...
Provision a basic system configuration
5,072
def userlogin ( self , event ) : try : user_uuid = event . useruuid user = objectmodels [ 'user' ] . find_one ( { 'uuid' : user_uuid } ) if user_uuid not in self . lastlogs : self . log ( 'Setting up lastlog for a new user.' , lvl = debug ) lastlog = objectmodels [ 'chatlastlog' ] ( { 'owner' : user_uuid , 'uuid' : std...
Provides the newly authenticated user with a backlog and general channel status information
5,073
def install_docs ( instance , clear_target ) : _check_root ( ) def make_docs ( ) : log ( "Generating HTML documentation" ) try : build = Popen ( [ 'make' , 'html' ] , cwd = 'docs/' ) build . wait ( ) except Exception as e : log ( "Problem during documentation building: " , e , type ( e ) , exc = True , lvl = error ) re...
Builds and installs the complete HFOS documentation .
5,074
def install_modules ( wip ) : def install_module ( hfos_module ) : try : setup = Popen ( [ sys . executable , 'setup.py' , 'develop' ] , cwd = 'modules/' + hfos_module + "/" ) setup . wait ( ) except Exception as e : log ( "Problem during module installation: " , hfos_module , e , type ( e ) , exc = True , lvl = error ...
Install the plugin modules
5,075
def install_cert ( selfsigned ) : _check_root ( ) if selfsigned : log ( 'Generating self signed (insecure) certificate/key ' 'combination' ) try : os . mkdir ( '/etc/ssl/certs/hfos' ) except FileExistsError : pass except PermissionError : log ( "Need root (e.g. via sudo) to generate ssl certificate" ) sys . exit ( 1 ) ...
Install a local SSL certificate
5,076
def frontend(ctx, dev, rebuild, no_install, build_type):
    """Build and install the frontend for the configured instance."""
    install_frontend(
        instance=ctx.obj['instance'],
        forcerebuild=rebuild,
        development=dev,
        install=not no_install,
        build_type=build_type,
    )
Build and install frontend
5,077
def install_all ( ctx , clear_all ) : _check_root ( ) instance = ctx . obj [ 'instance' ] dbhost = ctx . obj [ 'dbhost' ] dbname = ctx . obj [ 'dbname' ] port = ctx . obj [ 'port' ] install_system_user ( ) install_cert ( selfsigned = True ) install_var ( instance , clear_target = clear_all , clear_all = clear_all ) ins...
Default - Install everything installable
5,078
def uninstall():
    """Uninstall HFOS data and resource locations after confirmation.

    Requires root; the user must type the literal string YES for the
    deletion to proceed.
    """
    _check_root()
    # BUG FIX: the two adjacent string literals previously concatenated
    # without a space, showing "...installations! TypeYES to continue:".
    response = _ask("This will delete all data of your HFOS installations! Type "
                    "YES to continue:", default="N", show_hint=False)
    if response == 'YES':
        shutil.rmtree('/var/lib/hfos')
        shutil.rmtree('/var/cache/hfos')
Uninstall data and resource locations
5,079
def update ( ctx , no_restart , no_rebuild ) : instance = ctx . obj [ 'instance' ] log ( 'Pulling github updates' ) run_process ( '.' , [ 'git' , 'pull' , 'origin' , 'master' ] ) run_process ( './frontend' , [ 'git' , 'pull' , 'origin' , 'master' ] ) if not no_rebuild : log ( 'Rebuilding frontend' ) install_frontend ( ...
Update a HFOS node
5,080
def _build_model_factories ( store ) : result = { } for schemaname in store : schema = None try : schema = store [ schemaname ] [ 'schema' ] except KeyError : schemata_log ( "No schema found for " , schemaname , lvl = critical , exc = True ) try : result [ schemaname ] = warmongo . model_factory ( schema ) except Excep...
Generate factories to construct objects from schemata
5,081
def _build_collections ( store ) : result = { } client = pymongo . MongoClient ( host = dbhost , port = dbport ) db = client [ dbname ] for schemaname in store : schema = None indices = None try : schema = store [ schemaname ] [ 'schema' ] indices = store [ schemaname ] . get ( 'indices' , None ) except KeyError : db_l...
Generate database collections with indices from the schemastore
5,082
def initialize ( address = '127.0.0.1:27017' , database_name = 'hfos' , instance_name = "default" , reload = False ) : global schemastore global l10n_schemastore global objectmodels global collections global dbhost global dbport global dbname global instance global initialized if initialized and not reload : hfoslog ( ...
Initializes the database connectivity schemata and finally object models
5,083
def profile ( schemaname = 'sensordata' , profiletype = 'pjs' ) : db_log ( "Profiling " , schemaname ) schema = schemastore [ schemaname ] [ 'schema' ] db_log ( "Schema: " , schema , lvl = debug ) testclass = None if profiletype == 'warmongo' : db_log ( "Running Warmongo benchmark" ) testclass = warmongo . model_factor...
Profiles object model handling with a very simple benchmarking test
5,084
def _check_collections ( self ) : self . collection_sizes = { } self . collection_total = 0 for col in self . db . collection_names ( include_system_collections = False ) : self . collection_sizes [ col ] = self . db . command ( 'collstats' , col ) . get ( 'storageSize' , 0 ) self . collection_total += self . collectio...
Checks node local collection storage sizes
5,085
def _check_free_space ( self ) : def get_folder_size ( path ) : total_size = 0 for item in walk ( path ) : for file in item [ 2 ] : try : total_size = total_size + getsize ( join ( item [ 0 ] , file ) ) except ( OSError , PermissionError ) as e : self . log ( "error with file: " + join ( item [ 0 ] , file ) , e ) retu...
Checks used filesystem storage sizes
5,086
def send_mail_worker ( config , mail , event ) : log = "" try : if config . mail_ssl : server = SMTP_SSL ( config . mail_server , port = config . mail_server_port , timeout = 30 ) else : server = SMTP ( config . mail_server , port = config . mail_server_port , timeout = 30 ) if config . mail_tls : log += 'Starting TLS\...
Worker task to send out an email which blocks the process unless it is threaded
5,087
def reload_configuration(self, event):
    """Reload the current configuration and re-run dependent setup."""
    # Let the base class refresh the stored configuration first.
    super(EnrolManager, self).reload_configuration(event)
    self.log('Reloaded configuration.')
    self._setup()
Reload the current configuration and set up everything depending on it
5,088
def change ( self , event ) : uuid = event . data [ 'uuid' ] status = event . data [ 'status' ] if status not in [ 'Open' , 'Pending' , 'Accepted' , 'Denied' , 'Resend' ] : self . log ( 'Erroneous status for enrollment requested!' , lvl = warn ) return self . log ( 'Changing status of an enrollment' , uuid , 'to' , sta...
An admin user requests a change to an enrolment
5,089
def changepassword ( self , event ) : old = event . data [ 'old' ] new = event . data [ 'new' ] uuid = event . user . uuid user = objectmodels [ 'user' ] . find_one ( { 'uuid' : uuid } ) if std_hash ( old , self . salt ) == user . passhash : user . passhash = std_hash ( new , self . salt ) user . save ( ) packet = { 'c...
An enrolled user wants to change their password
5,090
def invite(self, event):
    """Handle an admin's request to invite a new user to enrol.

    Extracts the invitee's details from the event payload and delegates
    the actual enrollment creation and mail dispatch to ``_invite``.
    """
    self.log('Inviting new user to enrol')
    data = event.data
    self._invite(data['name'], data['method'], data['email'],
                 event.client.uuid, event)
A new user has been invited to enrol by an admin user
5,091
def enrol ( self , event ) : if self . config . allow_registration is False : self . log ( 'Someone tried to register although enrolment is closed.' ) return self . log ( 'Client trying to register a new account:' , event , pretty = True ) uuid = event . client . uuid if uuid in self . captchas and event . data . get (...
A user tries to self-enrol with the enrolment form
5,092
def status(self, event):
    """Tell an anonymous client whether self-enrolment is currently open.

    Replies with the configured ``allow_registration`` flag.
    """
    self.log('Registration status requested')
    packet = {
        'component': 'hfos.enrol.enrolmanager',
        'action': 'status',
        'data': self.config.allow_registration,
    }
    self.fire(send(event.client.uuid, packet))
An anonymous client wants to know if we're open for enrollment
5,093
def request_reset ( self , event ) : self . log ( 'Password reset request received:' , event . __dict__ , lvl = hilight ) user_object = objectmodels [ 'user' ] email = event . data . get ( 'email' , None ) email_user = None if email is not None and user_object . count ( { 'mail' : email } ) > 0 : email_user = user_obje...
An anonymous client requests a password reset
5,094
def captcha_transmit(self, captcha, uuid):
    """Delayed transmission of a previously requested captcha image.

    :param captcha: dict with an 'image' BytesIO holding the rendered captcha
    :param uuid: client uuid to send the captcha to
    """
    self.log('Transmitting captcha')
    # The raw image bytes are base64-encoded so they survive JSON transport.
    image_data = b64encode(captcha['image'].getvalue()).decode('utf-8')
    self.fire(send(uuid, {
        'component': 'hfos.enrol.enrolmanager',
        'action': 'captcha',
        'data': image_data,
    }))
Delayed transmission of a requested captcha
5,095
def _invite ( self , name , method , email , uuid , event , password = "" ) : props = { 'uuid' : std_uuid ( ) , 'status' : 'Open' , 'name' : name , 'method' : method , 'email' : email , 'password' : password , 'timestamp' : std_now ( ) } enrollment = objectmodels [ 'enrollment' ] ( props ) enrollment . save ( ) self . ...
Actually invite a given user
5,096
def _create_user ( self , username , password , mail , method , uuid ) : try : if method == 'Invited' : config_role = self . config . group_accept_invited else : config_role = self . config . group_accept_enrolled roles = [ ] if ',' in config_role : for item in config_role . split ( ',' ) : roles . append ( item . lstr...
Create a new user and all initial data
5,097
def _send_invitation ( self , enrollment , event ) : self . log ( 'Sending enrollment status mail to user' ) self . _send_mail ( self . config . invitation_subject , self . config . invitation_mail , enrollment , event )
Send an invitation mail to an open enrolment
5,098
def _send_acceptance ( self , enrollment , password , event ) : self . log ( 'Sending acceptance status mail to user' ) if password is not "" : password_hint = '\n\nPS: Your new password is ' + password + ' - please change it after your first login!' acceptance_text = self . config . acceptance_mail + password_hint els...
Send an acceptance mail to an open enrolment
5,099
def add_auth_hook(self, event):
    """Register an authentication hook announced via an add_auth_hook event.

    Stores the hook's event under its authenticator name so it can be
    fired later during authentication.
    """
    name = event.authenticator_name
    self.log('Adding authentication hook for', name)
    self.auth_hooks[name] = event.event
Register event hook on reception of add_auth_hook - event