Dataset columns: idx (int64, 0 – 63k) · question (string, 61 – 4.03k chars) · target (string, 6 – 1.23k chars)
5,200
def modify(ctx, schema, uuid, object_filter, field, value):
    database = ctx.obj['db']
    model = database.objectmodels[schema]
    obj = None

    if uuid:
        obj = model.find_one({'uuid': uuid})
    elif object_filter:
        obj = model.find_one(literal_eval(object_filter))
    else:
        log('No object uuid or filter specified.', lvl=error)

    if obj is None:
        log('No object found', lvl=error)
        return

    log('Object found, modifying', lvl=debug)

    try:
        new_value = literal_eval(value)
    except ValueError:
        log('Interpreting value as string')
        new_value = str(value)

    obj._fields[field] = new_value
    obj.validate()
    log('Changed object validated', lvl=debug)
    obj.save()
    log('Done')
Modify field values of objects
5,201
def view(ctx, schema, uuid, object_filter):
    database = ctx.obj['db']

    if schema is None:
        log('No schema given. Read the help', lvl=warn)
        return

    model = database.objectmodels[schema]

    if uuid:
        obj = model.find({'uuid': uuid})
    elif object_filter:
        obj = model.find(literal_eval(object_filter))
    else:
        obj = model.find()

    for item in obj:
        pprint(item._fields)
Show stored objects
5,202
def validate(ctx, schema, all_schemata):
    database = ctx.obj['db']

    if schema is None:
        if all_schemata is False:
            log('No schema given. Read the help', lvl=warn)
            return
        else:
            schemata = database.objectmodels.keys()
    else:
        schemata = [schema]

    for schema in schemata:
        try:
            things = database.objectmodels[schema]
            with click.progressbar(things.find(), length=things.count(),
                                   label='Validating %15s' % schema) as object_bar:
                for obj in object_bar:
                    obj.validate()
        except Exception as e:
            log('Exception while validating:', schema, e, type(e),
                '\n\nFix this object and rerun validation!',
                emitter='MANAGE', lvl=error)

    log('Done')
Validates all objects, or all objects of a given schema.
5,203
def find_field(ctx, search, by_type, obj):
    if search is None:
        search = _ask("Enter search term")

    database = ctx.obj['db']

    def find(search_schema, search_field, find_result=None, key=""):
        if find_result is None:
            find_result = []
        fields = search_schema['properties']

        if not by_type:
            if search_field in fields:
                find_result.append(key)
        else:
            for field in fields:
                try:
                    if "type" in fields[field]:
                        if fields[field]["type"] == search_field:
                            find_result.append((key, field))
                except KeyError as e:
                    log("Field access error:", e, type(e), exc=True, lvl=debug)

        if 'properties' in fields:
            find_result.append(find(fields['properties'], search_field,
                                    find_result, key=fields['name']))

        for field in fields:
            if 'items' in fields[field]:
                if 'properties' in fields[field]['items']:
                    find_result.append(find(fields[field]['items'],
                                            search_field, find_result,
                                            key=field))

        return find_result

    if obj is not None:
        schema = database.objectmodels[obj]._schema
        result = find(schema, search, [], key="top")
        if result:
            print(obj)
            pprint(result)
    else:
        for model, thing in database.objectmodels.items():
            schema = thing._schema
            result = find(schema, search, [], key="top")
            if result:
                print(model)
                print(result)
Find fields in registered data models.
5,204
def Distance(lat1, lon1, lat2, lon2):
    az12, az21, dist = wgs84_geod.inv(lon1, lat1, lon2, lat2)
    return az21, dist
Get distance between pairs of lat-lon points
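A minimal usage sketch, assuming wgs84_geod is a pyproj Geod configured for the WGS84 ellipsoid (only the name appears in the function body; the setup shown here is an assumption):

from pyproj import Geod

wgs84_geod = Geod(ellps='WGS84')  # assumed setup for the module-level global

# Back-azimuth and distance in meters between Berlin and Hamburg:
azimuth, distance_m = Distance(52.52, 13.405, 53.551, 9.994)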
5,205
def client_details(self, *args):
    self.log(_('Client details:', lang='de'))
    client = self._clients[args[0]]
    self.log('UUID:', client.uuid,
             'IP:', client.ip,
             'Name:', client.name,
             'User:', self._users[client.useruuid],
             pretty=True)
Display known details about a given client
5,206
def client_list(self, *args):
    if len(self._clients) == 0:
        self.log('No clients connected')
    else:
        self.log(self._clients, pretty=True)
Display a list of connected clients
5,207
def users_list(self, *args):
    if len(self._users) == 0:
        self.log('No users connected')
    else:
        self.log(self._users, pretty=True)
Display a list of connected users
5,208
def who(self, *args):
    if len(self._users) == 0:
        self.log('No users connected')

    if len(self._clients) == 0:
        self.log('No clients connected')
        return

    Row = namedtuple("Row", ['User', 'Client', 'IP'])
    rows = []

    for user in self._users.values():
        for key, client in self._clients.items():
            if client.useruuid == user.uuid:
                row = Row(user.account.name, key, client.ip)
                rows.append(row)

    for key, client in self._clients.items():
        if client.useruuid is None:
            row = Row('ANON', key, client.ip)
            rows.append(row)

    self.log("\n" + std_table(rows))
Display a table of connected users and clients
5,209
def disconnect(self, sock):
    self.log("Disconnect ", sock, lvl=debug)

    try:
        if sock in self._sockets:
            self.log("Getting socket", lvl=debug)
            sockobj = self._sockets[sock]
            self.log("Getting clientuuid", lvl=debug)
            clientuuid = sockobj.clientuuid
            self.log("Getting useruuid", lvl=debug)
            useruuid = self._clients[clientuuid].useruuid

            self.log("Firing disconnect event", lvl=debug)
            self.fireEvent(clientdisconnect(
                clientuuid, self._clients[clientuuid].useruuid))

            self.log("Logging out relevant client", lvl=debug)
            if useruuid is not None:
                self.log("Client was logged in", lvl=debug)
                try:
                    self._logoutclient(useruuid, clientuuid)
                    self.log("Client logged out", useruuid, clientuuid)
                except Exception as e:
                    self.log("Couldn't clean up logged in user! ",
                             self._users[useruuid], e, type(e), lvl=critical)

            self.log("Deleting client (", self._clients.keys, ")", lvl=debug)
            del self._clients[clientuuid]
            self.log("Deleting socket", lvl=debug)
            del self._sockets[sock]
    except Exception as e:
        self.log("Error during disconnect handling: ", e, type(e),
                 lvl=critical)
Handles socket disconnections
5,210
def _logoutclient(self, useruuid, clientuuid):
    self.log("Cleaning up client of logged in user.", lvl=debug)
    try:
        self._users[useruuid].clients.remove(clientuuid)
        if len(self._users[useruuid].clients) == 0:
            self.log("Last client of user disconnected.", lvl=verbose)
            self.fireEvent(userlogout(useruuid, clientuuid))
            del self._users[useruuid]

        self._clients[clientuuid].useruuid = None
    except Exception as e:
        self.log("Error during client logout: ", e, type(e),
                 clientuuid, useruuid, lvl=error, exc=True)
Log out a client and possibly associated user
5,211
def connect(self, *args):
    self.log("Connect ", args, lvl=verbose)
    try:
        sock = args[0]
        ip = args[1]

        if sock not in self._sockets:
            self.log("New client connected:", ip, lvl=debug)
            clientuuid = str(uuid4())
            self._sockets[sock] = Socket(ip, clientuuid)
            self._clients[clientuuid] = Client(
                sock=sock,
                ip=ip,
                clientuuid=clientuuid,
            )
            self.log("Client connected:", clientuuid, lvl=debug)
        else:
            self.log("Old IP reconnected!", lvl=warn)
    except Exception as e:
        self.log("Error during connect: ", e, type(e), lvl=critical)
Registers new sockets and their clients and allocates uuids
5,212
def send(self, event):
    try:
        jsonpacket = json.dumps(event.packet, cls=ComplexEncoder)

        if event.sendtype == "user":
            if event.uuid is None:
                userobject = objectmodels['user'].find_one(
                    {'name': event.username})
            else:
                userobject = objectmodels['user'].find_one(
                    {'uuid': event.uuid})

            if userobject is None:
                self.log("No user by that name known.", lvl=warn)
                return
            else:
                uuid = userobject.uuid

            self.log("Broadcasting to all of user's clients: '%s': '%s'" % (
                uuid, str(event.packet)[:20]), lvl=network)
            if uuid not in self._users:
                self.log("User not connected!", event, lvl=critical)
                return

            clients = self._users[uuid].clients

            for clientuuid in clients:
                sock = self._clients[clientuuid].sock

                if not event.raw:
                    self.log("Sending json to client", jsonpacket[:50],
                             lvl=network)
                    self.fireEvent(write(sock, jsonpacket), "wsserver")
                else:
                    self.log("Sending raw data to client")
                    self.fireEvent(write(sock, event.packet), "wsserver")
        else:  # Send to a single client
            self.log("Sending to user's client: '%s': '%s'" % (
                event.uuid, jsonpacket[:20]), lvl=network)
            if event.uuid not in self._clients:
                if not event.fail_quiet:
                    self.log("Unknown client!", event.uuid, lvl=critical)
                    self.log("Clients:", self._clients, lvl=debug)
                return

            sock = self._clients[event.uuid].sock

            if not event.raw:
                self.fireEvent(write(sock, jsonpacket), "wsserver")
            else:
                self.log("Sending raw data to client", lvl=network)
                # Send the whole packet; slicing it to 20 bytes would only
                # make sense for the log output above.
                self.fireEvent(write(sock, event.packet), "wsserver")
    except Exception as e:
        self.log("Exception during sending: %s (%s)" % (e, type(e)),
                 lvl=critical, exc=True)
Sends a packet to an already known user or one of their clients by UUID
5,213
def broadcast(self, event):
    try:
        if event.broadcasttype == "users":
            if len(self._users) > 0:
                self.log("Broadcasting to all users:", event.content,
                         lvl=network)
                for useruuid in self._users.keys():
                    self.fireEvent(send(useruuid, event.content,
                                        sendtype="user"))
        elif event.broadcasttype == "clients":
            if len(self._clients) > 0:
                self.log("Broadcasting to all clients: ", event.content,
                         lvl=network)
                for client in self._clients.values():
                    self.fireEvent(write(client.sock, event.content),
                                   "wsserver")
        elif event.broadcasttype == "socks":
            if len(self._sockets) > 0:
                self.log("Emergency?! Broadcasting to all sockets: ",
                         event.content)
                for sock in self._sockets:
                    self.fireEvent(write(sock, event.content), "wsserver")
    except Exception as e:
        self.log("Error during broadcast: ", e, type(e), lvl=critical)
Broadcasts an event either to all users or clients depending on event flag
5,214
def _checkPermissions(self, user, event):
    for role in user.account.roles:
        if role in event.roles:
            self.log('Access granted', lvl=verbose)
            return True

    self.log('Access denied', lvl=verbose)
    return False
Checks whether the user has any role that allows firing the event.
5,215
def _handleAuthorizedEvents(self, component, action, data, user, client):
    try:
        if component == "debugger":
            self.log(component, action, data, user, client, lvl=info)

        if not user and component in self.authorized_events.keys():
            self.log("Unknown client tried to do an authenticated "
                     "operation: %s", component, action, data, user)
            return

        event = self.authorized_events[component][action]['event'](
            user, action, data, client)

        self.log('Authorized event roles:', event.roles, lvl=verbose)
        if not self._checkPermissions(user, event):
            result = {
                'component': 'hfos.ui.clientmanager',
                'action': 'Permission',
                'data': _('You have no role that allows this action.',
                          lang='de')
            }
            self.fireEvent(send(event.client.uuid, result))
            return

        self.log("Firing authorized event: ", component, action,
                 str(data)[:100], lvl=debug)
        self.fireEvent(event)
    except Exception as e:
        self.log("Critical error during authorized event handling:",
                 component, action, e, type(e), lvl=critical, exc=True)
Isolated communication link for authorized events .
5,216
def _handleAuthenticationEvents(self, requestdata, requestaction,
                                clientuuid, sock):
    if requestaction in ("login", "autologin"):
        try:
            self.log("Login request", lvl=verbose)

            if requestaction == "autologin":
                username = password = None
                requestedclientuuid = requestdata
                auto = True
                self.log("Autologin for", requestedclientuuid, lvl=debug)
            else:
                username = requestdata['username']
                password = requestdata['password']
                if 'clientuuid' in requestdata:
                    requestedclientuuid = requestdata['clientuuid']
                else:
                    requestedclientuuid = None
                auto = False
                self.log("Auth request by", username, lvl=verbose)

            self.fireEvent(authenticationrequest(
                username,
                password,
                clientuuid,
                requestedclientuuid,
                sock,
                auto,
            ), "auth")
            return
        except Exception as e:
            self.log("Login failed: ", e, type(e), lvl=warn, exc=True)
    elif requestaction == "logout":
        self.log("User logged out, refreshing client.", lvl=network)
        try:
            if clientuuid in self._clients:
                client = self._clients[clientuuid]
                if client.useruuid:
                    self.log("Logout client uuid: ", clientuuid)
                    self._logoutclient(client.useruuid, clientuuid)
                self.fireEvent(clientdisconnect(clientuuid))
            else:
                self.log("Client is not connected!", lvl=warn)
        except Exception as e:
            self.log("Error during client logout: ", e, type(e),
                     lvl=error, exc=True)
    else:
        self.log("Unsupported auth action requested:",
                 requestaction, lvl=warn)
Handler for authentication events
5,217
def _reset_flood_offenders(self, *args):
    offenders = []

    for offender, offence_time in self._flooding.items():
        # Forgive offenders whose last offence is older than the
        # ten-second flood window.
        if time() - offence_time > 10:
            self.log('Removed offender from flood list:', offender)
            offenders.append(offender)

    for offender in offenders:
        del self._flooding[offender]
Resets the list of flood offenders on event trigger
5,218
def _check_flood_protection(self, component, action, clientuuid):
    if clientuuid not in self._flood_counter:
        self._flood_counter[clientuuid] = 0

    self._flood_counter[clientuuid] += 1

    if self._flood_counter[clientuuid] > 100:
        packet = {
            'component': 'hfos.ui.clientmanager',
            'action': 'Flooding',
            'data': True
        }
        self.fireEvent(send(clientuuid, packet))
        self.log('Flooding from', clientuuid)
        return True
Checks if any clients have been flooding the node
5,219
def authentication(self, event):
    try:
        self.log("Authorization has been granted by DB check:",
                 event.username, lvl=debug)

        account, profile, clientconfig = event.userdata

        useruuid = event.useruuid
        originatingclientuuid = event.clientuuid
        clientuuid = clientconfig.uuid

        if clientuuid != originatingclientuuid:
            self.log("Mutating client uuid to request id:",
                     clientuuid, lvl=network)

        # Register the client with the user object
        if useruuid in self._users:
            signedinuser = self._users[useruuid]
        else:
            signedinuser = User(account, profile, useruuid)
            self._users[account.uuid] = signedinuser

        if clientuuid in signedinuser.clients:
            self.log("Client configuration already logged in.",
                     lvl=critical)
        else:
            signedinuser.clients.append(clientuuid)
            self.log("Active client (", clientuuid, ") registered to user",
                     useruuid, lvl=debug)

        # Update socket and client lists
        socket = self._sockets[event.sock]
        socket.clientuuid = clientuuid
        self._sockets[event.sock] = socket

        try:
            language = clientconfig.language
        except AttributeError:
            language = "en"

        newclient = Client(
            sock=event.sock,
            ip=socket.ip,
            clientuuid=clientuuid,
            useruuid=useruuid,
            name=clientconfig.name,
            config=clientconfig,
            language=language
        )

        del self._clients[originatingclientuuid]
        self._clients[clientuuid] = newclient

        authpacket = {"component": "auth", "action": "login",
                      "data": account.serializablefields()}
        self.log("Transmitting Authorization to client", authpacket,
                 lvl=network)
        self.fireEvent(write(event.sock, json.dumps(authpacket)),
                       "wsserver")

        profilepacket = {"component": "profile", "action": "get",
                         "data": profile.serializablefields()}
        self.log("Transmitting Profile to client", profilepacket,
                 lvl=network)
        self.fireEvent(write(event.sock, json.dumps(profilepacket)),
                       "wsserver")

        clientconfigpacket = {"component": "clientconfig", "action": "get",
                              "data": clientconfig.serializablefields()}
        self.log("Transmitting client configuration to client",
                 clientconfigpacket, lvl=network)
        self.fireEvent(write(event.sock, json.dumps(clientconfigpacket)),
                       "wsserver")

        self.fireEvent(userlogin(clientuuid, useruuid, clientconfig,
                                 signedinuser))

        self.log("User configured: Name", signedinuser.account.name,
                 "Profile", signedinuser.profile.uuid,
                 "Clients", signedinuser.clients, lvl=debug)
    except Exception as e:
        self.log("Error (%s, %s) during auth grant: %s" % (
            type(e), e, event), lvl=error)
Links the client to the granted account and profile then notifies the client
5,220
def selectlanguage(self, event):
    self.log('Language selection event:', event.client, pretty=True)

    if event.data not in all_languages():
        self.log('Unavailable language selected:', event.data, lvl=warn)
        language = None
    else:
        language = event.data

    if language is None:
        language = 'en'

    event.client.language = language

    if event.client.config is not None:
        event.client.config.language = language
        event.client.config.save()
Store a client's selection of a new translation
5,221
def getlanguages(self, event):
    self.log('Client requests all languages.', lvl=verbose)
    result = {
        'component': 'hfos.ui.clientmanager',
        'action': 'getlanguages',
        'data': language_token_to_name(all_languages())
    }
    self.fireEvent(send(event.client.uuid, result))
Compile and return a human readable list of registered translations
5,222
def convert(self, lat, lon, source, dest, height=0, datetime=None,
            precision=1e-10, ssheight=50 * 6371):
    if datetime is None and ('mlt' in [source, dest]):
        raise ValueError('datetime must be given for MLT calculations')

    lat = helpers.checklat(lat)

    if source == dest:
        return lat, lon
    # from geo
    elif source == 'geo' and dest == 'apex':
        lat, lon = self.geo2apex(lat, lon, height)
    elif source == 'geo' and dest == 'qd':
        lat, lon = self.geo2qd(lat, lon, height)
    elif source == 'geo' and dest == 'mlt':
        lat, lon = self.geo2apex(lat, lon, height)
        lon = self.mlon2mlt(lon, datetime, ssheight=ssheight)
    # from apex
    elif source == 'apex' and dest == 'geo':
        lat, lon, _ = self.apex2geo(lat, lon, height, precision=precision)
    elif source == 'apex' and dest == 'qd':
        lat, lon = self.apex2qd(lat, lon, height=height)
    elif source == 'apex' and dest == 'mlt':
        lon = self.mlon2mlt(lon, datetime, ssheight=ssheight)
    # from qd
    elif source == 'qd' and dest == 'geo':
        lat, lon, _ = self.qd2geo(lat, lon, height, precision=precision)
    elif source == 'qd' and dest == 'apex':
        lat, lon = self.qd2apex(lat, lon, height=height)
    elif source == 'qd' and dest == 'mlt':
        lat, lon = self.qd2apex(lat, lon, height=height)
        lon = self.mlon2mlt(lon, datetime, ssheight=ssheight)
    # from mlt (the input latitude is treated as apex latitude)
    elif source == 'mlt' and dest == 'geo':
        lon = self.mlt2mlon(lon, datetime, ssheight=ssheight)
        lat, lon, _ = self.apex2geo(lat, lon, height, precision=precision)
    elif source == 'mlt' and dest == 'apex':
        lon = self.mlt2mlon(lon, datetime, ssheight=ssheight)
    elif source == 'mlt' and dest == 'qd':
        lon = self.mlt2mlon(lon, datetime, ssheight=ssheight)
        lat, lon = self.apex2qd(lat, lon, height=height)
    else:
        estr = 'Unknown coordinate transformation: '
        estr += '{} -> {}'.format(source, dest)
        raise NotImplementedError(estr)

    return lat, lon
Converts between geodetic, modified apex, quasi-dipole and MLT coordinates.
5,223
def geo2apex(self, glat, glon, height):
    glat = helpers.checklat(glat, name='glat')

    alat, alon = self._geo2apex(glat, glon, height)

    if np.any(np.float64(alat) == -9999):
        warnings.warn('Apex latitude set to -9999 where undefined '
                      '(apex height may be < reference height)')

    return np.float64(alat), np.float64(alon)
Converts geodetic to modified apex coordinates.
5,224
def apex2geo(self, alat, alon, height, precision=1e-10):
    alat = helpers.checklat(alat, name='alat')

    qlat, qlon = self.apex2qd(alat, alon, height=height)
    glat, glon, error = self.qd2geo(qlat, qlon, height, precision=precision)

    return glat, glon, error
Converts modified apex to geodetic coordinates.
5,225
def geo2qd(self, glat, glon, height):
    glat = helpers.checklat(glat, name='glat')

    qlat, qlon = self._geo2qd(glat, glon, height)

    return np.float64(qlat), np.float64(qlon)
Converts geodetic to quasi-dipole coordinates.
5,226
def qd2geo(self, qlat, qlon, height, precision=1e-10):
    qlat = helpers.checklat(qlat, name='qlat')

    glat, glon, error = self._qd2geo(qlat, qlon, height, precision)

    return np.float64(glat), np.float64(glon), np.float64(error)
Converts quasi-dipole to geodetic coordinates.
5,227
def apex2qd(self, alat, alon, height):
    qlat, qlon = self._apex2qd(alat, alon, height)

    return np.float64(qlat), np.float64(qlon)
Converts modified apex to quasi-dipole coordinates.
5,228
def qd2apex(self, qlat, qlon, height):
    alat, alon = self._qd2apex(qlat, qlon, height)

    return np.float64(alat), np.float64(alon)
Converts quasi-dipole to modified apex coordinates.
5,229
def mlon2mlt(self, mlon, datetime, ssheight=50 * 6371):
    ssglat, ssglon = helpers.subsol(datetime)
    ssalat, ssalon = self.geo2apex(ssglat, ssglon, ssheight)

    return (180 + np.float64(mlon) - ssalon) / 15 % 24
Computes the magnetic local time at the specified magnetic longitude and UT.
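A worked instance of the closing formula: with a subsolar apex longitude ssalon of -60 degrees and mlon of 15 degrees, MLT = (180 + 15 - (-60)) / 15 % 24 = 255 / 15 = 17.0 hours.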
5,230
def mlt2mlon(self, mlt, datetime, ssheight=50 * 6371):
    ssglat, ssglon = helpers.subsol(datetime)
    ssalat, ssalon = self.geo2apex(ssglat, ssglon, ssheight)

    return (15 * np.float64(mlt) - 180 + ssalon + 360) % 360
Computes the magnetic longitude at the specified magnetic local time and UT.
5,231
def map_to_height(self, glat, glon, height, newheight, conjugate=False,
                  precision=1e-10):
    alat, alon = self.geo2apex(glat, glon, height)
    if conjugate:
        alat = -alat
    try:
        newglat, newglon, error = self.apex2geo(alat, alon, newheight,
                                                precision=precision)
    except ApexHeightError:
        raise ApexHeightError("newheight is > apex height")

    return newglat, newglon, error
Performs mapping of points along the magnetic field to the closest or conjugate hemisphere.
5,232
def map_E_to_height(self, alat, alon, height, newheight, E):
    return self._map_EV_to_height(alat, alon, height, newheight, E, 'E')
Performs mapping of the electric field along the magnetic field.
5,233
def map_V_to_height(self, alat, alon, height, newheight, V):
    return self._map_EV_to_height(alat, alon, height, newheight, V, 'V')
Performs mapping of electric drift velocity along the magnetic field.
5,234
def basevectors_qd(self, lat, lon, height, coords='geo', precision=1e-10):
    glat, glon = self.convert(lat, lon, coords, 'geo', height=height,
                              precision=precision)

    f1, f2 = self._basevec(glat, glon, height)

    # For non-scalar input, each vector comes back as an array of arrays,
    # so stack to a single 2D array.
    if f1.dtype == object:
        f1 = np.vstack(f1).T
        f2 = np.vstack(f2).T

    return f1, f2
Returns quasi-dipole base vectors f1 and f2 at the specified coordinates.
5,235
def get_apex(self, lat, height=None):
    lat = helpers.checklat(lat, name='alat')
    if height is None:
        height = self.refh

    cos_lat_squared = np.cos(np.radians(lat)) ** 2
    apex_height = (self.RE + height) / cos_lat_squared - self.RE

    return apex_height
Calculate apex height
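The function evaluates h_apex = (R_E + h) / cos^2(lat) - R_E. For example, with R_E = 6371 km, a reference height of 110 km and lat = 60 degrees (cos^2 = 0.25): h_apex = 6481 / 0.25 - 6371 = 19553 km.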
5,236
def set_epoch(self, year):
    # np.float is deprecated in recent numpy; the builtin float behaves
    # identically here.
    fa.loadapxsh(self.datafile, float(year))
    self.year = year
Updates the epoch for all subsequent conversions.
5,237
def basic_parser(patterns, with_name=None):
    def parse(line):
        output = None
        highest_order = 0
        highest_pattern_name = None
        for pattern in patterns:
            results = pattern.findall(line)
            if results and any(results):
                if pattern.order > highest_order:
                    output = results
                    highest_order = pattern.order
                    if with_name:
                        highest_pattern_name = pattern.name
        if with_name:
            return output, highest_pattern_name
        return output
    return parse
Basic ordered parser.
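A hypothetical usage sketch: the patterns are assumed to be objects exposing findall() plus order and name attributes; the regex-backed stand-in below is an illustration, not part of the library.

import re
from collections import namedtuple

Pattern = namedtuple('Pattern', ['name', 'order', 'findall'])

date_pattern = Pattern('date', 2, re.compile(r'\d{4}-\d{2}-\d{2}').findall)
year_pattern = Pattern('year', 1, re.compile(r'\d{4}').findall)

parse = basic_parser([date_pattern, year_pattern], with_name=True)
print(parse('released 2016-01-02'))  # (['2016-01-02'], 'date')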
5,238
def parser(parser_type=basic_parser, functions=None, patterns=None,
           expressions=None, patterns_yaml_path=None,
           expressions_yaml_path=None):
    from reparse.builders import build_all
    from reparse.validators import validate

    def _load_yaml(file_path):
        import yaml
        with open(file_path) as f:
            return yaml.safe_load(f)

    assert expressions or expressions_yaml_path, \
        "Reparse can't build a parser without expressions"
    assert patterns or patterns_yaml_path, \
        "Reparse can't build a parser without patterns"
    assert functions, "Reparse can't build without functions"

    if patterns_yaml_path:
        patterns = _load_yaml(patterns_yaml_path)
    if expressions_yaml_path:
        expressions = _load_yaml(expressions_yaml_path)

    validate(patterns, expressions)
    return parser_type(build_all(patterns, expressions, functions))
A Reparse parser builder. Simply provide the functions, patterns & expressions to build one. If you are using YAML for expressions and patterns, you can use expressions_yaml_path & patterns_yaml_path for convenience.
5,239
def _translate(self, input_filename, output_filename):
    command = [
        self.translate_binary,
        '-f', 'GeoJSON',
        output_filename,
        input_filename
    ]
    result = self._runcommand(command)
    self.log('Result (Translate): ', result, lvl=debug)
Translate KML file to geojson for import
5,240
def _update_guide(self, guide, update=False, clear=True):
    kml_filename = os.path.join(self.cache_path, guide + '.kml')
    geojson_filename = os.path.join(self.cache_path, guide + '.geojson')

    if not os.path.exists(geojson_filename) or update:
        try:
            data = request.urlopen(self.guides[guide]).read().decode('utf-8')
        except (request.URLError, request.HTTPError) as e:
            self.log('Could not get web guide data:', e, type(e), lvl=warn)
            return

        with open(kml_filename, 'w') as f:
            f.write(data)

        self._translate(kml_filename, geojson_filename)

    with open(geojson_filename, 'r') as f:
        json_data = json.loads(f.read())

    if len(json_data['features']) == 0:
        self.log('No features found!', lvl=warn)
        return

    layer = objectmodels['layer'].find_one({'name': guide})

    if clear and layer is not None:
        layer.delete()
        layer = None

    if layer is None:
        layer_uuid = std_uuid()
        layer = objectmodels['layer']({
            'uuid': layer_uuid,
            'name': guide,
            'type': 'geoobjects'
        })
        layer.save()
    else:
        layer_uuid = layer.uuid

    if clear:
        for item in objectmodels['geoobject'].find({'layer': layer_uuid}):
            self.log('Deleting old guide location', lvl=debug)
            item.delete()

    locations = []

    for item in json_data['features']:
        self.log('Adding new guide location:', item, lvl=verbose)
        location = objectmodels['geoobject']({
            'uuid': std_uuid(),
            'layer': layer_uuid,
            'geojson': item,
            'type': 'Skipperguide',
            'name': 'Guide for %s' % (item['properties']['Name'])
        })
        locations.append(location)

    self.log('Bulk inserting guide locations', lvl=debug)
    objectmodels['geoobject'].bulk_create(locations)
Update a single specified guide
5,241
def send_mail_worker(config, mail, event):
    log = ""
    try:
        if config.get('ssl', True):
            server = SMTP_SSL(config['server'], port=config['port'],
                              timeout=30)
        else:
            server = SMTP(config['server'], port=config['port'], timeout=30)

        if config['tls']:
            log += 'Starting TLS\n'
            server.starttls()

        if config['username'] != '':
            log += 'Logging in with ' + str(config['username']) + "\n"
            server.login(config['username'], config['password'])
        else:
            log += 'No username, trying anonymous access\n'

        log += 'Sending Mail\n'
        response_send = server.send_message(mail)
        server.quit()
    except timeout as e:
        log += 'Could not send email: ' + str(e) + "\n"
        return False, log, event

    log += 'Server response:' + str(response_send)
    return True, log, event
Worker task to send out an email; sending is a blocking process unless it is threaded
5,242
def provision_system_user(items, database_name, overwrite=False,
                          clear=False, skip_user_check=False):
    from hfos.provisions.base import provisionList
    from hfos.database import objectmodels

    if overwrite is True:
        hfoslog('Refusing to overwrite system user!', lvl=warn,
                emitter='PROVISIONS')
        overwrite = False

    system_user_count = objectmodels['user'].count({'name': 'System'})
    if system_user_count == 0 or clear is False:
        provisionList(Users, 'user', overwrite, clear, skip_user_check=True)
        hfoslog('Provisioning: Users: Done.', emitter="PROVISIONS")
    else:
        hfoslog('System user already present.', lvl=warn,
                emitter='PROVISIONS')
Provision a system user
5,243
def Group(expressions, final_function, inbetweens, name=""):
    lengths = []
    functions = []
    regex = ""

    for i, expression in enumerate(expressions):
        regex += inbetweens[i]
        regex += "(?:" + expression.regex + ")"
        lengths.append(sum(expression.group_lengths))
        functions.append(expression.run)

    regex += inbetweens[len(expressions)]
    return Expression(regex, functions, lengths, final_function, name)
Group expressions together with inbetweens and run their combined output through final_function.
5,244
def findall(self, string):
    output = []
    for match in self.pattern.findall(string):
        # A single-group regex yields bare strings; normalize to a list.
        if hasattr(match, 'strip'):
            match = [match]
        self._list_add(output, self.run(match))
    return output
Parse a string, returning all outputs as parsed by the group functions
5,245
def scan(self, string):
    return list(self._scanner_to_matches(self.pattern.scanner(string),
                                         self.run))
Like findall, but also returns matching start and end string locations
5,246
def run(self, matches):
    def _run(matches):
        group_starting_pos = 0
        for current_pos, (group_length, group_function) in \
                enumerate(zip(self.group_lengths, self.group_functions)):
            start_pos = current_pos + group_starting_pos
            end_pos = current_pos + group_starting_pos + group_length
            yield group_function(matches[start_pos:end_pos])
            group_starting_pos += group_length - 1
    return self.final_function(list(_run(matches)))
Run group functions over matches
5,247
def set_logfile(path, instance):
    global logfile
    logfile = os.path.join(os.path.normpath(path),
                           'hfos.' + instance + '.log')
Specify logfile path
5,248
def is_muted(what):
    state = False

    # If any emitters are soloed, mute everything that is not soloed.
    for item in solo:
        if item not in what:
            state = True
        else:
            state = False
            break

    # Explicitly muted emitters are always muted.
    for item in mute:
        if item in what:
            state = True
            break

    return state
Checks if a logged event is to be muted for debugging purposes.
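A short behavioral sketch, assuming the module-level solo and mute lists the function reads (the values below are illustrative only):

solo = []
mute = ['network']

print(is_muted(['ui', 'network']))  # True: a muted emitter matches
print(is_muted(['ui', 'core']))     # False: nothing muted, no solo active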
5,249
def get_tagged(self, event):
    self.log("Tagged objects request for", event.data, "from",
             event.user, lvl=debug)

    if event.data in self.tags:
        tagged = self._get_tagged(event.data)
        response = {
            'component': 'hfos.events.schemamanager',
            'action': 'get',
            'data': tagged
        }
        self.fireEvent(send(event.client.uuid, response))
    else:
        self.log("Unavailable schema requested!", lvl=warn)
Return a list of tagged objects for a schema
5,250
def provision_system_vessel(items, database_name, overwrite=False,
                            clear=False, skip_user_check=False):
    from hfos.provisions.base import provisionList
    from hfos.database import objectmodels

    vessel = objectmodels['vessel'].find_one(
        {'name': 'Default System Vessel'})
    if vessel is not None:
        if overwrite is False:
            hfoslog('Default vessel already existing. Skipping provisions.')
            return
        else:
            vessel.delete()

    provisionList([SystemVessel], 'vessel', overwrite, clear,
                  skip_user_check)

    sysconfig = objectmodels['systemconfig'].find_one({'active': True})
    hfoslog('Adapting system config for default vessel:', sysconfig)
    sysconfig.vesseluuid = SystemVessel['uuid']
    sysconfig.save()

    hfoslog('Provisioning: Vessel: Done.', emitter='PROVISIONS')
Provisions the default system vessel
5,251
def towgs84(E, N, pkm=False, presentation=None):
    _lng0 = lng0pkm if pkm else lng0

    E /= 1000.0
    N /= 1000.0
    epsilon = (N - N0) / (k0 * A)
    eta = (E - E0) / (k0 * A)

    epsilonp = epsilon - beta1 * sin(2 * 1 * epsilon) * cosh(2 * 1 * eta) - \
        beta2 * sin(2 * 2 * epsilon) * cosh(2 * 2 * eta) - \
        beta3 * sin(2 * 3 * epsilon) * cosh(2 * 3 * eta)
    etap = eta - beta1 * cos(2 * 1 * epsilon) * sinh(2 * 1 * eta) - \
        beta2 * cos(2 * 2 * epsilon) * sinh(2 * 2 * eta) - \
        beta3 * cos(2 * 3 * epsilon) * sinh(2 * 3 * eta)
    sigmap = 1 - 2 * 1 * beta1 * cos(2 * 1 * epsilon) * cosh(2 * 1 * eta) - \
        2 * 2 * beta2 * cos(2 * 2 * epsilon) * cosh(2 * 2 * eta) - \
        2 * 3 * beta3 * cos(2 * 3 * epsilon) * cosh(2 * 3 * eta)
    taup = 2 * 1 * beta1 * sin(2 * 1 * epsilon) * sinh(2 * 1 * eta) + \
        2 * 2 * beta2 * sin(2 * 2 * epsilon) * sinh(2 * 2 * eta) + \
        2 * 3 * beta3 * sin(2 * 3 * epsilon) * sinh(2 * 3 * eta)

    chi = asin(sin(epsilonp) / cosh(etap))

    latitude = chi + delta1 * sin(2 * 1 * chi) + \
        delta2 * sin(2 * 2 * chi) + \
        delta3 * sin(2 * 3 * chi)
    longitude = _lng0 + atan(sinh(etap) / cos(epsilonp))

    func = None
    presentation = 'to%s' % presentation if presentation else None
    if presentation in presentations:
        func = getattr(sys.modules[__name__], presentation)

    if func and func != 'todegdec':
        return func(degrees(latitude)), func(degrees(longitude))

    return (degrees(latitude), degrees(longitude))
Convert coordinates from TWD97 to WGS84
5,252
def fromwgs84(lat, lng, pkm=False):
    _lng0 = lng0pkm if pkm else lng0

    lat = radians(todegdec(lat))
    lng = radians(todegdec(lng))

    t = sinh((atanh(sin(lat)) - 2 * pow(n, 0.5) / (1 + n) *
              atanh(2 * pow(n, 0.5) / (1 + n) * sin(lat))))
    epsilonp = atan(t / cos(lng - _lng0))
    etap = atan(sin(lng - _lng0) / pow(1 + t * t, 0.5))

    E = E0 + k0 * A * (etap +
                       alpha1 * cos(2 * 1 * epsilonp) * sinh(2 * 1 * etap) +
                       alpha2 * cos(2 * 2 * epsilonp) * sinh(2 * 2 * etap) +
                       alpha3 * cos(2 * 3 * epsilonp) * sinh(2 * 3 * etap))
    N = N0 + k0 * A * (epsilonp +
                       alpha1 * sin(2 * 1 * epsilonp) * cosh(2 * 1 * etap) +
                       alpha2 * sin(2 * 2 * epsilonp) * cosh(2 * 2 * etap) +
                       alpha3 * sin(2 * 3 * epsilonp) * cosh(2 * 3 * etap))

    return E * 1000, N * 1000
Convert coordinates from WGS84 to TWD97
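Both converters rely on module-level TWD97 projection constants (lng0, E0, N0, k0, A, n and the alpha/beta/delta series) that are not shown here. Assuming those are defined, a round trip looks like:

E, N = fromwgs84(24.0, 121.0)   # WGS84 -> TWD97, in meters
lat, lng = towgs84(E, N)        # TWD97 -> WGS84
# lat, lng should reproduce roughly (24.0, 121.0) within projection precision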
5,253
def clipValue(self, value, minValue, maxValue):
    return min(max(value, minValue), maxValue)
Makes sure that a value is within a specific range; if not, the lower or upper bound is returned
5,254
def getGroundResolution(self, latitude, level):
    latitude = self.clipValue(latitude, self.min_lat, self.max_lat)
    mapSize = self.getMapDimensionsByZoomLevel(level)
    return math.cos(latitude * math.pi / 180) * 2 * math.pi * \
        self.earth_radius / mapSize
Returns the ground resolution based on latitude and zoom level.
5,255
def getMapScale(self, latitude, level, dpi=96):
    dpm = dpi / 0.0254  # dots per meter
    return self.getGroundResolution(latitude, level) * dpm
Returns the map scale based on the DPI of the screen
5,256
def convertLatLngToPixelXY ( self , lat , lng , level ) : mapSize = self . getMapDimensionsByZoomLevel ( level ) lat = self . clipValue ( lat , self . min_lat , self . max_lat ) lng = self . clipValue ( lng , self . min_lng , self . max_lng ) x = ( lng + 180 ) / 360 sinlat = math . sin ( lat * math . pi / 180 ) y = 0.5 - math . log ( ( 1 + sinlat ) / ( 1 - sinlat ) ) / ( 4 * math . pi ) pixelX = int ( self . clipValue ( x * mapSize + 0.5 , 0 , mapSize - 1 ) ) pixelY = int ( self . clipValue ( y * mapSize + 0.5 , 0 , mapSize - 1 ) ) return ( pixelX , pixelY )
returns the x and y values of the pixel corresponding to a latitude and longitude .
5,257
def convertPixelXYToLngLat(self, pixelX, pixelY, level):
    mapSize = self.getMapDimensionsByZoomLevel(level)
    x = (self.clipValue(pixelX, 0, mapSize - 1) / mapSize) - 0.5
    y = 0.5 - (self.clipValue(pixelY, 0, mapSize - 1) / mapSize)
    lat = 90 - 360 * math.atan(math.exp(-y * 2 * math.pi)) / math.pi
    lng = 360 * x
    return (lng, lat)
Converts a pixel x, y to longitude and latitude.
5,258
def tileXYZToQuadKey(self, x, y, z):
    quadKey = ''
    for i in range(z, 0, -1):
        digit = 0
        mask = 1 << (i - 1)
        if (x & mask) != 0:
            digit += 1
        if (y & mask) != 0:
            digit += 2
        quadKey += str(digit)
    return quadKey
Computes the quadKey value based on tile x, y and z values.
5,259
def quadKeyToTileXYZ(self, quadKey):
    tileX = 0
    tileY = 0
    tileZ = len(quadKey)

    for i in range(tileZ, 0, -1):
        mask = 1 << (i - 1)
        value = quadKey[tileZ - i]

        if value == '0':
            continue
        elif value == '1':
            tileX |= mask
        elif value == '2':
            tileY |= mask
        elif value == '3':
            tileX |= mask
            tileY |= mask
        else:
            raise ValueError('Invalid QuadKey')

    return (tileX, tileY, tileZ)
Computes tile x, y and z values based on a quadKey.
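A worked round trip: for tile x = 3 (binary 011), y = 5 (binary 101), z = 3, the digits from the top level down are 2, 1, 3, so tileXYZToQuadKey(3, 5, 3) returns '213' and quadKeyToTileXYZ('213') recovers (3, 5, 3).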
5,260
def getTileUrlsByLatLngExtent(self, xmin, ymin, xmax, ymax, level):
    tileXMin, tileYMin = self.tileUtils.convertLngLatToTileXY(xmin, ymax,
                                                              level)
    tileXMax, tileYMax = self.tileUtils.convertLngLatToTileXY(xmax, ymin,
                                                              level)

    tileUrls = []
    for y in range(tileYMax, tileYMin - 1, -1):
        for x in range(tileXMin, tileXMax + 1, 1):
            tileUrls.append(self.createTileUrl(x, y, level))

    return tileUrls
Returns a list of tile urls by extent
5,261
def createTileUrl(self, x, y, z):
    return self.tileTemplate.replace('{{x}}', str(x)) \
                            .replace('{{y}}', str(y)) \
                            .replace('{{z}}', str(z))
Returns a new tile url based on the template
5,262
def userlogin(self, event):
    client_uuid = event.clientuuid

    self.log(event.user, pretty=True, lvl=verbose)
    self.log('Adding client')
    self.clients[event.clientuuid] = event.user

    for topic, alert in self.alerts.items():
        self.alert(client_uuid, alert)
Checks if an alert is ongoing and alerts the newly connected client if so .
5,263
def cli(ctx, instance, quiet, verbose, log_level, dbhost, dbname):
    ctx.obj['instance'] = instance

    if dbname == db_default and instance != 'default':
        dbname = instance

    ctx.obj['quiet'] = quiet
    ctx.obj['verbose'] = verbose
    verbosity['console'] = log_level
    verbosity['global'] = log_level

    ctx.obj['dbhost'] = dbhost
    ctx.obj['dbname'] = dbname
Isomer Management Tool
5,264
def main():
    from datetime import datetime
    beg_time = datetime.now()

    args, sub_clargs = load_command_line_args()
    if args is None:
        return

    log = load_log(args)

    if args.command == 'setup':
        setup_user_config(log)
        return

    if not os.path.isfile(_CONFIG_PATH):
        raise RuntimeError("'{}' does not exist. "
                           "Run `astrocats setup` to configure."
                           "".format(_CONFIG_PATH))

    git_vers = get_git()
    title_str = "Astrocats, version: {}, SHA: {}".format(__version__,
                                                         git_vers)
    log.warning("\n\n{}\n{}\n{}\n".format(title_str, '=' * len(title_str),
                                          beg_time.ctime()))

    args = load_user_config(args, log)
    mod_name = args.command

    log.debug("Importing specified module: '{}'".format(mod_name))
    try:
        mod = importlib.import_module('.' + mod_name, package='astrocats')
    except Exception as err:
        log.error("Import of specified module '{}' failed.".format(mod_name))
        log_raise(log, str(err), type(err))

    log.debug("Running `main.main()`")
    mod.main.main(args, sub_clargs, log)

    end_time = datetime.now()
    log.warning("\nAll complete at {}, After {}".format(end_time,
                                                        end_time - beg_time))
    return
Primary entry point for all AstroCats catalogs.
5,265
def setup_user_config(log):
    log.warning("AstroCats Setup")
    log.warning("Configure filepath: '{}'".format(_CONFIG_PATH))

    config_path_dir = os.path.split(_CONFIG_PATH)[0]
    if not os.path.exists(config_path_dir):
        log.debug("Creating config directory '{}'".format(config_path_dir))
        os.makedirs(config_path_dir)
    if not os.path.isdir(config_path_dir):
        log_raise(log, "Configure path error '{}'".format(config_path_dir))

    def_base_path = os.path.abspath(os.path.dirname(os.path.abspath(__file__)))
    log.warning("Setting '{}' to default path: '{}'".format(_BASE_PATH_KEY,
                                                            def_base_path))
    config = {_BASE_PATH_KEY: def_base_path}
    json.dump(config, open(_CONFIG_PATH, 'w'))

    # Verify the configuration file was actually written
    if not os.path.exists(_CONFIG_PATH):
        log_raise(log, "Problem creating configuration file.")

    return
Set up a configuration file in the user's home directory.
5,266
def load_user_config(args, log):
    if not os.path.exists(_CONFIG_PATH):
        err_str = ("Configuration file does not exist ({}).\n".format(
            _CONFIG_PATH) + "Run `python -m astrocats setup` to configure.")
        log_raise(log, err_str)

    config = json.load(open(_CONFIG_PATH, 'r'))
    setattr(args, _BASE_PATH_KEY, config[_BASE_PATH_KEY])
    log.debug("Loaded configuration: {}: {}".format(_BASE_PATH_KEY,
                                                    config[_BASE_PATH_KEY]))
    return args
Load settings from the user's configuration file and add them to args.
5,267
def load_command_line_args(clargs=None):
    import argparse

    git_vers = get_git()
    parser = argparse.ArgumentParser(
        prog='astrocats',
        description='Generate catalogs for astronomical data.')

    parser.add_argument('command', nargs='?', default=None)
    parser.add_argument('--version', action='version',
                        version='AstroCats v{}, SHA: {}'.format(__version__,
                                                                git_vers))
    parser.add_argument('--verbose', '-v', dest='verbose', default=False,
                        action='store_true',
                        help='Print more messages to the screen.')
    parser.add_argument('--debug', '-d', dest='debug', default=False,
                        action='store_true',
                        help='Print excessive messages to the screen.')
    parser.add_argument('--include-private', dest='private', default=False,
                        action='store_true',
                        help='Include private data in import.')
    parser.add_argument('--travis', '-t', dest='travis', default=False,
                        action='store_true',
                        help='Run import script in test mode for Travis.')
    parser.add_argument('--clone-depth', dest='clone_depth', default=0,
                        type=int,
                        help=('When cloning git repos, only clone out to '
                              'this depth (default: 0 = all levels).'))
    parser.add_argument('--purge-outputs', dest='purge_outputs',
                        default=False, action='store_true',
                        help='Purge git outputs after cloning.')
    parser.add_argument('--log', dest='log_filename', default=None,
                        help='Filename to which to store logging '
                             'information.')

    write_group = parser.add_mutually_exclusive_group()
    write_group.add_argument('--write', action='store_true',
                             dest='write_entries', default=True,
                             help='Write entries to files [default].')
    write_group.add_argument('--no-write', action='store_false',
                             dest='write_entries', default=True,
                             help='Do not write entries to file.')

    delete_group = parser.add_mutually_exclusive_group()
    delete_group.add_argument('--predelete', action='store_true',
                              dest='delete_old', default=True,
                              help='Delete all old event files to begin '
                                   '[default].')
    delete_group.add_argument('--no-predelete', action='store_false',
                              dest='delete_old', default=True,
                              help='Do not delete all old event files to '
                                   'start.')

    args, sub_clargs = parser.parse_known_args(args=clargs)

    # Print help if no command is given
    if args.command is None:
        parser.print_help()
        return None, None

    return args, sub_clargs
Load and parse command-line arguments.
5,268
def load_log(args):
    from astrocats.catalog.utils import logger

    log_stream_level = None
    if args.debug:
        log_stream_level = logger.DEBUG
    elif args.verbose:
        log_stream_level = logger.INFO

    log = logger.get_logger(stream_level=log_stream_level,
                            tofile=args.log_filename)
    log._verbose = args.verbose
    log._debug = args.debug

    return log
Load a logging.Logger object.
5,269
def compare_dicts(old_full, new_full, old_data, new_data, depth=0):
    depth = depth + 1
    indent = "  " * depth

    def my_print(str):
        print("{}{}".format(indent, str))

    old_keys = list(old_data.keys())
    for key in old_keys:
        old_vals = old_data.pop(key)
        my_print("{}".format(key))

        if key not in new_data:
            my_print("Key '{}' not in new_data.".format(key))
            my_print("Old:")
            pprint(old_data)
            my_print("New:")
            pprint(new_data)
            return False

        new_vals = new_data.pop(key)

        if isinstance(old_vals, dict) and isinstance(new_vals, dict):
            if not compare_dicts(old_full, new_full, old_vals, new_vals,
                                 depth=depth):
                return False
        elif (isinstance(old_vals, list) and isinstance(old_vals[0], dict) and
              isinstance(new_vals, list) and isinstance(new_vals[0], dict)):
            for old_elem, new_elem in zip_longest(old_vals, new_vals):
                if old_elem is None or new_elem is None:
                    my_print("Missing element!")
                    my_print("\tOld: '{}'".format(old_elem))
                    my_print("\tNew: '{}'".format(new_elem))
                else:
                    if not compare_dicts(old_full, new_full, old_elem,
                                         new_elem, depth=depth):
                        return False
        else:
            if (not isinstance(old_vals, list) and
                    not isinstance(new_vals, list)):
                old_vals = [old_vals]
                new_vals = [new_vals]

            old_vals = sorted(old_vals)
            new_vals = sorted(new_vals)

            for oldv, newv in zip_longest(old_vals, new_vals):
                if oldv is None or newv is None:
                    my_print("Missing element!")
                    my_print("\tOld: '{}'".format(oldv))
                    my_print("\tNew: '{}'".format(newv))
                elif oldv == newv:
                    my_print("Good Match: '{}'".format(key))
                else:
                    my_print("Bad Match: '{}'".format(key))
                    my_print("\tOld: '{}'".format(oldv))
                    my_print("\tNew: '{}'".format(newv))
                    return False

    return True
Compares dictionaries by key-value, recursively.
5,270
def cohensutherland(xmin, ymax, xmax, ymin, x1, y1, x2, y2):
    INSIDE, LEFT, RIGHT, LOWER, UPPER = 0, 1, 2, 4, 8

    def _getclip(xa, ya):
        p = INSIDE
        if xa < xmin:
            p |= LEFT
        elif xa > xmax:
            p |= RIGHT
        if ya < ymin:
            p |= LOWER
        elif ya > ymax:
            p |= UPPER
        return p

    k1 = _getclip(x1, y1)
    k2 = _getclip(x2, y2)

    # Loop until both endpoints are inside the box (k1 == k2 == INSIDE)
    while (k1 | k2) != 0:
        # Trivially reject lines entirely outside one edge
        if (k1 & k2) != 0:
            return None, None, None, None

        # Take the first endpoint that lies outside (logical or, not bitwise)
        opt = k1 or k2
        if opt & UPPER:
            x = x1 + (x2 - x1) * (ymax - y1) / (y2 - y1)
            y = ymax
        elif opt & LOWER:
            x = x1 + (x2 - x1) * (ymin - y1) / (y2 - y1)
            y = ymin
        elif opt & RIGHT:
            y = y1 + (y2 - y1) * (xmax - x1) / (x2 - x1)
            x = xmax
        elif opt & LEFT:
            y = y1 + (y2 - y1) * (xmin - x1) / (x2 - x1)
            x = xmin
        else:
            raise RuntimeError('Undefined clipping state')

        if opt == k1:
            x1, y1 = x, y
            k1 = _getclip(x1, y1)
        elif opt == k2:
            x2, y2 = x, y
            k2 = _getclip(x2, y2)

    return x1, y1, x2, y2
Clips a line to a rectangular area.
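Note the argument order: the clip window is given as (xmin, ymax, xmax, ymin). A small usage sketch:

# Clip the segment (0, 5) - (10, 5) to the box x in [2, 8], y in [0, 10]:
x1, y1, x2, y2 = cohensutherland(2, 10, 8, 0, 0, 5, 10, 5)
# -> (2, 5.0, 8, 5.0); a fully outside segment yields (None, None, None, None)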
5,271
def setupuv(rc):
    if cv is not None:
        (r, c) = rc
        u = cv.CreateMat(r, c, cv.CV_32FC1)
        v = cv.CreateMat(r, c, cv.CV_32FC1)
        return (u, v)
    else:
        return [None] * 2
The legacy OpenCV Horn-Schunck function requires these old-fashioned cv matrices rather than numpy arrays
5,272
def _init_cat_dict(self, cat_dict_class, key_in_self, **kwargs):
    try:
        new_entry = cat_dict_class(self, key=key_in_self, **kwargs)
    except CatDictError as err:
        if err.warn:
            self._log.info("'{}' Not adding '{}': '{}'".format(
                self[self._KEYS.NAME], key_in_self, str(err)))
        return None
    return new_entry
Initialize a CatDict object, checking for errors.
5,273
def _add_cat_dict(self, cat_dict_class, key_in_self,
                  check_for_dupes=True, **kwargs):
    new_entry = self._init_cat_dict(cat_dict_class, key_in_self, **kwargs)
    if new_entry is None:
        return False

    # Compare this new entry against all previous ones; on a duplicate,
    # merge sources into the existing item instead of appending.
    if cat_dict_class != Error:
        for item in self.get(key_in_self, []):
            if new_entry.is_duplicate_of(item):
                item.append_sources_from(new_entry)
                return new_entry

    self.setdefault(key_in_self, []).append(new_entry)
    return True
Add a CatDict to this Entry if initialization succeeds and it doesn't already exist within the Entry.
5,274
def pbar(iter, desc='', **kwargs):
    return tqdm(iter,
                desc=('<' + str(datetime.now().strftime(
                    "%Y-%m-%d %H:%M:%S")) + '> ' + desc),
                dynamic_ncols=True, **kwargs)
Wrapper for tqdm progress bar.
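A trivial usage sketch, assuming tqdm and datetime are imported in the host module:

for item in pbar(range(1000), desc='processing'):
    pass  # each iteration advances a timestamped progress bar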
5,275
def pbar_strings(files, desc='', **kwargs):
    return tqdm(sorted(files, key=lambda s: s.lower()),
                desc=('<' + str(datetime.now().strftime(
                    "%Y-%m-%d %H:%M:%S")) + '> ' + desc),
                dynamic_ncols=True, **kwargs)
Wrapper for the tqdm progress bar which also sorts a list of strings
5,276
def _get_task_priority(tasks, task_priority):
    if task_priority is None:
        return None
    if is_integer(task_priority):
        return task_priority
    if isinstance(task_priority, basestring):
        if task_priority in tasks:
            return tasks[task_priority].priority

    raise ValueError("Unrecognized task priority '{}'".format(task_priority))
Get the task priority corresponding to the given task_priority.
5,277
def import_data(self):
    tasks_list = self.load_task_list()
    warnings.filterwarnings(
        'ignore', r'Warning: converting a masked element to nan.')
    warnings.filterwarnings('ignore', category=DeprecationWarning)

    if self.args.delete_old:
        self.log.warning("Deleting all old entry files.")
        self.delete_old_entry_files()

    if self.args.load_stubs or self.args.update:
        self.load_stubs()

    if self.args.travis:
        self.log.warning("Running in `travis` mode.")

    prev_priority = 0
    prev_task_name = ''
    for task_name, task_obj in tasks_list.items():
        if not task_obj.active:
            continue
        self.log.warning("Task: '{}'".format(task_name))

        nice_name = task_obj.nice_name
        mod_name = task_obj.module
        func_name = task_obj.function
        priority = task_obj.priority

        # Make sure tasks run in the intended order
        if priority < prev_priority and priority > 0:
            raise RuntimeError("Priority for '{}': '{}', less than prev,"
                               "'{}': '{}'.\n{}".format(
                                   task_name, priority, prev_task_name,
                                   prev_priority, task_obj))

        self.log.debug("\t{}, {}, {}, {}".format(nice_name, priority,
                                                 mod_name, func_name))
        mod = importlib.import_module('.' + mod_name, package='astrocats')
        self.current_task = task_obj
        getattr(mod, func_name)(self)

        num_events, num_stubs = self.count()
        self.log.warning("Task finished. Events: {}, Stubs: {}".format(
            num_events, num_stubs))
        self.journal_entries()
        num_events, num_stubs = self.count()
        self.log.warning("Journal finished. Events: {}, Stubs: {}".format(
            num_events, num_stubs))

        prev_priority = priority
        prev_task_name = task_name

    process = psutil.Process(os.getpid())
    memory = process.memory_info().rss
    self.log.warning('Memory used (MBs): '
                     '{:,}'.format(memory / 1024. / 1024.))
    return
Run all of the import tasks.
5,278
def add_entry(self, name, load=True, delete=True):
    newname = self.clean_entry_name(name)

    if not newname:
        raise ValueError('Fatal: Attempted to add entry with no name.')

    # If entry already exists, return existing name unless it is a stub
    if newname in self.entries:
        self.log.debug("`newname`: '{}' (name: '{}') already "
                       "exists.".format(newname, name))
        if self.entries[newname]._stub:
            self.log.debug("'{}' is a stub".format(newname))
        else:
            self.log.debug("'{}' is not a stub, returning".format(newname))
            return newname

    # If entry is an alias of another entry, use that name instead
    match_name = self.find_entry_name_of_alias(newname)
    if match_name is not None:
        self.log.debug("`newname`: '{}' (name: '{}') already exists as "
                       "alias for '{}'.".format(newname, name, match_name))
        newname = match_name

    if load:
        loaded_name = self.load_entry_from_name(newname, delete=delete)
        if loaded_name:
            return loaded_name

    if match_name is not None:
        return match_name

    # Create a new entry
    new_entry = self.proto(catalog=self, name=newname)
    new_entry[self.proto._KEYS.SCHEMA] = self.SCHEMA.URL
    self.log.log(self.log._LOADED,
                 "Created new entry for '{}'".format(newname))

    self.entries[newname] = new_entry
    return newname
Find an existing entry in, or add a new one to, the entries dict.
5,279
def find_entry_name_of_alias(self, alias):
    if alias in self.aliases:
        name = self.aliases[alias]
        if name in self.entries:
            return name
    else:
        for name, entry in self.entries.items():
            aliases = entry.get_aliases(includename=False)
            if alias in aliases:
                if (ENTRY.DISTINCT_FROM not in entry or
                        alias not in entry[ENTRY.DISTINCT_FROM]):
                    return name

    return None
Return the first entry name with the given alias included in its list of aliases.
5,280
def copy_entry_to_entry(self, fromentry, destentry,
                        check_for_dupes=True, compare_to_existing=True):
    self.log.info("Copy entry object '{}' to '{}'".format(
        fromentry[fromentry._KEYS.NAME], destentry[destentry._KEYS.NAME]))

    newsourcealiases = {}
    if self.proto._KEYS.SOURCES in fromentry:
        for source in fromentry[self.proto._KEYS.SOURCES]:
            alias = source.pop(SOURCE.ALIAS)
            newsourcealiases[alias] = source

    newmodelaliases = {}
    if self.proto._KEYS.MODELS in fromentry:
        for model in fromentry[self.proto._KEYS.MODELS]:
            alias = model.pop(MODEL.ALIAS)
            newmodelaliases[alias] = model

    if self.proto._KEYS.ERRORS in fromentry:
        for err in fromentry[self.proto._KEYS.ERRORS]:
            destentry.setdefault(self.proto._KEYS.ERRORS, []).append(err)

    for rkey in fromentry:
        key = fromentry._KEYS.get_key_by_name(rkey)
        if key.no_source:
            continue
        for item in fromentry[key]:
            if 'source' not in item:
                raise ValueError("Item has no source!")

            nsid = []
            for sid in item['source'].split(','):
                if sid in newsourcealiases:
                    source = newsourcealiases[sid]
                    nsid.append(destentry.add_source(**source))
                else:
                    raise ValueError("Couldn't find source alias!")
            item['source'] = uniq_cdl(nsid)

            if 'model' in item:
                nmid = []
                for mid in item['model'].split(','):
                    if mid in newmodelaliases:
                        model = newmodelaliases[mid]
                        nmid.append(destentry.add_model(**model))
                    else:
                        raise ValueError("Couldn't find model alias!")
                item['model'] = uniq_cdl(nmid)

            if key == ENTRY.PHOTOMETRY:
                destentry.add_photometry(
                    compare_to_existing=compare_to_existing, **item)
            elif key == ENTRY.SPECTRA:
                destentry.add_spectrum(
                    compare_to_existing=compare_to_existing, **item)
            elif key == ENTRY.ERRORS:
                destentry.add_error(**item)
            elif key == ENTRY.MODELS:
                continue
            else:
                destentry.add_quantity(
                    compare_to_existing=compare_to_existing,
                    check_for_dupes=False, quantities=key, **item)

    return
Used by merge_duplicates
5,281
def _delete_entry_file(self, entry_name=None, entry=None):
    if entry_name is None and entry is None:
        raise RuntimeError("Either `entry_name` or `entry` must be given.")
    elif entry_name is not None and entry is not None:
        raise RuntimeError("Cannot use both `entry_name` and `entry`.")

    if entry_name is not None:
        entry = self.entries[entry_name]
    else:
        entry_name = entry[ENTRY.NAME]

    entry_filename = self.entry_filename(entry_name)

    if self.args.write_entries:
        self.log.info("Deleting entry file '{}' of entry '{}'".format(
            entry_filename, entry_name))
        if not os.path.exists(entry_filename):
            self.log.error("Filename '{}' does not exist".format(
                entry_filename))
        os.remove(entry_filename)
    else:
        self.log.debug("Not deleting '{}' because `write_entries` "
                       "is False".format(entry_filename))

    return
Delete the file associated with the given entry.
5,282
def journal_entries ( self , clear = True , gz = False , bury = False , write_stubs = False , final = False ) : for name in list ( self . entries . keys ( ) ) : if self . args . write_entries : if self . entries [ name ] . _stub and not write_stubs : continue bury_entry = False save_entry = True if bury : ( bury_entry , save_entry ) = self . should_bury ( name ) if save_entry : save_name = self . entries [ name ] . save ( bury = bury_entry , final = final ) self . log . info ( "Saved {} to '{}'." . format ( name . ljust ( 20 ) , save_name ) ) if ( gz and os . path . getsize ( save_name ) > self . COMPRESS_ABOVE_FILESIZE ) : save_name = compress_gz ( save_name ) self . log . debug ( "Compressed '{}' to '{}'" . format ( name , save_name ) ) outdir , filename = os . path . split ( save_name ) filename = filename . split ( '.' ) [ 0 ] os . system ( 'cd ' + outdir + '; git rm --cached ' + filename + '.json; git add -f ' + filename + '.json.gz; cd ' + self . PATHS . PATH_BASE ) if clear : self . entries [ name ] = self . entries [ name ] . get_stub ( ) self . log . debug ( "Entry for '{}' converted to stub" . format ( name ) ) return
Write all entries in `entries` to files and clear them, depending on arguments and tasks.
5,283
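journal_entries calls a compress_gz helper when a saved file exceeds COMPRESS_ABOVE_FILESIZE. That helper is not shown in this record; a minimal sketch of what it plausibly does (gzip the file in place, remove the original, return the new path) follows. Treat the name and return contract as assumptions:

    import gzip
    import os
    import shutil

    def compress_gz(path):
        # Assumed contract: gzip `path` in place and return the '.gz' path.
        gz_path = path + '.gz'
        with open(path, 'rb') as src, gzip.open(gz_path, 'wb') as dst:
            shutil.copyfileobj(src, dst)
        os.remove(path)
        return gz_path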
def set_preferred_names ( self ) : if len ( self . entries ) == 0 : self . log . error ( "WARNING: `entries` is empty, loading stubs" ) self . load_stubs ( ) task_str = self . get_current_task_str ( ) for ni , oname in enumerate ( pbar ( self . entries , task_str ) ) : name = self . add_entry ( oname ) self . entries [ name ] . set_preferred_name ( ) if self . args . travis and ni > self . TRAVIS_QUERY_LIMIT : break return
Choose between each entry's given name and its possible aliases to select the best one.
5,284
def _prep_git_add_file_list ( self , repo , size_limit , fail = True , file_types = None ) : add_files = [ ] if file_types is None : file_patterns = [ '*' ] else : self . log . error ( "WARNING: uncertain behavior with specified file types!" ) file_patterns = [ '*.' + ft for ft in file_types ] file_patterns = [ os . path . join ( repo , fp ) for fp in file_patterns ] for pattern in file_patterns : file_list = glob ( pattern ) for ff in file_list : fsize = os . path . getsize ( ff ) fname = str ( ff ) comp_failed = False if fsize > size_limit : self . log . debug ( "File '{}' size '{}' MB." . format ( fname , fsize / 1024 / 1024 ) ) if ff . endswith ( '.gz' ) : self . log . error ( "File '{}' is already compressed." . format ( fname ) ) comp_failed = True else : fname = compress_gz ( fname ) fsize = os . path . getsize ( fname ) self . log . info ( "Compressed to '{}', size '{}' MB" . format ( fname , fsize / 1024 / 1024 ) ) if fsize > size_limit : comp_failed = True if comp_failed : if fail : raise RuntimeError ( "File '{}' cannot be added!" . format ( fname ) ) self . log . info ( "Skipping file." ) continue add_files . append ( fname ) return add_files
Get a list of files which should be added to the given repository .
5,285
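A hypothetical call to _prep_git_add_file_list, with a made-up repository path and the size limit expressed in bytes (1 MB = 1024 * 1024 bytes); `catalog` is an assumed instance of the containing class:

    MB = 1024 * 1024
    add_files = catalog._prep_git_add_file_list(
        repo='/data/sne-2015-2019',  # hypothetical repository path
        size_limit=100 * MB,         # compared against os.path.getsize()
        fail=False)                  # skip, rather than raise, on oversized files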
def download_url ( self , url , timeout , fail = False , post = None , verify = True ) : _CODE_ERRORS = [ 500 , 307 , 404 ] import requests session = requests . Session ( ) try : headers = { 'User-Agent' : 'Mozilla/5.0 (Macintosh; Intel Mac OS X ' '10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) ' 'Chrome/39.0.2171.95 Safari/537.36' } if post : response = session . post ( url , timeout = timeout , headers = headers , data = post , verify = verify ) else : response = session . get ( url , timeout = timeout , headers = headers , verify = verify ) response . raise_for_status ( ) for xx in response . history : xx . raise_for_status ( ) if xx . status_code in _CODE_ERRORS : self . log . error ( "URL response returned status code '{}'" . format ( xx . status_code ) ) raise url_txt = response . text self . log . debug ( "Task {}: Loaded `url_txt` from '{}'." . format ( self . current_task . name , url ) ) except ( KeyboardInterrupt , SystemExit ) : raise except Exception as err : err_str = ( "URL Download of '{}' failed ('{}')." . format ( url , str ( err ) ) ) if fail : err_str += " and `fail` is set." self . log . error ( err_str ) raise RuntimeError ( err_str ) else : self . log . warning ( err_str ) return None return url_txt
Download text from the given URL.
5,286
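A hedged usage example for download_url; the URL is a placeholder and `catalog` is an assumed instance. With fail=False the method returns None on any download error, so callers should check before parsing:

    url_txt = catalog.download_url(
        'https://example.com/table.csv',  # placeholder URL
        timeout=60, fail=False)
    if url_txt is not None:
        rows = url_txt.splitlines()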
def append_sources_from ( self , other ) : self_aliases = self [ self . _KEYS . SOURCE ] . split ( ',' ) other_aliases = other [ self . _KEYS . SOURCE ] . split ( ',' ) self [ self . _KEYS . SOURCE ] = uniq_cdl ( self_aliases + other_aliases ) return
Merge the source alias lists of two CatDicts .
5,287
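A worked example of the alias merge in append_sources_from, assuming the comma-delimited source convention used throughout these records:

    # self['source'] == '1,2' and other['source'] == '2,3'
    # split  -> ['1', '2'] + ['2', '3']
    # uniq_cdl -> '1,2,3'  (duplicate '2' dropped, order preserved)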
def current_task ( self , args ) : ctask = self . nice_name if self . nice_name is not None else self . name if args is not None : if args . update : ctask = ctask . replace ( '%pre' , 'Updating' ) else : ctask = ctask . replace ( '%pre' , 'Loading' ) return ctask
Name of the current action for progress-bar output.
5,288
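How the '%pre' placeholder in current_task resolves, using a hypothetical task whose nice_name carries the placeholder:

    # nice_name = '%pre OGLE-III photometry'   (hypothetical)
    # args.update is True  -> 'Updating OGLE-III photometry'
    # args.update is False -> 'Loading OGLE-III photometry'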
def load_archive ( self , args ) : import warnings warnings . warn ( "`Task.load_archive()` is deprecated! " "`Catalog.load_url` handles the same functionality." ) return self . archived or args . archived
Whether previously archived data should be loaded .
5,289
def git_add_commit_push_all_repos ( cat ) : log = cat . log log . debug ( "gitter.git_add_commit_push_all_repos()" ) all_repos = cat . PATHS . get_all_repo_folders ( private = False ) for repo in all_repos : log . info ( "Repo in: '{}'" . format ( repo ) ) sha_beg = get_sha ( repo ) log . debug ( "Current SHA: '{}'" . format ( sha_beg ) ) add_files = cat . _prep_git_add_file_list ( repo , cat . COMPRESS_ABOVE_FILESIZE ) log . info ( "Found {} Files to add." . format ( len ( add_files ) ) ) if len ( add_files ) == 0 : continue try : git_comm = [ "git" , "add" ] if cat . args . travis : git_comm . append ( "-f" ) git_comm . extend ( add_files ) _call_command_in_repo ( git_comm , repo , cat . log , fail = True , log_flag = False ) commit_msg = "'push' - adding all files." commit_msg = "{} : {}" . format ( cat . _version_long , commit_msg ) log . info ( commit_msg ) git_comm = [ "git" , "commit" , "-am" , commit_msg ] _call_command_in_repo ( git_comm , repo , cat . log ) git_comm = [ "git" , "push" ] if not cat . args . travis : _call_command_in_repo ( git_comm , repo , cat . log , fail = True ) except Exception as err : try : git_comm = [ "git" , "reset" , "HEAD" ] _call_command_in_repo ( git_comm , repo , cat . log , fail = True ) except : pass raise err return
Add all files in each data repository tree, then commit and push.
5,290
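git_add_commit_push_all_repos delegates to a _call_command_in_repo helper that is not shown here. A minimal sketch, assuming it runs the command with the repository as working directory and raises when `fail` is set:

    import subprocess

    def _call_command_in_repo(comm, repo, log, fail=False, log_flag=True):
        # Assumed behavior: run `comm` inside `repo`, optionally log stdout,
        # and raise if the command fails and `fail` is requested.
        res = subprocess.run(comm, cwd=repo, capture_output=True, text=True)
        if log_flag and res.stdout:
            log.debug(res.stdout)
        if fail and res.returncode != 0:
            raise RuntimeError("Command '{}' failed in '{}'".format(
                ' '.join(comm), repo))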
def git_pull_all_repos ( cat , strategy_recursive = True , strategy = 'theirs' ) : log = cat . log log . debug ( "gitter.git_pull_all_repos()" ) log . warning ( "WARNING: using experimental `git_pull_all_repos()`!" ) all_repos = cat . PATHS . get_all_repo_folders ( ) for repo_name in all_repos : log . info ( "Repo in: '{}'" . format ( repo_name ) ) sha_beg = get_sha ( repo_name ) log . debug ( "Current SHA: '{}'" . format ( sha_beg ) ) repo = git . Repo ( repo_name ) git_comm = "git pull --verbose" if strategy_recursive : git_comm += " -s recursive" if strategy is not None : git_comm += " -X {:s}" . format ( strategy ) log . debug ( "Calling '{}'" . format ( git_comm ) ) code , out , err = repo . git . execute ( git_comm . split ( ) , with_stdout = True , with_extended_output = True , with_exceptions = False ) if len ( out ) : log . info ( out ) if len ( err ) : log . info ( err ) if code != 0 : err_str = "Command '{}' returned exit code '{}'!" . format ( git_comm , code ) err_str += "\n\tout: '{}'\n\terr: '{}'" . format ( out , err ) log . error ( err_str ) raise RuntimeError ( err_str ) sha_end = get_sha ( repo_name ) if sha_end != sha_beg : log . info ( "Updated SHA: '{}'" . format ( sha_end ) ) return
Perform a git pull in each data repository .
5,291
def git_clone_all_repos ( cat ) : log = cat . log log . debug ( "gitter.git_clone_all_repos()" ) all_repos = cat . PATHS . get_all_repo_folders ( ) out_repos = cat . PATHS . get_repo_output_folders ( ) for repo in all_repos : log . info ( "Repo in: '{}'" . format ( repo ) ) if os . path . isdir ( repo ) : log . info ( "Directory exists." ) else : log . debug ( "Cloning directory..." ) clone ( repo , cat . log , depth = max ( cat . args . clone_depth , 1 ) ) if cat . args . purge_outputs and repo in out_repos : for fil in glob ( os . path . join ( repo , '*.json' ) ) : os . remove ( fil ) grepo = git . cmd . Git ( repo ) try : grepo . status ( ) except git . GitCommandError : log . error ( "Repository does not exist!" ) raise sha_beg = get_sha ( repo ) log . debug ( "Current SHA: '{}'" . format ( sha_beg ) ) return
Perform a git clone for each data repository that doesn't exist locally.
5,292
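Several of these git helpers call get_sha to compare repository state before and after an operation. Its body is not included; a plausible sketch returning the current HEAD commit hash:

    import subprocess

    def get_sha(repo):
        # Assumed behavior: return the HEAD commit hash of `repo`.
        return subprocess.check_output(
            ['git', 'rev-parse', 'HEAD'], cwd=repo).decode().strip()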
def git_reset_all_repos ( cat , hard = True , origin = False , clean = True ) : log = cat . log log . debug ( "gitter.git_reset_all_repos()" ) all_repos = cat . PATHS . get_all_repo_folders ( ) for repo in all_repos : log . warning ( "Repo in: '{}'" . format ( repo ) ) sha_beg = get_sha ( repo ) log . debug ( "Current SHA: '{}'" . format ( sha_beg ) ) grepo = git . cmd . Git ( repo ) log . info ( "fetching" ) grepo . fetch ( ) args = [ ] if hard : args . append ( '--hard' ) if origin : args . append ( 'origin/master' ) log . info ( "resetting" ) retval = grepo . reset ( * args ) if len ( retval ) : log . warning ( "Git says: '{}'" . format ( retval ) ) if clean : log . info ( "cleaning" ) retval = grepo . clean ( '-qdf' ) if len ( retval ) : log . warning ( "Git says: '{}'" . format ( retval ) ) sha_end = get_sha ( repo ) if sha_end != sha_beg : log . debug ( "Updated SHA: '{}'" . format ( sha_end ) ) return
Perform a git reset in each data repository .
5,293
def git_status_all_repos ( cat , hard = True , origin = False , clean = True ) : log = cat . log log . debug ( "gitter.git_status_all_repos()" ) all_repos = cat . PATHS . get_all_repo_folders ( ) for repo_name in all_repos : log . info ( "Repo in: '{}'" . format ( repo_name ) ) sha_beg = get_sha ( repo_name ) log . debug ( "Current SHA: '{}'" . format ( sha_beg ) ) log . info ( "Fetching" ) fetch ( repo_name , log = cat . log ) git_comm = [ "git" , "status" ] _call_command_in_repo ( git_comm , repo_name , cat . log , fail = True , log_flag = True ) sha_end = get_sha ( repo_name ) if sha_end != sha_beg : log . info ( "Updated SHA: '{}'" . format ( sha_end ) ) return
Perform a git status in each data repository .
5,294
def clone ( repo , log , depth = 1 ) : kwargs = { } if depth > 0 : kwargs [ 'depth' ] = depth try : repo_name = os . path . split ( repo ) [ - 1 ] repo_name = "https://github.com/astrocatalogs/" + repo_name + ".git" log . warning ( "Cloning '{}' (only needs to be done " . format ( repo ) + "once, may take few minutes per repo)." ) grepo = git . Repo . clone_from ( repo_name , repo , ** kwargs ) except : log . error ( "CLONING '{}' INTERRUPTED" . format ( repo ) ) raise return grepo
Given a list of repositories, make sure they're all cloned.
5,295
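A hypothetical call to clone; the local path is made up, and the function derives the GitHub URL from the last path component (here 'sne-2010-2014' becomes https://github.com/astrocatalogs/sne-2010-2014.git):

    grepo = clone('/data/astrocats/sne-2010-2014', log, depth=1)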
def _check ( self ) : super ( Spectrum , self ) . _check ( ) err_str = None has_data = self . _KEYS . DATA in self has_wave = self . _KEYS . WAVELENGTHS in self has_flux = self . _KEYS . FLUXES in self has_filename = self . _KEYS . FILENAME in self if not has_data : if ( not has_wave or not has_flux ) and not has_filename : err_str = ( "If `{}` not given" . format ( self . _KEYS . DATA ) + "; `{}` or `{}` needed" . format ( self . _KEYS . WAVELENGTHS , self . _KEYS . FLUXES ) ) if err_str is not None : raise ValueError ( err_str ) return
Check that the spectrum has a legal combination of attributes.
5,296
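The legal attribute combinations that Spectrum._check enforces, summarized as hypothetical constructor inputs (the literal key strings 'data', 'wavelengths', 'fluxes', and 'filename' are assumptions about what the _KEYS constants resolve to):

    # Legal:   {'data': [...]}                          full data table given
    # Legal:   {'wavelengths': [...], 'fluxes': [...]}  separate columns given
    # Legal:   {'filename': 'spec.dat'}                 data lives in a file
    # Illegal: none of the above -> ValueError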
def is_duplicate_of ( self , other ) : if super ( Spectrum , self ) . is_duplicate_of ( other ) : return True row_matches = 0 for ri , row in enumerate ( self . get ( self . _KEYS . DATA , [ ] ) ) : lambda1 , flux1 = tuple ( row [ 0 : 2 ] ) if ( self . _KEYS . DATA not in other or ri >= len ( other [ self . _KEYS . DATA ] ) ) : break lambda2 , flux2 = tuple ( other [ self . _KEYS . DATA ] [ ri ] [ 0 : 2 ] ) minlambdalen = min ( len ( lambda1 ) , len ( lambda2 ) ) minfluxlen = min ( len ( flux1 ) , len ( flux2 ) ) if ( lambda1 [ : minlambdalen + 1 ] == lambda2 [ : minlambdalen + 1 ] and flux1 [ : minfluxlen + 1 ] == flux2 [ : minfluxlen + 1 ] and float ( flux1 [ : minfluxlen + 1 ] ) != 0.0 ) : row_matches += 1 if row_matches >= 5 : return True if ri >= 10 : break return False
Check if this spectrum is a duplicate of another.
5,297
def sort_func ( self , key ) : if key == self . _KEYS . TIME : return 'aaa' if key == self . _KEYS . DATA : return 'zzy' if key == self . _KEYS . SOURCE : return 'zzz' return key
Logic for sorting keys in a Spectrum relative to one another .
5,298
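A worked example of the Spectrum key ordering: sort_func maps 'time' to 'aaa', 'data' to 'zzy', and 'source' to 'zzz', so a lexicographic sort pins time first and source last:

    keys = ['source', 'data', 'time', 'instrument']
    keys.sort(key=spectrum.sort_func)  # `spectrum` is an assumed instance
    # -> ['time', 'instrument', 'data', 'source']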
def sort_func ( self , key ) : if key == self . _KEYS . VALUE : return 'aaa' if key == self . _KEYS . SOURCE : return 'zzz' return key
Sorting logic for Quantity objects .
5,299
def pretty ( self ) : retval = ( "Key(name={}, type={}, listable={}, compare={}, " "priority={}, kind_preference={}, " "replace_better={})" ) . format ( self . name , self . type , self . listable , self . compare , self . priority , self . kind_preference , self . replace_better ) return retval
Return a pretty string representation of this Key .