idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
49,700
def loads_with_timestamp(value, salt):
    """Return the timestamp of *value* together with its unsigned payload.

    Forces a ``SignatureExpired`` error by passing an impossible
    ``max_age``, then recovers the signature age from the exception
    message to compute the moment the value was originally dumped.
    """
    try:
        # A hugely negative max_age guarantees expiry, exposing the age.
        signing.loads(value, salt=salt, max_age=-999999)
    except signing.SignatureExpired as exc:
        age_text = str(exc).split('Signature age ')[1].split(' >')[0]
        age = float(age_text)
        timestamp = timezone.now() - datetime.timedelta(seconds=age)
        return timestamp, signing.loads(value, salt=salt)
Returns the unsigned value along with its timestamp — the time when it was dumped.
49,701
def set_keepalive(sock, after_idle_sec=1, interval_sec=3, max_fails=5):
    """Enable TCP keepalive probing on an open socket.

    Each option is applied only when the running platform exposes it.
    :param after_idle_sec: idle seconds before the first probe.
    :param interval_sec: seconds between subsequent probes.
    :param max_fails: failed probes before the connection is dropped.
    """
    options = (
        (socket.SOL_SOCKET, "SO_KEEPALIVE", 1),
        (socket.IPPROTO_TCP, "TCP_KEEPIDLE", after_idle_sec),
        (socket.IPPROTO_TCP, "TCP_KEEPINTVL", interval_sec),
        (socket.IPPROTO_TCP, "TCP_KEEPCNT", max_fails),
    )
    for level, opt_name, opt_value in options:
        if hasattr(socket, opt_name):
            sock.setsockopt(level, getattr(socket, opt_name), opt_value)
Set TCP keepalive on an open socket .
49,702
def _create_dictionary_of_veron ( self ) : self . log . debug ( 'starting the ``_create_dictionary_of_veron`` method' ) dictList = [ ] lines = string . split ( self . catData , '\n' ) totalCount = len ( lines ) count = 0 switch = 0 for line in lines : if ( len ( line ) == 0 or line [ 0 ] in [ "#" , " " ] ) and switch == 0 : continue else : switch = 1 count += 1 if count > 1 : sys . stdout . write ( "\x1b[1A\x1b[2K" ) print "%(count)s / %(totalCount)s veron data added to memory" % locals ( ) if count == 1 : theseKeys = [ ] someKeys = string . split ( line , '|' ) for key in someKeys : if key == "_RAJ2000" : key = "raDeg" if key == "_DEJ2000" : key = "decDeg" if key == "Cl" : key = "class" if key == "nR" : key = "not_radio" if key == "Name" : key = "name" if key == "l_z" : key = "redshift_flag" if key == "z" : key = "redshift" if key == "Sp" : key = "spectral_classification" if key == "n_Vmag" : key = "magnitude_filter" if key == "Vmag" : key = "magnitude" if key == "B-V" : key = "B_V" if key == "U-B" : key = "U_B" if key == "Mabs" : key = "abs_magnitude" theseKeys . append ( key ) continue if count in [ 2 , 3 ] : continue thisDict = { } theseValues = string . split ( line , '|' ) for k , v in zip ( theseKeys , theseValues ) : v = v . strip ( ) if len ( v ) == 0 or v == "-" : v = None thisDict [ k ] = v dictList . append ( thisDict ) self . log . debug ( 'completed the ``_create_dictionary_of_veron`` method' ) return dictList
create a list of dictionaries containing all the rows in the veron catalogue
49,703
def display(self, display_before=False):
    """Show the effect of this filter: good segments in green, bad in red."""
    try:
        canvas = self.image.copy()
    except AttributeError:
        raise Exception("You need to set the Filter.image attribute for displaying")
    # Dim the image so the colored segment overlays stand out.
    canvas = BrightnessProcessor(brightness=0.6).process(canvas)
    segments, good = self._input, self.good_segments_indexes
    draw_segments(canvas, segments[good], (0, 255, 0))
    # ``True ^ good`` inverts the boolean index mask.
    draw_segments(canvas, segments[True ^ good], (0, 0, 255))
    show_image_and_wait_for_key(canvas, "segments filtered by " + self.__class__.__name__)
shows the effect of this filter
49,704
def parse_mip_analysis(mip_config_raw: dict, qcmetrics_raw: dict, sampleinfo_raw: dict) -> dict:
    """Parse MIP output analysis files into a dict for the trend database.

    :param mip_config_raw: raw MIP config data.
    :param qcmetrics_raw: raw QC metrics data.
    :param sampleinfo_raw: raw sample info data.
    """
    results = _define_output_dict()
    _config(mip_config_raw, results)
    _qc_metrics(results, qcmetrics_raw)
    _qc_sample_info(results, sampleinfo_raw)
    return results
Parse the output analysis files from MIP for adding info to trend database
49,705
# Purpose: refresh the row counts stored in the tcs_helper catalogue "tables
# info" helper tables and matching views, via readquery/writequery round trips.
# NOTE(review): this block appears to have lost its SQL string literals during
# extraction -- each ``sqlQuery = u % locals ( )`` presumably originally read
# ``sqlQuery = u"""...""" % locals()`` with an inline SQL template. As written
# the name ``u`` is undefined, and ``print tbName`` is Python 2 syntax.
# Preserved byte-for-byte; the SQL templates must be restored before reuse.
def _updated_row_counts_in_tcs_helper_catalogue_tables_info ( self ) : self . log . debug ( 'starting the ``_updated_row_counts_in_tcs_helper_catalogue_tables_info`` method' ) sqlQuery = u % locals ( ) rows = readquery ( log = self . log , sqlQuery = sqlQuery , dbConn = self . cataloguesDbConn , quiet = False ) for row in rows : tbName = row [ "table_name" ] sqlQuery = u % locals ( ) writequery ( log = self . log , sqlQuery = sqlQuery , dbConn = self . cataloguesDbConn , ) sqlQuery = u % locals ( ) rows = readquery ( log = self . log , sqlQuery = sqlQuery , dbConn = self . cataloguesDbConn , quiet = False ) for row in rows : tbName = row [ "view_name" ] print tbName sqlQuery = u % locals ( ) writequery ( log = self . log , sqlQuery = sqlQuery , dbConn = self . cataloguesDbConn , ) self . log . debug ( 'completed the ``_updated_row_counts_in_tcs_helper_catalogue_tables_info`` method' ) return None
updated row counts in tcs catalogue tables
49,706
def restart_listener(self, topics):
    """Re-create the listener with a fresh topic list after a config update."""
    listener = self.listener
    if listener is not None and listener.running:
        self.stop()
    # Re-run initialization so the new topics take effect.
    self.__init__(topics=topics)
Restart listener after configuration update .
49,707
def stop(self):
    """Stop the listener and join its worker thread, if one exists."""
    self.logger.debug("Stopping listener.")
    self.listener.stop()
    if self.thread is not None:
        self.thread.join()
        self.thread = None
    self.logger.debug("Listener stopped.")
Stop listener .
49,708
def create_subscriber(self):
    """Create the NSSubscriber instance for the configured topics and addresses."""
    # Nothing to do if a subscriber exists already or no topics are set.
    if self.subscriber is not None or not self.topics:
        return
    self.subscriber = NSSubscriber(self.services, self.topics,
                                   addr_listener=True,
                                   addresses=self.addresses,
                                   nameserver=self.nameserver)
    self.recv = self.subscriber.start().recv
Create a subscriber instance using specified addresses and message types .
49,709
def stop(self):
    """Stop the subscriber and drop the instance."""
    self.running = False
    # Give the polling loop a moment to notice the flag before teardown.
    time.sleep(1)
    if self.subscriber is not None:
        self.subscriber.stop()
        self.subscriber = None
Stop subscriber and delete the instance
49,710
def get_current_semver(data):
    """Determine the current version from a dict of all available version data.

    :raises ValueError: when candidate versions conflict, or none is found.
    """
    known = {
        key: data.get(alias)
        for key, alias in config._forward_aliases.items()
        if data.get(alias) is not None
    }
    # Highest-priority candidates: the explicit strict/plain version fields.
    potentials = [
        known.pop(Constants.VERSION_STRICT_FIELD, None),
        known.pop(Constants.VERSION_FIELD, None),
    ]
    # A complete major/minor/patch triple can also be assembled.
    components = [known.get(field) for field in SemVerSigFig._fields if field in known]
    if len(components) == 3:
        potentials.append(".".join(components))
    versions = set()
    for candidate in potentials:
        if not candidate:
            continue
        match = re_semver.match(candidate)
        if not match:
            continue
        parts = match.groupdict()
        parts.pop("tail")
        versions.add(SemVer(**parts))
    if len(versions) > 1:
        raise ValueError("conflicting versions within project: %s" % versions)
    if not versions:
        _LOG.debug("key pairs found: \n%r", known)
        raise ValueError("could not find existing semver")
    return versions.pop()
Given a dictionary of all version data available determine the current version
49,711
def make_new_semver(current_semver, all_triggers, **overrides):
    """Increment *current_semver* according to the triggered significant figure.

    Explicit keyword overrides win; once a figure has been bumped, every
    lower significant figure resets to zero.
    """
    new_fields = {}
    bumped = False
    for sig_fig in SemVerSigFig:
        current_value = getattr(current_semver, sig_fig)
        override = overrides.get(sig_fig)
        if override is not None:
            new_fields[sig_fig] = override
            if int(override) > int(current_value):
                bumped = True
        elif bumped:
            new_fields[sig_fig] = "0"
        elif sig_fig in all_triggers:
            new_fields[sig_fig] = str(int(current_value) + 1)
            bumped = True
        else:
            new_fields[sig_fig] = current_value
    return SemVer(**new_fields)
Defines how to increment semver based on which significant figure is triggered
49,712
def loads(data):
    """Parser entry point: parse the textual output of a traceroute run.

    :param data: the raw traceroute output as a string.
    :return: a Traceroute populated with Hop/Probe objects.
    :raises ParseError: when a probe token cannot be interpreted.
    """
    data += "\n_EOS_"  # sentinel so trailing-hop regexes terminate cleanly
    match_dest = RE_HEADER.search(data)
    dest_name = match_dest.group(1)
    dest_ip = match_dest.group(2)
    traceroute = Traceroute(dest_name, dest_ip)
    for match_hop in RE_HOP.findall(data):
        idx = int(match_hop[0])
        asn = int(match_hop[1]) if match_hop[1] else None
        hop = Hop(idx, asn)
        # FIX: materialize the filtered tokens -- the original kept the lazy
        # ``filter`` object, which breaks ``len()`` and indexing on Python 3.
        probes_data = [tok for tok in match_hop[2].split() if tok.lower() != 'ms']
        i = 0
        while i < len(probes_data):
            name = None
            ip = None
            rtt = None
            anno = ''
            if RE_PROBE_RTT.match(probes_data[i]):
                # Bare RTT token: probe answered from the same host as before.
                rtt = float(probes_data[i])
                i += 1
            elif RE_PROBE_NAME.match(probes_data[i]):
                name = probes_data[i]
                ip = probes_data[i + 1].strip('()')
                rtt = float(probes_data[i + 2])
                i += 3
            elif RE_PROBE_TIMEOUT.match(probes_data[i]):
                rtt = None
                i += 1
            else:
                ext = "i: %d\nprobes_data: %s\nname: %s\nip: %s\nrtt: %s\nanno: %s" % (
                    i, probes_data, name, ip, rtt, anno)
                raise ParseError("Parse error \n%s" % ext)
            # Optional annotation token (e.g. !H) following the probe.
            try:
                if RE_PROBE_ANNOTATION.match(probes_data[i]):
                    anno = probes_data[i]
                    i += 1
            except IndexError:
                pass
            hop.add_probe(Probe(name, ip, rtt, anno))
        traceroute.add_hop(hop)
    return traceroute
Parser entry point . Parses the output of a traceroute execution
49,713
def add_probe(self, probe):
    """Append *probe* to this hop, inheriting host info from the previous probe."""
    if self.probes:
        previous = self.probes[-1]
        if not probe.ip:
            # A bare-RTT probe belongs to the same responder as the last one.
            probe.ip = previous.ip
            probe.name = previous.name
    self.probes.append(probe)
Adds a Probe instance to this hop's results.
49,714
def synchronized(func, *args, **kwargs):
    """Call *func*, holding ``args[0]._lock`` when the receiver exposes one.

    Intended as a decorator body: methods on objects that carry ``_lock``
    run under that lock; anything else is invoked directly.
    """
    if args and hasattr(args[0], '_lock'):
        with args[0]._lock:
            return func(*args, **kwargs)
    return func(*args, **kwargs)
Function decorator to make a function synchronized on ``self._lock``.
49,715
def normalize_job_id(job_id):
    """Coerce *job_id* to a ``uuid.UUID``, passing UUID instances through."""
    if isinstance(job_id, uuid.UUID):
        return job_id
    return uuid.UUID(job_id)
Convert a value to a job id .
49,716
def start(self):
    """Start the JobManager worker thread and block until it is running.

    :raises RuntimeError: when the manager has already been started.
    """
    if self._thread_running.is_set():
        raise RuntimeError('the JobManager has already been started')
    self._thread.start()
    self._thread_running.wait()
Start the JobManager thread .
49,717
def stop(self):
    """Stop the JobManager thread, waiting for all running job threads."""
    self.logger.debug('stopping the job manager')
    self._thread_running.clear()
    self._thread_shutdown.wait()
    with self._job_lock:
        self.logger.debug('waiting on ' + str(len(self._jobs)) + ' job threads')
        # Join every job thread that is still alive before shutting down.
        for job_desc in self._jobs.values():
            job = job_desc['job']
            if job is not None and job.is_alive():
                job.join()
    self._thread.join()
    self.logger.info('the job manager has been stopped')
Stop the JobManager thread .
49,718
def job_run(self, callback, parameters=None):
    """Add a job and execute it once immediately.

    :param callback: the callable to run.
    :param parameters: positional arguments for *callback* (scalar or sequence).
    :return: the new job's uuid.
    :raises RuntimeError: when the manager is not running.
    """
    if not self._thread_running.is_set():
        raise RuntimeError('the JobManager is not running')
    parameters = parameters or ()
    if not isinstance(parameters, (list, tuple)):
        parameters = (parameters,)
    job_desc = {
        'job': JobRun(callback, parameters),
        'last_run': None,
        'run_every': datetime.timedelta(0, 1),
        'callback': callback,
        'parameters': parameters,
        'enabled': True,
        'tolerate_exceptions': False,
        'run_count': 0,
        'expiration': 0,
    }
    job_id = uuid.uuid4()
    self.logger.info('adding new job with id: ' + str(job_id) +
                     ' and callback function: ' + callback.__name__)
    with self._job_lock:
        self._jobs[job_id] = job_desc
        self._job_execute(job_id)
    return job_id
Add a job and run it once immediately .
49,719
def job_add(self, callback, parameters=None, hours=0, minutes=0, seconds=0,
            tolerate_exceptions=True, expiration=None):
    """Schedule *callback* to run every hours/minutes/seconds.

    ``expiration`` may be a run-count (int), a timedelta offset from now,
    or an absolute datetime; ``None`` means the job never expires.
    :return: the new job's uuid.
    :raises RuntimeError: when the manager is not running.
    """
    if not self._thread_running.is_set():
        raise RuntimeError('the JobManager is not running')
    parameters = parameters or ()
    if not isinstance(parameters, (list, tuple)):
        parameters = (parameters,)
    interval = datetime.timedelta(0, ((hours * 60 * 60) + (minutes * 60) + seconds))
    job_desc = {
        'job': JobRun(callback, parameters),
        'last_run': None,
        'run_every': interval,
        'callback': callback,
        'parameters': parameters,
        'enabled': True,
        'tolerate_exceptions': tolerate_exceptions,
        'run_count': 0,
    }
    if isinstance(expiration, int):
        job_desc['expiration'] = expiration
    elif isinstance(expiration, datetime.timedelta):
        job_desc['expiration'] = self.now() + expiration
    elif isinstance(expiration, datetime.datetime):
        job_desc['expiration'] = expiration
    else:
        job_desc['expiration'] = None
    job_id = uuid.uuid4()
    self.logger.info('adding new job with id: ' + str(job_id) +
                     ' and callback function: ' + callback.__name__)
    with self._job_lock:
        self._jobs[job_id] = job_desc
    return job_id
Add a job to the job manager .
49,720
def job_count_enabled(self):
    """Return the number of jobs currently enabled."""
    return sum(1 for job_desc in self._jobs.values() if job_desc['enabled'])
Return the number of enabled jobs .
49,721
def job_enable(self, job_id):
    """Mark the job identified by *job_id* as enabled."""
    job_id = normalize_job_id(job_id)
    with self._job_lock:
        self._jobs[job_id]['enabled'] = True
Enable a job .
49,722
def job_disable(self, job_id):
    """Mark the job identified by *job_id* as disabled; disabled jobs never run."""
    job_id = normalize_job_id(job_id)
    with self._job_lock:
        self._jobs[job_id]['enabled'] = False
Disable a job . Disabled jobs will not be executed .
49,723
def job_delete(self, job_id, wait=True):
    """Delete a job, optionally waiting for a running instance to finish.

    :param wait: join the job's thread before removal when it is running.
    """
    job_id = normalize_job_id(job_id)
    self.logger.info('deleting job with id: ' + str(job_id) +
                     ' and callback function: ' + self._jobs[job_id]['callback'].__name__)
    job_desc = self._jobs[job_id]
    with self._job_lock:
        job_desc['enabled'] = False
        if wait and self.job_is_running(job_id):
            job_desc['job'].join()
        del self._jobs[job_id]
Delete a job .
49,724
def job_is_enabled(self, job_id):
    """Return whether the job identified by *job_id* is enabled."""
    job_id = normalize_job_id(job_id)
    return self._jobs[job_id]['enabled']
Check if a job is enabled .
49,725
def job_is_running(self, job_id):
    """Return True when the job exists and its thread is alive.

    False is returned for unknown job ids.
    """
    job_id = normalize_job_id(job_id)
    job_desc = self._jobs.get(job_id)
    if job_desc is None:
        return False
    job = job_desc['job']
    if not job:
        return False
    return job.is_alive()
Check if a job is currently running . False is returned if the job does not exist .
49,726
def bin_b64_type(arg):
    """argparse type: decode *arg* as standard base64, returning bytes.

    :raises argparse.ArgumentTypeError: for malformed base64 input.
    """
    try:
        return base64.standard_b64decode(arg)
    except (binascii.Error, TypeError):
        raise argparse.ArgumentTypeError("{0} is invalid base64 data".format(repr(arg)))
An argparse type representing binary data encoded in base64 .
49,727
def bin_hex_type(arg):
    """argparse type: decode hex data ('ab:cd', '\\xab\\xcd' or 'abcd') to bytes.

    :raises argparse.ArgumentTypeError: for malformed hex input.
    """
    # Normalize colon-separated and backslash-x-prefixed notations first.
    if re.match(r'^[a-f0-9]{2}(:[a-f0-9]{2})+$', arg, re.I):
        arg = arg.replace(':', '')
    elif re.match(r'^(\\x[a-f0-9]{2})+$', arg, re.I):
        arg = arg.replace('\\x', '')
    try:
        return binascii.a2b_hex(arg)
    except (binascii.Error, TypeError):
        raise argparse.ArgumentTypeError("{0} is invalid hex data".format(repr(arg)))
An argparse type representing binary data encoded in hex .
49,728
def dir_type(arg):
    """argparse type: validate that *arg* names an existing directory."""
    if os.path.isdir(arg):
        return arg
    raise argparse.ArgumentTypeError("{0} is not a valid directory".format(repr(arg)))
An argparse type representing a valid directory .
49,729
def email_type(arg):
    """argparse type: validate that *arg* is a well-formed email address."""
    if is_valid_email_address(arg):
        return arg
    raise argparse.ArgumentTypeError("{0} is not a valid email address".format(repr(arg)))
An argparse type representing an email address .
49,730
def log_level_type(arg):
    """argparse type: convert a level name (any case) to its logging constant.

    :raises argparse.ArgumentTypeError: for names that are not standard levels.
    """
    # Idiom fix: ``x not in ...`` instead of ``not x in ...``.
    if arg.upper() not in ('NOTSET', 'DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'):
        raise argparse.ArgumentTypeError("{0} is not a valid log level".format(repr(arg)))
    return getattr(logging, arg.upper())
An argparse type representing a logging level .
49,731
def port_type(arg):
    """argparse type: parse a TCP/UDP port number in the range 0-65535.

    Accepts any Python integer literal (e.g. '80', '0x1f90').
    :raises argparse.ArgumentTypeError: for non-integers or out-of-range values.
    """
    error_msg = "{0} is not a valid port".format(repr(arg))
    try:
        value = ast.literal_eval(arg)
    except (ValueError, SyntaxError):
        # FIX: literal_eval raises SyntaxError for malformed input such as
        # '1+'; the original only caught ValueError and let that escape.
        raise argparse.ArgumentTypeError(error_msg)
    # FIX: reject non-integer literals ('1.5', "'x'", tuples...) instead of
    # comparing them, which returned floats or raised TypeError on Python 3.
    if not isinstance(value, int) or value < 0 or value > 65535:
        raise argparse.ArgumentTypeError(error_msg)
    return value
An argparse type representing a tcp or udp port number .
49,732
def timespan_type(arg):
    """argparse type: parse a human timespan such as '6h' (six hours)."""
    try:
        return parse_timespan(arg)
    except ValueError:
        raise argparse.ArgumentTypeError("{0} is not a valid time span".format(repr(arg)))
An argparse type representing a timespan such as 6h for 6 hours .
49,733
def get_own_ip():
    """Return the host's externally-facing IP address.

    Connects a UDP socket toward a public resolver (no packets are sent)
    and reads the local address the routing table selected; falls back to
    127.0.0.1 when name resolution fails.
    """
    sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    try:
        sock.connect(("8.8.8.8", 80))
    except socket.gaierror:
        own_ip = "127.0.0.1"
    else:
        own_ip = sock.getsockname()[0]
    finally:
        sock.close()
    return own_ip
Get the host's IP address.
49,734
def send(self, msg):
    """Publish *msg* under the publisher lock; returns self for chaining."""
    with self._pub_lock:
        self.publish.send_string(msg)
    return self
Send the given message .
49,735
def start(self):
    """Start the publisher and begin broadcasting its address.

    :return: the underlying publisher instance.
    """
    bind_addr = "tcp://*:" + str(self._port)
    self._publisher = self._publisher_class(bind_addr, self._name)
    LOGGER.debug("entering publish %s", str(self._publisher.destination))
    # Advertise the concrete address (own IP + bound port) on the network.
    advertised = "tcp://" + str(get_own_ip()) + ":" + str(self._publisher.port_number)
    self._broadcaster = sendaddressservice(self._name, advertised,
                                           self._aliases,
                                           self._broadcast_interval,
                                           self._nameservers).start()
    return self._publisher
Start the publisher .
49,736
def configure_stream_logger(logger='', level=None, formatter='%(levelname)-8s %(message)s'):
    """Configure a console StreamHandler for *logger*, removing any handlers
    already attached to the root logger and enabling warning capture.

    :param logger: name of the logger to configure ('' for the root logger).
    :param level: level name or number for the console handler (default WARNING).
    :param formatter: format string or logging.Formatter instance.
    :return: the console handler that was installed.
    :raises ValueError: for an unknown level name.
    :raises TypeError: for an invalid formatter.
    """
    level = level or logging.WARNING
    if isinstance(level, str):
        level_value = getattr(logging, level, None)
        if level_value is None:
            # FIX: the original concatenated the already-None ``level`` into
            # the message, raising TypeError instead of the intended ValueError.
            raise ValueError('invalid log level: ' + level)
        level = level_value
    root_logger = logging.getLogger('')
    for handler in root_logger.handlers:
        root_logger.removeHandler(handler)
    logging.getLogger(logger).setLevel(logging.DEBUG)
    console_log_handler = logging.StreamHandler()
    console_log_handler.setLevel(level)
    if isinstance(formatter, str):
        formatter = logging.Formatter(formatter)
    elif not isinstance(formatter, logging.Formatter):
        raise TypeError('formatter must be an instance of logging.Formatter')
    console_log_handler.setFormatter(formatter)
    logging.getLogger(logger).addHandler(console_log_handler)
    logging.captureWarnings(True)
    return console_log_handler
Configure the default stream handler for logging messages to the console remove other logging handlers and enable capturing warnings .
49,737
def format_bytes_size(val):
    """Convert a byte count into a human readable string (e.g. '1.50 MB').

    :raises OverflowError: for values of 1024 EB or more.
    """
    if not val:
        return '0 bytes'
    remaining = val
    for unit in ('bytes', 'KB', 'MB', 'GB', 'TB', 'PB', 'EB'):
        if remaining < 1024.0:
            return "{0:.2f} {1}".format(remaining, unit)
        remaining /= 1024.0
    raise OverflowError()
Take a number of bytes and convert it to a human readable number .
49,738
def grep(expression, file, flags=0, invert=False):
    """Search *file* and return all lines matching a regular expression.

    :param file: a path string or an open file-like object of lines.
    :param flags: flags forwarded to ``re.search``.
    :param invert: return the lines that do NOT match instead.
    """
    def _matching(lines):
        # xor with ``invert`` flips match/non-match selection.
        return [line for line in lines
                if bool(re.search(expression, line, flags=flags)) ^ invert]

    # FIX: when given a path the original opened the file and never closed
    # it; manage the handle with a context manager instead.
    if isinstance(file, str):
        with open(file) as handle:
            return _matching(handle)
    return _matching(file)
Search a file and return a list of all lines that match a regular expression .
49,739
def parse_case_snake_to_camel(snake, upper_first=True):
    """Convert a snake_case string to CamelCase (or lowerCamelCase).

    :param upper_first: capitalize the first word as well.
    """
    words = snake.split('_')
    head = words[0].title() if upper_first else words[0]
    return head + ''.join(word.title() for word in words[1:])
Convert a string from snake_case to CamelCase .
49,740
def parse_to_slug(words, maxlen=24):
    """Parse a string into a URL-safe slug of at most *maxlen* characters.

    Letters are lowercased, digits are kept, apostrophes are dropped, and
    runs of any other characters collapse into single hyphens.
    """
    slug = ''
    limit = min(maxlen, len(words))
    for char in words:
        if len(slug) == limit:
            break
        code = ord(char)
        if code == 0x27:
            # Apostrophe: drop entirely so "don't" becomes "dont".
            continue
        if 0x30 <= code <= 0x39 or 0x61 <= code <= 0x7a:
            slug += char
        elif 0x41 <= code <= 0x5a:
            slug += chr(code + 0x20)  # fold upper-case to lower-case
        elif slug and slug[-1] != '-':
            slug += '-'
    if slug and slug[-1] == '-':
        slug = slug[:-1]
    return slug
Parse a string into a slug format suitable for use in URLs and other character restricted applications . Only utf - 8 strings are supported at this time .
49,741
def selection_collision(selections, poolsize):
    """Probability (percent) that *selections* values drawn at random from a
    pool of *poolsize* unique values contain at least one duplicate.

    This is the classic Birthday Problem.
    """
    pool = float(poolsize)
    p_all_unique = 100.0
    for drawn in range(selections):
        p_all_unique *= (pool - drawn) / pool
    return 100.0 - p_all_unique
Calculate the probability that two random values selected from an arbitrary sized pool of unique values will be equal . This is commonly known as the Birthday Problem .
49,742
def unique(seq, key=None):
    """Return a deduplicated copy of a list or tuple, preserving order.

    :param key: maps items to the marker used for equality (identity by
        default); markers need not be hashable.
    :raises TypeError: when *seq* is neither a list nor a tuple.
    """
    if key is None:
        key = lambda item: item
    seq_type = type(seq)
    if seq_type not in (list, tuple):
        raise TypeError("unique argument 1 must be list or tuple, not {0}".format(seq_type.__name__))
    markers = []
    kept = []
    for item in seq:
        marker = key(item)
        if marker not in markers:
            markers.append(marker)
            kept.append(item)
    return seq_type(kept)
Create a unique list or tuple from a provided list or tuple and preserve the order .
49,743
def xfrange(start, stop=None, step=1):
    """Yield an arithmetic progression of floats, like range() but fractional.

    With a single argument, counts from 0.0 up to *start*.
    """
    if stop is None:
        start, stop = 0.0, start
    current = float(start)
    while current < stop:
        yield current
        current += step
Iterate through an arithmetic progression .
49,744
def set(self, option, value):
    """Store *option* = *value* in this handler's configured section."""
    self.config_parser.set(self.section_name, option, value)
Set an option to an arbitrary value .
49,745
def reformat_exception(cls, message, err, *format_args, **format_kwds):
    """Format *message*, append the original exception's type name and text,
    and sanitize the combined string.
    """
    combined = "{} -- {}: {}".format(
        message.format(*format_args, **format_kwds),
        type(err).__name__,
        str(err),
    )
    return cls.sanitize_errstr(combined)
Reformats an exception by adding a message to it and reporting the original exception name and message
49,746
def require_condition(cls, expr, message, *format_args, **format_kwds):
    """Assert a state: raise this exception type with *message* when *expr*
    evaluates falsey; a no-op otherwise.
    """
    if expr:
        return
    raise cls(message, *format_args, **format_kwds)
used to assert a certain state . If the expression renders a false value an exception will be raised with the supplied message
49,747
# Purpose: sphinx-hwt directive visitor -- generate the schematic JSON for the
# documented Unit subclass and append an html reference node linking to the
# schematic viewer.
# Flow: resolve the absolute name from the enclosing py:class signature;
# either import the class directly or (when a constructor function name is
# given in the node) import and call that function; synthesize the unit,
# lay it out with ELK and dump the JSON next to the document; finally add a
# "schematic" reference node pointing the viewer at that file.
# NOTE(review): ``node [ "constructor_fn " ]`` has a trailing space inside the
# key -- presumably matching the key used where the node was built; confirm
# before changing. Preserved byte-for-byte (statement continues on next line).
def visit_html ( self , node ) : parentClsNode = node . parent . parent assert parentClsNode . attributes [ 'objtype' ] == 'class' assert parentClsNode . attributes [ 'domain' ] == 'py' sign = node . parent . parent . children [ 0 ] assert isinstance ( sign , desc_signature ) absolute_name = sign . attributes [ 'ids' ] [ 0 ] _construct = node [ "constructor_fn " ] serialno = node [ "serialno" ] try : if _construct is None : unitCls = generic_import ( absolute_name ) if not issubclass ( unitCls , Unit ) : raise AssertionError ( "Can not use hwt-schematic sphinx directive and create schematic" " for %s because it is not subclass of %r" % ( absolute_name , Unit ) ) u = unitCls ( ) else : assert len ( _construct ) > 0 and RE_IS_ID . match ( _construct ) , _construct _absolute_name = [ ] assert ".." not in absolute_name , absolute_name for n in absolute_name . split ( sep = "." ) [ : - 1 ] : if n != "" : _absolute_name . append ( n ) _absolute_name . append ( _construct ) constructor_fn = generic_import ( _absolute_name ) u = constructor_fn ( ) if not isinstance ( u , Unit ) : raise AssertionError ( "Can not use hwt-schematic sphinx directive and create schematic" " for %s because function did not returned instance of %r, (%r)" % ( _absolute_name , Unit , u ) ) schem_file = SchematicPaths . get_sch_file_name_absolute ( self . document , absolute_name , serialno ) makedirs ( path . dirname ( schem_file ) , exist_ok = True ) with open ( schem_file , "w" ) as f : synthesised ( u , DEFAULT_PLATFORM ) g = UnitToLNode ( u , optimizations = DEFAULT_LAYOUT_OPTIMIZATIONS ) idStore = ElkIdStore ( ) data = g . toElkJson ( idStore ) json . dump ( data , f ) viewer = SchematicPaths . get_sch_viewer_link ( self . document ) sch_name = SchematicPaths . get_sch_file_name ( self . document , absolute_name , serialno ) ref = nodes . reference ( text = _ ( "schematic" ) , refuri = "%s?schematic=%s" % ( viewer , path . join ( SchematicPaths . 
SCHEMATIC_DIR_PREFIX , sch_name ) ) ) node += ref except Exception as e : logging . error ( e , exc_info = True ) raise Exception ( "Error occured while processing of %s" % absolute_name )
Generate html elements and schematic json
49,748
def run(self):
    """Install the JS files required by the package.

    Uses npm when available; otherwise falls back to any prebuilt files
    already bundled with the package, and fails when neither exists.
    """
    has_npm = npm_installation_check()
    if has_npm:
        run_npm_install()
    else:
        print("Warning: npm not installed using prebuilded js files!", file=sys.stderr)
    for js in JS_FILES:
        downloaded_js_name = os.path.join(TOP_DIR, js)
        installed_js_name = os.path.join(TOP_DIR, "sphinx_hwt", "html", js)
        if has_npm:
            assert os.path.exists(downloaded_js_name), downloaded_js_name
            os.makedirs(os.path.dirname(installed_js_name), exist_ok=True)
            copyfile(downloaded_js_name, installed_js_name)
            print("copy generated from NPM packages", installed_js_name)
        elif os.path.exists(installed_js_name):
            print("using prebuilded", installed_js_name)
        else:
            raise Exception("Can not find npm,"
                            " which is required for the installation "
                            "and this is pacpage has not js prebuilded")
Download the npm packages required by package.json and extract the required files from them.
49,749
def density(self):
    """Stellar mean density in CGS units (g / cm^3)."""
    radius_cm = self.radius * _Rsun
    mass_g = self.mass * _Msun
    # rho = M / (4/3 * pi * R^3) = 0.75 * M / (pi * R^3)
    return 0.75 * mass_g / (np.pi * radius_cm * radius_cm * radius_cm)
Stellar density in CGS units
49,750
def get_context():
    """Return the zmq context for the current process.

    One context is kept per PID so forked children never reuse a parent's
    context.
    """
    pid = os.getpid()
    if pid not in context:
        context[pid] = zmq.Context()
        logger.debug('renewed context for PID %d', pid)
    return context[pid]
Provide the context to use .
49,751
def strp_isoformat(strg):
    """Decode an ISO-formatted string ('YYYY-MM-DDTHH:MM:SS[.ffffff]') into a
    datetime; datetime inputs pass through unchanged.

    A time string without microseconds is allowed.
    :raises ValueError: for strings of implausible length.
    """
    if isinstance(strg, datetime):
        return strg
    if len(strg) < 19 or len(strg) > 26:
        if len(strg) > 30:
            strg = strg[:30] + '...'
        raise ValueError("Invalid ISO formatted time string '%s'" % strg)
    if "." not in strg:
        strg += '.000000'
    # FIX: dropped the fallback for Python < 2.6 (no %f strptime support);
    # that branch is dead on any supported interpreter.
    return datetime.strptime(strg, "%Y-%m-%dT%H:%M:%S.%f")
Decode an ISO formatted string to a datetime object . Allow a time - string without microseconds .
49,752
def find_analysis(self, family, started_at, status):
    """Fetch the single analysis matching family, start time and status."""
    matches = self.Analysis.query.filter_by(
        family=family,
        started_at=started_at,
        status=status,
    )
    return matches.first()
Find a single analysis .
49,753
def analyses(self, *, family: str = None, query: str = None, status: str = None,
             deleted: bool = None, temp: bool = False,
             before: dt.datetime = None, is_visible: bool = None):
    """Fetch analyses from the database, newest first, applying the
    requested filters.

    ``family`` takes precedence over ``query``, which is a free-text
    search across family and status.
    """
    records = self.Analysis.query
    if family:
        records = records.filter_by(family=family)
    elif query:
        records = records.filter(sqa.or_(
            self.Analysis.family.like(f"%{query}%"),
            self.Analysis.status.like(f"%{query}%"),
        ))
    if status:
        records = records.filter_by(status=status)
    if isinstance(deleted, bool):
        records = records.filter_by(is_deleted=deleted)
    if temp:
        records = records.filter(self.Analysis.status.in_(TEMP_STATUSES))
    if before:
        records = records.filter(self.Analysis.started_at < before)
    if is_visible is not None:
        records = records.filter_by(is_visible=is_visible)
    return records.order_by(self.Analysis.started_at.desc())
Fetch analyses form the database .
49,754
def analysis(self, analysis_id: int) -> models.Analysis:
    """Fetch a single analysis by primary key."""
    return self.Analysis.query.get(analysis_id)
Get a single analysis .
49,755
def track_update(self):
    """Stamp the metadata record with the current time and commit."""
    record = self.info()
    record.updated_at = dt.datetime.now()
    self.commit()
Update the latest-updated timestamp in the database.
49,756
def add_pending(self, family: str, email: str = None) -> models.Analysis:
    """Insert a 'pending' analysis entry, optionally linked to a user."""
    new_entry = self.Analysis(
        family=family,
        status='pending',
        started_at=dt.datetime.now(),
    )
    new_entry.user = self.user(email) if email else None
    self.add_commit(new_entry)
    return new_entry
Add pending entry for an analysis .
49,757
def add_user(self, name: str, email: str) -> models.User:
    """Create and persist a new user record."""
    user_obj = self.User(name=name, email=email)
    self.add_commit(user_obj)
    return user_obj
Add a new user to the database .
49,758
def user(self, email: str) -> models.User:
    """Fetch the user with the given email address, or None."""
    return self.User.query.filter_by(email=email).first()
Fetch a user from the database .
49,759
def start(self):
    """Launch the receiver thread unless it is already running.

    :return: self, for chaining.
    """
    if not self._is_running:
        self._do_run = True
        self._thread.start()
    return self
Start the receiver .
49,760
def _check_age(self, pub, min_interval=timedelta(seconds=0)):
    """Drop addresses older than the configured max age, publishing a
    removal message for each; rate-limited to one sweep per *min_interval*.
    """
    now = datetime.utcnow()
    if (now - self._last_age_check) <= min_interval:
        return
    LOGGER.debug("%s - checking addresses", str(datetime.utcnow()))
    self._last_age_check = now
    expired = []
    with self._address_lock:
        for addr, metadata in self._addresses.items():
            if now - metadata["receive_time"] <= self._max_age:
                continue
            mda = {'status': False,
                   'URI': addr,
                   'service': metadata['service']}
            msg = Message('/address/' + metadata['name'], 'info', mda)
            expired.append(addr)
            LOGGER.info("publish remove '%s'", str(msg))
            pub.send(msg.encode())
        # Delete after iteration; mutating the dict mid-loop is unsafe.
        for addr in expired:
            del self._addresses[addr]
Check the age of the receiver .
49,761
# Purpose: main loop of the address receiver. Sets up either a multicast
# receiver (retrying every 10 s on ENODEV) or a simple localhost receiver,
# then publishes every new address announcement it decodes, while aging out
# stale addresses and optionally emitting heartbeats.
# NOTE: the inner ``try/except SocketTimeout/finally`` relies on the finally
# clause running even on ``continue`` -- the age check and heartbeat happen
# on every iteration, timeout or not. Statement order is load-bearing here.
# NOTE(review): the first ``recv = MulticastReceiver ( port ) . settimeout ( 2. )``
# before the retry loop looks redundant (the loop immediately rebuilds it) and
# is not protected by the IOError handler -- presumably a leftover; confirm
# against upstream before removing.
def _run ( self ) : port = broadcast_port nameservers = [ ] if self . _multicast_enabled : recv = MulticastReceiver ( port ) . settimeout ( 2. ) while True : try : recv = MulticastReceiver ( port ) . settimeout ( 2. ) LOGGER . info ( "Receiver initialized." ) break except IOError as err : if err . errno == errno . ENODEV : LOGGER . error ( "Receiver initialization failed " "(no such device). " "Trying again in %d s" , 10 ) time . sleep ( 10 ) else : raise else : recv = _SimpleReceiver ( port ) nameservers = [ "localhost" ] self . _is_running = True with Publish ( "address_receiver" , self . _port , [ "addresses" ] , nameservers = nameservers ) as pub : try : while self . _do_run : try : data , fromaddr = recv ( ) LOGGER . debug ( "data %s" , data ) del fromaddr except SocketTimeout : if self . _multicast_enabled : LOGGER . debug ( "Multicast socket timed out on recv!" ) continue finally : self . _check_age ( pub , min_interval = self . _max_age / 20 ) if self . _do_heartbeat : pub . heartbeat ( min_interval = 29 ) msg = Message . decode ( data ) name = msg . subject . split ( "/" ) [ 1 ] if ( msg . type == 'info' and msg . subject . lower ( ) . startswith ( self . _subject ) ) : addr = msg . data [ "URI" ] msg . data [ 'status' ] = True metadata = copy . copy ( msg . data ) metadata [ "name" ] = name LOGGER . debug ( 'receiving address %s %s %s' , str ( addr ) , str ( name ) , str ( metadata ) ) if addr not in self . _addresses : LOGGER . info ( "nameserver: publish add '%s'" , str ( msg ) ) pub . send ( msg . encode ( ) ) self . _add ( addr , metadata ) finally : self . _is_running = False recv . close ( )
Run the receiver .
49,762
def _add ( self , adr , metadata ) : with self . _address_lock : metadata [ "receive_time" ] = datetime . utcnow ( ) self . _addresses [ adr ] = metadata
Add an address .
49,763
def get_console_width():
    """Return the width (in columns) of the available console window.

    Autodetection works on Windows (via the Win32 console API) and POSIX
    (via the TIOCGWINSZ ioctl); other platforms get a default of 80.
    Returns 0 on Windows if the screen-buffer query fails.

    Fixes: removed the unused STD_INPUT_HANDLE / STD_ERROR_HANDLE locals and
    simplified the pointless ``(winsize[1], winsize[0])[0]`` tuple indexing.
    """
    if os.name == 'nt':
        STD_OUTPUT_HANDLE = -11
        from ctypes import windll, Structure, byref
        try:
            from ctypes.wintypes import SHORT, WORD, DWORD
        except ImportError:
            from ctypes import (c_short as SHORT, c_ushort as WORD,
                                c_ulong as DWORD)
        console_handle = windll.kernel32.GetStdHandle(STD_OUTPUT_HANDLE)

        class COORD(Structure):
            _fields_ = [("X", SHORT), ("Y", SHORT)]

        class SMALL_RECT(Structure):
            _fields_ = [("Left", SHORT), ("Top", SHORT),
                        ("Right", SHORT), ("Bottom", SHORT)]

        class CONSOLE_SCREEN_BUFFER_INFO(Structure):
            _fields_ = [("dwSize", COORD),
                        ("dwCursorPosition", COORD),
                        ("wAttributes", WORD),
                        ("srWindow", SMALL_RECT),
                        ("dwMaximumWindowSize", DWORD)]

        sbi = CONSOLE_SCREEN_BUFFER_INFO()
        ret = windll.kernel32.GetConsoleScreenBufferInfo(console_handle,
                                                         byref(sbi))
        if ret == 0:
            return 0
        return sbi.srWindow.Right + 1
    elif os.name == 'posix':
        from fcntl import ioctl
        from termios import TIOCGWINSZ
        from array import array
        # TIOCGWINSZ fills (rows, cols, xpixels, ypixels).
        winsize = array("H", [0] * 4)
        try:
            ioctl(sys.stdout.fileno(), TIOCGWINSZ, winsize)
        except IOError:
            pass  # not a tty: leave the zeros in place
        return winsize[1]
    return 80
Return width of available window area . Autodetection works for Windows and POSIX platforms . Returns 80 for others
49,764
def report_bar(bytes_so_far, total_size, speed, eta):
    """Download-progress callback that renders a full progress bar on stdout."""
    percent = int(bytes_so_far * 100 / total_size)
    current = approximate_size(bytes_so_far).center(9)
    total = approximate_size(total_size).center(9)
    shaded = int(float(bytes_so_far) / total_size * AVAIL_WIDTH)
    bar_done = '=' * (shaded - 1)
    bar_todo = ' ' * (AVAIL_WIDTH - shaded)
    rate = (approximate_size(speed) + '/s').center(11)
    sys.stdout.write(" {0}% [{1}{2}{3}] {4}/{5} {6} eta{7}".format(
        str(percent).center(4), bar_done, '>', bar_todo,
        current, total, rate, eta.center(10)))
    sys.stdout.write("\r")
    sys.stdout.flush()
This callback for the download function is used to print the download bar
49,765
def report_unknown(bytes_so_far, total_size, speed, eta):
    """Download-progress callback used when the total size is unknown."""
    line = "Downloading: {0} / Unknown - {1}/s ".format(
        approximate_size(bytes_so_far), approximate_size(speed))
    sys.stdout.write(line)
    sys.stdout.write("\r")
    sys.stdout.flush()
This callback for the download function is used when the total size is unknown
49,766
def report_onlysize(bytes_so_far, total_size, speed, eta):
    """Compact progress callback for narrow consoles: sizes and ETA only, no bar."""
    percent = int(bytes_so_far * 100 / total_size)
    current = approximate_size(bytes_so_far).center(10)
    total = approximate_size(total_size).center(10)
    text = 'D: {0}% -{1}/{2}'.format(percent, current, total)
    text += "eta {0}".format(eta)
    sys.stdout.write(text)
    sys.stdout.write("\r")
    sys.stdout.flush()
This callback for the download function is used when console width is not enough to print the bar . It prints only the sizes
49,767
def ls_cmd(context, before, status):
    """Display the 30 most recent analysis runs, colour-coded by status."""
    runs = context.obj['store'].analyses(
        status=status,
        deleted=False,
        before=parse_date(before) if before else None,
    ).limit(30)
    for run in runs:
        if run.status == 'pending':
            message = f"{run.id} | {run.family} [{run.status.upper()}]"
        else:
            message = (f"{run.id} | {run.family} {run.started_at.date()} "
                       f"[{run.type.upper()}/{run.status.upper()}]")
        if run.status == 'running':
            message = click.style(f"{message} - {run.progress * 100}/100", fg='blue')
        elif run.status == 'completed':
            message = click.style(f"{message} - {run.completed_at}", fg='green')
        elif run.status == 'failed':
            message = click.style(message, fg='red')
        print(message)
Display recent logs for analyses .
49,768
def analytics(account=None, *args, **kwargs):
    """Simple Google Analytics integration (template tag context function).

    Falls back to ``settings.GOOGLE_ANALYTICS_ACCOUNT`` when no account is
    supplied; raises TemplateSyntaxError when neither source provides one.

    Fix: the bare ``except:`` (which also swallowed KeyboardInterrupt, typos,
    etc.) is narrowed to AttributeError — the error a missing settings
    attribute actually raises.
    """
    if not account:
        try:
            account = settings.GOOGLE_ANALYTICS_ACCOUNT
        except AttributeError:
            raise template.TemplateSyntaxError(
                "Analytics account could not found either "
                "in tag parameters or settings")
    return {'account': account, 'params': kwargs}
Simple Google Analytics integration .
49,769
def analyses():
    """Display a paginated JSON list of analyses, filtered from query args."""
    per_page = int(request.args.get('per_page', 50))
    page = int(request.args.get('page', 1))
    query = store.analyses(
        status=request.args.get('status'),
        query=request.args.get('query'),
        is_visible=request.args.get('is_visible') == 'true' or None,
    )
    query_page = query.paginate(page, per_page=per_page)
    data = []
    for record in query_page.items:
        payload = record.to_dict()
        payload['user'] = record.user.to_dict() if record.user else None
        payload['failed_jobs'] = [job.to_dict() for job in record.failed_jobs]
        data.append(payload)
    return jsonify(analyses=data)
Display analyses .
49,770
def analysis(analysis_id):
    """Display a single analysis as JSON; a PUT request updates it first."""
    analysis_obj = store.analysis(analysis_id)
    if analysis_obj is None:
        return abort(404)
    if request.method == 'PUT':
        analysis_obj.update(request.json)
        store.commit()
    data = analysis_obj.to_dict()
    data['failed_jobs'] = [job.to_dict() for job in analysis_obj.failed_jobs]
    data['user'] = analysis_obj.user.to_dict() if analysis_obj.user else None
    return jsonify(**data)
Display a single analysis .
49,771
def get_datetime_now():
    """Return the current point in time.

    Uses Django's timezone-aware ``now()`` when Django is importable,
    otherwise plain ``datetime.datetime.now()``.
    """
    try:
        from django.utils import timezone
    except ImportError:
        return datetime.datetime.now()
    return timezone.now()
Returns datetime object with current point in time .
49,772
def is_valid_data(obj):
    """Return True if *obj* is JSON serializable (falsy values pass trivially)."""
    if not obj:
        return True
    try:
        json.dumps(obj, default=datetime_encoder)
    except (TypeError, UnicodeDecodeError):
        return False
    return True
Check if data is JSON serializable .
49,773
def datetime_decoder(dct):
    """Recursively convert ISO-formatted datetime strings inside *dct* to datetimes.

    Works on (nested) dicts and lists; non-datetime strings are left untouched.
    """
    if isinstance(dct, list):
        pairs = enumerate(dct)
    elif isinstance(dct, dict):
        pairs = dct.items()
    decoded = []
    for key, val in pairs:
        if isinstance(val, six.string_types):
            try:
                val = strp_isoformat(val)
            except ValueError:
                pass  # not a datetime string: keep as-is
        elif isinstance(val, (dict, list)):
            val = datetime_decoder(val)
        decoded.append((key, val))
    if isinstance(dct, list):
        return [pair[1] for pair in decoded]
    elif isinstance(dct, dict):
        return dict(decoded)
Decode datetimes to python objects .
49,774
def _decode(rawstr):
    """Convert a raw on-the-wire string into a Message attribute dict.

    Raises MessageError on a bad magick word, version, or payload.
    """
    # Normalize to text, trying utf-8 then iso-8859-1, then lossy utf-8.
    try:
        rawstr = rawstr.decode('utf-8')
    except (AttributeError, UnicodeEncodeError):
        pass  # already text
    except (UnicodeDecodeError):
        try:
            rawstr = rawstr.decode('iso-8859-1')
        except (UnicodeDecodeError):
            rawstr = rawstr.decode('utf-8', 'ignore')

    if not rawstr.startswith(_MAGICK):
        raise MessageError("This is not a '%s' message (wrong magick word)"
                           % _MAGICK)
    rawstr = rawstr[len(_MAGICK):]

    # Header is whitespace-separated; the payload (field 7) may contain spaces.
    raw = re.split(r"\s+", rawstr, maxsplit=6)
    if len(raw) < 5:
        raise MessageError("Could node decode raw string: '%s ...'"
                           % str(rawstr[:36]))
    version = raw[4][:len(_VERSION)]
    if not _is_valid_version(version):
        raise MessageError("Invalid Message version: '%s'" % str(version))

    msg = {'subject': raw[0].strip(),
           'type': raw[1].strip(),
           'sender': raw[2].strip(),
           'time': strp_isoformat(raw[3].strip()),
           'version': version}

    try:
        mimetype = raw[5].lower()
        data = raw[6]
    except IndexError:
        mimetype = None

    if mimetype is None:
        msg['data'] = ''
        msg['binary'] = False
    elif mimetype == 'application/json':
        try:
            msg['data'] = json.loads(raw[6], object_hook=datetime_decoder)
            msg['binary'] = False
        except ValueError:
            raise MessageError("JSON decode failed on '%s ...'" % raw[6][:36])
    elif mimetype == 'text/ascii':
        msg['data'] = str(data)
        msg['binary'] = False
    elif mimetype == 'binary/octet-stream':
        msg['data'] = data
        msg['binary'] = True
    else:
        raise MessageError("Unknown mime-type '%s'" % mimetype)

    return msg
Convert a raw string to a Message .
49,775
def _encode(msg, head=False, binary=False):
    """Convert a Message to its raw string representation.

    With ``head=True`` (or empty data) only the header is returned;
    otherwise the payload is appended with an appropriate mime-type tag.
    """
    rawstr = str(_MAGICK) + u"{0:s} {1:s} {2:s} {3:s} {4:s}".format(
        msg.subject, msg.type, msg.sender, msg.time.isoformat(), msg.version)
    if head or not msg.data:
        return rawstr
    if binary:
        return rawstr + ' ' + 'binary/octet-stream' + ' ' + msg.data
    if isinstance(msg.data, six.string_types):
        return rawstr + ' ' + 'text/ascii' + ' ' + msg.data
    return rawstr + ' ' + 'application/json' + ' ' + json.dumps(
        msg.data, default=datetime_encoder)
Convert a Message to a raw string .
49,776
def _getsender():
    """Return the local sender as "user@host".

    Deliberately avoids the getpass module, which inspects various
    environment variables and is unreliable.
    """
    import os
    import pwd
    import socket
    user = pwd.getpwuid(os.getuid())[0]
    host = socket.gethostname()
    return "%s@%s" % (user, host)
Return the local sender. Don't use the getpass module: it looks at various environment variables and is unreliable.
49,777
def _validate(self):
    """Validate the message's attributes, raising MessageError on the first bad one."""
    if not is_valid_subject(self.subject):
        raise MessageError("Invalid subject: '%s'" % self.subject)
    if not is_valid_type(self.type):
        raise MessageError("Invalid type: '%s'" % self.type)
    if not is_valid_sender(self.sender):
        raise MessageError("Invalid sender: '%s'" % self.sender)
    # Binary payloads are exempt from the JSON-serializability requirement.
    if not self.binary and not is_valid_data(self.data):
        raise MessageError("Invalid data: data is not JSON serializable: %s"
                           % str(self.data))
Validate a messages attributes .
49,778
def __generateSPK(self):
    """Generate a new signed pre key.

    The new key's public part is signed with the identity key, and a
    timestamp is stored to allow periodic rotation.
    """
    spk = self.__KeyPair.generate()
    spk_serialized = self.__PublicKeyEncoder.encodePublicKey(spk.pub, self.__curve)
    spk_signature = self.__XEdDSA(mont_priv=self.__ik.priv).sign(spk_serialized)
    self.__spk = {
        "key": spk,
        "signature": spk_signature,
        "timestamp": time.time()
    }
Generate a new SPK and sign its public key using the IK; add the timestamp as well to allow for periodic rotations.
49,779
def __generateOTPKs(self, num_otpks=None):
    """Generate the given amount of one-time pre keys.

    :param num_otpks: number of keys to generate; defaults to the configured
        maximum when None.

    Fixes: ``== None`` replaced by the idiomatic ``is None``; the generation
    loop replaced by a list comprehension.
    """
    if num_otpks is None:
        num_otpks = self.__max_num_otpks
    otpks = [self.__KeyPair.generate() for _ in range(num_otpks)]
    try:
        self.__otpks.extend(otpks)
    except AttributeError:
        # First call: the pool does not exist yet.
        self.__otpks = otpks
Generate the given amount of OTPKs .
49,780
def __checkSPKTimestamp(self):
    """Rotate the SPK by generating a fresh one if it is older than the timeout."""
    age = time.time() - self.__spk["timestamp"]
    if age > self.__spk_timeout:
        self.__generateSPK()
Check whether the SPK is too old and generate a new one in that case .
49,781
def __refillOTPKs(self):
    """Top the OTPK pool back up to the maximum once it drops below the minimum."""
    available = len(self.__otpks)
    if available < self.__min_num_otpks:
        self.__generateOTPKs(self.__max_num_otpks - available)
If the amount of available OTPKs fell below the minimum, refill the OTPKs up to the maximum limit again.
49,782
def hideFromPublicBundle(self, otpk_pub):
    """Move the one-time pre key with public part *otpk_pub* out of the public bundle.

    The key remains usable (it is kept in the hidden pool) and the public
    pool is refilled.
    """
    self.__checkSPKTimestamp()
    for candidate in self.__otpks:
        if candidate.pub == otpk_pub:
            self.__otpks.remove(candidate)
            self.__hidden_otpks.append(candidate)
            self.__refillOTPKs()
Hide a one - time pre key from the public bundle .
49,783
def deleteOTPK(self, otpk_pub):
    """Delete the one-time pre key with public part *otpk_pub*.

    Both the publicly visible and the hidden pool are searched; the public
    pool is refilled afterwards.
    """
    self.__checkSPKTimestamp()
    for candidate in self.__otpks:
        if candidate.pub == otpk_pub:
            self.__otpks.remove(candidate)
    for candidate in self.__hidden_otpks:
        if candidate.pub == otpk_pub:
            self.__hidden_otpks.remove(candidate)
    self.__refillOTPKs()
Delete a one - time pre key either publicly visible or hidden .
49,784
def getPublicBundle(self):
    """Return a PublicBundle holding this state's public key material."""
    self.__checkSPKTimestamp()
    return PublicBundle(
        self.__ik.pub,
        self.__spk["key"].pub,
        self.__spk["signature"],
        [otpk.pub for otpk in self.__otpks],
    )
Fill a PublicBundle object with the public bundle data of this State .
49,785
def getSharedSecretActive(self, other_public_bundle, allow_zero_otpks=False):
    """Perform the active side of the X3DH key exchange.

    Selects keys from the passive party's public bundle, verifies the SPK
    signature, and derives the shared secret.

    :returns: dict with the data to send to the other party (``to_other``),
        the associated data (``ad``) and the shared secret (``sk``).
    :raises KeyExchangeException: on a missing OTPK or bad SPK signature.
    """
    self.__checkSPKTimestamp()

    other_ik = self.__KeyPair(pub=other_public_bundle.ik)
    other_spk = {
        "key": self.__KeyPair(pub=other_public_bundle.spk),
        "signature": other_public_bundle.spk_signature
    }
    other_otpks = [self.__KeyPair(pub=otpk)
                   for otpk in other_public_bundle.otpks]

    if len(other_otpks) == 0 and not allow_zero_otpks:
        raise KeyExchangeException(
            "The other public bundle does not contain any OTPKs, which is not "
            "allowed.")

    other_spk_serialized = self.__PublicKeyEncoder.encodePublicKey(
        other_spk["key"].pub, self.__curve)

    if not self.__XEdDSA(mont_pub=other_ik.pub).verify(
            other_spk_serialized, other_spk["signature"]):
        raise KeyExchangeException(
            "The signature of this public bundle's spk could not be "
            "verifified.")

    # Ephemeral key for this exchange.
    ek = self.__KeyPair.generate()

    dh1 = self.__ik.getSharedSecret(other_spk["key"])
    dh2 = ek.getSharedSecret(other_ik)
    dh3 = ek.getSharedSecret(other_spk["key"])
    dh4 = b""

    otpk = None
    if len(other_otpks) > 0:
        # Pick one of the other party's OTPKs at random.
        otpk_index = ord(os.urandom(1)) % len(other_otpks)
        otpk = other_otpks[otpk_index]
        dh4 = ek.getSharedSecret(otpk)

    sk = self.__kdf(dh1 + dh2 + dh3 + dh4)

    ik_pub_serialized = self.__PublicKeyEncoder.encodePublicKey(
        self.__ik.pub, self.__curve)
    other_ik_pub_serialized = self.__PublicKeyEncoder.encodePublicKey(
        other_ik.pub, self.__curve)
    ad = ik_pub_serialized + other_ik_pub_serialized

    return {
        "to_other": {
            "ik": self.__ik.pub,
            "ek": ek.pub,
            "otpk": otpk.pub if otpk else None,
            "spk": other_spk["key"].pub
        },
        "ad": ad,
        "sk": sk
    }
Do the key exchange as the active party . This involves selecting keys from the passive parties public bundle .
49,786
def getSharedSecretPassive(self, passive_exchange_data, allow_no_otpk=False,
                           keep_otpk=False):
    """Perform the passive side of the X3DH key exchange.

    Uses the key-exchange data received from the active party to derive the
    same shared secret. Unless *keep_otpk* is set, the used OTPK is deleted.

    :returns: dict with the associated data (``ad``) and shared secret (``sk``).
    :raises KeyExchangeException: on a rotated SPK or missing/deleted OTPK.
    """
    self.__checkSPKTimestamp()

    other_ik = self.__KeyPair(pub=passive_exchange_data["ik"])
    other_ek = self.__KeyPair(pub=passive_exchange_data["ek"])

    if self.__spk["key"].pub != passive_exchange_data["spk"]:
        raise KeyExchangeException(
            "The SPK used for this key exchange has been rotated, the key "
            "exchange can not be completed.")

    my_otpk = None
    if "otpk" in passive_exchange_data:
        # Search the visible pool first, then the hidden one.
        for otpk in self.__otpks:
            if otpk.pub == passive_exchange_data["otpk"]:
                my_otpk = otpk
                break
        for otpk in self.__hidden_otpks:
            if otpk.pub == passive_exchange_data["otpk"]:
                my_otpk = otpk
                break
        if not my_otpk:
            raise KeyExchangeException(
                "The OTPK used for this key exchange has been deleted, the "
                "key exchange can not be completed.")
    elif not allow_no_otpk:
        raise KeyExchangeException(
            "This key exchange data does not contain an OTPK, which is not "
            "allowed.")

    dh1 = self.__spk["key"].getSharedSecret(other_ik)
    dh2 = self.__ik.getSharedSecret(other_ek)
    dh3 = self.__spk["key"].getSharedSecret(other_ek)
    dh4 = b""
    if my_otpk:
        dh4 = my_otpk.getSharedSecret(other_ek)

    sk = self.__kdf(dh1 + dh2 + dh3 + dh4)

    other_ik_pub_serialized = self.__PublicKeyEncoder.encodePublicKey(
        other_ik.pub, self.__curve)
    ik_pub_serialized = self.__PublicKeyEncoder.encodePublicKey(
        self.__ik.pub, self.__curve)
    ad = other_ik_pub_serialized + ik_pub_serialized

    if my_otpk and not keep_otpk:
        # OTPKs are single-use: forget it unless the caller asked to keep it.
        self.deleteOTPK(my_otpk.pub)

    return {"ad": ad, "sk": sk}
Do the key exchange as the passive party . This involves retrieving data about the key exchange from the active party .
49,787
def save(self, mode=0o600):
    """Serialize the config data to disk, delegating to the parent config if any.

    The target directory is created on demand and the file's permissions are
    set to *mode* (owner read/write by default).
    """
    if self._parent is not None:
        self._parent.save(mode=mode)
        return
    target = self.config_files[-1]
    config_dir = os.path.dirname(os.path.abspath(target))
    try:
        os.makedirs(config_dir)
    except OSError as err:
        # Ignore "already exists"; anything else is a real failure.
        if not (err.errno == errno.EEXIST and os.path.isdir(config_dir)):
            raise
    open_mode = "wb" if sys.version_info < (3, 0) else "w"
    with open(target, open_mode) as handle:
        self._dump(handle)
    os.chmod(target, mode)
    self._logger.debug("Saved config to %s", target)
Serialize the config data to the user home directory .
49,788
def getChemicalPotential(self, solution):
    """Ask the solver for the chemical potential of *solution*.

    Accepts either a Solution wrapper (unwrapped via ``getSolution()``) or a
    raw solution array; the result is cached on ``self.mu`` and returned.
    """
    if isinstance(solution, Solution):
        solution = solution.getSolution()
    mu = self.solver.chemicalPotential(solution)
    self.mu = mu
    return mu
Call solver in order to calculate chemical potential .
49,789
def getDampingIntegral(self):
    """Integrate the damping terms of the Hamiltonian with the rectangular method.

    Handles 1D (radial, cylindrical weighting) and 2D (area-weighted) solutions.
    NOTE(review): a solution with any other ndim leaves ``integral`` unbound,
    as in the original — callers are assumed to pass 1D or 2D data only.
    """
    reservoir = self.getReservoir()
    density = self.getDensity()
    length = self.model.getSpatialStep()
    if self.solution.ndim == 1:
        nodes = self.model.getNumberOfNodes()
        radius = linspace(0, nodes * self.model.getSpatialStep(), nodes)
        # 2*pi*r weighting for the radial coordinate.
        integral = 2 * pi * sum((reservoir - 1.0) * density * radius * length)
    elif self.solution.ndim == 2:
        area = length ** 2
        integral = sum(sum((reservoir - 1.0) * density * area))
    return integral
Calculate the integral of the damping terms of the Hamiltonian using the rectangular method.
49,790
def fmt_val(val, shorten=True):
    """Format a value for inclusion in an informative text string.

    The value's repr is truncated to 50 characters when *shorten* is true,
    preserving a trailing closing quote/bracket so the result still reads
    like a repr.

    Fix: the truncation limit was stored in a local named ``max``, shadowing
    the builtin — renamed to ``max_len``.
    """
    val = repr(val)
    max_len = 50
    if shorten and len(val) > max_len:
        close = val[-1]
        val = val[0:max_len - 4] + "..."
        if close in (">", "'", '"', ']', '}', ')'):
            val = val + close
    return val
Format a value for inclusion in an informative text string .
49,791
def fmt_dict_vals(dict_vals, shorten=True):
    """Return a list of "key=value" strings formatted for an informative message.

    An empty dict yields a single formatted ``None`` placeholder.
    """
    items = dict_vals.items()
    if not items:
        return [fmt_val(None, shorten=shorten)]
    return ["%s=%s" % (key, fmt_val(value, shorten=shorten))
            for key, value in items]
Returns list of key = val pairs formatted for inclusion in an informative text string .
49,792
def build_command(self, config, **kwargs):
    """Build the command line used to execute MIP.

    Truthy keyword values are appended as CLI options; a bare ``True`` uses
    the option's declared default (falling back to '1').
    """
    command = ['perl', self.script, CLI_OPTIONS['config']['option'], config]
    for key, value in kwargs.items():
        if not value:
            continue
        option = CLI_OPTIONS[key]
        command.append(option['option'])
        if value is True:
            command.append(option.get('default', '1'))
        else:
            command.append(value)
    return command
Builds the command to execute MIP .
49,793
def execute(self, command):
    """Start a new MIP run and return the Popen handle (does not wait).

    The child restores default SIGPIPE handling so broken pipes terminate
    it instead of surfacing as Python exceptions.
    """
    def _restore_sigpipe():
        signal.signal(signal.SIGPIPE, signal.SIG_DFL)

    return subprocess.Popen(command, preexec_fn=_restore_sigpipe)
Start a new MIP run .
49,794
def update ( self ) : self . log . debug ( 'starting the ``update`` method' ) if "sherlock wiki root" not in self . settings : print "Sherlock wiki settings not found in settings file" return staticTableInfo = self . _get_table_infos ( ) viewInfo = self . _get_view_infos ( ) streamedTableInfo = self . _get_stream_view_infos ( ) self . _create_md_tables ( tableData = staticTableInfo , viewData = viewInfo , streamData = streamedTableInfo ) self . _write_wiki_pages ( ) self . _update_github ( ) self . log . debug ( 'completed the ``update`` method' ) return
Update wiki pages
49,795
# Query the sherlock-catalogues database for static-table metadata; when
# `trimmed` is set, each row is reduced to the columns in self.basicColumns.
# NOTE(review): the original multi-line SQL template (a u"""...""" literal
# interpolated via % locals()) was destroyed when this file was flattened;
# `u % locals()` below is the surviving residue. Restore the query text from
# version control before running — as written this raises NameError on `u`.
def _get_table_infos ( self , trimmed = False ) : self . log . debug ( 'starting the ``_get_table_infos`` method' ) sqlQuery = u % locals ( ) tableInfo = readquery ( log = self . log , sqlQuery = sqlQuery , dbConn = self . cataloguesDbConn , quiet = False ) if trimmed : cleanTable = [ ] for r in tableInfo : orow = collections . OrderedDict ( sorted ( { } . items ( ) ) ) for c in self . basicColumns : if c in r : orow [ c ] = r [ c ] cleanTable . append ( orow ) tableInfo = cleanTable self . log . debug ( 'completed the ``_get_table_infos`` method' ) return tableInfo
query the sherlock - catalogues database table metadata
49,796
# Query the sherlock-catalogues database for view metadata; when `trimmed`
# is set, each row is reduced to the columns in self.basicColumns.
# NOTE(review): the original multi-line SQL template (a u"""...""" literal
# interpolated via % locals()) was destroyed when this file was flattened;
# `u % locals()` below is the surviving residue. Restore the query text from
# version control before running — as written this raises NameError on `u`.
def _get_view_infos ( self , trimmed = False ) : self . log . debug ( 'starting the ``_get_view_infos`` method' ) sqlQuery = u % locals ( ) viewInfo = readquery ( log = self . log , sqlQuery = sqlQuery , dbConn = self . cataloguesDbConn , quiet = False ) if trimmed : cleanTable = [ ] for r in viewInfo : orow = collections . OrderedDict ( sorted ( { } . items ( ) ) ) for c in self . basicColumns : if c in r : orow [ c ] = r [ c ] cleanTable . append ( orow ) viewInfo = cleanTable self . log . debug ( 'completed the ``_get_view_infos`` method' ) return viewInfo
query the sherlock - catalogues database view metadata
49,797
# Query the sherlock-catalogues database for streamed-table metadata; when
# `trimmed` is set, each row is reduced to the columns in self.basicColumns.
# NOTE(review): the original multi-line SQL template (a u"""...""" literal
# interpolated via % locals()) was destroyed when this file was flattened;
# `u % locals()` below is the surviving residue. Restore the query text from
# version control before running — as written this raises NameError on `u`.
def _get_stream_view_infos ( self , trimmed = False ) : self . log . debug ( 'starting the ``_get_stream_view_infos`` method' ) sqlQuery = u % locals ( ) streamInfo = readquery ( log = self . log , sqlQuery = sqlQuery , dbConn = self . cataloguesDbConn , quiet = False ) if trimmed : cleanTable = [ ] for r in streamInfo : orow = collections . OrderedDict ( sorted ( { } . items ( ) ) ) for c in self . basicColumns : if c in r : orow [ c ] = r [ c ] cleanTable . append ( orow ) streamInfo = cleanTable self . log . debug ( 'completed the ``_get_stream_view_infos`` method' ) return streamInfo
query the sherlock - catalogues database streamed data tables metadata
49,798
def email_user(self, subject, message, from_email=None, **kwargs):
    """Send an email to this user.

    If ``settings.EMAIL_OVERRIDE_ADDRESS`` is set, the mail is redirected to
    that address instead of the user's own.

    Fix: the original default ``from_email=settings.DEFAULT_FROM_EMAIL`` was
    evaluated once at import time (a known Django pitfall: settings overrides
    after import were ignored). It is now resolved at call time; passing the
    sender explicitly behaves exactly as before.
    """
    if from_email is None:
        from_email = settings.DEFAULT_FROM_EMAIL
    receiver = self.email
    if settings.EMAIL_OVERRIDE_ADDRESS:
        receiver = settings.EMAIL_OVERRIDE_ADDRESS
    send_mail(subject, message, from_email, [receiver], **kwargs)
Sends an email to this User . If settings . EMAIL_OVERRIDE_ADDRESS is set this mail will be redirected to the alternate mail address .
49,799
def clean(context, days_ago, yes):
    """Clean up files from analysis runs completed more than *days_ago* days ago.

    The newest analysis of each family is never deleted; re-started families
    are reported and skipped.
    """
    cutoff = dt.datetime.now() - dt.timedelta(days=days_ago)
    analyses = context.obj['store'].analyses(
        status='completed',
        before=cutoff,
        deleted=False,
    )
    for analysis_obj in analyses:
        LOG.debug(f"checking analysis: {analysis_obj.family} ({analysis_obj.id})")
        latest = context.obj['store'].analyses(family=analysis_obj.family).first()
        if analysis_obj != latest:
            print(click.style(f"{analysis_obj.family}: family has been re-started",
                              fg='yellow'))
        else:
            print(f"delete analysis: {analysis_obj.family} ({analysis_obj.id})")
            context.invoke(delete, analysis_id=analysis_obj.id, yes=yes)
Clean up files from old analyses runs .