idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
4,000
def setup_apiv2():
    """Setup apiv2 when using PyQt4 and Python2.

    Raises
    ------
    ImportError
        If the SIP API was already fixed at version 1 (e.g. PyQt4 was
        imported before this ran), so version 2 cannot be selected.
    """
    # SIP API selection is only needed (and only possible) on Python 2;
    # on Python 3 PyQt4 already uses the v2 API.
    if sys.version_info[0] == 2:
        logging.getLogger(__name__).debug('setting up SIP API to version 2')
        import sip
        try:
            sip.setapi("QString", 2)
            sip.setapi("QVariant", 2)
        except ValueError:
            # setapi raises ValueError once the API has been fixed at v1.
            logging.getLogger(__name__).critical(
                "failed to set up sip api to version 2 for PyQt4")
            raise ImportError('PyQt4')
Setup apiv2 when using PyQt4 and Python2 .
4,001
def from_config(cls, pyvlx, item):
    """Build an instance from a parsed configuration item.

    ``item`` is a dict providing 'name', 'id', 'subtype' and 'typeId'.
    """
    return cls(
        pyvlx,
        item['id'],
        item['name'],
        item['subtype'],
        item['typeId'])
Read roller shutter from config .
4,002
def read_remote_spec(filename, encoding='binary', cache=True,
                     show_progress=True, **kwargs):
    """Read FITS or ASCII spectrum from a remote location.

    Parameters
    ----------
    filename : str
        Remote location (e.g. URL) of the spectrum.
    encoding, cache, show_progress
        Passed through to ``get_readable_fileobj`` to control download
        behavior.
    kwargs : dict
        Extra keywords forwarded to :func:`read_spec`.

    Returns
    -------
    header, wavelengths, fluxes
        As returned by :func:`read_spec`.
    """
    # Download (or fetch from cache) first, then parse from the open
    # file object; the original filename is kept for format detection.
    with get_readable_fileobj(filename, encoding=encoding, cache=cache,
                              show_progress=show_progress) as fd:
        header, wavelengths, fluxes = read_spec(fd, fname=filename, **kwargs)
    return header, wavelengths, fluxes
Read FITS or ASCII spectrum from a remote location .
4,003
def read_spec(filename, fname='', **kwargs):
    """Read FITS or ASCII spectrum.

    Parameters
    ----------
    filename : str or file object
        Spectrum file to read. If a file object is given, ``fname``
        must be provided for format detection.
    fname : str
        Filename used to decide between FITS and ASCII readers.
    kwargs : dict
        Extra keywords forwarded to the chosen reader.

    Returns
    -------
    header, wavelengths, fluxes

    Raises
    ------
    exceptions.SynphotError
        If the filename cannot be determined.
    """
    if isinstance(filename, str):
        fname = filename
    elif not fname:
        raise exceptions.SynphotError('Cannot determine filename.')

    # Dispatch on extension: FITS for .fits/.fit, ASCII otherwise.
    # (endswith accepts a tuple — one call instead of an or-chain.)
    if fname.endswith(('fits', 'fit')):
        read_func = read_fits_spec
    else:
        read_func = read_ascii_spec

    return read_func(filename, **kwargs)
Read FITS or ASCII spectrum .
4,004
def read_ascii_spec(filename, wave_unit=u.AA, flux_unit=units.FLAM, **kwargs):
    """Read ASCII spectrum.

    Parameters
    ----------
    filename : str or file object
        Table readable by ``astropy.io.ascii``.
    wave_unit, flux_unit
        Units assigned to the first (wavelength) and second (flux)
        columns; validated before use.
    kwargs : dict
        Extra keywords forwarded to ``ascii.read``.

    Returns
    -------
    header : dict
        Always empty for ASCII input (no header metadata).
    wavelengths, fluxes : Quantity
    """
    header = {}
    dat = ascii.read(filename, **kwargs)
    wave_unit = units.validate_unit(wave_unit)
    flux_unit = units.validate_unit(flux_unit)
    # Column order is fixed: first column wavelength, second column flux.
    wavelengths = dat.columns[0].data.astype(np.float64) * wave_unit
    fluxes = dat.columns[1].data.astype(np.float64) * flux_unit
    return header, wavelengths, fluxes
Read ASCII spectrum .
4,005
def read_fits_spec(filename, ext=1, wave_col='WAVELENGTH', flux_col='FLUX',
                   wave_unit=u.AA, flux_unit=units.FLAM):
    """Read FITS spectrum.

    Parameters
    ----------
    filename : str or file object
        FITS file (or already-open object accepted by ``fits.open``).
    ext : int
        Extension holding the data table.
    wave_col, flux_col : str
        Names of the wavelength and flux columns.
    wave_unit, flux_unit
        Fallback units, used only when the FITS header does not carry
        valid TUNIT1/TUNIT2 values.

    Returns
    -------
    header : dict
        Primary header, as a plain dict.
    wavelengths, fluxes : Quantity
    """
    fs = fits.open(filename)
    header = dict(fs[str('PRIMARY')].header)
    # Copy so the arrays survive closing the HDU list below.
    wave_dat = fs[ext].data.field(wave_col).copy()
    flux_dat = fs[ext].data.field(flux_col).copy()
    fits_wave_unit = fs[ext].header.get('TUNIT1')
    fits_flux_unit = fs[ext].header.get('TUNIT2')
    # Units from the header override the caller-supplied defaults, but an
    # unparseable header unit only warns and keeps the default.
    if fits_wave_unit is not None:
        try:
            wave_unit = units.validate_unit(fits_wave_unit)
        except (exceptions.SynphotError, ValueError) as e:
            warnings.warn(
                '{0} from FITS header is not valid wavelength unit, using '
                '{1}: {2}'.format(fits_wave_unit, wave_unit, e),
                AstropyUserWarning)
    if fits_flux_unit is not None:
        try:
            flux_unit = units.validate_unit(fits_flux_unit)
        except (exceptions.SynphotError, ValueError) as e:
            warnings.warn(
                '{0} from FITS header is not valid flux unit, using '
                '{1}: {2}'.format(fits_flux_unit, flux_unit, e),
                AstropyUserWarning)
    wave_unit = units.validate_unit(wave_unit)
    flux_unit = units.validate_unit(flux_unit)
    wavelengths = wave_dat * wave_unit
    fluxes = flux_dat * flux_unit
    # Only close if we opened by name; a caller-provided file object is
    # left open for the caller to manage.
    if isinstance(filename, str):
        fs.close()
    return header, wavelengths, fluxes
Read FITS spectrum .
4,006
def spectral_density_vega(wav, vegaflux):
    """Flux equivalencies between PHOTLAM and VEGAMAG.

    Parameters
    ----------
    wav : Quantity
        Wavelengths at which the Vega flux was sampled.
    vegaflux : Quantity
        Vega flux at ``wav``; converted to PHOTLAM internally.

    Returns
    -------
    list
        A single astropy-style equivalency tuple
        ``(PHOTLAM, VEGAMAG, converter, iconverter)``.
    """
    vega_photlam = vegaflux.to(
        PHOTLAM, equivalencies=u.spectral_density(wav)).value

    def converter(x):
        """PHOTLAM -> VEGAMAG; non-finite magnitudes become -99."""
        val = -2.5 * np.log10(x / vega_photlam)
        # Sentinel value -99 marks entries where log10 is undefined.
        result = np.zeros(val.shape, dtype=np.float64) - 99
        mask = np.isfinite(val)
        if result.ndim > 0:
            result[mask] = val[mask]
        elif mask:
            # Scalar input: keep the finite scalar value as-is.
            result = np.asarray(val)
        return result

    def iconverter(x):
        """VEGAMAG -> PHOTLAM."""
        return vega_photlam * 10 ** (-0.4 * x)

    return [(PHOTLAM, VEGAMAG, converter, iconverter)]
Flux equivalencies between PHOTLAM and VEGAMAG .
4,007
def validate_unit(input_unit):
    """Validate unit.

    Accepts either a string (including several synphot-specific aliases)
    or an astropy unit object, and returns the corresponding astropy unit.

    Raises
    ------
    exceptions.SynphotError
        If ``input_unit`` is neither a string nor an astropy unit.
    """
    if isinstance(input_unit, str):
        input_unit_lowcase = input_unit.lower()
        # Synphot-specific aliases that astropy does not understand.
        if input_unit_lowcase == 'angstroms':
            output_unit = u.AA
        elif input_unit_lowcase == 'inversemicrons':
            output_unit = u.micron ** -1
        elif input_unit_lowcase in ('transmission', 'extinction',
                                    'emissivity'):
            output_unit = THROUGHPUT
        elif input_unit_lowcase == 'jy':
            output_unit = u.Jy
        elif input_unit_lowcase in ('stmag', 'mag(st)'):
            output_unit = u.STmag
        elif input_unit_lowcase in ('abmag', 'mag(ab)'):
            output_unit = u.ABmag
        else:
            # Try the string as given first (units are case-sensitive),
            # then retry lowercased before giving up.
            try:
                output_unit = u.Unit(input_unit)
            except ValueError:
                output_unit = u.Unit(input_unit_lowcase)
    elif isinstance(input_unit, (u.UnitBase, u.LogUnit)):
        output_unit = input_unit
    else:
        raise exceptions.SynphotError(
            '{0} must be a recognized string or '
            'astropy.units.core.Unit'.format(input_unit))
    return output_unit
Validate unit .
4,008
def add(self, device):
    """Add device.

    Parameters
    ----------
    device : Device
        The device to append to the internal registry.

    Raises
    ------
    TypeError
        If ``device`` is not a Device instance.
    """
    if not isinstance(device, Device):
        # Include the offending type so callers can diagnose the error.
        raise TypeError(
            'Expected a Device instance, got {0}'.format(type(device)))
    self.__devices.append(device)
Add device .
4,009
def data_import(self, json_response):
    """Import data from json response.

    Iterates the 'data' list and instantiates a node per item based on
    its 'category' field; unknown categories are logged and skipped.

    Raises
    ------
    PyVLXException
        If the response has no 'data' key, or an item has no 'category'.
    """
    if 'data' not in json_response:
        raise PyVLXException(
            'no element data found: {0}'.format(json.dumps(json_response)))
    data = json_response['data']
    for item in data:
        if 'category' not in item:
            raise PyVLXException(
                'no element category: {0}'.format(json.dumps(item)))
        category = item['category']
        # Dispatch by device category; both shutter variants share a loader.
        if category == 'Window opener':
            self.load_window_opener(item)
        elif category in ['Roller shutter', 'Dual Shutter']:
            self.load_roller_shutter(item)
        elif category in ['Blind']:
            self.load_blind(item)
        else:
            # Unknown products are not fatal — warn and continue.
            self.pyvlx.logger.warning(
                'WARNING: Could not parse product: %s', category)
Import data from json response .
4,010
def load_window_opener(self, item):
    """Build a window opener from a JSON config item and register it."""
    self.add(Window.from_config(self.pyvlx, item))
Load window opener from JSON .
4,011
def load_roller_shutter(self, item):
    """Build a roller shutter from a JSON config item and register it."""
    self.add(RollerShutter.from_config(self.pyvlx, item))
Load roller shutter from JSON .
4,012
def load_blind(self, item):
    """Build a blind from a JSON config item and register it."""
    self.add(Blind.from_config(self.pyvlx, item))
Load blind from JSON .
4,013
def get_terminal_size(fallback=(80, 24)):
    """Return tuple containing columns and rows of controlling terminal,
    trying harder than shutil.get_terminal_size to find a tty before
    returning fallback.

    Parameters
    ----------
    fallback : tuple
        ``(columns, lines)`` returned when no stream is a tty.
    """
    for stream in [sys.__stdout__, sys.__stderr__, sys.__stdin__]:
        try:
            # TIOCGWINSZ fills struct winsize {rows, cols, ...}; we only
            # need the first two shorts, hence a 4-byte buffer.
            data = fcntl.ioctl(stream.fileno(), TIOCGWINSZ,
                               b"\x00\x00\x00\x00")
        except (OSError, AttributeError, ValueError):
            # OSError: stream is not a tty. AttributeError: stream is
            # None (e.g. GUI interpreter). ValueError: stream detached
            # or closed. Any of these means "try the next stream".
            pass
        else:
            lines, columns = struct.unpack("hh", data)
            break
    else:
        columns, lines = fallback
    return columns, lines
Return tuple containing columns and rows of controlling terminal trying harder than shutil . get_terminal_size to find a tty before returning fallback .
4,014
def run_json(self):
    """Run checks on self.files, printing a json object containing
    information relevant to the CS50 IDE plugin at the end.

    For each file the object holds either an ``error`` message or the
    check's ``score``, comment-threshold flag and an HTML diff.
    """
    checks = {}
    for file in self.files:
        try:
            results = self._check(file)
        except Error as e:
            checks[file] = {"error": e.msg}
        else:
            checks[file] = {
                "score": results.score,
                "comments": results.comment_ratio >= results.COMMENT_MIN,
                # Diff is pre-wrapped in <pre> for direct HTML embedding.
                "diff": "<pre>{}</pre>".format(
                    "\n".join(self.html_diff(results.original,
                                             results.styled))),
            }
    json.dump(checks, sys.stdout, indent=4)
    # Trailing newline after the JSON document.
    print()
Run checks on self . files printing json object containing information relevant to the CS50 IDE plugin at the end .
4,015
def run_score(self):
    """Run checks on self.files, printing the raw score (a percentage in
    [0, 1]) to stdout."""
    total_diffs = 0
    total_lines = 0
    for file in self.files:
        try:
            results = self._check(file)
        except Error as e:
            # A failed check is reported but does not abort scoring.
            termcolor.cprint(e.msg, "yellow", file=sys.stderr)
            continue
        total_diffs += results.diffs
        total_lines += results.lines
    try:
        print(max(1 - total_diffs / total_lines, 0.0))
    except ZeroDivisionError:
        # No lines checked at all.
        print(0.0)
Run checks on self . files printing raw percentage to stdout .
4,016
def _check(self, file):
    """Run appropriate check based on file's extension and return it,
    otherwise raise an Error.

    Raises
    ------
    Error
        If the file is missing, of unknown type, or not text.
    """
    if not os.path.exists(file):
        raise Error("file \"{}\" not found".format(file))
    _, extension = os.path.splitext(file)
    # First try to dispatch on the file extension (without the dot).
    try:
        check = self.extension_map[extension[1:]]
    except KeyError:
        # Fall back to libmagic's description of the file contents.
        magic_type = magic.from_file(file)
        for name, cls in self.magic_map.items():
            if name in magic_type:
                check = cls
                break
        else:
            raise Error("unknown file type \"{}\", skipping...".format(file))
    try:
        with open(file) as f:
            # Strip trailing whitespace from every line while reading.
            code = "\n".join(line.rstrip() for line in f)
    except UnicodeDecodeError:
        raise Error("file does not seem to contain text, skipping...")
    # Ensure the code ends with a newline; empty files stay empty.
    try:
        if code[-1] != '\n':
            code += '\n'
    except IndexError:
        pass
    return check(code)
Run appropriate check based on the file's extension and return it , otherwise raise an Error .
4,017
def split_diff(old, new):
    """Return a generator yielding the side-by-side diff of old and new."""
    table = icdiff.ConsoleDiff(cols=COLUMNS).make_table(
        old.splitlines(), new.splitlines())
    return (row.rstrip() for row in table)
Returns a generator yielding the side - by - side diff of old and new .
4,018
def unified(old, new):
    """Return a generator yielding a unified diff between old and new."""
    for entry in difflib.ndiff(old.splitlines(), new.splitlines()):
        marker = entry[0]
        if marker == "?":
            # ndiff's intraline hint rows are dropped entirely.
            continue
        if marker == " ":
            # Unchanged lines pass through uncolored.
            yield entry
        else:
            yield termcolor.colored(
                entry, "red" if marker == "-" else "green", attrs=["bold"])
Returns a generator yielding a unified diff between old and new .
4,019
def char_diff(self, old, new):
    """Return color-coded character-based diff between old and new."""
    def color_transition(old_type, new_type):
        # Compute the ANSI sequence for moving between diff block types:
        # red background for removals, green for additions, none for
        # unchanged text. Always reset first, then open the new color.
        new_color = termcolor.colored("", None, "on_red" if new_type == "-" else "on_green" if new_type == "+" else None)
        # colored("") ends with a RESET; strip it so the color stays open.
        return "{}{}".format(termcolor.RESET, new_color[:-len(termcolor.RESET)])
    return self._char_diff(old, new, color_transition)
Return color - coded character - based diff between old and new .
4,020
def _char_diff(self, old, new, transition, fmt=lambda c: c):
    """Returns a char-based diff between old and new where each character
    is formatted by fmt and transitions between blocks are determined by
    transition."""
    differ = difflib.ndiff(old, new)
    # Current diff block type: " ", "-", "+" or None (before start / at end).
    dtype = None
    line = []
    while True:
        # (None,) sentinel marks exhaustion; d[0] is then None.
        d = next(differ, (None,))
        if d[0] != dtype:
            # Block type changed: emit a transition marker.
            line += transition(dtype, d[0])
            dtype = d[0]
        if dtype is None:
            break
        if d[2] == "\n":
            if dtype != " ":
                # Added/removed newline: render it visibly and record a
                # warning for the caller.
                self._warn_chars.add((dtype, "\\n"))
                line += [fmt(r"\n"), transition(dtype, " ")]
            if dtype != "-":
                # Finish the current output line (removed-only newlines
                # do not end a line in the new text).
                yield "".join(line)
                line.clear()
                line.append(transition(" ", dtype))
        elif dtype != " " and d[2] == "\t":
            # Added/removed tab: render escaped and record the warning.
            line.append(fmt("\\t"))
            self._warn_chars.add((dtype, "\\t"))
        else:
            line.append(fmt(d[2]))
    # Yield the remainder only if it contains anything beyond ANSI codes.
    last = "".join(line)
    if re.sub(r"\x1b[^m]*m", "", last):
        yield last
Returns a char - based diff between old and new where each character is formatted by fmt and transitions between blocks are determined by transition .
4,021
def frame_from_raw(raw):
    """Create and return frame from raw bytes.

    Returns None (after logging a warning) when the command embedded in
    the raw frame has no frame class registered.
    """
    command, payload = extract_from_frame(raw)
    frame = create_frame(command)
    if frame is None:
        # Log the raw frame as colon-separated hex pairs for debugging.
        PYVLXLOG.warning("Command %s not implemented, raw: %s", command, ":".join("{:02x}".format(c) for c in raw))
        return None
    frame.validate_payload_len(payload)
    frame.from_payload(payload)
    return frame
Create and return frame from raw bytes .
4,022
def create_frame(command):
    """Create and return empty Frame from Command.

    Parameters
    ----------
    command : Command
        The gateway command to build a frame for.

    Returns
    -------
    FrameBase or None
        A new, empty frame instance, or None if the command has no
        frame class registered.
    """
    # Dispatch table replaces a ~40-branch if/elif chain with a single
    # O(1) lookup; one entry per implemented command.
    frame_types = {
        Command.GW_ERROR_NTF: FrameErrorNotification,
        Command.GW_COMMAND_SEND_REQ: FrameCommandSendRequest,
        Command.GW_COMMAND_SEND_CFM: FrameCommandSendConfirmation,
        Command.GW_COMMAND_RUN_STATUS_NTF: FrameCommandRunStatusNotification,
        Command.GW_COMMAND_REMAINING_TIME_NTF:
            FrameCommandRemainingTimeNotification,
        Command.GW_SESSION_FINISHED_NTF: FrameSessionFinishedNotification,
        Command.GW_PASSWORD_ENTER_REQ: FramePasswordEnterRequest,
        Command.GW_PASSWORD_ENTER_CFM: FramePasswordEnterConfirmation,
        Command.GW_CS_DISCOVER_NODES_REQ: FrameDiscoverNodesRequest,
        Command.GW_CS_DISCOVER_NODES_CFM: FrameDiscoverNodesConfirmation,
        Command.GW_CS_DISCOVER_NODES_NTF: FrameDiscoverNodesNotification,
        Command.GW_GET_SCENE_LIST_REQ: FrameGetSceneListRequest,
        Command.GW_GET_SCENE_LIST_CFM: FrameGetSceneListConfirmation,
        Command.GW_GET_SCENE_LIST_NTF: FrameGetSceneListNotification,
        Command.GW_GET_NODE_INFORMATION_REQ: FrameGetNodeInformationRequest,
        Command.GW_GET_NODE_INFORMATION_CFM:
            FrameGetNodeInformationConfirmation,
        Command.GW_GET_NODE_INFORMATION_NTF:
            FrameGetNodeInformationNotification,
        Command.GW_GET_ALL_NODES_INFORMATION_REQ:
            FrameGetAllNodesInformationRequest,
        Command.GW_GET_ALL_NODES_INFORMATION_CFM:
            FrameGetAllNodesInformationConfirmation,
        Command.GW_GET_ALL_NODES_INFORMATION_NTF:
            FrameGetAllNodesInformationNotification,
        Command.GW_GET_ALL_NODES_INFORMATION_FINISHED_NTF:
            FrameGetAllNodesInformationFinishedNotification,
        Command.GW_ACTIVATE_SCENE_REQ: FrameActivateSceneRequest,
        Command.GW_ACTIVATE_SCENE_CFM: FrameActivateSceneConfirmation,
        Command.GW_GET_VERSION_REQ: FrameGetVersionRequest,
        Command.GW_GET_VERSION_CFM: FrameGetVersionConfirmation,
        Command.GW_GET_PROTOCOL_VERSION_REQ: FrameGetProtocolVersionRequest,
        Command.GW_GET_PROTOCOL_VERSION_CFM:
            FrameGetProtocolVersionConfirmation,
        Command.GW_SET_NODE_NAME_REQ: FrameSetNodeNameRequest,
        Command.GW_SET_NODE_NAME_CFM: FrameSetNodeNameConfirmation,
        Command.GW_NODE_INFORMATION_CHANGED_NTF:
            FrameNodeInformationChangedNotification,
        Command.GW_GET_STATE_REQ: FrameGetStateRequest,
        Command.GW_GET_STATE_CFM: FrameGetStateConfirmation,
        Command.GW_SET_UTC_REQ: FrameSetUTCRequest,
        Command.GW_SET_UTC_CFM: FrameSetUTCConfirmation,
        Command.GW_ACTIVATION_LOG_UPDATED_NTF:
            FrameActivationLogUpdatedNotification,
        Command.GW_HOUSE_STATUS_MONITOR_ENABLE_REQ:
            FrameHouseStatusMonitorEnableRequest,
        Command.GW_HOUSE_STATUS_MONITOR_ENABLE_CFM:
            FrameHouseStatusMonitorEnableConfirmation,
        Command.GW_HOUSE_STATUS_MONITOR_DISABLE_REQ:
            FrameHouseStatusMonitorDisableRequest,
        Command.GW_HOUSE_STATUS_MONITOR_DISABLE_CFM:
            FrameHouseStatusMonitorDisableConfirmation,
        Command.GW_NODE_STATE_POSITION_CHANGED_NTF:
            FrameNodeStatePositionChangedNotification,
    }
    frame_type = frame_types.get(command)
    if frame_type is None:
        return None
    return frame_type()
Create and return empty Frame from Command .
4,023
def data_received(self, data):
    """Feed incoming bytes to the tokenizer and dispatch each complete
    frame to the registered callback."""
    self.tokenizer.feed(data)
    while self.tokenizer.has_tokens():
        frame = frame_from_raw(self.tokenizer.get_next_token())
        # Unparseable/unknown frames come back as None and are dropped.
        if frame is not None:
            self.frame_received_cb(frame)
Handle data received .
4,024
async def connect(self):
    """Connect to gateway via SSL."""
    tcp_client = TCPTransport(self.frame_received_cb,
                              self.connection_closed_cb)
    # create_connection returns (transport, protocol); the protocol is
    # our tcp_client, so only the transport is kept.
    self.transport, _ = await self.loop.create_connection(
        lambda: tcp_client,
        host=self.config.host,
        port=self.config.port,
        ssl=self.create_ssl_context())
    self.connected = True
Connect to gateway via SSL .
4,025
def write(self, frame):
    """Write frame to Bus.

    Raises PyVLXException if ``frame`` is not a FrameBase.
    """
    if not isinstance(frame, FrameBase):
        raise PyVLXException("Frame not of type FrameBase",
                             frame_type=type(frame))
    PYVLXLOG.debug("SEND: %s", frame)
    # Serialize, SLIP-encode, then hand to the transport.
    packed = slip_pack(bytes(frame))
    self.transport.write(packed)
Write frame to Bus .
4,026
def create_ssl_context():
    """Create and return SSL Context.

    NOTE: hostname checking and certificate verification are deliberately
    disabled (the gateway presents a certificate that cannot be verified
    against a public CA) — this matches the original behavior.
    """
    context = ssl.create_default_context(ssl.Purpose.SERVER_AUTH)
    context.check_hostname = False
    context.verify_mode = ssl.CERT_NONE
    return context
Create and return SSL Context .
4,027
def frame_received_cb(self, frame):
    """Log a received frame and fan it out to every registered
    asynchronous callback."""
    PYVLXLOG.debug("REC: %s", frame)
    for callback in self.frame_received_cbs:
        # Callbacks are coroutines; schedule each on the event loop.
        self.loop.create_task(callback(frame))
Received message .
4,028
async def process_frame(self, frame):
    """Update nodes via frame, usually received by house monitor.

    Both notification types carry ``node_id`` and ``current_position``
    and were handled by two byte-identical branches; they are folded
    into a single isinstance check.
    """
    if isinstance(frame, (FrameNodeStatePositionChangedNotification,
                          FrameGetAllNodesInformationNotification)):
        # Ignore notifications for nodes we do not know about.
        if frame.node_id not in self.pyvlx.nodes:
            return
        node = self.pyvlx.nodes[frame.node_id]
        if isinstance(node, OpeningDevice):
            node.position = Position(frame.current_position)
            await node.after_update()
Update nodes via frame usually received by house monitor .
4,029
def add(self, scene):
    """Add scene, replacing an existing scene if one with the same
    scene_id is present.

    Raises
    ------
    TypeError
        If ``scene`` is not a Scene instance.
    """
    if not isinstance(scene, Scene):
        # Include the offending type so callers can diagnose the error.
        raise TypeError(
            'Expected a Scene instance, got {0}'.format(type(scene)))
    for i, existing in enumerate(self.__scenes):
        if existing.scene_id == scene.scene_id:
            self.__scenes[i] = scene
            return
    self.__scenes.append(scene)
Add scene replace existing scene if scene with scene_id is present .
4,030
def best_prefix(bytes, system=NIST):
    """Return a bitmath instance representing the best human-readable
    representation of the number of bytes given by ``bytes``. In addition
    to a numeric type, the ``bytes`` parameter may also be a bitmath type.

    Note: the parameter name shadows the ``bytes`` builtin; it is kept
    unchanged for backward compatibility with keyword callers.
    """
    value = bytes.bytes if isinstance(bytes, Bitmath) else bytes
    return Byte(value).best_prefix(system=system)
Return a bitmath instance representing the best human - readable representation of the number of bytes given by bytes . In addition to a numeric type the bytes parameter may also be a bitmath type .
4,031
def query_device_capacity(device_fd):
    """Create bitmath instances of the capacity of a system block device.

    Parameters
    ----------
    device_fd : file object
        An open file object for the block device node.

    Returns
    -------
    Byte
        The device capacity in bytes.

    Raises
    ------
    NotImplementedError
        On non-POSIX platforms.
    ValueError
        If the file descriptor is not a block device.
    """
    if os_name() != 'posix':
        raise NotImplementedError(
            "'bitmath.query_device_capacity' is not supported on this platform: %s" % os_name())
    s = os.stat(device_fd.name).st_mode
    if not stat.S_ISBLK(s):
        raise ValueError("The file descriptor provided is not of a device type")
    # Per-OS ioctl requests and how to combine their results into bytes.
    ioctl_map = {
        "Linux": {
            "request_params": [
                ("BLKGETSIZE64", "L", 0x80081272)
            ],
            "func": lambda x: x["BLKGETSIZE64"]
        },
        "Darwin": {
            "request_params": [
                ("DKIOCGETBLOCKCOUNT", "L", 0x40086419),
                ("DKIOCGETBLOCKSIZE", "I", 0x40046418)
            ],
            "func": lambda x: x["DKIOCGETBLOCKCOUNT"] * x["DKIOCGETBLOCKSIZE"]
        }
    }
    platform_params = ioctl_map[platform.system()]
    results = {}
    for req_name, fmt, request_code in platform_params['request_params']:
        buffer_size = struct.calcsize(fmt)
        # BUGFIX: use a bytes buffer — on Python 3 fcntl.ioctl rejects a
        # str argument (the original used ' ' * buffer_size).
        buf = b'\x00' * buffer_size
        buf = fcntl.ioctl(device_fd.fileno(), request_code, buf)
        results[req_name] = struct.unpack(fmt, buf)[0]
    return Byte(platform_params['func'](results))
Create bitmath instances of the capacity of a system block device
4,032
def parse_string(s):
    """Parse a string with units and try to make a bitmath object out
    of it.

    Raises
    ------
    ValueError
        If the input is not a string, has no unit, has an unknown unit,
        or cannot be converted to a bitmath object.
    """
    if not isinstance(s, (str, unicode)):
        raise ValueError("parse_string only accepts string inputs but a %s was given" % type(s))
    # Split at the first alphabetic character: value before, unit after.
    try:
        index = list([i.isalpha() for i in s]).index(True)
    except ValueError:
        raise ValueError("No unit detected, can not parse string '%s' into a bitmath object" % s)
    val, unit = s[:index], s[index:]
    if unit == "b":
        unit_class = Bit
    elif unit == "B":
        unit_class = Byte
    else:
        # The unit name must resolve to a class defined in this module.
        if not (hasattr(sys.modules[__name__], unit) and isinstance(getattr(sys.modules[__name__], unit), type)):
            raise ValueError("The unit %s is not a valid bitmath unit" % unit)
        unit_class = globals()[unit]
    # A non-numeric value part raises ValueError here, as before (the
    # original wrapped this in a pointless try/except that re-raised).
    val = float(val)
    try:
        return unit_class(val)
    except Exception:
        # Narrowed from a bare except (which also swallowed
        # KeyboardInterrupt/SystemExit).
        raise ValueError("Can't parse string %s into a bitmath object" % s)
Parse a string with units and try to make a bitmath object out of it .
4,033
def parse_string_unsafe(s, system=SI):
    """Attempt to parse a string with ambiguous units and try to make a
    bitmath object out of it.

    Bare numbers (or numeric strings) are interpreted as bytes. Unit
    suffixes are normalized to end in 'B'; two-letter prefixes resolve
    to SI or NIST classes depending on ``system``.

    Raises
    ------
    ValueError
        If the input type is wrong, no unit is found, or the unit is
        not a valid bitmath unit.
    """
    if not isinstance(s, (str, unicode)) and not isinstance(s, numbers.Number):
        raise ValueError("parse_string_unsafe only accepts string/number inputs but a %s was given" % type(s))
    # A plain number is taken to mean bytes.
    if isinstance(s, numbers.Number):
        return Byte(s)
    if isinstance(s, (str, unicode)):
        # A purely numeric string is also taken to mean bytes.
        try:
            return Byte(float(s))
        except ValueError:
            pass
        # Split at the first alphabetic character: value before, unit after.
        try:
            index = list([i.isalpha() for i in s]).index(True)
        except ValueError:
            raise ValueError("No unit detected, can not parse string '%s' into a bitmath object" % s)
        val, unit = s[:index], s[index:]
        # Normalize the suffix to end in exactly one 'B' (e.g. 'kb' -> 'kB').
        unit = unit.rstrip('Bb')
        unit += 'B'
        if len(unit) == 2:
            if system == NIST:
                # Promote e.g. 'kB' to the NIST form 'KiB'.
                unit = capitalize_first(unit)
                _unit = list(unit)
                _unit.insert(1, 'i')
                unit = ''.join(_unit)
                unit_class = globals()[unit]
            else:
                # SI: normalize the kilo prefix to lowercase 'k', other
                # prefixes to uppercase.
                if unit.startswith('K'):
                    unit = unit.replace('K', 'k')
                elif not unit.startswith('k'):
                    unit = capitalize_first(unit)
                if unit[0] in SI_PREFIXES:
                    unit_class = globals()[unit]
        elif len(unit) == 3:
            # Three-letter units must already be NIST (e.g. 'KiB').
            unit = capitalize_first(unit)
            if unit[:2] in NIST_PREFIXES:
                unit_class = globals()[unit]
            else:
                raise ValueError("The unit %s is not a valid bitmath unit" % unit)
        # unit_class stays unassigned if nothing matched above.
        try:
            unit_class
        except UnboundLocalError:
            raise ValueError("The unit %s is not a valid bitmath unit" % unit)
        return unit_class(float(val))
Attempt to parse a string with ambiguous units and try to make a bitmath object out of it .
4,034
def format(fmt_str=None, plural=False, bestprefix=False):
    """Context manager for printing bitmath instances.

    Temporarily overrides the module-level ``format_plural`` and
    ``format_string`` settings for the duration of the ``with`` block.

    NOTE: ``bestprefix`` is accepted but unused here; kept for
    backward compatibility with existing callers.
    """
    if 'bitmath' not in globals():
        import bitmath
    if plural:
        orig_fmt_plural = bitmath.format_plural
        bitmath.format_plural = True
    if fmt_str:
        orig_fmt_str = bitmath.format_string
        bitmath.format_string = fmt_str
    try:
        yield
    finally:
        # BUGFIX: restore the module-level settings even if the body of
        # the with-block raised; previously an exception left the global
        # format settings permanently overridden.
        if plural:
            bitmath.format_plural = orig_fmt_plural
        if fmt_str:
            bitmath.format_string = orig_fmt_str
Context manager for printing bitmath instances .
4,035
def cli_script_main(cli_args):
    """A command line interface to basic bitmath operations.

    Parameters
    ----------
    cli_args : list of str
        Arguments to parse (as from ``sys.argv[1:]``).

    Returns
    -------
    list
        One converted bitmath instance per input size.
    """
    choices = ALL_UNIT_TYPES
    parser = argparse.ArgumentParser(
        description='Converts from one type of size to another.')
    parser.add_argument('--from-stdin', default=False, action='store_true',
                        help='Reads number from stdin rather than the cli')
    parser.add_argument(
        '-f', '--from', choices=choices, nargs=1, type=str,
        dest='fromunit', default=['Byte'],
        help='Input type you are converting from. Defaultes to Byte.')
    parser.add_argument(
        '-t', '--to', choices=choices, required=False, nargs=1, type=str,
        help=('Input type you are converting to. '
              'Attempts to detect best result if omitted.'),
        dest='tounit')
    parser.add_argument('size', nargs='*', type=float,
                        help='The number to convert.')
    args = parser.parse_args(cli_args)
    # --from-stdin replaces any positional sizes; [:-1] strips the newline.
    if args.from_stdin:
        args.size = [float(sys.stdin.readline()[:-1])]
    results = []
    for size in args.size:
        # Look up the source unit class by name on the bitmath module.
        instance = getattr(
            __import__('bitmath', fromlist=['True']), args.fromunit[0])(size)
        if args.tounit:
            # Explicit target: each unit is exposed as an attribute.
            result = getattr(instance, args.tounit[0])
        else:
            result = instance.best_prefix()
        results.append(result)
    return results
A command line interface to basic bitmath operations .
4,036
def _do_setup ( self ) : ( self . _base , self . _power , self . _name_singular , self . _name_plural ) = self . _setup ( ) self . _unit_value = self . _base ** self . _power
Setup basic parameters for this class .
4,037
def _norm ( self , value ) : if isinstance ( value , self . valid_types ) : self . _byte_value = value * self . _unit_value self . _bit_value = self . _byte_value * 8.0 else : raise ValueError ( "Initialization value '%s' is of an invalid type: %s. " "Must be one of %s" % ( value , type ( value ) , ", " . join ( str ( x ) for x in self . valid_types ) ) )
Normalize the input value into the fundamental unit for this prefix type .
4,038
def system(self):
    """The system of units used to measure an instance.

    Returns "NIST" for base-2 instances and "SI" for base-10; any other
    base raises ValueError.
    """
    base = self._base
    if base == 2:
        return "NIST"
    if base == 10:
        return "SI"
    raise ValueError(
        "Instances mathematical base is an unsupported value: %s" % (
            str(base)))
The system of units used to measure an instance
4,039
def from_other(cls, item):
    """Factory function to return instances of ``item`` converted into a
    new instance of ``cls``. Because this is a class method it may be
    called from any bitmath class object without the need to explicitly
    instantiate the class ahead of time.

    Raises ValueError if ``item`` is not a bitmath instance.
    """
    if not isinstance(item, Bitmath):
        raise ValueError("The provided items must be a valid bitmath class: %s" % str(item.__class__))
    return cls(bits=item.bits)
Factory function to return instances of item converted into a new instance of cls . Because this is a class method it may be called from any bitmath class object without the need to explicitly instantiate the class ahead of time .
4,040
def format(self, fmt):
    """Return a representation of this instance formatted with user
    supplied syntax (a ``str.format`` template over the instance's
    public properties)."""
    return fmt.format(
        base=self.base,
        bin=self.bin,
        binary=self.binary,
        bits=self.bits,
        bytes=self.bytes,
        power=self.power,
        system=self.system,
        unit=self.unit,
        unit_plural=self.unit_plural,
        unit_singular=self.unit_singular,
        value=self.value)
Return a representation of this instance formatted with user supplied syntax
4,041
def _norm ( self , value ) : self . _bit_value = value * self . _unit_value self . _byte_value = self . _bit_value / 8.0
Normalize the input value into the fundamental unit for this prefix type
4,042
def extract_from_frame(data):
    """Extract payload and command from frame.

    Frame layout: bytes 0-1 big-endian length, bytes 2-3 big-endian
    command id, bytes 4..-2 payload, last byte CRC.

    Raises
    ------
    PyVLXException
        If the frame is too short, the length field is inconsistent,
        the CRC does not match, or the command id is unknown.
    """
    if len(data) <= 4:
        raise PyVLXException("could_not_extract_from_frame_too_short", data=data)
    # Big-endian 16-bit length; -1 excludes a byte counted by the sender.
    length = data[0] * 256 + data[1] - 1
    if len(data) != length + 3:
        raise PyVLXException("could_not_extract_from_frame_invalid_length", data=data, current_length=len(data), expected_length=length + 3)
    # The final byte is a CRC over everything before it.
    if calc_crc(data[:-1]) != data[-1]:
        raise PyVLXException("could_not_extract_from_frame_invalid_crc", data=data, expected_crc=calc_crc(data[:-1]), current_crc=data[-1])
    payload = data[4:-1]
    try:
        # Command() raises ValueError for ids not in the enum.
        command = Command(data[2] * 256 + data[3])
    except ValueError:
        raise PyVLXException("could_not_extract_from_frame_command", data=data)
    return command, payload
Extract payload and command from frame .
4,043
def _get_meta ( obj ) : if hasattr ( obj , 'meta' ) : meta = deepcopy ( obj . meta ) elif isinstance ( obj , dict ) : meta = deepcopy ( obj ) else : meta = { } return meta
Extract metadata if any from given object .
4,044
def _merge_meta(left, right, result, clean=True):
    """Merge metadata from left and right onto result.

    Parameters
    ----------
    left, right
        Objects whose metadata (via ``_get_meta``) is merged.
    result
        Object whose ``meta`` attribute receives the merged metadata.
    clean : bool
        If True, drop per-spectrum 'header' and 'expr' keys before
        merging so they do not propagate to derived spectra.
    """
    left = BaseSpectrum._get_meta(left)
    right = BaseSpectrum._get_meta(right)
    if clean:
        for key in ('header', 'expr'):
            for d in (left, right):
                if key in d:
                    del d[key]
    # Conflicts are resolved silently, preferring later operands.
    mid = metadata.merge(left, right, metadata_conflicts='silent')
    result.meta = metadata.merge(result.meta, mid, metadata_conflicts='silent')
Merge metadata from left and right onto results .
4,045
def _process_generic_param(pval, def_unit, equivalencies=[]):
    """Process generic model parameter.

    Quantities are converted to ``def_unit`` (using ``equivalencies``)
    and reduced to their plain value; everything else is passed through
    unchanged.

    NOTE(review): the mutable default for ``equivalencies`` is never
    mutated here, so it is harmless, but worth confirming callers.
    """
    if isinstance(pval, u.Quantity):
        outval = pval.to(def_unit, equivalencies).value
    else:
        # Plain value: presumably already in internal units — confirm
        # against callers.
        outval = pval
    return outval
Process generic model parameter .
4,046
def _process_wave_param(self, pval):
    """Process individual model parameter representing wavelength."""
    # Delegate to the generic handler with spectral equivalencies so
    # frequency/energy quantities convert to the internal wave unit.
    return self._process_generic_param(
        pval, self._internal_wave_unit, equivalencies=u.spectral())
Process individual model parameter representing wavelength .
4,047
def waveset(self):
    """Optimal wavelengths for sampling the spectrum or bandpass.

    Returns None when the underlying model has no natural waveset.
    """
    w = get_waveset(self.model)
    if w is None:
        return None
    utils.validate_wavelengths(w)
    return w * self._internal_wave_unit
Optimal wavelengths for sampling the spectrum or bandpass .
4,048
def waverange(self):
    """Range of waveset as ``[min, max]``; ``[None, None]`` if the
    waveset is undefined."""
    ws = self.waveset
    if ws is None:
        return [None, None]
    return u.Quantity([ws.min(), ws.max()])
Range of waveset .
4,049
def _validate_wavelengths(self, wave):
    """Validate wavelengths for sampling.

    Parameters
    ----------
    wave : Quantity, array-like, or None
        Wavelengths to validate; None falls back to ``self.waveset``.

    Returns
    -------
    Quantity
        Validated wavelengths in the internal wave unit.

    Raises
    ------
    exceptions.SynphotError
        If ``wave`` is None and ``self.waveset`` is also undefined.
    """
    if wave is None:
        if self.waveset is None:
            raise exceptions.SynphotError('self.waveset is undefined; '
                                          'Provide wavelengths for sampling.')
        wavelengths = self.waveset
    else:
        w = self._process_wave_param(wave)
        utils.validate_wavelengths(w)
        wavelengths = w * self._internal_wave_unit
    return wavelengths
Validate wavelengths for sampling .
4,050
def integrate(self, wavelengths=None, **kwargs):
    """Perform integration.

    Uses the model's analytic integral when available; otherwise falls
    back to trapezoid integration over the sampled values.

    Raises
    ------
    NotImplementedError
        If no analytic integral exists and the configured default
        integrator is not 'trapezoid', or if the resulting flux unit
        cannot be mapped to an integrated unit.
    """
    if 'flux_unit' in kwargs:
        self._validate_flux_unit(kwargs['flux_unit'], wav_only=True)
    x = self._validate_wavelengths(wavelengths)
    try:
        m = self.model.integral
    except (AttributeError, NotImplementedError):
        # No analytic integral: numeric fallback.
        if conf.default_integrator == 'trapezoid':
            y = self(x, **kwargs)
            # abs() guards against a descending wavelength grid
            # producing a negative area.
            result = abs(np.trapz(y.value, x=x.value))
            result_unit = y.unit
        else:
            raise NotImplementedError(
                'Analytic integral not available and default integrator '
                '{0} is not supported'.format(conf.default_integrator))
    else:
        # Analytic: evaluate the antiderivative at the endpoints.
        start = x[0].value
        stop = x[-1].value
        result = (m(stop) - m(start))
        result_unit = self._internal_flux_unit
    # Map the flux unit to the corresponding integrated unit.
    if result_unit != units.THROUGHPUT:
        if result_unit == units.PHOTLAM:
            result_unit = u.photon / (u.cm ** 2 * u.s)
        elif result_unit == units.FLAM:
            result_unit = u.erg / (u.cm ** 2 * u.s)
        else:
            raise NotImplementedError(
                'Integration of {0} is not supported'.format(result_unit))
    else:
        # Dimensionless throughput integrates to a wavelength unit.
        result_unit *= self._internal_wave_unit
    return result * result_unit
Perform integration .
4,051
def force_extrapolation(self):
    """Force the underlying model to extrapolate.

    Returns True if the model is an Empirical1D (its fill_value is set
    to NaN), False otherwise.
    """
    is_empirical = isinstance(self._model, Empirical1D)
    if is_empirical:
        self._model.fill_value = np.nan
    return is_empirical
Force the underlying model to extrapolate .
4,052
def taper(self, wavelengths=None):
    """Taper the spectrum or bandpass.

    Adds a zero-flux point just outside each end of the wavelength
    grid (when the end flux is nonzero) and returns a new empirical
    spectrum; returns ``self`` unchanged if both ends are already zero.
    """
    x = self._validate_wavelengths(wavelengths)
    # Extrapolated end points for the tapering wings, one grid step
    # (in log space) beyond each end.
    w1 = x[0] ** 2 / x[1]
    w2 = x[-1] ** 2 / x[-2]
    # End fluxes: read directly from the lookup table when empirical,
    # otherwise sample the model at the extrapolated points.
    if isinstance(self._model, Empirical1D):
        y1 = self._model.lookup_table[0]
        y2 = self._model.lookup_table[-1]
    else:
        y1 = self(w1)
        y2 = self(w2)
    if y1 == 0 and y2 == 0:
        # Already tapered on both ends.
        return self
    y = self(x)
    if y1 != 0:
        x = np.insert(x, 0, w1)
        y = np.insert(y, 0, 0.0)
    if y2 != 0:
        x = np.insert(x, x.size, w2)
        y = np.insert(y, y.size, 0.0)
    return self.__class__(Empirical1D, points=x, lookup_table=y)
Taper the spectrum or bandpass .
4,053
def _get_arrays(self, wavelengths, **kwargs):
    """Get sampled spectrum or bandpass in user units."""
    x = self._validate_wavelengths(wavelengths)
    y = self(x, **kwargs)
    # Return wavelengths in the caller's unit when one was supplied.
    if isinstance(wavelengths, u.Quantity):
        return x.to(wavelengths.unit, u.spectral()), y
    return x, y
Get sampled spectrum or bandpass in user units .
4,054
def _do_plot(x, y, title='', xlog=False, ylog=False, left=None, right=None,
             bottom=None, top=None, save_as=''):
    """Plot worker.

    Parameters
    ----------
    x, y : Quantity
        Data to plot; axis labels are derived from their units.
    title : str
        Optional plot title.
    xlog, ylog : bool
        Use log scale on the respective axis.
    left, right, bottom, top
        Optional axis limits.
    save_as : str
        If given, save the figure to this path.
    """
    # matplotlib is an optional dependency: degrade gracefully if absent.
    try:
        import matplotlib.pyplot as plt
    except ImportError:
        log.error('No matplotlib installation found; plotting disabled '
                  'as a result.')
        return
    fig, ax = plt.subplots()
    ax.plot(x, y)
    # Custom wavelength limits, if any.
    if left is not None:
        ax.set_xlim(left=left)
    if right is not None:
        ax.set_xlim(right=right)
    # Custom flux/throughput limits, if any.
    if bottom is not None:
        ax.set_ylim(bottom=bottom)
    if top is not None:
        ax.set_ylim(top=top)
    # X-axis label follows the physical type of the wave unit.
    xu = x.unit
    if xu.physical_type == 'frequency':
        ax.set_xlabel('Frequency ({0})'.format(xu))
    else:
        ax.set_xlabel('Wavelength ({0})'.format(xu))
    # Y-axis label: dimensionless data is throughput-like.
    yu = y.unit
    if yu is u.dimensionless_unscaled:
        ax.set_ylabel('Unitless')
    else:
        ax.set_ylabel('Flux ({0})'.format(yu))
    if title:
        ax.set_title(title)
    if xlog:
        ax.set_xscale('log')
    if ylog:
        ax.set_yscale('log')
    plt.draw()
    if save_as:
        plt.savefig(save_as)
        log.info('Plot saved as {0}'.format(save_as))
Plot worker .
4,055
def _process_flux_param(self, pval, wave):
    """Process individual model parameter representing flux.

    Quantities are validated and converted to the internal flux unit
    (evaluated at the redshifted wavelengths); plain values are passed
    through unchanged.
    """
    if isinstance(pval, u.Quantity):
        self._validate_flux_unit(pval.unit)
        outval = units.convert_flux(self._redshift_model(wave), pval,
                                    self._internal_flux_unit).value
    else:
        # Plain value: presumably already in internal flux units —
        # confirm against callers.
        outval = pval
    return outval
Process individual model parameter representing flux .
4,056
def model(self):
    """Model of the spectrum with given redshift.

    At z == 0 the bare model is returned; otherwise the model is
    composed with a redshift (and, unless z_type is 'wavelength_only',
    a flux-scaling) transform.
    """
    if self.z == 0:
        m = self._model
    else:
        # Redshift direction depends on whether the internal wave unit
        # is a length (wavelength) or not (e.g. frequency).
        if self._internal_wave_unit.physical_type == 'length':
            rs = self._redshift_model.inverse
        else:
            rs = self._redshift_model
        if self.z_type == 'wavelength_only':
            m = rs | self._model
        else:
            # Also scale flux to conserve it under redshift.
            m = rs | self._model | self._redshift_flux_model
    return m
Model of the spectrum with given redshift .
4,057
def z(self, what):
    """Change redshift and rebuild the internal redshift models.

    Raises SynphotError if ``what`` is not a real scalar number.
    """
    if not isinstance(what, numbers.Real):
        raise exceptions.SynphotError(
            'Redshift must be a real scalar number.')
    self._z = float(what)
    self._redshift_model = RedshiftScaleFactor(self._z)
    if self.z_type == 'wavelength_only':
        # No flux scaling in this mode.
        self._redshift_flux_model = None
    else:
        # Flux is scaled by 1/(1+z).
        self._redshift_flux_model = Scale(1 / (1 + self._z))
Change redshift .
4,058
def to_fits(self, filename, wavelengths=None, flux_unit=None, area=None,
            vegaspec=None, **kwargs):
    """Write the spectrum to a FITS file via specio.write_fits_spec."""
    w, y = self._get_arrays(wavelengths, flux_unit=flux_unit, area=area,
                            vegaspec=vegaspec)
    # Default display formats for the two table columns.
    bkeys = {'tdisp1': 'G15.7', 'tdisp2': 'G15.7'}
    if 'expr' in self.meta:
        bkeys['expr'] = (self.meta['expr'], 'synphot expression')
    # Merge our extension-header keywords with any caller-supplied ones.
    if 'ext_header' in kwargs:
        kwargs['ext_header'].update(bkeys)
    else:
        kwargs['ext_header'] = bkeys
    specio.write_fits_spec(filename, w, y, **kwargs)
Write the spectrum to a FITS file .
4,059
def from_file(cls, filename, keep_neg=False, **kwargs):
    """Create a source spectrum from file.

    ``kwargs`` are forwarded to specio.read_spec; the file header is
    stored under meta['header'].
    """
    header, wavelengths, fluxes = specio.read_spec(filename, **kwargs)
    return cls(Empirical1D, points=wavelengths, lookup_table=fluxes,
               keep_neg=keep_neg, meta={'header': header})
Create a spectrum from file .
4,060
def from_file(cls, filename, **kwargs):
    """Create a bandpass from file.

    Defaults the flux unit to the internal one and, for FITS files,
    the flux column to 'THROUGHPUT' unless overridden in ``kwargs``.
    """
    if 'flux_unit' not in kwargs:
        kwargs['flux_unit'] = cls._internal_flux_unit
    if ((filename.endswith('fits') or filename.endswith('fit')) and
            'flux_col' not in kwargs):
        kwargs['flux_col'] = 'THROUGHPUT'
    header, wavelengths, throughput = specio.read_spec(filename, **kwargs)
    return cls(Empirical1D, points=wavelengths, lookup_table=throughput,
               keep_neg=True, meta={'header': header})
Create a bandpass from file .
4,061
def _init_bins(self, binset):
    """Calculate binned wavelength centers, edges, and flux.

    When ``binset`` is None, falls back to the bandpass waveset, then
    the source spectrum waveset; raises UndefinedBinset when neither
    is available.
    """
    if binset is None:
        if self.bandpass.waveset is not None:
            self._binset = self.bandpass.waveset
        elif self.spectrum.waveset is not None:
            self._binset = self.spectrum.waveset
            log.info('Bandpass waveset is undefined; '
                     'Using source spectrum waveset instead.')
        else:
            raise exceptions.UndefinedBinset(
                'Both source spectrum and bandpass have undefined '
                'waveset; Provide binset manually.')
    else:
        self._binset = self._validate_wavelengths(binset)

    # Ensure ascending order before computing edges.
    if self._binset[0] > self._binset[-1]:
        self._binset = self._binset[::-1]

    self._bin_edges = binning.calculate_bin_edges(self._binset)

    # Merge bin edges, bin centers, and the native waveset into one grid.
    spwave = utils.merge_wavelengths(
        self._bin_edges.value, self._binset.value)
    if self.waveset is not None:
        spwave = utils.merge_wavelengths(spwave, self.waveset.value)
    spwave = spwave[spwave > 0]  # drop non-physical wavelengths

    # Locate each bin's [begin, end) span within the merged grid.
    indices = np.searchsorted(spwave, self._bin_edges.value)
    i_beg = indices[:-1]
    i_end = indices[1:]

    # Midpoint flux and interval widths for width-weighted averaging.
    flux = self(spwave)
    avflux = (flux.value[1:] + flux.value[:-1]) * 0.5
    deltaw = spwave[1:] - spwave[:-1]

    binflux, intwave = binning.calcbinflux(
        self._binset.size, i_beg, i_end, avflux, deltaw)
    self._binflux = binflux * flux.unit
Calculate binned wavelength centers , edges , and flux .
4,062
def sample_binned(self, wavelengths=None, flux_unit=None, **kwargs):
    """Sample the binned observation without interpolation.

    Raises InterpolationNotAllowed when any requested wavelength is
    not an exact member of binset.
    """
    x = self._validate_binned_wavelengths(wavelengths)
    i = np.searchsorted(self.binset, x)
    # Every requested wavelength must match a bin center exactly.
    if not np.allclose(self.binset[i].value, x.value):
        raise exceptions.InterpolationNotAllowed(
            'Some or all wavelength values are not in binset.')
    y = self.binflux[i]
    if flux_unit is None:
        flux = y
    else:
        flux = units.convert_flux(x, y, flux_unit, **kwargs)
    return flux
Sample binned observation without interpolation .
4,063
def _get_binned_arrays(self, wavelengths, flux_unit, area=None,
                       vegaspec=None):
    """Get the binned observation arrays in user units."""
    x = self._validate_binned_wavelengths(wavelengths)
    y = self.sample_binned(wavelengths=x, flux_unit=flux_unit, area=area,
                           vegaspec=vegaspec)
    if isinstance(wavelengths, u.Quantity):
        # Convert sampled wavelengths back to the caller's unit.
        w = x.to(wavelengths.unit, u.spectral())
    else:
        w = x
    return w, y
Get binned observation in user units .
4,064
def binned_waverange(self, cenwave, npix, **kwargs):
    """Wavelength range covered by ``npix`` pixels centered on ``cenwave``.

    A bare-number ``cenwave`` is interpreted in the internal
    wavelength unit; the result carries ``cenwave``'s unit.
    """
    if not isinstance(cenwave, u.Quantity):
        cenwave = cenwave * self._internal_wave_unit
    # Work in the same unit as cenwave.
    bin_wave = units.validate_quantity(
        self.binset, cenwave.unit, equivalencies=u.spectral())
    return binning.wave_range(
        bin_wave.value, cenwave.value, npix, **kwargs) * cenwave.unit
Calculate the wavelength range covered by the given number of pixels centered on the given central wavelengths of binset .
4,065
def binned_pixelrange(self, waverange, **kwargs):
    """Number of binset pixels within the given wavelength range."""
    x = units.validate_quantity(waverange, self._internal_wave_unit,
                                equivalencies=u.spectral())
    return binning.pixel_range(self.binset.value, x.value, **kwargs)
Calculate the number of pixels within the given wavelength range and binset .
4,066
def plot(self, binned=True, wavelengths=None, flux_unit=None, area=None,
         vegaspec=None, **kwargs):
    """Plot the observation (binned data by default).

    Remaining ``kwargs`` are passed to the plot worker.
    """
    if binned:
        w, y = self._get_binned_arrays(wavelengths, flux_unit, area=area,
                                       vegaspec=vegaspec)
    else:
        w, y = self._get_arrays(wavelengths, flux_unit=flux_unit,
                                area=area, vegaspec=vegaspec)
    self._do_plot(w, y, **kwargs)
Plot the observation .
4,067
def as_spectrum(self, binned=True, wavelengths=None):
    """Reduce the observation to an empirical source spectrum.

    Sampling uses the internal flux unit; whether the binned or
    native data were used is recorded in the header metadata.
    """
    if binned:
        w, y = self._get_binned_arrays(
            wavelengths, self._internal_flux_unit)
    else:
        w, y = self._get_arrays(
            wavelengths, flux_unit=self._internal_flux_unit)
    header = {'observation': str(self), 'binned': binned}
    return SourceSpectrum(Empirical1D, points=w, lookup_table=y,
                          meta={'header': header})
Reduce the observation to an empirical source spectrum .
4,068
async def do_api_call(self):
    """Send the frame and wait for the answer (or a timeout).

    The response callback is registered before sending and always
    unregistered afterwards.
    """
    self.pyvlx.connection.register_frame_received_cb(
        self.response_rec_callback)
    await self.send_frame()
    await self.start_timeout()
    # Set either by the response callback or by the timeout handler.
    await self.response_received_or_timeout.wait()
    await self.stop_timeout()
    self.pyvlx.connection.unregister_frame_received_cb(
        self.response_rec_callback)
Start . Sending and waiting for answer .
4,069
async def start_timeout(self):
    """Schedule the timeout callback on the connection's event loop."""
    self.timeout_handle = self.pyvlx.connection.loop.call_later(
        self.timeout_in_seconds, self.timeout)
Start timeout .
4,070
async def main():
    """Demo entry point: load devices and scenes, run one scene."""
    pyvlx = PyVLX('pyvlx.yaml')
    await pyvlx.load_devices()
    # Devices are addressable by index and by name.
    print(pyvlx.devices[1])
    print(pyvlx.devices['Fenster 4'])
    await pyvlx.load_scenes()
    print(pyvlx.scenes[0])
    print(pyvlx.scenes['Bath Closed'])
    await pyvlx.scenes[1].run()
    await pyvlx.disconnect()
Load devices and scenes run first scene .
4,071
async def set_state(self, parameter):
    """Set the node to the desired state.

    Raises PyVLXException when the command was not acknowledged;
    on success stores the parameter and notifies observers.
    """
    command_send = CommandSend(pyvlx=self.pyvlx, node_id=self.node_id,
                               parameter=parameter)
    await command_send.do_api_call()
    if not command_send.success:
        raise PyVLXException("Unable to send command")
    self.parameter = parameter
    await self.after_update()
Set switch to desired state .
4,072
def etau_madau(wave, z, **kwargs):
    """Madau 1995 extinction for a galaxy at the given redshift.

    This is the Lyman-alpha prescription from the photo-z code BPZ.

    Parameters
    ----------
    wave : array-like or Quantity
        Wavelengths; must contain more than one data point.
    z : float
        Redshift of the galaxy.
    kwargs : dict
        Forwarded to ``units.validate_quantity``.

    Returns
    -------
    ExtinctionCurve

    Raises
    ------
    SynphotError
        ``z`` is not a real scalar, or ``wave`` has too few points.
    """
    if not isinstance(z, numbers.Real):
        raise exceptions.SynphotError(
            'Redshift must be a real scalar number.')
    if np.isscalar(wave) or len(wave) <= 1:
        raise exceptions.SynphotError('Wavelength has too few data points')

    wave = units.validate_quantity(wave, u.AA, **kwargs).value

    ll = 912.0  # Lyman limit (Angstrom)
    c = np.array([3.6e-3, 1.7e-3, 1.2e-3, 9.3e-4])
    # Rest wavelengths of Ly-alpha through Ly-delta.
    # FIX: np.float is a deprecated alias removed in NumPy 1.24;
    # the builtin float is the documented replacement.
    el = np.array([1216, 1026, 973, 950], dtype=float)
    tau = np.zeros_like(wave, dtype=float)
    xe = 1.0 + z

    # Line blanketing from the Lyman series, each line contributing
    # blueward of its redshifted wavelength.
    for i in range(len(el)):
        tau = np.where(wave <= el[i] * xe,
                       tau + c[i] * (wave / el[i]) ** 3.46, tau)

    # Photoelectric absorption below the redshifted Lyman limit.
    xc = wave / ll
    xc3 = xc ** 3
    tau = np.where(wave <= ll * xe,
                   (tau + 0.25 * xc3 * (xe ** 0.46 - xc ** 0.46) +
                    9.4 * xc ** 1.5 * (xe ** 0.18 - xc ** 0.18) -
                    0.7 * xc3 * (xc ** (-1.32) - xe ** (-1.32)) -
                    0.023 * (xe ** 1.68 - xc ** 1.68)), tau)

    # Guard against overflow in exp() for extreme optical depths.
    thru = np.where(tau > 700., 0., np.exp(-tau))

    meta = {'descrip': 'Madau 1995 extinction for z={0}'.format(z)}
    return ExtinctionCurve(ExtinctionModel1D, points=wave,
                           lookup_table=thru, meta=meta)
Madau 1995 extinction for a galaxy at the given redshift . This is the Lyman-alpha prescription from the photo-z code BPZ .
4,073
def extinction_curve(self, ebv, wavelengths=None):
    """Generate an extinction curve for the given E(B-V).

    ``ebv`` may be a magnitude Quantity or a real number; anything
    else raises SynphotError.
    """
    if isinstance(ebv, u.Quantity) and ebv.unit.decompose() == u.mag:
        ebv = ebv.value
    elif not isinstance(ebv, numbers.Real):
        raise exceptions.SynphotError('E(B-V)={0} is invalid.'.format(ebv))
    x = self._validate_wavelengths(wavelengths).value
    # Throughput = 10^(-0.4 * law(wave) * E(B-V))
    y = 10 ** (-0.4 * self(x).value * ebv)
    header = {'E(B-V)': ebv,
              'ReddeningLaw': self.meta.get('expr', 'unknown')}
    return ExtinctionCurve(ExtinctionModel1D, points=x, lookup_table=y,
                           meta={'header': header})
Generate extinction curve .
4,074
def to_fits(self, filename, wavelengths=None, **kwargs):
    """Write the reddening law to a FITS file."""
    w, y = self._get_arrays(wavelengths)
    kwargs['flux_col'] = 'Av/E(B-V)'
    kwargs['flux_unit'] = self._internal_flux_unit
    # Unlike spectra, do not pad or trim zeros by default.
    if 'pad_zero_ends' not in kwargs:
        kwargs['pad_zero_ends'] = False
    if 'trim_zero' not in kwargs:
        kwargs['trim_zero'] = False
    # Default display formats for the two table columns.
    bkeys = {'tdisp1': 'G15.7', 'tdisp2': 'G15.7'}
    if 'expr' in self.meta:
        bkeys['expr'] = (self.meta['expr'], 'synphot expression')
    if 'ext_header' in kwargs:
        kwargs['ext_header'].update(bkeys)
    else:
        kwargs['ext_header'] = bkeys
    specio.write_fits_spec(filename, w, y, **kwargs)
Write the reddening law to a FITS file .
4,075
def from_file(cls, filename, **kwargs):
    """Create a reddening law from file.

    Defaults the flux unit to the internal one and, for FITS files,
    the flux column to 'Av/E(B-V)' unless overridden in ``kwargs``.
    """
    if 'flux_unit' not in kwargs:
        kwargs['flux_unit'] = cls._internal_flux_unit
    if ((filename.endswith('fits') or filename.endswith('fit')) and
            'flux_col' not in kwargs):
        kwargs['flux_col'] = 'Av/E(B-V)'
    header, wavelengths, rvs = specio.read_spec(filename, **kwargs)
    return cls(Empirical1D, points=wavelengths, lookup_table=rvs,
               meta={'header': header})
Create a reddening law from file .
4,076
def validate_payload_len(self, payload):
    """Validate payload length against the frame's PAYLOAD_LEN.

    Frame classes without a PAYLOAD_LEN attribute accept any length.
    """
    if not hasattr(self, "PAYLOAD_LEN"):
        return
    if len(payload) != self.PAYLOAD_LEN:
        raise PyVLXException(
            "Invalid payload len",
            expected_len=self.PAYLOAD_LEN,
            current_len=len(payload),
            frame_type=type(self).__name__)
Validate payload len .
4,077
def build_frame(command, payload):
    """Build raw frame bytes from a command and its payload.

    Layout: 0x00, length byte, 2-byte big-endian command id,
    payload, 1-byte checksum over all preceding bytes.
    """
    packet_length = 2 + len(payload) + 1
    ret = struct.pack("BB", 0, packet_length)
    ret += struct.pack(">H", command.value)
    ret += payload
    ret += struct.pack("B", calc_crc(ret))
    return ret
Build raw bytes from command and payload .
4,078
async def run(self, wait_for_completion=True):
    """Run the scene via an ActivateScene API call.

    Raises PyVLXException when activation is not acknowledged.
    """
    activate_scene = ActivateScene(
        pyvlx=self.pyvlx,
        wait_for_completion=wait_for_completion,
        scene_id=self.scene_id)
    await activate_scene.do_api_call()
    if not activate_scene.success:
        raise PyVLXException("Unable to activate scene")
Run scene .
4,079
def add(self, scene):
    """Add a scene to the internal list.

    Raises TypeError when ``scene`` is not a Scene instance.
    """
    if not isinstance(scene, Scene):
        raise TypeError()
    self.__scenes.append(scene)
Add scene .
4,080
def data_import(self, json_response):
    """Import scenes from a JSON API response.

    Raises PyVLXException when the response has no 'data' element.
    """
    if 'data' not in json_response:
        raise PyVLXException(
            'no element data found: {0}'.format(json.dumps(json_response)))
    data = json_response['data']
    for item in data:
        self.load_scene(item)
Import scenes from JSON response .
4,081
def load_scene(self, item):
    """Build one scene from its JSON item and register it."""
    scene = Scene.from_config(self.pyvlx, item)
    self.add(scene)
Load scene from json .
4,082
def parse_raw(self, raw):
    """Parse an alias array from 21 raw bytes.

    Byte 0 holds the number of aliases (at most 5); each alias
    occupies 4 bytes: a 2-byte type followed by a 2-byte value.
    """
    if not isinstance(raw, bytes):
        raise PyVLXException("AliasArray::invalid_type_if_raw",
                             type_raw=type(raw))
    if len(raw) != 21:
        raise PyVLXException("AliasArray::invalid_size", size=len(raw))
    nbr_of_alias = raw[0]
    if nbr_of_alias > 5:
        raise PyVLXException("AliasArray::invalid_nbr_of_alias",
                             nbr_of_alias=nbr_of_alias)
    for i in range(0, nbr_of_alias):
        # (type bytes, value bytes) per alias slot.
        self.alias_array_.append(
            (raw[i * 4 + 1:i * 4 + 3], raw[i * 4 + 3:i * 4 + 5]))
Parse alias array from raw bytes .
4,083
def decode(raw):
    """Decode a SLIP message by unescaping the two ESC sequences."""
    return raw.replace(
        bytes([SLIP_ESC, SLIP_ESC_END]), bytes([SLIP_END])).replace(
        bytes([SLIP_ESC, SLIP_ESC_ESC]), bytes([SLIP_ESC]))
Decode SLIP message .
4,084
def encode(raw):
    """Encode a SLIP message.

    ESC bytes are escaped first so END escaping cannot be
    double-escaped.
    """
    return raw.replace(
        bytes([SLIP_ESC]), bytes([SLIP_ESC, SLIP_ESC_ESC])).replace(
        bytes([SLIP_END]), bytes([SLIP_ESC, SLIP_ESC_END]))
Encode SLIP message .
4,085
def get_next_slip(raw):
    """Get the next SLIP packet from raw data.

    Returns (packet, remainder); packet is None when ``raw`` does
    not start with a SLIP frame.
    """
    if not is_slip(raw):
        return None, raw
    # Find the terminating END byte after the leading frame byte.
    length = raw[1:].index(SLIP_END)
    slip_packet = decode(raw[1:length + 1])
    new_raw = raw[length + 2:]
    return slip_packet, new_raw
Get the next slip packet from raw data .
4,086
def _slow_calcbinflux ( len_binwave , i_beg , i_end , avflux , deltaw ) : binflux = np . empty ( shape = ( len_binwave , ) , dtype = np . float64 ) intwave = np . empty ( shape = ( len_binwave , ) , dtype = np . float64 ) for i in range ( len ( i_beg ) ) : first = i_beg [ i ] last = i_end [ i ] cur_dw = deltaw [ first : last ] intwave [ i ] = cur_dw . sum ( ) binflux [ i ] = np . sum ( avflux [ first : last ] * cur_dw ) / intwave [ i ] return binflux , intwave
Python implementation of calcbinflux .
4,087
def pixel_range(bins, waverange, mode='round'):
    """Calculate the number of pixels within ``waverange`` for ``bins``.

    ``mode`` controls edge handling: 'round' (nearest pixel edge),
    'min' (count only pixels mostly inside), 'max' (count pixels
    partly inside), or 'none' (fractional pixel count).

    Raises SynphotError for an invalid mode and OverlapError when
    the range falls outside the outer pixel edges.
    """
    mode = mode.lower()
    if mode not in ('round', 'min', 'max', 'none'):
        raise exceptions.SynphotError(
            'mode={0} is invalid, must be "round", "min", "max", '
            'or "none".'.format(mode))

    # Normalize the wavelength range to ascending order.
    if waverange[0] < waverange[-1]:
        wave1 = waverange[0]
        wave2 = waverange[-1]
    else:
        wave1 = waverange[-1]
        wave2 = waverange[0]

    # Normalize bins to ascending order as well.
    if bins[0] > bins[-1]:
        bins = bins[::-1]

    # Outer edges of the first and last pixel, extrapolated from
    # the first/last pair of bin centers.
    minwave = bins[0] - (bins[0:2].mean() - bins[0])
    maxwave = bins[-1] + (bins[-1] - bins[-2:].mean())
    if wave1 < minwave or wave2 > maxwave:
        raise exceptions.OverlapError(
            'Wavelength range ({0}, {1}) is out of bounds of bins '
            '(min={2}, max={3}).'.format(wave1, wave2, minwave, maxwave))

    if wave1 == wave2:
        return 0

    if mode == 'round':
        ind1 = bins.searchsorted(wave1, side='right')
        ind2 = bins.searchsorted(wave2, side='right')
    else:
        ind1 = bins.searchsorted(wave1, side='left')
        ind2 = bins.searchsorted(wave2, side='left')

    if mode == 'round':
        npix = ind2 - ind1
    elif mode == 'min':
        # Shrink inward over partially covered edge pixels.
        frac = (bins[ind1] - wave1) / (bins[ind1] - bins[ind1 - 1])
        if frac < 0.5:
            ind1 += 1
        frac = (wave2 - bins[ind2 - 1]) / (bins[ind2] - bins[ind2 - 1])
        if frac < 0.5:
            ind2 -= 1
        npix = ind2 - ind1
    elif mode == 'max':
        # Grow outward over partially covered edge pixels.
        frac = (wave1 - bins[ind1 - 1]) / (bins[ind1] - bins[ind1 - 1])
        if frac < 0.5:
            ind1 -= 1
        frac = (bins[ind2] - wave2) / (bins[ind2] - bins[ind2 - 1])
        if frac < 0.5:
            ind2 += 1
        npix = ind2 - ind1
    else:
        # 'none': return the exact fractional pixel count.
        frac1 = ind1 - (bins[ind1] - wave1) / (bins[ind1] - bins[ind1 - 1])
        frac2 = ind2 - (bins[ind2] - wave2) / (bins[ind2] - bins[ind2 - 1])
        npix = frac2 - frac1

    return npix
Calculate the number of pixels within the given wavelength range and the given bins .
4,088
async def connect(self):
    """Connect and log in to the KLF 200.

    Raises PyVLXException when login is rejected.
    """
    PYVLXLOG.warning("Connecting to KLF 200.")
    await self.connection.connect()
    login = Login(pyvlx=self, password=self.config.password)
    await login.do_api_call()
    if not login.success:
        raise PyVLXException("Login to KLF 200 failed, check credentials")
Connect to KLF 200 .
4,089
async def update_version(self):
    """Retrieve firmware version and protocol version from the API.

    Stores them on ``self.version`` / ``self.protocol_version``;
    raises PyVLXException when either query fails.
    """
    get_version = GetVersion(pyvlx=self)
    await get_version.do_api_call()
    if not get_version.success:
        raise PyVLXException("Unable to retrieve version")
    self.version = get_version.version

    get_protocol_version = GetProtocolVersion(pyvlx=self)
    await get_protocol_version.do_api_call()
    if not get_protocol_version.success:
        raise PyVLXException("Unable to retrieve protocol version")
    self.protocol_version = get_protocol_version.version

    PYVLXLOG.warning(
        "Connected to: %s, protocol version: %s",
        self.version, self.protocol_version)
Retrieve version and protocol version from API .
4,090
async def send_frame(self, frame):
    """Send a frame to the API, initializing the session first if needed.

    On first use this connects, fetches versions, sets UTC time and
    enables the house status monitor before writing the frame.
    """
    if not self.connection.connected:
        await self.connect()
        await self.update_version()
        await set_utc(pyvlx=self)
        await house_status_monitor_enable(pyvlx=self)
    self.connection.write(frame)
Send frame to API via connection .
4,091
def from_config(cls, pyvlx, item):
    """Create a scene from a configuration dict with 'name' and 'id'."""
    name = item['name']
    ident = item['id']
    return cls(pyvlx, ident, name)
Read scene from configuration .
4,092
async def api_call(self, verb, action, params=None,
                   add_authorization_token=True, retry=False):
    """Send an API call, refreshing the token and retrying once if invalid.

    ``retry`` is an internal flag preventing more than one retry.
    """
    if add_authorization_token and not self.token:
        await self.refresh_token()
    try:
        return await self._api_call_impl(
            verb, action, params, add_authorization_token)
    except InvalidToken:
        if not retry and add_authorization_token:
            # Token may have expired; refresh and retry exactly once.
            await self.refresh_token()
            return await self.api_call(
                verb, action, params, add_authorization_token, True)
        raise
Send api call .
4,093
async def refresh_token(self):
    """Refresh the API token by logging in with the configured password.

    Raises PyVLXException when the login response carries no token.
    """
    json_response = await self.api_call(
        'auth', 'login', {'password': self.config.password},
        add_authorization_token=False)
    if 'token' not in json_response:
        raise PyVLXException(
            'no element token found in response: {0}'.format(
                json.dumps(json_response)))
    self.token = json_response['token']
Refresh API token from KLF 200 .
4,094
def create_body(action, params):
    """Create the HTTP body dict for a REST request.

    The body always contains 'action'; 'params' is included only
    when provided (not None).
    """
    request_body = {'action': action}
    if params is not None:
        request_body['params'] = params
    return request_body
Create http body for rest request .
4,095
def evaluate_response(json_response):
    """Evaluate a REST response.

    Delegates error codes to Interface.evaluate_errors; raises
    PyVLXException when 'result' is missing or falsy.
    """
    if 'errors' in json_response and json_response['errors']:
        Interface.evaluate_errors(json_response)
    elif 'result' not in json_response:
        raise PyVLXException(
            'no element result found in response: {0}'.format(
                json.dumps(json_response)))
    elif not json_response['result']:
        raise PyVLXException(
            'Request failed {0}'.format(json.dumps(json_response)))
Evaluate rest response .
4,096
def evaluate_errors(json_response):
    """Evaluate REST error codes.

    Raises InvalidToken for auth-related codes (402/403/405/406)
    and PyVLXException for anything else or a malformed error list.
    """
    if 'errors' not in json_response or \
            not isinstance(json_response['errors'], list) or \
            not json_response['errors'] or \
            not isinstance(json_response['errors'][0], int):
        raise PyVLXException(
            'Could not evaluate errors {0}'.format(
                json.dumps(json_response)))
    # Only the first error code is inspected.
    first_error = json_response['errors'][0]
    if first_error in [402, 403, 405, 406]:
        raise InvalidToken(first_error)
    raise PyVLXException('Unknown error code {0}'.format(first_error))
Evaluate rest errors .
4,097
def temperature(self, what):
    """Set temperature, validated/converted to Kelvin."""
    self._temperature = units.validate_quantity(what, u.K)
Set temperature .
4,098
def from_file(cls, filename, temperature_key='DEFT',
              beamfill_key='BEAMFILL', **kwargs):
    """Create a thermal spectral element from a FITS file.

    Parameters
    ----------
    filename : str
        FITS filename; must end in 'fits' or 'fit'.
    temperature_key : str
        Table-extension header keyword holding the temperature.
    beamfill_key : str
        Table-extension header keyword holding the beam filling
        factor; defaults to 1 when the keyword is absent.
    kwargs : dict
        Forwarded to ``specio.read_spec``.

    Raises
    ------
    SynphotError
        Not a FITS file, or the temperature keyword is missing.
    """
    if not (filename.endswith('fits') or filename.endswith('fit')):
        raise exceptions.SynphotError('Only FITS format is supported.')

    # Read metadata from the table-extension header.
    ext = kwargs.get('ext', 1)
    tab_hdr = fits.getheader(filename, ext=ext)

    temperature = tab_hdr.get(temperature_key)
    if temperature is None:
        raise exceptions.SynphotError(
            'Missing {0} keyword.'.format(temperature_key))

    # FIX: previously the hard-coded 'BEAMFILL' keyword was used here,
    # silently ignoring the beamfill_key parameter.
    beam_fill_factor = tab_hdr.get(beamfill_key, 1)

    if 'flux_unit' not in kwargs:
        kwargs['flux_unit'] = cls._internal_flux_unit
    if 'flux_col' not in kwargs:
        kwargs['flux_col'] = 'EMISSIVITY'

    header, wavelengths, em = specio.read_spec(filename, **kwargs)
    return cls(Empirical1D, temperature,
               beam_fill_factor=beam_fill_factor,
               points=wavelengths, lookup_table=em,
               meta={'header': header})
Create a thermal spectral element from file .
4,099
def from_parameter(self, parameter):
    """Set internal raw state from a Parameter object.

    Raises a generic Exception when ``parameter`` has the wrong type.
    """
    if not isinstance(parameter, Parameter):
        raise Exception("parameter::from_parameter_wrong_object")
    self.raw = parameter.raw
Set internal raw state from parameter .