idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
47,800
def parse ( self , filename ) : filehandle = storage . open_vos_or_local ( filename , "rb" ) assert filehandle is not None , "Failed to open file {} " . format ( filename ) filestr = filehandle . read ( ) filehandle . close ( ) assert filestr is not None , "File contents are None" observations = self . _parse_observation_list ( filestr ) self . _parse_observation_headers ( filestr , observations ) sys_header = self . _parse_system_header ( filestr ) sources = self . _parse_source_data ( filestr , observations ) return AstromData ( observations , sys_header , sources , discovery_only = self . discovery_only )
Parses a file into an AstromData structure .
47,801
def write_headers ( self , observations , sys_header ) : if self . _header_written : raise AstromFormatError ( "Astrom file already has headers." ) self . _write_observation_list ( observations ) self . _write_observation_headers ( observations ) self . _write_sys_header ( sys_header ) self . _write_source_header ( ) self . _header_written = True
Writes the header part of the astrom file so that only the source data has to be filled in .
47,802
def write_astrom_data ( self , astrom_data ) : self . write_headers ( astrom_data . observations , astrom_data . sys_header ) self . _write_source_data ( astrom_data . sources )
Writes a full AstromData structure at once .
47,803
def reference_source_point ( self ) : xref = isinstance ( self . xref , Quantity ) and self . xref . value or self . xref yref = isinstance ( self . yref , Quantity ) and self . yref . value or self . yref return xref + self . x_ref_offset , yref + self . y_ref_offset
The location of the source in the reference image in terms of the current image coordinates .
47,804
def get_coordinate_offset ( self , other_reading ) : my_x , my_y = self . reference_source_point other_x , other_y = other_reading . reference_source_point return my_x - other_x , my_y - other_y
Calculates the offsets between readings coordinate systems .
47,805
def from_source_reference ( expnum , ccd , x , y ) : image_uri = storage . dbimages_uri ( expnum = expnum , ccd = None , version = 'p' , ext = '.fits' , subdir = None ) logger . debug ( 'Trying to access {}' . format ( image_uri ) ) if not storage . exists ( image_uri , force = False ) : logger . warning ( 'Image not in dbimages? Trying subdir.' ) image_uri = storage . dbimages_uri ( expnum = expnum , ccd = ccd , version = 'p' ) if not storage . exists ( image_uri , force = False ) : logger . warning ( "Image doesn't exist in ccd subdir. %s" % image_uri ) return None if x == - 9999 or y == - 9999 : logger . warning ( "Skipping {} as x/y not resolved." . format ( image_uri ) ) return None mopheader_uri = storage . dbimages_uri ( expnum = expnum , ccd = ccd , version = 'p' , ext = '.mopheader' ) if not storage . exists ( mopheader_uri , force = False ) : logger . critical ( 'Image exists but processing incomplete. Mopheader missing. {}' . format ( image_uri ) ) return None observation = Observation ( expnum = str ( expnum ) , ftype = 'p' , ccdnum = str ( ccd ) , fk = "" ) return observation
Given the location of a source in the image create an Observation .
47,806
def _gradient_penalty ( self , real_samples , fake_samples , kwargs ) : import torch from torch . autograd import Variable , grad real_samples = real_samples . view ( fake_samples . shape ) subset_size = real_samples . shape [ 0 ] real_samples = real_samples [ : subset_size ] fake_samples = fake_samples [ : subset_size ] alpha = torch . rand ( subset_size ) if self . use_cuda : alpha = alpha . cuda ( ) alpha = alpha . view ( ( - 1 , ) + ( ( 1 , ) * ( real_samples . dim ( ) - 1 ) ) ) interpolates = alpha * real_samples + ( ( 1 - alpha ) * fake_samples ) interpolates = Variable ( interpolates , requires_grad = True ) if self . use_cuda : interpolates = interpolates . cuda ( ) d_output = self . critic ( interpolates , ** kwargs ) grad_ouputs = torch . ones ( d_output . size ( ) ) if self . use_cuda : grad_ouputs = grad_ouputs . cuda ( ) gradients = grad ( outputs = d_output , inputs = interpolates , grad_outputs = grad_ouputs , create_graph = True , retain_graph = True , only_inputs = True ) [ 0 ] return ( ( gradients . norm ( 2 , dim = 1 ) - 1 ) ** 2 ) . mean ( ) * 10
Compute the norm of the gradients for each sample in a batch and penalize anything on either side of unit norm
47,807
def mono ( self ) : if self . channels == 1 : return self x = self . sum ( axis = 1 ) * 0.5 y = x * 0.5 return AudioSamples ( y , self . samplerate )
Return this instance summed to mono . If the instance is already mono this is a no - op .
47,808
def encode ( self , flo = None , fmt = 'WAV' , subtype = 'PCM_16' ) : flo = flo or BytesIO ( ) with SoundFile ( flo , mode = 'w' , channels = self . channels , format = fmt , subtype = subtype , samplerate = self . samples_per_second ) as f : if fmt == 'OGG' : factor = 20 chunksize = self . samples_per_second * factor for i in range ( 0 , len ( self ) , chunksize ) : chunk = self [ i : i + chunksize ] f . write ( chunk ) else : f . write ( self ) flo . seek ( 0 ) return flo
Return audio samples encoded as bytes given a particular audio format
47,809
def extend_with_default ( validator_class : Any ) -> Any : validate_properties = validator_class . VALIDATORS [ 'properties' ] def set_defaults ( validator : Any , properties : dict , instance : dict , schema : dict ) -> Iterator [ ValidationError ] : for prop , subschema in properties . items ( ) : if 'default' in subschema : instance . setdefault ( prop , subschema [ 'default' ] ) for error in validate_properties ( validator , properties , instance , schema , ) : yield error return extend ( validator_class , { 'properties' : set_defaults } )
Append defaults from schema to instance need to be validated .
47,810
def download_hdulist ( self , uri , ** kwargs ) : logger . debug ( str ( kwargs ) ) hdulist = None try : vobj = storage . vofile ( uri , ** kwargs ) try : fobj = cStringIO . StringIO ( vobj . read ( ) ) fobj . seek ( 0 ) hdulist = fits . open ( fobj ) except Exception as e : sys . stderr . write ( "ERROR: {}\n" . format ( str ( e ) ) ) sys . stderr . write ( "While loading {} {}\n" . format ( uri , kwargs ) ) pass finally : vobj . close ( ) except Exception as e : sys . stderr . write ( str ( e ) + "\n" ) sys . stderr . write ( "While opening connection to {}.\n" . format ( uri ) ) sys . stderr . write ( "Sending back FLAT instead, too keep display happy." ) hdulist = self . download_hdulist ( 'vos:OSSOS/dbimages/calibrators/13AQ05_r_flat.fits' , ** kwargs ) return hdulist
Downloads a FITS image as a HDUList .
47,811
def download_apcor ( self , uri ) : local_file = os . path . basename ( uri ) if os . access ( local_file , os . F_OK ) : fobj = open ( local_file ) else : fobj = storage . vofile ( uri , view = 'data' ) fobj . seek ( 0 ) str = fobj . read ( ) fobj . close ( ) apcor_str = str return ApcorData . from_string ( apcor_str )
Downloads apcor data .
47,812
def from_string ( cls , rawstr ) : try : args = map ( float , rawstr . split ( ) ) except Exception as ex : import sys logger . error ( "Failed to convert aperture correction: {}" . format ( ex ) ) raise ex return cls ( * args )
Creates an ApcorData record from the raw string format .
47,813
def htmIndex ( ra , dec , htm_level = 3 ) : import re if os . uname ( ) [ 0 ] == "Linux" : javabin = '/opt/java2/bin/java ' htm_level = htm_level verc_htm_cmd = javabin + '-classpath /usr/cadc/misc/htm/htmIndex.jar edu.jhu.htm.app.lookup %s %s %s' % ( htm_level , ra , dec ) for result in os . popen ( verc_htm_cmd ) . readlines ( ) : result = result [ : - 1 ] if re . search ( "ID/Name cc" , result ) : ( void , coord ) = result . split ( "=" ) ( void , junk , htm_index ) = coord . split ( " " ) return htm_index
Compute htm index of htm_level at position ra dec
47,814
def predict ( abg , date , obs = 568 ) : import orbfit import RO . StringUtil ( ra , dec , a , b , ang ) = orbfit . predict ( abg , date , obs ) obj [ 'RA' ] = ra obj [ 'DEC' ] = dec obj [ 'dRA' ] = a obj [ 'dDEC' ] = b obj [ 'dANG' ] = ang return obj
Run GB s predict using an ABG file as input .
47,815
def config_logging ( level ) : logger = logging . getLogger ( '' ) logger . setLevel ( level ) if level < logging . DEBUG : log_format = "%(asctime)s %(message)s" else : log_format = "%(asctime)s %(module)s : %(lineno)d %(message)s" sh = logging . StreamHandler ( ) sh . formatter = logging . Formatter ( fmt = log_format ) logger . handlers = [ ] logger . addHandler ( sh )
Configure the logging given the level desired
47,816
def exec_prog ( args ) : program_name = args [ 0 ] logging . info ( " " . join ( args ) ) output = subprocess . check_output ( args , stderr = subprocess . STDOUT ) if not os . access ( program_name + ".OK" , os . F_OK ) : logging . error ( "No {}.OK file?" . format ( program_name ) ) raise subprocess . CalledProcessError ( - 1 , ' ' . join ( args ) , output ) os . unlink ( program_name + ".OK" ) if os . access ( program_name + ".FAILED" , os . F_OK ) : os . unlink ( program_name + ".FAILED" ) return output
Run a subprocess check for . OK and raise error if does not exist .
47,817
def stream ( self ) : if self . _stream is None : self . _stream = tempfile . NamedTemporaryFile ( delete = False ) try : self . _stream . write ( self . client . open ( self . filename , view = 'data' ) . read ( ) ) except : pass return self . _stream
the stream to write the log content too .
47,818
def client ( self ) : if self . _client is not None : return self . _client self . _client = vospace . client return self . _client
Send back the client we were sent or construct a default one .
47,819
def parse_string ( self , timestr , subfmts ) : components = ( 'year' , 'mon' , 'mday' ) defaults = ( None , 1 , 1 , 0 ) try : idot = timestr . rindex ( '.' ) except : fracday = 0.0 else : timestr , fracday = timestr [ : idot ] , timestr [ idot : ] fracday = float ( fracday ) for _ , strptime_fmt_or_regex , _ in subfmts : vals = [ ] if isinstance ( strptime_fmt_or_regex , six . string_types ) : try : tm = time . strptime ( timestr , strptime_fmt_or_regex ) tm . tm_hour += int ( 24 * fracday ) tm . tm_min += int ( 60 * ( 24 * fracday - tm . tm_hour ) ) tm . tm_sec += 60 * ( 60 * ( 24 * fracday - tm . tm_hour ) - tm . tm_min ) except ValueError as ex : print ex continue else : vals = [ getattr ( tm , 'tm_' + component ) for component in components ] else : tm = re . match ( strptime_fmt_or_regex , timestr ) if tm is None : continue tm = tm . groupdict ( ) vals = [ int ( tm . get ( component , default ) ) for component , default in six . moves . zip ( components , defaults ) ] hrprt = int ( 24 * fracday ) vals . append ( hrprt ) mnprt = int ( 60 * ( 24 * fracday - hrprt ) ) vals . append ( mnprt ) scprt = 60 * ( 60 * ( 24 * fracday - hrprt ) - mnprt ) vals . append ( scprt ) return vals else : raise ValueError ( 'Time {0} does not match {1} format' . format ( timestr , self . name ) )
Read time from a single string using a set of possible formats .
47,820
def _get_db_options ( args ) : import optik , getpass , sys from optik import OptionParser parser = OptionParser ( ) parser . add_option ( "-d" , "--database" , action = "store" , type = "string" , dest = "database" , default = "cfht" , help = "Name of the SYBASE database containing TABLE" , metavar = "FILE" ) parser . add_option ( "-u" , "--user" , action = "store" , type = "string" , dest = "user" , default = getpass . getuser ( ) , help = "User name to access db with" , metavar = "USER" ) ( opt , unused_args ) = parser . parse_args ( args ) return opt . database , opt . user , unused_args
Parse through a command line of arguments to over - ride the values in the users . dbrc file .
47,821
def _get_db_connect ( dbSystem , db , user , password ) : if dbSystem == 'SYBASE' : import Sybase try : dbh = Sybase . connect ( dbSystem , user , password , database = db ) except : dbh = None elif dbSystem == 'MYSQL' : import MySQLdb try : dbh = MySQLdb . connect ( user = user , passwd = password , db = db , host = 'gimli' ) except : dbh = None return dbh
Create a connection to the database specified on the command line
47,822
def download_cutout ( self , reading , focus = None , needs_apcor = False ) : logger . debug ( "Doing download_cutout with inputs: reading:{} focus:{} needs_apcor:{}" . format ( reading , focus , needs_apcor ) ) assert isinstance ( reading , SourceReading ) min_radius = config . read ( 'CUTOUTS.SINGLETS.RADIUS' ) if not isinstance ( min_radius , Quantity ) : min_radius = min_radius * units . arcsec radius = max ( reading . uncertainty_ellipse . a , reading . uncertainty_ellipse . b ) * 2.5 + min_radius logger . debug ( "got radius for cutout: {}" . format ( radius ) ) image_uri = reading . get_image_uri ( ) logger . debug ( "Getting cutout at {} for {}" . format ( reading . reference_sky_coord , image_uri ) ) hdulist = storage . _cutout_expnum ( reading . obs , reading . reference_sky_coord , radius ) logger . debug ( "Getting the aperture correction." ) source = SourceCutout ( reading , hdulist , radius = radius ) try : apcor = source . apcor zmag = source . zmag source . reading . get_observation_header ( ) except Exception as ex : if needs_apcor : import sys , traceback sys . stderr . write ( "Failed to retrieve apcor but apcor required. Raising error, see logs for more details" ) sys . stderr . write ( traceback . print_exc ( ) ) pass logger . debug ( "Sending back the source reading." ) return source
Downloads a cutout of the FITS image for a given source reading .
47,823
def load_objects ( self , directory_name = None ) : if directory_name is not None : if directory_name == parameters . REAL_KBO_AST_DIR : kbos = parsers . ossos_discoveries ( all_objects = True , data_release = None ) else : kbos = parsers . ossos_discoveries ( directory_name , all_objects = False , data_release = None ) for kbo in kbos : self . kbos [ kbo . name ] = kbo . orbit self . kbos [ kbo . name ] . mag = kbo . mag self . doplot ( )
Load the targets from a file .
47,824
def p2c ( self , p = None ) : if p is None : p = [ 0 , 0 ] x = ( p [ 0 ] - self . x1 ) * self . xscale + self . cx1 y = ( p [ 1 ] - self . y1 ) * self . yscale + self . cy1 return ( x , y )
convert from plot to canvas coordinates .
47,825
def p2s ( self , p = None ) : if not p : p = [ 0 , 0 ] s = self . p2c ( p ) return self . c2s ( s )
Convert from plot to screen coordinates
47,826
def tickmark ( self , x , y , size = 10 , orientation = 90 ) : ( x1 , y1 ) = self . p2c ( [ x , y ] ) x2 = x1 + size * math . cos ( math . radians ( orientation ) ) y2 = y1 - size * math . sin ( math . radians ( orientation ) ) self . create_line ( x1 , y1 , x2 , y2 )
Draw a line of size and orientation at x y
47,827
def relocate ( self ) : name = self . SearchVar . get ( ) if self . kbos . has_key ( name ) : kbo = self . kbos [ name ] assert isinstance ( kbo , orbfit . Orbfit ) this_time = Time ( self . date . get ( ) , scale = 'utc' ) try : kbo . predict ( this_time ) self . recenter ( kbo . coordinate . ra . radian , kbo . coordinate . dec . radian ) self . create_point ( kbo . coordinate . ra . radian , kbo . coordinate . dec . radian , color = 'blue' , size = 4 ) except : logging . error ( "failed to compute KBO position" )
Move to the position of self . SearchVar
47,828
def create_point ( self , xcen , ycen , size = 10 , color = 'red' , fill = None ) : if fill is None : fill = color ( x , y ) = self . p2c ( ( xcen , ycen ) ) x1 = x - size x2 = x + size y1 = y - size y2 = y + size self . create_rectangle ( x1 , y1 , x2 , y2 , fill = fill , outline = color )
Plot a circle of size at this x y location
47,829
def current_pointing ( self , index ) : if self . current is not None : for item in self . pointings [ self . current ] [ 'items' ] : self . itemconfigure ( item , outline = "black" ) self . current = index for item in self . pointings [ self . current ] [ 'items' ] : self . itemconfigure ( item , outline = "blue" )
set the color of the currently selected pointing to blue
47,830
def delete_pointing ( self , event ) : if self . current is None : return for item in self . pointings [ self . current ] [ 'items' ] : self . delete ( item ) self . delete ( self . pointings [ self . current ] [ 'label' ] [ 'id' ] ) del ( self . pointings [ self . current ] ) self . current = None
Delete the currently active pointing
47,831
def create_pointing ( self , event , label_text = None ) : x = self . canvasx ( event . x ) y = self . canvasy ( event . y ) ( ra , dec ) = self . c2p ( ( x , y ) ) this_camera = Camera ( ra = float ( ra ) * units . radian , dec = float ( dec ) * units . radian , camera = self . camera . get ( ) ) ccds = numpy . radians ( numpy . array ( this_camera . geometry ) ) items = [ ] for ccd in ccds : if len ( ccd ) == 4 : ( x1 , y1 ) = self . p2c ( ( ccd [ 0 ] , ccd [ 1 ] ) ) ( x2 , y2 ) = self . p2c ( ( ccd [ 2 ] , ccd [ 3 ] ) ) item = self . create_rectangle ( x1 , y1 , x2 , y2 , stipple = 'gray25' , fill = None ) else : ( x1 , y1 ) = self . p2c ( ( ccd [ 0 ] - ccd [ 2 ] , ccd [ 1 ] - ccd [ 2 ] ) ) ( x2 , y2 ) = self . p2c ( ( ccd [ 0 ] + ccd [ 2 ] , ccd [ 1 ] + ccd [ 2 ] ) ) item = self . create_oval ( x1 , y1 , x2 , y2 ) items . append ( item ) label = { } if label_text is None : label_text = self . plabel . get ( ) label [ 'text' ] = label_text label [ 'id' ] = self . label ( this_camera . ra . radian , this_camera . dec . radian , label [ 'text' ] ) self . pointings . append ( { "label" : label , "items" : items , "camera" : this_camera } ) self . current = len ( self . pointings ) - 1 self . current_pointing ( len ( self . pointings ) - 1 )
Plot the sky coverage of pointing at event . x event . y on the canvas .
47,832
def move_pointing ( self , event ) : ( ra , dec ) = self . c2p ( ( self . canvasx ( event . x ) , self . canvasy ( event . y ) ) ) closest = None this_pointing = None this_index = - 1 index = - 1 for pointing in self . pointings : index = index + 1 ds = pointing [ "camera" ] . separation ( ra , dec ) if this_pointing is None or ds < closest : this_index = index closest = ds this_pointing = pointing if this_pointing is None : return self . plabel . set ( this_pointing [ 'label' ] [ 'text' ] ) this_pointing [ "camera" ] . set_coord ( ( ra * units . radian , dec * units . radian ) ) ccds = numpy . radians ( this_pointing [ "camera" ] . geometry ) items = this_pointing [ "items" ] label = this_pointing [ "label" ] ( x1 , y1 ) = self . p2c ( ( this_pointing [ "camera" ] . ra . radian , this_pointing [ "camera" ] . dec . radian ) ) self . coords ( label [ "id" ] , x1 , y1 ) for i in range ( len ( ccds ) ) : ccd = ccds [ i ] item = items [ i ] if len ( ccd ) == 4 : ( x1 , y1 ) = self . p2c ( ( ccd [ 0 ] , ccd [ 1 ] ) ) ( x2 , y2 ) = self . p2c ( ( ccd [ 2 ] , ccd [ 3 ] ) ) else : ( x1 , y1 ) = self . p2c ( ( ccd [ 0 ] - ccd [ 2 ] ) , ccd [ 1 ] - ccd [ 2 ] ) ( x2 , y2 ) = self . p2c ( ( ccd [ 0 ] + ccd [ 2 ] ) , ccd [ 1 ] + ccd [ 2 ] ) self . coords ( item , x1 , y1 , x2 , y2 ) self . current_pointing ( this_index )
Grab nearest pointing to event . x event . y and with cursor
47,833
def ossos_pointings ( self ) : match = re . match ( '(\d+)\D(\d+)' , self . expnum . get ( ) ) if match is not None : expnum = int ( match . group ( 1 ) ) ccd = int ( match . group ( 2 ) ) x = 2112 / 2.0 y = 4644 / 2.0 else : expnum = int ( str ( self . expnum . get ( ) ) ) ccd = 22 x = 1000 y = 4644 - 15 / 0.185 header = None try : header = storage . get_astheader ( expnum , ccd = ccd ) except : if header is None : print "Didn't get a header... " return ossos_wcs = wcs . WCS ( header ) ( ra , dec ) = ossos_wcs . xy2sky ( x , y ) class MyEvent ( object ) : def __init__ ( self , x , y ) : self . x = x self . y = y ( x , y ) = self . p2s ( ( math . radians ( ra ) , math . radians ( dec ) ) ) event = MyEvent ( x , y ) self . create_pointing ( event , label_text = header [ 'OBJECT' ] + ' ccd{}' . format ( ccd ) )
plot an OSSOS observation on the OSSOS plot .
47,834
def get_pointings ( self ) : self . camera . set ( "MEGACAM_40" ) ( ra1 , dec1 ) = self . c2p ( ( self . canvasx ( 1 ) , self . canvasy ( 1 ) ) ) ( ra2 , dec2 ) = self . c2p ( ( self . canvasx ( 480 * 2 ) , self . canvasy ( 360 * 2 ) ) ) ra_cen = math . degrees ( ( ra2 + ra1 ) / 2.0 ) dec_cen = math . degrees ( ( dec2 + dec1 ) / 2.0 ) width = 180 height = 90 date = mpc . Time ( self . date . get ( ) , scale = 'utc' ) . iso table = cadc . cfht_megacam_tap_query ( ra_cen , dec_cen , width , height , date = date ) for row in table : ra = row [ 'RAJ2000' ] dec = row [ 'DEJ2000' ] ( x , y ) = self . p2s ( ( math . radians ( ra ) , math . radians ( dec ) ) ) event = MyEvent ( x , y ) self . create_pointing ( event , label_text = "" )
Retrieve the MEGACAM pointings that overlap with the current FOV and plot .
47,835
def doplot ( self ) : w = self w . delete ( ALL ) w . coord_grid ( ) w . objList . delete ( 0 , END ) self . _plot ( )
Clear the plot and then redraw it .
47,836
def ec2eq ( self ) : import math from math import sin , cos from math import asin as arcsin from math import atan2 as arctan2 from math import acos as arccos eb = self . eb el = self . el ob = math . radians ( 23.439281 ) dec = arcsin ( sin ( eb ) * cos ( ob ) + cos ( eb ) * sin ( ob ) * sin ( el ) ) sra = ( sin ( dec ) * cos ( ob ) - sin ( eb ) ) / ( cos ( dec ) * sin ( ob ) ) cra = cos ( el ) * cos ( eb ) / cos ( dec ) if sra < 1 and sra > - 1 : sa = arcsin ( sra ) else : sa = 0 ca = arccos ( cra ) tsa = sa tca = ca if tsa < 0 : ca = 2.0 * math . pi - ca if tca >= math . pi / 2.0 : sa = math . pi - sa if ca >= math . pi * 2.0 : ca = ca - math . pi * 2.0 self . tsa = sra self . tca = cra self . ra = ca self . dec = dec
Convert ecliptic coordinates to equatorial coordinates
47,837
def plot_line ( axes , fname , ltype ) : x = np . genfromtxt ( fname , unpack = True ) axes . plot ( x [ 0 ] , x [ 1 ] , ltype )
plot the ecliptic plane line on the given axes .
47,838
def apmag_at_absmag ( H , d , phi = 1 ) : d_observer = 1. m_r = H + 2.5 * math . log10 ( ( d ** 4 ) / ( phi * d_observer ** 4 ) ) print ( "m_r = {:2.2f} for a H = {} TNO at {} AU at opposition." . format ( m_r , H , d ) ) return m_r
Calculate the apparent magnitude of a TNO given its absolute magnitude H for a given distance .
47,839
def scci_cmd ( host , userid , password , cmd , port = 443 , auth_method = 'basic' , client_timeout = 60 , do_async = True , ** kwargs ) : auth_obj = None try : protocol = { 80 : 'http' , 443 : 'https' } [ port ] auth_obj = { 'basic' : requests . auth . HTTPBasicAuth ( userid , password ) , 'digest' : requests . auth . HTTPDigestAuth ( userid , password ) } [ auth_method . lower ( ) ] except KeyError : raise SCCIInvalidInputError ( ( "Invalid port %(port)d or " + "auth_method for method %(auth_method)s" ) % { 'port' : port , 'auth_method' : auth_method } ) try : header = { 'Content-type' : 'application/x-www-form-urlencoded' } if kwargs . get ( 'upgrade_type' ) == 'irmc' : with open ( cmd , 'rb' ) as file : data = file . read ( ) config_type = '/irmcupdate?flashSelect=255' elif kwargs . get ( 'upgrade_type' ) == 'bios' : with open ( cmd , 'rb' ) as file : data = file . read ( ) config_type = '/biosupdate' else : data = cmd config_type = '/config' r = requests . post ( protocol + '://' + host + config_type , data = data , headers = header , verify = False , timeout = client_timeout , allow_redirects = False , auth = auth_obj ) if not do_async : time . sleep ( 5 ) if DEBUG : print ( cmd ) print ( r . text ) print ( "do_async = %s" % do_async ) if r . status_code not in ( 200 , 201 ) : raise SCCIClientError ( ( 'HTTP PROTOCOL ERROR, STATUS CODE = %s' % str ( r . status_code ) ) ) result_xml = ET . fromstring ( r . text ) status = result_xml . find ( "./Value" ) error = result_xml . find ( "./Error" ) message = result_xml . find ( "./Message" ) if not int ( status . text ) == 0 : raise SCCIClientError ( ( 'SCCI PROTOCOL ERROR, STATUS CODE = %s, ' 'ERROR = %s, MESSAGE = %s' % ( str ( status . text ) , error . text , message . text ) ) ) else : return r except IOError as input_error : raise SCCIClientError ( input_error ) except ET . ParseError as parse_error : raise SCCIClientError ( parse_error ) except requests . exceptions . 
RequestException as requests_exception : raise SCCIClientError ( requests_exception )
execute SCCI command
47,840
def get_client ( host , userid , password , port = 443 , auth_method = 'basic' , client_timeout = 60 , ** kwargs ) : return functools . partial ( scci_cmd , host , userid , password , port = port , auth_method = auth_method , client_timeout = client_timeout , ** kwargs )
get SCCI command partial function
47,841
def get_virtual_cd_set_params_cmd ( remote_image_server , remote_image_user_domain , remote_image_share_type , remote_image_share_name , remote_image_deploy_iso , remote_image_username , remote_image_user_password ) : cmd = _VIRTUAL_MEDIA_CD_SETTINGS % ( remote_image_server , remote_image_user_domain , remote_image_share_type , remote_image_share_name , remote_image_deploy_iso , remote_image_username , remote_image_user_password ) return cmd
get Virtual CD Media Set Parameters Command
47,842
def get_virtual_fd_set_params_cmd ( remote_image_server , remote_image_user_domain , remote_image_share_type , remote_image_share_name , remote_image_floppy_fat , remote_image_username , remote_image_user_password ) : cmd = _VIRTUAL_MEDIA_FD_SETTINGS % ( remote_image_server , remote_image_user_domain , remote_image_share_type , remote_image_share_name , remote_image_floppy_fat , remote_image_username , remote_image_user_password ) return cmd
get Virtual FD Media Set Parameters Command
47,843
def get_report ( host , userid , password , port = 443 , auth_method = 'basic' , client_timeout = 60 ) : auth_obj = None try : protocol = { 80 : 'http' , 443 : 'https' } [ port ] auth_obj = { 'basic' : requests . auth . HTTPBasicAuth ( userid , password ) , 'digest' : requests . auth . HTTPDigestAuth ( userid , password ) } [ auth_method . lower ( ) ] except KeyError : raise SCCIInvalidInputError ( ( "Invalid port %(port)d or " + "auth_method for method %(auth_method)s" ) % { 'port' : port , 'auth_method' : auth_method } ) try : r = requests . get ( protocol + '://' + host + '/report.xml' , verify = False , timeout = ( 10 , client_timeout ) , allow_redirects = False , auth = auth_obj ) if r . status_code not in ( 200 , 201 ) : raise SCCIClientError ( ( 'HTTP PROTOCOL ERROR, STATUS CODE = %s' % str ( r . status_code ) ) ) root = ET . fromstring ( r . text ) return root except ET . ParseError as parse_error : raise SCCIClientError ( parse_error ) except requests . exceptions . RequestException as requests_exception : raise SCCIClientError ( requests_exception )
get iRMC report
47,844
def get_essential_properties ( report , prop_keys ) : v = { } v [ 'memory_mb' ] = int ( report . find ( './System/Memory/Installed' ) . text ) v [ 'local_gb' ] = sum ( [ int ( int ( size . text ) / 1024 ) for size in report . findall ( './/PhysicalDrive/ConfigurableSize' ) ] ) v [ 'cpus' ] = sum ( [ int ( cpu . find ( './CoreNumber' ) . text ) for cpu in report . find ( './System/Processor' ) if cpu . find ( './CoreNumber' ) is not None ] ) v [ 'cpu_arch' ] = 'x86_64' return { k : v [ k ] for k in prop_keys }
get essential properties
47,845
def get_capabilities_properties ( d_info , capa_keys , gpu_ids , fpga_ids = None , ** kwargs ) : snmp_client = snmp . SNMPClient ( d_info [ 'irmc_address' ] , d_info [ 'irmc_snmp_port' ] , d_info [ 'irmc_snmp_version' ] , d_info [ 'irmc_snmp_community' ] , d_info [ 'irmc_snmp_security' ] ) try : v = { } if 'rom_firmware_version' in capa_keys : v [ 'rom_firmware_version' ] = snmp . get_bios_firmware_version ( snmp_client ) if 'irmc_firmware_version' in capa_keys : v [ 'irmc_firmware_version' ] = snmp . get_irmc_firmware_version ( snmp_client ) if 'server_model' in capa_keys : v [ 'server_model' ] = snmp . get_server_model ( snmp_client ) if kwargs . get ( 'sleep_flag' , False ) and any ( k in capa_keys for k in ( 'pci_gpu_devices' , 'cpu_fpga' ) ) : time . sleep ( 5 ) if 'pci_gpu_devices' in capa_keys : v [ 'pci_gpu_devices' ] = ipmi . get_pci_device ( d_info , gpu_ids ) if fpga_ids is not None and 'cpu_fpga' in capa_keys : v [ 'cpu_fpga' ] = ipmi . get_pci_device ( d_info , fpga_ids ) if 'trusted_boot' in capa_keys : v [ 'trusted_boot' ] = ipmi . get_tpm_status ( d_info ) return v except ( snmp . SNMPFailure , ipmi . IPMIFailure ) as err : raise SCCIClientError ( 'Capabilities inspection failed: %s' % err )
get capabilities properties
47,846
def get_firmware_upgrade_status ( irmc_info , upgrade_type ) : host = irmc_info . get ( 'irmc_address' ) userid = irmc_info . get ( 'irmc_username' ) password = irmc_info . get ( 'irmc_password' ) port = irmc_info . get ( 'irmc_port' , 443 ) auth_method = irmc_info . get ( 'irmc_auth_method' , 'digest' ) client_timeout = irmc_info . get ( 'irmc_client_timeout' , 60 ) auth_obj = None try : protocol = { 80 : 'http' , 443 : 'https' } [ port ] auth_obj = { 'basic' : requests . auth . HTTPBasicAuth ( userid , password ) , 'digest' : requests . auth . HTTPDigestAuth ( userid , password ) } [ auth_method . lower ( ) ] except KeyError : raise SCCIInvalidInputError ( ( "Invalid port %(port)d or " + "auth_method for method %(auth_method)s" ) % { 'port' : port , 'auth_method' : auth_method } ) try : if upgrade_type == 'bios' : upgrade_type = '/biosprogress' elif upgrade_type == 'irmc' : upgrade_type = '/irmcprogress' r = requests . get ( protocol + '://' + host + upgrade_type , verify = False , timeout = ( 10 , client_timeout ) , allow_redirects = False , auth = auth_obj ) if r . status_code not in ( 200 , 201 ) : raise SCCIClientError ( ( 'HTTP PROTOCOL ERROR, STATUS CODE = %s' % str ( r . status_code ) ) ) upgrade_status_xml = ET . fromstring ( r . text ) return upgrade_status_xml except ET . ParseError as parse_error : raise SCCIClientError ( parse_error )
get firmware upgrade status of bios or irmc
47,847
def on_toggle_autoplay_key ( self ) : if self . autoplay_manager . is_running ( ) : self . autoplay_manager . stop_autoplay ( ) self . view . set_autoplay ( False ) else : self . autoplay_manager . start_autoplay ( ) self . view . set_autoplay ( True )
The user has pressed the keybind for toggling autoplay .
47,848
def on_do_accept ( self , minor_planet_number , provisional_name , note1 , note2 , date_of_obs , ra , dec , obs_mag , obs_mag_err , band , observatory_code , comment ) : note1_code = note1 . split ( " " ) [ 0 ] note2_code = note2 . split ( " " ) [ 0 ] self . view . close_accept_source_dialog ( ) self . model . set_current_source_name ( provisional_name ) source_cutout = self . model . get_current_cutout ( ) mpc_observation = mpc . Observation ( null_observation = False , provisional_name = provisional_name , note1 = note1_code , note2 = note2_code , date = date_of_obs , ra = ra , dec = dec , mag = obs_mag , mag_err = obs_mag_err , band = band , observatory_code = observatory_code , discovery = self . is_discovery , comment = comment , xpos = source_cutout . reading . x , ypos = source_cutout . reading . y , frame = source_cutout . reading . obs . rawname , astrometric_level = source_cutout . astrom_header . get ( 'ASTLEVEL' , None ) ) data = self . model . get_current_workunit ( ) . data key = mpc_observation . comment . frame . strip ( ) data . mpc_observations [ key ] = mpc_observation self . model . get_writer ( ) . write ( mpc_observation ) self . model . accept_current_item ( ) reset_frame = False if self . model . get_current_workunit ( ) . get_current_source_readings ( ) . is_on_last_item ( ) : self . view . clear ( ) reset_frame = True self . model . next_item ( ) if reset_frame : self . view . frame ( 1 )
After a source has been mark for acceptance create an MPC Observation record .
47,849
def on_load_comparison(self, research=False):
    """Display the comparison image for the current cutout.

    :param research: when True, discard the cached comparison-image index
        so that a fresh comparison image is searched for.
    """
    cutout = self.model.get_current_cutout()
    if research:
        cutout.comparison_image_index = None
    comparison_image = cutout.comparison_image
    if comparison_image is None:
        # Python 2 print statement: report which candidate failed to load.
        print "Failed to load comparison image: {}".format(cutout.comparison_image_list[cutout.comparison_image_index])
    else:
        # NOTE(review): the source's indentation was lost; the following
        # four calls are assumed to all belong to the success branch —
        # confirm against the original.
        self.view.display(cutout.comparison_image, self.use_pixel_coords)
        self.view.align(self.model.get_current_cutout(),
                        self.model.get_current_reading(),
                        self.model.get_current_source())
        self.model.get_current_workunit().previous_obs()
        self.model.acknowledge_image_displayed()
Display the comparison image
47,850
def apply(self, reboot=False):
    """Send the current VIOM table to the iRMC as a new profile.

    :param reboot: when True, request a boot as part of applying.
    """
    root = self.root
    root.use_virtual_addresses = True
    root.manage.manage = True
    root.mode = 'new'
    root.init_boot = reboot
    self.client.set_profile(root.get_json())
Apply the configuration to iRMC .
47,851
def terminate(self, reboot=False):
    """Remove the VIOM configuration from the iRMC.

    :param reboot: when True, request a boot as part of the deletion.
    """
    root = self.root
    root.manage.manage = False
    root.mode = 'delete'
    root.init_boot = reboot
    self.client.set_profile(root.get_json())
Delete VIOM configuration from iRMC .
47,852
def set_lan_port(self, port_id, mac=None):
    """Register a plain LAN port (optionally with a virtual MAC).

    Updates the port in place when it already exists in the table,
    otherwise creates and adds a new LAN port entry.
    """
    handler = _parse_physical_port_id(port_id)
    existing = self._find_port(handler)
    if existing:
        handler.set_lan_port(existing, mac)
    else:
        self._add_port(handler, handler.create_lan_port(mac))
Set LAN port information to configuration .
47,853
def set_iscsi_volume(self, port_id, initiator_iqn, initiator_dhcp=False,
                     initiator_ip=None, initiator_netmask=None,
                     target_dhcp=False, target_iqn=None, target_ip=None,
                     target_port=3260, target_lun=0, boot_prio=1,
                     chap_user=None, chap_secret=None,
                     mutual_chap_secret=None):
    """Register an iSCSI boot volume on the given physical port.

    Builds an iSCSI boot descriptor from the initiator/target/CHAP
    parameters; an existing port entry is updated in place, otherwise a
    new iSCSI port is created and added to the configuration.
    """
    # Netmask is normalized via _convert_netmask when provided.
    initiator_netmask = (_convert_netmask(initiator_netmask)
                         if initiator_netmask else None)
    port_handler = _parse_physical_port_id(port_id)
    iscsi_boot = _create_iscsi_boot(
        initiator_iqn,
        initiator_dhcp=initiator_dhcp,
        initiator_ip=initiator_ip,
        initiator_netmask=initiator_netmask,
        target_dhcp=target_dhcp,
        target_iqn=target_iqn,
        target_ip=target_ip,
        target_port=target_port,
        target_lun=target_lun,
        boot_prio=boot_prio,
        chap_user=chap_user,
        chap_secret=chap_secret,
        mutual_chap_secret=mutual_chap_secret)
    port = self._find_port(port_handler)
    if port:
        port_handler.set_iscsi_port(port, iscsi_boot)
    else:
        port = port_handler.create_iscsi_port(iscsi_boot)
        self._add_port(port_handler, port)
Set iSCSI volume information to configuration .
47,854
def set_fc_volume(self, port_id, target_wwn, target_lun=0, boot_prio=1,
                  initiator_wwnn=None, initiator_wwpn=None):
    """Register a FibreChannel boot volume on the given physical port.

    An existing port entry is updated in place; otherwise a new FC port
    is created and added to the configuration.
    """
    handler = _parse_physical_port_id(port_id)
    fc_boot = elcm.FCBoot(boot_prio=boot_prio, boot_enable=True)
    fc_boot.add_target(elcm.FCTarget(target_wwn, target_lun))
    existing = self._find_port(handler)
    if existing:
        handler.set_fc_port(existing, fc_boot,
                            wwnn=initiator_wwnn, wwpn=initiator_wwpn)
    else:
        new_port = handler.create_fc_port(fc_boot,
                                          wwnn=initiator_wwnn,
                                          wwpn=initiator_wwpn)
        self._add_port(handler, new_port)
Set FibreChannel volume information to configuration .
47,855
def _pad_former_ports(self, port_handler):
    """Backfill plain LAN ports for every lower port index on the same card.

    Some cards require all earlier port slots to exist before a higher
    index can be configured; this creates the missing ones as bare LAN
    ports when the handler reports padding is needed.
    """
    if not port_handler.need_padding():
        return
    for earlier_idx in range(1, port_handler.port_idx):
        pad_handler = port_handler.__class__(
            port_handler.slot_type, port_handler.card_type,
            port_handler.slot_idx, port_handler.card_idx, earlier_idx)
        if self._find_port(pad_handler):
            continue
        self._add_port(pad_handler, pad_handler.create_lan_port())
Create ports with former port index .
47,856
def chunk_size_samples(sf, buf):
    """Choose a safe chunk size in samples for reading via virtual IO.

    Works around libsndfile's format-dependent behaviour with the virtual
    IO API by backing off six seconds from the buffer's total duration
    (never going below one second).
    """
    bytes_per_frame = _lookup[sf.subtype] * sf.channels
    bytes_per_second = bytes_per_frame * sf.samplerate
    total_seconds = len(buf) / bytes_per_second
    usable_seconds = max(1, total_seconds - 6)
    return int(usable_seconds * sf.samplerate)
Black magic to account for the fact that libsndfile's behavior varies depending on file format when using the virtual IO API.
47,857
# Accept both int and (on Python 2) long without crashing on Python 3,
# where the name `long` no longer exists.
try:
    _integer_types = (int, long)  # noqa: F821 -- Python 2 only
except NameError:
    _integer_types = (int,)


def encode(number, alphabet):
    """Convert an integer to a base-n string, n = len(alphabet).

    :param number: integer to convert (may be negative).
    :param alphabet: ordered digits of the target base.
    :returns: string representation, prefixed with '-' when negative.
    :raises TypeError: when number is not an integer.
    """
    if not isinstance(number, _integer_types):
        raise TypeError("Number must be an integer.")
    base_n = ""
    sign = ""
    if number < 0:
        sign = "-"
        number = -number
    # Single-digit fast path (also handles 0, which the loop below cannot).
    if 0 <= number < len(alphabet):
        return sign + alphabet[number]
    while number != 0:
        number, i = divmod(number, len(alphabet))
        base_n = alphabet[i] + base_n
    return sign + base_n
Converts an integer to a base n string where n is the length of the provided alphabet .
47,858
def categorical(x, mu=255, normalize=True):
    """Mu-law compress audio samples and one-hot encode into mu+1 bins.

    :param x: ArrayWithUnits of audio samples.
    :param mu: number of quantization steps minus one.
    :param normalize: scale x to unit peak before compression.
    :returns: ArrayWithUnits one-hot tensor with an extra categorical axis.
    """
    if normalize:
        mx = x.max()
        # where= guards against division by zero for an all-zero block.
        x = np.divide(x, mx, where=mx != 0)
    x = mu_law(x)
    # Map from [-1, 1] into integer bins [0, mu].
    x = (x - x.min()) * 0.5
    x = (x * mu).astype(np.uint8)
    # np.prod replaces np.product, which is deprecated and removed in
    # NumPy 2.0; the value is identical.
    c = np.zeros((np.prod(x.shape), mu + 1), dtype=np.uint8)
    c[np.arange(len(c)), x.flatten()] = 1
    return ArrayWithUnits(
        c.reshape(x.shape + (mu + 1,)),
        x.dimensions + (IdentityDimension(),))
Mu - law compress a block of audio samples and convert them into a categorical distribution
47,859
def inverse_categorical(x, mu=255):
    """Invert a categorical (one-hot) encoding back into audio samples."""
    flattened = x.reshape((-1, x.shape[-1]))
    bins = np.argmax(flattened, axis=1).astype(np.float32)
    # Map bin indices back into [-0.5, 0.5] before mu-law expansion.
    bins = (bins / mu) - 0.5
    samples = inverse_mu_law(bins, mu=mu).reshape(x.shape[:-1])
    return ArrayWithUnits(samples, x.dimensions[:2])
Invert categorical samples
47,860
def synthesize(self, duration, freqs_in_hz=(440.,)):
    """Synthesize one or more summed sine waves.

    :param duration: output length (a Seconds-compatible quantity).
    :param freqs_in_hz: frequencies of the sines; the mix is scaled by
        1/len(freqs) so it stays within [-1, 1]. The default is now a
        tuple: the original list default was a mutable default argument.
    :returns: AudioSamples at this synthesizer's samplerate.
    """
    freqs = np.array(freqs_in_hz)
    # 1.0/ forces true division even without `from __future__ import
    # division` on Python 2 (1/len would truncate to 0 for >1 freqs).
    scaling = 1.0 / len(freqs)
    sr = int(self.samplerate)
    cps = freqs / sr  # cycles per sample for each frequency
    ts = (duration / Seconds(1)) * sr  # total number of samples
    ranges = np.array([np.arange(0, ts * c, c) for c in cps])
    raw = (np.sin(ranges * (2 * np.pi)) * scaling).sum(axis=0)
    return AudioSamples(raw, self.samplerate)
Synthesize one or more sine waves
47,861
def synthesize(self, duration, tick_frequency):
    """Synthesize periodic ticks: white-noise bursts with a decaying envelope.

    :param duration: total output length (a Seconds-compatible quantity).
    :param tick_frequency: spacing of ticks (a Seconds-compatible quantity
        used as the divisor of one second).
    :returns: AudioSamples at this synthesizer's samplerate.
    """
    sr = self.samplerate.samples_per_second
    # One tick is 0.1 s of uniform white noise with a linear fade-out.
    tick = np.random.uniform(low=-1., high=1., size=int(sr * .1))
    tick *= np.linspace(1, 0, len(tick))
    samples = np.zeros(int(sr * (duration / Seconds(1))))
    ticks_per_second = Seconds(1) / tick_frequency
    step = int(sr // ticks_per_second)
    for i in range(0, len(samples), step):
        # Clip the final tick at the end of the buffer.
        size = len(samples[i:i + len(tick)])
        samples[i:i + len(tick)] += tick[:size]
    return AudioSamples(samples, self.samplerate)
Synthesize periodic ticks generated from white noise and an envelope
47,862
def synthesize(self, duration):
    """Synthesize uniform white noise of the requested duration."""
    sr = self.samplerate.samples_per_second
    n_samples = int(sr * (duration / Seconds(1)))
    noise = np.random.uniform(low=-1., high=1., size=n_samples)
    return AudioSamples(noise, self.samplerate)
Synthesize white noise
47,863
def query_ssos(self):
    """Flush the MPC file built so far and spawn a follow-up work unit from it."""
    self._ssos_queried = True
    saved_filename = self.save()
    return self.builder.build_workunit(saved_filename)
Use the MPC file that has been built up in processing this work unit to generate another workunit .
47,864
def save(self):
    """Flush pending observations to disk and release the writer.

    :returns: the filename the observations were written to.
    """
    writer = self.get_writer()
    writer.flush()
    written_filename = writer.get_filename()
    writer.close()
    # Drop the cached writer so a fresh one is created next time.
    self._writer = None
    return written_filename
Update the SourceReading information for the currently recorded observations and then flush those to a file.
47,865
def get_writer(self):
    """Lazily create (and cache) an MPC writer for this work unit.

    The output filename is the unit's base name plus a counter chosen so
    successive TRACK outputs in the output context do not collide.

    :returns: the cached writer instance.
    """
    if self._writer is None:
        suffix = tasks.get_suffix(tasks.TRACK_TASK)
        try:
            # Strip any existing ".<counter><suffix>" tail from the name.
            base_name = re.search(
                r"(?P<base_name>.*?)\.\d*{}".format(suffix),
                self.filename).group('base_name')
        except AttributeError:
            # re.search returned None: the filename carries no counter yet.
            # (The original bare `except:` could mask unrelated errors.)
            base_name = os.path.splitext(self.filename)[0]
        mpc_filename_pattern = self.output_context.get_full_path(
            "{}.?{}".format(base_name, suffix))
        mpc_file_count = len(glob(mpc_filename_pattern))
        mpc_filename = "{}.{}{}".format(base_name, mpc_file_count, suffix)
        self._writer = self._create_writer(mpc_filename)
    return self._writer
Get a writer .
47,866
def _filter ( self , filename ) : return self . name_filter is not None and re . search ( self . name_filter , filename ) is None
Return True if the filename doesn't match the name_filter regex and should be filtered out of the list.
47,867
def get_workunit(self, ignore_list=None):
    """Fetch, lock and build the next available unit of work.

    Candidate files are skipped when they fail the name filter, are empty,
    are already done, or cannot be locked (another process holds them).

    :param ignore_list: filenames to exclude from consideration.
    :returns: a work unit built from the first eligible file.
    :raises NoAvailableWorkException: when no eligible file remains.
    """
    if ignore_list is None:
        ignore_list = []
    potential_files = self.get_potential_files(ignore_list)
    while len(potential_files) > 0:
        potential_file = self.select_potential_file(potential_files)
        potential_files.remove(potential_file)
        if self._filter(potential_file):
            continue
        if self.directory_context.get_file_size(potential_file) == 0:
            continue
        if self.progress_manager.is_done(potential_file):
            self._done.append(potential_file)
            continue
        else:
            try:
                self.progress_manager.lock(potential_file)
            except FileLockedException:
                # Someone else is working on this file; try the next one.
                continue
        self._already_fetched.append(potential_file)
        return self.builder.build_workunit(
            self.directory_context.get_full_path(potential_file))
    logger.info("No eligible workunits remain to be fetched.")
    raise NoAvailableWorkException()
Gets a new unit of work .
47,868
def move_discovery_to_front(self, data):
    """Reorder readings so the discovery triplet comes first.

    The three readings starting at the discovery index are moved to the
    front; everything else keeps its relative order.
    """
    readings = self.get_readings(data)
    start = self.get_discovery_index(data)
    triplet = readings[start:start + 3]
    remainder = readings[:start] + readings[start + 3:]
    self.set_readings(data, triplet + remainder)
Moves the discovery triplet to the front of the reading list . Leaves everything else in the same order .
47,869
def TAPQuery(RAdeg=180.0, DECdeg=0.0, width=1, height=1):
    """Query the CADC Megacam table for all observations inside a box.

    :returns: a file-like object containing the VOTable response.
    """
    # NOTE(review): QUERY is an empty tuple here, so .format() on the next
    # line raises AttributeError — the ADQL query template appears to have
    # been lost from this source; restore it before using this function.
    QUERY = ()
    QUERY = QUERY.format(RAdeg, DECdeg, width, height)
    data = {"QUERY": QUERY,
            "REQUEST": "doQuery",
            "LANG": "ADQL",
            "FORMAT": "votable"}
    url = "http://www.cadc.hia.nrc.gc.ca/tap/sync"
    # Python 2 print statement (debugging aid).
    print url, data
    return urllib.urlopen(url, urllib.urlencode(data))
Do a query of the CADC Megacam table. Get all observations inside the box. Returns a file-like object.
47,870
def get_flipped_ext(file_id, ccd):
    """Fetch one CCD extension of an exposure, flipping low-numbered CCDs.

    :param file_id: exposure identifier passed to MOPfits.adGet.
    :param ccd: CCD/extension number.
    :returns: local filename of the (possibly flipped) image, or None when
        the file is not readable afterwards.
    """
    import MOPfits
    import os, shutil
    filename = MOPfits.adGet(file_id, extno=int(ccd))
    if int(ccd) < 18:
        # CCDs below 18 are flipped in both axes with imcopy; presumably
        # these chips are stored rotated 180 degrees — confirm.
        tfname = filename + "F"
        shutil.move(filename, tfname)
        os.system("imcopy %s[-*,-*] %s" % (tfname, filename))
        os.unlink(tfname)
    if not os.access(filename, os.R_OK):
        return (None)
    return (filename)
Given a list of exposure numbers and CCD get them from the DB
47,871
def get_file_ids(object):
    """Return the exposure file_ids recorded for *object* in the measure table."""
    import MOPdbaccess
    connection = MOPdbaccess.connect('cfeps', 'cfhls', dbSystem='MYSQL')
    cursor = connection.cursor()
    # Parameterized query: the DB driver escapes the provisional name.
    cursor.execute("SELECT file_id FROM measure WHERE provisional LIKE %s",
                   (object,))
    return cursor.fetchall()
Get the exposures for a particular object in the measure table.
47,872
def main():
    """CLI entry point: merge the astrometric header into an exposure image.

    Downloads the image and its astrometric header, applies the header via
    run_update_header, and optionally copies the result back to VOSpace,
    recording success/failure status for the task.

    :returns: 0 on success; the error message string on failure.
    """
    parser = argparse.ArgumentParser(description='replace image header')
    parser.add_argument('--extname', help='name of extension to in header')
    parser.add_argument('expnum', type=str, help='exposure to update')
    parser.add_argument('-r', '--replace', action='store_true',
                        help='store modified image back to VOSpace?')
    parser.add_argument('-v', '--verbose', action='store_true')
    parser.add_argument('--debug', action='store_true')
    parser.add_argument('--force', action='store_true',
                        help="Re-run even if previous success recorded")
    parser.add_argument('--dbimages', help="VOSpace DATA storage area.",
                        default="vos:OSSOS/dbimages")
    args = parser.parse_args()
    task = util.task()
    dependency = 'preproc'  # NOTE(review): unused local — confirm intent
    prefix = ""
    storage.DBIMAGES = args.dbimages
    level = logging.CRITICAL
    message_format = "%(message)s"
    if args.verbose:
        level = logging.INFO
    if args.debug:
        level = logging.DEBUG
        message_format = "%(module)s %(funcName)s %(lineno)s %(message)s"
    logging.basicConfig(level=level, format=message_format)
    storage.set_logger(task, prefix, args.expnum, None, None, False)
    message = storage.SUCCESS
    expnum = args.expnum
    exit_status = 0
    try:
        if storage.get_status(task, prefix, expnum, "p", 36) and not args.force:
            logging.info("Already updated, skipping")
            sys.exit(0)
        image_hdulist = storage.get_image(args.expnum, return_file=False)
        ast_hdulist = storage.get_astheader(expnum, ccd=None)
        run_update_header(image_hdulist, ast_hdulist)
        image_filename = os.path.basename(storage.get_uri(expnum))
        image_hdulist.writeto(image_filename)
        # NOTE(review): indentation was lost in this source; the success
        # set_status call is assumed to belong with the replace step —
        # confirm against the original script.
        if args.replace:
            dest = storage.dbimages_uri(expnum)
            storage.copy(image_filename, dest)
            storage.set_status('update_header', "", expnum, 'p', 36, message)
    except Exception as e:
        message = str(e)
        if args.replace:
            storage.set_status(task, prefix, expnum, 'p', 36, message)
        # NOTE(review): on failure the return value is the message string,
        # not an integer exit code — confirm callers expect this.
        exit_status = message
        logging.error(message)
    return exit_status
Do the script .
47,873
def handle_error(self, error, download_request):
    """Dispatch a download error to the appropriate handler.

    Permission-denied errors (EACCES) are treated as certificate problems;
    everything else goes through the general download-error path.
    """
    message = str(error)
    permission_denied = getattr(error, "errno", None) == errno.EACCES
    if permission_denied:
        self.handle_certificate_problem(message)
    else:
        self.handle_general_download_error(message, download_request)
Checks what error occured and looks for an appropriate solution .
47,874
def get_current_observation_date(self):
    """Mid-exposure time of the current observation as an MPC date string.

    Reads MJD-OBS (exposure start) and EXPTIME from the cutout's last HDU
    header and advances by half the exposure time.
    """
    header = self.get_current_cutout().hdulist[-1].header
    mjd_obs = float(header.get('MJD-OBS'))
    exptime = float(header.get('EXPTIME'))
    mpc_date = Time(mjd_obs,
                    format='mjd',
                    scale='utc',
                    precision=config.read('MPC.DATE_PRECISION'))
    # Advance from start of exposure to mid-exposure.
    mpc_date += TimeDelta(exptime * units.second) / 2.0
    mpc_date = mpc_date.mpc
    return mpc_date
Get the date of the current observation by looking in the header of the observation for the DATE and EXPTIME keywords .
47,875
def parse_eff ( filename ) : blocks = [ ] block = { } with open ( filename ) as efile : for line in efile . readlines ( ) : if line . lstrip ( ) . startswith ( "#" ) : continue keyword = line . lstrip ( ) . split ( "=" ) [ 0 ] funcs = { 'square_param' : parse_square_param , 'rates' : rates } block [ keyword ] = funcs . get ( keyword , dummy ) ( line ) if keyword == 'mag_lim' : blocks . append ( block ) block = { } return blocks
Parse through Jean - Marcs OSSSO . eff files . The efficiency files comes in chunks meant to be used at different rates of motion .
47,876
def read(keypath, configfile=None):
    """Read a value from the configuration file.

    AppConfig instances are cached per configfile so repeated reads do
    not re-parse the file.
    """
    try:
        appconfig = _configs[configfile]
    except KeyError:
        appconfig = AppConfig(configfile=configfile)
        _configs[configfile] = appconfig
    return appconfig.read(keypath)
Reads a value from the configuration file .
47,877
def _cdata_header(self, colsep="|"):
    """Build four visual-guide lines for the CDATA section header.

    The lines are: field names, field formats, a repeating 0-9 column
    ruler, and a dashed underline — each padded to the field widths.

    :param colsep: separator placed between column entries.
    :returns: list of four header line strings.
    """
    fields = self.fields
    header_lines = []
    # Line 1: field names.
    line = ""
    for fieldName in self.field_names:
        width = int(fields[fieldName]['attr']['width'])
        line += self._entry(fieldName, width, colsep)
    header_lines.append(line)
    # Line 2: the printf-style format of each field.
    line = ""
    for fieldName in self.field_names:
        width = int(fields[fieldName]['attr']['width'])
        line += self._entry(fields[fieldName]['attr']['format'],
                            width=width, colsep=colsep)
    header_lines.append(line)
    # Line 3: 0123456789 ruler so column positions can be counted by eye.
    line = ""
    for fieldName in self.field_names:
        width = int(fields[fieldName]['attr']['width'])
        (l, m) = divmod(width, 10)
        guide = ""
        for i in range(l):
            guide += "".join(map(str, range(10)))
        guide += "".join(map(str, range(m)))
        line += self._entry(guide, width=width, colsep=colsep)
    header_lines.append(line)
    # Line 4: dashed underline of each column.
    line = ""
    for fieldName in self.field_names:
        width = int(fields[fieldName]['attr']['width'])
        guide = "-" * width
        line += self._entry(guide, width=width, colsep=colsep)
    header_lines.append(line)
    return header_lines
Create a header for the CDATA section as a visual guide .
47,878
def _append_cdata(self, coordinate):
    """Append one target position (date, RA, Dec) to the ephemeris CDATA.

    RA/Dec are formatted as sexagesimal strings, round-tripped through a
    SkyCoord, and written padded to the configured field widths.
    """
    fields = self.fields
    sra = coordinate.ra.to_string(units.hour, sep=':', precision=2,
                                  pad=True)
    sdec = coordinate.dec.to_string(units.degree, sep=':', precision=1,
                                    alwayssign=True)
    # Round-trip through SkyCoord to normalize the string representation.
    coord = SkyCoord(sra + " " + sdec, unit=(units.hour, units.degree))
    sra = coord.ra.to_string(units.hour, sep=":", precision=2, pad=True)
    sdec = coord.dec.to_string(units.degree, sep=":", precision=1,
                               pad=True, alwayssign=True)
    # NOTE(review): format('iso') calls the builtin and just returns 'iso';
    # this was probably meant to be replicate(format='iso') — confirm.
    sdate = str(coordinate.obstime.replicate(format('iso')))
    self.cdata.appendData(
        self._entry(sdate, fields["DATE_UTC"]['attr']['width'],
                    colsep=self.column_separator))
    self.cdata.appendData(
        self._entry(sra, fields["RA_J2000"]['attr']['width'],
                    colsep=self.column_separator))
    self.cdata.appendData(
        self._entry(sdec, fields["DEC_J2000"]["attr"]["width"],
                    colsep=self.column_separator))
    self.cdata.appendData("\n")
Append an target location to the ephemeris listing .
47,879
def gemini_writer(self, f_handle):
    """Write a Gemini-OT-style ephemeris (a hack of JPL Horizons output).

    Emits GEMINI_HEADER, one line per coordinate with UT date, JD, RA/Dec
    and the dra/ddec rates, then GEMINI_FOOTER.
    """
    f_handle.write(GEMINI_HEADER)
    for coordinate in self.coordinates:
        # Truncate to 'YYYY-Mon-DD HH:MM' (17 characters).
        date = coordinate.obstime.datetime.strftime('%Y-%b-%d %H:%M')[:17]
        f_handle.write(
            " {:16} {:17.9f} {:27} {:+8.5f} {:+8.5f}\n".format(
                date,
                coordinate.obstime.jd,
                coordinate.to_string('hmsdms', sep=' ',
                                     precision=4, pad=True)[:27],
                float(coordinate.dra),
                float(coordinate.ddec)), )
    f_handle.write(GEMINI_FOOTER)
    return
Write out a GEMINI-formatted OT ephemeris. This is just a hack of the JPL/SSD Horizons output.
47,880
def intersect(self, other):
    """Return the frequency band where this band and *other* overlap."""
    overlap_start = max(self.start_hz, other.start_hz)
    overlap_stop = min(self.stop_hz, other.stop_hz)
    return FrequencyBand(overlap_start, overlap_stop)
Return the intersection between this frequency band and another .
47,881
def bands(self):
    """All bands in this scale, computed lazily and cached."""
    cached = self._bands
    if cached is None:
        cached = self._compute_bands()
        self._bands = cached
    return cached
An iterable of all bands in this scale
47,882
def Q(self):
    """Quality factor: center frequency divided by bandwidth for each band."""
    centers = np.array(list(self.center_frequencies))
    widths = np.array(list(self.bandwidths))
    return centers / widths
The quality factor of the scale or the ratio of center frequencies to bandwidths
47,883
def get_slice(self, frequency_band):
    """Map a FrequencyBand (or slice) onto a slice of integer band indices.

    :param frequency_band: a FrequencyBand, or a slice whose endpoints may
        be Hertz values (negative Hertz are measured back from stop_hz).
        A slice with no Hertz endpoints is returned unchanged.
    :returns: slice of integer indices selecting the intersecting bands.
    """
    index = frequency_band
    if isinstance(index, slice):
        types = {index.start.__class__, index.stop.__class__,
                 index.step.__class__}
        if Hertz not in types:
            # Plain integer slice: pass straight through.
            return index
        try:
            start = Hertz(0) if index.start is None else index.start
            if start < Hertz(0):
                start = self.stop_hz + start
            stop = self.stop_hz if index.stop is None else index.stop
            if stop < Hertz(0):
                stop = self.stop_hz + stop
            frequency_band = FrequencyBand(start, stop)
        except (ValueError, TypeError):
            # Endpoints that cannot form a band fall through to the
            # original frequency_band below.
            pass
    start_index = bisect.bisect_left(
        self.band_stops, frequency_band.start_hz)
    stop_index = bisect.bisect_left(
        self.band_starts, frequency_band.stop_hz)
    if self.always_even and (stop_index - start_index) % 2:
        # Keep the slice length even when the scale requires it.
        stop_index += 1
    return slice(start_index, stop_index)
Given a frequency band and a frequency dimension comprised of n_samples return a slice using integer indices that may be used to extract only the frequency samples that intersect with the frequency band
47,884
def _hz_to_semitones ( self , hz ) : return np . log ( hz / self . _a440 ) / np . log ( self . _a )
Convert hertz into a number of semitones above or below some reference value in this case A440
47,885
def resolve(object):
    """Resolve a source name to (ra_deg, dec_deg) via the CDS Sesame service.

    :param object: target name; embedded spaces are removed for the query.
    :returns: (ra_deg, dec_deg) in degrees, or (0, 0) on failure.
    """
    import re
    # str.replace replaces the Python-2-only string.replace module function
    # with identical behavior.
    target = object.replace(' ', '')
    # NOTE(security): the target name is interpolated into a shell command;
    # only call with trusted input, or switch to urllib to avoid injection.
    sesame_cmd = ('curl -s http://cdsweb.u-strasbg.fr/viz-bin/nph-sesame/-oI?'
                  + target)
    f = os.popen(sesame_cmd)
    lines = f.readlines()
    f.close()
    # The '%J' line of the Sesame reply carries RA and Dec in degrees.
    for line in lines:
        if re.search('%J ', line):
            result2 = line.split()
            ra_deg = float(result2[1])
            dec_deg = float(result2[2])
            return (ra_deg, dec_deg)
    return (0, 0)
Look up the name of a source using a resolver
47,886
def handle_exit_code(d, code):
    """Interpret a dialog exit code, offering to quit on Cancel/ESC.

    :returns: 0 when the current step should be abandoned, 1 to continue.
    """
    if code not in (d.DIALOG_CANCEL, d.DIALOG_ESC):
        return 1
    if code == d.DIALOG_CANCEL:
        msg = ("You chose cancel in the last dialog box. Do you want to "
               "exit this demo?")
    else:
        msg = ("You pressed ESC in the last dialog box. Do you want to "
               "exit this demo?")
    if d.yesno(msg) == d.DIALOG_OK:
        sys.exit(0)
    return 0
Sample function showing how to interpret the dialog exit codes .
47,887
def main():
    """Run the pythondialog demo, reporting dialog errors on stderr.

    Exits with status 1 on a dialog error, 0 otherwise.
    """
    try:
        demo()
    # Python 2 except syntax.
    except dialog.error, exc_instance:
        sys.stderr.write("Error:\n\n%s\n"
                         % exc_instance.complete_message())
        sys.exit(1)
    sys.exit(0)
This demo shows the main features of the pythondialog Dialog class .
47,888
def is_up(coordinate, current_time):
    """Decide whether the target is observable from CFHT at *current_time*.

    Requires the Sun below -7 degrees (night) and the target above a
    40-degree altitude horizon.

    :returns: True when the target is up, False otherwise.
    """
    cfht.date = current_time.iso.replace('-', '/')
    cfht.horizon = math.radians(-7)
    sun.compute(cfht)
    sun_rise = Time(str(sun.rise_time).replace('/', '-'))
    sun_set = Time(str(sun.set_time).replace('/', '-'))
    # Daytime: before sunset or after sunrise.
    if current_time < sun_set or current_time > sun_rise:
        return False
    fb._ra = coordinate.ra.radian
    fb._dec = coordinate.dec.radian
    cfht.horizon = math.radians(40)
    fb.compute(cfht)
    fb_rise_time = Time(str(fb.rise_time).replace('/', '-'))
    fb_set_time = Time(str(fb.set_time).replace('/', '-'))
    # NOTE(review): 'fb_set_time > fb_set_time' is always False, so only
    # the second clause ever triggers; the first comparison was probably
    # meant to involve fb_rise_time — confirm the intended window.
    if (current_time > fb_set_time > fb_set_time
            or fb_rise_time > current_time > fb_set_time):
        return False
    return True
Given the position and time, determine whether the given target is up.
47,889
def get_json(self):
    """JSON for the adapter configuration, including slots and manage table."""
    table = self.get_basic_json()
    if self.slots:
        table['Slots'] = {
            'Slot': [slot.get_json() for slot in self.slots.values()]}
    if self.manage:
        table['VIOMManage'] = self.manage.get_json()
    return table
Create JSON data for AdapterConfig .
47,890
def get_json ( self ) : json = self . get_basic_json ( ) if self . onboard_cards : json [ 'OnboardControllers' ] = { 'OnboardController' : [ c . get_json ( ) for c in self . onboard_cards . values ( ) ] } if self . addon_cards : json [ 'AddOnCards' ] = { 'AddOnCard' : [ c . get_json ( ) for c in self . addon_cards . values ( ) ] } return json
Create JSON data for slot .
47,891
def get_json(self):
    """JSON for a LAN port: boot settings plus an optional virtual MAC."""
    port_json = self.get_basic_json()
    port_json['BootProtocol'] = self.boot.BOOT_PROTOCOL
    port_json['BootPriority'] = self.boot.boot_prio
    boot_env = self.boot.get_json()
    if boot_env:
        port_json.update(boot_env)
    if self.use_virtual_addresses and self.mac:
        port_json['VirtualAddress'] = {'MAC': self.mac}
    return port_json
Create JSON data for LANPort .
47,892
def get_json(self):
    """JSON for an FC port: boot settings plus optional virtual WWNN/WWPN."""
    port_json = self.get_basic_json()
    port_json['BootProtocol'] = self.boot.BOOT_PROTOCOL
    port_json['BootPriority'] = self.boot.boot_prio
    boot_env = self.boot.get_json()
    if boot_env:
        port_json.update(boot_env)
    if self.use_virtual_addresses:
        addresses = {}
        if self.wwnn:
            addresses['WWNN'] = self.wwnn
        if self.wwpn:
            addresses['WWPN'] = self.wwpn
        if addresses:
            port_json['VirtualAddress'] = addresses
    return port_json
Create FC port .
47,893
def get_json(self):
    """JSON for a CNA port: the JSON of each of its functions."""
    port_json = self.get_basic_json()
    function_list = [func.get_json() for func in self.functions.values()]
    port_json['Functions'] = {'Function': function_list}
    return port_json
Create JSON for CNA port .
47,894
def get_json(self):
    """JSON for an FCBootEnvironment, re-indexing its targets from 1."""
    env = self.get_basic_json()
    for index, target in enumerate(self.targets, start=1):
        target.set_index(index)
    env['FCTargets'] = {
        'FCTarget': [target.get_json() for target in self.targets]}
    return {'FCBootEnvironment': env}
Create JSON for FCBootEnvironment .
47,895
def get_json(self):
    """JSON for an iSCSI initiator; with DHCP, only usage flag and IQN."""
    if not self.dhcp_usage:
        return self.get_basic_json()
    return {'DHCPUsage': self.dhcp_usage, 'Name': self.iqn}
Create JSON data for iSCSI initiator .
47,896
def get_json(self):
    """JSON for an iSCSI target, omitting address fields under DHCP."""
    data = {
        'DHCPUsage': self.dhcp_usage,
        'AuthenticationMethod': self.auth_method,
    }
    if not self.dhcp_usage:
        # Static configuration must spell out the full target address.
        data['Name'] = self.iqn
        data['IPv4Address'] = self.ip
        data['PortNumber'] = self.port
        data['BootLUN'] = self.lun
    if self.chap_user:
        data['ChapUserName'] = self.chap_user
    if self.chap_secret:
        data['ChapSecret'] = self.chap_secret
    if self.mutual_chap_secret:
        data['MutualChapSecret'] = self.mutual_chap_secret
    return data
Create JSON data for iSCSI target .
47,897
def zscale(data, contrast, min=100, max=60000):
    """Scale an image array into the display range 0-255 (inverted).

    Fits a least-squares line through a random 50x50 sample of pixel
    values (clipped to [min, max]) and uses the slope, divided by
    *contrast*, to choose the display cut levels z1/z2.

    NOTE(review): Python 2 only (xrange) and depends on the obsolete
    numarray package; random.sample here requires both image dimensions
    to be at least 50 — confirm callers guarantee this.
    """
    import random
    x = []
    for i in random.sample(xrange(data.shape[0]), 50):
        for j in random.sample(xrange(data.shape[1]), 50):
            x.append(data[i, j])
    yl = numarray.sort(numarray.clip(x, min, max))
    n = len(yl)
    ym = sum(yl) / float(n)
    xl = numarray.array(range(n))
    xm = sum(xl) / float(n)
    # Least-squares slope/intercept of sorted sample values versus rank.
    ss_xx = sum((xl - xm) * (xl - xm))
    ss_yy = sum((yl - ym) * (yl - ym))
    ss_xy = sum((xl - xm) * (yl - ym))
    b = ss_xy / ss_xx
    a = ym - b * xm
    # Cut levels straddle the sample median, spread by slope/contrast.
    z1 = yl[n / 2] + (b / contrast) * (1 - n / 2)
    z2 = yl[n / 2] + (b / contrast) * (n - n / 2)
    high = data - z1
    z2 = z2 - z1
    high = numarray.clip(high, 0, z2)
    # Invert so brighter pixels map toward 0.
    high = 256 - 256 * high / z2
    return high
Scale the data cube into the range 0 - 255
47,898
def convert(self, point):
    """Translate *point* into this converter's coordinate system.

    Subtracts the configured x/y offsets from the given (x, y) pair.
    """
    x, y = point
    shifted_x = x - self.x_offset
    shifted_y = y - self.y_offset
    logger.debug("converted {} {} ==> {} {}".format(x, y,
                                                    shifted_x, shifted_y))
    return shifted_x, shifted_y
Convert a point from one coordinate system to another .
47,899
def apply_network(network, x, chunksize=None):
    """Run *network* over the numpy array *x*, optionally in chunks.

    Moves the input to the GPU when the network lives there, and disables
    gradient tracking for the forward pass.
    """
    network_on_gpu = next(network.parameters()).is_cuda
    tensor = torch.from_numpy(x)
    with torch.no_grad():
        if network_on_gpu:
            tensor = tensor.cuda()
        if chunksize is None:
            return from_var(network(tensor))
        chunks = [
            from_var(network(tensor[start:start + chunksize]))
            for start in range(0, len(tensor), chunksize)
        ]
        return np.concatenate(chunks)
Apply a pytorch network potentially in chunks