Dataset columns: idx (int64, values 0 to 63k), question (string, lengths 61 to 4.03k), target (string, lengths 6 to 1.23k).
4,900
def restore(self):
    if len(list(self.backup.keys())) == 0:
        return
    for key in self.backup.keys():
        if key != 'WCSCDATE':
            self.__dict__[self.wcstrans[key]] = self.orig_wcs[self.backup[key]]
    self.update()
Reset the active WCS keywords to the values stored in the backup keywords.
4,901
def archive(self, prepend=None, overwrite=no, quiet=yes):
    if len(list(self.backup.keys())) > 0 and overwrite == no:
        if not quiet:
            print('WARNING: Backup WCS keywords already exist! No backup made.')
            print('         The values can only be overridden if overwrite=yes.')
        return
    if prepend is None:
        if self.prepend is not None:
            _prefix = self.prepend
        else:
            _prefix = DEFAULT_PREFIX
    else:
        _prefix = prepend
    self.prepend = _prefix
    for key in self.wcstrans.keys():
        if key != 'pixel scale':
            _archive_key = self._buildNewKeyname(key, _prefix)
        else:
            _archive_key = self.prepend.lower() + 'pscale'
        self.orig_wcs[_archive_key] = self.__dict__[self.wcstrans[key]]
        self.backup[key] = _archive_key
        self.revert[_archive_key] = key
    self.orig_wcs['WCSCDATE'] = fileutil.getLTime()
    self.backup['WCSCDATE'] = 'WCSCDATE'
    self.revert['WCSCDATE'] = 'WCSCDATE'
Create backup copies of the WCS keywords with the given prepended string. If backup keywords are already present, only update them if overwrite is set to yes; otherwise warn the user and do nothing. Set the WCSCDATE at this time as well.
4,902
def read_archive(self, header, prepend=None):
    _prefix = None
    _archive = False
    if header is not None:
        for kw in header.items():
            if kw[0][1:] in self.wcstrans.keys():
                _prefix = kw[0][0]
                _archive = True
                break
    if not _archive:
        self.archive(prepend=prepend)
        return
    if _prefix is not None:
        self.prepend = _prefix
    else:
        self.prepend = DEFAULT_PREFIX
    for key in self.wcstrans.keys():
        _archive_key = self._buildNewKeyname(key, _prefix)
        if key != 'pixel scale':
            if _archive_key in header:
                self.orig_wcs[_archive_key] = header[_archive_key]
            else:
                self.orig_wcs[_archive_key] = header[key]
            self.backup[key] = _archive_key
            self.revert[_archive_key] = key
    _cd11str = self.prepend + 'CD1_1'
    _cd21str = self.prepend + 'CD2_1'
    pscale = self.compute_pscale(self.orig_wcs[_cd11str], self.orig_wcs[_cd21str])
    _archive_key = self.prepend.lower() + 'pscale'
    self.orig_wcs[_archive_key] = pscale
    self.backup['pixel scale'] = _archive_key
    self.revert[_archive_key] = 'pixel scale'
    if 'WCSCDATE' in header:
        self.orig_wcs['WCSCDATE'] = header['WCSCDATE']
    else:
        self.orig_wcs['WCSCDATE'] = fileutil.getLTime()
    self.backup['WCSCDATE'] = 'WCSCDATE'
    self.revert['WCSCDATE'] = 'WCSCDATE'
Extract a copy of the WCS keywords from an open file header, if they have already been created, and remember the prefix used for those keywords. Otherwise, set up the current WCS keywords as the archive values.
4,903
def restoreWCS(self, prepend=None):
    image = self.rootname
    if prepend:
        _prepend = prepend
    elif self.prepend:
        _prepend = self.prepend
    else:
        _prepend = None
    fimg = fileutil.openImage(image, mode='update')
    _root, _iextn = fileutil.parseFilename(self.rootname)
    _extn = fileutil.getExtn(fimg, _iextn)
    if len(self.backup) > 0:
        for newkey in self.revert.keys():
            if newkey != 'opscale':
                _orig_key = self.revert[newkey]
                _extn.header[_orig_key] = _extn.header[newkey]
    elif _prepend:
        for key in self.wcstrans.keys():
            if key != 'pixel scale':
                _okey = self._buildNewKeyname(key, _prepend)
                if _okey in _extn.header:
                    _extn.header[key] = _extn.header[_okey]
                else:
                    print('No original WCS values found. Exiting...')
                    break
    else:
        print('No original WCS values found. Exiting...')
    fimg.close()
    del fimg
Resets the WCS values to the original values stored in the backup keywords recorded in self.backup.
4,904
def createReferenceWCS(self, refname, overwrite=yes):
    hdu = self.createWcsHDU()
    if os.path.exists(refname):
        if overwrite == yes:
            os.remove(refname)
            hdu.writeto(refname)
        else:
            wcs_append = True
            oldhdu = fits.open(refname, mode='append')
            for e in oldhdu:
                if 'extname' in e.header and e.header['extname'] == 'WCS':
                    wcs_append = False
            if wcs_append:
                oldhdu.append(hdu)
            oldhdu.close()
            del oldhdu
    else:
        hdu.writeto(refname)
    del hdu
Write out the values of the WCS keywords to the newly specified output FITS file refname.
4,905
def to_jd(year, month, day, method=None):
    method = method or 'equinox'
    if day < 1 or day > 30:
        raise ValueError("Invalid day for this calendar")
    if month > 13:
        raise ValueError("Invalid month for this calendar")
    if month == 13 and day > 5 + leap(year, method=method):
        raise ValueError("Invalid day for this month in this calendar")
    if method == 'equinox':
        return _to_jd_equinox(year, month, day)
    else:
        return _to_jd_schematic(year, month, day, method)
Obtain the Julian day from a given French Revolutionary calendar date.
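A quick round-trip sanity check for the pair of converters above; the import path is an assumption (these functions appear to come from convertdate's french_republican module), and the check only asserts that conversion is self-consistent:

    from convertdate import french_republican as fr  # assumed module path

    jd = fr.to_jd(8, 2, 18)              # 18 Brumaire, An VIII
    assert fr.from_jd(jd) == (8, 2, 18)  # conversion should round-trip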
4,906
def _to_jd_schematic(year, month, day, method):
    y0, y1, y2, y3, y4, y5 = 0, 0, 0, 0, 0, 0
    intercal_cycle_yrs, over_cycle_yrs, leap_suppression_yrs = None, None, None
    if ((method in (100, 'romme') and year < 15) or
            (method in (128, 'madler') and year < 17)):
        method = 4
    if method in (4, 'continuous'):
        y5 = -365
    elif method in (100, 'romme'):
        year = year - 13
        y5 = DAYS_IN_YEAR * 12 + 3
        leap_suppression_yrs = 100.
        leap_suppression_days = 36524
        intercal_cycle_yrs = 400.
        intercal_cycle_days = 146097
        over_cycle_yrs = 4000.
        over_cycle_days = 1460969
    elif method in (128, 'madler'):
        year = year - 17
        y5 = DAYS_IN_YEAR * 16 + 4
        leap_suppression_days = 46751
        leap_suppression_yrs = 128
    else:
        raise ValueError("Unknown leap year method. Try: continuous, romme, madler or equinox")
    if over_cycle_yrs:
        y0 = trunc(year / over_cycle_yrs) * over_cycle_days
        year = year % over_cycle_yrs
    if intercal_cycle_yrs:
        y1 = trunc(year / intercal_cycle_yrs) * intercal_cycle_days
        year = year % intercal_cycle_yrs
    if leap_suppression_yrs:
        y2 = trunc(year / leap_suppression_yrs) * leap_suppression_days
        year = year % leap_suppression_yrs
    y3 = trunc(year / LEAP_CYCLE_YEARS) * LEAP_CYCLE_DAYS
    year = year % LEAP_CYCLE_YEARS
    y4 = year * DAYS_IN_YEAR
    yj = y0 + y1 + y2 + y3 + y4 + y5
    mj = (month - 1) * 30
    return EPOCH + yj + mj + day - 1
Calculate JD using various leap-year calculation methods.
4,907
def from_jd(jd, method=None):
    method = method or 'equinox'
    if method == 'equinox':
        return _from_jd_equinox(jd)
    else:
        return _from_jd_schematic(jd, method)
Calculate the date in the French Revolutionary calendar from a Julian day. The five or six sansculottides are considered a thirteenth month in the results of this function.
4,908
def _from_jd_schematic(jd, method):
    if jd < EPOCH:
        raise ValueError("Can't convert days before the French Revolution")
    J = trunc(jd) + 0.5 - EPOCH
    y0, y1, y2, y3, y4, y5 = 0, 0, 0, 0, 0, 0
    intercal_cycle_days = leap_suppression_days = over_cycle_days = None
    if ((J <= DAYS_IN_YEAR * 12 + 3 and method in (100, 'romme')) or
            (J <= DAYS_IN_YEAR * 17 + 4 and method in (128, 'madler'))):
        method = 4
    if method in (4, 'continuous'):
        J = J + 365
        y5 = -1
    elif method in (100, 'romme'):
        y5 = 12
        J = J - DAYS_IN_YEAR * 12 - 3
        leap_suppression_yrs = 100.
        leap_suppression_days = 36524
        intercal_cycle_yrs = 400.
        intercal_cycle_days = 146097
        over_cycle_yrs = 4000.
        over_cycle_days = 1460969
    elif method in (128, 'madler'):
        y5 = 16
        J = J - DAYS_IN_YEAR * 16 - 4
        leap_suppression_yrs = 128
        leap_suppression_days = 46751
    else:
        raise ValueError("Unknown leap year method. Try: continuous, romme, madler or equinox")
    if over_cycle_days:
        y0 = trunc(J / over_cycle_days) * over_cycle_yrs
        J = J % over_cycle_days
    if intercal_cycle_days:
        y1 = trunc(J / intercal_cycle_days) * intercal_cycle_yrs
        J = J % intercal_cycle_days
    if leap_suppression_days:
        y2 = trunc(J / leap_suppression_days) * leap_suppression_yrs
        J = J % leap_suppression_days
    y3 = trunc(J / LEAP_CYCLE_DAYS) * LEAP_CYCLE_YEARS
    if J % LEAP_CYCLE_DAYS == LEAP_CYCLE_DAYS - 1:
        J = 1460
    else:
        J = J % LEAP_CYCLE_DAYS
    y4 = trunc(J / DAYS_IN_YEAR)
    if J == DAYS_IN_YEAR * 4:
        y4 = y4 - 1
        J = 365.0
    else:
        J = J % DAYS_IN_YEAR
    year = y0 + y1 + y2 + y3 + y4 + y5
    month = trunc(J / 30.)
    J = J - month * 30
    return year + 1, month + 1, trunc(J) + 1
Convert from JD using various leap-year calculation methods.
4,909
def _from_jd_equinox(jd):
    jd = trunc(jd) + 0.5
    equinoxe = premier_da_la_annee(jd)
    an = gregorian.from_jd(equinoxe)[0] - YEAR_EPOCH
    mois = trunc((jd - equinoxe) / 30.) + 1
    jour = int((jd - equinoxe) % 30) + 1
    return (an, mois, jour)
Calculate the French Revolutionary date, using the autumn equinox as day 1.
4,910
def to_jd(year, month, day):
    gyear = year + 78
    leap = isleap(gyear)
    start = gregorian.to_jd(gyear, 3, 22 - leap)
    if leap:
        Caitra = 31
    else:
        Caitra = 30
    if month == 1:
        jd = start + (day - 1)
    else:
        jd = start + Caitra
        m = month - 2
        m = min(m, 5)
        jd += m * 31
        if month >= 8:
            m = month - 7
            jd += m * 30
        jd += day - 1
    return jd
Obtain the Julian day for an Indian Civil date.
4,911
def from_jd(jd):
    start = 80
    jd = trunc(jd) + 0.5
    greg = gregorian.from_jd(jd)
    leap = isleap(greg[0])
    year = greg[0] - SAKA_EPOCH
    greg0 = gregorian.to_jd(greg[0], 1, 1)
    yday = jd - greg0
    if leap:
        Caitra = 31
    else:
        Caitra = 30
    if yday < start:
        year -= 1
        yday += Caitra + (31 * 5) + (30 * 3) + 10 + start
    yday -= start
    if yday < Caitra:
        month = 1
        day = yday + 1
    else:
        mday = yday - Caitra
        if mday < (31 * 5):
            month = trunc(mday / 31) + 2
            day = (mday % 31) + 1
        else:
            mday -= 31 * 5
            month = trunc(mday / 30) + 7
            day = (mday % 30) + 1
    return (year, month, int(day))
Calculate the Indian Civil date from a Julian day. SAKA_EPOCH is the offset in years from the Saka era to the Gregorian epoch.
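As with the other calendars in this batch, a hedged round-trip check; the convertdate.indian_civil import path is an assumption:

    from convertdate import indian_civil  # assumed module path

    jd = indian_civil.to_jd(1942, 1, 1)              # 1 Caitra 1942 (Saka era)
    assert indian_civil.from_jd(jd) == (1942, 1, 1)  # should round-trip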
4,912
def format_stack(skip=0, length=6, _sep=os.path.sep):
    return ' < '.join(
        "%s:%s:%s" % ('/'.join(f.f_code.co_filename.split(_sep)[-2:]),
                      f.f_lineno,
                      f.f_code.co_name)
        for f in islice(frame_iterator(sys._getframe(1 + skip)), length))
Returns a one-line string with the current call stack.
4,913
def get(self, deviceId):
    measurementsByName = self.measurements.get(deviceId)
    if measurementsByName is None:
        return []
    else:
        return list(measurementsByName.values())
Lists all known active measurements.
4,914
def get(self, deviceId, measurementId):
    record = self.measurements.get(deviceId)
    if record is not None:
        return record.get(measurementId)
    return None
Details the specific measurement.
4,915
def clicked(self):
    try:
        from . import teal
    except:
        teal = None
    try:
        tealGui = self._mainGuiObj
        tealGui.showStatus('Clicked "' + self.getButtonLabel() + '"', keep=1)
        pscope = self.paramInfo.scope
        pname = self.paramInfo.name
        tpo = tealGui._taskParsObj
        tup = tpo.getExecuteStrings(pscope, pname)
        code = ''
        if not tup:
            if teal:
                teal.popUpErr(tealGui.top, "No action to perform",
                              "Action Button Error")
            return
        for exname in tup:
            if '_RULES_' in tpo and exname in tpo['_RULES_'].configspec:
                ruleSig = tpo['_RULES_'].configspec[exname]
                chkArgsDict = vtor_checks.sigStrToKwArgsDict(ruleSig)
                code = chkArgsDict.get('code')
                teal.execEmbCode(pscope, pname, self.getButtonLabel(),
                                 tealGui, code)
        tealGui.debug('Finished: "' + self.getButtonLabel() + '"')
    except Exception as ex:
        msg = 'Error executing: "' + self.getButtonLabel() + '"\n' + ex.message
        msgFull = msg + '\n' + ''.join(traceback.format_exc())
        msgFull += "CODE:\n" + code
        if tealGui:
            if teal:
                teal.popUpErr(tealGui.top, msg, "Action Button Error")
            tealGui.debug(msgFull)
        else:
            if teal:
                teal.popUpErr(None, msg, "Action Button Error")
            print(msgFull)
Called when this button is clicked. Executes code from the .cfgspc file.
4,916
def tobytes(s, encoding='ascii'):
    if PY3K:
        if isinstance(s, bytes):
            return s
        else:
            return s.encode(encoding)
    else:
        if isinstance(s, unicode):
            return s.encode(encoding)
        else:
            return s
Convert string s to the bytes type, in all Pythons, even back before Python 2.6. What str means varies by PY3K or not. In Pythons before 3.0, this is technically the same as the str type in terms of the character data in memory.
4,917
def tostr(s, encoding='ascii'):
    if PY3K:
        if isinstance(s, str):
            return s
        else:
            return s.decode(encoding)
    else:
        if isinstance(s, unicode):
            return s.encode(encoding)
        else:
            return s
Convert string-like-thing s to the str type, in all Pythons, even back before Python 2.6. What str means varies by PY3K or not. In Pythons before 3.0, str and bytes are the same type. In Python 3+ this may require a decoding step.
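A minimal sketch of how the two helpers behave on Python 3 (i.e. when the module-level PY3K flag is true); the expected values follow directly from the branches above:

    # On Python 3 (PY3K == True):
    assert tobytes('abc') == b'abc'   # str is encoded to bytes
    assert tobytes(b'abc') == b'abc'  # bytes pass through unchanged
    assert tostr(b'abc') == 'abc'     # bytes are decoded to str
    assert tostr('abc') == 'abc'      # str passes through unchanged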
4,918
def retry(func=None, retries=5, backoff=None,
          exceptions=(IOError, OSError, EOFError), cleanup=None,
          sleep=time.sleep):
    @Aspect(bind=True)
    def retry_aspect(cutpoint, *args, **kwargs):
        for count in range(retries + 1):
            try:
                if count and cleanup:
                    cleanup(*args, **kwargs)
                yield
                break
            except exceptions as exc:
                if count == retries:
                    raise
                if not backoff:
                    timeout = 0
                elif isinstance(backoff, (int, float)):
                    timeout = backoff
                else:
                    timeout = backoff(count)
                logger.exception(
                    "%s(%s, %s) raised exception %s. %s retries left. Sleeping %s secs.",
                    cutpoint.__name__, args, kwargs, exc, retries - count, timeout)
                sleep(timeout)
    return retry_aspect if func is None else retry_aspect(func)
Decorator that retries the call retries times if func raises exceptions. Can use a backoff function to sleep until the next retry.
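A usage sketch for the decorator above; flaky_read and the file path are hypothetical stand-ins for I/O that sometimes raises IOError:

    @retry(retries=3, backoff=1.5)  # wait 1.5 s between attempts
    def flaky_read():
        with open('/tmp/data.bin', 'rb') as f:  # hypothetical flaky I/O
            return f.read()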
4,919
def eparOptionFactory(master, statusBar, param, defaultParam,
                      doScroll, fieldWidths, plugIn=None,
                      editedCallbackObj=None, helpCallbackObj=None,
                      mainGuiObj=None, defaultsVerb="Default", bg=None,
                      indent=False, flagging=False, flaggedColor=None):
    if plugIn is not None:
        eparOption = plugIn
    elif param.choice is not None:
        eparOption = EnumEparOption
    else:
        eparOption = _eparOptionDict.get(param.type, StringEparOption)
    eo = eparOption(master, statusBar, param, defaultParam, doScroll,
                    fieldWidths, defaultsVerb, bg, indent=indent,
                    helpCallbackObj=helpCallbackObj, mainGuiObj=mainGuiObj)
    eo.setEditedCallbackObj(editedCallbackObj)
    eo.setIsFlagging(flagging, False)
    if flaggedColor:
        eo.setFlaggedColor(flaggedColor)
    return eo
Return an EparOption item of the appropriate type for the parameter param.
4,920
def popupChoices(self, event=None):
    if NORMAL not in (self.browserEnabled, self.clearEnabled,
                      self.unlearnEnabled, self.helpEnabled):
        return
    self.menu = Menu(self.entry, tearoff=0)
    if self.browserEnabled != DISABLED:
        if capable.OF_TKFD_IN_EPAR:
            self.menu.add_command(label="File Browser",
                                  state=self.browserEnabled,
                                  command=self.fileBrowser)
            self.menu.add_command(label="Directory Browser",
                                  state=self.browserEnabled,
                                  command=self.dirBrowser)
        else:
            self.menu.add_command(label="File/Directory Browser",
                                  state=self.browserEnabled,
                                  command=self.fileBrowser)
        self.menu.add_separator()
    self.menu.add_command(label="Clear", state=self.clearEnabled,
                          command=self.clearEntry)
    self.menu.add_command(label=self.defaultsVerb, state=self.unlearnEnabled,
                          command=self.unlearnValue)
    self.menu.add_command(label='Help', state=self.helpEnabled,
                          command=self.helpOnParam)
    ycoord = self.entry.winfo_rooty()
    xcoord = self.entry.winfo_pointerx() - XSHIFT
    self.menu.tk_popup(xcoord, ycoord)
Pop up a right-click menu of special parameter operations.
4,921
def fileBrowser(self):
    if capable.OF_TKFD_IN_EPAR:
        fname = askopenfilename(parent=self.entry, title="Select File")
    else:
        from . import filedlg
        self.fd = filedlg.PersistLoadFileDialog(self.entry, "Select File", "*")
        if self.fd.Show() != 1:
            self.fd.DialogCleanup()
            return
        fname = self.fd.GetFileName()
        self.fd.DialogCleanup()
    if not fname:
        return
    self.choice.set(fname)
    self.lastSelection = None
Invoke a tkinter file dialog.
4,922
def dirBrowser(self):
    if capable.OF_TKFD_IN_EPAR:
        fname = askdirectory(parent=self.entry, title="Select Directory")
    else:
        raise NotImplementedError('Fix popupChoices() logic.')
    if not fname:
        return
    self.choice.set(fname)
    self.lastSelection = None
Invoke a tkinter directory dialog.
4,923
def forceValue(self, newVal, noteEdited=False):
    if newVal is None:
        newVal = ""
    self.choice.set(newVal)
    if noteEdited:
        self.widgetEdited(val=newVal, skipDups=False)
Force-set a parameter entry to the given value.
4,924
def unlearnValue(self):
    defaultValue = self.defaultParamInfo.get(field="p_filename",
                                             native=0, prompt=0)
    self.choice.set(defaultValue)
Unlearn a parameter value by setting it back to its default.
4,925
def keypress(self, event):
    try:
        self.choice.set(self.shortcuts[event.keysym])
    except KeyError:
        pass
Allow keys typed in the widget to select items.
4,926
def postcmd(self):
    value = self.choice.get()
    try:
        index = self.paramInfo.choice.index(value)
        self.entry.menu.activate(index)
    except ValueError:
        pass
Make sure the proper entry is activated when the menu is posted.
4,927
def convertToNative(self, aVal):
    if aVal is None:
        return None
    if isinstance(aVal, bool):
        return aVal
    return str(aVal).lower() in ('1', 'on', 'yes', 'true')
Convert to a native bool; interpret certain strings.
4,928
def toggle(self, event=None):
    if self.choice.get() == "yes":
        self.rbno.select()
    else:
        self.rbyes.select()
    self.widgetEdited()
Toggle the value between Yes and No.
4,929
def entryCheck(self, event=None, repair=True):
    valupr = self.choice.get().upper()
    if valupr.strip() == 'INDEF':
        self.choice.set(valupr)
    return EparOption.entryCheck(self, event, repair=repair)
Ensure any INDEF entry is uppercase before applying the base-class behavior.
4,930
def _setSampleSizeBytes(self):
    self.sampleSizeBytes = self.getPacketSize()
    if self.sampleSizeBytes > 0:
        self.maxBytesPerFifoRead = (32 // self.sampleSizeBytes)
Updates the current record of the packet size per sample and the relationship between this and the FIFO reads.
4,931
def easter(year):
    c = trunc(year / 100)
    n = year - 19 * trunc(year / 19)
    k = trunc((c - 17) / 25)
    i = c - trunc(c / 4) - trunc((c - k) / 3) + (19 * n) + 15
    i = i - 30 * trunc(i / 30)
    i = i - trunc(i / 28) * (1 - trunc(i / 28) * trunc(29 / (i + 1)) * trunc((21 - n) / 11))
    j = year + trunc(year / 4) + i + 2 - c + trunc(c / 4)
    j = j - 7 * trunc(j / 7)
    l = i - j
    month = 3 + trunc((l + 40) / 44)
    day = l + 28 - 31 * trunc(month / 4)
    return year, int(month), int(day)
Calculate Western Easter.
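A worked check of the algorithm above: for 2024, c = 20, n = 10, k = 0, which gives i = 4, j = 1, l = 3, hence month 3 and day 31; Easter Sunday 2024 indeed fell on 31 March:

    assert easter(2024) == (2024, 3, 31)
    assert easter(2000) == (2000, 4, 23)  # another known Easter date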
4,932
def convert(input, width=132, output=None, keep=False):
    trl = open(input)
    lines = np.array([i for text in trl.readlines()
                      for i in textwrap.wrap(text, width=width)])
    trl.close()
    if output is None:
        rootname, suffix = os.path.splitext(input)
        s = suffix[1:].replace('ra', 'rl')
        fitsname = "{}_{}{}fits".format(rootname, s, os.path.extsep)
    else:
        fitsname = output
    full_name = os.path.abspath(os.path.join(os.path.curdir, fitsname))
    old_file = os.path.exists(full_name)
    if old_file:
        if keep:
            print("ERROR: Trailer file already written out as: {}".format(full_name))
            raise IOError
        else:
            os.remove(full_name)
    line_fmt = "{}A".format(width)
    tbhdu = fits.BinTableHDU.from_columns(
        [fits.Column(name='TEXT_FILE', format=line_fmt, array=lines)])
    tbhdu.writeto(fitsname)
    print("Created output FITS filename for trailer:{} {}".format(os.linesep, full_name))
    os.remove(input)
Read the input ASCII trailer file and write it out as a FITS table.
4,933
def get_extra_values(conf, _prepend=()):
    out = []
    out.extend([(_prepend, name) for name in conf.extra_values])
    for name in conf.sections:
        if name not in conf.extra_values:
            out.extend(get_extra_values(conf[name], _prepend + (name,)))
    return out
Find all the values and sections not in the configspec from a validated ConfigObj.
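A hedged usage sketch: get_extra_values appears in configobj's documented API and reports keys present in a validated config but absent from its configspec. The file names here are hypothetical, and the validate module is assumed to ship alongside configobj:

    from configobj import ConfigObj, get_extra_values
    from validate import Validator  # assumed companion module

    cfg = ConfigObj('app.ini', configspec='spec.ini')  # hypothetical files
    cfg.validate(Validator())
    print(get_extra_values(cfg))
    # e.g. [((), 'unknown_key'), (('section1',), 'stray_option')]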
4,934
def _fetch(self, key):
    save_interp = self.section.main.interpolation
    self.section.main.interpolation = False
    current_section = self.section
    while True:
        val = current_section.get(key)
        if val is not None and not isinstance(val, Section):
            break
        val = current_section.get('DEFAULT', {}).get(key)
        if val is not None and not isinstance(val, Section):
            break
        if current_section.parent is current_section:
            break
        current_section = current_section.parent
    self.section.main.interpolation = save_interp
    if val is None:
        raise MissingInterpolationOption(key)
    return val, current_section
Helper function to fetch values from the owning section.
4,935
def dict(self):
    newdict = {}
    for entry in self:
        this_entry = self[entry]
        if isinstance(this_entry, Section):
            this_entry = this_entry.dict()
        elif isinstance(this_entry, list):
            this_entry = list(this_entry)
        elif isinstance(this_entry, tuple):
            this_entry = tuple(this_entry)
        newdict[entry] = this_entry
    return newdict
Return a deepcopy of self as a dictionary.
4,936
def merge(self, indict):
    for key, val in list(indict.items()):
        if (key in self and isinstance(self[key], dict) and
                isinstance(val, dict)):
            self[key].merge(val)
        else:
            self[key] = val
A recursive update, useful for merging config files.
4,937
def rename(self, oldkey, newkey):
    if oldkey in self.scalars:
        the_list = self.scalars
    elif oldkey in self.sections:
        the_list = self.sections
    else:
        raise KeyError('Key "%s" not found.' % oldkey)
    pos = the_list.index(oldkey)
    val = self[oldkey]
    dict.__delitem__(self, oldkey)
    dict.__setitem__(self, newkey, val)
    the_list.remove(oldkey)
    the_list.insert(pos, newkey)
    comm = self.comments[oldkey]
    inline_comment = self.inline_comments[oldkey]
    del self.comments[oldkey]
    del self.inline_comments[oldkey]
    self.comments[newkey] = comm
    self.inline_comments[newkey] = inline_comment
Change a keyname to another without changing its position in sequence.
4,938
def walk(self, function, raise_errors=True, call_on_sections=False, **keywargs):
    out = {}
    for i in range(len(self.scalars)):
        entry = self.scalars[i]
        try:
            val = function(self, entry, **keywargs)
            entry = self.scalars[i]
            out[entry] = val
        except Exception:
            if raise_errors:
                raise
            else:
                entry = self.scalars[i]
                out[entry] = False
    for i in range(len(self.sections)):
        entry = self.sections[i]
        if call_on_sections:
            try:
                function(self, entry, **keywargs)
            except Exception:
                if raise_errors:
                    raise
                else:
                    entry = self.sections[i]
                    out[entry] = False
        entry = self.sections[i]
        out[entry] = self[entry].walk(
            function,
            raise_errors=raise_errors,
            call_on_sections=call_on_sections,
            **keywargs)
    return out
Walk every member and call a function on the keyword and value.
4,939
def as_list(self, key):
    result = self[key]
    if isinstance(result, (tuple, list)):
        return list(result)
    return [result]
A convenience method which fetches the specified value, guaranteeing that it is a list.
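A short illustration of the guarantee, adapted from configobj's documented behavior (this assumes the method above is Section.as_list from configobj):

    from configobj import ConfigObj

    cfg = ConfigObj()
    cfg['a'] = 1
    cfg['b'] = [1, 2]
    cfg.as_list('a')  # [1]
    cfg.as_list('b')  # [1, 2]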
4,940
def restore_defaults(self):
    for key in self.default_values:
        self.restore_default(key)
    for section in self.sections:
        self[section].restore_defaults()
Recursively restore default values to all members that have them.
4,941
def _handle_bom(self, infile):
    if ((self.encoding is not None) and
            (self.encoding.lower() not in BOM_LIST)):
        return self._decode(infile, self.encoding)
    if isinstance(infile, (list, tuple)):
        line = infile[0]
    else:
        line = infile
    if self.encoding is not None:
        enc = BOM_LIST[self.encoding.lower()]
        if enc == 'utf_16':
            for BOM, (encoding, final_encoding) in list(BOMS.items()):
                if not final_encoding:
                    continue
                if infile.startswith(BOM):
                    return self._decode(infile, encoding)
            return self._decode(infile, self.encoding)
        BOM = BOM_SET[enc]
        if not line.startswith(BOM):
            return self._decode(infile, self.encoding)
        newline = line[len(BOM):]
        if isinstance(infile, (list, tuple)):
            infile[0] = newline
        else:
            infile = newline
        self.BOM = True
        return self._decode(infile, self.encoding)
    for BOM, (encoding, final_encoding) in list(BOMS.items()):
        if not isinstance(BOM, str) or not line.startswith(BOM):
            continue
        else:
            self.encoding = final_encoding
            if not final_encoding:
                self.BOM = True
                newline = line[len(BOM):]
                if isinstance(infile, (list, tuple)):
                    infile[0] = newline
                else:
                    infile = newline
                if isinstance(infile, string_types):
                    return infile.splitlines(True)
                else:
                    return infile
            return self._decode(infile, encoding)
    if isinstance(infile, string_types):
        return infile.splitlines(True)
    return infile
Handle any BOM and decode if necessary.
4,942
def _decode(self, infile, encoding):
    if isinstance(infile, string_types):
        return infile.decode(encoding).splitlines(True)
    for i, line in enumerate(infile):
        if PY3K:
            if not isinstance(line, str):
                infile[i] = line.decode(encoding)
        else:
            if not isinstance(line, unicode):
                infile[i] = line.decode(encoding)
    return infile
Decode infile to unicode, using the specified encoding.
4,943
def _decode_element(self, line):
    if not self.encoding:
        return line
    if isinstance(line, str) and self.default_encoding:
        return line.decode(self.default_encoding)
    return line
Decode element to unicode if necessary.
4,944
def _match_depth(self, sect, depth):
    while depth < sect.depth:
        if sect is sect.parent:
            raise SyntaxError()
        sect = sect.parent
    if sect.depth == depth:
        return sect
    raise SyntaxError()
Given a section and a depth level, walk back through the section's parents to see if the depth level matches a previous section.
4,945
def _handle_error(self, text, ErrorClass, infile, cur_index):
    line = infile[cur_index]
    cur_index += 1
    message = text % cur_index
    error = ErrorClass(message, cur_index, line)
    if self.raise_errors:
        raise error
    self._errors.append(error)
Handle an error according to the error settings.
4,946
def _unquote(self, value):
    if not value:
        raise SyntaxError
    if (value[0] == value[-1]) and (value[0] in ('"', "'")):
        value = value[1:-1]
    return value
Return an unquoted version of a value.
4,947
def _quote(self, value, multiline=True):
    if multiline and self.write_empty_values and value == '':
        return ''
    if multiline and isinstance(value, (list, tuple)):
        if not value:
            return ','
        elif len(value) == 1:
            return self._quote(value[0], multiline=False) + ','
        return ', '.join([self._quote(val, multiline=False) for val in value])
    if not isinstance(value, string_types):
        if self.stringify:
            value = str(value)
        else:
            raise TypeError('Value "%s" is not a string.' % value)
    if not value:
        return '""'
    no_lists_no_quotes = not self.list_values and '\n' not in value and '#' not in value
    need_triple = multiline and ((("'" in value) and ('"' in value)) or ('\n' in value))
    hash_triple_quote = (multiline and not need_triple and
                         ("'" in value) and ('"' in value) and ('#' in value))
    check_for_single = (no_lists_no_quotes or not need_triple) and not hash_triple_quote
    if check_for_single:
        if not self.list_values:
            quot = noquot
        elif '\n' in value:
            raise ConfigObjError('Value "%s" cannot be safely quoted.' % value)
        elif ((value[0] not in wspace_plus) and
              (value[-1] not in wspace_plus) and
              (',' not in value)):
            quot = noquot
        else:
            quot = self._get_single_quote(value)
    else:
        quot = self._get_triple_quote(value)
    if quot == noquot and '#' in value and self.list_values:
        quot = self._get_single_quote(value)
    return quot % value
Return a safely quoted version of a value.
4,948
def _multiline(self, value, infile, cur_index, maxline):
    quot = value[:3]
    newvalue = value[3:]
    single_line = self._triple_quote[quot][0]
    multi_line = self._triple_quote[quot][1]
    mat = single_line.match(value)
    if mat is not None:
        retval = list(mat.groups())
        retval.append(cur_index)
        return retval
    elif newvalue.find(quot) != -1:
        raise SyntaxError()
    while cur_index < maxline:
        cur_index += 1
        newvalue += '\n'
        line = infile[cur_index]
        if line.find(quot) == -1:
            newvalue += line
        else:
            break
    else:
        raise SyntaxError()
    mat = multi_line.match(line)
    if mat is None:
        raise SyntaxError()
    (value, comment) = mat.groups()
    return (newvalue + value, comment, cur_index)
Extract the value where we are in a multiline situation.
4,949
def _handle_configspec(self, configspec):
    if not isinstance(configspec, ConfigObj):
        try:
            configspec = ConfigObj(configspec, raise_errors=True,
                                   file_error=True, _inspec=True)
        except ConfigObjError as e:
            raise ConfigspecError('Parsing configspec failed: %s' % e)
        except IOError as e:
            raise IOError('Reading configspec failed: %s' % e)
    self.configspec = configspec
Parse the configspec.
4,950
def _write_line(self, indent_string, entry, this_entry, comment):
    if not self.unrepr:
        val = self._decode_element(self._quote(this_entry))
    else:
        val = repr(this_entry)
    return '%s%s%s%s%s' % (indent_string,
                           self._decode_element(self._quote(entry, multiline=False)),
                           self._a_to_u(' = '),
                           val,
                           self._decode_element(comment))
Write an individual line for the write method.
4,951
def _write_marker(self, indent_string, depth, entry, comment):
    return '%s%s%s%s%s' % (indent_string,
                           self._a_to_u('[' * depth),
                           self._quote(self._decode_element(entry), multiline=False),
                           self._a_to_u(']' * depth),
                           self._decode_element(comment))
Write a section marker line.
4,952
def _handle_comment(self, comment):
    if not comment:
        return ''
    start = self.indent_type
    if not comment.startswith('#'):
        start += self._a_to_u(' # ')
    return (start + comment)
Deal with a comment.
4,953
def reset(self):
    self.clear()
    self._initialise()
    self.configspec = None
    self._original_configspec = None
Clear the ConfigObj instance and restore it to a freshly created state.
4,954
def reload(self):
    if not isinstance(self.filename, string_types):
        raise ReloadError()
    filename = self.filename
    current_options = {}
    for entry in OPTION_DEFAULTS:
        if entry == 'configspec':
            continue
        current_options[entry] = getattr(self, entry)
    configspec = self._original_configspec
    current_options['configspec'] = configspec
    self.clear()
    self._initialise(current_options)
    self._load(filename, configspec)
Reload a ConfigObj from file.
4,955
def check(self, check, member, missing=False):
    if missing:
        raise self.baseErrorClass()
    return member
A dummy check method; always returns the value unchanged.
4,956
def _verify(waiveredHdul):
    if len(waiveredHdul) == 2:
        if waiveredHdul[0].header['NAXIS'] > 0:
            if isinstance(waiveredHdul[1], fits.TableHDU):
                if (waiveredHdul[0].data.shape[0] == waiveredHdul[1].data.shape[0]
                        or waiveredHdul[1].data.shape[0] == 1):
                    return
    raise ValueError("Input object does not represent a valid waivered"
                     " FITS file")
Verify that the input HDUList is for a waivered FITS file.
4,957
def convertwaiveredfits(waiveredObject, outputFileName=None,
                        forceFileOutput=False, convertTo='multiExtension',
                        verbose=False):
    if convertTo == 'multiExtension':
        func = toMultiExtensionFits
    else:
        raise ValueError('Conversion type ' + convertTo + ' unknown')
    return func(*(waiveredObject, outputFileName, forceFileOutput, verbose))
Convert the input waivered FITS object to various formats. The default conversion format is multi-extension FITS. Generate an output file in the desired format if requested.
4,958
def to_jd(year, month, day):
    if year >= 0:
        y = 474
    else:
        y = 473
    epbase = year - y
    epyear = 474 + (epbase % 2820)
    if month <= 7:
        m = (month - 1) * 31
    else:
        m = (month - 1) * 30 + 6
    return (day + m +
            trunc(((epyear * 682) - 110) / 2816) +
            (epyear - 1) * 365 +
            trunc(epbase / 2820) * 1029983 +
            (EPOCH - 1))
Determine the Julian day from a Persian date.
4,959
def from_jd(jd):
    jd = trunc(jd) + 0.5
    depoch = jd - to_jd(475, 1, 1)
    cycle = trunc(depoch / 1029983)
    cyear = depoch % 1029983
    if cyear == 1029982:
        ycycle = 2820
    else:
        aux1 = trunc(cyear / 366)
        aux2 = cyear % 366
        ycycle = trunc(((2134 * aux1) + (2816 * aux2) + 2815) / 1028522) + aux1 + 1
    year = ycycle + (2820 * cycle) + 474
    if year <= 0:
        year -= 1
    yday = (jd - to_jd(year, 1, 1)) + 1
    if yday <= 186:
        month = ceil(yday / 31)
    else:
        month = ceil((yday - 6) / 30)
    day = int(jd - to_jd(year, month, 1)) + 1
    return (year, month, day)
Calculate the Persian date from a Julian day.
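The same hedged round-trip check as for the other calendars; convertdate.persian is an assumed import path:

    from convertdate import persian  # assumed module path

    jd = persian.to_jd(1400, 1, 1)              # Nowruz of year 1400 AP
    assert persian.from_jd(jd) == (1400, 1, 1)  # should round-trip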
4,960
def teardown_global_logging():
    global global_logging_started
    if not global_logging_started:
        return
    stdout_logger = logging.getLogger(__name__ + '.stdout')
    stderr_logger = logging.getLogger(__name__ + '.stderr')
    if sys.stdout is stdout_logger:
        sys.stdout = sys.stdout.stream
    if sys.stderr is stderr_logger:
        sys.stderr = sys.stderr.stream
    exc_type, exc_value, exc_traceback = sys.exc_info()
    if exc_type is not None:
        sys.excepthook(exc_type, exc_value, exc_traceback)
    del exc_type
    del exc_value
    del exc_traceback
    if not PY3K:
        sys.exc_clear()
    del sys.excepthook
    logging.captureWarnings(False)
    rawinput = 'input' if PY3K else 'raw_input'
    if hasattr(builtins, '_original_raw_input'):
        setattr(builtins, rawinput, builtins._original_raw_input)
        del builtins._original_raw_input
    global_logging_started = False
Disable global logging of stdio, warnings, and exceptions.
4,961
def create_logger(name, format='%(levelname)s: %(message)s', datefmt=None,
                  stream=None, level=logging.INFO, filename=None,
                  filemode='w', filelevel=None, propagate=True):
    logger = logging.getLogger(name)
    logger.setLevel(level)
    fmt = logging.Formatter(format, datefmt)
    logger.propagate = propagate
    for hdlr in logger.handlers:
        logger.removeHandler(hdlr)
    if not (filename or stream):
        logger.addHandler(logging.NullHandler())
    if filename:
        hdlr = logging.FileHandler(filename, filemode)
        if filelevel is None:
            filelevel = level
        hdlr.setLevel(filelevel)
        hdlr.setFormatter(fmt)
        logger.addHandler(hdlr)
    if stream:
        hdlr = logging.StreamHandler(stream)
        hdlr.setLevel(level)
        hdlr.setFormatter(fmt)
        logger.addHandler(hdlr)
    return logger
Do basic configuration for the logging system. Similar to logging.basicConfig, but the logger name is configurable and both a file output and a stream output can be created. Returns a logger object.
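A usage sketch for the helper above (the logger name and file name are hypothetical); debug records reach only the stream because the file handler is given a higher level:

    import logging
    import sys

    log = create_logger('myapp', stream=sys.stderr, level=logging.DEBUG,
                        filename='myapp.log', filelevel=logging.INFO)
    log.debug('stream only: below the file handler level')
    log.info('goes to both stderr and myapp.log')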
4,962
def _post_login_page(self, login_url):
    data = {"login": self.username,
            "_58_password": self.password}
    try:
        raw_res = yield from self._session.post(login_url,
                                                data=data,
                                                timeout=self._timeout,
                                                allow_redirects=False)
    except OSError:
        raise PyHydroQuebecError("Can not submit login form")
    if raw_res.status != 302:
        raise PyHydroQuebecError("Login error: Bad HTTP status code. "
                                 "Please check your username/password.")
    return True
Log in to the HydroQuebec website.
4,963
def _get_p_p_id_and_contract(self):
    contracts = {}
    try:
        raw_res = yield from self._session.get(PROFILE_URL,
                                               timeout=self._timeout)
    except OSError:
        raise PyHydroQuebecError("Can not get profile page")
    content = yield from raw_res.text()
    soup = BeautifulSoup(content, 'html.parser')
    for node in soup.find_all('span', {"class": "contrat"}):
        rematch = re.match("C[a-z]* ([0-9]{4} [0-9]{5})", node.text)
        if rematch is not None:
            contracts[rematch.group(1).replace(" ", "")] = None
    for node in soup.find_all('a', {"class": "big iconLink"}):
        for contract in contracts:
            if contract in node.attrs.get('href'):
                contracts[contract] = node.attrs.get('href')
    p_p_id = None
    for node in soup.find_all('span'):
        node_id = node.attrs.get('id', "")
        if node_id.startswith("p_portraitConsommation_WAR"):
            p_p_id = node_id[2:]
            break
    if p_p_id is None:
        raise PyHydroQuebecError("Could not get p_p_id")
    return p_p_id, contracts
Get the id of the consumption profile.
4,964
def _get_lonely_contract(self):
    contracts = {}
    try:
        raw_res = yield from self._session.get(MAIN_URL,
                                               timeout=self._timeout)
    except OSError:
        raise PyHydroQuebecError("Can not get main page")
    content = yield from raw_res.text()
    soup = BeautifulSoup(content, 'html.parser')
    info_node = soup.find("div", {"class": "span3 contrat"})
    if info_node is None:
        raise PyHydroQuebecError("Can not found contract")
    research = re.search("Contrat ([0-9]{4} [0-9]{5})", info_node.text)
    if research is not None:
        contracts[research.group(1).replace(" ", "")] = None
    if contracts == {}:
        raise PyHydroQuebecError("Can not found contract")
    return contracts
Get the contract number when we have only one contract.
4,965
def _get_balances(self):
    balances = []
    try:
        raw_res = yield from self._session.get(MAIN_URL,
                                               timeout=self._timeout)
    except OSError:
        raise PyHydroQuebecError("Can not get main page")
    content = yield from raw_res.text()
    soup = BeautifulSoup(content, 'html.parser')
    solde_nodes = soup.find_all("div", {"class": "solde-compte"})
    if solde_nodes == []:
        raise PyHydroQuebecError("Can not found balance")
    for solde_node in solde_nodes:
        try:
            balance = solde_node.find("p").text
        except AttributeError:
            raise PyHydroQuebecError("Can not found balance")
        balances.append(float(balance[:-2].replace(",", ".").replace("\xa0", "")))
    return balances
Get all balances.
4,966
def _load_contract_page(self, contract_url):
    try:
        yield from self._session.get(contract_url, timeout=self._timeout)
    except OSError:
        raise PyHydroQuebecError("Can not get profile page for a "
                                 "specific contract")
Load the profile page of a specific contract when we have multiple contracts.
4,967
def _get_annual_data(self, p_p_id):
    params = {"p_p_id": p_p_id,
              "p_p_lifecycle": 2,
              "p_p_state": "normal",
              "p_p_mode": "view",
              "p_p_resource_id": "resourceObtenirDonneesConsommationAnnuelles"}
    try:
        raw_res = yield from self._session.get(PROFILE_URL,
                                               params=params,
                                               timeout=self._timeout)
    except OSError:
        raise PyHydroQuebecAnnualError("Can not get annual data")
    try:
        json_output = yield from raw_res.json(content_type='text/json')
    except (OSError, json.decoder.JSONDecodeError):
        raise PyHydroQuebecAnnualError("Could not get annual data")
    if not json_output.get('success'):
        raise PyHydroQuebecAnnualError("Could not get annual data")
    if not json_output.get('results'):
        raise PyHydroQuebecAnnualError("Could not get annual data")
    if 'courant' not in json_output.get('results')[0]:
        raise PyHydroQuebecAnnualError("Could not get annual data")
    return json_output.get('results')[0]['courant']
Get annual data.
4,968
def _get_monthly_data(self, p_p_id):
    params = {"p_p_id": p_p_id,
              "p_p_lifecycle": 2,
              "p_p_resource_id": ("resourceObtenirDonnees"
                                  "PeriodesConsommation")}
    try:
        raw_res = yield from self._session.get(PROFILE_URL,
                                               params=params,
                                               timeout=self._timeout)
    except OSError:
        raise PyHydroQuebecError("Can not get monthly data")
    try:
        json_output = yield from raw_res.json(content_type='text/json')
    except (OSError, json.decoder.JSONDecodeError):
        raise PyHydroQuebecError("Could not get monthly data")
    if not json_output.get('success'):
        raise PyHydroQuebecError("Could not get monthly data")
    return json_output.get('results')
Get monthly data.
4,969
def _get_hourly_data(self, day_date, p_p_id):
    params = {"p_p_id": p_p_id,
              "p_p_lifecycle": 2,
              "p_p_state": "normal",
              "p_p_mode": "view",
              "p_p_resource_id": "resourceObtenirDonneesConsommationHoraires",
              "p_p_cacheability": "cacheLevelPage",
              "p_p_col_id": "column-2",
              "p_p_col_count": 1,
              "date": day_date}
    try:
        raw_res = yield from self._session.get(PROFILE_URL,
                                               params=params,
                                               timeout=self._timeout)
    except OSError:
        raise PyHydroQuebecError("Can not get hourly data")
    try:
        json_output = yield from raw_res.json(content_type='text/json')
    except (OSError, json.decoder.JSONDecodeError):
        raise PyHydroQuebecAnnualError("Could not get hourly data")
    hourly_consumption_data = json_output['results']['listeDonneesConsoEnergieHoraire']
    hourly_power_data = json_output['results']['listeDonneesConsoPuissanceHoraire']
    params = {"p_p_id": p_p_id,
              "p_p_lifecycle": 2,
              "p_p_state": "normal",
              "p_p_mode": "view",
              "p_p_resource_id": "resourceObtenirDonneesMeteoHoraires",
              "p_p_cacheability": "cacheLevelPage",
              "p_p_col_id": "column-2",
              "p_p_col_count": 1,
              "dateDebut": day_date,
              "dateFin": day_date}
    try:
        raw_res = yield from self._session.get(PROFILE_URL,
                                               params=params,
                                               timeout=self._timeout)
    except OSError:
        raise PyHydroQuebecError("Can not get hourly data")
    try:
        json_output = yield from raw_res.json(content_type='text/json')
    except (OSError, json.decoder.JSONDecodeError):
        raise PyHydroQuebecAnnualError("Could not get hourly data")
    hourly_weather_data = []
    if not json_output.get('results'):
        hourly_weather_data = [None] * 24
    else:
        hourly_weather_data = json_output['results'][0]['listeTemperaturesHeure']
    processed_hourly_data = [{'hour': data['heure'],
                              'lower': data['consoReg'],
                              'high': data['consoHaut'],
                              'total': data['consoTotal'],
                              'temp': hourly_weather_data[i]}
                             for i, data in enumerate(hourly_consumption_data)]
    raw_hourly_data = {'Energy': hourly_consumption_data,
                       'Power': hourly_power_data,
                       'Weather': hourly_weather_data}
    hourly_data = {'processed_hourly_data': processed_hourly_data,
                   'raw_hourly_data': raw_hourly_data}
    return hourly_data
Get hourly data.
4,970
def fetch_data_detailled_energy_use(self, start_date=None, end_date=None):
    if start_date is None:
        start_date = datetime.datetime.now(HQ_TIMEZONE) - datetime.timedelta(days=1)
    if end_date is None:
        end_date = datetime.datetime.now(HQ_TIMEZONE)
    yield from self._get_httpsession()
    login_url = yield from self._get_login_page()
    yield from self._post_login_page(login_url)
    p_p_id, contracts = yield from self._get_p_p_id_and_contract()
    if contracts == {}:
        contracts = yield from self._get_lonely_contract()
    for contract, contract_url in contracts.items():
        if contract_url:
            yield from self._load_contract_page(contract_url)
        data = {}
        dates = [(start_date + datetime.timedelta(n))
                 for n in range(int((end_date - start_date).days))]
        for date in dates:
            day_date = date.strftime("%Y-%m-%d")
            hourly_data = yield from self._get_hourly_data(day_date, p_p_id)
            data[day_date] = hourly_data['raw_hourly_data']
        self._data[contract] = data
Get detailed energy use for a specific contract.
4,971
def fetch_data(self):
    yield from self._get_httpsession()
    login_url = yield from self._get_login_page()
    yield from self._post_login_page(login_url)
    p_p_id, contracts = yield from self._get_p_p_id_and_contract()
    if contracts == {}:
        contracts = yield from self._get_lonely_contract()
    balances = yield from self._get_balances()
    balances_len = len(balances)
    balance_id = 0
    for contract, contract_url in contracts.items():
        if contract_url:
            yield from self._load_contract_page(contract_url)
        try:
            yesterday = datetime.datetime.now(HQ_TIMEZONE) - datetime.timedelta(days=1)
            day_date = yesterday.strftime("%Y-%m-%d")
            hourly_data = yield from self._get_hourly_data(day_date, p_p_id)
            hourly_data = hourly_data['processed_hourly_data']
        except Exception:
            hourly_data = {}
        try:
            annual_data = yield from self._get_annual_data(p_p_id)
        except PyHydroQuebecAnnualError:
            annual_data = {}
        monthly_data = yield from self._get_monthly_data(p_p_id)
        monthly_data = monthly_data[0]
        start_date = monthly_data.get('dateDebutPeriode')
        end_date = monthly_data.get('dateFinPeriode')
        try:
            daily_data = yield from self._get_daily_data(p_p_id, start_date, end_date)
        except Exception:
            daily_data = []
        if daily_data:
            daily_data = daily_data[0]['courant']
        contract_data = {"balance": balances[balance_id]}
        for key1, key2 in MONTHLY_MAP:
            contract_data[key1] = monthly_data[key2]
        for key1, key2 in ANNUAL_MAP:
            contract_data[key1] = annual_data.get(key2, "")
        if daily_data:
            for key1, key2 in DAILY_MAP:
                contract_data[key1] = daily_data[key2]
        if hourly_data:
            contract_data['yesterday_hourly_consumption'] = hourly_data
        self._data[contract] = contract_data
        balance_count = balance_id + 1
        if balance_count < balances_len:
            balance_id += 1
Get the latest data from HydroQuebec.
4,972
def get_data(self, contract=None):
    if contract is None:
        return self._data
    if contract in self._data.keys():
        return {contract: self._data[contract]}
    raise PyHydroQuebecError("Contract {} not found".format(contract))
Return the collected data.
4,973
def _validate_argument(self, arg):
    if arg is None:
        return arg
    if isinstance(arg, type):
        return InstanceOf(arg)
    if not isinstance(arg, BaseMatcher):
        raise TypeError(
            "argument of %s can be a type or a matcher (got %r)" % (
                self.__class__.__name__, type(arg)))
    return arg
Validate a type or matcher argument to the constructor.
4,974
def _initialize(self, *args, **kwargs):
    self.items = None
    self.keys = None
    self.values = None
    if args:
        if len(args) != 2:
            raise TypeError("expected exactly two positional arguments, "
                            "got %s" % len(args))
        if kwargs:
            raise TypeError("expected positional or keyword arguments, not both")
        self.keys, self.values = map(self._validate_argument, args)
    elif kwargs:
        has_kv = 'keys' in kwargs and 'values' in kwargs
        has_of = 'of' in kwargs
        if not (has_kv or has_of):
            raise TypeError("expected keys/values or items matchers, "
                            "but got: %s" % list(kwargs.keys()))
        if has_kv and has_of:
            raise TypeError("expected keys & values, or items matchers, not both")
        if has_kv:
            self.keys = self._validate_argument(kwargs['keys'])
            self.values = self._validate_argument(kwargs['values'])
        else:
            of = kwargs['of']
            if isinstance(of, tuple):
                try:
                    self.keys, self.values = map(self._validate_argument, of)
                except ValueError:
                    # the original message had a stray "%" with no placeholder;
                    # include the class name explicitly instead
                    raise TypeError(
                        "of= tuple for %s has to be a pair of matchers/types"
                        % (self.__class__.__name__,))
            else:
                self.items = self._validate_argument(of)
Initialize the mapping matcher with constructor arguments.
4,975
def docs(ctx, output='html', rebuild=False, show=True, verbose=True):
    sphinx_build = ctx.run(
        'sphinx-build -b {output} {all} {verbose} docs docs/_build'.format(
            output=output,
            all='-a -E' if rebuild else '',
            verbose='-v' if verbose else ''))
    if not sphinx_build.ok:
        fatal("Failed to build the docs", cause=sphinx_build)
    if show:
        path = os.path.join(DOCS_OUTPUT_DIR, 'index.html')
        if sys.platform == 'darwin':
            path = 'file://%s' % os.path.abspath(path)
        webbrowser.open_new_tab(path)
Build the docs and show them in the default web browser.
4,976
def upload(ctx, yes=False):
    import callee
    version = callee.__version__
    if version.endswith('-dev'):
        fatal("Can't upload a development version (%s) to PyPI!", version)
    if not yes:
        answer = input("Do you really want to upload to PyPI [y/N]? ")
        yes = answer.strip().lower() == 'y'
    if not yes:
        logging.warning("Aborted -- not uploading to PyPI.")
        return -2
    logging.debug("Uploading version %s to PyPI...", version)
    setup_py_upload = ctx.run('python setup.py sdist upload')
    if not setup_py_upload.ok:
        fatal("Failed to upload version %s to PyPI!", version,
              cause=setup_py_upload)
    logging.info("PyPI upload completed successfully.")
    git_tag = ctx.run('git tag %s' % version)
    if not git_tag.ok:
        fatal("Failed to add a Git tag for uploaded version %s", version,
              cause=git_tag)
    git_push = ctx.run('git push && git push --tags')
    if not git_push.ok:
        fatal("Failed to push the release upstream.", cause=git_push)
Upload the package to PyPI.
4,977
def fatal(*args, **kwargs):
    exitcode = None
    if 'exitcode' in kwargs:
        exitcode = kwargs.pop('exitcode')
    if 'cause' in kwargs:
        cause = kwargs.pop('cause')
        if not isinstance(cause, Result):
            raise TypeError(
                "invalid cause of fatal error: expected %r, got %r" % (
                    Result, type(cause)))
        exitcode = exitcode or cause.return_code
    logging.error(*args, **kwargs)
    raise Exit(exitcode or -1)
Log an error message and exit.
4,978
def _add_request_parameters(func):
    async def decorated_func(*args, handle_ratelimit=None, max_tries=None,
                             request_timeout=None, **kwargs):
        return await func(*args, handle_ratelimit=handle_ratelimit,
                          max_tries=max_tries,
                          request_timeout=request_timeout, **kwargs)
    return decorated_func
Adds the ratelimit and request timeout parameters to a function.
4,979
async def _base_request(self, battle_tag: str, endpoint_name: str,
                        session: aiohttp.ClientSession, *, platform=None,
                        handle_ratelimit=None, max_tries=None,
                        request_timeout=None):
    if platform is None:
        platform = self.default_platform
    if handle_ratelimit is None:
        handle_ratelimit = self.default_handle_ratelimit
    if max_tries is None:
        max_tries = self.default_max_tries
    if request_timeout is None:
        request_timeout = self.default_request_timeout
    san_battle_tag = self.sanitize_battletag(battle_tag)
    for _ in range(max_tries):
        try:
            resp_json, status = await self._async_get(
                session,
                self.server_url + self._api_urlpath + "{battle_tag}/{endpoint}".format(
                    battle_tag=san_battle_tag, endpoint=endpoint_name),
                params={"platform": platform},
                headers={"User-Agent": "overwatch_python_api"},
                _async_timeout_seconds=request_timeout)
            if status == 429 and resp_json["msg"] == "you are being ratelimited":
                raise RatelimitError
        except RatelimitError:
            if handle_ratelimit:
                await asyncio.sleep(resp_json["retry"] + 1)
                continue
            else:
                raise
        else:
            break
    else:
        raise RatelimitError("Got ratelimited for each request until the "
                             "maximum number of retries was reached.")
    if status != 200:
        if status == 404 and resp_json["msg"] == "profile not found":
            raise ProfileNotFoundError(
                "Got HTTP 404, profile not found. This is caused by the given "
                "battletag not existing on the specified platform.")
        if status == 429 and resp_json["msg"] == "you are being ratelimited":
            raise RatelimitError(
                "Got HTTP 429, you are being ratelimited. This is caused by "
                "calls to the api too frequently.")
        raise ConnectionError("Did not get HTTP status 200, got: {0}".format(status))
    return resp_json
Perform a request to an endpoint. This is also where the ratelimit logic is handled.
4,980
def is_method(arg, min_arity=None, max_arity=None):
    if not callable(arg):
        return False
    if not any(is_(arg) for is_ in (inspect.ismethod,
                                    inspect.ismethoddescriptor,
                                    inspect.isbuiltin)):
        return False
    try:
        argnames, varargs, kwargs, defaults = getargspec(arg)
    except TypeError:
        return True
    else:
        if argnames and argnames[0] == 'self':
            argnames = argnames[1:]
        if min_arity is not None:
            actual_min_arity = len(argnames) - len(defaults or ())
            assert actual_min_arity >= 0, (
                "Minimum arity of %r found to be negative (got %s)!" % (
                    arg, actual_min_arity))
            if int(min_arity) != actual_min_arity:
                return False
        if max_arity is not None:
            actual_max_arity = sys.maxsize if varargs or kwargs else len(argnames)
            if int(max_arity) != actual_max_arity:
                return False
        return True
Check if the argument is a method.
4,981
def _is_readable(self, obj):
    try:
        read = getattr(obj, 'read')
    except AttributeError:
        return False
    else:
        return is_method(read, max_arity=1)
Check if the argument is a readable file-like object.
4,982
def _is_writable(self, obj):
    try:
        write = getattr(obj, 'write')
    except AttributeError:
        return False
    else:
        return is_method(write, min_arity=1, max_arity=1)
Check if the argument is a writable file-like object.
4,983
def run(time: datetime, altkm: float,
        glat: Union[float, np.ndarray], glon: Union[float, np.ndarray], *,
        f107a: float = None, f107: float = None,
        Ap: int = None) -> xarray.Dataset:
    glat = np.atleast_2d(glat)
    glon = np.atleast_2d(glon)
    if (glat.size == 1 and glon.size == 1 and
            isinstance(time, (str, date, datetime, np.datetime64))):
        atmos = rungtd1d(time, altkm, glat.squeeze()[()], glon.squeeze()[()],
                         f107a=f107a, f107=f107, Ap=Ap)
    else:
        atmos = loopalt_gtd(time, glat, glon, altkm,
                            f107a=f107a, f107=f107, Ap=Ap)
    return atmos
Loops the rungtd1d function below. Figured it's easier to troubleshoot in Python than Fortran.
4,984
def loopalt_gtd(time: datetime,
                glat: Union[float, np.ndarray], glon: Union[float, np.ndarray],
                altkm: Union[float, List[float], np.ndarray], *,
                f107a: float = None, f107: float = None,
                Ap: int = None) -> xarray.Dataset:
    glat = np.atleast_2d(glat)
    glon = np.atleast_2d(glon)
    assert glat.ndim == glon.ndim == 2
    times = np.atleast_1d(time)
    assert times.ndim == 1
    atmos = xarray.Dataset()
    for k, t in enumerate(times):
        print('computing', t)
        for i in range(glat.shape[0]):
            for j in range(glat.shape[1]):
                atm = rungtd1d(t, altkm, glat[i, j], glon[i, j],
                               f107a=f107a, f107=f107, Ap=Ap)
                atmos = xarray.merge((atmos, atm))
    atmos.attrs = atm.attrs
    return atmos
Loop over location and time.
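A hedged usage sketch, assuming run() above is the msise00 package's public entry point (the import path and the example inputs are assumptions, not confirmed by these rows):

    from datetime import datetime
    import msise00  # assumed package

    atmos = msise00.run(time=datetime(2017, 3, 1, 12), altkm=150.,
                        glat=65., glon=-148.)
    print(atmos)  # an xarray.Dataset of model densities and temperatures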
4,985
def _validate_desc(self, desc):
    if desc is None:
        return desc
    if not isinstance(desc, STRING_TYPES):
        raise TypeError(
            "predicate description for Matching must be a string, "
            "got %r" % (type(desc),))
    if not IS_PY3 and isinstance(desc, unicode):
        try:
            desc = desc.encode('ascii', errors='strict')
        except UnicodeEncodeError:
            raise TypeError("predicate description must be "
                            "an ASCII string in Python 2")
    return desc
Validate the predicate description.
4,986
def clean_email(self):
    contacts = self.api.lists.contacts(id=self.list_id)['result']
    for contact in contacts:
        if contact['email'] == self.cleaned_data['email']:
            raise forms.ValidationError(_(u'This email is already subscribed'))
    return self.cleaned_data['email']
Raise ValidationError if the contact exists.
4,987
def add_contact(self):
    self.api.lists.addcontact(contact=self.cleaned_data['email'],
                              id=self.list_id,
                              method='POST')
Create a contact using the email on the list.
4,988
def list_id(self):
    list_id = getattr(self, '_list_id', None)
    if list_id is None:
        for l in self.api.lists.all()['lists']:
            if l['name'] == self.list_name:
                self._list_id = l['id']
        if not getattr(self, '_list_id', None):
            self._list_id = self.api.lists.create(label=self.list_label,
                                                  name=self.list_name,
                                                  method='POST')['list_id']
    return self._list_id
Get or create the list id.
4,989
def read_tags(filename):
    with open(filename) as f:
        ast_tree = ast.parse(f.read(), filename)
    res = {}
    for node in ast.walk(ast_tree):
        if type(node) is not ast.Assign:
            continue
        target = node.targets[0]
        if type(target) is not ast.Name:
            continue
        if not (target.id.startswith('__') and target.id.endswith('__')):
            continue
        name = target.id[2:-2]
        res[name] = ast.literal_eval(node.value)
    return res
Reads the values of magic tags defined in the given Python file.
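A self-contained check of the tag reader above; it writes a throwaway module first, so nothing here depends on an existing file:

    import tempfile

    with tempfile.NamedTemporaryFile('w', suffix='.py', delete=False) as tmp:
        tmp.write("__version__ = '1.2.3'\n__author__ = 'Jane Doe'\n")

    print(read_tags(tmp.name))  # {'version': '1.2.3', 'author': 'Jane Doe'}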
4,990
def word_tokenize(text, stopwords=_stopwords, ngrams=None, min_length=0,
                  ignore_numeric=True):
    if ngrams is None:
        ngrams = 1
    text = re.sub(re.compile('\'s'), '', text)
    text = re.sub(_re_punctuation, '', text)
    matched_tokens = re.findall(_re_token, text.lower())
    for tokens in get_ngrams(matched_tokens, ngrams):
        for i in range(len(tokens)):
            tokens[i] = tokens[i].strip(punctuation)
            if len(tokens[i]) < min_length or tokens[i] in stopwords:
                break
            if ignore_numeric and isnumeric(tokens[i]):
                break
        else:
            yield tuple(tokens)
Parses the given text and yields tokens which represent words within the given text. Tokens are assumed to be divided by any form of whitespace character.
4,991
def cmake_setup():
    cmake_exe = shutil.which('cmake')
    if not cmake_exe:
        raise FileNotFoundError('CMake not available')
    wopts = (['-G', 'MinGW Makefiles', '-DCMAKE_SH="CMAKE_SH-NOTFOUND']
             if os.name == 'nt' else [])
    subprocess.check_call([cmake_exe] + wopts + [str(SRCDIR)], cwd=BINDIR)
    ret = subprocess.run([cmake_exe, '--build', str(BINDIR)],
                         stderr=subprocess.PIPE,
                         universal_newlines=True)
    result(ret)
Attempt to build using CMake >= 3.
4,992
def meson_setup():
    meson_exe = shutil.which('meson')
    ninja_exe = shutil.which('ninja')
    if not meson_exe or not ninja_exe:
        raise FileNotFoundError('Meson or Ninja not available')
    if not (BINDIR / 'build.ninja').is_file():
        subprocess.check_call([meson_exe, str(SRCDIR)], cwd=BINDIR)
    ret = subprocess.run(ninja_exe, cwd=BINDIR,
                         stderr=subprocess.PIPE,
                         universal_newlines=True)
    result(ret)
Attempt to build with Meson + Ninja.
4,993
def add_term_occurrence(self, term, document):
    if document not in self._documents:
        self._documents[document] = 0
    if term not in self._terms:
        if self._freeze:
            return
        else:
            self._terms[term] = collections.Counter()
    if document not in self._terms[term]:
        self._terms[term][document] = 0
    self._documents[document] += 1
    self._terms[term][document] += 1
Adds an occurrence of the term in the specified document.
4,994
def get_total_term_frequency(self, term):
    if term not in self._terms:
        raise IndexError(TERM_DOES_NOT_EXIST)
    return sum(self._terms[term].values())
Gets the frequency of the specified term in the entire corpus added to the HashedIndex.
4,995
def get_term_frequency(self, term, document, normalized=False):
    if document not in self._documents:
        raise IndexError(DOCUMENT_DOES_NOT_EXIST)
    if term not in self._terms:
        raise IndexError(TERM_DOES_NOT_EXIST)
    result = self._terms[term].get(document, 0)
    if normalized:
        result /= self.get_document_length(document)
    return float(result)
Returns the frequency of the term specified in the document.
4,996
def get_document_frequency(self, term):
    if term not in self._terms:
        raise IndexError(TERM_DOES_NOT_EXIST)
    else:
        return len(self._terms[term])
Returns the number of documents the specified term appears in.
4,997
def get_document_length(self, document):
    if document in self._documents:
        return self._documents[document]
    else:
        raise IndexError(DOCUMENT_DOES_NOT_EXIST)
Returns the number of terms found within the specified document.
4,998
def get_documents(self, term):
    if term not in self._terms:
        raise IndexError(TERM_DOES_NOT_EXIST)
    else:
        return self._terms[term]
Returns all documents related to the specified term in the form of a Counter object.
4,999
def get_tfidf(self, term, document, normalized=False):
    tf = self.get_term_frequency(term, document)
    if tf != 0.0:
        df = 1 + self.get_document_frequency(term)
        n = 2 + len(self._documents)
        if normalized:
            tf /= self.get_document_length(document)
        return tf * math.log10(n / df)
    else:
        return 0.0
Returns the term frequency-inverse document frequency (tf-idf) value for the given term in the specified document. If normalized is True, term frequency will be divided by the document length.
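A worked check of the smoothed formula above with a hypothetical one-document corpus: tf = 3, df = 1 + 1 = 2, n = 2 + 1 = 3, so the score is 3 * log10(3/2) ≈ 0.528. The class name HashedIndex is taken from the neighboring docstrings; the rest follows directly from the methods in these rows:

    index = HashedIndex()  # class owning the methods above
    for _ in range(3):
        index.add_term_occurrence('cat', 'doc1')
    print(round(index.get_tfidf('cat', 'doc1'), 3))  # 0.528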