idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
15,000
def configuration ( parent_package = '' , top_path = None ) : config = Configuration ( '' , parent_package , top_path ) F95FLAGS = get_compiler_flags ( ) kwargs = { 'libraries' : [ ] , 'include_dirs' : [ ] , 'library_dirs' : [ ] , } kwargs [ 'extra_compile_args' ] = F95FLAGS kwargs [ 'f2py_options' ] = [ '--quiet' ] compiler = FCompiler ( get_default_fcompiler ( ) ) compiler . src_extensions . append ( '.F95' ) compiler . language_map [ '.F95' ] = 'f90' files = os . listdir ( 'src' ) exclude_sources = [ 'PlanetsConstants.f95' , 'PythonWrapper.f95' ] sources = [ os . path . join ( 'src' , file ) for file in files if file . lower ( ) . endswith ( ( '.f95' , '.c' ) ) and file not in exclude_sources ] build_lib_dir = "{dirname}.{platform}-{version[0]}.{version[1]}" dirparams = { 'dirname' : 'temp' , 'platform' : sysconfig . get_platform ( ) , 'version' : sys . version_info } libdir = os . path . join ( 'build' , build_lib_dir . format ( ** dirparams ) ) print ( 'searching SHTOOLS in:' , libdir ) config . add_library ( 'SHTOOLS' , sources = sources , ** kwargs ) kwargs [ 'libraries' ] . extend ( [ 'SHTOOLS' ] ) kwargs [ 'include_dirs' ] . extend ( [ libdir ] ) kwargs [ 'library_dirs' ] . extend ( [ libdir ] ) fftw_info = get_info ( 'fftw' , notfound_action = 2 ) dict_append ( kwargs , ** fftw_info ) if sys . platform != 'win32' : kwargs [ 'libraries' ] . extend ( [ 'm' ] ) lapack_info = get_info ( 'lapack_opt' , notfound_action = 2 ) blas_info = get_info ( 'blas_opt' , notfound_action = 2 ) dict_append ( kwargs , ** blas_info ) dict_append ( kwargs , ** lapack_info ) config . add_extension ( 'pyshtools._SHTOOLS' , sources = [ 'src/pyshtools.pyf' , 'src/PythonWrapper.f95' ] , ** kwargs ) return config
Configure all packages that need to be built .
15,001
def _time_variable_part ( epoch , ref_epoch , trnd , periodic ) : delta_t = epoch - ref_epoch trend = trnd * delta_t periodic_sum = _np . zeros_like ( trnd ) for period in periodic : for trifunc in periodic [ period ] : coeffs = periodic [ period ] [ trifunc ] if trifunc == 'acos' : periodic_sum += coeffs * _np . cos ( 2 * _np . pi / period * delta_t ) elif trifunc == 'asin' : periodic_sum += coeffs * _np . sin ( 2 * _np . pi / period * delta_t ) return trend + periodic_sum
Return sum of the time - variable part of the coefficients
15,002
def modify_subroutine ( subroutine ) : subroutine [ 'use' ] = { 'shtools' : { 'map' : { subroutine [ 'name' ] : subroutine [ 'name' ] } , 'only' : 1 } } for varname , varattribs in subroutine [ 'vars' ] . items ( ) : if varname == subroutine [ 'name' ] : subroutine [ 'vars' ] [ 'py' + varname ] = subroutine [ 'vars' ] . pop ( varname ) varname = 'py' + varname if has_assumed_shape ( varattribs ) : make_explicit ( subroutine , varname , varattribs ) subroutine [ 'name' ] = 'py' + subroutine [ 'name' ]
loops through variables of a subroutine and modifies them
15,003
def figstyle ( rel_width = 0.75 , screen_dpi = 114 , aspect_ratio = 4 / 3 , max_width = 7.48031 ) : width_x = max_width * rel_width width_y = max_width * rel_width / aspect_ratio shtools = { 'font.size' : 10 , 'font.family' : 'sans-serif' , 'font.sans-serif' : [ 'Myriad Pro' , 'DejaVu Sans' , 'Bitstream Vera Sans' , 'Verdana' , 'Arial' , 'Helvetica' ] , 'axes.titlesize' : 10 , 'axes.labelsize' : 10 , 'xtick.labelsize' : 8 , 'ytick.labelsize' : 8 , 'legend.fontsize' : 9 , 'text.usetex' : False , 'axes.formatter.limits' : ( - 3 , 3 ) , 'figure.dpi' : screen_dpi , 'figure.figsize' : ( width_x , width_y ) , 'axes.linewidth' : 1 , 'lines.linewidth' : 1.5 , 'xtick.major.width' : 0.6 , 'ytick.major.width' : 0.6 , 'xtick.minor.width' : 0.6 , 'xtick.minor.width' : 0.6 , 'xtick.top' : True , 'ytick.right' : True , 'grid.linewidth' : 0.3 , 'grid.color' : 'k' , 'grid.linestyle' : '-' , 'legend.framealpha' : 1. , 'legend.edgecolor' : 'k' , 'image.lut' : 65536 , 'savefig.bbox' : 'tight' , 'savefig.pad_inches' : 0.02 , 'savefig.dpi' : 600 , 'savefig.format' : 'pdf' } _plt . style . use ( [ shtools ] )
Set matplotlib parameters for creating publication quality graphics .
15,004
def from_zeros ( self , lmax , kind = 'real' , normalization = '4pi' , csphase = 1 ) : if kind . lower ( ) not in ( 'real' , 'complex' ) : raise ValueError ( "Kind must be 'real' or 'complex'. " + "Input value was {:s}." . format ( repr ( kind ) ) ) if normalization . lower ( ) not in ( '4pi' , 'ortho' , 'schmidt' , 'unnorm' ) : raise ValueError ( "The normalization must be '4pi', 'ortho', 'schmidt', " + "or 'unnorm'. Input value was {:s}." . format ( repr ( normalization ) ) ) if csphase != 1 and csphase != - 1 : raise ValueError ( "csphase must be either 1 or -1. Input value was {:s}." . format ( repr ( csphase ) ) ) if normalization . lower ( ) == 'unnorm' and lmax > 85 : _warnings . warn ( "Calculations using unnormalized coefficients " + "are stable only for degrees less than or equal " + "to 85. lmax for the coefficients will be set to " + "85. Input value was {:d}." . format ( lmax ) , category = RuntimeWarning ) lmax = 85 nl = lmax + 1 if kind . lower ( ) == 'real' : coeffs = _np . zeros ( ( 2 , nl , nl ) ) else : coeffs = _np . zeros ( ( 2 , nl , nl ) , dtype = complex ) for cls in self . __subclasses__ ( ) : if cls . istype ( kind ) : return cls ( coeffs , normalization = normalization . lower ( ) , csphase = csphase )
Initialize class with spherical harmonic coefficients set to zero from degree 0 to lmax .
15,005
def to_file ( self , filename , format = 'shtools' , header = None , ** kwargs ) : if format is 'shtools' : with open ( filename , mode = 'w' ) as file : if header is not None : file . write ( header + '\n' ) for l in range ( self . lmax + 1 ) : for m in range ( l + 1 ) : file . write ( '{:d}, {:d}, {:.16e}, {:.16e}\n' . format ( l , m , self . coeffs [ 0 , l , m ] , self . coeffs [ 1 , l , m ] ) ) elif format is 'npy' : _np . save ( filename , self . coeffs , ** kwargs ) else : raise NotImplementedError ( 'format={:s} not implemented' . format ( repr ( format ) ) )
Save raw spherical harmonic coefficients to a file .
15,006
def to_array ( self , normalization = None , csphase = None , lmax = None ) : if normalization is None : normalization = self . normalization if csphase is None : csphase = self . csphase if lmax is None : lmax = self . lmax coeffs = _convert ( self . coeffs , normalization_in = self . normalization , normalization_out = normalization , csphase_in = self . csphase , csphase_out = csphase , lmax = lmax ) return coeffs
Return spherical harmonic coefficients as a numpy array .
15,007
def volume ( self , lmax = None ) : if self . coeffs [ 0 , 0 , 0 ] == 0 : raise ValueError ( 'The volume of the object can not be calculated ' 'when the degree and order 0 term is equal to ' 'zero.' ) if self . kind == 'complex' : raise ValueError ( 'The volume of the object can not be calculated ' 'for complex functions.' ) if lmax is None : lmax = self . lmax r0 = self . coeffs [ 0 , 0 , 0 ] grid = self . expand ( lmax = 3 * lmax ) - r0 h200 = ( grid ** 2 ) . expand ( lmax_calc = 0 ) . coeffs [ 0 , 0 , 0 ] h300 = ( grid ** 3 ) . expand ( lmax_calc = 0 ) . coeffs [ 0 , 0 , 0 ] volume = 4 * _np . pi / 3 * ( h300 + 3 * r0 * h200 + r0 ** 3 ) return volume
If the function is the real shape of an object calculate the volume of the body .
15,008
def rotate ( self , alpha , beta , gamma , degrees = True , convention = 'y' , body = False , dj_matrix = None ) : if type ( convention ) != str : raise ValueError ( 'convention must be a string. ' + 'Input type was {:s}' . format ( str ( type ( convention ) ) ) ) if convention . lower ( ) not in ( 'x' , 'y' ) : raise ValueError ( "convention must be either 'x' or 'y'. " + "Provided value was {:s}" . format ( repr ( convention ) ) ) if convention is 'y' : if body is True : angles = _np . array ( [ - gamma , - beta , - alpha ] ) else : angles = _np . array ( [ alpha , beta , gamma ] ) elif convention is 'x' : if body is True : angles = _np . array ( [ - gamma - _np . pi / 2 , - beta , - alpha + _np . pi / 2 ] ) else : angles = _np . array ( [ alpha - _np . pi / 2 , beta , gamma + _np . pi / 2 ] ) if degrees : angles = _np . radians ( angles ) if self . lmax > 1200 : _warnings . warn ( "The rotate() method is accurate only to about" + " spherical harmonic degree 1200. " + "lmax = {:d}" . format ( self . lmax ) , category = RuntimeWarning ) rot = self . _rotate ( angles , dj_matrix ) return rot
Rotate either the coordinate system used to express the spherical harmonic coefficients or the physical body and return a new class instance .
15,009
def convert ( self , normalization = None , csphase = None , lmax = None , kind = None , check = True ) : if normalization is None : normalization = self . normalization if csphase is None : csphase = self . csphase if lmax is None : lmax = self . lmax if kind is None : kind = self . kind if type ( normalization ) != str : raise ValueError ( 'normalization must be a string. ' + 'Input type was {:s}' . format ( str ( type ( normalization ) ) ) ) if normalization . lower ( ) not in ( '4pi' , 'ortho' , 'schmidt' , 'unnorm' ) : raise ValueError ( "normalization must be '4pi', 'ortho', 'schmidt', or " + "'unnorm'. Provided value was {:s}" . format ( repr ( normalization ) ) ) if csphase != 1 and csphase != - 1 : raise ValueError ( "csphase must be 1 or -1. Input value was {:s}" . format ( repr ( csphase ) ) ) if ( kind != self . kind ) : if ( kind == 'complex' ) : temp = self . _make_complex ( ) else : temp = self . _make_real ( check = check ) coeffs = temp . to_array ( normalization = normalization . lower ( ) , csphase = csphase , lmax = lmax ) else : coeffs = self . to_array ( normalization = normalization . lower ( ) , csphase = csphase , lmax = lmax ) return SHCoeffs . from_array ( coeffs , normalization = normalization . lower ( ) , csphase = csphase , copy = False )
Return a SHCoeffs class instance with a different normalization convention .
15,010
def expand ( self , grid = 'DH' , lat = None , colat = None , lon = None , degrees = True , zeros = None , lmax = None , lmax_calc = None ) : if lat is not None and colat is not None : raise ValueError ( 'lat and colat can not both be specified.' ) if lat is not None and lon is not None : if lmax_calc is None : lmax_calc = self . lmax values = self . _expand_coord ( lat = lat , lon = lon , degrees = degrees , lmax_calc = lmax_calc ) return values if colat is not None and lon is not None : if lmax_calc is None : lmax_calc = self . lmax if type ( colat ) is list : lat = list ( map ( lambda x : 90 - x , colat ) ) else : lat = 90 - colat values = self . _expand_coord ( lat = lat , lon = lon , degrees = degrees , lmax_calc = lmax_calc ) return values else : if lmax is None : lmax = self . lmax if lmax_calc is None : lmax_calc = lmax if type ( grid ) != str : raise ValueError ( 'grid must be a string. ' + 'Input type was {:s}' . format ( str ( type ( grid ) ) ) ) if grid . upper ( ) in ( 'DH' , 'DH1' ) : gridout = self . _expandDH ( sampling = 1 , lmax = lmax , lmax_calc = lmax_calc ) elif grid . upper ( ) == 'DH2' : gridout = self . _expandDH ( sampling = 2 , lmax = lmax , lmax_calc = lmax_calc ) elif grid . upper ( ) == 'GLQ' : gridout = self . _expandGLQ ( zeros = zeros , lmax = lmax , lmax_calc = lmax_calc ) else : raise ValueError ( "grid must be 'DH', 'DH1', 'DH2', or 'GLQ'. " + "Input value was {:s}" . format ( repr ( grid ) ) ) return gridout
Evaluate the spherical harmonic coefficients either on a global grid or for a list of coordinates .
15,011
def _make_complex ( self ) : rcomplex_coeffs = _shtools . SHrtoc ( self . coeffs , convention = 1 , switchcs = 0 ) complex_coeffs = _np . zeros ( ( 2 , self . lmax + 1 , self . lmax + 1 ) , dtype = 'complex' ) complex_coeffs [ 0 , : , : ] = ( rcomplex_coeffs [ 0 , : , : ] + 1j * rcomplex_coeffs [ 1 , : , : ] ) complex_coeffs [ 1 , : , : ] = complex_coeffs [ 0 , : , : ] . conjugate ( ) for m in self . degrees ( ) : if m % 2 == 1 : complex_coeffs [ 1 , : , m ] = - complex_coeffs [ 1 , : , m ] return SHCoeffs . from_array ( complex_coeffs , normalization = self . normalization , csphase = self . csphase , copy = False )
Convert the real SHCoeffs class to the complex class .
15,012
def _expand_coord ( self , lat , lon , lmax_calc , degrees ) : if self . normalization == '4pi' : norm = 1 elif self . normalization == 'schmidt' : norm = 2 elif self . normalization == 'unnorm' : norm = 3 elif self . normalization == 'ortho' : norm = 4 else : raise ValueError ( "Normalization must be '4pi', 'ortho', 'schmidt', or " + "'unnorm'. Input value was {:s}" . format ( repr ( self . normalization ) ) ) if degrees is True : latin = lat lonin = lon else : latin = _np . rad2deg ( lat ) lonin = _np . rad2deg ( lon ) if type ( lat ) is not type ( lon ) : raise ValueError ( 'lat and lon must be of the same type. ' + 'Input types are {:s} and {:s}' . format ( repr ( type ( lat ) ) , repr ( type ( lon ) ) ) ) if type ( lat ) is int or type ( lat ) is float or type ( lat ) is _np . float_ : return _shtools . MakeGridPoint ( self . coeffs , lat = latin , lon = lonin , lmax = lmax_calc , norm = norm , csphase = self . csphase ) elif type ( lat ) is _np . ndarray : values = _np . empty_like ( lat , dtype = float ) for v , latitude , longitude in _np . nditer ( [ values , latin , lonin ] , op_flags = [ 'readwrite' ] ) : v [ ... ] = _shtools . MakeGridPoint ( self . coeffs , lat = latitude , lon = longitude , lmax = lmax_calc , norm = norm , csphase = self . csphase ) return values elif type ( lat ) is list : values = [ ] for latitude , longitude in zip ( latin , lonin ) : values . append ( _shtools . MakeGridPoint ( self . coeffs , lat = latitude , lon = longitude , lmax = lmax_calc , norm = norm , csphase = self . csphase ) ) return values else : raise ValueError ( 'lat and lon must be either an int, float, ' + 'ndarray, or list. ' + 'Input types are {:s} and {:s}' . format ( repr ( type ( lat ) ) , repr ( type ( lon ) ) ) )
Evaluate the function at the coordinates lat and lon .
15,013
def _make_real ( self , check = True ) : if check : for l in self . degrees ( ) : if self . coeffs [ 0 , l , 0 ] != self . coeffs [ 0 , l , 0 ] . conjugate ( ) : raise RuntimeError ( 'Complex coefficients do not ' + 'correspond to a real field. ' + 'l = {:d}, m = 0: {:e}' . format ( l , self . coeffs [ 0 , l , 0 ] ) ) for m in _np . arange ( 1 , l + 1 ) : if m % 2 == 1 : if ( self . coeffs [ 0 , l , m ] != - self . coeffs [ 1 , l , m ] . conjugate ( ) ) : raise RuntimeError ( 'Complex coefficients do not ' + 'correspond to a real field. ' + 'l = {:d}, m = {:d}: {:e}, {:e}' . format ( l , m , self . coeffs [ 0 , l , 0 ] , self . coeffs [ 1 , l , 0 ] ) ) else : if ( self . coeffs [ 0 , l , m ] != self . coeffs [ 1 , l , m ] . conjugate ( ) ) : raise RuntimeError ( 'Complex coefficients do not ' + 'correspond to a real field. ' + 'l = {:d}, m = {:d}: {:e}, {:e}' . format ( l , m , self . coeffs [ 0 , l , 0 ] , self . coeffs [ 1 , l , 0 ] ) ) coeffs_rc = _np . zeros ( ( 2 , self . lmax + 1 , self . lmax + 1 ) ) coeffs_rc [ 0 , : , : ] = self . coeffs [ 0 , : , : ] . real coeffs_rc [ 1 , : , : ] = self . coeffs [ 0 , : , : ] . imag real_coeffs = _shtools . SHctor ( coeffs_rc , convention = 1 , switchcs = 0 ) return SHCoeffs . from_array ( real_coeffs , normalization = self . normalization , csphase = self . csphase )
Convert the complex SHCoeffs class to the real class .
15,014
def _expandGLQ ( self , zeros , lmax , lmax_calc ) : if self . normalization == '4pi' : norm = 1 elif self . normalization == 'schmidt' : norm = 2 elif self . normalization == 'unnorm' : norm = 3 elif self . normalization == 'ortho' : norm = 4 else : raise ValueError ( "Normalization must be '4pi', 'ortho', 'schmidt', or " + "'unnorm'. Input value was {:s}" . format ( repr ( self . normalization ) ) ) if zeros is None : zeros , weights = _shtools . SHGLQ ( self . lmax ) data = _shtools . MakeGridGLQC ( self . coeffs , zeros , norm = norm , csphase = self . csphase , lmax = lmax , lmax_calc = lmax_calc ) gridout = SHGrid . from_array ( data , grid = 'GLQ' , copy = False ) return gridout
Evaluate the coefficients on a Gauss - Legendre quadrature grid .
15,015
def from_array ( self , array , grid = 'DH' , copy = True ) : if _np . iscomplexobj ( array ) : kind = 'complex' else : kind = 'real' if type ( grid ) != str : raise ValueError ( 'grid must be a string. ' + 'Input type was {:s}' . format ( str ( type ( grid ) ) ) ) if grid . upper ( ) not in set ( [ 'DH' , 'GLQ' ] ) : raise ValueError ( "grid must be 'DH' or 'GLQ'. Input value was {:s}." . format ( repr ( grid ) ) ) for cls in self . __subclasses__ ( ) : if cls . istype ( kind ) and cls . isgrid ( grid ) : return cls ( array , copy = copy )
Initialize the class instance from an input array .
15,016
def from_file ( self , fname , binary = False , ** kwargs ) : if binary is False : data = _np . loadtxt ( fname , ** kwargs ) elif binary is True : data = _np . load ( fname , ** kwargs ) else : raise ValueError ( 'binary must be True or False. ' 'Input value is {:s}' . format ( binary ) ) if _np . iscomplexobj ( data ) : kind = 'complex' else : kind = 'real' if ( data . shape [ 1 ] == data . shape [ 0 ] ) or ( data . shape [ 1 ] == 2 * data . shape [ 0 ] ) : grid = 'DH' elif data . shape [ 1 ] == 2 * data . shape [ 0 ] - 1 : grid = 'GLQ' else : raise ValueError ( 'Input grid must be dimensioned as ' + '(nlat, nlon). For DH grids, nlon = nlat or ' + 'nlon = 2 * nlat. For GLQ grids, nlon = ' + '2 * nlat - 1. Input dimensions are nlat = ' + '{:d}, nlon = {:d}' . format ( data . shape [ 0 ] , data . shape [ 1 ] ) ) for cls in self . __subclasses__ ( ) : if cls . istype ( kind ) and cls . isgrid ( grid ) : return cls ( data )
Initialize the class instance from gridded data in a file .
15,017
def to_file ( self , filename , binary = False , ** kwargs ) : if binary is False : _np . savetxt ( filename , self . data , ** kwargs ) elif binary is True : _np . save ( filename , self . data , ** kwargs ) else : raise ValueError ( 'binary must be True or False. ' 'Input value is {:s}' . format ( binary ) )
Save gridded data to a file .
15,018
def lats ( self , degrees = True ) : if degrees is False : return _np . radians ( self . _lats ( ) ) else : return self . _lats ( )
Return the latitudes of each row of the gridded data .
15,019
def lons ( self , degrees = True ) : if degrees is False : return _np . radians ( self . _lons ( ) ) else : return self . _lons ( )
Return the longitudes of each column of the gridded data .
15,020
def expand ( self , normalization = '4pi' , csphase = 1 , ** kwargs ) : if type ( normalization ) != str : raise ValueError ( 'normalization must be a string. ' + 'Input type was {:s}' . format ( str ( type ( normalization ) ) ) ) if normalization . lower ( ) not in ( '4pi' , 'ortho' , 'schmidt' , 'unnorm' ) : raise ValueError ( "The normalization must be '4pi', 'ortho', 'schmidt', " + "or 'unnorm'. Input value was {:s}." . format ( repr ( normalization ) ) ) if csphase != 1 and csphase != - 1 : raise ValueError ( "csphase must be either 1 or -1. Input value was {:s}." . format ( repr ( csphase ) ) ) return self . _expand ( normalization = normalization , csphase = csphase , ** kwargs )
Expand the grid into spherical harmonics .
15,021
def _expand ( self , normalization , csphase , ** kwargs ) : if normalization . lower ( ) == '4pi' : norm = 1 elif normalization . lower ( ) == 'schmidt' : norm = 2 elif normalization . lower ( ) == 'unnorm' : norm = 3 elif normalization . lower ( ) == 'ortho' : norm = 4 else : raise ValueError ( "The normalization must be '4pi', 'ortho', 'schmidt', " + "or 'unnorm'. Input value was {:s}." . format ( repr ( normalization ) ) ) cilm = _shtools . SHExpandDH ( self . data , norm = norm , csphase = csphase , sampling = self . sampling , ** kwargs ) coeffs = SHCoeffs . from_array ( cilm , normalization = normalization . lower ( ) , csphase = csphase , copy = False ) return coeffs
Expand the grid into real spherical harmonics .
15,022
def plot ( self , colorbar = True , cb_orientation = 'vertical' , cb_label = 'geoid, m' , show = True , ** kwargs ) : return self . geoid . plot ( colorbar = colorbar , cb_orientation = cb_orientation , cb_label = cb_label , show = True , ** kwargs )
Plot the geoid .
15,023
def _yyyymmdd_to_year_fraction ( date ) : date = str ( date ) if '.' in date : date , residual = str ( date ) . split ( '.' ) residual = float ( '0.' + residual ) else : residual = 0.0 date = _datetime . datetime . strptime ( date , '%Y%m%d' ) date += _datetime . timedelta ( days = residual ) year = date . year year_start = _datetime . datetime ( year = year , month = 1 , day = 1 ) next_year_start = _datetime . datetime ( year = year + 1 , month = 1 , day = 1 ) year_duration = next_year_start - year_start year_elapsed = date - year_start fraction = year_elapsed / year_duration return year + fraction
Convert YYYMMDD . DD date string or float to YYYY . YYY
15,024
def example ( ) : infile = os . path . join ( os . path . dirname ( __file__ ) , '../../ExampleDataFiles/MarsTopo719.shape' ) coeffs , lmax = shio . shread ( infile ) grid = expand . MakeGridDH ( coeffs , csphase = - 1 ) fig_map = plt . figure ( ) plt . imshow ( grid ) ls = np . arange ( lmax + 1 ) pspectrum = spectralanalysis . spectrum ( coeffs , unit = 'per_l' ) pdensity = spectralanalysis . spectrum ( coeffs , unit = 'per_lm' ) fig_spectrum , ax = plt . subplots ( 1 , 1 ) ax . set_xscale ( 'log' ) ax . set_yscale ( 'log' ) ax . set_xlabel ( 'degree l' ) ax . grid ( True , which = 'both' ) ax . plot ( ls [ 1 : ] , pspectrum [ 1 : ] , label = 'power per degree l' ) ax . plot ( ls [ 1 : ] , pdensity [ 1 : ] , label = 'power per degree l and order m' ) ax . legend ( ) fig_map . savefig ( 'SHRtopography_mars.png' ) fig_spectrum . savefig ( 'SHRspectrum_mars.png' ) print ( 'mars topography and spectrum saved' )
example that plots the power spectrum of Mars topography data
15,025
def from_zeros ( self , lmax , gm , r0 , omega = None , errors = False , normalization = '4pi' , csphase = 1 ) : if normalization . lower ( ) not in ( '4pi' , 'ortho' , 'schmidt' , 'unnorm' ) : raise ValueError ( "The normalization must be '4pi', 'ortho', 'schmidt', " "or 'unnorm'. Input value was {:s}." . format ( repr ( normalization ) ) ) if csphase != 1 and csphase != - 1 : raise ValueError ( "csphase must be either 1 or -1. Input value was {:s}." . format ( repr ( csphase ) ) ) if normalization . lower ( ) == 'unnorm' and lmax > 85 : _warnings . warn ( "Calculations using unnormalized coefficients " "are stable only for degrees less than or equal " "to 85. lmax for the coefficients will be set to " "85. Input value was {:d}." . format ( lmax ) , category = RuntimeWarning ) lmax = 85 coeffs = _np . zeros ( ( 2 , lmax + 1 , lmax + 1 ) ) coeffs [ 0 , 0 , 0 ] = 1.0 if errors is False : clm = SHGravRealCoeffs ( coeffs , gm = gm , r0 = r0 , omega = omega , normalization = normalization . lower ( ) , csphase = csphase ) else : clm = SHGravRealCoeffs ( coeffs , gm = gm , r0 = r0 , omega = omega , errors = _np . zeros ( ( 2 , lmax + 1 , lmax + 1 ) ) , normalization = normalization . lower ( ) , csphase = csphase ) return clm
Initialize the class with spherical harmonic coefficients set to zero from degree 1 to lmax and set the degree 0 term to 1 .
15,026
def from_shape ( self , shape , rho , gm , nmax = 7 , lmax = None , lmax_grid = None , lmax_calc = None , omega = None ) : mass = gm / _G . value if type ( shape ) is not _SHRealCoeffs and type ( shape ) is not _DHRealGrid : raise ValueError ( 'shape must be of type SHRealCoeffs ' 'or DHRealGrid. Input type is {:s}' . format ( repr ( type ( shape ) ) ) ) if ( not issubclass ( type ( rho ) , float ) and type ( rho ) is not int and type ( rho ) is not _np . ndarray and type ( rho ) is not _SHRealCoeffs and type ( rho is not _DHRealGrid ) ) : raise ValueError ( 'rho must be of type float, int, ndarray, ' 'SHRealCoeffs or DHRealGrid. Input type is {:s}' . format ( repr ( type ( rho ) ) ) ) if type ( shape ) is _SHRealCoeffs : shape = shape . expand ( lmax = lmax_grid , lmax_calc = lmax_calc ) if type ( rho ) is _SHRealCoeffs : rho = rho . expand ( lmax = lmax_grid , lmax_calc = lmax_calc ) if type ( rho ) is _DHRealGrid : if shape . lmax != rho . lmax : raise ValueError ( 'The grids for shape and rho must have the ' 'same size. ' 'lmax of shape = {:d}, lmax of rho = {:d}' . format ( shape . lmax , rho . lmax ) ) cilm , d = _CilmPlusRhoHDH ( shape . data , nmax , mass , rho . data , lmax = lmax ) else : cilm , d = _CilmPlusDH ( shape . data , nmax , mass , rho , lmax = lmax ) clm = SHGravRealCoeffs ( cilm , gm = gm , r0 = d , omega = omega , normalization = '4pi' , csphase = 1 ) return clm
Initialize a class of gravitational potential spherical harmonic coefficients by calculuting the gravitational potential associatiated with relief along an interface .
15,027
def to_file ( self , filename , format = 'shtools' , header = None , errors = False , ** kwargs ) : if format is 'shtools' : if errors is True and self . errors is None : raise ValueError ( 'Can not save errors when then have not been ' 'initialized.' ) if self . omega is None : omega = 0. else : omega = self . omega with open ( filename , mode = 'w' ) as file : if header is not None : file . write ( header + '\n' ) file . write ( '{:.16e}, {:.16e}, {:.16e}, {:d}\n' . format ( self . r0 , self . gm , omega , self . lmax ) ) for l in range ( self . lmax + 1 ) : for m in range ( l + 1 ) : if errors is True : file . write ( '{:d}, {:d}, {:.16e}, {:.16e}, ' '{:.16e}, {:.16e}\n' . format ( l , m , self . coeffs [ 0 , l , m ] , self . coeffs [ 1 , l , m ] , self . errors [ 0 , l , m ] , self . errors [ 1 , l , m ] ) ) else : file . write ( '{:d}, {:d}, {:.16e}, {:.16e}\n' . format ( l , m , self . coeffs [ 0 , l , m ] , self . coeffs [ 1 , l , m ] ) ) elif format is 'npy' : _np . save ( filename , self . coeffs , ** kwargs ) else : raise NotImplementedError ( 'format={:s} not implemented' . format ( repr ( format ) ) )
Save spherical harmonic coefficients to a file .
15,028
def change_ref ( self , gm = None , r0 = None , lmax = None ) : if lmax is None : lmax = self . lmax clm = self . pad ( lmax ) if gm is not None and gm != self . gm : clm . coeffs *= self . gm / gm clm . gm = gm if self . errors is not None : clm . errors *= self . gm / gm if r0 is not None and r0 != self . r0 : for l in _np . arange ( lmax + 1 ) : clm . coeffs [ : , l , : l + 1 ] *= ( self . r0 / r0 ) ** l if self . errors is not None : clm . errors [ : , l , : l + 1 ] *= ( self . r0 / r0 ) ** l clm . r0 = r0 return clm
Return a new SHGravCoeffs class instance with a different reference gm or r0 .
15,029
def expand ( self , a = None , f = None , lmax = None , lmax_calc = None , normal_gravity = True , sampling = 2 ) : if a is None : a = self . r0 if f is None : f = 0. if normal_gravity is True : ng = 1 else : ng = 0 if lmax is None : lmax = self . lmax if lmax_calc is None : lmax_calc = lmax if self . errors is not None : coeffs , errors = self . to_array ( normalization = '4pi' , csphase = 1 ) else : coeffs = self . to_array ( normalization = '4pi' , csphase = 1 ) rad , theta , phi , total , pot = _MakeGravGridDH ( coeffs , self . gm , self . r0 , a = a , f = f , lmax = lmax , lmax_calc = lmax_calc , sampling = sampling , omega = self . omega , normal_gravity = ng ) return _SHGravGrid ( rad , theta , phi , total , pot , self . gm , a , f , self . omega , normal_gravity , lmax , lmax_calc )
Create 2D cylindrical maps on a flattened and rotating ellipsoid of all three components of the gravity field the gravity disturbance and the gravitational potential and return as a SHGravGrid class instance .
15,030
def tensor ( self , a = None , f = None , lmax = None , lmax_calc = None , degree0 = False , sampling = 2 ) : if a is None : a = self . r0 if f is None : f = 0. if lmax is None : lmax = self . lmax if lmax_calc is None : lmax_calc = lmax if self . errors is not None : coeffs , errors = self . to_array ( normalization = '4pi' , csphase = 1 ) else : coeffs = self . to_array ( normalization = '4pi' , csphase = 1 ) if degree0 is False : coeffs [ 0 , 0 , 0 ] = 0. vxx , vyy , vzz , vxy , vxz , vyz = _MakeGravGradGridDH ( coeffs , self . gm , self . r0 , a = a , f = f , lmax = lmax , lmax_calc = lmax_calc , sampling = sampling ) return _SHGravTensor ( 1.e9 * vxx , 1.e9 * vyy , 1.e9 * vzz , 1.e9 * vxy , 1.e9 * vxz , 1.e9 * vyz , self . gm , a , f , lmax , lmax_calc )
Create 2D cylindrical maps on a flattened ellipsoid of the 9 components of the gravity gradient tensor in a local north - oriented reference frame and return an SHGravTensor class instance .
15,031
def geoid ( self , potref , a = None , f = None , r = None , omega = None , order = 2 , lmax = None , lmax_calc = None , grid = 'DH2' ) : if a is None : a = self . r0 if f is None : f = 0. if r is None : r = self . r0 if lmax is None : lmax = self . lmax if lmax_calc is None : lmax_calc = lmax if grid . upper ( ) in ( 'DH' , 'DH1' ) : sampling = 1 elif grid . upper ( ) == 'DH2' : sampling = 2 else : raise ValueError ( "grid must be 'DH', 'DH1', or 'DH2'. " "Input value was {:s}" . format ( repr ( grid ) ) ) if self . errors is not None : coeffs , errors = self . to_array ( normalization = '4pi' , csphase = 1 ) else : coeffs = self . to_array ( normalization = '4pi' , csphase = 1 ) if omega is None : omega = self . omega geoid = _MakeGeoidGridDH ( coeffs , self . r0 , self . gm , potref , lmax = lmax , omega = omega , r = r , order = order , lmax_calc = lmax_calc , a = a , f = f , sampling = sampling ) return _SHGeoid ( geoid , self . gm , potref , a , f , omega , r , order , lmax , lmax_calc )
Create a global map of the height of the geoid and return an SHGeoid class instance .
15,032
def node_application ( self , application_id ) : path = '/ws/v1/node/apps/{appid}' . format ( appid = application_id ) return self . request ( path )
An application resource contains information about a particular application that was run or is running on this NodeManager .
15,033
def node_container ( self , container_id ) : path = '/ws/v1/node/containers/{containerid}' . format ( containerid = container_id ) return self . request ( path )
A container resource contains information about a particular container that is running on this NodeManager .
15,034
def cluster_application_statistics ( self , state_list = None , application_type_list = None ) : path = '/ws/v1/cluster/appstatistics' states = ',' . join ( state_list ) if state_list is not None else None if application_type_list is not None : application_types = ',' . join ( application_type_list ) else : application_types = None loc_args = ( ( 'states' , states ) , ( 'applicationTypes' , application_types ) ) params = self . construct_parameters ( loc_args ) return self . request ( path , ** params )
With the Application Statistics API you can obtain a collection of triples each of which contains the application type the application state and the number of applications of this type and this state in ResourceManager context .
15,035
def cluster_application ( self , application_id ) : path = '/ws/v1/cluster/apps/{appid}' . format ( appid = application_id ) return self . request ( path )
An application resource contains information about a particular application that was submitted to a cluster .
15,036
def cluster_application_attempts ( self , application_id ) : path = '/ws/v1/cluster/apps/{appid}/appattempts' . format ( appid = application_id ) return self . request ( path )
With the application attempts API you can obtain a collection of resources that represent an application attempt .
15,037
def cluster_application_attempt_info ( self , application_id , attempt_id ) : path = '/ws/v1/cluster/apps/{appid}/appattempts/{attemptid}' . format ( appid = application_id , attemptid = attempt_id ) return self . request ( path )
With the application attempts API you can obtain an extended info about an application attempt .
15,038
def cluster_application_state ( self , application_id ) : path = '/ws/v1/cluster/apps/{appid}/state' . format ( appid = application_id ) return self . request ( path )
With the application state API you can obtain the current state of an application .
15,039
def cluster_application_kill ( self , application_id ) : data = '{"state": "KILLED"}' path = '/ws/v1/cluster/apps/{appid}/state' . format ( appid = application_id ) return self . update ( path , data )
With the application kill API you can kill an application that is not in FINISHED or FAILED state .
15,040
def cluster_nodes ( self , state = None , healthy = None ) : path = '/ws/v1/cluster/nodes' legal_healthy = [ 'true' , 'false' ] if healthy is not None and healthy not in legal_healthy : msg = 'Valid Healthy arguments are true, false' raise IllegalArgumentError ( msg ) loc_args = ( ( 'state' , state ) , ( 'healthy' , healthy ) , ) params = self . construct_parameters ( loc_args ) return self . request ( path , ** params )
With the Nodes API you can obtain a collection of resources each of which represents a node .
15,041
def cluster_node ( self , node_id ) : path = '/ws/v1/cluster/nodes/{nodeid}' . format ( nodeid = node_id ) return self . request ( path )
A node resource contains information about a node in the cluster .
15,042
def application_information ( self , application_id ) : path = '/proxy/{appid}/ws/v1/mapreduce/info' . format ( appid = application_id ) return self . request ( path )
The MapReduce application master information resource provides overall information about that mapreduce application master . This includes application id time it was started user name etc .
15,043
def jobs ( self , application_id ) : path = '/proxy/{appid}/ws/v1/mapreduce/jobs' . format ( appid = application_id ) return self . request ( path )
The jobs resource provides a list of the jobs running on this application master .
15,044
def job ( self , application_id , job_id ) : path = '/proxy/{appid}/ws/v1/mapreduce/jobs/{jobid}' . format ( appid = application_id , jobid = job_id ) return self . request ( path )
A job resource contains information about a particular job that was started by this application master . Certain fields are only accessible if user has permissions - depends on acl settings .
15,045
def job_task ( self , application_id , job_id , task_id ) : path = '/proxy/{appid}/ws/v1/mapreduce/jobs/{jobid}/tasks/{taskid}' . format ( appid = application_id , jobid = job_id , taskid = task_id ) return self . request ( path )
A Task resource contains information about a particular task within a job .
15,046
def jobs ( self , state = None , user = None , queue = None , limit = None , started_time_begin = None , started_time_end = None , finished_time_begin = None , finished_time_end = None ) : path = '/ws/v1/history/mapreduce/jobs' legal_states = set ( [ s for s , _ in JobStateInternal ] ) if state is not None and state not in legal_states : msg = 'Job Internal State %s is illegal' % ( state , ) raise IllegalArgumentError ( msg ) loc_args = ( ( 'state' , state ) , ( 'user' , user ) , ( 'queue' , queue ) , ( 'limit' , limit ) , ( 'startedTimeBegin' , started_time_begin ) , ( 'startedTimeEnd' , started_time_end ) , ( 'finishedTimeBegin' , finished_time_begin ) , ( 'finishedTimeEnd' , finished_time_end ) ) params = self . construct_parameters ( loc_args ) return self . request ( path , ** params )
The jobs resource provides a list of the MapReduce jobs that have finished . It does not currently return a full list of parameters .
15,047
def job ( self , job_id ) : path = '/ws/v1/history/mapreduce/jobs/{jobid}' . format ( jobid = job_id ) return self . request ( path )
A Job resource contains information about a particular job identified by jobid .
15,048
def job_attempts ( self , job_id ) : path = '/ws/v1/history/mapreduce/jobs/{jobid}/jobattempts' . format ( jobid = job_id ) return self . request ( path )
With the job attempts API you can obtain a collection of resources that represent a job attempt .
15,049
def job_counters ( self , job_id ) : path = '/ws/v1/history/mapreduce/jobs/{jobid}/counters' . format ( jobid = job_id ) return self . request ( path )
With the job counters API you can object a collection of resources that represent al the counters for that job .
15,050
def job_conf ( self , job_id ) : path = '/ws/v1/history/mapreduce/jobs/{jobid}/conf' . format ( jobid = job_id ) return self . request ( path )
A job configuration resource contains information about the job configuration for this job .
15,051
def job_tasks ( self , job_id , type = None ) : path = '/ws/v1/history/mapreduce/jobs/{jobid}/tasks' . format ( jobid = job_id ) valid_types = [ 'm' , 'r' ] if type is not None and type not in valid_types : msg = 'Job type %s is illegal' % ( type , ) raise IllegalArgumentError ( msg ) params = { } if type is not None : params [ 'type' ] = type return self . request ( path , ** params )
With the tasks API you can obtain a collection of resources that represent a task within a job .
15,052
def task_attempt ( self , job_id , task_id , attempt_id ) : path = '/ws/v1/history/mapreduce/jobs/{jobid}/tasks/{taskid}/attempts/{attemptid}' . format ( jobid = job_id , taskid = task_id , attemptid = attempt_id ) return self . request ( path )
A Task Attempt resource contains information about a particular task attempt within a job .
15,053
def q_mentioned_fields ( q , model ) : query = Query ( model ) where = query . _add_q ( q , used_aliases = set ( ) , allow_joins = False ) [ 0 ] return list ( sorted ( set ( expression_mentioned_fields ( where ) ) ) )
Returns list of field names mentioned in Q object .
15,054
def set_name_with_model ( self , model ) : table_name = model . _meta . db_table column_names = [ model . _meta . get_field ( field_name ) . column for field_name , order in self . fields_orders ] column_names_with_order = [ ( ( '-%s' if order else '%s' ) % column_name ) for column_name , ( field_name , order ) in zip ( column_names , self . fields_orders ) ] hash_data = [ table_name ] + column_names_with_order + [ self . suffix ] + self . name_hash_extra_data ( ) self . name = '%s_%s_%s' % ( table_name [ : 11 ] , column_names [ 0 ] [ : 7 ] , '%s_%s' % ( self . _hash_generator ( * hash_data ) , self . suffix ) , ) assert len ( self . name ) <= self . max_name_length , ( 'Index too long for multiple database support. Is self.suffix ' 'longer than 3 characters?' ) self . check_name ( )
Sets an unique generated name for the index .
15,055
def validate_partial_unique ( self ) : unique_idxs = [ idx for idx in self . _meta . indexes if isinstance ( idx , PartialIndex ) and idx . unique ] if unique_idxs : model_fields = set ( f . name for f in self . _meta . get_fields ( include_parents = True , include_hidden = True ) ) for idx in unique_idxs : where = idx . where if not isinstance ( where , Q ) : raise ImproperlyConfigured ( 'ValidatePartialUniqueMixin is not supported for PartialIndexes with a text-based where condition. ' + 'Please upgrade to Q-object based where conditions.' ) mentioned_fields = set ( idx . fields ) | set ( query . q_mentioned_fields ( where , self . __class__ ) ) missing_fields = mentioned_fields - model_fields if missing_fields : raise RuntimeError ( 'Unable to use ValidatePartialUniqueMixin: expecting to find fields %s on model. ' + 'This is a bug in the PartialIndex definition or the django-partial-index library itself.' ) values = { field_name : getattr ( self , field_name ) for field_name in mentioned_fields } conflict = self . __class__ . objects . filter ( ** values ) conflict = conflict . filter ( where ) if self . pk : conflict = conflict . exclude ( pk = self . pk ) if conflict . exists ( ) : raise PartialUniqueValidationError ( '%s with the same values for %s already exists.' % ( self . __class__ . __name__ , ', ' . join ( sorted ( idx . fields ) ) , ) )
Check partial unique constraints on the model and raise ValidationError if any failed .
15,056
def extracts ( self , page : 'WikipediaPage' , ** kwargs ) -> str : params = { 'action' : 'query' , 'prop' : 'extracts' , 'titles' : page . title } if self . extract_format == ExtractFormat . HTML : pass elif self . extract_format == ExtractFormat . WIKI : params [ 'explaintext' ] = 1 params [ 'exsectionformat' ] = 'wiki' used_params = kwargs used_params . update ( params ) raw = self . _query ( page , used_params ) self . _common_attributes ( raw [ 'query' ] , page ) pages = raw [ 'query' ] [ 'pages' ] for k , v in pages . items ( ) : if k == '-1' : page . _attributes [ 'pageid' ] = - 1 return '' else : return self . _build_extracts ( v , page ) return ''
Returns summary of the page with respect to parameters
15,057
def langlinks ( self , page : 'WikipediaPage' , ** kwargs ) -> PagesDict : params = { 'action' : 'query' , 'prop' : 'langlinks' , 'titles' : page . title , 'lllimit' : 500 , 'llprop' : 'url' , } used_params = kwargs used_params . update ( params ) raw = self . _query ( page , used_params ) self . _common_attributes ( raw [ 'query' ] , page ) pages = raw [ 'query' ] [ 'pages' ] for k , v in pages . items ( ) : if k == '-1' : page . _attributes [ 'pageid' ] = - 1 return { } else : return self . _build_langlinks ( v , page ) return { }
Returns langlinks of the page with respect to parameters
15,058
def links ( self , page : 'WikipediaPage' , ** kwargs ) -> PagesDict : params = { 'action' : 'query' , 'prop' : 'links' , 'titles' : page . title , 'pllimit' : 500 , } used_params = kwargs used_params . update ( params ) raw = self . _query ( page , used_params ) self . _common_attributes ( raw [ 'query' ] , page ) pages = raw [ 'query' ] [ 'pages' ] for k , v in pages . items ( ) : if k == '-1' : page . _attributes [ 'pageid' ] = - 1 return { } else : while 'continue' in raw : params [ 'plcontinue' ] = raw [ 'continue' ] [ 'plcontinue' ] raw = self . _query ( page , params ) v [ 'links' ] += raw [ 'query' ] [ 'pages' ] [ k ] [ 'links' ] return self . _build_links ( v , page ) return { }
Returns links to other pages with respect to parameters
15,059
def backlinks ( self , page : 'WikipediaPage' , ** kwargs ) -> PagesDict : params = { 'action' : 'query' , 'list' : 'backlinks' , 'bltitle' : page . title , 'bllimit' : 500 , } used_params = kwargs used_params . update ( params ) raw = self . _query ( page , used_params ) self . _common_attributes ( raw [ 'query' ] , page ) v = raw [ 'query' ] while 'continue' in raw : params [ 'blcontinue' ] = raw [ 'continue' ] [ 'blcontinue' ] raw = self . _query ( page , params ) v [ 'backlinks' ] += raw [ 'query' ] [ 'backlinks' ] return self . _build_backlinks ( v , page )
Returns backlinks from other pages with respect to parameters
15,060
def categorymembers ( self , page : 'WikipediaPage' , ** kwargs ) -> PagesDict : params = { 'action' : 'query' , 'list' : 'categorymembers' , 'cmtitle' : page . title , 'cmlimit' : 500 , } used_params = kwargs used_params . update ( params ) raw = self . _query ( page , used_params ) self . _common_attributes ( raw [ 'query' ] , page ) v = raw [ 'query' ] while 'continue' in raw : params [ 'cmcontinue' ] = raw [ 'continue' ] [ 'cmcontinue' ] raw = self . _query ( page , params ) v [ 'categorymembers' ] += raw [ 'query' ] [ 'categorymembers' ] return self . _build_categorymembers ( v , page )
Returns pages in given category with respect to parameters
15,061
def full_text ( self , level : int = 1 ) -> str : res = "" if self . wiki . extract_format == ExtractFormat . WIKI : res += self . title elif self . wiki . extract_format == ExtractFormat . HTML : res += "<h{}>{}</h{}>" . format ( level , self . title , level ) else : raise NotImplementedError ( "Unknown ExtractFormat type" ) res += "\n" res += self . _text if len ( self . _text ) > 0 : res += "\n\n" for sec in self . sections : res += sec . full_text ( level + 1 ) return res
Returns text of the current section as well as all its subsections .
15,062
def sections ( self ) -> List [ WikipediaPageSection ] : if not self . _called [ 'extracts' ] : self . _fetch ( 'extracts' ) return self . _section
Returns all sections of the curent page .
15,063
def section_by_title ( self , title : str , ) -> Optional [ WikipediaPageSection ] : if not self . _called [ 'extracts' ] : self . _fetch ( 'extracts' ) return self . _section_mapping . get ( title )
Returns section of the current page with given title .
15,064
def text ( self ) -> str : txt = self . summary if len ( txt ) > 0 : txt += "\n\n" for sec in self . sections : txt += sec . full_text ( level = 2 ) return txt . strip ( )
Returns text of the current page .
15,065
async def permits ( self , identity , permission , context = None ) : user = self . user_map . get ( identity ) if not user : return False return permission in user . permissions
Check user permissions . Return True if the identity is allowed the permission in the current context else return False .
15,066
async def remember ( request , response , identity , ** kwargs ) : assert isinstance ( identity , str ) , identity assert identity identity_policy = request . config_dict . get ( IDENTITY_KEY ) if identity_policy is None : text = ( "Security subsystem is not initialized, " "call aiohttp_security.setup(...) first" ) raise web . HTTPInternalServerError ( reason = text , text = text ) await identity_policy . remember ( request , response , identity , ** kwargs )
Remember identity into response .
15,067
async def forget ( request , response ) : identity_policy = request . config_dict . get ( IDENTITY_KEY ) if identity_policy is None : text = ( "Security subsystem is not initialized, " "call aiohttp_security.setup(...) first" ) raise web . HTTPInternalServerError ( reason = text , text = text ) await identity_policy . forget ( request , response )
Forget previously remembered identity .
15,068
async def is_anonymous ( request ) : identity_policy = request . config_dict . get ( IDENTITY_KEY ) if identity_policy is None : return True identity = await identity_policy . identify ( request ) if identity is None : return True return False
Check if user is anonymous .
15,069
def login_required ( fn ) : @ wraps ( fn ) async def wrapped ( * args , ** kwargs ) : request = args [ - 1 ] if not isinstance ( request , web . BaseRequest ) : msg = ( "Incorrect decorator usage. " "Expecting `def handler(request)` " "or `def handler(self, request)`." ) raise RuntimeError ( msg ) await check_authorized ( request ) return await fn ( * args , ** kwargs ) warnings . warn ( "login_required decorator is deprecated, " "use check_authorized instead" , DeprecationWarning ) return wrapped
Decorator that restrict access only for authorized users .
15,070
async def check_permission ( request , permission , context = None ) : await check_authorized ( request ) allowed = await permits ( request , permission , context ) if not allowed : raise web . HTTPForbidden ( )
Checker that passes only to authoraised users with given permission .
15,071
def has_permission ( permission , context = None , ) : def wrapper ( fn ) : @ wraps ( fn ) async def wrapped ( * args , ** kwargs ) : request = args [ - 1 ] if not isinstance ( request , web . BaseRequest ) : msg = ( "Incorrect decorator usage. " "Expecting `def handler(request)` " "or `def handler(self, request)`." ) raise RuntimeError ( msg ) await check_permission ( request , permission , context ) return await fn ( * args , ** kwargs ) return wrapped warnings . warn ( "has_permission decorator is deprecated, " "use check_permission instead" , DeprecationWarning ) return wrapper
Decorator that restricts access only for authorized users with correct permissions .
15,072
def normal_print ( raw ) : lines = raw . split ( '\n' ) for line in lines : if line : print ( line + '\n' )
no colorful text for output .
15,073
def delete_word ( word ) : conn = sqlite3 . connect ( os . path . join ( DEFAULT_PATH , 'word.db' ) ) curs = conn . cursor ( ) curs . execute ( 'SELECT expl, pr FROM Word WHERE name = "%s"' % word ) res = curs . fetchall ( ) if res : try : curs . execute ( 'DELETE FROM Word WHERE name = "%s"' % word ) except Exception as e : print ( e ) else : print ( colored ( '%s has been deleted from database' % word , 'green' ) ) conn . commit ( ) finally : curs . close ( ) conn . close ( ) else : print ( colored ( '%s not exists in the database' % word , 'white' , 'on_red' ) )
delete the word or phrase from database .
15,074
def count_word ( arg ) : conn = sqlite3 . connect ( os . path . join ( DEFAULT_PATH , 'word.db' ) ) curs = conn . cursor ( ) if arg [ 0 ] . isdigit ( ) : if len ( arg ) == 1 : curs . execute ( 'SELECT count(*) FROM Word WHERE pr == %d' % ( int ( arg [ 0 ] ) ) ) elif len ( arg ) == 2 and arg [ 1 ] == '+' : curs . execute ( 'SELECT count(*) FROM Word WHERE pr >= %d' % ( int ( arg [ 0 ] ) ) ) elif len ( arg ) == 3 and arg [ 1 ] == '-' : curs . execute ( 'SELECT count(*) FROM Word WHERE pr >= %d AND pr<= % d' % ( int ( arg [ 0 ] ) , int ( arg [ 2 ] ) ) ) elif arg [ 0 ] . isalpha ( ) : if arg == 'all' : curs . execute ( 'SELECT count(*) FROM Word' ) elif len ( arg ) == 1 : curs . execute ( 'SELECT count(*) FROM Word WHERE aset == "%s"' % arg . upper ( ) ) res = curs . fetchall ( ) print ( res [ 0 ] [ 0 ] ) curs . close ( ) conn . close ( )
count the number of words
15,075
def authentication_url ( self ) : params = { 'client_id' : self . client_id , 'response_type' : self . type , 'redirect_uri' : self . callback_url } return AUTHENTICATION_URL + "?" + urlencode ( params )
Redirect your users to here to authenticate them .
15,076
def setup ( self , paths = None ) : if not paths : self . state . add_error ( 'No `paths` argument provided in recipe, bailing' , critical = True ) else : self . _paths = [ path . strip ( ) for path in paths . strip ( ) . split ( ',' ) ]
Sets up the _paths attribute .
15,077
def _create_hunt ( self , name , args ) : runner_args = self . grr_api . types . CreateHuntRunnerArgs ( ) runner_args . description = self . reason hunt = self . grr_api . CreateHunt ( flow_name = name , flow_args = args , hunt_runner_args = runner_args ) print ( '{0!s}: Hunt created' . format ( hunt . hunt_id ) ) self . _check_approval_wrapper ( hunt , hunt . Start ) return hunt
Create specified hunt .
15,078
def setup ( self , artifacts , use_tsk , reason , grr_server_url , grr_username , grr_password , approvers = None , verify = True ) : super ( GRRHuntArtifactCollector , self ) . setup ( reason , grr_server_url , grr_username , grr_password , approvers = approvers , verify = verify ) self . artifacts = [ item . strip ( ) for item in artifacts . strip ( ) . split ( ',' ) ] if not artifacts : self . state . add_error ( 'No artifacts were specified.' , critical = True ) self . use_tsk = use_tsk
Initializes a GRR Hunt artifact collector .
15,079
def process ( self ) : print ( 'Artifacts to be collected: {0!s}' . format ( self . artifacts ) ) hunt_args = flows_pb2 . ArtifactCollectorFlowArgs ( artifact_list = self . artifacts , use_tsk = self . use_tsk , ignore_interpolation_errors = True , apply_parsers = False , ) return self . _create_hunt ( 'ArtifactCollectorFlow' , hunt_args )
Construct and start new Artifact Collection hunt .
15,080
def collect_hunt_results ( self , hunt ) : if not os . path . isdir ( self . output_path ) : os . makedirs ( self . output_path ) output_file_path = os . path . join ( self . output_path , '.' . join ( ( self . hunt_id , 'zip' ) ) ) if os . path . exists ( output_file_path ) : print ( '{0:s} already exists: Skipping' . format ( output_file_path ) ) return None self . _check_approval_wrapper ( hunt , self . _get_and_write_archive , hunt , output_file_path ) results = self . _extract_hunt_results ( output_file_path ) print ( 'Wrote results of {0:s} to {1:s}' . format ( hunt . hunt_id , output_file_path ) ) return results
Download current set of files in results .
15,081
def _get_and_write_archive ( self , hunt , output_file_path ) : hunt_archive = hunt . GetFilesArchive ( ) hunt_archive . WriteToFile ( output_file_path )
Gets and writes a hunt archive .
15,082
def _get_client_fqdn ( self , client_info_contents ) : yamldict = yaml . safe_load ( client_info_contents ) fqdn = yamldict [ 'system_info' ] [ 'fqdn' ] client_id = yamldict [ 'client_id' ] . split ( '/' ) [ 1 ] return client_id , fqdn
Extracts a GRR client s FQDN from its client_info . yaml file .
15,083
def _extract_hunt_results ( self , output_file_path ) : collection_paths = [ ] client_ids = set ( ) client_id_to_fqdn = { } hunt_dir = None try : with zipfile . ZipFile ( output_file_path ) as archive : items = archive . infolist ( ) for f in items : if not hunt_dir : hunt_dir = f . filename . split ( '/' ) [ 0 ] if f . filename . split ( '/' ) [ - 1 ] == 'client_info.yaml' : client_id , fqdn = self . _get_client_fqdn ( archive . read ( f ) ) client_id_to_fqdn [ client_id ] = fqdn continue client_id = f . filename . split ( '/' ) [ 1 ] if client_id . startswith ( 'C.' ) : if client_id not in client_ids : client_directory = os . path . join ( self . output_path , hunt_dir , client_id ) collection_paths . append ( ( client_id , client_directory ) ) client_ids . add ( client_id ) try : archive . extract ( f , self . output_path ) except KeyError as exception : print ( 'Extraction error: {0:s}' . format ( exception ) ) return [ ] except OSError as exception : msg = 'Error manipulating file {0:s}: {1!s}' . format ( output_file_path , exception ) self . state . add_error ( msg , critical = True ) return [ ] except zipfile . BadZipfile as exception : msg = 'Bad zipfile {0:s}: {1!s}' . format ( output_file_path , exception ) self . state . add_error ( msg , critical = True ) return [ ] try : os . remove ( output_file_path ) except OSError as exception : print ( 'Output path {0:s} could not be removed: {1:s}' . format ( output_file_path , exception ) ) fqdn_collection_paths = [ ] for client_id , path in collection_paths : fqdn = client_id_to_fqdn . get ( client_id , client_id ) fqdn_collection_paths . append ( ( fqdn , path ) ) if not fqdn_collection_paths : self . state . add_error ( 'Nothing was extracted from the hunt archive' , critical = True ) return [ ] return fqdn_collection_paths
Open a hunt output archive and extract files .
15,084
def get_extra ( cls , name = None ) : if not name : return cls . _extra_config return cls . _extra_config . get ( name , None )
Gets extra configuration parameters .
15,085
def load_extra ( cls , filename ) : try : with open ( filename , 'rb' ) as configuration_file : cls . load_extra_data ( configuration_file . read ( ) ) sys . stderr . write ( "Config successfully loaded from {0:s}\n" . format ( filename ) ) return True except IOError : return False
Loads extra JSON configuration parameters from a file on the filesystem .
15,086
def load_extra_data ( cls , data ) : try : cls . _extra_config . update ( json . loads ( data ) ) except ValueError as exception : sys . stderr . write ( 'Could convert to JSON. {0:s}' . format ( exception ) ) exit ( - 1 )
Loads extra JSON configuration parameters from a data buffer .
15,087
def register_recipe ( cls , recipe ) : recipe_name = recipe . contents [ 'name' ] cls . _recipe_classes [ recipe_name ] = ( recipe . contents , recipe . args , recipe . __doc__ )
Registers a dftimewolf recipe .
15,088
def _get_client_by_hostname ( self , hostname ) : print ( 'Searching for client: {0:s}' . format ( hostname ) ) try : search_result = self . grr_api . SearchClients ( hostname ) except grr_errors . UnknownError as exception : self . state . add_error ( 'Could not search for host {0:s}: {1!s}' . format ( hostname , exception ) , critical = True ) return None result = [ ] for client in search_result : if hostname . lower ( ) in client . data . os_info . fqdn . lower ( ) : result . append ( ( client . data . last_seen_at , client ) ) if not result : self . state . add_error ( 'Could not get client_id for {0:s}' . format ( hostname ) , critical = True ) return None last_seen , client = sorted ( result , key = lambda x : x [ 0 ] , reverse = True ) [ 0 ] last_seen_datetime = datetime . datetime . utcfromtimestamp ( last_seen / 1000000 ) last_seen_seconds = ( datetime . datetime . utcnow ( ) - last_seen_datetime ) . total_seconds ( ) last_seen_minutes = int ( round ( last_seen_seconds / 60 ) ) print ( '{0:s}: Found active client' . format ( client . client_id ) ) print ( 'Found active client: {0:s}' . format ( client . client_id ) ) print ( 'Client last seen: {0:s} ({1:d} minutes ago)' . format ( last_seen_datetime . strftime ( '%Y-%m-%dT%H:%M:%S+0000' ) , last_seen_minutes ) ) return client
Search GRR by hostname and get the latest active client .
15,089
def find_clients ( self , hosts ) : clients = [ ] for host in hosts : clients . append ( self . _get_client_by_hostname ( host ) ) return [ client for client in clients if client is not None ]
Finds GRR clients given a list of hosts .
15,090
def _get_client_by_id ( self , client_id ) : client = self . grr_api . Client ( client_id ) print ( 'Checking for client approval' ) self . _check_approval_wrapper ( client , client . ListFlows ) print ( '{0:s}: Client approval is valid' . format ( client_id ) ) return client . Get ( )
Get GRR client dictionary and make sure valid approvals exist .
15,091
def _launch_flow ( self , client , name , args ) : flow = self . _check_approval_wrapper ( client , client . CreateFlow , name = name , args = args ) flow_id = flow . flow_id print ( '{0:s}: Scheduled' . format ( flow_id ) ) if self . keepalive : keepalive_flow = client . CreateFlow ( name = 'KeepAlive' , args = flows_pb2 . KeepAliveArgs ( ) ) print ( 'KeepAlive Flow:{0:s} scheduled' . format ( keepalive_flow . flow_id ) ) return flow_id
Create specified flow setting KeepAlive if requested .
15,092
def _await_flow ( self , client , flow_id ) : print ( '{0:s}: Waiting to finish' . format ( flow_id ) ) while True : try : status = client . Flow ( flow_id ) . Get ( ) . data except grr_errors . UnknownError : msg = 'Unable to stat flow {0:s} for host {1:s}' . format ( flow_id , client . data . os_info . fqdn . lower ( ) ) self . state . add_error ( msg ) raise DFTimewolfError ( 'Unable to stat flow {0:s} for host {1:s}' . format ( flow_id , client . data . os_info . fqdn . lower ( ) ) ) if status . state == flows_pb2 . FlowContext . ERROR : message = status . context . backtrace if 'ArtifactNotRegisteredError' in status . context . backtrace : message = status . context . backtrace . split ( '\n' ) [ - 2 ] raise DFTimewolfError ( '{0:s}: FAILED! Message from GRR:\n{1:s}' . format ( flow_id , message ) ) if status . state == flows_pb2 . FlowContext . TERMINATED : print ( '{0:s}: Complete' . format ( flow_id ) ) break time . sleep ( self . _CHECK_FLOW_INTERVAL_SEC )
Awaits flow completion .
15,093
def _download_files ( self , client , flow_id ) : output_file_path = os . path . join ( self . output_path , '.' . join ( ( flow_id , 'zip' ) ) ) if os . path . exists ( output_file_path ) : print ( '{0:s} already exists: Skipping' . format ( output_file_path ) ) return None flow = client . Flow ( flow_id ) file_archive = flow . GetFilesArchive ( ) file_archive . WriteToFile ( output_file_path ) fqdn = client . data . os_info . fqdn . lower ( ) client_output_file = os . path . join ( self . output_path , fqdn ) if not os . path . isdir ( client_output_file ) : os . makedirs ( client_output_file ) with zipfile . ZipFile ( output_file_path ) as archive : archive . extractall ( path = client_output_file ) os . remove ( output_file_path ) return client_output_file
Download files from the specified flow .
15,094
def setup ( self , hosts , artifacts , extra_artifacts , use_tsk , reason , grr_server_url , grr_username , grr_password , approvers = None , verify = True ) : super ( GRRArtifactCollector , self ) . setup ( reason , grr_server_url , grr_username , grr_password , approvers = approvers , verify = verify ) if artifacts is not None : self . artifacts = [ item . strip ( ) for item in artifacts . strip ( ) . split ( ',' ) ] if extra_artifacts is not None : self . extra_artifacts = [ item . strip ( ) for item in extra_artifacts . strip ( ) . split ( ',' ) ] self . hostnames = [ item . strip ( ) for item in hosts . strip ( ) . split ( ',' ) ] self . use_tsk = use_tsk
Initializes a GRR artifact collector .
15,095
def _process_thread ( self , client ) : system_type = client . data . os_info . system print ( 'System type: {0:s}' . format ( system_type ) ) artifact_list = [ ] if self . artifacts : print ( 'Artifacts to be collected: {0!s}' . format ( self . artifacts ) ) artifact_list = self . artifacts else : default_artifacts = self . artifact_registry . get ( system_type , None ) if default_artifacts : print ( 'Collecting default artifacts for {0:s}: {1:s}' . format ( system_type , ', ' . join ( default_artifacts ) ) ) artifact_list . extend ( default_artifacts ) if self . extra_artifacts : print ( 'Throwing in an extra {0!s}' . format ( self . extra_artifacts ) ) artifact_list . extend ( self . extra_artifacts ) artifact_list = list ( set ( artifact_list ) ) if not artifact_list : return flow_args = flows_pb2 . ArtifactCollectorFlowArgs ( artifact_list = artifact_list , use_tsk = self . use_tsk , ignore_interpolation_errors = True , apply_parsers = False ) flow_id = self . _launch_flow ( client , 'ArtifactCollectorFlow' , flow_args ) self . _await_flow ( client , flow_id ) collected_flow_data = self . _download_files ( client , flow_id ) if collected_flow_data : print ( '{0!s}: Downloaded: {1:s}' . format ( flow_id , collected_flow_data ) ) fqdn = client . data . os_info . fqdn . lower ( ) self . state . output . append ( ( fqdn , collected_flow_data ) )
Process a single GRR client .
15,096
def process ( self ) : threads = [ ] for client in self . find_clients ( self . hostnames ) : print ( client ) thread = threading . Thread ( target = self . _process_thread , args = ( client , ) ) threads . append ( thread ) thread . start ( ) for thread in threads : thread . join ( )
Collect the artifacts .
15,097
def setup ( self , hosts , files , use_tsk , reason , grr_server_url , grr_username , grr_password , approvers = None , verify = True ) : super ( GRRFileCollector , self ) . setup ( reason , grr_server_url , grr_username , grr_password , approvers = approvers , verify = verify ) if files is not None : self . files = [ item . strip ( ) for item in files . strip ( ) . split ( ',' ) ] self . hostnames = [ item . strip ( ) for item in hosts . strip ( ) . split ( ',' ) ] self . use_tsk = use_tsk
Initializes a GRR file collector .
15,098
def _process_thread ( self , client ) : file_list = self . files if not file_list : return print ( 'Filefinder to collect {0:d} items' . format ( len ( file_list ) ) ) flow_action = flows_pb2 . FileFinderAction ( action_type = flows_pb2 . FileFinderAction . DOWNLOAD ) flow_args = flows_pb2 . FileFinderArgs ( paths = file_list , action = flow_action , ) flow_id = self . _launch_flow ( client , 'FileFinder' , flow_args ) self . _await_flow ( client , flow_id ) collected_flow_data = self . _download_files ( client , flow_id ) if collected_flow_data : print ( '{0!s}: Downloaded: {1:s}' . format ( flow_id , collected_flow_data ) ) fqdn = client . data . os_info . fqdn . lower ( ) self . state . output . append ( ( fqdn , collected_flow_data ) )
Process a single client .
15,099
def setup ( self , host , flow_id , reason , grr_server_url , grr_username , grr_password , approvers = None , verify = True ) : super ( GRRFlowCollector , self ) . setup ( reason , grr_server_url , grr_username , grr_password , approvers = approvers , verify = verify ) self . flow_id = flow_id self . host = host
Initializes a GRR flow collector .