idx int64 0 63k | question stringlengths 53 5.28k | target stringlengths 5 805 |
|---|---|---|
62,300 | def set_bfield ( self , B_G ) : if not ( B_G > 0 ) : raise ValueError ( 'must have B_G > 0; got %r' % ( B_G , ) ) self . in_vals [ IN_VAL_B ] = B_G return self | Set the strength of the local magnetic field . |
62,301 | def set_bfield_for_s0 ( self , s0 ) : if not ( s0 > 0 ) : raise ValueError ( 'must have s0 > 0; got %r' % ( s0 , ) ) B0 = 2 * np . pi * cgs . me * cgs . c * self . in_vals [ IN_VAL_FREQ0 ] / ( cgs . e * s0 ) self . in_vals [ IN_VAL_B ] = B0 return self | Set B to probe a certain harmonic number . |
62,302 | def set_edist_powerlaw ( self , emin_mev , emax_mev , delta , ne_cc ) : if not ( emin_mev >= 0 ) : raise ValueError ( 'must have emin_mev >= 0; got %r' % ( emin_mev , ) ) if not ( emax_mev >= emin_mev ) : raise ValueError ( 'must have emax_mev >= emin_mev; got %r, %r' % ( emax_mev , emin_mev ) ) if not ( delta >= 0 ) :... | Set the energy distribution function to a power law . |
62,303 | def set_edist_powerlaw_gamma ( self , gmin , gmax , delta , ne_cc ) : if not ( gmin >= 1 ) : raise ValueError ( 'must have gmin >= 1; got %r' % ( gmin , ) ) if not ( gmax >= gmin ) : raise ValueError ( 'must have gmax >= gmin; got %r, %r' % ( gmax , gmin ) ) if not ( delta >= 0 ) : raise ValueError ( 'must have delta >... | Set the energy distribution function to a power law in the Lorentz factor |
62,304 | def set_freqs ( self , n , f_lo_ghz , f_hi_ghz ) : if not ( f_lo_ghz >= 0 ) : raise ValueError ( 'must have f_lo_ghz >= 0; got %r' % ( f_lo_ghz , ) ) if not ( f_hi_ghz >= f_lo_ghz ) : raise ValueError ( 'must have f_hi_ghz >= f_lo_ghz; got %r, %r' % ( f_hi_ghz , f_lo_ghz ) ) if not n >= 1 : raise ValueError ( 'must hav... | Set the frequency grid on which to perform the calculations . |
62,305 | def set_obs_angle ( self , theta_rad ) : self . in_vals [ IN_VAL_THETA ] = theta_rad * 180 / np . pi return self | Set the observer angle relative to the field . |
62,306 | def set_one_freq ( self , f_ghz ) : if not ( f_ghz >= 0 ) : raise ValueError ( 'must have f_lo_ghz >= 0; got %r' % ( f_lo_ghz , ) ) self . in_vals [ IN_VAL_NFREQ ] = 1 self . in_vals [ IN_VAL_FREQ0 ] = f_ghz * 1e9 self . in_vals [ IN_VAL_LOGDFREQ ] = 1.0 return self | Set the code to calculate results at just one frequency . |
62,307 | def set_padist_gaussian_loss_cone ( self , boundary_rad , expwidth ) : self . in_vals [ IN_VAL_PADIST ] = PADIST_GLC self . in_vals [ IN_VAL_LCBDY ] = boundary_rad * 180 / np . pi self . in_vals [ IN_VAL_DELTAMU ] = expwidth return self | Set the pitch - angle distribution to a Gaussian loss cone . |
62,308 | def set_thermal_background ( self , T_K , nth_cc ) : if not ( T_K >= 0 ) : raise ValueError ( 'must have T_K >= 0; got %r' % ( T_K , ) ) if not ( nth_cc >= 0 ) : raise ValueError ( 'must have nth_cc >= 0; got %r, %r' % ( nth_cc , ) ) self . in_vals [ IN_VAL_T0 ] = T_K self . in_vals [ IN_VAL_N0 ] = nth_cc return self | Set the properties of the background thermal plasma . |
62,309 | def set_trapezoidal_integration ( self , n ) : if not ( n >= 2 ) : raise ValueError ( 'must have n >= 2; got %r' % ( n , ) ) self . in_vals [ IN_VAL_INTEG_METH ] = n + 1 return self | Set the code to use trapezoidal integration . |
62,310 | def find_rt_coefficients ( self , depth0 = None ) : if self . in_vals [ IN_VAL_NFREQ ] != 1 : raise Exception ( 'must have nfreq=1 to run Calculator.find_rt_coefficients()' ) if depth0 is not None : depth = depth0 self . in_vals [ IN_VAL_DEPTH ] = depth0 else : depth = self . in_vals [ IN_VAL_DEPTH ] scale_factor = 100... | Figure out emission and absorption coefficients for the current parameters . |
62,311 | def find_rt_coefficients_tot_intens ( self , depth0 = None ) : j_O , alpha_O , j_X , alpha_X = self . find_rt_coefficients ( depth0 = depth0 ) j_I = j_O + j_X alpha_I = 0.5 * ( alpha_O + alpha_X ) return ( j_I , alpha_I ) | Figure out total - intensity emission and absorption coefficients for the current parameters . |
62,312 | def make_path_func ( * baseparts ) : from os . path import join base = join ( * baseparts ) def path_func ( * args ) : return join ( base , * args ) return path_func | Return a function that joins paths onto some base directory . |
62,313 | def djoin ( * args ) : from os . path import join i = 0 alen = len ( args ) while i < alen and ( args [ i ] == '' or args [ i ] == '.' ) : i += 1 if i == alen : return '.' return join ( * args [ i : ] ) | dotless join for nicer paths . |
62,314 | def ensure_symlink ( src , dst ) : try : os . symlink ( src , dst ) except OSError as e : if e . errno == 17 : return True raise return False | Ensure the existence of a symbolic link pointing to src named dst . Returns a boolean indicating whether the symlink already existed . |
62,315 | def ensure_dir ( self , mode = 0o777 , parents = False ) : if parents : p = self . parent if p == self : return False p . ensure_dir ( mode , True ) made_it = False try : self . mkdir ( mode ) made_it = True except OSError as e : if e . errno == 17 : return False raise if not self . is_dir ( ) : import errno raise OSEr... | Ensure that this path exists as a directory . |
62,316 | def make_tempfile ( self , want = 'handle' , resolution = 'try_unlink' , suffix = '' , ** kwargs ) : if want not in ( 'handle' , 'path' ) : raise ValueError ( 'unrecognized make_tempfile() "want" mode %r' % ( want , ) ) if resolution not in ( 'unlink' , 'try_unlink' , 'keep' , 'overwrite' ) : raise ValueError ( 'unreco... | Get a context manager that creates and cleans up a uniquely - named temporary file with a name similar to this path . |
62,317 | def try_unlink ( self ) : try : self . unlink ( ) return True except OSError as e : if e . errno == 2 : return False raise | Try to unlink this path . If it doesn t exist no error is returned . Returns a boolean indicating whether the path was really unlinked . |
62,318 | def read_pickles ( self ) : try : import cPickle as pickle except ImportError : import pickle with self . open ( mode = 'rb' ) as f : while True : try : obj = pickle . load ( f ) except EOFError : break yield obj | Generate a sequence of objects by opening the path and unpickling items until EOF is reached . |
62,319 | def read_text ( self , encoding = None , errors = None , newline = None ) : with self . open ( mode = 'rt' , encoding = encoding , errors = errors , newline = newline ) as f : return f . read ( ) | Read this path as one large chunk of text . |
62,320 | def read_toml ( self , encoding = None , errors = None , newline = None , ** kwargs ) : import pytoml with self . open ( mode = 'rt' , encoding = encoding , errors = errors , newline = newline ) as f : return pytoml . load ( f , ** kwargs ) | Read this path as a TOML document . |
62,321 | def read_yaml ( self , encoding = None , errors = None , newline = None , ** kwargs ) : import yaml with self . open ( mode = 'rt' , encoding = encoding , errors = errors , newline = newline ) as f : return yaml . load ( f , ** kwargs ) | Read this path as a YAML document . |
62,322 | def enumeration ( cls ) : from pwkit import unicode_to_str name = cls . __name__ pickle_compat = getattr ( cls , '__pickle_compat__' , False ) def __unicode__ ( self ) : return '<enumeration holder %s>' % name def getattr_error ( self , attr ) : raise AttributeError ( 'enumeration %s does not contain attribute %s' % ( ... | A very simple decorator for creating enumerations . Unlike Python 3 . 4 enumerations this just gives a way to use a class declaration to create an immutable object containing only the values specified in the class . |
62,323 | def slice_around_gaps ( values , maxgap ) : if not ( maxgap > 0 ) : raise ValueError ( 'maxgap must be positive; got %r' % maxgap ) values = np . asarray ( values ) delta = values [ 1 : ] - values [ : - 1 ] if np . any ( delta < 0 ) : raise ValueError ( 'values must be in nondecreasing order' ) whgap = np . where ( del... | Given an ordered array of values generate a set of slices that traverse all of the values . Within each slice no gap between adjacent values is larger than maxgap . In other words these slices break the array into chunks separated by gaps of size larger than maxgap . |
62,324 | def reduce_data_frame ( df , chunk_slicers , avg_cols = ( ) , uavg_cols = ( ) , minmax_cols = ( ) , nchunk_colname = 'nchunk' , uncert_prefix = 'u' , min_points_per_chunk = 3 ) : subds = [ df . iloc [ idx ] for idx in chunk_slicers ] subds = [ sd for sd in subds if sd . shape [ 0 ] >= min_points_per_chunk ] chunked = d... | Reduce a DataFrame by collapsing rows in grouped chunks . Returns another DataFrame with similar columns but fewer rows . |
62,325 | def reduce_data_frame_evenly_with_gaps ( df , valcol , target_len , maxgap , ** kwargs ) : return reduce_data_frame ( df , slice_evenly_with_gaps ( df [ valcol ] , target_len , maxgap ) , ** kwargs ) | Reduce a DataFrame by collapsing rows in grouped chunks grouping based on gaps in one of the columns . |
62,326 | def usmooth ( window , uncerts , * data , ** kwargs ) : window = np . asarray ( window ) uncerts = np . asarray ( uncerts ) k = kwargs . pop ( 'k' , None ) if len ( kwargs ) : raise TypeError ( "smooth() got an unexpected keyword argument '%s'" % kwargs . keys ( ) [ 0 ] ) if k is None : k = window . size conv = lambda ... | Smooth data series according to a window weighting based on uncertainties . |
62,327 | def weighted_variance ( x , weights ) : n = len ( x ) if n < 3 : raise ValueError ( 'cannot calculate meaningful variance of fewer ' 'than three samples' ) wt_mean = np . average ( x , weights = weights ) return np . average ( np . square ( x - wt_mean ) , weights = weights ) * n / ( n - 1 ) | Return the variance of a weighted sample . |
62,328 | def unit_tophat_ee ( x ) : x = np . asarray ( x ) x1 = np . atleast_1d ( x ) r = ( ( 0 < x1 ) & ( x1 < 1 ) ) . astype ( x . dtype ) if x . ndim == 0 : return np . asscalar ( r ) return r | Tophat function on the unit interval left - exclusive and right - exclusive . Returns 1 if 0 < x < 1 0 otherwise . |
62,329 | def make_tophat_ee ( lower , upper ) : if not np . isfinite ( lower ) : raise ValueError ( '"lower" argument must be finite number; got %r' % lower ) if not np . isfinite ( upper ) : raise ValueError ( '"upper" argument must be finite number; got %r' % upper ) def range_tophat_ee ( x ) : x = np . asarray ( x ) x1 = np ... | Return a ufunc - like tophat function on the defined range left - exclusive and right - exclusive . Returns 1 if lower < x < upper 0 otherwise . |
62,330 | def make_tophat_ei ( lower , upper ) : if not np . isfinite ( lower ) : raise ValueError ( '"lower" argument must be finite number; got %r' % lower ) if not np . isfinite ( upper ) : raise ValueError ( '"upper" argument must be finite number; got %r' % upper ) def range_tophat_ei ( x ) : x = np . asarray ( x ) x1 = np ... | Return a ufunc - like tophat function on the defined range left - exclusive and right - inclusive . Returns 1 if lower < x < = upper 0 otherwise . |
62,331 | def make_tophat_ie ( lower , upper ) : if not np . isfinite ( lower ) : raise ValueError ( '"lower" argument must be finite number; got %r' % lower ) if not np . isfinite ( upper ) : raise ValueError ( '"upper" argument must be finite number; got %r' % upper ) def range_tophat_ie ( x ) : x = np . asarray ( x ) x1 = np ... | Return a ufunc - like tophat function on the defined range left - inclusive and right - exclusive . Returns 1 if lower < = x < upper 0 otherwise . |
62,332 | def make_tophat_ii ( lower , upper ) : if not np . isfinite ( lower ) : raise ValueError ( '"lower" argument must be finite number; got %r' % lower ) if not np . isfinite ( upper ) : raise ValueError ( '"upper" argument must be finite number; got %r' % upper ) def range_tophat_ii ( x ) : x = np . asarray ( x ) x1 = np ... | Return a ufunc - like tophat function on the defined range left - inclusive and right - inclusive . Returns 1 if lower < x < upper 0 otherwise . |
62,333 | def make_step_lcont ( transition ) : if not np . isfinite ( transition ) : raise ValueError ( '"transition" argument must be finite number; got %r' % transition ) def step_lcont ( x ) : x = np . asarray ( x ) x1 = np . atleast_1d ( x ) r = ( x1 > transition ) . astype ( x . dtype ) if x . ndim == 0 : return np . asscal... | Return a ufunc - like step function that is left - continuous . Returns 1 if x > transition 0 otherwise . |
62,334 | def make_step_rcont ( transition ) : if not np . isfinite ( transition ) : raise ValueError ( '"transition" argument must be finite number; got %r' % transition ) def step_rcont ( x ) : x = np . asarray ( x ) x1 = np . atleast_1d ( x ) r = ( x1 >= transition ) . astype ( x . dtype ) if x . ndim == 0 : return np . assca... | Return a ufunc - like step function that is right - continuous . Returns 1 if x > = transition 0 otherwise . |
62,335 | def make_fixed_temp_multi_apec ( kTs , name_template = 'apec%d' , norm = None ) : total_model = None sub_models = [ ] for i , kT in enumerate ( kTs ) : component = ui . xsapec ( name_template % i ) component . kT = kT ui . freeze ( component . kT ) if norm is not None : component . norm = norm sub_models . append ( com... | Create a model summing multiple APEC components at fixed temperatures . |
62,336 | def expand_rmf_matrix ( rmf ) : n_chan = rmf . e_min . size n_energy = rmf . n_grp . size expanded = np . zeros ( ( n_energy , n_chan ) ) mtx_ofs = 0 grp_ofs = 0 for i in range ( n_energy ) : for j in range ( rmf . n_grp [ i ] ) : f = rmf . f_chan [ grp_ofs ] n = rmf . n_chan [ grp_ofs ] expanded [ i , f : f + n ] = rm... | Expand an RMF matrix stored in compressed form . |
62,337 | def derive_identity_arf ( name , arf ) : from sherpa . astro . data import DataARF from sherpa . astro . instrument import ARF1D darf = DataARF ( name , arf . energ_lo , arf . energ_hi , np . ones ( arf . specresp . shape ) , arf . bin_lo , arf . bin_hi , arf . exposure , header = None , ) return ARF1D ( darf , pha = a... | Create an identity ARF that has uniform sensitivity . |
62,338 | def get_source_qq_data ( id = None ) : sdata = ui . get_data ( id = id ) kev = sdata . get_x ( ) obs_data = sdata . counts model_data = ui . get_model ( id = id ) ( kev ) return np . vstack ( ( kev , obs_data , model_data ) ) | Get data for a quantile - quantile plot of the source data and model . |
62,339 | def get_bkg_qq_data ( id = None , bkg_id = None ) : bdata = ui . get_bkg ( id = id , bkg_id = bkg_id ) kev = bdata . get_x ( ) obs_data = bdata . counts model_data = ui . get_bkg_model ( id = id , bkg_id = bkg_id ) ( kev ) return np . vstack ( ( kev , obs_data , model_data ) ) | Get data for a quantile - quantile plot of the background data and model . |
62,340 | def make_qq_plot ( kev , obs , mdl , unit , key_text ) : import omega as om kev = np . asarray ( kev ) obs = np . asarray ( obs ) mdl = np . asarray ( mdl ) c_obs = np . cumsum ( obs ) c_mdl = np . cumsum ( mdl ) mx = max ( c_obs [ - 1 ] , c_mdl [ - 1 ] ) p = om . RectPlot ( ) p . addXY ( [ 0 , mx ] , [ 0 , mx ] , '1:1... | Make a quantile - quantile plot comparing events and a model . |
62,341 | def make_multi_qq_plots ( arrays , key_text ) : import omega as om p = om . RectPlot ( ) p . addXY ( [ 0 , 1. ] , [ 0 , 1. ] , '1:1' ) for index , array in enumerate ( arrays ) : kev , obs , mdl = array c_obs = np . cumsum ( obs ) c_mdl = np . cumsum ( mdl ) mx = 0.5 * ( c_obs [ - 1 ] + c_mdl [ - 1 ] ) c_obs /= mx c_md... | Make a quantile - quantile plot comparing multiple sets of events and models . |
62,342 | def make_spectrum_plot ( model_plot , data_plot , desc , xmin_clamp = 0.01 , min_valid_x = None , max_valid_x = None ) : import omega as om model_x = np . concatenate ( ( model_plot . xlo , [ model_plot . xhi [ - 1 ] ] ) ) model_x [ 0 ] = max ( model_x [ 0 ] , xmin_clamp ) model_y = np . concatenate ( ( model_plot . y ... | Make a plot of a spectral model and data . |
62,343 | def download_file ( local_filename , url , clobber = False ) : dir_name = os . path . dirname ( local_filename ) mkdirs ( dir_name ) if clobber or not os . path . exists ( local_filename ) : i = requests . get ( url ) if i . status_code == 404 : print ( 'Failed to download file:' , local_filename , url ) return False c... | Download the given file . Clobber overwrites file if exists . |
62,344 | def download_json ( local_filename , url , clobber = False ) : with open ( local_filename , 'w' ) as json_file : json_file . write ( json . dumps ( requests . get ( url ) . json ( ) , sort_keys = True , indent = 2 , separators = ( ',' , ': ' ) ) ) | Download the given JSON file and pretty - print before we output it . |
62,345 | def data_to_argb32 ( data , cmin = None , cmax = None , stretch = 'linear' , cmap = 'black_to_blue' ) : clipper = Clipper ( ) clipper . alloc_buffer ( data ) clipper . set_tile_size ( ) clipper . dmin = cmin if cmin is not None else data . min ( ) clipper . dmax = cmax if cmax is not None else data . max ( ) clipper . ... | Turn arbitrary data values into ARGB32 colors . |
62,346 | def data_to_imagesurface ( data , ** kwargs ) : import cairo data = np . atleast_2d ( data ) if data . ndim != 2 : raise ValueError ( 'input array may not have more than 2 dimensions' ) argb32 = data_to_argb32 ( data , ** kwargs ) format = cairo . FORMAT_ARGB32 height , width = argb32 . shape stride = cairo . ImageSurf... | Turn arbitrary data values into a Cairo ImageSurface . |
62,347 | def get_token ( filename = TOKEN_PATH , envvar = TOKEN_ENVVAR ) : if os . path . isfile ( filename ) : with open ( filename ) as token_file : token = token_file . readline ( ) . strip ( ) else : token = os . environ . get ( envvar ) if not token : raise ValueError ( "No token found.\n" "{} file doesn't exist.\n{} envir... | Returns pipeline_token for API |
62,348 | def stats ( self , antnames ) : nbyant = np . zeros ( self . nants , dtype = np . int ) sum = np . zeros ( self . nants , dtype = np . complex ) sumsq = np . zeros ( self . nants ) q = np . abs ( self . normvis - 1 ) for i in range ( self . nsamps ) : i1 , i2 = self . blidxs [ i ] nbyant [ i1 ] += 1 nbyant [ i2 ] += 1 ... | XXX may be out of date . |
62,349 | def _qr_factor_packed ( a , enorm , finfo ) : machep = finfo . eps n , m = a . shape if m < n : raise ValueError ( '"a" must be at least as tall as it is wide' ) acnorm = np . empty ( n , finfo . dtype ) for j in range ( n ) : acnorm [ j ] = enorm ( a [ j ] , finfo ) rdiag = acnorm . copy ( ) wa = acnorm . copy ( ) pmu... | Compute the packed pivoting Q - R factorization of a matrix . |
62,350 | def _qr_factor_full ( a , dtype = np . float ) : n , m = a . shape packed , pmut , rdiag , acnorm = _manual_qr_factor_packed ( a , dtype ) r = np . zeros ( ( n , m ) ) for i in range ( n ) : r [ i , : i ] = packed [ i , : i ] r [ i , i ] = rdiag [ i ] q = np . eye ( m ) v = np . empty ( m ) for i in range ( n ) : v [ :... | Compute the QR factorization of a matrix with pivoting . |
62,351 | def _qrd_solve ( r , pmut , ddiag , bqt , sdiag ) : n , m = r . shape for i in range ( n ) : r [ i , i : ] = r [ i : , i ] x = r . diagonal ( ) . copy ( ) zwork = bqt . copy ( ) for i in range ( n ) : li = pmut [ i ] if ddiag [ li ] == 0 : sdiag [ i ] = r [ i , i ] r [ i , i ] = x [ i ] continue sdiag [ i : ] = 0 sdiag... | Solve an equation given a QR factored matrix and a diagonal . |
62,352 | def _qrd_solve_full ( a , b , ddiag , dtype = np . float ) : a = np . asarray ( a , dtype ) b = np . asarray ( b , dtype ) ddiag = np . asarray ( ddiag , dtype ) n , m = a . shape assert m >= n assert b . shape == ( m , ) assert ddiag . shape == ( n , ) q , r , pmut = _qr_factor_full ( a ) bqt = np . dot ( b , q . T ) ... | Solve the equation A^T x = B D x = 0 . |
62,353 | def _calc_covariance ( r , pmut , tol = 1e-14 ) : n = r . shape [ 1 ] assert r . shape [ 0 ] >= n r = r . copy ( ) jrank = - 1 abstol = tol * abs ( r [ 0 , 0 ] ) for i in range ( n ) : if abs ( r [ i , i ] ) <= abstol : break r [ i , i ] **= - 1 for j in range ( i ) : temp = r [ i , i ] * r [ i , j ] r [ i , j ] = 0. r... | Calculate the covariance matrix of the fitted parameters |
62,354 | def invoke_tool ( namespace , tool_class = None ) : import sys from . . import cli cli . propagate_sigint ( ) cli . unicode_stdio ( ) cli . backtrace_on_usr1 ( ) if tool_class is None : for value in itervalues ( namespace ) : if is_strict_subclass ( value , Multitool ) : if tool_class is not None : raise PKError ( 'do ... | Invoke a tool and exit . |
62,355 | def get_arg_parser ( self , ** kwargs ) : import argparse ap = argparse . ArgumentParser ( prog = kwargs [ 'argv0' ] , description = self . summary , ) return ap | Return an instance of argparse . ArgumentParser used to process this tool s command - line arguments . |
62,356 | def register ( self , cmd ) : if cmd . name is None : raise ValueError ( 'no name set for Command object %r' % cmd ) if cmd . name in self . commands : raise ValueError ( 'a command named "%s" has already been ' 'registered' % cmd . name ) self . commands [ cmd . name ] = cmd return self | Register a new command with the tool . cmd is expected to be an instance of Command although here only the cmd . name attribute is investigated . Multiple commands with the same name are not allowed to be registered . Returns self . |
62,357 | def invoke_command ( self , cmd , args , ** kwargs ) : new_kwargs = kwargs . copy ( ) new_kwargs [ 'argv0' ] = kwargs [ 'argv0' ] + ' ' + cmd . name new_kwargs [ 'parent' ] = self new_kwargs [ 'parent_kwargs' ] = kwargs return cmd . invoke_with_usage ( args , ** new_kwargs ) | This function mainly exists to be overridden by subclasses . |
62,358 | def merge_bibtex_collections ( citednames , maindict , extradicts , allow_missing = False ) : allrecords = { } for ed in extradicts : allrecords . update ( ed ) allrecords . update ( maindict ) missing = [ ] from collections import OrderedDict records = OrderedDict ( ) from itertools import chain wantednames = sorted (... | There must be a way to be efficient and stream output instead of loading everything into memory at once but meh . |
62,359 | def write_bibtex_dict ( stream , entries ) : from bibtexparser . bwriter import BibTexWriter writer = BibTexWriter ( ) writer . indent = ' ' writer . entry_separator = '' first = True for rec in entries : if first : first = False else : stream . write ( b'\n' ) stream . write ( writer . _entry_to_bibtex ( rec ) . enco... | bibtexparser . write converts the entire database to one big string and writes it out in one go . I m sure it will always all fit in RAM but some things just will not stand . |
62,360 | def merge_bibtex_with_aux ( auxpath , mainpath , extradir , parse = get_bibtex_dict , allow_missing = False ) : auxpath = Path ( auxpath ) mainpath = Path ( mainpath ) extradir = Path ( extradir ) with auxpath . open ( 'rt' ) as aux : citednames = sorted ( cited_names_from_aux_file ( aux ) ) main = mainpath . try_open ... | Merge multiple BibTeX files into a single homogeneously - formatted output using a LaTeX . aux file to know which records are worth paying attention to . |
62,361 | def just_smart_bibtools ( bib_style , aux , bib ) : extradir = Path ( '.bibtex' ) extradir . ensure_dir ( parents = True ) bib_export ( bib_style , aux , extradir / 'ZZ_bibtools.bib' , no_tool_ok = True , quiet = True , ignore_missing = True ) merge_bibtex_with_aux ( aux , bib , extradir ) | Tectonic has taken over most of the features that this tool used to provide but here s a hack to keep my smart . bib file generation working . |
62,362 | def aap_to_bp ( ant1 , ant2 , pol ) : if ant1 < 0 : raise ValueError ( 'first antenna is below 0: %s' % ant1 ) if ant2 < ant1 : raise ValueError ( 'second antenna is below first: %s' % ant2 ) if pol < 1 or pol > 12 : raise ValueError ( 'illegal polarization code %s' % pol ) fps = _pol_to_fpol [ pol ] ap1 = ( ant1 << 3 ... | Create a basepol from antenna numbers and a CASA polarization code . |
62,363 | def _finish_timeslot ( self ) : for fpol , aps in self . ap_by_fpol . items ( ) : aps = sorted ( aps ) nap = len ( aps ) for i1 , ap1 in enumerate ( aps ) : for i2 in range ( i1 , nap ) : ap2 = aps [ i2 ] bp1 = ( ap1 , ap2 ) info = self . data_by_bp . get ( bp1 ) if info is None : continue data1 , flags1 = info for i3 ... | We have loaded in all of the visibilities in one timeslot . We can now compute the phase closure triples . |
62,364 | def _process_sample ( self , ap1 , ap2 , ap3 , triple , tflags ) : np . divide ( triple , np . abs ( triple ) , triple ) phase = np . angle ( triple ) self . ap_spec_stats_by_ddid [ self . cur_ddid ] . accum ( ap1 , phase , tflags + 0. ) self . ap_spec_stats_by_ddid [ self . cur_ddid ] . accum ( ap2 , phase , tflags + ... | We have computed one independent phase closure triple in one timeslot . |
62,365 | def load_spectrum ( path , smoothing = 181 , DF = - 8. ) : try : ang , lflam = np . loadtxt ( path , usecols = ( 0 , 1 ) ) . T except ValueError : with open ( path , 'rb' ) as f : def lines ( ) : for line in f : yield line . replace ( b'D' , b'e' ) ang , lflam = np . genfromtxt ( lines ( ) , delimiter = ( 13 , 12 ) ) .... | Load a Phoenix model atmosphere spectrum . |
62,366 | def lbol_from_spt_dist_mag ( sptnum , dist_pc , jmag , kmag , format = 'cgs' ) : bcj = bcj_from_spt ( sptnum ) bck = bck_from_spt ( sptnum ) n = np . zeros ( sptnum . shape , dtype = np . int ) app_mbol = np . zeros ( sptnum . shape ) w = np . isfinite ( bcj ) & np . isfinite ( jmag ) app_mbol [ w ] += jmag [ w ] + bcj... | Estimate a UCD s bolometric luminosity given some basic parameters . |
62,367 | def map ( self , func , iterable , chunksize = None ) : r = self . map_async ( func , iterable , chunksize ) while True : try : return r . get ( self . wait_timeout ) except TimeoutError : pass except KeyboardInterrupt : self . terminate ( ) self . join ( ) raise | Equivalent of map built - in without swallowing KeyboardInterrupt . |
62,368 | def fmthours ( radians , norm = 'wrap' , precision = 3 , seps = '::' ) : return _fmtsexagesimal ( radians * R2H , norm , 24 , seps , precision = precision ) | Format an angle as sexagesimal hours in a string . |
62,369 | def fmtdeglon ( radians , norm = 'wrap' , precision = 2 , seps = '::' ) : return _fmtsexagesimal ( radians * R2D , norm , 360 , seps , precision = precision ) | Format a longitudinal angle as sexagesimal degrees in a string . |
62,370 | def fmtdeglat ( radians , norm = 'raise' , precision = 2 , seps = '::' ) : if norm == 'none' : pass elif norm == 'raise' : if radians > halfpi or radians < - halfpi : raise ValueError ( 'illegal latitude of %f radians' % radians ) elif norm == 'wrap' : radians = angcen ( radians ) if radians > halfpi : radians = pi - r... | Format a latitudinal angle as sexagesimal degrees in a string . |
62,371 | def fmtradec ( rarad , decrad , precision = 2 , raseps = '::' , decseps = '::' , intersep = ' ' ) : return ( fmthours ( rarad , precision = precision + 1 , seps = raseps ) + text_type ( intersep ) + fmtdeglat ( decrad , precision = precision , seps = decseps ) ) | Format equatorial coordinates in a single sexagesimal string . |
62,372 | def parsehours ( hrstr ) : hr = _parsesexagesimal ( hrstr , 'hours' , False ) if hr >= 24 : raise ValueError ( 'illegal hour specification: ' + hrstr ) return hr * H2R | Parse a string formatted as sexagesimal hours into an angle . |
62,373 | def parsedeglat ( latstr ) : deg = _parsesexagesimal ( latstr , 'latitude' , True ) if abs ( deg ) > 90 : raise ValueError ( 'illegal latitude specification: ' + latstr ) return deg * D2R | Parse a latitude formatted as sexagesimal degrees into an angle . |
62,374 | def sphdist ( lat1 , lon1 , lat2 , lon2 ) : cd = np . cos ( lon2 - lon1 ) sd = np . sin ( lon2 - lon1 ) c2 = np . cos ( lat2 ) c1 = np . cos ( lat1 ) s2 = np . sin ( lat2 ) s1 = np . sin ( lat1 ) a = np . sqrt ( ( c2 * sd ) ** 2 + ( c1 * s2 - s1 * c2 * cd ) ** 2 ) b = s1 * s2 + c1 * c2 * cd return np . arctan2 ( a , b ... | Calculate the distance between two locations on a sphere . |
62,375 | def sphbear ( lat1 , lon1 , lat2 , lon2 , tol = 1e-15 ) : ocross = lambda a , b : np . cross ( a , b , axisa = 0 , axisb = 0 , axisc = 0 ) v1 = np . asarray ( [ np . cos ( lat1 ) * np . cos ( lon1 ) , np . cos ( lat1 ) * np . sin ( lon1 ) , np . sin ( lat1 ) ] ) v2 = np . asarray ( [ np . cos ( lat2 ) * np . cos ( lon2... | Calculate the bearing between two locations on a sphere . |
62,376 | def sphofs ( lat1 , lon1 , r , pa , tol = 1e-2 , rmax = None ) : if rmax is not None and np . abs ( r ) > rmax : raise ValueError ( 'sphofs radius value %f is too big for ' 'our approximation' % r ) lat2 = lat1 + r * np . cos ( pa ) lon2 = lon1 + r * np . sin ( pa ) / np . cos ( lat2 ) if tol is not None : s = sphdist ... | Offset from one location on the sphere to another . |
62,377 | def parang ( hourangle , declination , latitude ) : return - np . arctan2 ( - np . sin ( hourangle ) , np . cos ( declination ) * np . tan ( latitude ) - np . sin ( declination ) * np . cos ( hourangle ) ) | Calculate the parallactic angle of a sky position . |
62,378 | def gaussian_convolve ( maj1 , min1 , pa1 , maj2 , min2 , pa2 ) : c1 = np . cos ( pa1 ) s1 = np . sin ( pa1 ) c2 = np . cos ( pa2 ) s2 = np . sin ( pa2 ) a = ( maj1 * c1 ) ** 2 + ( min1 * s1 ) ** 2 + ( maj2 * c2 ) ** 2 + ( min2 * s2 ) ** 2 b = ( maj1 * s1 ) ** 2 + ( min1 * c1 ) ** 2 + ( maj2 * s2 ) ** 2 + ( min2 * c2 )... | Convolve two Gaussians analytically . |
62,379 | def gaussian_deconvolve ( smaj , smin , spa , bmaj , bmin , bpa ) : from numpy import cos , sin , sqrt , min , abs , arctan2 if smaj < bmaj : smaj = bmaj if smin < bmin : smin = bmin alpha = ( ( smaj * cos ( spa ) ) ** 2 + ( smin * sin ( spa ) ) ** 2 - ( bmaj * cos ( bpa ) ) ** 2 - ( bmin * sin ( bpa ) ) ** 2 ) beta = ... | Deconvolve two Gaussians analytically . |
62,380 | def load_skyfield_data ( ) : import os . path from astropy . config import paths from skyfield . api import Loader cache_dir = os . path . join ( paths . get_cache_dir ( ) , 'pwkit' ) loader = Loader ( cache_dir ) planets = loader ( 'de421.bsp' ) ts = loader . timescale ( ) return planets , ts | Load data files used in Skyfield . This will download files from the internet if they haven t been downloaded before . |
62,381 | def get_2mass_epoch ( tmra , tmdec , debug = False ) : import codecs try : from urllib . request import urlopen except ImportError : from urllib2 import urlopen postdata = b % ( tmra * R2D , tmdec * R2D ) jd = None for line in codecs . getreader ( 'utf-8' ) ( urlopen ( _vizurl , postdata ) ) : line = line . strip ( ) i... | Given a 2MASS position look up the epoch when it was observed . |
62,382 | def verify ( self , complain = True ) : import sys if self . ra is None : raise ValueError ( 'AstrometryInfo missing "ra"' ) if self . dec is None : raise ValueError ( 'AstrometryInfo missing "dec"' ) if self . _partial_info ( self . promo_ra , self . promo_dec ) : raise ValueError ( 'partial proper-motion info in Astr... | Validate that the attributes are self - consistent . |
62,383 | def fill_from_simbad ( self , ident , debug = False ) : info = get_simbad_astrometry_info ( ident , debug = debug ) posref = 'unknown' for k , v in six . iteritems ( info ) : if '~' in v : continue if k == 'COO(d;A)' : self . ra = float ( v ) * D2R elif k == 'COO(d;D)' : self . dec = float ( v ) * D2R elif k == 'COO(E)... | Fill in astrometric information using the Simbad web service . |
62,384 | def fill_from_allwise ( self , ident , catalog_ident = 'II/328/allwise' ) : from astroquery . vizier import Vizier import numpy . ma . core as ma_core table_list = Vizier . query_constraints ( catalog = catalog_ident , AllWISE = ident ) if not len ( table_list ) : raise PKError ( 'Vizier query returned no tables (catal... | Fill in astrometric information from the AllWISE catalog using Astroquery . |
62,385 | def backtrace_on_usr1 ( ) : import signal try : signal . signal ( signal . SIGUSR1 , _print_backtrace_signal_handler ) except Exception as e : warn ( 'failed to set up Python backtraces on SIGUSR1: %s' , e ) | Install a signal handler such that this program prints a Python traceback upon receipt of SIGUSR1 . This could be useful for checking that long - running programs are behaving properly or for discovering where an infinite loop is occurring . |
62,386 | def fork_detached_process ( ) : import os , struct from . . import Holder payload = struct . Struct ( 'L' ) info = Holder ( ) readfd , writefd = os . pipe ( ) pid1 = os . fork ( ) if pid1 > 0 : info . whoami = 'original' info . pipe = os . fdopen ( readfd , 'rb' ) os . close ( writefd ) retcode = os . waitpid ( pid1 , ... | Fork this process creating a subprocess detached from the current context . |
62,387 | def pop_option ( ident , argv = None ) : if argv is None : from sys import argv if len ( ident ) == 1 : ident = '-' + ident else : ident = '--' + ident found = ident in argv if found : argv . remove ( ident ) return found | A lame routine for grabbing command - line arguments . Returns a boolean indicating whether the option was present . If it was it's removed from the argument string . Because of the lame behavior options can't be combined and non - boolean options aren't supported . Operates on sys . argv by default .
62,388 | def show_usage ( docstring , short , stream , exitcode ) : if stream is None : from sys import stdout as stream if not short : print ( 'Usage:' , docstring . strip ( ) , file = stream ) else : intext = False for l in docstring . splitlines ( ) : if intext : if not len ( l ) : break print ( l , file = stream ) elif len ... | Print program usage information and exit . |
62,389 | def wrong_usage ( docstring , * rest ) : intext = False if len ( rest ) == 0 : detail = 'invalid command-line arguments' elif len ( rest ) == 1 : detail = rest [ 0 ] else : detail = rest [ 0 ] % tuple ( rest [ 1 : ] ) print ( 'error:' , detail , '\n' , file = sys . stderr ) show_usage ( docstring , True , sys . stderr ... | Print a message indicating invalid command - line arguments and exit with an error code . |
62,390 | def excepthook ( self , etype , evalue , etb ) : self . inner_excepthook ( etype , evalue , etb ) if issubclass ( etype , KeyboardInterrupt ) : signal . signal ( signal . SIGINT , signal . SIG_DFL ) os . kill ( os . getpid ( ) , signal . SIGINT ) | Handle an uncaught exception . We always forward the exception on to whatever sys . excepthook was present upon setup . However if the exception is a KeyboardInterrupt we additionally kill ourselves with an uncaught SIGINT so that invoking programs know what happened . |
62,391 | def calc_nu_b ( b ) : return cgs . e * b / ( 2 * cgs . pi * cgs . me * cgs . c ) | Calculate the cyclotron frequency in Hz given a magnetic field strength in Gauss . |
62,392 | def calc_freefree_snu_ujy ( ne , t , width , elongation , dist , ghz ) : hz = ghz * 1e9 eta = calc_freefree_eta ( ne , t , hz ) kappa = calc_freefree_kappa ( ne , t , hz ) snu = calc_snu ( eta , kappa , width , elongation , dist ) ujy = snu * cgs . jypercgs * 1e6 return ujy | Calculate a flux density from pure free - free emission . |
62,393 | def concat ( invises , outvis , timesort = False ) : tb = util . tools . table ( ) ms = util . tools . ms ( ) if os . path . exists ( outvis ) : raise RuntimeError ( 'output "%s" already exists' % outvis ) for invis in invises : if not os . path . isdir ( invis ) : raise RuntimeError ( 'input "%s" does not exist' % inv... | Concatenate visibility measurement sets . |
62,394 | def delcal ( mspath ) : wantremove = 'MODEL_DATA CORRECTED_DATA' . split ( ) tb = util . tools . table ( ) tb . open ( b ( mspath ) , nomodify = False ) cols = frozenset ( tb . colnames ( ) ) toremove = [ b ( c ) for c in wantremove if c in cols ] if len ( toremove ) : tb . removecols ( toremove ) tb . close ( ) if six... | Delete the MODEL_DATA and CORRECTED_DATA columns from a measurement set . |
62,395 | def delmod_cli ( argv , alter_logger = True ) : check_usage ( delmod_doc , argv , usageifnoargs = True ) if alter_logger : util . logger ( ) cb = util . tools . calibrater ( ) for mspath in argv [ 1 : ] : cb . open ( b ( mspath ) , addcorr = False , addmodel = False ) cb . delmod ( otf = True , scr = False ) cb . close... | Command - line access to delmod functionality . |
62,396 | def extractbpflags ( calpath , deststream ) : tb = util . tools . table ( ) tb . open ( b ( os . path . join ( calpath , 'ANTENNA' ) ) ) antnames = tb . getcol ( b'NAME' ) tb . close ( ) tb . open ( b ( calpath ) ) try : t = tb . getkeyword ( b'VisCal' ) except RuntimeError : raise PKError ( 'no "VisCal" keyword in %s;... | Make a flags file out of a bandpass calibration table |
62,397 | def flagmanager_cli ( argv , alter_logger = True ) : check_usage ( flagmanager_doc , argv , usageifnoargs = True ) if len ( argv ) < 3 : wrong_usage ( flagmanager_doc , 'expect at least a mode and an MS name' ) mode = argv [ 1 ] ms = argv [ 2 ] if alter_logger : if mode == 'list' : util . logger ( 'info' ) elif mode ==... | Command - line access to flagmanager functionality . |
62,398 | def image2fits ( mspath , fitspath , velocity = False , optical = False , bitpix = - 32 , minpix = 0 , maxpix = - 1 , overwrite = False , dropstokes = False , stokeslast = True , history = True , ** kwargs ) : ia = util . tools . image ( ) ia . open ( b ( mspath ) ) ia . tofits ( outfile = b ( fitspath ) , velocity = v... | Convert an image in MS format to FITS format . |
62,399 | def importalma ( asdm , ms ) : from . scripting import CasapyScript script = os . path . join ( os . path . dirname ( __file__ ) , 'cscript_importalma.py' ) with CasapyScript ( script , asdm = asdm , ms = ms ) as cs : pass | Convert an ALMA low - level ASDM dataset to Measurement Set format . |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.