idx int64 0 63k | question stringlengths 53 5.28k | target stringlengths 5 805 |
|---|---|---|
62,200 | def add_injectable ( name , value , autocall = True , cache = False , cache_scope = _CS_FOREVER , memoize = False ) : if isinstance ( value , Callable ) : if autocall : value = _InjectableFuncWrapper ( name , value , cache = cache , cache_scope = cache_scope ) value . clear_cached ( ) elif not autocall and memoize : va... | Add a value that will be injected into other functions . |
62,201 | def injectable ( name = None , autocall = True , cache = False , cache_scope = _CS_FOREVER , memoize = False ) : def decorator ( func ) : if name : n = name else : n = func . __name__ add_injectable ( n , func , autocall = autocall , cache = cache , cache_scope = cache_scope , memoize = memoize ) return func return dec... | Decorates functions that will be injected into other functions . |
62,202 | def get_injectable_func_source_data ( name ) : if injectable_type ( name ) != 'function' : raise ValueError ( 'injectable {!r} is not a function' . format ( name ) ) inj = get_raw_injectable ( name ) if isinstance ( inj , _InjectableFuncWrapper ) : return utils . func_source_data ( inj . _func ) elif hasattr ( inj , '_... | Return data about an injectable function s source including file name line number and source code . |
62,203 | def add_step ( step_name , func ) : if isinstance ( func , Callable ) : logger . debug ( 'registering step {!r}' . format ( step_name ) ) _STEPS [ step_name ] = _StepFuncWrapper ( step_name , func ) else : raise TypeError ( 'func must be a callable' ) | Add a step function to Orca . |
62,204 | def step ( step_name = None ) : def decorator ( func ) : if step_name : name = step_name else : name = func . __name__ add_step ( name , func ) return func return decorator | Decorates functions that will be called by the run function . |
62,205 | def broadcast ( cast , onto , cast_on = None , onto_on = None , cast_index = False , onto_index = False ) : logger . debug ( 'registering broadcast of table {!r} onto {!r}' . format ( cast , onto ) ) _BROADCASTS [ ( cast , onto ) ] = Broadcast ( cast , onto , cast_on , onto_on , cast_index , onto_index ) | Register a rule for merging two tables by broadcasting one onto the other . |
62,206 | def _get_broadcasts ( tables ) : tables = set ( tables ) casts = tz . keyfilter ( lambda x : x [ 0 ] in tables and x [ 1 ] in tables , _BROADCASTS ) if tables - set ( tz . concat ( casts . keys ( ) ) ) : raise ValueError ( 'Not enough links to merge all tables.' ) return casts | Get the broadcasts associated with a set of tables . |
62,207 | def get_broadcast ( cast_name , onto_name ) : if is_broadcast ( cast_name , onto_name ) : return _BROADCASTS [ ( cast_name , onto_name ) ] else : raise KeyError ( 'no rule found for broadcasting {!r} onto {!r}' . format ( cast_name , onto_name ) ) | Get a single broadcast . |
62,208 | def _all_reachable_tables ( t ) : for k , v in t . items ( ) : for tname in _all_reachable_tables ( v ) : yield tname yield k | A generator that provides all the names of tables that can be reached via merges starting at the given target table . |
62,209 | def _recursive_getitem ( d , key ) : if key in d : return d else : for v in d . values ( ) : return _recursive_getitem ( v , key ) else : raise KeyError ( 'Key not found: {}' . format ( key ) ) | Descend into a dict of dicts to return the one that contains a given key . Every value in the dict must be another dict . |
62,210 | def _next_merge ( merge_node ) : if all ( _is_leaf_node ( d ) for d in _dict_value_to_pairs ( merge_node ) ) : return merge_node else : for d in tz . remove ( _is_leaf_node , _dict_value_to_pairs ( merge_node ) ) : return _next_merge ( d ) else : raise OrcaError ( 'No node found for next merge.' ) | Gets a node that has only leaf nodes below it . This table and the ones below are ready to be merged to make a new leaf node . |
62,211 | def get_step_table_names ( steps ) : table_names = set ( ) for s in steps : table_names |= get_step ( s ) . _tables_used ( ) return list ( table_names ) | Returns a list of table names injected into the provided steps . |
62,212 | def write_tables ( fname , table_names = None , prefix = None , compress = False , local = False ) : if table_names is None : table_names = list_tables ( ) tables = ( get_table ( t ) for t in table_names ) key_template = '{}/{{}}' . format ( prefix ) if prefix is not None else '{}' complib = compress and 'zlib' or None... | Writes tables to a pandas . HDFStore file . |
62,213 | def run ( steps , iter_vars = None , data_out = None , out_interval = 1 , out_base_tables = None , out_run_tables = None , compress = False , out_base_local = True , out_run_local = True ) : iter_vars = iter_vars or [ None ] max_i = len ( iter_vars ) if out_base_tables is None or out_run_tables is None : step_tables = ... | Run steps in series optionally repeatedly over some sequence . The current iteration variable is set as a global injectable called iter_var . |
62,214 | def injectables ( ** kwargs ) : global _INJECTABLES original = _INJECTABLES . copy ( ) _INJECTABLES . update ( kwargs ) yield _INJECTABLES = original | Temporarily add injectables to the pipeline environment . Takes only keyword arguments . |
62,215 | def temporary_tables ( ** kwargs ) : global _TABLES original = _TABLES . copy ( ) for k , v in kwargs . items ( ) : if not isinstance ( v , pd . DataFrame ) : raise ValueError ( 'tables only accepts DataFrames' ) add_table ( k , v ) yield _TABLES = original | Temporarily set DataFrames as registered tables . |
62,216 | def eval_variable ( name , ** kwargs ) : with injectables ( ** kwargs ) : vars = _collect_variables ( [ name ] , [ name ] ) return vars [ name ] | Execute a single variable function registered with Orca and return the result . Any keyword arguments are temporarily set as injectables . This gives the value as would be injected into a function . |
62,217 | def to_frame ( self , columns = None ) : extra_cols = _columns_for_table ( self . name ) if columns is not None : columns = [ columns ] if isinstance ( columns , str ) else columns columns = set ( columns ) set_extra_cols = set ( extra_cols ) local_cols = set ( self . local . columns ) & columns - set_extra_cols df = s... | Make a DataFrame with the given columns . |
62,218 | def update_col ( self , column_name , series ) : logger . debug ( 'updating column {!r} in table {!r}' . format ( column_name , self . name ) ) self . local [ column_name ] = series | Add or replace a column in the underlying DataFrame . |
62,219 | def column_type ( self , column_name ) : extra_cols = list_columns_for_table ( self . name ) if column_name in extra_cols : col = _COLUMNS [ ( self . name , column_name ) ] if isinstance ( col , _SeriesWrapper ) : return 'series' elif isinstance ( col , _ColumnFuncWrapper ) : return 'function' elif column_name in self ... | Report column type as one of local series or function . |
62,220 | def update_col_from_series ( self , column_name , series , cast = False ) : logger . debug ( 'updating column {!r} in table {!r}' . format ( column_name , self . name ) ) col_dtype = self . local [ column_name ] . dtype if series . dtype != col_dtype : if cast : series = series . astype ( col_dtype ) else : err_msg = "... | Update existing values in a column from another series . Index values must match in both column and series . Optionally casts data type to match the existing column . |
62,221 | def clear_cached ( self ) : _TABLE_CACHE . pop ( self . name , None ) for col in _columns_for_table ( self . name ) . values ( ) : col . clear_cached ( ) logger . debug ( 'cleared cached columns for table {!r}' . format ( self . name ) ) | Remove cached results from this table s computed columns . |
62,222 | def _call_func ( self ) : if _CACHING and self . cache and self . name in _TABLE_CACHE : logger . debug ( 'returning table {!r} from cache' . format ( self . name ) ) return _TABLE_CACHE [ self . name ] . value with log_start_finish ( 'call function to get frame for table {!r}' . format ( self . name ) , logger ) : kwa... | Call the wrapped function and return the result wrapped by DataFrameWrapper . Also updates attributes like columns index and length . |
62,223 | def clear_cached ( self ) : x = _COLUMN_CACHE . pop ( ( self . table_name , self . name ) , None ) if x is not None : logger . debug ( 'cleared cached value for column {!r} in table {!r}' . format ( self . name , self . table_name ) ) | Remove any cached result of this column . |
62,224 | def clear_cached ( self ) : x = _INJECTABLE_CACHE . pop ( self . name , None ) if x : logger . debug ( 'injectable {!r} removed from cache' . format ( self . name ) ) | Clear a cached result for this injectable . |
62,225 | def _tables_used ( self ) : args = list ( self . _argspec . args ) if self . _argspec . defaults : default_args = list ( self . _argspec . defaults ) else : default_args = [ ] names = args [ : len ( args ) - len ( default_args ) ] + default_args tables = set ( ) for name in names : parent_name = name . split ( '.' ) [ ... | Tables injected into the step . |
62,226 | def qbe_tree ( graph , nodes , root = None ) : if root : start = root else : index = random . randint ( 0 , len ( nodes ) - 1 ) start = nodes [ index ] to_visit = deque ( ) cnodes = copy ( nodes ) visited = set ( ) to_visit . append ( ( None , None , start , None ) ) tree = { } while len ( to_visit ) != 0 and nodes : p... | Given a graph nodes to explore and an optional root do a breadth - first search in order to return the tree . |
62,227 | def combine ( items , k = None ) : length_items = len ( items ) lengths = [ len ( i ) for i in items ] length = reduce ( lambda x , y : x * y , lengths ) repeats = [ reduce ( lambda x , y : x * y , lengths [ i : ] ) for i in range ( 1 , length_items ) ] + [ 1 ] if k is not None : k = k % length indices = [ old_div ( ( ... | Create a matrix in which each row is a tuple containing one of the solutions or the k-th solution . |
62,228 | def pickle_encode ( session_dict ) : "Returns the given session dictionary pickled and encoded as a string." pickled = pickle . dumps ( session_dict , pickle . HIGHEST_PROTOCOL ) return base64 . encodestring ( pickled + get_query_hash ( pickled ) . encode ( ) ) | Returns the given session dictionary pickled and encoded as a string . |
62,229 | def func_source_data ( func ) : filename = inspect . getsourcefile ( func ) lineno = inspect . getsourcelines ( func ) [ 1 ] source = inspect . getsource ( func ) return filename , lineno , source | Return data about a function source including file name line number and source code . |
62,230 | def clean ( self ) : if any ( self . errors ) : return ( selects , aliases , froms , wheres , sorts , groups_by , params ) = self . get_query_parts ( ) if not selects : validation_message = _ ( u"At least you must check a row to get." ) raise forms . ValidationError ( validation_message ) self . _selects = selects self... | Checks that there is at least one field to select |
62,231 | def get_results ( self , limit = None , offset = None , query = None , admin_name = None , row_number = False ) : add_extra_ids = ( admin_name is not None ) if not query : sql = self . get_raw_query ( limit = limit , offset = offset , add_extra_ids = add_extra_ids ) else : sql = query if settings . DEBUG : print ( sql ... | Fetch all results after perform SQL query and |
62,232 | def parse_content_type ( content_type ) : if '; charset=' in content_type : return tuple ( content_type . split ( '; charset=' ) ) else : if 'text' in content_type : encoding = 'ISO-8859-1' else : try : format = formats . find_by_content_type ( content_type ) except formats . UnknownFormat : encoding = 'ISO-8859-1' els... | Return a tuple of content type and charset . |
62,233 | def parse_http_accept_header ( header ) : components = [ item . strip ( ) for item in header . split ( ',' ) ] l = [ ] for component in components : if ';' in component : subcomponents = [ item . strip ( ) for item in component . split ( ';' ) ] l . append ( ( subcomponents [ 0 ] , subcomponents [ 1 ] [ 2 : ] ) ) else ... | Return a list of content types listed in the HTTP Accept header ordered by quality . |
62,234 | def parse_multipart_data ( request ) : return MultiPartParser ( META = request . META , input_data = StringIO ( request . body ) , upload_handlers = request . upload_handlers , encoding = request . encoding ) . parse ( ) | Parse a request with multipart data . |
62,235 | def override_supported_formats ( formats ) : def decorator ( function ) : @ wraps ( function ) def wrapper ( self , * args , ** kwargs ) : self . supported_formats = formats return function ( self , * args , ** kwargs ) return wrapper return decorator | Override the views class supported formats for the decorated function . |
62,236 | def route ( regex , method , name ) : def decorator ( function ) : function . route = routes . route ( regex = regex , view = function . __name__ , method = method , name = name ) @ wraps ( function ) def wrapper ( self , * args , ** kwargs ) : return function ( self , * args , ** kwargs ) return wrapper return decorat... | Route the decorated view . |
62,237 | def before ( method_name ) : def decorator ( function ) : @ wraps ( function ) def wrapper ( self , * args , ** kwargs ) : returns = getattr ( self , method_name ) ( * args , ** kwargs ) if returns is None : return function ( self , * args , ** kwargs ) else : if isinstance ( returns , HttpResponse ) : return returns e... | Run the given method prior to the decorated view . |
62,238 | def index ( self , request ) : objects = self . model . objects . all ( ) return self . _render ( request = request , template = 'index' , context = { cc2us ( pluralize ( self . model . __name__ ) ) : objects , } , status = 200 ) | Render a list of objects . |
62,239 | def new ( self , request ) : form = ( self . form or generate_form ( self . model ) ) ( ) return self . _render ( request = request , template = 'new' , context = { 'form' : form } , status = 200 ) | Render a form to create a new object . |
62,240 | def edit ( self , request , id ) : try : object = self . model . objects . get ( id = id ) except self . model . DoesNotExist : return self . _render ( request = request , template = '404' , context = { 'error' : 'The %s could not be found.' % self . model . __name__ . lower ( ) } , status = 404 , prefix_template_path ... | Render a form to edit an object . |
62,241 | def update ( self , request , id ) : try : object = self . model . objects . get ( id = id ) except self . model . DoesNotExist : return self . _render ( request = request , template = '404' , context = { 'error' : 'The %s could not be found.' % self . model . __name__ . lower ( ) } , status = 404 , prefix_template_pat... | Update an object . |
62,242 | def replace ( self , request , id ) : try : object = self . model . objects . get ( id = id ) except self . model . DoesNotExist : return self . _render ( request = request , template = '404' , context = { 'error' : 'The %s could not be found.' % self . model . __name__ . lower ( ) } , status = 404 , prefix_template_pa... | Replace an object . |
62,243 | def build_q ( fields_dict , params_dict , request = None ) : and_query = Q ( ) for fieldname in fields_dict : search_field = fields_dict [ fieldname ] if fieldname in params_dict and params_dict [ fieldname ] != '' and params_dict [ fieldname ] != [ ] : or_query = None if type ( search_field ) == type ( list ( ) ) : fi... | Returns a Q object from filters config and actual parameters . |
62,244 | def get_search_fields ( cls ) : sfdict = { } for klass in tuple ( cls . __bases__ ) + ( cls , ) : if hasattr ( klass , 'search_fields' ) : sfdict . update ( klass . search_fields ) return sfdict | Returns search fields in sfdict |
62,245 | def find ( identifier ) : for format in FORMATS : if identifier in [ format . name , format . acronym , format . extension ] : return format raise UnknownFormat ( 'No format found with name, acronym or extension "%s"' % identifier ) | Find and return a format by name acronym or extension . |
62,246 | def find_by_name ( name ) : for format in FORMATS : if name == format . name : return format raise UnknownFormat ( 'No format found with name "%s"' % name ) | Find and return a format by name . |
62,247 | def find_by_extension ( extension ) : for format in FORMATS : if extension in format . extensions : return format raise UnknownFormat ( 'No format found with extension "%s"' % extension ) | Find and return a format by extension . |
62,248 | def find_by_content_type ( content_type ) : for format in FORMATS : if content_type in format . content_types : return format raise UnknownFormat ( 'No format found with content type "%s"' % content_type ) | Find and return a format by content type . |
62,249 | def options ( self , request , map , * args , ** kwargs ) : options = { } for method , function in map . items ( ) : options [ method ] = function . __doc__ return self . _render ( request = request , template = 'options' , context = { 'options' : options } , status = 200 , headers = { 'Allow' : ', ' . join ( options .... | List communication options . |
62,250 | def _get_format ( self , request ) : supported_formats = [ formats . find ( format ) for format in self . supported_formats ] if '.' in request . path : extension = request . path . split ( '.' ) [ - 1 ] try : format = formats . find_by_extension ( extension ) except formats . UnknownFormat : return None if format in s... | Determine and return a formats . Format instance describing the most desired response format that is supported by these views . |
62,251 | def _render ( self , request , template = None , status = 200 , context = { } , headers = { } , prefix_template_path = True ) : format = self . _get_format ( request ) if not format : return HttpResponse ( status = 406 ) if template : if prefix_template_path : template_path = '%s.%s' % ( self . template_path + template... | Render a HTTP response . |
62,252 | def _error ( self , request , status , headers = { } , prefix_template_path = False , ** kwargs ) : return self . _render ( request = request , template = str ( status ) , status = status , context = { 'error' : kwargs } , headers = headers , prefix_template_path = prefix_template_path ) | Convenience method to render an error response . The template is inferred from the status code . |
62,253 | def find ( format ) : try : serializer = SERIALIZERS [ format ] except KeyError : raise UnknownSerializer ( 'No serializer found for %s' % format . acronym ) return serializer | Find and return a serializer for the given format . |
62,254 | def get_form_kwargs ( self ) : update_data = { } sfdict = self . filter_class . get_search_fields ( ) for fieldname in sfdict : try : has_multiple = sfdict [ fieldname ] . get ( 'multiple' , False ) except : has_multiple = False if has_multiple : value = self . request . GET . getlist ( fieldname , [ ] ) else : value =... | Returns the keyword arguments for instantiating the search form . |
62,255 | def pluralize ( word ) : rules = [ [ '(?i)(quiz)$' , '\\1zes' ] , [ '^(?i)(ox)$' , '\\1en' ] , [ '(?i)([m|l])ouse$' , '\\1ice' ] , [ '(?i)(matr|vert|ind)ix|ex$' , '\\1ices' ] , [ '(?i)(x|ch|ss|sh)$' , '\\1es' ] , [ '(?i)([^aeiouy]|qu)ies$' , '\\1y' ] , [ '(?i)([^aeiouy]|qu)y$' , '\\1ies' ] , [ '(?i)(hive)$' , '\\1s' ] ... | Pluralize an English noun . |
62,256 | def us2mc ( string ) : return re . sub ( r'_([a-z])' , lambda m : ( m . group ( 1 ) . upper ( ) ) , string ) | Transform an underscore_case string to a mixedCase string |
62,257 | def generate_form ( model , form = None , fields = False , exclude = False ) : _model , _fields , _exclude = model , fields , exclude class Form ( form or forms . ModelForm ) : class Meta : model = _model if _fields is not False : fields = _fields if _exclude is not False : exclude = _exclude return Form | Generate a form from a model . |
62,258 | def sample_double_norm ( mean , std_upper , std_lower , size ) : from scipy . special import erfinv samples = np . empty ( size ) percentiles = np . random . uniform ( 0. , 1. , size ) cutoff = std_lower / ( std_lower + std_upper ) w = ( percentiles < cutoff ) percentiles [ w ] *= 0.5 / cutoff samples [ w ] = mean + np... | Note that this function requires Scipy . |
62,259 | def find_gamma_params ( mode , std ) : if mode < 0 : raise ValueError ( 'input mode must be positive for gamma; got %e' % mode ) var = std ** 2 beta = ( mode + np . sqrt ( mode ** 2 + 4 * var ) ) / ( 2 * var ) j = 2 * var / mode ** 2 alpha = ( j + 1 + np . sqrt ( 2 * j + 1 ) ) / j if alpha <= 1 : raise ValueError ( 'co... | Given a modal value and a standard deviation compute corresponding parameters for the gamma distribution . |
62,260 | def _lval_add_towards_polarity ( x , polarity ) : if x < 0 : if polarity < 0 : return Lval ( 'toinf' , x ) return Lval ( 'pastzero' , x ) elif polarity > 0 : return Lval ( 'toinf' , x ) return Lval ( 'pastzero' , x ) | Compute the appropriate Lval kind for the limit of value x towards polarity . Either toinf or pastzero depending on the sign of x and the infinity direction of polarity . |
62,261 | def limtype ( msmt ) : if np . isscalar ( msmt ) : return 0 if isinstance ( msmt , Uval ) : return 0 if isinstance ( msmt , Lval ) : if msmt . kind == 'undef' : raise ValueError ( 'no simple limit type for Lval %r' % msmt ) p = msmt . _polarity ( ) if p == - 2 or p == 1 : return - 1 if p == 2 or p == - 1 : return 1 ret... | Return - 1 if this value is some kind of upper limit 1 if this value is some kind of lower limit 0 otherwise . |
62,262 | def from_pcount ( nevents ) : if nevents < 0 : raise ValueError ( 'Poisson parameter `nevents` must be nonnegative' ) return Uval ( np . random . gamma ( nevents + 1 , size = uval_nsamples ) ) | We assume a Poisson process . nevents is the number of events in some interval . The distribution of values is the distribution of the Poisson rate parameter given this observed number of events where the rate is in units of events per interval of the same duration . The max - likelihood value is nevents but the mean v... |
62,263 | def repvals ( self , method ) : if method == 'pct' : return pk_scoreatpercentile ( self . d , [ 50. , 84.134 , 15.866 ] ) if method == 'gauss' : m , s = self . d . mean ( ) , self . d . std ( ) return np . asarray ( [ m , m + s , m - s ] ) raise ValueError ( 'unknown representative-value method "%s"' % method ) | Compute representative statistical values for this Uval . method may be either pct or gauss . |
62,264 | def repval ( self , limitsok = False ) : if not limitsok and self . dkind in ( 'lower' , 'upper' ) : raise LimitError ( ) if self . dkind == 'unif' : lower , upper = map ( float , self . data ) v = 0.5 * ( lower + upper ) elif self . dkind in _noextra_dkinds : v = float ( self . data ) elif self . dkind in _yesextra_dk... | Get a best - effort representative value as a float . This can be DANGEROUS because it discards limit information which is rarely wise . |
62,265 | def moreland_adjusthue ( msh , m_unsat ) : if msh [ M ] >= m_unsat : return msh [ H ] hspin = ( msh [ S ] * np . sqrt ( m_unsat ** 2 - msh [ M ] ** 2 ) / ( msh [ M ] * np . sin ( msh [ S ] ) ) ) if msh [ H ] > - np . pi / 3 : return msh [ H ] + hspin return msh [ H ] - hspin | Moreland s AdjustHue procedure to adjust the hue value of an Msh color based on ... some criterion . |
62,266 | def get_datasets_in_nodes ( ) : data_dir = os . path . join ( scriptdir , ".." , "usgs" , "data" ) cwic = map ( lambda d : d [ "datasetName" ] , api . datasets ( None , CWIC_LSI_EXPLORER_CATALOG_NODE ) [ 'data' ] ) ee = map ( lambda d : d [ "datasetName" ] , api . datasets ( None , EARTH_EXPLORER_CATALOG_NODE ) [ 'data... | Get the node associated with each dataset . Some datasets will have an ambiguous node since they exists in more than one node . |
62,267 | def pivot_wavelength_ee ( bpass ) : from scipy . integrate import simps return np . sqrt ( simps ( bpass . resp , bpass . wlen ) / simps ( bpass . resp / bpass . wlen ** 2 , bpass . wlen ) ) | Compute pivot wavelength assuming equal - energy convention . |
62,268 | def get_std_registry ( ) : from six import itervalues reg = Registry ( ) for fn in itervalues ( builtin_registrars ) : fn ( reg ) return reg | Get a Registry object pre - filled with information for standard telescopes . |
62,269 | def pivot_wavelength ( self ) : wl = self . registry . _pivot_wavelengths . get ( ( self . telescope , self . band ) ) if wl is not None : return wl wl = self . calc_pivot_wavelength ( ) self . registry . register_pivot_wavelength ( self . telescope , self . band , wl ) return wl | Get the bandpass pivot wavelength . |
62,270 | def calc_halfmax_points ( self ) : d = self . _ensure_data ( ) return interpolated_halfmax_points ( d . wlen , d . resp ) | Calculate the wavelengths of the filter half - maximum values . |
62,271 | def halfmax_points ( self ) : t = self . registry . _halfmaxes . get ( ( self . telescope , self . band ) ) if t is not None : return t t = self . calc_halfmax_points ( ) self . registry . register_halfmaxes ( self . telescope , self . band , t [ 0 ] , t [ 1 ] ) return t | Get the bandpass half - maximum wavelengths . These can be used to compute a representative bandwidth or for display purposes . |
62,272 | def bands ( self , telescope ) : q = self . _seen_bands . get ( telescope ) if q is None : return [ ] return list ( q ) | Return a list of bands associated with the specified telescope . |
62,273 | def register_pivot_wavelength ( self , telescope , band , wlen ) : if ( telescope , band ) in self . _pivot_wavelengths : raise AlreadyDefinedError ( 'pivot wavelength for %s/%s already ' 'defined' , telescope , band ) self . _note ( telescope , band ) self . _pivot_wavelengths [ telescope , band ] = wlen return self | Register precomputed pivot wavelengths . |
62,274 | def register_halfmaxes ( self , telescope , band , lower , upper ) : if ( telescope , band ) in self . _halfmaxes : raise AlreadyDefinedError ( 'half-max points for %s/%s already ' 'defined' , telescope , band ) self . _note ( telescope , band ) self . _halfmaxes [ telescope , band ] = ( lower , upper ) return self | Register precomputed half - max points . |
62,275 | def register_bpass ( self , telescope , klass ) : if telescope in self . _bpass_classes : raise AlreadyDefinedError ( 'bandpass class for %s already ' 'defined' , telescope ) self . _note ( telescope , None ) self . _bpass_classes [ telescope ] = klass return self | Register a Bandpass class . |
62,276 | def get ( self , telescope , band ) : klass = self . _bpass_classes . get ( telescope ) if klass is None : raise NotDefinedError ( 'bandpass data for %s not defined' , telescope ) bp = klass ( ) bp . registry = self bp . telescope = telescope bp . band = band return bp | Get a Bandpass object for a known telescope and filter . |
62,277 | def _load_data ( self , band ) : df = bandpass_data_frame ( 'filter_wise_' + str ( band ) + '.dat' , 'wlen resp uncert' ) df . wlen *= 1e4 df . uncert *= df . resp / 1000. lo , hi = self . _filter_subsets [ band ] df = df [ lo : hi ] return df | From the WISE All - Sky Explanatory Supplement IV . 4 . h . i . 1 and Jarrett + 2011 . These are relative response per erg and so can be integrated directly against F_nu spectra . Wavelengths are in micron uncertainties are in parts per thousand . |
62,278 | def clean_comment_body ( body ) : body = _parser . unescape ( body ) body = re . sub ( r'<a [^>]+>(.+?)</a>' , r'\1' , body ) body = body . replace ( '<br>' , '\n' ) body = re . sub ( r'<.+?>' , '' , body ) return body | Returns given comment HTML as plaintext . |
62,279 | def _create_wcs ( fitsheader ) : wcsmodule = _load_wcs_module ( ) is_pywcs = hasattr ( wcsmodule , 'UnitConverter' ) wcs = wcsmodule . WCS ( fitsheader ) wcs . wcs . set ( ) wcs . wcs . fix ( ) if hasattr ( wcs , 'wcs_pix2sky' ) : wcs . wcs_pix2world = wcs . wcs_pix2sky wcs . wcs_world2pix = wcs . wcs_sky2pix return wc... | For compatibility between astropy and pywcs . |
62,280 | def sanitize_unicode ( item ) : if isinstance ( item , text_type ) : return item . encode ( 'utf8' ) if isinstance ( item , dict ) : return dict ( ( sanitize_unicode ( k ) , sanitize_unicode ( v ) ) for k , v in six . iteritems ( item ) ) if isinstance ( item , ( list , tuple ) ) : return item . __class__ ( sanitize_un... | Safely pass string values to the CASA tools . |
62,281 | def datadir ( * subdirs ) : import os . path data = None if 'CASAPATH' in os . environ : data = os . path . join ( os . environ [ 'CASAPATH' ] . split ( ) [ 0 ] , 'data' ) if data is None : try : import casadef except ImportError : pass else : data = os . path . join ( os . path . dirname ( casadef . task_directory ) ,... | Get a path within the CASA data directory . |
62,282 | def logger ( filter = 'WARN' ) : import os , shutil , tempfile cwd = os . getcwd ( ) tempdir = None try : tempdir = tempfile . mkdtemp ( prefix = 'casautil' ) try : os . chdir ( tempdir ) sink = tools . logsink ( ) sink . setlogfile ( sanitize_unicode ( os . devnull ) ) try : os . unlink ( 'casapy.log' ) except OSError... | Set up CASA to write log messages to standard output . |
62,283 | def forkandlog ( function , filter = 'INFO5' , debug = False ) : import sys , os readfd , writefd = os . pipe ( ) pid = os . fork ( ) if pid == 0 : os . close ( readfd ) if not debug : f = open ( os . devnull , 'w' ) os . dup2 ( f . fileno ( ) , 1 ) os . dup2 ( f . fileno ( ) , 2 ) sink = logger ( filter = filter ) sin... | Fork a child process and read its CASA log output . |
62,284 | def _get_extended ( scene , resp ) : root = ElementTree . fromstring ( resp . text ) items = root . findall ( "eemetadata:metadataFields/eemetadata:metadataField" , NAMESPACES ) scene [ 'extended' ] = { item . attrib . get ( 'name' ) . strip ( ) : xsi . get ( item [ 0 ] ) for item in items } return scene | Parse metadata returned from the metadataUrl of a USGS scene . |
62,285 | def _async_requests ( urls ) : session = FuturesSession ( max_workers = 30 ) futures = [ session . get ( url ) for url in urls ] return [ future . result ( ) for future in futures ] | Sends multiple non - blocking requests . Returns a list of responses . |
62,286 | def metadata ( dataset , node , entityids , extended = False , api_key = None ) : api_key = _get_api_key ( api_key ) url = '{}/metadata' . format ( USGS_API ) payload = { "jsonRequest" : payloads . metadata ( dataset , node , entityids , api_key = api_key ) } r = requests . post ( url , payload ) response = r . json ( ... | Request metadata for a given scene in a USGS dataset . |
62,287 | def reraise_context ( fmt , * args ) : import sys if len ( args ) : cstr = fmt % args else : cstr = text_type ( fmt ) ex = sys . exc_info ( ) [ 1 ] if isinstance ( ex , EnvironmentError ) : ex . strerror = '%s: %s' % ( cstr , ex . strerror ) ex . args = ( ex . errno , ex . strerror ) else : if len ( ex . args ) : cstr ... | Reraise an exception with its message modified to specify additional context . |
62,288 | def copy ( self ) : new = self . __class__ ( ) new . __dict__ = dict ( self . __dict__ ) return new | Return a shallow copy of this object . |
62,289 | def get_all_boards ( * args , ** kwargs ) : https = kwargs . get ( 'https' , args [ 1 ] if len ( args ) > 1 else False ) url_generator = Url ( None , https ) _fetch_boards_metadata ( url_generator ) return get_boards ( _metadata . keys ( ) , * args , ** kwargs ) | Returns every board on 4chan . |
62,290 | def get_thread ( self , thread_id , update_if_cached = True , raise_404 = False ) : cached_thread = self . _thread_cache . get ( thread_id ) if cached_thread : if update_if_cached : cached_thread . update ( ) return cached_thread res = self . _requests_session . get ( self . _url . thread_api_url ( thread_id = thread_i... | Get a thread from 4chan via 4chan API . |
62,291 | def thread_exists ( self , thread_id ) : return self . _requests_session . head ( self . _url . thread_api_url ( thread_id = thread_id ) ) . ok | Check if a thread exists or has 404 d . |
62,292 | def get_threads ( self , page = 1 ) : url = self . _url . page_url ( page ) return self . _request_threads ( url ) | Returns all threads on a certain page . |
62,293 | def get_all_thread_ids ( self ) : json = self . _get_json ( self . _url . thread_list ( ) ) return [ thread [ 'no' ] for page in json for thread in page [ 'threads' ] ] | Return the ID of every thread on this board . |
62,294 | def get_all_threads ( self , expand = False ) : if not expand : return self . _request_threads ( self . _url . catalog ( ) ) thread_ids = self . get_all_thread_ids ( ) threads = [ self . get_thread ( id , raise_404 = False ) for id in thread_ids ] return filter ( None , threads ) | Return every thread on this board . |
62,295 | def refresh_cache ( self , if_want_update = False ) : for thread in tuple ( self . _thread_cache . values ( ) ) : if if_want_update : if not thread . want_update : continue thread . update ( ) | Update all threads currently stored in our cache . |
62,296 | def modify_environment ( self , env ) : is_rpm_install = self . _rootdir . startswith ( '/usr/lib64/casapy/release/' ) def path ( * args ) : return os . path . join ( self . _rootdir , * args ) env [ 'CASAROOT' ] = path ( ) env [ 'CASAPATH' ] = ' ' . join ( [ path ( ) , os . uname ( ) [ 0 ] . lower ( ) , 'local' , os .... | Maintaining compatibility with different CASA versions is a pain . |
62,297 | def compute_bgband ( evtpath , srcreg , bkgreg , ebins , env = None ) : import numpy as np import pandas as pd from scipy . special import erfcinv , gammaln if env is None : from . import CiaoEnvironment env = CiaoEnvironment ( ) srcarea = get_region_area ( env , evtpath , srcreg ) bkgarea = get_region_area ( env , evt... | Compute background information for a source in one or more energy bands . |
62,298 | def simple_srcflux ( env , infile = None , psfmethod = 'arfcorr' , conf = 0.68 , verbose = 0 , ** kwargs ) : from . . . io import Path import shutil , signal , tempfile if infile is None : raise ValueError ( 'must specify infile' ) kwargs . update ( dict ( infile = infile , psfmethod = psfmethod , conf = conf , verbose... | Run the CIAO srcflux script and retrieve its results . |
62,299 | def new_for_fk10_fig9 ( cls , shlib_path ) : inst = ( cls ( shlib_path ) . set_thermal_background ( 2.1e7 , 3e9 ) . set_bfield ( 48 ) . set_edist_powerlaw ( 0.016 , 4.0 , 3.7 , 5e9 / 3 ) . set_freqs ( 100 , 0.5 , 50 ) . set_hybrid_parameters ( 12 , 12 ) . set_ignore_q_terms ( False ) . set_obs_angle ( 50 * np . pi / 18... | Create a calculator initialized to reproduce Figure 9 from FK10 . |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.