idx int64 0 63k | question stringlengths 53 5.28k | target stringlengths 5 805 |
|---|---|---|
62,200 | def add_injectable ( name , value , autocall = True , cache = False , cache_scope = _CS_FOREVER , memoize = False ) : if isinstance ( value , Callable ) : if autocall : value = _InjectableFuncWrapper ( name , value , cache = cache , cache_scope = cache_scope ) value . clear_cached ( ) elif not autocall and memoize : value = _memoize_function ( value , name , cache_scope = cache_scope ) logger . debug ( 'registering injectable {!r}' . format ( name ) ) _INJECTABLES [ name ] = value | Add a value that will be injected into other functions . |
62,201 | def injectable ( name = None , autocall = True , cache = False , cache_scope = _CS_FOREVER , memoize = False ) : def decorator ( func ) : if name : n = name else : n = func . __name__ add_injectable ( n , func , autocall = autocall , cache = cache , cache_scope = cache_scope , memoize = memoize ) return func return decorator | Decorates functions that will be injected into other functions . |
62,202 | def get_injectable_func_source_data ( name ) : if injectable_type ( name ) != 'function' : raise ValueError ( 'injectable {!r} is not a function' . format ( name ) ) inj = get_raw_injectable ( name ) if isinstance ( inj , _InjectableFuncWrapper ) : return utils . func_source_data ( inj . _func ) elif hasattr ( inj , '__wrapped__' ) : return utils . func_source_data ( inj . __wrapped__ ) else : return utils . func_source_data ( inj ) | Return data about an injectable function s source including file name line number and source code . |
62,203 | def add_step ( step_name , func ) : if isinstance ( func , Callable ) : logger . debug ( 'registering step {!r}' . format ( step_name ) ) _STEPS [ step_name ] = _StepFuncWrapper ( step_name , func ) else : raise TypeError ( 'func must be a callable' ) | Add a step function to Orca . |
62,204 | def step ( step_name = None ) : def decorator ( func ) : if step_name : name = step_name else : name = func . __name__ add_step ( name , func ) return func return decorator | Decorates functions that will be called by the run function . |
62,205 | def broadcast ( cast , onto , cast_on = None , onto_on = None , cast_index = False , onto_index = False ) : logger . debug ( 'registering broadcast of table {!r} onto {!r}' . format ( cast , onto ) ) _BROADCASTS [ ( cast , onto ) ] = Broadcast ( cast , onto , cast_on , onto_on , cast_index , onto_index ) | Register a rule for merging two tables by broadcasting one onto the other . |
62,206 | def _get_broadcasts ( tables ) : tables = set ( tables ) casts = tz . keyfilter ( lambda x : x [ 0 ] in tables and x [ 1 ] in tables , _BROADCASTS ) if tables - set ( tz . concat ( casts . keys ( ) ) ) : raise ValueError ( 'Not enough links to merge all tables.' ) return casts | Get the broadcasts associated with a set of tables . |
62,207 | def get_broadcast ( cast_name , onto_name ) : if is_broadcast ( cast_name , onto_name ) : return _BROADCASTS [ ( cast_name , onto_name ) ] else : raise KeyError ( 'no rule found for broadcasting {!r} onto {!r}' . format ( cast_name , onto_name ) ) | Get a single broadcast . |
62,208 | def _all_reachable_tables ( t ) : for k , v in t . items ( ) : for tname in _all_reachable_tables ( v ) : yield tname yield k | A generator that provides all the names of tables that can be reached via merges starting at the given target table . |
62,209 | def _recursive_getitem ( d , key ) : if key in d : return d else : for v in d . values ( ) : return _recursive_getitem ( v , key ) else : raise KeyError ( 'Key not found: {}' . format ( key ) ) | Descend into a dict of dicts to return the one that contains a given key . Every value in the dict must be another dict . |
62,210 | def _next_merge ( merge_node ) : if all ( _is_leaf_node ( d ) for d in _dict_value_to_pairs ( merge_node ) ) : return merge_node else : for d in tz . remove ( _is_leaf_node , _dict_value_to_pairs ( merge_node ) ) : return _next_merge ( d ) else : raise OrcaError ( 'No node found for next merge.' ) | Gets a node that has only leaf nodes below it . This table and the ones below are ready to be merged to make a new leaf node . |
62,211 | def get_step_table_names ( steps ) : table_names = set ( ) for s in steps : table_names |= get_step ( s ) . _tables_used ( ) return list ( table_names ) | Returns a list of table names injected into the provided steps . |
62,212 | def write_tables ( fname , table_names = None , prefix = None , compress = False , local = False ) : if table_names is None : table_names = list_tables ( ) tables = ( get_table ( t ) for t in table_names ) key_template = '{}/{{}}' . format ( prefix ) if prefix is not None else '{}' complib = compress and 'zlib' or None complevel = compress and 1 or 0 with pd . HDFStore ( fname , mode = 'a' , complib = complib , complevel = complevel ) as store : for t in tables : columns = None if local is True : columns = t . local_columns store [ key_template . format ( t . name ) ] = t . to_frame ( columns = columns ) | Writes tables to a pandas . HDFStore file . |
62,213 | def run ( steps , iter_vars = None , data_out = None , out_interval = 1 , out_base_tables = None , out_run_tables = None , compress = False , out_base_local = True , out_run_local = True ) : iter_vars = iter_vars or [ None ] max_i = len ( iter_vars ) if out_base_tables is None or out_run_tables is None : step_tables = get_step_table_names ( steps ) if out_base_tables is None : out_base_tables = step_tables if out_run_tables is None : out_run_tables = step_tables if data_out : add_injectable ( 'iter_var' , iter_vars [ 0 ] ) write_tables ( data_out , out_base_tables , 'base' , compress = compress , local = out_base_local ) for i , var in enumerate ( iter_vars , start = 1 ) : add_injectable ( 'iter_var' , var ) if var is not None : print ( 'Running iteration {} with iteration value {!r}' . format ( i , var ) ) logger . debug ( 'running iteration {} with iteration value {!r}' . format ( i , var ) ) t1 = time . time ( ) for j , step_name in enumerate ( steps ) : add_injectable ( 'iter_step' , iter_step ( j , step_name ) ) print ( 'Running step {!r}' . format ( step_name ) ) with log_start_finish ( 'run step {!r}' . format ( step_name ) , logger , logging . INFO ) : step = get_step ( step_name ) t2 = time . time ( ) step ( ) print ( "Time to execute step '{}': {:.2f} s" . format ( step_name , time . time ( ) - t2 ) ) clear_cache ( scope = _CS_STEP ) print ( ( 'Total time to execute iteration {} ' 'with iteration value {!r}: ' '{:.2f} s' ) . format ( i , var , time . time ( ) - t1 ) ) if data_out : if ( i - 1 ) % out_interval == 0 or i == max_i : write_tables ( data_out , out_run_tables , var , compress = compress , local = out_run_local ) clear_cache ( scope = _CS_ITER ) | Run steps in series optionally repeatedly over some sequence . The current iteration variable is set as a global injectable called iter_var . |
62,214 | def injectables ( ** kwargs ) : global _INJECTABLES original = _INJECTABLES . copy ( ) _INJECTABLES . update ( kwargs ) yield _INJECTABLES = original | Temporarily add injectables to the pipeline environment . Takes only keyword arguments . |
62,215 | def temporary_tables ( ** kwargs ) : global _TABLES original = _TABLES . copy ( ) for k , v in kwargs . items ( ) : if not isinstance ( v , pd . DataFrame ) : raise ValueError ( 'tables only accepts DataFrames' ) add_table ( k , v ) yield _TABLES = original | Temporarily set DataFrames as registered tables . |
62,216 | def eval_variable ( name , ** kwargs ) : with injectables ( ** kwargs ) : vars = _collect_variables ( [ name ] , [ name ] ) return vars [ name ] | Execute a single variable function registered with Orca and return the result . Any keyword arguments are temporarily set as injectables . This gives the value as would be injected into a function . |
62,217 | def to_frame ( self , columns = None ) : extra_cols = _columns_for_table ( self . name ) if columns is not None : columns = [ columns ] if isinstance ( columns , str ) else columns columns = set ( columns ) set_extra_cols = set ( extra_cols ) local_cols = set ( self . local . columns ) & columns - set_extra_cols df = self . local [ list ( local_cols ) ] . copy ( ) extra_cols = { k : extra_cols [ k ] for k in ( columns & set_extra_cols ) } else : df = self . local . copy ( ) with log_start_finish ( 'computing {!r} columns for table {!r}' . format ( len ( extra_cols ) , self . name ) , logger ) : for name , col in extra_cols . items ( ) : with log_start_finish ( 'computing column {!r} for table {!r}' . format ( name , self . name ) , logger ) : df [ name ] = col ( ) return df | Make a DataFrame with the given columns . |
62,218 | def update_col ( self , column_name , series ) : logger . debug ( 'updating column {!r} in table {!r}' . format ( column_name , self . name ) ) self . local [ column_name ] = series | Add or replace a column in the underlying DataFrame . |
62,219 | def column_type ( self , column_name ) : extra_cols = list_columns_for_table ( self . name ) if column_name in extra_cols : col = _COLUMNS [ ( self . name , column_name ) ] if isinstance ( col , _SeriesWrapper ) : return 'series' elif isinstance ( col , _ColumnFuncWrapper ) : return 'function' elif column_name in self . local_columns : return 'local' raise KeyError ( 'column {!r} not found' . format ( column_name ) ) | Report column type as one of local series or function . |
62,220 | def update_col_from_series ( self , column_name , series , cast = False ) : logger . debug ( 'updating column {!r} in table {!r}' . format ( column_name , self . name ) ) col_dtype = self . local [ column_name ] . dtype if series . dtype != col_dtype : if cast : series = series . astype ( col_dtype ) else : err_msg = "Data type mismatch, existing:{}, update:{}" err_msg = err_msg . format ( col_dtype , series . dtype ) raise ValueError ( err_msg ) self . local . loc [ series . index , column_name ] = series | Update existing values in a column from another series . Index values must match in both column and series . Optionally casts data type to match the existing column . |
62,221 | def clear_cached ( self ) : _TABLE_CACHE . pop ( self . name , None ) for col in _columns_for_table ( self . name ) . values ( ) : col . clear_cached ( ) logger . debug ( 'cleared cached columns for table {!r}' . format ( self . name ) ) | Remove cached results from this table s computed columns . |
62,222 | def _call_func ( self ) : if _CACHING and self . cache and self . name in _TABLE_CACHE : logger . debug ( 'returning table {!r} from cache' . format ( self . name ) ) return _TABLE_CACHE [ self . name ] . value with log_start_finish ( 'call function to get frame for table {!r}' . format ( self . name ) , logger ) : kwargs = _collect_variables ( names = self . _argspec . args , expressions = self . _argspec . defaults ) frame = self . _func ( ** kwargs ) self . _columns = list ( frame . columns ) self . _index = frame . index self . _len = len ( frame ) wrapped = DataFrameWrapper ( self . name , frame , copy_col = self . copy_col ) if self . cache : _TABLE_CACHE [ self . name ] = CacheItem ( self . name , wrapped , self . cache_scope ) return wrapped | Call the wrapped function and return the result wrapped by DataFrameWrapper . Also updates attributes like columns index and length . |
62,223 | def clear_cached ( self ) : x = _COLUMN_CACHE . pop ( ( self . table_name , self . name ) , None ) if x is not None : logger . debug ( 'cleared cached value for column {!r} in table {!r}' . format ( self . name , self . table_name ) ) | Remove any cached result of this column . |
62,224 | def clear_cached ( self ) : x = _INJECTABLE_CACHE . pop ( self . name , None ) if x : logger . debug ( 'injectable {!r} removed from cache' . format ( self . name ) ) | Clear a cached result for this injectable . |
62,225 | def _tables_used ( self ) : args = list ( self . _argspec . args ) if self . _argspec . defaults : default_args = list ( self . _argspec . defaults ) else : default_args = [ ] names = args [ : len ( args ) - len ( default_args ) ] + default_args tables = set ( ) for name in names : parent_name = name . split ( '.' ) [ 0 ] if is_table ( parent_name ) : tables . add ( parent_name ) return tables | Tables injected into the step . |
62,226 | def qbe_tree ( graph , nodes , root = None ) : if root : start = root else : index = random . randint ( 0 , len ( nodes ) - 1 ) start = nodes [ index ] to_visit = deque ( ) cnodes = copy ( nodes ) visited = set ( ) to_visit . append ( ( None , None , start , None ) ) tree = { } while len ( to_visit ) != 0 and nodes : parent , parent_edge , v , v_edge = to_visit . pop ( ) if v in nodes : nodes . remove ( v ) node = graph [ v ] if v not in visited and len ( node ) > 1 : visited . add ( v ) if all ( ( parent , parent_edge , v , v_edge ) ) : if parent not in tree : tree [ parent ] = [ ] if ( parent_edge , v , v_edge ) not in tree [ parent ] : tree [ parent ] . append ( ( parent_edge , v , v_edge ) ) if v not in tree : tree [ v ] = [ ] if ( v_edge , parent , parent_edge ) not in tree [ v ] : tree [ v ] . append ( ( v_edge , parent , parent_edge ) ) for node_edge , neighbor , neighbor_edge in node : value = ( v , node_edge , neighbor , neighbor_edge ) to_visit . append ( value ) remove_leafs ( tree , cnodes ) return tree , ( len ( nodes ) == 0 ) | Given a graph nodes to explore and an optinal root do a breadth - first search in order to return the tree . |
62,227 | def combine ( items , k = None ) : length_items = len ( items ) lengths = [ len ( i ) for i in items ] length = reduce ( lambda x , y : x * y , lengths ) repeats = [ reduce ( lambda x , y : x * y , lengths [ i : ] ) for i in range ( 1 , length_items ) ] + [ 1 ] if k is not None : k = k % length indices = [ old_div ( ( k % ( lengths [ i ] * repeats [ i ] ) ) , repeats [ i ] ) for i in range ( length_items ) ] return [ items [ i ] [ indices [ i ] ] for i in range ( length_items ) ] else : matrix = [ ] for i , item in enumerate ( items ) : row = [ ] for subset in item : row . extend ( [ subset ] * repeats [ i ] ) times = old_div ( length , len ( row ) ) matrix . append ( row * times ) return list ( zip ( * matrix ) ) | Create a matrix in wich each row is a tuple containing one of solutions or solution k - esima . |
62,228 | def pickle_encode ( session_dict ) : "Returns the given session dictionary pickled and encoded as a string." pickled = pickle . dumps ( session_dict , pickle . HIGHEST_PROTOCOL ) return base64 . encodestring ( pickled + get_query_hash ( pickled ) . encode ( ) ) | Returns the given session dictionary pickled and encoded as a string . |
62,229 | def func_source_data ( func ) : filename = inspect . getsourcefile ( func ) lineno = inspect . getsourcelines ( func ) [ 1 ] source = inspect . getsource ( func ) return filename , lineno , source | Return data about a function source including file name line number and source code . |
62,230 | def clean ( self ) : if any ( self . errors ) : return ( selects , aliases , froms , wheres , sorts , groups_by , params ) = self . get_query_parts ( ) if not selects : validation_message = _ ( u"At least you must check a row to get." ) raise forms . ValidationError ( validation_message ) self . _selects = selects self . _aliases = aliases self . _froms = froms self . _wheres = wheres self . _sorts = sorts self . _groups_by = groups_by self . _params = params | Checks that there is almost one field to select |
62,231 | def get_results ( self , limit = None , offset = None , query = None , admin_name = None , row_number = False ) : add_extra_ids = ( admin_name is not None ) if not query : sql = self . get_raw_query ( limit = limit , offset = offset , add_extra_ids = add_extra_ids ) else : sql = query if settings . DEBUG : print ( sql ) cursor = self . _db_connection . cursor ( ) cursor . execute ( sql , tuple ( self . _params ) ) query_results = cursor . fetchall ( ) if admin_name and not self . _groups_by : selects = self . _get_selects_with_extra_ids ( ) results = [ ] try : offset = int ( offset ) except ValueError : offset = 0 for r , row in enumerate ( query_results ) : i = 0 l = len ( row ) if row_number : result = [ ( r + offset + 1 , u"#row%s" % ( r + offset + 1 ) ) ] else : result = [ ] while i < l : appmodel , field = selects [ i ] . split ( "." ) appmodel = self . _unquote_name ( appmodel ) field = self . _unquote_name ( field ) try : if appmodel in self . _models : _model = self . _models [ appmodel ] _appmodel = u"%s_%s" % ( _model . _meta . app_label , _model . _meta . model_name ) else : _appmodel = appmodel admin_url = reverse ( "%s:%s_change" % ( admin_name , _appmodel ) , args = [ row [ i + 1 ] ] ) except NoReverseMatch : admin_url = None result . append ( ( row [ i ] , admin_url ) ) i += 2 results . append ( result ) return results else : if row_number : results = [ ] for r , row in enumerate ( query_results ) : result = [ r + 1 ] for cell in row : result . append ( cell ) results . append ( result ) return results else : return query_results | Fetch all results after perform SQL query and |
62,232 | def parse_content_type ( content_type ) : if '; charset=' in content_type : return tuple ( content_type . split ( '; charset=' ) ) else : if 'text' in content_type : encoding = 'ISO-8859-1' else : try : format = formats . find_by_content_type ( content_type ) except formats . UnknownFormat : encoding = 'ISO-8859-1' else : encoding = format . default_encoding or 'ISO-8859-1' return ( content_type , encoding ) | Return a tuple of content type and charset . |
62,233 | def parse_http_accept_header ( header ) : components = [ item . strip ( ) for item in header . split ( ',' ) ] l = [ ] for component in components : if ';' in component : subcomponents = [ item . strip ( ) for item in component . split ( ';' ) ] l . append ( ( subcomponents [ 0 ] , subcomponents [ 1 ] [ 2 : ] ) ) else : l . append ( ( component , '1' ) ) l . sort ( key = lambda i : i [ 1 ] , reverse = True ) content_types = [ ] for i in l : content_types . append ( i [ 0 ] ) return content_types | Return a list of content types listed in the HTTP Accept header ordered by quality . |
62,234 | def parse_multipart_data ( request ) : return MultiPartParser ( META = request . META , input_data = StringIO ( request . body ) , upload_handlers = request . upload_handlers , encoding = request . encoding ) . parse ( ) | Parse a request with multipart data . |
62,235 | def override_supported_formats ( formats ) : def decorator ( function ) : @ wraps ( function ) def wrapper ( self , * args , ** kwargs ) : self . supported_formats = formats return function ( self , * args , ** kwargs ) return wrapper return decorator | Override the views class supported formats for the decorated function . |
62,236 | def route ( regex , method , name ) : def decorator ( function ) : function . route = routes . route ( regex = regex , view = function . __name__ , method = method , name = name ) @ wraps ( function ) def wrapper ( self , * args , ** kwargs ) : return function ( self , * args , ** kwargs ) return wrapper return decorator | Route the decorated view . |
62,237 | def before ( method_name ) : def decorator ( function ) : @ wraps ( function ) def wrapper ( self , * args , ** kwargs ) : returns = getattr ( self , method_name ) ( * args , ** kwargs ) if returns is None : return function ( self , * args , ** kwargs ) else : if isinstance ( returns , HttpResponse ) : return returns else : return function ( self , * returns ) return wrapper return decorator | Run the given method prior to the decorated view . |
62,238 | def index ( self , request ) : objects = self . model . objects . all ( ) return self . _render ( request = request , template = 'index' , context = { cc2us ( pluralize ( self . model . __name__ ) ) : objects , } , status = 200 ) | Render a list of objects . |
62,239 | def new ( self , request ) : form = ( self . form or generate_form ( self . model ) ) ( ) return self . _render ( request = request , template = 'new' , context = { 'form' : form } , status = 200 ) | Render a form to create a new object . |
62,240 | def edit ( self , request , id ) : try : object = self . model . objects . get ( id = id ) except self . model . DoesNotExist : return self . _render ( request = request , template = '404' , context = { 'error' : 'The %s could not be found.' % self . model . __name__ . lower ( ) } , status = 404 , prefix_template_path = False ) form = ( self . form or generate_form ( self . model ) ) ( instance = object ) form . fields [ '_method' ] = CharField ( required = True , initial = 'PUT' , widget = HiddenInput ) return self . _render ( request = request , template = 'edit' , context = { cc2us ( self . model . __name__ ) : object , 'form' : form } , status = 200 ) | Render a form to edit an object . |
62,241 | def update ( self , request , id ) : try : object = self . model . objects . get ( id = id ) except self . model . DoesNotExist : return self . _render ( request = request , template = '404' , context = { 'error' : 'The %s could not be found.' % self . model . __name__ . lower ( ) } , status = 404 , prefix_template_path = False ) fields = [ ] for field in request . PATCH : try : self . model . _meta . get_field_by_name ( field ) except FieldDoesNotExist : continue else : fields . append ( field ) Form = generate_form ( model = self . model , form = self . form , fields = fields ) form = Form ( request . PATCH , instance = object ) if form . is_valid ( ) : object = form . save ( ) return self . show ( request , id ) else : return self . _render ( request = request , template = 'edit' , context = { 'form' : form } , status = 400 ) | Update an object . |
62,242 | def replace ( self , request , id ) : try : object = self . model . objects . get ( id = id ) except self . model . DoesNotExist : return self . _render ( request = request , template = '404' , context = { 'error' : 'The %s could not be found.' % self . model . __name__ . lower ( ) } , status = 404 , prefix_template_path = False ) form = ( self . form or generate_form ( self . model ) ) ( request . PUT , instance = object ) if form . is_valid ( ) : object = form . save ( ) return self . show ( request , id ) else : return self . _render ( request = request , template = 'edit' , context = { 'form' : form } , status = 400 ) | Replace an object . |
62,243 | def build_q ( fields_dict , params_dict , request = None ) : and_query = Q ( ) for fieldname in fields_dict : search_field = fields_dict [ fieldname ] if fieldname in params_dict and params_dict [ fieldname ] != '' and params_dict [ fieldname ] != [ ] : or_query = None if type ( search_field ) == type ( list ( ) ) : field_list = search_field search_operator = "__icontains" fixed_filters = None multiple_values = False custom_query_method = None value_mapper = None else : if search_field . get ( 'ignore' , False ) : continue field_list = search_field [ 'fields' ] search_operator = search_field . get ( 'operator' , None ) fixed_filters = search_field . get ( 'fixed_filters' , None ) multiple_values = search_field . get ( 'multiple' , False ) custom_query_method = search_field . get ( 'custom_query' , None ) value_mapper = search_field . get ( 'value_mapper' , None ) for model_field in field_list : if multiple_values : if hasattr ( params_dict , "getlist" ) : request_field_value = params_dict . getlist ( fieldname ) elif type ( params_dict [ fieldname ] ) == list : request_field_value = params_dict [ fieldname ] else : request_field_value = [ params_dict [ fieldname ] ] if value_mapper : request_field_value = [ value_mapper ( value ) for value in request_field_value ] else : request_field_value = params_dict [ fieldname ] if not value_mapper else value_mapper ( params_dict [ fieldname ] ) if not custom_query_method : fieldname_key = model_field + search_operator filter_dict = { fieldname_key : request_field_value } if not or_query : or_query = Q ( ** filter_dict ) else : or_query = or_query | Q ( ** filter_dict ) else : if not request : cf = custom_query_method ( model_field , request_field_value , params_dict ) else : cf = custom_query_method ( model_field , request_field_value , request ) if not or_query : or_query = cf else : or_query = or_query | cf fixed_filters_q = Q ( ) if fixed_filters : if callable ( fixed_filters ) : fixed_filters_q = fixed_filters ( params_dict ) elif type ( fixed_filters ) is dict : fixed_filters_q = Q ( ** fixed_filters ) and_query = and_query & or_query and_query = and_query & fixed_filters_q return and_query | Returns a Q object from filters config and actual parmeters . |
62,244 | def get_search_fields ( cls ) : sfdict = { } for klass in tuple ( cls . __bases__ ) + ( cls , ) : if hasattr ( klass , 'search_fields' ) : sfdict . update ( klass . search_fields ) return sfdict | Returns search fields in sfdict |
62,245 | def find ( identifier ) : for format in FORMATS : if identifier in [ format . name , format . acronym , format . extension ] : return format raise UnknownFormat ( 'No format found with name, acronym or extension "%s"' % identifier ) | Find and return a format by name acronym or extension . |
62,246 | def find_by_name ( name ) : for format in FORMATS : if name == format . name : return format raise UnknownFormat ( 'No format found with name "%s"' % name ) | Find and return a format by name . |
62,247 | def find_by_extension ( extension ) : for format in FORMATS : if extension in format . extensions : return format raise UnknownFormat ( 'No format found with extension "%s"' % extension ) | Find and return a format by extension . |
62,248 | def find_by_content_type ( content_type ) : for format in FORMATS : if content_type in format . content_types : return format raise UnknownFormat ( 'No format found with content type "%s"' % content_type ) | Find and return a format by content type . |
62,249 | def options ( self , request , map , * args , ** kwargs ) : options = { } for method , function in map . items ( ) : options [ method ] = function . __doc__ return self . _render ( request = request , template = 'options' , context = { 'options' : options } , status = 200 , headers = { 'Allow' : ', ' . join ( options . keys ( ) ) } ) | List communication options . |
62,250 | def _get_format ( self , request ) : supported_formats = [ formats . find ( format ) for format in self . supported_formats ] if '.' in request . path : extension = request . path . split ( '.' ) [ - 1 ] try : format = formats . find_by_extension ( extension ) except formats . UnknownFormat : return None if format in supported_formats : return format else : return None if 'HTTP_ACCEPT' in request . META : content_types = parse_http_accept_header ( request . META [ 'HTTP_ACCEPT' ] ) if len ( content_types ) == 1 : content_type = content_types [ 0 ] if content_type == '*/*' : return supported_formats [ 0 ] try : format = formats . find_by_content_type ( content_type ) except formats . UnknownFormat : return None if format in supported_formats : return format else : return None if DEFAULT_FORMAT : format = formats . find ( DEFAULT_FORMAT ) if format in supported_formats : return format else : return None | Determine and return a formats . Format instance describing the most desired response format that is supported by these views . |
62,251 | def _render ( self , request , template = None , status = 200 , context = { } , headers = { } , prefix_template_path = True ) : format = self . _get_format ( request ) if not format : return HttpResponse ( status = 406 ) if template : if prefix_template_path : template_path = '%s.%s' % ( self . template_path + template , format . extension ) else : template_path = '%s.%s' % ( template , format . extension ) try : response = render ( request = request , template_name = template_path , dictionary = context , status = status , content_type = '%s; charset=%s' % ( format . content_type , settings . DEFAULT_CHARSET ) ) except TemplateDoesNotExist : try : response = HttpResponse ( content = serializers . find ( format ) ( context ) . serialize ( request ) , content_type = '%s; charset=%s' % ( format . content_type , settings . DEFAULT_CHARSET ) , status = status ) except serializers . UnknownSerializer : raise self . Error ( 'No template exists at %(template_path)s, and no serializer found for %(format)s' % { 'template_path' : template_path , 'format' : format } ) else : response = HttpResponse ( content = serializers . find ( format ) ( context ) . serialize ( request ) , content_type = '%s; charset=%s' % ( format . content_type , settings . DEFAULT_CHARSET ) , status = status ) for header , value in headers . items ( ) : response [ header ] = value return response | Render a HTTP response . |
62,252 | def _error ( self , request , status , headers = { } , prefix_template_path = False , ** kwargs ) : return self . _render ( request = request , template = str ( status ) , status = status , context = { 'error' : kwargs } , headers = headers , prefix_template_path = prefix_template_path ) | Convenience method to render an error response . The template is inferred from the status code . |
62,253 | def find ( format ) : try : serializer = SERIALIZERS [ format ] except KeyError : raise UnknownSerializer ( 'No serializer found for %s' % format . acronym ) return serializer | Find and return a serializer for the given format . |
62,254 | def get_form_kwargs ( self ) : update_data = { } sfdict = self . filter_class . get_search_fields ( ) for fieldname in sfdict : try : has_multiple = sfdict [ fieldname ] . get ( 'multiple' , False ) except : has_multiple = False if has_multiple : value = self . request . GET . getlist ( fieldname , [ ] ) else : value = self . request . GET . get ( fieldname , None ) update_data [ fieldname ] = value if self . order_field : update_data [ self . order_field ] = self . request . GET . get ( self . order_field , None ) initial = self . get_initial ( ) initial . update ( update_data ) kwargs = { 'initial' : initial } if self . groups_for_userlist != None : pot_users = User . objects . exclude ( id = self . request . user . id ) if len ( self . groups_for_userlist ) : pot_users = pot_users . filter ( groups__name__in = self . groups_for_userlist ) pot_users = pot_users . distinct ( ) . order_by ( 'username' ) user_choices = tuple ( [ ( user . id , str ( user ) ) for user in pot_users ] ) kwargs [ 'user_choices' ] = user_choices return kwargs | Returns the keyword arguments for instantiating the search form . |
62,255 | def pluralize ( word ) : rules = [ [ '(?i)(quiz)$' , '\\1zes' ] , [ '^(?i)(ox)$' , '\\1en' ] , [ '(?i)([m|l])ouse$' , '\\1ice' ] , [ '(?i)(matr|vert|ind)ix|ex$' , '\\1ices' ] , [ '(?i)(x|ch|ss|sh)$' , '\\1es' ] , [ '(?i)([^aeiouy]|qu)ies$' , '\\1y' ] , [ '(?i)([^aeiouy]|qu)y$' , '\\1ies' ] , [ '(?i)(hive)$' , '\\1s' ] , [ '(?i)(?:([^f])fe|([lr])f)$' , '\\1\\2ves' ] , [ '(?i)sis$' , 'ses' ] , [ '(?i)([ti])um$' , '\\1a' ] , [ '(?i)(buffal|tomat)o$' , '\\1oes' ] , [ '(?i)(bu)s$' , '\\1ses' ] , [ '(?i)(alias|status)' , '\\1es' ] , [ '(?i)(octop|vir)us$' , '\\1i' ] , [ '(?i)(ax|test)is$' , '\\1es' ] , [ '(?i)s$' , 's' ] , [ '(?i)$' , 's' ] ] uncountable_words = [ 'equipment' , 'information' , 'rice' , 'money' , 'species' , 'series' , 'fish' , 'sheep' ] irregular_words = { 'person' : 'people' , 'man' : 'men' , 'child' : 'children' , 'sex' : 'sexes' , 'move' : 'moves' } lower_cased_word = word . lower ( ) for uncountable_word in uncountable_words : if lower_cased_word [ - 1 * len ( uncountable_word ) : ] == uncountable_word : return word for irregular in irregular_words . keys ( ) : match = re . search ( '(' + irregular + ')$' , word , re . IGNORECASE ) if match : return re . sub ( '(?i)' + irregular + '$' , match . expand ( '\\1' ) [ 0 ] + irregular_words [ irregular ] [ 1 : ] , word ) for rule in range ( len ( rules ) ) : match = re . search ( rules [ rule ] [ 0 ] , word , re . IGNORECASE ) if match : groups = match . groups ( ) for k in range ( 0 , len ( groups ) ) : if groups [ k ] == None : rules [ rule ] [ 1 ] = rules [ rule ] [ 1 ] . replace ( '\\' + str ( k + 1 ) , '' ) return re . sub ( rules [ rule ] [ 0 ] , rules [ rule ] [ 1 ] , word ) return word | Pluralize an English noun . |
62,256 | def us2mc ( string ) : return re . sub ( r'_([a-z])' , lambda m : ( m . group ( 1 ) . upper ( ) ) , string ) | Transform an underscore_case string to a mixedCase string |
62,257 | def generate_form ( model , form = None , fields = False , exclude = False ) : _model , _fields , _exclude = model , fields , exclude class Form ( form or forms . ModelForm ) : class Meta : model = _model if _fields is not False : fields = _fields if _exclude is not False : exclude = _exclude return Form | Generate a form from a model . |
62,258 | def sample_double_norm ( mean , std_upper , std_lower , size ) : from scipy . special import erfinv samples = np . empty ( size ) percentiles = np . random . uniform ( 0. , 1. , size ) cutoff = std_lower / ( std_lower + std_upper ) w = ( percentiles < cutoff ) percentiles [ w ] *= 0.5 / cutoff samples [ w ] = mean + np . sqrt ( 2 ) * std_lower * erfinv ( 2 * percentiles [ w ] - 1 ) w = ~ w percentiles [ w ] = 1 - ( 1 - percentiles [ w ] ) * 0.5 / ( 1 - cutoff ) samples [ w ] = mean + np . sqrt ( 2 ) * std_upper * erfinv ( 2 * percentiles [ w ] - 1 ) return samples | Note that this function requires Scipy . |
62,259 | def find_gamma_params ( mode , std ) : if mode < 0 : raise ValueError ( 'input mode must be positive for gamma; got %e' % mode ) var = std ** 2 beta = ( mode + np . sqrt ( mode ** 2 + 4 * var ) ) / ( 2 * var ) j = 2 * var / mode ** 2 alpha = ( j + 1 + np . sqrt ( 2 * j + 1 ) ) / j if alpha <= 1 : raise ValueError ( 'couldn\'t compute self-consistent gamma parameters: ' 'mode=%e std=%e alpha=%e beta=%e' % ( mode , std , alpha , beta ) ) return alpha , beta | Given a modal value and a standard deviation compute corresponding parameters for the gamma distribution . |
62,260 | def _lval_add_towards_polarity ( x , polarity ) : if x < 0 : if polarity < 0 : return Lval ( 'toinf' , x ) return Lval ( 'pastzero' , x ) elif polarity > 0 : return Lval ( 'toinf' , x ) return Lval ( 'pastzero' , x ) | Compute the appropriate Lval kind for the limit of value x towards polarity . Either toinf or pastzero depending on the sign of x and the infinity direction of polarity . |
62,261 | def limtype ( msmt ) : if np . isscalar ( msmt ) : return 0 if isinstance ( msmt , Uval ) : return 0 if isinstance ( msmt , Lval ) : if msmt . kind == 'undef' : raise ValueError ( 'no simple limit type for Lval %r' % msmt ) p = msmt . _polarity ( ) if p == - 2 or p == 1 : return - 1 if p == 2 or p == - 1 : return 1 return 0 if isinstance ( msmt , Textual ) : return msmt . limtype ( ) raise ValueError ( 'don\'t know how to treat %r as a measurement' % msmt ) | Return - 1 if this value is some kind of upper limit 1 if this value is some kind of lower limit 0 otherwise . |
62,262 | def from_pcount ( nevents ) : if nevents < 0 : raise ValueError ( 'Poisson parameter `nevents` must be nonnegative' ) return Uval ( np . random . gamma ( nevents + 1 , size = uval_nsamples ) ) | We assume a Poisson process . nevents is the number of events in some interval . The distribution of values is the distribution of the Poisson rate parameter given this observed number of events where the rate is in units of events per interval of the same duration . The max - likelihood value is nevents but the mean value is nevents + 1 . The gamma distribution is obtained by assuming an improper uniform prior for the rate between 0 and infinity . |
62,263 | def repvals ( self , method ) : if method == 'pct' : return pk_scoreatpercentile ( self . d , [ 50. , 84.134 , 15.866 ] ) if method == 'gauss' : m , s = self . d . mean ( ) , self . d . std ( ) return np . asarray ( [ m , m + s , m - s ] ) raise ValueError ( 'unknown representative-value method "%s"' % method ) | Compute representative statistical values for this Uval . method may be either pct or gauss . |
62,264 | def repval ( self , limitsok = False ) : if not limitsok and self . dkind in ( 'lower' , 'upper' ) : raise LimitError ( ) if self . dkind == 'unif' : lower , upper = map ( float , self . data ) v = 0.5 * ( lower + upper ) elif self . dkind in _noextra_dkinds : v = float ( self . data ) elif self . dkind in _yesextra_dkinds : v = float ( self . data [ 0 ] ) else : raise RuntimeError ( 'can\'t happen' ) if self . tkind == 'log10' : return 10 ** v return v | Get a best - effort representative value as a float . This can be DANGEROUS because it discards limit information which is rarely wise . |
62,265 | def moreland_adjusthue ( msh , m_unsat ) : if msh [ M ] >= m_unsat : return msh [ H ] hspin = ( msh [ S ] * np . sqrt ( m_unsat ** 2 - msh [ M ] ** 2 ) / ( msh [ M ] * np . sin ( msh [ S ] ) ) ) if msh [ H ] > - np . pi / 3 : return msh [ H ] + hspin return msh [ H ] - hspin | Moreland s AdjustHue procedure to adjust the hue value of an Msh color based on ... some criterion . |
62,266 | def get_datasets_in_nodes ( ) : data_dir = os . path . join ( scriptdir , ".." , "usgs" , "data" ) cwic = map ( lambda d : d [ "datasetName" ] , api . datasets ( None , CWIC_LSI_EXPLORER_CATALOG_NODE ) [ 'data' ] ) ee = map ( lambda d : d [ "datasetName" ] , api . datasets ( None , EARTH_EXPLORER_CATALOG_NODE ) [ 'data' ] ) hdds = map ( lambda d : d [ "datasetName" ] , api . datasets ( None , HDDS_EXPLORER_CATALOG_NODE ) [ 'data' ] ) lpcs = map ( lambda d : d [ "datasetName" ] , api . datasets ( None , LPCS_EXPLORER_CATALOG_NODE ) [ 'data' ] ) datasets = { } datasets . update ( { ds : "CWIC" for ds in cwic } ) datasets . update ( { ds : "EE" for ds in ee } ) datasets . update ( { ds : "HDDS" for ds in hdds } ) datasets . update ( { ds : "LPCS" for ds in lpcs } ) datasets_path = os . path . join ( data_dir , "datasets.json" ) with open ( datasets_path , "w" ) as f : f . write ( json . dumps ( datasets ) ) cwic_ee = [ ds for ds in cwic if ds in ee ] cwic_hdds = [ ds for ds in cwic if ds in hdds ] cwic_lpcs = [ ds for ds in cwic if ds in lpcs ] ee_hdds = [ ds for ds in ee if ds in hdds ] ee_lpcs = [ ds for ds in ee if ds in lpcs ] hdds_lpcs = [ ds for ds in hdds if ds in lpcs ] | Get the node associated with each dataset . Some datasets will have an ambiguous node since they exists in more than one node . |
62,267 | def pivot_wavelength_ee ( bpass ) : from scipy . integrate import simps return np . sqrt ( simps ( bpass . resp , bpass . wlen ) / simps ( bpass . resp / bpass . wlen ** 2 , bpass . wlen ) ) | Compute pivot wavelength assuming equal - energy convention . |
62,268 | def get_std_registry ( ) : from six import itervalues reg = Registry ( ) for fn in itervalues ( builtin_registrars ) : fn ( reg ) return reg | Get a Registry object pre - filled with information for standard telescopes . |
62,269 | def pivot_wavelength ( self ) : wl = self . registry . _pivot_wavelengths . get ( ( self . telescope , self . band ) ) if wl is not None : return wl wl = self . calc_pivot_wavelength ( ) self . registry . register_pivot_wavelength ( self . telescope , self . band , wl ) return wl | Get the bandpass pivot wavelength . |
62,270 | def calc_halfmax_points ( self ) : d = self . _ensure_data ( ) return interpolated_halfmax_points ( d . wlen , d . resp ) | Calculate the wavelengths of the filter half - maximum values . |
62,271 | def halfmax_points ( self ) : t = self . registry . _halfmaxes . get ( ( self . telescope , self . band ) ) if t is not None : return t t = self . calc_halfmax_points ( ) self . registry . register_halfmaxes ( self . telescope , self . band , t [ 0 ] , t [ 1 ] ) return t | Get the bandpass half - maximum wavelengths . These can be used to compute a representative bandwidth or for display purposes . |
62,272 | def bands ( self , telescope ) : q = self . _seen_bands . get ( telescope ) if q is None : return [ ] return list ( q ) | Return a list of bands associated with the specified telescope . |
62,273 | def register_pivot_wavelength ( self , telescope , band , wlen ) : if ( telescope , band ) in self . _pivot_wavelengths : raise AlreadyDefinedError ( 'pivot wavelength for %s/%s already ' 'defined' , telescope , band ) self . _note ( telescope , band ) self . _pivot_wavelengths [ telescope , band ] = wlen return self | Register precomputed pivot wavelengths . |
62,274 | def register_halfmaxes ( self , telescope , band , lower , upper ) : if ( telescope , band ) in self . _halfmaxes : raise AlreadyDefinedError ( 'half-max points for %s/%s already ' 'defined' , telescope , band ) self . _note ( telescope , band ) self . _halfmaxes [ telescope , band ] = ( lower , upper ) return self | Register precomputed half - max points . |
62,275 | def register_bpass ( self , telescope , klass ) : if telescope in self . _bpass_classes : raise AlreadyDefinedError ( 'bandpass class for %s already ' 'defined' , telescope ) self . _note ( telescope , None ) self . _bpass_classes [ telescope ] = klass return self | Register a Bandpass class . |
62,276 | def get ( self , telescope , band ) : klass = self . _bpass_classes . get ( telescope ) if klass is None : raise NotDefinedError ( 'bandpass data for %s not defined' , telescope ) bp = klass ( ) bp . registry = self bp . telescope = telescope bp . band = band return bp | Get a Bandpass object for a known telescope and filter . |
62,277 | def _load_data ( self , band ) : df = bandpass_data_frame ( 'filter_wise_' + str ( band ) + '.dat' , 'wlen resp uncert' ) df . wlen *= 1e4 df . uncert *= df . resp / 1000. lo , hi = self . _filter_subsets [ band ] df = df [ lo : hi ] return df | From the WISE All - Sky Explanatory Supplement IV . 4 . h . i . 1 and Jarrett + 2011 . These are relative response per erg and so can be integrated directly against F_nu spectra . Wavelengths are in micron uncertainties are in parts per thousand . |
62,278 | def clean_comment_body ( body ) : body = _parser . unescape ( body ) body = re . sub ( r'<a [^>]+>(.+?)</a>' , r'\1' , body ) body = body . replace ( '<br>' , '\n' ) body = re . sub ( r'<.+?>' , '' , body ) return body | Returns given comment HTML as plaintext . |
62,279 | def _create_wcs ( fitsheader ) : wcsmodule = _load_wcs_module ( ) is_pywcs = hasattr ( wcsmodule , 'UnitConverter' ) wcs = wcsmodule . WCS ( fitsheader ) wcs . wcs . set ( ) wcs . wcs . fix ( ) if hasattr ( wcs , 'wcs_pix2sky' ) : wcs . wcs_pix2world = wcs . wcs_pix2sky wcs . wcs_world2pix = wcs . wcs_sky2pix return wcs | For compatibility between astropy and pywcs . |
62,280 | def sanitize_unicode ( item ) : if isinstance ( item , text_type ) : return item . encode ( 'utf8' ) if isinstance ( item , dict ) : return dict ( ( sanitize_unicode ( k ) , sanitize_unicode ( v ) ) for k , v in six . iteritems ( item ) ) if isinstance ( item , ( list , tuple ) ) : return item . __class__ ( sanitize_unicode ( x ) for x in item ) from . . . io import Path if isinstance ( item , Path ) : return str ( item ) return item | Safely pass string values to the CASA tools . |
62,281 | def datadir ( * subdirs ) : import os . path data = None if 'CASAPATH' in os . environ : data = os . path . join ( os . environ [ 'CASAPATH' ] . split ( ) [ 0 ] , 'data' ) if data is None : try : import casadef except ImportError : pass else : data = os . path . join ( os . path . dirname ( casadef . task_directory ) , 'data' ) if not os . path . isdir ( data ) : dn = os . path . dirname data = os . path . join ( dn ( dn ( dn ( casadef . task_directory ) ) ) , 'lib' , 'casa' , 'data' ) if not os . path . isdir ( data ) : data = None if data is None : import casac prevp = None p = os . path . dirname ( casac . __file__ ) while len ( p ) and p != prevp : data = os . path . join ( p , 'data' ) if os . path . isdir ( data ) : break prevp = p p = os . path . dirname ( p ) if not os . path . isdir ( data ) : raise RuntimeError ( 'cannot identify CASA data directory' ) return os . path . join ( data , * subdirs ) | Get a path within the CASA data directory . |
62,282 | def logger ( filter = 'WARN' ) : import os , shutil , tempfile cwd = os . getcwd ( ) tempdir = None try : tempdir = tempfile . mkdtemp ( prefix = 'casautil' ) try : os . chdir ( tempdir ) sink = tools . logsink ( ) sink . setlogfile ( sanitize_unicode ( os . devnull ) ) try : os . unlink ( 'casapy.log' ) except OSError as e : if e . errno != 2 : raise finally : os . chdir ( cwd ) finally : if tempdir is not None : shutil . rmtree ( tempdir , onerror = _rmtree_error ) sink . showconsole ( True ) sink . setglobal ( True ) sink . filter ( sanitize_unicode ( filter . upper ( ) ) ) return sink | Set up CASA to write log messages to standard output . |
62,283 | def forkandlog ( function , filter = 'INFO5' , debug = False ) : import sys , os readfd , writefd = os . pipe ( ) pid = os . fork ( ) if pid == 0 : os . close ( readfd ) if not debug : f = open ( os . devnull , 'w' ) os . dup2 ( f . fileno ( ) , 1 ) os . dup2 ( f . fileno ( ) , 2 ) sink = logger ( filter = filter ) sink . setlogfile ( b'/dev/fd/%d' % writefd ) function ( sink ) sys . exit ( 0 ) os . close ( writefd ) with os . fdopen ( readfd ) as readhandle : for line in readhandle : yield line info = os . waitpid ( pid , 0 ) if info [ 1 ] : e = RuntimeError ( 'logging child process PID %d exited ' 'with error code %d' % tuple ( info ) ) e . pid , e . exitcode = info raise e | Fork a child process and read its CASA log output . |
62,284 | def _get_extended ( scene , resp ) : root = ElementTree . fromstring ( resp . text ) items = root . findall ( "eemetadata:metadataFields/eemetadata:metadataField" , NAMESPACES ) scene [ 'extended' ] = { item . attrib . get ( 'name' ) . strip ( ) : xsi . get ( item [ 0 ] ) for item in items } return scene | Parse metadata returned from the metadataUrl of a USGS scene . |
62,285 | def _async_requests ( urls ) : session = FuturesSession ( max_workers = 30 ) futures = [ session . get ( url ) for url in urls ] return [ future . result ( ) for future in futures ] | Sends multiple non - blocking requests . Returns a list of responses . |
62,286 | def metadata ( dataset , node , entityids , extended = False , api_key = None ) : api_key = _get_api_key ( api_key ) url = '{}/metadata' . format ( USGS_API ) payload = { "jsonRequest" : payloads . metadata ( dataset , node , entityids , api_key = api_key ) } r = requests . post ( url , payload ) response = r . json ( ) _check_for_usgs_error ( response ) if extended : metadata_urls = map ( _get_metadata_url , response [ 'data' ] ) results = _async_requests ( metadata_urls ) data = map ( lambda idx : _get_extended ( response [ 'data' ] [ idx ] , results [ idx ] ) , range ( len ( response [ 'data' ] ) ) ) return response | Request metadata for a given scene in a USGS dataset . |
62,287 | def reraise_context ( fmt , * args ) : import sys if len ( args ) : cstr = fmt % args else : cstr = text_type ( fmt ) ex = sys . exc_info ( ) [ 1 ] if isinstance ( ex , EnvironmentError ) : ex . strerror = '%s: %s' % ( cstr , ex . strerror ) ex . args = ( ex . errno , ex . strerror ) else : if len ( ex . args ) : cstr = '%s: %s' % ( cstr , ex . args [ 0 ] ) ex . args = ( cstr , ) + ex . args [ 1 : ] raise | Reraise an exception with its message modified to specify additional context . |
62,288 | def copy ( self ) : new = self . __class__ ( ) new . __dict__ = dict ( self . __dict__ ) return new | Return a shallow copy of this object . |
62,289 | def get_all_boards ( * args , ** kwargs ) : https = kwargs . get ( 'https' , args [ 1 ] if len ( args ) > 1 else False ) url_generator = Url ( None , https ) _fetch_boards_metadata ( url_generator ) return get_boards ( _metadata . keys ( ) , * args , ** kwargs ) | Returns every board on 4chan . |
62,290 | def get_thread ( self , thread_id , update_if_cached = True , raise_404 = False ) : cached_thread = self . _thread_cache . get ( thread_id ) if cached_thread : if update_if_cached : cached_thread . update ( ) return cached_thread res = self . _requests_session . get ( self . _url . thread_api_url ( thread_id = thread_id ) ) if raise_404 : res . raise_for_status ( ) elif not res . ok : return None thread = Thread . _from_request ( self , res , thread_id ) self . _thread_cache [ thread_id ] = thread return thread | Get a thread from 4chan via 4chan API . |
62,291 | def thread_exists ( self , thread_id ) : return self . _requests_session . head ( self . _url . thread_api_url ( thread_id = thread_id ) ) . ok | Check if a thread exists or has 404 d . |
62,292 | def get_threads ( self , page = 1 ) : url = self . _url . page_url ( page ) return self . _request_threads ( url ) | Returns all threads on a certain page . |
62,293 | def get_all_thread_ids ( self ) : json = self . _get_json ( self . _url . thread_list ( ) ) return [ thread [ 'no' ] for page in json for thread in page [ 'threads' ] ] | Return the ID of every thread on this board . |
62,294 | def get_all_threads ( self , expand = False ) : if not expand : return self . _request_threads ( self . _url . catalog ( ) ) thread_ids = self . get_all_thread_ids ( ) threads = [ self . get_thread ( id , raise_404 = False ) for id in thread_ids ] return filter ( None , threads ) | Return every thread on this board . |
62,295 | def refresh_cache ( self , if_want_update = False ) : for thread in tuple ( self . _thread_cache . values ( ) ) : if if_want_update : if not thread . want_update : continue thread . update ( ) | Update all threads currently stored in our cache . |
62,296 | def modify_environment ( self , env ) : is_rpm_install = self . _rootdir . startswith ( '/usr/lib64/casapy/release/' ) def path ( * args ) : return os . path . join ( self . _rootdir , * args ) env [ 'CASAROOT' ] = path ( ) env [ 'CASAPATH' ] = ' ' . join ( [ path ( ) , os . uname ( ) [ 0 ] . lower ( ) , 'local' , os . uname ( ) [ 1 ] ] ) if is_rpm_install : env [ 'CASA_INSTALLATION_TYPE' ] = 'rpm-installation' prepend_environ_path ( env , 'PATH' , '/usr/lib64/casa/01/bin' ) prepend_environ_path ( env , 'PATH' , path ( 'bin' ) ) else : env [ 'CASA_INSTALLATION_TYPE' ] = 'tar-installation' lib = 'lib64' if os . path . isdir ( path ( 'lib64' ) ) else 'lib' pydir = sorted ( glob . glob ( path ( lib , 'python2*' ) ) ) [ - 1 ] tcldir = path ( 'share' , 'tcl' ) if os . path . isdir ( tcldir ) : env [ 'TCL_LIBRARY' ] = tcldir else : tcl_versioned_dirs = glob . glob ( path ( 'share' , 'tcl*' ) ) if len ( tcl_versioned_dirs ) : env [ 'TCL_LIBRARY' ] = tcl_versioned_dirs [ - 1 ] bindir = path ( lib , 'casa' , 'bin' ) if not os . path . isdir ( bindir ) : bindir = path ( lib , 'casapy' , 'bin' ) prepend_environ_path ( env , 'PATH' , bindir ) env [ 'CASA_INSTALLATION_DIRECTORY' ] = env [ 'CASAROOT' ] env [ '__CASAPY_PYTHONDIR' ] = pydir env [ 'MATPLOTLIBRC' ] = path ( 'share' , 'matplotlib' ) env [ 'PYTHONHOME' ] = env [ 'CASAROOT' ] env [ 'TK_LIBRARY' ] = path ( 'share' , 'tk' ) env [ 'QT_PLUGIN_PATH' ] = path ( lib , 'qt4' , 'plugins' ) prepend_environ_path ( env , 'LD_LIBRARY_PATH' , path ( lib ) ) prepend_environ_path ( env , 'PYTHONPATH' , os . path . join ( pydir , 'site-packages' ) ) prepend_environ_path ( env , 'PYTHONPATH' , os . path . join ( pydir , 'heuristics' ) ) prepend_environ_path ( env , 'PYTHONPATH' , pydir ) return env | Maintaining compatibility with different CASA versions is a pain . |
62,297 | def compute_bgband ( evtpath , srcreg , bkgreg , ebins , env = None ) : import numpy as np import pandas as pd from scipy . special import erfcinv , gammaln if env is None : from . import CiaoEnvironment env = CiaoEnvironment ( ) srcarea = get_region_area ( env , evtpath , srcreg ) bkgarea = get_region_area ( env , evtpath , bkgreg ) srccounts = [ count_events ( env , evtpath , '[sky=%s][energy=%d:%d]' % ( srcreg , elo , ehi ) ) for elo , ehi in ebins ] bkgcounts = [ count_events ( env , evtpath , '[sky=%s][energy=%d:%d]' % ( bkgreg , elo , ehi ) ) for elo , ehi in ebins ] df = pd . DataFrame ( { 'elo' : [ t [ 0 ] for t in ebins ] , 'ehi' : [ t [ 1 ] for t in ebins ] , 'nsrc' : srccounts , 'nbkg' : bkgcounts } ) df [ 'ewidth' ] = np . abs ( df [ 'ehi' ] - df [ 'elo' ] ) df [ 'nbkg_scaled' ] = df [ 'nbkg' ] * srcarea / bkgarea df [ 'log_prob_bkg' ] = df [ 'nsrc' ] * np . log ( df [ 'nbkg_scaled' ] ) - df [ 'nbkg_scaled' ] - gammaln ( df [ 'nsrc' ] + 1 ) df [ 'src_sigma' ] = np . sqrt ( 2 ) * erfcinv ( np . exp ( df [ 'log_prob_bkg' ] ) ) df [ 'nsrc_subbed' ] = df [ 'nsrc' ] - df [ 'nbkg_scaled' ] return df | Compute background information for a source in one or more energy bands . |
62,298 | def simple_srcflux ( env , infile = None , psfmethod = 'arfcorr' , conf = 0.68 , verbose = 0 , ** kwargs ) : from . . . io import Path import shutil , signal , tempfile if infile is None : raise ValueError ( 'must specify infile' ) kwargs . update ( dict ( infile = infile , psfmethod = psfmethod , conf = conf , verbose = verbose , clobber = 'yes' , outroot = 'sf' , ) ) argv = [ 'srcflux' ] + [ '%s=%s' % t for t in kwargs . items ( ) ] argstr = ' ' . join ( argv ) tempdir = None try : tempdir = tempfile . mkdtemp ( prefix = 'srcflux' ) proc = env . launch ( argv , cwd = tempdir , shell = False ) retcode = proc . wait ( ) if retcode > 0 : raise RuntimeError ( 'command "%s" failed with exit code %d' % ( argstr , retcode ) ) elif retcode == - signal . SIGINT : raise KeyboardInterrupt ( ) elif retcode < 0 : raise RuntimeError ( 'command "%s" killed by signal %d' % ( argstr , - retcode ) ) tables = list ( Path ( tempdir ) . glob ( '*.flux' ) ) if len ( tables ) != 1 : raise RuntimeError ( 'expected exactly one flux table from srcflux; got %d' % len ( tables ) ) return tables [ 0 ] . read_fits_bintable ( hdu = 1 ) finally : if tempdir is not None : shutil . rmtree ( tempdir , onerror = _rmtree_error ) | Run the CIAO srcflux script and retrieve its results . |
62,299 | def new_for_fk10_fig9 ( cls , shlib_path ) : inst = ( cls ( shlib_path ) . set_thermal_background ( 2.1e7 , 3e9 ) . set_bfield ( 48 ) . set_edist_powerlaw ( 0.016 , 4.0 , 3.7 , 5e9 / 3 ) . set_freqs ( 100 , 0.5 , 50 ) . set_hybrid_parameters ( 12 , 12 ) . set_ignore_q_terms ( False ) . set_obs_angle ( 50 * np . pi / 180 ) . set_padist_gaussian_loss_cone ( 0.5 * np . pi , 0.4 ) . set_trapezoidal_integration ( 15 ) ) inst . in_vals [ 0 ] = 1.33e18 inst . in_vals [ 1 ] = 6e8 return inst | Create a calculator initialized to reproduce Figure 9 from FK10 . |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.