idx int64 0 63k | question stringlengths 53 5.28k | target stringlengths 5 805 |
|---|---|---|
5,300 | def surveyPixel ( lon , lat , nside_pix , nside_subpix = None ) : pix = np . unique ( ang2pix ( nside_pix , lon , lat ) ) if nside_subpix is None : return pix else : subpix_array = [ ] for ii in range ( 0 , len ( pix ) ) : subpix = subpixel ( pix [ ii ] , nside_pix , nside_subpix ) subpix_array . append ( subpix ) return pix , np . array ( subpix_array ) | Return the set of HEALPix pixels that cover the given coordinates at resolution nside . Optionally return the set of subpixels within those pixels at resolution nside_subpix |
5,301 | def allSkyCoordinates ( nside ) : lon , lat = pix2ang ( nside , np . arange ( 0 , hp . nside2npix ( nside ) ) ) return lon , lat | Generate a set of coordinates at the centers of pixels of resolution nside across the full sky . |
5,302 | def randomPositionsMask ( mask , nside_pix , n ) : npix = len ( mask ) nside = hp . npix2nside ( npix ) coverage_fraction = float ( np . sum ( mask ) ) / len ( mask ) n_throw = int ( n / coverage_fraction ) lon , lat = [ ] , [ ] latch = True count = 0 while len ( lon ) < n : lon_throw = np . random . uniform ( 0. , 360. , n_throw ) lat_throw = np . degrees ( np . arcsin ( np . random . uniform ( - 1. , 1. , n_throw ) ) ) pix = ugali . utils . healpix . angToPix ( nside , lon_throw , lat_throw ) cut = mask [ pix ] . astype ( bool ) lon = np . append ( lon , lon_throw [ cut ] ) lat = np . append ( lat , lat_throw [ cut ] ) count += 1 if count > 10 : raise RuntimeError ( 'Too many loops...' ) return lon [ 0 : n ] , lat [ 0 : n ] | Generate n random positions within a HEALPix mask of booleans . |
5,303 | def embed_ising ( source_linear , source_quadratic , embedding , target_adjacency , chain_strength = 1.0 ) : unused = { v for v in target_adjacency } - set ( ) . union ( * embedding . values ( ) ) target_linear = { v : 0. for v in target_adjacency } for v , bias in iteritems ( source_linear ) : try : chain_variables = embedding [ v ] except KeyError : try : embedding [ v ] = { unused . pop ( ) } except KeyError : raise ValueError ( 'no embedding provided for source variable {}' . format ( v ) ) chain_variables = embedding [ v ] b = bias / len ( chain_variables ) for s in chain_variables : try : target_linear [ s ] += b except KeyError : raise ValueError ( 'chain variable {} not in target_adjacency' . format ( s ) ) target_quadratic = { } for ( u , v ) , bias in iteritems ( source_quadratic ) : edges = set ( ) if u not in embedding : raise ValueError ( 'no embedding provided for source variable {}' . format ( u ) ) if v not in embedding : raise ValueError ( 'no embedding provided for source variable {}' . format ( v ) ) for s in embedding [ u ] : for t in embedding [ v ] : try : if s in target_adjacency [ t ] and ( t , s ) not in edges : edges . add ( ( s , t ) ) except KeyError : raise ValueError ( 'chain variable {} not in target_adjacency' . format ( s ) ) if not edges : raise ValueError ( "no edges in target graph between source variables {}, {}" . format ( u , v ) ) b = bias / len ( edges ) for s , t in edges : if ( s , t ) in target_quadratic : target_quadratic [ ( s , t ) ] += b elif ( t , s ) in target_quadratic : target_quadratic [ ( t , s ) ] += b else : target_quadratic [ ( s , t ) ] = b chain_quadratic = { } for chain in itervalues ( embedding ) : chain_quadratic . update ( chain_to_quadratic ( chain , target_adjacency , chain_strength ) ) return target_linear , target_quadratic , chain_quadratic | Embeds a logical Ising model onto another graph via an embedding . |
5,304 | def chain_break_frequency ( samples , embedding ) : counts = { v : 0 for v in embedding } total = 0 for sample in samples : for v , chain in iteritems ( embedding ) : vals = [ sample [ u ] for u in chain ] if not _all_equal ( vals ) : counts [ v ] += 1 total += 1 return { v : counts [ v ] / total for v in embedding } | Determines the frequency of chain breaks in the given samples . |
5,305 | def unembed_samples ( samples , embedding , chain_break_method = None ) : if chain_break_method is None : chain_break_method = majority_vote return list ( itertools . chain ( * ( chain_break_method ( sample , embedding ) for sample in samples ) ) ) | Return samples over the variables in the source graph . |
5,306 | def discard ( sample , embedding ) : unembeded = { } for v , chain in iteritems ( embedding ) : vals = [ sample [ u ] for u in chain ] if _all_equal ( vals ) : unembeded [ v ] = vals . pop ( ) else : return yield unembeded | Discards the sample if broken . |
5,307 | def majority_vote ( sample , embedding ) : unembeded = { } for v , chain in iteritems ( embedding ) : vals = [ sample [ u ] for u in chain ] if _all_equal ( vals ) : unembeded [ v ] = vals . pop ( ) else : unembeded [ v ] = _most_common ( vals ) yield unembeded | Determines the sample values by majority vote . |
5,308 | def weighted_random ( sample , embedding ) : unembeded = { } for v , chain in iteritems ( embedding ) : vals = [ sample [ u ] for u in chain ] unembeded [ v ] = random . choice ( vals ) yield unembeded | Determines the sample values by weighted random choice . |
5,309 | def _all_equal ( iterable ) : iterator = iter ( iterable ) first = next ( iterator ) return all ( first == rest for rest in iterator ) | True if all values in iterable are equal else False . |
5,310 | def _most_common ( iterable ) : data = Counter ( iterable ) return max ( data , key = data . __getitem__ ) | Returns the most common element in iterable . |
5,311 | def lnlike ( self , theta ) : params , loglike = self . params , self . loglike kwargs = dict ( list ( zip ( params , theta ) ) ) try : lnlike = loglike . value ( ** kwargs ) except ValueError as AssertionError : lnlike = - np . inf return lnlike | Logarithm of the likelihood |
5,312 | def lnprior ( self , theta ) : params , priors = self . params , self . priors kwargs = dict ( list ( zip ( params , theta ) ) ) err = np . seterr ( invalid = 'raise' ) try : lnprior = np . sum ( np . log ( [ priors [ k ] ( v ) for k , v in list ( kwargs . items ( ) ) ] ) ) except ( FloatingPointError , ValueError ) : lnprior = - np . inf np . seterr ( ** err ) return lnprior | Logarithm of the prior |
5,313 | def lnprob ( self , theta ) : global niter params , priors , loglike = self . params , self . priors , self . loglike _lnprior = self . lnprior ( theta ) if np . isfinite ( _lnprior ) : _lnlike = self . lnlike ( theta ) else : _lnprior = - np . inf _lnlike = - np . inf _lnprob = _lnprior + _lnlike if ( niter % 100 == 0 ) : msg = "%i function calls ...\n" % niter msg += ', ' . join ( '%s: %.3f' % ( k , v ) for k , v in zip ( params , theta ) ) msg += '\nlog(like): %.3f, log(prior): %.3f' % ( _lnprior , _lnlike ) logger . debug ( msg ) niter += 1 return _lnprob | Logarithm of the probability |
5,314 | def write_membership ( filename , config , srcfile , section = None ) : source = Source ( ) source . load ( srcfile , section = section ) loglike = createLoglike ( config , source ) loglike . write_membership ( filename ) | Top level interface to write the membership from a config and source model . |
5,315 | def createCatalog ( config , roi = None , lon = None , lat = None ) : import ugali . observation . catalog if roi is None : roi = createROI ( config , lon , lat ) catalog = ugali . observation . catalog . Catalog ( config , roi = roi ) return catalog | Create a catalog object |
5,316 | def simulateCatalog ( config , roi = None , lon = None , lat = None ) : import ugali . simulation . simulator if roi is None : roi = createROI ( config , lon , lat ) sim = ugali . simulation . simulator . Simulator ( config , roi ) return sim . catalog ( ) | Simulate a catalog object . |
5,317 | def calc_observable_fraction ( self , distance_modulus ) : observable_fraction = self . isochrone . observableFraction ( self . mask , distance_modulus ) if not observable_fraction . sum ( ) > 0 : msg = "No observable fraction" msg += ( "\n" + str ( self . source . params ) ) logger . error ( msg ) raise ValueError ( msg ) return observable_fraction | Calculated observable fraction within each pixel of the target region . |
5,318 | def calc_signal_spatial ( self ) : self . surface_intensity_sparse = self . calc_surface_intensity ( ) self . surface_intensity_object = self . kernel . pdf ( self . catalog . lon , self . catalog . lat ) u_spatial = self . surface_intensity_object return u_spatial | Calculate the spatial signal probability for each catalog object . |
5,319 | def fit_richness ( self , atol = 1.e-3 , maxiter = 50 ) : if np . isnan ( self . u ) . any ( ) : logger . warning ( "NaN signal probability found" ) return 0. , 0. , None if not np . any ( self . u ) : logger . warning ( "Signal probability is zero for all objects" ) return 0. , 0. , None if self . f == 0 : logger . warning ( "Observable fraction is zero" ) return 0. , 0. , None richness = np . array ( [ 0. , 1. / self . f , 10. / self . f ] ) loglike = np . array ( [ self . value ( richness = r ) for r in richness ] ) found_maximum = False iteration = 0 while not found_maximum : parabola = ugali . utils . parabola . Parabola ( richness , 2. * loglike ) if parabola . vertex_x < 0. : found_maximum = True else : richness = np . append ( richness , parabola . vertex_x ) loglike = np . append ( loglike , self . value ( richness = richness [ - 1 ] ) ) if np . fabs ( loglike [ - 1 ] - np . max ( loglike [ 0 : - 1 ] ) ) < atol : found_maximum = True iteration += 1 if iteration > maxiter : logger . warning ( "Maximum number of iterations reached" ) break index = np . argmax ( loglike ) return loglike [ index ] , richness [ index ] , parabola | Maximize the log - likelihood as a function of richness . |
5,320 | def resolve_reference ( self , ref ) : url , resolved = self . resolver . resolve ( ref ) return resolved | Resolve a JSON Pointer object reference to the object itself . |
5,321 | def get_path ( self , path ) : mapping = self . get_path_mapping ( path ) return self . path_class ( api = self , path = path , mapping = mapping ) | Construct a Path object from a path string . |
5,322 | def from_file ( cls , filename ) : with open ( filename ) as infp : if filename . endswith ( '.yaml' ) or filename . endswith ( '.yml' ) : import yaml data = yaml . safe_load ( infp ) else : import json data = json . load ( infp ) return cls . from_data ( data ) | Construct an APIDefinition by parsing the given filename . |
5,323 | def _server_error_message ( url , message ) : msg = _error_message . format ( url = url , message = message ) log . error ( msg ) return msg | Log and return a server error message . |
5,324 | def make_request ( url , method = 'GET' , query = None , body = None , auth = None , timeout = 10 , client = None , macaroons = None ) : headers = { } kwargs = { 'timeout' : timeout , 'headers' : headers } if body is not None : if isinstance ( body , collections . Mapping ) : body = json . dumps ( body ) kwargs [ 'data' ] = body if method in ( 'GET' , 'HEAD' ) : if query : url = '{}?{}' . format ( url , urlencode ( query , True ) ) elif method in ( 'DELETE' , 'PATCH' , 'POST' , 'PUT' ) : headers [ 'Content-Type' ] = 'application/json' else : raise ValueError ( 'invalid method {}' . format ( method ) ) if macaroons is not None : headers [ 'Macaroons' ] = macaroons kwargs [ 'auth' ] = auth if client is None else client . auth ( ) api_method = getattr ( requests , method . lower ( ) ) try : response = api_method ( url , ** kwargs ) except requests . exceptions . Timeout : raise timeout_error ( url , timeout ) except Exception as err : msg = _server_error_message ( url , err ) raise ServerError ( msg ) try : response . raise_for_status ( ) except HTTPError as err : msg = _server_error_message ( url , err . response . text ) raise ServerError ( err . response . status_code , msg ) except requests . exceptions . RequestException as err : msg = _server_error_message ( url , err . message ) raise ServerError ( msg ) if not response . content : return { } try : return response . json ( ) except Exception as err : msg = 'Error decoding JSON response: {} message: {}' . format ( url , err ) log . error ( msg ) raise ServerError ( msg ) | Make a request with the provided data . |
5,325 | def get_plans ( self , reference ) : response = make_request ( '{}charm?charm-url={}' . format ( self . url , 'cs:' + reference . path ( ) ) , timeout = self . timeout , client = self . _client ) try : return tuple ( map ( lambda plan : Plan ( url = plan [ 'url' ] , plan = plan [ 'plan' ] , created_on = datetime . datetime . strptime ( plan [ 'created-on' ] , "%Y-%m-%dT%H:%M:%SZ" ) , description = plan . get ( 'description' ) , price = plan . get ( 'price' ) ) , response ) ) except Exception as err : log . error ( 'cannot process plans: invalid JSON response: {!r}' . format ( response ) ) raise ServerError ( 'unable to get list of plans for {}: {}' . format ( reference . path ( ) , err ) ) | Get the plans for a given charm . |
5,326 | def list_wallets ( self ) : response = make_request ( '{}wallet' . format ( self . url ) , timeout = self . timeout , client = self . _client ) try : total = response [ 'total' ] return { 'credit' : response [ 'credit' ] , 'total' : WalletTotal ( limit = total [ 'limit' ] , budgeted = total [ 'budgeted' ] , available = total [ 'available' ] , unallocated = total [ 'unallocated' ] , usage = total [ 'usage' ] , consumed = total [ 'consumed' ] ) , 'wallets' : tuple ( Wallet ( owner = wallet [ 'owner' ] , wallet = wallet [ 'wallet' ] , limit = wallet [ 'limit' ] , budgeted = wallet [ 'budgeted' ] , unallocated = wallet [ 'unallocated' ] , available = wallet [ 'available' ] , consumed = wallet [ 'consumed' ] , default = 'default' in wallet ) for wallet in response [ 'wallets' ] ) , } except Exception as err : log . error ( 'cannot process wallets: invalid JSON response: {!r}' . format ( response ) ) raise ServerError ( 'unable to get list of wallets: {!r}' . format ( err ) ) | Get the list of wallets . |
5,327 | def get_wallet ( self , wallet_name ) : response = make_request ( '{}wallet/{}' . format ( self . url , wallet_name ) , timeout = self . timeout , client = self . _client ) try : total = response [ 'total' ] return { 'credit' : response [ 'credit' ] , 'limit' : response [ 'limit' ] , 'total' : WalletTotal ( limit = total [ 'limit' ] , budgeted = total [ 'budgeted' ] , available = total [ 'available' ] , unallocated = total [ 'unallocated' ] , usage = total [ 'usage' ] , consumed = total [ 'consumed' ] ) } except Exception as exc : log . error ( 'cannot get wallet from server: {!r}' . format ( exc ) ) raise ServerError ( 'unable to get list of wallets: {!r}' . format ( exc ) ) | Get a single wallet . |
5,328 | def update_wallet ( self , wallet_name , limit ) : request = { 'update' : { 'limit' : str ( limit ) , } } return make_request ( '{}wallet/{}' . format ( self . url , wallet_name ) , method = 'PATCH' , body = request , timeout = self . timeout , client = self . _client ) | Update a wallet with a new limit . |
5,329 | def delete_wallet ( self , wallet_name ) : return make_request ( '{}wallet/{}' . format ( self . url , wallet_name ) , method = 'DELETE' , timeout = self . timeout , client = self . _client ) | Delete a wallet . |
5,330 | def create_budget ( self , wallet_name , model_uuid , limit ) : request = { 'model' : model_uuid , 'limit' : limit , } return make_request ( '{}wallet/{}/budget' . format ( self . url , wallet_name ) , method = 'POST' , body = request , timeout = self . timeout , client = self . _client ) | Create a new budget for a model and wallet . |
5,331 | def delete_budget ( self , model_uuid ) : return make_request ( '{}model/{}/budget' . format ( self . url , model_uuid ) , method = 'DELETE' , timeout = self . timeout , client = self . _client ) | Delete a budget . |
5,332 | def confusion ( df , labels = [ 'neg' , 'pos' ] ) : c = pd . DataFrame ( np . zeros ( ( 2 , 2 ) ) , dtype = int ) a , b = df . columns [ : 2 ] c . columns = sorted ( set ( df [ a ] ) ) [ : 2 ] c . columns . name = a c . index = list ( c . columns ) c . index . name = b c1 , c2 = c . columns c [ c1 ] [ c1 ] = ( ( df [ a ] == c1 ) & ( df [ b ] == c1 ) ) . sum ( ) c [ c1 ] [ c2 ] = ( ( df [ a ] == c1 ) & ( df [ b ] == c2 ) ) . sum ( ) c [ c2 ] [ c2 ] = ( ( df [ a ] == c2 ) & ( df [ b ] == c2 ) ) . sum ( ) c [ c2 ] [ c1 ] = ( ( df [ a ] == c2 ) & ( df [ b ] == c1 ) ) . sum ( ) return c | Binary classification confusion |
5,333 | def thresh_from_spec ( spec , labels , scores , ** kwargs ) : r cost_fun . verbose = kwargs . pop ( 'verbose' , cost_fun . verbose ) cost_fun . target = spec return minimize ( cost_fun , x0 = [ .5 ] , args = ( labels , scores ) , method = 'SLSQP' , constraints = ( { 'type' : 'ineq' , 'fun' : lambda x : np . array ( [ x [ 0 ] ] ) , 'jac' : lambda x : np . array ( [ 1. ] ) } , ) , ** kwargs ) | r Find the threshold level that accomplishes the desired specificity |
5,334 | def add_dicts ( d1 , d2 ) : if d1 is None : return d2 if d2 is None : return d1 keys = set ( d1 ) keys . update ( set ( d2 ) ) ret = { } for key in keys : v1 = d1 . get ( key ) v2 = d2 . get ( key ) if v1 is None : ret [ key ] = v2 elif v2 is None : ret [ key ] = v1 else : ret [ key ] = v1 + v2 return ret | Merge two dicts of addable values |
5,335 | def _update_capacity ( self , data ) : if 'ConsumedCapacity' in data : consumed = data [ 'ConsumedCapacity' ] if not isinstance ( consumed , list ) : consumed = [ consumed ] for cap in consumed : self . capacity += cap . get ( 'CapacityUnits' , 0 ) self . table_capacity += cap . get ( 'Table' , { } ) . get ( 'CapacityUnits' , 0 ) local_indexes = cap . get ( 'LocalSecondaryIndexes' , { } ) for k , v in six . iteritems ( local_indexes ) : self . indexes . setdefault ( k , 0 ) self . indexes [ k ] += v [ 'CapacityUnits' ] global_indexes = cap . get ( 'GlobalSecondaryIndexes' , { } ) for k , v in six . iteritems ( global_indexes ) : self . global_indexes . setdefault ( k , 0 ) self . global_indexes [ k ] += v [ 'CapacityUnits' ] | Update the consumed capacity metrics |
5,336 | def fetch ( self ) : self . limit . set_request_args ( self . kwargs ) data = self . connection . call ( * self . args , ** self . kwargs ) self . limit . post_fetch ( data ) self . last_evaluated_key = data . get ( 'LastEvaluatedKey' ) if self . last_evaluated_key is None : self . kwargs . pop ( 'ExclusiveStartKey' , None ) else : self . kwargs [ 'ExclusiveStartKey' ] = self . last_evaluated_key self . _update_capacity ( data ) if 'consumed_capacity' in data : self . consumed_capacity += data [ 'consumed_capacity' ] for raw_item in data [ 'Items' ] : item = self . connection . dynamizer . decode_keys ( raw_item ) if self . limit . accept ( item ) : yield item | Fetch more results from Dynamo |
5,337 | def build_kwargs ( self ) : keys , self . keys = self . keys [ : MAX_GET_BATCH ] , self . keys [ MAX_GET_BATCH : ] query = { 'ConsistentRead' : self . consistent } if self . attributes is not None : query [ 'ProjectionExpression' ] = self . attributes if self . alias : query [ 'ExpressionAttributeNames' ] = self . alias query [ 'Keys' ] = keys return { 'RequestItems' : { self . tablename : query , } , 'ReturnConsumedCapacity' : self . return_capacity , } | Construct the kwargs to pass to batch_get_item |
5,338 | def fetch ( self ) : kwargs = self . build_kwargs ( ) data = self . connection . call ( 'batch_get_item' , ** kwargs ) if 'UnprocessedKeys' in data : for items in six . itervalues ( data [ 'UnprocessedKeys' ] ) : self . keys . extend ( items [ 'Keys' ] ) self . _attempt += 1 self . connection . exponential_sleep ( self . _attempt ) else : self . _attempt = 0 self . _update_capacity ( data ) if 'consumed_capacity' in data : self . consumed_capacity = sum ( data [ 'consumed_capacity' ] , self . consumed_capacity ) return iter ( data [ 'Responses' ] [ self . tablename ] ) | Fetch a set of items from their keys |
5,339 | def copy ( self ) : return Limit ( self . scan_limit , self . item_limit , self . min_scan_limit , self . strict , self . filter ) | Return a copy of the limit |
5,340 | def set_request_args ( self , args ) : if self . scan_limit is not None : args [ 'Limit' ] = self . scan_limit elif self . item_limit is not None : args [ 'Limit' ] = max ( self . item_limit , self . min_scan_limit ) else : args . pop ( 'Limit' , None ) | Set the Limit parameter into the request args |
5,341 | def complete ( self ) : if self . scan_limit is not None and self . scan_limit == 0 : return True if self . item_limit is not None and self . item_limit == 0 : return True return False | Return True if the limit has been reached |
5,342 | def accept ( self , item ) : accept = self . filter ( item ) if accept and self . item_limit is not None : if self . item_limit > 0 : self . item_limit -= 1 elif self . strict : return False return accept | Apply the filter and item_limit and return True to accept |
5,343 | def returned ( n ) : for pos in randwalk ( ) >> drop ( 1 ) >> takei ( xrange ( n - 1 ) ) : if pos == Origin : return True return False | Generate a random walk and return True if the walker has returned to the origin after taking n steps . |
5,344 | def first_return ( ) : walk = randwalk ( ) >> drop ( 1 ) >> takewhile ( lambda v : v != Origin ) >> list return len ( walk ) | Generate a random walk and return its length up to the moment that the walker first returns to the origin . |
5,345 | def seq ( start = 0 , step = 1 ) : def seq ( a , d ) : while 1 : yield a a += d return seq ( start , step ) | An arithmetic sequence generator . Works with any type with + defined . |
5,346 | def pipe ( inpipe , outpipe ) : if hasattr ( outpipe , '__pipe__' ) : return outpipe . __pipe__ ( inpipe ) elif hasattr ( outpipe , '__call__' ) : return outpipe ( inpipe ) else : raise BrokenPipe ( 'No connection mechanism defined' ) | Connect inpipe and outpipe . If outpipe is not a Stream instance it should be an function callable on an iterable . |
5,347 | def submit ( self , * items ) : with self . lock : if self . closed : raise BrokenPipe ( 'Job submission has been closed.' ) id = self . jobcount self . _status += [ 'SUBMITTED' ] * len ( items ) self . jobcount += len ( items ) for item in items : self . waitqueue . put ( ( id , item ) ) id += 1 if len ( items ) == 1 : return id - 1 else : return range ( id - len ( items ) , id ) | Return job ids assigned to the submitted items . |
5,348 | def cancel ( self , * ids ) : ncancelled = 0 with self . lock : for id in ids : try : if self . _status [ id ] == 'SUBMITTED' : self . _status [ id ] = 'CANCELLED' ncancelled += 1 except IndexError : pass return ncancelled | Try to cancel jobs with associated ids . Return the actual number of jobs cancelled . |
5,349 | def shutdown ( self ) : with self . lock : self . pool . inqueue . put ( StopIteration ) self . waitqueue . put ( StopIteration ) _iterqueue ( self . waitqueue ) >> item [ - 1 ] self . closed = True self . join ( ) | Shut down the Executor . Suspend all waiting jobs . Running workers will terminate after finishing their current job items . The call will block until all workers are terminated . |
5,350 | def main ( ) : arguments = docopt ( __doc__ , version = __version__ ) if arguments [ 'configure' ] and flag : configure ( ) if arguments [ 'cuisine' ] : if arguments [ 'list' ] : cuisine ( 'list' ) else : cuisine ( arguments [ '<cuisine-id>' ] ) elif arguments [ 'surprise' ] : surprise ( ) elif arguments [ 'reviews' ] : reviews ( arguments [ '<restaurant-id>' ] ) elif arguments [ 'search' ] : search ( arguments [ 'QUERY' ] ) elif arguments [ 'budget' ] : try : money = arguments [ '<budget>' ] money = float ( money ) budget ( money ) except : print 'Budget should be a number!' elif arguments [ 'restaurant' ] : restaurant ( arguments [ '<restaurant-id>' ] ) else : print ( __doc__ ) | monica helps you order food from the timeline |
5,351 | def _get_requirements ( fname ) : packages = _read ( fname ) . split ( '\n' ) packages = ( p . strip ( ) for p in packages ) packages = ( p for p in packages if p and not p . startswith ( '#' ) ) return list ( packages ) | Create a list of requirements from the output of the pip freeze command saved in a text file . |
5,352 | def TermsProcessor ( instance , placeholder , rendered_content , original_context ) : if 'terms' in original_context : return rendered_content return mark_safe ( replace_terms ( rendered_content ) ) | Adds links to all placeholder plugins except django - terms plugins |
5,353 | def time_stops ( self ) : if not self . supports_time : return [ ] if self . service . calendar == 'standard' : units = self . service . time_interval_units interval = self . service . time_interval steps = [ self . time_start ] if units in ( 'years' , 'decades' , 'centuries' ) : if units == 'years' : years = interval elif units == 'decades' : years = 10 * interval else : years = 100 * interval next_value = lambda x : x . replace ( year = x . year + years ) elif units == 'months' : def _fn ( x ) : year = x . year + ( x . month + interval - 1 ) // 12 month = ( x . month + interval ) % 12 or 12 day = min ( x . day , calendar . monthrange ( year , month ) [ 1 ] ) return x . replace ( year = year , month = month , day = day ) next_value = _fn else : if units == 'milliseconds' : delta = timedelta ( milliseconds = interval ) elif units == 'seconds' : delta = timedelta ( seconds = interval ) elif units == 'minutes' : delta = timedelta ( minutes = interval ) elif units == 'hours' : delta = timedelta ( hours = interval ) elif units == 'days' : delta = timedelta ( days = interval ) elif units == 'weeks' : delta = timedelta ( weeks = interval ) else : raise ValidationError ( "Service has an invalid time_interval_units: {}" . format ( self . service . time_interval_units ) ) next_value = lambda x : x + delta while steps [ - 1 ] < self . time_end : value = next_value ( steps [ - 1 ] ) if value > self . time_end : break steps . append ( value ) return steps else : raise NotImplementedError | Valid time steps for this service as a list of datetime objects . |
5,354 | def _parse_coords ( self , opts ) : if 'coords' in vars ( opts ) : return radius = vars ( opts ) . get ( 'radius' , 0 ) gal = None if vars ( opts ) . get ( 'gal' ) is not None : gal = opts . gal elif vars ( opts ) . get ( 'cel' ) is not None : gal = cel2gal ( * opts . cel ) elif vars ( opts ) . get ( 'hpx' ) is not None : gal = pix2ang ( * opts . hpx ) if gal is not None : opts . coords = [ ( gal [ 0 ] , gal [ 1 ] , radius ) ] opts . names = [ vars ( opts ) . get ( 'name' , '' ) ] else : opts . coords = None opts . names = None if vars ( opts ) . get ( 'targets' ) is not None : opts . names , opts . coords = self . parse_targets ( opts . targets ) if vars ( opts ) . get ( 'radius' ) is not None : opts . coords [ 'radius' ] = vars ( opts ) . get ( 'radius' ) | Parse target coordinates in various ways ... |
5,355 | def default_value ( self ) : if callable ( self . default ) and self . call_default : return self . default ( ) return self . default | Property to return the default value . |
5,356 | def raw_value ( self ) : if self . parent_setting is not None : return self . parent_setting . raw_value [ self . full_name ] else : return getattr ( settings , self . full_name ) | Property to return the variable defined in django . conf . settings . |
5,357 | def get_value ( self ) : try : value = self . raw_value except ( AttributeError , KeyError ) as err : self . _reraise_if_required ( err ) default_value = self . default_value if self . transform_default : return self . transform ( default_value ) return default_value else : return self . transform ( value ) | Return the transformed raw or default value . |
5,358 | def run_validators ( self , value ) : errors = [ ] for validator in self . validators : try : validator ( value ) except ValidationError as error : errors . extend ( error . messages ) if errors : raise ValidationError ( errors ) | Run the validators on the setting value . |
5,359 | def transform ( self , path ) : if path is None or not path : return None obj_parent_modules = path . split ( "." ) objects = [ obj_parent_modules . pop ( - 1 ) ] while True : try : parent_module_path = "." . join ( obj_parent_modules ) parent_module = importlib . import_module ( parent_module_path ) break except ImportError : if len ( obj_parent_modules ) == 1 : raise ImportError ( "No module named '%s'" % obj_parent_modules [ 0 ] ) objects . insert ( 0 , obj_parent_modules . pop ( - 1 ) ) current_object = parent_module for obj in objects : current_object = getattr ( current_object , obj ) return current_object | Transform a path into an actual Python object . |
5,360 | def get_value ( self ) : try : self . raw_value except ( AttributeError , KeyError ) as err : self . _reraise_if_required ( err ) default_value = self . default_value if self . transform_default : return self . transform ( default_value ) return default_value else : value = { } for key , subsetting in self . settings . items ( ) : value [ key ] = subsetting . get_value ( ) return value | Return dictionary with values of subsettings . |
5,361 | def sum_mags ( mags , weights = None ) : flux = 10 ** ( - np . asarray ( mags ) / 2.5 ) if weights is None : return - 2.5 * np . log10 ( np . sum ( flux ) ) else : return - 2.5 * np . log10 ( np . sum ( weights * flux ) ) | Sum an array of magnitudes in flux space . |
5,362 | def absolute_magnitude ( distance_modulus , g , r , prob = None ) : V = g - 0.487 * ( g - r ) - 0.0249 flux = np . sum ( 10 ** ( - ( V - distance_modulus ) / 2.5 ) ) Mv = - 2.5 * np . log10 ( flux ) return Mv | Calculate the absolute magnitude from a set of bands |
5,363 | def observableFractionCDF ( self , mask , distance_modulus , mass_min = 0.1 ) : method = 'step' mass_init , mass_pdf , mass_act , mag_1 , mag_2 = self . sample ( mass_min = mass_min , full_data_range = False ) mag_1 = mag_1 + distance_modulus mag_2 = mag_2 + distance_modulus mask_1 , mask_2 = mask . mask_roi_unique . T mag_err_1 = mask . photo_err_1 ( mask_1 [ : , np . newaxis ] - mag_1 ) mag_err_2 = mask . photo_err_2 ( mask_2 [ : , np . newaxis ] - mag_2 ) delta_hi_1 = ( mask_1 [ : , np . newaxis ] - mag_1 ) / mag_err_1 delta_hi_2 = ( mask_2 [ : , np . newaxis ] - mag_2 ) / mag_err_2 delta_lo_1 = ( mask . roi . bins_mag [ 0 ] - mag_1 ) / mag_err_1 delta_lo_2 = ( mask . roi . bins_mag [ 0 ] - mag_2 ) / mag_err_2 cdf_1 = norm_cdf ( delta_hi_1 ) - norm_cdf ( delta_lo_1 ) cdf_2 = norm_cdf ( delta_hi_2 ) - norm_cdf ( delta_lo_2 ) cdf = cdf_1 * cdf_2 if method is None or method == 'none' : comp_cdf = cdf elif self . band_1_detection == True : comp = mask . mask_1 . completeness ( mag_1 , method = method ) comp_cdf = comp * cdf elif self . band_1_detection == False : comp = mask . mask_2 . completeness ( mag_2 , method = method ) comp_cdf = comp * cdf else : comp_1 = mask . mask_1 . completeness ( mag_1 , method = method ) comp_2 = mask . mask_2 . completeness ( mag_2 , method = method ) comp_cdf = comp_1 * comp_2 * cdf observable_fraction = ( mass_pdf [ np . newaxis ] * comp_cdf ) . sum ( axis = - 1 ) return observable_fraction [ mask . mask_roi_digi [ mask . roi . pixel_interior_cut ] ] | Compute observable fraction of stars with masses greater than mass_min in each pixel in the interior region of the mask . Incorporates simplistic photometric errors . |
5,364 | def histogram2d ( self , distance_modulus = None , delta_mag = 0.03 , steps = 10000 ) : if distance_modulus is not None : self . distance_modulus = distance_modulus mass_init , mass_pdf , mass_act , mag_1 , mag_2 = self . sample ( mass_steps = steps ) bins_mag_1 = np . arange ( self . mod + mag_1 . min ( ) - ( 0.5 * delta_mag ) , self . mod + mag_1 . max ( ) + ( 0.5 * delta_mag ) , delta_mag ) . astype ( np . float32 ) bins_mag_2 = np . arange ( self . mod + mag_2 . min ( ) - ( 0.5 * delta_mag ) , self . mod + mag_2 . max ( ) + ( 0.5 * delta_mag ) , delta_mag ) . astype ( np . float32 ) isochrone_pdf = np . histogram2d ( self . mod + mag_1 , self . mod + mag_2 , bins = [ bins_mag_1 , bins_mag_2 ] , weights = mass_pdf ) [ 0 ] . astype ( np . float32 ) return isochrone_pdf , bins_mag_1 , bins_mag_2 | Return a 2D histogram the isochrone in mag - mag space . |
5,365 | def pdf_mmd ( self , lon , lat , mag_1 , mag_2 , distance_modulus , mask , delta_mag = 0.03 , steps = 1000 ) : logger . info ( 'Running MMD pdf' ) roi = mask . roi mmd = self . signalMMD ( mask , distance_modulus , delta_mag = delta_mag , mass_steps = steps ) nedges = np . rint ( ( roi . bins_mag [ - 1 ] - roi . bins_mag [ 0 ] ) / delta_mag ) + 1 edges_mag , delta_mag = np . linspace ( roi . bins_mag [ 0 ] , roi . bins_mag [ - 1 ] , nedges , retstep = True ) idx_mag_1 = np . searchsorted ( edges_mag , mag_1 ) idx_mag_2 = np . searchsorted ( edges_mag , mag_2 ) if np . any ( idx_mag_1 > nedges ) or np . any ( idx_mag_1 == 0 ) : msg = "Magnitude out of range..." raise Exception ( msg ) if np . any ( idx_mag_2 > nedges ) or np . any ( idx_mag_2 == 0 ) : msg = "Magnitude out of range..." raise Exception ( msg ) idx = mask . roi . indexROI ( lon , lat ) u_color = mmd [ ( mask . mask_roi_digi [ idx ] , idx_mag_1 , idx_mag_2 ) ] u_color /= delta_mag ** 2 return u_color | Ok now here comes the beauty of having the signal MMD . |
5,366 | def raw_separation ( self , mag_1 , mag_2 , steps = 10000 ) : mag_1 = np . array ( mag_1 , copy = False , ndmin = 1 ) mag_2 = np . array ( mag_2 , copy = False , ndmin = 1 ) init , pdf , act , iso_mag_1 , iso_mag_2 = self . sample ( mass_steps = steps ) iso_mag_1 += self . distance_modulus iso_mag_2 += self . distance_modulus iso_cut = ( iso_mag_1 < np . max ( mag_1 ) ) & ( iso_mag_1 > np . min ( mag_1 ) ) | ( iso_mag_2 < np . max ( mag_2 ) ) & ( iso_mag_2 > np . min ( mag_2 ) ) iso_mag_1 = iso_mag_1 [ iso_cut ] iso_mag_2 = iso_mag_2 [ iso_cut ] dist_mag_1 = mag_1 [ : , np . newaxis ] - iso_mag_1 dist_mag_2 = mag_2 [ : , np . newaxis ] - iso_mag_2 return np . min ( np . sqrt ( dist_mag_1 ** 2 + dist_mag_2 ** 2 ) , axis = 1 ) | Calculate the separation in magnitude - magnitude space between points and isochrone . Uses a dense sampling of the isochrone and calculates the metric distance from any isochrone sample point . |
5,367 | def separation ( self , mag_1 , mag_2 ) : iso_mag_1 = self . mag_1 + self . distance_modulus iso_mag_2 = self . mag_2 + self . distance_modulus def interp_iso ( iso_mag_1 , iso_mag_2 , mag_1 , mag_2 ) : interp_1 = scipy . interpolate . interp1d ( iso_mag_1 , iso_mag_2 , bounds_error = False ) interp_2 = scipy . interpolate . interp1d ( iso_mag_2 , iso_mag_1 , bounds_error = False ) dy = interp_1 ( mag_1 ) - mag_2 dx = interp_2 ( mag_2 ) - mag_1 dmag_1 = np . fabs ( dx * dy ) / ( dx ** 2 + dy ** 2 ) * dy dmag_2 = np . fabs ( dx * dy ) / ( dx ** 2 + dy ** 2 ) * dx return dmag_1 , dmag_2 if np . issubdtype ( self . stage . dtype , np . number ) : sel = ( self . stage < self . hb_stage ) else : sel = ( self . stage != self . hb_stage ) rgb_mag_1 = iso_mag_1 [ sel ] rgb_mag_2 = iso_mag_2 [ sel ] dmag_1 , dmag_2 = interp_iso ( rgb_mag_1 , rgb_mag_2 , mag_1 , mag_2 ) if not np . all ( sel ) : hb_mag_1 = iso_mag_1 [ ~ sel ] hb_mag_2 = iso_mag_2 [ ~ sel ] hb_dmag_1 , hb_dmag_2 = interp_iso ( hb_mag_1 , hb_mag_2 , mag_1 , mag_2 ) dmag_1 = np . nanmin ( [ dmag_1 , hb_dmag_1 ] , axis = 0 ) dmag_2 = np . nanmin ( [ dmag_2 , hb_dmag_2 ] , axis = 0 ) return np . sqrt ( dmag_1 ** 2 + dmag_2 ** 2 ) | Calculate the separation between a specific point and the isochrone in magnitude - magnitude space . Uses an interpolation |
5,368 | def get_handler ( self , operation_id ) : handler = ( self . handlers . get ( operation_id ) or self . handlers . get ( snake_case ( operation_id ) ) ) if handler : return handler raise MissingHandler ( 'Missing handler for operation %s (tried %s too)' % ( operation_id , snake_case ( operation_id ) ) ) | Get the handler function for a given operation . |
5,369 | def add_handlers ( self , namespace ) : if isinstance ( namespace , str ) : namespace = import_module ( namespace ) if isinstance ( namespace , dict ) : namespace = namespace . items ( ) else : namespace = vars ( namespace ) . items ( ) for name , value in namespace : if name . startswith ( '_' ) : continue if isfunction ( value ) or ismethod ( value ) : self . handlers [ name ] = value | Add handler functions from the given namespace for instance a module . |
5,370 | def get_context ( self , arr , expr , context ) : expression_names = [ x for x in self . get_expression_names ( expr ) if x not in set ( context . keys ( ) ) . union ( [ 'i' ] ) ] if len ( expression_names ) != 1 : raise ValueError ( 'The expression must have exactly one variable.' ) return { expression_names [ 0 ] : arr } | Returns a context dictionary for use in evaluating the expression . |
5,371 | def execute ( self , array_in , expression , ** kwargs ) : context = self . get_context ( array_in , expression , kwargs ) context . update ( kwargs ) return ma . masked_where ( self . evaluate_expression ( expression , context ) , array_in ) | Creates and returns a masked view of the input array . |
5,372 | def timeout_error ( url , timeout ) : msg = 'Request timed out: {} timeout: {}s' . format ( url , timeout ) log . warning ( msg ) return ServerError ( msg ) | Raise a server error indicating a request timeout to the given URL . |
5,373 | def histogram ( title , title_x , title_y , x , bins_x ) : plt . figure ( ) plt . hist ( x , bins_x ) plt . xlabel ( title_x ) plt . ylabel ( title_y ) plt . title ( title ) | Plot a basic histogram . |
5,374 | def twoDimensionalHistogram ( title , title_x , title_y , z , bins_x , bins_y , lim_x = None , lim_y = None , vmin = None , vmax = None ) : plt . figure ( ) mesh_x , mesh_y = np . meshgrid ( bins_x , bins_y ) if vmin != None and vmin == vmax : plt . pcolor ( mesh_x , mesh_y , z ) else : plt . pcolor ( mesh_x , mesh_y , z , vmin = vmin , vmax = vmax ) plt . xlabel ( title_x ) plt . ylabel ( title_y ) plt . title ( title ) plt . colorbar ( ) if lim_x : plt . xlim ( lim_x [ 0 ] , lim_x [ 1 ] ) if lim_y : plt . ylim ( lim_y [ 0 ] , lim_y [ 1 ] ) | Create a two - dimension histogram plot or binned map . |
5,375 | def twoDimensionalScatter ( title , title_x , title_y , x , y , lim_x = None , lim_y = None , color = 'b' , size = 20 , alpha = None ) : plt . figure ( ) plt . scatter ( x , y , c = color , s = size , alpha = alpha , edgecolors = 'none' ) plt . xlabel ( title_x ) plt . ylabel ( title_y ) plt . title ( title ) if type ( color ) is not str : plt . colorbar ( ) if lim_x : plt . xlim ( lim_x [ 0 ] , lim_x [ 1 ] ) if lim_y : plt . ylim ( lim_y [ 0 ] , lim_y [ 1 ] ) | Create a two - dimensional scatter plot . |
5,376 | def drawHealpixMap ( hpxmap , lon , lat , size = 1.0 , xsize = 501 , coord = 'GC' , ** kwargs ) : ax = plt . gca ( ) x = np . linspace ( - size , size , xsize ) y = np . linspace ( - size , size , xsize ) xx , yy = np . meshgrid ( x , y ) coord = coord . upper ( ) if coord == 'GC' : llon , llat = image2sphere ( * gal2cel ( lon , lat ) , x = xx . flat , y = yy . flat ) pix = ang2pix ( get_nside ( hpxmap ) , * cel2gal ( llon , llat ) ) elif coord == 'CG' : llon , llat = image2sphere ( * cel2gal ( lon , lat ) , x = xx . flat , y = yy . flat ) pix = ang2pix ( get_nside ( hpxmap ) , * gal2cel ( llon , llat ) ) else : llon , llat = image2sphere ( lon , lat , xx . flat , yy . flat ) pix = ang2pix ( get_nside ( hpxmap ) , llon , llat ) values = hpxmap [ pix ] . reshape ( xx . shape ) zz = np . ma . array ( values , mask = ( values == hp . UNSEEN ) , fill_value = np . nan ) return drawProjImage ( xx , yy , zz , coord = coord , ** kwargs ) | Draw local projection of healpix map . |
5,377 | def getDSSImage ( ra , dec , radius = 1.0 , xsize = 800 , ** kwargs ) : import subprocess import tempfile service = 'skyview' if service == 'stsci' : url = "https://archive.stsci.edu/cgi-bin/dss_search?" scale = 2.0 * radius * 60. params = dict ( ra = '%.3f' % ra , dec = '%.3f' % dec , width = scale , height = scale , format = 'gif' , version = 1 ) elif service == 'skyview' : url = "https://skyview.gsfc.nasa.gov/cgi-bin/images?" params = dict ( survey = 'DSS' , position = '%.3f,%.3f' % ( ra , dec ) , scaling = 'Linear' , Return = 'GIF' , size = 2 * radius , projection = 'Car' , pixels = xsize ) else : raise Exception ( "Unrecognized service." ) query = '&' . join ( "%s=%s" % ( k , v ) for k , v in params . items ( ) ) tmp = tempfile . NamedTemporaryFile ( suffix = '.gif' ) cmd = 'wget --progress=dot:mega -O %s "%s"' % ( tmp . name , url + query ) subprocess . call ( cmd , shell = True ) im = plt . imread ( tmp . name ) tmp . close ( ) if service == 'stsci' and xsize : im = scipy . misc . imresize ( im , size = ( xsize , xsize ) ) return im | Download Digitized Sky Survey images |
5,378 | def draw_slices ( hist , func = np . sum , ** kwargs ) : from mpl_toolkits . axes_grid1 import make_axes_locatable kwargs . setdefault ( 'ls' , '-' ) ax = plt . gca ( ) data = hist vslice = func ( data , axis = 0 ) hslice = func ( data , axis = 1 ) npix = np . array ( data . shape ) xlim = ax . get_xlim ( ) ylim = ax . get_ylim ( ) xbin = np . linspace ( xlim [ 0 ] , xlim [ 1 ] , len ( vslice ) ) ybin = np . linspace ( ylim [ 0 ] , ylim [ 1 ] , len ( hslice ) ) divider = make_axes_locatable ( ax ) hax = divider . append_axes ( "right" , size = 1.2 , pad = 0.05 , sharey = ax , axes_class = axes_divider . LocatableAxes ) hax . axis [ "left" ] . toggle ( label = False , ticklabels = False ) hax . plot ( hslice , ybin , ** kwargs ) hax . xaxis . set_major_locator ( MaxNLocator ( 4 , prune = 'both' ) ) hax . set_ylim ( * ylim ) vax = divider . append_axes ( "top" , size = 1.2 , pad = 0.05 , sharex = ax , axes_class = axes_divider . LocatableAxes ) vax . axis [ "bottom" ] . toggle ( label = False , ticklabels = False ) vax . plot ( xbin , vslice , ** kwargs ) vax . yaxis . set_major_locator ( MaxNLocator ( 4 , prune = 'lower' ) ) vax . set_xlim ( * xlim ) return vax , hax | Draw horizontal and vertical slices through histogram |
5,379 | def plotSkymapCatalog ( lon , lat , ** kwargs ) : fig = plt . figure ( ) ax = plt . subplot ( 111 , projection = projection ) drawSkymapCatalog ( ax , lon , lat , ** kwargs ) | Plot a catalog of coordinates on a full - sky map . |
5,380 | def makePath ( x_path , y_path , epsilon = 1.e-10 ) : x_path_closed = np . concatenate ( [ x_path , x_path [ : : - 1 ] ] ) y_path_closed = np . concatenate ( [ y_path , epsilon + y_path [ : : - 1 ] ] ) path = matplotlib . path . Path ( list ( zip ( x_path_closed , y_path_closed ) ) ) return path | Create closed path . |
5,381 | def drawMask ( self , ax = None , mask = None , mtype = 'maglim' ) : if not ax : ax = plt . gca ( ) if mask is None : mask = ugali . analysis . loglike . createMask ( self . config , roi = self . roi ) mask_map = hp . UNSEEN * np . ones ( hp . nside2npix ( self . nside ) ) if mtype . lower ( ) == 'maglim' : mask_map [ mask . roi . pixels ] = mask . mask_1 . mask_roi_sparse elif mtype . lower ( ) == 'fracdet' : mask_map [ mask . roi . pixels ] = mask . mask_1 . frac_roi_sparse else : raise Exception ( "Unrecognized type: %s" % mtype ) masked = ( mask_map == hp . UNSEEN ) | ( mask_map == 0 ) mask_map = np . ma . array ( mask_map , mask = masked , fill_value = np . nan ) im = drawHealpixMap ( mask_map , self . lon , self . lat , self . radius , coord = self . coord ) try : cbar = ax . cax . colorbar ( im ) except : cbar = plt . colorbar ( im ) cbar . ax . set_xticklabels ( cbar . ax . get_xticklabels ( ) , rotation = 90 ) ax . annotate ( mtype , ** self . label_kwargs ) return im | Draw the maglim from the mask . |
5,382 | def parse ( self , configManager , config ) : parser = ConfigParser . RawConfigParser ( ) configOptions = dict ( ) configFile = self . _getConfigFile ( config ) if configFile : parser . readfp ( configFile ) for section in parser . sections ( ) : if self . sections is None or section in self . sections : configOptions . update ( parser . items ( section ) ) return configOptions | Parse configuration options out of an . ini configuration file . |
5,383 | def write_chapter ( self ) : self . paragraphs = [ ] self . paragraphs . append ( '\n' ) for x in range ( randint ( 0 , 50 ) ) : p = Paragraph ( self . model ) self . paragraphs . append ( p . get_paragraph ( ) ) self . paragraphs . append ( '\n' ) return self . paragraphs | Create a chapter that contains a random number of paragraphs |
5,384 | def buildcss ( app , buildpath , imagefile ) : div = 'body' repeat = 'repeat-y' position = 'center' attachment = 'scroll' if app . config . sphinxmark_div != 'default' : div = app . config . sphinxmark_div if app . config . sphinxmark_repeat is False : repeat = 'no-repeat' if app . config . sphinxmark_fixed is True : attachment = 'fixed' border = app . config . sphinxmark_border if border == 'left' or border == 'right' : css = template ( 'border' , div = div , image = imagefile , side = border ) else : css = template ( 'watermark' , div = div , image = imagefile , repeat = repeat , position = position , attachment = attachment ) LOG . debug ( '[sphinxmark] Template: ' + css ) cssname = 'sphinxmark.css' cssfile = os . path . join ( buildpath , cssname ) with open ( cssfile , 'w' ) as f : f . write ( css ) return ( cssname ) | Create CSS file . |
5,385 | def createimage ( app , srcdir , buildpath ) : text = app . config . sphinxmark_text width = app . config . sphinxmark_text_width height = app . config . sphinxmark_text_spacing img = Image . new ( 'RGBA' , ( width , height ) , ( 255 , 255 , 255 , 0 ) ) d = ImageDraw . Draw ( img ) fontfile = os . path . join ( srcdir , 'arial.ttf' ) font = ImageFont . truetype ( fontfile , app . config . sphinxmark_text_size ) xsize , ysize = d . textsize ( text , font ) LOG . debug ( '[sphinxmark] x = ' + str ( xsize ) + '\ny = ' + str ( ysize ) ) x = ( width / 2 ) - ( xsize / 2 ) y = ( height / 2 ) - ( ysize / 2 ) color = app . config . sphinxmark_text_color d . text ( ( x , y ) , text , font = font , fill = color ) img . putalpha ( app . config . sphinxmark_text_opacity ) img = img . rotate ( app . config . sphinxmark_text_rotation ) imagefile = 'textmark_' + text + '.png' imagepath = os . path . join ( buildpath , imagefile ) img . save ( imagepath , 'PNG' ) LOG . debug ( '[sphinxmark] Image saved to: ' + imagepath ) return ( imagefile ) | Create PNG image from string . |
5,386 | def getimage ( app ) : srcdir = os . path . abspath ( os . path . dirname ( __file__ ) ) TEMPLATE_PATH . append ( srcdir ) staticbase = '_static' buildpath = os . path . join ( app . outdir , staticbase ) try : os . makedirs ( buildpath ) except OSError : if not os . path . isdir ( buildpath ) : raise if app . config . sphinxmark_image == 'default' : imagefile = 'watermark-draft.png' imagepath = os . path . join ( srcdir , imagefile ) copy ( imagepath , buildpath ) LOG . debug ( '[sphinxmark] Using default image: ' + imagefile ) elif app . config . sphinxmark_image == 'text' : imagefile = createimage ( app , srcdir , buildpath ) LOG . debug ( '[sphinxmark] Image: ' + imagefile ) else : imagefile = app . config . sphinxmark_image if app . config . html_static_path : staticpath = app . config . html_static_path [ 0 ] else : staticpath = '_static' LOG . debug ( '[sphinxmark] static path: ' + staticpath ) imagepath = os . path . join ( app . confdir , staticpath , imagefile ) LOG . debug ( '[sphinxmark] Imagepath: ' + imagepath ) try : copy ( imagepath , buildpath ) except Exception : message = ( "Cannot find '%s'. Put watermark images in the " "'_static' directory or specify the location using " "'html_static_path'." % imagefile ) LOG . warning ( message ) LOG . warning ( 'Failed to add watermark.' ) return return ( buildpath , imagefile ) | Get image file . |
5,387 | def watermark ( app , env ) : if app . config . sphinxmark_enable is True : LOG . info ( 'adding watermark...' , nonl = True ) buildpath , imagefile = getimage ( app ) cssname = buildcss ( app , buildpath , imagefile ) app . add_css_file ( cssname ) LOG . info ( ' done' ) | Add watermark . |
5,388 | def setup ( app ) : app . add_config_value ( 'sphinxmark_enable' , False , 'html' ) app . add_config_value ( 'sphinxmark_div' , 'default' , 'html' ) app . add_config_value ( 'sphinxmark_border' , None , 'html' ) app . add_config_value ( 'sphinxmark_repeat' , True , 'html' ) app . add_config_value ( 'sphinxmark_fixed' , False , 'html' ) app . add_config_value ( 'sphinxmark_image' , 'default' , 'html' ) app . add_config_value ( 'sphinxmark_text' , 'default' , 'html' ) app . add_config_value ( 'sphinxmark_text_color' , ( 255 , 0 , 0 ) , 'html' ) app . add_config_value ( 'sphinxmark_text_size' , 100 , 'html' ) app . add_config_value ( 'sphinxmark_text_width' , 1000 , 'html' ) app . add_config_value ( 'sphinxmark_text_opacity' , 20 , 'html' ) app . add_config_value ( 'sphinxmark_text_spacing' , 400 , 'html' ) app . add_config_value ( 'sphinxmark_text_rotation' , 0 , 'html' ) app . connect ( 'env-updated' , watermark ) return { 'version' : '0.1.18' , 'parallel_read_safe' : True , 'parallel_write_safe' : True , } | Configure setup for Sphinx extension . |
5,389 | def gammalnStirling ( z ) : return ( 0.5 * ( np . log ( 2. * np . pi ) - np . log ( z ) ) ) + ( z * ( np . log ( z + ( 1. / ( ( 12. * z ) - ( 1. / ( 10. * z ) ) ) ) ) - 1. ) ) | Uses Stirling s approximation for the log - gamma function suitable for large arguments . |
5,390 | def satellite ( isochrone , kernel , stellar_mass , distance_modulus , ** kwargs ) : mag_1 , mag_2 = isochrone . simulate ( stellar_mass , distance_modulus ) lon , lat = kernel . simulate ( len ( mag_1 ) ) return mag_1 , mag_2 , lon , lat | Wrapping the isochrone and kernel simulate functions . |
5,391 | def detectability ( self , ** kwargs ) : distance_modulus = kwargs . get ( 'distance_modulus' ) distance = mod2dist ( distance_modulus ) stellar_mass = kwargs . get ( 'stellar_mass' ) extension = kwargs . get ( 'extension' ) norm = 10 ** 3 / mod2dist ( 18 ) ** 2 detect = stellar_mass / distance ** 2 detect /= norm | An a priori detectability proxy . |
5,392 | def _create_catalog ( self , catalog = None ) : if catalog is None : catalog = ugali . analysis . loglike . createCatalog ( self . config , self . roi ) cut = self . mask . restrictCatalogToObservableSpace ( catalog ) self . catalog = catalog . applyCut ( cut ) | Bundle it . |
5,393 | def _setup_subpix ( self , nside = 2 ** 16 ) : if hasattr ( self , 'subpix' ) : return self . roi_radius = self . config [ 'coords' ] [ 'roi_radius' ] logger . info ( "Setup subpixels..." ) self . nside_pixel = self . config [ 'coords' ] [ 'nside_pixel' ] self . nside_subpixel = self . nside_pixel * 2 ** 4 epsilon = np . degrees ( hp . max_pixrad ( self . nside_pixel ) ) subpix = ugali . utils . healpix . query_disc ( self . nside_subpixel , self . roi . vec , self . roi_radius + epsilon ) superpix = ugali . utils . healpix . superpixel ( subpix , self . nside_subpixel , self . nside_pixel ) self . subpix = subpix [ np . in1d ( superpix , self . roi . pixels ) ] | Subpixels for random position generation . |
5,394 | def _setup_cmd ( self , mode = 'cloud-in-cells' ) : if hasattr ( self , 'bkg_lambda' ) : return logger . info ( "Setup color..." ) solid_angle_roi = self . roi . area_pixel * len ( self . roi . pixels ) config = Config ( self . config ) config [ 'color' ] [ 'n_bins' ] *= 5 config [ 'mag' ] [ 'n_bins' ] *= 1 roi = ugali . analysis . loglike . createROI ( config , self . roi . lon , self . roi . lat ) mask = ugali . analysis . loglike . createMask ( config , roi ) self . bkg_centers_color = roi . centers_color self . bkg_centers_mag = roi . centers_mag cmd_background = mask . backgroundCMD ( self . catalog , mode ) self . bkg_lambda = cmd_background * solid_angle_roi * roi . delta_color * roi . delta_mag np . sum ( self . bkg_lambda ) del config , roi , mask | The purpose here is to create a more finely binned background CMD to sample from . |
5,395 | def toy_background ( self , mc_source_id = 2 , seed = None ) : logger . info ( "Running toy background simulation..." ) size = 20000 nstar = np . random . poisson ( size ) logger . info ( "Simulating %i background stars..." % nstar ) logger . info ( "Generating uniform positions..." ) idx = np . random . randint ( 0 , len ( self . subpix ) - 1 , size = nstar ) lon , lat = pix2ang ( self . nside_subpixel , self . subpix [ idx ] ) pix = ang2pix ( self . nside_pixel , lon , lat ) lon , lat = pix2ang ( self . nside_pixel , pix ) logger . info ( "Generating uniform CMD..." ) mag_1 = np . random . uniform ( self . config [ 'mag' ] [ 'min' ] , self . config [ 'mag' ] [ 'max' ] , size = nstar ) color = np . random . uniform ( self . config [ 'color' ] [ 'min' ] , self . config [ 'color' ] [ 'max' ] , size = nstar ) mag_2 = mag_1 - color mask = - 1. * np . ones ( hp . nside2npix ( self . nside_pixel ) ) mask [ self . roi . pixels ] = self . mask . mask_1 . mask_roi_sparse mag_lim_1 = mask [ pix ] mask = - 1. * np . ones ( hp . nside2npix ( self . nside_pixel ) ) mask [ self . roi . pixels ] = self . mask . mask_2 . mask_roi_sparse mag_lim_2 = mask [ pix ] mag_err_1 = self . photo_err_1 ( mag_lim_1 - mag_1 ) mag_err_2 = self . photo_err_2 ( mag_lim_2 - mag_2 ) mc_source_id = mc_source_id * np . ones ( len ( mag_1 ) ) select = ( mag_lim_1 > mag_1 ) & ( mag_lim_2 > mag_2 ) hdu = ugali . observation . catalog . makeHDU ( self . config , mag_1 [ select ] , mag_err_1 [ select ] , mag_2 [ select ] , mag_err_2 [ select ] , lon [ select ] , lat [ select ] , mc_source_id [ select ] ) catalog = ugali . observation . catalog . Catalog ( self . config , data = hdu . data ) return catalog | Quick uniform background generation . |
5,396 | def satellite ( self , stellar_mass , distance_modulus , mc_source_id = 1 , seed = None , ** kwargs ) : if seed is not None : np . random . seed ( seed ) isochrone = kwargs . pop ( 'isochrone' , self . isochrone ) kernel = kwargs . pop ( 'kernel' , self . kernel ) for k , v in kwargs . items ( ) : if k in kernel . params . keys ( ) : setattr ( kernel , k , v ) mag_1 , mag_2 = isochrone . simulate ( stellar_mass , distance_modulus ) lon , lat = kernel . simulate ( len ( mag_1 ) ) logger . info ( "Simulating %i satellite stars..." % len ( mag_1 ) ) pix = ang2pix ( self . config [ 'coords' ] [ 'nside_pixel' ] , lon , lat ) mask = - 1. * np . ones ( hp . nside2npix ( self . config [ 'coords' ] [ 'nside_pixel' ] ) ) mask [ self . roi . pixels ] = self . mask . mask_1 . mask_roi_sparse mag_lim_1 = mask [ pix ] mask = - 1. * np . ones ( hp . nside2npix ( self . config [ 'coords' ] [ 'nside_pixel' ] ) ) mask [ self . roi . pixels ] = self . mask . mask_2 . mask_roi_sparse mag_lim_2 = mask [ pix ] mag_err_1 = self . photo_err_1 ( mag_lim_1 - mag_1 ) mag_err_2 = self . photo_err_2 ( mag_lim_2 - mag_2 ) mag_obs_1 = mag_1 + np . random . normal ( size = len ( mag_1 ) ) * mag_err_1 mag_obs_2 = mag_2 + np . random . normal ( size = len ( mag_2 ) ) * mag_err_2 select = ( mag_lim_1 > mag_obs_1 ) & ( mag_lim_2 > mag_obs_2 ) logger . info ( "Clipping %i simulated satellite stars..." % ( ~ select ) . sum ( ) ) mc_source_id = mc_source_id * np . ones ( len ( mag_1 ) ) hdu = ugali . observation . catalog . makeHDU ( self . config , mag_obs_1 [ select ] , mag_err_1 [ select ] , mag_obs_2 [ select ] , mag_err_2 [ select ] , lon [ select ] , lat [ select ] , mc_source_id [ select ] ) catalog = ugali . observation . catalog . Catalog ( self . config , data = hdu . data ) return catalog | Create a simulated satellite . Returns a catalog object . |
5,397 | def makeHDU ( self , mag_1 , mag_err_1 , mag_2 , mag_err_2 , lon , lat , mc_source_id ) : if self . config [ 'catalog' ] [ 'coordsys' ] . lower ( ) == 'cel' and self . config [ 'coords' ] [ 'coordsys' ] . lower ( ) == 'gal' : lon , lat = ugali . utils . projector . gal2cel ( lon , lat ) elif self . config [ 'catalog' ] [ 'coordsys' ] . lower ( ) == 'gal' and self . config [ 'coords' ] [ 'coordsys' ] . lower ( ) == 'cel' : lon , lat = ugali . utils . projector . cel2gal ( lon , lat ) columns = [ pyfits . Column ( name = self . config [ 'catalog' ] [ 'objid_field' ] , format = 'D' , array = np . arange ( len ( lon ) ) ) , pyfits . Column ( name = self . config [ 'catalog' ] [ 'lon_field' ] , format = 'D' , array = lon ) , pyfits . Column ( name = self . config [ 'catalog' ] [ 'lat_field' ] , format = 'D' , array = lat ) , pyfits . Column ( name = self . config [ 'catalog' ] [ 'mag_1_field' ] , format = 'E' , array = mag_1 ) , pyfits . Column ( name = self . config [ 'catalog' ] [ 'mag_err_1_field' ] , format = 'E' , array = mag_err_1 ) , pyfits . Column ( name = self . config [ 'catalog' ] [ 'mag_2_field' ] , format = 'E' , array = mag_2 ) , pyfits . Column ( name = self . config [ 'catalog' ] [ 'mag_err_2_field' ] , format = 'E' , array = mag_err_2 ) , pyfits . Column ( name = self . config [ 'catalog' ] [ 'mc_source_id_field' ] , format = 'I' , array = mc_source_id ) , ] hdu = pyfits . new_table ( columns ) return hdu | Create a catalog fits file object based on input data . |
5,398 | def inverted_dict ( d ) : return dict ( ( force_hashable ( v ) , k ) for ( k , v ) in viewitems ( dict ( d ) ) ) | Return a dict with swapped keys and values |
5,399 | def inverted_dict_of_lists ( d ) : new_dict = { } for ( old_key , old_value_list ) in viewitems ( dict ( d ) ) : for new_key in listify ( old_value_list ) : new_dict [ new_key ] = old_key return new_dict | Return a dict where the keys are all the values listed in the values of the original dict |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.