idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
5,500
def create_response(self, request, image, content_type):
    """Build an ``HttpResponse`` serving *image* with the given content type.

    Override in subclasses to return a different kind of response.
    """
    response = HttpResponse(content=image, content_type=content_type)
    return response
Returns a response object for the given image . Can be overridden to return different responses .
5,501
def create_response(self, request, content, content_type):
    """Build an ``HttpResponse`` for the request body *content*.

    Override in subclasses to return a different kind of response.
    """
    response = HttpResponse(content=content, content_type=content_type)
    return response
Returns a response object for the request . Can be overridden to return different responses .
5,502
def process_temporary_file(self, tmp_file):
    """Truncate an over-long filename, save the model, and reply 201.

    The JSON body carries the saved file's UUID; the content type is
    text/plain for legacy uploader compatibility (assumed -- confirm).
    """
    name = tmp_file.filename
    if len(name) > 100:
        # Keep the extension and trim the stem so the result fits in 100 chars.
        stem = name[:name.rfind(".")]
        tmp_file.filename = "%s.%s" % (
            stem[:99 - len(tmp_file.extension)], tmp_file.extension)
    tmp_file.save()
    payload = json.dumps({'uuid': str(tmp_file.uuid)})
    response = HttpResponse(payload, status=201)
    response['Content-type'] = "text/plain"
    return response
Truncates the filename if necessary, saves the model, and returns a response.
5,503
# Collapse the (pixel, distance) likelihood cube to 2D labels at the
# best-fit distance slice (zmax), then run the HEALPix labeling on that
# slice and broadcast the resulting labels back across all distances.
# NOTE(review): left byte-identical -- the fancy-indexing into self.values
# and the repeat/reshape broadcast are order-sensitive.
def createLabels2D ( self ) : logger . debug ( " Creating 2D labels..." ) self . zmax = np . argmax ( self . values , axis = 1 ) self . vmax = self . values [ np . arange ( len ( self . pixels ) , dtype = int ) , self . zmax ] kwargs = dict ( pixels = self . pixels , values = self . vmax , nside = self . nside , threshold = self . threshold , xsize = self . xsize ) labels , nlabels = CandidateSearch . labelHealpix ( ** kwargs ) self . nlabels = nlabels self . labels = np . repeat ( labels , len ( self . distances ) ) . reshape ( len ( labels ) , len ( self . distances ) ) return self . labels , self . nlabels
2D labeling at zmax
5,504
def process_csv(f):
    """Parse a raw spend CSV from file-like *f*.

    Yields one OrderedDict per row, shaped for insertion into the raw
    spending database.
    """
    for row in unicodecsv.DictReader(f, encoding=_ENCODING):
        month, year = parse_month_year(row['Return Month'])
        yield OrderedDict([
            ('customer_name', row['CustomerName']),
            ('supplier_name', row['SupplierName']),
            ('month', month),
            ('year', year),
            ('date', datetime.date(year, month, 1)),
            ('total_ex_vat', parse_price(row['EvidencedSpend'])),
            ('lot', parse_lot_name(row['LotDescription'])),
            ('customer_sector', parse_customer_sector(row['Sector'])),
            ('supplier_type', parse_sme_or_large(row['SME or Large'])),
        ])
Take a file - like object and yield OrderedDicts to be inserted into raw spending database .
5,505
def try_number(value):
    """Cast *value* to int, else to float; raise ValueError if neither works."""
    try:
        return int(value)
    except ValueError:
        pass
    try:
        return float(value)
    except ValueError:
        raise ValueError(
            "Unable to use value as int or float: {0!r}".format(value))
Attempt to cast the string value to an int and, failing that, a float; if both fail, raise a ValueError.
5,506
def convert_durations(metric):
    """Convert an avgSessionDuration (name, value) pair from seconds to ms.

    Any other metric -- or a falsy duration -- is returned unchanged.
    """
    name, value = metric[0], metric[1]
    if name == 'avgSessionDuration' and value:
        return (name, value * 1000)
    return metric
Convert session duration metrics from seconds to milliseconds .
5,507
def to_datetime(date_key):
    """Return a UTC datetime for the first YYYY-MM[-DD] date in *date_key*."""
    matched = re.search(r'\d{4}-\d{2}(-\d{2})?', date_key).group()
    # A full date is exactly 10 characters; otherwise only year-month matched.
    fmt = '%Y-%m-%d' if len(matched) == 10 else '%Y-%m'
    return datetime.strptime(matched, fmt).replace(tzinfo=pytz.UTC)
Extract the first date from key matching YYYY - MM - DD or YYYY - MM and convert to datetime .
5,508
def float_to_decimal(f):
    """Exactly convert float *f* to a Decimal under DECIMAL_CONTEXT.

    Uses the float's integer ratio so no binary precision is lost before
    the context's division rounds it.
    """
    num, den = f.as_integer_ratio()
    return DECIMAL_CONTEXT.divide(Decimal(num), Decimal(den))
Convert a float to a 38 - precision Decimal
5,509
def is_dynamo_value(value):
    """True when *value* looks like a Dynamo-encoded {type_tag: payload} dict."""
    if not isinstance(value, dict) or len(value) != 1:
        return False
    tag = six.next(six.iterkeys(value))
    return tag in TYPES_REV
Returns True if the value is a Dynamo - formatted value
5,510
def encode_set(dynamizer, value):
    """Encode a python set into Dynamo (type, payload) form.

    The element type is inferred from an arbitrary member, so the set is
    assumed homogeneous.
    """
    sample = next(iter(value))
    set_type = dynamizer.raw_encode(sample)[0] + 'S'
    payload = [dynamizer.raw_encode(item)[1] for item in value]
    return set_type, payload
Encode a set for the DynamoDB format
5,511
def encode_list(dynamizer, value):
    """Encode a python list into Dynamo ('L', [...]) form.

    Fix: the original also built ``dict(map(dynamizer.raw_encode, value))``
    and discarded the result -- every element was encoded twice for nothing.
    That dead statement has been removed.
    """
    encoded_list = []
    for v in value:
        encoded_type, encoded_value = dynamizer.raw_encode(v)
        encoded_list.append({encoded_type: encoded_value, })
    return 'L', encoded_list
Encode a list for the DynamoDB format
5,512
def encode_dict(dynamizer, value):
    """Encode a python dict into Dynamo ('M', {...}) form."""
    encoded = {}
    for key, val in six.iteritems(value):
        tag, payload = dynamizer.raw_encode(val)
        encoded[key] = {tag: payload, }
    return 'M', encoded
Encode a dict for the DynamoDB format
5,513
def raw_encode(self, value):
    """Dispatch *value* to the encoder registered for its exact type.

    Raises ValueError when no encoder is registered for ``type(value)``.
    """
    key = type(value)
    if key in self.encoders:
        return self.encoders[key](self, value)
    raise ValueError("No encoder for value '%s' of type '%s'" %
                     (value, type(value)))
Run the encoder on a value
5,514
def encode_keys(self, keys):
    """Encode every non-null value in *keys*; null entries are dropped."""
    return {k: self.encode(v)
            for k, v in six.iteritems(keys) if not is_null(v)}
Run the encoder on a dict of values
5,515
def maybe_encode_keys(self, keys):
    """Encode a dict of values, unless they are already Dynamo-formatted.

    The first pre-encoded value found means the whole dict already is, and
    *keys* is returned untouched (all-or-nothing assumption).
    """
    encoded = {}
    for key, val in six.iteritems(keys):
        if is_dynamo_value(val):
            return keys
        if not is_null(val):
            encoded[key] = self.encode(val)
    return encoded
Same as encode_keys but a no - op if already in Dynamo format
5,516
def decode_keys(self, keys):
    """Decode every Dynamo-formatted value in *keys*."""
    return {k: self.decode(v) for k, v in six.iteritems(keys)}
Run the decoder on a dict of values
5,517
def decode(self, dynamo_value):
    """Decode a Dynamo {type_tag: payload} value into a python value.

    Fixes: the local no longer shadows the builtin ``type``, and the
    unknown-type error message is actually %-formatted -- the original passed
    the format string and the tag as two separate TypeError arguments, so the
    message was never interpolated.
    """
    tag, value = next(six.iteritems(dynamo_value))
    if tag == STRING:
        return value
    elif tag == BINARY:
        return Binary(value)
    elif tag == NUMBER:
        return Decimal(value)
    elif tag == STRING_SET:
        return set(value)
    elif tag == BINARY_SET:
        return set(Binary(v) for v in value)
    elif tag == NUMBER_SET:
        return set(Decimal(v) for v in value)
    elif tag == BOOL:
        return value
    elif tag == LIST:
        return [self.decode(v) for v in value]
    elif tag == MAP:
        decoded_dict = {}
        for k, v in six.iteritems(value):
            decoded_dict[k] = self.decode(v)
        return decoded_dict
    elif tag == NULL:
        return None
    else:
        raise TypeError("Received unrecognized type %r from dynamo" % (tag,))
Decode a dynamo value into a python value
5,518
def get_paragraph(self):
    """Write a paragraph of 5 to 12 randomly generated sentences.

    The paragraph is stored on ``self.text`` and returned.

    Fixes: builds the paragraph with ``str.join`` instead of repeated
    ``+=`` concatenation, and the docstring now matches the actual
    ``randint(5, 12)`` sentence count (the original claimed 5).
    """
    sentences = [self._write_sentence() for _ in range(randint(5, 12))]
    self.text = ''.join(sentences)
    return self.text
Write a paragraph of 5 to 12 sentences.
5,519
def skip_job(counter):
    """Decide whether this host should skip job number *counter*.

    Jobs are sharded across NUMBER_OF_HOSTS hosts using the numeric suffix
    of the hostname; a host whose name has no numeric suffix skips nothing.
    """
    try:
        host_number = int(socket.gethostname().split('-')[-1])
    except ValueError:
        # No trailing number in the hostname -- run every job.
        return False
    return (counter + host_number - (NUMBER_OF_HOSTS - 1)) % NUMBER_OF_HOSTS != 0
Should we skip the job based on its number
5,520
# Build a full crontab: strip the app's previous entries, then append one
# pp-collector line per parsed job from path_to_jobs, wrapped in begin/end
# marker comments keyed by unique_id.  Jobs are sharded across hosts via
# skip_job(); a ValueError from parse_job_line is re-raised as ParseError.
# DISABLE_COLLECTORS=true in the environment is propagated into each cronjob.
# NOTE(review): left byte-identical -- job numbering, skip logic and the
# adjacent-string job template are order-sensitive.
def generate_crontab ( current_crontab , path_to_jobs , path_to_app , unique_id ) : set_disable_envar = '' if os . environ . get ( 'DISABLE_COLLECTORS' ) == 'true' : set_disable_envar = 'DISABLE_COLLECTORS={} ' . format ( os . environ . get ( 'DISABLE_COLLECTORS' ) ) job_template = '{schedule} ' '{set_disable_envar}' '{app_path}/venv/bin/pp-collector ' '-l {collector_slug} ' '-c {app_path}/config/{credentials} ' '-t {app_path}/config/{token} ' '-b {app_path}/config/{performanceplatform} ' '>> {app_path}/log/out.log 2>> {app_path}/log/error.log' crontab = [ line . strip ( ) for line in current_crontab ] crontab = remove_existing_crontab_for_app ( crontab , unique_id ) additional_crontab = [ ] job_number = 0 with open ( path_to_jobs ) as jobs : try : for job in jobs : parsed = parse_job_line ( job ) if parsed is not None : job_number += 1 if skip_job ( job_number ) : continue schedule , collector_slug , credentials , token , performanceplatform = parsed cronjob = job_template . format ( schedule = schedule , set_disable_envar = set_disable_envar , app_path = path_to_app , collector_slug = collector_slug , credentials = credentials , token = token , performanceplatform = performanceplatform ) additional_crontab . append ( cronjob ) except ValueError as e : raise ParseError ( str ( e ) ) if additional_crontab : crontab . append ( crontab_begin_comment ( unique_id ) ) crontab . extend ( additional_crontab ) crontab . append ( crontab_end_comment ( unique_id ) ) return crontab
Returns a crontab with jobs from job path
5,521
def map_parameters(cls, params):
    """Translate parameter names through cls.FIELD_MAP (case-insensitive).

    Unknown names pass through unchanged.
    """
    return dict(
        (cls.FIELD_MAP.get(key.lower(), key), val)
        for key, val in six.iteritems(params)
    )
Maps parameters to form field names
5,522
def world_to_image(bbox, size):
    """Return a function mapping world (x, y) to image pixel coordinates.

    The image y axis points down, hence the flip against size[1].
    """
    scale_x = float(size[0]) / bbox.width
    scale_y = float(size[1]) / bbox.height

    def convert(x, y):
        return ((x - bbox.xmin) * scale_x,
                size[1] - (y - bbox.ymin) * scale_y)

    return convert
Function generator to create functions for converting from world coordinates to image coordinates
5,523
# Warp this GeoImage to target_bbox (and optionally target_size).  Same-SRS
# warps use a cheap EXTENT crop/scale; cross-projection warps use a MESH
# transform from _create_mesh.  When the target is larger than the source
# image, the source is first pasted onto a transparent canvas big enough
# for both.  Returns self unchanged when bbox and size already match.
# NOTE(review): left byte-identical -- canvas sizing, the EXTENT corner
# ordering and the projection branch are easy to break in a rewrite.
def warp ( self , target_bbox , target_size = None ) : if not target_size : px_per_unit = ( float ( self . image . size [ 0 ] ) / self . bbox . width , float ( self . image . size [ 1 ] ) / self . bbox . height ) src_bbox = target_bbox . project ( self . bbox . projection ) target_size = ( int ( round ( src_bbox . width * px_per_unit [ 0 ] ) ) , int ( round ( src_bbox . height * px_per_unit [ 1 ] ) ) ) canvas_size = ( max ( target_size [ 0 ] , self . image . size [ 0 ] ) , max ( target_size [ 1 ] , self . image . size [ 1 ] ) ) if self . bbox == target_bbox and self . image . size == target_size : return self elif self . bbox . projection . srs == target_bbox . projection . srs : to_source_image = world_to_image ( self . bbox , self . image . size ) upper_left = to_source_image ( * ( target_bbox . xmin , target_bbox . ymax ) ) lower_right = to_source_image ( * ( target_bbox . xmax , target_bbox . ymin ) ) if canvas_size == self . image . size : im = self . image else : im = Image . new ( "RGBA" , canvas_size , ( 0 , 0 , 0 , 0 ) ) im . paste ( self . image , ( 0 , 0 ) ) new_image = im . transform ( target_size , Image . EXTENT , ( upper_left [ 0 ] , upper_left [ 1 ] , lower_right [ 0 ] , lower_right [ 1 ] ) , Image . NEAREST ) else : if canvas_size == self . image . size : im = self . image else : im = Image . new ( "RGBA" , canvas_size , ( 0 , 0 , 0 , 0 ) ) im . paste ( self . image , ( 0 , 0 ) ) new_image = im . transform ( target_size , Image . MESH , self . _create_mesh ( target_bbox , target_size ) , Image . NEAREST ) return GeoImage ( new_image , target_bbox )
Returns a copy of this image warped to a target size and bounding box
5,524
def try_get_department(department_or_code):
    """Resolve a department code, falling back to the string as passed.

    take_first_department_code raises AssertionError on non-code input.
    """
    try:
        value = take_first_department_code(department_or_code)
    except AssertionError:
        value = department_or_code
    return DEPARTMENT_MAPPING.get(value, value)
Try to take the first department code or fall back to string as passed
5,525
def debug(self):
    """Fetch debug/status from the identity manager.

    Server errors are returned as an {"error": ...} dict rather than raised.
    """
    status_url = '{}debug/status'.format(self.url)
    try:
        return make_request(status_url, timeout=self.timeout)
    except ServerError as err:
        return {"error": str(err)}
Retrieve the debug information from the identity manager .
5,526
def login(self, username, json_document):
    """PUT the user's identity document to the identity manager."""
    user_url = '{}u/{}'.format(self.url, username)
    make_request(user_url, method='PUT', body=json_document,
                 timeout=self.timeout)
Send user identity information to the identity manager .
5,527
def discharge(self, username, macaroon):
    """Discharge the third-party caveat of *macaroon* for *username*.

    Returns the discharge macaroon, base64-url-encoded.  Raises
    InvalidMacaroon when the macaroon does not carry exactly one third-party
    caveat or the discharger's response is malformed.

    Fix: ``err.message`` does not exist on Python 3 exceptions (removed in
    py3; deprecated since 2.6) -- use ``str(err)`` instead.
    """
    caveats = macaroon.third_party_caveats()
    if len(caveats) != 1:
        raise InvalidMacaroon(
            'Invalid number of third party caveats (1 != {})'
            ''.format(len(caveats)))
    url = '{}discharger/discharge?discharge-for-user={}&id={}'.format(
        self.url, quote(username), caveats[0][1])
    logging.debug('Sending identity info to {}'.format(url))
    logging.debug('data is {}'.format(caveats[0][1]))
    response = make_request(url, method='POST', timeout=self.timeout)
    try:
        macaroon = response['Macaroon']
        json_macaroon = json.dumps(macaroon)
    except (KeyError, UnicodeDecodeError) as err:
        raise InvalidMacaroon(
            'Invalid macaroon from discharger: {}'.format(str(err)))
    return base64.urlsafe_b64encode(json_macaroon.encode('utf-8'))
Discharge the macaroon for the identity.
5,528
def discharge_token(self, username):
    """Fetch a discharge token for *username* from the identity manager.

    Returns the token wrapped in a JSON list, base64-url-encoded.  Raises
    InvalidMacaroon when the response is malformed.

    Fix: ``err.message`` does not exist on Python 3 exceptions -- use
    ``str(err)`` instead.
    """
    url = '{}discharge-token-for-user?username={}'.format(
        self.url, quote(username))
    logging.debug('Sending identity info to {}'.format(url))
    response = make_request(url, method='GET', timeout=self.timeout)
    try:
        macaroon = response['DischargeToken']
        json_macaroon = json.dumps(macaroon)
    except (KeyError, UnicodeDecodeError) as err:
        raise InvalidMacaroon(
            'Invalid macaroon from discharger: {}'.format(str(err)))
    return base64.urlsafe_b64encode(
        "[{}]".format(json_macaroon).encode('utf-8'))
Discharge token for a user .
5,529
def set_extra_info(self, username, extra_info):
    """PUT extra info for *username* to the identity manager."""
    info_url = self._get_extra_info_url(username)
    make_request(info_url, method='PUT', body=extra_info,
                 timeout=self.timeout)
Set extra info for the given user .
5,530
def get_extra_info(self, username):
    """GET the extra info stored for *username*."""
    info_url = self._get_extra_info_url(username)
    return make_request(info_url, timeout=self.timeout)
Get extra info for the given user .
5,531
def is_complete(self):
    """True when every required parameter has a value in self.values."""
    required = (p for p in self.parameters if p.required)
    return all(p.name in self.values for p in required)
Do all required parameters have values?
5,532
def _encode_write(dynamizer, data, action, key):
    """Build the Dynamo write request body for one item.

    Null values are dropped before encoding.
    """
    encoded = {k: dynamizer.encode(v)
               for k, v in six.iteritems(data) if not is_null(v)}
    return {action: {key: encoded, }}
Encode an item write command
5,533
def encode_query_kwargs(dynamizer, kwargs):
    """Translate name__op=value query kwargs into Dynamo comparison conditions."""
    ret = {}
    for key, value in six.iteritems(kwargs):
        if '__' not in key:
            raise TypeError("Invalid query argument '%s'" % key)
        name, op = key.split('__')
        # "field__eq=None" is really a NULL existence test.
        if op == 'eq' and is_null(value):
            op, value = 'null', True
        if op == 'null':
            ret[name] = {
                'ComparisonOperator': 'NULL' if value else 'NOT_NULL'
            }
            continue
        if op not in ('in', 'between'):
            # Single-operand operators still take a one-element value list.
            value = (value,)
        ret[name] = {
            'AttributeValueList': [dynamizer.encode(v) for v in value],
            'ComparisonOperator': CONDITIONS[op],
        }
    return ret
Encode query constraints in Dynamo format
5,534
def attrs(self, dynamizer):
    """Build the AttributeUpdates entry for this update."""
    update = {'Action': self.action, }
    if not is_null(self.value):
        update['Value'] = dynamizer.encode(self.value)
    return {self.key: update}
Get the attributes for the update
5,535
def expected(self, dynamizer):
    """Build the Expected clause for a conditional update.

    Kwarg-style constraints win; otherwise a plain expected value (or
    expected-absence when the value is null); empty dict when unset.
    """
    if self._expect_kwargs:
        return encode_query_kwargs(dynamizer, self._expect_kwargs)
    if self._expected is NO_ARG:
        return {}
    if is_null(self._expected):
        condition = {'Exists': False}
    else:
        condition = {
            'Value': dynamizer.encode(self._expected),
            'Exists': True,
        }
    return {self.key: condition}
Get the expected values for the update
5,536
def flush(self):
    """Encode and send all pending puts/deletes, then clear both queues."""
    dynamizer = self.connection.dynamizer
    items = [encode_put(dynamizer, data) for data in self._to_put]
    items.extend(encode_delete(dynamizer, data) for data in self._to_delete)
    self._write(items)
    self._to_put = []
    self._to_delete = []
Flush pending items to Dynamo
5,537
# Perform one batch write and handle the response: accumulate consumed
# capacity, stash any UnprocessedItems for a later resend with exponential
# backoff (self._attempt counts consecutive incomplete writes and resets
# to zero on a fully processed batch).
# NOTE(review): left byte-identical -- the attempt/sleep bookkeeping is
# deliberate retry state.
def _write ( self , items ) : response = self . _batch_write_item ( items ) if 'consumed_capacity' in response : self . consumed_capacity = sum ( response [ 'consumed_capacity' ] , self . consumed_capacity ) if response . get ( 'UnprocessedItems' ) : unprocessed = response [ 'UnprocessedItems' ] . get ( self . tablename , [ ] ) LOG . info ( "%d items were unprocessed. Storing for later." , len ( unprocessed ) ) self . _unprocessed . extend ( unprocessed ) self . _attempt += 1 self . connection . exponential_sleep ( self . _attempt ) else : self . _attempt = 0 return response
Perform a batch write and handle the response
5,538
def resend_unprocessed(self):
    """Drain the unprocessed-item backlog in MAX_WRITE_BATCH-sized writes.

    _write may re-append items Dynamo still rejects, so loop until the
    backlog is genuinely empty.
    """
    LOG.info("Re-sending %d unprocessed items.", len(self._unprocessed))
    while self._unprocessed:
        batch = self._unprocessed[:MAX_WRITE_BATCH]
        self._unprocessed = self._unprocessed[MAX_WRITE_BATCH:]
        LOG.info("Sending %d items", len(batch))
        self._write(batch)
        LOG.info("%d unprocessed items left", len(self._unprocessed))
Resend all unprocessed items
5,539
def _batch_write_item(self, items):
    """Issue the raw BatchWriteItem call for *items*."""
    return self.connection.call(
        'batch_write_item',
        RequestItems={self.tablename: items, },
        ReturnConsumedCapacity=self.return_capacity,
        ReturnItemCollectionMetrics=self.return_item_collection_metrics,
    )
Make a BatchWriteItem call to Dynamo
5,540
def _get_path ( entity_id ) : try : path = entity_id . path ( ) except AttributeError : path = entity_id if path . startswith ( 'cs:' ) : path = path [ 3 : ] return path
Get the entity_id as a string if it is a Reference .
5,541
# GET *url* from the charmstore with the client's TLS/cookie/auth settings.
# 404/407 become EntityNotFound; other HTTP errors, timeouts and transport
# errors are logged and re-raised as ServerError.
# NOTE(review): the final handler digs into exc.args[0][1].errno, which
# assumes a specific requests/urllib3 exception shape and can itself raise
# IndexError/AttributeError for other RequestExceptions -- confirm before
# restructuring; left byte-identical.
def _get ( self , url ) : try : response = requests . get ( url , verify = self . verify , cookies = self . cookies , timeout = self . timeout , auth = self . _client . auth ( ) ) response . raise_for_status ( ) return response except HTTPError as exc : if exc . response . status_code in ( 404 , 407 ) : raise EntityNotFound ( url ) else : message = ( 'Error during request: {url} ' 'status code:({code}) ' 'message: {message}' ) . format ( url = url , code = exc . response . status_code , message = exc . response . text ) logging . error ( message ) raise ServerError ( exc . response . status_code , exc . response . text , message ) except Timeout : message = 'Request timed out: {url} timeout: {timeout}' message = message . format ( url = url , timeout = self . timeout ) logging . error ( message ) raise ServerError ( message ) except RequestException as exc : message = ( 'Error during request: {url} ' 'message: {message}' ) . format ( url = url , message = exc ) logging . error ( message ) raise ServerError ( exc . args [ 0 ] [ 1 ] . errno , exc . args [ 0 ] [ 1 ] . strerror , message )
Make a get request against the charmstore .
5,542
def _meta(self, entity_id, includes, channel=None):
    """GET /meta/any for *entity_id* with optional includes and channel."""
    queries = []
    if includes is not None:
        queries.extend(('include', inc) for inc in includes)
    if channel is not None:
        queries.append(('channel', channel))
    base = '{}/{}/meta/any'.format(self.url, _get_path(entity_id))
    url = '{}?{}'.format(base, urlencode(queries)) if queries else base
    return self._get(url).json()
Retrieve metadata about an entity in the charmstore .
5,543
def entities(self, entity_ids):
    """Fetch default data for several entities in one /meta/any call."""
    parts = ['include=id']
    parts.extend('id=%s' % _get_path(eid) for eid in entity_ids)
    url = '%s/meta/any?%s' % (self.url, '&'.join(parts))
    data = self._get(url)
    return data.json()
Get the default data for entities .
5,544
def bundle(self, bundle_id, channel=None):
    """Fetch the default data for a bundle, including its files."""
    return self.entity(bundle_id, channel=channel, get_files=True)
Get the default data for a bundle .
5,545
def charm(self, charm_id, channel=None):
    """Fetch the default data for a charm, including its files."""
    return self.entity(charm_id, channel=channel, get_files=True)
Get the default data for a charm .
5,546
def charm_icon_url(self, charm_id, channel=None):
    """Build the URL of a charm's icon.svg."""
    path = _get_path(charm_id)
    return _add_channel('{}/{}/icon.svg'.format(self.url, path), channel)
Generate the path to the icon for charms .
5,547
def charm_icon(self, charm_id, channel=None):
    """Download and return the raw bytes of a charm's icon."""
    icon_url = self.charm_icon_url(charm_id, channel=channel)
    return self._get(icon_url).content
Get the charm icon .
5,548
def bundle_visualization(self, bundle_id, channel=None):
    """Download and return the raw bytes of a bundle's diagram."""
    viz_url = self.bundle_visualization_url(bundle_id, channel=channel)
    return self._get(viz_url).content
Get the bundle visualization .
5,549
def bundle_visualization_url(self, bundle_id, channel=None):
    """Build the URL of a bundle's diagram.svg."""
    path = _get_path(bundle_id)
    return _add_channel('{}/{}/diagram.svg'.format(self.url, path), channel)
Generate the path to the visualization for bundles .
5,550
def entity_readme_url(self, entity_id, channel=None):
    """Build the URL of an entity's readme."""
    path = _get_path(entity_id)
    return _add_channel('{}/{}/readme'.format(self.url, path), channel)
Generate the url path for the readme of an entity .
5,551
def entity_readme_content(self, entity_id, channel=None):
    """Download and return an entity's readme as text."""
    readme_url = self.entity_readme_url(entity_id, channel=channel)
    return self._get(readme_url).text
Get the readme for an entity .
5,552
def archive_url(self, entity_id, channel=None):
    """Build the URL of an entity's archive."""
    path = _get_path(entity_id)
    return _add_channel('{}/{}/archive'.format(self.url, path), channel)
Generate a URL for the archive of an entity.
5,553
def file_url(self, entity_id, filename, channel=None):
    """Build the URL of *filename* inside an entity's archive (no request)."""
    path = _get_path(entity_id)
    url = '{}/{}/archive/{}'.format(self.url, path, filename)
    return _add_channel(url, channel)
Generate a URL for a file in an archive without requesting it .
5,554
def files(self, entity_id, manifest=None, filename=None, read_file=False,
          channel=None):
    """Map an entity's manifest to file URLs, or fetch one file.

    With no *filename*, returns {name: url} for every file in the manifest
    (fetching the manifest first if not supplied).  With *filename*, returns
    its URL -- or its contents when *read_file* is true.  Raises
    EntityNotFound when *filename* is not in the manifest.
    """
    if manifest is None:
        manifest_url = '{}/{}/meta/manifest'.format(
            self.url, _get_path(entity_id))
        manifest_url = _add_channel(manifest_url, channel)
        manifest = self._get(manifest_url).json()
    files = {
        entry['Name']: self.file_url(_get_path(entity_id), entry['Name'],
                                     channel=channel)
        for entry in manifest
    }
    if not filename:
        return files
    file_url = files.get(filename, None)
    if file_url is None:
        raise EntityNotFound(entity_id, filename)
    if read_file:
        return self._get(file_url).text
    return file_url
Get the files or file contents of a file for an entity .
5,555
def resource_url(self, entity_id, name, revision):
    """Build the URL of resource *name* at *revision* for an entity."""
    path = _get_path(entity_id)
    return '{}/{}/resource/{}/{}'.format(self.url, path, name, revision)
Return the resource url for a given resource on an entity .
5,556
def config(self, charm_id, channel=None):
    """Fetch the charm-config metadata for a charm."""
    config_url = '{}/{}/meta/charm-config'.format(
        self.url, _get_path(charm_id))
    return self._get(_add_channel(config_url, channel)).json()
Get the config data for a charm .
5,557
def entityId(self, partial, channel=None):
    """Resolve a partial entity id to its full Id via /meta/any."""
    any_url = '{}/{}/meta/any'.format(self.url, _get_path(partial))
    return self._get(_add_channel(any_url, channel)).json()['Id']
Get an entity's full id provided a partial one.
5,558
def search(self, text, includes=None, doc_type=None, limit=None,
           autocomplete=False, promulgated_only=False, tags=None,
           sort=None, owner=None, series=None):
    """Search the charmstore; returns the Results list."""
    queries = self._common_query_parameters(
        doc_type, includes, owner, promulgated_only, series, sort)
    if len(text):
        queries.append(('text', text))
    if limit is not None:
        queries.append(('limit', limit))
    if autocomplete:
        queries.append(('autocomplete', 1))
    if tags is not None:
        # A list of tags is joined into one comma-separated parameter.
        if type(tags) is list:
            tags = ','.join(tags)
        queries.append(('tags', tags))
    base = '{}/search'.format(self.url)
    url = '{}?{}'.format(base, urlencode(queries)) if queries else base
    return self._get(url).json()['Results']
Search for entities in the charmstore .
5,559
def list(self, includes=None, doc_type=None, promulgated_only=False,
         sort=None, owner=None, series=None):
    """List charmstore entities; returns the Results list."""
    queries = self._common_query_parameters(
        doc_type, includes, owner, promulgated_only, series, sort)
    base = '{}/list'.format(self.url)
    url = '{}?{}'.format(base, urlencode(queries)) if queries else base
    return self._get(url).json()['Results']
List entities in the charmstore .
5,560
def _common_query_parameters ( self , doc_type , includes , owner , promulgated_only , series , sort ) : queries = [ ] if includes is not None : queries . extend ( [ ( 'include' , include ) for include in includes ] ) if doc_type is not None : queries . append ( ( 'type' , doc_type ) ) if promulgated_only : queries . append ( ( 'promulgated' , 1 ) ) if owner is not None : queries . append ( ( 'owner' , owner ) ) if series is not None : if type ( series ) is list : series = ',' . join ( series ) queries . append ( ( 'series' , series ) ) if sort is not None : queries . append ( ( 'sort' , sort ) ) return queries
Extract common query parameters between search and list into slice .
5,561
def fetch_related(self, ids):
    """Fetch bundle/stats/owner metadata for a list of {'Id': ...} dicts."""
    if not ids:
        return []
    meta = '&id='.join(entity['Id'] for entity in ids)
    url = ('{url}/meta/any?id={meta}'
           '&include=bundle-metadata&include=stats'
           '&include=supported-series&include=extra-info'
           '&include=bundle-unit-count&include=owner').format(
               url=self.url, meta=meta)
    return self._get(url).json().values()
Fetch related entity information .
5,562
def fetch_interfaces(self, interface, way):
    """List charms that provide or require *interface*.

    *way* selects the relation direction: 'requires' or anything else for
    'provides'.
    """
    if not interface:
        return []
    if way == 'requires':
        request = '&requires=' + interface
    else:
        request = '&provides=' + interface
    search_url = (self.url + '/search?' +
                  'include=charm-metadata&include=stats&include=supported-series'
                  '&include=extra-info&include=bundle-unit-count'
                  '&limit=1000&include=owner' + request)
    return self._get(search_url).json().values()
Get the list of charms that provides or requires this interface .
5,563
def debug(self):
    """Fetch debug/status information from the charmstore."""
    status_url = '{}/debug/status'.format(self.url)
    return self._get(status_url).json()
Retrieve the debug information from the charmstore .
5,564
# Query the isochrone web service for one (age, metallicity) point and
# download the generated table to *outfile*.  Validates log(age) and z
# against the ranges in download_defaults, POSTs the form, scrapes the
# 'output<N>' filename out of the HTML reply, then fetches it with wget.
# NOTE(review): subprocess.check_output(..., shell=True) interpolates
# server-derived text into a shell command -- shell injection risk if the
# server reply is not trusted; consider a list argv.  Also the first bare
# urlopen(url, timeout=2) looks like a reachability probe whose result is
# discarded -- confirm.  Left byte-identical.
def query_server ( self , outfile , age , metallicity ) : params = copy . deepcopy ( self . download_defaults ) epsilon = 1e-4 lage = np . log10 ( age * 1e9 ) lage_min , lage_max = params [ 'isoc_lage0' ] , params [ 'isoc_lage1' ] if not ( lage_min - epsilon < lage < lage_max + epsilon ) : msg = 'Age outside of valid range: %g [%g < log(age) < %g]' % ( lage , lage_min , lage_max ) raise RuntimeError ( msg ) z_min , z_max = params [ 'isoc_z0' ] , params [ 'isoc_z1' ] if not ( z_min <= metallicity <= z_max ) : msg = 'Metallicity outside of valid range: %g [%g < z < %g]' % ( metallicity , z_min , z_max ) raise RuntimeError ( msg ) params [ 'photsys_file' ] = photsys_dict [ self . survey ] params [ 'isoc_age' ] = age * 1e9 params [ 'isoc_zeta' ] = metallicity server = self . download_url url = server + '/cgi-bin/cmd_%s' % params [ 'cmd_version' ] logger . debug ( "Accessing %s..." % url ) urlopen ( url , timeout = 2 ) q = urlencode ( params ) . encode ( 'utf-8' ) logger . debug ( url + '?' + q ) c = str ( urlopen ( url , q ) . read ( ) ) aa = re . compile ( 'output\d+' ) fname = aa . findall ( c ) if len ( fname ) == 0 : msg = "Output filename not found" raise RuntimeError ( msg ) out = '{0}/tmp/{1}.dat' . format ( server , fname [ 0 ] ) cmd = 'wget --progress dot:binary %s -O %s' % ( out , outfile ) logger . debug ( cmd ) stdout = subprocess . check_output ( cmd , shell = True , stderr = subprocess . STDOUT ) logger . debug ( stdout ) return outfile
Server query for the isochrone file .
5,565
def run(self, coords=None, debug=False):
    """Run the likelihood grid search and return the grid.

    NOTE(review): the *debug* flag is currently unused -- confirm intent.
    """
    self.grid.search(coords=coords)
    return self.grid
Run the likelihood grid search
5,566
# Save the likelihood fit results as a sparse HEALPix map (FITS).
# Builds the column dict (pixel ids, likelihoods, richness estimates --
# the extended set only when scan.full_pdf is on), downcasts all but the
# PIXEL column to 'f4', records provenance counts in the header, flattens
# every column when only one distance modulus was scanned, then writes the
# partial map followed by a DISTANCE_MODULUS extension.
# NOTE(review): left byte-identical -- the trailing fitsio.write call is
# split across the two original source lines.
def write ( self , outfile ) : data = odict ( ) data [ 'PIXEL' ] = self . roi . pixels_target if self . config [ 'scan' ] [ 'full_pdf' ] : data [ 'LOG_LIKELIHOOD' ] = self . log_likelihood_sparse_array . T data [ 'RICHNESS' ] = self . richness_sparse_array . T data [ 'RICHNESS_LOWER' ] = self . richness_lower_sparse_array . T data [ 'RICHNESS_UPPER' ] = self . richness_upper_sparse_array . T data [ 'RICHNESS_LIMIT' ] = self . richness_upper_limit_sparse_array . T data [ 'FRACTION_OBSERVABLE' ] = self . fraction_observable_sparse_array . T else : data [ 'LOG_LIKELIHOOD' ] = self . log_likelihood_sparse_array . T data [ 'RICHNESS' ] = self . richness_sparse_array . T data [ 'FRACTION_OBSERVABLE' ] = self . fraction_observable_sparse_array . T for k in list ( data . keys ( ) ) [ 1 : ] : data [ k ] = data [ k ] . astype ( 'f4' , copy = False ) header = odict ( ) header [ 'STELLAR' ] = round ( self . stellar_mass_conversion , 8 ) header [ 'LKDNSIDE' ] = self . config [ 'coords' ] [ 'nside_likelihood' ] header [ 'LKDPIX' ] = ang2pix ( self . config [ 'coords' ] [ 'nside_likelihood' ] , self . roi . lon , self . roi . lat ) header [ 'NROI' ] = self . roi . inROI ( self . loglike . catalog_roi . lon , self . loglike . catalog_roi . lat ) . sum ( ) header [ 'NANNULUS' ] = self . roi . inAnnulus ( self . loglike . catalog_roi . lon , self . loglike . catalog_roi . lat ) . sum ( ) header [ 'NINSIDE' ] = self . roi . inInterior ( self . loglike . catalog_roi . lon , self . loglike . catalog_roi . lat ) . sum ( ) header [ 'NTARGET' ] = self . roi . inTarget ( self . loglike . catalog_roi . lon , self . loglike . catalog_roi . lat ) . sum ( ) if len ( self . distance_modulus_array ) == 1 : for key in data : data [ key ] = data [ key ] . flatten ( ) logger . info ( "Writing %s..." % outfile ) write_partial_map ( outfile , data , nside = self . config [ 'coords' ] [ 'nside_pixel' ] , header = header , clobber = True ) fitsio . write ( outfile , dict ( DISTANCE_MODULUS = self . 
distance_modulus_array . astype ( 'f4' , copy = False ) ) , extname = 'DISTANCE_MODULUS' , clobber = False )
Save the likelihood results as a sparse HEALPix map .
5,567
def from_uri(cls, uri, socket_timeout=None, auto_decode=False):
    """Build a synchronous Client from a beanstalk[d]://host[:port] URI.

    Bracketed IPv6 literals are supported; the port defaults to 11300.
    """
    parts = six.moves.urllib.parse.urlparse(uri)
    if parts.scheme.lower() not in ('beanstalk', 'beanstalkd'):
        raise ValueError('Invalid scheme %s' % parts.scheme)
    ipv6_match = re.match(r'^\[([0-9a-fA-F:]+)\](:[0-9]+)?$', parts.netloc)
    if ipv6_match:
        host = ipv6_match.group(1)
        port = (ipv6_match.group(2) or '11300').lstrip(':')
    elif ':' in parts.netloc:
        host, port = parts.netloc.rsplit(':', 1)
    else:
        host, port = parts.netloc, 11300
    return cls(host, int(port),
               socket_timeout=socket_timeout, auto_decode=auto_decode)
Construct a synchronous Beanstalk Client from a URI .
5,568
def list_tubes(self):
    """Return the tubes this beanstalkd instance knows about."""
    with self._sock_ctx() as sock:
        self._send_message('list-tubes', sock)
        return yaml_load(self._receive_data_with_prefix(b'OK', sock))
Return a list of tubes that this beanstalk instance knows about
5,569
def stats(self):
    """Return instance-wide beanstalkd statistics as a dict."""
    with self._sock_ctx() as sock:
        self._send_message('stats', sock)
        return yaml_load(self._receive_data_with_prefix(b'OK', sock))
Return a dictionary with a bunch of instance - wide statistics
5,570
def put_job(self, data, pri=65536, delay=0, ttr=120):
    """Insert a job with body *data* into the currently USEd tube.

    :param pri: job priority (beanstalkd: lower is more urgent)
    :param delay: seconds before the job becomes ready
    :param ttr: time-to-run in seconds once reserved
    :return: the new job's id

    Fix: the protocol's <bytes> field is now computed from the encoded
    payload.  The original measured ``len(data)`` on the unicode string
    before encoding, which desyncs the beanstalkd protocol for any
    non-ASCII job body (character count != UTF-8 byte count).  It also
    passed an unused ``data=`` kwarg to str.format.
    """
    if not isinstance(data, bytes):
        data = data.encode('utf-8')
    with self._sock_ctx() as socket:
        header = 'put {pri} {delay} {ttr} {datalen}\r\n'.format(
            pri=pri, delay=delay, ttr=ttr, datalen=len(data)).encode('utf-8')
        self._send_message(header + data + b'\r\n', socket)
        return self._receive_id(socket)
Insert a new job into whatever queue is currently USEd
5,571
def watchlist(self, tubes):
    """Make the watchlist exactly *tubes*: watch the missing, ignore the rest."""
    desired = set(tubes)
    for tube in desired - self._watchlist:
        self.watch(tube)
    for tube in self._watchlist - desired:
        self.ignore(tube)
Set the watchlist to the given tubes
5,572
def watch(self, tube):
    """Add the given tube to the watchlist.

    :param tube: name of the tube to add
    """
    with self._sock_ctx() as socket:
        self.desired_watchlist.add(tube)
        if tube not in self._watchlist:
            self._send_message('watch {0}'.format(tube), socket)
            self._receive_id(socket)
            self._watchlist.add(tube)
        # On the first explicit watch, drop the implicit 'default' tube
        # (beanstalkd starts every connection watching 'default').
        if self.initial_watch:
            if tube != 'default':
                self.ignore('default')
            self.initial_watch = False
Add the given tube to the watchlist .
5,573
def ignore(self, tube):
    """Remove the given tube from the watchlist.

    :param tube: name of the tube to stop watching
    :raises KeyError: if the tube is not currently watched
    """
    with self._sock_ctx() as socket:
        if tube not in self._watchlist:
            raise KeyError(tube)
        if tube != 'default':
            self.desired_watchlist.remove(tube)
        if tube in self._watchlist:
            self._send_message('ignore {0}'.format(tube), socket)
            self._receive_id(socket)
            self._watchlist.remove(tube)
        # beanstalkd requires at least one watched tube; track the implicit
        # fallback to 'default' locally.
        if not self._watchlist:
            self._watchlist.add('default')
Remove the given tube from the watchlist .
5,574
def stats_job(self, job_id):
    """Fetch statistics about a single job.

    :param job_id: a job id, or any object with a ``job_id`` attribute
    :return: dict of job statistics parsed from the YAML reply
    """
    job_id = getattr(job_id, 'job_id', job_id)
    with self._sock_ctx() as conn:
        self._send_message('stats-job {0}'.format(job_id), conn)
        raw = self._receive_data_with_prefix(b'OK', conn)
        return yaml_load(raw)
Fetch statistics about a single job
5,575
def stats_tube(self, tube_name):
    """Fetch statistics about a single tube.

    :param tube_name: name of the tube to inspect
    :return: dict of tube statistics parsed from the YAML reply
    """
    with self._sock_ctx() as conn:
        self._send_message('stats-tube {0}'.format(tube_name), conn)
        payload = self._receive_data_with_prefix(b'OK', conn)
        return yaml_load(payload)
Fetch statistics about a single tube
5,576
def reserve_job(self, timeout=5):
    """Reserve a job for this connection.

    Blocks for up to `timeout` seconds; the server replies TIMED_OUT if no
    job became available in that window.

    :param timeout: seconds to wait; must be strictly less than the socket
        timeout so the client does not give up before the server replies
    :return: a Job(id, data)
    :raises ValueError: if timeout >= socket timeout, or nothing is watched
    """
    timeout = int(timeout)
    if self.socket_timeout is not None:
        if timeout >= self.socket_timeout:
            raise ValueError('reserve_job timeout must be < socket timeout')
    if not self._watchlist:
        raise ValueError('Select a tube or two before reserving a job')
    with self._sock_ctx() as socket:
        self._send_message('reserve-with-timeout {0}'.format(timeout), socket)
        job_id, job_data = self._receive_id_and_data_with_prefix(b'RESERVED', socket)
        return Job(job_id, job_data)
Reserve a job for this connection. Blocks for up to TIMEOUT seconds and raises TIMED_OUT if no job was available.
5,577
def delete_job(self, job_id):
    """Delete the given job id.

    The job must have been previously reserved by this connection.

    :param job_id: a job id, or any object with a ``job_id`` attribute
    """
    job_id = getattr(job_id, 'job_id', job_id)
    with self._sock_ctx() as conn:
        self._send_message('delete {0}'.format(job_id), conn)
        self._receive_word(conn, b'DELETED')
Delete the given job id . The job must have been previously reserved by this connection
5,578
def bury_job(self, job_id, pri=65536):
    """Mark the given job_id as buried.

    The job must have been previously reserved by this connection.

    :param job_id: a job id, or any object with a ``job_id`` attribute
    :param pri: priority assigned to the buried job
    """
    job_id = getattr(job_id, 'job_id', job_id)
    with self._sock_ctx() as conn:
        self._send_message('bury {0} {1}'.format(job_id, pri), conn)
        return self._receive_word(conn, b'BURIED')
Mark the given job_id as buried . The job must have been previously reserved by this connection
5,579
def kick_job(self, job_id):
    """Kick the given job id.

    The job must be in the DELAYED or BURIED state and will be immediately
    moved to the READY state.

    :param job_id: a job id, or any object with a ``job_id`` attribute
    """
    job_id = getattr(job_id, 'job_id', job_id)
    with self._sock_ctx() as conn:
        self._send_message('kick-job {0}'.format(job_id), conn)
        self._receive_word(conn, b'KICKED')
Kick the given job id . The job must either be in the DELAYED or BURIED state and will be immediately moved to the READY state .
5,580
def use(self, tube):
    """Start producing jobs into the given tube.

    No-op if this connection is already using that tube.

    :param tube: name of the tube to USE
    """
    with self._sock_ctx() as socket:
        if self.current_tube != tube:
            # Record the desired tube first — presumably so a reconnect can
            # re-issue USE; confirm against _sock_ctx reconnection logic.
            self.desired_tube = tube
            self._send_message('use {0}'.format(tube), socket)
            self._receive_name(socket)
            self.current_tube = tube
Start producing jobs into the given tube .
5,581
def using(self, tube):
    """Context-manager to insert jobs into a specific tube.

    Switches USE to `tube`, yields an inserting proxy, and restores the
    previously-USEd tube on exit.  NOTE(review): this is a generator body;
    presumably decorated with @contextmanager in the full source — confirm.
    """
    try:
        current_tube = self.current_tube
        self.use(tube)
        yield BeanstalkInsertingProxy(self, tube)
    finally:
        # Always restore the prior tube, even if the body raised.
        self.use(current_tube)
Context - manager to insert jobs into a specific tube
5,582
def kick_jobs(self, num_jobs):
    """Kick some number of jobs from the buried queue onto the ready queue.

    :param num_jobs: maximum number of jobs to kick
    :return: the count reported by the server
    """
    with self._sock_ctx() as conn:
        self._send_message('kick {0}'.format(num_jobs), conn)
        return self._receive_id(conn)
Kick some number of jobs from the buried queue onto the ready queue .
5,583
def pause_tube(self, tube, delay=3600):
    """Pause a tube for some number of seconds, preventing it from issuing jobs.

    :param tube: name of the tube to pause
    :param delay: pause duration in seconds (coerced to int)
    """
    with self._sock_ctx() as conn:
        self._send_message('pause-tube {0} {1}'.format(tube, int(delay)), conn)
        return self._receive_word(conn, b'PAUSED')
Pause a tube for some number of seconds preventing it from issuing jobs .
5,584
def interval(best, lo=np.nan, hi=np.nan):
    """Pythonized interval for easy output to yaml.

    :return: ``[best, [lo, hi]]`` with every element coerced to float
    """
    bounds = [float(lo), float(hi)]
    return [float(best), bounds]
Pythonized interval for easy output to yaml
5,585
def peak_interval(data, alpha=_alpha, npoints=_npoints):
    """Identify interval using Gaussian kernel density estimator.

    Returns ``[peak, [lo, hi]]`` where [lo, hi] is the narrowest window
    spanning a (1 - alpha) fraction of the sorted samples that also
    contains the KDE peak.

    :param data: array of samples
    :param alpha: 1 - credible fraction
    :param npoints: grid size for the KDE peak search
    :raises ValueError: when no qualifying window contains the peak
    """
    peak = kde_peak(data, npoints)
    x = np.sort(data.flat)
    n = len(x)
    # Number of samples a (1 - alpha) window must span
    window = int(np.rint((1.0 - alpha) * n))
    starts = x[:n - window]
    ends = x[window:]
    select = (peak >= starts) & (peak <= ends)
    widths = (ends - starts)[select]
    if len(widths) == 0:
        raise ValueError('Too few elements for interval calculation')
    # BUGFIX: the argmin over the FILTERED widths must be mapped back to the
    # original window index before indexing x; previously the filtered index
    # was used directly, selecting the wrong interval whenever the earliest
    # windows did not contain the peak.
    candidates = np.arange(n - window)[select]
    min_idx = candidates[np.argmin(widths)]
    lo = x[min_idx]
    hi = x[min_idx + window]
    return interval(peak, lo, hi)
Identify interval using Gaussian kernel density estimator .
5,586
def supplement(self, coordsys='gal'):
    """Add some supplemental columns.

    Derives celestial (ra, dec) and galactic (glon, glat) coordinate
    columns from the stored lon/lat, plus galactic/celestial position
    angles when a 'position_angle' column exists.

    :param coordsys: coordinate system of the stored lon/lat columns,
        'gal' or anything else for celestial (case-insensitive)
    :return: a deep copy of self with the extra columns appended
    """
    from ugali.utils.projector import gal2cel, gal2cel_angle
    from ugali.utils.projector import cel2gal, cel2gal_angle
    coordsys = coordsys.lower()
    kwargs = dict(usemask=False, asrecarray=True)
    out = copy.deepcopy(self)
    if ('lon' in out.names) and ('lat' in out.names):
        # Rows whose values are all zero look like padding; their derived
        # coordinates are forced back to zero below.
        zeros = np.all(self.ndarray == 0, axis=1)
        if coordsys == 'gal':
            ra, dec = gal2cel(out.lon, out.lat)
            glon, glat = out.lon, out.lat
        else:
            ra, dec = out.lon, out.lat
            glon, glat = cel2gal(out.lon, out.lat)
        ra[zeros] = 0
        dec[zeros] = 0
        glon[zeros] = 0
        glat[zeros] = 0
        names = ['ra', 'dec', 'glon', 'glat']
        arrs = [ra, dec, glon, glat]
        out = mlab.rec_append_fields(out, names, arrs).view(Samples)
    if 'position_angle' in out.names:
        if coordsys == 'gal':
            pa_gal = out.position_angle
            pa_cel = gal2cel_angle(out.lon, out.lat, out.position_angle)
            # Fold angles into [0, 180)
            pa_cel = pa_cel - 180. * (pa_cel > 180.)
        else:
            pa_gal = cel2gal_angle(out.lon, out.lat, out.position_angle)
            pa_cel = out.position_angle
            pa_gal = pa_gal - 180. * (pa_gal > 180.)
        # NOTE(review): `zeros` is only defined in the lon/lat branch above;
        # this raises NameError if position_angle exists without lon/lat.
        pa_gal[zeros] = 0
        pa_cel[zeros] = 0
        names = ['position_angle_gal', 'position_angle_cel']
        arrs = [pa_gal, pa_cel]
        out = recfuncs.append_fields(out, names, arrs, **kwargs).view(Samples)
    return out
Add some supplemental columns
5,587
def median(self, name, **kwargs):
    """Median of the distribution.

    :param name: parameter name passed through to ``self.get``
    :return: length-1 array containing the 50th percentile
    """
    values = self.get(name, **kwargs)
    return np.percentile(values, [50])
Median of the distribution .
5,588
def kde_peak(self, name, npoints=_npoints, **kwargs):
    """Calculate peak of kernel density estimator.

    Delegates to the module-level ``kde_peak`` on the selected samples.
    """
    return kde_peak(self.get(name, **kwargs), npoints)
Calculate peak of kernel density estimator
5,589
def kde(self, name, npoints=_npoints, **kwargs):
    """Calculate kernel density estimator for parameter.

    Delegates to the module-level ``kde`` on the selected samples.
    """
    return kde(self.get(name, **kwargs), npoints)
Calculate kernel density estimator for parameter
5,590
def peak_interval(self, name, alpha=_alpha, npoints=_npoints, **kwargs):
    """Calculate peak interval for parameter.

    Delegates to the module-level ``peak_interval`` on the selected samples.
    """
    return peak_interval(self.get(name, **kwargs), alpha, npoints)
Calculate peak interval for parameter .
5,591
def min_interval(self, name, alpha=_alpha, **kwargs):
    """Calculate minimum interval for parameter.

    Delegates to the module-level ``min_interval`` on the selected samples.
    """
    return min_interval(self.get(name, **kwargs), alpha)
Calculate minimum interval for parameter .
5,592
def results(self, names=None, alpha=_alpha, mode='peak', **kwargs):
    """Calculate the results for a set of parameters.

    :param names: parameter names (defaults to all of ``self.names``)
    :param alpha: NOTE(review): accepted but not forwarded to the interval
        methods (matching the historical behavior)
    :param mode: interval method prefix; '<mode>_interval' is looked up
    :return: ordered mapping of name -> interval result
    """
    if names is None:
        names = self.names
    method_name = '%s_interval' % mode
    return odict((n, getattr(self, method_name)(n, **kwargs)) for n in names)
Calculate the results for a set of parameters .
5,593
def densify(self, factor=10):
    """Increase the density of points along the parabolic curve.

    :param factor: number of interpolated samples per original segment
        (the segment endpoint is dropped to avoid duplicates)
    :return: (x, y) arrays of the densified curve
    """
    x = []
    y = []
    # Fit a parabola through each consecutive triple of points and sample
    # `factor` points on [x[ii], x[ii+1]], dropping the right endpoint.
    for ii in range(0, len(self.x) - 2):
        p = Parabola(self.x[ii:ii + 3], self.y[ii:ii + 3])
        x.append(np.linspace(self.x[ii], self.x[ii + 1], factor)[0:-1])
        y.append(p(x[-1]))
    # Final segment: reuse a parabola through the last three points.
    p = Parabola(self.x[len(self.x) - 3:], self.y[len(self.y) - 3:])
    x.append(np.linspace(self.x[-2], self.x[-1], factor)[0:-1])
    y.append(p(x[-1]))
    # Append the very last original point exactly once.
    x.append([self.x[-1]])
    y.append([self.y[-1]])
    return np.concatenate(x), np.concatenate(y)
Increase the density of points along the parabolic curve .
5,594
def profileUpperLimit(self, delta=2.71):
    """Compute one-sided upper limit via profile method.

    Solves p_2*x^2 + p_1*x + c = 0 for the larger root, where c shifts the
    parabola by `delta` (2.71 presumably corresponds to a 90% CL chi-square
    threshold — confirm against callers).  Returns 0 with a warning when
    the discriminant is negative.
    """
    a = self.p_2
    b = self.p_1
    if self.vertex_x < 0:
        c = self.p_0 + delta
    else:
        c = self.p_0 - self.vertex_y + delta
    discriminant = b ** 2 - 4. * a * c
    if discriminant < 0.:
        print('WARNING')
        print(a, b, c)
        return 0.
    root = np.sqrt(discriminant)
    return max((root - b) / (2. * a), (-1. * root - b) / (2. * a))
Compute one-sided upper limit via profile method.
5,595
def bayesianUpperLimit(self, alpha, steps=1.e5, plot=False):
    """Compute one-sided upper limit using Bayesian Method of Helene.

    The delta-log-likelihood curve is densified, converted to a posterior
    (pdf = exp(loglike / 2)), numerically integrated, and the CDF is
    inverted at `alpha`.  Several numerical-stability measures are applied
    (peak normalization, negligible-tail truncation).

    :param alpha: confidence level at which to invert the CDF
    :param steps: number of abscissa points for the integration grid
    :param plot: unused; kept for interface compatibility
    :return: upper limit on the parameter
    """
    # BUGFIX: np.linspace requires an integer sample count; the historical
    # float default (1.e5) raises TypeError on modern numpy.
    steps = int(steps)
    x_dense, y_dense = self.densify()
    y_dense -= np.max(y_dense)  # normalize so the peak loglike is 0
    f = scipy.interpolate.interp1d(x_dense, y_dense, kind='linear')
    x = np.linspace(0., np.max(x_dense), steps)
    pdf = np.exp(f(x) / 2.)
    # Drop the negligible tail to keep the CDF inversion well-conditioned
    cut = (pdf / np.max(pdf)) > 1.e-10
    x = x[cut]
    pdf = pdf[cut]
    cdf = np.cumsum(pdf)
    cdf /= cdf[-1]
    cdf_reflect = scipy.interpolate.interp1d(cdf, x)
    return cdf_reflect(alpha)
Compute one - sided upper limit using Bayesian Method of Helene . Several methods of increasing numerical stability have been implemented .
5,596
def bayesianUpperLimit2(self, alpha, steps=1.e5, plot=False):
    """Compute one-sided upper limit using Bayesian Method of Helene.

    Works directly from the stored (x, y) delta-log-likelihood points
    rather than a densified curve.

    :param alpha: confidence level at which to invert the CDF
    :param steps: number of abscissa points for the integration grid
    :param plot: unused; kept for interface compatibility
    :return: upper limit on the parameter
    """
    # BUGFIX: np.linspace requires an integer sample count; the historical
    # float default (1.e5) raises TypeError on modern numpy.
    steps = int(steps)
    # Keep only points with non-negligible likelihood, exp(-30) ~ 1e-13
    cut = ((self.y / 2.) > -30.)
    try:
        f = scipy.interpolate.interp1d(self.x[cut], self.y[cut], kind='cubic')
    except Exception:
        # cubic interpolation needs >= 4 points; fall back to linear.
        # (Narrowed from a bare except, which also swallowed KeyboardInterrupt.)
        f = scipy.interpolate.interp1d(self.x[cut], self.y[cut], kind='linear')
    x = np.linspace(0., np.max(self.x[cut]), steps)
    y = np.exp(f(x) / 2.)
    # Truncate at the first grid point where the pdf is negligible
    # relative to the parabola's peak value.
    forbidden = np.nonzero((y / self.vertex_y) < 1.e-10)[0]
    if len(forbidden) > 0:
        index = forbidden[0]
        x = x[0:index]
        y = y[0:index]
    cdf = np.cumsum(y)
    cdf /= cdf[-1]
    cdf_reflect = scipy.interpolate.interp1d(cdf, x)
    return cdf_reflect(alpha)
Compute one - sided upper limit using Bayesian Method of Helene .
5,597
def confidenceInterval(self, alpha=0.6827, steps=1.e5, plot=False):
    """Compute two-sided confidence interval by taking x-values
    corresponding to the largest PDF values first (highest-density region).

    :param alpha: fraction of the posterior to enclose
    :param steps: number of abscissa points for the integration grid
    :param plot: unused; kept for interface compatibility
    :return: (lo, hi) bounds of the interval
    """
    # BUGFIX: np.linspace requires an integer sample count; the historical
    # float default (1.e5) raises TypeError on modern numpy.
    steps = int(steps)
    x_dense, y_dense = self.densify()
    y_dense -= np.max(y_dense)  # normalize so the peak loglike is 0
    f = scipy.interpolate.interp1d(x_dense, y_dense, kind='linear')
    x = np.linspace(0., np.max(x_dense), steps)
    pdf = np.exp(f(x) / 2.)
    # Drop the negligible tail
    cut = (pdf / np.max(pdf)) > 1.e-10
    x = x[cut]
    pdf = pdf[cut]
    # Highest-density selection: sort by pdf descending, accumulate to alpha
    sorted_pdf_indices = np.argsort(pdf)[::-1]
    cdf = np.cumsum(pdf[sorted_pdf_indices])
    cdf /= cdf[-1]
    sorted_pdf_index_max = np.argmin((cdf - alpha) ** 2)
    x_select = x[sorted_pdf_indices[0:sorted_pdf_index_max]]
    return np.min(x_select), np.max(x_select)
Compute a two-sided confidence interval by taking the x-values corresponding to the largest PDF values first.
5,598
def hms2dec(hms):
    """Convert longitude from hours,minutes,seconds in string or 3-array
    format to decimal degrees.

    :param hms: '10h30m00s'-style string, or an array whose transpose
        yields (hour, minute, second)
    :return: longitude in decimal degrees
    """
    DEGREE = 360.
    HOUR = 24.
    MINUTE = 60.
    SECOND = 3600.
    if isstring(hms):
        pieces = np.array(re.split('[hms]', hms))[:3].astype(float)
        hour, minute, second = pieces
    else:
        hour, minute, second = hms.T
    fractional_hours = hour + minute * 1. / MINUTE + second * 1. / SECOND
    return fractional_hours * (DEGREE / HOUR)
Convert longitude from hours minutes seconds in string or 3 - array format to decimal degrees .
5,599
def dms2dec(dms):
    """Convert latitude from degrees,minutes,seconds in string or 3-array
    format to decimal degrees.

    :param dms: '10d30m00s'-style string, or an array whose transpose
        yields (degree, minute, second)
    :return: latitude in decimal degrees, carrying the sign of the degrees
    """
    DEGREE = 360.
    HOUR = 24.
    MINUTE = 60.
    SECOND = 3600.
    if isstring(dms):
        # BUGFIX: previously split `hms` (an undefined name in this scope)
        # instead of `dms`, so string input raised NameError.
        degree, minute, second = np.array(re.split('[dms]', dms))[:3].astype(float)
    else:
        degree, minute, second = dms.T
    # Carry the sign of the degrees field through the minute/second terms
    sign = np.copysign(1.0, degree)
    decimal = np.abs(degree) + minute * 1. / MINUTE + second * 1. / SECOND
    decimal *= sign
    return decimal
Convert latitude from degrees minutes seconds in string or 3 - array format to decimal degrees .