| idx (int64, 0–63k) | question (string, 61–4.03k chars) | target (string, 6–1.23k chars) |
|---|---|---|
4,100
|
def from_int ( value ) : if not isinstance ( value , int ) : raise PyVLXException ( "value_has_to_be_int" ) if not Parameter . is_valid_int ( value ) : raise PyVLXException ( "value_out_of_range" ) return bytes ( [ value >> 8 & 255 , value & 255 ] )
|
Create raw value out of position value.
|
4,101
|
def is_valid_int ( value ) : if 0 <= value <= Parameter . MAX : return True if value == Parameter . UNKNOWN_VALUE : return True if value == Parameter . CURRENT_POSITION : return True return False
|
Test whether the value can be rendered from an int.
|
4,102
|
def from_raw ( raw ) : if not isinstance ( raw , bytes ) : raise PyVLXException ( "Position::raw_must_be_bytes" ) if len ( raw ) != 2 : raise PyVLXException ( "Position::raw_must_be_two_bytes" ) if raw != Position . from_int ( Position . CURRENT_POSITION ) and raw != Position . from_int ( Position . UNKNOWN_VALUE ) and Position . to_int ( raw ) > Position . MAX : raise PyVLXException ( "position::raw_exceed_limit" , raw = raw ) return raw
|
Test if raw packets are valid for initialization of Position .
|
4,103
|
def from_percent ( position_percent ) : if not isinstance ( position_percent , int ) : raise PyVLXException ( "Position::position_percent_has_to_be_int" ) if position_percent < 0 : raise PyVLXException ( "Position::position_percent_has_to_be_positive" ) if position_percent > 100 : raise PyVLXException ( "Position::position_percent_out_of_range" ) return bytes ( [ position_percent * 2 , 0 ] )
|
Create raw value out of percent position .
|
4,104
|
def product ( self ) : if self . product_group == 14 and self . product_type == 3 : return "KLF 200" return "Unknown Product: {}:{}" . format ( self . product_group , self . product_type )
|
Return product as human readable string .
|
4,105
|
async def main ( loop ) : PYVLXLOG . setLevel ( logging . DEBUG ) stream_handler = logging . StreamHandler ( ) stream_handler . setLevel ( logging . DEBUG ) PYVLXLOG . addHandler ( stream_handler ) pyvlx = PyVLX ( 'pyvlx.yaml' , loop = loop ) await pyvlx . load_scenes ( ) await pyvlx . load_nodes ( ) await asyncio . sleep ( 90 ) await pyvlx . disconnect ( )
|
Log packets from Bus .
|
4,106
|
async def rename ( self , name ) : set_node_name = SetNodeName ( pyvlx = self . pyvlx , node_id = self . node_id , name = name ) await set_node_name . do_api_call ( ) if not set_node_name . success : raise PyVLXException ( "Unable to rename node" ) self . name = name
|
Change name of node .
|
4,107
|
async def set_position ( self , position , wait_for_completion = True ) : command_send = CommandSend ( pyvlx = self . pyvlx , wait_for_completion = wait_for_completion , node_id = self . node_id , parameter = position ) await command_send . do_api_call ( ) if not command_send . success : raise PyVLXException ( "Unable to send command" ) await self . after_update ( )
|
Set window to desired position .
|
4,108
|
async def open ( self , wait_for_completion = True ) : await self . set_position ( position = Position ( position_percent = 0 ) , wait_for_completion = wait_for_completion )
|
Open window .
|
4,109
|
async def close ( self , wait_for_completion = True ) : await self . set_position ( position = Position ( position_percent = 100 ) , wait_for_completion = wait_for_completion )
|
Close window .
|
4,110
|
async def stop ( self , wait_for_completion = True ) : await self . set_position ( position = CurrentPosition ( ) , wait_for_completion = wait_for_completion )
|
Stop window .
|
4,111
|
def _get_sampleset ( model ) : if isinstance ( model , Model ) : if hasattr ( model , 'sampleset' ) : w = model . sampleset ( ) else : w = None else : w = model return w
|
Return sampleset of a model or None if undefined . Model could be a real model or evaluated sampleset .
|
4,112
|
def _shift_wavelengths ( model1 , model2 ) : if isinstance ( model1 , _models . RedshiftScaleFactor ) : val = _get_sampleset ( model2 ) if val is None : w = val else : w = model1 . inverse ( val ) elif isinstance ( model1 , _models . Scale ) : w = _get_sampleset ( model2 ) else : w = _get_sampleset ( model1 ) return w
|
One of the models is either RedshiftScaleFactor or Scale .
|
4,113
|
def get_waveset ( model ) : if not isinstance ( model , Model ) : raise SynphotError ( '{0} is not a model.' . format ( model ) ) if isinstance ( model , _CompoundModel ) : waveset = model . _tree . evaluate ( WAVESET_OPERATORS , getter = None ) else : waveset = _get_sampleset ( model ) return waveset
|
Get optimal wavelengths for sampling a given model .
|
4,114
|
def _get_meta ( model ) : if isinstance ( model , Model ) : w = model . meta else : w = model return w
|
Return metadata of a model . Model could be a real model or evaluated metadata .
|
4,115
|
def get_metadata ( model ) : if not isinstance ( model , Model ) : raise SynphotError ( '{0} is not a model.' . format ( model ) ) if isinstance ( model , _CompoundModel ) : metadata = model . _tree . evaluate ( METADATA_OPERATORS , getter = None ) else : metadata = deepcopy ( model . meta ) return metadata
|
Get metadata for a given model .
|
4,116
|
def lambda_max ( self ) : return ( ( const . b_wien . value / self . temperature ) * u . m ) . to ( u . AA ) . value
|
Peak wavelength in Angstrom when the curve is expressed as power density .
|
4,117
|
def _calc_sampleset ( w1 , w2 , step , minimal ) : if minimal : arr = [ w1 - step , w1 , w2 , w2 + step ] else : arr = np . arange ( w1 - step , w2 + step + step , step ) return arr
|
Calculate sampleset for each model .
|
4,118
|
def evaluate ( self , x , * args ) : a = ( self . amplitude * np . ones_like ( x ) ) * self . _flux_unit y = units . convert_flux ( x , a , units . PHOTLAM ) return y . value
|
One dimensional constant flux model function .
|
4,119
|
def _process_neg_flux ( self , x , y ) : if self . _keep_neg : return y old_y = None if np . isscalar ( y ) : if y < 0 : n_neg = 1 old_x = x old_y = y y = 0 else : x = np . asarray ( x ) y = np . asarray ( y ) i = np . where ( y < 0 ) n_neg = len ( i [ 0 ] ) if n_neg > 0 : old_x = x [ i ] old_y = y [ i ] y [ i ] = 0 if old_y is not None : warn_str = ( '{0} bin(s) contained negative flux or throughput' '; it/they will be set to zero.' . format ( n_neg ) ) warn_str += '\n points: {0}\n lookup_table: {1}' . format ( old_x , old_y ) self . meta [ 'warnings' ] . update ( { 'NegativeFlux' : warn_str } ) warnings . warn ( warn_str , AstropyUserWarning ) return y
|
Remove negative flux .
|
4,120
|
def evaluate ( x , amplitude , mean , stddev ) : return 1.0 - Gaussian1D . evaluate ( x , amplitude , mean , stddev )
|
GaussianAbsorption1D model function .
|
4,121
|
def fit_deriv ( x , amplitude , mean , stddev ) : import operator return list ( map ( operator . neg , Gaussian1D . fit_deriv ( x , amplitude , mean , stddev ) ) )
|
GaussianAbsorption1D model function derivatives .
|
4,122
|
def evaluate ( self , x , * args ) : xx = x / self . x_0 y = ( self . amplitude * xx ** ( - self . alpha ) ) * self . _flux_unit flux = units . convert_flux ( x , y , units . PHOTLAM ) return flux . value
|
Return flux in PHOTLAM . Assume input wavelength is in Angstrom .
|
4,123
|
def get_payment_request ( self , cart , request ) : try : self . charge ( cart , request ) thank_you_url = OrderModel . objects . get_latest_url ( ) js_expression = 'window.location.href="{}";' . format ( thank_you_url ) return js_expression except ( KeyError , stripe . error . StripeError ) as err : raise ValidationError ( err )
|
From the given request add a snippet to the page .
|
4,124
|
def refund_payment ( self ) : Money = MoneyMaker ( self . currency ) filter_kwargs = { 'transaction_id__startswith' : 'ch_' , 'payment_method' : StripePayment . namespace , } for payment in self . orderpayment_set . filter ( ** filter_kwargs ) : refund = stripe . Refund . create ( charge = payment . transaction_id ) if refund [ 'status' ] == 'succeeded' : amount = Money ( refund [ 'amount' ] ) / Money . subunits OrderPayment . objects . create ( order = self , amount = - amount , transaction_id = refund [ 'id' ] , payment_method = StripePayment . namespace ) del self . amount_paid if self . amount_paid : super ( OrderWorkflowMixin , self ) . refund_payment ( )
|
Refund the payment using Stripe's refund API.
|
4,125
|
def _create_in_progress ( self ) : instance = self . service . service . get_instance ( self . service . name ) if ( instance [ 'last_operation' ] [ 'state' ] == 'in progress' and instance [ 'last_operation' ] [ 'type' ] == 'create' ) : return True return False
|
Creating this service is handled asynchronously, so this method simply checks whether the create is in progress. If it is not in progress, we can infer that it either failed or succeeded.
|
4,126
|
def create ( self , max_wait = 180 , ** kwargs ) : self . service . create ( async = True , create_keys = False ) while self . _create_in_progress ( ) and max_wait > 0 : time . sleep ( 1 ) max_wait -= 1 cfg = self . service . _get_service_config ( ) self . service . settings . save ( cfg ) host = predix . config . get_env_key ( self . use_class , 'host' ) os . environ [ host ] = self . service . settings . data [ 'host' ] password = predix . config . get_env_key ( self . use_class , 'password' ) os . environ [ password ] = self . service . settings . data [ 'password' ] port = predix . config . get_env_key ( self . use_class , 'port' ) os . environ [ port ] = str ( self . service . settings . data [ 'port' ] )
|
Create an instance of the Predix Cache Service with the typical starting settings.
|
4,127
|
def _get_uri ( self ) : if not self . service . exists ( ) : logging . warning ( "Service does not yet exist." ) return self . service . settings . data [ 'uri' ]
|
Will return the uri for an existing instance .
|
4,128
|
def _get_zone_id ( self ) : if not self . service . exists ( ) : logging . warning ( "Service does not yet exist." ) return self . service . settings . data [ 'zone' ] [ 'http-header-value' ]
|
Will return the zone id for an existing instance .
|
4,129
|
def create ( self ) : self . service . create ( ) predix . config . set_env_value ( self . use_class , 'uri' , self . _get_uri ( ) ) predix . config . set_env_value ( self . use_class , 'zone_id' , self . _get_zone_id ( ) )
|
Create an instance of the Access Control Service with the typical starting settings .
|
4,130
|
def grant_client ( self , client_id ) : zone = self . service . settings . data [ 'zone' ] [ 'oauth-scope' ] scopes = [ 'openid' , zone , 'acs.policies.read' , 'acs.attributes.read' , 'acs.policies.write' , 'acs.attributes.write' ] authorities = [ 'uaa.resource' , zone , 'acs.policies.read' , 'acs.policies.write' , 'acs.attributes.read' , 'acs.attributes.write' ] self . service . uaa . uaac . update_client_grants ( client_id , scope = scopes , authorities = authorities ) return self . service . uaa . uaac . get_client ( client_id )
|
Grant the given client id all the scopes and authorities needed to work with the access control service .
|
4,131
|
def get ( self , path ) : uri = self . config . get_target ( ) + path headers = self . _get_headers ( ) logging . debug ( "URI=GET " + str ( uri ) ) logging . debug ( "HEADERS=" + str ( headers ) ) response = self . session . get ( uri , headers = headers ) if response . status_code == 200 : return response . json ( ) elif response . status_code == 401 : raise predix . admin . cf . config . CloudFoundryLoginError ( 'token invalid' ) else : response . raise_for_status ( )
|
Generic GET with headers
|
4,132
|
def post ( self , path , data ) : uri = self . config . get_target ( ) + path headers = self . _post_headers ( ) logging . debug ( "URI=POST " + str ( uri ) ) logging . debug ( "HEADERS=" + str ( headers ) ) logging . debug ( "BODY=" + str ( data ) ) response = self . session . post ( uri , headers = headers , data = json . dumps ( data ) ) if response . status_code in ( 200 , 201 , 202 ) : return response . json ( ) elif response . status_code == 401 : raise predix . admin . cf . config . CloudFoundryLoginError ( 'token invalid' ) else : logging . debug ( "STATUS=" + str ( response . status_code ) ) logging . debug ( "CONTENT=" + str ( response . content ) ) response . raise_for_status ( )
|
Generic POST with headers
|
4,133
|
def delete ( self , path , data = None , params = None ) : uri = self . config . get_target ( ) + path headers = { 'Authorization' : self . config . get_access_token ( ) } logging . debug ( "URI=DELETE " + str ( uri ) ) logging . debug ( "HEADERS=" + str ( headers ) ) response = self . session . delete ( uri , headers = headers , params = params , data = json . dumps ( data ) ) if response . status_code == 204 : return response else : logging . debug ( "STATUS=" + str ( response . status_code ) ) logging . debug ( "CONTENT=" + str ( response . content ) ) response . raise_for_status ( )
|
Generic DELETE with headers
|
4,134
|
def get_orgs ( self ) : orgs = [ ] for resource in self . _get_orgs ( ) [ 'resources' ] : orgs . append ( resource [ 'entity' ] [ 'name' ] ) return orgs
|
Returns a flat list of the names of the organizations the user belongs to.
|
4,135
|
def get_apps ( self ) : apps = [ ] for resource in self . _get_apps ( ) [ 'resources' ] : apps . append ( resource [ 'entity' ] [ 'name' ] ) return apps
|
Returns a flat list of the names for the apps in the organization .
|
4,136
|
def add_user ( self , user_name , role = 'user' ) : role_uri = self . _get_role_uri ( role = role ) return self . api . put ( path = role_uri , data = { 'username' : user_name } )
|
Calls CF's associate user with org. Valid roles include user, auditor, manager, and billing_manager.
|
4,137
|
def remove_user ( self , user_name , role ) : role_uri = self . _get_role_uri ( role = role ) return self . api . delete ( path = role_uri , data = { 'username' : user_name } )
|
Calls CF's remove user from org for the given role.
|
4,138
|
def _init_publisher_ws ( self ) : logging . debug ( "Initializing new web socket connection." ) url = ( 'wss://%s/v1/stream/messages/' % self . eventhub_client . host ) headers = self . _generate_publish_headers ( ) logging . debug ( "URL=" + str ( url ) ) logging . debug ( "HEADERS=" + str ( headers ) ) websocket . enableTrace ( False ) self . _ws = websocket . WebSocketApp ( url , header = headers , on_message = self . _on_ws_message , on_open = self . _on_ws_open , on_close = self . _on_ws_close ) self . _ws_thread = threading . Thread ( target = self . _ws . run_forever , kwargs = { 'ping_interval' : 30 } ) self . _ws_thread . daemon = True self . _ws_thread . start ( ) time . sleep ( 1 )
|
Create a new web socket connection with proper headers .
|
4,139
|
def create ( self ) : self . service . create ( ) os . environ [ self . __module__ + '.uri' ] = self . service . settings . data [ 'url' ] os . environ [ self . __module__ + '.zone_id' ] = self . get_predix_zone_id ( )
|
Create an instance of the Parking Planning Service with the typical starting settings .
|
4,140
|
def read_manifest ( self , encrypted = None ) : with open ( self . manifest_path , 'r' ) as input_file : self . manifest = yaml . safe_load ( input_file ) if 'env' not in self . manifest : self . manifest [ 'env' ] = { } if 'services' not in self . manifest : self . manifest [ 'services' ] = [ ] if 'PREDIXPY_ENCRYPTED' in self . manifest [ 'env' ] : self . encrypted = True if encrypted or self . encrypted : key = predix . config . get_crypt_key ( self . manifest_key ) f = Fernet ( key ) for var in self . manifest [ 'env' ] . keys ( ) : value = f . decrypt ( bytes ( self . manifest [ 'env' ] [ var ] , 'utf-8' ) ) self . manifest [ 'env' ] [ var ] = value . decode ( 'utf-8' ) self . app_name = self . manifest [ 'applications' ] [ 0 ] [ 'name' ] input_file . close ( )
|
Read an existing manifest .
|
4,141
|
def create_manifest ( self ) : self . manifest = { } self . manifest [ 'applications' ] = [ { 'name' : self . app_name } ] self . manifest [ 'services' ] = [ ] self . manifest [ 'env' ] = { 'PREDIXPY_VERSION' : str ( predix . version ) , } self . write_manifest ( )
|
Create a new manifest and write it to disk .
|
4,142
|
def _get_encrypted_manifest ( self ) : key = predix . config . get_crypt_key ( self . manifest_key ) f = Fernet ( key ) manifest = copy . deepcopy ( self . manifest ) for var in self . manifest [ 'env' ] . keys ( ) : value = str ( self . manifest [ 'env' ] [ var ] ) manifest [ 'env' ] [ var ] = f . encrypt ( bytes ( value , 'utf-8' ) ) . decode ( 'utf-8' ) return manifest
|
Returns the contents of the manifest with secret environment variables encrypted, without modifying the existing state in memory, which remains unencrypted.
|
4,143
|
def write_manifest ( self , manifest_path = None , encrypted = None ) : manifest_path = manifest_path or self . manifest_path self . manifest [ 'env' ] [ 'PREDIXPY_VERSION' ] = str ( predix . version ) with open ( manifest_path , 'w' ) as output_file : if encrypted or self . encrypted : self . manifest [ 'env' ] [ 'PREDIXPY_ENCRYPTED' ] = self . manifest_key content = self . _get_encrypted_manifest ( ) else : content = self . manifest if 'PREDIXPY_ENCRYPTED' in content [ 'env' ] : del ( content [ 'env' ] [ 'PREDIXPY_ENCRYPTED' ] ) yaml . safe_dump ( content , output_file , default_flow_style = False , explicit_start = True ) output_file . close ( )
|
Write manifest to disk .
|
4,144
|
def add_service ( self , service_name ) : if service_name not in self . manifest [ 'services' ] : self . manifest [ 'services' ] . append ( service_name )
|
Add the given service to the manifest .
|
4,145
|
def set_os_environ ( self ) : for key in self . manifest [ 'env' ] . keys ( ) : os . environ [ key ] = str ( self . manifest [ 'env' ] [ key ] )
|
Will load any environment variables found in the manifest file into the current process for use by applications .
|
4,146
|
def get_client_id ( self ) : self . _client_id = predix . config . get_env_value ( predix . app . Manifest , 'client_id' ) return self . _client_id
|
Return the client id that should have all the needed scopes and authorities for the services in this manifest .
|
4,147
|
def get_client_secret ( self ) : self . _client_secret = predix . config . get_env_value ( predix . app . Manifest , 'client_secret' ) return self . _client_secret
|
Return the client secret that should correspond with the client id .
|
4,148
|
def get_timeseries ( self , * args , ** kwargs ) : import predix . data . timeseries ts = predix . data . timeseries . TimeSeries ( * args , ** kwargs ) return ts
|
Returns an instance of the Time Series Service .
|
4,149
|
def get_asset ( self ) : import predix . data . asset asset = predix . data . asset . Asset ( ) return asset
|
Returns an instance of the Asset Service .
|
4,150
|
def get_uaa ( self ) : import predix . security . uaa uaa = predix . security . uaa . UserAccountAuthentication ( ) return uaa
|
Returns an instance of the UAA Service.
|
4,151
|
def get_acs ( self ) : import predix . security . acs acs = predix . security . acs . AccessControl ( ) return acs
|
Returns an instance of the Access Control Service.
|
4,152
|
def get_weather ( self ) : import predix . data . weather weather = predix . data . weather . WeatherForecast ( ) return weather
|
Returns an instance of the Weather Service .
|
4,153
|
def get_weather_forecast_days ( self , latitude , longitude , days = 1 , frequency = 1 , reading_type = None ) : params = { } if frequency not in [ 1 , 3 ] : raise ValueError ( "Reading frequency must be 1 or 3" ) params [ 'days' ] = days params [ 'source' ] = 'NWS' + str ( frequency ) params [ 'latitude' ] = latitude params [ 'longitude' ] = longitude if reading_type : reading_type = reading_type . replace ( ' ' , '%20' ) params [ 'reading_type' ] = urllib . quote_plus ( reading_type ) url = self . uri + '/v1/weather-forecast-days/' return self . service . _get ( url , params = params )
|
Return the weather forecast for a given location .
|
4,154
|
def get_weather_forecast ( self , latitude , longitude , start , end , frequency = 1 , reading_type = None ) : params = { } if frequency not in [ 1 , 3 ] : raise ValueError ( "Reading frequency must be 1 or 3" ) params [ 'source' ] = 'NWS' + str ( frequency ) params [ 'latitude' ] = latitude params [ 'longitude' ] = longitude params [ 'start_datetime_utc' ] = start params [ 'end_datetime_utc' ] = end if reading_type : reading_type = reading_type . replace ( ' ' , '%20' ) params [ 'reading_type' ] = reading_type url = self . uri + '/v1/weather-forecast-datetime/' return self . service . _get ( url , params = params )
|
Return the weather forecast for a given location for a specific datetime range specified in UTC.
|
4,155
|
def _generate_name ( self , space , service_name , plan_name ) : return str . join ( '-' , [ space , service_name , plan_name ] ) . lower ( )
|
Generate a name based on the space, service name, and plan.
|
4,156
|
def _get_config_path ( self ) : org = self . service . space . org . name space = self . service . space . name name = self . name return "~/.predix/%s/%s/%s.json" % ( org , space , name )
|
Return a sensible configuration path for caching config settings .
|
4,157
|
def _create_service ( self , parameters = { } , ** kwargs ) : logging . debug ( "_create_service()" ) logging . debug ( str . join ( ',' , [ self . service_name , self . plan_name , self . name , str ( parameters ) ] ) ) return self . service . create_service ( self . service_name , self . plan_name , self . name , parameters , ** kwargs )
|
Create a Cloud Foundry service that has custom parameters .
|
4,158
|
def _delete_service ( self , service_only = False ) : logging . debug ( '_delete_service()' ) return self . service . delete_service ( self . service_name )
|
Delete a Cloud Foundry service and any associations .
|
4,159
|
def _get_or_create_service_key ( self ) : keys = self . service . _get_service_keys ( self . name ) for key in keys [ 'resources' ] : if key [ 'entity' ] [ 'name' ] == self . service_name : return self . service . get_service_key ( self . name , self . service_name ) self . service . create_service_key ( self . name , self . service_name ) return self . service . get_service_key ( self . name , self . service_name )
|
Get a service key or create one if needed .
|
4,160
|
def _get_service_config ( self ) : key = self . _get_or_create_service_key ( ) config = { } config [ 'service_key' ] = [ { 'name' : self . name } ] config . update ( key [ 'entity' ] [ 'credentials' ] ) return config
|
Will get configuration for the service from a service key .
|
4,161
|
def create ( self , parameters = { } , create_keys = True , ** kwargs ) : cs = self . _create_service ( parameters = parameters , ** kwargs ) if create_keys : cfg = parameters cfg . update ( self . _get_service_config ( ) ) self . settings . save ( cfg )
|
Create the service .
|
4,162
|
def _get_or_create_uaa ( self , uaa ) : if isinstance ( uaa , predix . admin . uaa . UserAccountAuthentication ) : return uaa logging . debug ( "Initializing a new UAA" ) return predix . admin . uaa . UserAccountAuthentication ( )
|
Returns a valid UAA instance for performing administrative functions on services .
|
4,163
|
def grant_client ( self , client_id , publish = False , subscribe = False , publish_protocol = None , publish_topics = None , subscribe_topics = None , scope_prefix = 'predix-event-hub' , ** kwargs ) : scopes = [ 'openid' ] authorities = [ 'uaa.resource' ] zone_id = self . get_zone_id ( ) scopes . append ( '%s.zones.%s.user' % ( scope_prefix , zone_id ) ) authorities . append ( '%s.zones.%s.user' % ( scope_prefix , zone_id ) ) if publish_topics is not None or subscribe_topics is not None : raise Exception ( "multiple topics are not currently available in preidx-py" ) if publish_topics is None : publish_topics = [ 'topic' ] if subscribe_topics is None : subscribe_topics = [ 'topic' ] if publish : if publish_protocol is None : scopes . append ( '%s.zones.%s.grpc.publish' % ( scope_prefix , zone_id ) ) authorities . append ( '%s.zones.%s.grpc.publish' % ( scope_prefix , zone_id ) ) scopes . append ( '%s.zones.%s.wss.publish' % ( scope_prefix , zone_id ) ) authorities . append ( '%s.zones.%s.wss.publish' % ( scope_prefix , zone_id ) ) else : scopes . append ( '%s.zones.%s.%s.publish' % ( scope_prefix , zone_id , publish_protocol ) ) authorities . append ( '%s.zones.%s.%s.publish' % ( scope_prefix , zone_id , publish_protocol ) ) for topic in publish_topics : if publish_protocol is None : scopes . append ( '%s.zones.%s.%s.grpc.publish' % ( scope_prefix , zone_id , topic ) ) scopes . append ( '%s.zones.%s.%s.wss.publish' % ( scope_prefix , zone_id , topic ) ) scopes . append ( '%s.zones.%s.%s.user' % ( scope_prefix , zone_id , topic ) ) authorities . append ( '%s.zones.%s.%s.grpc.publish' % ( scope_prefix , zone_id , topic ) ) authorities . append ( '%s.zones.%s.%s.wss.publish' % ( scope_prefix , zone_id , topic ) ) authorities . append ( '%s.zones.%s.%s.user' % ( scope_prefix , zone_id , topic ) ) else : scopes . append ( '%s.zones.%s.%s.%s.publish' % ( scope_prefix , zone_id , topic , publish_protocol ) ) authorities . append ( '%s.zones.%s.%s.%s.publish' % ( scope_prefix , zone_id , topic , publish_protocol ) ) if subscribe : scopes . append ( '%s.zones.%s.grpc.subscribe' % ( scope_prefix , zone_id ) ) authorities . append ( '%s.zones.%s.grpc.subscribe' % ( scope_prefix , zone_id ) ) for topic in subscribe_topics : scopes . append ( '%s.zones.%s.%s.grpc.subscribe' % ( scope_prefix , zone_id , topic ) ) authorities . append ( '%s.zones.%s.%s.grpc.subscribe' % ( scope_prefix , zone_id , topic ) ) self . service . uaa . uaac . update_client_grants ( client_id , scope = scopes , authorities = authorities ) return self . service . uaa . uaac . get_client ( client_id )
|
Grant the given client id all the scopes and authorities needed to work with the eventhub service .
|
4,164
|
def get_eventhub_host ( self ) : for protocol in self . service . settings . data [ 'publish' ] [ 'protocol_details' ] : if protocol [ 'protocol' ] == 'grpc' : return protocol [ 'uri' ] [ 0 : protocol [ 'uri' ] . index ( ':' ) ]
|
Returns the publish gRPC endpoint for ingestion.
|
4,165
|
def _get_host ( self ) : if 'VCAP_SERVICES' in os . environ : services = json . loads ( os . getenv ( 'VCAP_SERVICES' ) ) host = services [ 'predix-blobstore' ] [ 0 ] [ 'credentials' ] [ 'host' ] else : host = predix . config . get_env_value ( self , 'host' ) if 'https://' not in host : host = 'https://' + host return host
|
Returns the host address for an instance of Blob Store service from environment inspection .
|
4,166
|
def _get_access_key_id ( self ) : if 'VCAP_SERVICES' in os . environ : services = json . loads ( os . getenv ( 'VCAP_SERVICES' ) ) return services [ 'predix-blobstore' ] [ 0 ] [ 'credentials' ] [ 'access_key_id' ] else : return predix . config . get_env_value ( self , 'access_key_id' )
|
Returns the access key for an instance of Blob Store service from environment inspection .
|
4,167
|
def list_objects ( self , bucket_name = None , ** kwargs ) : if not bucket_name : bucket_name = self . bucket_name return self . client . list_objects ( Bucket = bucket_name , ** kwargs )
|
This method is primarily for illustration: it just calls the boto3 client implementation of list_objects, which is a common task for first-time Predix BlobStore users.
|
4,168
|
def upload_file ( self , src_filepath , dest_filename = None , bucket_name = None , ** kwargs ) : if not bucket_name : bucket_name = self . bucket_name if not dest_filename : dest_filename = src_filepath return self . client . upload_file ( src_filepath , bucket_name , dest_filename , ** kwargs )
|
This method is primarily for illustration: it just calls the boto3 client implementation of upload_file, which is a common task for first-time Predix BlobStore users.
|
4,169
|
def _get_cloud_foundry_config ( self ) : config = os . path . expanduser ( self . config_file ) if not os . path . exists ( config ) : raise CloudFoundryLoginError ( 'You must run `cf login` to authenticate' ) with open ( config , "r" ) as data : return json . load ( data )
|
Reads the local cf CLI cache stored in the user's home directory.
|
4,170
|
def get_organization_guid ( self ) : if 'PREDIX_ORGANIZATION_GUID' in os . environ : return os . environ [ 'PREDIX_ORGANIZATION_GUID' ] else : info = self . _get_organization_info ( ) for key in ( 'Guid' , 'GUID' ) : if key in info . keys ( ) : return info [ key ] raise ValueError ( 'Unable to determine cf organization guid' )
|
Returns the GUID for the organization currently targeted .
|
4,171
|
def get_space_guid ( self ) : if 'PREDIX_SPACE_GUID' in os . environ : return os . environ [ 'PREDIX_SPACE_GUID' ] else : info = self . _get_space_info ( ) for key in ( 'Guid' , 'GUID' ) : if key in info . keys ( ) : return info [ key ] raise ValueError ( 'Unable to determine cf space guid' )
|
Returns the GUID for the space currently targeted .
|
4,172
|
def get_crypt_key ( key_path ) : key_path = os . path . expanduser ( key_path ) if os . path . exists ( key_path ) : with open ( key_path , 'r' ) as data : key = data . read ( ) else : key = Fernet . generate_key ( ) with open ( key_path , 'w' ) as output : output . write ( key ) return key
|
Get the user's PredixPy manifest key. Generate and store one if not yet generated.
|
4,173
|
def get_env_key ( obj , key = None ) : return str . join ( '_' , [ obj . __module__ . replace ( '.' , '_' ) . upper ( ) , key . upper ( ) ] )
|
Return environment variable key to use for lookups within a namespace represented by the package name .
|
4,174
|
def get_env_value ( obj , attribute ) : varname = get_env_key ( obj , attribute ) var = os . environ . get ( varname ) if not var : raise ValueError ( "%s must be set in your environment." % varname ) return var
|
Returns the environment variable value for the attribute of the given object .
|
4,175
|
def set_env_value ( obj , attribute , value ) : varname = get_env_key ( obj , attribute ) os . environ [ varname ] = value return varname
|
Set the environment variable value for the attribute of the given object .
|
4,176
|
def get_instance_guid ( self , service_name ) : summary = self . space . get_space_summary ( ) for service in summary [ 'services' ] : if service [ 'name' ] == service_name : return service [ 'guid' ] raise ValueError ( "No service with name '%s' found." % ( service_name ) )
|
Returns the GUID for the service instance with the given name .
|
4,177
|
def _get_service_bindings ( self , service_name ) : instance = self . get_instance ( service_name ) return self . api . get ( instance [ 'service_bindings_url' ] )
|
Return the service bindings for the service instance .
|
4,178
|
def delete_service_bindings ( self , service_name ) : instance = self . get_instance ( service_name ) return self . api . delete ( instance [ 'service_bindings_url' ] )
|
Remove service bindings to applications .
|
4,179
|
def _get_service_keys ( self , service_name ) : guid = self . get_instance_guid ( service_name ) uri = "/v2/service_instances/%s/service_keys" % ( guid ) return self . api . get ( uri )
|
Return the service keys for the given service .
|
4,180
|
def get_service_keys ( self , service_name ) : keys = [ ] for key in self . _get_service_keys ( service_name ) [ 'resources' ] : keys . append ( key [ 'entity' ] [ 'name' ] ) return keys
|
Returns a flat list of the names of the service keys for the given service .
|
4,181
|
def get_service_key ( self , service_name , key_name ) : for key in self . _get_service_keys ( service_name ) [ 'resources' ] : if key_name == key [ 'entity' ] [ 'name' ] : guid = key [ 'metadata' ] [ 'guid' ] uri = "/v2/service_keys/%s" % ( guid ) return self . api . get ( uri ) return None
|
Returns the service key details .
|
4,182
|
def create_service_key ( self , service_name , key_name ) : if self . has_key ( service_name , key_name ) : logging . warning ( "Reusing existing service key %s" % ( key_name ) ) return self . get_service_key ( service_name , key_name ) body = { 'service_instance_guid' : self . get_instance_guid ( service_name ) , 'name' : key_name } return self . api . post ( '/v2/service_keys' , body )
|
Create a service key for the given service .
|
4,183
|
def delete_service_key ( self , service_name , key_name ) : key = self . get_service_key ( service_name , key_name ) logging . info ( "Deleting service key %s for service %s" % ( key , service_name ) ) return self . api . delete ( key [ 'metadata' ] [ 'url' ] )
|
Delete a service key for the given service .
|
4,184
|
def get_instance ( self , service_name ) : for resource in self . space . _get_instances ( ) : if resource [ 'entity' ] [ 'name' ] == service_name : return resource [ 'entity' ]
|
Retrieves a service instance with the given name .
|
4,185
|
def get_service_plan_for_service ( self , service_name ) : services = self . get_services ( ) for service in services [ 'resources' ] : if service [ 'entity' ] [ 'label' ] == service_name : response = self . api . get ( service [ 'entity' ] [ 'service_plans_url' ] ) return response [ 'resources' ]
|
Return the service plans available for a given service .
|
4,186
|
def create_service ( self , service_type , plan_name , service_name , params , async = False , ** kwargs ) : if self . space . has_service_with_name ( service_name ) : logging . warning ( "Service already exists with that name." ) return self . get_instance ( service_name ) if self . space . has_service_of_type ( service_type ) : logging . warning ( "Service type already exists." ) guid = self . get_service_plan_guid ( service_type , plan_name ) if not guid : raise ValueError ( "No service plan named: %s" % ( plan_name ) ) body = { 'name' : service_name , 'space_guid' : self . space . guid , 'service_plan_guid' : guid , 'parameters' : params } uri = '/v2/service_instances?accepts_incomplete=true' if async : uri += '&async=true' return self . api . post ( uri , body )
|
Create a service instance .
|
4,187
|
def _get_query_uri ( self ) : if 'VCAP_SERVICES' in os . environ : services = json . loads ( os . getenv ( 'VCAP_SERVICES' ) ) predix_timeseries = services [ 'predix-timeseries' ] [ 0 ] [ 'credentials' ] return predix_timeseries [ 'query' ] [ 'uri' ] . partition ( '/v1' ) [ 0 ] else : return predix . config . get_env_value ( self , 'query_uri' )
|
Returns the URI endpoint for performing queries of a Predix Time Series instance from environment inspection .
|
4,188
|
def _get_query_zone_id ( self ) : if 'VCAP_SERVICES' in os . environ : services = json . loads ( os . getenv ( 'VCAP_SERVICES' ) ) predix_timeseries = services [ 'predix-timeseries' ] [ 0 ] [ 'credentials' ] return predix_timeseries [ 'query' ] [ 'zone-http-header-value' ] else : return predix . config . get_env_value ( self , 'query_zone_id' )
|
Returns the ZoneId for performing queries of a Predix Time Series instance from environment inspection .
|
4,189
|
def _get_datapoints ( self , params ) : url = self . query_uri + '/v1/datapoints' return self . service . _get ( url , params = params )
|
Will make a direct REST call with the given json body payload to get datapoints .
|
4,190
|
def get_values ( self , * args , ** kwargs ) : if isinstance ( args [ 0 ] , list ) : raise ValueError ( "Can only get_values() for a single tag." ) response = self . get_datapoints ( * args , ** kwargs ) for value in response [ 'tags' ] [ 0 ] [ 'results' ] [ 0 ] [ 'values' ] : yield [ datetime . datetime . utcfromtimestamp ( value [ 0 ] / 1000 ) , value [ 1 ] , value [ 2 ] ]
|
Convenience method that, for simple single-tag queries, returns just the values to be iterated over.
|
4,191
|
def get_datapoints ( self , tags , start = None , end = None , order = None , limit = None , qualities = None , attributes = None , measurement = None , aggregations = None , post = False ) : params = { } if not start : start = '1w-ago' logging . warning ( "Defaulting query for data with start date %s" % ( start ) ) params [ 'start' ] = start if end : params [ 'end' ] = end params [ 'tags' ] = [ ] if not isinstance ( tags , list ) : tags = [ tags ] for tag in tags : query = { } query [ 'name' ] = tag if limit : query [ 'limit' ] = int ( limit ) if order : query [ 'order' ] = order filters = { } if qualities is not None : if isinstance ( qualities , int ) or isinstance ( qualities , str ) : qualities = [ qualities ] for i , quality in enumerate ( qualities ) : qualities [ i ] = str ( quality ) filters [ 'qualities' ] = { "values" : qualities } if attributes is not None : if not isinstance ( attributes , dict ) : raise ValueError ( "Attribute filters must be dictionary." ) filters [ 'attributes' ] = attributes if measurement is not None : filters [ 'measurements' ] = { 'condition' : measurement [ 0 ] , 'values' : measurement [ 1 ] } if filters : query [ 'filters' ] = filters if aggregations is not None : if not isinstance ( aggregations , list ) : aggregations = [ aggregations ] query [ 'aggregations' ] = [ ] for aggregation in aggregations : query [ 'aggregations' ] . append ( { 'sampling' : { 'datapoints' : 1 } , 'type' : aggregation } ) params [ 'tags' ] . append ( query ) if post : return self . _post_datapoints ( params ) else : return self . _get_datapoints ( { "query" : json . dumps ( params ) } )
|
Returns all of the datapoints that match the given query .
|
4,192
|
def _create_connection ( self ) : logging . debug ( "Initializing new websocket connection." ) headers = { 'Authorization' : self . service . _get_bearer_token ( ) , 'Predix-Zone-Id' : self . ingest_zone_id , 'Content-Type' : 'application/json' , } url = self . ingest_uri logging . debug ( "URL=" + str ( url ) ) logging . debug ( "HEADERS=" + str ( headers ) ) return websocket . create_connection ( url , header = headers )
|
Create a new websocket connection with proper headers .
|
4,193
|
def _get_websocket ( self , reuse = True ) : if self . ws and reuse : if self . ws . connected : return self . ws logging . debug ( "Stale connection, reconnecting." ) self . ws = self . _create_connection ( ) return self . ws
|
Reuse existing connection or create a new connection .
|
4,194
|
def _send_to_timeseries ( self , message ) : logging . debug ( "MESSAGE=" + str ( message ) ) result = None try : ws = self . _get_websocket ( ) ws . send ( json . dumps ( message ) ) result = ws . recv ( ) except ( websocket . WebSocketConnectionClosedException , Exception ) as e : logging . debug ( "Connection failed, will try again." ) logging . debug ( e ) ws = self . _get_websocket ( reuse = False ) ws . send ( json . dumps ( message ) ) result = ws . recv ( ) logging . debug ( "RESULT=" + str ( result ) ) return result
|
Establish or reuse socket connection and send the given message to the timeseries service .
|
4,195
|
def execute ( self , statement , * args , ** kwargs ) : with self . engine . connect ( ) as conn : s = sqlalchemy . sql . text ( statement ) return conn . execute ( s , ** kwargs )
|
This convenience method will execute the query passed in as is . For more complex functionality you may want to use the sqlalchemy engine directly but this serves as an example implementation .
|
4,196
|
def create_temp_space ( ) : unique_name = str ( uuid . uuid4 ( ) ) . split ( '-' ) [ - 1 ] admin = predix . admin . cf . spaces . Space ( ) res = admin . create_space ( unique_name ) space = predix . admin . cf . spaces . Space ( guid = res [ 'metadata' ] [ 'guid' ] , name = res [ 'entity' ] [ 'name' ] ) space . target ( ) return space
|
Create a new temporary cloud foundry space for a project .
|
4,197
|
def _get_spaces ( self ) : guid = self . api . config . get_organization_guid ( ) uri = '/v2/organizations/%s/spaces' % ( guid ) return self . api . get ( uri )
|
Get the spaces for the targeted organization.
|
4,198
|
def target ( self ) : os . environ [ 'PREDIX_SPACE_GUID' ] = self . guid os . environ [ 'PREDIX_SPACE_NAME' ] = self . name os . environ [ 'PREDIX_ORGANIZATION_GUID' ] = self . org . guid os . environ [ 'PREDIX_ORGANIZATION_NAME' ] = self . org . name
|
Target the current space for any forthcoming Cloud Foundry operations .
|
4,199
|
def get_spaces ( self ) : self . spaces = [ ] for resource in self . _get_spaces ( ) [ 'resources' ] : self . spaces . append ( resource [ 'entity' ] [ 'name' ] ) return self . spaces
|
Return a flat list of the names for spaces in the organization .
|
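The rows above pair a tokenized Python function (`question`) with its reference docstring (`target`), keyed by `idx`. As a rough illustration of how such a split could be inspected with the Hugging Face `datasets` library, here is a minimal sketch; the repository id `your-org/code-docstring-pairs` and the `train` split name are placeholders, not this dataset's actual identifiers.

```python
# Minimal sketch, assuming a Hugging Face dataset with the columns shown above
# (idx, question, target). The repository id and split name are placeholders.
from datasets import load_dataset

ds = load_dataset("your-org/code-docstring-pairs", split="train")  # hypothetical id

# Look up one of the previewed rows and print its code/docstring pair.
# Positional index 4100 is assumed to line up with idx 4,100 above.
row = ds[4100]
print(row["idx"])                    # row index
print(row["question"][:120], "...")  # tokenized source of the function
print(row["target"])                 # reference docstring
```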