| sentence1 (string, 52 to 3.87M chars) | sentence2 (string, 1 to 47.2k chars) | label (1 class: entailment) |
|---|---|---|
def charge(self, cart, request):
"""
Use the Stripe token from the request and charge immediately.
This view is invoked by the Javascript function `scope.charge()` delivered
by `get_payment_request`.
"""
token_id = cart.extra['payment_extra_data']['token_id']
if LooseVersion(SHOP_VERSION) < LooseVersion('0.11'):
charge = stripe.Charge.create(
amount=cart.total.as_integer(),
currency=cart.total.currency,
source=token_id,
description=settings.SHOP_STRIPE['PURCHASE_DESCRIPTION']
)
if charge['status'] == 'succeeded':
order = OrderModel.objects.create_from_cart(cart, request)
order.add_stripe_payment(charge)
order.save()
else:
order = OrderModel.objects.create_from_cart(cart, request)
charge = stripe.Charge.create(
amount=cart.total.as_integer(),
currency=cart.total.currency,
source=token_id,
transfer_group=order.get_number(),
description=settings.SHOP_STRIPE['PURCHASE_DESCRIPTION'],
)
if charge['status'] == 'succeeded':
order.populate_from_cart(cart, request)
order.add_stripe_payment(charge)
order.save()
if charge['status'] != 'succeeded':
msg = "Stripe returned status '{status}' for id: {id}"
raise stripe.error.InvalidRequestError(msg.format(**charge))
|
Use the Stripe token from the request and charge immediately.
This view is invoked by the Javascript function `scope.charge()` delivered
by `get_payment_request`.
|
entailment
|
def refund_payment(self):
"""
Refund the payment using Stripe's refunding API.
"""
Money = MoneyMaker(self.currency)
filter_kwargs = {
'transaction_id__startswith': 'ch_',
'payment_method': StripePayment.namespace,
}
for payment in self.orderpayment_set.filter(**filter_kwargs):
refund = stripe.Refund.create(charge=payment.transaction_id)
if refund['status'] == 'succeeded':
amount = Money(refund['amount']) / Money.subunits
OrderPayment.objects.create(order=self, amount=-amount, transaction_id=refund['id'],
payment_method=StripePayment.namespace)
del self.amount_paid # to invalidate the cache
if self.amount_paid:
# proceed with other payment service providers
super(OrderWorkflowMixin, self).refund_payment()
|
Refund the payment using Stripe's refunding API.
|
entailment
|
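A brief note on the subunit conversion above: Stripe reports refund amounts in the currency's smallest unit, so the code divides by `Money.subunits` to recover a decimal amount. A minimal sketch of the arithmetic, assuming a currency with 100 subunits per unit (the `Money` class itself is not reproduced here):

```python
# Illustration only: Stripe amounts arrive in subunits (e.g. cents).
refund_amount = 1999   # hypothetical value of refund['amount']
subunits = 100         # assumed subunits-per-unit for this currency
amount = refund_amount / float(subunits)
print(amount)  # 19.99, the value recorded as a negative OrderPayment
```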
def create(self):
"""
Create an instance of the US Weather Forecast Service with
typical starting settings.
"""
self.service.create()
# Set env vars for immediate use
zone_id = predix.config.get_env_key(self.use_class, 'zone_id')
zone = self.service.settings.data['zone']['http-header-value']
os.environ[zone_id] = zone
uri = predix.config.get_env_key(self.use_class, 'uri')
os.environ[uri] = self.service.settings.data['uri']
|
Create an instance of the US Weather Forecast Service with
typical starting settings.
|
entailment
|
def add_to_manifest(self, manifest):
"""
Add useful details to the manifest about this service
so that it can be used in an application.
:param manifest: A predix.admin.app.Manifest object
instance that manages reading/writing manifest config
for a cloud foundry app.
"""
# Add this service to list of services
manifest.add_service(self.service.name)
# Add environment variables
zone_id = predix.config.get_env_key(self.use_class, 'zone_id')
manifest.add_env_var(zone_id,
self.service.settings.data['zone']['http-header-value'])
uri = predix.config.get_env_key(self.use_class, 'uri')
manifest.add_env_var(uri, self.service.settings.data['uri'])
manifest.write_manifest()
|
Add useful details to the manifest about this service
so that it can be used in an application.
:param manifest: A predix.admin.app.Manifest object
instance that manages reading/writing manifest config
for a cloud foundry app.
|
entailment
|
def _create_in_progress(self):
"""
Creating this service is handled asynchronously so this method will
simply check if the create is in progress. If it is not in progress,
we could probably infer it either failed or succeeded.
"""
instance = self.service.service.get_instance(self.service.name)
if (instance['last_operation']['state'] == 'in progress' and
instance['last_operation']['type'] == 'create'):
return True
return False
|
Creating this service is handled asynchronously so this method will
simply check if the create is in progress. If it is not in progress,
we could probably infer it either failed or succeeded.
|
entailment
|
def create(self, max_wait=180, **kwargs):
"""
Create an instance of the Predix Cache Service with the typical
starting settings.
:param max_wait: service is created asynchronously, so will only wait
this number of seconds before giving up.
"""
# Will need to wait for the service to be provisioned before can add
# service keys and get env details.
self.service.create(async=True, create_keys=False)
while self._create_in_progress() and max_wait > 0:
time.sleep(1)
max_wait -= 1
# Now get the service env (via service keys)
cfg = self.service._get_service_config()
self.service.settings.save(cfg)
host = predix.config.get_env_key(self.use_class, 'host')
os.environ[host] = self.service.settings.data['host']
password = predix.config.get_env_key(self.use_class, 'password')
os.environ[password] = self.service.settings.data['password']
port = predix.config.get_env_key(self.use_class, 'port')
os.environ[port] = str(self.service.settings.data['port'])
|
Create an instance of the Predix Cache Service with the typical
starting settings.
:param max_wait: service is created asynchronously, so will only wait
this number of seconds before giving up.
|
entailment
|
def add_to_manifest(self, manifest):
"""
Add useful details to the manifest about this service so
that it can be used in an application.
:param manifest: A predix.admin.app.Manifest object instance
that manages reading/writing manifest config for a
cloud foundry app.
"""
manifest.add_service(self.service.name)
host = predix.config.get_env_key(self.use_class, 'host')
manifest.add_env_var(host, self.service.settings.data['host'])
password = predix.config.get_env_key(self.use_class, 'password')
manifest.add_env_var(password, self.service.settings.data['password'])
port = predix.config.get_env_key(self.use_class, 'port')
manifest.add_env_var(port, self.service.settings.data['port'])
manifest.write_manifest()
|
Add useful details to the manifest about this service so
that it can be used in an application.
:param manifest: A predix.admin.app.Manifest object instance
that manages reading/writing manifest config for a
cloud foundry app.
|
entailment
|
def _get_uri(self):
"""
Will return the uri for an existing instance.
"""
if not self.service.exists():
logging.warning("Service does not yet exist.")
return self.service.settings.data['uri']
|
Will return the uri for an existing instance.
|
entailment
|
def _get_zone_id(self):
"""
Will return the zone id for an existing instance.
"""
if not self.service.exists():
logging.warning("Service does not yet exist.")
return self.service.settings.data['zone']['http-header-value']
|
Will return the zone id for an existing instance.
|
entailment
|
def create(self):
"""
Create an instance of the Access Control Service with the typical
starting settings.
"""
self.service.create()
# Set environment variables for immediate use
predix.config.set_env_value(self.use_class, 'uri', self._get_uri())
predix.config.set_env_value(self.use_class, 'zone_id',
self._get_zone_id())
|
Create an instance of the Access Control Service with the typical
starting settings.
|
entailment
|
def grant_client(self, client_id):
"""
Grant the given client id all the scopes and authorities
needed to work with the access control service.
"""
zone = self.service.settings.data['zone']['oauth-scope']
scopes = ['openid', zone,
'acs.policies.read', 'acs.attributes.read',
'acs.policies.write', 'acs.attributes.write']
authorities = ['uaa.resource', zone,
'acs.policies.read', 'acs.policies.write',
'acs.attributes.read', 'acs.attributes.write']
self.service.uaa.uaac.update_client_grants(client_id, scope=scopes,
authorities=authorities)
return self.service.uaa.uaac.get_client(client_id)
|
Grant the given client id all the scopes and authorities
needed to work with the access control service.
|
entailment
|
def add_to_manifest(self, manifest):
"""
Add useful details to the manifest about this service
so that it can be used in an application.
:param manifest: A predix.admin.app.Manifest object
instance that manages reading/writing manifest config
for a cloud foundry app.
"""
# Add this service to list of services
manifest.add_service(self.service.name)
# Add environment variables
uri = predix.config.get_env_key(self.use_class, 'uri')
manifest.add_env_var(uri, self._get_uri())
zone_id = predix.config.get_env_key(self.use_class, 'zone_id')
manifest.add_env_var(zone_id, self._get_zone_id())
manifest.write_manifest()
|
Add useful details to the manifest about this service
so that it can be used in an application.
:param manifest: A predix.admin.app.Manifest object
instance that manages reading/writing manifest config
for a cloud foundry app.
|
entailment
|
def get(self, path):
"""
Generic GET with headers
"""
uri = self.config.get_target() + path
headers = self._get_headers()
logging.debug("URI=GET " + str(uri))
logging.debug("HEADERS=" + str(headers))
response = self.session.get(uri, headers=headers)
if response.status_code == 200:
return response.json()
elif response.status_code == 401:
raise predix.admin.cf.config.CloudFoundryLoginError('token invalid')
else:
response.raise_for_status()
|
Generic GET with headers
|
entailment
|
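As a usage sketch for the generic `get()` helper above, a caller can fetch a Cloud Foundry v2 listing and walk its `resources` entries, mirroring how `get_orgs()` and `get_apps()` do it later in this file. `api` is assumed to be an instance of the class that defines `get()`:

```python
# Hedged example; the endpoint and the resources/entity unpacking follow the
# pattern used elsewhere in this module.
orgs = api.get('/v2/organizations')
for resource in orgs['resources']:
    print(resource['entity']['name'])
```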
def post(self, path, data):
"""
Generic POST with headers
"""
uri = self.config.get_target() + path
headers = self._post_headers()
logging.debug("URI=POST " + str(uri))
logging.debug("HEADERS=" + str(headers))
logging.debug("BODY=" + str(data))
response = self.session.post(uri, headers=headers,
data=json.dumps(data))
if response.status_code in (200, 201, 202):
return response.json()
elif response.status_code == 401:
raise predix.admin.cf.config.CloudFoundryLoginError('token invalid')
else:
logging.debug("STATUS=" + str(response.status_code))
logging.debug("CONTENT=" + str(response.content))
response.raise_for_status()
|
Generic POST with headers
|
entailment
|
def delete(self, path, data=None, params=None):
"""
Generic DELETE with headers
"""
uri = self.config.get_target() + path
headers = {
'Authorization': self.config.get_access_token()
}
logging.debug("URI=DELETE " + str(uri))
logging.debug("HEADERS=" + str(headers))
response = self.session.delete(
uri, headers=headers, params=params, data=json.dumps(data))
if response.status_code == 204:
return response
else:
logging.debug("STATUS=" + str(response.status_code))
logging.debug("CONTENT=" + str(response.content))
response.raise_for_status()
|
Generic DELETE with headers
|
entailment
|
def get_orgs(self):
"""
Returns a flat list of the names for the organizations
the user belongs to.
"""
orgs = []
for resource in self._get_orgs()['resources']:
orgs.append(resource['entity']['name'])
return orgs
|
Returns a flat list of the names for the organizations
the user belongs to.
|
entailment
|
def get_apps(self):
"""
Returns a flat list of the names for the apps in
the organization.
"""
apps = []
for resource in self._get_apps()['resources']:
apps.append(resource['entity']['name'])
return apps
|
Returns a flat list of the names for the apps in
the organization.
|
entailment
|
def add_user(self, user_name, role='user'):
"""
Calls CF's associate user with org. Valid roles include `user`, `auditor`,
`manager`, `billing_manager`
"""
role_uri = self._get_role_uri(role=role)
return self.api.put(path=role_uri, data={'username': user_name})
|
Calls CF's associate user with org. Valid roles include `user`, `auditor`,
`manager`, `billing_manager`
|
entailment
|
def remove_user(self, user_name, role):
"""
Calls CF's remove user with org
"""
role_uri = self._get_role_uri(role=role)
return self.api.delete(path=role_uri, data={'username': user_name})
|
Calls CF's remove user with org
|
entailment
|
def add_message(self, id, body, tags=False):
"""
add messages to the tx_queue
:param id: str message Id
:param body: str the message body
:param tags: dict[string->string] tags to be associated with the message
:return: self
"""
if not tags:
tags = {}
try:
self._tx_queue_lock.acquire()
self._tx_queue.append(
EventHub_pb2.Message(id=id, body=body, tags=tags, zone_id=self.eventhub_client.zone_id))
finally:
self._tx_queue_lock.release()
return self
|
add messages to the tx_queue
:param id: str message Id
:param body: str the message body
:param tags: dict[string->string] tags to be associated with the message
:return: self
|
entailment
|
def publish_queue(self):
"""
Publish all messages that have been added to the queue for the configured protocol
:return: the rx queue of publish acks
"""
self.last_send_time = time.time()
try:
self._tx_queue_lock.acquire()
start_length = len(self._rx_queue)
publish_amount = len(self._tx_queue)
if self.config.protocol == PublisherConfig.Protocol.GRPC:
self._publish_queue_grpc()
else:
self._publish_queue_wss()
self._tx_queue = []
finally:
self._tx_queue_lock.release()
if self.config.publish_type == self.config.Type.SYNC:
start_time = time.time()
while time.time() - start_time < self.config.sync_timeout and \
len(self._rx_queue) - start_length < publish_amount:
pass
return self._rx_queue
|
Publish all messages that have been added to the queue for the configured protocol
:return: the rx queue of publish acks
|
entailment
|
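A short, hedged usage sketch of the publisher queue API shown above; `publisher` is assumed to be an already-configured Eventhub publisher from this module and the ids/bodies are placeholders:

```python
# Queue a couple of messages, then publish them over the configured protocol.
publisher.add_message('id-1', 'hello eventhub', tags={'source': 'sensor-1'})
publisher.add_message('id-2', 'second message')
acks = publisher.publish_queue()   # returns the rx queue of publish acks
for ack in acks:
    print(ack)
```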
def ack_generator(self):
"""
generator for acks to yield messages to the user in an async configuration
:return: messages as they come in
"""
if self.config.is_sync():
logging.warning("can't use generator on a sync publisher")
return
while self._run_ack_generator:
while len(self._rx_queue) != 0:
logging.debug('yielding to client')
yield self._rx_queue.pop(0)
return
|
generator for acks to yield messages to the user in an async configuration
:return: messages as they come in
|
entailment
|
def _auto_send(self):
"""
auto-send blocking function; publishes when the send interval has elapsed or the queued message count threshold has been reached
:return:
"""
while True:
if time.time() - self.last_send_time > self.config.async_auto_send_interval_millis or \
len(self._tx_queue) >= self.config.async_auto_send_amount:
self.publish_queue()
|
auto-send blocking function; publishes when the send interval has elapsed or the queued message count threshold has been reached
:return:
|
entailment
|
def _generate_publish_headers(self):
"""
generate the headers for the connection to event hub service based on the provided config
:return: {} headers
"""
headers = {
'predix-zone-id': self.eventhub_client.zone_id
}
token = self.eventhub_client.service._get_bearer_token()
if self.config.is_grpc():
headers['authorization'] = token[(token.index(' ') + 1):]
else:
headers['authorization'] = token
if self.config.topic == '':
headers['topic'] = self.eventhub_client.zone_id + '_topic'
else:
headers['topic'] = self.config.topic
if self.config.publish_type == self.config.Type.SYNC:
headers['sync-acks'] = 'true'
else:
headers['sync-acks'] = 'false'
headers['send-acks-interval'] = str(self.config.async_cache_ack_interval_millis)
headers['acks'] = str(self.config.async_enable_acks).lower()
headers['nacks'] = str(self.config.async_enable_nacks_only).lower()
headers['cache-acks'] = str(self.config.async_cache_acks_and_nacks).lower()
return headers
|
generate the headers for the connection to event hub service based on the provided config
:return: {} headers
|
entailment
|
def _publisher_callback(self, publish_ack):
"""
publisher callback that grpc and web socket handlers can pass messages to;
adds the received ack onto the queue
:param publish_ack: EventHub_pb2.Ack the ack received from either wss or grpc
:return: None
"""
logging.debug("ack received: " + str(publish_ack).replace('\n', ' '))
self._rx_queue.append(publish_ack)
|
publisher callback that grpc and web socket handlers can pass messages to;
adds the received ack onto the queue
:param publish_ack: EventHub_pb2.Ack the ack received from either wss or grpc
:return: None
|
entailment
|
def _init_grpc_publisher(self):
"""
initialize the grpc publisher, builds the stub and then starts the grpc manager
:return: None
"""
self._stub = EventHub_pb2_grpc.PublisherStub(channel=self._channel)
self.grpc_manager = Eventhub.GrpcManager(stub_call=self._stub.send,
on_msg_callback=self._publisher_callback,
metadata=self._generate_publish_headers().items())
|
initialize the grpc publisher, builds the stub and then starts the grpc manager
:return: None
|
entailment
|
def _publish_queue_grpc(self):
"""
send the messages in the tx queue to the GRPC manager
:return: None
"""
messages = EventHub_pb2.Messages(msg=self._tx_queue)
publish_request = EventHub_pb2.PublishRequest(messages=messages)
self.grpc_manager.send_message(publish_request)
|
send the messages in the tx queue to the GRPC manager
:return: None
|
entailment
|
def _publish_queue_wss(self):
"""
send the messages down the web socket connection as a json object
:return: None
"""
msg = []
for m in self._tx_queue:
msg.append({'id': m.id, 'body': m.body, 'zone_id': m.zone_id})
self._ws.send(json.dumps(msg), opcode=websocket.ABNF.OPCODE_BINARY)
|
send the messages down the web socket connection as a json object
:return: None
|
entailment
|
def _init_publisher_ws(self):
"""
Create a new web socket connection with proper headers.
"""
logging.debug("Initializing new web socket connection.")
url = ('wss://%s/v1/stream/messages/' % self.eventhub_client.host)
headers = self._generate_publish_headers()
logging.debug("URL=" + str(url))
logging.debug("HEADERS=" + str(headers))
websocket.enableTrace(False)
self._ws = websocket.WebSocketApp(url,
header=headers,
on_message=self._on_ws_message,
on_open=self._on_ws_open,
on_close=self._on_ws_close)
self._ws_thread = threading.Thread(target=self._ws.run_forever, kwargs={'ping_interval': 30})
self._ws_thread.daemon = True
self._ws_thread.start()
time.sleep(1)
|
Create a new web socket connection with proper headers.
|
entailment
|
def _on_ws_message(self, ws, message):
"""
on_message callback of websocket class, load the message into a dict and then
update an Ack Object with the results
:param ws: web socket connection that the message was received on
:param message: web socket message in text form
:return: None
"""
logging.debug(message)
json_list = json.loads(message)
for rx_ack in json_list:
ack = EventHub_pb2.Ack()
for key, value in rx_ack.items():
setattr(ack, key, value)
self._publisher_callback(ack)
|
on_message callback of websocket class, load the message into a dict and then
update an Ack Object with the results
:param ws: web socket connection that the message was received on
:param message: web socket message in text form
:return: None
|
entailment
|
def create(self):
"""
Create an instance of the Parking Planning Service with the
typical starting settings.
"""
self.service.create()
os.environ[self.__module__ + '.uri'] = self.service.settings.data['url']
os.environ[self.__module__ + '.zone_id'] = self.get_predix_zone_id()
|
Create an instance of the Parking Planning Service with the
typical starting settings.
|
entailment
|
def add_to_manifest(self, manifest):
"""
Add useful details to the manifest about this service
so that it can be used in an application.
:param manifest: A predix.admin.app.Manifest object
instance that manages reading/writing manifest config
for a cloud foundry app.
"""
# Add this service to list of services
manifest.add_service(self.service.name)
# Add environment variables
manifest.add_env_var(self.__module__ + '.uri',
self.service.settings.data['url'])
manifest.add_env_var(self.__module__ + '.zone_id',
self.get_predix_zone_id())
manifest.write_manifest()
|
Add useful details to the manifest about this service
so that it can be used in an application.
:param manifest: A predix.admin.app.Manifest object
instance that manages reading/writing manifest config
for a cloud foundry app.
|
entailment
|
def read_manifest(self, encrypted=None):
"""
Read an existing manifest.
"""
with open(self.manifest_path, 'r') as input_file:
self.manifest = yaml.safe_load(input_file)
if 'env' not in self.manifest:
self.manifest['env'] = {}
if 'services' not in self.manifest:
self.manifest['services'] = []
# If manifest is encrypted, use manifest key to
# decrypt each value before storing in memory.
if 'PREDIXPY_ENCRYPTED' in self.manifest['env']:
self.encrypted = True
if encrypted or self.encrypted:
key = predix.config.get_crypt_key(self.manifest_key)
f = Fernet(key)
for var in self.manifest['env'].keys():
value = f.decrypt(bytes(self.manifest['env'][var], 'utf-8'))
self.manifest['env'][var] = value.decode('utf-8')
self.app_name = self.manifest['applications'][0]['name']
input_file.close()
|
Read an existing manifest.
|
entailment
|
def create_manifest(self):
"""
Create a new manifest and write it to
disk.
"""
self.manifest = {}
self.manifest['applications'] = [{'name': self.app_name}]
self.manifest['services'] = []
self.manifest['env'] = {
'PREDIXPY_VERSION': str(predix.version),
}
self.write_manifest()
|
Create a new manifest and write it to
disk.
|
entailment
|
def _get_encrypted_manifest(self):
"""
Returns contents of the manifest where environment variables
that are secret will be encrypted without modifying the existing
state in memory which will remain unencrypted.
"""
key = predix.config.get_crypt_key(self.manifest_key)
f = Fernet(key)
manifest = copy.deepcopy(self.manifest)
for var in self.manifest['env'].keys():
value = str(self.manifest['env'][var])
manifest['env'][var] = f.encrypt(bytes(value, 'utf-8')).decode('utf-8')
return manifest
|
Returns contents of the manifest where environment variables
that are secret will be encrypted without modifying the existing
state in memory which will remain unencrypted.
|
entailment
|
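For readers unfamiliar with Fernet, here is a minimal sketch of the encrypt/decrypt round trip used for manifest env values, assuming the `cryptography` package; in PredixPy the key would come from `predix.config.get_crypt_key()` rather than being generated inline:

```python
from cryptography.fernet import Fernet

key = Fernet.generate_key()                              # stand-in for the manifest key
f = Fernet(key)
token = f.encrypt(bytes('my-client-secret', 'utf-8'))    # what gets written to disk
plain = f.decrypt(token).decode('utf-8')                 # what read_manifest() restores
assert plain == 'my-client-secret'
```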
def write_manifest(self, manifest_path=None, encrypted=None):
"""
Write manifest to disk.
:param manifest_path: write to a different location
:param encrypted: write with env data encrypted
"""
manifest_path = manifest_path or self.manifest_path
self.manifest['env']['PREDIXPY_VERSION'] = str(predix.version)
with open(manifest_path, 'w') as output_file:
if encrypted or self.encrypted:
self.manifest['env']['PREDIXPY_ENCRYPTED'] = self.manifest_key
content = self._get_encrypted_manifest()
else:
content = self.manifest # shallow reference
if 'PREDIXPY_ENCRYPTED' in content['env']:
del(content['env']['PREDIXPY_ENCRYPTED'])
yaml.safe_dump(content, output_file,
default_flow_style=False, explicit_start=True)
output_file.close()
|
Write manifest to disk.
:param manifest_path: write to a different location
:param encrypted: write with env data encrypted
|
entailment
|
def add_env_var(self, key, value):
"""
Add the given key / value as another environment
variable.
"""
self.manifest['env'][key] = value
os.environ[key] = str(value)
|
Add the given key / value as another environment
variable.
|
entailment
|
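A hedged sketch of how a service admin class typically drives these manifest helpers, following the `add_to_manifest()` implementations shown elsewhere in this file; `manifest` is assumed to be a `predix.app.Manifest` (or `predix.admin.app.Manifest`) instance and the names/values are placeholders:

```python
manifest.add_service('my-uaa-instance')                          # placeholder service name
manifest.add_env_var('PREDIX_SECURITY_UAA_URI',
                     'https://example.predix-uaa.example.com')   # placeholder value
manifest.write_manifest()                                        # persist env + services to disk
```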
def add_service(self, service_name):
"""
Add the given service to the manifest.
"""
if service_name not in self.manifest['services']:
self.manifest['services'].append(service_name)
|
Add the given service to the manifest.
|
entailment
|
def set_os_environ(self):
"""
Will load any environment variables found in the
manifest file into the current process for use
by applications.
When apps run in cloud foundry this would happen
automatically.
"""
for key in self.manifest['env'].keys():
os.environ[key] = str(self.manifest['env'][key])
|
Will load any environment variables found in the
manifest file into the current process for use
by applications.
When apps run in cloud foundry this would happen
automatically.
|
entailment
|
def get_client_id(self):
"""
Return the client id that should have all the
needed scopes and authorities for the services
in this manifest.
"""
self._client_id = predix.config.get_env_value(predix.app.Manifest, 'client_id')
return self._client_id
|
Return the client id that should have all the
needed scopes and authorities for the services
in this manifest.
|
entailment
|
def get_client_secret(self):
"""
Return the client secret that should correspond with
the client id.
"""
self._client_secret = predix.config.get_env_value(predix.app.Manifest, 'client_secret')
return self._client_secret
|
Return the client secret that should correspond with
the client id.
|
entailment
|
def get_timeseries(self, *args, **kwargs):
"""
Returns an instance of the Time Series Service.
"""
import predix.data.timeseries
ts = predix.data.timeseries.TimeSeries(*args, **kwargs)
return ts
|
Returns an instance of the Time Series Service.
|
entailment
|
def get_asset(self):
"""
Returns an instance of the Asset Service.
"""
import predix.data.asset
asset = predix.data.asset.Asset()
return asset
|
Returns an instance of the Asset Service.
|
entailment
|
def get_uaa(self):
"""
Returns an instance of the UAA Service.
"""
import predix.security.uaa
uaa = predix.security.uaa.UserAccountAuthentication()
return uaa
|
Returns an instance of the UAA Service.
|
entailment
|
def get_acs(self):
"""
Returns an instance of the Access Control Service.
"""
import predix.security.acs
acs = predix.security.acs.AccessControl()
return acs
|
Returns an instance of the Access Control Service.
|
entailment
|
def get_weather(self):
"""
Returns an instance of the Weather Service.
"""
import predix.data.weather
weather = predix.data.weather.WeatherForecast()
return weather
|
Returns an instance of the Weather Service.
|
entailment
|
def get_weather_forecast_days(self, latitude, longitude,
days=1, frequency=1, reading_type=None):
"""
Return the weather forecast for a given location.
::
results = ws.get_weather_forecast_days(lat, long)
for w in results['hits']:
print w['start_datetime_local']
print w['reading_type'], w['reading_value']
For description of reading types:
https://graphical.weather.gov/xml/docs/elementInputNames.php
"""
params = {}
# Can get data from NWS1 or NWS3 representing 1-hr and 3-hr
# intervals.
if frequency not in [1, 3]:
raise ValueError("Reading frequency must be 1 or 3")
params['days'] = days
params['source'] = 'NWS' + str(frequency)
params['latitude'] = latitude
params['longitude'] = longitude
if reading_type:
# url encoding will make spaces a + instead of %20, which service
# interprets as an "and" search which is undesirable
reading_type = reading_type.replace(' ', '%20')
params['reading_type'] = reading_type
url = self.uri + '/v1/weather-forecast-days/'
return self.service._get(url, params=params)
|
Return the weather forecast for a given location.
::
results = ws.get_weather_forecast_days(lat, long)
for w in results['hits']:
print w['start_datetime_local']
print w['reading_type'], w['reading_value']
For description of reading types:
https://graphical.weather.gov/xml/docs/elementInputNames.php
|
entailment
|
def get_weather_forecast(self, latitude, longitude, start, end,
frequency=1, reading_type=None):
"""
Return the weather forecast for a given location for specific
datetime specified in UTC format.
::
results = ws.get_weather_forecast(lat, long, start, end)
for w in results['hits']:
print w['start_datetime_local']
print w['reading_type'], '=', w['reading_value']
For description of reading types:
https://graphical.weather.gov/xml/docs/elementInputNames.php
"""
params = {}
# Can get data from NWS1 or NWS3 representing 1-hr and 3-hr
# intervals.
if frequency not in [1, 3]:
raise ValueError("Reading frequency must be 1 or 3")
params['source'] = 'NWS' + str(frequency)
params['latitude'] = latitude
params['longitude'] = longitude
params['start_datetime_utc'] = start
params['end_datetime_utc'] = end
if reading_type:
# Not using urllib.quote_plus() because its using a + which is
# being interpreted by service as an and instead of a space.
reading_type = reading_type.replace(' ', '%20')
params['reading_type'] = reading_type
url = self.uri + '/v1/weather-forecast-datetime/'
return self.service._get(url, params=params)
|
Return the weather forecast for a given location for specific
datetime specified in UTC format.
::
results = ws.get_weather_forecast(lat, long, start, end)
for w in results['hits']:
print w['start_datetime_local']
print w['reading_type'], '=', w['reading_value']
For description of reading types:
https://graphical.weather.gov/xml/docs/elementInputNames.php
|
entailment
|
def _generate_name(self, space, service_name, plan_name):
"""
Can generate a name based on the space, service name and plan.
"""
return str.join('-', [space, service_name, plan_name]).lower()
|
Can generate a name based on the space, service name and plan.
|
entailment
|
def _get_config_path(self):
"""
Return a sensible configuration path for caching config
settings.
"""
org = self.service.space.org.name
space = self.service.space.name
name = self.name
return "~/.predix/%s/%s/%s.json" % (org, space, name)
|
Return a sensible configuration path for caching config
settings.
|
entailment
|
def _create_service(self, parameters={}, **kwargs):
"""
Create a Cloud Foundry service that has custom parameters.
"""
logging.debug("_create_service()")
logging.debug(str.join(',', [self.service_name, self.plan_name,
self.name, str(parameters)]))
return self.service.create_service(self.service_name, self.plan_name,
self.name, parameters, **kwargs)
|
Create a Cloud Foundry service that has custom parameters.
|
entailment
|
def _delete_service(self, service_only=False):
"""
Delete a Cloud Foundry service and any associations.
"""
logging.debug('_delete_service()')
return self.service.delete_service(self.service_name)
|
Delete a Cloud Foundry service and any associations.
|
entailment
|
def _get_or_create_service_key(self):
"""
Get a service key or create one if needed.
"""
keys = self.service._get_service_keys(self.name)
for key in keys['resources']:
if key['entity']['name'] == self.service_name:
return self.service.get_service_key(self.name,
self.service_name)
self.service.create_service_key(self.name, self.service_name)
return self.service.get_service_key(self.name, self.service_name)
|
Get a service key or create one if needed.
|
entailment
|
def _get_service_config(self):
"""
Will get configuration for the service from a service key.
"""
key = self._get_or_create_service_key()
config = {}
config['service_key'] = [{'name': self.name}]
config.update(key['entity']['credentials'])
return config
|
Will get configuration for the service from a service key.
|
entailment
|
def create(self, parameters={}, create_keys=True, **kwargs):
"""
Create the service.
"""
# Create the service
cs = self._create_service(parameters=parameters, **kwargs)
# Create the service key to get config details and
# store in local cache file.
if create_keys:
cfg = parameters
cfg.update(self._get_service_config())
self.settings.save(cfg)
|
Create the service.
|
entailment
|
def _get_or_create_uaa(self, uaa):
"""
Returns a valid UAA instance for performing administrative functions
on services.
"""
if isinstance(uaa, predix.admin.uaa.UserAccountAuthentication):
return uaa
logging.debug("Initializing a new UAA")
return predix.admin.uaa.UserAccountAuthentication()
|
Returns a valid UAA instance for performing administrative functions
on services.
|
entailment
|
def create(self, parameters={}, **kwargs):
"""
Create an instance of the US Weather Forecast Service with
typical starting settings.
"""
# Add parameter during create for UAA issuer
uri = self.uaa.service.settings.data['uri'] + '/oauth/token'
parameters["trustedIssuerIds"] = [uri]
super(PredixService, self).create(parameters=parameters, **kwargs)
|
Create an instance of the US Weather Forecast Service with
typical starting settings.
|
entailment
|
def create(self):
"""
Create an instance of the Event Hub Service with the typical
starting settings.
"""
self.service.create()
os.environ[predix.config.get_env_key(self.use_class, 'host')] = self.get_eventhub_host()
os.environ[predix.config.get_env_key(self.use_class, 'port')] = self.get_eventhub_grpc_port()
os.environ[predix.config.get_env_key(self.use_class, 'wss_publish_uri')] = self.get_publish_wss_uri()
os.environ[predix.config.get_env_key(self.use_class, 'zone_id')] = self.get_zone_id()
|
Create an instance of the Event Hub Service with the typical
starting settings.
|
entailment
|
def grant_client(self, client_id, publish=False, subscribe=False, publish_protocol=None, publish_topics=None,
subscribe_topics=None, scope_prefix='predix-event-hub', **kwargs):
"""
Grant the given client id all the scopes and authorities
needed to work with the eventhub service.
"""
scopes = ['openid']
authorities = ['uaa.resource']
zone_id = self.get_zone_id()
# always must be part of base user scope
scopes.append('%s.zones.%s.user' % (scope_prefix, zone_id))
authorities.append('%s.zones.%s.user' % (scope_prefix, zone_id))
if publish_topics is not None or subscribe_topics is not None:
raise Exception("multiple topics are not currently available in preidx-py")
if publish_topics is None:
publish_topics = ['topic']
if subscribe_topics is None:
subscribe_topics = ['topic']
if publish:
# we are granting just the default topic
if publish_protocol is None:
scopes.append('%s.zones.%s.grpc.publish' % (scope_prefix, zone_id))
authorities.append('%s.zones.%s.grpc.publish' % (scope_prefix, zone_id))
scopes.append('%s.zones.%s.wss.publish' % (scope_prefix, zone_id))
authorities.append('%s.zones.%s.wss.publish' % (scope_prefix, zone_id))
else:
scopes.append('%s.zones.%s.%s.publish' % (scope_prefix, zone_id, publish_protocol))
authorities.append('%s.zones.%s.%s.publish' % (scope_prefix, zone_id, publish_protocol))
# we are requesting multiple topics
for topic in publish_topics:
if publish_protocol is None:
scopes.append('%s.zones.%s.%s.grpc.publish' % (scope_prefix, zone_id, topic))
scopes.append('%s.zones.%s.%s.wss.publish' % (scope_prefix, zone_id, topic))
scopes.append('%s.zones.%s.%s.user' % (scope_prefix, zone_id, topic))
authorities.append('%s.zones.%s.%s.grpc.publish' % (scope_prefix, zone_id, topic))
authorities.append('%s.zones.%s.%s.wss.publish' % (scope_prefix, zone_id, topic))
authorities.append('%s.zones.%s.%s.user' % (scope_prefix, zone_id, topic))
else:
scopes.append('%s.zones.%s.%s.%s.publish' % (scope_prefix, zone_id, topic, publish_protocol))
authorities.append('%s.zones.%s.%s.%s.publish' % (scope_prefix, zone_id, topic, publish_protocol))
if subscribe:
# we are granting just the default topic
scopes.append('%s.zones.%s.grpc.subscribe' % (scope_prefix, zone_id))
authorities.append('%s.zones.%s.grpc.subscribe' % (scope_prefix, zone_id))
# we are requesting multiple topics
for topic in subscribe_topics:
scopes.append('%s.zones.%s.%s.grpc.subscribe' % (scope_prefix, zone_id, topic))
authorities.append('%s.zones.%s.%s.grpc.subscribe' % (scope_prefix, zone_id, topic))
self.service.uaa.uaac.update_client_grants(client_id, scope=scopes,
authorities=authorities)
return self.service.uaa.uaac.get_client(client_id)
|
Grant the given client id all the scopes and authorities
needed to work with the eventhub service.
|
entailment
|
def get_eventhub_host(self):
"""
returns the host of the publish grpc endpoint for ingestion.
"""
for protocol in self.service.settings.data['publish']['protocol_details']:
if protocol['protocol'] == 'grpc':
return protocol['uri'][0:protocol['uri'].index(':')]
|
returns the host of the publish grpc endpoint for ingestion.
|
entailment
|
def add_to_manifest(self, manifest):
"""
Add useful details to the manifest about this service
so that it can be used in an application.
:param manifest: A predix.admin.app.Manifest object
instance that manages reading/writing manifest config
for a cloud foundry app.
"""
# Add this service to list of services
manifest.add_service(self.service.name)
# Add environment variables
manifest.add_env_var(predix.config.get_env_key(self.use_class, 'host'), self.get_eventhub_host())
manifest.add_env_var(predix.config.get_env_key(self.use_class, 'port'), self.get_eventhub_grpc_port())
manifest.add_env_var(predix.config.get_env_key(self.use_class, 'wss_publish_uri'), self.get_publish_wss_uri())
manifest.add_env_var(predix.config.get_env_key(self.use_class, 'zone_id'), self.get_zone_id())
manifest.write_manifest()
|
Add useful details to the manifest about this service
so that it can be used in an application.
:param manifest: A predix.admin.app.Manifest object
instance that manages reading/writing manifest config
for a cloud foundry app.
|
entailment
|
def _get_host(self):
"""
Returns the host address for an instance of Blob Store service from
environment inspection.
"""
if 'VCAP_SERVICES' in os.environ:
services = json.loads(os.getenv('VCAP_SERVICES'))
host = services['predix-blobstore'][0]['credentials']['host']
else:
host = predix.config.get_env_value(self, 'host')
# Protocol may not always be included in host setting
if 'https://' not in host:
host = 'https://' + host
return host
|
Returns the host address for an instance of Blob Store service from
environment inspection.
|
entailment
|
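To make the environment inspection above concrete, here is a hedged illustration of the `VCAP_SERVICES` shape these `_get_*` helpers expect; the values are placeholders, not real credentials:

```python
import json
import os

os.environ['VCAP_SERVICES'] = json.dumps({
    'predix-blobstore': [{
        'credentials': {
            'host': 'blobstore.example.com',
            'access_key_id': 'AKIAEXAMPLE',
        }
    }]
})

services = json.loads(os.getenv('VCAP_SERVICES'))
host = services['predix-blobstore'][0]['credentials']['host']
```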
def _get_access_key_id(self):
"""
Returns the access key for an instance of Blob Store service from
environment inspection.
"""
if 'VCAP_SERVICES' in os.environ:
services = json.loads(os.getenv('VCAP_SERVICES'))
return services['predix-blobstore'][0]['credentials']['access_key_id']
else:
return predix.config.get_env_value(self, 'access_key_id')
|
Returns the access key for an instance of Blob Store service from
environment inspection.
|
entailment
|
def list_objects(self, bucket_name=None, **kwargs):
"""
This method is primarily for illustration and just calls the
boto3 client implementation of list_objects but is a common task
for first time Predix BlobStore users.
"""
if not bucket_name: bucket_name = self.bucket_name
return self.client.list_objects(Bucket=bucket_name, **kwargs)
|
This method is primarily for illustration and just calls the
boto3 client implementation of list_objects but is a common task
for first time Predix BlobStore users.
|
entailment
|
def upload_file(self, src_filepath, dest_filename=None, bucket_name=None,
**kwargs):
"""
This method is primarily for illustration and just calls the
boto3 client implementation of upload_file but is a common task
for first time Predix BlobStore users.
"""
if not bucket_name: bucket_name = self.bucket_name
if not dest_filename: dest_filename = src_filepath
return self.client.upload_file(src_filepath, bucket_name,
dest_filename, **kwargs)
|
This method is primarily for illustration and just calls the
boto3 client implementation of upload_file but is a common task
for first time Predix BlobStore users.
|
entailment
|
def _get_cloud_foundry_config(self):
"""
Reads the local cf CLI cache stored in the user's
home directory.
"""
config = os.path.expanduser(self.config_file)
if not os.path.exists(config):
raise CloudFoundryLoginError('You must run `cf login` to authenticate')
with open(config, "r") as data:
return json.load(data)
|
Reads the local cf CLI cache stored in the user's
home directory.
|
entailment
|
def get_organization_guid(self):
"""
Returns the GUID for the organization currently targeted.
"""
if 'PREDIX_ORGANIZATION_GUID' in os.environ:
return os.environ['PREDIX_ORGANIZATION_GUID']
else:
info = self._get_organization_info()
for key in ('Guid', 'GUID'):
if key in info.keys():
return info[key]
raise ValueError('Unable to determine cf organization guid')
|
Returns the GUID for the organization currently targeted.
|
entailment
|
def get_space_guid(self):
"""
Returns the GUID for the space currently targeted.
Can be set by environment variable with PREDIX_SPACE_GUID.
Can be determined by ~/.cf/config.json.
"""
if 'PREDIX_SPACE_GUID' in os.environ:
return os.environ['PREDIX_SPACE_GUID']
else:
info = self._get_space_info()
for key in ('Guid', 'GUID'):
if key in info.keys():
return info[key]
raise ValueError('Unable to determine cf space guid')
|
Returns the GUID for the space currently targeted.
Can be set by environment variable with PREDIX_SPACE_GUID.
Can be determined by ~/.cf/config.json.
|
entailment
|
def get_crypt_key(key_path):
"""
Get the user's PredixPy manifest key. Generate and store one if not
yet generated.
"""
key_path = os.path.expanduser(key_path)
if os.path.exists(key_path):
with open(key_path, 'rb') as data:
key = data.read()
else:
key = Fernet.generate_key()
with open(key_path, 'wb') as output:
output.write(key)
return key
|
Get the user's PredixPy manifest key. Generate and store one if not
yet generated.
|
entailment
|
def get_env_key(obj, key=None):
"""
Return environment variable key to use for lookups within a
namespace represented by the package name.
For example, any variables for predix.security.uaa are stored
as PREDIX_SECURITY_UAA_KEY
"""
return str.join('_', [obj.__module__.replace('.','_').upper(),
key.upper()])
|
Return environment variable key to use for lookups within a
namespace represented by the package name.
For example, any variables for predix.security.uaa are stored
as PREDIX_SECURITY_UAA_KEY
|
entailment
|
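A small, hedged illustration of the naming convention `get_env_key()` implements; the class here is hypothetical and only stands in for an object whose `__module__` is `predix.security.uaa`:

```python
class FakeUaa(object):
    __module__ = 'predix.security.uaa'   # hypothetical stand-in

# Reproduces the same transformation as get_env_key(obj, key):
key = '_'.join([FakeUaa.__module__.replace('.', '_').upper(), 'uri'.upper()])
print(key)  # PREDIX_SECURITY_UAA_URI
```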
def get_env_value(obj, attribute):
"""
Returns the environment variable value for the attribute of
the given object.
For example `get_env_value(predix.security.uaa, 'uri')` will
return value of environment variable PREDIX_SECURITY_UAA_URI.
"""
varname = get_env_key(obj, attribute)
var = os.environ.get(varname)
if not var:
raise ValueError("%s must be set in your environment." % varname)
return var
|
Returns the environment variable value for the attribute of
the given object.
For example `get_env_value(predix.security.uaa, 'uri')` will
return value of environment variable PREDIX_SECURITY_UAA_URI.
|
entailment
|
def set_env_value(obj, attribute, value):
"""
Set the environment variable value for the attribute of the
given object.
For example, `set_env_value(predix.security.uaa, 'uri', 'http://...')`
will set the environment variable PREDIX_SECURITY_UAA_URI to the given
uri.
"""
varname = get_env_key(obj, attribute)
os.environ[varname] = value
return varname
|
Set the environment variable value for the attribute of the
given object.
For example, `set_env_value(predix.security.uaa, 'uri', 'http://...')`
will set the environment variable PREDIX_SECURITY_UAA_URI to the given
uri.
|
entailment
|
def get_instance_guid(self, service_name):
"""
Returns the GUID for the service instance with
the given name.
"""
summary = self.space.get_space_summary()
for service in summary['services']:
if service['name'] == service_name:
return service['guid']
raise ValueError("No service with name '%s' found." % (service_name))
|
Returns the GUID for the service instance with
the given name.
|
entailment
|
def _get_service_bindings(self, service_name):
"""
Return the service bindings for the service instance.
"""
instance = self.get_instance(service_name)
return self.api.get(instance['service_bindings_url'])
|
Return the service bindings for the service instance.
|
entailment
|
def delete_service_bindings(self, service_name):
"""
Remove service bindings to applications.
"""
instance = self.get_instance(service_name)
return self.api.delete(instance['service_bindings_url'])
|
Remove service bindings to applications.
|
entailment
|
def _get_service_keys(self, service_name):
"""
Return the service keys for the given service.
"""
guid = self.get_instance_guid(service_name)
uri = "/v2/service_instances/%s/service_keys" % (guid)
return self.api.get(uri)
|
Return the service keys for the given service.
|
entailment
|
def get_service_keys(self, service_name):
"""
Returns a flat list of the names of the service keys
for the given service.
"""
keys = []
for key in self._get_service_keys(service_name)['resources']:
keys.append(key['entity']['name'])
return keys
|
Returns a flat list of the names of the service keys
for the given service.
|
entailment
|
def get_service_key(self, service_name, key_name):
"""
Returns the service key details.
Similar to `cf service-key`.
"""
for key in self._get_service_keys(service_name)['resources']:
if key_name == key['entity']['name']:
guid = key['metadata']['guid']
uri = "/v2/service_keys/%s" % (guid)
return self.api.get(uri)
return None
|
Returns the service key details.
Similar to `cf service-key`.
|
entailment
|
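A hedged sketch of pulling credentials out of a service key returned by `get_service_key()`, following the same `entity`/`credentials` unpacking that `_get_service_config()` uses earlier in this file; `service` and both names are placeholders:

```python
key = service.get_service_key('my-timeseries', 'my-timeseries-key')
if key:
    credentials = key['entity']['credentials']
    print(sorted(credentials.keys()))
```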
def create_service_key(self, service_name, key_name):
"""
Create a service key for the given service.
"""
if self.has_key(service_name, key_name):
logging.warning("Reusing existing service key %s" % (key_name))
return self.get_service_key(service_name, key_name)
body = {
'service_instance_guid': self.get_instance_guid(service_name),
'name': key_name
}
return self.api.post('/v2/service_keys', body)
|
Create a service key for the given service.
|
entailment
|
def delete_service_key(self, service_name, key_name):
"""
Delete a service key for the given service.
"""
key = self.get_service_key(service_name, key_name)
logging.info("Deleting service key %s for service %s" % (key, service_name))
return self.api.delete(key['metadata']['url'])
|
Delete a service key for the given service.
|
entailment
|
def get_instance(self, service_name):
"""
Retrieves a service instance with the given name.
"""
for resource in self.space._get_instances():
if resource['entity']['name'] == service_name:
return resource['entity']
|
Retrieves a service instance with the given name.
|
entailment
|
def get_service_plan_for_service(self, service_name):
"""
Return the service plans available for a given service.
"""
services = self.get_services()
for service in services['resources']:
if service['entity']['label'] == service_name:
response = self.api.get(service['entity']['service_plans_url'])
return response['resources']
|
Return the service plans available for a given service.
|
entailment
|
def get_service_plan_guid(self, service_name, plan_name):
"""
Return the service plan GUID for the given service / plan.
"""
for plan in self.get_service_plan_for_service(service_name):
if plan['entity']['name'] == plan_name:
return plan['metadata']['guid']
return None
|
Return the service plan GUID for the given service / plan.
|
entailment
|
def create_service(self, service_type, plan_name, service_name, params,
async=False, **kwargs):
"""
Create a service instance.
"""
if self.space.has_service_with_name(service_name):
logging.warning("Service already exists with that name.")
return self.get_instance(service_name)
if self.space.has_service_of_type(service_type):
logging.warning("Service type already exists.")
guid = self.get_service_plan_guid(service_type, plan_name)
if not guid:
raise ValueError("No service plan named: %s" % (plan_name))
body = {
'name': service_name,
'space_guid': self.space.guid,
'service_plan_guid': guid,
'parameters': params
}
uri = '/v2/service_instances?accepts_incomplete=true'
if async:
uri += '&async=true'
return self.api.post(uri, body)
|
Create a service instance.
|
entailment
|
def delete_service(self, service_name, params=None):
"""
Delete the service of the given name. It may fail if there are
any service keys or app bindings. Use purge() if you want
to delete it all.
"""
if not self.space.has_service_with_name(service_name):
logging.warning("Service not found so... succeeded?")
return True
guid = self.get_instance_guid(service_name)
logging.info("Deleting service %s with guid %s" % (service_name, guid))
# MAINT: this endpoint changes in newer version of api
return self.api.delete("/v2/service_instances/%s?accepts_incomplete=true" %
(guid), params=params)
|
Delete the service of the given name. It may fail if there are
any service keys or app bindings. Use purge() if you want
to delete it all.
|
entailment
|
def _get_query_uri(self):
"""
Returns the URI endpoint for performing queries of a
Predix Time Series instance from environment inspection.
"""
if 'VCAP_SERVICES' in os.environ:
services = json.loads(os.getenv('VCAP_SERVICES'))
predix_timeseries = services['predix-timeseries'][0]['credentials']
return predix_timeseries['query']['uri'].partition('/v1')[0]
else:
return predix.config.get_env_value(self, 'query_uri')
|
Returns the URI endpoint for performing queries of a
Predix Time Series instance from environment inspection.
|
entailment
|
def _get_query_zone_id(self):
"""
Returns the ZoneId for performing queries of a Predix
Time Series instance from environment inspection.
"""
if 'VCAP_SERVICES' in os.environ:
services = json.loads(os.getenv('VCAP_SERVICES'))
predix_timeseries = services['predix-timeseries'][0]['credentials']
return predix_timeseries['query']['zone-http-header-value']
else:
return predix.config.get_env_value(self, 'query_zone_id')
|
Returns the ZoneId for performing queries of a Predix
Time Series instance from environment inspection.
|
entailment
|
def _get_datapoints(self, params):
"""
Will make a direct REST call with the given json body payload to
get datapoints.
"""
url = self.query_uri + '/v1/datapoints'
return self.service._get(url, params=params)
|
Will make a direct REST call with the given json body payload to
get datapoints.
|
entailment
|
def get_values(self, *args, **kwargs):
"""
Convenience method that for simple single tag queries will
return just the values to be iterated on.
"""
if isinstance(args[0], list):
raise ValueError("Can only get_values() for a single tag.")
response = self.get_datapoints(*args, **kwargs)
for value in response['tags'][0]['results'][0]['values']:
yield [datetime.datetime.utcfromtimestamp(value[0]/1000),
value[1],
value[2]]
|
Convenience method that for simple single tag queries will
return just the values to be iterated on.
|
entailment
|
def get_datapoints(self, tags, start=None, end=None, order=None,
limit=None, qualities=None, attributes=None, measurement=None,
aggregations=None, post=False):
"""
Returns all of the datapoints that match the given query.
- tags: list or string identifying the name/tag (ie. "temp")
- start: data after this, absolute or relative (ie. '1w-ago' or
1494015972386)
- end: data before this value
- order: ascending (asc) or descending (desc)
- limit: only return a few values (ie. 25)
- qualities: data quality value (ie. [ts.GOOD, ts.UNCERTAIN])
- attributes: dictionary of key-values (ie. {'unit': 'mph'})
- measurement: tuple of operation and value (ie. ('gt', 30))
- aggregations: summary statistics on data results (ie. 'avg')
- post: POST query instead of GET (caching implication)
A few additional observations:
- allow service to do most data validation
- order is applied before limit so resultset will differ
The returned results match what the service response is so you'll
need to unpack it as appropriate. Oftentimes what you want for
a simple single tag query will be:
response['tags'][0]['results'][0]['values']
"""
params = {}
# Documentation says start is required for GET but not POST, but
# seems to be required all the time, so using sensible default.
if not start:
start = '1w-ago'
logging.warning("Defaulting query for data with start date %s" % (start))
# Start date can be absolute or relative, only certain legal values
# but service will throw error if used improperly. (ms, s, mi, h, d,
# w, mm, y). Relative dates must end in -ago.
params['start'] = start
# Docs say when making POST with a start that end must also be
# specified, but this does not seem to be the case.
if end:
# MAINT: error when end < start which is handled by service
params['end'] = end
params['tags'] = []
if not isinstance(tags, list):
tags = [tags]
for tag in tags:
query = {}
query['name'] = tag
# Limit resultset with an integer value
if limit:
query['limit'] = int(limit)
# Order must be 'asc' or 'desc' but will get sensible error
# from service.
if order:
query['order'] = order
# Filters are complex and support filtering by
# quality, measurement, and attributes.
filters = {}
# Check for the quality of the datapoints
if qualities is not None:
if isinstance(qualities, int) or isinstance(qualities, str):
qualities = [qualities]
# Timeseries expects quality to be a string, not integer,
# so coerce each into a string
for i, quality in enumerate(qualities):
qualities[i] = str(quality)
filters['qualities'] = {"values": qualities}
# Check for attributes on the datapoints, expected to be
# a dictionary of key / value pairs that datapoints must match.
if attributes is not None:
if not isinstance(attributes, dict):
raise ValueError("Attribute filters must be dictionary.")
filters['attributes'] = attributes
# Check for measurements that meets a given comparison operation
# such as ge, gt, eq, ne, le, lt
if measurement is not None:
filters['measurements'] = {
'condition': measurement[0],
'values': measurement[1]
}
# If we found any filters add them to the query
if filters:
query['filters'] = filters
# Handle any additional aggregations of dataset
if aggregations is not None:
if not isinstance(aggregations, list):
aggregations = [aggregations]
query['aggregations'] = []
for aggregation in aggregations:
query['aggregations'].append({
'sampling': {'datapoints': 1},
'type': aggregation })
params['tags'].append(query)
if post:
return self._post_datapoints(params)
else:
return self._get_datapoints({"query": json.dumps(params)})
|
Returns all of the datapoints that match the given query.
- tags: list or string identifying the name/tag (ie. "temp")
- start: data after this, absolute or relative (ie. '1w-ago' or
1494015972386)
- end: data before this value
- order: ascending (asc) or descending (desc)
- limit: only return a few values (ie. 25)
- qualities: data quality value (ie. [ts.GOOD, ts.UNCERTAIN])
- attributes: dictionary of key-values (ie. {'unit': 'mph'})
- measurement: tuple of operation and value (ie. ('gt', 30))
- aggregations: summary statistics on data results (ie. 'avg')
- post: POST query instead of GET (caching implication)
A few additional observations:
- allow service to do most data validation
- order is applied before limit so resultset will differ
The returned results match what the service response is so you'll
need to unpack it as appropriate. Oftentimes what you want for
a simple single tag query will be:
response['tags'][0]['results'][0]['values']
|
entailment
|
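A hedged usage sketch of `get_datapoints()` based on the parameter list documented above; `ts` is assumed to be an authenticated `predix.data.timeseries.TimeSeries` instance and the tag name is a placeholder:

```python
response = ts.get_datapoints('temp', start='1w-ago', order='desc', limit=25,
                             qualities=[ts.GOOD], attributes={'unit': 'mph'})

# For a simple single-tag query, unpack as the docstring suggests:
for timestamp, value, quality in response['tags'][0]['results'][0]['values']:
    print(timestamp, value, quality)
```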
def _create_connection(self):
"""
Create a new websocket connection with proper headers.
"""
logging.debug("Initializing new websocket connection.")
headers = {
'Authorization': self.service._get_bearer_token(),
'Predix-Zone-Id': self.ingest_zone_id,
'Content-Type': 'application/json',
}
url = self.ingest_uri
logging.debug("URL=" + str(url))
logging.debug("HEADERS=" + str(headers))
# Should consider connection pooling and longer timeouts
return websocket.create_connection(url, header=headers)
|
Create a new websocket connection with proper headers.
|
entailment
|
def _get_websocket(self, reuse=True):
"""
Reuse existing connection or create a new connection.
"""
# Check if still connected
if self.ws and reuse:
if self.ws.connected:
return self.ws
logging.debug("Stale connection, reconnecting.")
self.ws = self._create_connection()
return self.ws
|
Reuse existing connection or create a new connection.
|
entailment
|
def _send_to_timeseries(self, message):
"""
Establish or reuse socket connection and send
the given message to the timeseries service.
"""
logging.debug("MESSAGE=" + str(message))
result = None
try:
ws = self._get_websocket()
ws.send(json.dumps(message))
result = ws.recv()
except (websocket.WebSocketConnectionClosedException, Exception) as e:
logging.debug("Connection failed, will try again.")
logging.debug(e)
ws = self._get_websocket(reuse=False)
ws.send(json.dumps(message))
result = ws.recv()
logging.debug("RESULT=" + str(result))
return result
|
Establish or reuse socket connection and send
the given message to the timeseries service.
|
entailment
|
def queue(self, name, value, quality=None, timestamp=None,
attributes=None):
"""
To reduce network traffic, you can buffer datapoints and
then flush() anything in the queue.
:param name: the name / label / tag for sensor data
:param value: the sensor reading or value to record
:param quality: the quality value, use the constants BAD, GOOD, etc.
(optional and defaults to UNCERTAIN)
:param timestamp: the time the reading was recorded in epoch
milliseconds (optional and defaults to now)
:param attributes: dictionary for any key-value pairs to store with the
reading (optional)
"""
# Get timestamp first in case delay opening websocket connection
# and it must have millisecond accuracy
if not timestamp:
timestamp = int(round(time.time() * 1000))
else:
# Coerce datetime objects to epoch
if isinstance(timestamp, datetime.datetime):
timestamp = int(round(int(timestamp.strftime('%s')) * 1000))
# Only specific quality values supported
if quality not in [self.BAD, self.GOOD, self.NA, self.UNCERTAIN]:
quality = self.UNCERTAIN
# Check if adding to queue of an existing tag and add second datapoint
for point in self._queue:
if point['name'] == name:
point['datapoints'].append([timestamp, value, quality])
return
# If adding new tag, initialize and set any attributes
datapoint = {
"name": name,
"datapoints": [[timestamp, value, quality]]
}
# Attributes are extra details for a datapoint
if attributes is not None:
if not isinstance(attributes, dict):
raise ValueError("Attributes are expected to be a dictionary.")
# Validate rules for attribute keys to provide guidance.
invalid_value = ':;= '
has_invalid_value = re.compile(r'[%s]' % (invalid_value)).search
has_valid_key = re.compile(r'^[\w\.\/\-]+$').search
for (key, val) in list(attributes.items()):
# Values cannot be empty
if (val == '') or (val is None):
raise ValueError("Attribute (%s) must have a non-empty value." % (key))
# Values should be treated as a string for regex validation
val = str(val)
# Values cannot contain certain arbitrary characters
if bool(has_invalid_value(val)):
raise ValueError("Attribute (%s) cannot contain (%s)." %
(key, invalid_value))
# Attributes have to be alphanumeric-ish
if not bool(has_valid_key(key)):
raise ValueError("Key (%s) not alphanumeric-ish." % (key))
datapoint['attributes'] = attributes
self._queue.append(datapoint)
logging.debug("QUEUE: " + str(len(self._queue)))
|
To reduce network traffic, you can buffer datapoints and
then flush() anything in the queue.
:param name: the name / label / tag for sensor data
:param value: the sensor reading or value to record
:param quality: the quality value, use the constants BAD, GOOD, etc.
(optional and defaults to UNCERTAIN)
:param timestamp: the time the reading was recorded in epoch
milliseconds (optional and defaults to now)
:param attributes: dictionary for any key-value pairs to store with the
reading (optional)
|
entailment
|
def send(self, name=None, value=None, **kwargs):
"""
Can accept a name/tag and value to be queued and then send anything in
the queue to the time series service. Optional parameters include
setting quality, timestamp, or attributes.
See spec for queue() for complete list of options.
Example of sending a batch of values:
queue('temp', 70.1)
queue('humidity', 20.4)
send()
Example of sending one and flushing queue immediately
send('temp', 70.3)
send('temp', 70.4, quality=ts.GOOD, attributes={'unit': 'F'})
"""
if name and value:
self.queue(name, value, **kwargs)
timestamp = int(round(time.time() * 1000))
# The label "name" or "tag" is sometimes used ambiguously
msg = {
"messageId": timestamp,
"body": self._queue
}
self._queue = []
return self._send_to_timeseries(msg)
|
Can accept a name/tag and value to be queued and then send anything in
the queue to the time series service. Optional parameters include
setting quality, timestamp, or attributes.
See spec for queue() for complete list of options.
Example of sending a batch of values:
queue('temp', 70.1)
queue('humidity', 20.4)
send()
Example of sending one and flushing queue immediately
send('temp', 70.3)
send('temp', 70.4, quality=ts.GOOD, attributes={'unit': 'F'})
|
entailment
|
def execute(self, statement, *args, **kwargs):
"""
This convenience method will execute the query passed in as is. For
more complex functionality you may want to use the sqlalchemy engine
directly, but this serves as an example implementation.
:param select_query: SQL statement to execute that will identify the
resultset of interest.
"""
with self.engine.connect() as conn:
s = sqlalchemy.sql.text(statement)
return conn.execute(s, **kwargs)
|
This convenience method will execute the query passed in as is. For
more complex functionality you may want to use the sqlalchemy engine
directly, but this serves as an example implementation.
:param select_query: SQL statement to execute that will identify the
resultset of interest.
|
entailment
|
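A hedged usage sketch for the `execute()` convenience method above, showing SQLAlchemy's named bind parameters; `db` is assumed to expose this method and the table/column names are placeholders:

```python
# Named bind parameters (:unit) are filled from keyword arguments; the call
# returns a SQLAlchemy result for the caller to consume.
result = db.execute("SELECT name, value FROM readings WHERE unit = :unit", unit='F')
```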
def shutdown(self):
"""
Shutdown the client, shutdown the sub clients and stop the health checker
:return: None
"""
self._run_health_checker = False
if self.publisher is not None:
self.publisher.shutdown()
if self.subscriber is not None:
self.subscriber.shutdown()
|
Shutdown the client, shutdown the sub clients and stop the health checker
:return: None
|
entailment
|
def get_service_env_value(self, key):
"""
Get an env variable as defined by the service admin
:param key: the base of the key to use
:return: the env if it exists
"""
service_key = predix.config.get_env_key(self, key)
value = os.environ.get(service_key)
if not value:
raise ValueError("%s env unset" % key)
return value
|
Get an env variable as defined by the service admin
:param key: the base of the key to use
:return: the env if it exists
|
entailment
|
def _init_channel(self):
"""
build the grpc channel used for both publisher and subscriber
:return: None
"""
host = self._get_host()
port = self._get_grpc_port()
if 'TLS_PEM_FILE' in os.environ:
with open(os.environ['TLS_PEM_FILE'], mode='rb') as f: # b is important -> binary
file_content = f.read()
credentials = grpc.ssl_channel_credentials(root_certificates=file_content)
else:
credentials = grpc.ssl_channel_credentials()
self._channel = grpc.secure_channel(host + ":" + port, credentials=credentials)
self._init_health_checker()
|
build the grpc channel used for both publisher and subscriber
:return: None
|
entailment
|
def _init_health_checker(self):
"""
start the health checker stub and start a thread to ping it every 30 seconds
:return: None
"""
stub = Health_pb2_grpc.HealthStub(channel=self._channel)
self._health_check = stub.Check
health_check_thread = threading.Thread(target=self._health_check_thread)
health_check_thread.daemon = True
health_check_thread.start()
|
start the health checker stub and start a thread to ping it every 30 seconds
:return: None
|
entailment
|