| sentence1 | sentence2 | label |
|---|---|---|
def get_page(self, form):
'''Get the requested page'''
page_size = form.cleaned_data['iDisplayLength']
start_index = form.cleaned_data['iDisplayStart']
paginator = Paginator(self.object_list, page_size)
num_page = (start_index // page_size) + 1
return paginator.page(num_page)
|
Get the requested page
|
entailment
|
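The paging arithmetic above maps the DataTables parameters iDisplayStart and iDisplayLength onto Django's 1-based page numbers. A minimal sketch of that mapping, with hypothetical request values not taken from the source:

# Hypothetical DataTables request parameters.
start_index = 50  # iDisplayStart: index of the first row to display
page_size = 25    # iDisplayLength: rows per page

# Integer division keeps the page number whole; Django pages are 1-based.
num_page = (start_index // page_size) + 1
assert num_page == 3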
def get_row(self, row):
'''Format a single row (if necessary)'''
if isinstance(self.fields, dict):
return dict([
(key, text_type(value).format(**row) if RE_FORMATTED.match(value) else row[value])
for key, value in self.fields.items()
])
else:
return [text_type(field).format(**row) if RE_FORMATTED.match(field)
else row[field]
for field in self.fields]
|
Format a single row (if necessary)
|
entailment
|
def render_to_response(self, form, **kwargs):
'''Render Datatables expected JSON format'''
page = self.get_page(form)
data = {
'iTotalRecords': page.paginator.count,
'iTotalDisplayRecords': page.paginator.count,
'sEcho': form.cleaned_data['sEcho'],
'aaData': self.get_rows(page.object_list),
}
return self.json_response(data)
|
Render Datatables expected JSON format
|
entailment
|
def set_grant_type(self, grant_type = 'client_credentials', api_key=None, api_secret=None, scope=None, info=None):
"""
Grant types:
- token:
An authorization is requested from the end-user by redirecting them to an authorization page hosted
on Dailymotion. Once authorized, a refresh token is requested by the API client from the token
server and stored in the end-user's cookie (or other storage technique implemented by subclasses).
The refresh token is then used to request time-limited access tokens from the token server.
- none / client_credentials:
This grant type is a two-legged authentication: it does not allow acting on behalf of another user.
With this grant type, all API requests will be performed with the user identity of the API key owner.
- password:
This grant type authenticates the end-user by directly providing their credentials.
This profile is highly discouraged for web-server workflows. If used, the username and password
MUST NOT be stored by the client.
"""
self.access_token = None
if api_key and api_secret:
self._grant_info['key'] = api_key
self._grant_info['secret'] = api_secret
else:
raise DailymotionClientError('Missing API key/secret')
if isinstance(info, dict):
self._grant_info.update(info)
else:
info = {}
if self._session_store_enabled and isinstance(info, dict) and info.get('username') is not None:
self._session_store.set_user(info.get('username'))
if grant_type in ('authorization', 'token'):
grant_type = 'authorization'
if 'redirect_uri' not in info:
raise DailymotionClientError('Missing redirect_uri in grant info for token grant type.')
elif grant_type in ('client_credentials', 'none'):
grant_type = 'client_credentials'
elif grant_type == 'password':
if 'username' not in info or 'password' not in info:
raise DailymotionClientError('Missing username or password in grant info for password grant type.')
self._grant_type = grant_type
if scope:
if not isinstance(scope, (list, tuple)):
raise DailymotionClientError('Invalid scope type: must be a list of valid scopes')
self._grant_info['scope'] = scope
|
Grant types:
- token:
An authorization is requested from the end-user by redirecting them to an authorization page hosted
on Dailymotion. Once authorized, a refresh token is requested by the API client from the token
server and stored in the end-user's cookie (or other storage technique implemented by subclasses).
The refresh token is then used to request time-limited access tokens from the token server.
- none / client_credentials:
This grant type is a two-legged authentication: it does not allow acting on behalf of another user.
With this grant type, all API requests will be performed with the user identity of the API key owner.
- password:
This grant type authenticates the end-user by directly providing their credentials.
This profile is highly discouraged for web-server workflows. If used, the username and password
MUST NOT be stored by the client.
|
entailment
|
def authenticated(func):
"""
Decorator to check if Smappee's access token has expired.
If it has, use the refresh token to request a new access token
"""
@wraps(func)
def wrapper(*args, **kwargs):
self = args[0]
if self.refresh_token is not None and \
self.token_expiration_time <= dt.datetime.utcnow():
self.re_authenticate()
return func(*args, **kwargs)
return wrapper
|
Decorator to check if Smappee's access token has expired.
If it has, use the refresh token to request a new access token
|
entailment
|
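A self-contained sketch of how this decorator pattern guards API calls; the SimpleClient class, its attributes, and its re_authenticate method below are hypothetical stand-ins, not part of the Smappee client:

import datetime as dt
from functools import wraps

def authenticated(func):
    # Refresh the token before the wrapped call if it has expired.
    @wraps(func)
    def wrapper(*args, **kwargs):
        self = args[0]
        if self.refresh_token is not None and \
                self.token_expiration_time <= dt.datetime.utcnow():
            self.re_authenticate()
        return func(*args, **kwargs)
    return wrapper

class SimpleClient(object):
    refresh_token = 'dummy-refresh-token'
    token_expiration_time = dt.datetime.utcnow() - dt.timedelta(seconds=1)

    def re_authenticate(self):
        # Pretend a fresh token was fetched, valid for one hour.
        self.token_expiration_time = dt.datetime.utcnow() + dt.timedelta(hours=1)

    @authenticated
    def get_data(self):
        return 'data'

client = SimpleClient()
assert client.get_data() == 'data'  # re_authenticate() ran first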
def urljoin(*parts):
"""
Join terms together with forward slashes
Parameters
----------
parts
Returns
-------
str
"""
# first strip extra forward slashes (except http:// and the likes) and
# create list
part_list = []
for part in parts:
p = str(part)
if p.endswith('//'):
p = p[0:-1]
else:
p = p.strip('/')
part_list.append(p)
# join everything together
url = '/'.join(part_list)
return url
|
Join terms together with forward slashes
Parameters
----------
parts
Returns
-------
str
|
entailment
|
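A few illustrative calls, assuming the urljoin above is in scope; the URLs are hypothetical. Note the special case: a part ending in '//' only loses one slash, which keeps bare scheme prefixes such as 'http://' usable:

assert urljoin('https://example.com/', 'servicelocation', 123, 'info') == \
    'https://example.com/servicelocation/123/info'
assert urljoin('http://', 'example.com', 'a/') == 'http://example.com/a'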
def authenticate(self, username, password):
"""
Uses a Smappee username and password to request an access token,
refresh token and expiry date.
Parameters
----------
username : str
password : str
Returns
-------
requests.Response
access token is saved in self.access_token
refresh token is saved in self.refresh_token
expiration time is set in self.token_expiration_time as
datetime.datetime
"""
url = URLS['token']
data = {
"grant_type": "password",
"client_id": self.client_id,
"client_secret": self.client_secret,
"username": username,
"password": password
}
r = requests.post(url, data=data)
r.raise_for_status()
j = r.json()
self.access_token = j['access_token']
self.refresh_token = j['refresh_token']
self._set_token_expiration_time(expires_in=j['expires_in'])
return r
|
Uses a Smappee username and password to request an access token,
refresh token and expiry date.
Parameters
----------
username : str
password : str
Returns
-------
requests.Response
access token is saved in self.access_token
refresh token is saved in self.refresh_token
expiration time is set in self.token_expiration_time as
datetime.datetime
|
entailment
|
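A hedged usage sketch of the password flow; the Smappee class name and the credentials below are assumptions about how the surrounding client is constructed (client id and secret), not verbatim from the source:

# Hypothetical credentials; replace with real ones.
client = Smappee(client_id='my-client-id', client_secret='my-client-secret')
client.authenticate(username='user@example.com', password='correct horse battery staple')
print(client.access_token is not None)  # True after a successful authentication
print(client.token_expiration_time)     # datetime.datetime, roughly now + expires_in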
def _set_token_expiration_time(self, expires_in):
"""
Saves the token expiration time by adding the 'expires in' parameter
to the current datetime (in utc).
Parameters
----------
expires_in : int
number of seconds from the time of the request until expiration
Returns
-------
nothing
saves expiration time in self.token_expiration_time as
datetime.datetime
"""
self.token_expiration_time = dt.datetime.utcnow() + \
dt.timedelta(0, expires_in)
|
Saves the token expiration time by adding the 'expires in' parameter
to the current datetime (in utc).
Parameters
----------
expires_in : int
number of seconds from the time of the request until expiration
Returns
-------
nothing
saves expiration time in self.token_expiration_time as
datetime.datetime
|
entailment
|
def get_service_locations(self):
"""
Request service locations
Returns
-------
dict
"""
url = URLS['servicelocation']
headers = {"Authorization": "Bearer {}".format(self.access_token)}
r = requests.get(url, headers=headers)
r.raise_for_status()
return r.json()
|
Request service locations
Returns
-------
dict
|
entailment
|
def get_service_location_info(self, service_location_id):
"""
Request service location info
Parameters
----------
service_location_id : int
Returns
-------
dict
"""
url = urljoin(URLS['servicelocation'], service_location_id, "info")
headers = {"Authorization": "Bearer {}".format(self.access_token)}
r = requests.get(url, headers=headers)
r.raise_for_status()
return r.json()
|
Request service location info
Parameters
----------
service_location_id : int
Returns
-------
dict
|
entailment
|
def get_consumption(self, service_location_id, start, end, aggregation, raw=False):
"""
Request Electricity consumption and Solar production
for a given service location.
Parameters
----------
service_location_id : int
start : int | dt.datetime | pd.Timestamp
end : int | dt.datetime | pd.Timestamp
start and end support epoch (in milliseconds),
datetime and Pandas Timestamp
aggregation : int
1 = 5 min values (only available for the last 14 days)
2 = hourly values
3 = daily values
4 = monthly values
5 = quarterly values
raw : bool
default False
if True: Return the data "as is" from the server
if False: convert the 'alwaysOn' value to Wh.
(the server returns this value as the sum of the power,
measured in 5 minute blocks. This means that it is 12 times
higher than the consumption in Wh.
See https://github.com/EnergieID/smappy/issues/24)
Returns
-------
dict
"""
url = urljoin(URLS['servicelocation'], service_location_id,
"consumption")
d = self._get_consumption(url=url, start=start, end=end,
aggregation=aggregation)
if not raw:
for block in d['consumptions']:
if 'alwaysOn' not in block.keys():
break
block.update({'alwaysOn': block['alwaysOn'] / 12})
return d
|
Request Electricity consumption and Solar production
for a given service location.
Parameters
----------
service_location_id : int
start : int | dt.datetime | pd.Timestamp
end : int | dt.datetime | pd.Timestamp
start and end support epoch (in milliseconds),
datetime and Pandas Timestamp
aggregation : int
1 = 5 min values (only available for the last 14 days)
2 = hourly values
3 = daily values
4 = monthly values
5 = quarterly values
raw : bool
default False
if True: Return the data "as is" from the server
if False: convert the 'alwaysOn' value to Wh.
(the server returns this value as the sum of the power,
measured in 5 minute blocks. This means that it is 12 times
higher than the consumption in Wh.
See https://github.com/EnergieID/smappy/issues/24)
Returns
-------
dict
|
entailment
|
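The 'alwaysOn' rescaling described above is a plain division: the server reports the sum of twelve 5-minute power readings per hour, so dividing by 12 yields Wh. A small illustration with made-up numbers:

# Hypothetical response block (aggregation=2, i.e. hourly values).
block = {'timestamp': 1483228800000, 'consumption': 320.0, 'alwaysOn': 600.0}

# Twelve five-minute readings per hour, so the reported sum is 12x the Wh value.
block['alwaysOn'] = block['alwaysOn'] / 12
assert block['alwaysOn'] == 50.0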
def get_sensor_consumption(self, service_location_id, sensor_id, start,
end, aggregation):
"""
Request consumption for a given sensor in a given service location
Parameters
----------
service_location_id : int
sensor_id : int
start : int | dt.datetime | pd.Timestamp
end : int | dt.datetime | pd.Timestamp
start and end support epoch (in milliseconds),
datetime and Pandas Timestamp
timezone-naive datetimes are assumed to be in UTC
aggregation : int
1 = 5 min values (only available for the last 14 days)
2 = hourly values
3 = daily values
4 = monthly values
5 = quarterly values
Returns
-------
dict
"""
url = urljoin(URLS['servicelocation'], service_location_id, "sensor",
sensor_id, "consumption")
return self._get_consumption(url=url, start=start, end=end,
aggregation=aggregation)
|
Request consumption for a given sensor in a given service location
Parameters
----------
service_location_id : int
sensor_id : int
start : int | dt.datetime | pd.Timestamp
end : int | dt.datetime | pd.Timestamp
start and end support epoch (in milliseconds),
datetime and Pandas Timestamp
timezone-naive datetimes are assumed to be in UTC
aggregation : int
1 = 5 min values (only available for the last 14 days)
2 = hourly values
3 = daily values
4 = monthly values
5 = quarterly values
Returns
-------
dict
|
entailment
|
def _get_consumption(self, url, start, end, aggregation):
"""
Shared request logic for both the get_consumption and
get_sensor_consumption methods.
Parameters
----------
url : str
start : dt.datetime
end : dt.datetime
aggregation : int
Returns
-------
dict
"""
start = self._to_milliseconds(start)
end = self._to_milliseconds(end)
headers = {"Authorization": "Bearer {}".format(self.access_token)}
params = {
"aggregation": aggregation,
"from": start,
"to": end
}
r = requests.get(url, headers=headers, params=params)
r.raise_for_status()
return r.json()
|
Shared request logic for both the get_consumption and
get_sensor_consumption methods.
Parameters
----------
url : str
start : dt.datetime
end : dt.datetime
aggregation : int
Returns
-------
dict
|
entailment
|
def get_events(self, service_location_id, appliance_id, start, end,
max_number=None):
"""
Request events for a given appliance
Parameters
----------
service_location_id : int
appliance_id : int
start : int | dt.datetime | pd.Timestamp
end : int | dt.datetime | pd.Timestamp
start and end support epoch (in milliseconds),
datetime and Pandas Timestamp
timezone-naive datetimes are assumed to be in UTC
max_number : int, optional
The maximum number of events that should be returned by this query
Default returns all events in the selected period
Returns
-------
dict
"""
start = self._to_milliseconds(start)
end = self._to_milliseconds(end)
url = urljoin(URLS['servicelocation'], service_location_id, "events")
headers = {"Authorization": "Bearer {}".format(self.access_token)}
params = {
"from": start,
"to": end,
"applianceId": appliance_id,
"maxNumber": max_number
}
r = requests.get(url, headers=headers, params=params)
r.raise_for_status()
return r.json()
|
Request events for a given appliance
Parameters
----------
service_location_id : int
appliance_id : int
start : int | dt.datetime | pd.Timestamp
end : int | dt.datetime | pd.Timestamp
start and end support epoch (in milliseconds),
datetime and Pandas Timestamp
timezone-naive datetimes are assumed to be in UTC
max_number : int, optional
The maximum number of events that should be returned by this query
Default returns all events in the selected period
Returns
-------
dict
|
entailment
|
def actuator_on(self, service_location_id, actuator_id, duration=None):
"""
Turn actuator on
Parameters
----------
service_location_id : int
actuator_id : int
duration : int, optional
300, 900, 1800 or 3600, specifying the time in seconds the actuator
should be turned on. Any other value results in turning on for an
undetermined period of time.
Returns
-------
requests.Response
"""
return self._actuator_on_off(
on_off='on', service_location_id=service_location_id,
actuator_id=actuator_id, duration=duration)
|
Turn actuator on
Parameters
----------
service_location_id : int
actuator_id : int
duration : int, optional
300, 900, 1800 or 3600, specifying the time in seconds the actuator
should be turned on. Any other value results in turning on for an
undetermined period of time.
Returns
-------
requests.Response
|
entailment
|
def actuator_off(self, service_location_id, actuator_id, duration=None):
"""
Turn actuator off
Parameters
----------
service_location_id : int
actuator_id : int
duration : int, optional
300, 900, 1800 or 3600, specifying the time in seconds the actuator
should be turned off. Any other value results in turning off for an
undetermined period of time.
Returns
-------
requests.Response
"""
return self._actuator_on_off(
on_off='off', service_location_id=service_location_id,
actuator_id=actuator_id, duration=duration)
|
Turn actuator off
Parameters
----------
service_location_id : int
actuator_id : int
duration : int, optional
300, 900, 1800 or 3600, specifying the time in seconds the actuator
should be turned off. Any other value results in turning off for an
undetermined period of time.
Returns
-------
requests.Response
|
entailment
|
def _actuator_on_off(self, on_off, service_location_id, actuator_id,
duration=None):
"""
Turn actuator on or off
Parameters
----------
on_off : str
'on' or 'off'
service_location_id : int
actuator_id : int
duration : int, optional
300, 900, 1800 or 3600, specifying the time in seconds the actuator
should be turned on or off. Any other value results in switching for an
undetermined period of time.
Returns
-------
requests.Response
"""
url = urljoin(URLS['servicelocation'], service_location_id,
"actuator", actuator_id, on_off)
headers = {"Authorization": "Bearer {}".format(self.access_token)}
if duration is not None:
data = {"duration": duration}
else:
data = {}
r = requests.post(url, headers=headers, json=data)
r.raise_for_status()
return r
|
Turn actuator on or off
Parameters
----------
on_off : str
'on' or 'off'
service_location_id : int
actuator_id : int
duration : int, optional
300, 900, 1800 or 3600, specifying the time in seconds the actuator
should be turned on or off. Any other value results in switching for an
undetermined period of time.
Returns
-------
requests.Response
|
entailment
|
def get_consumption_dataframe(self, service_location_id, start, end,
aggregation, sensor_id=None, localize=False,
raw=False):
"""
Extends get_consumption() AND get_sensor_consumption(),
parses the results into a Pandas DataFrame
Parameters
----------
service_location_id : int
start : dt.datetime | int
end : dt.datetime | int
timezone-naive datetimes are assumed to be in UTC
epoch timestamps need to be in milliseconds
aggregation : int
sensor_id : int, optional
If a sensor id is passed, api method get_sensor_consumption will
be used otherwise (by default),
the get_consumption method will be used: this returns Electricity
and Solar consumption and production.
localize : bool
default False
default returns timestamps in UTC
if True, timezone is fetched from service location info and
Data Frame is localized
raw : bool
default False
if True: Return the data "as is" from the server
if False: convert the 'alwaysOn' value to Wh.
(the server returns this value as the sum of the power,
measured in 5 minute blocks. This means that it is 12 times
higher than the consumption in Wh.
See https://github.com/EnergieID/smappy/issues/24)
Returns
-------
pd.DataFrame
"""
import pandas as pd
if sensor_id is None:
data = self.get_consumption(
service_location_id=service_location_id, start=start,
end=end, aggregation=aggregation, raw=raw)
consumptions = data['consumptions']
else:
data = self.get_sensor_consumption(
service_location_id=service_location_id, sensor_id=sensor_id,
start=start, end=end, aggregation=aggregation)
# yeah please someone explain me why they had to name this
# differently...
consumptions = data['records']
df = pd.DataFrame.from_dict(consumptions)
if not df.empty:
df.set_index('timestamp', inplace=True)
df.index = pd.to_datetime(df.index, unit='ms', utc=True)
if localize:
info = self.get_service_location_info(
service_location_id=service_location_id)
timezone = info['timezone']
df = df.tz_convert(timezone)
return df
|
Extends get_consumption() AND get_sensor_consumption(),
parses the results into a Pandas DataFrame
Parameters
----------
service_location_id : int
start : dt.datetime | int
end : dt.datetime | int
timezone-naive datetimes are assumed to be in UTC
epoch timestamps need to be in milliseconds
aggregation : int
sensor_id : int, optional
If a sensor id is passed, api method get_sensor_consumption will
be used otherwise (by default),
the get_consumption method will be used: this returns Electricity
and Solar consumption and production.
localize : bool
default False
default returns timestamps in UTC
if True, timezone is fetched from service location info and
Data Frame is localized
raw : bool
default False
if True: Return the data "as is" from the server
if False: convert the 'alwaysOn' value to Wh.
(the server returns this value as the sum of the power,
measured in 5 minute blocks. This means that it is 12 times
higher than the consumption in Wh.
See https://github.com/EnergieID/smappy/issues/24)
Returns
-------
pd.DataFrame
|
entailment
|
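The DataFrame construction reduces to indexing on the epoch-millisecond timestamps and converting them to UTC datetimes; a standalone sketch with made-up records:

import pandas as pd

records = [
    {'timestamp': 1483228800000, 'value': 12.5},
    {'timestamp': 1483232400000, 'value': 13.1},
]
df = pd.DataFrame.from_dict(records)
df.set_index('timestamp', inplace=True)
df.index = pd.to_datetime(df.index, unit='ms', utc=True)
# Optional localization, mirroring the localize=True branch above:
df = df.tz_convert('Europe/Brussels')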
def _to_milliseconds(self, time):
"""
Converts a datetime-like object to epoch, in milliseconds
Timezone-naive datetime objects are assumed to be in UTC
Parameters
----------
time : dt.datetime | pd.Timestamp | int
Returns
-------
int
epoch milliseconds
"""
if isinstance(time, dt.datetime):
if time.tzinfo is None:
time = time.replace(tzinfo=pytz.UTC)
return int(time.timestamp() * 1e3)
elif isinstance(time, numbers.Number):
return time
else:
raise NotImplementedError(
    "Time format not supported. Use milliseconds since epoch, "
    "Datetime or Pandas Datetime")
|
Converts a datetime-like object to epoch, in milliseconds
Timezone-naive datetime objects are assumed to be in UTC
Parameters
----------
time : dt.datetime | pd.Timestamp | int
Returns
-------
int
epoch milliseconds
|
entailment
|
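A quick check of the conversion logic above, reproduced inline with a hypothetical timestamp (naive datetimes are treated as UTC):

import datetime as dt
import pytz

naive = dt.datetime(2017, 1, 1)                    # assumed to be UTC
aware = dt.datetime(2017, 1, 1, tzinfo=pytz.UTC)   # explicitly UTC
assert int(naive.replace(tzinfo=pytz.UTC).timestamp() * 1e3) \
    == int(aware.timestamp() * 1e3) == 1483228800000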
def _basic_post(self, url, data=None):
"""
Because basically every post request is the same
Parameters
----------
url : str
data : str, optional
Returns
-------
requests.Response
"""
_url = urljoin(self.base_url, url)
r = self.session.post(_url, data=data, headers=self.headers, timeout=5)
r.raise_for_status()
return r
|
Because basically every post request is the same
Parameters
----------
url : str
data : str, optional
Returns
-------
requests.Response
|
entailment
|
def logon(self, password='admin'):
"""
Parameters
----------
password : str
default 'admin'
Returns
-------
dict
"""
r = self._basic_post(url='logon', data=password)
return r.json()
|
Parameters
----------
password : str
default 'admin'
Returns
-------
dict
|
entailment
|
def active_power(self):
"""
Takes the sum of all instantaneous active power values
Returns the total in kW
Returns
-------
float
"""
inst = self.load_instantaneous()
values = [float(i['value']) for i in inst if i['key'].endswith('ActivePower')]
return sum(values) / 1000
|
Takes the sum of all instantaneous active power values
Returns the total in kW
Returns
-------
float
|
entailment
|
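For example, with hypothetical instantaneous readings (per-phase values in watts), the sum divided by 1000 gives the total in kW:

inst = [
    {'key': 'phase0ActivePower', 'value': '230.0'},
    {'key': 'phase1ActivePower', 'value': '120.0'},
    {'key': 'phase0Cosfi', 'value': '0.95'},  # ignored: key does not end in ActivePower
]
values = [float(i['value']) for i in inst if i['key'].endswith('ActivePower')]
assert sum(values) / 1000 == 0.35  # kW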
def active_cosfi(self):
"""
Takes the average of all instantaneous cosfi values
Returns
-------
float
"""
inst = self.load_instantaneous()
values = [float(i['value']) for i in inst if i['key'].endswith('Cosfi')]
return sum(values) / len(values)
|
Takes the average of all instantaneous cosfi values
Returns
-------
float
|
entailment
|
def on_command_control(self, val_id):
"""
Parameters
----------
val_id : str
Returns
-------
requests.Response
"""
data = "control,controlId=1|" + val_id
return self._basic_post(url='commandControlPublic', data=data)
|
Parameters
----------
val_id : str
Returns
-------
requests.Response
|
entailment
|
def off_command_control(self, val_id):
"""
Parameters
----------
val_id : str
Returns
-------
requests.Response
"""
data = "control,controlId=0|" + val_id
return self._basic_post(url='commandControlPublic', data=data)
|
Parameters
----------
val_id : str
Returns
-------
requests.Response
|
entailment
|
def delete_command_control(self, val_id):
"""
Parameters
----------
val_id : str
Returns
-------
requests.Response
"""
data = "delete,controlId=" + val_id
return self._basic_post(url='commandControlPublic', data=data)
|
Parameters
----------
val_id : str
Returns
-------
requests.Response
|
entailment
|
def delete_command_control_timers(self, val_id):
"""
Parameters
----------
val_id : str
Returns
-------
requests.Response
"""
data = "deleteTimers,controlId=" + val_id
return self._basic_post(url='commandControlPublic', data=data)
|
Parameters
----------
val_id : str
Returns
-------
requests.Response
|
entailment
|
def select_logfile(self, logfile):
"""
Parameters
----------
logfile : str
Returns
-------
dict
"""
data = 'logFileSelect,' + logfile
r = self._basic_post(url='logBrowser', data=data)
return r.json()
|
Parameters
----------
logfile : str
Returns
-------
dict
|
entailment
|
def getInterfaceInAllSpeeds(interface, endpoint_list, class_descriptor_list=()):
"""
Produce similar fs, hs and ss interface and endpoints descriptors.
Should be useful for devices desiring to work in all 3 speeds with maximum
endpoint wMaxPacketSize. Reduces data duplication from descriptor
declarations.
Not intended to cover fancy combinations.
interface (dict):
Keyword arguments for
getDescriptor(USBInterfaceDescriptor, ...)
in all speeds.
bNumEndpoints must not be provided.
endpoint_list (list of dicts)
Each dict represents an endpoint, and may contain the following items:
- "endpoint": required, contains keyword arguments for
getDescriptor(USBEndpointDescriptorNoAudio, ...)
or
getDescriptor(USBEndpointDescriptor, ...)
The with-audio variant is picked when its extra fields are assigned a
value.
wMaxPacketSize may be missing, in which case it will be set to the
maximum size for given speed and endpoint type.
bmAttributes must be provided.
If bEndpointAddress is zero (excluding direction bit) on the first
endpoint, endpoints will be assigned their rank in this list,
starting at 1. Their direction bit is preserved.
If bInterval is present on a INT or ISO endpoint, it must be in
millisecond units (but may not be an integer), and will be converted
to the nearest integer millisecond for full-speed descriptor, and
nearest possible interval for high- and super-speed descriptors.
If bInterval is present on a BULK endpoint, it is set to zero on
full-speed descriptor and used as provided on high- and super-speed
descriptors.
- "superspeed": optional, contains keyword arguments for
getDescriptor(USBSSEPCompDescriptor, ...)
- "superspeed_iso": optional, contains keyword arguments for
getDescriptor(USBSSPIsocEndpointDescriptor, ...)
Must be provided and non-empty only when endpoint is isochronous and
"superspeed" dict has "bmAttributes" bit 7 set.
class_descriptor_list (list of descriptors of any type)
Descriptors to insert in all speeds between the interface descriptor and
endpoint descriptors.
Returns a 3-tuple of lists:
- fs descriptors
- hs descriptors
- ss descriptors
"""
interface = getDescriptor(
USBInterfaceDescriptor,
bNumEndpoints=len(endpoint_list),
**interface
)
class_descriptor_list = list(class_descriptor_list)
fs_list = [interface] + class_descriptor_list
hs_list = [interface] + class_descriptor_list
ss_list = [interface] + class_descriptor_list
need_address = (
endpoint_list[0]['endpoint'].get(
'bEndpointAddress',
0,
) & ~ch9.USB_DIR_IN == 0
)
for index, endpoint in enumerate(endpoint_list, 1):
endpoint_kw = endpoint['endpoint'].copy()
transfer_type = endpoint_kw[
'bmAttributes'
] & ch9.USB_ENDPOINT_XFERTYPE_MASK
fs_max, hs_max, ss_max = _MAX_PACKET_SIZE_DICT[transfer_type]
if need_address:
endpoint_kw['bEndpointAddress'] = index | (
endpoint_kw.get('bEndpointAddress', 0) & ch9.USB_DIR_IN
)
klass = (
USBEndpointDescriptor
if 'bRefresh' in endpoint_kw or 'bSynchAddress' in endpoint_kw else
USBEndpointDescriptorNoAudio
)
interval = endpoint_kw.pop('bInterval', _MARKER)
if interval is _MARKER:
fs_interval = hs_interval = 0
else:
if transfer_type == ch9.USB_ENDPOINT_XFER_BULK:
fs_interval = 0
hs_interval = interval
else: # USB_ENDPOINT_XFER_ISOC or USB_ENDPOINT_XFER_INT
fs_interval = max(1, min(255, round(interval)))
# 8 is the number of microframes in a millisecond
hs_interval = max(
1,
min(16, int(round(1 + math.log(interval * 8, 2)))),
)
packet_size = endpoint_kw.pop('wMaxPacketSize', _MARKER)
if packet_size is _MARKER:
fs_packet_size = fs_max
hs_packet_size = hs_max
ss_packet_size = ss_max
else:
fs_packet_size = min(fs_max, packet_size)
hs_packet_size = min(hs_max, packet_size)
ss_packet_size = min(ss_max, packet_size)
fs_list.append(getDescriptor(
klass,
wMaxPacketSize=fs_packet_size,
bInterval=fs_interval,
**endpoint_kw
))
hs_list.append(getDescriptor(
klass,
wMaxPacketSize=hs_packet_size,
bInterval=hs_interval,
**endpoint_kw
))
ss_list.append(getDescriptor(
klass,
wMaxPacketSize=ss_packet_size,
bInterval=hs_interval,
**endpoint_kw
))
ss_companion_kw = endpoint.get('superspeed', _EMPTY_DICT)
ss_list.append(getDescriptor(
USBSSEPCompDescriptor,
**ss_companion_kw
))
ssp_iso_kw = endpoint.get('superspeed_iso', _EMPTY_DICT)
if bool(ssp_iso_kw) != (
endpoint_kw.get('bmAttributes', 0) &
ch9.USB_ENDPOINT_XFERTYPE_MASK ==
ch9.USB_ENDPOINT_XFER_ISOC and
bool(ch9.USB_SS_SSP_ISOC_COMP(
ss_companion_kw.get('bmAttributes', 0),
))
):
raise ValueError('Inconsistent isochronous companion')
if ssp_iso_kw:
ss_list.append(getDescriptor(
USBSSPIsocEndpointDescriptor,
**ssp_iso_kw
))
return (fs_list, hs_list, ss_list)
|
Produce similar fs, hs and ss interface and endpoints descriptors.
Should be useful for devices desiring to work in all 3 speeds with maximum
endpoint wMaxPacketSize. Reduces data duplication from descriptor
declarations.
Not intended to cover fancy combinations.
interface (dict):
Keyword arguments for
getDescriptor(USBInterfaceDescriptor, ...)
in all speeds.
bNumEndpoints must not be provided.
endpoint_list (list of dicts)
Each dict represents an endpoint, and may contain the following items:
- "endpoint": required, contains keyword arguments for
getDescriptor(USBEndpointDescriptorNoAudio, ...)
or
getDescriptor(USBEndpointDescriptor, ...)
The with-audio variant is picked when its extra fields are assigned a
value.
wMaxPacketSize may be missing, in which case it will be set to the
maximum size for given speed and endpoint type.
bmAttributes must be provided.
If bEndpointAddress is zero (excluding direction bit) on the first
endpoint, endpoints will be assigned their rank in this list,
starting at 1. Their direction bit is preserved.
If bInterval is present on a INT or ISO endpoint, it must be in
millisecond units (but may not be an integer), and will be converted
to the nearest integer millisecond for full-speed descriptor, and
nearest possible interval for high- and super-speed descriptors.
If bInterval is present on a BULK endpoint, it is set to zero on
full-speed descriptor and used as provided on high- and super-speed
descriptors.
- "superspeed": optional, contains keyword arguments for
getDescriptor(USBSSEPCompDescriptor, ...)
- "superspeed_iso": optional, contains keyword arguments for
getDescriptor(USBSSPIsocEndpointDescriptor, ...)
Must be provided and non-empty only when endpoint is isochronous and
"superspeed" dict has "bmAttributes" bit 7 set.
class_descriptor_list (list of descriptors of any type)
Descriptors to insert in all speeds between the interface descriptor and
endpoint descriptors.
Returns a 3-tuple of lists:
- fs descriptors
- hs descriptors
- ss descriptors
|
entailment
|
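The high-speed (and super-speed) bInterval computed above is an exponent: the polling interval is 2**(bInterval - 1) microframes of 125 µs, hence the 1 + log2(ms * 8) formula. A small standalone check with hypothetical intervals:

import math

def hs_interval(ms):
    # Mirrors the computation above: clamp to [1, 16], round to nearest exponent.
    return max(1, min(16, int(round(1 + math.log(ms * 8, 2)))))

assert hs_interval(0.125) == 1  # one 125 microsecond microframe
assert hs_interval(1) == 4      # 1 ms = 8 microframes = 2 ** (4 - 1)
assert hs_interval(4) == 6      # 4 ms = 32 microframes = 2 ** (6 - 1)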
def getDescriptor(klass, **kw):
"""
Automatically fills bLength and bDescriptorType.
"""
# XXX: ctypes Structure.__init__ ignores arguments which do not exist
# as structure fields. So check it.
# This is annoying, but not doing it is a huge waste of time for the
# developer.
empty = klass()
assert hasattr(empty, 'bLength')
assert hasattr(empty, 'bDescriptorType')
unknown = [x for x in kw if not hasattr(empty, x)]
if unknown:
raise TypeError('Unknown fields %r' % (unknown, ))
# XXX: not very pythonic...
return klass(
bLength=ctypes.sizeof(klass),
# pylint: disable=protected-access
bDescriptorType=klass._bDescriptorType,
# pylint: enable=protected-access
**kw
)
|
Automatically fills bLength and bDescriptorType.
|
entailment
|
def getOSDesc(interface, ext_list):
"""
Return an OS description header.
interface (int)
Related interface number.
ext_list (list of OSExtCompatDesc or OSExtPropDesc)
List of instances of extended descriptors.
"""
try:
ext_type, = {type(x) for x in ext_list}
except ValueError:
raise TypeError('Extensions of a single type are required.')
if issubclass(ext_type, OSExtCompatDesc):
wIndex = 4
kw = {
'b': OSDescHeaderBCount(
bCount=len(ext_list),
Reserved=0,
),
}
elif issubclass(ext_type, OSExtPropDescHead):
wIndex = 5
kw = {
'wCount': len(ext_list),
}
else:
raise TypeError('Extensions of unexpected type')
ext_list_type = ext_type * len(ext_list)
klass = type(
'OSDesc',
(OSDescHeader, ),
{
'_fields_': [
('ext_list', ext_list_type),
],
},
)
return klass(
interface=interface,
dwLength=ctypes.sizeof(klass),
bcdVersion=1,
wIndex=wIndex,
ext_list=ext_list_type(*ext_list),
**kw
)
|
Return an OS description header.
interface (int)
Related interface number.
ext_list (list of OSExtCompatDesc or OSExtPropDesc)
List of instances of extended descriptors.
|
entailment
|
def getOSExtPropDesc(data_type, name, value):
"""
Returns an OS extension property descriptor.
data_type (int)
See wPropertyDataType documentation.
name (string)
See PropertyName documentation.
value (string)
See PropertyData documentation.
NULL chars must be explicitly included in the value when needed,
this function does not add any terminating NULL for example.
"""
klass = type(
'OSExtPropDesc',
(OSExtPropDescHead, ),
{
'_fields_': [
('bPropertyName', ctypes.c_char * len(name)),
('dwPropertyDataLength', le32),
('bProperty', ctypes.c_char * len(value)),
],
}
)
return klass(
dwSize=ctypes.sizeof(klass),
dwPropertyDataType=data_type,
wPropertyNameLength=len(name),
bPropertyName=name,
dwPropertyDataLength=len(value),
bProperty=value,
)
|
Returns an OS extension property descriptor.
data_type (int)
See wPropertyDataType documentation.
name (string)
See PropertyName documentation.
value (string)
See PropertyData documentation.
NULL chars must be explicitly included in the value when needed,
this function does not add any terminating NULL for example.
|
entailment
|
def getDescsV2(flags, fs_list=(), hs_list=(), ss_list=(), os_list=()):
"""
Return a FunctionFS descriptor suitable for serialisation.
flags (int)
Any combination of VIRTUAL_ADDR, EVENTFD, ALL_CTRL_RECIP,
CONFIG0_SETUP.
{fs,hs,ss,os}_list (list of descriptors)
Instances of the following classes:
{fs,hs,ss}_list:
USBInterfaceDescriptor
USBEndpointDescriptorNoAudio
USBEndpointDescriptor
USBSSEPCompDescriptor
USBSSPIsocEndpointDescriptor
USBOTGDescriptor
USBOTG20Descriptor
USBInterfaceAssocDescriptor
TODO: HID
All (non-empty) lists must define the same number of interfaces
and endpoints, and endpoint descriptors must be given in the same
order, bEndpointAddress-wise.
os_list:
OSDesc
"""
count_field_list = []
descr_field_list = []
kw = {}
for descriptor_list, flag, prefix, allowed_descriptor_klass in (
(fs_list, HAS_FS_DESC, 'fs', USBDescriptorHeader),
(hs_list, HAS_HS_DESC, 'hs', USBDescriptorHeader),
(ss_list, HAS_SS_DESC, 'ss', USBDescriptorHeader),
(os_list, HAS_MS_OS_DESC, 'os', OSDescHeader),
):
if descriptor_list:
for index, descriptor in enumerate(descriptor_list):
if not isinstance(descriptor, allowed_descriptor_klass):
raise TypeError(
'Descriptor %r of unexpected type: %r' % (
index,
type(descriptor),
),
)
descriptor_map = [
('desc_%i' % x, y)
for x, y in enumerate(descriptor_list)
]
flags |= flag
count_name = prefix + 'count'
descr_name = prefix + 'descr'
count_field_list.append((count_name, le32))
descr_type = type(
't_' + descr_name,
(ctypes.LittleEndianStructure, ),
{
'_pack_': 1,
'_fields_': [
(x, type(y))
for x, y in descriptor_map
],
}
)
descr_field_list.append((descr_name, descr_type))
kw[count_name] = len(descriptor_map)
kw[descr_name] = descr_type(**dict(descriptor_map))
elif flags & flag:
raise ValueError(
'Flag %r set but descriptor list empty, cannot generate type.' % (
FLAGS.get(flag),
)
)
klass = type(
'DescsV2_0x%02x' % (
flags & (
HAS_FS_DESC |
HAS_HS_DESC |
HAS_SS_DESC |
HAS_MS_OS_DESC
),
# XXX: include contained descriptors type information ? (and name ?)
),
(DescsHeadV2, ),
{
'_fields_': count_field_list + descr_field_list,
},
)
return klass(
magic=DESCRIPTORS_MAGIC_V2,
length=ctypes.sizeof(klass),
flags=flags,
**kw
)
|
Return a FunctionFS descriptor suitable for serialisation.
flags (int)
Any combination of VIRTUAL_ADDR, EVENTFD, ALL_CTRL_RECIP,
CONFIG0_SETUP.
{fs,hs,ss,os}_list (list of descriptors)
Instances of the following classes:
{fs,hs,ss}_list:
USBInterfaceDescriptor
USBEndpointDescriptorNoAudio
USBEndpointDescriptor
USBSSEPCompDescriptor
USBSSPIsocEndpointDescriptor
USBOTGDescriptor
USBOTG20Descriptor
USBInterfaceAssocDescriptor
TODO: HID
All (non-empty) lists must define the same number of interfaces
and endpoints, and endpoint descriptors must be given in the same
order, bEndpointAddress-wise.
os_list:
OSDesc
|
entailment
|
def getStrings(lang_dict):
"""
Return a FunctionFS descriptor suitable for serialisation.
lang_dict (dict)
Key: language ID (ex: 0x0409 for en-us)
Value: list of unicode objects
All values must have the same number of items.
"""
field_list = []
kw = {}
try:
str_count = len(next(iter(lang_dict.values())))
except StopIteration:
str_count = 0
else:
for lang, string_list in lang_dict.items():
if len(string_list) != str_count:
raise ValueError('All values must have the same string count.')
field_id = 'strings_%04x' % lang
strings = b'\x00'.join(x.encode('utf-8') for x in string_list) + b'\x00'
field_type = type(
'String',
(StringBase, ),
{
'_fields_': [
('strings', ctypes.c_char * len(strings)),
],
},
)
field_list.append((field_id, field_type))
kw[field_id] = field_type(
lang=lang,
strings=strings,
)
klass = type(
'Strings',
(StringsHead, ),
{
'_fields_': field_list,
},
)
return klass(
magic=STRINGS_MAGIC,
length=ctypes.sizeof(klass),
str_count=str_count,
lang_count=len(lang_dict),
**kw
)
|
Return a FunctionFS descriptor suitable for serialisation.
lang_dict (dict)
Key: language ID (ex: 0x0409 for en-us)
Value: list of unicode objects
All values must have the same number of items.
|
entailment
|
def serialise(structure):
"""
structure (ctypes.Structure)
The structure to serialise.
Returns a ctypes.c_char array.
Does not copy memory.
"""
return ctypes.cast(
ctypes.pointer(structure),
ctypes.POINTER(ctypes.c_char * ctypes.sizeof(structure)),
).contents
|
structure (ctypes.Structure)
The structure to serialise.
Returns a ctypes.c_char array.
Does not copy memory.
|
entailment
|
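A standalone illustration of the zero-copy cast, using a hypothetical packed structure rather than a real descriptor:

import ctypes

class Point(ctypes.LittleEndianStructure):
    _pack_ = 1
    _fields_ = [('x', ctypes.c_uint16), ('y', ctypes.c_uint16)]

def serialise(structure):
    # Same cast as above: view the structure's memory as a c_char array.
    return ctypes.cast(
        ctypes.pointer(structure),
        ctypes.POINTER(ctypes.c_char * ctypes.sizeof(structure)),
    ).contents

point = Point(x=1, y=0x0203)
assert bytes(serialise(point)) == b'\x01\x00\x03\x02'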
def halt(self, request_type):
"""
Halt current endpoint.
"""
try:
if request_type & ch9.USB_DIR_IN:
self.read(0)
else:
self.write(b'')
except IOError as exc:
if exc.errno != errno.EL2HLT:
raise
else:
raise ValueError('halt did not return EL2HLT ?')
|
Halt current endpoint.
|
entailment
|
def getRealInterfaceNumber(self, interface):
"""
Returns the host-visible interface number, or None if there is no such
interface.
"""
try:
return self._ioctl(INTERFACE_REVMAP, interface)
except IOError as exc:
if exc.errno == errno.EDOM:
return None
raise
|
Returns the host-visible interface number, or None if there is no such
interface.
|
entailment
|
def getDescriptor(self):
"""
Returns the currently active endpoint descriptor
(depending on current USB speed).
"""
result = USBEndpointDescriptor()
self._ioctl(ENDPOINT_DESC, result, True)
return result
|
Returns the currently active endpoint descriptor
(depending on current USB speed).
|
entailment
|
def halt(self):
"""
Halt current endpoint.
"""
try:
self._halt()
except IOError as exc:
if exc.errno != errno.EBADMSG:
raise
else:
raise ValueError('halt did not return EBADMSG ?')
self._halted = True
|
Halt current endpoint.
|
entailment
|
def close(self):
"""
Close all endpoint file descriptors.
"""
ep_list = self._ep_list
while ep_list:
ep_list.pop().close()
self._closed = True
|
Close all endpoint file descriptors.
|
entailment
|
def onSetup(self, request_type, request, value, index, length):
"""
Called when a setup USB transaction was received.
Default implementation:
- handles USB_REQ_GET_STATUS on interface and endpoints
- handles USB_REQ_CLEAR_FEATURE(USB_ENDPOINT_HALT) on endpoints
- handles USB_REQ_SET_FEATURE(USB_ENDPOINT_HALT) on endpoints
- halts on everything else
If this method raises anything, endpoint 0 is halted by its caller and
exception is let through.
May be overridden in subclass.
"""
if (request_type & ch9.USB_TYPE_MASK) == ch9.USB_TYPE_STANDARD:
recipient = request_type & ch9.USB_RECIP_MASK
is_in = (request_type & ch9.USB_DIR_IN) == ch9.USB_DIR_IN
if request == ch9.USB_REQ_GET_STATUS:
if is_in and length == 2:
if recipient == ch9.USB_RECIP_INTERFACE:
if value == 0:
status = 0
if index == 0:
if self.function_remote_wakeup_capable:
status |= 1 << 0
if self.function_remote_wakeup:
status |= 1 << 1
self.ep0.write(struct.pack('<H', status)[:length])
return
elif recipient == ch9.USB_RECIP_ENDPOINT:
if value == 0:
try:
endpoint = self.getEndpoint(index)
except IndexError:
pass
else:
status = 0
if endpoint.isHalted():
status |= 1 << 0
self.ep0.write(
struct.pack('<H', status)[:length],
)
return
elif request == ch9.USB_REQ_CLEAR_FEATURE:
if not is_in and length == 0:
if recipient == ch9.USB_RECIP_ENDPOINT:
if value == ch9.USB_ENDPOINT_HALT:
try:
endpoint = self.getEndpoint(index)
except IndexError:
pass
else:
endpoint.clearHalt()
self.ep0.read(0)
return
elif recipient == ch9.USB_RECIP_INTERFACE:
if value == ch9.USB_INTRF_FUNC_SUSPEND:
if self.function_remote_wakeup_capable:
self.disableRemoteWakeup()
self.ep0.read(0)
return
elif request == ch9.USB_REQ_SET_FEATURE:
if not is_in and length == 0:
if recipient == ch9.USB_RECIP_ENDPOINT:
if value == ch9.USB_ENDPOINT_HALT:
try:
endpoint = self.getEndpoint(index)
except IndexError:
pass
else:
endpoint.halt()
self.ep0.read(0)
return
elif recipient == ch9.USB_RECIP_INTERFACE:
if value == ch9.USB_INTRF_FUNC_SUSPEND:
if self.function_remote_wakeup_capable:
self.enableRemoteWakeup()
self.ep0.read(0)
return
self.ep0.halt(request_type)
|
Called when a setup USB transaction was received.
Default implementation:
- handles USB_REQ_GET_STATUS on interface and endpoints
- handles USB_REQ_CLEAR_FEATURE(USB_ENDPOINT_HALT) on endpoints
- handles USB_REQ_SET_FEATURE(USB_ENDPOINT_HALT) on endpoints
- halts on everything else
If this method raises anything, endpoint 0 is halted by its caller and
exception is let through.
May be overridden in subclass.
|
entailment
|
def main():
"""
Slowly writes to stdout, without emitting a newline so any output
buffering (or input for next pipeline command) can be detected.
"""
now = datetime.datetime.now
try:
while True:
sys.stdout.write(str(now()) + ' ')
time.sleep(1)
except KeyboardInterrupt:
pass
except IOError as exc:
if exc.errno != errno.EPIPE:
raise
|
Slowly writes to stdout, without emitting a newline so any output
buffering (or input for next pipeline command) can be detected.
|
entailment
|
def onEnable(self):
"""
The configuration containing this function has been enabled by host.
Endpoints become working files, so submit some read operations.
"""
trace('onEnable')
self._disable()
self._aio_context.submit(self._aio_recv_block_list)
self._real_onCanSend()
self._enabled = True
|
The configuration containing this function has been enabled by host.
Endpoints become working files, so submit some read operations.
|
entailment
|
def _disable(self):
"""
The configuration containing this function has been disabled by host.
Endpoints do not work anymore, so cancel AIO operation blocks.
"""
if self._enabled:
self._real_onCannotSend()
has_cancelled = 0
for block in self._aio_recv_block_list + self._aio_send_block_list:
try:
self._aio_context.cancel(block)
except OSError as exc:
trace(
'cancelling %r raised: %s' % (block, exc),
)
else:
has_cancelled += 1
if has_cancelled:
noIntr(functools.partial(self._aio_context.getEvents, min_nr=None))
self._enabled = False
|
The configuration containing this function has been disabled by host.
Endpoints do not work anymore, so cancel AIO operation blocks.
|
entailment
|
def onAIOCompletion(self):
"""
Call when eventfd notified events are available.
"""
event_count = self.eventfd.read()
trace('eventfd reports %i events' % event_count)
# Even though eventfd signaled activity, even though it may give us
# some number of pending events, some events seem to have been already
# processed (maybe during io_cancel call ?).
# So do not trust eventfd value, and do not even trust that there must
# be even one event to process.
self._aio_context.getEvents(0)
|
Call when eventfd notified events are available.
|
entailment
|
def write(self, value):
"""
Queue write in kernel.
value (bytes)
Value to send.
"""
aio_block = libaio.AIOBlock(
mode=libaio.AIOBLOCK_MODE_WRITE,
target_file=self.getEndpoint(1),
buffer_list=[bytearray(value)],
offset=0,
eventfd=self.eventfd,
onCompletion=self._onCanSend,
)
self._aio_send_block_list.append(aio_block)
self._aio_context.submit([aio_block])
if len(self._aio_send_block_list) == MAX_PENDING_WRITE_COUNT:
self._onCannotSend()
|
Queue write in kernel.
value (bytes)
Value to send.
|
entailment
|
def pretty_error(error, verbose=False):
"""Return an error message that is easier to read and more useful.
May require updating if the schemas change significantly.
"""
error_loc = ''
if error.path:
while len(error.path) > 0:
path_elem = error.path.popleft()
if type(path_elem) is not int:
if error_loc:
error_loc += '.'
error_loc += path_elem
# elif len(error.path) > 0:
else:
error_loc += '[' + text_type(path_elem) + ']'
error_loc += ': '
# Get error message and remove ugly u'' prefixes
if verbose:
msg = remove_u(text_type(error))
else:
msg = remove_u(error.message)
# Don't reword error messages from our validators,
# only the default error messages from the jsonschema library
if repr(error.schema) == '<unset>':
try:
return error_loc + msg
except UnicodeDecodeError:
return error_loc + msg.decode('utf-8')
# Reword error messages containing regexes
if error.validator == 'pattern' and 'title' in error.schema:
if error.schema['title'] == 'type':
msg = re.sub(r"match '.+'$", 'match the \'type\' field format '
'(lowercase ASCII a-z, 0-9, and hyphens only - and no '
'two hyphens in a row)', msg)
elif error.schema['title'] == 'identifier':
msg = re.sub(r"match '.+'$", 'match the id format '
'([object-type]--[UUIDv4])', msg)
elif error.schema['title'] == 'id':
msg = re.sub(r"match '.+'$", 'start with \'' +
error.validator_value[1:-2] + '--\'', msg)
elif error.schema['title'] == 'timestamp':
msg = re.sub(r"match '.+'$", 'match the timestamp format '
'YYYY-MM-DDTHH:mm:ss[.s+]Z', msg)
elif error.schema['title'] == 'timestamp_millis':
msg = re.sub(r"match '.+'$", 'match the timestamp format '
'YYYY-MM-DDTHH:mm:ss.sssZ (must be precise to the '
'millisecond)', msg)
elif error.schema['title'] == 'relationship_type':
msg = re.sub(r"does not match '.+'$", 'contains invalid '
'characters', msg)
elif error.schema['title'] == 'url-regex':
msg = re.sub(r'match ".+"$', 'match the format '
'of a URL', msg)
elif error.schema['title'] == 'binary':
msg = re.sub(r"does not.+'$", 'must be a base64-encoded string', msg)
elif error.validator == 'pattern' and 'observed_data_refs' in error.schema_path:
msg = "'observed_data_refs' must refer to Observed Data Objects"
elif error.validator == 'pattern' and 'where_sighted_refs' in error.schema_path:
msg = "'where_sighted_refs' must refer to Identity Objects"
# Reword empty array errors
elif type(error.instance) is list and len(error.instance) == 0:
msg = re.sub(r"\[\] is not valid .+$", 'empty arrays are not allowed',
msg)
# Reword custom property errors
elif 'title' in error.schema and error.schema['title'] == 'core':
if error.validator == 'additionalProperties':
msg = re.sub(r"Additional .+$", 'Custom properties must match the '
'proper format (lowercase ASCII a-z, 0-9, and '
'underscores; 3-250 characters)', msg)
elif error.validator == 'not' and 'anyOf' in error.validator_value:
reserved_properties = [y for x in error.validator_value['anyOf'] for y in x['required']]
msg = re.sub(r".+", "Contains a reserved property ('%s')"
% "', '".join(reserved_properties), msg)
elif 'title' in error.schema and error.schema['title'] == 'cyber-observable-core':
if error.validator == 'additionalProperties':
msg = re.sub(r"Additional .+$", 'Custom observable properties must'
' match the proper format (lowercase ASCII a-z, 0-9, '
'and underscores; 3-250 characters)', msg)
elif error.validator == 'additionalProperties':
if 'extensions' in error.schema_path:
msg = re.sub(r"Additional .+$", 'Custom extension keys may only '
'contain alphanumeric characters, dashes, and '
'underscores; 3-256 characters', msg)
# Reword 'is valid under each of' errors
elif error.validator == 'oneOf':
try:
if 'external_references' in error.schema_path:
msg = "If the external reference is a CVE, 'source_name' must be" \
" 'cve' and 'external_id' must be in the CVE format " \
"(CVE-YYYY-NNNN+). If the external reference is a CAPEC, " \
"'source_name' must be 'capec' and 'external_id' must be " \
"in the CAPEC format (CAPEC-N+). If the external reference "\
"is neither, it must contain the 'source_name' property and"\
" at least one of the 'external_id', 'url', or "\
"'description' properties."
elif 'type' in error.instance and error.instance['type'] == 'email-message':
if 'is_multipart' not in error.instance:
msg = "'is_multipart' is a required property"
elif error.instance['is_multipart'] is True:
msg = "Since 'is_multipart' is true, 'body_multipart' must "\
"contain valid 'mime-part-type' objects and the 'body' "\
"property must not be present. "
elif error.instance['is_multipart'] is False:
msg = "Since 'is_multipart' is false, 'body' must be a string"\
" and the 'body_multipart' property must not be present."
elif 'type' in error.instance and error.instance['type'] == 'artifact':
if 'payload_bin' in error.instance and 'url' in error.instance:
msg = "'artifact' object must contain either 'payload_bin' "\
"or 'url' but not both"
elif 'payload_bin' in error.instance:
msg = "'payload_bin' must be base64 encoded and 'hashes', if "\
"present, must contain a valid dictionary of hashes"
elif 'url' in error.instance:
msg = "'url' must be a valid url and 'hashes', which must be "\
"present, must contain a valid hash dictionary"
else:
msg = "'artifact' object must contain either 'payload_bin' "\
"or 'url'"
elif 'type' in error.instance and error.instance['type'] == 'marking-definition':
msg = "'definition' must contain a valid statement, TLP, or "\
"custom marking definition"
elif 'type' in error.instance and error.instance['type'] == 'file':
if (('is_encrypted' not in error.instance or
error.instance['is_encrypted'] is False) and
('encryption_algorithm' in error.instance or
'decryption_key' in error.instance)):
msg = "'file' objects may only contain 'encryption_algorithm'"\
" or 'decryption_key' when 'is_encrypted' is true"
elif 'type' in error.instance and error.instance['type'] == 'network-traffic':
if ('is_active' in error.instance and
error.instance['is_active'] is True and
'end' in error.instance):
msg = "If the 'is_active' property is true, then the "\
"'end' property must not be included."
else:
raise TypeError
except TypeError:
msg = msg + ':\n' + remove_u(text_type(error.schema))
# reword forbidden property or value errors
elif error.validator == 'not':
if 'enum' in error.validator_value:
msg = re.sub(r"\{.+\} is not allowed for '(.+)'$", r"'\g<1>' is "
"not an allowed value", msg)
elif ('target_ref' in error.schema_path or
'source_ref' in error.schema_path):
msg = "Relationships cannot link bundles, marking definitions"\
", sightings, or other relationships. This field must "\
"contain the id of an SDO."
elif 'sighting_of_ref' in error.schema_path:
msg = "'sighting_of_ref' must refer to a STIX Domain Object or "\
"Custom Object"
# Reword 'is not valid under any of the given schemas' errors
elif error.validator == 'anyOf':
try:
if error.instance == {}:
msg = "must contain at least one property from this type."
elif error.instance is None:
msg = "null properties are not allowed in STIX."
elif 'type' in error.instance and error.instance['type'] == 'network-traffic':
if ('src_ref' not in error.instance and
'dst_ref' not in error.instance):
msg = "'network-traffic' objects must contain at least "\
"one of 'src_ref' or 'dst_ref'"
elif 'type' in error.instance and error.instance['type'] in ['process', 'x509-certificate']:
if list(error.instance.keys()) == ['type']:
msg = "must contain at least one property (other than `type`) from this object."
else:
raise TypeError
except TypeError:
msg = msg + ':\n' + remove_u(text_type(error.schema))
return error_loc + msg
|
Return an error message that is easier to read and more useful.
May require updating if the schemas change significantly.
|
entailment
|
def _iter_errors_custom(instance, checks, options):
"""Perform additional validation not possible merely with JSON schemas.
Args:
instance: The STIX object to be validated.
checks: A sequence of callables which do the checks. Each callable
may be written to accept 1 arg, which is the object to check,
or 2 args, which are the object and a ValidationOptions instance.
options: ValidationOptions instance with settings affecting how
validation should be done.
"""
# Perform validation
for v_function in checks:
try:
result = v_function(instance)
except TypeError:
result = v_function(instance, options)
if isinstance(result, Iterable):
for x in result:
yield x
elif result is not None:
yield result
# Validate any child STIX objects
for field in instance:
if type(instance[field]) is list:
for obj in instance[field]:
if _is_stix_obj(obj):
for err in _iter_errors_custom(obj, checks, options):
yield err
|
Perform additional validation not possible merely with JSON schemas.
Args:
instance: The STIX object to be validated.
checks: A sequence of callables which do the checks. Each callable
may be written to accept 1 arg, which is the object to check,
or 2 args, which are the object and a ValidationOptions instance.
options: ValidationOptions instance with settings affecting how
validation should be done.
|
entailment
|
def list_json_files(directory, recursive=False):
"""Return a list of file paths for JSON files within `directory`.
Args:
directory: A path to a directory.
recursive: If ``True``, this function will descend into all
subdirectories.
Returns:
A list of JSON file paths directly under `directory`.
"""
json_files = []
for top, dirs, files in os.walk(directory):
dirs.sort()
# Get paths to each file in `files`
paths = (os.path.join(top, f) for f in sorted(files))
# Add all the .json files to our return collection
json_files.extend(x for x in paths if is_json(x))
if not recursive:
break
return json_files
|
Return a list of file paths for JSON files within `directory`.
Args:
directory: A path to a directory.
recursive: If ``True``, this function will descend into all
subdirectories.
Returns:
A list of JSON file paths directly under `directory`.
|
entailment
|
def get_json_files(files, recursive=False):
"""Return a list of files to validate from `files`. If a member of `files`
is a directory, its children with a ``.json`` extension will be added to
the return value.
Args:
files: A list of file paths and/or directory paths.
recursive: If ``true``, this will descend into any subdirectories
of input directories.
Returns:
A list of file paths to validate.
"""
json_files = []
if not files:
return json_files
for fn in files:
if os.path.isdir(fn):
children = list_json_files(fn, recursive)
json_files.extend(children)
elif is_json(fn):
json_files.append(fn)
else:
continue
if not json_files:
raise NoJSONFileFoundError("No JSON files found!")
return json_files
|
Return a list of files to validate from `files`. If a member of `files`
is a directory, its children with a ``.json`` extension will be added to
the return value.
Args:
files: A list of file paths and/or directory paths.
recursive: If ``true``, this will descend into any subdirectories
of input directories.
Returns:
A list of file paths to validate.
|
entailment
|
def run_validation(options):
"""Validate files based on command line options.
Args:
options: An instance of ``ValidationOptions`` containing options for
this validation run.
"""
if options.files == sys.stdin:
results = validate(options.files, options)
return [FileValidationResults(is_valid=results.is_valid,
filepath='stdin',
object_results=results)]
files = get_json_files(options.files, options.recursive)
results = [validate_file(fn, options) for fn in files]
return results
|
Validate files based on command line options.
Args:
options: An instance of ``ValidationOptions`` containing options for
this validation run.
|
entailment
|
def validate_parsed_json(obj_json, options=None):
"""
Validate objects from parsed JSON. This supports a single object, or a
list of objects. If a single object is given, a single result is
returned. Otherwise, a list of results is returned.
If an error occurs, a ValidationErrorResults instance or list which
includes one of these instances, is returned.
:param obj_json: The parsed json
:param options: Validation options
:return: An ObjectValidationResults instance, or a list of such.
"""
validating_list = isinstance(obj_json, list)
if not options:
options = ValidationOptions()
if not options.no_cache:
init_requests_cache(options.refresh_cache)
results = None
if validating_list:
results = []
for obj in obj_json:
try:
results.append(validate_instance(obj, options))
except SchemaInvalidError as ex:
error_result = ObjectValidationResults(is_valid=False,
object_id=obj.get('id', ''),
errors=[str(ex)])
results.append(error_result)
else:
try:
results = validate_instance(obj_json, options)
except SchemaInvalidError as ex:
error_result = ObjectValidationResults(is_valid=False,
object_id=obj_json.get('id', ''),
errors=[str(ex)])
results = error_result
if not options.no_cache and options.clear_cache:
clear_requests_cache()
return results
|
Validate objects from parsed JSON. This supports a single object, or a
list of objects. If a single object is given, a single result is
returned. Otherwise, a list of results is returned.
If an error occurs, a ValidationErrorResults instance or list which
includes one of these instances, is returned.
:param obj_json: The parsed json
:param options: Validation options
:return: An ObjectValidationResults instance, or a list of such.
|
entailment
|
def validate(in_, options=None):
"""
Validate objects from JSON data in a textual stream.
:param in_: A textual stream of JSON data.
:param options: Validation options
:return: An ObjectValidationResults instance, or a list of such.
"""
obj_json = json.load(in_)
results = validate_parsed_json(obj_json, options)
return results
|
Validate objects from JSON data in a textual stream.
:param in_: A textual stream of JSON data.
:param options: Validation options
:return: An ObjectValidationResults instance, or a list of such.
|
entailment
|
def validate_file(fn, options=None):
"""Validate the input document `fn` according to the options passed in.
If any exceptions are raised during validation, no further validation
will take place.
Args:
fn: The filename of the JSON file to be validated.
options: An instance of ``ValidationOptions``.
Returns:
An instance of FileValidationResults.
"""
file_results = FileValidationResults(filepath=fn)
output.info("Performing JSON schema validation on %s" % fn)
if not options:
options = ValidationOptions(files=fn)
try:
with open(fn) as instance_file:
file_results.object_results = validate(instance_file, options)
except Exception as ex:
if 'Expecting value' in str(ex):
line_no = str(ex).split()[3]
file_results.fatal = ValidationErrorResults(
'Invalid JSON input on line %s' % line_no
)
else:
file_results.fatal = ValidationErrorResults(ex)
msg = ("Unexpected error occurred with file '{fn}'. No further "
"validation will be performed: {error}")
output.info(msg.format(fn=fn, error=str(ex)))
file_results.is_valid = (all(object_result.is_valid
for object_result in file_results.object_results)
and not file_results.fatal)
return file_results
|
Validate the input document `fn` according to the options passed in.
If any exceptions are raised during validation, no further validation
will take place.
Args:
fn: The filename of the JSON file to be validated.
options: An instance of ``ValidationOptions``.
Returns:
An instance of FileValidationResults.
|
entailment
|
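A sketch of validating a file on disk and drilling into the per-object results. The import path is an assumption about the package namespace, and the file name is illustrative.
from stix2validator import ValidationOptions, validate_file  # import path is an assumption

results = validate_file("bundle.json", ValidationOptions())  # illustrative path
print(results.is_valid)
for object_result in results.object_results:
    # Each entry is an ObjectValidationResults with its own errors list.
    print(object_result.is_valid, [str(err) for err in object_result.errors])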
def validate_string(string, options=None):
"""Validate the input `string` according to the options passed in.
If any exceptions are raised during validation, no further validation
will take place.
Args:
string: The string containing the JSON to be validated.
options: An instance of ``ValidationOptions``.
Returns:
An ObjectValidationResults instance, or a list of such.
"""
output.info("Performing JSON schema validation on input string: " + string)
stream = io.StringIO(string)
return validate(stream, options)
|
Validate the input `string` according to the options passed in.
If any exceptions are raised during validation, no further validation
will take place.
Args:
string: The string containing the JSON to be validated.
options: An instance of ``ValidationOptions``.
Returns:
An ObjectValidationResults instance, or a list of such.
|
entailment
|
def load_validator(schema_path, schema):
"""Create a JSON schema validator for the given schema.
Args:
schema_path: The filename of the JSON schema.
schema: A Python object representation of the same schema.
Returns:
An instance of Draft4Validator.
"""
# Get correct prefix based on OS
if os.name == 'nt':
file_prefix = 'file:///'
else:
file_prefix = 'file:'
resolver = RefResolver(file_prefix + schema_path.replace("\\", "/"), schema)
validator = Draft4Validator(schema, resolver=resolver)
return validator
|
Create a JSON schema validator for the given schema.
Args:
schema_path: The filename of the JSON schema.
schema: A Python object representation of the same schema.
Returns:
An instance of Draft4Validator.
|
entailment
|
def find_schema(schema_dir, obj_type):
"""Search the `schema_dir` directory for a schema called `obj_type`.json.
Return the file path of the first match it finds.
"""
schema_filename = obj_type + '.json'
for root, dirnames, filenames in os.walk(schema_dir):
if schema_filename in filenames:
return os.path.join(root, schema_filename)
|
Search the `schema_dir` directory for a schema called `obj_type`.json.
Return the file path of the first match it finds.
|
entailment
|
def load_schema(schema_path):
"""Load the JSON schema at the given path as a Python object.
Args:
schema_path: A filename for a JSON schema.
Returns:
A Python object representation of the schema.
"""
try:
with open(schema_path) as schema_file:
schema = json.load(schema_file)
except ValueError as e:
raise SchemaInvalidError('Invalid JSON in schema or included schema: '
'%s\n%s' % (schema_file.name, str(e)))
return schema
|
Load the JSON schema at the given path as a Python object.
Args:
schema_path: A filename for a JSON schema.
Returns:
A Python object representation of the schema.
|
entailment
|
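A sketch of how find_schema, load_schema and load_validator compose, assuming schema_dir points at a real directory of STIX JSON schemas (the path and the object being validated are illustrative).
schema_dir = "/path/to/stix2-schemas"               # illustrative; must exist for refs to resolve
schema_path = find_schema(schema_dir, "indicator")  # returns None if no indicator.json is found
schema = load_schema(schema_path)
validator = load_validator(schema_path, schema)
for error in validator.iter_errors({"type": "indicator"}):
    print(error.message)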
def _get_error_generator(type, obj, schema_dir=None, version=DEFAULT_VER, default='core'):
"""Get a generator for validating against the schema for the given object type.
Args:
type (str): The object type to find the schema for.
obj: The object to be validated.
schema_dir (str): The path in which to search for schemas.
version (str): The version of the STIX specification to validate
against. Only used to find base schemas when schema_dir is None.
default (str): If the schema for the given type cannot be found, use
the one with this name instead.
Returns:
A generator for errors found when validating the object against the
appropriate schema, or None if schema_dir is None and the schema
cannot be found.
"""
# If no schema directory given, use default for the given STIX version,
# which comes bundled with this package
if schema_dir is None:
schema_dir = os.path.abspath(os.path.dirname(__file__) + '/schemas-'
+ version + '/')
try:
schema_path = find_schema(schema_dir, type)
schema = load_schema(schema_path)
except (KeyError, TypeError):
# Assume a custom object with no schema
try:
schema_path = find_schema(schema_dir, default)
schema = load_schema(schema_path)
except (KeyError, TypeError):
# Only raise an error when checking against default schemas, not custom
if schema_dir is not None:
return None
raise SchemaInvalidError("Cannot locate a schema for the object's "
"type, nor the base schema ({}.json).".format(default))
if type == 'observed-data' and schema_dir is None:
# Validate against schemas for specific observed data object types later.
# If schema_dir is not None the schema is custom and won't need to be modified.
schema['allOf'][1]['properties']['objects'] = {
"objects": {
"type": "object",
"minProperties": 1
}
}
# Don't use custom validator; only check schemas, no additional checks
validator = load_validator(schema_path, schema)
try:
error_gen = validator.iter_errors(obj)
except schema_exceptions.RefResolutionError:
raise SchemaInvalidError('Invalid JSON schema: a JSON '
'reference failed to resolve')
return error_gen
|
Get a generator for validating against the schema for the given object type.
Args:
type (str): The object type to find the schema for.
obj: The object to be validated.
schema_dir (str): The path in which to search for schemas.
version (str): The version of the STIX specification to validate
against. Only used to find base schemas when schema_dir is None.
default (str): If the schema for the given type cannot be found, use
the one with this name instead.
Returns:
A generator for errors found when validating the object against the
appropriate schema, or None if schema_dir is None and the schema
cannot be found.
|
entailment
|
def _get_musts(options):
"""Return the list of 'MUST' validators for the correct version of STIX.
Args:
options: ValidationOptions instance with validation options for this
validation run, including the STIX spec version.
"""
if options.version == '2.0':
return musts20.list_musts(options)
else:
return musts21.list_musts(options)
|
Return the list of 'MUST' validators for the correct version of STIX.
Args:
options: ValidationOptions instance with validation options for this
validation run, including the STIX spec version.
|
entailment
|
def _get_shoulds(options):
"""Return the list of 'SHOULD' validators for the correct version of STIX.
Args:
options: ValidationOptions instance with validation options for this
validation run, including the STIX spec version.
"""
if options.version == '2.0':
return shoulds20.list_shoulds(options)
else:
return shoulds21.list_shoulds(options)
|
Return the list of 'SHOULD' validators for the correct version of STIX.
Args:
options: ValidationOptions instance with validation options for this
validation run, including the STIX spec version.
|
entailment
|
def _schema_validate(sdo, options):
"""Set up validation of a single STIX object against its type's schema.
This does no actual validation; it just returns generators which must be
iterated to trigger the actual generation.
This function first creates generators for the built-in schemas, then adds
generators for additional schemas from the options, if specified.
Do not call this function directly; use validate_instance() instead, as it
calls this one. This function does not perform any custom checks.
"""
error_gens = []
if 'id' in sdo:
try:
error_prefix = sdo['id'] + ": "
except TypeError:
error_prefix = 'unidentifiable object: '
else:
error_prefix = ''
# Get validator for built-in schema
base_sdo_errors = _get_error_generator(sdo['type'], sdo, version=options.version)
if base_sdo_errors:
error_gens.append((base_sdo_errors, error_prefix))
# Get validator for any user-supplied schema
if options.schema_dir:
custom_sdo_errors = _get_error_generator(sdo['type'], sdo, options.schema_dir)
if custom_sdo_errors:
error_gens.append((custom_sdo_errors, error_prefix))
# Validate each cyber observable object separately
if sdo['type'] == 'observed-data' and 'objects' in sdo:
# Check if observed data property is in dictionary format
if not isinstance(sdo['objects'], dict):
error_gens.append(([schema_exceptions.ValidationError("Observed Data objects must be in dict format.", error_prefix)],
error_prefix))
return error_gens
for key, obj in iteritems(sdo['objects']):
if 'type' not in obj:
error_gens.append(([schema_exceptions.ValidationError("Observable object must contain a 'type' property.", error_prefix)],
error_prefix + 'object \'' + key + '\': '))
continue
# Get validator for built-in schemas
base_obs_errors = _get_error_generator(obj['type'],
obj,
None,
options.version,
'cyber-observable-core')
if base_obs_errors:
error_gens.append((base_obs_errors,
error_prefix + 'object \'' + key + '\': '))
# Get validator for any user-supplied schema
custom_obs_errors = _get_error_generator(obj['type'],
obj,
options.schema_dir,
options.version,
'cyber-observable-core')
if custom_obs_errors:
error_gens.append((custom_obs_errors,
error_prefix + 'object \'' + key + '\': '))
return error_gens
|
Set up validation of a single STIX object against its type's schema.
This does no actual validation; it just returns generators which must be
iterated to trigger the actual generation.
This function first creates generators for the built-in schemas, then adds
generators for additional schemas from the options, if specified.
Do not call this function directly; use validate_instance() instead, as it
calls this one. This function does not perform any custom checks.
|
entailment
|
def validate_instance(instance, options=None):
"""Perform STIX JSON Schema validation against STIX input.
Find the correct schema by looking at the 'type' property of the
`instance` JSON object.
Args:
instance: A Python dictionary representing a STIX object with a
'type' property.
options: ValidationOptions instance with validation options for this
validation run.
Returns:
A dictionary of validation results
"""
if 'type' not in instance:
raise ValidationError("Input must be an object with a 'type' property.")
if not options:
options = ValidationOptions()
error_gens = []
# Schema validation
if instance['type'] == 'bundle' and 'objects' in instance:
# Validate each object in a bundle separately
for sdo in instance['objects']:
if 'type' not in sdo:
raise ValidationError("Each object in bundle must have a 'type' property.")
error_gens += _schema_validate(sdo, options)
else:
error_gens += _schema_validate(instance, options)
# Custom validation
must_checks = _get_musts(options)
should_checks = _get_shoulds(options)
output.info("Running the following additional checks: %s."
% ", ".join(x.__name__ for x in chain(must_checks, should_checks)))
try:
errors = _iter_errors_custom(instance, must_checks, options)
warnings = _iter_errors_custom(instance, should_checks, options)
if options.strict:
chained_errors = chain(errors, warnings)
warnings = []
else:
chained_errors = errors
warnings = [pretty_error(x, options.verbose) for x in warnings]
except schema_exceptions.RefResolutionError:
raise SchemaInvalidError('Invalid JSON schema: a JSON reference '
'failed to resolve')
# List of error generators and message prefixes (to denote which object the
# error comes from)
error_gens += [(chained_errors, '')]
# Prepare the list of errors (this actually triggers the custom validation
# functions).
error_list = []
for gen, prefix in error_gens:
for error in gen:
msg = prefix + pretty_error(error, options.verbose)
error_list.append(SchemaError(msg))
if error_list:
valid = False
else:
valid = True
return ObjectValidationResults(is_valid=valid, object_id=instance.get('id', ''),
errors=error_list, warnings=warnings)
|
Perform STIX JSON Schema validation against STIX input.
Find the correct schema by looking at the 'type' property of the
`instance` JSON object.
Args:
instance: A Python dictionary representing a STIX object with a
'type' property.
options: ValidationOptions instance with validation options for this
validation run.
Returns:
A dictionary of validation results
|
entailment
|
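A sketch of calling validate_instance directly and inspecting errors and warnings. In normal use validate_parsed_json is the entry point; the object content here is illustrative.
options = ValidationOptions()
result = validate_instance({
    "type": "tool",
    "id": "tool--8e2e2d2b-17d4-4cbf-938f-98ee46b3cd3f",  # illustrative id
    "created": "2016-01-01T00:00:00.000Z",
    "modified": "2016-01-01T00:00:00.000Z",
    "name": "VNC",
    "labels": ["remote-access"],
}, options)
for error in result.errors:
    print(error)    # schema and MUST violations
for warning in result.warnings:
    print(warning)  # SHOULD violations (unless strict mode promotes them to errors)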
def object_result(self):
"""
Get the object result object, assuming there is only one. Raises
an error if there is more than one.
:return: The result object
:raises ValueError: If there is more than one result
"""
num_obj_results = len(self._object_results)
if num_obj_results < 1:
return None
elif num_obj_results < 2:
return self._object_results[0]
else:
raise ValueError("There is more than one result; use 'object_results'")
|
Get the object result object, assuming there is only one. Raises
an error if there is more than one.
:return: The result object
:raises ValueError: If there is more than one result
|
entailment
|
def object_results(self, object_results):
"""
Set the results to an iterable of values. The values will be collected
into a list. A single value is allowed; it will be converted to a
length 1 list.
:param object_results: The results to set
"""
if _is_iterable_non_string(object_results):
self._object_results = list(object_results)
elif object_results is None:
self._object_results = []
else:
self._object_results = [object_results]
|
Set the results to an iterable of values. The values will be collected
into a list. A single value is allowed; it will be converted to a
length 1 list.
:param object_results: The results to set
|
entailment
|
def as_dict(self):
"""A dictionary representation of the :class:`.ObjectValidationResults`
instance.
Keys:
* ``'result'``: The validation results (``True`` or ``False``)
* ``'errors'``: A list of validation errors.
Returns:
A dictionary representation of an instance of this class.
"""
d = super(ObjectValidationResults, self).as_dict()
if self.errors:
d['errors'] = [x.as_dict() for x in self.errors]
return d
|
A dictionary representation of the :class:`.ObjectValidationResults`
instance.
Keys:
* ``'result'``: The validation results (``True`` or ``False``)
* ``'errors'``: A list of validation errors.
Returns:
A dictionary representation of an instance of this class.
|
entailment
|
def custom_prefix_strict(instance):
"""Ensure custom content follows strict naming style conventions.
"""
for error in chain(custom_object_prefix_strict(instance),
custom_property_prefix_strict(instance),
custom_observable_object_prefix_strict(instance),
custom_object_extension_prefix_strict(instance),
custom_observable_properties_prefix_strict(instance)):
yield error
|
Ensure custom content follows strict naming style conventions.
|
entailment
|
def custom_prefix_lax(instance):
"""Ensure custom content follows lenient naming style conventions
for forward-compatibility.
"""
for error in chain(custom_object_prefix_lax(instance),
custom_property_prefix_lax(instance),
custom_observable_object_prefix_lax(instance),
custom_object_extension_prefix_lax(instance),
custom_observable_properties_prefix_lax(instance)):
yield error
|
Ensure custom content follows lenient naming style conventions
for forward-compatibility.
|
entailment
|
def custom_object_prefix_strict(instance):
"""Ensure custom objects follow strict naming style conventions.
"""
if (instance['type'] not in enums.TYPES and
instance['type'] not in enums.RESERVED_OBJECTS and
not CUSTOM_TYPE_PREFIX_RE.match(instance['type'])):
yield JSONError("Custom object type '%s' should start with 'x-' "
"followed by a source unique identifier (like a "
"domain name with dots replaced by hyphens), a hyphen "
"and then the name." % instance['type'],
instance['id'], 'custom-prefix')
|
Ensure custom objects follow strict naming style conventions.
|
entailment
|
def custom_object_prefix_lax(instance):
"""Ensure custom objects follow lenient naming style conventions
for forward-compatibility.
"""
if (instance['type'] not in enums.TYPES and
instance['type'] not in enums.RESERVED_OBJECTS and
not CUSTOM_TYPE_LAX_PREFIX_RE.match(instance['type'])):
yield JSONError("Custom object type '%s' should start with 'x-' in "
"order to be compatible with future versions of the "
"STIX 2 specification." % instance['type'],
instance['id'], 'custom-prefix-lax')
|
Ensure custom objects follow lenient naming style conventions
for forward-compatibility.
|
entailment
|
def custom_property_prefix_strict(instance):
"""Ensure custom properties follow strict naming style conventions.
Does not check property names in custom objects.
"""
for prop_name in instance.keys():
if (instance['type'] in enums.PROPERTIES and
prop_name not in enums.PROPERTIES[instance['type']] and
prop_name not in enums.RESERVED_PROPERTIES and
not CUSTOM_PROPERTY_PREFIX_RE.match(prop_name)):
yield JSONError("Custom property '%s' should have a type that "
"starts with 'x_' followed by a source unique "
"identifier (like a domain name with dots "
"replaced by hyphen), a hyphen and then the name."
% prop_name, instance['id'],
'custom-prefix')
|
Ensure custom properties follow strict naming style conventions.
Does not check property names in custom objects.
|
entailment
|
def custom_property_prefix_lax(instance):
"""Ensure custom properties follow lenient naming style conventions
for forward-compatibility.
Does not check property names in custom objects.
"""
for prop_name in instance.keys():
if (instance['type'] in enums.PROPERTIES and
prop_name not in enums.PROPERTIES[instance['type']] and
prop_name not in enums.RESERVED_PROPERTIES and
not CUSTOM_PROPERTY_LAX_PREFIX_RE.match(prop_name)):
yield JSONError("Custom property '%s' should have a type that "
"starts with 'x_' in order to be compatible with "
"future versions of the STIX 2 specification." %
prop_name, instance['id'],
'custom-prefix-lax')
|
Ensure custom properties follow lenient naming style conventions
for forward-compatibility.
Does not check property names in custom objects.
|
entailment
|
def open_vocab_values(instance):
"""Ensure that the values of all properties which use open vocabularies are
in lowercase and use hyphens instead of spaces or underscores as word
separators.
"""
if instance['type'] not in enums.VOCAB_PROPERTIES:
return
properties = enums.VOCAB_PROPERTIES[instance['type']]
for prop in properties:
if prop in instance:
if type(instance[prop]) is list:
values = instance[prop]
else:
values = [instance[prop]]
for v in values:
if not v.islower() or '_' in v or ' ' in v:
yield JSONError("Open vocabulary value '%s' should be all"
" lowercase and use hyphens instead of"
" spaces or underscores as word"
" separators." % v, instance['id'],
'open-vocab-format')
|
Ensure that the values of all properties which use open vocabularies are
in lowercase and use hyphens instead of spaces or underscores as word
separators.
|
entailment
|
def kill_chain_phase_names(instance):
"""Ensure the `kill_chain_name` and `phase_name` properties of
`kill_chain_phase` objects follow naming style conventions.
"""
if instance['type'] in enums.KILL_CHAIN_PHASE_USES and 'kill_chain_phases' in instance:
for phase in instance['kill_chain_phases']:
if 'kill_chain_name' not in phase:
# Since this field is required, schemas will already catch the error
return
chain_name = phase['kill_chain_name']
if not chain_name.islower() or '_' in chain_name or ' ' in chain_name:
yield JSONError("kill_chain_name '%s' should be all lowercase"
" and use hyphens instead of spaces or "
"underscores as word separators." % chain_name,
instance['id'], 'kill-chain-names')
phase_name = phase['phase_name']
if not phase_name.islower() or '_' in phase_name or ' ' in phase_name:
yield JSONError("phase_name '%s' should be all lowercase and "
"use hyphens instead of spaces or underscores "
"as word separators." % phase_name,
instance['id'], 'kill-chain-names')
|
Ensure the `kill_chain_name` and `phase_name` properties of
`kill_chain_phase` objects follow naming style conventions.
|
entailment
|
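For reference, a kill_chain_phases fragment that passes the check above next to one that fails it; the check only runs for object types listed in enums.KILL_CHAIN_PHASE_USES (values illustrative).
passing = {"kill_chain_phases": [
    {"kill_chain_name": "lockheed-martin-cyber-kill-chain",
     "phase_name": "reconnaissance"}]}

failing = {"kill_chain_phases": [
    {"kill_chain_name": "Lockheed Martin Kill Chain",  # uppercase letters and spaces
     "phase_name": "reconnaissance"}]}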
def check_vocab(instance, vocab, code):
"""Ensure that the open vocabulary specified by `vocab` is used properly.
This checks properties of objects specified in the appropriate `_USES`
dictionary to determine which properties SHOULD use the given vocabulary,
then checks that the values in those properties are from the vocabulary.
"""
vocab_uses = getattr(enums, vocab + "_USES")
for k in vocab_uses.keys():
if instance['type'] == k:
for prop in vocab_uses[k]:
if prop not in instance:
continue
vocab_ov = getattr(enums, vocab + "_OV")
if type(instance[prop]) is list:
is_in = set(instance[prop]).issubset(set(vocab_ov))
else:
is_in = instance[prop] in vocab_ov
if not is_in:
vocab_name = vocab.replace('_', '-').lower()
yield JSONError("%s contains a value not in the %s-ov "
"vocabulary." % (prop, vocab_name),
instance['id'], code)
|
Ensure that the open vocabulary specified by `vocab` is used properly.
This checks properties of objects specified in the appropriate `_USES`
dictionary to determine which properties SHOULD use the given vocabulary,
then checks that the values in those properties are from the vocabulary.
|
entailment
|
def vocab_marking_definition(instance):
"""Ensure that the `definition_type` property of `marking-definition`
objects is one of the values in the STIX 2.0 specification.
"""
if (instance['type'] == 'marking-definition' and
'definition_type' in instance and not
instance['definition_type'] in enums.MARKING_DEFINITION_TYPES):
return JSONError("Marking definition `definition_type` should be one "
"of: %s." % ', '.join(enums.MARKING_DEFINITION_TYPES),
instance['id'], 'marking-definition-type')
|
Ensure that the `definition_type` property of `marking-definition`
objects is one of the values in the STIX 2.0 specification.
|
entailment
|
def relationships_strict(instance):
"""Ensure that only the relationship types defined in the specification are
used.
"""
# Don't check objects that aren't relationships or that are custom objects
if (instance['type'] != 'relationship' or
instance['type'] not in enums.TYPES):
return
if ('relationship_type' not in instance or 'source_ref' not in instance or
'target_ref' not in instance):
# Since these fields are required, schemas will already catch the error
return
r_type = instance['relationship_type']
try:
r_source = re.search(r"(.+)\-\-", instance['source_ref']).group(1)
r_target = re.search(r"(.+)\-\-", instance['target_ref']).group(1)
except (AttributeError, TypeError):
# Schemas already catch errors of these properties not being strings or
# not containing the string '--'.
return
if (r_type in enums.COMMON_RELATIONSHIPS or
r_source in enums.NON_SDOS or
r_target in enums.NON_SDOS):
# If all objects can have this relationship type, no more checks needed
# Schemas already catch if source/target type cannot have relationship
return
if r_source not in enums.RELATIONSHIPS:
return JSONError("'%s' is not a suggested relationship source object "
"for the '%s' relationship." % (r_source, r_type),
instance['id'], 'relationship-types')
if r_type not in enums.RELATIONSHIPS[r_source]:
return JSONError("'%s' is not a suggested relationship type for '%s' "
"objects." % (r_type, r_source), instance['id'],
'relationship-types')
if r_target not in enums.RELATIONSHIPS[r_source][r_type]:
return JSONError("'%s' is not a suggested relationship target object "
"for '%s' objects with the '%s' relationship."
% (r_target, r_source, r_type), instance['id'],
'relationship-types')
|
Ensure that only the relationship types defined in the specification are
used.
|
entailment
|
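The source and target object types in the check above are pulled out of the reference identifiers with a greedy regex; a small illustration (the identifier value is made up):
import re

source_ref = "indicator--8e2e2d2b-17d4-4cbf-938f-98ee46b3cd3f"  # illustrative value
print(re.search(r"(.+)\-\-", source_ref).group(1))  # prints 'indicator'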
def valid_hash_value(hashname):
"""Return true if given value is a valid, recommended hash name according
to the STIX 2 specification.
"""
custom_hash_prefix_re = re.compile(r"^x_")
if hashname in enums.HASH_ALGO_OV or custom_hash_prefix_re.match(hashname):
return True
else:
return False
|
Return true if given value is a valid, recommended hash name according
to the STIX 2 specification.
|
entailment
|
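A quick illustration of valid_hash_value; the first call assumes 'SHA-256' is one of the keys in enums.HASH_ALGO_OV.
print(valid_hash_value("SHA-256"))     # True, assuming 'SHA-256' is in enums.HASH_ALGO_OV
print(valid_hash_value("x_custom"))    # True: matches the custom 'x_' prefix
print(valid_hash_value("not-a-hash"))  # False: not in the vocabulary and no 'x_' prefix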
def vocab_hash_algo(instance):
"""Ensure objects with 'hashes' properties only use values from the
hash-algo-ov vocabulary.
"""
for key, obj in instance['objects'].items():
if 'type' not in obj:
continue
if obj['type'] == 'file':
try:
hashes = obj['hashes']
except KeyError:
pass
else:
for h in hashes:
if not (valid_hash_value(h)):
yield JSONError("Object '%s' has a 'hashes' dictionary"
" with a hash of type '%s', which is not a "
"value in the hash-algo-ov vocabulary nor a "
"custom value prepended with 'x_'."
% (key, h), instance['id'], 'hash-algo')
try:
ads = obj['extensions']['ntfs-ext']['alternate_data_streams']
except KeyError:
pass
else:
for datastream in ads:
if 'hashes' not in datastream:
continue
for h in datastream['hashes']:
if not (valid_hash_value(h)):
yield JSONError("Object '%s' has an NTFS extension"
" with an alternate data stream that has a"
" 'hashes' dictionary with a hash of type "
"'%s', which is not a value in the "
"hash-algo-ov vocabulary nor a custom "
"value prepended with 'x_'."
% (key, h), instance['id'], 'hash-algo')
try:
head_hashes = obj['extensions']['windows-pebinary-ext']['file_header_hashes']
except KeyError:
pass
else:
for h in head_hashes:
if not (valid_hash_value(h)):
yield JSONError("Object '%s' has a Windows PE Binary "
"File extension with a file header hash of "
"'%s', which is not a value in the "
"hash-algo-ov vocabulary nor a custom value "
"prepended with 'x_'."
% (key, h), instance['id'], 'hash-algo')
try:
hashes = obj['extensions']['windows-pebinary-ext']['optional_header']['hashes']
except KeyError:
pass
else:
for h in hashes:
if not (valid_hash_value(h)):
yield JSONError("Object '%s' has a Windows PE Binary "
"File extension with an optional header that "
"has a hash of '%s', which is not a value in "
"the hash-algo-ov vocabulary nor a custom "
"value prepended with 'x_'."
% (key, h), instance['id'], 'hash-algo')
try:
sections = obj['extensions']['windows-pebinary-ext']['sections']
except KeyError:
pass
else:
for s in sections:
if 'hashes' not in s:
continue
for h in s['hashes']:
if not (valid_hash_value(h)):
yield JSONError("Object '%s' has a Windows PE "
"Binary File extension with a section that"
" has a hash of '%s', which is not a value"
" in the hash-algo-ov vocabulary nor a "
"custom value prepended with 'x_'."
% (key, h), instance['id'], 'hash-algo')
elif obj['type'] == 'artifact' or obj['type'] == 'x509-certificate':
try:
hashes = obj['hashes']
except KeyError:
pass
else:
for h in hashes:
if not (valid_hash_value(h)):
yield JSONError("Object '%s' has a 'hashes' dictionary"
" with a hash of type '%s', which is not a "
"value in the hash-algo-ov vocabulary nor a "
"custom value prepended with 'x_'."
% (key, h), instance['id'], 'hash-algo')
|
Ensure objects with 'hashes' properties only use values from the
hash-algo-ov vocabulary.
|
entailment
|
def vocab_windows_pebinary_type(instance):
"""Ensure file objects with the windows-pebinary-ext extension have a
'pe_type' property that is from the windows-pebinary-type-ov vocabulary.
"""
for key, obj in instance['objects'].items():
if 'type' in obj and obj['type'] == 'file':
try:
pe_type = obj['extensions']['windows-pebinary-ext']['pe_type']
except KeyError:
continue
if pe_type not in enums.WINDOWS_PEBINARY_TYPE_OV:
yield JSONError("Object '%s' has a Windows PE Binary File "
"extension with a 'pe_type' of '%s', which is not a "
"value in the windows-pebinary-type-ov vocabulary."
% (key, pe_type), instance['id'],
'windows-pebinary-type')
|
Ensure file objects with the windows-pebinary-ext extension have a
'pe_type' property that is from the windows-pebinary-type-ov vocabulary.
|
entailment
|
def vocab_account_type(instance):
"""Ensure a user-account objects' 'account-type' property is from the
account-type-ov vocabulary.
"""
for key, obj in instance['objects'].items():
if 'type' in obj and obj['type'] == 'user-account':
try:
acct_type = obj['account_type']
except KeyError:
continue
if acct_type not in enums.ACCOUNT_TYPE_OV:
yield JSONError("Object '%s' is a User Account Object "
"with an 'account_type' of '%s', which is not a "
"value in the account-type-ov vocabulary."
% (key, acct_type), instance['id'], 'account-type')
|
Ensure a user-account object's 'account_type' property is from the
account-type-ov vocabulary.
|
entailment
|
def observable_object_keys(instance):
"""Ensure observable-objects keys are non-negative integers.
"""
digits_re = re.compile(r"^\d+$")
for key in instance['objects']:
if not digits_re.match(key):
yield JSONError("'%s' is not a good key value. Observable Objects "
"should use non-negative integers for their keys."
% key, instance['id'], 'observable-object-keys')
|
Ensure observable-object keys are non-negative integers.
|
entailment
|
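For reference, an observed-data 'objects' dictionary whose keys satisfy the check above (content illustrative):
observed_objects = {
    "0": {"type": "file", "name": "example.exe"},
    "1": {"type": "directory", "path": "C:\\Windows"},
}
# A key such as "file-1" would trigger the 'observable-object-keys' warning.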
def custom_observable_object_prefix_strict(instance):
"""Ensure custom observable objects follow strict naming style conventions.
"""
for key, obj in instance['objects'].items():
if ('type' in obj and obj['type'] not in enums.OBSERVABLE_TYPES and
obj['type'] not in enums.OBSERVABLE_RESERVED_OBJECTS and
not CUSTOM_TYPE_PREFIX_RE.match(obj['type'])):
yield JSONError("Custom Observable Object type '%s' should start "
"with 'x-' followed by a source unique identifier "
"(like a domain name with dots replaced by "
"hyphens), a hyphen and then the name."
% obj['type'], instance['id'],
'custom-prefix')
|
Ensure custom observable objects follow strict naming style conventions.
|
entailment
|
def custom_observable_object_prefix_lax(instance):
"""Ensure custom observable objects follow naming style conventions.
"""
for key, obj in instance['objects'].items():
if ('type' in obj and obj['type'] not in enums.OBSERVABLE_TYPES and
obj['type'] not in enums.OBSERVABLE_RESERVED_OBJECTS and
not CUSTOM_TYPE_LAX_PREFIX_RE.match(obj['type'])):
yield JSONError("Custom Observable Object type '%s' should start "
"with 'x-'."
% obj['type'], instance['id'],
'custom-prefix-lax')
|
Ensure custom observable objects follow naming style conventions.
|
entailment
|
def custom_object_extension_prefix_strict(instance):
"""Ensure custom observable object extensions follow strict naming style
conventions.
"""
for key, obj in instance['objects'].items():
if not ('extensions' in obj and 'type' in obj and
obj['type'] in enums.OBSERVABLE_EXTENSIONS):
continue
for ext_key in obj['extensions']:
if (ext_key not in enums.OBSERVABLE_EXTENSIONS[obj['type']] and
not CUSTOM_TYPE_PREFIX_RE.match(ext_key)):
yield JSONError("Custom Cyber Observable Object extension type"
" '%s' should start with 'x-' followed by a source "
"unique identifier (like a domain name with dots "
"replaced by hyphens), a hyphen and then the name."
% ext_key, instance['id'],
'custom-prefix')
|
Ensure custom observable object extensions follow strict naming style
conventions.
|
entailment
|
def custom_object_extension_prefix_lax(instance):
"""Ensure custom observable object extensions follow naming style
conventions.
"""
for key, obj in instance['objects'].items():
if not ('extensions' in obj and 'type' in obj and
obj['type'] in enums.OBSERVABLE_EXTENSIONS):
continue
for ext_key in obj['extensions']:
if (ext_key not in enums.OBSERVABLE_EXTENSIONS[obj['type']] and
not CUSTOM_TYPE_LAX_PREFIX_RE.match(ext_key)):
yield JSONError("Custom Cyber Observable Object extension type"
" '%s' should start with 'x-'."
% ext_key, instance['id'],
'custom-prefix-lax')
|
Ensure custom observable object extensions follow naming style
conventions.
|
entailment
|
def custom_observable_properties_prefix_strict(instance):
"""Ensure observable object custom properties follow strict naming style
conventions.
"""
for key, obj in instance['objects'].items():
if 'type' not in obj:
continue
type_ = obj['type']
for prop in obj:
# Check objects' properties
if (type_ in enums.OBSERVABLE_PROPERTIES and
prop not in enums.OBSERVABLE_PROPERTIES[type_] and
not CUSTOM_PROPERTY_PREFIX_RE.match(prop)):
yield JSONError("Cyber Observable Object custom property '%s' "
"should start with 'x_' followed by a source "
"unique identifier (like a domain name with "
"dots replaced by hyphens), a hyphen and then the"
" name."
% prop, instance['id'],
'custom-prefix')
# Check properties of embedded cyber observable types
if (type_ in enums.OBSERVABLE_EMBEDDED_PROPERTIES and
prop in enums.OBSERVABLE_EMBEDDED_PROPERTIES[type_]):
for embed_prop in obj[prop]:
if isinstance(embed_prop, dict):
for embedded in embed_prop:
if (embedded not in enums.OBSERVABLE_EMBEDDED_PROPERTIES[type_][prop] and
not CUSTOM_PROPERTY_PREFIX_RE.match(embedded)):
yield JSONError("Cyber Observable Object custom "
"property '%s' in the %s property of "
"%s object should start with 'x_' "
"followed by a source unique "
"identifier (like a domain name with "
"dots replaced by hyphens), a hyphen and "
"then the name."
% (embedded, prop, type_), instance['id'],
'custom-prefix')
elif (embed_prop not in enums.OBSERVABLE_EMBEDDED_PROPERTIES[type_][prop] and
not CUSTOM_PROPERTY_PREFIX_RE.match(embed_prop)):
yield JSONError("Cyber Observable Object custom "
"property '%s' in the %s property of "
"%s object should start with 'x_' "
"followed by a source unique "
"identifier (like a domain name with "
"dots replaced by hyphens), a hyphen and "
"then the name."
% (embed_prop, prop, type_), instance['id'],
'custom-prefix')
# Check object extensions' properties
if (type_ in enums.OBSERVABLE_EXTENSIONS and 'extensions' in obj):
for ext_key in obj['extensions']:
if ext_key in enums.OBSERVABLE_EXTENSIONS[type_]:
for ext_prop in obj['extensions'][ext_key]:
if (ext_prop not in enums.OBSERVABLE_EXTENSION_PROPERTIES[ext_key] and
not CUSTOM_PROPERTY_PREFIX_RE.match(ext_prop)):
yield JSONError("Cyber Observable Object custom "
"property '%s' in the %s extension "
"should start with 'x_' followed by a "
"source unique identifier (like a "
"domain name with dots replaced by "
"hyphens), a hyphen and then the name."
% (ext_prop, ext_key), instance['id'],
'custom-prefix')
if ext_key in enums.OBSERVABLE_EXTENSIONS[type_]:
for ext_prop in obj['extensions'][ext_key]:
if (ext_key in enums.OBSERVABLE_EXTENSION_EMBEDDED_PROPERTIES and
ext_prop in enums.OBSERVABLE_EXTENSION_EMBEDDED_PROPERTIES[ext_key]):
for embed_prop in obj['extensions'][ext_key][ext_prop]:
if not (isinstance(embed_prop, Iterable) and not isinstance(embed_prop, string_types)):
embed_prop = [embed_prop]
for p in embed_prop:
if (p not in enums.OBSERVABLE_EXTENSION_EMBEDDED_PROPERTIES[ext_key][ext_prop] and
not CUSTOM_PROPERTY_PREFIX_RE.match(p)):
yield JSONError("Cyber Observable Object "
"custom property '%s' in the %s "
"property of the %s extension should "
"start with 'x_' followed by a source "
"unique identifier (like a domain name"
" with dots replaced by hyphens), a "
"hyphen and then the name."
% (p, ext_prop, ext_key), instance['id'],
'custom-prefix')
|
Ensure observable object custom properties follow strict naming style
conventions.
|
entailment
|
def network_traffic_ports(instance):
"""Ensure network-traffic objects contain both src_port and dst_port.
"""
for key, obj in instance['objects'].items():
if ('type' in obj and obj['type'] == 'network-traffic' and
('src_port' not in obj or 'dst_port' not in obj)):
yield JSONError("The Network Traffic object '%s' should contain "
"both the 'src_port' and 'dst_port' properties."
% key, instance['id'], 'network-traffic-ports')
|
Ensure network-traffic objects contain both src_port and dst_port.
|
entailment
|
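A network-traffic fragment that satisfies the check above (values illustrative):
objects = {
    "0": {"type": "ipv4-addr", "value": "198.51.100.2"},
    "1": {"type": "network-traffic",
          "src_ref": "0",
          "src_port": 24678,
          "dst_port": 80,
          "protocols": ["tcp"]},
}
# Omitting either src_port or dst_port from object "1" would yield the warning.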
def mime_type(instance):
"""Ensure the 'mime_type' property of file objects comes from the Template
column in the IANA media type registry.
"""
mime_pattern = re.compile(r'^(application|audio|font|image|message|model'
'|multipart|text|video)/[a-zA-Z0-9.+_-]+')
for key, obj in instance['objects'].items():
if ('type' in obj and obj['type'] == 'file' and 'mime_type' in obj):
if enums.media_types():
if obj['mime_type'] not in enums.media_types():
yield JSONError("The 'mime_type' property of object '%s' "
"('%s') should be an IANA registered MIME "
"Type of the form 'type/subtype'."
% (key, obj['mime_type']), instance['id'],
'mime-type')
else:
info("Can't reach IANA website; using regex for mime types.")
if not mime_pattern.match(obj['mime_type']):
yield JSONError("The 'mime_type' property of object '%s' "
"('%s') should be an IANA MIME Type of the"
" form 'type/subtype'."
% (key, obj['mime_type']), instance['id'],
'mime-type')
|
Ensure the 'mime_type' property of file objects comes from the Template
column in the IANA media type registry.
|
entailment
|
def protocols(instance):
"""Ensure the 'protocols' property of network-traffic objects contains only
values from the IANA Service Name and Transport Protocol Port Number
Registry.
"""
for key, obj in instance['objects'].items():
if ('type' in obj and obj['type'] == 'network-traffic' and
'protocols' in obj):
for prot in obj['protocols']:
if enums.protocols():
if prot not in enums.protocols():
yield JSONError("The 'protocols' property of object "
"'%s' contains a value ('%s') not in "
"IANA Service Name and Transport "
"Protocol Port Number Registry."
% (key, prot), instance['id'],
'protocols')
else:
info("Can't reach IANA website; using regex for protocols.")
if not PROTOCOL_RE.match(prot):
yield JSONError("The 'protocols' property of object "
"'%s' contains a value ('%s') not in "
"IANA Service Name and Transport "
"Protocol Port Number Registry."
% (key, prot), instance['id'],
'protocols')
|
Ensure the 'protocols' property of network-traffic objects contains only
values from the IANA Service Name and Transport Protocol Port Number
Registry.
|
entailment
|
def ipfix(instance):
"""Ensure the 'ipfix' property of network-traffic objects contains only
values from the IANA IP Flow Information Export (IPFIX) Entities Registry.
"""
ipf_pattern = re.compile(r'^[a-z][a-zA-Z0-9]+')
for key, obj in instance['objects'].items():
if ('type' in obj and obj['type'] == 'network-traffic' and
'ipfix' in obj):
for ipf in obj['ipfix']:
if enums.ipfix():
if ipf not in enums.ipfix():
yield JSONError("The 'ipfix' property of object "
"'%s' contains a key ('%s') not in "
"IANA IP Flow Information Export "
"(IPFIX) Entities Registry."
% (key, ipf), instance['id'],
'ipfix')
else:
info("Can't reach IANA website; using regex for ipfix.")
if not ipf_pattern.match(ipf):
yield JSONError("The 'ipfix' property of object "
"'%s' contains a key ('%s') not in "
"IANA IP Flow Information Export "
"(IPFIX) Entities Registry."
% (key, ipf), instance['id'],
'ipfix')
|
Ensure the 'ipfix' property of network-traffic objects contains only
values from the IANA IP Flow Information Export (IPFIX) Entities Registry.
|
entailment
|
def http_request_headers(instance):
"""Ensure the keys of the 'request_headers' property of the http-request-
ext extension of network-traffic objects conform to the format for HTTP
request headers. Use a regex because there isn't a definitive source.
https://www.iana.org/assignments/message-headers/message-headers.xhtml does
not differentiate between request and response headers, and leaves out
several common non-standard request fields listed elsewhere.
"""
for key, obj in instance['objects'].items():
if ('type' in obj and obj['type'] == 'network-traffic'):
try:
headers = obj['extensions']['http-request-ext']['request_header']
except KeyError:
continue
for hdr in headers:
if hdr not in enums.HTTP_REQUEST_HEADERS:
yield JSONError("The 'request_header' property of object "
"'%s' contains an invalid HTTP request "
"header ('%s')."
% (key, hdr), instance['id'],
'http-request-headers')
|
Ensure the keys of the 'request_header' property of the http-request-
ext extension of network-traffic objects conform to the format for HTTP
request headers. Check against a curated list because there isn't a definitive source.
https://www.iana.org/assignments/message-headers/message-headers.xhtml does
not differentiate between request and response headers, and leaves out
several common non-standard request fields listed elsewhere.
|
entailment
|
def socket_options(instance):
"""Ensure the keys of the 'options' property of the socket-ext extension of
network-traffic objects are only valid socket options (SO_*).
"""
for key, obj in instance['objects'].items():
if ('type' in obj and obj['type'] == 'network-traffic'):
try:
options = obj['extensions']['socket-ext']['options']
except KeyError:
continue
for opt in options:
if opt not in enums.SOCKET_OPTIONS:
yield JSONError("The 'options' property of object '%s' "
"contains a key ('%s') that is not a valid"
" socket option (SO_*)."
% (key, opt), instance['id'], 'socket-options')
|
Ensure the keys of the 'options' property of the socket-ext extension of
network-traffic objects are only valid socket options (SO_*).
|
entailment
|
def pdf_doc_info(instance):
"""Ensure the keys of the 'document_info_dict' property of the pdf-ext
extension of file objects are only valid PDF Document Information
Dictionary Keys.
"""
for key, obj in instance['objects'].items():
if ('type' in obj and obj['type'] == 'file'):
try:
did = obj['extensions']['pdf-ext']['document_info_dict']
except KeyError:
continue
for elem in did:
if elem not in enums.PDF_DID:
yield JSONError("The 'document_info_dict' property of "
"object '%s' contains a key ('%s') that is"
" not a valid PDF Document Information "
"Dictionary key."
% (key, elem), instance['id'],
'pdf-doc-info')
|
Ensure the keys of the 'document_info_dict' property of the pdf-ext
extension of file objects are only valid PDF Document Information
Dictionary Keys.
|
entailment
|
def countries(instance):
"""Ensure that the `country` property of `location` objects is a valid
ISO 3166-1 ALPHA-2 Code.
"""
if (instance['type'] == 'location' and 'country' in instance and not
instance['country'].upper() in enums.COUNTRY_CODES):
return JSONError("Location `country` should be a valid ISO 3166-1 "
"ALPHA-2 Code.",
instance['id'], 'countries')
|
Ensure that the `country` property of `location` objects is a valid
ISO 3166-1 ALPHA-2 Code.
|
entailment
|
def windows_process_priority_format(instance):
"""Ensure the 'priority' property of windows-process-ext ends in '_CLASS'.
"""
class_suffix_re = re.compile(r'.+_CLASS$')
for key, obj in instance['objects'].items():
if 'type' in obj and obj['type'] == 'process':
try:
priority = obj['extensions']['windows-process-ext']['priority']
except KeyError:
continue
if not class_suffix_re.match(priority):
yield JSONError("The 'priority' property of object '%s' should"
" end in '_CLASS'." % key, instance['id'],
'windows-process-priority-format')
|
Ensure the 'priority' property of windows-process-ext ends in '_CLASS'.
|
entailment
|
def hash_length(instance):
"""Ensure keys in 'hashes'-type properties are no more than 30 characters long.
"""
for key, obj in instance['objects'].items():
if 'type' not in obj:
continue
if obj['type'] == 'file':
try:
hashes = obj['hashes']
except KeyError:
pass
else:
for h in hashes:
if (len(h) > 30):
yield JSONError("Object '%s' has a 'hashes' dictionary"
" with a hash of type '%s', which is "
"longer than 30 characters."
% (key, h), instance['id'], 'hash-algo')
try:
ads = obj['extensions']['ntfs-ext']['alternate_data_streams']
except KeyError:
pass
else:
for datastream in ads:
if 'hashes' not in datastream:
continue
for h in datastream['hashes']:
if (len(h) > 30):
yield JSONError("Object '%s' has an NTFS extension"
" with an alternate data stream that has a"
" 'hashes' dictionary with a hash of type "
"'%s', which is longer than 30 "
"characters."
% (key, h), instance['id'], 'hash-algo')
try:
head_hashes = obj['extensions']['windows-pebinary-ext']['file_header_hashes']
except KeyError:
pass
else:
for h in head_hashes:
if (len(h) > 30):
yield JSONError("Object '%s' has a Windows PE Binary "
"File extension with a file header hash of "
"'%s', which is longer than 30 "
"characters."
% (key, h), instance['id'], 'hash-algo')
try:
hashes = obj['extensions']['windows-pebinary-ext']['optional_header']['hashes']
except KeyError:
pass
else:
for h in hashes:
if (len(h) > 30):
yield JSONError("Object '%s' has a Windows PE Binary "
"File extension with an optional header that "
"has a hash of '%s', which is longer "
"than 30 characters."
% (key, h), instance['id'], 'hash-algo')
try:
sections = obj['extensions']['windows-pebinary-ext']['sections']
except KeyError:
pass
else:
for s in sections:
if 'hashes' not in s:
continue
for h in s['hashes']:
if (len(h) > 30):
yield JSONError("Object '%s' has a Windows PE "
"Binary File extension with a section that"
" has a hash of '%s', which is "
"longer than 30 characters."
% (key, h), instance['id'], 'hash-algo')
elif obj['type'] == 'artifact' or obj['type'] == 'x509-certificate':
try:
hashes = obj['hashes']
except KeyError:
pass
else:
for h in hashes:
if (len(h) > 30):
yield JSONError("Object '%s' has a 'hashes' dictionary"
" with a hash of type '%s', which is "
"longer than 30 characters."
% (key, h), instance['id'], 'hash-algo')
|
Ensure keys in 'hashes'-type properties are no more than 30 characters long.
|
entailment
|
def duplicate_ids(instance):
"""Ensure objects with duplicate IDs have different `modified` timestamps.
"""
if instance['type'] != 'bundle' or 'objects' not in instance:
return
unique_ids = {}
for obj in instance['objects']:
if 'id' not in obj or 'modified' not in obj:
continue
elif obj['id'] not in unique_ids:
unique_ids[obj['id']] = obj['modified']
elif obj['modified'] == unique_ids[obj['id']]:
yield JSONError("Duplicate ID '%s' has identical `modified` timestamp."
" If they are different versions of the same object, "
"they should have different `modified` properties."
% obj['id'], instance['id'], 'duplicate-ids')
|
Ensure objects with duplicate IDs have different `modified` timestamps.
|
entailment
|
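A bundle fragment the check above accepts because the two versions of the same object carry different `modified` timestamps (identifiers and timestamps are illustrative):
bundle = {
    "type": "bundle",
    "id": "bundle--44af6c39-c09b-49c5-9de2-394224b04982",
    "objects": [
        {"type": "campaign",
         "id": "campaign--8e2e2d2b-17d4-4cbf-938f-98ee46b3cd3f",
         "created": "2016-01-01T00:00:00.000Z",
         "modified": "2016-01-01T00:00:00.000Z",
         "name": "Example Campaign"},
        {"type": "campaign",
         "id": "campaign--8e2e2d2b-17d4-4cbf-938f-98ee46b3cd3f",
         "created": "2016-01-01T00:00:00.000Z",
         "modified": "2016-02-01T00:00:00.000Z",
         "name": "Example Campaign (revised)"},
    ],
}
# duplicate_ids(bundle) yields nothing; identical 'modified' values would yield a JSONError.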
def list_shoulds(options):
"""Construct the list of 'SHOULD' validators to be run by the validator.
"""
validator_list = []
# Default: enable all
if not options.disabled and not options.enabled:
validator_list.extend(CHECKS['all'])
return validator_list
# --disable
# Add SHOULD requirements to the list unless disabled
if options.disabled:
if 'all' not in options.disabled:
if 'format-checks' not in options.disabled:
if 'custom-prefix' not in options.disabled:
validator_list.append(CHECKS['custom-prefix'])
elif 'custom-prefix-lax' not in options.disabled:
validator_list.append(CHECKS['custom-prefix-lax'])
if 'open-vocab-format' not in options.disabled:
validator_list.append(CHECKS['open-vocab-format'])
if 'kill-chain-names' not in options.disabled:
validator_list.append(CHECKS['kill-chain-names'])
if 'observable-object-keys' not in options.disabled:
validator_list.append(CHECKS['observable-object-keys'])
if 'observable-dictionary-keys' not in options.disabled:
validator_list.append(CHECKS['observable-dictionary-keys'])
if 'windows-process-priority-format' not in options.disabled:
validator_list.append(CHECKS['windows-process-priority-format'])
if 'hash-length' not in options.disabled:
validator_list.append(CHECKS['hash-length'])
if 'approved-values' not in options.disabled:
if 'marking-definition-type' not in options.disabled:
validator_list.append(CHECKS['marking-definition-type'])
if 'relationship-types' not in options.disabled:
validator_list.append(CHECKS['relationship-types'])
if 'duplicate-ids' not in options.disabled:
validator_list.append(CHECKS['duplicate-ids'])
if 'all-vocabs' not in options.disabled:
if 'attack-motivation' not in options.disabled:
validator_list.append(CHECKS['attack-motivation'])
if 'attack-resource-level' not in options.disabled:
validator_list.append(CHECKS['attack-resource-level'])
if 'identity-class' not in options.disabled:
validator_list.append(CHECKS['identity-class'])
if 'indicator-types' not in options.disabled:
validator_list.append(CHECKS['indicator-types'])
if 'industry-sector' not in options.disabled:
validator_list.append(CHECKS['industry-sector'])
if 'malware-types' not in options.disabled:
validator_list.append(CHECKS['malware-types'])
if 'report-types' not in options.disabled:
validator_list.append(CHECKS['report-types'])
if 'threat-actor-types' not in options.disabled:
validator_list.append(CHECKS['threat-actor-types'])
if 'threat-actor-role' not in options.disabled:
validator_list.append(CHECKS['threat-actor-role'])
if 'threat-actor-sophistication' not in options.disabled:
validator_list.append(CHECKS['threat-actor-sophistication'])
if 'tool-types' not in options.disabled:
validator_list.append(CHECKS['tool-types'])
if 'region' not in options.disabled:
validator_list.append(CHECKS['region'])
if 'hash-algo' not in options.disabled:
validator_list.append(CHECKS['hash-algo'])
if 'windows-pebinary-type' not in options.disabled:
validator_list.append(CHECKS['windows-pebinary-type'])
if 'account-type' not in options.disabled:
validator_list.append(CHECKS['account-type'])
if 'all-external-sources' not in options.disabled:
if 'mime-type' not in options.disabled:
validator_list.append(CHECKS['mime-type'])
if 'protocols' not in options.disabled:
validator_list.append(CHECKS['protocols'])
if 'ipfix' not in options.disabled:
validator_list.append(CHECKS['ipfix'])
if 'http-request-headers' not in options.disabled:
validator_list.append(CHECKS['http-request-headers'])
if 'socket-options' not in options.disabled:
validator_list.append(CHECKS['socket-options'])
if 'pdf-doc-info' not in options.disabled:
validator_list.append(CHECKS['pdf-doc-info'])
if 'countries' not in options.disabled:
validator_list.append(CHECKS['countries'])
if 'network-traffic-ports' not in options.disabled:
validator_list.append(CHECKS['network-traffic-ports'])
if 'extref-hashes' not in options.disabled:
validator_list.append(CHECKS['extref-hashes'])
# --enable
if options.enabled:
for check in options.enabled:
try:
if CHECKS[check] in validator_list:
continue
if type(CHECKS[check]) is list:
validator_list.extend(CHECKS[check])
else:
validator_list.append(CHECKS[check])
except KeyError:
raise JSONError("%s is not a valid check!" % check)
return validator_list
|
Construct the list of 'SHOULD' validators to be run by the validator.
|
entailment
|
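A minimal sketch of how disabling checks changes the returned list. The stand-in options class is hypothetical and only provides the two attributes list_shoulds reads; a real run would build a ValidationOptions instead, and the CHECKS table from this module must be in scope.
class _Opts:                       # hypothetical stand-in for ValidationOptions
    disabled = ["mime-type", "protocols"]
    enabled = []

checks = list_shoulds(_Opts())
print(sorted(check.__name__ for check in checks))  # 'mime_type' and 'protocols' are absent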
def timestamp(instance):
"""Ensure timestamps contain sane months, days, hours, minutes, seconds.
"""
ts_re = re.compile(r"^[0-9]{4}-(0[1-9]|1[012])-(0[1-9]|[12][0-9]|3[01])T([01][0-9]|2[0-3]):([0-5][0-9]):([0-5][0-9]|60)(\.[0-9]+)?Z$")
timestamp_props = ['created', 'modified']
if instance['type'] in enums.TIMESTAMP_PROPERTIES:
timestamp_props += enums.TIMESTAMP_PROPERTIES[instance['type']]
for tprop in timestamp_props:
if tprop in instance and ts_re.match(instance[tprop]):
# Don't raise an error if schemas will catch it
try:
parser.parse(instance[tprop])
except ValueError as e:
yield JSONError("'%s': '%s' is not a valid timestamp: %s"
% (tprop, instance[tprop], str(e)), instance['id'])
if has_cyber_observable_data(instance):
for key, obj in instance['objects'].items():
if 'type' not in obj:
continue
if obj['type'] in enums.TIMESTAMP_OBSERVABLE_PROPERTIES:
for tprop in enums.TIMESTAMP_OBSERVABLE_PROPERTIES[obj['type']]:
if tprop in obj and ts_re.match(obj[tprop]):
# Don't raise an error if schemas will catch it
try:
parser.parse(obj[tprop])
except ValueError as e:
yield JSONError("'%s': '%s': '%s' is not a valid timestamp: %s"
% (obj['type'], tprop, obj[tprop], str(e)), instance['id'])
if obj['type'] in enums.TIMESTAMP_EMBEDDED_PROPERTIES:
for embed in enums.TIMESTAMP_EMBEDDED_PROPERTIES[obj['type']]:
if embed in obj:
for tprop in enums.TIMESTAMP_EMBEDDED_PROPERTIES[obj['type']][embed]:
if embed == 'extensions':
for ext in obj[embed]:
if tprop in obj[embed][ext] and ts_re.match(obj[embed][ext][tprop]):
try:
parser.parse(obj[embed][ext][tprop])
except ValueError as e:
yield JSONError("'%s': '%s': '%s': '%s' is not a valid timestamp: %s"
% (obj['type'], ext, tprop, obj[embed][ext][tprop], str(e)), instance['id'])
elif tprop in obj[embed] and ts_re.match(obj[embed][tprop]):
try:
parser.parse(obj[embed][tprop])
except ValueError as e:
yield JSONError("'%s': '%s': '%s' is not a valid timestamp: %s"
% (obj['type'], tprop, obj[embed][tprop], str(e)), instance['id'])
|
Ensure timestamps contain sane months, days, hours, minutes, seconds.
|
entailment
|