repo stringlengths 7 55 | path stringlengths 4 127 | func_name stringlengths 1 88 | original_string stringlengths 75 19.8k | language stringclasses 1 value | code stringlengths 75 19.8k | code_tokens list | docstring stringlengths 3 17.3k | docstring_tokens list | sha stringlengths 40 40 | url stringlengths 87 242 | partition stringclasses 1 value |
|---|---|---|---|---|---|---|---|---|---|---|---|
iotile/coretools | iotilegateway/iotilegateway/supervisor/client.py | AsyncSupervisorClient.post_info | def post_info(self, name, message):
"""Asynchronously post a user facing info message about a service.
Args:
name (string): The name of the service
message (string): The user facing info message that will be stored
for the service and can be queried later.
"""
self.post_command(OPERATIONS.CMD_POST_MESSAGE,
_create_message(name, states.INFO_LEVEL, message)) | python | def post_info(self, name, message):
"""Asynchronously post a user facing info message about a service.
Args:
name (string): The name of the service
message (string): The user facing info message that will be stored
for the service and can be queried later.
"""
self.post_command(OPERATIONS.CMD_POST_MESSAGE,
_create_message(name, states.INFO_LEVEL, message)) | [
"def",
"post_info",
"(",
"self",
",",
"name",
",",
"message",
")",
":",
"self",
".",
"post_command",
"(",
"OPERATIONS",
".",
"CMD_POST_MESSAGE",
",",
"_create_message",
"(",
"name",
",",
"states",
".",
"INFO_LEVEL",
",",
"message",
")",
")"
] | Asynchronously post a user facing info message about a service.
Args:
name (string): The name of the service
message (string): The user facing info message that will be stored
for the service and can be queried later. | [
"Asynchronously",
"post",
"a",
"user",
"facing",
"info",
"message",
"about",
"a",
"service",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilegateway/iotilegateway/supervisor/client.py#L326-L335 | train |
iotile/coretools | iotilegateway/iotilegateway/supervisor/client.py | AsyncSupervisorClient._on_status_change | async def _on_status_change(self, update):
"""Update a service that has its status updated."""
info = update['payload']
new_number = info['new_status']
name = update['service']
if name not in self.services:
return
with self._state_lock:
is_changed = self.services[name].state != new_number
self.services[name].state = new_number
# Notify about this service state change if anyone is listening
if self._on_change_callback and is_changed:
self._on_change_callback(name, self.services[name].id, new_number, False, False) | python | async def _on_status_change(self, update):
"""Update a service that has its status updated."""
info = update['payload']
new_number = info['new_status']
name = update['service']
if name not in self.services:
return
with self._state_lock:
is_changed = self.services[name].state != new_number
self.services[name].state = new_number
# Notify about this service state change if anyone is listening
if self._on_change_callback and is_changed:
self._on_change_callback(name, self.services[name].id, new_number, False, False) | [
"async",
"def",
"_on_status_change",
"(",
"self",
",",
"update",
")",
":",
"info",
"=",
"update",
"[",
"'payload'",
"]",
"new_number",
"=",
"info",
"[",
"'new_status'",
"]",
"name",
"=",
"update",
"[",
"'service'",
"]",
"if",
"name",
"not",
"in",
"self",... | Update a service that has its status updated. | [
"Update",
"a",
"service",
"that",
"has",
"its",
"status",
"updated",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilegateway/iotilegateway/supervisor/client.py#L408-L424 | train |
iotile/coretools | iotilegateway/iotilegateway/supervisor/client.py | AsyncSupervisorClient._on_heartbeat | async def _on_heartbeat(self, update):
"""Receive a new heartbeat for a service."""
name = update['service']
if name not in self.services:
return
with self._state_lock:
self.services[name].heartbeat() | python | async def _on_heartbeat(self, update):
"""Receive a new heartbeat for a service."""
name = update['service']
if name not in self.services:
return
with self._state_lock:
self.services[name].heartbeat() | [
"async",
"def",
"_on_heartbeat",
"(",
"self",
",",
"update",
")",
":",
"name",
"=",
"update",
"[",
"'service'",
"]",
"if",
"name",
"not",
"in",
"self",
".",
"services",
":",
"return",
"with",
"self",
".",
"_state_lock",
":",
"self",
".",
"services",
"[... | Receive a new heartbeat for a service. | [
"Receive",
"a",
"new",
"heartbeat",
"for",
"a",
"service",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilegateway/iotilegateway/supervisor/client.py#L446-L455 | train |
iotile/coretools | iotilegateway/iotilegateway/supervisor/client.py | AsyncSupervisorClient._on_message | async def _on_message(self, update):
"""Receive a message from a service."""
name = update['service']
message_obj = update['payload']
if name not in self.services:
return
with self._state_lock:
self.services[name].post_message(message_obj['level'], message_obj['message']) | python | async def _on_message(self, update):
"""Receive a message from a service."""
name = update['service']
message_obj = update['payload']
if name not in self.services:
return
with self._state_lock:
self.services[name].post_message(message_obj['level'], message_obj['message']) | [
"async",
"def",
"_on_message",
"(",
"self",
",",
"update",
")",
":",
"name",
"=",
"update",
"[",
"'service'",
"]",
"message_obj",
"=",
"update",
"[",
"'payload'",
"]",
"if",
"name",
"not",
"in",
"self",
".",
"services",
":",
"return",
"with",
"self",
"... | Receive a message from a service. | [
"Receive",
"a",
"message",
"from",
"a",
"service",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilegateway/iotilegateway/supervisor/client.py#L457-L467 | train |
iotile/coretools | iotilegateway/iotilegateway/supervisor/client.py | AsyncSupervisorClient._on_headline | async def _on_headline(self, update):
"""Receive a headline from a service."""
name = update['service']
message_obj = update['payload']
new_headline = False
if name not in self.services:
return
with self._state_lock:
self.services[name].set_headline(message_obj['level'], message_obj['message'])
if self.services[name].headline.count == 1:
new_headline = True
# Notify about this service state change if anyone is listening
# headline changes are only reported if they are not duplicates
if self._on_change_callback and new_headline:
self._on_change_callback(name, self.services[name].id, self.services[name].state, False, True) | python | async def _on_headline(self, update):
"""Receive a headline from a service."""
name = update['service']
message_obj = update['payload']
new_headline = False
if name not in self.services:
return
with self._state_lock:
self.services[name].set_headline(message_obj['level'], message_obj['message'])
if self.services[name].headline.count == 1:
new_headline = True
# Notify about this service state change if anyone is listening
# headline changes are only reported if they are not duplicates
if self._on_change_callback and new_headline:
self._on_change_callback(name, self.services[name].id, self.services[name].state, False, True) | [
"async",
"def",
"_on_headline",
"(",
"self",
",",
"update",
")",
":",
"name",
"=",
"update",
"[",
"'service'",
"]",
"message_obj",
"=",
"update",
"[",
"'payload'",
"]",
"new_headline",
"=",
"False",
"if",
"name",
"not",
"in",
"self",
".",
"services",
":"... | Receive a headline from a service. | [
"Receive",
"a",
"headline",
"from",
"a",
"service",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilegateway/iotilegateway/supervisor/client.py#L469-L488 | train |
iotile/coretools | iotilegateway/iotilegateway/supervisor/client.py | AsyncSupervisorClient._on_rpc_command | async def _on_rpc_command(self, event):
"""Received an RPC command that we should execute."""
payload = event['payload']
rpc_id = payload['rpc_id']
tag = payload['response_uuid']
args = payload['payload']
result = 'success'
response = b''
if self._rpc_dispatcher is None or not self._rpc_dispatcher.has_rpc(rpc_id):
result = 'rpc_not_found'
else:
try:
response = self._rpc_dispatcher.call_rpc(rpc_id, args)
if inspect.iscoroutine(response):
response = await response
except RPCInvalidArgumentsError:
result = 'invalid_arguments'
except RPCInvalidReturnValueError:
result = 'invalid_response'
except Exception: #pylint:disable=broad-except;We are being called in a background task
self._logger.exception("Exception handling RPC 0x%04X", rpc_id)
result = 'execution_exception'
message = dict(response_uuid=tag, result=result, response=response)
try:
await self.send_command(OPERATIONS.CMD_RESPOND_RPC, message,
MESSAGES.RespondRPCResponse)
except: #pylint:disable=bare-except;We are being called in a background worker
self._logger.exception("Error sending response to RPC 0x%04X", rpc_id) | python | async def _on_rpc_command(self, event):
"""Received an RPC command that we should execute."""
payload = event['payload']
rpc_id = payload['rpc_id']
tag = payload['response_uuid']
args = payload['payload']
result = 'success'
response = b''
if self._rpc_dispatcher is None or not self._rpc_dispatcher.has_rpc(rpc_id):
result = 'rpc_not_found'
else:
try:
response = self._rpc_dispatcher.call_rpc(rpc_id, args)
if inspect.iscoroutine(response):
response = await response
except RPCInvalidArgumentsError:
result = 'invalid_arguments'
except RPCInvalidReturnValueError:
result = 'invalid_response'
except Exception: #pylint:disable=broad-except;We are being called in a background task
self._logger.exception("Exception handling RPC 0x%04X", rpc_id)
result = 'execution_exception'
message = dict(response_uuid=tag, result=result, response=response)
try:
await self.send_command(OPERATIONS.CMD_RESPOND_RPC, message,
MESSAGES.RespondRPCResponse)
except: #pylint:disable=bare-except;We are being called in a background worker
self._logger.exception("Error sending response to RPC 0x%04X", rpc_id) | [
"async",
"def",
"_on_rpc_command",
"(",
"self",
",",
"event",
")",
":",
"payload",
"=",
"event",
"[",
"'payload'",
"]",
"rpc_id",
"=",
"payload",
"[",
"'rpc_id'",
"]",
"tag",
"=",
"payload",
"[",
"'response_uuid'",
"]",
"args",
"=",
"payload",
"[",
"'pay... | Received an RPC command that we should execute. | [
"Received",
"an",
"RPC",
"command",
"that",
"we",
"should",
"execute",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilegateway/iotilegateway/supervisor/client.py#L490-L522 | train |
iotile/coretools | transport_plugins/websocket/iotile_transport_websocket/generic/packing.py | _decode_datetime | def _decode_datetime(obj):
"""Decode a msgpack'ed datetime."""
if '__datetime__' in obj:
obj = datetime.datetime.strptime(obj['as_str'].decode(), "%Y%m%dT%H:%M:%S.%f")
return obj | python | def _decode_datetime(obj):
"""Decode a msgpack'ed datetime."""
if '__datetime__' in obj:
obj = datetime.datetime.strptime(obj['as_str'].decode(), "%Y%m%dT%H:%M:%S.%f")
return obj | [
"def",
"_decode_datetime",
"(",
"obj",
")",
":",
"if",
"'__datetime__'",
"in",
"obj",
":",
"obj",
"=",
"datetime",
".",
"datetime",
".",
"strptime",
"(",
"obj",
"[",
"'as_str'",
"]",
".",
"decode",
"(",
")",
",",
"\"%Y%m%dT%H:%M:%S.%f\"",
")",
"return",
... | Decode a msgpack'ed datetime. | [
"Decode",
"a",
"msgpack",
"ed",
"datetime",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/transport_plugins/websocket/iotile_transport_websocket/generic/packing.py#L19-L24 | train |
iotile/coretools | transport_plugins/websocket/iotile_transport_websocket/generic/packing.py | _encode_datetime | def _encode_datetime(obj):
"""Encode a msgpck'ed datetime."""
if isinstance(obj, datetime.datetime):
obj = {'__datetime__': True, 'as_str': obj.strftime("%Y%m%dT%H:%M:%S.%f").encode()}
return obj | python | def _encode_datetime(obj):
"""Encode a msgpck'ed datetime."""
if isinstance(obj, datetime.datetime):
obj = {'__datetime__': True, 'as_str': obj.strftime("%Y%m%dT%H:%M:%S.%f").encode()}
return obj | [
"def",
"_encode_datetime",
"(",
"obj",
")",
":",
"if",
"isinstance",
"(",
"obj",
",",
"datetime",
".",
"datetime",
")",
":",
"obj",
"=",
"{",
"'__datetime__'",
":",
"True",
",",
"'as_str'",
":",
"obj",
".",
"strftime",
"(",
"\"%Y%m%dT%H:%M:%S.%f\"",
")",
... | Encode a msgpck'ed datetime. | [
"Encode",
"a",
"msgpck",
"ed",
"datetime",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/transport_plugins/websocket/iotile_transport_websocket/generic/packing.py#L27-L32 | train |
iotile/coretools | iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/cyglink.py | _versioned_lib_suffix | def _versioned_lib_suffix(env, suffix, version):
"""Generate versioned shared library suffix from a unversioned one.
If suffix='.dll', and version='0.1.2', then it returns '-0-1-2.dll'"""
Verbose = False
if Verbose:
print("_versioned_lib_suffix: suffix= ", suffix)
print("_versioned_lib_suffix: version= ", version)
cygversion = re.sub('\.', '-', version)
if not suffix.startswith('-' + cygversion):
suffix = '-' + cygversion + suffix
if Verbose:
print("_versioned_lib_suffix: return suffix= ", suffix)
return suffix | python | def _versioned_lib_suffix(env, suffix, version):
"""Generate versioned shared library suffix from a unversioned one.
If suffix='.dll', and version='0.1.2', then it returns '-0-1-2.dll'"""
Verbose = False
if Verbose:
print("_versioned_lib_suffix: suffix= ", suffix)
print("_versioned_lib_suffix: version= ", version)
cygversion = re.sub('\.', '-', version)
if not suffix.startswith('-' + cygversion):
suffix = '-' + cygversion + suffix
if Verbose:
print("_versioned_lib_suffix: return suffix= ", suffix)
return suffix | [
"def",
"_versioned_lib_suffix",
"(",
"env",
",",
"suffix",
",",
"version",
")",
":",
"Verbose",
"=",
"False",
"if",
"Verbose",
":",
"print",
"(",
"\"_versioned_lib_suffix: suffix= \"",
",",
"suffix",
")",
"print",
"(",
"\"_versioned_lib_suffix: version= \"",
",",
... | Generate versioned shared library suffix from a unversioned one.
If suffix='.dll', and version='0.1.2', then it returns '-0-1-2.dll | [
"Generate",
"versioned",
"shared",
"library",
"suffix",
"from",
"a",
"unversioned",
"one",
".",
"If",
"suffix",
"=",
".",
"dll",
"and",
"version",
"=",
"0",
".",
"1",
".",
"2",
"then",
"it",
"returns",
"-",
"0",
"-",
"1",
"-",
"2",
".",
"dll"
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/cyglink.py#L128-L140 | train |
iotile/coretools | iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/cyglink.py | generate | def generate(env):
"""Add Builders and construction variables for cyglink to an Environment."""
gnulink.generate(env)
env['LINKFLAGS'] = SCons.Util.CLVar('-Wl,-no-undefined')
env['SHLINKCOM'] = shlib_action
env['LDMODULECOM'] = ldmod_action
env.Append(SHLIBEMITTER = [shlib_emitter])
env.Append(LDMODULEEMITTER = [ldmod_emitter])
env['SHLIBPREFIX'] = 'cyg'
env['SHLIBSUFFIX'] = '.dll'
env['IMPLIBPREFIX'] = 'lib'
env['IMPLIBSUFFIX'] = '.dll.a'
# Variables used by versioned shared libraries
env['_SHLIBVERSIONFLAGS'] = '$SHLIBVERSIONFLAGS'
env['_LDMODULEVERSIONFLAGS'] = '$LDMODULEVERSIONFLAGS'
# SHLIBVERSIONFLAGS and LDMODULEVERSIONFLAGS are same as in gnulink...
# LINKCALLBACKS are NOT inherited from gnulink
env['LINKCALLBACKS'] = {
'VersionedShLibSuffix' : _versioned_lib_suffix,
'VersionedLdModSuffix' : _versioned_lib_suffix,
'VersionedImpLibSuffix' : _versioned_lib_suffix,
'VersionedShLibName' : link._versioned_shlib_name,
'VersionedLdModName' : link._versioned_ldmod_name,
'VersionedShLibImpLibName' : lambda *args: _versioned_implib_name(*args, libtype='ShLib'),
'VersionedLdModImpLibName' : lambda *args: _versioned_implib_name(*args, libtype='LdMod'),
'VersionedShLibImpLibSymlinks' : lambda *args: _versioned_implib_symlinks(*args, libtype='ShLib'),
'VersionedLdModImpLibSymlinks' : lambda *args: _versioned_implib_symlinks(*args, libtype='LdMod'),
}
# these variables were set by gnulink but are not used in cyglink
try: del env['_SHLIBSONAME']
except KeyError: pass
try: del env['_LDMODULESONAME']
except KeyError: pass | python | def generate(env):
"""Add Builders and construction variables for cyglink to an Environment."""
gnulink.generate(env)
env['LINKFLAGS'] = SCons.Util.CLVar('-Wl,-no-undefined')
env['SHLINKCOM'] = shlib_action
env['LDMODULECOM'] = ldmod_action
env.Append(SHLIBEMITTER = [shlib_emitter])
env.Append(LDMODULEEMITTER = [ldmod_emitter])
env['SHLIBPREFIX'] = 'cyg'
env['SHLIBSUFFIX'] = '.dll'
env['IMPLIBPREFIX'] = 'lib'
env['IMPLIBSUFFIX'] = '.dll.a'
# Variables used by versioned shared libraries
env['_SHLIBVERSIONFLAGS'] = '$SHLIBVERSIONFLAGS'
env['_LDMODULEVERSIONFLAGS'] = '$LDMODULEVERSIONFLAGS'
# SHLIBVERSIONFLAGS and LDMODULEVERSIONFLAGS are same as in gnulink...
# LINKCALLBACKS are NOT inherited from gnulink
env['LINKCALLBACKS'] = {
'VersionedShLibSuffix' : _versioned_lib_suffix,
'VersionedLdModSuffix' : _versioned_lib_suffix,
'VersionedImpLibSuffix' : _versioned_lib_suffix,
'VersionedShLibName' : link._versioned_shlib_name,
'VersionedLdModName' : link._versioned_ldmod_name,
'VersionedShLibImpLibName' : lambda *args: _versioned_implib_name(*args, libtype='ShLib'),
'VersionedLdModImpLibName' : lambda *args: _versioned_implib_name(*args, libtype='LdMod'),
'VersionedShLibImpLibSymlinks' : lambda *args: _versioned_implib_symlinks(*args, libtype='ShLib'),
'VersionedLdModImpLibSymlinks' : lambda *args: _versioned_implib_symlinks(*args, libtype='LdMod'),
}
# these variables were set by gnulink but are not used in cyglink
try: del env['_SHLIBSONAME']
except KeyError: pass
try: del env['_LDMODULESONAME']
except KeyError: pass | [
"def",
"generate",
"(",
"env",
")",
":",
"gnulink",
".",
"generate",
"(",
"env",
")",
"env",
"[",
"'LINKFLAGS'",
"]",
"=",
"SCons",
".",
"Util",
".",
"CLVar",
"(",
"'-Wl,-no-undefined'",
")",
"env",
"[",
"'SHLINKCOM'",
"]",
"=",
"shlib_action",
"env",
... | Add Builders and construction variables for cyglink to an Environment. | [
"Add",
"Builders",
"and",
"construction",
"variables",
"for",
"cyglink",
"to",
"an",
"Environment",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/cyglink.py#L185-L225 | train |
iotile/coretools | iotilecore/iotile/core/utilities/validating_dispatcher.py | ValidatingDispatcher.dispatch | def dispatch(self, message):
"""Dispatch a message to a callback based on its schema.
Args:
message (dict): The message to dispatch
"""
for validator, callback in self.validators:
if not validator.matches(message):
continue
callback(message)
return
raise ArgumentError("No handler was registered for message", message=message) | python | def dispatch(self, message):
"""Dispatch a message to a callback based on its schema.
Args:
message (dict): The message to dispatch
"""
for validator, callback in self.validators:
if not validator.matches(message):
continue
callback(message)
return
raise ArgumentError("No handler was registered for message", message=message) | [
"def",
"dispatch",
"(",
"self",
",",
"message",
")",
":",
"for",
"validator",
",",
"callback",
"in",
"self",
".",
"validators",
":",
"if",
"not",
"validator",
".",
"matches",
"(",
"message",
")",
":",
"continue",
"callback",
"(",
"message",
")",
"return"... | Dispatch a message to a callback based on its schema.
Args:
message (dict): The message to dispatch | [
"Dispatch",
"a",
"message",
"to",
"a",
"callback",
"based",
"on",
"its",
"schema",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/utilities/validating_dispatcher.py#L27-L41 | train |
iotile/coretools | iotilebuild/iotile/build/scripts/iotile_emulate.py | main | def main(raw_args=None):
"""Run the iotile-emulate script.
Args:
raw_args (list): Optional list of commmand line arguments. If not
passed these are pulled from sys.argv.
"""
if raw_args is None:
raw_args = sys.argv[1:]
parser = build_parser()
args = parser.parse_args(raw_args)
if args.firmware_image is None and args.gdb is None:
print("You must specify either a firmware image or attach a debugger with --gdb <PORT>")
return 1
test_args = ['qemu-system-gnuarmeclipse', '-verbose', '-verbose', '-board', 'STM32F0-Discovery',
'-nographic', '-monitor', 'null', '-serial', 'null', '--semihosting-config',
'enable=on,target=native', '-d', 'unimp,guest_errors']
if args.firmware_image:
test_args += ['-image', args.firmware_image]
if args.gdb:
test_args += ['--gdb', 'tcp::%d' % args.gdb]
proc = subprocess.Popen(test_args, stdout=sys.stdout, stderr=sys.stderr)
try:
proc.communicate()
except KeyboardInterrupt:
proc.terminate()
return 0 | python | def main(raw_args=None):
"""Run the iotile-emulate script.
Args:
raw_args (list): Optional list of commmand line arguments. If not
passed these are pulled from sys.argv.
"""
if raw_args is None:
raw_args = sys.argv[1:]
parser = build_parser()
args = parser.parse_args(raw_args)
if args.firmware_image is None and args.gdb is None:
print("You must specify either a firmware image or attach a debugger with --gdb <PORT>")
return 1
test_args = ['qemu-system-gnuarmeclipse', '-verbose', '-verbose', '-board', 'STM32F0-Discovery',
'-nographic', '-monitor', 'null', '-serial', 'null', '--semihosting-config',
'enable=on,target=native', '-d', 'unimp,guest_errors']
if args.firmware_image:
test_args += ['-image', args.firmware_image]
if args.gdb:
test_args += ['--gdb', 'tcp::%d' % args.gdb]
proc = subprocess.Popen(test_args, stdout=sys.stdout, stderr=sys.stderr)
try:
proc.communicate()
except KeyboardInterrupt:
proc.terminate()
return 0 | [
"def",
"main",
"(",
"raw_args",
"=",
"None",
")",
":",
"if",
"raw_args",
"is",
"None",
":",
"raw_args",
"=",
"sys",
".",
"argv",
"[",
"1",
":",
"]",
"parser",
"=",
"build_parser",
"(",
")",
"args",
"=",
"parser",
".",
"parse_args",
"(",
"raw_args",
... | Run the iotile-emulate script.
Args:
raw_args (list): Optional list of commmand line arguments. If not
passed these are pulled from sys.argv. | [
"Run",
"the",
"iotile",
"-",
"emulate",
"script",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/scripts/iotile_emulate.py#L30-L65 | train |
iotile/coretools | iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/qt.py | _detect | def _detect(env):
"""Not really safe, but fast method to detect the QT library"""
QTDIR = None
if not QTDIR:
QTDIR = env.get('QTDIR',None)
if not QTDIR:
QTDIR = os.environ.get('QTDIR',None)
if not QTDIR:
moc = env.WhereIs('moc')
if moc:
QTDIR = os.path.dirname(os.path.dirname(moc))
SCons.Warnings.warn(
QtdirNotFound,
"Could not detect qt, using moc executable as a hint (QTDIR=%s)" % QTDIR)
else:
QTDIR = None
SCons.Warnings.warn(
QtdirNotFound,
"Could not detect qt, using empty QTDIR")
return QTDIR | python | def _detect(env):
"""Not really safe, but fast method to detect the QT library"""
QTDIR = None
if not QTDIR:
QTDIR = env.get('QTDIR',None)
if not QTDIR:
QTDIR = os.environ.get('QTDIR',None)
if not QTDIR:
moc = env.WhereIs('moc')
if moc:
QTDIR = os.path.dirname(os.path.dirname(moc))
SCons.Warnings.warn(
QtdirNotFound,
"Could not detect qt, using moc executable as a hint (QTDIR=%s)" % QTDIR)
else:
QTDIR = None
SCons.Warnings.warn(
QtdirNotFound,
"Could not detect qt, using empty QTDIR")
return QTDIR | [
"def",
"_detect",
"(",
"env",
")",
":",
"QTDIR",
"=",
"None",
"if",
"not",
"QTDIR",
":",
"QTDIR",
"=",
"env",
".",
"get",
"(",
"'QTDIR'",
",",
"None",
")",
"if",
"not",
"QTDIR",
":",
"QTDIR",
"=",
"os",
".",
"environ",
".",
"get",
"(",
"'QTDIR'",... | Not really safe, but fast method to detect the QT library | [
"Not",
"really",
"safe",
"but",
"fast",
"method",
"to",
"detect",
"the",
"QT",
"library"
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/qt.py#L188-L207 | train |
iotile/coretools | iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/qt.py | generate | def generate(env):
"""Add Builders and construction variables for qt to an Environment."""
CLVar = SCons.Util.CLVar
Action = SCons.Action.Action
Builder = SCons.Builder.Builder
env.SetDefault(QTDIR = _detect(env),
QT_BINPATH = os.path.join('$QTDIR', 'bin'),
QT_CPPPATH = os.path.join('$QTDIR', 'include'),
QT_LIBPATH = os.path.join('$QTDIR', 'lib'),
QT_MOC = os.path.join('$QT_BINPATH','moc'),
QT_UIC = os.path.join('$QT_BINPATH','uic'),
QT_LIB = 'qt', # may be set to qt-mt
QT_AUTOSCAN = 1, # scan for moc'able sources
# Some QT specific flags. I don't expect someone wants to
# manipulate those ...
QT_UICIMPLFLAGS = CLVar(''),
QT_UICDECLFLAGS = CLVar(''),
QT_MOCFROMHFLAGS = CLVar(''),
QT_MOCFROMCXXFLAGS = CLVar('-i'),
# suffixes/prefixes for the headers / sources to generate
QT_UICDECLPREFIX = '',
QT_UICDECLSUFFIX = '.h',
QT_UICIMPLPREFIX = 'uic_',
QT_UICIMPLSUFFIX = '$CXXFILESUFFIX',
QT_MOCHPREFIX = 'moc_',
QT_MOCHSUFFIX = '$CXXFILESUFFIX',
QT_MOCCXXPREFIX = '',
QT_MOCCXXSUFFIX = '.moc',
QT_UISUFFIX = '.ui',
# Commands for the qt support ...
# command to generate header, implementation and moc-file
# from a .ui file
QT_UICCOM = [
CLVar('$QT_UIC $QT_UICDECLFLAGS -o ${TARGETS[0]} $SOURCE'),
CLVar('$QT_UIC $QT_UICIMPLFLAGS -impl ${TARGETS[0].file} '
'-o ${TARGETS[1]} $SOURCE'),
CLVar('$QT_MOC $QT_MOCFROMHFLAGS -o ${TARGETS[2]} ${TARGETS[0]}')],
# command to generate meta object information for a class
# declarated in a header
QT_MOCFROMHCOM = (
'$QT_MOC $QT_MOCFROMHFLAGS -o ${TARGETS[0]} $SOURCE'),
# command to generate meta object information for a class
# declarated in a cpp file
QT_MOCFROMCXXCOM = [
CLVar('$QT_MOC $QT_MOCFROMCXXFLAGS -o ${TARGETS[0]} $SOURCE'),
Action(checkMocIncluded,None)])
# ... and the corresponding builders
uicBld = Builder(action=SCons.Action.Action('$QT_UICCOM', '$QT_UICCOMSTR'),
emitter=uicEmitter,
src_suffix='$QT_UISUFFIX',
suffix='$QT_UICDECLSUFFIX',
prefix='$QT_UICDECLPREFIX',
source_scanner=uicScanner)
mocBld = Builder(action={}, prefix={}, suffix={})
for h in header_extensions:
act = SCons.Action.Action('$QT_MOCFROMHCOM', '$QT_MOCFROMHCOMSTR')
mocBld.add_action(h, act)
mocBld.prefix[h] = '$QT_MOCHPREFIX'
mocBld.suffix[h] = '$QT_MOCHSUFFIX'
for cxx in cxx_suffixes:
act = SCons.Action.Action('$QT_MOCFROMCXXCOM', '$QT_MOCFROMCXXCOMSTR')
mocBld.add_action(cxx, act)
mocBld.prefix[cxx] = '$QT_MOCCXXPREFIX'
mocBld.suffix[cxx] = '$QT_MOCCXXSUFFIX'
# register the builders
env['BUILDERS']['Uic'] = uicBld
env['BUILDERS']['Moc'] = mocBld
static_obj, shared_obj = SCons.Tool.createObjBuilders(env)
static_obj.add_src_builder('Uic')
shared_obj.add_src_builder('Uic')
# We use the emitters of Program / StaticLibrary / SharedLibrary
# to scan for moc'able files
# We can't refer to the builders directly, we have to fetch them
# as Environment attributes because that sets them up to be called
# correctly later by our emitter.
env.AppendUnique(PROGEMITTER =[AutomocStatic],
SHLIBEMITTER=[AutomocShared],
LDMODULEEMITTER=[AutomocShared],
LIBEMITTER =[AutomocStatic],
# Of course, we need to link against the qt libraries
CPPPATH=["$QT_CPPPATH"],
LIBPATH=["$QT_LIBPATH"],
LIBS=['$QT_LIB']) | python | def generate(env):
"""Add Builders and construction variables for qt to an Environment."""
CLVar = SCons.Util.CLVar
Action = SCons.Action.Action
Builder = SCons.Builder.Builder
env.SetDefault(QTDIR = _detect(env),
QT_BINPATH = os.path.join('$QTDIR', 'bin'),
QT_CPPPATH = os.path.join('$QTDIR', 'include'),
QT_LIBPATH = os.path.join('$QTDIR', 'lib'),
QT_MOC = os.path.join('$QT_BINPATH','moc'),
QT_UIC = os.path.join('$QT_BINPATH','uic'),
QT_LIB = 'qt', # may be set to qt-mt
QT_AUTOSCAN = 1, # scan for moc'able sources
# Some QT specific flags. I don't expect someone wants to
# manipulate those ...
QT_UICIMPLFLAGS = CLVar(''),
QT_UICDECLFLAGS = CLVar(''),
QT_MOCFROMHFLAGS = CLVar(''),
QT_MOCFROMCXXFLAGS = CLVar('-i'),
# suffixes/prefixes for the headers / sources to generate
QT_UICDECLPREFIX = '',
QT_UICDECLSUFFIX = '.h',
QT_UICIMPLPREFIX = 'uic_',
QT_UICIMPLSUFFIX = '$CXXFILESUFFIX',
QT_MOCHPREFIX = 'moc_',
QT_MOCHSUFFIX = '$CXXFILESUFFIX',
QT_MOCCXXPREFIX = '',
QT_MOCCXXSUFFIX = '.moc',
QT_UISUFFIX = '.ui',
# Commands for the qt support ...
# command to generate header, implementation and moc-file
# from a .ui file
QT_UICCOM = [
CLVar('$QT_UIC $QT_UICDECLFLAGS -o ${TARGETS[0]} $SOURCE'),
CLVar('$QT_UIC $QT_UICIMPLFLAGS -impl ${TARGETS[0].file} '
'-o ${TARGETS[1]} $SOURCE'),
CLVar('$QT_MOC $QT_MOCFROMHFLAGS -o ${TARGETS[2]} ${TARGETS[0]}')],
# command to generate meta object information for a class
# declarated in a header
QT_MOCFROMHCOM = (
'$QT_MOC $QT_MOCFROMHFLAGS -o ${TARGETS[0]} $SOURCE'),
# command to generate meta object information for a class
# declarated in a cpp file
QT_MOCFROMCXXCOM = [
CLVar('$QT_MOC $QT_MOCFROMCXXFLAGS -o ${TARGETS[0]} $SOURCE'),
Action(checkMocIncluded,None)])
# ... and the corresponding builders
uicBld = Builder(action=SCons.Action.Action('$QT_UICCOM', '$QT_UICCOMSTR'),
emitter=uicEmitter,
src_suffix='$QT_UISUFFIX',
suffix='$QT_UICDECLSUFFIX',
prefix='$QT_UICDECLPREFIX',
source_scanner=uicScanner)
mocBld = Builder(action={}, prefix={}, suffix={})
for h in header_extensions:
act = SCons.Action.Action('$QT_MOCFROMHCOM', '$QT_MOCFROMHCOMSTR')
mocBld.add_action(h, act)
mocBld.prefix[h] = '$QT_MOCHPREFIX'
mocBld.suffix[h] = '$QT_MOCHSUFFIX'
for cxx in cxx_suffixes:
act = SCons.Action.Action('$QT_MOCFROMCXXCOM', '$QT_MOCFROMCXXCOMSTR')
mocBld.add_action(cxx, act)
mocBld.prefix[cxx] = '$QT_MOCCXXPREFIX'
mocBld.suffix[cxx] = '$QT_MOCCXXSUFFIX'
# register the builders
env['BUILDERS']['Uic'] = uicBld
env['BUILDERS']['Moc'] = mocBld
static_obj, shared_obj = SCons.Tool.createObjBuilders(env)
static_obj.add_src_builder('Uic')
shared_obj.add_src_builder('Uic')
# We use the emitters of Program / StaticLibrary / SharedLibrary
# to scan for moc'able files
# We can't refer to the builders directly, we have to fetch them
# as Environment attributes because that sets them up to be called
# correctly later by our emitter.
env.AppendUnique(PROGEMITTER =[AutomocStatic],
SHLIBEMITTER=[AutomocShared],
LDMODULEEMITTER=[AutomocShared],
LIBEMITTER =[AutomocStatic],
# Of course, we need to link against the qt libraries
CPPPATH=["$QT_CPPPATH"],
LIBPATH=["$QT_LIBPATH"],
LIBS=['$QT_LIB']) | [
"def",
"generate",
"(",
"env",
")",
":",
"CLVar",
"=",
"SCons",
".",
"Util",
".",
"CLVar",
"Action",
"=",
"SCons",
".",
"Action",
".",
"Action",
"Builder",
"=",
"SCons",
".",
"Builder",
".",
"Builder",
"env",
".",
"SetDefault",
"(",
"QTDIR",
"=",
"_d... | Add Builders and construction variables for qt to an Environment. | [
"Add",
"Builders",
"and",
"construction",
"variables",
"for",
"qt",
"to",
"an",
"Environment",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/qt.py#L244-L334 | train |
iotile/coretools | iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/cpp.py | CPP_to_Python | def CPP_to_Python(s):
"""
Converts a C pre-processor expression into an equivalent
Python expression that can be evaluated.
"""
s = CPP_to_Python_Ops_Expression.sub(CPP_to_Python_Ops_Sub, s)
for expr, repl in CPP_to_Python_Eval_List:
s = expr.sub(repl, s)
return s | python | def CPP_to_Python(s):
"""
Converts a C pre-processor expression into an equivalent
Python expression that can be evaluated.
"""
s = CPP_to_Python_Ops_Expression.sub(CPP_to_Python_Ops_Sub, s)
for expr, repl in CPP_to_Python_Eval_List:
s = expr.sub(repl, s)
return s | [
"def",
"CPP_to_Python",
"(",
"s",
")",
":",
"s",
"=",
"CPP_to_Python_Ops_Expression",
".",
"sub",
"(",
"CPP_to_Python_Ops_Sub",
",",
"s",
")",
"for",
"expr",
",",
"repl",
"in",
"CPP_to_Python_Eval_List",
":",
"s",
"=",
"expr",
".",
"sub",
"(",
"repl",
",",... | Converts a C pre-processor expression into an equivalent
Python expression that can be evaluated. | [
"Converts",
"a",
"C",
"pre",
"-",
"processor",
"expression",
"into",
"an",
"equivalent",
"Python",
"expression",
"that",
"can",
"be",
"evaluated",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/cpp.py#L158-L166 | train |
iotile/coretools | iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/cpp.py | PreProcessor.tupleize | def tupleize(self, contents):
"""
Turns the contents of a file into a list of easily-processed
tuples describing the CPP lines in the file.
The first element of each tuple is the line's preprocessor
directive (#if, #include, #define, etc., minus the initial '#').
The remaining elements are specific to the type of directive, as
pulled apart by the regular expression.
"""
global CPP_Expression, Table
contents = line_continuations.sub('', contents)
cpp_tuples = CPP_Expression.findall(contents)
return [(m[0],) + Table[m[0]].match(m[1]).groups() for m in cpp_tuples] | python | def tupleize(self, contents):
"""
Turns the contents of a file into a list of easily-processed
tuples describing the CPP lines in the file.
The first element of each tuple is the line's preprocessor
directive (#if, #include, #define, etc., minus the initial '#').
The remaining elements are specific to the type of directive, as
pulled apart by the regular expression.
"""
global CPP_Expression, Table
contents = line_continuations.sub('', contents)
cpp_tuples = CPP_Expression.findall(contents)
return [(m[0],) + Table[m[0]].match(m[1]).groups() for m in cpp_tuples] | [
"def",
"tupleize",
"(",
"self",
",",
"contents",
")",
":",
"global",
"CPP_Expression",
",",
"Table",
"contents",
"=",
"line_continuations",
".",
"sub",
"(",
"''",
",",
"contents",
")",
"cpp_tuples",
"=",
"CPP_Expression",
".",
"findall",
"(",
"contents",
")"... | Turns the contents of a file into a list of easily-processed
tuples describing the CPP lines in the file.
The first element of each tuple is the line's preprocessor
directive (#if, #include, #define, etc., minus the initial '#').
The remaining elements are specific to the type of directive, as
pulled apart by the regular expression. | [
"Turns",
"the",
"contents",
"of",
"a",
"file",
"into",
"a",
"list",
"of",
"easily",
"-",
"processed",
"tuples",
"describing",
"the",
"CPP",
"lines",
"in",
"the",
"file",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/cpp.py#L274-L287 | train |
iotile/coretools | iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/cpp.py | PreProcessor.process_contents | def process_contents(self, contents, fname=None):
"""
Pre-processes a file contents.
This is the main internal entry point.
"""
self.stack = []
self.dispatch_table = self.default_table.copy()
self.current_file = fname
self.tuples = self.tupleize(contents)
self.initialize_result(fname)
while self.tuples:
t = self.tuples.pop(0)
# Uncomment to see the list of tuples being processed (e.g.,
# to validate the CPP lines are being translated correctly).
#print(t)
self.dispatch_table[t[0]](t)
return self.finalize_result(fname) | python | def process_contents(self, contents, fname=None):
"""
Pre-processes a file contents.
This is the main internal entry point.
"""
self.stack = []
self.dispatch_table = self.default_table.copy()
self.current_file = fname
self.tuples = self.tupleize(contents)
self.initialize_result(fname)
while self.tuples:
t = self.tuples.pop(0)
# Uncomment to see the list of tuples being processed (e.g.,
# to validate the CPP lines are being translated correctly).
#print(t)
self.dispatch_table[t[0]](t)
return self.finalize_result(fname) | [
"def",
"process_contents",
"(",
"self",
",",
"contents",
",",
"fname",
"=",
"None",
")",
":",
"self",
".",
"stack",
"=",
"[",
"]",
"self",
".",
"dispatch_table",
"=",
"self",
".",
"default_table",
".",
"copy",
"(",
")",
"self",
".",
"current_file",
"="... | Pre-processes a file contents.
This is the main internal entry point. | [
"Pre",
"-",
"processes",
"a",
"file",
"contents",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/cpp.py#L298-L316 | train |
iotile/coretools | iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/cpp.py | PreProcessor.save | def save(self):
"""
Pushes the current dispatch table on the stack and re-initializes
the current dispatch table to the default.
"""
self.stack.append(self.dispatch_table)
self.dispatch_table = self.default_table.copy() | python | def save(self):
"""
Pushes the current dispatch table on the stack and re-initializes
the current dispatch table to the default.
"""
self.stack.append(self.dispatch_table)
self.dispatch_table = self.default_table.copy() | [
"def",
"save",
"(",
"self",
")",
":",
"self",
".",
"stack",
".",
"append",
"(",
"self",
".",
"dispatch_table",
")",
"self",
".",
"dispatch_table",
"=",
"self",
".",
"default_table",
".",
"copy",
"(",
")"
] | Pushes the current dispatch table on the stack and re-initializes
the current dispatch table to the default. | [
"Pushes",
"the",
"current",
"dispatch",
"table",
"on",
"the",
"stack",
"and",
"re",
"-",
"initializes",
"the",
"current",
"dispatch",
"table",
"to",
"the",
"default",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/cpp.py#L320-L326 | train |
iotile/coretools | iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/cpp.py | PreProcessor.eval_expression | def eval_expression(self, t):
"""
Evaluates a C preprocessor expression.
This is done by converting it to a Python equivalent and
eval()ing it in the C preprocessor namespace we use to
track #define values.
"""
t = CPP_to_Python(' '.join(t[1:]))
try: return eval(t, self.cpp_namespace)
except (NameError, TypeError): return 0 | python | def eval_expression(self, t):
"""
Evaluates a C preprocessor expression.
This is done by converting it to a Python equivalent and
eval()ing it in the C preprocessor namespace we use to
track #define values.
"""
t = CPP_to_Python(' '.join(t[1:]))
try: return eval(t, self.cpp_namespace)
except (NameError, TypeError): return 0 | [
"def",
"eval_expression",
"(",
"self",
",",
"t",
")",
":",
"t",
"=",
"CPP_to_Python",
"(",
"' '",
".",
"join",
"(",
"t",
"[",
"1",
":",
"]",
")",
")",
"try",
":",
"return",
"eval",
"(",
"t",
",",
"self",
".",
"cpp_namespace",
")",
"except",
"(",
... | Evaluates a C preprocessor expression.
This is done by converting it to a Python equivalent and
eval()ing it in the C preprocessor namespace we use to
track #define values. | [
"Evaluates",
"a",
"C",
"preprocessor",
"expression",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/cpp.py#L348-L358 | train |
iotile/coretools | iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/rmic.py | emit_rmic_classes | def emit_rmic_classes(target, source, env):
"""Create and return lists of Java RMI stub and skeleton
class files to be created from a set of class files.
"""
class_suffix = env.get('JAVACLASSSUFFIX', '.class')
classdir = env.get('JAVACLASSDIR')
if not classdir:
try:
s = source[0]
except IndexError:
classdir = '.'
else:
try:
classdir = s.attributes.java_classdir
except AttributeError:
classdir = '.'
classdir = env.Dir(classdir).rdir()
if str(classdir) == '.':
c_ = None
else:
c_ = str(classdir) + os.sep
slist = []
for src in source:
try:
classname = src.attributes.java_classname
except AttributeError:
classname = str(src)
if c_ and classname[:len(c_)] == c_:
classname = classname[len(c_):]
if class_suffix and classname[:-len(class_suffix)] == class_suffix:
classname = classname[-len(class_suffix):]
s = src.rfile()
s.attributes.java_classdir = classdir
s.attributes.java_classname = classname
slist.append(s)
stub_suffixes = ['_Stub']
if env.get('JAVAVERSION') == '1.4':
stub_suffixes.append('_Skel')
tlist = []
for s in source:
for suff in stub_suffixes:
fname = s.attributes.java_classname.replace('.', os.sep) + \
suff + class_suffix
t = target[0].File(fname)
t.attributes.java_lookupdir = target[0]
tlist.append(t)
return tlist, source | python | def emit_rmic_classes(target, source, env):
"""Create and return lists of Java RMI stub and skeleton
class files to be created from a set of class files.
"""
class_suffix = env.get('JAVACLASSSUFFIX', '.class')
classdir = env.get('JAVACLASSDIR')
if not classdir:
try:
s = source[0]
except IndexError:
classdir = '.'
else:
try:
classdir = s.attributes.java_classdir
except AttributeError:
classdir = '.'
classdir = env.Dir(classdir).rdir()
if str(classdir) == '.':
c_ = None
else:
c_ = str(classdir) + os.sep
slist = []
for src in source:
try:
classname = src.attributes.java_classname
except AttributeError:
classname = str(src)
if c_ and classname[:len(c_)] == c_:
classname = classname[len(c_):]
if class_suffix and classname[:-len(class_suffix)] == class_suffix:
classname = classname[-len(class_suffix):]
s = src.rfile()
s.attributes.java_classdir = classdir
s.attributes.java_classname = classname
slist.append(s)
stub_suffixes = ['_Stub']
if env.get('JAVAVERSION') == '1.4':
stub_suffixes.append('_Skel')
tlist = []
for s in source:
for suff in stub_suffixes:
fname = s.attributes.java_classname.replace('.', os.sep) + \
suff + class_suffix
t = target[0].File(fname)
t.attributes.java_lookupdir = target[0]
tlist.append(t)
return tlist, source | [
"def",
"emit_rmic_classes",
"(",
"target",
",",
"source",
",",
"env",
")",
":",
"class_suffix",
"=",
"env",
".",
"get",
"(",
"'JAVACLASSSUFFIX'",
",",
"'.class'",
")",
"classdir",
"=",
"env",
".",
"get",
"(",
"'JAVACLASSDIR'",
")",
"if",
"not",
"classdir",... | Create and return lists of Java RMI stub and skeleton
class files to be created from a set of class files. | [
"Create",
"and",
"return",
"lists",
"of",
"Java",
"RMI",
"stub",
"and",
"skeleton",
"class",
"files",
"to",
"be",
"created",
"from",
"a",
"set",
"of",
"class",
"files",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/rmic.py#L43-L94 | train |
iotile/coretools | iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/rmic.py | generate | def generate(env):
"""Add Builders and construction variables for rmic to an Environment."""
env['BUILDERS']['RMIC'] = RMICBuilder
env['RMIC'] = 'rmic'
env['RMICFLAGS'] = SCons.Util.CLVar('')
env['RMICCOM'] = '$RMIC $RMICFLAGS -d ${TARGET.attributes.java_lookupdir} -classpath ${SOURCE.attributes.java_classdir} ${SOURCES.attributes.java_classname}'
env['JAVACLASSSUFFIX'] = '.class' | python | def generate(env):
"""Add Builders and construction variables for rmic to an Environment."""
env['BUILDERS']['RMIC'] = RMICBuilder
env['RMIC'] = 'rmic'
env['RMICFLAGS'] = SCons.Util.CLVar('')
env['RMICCOM'] = '$RMIC $RMICFLAGS -d ${TARGET.attributes.java_lookupdir} -classpath ${SOURCE.attributes.java_classdir} ${SOURCES.attributes.java_classname}'
env['JAVACLASSSUFFIX'] = '.class' | [
"def",
"generate",
"(",
"env",
")",
":",
"env",
"[",
"'BUILDERS'",
"]",
"[",
"'RMIC'",
"]",
"=",
"RMICBuilder",
"env",
"[",
"'RMIC'",
"]",
"=",
"'rmic'",
"env",
"[",
"'RMICFLAGS'",
"]",
"=",
"SCons",
".",
"Util",
".",
"CLVar",
"(",
"''",
")",
"env"... | Add Builders and construction variables for rmic to an Environment. | [
"Add",
"Builders",
"and",
"construction",
"variables",
"for",
"rmic",
"to",
"an",
"Environment",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/rmic.py#L104-L111 | train |
iotile/coretools | transport_plugins/bled112/iotile_transport_bled112/bled112_cmd.py | BLED112CommandProcessor._set_scan_parameters | def _set_scan_parameters(self, interval=2100, window=2100, active=False):
"""
Set the scan interval and window in units of ms and set whether active scanning is performed
"""
active_num = 0
if bool(active):
active_num = 1
interval_num = int(interval*1000/625)
window_num = int(window*1000/625)
payload = struct.pack("<HHB", interval_num, window_num, active_num)
try:
response = self._send_command(6, 7, payload)
if response.payload[0] != 0:
return False, {'reason': "Could not set scanning parameters", 'error': response.payload[0]}
except InternalTimeoutError:
return False, {'reason': 'Timeout waiting for response'}
return True, None | python | def _set_scan_parameters(self, interval=2100, window=2100, active=False):
"""
Set the scan interval and window in units of ms and set whether active scanning is performed
"""
active_num = 0
if bool(active):
active_num = 1
interval_num = int(interval*1000/625)
window_num = int(window*1000/625)
payload = struct.pack("<HHB", interval_num, window_num, active_num)
try:
response = self._send_command(6, 7, payload)
if response.payload[0] != 0:
return False, {'reason': "Could not set scanning parameters", 'error': response.payload[0]}
except InternalTimeoutError:
return False, {'reason': 'Timeout waiting for response'}
return True, None | [
"def",
"_set_scan_parameters",
"(",
"self",
",",
"interval",
"=",
"2100",
",",
"window",
"=",
"2100",
",",
"active",
"=",
"False",
")",
":",
"active_num",
"=",
"0",
"if",
"bool",
"(",
"active",
")",
":",
"active_num",
"=",
"1",
"interval_num",
"=",
"in... | Set the scan interval and window in units of ms and set whether active scanning is performed | [
"Set",
"the",
"scan",
"interval",
"and",
"window",
"in",
"units",
"of",
"ms",
"and",
"set",
"whether",
"active",
"scanning",
"is",
"performed"
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/transport_plugins/bled112/iotile_transport_bled112/bled112_cmd.py#L76-L97 | train |
iotile/coretools | transport_plugins/bled112/iotile_transport_bled112/bled112_cmd.py | BLED112CommandProcessor._query_systemstate | def _query_systemstate(self):
"""Query the maximum number of connections supported by this adapter
"""
def status_filter_func(event):
if event.command_class == 3 and event.command == 0:
return True
return False
try:
response = self._send_command(0, 6, [])
maxconn, = unpack("<B", response.payload)
except InternalTimeoutError:
return False, {'reason': 'Timeout waiting for command response'}
events = self._wait_process_events(0.5, status_filter_func, lambda x: False)
conns = []
for event in events:
handle, flags, addr, addr_type, interval, timeout, lat, bond = unpack("<BB6sBHHHB", event.payload)
if flags != 0:
conns.append(handle)
return True, {'max_connections': maxconn, 'active_connections': conns} | python | def _query_systemstate(self):
"""Query the maximum number of connections supported by this adapter
"""
def status_filter_func(event):
if event.command_class == 3 and event.command == 0:
return True
return False
try:
response = self._send_command(0, 6, [])
maxconn, = unpack("<B", response.payload)
except InternalTimeoutError:
return False, {'reason': 'Timeout waiting for command response'}
events = self._wait_process_events(0.5, status_filter_func, lambda x: False)
conns = []
for event in events:
handle, flags, addr, addr_type, interval, timeout, lat, bond = unpack("<BB6sBHHHB", event.payload)
if flags != 0:
conns.append(handle)
return True, {'max_connections': maxconn, 'active_connections': conns} | [
"def",
"_query_systemstate",
"(",
"self",
")",
":",
"def",
"status_filter_func",
"(",
"event",
")",
":",
"if",
"event",
".",
"command_class",
"==",
"3",
"and",
"event",
".",
"command",
"==",
"0",
":",
"return",
"True",
"return",
"False",
"try",
":",
"res... | Query the maximum number of connections supported by this adapter | [
"Query",
"the",
"maximum",
"number",
"of",
"connections",
"supported",
"by",
"this",
"adapter"
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/transport_plugins/bled112/iotile_transport_bled112/bled112_cmd.py#L99-L124 | train |
iotile/coretools | transport_plugins/bled112/iotile_transport_bled112/bled112_cmd.py | BLED112CommandProcessor._start_scan | def _start_scan(self, active):
"""Begin scanning forever
"""
success, retval = self._set_scan_parameters(active=active)
if not success:
return success, retval
try:
response = self._send_command(6, 2, [2])
if response.payload[0] != 0:
self._logger.error('Error starting scan for devices, error=%d', response.payload[0])
return False, {'reason': "Could not initiate scan for ble devices, error_code=%d, response=%s" % (response.payload[0], response)}
except InternalTimeoutError:
return False, {'reason': "Timeout waiting for response"}
return True, None | python | def _start_scan(self, active):
"""Begin scanning forever
"""
success, retval = self._set_scan_parameters(active=active)
if not success:
return success, retval
try:
response = self._send_command(6, 2, [2])
if response.payload[0] != 0:
self._logger.error('Error starting scan for devices, error=%d', response.payload[0])
return False, {'reason': "Could not initiate scan for ble devices, error_code=%d, response=%s" % (response.payload[0], response)}
except InternalTimeoutError:
return False, {'reason': "Timeout waiting for response"}
return True, None | [
"def",
"_start_scan",
"(",
"self",
",",
"active",
")",
":",
"success",
",",
"retval",
"=",
"self",
".",
"_set_scan_parameters",
"(",
"active",
"=",
"active",
")",
"if",
"not",
"success",
":",
"return",
"success",
",",
"retval",
"try",
":",
"response",
"=... | Begin scanning forever | [
"Begin",
"scanning",
"forever"
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/transport_plugins/bled112/iotile_transport_bled112/bled112_cmd.py#L126-L142 | train |
iotile/coretools | transport_plugins/bled112/iotile_transport_bled112/bled112_cmd.py | BLED112CommandProcessor._stop_scan | def _stop_scan(self):
"""Stop scanning for BLE devices
"""
try:
response = self._send_command(6, 4, [])
if response.payload[0] != 0:
# Error code 129 means we just were not currently scanning
if response.payload[0] != 129:
self._logger.error('Error stopping scan for devices, error=%d', response.payload[0])
return False, {'reason': "Could not stop scan for ble devices"}
except InternalTimeoutError:
return False, {'reason': "Timeout waiting for response"}
except DeviceNotConfiguredError:
return True, {'reason': "Device not connected (did you disconnect the dongle?"}
return True, None | python | def _stop_scan(self):
"""Stop scanning for BLE devices
"""
try:
response = self._send_command(6, 4, [])
if response.payload[0] != 0:
# Error code 129 means we just were not currently scanning
if response.payload[0] != 129:
self._logger.error('Error stopping scan for devices, error=%d', response.payload[0])
return False, {'reason': "Could not stop scan for ble devices"}
except InternalTimeoutError:
return False, {'reason': "Timeout waiting for response"}
except DeviceNotConfiguredError:
return True, {'reason': "Device not connected (did you disconnect the dongle?"}
return True, None | [
"def",
"_stop_scan",
"(",
"self",
")",
":",
"try",
":",
"response",
"=",
"self",
".",
"_send_command",
"(",
"6",
",",
"4",
",",
"[",
"]",
")",
"if",
"response",
".",
"payload",
"[",
"0",
"]",
"!=",
"0",
":",
"if",
"response",
".",
"payload",
"[",... | Stop scanning for BLE devices | [
"Stop",
"scanning",
"for",
"BLE",
"devices"
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/transport_plugins/bled112/iotile_transport_bled112/bled112_cmd.py#L144-L161 | train |
iotile/coretools | transport_plugins/bled112/iotile_transport_bled112/bled112_cmd.py | BLED112CommandProcessor._probe_services | def _probe_services(self, handle):
"""Probe for all primary services and characteristics in those services
Args:
handle (int): the connection handle to probe
"""
code = 0x2800
def event_filter_func(event):
if (event.command_class == 4 and event.command == 2):
event_handle, = unpack("B", event.payload[0:1])
return event_handle == handle
return False
def end_filter_func(event):
if (event.command_class == 4 and event.command == 1):
event_handle, = unpack("B", event.payload[0:1])
return event_handle == handle
return False
payload = struct.pack('<BHHBH', handle, 1, 0xFFFF, 2, code)
try:
response = self._send_command(4, 1, payload)
except InternalTimeoutError:
return False, {'reason': 'Timeout waiting for command response'}
handle, result = unpack("<BH", response.payload)
if result != 0:
return False, None
events = self._wait_process_events(0.5, event_filter_func, end_filter_func)
gatt_events = [x for x in events if event_filter_func(x)]
end_events = [x for x in events if end_filter_func(x)]
if len(end_events) == 0:
return False, None
#Make sure we successfully probed the gatt table
end_event = end_events[0]
_, result, _ = unpack("<BHH", end_event.payload)
if result != 0:
self._logger.warn("Error enumerating GATT table, protocol error code = %d (0x%X)" % (result, result))
return False, None
services = {}
for event in gatt_events:
process_gatt_service(services, event)
return True, {'services': services} | python | def _probe_services(self, handle):
"""Probe for all primary services and characteristics in those services
Args:
handle (int): the connection handle to probe
"""
code = 0x2800
def event_filter_func(event):
if (event.command_class == 4 and event.command == 2):
event_handle, = unpack("B", event.payload[0:1])
return event_handle == handle
return False
def end_filter_func(event):
if (event.command_class == 4 and event.command == 1):
event_handle, = unpack("B", event.payload[0:1])
return event_handle == handle
return False
payload = struct.pack('<BHHBH', handle, 1, 0xFFFF, 2, code)
try:
response = self._send_command(4, 1, payload)
except InternalTimeoutError:
return False, {'reason': 'Timeout waiting for command response'}
handle, result = unpack("<BH", response.payload)
if result != 0:
return False, None
events = self._wait_process_events(0.5, event_filter_func, end_filter_func)
gatt_events = [x for x in events if event_filter_func(x)]
end_events = [x for x in events if end_filter_func(x)]
if len(end_events) == 0:
return False, None
#Make sure we successfully probed the gatt table
end_event = end_events[0]
_, result, _ = unpack("<BHH", end_event.payload)
if result != 0:
self._logger.warn("Error enumerating GATT table, protocol error code = %d (0x%X)" % (result, result))
return False, None
services = {}
for event in gatt_events:
process_gatt_service(services, event)
return True, {'services': services} | [
"def",
"_probe_services",
"(",
"self",
",",
"handle",
")",
":",
"code",
"=",
"0x2800",
"def",
"event_filter_func",
"(",
"event",
")",
":",
"if",
"(",
"event",
".",
"command_class",
"==",
"4",
"and",
"event",
".",
"command",
"==",
"2",
")",
":",
"event_... | Probe for all primary services and characteristics in those services
Args:
handle (int): the connection handle to probe | [
"Probe",
"for",
"all",
"primary",
"services",
"and",
"characteristics",
"in",
"those",
"services"
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/transport_plugins/bled112/iotile_transport_bled112/bled112_cmd.py#L163-L215 | train |
iotile/coretools | transport_plugins/bled112/iotile_transport_bled112/bled112_cmd.py | BLED112CommandProcessor._probe_characteristics | def _probe_characteristics(self, conn, services, timeout=5.0):
"""Probe gatt services for all associated characteristics in a BLE device
Args:
conn (int): the connection handle to probe
services (dict): a dictionary of services produced by probe_services()
timeout (float): the maximum number of seconds to spend in any single task
"""
for service in services.values():
success, result = self._enumerate_handles(conn, service['start_handle'],
service['end_handle'])
if not success:
return False, None
attributes = result['attributes']
service['characteristics'] = {}
last_char = None
for handle, attribute in attributes.items():
if attribute['uuid'].hex[-4:] == '0328':
success, result = self._read_handle(conn, handle, timeout)
if not success:
return False, None
value = result['data']
char = parse_characteristic_declaration(value)
service['characteristics'][char['uuid']] = char
last_char = char
elif attribute['uuid'].hex[-4:] == '0229':
if last_char is None:
return False, None
success, result = self._read_handle(conn, handle, timeout)
if not success:
return False, None
value = result['data']
assert len(value) == 2
value, = unpack("<H", value)
last_char['client_configuration'] = {'handle': handle, 'value': value}
return True, {'services': services} | python | def _probe_characteristics(self, conn, services, timeout=5.0):
"""Probe gatt services for all associated characteristics in a BLE device
Args:
conn (int): the connection handle to probe
services (dict): a dictionary of services produced by probe_services()
timeout (float): the maximum number of seconds to spend in any single task
"""
for service in services.values():
success, result = self._enumerate_handles(conn, service['start_handle'],
service['end_handle'])
if not success:
return False, None
attributes = result['attributes']
service['characteristics'] = {}
last_char = None
for handle, attribute in attributes.items():
if attribute['uuid'].hex[-4:] == '0328':
success, result = self._read_handle(conn, handle, timeout)
if not success:
return False, None
value = result['data']
char = parse_characteristic_declaration(value)
service['characteristics'][char['uuid']] = char
last_char = char
elif attribute['uuid'].hex[-4:] == '0229':
if last_char is None:
return False, None
success, result = self._read_handle(conn, handle, timeout)
if not success:
return False, None
value = result['data']
assert len(value) == 2
value, = unpack("<H", value)
last_char['client_configuration'] = {'handle': handle, 'value': value}
return True, {'services': services} | [
"def",
"_probe_characteristics",
"(",
"self",
",",
"conn",
",",
"services",
",",
"timeout",
"=",
"5.0",
")",
":",
"for",
"service",
"in",
"services",
".",
"values",
"(",
")",
":",
"success",
",",
"result",
"=",
"self",
".",
"_enumerate_handles",
"(",
"co... | Probe gatt services for all associated characteristics in a BLE device
Args:
conn (int): the connection handle to probe
services (dict): a dictionary of services produced by probe_services()
timeout (float): the maximum number of seconds to spend in any single task | [
"Probe",
"gatt",
"services",
"for",
"all",
"associated",
"characteristics",
"in",
"a",
"BLE",
"device"
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/transport_plugins/bled112/iotile_transport_bled112/bled112_cmd.py#L217-L262 | train |
iotile/coretools | transport_plugins/bled112/iotile_transport_bled112/bled112_cmd.py | BLED112CommandProcessor._enable_rpcs | def _enable_rpcs(self, conn, services, timeout=1.0):
"""Prepare this device to receive RPCs
"""
#FIXME: Check for characteristic existence in a try/catch and return failure if not found
success, result = self._set_notification(conn, services[TileBusService]['characteristics'][TileBusReceiveHeaderCharacteristic], True, timeout)
if not success:
return success, result
return self._set_notification(conn, services[TileBusService]['characteristics'][TileBusReceivePayloadCharacteristic], True, timeout) | python | def _enable_rpcs(self, conn, services, timeout=1.0):
"""Prepare this device to receive RPCs
"""
#FIXME: Check for characteristic existence in a try/catch and return failure if not found
success, result = self._set_notification(conn, services[TileBusService]['characteristics'][TileBusReceiveHeaderCharacteristic], True, timeout)
if not success:
return success, result
return self._set_notification(conn, services[TileBusService]['characteristics'][TileBusReceivePayloadCharacteristic], True, timeout) | [
"def",
"_enable_rpcs",
"(",
"self",
",",
"conn",
",",
"services",
",",
"timeout",
"=",
"1.0",
")",
":",
"success",
",",
"result",
"=",
"self",
".",
"_set_notification",
"(",
"conn",
",",
"services",
"[",
"TileBusService",
"]",
"[",
"'characteristics'",
"]"... | Prepare this device to receive RPCs | [
"Prepare",
"this",
"device",
"to",
"receive",
"RPCs"
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/transport_plugins/bled112/iotile_transport_bled112/bled112_cmd.py#L264-L274 | train |
iotile/coretools | transport_plugins/bled112/iotile_transport_bled112/bled112_cmd.py | BLED112CommandProcessor._disable_rpcs | def _disable_rpcs(self, conn, services, timeout=1.0):
"""Prevent this device from receiving more RPCs
"""
success, result = self._set_notification(conn, services[TileBusService]['characteristics'][TileBusReceiveHeaderCharacteristic], False, timeout)
if not success:
return success, result
return self._set_notification(conn, services[TileBusService]['characteristics'][TileBusReceivePayloadCharacteristic], False, timeout) | python | def _disable_rpcs(self, conn, services, timeout=1.0):
"""Prevent this device from receiving more RPCs
"""
success, result = self._set_notification(conn, services[TileBusService]['characteristics'][TileBusReceiveHeaderCharacteristic], False, timeout)
if not success:
return success, result
return self._set_notification(conn, services[TileBusService]['characteristics'][TileBusReceivePayloadCharacteristic], False, timeout) | [
"def",
"_disable_rpcs",
"(",
"self",
",",
"conn",
",",
"services",
",",
"timeout",
"=",
"1.0",
")",
":",
"success",
",",
"result",
"=",
"self",
".",
"_set_notification",
"(",
"conn",
",",
"services",
"[",
"TileBusService",
"]",
"[",
"'characteristics'",
"]... | Prevent this device from receiving more RPCs | [
"Prevent",
"this",
"device",
"from",
"receiving",
"more",
"RPCs"
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/transport_plugins/bled112/iotile_transport_bled112/bled112_cmd.py#L290-L298 | train |
iotile/coretools | transport_plugins/bled112/iotile_transport_bled112/bled112_cmd.py | BLED112CommandProcessor._write_handle | def _write_handle(self, conn, handle, ack, value, timeout=1.0):
"""Write to a BLE device characteristic by its handle
Args:
conn (int): The connection handle for the device we should read from
handle (int): The characteristics handle we should read
ack (bool): Should this be an acknowledges write or unacknowledged
timeout (float): How long to wait before failing
value (bytearray): The value that we should write
"""
conn_handle = conn
char_handle = handle
def write_handle_acked(event):
if event.command_class == 4 and event.command == 1:
conn, _, char = unpack("<BHH", event.payload)
return conn_handle == conn and char_handle == char
data_len = len(value)
if data_len > 20:
return False, {'reason': 'Data too long to write'}
payload = struct.pack("<BHB%ds" % data_len, conn_handle, char_handle, data_len, value)
try:
if ack:
response = self._send_command(4, 5, payload)
else:
response = self._send_command(4, 6, payload)
except InternalTimeoutError:
return False, {'reason': 'Timeout waiting for response to command in _write_handle'}
_, result = unpack("<BH", response.payload)
if result != 0:
return False, {'reason': 'Error writing to handle', 'error_code': result}
if ack:
events = self._wait_process_events(timeout, lambda x: False, write_handle_acked)
if len(events) == 0:
return False, {'reason': 'Timeout waiting for acknowledge on write'}
_, result, _ = unpack("<BHH", events[0].payload)
if result != 0:
return False, {'reason': 'Error received during write to handle', 'error_code': result}
return True, None | python | def _write_handle(self, conn, handle, ack, value, timeout=1.0):
"""Write to a BLE device characteristic by its handle
Args:
conn (int): The connection handle for the device we should read from
handle (int): The characteristics handle we should read
ack (bool): Should this be an acknowledges write or unacknowledged
timeout (float): How long to wait before failing
value (bytearray): The value that we should write
"""
conn_handle = conn
char_handle = handle
def write_handle_acked(event):
if event.command_class == 4 and event.command == 1:
conn, _, char = unpack("<BHH", event.payload)
return conn_handle == conn and char_handle == char
data_len = len(value)
if data_len > 20:
return False, {'reason': 'Data too long to write'}
payload = struct.pack("<BHB%ds" % data_len, conn_handle, char_handle, data_len, value)
try:
if ack:
response = self._send_command(4, 5, payload)
else:
response = self._send_command(4, 6, payload)
except InternalTimeoutError:
return False, {'reason': 'Timeout waiting for response to command in _write_handle'}
_, result = unpack("<BH", response.payload)
if result != 0:
return False, {'reason': 'Error writing to handle', 'error_code': result}
if ack:
events = self._wait_process_events(timeout, lambda x: False, write_handle_acked)
if len(events) == 0:
return False, {'reason': 'Timeout waiting for acknowledge on write'}
_, result, _ = unpack("<BHH", events[0].payload)
if result != 0:
return False, {'reason': 'Error received during write to handle', 'error_code': result}
return True, None | [
"def",
"_write_handle",
"(",
"self",
",",
"conn",
",",
"handle",
",",
"ack",
",",
"value",
",",
"timeout",
"=",
"1.0",
")",
":",
"conn_handle",
"=",
"conn",
"char_handle",
"=",
"handle",
"def",
"write_handle_acked",
"(",
"event",
")",
":",
"if",
"event",... | Write to a BLE device characteristic by its handle
Args:
conn (int): The connection handle for the device we should read from
handle (int): The characteristics handle we should read
ack (bool): Should this be an acknowledges write or unacknowledged
timeout (float): How long to wait before failing
value (bytearray): The value that we should write | [
"Write",
"to",
"a",
"BLE",
"device",
"characteristic",
"by",
"its",
"handle"
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/transport_plugins/bled112/iotile_transport_bled112/bled112_cmd.py#L373-L420 | train |
iotile/coretools | transport_plugins/bled112/iotile_transport_bled112/bled112_cmd.py | BLED112CommandProcessor._set_advertising_data | def _set_advertising_data(self, packet_type, data):
"""Set the advertising data for advertisements sent out by this bled112
Args:
packet_type (int): 0 for advertisement, 1 for scan response
data (bytearray): the data to set
"""
payload = struct.pack("<BB%ss" % (len(data)), packet_type, len(data), bytes(data))
response = self._send_command(6, 9, payload)
result, = unpack("<H", response.payload)
if result != 0:
return False, {'reason': 'Error code from BLED112 setting advertising data', 'code': result}
return True, None | python | def _set_advertising_data(self, packet_type, data):
"""Set the advertising data for advertisements sent out by this bled112
Args:
packet_type (int): 0 for advertisement, 1 for scan response
data (bytearray): the data to set
"""
payload = struct.pack("<BB%ss" % (len(data)), packet_type, len(data), bytes(data))
response = self._send_command(6, 9, payload)
result, = unpack("<H", response.payload)
if result != 0:
return False, {'reason': 'Error code from BLED112 setting advertising data', 'code': result}
return True, None | [
"def",
"_set_advertising_data",
"(",
"self",
",",
"packet_type",
",",
"data",
")",
":",
"payload",
"=",
"struct",
".",
"pack",
"(",
"\"<BB%ss\"",
"%",
"(",
"len",
"(",
"data",
")",
")",
",",
"packet_type",
",",
"len",
"(",
"data",
")",
",",
"bytes",
... | Set the advertising data for advertisements sent out by this bled112
Args:
packet_type (int): 0 for advertisement, 1 for scan response
data (bytearray): the data to set | [
"Set",
"the",
"advertising",
"data",
"for",
"advertisements",
"sent",
"out",
"by",
"this",
"bled112"
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/transport_plugins/bled112/iotile_transport_bled112/bled112_cmd.py#L513-L528 | train |
iotile/coretools | transport_plugins/bled112/iotile_transport_bled112/bled112_cmd.py | BLED112CommandProcessor._set_mode | def _set_mode(self, discover_mode, connect_mode):
"""Set the mode of the BLED112, used to enable and disable advertising
To enable advertising, use 4, 2.
To disable advertising use 0, 0.
Args:
discover_mode (int): The discoverability mode, 0 for off, 4 for on (user data)
connect_mode (int): The connectability mode, 0 for of, 2 for undirected connectable
"""
payload = struct.pack("<BB", discover_mode, connect_mode)
response = self._send_command(6, 1, payload)
result, = unpack("<H", response.payload)
if result != 0:
return False, {'reason': 'Error code from BLED112 setting mode', 'code': result}
return True, None | python | def _set_mode(self, discover_mode, connect_mode):
"""Set the mode of the BLED112, used to enable and disable advertising
To enable advertising, use 4, 2.
To disable advertising use 0, 0.
Args:
discover_mode (int): The discoverability mode, 0 for off, 4 for on (user data)
connect_mode (int): The connectability mode, 0 for of, 2 for undirected connectable
"""
payload = struct.pack("<BB", discover_mode, connect_mode)
response = self._send_command(6, 1, payload)
result, = unpack("<H", response.payload)
if result != 0:
return False, {'reason': 'Error code from BLED112 setting mode', 'code': result}
return True, None | [
"def",
"_set_mode",
"(",
"self",
",",
"discover_mode",
",",
"connect_mode",
")",
":",
"payload",
"=",
"struct",
".",
"pack",
"(",
"\"<BB\"",
",",
"discover_mode",
",",
"connect_mode",
")",
"response",
"=",
"self",
".",
"_send_command",
"(",
"6",
",",
"1",
... | Set the mode of the BLED112, used to enable and disable advertising
To enable advertising, use 4, 2.
To disable advertising use 0, 0.
Args:
discover_mode (int): The discoverability mode, 0 for off, 4 for on (user data)
connect_mode (int): The connectability mode, 0 for of, 2 for undirected connectable | [
"Set",
"the",
"mode",
"of",
"the",
"BLED112",
"used",
"to",
"enable",
"and",
"disable",
"advertising"
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/transport_plugins/bled112/iotile_transport_bled112/bled112_cmd.py#L530-L548 | train |
iotile/coretools | transport_plugins/bled112/iotile_transport_bled112/bled112_cmd.py | BLED112CommandProcessor._send_notification | def _send_notification(self, handle, value):
"""Send a notification to all connected clients on a characteristic
Args:
handle (int): The handle we wish to notify on
value (bytearray): The value we wish to send
"""
value_len = len(value)
value = bytes(value)
payload = struct.pack("<BHB%ds" % value_len, 0xFF, handle, value_len, value)
response = self._send_command(2, 5, payload)
result, = unpack("<H", response.payload)
if result != 0:
return False, {'reason': 'Error code from BLED112 notifying a value', 'code': result, 'handle': handle, 'value': value}
return True, None | python | def _send_notification(self, handle, value):
"""Send a notification to all connected clients on a characteristic
Args:
handle (int): The handle we wish to notify on
value (bytearray): The value we wish to send
"""
value_len = len(value)
value = bytes(value)
payload = struct.pack("<BHB%ds" % value_len, 0xFF, handle, value_len, value)
response = self._send_command(2, 5, payload)
result, = unpack("<H", response.payload)
if result != 0:
return False, {'reason': 'Error code from BLED112 notifying a value', 'code': result, 'handle': handle, 'value': value}
return True, None | [
"def",
"_send_notification",
"(",
"self",
",",
"handle",
",",
"value",
")",
":",
"value_len",
"=",
"len",
"(",
"value",
")",
"value",
"=",
"bytes",
"(",
"value",
")",
"payload",
"=",
"struct",
".",
"pack",
"(",
"\"<BHB%ds\"",
"%",
"value_len",
",",
"0x... | Send a notification to all connected clients on a characteristic
Args:
handle (int): The handle we wish to notify on
value (bytearray): The value we wish to send | [
"Send",
"a",
"notification",
"to",
"all",
"connected",
"clients",
"on",
"a",
"characteristic"
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/transport_plugins/bled112/iotile_transport_bled112/bled112_cmd.py#L550-L568 | train |
iotile/coretools | transport_plugins/bled112/iotile_transport_bled112/bled112_cmd.py | BLED112CommandProcessor._disconnect | def _disconnect(self, handle):
"""Disconnect from a device that we have previously connected to
"""
payload = struct.pack('<B', handle)
response = self._send_command(3, 0, payload)
conn_handle, result = unpack("<BH", response.payload)
if result != 0:
self._logger.info("Disconnection failed result=%d", result)
return False, None
assert conn_handle == handle
def disconnect_succeeded(event):
if event.command_class == 3 and event.command == 4:
event_handle, = unpack("B", event.payload[0:1])
return event_handle == handle
return False
#FIXME Hardcoded timeout
events = self._wait_process_events(3.0, lambda x: False, disconnect_succeeded)
if len(events) != 1:
return False, None
return True, {'handle': handle} | python | def _disconnect(self, handle):
"""Disconnect from a device that we have previously connected to
"""
payload = struct.pack('<B', handle)
response = self._send_command(3, 0, payload)
conn_handle, result = unpack("<BH", response.payload)
if result != 0:
self._logger.info("Disconnection failed result=%d", result)
return False, None
assert conn_handle == handle
def disconnect_succeeded(event):
if event.command_class == 3 and event.command == 4:
event_handle, = unpack("B", event.payload[0:1])
return event_handle == handle
return False
#FIXME Hardcoded timeout
events = self._wait_process_events(3.0, lambda x: False, disconnect_succeeded)
if len(events) != 1:
return False, None
return True, {'handle': handle} | [
"def",
"_disconnect",
"(",
"self",
",",
"handle",
")",
":",
"payload",
"=",
"struct",
".",
"pack",
"(",
"'<B'",
",",
"handle",
")",
"response",
"=",
"self",
".",
"_send_command",
"(",
"3",
",",
"0",
",",
"payload",
")",
"conn_handle",
",",
"result",
... | Disconnect from a device that we have previously connected to | [
"Disconnect",
"from",
"a",
"device",
"that",
"we",
"have",
"previously",
"connected",
"to"
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/transport_plugins/bled112/iotile_transport_bled112/bled112_cmd.py#L657-L683 | train |
iotile/coretools | transport_plugins/bled112/iotile_transport_bled112/bled112_cmd.py | BLED112CommandProcessor._send_command | def _send_command(self, cmd_class, command, payload, timeout=3.0):
"""
Send a BGAPI packet to the dongle and return the response
"""
if len(payload) > 60:
return ValueError("Attempting to send a BGAPI packet with length > 60 is not allowed", actual_length=len(payload), command=command, command_class=cmd_class)
header = bytearray(4)
header[0] = 0
header[1] = len(payload)
header[2] = cmd_class
header[3] = command
packet = header + bytearray(payload)
self._stream.write(bytes(packet))
#Every command has a response so wait for the response here
response = self._receive_packet(timeout)
return response | python | def _send_command(self, cmd_class, command, payload, timeout=3.0):
"""
Send a BGAPI packet to the dongle and return the response
"""
if len(payload) > 60:
return ValueError("Attempting to send a BGAPI packet with length > 60 is not allowed", actual_length=len(payload), command=command, command_class=cmd_class)
header = bytearray(4)
header[0] = 0
header[1] = len(payload)
header[2] = cmd_class
header[3] = command
packet = header + bytearray(payload)
self._stream.write(bytes(packet))
#Every command has a response so wait for the response here
response = self._receive_packet(timeout)
return response | [
"def",
"_send_command",
"(",
"self",
",",
"cmd_class",
",",
"command",
",",
"payload",
",",
"timeout",
"=",
"3.0",
")",
":",
"if",
"len",
"(",
"payload",
")",
">",
"60",
":",
"return",
"ValueError",
"(",
"\"Attempting to send a BGAPI packet with length > 60 is n... | Send a BGAPI packet to the dongle and return the response | [
"Send",
"a",
"BGAPI",
"packet",
"to",
"the",
"dongle",
"and",
"return",
"the",
"response"
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/transport_plugins/bled112/iotile_transport_bled112/bled112_cmd.py#L685-L704 | train |
def _receive_packet(self, timeout=3.0):
    """Receive the response packet to a command, dispatching any events seen.

    Event packets that arrive before the response are handed to
    self.event_handler (if one is registered) and are not returned.

    Args:
        timeout (float): How long to wait for each packet from the stream

    Returns:
        The first non-event packet read from the stream.
    """

    while True:
        raw = self._stream.read_packet(timeout=timeout)
        packet = BGAPIPacket(is_event=(raw[0] == 0x80), command_class=raw[2], command=raw[3], payload=raw[4:])

        if not packet.is_event:
            return packet

        # Forward events to the registered handler and keep waiting for the response
        if self.event_handler is not None:
            self.event_handler(packet)
"""
Receive a response packet to a command
"""
while True:
response_data = self._stream.read_packet(timeout=timeout)
response = BGAPIPacket(is_event=(response_data[0] == 0x80), command_class=response_data[2], command=response_data[3], payload=response_data[4:])
if response.is_event:
if self.event_handler is not None:
self.event_handler(response)
continue
return response | [
"def",
"_receive_packet",
"(",
"self",
",",
"timeout",
"=",
"3.0",
")",
":",
"while",
"True",
":",
"response_data",
"=",
"self",
".",
"_stream",
".",
"read_packet",
"(",
"timeout",
"=",
"timeout",
")",
"response",
"=",
"BGAPIPacket",
"(",
"is_event",
"=",
... | Receive a response packet to a command | [
"Receive",
"a",
"response",
"packet",
"to",
"a",
"command"
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/transport_plugins/bled112/iotile_transport_bled112/bled112_cmd.py#L706-L721 | train |
iotile/coretools | transport_plugins/bled112/iotile_transport_bled112/bled112_cmd.py | BLED112CommandProcessor._wait_process_events | def _wait_process_events(self, total_time, return_filter, end_filter):
"""Synchronously process events until a specific event is found or we timeout
Args:
total_time (float): The aproximate maximum number of seconds we should wait for the end event
return_filter (callable): A function that returns True for events we should return and not process
normally via callbacks to the IOLoop
end_filter (callable): A function that returns True for the end event that we are looking for to
stop processing.
Returns:
list: A list of events that matched return_filter or end_filter
"""
acc = []
delta = 0.01
start_time = time.time()
end_time = start_time + total_time
while time.time() < end_time:
events = self._process_events(lambda x: return_filter(x) or end_filter(x), max_events=1)
acc += events
for event in events:
if end_filter(event):
return acc
if len(events) == 0:
time.sleep(delta)
return acc | python | def _wait_process_events(self, total_time, return_filter, end_filter):
"""Synchronously process events until a specific event is found or we timeout
Args:
total_time (float): The aproximate maximum number of seconds we should wait for the end event
return_filter (callable): A function that returns True for events we should return and not process
normally via callbacks to the IOLoop
end_filter (callable): A function that returns True for the end event that we are looking for to
stop processing.
Returns:
list: A list of events that matched return_filter or end_filter
"""
acc = []
delta = 0.01
start_time = time.time()
end_time = start_time + total_time
while time.time() < end_time:
events = self._process_events(lambda x: return_filter(x) or end_filter(x), max_events=1)
acc += events
for event in events:
if end_filter(event):
return acc
if len(events) == 0:
time.sleep(delta)
return acc | [
"def",
"_wait_process_events",
"(",
"self",
",",
"total_time",
",",
"return_filter",
",",
"end_filter",
")",
":",
"acc",
"=",
"[",
"]",
"delta",
"=",
"0.01",
"start_time",
"=",
"time",
".",
"time",
"(",
")",
"end_time",
"=",
"start_time",
"+",
"total_time"... | Synchronously process events until a specific event is found or we timeout
Args:
total_time (float): The aproximate maximum number of seconds we should wait for the end event
return_filter (callable): A function that returns True for events we should return and not process
normally via callbacks to the IOLoop
end_filter (callable): A function that returns True for the end event that we are looking for to
stop processing.
Returns:
list: A list of events that matched return_filter or end_filter | [
"Synchronously",
"process",
"events",
"until",
"a",
"specific",
"event",
"is",
"found",
"or",
"we",
"timeout"
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/transport_plugins/bled112/iotile_transport_bled112/bled112_cmd.py#L774-L805 | train |
def connect(self, client_id):
    """Connect to AWS IOT with the given client_id.

    Args:
        client_id (string): The client ID passed to the MQTT message broker

    Raises:
        InternalError: If we are already connected or the connection fails.
    """

    if self.client is not None:
        raise InternalError("Connect called on an alreaded connected MQTT client")

    mqtt_client = AWSIoTPythonSDK.MQTTLib.AWSIoTMQTTClient(client_id, useWebsocket=self.websockets)

    if self.websockets:
        # Websocket connections authenticate with IAM credentials over port 443
        mqtt_client.configureEndpoint(self.endpoint, 443)
        mqtt_client.configureCredentials(self.root)

        if self.iam_session is None:
            mqtt_client.configureIAMCredentials(self.iam_key, self.iam_secret)
        else:
            mqtt_client.configureIAMCredentials(self.iam_key, self.iam_secret, self.iam_session)
    else:
        # Native MQTT over TLS uses an X.509 client certificate on port 8883
        mqtt_client.configureEndpoint(self.endpoint, 8883)
        mqtt_client.configureCredentials(self.root, self.key, self.cert)

    mqtt_client.configureOfflinePublishQueueing(0)

    try:
        mqtt_client.connect()
    except operationError as exc:
        raise InternalError("Could not connect to AWS IOT", message=exc.message)

    # Only record the client once the connection has actually succeeded
    self.client = mqtt_client
    self.sequencer.reset()
"""Connect to AWS IOT with the given client_id
Args:
client_id (string): The client ID passed to the MQTT message broker
"""
if self.client is not None:
raise InternalError("Connect called on an alreaded connected MQTT client")
client = AWSIoTPythonSDK.MQTTLib.AWSIoTMQTTClient(client_id, useWebsocket=self.websockets)
if self.websockets:
client.configureEndpoint(self.endpoint, 443)
client.configureCredentials(self.root)
if self.iam_session is None:
client.configureIAMCredentials(self.iam_key, self.iam_secret)
else:
client.configureIAMCredentials(self.iam_key, self.iam_secret, self.iam_session)
else:
client.configureEndpoint(self.endpoint, 8883)
client.configureCredentials(self.root, self.key, self.cert)
client.configureOfflinePublishQueueing(0)
try:
client.connect()
self.client = client
except operationError as exc:
raise InternalError("Could not connect to AWS IOT", message=exc.message)
self.sequencer.reset() | [
"def",
"connect",
"(",
"self",
",",
"client_id",
")",
":",
"if",
"self",
".",
"client",
"is",
"not",
"None",
":",
"raise",
"InternalError",
"(",
"\"Connect called on an alreaded connected MQTT client\"",
")",
"client",
"=",
"AWSIoTPythonSDK",
".",
"MQTTLib",
".",
... | Connect to AWS IOT with the given client_id
Args:
client_id (string): The client ID passed to the MQTT message broker | [
"Connect",
"to",
"AWS",
"IOT",
"with",
"the",
"given",
"client_id"
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/transport_plugins/awsiot/iotile_transport_awsiot/mqtt_client.py#L78-L110 | train |
def disconnect(self):
    """Disconnect from the AWS IOT message broker.

    This is a no-op when no connection has been made.

    Raises:
        InternalError: If the underlying client fails to disconnect.
    """

    if self.client is None:
        return

    try:
        self.client.disconnect()
    except operationError as exc:
        raise InternalError("Could not disconnect from AWS IOT", message=exc.message)
"""Disconnect from AWS IOT message broker
"""
if self.client is None:
return
try:
self.client.disconnect()
except operationError as exc:
raise InternalError("Could not disconnect from AWS IOT", message=exc.message) | [
"def",
"disconnect",
"(",
"self",
")",
":",
"if",
"self",
".",
"client",
"is",
"None",
":",
"return",
"try",
":",
"self",
".",
"client",
".",
"disconnect",
"(",
")",
"except",
"operationError",
"as",
"exc",
":",
"raise",
"InternalError",
"(",
"\"Could no... | Disconnect from AWS IOT message broker | [
"Disconnect",
"from",
"AWS",
"IOT",
"message",
"broker"
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/transport_plugins/awsiot/iotile_transport_awsiot/mqtt_client.py#L112-L122 | train |
def publish(self, topic, message):
    """Publish a json message to a topic with a sequence number.

    The actual message will be published as a JSON object:
    {
        "sequence": <incrementing id>,
        "message": message
    }

    The caller's message is never modified: when ``message`` is a dict,
    any ``bytes``/``bytearray`` values under the well-known keys are
    utf-8 decoded on a shallow copy so the packet can be JSON serialized.
    (The previous implementation decoded in place, mutating the caller's
    dict, and crashed with AttributeError when a value was already str.)

    Args:
        topic (string): The MQTT topic to publish in
        message (string, dict): The message to publish

    Raises:
        InternalError: If the underlying client fails to publish.
    """

    # Keys whose values may arrive as raw bytes and must be decoded
    # before json serialization.
    _BYTES_KEYS = ('key', 'payload', 'script', 'trace', 'report', 'received_time')

    if isinstance(message, dict):
        message = dict(message)  # shallow copy: do not mutate the caller's dict
        for field in _BYTES_KEYS:
            value = message.get(field)
            if isinstance(value, (bytes, bytearray)):
                message[field] = value.decode('utf8')

    packet = {
        'sequence': self.sequencer.next_id(topic),
        'message': message
    }

    serialized_packet = json.dumps(packet)

    try:
        # Limit how much we log in case the message is very long
        self._logger.debug("Publishing %s on topic %s", serialized_packet[:256], topic)
        self.client.publish(topic, serialized_packet, 1)
    except operationError as exc:
        raise InternalError("Could not publish message", topic=topic, message=exc.message)
"""Publish a json message to a topic with a type and a sequence number
The actual message will be published as a JSON object:
{
"sequence": <incrementing id>,
"message": message
}
Args:
topic (string): The MQTT topic to publish in
message (string, dict): The message to publish
"""
seq = self.sequencer.next_id(topic)
packet = {
'sequence': seq,
'message': message
}
# Need to encode bytes types for json.dumps
if 'key' in packet['message']:
packet['message']['key'] = packet['message']['key'].decode('utf8')
if 'payload' in packet['message']:
packet['message']['payload'] = packet['message']['payload'].decode('utf8')
if 'script' in packet['message']:
packet['message']['script'] = packet['message']['script'].decode('utf8')
if 'trace' in packet['message']:
packet['message']['trace'] = packet['message']['trace'].decode('utf8')
if 'report' in packet['message']:
packet['message']['report'] = packet['message']['report'].decode('utf8')
if 'received_time' in packet['message']:
packet['message']['received_time'] = packet['message']['received_time'].decode('utf8')
serialized_packet = json.dumps(packet)
try:
# Limit how much we log in case the message is very long
self._logger.debug("Publishing %s on topic %s", serialized_packet[:256], topic)
self.client.publish(topic, serialized_packet, 1)
except operationError as exc:
raise InternalError("Could not publish message", topic=topic, message=exc.message) | [
"def",
"publish",
"(",
"self",
",",
"topic",
",",
"message",
")",
":",
"seq",
"=",
"self",
".",
"sequencer",
".",
"next_id",
"(",
"topic",
")",
"packet",
"=",
"{",
"'sequence'",
":",
"seq",
",",
"'message'",
":",
"message",
"}",
"if",
"'key'",
"in",
... | Publish a json message to a topic with a type and a sequence number
The actual message will be published as a JSON object:
{
"sequence": <incrementing id>,
"message": message
}
Args:
topic (string): The MQTT topic to publish in
message (string, dict): The message to publish | [
"Publish",
"a",
"json",
"message",
"to",
"a",
"topic",
"with",
"a",
"type",
"and",
"a",
"sequence",
"number"
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/transport_plugins/awsiot/iotile_transport_awsiot/mqtt_client.py#L124-L165 | train |
def subscribe(self, topic, callback, ordered=True):
    """Subscribe to future messages in the given topic.

    The contents of topic should be in the format created by self.publish
    with a sequence number and message encoded as a json string.  Wildcard
    topics containing + and # are supported.

    Args:
        topic (string): The MQTT topic to subscribe to
        callback (callable): Called as callback(sequence, topic, message)
            when a new message is received
        ordered (bool): Whether messages on this topic carry a sequence
            number that must be checked and queued to ensure packets are
            received in order

    Raises:
        InternalError: If the underlying client fails to subscribe.
    """

    is_wildcard = '+' in topic or '#' in topic

    if is_wildcard:
        # Translate MQTT wildcards into a regex: + matches one topic level, # matches the rest
        pattern = re.compile(topic.replace('+', '[^/]+').replace('#', '.*'))
        self.wildcard_queues.append((topic, pattern, callback, ordered))
    else:
        self.queues[topic] = PacketQueue(0, callback, ordered)

    try:
        self.client.subscribe(topic, 1, self._on_receive)
    except operationError as exc:
        raise InternalError("Could not subscribe to topic", topic=topic, message=exc.message)
"""Subscribe to future messages in the given topic
The contents of topic should be in the format created by self.publish with a
sequence number of message type encoded as a json string.
Wildcard topics containing + and # are allowed and
Args:
topic (string): The MQTT topic to subscribe to
callback (callable): The callback to call when a new mesage is received
The signature of callback should be callback(sequence, topic, type, message)
ordered (bool): Whether messages on this topic have a sequence number that must
be checked and queued to ensure that packets are received in order
"""
if '+' in topic or '#' in topic:
regex = re.compile(topic.replace('+', '[^/]+').replace('#', '.*'))
self.wildcard_queues.append((topic, regex, callback, ordered))
else:
self.queues[topic] = PacketQueue(0, callback, ordered)
try:
self.client.subscribe(topic, 1, self._on_receive)
except operationError as exc:
raise InternalError("Could not subscribe to topic", topic=topic, message=exc.message) | [
"def",
"subscribe",
"(",
"self",
",",
"topic",
",",
"callback",
",",
"ordered",
"=",
"True",
")",
":",
"if",
"'+'",
"in",
"topic",
"or",
"'#'",
"in",
"topic",
":",
"regex",
"=",
"re",
".",
"compile",
"(",
"topic",
".",
"replace",
"(",
"'+'",
",",
... | Subscribe to future messages in the given topic
The contents of topic should be in the format created by self.publish with a
sequence number of message type encoded as a json string.
Wildcard topics containing + and # are allowed and
Args:
topic (string): The MQTT topic to subscribe to
callback (callable): The callback to call when a new mesage is received
The signature of callback should be callback(sequence, topic, type, message)
ordered (bool): Whether messages on this topic have a sequence number that must
be checked and queued to ensure that packets are received in order | [
"Subscribe",
"to",
"future",
"messages",
"in",
"the",
"given",
"topic"
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/transport_plugins/awsiot/iotile_transport_awsiot/mqtt_client.py#L167-L192 | train |
def reset_sequence(self, topic):
    """Reset the expected sequence number for a topic.

    If the topic is unknown, this does nothing.  This behaviour is useful
    when you have wildcard topics that only create queues once they receive
    the first message matching the topic.

    Args:
        topic (string): The topic to reset the packet queue on
    """

    queue = self.queues.get(topic)
    if queue is not None:
        queue.reset()
"""Reset the expected sequence number for a topic
If the topic is unknown, this does nothing. This behaviour is
useful when you have wildcard topics that only create queues
once they receive the first message matching the topic.
Args:
topic (string): The topic to reset the packet queue on
"""
if topic in self.queues:
self.queues[topic].reset() | [
"def",
"reset_sequence",
"(",
"self",
",",
"topic",
")",
":",
"if",
"topic",
"in",
"self",
".",
"queues",
":",
"self",
".",
"queues",
"[",
"topic",
"]",
".",
"reset",
"(",
")"
] | Reset the expected sequence number for a topic
If the topic is unknown, this does nothing. This behaviour is
useful when you have wildcard topics that only create queues
once they receive the first message matching the topic.
Args:
topic (string): The topic to reset the packet queue on | [
"Reset",
"the",
"expected",
"sequence",
"number",
"for",
"a",
"topic"
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/transport_plugins/awsiot/iotile_transport_awsiot/mqtt_client.py#L194-L206 | train |
def unsubscribe(self, topic):
    """Unsubscribe from messages on a given topic.

    Args:
        topic (string): The MQTT topic to unsubscribe from

    Raises:
        InternalError: If the underlying client fails to unsubscribe.
    """

    # Drop our local queue first; a KeyError here means we never subscribed
    del self.queues[topic]

    try:
        self.client.unsubscribe(topic)
    except operationError as exc:
        raise InternalError("Could not unsubscribe from topic", topic=topic, message=exc.message)
"""Unsubscribe from messages on a given topic
Args:
topic (string): The MQTT topic to unsubscribe from
"""
del self.queues[topic]
try:
self.client.unsubscribe(topic)
except operationError as exc:
raise InternalError("Could not unsubscribe from topic", topic=topic, message=exc.message) | [
"def",
"unsubscribe",
"(",
"self",
",",
"topic",
")",
":",
"del",
"self",
".",
"queues",
"[",
"topic",
"]",
"try",
":",
"self",
".",
"client",
".",
"unsubscribe",
"(",
"topic",
")",
"except",
"operationError",
"as",
"exc",
":",
"raise",
"InternalError",
... | Unsubscribe from messages on a given topic
Args:
topic (string): The MQTT topic to unsubscribe from | [
"Unsubscribe",
"from",
"messages",
"on",
"a",
"given",
"topic"
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/transport_plugins/awsiot/iotile_transport_awsiot/mqtt_client.py#L208-L220 | train |
iotile/coretools | transport_plugins/awsiot/iotile_transport_awsiot/mqtt_client.py | OrderedAWSIOTClient._on_receive | def _on_receive(self, client, userdata, message):
"""Callback called whenever we receive a message on a subscribed topic
Args:
client (string): The client id of the client receiving the message
userdata (string): Any user data set with the underlying MQTT client
message (object): The mesage with a topic and payload.
"""
topic = message.topic
encoded = message.payload
try:
packet = json.loads(encoded)
except ValueError:
self._logger.warn("Could not decode json packet: %s", encoded)
return
try:
seq = packet['sequence']
message_data = packet['message']
except KeyError:
self._logger.warn("Message received did not have required sequence and message keys: %s", packet)
return
# If we received a packet that does not fit into a queue, check our wildcard
# queues
if topic not in self.queues:
found = False
for _, regex, callback, ordered in self.wildcard_queues:
if regex.match(topic):
self.queues[topic] = PacketQueue(0, callback, ordered)
found = True
break
if not found:
self._logger.warn("Received message for unknown topic: %s", topic)
return
self.queues[topic].receive(seq, [seq, topic, message_data]) | python | def _on_receive(self, client, userdata, message):
"""Callback called whenever we receive a message on a subscribed topic
Args:
client (string): The client id of the client receiving the message
userdata (string): Any user data set with the underlying MQTT client
message (object): The mesage with a topic and payload.
"""
topic = message.topic
encoded = message.payload
try:
packet = json.loads(encoded)
except ValueError:
self._logger.warn("Could not decode json packet: %s", encoded)
return
try:
seq = packet['sequence']
message_data = packet['message']
except KeyError:
self._logger.warn("Message received did not have required sequence and message keys: %s", packet)
return
# If we received a packet that does not fit into a queue, check our wildcard
# queues
if topic not in self.queues:
found = False
for _, regex, callback, ordered in self.wildcard_queues:
if regex.match(topic):
self.queues[topic] = PacketQueue(0, callback, ordered)
found = True
break
if not found:
self._logger.warn("Received message for unknown topic: %s", topic)
return
self.queues[topic].receive(seq, [seq, topic, message_data]) | [
"def",
"_on_receive",
"(",
"self",
",",
"client",
",",
"userdata",
",",
"message",
")",
":",
"topic",
"=",
"message",
".",
"topic",
"encoded",
"=",
"message",
".",
"payload",
"try",
":",
"packet",
"=",
"json",
".",
"loads",
"(",
"encoded",
")",
"except... | Callback called whenever we receive a message on a subscribed topic
Args:
client (string): The client id of the client receiving the message
userdata (string): Any user data set with the underlying MQTT client
message (object): The mesage with a topic and payload. | [
"Callback",
"called",
"whenever",
"we",
"receive",
"a",
"message",
"on",
"a",
"subscribed",
"topic"
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/transport_plugins/awsiot/iotile_transport_awsiot/mqtt_client.py#L222-L261 | train |
iotile/coretools | iotileship/iotile/ship/actions/sync_rtc_step.py | SyncRTCStep.run | def run(self, resources):
"""Sets the RTC timestamp to UTC.
Args:
resources (dict): A dictionary containing the required resources that
we needed access to in order to perform this step.
"""
hwman = resources['connection']
con = hwman.hwman.controller()
test_interface = con.test_interface()
try:
test_interface.synchronize_clock()
print('Time currently set at %s' % test_interface.current_time_str())
except:
raise ArgumentError('Error setting RTC time, check if controller actually has RTC or if iotile-support-lib-controller-3 is updated') | python | def run(self, resources):
"""Sets the RTC timestamp to UTC.
Args:
resources (dict): A dictionary containing the required resources that
we needed access to in order to perform this step.
"""
hwman = resources['connection']
con = hwman.hwman.controller()
test_interface = con.test_interface()
try:
test_interface.synchronize_clock()
print('Time currently set at %s' % test_interface.current_time_str())
except:
raise ArgumentError('Error setting RTC time, check if controller actually has RTC or if iotile-support-lib-controller-3 is updated') | [
"def",
"run",
"(",
"self",
",",
"resources",
")",
":",
"hwman",
"=",
"resources",
"[",
"'connection'",
"]",
"con",
"=",
"hwman",
".",
"hwman",
".",
"controller",
"(",
")",
"test_interface",
"=",
"con",
".",
"test_interface",
"(",
")",
"try",
":",
"test... | Sets the RTC timestamp to UTC.
Args:
resources (dict): A dictionary containing the required resources that
we needed access to in order to perform this step. | [
"Sets",
"the",
"RTC",
"timestamp",
"to",
"UTC",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotileship/iotile/ship/actions/sync_rtc_step.py#L21-L35 | train |
iotile/coretools | iotilecore/iotile/core/utilities/command_file.py | CommandFile.add | def add(self, command, *args):
"""Add a command to this command file.
Args:
command (str): The command to add
*args (str): The parameters to call the command with
"""
cmd = Command(command, args)
self.commands.append(cmd) | python | def add(self, command, *args):
"""Add a command to this command file.
Args:
command (str): The command to add
*args (str): The parameters to call the command with
"""
cmd = Command(command, args)
self.commands.append(cmd) | [
"def",
"add",
"(",
"self",
",",
"command",
",",
"*",
"args",
")",
":",
"cmd",
"=",
"Command",
"(",
"command",
",",
"args",
")",
"self",
".",
"commands",
".",
"append",
"(",
"cmd",
")"
] | Add a command to this command file.
Args:
command (str): The command to add
*args (str): The parameters to call the command with | [
"Add",
"a",
"command",
"to",
"this",
"command",
"file",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/utilities/command_file.py#L38-L47 | train |
iotile/coretools | iotilecore/iotile/core/utilities/command_file.py | CommandFile.save | def save(self, outpath):
"""Save this command file as an ascii file.
Agrs:
outpath (str): The output path to save.
"""
with open(outpath, "w") as outfile:
outfile.write(self.dump()) | python | def save(self, outpath):
"""Save this command file as an ascii file.
Agrs:
outpath (str): The output path to save.
"""
with open(outpath, "w") as outfile:
outfile.write(self.dump()) | [
"def",
"save",
"(",
"self",
",",
"outpath",
")",
":",
"with",
"open",
"(",
"outpath",
",",
"\"w\"",
")",
"as",
"outfile",
":",
"outfile",
".",
"write",
"(",
"self",
".",
"dump",
"(",
")",
")"
] | Save this command file as an ascii file.
Agrs:
outpath (str): The output path to save. | [
"Save",
"this",
"command",
"file",
"as",
"an",
"ascii",
"file",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/utilities/command_file.py#L49-L57 | train |
iotile/coretools | iotilecore/iotile/core/utilities/command_file.py | CommandFile.dump | def dump(self):
"""Dump all commands in this object to a string.
Returns:
str: An encoded list of commands separated by
\n characters suitable for saving to a file.
"""
out = []
out.append(self.filetype)
out.append("Format: {}".format(self.version))
out.append("Type: ASCII")
out.append("")
for cmd in self.commands:
out.append(self.encode(cmd))
return "\n".join(out) + "\n" | python | def dump(self):
"""Dump all commands in this object to a string.
Returns:
str: An encoded list of commands separated by
\n characters suitable for saving to a file.
"""
out = []
out.append(self.filetype)
out.append("Format: {}".format(self.version))
out.append("Type: ASCII")
out.append("")
for cmd in self.commands:
out.append(self.encode(cmd))
return "\n".join(out) + "\n" | [
"def",
"dump",
"(",
"self",
")",
":",
"out",
"=",
"[",
"]",
"out",
".",
"append",
"(",
"self",
".",
"filetype",
")",
"out",
".",
"append",
"(",
"\"Format: {}\"",
".",
"format",
"(",
"self",
".",
"version",
")",
")",
"out",
".",
"append",
"(",
"\"... | Dump all commands in this object to a string.
Returns:
str: An encoded list of commands separated by
\n characters suitable for saving to a file. | [
"Dump",
"all",
"commands",
"in",
"this",
"object",
"to",
"a",
"string",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/utilities/command_file.py#L59-L77 | train |
iotile/coretools | iotilecore/iotile/core/utilities/command_file.py | CommandFile.FromString | def FromString(cls, indata):
"""Load a CommandFile from a string.
The string should be produced from a previous call to
encode.
Args:
indata (str): The encoded input data.
Returns:
CommandFile: The decoded CommandFile object.
"""
lines = [x.strip() for x in indata.split("\n") if not x.startswith('#') and not x.strip() == ""]
if len(lines) < 3:
raise DataError("Invalid CommandFile string that did not contain 3 header lines", lines=lines)
fmt_line, version_line, ascii_line = lines[:3]
if not version_line.startswith("Format: "):
raise DataError("Invalid format version that did not start with 'Format: '", line=version_line)
version = version_line[8:]
if ascii_line != "Type: ASCII":
raise DataError("Unknown file type line (expected Type: ASCII)", line=ascii_line)
cmds = [cls.decode(x) for x in lines[3:]]
return CommandFile(fmt_line, version, cmds) | python | def FromString(cls, indata):
"""Load a CommandFile from a string.
The string should be produced from a previous call to
encode.
Args:
indata (str): The encoded input data.
Returns:
CommandFile: The decoded CommandFile object.
"""
lines = [x.strip() for x in indata.split("\n") if not x.startswith('#') and not x.strip() == ""]
if len(lines) < 3:
raise DataError("Invalid CommandFile string that did not contain 3 header lines", lines=lines)
fmt_line, version_line, ascii_line = lines[:3]
if not version_line.startswith("Format: "):
raise DataError("Invalid format version that did not start with 'Format: '", line=version_line)
version = version_line[8:]
if ascii_line != "Type: ASCII":
raise DataError("Unknown file type line (expected Type: ASCII)", line=ascii_line)
cmds = [cls.decode(x) for x in lines[3:]]
return CommandFile(fmt_line, version, cmds) | [
"def",
"FromString",
"(",
"cls",
",",
"indata",
")",
":",
"lines",
"=",
"[",
"x",
".",
"strip",
"(",
")",
"for",
"x",
"in",
"indata",
".",
"split",
"(",
"\"\\n\"",
")",
"if",
"not",
"x",
".",
"startswith",
"(",
"'#'",
")",
"and",
"not",
"x",
".... | Load a CommandFile from a string.
The string should be produced from a previous call to
encode.
Args:
indata (str): The encoded input data.
Returns:
CommandFile: The decoded CommandFile object. | [
"Load",
"a",
"CommandFile",
"from",
"a",
"string",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/utilities/command_file.py#L80-L109 | train |
iotile/coretools | iotilecore/iotile/core/utilities/command_file.py | CommandFile.FromFile | def FromFile(cls, inpath):
"""Load a CommandFile from a path.
Args:
inpath (str): The path to the file to load
Returns:
CommandFile: The decoded CommandFile object.
"""
with open(inpath, "r") as infile:
indata = infile.read()
return cls.FromString(indata) | python | def FromFile(cls, inpath):
"""Load a CommandFile from a path.
Args:
inpath (str): The path to the file to load
Returns:
CommandFile: The decoded CommandFile object.
"""
with open(inpath, "r") as infile:
indata = infile.read()
return cls.FromString(indata) | [
"def",
"FromFile",
"(",
"cls",
",",
"inpath",
")",
":",
"with",
"open",
"(",
"inpath",
",",
"\"r\"",
")",
"as",
"infile",
":",
"indata",
"=",
"infile",
".",
"read",
"(",
")",
"return",
"cls",
".",
"FromString",
"(",
"indata",
")"
] | Load a CommandFile from a path.
Args:
inpath (str): The path to the file to load
Returns:
CommandFile: The decoded CommandFile object. | [
"Load",
"a",
"CommandFile",
"from",
"a",
"path",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/utilities/command_file.py#L112-L125 | train |
iotile/coretools | iotilecore/iotile/core/utilities/command_file.py | CommandFile.encode | def encode(cls, command):
"""Encode a command as an unambiguous string.
Args:
command (Command): The command to encode.
Returns:
str: The encoded command
"""
args = []
for arg in command.args:
if not isinstance(arg, str):
arg = str(arg)
if "," in arg or arg.startswith(" ") or arg.endswith(" ") or arg.startswith("hex:"):
arg = "hex:{}".format(hexlify(arg.encode('utf-8')).decode('utf-8'))
args.append(arg)
argstr = ""
if len(args) > 0:
argstr = " {" + ",".join(args) + "}"
return command.name + argstr | python | def encode(cls, command):
"""Encode a command as an unambiguous string.
Args:
command (Command): The command to encode.
Returns:
str: The encoded command
"""
args = []
for arg in command.args:
if not isinstance(arg, str):
arg = str(arg)
if "," in arg or arg.startswith(" ") or arg.endswith(" ") or arg.startswith("hex:"):
arg = "hex:{}".format(hexlify(arg.encode('utf-8')).decode('utf-8'))
args.append(arg)
argstr = ""
if len(args) > 0:
argstr = " {" + ",".join(args) + "}"
return command.name + argstr | [
"def",
"encode",
"(",
"cls",
",",
"command",
")",
":",
"args",
"=",
"[",
"]",
"for",
"arg",
"in",
"command",
".",
"args",
":",
"if",
"not",
"isinstance",
"(",
"arg",
",",
"str",
")",
":",
"arg",
"=",
"str",
"(",
"arg",
")",
"if",
"\",\"",
"in",... | Encode a command as an unambiguous string.
Args:
command (Command): The command to encode.
Returns:
str: The encoded command | [
"Encode",
"a",
"command",
"as",
"an",
"unambiguous",
"string",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/utilities/command_file.py#L128-L153 | train |
iotile/coretools | iotilecore/iotile/core/utilities/command_file.py | CommandFile.decode | def decode(cls, command_str):
"""Decode a string encoded command back into a Command object.
Args:
command_str (str): The encoded command string output from a
previous call to encode.
Returns:
Command: The decoded Command object.
"""
name, _, arg = command_str.partition(" ")
args = []
if len(arg) > 0:
if arg[0] != '{' or arg[-1] != '}':
raise DataError("Invalid command, argument is not contained in { and }", arg=arg, cmd=name)
arg = arg[1:-1]
args = arg.split(",")
proc = []
for arg in args:
if arg.startswith("hex:"):
arg = unhexlify(arg[4:]).decode('utf-8')
proc.append(arg)
return Command(name, proc) | python | def decode(cls, command_str):
"""Decode a string encoded command back into a Command object.
Args:
command_str (str): The encoded command string output from a
previous call to encode.
Returns:
Command: The decoded Command object.
"""
name, _, arg = command_str.partition(" ")
args = []
if len(arg) > 0:
if arg[0] != '{' or arg[-1] != '}':
raise DataError("Invalid command, argument is not contained in { and }", arg=arg, cmd=name)
arg = arg[1:-1]
args = arg.split(",")
proc = []
for arg in args:
if arg.startswith("hex:"):
arg = unhexlify(arg[4:]).decode('utf-8')
proc.append(arg)
return Command(name, proc) | [
"def",
"decode",
"(",
"cls",
",",
"command_str",
")",
":",
"name",
",",
"_",
",",
"arg",
"=",
"command_str",
".",
"partition",
"(",
"\" \"",
")",
"args",
"=",
"[",
"]",
"if",
"len",
"(",
"arg",
")",
">",
"0",
":",
"if",
"arg",
"[",
"0",
"]",
... | Decode a string encoded command back into a Command object.
Args:
command_str (str): The encoded command string output from a
previous call to encode.
Returns:
Command: The decoded Command object. | [
"Decode",
"a",
"string",
"encoded",
"command",
"back",
"into",
"a",
"Command",
"object",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/utilities/command_file.py#L156-L186 | train |
iotile/coretools | transport_plugins/awsiot/iotile_transport_awsiot/packet_queue.py | PacketQueue.receive | def receive(self, sequence, args):
"""Receive one packet
If the sequence number is one we've already seen before, it is dropped.
If it is not the next expected sequence number, it is put into the
_out_of_order queue to be processed once the holes in sequence number
are filled in.
Args:
sequence (int): The sequence number of the received packet
args (list): The list of packet contents that will be passed to callback
as callback(*args)
"""
# If we are told to ignore sequence numbers, just pass the packet on
if not self._reorder:
self._callback(*args)
return
# If this packet is in the past, drop it
if self._next_expected is not None and sequence < self._next_expected:
print("Dropping out of order packet, seq=%d" % sequence)
return
self._out_of_order.append((sequence, args))
self._out_of_order.sort(key=lambda x: x[0])
# If we have received packets, attempt to process them in order
while len(self._out_of_order) > 0:
seq, args = self._out_of_order[0]
if self._next_expected is not None and seq != self._next_expected:
return
self._callback(*args)
self._out_of_order.pop(0)
self._next_expected = seq+1 | python | def receive(self, sequence, args):
"""Receive one packet
If the sequence number is one we've already seen before, it is dropped.
If it is not the next expected sequence number, it is put into the
_out_of_order queue to be processed once the holes in sequence number
are filled in.
Args:
sequence (int): The sequence number of the received packet
args (list): The list of packet contents that will be passed to callback
as callback(*args)
"""
# If we are told to ignore sequence numbers, just pass the packet on
if not self._reorder:
self._callback(*args)
return
# If this packet is in the past, drop it
if self._next_expected is not None and sequence < self._next_expected:
print("Dropping out of order packet, seq=%d" % sequence)
return
self._out_of_order.append((sequence, args))
self._out_of_order.sort(key=lambda x: x[0])
# If we have received packets, attempt to process them in order
while len(self._out_of_order) > 0:
seq, args = self._out_of_order[0]
if self._next_expected is not None and seq != self._next_expected:
return
self._callback(*args)
self._out_of_order.pop(0)
self._next_expected = seq+1 | [
"def",
"receive",
"(",
"self",
",",
"sequence",
",",
"args",
")",
":",
"if",
"not",
"self",
".",
"_reorder",
":",
"self",
".",
"_callback",
"(",
"*",
"args",
")",
"return",
"if",
"self",
".",
"_next_expected",
"is",
"not",
"None",
"and",
"sequence",
... | Receive one packet
If the sequence number is one we've already seen before, it is dropped.
If it is not the next expected sequence number, it is put into the
_out_of_order queue to be processed once the holes in sequence number
are filled in.
Args:
sequence (int): The sequence number of the received packet
args (list): The list of packet contents that will be passed to callback
as callback(*args) | [
"Receive",
"one",
"packet"
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/transport_plugins/awsiot/iotile_transport_awsiot/packet_queue.py#L33-L70 | train |
iotile/coretools | iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/mwcc.py | set_vars | def set_vars(env):
"""Set MWCW_VERSION, MWCW_VERSIONS, and some codewarrior environment vars
MWCW_VERSIONS is set to a list of objects representing installed versions
MWCW_VERSION is set to the version object that will be used for building.
MWCW_VERSION can be set to a string during Environment
construction to influence which version is chosen, otherwise
the latest one from MWCW_VERSIONS is used.
Returns true if at least one version is found, false otherwise
"""
desired = env.get('MWCW_VERSION', '')
# return right away if the variables are already set
if isinstance(desired, MWVersion):
return 1
elif desired is None:
return 0
versions = find_versions()
version = None
if desired:
for v in versions:
if str(v) == desired:
version = v
elif versions:
version = versions[-1]
env['MWCW_VERSIONS'] = versions
env['MWCW_VERSION'] = version
if version is None:
return 0
env.PrependENVPath('PATH', version.clpath)
env.PrependENVPath('PATH', version.dllpath)
ENV = env['ENV']
ENV['CWFolder'] = version.path
ENV['LM_LICENSE_FILE'] = version.license
plus = lambda x: '+%s' % x
ENV['MWCIncludes'] = os.pathsep.join(map(plus, version.includes))
ENV['MWLibraries'] = os.pathsep.join(map(plus, version.libs))
return 1 | python | def set_vars(env):
"""Set MWCW_VERSION, MWCW_VERSIONS, and some codewarrior environment vars
MWCW_VERSIONS is set to a list of objects representing installed versions
MWCW_VERSION is set to the version object that will be used for building.
MWCW_VERSION can be set to a string during Environment
construction to influence which version is chosen, otherwise
the latest one from MWCW_VERSIONS is used.
Returns true if at least one version is found, false otherwise
"""
desired = env.get('MWCW_VERSION', '')
# return right away if the variables are already set
if isinstance(desired, MWVersion):
return 1
elif desired is None:
return 0
versions = find_versions()
version = None
if desired:
for v in versions:
if str(v) == desired:
version = v
elif versions:
version = versions[-1]
env['MWCW_VERSIONS'] = versions
env['MWCW_VERSION'] = version
if version is None:
return 0
env.PrependENVPath('PATH', version.clpath)
env.PrependENVPath('PATH', version.dllpath)
ENV = env['ENV']
ENV['CWFolder'] = version.path
ENV['LM_LICENSE_FILE'] = version.license
plus = lambda x: '+%s' % x
ENV['MWCIncludes'] = os.pathsep.join(map(plus, version.includes))
ENV['MWLibraries'] = os.pathsep.join(map(plus, version.libs))
return 1 | [
"def",
"set_vars",
"(",
"env",
")",
":",
"desired",
"=",
"env",
".",
"get",
"(",
"'MWCW_VERSION'",
",",
"''",
")",
"if",
"isinstance",
"(",
"desired",
",",
"MWVersion",
")",
":",
"return",
"1",
"elif",
"desired",
"is",
"None",
":",
"return",
"0",
"ve... | Set MWCW_VERSION, MWCW_VERSIONS, and some codewarrior environment vars
MWCW_VERSIONS is set to a list of objects representing installed versions
MWCW_VERSION is set to the version object that will be used for building.
MWCW_VERSION can be set to a string during Environment
construction to influence which version is chosen, otherwise
the latest one from MWCW_VERSIONS is used.
Returns true if at least one version is found, false otherwise | [
"Set",
"MWCW_VERSION",
"MWCW_VERSIONS",
"and",
"some",
"codewarrior",
"environment",
"vars"
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/mwcc.py#L40-L84 | train |
iotile/coretools | iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/mwcc.py | find_versions | def find_versions():
"""Return a list of MWVersion objects representing installed versions"""
versions = []
### This function finds CodeWarrior by reading from the registry on
### Windows. Some other method needs to be implemented for other
### platforms, maybe something that calls env.WhereIs('mwcc')
if SCons.Util.can_read_reg:
try:
HLM = SCons.Util.HKEY_LOCAL_MACHINE
product = 'SOFTWARE\\Metrowerks\\CodeWarrior\\Product Versions'
product_key = SCons.Util.RegOpenKeyEx(HLM, product)
i = 0
while True:
name = product + '\\' + SCons.Util.RegEnumKey(product_key, i)
name_key = SCons.Util.RegOpenKeyEx(HLM, name)
try:
version = SCons.Util.RegQueryValueEx(name_key, 'VERSION')
path = SCons.Util.RegQueryValueEx(name_key, 'PATH')
mwv = MWVersion(version[0], path[0], 'Win32-X86')
versions.append(mwv)
except SCons.Util.RegError:
pass
i = i + 1
except SCons.Util.RegError:
pass
return versions | python | def find_versions():
"""Return a list of MWVersion objects representing installed versions"""
versions = []
### This function finds CodeWarrior by reading from the registry on
### Windows. Some other method needs to be implemented for other
### platforms, maybe something that calls env.WhereIs('mwcc')
if SCons.Util.can_read_reg:
try:
HLM = SCons.Util.HKEY_LOCAL_MACHINE
product = 'SOFTWARE\\Metrowerks\\CodeWarrior\\Product Versions'
product_key = SCons.Util.RegOpenKeyEx(HLM, product)
i = 0
while True:
name = product + '\\' + SCons.Util.RegEnumKey(product_key, i)
name_key = SCons.Util.RegOpenKeyEx(HLM, name)
try:
version = SCons.Util.RegQueryValueEx(name_key, 'VERSION')
path = SCons.Util.RegQueryValueEx(name_key, 'PATH')
mwv = MWVersion(version[0], path[0], 'Win32-X86')
versions.append(mwv)
except SCons.Util.RegError:
pass
i = i + 1
except SCons.Util.RegError:
pass
return versions | [
"def",
"find_versions",
"(",
")",
":",
"versions",
"=",
"[",
"]",
"if",
"SCons",
".",
"Util",
".",
"can_read_reg",
":",
"try",
":",
"HLM",
"=",
"SCons",
".",
"Util",
".",
"HKEY_LOCAL_MACHINE",
"product",
"=",
"'SOFTWARE\\\\Metrowerks\\\\CodeWarrior\\\\Product Ve... | Return a list of MWVersion objects representing installed versions | [
"Return",
"a",
"list",
"of",
"MWVersion",
"objects",
"representing",
"installed",
"versions"
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/mwcc.py#L87-L119 | train |
iotile/coretools | iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/mwcc.py | generate | def generate(env):
"""Add Builders and construction variables for the mwcc to an Environment."""
import SCons.Defaults
import SCons.Tool
set_vars(env)
static_obj, shared_obj = SCons.Tool.createObjBuilders(env)
for suffix in CSuffixes:
static_obj.add_action(suffix, SCons.Defaults.CAction)
shared_obj.add_action(suffix, SCons.Defaults.ShCAction)
for suffix in CXXSuffixes:
static_obj.add_action(suffix, SCons.Defaults.CXXAction)
shared_obj.add_action(suffix, SCons.Defaults.ShCXXAction)
env['CCCOMFLAGS'] = '$CPPFLAGS $_CPPDEFFLAGS $_CPPINCFLAGS -nolink -o $TARGET $SOURCES'
env['CC'] = 'mwcc'
env['CCCOM'] = '$CC $CFLAGS $CCFLAGS $CCCOMFLAGS'
env['CXX'] = 'mwcc'
env['CXXCOM'] = '$CXX $CXXFLAGS $CCCOMFLAGS'
env['SHCC'] = '$CC'
env['SHCCFLAGS'] = '$CCFLAGS'
env['SHCFLAGS'] = '$CFLAGS'
env['SHCCCOM'] = '$SHCC $SHCFLAGS $SHCCFLAGS $CCCOMFLAGS'
env['SHCXX'] = '$CXX'
env['SHCXXFLAGS'] = '$CXXFLAGS'
env['SHCXXCOM'] = '$SHCXX $SHCXXFLAGS $CCCOMFLAGS'
env['CFILESUFFIX'] = '.c'
env['CXXFILESUFFIX'] = '.cpp'
env['CPPDEFPREFIX'] = '-D'
env['CPPDEFSUFFIX'] = ''
env['INCPREFIX'] = '-I'
env['INCSUFFIX'] = '' | python | def generate(env):
"""Add Builders and construction variables for the mwcc to an Environment."""
import SCons.Defaults
import SCons.Tool
set_vars(env)
static_obj, shared_obj = SCons.Tool.createObjBuilders(env)
for suffix in CSuffixes:
static_obj.add_action(suffix, SCons.Defaults.CAction)
shared_obj.add_action(suffix, SCons.Defaults.ShCAction)
for suffix in CXXSuffixes:
static_obj.add_action(suffix, SCons.Defaults.CXXAction)
shared_obj.add_action(suffix, SCons.Defaults.ShCXXAction)
env['CCCOMFLAGS'] = '$CPPFLAGS $_CPPDEFFLAGS $_CPPINCFLAGS -nolink -o $TARGET $SOURCES'
env['CC'] = 'mwcc'
env['CCCOM'] = '$CC $CFLAGS $CCFLAGS $CCCOMFLAGS'
env['CXX'] = 'mwcc'
env['CXXCOM'] = '$CXX $CXXFLAGS $CCCOMFLAGS'
env['SHCC'] = '$CC'
env['SHCCFLAGS'] = '$CCFLAGS'
env['SHCFLAGS'] = '$CFLAGS'
env['SHCCCOM'] = '$SHCC $SHCFLAGS $SHCCFLAGS $CCCOMFLAGS'
env['SHCXX'] = '$CXX'
env['SHCXXFLAGS'] = '$CXXFLAGS'
env['SHCXXCOM'] = '$SHCXX $SHCXXFLAGS $CCCOMFLAGS'
env['CFILESUFFIX'] = '.c'
env['CXXFILESUFFIX'] = '.cpp'
env['CPPDEFPREFIX'] = '-D'
env['CPPDEFSUFFIX'] = ''
env['INCPREFIX'] = '-I'
env['INCSUFFIX'] = '' | [
"def",
"generate",
"(",
"env",
")",
":",
"import",
"SCons",
".",
"Defaults",
"import",
"SCons",
".",
"Tool",
"set_vars",
"(",
"env",
")",
"static_obj",
",",
"shared_obj",
"=",
"SCons",
".",
"Tool",
".",
"createObjBuilders",
"(",
"env",
")",
"for",
"suffi... | Add Builders and construction variables for the mwcc to an Environment. | [
"Add",
"Builders",
"and",
"construction",
"variables",
"for",
"the",
"mwcc",
"to",
"an",
"Environment",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/mwcc.py#L155-L194 | train |
iotile/coretools | iotilecore/iotile/core/hw/debug/flash_board_step.py | FlashBoardStep.run | def run(self, resources):
"""Runs the flash step
Args:
resources (dict): A dictionary containing the required resources that
we needed access to in order to perform this step.
"""
if not resources['connection']._port.startswith('jlink'):
raise ArgumentError("FlashBoardStep is currently only possible through jlink", invalid_port=args['port'])
hwman = resources['connection']
debug = hwman.hwman.debug(self._debug_string)
debug.flash(self._file) | python | def run(self, resources):
"""Runs the flash step
Args:
resources (dict): A dictionary containing the required resources that
we needed access to in order to perform this step.
"""
if not resources['connection']._port.startswith('jlink'):
raise ArgumentError("FlashBoardStep is currently only possible through jlink", invalid_port=args['port'])
hwman = resources['connection']
debug = hwman.hwman.debug(self._debug_string)
debug.flash(self._file) | [
"def",
"run",
"(",
"self",
",",
"resources",
")",
":",
"if",
"not",
"resources",
"[",
"'connection'",
"]",
".",
"_port",
".",
"startswith",
"(",
"'jlink'",
")",
":",
"raise",
"ArgumentError",
"(",
"\"FlashBoardStep is currently only possible through jlink\"",
",",... | Runs the flash step
Args:
resources (dict): A dictionary containing the required resources that
we needed access to in order to perform this step. | [
"Runs",
"the",
"flash",
"step"
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/hw/debug/flash_board_step.py#L26-L38 | train |
iotile/coretools | iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/filesystem.py | copyto_emitter | def copyto_emitter(target, source, env):
""" changes the path of the source to be under the target (which
are assumed to be directories.
"""
n_target = []
for t in target:
n_target = n_target + [t.File( str( s ) ) for s in source]
return (n_target, source) | python | def copyto_emitter(target, source, env):
""" changes the path of the source to be under the target (which
are assumed to be directories.
"""
n_target = []
for t in target:
n_target = n_target + [t.File( str( s ) ) for s in source]
return (n_target, source) | [
"def",
"copyto_emitter",
"(",
"target",
",",
"source",
",",
"env",
")",
":",
"n_target",
"=",
"[",
"]",
"for",
"t",
"in",
"target",
":",
"n_target",
"=",
"n_target",
"+",
"[",
"t",
".",
"File",
"(",
"str",
"(",
"s",
")",
")",
"for",
"s",
"in",
... | changes the path of the source to be under the target (which
are assumed to be directories. | [
"changes",
"the",
"path",
"of",
"the",
"source",
"to",
"be",
"under",
"the",
"target",
"(",
"which",
"are",
"assumed",
"to",
"be",
"directories",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/filesystem.py#L40-L49 | train |
iotile/coretools | iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/PharLapCommon.py | getPharLapPath | def getPharLapPath():
"""Reads the registry to find the installed path of the Phar Lap ETS
development kit.
Raises UserError if no installed version of Phar Lap can
be found."""
if not SCons.Util.can_read_reg:
raise SCons.Errors.InternalError("No Windows registry module was found")
try:
k=SCons.Util.RegOpenKeyEx(SCons.Util.HKEY_LOCAL_MACHINE,
'SOFTWARE\\Pharlap\\ETS')
val, type = SCons.Util.RegQueryValueEx(k, 'BaseDir')
# The following is a hack...there is (not surprisingly)
# an odd issue in the Phar Lap plug in that inserts
# a bunch of junk data after the phar lap path in the
# registry. We must trim it.
idx=val.find('\0')
if idx >= 0:
val = val[:idx]
return os.path.normpath(val)
except SCons.Util.RegError:
raise SCons.Errors.UserError("Cannot find Phar Lap ETS path in the registry. Is it installed properly?") | python | def getPharLapPath():
"""Reads the registry to find the installed path of the Phar Lap ETS
development kit.
Raises UserError if no installed version of Phar Lap can
be found."""
if not SCons.Util.can_read_reg:
raise SCons.Errors.InternalError("No Windows registry module was found")
try:
k=SCons.Util.RegOpenKeyEx(SCons.Util.HKEY_LOCAL_MACHINE,
'SOFTWARE\\Pharlap\\ETS')
val, type = SCons.Util.RegQueryValueEx(k, 'BaseDir')
# The following is a hack...there is (not surprisingly)
# an odd issue in the Phar Lap plug in that inserts
# a bunch of junk data after the phar lap path in the
# registry. We must trim it.
idx=val.find('\0')
if idx >= 0:
val = val[:idx]
return os.path.normpath(val)
except SCons.Util.RegError:
raise SCons.Errors.UserError("Cannot find Phar Lap ETS path in the registry. Is it installed properly?") | [
"def",
"getPharLapPath",
"(",
")",
":",
"if",
"not",
"SCons",
".",
"Util",
".",
"can_read_reg",
":",
"raise",
"SCons",
".",
"Errors",
".",
"InternalError",
"(",
"\"No Windows registry module was found\"",
")",
"try",
":",
"k",
"=",
"SCons",
".",
"Util",
".",... | Reads the registry to find the installed path of the Phar Lap ETS
development kit.
Raises UserError if no installed version of Phar Lap can
be found. | [
"Reads",
"the",
"registry",
"to",
"find",
"the",
"installed",
"path",
"of",
"the",
"Phar",
"Lap",
"ETS",
"development",
"kit",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/PharLapCommon.py#L40-L64 | train |
iotile/coretools | iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/PharLapCommon.py | addPharLapPaths | def addPharLapPaths(env):
"""This function adds the path to the Phar Lap binaries, includes,
and libraries, if they are not already there."""
ph_path = getPharLapPath()
try:
env_dict = env['ENV']
except KeyError:
env_dict = {}
env['ENV'] = env_dict
SCons.Util.AddPathIfNotExists(env_dict, 'PATH',
os.path.join(ph_path, 'bin'))
SCons.Util.AddPathIfNotExists(env_dict, 'INCLUDE',
os.path.join(ph_path, 'include'))
SCons.Util.AddPathIfNotExists(env_dict, 'LIB',
os.path.join(ph_path, 'lib'))
SCons.Util.AddPathIfNotExists(env_dict, 'LIB',
os.path.join(ph_path, os.path.normpath('lib/vclib')))
env['PHARLAP_PATH'] = getPharLapPath()
env['PHARLAP_VERSION'] = str(getPharLapVersion()) | python | def addPharLapPaths(env):
"""This function adds the path to the Phar Lap binaries, includes,
and libraries, if they are not already there."""
ph_path = getPharLapPath()
try:
env_dict = env['ENV']
except KeyError:
env_dict = {}
env['ENV'] = env_dict
SCons.Util.AddPathIfNotExists(env_dict, 'PATH',
os.path.join(ph_path, 'bin'))
SCons.Util.AddPathIfNotExists(env_dict, 'INCLUDE',
os.path.join(ph_path, 'include'))
SCons.Util.AddPathIfNotExists(env_dict, 'LIB',
os.path.join(ph_path, 'lib'))
SCons.Util.AddPathIfNotExists(env_dict, 'LIB',
os.path.join(ph_path, os.path.normpath('lib/vclib')))
env['PHARLAP_PATH'] = getPharLapPath()
env['PHARLAP_VERSION'] = str(getPharLapVersion()) | [
"def",
"addPharLapPaths",
"(",
"env",
")",
":",
"ph_path",
"=",
"getPharLapPath",
"(",
")",
"try",
":",
"env_dict",
"=",
"env",
"[",
"'ENV'",
"]",
"except",
"KeyError",
":",
"env_dict",
"=",
"{",
"}",
"env",
"[",
"'ENV'",
"]",
"=",
"env_dict",
"SCons",... | This function adds the path to the Phar Lap binaries, includes,
and libraries, if they are not already there. | [
"This",
"function",
"adds",
"the",
"path",
"to",
"the",
"Phar",
"Lap",
"binaries",
"includes",
"and",
"libraries",
"if",
"they",
"are",
"not",
"already",
"there",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/PharLapCommon.py#L88-L108 | train |
iotile/coretools | iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/msgmerge.py | _update_or_init_po_files | def _update_or_init_po_files(target, source, env):
""" Action function for `POUpdate` builder """
import SCons.Action
from SCons.Tool.GettextCommon import _init_po_files
for tgt in target:
if tgt.rexists():
action = SCons.Action.Action('$MSGMERGECOM', '$MSGMERGECOMSTR')
else:
action = _init_po_files
status = action([tgt], source, env)
if status : return status
return 0 | python | def _update_or_init_po_files(target, source, env):
""" Action function for `POUpdate` builder """
import SCons.Action
from SCons.Tool.GettextCommon import _init_po_files
for tgt in target:
if tgt.rexists():
action = SCons.Action.Action('$MSGMERGECOM', '$MSGMERGECOMSTR')
else:
action = _init_po_files
status = action([tgt], source, env)
if status : return status
return 0 | [
"def",
"_update_or_init_po_files",
"(",
"target",
",",
"source",
",",
"env",
")",
":",
"import",
"SCons",
".",
"Action",
"from",
"SCons",
".",
"Tool",
".",
"GettextCommon",
"import",
"_init_po_files",
"for",
"tgt",
"in",
"target",
":",
"if",
"tgt",
".",
"r... | Action function for `POUpdate` builder | [
"Action",
"function",
"for",
"POUpdate",
"builder"
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/msgmerge.py#L30-L41 | train |
iotile/coretools | iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/msgmerge.py | _POUpdateBuilder | def _POUpdateBuilder(env, **kw):
""" Create an object of `POUpdate` builder """
import SCons.Action
from SCons.Tool.GettextCommon import _POFileBuilder
action = SCons.Action.Action(_update_or_init_po_files, None)
return _POFileBuilder(env, action=action, target_alias='$POUPDATE_ALIAS') | python | def _POUpdateBuilder(env, **kw):
""" Create an object of `POUpdate` builder """
import SCons.Action
from SCons.Tool.GettextCommon import _POFileBuilder
action = SCons.Action.Action(_update_or_init_po_files, None)
return _POFileBuilder(env, action=action, target_alias='$POUPDATE_ALIAS') | [
"def",
"_POUpdateBuilder",
"(",
"env",
",",
"**",
"kw",
")",
":",
"import",
"SCons",
".",
"Action",
"from",
"SCons",
".",
"Tool",
".",
"GettextCommon",
"import",
"_POFileBuilder",
"action",
"=",
"SCons",
".",
"Action",
".",
"Action",
"(",
"_update_or_init_po... | Create an object of `POUpdate` builder | [
"Create",
"an",
"object",
"of",
"POUpdate",
"builder"
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/msgmerge.py#L45-L50 | train |
iotile/coretools | iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/msgmerge.py | _POUpdateBuilderWrapper | def _POUpdateBuilderWrapper(env, target=None, source=_null, **kw):
""" Wrapper for `POUpdate` builder - make user's life easier """
if source is _null:
if 'POTDOMAIN' in kw:
domain = kw['POTDOMAIN']
elif 'POTDOMAIN' in env and env['POTDOMAIN']:
domain = env['POTDOMAIN']
else:
domain = 'messages'
source = [ domain ] # NOTE: Suffix shall be appended automatically
return env._POUpdateBuilder(target, source, **kw) | python | def _POUpdateBuilderWrapper(env, target=None, source=_null, **kw):
""" Wrapper for `POUpdate` builder - make user's life easier """
if source is _null:
if 'POTDOMAIN' in kw:
domain = kw['POTDOMAIN']
elif 'POTDOMAIN' in env and env['POTDOMAIN']:
domain = env['POTDOMAIN']
else:
domain = 'messages'
source = [ domain ] # NOTE: Suffix shall be appended automatically
return env._POUpdateBuilder(target, source, **kw) | [
"def",
"_POUpdateBuilderWrapper",
"(",
"env",
",",
"target",
"=",
"None",
",",
"source",
"=",
"_null",
",",
"**",
"kw",
")",
":",
"if",
"source",
"is",
"_null",
":",
"if",
"'POTDOMAIN'",
"in",
"kw",
":",
"domain",
"=",
"kw",
"[",
"'POTDOMAIN'",
"]",
... | Wrapper for `POUpdate` builder - make user's life easier | [
"Wrapper",
"for",
"POUpdate",
"builder",
"-",
"make",
"user",
"s",
"life",
"easier"
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/msgmerge.py#L56-L66 | train |
iotile/coretools | iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/msgmerge.py | generate | def generate(env,**kw):
""" Generate the `xgettext` tool """
from SCons.Tool.GettextCommon import _detect_msgmerge
try:
env['MSGMERGE'] = _detect_msgmerge(env)
except:
env['MSGMERGE'] = 'msgmerge'
env.SetDefault(
POTSUFFIX = ['.pot'],
POSUFFIX = ['.po'],
MSGMERGECOM = '$MSGMERGE $MSGMERGEFLAGS --update $TARGET $SOURCE',
MSGMERGECOMSTR = '',
MSGMERGEFLAGS = [ ],
POUPDATE_ALIAS = 'po-update'
)
env.Append(BUILDERS = { '_POUpdateBuilder':_POUpdateBuilder(env) })
env.AddMethod(_POUpdateBuilderWrapper, 'POUpdate')
env.AlwaysBuild(env.Alias('$POUPDATE_ALIAS')) | python | def generate(env,**kw):
""" Generate the `xgettext` tool """
from SCons.Tool.GettextCommon import _detect_msgmerge
try:
env['MSGMERGE'] = _detect_msgmerge(env)
except:
env['MSGMERGE'] = 'msgmerge'
env.SetDefault(
POTSUFFIX = ['.pot'],
POSUFFIX = ['.po'],
MSGMERGECOM = '$MSGMERGE $MSGMERGEFLAGS --update $TARGET $SOURCE',
MSGMERGECOMSTR = '',
MSGMERGEFLAGS = [ ],
POUPDATE_ALIAS = 'po-update'
)
env.Append(BUILDERS = { '_POUpdateBuilder':_POUpdateBuilder(env) })
env.AddMethod(_POUpdateBuilderWrapper, 'POUpdate')
env.AlwaysBuild(env.Alias('$POUPDATE_ALIAS')) | [
"def",
"generate",
"(",
"env",
",",
"**",
"kw",
")",
":",
"from",
"SCons",
".",
"Tool",
".",
"GettextCommon",
"import",
"_detect_msgmerge",
"try",
":",
"env",
"[",
"'MSGMERGE'",
"]",
"=",
"_detect_msgmerge",
"(",
"env",
")",
"except",
":",
"env",
"[",
... | Generate the `xgettext` tool | [
"Generate",
"the",
"xgettext",
"tool"
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/msgmerge.py#L70-L87 | train |
iotile/coretools | iotilebuild/iotile/build/build/depends.py | ProductResolver._create_filter | def _create_filter(self):
"""Create a filter of all of the dependency products that we have selected."""
self._product_filter = {}
for chip in itertools.chain(iter(self._family.targets(self._tile.short_name)),
iter([self._family.platform_independent_target()])):
for key, prods in chip.property('depends', {}).items():
name, _, _ = key.partition(',')
for prod in prods:
if prod not in self._product_filter:
self._product_filter[prod] = set()
self._product_filter[prod].add(name) | python | def _create_filter(self):
"""Create a filter of all of the dependency products that we have selected."""
self._product_filter = {}
for chip in itertools.chain(iter(self._family.targets(self._tile.short_name)),
iter([self._family.platform_independent_target()])):
for key, prods in chip.property('depends', {}).items():
name, _, _ = key.partition(',')
for prod in prods:
if prod not in self._product_filter:
self._product_filter[prod] = set()
self._product_filter[prod].add(name) | [
"def",
"_create_filter",
"(",
"self",
")",
":",
"self",
".",
"_product_filter",
"=",
"{",
"}",
"for",
"chip",
"in",
"itertools",
".",
"chain",
"(",
"iter",
"(",
"self",
".",
"_family",
".",
"targets",
"(",
"self",
".",
"_tile",
".",
"short_name",
")",
... | Create a filter of all of the dependency products that we have selected. | [
"Create",
"a",
"filter",
"of",
"all",
"of",
"the",
"dependency",
"products",
"that",
"we",
"have",
"selected",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/build/depends.py#L38-L52 | train |
iotile/coretools | iotilebuild/iotile/build/build/depends.py | ProductResolver._create_product_map | def _create_product_map(self):
"""Create a map of all products produced by this or a dependency."""
self._product_map = {}
for dep in self._tile.dependencies:
try:
dep_tile = IOTile(os.path.join('build', 'deps', dep['unique_id']))
except (ArgumentError, EnvironmentError):
raise BuildError("Could not find required dependency", name=dep['name'])
self._add_products(dep_tile)
self._add_products(self._tile, show_all=True) | python | def _create_product_map(self):
"""Create a map of all products produced by this or a dependency."""
self._product_map = {}
for dep in self._tile.dependencies:
try:
dep_tile = IOTile(os.path.join('build', 'deps', dep['unique_id']))
except (ArgumentError, EnvironmentError):
raise BuildError("Could not find required dependency", name=dep['name'])
self._add_products(dep_tile)
self._add_products(self._tile, show_all=True) | [
"def",
"_create_product_map",
"(",
"self",
")",
":",
"self",
".",
"_product_map",
"=",
"{",
"}",
"for",
"dep",
"in",
"self",
".",
"_tile",
".",
"dependencies",
":",
"try",
":",
"dep_tile",
"=",
"IOTile",
"(",
"os",
".",
"path",
".",
"join",
"(",
"'bu... | Create a map of all products produced by this or a dependency. | [
"Create",
"a",
"map",
"of",
"all",
"products",
"produced",
"by",
"this",
"or",
"a",
"dependency",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/build/depends.py#L63-L76 | train |
iotile/coretools | iotilebuild/iotile/build/build/depends.py | ProductResolver._add_products | def _add_products(self, tile, show_all=False):
"""Add all products from a tile into our product map."""
products = tile.products
unique_id = tile.unique_id
base_path = tile.output_folder
for prod_path, prod_type in products.items():
# We need to handle include_directories and tilebus_definitions
# specially since those are stored reversed in module_settings.json
# for historical reasons. Currently we don't support resolving
# tilebus_definitions or include_directories in ProductResolver
if prod_path == 'tilebus_definitions' or prod_path == 'include_directories':
continue
if prod_type in self.IGNORED_PRODUCTS:
continue
prod_base = os.path.basename(prod_path)
if prod_type not in self._product_map:
self._product_map[prod_type] = {}
prod_map = self._product_map[prod_type]
if prod_base not in prod_map:
prod_map[prod_base] = []
full_path = os.path.normpath(os.path.join(base_path, prod_path))
info = ProductInfo(prod_base, full_path, unique_id, not show_all and prod_base not in self._product_filter)
prod_map[prod_base].append(info) | python | def _add_products(self, tile, show_all=False):
"""Add all products from a tile into our product map."""
products = tile.products
unique_id = tile.unique_id
base_path = tile.output_folder
for prod_path, prod_type in products.items():
# We need to handle include_directories and tilebus_definitions
# specially since those are stored reversed in module_settings.json
# for historical reasons. Currently we don't support resolving
# tilebus_definitions or include_directories in ProductResolver
if prod_path == 'tilebus_definitions' or prod_path == 'include_directories':
continue
if prod_type in self.IGNORED_PRODUCTS:
continue
prod_base = os.path.basename(prod_path)
if prod_type not in self._product_map:
self._product_map[prod_type] = {}
prod_map = self._product_map[prod_type]
if prod_base not in prod_map:
prod_map[prod_base] = []
full_path = os.path.normpath(os.path.join(base_path, prod_path))
info = ProductInfo(prod_base, full_path, unique_id, not show_all and prod_base not in self._product_filter)
prod_map[prod_base].append(info) | [
"def",
"_add_products",
"(",
"self",
",",
"tile",
",",
"show_all",
"=",
"False",
")",
":",
"products",
"=",
"tile",
".",
"products",
"unique_id",
"=",
"tile",
".",
"unique_id",
"base_path",
"=",
"tile",
".",
"output_folder",
"for",
"prod_path",
",",
"prod_... | Add all products from a tile into our product map. | [
"Add",
"all",
"products",
"from",
"a",
"tile",
"into",
"our",
"product",
"map",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/build/depends.py#L78-L106 | train |
iotile/coretools | iotilebuild/iotile/build/build/depends.py | ProductResolver.find_all | def find_all(self, product_type, short_name, include_hidden=False):
"""Find all providers of a given product by its short name.
This function will return all providers of a given product. If you
want to ensure that a product's name is unique among all dependencies,
you should use find_unique.
Args:
product_type (str): The type of product that we are looking for, like
firmware_image, library etc.
short_name (str): The short name of the product that we wish to find,
usually its os.path.basename()
include_hidden (bool): Return products that are hidden and not selected
as visible in the depends section of this tile's module settings.
This defaults to False.
Returns:
list of ProductInfo: A list of all of the matching products. If no matching
products are found, an empty list is returned. If you want to raise
a BuildError in that case use find_unique.
"""
all_prods = []
# If product_type is not return products of all types
if product_type is None:
for prod_dict in self._product_map.values():
all_prods.extend([prod for prod in prod_dict.get(short_name, []) if include_hidden or not prod.hidden])
return all_prods
all_prods = self._product_map.get(product_type, {})
return [prod for prod in all_prods.get(short_name, []) if include_hidden or not prod.hidden] | python | def find_all(self, product_type, short_name, include_hidden=False):
"""Find all providers of a given product by its short name.
This function will return all providers of a given product. If you
want to ensure that a product's name is unique among all dependencies,
you should use find_unique.
Args:
product_type (str): The type of product that we are looking for, like
firmware_image, library etc.
short_name (str): The short name of the product that we wish to find,
usually its os.path.basename()
include_hidden (bool): Return products that are hidden and not selected
as visible in the depends section of this tile's module settings.
This defaults to False.
Returns:
list of ProductInfo: A list of all of the matching products. If no matching
products are found, an empty list is returned. If you want to raise
a BuildError in that case use find_unique.
"""
all_prods = []
# If product_type is not return products of all types
if product_type is None:
for prod_dict in self._product_map.values():
all_prods.extend([prod for prod in prod_dict.get(short_name, []) if include_hidden or not prod.hidden])
return all_prods
all_prods = self._product_map.get(product_type, {})
return [prod for prod in all_prods.get(short_name, []) if include_hidden or not prod.hidden] | [
"def",
"find_all",
"(",
"self",
",",
"product_type",
",",
"short_name",
",",
"include_hidden",
"=",
"False",
")",
":",
"all_prods",
"=",
"[",
"]",
"if",
"product_type",
"is",
"None",
":",
"for",
"prod_dict",
"in",
"self",
".",
"_product_map",
".",
"values"... | Find all providers of a given product by its short name.
This function will return all providers of a given product. If you
want to ensure that a product's name is unique among all dependencies,
you should use find_unique.
Args:
product_type (str): The type of product that we are looking for, like
firmware_image, library etc.
short_name (str): The short name of the product that we wish to find,
usually its os.path.basename()
include_hidden (bool): Return products that are hidden and not selected
as visible in the depends section of this tile's module settings.
This defaults to False.
Returns:
list of ProductInfo: A list of all of the matching products. If no matching
products are found, an empty list is returned. If you want to raise
a BuildError in that case use find_unique. | [
"Find",
"all",
"providers",
"of",
"a",
"given",
"product",
"by",
"its",
"short",
"name",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/build/depends.py#L108-L140 | train |
iotile/coretools | iotilebuild/iotile/build/build/depends.py | ProductResolver.find_unique | def find_unique(self, product_type, short_name, include_hidden=False):
"""Find the unique provider of a given product by its short name.
This function will ensure that the product is only provided by exactly
one tile (either this tile or one of its dependencies and raise a
BuildError if not.
Args:
product_type (str): The type of product that we are looking for, like
firmware_image, library etc.
short_name (str): The short name of the product that we wish to find,
usually its os.path.basename()
include_hidden (bool): Return products that are hidden and not selected
as visible in the depends section of this tile's module settings.
This defaults to False.
Returns:
ProductInfo: The information of the one unique provider of this product.
"""
prods = self.find_all(product_type, short_name, include_hidden)
if len(prods) == 0:
raise BuildError("Could not find product by name in find_unique", name=short_name, type=product_type)
if len(prods) > 1:
raise BuildError("Multiple providers of the same product in find_unique", name=short_name, type=product_type, products=prods)
if self._tracking:
self._resolved_products.append(prods[0])
return prods[0] | python | def find_unique(self, product_type, short_name, include_hidden=False):
"""Find the unique provider of a given product by its short name.
This function will ensure that the product is only provided by exactly
one tile (either this tile or one of its dependencies and raise a
BuildError if not.
Args:
product_type (str): The type of product that we are looking for, like
firmware_image, library etc.
short_name (str): The short name of the product that we wish to find,
usually its os.path.basename()
include_hidden (bool): Return products that are hidden and not selected
as visible in the depends section of this tile's module settings.
This defaults to False.
Returns:
ProductInfo: The information of the one unique provider of this product.
"""
prods = self.find_all(product_type, short_name, include_hidden)
if len(prods) == 0:
raise BuildError("Could not find product by name in find_unique", name=short_name, type=product_type)
if len(prods) > 1:
raise BuildError("Multiple providers of the same product in find_unique", name=short_name, type=product_type, products=prods)
if self._tracking:
self._resolved_products.append(prods[0])
return prods[0] | [
"def",
"find_unique",
"(",
"self",
",",
"product_type",
",",
"short_name",
",",
"include_hidden",
"=",
"False",
")",
":",
"prods",
"=",
"self",
".",
"find_all",
"(",
"product_type",
",",
"short_name",
",",
"include_hidden",
")",
"if",
"len",
"(",
"prods",
... | Find the unique provider of a given product by its short name.
This function will ensure that the product is only provided by exactly
one tile (either this tile or one of its dependencies and raise a
BuildError if not.
Args:
product_type (str): The type of product that we are looking for, like
firmware_image, library etc.
short_name (str): The short name of the product that we wish to find,
usually its os.path.basename()
include_hidden (bool): Return products that are hidden and not selected
as visible in the depends section of this tile's module settings.
This defaults to False.
Returns:
ProductInfo: The information of the one unique provider of this product. | [
"Find",
"the",
"unique",
"provider",
"of",
"a",
"given",
"product",
"by",
"its",
"short",
"name",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/build/depends.py#L142-L173 | train |
iotile/coretools | iotilebuild/iotile/build/scripts/iotile_tbcompile.py | main | def main(raw_args=None):
"""Run the iotile-tbcompile script.
Args:
raw_args (list): Optional list of command line arguments. If not
passed these are pulled from sys.argv.
"""
multifile_choices = frozenset(['c_files'])
if raw_args is None:
raw_args = sys.argv[1:]
parser = build_parser()
args = parser.parse_args(raw_args)
if args.output is None and args.format in multifile_choices:
print("You must specify an output file with -o, --output when "
"using a format that produces multiple files (-f %s)" % args.format)
return 1
desc = TBDescriptor(args.bus_definition)
if args.format == 'json':
print("JSON output is not yet supported")
return 1
block = desc.get_block()
template_map = {
'command_map_c': 'command_map_c.c.tpl',
'command_map_h': 'command_map_c.h.tpl',
'config_map_c': 'config_variables_c.c.tpl',
'config_map_h': 'config_variables_c.h.tpl'
}
template_name = template_map.get(args.format)
data = block.render_template(template_name)
print(data)
return 0 | python | def main(raw_args=None):
"""Run the iotile-tbcompile script.
Args:
raw_args (list): Optional list of command line arguments. If not
passed these are pulled from sys.argv.
"""
multifile_choices = frozenset(['c_files'])
if raw_args is None:
raw_args = sys.argv[1:]
parser = build_parser()
args = parser.parse_args(raw_args)
if args.output is None and args.format in multifile_choices:
print("You must specify an output file with -o, --output when "
"using a format that produces multiple files (-f %s)" % args.format)
return 1
desc = TBDescriptor(args.bus_definition)
if args.format == 'json':
print("JSON output is not yet supported")
return 1
block = desc.get_block()
template_map = {
'command_map_c': 'command_map_c.c.tpl',
'command_map_h': 'command_map_c.h.tpl',
'config_map_c': 'config_variables_c.c.tpl',
'config_map_h': 'config_variables_c.h.tpl'
}
template_name = template_map.get(args.format)
data = block.render_template(template_name)
print(data)
return 0 | [
"def",
"main",
"(",
"raw_args",
"=",
"None",
")",
":",
"multifile_choices",
"=",
"frozenset",
"(",
"[",
"'c_files'",
"]",
")",
"if",
"raw_args",
"is",
"None",
":",
"raw_args",
"=",
"sys",
".",
"argv",
"[",
"1",
":",
"]",
"parser",
"=",
"build_parser",
... | Run the iotile-tbcompile script.
Args:
raw_args (list): Optional list of command line arguments. If not
passed these are pulled from sys.argv. | [
"Run",
"the",
"iotile",
"-",
"tbcompile",
"script",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/scripts/iotile_tbcompile.py#L37-L77 | train |
iotile/coretools | iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/rpcgen.py | generate | def generate(env):
"Add RPCGEN Builders and construction variables for an Environment."
client = Builder(action=rpcgen_client, suffix='_clnt.c', src_suffix='.x')
header = Builder(action=rpcgen_header, suffix='.h', src_suffix='.x')
service = Builder(action=rpcgen_service, suffix='_svc.c', src_suffix='.x')
xdr = Builder(action=rpcgen_xdr, suffix='_xdr.c', src_suffix='.x')
env.Append(BUILDERS={'RPCGenClient' : client,
'RPCGenHeader' : header,
'RPCGenService' : service,
'RPCGenXDR' : xdr})
env['RPCGEN'] = 'rpcgen'
env['RPCGENFLAGS'] = SCons.Util.CLVar('')
env['RPCGENCLIENTFLAGS'] = SCons.Util.CLVar('')
env['RPCGENHEADERFLAGS'] = SCons.Util.CLVar('')
env['RPCGENSERVICEFLAGS'] = SCons.Util.CLVar('')
env['RPCGENXDRFLAGS'] = SCons.Util.CLVar('') | python | def generate(env):
"Add RPCGEN Builders and construction variables for an Environment."
client = Builder(action=rpcgen_client, suffix='_clnt.c', src_suffix='.x')
header = Builder(action=rpcgen_header, suffix='.h', src_suffix='.x')
service = Builder(action=rpcgen_service, suffix='_svc.c', src_suffix='.x')
xdr = Builder(action=rpcgen_xdr, suffix='_xdr.c', src_suffix='.x')
env.Append(BUILDERS={'RPCGenClient' : client,
'RPCGenHeader' : header,
'RPCGenService' : service,
'RPCGenXDR' : xdr})
env['RPCGEN'] = 'rpcgen'
env['RPCGENFLAGS'] = SCons.Util.CLVar('')
env['RPCGENCLIENTFLAGS'] = SCons.Util.CLVar('')
env['RPCGENHEADERFLAGS'] = SCons.Util.CLVar('')
env['RPCGENSERVICEFLAGS'] = SCons.Util.CLVar('')
env['RPCGENXDRFLAGS'] = SCons.Util.CLVar('') | [
"def",
"generate",
"(",
"env",
")",
":",
"\"Add RPCGEN Builders and construction variables for an Environment.\"",
"client",
"=",
"Builder",
"(",
"action",
"=",
"rpcgen_client",
",",
"suffix",
"=",
"'_clnt.c'",
",",
"src_suffix",
"=",
"'.x'",
")",
"header",
"=",
"Bu... | Add RPCGEN Builders and construction variables for an Environment. | [
"Add",
"RPCGEN",
"Builders",
"and",
"construction",
"variables",
"for",
"an",
"Environment",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/rpcgen.py#L45-L61 | train |
iotile/coretools | scripts/release.py | build_parser | def build_parser():
"""Build argument parsers."""
parser = argparse.ArgumentParser("Release packages to pypi")
parser.add_argument('--check', '-c', action="store_true", help="Do a dry run without uploading")
parser.add_argument('component', help="The component to release as component-version")
return parser | python | def build_parser():
"""Build argument parsers."""
parser = argparse.ArgumentParser("Release packages to pypi")
parser.add_argument('--check', '-c', action="store_true", help="Do a dry run without uploading")
parser.add_argument('component', help="The component to release as component-version")
return parser | [
"def",
"build_parser",
"(",
")",
":",
"parser",
"=",
"argparse",
".",
"ArgumentParser",
"(",
"\"Release packages to pypi\"",
")",
"parser",
".",
"add_argument",
"(",
"'--check'",
",",
"'-c'",
",",
"action",
"=",
"\"store_true\"",
",",
"help",
"=",
"\"Do a dry ru... | Build argument parsers. | [
"Build",
"argument",
"parsers",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/scripts/release.py#L21-L27 | train |
iotile/coretools | scripts/release.py | get_release_component | def get_release_component(comp):
"""Split the argument passed on the command line into a component name and expected version"""
name, vers = comp.split("-")
if name not in comp_names:
print("Known components:")
for comp in comp_names:
print("- %s" % comp)
raise EnvironmentError("Unknown release component name '%s'" % name)
return name, vers | python | def get_release_component(comp):
"""Split the argument passed on the command line into a component name and expected version"""
name, vers = comp.split("-")
if name not in comp_names:
print("Known components:")
for comp in comp_names:
print("- %s" % comp)
raise EnvironmentError("Unknown release component name '%s'" % name)
return name, vers | [
"def",
"get_release_component",
"(",
"comp",
")",
":",
"name",
",",
"vers",
"=",
"comp",
".",
"split",
"(",
"\"-\"",
")",
"if",
"name",
"not",
"in",
"comp_names",
":",
"print",
"(",
"\"Known components:\"",
")",
"for",
"comp",
"in",
"comp_names",
":",
"p... | Split the argument passed on the command line into a component name and expected version | [
"Split",
"the",
"argument",
"passed",
"on",
"the",
"command",
"line",
"into",
"a",
"component",
"name",
"and",
"expected",
"version"
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/scripts/release.py#L43-L55 | train |
iotile/coretools | scripts/release.py | check_compatibility | def check_compatibility(name):
"""Verify if we can release this component on the running interpreter.
All components are released from python 2.7 by default unless they specify
that they are python 3 only, in which case they are released from python 3.6
"""
comp = comp_names[name]
if sys.version_info.major < 3 and comp.compat == "python3":
return False
if sys.version_info.major >= 3 and comp.compat != "python3":
return False
return True | python | def check_compatibility(name):
"""Verify if we can release this component on the running interpreter.
All components are released from python 2.7 by default unless they specify
that they are python 3 only, in which case they are released from python 3.6
"""
comp = comp_names[name]
if sys.version_info.major < 3 and comp.compat == "python3":
return False
if sys.version_info.major >= 3 and comp.compat != "python3":
return False
return True | [
"def",
"check_compatibility",
"(",
"name",
")",
":",
"comp",
"=",
"comp_names",
"[",
"name",
"]",
"if",
"sys",
".",
"version_info",
".",
"major",
"<",
"3",
"and",
"comp",
".",
"compat",
"==",
"\"python3\"",
":",
"return",
"False",
"if",
"sys",
".",
"ve... | Verify if we can release this component on the running interpreter.
All components are released from python 2.7 by default unless they specify
that they are python 3 only, in which case they are released from python 3.6 | [
"Verify",
"if",
"we",
"can",
"release",
"this",
"component",
"on",
"the",
"running",
"interpreter",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/scripts/release.py#L58-L73 | train |
iotile/coretools | scripts/release.py | build_component | def build_component(component):
"""Create an sdist and a wheel for the desired component"""
comp = comp_names[component]
curr = os.getcwd()
os.chdir(comp.path)
args = ['-q', 'clean', 'sdist', 'bdist_wheel']
if comp.compat == 'universal':
args.append('--universal')
try:
setuptools.sandbox.run_setup('setup.py', args)
finally:
os.chdir(curr) | python | def build_component(component):
"""Create an sdist and a wheel for the desired component"""
comp = comp_names[component]
curr = os.getcwd()
os.chdir(comp.path)
args = ['-q', 'clean', 'sdist', 'bdist_wheel']
if comp.compat == 'universal':
args.append('--universal')
try:
setuptools.sandbox.run_setup('setup.py', args)
finally:
os.chdir(curr) | [
"def",
"build_component",
"(",
"component",
")",
":",
"comp",
"=",
"comp_names",
"[",
"component",
"]",
"curr",
"=",
"os",
".",
"getcwd",
"(",
")",
"os",
".",
"chdir",
"(",
"comp",
".",
"path",
")",
"args",
"=",
"[",
"'-q'",
",",
"'clean'",
",",
"'... | Create an sdist and a wheel for the desired component | [
"Create",
"an",
"sdist",
"and",
"a",
"wheel",
"for",
"the",
"desired",
"component"
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/scripts/release.py#L90-L105 | train |
iotile/coretools | iotilecore/iotile/core/utilities/gid.py | uuid_to_slug | def uuid_to_slug(uuid):
"""
Return IOTile Cloud compatible Device Slug
:param uuid: UUID
:return: string in the form of d--0000-0000-0000-0001
"""
if not isinstance(uuid, int):
raise ArgumentError("Invalid id that is not an integer", id=uuid)
if uuid < 0 or uuid > 0x7fffffff:
# For now, limiting support to a signed integer (which on some platforms, can be 32bits)
raise ArgumentError("Integer should be a positive number and smaller than 0x7fffffff", id=uuid)
return '--'.join(['d', int64gid(uuid)]) | python | def uuid_to_slug(uuid):
"""
Return IOTile Cloud compatible Device Slug
:param uuid: UUID
:return: string in the form of d--0000-0000-0000-0001
"""
if not isinstance(uuid, int):
raise ArgumentError("Invalid id that is not an integer", id=uuid)
if uuid < 0 or uuid > 0x7fffffff:
# For now, limiting support to a signed integer (which on some platforms, can be 32bits)
raise ArgumentError("Integer should be a positive number and smaller than 0x7fffffff", id=uuid)
return '--'.join(['d', int64gid(uuid)]) | [
"def",
"uuid_to_slug",
"(",
"uuid",
")",
":",
"if",
"not",
"isinstance",
"(",
"uuid",
",",
"int",
")",
":",
"raise",
"ArgumentError",
"(",
"\"Invalid id that is not an integer\"",
",",
"id",
"=",
"uuid",
")",
"if",
"uuid",
"<",
"0",
"or",
"uuid",
">",
"0... | Return IOTile Cloud compatible Device Slug
:param uuid: UUID
:return: string in the form of d--0000-0000-0000-0001 | [
"Return",
"IOTile",
"Cloud",
"compatible",
"Device",
"Slug"
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilecore/iotile/core/utilities/gid.py#L7-L21 | train |
iotile/coretools | iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/packaging/ipk.py | package | def package(env, target, source, PACKAGEROOT, NAME, VERSION, DESCRIPTION,
SUMMARY, X_IPK_PRIORITY, X_IPK_SECTION, SOURCE_URL,
X_IPK_MAINTAINER, X_IPK_DEPENDS, **kw):
""" This function prepares the packageroot directory for packaging with the
ipkg builder.
"""
SCons.Tool.Tool('ipkg').generate(env)
# setup the Ipkg builder
bld = env['BUILDERS']['Ipkg']
target, source = stripinstallbuilder(target, source, env)
target, source = putintopackageroot(target, source, env, PACKAGEROOT)
# This should be overrideable from the construction environment,
# which it is by using ARCHITECTURE=.
# Guessing based on what os.uname() returns at least allows it
# to work for both i386 and x86_64 Linux systems.
archmap = {
'i686' : 'i386',
'i586' : 'i386',
'i486' : 'i386',
}
buildarchitecture = os.uname()[4]
buildarchitecture = archmap.get(buildarchitecture, buildarchitecture)
if 'ARCHITECTURE' in kw:
buildarchitecture = kw['ARCHITECTURE']
# setup the kw to contain the mandatory arguments to this function.
# do this before calling any builder or setup function
loc=locals()
del loc['kw']
kw.update(loc)
del kw['source'], kw['target'], kw['env']
# generate the specfile
specfile = gen_ipk_dir(PACKAGEROOT, source, env, kw)
# override the default target.
if str(target[0])=="%s-%s"%(NAME, VERSION):
target=[ "%s_%s_%s.ipk"%(NAME, VERSION, buildarchitecture) ]
# now apply the Ipkg builder
return bld(env, target, specfile, **kw) | python | def package(env, target, source, PACKAGEROOT, NAME, VERSION, DESCRIPTION,
SUMMARY, X_IPK_PRIORITY, X_IPK_SECTION, SOURCE_URL,
X_IPK_MAINTAINER, X_IPK_DEPENDS, **kw):
""" This function prepares the packageroot directory for packaging with the
ipkg builder.
"""
SCons.Tool.Tool('ipkg').generate(env)
# setup the Ipkg builder
bld = env['BUILDERS']['Ipkg']
target, source = stripinstallbuilder(target, source, env)
target, source = putintopackageroot(target, source, env, PACKAGEROOT)
# This should be overrideable from the construction environment,
# which it is by using ARCHITECTURE=.
# Guessing based on what os.uname() returns at least allows it
# to work for both i386 and x86_64 Linux systems.
archmap = {
'i686' : 'i386',
'i586' : 'i386',
'i486' : 'i386',
}
buildarchitecture = os.uname()[4]
buildarchitecture = archmap.get(buildarchitecture, buildarchitecture)
if 'ARCHITECTURE' in kw:
buildarchitecture = kw['ARCHITECTURE']
# setup the kw to contain the mandatory arguments to this function.
# do this before calling any builder or setup function
loc=locals()
del loc['kw']
kw.update(loc)
del kw['source'], kw['target'], kw['env']
# generate the specfile
specfile = gen_ipk_dir(PACKAGEROOT, source, env, kw)
# override the default target.
if str(target[0])=="%s-%s"%(NAME, VERSION):
target=[ "%s_%s_%s.ipk"%(NAME, VERSION, buildarchitecture) ]
# now apply the Ipkg builder
return bld(env, target, specfile, **kw) | [
"def",
"package",
"(",
"env",
",",
"target",
",",
"source",
",",
"PACKAGEROOT",
",",
"NAME",
",",
"VERSION",
",",
"DESCRIPTION",
",",
"SUMMARY",
",",
"X_IPK_PRIORITY",
",",
"X_IPK_SECTION",
",",
"SOURCE_URL",
",",
"X_IPK_MAINTAINER",
",",
"X_IPK_DEPENDS",
",",... | This function prepares the packageroot directory for packaging with the
ipkg builder. | [
"This",
"function",
"prepares",
"the",
"packageroot",
"directory",
"for",
"packaging",
"with",
"the",
"ipkg",
"builder",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/packaging/ipk.py#L35-L79 | train |
iotile/coretools | iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/packaging/ipk.py | build_specfiles | def build_specfiles(source, target, env):
""" Filter the targets for the needed files and use the variables in env
to create the specfile.
"""
#
# At first we care for the CONTROL/control file, which is the main file for ipk.
#
# For this we need to open multiple files in random order, so we store into
# a dict so they can be easily accessed.
#
#
opened_files={}
def open_file(needle, haystack):
try:
return opened_files[needle]
except KeyError:
file=filter(lambda x: x.get_path().rfind(needle)!=-1, haystack)[0]
opened_files[needle]=open(file.get_abspath(), 'w')
return opened_files[needle]
control_file=open_file('control', target)
if 'X_IPK_DESCRIPTION' not in env:
env['X_IPK_DESCRIPTION']="%s\n %s"%(env['SUMMARY'],
env['DESCRIPTION'].replace('\n', '\n '))
content = """
Package: $NAME
Version: $VERSION
Priority: $X_IPK_PRIORITY
Section: $X_IPK_SECTION
Source: $SOURCE_URL
Architecture: $ARCHITECTURE
Maintainer: $X_IPK_MAINTAINER
Depends: $X_IPK_DEPENDS
Description: $X_IPK_DESCRIPTION
"""
control_file.write(env.subst(content))
#
# now handle the various other files, which purpose it is to set post-,
# pre-scripts and mark files as config files.
#
# We do so by filtering the source files for files which are marked with
# the "config" tag and afterwards we do the same for x_ipk_postrm,
# x_ipk_prerm, x_ipk_postinst and x_ipk_preinst tags.
#
# The first one will write the name of the file into the file
# CONTROL/configfiles, the latter add the content of the x_ipk_* variable
# into the same named file.
#
for f in [x for x in source if 'PACKAGING_CONFIG' in dir(x)]:
config=open_file('conffiles')
config.write(f.PACKAGING_INSTALL_LOCATION)
config.write('\n')
for str in 'POSTRM PRERM POSTINST PREINST'.split():
name="PACKAGING_X_IPK_%s"%str
for f in [x for x in source if name in dir(x)]:
file=open_file(name)
file.write(env[str])
#
# close all opened files
for f in list(opened_files.values()):
f.close()
# call a user specified function
if 'CHANGE_SPECFILE' in env:
content += env['CHANGE_SPECFILE'](target)
return 0 | python | def build_specfiles(source, target, env):
""" Filter the targets for the needed files and use the variables in env
to create the specfile.
"""
#
# At first we care for the CONTROL/control file, which is the main file for ipk.
#
# For this we need to open multiple files in random order, so we store into
# a dict so they can be easily accessed.
#
#
opened_files={}
def open_file(needle, haystack):
try:
return opened_files[needle]
except KeyError:
file=filter(lambda x: x.get_path().rfind(needle)!=-1, haystack)[0]
opened_files[needle]=open(file.get_abspath(), 'w')
return opened_files[needle]
control_file=open_file('control', target)
if 'X_IPK_DESCRIPTION' not in env:
env['X_IPK_DESCRIPTION']="%s\n %s"%(env['SUMMARY'],
env['DESCRIPTION'].replace('\n', '\n '))
content = """
Package: $NAME
Version: $VERSION
Priority: $X_IPK_PRIORITY
Section: $X_IPK_SECTION
Source: $SOURCE_URL
Architecture: $ARCHITECTURE
Maintainer: $X_IPK_MAINTAINER
Depends: $X_IPK_DEPENDS
Description: $X_IPK_DESCRIPTION
"""
control_file.write(env.subst(content))
#
# now handle the various other files, which purpose it is to set post-,
# pre-scripts and mark files as config files.
#
# We do so by filtering the source files for files which are marked with
# the "config" tag and afterwards we do the same for x_ipk_postrm,
# x_ipk_prerm, x_ipk_postinst and x_ipk_preinst tags.
#
# The first one will write the name of the file into the file
# CONTROL/configfiles, the latter add the content of the x_ipk_* variable
# into the same named file.
#
for f in [x for x in source if 'PACKAGING_CONFIG' in dir(x)]:
config=open_file('conffiles')
config.write(f.PACKAGING_INSTALL_LOCATION)
config.write('\n')
for str in 'POSTRM PRERM POSTINST PREINST'.split():
name="PACKAGING_X_IPK_%s"%str
for f in [x for x in source if name in dir(x)]:
file=open_file(name)
file.write(env[str])
#
# close all opened files
for f in list(opened_files.values()):
f.close()
# call a user specified function
if 'CHANGE_SPECFILE' in env:
content += env['CHANGE_SPECFILE'](target)
return 0 | [
"def",
"build_specfiles",
"(",
"source",
",",
"target",
",",
"env",
")",
":",
"opened_files",
"=",
"{",
"}",
"def",
"open_file",
"(",
"needle",
",",
"haystack",
")",
":",
"try",
":",
"return",
"opened_files",
"[",
"needle",
"]",
"except",
"KeyError",
":"... | Filter the targets for the needed files and use the variables in env
to create the specfile. | [
"Filter",
"the",
"targets",
"for",
"the",
"needed",
"files",
"and",
"use",
"the",
"variables",
"in",
"env",
"to",
"create",
"the",
"specfile",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/packaging/ipk.py#L106-L179 | train |
iotile/coretools | iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/javah.py | emit_java_headers | def emit_java_headers(target, source, env):
"""Create and return lists of Java stub header files that will
be created from a set of class files.
"""
class_suffix = env.get('JAVACLASSSUFFIX', '.class')
classdir = env.get('JAVACLASSDIR')
if not classdir:
try:
s = source[0]
except IndexError:
classdir = '.'
else:
try:
classdir = s.attributes.java_classdir
except AttributeError:
classdir = '.'
classdir = env.Dir(classdir).rdir()
if str(classdir) == '.':
c_ = None
else:
c_ = str(classdir) + os.sep
slist = []
for src in source:
try:
classname = src.attributes.java_classname
except AttributeError:
classname = str(src)
if c_ and classname[:len(c_)] == c_:
classname = classname[len(c_):]
if class_suffix and classname[-len(class_suffix):] == class_suffix:
classname = classname[:-len(class_suffix)]
classname = SCons.Tool.javac.classname(classname)
s = src.rfile()
s.attributes.java_classname = classname
slist.append(s)
s = source[0].rfile()
if not hasattr(s.attributes, 'java_classdir'):
s.attributes.java_classdir = classdir
if target[0].__class__ is SCons.Node.FS.File:
tlist = target
else:
if not isinstance(target[0], SCons.Node.FS.Dir):
target[0].__class__ = SCons.Node.FS.Dir
target[0]._morph()
tlist = []
for s in source:
fname = s.attributes.java_classname.replace('.', '_') + '.h'
t = target[0].File(fname)
t.attributes.java_lookupdir = target[0]
tlist.append(t)
return tlist, source | python | def emit_java_headers(target, source, env):
"""Create and return lists of Java stub header files that will
be created from a set of class files.
"""
class_suffix = env.get('JAVACLASSSUFFIX', '.class')
classdir = env.get('JAVACLASSDIR')
if not classdir:
try:
s = source[0]
except IndexError:
classdir = '.'
else:
try:
classdir = s.attributes.java_classdir
except AttributeError:
classdir = '.'
classdir = env.Dir(classdir).rdir()
if str(classdir) == '.':
c_ = None
else:
c_ = str(classdir) + os.sep
slist = []
for src in source:
try:
classname = src.attributes.java_classname
except AttributeError:
classname = str(src)
if c_ and classname[:len(c_)] == c_:
classname = classname[len(c_):]
if class_suffix and classname[-len(class_suffix):] == class_suffix:
classname = classname[:-len(class_suffix)]
classname = SCons.Tool.javac.classname(classname)
s = src.rfile()
s.attributes.java_classname = classname
slist.append(s)
s = source[0].rfile()
if not hasattr(s.attributes, 'java_classdir'):
s.attributes.java_classdir = classdir
if target[0].__class__ is SCons.Node.FS.File:
tlist = target
else:
if not isinstance(target[0], SCons.Node.FS.Dir):
target[0].__class__ = SCons.Node.FS.Dir
target[0]._morph()
tlist = []
for s in source:
fname = s.attributes.java_classname.replace('.', '_') + '.h'
t = target[0].File(fname)
t.attributes.java_lookupdir = target[0]
tlist.append(t)
return tlist, source | [
"def",
"emit_java_headers",
"(",
"target",
",",
"source",
",",
"env",
")",
":",
"class_suffix",
"=",
"env",
".",
"get",
"(",
"'JAVACLASSSUFFIX'",
",",
"'.class'",
")",
"classdir",
"=",
"env",
".",
"get",
"(",
"'JAVACLASSDIR'",
")",
"if",
"not",
"classdir",... | Create and return lists of Java stub header files that will
be created from a set of class files. | [
"Create",
"and",
"return",
"lists",
"of",
"Java",
"stub",
"header",
"files",
"that",
"will",
"be",
"created",
"from",
"a",
"set",
"of",
"class",
"files",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/javah.py#L44-L100 | train |
iotile/coretools | iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/javah.py | generate | def generate(env):
"""Add Builders and construction variables for javah to an Environment."""
java_javah = SCons.Tool.CreateJavaHBuilder(env)
java_javah.emitter = emit_java_headers
env['_JAVAHOUTFLAG'] = JavaHOutFlagGenerator
env['JAVAH'] = 'javah'
env['JAVAHFLAGS'] = SCons.Util.CLVar('')
env['_JAVAHCLASSPATH'] = getJavaHClassPath
env['JAVAHCOM'] = '$JAVAH $JAVAHFLAGS $_JAVAHOUTFLAG $_JAVAHCLASSPATH ${SOURCES.attributes.java_classname}'
env['JAVACLASSSUFFIX'] = '.class' | python | def generate(env):
"""Add Builders and construction variables for javah to an Environment."""
java_javah = SCons.Tool.CreateJavaHBuilder(env)
java_javah.emitter = emit_java_headers
env['_JAVAHOUTFLAG'] = JavaHOutFlagGenerator
env['JAVAH'] = 'javah'
env['JAVAHFLAGS'] = SCons.Util.CLVar('')
env['_JAVAHCLASSPATH'] = getJavaHClassPath
env['JAVAHCOM'] = '$JAVAH $JAVAHFLAGS $_JAVAHOUTFLAG $_JAVAHCLASSPATH ${SOURCES.attributes.java_classname}'
env['JAVACLASSSUFFIX'] = '.class' | [
"def",
"generate",
"(",
"env",
")",
":",
"java_javah",
"=",
"SCons",
".",
"Tool",
".",
"CreateJavaHBuilder",
"(",
"env",
")",
"java_javah",
".",
"emitter",
"=",
"emit_java_headers",
"env",
"[",
"'_JAVAHOUTFLAG'",
"]",
"=",
"JavaHOutFlagGenerator",
"env",
"[",
... | Add Builders and construction variables for javah to an Environment. | [
"Add",
"Builders",
"and",
"construction",
"variables",
"for",
"javah",
"to",
"an",
"Environment",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/javah.py#L118-L128 | train |
iotile/coretools | iotilesensorgraph/iotile/sg/engine/in_memory.py | InMemoryStorageEngine.dump | def dump(self):
"""Serialize the state of this InMemoryStorageEngine to a dict.
Returns:
dict: The serialized data.
"""
return {
u'storage_data': [x.asdict() for x in self.storage_data],
u'streaming_data': [x.asdict() for x in self.streaming_data]
} | python | def dump(self):
"""Serialize the state of this InMemoryStorageEngine to a dict.
Returns:
dict: The serialized data.
"""
return {
u'storage_data': [x.asdict() for x in self.storage_data],
u'streaming_data': [x.asdict() for x in self.streaming_data]
} | [
"def",
"dump",
"(",
"self",
")",
":",
"return",
"{",
"u'storage_data'",
":",
"[",
"x",
".",
"asdict",
"(",
")",
"for",
"x",
"in",
"self",
".",
"storage_data",
"]",
",",
"u'streaming_data'",
":",
"[",
"x",
".",
"asdict",
"(",
")",
"for",
"x",
"in",
... | Serialize the state of this InMemoryStorageEngine to a dict.
Returns:
dict: The serialized data. | [
"Serialize",
"the",
"state",
"of",
"this",
"InMemoryStorageEngine",
"to",
"a",
"dict",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilesensorgraph/iotile/sg/engine/in_memory.py#L26-L36 | train |
iotile/coretools | iotilesensorgraph/iotile/sg/engine/in_memory.py | InMemoryStorageEngine.restore | def restore(self, state):
"""Restore the state of this InMemoryStorageEngine from a dict."""
storage_data = state.get(u'storage_data', [])
streaming_data = state.get(u'streaming_data', [])
if len(storage_data) > self.storage_length or len(streaming_data) > self.streaming_length:
raise ArgumentError("Cannot restore InMemoryStorageEngine, too many readings",
storage_size=len(storage_data), storage_max=self.storage_length,
streaming_size=len(streaming_data), streaming_max=self.streaming_length)
self.storage_data = [IOTileReading.FromDict(x) for x in storage_data]
self.streaming_data = [IOTileReading.FromDict(x) for x in streaming_data] | python | def restore(self, state):
"""Restore the state of this InMemoryStorageEngine from a dict."""
storage_data = state.get(u'storage_data', [])
streaming_data = state.get(u'streaming_data', [])
if len(storage_data) > self.storage_length or len(streaming_data) > self.streaming_length:
raise ArgumentError("Cannot restore InMemoryStorageEngine, too many readings",
storage_size=len(storage_data), storage_max=self.storage_length,
streaming_size=len(streaming_data), streaming_max=self.streaming_length)
self.storage_data = [IOTileReading.FromDict(x) for x in storage_data]
self.streaming_data = [IOTileReading.FromDict(x) for x in streaming_data] | [
"def",
"restore",
"(",
"self",
",",
"state",
")",
":",
"storage_data",
"=",
"state",
".",
"get",
"(",
"u'storage_data'",
",",
"[",
"]",
")",
"streaming_data",
"=",
"state",
".",
"get",
"(",
"u'streaming_data'",
",",
"[",
"]",
")",
"if",
"len",
"(",
"... | Restore the state of this InMemoryStorageEngine from a dict. | [
"Restore",
"the",
"state",
"of",
"this",
"InMemoryStorageEngine",
"from",
"a",
"dict",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilesensorgraph/iotile/sg/engine/in_memory.py#L38-L50 | train |
iotile/coretools | iotilesensorgraph/iotile/sg/engine/in_memory.py | InMemoryStorageEngine.count_matching | def count_matching(self, selector, offset=0):
"""Count the number of readings matching selector.
Args:
selector (DataStreamSelector): The selector that we want to
count matching readings for.
offset (int): The starting offset that we should begin counting at.
Returns:
int: The number of matching readings.
"""
if selector.output:
data = self.streaming_data
elif selector.buffered:
data = self.storage_data
else:
raise ArgumentError("You can only pass a buffered selector to count_matching", selector=selector)
count = 0
for i in range(offset, len(data)):
reading = data[i]
stream = DataStream.FromEncoded(reading.stream)
if selector.matches(stream):
count += 1
return count | python | def count_matching(self, selector, offset=0):
"""Count the number of readings matching selector.
Args:
selector (DataStreamSelector): The selector that we want to
count matching readings for.
offset (int): The starting offset that we should begin counting at.
Returns:
int: The number of matching readings.
"""
if selector.output:
data = self.streaming_data
elif selector.buffered:
data = self.storage_data
else:
raise ArgumentError("You can only pass a buffered selector to count_matching", selector=selector)
count = 0
for i in range(offset, len(data)):
reading = data[i]
stream = DataStream.FromEncoded(reading.stream)
if selector.matches(stream):
count += 1
return count | [
"def",
"count_matching",
"(",
"self",
",",
"selector",
",",
"offset",
"=",
"0",
")",
":",
"if",
"selector",
".",
"output",
":",
"data",
"=",
"self",
".",
"streaming_data",
"elif",
"selector",
".",
"buffered",
":",
"data",
"=",
"self",
".",
"storage_data"... | Count the number of readings matching selector.
Args:
selector (DataStreamSelector): The selector that we want to
count matching readings for.
offset (int): The starting offset that we should begin counting at.
Returns:
int: The number of matching readings. | [
"Count",
"the",
"number",
"of",
"readings",
"matching",
"selector",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilesensorgraph/iotile/sg/engine/in_memory.py#L61-L88 | train |
iotile/coretools | iotilesensorgraph/iotile/sg/engine/in_memory.py | InMemoryStorageEngine.scan_storage | def scan_storage(self, area_name, callable, start=0, stop=None):
"""Iterate over streaming or storage areas, calling callable.
Args:
area_name (str): Either 'storage' or 'streaming' to indicate which
storage area to scan.
callable (callable): A function that will be called as (offset, reading)
for each reading between start_offset and end_offset (inclusive). If
the scan function wants to stop early it can return True. If it returns
anything else (including False or None), scanning will continue.
start (int): Optional offset to start at (included in scan).
stop (int): Optional offset to end at (included in scan).
Returns:
int: The number of entries scanned.
"""
if area_name == u'storage':
data = self.storage_data
elif area_name == u'streaming':
data = self.streaming_data
else:
raise ArgumentError("Unknown area name in scan_storage (%s) should be storage or streaming" % area_name)
if len(data) == 0:
return 0
if stop is None:
stop = len(data) - 1
elif stop >= len(data):
raise ArgumentError("Given stop offset is greater than the highest offset supported", length=len(data), stop_offset=stop)
scanned = 0
for i in range(start, stop + 1):
scanned += 1
should_break = callable(i, data[i])
if should_break is True:
break
return scanned | python | def scan_storage(self, area_name, callable, start=0, stop=None):
"""Iterate over streaming or storage areas, calling callable.
Args:
area_name (str): Either 'storage' or 'streaming' to indicate which
storage area to scan.
callable (callable): A function that will be called as (offset, reading)
for each reading between start_offset and end_offset (inclusive). If
the scan function wants to stop early it can return True. If it returns
anything else (including False or None), scanning will continue.
start (int): Optional offset to start at (included in scan).
stop (int): Optional offset to end at (included in scan).
Returns:
int: The number of entries scanned.
"""
if area_name == u'storage':
data = self.storage_data
elif area_name == u'streaming':
data = self.streaming_data
else:
raise ArgumentError("Unknown area name in scan_storage (%s) should be storage or streaming" % area_name)
if len(data) == 0:
return 0
if stop is None:
stop = len(data) - 1
elif stop >= len(data):
raise ArgumentError("Given stop offset is greater than the highest offset supported", length=len(data), stop_offset=stop)
scanned = 0
for i in range(start, stop + 1):
scanned += 1
should_break = callable(i, data[i])
if should_break is True:
break
return scanned | [
"def",
"scan_storage",
"(",
"self",
",",
"area_name",
",",
"callable",
",",
"start",
"=",
"0",
",",
"stop",
"=",
"None",
")",
":",
"if",
"area_name",
"==",
"u'storage'",
":",
"data",
"=",
"self",
".",
"storage_data",
"elif",
"area_name",
"==",
"u'streami... | Iterate over streaming or storage areas, calling callable.
Args:
area_name (str): Either 'storage' or 'streaming' to indicate which
storage area to scan.
callable (callable): A function that will be called as (offset, reading)
for each reading between start_offset and end_offset (inclusive). If
the scan function wants to stop early it can return True. If it returns
anything else (including False or None), scanning will continue.
start (int): Optional offset to start at (included in scan).
stop (int): Optional offset to end at (included in scan).
Returns:
int: The number of entries scanned. | [
"Iterate",
"over",
"streaming",
"or",
"storage",
"areas",
"calling",
"callable",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilesensorgraph/iotile/sg/engine/in_memory.py#L90-L130 | train |
iotile/coretools | iotilesensorgraph/iotile/sg/engine/in_memory.py | InMemoryStorageEngine.push | def push(self, value):
"""Store a new value for the given stream.
Args:
value (IOTileReading): The value to store. The stream
parameter must have the correct value
"""
stream = DataStream.FromEncoded(value.stream)
if stream.stream_type == DataStream.OutputType:
if len(self.streaming_data) == self.streaming_length:
raise StorageFullError('Streaming buffer full')
self.streaming_data.append(value)
else:
if len(self.storage_data) == self.storage_length:
raise StorageFullError('Storage buffer full')
self.storage_data.append(value) | python | def push(self, value):
"""Store a new value for the given stream.
Args:
value (IOTileReading): The value to store. The stream
parameter must have the correct value
"""
stream = DataStream.FromEncoded(value.stream)
if stream.stream_type == DataStream.OutputType:
if len(self.streaming_data) == self.streaming_length:
raise StorageFullError('Streaming buffer full')
self.streaming_data.append(value)
else:
if len(self.storage_data) == self.storage_length:
raise StorageFullError('Storage buffer full')
self.storage_data.append(value) | [
"def",
"push",
"(",
"self",
",",
"value",
")",
":",
"stream",
"=",
"DataStream",
".",
"FromEncoded",
"(",
"value",
".",
"stream",
")",
"if",
"stream",
".",
"stream_type",
"==",
"DataStream",
".",
"OutputType",
":",
"if",
"len",
"(",
"self",
".",
"strea... | Store a new value for the given stream.
Args:
value (IOTileReading): The value to store. The stream
parameter must have the correct value | [
"Store",
"a",
"new",
"value",
"for",
"the",
"given",
"stream",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilesensorgraph/iotile/sg/engine/in_memory.py#L138-L157 | train |
iotile/coretools | iotilesensorgraph/iotile/sg/engine/in_memory.py | InMemoryStorageEngine.get | def get(self, buffer_type, offset):
"""Get a reading from the buffer at offset.
Offset is specified relative to the start of the data buffer.
This means that if the buffer rolls over, the offset for a given
item will appear to change. Anyone holding an offset outside of this
engine object will need to be notified when rollovers happen (i.e.
popn is called so that they can update their offset indices)
Args:
buffer_type (str): The buffer to pop from (either u"storage" or u"streaming")
offset (int): The offset of the reading to get
"""
if buffer_type == u'streaming':
chosen_buffer = self.streaming_data
else:
chosen_buffer = self.storage_data
if offset >= len(chosen_buffer):
raise StreamEmptyError("Invalid index given in get command", requested=offset, stored=len(chosen_buffer), buffer=buffer_type)
return chosen_buffer[offset] | python | def get(self, buffer_type, offset):
"""Get a reading from the buffer at offset.
Offset is specified relative to the start of the data buffer.
This means that if the buffer rolls over, the offset for a given
item will appear to change. Anyone holding an offset outside of this
engine object will need to be notified when rollovers happen (i.e.
popn is called so that they can update their offset indices)
Args:
buffer_type (str): The buffer to pop from (either u"storage" or u"streaming")
offset (int): The offset of the reading to get
"""
if buffer_type == u'streaming':
chosen_buffer = self.streaming_data
else:
chosen_buffer = self.storage_data
if offset >= len(chosen_buffer):
raise StreamEmptyError("Invalid index given in get command", requested=offset, stored=len(chosen_buffer), buffer=buffer_type)
return chosen_buffer[offset] | [
"def",
"get",
"(",
"self",
",",
"buffer_type",
",",
"offset",
")",
":",
"if",
"buffer_type",
"==",
"u'streaming'",
":",
"chosen_buffer",
"=",
"self",
".",
"streaming_data",
"else",
":",
"chosen_buffer",
"=",
"self",
".",
"storage_data",
"if",
"offset",
">=",... | Get a reading from the buffer at offset.
Offset is specified relative to the start of the data buffer.
This means that if the buffer rolls over, the offset for a given
item will appear to change. Anyone holding an offset outside of this
engine object will need to be notified when rollovers happen (i.e.
popn is called so that they can update their offset indices)
Args:
buffer_type (str): The buffer to pop from (either u"storage" or u"streaming")
offset (int): The offset of the reading to get | [
"Get",
"a",
"reading",
"from",
"the",
"buffer",
"at",
"offset",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilesensorgraph/iotile/sg/engine/in_memory.py#L159-L181 | train |
iotile/coretools | iotilesensorgraph/iotile/sg/engine/in_memory.py | InMemoryStorageEngine.popn | def popn(self, buffer_type, count):
"""Remove and return the oldest count values from the named buffer
Args:
buffer_type (str): The buffer to pop from (either u"storage" or u"streaming")
count (int): The number of readings to pop
Returns:
list(IOTileReading): The values popped from the buffer
"""
buffer_type = str(buffer_type)
if buffer_type == u'streaming':
chosen_buffer = self.streaming_data
else:
chosen_buffer = self.storage_data
if count > len(chosen_buffer):
raise StreamEmptyError("Not enough data in buffer for popn command", requested=count, stored=len(chosen_buffer), buffer=buffer_type)
popped = chosen_buffer[:count]
remaining = chosen_buffer[count:]
if buffer_type == u'streaming':
self.streaming_data = remaining
else:
self.storage_data = remaining
return popped | python | def popn(self, buffer_type, count):
"""Remove and return the oldest count values from the named buffer
Args:
buffer_type (str): The buffer to pop from (either u"storage" or u"streaming")
count (int): The number of readings to pop
Returns:
list(IOTileReading): The values popped from the buffer
"""
buffer_type = str(buffer_type)
if buffer_type == u'streaming':
chosen_buffer = self.streaming_data
else:
chosen_buffer = self.storage_data
if count > len(chosen_buffer):
raise StreamEmptyError("Not enough data in buffer for popn command", requested=count, stored=len(chosen_buffer), buffer=buffer_type)
popped = chosen_buffer[:count]
remaining = chosen_buffer[count:]
if buffer_type == u'streaming':
self.streaming_data = remaining
else:
self.storage_data = remaining
return popped | [
"def",
"popn",
"(",
"self",
",",
"buffer_type",
",",
"count",
")",
":",
"buffer_type",
"=",
"str",
"(",
"buffer_type",
")",
"if",
"buffer_type",
"==",
"u'streaming'",
":",
"chosen_buffer",
"=",
"self",
".",
"streaming_data",
"else",
":",
"chosen_buffer",
"="... | Remove and return the oldest count values from the named buffer
Args:
buffer_type (str): The buffer to pop from (either u"storage" or u"streaming")
count (int): The number of readings to pop
Returns:
list(IOTileReading): The values popped from the buffer | [
"Remove",
"and",
"return",
"the",
"oldest",
"count",
"values",
"from",
"the",
"named",
"buffer"
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilesensorgraph/iotile/sg/engine/in_memory.py#L183-L212 | train |
iotile/coretools | transport_plugins/websocket/iotile_transport_websocket/device_adapter.py | WebSocketDeviceAdapter.send_script | async def send_script(self, conn_id, data):
"""Send a a script to this IOTile device
Args:
conn_id (int): A unique identifier that will refer to this connection
data (bytes): the script to send to the device
"""
self._ensure_connection(conn_id, True)
connection_string = self._get_property(conn_id, "connection_string")
msg = dict(connection_string=connection_string, fragment_count=1, fragment_index=0,
script=base64.b64encode(data))
await self._send_command(OPERATIONS.SEND_SCRIPT, msg, COMMANDS.SendScriptResponse) | python | async def send_script(self, conn_id, data):
"""Send a a script to this IOTile device
Args:
conn_id (int): A unique identifier that will refer to this connection
data (bytes): the script to send to the device
"""
self._ensure_connection(conn_id, True)
connection_string = self._get_property(conn_id, "connection_string")
msg = dict(connection_string=connection_string, fragment_count=1, fragment_index=0,
script=base64.b64encode(data))
await self._send_command(OPERATIONS.SEND_SCRIPT, msg, COMMANDS.SendScriptResponse) | [
"async",
"def",
"send_script",
"(",
"self",
",",
"conn_id",
",",
"data",
")",
":",
"self",
".",
"_ensure_connection",
"(",
"conn_id",
",",
"True",
")",
"connection_string",
"=",
"self",
".",
"_get_property",
"(",
"conn_id",
",",
"\"connection_string\"",
")",
... | Send a a script to this IOTile device
Args:
conn_id (int): A unique identifier that will refer to this connection
data (bytes): the script to send to the device | [
"Send",
"a",
"a",
"script",
"to",
"this",
"IOTile",
"device"
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/transport_plugins/websocket/iotile_transport_websocket/device_adapter.py#L154-L167 | train |
iotile/coretools | transport_plugins/websocket/iotile_transport_websocket/device_adapter.py | WebSocketDeviceAdapter._on_report_notification | async def _on_report_notification(self, event):
"""Callback function called when a report event is received.
Args:
event (dict): The report_event
"""
conn_string = event.get('connection_string')
report = self._report_parser.deserialize_report(event.get('serialized_report'))
self.notify_event(conn_string, 'report', report) | python | async def _on_report_notification(self, event):
"""Callback function called when a report event is received.
Args:
event (dict): The report_event
"""
conn_string = event.get('connection_string')
report = self._report_parser.deserialize_report(event.get('serialized_report'))
self.notify_event(conn_string, 'report', report) | [
"async",
"def",
"_on_report_notification",
"(",
"self",
",",
"event",
")",
":",
"conn_string",
"=",
"event",
".",
"get",
"(",
"'connection_string'",
")",
"report",
"=",
"self",
".",
"_report_parser",
".",
"deserialize_report",
"(",
"event",
".",
"get",
"(",
... | Callback function called when a report event is received.
Args:
event (dict): The report_event | [
"Callback",
"function",
"called",
"when",
"a",
"report",
"event",
"is",
"received",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/transport_plugins/websocket/iotile_transport_websocket/device_adapter.py#L178-L188 | train |
iotile/coretools | transport_plugins/websocket/iotile_transport_websocket/device_adapter.py | WebSocketDeviceAdapter._on_trace_notification | async def _on_trace_notification(self, trace_event):
"""Callback function called when a trace chunk is received.
Args:
trace_chunk (dict): The received trace chunk information
"""
conn_string = trace_event.get('connection_string')
payload = trace_event.get('payload')
await self.notify_event(conn_string, 'trace', payload) | python | async def _on_trace_notification(self, trace_event):
"""Callback function called when a trace chunk is received.
Args:
trace_chunk (dict): The received trace chunk information
"""
conn_string = trace_event.get('connection_string')
payload = trace_event.get('payload')
await self.notify_event(conn_string, 'trace', payload) | [
"async",
"def",
"_on_trace_notification",
"(",
"self",
",",
"trace_event",
")",
":",
"conn_string",
"=",
"trace_event",
".",
"get",
"(",
"'connection_string'",
")",
"payload",
"=",
"trace_event",
".",
"get",
"(",
"'payload'",
")",
"await",
"self",
".",
"notify... | Callback function called when a trace chunk is received.
Args:
trace_chunk (dict): The received trace chunk information | [
"Callback",
"function",
"called",
"when",
"a",
"trace",
"chunk",
"is",
"received",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/transport_plugins/websocket/iotile_transport_websocket/device_adapter.py#L202-L212 | train |
iotile/coretools | transport_plugins/websocket/iotile_transport_websocket/device_adapter.py | WebSocketDeviceAdapter._on_progress_notification | async def _on_progress_notification(self, progress):
"""Callback function called when a progress notification is received.
Args:
progress (dict): The received notification containing the progress information
"""
conn_string = progress.get('connection_string')
done = progress.get('done_count')
total = progress.get('total_count')
operation = progress.get('operation')
await self.notify_progress(conn_string, operation, done, total, wait=True) | python | async def _on_progress_notification(self, progress):
"""Callback function called when a progress notification is received.
Args:
progress (dict): The received notification containing the progress information
"""
conn_string = progress.get('connection_string')
done = progress.get('done_count')
total = progress.get('total_count')
operation = progress.get('operation')
await self.notify_progress(conn_string, operation, done, total, wait=True) | [
"async",
"def",
"_on_progress_notification",
"(",
"self",
",",
"progress",
")",
":",
"conn_string",
"=",
"progress",
".",
"get",
"(",
"'connection_string'",
")",
"done",
"=",
"progress",
".",
"get",
"(",
"'done_count'",
")",
"total",
"=",
"progress",
".",
"g... | Callback function called when a progress notification is received.
Args:
progress (dict): The received notification containing the progress information | [
"Callback",
"function",
"called",
"when",
"a",
"progress",
"notification",
"is",
"received",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/transport_plugins/websocket/iotile_transport_websocket/device_adapter.py#L214-L226 | train |
iotile/coretools | iotileship/iotile/ship/recipe.py | _extract_variables | def _extract_variables(param):
"""Find all template variables in args."""
variables = set()
if isinstance(param, list):
variables.update(*[_extract_variables(x) for x in param])
elif isinstance(param, dict):
variables.update(*[_extract_variables(x) for x in param.values()])
elif isinstance(param, str):
for match in re.finditer(TEMPLATE_REGEX, param):
if match.group('short_id') is not None:
variables.add(match.group('short_id'))
else:
variables.add(match.group('long_id'))
return variables | python | def _extract_variables(param):
"""Find all template variables in args."""
variables = set()
if isinstance(param, list):
variables.update(*[_extract_variables(x) for x in param])
elif isinstance(param, dict):
variables.update(*[_extract_variables(x) for x in param.values()])
elif isinstance(param, str):
for match in re.finditer(TEMPLATE_REGEX, param):
if match.group('short_id') is not None:
variables.add(match.group('short_id'))
else:
variables.add(match.group('long_id'))
return variables | [
"def",
"_extract_variables",
"(",
"param",
")",
":",
"variables",
"=",
"set",
"(",
")",
"if",
"isinstance",
"(",
"param",
",",
"list",
")",
":",
"variables",
".",
"update",
"(",
"*",
"[",
"_extract_variables",
"(",
"x",
")",
"for",
"x",
"in",
"param",
... | Find all template variables in args. | [
"Find",
"all",
"template",
"variables",
"in",
"args",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotileship/iotile/ship/recipe.py#L506-L522 | train |
iotile/coretools | iotileship/iotile/ship/recipe.py | _run_step | def _run_step(step_obj, step_declaration, initialized_resources):
"""Actually run a step."""
start_time = time.time()
# Open any resources that need to be opened before we run this step
for res_name in step_declaration.resources.opened:
initialized_resources[res_name].open()
# Create a dictionary of all of the resources that are required for this step
used_resources = {local_name: initialized_resources[global_name] for local_name, global_name in step_declaration.resources.used.items()}
# Allow steps with no resources to not need a resources keyword parameter
if len(used_resources) > 0:
out = step_obj.run(resources=used_resources)
else:
out = step_obj.run()
# Close any resources that need to be closed before we run this step
for res_name in step_declaration.resources.closed:
initialized_resources[res_name].close()
end_time = time.time()
return (end_time - start_time, out) | python | def _run_step(step_obj, step_declaration, initialized_resources):
"""Actually run a step."""
start_time = time.time()
# Open any resources that need to be opened before we run this step
for res_name in step_declaration.resources.opened:
initialized_resources[res_name].open()
# Create a dictionary of all of the resources that are required for this step
used_resources = {local_name: initialized_resources[global_name] for local_name, global_name in step_declaration.resources.used.items()}
# Allow steps with no resources to not need a resources keyword parameter
if len(used_resources) > 0:
out = step_obj.run(resources=used_resources)
else:
out = step_obj.run()
# Close any resources that need to be closed before we run this step
for res_name in step_declaration.resources.closed:
initialized_resources[res_name].close()
end_time = time.time()
return (end_time - start_time, out) | [
"def",
"_run_step",
"(",
"step_obj",
",",
"step_declaration",
",",
"initialized_resources",
")",
":",
"start_time",
"=",
"time",
".",
"time",
"(",
")",
"for",
"res_name",
"in",
"step_declaration",
".",
"resources",
".",
"opened",
":",
"initialized_resources",
"[... | Actually run a step. | [
"Actually",
"run",
"a",
"step",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotileship/iotile/ship/recipe.py#L525-L549 | train |
iotile/coretools | iotileship/iotile/ship/recipe.py | RecipeObject.archive | def archive(self, output_path):
"""Archive this recipe and all associated files into a .ship archive.
Args:
output_path (str): The path where the .ship file should be saved.
"""
if self.path is None:
raise ArgumentError("Cannot archive a recipe yet without a reference to its original yaml file in self.path")
outfile = zipfile.ZipFile(output_path, 'w', zipfile.ZIP_DEFLATED)
outfile.write(self.path, arcname="recipe_script.yaml")
written_files = set()
for _factory, args, _resources, files in self.steps:
for arg_name in files:
file_path = args[arg_name]
if file_path in written_files:
continue
if os.path.basename(file_path) != file_path:
raise ArgumentError("Cannot archive a recipe yet that references file not in the same directory as the recipe")
full_path = os.path.join(os.path.dirname(self.path), file_path)
outfile.write(full_path, arcname=file_path)
written_files.add(file_path) | python | def archive(self, output_path):
"""Archive this recipe and all associated files into a .ship archive.
Args:
output_path (str): The path where the .ship file should be saved.
"""
if self.path is None:
raise ArgumentError("Cannot archive a recipe yet without a reference to its original yaml file in self.path")
outfile = zipfile.ZipFile(output_path, 'w', zipfile.ZIP_DEFLATED)
outfile.write(self.path, arcname="recipe_script.yaml")
written_files = set()
for _factory, args, _resources, files in self.steps:
for arg_name in files:
file_path = args[arg_name]
if file_path in written_files:
continue
if os.path.basename(file_path) != file_path:
raise ArgumentError("Cannot archive a recipe yet that references file not in the same directory as the recipe")
full_path = os.path.join(os.path.dirname(self.path), file_path)
outfile.write(full_path, arcname=file_path)
written_files.add(file_path) | [
"def",
"archive",
"(",
"self",
",",
"output_path",
")",
":",
"if",
"self",
".",
"path",
"is",
"None",
":",
"raise",
"ArgumentError",
"(",
"\"Cannot archive a recipe yet without a reference to its original yaml file in self.path\"",
")",
"outfile",
"=",
"zipfile",
".",
... | Archive this recipe and all associated files into a .ship archive.
Args:
output_path (str): The path where the .ship file should be saved. | [
"Archive",
"this",
"recipe",
"and",
"all",
"associated",
"files",
"into",
"a",
".",
"ship",
"archive",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotileship/iotile/ship/recipe.py#L86-L114 | train |
iotile/coretools | iotileship/iotile/ship/recipe.py | RecipeObject.FromArchive | def FromArchive(cls, path, actions_dict, resources_dict, temp_dir=None):
"""Create a RecipeObject from a .ship archive.
This archive should have been generated from a previous call to
iotile-ship -a <path to yaml file>
or via iotile-build using autobuild_shiparchive().
Args:
path (str): The path to the recipe file that we wish to load
actions_dict (dict): A dictionary of named RecipeActionObject
types that is used to look up all of the steps listed in
the recipe file.
resources_dict (dict): A dictionary of named RecipeResource types
that is used to look up all of the shared resources listed in
the recipe file.
file_format (str): The file format of the recipe file. Currently
we only support yaml.
temp_dir (str): An optional temporary directory where this archive
should be unpacked. Otherwise a system wide temporary directory
is used.
"""
if not path.endswith(".ship"):
raise ArgumentError("Attempted to unpack a recipe archive from a file that did not end in .ship", path=path)
name = os.path.basename(path)[:-5]
if temp_dir is None:
temp_dir = tempfile.mkdtemp()
extract_path = os.path.join(temp_dir, name)
archive = zipfile.ZipFile(path, "r")
archive.extractall(extract_path)
recipe_yaml = os.path.join(extract_path, 'recipe_script.yaml')
return cls.FromFile(recipe_yaml, actions_dict, resources_dict, name=name) | python | def FromArchive(cls, path, actions_dict, resources_dict, temp_dir=None):
"""Create a RecipeObject from a .ship archive.
This archive should have been generated from a previous call to
iotile-ship -a <path to yaml file>
or via iotile-build using autobuild_shiparchive().
Args:
path (str): The path to the recipe file that we wish to load
actions_dict (dict): A dictionary of named RecipeActionObject
types that is used to look up all of the steps listed in
the recipe file.
resources_dict (dict): A dictionary of named RecipeResource types
that is used to look up all of the shared resources listed in
the recipe file.
file_format (str): The file format of the recipe file. Currently
we only support yaml.
temp_dir (str): An optional temporary directory where this archive
should be unpacked. Otherwise a system wide temporary directory
is used.
"""
if not path.endswith(".ship"):
raise ArgumentError("Attempted to unpack a recipe archive from a file that did not end in .ship", path=path)
name = os.path.basename(path)[:-5]
if temp_dir is None:
temp_dir = tempfile.mkdtemp()
extract_path = os.path.join(temp_dir, name)
archive = zipfile.ZipFile(path, "r")
archive.extractall(extract_path)
recipe_yaml = os.path.join(extract_path, 'recipe_script.yaml')
return cls.FromFile(recipe_yaml, actions_dict, resources_dict, name=name) | [
"def",
"FromArchive",
"(",
"cls",
",",
"path",
",",
"actions_dict",
",",
"resources_dict",
",",
"temp_dir",
"=",
"None",
")",
":",
"if",
"not",
"path",
".",
"endswith",
"(",
"\".ship\"",
")",
":",
"raise",
"ArgumentError",
"(",
"\"Attempted to unpack a recipe ... | Create a RecipeObject from a .ship archive.
This archive should have been generated from a previous call to
iotile-ship -a <path to yaml file>
or via iotile-build using autobuild_shiparchive().
Args:
path (str): The path to the recipe file that we wish to load
actions_dict (dict): A dictionary of named RecipeActionObject
types that is used to look up all of the steps listed in
the recipe file.
resources_dict (dict): A dictionary of named RecipeResource types
that is used to look up all of the shared resources listed in
the recipe file.
file_format (str): The file format of the recipe file. Currently
we only support yaml.
temp_dir (str): An optional temporary directory where this archive
should be unpacked. Otherwise a system wide temporary directory
is used. | [
"Create",
"a",
"RecipeObject",
"from",
"a",
".",
"ship",
"archive",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotileship/iotile/ship/recipe.py#L117-L153 | train |
iotile/coretools | iotileship/iotile/ship/recipe.py | RecipeObject.FromFile | def FromFile(cls, path, actions_dict, resources_dict, file_format="yaml", name=None):
"""Create a RecipeObject from a file.
The file should be a specially constructed yaml file that describes
the recipe as well as the actions that it performs.
Args:
path (str): The path to the recipe file that we wish to load
actions_dict (dict): A dictionary of named RecipeActionObject
types that is used to look up all of the steps listed in
the recipe file.
resources_dict (dict): A dictionary of named RecipeResource types
that is used to look up all of the shared resources listed in
the recipe file.
file_format (str): The file format of the recipe file. Currently
we only support yaml.
name (str): The name of this recipe if we created it originally from an
archive.
"""
format_map = {
"yaml": cls._process_yaml
}
format_handler = format_map.get(file_format)
if format_handler is None:
raise ArgumentError("Unknown file format or file extension", file_format=file_format, \
known_formats=[x for x in format_map if format_map[x] is not None])
recipe_info = format_handler(path)
if name is None:
name, _ext = os.path.splitext(os.path.basename(path))
# Validate that the recipe file is correctly formatted
try:
recipe_info = RecipeSchema.verify(recipe_info)
except ValidationError as exc:
raise RecipeFileInvalid("Recipe file does not match expected schema", file=path, error_message=exc.msg, **exc.params)
description = recipe_info.get('description')
# Parse out global default and shared resource information
try:
resources = cls._parse_resource_declarations(recipe_info.get('resources', []), resources_dict)
defaults = cls._parse_variable_defaults(recipe_info.get("defaults", []))
steps = []
for i, action in enumerate(recipe_info.get('actions', [])):
action_name = action.pop('name')
if action_name is None:
raise RecipeFileInvalid("Action is missing required name parameter", \
parameters=action, path=path)
action_class = actions_dict.get(action_name)
if action_class is None:
raise UnknownRecipeActionType("Unknown step specified in recipe", \
action=action_name, step=i + 1, path=path)
# Parse out any resource usage in this step and make sure we only
# use named resources
step_resources = cls._parse_resource_usage(action, declarations=resources)
fixed_files, _variable_files = cls._parse_file_usage(action_class, action)
step = RecipeStep(action_class, action, step_resources, fixed_files)
steps.append(step)
return RecipeObject(name, description, steps, resources, defaults, path)
except RecipeFileInvalid as exc:
cls._future_raise(RecipeFileInvalid, RecipeFileInvalid(exc.msg, recipe=name, **exc.params),
sys.exc_info()[2]) | python | def FromFile(cls, path, actions_dict, resources_dict, file_format="yaml", name=None):
"""Create a RecipeObject from a file.
The file should be a specially constructed yaml file that describes
the recipe as well as the actions that it performs.
Args:
path (str): The path to the recipe file that we wish to load
actions_dict (dict): A dictionary of named RecipeActionObject
types that is used to look up all of the steps listed in
the recipe file.
resources_dict (dict): A dictionary of named RecipeResource types
that is used to look up all of the shared resources listed in
the recipe file.
file_format (str): The file format of the recipe file. Currently
we only support yaml.
name (str): The name of this recipe if we created it originally from an
archive.
"""
format_map = {
"yaml": cls._process_yaml
}
format_handler = format_map.get(file_format)
if format_handler is None:
raise ArgumentError("Unknown file format or file extension", file_format=file_format, \
known_formats=[x for x in format_map if format_map[x] is not None])
recipe_info = format_handler(path)
if name is None:
name, _ext = os.path.splitext(os.path.basename(path))
# Validate that the recipe file is correctly formatted
try:
recipe_info = RecipeSchema.verify(recipe_info)
except ValidationError as exc:
raise RecipeFileInvalid("Recipe file does not match expected schema", file=path, error_message=exc.msg, **exc.params)
description = recipe_info.get('description')
# Parse out global default and shared resource information
try:
resources = cls._parse_resource_declarations(recipe_info.get('resources', []), resources_dict)
defaults = cls._parse_variable_defaults(recipe_info.get("defaults", []))
steps = []
for i, action in enumerate(recipe_info.get('actions', [])):
action_name = action.pop('name')
if action_name is None:
raise RecipeFileInvalid("Action is missing required name parameter", \
parameters=action, path=path)
action_class = actions_dict.get(action_name)
if action_class is None:
raise UnknownRecipeActionType("Unknown step specified in recipe", \
action=action_name, step=i + 1, path=path)
# Parse out any resource usage in this step and make sure we only
# use named resources
step_resources = cls._parse_resource_usage(action, declarations=resources)
fixed_files, _variable_files = cls._parse_file_usage(action_class, action)
step = RecipeStep(action_class, action, step_resources, fixed_files)
steps.append(step)
return RecipeObject(name, description, steps, resources, defaults, path)
except RecipeFileInvalid as exc:
cls._future_raise(RecipeFileInvalid, RecipeFileInvalid(exc.msg, recipe=name, **exc.params),
sys.exc_info()[2]) | [
"def",
"FromFile",
"(",
"cls",
",",
"path",
",",
"actions_dict",
",",
"resources_dict",
",",
"file_format",
"=",
"\"yaml\"",
",",
"name",
"=",
"None",
")",
":",
"format_map",
"=",
"{",
"\"yaml\"",
":",
"cls",
".",
"_process_yaml",
"}",
"format_handler",
"=... | Create a RecipeObject from a file.
The file should be a specially constructed yaml file that describes
the recipe as well as the actions that it performs.
Args:
path (str): The path to the recipe file that we wish to load
actions_dict (dict): A dictionary of named RecipeActionObject
types that is used to look up all of the steps listed in
the recipe file.
resources_dict (dict): A dictionary of named RecipeResource types
that is used to look up all of the shared resources listed in
the recipe file.
file_format (str): The file format of the recipe file. Currently
we only support yaml.
name (str): The name of this recipe if we created it originally from an
archive. | [
"Create",
"a",
"RecipeObject",
"from",
"a",
"file",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotileship/iotile/ship/recipe.py#L156-L225 | train |
iotile/coretools | iotileship/iotile/ship/recipe.py | RecipeObject._parse_file_usage | def _parse_file_usage(cls, action_class, args):
"""Find all external files referenced by an action."""
fixed_files = {}
variable_files = []
if not hasattr(action_class, 'FILES'):
return fixed_files, variable_files
for file_arg in action_class.FILES:
arg_value = args.get(file_arg)
if arg_value is None:
raise RecipeFileInvalid("Action lists a file argument but none was given", declared_argument=file_arg, passed_arguments=args)
variables = _extract_variables(arg_value)
if len(variables) == 0:
fixed_files[file_arg] = arg_value
else:
variable_files.append(arg_value)
return fixed_files, variable_files | python | def _parse_file_usage(cls, action_class, args):
"""Find all external files referenced by an action."""
fixed_files = {}
variable_files = []
if not hasattr(action_class, 'FILES'):
return fixed_files, variable_files
for file_arg in action_class.FILES:
arg_value = args.get(file_arg)
if arg_value is None:
raise RecipeFileInvalid("Action lists a file argument but none was given", declared_argument=file_arg, passed_arguments=args)
variables = _extract_variables(arg_value)
if len(variables) == 0:
fixed_files[file_arg] = arg_value
else:
variable_files.append(arg_value)
return fixed_files, variable_files | [
"def",
"_parse_file_usage",
"(",
"cls",
",",
"action_class",
",",
"args",
")",
":",
"fixed_files",
"=",
"{",
"}",
"variable_files",
"=",
"[",
"]",
"if",
"not",
"hasattr",
"(",
"action_class",
",",
"'FILES'",
")",
":",
"return",
"fixed_files",
",",
"variabl... | Find all external files referenced by an action. | [
"Find",
"all",
"external",
"files",
"referenced",
"by",
"an",
"action",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotileship/iotile/ship/recipe.py#L240-L260 | train |
iotile/coretools | iotileship/iotile/ship/recipe.py | RecipeObject._parse_resource_declarations | def _parse_resource_declarations(cls, declarations, resource_map):
"""Parse out what resources are declared as shared for this recipe."""
resources = {}
for decl in declarations:
name = decl.pop('name')
typename = decl.pop('type')
desc = decl.pop('description', None)
autocreate = decl.pop('autocreate', False)
args = decl
res_type = resource_map.get(typename)
if res_type is None:
raise UnknownRecipeResourceType("Could not find shared resource type", type=typename, name=name)
# If the resource defines an argument schema, make sure we enforce it.
if hasattr(res_type, "ARG_SCHEMA"):
try:
args = res_type.ARG_SCHEMA.verify(args)
except ValidationError as exc:
raise RecipeFileInvalid("Recipe file resource declarttion has invalid parameters", resource=name, error_message=exc.msg, **exc.params)
if name in resources:
raise RecipeFileInvalid("Attempted to add two shared resources with the same name", name=name)
res = ResourceDeclaration(name, resource_map.get(typename), args, autocreate, desc, typename)
resources[name] = res
return resources | python | def _parse_resource_declarations(cls, declarations, resource_map):
"""Parse out what resources are declared as shared for this recipe."""
resources = {}
for decl in declarations:
name = decl.pop('name')
typename = decl.pop('type')
desc = decl.pop('description', None)
autocreate = decl.pop('autocreate', False)
args = decl
res_type = resource_map.get(typename)
if res_type is None:
raise UnknownRecipeResourceType("Could not find shared resource type", type=typename, name=name)
# If the resource defines an argument schema, make sure we enforce it.
if hasattr(res_type, "ARG_SCHEMA"):
try:
args = res_type.ARG_SCHEMA.verify(args)
except ValidationError as exc:
raise RecipeFileInvalid("Recipe file resource declarttion has invalid parameters", resource=name, error_message=exc.msg, **exc.params)
if name in resources:
raise RecipeFileInvalid("Attempted to add two shared resources with the same name", name=name)
res = ResourceDeclaration(name, resource_map.get(typename), args, autocreate, desc, typename)
resources[name] = res
return resources | [
"def",
"_parse_resource_declarations",
"(",
"cls",
",",
"declarations",
",",
"resource_map",
")",
":",
"resources",
"=",
"{",
"}",
"for",
"decl",
"in",
"declarations",
":",
"name",
"=",
"decl",
".",
"pop",
"(",
"'name'",
")",
"typename",
"=",
"decl",
".",
... | Parse out what resources are declared as shared for this recipe. | [
"Parse",
"out",
"what",
"resources",
"are",
"declared",
"as",
"shared",
"for",
"this",
"recipe",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotileship/iotile/ship/recipe.py#L263-L293 | train |
iotile/coretools | iotileship/iotile/ship/recipe.py | RecipeObject._parse_variable_defaults | def _parse_variable_defaults(cls, defaults):
"""Parse out all of the variable defaults."""
default_dict = {}
for item in defaults:
key = next(iter(item))
value = item[key]
if key in default_dict:
raise RecipeFileInvalid("Default variable value specified twice", name=key, old_value=default_dict[key], new_value=value)
default_dict[key] = value
return default_dict | python | def _parse_variable_defaults(cls, defaults):
"""Parse out all of the variable defaults."""
default_dict = {}
for item in defaults:
key = next(iter(item))
value = item[key]
if key in default_dict:
raise RecipeFileInvalid("Default variable value specified twice", name=key, old_value=default_dict[key], new_value=value)
default_dict[key] = value
return default_dict | [
"def",
"_parse_variable_defaults",
"(",
"cls",
",",
"defaults",
")",
":",
"default_dict",
"=",
"{",
"}",
"for",
"item",
"in",
"defaults",
":",
"key",
"=",
"next",
"(",
"iter",
"(",
"item",
")",
")",
"value",
"=",
"item",
"[",
"key",
"]",
"if",
"key",... | Parse out all of the variable defaults. | [
"Parse",
"out",
"all",
"of",
"the",
"variable",
"defaults",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotileship/iotile/ship/recipe.py#L296-L310 | train |
iotile/coretools | iotileship/iotile/ship/recipe.py | RecipeObject._parse_resource_usage | def _parse_resource_usage(cls, action_dict, declarations):
"""Parse out what resources are used, opened and closed in an action step."""
raw_used = action_dict.pop('use', [])
opened = [x.strip() for x in action_dict.pop('open_before', [])]
closed = [x.strip() for x in action_dict.pop('close_after', [])]
used = {}
for resource in raw_used:
if 'as' in resource:
global_name, _, local_name = resource.partition('as')
global_name = global_name.strip()
local_name = local_name.strip()
if len(global_name) == 0 or len(local_name) == 0:
raise RecipeFileInvalid("Resource usage specified in action with invalid name using 'as' statement", global_name=global_name, local_name=local_name, statement=resource)
else:
global_name = resource.strip()
local_name = global_name
if local_name in used:
raise RecipeFileInvalid("Resource specified twice for action", args=action_dict, resource=local_name, used_resources=used)
used[local_name] = global_name
# Make sure we only use, open and close declared resources
for name in (x for x in used.values() if x not in declarations):
raise RecipeFileInvalid("Action makes use of non-declared shared resource", name=name)
for name in (x for x in opened if x not in declarations):
raise RecipeFileInvalid("Action specified a non-declared shared resource in open_before", name=name)
for name in (x for x in closed if x not in declarations):
raise RecipeFileInvalid("Action specified a non-declared shared resource in close_after", name=name)
return ResourceUsage(used, opened, closed) | python | def _parse_resource_usage(cls, action_dict, declarations):
"""Parse out what resources are used, opened and closed in an action step."""
raw_used = action_dict.pop('use', [])
opened = [x.strip() for x in action_dict.pop('open_before', [])]
closed = [x.strip() for x in action_dict.pop('close_after', [])]
used = {}
for resource in raw_used:
if 'as' in resource:
global_name, _, local_name = resource.partition('as')
global_name = global_name.strip()
local_name = local_name.strip()
if len(global_name) == 0 or len(local_name) == 0:
raise RecipeFileInvalid("Resource usage specified in action with invalid name using 'as' statement", global_name=global_name, local_name=local_name, statement=resource)
else:
global_name = resource.strip()
local_name = global_name
if local_name in used:
raise RecipeFileInvalid("Resource specified twice for action", args=action_dict, resource=local_name, used_resources=used)
used[local_name] = global_name
# Make sure we only use, open and close declared resources
for name in (x for x in used.values() if x not in declarations):
raise RecipeFileInvalid("Action makes use of non-declared shared resource", name=name)
for name in (x for x in opened if x not in declarations):
raise RecipeFileInvalid("Action specified a non-declared shared resource in open_before", name=name)
for name in (x for x in closed if x not in declarations):
raise RecipeFileInvalid("Action specified a non-declared shared resource in close_after", name=name)
return ResourceUsage(used, opened, closed) | [
"def",
"_parse_resource_usage",
"(",
"cls",
",",
"action_dict",
",",
"declarations",
")",
":",
"raw_used",
"=",
"action_dict",
".",
"pop",
"(",
"'use'",
",",
"[",
"]",
")",
"opened",
"=",
"[",
"x",
".",
"strip",
"(",
")",
"for",
"x",
"in",
"action_dict... | Parse out what resources are used, opened and closed in an action step. | [
"Parse",
"out",
"what",
"resources",
"are",
"used",
"opened",
"and",
"closed",
"in",
"an",
"action",
"step",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotileship/iotile/ship/recipe.py#L313-L349 | train |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.