repo stringlengths 7 55 | path stringlengths 4 127 | func_name stringlengths 1 88 | original_string stringlengths 75 19.8k | language stringclasses 1 value | code stringlengths 75 19.8k | code_tokens list | docstring stringlengths 3 17.3k | docstring_tokens list | sha stringlengths 40 40 | url stringlengths 87 242 | partition stringclasses 1 value |
|---|---|---|---|---|---|---|---|---|---|---|---|
iotile/coretools | iotileship/iotile/ship/recipe.py | RecipeObject.prepare | def prepare(self, variables):
"""Initialize all steps in this recipe using their parameters.
Args:
variables (dict): A dictionary of global variable definitions
that may be used to replace or augment the parameters given
to each step.
Returns:
list of RecipeActionObject like instances: The list of instantiated
steps that can be used to execute this recipe.
"""
initializedsteps = []
if variables is None:
variables = dict()
for step, params, _resources, _files in self.steps:
new_params = _complete_parameters(params, variables)
initializedsteps.append(step(new_params))
return initializedsteps | python | def prepare(self, variables):
"""Initialize all steps in this recipe using their parameters.
Args:
variables (dict): A dictionary of global variable definitions
that may be used to replace or augment the parameters given
to each step.
Returns:
list of RecipeActionObject like instances: The list of instantiated
steps that can be used to execute this recipe.
"""
initializedsteps = []
if variables is None:
variables = dict()
for step, params, _resources, _files in self.steps:
new_params = _complete_parameters(params, variables)
initializedsteps.append(step(new_params))
return initializedsteps | [
"def",
"prepare",
"(",
"self",
",",
"variables",
")",
":",
"initializedsteps",
"=",
"[",
"]",
"if",
"variables",
"is",
"None",
":",
"variables",
"=",
"dict",
"(",
")",
"for",
"step",
",",
"params",
",",
"_resources",
",",
"_files",
"in",
"self",
".",
... | Initialize all steps in this recipe using their parameters.
Args:
variables (dict): A dictionary of global variable definitions
that may be used to replace or augment the parameters given
to each step.
Returns:
list of RecipeActionObject like instances: The list of instantiated
steps that can be used to execute this recipe. | [
"Initialize",
"all",
"steps",
"in",
"this",
"recipe",
"using",
"their",
"parameters",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotileship/iotile/ship/recipe.py#L358-L376 | train |
iotile/coretools | iotileship/iotile/ship/recipe.py | RecipeObject._prepare_resources | def _prepare_resources(self, variables, overrides=None):
"""Create and optionally open all shared resources."""
if overrides is None:
overrides = {}
res_map = {}
own_map = {}
for decl in self.resources.values():
resource = overrides.get(decl.name)
if resource is None:
args = _complete_parameters(decl.args, variables)
resource = decl.type(args)
own_map[decl.name] = resource
if decl.autocreate:
resource.open()
res_map[decl.name] = resource
return res_map, own_map | python | def _prepare_resources(self, variables, overrides=None):
"""Create and optionally open all shared resources."""
if overrides is None:
overrides = {}
res_map = {}
own_map = {}
for decl in self.resources.values():
resource = overrides.get(decl.name)
if resource is None:
args = _complete_parameters(decl.args, variables)
resource = decl.type(args)
own_map[decl.name] = resource
if decl.autocreate:
resource.open()
res_map[decl.name] = resource
return res_map, own_map | [
"def",
"_prepare_resources",
"(",
"self",
",",
"variables",
",",
"overrides",
"=",
"None",
")",
":",
"if",
"overrides",
"is",
"None",
":",
"overrides",
"=",
"{",
"}",
"res_map",
"=",
"{",
"}",
"own_map",
"=",
"{",
"}",
"for",
"decl",
"in",
"self",
".... | Create and optionally open all shared resources. | [
"Create",
"and",
"optionally",
"open",
"all",
"shared",
"resources",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotileship/iotile/ship/recipe.py#L378-L400 | train |
iotile/coretools | iotileship/iotile/ship/recipe.py | RecipeObject._cleanup_resources | def _cleanup_resources(self, initialized_resources):
"""Cleanup all resources that we own that are open."""
cleanup_errors = []
# Make sure we clean up all resources that we can and don't error out at the
# first one.
for name, res in initialized_resources.items():
try:
if res.opened:
res.close()
except Exception:
_type, value, traceback = sys.exc_info()
cleanup_errors.append((name, value, traceback))
if len(cleanup_errors) > 0:
raise RecipeResourceManagementError(operation="resource cleanup", errors=cleanup_errors) | python | def _cleanup_resources(self, initialized_resources):
"""Cleanup all resources that we own that are open."""
cleanup_errors = []
# Make sure we clean up all resources that we can and don't error out at the
# first one.
for name, res in initialized_resources.items():
try:
if res.opened:
res.close()
except Exception:
_type, value, traceback = sys.exc_info()
cleanup_errors.append((name, value, traceback))
if len(cleanup_errors) > 0:
raise RecipeResourceManagementError(operation="resource cleanup", errors=cleanup_errors) | [
"def",
"_cleanup_resources",
"(",
"self",
",",
"initialized_resources",
")",
":",
"cleanup_errors",
"=",
"[",
"]",
"for",
"name",
",",
"res",
"in",
"initialized_resources",
".",
"items",
"(",
")",
":",
"try",
":",
"if",
"res",
".",
"opened",
":",
"res",
... | Cleanup all resources that we own that are open. | [
"Cleanup",
"all",
"resources",
"that",
"we",
"own",
"that",
"are",
"open",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotileship/iotile/ship/recipe.py#L402-L418 | train |
iotile/coretools | iotileship/iotile/ship/recipe.py | RecipeObject.run | def run(self, variables=None, overrides=None):
"""Initialize and run this recipe.
By default all necessary shared resources are created and destroyed in
this function unless you pass them preinitizlied in overrides, in
which case they are used as is. The overrides parameter is designed
to allow testability of iotile-ship recipes by inspecting the shared
resources after the recipe has finished to ensure that it was properly
set up.
Args:
variables (dict): An optional dictionary of variable assignments.
There must be a single assignment for all free variables that
do not have a default value, otherwise the recipe will not
run.
overrides (dict): An optional dictionary of shared resource
objects that should be used instead of creating that resource
and destroying it inside this function.
"""
old_dir = os.getcwd()
try:
os.chdir(self.run_directory)
initialized_steps = self.prepare(variables)
owned_resources = {}
try:
print("Running in %s" % self.run_directory)
initialized_resources, owned_resources = self._prepare_resources(variables, overrides)
for i, (step, decl) in enumerate(zip(initialized_steps, self.steps)):
print("===> Step %d: %s\t Description: %s" % (i+1, self.steps[i][0].__name__, \
self.steps[i][1].get('description', '')))
runtime, out = _run_step(step, decl, initialized_resources)
print("======> Time Elapsed: %.2f seconds" % runtime)
if out is not None:
print(out[1])
finally:
self._cleanup_resources(owned_resources)
finally:
os.chdir(old_dir) | python | def run(self, variables=None, overrides=None):
"""Initialize and run this recipe.
By default all necessary shared resources are created and destroyed in
this function unless you pass them preinitizlied in overrides, in
which case they are used as is. The overrides parameter is designed
to allow testability of iotile-ship recipes by inspecting the shared
resources after the recipe has finished to ensure that it was properly
set up.
Args:
variables (dict): An optional dictionary of variable assignments.
There must be a single assignment for all free variables that
do not have a default value, otherwise the recipe will not
run.
overrides (dict): An optional dictionary of shared resource
objects that should be used instead of creating that resource
and destroying it inside this function.
"""
old_dir = os.getcwd()
try:
os.chdir(self.run_directory)
initialized_steps = self.prepare(variables)
owned_resources = {}
try:
print("Running in %s" % self.run_directory)
initialized_resources, owned_resources = self._prepare_resources(variables, overrides)
for i, (step, decl) in enumerate(zip(initialized_steps, self.steps)):
print("===> Step %d: %s\t Description: %s" % (i+1, self.steps[i][0].__name__, \
self.steps[i][1].get('description', '')))
runtime, out = _run_step(step, decl, initialized_resources)
print("======> Time Elapsed: %.2f seconds" % runtime)
if out is not None:
print(out[1])
finally:
self._cleanup_resources(owned_resources)
finally:
os.chdir(old_dir) | [
"def",
"run",
"(",
"self",
",",
"variables",
"=",
"None",
",",
"overrides",
"=",
"None",
")",
":",
"old_dir",
"=",
"os",
".",
"getcwd",
"(",
")",
"try",
":",
"os",
".",
"chdir",
"(",
"self",
".",
"run_directory",
")",
"initialized_steps",
"=",
"self"... | Initialize and run this recipe.
By default all necessary shared resources are created and destroyed in
this function unless you pass them preinitizlied in overrides, in
which case they are used as is. The overrides parameter is designed
to allow testability of iotile-ship recipes by inspecting the shared
resources after the recipe has finished to ensure that it was properly
set up.
Args:
variables (dict): An optional dictionary of variable assignments.
There must be a single assignment for all free variables that
do not have a default value, otherwise the recipe will not
run.
overrides (dict): An optional dictionary of shared resource
objects that should be used instead of creating that resource
and destroying it inside this function. | [
"Initialize",
"and",
"run",
"this",
"recipe",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotileship/iotile/ship/recipe.py#L420-L463 | train |
iotile/coretools | iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/yacc.py | generate | def generate(env):
"""Add Builders and construction variables for yacc to an Environment."""
c_file, cxx_file = SCons.Tool.createCFileBuilders(env)
# C
c_file.add_action('.y', YaccAction)
c_file.add_emitter('.y', yEmitter)
c_file.add_action('.yacc', YaccAction)
c_file.add_emitter('.yacc', yEmitter)
# Objective-C
c_file.add_action('.ym', YaccAction)
c_file.add_emitter('.ym', ymEmitter)
# C++
cxx_file.add_action('.yy', YaccAction)
cxx_file.add_emitter('.yy', yyEmitter)
env['YACC'] = env.Detect('bison') or 'yacc'
env['YACCFLAGS'] = SCons.Util.CLVar('')
env['YACCCOM'] = '$YACC $YACCFLAGS -o $TARGET $SOURCES'
env['YACCHFILESUFFIX'] = '.h'
env['YACCHXXFILESUFFIX'] = '.hpp'
env['YACCVCGFILESUFFIX'] = '.vcg' | python | def generate(env):
"""Add Builders and construction variables for yacc to an Environment."""
c_file, cxx_file = SCons.Tool.createCFileBuilders(env)
# C
c_file.add_action('.y', YaccAction)
c_file.add_emitter('.y', yEmitter)
c_file.add_action('.yacc', YaccAction)
c_file.add_emitter('.yacc', yEmitter)
# Objective-C
c_file.add_action('.ym', YaccAction)
c_file.add_emitter('.ym', ymEmitter)
# C++
cxx_file.add_action('.yy', YaccAction)
cxx_file.add_emitter('.yy', yyEmitter)
env['YACC'] = env.Detect('bison') or 'yacc'
env['YACCFLAGS'] = SCons.Util.CLVar('')
env['YACCCOM'] = '$YACC $YACCFLAGS -o $TARGET $SOURCES'
env['YACCHFILESUFFIX'] = '.h'
env['YACCHXXFILESUFFIX'] = '.hpp'
env['YACCVCGFILESUFFIX'] = '.vcg' | [
"def",
"generate",
"(",
"env",
")",
":",
"c_file",
",",
"cxx_file",
"=",
"SCons",
".",
"Tool",
".",
"createCFileBuilders",
"(",
"env",
")",
"c_file",
".",
"add_action",
"(",
"'.y'",
",",
"YaccAction",
")",
"c_file",
".",
"add_emitter",
"(",
"'.y'",
",",
... | Add Builders and construction variables for yacc to an Environment. | [
"Add",
"Builders",
"and",
"construction",
"variables",
"for",
"yacc",
"to",
"an",
"Environment",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/yacc.py#L97-L123 | train |
iotile/coretools | iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/ilink32.py | generate | def generate(env):
"""Add Builders and construction variables for Borland ilink to an
Environment."""
SCons.Tool.createSharedLibBuilder(env)
SCons.Tool.createProgBuilder(env)
env['LINK'] = '$CC'
env['LINKFLAGS'] = SCons.Util.CLVar('')
env['LINKCOM'] = '$LINK -q $LINKFLAGS -e$TARGET $SOURCES $LIBS'
env['LIBDIRPREFIX']=''
env['LIBDIRSUFFIX']=''
env['LIBLINKPREFIX']=''
env['LIBLINKSUFFIX']='$LIBSUFFIX' | python | def generate(env):
"""Add Builders and construction variables for Borland ilink to an
Environment."""
SCons.Tool.createSharedLibBuilder(env)
SCons.Tool.createProgBuilder(env)
env['LINK'] = '$CC'
env['LINKFLAGS'] = SCons.Util.CLVar('')
env['LINKCOM'] = '$LINK -q $LINKFLAGS -e$TARGET $SOURCES $LIBS'
env['LIBDIRPREFIX']=''
env['LIBDIRSUFFIX']=''
env['LIBLINKPREFIX']=''
env['LIBLINKSUFFIX']='$LIBSUFFIX' | [
"def",
"generate",
"(",
"env",
")",
":",
"SCons",
".",
"Tool",
".",
"createSharedLibBuilder",
"(",
"env",
")",
"SCons",
".",
"Tool",
".",
"createProgBuilder",
"(",
"env",
")",
"env",
"[",
"'LINK'",
"]",
"=",
"'$CC'",
"env",
"[",
"'LINKFLAGS'",
"]",
"="... | Add Builders and construction variables for Borland ilink to an
Environment. | [
"Add",
"Builders",
"and",
"construction",
"variables",
"for",
"Borland",
"ilink",
"to",
"an",
"Environment",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/ilink32.py#L36-L48 | train |
iotile/coretools | iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/MSCommon/sdk.py | SDKDefinition.find_sdk_dir | def find_sdk_dir(self):
"""Try to find the MS SDK from the registry.
Return None if failed or the directory does not exist.
"""
if not SCons.Util.can_read_reg:
debug('find_sdk_dir(): can not read registry')
return None
hkey = self.HKEY_FMT % self.hkey_data
debug('find_sdk_dir(): checking registry:{}'.format(hkey))
try:
sdk_dir = common.read_reg(hkey)
except SCons.Util.WinError as e:
debug('find_sdk_dir(): no SDK registry key {}'.format(repr(hkey)))
return None
debug('find_sdk_dir(): Trying SDK Dir: {}'.format(sdk_dir))
if not os.path.exists(sdk_dir):
debug('find_sdk_dir(): {} not on file system'.format(sdk_dir))
return None
ftc = os.path.join(sdk_dir, self.sanity_check_file)
if not os.path.exists(ftc):
debug("find_sdk_dir(): sanity check {} not found".format(ftc))
return None
return sdk_dir | python | def find_sdk_dir(self):
"""Try to find the MS SDK from the registry.
Return None if failed or the directory does not exist.
"""
if not SCons.Util.can_read_reg:
debug('find_sdk_dir(): can not read registry')
return None
hkey = self.HKEY_FMT % self.hkey_data
debug('find_sdk_dir(): checking registry:{}'.format(hkey))
try:
sdk_dir = common.read_reg(hkey)
except SCons.Util.WinError as e:
debug('find_sdk_dir(): no SDK registry key {}'.format(repr(hkey)))
return None
debug('find_sdk_dir(): Trying SDK Dir: {}'.format(sdk_dir))
if not os.path.exists(sdk_dir):
debug('find_sdk_dir(): {} not on file system'.format(sdk_dir))
return None
ftc = os.path.join(sdk_dir, self.sanity_check_file)
if not os.path.exists(ftc):
debug("find_sdk_dir(): sanity check {} not found".format(ftc))
return None
return sdk_dir | [
"def",
"find_sdk_dir",
"(",
"self",
")",
":",
"if",
"not",
"SCons",
".",
"Util",
".",
"can_read_reg",
":",
"debug",
"(",
"'find_sdk_dir(): can not read registry'",
")",
"return",
"None",
"hkey",
"=",
"self",
".",
"HKEY_FMT",
"%",
"self",
".",
"hkey_data",
"d... | Try to find the MS SDK from the registry.
Return None if failed or the directory does not exist. | [
"Try",
"to",
"find",
"the",
"MS",
"SDK",
"from",
"the",
"registry",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/MSCommon/sdk.py#L69-L98 | train |
iotile/coretools | iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/MSCommon/sdk.py | SDKDefinition.get_sdk_dir | def get_sdk_dir(self):
"""Return the MSSSDK given the version string."""
try:
return self._sdk_dir
except AttributeError:
sdk_dir = self.find_sdk_dir()
self._sdk_dir = sdk_dir
return sdk_dir | python | def get_sdk_dir(self):
"""Return the MSSSDK given the version string."""
try:
return self._sdk_dir
except AttributeError:
sdk_dir = self.find_sdk_dir()
self._sdk_dir = sdk_dir
return sdk_dir | [
"def",
"get_sdk_dir",
"(",
"self",
")",
":",
"try",
":",
"return",
"self",
".",
"_sdk_dir",
"except",
"AttributeError",
":",
"sdk_dir",
"=",
"self",
".",
"find_sdk_dir",
"(",
")",
"self",
".",
"_sdk_dir",
"=",
"sdk_dir",
"return",
"sdk_dir"
] | Return the MSSSDK given the version string. | [
"Return",
"the",
"MSSSDK",
"given",
"the",
"version",
"string",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/MSCommon/sdk.py#L100-L107 | train |
iotile/coretools | iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/MSCommon/sdk.py | SDKDefinition.get_sdk_vc_script | def get_sdk_vc_script(self,host_arch, target_arch):
""" Return the script to initialize the VC compiler installed by SDK
"""
if (host_arch == 'amd64' and target_arch == 'x86'):
# No cross tools needed compiling 32 bits on 64 bit machine
host_arch=target_arch
arch_string=target_arch
if (host_arch != target_arch):
arch_string='%s_%s'%(host_arch,target_arch)
debug("sdk.py: get_sdk_vc_script():arch_string:%s host_arch:%s target_arch:%s"%(arch_string,
host_arch,
target_arch))
file=self.vc_setup_scripts.get(arch_string,None)
debug("sdk.py: get_sdk_vc_script():file:%s"%file)
return file | python | def get_sdk_vc_script(self,host_arch, target_arch):
""" Return the script to initialize the VC compiler installed by SDK
"""
if (host_arch == 'amd64' and target_arch == 'x86'):
# No cross tools needed compiling 32 bits on 64 bit machine
host_arch=target_arch
arch_string=target_arch
if (host_arch != target_arch):
arch_string='%s_%s'%(host_arch,target_arch)
debug("sdk.py: get_sdk_vc_script():arch_string:%s host_arch:%s target_arch:%s"%(arch_string,
host_arch,
target_arch))
file=self.vc_setup_scripts.get(arch_string,None)
debug("sdk.py: get_sdk_vc_script():file:%s"%file)
return file | [
"def",
"get_sdk_vc_script",
"(",
"self",
",",
"host_arch",
",",
"target_arch",
")",
":",
"if",
"(",
"host_arch",
"==",
"'amd64'",
"and",
"target_arch",
"==",
"'x86'",
")",
":",
"host_arch",
"=",
"target_arch",
"arch_string",
"=",
"target_arch",
"if",
"(",
"h... | Return the script to initialize the VC compiler installed by SDK | [
"Return",
"the",
"script",
"to",
"initialize",
"the",
"VC",
"compiler",
"installed",
"by",
"SDK"
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/MSCommon/sdk.py#L109-L126 | train |
iotile/coretools | iotileemulate/iotile/emulate/utilities/format_rpc.py | format_rpc | def format_rpc(data):
"""Format an RPC call and response.
Args:
data (tuple): A tuple containing the address, rpc_id, argument and
response payloads and any error code.
Returns:
str: The formated RPC string.
"""
address, rpc_id, args, resp, _status = data
name = rpc_name(rpc_id)
if isinstance(args, (bytes, bytearray)):
arg_str = hexlify(args)
else:
arg_str = repr(args)
if isinstance(resp, (bytes, bytearray)):
resp_str = hexlify(resp)
else:
resp_str = repr(resp)
#FIXME: Check and print status as well
return "%s called on address %d, payload=%s, response=%s" % (name, address, arg_str, resp_str) | python | def format_rpc(data):
"""Format an RPC call and response.
Args:
data (tuple): A tuple containing the address, rpc_id, argument and
response payloads and any error code.
Returns:
str: The formated RPC string.
"""
address, rpc_id, args, resp, _status = data
name = rpc_name(rpc_id)
if isinstance(args, (bytes, bytearray)):
arg_str = hexlify(args)
else:
arg_str = repr(args)
if isinstance(resp, (bytes, bytearray)):
resp_str = hexlify(resp)
else:
resp_str = repr(resp)
#FIXME: Check and print status as well
return "%s called on address %d, payload=%s, response=%s" % (name, address, arg_str, resp_str) | [
"def",
"format_rpc",
"(",
"data",
")",
":",
"address",
",",
"rpc_id",
",",
"args",
",",
"resp",
",",
"_status",
"=",
"data",
"name",
"=",
"rpc_name",
"(",
"rpc_id",
")",
"if",
"isinstance",
"(",
"args",
",",
"(",
"bytes",
",",
"bytearray",
")",
")",
... | Format an RPC call and response.
Args:
data (tuple): A tuple containing the address, rpc_id, argument and
response payloads and any error code.
Returns:
str: The formated RPC string. | [
"Format",
"an",
"RPC",
"call",
"and",
"response",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotileemulate/iotile/emulate/utilities/format_rpc.py#L6-L32 | train |
iotile/coretools | transport_plugins/bled112/iotile_transport_bled112/server_bled112.py | BLED112Server.start | async def start(self):
"""Start serving access to devices over bluetooth."""
self._command_task.start()
try:
await self._cleanup_old_connections()
except Exception:
await self.stop()
raise
#FIXME: This is a temporary hack, get the actual device we are serving.
iotile_id = next(iter(self.adapter.devices))
self.device = self.adapter.devices[iotile_id]
self._logger.info("Serving device 0x%04X over BLED112", iotile_id)
await self._update_advertisement()
self.setup_client(self.CLIENT_ID, scan=False, broadcast=True) | python | async def start(self):
"""Start serving access to devices over bluetooth."""
self._command_task.start()
try:
await self._cleanup_old_connections()
except Exception:
await self.stop()
raise
#FIXME: This is a temporary hack, get the actual device we are serving.
iotile_id = next(iter(self.adapter.devices))
self.device = self.adapter.devices[iotile_id]
self._logger.info("Serving device 0x%04X over BLED112", iotile_id)
await self._update_advertisement()
self.setup_client(self.CLIENT_ID, scan=False, broadcast=True) | [
"async",
"def",
"start",
"(",
"self",
")",
":",
"self",
".",
"_command_task",
".",
"start",
"(",
")",
"try",
":",
"await",
"self",
".",
"_cleanup_old_connections",
"(",
")",
"except",
"Exception",
":",
"await",
"self",
".",
"stop",
"(",
")",
"raise",
"... | Start serving access to devices over bluetooth. | [
"Start",
"serving",
"access",
"to",
"devices",
"over",
"bluetooth",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/transport_plugins/bled112/iotile_transport_bled112/server_bled112.py#L126-L144 | train |
iotile/coretools | transport_plugins/bled112/iotile_transport_bled112/server_bled112.py | BLED112Server.stop | async def stop(self):
"""Safely shut down this interface"""
await self._command_task.future_command(['_set_mode', 0, 0]) # Disable advertising
await self._cleanup_old_connections()
self._command_task.stop()
self._stream.stop()
self._serial_port.close()
await super(BLED112Server, self).stop() | python | async def stop(self):
"""Safely shut down this interface"""
await self._command_task.future_command(['_set_mode', 0, 0]) # Disable advertising
await self._cleanup_old_connections()
self._command_task.stop()
self._stream.stop()
self._serial_port.close()
await super(BLED112Server, self).stop() | [
"async",
"def",
"stop",
"(",
"self",
")",
":",
"await",
"self",
".",
"_command_task",
".",
"future_command",
"(",
"[",
"'_set_mode'",
",",
"0",
",",
"0",
"]",
")",
"await",
"self",
".",
"_cleanup_old_connections",
"(",
")",
"self",
".",
"_command_task",
... | Safely shut down this interface | [
"Safely",
"shut",
"down",
"this",
"interface"
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/transport_plugins/bled112/iotile_transport_bled112/server_bled112.py#L146-L155 | train |
iotile/coretools | transport_plugins/bled112/iotile_transport_bled112/server_bled112.py | BLED112Server._call_rpc | async def _call_rpc(self, header):
"""Call an RPC given a header and possibly a previously sent payload
Args:
header (bytearray): The RPC header we should call
"""
length, _, cmd, feature, address = struct.unpack("<BBBBB", bytes(header))
rpc_id = (feature << 8) | cmd
payload = self.rpc_payload[:length]
self._logger.debug("Calling RPC %d:%04X with %s", address, rpc_id, binascii.hexlify(payload))
exception = None
response = None
try:
response = await self.send_rpc(self.CLIENT_ID, str(self.device.iotile_id), address, rpc_id, bytes(payload), timeout=30.0)
except VALID_RPC_EXCEPTIONS as err:
exception = err
except Exception as err:
self._logger.exception("Error calling RPC %d:%04X", address, rpc_id)
exception = err
status, response = pack_rpc_response(response, exception)
resp_header = struct.pack("<BBBB", status, 0, 0, len(response))
await self._send_notification(self.ReceiveHeaderHandle, resp_header)
if len(response) > 0:
await self._send_notification(self.ReceivePayloadHandle, response) | python | async def _call_rpc(self, header):
"""Call an RPC given a header and possibly a previously sent payload
Args:
header (bytearray): The RPC header we should call
"""
length, _, cmd, feature, address = struct.unpack("<BBBBB", bytes(header))
rpc_id = (feature << 8) | cmd
payload = self.rpc_payload[:length]
self._logger.debug("Calling RPC %d:%04X with %s", address, rpc_id, binascii.hexlify(payload))
exception = None
response = None
try:
response = await self.send_rpc(self.CLIENT_ID, str(self.device.iotile_id), address, rpc_id, bytes(payload), timeout=30.0)
except VALID_RPC_EXCEPTIONS as err:
exception = err
except Exception as err:
self._logger.exception("Error calling RPC %d:%04X", address, rpc_id)
exception = err
status, response = pack_rpc_response(response, exception)
resp_header = struct.pack("<BBBB", status, 0, 0, len(response))
await self._send_notification(self.ReceiveHeaderHandle, resp_header)
if len(response) > 0:
await self._send_notification(self.ReceivePayloadHandle, response) | [
"async",
"def",
"_call_rpc",
"(",
"self",
",",
"header",
")",
":",
"length",
",",
"_",
",",
"cmd",
",",
"feature",
",",
"address",
"=",
"struct",
".",
"unpack",
"(",
"\"<BBBBB\"",
",",
"bytes",
"(",
"header",
")",
")",
"rpc_id",
"=",
"(",
"feature",
... | Call an RPC given a header and possibly a previously sent payload
Args:
header (bytearray): The RPC header we should call | [
"Call",
"an",
"RPC",
"given",
"a",
"header",
"and",
"possibly",
"a",
"previously",
"sent",
"payload"
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/transport_plugins/bled112/iotile_transport_bled112/server_bled112.py#L308-L339 | train |
iotile/coretools | iotilesensorgraph/iotile/sg/output_formats/script.py | format_script | def format_script(sensor_graph):
"""Create a binary script containing this sensor graph.
This function produces a repeatable script by applying a known sorting
order to all constants and config variables when iterating over those
dictionaries.
Args:
sensor_graph (SensorGraph): the sensor graph that we want to format
Returns:
bytearray: The binary script data.
"""
records = []
records.append(SetGraphOnlineRecord(False, address=8))
records.append(ClearDataRecord(address=8))
records.append(ResetGraphRecord(address=8))
for node in sensor_graph.nodes:
records.append(AddNodeRecord(str(node), address=8))
for streamer in sensor_graph.streamers:
records.append(AddStreamerRecord(streamer, address=8))
for stream, value in sorted(sensor_graph.constant_database.items(), key=lambda x: x[0].encode()):
records.append(SetConstantRecord(stream, value, address=8))
records.append(PersistGraphRecord(address=8))
records.append(ClearConfigVariablesRecord())
for slot in sorted(sensor_graph.config_database, key=lambda x: x.encode()):
for config_id in sorted(sensor_graph.config_database[slot]):
config_type, value = sensor_graph.config_database[slot][config_id]
byte_value = _convert_to_bytes(config_type, value)
records.append(SetConfigRecord(slot, config_id, byte_value))
# If we have an app tag and version set program them in
app_tag = sensor_graph.metadata_database.get('app_tag')
app_version = sensor_graph.metadata_database.get('app_version')
if app_tag is not None:
records.append(SetDeviceTagRecord(app_tag=app_tag, app_version=app_version))
script = UpdateScript(records)
return script.encode() | python | def format_script(sensor_graph):
"""Create a binary script containing this sensor graph.
This function produces a repeatable script by applying a known sorting
order to all constants and config variables when iterating over those
dictionaries.
Args:
sensor_graph (SensorGraph): the sensor graph that we want to format
Returns:
bytearray: The binary script data.
"""
records = []
records.append(SetGraphOnlineRecord(False, address=8))
records.append(ClearDataRecord(address=8))
records.append(ResetGraphRecord(address=8))
for node in sensor_graph.nodes:
records.append(AddNodeRecord(str(node), address=8))
for streamer in sensor_graph.streamers:
records.append(AddStreamerRecord(streamer, address=8))
for stream, value in sorted(sensor_graph.constant_database.items(), key=lambda x: x[0].encode()):
records.append(SetConstantRecord(stream, value, address=8))
records.append(PersistGraphRecord(address=8))
records.append(ClearConfigVariablesRecord())
for slot in sorted(sensor_graph.config_database, key=lambda x: x.encode()):
for config_id in sorted(sensor_graph.config_database[slot]):
config_type, value = sensor_graph.config_database[slot][config_id]
byte_value = _convert_to_bytes(config_type, value)
records.append(SetConfigRecord(slot, config_id, byte_value))
# If we have an app tag and version set program them in
app_tag = sensor_graph.metadata_database.get('app_tag')
app_version = sensor_graph.metadata_database.get('app_version')
if app_tag is not None:
records.append(SetDeviceTagRecord(app_tag=app_tag, app_version=app_version))
script = UpdateScript(records)
return script.encode() | [
"def",
"format_script",
"(",
"sensor_graph",
")",
":",
"records",
"=",
"[",
"]",
"records",
".",
"append",
"(",
"SetGraphOnlineRecord",
"(",
"False",
",",
"address",
"=",
"8",
")",
")",
"records",
".",
"append",
"(",
"ClearDataRecord",
"(",
"address",
"=",... | Create a binary script containing this sensor graph.
This function produces a repeatable script by applying a known sorting
order to all constants and config variables when iterating over those
dictionaries.
Args:
sensor_graph (SensorGraph): the sensor graph that we want to format
Returns:
bytearray: The binary script data. | [
"Create",
"a",
"binary",
"script",
"containing",
"this",
"sensor",
"graph",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilesensorgraph/iotile/sg/output_formats/script.py#L12-L60 | train |
iotile/coretools | iotilesensorgraph/iotile/sg/sensor_log.py | SensorLog.dump | def dump(self):
"""Dump the state of this SensorLog.
The purpose of this method is to be able to restore the same state
later. However there are links in the SensorLog for stream walkers.
So the dump process saves the state of each stream walker and upon
restore, it looks through the current set of stream walkers and
restores each one that existed when dump() was called to its state.
Returns:
dict: The serialized state of this SensorLog.
"""
walkers = {}
walkers.update({str(walker.selector): walker.dump() for walker in self._queue_walkers})
walkers.update({str(walker.selector): walker.dump() for walker in self._virtual_walkers})
return {
u'engine': self._engine.dump(),
u'rollover_storage': self._rollover_storage,
u'rollover_streaming': self._rollover_streaming,
u'last_values': {str(stream): reading.asdict() for stream, reading in self._last_values.items()},
u'walkers': walkers
} | python | def dump(self):
"""Dump the state of this SensorLog.
The purpose of this method is to be able to restore the same state
later. However there are links in the SensorLog for stream walkers.
So the dump process saves the state of each stream walker and upon
restore, it looks through the current set of stream walkers and
restores each one that existed when dump() was called to its state.
Returns:
dict: The serialized state of this SensorLog.
"""
walkers = {}
walkers.update({str(walker.selector): walker.dump() for walker in self._queue_walkers})
walkers.update({str(walker.selector): walker.dump() for walker in self._virtual_walkers})
return {
u'engine': self._engine.dump(),
u'rollover_storage': self._rollover_storage,
u'rollover_streaming': self._rollover_streaming,
u'last_values': {str(stream): reading.asdict() for stream, reading in self._last_values.items()},
u'walkers': walkers
} | [
"def",
"dump",
"(",
"self",
")",
":",
"walkers",
"=",
"{",
"}",
"walkers",
".",
"update",
"(",
"{",
"str",
"(",
"walker",
".",
"selector",
")",
":",
"walker",
".",
"dump",
"(",
")",
"for",
"walker",
"in",
"self",
".",
"_queue_walkers",
"}",
")",
... | Dump the state of this SensorLog.
The purpose of this method is to be able to restore the same state
later. However there are links in the SensorLog for stream walkers.
So the dump process saves the state of each stream walker and upon
restore, it looks through the current set of stream walkers and
restores each one that existed when dump() was called to its state.
Returns:
dict: The serialized state of this SensorLog. | [
"Dump",
"the",
"state",
"of",
"this",
"SensorLog",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilesensorgraph/iotile/sg/sensor_log.py#L74-L98 | train |
iotile/coretools | iotilesensorgraph/iotile/sg/sensor_log.py | SensorLog.set_rollover | def set_rollover(self, area, enabled):
"""Configure whether rollover is enabled for streaming or storage streams.
Normally a SensorLog is used in ring-buffer mode which means that old
readings are automatically overwritten as needed when new data is saved.
However, you can configure it into fill-stop mode by using:
set_rollover("streaming"|"storage", True|False)
By default rollover is set to True for both streaming and storage and can
be controlled individually for each one.
Args:
area (str): Either streaming or storage.
enabled (bool): Whether to enable or disable rollover.
"""
if area == u'streaming':
self._rollover_streaming = enabled
elif area == u'storage':
self._rollover_storage = enabled
else:
raise ArgumentError("You must pass one of 'storage' or 'streaming' to set_rollover", area=area) | python | def set_rollover(self, area, enabled):
"""Configure whether rollover is enabled for streaming or storage streams.
Normally a SensorLog is used in ring-buffer mode which means that old
readings are automatically overwritten as needed when new data is saved.
However, you can configure it into fill-stop mode by using:
set_rollover("streaming"|"storage", True|False)
By default rollover is set to True for both streaming and storage and can
be controlled individually for each one.
Args:
area (str): Either streaming or storage.
enabled (bool): Whether to enable or disable rollover.
"""
if area == u'streaming':
self._rollover_streaming = enabled
elif area == u'storage':
self._rollover_storage = enabled
else:
raise ArgumentError("You must pass one of 'storage' or 'streaming' to set_rollover", area=area) | [
"def",
"set_rollover",
"(",
"self",
",",
"area",
",",
"enabled",
")",
":",
"if",
"area",
"==",
"u'streaming'",
":",
"self",
".",
"_rollover_streaming",
"=",
"enabled",
"elif",
"area",
"==",
"u'storage'",
":",
"self",
".",
"_rollover_storage",
"=",
"enabled",... | Configure whether rollover is enabled for streaming or storage streams.
Normally a SensorLog is used in ring-buffer mode which means that old
readings are automatically overwritten as needed when new data is saved.
However, you can configure it into fill-stop mode by using:
set_rollover("streaming"|"storage", True|False)
By default rollover is set to True for both streaming and storage and can
be controlled individually for each one.
Args:
area (str): Either streaming or storage.
enabled (bool): Whether to enable or disable rollover. | [
"Configure",
"whether",
"rollover",
"is",
"enabled",
"for",
"streaming",
"or",
"storage",
"streams",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilesensorgraph/iotile/sg/sensor_log.py#L146-L168 | train |
iotile/coretools | iotilesensorgraph/iotile/sg/sensor_log.py | SensorLog.watch | def watch(self, selector, callback):
"""Call a function whenever a stream changes.
Args:
selector (DataStreamSelector): The selector to watch.
If this is None, it is treated as a wildcard selector
that matches every stream.
callback (callable): The function to call when a new
reading is pushed. Callback is called as:
callback(stream, value)
"""
if selector not in self._monitors:
self._monitors[selector] = set()
self._monitors[selector].add(callback) | python | def watch(self, selector, callback):
"""Call a function whenever a stream changes.
Args:
selector (DataStreamSelector): The selector to watch.
If this is None, it is treated as a wildcard selector
that matches every stream.
callback (callable): The function to call when a new
reading is pushed. Callback is called as:
callback(stream, value)
"""
if selector not in self._monitors:
self._monitors[selector] = set()
self._monitors[selector].add(callback) | [
"def",
"watch",
"(",
"self",
",",
"selector",
",",
"callback",
")",
":",
"if",
"selector",
"not",
"in",
"self",
".",
"_monitors",
":",
"self",
".",
"_monitors",
"[",
"selector",
"]",
"=",
"set",
"(",
")",
"self",
".",
"_monitors",
"[",
"selector",
"]... | Call a function whenever a stream changes.
Args:
selector (DataStreamSelector): The selector to watch.
If this is None, it is treated as a wildcard selector
that matches every stream.
callback (callable): The function to call when a new
reading is pushed. Callback is called as:
callback(stream, value) | [
"Call",
"a",
"function",
"whenever",
"a",
"stream",
"changes",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilesensorgraph/iotile/sg/sensor_log.py#L190-L205 | train |
iotile/coretools | iotilesensorgraph/iotile/sg/sensor_log.py | SensorLog.create_walker | def create_walker(self, selector, skip_all=True):
"""Create a stream walker based on the given selector.
This function returns a StreamWalker subclass that will
remain up to date and allow iterating over and popping readings
from the stream(s) specified by the selector.
When the stream walker is done, it should be passed to
destroy_walker so that it is removed from internal lists that
are used to always keep it in sync.
Args:
selector (DataStreamSelector): The selector describing the
streams that we want to iterate over.
skip_all (bool): Whether to start at the beginning of the data
or to skip everything and start at the end. Defaults
to skipping everything. This parameter only has any
effect on buffered stream selectors.
Returns:
StreamWalker: A properly updating stream walker with the given selector.
"""
if selector.buffered:
walker = BufferedStreamWalker(selector, self._engine, skip_all=skip_all)
self._queue_walkers.append(walker)
return walker
if selector.match_type == DataStream.CounterType:
walker = CounterStreamWalker(selector)
else:
walker = VirtualStreamWalker(selector)
self._virtual_walkers.append(walker)
return walker | python | def create_walker(self, selector, skip_all=True):
"""Create a stream walker based on the given selector.
This function returns a StreamWalker subclass that will
remain up to date and allow iterating over and popping readings
from the stream(s) specified by the selector.
When the stream walker is done, it should be passed to
destroy_walker so that it is removed from internal lists that
are used to always keep it in sync.
Args:
selector (DataStreamSelector): The selector describing the
streams that we want to iterate over.
skip_all (bool): Whether to start at the beginning of the data
or to skip everything and start at the end. Defaults
to skipping everything. This parameter only has any
effect on buffered stream selectors.
Returns:
StreamWalker: A properly updating stream walker with the given selector.
"""
if selector.buffered:
walker = BufferedStreamWalker(selector, self._engine, skip_all=skip_all)
self._queue_walkers.append(walker)
return walker
if selector.match_type == DataStream.CounterType:
walker = CounterStreamWalker(selector)
else:
walker = VirtualStreamWalker(selector)
self._virtual_walkers.append(walker)
return walker | [
"def",
"create_walker",
"(",
"self",
",",
"selector",
",",
"skip_all",
"=",
"True",
")",
":",
"if",
"selector",
".",
"buffered",
":",
"walker",
"=",
"BufferedStreamWalker",
"(",
"selector",
",",
"self",
".",
"_engine",
",",
"skip_all",
"=",
"skip_all",
")"... | Create a stream walker based on the given selector.
This function returns a StreamWalker subclass that will
remain up to date and allow iterating over and popping readings
from the stream(s) specified by the selector.
When the stream walker is done, it should be passed to
destroy_walker so that it is removed from internal lists that
are used to always keep it in sync.
Args:
selector (DataStreamSelector): The selector describing the
streams that we want to iterate over.
skip_all (bool): Whether to start at the beginning of the data
or to skip everything and start at the end. Defaults
to skipping everything. This parameter only has any
effect on buffered stream selectors.
Returns:
StreamWalker: A properly updating stream walker with the given selector. | [
"Create",
"a",
"stream",
"walker",
"based",
"on",
"the",
"given",
"selector",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilesensorgraph/iotile/sg/sensor_log.py#L207-L242 | train |
iotile/coretools | iotilesensorgraph/iotile/sg/sensor_log.py | SensorLog.destroy_walker | def destroy_walker(self, walker):
"""Destroy a previously created stream walker.
Args:
walker (StreamWalker): The walker to remove from internal updating
lists.
"""
if walker.buffered:
self._queue_walkers.remove(walker)
else:
self._virtual_walkers.remove(walker) | python | def destroy_walker(self, walker):
"""Destroy a previously created stream walker.
Args:
walker (StreamWalker): The walker to remove from internal updating
lists.
"""
if walker.buffered:
self._queue_walkers.remove(walker)
else:
self._virtual_walkers.remove(walker) | [
"def",
"destroy_walker",
"(",
"self",
",",
"walker",
")",
":",
"if",
"walker",
".",
"buffered",
":",
"self",
".",
"_queue_walkers",
".",
"remove",
"(",
"walker",
")",
"else",
":",
"self",
".",
"_virtual_walkers",
".",
"remove",
"(",
"walker",
")"
] | Destroy a previously created stream walker.
Args:
walker (StreamWalker): The walker to remove from internal updating
lists. | [
"Destroy",
"a",
"previously",
"created",
"stream",
"walker",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilesensorgraph/iotile/sg/sensor_log.py#L244-L255 | train |
iotile/coretools | iotilesensorgraph/iotile/sg/sensor_log.py | SensorLog.restore_walker | def restore_walker(self, dumped_state):
"""Restore a stream walker that was previously serialized.
Since stream walkers need to be tracked in an internal list for
notification purposes, we need to be careful with how we restore
them to make sure they remain part of the right list.
Args:
dumped_state (dict): The dumped state of a stream walker
from a previous call to StreamWalker.dump()
Returns:
StreamWalker: The correctly restored StreamWalker subclass.
"""
selector_string = dumped_state.get(u'selector')
if selector_string is None:
raise ArgumentError("Invalid stream walker state in restore_walker, missing 'selector' key", state=dumped_state)
selector = DataStreamSelector.FromString(selector_string)
walker = self.create_walker(selector)
walker.restore(dumped_state)
return walker | python | def restore_walker(self, dumped_state):
"""Restore a stream walker that was previously serialized.
Since stream walkers need to be tracked in an internal list for
notification purposes, we need to be careful with how we restore
them to make sure they remain part of the right list.
Args:
dumped_state (dict): The dumped state of a stream walker
from a previous call to StreamWalker.dump()
Returns:
StreamWalker: The correctly restored StreamWalker subclass.
"""
selector_string = dumped_state.get(u'selector')
if selector_string is None:
raise ArgumentError("Invalid stream walker state in restore_walker, missing 'selector' key", state=dumped_state)
selector = DataStreamSelector.FromString(selector_string)
walker = self.create_walker(selector)
walker.restore(dumped_state)
return walker | [
"def",
"restore_walker",
"(",
"self",
",",
"dumped_state",
")",
":",
"selector_string",
"=",
"dumped_state",
".",
"get",
"(",
"u'selector'",
")",
"if",
"selector_string",
"is",
"None",
":",
"raise",
"ArgumentError",
"(",
"\"Invalid stream walker state in restore_walke... | Restore a stream walker that was previously serialized.
Since stream walkers need to be tracked in an internal list for
notification purposes, we need to be careful with how we restore
them to make sure they remain part of the right list.
Args:
dumped_state (dict): The dumped state of a stream walker
from a previous call to StreamWalker.dump()
Returns:
StreamWalker: The correctly restored StreamWalker subclass. | [
"Restore",
"a",
"stream",
"walker",
"that",
"was",
"previously",
"serialized",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilesensorgraph/iotile/sg/sensor_log.py#L257-L280 | train |
iotile/coretools | iotilesensorgraph/iotile/sg/sensor_log.py | SensorLog.clear | def clear(self):
"""Clear all data from this sensor_log.
All readings in all walkers are skipped and buffered data is
destroyed.
"""
for walker in self._virtual_walkers:
walker.skip_all()
self._engine.clear()
for walker in self._queue_walkers:
walker.skip_all()
self._last_values = {} | python | def clear(self):
"""Clear all data from this sensor_log.
All readings in all walkers are skipped and buffered data is
destroyed.
"""
for walker in self._virtual_walkers:
walker.skip_all()
self._engine.clear()
for walker in self._queue_walkers:
walker.skip_all()
self._last_values = {} | [
"def",
"clear",
"(",
"self",
")",
":",
"for",
"walker",
"in",
"self",
".",
"_virtual_walkers",
":",
"walker",
".",
"skip_all",
"(",
")",
"self",
".",
"_engine",
".",
"clear",
"(",
")",
"for",
"walker",
"in",
"self",
".",
"_queue_walkers",
":",
"walker"... | Clear all data from this sensor_log.
All readings in all walkers are skipped and buffered data is
destroyed. | [
"Clear",
"all",
"data",
"from",
"this",
"sensor_log",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilesensorgraph/iotile/sg/sensor_log.py#L297-L312 | train |
iotile/coretools | iotilesensorgraph/iotile/sg/sensor_log.py | SensorLog.push | def push(self, stream, reading):
"""Push a reading into a stream, updating any associated stream walkers.
Args:
stream (DataStream): the stream to push the reading into
reading (IOTileReading): the reading to push
"""
# Make sure the stream is correct
reading = copy.copy(reading)
reading.stream = stream.encode()
if stream.buffered:
output_buffer = stream.output
if self.id_assigner is not None:
reading.reading_id = self.id_assigner(stream, reading)
try:
self._engine.push(reading)
except StorageFullError:
# If we are in fill-stop mode, don't auto erase old data.
if (stream.output and not self._rollover_streaming) or (not stream.output and not self._rollover_storage):
raise
self._erase_buffer(stream.output)
self._engine.push(reading)
for walker in self._queue_walkers:
# Only notify the walkers that are on this queue
if walker.selector.output == output_buffer:
walker.notify_added(stream)
# Activate any monitors we have for this stream
for selector in self._monitors:
if selector is None or selector.matches(stream):
for callback in self._monitors[selector]:
callback(stream, reading)
# Virtual streams live only in their walkers, so update each walker
# that contains this stream.
for walker in self._virtual_walkers:
if walker.matches(stream):
walker.push(stream, reading)
self._last_values[stream] = reading | python | def push(self, stream, reading):
"""Push a reading into a stream, updating any associated stream walkers.
Args:
stream (DataStream): the stream to push the reading into
reading (IOTileReading): the reading to push
"""
# Make sure the stream is correct
reading = copy.copy(reading)
reading.stream = stream.encode()
if stream.buffered:
output_buffer = stream.output
if self.id_assigner is not None:
reading.reading_id = self.id_assigner(stream, reading)
try:
self._engine.push(reading)
except StorageFullError:
# If we are in fill-stop mode, don't auto erase old data.
if (stream.output and not self._rollover_streaming) or (not stream.output and not self._rollover_storage):
raise
self._erase_buffer(stream.output)
self._engine.push(reading)
for walker in self._queue_walkers:
# Only notify the walkers that are on this queue
if walker.selector.output == output_buffer:
walker.notify_added(stream)
# Activate any monitors we have for this stream
for selector in self._monitors:
if selector is None or selector.matches(stream):
for callback in self._monitors[selector]:
callback(stream, reading)
# Virtual streams live only in their walkers, so update each walker
# that contains this stream.
for walker in self._virtual_walkers:
if walker.matches(stream):
walker.push(stream, reading)
self._last_values[stream] = reading | [
"def",
"push",
"(",
"self",
",",
"stream",
",",
"reading",
")",
":",
"reading",
"=",
"copy",
".",
"copy",
"(",
"reading",
")",
"reading",
".",
"stream",
"=",
"stream",
".",
"encode",
"(",
")",
"if",
"stream",
".",
"buffered",
":",
"output_buffer",
"=... | Push a reading into a stream, updating any associated stream walkers.
Args:
stream (DataStream): the stream to push the reading into
reading (IOTileReading): the reading to push | [
"Push",
"a",
"reading",
"into",
"a",
"stream",
"updating",
"any",
"associated",
"stream",
"walkers",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilesensorgraph/iotile/sg/sensor_log.py#L314-L359 | train |
iotile/coretools | iotilesensorgraph/iotile/sg/sensor_log.py | SensorLog._erase_buffer | def _erase_buffer(self, output_buffer):
"""Erase readings in the specified buffer to make space."""
erase_size = self._model.get(u'buffer_erase_size')
buffer_type = u'storage'
if output_buffer:
buffer_type = u'streaming'
old_readings = self._engine.popn(buffer_type, erase_size)
# Now go through all of our walkers that could match and
# update their availability counts and data buffer pointers
for reading in old_readings:
stream = DataStream.FromEncoded(reading.stream)
for walker in self._queue_walkers:
# Only notify the walkers that are on this queue
if walker.selector.output == output_buffer:
walker.notify_rollover(stream) | python | def _erase_buffer(self, output_buffer):
"""Erase readings in the specified buffer to make space."""
erase_size = self._model.get(u'buffer_erase_size')
buffer_type = u'storage'
if output_buffer:
buffer_type = u'streaming'
old_readings = self._engine.popn(buffer_type, erase_size)
# Now go through all of our walkers that could match and
# update their availability counts and data buffer pointers
for reading in old_readings:
stream = DataStream.FromEncoded(reading.stream)
for walker in self._queue_walkers:
# Only notify the walkers that are on this queue
if walker.selector.output == output_buffer:
walker.notify_rollover(stream) | [
"def",
"_erase_buffer",
"(",
"self",
",",
"output_buffer",
")",
":",
"erase_size",
"=",
"self",
".",
"_model",
".",
"get",
"(",
"u'buffer_erase_size'",
")",
"buffer_type",
"=",
"u'storage'",
"if",
"output_buffer",
":",
"buffer_type",
"=",
"u'streaming'",
"old_re... | Erase readings in the specified buffer to make space. | [
"Erase",
"readings",
"in",
"the",
"specified",
"buffer",
"to",
"make",
"space",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilesensorgraph/iotile/sg/sensor_log.py#L361-L380 | train |
iotile/coretools | iotilesensorgraph/iotile/sg/sensor_log.py | SensorLog.inspect_last | def inspect_last(self, stream, only_allocated=False):
"""Return the last value pushed into a stream.
This function works even if the stream is virtual and no
virtual walker has been created for it. It is primarily
useful to aid in debugging sensor graphs.
Args:
stream (DataStream): The stream to inspect.
only_allocated (bool): Optional parameter to only allow inspection
of allocated virtual streams. This is useful for mimicking the
behavior of an embedded device that does not have a _last_values
array.
Returns:
IOTileReading: The data in the stream
Raises:
StreamEmptyError: if there has never been data written to
the stream.
UnresolvedIdentifierError: if only_allocated is True and there has not
been a virtual stream walker allocated to listen to this stream.
"""
if only_allocated:
found = False
for walker in self._virtual_walkers:
if walker.matches(stream):
found = True
break
if not found:
raise UnresolvedIdentifierError("inspect_last could not find an allocated virtual streamer for the desired stream", stream=stream)
if stream in self._last_values:
return self._last_values[stream]
raise StreamEmptyError(u"inspect_last called on stream that has never been written to", stream=stream) | python | def inspect_last(self, stream, only_allocated=False):
"""Return the last value pushed into a stream.
This function works even if the stream is virtual and no
virtual walker has been created for it. It is primarily
useful to aid in debugging sensor graphs.
Args:
stream (DataStream): The stream to inspect.
only_allocated (bool): Optional parameter to only allow inspection
of allocated virtual streams. This is useful for mimicking the
behavior of an embedded device that does not have a _last_values
array.
Returns:
IOTileReading: The data in the stream
Raises:
StreamEmptyError: if there has never been data written to
the stream.
UnresolvedIdentifierError: if only_allocated is True and there has not
been a virtual stream walker allocated to listen to this stream.
"""
if only_allocated:
found = False
for walker in self._virtual_walkers:
if walker.matches(stream):
found = True
break
if not found:
raise UnresolvedIdentifierError("inspect_last could not find an allocated virtual streamer for the desired stream", stream=stream)
if stream in self._last_values:
return self._last_values[stream]
raise StreamEmptyError(u"inspect_last called on stream that has never been written to", stream=stream) | [
"def",
"inspect_last",
"(",
"self",
",",
"stream",
",",
"only_allocated",
"=",
"False",
")",
":",
"if",
"only_allocated",
":",
"found",
"=",
"False",
"for",
"walker",
"in",
"self",
".",
"_virtual_walkers",
":",
"if",
"walker",
".",
"matches",
"(",
"stream"... | Return the last value pushed into a stream.
This function works even if the stream is virtual and no
virtual walker has been created for it. It is primarily
useful to aid in debugging sensor graphs.
Args:
stream (DataStream): The stream to inspect.
only_allocated (bool): Optional parameter to only allow inspection
of allocated virtual streams. This is useful for mimicking the
behavior of an embedded device that does not have a _last_values
array.
Returns:
IOTileReading: The data in the stream
Raises:
StreamEmptyError: if there has never been data written to
the stream.
UnresolvedIdentifierError: if only_allocated is True and there has not
been a virtual stream walker allocated to listen to this stream. | [
"Return",
"the",
"last",
"value",
"pushed",
"into",
"a",
"stream",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilesensorgraph/iotile/sg/sensor_log.py#L382-L419 | train |
iotile/coretools | iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/exitfuncs.py | _run_exitfuncs | def _run_exitfuncs():
"""run any registered exit functions
_exithandlers is traversed in reverse order so functions are executed
last in, first out.
"""
while _exithandlers:
func, targs, kargs = _exithandlers.pop()
func(*targs, **kargs) | python | def _run_exitfuncs():
"""run any registered exit functions
_exithandlers is traversed in reverse order so functions are executed
last in, first out.
"""
while _exithandlers:
func, targs, kargs = _exithandlers.pop()
func(*targs, **kargs) | [
"def",
"_run_exitfuncs",
"(",
")",
":",
"while",
"_exithandlers",
":",
"func",
",",
"targs",
",",
"kargs",
"=",
"_exithandlers",
".",
"pop",
"(",
")",
"func",
"(",
"*",
"targs",
",",
"**",
"kargs",
")"
] | run any registered exit functions
_exithandlers is traversed in reverse order so functions are executed
last in, first out. | [
"run",
"any",
"registered",
"exit",
"functions"
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/exitfuncs.py#L36-L45 | train |
iotile/coretools | iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/mslink.py | _windowsLdmodTargets | def _windowsLdmodTargets(target, source, env, for_signature):
"""Get targets for loadable modules."""
return _dllTargets(target, source, env, for_signature, 'LDMODULE') | python | def _windowsLdmodTargets(target, source, env, for_signature):
"""Get targets for loadable modules."""
return _dllTargets(target, source, env, for_signature, 'LDMODULE') | [
"def",
"_windowsLdmodTargets",
"(",
"target",
",",
"source",
",",
"env",
",",
"for_signature",
")",
":",
"return",
"_dllTargets",
"(",
"target",
",",
"source",
",",
"env",
",",
"for_signature",
",",
"'LDMODULE'",
")"
] | Get targets for loadable modules. | [
"Get",
"targets",
"for",
"loadable",
"modules",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/mslink.py#L87-L89 | train |
iotile/coretools | iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/mslink.py | _windowsLdmodSources | def _windowsLdmodSources(target, source, env, for_signature):
"""Get sources for loadable modules."""
return _dllSources(target, source, env, for_signature, 'LDMODULE') | python | def _windowsLdmodSources(target, source, env, for_signature):
"""Get sources for loadable modules."""
return _dllSources(target, source, env, for_signature, 'LDMODULE') | [
"def",
"_windowsLdmodSources",
"(",
"target",
",",
"source",
",",
"env",
",",
"for_signature",
")",
":",
"return",
"_dllSources",
"(",
"target",
",",
"source",
",",
"env",
",",
"for_signature",
",",
"'LDMODULE'",
")"
] | Get sources for loadable modules. | [
"Get",
"sources",
"for",
"loadable",
"modules",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/mslink.py#L91-L93 | train |
iotile/coretools | iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/mslink.py | _dllEmitter | def _dllEmitter(target, source, env, paramtp):
"""Common implementation of dll emitter."""
SCons.Tool.msvc.validate_vars(env)
extratargets = []
extrasources = []
dll = env.FindIxes(target, '%sPREFIX' % paramtp, '%sSUFFIX' % paramtp)
no_import_lib = env.get('no_import_lib', 0)
if not dll:
raise SCons.Errors.UserError('A shared library should have exactly one target with the suffix: %s' % env.subst('$%sSUFFIX' % paramtp))
insert_def = env.subst("$WINDOWS_INSERT_DEF")
if not insert_def in ['', '0', 0] and \
not env.FindIxes(source, "WINDOWSDEFPREFIX", "WINDOWSDEFSUFFIX"):
# append a def file to the list of sources
extrasources.append(
env.ReplaceIxes(dll,
'%sPREFIX' % paramtp, '%sSUFFIX' % paramtp,
"WINDOWSDEFPREFIX", "WINDOWSDEFSUFFIX"))
version_num, suite = SCons.Tool.msvs.msvs_parse_version(env.get('MSVS_VERSION', '6.0'))
if version_num >= 8.0 and \
(env.get('WINDOWS_INSERT_MANIFEST', 0) or env.get('WINDOWS_EMBED_MANIFEST', 0)):
# MSVC 8 and above automatically generate .manifest files that must be installed
extratargets.append(
env.ReplaceIxes(dll,
'%sPREFIX' % paramtp, '%sSUFFIX' % paramtp,
"WINDOWSSHLIBMANIFESTPREFIX", "WINDOWSSHLIBMANIFESTSUFFIX"))
if 'PDB' in env and env['PDB']:
pdb = env.arg2nodes('$PDB', target=target, source=source)[0]
extratargets.append(pdb)
target[0].attributes.pdb = pdb
if version_num >= 11.0 and env.get('PCH', 0):
# MSVC 11 and above need the PCH object file to be added to the link line,
# otherwise you get link error LNK2011.
pchobj = SCons.Util.splitext(str(env['PCH']))[0] + '.obj'
# print "prog_emitter, version %s, appending pchobj %s"%(version_num, pchobj)
if pchobj not in extrasources:
extrasources.append(pchobj)
if not no_import_lib and \
not env.FindIxes(target, "LIBPREFIX", "LIBSUFFIX"):
# Append an import library to the list of targets.
extratargets.append(
env.ReplaceIxes(dll,
'%sPREFIX' % paramtp, '%sSUFFIX' % paramtp,
"LIBPREFIX", "LIBSUFFIX"))
# and .exp file is created if there are exports from a DLL
extratargets.append(
env.ReplaceIxes(dll,
'%sPREFIX' % paramtp, '%sSUFFIX' % paramtp,
"WINDOWSEXPPREFIX", "WINDOWSEXPSUFFIX"))
return (target+extratargets, source+extrasources) | python | def _dllEmitter(target, source, env, paramtp):
"""Common implementation of dll emitter."""
SCons.Tool.msvc.validate_vars(env)
extratargets = []
extrasources = []
dll = env.FindIxes(target, '%sPREFIX' % paramtp, '%sSUFFIX' % paramtp)
no_import_lib = env.get('no_import_lib', 0)
if not dll:
raise SCons.Errors.UserError('A shared library should have exactly one target with the suffix: %s' % env.subst('$%sSUFFIX' % paramtp))
insert_def = env.subst("$WINDOWS_INSERT_DEF")
if not insert_def in ['', '0', 0] and \
not env.FindIxes(source, "WINDOWSDEFPREFIX", "WINDOWSDEFSUFFIX"):
# append a def file to the list of sources
extrasources.append(
env.ReplaceIxes(dll,
'%sPREFIX' % paramtp, '%sSUFFIX' % paramtp,
"WINDOWSDEFPREFIX", "WINDOWSDEFSUFFIX"))
version_num, suite = SCons.Tool.msvs.msvs_parse_version(env.get('MSVS_VERSION', '6.0'))
if version_num >= 8.0 and \
(env.get('WINDOWS_INSERT_MANIFEST', 0) or env.get('WINDOWS_EMBED_MANIFEST', 0)):
# MSVC 8 and above automatically generate .manifest files that must be installed
extratargets.append(
env.ReplaceIxes(dll,
'%sPREFIX' % paramtp, '%sSUFFIX' % paramtp,
"WINDOWSSHLIBMANIFESTPREFIX", "WINDOWSSHLIBMANIFESTSUFFIX"))
if 'PDB' in env and env['PDB']:
pdb = env.arg2nodes('$PDB', target=target, source=source)[0]
extratargets.append(pdb)
target[0].attributes.pdb = pdb
if version_num >= 11.0 and env.get('PCH', 0):
# MSVC 11 and above need the PCH object file to be added to the link line,
# otherwise you get link error LNK2011.
pchobj = SCons.Util.splitext(str(env['PCH']))[0] + '.obj'
# print "prog_emitter, version %s, appending pchobj %s"%(version_num, pchobj)
if pchobj not in extrasources:
extrasources.append(pchobj)
if not no_import_lib and \
not env.FindIxes(target, "LIBPREFIX", "LIBSUFFIX"):
# Append an import library to the list of targets.
extratargets.append(
env.ReplaceIxes(dll,
'%sPREFIX' % paramtp, '%sSUFFIX' % paramtp,
"LIBPREFIX", "LIBSUFFIX"))
# and .exp file is created if there are exports from a DLL
extratargets.append(
env.ReplaceIxes(dll,
'%sPREFIX' % paramtp, '%sSUFFIX' % paramtp,
"WINDOWSEXPPREFIX", "WINDOWSEXPSUFFIX"))
return (target+extratargets, source+extrasources) | [
"def",
"_dllEmitter",
"(",
"target",
",",
"source",
",",
"env",
",",
"paramtp",
")",
":",
"SCons",
".",
"Tool",
".",
"msvc",
".",
"validate_vars",
"(",
"env",
")",
"extratargets",
"=",
"[",
"]",
"extrasources",
"=",
"[",
"]",
"dll",
"=",
"env",
".",
... | Common implementation of dll emitter. | [
"Common",
"implementation",
"of",
"dll",
"emitter",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/mslink.py#L95-L153 | train |
iotile/coretools | iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/mslink.py | embedManifestDllCheck | def embedManifestDllCheck(target, source, env):
"""Function run by embedManifestDllCheckAction to check for existence of manifest
and other conditions, and embed the manifest by calling embedManifestDllAction if so."""
if env.get('WINDOWS_EMBED_MANIFEST', 0):
manifestSrc = target[0].get_abspath() + '.manifest'
if os.path.exists(manifestSrc):
ret = (embedManifestDllAction) ([target[0]],None,env)
if ret:
raise SCons.Errors.UserError("Unable to embed manifest into %s" % (target[0]))
return ret
else:
print('(embed: no %s.manifest found; not embedding.)'%str(target[0]))
return 0 | python | def embedManifestDllCheck(target, source, env):
"""Function run by embedManifestDllCheckAction to check for existence of manifest
and other conditions, and embed the manifest by calling embedManifestDllAction if so."""
if env.get('WINDOWS_EMBED_MANIFEST', 0):
manifestSrc = target[0].get_abspath() + '.manifest'
if os.path.exists(manifestSrc):
ret = (embedManifestDllAction) ([target[0]],None,env)
if ret:
raise SCons.Errors.UserError("Unable to embed manifest into %s" % (target[0]))
return ret
else:
print('(embed: no %s.manifest found; not embedding.)'%str(target[0]))
return 0 | [
"def",
"embedManifestDllCheck",
"(",
"target",
",",
"source",
",",
"env",
")",
":",
"if",
"env",
".",
"get",
"(",
"'WINDOWS_EMBED_MANIFEST'",
",",
"0",
")",
":",
"manifestSrc",
"=",
"target",
"[",
"0",
"]",
".",
"get_abspath",
"(",
")",
"+",
"'.manifest'... | Function run by embedManifestDllCheckAction to check for existence of manifest
and other conditions, and embed the manifest by calling embedManifestDllAction if so. | [
"Function",
"run",
"by",
"embedManifestDllCheckAction",
"to",
"check",
"for",
"existence",
"of",
"manifest",
"and",
"other",
"conditions",
"and",
"embed",
"the",
"manifest",
"by",
"calling",
"embedManifestDllAction",
"if",
"so",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/mslink.py#L215-L227 | train |
iotile/coretools | iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/mslink.py | embedManifestExeCheck | def embedManifestExeCheck(target, source, env):
"""Function run by embedManifestExeCheckAction to check for existence of manifest
and other conditions, and embed the manifest by calling embedManifestExeAction if so."""
if env.get('WINDOWS_EMBED_MANIFEST', 0):
manifestSrc = target[0].get_abspath() + '.manifest'
if os.path.exists(manifestSrc):
ret = (embedManifestExeAction) ([target[0]],None,env)
if ret:
raise SCons.Errors.UserError("Unable to embed manifest into %s" % (target[0]))
return ret
else:
print('(embed: no %s.manifest found; not embedding.)'%str(target[0]))
return 0 | python | def embedManifestExeCheck(target, source, env):
"""Function run by embedManifestExeCheckAction to check for existence of manifest
and other conditions, and embed the manifest by calling embedManifestExeAction if so."""
if env.get('WINDOWS_EMBED_MANIFEST', 0):
manifestSrc = target[0].get_abspath() + '.manifest'
if os.path.exists(manifestSrc):
ret = (embedManifestExeAction) ([target[0]],None,env)
if ret:
raise SCons.Errors.UserError("Unable to embed manifest into %s" % (target[0]))
return ret
else:
print('(embed: no %s.manifest found; not embedding.)'%str(target[0]))
return 0 | [
"def",
"embedManifestExeCheck",
"(",
"target",
",",
"source",
",",
"env",
")",
":",
"if",
"env",
".",
"get",
"(",
"'WINDOWS_EMBED_MANIFEST'",
",",
"0",
")",
":",
"manifestSrc",
"=",
"target",
"[",
"0",
"]",
".",
"get_abspath",
"(",
")",
"+",
"'.manifest'... | Function run by embedManifestExeCheckAction to check for existence of manifest
and other conditions, and embed the manifest by calling embedManifestExeAction if so. | [
"Function",
"run",
"by",
"embedManifestExeCheckAction",
"to",
"check",
"for",
"existence",
"of",
"manifest",
"and",
"other",
"conditions",
"and",
"embed",
"the",
"manifest",
"by",
"calling",
"embedManifestExeAction",
"if",
"so",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/mslink.py#L229-L241 | train |
iotile/coretools | iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/dvips.py | generate | def generate(env):
"""Add Builders and construction variables for dvips to an Environment."""
global PSAction
if PSAction is None:
PSAction = SCons.Action.Action('$PSCOM', '$PSCOMSTR')
global DVIPSAction
if DVIPSAction is None:
DVIPSAction = SCons.Action.Action(DviPsFunction, strfunction = DviPsStrFunction)
global PSBuilder
if PSBuilder is None:
PSBuilder = SCons.Builder.Builder(action = PSAction,
prefix = '$PSPREFIX',
suffix = '$PSSUFFIX',
src_suffix = '.dvi',
src_builder = 'DVI',
single_source=True)
env['BUILDERS']['PostScript'] = PSBuilder
env['DVIPS'] = 'dvips'
env['DVIPSFLAGS'] = SCons.Util.CLVar('')
# I'm not quite sure I got the directories and filenames right for variant_dir
# We need to be in the correct directory for the sake of latex \includegraphics eps included files.
env['PSCOM'] = 'cd ${TARGET.dir} && $DVIPS $DVIPSFLAGS -o ${TARGET.file} ${SOURCE.file}'
env['PSPREFIX'] = ''
env['PSSUFFIX'] = '.ps' | python | def generate(env):
"""Add Builders and construction variables for dvips to an Environment."""
global PSAction
if PSAction is None:
PSAction = SCons.Action.Action('$PSCOM', '$PSCOMSTR')
global DVIPSAction
if DVIPSAction is None:
DVIPSAction = SCons.Action.Action(DviPsFunction, strfunction = DviPsStrFunction)
global PSBuilder
if PSBuilder is None:
PSBuilder = SCons.Builder.Builder(action = PSAction,
prefix = '$PSPREFIX',
suffix = '$PSSUFFIX',
src_suffix = '.dvi',
src_builder = 'DVI',
single_source=True)
env['BUILDERS']['PostScript'] = PSBuilder
env['DVIPS'] = 'dvips'
env['DVIPSFLAGS'] = SCons.Util.CLVar('')
# I'm not quite sure I got the directories and filenames right for variant_dir
# We need to be in the correct directory for the sake of latex \includegraphics eps included files.
env['PSCOM'] = 'cd ${TARGET.dir} && $DVIPS $DVIPSFLAGS -o ${TARGET.file} ${SOURCE.file}'
env['PSPREFIX'] = ''
env['PSSUFFIX'] = '.ps' | [
"def",
"generate",
"(",
"env",
")",
":",
"global",
"PSAction",
"if",
"PSAction",
"is",
"None",
":",
"PSAction",
"=",
"SCons",
".",
"Action",
".",
"Action",
"(",
"'$PSCOM'",
",",
"'$PSCOMSTR'",
")",
"global",
"DVIPSAction",
"if",
"DVIPSAction",
"is",
"None"... | Add Builders and construction variables for dvips to an Environment. | [
"Add",
"Builders",
"and",
"construction",
"variables",
"for",
"dvips",
"to",
"an",
"Environment",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/dvips.py#L58-L85 | train |
iotile/coretools | iotilebuild/iotile/build/config/site_scons/arm.py | build_library | def build_library(tile, libname, chip):
"""Build a static ARM cortex library"""
dirs = chip.build_dirs()
output_name = '%s_%s.a' % (libname, chip.arch_name())
# Support both firmware/src and just src locations for source code
if os.path.exists('firmware'):
VariantDir(dirs['build'], os.path.join('firmware', 'src'), duplicate=0)
else:
VariantDir(dirs['build'], 'src', duplicate=0)
library_env = setup_environment(chip)
library_env['OUTPUT'] = output_name
library_env['OUTPUT_PATH'] = os.path.join(dirs['build'], output_name)
library_env['BUILD_DIR'] = dirs['build']
# Check for any dependencies this library has
tilebus_defs = setup_dependencies(tile, library_env)
# Create header files for all tilebus config variables and commands that are defined in ourselves
# or in our dependencies
tilebus_defs += tile.find_products('tilebus_definitions')
compile_tilebus(tilebus_defs, library_env, header_only=True)
SConscript(os.path.join(dirs['build'], 'SConscript'), exports='library_env')
library_env.InstallAs(os.path.join(dirs['output'], output_name), os.path.join(dirs['build'], output_name))
# See if we should copy any files over to the output:
for src, dst in chip.property('copy_files', []):
srcpath = os.path.join(*src)
destpath = os.path.join(dirs['output'], dst)
library_env.InstallAs(destpath, srcpath)
return os.path.join(dirs['output'], output_name) | python | def build_library(tile, libname, chip):
"""Build a static ARM cortex library"""
dirs = chip.build_dirs()
output_name = '%s_%s.a' % (libname, chip.arch_name())
# Support both firmware/src and just src locations for source code
if os.path.exists('firmware'):
VariantDir(dirs['build'], os.path.join('firmware', 'src'), duplicate=0)
else:
VariantDir(dirs['build'], 'src', duplicate=0)
library_env = setup_environment(chip)
library_env['OUTPUT'] = output_name
library_env['OUTPUT_PATH'] = os.path.join(dirs['build'], output_name)
library_env['BUILD_DIR'] = dirs['build']
# Check for any dependencies this library has
tilebus_defs = setup_dependencies(tile, library_env)
# Create header files for all tilebus config variables and commands that are defined in ourselves
# or in our dependencies
tilebus_defs += tile.find_products('tilebus_definitions')
compile_tilebus(tilebus_defs, library_env, header_only=True)
SConscript(os.path.join(dirs['build'], 'SConscript'), exports='library_env')
library_env.InstallAs(os.path.join(dirs['output'], output_name), os.path.join(dirs['build'], output_name))
# See if we should copy any files over to the output:
for src, dst in chip.property('copy_files', []):
srcpath = os.path.join(*src)
destpath = os.path.join(dirs['output'], dst)
library_env.InstallAs(destpath, srcpath)
return os.path.join(dirs['output'], output_name) | [
"def",
"build_library",
"(",
"tile",
",",
"libname",
",",
"chip",
")",
":",
"dirs",
"=",
"chip",
".",
"build_dirs",
"(",
")",
"output_name",
"=",
"'%s_%s.a'",
"%",
"(",
"libname",
",",
"chip",
".",
"arch_name",
"(",
")",
")",
"if",
"os",
".",
"path",... | Build a static ARM cortex library | [
"Build",
"a",
"static",
"ARM",
"cortex",
"library"
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/site_scons/arm.py#L129-L165 | train |
iotile/coretools | iotilebuild/iotile/build/config/site_scons/arm.py | setup_environment | def setup_environment(chip, args_file=None):
"""Setup the SCons environment for compiling arm cortex code.
This will return an env that has all of the correct settings and create a
command line arguments file for GCC that contains all of the required
flags. The use of a command line argument file passed with @./file_path is
important since there can be many flags that exceed the maximum allowed length
of a command line on Windows.
"""
config = ConfigManager()
# Make sure we never get MSVC settings for windows since that has the wrong command line flags for gcc
if platform.system() == 'Windows':
env = Environment(tools=['mingw'], ENV=os.environ)
else:
env = Environment(tools=['default'], ENV=os.environ)
env['INCPREFIX'] = '-I"'
env['INCSUFFIX'] = '"'
env['CPPDEFPREFIX'] = ''
env['CPPDEFSUFFIX'] = ''
env['CPPPATH'] = chip.includes()
env['ARCH'] = chip
# Setup Cross Compiler
env['CC'] = 'arm-none-eabi-gcc'
env['AS'] = 'arm-none-eabi-gcc'
env['LINK'] = 'arm-none-eabi-gcc'
env['AR'] = 'arm-none-eabi-ar'
env['RANLIB'] = 'arm-none-eabi-ranlib'
# AS command line is by default setup for call as directly so we need
# to modify it to call via *-gcc to allow for preprocessing
env['ASCOM'] = "$AS $ASFLAGS -o $TARGET -c $SOURCES"
# Setup nice display strings unless we're asked to show raw commands
if not config.get('build:show-commands'):
env['CCCOMSTR'] = "Compiling $TARGET"
env['ARCOMSTR'] = "Building static library $TARGET"
env['RANLIBCOMSTR'] = "Indexing static library $TARGET"
env['LINKCOMSTR'] = "Linking $TARGET"
# Setup Compiler Flags
env['CCFLAGS'] = chip.combined_properties('cflags')
env['LINKFLAGS'] = chip.combined_properties('ldflags')
env['ARFLAGS'].append(chip.combined_properties('arflags')) # There are default ARFLAGS that are necessary to keep
env['ASFLAGS'].append(chip.combined_properties('asflags'))
# Add in compile tile definitions
defines = utilities.build_defines(chip.property('defines', {}))
env['CPPDEFINES'] = defines
if args_file is not None:
env['CCCOM'] = "$CC $CCFLAGS $CPPFLAGS @{} -c -o $TARGET $SOURCES".format(args_file)
# Setup Target Architecture
env['CCFLAGS'].append('-mcpu=%s' % chip.property('cpu'))
env['ASFLAGS'].append('-mcpu=%s' % chip.property('cpu'))
env['LINKFLAGS'].append('-mcpu=%s' % chip.property('cpu'))
# Initialize library paths (all libraries are added via dependencies)
env['LIBPATH'] = []
env['LIBS'] = []
return env | python | def setup_environment(chip, args_file=None):
"""Setup the SCons environment for compiling arm cortex code.
This will return an env that has all of the correct settings and create a
command line arguments file for GCC that contains all of the required
flags. The use of a command line argument file passed with @./file_path is
important since there can be many flags that exceed the maximum allowed length
of a command line on Windows.
"""
config = ConfigManager()
# Make sure we never get MSVC settings for windows since that has the wrong command line flags for gcc
if platform.system() == 'Windows':
env = Environment(tools=['mingw'], ENV=os.environ)
else:
env = Environment(tools=['default'], ENV=os.environ)
env['INCPREFIX'] = '-I"'
env['INCSUFFIX'] = '"'
env['CPPDEFPREFIX'] = ''
env['CPPDEFSUFFIX'] = ''
env['CPPPATH'] = chip.includes()
env['ARCH'] = chip
# Setup Cross Compiler
env['CC'] = 'arm-none-eabi-gcc'
env['AS'] = 'arm-none-eabi-gcc'
env['LINK'] = 'arm-none-eabi-gcc'
env['AR'] = 'arm-none-eabi-ar'
env['RANLIB'] = 'arm-none-eabi-ranlib'
# AS command line is by default setup for call as directly so we need
# to modify it to call via *-gcc to allow for preprocessing
env['ASCOM'] = "$AS $ASFLAGS -o $TARGET -c $SOURCES"
# Setup nice display strings unless we're asked to show raw commands
if not config.get('build:show-commands'):
env['CCCOMSTR'] = "Compiling $TARGET"
env['ARCOMSTR'] = "Building static library $TARGET"
env['RANLIBCOMSTR'] = "Indexing static library $TARGET"
env['LINKCOMSTR'] = "Linking $TARGET"
# Setup Compiler Flags
env['CCFLAGS'] = chip.combined_properties('cflags')
env['LINKFLAGS'] = chip.combined_properties('ldflags')
env['ARFLAGS'].append(chip.combined_properties('arflags')) # There are default ARFLAGS that are necessary to keep
env['ASFLAGS'].append(chip.combined_properties('asflags'))
# Add in compile tile definitions
defines = utilities.build_defines(chip.property('defines', {}))
env['CPPDEFINES'] = defines
if args_file is not None:
env['CCCOM'] = "$CC $CCFLAGS $CPPFLAGS @{} -c -o $TARGET $SOURCES".format(args_file)
# Setup Target Architecture
env['CCFLAGS'].append('-mcpu=%s' % chip.property('cpu'))
env['ASFLAGS'].append('-mcpu=%s' % chip.property('cpu'))
env['LINKFLAGS'].append('-mcpu=%s' % chip.property('cpu'))
# Initialize library paths (all libraries are added via dependencies)
env['LIBPATH'] = []
env['LIBS'] = []
return env | [
"def",
"setup_environment",
"(",
"chip",
",",
"args_file",
"=",
"None",
")",
":",
"config",
"=",
"ConfigManager",
"(",
")",
"if",
"platform",
".",
"system",
"(",
")",
"==",
"'Windows'",
":",
"env",
"=",
"Environment",
"(",
"tools",
"=",
"[",
"'mingw'",
... | Setup the SCons environment for compiling arm cortex code.
This will return an env that has all of the correct settings and create a
command line arguments file for GCC that contains all of the required
flags. The use of a command line argument file passed with @./file_path is
important since there can be many flags that exceed the maximum allowed length
of a command line on Windows. | [
"Setup",
"the",
"SCons",
"environment",
"for",
"compiling",
"arm",
"cortex",
"code",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/site_scons/arm.py#L168-L234 | train |
iotile/coretools | iotilebuild/iotile/build/config/site_scons/arm.py | tb_h_file_creation | def tb_h_file_creation(target, source, env):
"""Compile tilebus file into only .h files corresponding to config variables for inclusion in a library"""
files = [str(x) for x in source]
try:
desc = TBDescriptor(files)
except pyparsing.ParseException as e:
raise BuildError("Could not parse tilebus file", parsing_exception=e)
block = desc.get_block(config_only=True)
block.render_template(block.CommandHeaderTemplate, out_path=str(target[0]))
block.render_template(block.ConfigHeaderTemplate, out_path=str(target[1])) | python | def tb_h_file_creation(target, source, env):
"""Compile tilebus file into only .h files corresponding to config variables for inclusion in a library"""
files = [str(x) for x in source]
try:
desc = TBDescriptor(files)
except pyparsing.ParseException as e:
raise BuildError("Could not parse tilebus file", parsing_exception=e)
block = desc.get_block(config_only=True)
block.render_template(block.CommandHeaderTemplate, out_path=str(target[0]))
block.render_template(block.ConfigHeaderTemplate, out_path=str(target[1])) | [
"def",
"tb_h_file_creation",
"(",
"target",
",",
"source",
",",
"env",
")",
":",
"files",
"=",
"[",
"str",
"(",
"x",
")",
"for",
"x",
"in",
"source",
"]",
"try",
":",
"desc",
"=",
"TBDescriptor",
"(",
"files",
")",
"except",
"pyparsing",
".",
"ParseE... | Compile tilebus file into only .h files corresponding to config variables for inclusion in a library | [
"Compile",
"tilebus",
"file",
"into",
"only",
".",
"h",
"files",
"corresponding",
"to",
"config",
"variables",
"for",
"inclusion",
"in",
"a",
"library"
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/site_scons/arm.py#L316-L328 | train |
iotile/coretools | iotilebuild/iotile/build/config/site_scons/arm.py | checksum_creation_action | def checksum_creation_action(target, source, env):
"""Create a linker command file for patching an application checksum into a firmware image"""
# Important Notes:
# There are apparently many ways to calculate a CRC-32 checksum, we use the following options
# Initial seed value prepended to the input: 0xFFFFFFFF
# Whether the input is fed into the shift register least-significant bit or most-significant bit first: LSB
# Whether each data word is inverted: No
# Whether the final CRC value is inverted: No
# *These settings must agree between the executive and this function*
import crcmod
crc32_func = crcmod.mkCrcFun(0x104C11DB7, initCrc=0xFFFFFFFF, rev=False, xorOut=0)
with open(str(source[0]), 'rb') as f:
data = f.read()
# Ignore the last four bytes of the file since that is where the checksum will go
data = data[:-4]
# Make sure the magic number is correct so that we're dealing with an actual firmware image
magicbin = data[-4:]
magic, = struct.unpack('<L', magicbin)
if magic != 0xBAADDAAD:
raise BuildError("Attempting to patch a file that is not a CDB binary or has the wrong size", reason="invalid magic number found", actual_magic=magic, desired_magic=0xBAADDAAD)
# Calculate CRC32 in the same way as its done in the target microcontroller
checksum = crc32_func(data) & 0xFFFFFFFF
with open(str(target[0]), 'w') as f:
# hex strings end with L on windows and possibly some other systems
checkhex = hex(checksum)
if checkhex[-1] == 'L':
checkhex = checkhex[:-1]
f.write("--defsym=__image_checksum=%s\n" % checkhex) | python | def checksum_creation_action(target, source, env):
"""Create a linker command file for patching an application checksum into a firmware image"""
# Important Notes:
# There are apparently many ways to calculate a CRC-32 checksum, we use the following options
# Initial seed value prepended to the input: 0xFFFFFFFF
# Whether the input is fed into the shift register least-significant bit or most-significant bit first: LSB
# Whether each data word is inverted: No
# Whether the final CRC value is inverted: No
# *These settings must agree between the executive and this function*
import crcmod
crc32_func = crcmod.mkCrcFun(0x104C11DB7, initCrc=0xFFFFFFFF, rev=False, xorOut=0)
with open(str(source[0]), 'rb') as f:
data = f.read()
# Ignore the last four bytes of the file since that is where the checksum will go
data = data[:-4]
# Make sure the magic number is correct so that we're dealing with an actual firmware image
magicbin = data[-4:]
magic, = struct.unpack('<L', magicbin)
if magic != 0xBAADDAAD:
raise BuildError("Attempting to patch a file that is not a CDB binary or has the wrong size", reason="invalid magic number found", actual_magic=magic, desired_magic=0xBAADDAAD)
# Calculate CRC32 in the same way as its done in the target microcontroller
checksum = crc32_func(data) & 0xFFFFFFFF
with open(str(target[0]), 'w') as f:
# hex strings end with L on windows and possibly some other systems
checkhex = hex(checksum)
if checkhex[-1] == 'L':
checkhex = checkhex[:-1]
f.write("--defsym=__image_checksum=%s\n" % checkhex) | [
"def",
"checksum_creation_action",
"(",
"target",
",",
"source",
",",
"env",
")",
":",
"import",
"crcmod",
"crc32_func",
"=",
"crcmod",
".",
"mkCrcFun",
"(",
"0x104C11DB7",
",",
"initCrc",
"=",
"0xFFFFFFFF",
",",
"rev",
"=",
"False",
",",
"xorOut",
"=",
"0... | Create a linker command file for patching an application checksum into a firmware image | [
"Create",
"a",
"linker",
"command",
"file",
"for",
"patching",
"an",
"application",
"checksum",
"into",
"a",
"firmware",
"image"
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/site_scons/arm.py#L331-L367 | train |
iotile/coretools | iotilebuild/iotile/build/config/site_scons/arm.py | create_arg_file | def create_arg_file(target, source, env):
"""Create an argument file containing -I and -D arguments to gcc.
This file will be passed to gcc using @<path>.
"""
output_name = str(target[0])
with open(output_name, "w") as outfile:
for define in env.get('CPPDEFINES', []):
outfile.write(define + '\n')
include_folders = target[0].RDirs(tuple(env.get('CPPPATH', [])))
include_folders.append('.')
for include_folder in include_folders:
include_folder = str(include_folder)
if not include_folder.startswith('build'):
include_folder = os.path.join('firmware', 'src', include_folder)
outfile.write('"-I{}"\n'.format(include_folder.replace('\\', '\\\\'))) | python | def create_arg_file(target, source, env):
"""Create an argument file containing -I and -D arguments to gcc.
This file will be passed to gcc using @<path>.
"""
output_name = str(target[0])
with open(output_name, "w") as outfile:
for define in env.get('CPPDEFINES', []):
outfile.write(define + '\n')
include_folders = target[0].RDirs(tuple(env.get('CPPPATH', [])))
include_folders.append('.')
for include_folder in include_folders:
include_folder = str(include_folder)
if not include_folder.startswith('build'):
include_folder = os.path.join('firmware', 'src', include_folder)
outfile.write('"-I{}"\n'.format(include_folder.replace('\\', '\\\\'))) | [
"def",
"create_arg_file",
"(",
"target",
",",
"source",
",",
"env",
")",
":",
"output_name",
"=",
"str",
"(",
"target",
"[",
"0",
"]",
")",
"with",
"open",
"(",
"output_name",
",",
"\"w\"",
")",
"as",
"outfile",
":",
"for",
"define",
"in",
"env",
"."... | Create an argument file containing -I and -D arguments to gcc.
This file will be passed to gcc using @<path>. | [
"Create",
"an",
"argument",
"file",
"containing",
"-",
"I",
"and",
"-",
"D",
"arguments",
"to",
"gcc",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/site_scons/arm.py#L370-L391 | train |
iotile/coretools | iotilebuild/iotile/build/config/site_scons/arm.py | merge_hex_executables | def merge_hex_executables(target, source, env):
"""Combine all hex files into a singular executable file."""
output_name = str(target[0])
hex_final = IntelHex()
for image in source:
file = str(image)
root, ext = os.path.splitext(file)
file_format = ext[1:]
if file_format == 'elf':
file = root + '.hex'
hex_data = IntelHex(file)
# merge will throw errors on mismatched Start Segment Addresses, which we don't need
# See <https://stackoverflow.com/questions/26295776/what-are-the-intel-hex-records-type-03-or-05-doing-in-ihex-program-for-arm>
hex_data.start_addr = None
hex_final.merge(hex_data, overlap='error')
with open(output_name, 'w') as outfile:
hex_final.write_hex_file(outfile) | python | def merge_hex_executables(target, source, env):
"""Combine all hex files into a singular executable file."""
output_name = str(target[0])
hex_final = IntelHex()
for image in source:
file = str(image)
root, ext = os.path.splitext(file)
file_format = ext[1:]
if file_format == 'elf':
file = root + '.hex'
hex_data = IntelHex(file)
# merge will throw errors on mismatched Start Segment Addresses, which we don't need
# See <https://stackoverflow.com/questions/26295776/what-are-the-intel-hex-records-type-03-or-05-doing-in-ihex-program-for-arm>
hex_data.start_addr = None
hex_final.merge(hex_data, overlap='error')
with open(output_name, 'w') as outfile:
hex_final.write_hex_file(outfile) | [
"def",
"merge_hex_executables",
"(",
"target",
",",
"source",
",",
"env",
")",
":",
"output_name",
"=",
"str",
"(",
"target",
"[",
"0",
"]",
")",
"hex_final",
"=",
"IntelHex",
"(",
")",
"for",
"image",
"in",
"source",
":",
"file",
"=",
"str",
"(",
"i... | Combine all hex files into a singular executable file. | [
"Combine",
"all",
"hex",
"files",
"into",
"a",
"singular",
"executable",
"file",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/site_scons/arm.py#L394-L413 | train |
iotile/coretools | iotilebuild/iotile/build/config/site_scons/arm.py | ensure_image_is_hex | def ensure_image_is_hex(input_path):
"""Return a path to a hex version of a firmware image.
If the input file is already in hex format then input_path
is returned and nothing is done. If it is not in hex format
then an SCons action is added to convert it to hex and the
target output file path is returned.
A cache is kept so that each file is only converted once.
Args:
input_path (str): A path to a firmware image.
Returns:
str: The path to a hex version of input_path, this may
be equal to input_path if it is already in hex format.
"""
family = utilities.get_family('module_settings.json')
target = family.platform_independent_target()
build_dir = target.build_dirs()['build']
if platform.system() == 'Windows':
env = Environment(tools=['mingw'], ENV=os.environ)
else:
env = Environment(tools=['default'], ENV=os.environ)
input_path = str(input_path)
image_name = os.path.basename(input_path)
root, ext = os.path.splitext(image_name)
if len(ext) == 0:
raise BuildError("Unknown file format or missing file extension in ensure_image_is_hex", file_name=input_path)
file_format = ext[1:]
if file_format == 'hex':
return input_path
if file_format == 'elf':
new_file = os.path.join(build_dir, root + '.hex')
if new_file not in CONVERTED_HEX_FILES:
env.Command(new_file, input_path, action=Action("arm-none-eabi-objcopy -O ihex $SOURCE $TARGET",
"Creating intel hex file from: $SOURCE"))
CONVERTED_HEX_FILES.add(new_file)
return new_file
raise BuildError("Unknown file format extension in ensure_image_is_hex",
file_name=input_path, extension=file_format) | python | def ensure_image_is_hex(input_path):
"""Return a path to a hex version of a firmware image.
If the input file is already in hex format then input_path
is returned and nothing is done. If it is not in hex format
then an SCons action is added to convert it to hex and the
target output file path is returned.
A cache is kept so that each file is only converted once.
Args:
input_path (str): A path to a firmware image.
Returns:
str: The path to a hex version of input_path, this may
be equal to input_path if it is already in hex format.
"""
family = utilities.get_family('module_settings.json')
target = family.platform_independent_target()
build_dir = target.build_dirs()['build']
if platform.system() == 'Windows':
env = Environment(tools=['mingw'], ENV=os.environ)
else:
env = Environment(tools=['default'], ENV=os.environ)
input_path = str(input_path)
image_name = os.path.basename(input_path)
root, ext = os.path.splitext(image_name)
if len(ext) == 0:
raise BuildError("Unknown file format or missing file extension in ensure_image_is_hex", file_name=input_path)
file_format = ext[1:]
if file_format == 'hex':
return input_path
if file_format == 'elf':
new_file = os.path.join(build_dir, root + '.hex')
if new_file not in CONVERTED_HEX_FILES:
env.Command(new_file, input_path, action=Action("arm-none-eabi-objcopy -O ihex $SOURCE $TARGET",
"Creating intel hex file from: $SOURCE"))
CONVERTED_HEX_FILES.add(new_file)
return new_file
raise BuildError("Unknown file format extension in ensure_image_is_hex",
file_name=input_path, extension=file_format) | [
"def",
"ensure_image_is_hex",
"(",
"input_path",
")",
":",
"family",
"=",
"utilities",
".",
"get_family",
"(",
"'module_settings.json'",
")",
"target",
"=",
"family",
".",
"platform_independent_target",
"(",
")",
"build_dir",
"=",
"target",
".",
"build_dirs",
"(",... | Return a path to a hex version of a firmware image.
If the input file is already in hex format then input_path
is returned and nothing is done. If it is not in hex format
then an SCons action is added to convert it to hex and the
target output file path is returned.
A cache is kept so that each file is only converted once.
Args:
input_path (str): A path to a firmware image.
Returns:
str: The path to a hex version of input_path, this may
be equal to input_path if it is already in hex format. | [
"Return",
"a",
"path",
"to",
"a",
"hex",
"version",
"of",
"a",
"firmware",
"image",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/site_scons/arm.py#L419-L469 | train |
iotile/coretools | iotileemulate/iotile/emulate/virtual/emulated_device.py | EmulatedDevice._dispatch_rpc | def _dispatch_rpc(self, address, rpc_id, arg_payload):
"""Background work queue handler to dispatch RPCs."""
if self.emulator.is_tile_busy(address):
self._track_change('device.rpc_busy_response', (address, rpc_id, arg_payload, None, None), formatter=format_rpc)
raise BusyRPCResponse()
try:
# Send the RPC immediately and wait for the response
resp = super(EmulatedDevice, self).call_rpc(address, rpc_id, arg_payload)
self._track_change('device.rpc_sent', (address, rpc_id, arg_payload, resp, None), formatter=format_rpc)
return resp
except AsynchronousRPCResponse:
self._track_change('device.rpc_started', (address, rpc_id, arg_payload, None, None), formatter=format_rpc)
raise
except Exception as exc:
self._track_change('device.rpc_exception', (address, rpc_id, arg_payload, None, exc), formatter=format_rpc)
raise | python | def _dispatch_rpc(self, address, rpc_id, arg_payload):
"""Background work queue handler to dispatch RPCs."""
if self.emulator.is_tile_busy(address):
self._track_change('device.rpc_busy_response', (address, rpc_id, arg_payload, None, None), formatter=format_rpc)
raise BusyRPCResponse()
try:
# Send the RPC immediately and wait for the response
resp = super(EmulatedDevice, self).call_rpc(address, rpc_id, arg_payload)
self._track_change('device.rpc_sent', (address, rpc_id, arg_payload, resp, None), formatter=format_rpc)
return resp
except AsynchronousRPCResponse:
self._track_change('device.rpc_started', (address, rpc_id, arg_payload, None, None), formatter=format_rpc)
raise
except Exception as exc:
self._track_change('device.rpc_exception', (address, rpc_id, arg_payload, None, exc), formatter=format_rpc)
raise | [
"def",
"_dispatch_rpc",
"(",
"self",
",",
"address",
",",
"rpc_id",
",",
"arg_payload",
")",
":",
"if",
"self",
".",
"emulator",
".",
"is_tile_busy",
"(",
"address",
")",
":",
"self",
".",
"_track_change",
"(",
"'device.rpc_busy_response'",
",",
"(",
"addres... | Background work queue handler to dispatch RPCs. | [
"Background",
"work",
"queue",
"handler",
"to",
"dispatch",
"RPCs",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotileemulate/iotile/emulate/virtual/emulated_device.py#L46-L64 | train |
iotile/coretools | iotileemulate/iotile/emulate/virtual/emulated_device.py | EmulatedDevice.rpc | def rpc(self, address, rpc_id, *args, **kwargs):
"""Immediately dispatch an RPC inside this EmulatedDevice.
This function is meant to be used for testing purposes as well as by
tiles inside a complex EmulatedDevice subclass that need to
communicate with each other. It should only be called from the main
virtual device thread where start() was called from.
**Background workers may not call this method since it may cause them to deadlock.**
Args:
address (int): The address of the tile that has the RPC.
rpc_id (int): The 16-bit id of the rpc we want to call
*args: Any required arguments for the RPC as python objects.
**kwargs: Only two keyword arguments are supported:
- arg_format: A format specifier for the argument list
- result_format: A format specifier for the result
Returns:
list: A list of the decoded response members from the RPC.
"""
if isinstance(rpc_id, RPCDeclaration):
arg_format = rpc_id.arg_format
resp_format = rpc_id.resp_format
rpc_id = rpc_id.rpc_id
else:
arg_format = kwargs.get('arg_format', None)
resp_format = kwargs.get('resp_format', None)
arg_payload = b''
if arg_format is not None:
arg_payload = pack_rpc_payload(arg_format, args)
self._logger.debug("Sending rpc to %d:%04X, payload=%s", address, rpc_id, args)
resp_payload = self.call_rpc(address, rpc_id, arg_payload)
if resp_format is None:
return []
resp = unpack_rpc_payload(resp_format, resp_payload)
return resp | python | def rpc(self, address, rpc_id, *args, **kwargs):
"""Immediately dispatch an RPC inside this EmulatedDevice.
This function is meant to be used for testing purposes as well as by
tiles inside a complex EmulatedDevice subclass that need to
communicate with each other. It should only be called from the main
virtual device thread where start() was called from.
**Background workers may not call this method since it may cause them to deadlock.**
Args:
address (int): The address of the tile that has the RPC.
rpc_id (int): The 16-bit id of the rpc we want to call
*args: Any required arguments for the RPC as python objects.
**kwargs: Only two keyword arguments are supported:
- arg_format: A format specifier for the argument list
- result_format: A format specifier for the result
Returns:
list: A list of the decoded response members from the RPC.
"""
if isinstance(rpc_id, RPCDeclaration):
arg_format = rpc_id.arg_format
resp_format = rpc_id.resp_format
rpc_id = rpc_id.rpc_id
else:
arg_format = kwargs.get('arg_format', None)
resp_format = kwargs.get('resp_format', None)
arg_payload = b''
if arg_format is not None:
arg_payload = pack_rpc_payload(arg_format, args)
self._logger.debug("Sending rpc to %d:%04X, payload=%s", address, rpc_id, args)
resp_payload = self.call_rpc(address, rpc_id, arg_payload)
if resp_format is None:
return []
resp = unpack_rpc_payload(resp_format, resp_payload)
return resp | [
"def",
"rpc",
"(",
"self",
",",
"address",
",",
"rpc_id",
",",
"*",
"args",
",",
"**",
"kwargs",
")",
":",
"if",
"isinstance",
"(",
"rpc_id",
",",
"RPCDeclaration",
")",
":",
"arg_format",
"=",
"rpc_id",
".",
"arg_format",
"resp_format",
"=",
"rpc_id",
... | Immediately dispatch an RPC inside this EmulatedDevice.
This function is meant to be used for testing purposes as well as by
tiles inside a complex EmulatedDevice subclass that need to
communicate with each other. It should only be called from the main
virtual device thread where start() was called from.
**Background workers may not call this method since it may cause them to deadlock.**
Args:
address (int): The address of the tile that has the RPC.
rpc_id (int): The 16-bit id of the rpc we want to call
*args: Any required arguments for the RPC as python objects.
**kwargs: Only two keyword arguments are supported:
- arg_format: A format specifier for the argument list
- result_format: A format specifier for the result
Returns:
list: A list of the decoded response members from the RPC. | [
"Immediately",
"dispatch",
"an",
"RPC",
"inside",
"this",
"EmulatedDevice",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotileemulate/iotile/emulate/virtual/emulated_device.py#L133-L175 | train |
iotile/coretools | iotileemulate/iotile/emulate/virtual/emulated_device.py | EmulatedDevice.trace_sync | def trace_sync(self, data, timeout=5.0):
"""Send tracing data and wait for it to finish.
This awaitable coroutine wraps VirtualIOTileDevice.trace() and turns
the callback into an awaitable object. The appropriate usage of this
method is by calling it inside the event loop as:
await device.trace_sync(data)
Args:
data (bytes): The raw data that should be traced.
timeout (float): The maximum number of seconds to wait before
timing out.
Returns:
awaitable: An awaitable object with the result.
The result will be True if the data was sent successfully
or False if the data could not be sent in its entirety.
When False is returned, there is no guarantee about how much of
the data was sent, if any, just that it was not known to be
successfully sent.
"""
done = AwaitableResponse()
self.trace(data, callback=done.set_result)
return done.wait(timeout) | python | def trace_sync(self, data, timeout=5.0):
"""Send tracing data and wait for it to finish.
This awaitable coroutine wraps VirtualIOTileDevice.trace() and turns
the callback into an awaitable object. The appropriate usage of this
method is by calling it inside the event loop as:
await device.trace_sync(data)
Args:
data (bytes): The raw data that should be traced.
timeout (float): The maximum number of seconds to wait before
timing out.
Returns:
awaitable: An awaitable object with the result.
The result will be True if the data was sent successfully
or False if the data could not be sent in its entirety.
When False is returned, there is no guarantee about how much of
the data was sent, if any, just that it was not known to be
successfully sent.
"""
done = AwaitableResponse()
self.trace(data, callback=done.set_result)
return done.wait(timeout) | [
"def",
"trace_sync",
"(",
"self",
",",
"data",
",",
"timeout",
"=",
"5.0",
")",
":",
"done",
"=",
"AwaitableResponse",
"(",
")",
"self",
".",
"trace",
"(",
"data",
",",
"callback",
"=",
"done",
".",
"set_result",
")",
"return",
"done",
".",
"wait",
"... | Send tracing data and wait for it to finish.
This awaitable coroutine wraps VirtualIOTileDevice.trace() and turns
the callback into an awaitable object. The appropriate usage of this
method is by calling it inside the event loop as:
await device.trace_sync(data)
Args:
data (bytes): The raw data that should be traced.
timeout (float): The maximum number of seconds to wait before
timing out.
Returns:
awaitable: An awaitable object with the result.
The result will be True if the data was sent successfully
or False if the data could not be sent in its entirety.
When False is returned, there is no guarantee about how much of
the data was sent, if any, just that it was not known to be
successfully sent. | [
"Send",
"tracing",
"data",
"and",
"wait",
"for",
"it",
"to",
"finish",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotileemulate/iotile/emulate/virtual/emulated_device.py#L194-L221 | train |
iotile/coretools | iotileemulate/iotile/emulate/virtual/emulated_device.py | EmulatedDevice.stream_sync | def stream_sync(self, report, timeout=120.0):
"""Send a report and wait for it to finish.
This awaitable coroutine wraps VirtualIOTileDevice.stream() and turns
the callback into an awaitable object. The appropriate usage of this
method is by calling it inside the event loop as:
await device.stream_sync(data)
Args:
report (IOTileReport): The report that should be streamed.
timeout (float): The maximum number of seconds to wait before
timing out.
Returns:
awaitable: An awaitable object with the result.
The result will be True if the data was sent successfully
or False if the data could not be sent in its entirety.
When False is returned, there is no guarantee about how much of
the data was sent, if any, just that it was not known to be
successfully sent.
"""
done = AwaitableResponse()
self.stream(report, callback=done.set_result)
return done.wait(timeout) | python | def stream_sync(self, report, timeout=120.0):
"""Send a report and wait for it to finish.
This awaitable coroutine wraps VirtualIOTileDevice.stream() and turns
the callback into an awaitable object. The appropriate usage of this
method is by calling it inside the event loop as:
await device.stream_sync(data)
Args:
report (IOTileReport): The report that should be streamed.
timeout (float): The maximum number of seconds to wait before
timing out.
Returns:
awaitable: An awaitable object with the result.
The result will be True if the data was sent successfully
or False if the data could not be sent in its entirety.
When False is returned, there is no guarantee about how much of
the data was sent, if any, just that it was not known to be
successfully sent.
"""
done = AwaitableResponse()
self.stream(report, callback=done.set_result)
return done.wait(timeout) | [
"def",
"stream_sync",
"(",
"self",
",",
"report",
",",
"timeout",
"=",
"120.0",
")",
":",
"done",
"=",
"AwaitableResponse",
"(",
")",
"self",
".",
"stream",
"(",
"report",
",",
"callback",
"=",
"done",
".",
"set_result",
")",
"return",
"done",
".",
"wa... | Send a report and wait for it to finish.
This awaitable coroutine wraps VirtualIOTileDevice.stream() and turns
the callback into an awaitable object. The appropriate usage of this
method is by calling it inside the event loop as:
await device.stream_sync(data)
Args:
report (IOTileReport): The report that should be streamed.
timeout (float): The maximum number of seconds to wait before
timing out.
Returns:
awaitable: An awaitable object with the result.
The result will be True if the data was sent successfully
or False if the data could not be sent in its entirety.
When False is returned, there is no guarantee about how much of
the data was sent, if any, just that it was not known to be
successfully sent. | [
"Send",
"a",
"report",
"and",
"wait",
"for",
"it",
"to",
"finish",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotileemulate/iotile/emulate/virtual/emulated_device.py#L223-L250 | train |
iotile/coretools | iotileemulate/iotile/emulate/virtual/emulated_device.py | EmulatedDevice.synchronize_task | def synchronize_task(self, func, *args, **kwargs):
"""Run callable in the rpc thread and wait for it to finish.
The callable ``func`` will be passed into the EmulationLoop and run
there. This method will block until ``func`` is finished and
return/raise whatever that callable returns/raises.
This method is mainly useful for performing an activity that needs to
be synchronized with the rpc thread for safety reasons.
If this method is called from the rpc thread itself, it will just
run the task and return its result.
Args:
func (callable): A method with signature callable(*args, **kwargs),
that will be called with the optional *args and **kwargs passed
to this method.
*args: Arguments that will be passed to callable.
**kwargs: Keyword arguments that will be passed to callable.
Returns:
object: Whatever callable returns after it runs.
"""
async def _runner():
return func(*args, **kwargs)
return self.emulator.run_task_external(_runner()) | python | def synchronize_task(self, func, *args, **kwargs):
"""Run callable in the rpc thread and wait for it to finish.
The callable ``func`` will be passed into the EmulationLoop and run
there. This method will block until ``func`` is finished and
return/raise whatever that callable returns/raises.
This method is mainly useful for performing an activity that needs to
be synchronized with the rpc thread for safety reasons.
If this method is called from the rpc thread itself, it will just
run the task and return its result.
Args:
func (callable): A method with signature callable(*args, **kwargs),
that will be called with the optional *args and **kwargs passed
to this method.
*args: Arguments that will be passed to callable.
**kwargs: Keyword arguments that will be passed to callable.
Returns:
object: Whatever callable returns after it runs.
"""
async def _runner():
return func(*args, **kwargs)
return self.emulator.run_task_external(_runner()) | [
"def",
"synchronize_task",
"(",
"self",
",",
"func",
",",
"*",
"args",
",",
"**",
"kwargs",
")",
":",
"async",
"def",
"_runner",
"(",
")",
":",
"return",
"func",
"(",
"*",
"args",
",",
"**",
"kwargs",
")",
"return",
"self",
".",
"emulator",
".",
"r... | Run callable in the rpc thread and wait for it to finish.
The callable ``func`` will be passed into the EmulationLoop and run
there. This method will block until ``func`` is finished and
return/raise whatever that callable returns/raises.
This method is mainly useful for performing an activity that needs to
be synchronized with the rpc thread for safety reasons.
If this method is called from the rpc thread itself, it will just
run the task and return its result.
Args:
func (callable): A method with signature callable(*args, **kwargs),
that will be called with the optional *args and **kwargs passed
to this method.
*args: Arguments that will be passed to callable.
**kwargs: Keyword arguments that will be passed to callable.
Returns:
object: Whatever callable returns after it runs. | [
"Run",
"callable",
"in",
"the",
"rpc",
"thread",
"and",
"wait",
"for",
"it",
"to",
"finish",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotileemulate/iotile/emulate/virtual/emulated_device.py#L252-L279 | train |
iotile/coretools | iotileemulate/iotile/emulate/virtual/emulated_device.py | EmulatedDevice.load_metascenario | def load_metascenario(self, scenario_list):
"""Load one or more scenarios from a list.
Each entry in scenario_list should be a dict containing at least a
name key and an optional tile key and args key. If tile is present
and its value is not None, the scenario specified will be loaded into
the given tile only. Otherwise it will be loaded into the entire
device.
If the args key is specified is will be passed as keyword arguments
to load_scenario.
Args:
scenario_list (list): A list of dicts for each scenario that should
be loaded.
"""
for scenario in scenario_list:
name = scenario.get('name')
if name is None:
raise DataError("Scenario in scenario list is missing a name parameter", scenario=scenario)
tile_address = scenario.get('tile')
args = scenario.get('args', {})
dest = self
if tile_address is not None:
dest = self._tiles.get(tile_address)
if dest is None:
raise DataError("Attempted to load a scenario into a tile address that does not exist", address=tile_address, valid_addresses=list(self._tiles))
dest.load_scenario(name, **args) | python | def load_metascenario(self, scenario_list):
"""Load one or more scenarios from a list.
Each entry in scenario_list should be a dict containing at least a
name key and an optional tile key and args key. If tile is present
and its value is not None, the scenario specified will be loaded into
the given tile only. Otherwise it will be loaded into the entire
device.
If the args key is specified is will be passed as keyword arguments
to load_scenario.
Args:
scenario_list (list): A list of dicts for each scenario that should
be loaded.
"""
for scenario in scenario_list:
name = scenario.get('name')
if name is None:
raise DataError("Scenario in scenario list is missing a name parameter", scenario=scenario)
tile_address = scenario.get('tile')
args = scenario.get('args', {})
dest = self
if tile_address is not None:
dest = self._tiles.get(tile_address)
if dest is None:
raise DataError("Attempted to load a scenario into a tile address that does not exist", address=tile_address, valid_addresses=list(self._tiles))
dest.load_scenario(name, **args) | [
"def",
"load_metascenario",
"(",
"self",
",",
"scenario_list",
")",
":",
"for",
"scenario",
"in",
"scenario_list",
":",
"name",
"=",
"scenario",
".",
"get",
"(",
"'name'",
")",
"if",
"name",
"is",
"None",
":",
"raise",
"DataError",
"(",
"\"Scenario in scenar... | Load one or more scenarios from a list.
Each entry in scenario_list should be a dict containing at least a
name key and an optional tile key and args key. If tile is present
and its value is not None, the scenario specified will be loaded into
the given tile only. Otherwise it will be loaded into the entire
device.
If the args key is specified is will be passed as keyword arguments
to load_scenario.
Args:
scenario_list (list): A list of dicts for each scenario that should
be loaded. | [
"Load",
"one",
"or",
"more",
"scenarios",
"from",
"a",
"list",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotileemulate/iotile/emulate/virtual/emulated_device.py#L320-L352 | train |
iotile/coretools | iotilesensorgraph/iotile/sg/stream.py | DataStream.associated_stream | def associated_stream(self):
"""Return the corresponding output or storage stream for an important system input.
Certain system inputs are designed as important and automatically
copied to output streams without requiring any manual interaction.
This method returns the corresponding stream for an important system
input. It will raise an InternalError unlesss the self.important
property is True.
Returns:
DataStream: The corresponding output or storage stream.
Raises:
InternalError: If this stream is not marked as an important system input.
"""
if not self.important:
raise InternalError("You may only call autocopied_stream on when DataStream.important is True", stream=self)
if self.stream_id >= DataStream.ImportantSystemStorageStart:
stream_type = DataStream.BufferedType
else:
stream_type = DataStream.OutputType
return DataStream(stream_type, self.stream_id, True) | python | def associated_stream(self):
"""Return the corresponding output or storage stream for an important system input.
Certain system inputs are designed as important and automatically
copied to output streams without requiring any manual interaction.
This method returns the corresponding stream for an important system
input. It will raise an InternalError unlesss the self.important
property is True.
Returns:
DataStream: The corresponding output or storage stream.
Raises:
InternalError: If this stream is not marked as an important system input.
"""
if not self.important:
raise InternalError("You may only call autocopied_stream on when DataStream.important is True", stream=self)
if self.stream_id >= DataStream.ImportantSystemStorageStart:
stream_type = DataStream.BufferedType
else:
stream_type = DataStream.OutputType
return DataStream(stream_type, self.stream_id, True) | [
"def",
"associated_stream",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"important",
":",
"raise",
"InternalError",
"(",
"\"You may only call autocopied_stream on when DataStream.important is True\"",
",",
"stream",
"=",
"self",
")",
"if",
"self",
".",
"stream_id... | Return the corresponding output or storage stream for an important system input.
Certain system inputs are designed as important and automatically
copied to output streams without requiring any manual interaction.
This method returns the corresponding stream for an important system
input. It will raise an InternalError unlesss the self.important
property is True.
Returns:
DataStream: The corresponding output or storage stream.
Raises:
InternalError: If this stream is not marked as an important system input. | [
"Return",
"the",
"corresponding",
"output",
"or",
"storage",
"stream",
"for",
"an",
"important",
"system",
"input",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilesensorgraph/iotile/sg/stream.py#L80-L105 | train |
iotile/coretools | iotilesensorgraph/iotile/sg/stream.py | DataStream.FromString | def FromString(cls, string_rep):
"""Create a DataStream from a string representation.
The format for stream designators when encoded as strings is:
[system] (buffered|unbuffered|constant|input|count|output) <integer>
Args:
string_rep (str): The string representation to turn into a
DataStream
"""
rep = str(string_rep)
parts = rep.split()
if len(parts) > 3:
raise ArgumentError("Too many whitespace separated parts of stream designator", input_string=string_rep)
elif len(parts) == 3 and parts[0] != u'system':
raise ArgumentError("Too many whitespace separated parts of stream designator", input_string=string_rep)
elif len(parts) < 2:
raise ArgumentError("Too few components in stream designator", input_string=string_rep)
# Now actually parse the string
if len(parts) == 3:
system = True
stream_type = parts[1]
stream_id = parts[2]
else:
system = False
stream_type = parts[0]
stream_id = parts[1]
try:
stream_id = int(stream_id, 0)
except ValueError as exc:
raise ArgumentError("Could not convert stream id to integer", error_string=str(exc), stream_id=stream_id)
try:
stream_type = cls.StringToType[stream_type]
except KeyError:
raise ArgumentError("Invalid stream type given", stream_type=stream_type, known_types=cls.StringToType.keys())
return DataStream(stream_type, stream_id, system) | python | def FromString(cls, string_rep):
"""Create a DataStream from a string representation.
The format for stream designators when encoded as strings is:
[system] (buffered|unbuffered|constant|input|count|output) <integer>
Args:
string_rep (str): The string representation to turn into a
DataStream
"""
rep = str(string_rep)
parts = rep.split()
if len(parts) > 3:
raise ArgumentError("Too many whitespace separated parts of stream designator", input_string=string_rep)
elif len(parts) == 3 and parts[0] != u'system':
raise ArgumentError("Too many whitespace separated parts of stream designator", input_string=string_rep)
elif len(parts) < 2:
raise ArgumentError("Too few components in stream designator", input_string=string_rep)
# Now actually parse the string
if len(parts) == 3:
system = True
stream_type = parts[1]
stream_id = parts[2]
else:
system = False
stream_type = parts[0]
stream_id = parts[1]
try:
stream_id = int(stream_id, 0)
except ValueError as exc:
raise ArgumentError("Could not convert stream id to integer", error_string=str(exc), stream_id=stream_id)
try:
stream_type = cls.StringToType[stream_type]
except KeyError:
raise ArgumentError("Invalid stream type given", stream_type=stream_type, known_types=cls.StringToType.keys())
return DataStream(stream_type, stream_id, system) | [
"def",
"FromString",
"(",
"cls",
",",
"string_rep",
")",
":",
"rep",
"=",
"str",
"(",
"string_rep",
")",
"parts",
"=",
"rep",
".",
"split",
"(",
")",
"if",
"len",
"(",
"parts",
")",
">",
"3",
":",
"raise",
"ArgumentError",
"(",
"\"Too many whitespace s... | Create a DataStream from a string representation.
The format for stream designators when encoded as strings is:
[system] (buffered|unbuffered|constant|input|count|output) <integer>
Args:
string_rep (str): The string representation to turn into a
DataStream | [
"Create",
"a",
"DataStream",
"from",
"a",
"string",
"representation",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilesensorgraph/iotile/sg/stream.py#L108-L149 | train |
iotile/coretools | iotilesensorgraph/iotile/sg/stream.py | DataStream.FromEncoded | def FromEncoded(self, encoded):
"""Create a DataStream from an encoded 16-bit unsigned integer.
Returns:
DataStream: The decoded DataStream object
"""
stream_type = (encoded >> 12) & 0b1111
stream_system = bool(encoded & (1 << 11))
stream_id = (encoded & ((1 << 11) - 1))
return DataStream(stream_type, stream_id, stream_system) | python | def FromEncoded(self, encoded):
"""Create a DataStream from an encoded 16-bit unsigned integer.
Returns:
DataStream: The decoded DataStream object
"""
stream_type = (encoded >> 12) & 0b1111
stream_system = bool(encoded & (1 << 11))
stream_id = (encoded & ((1 << 11) - 1))
return DataStream(stream_type, stream_id, stream_system) | [
"def",
"FromEncoded",
"(",
"self",
",",
"encoded",
")",
":",
"stream_type",
"=",
"(",
"encoded",
">>",
"12",
")",
"&",
"0b1111",
"stream_system",
"=",
"bool",
"(",
"encoded",
"&",
"(",
"1",
"<<",
"11",
")",
")",
"stream_id",
"=",
"(",
"encoded",
"&",... | Create a DataStream from an encoded 16-bit unsigned integer.
Returns:
DataStream: The decoded DataStream object | [
"Create",
"a",
"DataStream",
"from",
"an",
"encoded",
"16",
"-",
"bit",
"unsigned",
"integer",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilesensorgraph/iotile/sg/stream.py#L152-L163 | train |
iotile/coretools | iotilesensorgraph/iotile/sg/stream.py | DataStreamSelector.as_stream | def as_stream(self):
"""Convert this selector to a DataStream.
This function will only work if this is a singular selector that
matches exactly one DataStream.
"""
if not self.singular:
raise ArgumentError("Attempted to convert a non-singular selector to a data stream, it matches multiple", selector=self)
return DataStream(self.match_type, self.match_id, self.match_spec == DataStreamSelector.MatchSystemOnly) | python | def as_stream(self):
"""Convert this selector to a DataStream.
This function will only work if this is a singular selector that
matches exactly one DataStream.
"""
if not self.singular:
raise ArgumentError("Attempted to convert a non-singular selector to a data stream, it matches multiple", selector=self)
return DataStream(self.match_type, self.match_id, self.match_spec == DataStreamSelector.MatchSystemOnly) | [
"def",
"as_stream",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"singular",
":",
"raise",
"ArgumentError",
"(",
"\"Attempted to convert a non-singular selector to a data stream, it matches multiple\"",
",",
"selector",
"=",
"self",
")",
"return",
"DataStream",
"(",... | Convert this selector to a DataStream.
This function will only work if this is a singular selector that
matches exactly one DataStream. | [
"Convert",
"this",
"selector",
"to",
"a",
"DataStream",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilesensorgraph/iotile/sg/stream.py#L270-L280 | train |
iotile/coretools | iotilesensorgraph/iotile/sg/stream.py | DataStreamSelector.FromStream | def FromStream(cls, stream):
"""Create a DataStreamSelector from a DataStream.
Args:
stream (DataStream): The data stream that we want to convert.
"""
if stream.system:
specifier = DataStreamSelector.MatchSystemOnly
else:
specifier = DataStreamSelector.MatchUserOnly
return DataStreamSelector(stream.stream_type, stream.stream_id, specifier) | python | def FromStream(cls, stream):
"""Create a DataStreamSelector from a DataStream.
Args:
stream (DataStream): The data stream that we want to convert.
"""
if stream.system:
specifier = DataStreamSelector.MatchSystemOnly
else:
specifier = DataStreamSelector.MatchUserOnly
return DataStreamSelector(stream.stream_type, stream.stream_id, specifier) | [
"def",
"FromStream",
"(",
"cls",
",",
"stream",
")",
":",
"if",
"stream",
".",
"system",
":",
"specifier",
"=",
"DataStreamSelector",
".",
"MatchSystemOnly",
"else",
":",
"specifier",
"=",
"DataStreamSelector",
".",
"MatchUserOnly",
"return",
"DataStreamSelector",... | Create a DataStreamSelector from a DataStream.
Args:
stream (DataStream): The data stream that we want to convert. | [
"Create",
"a",
"DataStreamSelector",
"from",
"a",
"DataStream",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilesensorgraph/iotile/sg/stream.py#L283-L295 | train |
iotile/coretools | iotilesensorgraph/iotile/sg/stream.py | DataStreamSelector.FromEncoded | def FromEncoded(cls, encoded):
"""Create a DataStreamSelector from an encoded 16-bit value.
The binary value must be equivalent to what is produced by
a call to self.encode() and will turn that value back into
a a DataStreamSelector.
Note that the following operation is a no-op:
DataStreamSelector.FromEncode(value).encode()
Args:
encoded (int): The encoded binary representation of a
DataStreamSelector.
Returns:
DataStreamSelector: The decoded selector.
"""
match_spec = encoded & ((1 << 11) | (1 << 15))
match_type = (encoded & (0b111 << 12)) >> 12
match_id = encoded & ((1 << 11) - 1)
if match_spec not in cls.SpecifierEncodingMap:
raise ArgumentError("Unknown encoded match specifier", match_spec=match_spec, known_specifiers=cls.SpecifierEncodingMap.keys())
spec_name = cls.SpecifierEncodingMap[match_spec]
# Handle wildcard matches
if match_id == cls.MatchAllCode:
match_id = None
return DataStreamSelector(match_type, match_id, spec_name) | python | def FromEncoded(cls, encoded):
"""Create a DataStreamSelector from an encoded 16-bit value.
The binary value must be equivalent to what is produced by
a call to self.encode() and will turn that value back into
a a DataStreamSelector.
Note that the following operation is a no-op:
DataStreamSelector.FromEncode(value).encode()
Args:
encoded (int): The encoded binary representation of a
DataStreamSelector.
Returns:
DataStreamSelector: The decoded selector.
"""
match_spec = encoded & ((1 << 11) | (1 << 15))
match_type = (encoded & (0b111 << 12)) >> 12
match_id = encoded & ((1 << 11) - 1)
if match_spec not in cls.SpecifierEncodingMap:
raise ArgumentError("Unknown encoded match specifier", match_spec=match_spec, known_specifiers=cls.SpecifierEncodingMap.keys())
spec_name = cls.SpecifierEncodingMap[match_spec]
# Handle wildcard matches
if match_id == cls.MatchAllCode:
match_id = None
return DataStreamSelector(match_type, match_id, spec_name) | [
"def",
"FromEncoded",
"(",
"cls",
",",
"encoded",
")",
":",
"match_spec",
"=",
"encoded",
"&",
"(",
"(",
"1",
"<<",
"11",
")",
"|",
"(",
"1",
"<<",
"15",
")",
")",
"match_type",
"=",
"(",
"encoded",
"&",
"(",
"0b111",
"<<",
"12",
")",
")",
">>"... | Create a DataStreamSelector from an encoded 16-bit value.
The binary value must be equivalent to what is produced by
a call to self.encode() and will turn that value back into
a a DataStreamSelector.
Note that the following operation is a no-op:
DataStreamSelector.FromEncode(value).encode()
Args:
encoded (int): The encoded binary representation of a
DataStreamSelector.
Returns:
DataStreamSelector: The decoded selector. | [
"Create",
"a",
"DataStreamSelector",
"from",
"an",
"encoded",
"16",
"-",
"bit",
"value",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilesensorgraph/iotile/sg/stream.py#L298-L330 | train |
iotile/coretools | iotilesensorgraph/iotile/sg/stream.py | DataStreamSelector.FromString | def FromString(cls, string_rep):
"""Create a DataStreamSelector from a string.
The format of the string should either be:
all <type>
OR
<type> <id>
Where type is [system] <stream type>, with <stream type>
defined as in DataStream
Args:
rep (str): The string representation to convert to a DataStreamSelector
"""
rep = str(string_rep)
rep = rep.replace(u'node', '')
rep = rep.replace(u'nodes', '')
if rep.startswith(u'all'):
parts = rep.split()
spec_string = u''
if len(parts) == 3:
spec_string = parts[1]
stream_type = parts[2]
elif len(parts) == 2:
stream_type = parts[1]
else:
raise ArgumentError("Invalid wildcard stream selector", string_rep=string_rep)
try:
# Remove pluralization that can come with e.g. 'all system outputs'
if stream_type.endswith(u's'):
stream_type = stream_type[:-1]
stream_type = DataStream.StringToType[stream_type]
except KeyError:
raise ArgumentError("Invalid stream type given", stream_type=stream_type, known_types=DataStream.StringToType.keys())
stream_spec = DataStreamSelector.SpecifierNames.get(spec_string, None)
if stream_spec is None:
raise ArgumentError("Invalid stream specifier given (should be system, user, combined or blank)", string_rep=string_rep, spec_string=spec_string)
return DataStreamSelector(stream_type, None, stream_spec)
# If we're not matching a wildcard stream type, then the match is exactly
# the same as a DataStream identifier, so use that to match it.
stream = DataStream.FromString(rep)
return DataStreamSelector.FromStream(stream) | python | def FromString(cls, string_rep):
"""Create a DataStreamSelector from a string.
The format of the string should either be:
all <type>
OR
<type> <id>
Where type is [system] <stream type>, with <stream type>
defined as in DataStream
Args:
rep (str): The string representation to convert to a DataStreamSelector
"""
rep = str(string_rep)
rep = rep.replace(u'node', '')
rep = rep.replace(u'nodes', '')
if rep.startswith(u'all'):
parts = rep.split()
spec_string = u''
if len(parts) == 3:
spec_string = parts[1]
stream_type = parts[2]
elif len(parts) == 2:
stream_type = parts[1]
else:
raise ArgumentError("Invalid wildcard stream selector", string_rep=string_rep)
try:
# Remove pluralization that can come with e.g. 'all system outputs'
if stream_type.endswith(u's'):
stream_type = stream_type[:-1]
stream_type = DataStream.StringToType[stream_type]
except KeyError:
raise ArgumentError("Invalid stream type given", stream_type=stream_type, known_types=DataStream.StringToType.keys())
stream_spec = DataStreamSelector.SpecifierNames.get(spec_string, None)
if stream_spec is None:
raise ArgumentError("Invalid stream specifier given (should be system, user, combined or blank)", string_rep=string_rep, spec_string=spec_string)
return DataStreamSelector(stream_type, None, stream_spec)
# If we're not matching a wildcard stream type, then the match is exactly
# the same as a DataStream identifier, so use that to match it.
stream = DataStream.FromString(rep)
return DataStreamSelector.FromStream(stream) | [
"def",
"FromString",
"(",
"cls",
",",
"string_rep",
")",
":",
"rep",
"=",
"str",
"(",
"string_rep",
")",
"rep",
"=",
"rep",
".",
"replace",
"(",
"u'node'",
",",
"''",
")",
"rep",
"=",
"rep",
".",
"replace",
"(",
"u'nodes'",
",",
"''",
")",
"if",
... | Create a DataStreamSelector from a string.
The format of the string should either be:
all <type>
OR
<type> <id>
Where type is [system] <stream type>, with <stream type>
defined as in DataStream
Args:
rep (str): The string representation to convert to a DataStreamSelector | [
"Create",
"a",
"DataStreamSelector",
"from",
"a",
"string",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilesensorgraph/iotile/sg/stream.py#L333-L386 | train |
iotile/coretools | iotilesensorgraph/iotile/sg/stream.py | DataStreamSelector.matches | def matches(self, stream):
"""Check if this selector matches the given stream
Args:
stream (DataStream): The stream to check
Returns:
bool: True if this selector matches the stream
"""
if self.match_type != stream.stream_type:
return False
if self.match_id is not None:
return self.match_id == stream.stream_id
if self.match_spec == DataStreamSelector.MatchUserOnly:
return not stream.system
elif self.match_spec == DataStreamSelector.MatchSystemOnly:
return stream.system
elif self.match_spec == DataStreamSelector.MatchUserAndBreaks:
return (not stream.system) or (stream.system and (stream.stream_id in DataStream.KnownBreakStreams))
# The other case is that match_spec is MatchCombined, which matches everything
# regardless of system of user flag
return True | python | def matches(self, stream):
"""Check if this selector matches the given stream
Args:
stream (DataStream): The stream to check
Returns:
bool: True if this selector matches the stream
"""
if self.match_type != stream.stream_type:
return False
if self.match_id is not None:
return self.match_id == stream.stream_id
if self.match_spec == DataStreamSelector.MatchUserOnly:
return not stream.system
elif self.match_spec == DataStreamSelector.MatchSystemOnly:
return stream.system
elif self.match_spec == DataStreamSelector.MatchUserAndBreaks:
return (not stream.system) or (stream.system and (stream.stream_id in DataStream.KnownBreakStreams))
# The other case is that match_spec is MatchCombined, which matches everything
# regardless of system of user flag
return True | [
"def",
"matches",
"(",
"self",
",",
"stream",
")",
":",
"if",
"self",
".",
"match_type",
"!=",
"stream",
".",
"stream_type",
":",
"return",
"False",
"if",
"self",
".",
"match_id",
"is",
"not",
"None",
":",
"return",
"self",
".",
"match_id",
"==",
"stre... | Check if this selector matches the given stream
Args:
stream (DataStream): The stream to check
Returns:
bool: True if this selector matches the stream | [
"Check",
"if",
"this",
"selector",
"matches",
"the",
"given",
"stream"
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilesensorgraph/iotile/sg/stream.py#L407-L432 | train |
iotile/coretools | iotilesensorgraph/iotile/sg/stream.py | DataStreamSelector.encode | def encode(self):
"""Encode this stream as a packed 16-bit unsigned integer.
Returns:
int: The packed encoded stream
"""
match_id = self.match_id
if match_id is None:
match_id = (1 << 11) - 1
return (self.match_type << 12) | DataStreamSelector.SpecifierEncodings[self.match_spec] | match_id | python | def encode(self):
"""Encode this stream as a packed 16-bit unsigned integer.
Returns:
int: The packed encoded stream
"""
match_id = self.match_id
if match_id is None:
match_id = (1 << 11) - 1
return (self.match_type << 12) | DataStreamSelector.SpecifierEncodings[self.match_spec] | match_id | [
"def",
"encode",
"(",
"self",
")",
":",
"match_id",
"=",
"self",
".",
"match_id",
"if",
"match_id",
"is",
"None",
":",
"match_id",
"=",
"(",
"1",
"<<",
"11",
")",
"-",
"1",
"return",
"(",
"self",
".",
"match_type",
"<<",
"12",
")",
"|",
"DataStream... | Encode this stream as a packed 16-bit unsigned integer.
Returns:
int: The packed encoded stream | [
"Encode",
"this",
"stream",
"as",
"a",
"packed",
"16",
"-",
"bit",
"unsigned",
"integer",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilesensorgraph/iotile/sg/stream.py#L434-L445 | train |
iotile/coretools | iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/m4.py | generate | def generate(env):
"""Add Builders and construction variables for m4 to an Environment."""
M4Action = SCons.Action.Action('$M4COM', '$M4COMSTR')
bld = SCons.Builder.Builder(action = M4Action, src_suffix = '.m4')
env['BUILDERS']['M4'] = bld
# .m4 files might include other files, and it would be pretty hard
# to write a scanner for it, so let's just cd to the dir of the m4
# file and run from there.
# The src_suffix setup is like so: file.c.m4 -> file.c,
# file.cpp.m4 -> file.cpp etc.
env['M4'] = 'm4'
env['M4FLAGS'] = SCons.Util.CLVar('-E')
env['M4COM'] = 'cd ${SOURCE.rsrcdir} && $M4 $M4FLAGS < ${SOURCE.file} > ${TARGET.abspath}' | python | def generate(env):
"""Add Builders and construction variables for m4 to an Environment."""
M4Action = SCons.Action.Action('$M4COM', '$M4COMSTR')
bld = SCons.Builder.Builder(action = M4Action, src_suffix = '.m4')
env['BUILDERS']['M4'] = bld
# .m4 files might include other files, and it would be pretty hard
# to write a scanner for it, so let's just cd to the dir of the m4
# file and run from there.
# The src_suffix setup is like so: file.c.m4 -> file.c,
# file.cpp.m4 -> file.cpp etc.
env['M4'] = 'm4'
env['M4FLAGS'] = SCons.Util.CLVar('-E')
env['M4COM'] = 'cd ${SOURCE.rsrcdir} && $M4 $M4FLAGS < ${SOURCE.file} > ${TARGET.abspath}' | [
"def",
"generate",
"(",
"env",
")",
":",
"M4Action",
"=",
"SCons",
".",
"Action",
".",
"Action",
"(",
"'$M4COM'",
",",
"'$M4COMSTR'",
")",
"bld",
"=",
"SCons",
".",
"Builder",
".",
"Builder",
"(",
"action",
"=",
"M4Action",
",",
"src_suffix",
"=",
"'.m... | Add Builders and construction variables for m4 to an Environment. | [
"Add",
"Builders",
"and",
"construction",
"variables",
"for",
"m4",
"to",
"an",
"Environment",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/m4.py#L40-L54 | train |
iotile/coretools | iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/latex.py | generate | def generate(env):
"""Add Builders and construction variables for LaTeX to an Environment."""
env.AppendUnique(LATEXSUFFIXES=SCons.Tool.LaTeXSuffixes)
from . import dvi
dvi.generate(env)
from . import pdf
pdf.generate(env)
bld = env['BUILDERS']['DVI']
bld.add_action('.ltx', LaTeXAuxAction)
bld.add_action('.latex', LaTeXAuxAction)
bld.add_emitter('.ltx', SCons.Tool.tex.tex_eps_emitter)
bld.add_emitter('.latex', SCons.Tool.tex.tex_eps_emitter)
SCons.Tool.tex.generate_common(env) | python | def generate(env):
"""Add Builders and construction variables for LaTeX to an Environment."""
env.AppendUnique(LATEXSUFFIXES=SCons.Tool.LaTeXSuffixes)
from . import dvi
dvi.generate(env)
from . import pdf
pdf.generate(env)
bld = env['BUILDERS']['DVI']
bld.add_action('.ltx', LaTeXAuxAction)
bld.add_action('.latex', LaTeXAuxAction)
bld.add_emitter('.ltx', SCons.Tool.tex.tex_eps_emitter)
bld.add_emitter('.latex', SCons.Tool.tex.tex_eps_emitter)
SCons.Tool.tex.generate_common(env) | [
"def",
"generate",
"(",
"env",
")",
":",
"env",
".",
"AppendUnique",
"(",
"LATEXSUFFIXES",
"=",
"SCons",
".",
"Tool",
".",
"LaTeXSuffixes",
")",
"from",
".",
"import",
"dvi",
"dvi",
".",
"generate",
"(",
"env",
")",
"from",
".",
"import",
"pdf",
"pdf",... | Add Builders and construction variables for LaTeX to an Environment. | [
"Add",
"Builders",
"and",
"construction",
"variables",
"for",
"LaTeX",
"to",
"an",
"Environment",
"."
] | 2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec | https://github.com/iotile/coretools/blob/2d794f5f1346b841b0dcd16c9d284e9bf2f3c6ec/iotilebuild/iotile/build/config/scons-local-3.0.1/SCons/Tool/latex.py#L53-L70 | train |
jobec/rfc5424-logging-handler | rfc5424logging/handler.py | Rfc5424SysLogHandler.encode_priority | def encode_priority(self, facility, priority):
"""
Encode the facility and priority. You can pass in strings or
integers - if strings are passed, the facility_names and
priority_names mapping dictionaries are used to convert them to
integers.
"""
return (facility << 3) | self.priority_map.get(priority, self.LOG_WARNING) | python | def encode_priority(self, facility, priority):
"""
Encode the facility and priority. You can pass in strings or
integers - if strings are passed, the facility_names and
priority_names mapping dictionaries are used to convert them to
integers.
"""
return (facility << 3) | self.priority_map.get(priority, self.LOG_WARNING) | [
"def",
"encode_priority",
"(",
"self",
",",
"facility",
",",
"priority",
")",
":",
"return",
"(",
"facility",
"<<",
"3",
")",
"|",
"self",
".",
"priority_map",
".",
"get",
"(",
"priority",
",",
"self",
".",
"LOG_WARNING",
")"
] | Encode the facility and priority. You can pass in strings or
integers - if strings are passed, the facility_names and
priority_names mapping dictionaries are used to convert them to
integers. | [
"Encode",
"the",
"facility",
"and",
"priority",
".",
"You",
"can",
"pass",
"in",
"strings",
"or",
"integers",
"-",
"if",
"strings",
"are",
"passed",
"the",
"facility_names",
"and",
"priority_names",
"mapping",
"dictionaries",
"are",
"used",
"to",
"convert",
"t... | 9c4f669c5e54cf382936cd950e2204caeb6d05f0 | https://github.com/jobec/rfc5424-logging-handler/blob/9c4f669c5e54cf382936cd950e2204caeb6d05f0/rfc5424logging/handler.py#L250-L257 | train |
jobec/rfc5424-logging-handler | rfc5424logging/handler.py | Rfc5424SysLogHandler.close | def close(self):
"""
Closes the socket.
"""
self.acquire()
try:
if self.transport is not None:
self.transport.close()
super(Rfc5424SysLogHandler, self).close()
finally:
self.release() | python | def close(self):
"""
Closes the socket.
"""
self.acquire()
try:
if self.transport is not None:
self.transport.close()
super(Rfc5424SysLogHandler, self).close()
finally:
self.release() | [
"def",
"close",
"(",
"self",
")",
":",
"self",
".",
"acquire",
"(",
")",
"try",
":",
"if",
"self",
".",
"transport",
"is",
"not",
"None",
":",
"self",
".",
"transport",
".",
"close",
"(",
")",
"super",
"(",
"Rfc5424SysLogHandler",
",",
"self",
")",
... | Closes the socket. | [
"Closes",
"the",
"socket",
"."
] | 9c4f669c5e54cf382936cd950e2204caeb6d05f0 | https://github.com/jobec/rfc5424-logging-handler/blob/9c4f669c5e54cf382936cd950e2204caeb6d05f0/rfc5424logging/handler.py#L478-L488 | train |
benedictpaten/sonLib | misc.py | sonTraceRootPath | def sonTraceRootPath():
"""
function for finding external location
"""
import sonLib.bioio
i = os.path.abspath(sonLib.bioio.__file__)
return os.path.split(os.path.split(os.path.split(i)[0])[0])[0] | python | def sonTraceRootPath():
"""
function for finding external location
"""
import sonLib.bioio
i = os.path.abspath(sonLib.bioio.__file__)
return os.path.split(os.path.split(os.path.split(i)[0])[0])[0] | [
"def",
"sonTraceRootPath",
"(",
")",
":",
"import",
"sonLib",
".",
"bioio",
"i",
"=",
"os",
".",
"path",
".",
"abspath",
"(",
"sonLib",
".",
"bioio",
".",
"__file__",
")",
"return",
"os",
".",
"path",
".",
"split",
"(",
"os",
".",
"path",
".",
"spl... | function for finding external location | [
"function",
"for",
"finding",
"external",
"location"
] | 1decb75bb439b70721ec776f685ce98e25217d26 | https://github.com/benedictpaten/sonLib/blob/1decb75bb439b70721ec776f685ce98e25217d26/misc.py#L20-L26 | train |
benedictpaten/sonLib | misc.py | linOriginRegression | def linOriginRegression(points):
"""
computes a linear regression starting at zero
"""
j = sum([ i[0] for i in points ])
k = sum([ i[1] for i in points ])
if j != 0:
return k/j, j, k
return 1, j, k | python | def linOriginRegression(points):
"""
computes a linear regression starting at zero
"""
j = sum([ i[0] for i in points ])
k = sum([ i[1] for i in points ])
if j != 0:
return k/j, j, k
return 1, j, k | [
"def",
"linOriginRegression",
"(",
"points",
")",
":",
"j",
"=",
"sum",
"(",
"[",
"i",
"[",
"0",
"]",
"for",
"i",
"in",
"points",
"]",
")",
"k",
"=",
"sum",
"(",
"[",
"i",
"[",
"1",
"]",
"for",
"i",
"in",
"points",
"]",
")",
"if",
"j",
"!="... | computes a linear regression starting at zero | [
"computes",
"a",
"linear",
"regression",
"starting",
"at",
"zero"
] | 1decb75bb439b70721ec776f685ce98e25217d26 | https://github.com/benedictpaten/sonLib/blob/1decb75bb439b70721ec776f685ce98e25217d26/misc.py#L28-L36 | train |
benedictpaten/sonLib | misc.py | close | def close(i, j, tolerance):
"""
check two float values are within a bound of one another
"""
return i <= j + tolerance and i >= j - tolerance | python | def close(i, j, tolerance):
"""
check two float values are within a bound of one another
"""
return i <= j + tolerance and i >= j - tolerance | [
"def",
"close",
"(",
"i",
",",
"j",
",",
"tolerance",
")",
":",
"return",
"i",
"<=",
"j",
"+",
"tolerance",
"and",
"i",
">=",
"j",
"-",
"tolerance"
] | check two float values are within a bound of one another | [
"check",
"two",
"float",
"values",
"are",
"within",
"a",
"bound",
"of",
"one",
"another"
] | 1decb75bb439b70721ec776f685ce98e25217d26 | https://github.com/benedictpaten/sonLib/blob/1decb75bb439b70721ec776f685ce98e25217d26/misc.py#L38-L42 | train |
benedictpaten/sonLib | misc.py | filterOverlappingAlignments | def filterOverlappingAlignments(alignments):
"""Filter alignments to be non-overlapping.
"""
l = []
alignments = alignments[:]
sortAlignments(alignments)
alignments.reverse()
for pA1 in alignments:
for pA2 in l:
if pA1.contig1 == pA2.contig1 and getPositiveCoordinateRangeOverlap(pA1.start1+1, pA1.end1, pA2.start1+1, pA2.end1) is not None: #One offset, inclusive coordinates
break
if pA1.contig2 == pA2.contig2 and getPositiveCoordinateRangeOverlap(pA1.start2+1, pA1.end2, pA2.start2+1, pA2.end2) is not None: #One offset, inclusive coordinates
break
if pA1.contig2 == pA2.contig1 and getPositiveCoordinateRangeOverlap(pA1.start2+1, pA1.end2, pA2.start1+1, pA2.end1) is not None: #One offset, inclusive coordinates
break
if pA1.contig1 == pA2.contig2 and getPositiveCoordinateRangeOverlap(pA1.start1+1, pA1.end1, pA2.start2+1, pA2.end2) is not None: #One offset, inclusive coordinates
break
else:
l.append(pA1)
l.reverse()
return l | python | def filterOverlappingAlignments(alignments):
"""Filter alignments to be non-overlapping.
"""
l = []
alignments = alignments[:]
sortAlignments(alignments)
alignments.reverse()
for pA1 in alignments:
for pA2 in l:
if pA1.contig1 == pA2.contig1 and getPositiveCoordinateRangeOverlap(pA1.start1+1, pA1.end1, pA2.start1+1, pA2.end1) is not None: #One offset, inclusive coordinates
break
if pA1.contig2 == pA2.contig2 and getPositiveCoordinateRangeOverlap(pA1.start2+1, pA1.end2, pA2.start2+1, pA2.end2) is not None: #One offset, inclusive coordinates
break
if pA1.contig2 == pA2.contig1 and getPositiveCoordinateRangeOverlap(pA1.start2+1, pA1.end2, pA2.start1+1, pA2.end1) is not None: #One offset, inclusive coordinates
break
if pA1.contig1 == pA2.contig2 and getPositiveCoordinateRangeOverlap(pA1.start1+1, pA1.end1, pA2.start2+1, pA2.end2) is not None: #One offset, inclusive coordinates
break
else:
l.append(pA1)
l.reverse()
return l | [
"def",
"filterOverlappingAlignments",
"(",
"alignments",
")",
":",
"l",
"=",
"[",
"]",
"alignments",
"=",
"alignments",
"[",
":",
"]",
"sortAlignments",
"(",
"alignments",
")",
"alignments",
".",
"reverse",
"(",
")",
"for",
"pA1",
"in",
"alignments",
":",
... | Filter alignments to be non-overlapping. | [
"Filter",
"alignments",
"to",
"be",
"non",
"-",
"overlapping",
"."
] | 1decb75bb439b70721ec776f685ce98e25217d26 | https://github.com/benedictpaten/sonLib/blob/1decb75bb439b70721ec776f685ce98e25217d26/misc.py#L86-L106 | train |
benedictpaten/sonLib | tree.py | binaryTree_depthFirstNumbers | def binaryTree_depthFirstNumbers(binaryTree, labelTree=True, dontStopAtID=True):
"""
get mid-order depth first tree numbers
"""
traversalIDs = {}
def traverse(binaryTree, mid=0, leafNo=0):
if binaryTree.internal and (dontStopAtID or binaryTree.iD is None):
midStart = mid
j, leafNo = traverse(binaryTree.left, mid, leafNo)
mid = j
j, leafNo = traverse(binaryTree.right, j+1, leafNo)
traversalIDs[binaryTree] = TraversalID(midStart, mid, j)
return j, leafNo
traversalID = TraversalID(mid, mid, mid+1)
traversalID.leafNo = leafNo
#thus nodes must be unique
traversalIDs[binaryTree] = traversalID
return mid+1, leafNo+1
traverse(binaryTree)
if labelTree:
for binaryTree in traversalIDs.keys():
binaryTree.traversalID = traversalIDs[binaryTree]
return traversalIDs | python | def binaryTree_depthFirstNumbers(binaryTree, labelTree=True, dontStopAtID=True):
"""
get mid-order depth first tree numbers
"""
traversalIDs = {}
def traverse(binaryTree, mid=0, leafNo=0):
if binaryTree.internal and (dontStopAtID or binaryTree.iD is None):
midStart = mid
j, leafNo = traverse(binaryTree.left, mid, leafNo)
mid = j
j, leafNo = traverse(binaryTree.right, j+1, leafNo)
traversalIDs[binaryTree] = TraversalID(midStart, mid, j)
return j, leafNo
traversalID = TraversalID(mid, mid, mid+1)
traversalID.leafNo = leafNo
#thus nodes must be unique
traversalIDs[binaryTree] = traversalID
return mid+1, leafNo+1
traverse(binaryTree)
if labelTree:
for binaryTree in traversalIDs.keys():
binaryTree.traversalID = traversalIDs[binaryTree]
return traversalIDs | [
"def",
"binaryTree_depthFirstNumbers",
"(",
"binaryTree",
",",
"labelTree",
"=",
"True",
",",
"dontStopAtID",
"=",
"True",
")",
":",
"traversalIDs",
"=",
"{",
"}",
"def",
"traverse",
"(",
"binaryTree",
",",
"mid",
"=",
"0",
",",
"leafNo",
"=",
"0",
")",
... | get mid-order depth first tree numbers | [
"get",
"mid",
"-",
"order",
"depth",
"first",
"tree",
"numbers"
] | 1decb75bb439b70721ec776f685ce98e25217d26 | https://github.com/benedictpaten/sonLib/blob/1decb75bb439b70721ec776f685ce98e25217d26/tree.py#L52-L74 | train |
benedictpaten/sonLib | tree.py | binaryTree_nodeNames | def binaryTree_nodeNames(binaryTree):
"""
creates names for the leave and internal nodes
of the newick tree from the leaf labels
"""
def fn(binaryTree, labels):
if binaryTree.internal:
fn(binaryTree.left, labels)
fn(binaryTree.right, labels)
labels[binaryTree.traversalID.mid] = labels[binaryTree.left.traversalID.mid] + "_" + labels[binaryTree.right.traversalID.mid]
return labels[binaryTree.traversalID.mid]
else:
labels[binaryTree.traversalID.mid] = str(binaryTree.iD)
return labels[binaryTree.traversalID.mid]
labels = [None]*binaryTree.traversalID.midEnd
fn(binaryTree, labels)
return labels | python | def binaryTree_nodeNames(binaryTree):
"""
creates names for the leave and internal nodes
of the newick tree from the leaf labels
"""
def fn(binaryTree, labels):
if binaryTree.internal:
fn(binaryTree.left, labels)
fn(binaryTree.right, labels)
labels[binaryTree.traversalID.mid] = labels[binaryTree.left.traversalID.mid] + "_" + labels[binaryTree.right.traversalID.mid]
return labels[binaryTree.traversalID.mid]
else:
labels[binaryTree.traversalID.mid] = str(binaryTree.iD)
return labels[binaryTree.traversalID.mid]
labels = [None]*binaryTree.traversalID.midEnd
fn(binaryTree, labels)
return labels | [
"def",
"binaryTree_nodeNames",
"(",
"binaryTree",
")",
":",
"def",
"fn",
"(",
"binaryTree",
",",
"labels",
")",
":",
"if",
"binaryTree",
".",
"internal",
":",
"fn",
"(",
"binaryTree",
".",
"left",
",",
"labels",
")",
"fn",
"(",
"binaryTree",
".",
"right"... | creates names for the leave and internal nodes
of the newick tree from the leaf labels | [
"creates",
"names",
"for",
"the",
"leave",
"and",
"internal",
"nodes",
"of",
"the",
"newick",
"tree",
"from",
"the",
"leaf",
"labels"
] | 1decb75bb439b70721ec776f685ce98e25217d26 | https://github.com/benedictpaten/sonLib/blob/1decb75bb439b70721ec776f685ce98e25217d26/tree.py#L76-L92 | train |
benedictpaten/sonLib | tree.py | makeRandomBinaryTree | def makeRandomBinaryTree(leafNodeNumber=None):
"""Creates a random binary tree.
"""
while True:
nodeNo = [-1]
def fn():
nodeNo[0] += 1
if random.random() > 0.6:
i = str(nodeNo[0])
return BinaryTree(0.00001 + random.random()*0.8, True, fn(), fn(), i)
else:
return BinaryTree(0.00001 + random.random()*0.8, False, None, None, str(nodeNo[0]))
tree = fn()
def fn2(tree):
if tree.internal:
return fn2(tree.left) + fn2(tree.right)
return 1
if leafNodeNumber is None or fn2(tree) == leafNodeNumber:
return tree | python | def makeRandomBinaryTree(leafNodeNumber=None):
"""Creates a random binary tree.
"""
while True:
nodeNo = [-1]
def fn():
nodeNo[0] += 1
if random.random() > 0.6:
i = str(nodeNo[0])
return BinaryTree(0.00001 + random.random()*0.8, True, fn(), fn(), i)
else:
return BinaryTree(0.00001 + random.random()*0.8, False, None, None, str(nodeNo[0]))
tree = fn()
def fn2(tree):
if tree.internal:
return fn2(tree.left) + fn2(tree.right)
return 1
if leafNodeNumber is None or fn2(tree) == leafNodeNumber:
return tree | [
"def",
"makeRandomBinaryTree",
"(",
"leafNodeNumber",
"=",
"None",
")",
":",
"while",
"True",
":",
"nodeNo",
"=",
"[",
"-",
"1",
"]",
"def",
"fn",
"(",
")",
":",
"nodeNo",
"[",
"0",
"]",
"+=",
"1",
"if",
"random",
".",
"random",
"(",
")",
">",
"0... | Creates a random binary tree. | [
"Creates",
"a",
"random",
"binary",
"tree",
"."
] | 1decb75bb439b70721ec776f685ce98e25217d26 | https://github.com/benedictpaten/sonLib/blob/1decb75bb439b70721ec776f685ce98e25217d26/tree.py#L123-L141 | train |
benedictpaten/sonLib | tree.py | getRandomBinaryTreeLeafNode | def getRandomBinaryTreeLeafNode(binaryTree):
"""Get random binary tree node.
"""
if binaryTree.internal == True:
if random.random() > 0.5:
return getRandomBinaryTreeLeafNode(binaryTree.left)
else:
return getRandomBinaryTreeLeafNode(binaryTree.right)
else:
return binaryTree | python | def getRandomBinaryTreeLeafNode(binaryTree):
"""Get random binary tree node.
"""
if binaryTree.internal == True:
if random.random() > 0.5:
return getRandomBinaryTreeLeafNode(binaryTree.left)
else:
return getRandomBinaryTreeLeafNode(binaryTree.right)
else:
return binaryTree | [
"def",
"getRandomBinaryTreeLeafNode",
"(",
"binaryTree",
")",
":",
"if",
"binaryTree",
".",
"internal",
"==",
"True",
":",
"if",
"random",
".",
"random",
"(",
")",
">",
"0.5",
":",
"return",
"getRandomBinaryTreeLeafNode",
"(",
"binaryTree",
".",
"left",
")",
... | Get random binary tree node. | [
"Get",
"random",
"binary",
"tree",
"node",
"."
] | 1decb75bb439b70721ec776f685ce98e25217d26 | https://github.com/benedictpaten/sonLib/blob/1decb75bb439b70721ec776f685ce98e25217d26/tree.py#L143-L152 | train |
benedictpaten/sonLib | tree.py | transformByDistance | def transformByDistance(wV, subModel, alphabetSize=4):
"""
transform wV by given substitution matrix
"""
nc = [0.0]*alphabetSize
for i in xrange(0, alphabetSize):
j = wV[i]
k = subModel[i]
for l in xrange(0, alphabetSize):
nc[l] += j * k[l]
return nc | python | def transformByDistance(wV, subModel, alphabetSize=4):
"""
transform wV by given substitution matrix
"""
nc = [0.0]*alphabetSize
for i in xrange(0, alphabetSize):
j = wV[i]
k = subModel[i]
for l in xrange(0, alphabetSize):
nc[l] += j * k[l]
return nc | [
"def",
"transformByDistance",
"(",
"wV",
",",
"subModel",
",",
"alphabetSize",
"=",
"4",
")",
":",
"nc",
"=",
"[",
"0.0",
"]",
"*",
"alphabetSize",
"for",
"i",
"in",
"xrange",
"(",
"0",
",",
"alphabetSize",
")",
":",
"j",
"=",
"wV",
"[",
"i",
"]",
... | transform wV by given substitution matrix | [
"transform",
"wV",
"by",
"given",
"substitution",
"matrix"
] | 1decb75bb439b70721ec776f685ce98e25217d26 | https://github.com/benedictpaten/sonLib/blob/1decb75bb439b70721ec776f685ce98e25217d26/tree.py#L162-L172 | train |
benedictpaten/sonLib | tree.py | normaliseWV | def normaliseWV(wV, normFac=1.0):
"""
make char probs divisible by one
"""
f = sum(wV) / normFac
return [ i/f for i in wV ] | python | def normaliseWV(wV, normFac=1.0):
"""
make char probs divisible by one
"""
f = sum(wV) / normFac
return [ i/f for i in wV ] | [
"def",
"normaliseWV",
"(",
"wV",
",",
"normFac",
"=",
"1.0",
")",
":",
"f",
"=",
"sum",
"(",
"wV",
")",
"/",
"normFac",
"return",
"[",
"i",
"/",
"f",
"for",
"i",
"in",
"wV",
"]"
] | make char probs divisible by one | [
"make",
"char",
"probs",
"divisible",
"by",
"one"
] | 1decb75bb439b70721ec776f685ce98e25217d26 | https://github.com/benedictpaten/sonLib/blob/1decb75bb439b70721ec776f685ce98e25217d26/tree.py#L180-L185 | train |
benedictpaten/sonLib | tree.py | felsensteins | def felsensteins(binaryTree, subMatrices, ancestorProbs, leaves, alphabetSize):
"""
calculates the un-normalised probabilties of each non-gap residue position
"""
l = {}
def upPass(binaryTree):
if binaryTree.internal: #is internal binaryTree
i = branchUp(binaryTree.left)
j = branchUp(binaryTree.right)
k = multiplyWV(i, j, alphabetSize)
l[binaryTree.traversalID.mid] = (k, i, j)
return k
l[binaryTree.traversalID.mid] = leaves[binaryTree.traversalID.leafNo]
return leaves[binaryTree.traversalID.leafNo]
def downPass(binaryTree, ancestorProbs):
if binaryTree.internal: #is internal binaryTree
i = l[binaryTree.traversalID.mid]
l[binaryTree.traversalID.mid] = multiplyWV(ancestorProbs, i[0], alphabetSize)
branchDown(binaryTree.left, multiplyWV(ancestorProbs, i[2], alphabetSize))
branchDown(binaryTree.right, multiplyWV(ancestorProbs, i[1], alphabetSize))
def branchUp(binaryTree):
return transformByDistance(upPass(binaryTree), subMatrices[binaryTree.traversalID.mid], alphabetSize)
def branchDown(binaryTree, ancestorProbs):
downPass(binaryTree, transformByDistance(ancestorProbs, subMatrices[binaryTree.traversalID.mid], alphabetSize))
upPass(binaryTree)
downPass(binaryTree, ancestorProbs)
return l | python | def felsensteins(binaryTree, subMatrices, ancestorProbs, leaves, alphabetSize):
"""
calculates the un-normalised probabilties of each non-gap residue position
"""
l = {}
def upPass(binaryTree):
if binaryTree.internal: #is internal binaryTree
i = branchUp(binaryTree.left)
j = branchUp(binaryTree.right)
k = multiplyWV(i, j, alphabetSize)
l[binaryTree.traversalID.mid] = (k, i, j)
return k
l[binaryTree.traversalID.mid] = leaves[binaryTree.traversalID.leafNo]
return leaves[binaryTree.traversalID.leafNo]
def downPass(binaryTree, ancestorProbs):
if binaryTree.internal: #is internal binaryTree
i = l[binaryTree.traversalID.mid]
l[binaryTree.traversalID.mid] = multiplyWV(ancestorProbs, i[0], alphabetSize)
branchDown(binaryTree.left, multiplyWV(ancestorProbs, i[2], alphabetSize))
branchDown(binaryTree.right, multiplyWV(ancestorProbs, i[1], alphabetSize))
def branchUp(binaryTree):
return transformByDistance(upPass(binaryTree), subMatrices[binaryTree.traversalID.mid], alphabetSize)
def branchDown(binaryTree, ancestorProbs):
downPass(binaryTree, transformByDistance(ancestorProbs, subMatrices[binaryTree.traversalID.mid], alphabetSize))
upPass(binaryTree)
downPass(binaryTree, ancestorProbs)
return l | [
"def",
"felsensteins",
"(",
"binaryTree",
",",
"subMatrices",
",",
"ancestorProbs",
",",
"leaves",
",",
"alphabetSize",
")",
":",
"l",
"=",
"{",
"}",
"def",
"upPass",
"(",
"binaryTree",
")",
":",
"if",
"binaryTree",
".",
"internal",
":",
"i",
"=",
"branc... | calculates the un-normalised probabilties of each non-gap residue position | [
"calculates",
"the",
"un",
"-",
"normalised",
"probabilties",
"of",
"each",
"non",
"-",
"gap",
"residue",
"position"
] | 1decb75bb439b70721ec776f685ce98e25217d26 | https://github.com/benedictpaten/sonLib/blob/1decb75bb439b70721ec776f685ce98e25217d26/tree.py#L194-L220 | train |
benedictpaten/sonLib | tree.py | annotateTree | def annotateTree(bT, fn):
"""
annotate a tree in an external array using the given function
"""
l = [None]*bT.traversalID.midEnd
def fn2(bT):
l[bT.traversalID.mid] = fn(bT)
if bT.internal:
fn2(bT.left)
fn2(bT.right)
fn2(bT)
return l | python | def annotateTree(bT, fn):
"""
annotate a tree in an external array using the given function
"""
l = [None]*bT.traversalID.midEnd
def fn2(bT):
l[bT.traversalID.mid] = fn(bT)
if bT.internal:
fn2(bT.left)
fn2(bT.right)
fn2(bT)
return l | [
"def",
"annotateTree",
"(",
"bT",
",",
"fn",
")",
":",
"l",
"=",
"[",
"None",
"]",
"*",
"bT",
".",
"traversalID",
".",
"midEnd",
"def",
"fn2",
"(",
"bT",
")",
":",
"l",
"[",
"bT",
".",
"traversalID",
".",
"mid",
"]",
"=",
"fn",
"(",
"bT",
")"... | annotate a tree in an external array using the given function | [
"annotate",
"a",
"tree",
"in",
"an",
"external",
"array",
"using",
"the",
"given",
"function"
] | 1decb75bb439b70721ec776f685ce98e25217d26 | https://github.com/benedictpaten/sonLib/blob/1decb75bb439b70721ec776f685ce98e25217d26/tree.py#L543-L554 | train |
benedictpaten/sonLib | tree.py | remodelTreeRemovingRoot | def remodelTreeRemovingRoot(root, node):
"""
Node is mid order number
"""
import bioio
assert root.traversalID.mid != node
hash = {}
def fn(bT):
if bT.traversalID.mid == node:
assert bT.internal == False
return [ bT ]
elif bT.internal:
i = fn(bT.left)
if i is None:
i = fn(bT.right)
if i is not None:
hash[i[-1]]= bT
i.append(bT)
return i
return None
l = fn(root)
def fn2(i, j):
if i.left == j:
return i.right
assert i.right == j
return i.left
def fn3(bT):
if hash[bT] == root:
s = '(' + bioio.printBinaryTree(fn2(hash[bT], bT), bT, True)[:-1] + ')'
else:
s = '(' + bioio.printBinaryTree(fn2(hash[bT], bT), bT, True)[:-1] + ',' + fn3(hash[bT]) + ')'
return s + ":" + str(bT.distance)
s = fn3(l[0]) + ';'
t = bioio.newickTreeParser(s)
return t | python | def remodelTreeRemovingRoot(root, node):
"""
Node is mid order number
"""
import bioio
assert root.traversalID.mid != node
hash = {}
def fn(bT):
if bT.traversalID.mid == node:
assert bT.internal == False
return [ bT ]
elif bT.internal:
i = fn(bT.left)
if i is None:
i = fn(bT.right)
if i is not None:
hash[i[-1]]= bT
i.append(bT)
return i
return None
l = fn(root)
def fn2(i, j):
if i.left == j:
return i.right
assert i.right == j
return i.left
def fn3(bT):
if hash[bT] == root:
s = '(' + bioio.printBinaryTree(fn2(hash[bT], bT), bT, True)[:-1] + ')'
else:
s = '(' + bioio.printBinaryTree(fn2(hash[bT], bT), bT, True)[:-1] + ',' + fn3(hash[bT]) + ')'
return s + ":" + str(bT.distance)
s = fn3(l[0]) + ';'
t = bioio.newickTreeParser(s)
return t | [
"def",
"remodelTreeRemovingRoot",
"(",
"root",
",",
"node",
")",
":",
"import",
"bioio",
"assert",
"root",
".",
"traversalID",
".",
"mid",
"!=",
"node",
"hash",
"=",
"{",
"}",
"def",
"fn",
"(",
"bT",
")",
":",
"if",
"bT",
".",
"traversalID",
".",
"mi... | Node is mid order number | [
"Node",
"is",
"mid",
"order",
"number"
] | 1decb75bb439b70721ec776f685ce98e25217d26 | https://github.com/benedictpaten/sonLib/blob/1decb75bb439b70721ec776f685ce98e25217d26/tree.py#L595-L629 | train |
benedictpaten/sonLib | tree.py | moveRoot | def moveRoot(root, branch):
"""
Removes the old root and places the new root at the mid point along the given branch
"""
import bioio
if root.traversalID.mid == branch:
return bioio.newickTreeParser(bioio.printBinaryTree(root, True))
def fn2(tree, seq):
if seq is not None:
return '(' + bioio.printBinaryTree(tree, True)[:-1] + ',' + seq + ')'
return bioio.printBinaryTree(tree, True)[:-1]
def fn(tree, seq):
if tree.traversalID.mid == branch:
i = tree.distance
tree.distance /= 2
seq = '(' + bioio.printBinaryTree(tree, True)[:-1] + ',(' + seq + ('):%s' % tree.distance) + ');'
tree.distance = i
return seq
if tree.internal:
if branch < tree.traversalID.mid:
seq = fn2(tree.right, seq)
return fn(tree.left, seq)
else:
assert branch > tree.traversalID.mid
seq = fn2(tree.left, seq)
return fn(tree.right, seq)
else:
return bioio.printBinaryTree(tree, True)[:-1]
s = fn(root, None)
return bioio.newickTreeParser(s) | python | def moveRoot(root, branch):
"""
Removes the old root and places the new root at the mid point along the given branch
"""
import bioio
if root.traversalID.mid == branch:
return bioio.newickTreeParser(bioio.printBinaryTree(root, True))
def fn2(tree, seq):
if seq is not None:
return '(' + bioio.printBinaryTree(tree, True)[:-1] + ',' + seq + ')'
return bioio.printBinaryTree(tree, True)[:-1]
def fn(tree, seq):
if tree.traversalID.mid == branch:
i = tree.distance
tree.distance /= 2
seq = '(' + bioio.printBinaryTree(tree, True)[:-1] + ',(' + seq + ('):%s' % tree.distance) + ');'
tree.distance = i
return seq
if tree.internal:
if branch < tree.traversalID.mid:
seq = fn2(tree.right, seq)
return fn(tree.left, seq)
else:
assert branch > tree.traversalID.mid
seq = fn2(tree.left, seq)
return fn(tree.right, seq)
else:
return bioio.printBinaryTree(tree, True)[:-1]
s = fn(root, None)
return bioio.newickTreeParser(s) | [
"def",
"moveRoot",
"(",
"root",
",",
"branch",
")",
":",
"import",
"bioio",
"if",
"root",
".",
"traversalID",
".",
"mid",
"==",
"branch",
":",
"return",
"bioio",
".",
"newickTreeParser",
"(",
"bioio",
".",
"printBinaryTree",
"(",
"root",
",",
"True",
")"... | Removes the old root and places the new root at the mid point along the given branch | [
"Removes",
"the",
"old",
"root",
"and",
"places",
"the",
"new",
"root",
"at",
"the",
"mid",
"point",
"along",
"the",
"given",
"branch"
] | 1decb75bb439b70721ec776f685ce98e25217d26 | https://github.com/benedictpaten/sonLib/blob/1decb75bb439b70721ec776f685ce98e25217d26/tree.py#L631-L660 | train |
benedictpaten/sonLib | tree.py | checkGeneTreeMatchesSpeciesTree | def checkGeneTreeMatchesSpeciesTree(speciesTree, geneTree, processID):
"""
Function to check ids in gene tree all match nodes in species tree
"""
def fn(tree, l):
if tree.internal:
fn(tree.left, l)
fn(tree.right, l)
else:
l.append(processID(tree.iD))
l = []
fn(speciesTree, l)
l2 = []
fn(geneTree, l2)
for i in l2:
#print "node", i, l
assert i in l | python | def checkGeneTreeMatchesSpeciesTree(speciesTree, geneTree, processID):
"""
Function to check ids in gene tree all match nodes in species tree
"""
def fn(tree, l):
if tree.internal:
fn(tree.left, l)
fn(tree.right, l)
else:
l.append(processID(tree.iD))
l = []
fn(speciesTree, l)
l2 = []
fn(geneTree, l2)
for i in l2:
#print "node", i, l
assert i in l | [
"def",
"checkGeneTreeMatchesSpeciesTree",
"(",
"speciesTree",
",",
"geneTree",
",",
"processID",
")",
":",
"def",
"fn",
"(",
"tree",
",",
"l",
")",
":",
"if",
"tree",
".",
"internal",
":",
"fn",
"(",
"tree",
".",
"left",
",",
"l",
")",
"fn",
"(",
"tr... | Function to check ids in gene tree all match nodes in species tree | [
"Function",
"to",
"check",
"ids",
"in",
"gene",
"tree",
"all",
"match",
"nodes",
"in",
"species",
"tree"
] | 1decb75bb439b70721ec776f685ce98e25217d26 | https://github.com/benedictpaten/sonLib/blob/1decb75bb439b70721ec776f685ce98e25217d26/tree.py#L662-L678 | train |
benedictpaten/sonLib | tree.py | calculateProbableRootOfGeneTree | def calculateProbableRootOfGeneTree(speciesTree, geneTree, processID=lambda x : x):
"""
Goes through each root possible branch making it the root.
Returns tree that requires the minimum number of duplications.
"""
#get all rooted trees
#run dup calc on each tree
#return tree with fewest number of dups
if geneTree.traversalID.midEnd <= 3:
return (0, 0, geneTree)
checkGeneTreeMatchesSpeciesTree(speciesTree, geneTree, processID)
l = []
def fn(tree):
if tree.traversalID.mid != geneTree.left.traversalID.mid and tree.traversalID.mid != geneTree.right.traversalID.mid:
newGeneTree = moveRoot(geneTree, tree.traversalID.mid)
binaryTree_depthFirstNumbers(newGeneTree)
dupCount, lossCount = calculateDupsAndLossesByReconcilingTrees(speciesTree, newGeneTree, processID)
l.append((dupCount, lossCount, newGeneTree))
if tree.internal:
fn(tree.left)
fn(tree.right)
fn(geneTree)
l.sort()
return l[0][2], l[0][0], l[0][1] | python | def calculateProbableRootOfGeneTree(speciesTree, geneTree, processID=lambda x : x):
"""
Goes through each root possible branch making it the root.
Returns tree that requires the minimum number of duplications.
"""
#get all rooted trees
#run dup calc on each tree
#return tree with fewest number of dups
if geneTree.traversalID.midEnd <= 3:
return (0, 0, geneTree)
checkGeneTreeMatchesSpeciesTree(speciesTree, geneTree, processID)
l = []
def fn(tree):
if tree.traversalID.mid != geneTree.left.traversalID.mid and tree.traversalID.mid != geneTree.right.traversalID.mid:
newGeneTree = moveRoot(geneTree, tree.traversalID.mid)
binaryTree_depthFirstNumbers(newGeneTree)
dupCount, lossCount = calculateDupsAndLossesByReconcilingTrees(speciesTree, newGeneTree, processID)
l.append((dupCount, lossCount, newGeneTree))
if tree.internal:
fn(tree.left)
fn(tree.right)
fn(geneTree)
l.sort()
return l[0][2], l[0][0], l[0][1] | [
"def",
"calculateProbableRootOfGeneTree",
"(",
"speciesTree",
",",
"geneTree",
",",
"processID",
"=",
"lambda",
"x",
":",
"x",
")",
":",
"if",
"geneTree",
".",
"traversalID",
".",
"midEnd",
"<=",
"3",
":",
"return",
"(",
"0",
",",
"0",
",",
"geneTree",
"... | Goes through each root possible branch making it the root.
Returns tree that requires the minimum number of duplications. | [
"Goes",
"through",
"each",
"root",
"possible",
"branch",
"making",
"it",
"the",
"root",
".",
"Returns",
"tree",
"that",
"requires",
"the",
"minimum",
"number",
"of",
"duplications",
"."
] | 1decb75bb439b70721ec776f685ce98e25217d26 | https://github.com/benedictpaten/sonLib/blob/1decb75bb439b70721ec776f685ce98e25217d26/tree.py#L753-L776 | train |
benedictpaten/sonLib | bioio.py | redirectLoggerStreamHandlers | def redirectLoggerStreamHandlers(oldStream, newStream):
"""Redirect the stream of a stream handler to a different stream
"""
for handler in list(logger.handlers): #Remove old handlers
if handler.stream == oldStream:
handler.close()
logger.removeHandler(handler)
for handler in logger.handlers: #Do not add a duplicate handler
if handler.stream == newStream:
return
logger.addHandler(logging.StreamHandler(newStream)) | python | def redirectLoggerStreamHandlers(oldStream, newStream):
"""Redirect the stream of a stream handler to a different stream
"""
for handler in list(logger.handlers): #Remove old handlers
if handler.stream == oldStream:
handler.close()
logger.removeHandler(handler)
for handler in logger.handlers: #Do not add a duplicate handler
if handler.stream == newStream:
return
logger.addHandler(logging.StreamHandler(newStream)) | [
"def",
"redirectLoggerStreamHandlers",
"(",
"oldStream",
",",
"newStream",
")",
":",
"for",
"handler",
"in",
"list",
"(",
"logger",
".",
"handlers",
")",
":",
"if",
"handler",
".",
"stream",
"==",
"oldStream",
":",
"handler",
".",
"close",
"(",
")",
"logge... | Redirect the stream of a stream handler to a different stream | [
"Redirect",
"the",
"stream",
"of",
"a",
"stream",
"handler",
"to",
"a",
"different",
"stream"
] | 1decb75bb439b70721ec776f685ce98e25217d26 | https://github.com/benedictpaten/sonLib/blob/1decb75bb439b70721ec776f685ce98e25217d26/bioio.py#L52-L62 | train |
benedictpaten/sonLib | bioio.py | popen | def popen(command, tempFile):
"""Runs a command and captures standard out in the given temp file.
"""
fileHandle = open(tempFile, 'w')
logger.debug("Running the command: %s" % command)
sts = subprocess.call(command, shell=True, stdout=fileHandle, bufsize=-1)
fileHandle.close()
if sts != 0:
raise RuntimeError("Command: %s exited with non-zero status %i" % (command, sts))
return sts | python | def popen(command, tempFile):
"""Runs a command and captures standard out in the given temp file.
"""
fileHandle = open(tempFile, 'w')
logger.debug("Running the command: %s" % command)
sts = subprocess.call(command, shell=True, stdout=fileHandle, bufsize=-1)
fileHandle.close()
if sts != 0:
raise RuntimeError("Command: %s exited with non-zero status %i" % (command, sts))
return sts | [
"def",
"popen",
"(",
"command",
",",
"tempFile",
")",
":",
"fileHandle",
"=",
"open",
"(",
"tempFile",
",",
"'w'",
")",
"logger",
".",
"debug",
"(",
"\"Running the command: %s\"",
"%",
"command",
")",
"sts",
"=",
"subprocess",
".",
"call",
"(",
"command",
... | Runs a command and captures standard out in the given temp file. | [
"Runs",
"a",
"command",
"and",
"captures",
"standard",
"out",
"in",
"the",
"given",
"temp",
"file",
"."
] | 1decb75bb439b70721ec776f685ce98e25217d26 | https://github.com/benedictpaten/sonLib/blob/1decb75bb439b70721ec776f685ce98e25217d26/bioio.py#L188-L197 | train |
benedictpaten/sonLib | bioio.py | popenCatch | def popenCatch(command, stdinString=None):
"""Runs a command and return standard out.
"""
logger.debug("Running the command: %s" % command)
if stdinString != None:
process = subprocess.Popen(command, shell=True,
stdin=subprocess.PIPE, stdout=subprocess.PIPE, bufsize=-1)
output, nothing = process.communicate(stdinString)
else:
process = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, stderr=sys.stderr, bufsize=-1)
output, nothing = process.communicate() #process.stdout.read().strip()
sts = process.wait()
if sts != 0:
raise RuntimeError("Command: %s with stdin string '%s' exited with non-zero status %i" % (command, stdinString, sts))
return output | python | def popenCatch(command, stdinString=None):
"""Runs a command and return standard out.
"""
logger.debug("Running the command: %s" % command)
if stdinString != None:
process = subprocess.Popen(command, shell=True,
stdin=subprocess.PIPE, stdout=subprocess.PIPE, bufsize=-1)
output, nothing = process.communicate(stdinString)
else:
process = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, stderr=sys.stderr, bufsize=-1)
output, nothing = process.communicate() #process.stdout.read().strip()
sts = process.wait()
if sts != 0:
raise RuntimeError("Command: %s with stdin string '%s' exited with non-zero status %i" % (command, stdinString, sts))
return output | [
"def",
"popenCatch",
"(",
"command",
",",
"stdinString",
"=",
"None",
")",
":",
"logger",
".",
"debug",
"(",
"\"Running the command: %s\"",
"%",
"command",
")",
"if",
"stdinString",
"!=",
"None",
":",
"process",
"=",
"subprocess",
".",
"Popen",
"(",
"command... | Runs a command and return standard out. | [
"Runs",
"a",
"command",
"and",
"return",
"standard",
"out",
"."
] | 1decb75bb439b70721ec776f685ce98e25217d26 | https://github.com/benedictpaten/sonLib/blob/1decb75bb439b70721ec776f685ce98e25217d26/bioio.py#L199-L213 | train |
benedictpaten/sonLib | bioio.py | getTotalCpuTimeAndMemoryUsage | def getTotalCpuTimeAndMemoryUsage():
"""Gives the total cpu time and memory usage of itself and its children.
"""
me = resource.getrusage(resource.RUSAGE_SELF)
childs = resource.getrusage(resource.RUSAGE_CHILDREN)
totalCpuTime = me.ru_utime+me.ru_stime+childs.ru_utime+childs.ru_stime
totalMemoryUsage = me.ru_maxrss+ me.ru_maxrss
return totalCpuTime, totalMemoryUsage | python | def getTotalCpuTimeAndMemoryUsage():
"""Gives the total cpu time and memory usage of itself and its children.
"""
me = resource.getrusage(resource.RUSAGE_SELF)
childs = resource.getrusage(resource.RUSAGE_CHILDREN)
totalCpuTime = me.ru_utime+me.ru_stime+childs.ru_utime+childs.ru_stime
totalMemoryUsage = me.ru_maxrss+ me.ru_maxrss
return totalCpuTime, totalMemoryUsage | [
"def",
"getTotalCpuTimeAndMemoryUsage",
"(",
")",
":",
"me",
"=",
"resource",
".",
"getrusage",
"(",
"resource",
".",
"RUSAGE_SELF",
")",
"childs",
"=",
"resource",
".",
"getrusage",
"(",
"resource",
".",
"RUSAGE_CHILDREN",
")",
"totalCpuTime",
"=",
"me",
".",... | Gives the total cpu time and memory usage of itself and its children. | [
"Gives",
"the",
"total",
"cpu",
"time",
"and",
"memory",
"usage",
"of",
"itself",
"and",
"its",
"children",
"."
] | 1decb75bb439b70721ec776f685ce98e25217d26 | https://github.com/benedictpaten/sonLib/blob/1decb75bb439b70721ec776f685ce98e25217d26/bioio.py#L231-L238 | train |
benedictpaten/sonLib | bioio.py | saveInputs | def saveInputs(savedInputsDir, listOfFilesAndDirsToSave):
"""Copies the list of files to a directory created in the save inputs dir,
and returns the name of this directory.
"""
logger.info("Saving the inputs: %s to the directory: %s" % (" ".join(listOfFilesAndDirsToSave), savedInputsDir))
assert os.path.isdir(savedInputsDir)
#savedInputsDir = getTempDirectory(saveInputsDir)
createdFiles = []
for fileName in listOfFilesAndDirsToSave:
if os.path.isfile(fileName):
copiedFileName = os.path.join(savedInputsDir, os.path.split(fileName)[-1])
system("cp %s %s" % (fileName, copiedFileName))
else:
copiedFileName = os.path.join(savedInputsDir, os.path.split(fileName)[-1]) + ".tar"
system("tar -cf %s %s" % (copiedFileName, fileName))
createdFiles.append(copiedFileName)
return createdFiles | python | def saveInputs(savedInputsDir, listOfFilesAndDirsToSave):
"""Copies the list of files to a directory created in the save inputs dir,
and returns the name of this directory.
"""
logger.info("Saving the inputs: %s to the directory: %s" % (" ".join(listOfFilesAndDirsToSave), savedInputsDir))
assert os.path.isdir(savedInputsDir)
#savedInputsDir = getTempDirectory(saveInputsDir)
createdFiles = []
for fileName in listOfFilesAndDirsToSave:
if os.path.isfile(fileName):
copiedFileName = os.path.join(savedInputsDir, os.path.split(fileName)[-1])
system("cp %s %s" % (fileName, copiedFileName))
else:
copiedFileName = os.path.join(savedInputsDir, os.path.split(fileName)[-1]) + ".tar"
system("tar -cf %s %s" % (copiedFileName, fileName))
createdFiles.append(copiedFileName)
return createdFiles | [
"def",
"saveInputs",
"(",
"savedInputsDir",
",",
"listOfFilesAndDirsToSave",
")",
":",
"logger",
".",
"info",
"(",
"\"Saving the inputs: %s to the directory: %s\"",
"%",
"(",
"\" \"",
".",
"join",
"(",
"listOfFilesAndDirsToSave",
")",
",",
"savedInputsDir",
")",
")",
... | Copies the list of files to a directory created in the save inputs dir,
and returns the name of this directory. | [
"Copies",
"the",
"list",
"of",
"files",
"to",
"a",
"directory",
"created",
"in",
"the",
"save",
"inputs",
"dir",
"and",
"returns",
"the",
"name",
"of",
"this",
"directory",
"."
] | 1decb75bb439b70721ec776f685ce98e25217d26 | https://github.com/benedictpaten/sonLib/blob/1decb75bb439b70721ec776f685ce98e25217d26/bioio.py#L318-L334 | train |
benedictpaten/sonLib | bioio.py | nameValue | def nameValue(name, value, valueType=str, quotes=False):
"""Little function to make it easier to make name value strings for commands.
"""
if valueType == bool:
if value:
return "--%s" % name
return ""
if value is None:
return ""
if quotes:
return "--%s '%s'" % (name, valueType(value))
return "--%s %s" % (name, valueType(value)) | python | def nameValue(name, value, valueType=str, quotes=False):
"""Little function to make it easier to make name value strings for commands.
"""
if valueType == bool:
if value:
return "--%s" % name
return ""
if value is None:
return ""
if quotes:
return "--%s '%s'" % (name, valueType(value))
return "--%s %s" % (name, valueType(value)) | [
"def",
"nameValue",
"(",
"name",
",",
"value",
",",
"valueType",
"=",
"str",
",",
"quotes",
"=",
"False",
")",
":",
"if",
"valueType",
"==",
"bool",
":",
"if",
"value",
":",
"return",
"\"--%s\"",
"%",
"name",
"return",
"\"\"",
"if",
"value",
"is",
"N... | Little function to make it easier to make name value strings for commands. | [
"Little",
"function",
"to",
"make",
"it",
"easier",
"to",
"make",
"name",
"value",
"strings",
"for",
"commands",
"."
] | 1decb75bb439b70721ec776f685ce98e25217d26 | https://github.com/benedictpaten/sonLib/blob/1decb75bb439b70721ec776f685ce98e25217d26/bioio.py#L400-L411 | train |
benedictpaten/sonLib | bioio.py | makeSubDir | def makeSubDir(dirName):
"""Makes a given subdirectory if it doesn't already exist, making sure it us public.
"""
if not os.path.exists(dirName):
os.mkdir(dirName)
os.chmod(dirName, 0777)
return dirName | python | def makeSubDir(dirName):
"""Makes a given subdirectory if it doesn't already exist, making sure it us public.
"""
if not os.path.exists(dirName):
os.mkdir(dirName)
os.chmod(dirName, 0777)
return dirName | [
"def",
"makeSubDir",
"(",
"dirName",
")",
":",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"dirName",
")",
":",
"os",
".",
"mkdir",
"(",
"dirName",
")",
"os",
".",
"chmod",
"(",
"dirName",
",",
"0777",
")",
"return",
"dirName"
] | Makes a given subdirectory if it doesn't already exist, making sure it us public. | [
"Makes",
"a",
"given",
"subdirectory",
"if",
"it",
"doesn",
"t",
"already",
"exist",
"making",
"sure",
"it",
"us",
"public",
"."
] | 1decb75bb439b70721ec776f685ce98e25217d26 | https://github.com/benedictpaten/sonLib/blob/1decb75bb439b70721ec776f685ce98e25217d26/bioio.py#L426-L432 | train |
benedictpaten/sonLib | bioio.py | getTempFile | def getTempFile(suffix="", rootDir=None):
"""Returns a string representing a temporary file, that must be manually deleted
"""
if rootDir is None:
handle, tmpFile = tempfile.mkstemp(suffix)
os.close(handle)
return tmpFile
else:
tmpFile = os.path.join(rootDir, "tmp_" + getRandomAlphaNumericString() + suffix)
open(tmpFile, 'w').close()
os.chmod(tmpFile, 0777) #Ensure everyone has access to the file.
return tmpFile | python | def getTempFile(suffix="", rootDir=None):
"""Returns a string representing a temporary file, that must be manually deleted
"""
if rootDir is None:
handle, tmpFile = tempfile.mkstemp(suffix)
os.close(handle)
return tmpFile
else:
tmpFile = os.path.join(rootDir, "tmp_" + getRandomAlphaNumericString() + suffix)
open(tmpFile, 'w').close()
os.chmod(tmpFile, 0777) #Ensure everyone has access to the file.
return tmpFile | [
"def",
"getTempFile",
"(",
"suffix",
"=",
"\"\"",
",",
"rootDir",
"=",
"None",
")",
":",
"if",
"rootDir",
"is",
"None",
":",
"handle",
",",
"tmpFile",
"=",
"tempfile",
".",
"mkstemp",
"(",
"suffix",
")",
"os",
".",
"close",
"(",
"handle",
")",
"retur... | Returns a string representing a temporary file, that must be manually deleted | [
"Returns",
"a",
"string",
"representing",
"a",
"temporary",
"file",
"that",
"must",
"be",
"manually",
"deleted"
] | 1decb75bb439b70721ec776f685ce98e25217d26 | https://github.com/benedictpaten/sonLib/blob/1decb75bb439b70721ec776f685ce98e25217d26/bioio.py#L434-L445 | train |
benedictpaten/sonLib | bioio.py | getTempDirectory | def getTempDirectory(rootDir=None):
"""
returns a temporary directory that must be manually deleted. rootDir will be
created if it does not exist.
"""
if rootDir is None:
return tempfile.mkdtemp()
else:
if not os.path.exists(rootDir):
try:
os.makedirs(rootDir)
except OSError:
# Maybe it got created between the test and the makedirs call?
pass
while True:
# Keep trying names until we find one that doesn't exist. If one
# does exist, don't nest inside it, because someone else may be
# using it for something.
tmpDir = os.path.join(rootDir, "tmp_" + getRandomAlphaNumericString())
if not os.path.exists(tmpDir):
break
os.mkdir(tmpDir)
os.chmod(tmpDir, 0777) #Ensure everyone has access to the file.
return tmpDir | python | def getTempDirectory(rootDir=None):
"""
returns a temporary directory that must be manually deleted. rootDir will be
created if it does not exist.
"""
if rootDir is None:
return tempfile.mkdtemp()
else:
if not os.path.exists(rootDir):
try:
os.makedirs(rootDir)
except OSError:
# Maybe it got created between the test and the makedirs call?
pass
while True:
# Keep trying names until we find one that doesn't exist. If one
# does exist, don't nest inside it, because someone else may be
# using it for something.
tmpDir = os.path.join(rootDir, "tmp_" + getRandomAlphaNumericString())
if not os.path.exists(tmpDir):
break
os.mkdir(tmpDir)
os.chmod(tmpDir, 0777) #Ensure everyone has access to the file.
return tmpDir | [
"def",
"getTempDirectory",
"(",
"rootDir",
"=",
"None",
")",
":",
"if",
"rootDir",
"is",
"None",
":",
"return",
"tempfile",
".",
"mkdtemp",
"(",
")",
"else",
":",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"rootDir",
")",
":",
"try",
":",
... | returns a temporary directory that must be manually deleted. rootDir will be
created if it does not exist. | [
"returns",
"a",
"temporary",
"directory",
"that",
"must",
"be",
"manually",
"deleted",
".",
"rootDir",
"will",
"be",
"created",
"if",
"it",
"does",
"not",
"exist",
"."
] | 1decb75bb439b70721ec776f685ce98e25217d26 | https://github.com/benedictpaten/sonLib/blob/1decb75bb439b70721ec776f685ce98e25217d26/bioio.py#L447-L472 | train |
benedictpaten/sonLib | bioio.py | catFiles | def catFiles(filesToCat, catFile):
"""Cats a bunch of files into one file. Ensures a no more than maxCat files
are concatenated at each step.
"""
if len(filesToCat) == 0: #We must handle this case or the cat call will hang waiting for input
open(catFile, 'w').close()
return
maxCat = 25
system("cat %s > %s" % (" ".join(filesToCat[:maxCat]), catFile))
filesToCat = filesToCat[maxCat:]
while len(filesToCat) > 0:
system("cat %s >> %s" % (" ".join(filesToCat[:maxCat]), catFile))
filesToCat = filesToCat[maxCat:] | python | def catFiles(filesToCat, catFile):
"""Cats a bunch of files into one file. Ensures a no more than maxCat files
are concatenated at each step.
"""
if len(filesToCat) == 0: #We must handle this case or the cat call will hang waiting for input
open(catFile, 'w').close()
return
maxCat = 25
system("cat %s > %s" % (" ".join(filesToCat[:maxCat]), catFile))
filesToCat = filesToCat[maxCat:]
while len(filesToCat) > 0:
system("cat %s >> %s" % (" ".join(filesToCat[:maxCat]), catFile))
filesToCat = filesToCat[maxCat:] | [
"def",
"catFiles",
"(",
"filesToCat",
",",
"catFile",
")",
":",
"if",
"len",
"(",
"filesToCat",
")",
"==",
"0",
":",
"open",
"(",
"catFile",
",",
"'w'",
")",
".",
"close",
"(",
")",
"return",
"maxCat",
"=",
"25",
"system",
"(",
"\"cat %s > %s\"",
"%"... | Cats a bunch of files into one file. Ensures a no more than maxCat files
are concatenated at each step. | [
"Cats",
"a",
"bunch",
"of",
"files",
"into",
"one",
"file",
".",
"Ensures",
"a",
"no",
"more",
"than",
"maxCat",
"files",
"are",
"concatenated",
"at",
"each",
"step",
"."
] | 1decb75bb439b70721ec776f685ce98e25217d26 | https://github.com/benedictpaten/sonLib/blob/1decb75bb439b70721ec776f685ce98e25217d26/bioio.py#L651-L663 | train |
benedictpaten/sonLib | bioio.py | prettyXml | def prettyXml(elem):
""" Return a pretty-printed XML string for the ElementTree Element.
"""
roughString = ET.tostring(elem, "utf-8")
reparsed = minidom.parseString(roughString)
return reparsed.toprettyxml(indent=" ") | python | def prettyXml(elem):
""" Return a pretty-printed XML string for the ElementTree Element.
"""
roughString = ET.tostring(elem, "utf-8")
reparsed = minidom.parseString(roughString)
return reparsed.toprettyxml(indent=" ") | [
"def",
"prettyXml",
"(",
"elem",
")",
":",
"roughString",
"=",
"ET",
".",
"tostring",
"(",
"elem",
",",
"\"utf-8\"",
")",
"reparsed",
"=",
"minidom",
".",
"parseString",
"(",
"roughString",
")",
"return",
"reparsed",
".",
"toprettyxml",
"(",
"indent",
"=",... | Return a pretty-printed XML string for the ElementTree Element. | [
"Return",
"a",
"pretty",
"-",
"printed",
"XML",
"string",
"for",
"the",
"ElementTree",
"Element",
"."
] | 1decb75bb439b70721ec776f685ce98e25217d26 | https://github.com/benedictpaten/sonLib/blob/1decb75bb439b70721ec776f685ce98e25217d26/bioio.py#L665-L670 | train |
benedictpaten/sonLib | bioio.py | fastaEncodeHeader | def fastaEncodeHeader(attributes):
"""Decodes the fasta header
"""
for i in attributes:
assert len(str(i).split()) == 1
return "|".join([ str(i) for i in attributes ]) | python | def fastaEncodeHeader(attributes):
"""Decodes the fasta header
"""
for i in attributes:
assert len(str(i).split()) == 1
return "|".join([ str(i) for i in attributes ]) | [
"def",
"fastaEncodeHeader",
"(",
"attributes",
")",
":",
"for",
"i",
"in",
"attributes",
":",
"assert",
"len",
"(",
"str",
"(",
"i",
")",
".",
"split",
"(",
")",
")",
"==",
"1",
"return",
"\"|\"",
".",
"join",
"(",
"[",
"str",
"(",
"i",
")",
"for... | Decodes the fasta header | [
"Decodes",
"the",
"fasta",
"header"
] | 1decb75bb439b70721ec776f685ce98e25217d26 | https://github.com/benedictpaten/sonLib/blob/1decb75bb439b70721ec776f685ce98e25217d26/bioio.py#L700-L705 | train |
benedictpaten/sonLib | bioio.py | fastaWrite | def fastaWrite(fileHandleOrFile, name, seq, mode="w"):
"""Writes out fasta file
"""
fileHandle = _getFileHandle(fileHandleOrFile, mode)
valid_chars = {x for x in string.ascii_letters + "-"}
try:
assert any([isinstance(seq, unicode), isinstance(seq, str)])
except AssertionError:
raise RuntimeError("Sequence is not unicode or string")
try:
assert all(x in valid_chars for x in seq)
except AssertionError:
bad_chars = {x for x in seq if x not in valid_chars}
raise RuntimeError("Invalid FASTA character(s) see in fasta sequence: {}".format(bad_chars))
fileHandle.write(">%s\n" % name)
chunkSize = 100
for i in xrange(0, len(seq), chunkSize):
fileHandle.write("%s\n" % seq[i:i+chunkSize])
if isinstance(fileHandleOrFile, "".__class__):
fileHandle.close() | python | def fastaWrite(fileHandleOrFile, name, seq, mode="w"):
"""Writes out fasta file
"""
fileHandle = _getFileHandle(fileHandleOrFile, mode)
valid_chars = {x for x in string.ascii_letters + "-"}
try:
assert any([isinstance(seq, unicode), isinstance(seq, str)])
except AssertionError:
raise RuntimeError("Sequence is not unicode or string")
try:
assert all(x in valid_chars for x in seq)
except AssertionError:
bad_chars = {x for x in seq if x not in valid_chars}
raise RuntimeError("Invalid FASTA character(s) see in fasta sequence: {}".format(bad_chars))
fileHandle.write(">%s\n" % name)
chunkSize = 100
for i in xrange(0, len(seq), chunkSize):
fileHandle.write("%s\n" % seq[i:i+chunkSize])
if isinstance(fileHandleOrFile, "".__class__):
fileHandle.close() | [
"def",
"fastaWrite",
"(",
"fileHandleOrFile",
",",
"name",
",",
"seq",
",",
"mode",
"=",
"\"w\"",
")",
":",
"fileHandle",
"=",
"_getFileHandle",
"(",
"fileHandleOrFile",
",",
"mode",
")",
"valid_chars",
"=",
"{",
"x",
"for",
"x",
"in",
"string",
".",
"as... | Writes out fasta file | [
"Writes",
"out",
"fasta",
"file"
] | 1decb75bb439b70721ec776f685ce98e25217d26 | https://github.com/benedictpaten/sonLib/blob/1decb75bb439b70721ec776f685ce98e25217d26/bioio.py#L741-L760 | train |
benedictpaten/sonLib | bioio.py | fastqRead | def fastqRead(fileHandleOrFile):
"""Reads a fastq file iteratively
"""
fileHandle = _getFileHandle(fileHandleOrFile)
line = fileHandle.readline()
while line != '':
if line[0] == '@':
name = line[1:-1]
seq = fileHandle.readline()[:-1]
plus = fileHandle.readline()
if plus[0] != '+':
raise RuntimeError("Got unexpected line: %s" % plus)
qualValues = [ ord(i) for i in fileHandle.readline()[:-1] ]
if len(seq) != len(qualValues):
logger.critical("Got a mismatch between the number of sequence characters (%s) and number of qual values (%s) for sequence: %s, ignoring returning None" % (len(seq), len(qualValues), name))
qualValues = None
else:
for i in qualValues:
if i < 33 or i > 126:
raise RuntimeError("Got a qual value out of range %s (range is 33 to 126)" % i)
for i in seq:
#For safety and sanity I only allows roman alphabet characters in fasta sequences.
if not ((i >= 'A' and i <= 'Z') or (i >= 'a' and i <= 'z') or i == '-'):
raise RuntimeError("Invalid FASTQ character, ASCII code = \'%d\', found in input sequence %s" % (ord(i), name))
yield name, seq, qualValues
line = fileHandle.readline()
if isinstance(fileHandleOrFile, "".__class__):
fileHandle.close() | python | def fastqRead(fileHandleOrFile):
"""Reads a fastq file iteratively
"""
fileHandle = _getFileHandle(fileHandleOrFile)
line = fileHandle.readline()
while line != '':
if line[0] == '@':
name = line[1:-1]
seq = fileHandle.readline()[:-1]
plus = fileHandle.readline()
if plus[0] != '+':
raise RuntimeError("Got unexpected line: %s" % plus)
qualValues = [ ord(i) for i in fileHandle.readline()[:-1] ]
if len(seq) != len(qualValues):
logger.critical("Got a mismatch between the number of sequence characters (%s) and number of qual values (%s) for sequence: %s, ignoring returning None" % (len(seq), len(qualValues), name))
qualValues = None
else:
for i in qualValues:
if i < 33 or i > 126:
raise RuntimeError("Got a qual value out of range %s (range is 33 to 126)" % i)
for i in seq:
#For safety and sanity I only allows roman alphabet characters in fasta sequences.
if not ((i >= 'A' and i <= 'Z') or (i >= 'a' and i <= 'z') or i == '-'):
raise RuntimeError("Invalid FASTQ character, ASCII code = \'%d\', found in input sequence %s" % (ord(i), name))
yield name, seq, qualValues
line = fileHandle.readline()
if isinstance(fileHandleOrFile, "".__class__):
fileHandle.close() | [
"def",
"fastqRead",
"(",
"fileHandleOrFile",
")",
":",
"fileHandle",
"=",
"_getFileHandle",
"(",
"fileHandleOrFile",
")",
"line",
"=",
"fileHandle",
".",
"readline",
"(",
")",
"while",
"line",
"!=",
"''",
":",
"if",
"line",
"[",
"0",
"]",
"==",
"'@'",
":... | Reads a fastq file iteratively | [
"Reads",
"a",
"fastq",
"file",
"iteratively"
] | 1decb75bb439b70721ec776f685ce98e25217d26 | https://github.com/benedictpaten/sonLib/blob/1decb75bb439b70721ec776f685ce98e25217d26/bioio.py#L762-L789 | train |
benedictpaten/sonLib | bioio.py | _getMultiFastaOffsets | def _getMultiFastaOffsets(fasta):
"""Reads in columns of multiple alignment and returns them iteratively
"""
f = open(fasta, 'r')
i = 0
j = f.read(1)
l = []
while j != '':
i += 1
if j == '>':
i += 1
while f.read(1) != '\n':
i += 1
l.append(i)
j = f.read(1)
f.close()
return l | python | def _getMultiFastaOffsets(fasta):
"""Reads in columns of multiple alignment and returns them iteratively
"""
f = open(fasta, 'r')
i = 0
j = f.read(1)
l = []
while j != '':
i += 1
if j == '>':
i += 1
while f.read(1) != '\n':
i += 1
l.append(i)
j = f.read(1)
f.close()
return l | [
"def",
"_getMultiFastaOffsets",
"(",
"fasta",
")",
":",
"f",
"=",
"open",
"(",
"fasta",
",",
"'r'",
")",
"i",
"=",
"0",
"j",
"=",
"f",
".",
"read",
"(",
"1",
")",
"l",
"=",
"[",
"]",
"while",
"j",
"!=",
"''",
":",
"i",
"+=",
"1",
"if",
"j",... | Reads in columns of multiple alignment and returns them iteratively | [
"Reads",
"in",
"columns",
"of",
"multiple",
"alignment",
"and",
"returns",
"them",
"iteratively"
] | 1decb75bb439b70721ec776f685ce98e25217d26 | https://github.com/benedictpaten/sonLib/blob/1decb75bb439b70721ec776f685ce98e25217d26/bioio.py#L811-L827 | train |
def fastaReadHeaders(fasta):
    """Return the header lines of a fasta file, without the leading '>'.

    Args:
        fasta: path to a fasta-format file.

    Returns:
        List of header strings in file order (the '>' and the trailing
        newline are stripped).
    """
    headers = []
    # 'with' guarantees the handle is closed even if reading raises.
    with open(fasta, 'r') as fileHandle:
        for line in fileHandle:
            if line.startswith('>'):
                # rstrip('\n') also copes with a final line that lacks a
                # trailing newline (the old 'assert line[-1] == "\n"'
                # crashed on such files).
                headers.append(line[1:].rstrip('\n'))
    return headers
"""Returns a list of fasta header lines, excluding
"""
headers = []
fileHandle = open(fasta, 'r')
line = fileHandle.readline()
while line != '':
assert line[-1] == '\n'
if line[0] == '>':
headers.append(line[1:-1])
line = fileHandle.readline()
fileHandle.close()
return headers | [
"def",
"fastaReadHeaders",
"(",
"fasta",
")",
":",
"headers",
"=",
"[",
"]",
"fileHandle",
"=",
"open",
"(",
"fasta",
",",
"'r'",
")",
"line",
"=",
"fileHandle",
".",
"readline",
"(",
")",
"while",
"line",
"!=",
"''",
":",
"assert",
"line",
"[",
"-",... | Returns a list of fasta header lines, excluding | [
"Returns",
"a",
"list",
"of",
"fasta",
"header",
"lines",
"excluding"
] | 1decb75bb439b70721ec776f685ce98e25217d26 | https://github.com/benedictpaten/sonLib/blob/1decb75bb439b70721ec776f685ce98e25217d26/bioio.py#L829-L841 | train |
def fastaAlignmentRead(fasta, mapFn=(lambda x : x), l=None):
    """
    Generator over the columns of a multiple alignment in multi-fasta format.

    Opens one file handle per sequence, seeked to that sequence's first
    residue (offsets from _getMultiFastaOffsets, or the precomputed list
    l, which is copied rather than mutated).  Yields one list per
    alignment column with a character per sequence.  mapFn is applied to
    every character except the first sequence's (NOTE(review): the
    xrange(1, seqNo) loop skips index 0 -- confirm this asymmetry is
    intended).
    """
    if l is None:
        l = _getMultiFastaOffsets(fasta)
    else:
        l = l[:]
    seqNo = len(l)
    # Replace each offset with a dedicated handle positioned at it.
    for i in xrange(0, seqNo):
        j = open(fasta, 'r')
        j.seek(l[i])
        l[i] = j
    column = [sys.maxint]*seqNo  # placeholder values, overwritten below
    if seqNo != 0:
        while True:
            for j in xrange(0, seqNo):
                # Skip newlines so line wrapping of sequences is transparent.
                i = l[j].read(1)
                while i == '\n':
                    i = l[j].read(1)
                column[j] = i
            # A '>' (next record) or '' (EOF) on the first sequence marks the
            # end of the alignment; all sequences must agree on it.
            if column[0] == '>' or column[0] == '':
                for j in xrange(1, seqNo):
                    assert column[j] == '>' or column[j] == ''
                break
            for j in xrange(1, seqNo):
                assert column[j] != '>' and column[j] != ''
                column[j] = mapFn(column[j])
            yield column[:]
    for i in l:
        i.close()
"""
reads in columns of multiple alignment and returns them iteratively
"""
if l is None:
l = _getMultiFastaOffsets(fasta)
else:
l = l[:]
seqNo = len(l)
for i in xrange(0, seqNo):
j = open(fasta, 'r')
j.seek(l[i])
l[i] = j
column = [sys.maxint]*seqNo
if seqNo != 0:
while True:
for j in xrange(0, seqNo):
i = l[j].read(1)
while i == '\n':
i = l[j].read(1)
column[j] = i
if column[0] == '>' or column[0] == '':
for j in xrange(1, seqNo):
assert column[j] == '>' or column[j] == ''
break
for j in xrange(1, seqNo):
assert column[j] != '>' and column[j] != ''
column[j] = mapFn(column[j])
yield column[:]
for i in l:
i.close() | [
"def",
"fastaAlignmentRead",
"(",
"fasta",
",",
"mapFn",
"=",
"(",
"lambda",
"x",
":",
"x",
")",
",",
"l",
"=",
"None",
")",
":",
"if",
"l",
"is",
"None",
":",
"l",
"=",
"_getMultiFastaOffsets",
"(",
"fasta",
")",
"else",
":",
"l",
"=",
"l",
"[",... | reads in columns of multiple alignment and returns them iteratively | [
"reads",
"in",
"columns",
"of",
"multiple",
"alignment",
"and",
"returns",
"them",
"iteratively"
] | 1decb75bb439b70721ec776f685ce98e25217d26 | https://github.com/benedictpaten/sonLib/blob/1decb75bb439b70721ec776f685ce98e25217d26/bioio.py#L843-L873 | train |
def fastaAlignmentWrite(columnAlignment, names, seqNo, fastaFile,
                    filter=lambda x : True):
    """
    Write a column alignment to the named file in multi-fasta format.

    columnAlignment: iterable of columns, each indexable by sequence
    index; names: per-sequence fasta headers; seqNo: number of
    sequences; fastaFile: output file *name* (not a handle); filter:
    predicate selecting which columns to keep (NOTE: shadows the
    builtin 'filter', kept for interface compatibility).
    """
    fastaFile = open(fastaFile, 'w')
    # Materialise the kept columns once so they can be traversed per sequence.
    columnAlignment = [ i for i in columnAlignment if filter(i) ]
    for seq in xrange(0, seqNo):
        fastaFile.write(">%s\n" % names[seq])
        for column in columnAlignment:
            fastaFile.write(column[seq])
        fastaFile.write("\n")
    fastaFile.close()
filter=lambda x : True):
"""
Writes out column alignment to given file multi-fasta format
"""
fastaFile = open(fastaFile, 'w')
columnAlignment = [ i for i in columnAlignment if filter(i) ]
for seq in xrange(0, seqNo):
fastaFile.write(">%s\n" % names[seq])
for column in columnAlignment:
fastaFile.write(column[seq])
fastaFile.write("\n")
fastaFile.close() | [
"def",
"fastaAlignmentWrite",
"(",
"columnAlignment",
",",
"names",
",",
"seqNo",
",",
"fastaFile",
",",
"filter",
"=",
"lambda",
"x",
":",
"True",
")",
":",
"fastaFile",
"=",
"open",
"(",
"fastaFile",
",",
"'w'",
")",
"columnAlignment",
"=",
"[",
"i",
"... | Writes out column alignment to given file multi-fasta format | [
"Writes",
"out",
"column",
"alignment",
"to",
"given",
"file",
"multi",
"-",
"fasta",
"format"
] | 1decb75bb439b70721ec776f685ce98e25217d26 | https://github.com/benedictpaten/sonLib/blob/1decb75bb439b70721ec776f685ce98e25217d26/bioio.py#L875-L887 | train |
def getRandomSequence(length=500):
    """Generate a random fasta (header, sequence) pair for testing.

    The header is 0-99 characters drawn from a small alphabet that
    includes digits and whitespace; the sequence is 0..length characters
    of A/C/T/G with a roughly 1-in-21 chance of 'N' per position.
    """
    fastaHeader = ""
    for i in xrange(int(random.random()*100)):
        fastaHeader = fastaHeader + random.choice([ 'A', 'C', '0', '9', ' ', '\t' ])
    return (fastaHeader, \
    "".join([ random.choice([ 'A', 'C', 'T', 'G', 'A', 'C', 'T', 'G', 'A', 'C', 'T', 'G', 'A', 'C', 'T', 'G', 'A', 'C', 'T', 'G', 'N' ]) for i in xrange((int)(random.random() * length))]))
"""Generates a random name and sequence.
"""
fastaHeader = ""
for i in xrange(int(random.random()*100)):
fastaHeader = fastaHeader + random.choice([ 'A', 'C', '0', '9', ' ', '\t' ])
return (fastaHeader, \
"".join([ random.choice([ 'A', 'C', 'T', 'G', 'A', 'C', 'T', 'G', 'A', 'C', 'T', 'G', 'A', 'C', 'T', 'G', 'A', 'C', 'T', 'G', 'N' ]) for i in xrange((int)(random.random() * length))])) | [
"def",
"getRandomSequence",
"(",
"length",
"=",
"500",
")",
":",
"fastaHeader",
"=",
"\"\"",
"for",
"i",
"in",
"xrange",
"(",
"int",
"(",
"random",
".",
"random",
"(",
")",
"*",
"100",
")",
")",
":",
"fastaHeader",
"=",
"fastaHeader",
"+",
"random",
... | Generates a random name and sequence. | [
"Generates",
"a",
"random",
"name",
"and",
"sequence",
"."
] | 1decb75bb439b70721ec776f685ce98e25217d26 | https://github.com/benedictpaten/sonLib/blob/1decb75bb439b70721ec776f685ce98e25217d26/bioio.py#L889-L896 | train |
def mutateSequence(seq, distance):
    """Return a mutated copy of the DNA string seq, for use in testing.

    distance sets the per-base substitution probability; insertions and
    deletions each occur with probability 0.05*distance per base, with
    geometrically distributed lengths (continuation probability 0.9).
    """
    subProb=distance
    inProb=0.05*distance
    deProb=0.05*distance
    contProb=0.9
    l = []
    bases = [ 'A', 'C', 'T', 'G' ]
    i=0
    while i < len(seq):
        if random.random() < subProb:
            # Substitution: may re-pick the original base, so the effective
            # substitution rate is 0.75*subProb.
            l.append(random.choice(bases))
        else:
            l.append(seq[i])
        if random.random() < inProb:
            # Insertion of a random, geometrically distributed length.
            l += getRandomSequence(_expLength(0, contProb))[1]
        if random.random() < deProb:
            # Deletion: skip a geometrically distributed number of bases.
            i += int(_expLength(0, contProb))
        i += 1
    return "".join(l)
"""Mutates the DNA sequence for use in testing.
"""
subProb=distance
inProb=0.05*distance
deProb=0.05*distance
contProb=0.9
l = []
bases = [ 'A', 'C', 'T', 'G' ]
i=0
while i < len(seq):
if random.random() < subProb:
l.append(random.choice(bases))
else:
l.append(seq[i])
if random.random() < inProb:
l += getRandomSequence(_expLength(0, contProb))[1]
if random.random() < deProb:
i += int(_expLength(0, contProb))
i += 1
return "".join(l) | [
"def",
"mutateSequence",
"(",
"seq",
",",
"distance",
")",
":",
"subProb",
"=",
"distance",
"inProb",
"=",
"0.05",
"*",
"distance",
"deProb",
"=",
"0.05",
"*",
"distance",
"contProb",
"=",
"0.9",
"l",
"=",
"[",
"]",
"bases",
"=",
"[",
"'A'",
",",
"'C... | Mutates the DNA sequence for use in testing. | [
"Mutates",
"the",
"DNA",
"sequence",
"for",
"use",
"in",
"testing",
"."
] | 1decb75bb439b70721ec776f685ce98e25217d26 | https://github.com/benedictpaten/sonLib/blob/1decb75bb439b70721ec776f685ce98e25217d26/bioio.py#L903-L923 | train |
def newickTreeParser(newickTree, defaultDistance=DEFAULT_DISTANCE, \
                     sortNonBinaryNodes=False, reportUnaryNodes=False):
    """
    Lax newick tree parser.

    Tokenises the newick string and builds a BinaryTree.  Branches
    without an explicit ':distance' receive defaultDistance.  Nodes with
    more than two children are folded into a left-leaning chain of
    zero-distance binary nodes (optionally sorted by distance first).
    Unary nodes are either kept as internal nodes (reportUnaryNodes=True)
    or collapsed into their child, adding their distance to the child's.
    """
    # Tokenise: pad structural characters with spaces, drop ';', then split.
    newickTree = newickTree.replace("(", " ( ")
    newickTree = newickTree.replace(")", " ) ")
    newickTree = newickTree.replace(":", " : ")
    newickTree = newickTree.replace(";", "")
    newickTree = newickTree.replace(",", " , ")
    newickTree = re.compile("[\s]*").split(newickTree)
    while "" in newickTree:
        newickTree.remove("")
    # i is a one-element list so the token index is shared (mutable) across
    # the nested helpers.
    def fn(newickTree, i):
        # Consume an optional ': distance' token pair; default otherwise.
        if i[0] < len(newickTree):
            if newickTree[i[0]] == ':':
                d = float(newickTree[i[0]+1])
                i[0] += 2
                return d
        return defaultDistance
    def fn2(newickTree, i):
        # Consume an optional node label; None if the next token is structural.
        if i[0] < len(newickTree):
            j = newickTree[i[0]]
            if j != ':' and j != ')' and j != ',':
                i[0] += 1
                return j
        return None
    def fn3(newickTree, i):
        # Recursively parse the subtree starting at token i[0].
        if newickTree[i[0]] == '(':
            #subTree1 = None
            subTreeList = []
            i[0] += 1
            k = []
            while newickTree[i[0]] != ')':
                if newickTree[i[0]] == ',':
                    i[0] += 1
                subTreeList.append(fn3(newickTree, i))
            i[0] += 1
            def cmp(i, j):
                # Order subtrees by branch distance (Python 2 cmp-style).
                if i.distance < j.distance:
                    return -1
                if i.distance > j.distance:
                    return 1
                return 0
            if sortNonBinaryNodes:
                subTreeList.sort(cmp)
            subTree1 = subTreeList[0]
            if len(subTreeList) > 1:
                # Fold >2 children into a chain of zero-distance binary nodes.
                for subTree2 in subTreeList[1:]:
                    subTree1 = BinaryTree(0.0, True, subTree1, subTree2, None)
                subTree1.iD = fn2(newickTree, i)
                subTree1.distance += fn(newickTree, i)
            elif reportUnaryNodes:
                subTree1 = BinaryTree(0.0, True, subTree1, None, None)
                subTree1.iD = fn2(newickTree, i)
                subTree1.distance += fn(newickTree, i)
            else:
                # Collapse the unary node: its label is discarded and its
                # branch distance is added to the child's.
                fn2(newickTree, i)
                subTree1.distance += fn(newickTree, i)
            return subTree1
        leafID = fn2(newickTree, i)
        return BinaryTree(fn(newickTree, i), False, None, None, leafID)
    return fn3(newickTree, [0])
sortNonBinaryNodes=False, reportUnaryNodes=False):
"""
lax newick tree parser
"""
newickTree = newickTree.replace("(", " ( ")
newickTree = newickTree.replace(")", " ) ")
newickTree = newickTree.replace(":", " : ")
newickTree = newickTree.replace(";", "")
newickTree = newickTree.replace(",", " , ")
newickTree = re.compile("[\s]*").split(newickTree)
while "" in newickTree:
newickTree.remove("")
def fn(newickTree, i):
if i[0] < len(newickTree):
if newickTree[i[0]] == ':':
d = float(newickTree[i[0]+1])
i[0] += 2
return d
return defaultDistance
def fn2(newickTree, i):
if i[0] < len(newickTree):
j = newickTree[i[0]]
if j != ':' and j != ')' and j != ',':
i[0] += 1
return j
return None
def fn3(newickTree, i):
if newickTree[i[0]] == '(':
#subTree1 = None
subTreeList = []
i[0] += 1
k = []
while newickTree[i[0]] != ')':
if newickTree[i[0]] == ',':
i[0] += 1
subTreeList.append(fn3(newickTree, i))
i[0] += 1
def cmp(i, j):
if i.distance < j.distance:
return -1
if i.distance > j.distance:
return 1
return 0
if sortNonBinaryNodes:
subTreeList.sort(cmp)
subTree1 = subTreeList[0]
if len(subTreeList) > 1:
for subTree2 in subTreeList[1:]:
subTree1 = BinaryTree(0.0, True, subTree1, subTree2, None)
subTree1.iD = fn2(newickTree, i)
subTree1.distance += fn(newickTree, i)
elif reportUnaryNodes:
subTree1 = BinaryTree(0.0, True, subTree1, None, None)
subTree1.iD = fn2(newickTree, i)
subTree1.distance += fn(newickTree, i)
else:
fn2(newickTree, i)
subTree1.distance += fn(newickTree, i)
return subTree1
leafID = fn2(newickTree, i)
return BinaryTree(fn(newickTree, i), False, None, None, leafID)
return fn3(newickTree, [0]) | [
"def",
"newickTreeParser",
"(",
"newickTree",
",",
"defaultDistance",
"=",
"DEFAULT_DISTANCE",
",",
"sortNonBinaryNodes",
"=",
"False",
",",
"reportUnaryNodes",
"=",
"False",
")",
":",
"newickTree",
"=",
"newickTree",
".",
"replace",
"(",
"\"(\"",
",",
"\" ( \"",
... | lax newick tree parser | [
"lax",
"newick",
"tree",
"parser"
] | 1decb75bb439b70721ec776f685ce98e25217d26 | https://github.com/benedictpaten/sonLib/blob/1decb75bb439b70721ec776f685ce98e25217d26/bioio.py#L944-L1007 | train |
def pWMRead(fileHandle, alphabetSize=4):
    """Read a position weight matrix in the standard format.

    The input has one row per symbol type (alphabetSize rows) and one
    whitespace-separated column per residue position.  Returns the
    transpose: a list with one entry per position, each a list of
    alphabetSize frequencies normalised to sum to 1.

    Raises AssertionError on a wrong row count or ragged rows, and
    ZeroDivisionError if an input column sums to zero.
    """
    lines = fileHandle.readlines()
    assert len(lines) == alphabetSize
    rows = [[float(value) for value in line.split()] for line in lines]
    # All rows must describe the same number of positions.
    for row in rows[1:]:
        assert len(row) == len(rows[0])
    # Transpose to per-position columns (zip replaces the old index loop,
    # which used the Python-2-only xrange) and normalise each column.
    result = []
    for column in zip(*rows):
        total = sum(column) + 0.0  # force float division
        result.append([count / total for count in column])
    return result
"""reads in standard position weight matrix format,
rows are different types of base, columns are individual residues
"""
lines = fileHandle.readlines()
assert len(lines) == alphabetSize
l = [ [ float(i) ] for i in lines[0].split() ]
for line in lines[1:]:
l2 = [ float(i) for i in line.split() ]
assert len(l) == len(l2)
for i in xrange(0, len(l)):
l[i].append(l2[i])
for i in xrange(0, len(l)):
j = sum(l[i]) + 0.0
l[i] = [ k/j for k in l[i] ]
return l | [
"def",
"pWMRead",
"(",
"fileHandle",
",",
"alphabetSize",
"=",
"4",
")",
":",
"lines",
"=",
"fileHandle",
".",
"readlines",
"(",
")",
"assert",
"len",
"(",
"lines",
")",
"==",
"alphabetSize",
"l",
"=",
"[",
"[",
"float",
"(",
"i",
")",
"]",
"for",
... | reads in standard position weight matrix format,
rows are different types of base, columns are individual residues | [
"reads",
"in",
"standard",
"position",
"weight",
"matrix",
"format",
"rows",
"are",
"different",
"types",
"of",
"base",
"columns",
"are",
"individual",
"residues"
] | 1decb75bb439b70721ec776f685ce98e25217d26 | https://github.com/benedictpaten/sonLib/blob/1decb75bb439b70721ec776f685ce98e25217d26/bioio.py#L1036-L1051 | train |
def pWMWrite(fileHandle, pWM, alphabetSize=4):
    """Write a position weight matrix in standard format (inverse of pWMRead).

    pWM is a list of per-position columns, each holding alphabetSize
    frequencies; the output has one row per symbol type with one
    space-separated column per position.
    """
    # range (not the Python-2-only xrange) keeps the function portable.
    for symbol in range(alphabetSize):
        fileHandle.write("%s\n" % ' '.join(str(column[symbol]) for column in pWM))
"""Writes file in standard PWM format, is reverse of pWMParser
"""
for i in xrange(0, alphabetSize):
fileHandle.write("%s\n" % ' '.join([ str(pWM[j][i]) for j in xrange(0, len(pWM)) ])) | [
"def",
"pWMWrite",
"(",
"fileHandle",
",",
"pWM",
",",
"alphabetSize",
"=",
"4",
")",
":",
"for",
"i",
"in",
"xrange",
"(",
"0",
",",
"alphabetSize",
")",
":",
"fileHandle",
".",
"write",
"(",
"\"%s\\n\"",
"%",
"' '",
".",
"join",
"(",
"[",
"str",
... | Writes file in standard PWM format, is reverse of pWMParser | [
"Writes",
"file",
"in",
"standard",
"PWM",
"format",
"is",
"reverse",
"of",
"pWMParser"
] | 1decb75bb439b70721ec776f685ce98e25217d26 | https://github.com/benedictpaten/sonLib/blob/1decb75bb439b70721ec776f685ce98e25217d26/bioio.py#L1053-L1057 | train |
def cigarRead(fileHandleOrFile):
    """Generator over the pairwise alignments of a cigar file.

    Accepts an open handle or a file name (resolved by _getFileHandle).
    Lines that do not parse are skipped (cigarReadFromString returns
    None for them).  Note: query and target are reversed relative to
    the written order -- see cigarWrite.
    """
    fileHandle = _getFileHandle(fileHandleOrFile)
    # Parsing is delegated entirely to cigarReadFromString; the regex that
    # used to be compiled here was never applied, so it has been removed.
    line = fileHandle.readline()
    while line != '':
        pA = cigarReadFromString(line)
        if pA != None:
            yield pA
        line = fileHandle.readline()
    # Close only if we opened the file ourselves (a name was passed).
    if isinstance(fileHandleOrFile, "".__class__):
        fileHandle.close()
"""Reads a list of pairwise alignments into a pairwise alignment structure.
Query and target are reversed!
"""
fileHandle = _getFileHandle(fileHandleOrFile)
#p = re.compile("cigar:\\s+(.+)\\s+([0-9]+)\\s+([0-9]+)\\s+([\\+\\-\\.])\\s+(.+)\\s+([0-9]+)\\s+([0-9]+)\\s+([\\+\\-\\.])\\s+(.+)\\s+(.*)\\s*)*")
p = re.compile("cigar:\\s+(.+)\\s+([0-9]+)\\s+([0-9]+)\\s+([\\+\\-\\.])\\s+(.+)\\s+([0-9]+)\\s+([0-9]+)\\s+([\\+\\-\\.])\\s+([^\\s]+)(\\s+(.*)\\s*)*")
line = fileHandle.readline()
while line != '':
pA = cigarReadFromString(line)
if pA != None:
yield pA
line = fileHandle.readline()
if isinstance(fileHandleOrFile, "".__class__):
fileHandle.close() | [
"def",
"cigarRead",
"(",
"fileHandleOrFile",
")",
":",
"fileHandle",
"=",
"_getFileHandle",
"(",
"fileHandleOrFile",
")",
"p",
"=",
"re",
".",
"compile",
"(",
"\"cigar:\\\\s+(.+)\\\\s+([0-9]+)\\\\s+([0-9]+)\\\\s+([\\\\+\\\\-\\\\.])\\\\s+(.+)\\\\s+([0-9]+)\\\\s+([0-9]+)\\\\s+([\\\... | Reads a list of pairwise alignments into a pairwise alignment structure.
Query and target are reversed! | [
"Reads",
"a",
"list",
"of",
"pairwise",
"alignments",
"into",
"a",
"pairwise",
"alignment",
"structure",
"."
] | 1decb75bb439b70721ec776f685ce98e25217d26 | https://github.com/benedictpaten/sonLib/blob/1decb75bb439b70721ec776f685ce98e25217d26/bioio.py#L1184-L1199 | train |
def cigarWrite(fileHandle, pairwiseAlignment, withProbs=True):
    """Write a pairwiseAlignment to the file stream in cigar format.

    Query and target are emitted in reversed order (contig2/start2/end2
    first), mirroring cigarRead.  With withProbs=True each operation is
    written as 'X/Y/Z length score'; otherwise as conventional
    'M/D/I length' pairs.
    """
    if len(pairwiseAlignment.operationList) == 0:
        logger.info("Writing zero length pairwiseAlignment to file!")
    # Strands are encoded as '+'/'-' characters.
    strand1 = "+"
    if not pairwiseAlignment.strand1:
        strand1 = "-"
    strand2 = "+"
    if not pairwiseAlignment.strand2:
        strand2 = "-"
    fileHandle.write("cigar: %s %i %i %s %s %i %i %s %f" % (pairwiseAlignment.contig2, pairwiseAlignment.start2, pairwiseAlignment.end2, strand2,\
    pairwiseAlignment.contig1, pairwiseAlignment.start1, pairwiseAlignment.end1, strand1,\
    pairwiseAlignment.score))
    if withProbs == True:
        # Score-annotated letters: X=match, Y=indel-in-x, Z=indel-in-y.
        hashMap = { PairwiseAlignment.PAIRWISE_INDEL_Y:'Z',PairwiseAlignment.PAIRWISE_INDEL_X:'Y', PairwiseAlignment.PAIRWISE_MATCH:'X' }
        for op in pairwiseAlignment.operationList:
            fileHandle.write(' %s %i %f' % (hashMap[op.type], op.length, op.score))
    else:
        # Standard cigar letters: M=match, D=indel-in-x, I=indel-in-y.
        hashMap = { PairwiseAlignment.PAIRWISE_INDEL_Y:'I',PairwiseAlignment.PAIRWISE_INDEL_X:'D', PairwiseAlignment.PAIRWISE_MATCH:'M' }
        for op in pairwiseAlignment.operationList:
            fileHandle.write(' %s %i' % (hashMap[op.type], op.length))
    fileHandle.write("\n")
"""Writes out the pairwiseAlignment to the file stream.
Query and target are reversed from normal order.
"""
if len(pairwiseAlignment.operationList) == 0:
logger.info("Writing zero length pairwiseAlignment to file!")
strand1 = "+"
if not pairwiseAlignment.strand1:
strand1 = "-"
strand2 = "+"
if not pairwiseAlignment.strand2:
strand2 = "-"
fileHandle.write("cigar: %s %i %i %s %s %i %i %s %f" % (pairwiseAlignment.contig2, pairwiseAlignment.start2, pairwiseAlignment.end2, strand2,\
pairwiseAlignment.contig1, pairwiseAlignment.start1, pairwiseAlignment.end1, strand1,\
pairwiseAlignment.score))
if withProbs == True:
hashMap = { PairwiseAlignment.PAIRWISE_INDEL_Y:'Z',PairwiseAlignment.PAIRWISE_INDEL_X:'Y', PairwiseAlignment.PAIRWISE_MATCH:'X' }
for op in pairwiseAlignment.operationList:
fileHandle.write(' %s %i %f' % (hashMap[op.type], op.length, op.score))
else:
hashMap = { PairwiseAlignment.PAIRWISE_INDEL_Y:'I',PairwiseAlignment.PAIRWISE_INDEL_X:'D', PairwiseAlignment.PAIRWISE_MATCH:'M' }
for op in pairwiseAlignment.operationList:
fileHandle.write(' %s %i' % (hashMap[op.type], op.length))
fileHandle.write("\n") | [
"def",
"cigarWrite",
"(",
"fileHandle",
",",
"pairwiseAlignment",
",",
"withProbs",
"=",
"True",
")",
":",
"if",
"len",
"(",
"pairwiseAlignment",
".",
"operationList",
")",
"==",
"0",
":",
"logger",
".",
"info",
"(",
"\"Writing zero length pairwiseAlignment to fil... | Writes out the pairwiseAlignment to the file stream.
Query and target are reversed from normal order. | [
"Writes",
"out",
"the",
"pairwiseAlignment",
"to",
"the",
"file",
"stream",
"."
] | 1decb75bb439b70721ec776f685ce98e25217d26 | https://github.com/benedictpaten/sonLib/blob/1decb75bb439b70721ec776f685ce98e25217d26/bioio.py#L1201-L1228 | train |
def getRandomPairwiseAlignment():
    """Build a random PairwiseAlignment for testing.

    Two random segments and a uniformly random score in [-1000, 1000)
    are drawn; the operation list is sized to the two segment lengths.
    """
    i, j, k, l = _getRandomSegment()
    m, n, o, p = _getRandomSegment()
    score = random.choice(xrange(-1000, 1000))
    return PairwiseAlignment(i, j, k, l, m, n, o, p, score, getRandomOperationList(abs(k - j), abs(o - n)))
"""Gets a random pairwiseAlignment.
"""
i, j, k, l = _getRandomSegment()
m, n, o, p = _getRandomSegment()
score = random.choice(xrange(-1000, 1000))
return PairwiseAlignment(i, j, k, l, m, n, o, p, score, getRandomOperationList(abs(k - j), abs(o - n))) | [
"def",
"getRandomPairwiseAlignment",
"(",
")",
":",
"i",
",",
"j",
",",
"k",
",",
"l",
"=",
"_getRandomSegment",
"(",
")",
"m",
",",
"n",
",",
"o",
",",
"p",
"=",
"_getRandomSegment",
"(",
")",
"score",
"=",
"random",
".",
"choice",
"(",
"xrange",
... | Gets a random pairwiseAlignment. | [
"Gets",
"a",
"random",
"pairwiseAlignment",
"."
] | 1decb75bb439b70721ec776f685ce98e25217d26 | https://github.com/benedictpaten/sonLib/blob/1decb75bb439b70721ec776f685ce98e25217d26/bioio.py#L1260-L1266 | train |
def addEdgeToGraph(parentNodeName, childNodeName, graphFileHandle, colour="black", length="10", weight="1", dir="none", label="", style=""):
    """Emit a graphviz undirected edge between two named nodes.

    Writes an edge-attribute statement followed by the edge itself to
    graphFileHandle; all attribute values are inserted verbatim.
    """
    attributes = 'edge[color=%s,len=%s,weight=%s,dir=%s,label="%s",style=%s];\n' % (
        colour, length, weight, dir, label, style)
    edge = "%s -- %s;\n" % (parentNodeName, childNodeName)
    graphFileHandle.write(attributes)
    graphFileHandle.write(edge)
"""Links two nodes in the graph together.
"""
graphFileHandle.write('edge[color=%s,len=%s,weight=%s,dir=%s,label="%s",style=%s];\n' % (colour, length, weight, dir, label, style))
graphFileHandle.write("%s -- %s;\n" % (parentNodeName, childNodeName)) | [
"def",
"addEdgeToGraph",
"(",
"parentNodeName",
",",
"childNodeName",
",",
"graphFileHandle",
",",
"colour",
"=",
"\"black\"",
",",
"length",
"=",
"\"10\"",
",",
"weight",
"=",
"\"1\"",
",",
"dir",
"=",
"\"none\"",
",",
"label",
"=",
"\"\"",
",",
"style",
... | Links two nodes in the graph together. | [
"Links",
"two",
"nodes",
"in",
"the",
"graph",
"together",
"."
] | 1decb75bb439b70721ec776f685ce98e25217d26 | https://github.com/benedictpaten/sonLib/blob/1decb75bb439b70721ec776f685ce98e25217d26/bioio.py#L1282-L1286 | train |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.