desc
stringlengths
3
26.7k
decl
stringlengths
11
7.89k
bodies
stringlengths
8
553k
def _run_command(self, execute, copyfiles=True):
    """Collate the join inputs, then delegate execution to the superclass."""
    self._collate_join_field_inputs()
    # The superclass performs the actual (possibly cached) interface run.
    result = super(JoinNode, self)._run_command(execute, copyfiles)
    return result
'Collects each override join item field into the interface join field input.'
def _collate_join_field_inputs(self):
for field in self.inputs.copyable_trait_names(): if (field in self.joinfield): val = self._collate_input_value(field) try: setattr(self._interface.inputs, field, val) except Exception as e: raise ValueError((u'>>JN %s %s %s %s ...
'Collects the join item field values into a list or set value for the given field, as follows: - If the field trait is a Set, then the values are collected into a set. - Otherwise, the values are collected into a list which preserves the iterables order. If the ``unique`` flag is set, then duplicate values are removed ...
def _collate_input_value(self, field):
    """Collect the join-item values of *field* into one container.

    Returns a ``set`` when the base trait is a traits Set; otherwise a
    list preserving iterable order, de-duplicated when ``self._unique``
    is set.
    """
    collected = []
    for idx in range(self._next_slot_index):
        collected.append(self._slot_value(field, idx))
    basetrait = self._interface.inputs.trait(field)
    if isinstance(basetrait.trait_type, traits.Set):
        return set(collected)
    if self._unique:
        # OrderedDict.fromkeys keeps first-occurrence order while dropping dups.
        return list(OrderedDict.fromkeys(collected))
    return collected
'Parameters interface : interface object node specific interface (fsl.Bet(), spm.Coregister()) iterfield : string or list of strings name(s) of input fields that will receive a list of whatever kind of input they take. the node will be run separately for each value in these lists. for more than one input, the values ar...
def __init__(self, interface, iterfield, name, serial=False, nested=False, **kwargs):
super(MapNode, self).__init__(interface, name, **kwargs) if isinstance(iterfield, (str, bytes)): iterfield = [iterfield] self.iterfield = iterfield self.nested = nested self._inputs = self._create_dynamic_traits(self._interface.inputs, fields=self.iterfield) self._inputs.on_trait_change(...
'Convert specific fields of a trait to accept multiple inputs'
def _create_dynamic_traits(self, basetraits, fields=None, nitems=None):
output = DynamicTraitedSpec() if (fields is None): fields = basetraits.copyable_trait_names() for (name, spec) in list(basetraits.items()): if ((name in fields) and ((nitems is None) or (nitems > 1))): logger.debug(u'adding multipath trait: %s', name) if self...
def set_input(self, parameter, val):
    """Set an interface input value; interface takes priority over node attrs."""
    logger.debug(u'setting nodelevel(%s) input %s = %s',
                 to_str(self), parameter, to_str(val))
    # Deep-copy so later caller-side mutation cannot leak into the node.
    self._set_mapnode_input(self.inputs, parameter, deepcopy(val))
'Compute hash including iterfield lists.'
def _get_hashval(self):
if (not self._got_inputs): self._get_inputs() self._got_inputs = True self._check_iterfield() hashinputs = deepcopy(self._interface.inputs) for name in self.iterfield: hashinputs.remove_trait(name) hashinputs.add_trait(name, InputMultiPath(self._interface.inputs.traits()[...
'Checks iterfield * iterfield must be in inputs * number of elements must match across iterfield'
def _check_iterfield(self):
for iterfield in self.iterfield: if (not isdefined(getattr(self.inputs, iterfield))): raise ValueError((u'Input %s was not set but it is listed in iterfields.' % iterfield)) if (len(self.iterfield) > 1): first_len = len(filename_to_list(getattr(self.inpu...
'Run the mapnode interface This is primarily intended for serial execution of mapnode. A parallel execution requires creation of new nodes that can be spawned'
def _run_interface(self, execute=True, updatehash=False):
old_cwd = os.getcwd() cwd = self.output_dir() os.chdir(cwd) self._check_iterfield() if execute: if self.nested: nitems = len(filename_to_list(flatten(getattr(self.inputs, self.iterfield[0])))) else: nitems = len(filename_to_list(getattr(self.inputs, self.iterf...
def __init__(self, name, base_dir=None):
    """Create a workflow object.

    Parameters
    ----------
    name : str
        Alphanumeric unique identifier for the workflow.
    base_dir : str, optional
        Path to workflow storage.
    """
    super(Workflow, self).__init__(name, base_dir)
    self._graph = nx.DiGraph()
    # Private copy so per-workflow tweaks never touch the global config.
    self.config = deepcopy(config._sections)
def clone(self, name):
    """Return a clone of this workflow under a new unique *name*.

    Attributes used for executing the workflow are reset on the clone.
    """
    duplicate = super(Workflow, self).clone(name)
    duplicate._reset_hierarchy()
    return duplicate
'Connect nodes in the pipeline. This routine also checks if inputs and outputs are actually provided by the nodes that are being connected. Creates edges in the directed graph using the nodes and edges specified in the `connection_list`. Uses the NetworkX method DiGraph.add_edges_from. Parameters args : list or a set ...
def connect(self, *args, **kwargs):
if (len(args) == 1): connection_list = args[0] elif (len(args) == 4): connection_list = [(args[0], args[2], [(args[1], args[3])])] else: raise TypeError((u'connect() takes either 4 arguments, or 1 list of connection tuples (%d args given)' % len...
'Disconnect nodes See the docstring for connect for format.'
def disconnect(self, *args):
if (len(args) == 1): connection_list = args[0] elif (len(args) == 4): connection_list = [(args[0], args[2], [(args[1], args[3])])] else: raise TypeError((u'disconnect() takes either 4 arguments, or 1 list of connection tuples (%d args given)' % ...
'Add nodes to a workflow Parameters nodes : list A list of EngineBase-based objects'
def add_nodes(self, nodes):
newnodes = [] all_nodes = self._get_all_nodes() for node in nodes: if self._has_node(node): raise IOError((u'Node %s already exists in the workflow' % node)) if isinstance(node, Workflow): for subnode in node._get_all_nodes(): if (sub...
def remove_nodes(self, nodes):
    """Remove the given EngineBase-based *nodes* from the workflow graph."""
    self._graph.remove_nodes_from(nodes)
'Return an internal node by name'
def get_node(self, name):
nodenames = name.split(u'.') nodename = nodenames[0] outnode = [node for node in self._graph.nodes() if str(node).endswith((u'.' + nodename))] if outnode: outnode = outnode[0] if (nodenames[1:] and issubclass(outnode.__class__, Workflow)): outnode = outnode.get_node(u'.'.join...
def list_node_names(self):
    """Return the sorted dotted names of every node in the workflow."""
    names = []
    for node in nx.topological_sort(self._graph):
        if isinstance(node, Workflow):
            # Nested workflows contribute "<wf>.<node>" style names.
            names.extend(u'.'.join((node.name, inner))
                         for inner in node.list_node_names())
        else:
            names.append(node.name)
    return sorted(names)
'Generates a graphviz dot file and a png file Parameters graph2use: \'orig\', \'hierarchical\' (default), \'flat\', \'exec\', \'colored\' orig - creates a top level graph without expanding internal workflow nodes; flat - expands workflow nodes recursively; hierarchical - expands workflow nodes recursively with a notion...
def write_graph(self, dotfilename=u'graph.dot', graph2use=u'hierarchical', format=u'png', simple_form=True):
graphtypes = [u'orig', u'flat', u'hierarchical', u'exec', u'colored'] if (graph2use not in graphtypes): raise ValueError((u'Unknown graph2use keyword. Must be one of: ' + str(graphtypes))) (base_dir, dotfilename) = op.split(dotfilename) if (base_dir == u''): if self....
'Export object into a different format Parameters filename: string file to save the code to; overrides prefix prefix: string prefix to use for output file format: string one of "python" include_config: boolean whether to include node and workflow config values'
def export(self, filename=None, prefix=u'output', format=u'python', include_config=False):
formats = [u'python'] if (format not in formats): raise ValueError((u'format must be one of: %s' % u'|'.join(formats))) flatgraph = self._create_flat_graph() nodes = nx.topological_sort(flatgraph) lines = [u'# Workflow'] importlines = [u'from nipype.pipeline.engine ...
'Execute the workflow Parameters plugin: plugin name or object Plugin to use for execution. You can create your own plugins for execution. plugin_args : dictionary containing arguments to be sent to plugin constructor. see individual plugin doc strings for details.'
def run(self, plugin=None, plugin_args=None, updatehash=False):
if (plugin is None): plugin = config.get(u'execution', u'plugin') if (not isinstance(plugin, (str, bytes))): runner = plugin else: name = u'.'.join((__name__.split(u'.')[:(-2)] + [u'plugins'])) try: __import__(name) except ImportError: msg = (u...
'Initialize node with list of which outputs are needed.'
def _set_needed_outputs(self, graph):
rm_outputs = self.config[u'execution'][u'remove_unnecessary_outputs'] if (not str2bool(rm_outputs)): return for node in graph.nodes(): node.needed_outputs = [] for edge in graph.out_edges_iter(node): data = graph.get_edge_data(*edge) sourceinfo = [(v1[0] if is...
'Ensure that each node knows where to get inputs from'
def _configure_exec_nodes(self, graph):
for node in graph.nodes(): node.input_source = {} for edge in graph.in_edges_iter(node): data = graph.get_edge_data(*edge) for (sourceinfo, field) in data[u'connect']: node.input_source[field] = (op.join(edge[0].output_dir(), (u'result_%s.pklz' % edge[0].name)...
'Checks if any of the nodes are already in the graph'
def _check_nodes(self, nodes):
node_names = [node.name for node in self._graph.nodes()] node_lineage = [node._hierarchy for node in self._graph.nodes()] for node in nodes: if (node.name in node_names): idx = node_names.index(node.name) if (node_lineage[idx] in [node._hierarchy, self.name]): ...
def _has_attr(self, parameter, subtype=u'in'):
    """Return True if dotted *parameter* resolves on inputs (or outputs)."""
    current = self.inputs if subtype == u'in' else self.outputs
    for attr in parameter.split(u'.'):
        if not hasattr(current, attr):
            return False
        current = getattr(current, attr)
    return True
def _get_parameter_node(self, parameter, subtype=u'in'):
    """Return the node owning the given dotted input/output parameter."""
    current = self.inputs if subtype == u'in' else self.outputs
    parts = parameter.split(u'.')
    for attr in parts[:-1]:
        current = getattr(current, attr)
    # The owning node is stashed on the trait's metadata.
    return current.traits()[parts[-1]].node
'Returns the inputs of a workflow This function does not return any input ports that are already connected'
def _get_inputs(self):
inputdict = TraitedSpec() for node in self._graph.nodes(): inputdict.add_trait(node.name, traits.Instance(TraitedSpec)) if isinstance(node, Workflow): setattr(inputdict, node.name, node.inputs) else: taken_inputs = [] for (_, _, d) in self._graph.in_ed...
'Returns all possible output ports that are not already connected'
def _get_outputs(self):
outputdict = TraitedSpec() for node in self._graph.nodes(): outputdict.add_trait(node.name, traits.Instance(TraitedSpec)) if isinstance(node, Workflow): setattr(outputdict, node.name, node.outputs) elif node.outputs: outputs = TraitedSpec() for (key, _...
def _set_input(self, object, name, newvalue):
    """Trait-change callback that forwards a new value to the owning node."""
    # NOTE: 'object' shadows the builtin but is part of the callback signature.
    trait = object.traits()[name]
    trait.node.set_input(name, newvalue)
'Set inputs of a node given the edge connection'
def _set_node_input(self, node, param, source, sourceinfo):
if isinstance(sourceinfo, (str, bytes)): val = source.get_output(sourceinfo) elif isinstance(sourceinfo, tuple): if callable(sourceinfo[1]): val = sourceinfo[1](source.get_output(sourceinfo[0]), *sourceinfo[2:]) newval = val if isinstance(val, TraitDictObject): newval...
def _create_flat_graph(self):
    """Return a flat DAG in which no node is itself a workflow."""
    logger.debug(u'Creating flat graph for workflow: %s', self.name)
    # Expand a deep copy so the original workflow graph stays untouched.
    workflow_copy = deepcopy(self)
    workflow_copy._generate_flatgraph()
    return workflow_copy._graph
def _reset_hierarchy(self):
    """Recompute the dotted hierarchy string for every contained node."""
    for node in self._graph.nodes():
        if isinstance(node, Workflow):
            node._reset_hierarchy()
            for inner in node._graph.nodes():
                inner._hierarchy = u'.'.join((self.name, inner._hierarchy))
        else:
            node._hierarchy = self.name
'Generate a graph containing only Nodes or MapNodes'
def _generate_flatgraph(self):
logger.debug(u'expanding workflow: %s', self) nodes2remove = [] if (not nx.is_directed_acyclic_graph(self._graph)): raise Exception((u'Workflow: %s is not a directed acyclic graph (DAG)' % self.name)) nodes = nx.topological_sort(self._graph) for node in nodes: ...
'Create a dot file with connection info'
def _get_dot(self, prefix=None, hierarchy=None, colored=False, simple_form=True, level=0):
if (prefix is None): prefix = u' ' if (hierarchy is None): hierarchy = [] colorset = [u'#FFFFC8', u'#0000FF', u'#B4B4FF', u'#E6E6FF', u'#FF0000', u'#FFB4B4', u'#FFE6E6', u'#00A300', u'#B4FFB4', u'#E6FFE6', u'#0000FF', u'#B4B4FF'] if (level > (len(colorset) - 2)): level = 3...
'Open the specified file and use it as the stream for logging. By default, the file grows indefinitely. You can specify particular values of maxBytes and backupCount to allow the file to rollover at a predetermined size. Rollover occurs whenever the current log file is nearly maxBytes in length. If backupCount is >= 1,...
def __init__(self, filename, mode='a', maxBytes=0, backupCount=0, encoding=None, debug=True, supress_abs_warn=False):
if (not os.path.isabs(filename)): if (FORCE_ABSOLUTE_PATH or (not os.path.split(filename)[0])): filename = os.path.abspath(filename) elif (not supress_abs_warn): from warnings import warn warn("The given 'filename' should be an absolute path. ...
def acquire(self):
    """Acquire the thread lock and file lock; reopen the log when degraded."""
    Handler.acquire(self)
    lock(self.stream_lock, LOCK_EX)
    # In 'degraded' mode the stream may have been closed meanwhile.
    if self.stream.closed:
        self._openFile(self.mode)
'Release file and thread locks. Flush stream and take care of closing stream in \'degraded\' mode.'
def release(self):
try: if (not self.stream.closed): self.stream.flush() if self._rotateFailed: self.stream.close() except IOError: if self._rotateFailed: self.stream.close() finally: try: unlock(self.stream_lock) finally: ...
def close(self):
    """Flush and close the stream, then close the logging handler."""
    if not self.stream.closed:
        self.stream.flush()
        self.stream.close()
    Handler.close(self)
'flush(): Do nothing. Since a flush is issued in release(), we don\'t do it here. To do a flush here, it would be necessary to re-lock everything, and it is just easier and cleaner to do it all in release(), rather than requiring two lock ops per handle() call. Doing a flush() here would also introduces a window of op...
def flush(self):
    """Do nothing; flushing happens in release() to avoid extra lock work."""
    pass
def _degrade(self, degrade, msg, *args):
    """Record degrade-mode state; *msg* and *args* are accepted but ignored."""
    del msg, args  # only the debug variant reports these
    self._rotateFailed = degrade
'A more colorful version of _degade(). (This is enabled by passing "debug=True" at initialization).'
def _degrade_debug(self, degrade, msg, *args):
if degrade: if (not self._rotateFailed): sys.stderr.write(('Degrade mode - ENTERING - (pid=%d) %s\n' % (os.getpid(), (msg % args)))) self._rotateFailed = True elif self._rotateFailed: sys.stderr.write(('Degrade mode - EXITING - (pi...
'Do a rollover, as described in __init__().'
def doRollover(self):
if (self.backupCount <= 0): self.stream.close() self._openFile('w') return self.stream.close() try: tmpname = None while ((not tmpname) or os.path.exists(tmpname)): tmpname = ('%s.rotate.%08d' % (self.baseFilename, randint(0, 99999999))) try: ...
def shouldRollover(self, record):
    """Return True when the log file should roll over; *record* is unused."""
    del record
    if self._shouldRollover():
        # Reopen and re-check: another process may have rotated already.
        self.stream.close()
        self._openFile(self.mode)
        return self._shouldRollover()
    return False
def _donothing(self, *args, **kwargs):
    """Accept anything and do nothing (placeholder handler)."""
    pass
def dcite(self, *args, **kwargs):
    """Return a no-op decorator (used when citation machinery is absent)."""
    def _passthrough(func):
        return func
    return _passthrough
'Temporary filesystem for testing non-POSIX filesystems on a POSIX system. with TempFATFS() as fatdir: target = os.path.join(fatdir, \'target\') copyfile(file1, target, copy=False) assert not os.path.islink(target) Arguments size_in_mbytes : int Size (in MiB) of filesystem to create delay : float Time (in seconds) to w...
def __init__(self, size_in_mbytes=8, delay=0.5):
self.delay = delay self.tmpdir = mkdtemp() self.dev_null = open(os.devnull, u'wb') vfatfile = os.path.join(self.tmpdir, u'vfatblock') self.vfatmount = os.path.join(self.tmpdir, u'vfatmount') self.canary = os.path.join(self.vfatmount, u'.canary') with open(vfatfile, u'wb') as fobj: fo...
@mock.patch('warnings.warn')
def test_warning(self, mock_warn):
    """Check that using misc.TSNR warns users to use confounds.TSNR instead."""
    misc.TSNR(in_file=self.in_filenames['in_file'])
    assert any(args[0].count('confounds') > 0
               for (_, args, _) in mock_warn.mock_calls)
def __init__(self, *args, **kwargs):
    """Initialize exactly as the parent does, but with a 'CompCor' header."""
    super(CompCor, self).__init__(*args, **kwargs)
    self._header = u'CompCor'
def __init__(self, *args, **kwargs):
    """Initialize exactly as the parent does, but with an 'aCompCor' header."""
    super(ACompCor, self).__init__(*args, **kwargs)
    self._header = u'aCompCor'
def __init__(self, *args, **kwargs):
    """Initialize exactly as the parent does, but with a 'tCompCor' header."""
    super(TCompCor, self).__init__(*args, **kwargs)
    self._header = u'tCompCor'
    self._mask_files = []
'Generates a standard design matrix paradigm given information about each run'
def _generate_standard_design(self, infolist, functional_runs=None, realignment_parameters=None, outliers=None):
sessinfo = [] output_units = u'secs' if (u'output_units' in self.inputs.traits()): output_units = self.inputs.output_units for (i, info) in enumerate(infolist): sessinfo.insert(i, dict(cond=[])) if isdefined(self.inputs.high_pass_filter_cutoff): sessinfo[i][u'hpf'] = ...
'Generate design specification for a typical fmri paradigm'
def _generate_design(self, infolist=None):
realignment_parameters = [] if isdefined(self.inputs.realignment_parameters): for parfile in self.inputs.realignment_parameters: realignment_parameters.append(np.apply_along_axis(func1d=normalize_mc_params, axis=1, arr=np.loadtxt(parfile), source=self.inputs.parameter_source)) outliers =...
def _run_interface(self, runtime):
    """Build the design specification and pass *runtime* through unchanged."""
    self._sessioninfo = None
    self._generate_design()
    return runtime
'Generates a regressor for a sparse/clustered-sparse acquisition'
def _gen_regress(self, i_onsets, i_durations, i_amplitudes, nscans):
bplot = False if (isdefined(self.inputs.save_plot) and self.inputs.save_plot): bplot = True import matplotlib matplotlib.use(config.get(u'execution', u'matplotlib_backend')) import matplotlib.pyplot as plt TR = np.round((self.inputs.time_repetition * 1000)) if self.inputs...
'Converts condition information to full regressors'
def _cond_to_regress(self, info, nscans):
reg = [] regnames = [] for (i, cond) in enumerate(info.conditions): if (hasattr(info, u'amplitudes') and info.amplitudes): amplitudes = info.amplitudes[i] else: amplitudes = None regnames.insert(len(regnames), cond) scaled_onsets = scale_timings(info.o...
'Generates condition information for sparse-clustered designs.'
def _generate_clustered_design(self, infolist):
infoout = deepcopy(infolist) for (i, info) in enumerate(infolist): infoout[i].conditions = None infoout[i].onsets = None infoout[i].durations = None if info.conditions: img = load(self.inputs.functional_runs[i], mmap=NUMPY_MMAP) nscans = img.shape[3] ...
'Generate output files based on motion filenames Parameters motionfile: file/string Filename for motion parameter file output_dir: string output directory in which the files will be generated'
def _get_output_filenames(self, motionfile, output_dir):
if isinstance(motionfile, (str, bytes)): infile = motionfile elif isinstance(motionfile, list): infile = motionfile[0] else: raise Exception(u'Unknown type of file') (_, filename, ext) = split_filename(infile) artifactfile = os.path.join(output_dir, u''.join((u'art.'...
'Core routine for detecting outliers'
def _detect_outliers_core(self, imgfile, motionfile, runidx, cwd=None):
if (not cwd): cwd = os.getcwd() if isinstance(imgfile, (str, bytes)): nim = load(imgfile, mmap=NUMPY_MMAP) elif isinstance(imgfile, list): if (len(imgfile) == 1): nim = load(imgfile[0], mmap=NUMPY_MMAP) else: images = [load(f, mmap=NUMPY_MMAP) for f in...
def _run_interface(self, runtime):
    """Run outlier detection on every realigned file, then return runtime."""
    images = filename_to_list(self.inputs.realigned_files)
    motion = filename_to_list(self.inputs.realignment_parameters)
    for idx, image in enumerate(images):
        # cwd is looked up per iteration, matching the original call pattern.
        self._detect_outliers_core(image, motion[idx], idx, cwd=os.getcwd())
    return runtime
def _get_output_filenames(self, motionfile, output_dir):
    """Return the stimulus-correlation output path for *motionfile*.

    Parameters
    ----------
    motionfile : str
        Filename of the motion parameter file.
    output_dir : str
        Directory in which the output file will be generated.
    """
    stem = os.path.splitext(os.path.split(motionfile)[1])[0]
    return os.path.join(output_dir,
                        u''.join((u'qa.', stem, u'_stimcorr.txt')))
'Core routine for determining stimulus correlation'
def _stimcorr_core(self, motionfile, intensityfile, designmatrix, cwd=None):
if (not cwd): cwd = os.getcwd() mc_in = np.loadtxt(motionfile) g_in = np.loadtxt(intensityfile) g_in.shape = (g_in.shape[0], 1) dcol = designmatrix.shape[1] mccol = mc_in.shape[1] concat_matrix = np.hstack((np.hstack((designmatrix, mc_in)), g_in)) cm = np.corrcoef(concat_matrix, ...
'Parameters spmmat: scipy matlab object full SPM.mat file loaded into a scipy object sessidx: int index to session that needs to be extracted.'
def _get_spm_submatrix(self, spmmat, sessidx, rows=None):
designmatrix = spmmat[u'SPM'][0][0].xX[0][0].X U = spmmat[u'SPM'][0][0].Sess[0][sessidx].U[0] if (rows is None): rows = (spmmat[u'SPM'][0][0].Sess[0][sessidx].row[0] - 1) cols = (spmmat[u'SPM'][0][0].Sess[0][sessidx].col[0][list(range(len(U)))] - 1) outmatrix = designmatrix.take(rows.tolist(...
'Execute this module.'
def _run_interface(self, runtime):
motparamlist = self.inputs.realignment_parameters intensityfiles = self.inputs.intensity_values spmmat = sio.loadmat(self.inputs.spm_mat_file, struct_as_record=False) nrows = [] for i in range(len(motparamlist)): sessidx = i rows = None if self.inputs.concatenated_design: ...
'Generates a copy of an image with a certain amount of added gaussian noise (rayleigh for background in mask)'
def gen_noise(self, image, mask=None, snr_db=10.0, dist=u'normal', bg_dist=u'normal'):
from math import sqrt snr = sqrt(np.power(10.0, (snr_db / 10.0))) if (mask is None): mask = np.ones_like(image) else: mask[(mask > 0)] = 1 mask[(mask < 1)] = 0 if (mask.ndim < image.ndim): mask = np.rollaxis(np.array(([mask] * image.shape[3])), 0, 4) signa...
'Parameters interface: a nipype interface class The interface class to wrap base_dir: a string The directory in which the computation will be stored callback: a callable An optional callable called each time after the function is called.'
def __init__(self, interface, base_dir, callback=None):
if (not (isinstance(interface, type) and issubclass(interface, BaseInterface))): raise ValueError((u'the interface argument should be a nipype interface class, but %s (type %s) was passed.' % (interface, type(interface)))) self.interface = interface base_dir...
'Returns a callable that caches the output of an interface Parameters interface: nipype interface The nipype interface class to be wrapped and cached Returns pipe_func: a PipeFunc callable object An object that can be used as a function to apply the interface to arguments. Inputs of the interface are given as keyword a...
def cache(self, interface):
    """Return a PipeFunc wrapping *interface* that caches its outputs."""
    callback = _MemoryCallback(self)
    return PipeFunc(interface, self.base_dir, callback)
'Increment counters tracking which cached function get executed.'
def _log_name(self, dir_name, job_name):
base_dir = self.base_dir with open(os.path.join(base_dir, u'log.current'), u'a') as currentlog: currentlog.write((u'%s/%s\n' % (dir_name, job_name))) t = time.localtime() year_dir = os.path.join(base_dir, (u'log.%i' % t.tm_year)) try: os.mkdir(year_dir) except OSError: u'...
def clear_previous_runs(self, warn=True):
    """Drop cache entries unused since this Memory object was created.

    Parameters
    ----------
    warn : bool, optional
        When True, echo a warning for every directory removed.
    """
    log_file = os.path.join(self.base_dir, u'log.current')
    self._clear_all_but(read_log(log_file), warn=warn)
'Remove all the cache that where not used since the given date Parameters day, month, year: integers, optional The integers specifying the latest day (in localtime) that a node should have been accessed to be kept. If not given, the current date is used. warn: boolean, optional If true, echoes warning messages for all ...
def clear_runs_since(self, day=None, month=None, year=None, warn=True):
t = time.localtime() day = (day if (day is not None) else t.tm_mday) month = (month if (month is not None) else t.tm_mon) year = (year if (year is not None) else t.tm_year) base_dir = self.base_dir cut_off_file = (u'%s/log.%i/%02i/%02i.log' % (base_dir, year, month, day)) logs_to_flush = lis...
def _clear_all_but(self, runs, warn=True):
    """Remove every cached run apart from those listed in *runs*."""
    rm_all_but(self.base_dir, set(runs.keys()), warn=warn)
    for dir_name, job_names in list(runs.items()):
        rm_all_but(os.path.join(self.base_dir, dir_name), job_names,
                   warn=warn)
'Convert input to appropriate format for seg_maths.'
def _format_arg(self, opt, spec, val):
if ((opt in ['proportion', 'prob_update_flag', 'set_pq', 'mrf_value', 'max_iter', 'unc_thresh', 'conv']) and (self.inputs.classifier_type not in ['STAPLE', 'STEPS'])): return '' if (opt == 'sm_ranking'): return self.get_staple_args(val) if ((opt == 'classifier_type') and (val == 'STEPS')): ...
def _format_arg(self, opt, spec, val):
    """Convert input to the appropriate format for seg_EM."""
    if opt == 'priors':
        priors = self.inputs.priors
        return '-priors %d %s' % (len(priors), ' '.join(priors))
    return super(EM, self)._format_arg(opt, spec, val)
'Convert input to appropriate format for seg_maths.'
def _format_arg(self, opt, spec, val):
if ((opt == 'operand_str') and (self.inputs.operation != 'splitinter')): err = 'operand_str set but with an operation different than "splitinter"' raise NipypeInterfaceError(err) if (opt == 'operation'): if (val in ['pow', 'thr', 'uthr', 'smo', 'edge', 'sobel3', '...
def _format_arg(self, opt, spec, val):
    """Convert input to the appropriate format for seg_maths merge."""
    if opt == 'merge_files':
        return '-merge %d %d %s' % (len(val), self.inputs.dimension,
                                    ' '.join(val))
    return super(Merge, self)._format_arg(opt, spec, val)
def update(self, *args, **kwargs):
    """Update existing attributes or create new ones (like HasTraits.set)."""
    self.__dict__.update(*args, **kwargs)
def items(self):
    """Return the Bunch attributes as a list of (key, value) pairs."""
    return list(self.__dict__.items())
def iteritems(self):
    """Deprecated alias for items(); emits a deprecation message."""
    warn(u'iteritems is deprecated, use items instead')
    return list(self.items())
def get(self, *args):
    """Support dictionary-style get() with an optional default value."""
    return self.__dict__.get(*args)
def set(self, **kwargs):
    """Set attributes from keyword arguments, dictionary-style.

    Mirrors ``dict.update`` and therefore returns None.  (The previous
    description incorrectly said this supported get() functionality.)
    """
    return self.__dict__.update(**kwargs)
def dictcopy(self):
    """Return a deep copy of the Bunch's attributes as a plain dictionary."""
    return deepcopy(self.__dict__)
'representation of the sorted Bunch as a string Currently, this string representation of the `inputs` Bunch of interfaces is hashed to determine if the process\' dirty-bit needs setting or not. Till that mechanism changes, only alter this after careful consideration.'
def __repr__(self):
outstr = [u'Bunch('] first = True for (k, v) in sorted(self.items()): if (not first): outstr.append(u', ') if isinstance(v, dict): pairs = [] for (key, value) in sorted(v.items()): pairs.append((u"'%s': %s" % (key, value))) ...
'Return a dictionary of our items with hashes for each file. Searches through dictionary items and if an item is a file, it calculates the md5 hash of the file contents and stores the file name and hash value as the new key value. However, the overall bunch hash is calculated only on the hash value of a file. The path ...
def _get_bunch_hash(self):
infile_list = [] for (key, val) in list(self.items()): if is_container(val): if isinstance(val, dict): item = None else: if (len(val) == 0): raise AttributeError((u'%s attribute is empty' % key)) item = ...
'Support for the pretty module pretty is included in ipython.externals for ipython > 0.10'
def __pretty__(self, p, cycle):
if cycle: p.text(u'Bunch(...)') else: p.begin_group(6, u'Bunch(') first = True for (k, v) in sorted(self.items()): if (not first): p.text(u',') p.breakable() p.text((k + u'=')) p.pretty(v) first = Fal...
'Initialize handlers and inputs'
def __init__(self, **kwargs):
super(BaseTraitedSpec, self).__init__(**kwargs) traits.push_exception_handler(reraise_exceptions=True) undefined_traits = {} for trait in self.copyable_trait_names(): if (not self.traits()[trait].usedefault): undefined_traits[trait] = Undefined self.trait_set(trait_change_notify=...
def items(self):
    """Yield (name, trait) pairs for user-modifiable traits, sorted by name."""
    for key in sorted(self.copyable_trait_names()):
        yield key, self.traits()[key]
def __repr__(self):
    """Return a newline-delimited 'name = value' listing of all traits."""
    rendered = [u'%s = %s' % (key, val)
                for key, val in sorted(self.trait_get().items())]
    return u'\n{}\n'.format(u'\n'.join(rendered))
'Find all traits with the \'xor\' metadata and attach an event handler to them.'
def _generate_handlers(self):
has_xor = dict(xor=(lambda t: (t is not None))) xors = self.trait_names(**has_xor) for elem in xors: self.on_trait_change(self._xor_warn, elem) has_deprecation = dict(deprecated=(lambda t: (t is not None))) deprecated = self.trait_names(**has_deprecation) for elem in deprecated: ...
'Generates warnings for xor traits'
def _xor_warn(self, obj, name, old, new):
if isdefined(new): trait_spec = self.traits()[name] for trait_name in trait_spec.xor: if (trait_name == name): continue if isdefined(getattr(self, trait_name)): self.trait_set(trait_change_notify=False, **{(u'%s' % name): Undefined}) ...
'Part of the xor behavior'
def _requires_warn(self, obj, name, old, new):
if isdefined(new): trait_spec = self.traits()[name] msg = None for trait_name in trait_spec.requires: if (not isdefined(getattr(self, trait_name))): if (not msg): msg = (u'Input %s requires inputs: %s' % (name, u', '.join(trait_s...
'Checks if a user assigns a value to a deprecated trait'
def _deprecated_warn(self, obj, name, old, new):
if isdefined(new): trait_spec = self.traits()[name] msg1 = (u'Input %s in interface %s is deprecated.' % (name, self.__class__.__name__.split(u'InputSpec')[0])) msg2 = (u'Will be removed or raise an error as of release %s' % trait_spec.deprecat...
'Inject file hashes into adict[key]'
def _hash_infile(self, adict, key):
stuff = adict[key] if (not is_container(stuff)): stuff = [stuff] file_list = [] for afile in stuff: if is_container(afile): hashlist = self._hash_infile({u'infiles': afile}, u'infiles') hash = [val[1] for val in hashlist] elif (config.get(u'execution', u'h...
def get(self, **kwargs):
    """Return the traited spec as a plain dict, with Undefined left in place."""
    raw = super(BaseTraitedSpec, self).get(**kwargs)
    return self._clean_container(raw, Undefined)
def get_traitsfree(self, **kwargs):
    """Return the spec as a plain dict, dropping Undefined attributes."""
    raw = super(BaseTraitedSpec, self).get(**kwargs)
    return self._clean_container(raw, skipundefined=True)
'Convert a traited obejct into a pure python representation.'
def _clean_container(self, object, undefinedval=None, skipundefined=False):
if (isinstance(object, TraitDictObject) or isinstance(object, dict)): out = {} for (key, val) in list(object.items()): if isdefined(val): out[key] = self._clean_container(val, undefinedval) elif (not skipundefined): out[key] = undefinedval ...
def has_metadata(self, name, metadata, value=None, recursive=True):
    """Return whether trait *name* carries *metadata* (module-level helper)."""
    trait_type = self.trait(name).trait_type
    return has_metadata(trait_type, metadata, value, recursive)
'Return a dictionary of our items with hashes for each file. Searches through dictionary items and if an item is a file, it calculates the md5 hash of the file contents and stores the file name and hash value as the new key value. However, the overall bunch hash is calculated only on the hash value of a file. The path ...
def get_hashval(self, hash_method=None):
dict_withhash = [] dict_nofilename = [] for (name, val) in sorted(self.get().items()): if ((not isdefined(val)) or self.has_metadata(name, u'nohash', True)): continue hash_files = ((not self.has_metadata(name, u'hash_files', False)) and (not self.has_metadata(name, u'name_source'...
'bug in deepcopy for HasTraits results in weird cloning behavior for added traits'
def __deepcopy__(self, memo):
id_self = id(self) if (id_self in memo): return memo[id_self] dup_dict = deepcopy(self.get(), memo) for key in self.copyable_trait_names(): if (key in self.__dict__.keys()): _ = getattr(self, key) dup = self.clone_traits(memo=memo) for key in self.copyable_trait_names...
def __init__(self, **inputs):
    """Initialize with the given inputs (abstract; subclasses must override)."""
    raise NotImplementedError
@classmethod
def help(cls):
    """Print the class help (abstract; subclasses must override)."""
    raise NotImplementedError
@classmethod
def _inputs_help(cls):
    """Print the inputs help (abstract; subclasses must override)."""
    raise NotImplementedError
@classmethod
def _outputs_help(cls):
    """Print the outputs help (abstract; subclasses must override)."""
    raise NotImplementedError