desc
stringlengths
3
26.7k
decl
stringlengths
11
7.89k
bodies
stringlengths
8
553k
'Update the learning state. Called from compute() on every iteration :param activeColumns List of active column indices'
def _updateLearningState(self, activeColumns):
self.lrnPredictedState['t-1'][:, :] = self.lrnPredictedState['t'][:, :] self.lrnActiveState['t-1'][:, :] = self.lrnActiveState['t'][:, :] if (self.maxLrnBacktrack > 0): if (len(self._prevLrnPatterns) > self.maxLrnBacktrack): self._prevLrnPatterns.pop(0) self._prevLrnPatterns.appe...
'Handle one compute, possibly learning. .. note:: It is an error to have both ``enableLearn`` and ``enableInference`` set to False .. note:: By default, we don\'t compute the inference output when learning because it slows things down, but you can override this by passing in True for ``enableInference``. :param bottom...
def compute(self, bottomUpInput, enableLearn, enableInference=None):
if (enableInference is None): if enableLearn: enableInference = False else: enableInference = True assert (enableLearn or enableInference) activeColumns = bottomUpInput.nonzero()[0] if enableLearn: self.lrnIterationIdx += 1 self.iterationIdx += 1 i...
def infer(self, bottomUpInput):
    """Run a single inference-only step.

    Convenience wrapper around :meth:`compute` with learning turned off.

    :param bottomUpInput: bottom-up input vector for this step
    :return: result of :meth:`compute`
    """
    return self.compute(bottomUpInput, enableLearn=False)
def learn(self, bottomUpInput, enableInference=None):
    """Run a single learning step.

    Convenience wrapper around :meth:`compute` with learning turned on.

    :param bottomUpInput: bottom-up input vector for this step
    :param enableInference: optional flag forwarded to :meth:`compute`
    :return: result of :meth:`compute`
    """
    return self.compute(
        bottomUpInput, enableLearn=True, enableInference=enableInference)
def _columnConfidences(self):
    """Return the column confidence scores cached by the last compute.

    :returns: the current ('t') column confidence scores
    """
    confidences = self.colConfidence['t']
    return confidences
def topDownCompute(self):
    """Produce the top-down output.

    With no region above us, the top-down output is simply the stored
    column confidences from the last compute.

    :returns: same value as :meth:`_columnConfidences`
    """
    return self._columnConfidences()
'This method goes through a list of segments for a given cell and deletes all synapses whose permanence is less than minPermanence and deletes any segments that have less than minNumSyns synapses remaining. :param colIdx Column index :param cellIdx Cell index within the column :param segList List of ...
def _trimSegmentsInCell(self, colIdx, cellIdx, segList, minPermanence, minNumSyns):
if (minPermanence is None): minPermanence = self.connectedPerm if (minNumSyns is None): minNumSyns = self.activationThreshold (nSegsRemoved, nSynsRemoved) = (0, 0) segsToDel = [] for segment in segList: synsToDel = [syn for syn in segment.syns if (syn[2] < minPermanence)] ...
'This method deletes all synapses whose permanence is less than minPermanence and deletes any segments that have less than minNumSyns synapses remaining. :param minPermanence: (float) Any syn whose permanence is 0 or < ``minPermanence`` will be deleted. If None is passed in, then ``self.connectedPerm`` is used. :param...
def trimSegments(self, minPermanence=None, minNumSyns=None):
if (minPermanence is None): minPermanence = self.connectedPerm if (minNumSyns is None): minNumSyns = self.activationThreshold (totalSegsRemoved, totalSynsRemoved) = (0, 0) for (c, i) in itertools.product(xrange(self.numberOfCols), xrange(self.cellsPerColumn)): (segsRemoved, synsR...
def _cleanUpdatesList(self, col, cellIdx, seg):
    """Remove any queued segment update that targets the given segment.

    Logically this must run whenever a segment is deleted, so that no
    pending update refers to a segment that no longer exists.

    :param col: column index of the cell that owns ``seg``
    :param cellIdx: cell index within that column
    :param seg: segment object whose pending updates should be dropped
    """
    # Keys of self.segmentUpdates are (column, cellIdx) pairs; only the
    # update lists belonging to the matching cell are scanned.
    for (key, updateList) in self.segmentUpdates.iteritems():
        (c, i) = (key[0], key[1])
        if ((c == col) and (i == cellIdx)):
            for update in updateList:
                # update[1] is the segmentUpdate record; match on the
                # segment it references.
                if (update[1].segment == seg):
                    # NOTE(review): _removeSegmentUpdate presumably mutates
                    # updateList while it is being iterated here -- confirm
                    # that is safe (e.g. at most one match per list).
                    self._removeSegmentUpdate(update)
'Called when learning has been completed. This method just calls :meth:`trimSegments` and then clears out caches.'
def finishLearning(self):
self.trimSegments(minPermanence=0.0001) for (c, i) in itertools.product(xrange(self.numberOfCols), xrange(self.cellsPerColumn)): for segment in self.cells[c][i]: segment.dutyCycle() if (self.cellsPerColumn > 1): for c in xrange(self.numberOfCols): assert (self.getNumS...
'This function produces goodness-of-match scores for a set of input patterns, by checking for their presence in the current and predicted output of the TM. Returns a global count of the number of extra and missing bits, the confidence scores for each input pattern, and (if requested) the bits in each input pattern that...
def _checkPrediction(self, patternNZs, output=None, colConfidence=None, details=False):
numPatterns = len(patternNZs) orAll = set() orAll = orAll.union(*patternNZs) if (output is None): assert (self.currentOutput is not None) output = self.currentOutput output = set(output.sum(axis=1).nonzero()[0]) totalExtras = len(output.difference(orAll)) totalMissing = len(o...
def _isSegmentActive(self, seg, activeState):
    """Report whether *seg* is active.

    A segment is active when at least ``activationThreshold`` of its
    connected synapses are active according to *activeState*. Delegates
    to the module-level ``isSegmentActive`` helper.

    :param seg: segment to test
    :param activeState: active-cell state array
    """
    return isSegmentActive(
        seg.syns, activeState, self.connectedPerm, self.activationThreshold)
def _getSegmentActivityLevel(self, seg, activeState, connectedSynapsesOnly=False):
    """Compute the activity level of *seg* given *activeState*.

    Can tally only connected synapses (permanence >= ``connectedPerm``)
    or all synapses of the segment. Delegates to the module-level
    ``getSegmentActivityLevel`` helper.

    :param seg: segment to score
    :param activeState: active-cell state array
    :param connectedSynapsesOnly: count only connected synapses if True
    """
    return getSegmentActivityLevel(
        seg.syns, activeState, connectedSynapsesOnly, self.connectedPerm)
'Find weakly activated cell in column with at least minThreshold active synapses. :param c which column to look at :param activeState the active cells :param minThreshold minimum number of synapses required :returns: tuple (cellIdx, segment, numActiveSynapses)'
def _getBestMatchingCell(self, c, activeState, minThreshold):
bestActivityInCol = minThreshold bestSegIdxInCol = (-1) bestCellInCol = (-1) for i in xrange(self.cellsPerColumn): maxSegActivity = 0 maxSegIdx = 0 for (j, s) in enumerate(self.cells[c][i]): activity = self._getSegmentActivityLevel(s, activeState) if (acti...
'For the given cell, find the segment with the largest number of active synapses. This routine is aggressive in finding the best match. The permanence value of synapses is allowed to be below connectedPerm. The number of active synapses is allowed to be below activationThreshold, but must be above minThreshold. The rou...
def _getBestMatchingSegment(self, c, i, activeState):
(maxActivity, which) = (self.minThreshold, (-1)) for (j, s) in enumerate(self.cells[c][i]): activity = self._getSegmentActivityLevel(s, activeState, connectedSynapsesOnly=False) if (activity >= maxActivity): (maxActivity, which) = (activity, j) if (which == (-1)): return ...
'Return the index of a cell in this column which is a good candidate for adding a new segment. When we have fixed size resources in effect, we insure that we pick a cell which does not already have the max number of allowed segments. If none exists, we choose the least used segment in the column to re-allocate. :param ...
def _getCellForNewSegment(self, colIdx):
if (self.maxSegmentsPerCell < 0): if (self.cellsPerColumn > 1): i = (self._random.getUInt32((self.cellsPerColumn - 1)) + 1) else: i = 0 return i candidateCellIdxs = [] if (self.cellsPerColumn == 1): minIdx = 0 maxIdx = 0 else: minId...
'Return a segmentUpdate data structure containing a list of proposed changes to segment s. Let activeSynapses be the list of active synapses where the originating cells have their activeState output = 1 at time step t. (This list is empty if s is None since the segment doesn\'t exist.) newSynapses is an optional argume...
def _getSegmentActiveSynapses(self, c, i, s, activeState, newSynapses=False):
activeSynapses = [] if (s is not None): activeSynapses = [idx for (idx, syn) in enumerate(s.syns) if activeState[(syn[0], syn[1])]] if newSynapses: nSynapsesToAdd = (self.newSynapseCount - len(activeSynapses)) activeSynapses += self._chooseCellsToLearnFrom(c, i, s, nSynapsesToAdd, ac...
'Choose n random cells to learn from. This function is called several times while learning with timeStep = t-1, so we cache the set of candidates for that case. It\'s also called once with timeStep = t, and we cache that set of candidates. :returns: tuple (column index, cell index).'
def _chooseCellsToLearnFrom(self, c, i, s, n, activeState):
if (n <= 0): return [] tmpCandidates = numpy.where((activeState == 1)) if (len(tmpCandidates[0]) == 0): return [] if (s is None): cands = [syn for syn in zip(tmpCandidates[0], tmpCandidates[1])] else: synapsesAlreadyInSegment = set(((syn[0], syn[1]) for syn in s.syns)...
'Go through the list of accumulated segment updates and process them as follows: if the segment update is too old, remove the update else if the cell received bottom-up, update its permanences else if it\'s still being predicted, leave it in the queue else remove it. :param activeColumns TODO: document'
def _processSegmentUpdates(self, activeColumns):
removeKeys = [] trimSegments = [] for (key, updateList) in self.segmentUpdates.iteritems(): (c, i) = (key[0], key[1]) if (c in activeColumns): action = 'update' elif (self.doPooling and (self.lrnPredictedState['t'][(c, i)] == 1)): action = 'keep' else:...
'This function applies segment update information to a segment in a cell. Synapses on the active list get their permanence counts incremented by permanenceInc. All other synapses get their permanence counts decremented by permanenceDec. We also increment the positiveActivations count of the segment. :param segUpdate Se...
def _adaptSegment(self, segUpdate):
trimSegment = False (c, i, segment) = (segUpdate.columnIdx, segUpdate.cellIdx, segUpdate.segment) activeSynapses = segUpdate.activeSynapses synToUpdate = set([syn for syn in activeSynapses if (type(syn) == int)]) if (segment is not None): if (self.verbosity >= 4): print ('Reinfor...
'Returns information about the distribution of segments, synapses and permanence values in the current TM. If requested, also returns information regarding the number of currently active segments and synapses. :returns: tuple described below: nSegments, nSynapses, nActiveSegs, nActiveSynapses, distSegSizes, distNSegsPe...
def getSegmentInfo(self, collectActiveData=False):
(nSegments, nSynapses) = (0, 0) (nActiveSegs, nActiveSynapses) = (0, 0) (distSegSizes, distNSegsPerCell) = ({}, {}) distPermValues = {} numAgeBuckets = 20 distAges = [] ageBucketSize = int(((self.lrnIterationIdx + 20) / 20)) for i in range(numAgeBuckets): distAges.append([('%d-%d...
'Compute/update and return the positive activations duty cycle of this segment. This is a measure of how often this segment is providing good predictions. :param active True if segment just provided a good prediction :param readOnly If True, compute the updated duty cycle, but don\'t change the cached value. This is ...
def dutyCycle(self, active=False, readOnly=False):
if (self.tm.lrnIterationIdx <= self.dutyCycleTiers[1]): dutyCycle = (float(self.positiveActivations) / self.tm.lrnIterationIdx) if (not readOnly): self._lastPosDutyCycleIteration = self.tm.lrnIterationIdx self._lastPosDutyCycle = dutyCycle return dutyCycle age = (...
'Print segment information for verbose messaging and debugging. This uses the following format: ID:54413 True 0.64801 (24/36) 101 [9,1]0.75 [10,1]0.75 [11,1]0.75 where: 54413 - is the unique segment id True - is sequence segment 0.64801 - moving average duty cycle (24/36) - (numPositiveActivations / numTotalActivations...
def debugPrint(self):
print ('ID:%-5d' % self.segID), if self.isSequenceSeg: print 'True', else: print 'False', print ('%9.7f' % self.dutyCycle(readOnly=True)), print ('(%4d/%-4d)' % (self.positiveActivations, self.totalActivations)), print ('%4d' % (self.tm.lrnIterationIdx - self.lastActiveIteration)...
'Free up some synapses in this segment. We always free up inactive synapses (lowest permanence freed up first) before we start to free up active ones. :param numToFree number of synapses to free up :param inactiveSynapseIndices list of the inactive synapse indices.'
def freeNSynapses(self, numToFree, inactiveSynapseIndices, verbosity=0):
assert (numToFree <= len(self.syns)) if (verbosity >= 4): print '\nIn PY freeNSynapses with numToFree =', numToFree, print 'inactiveSynapseIndices =', for i in inactiveSynapseIndices: print self.syns[i][0:2], print if (len(inactiveSynapseIndices)...
def addSynapse(self, srcCellCol, srcCellIdx, perm):
    """Append a new synapse to this segment.

    :param srcCellCol: source cell column
    :param srcCellIdx: source cell index within the column
    :param perm: initial permanence value
    """
    newSyn = [int(srcCellCol), int(srcCellIdx), numpy.float32(perm)]
    self.syns.append(newSyn)
'Update a set of synapses in the segment. :param tm The owner TM :param synapses List of synapse indices to update :param delta How much to add to each permanence :returns: True if synapse reached 0'
def updateSynapses(self, synapses, delta):
reached0 = False if (delta > 0): for synapse in synapses: self.syns[synapse][2] = newValue = (self.syns[synapse][2] + delta) if (newValue > self.tm.permanenceMax): self.syns[synapse][2] = self.tm.permanenceMax else: for synapse in synapses: ...
'Initialize ephemeral instance variables (those that aren\'t serialized)'
def __constructEphemeralInstanceVars(self):
assert (not hasattr(self, 'ephemeral')) self.ephemeral = DictObj() self.ephemeral.logPathInput = '' self.ephemeral.logPathOutput = '' self.ephemeral.logPathOutputDense = '' self.ephemeral._fpLogInput = None self.ephemeral._fpLogOutput = None self.ephemeral._fpLogOutputDense = None re...
def initialize(self):
    """Called by the network after all links have been set up.

    Gives the installed identity policy a chance to initialize itself
    against this region, then logs the policy name.
    """
    policy = self.identityPolicy
    policy.initialize(self)
    _debugOut(policy.getName())
def compute(self, inputs, outputs):
    """Run one iteration of the region's compute.

    The actual work is delegated to the installed identity policy; the
    call is then traced via the module-level ``_debugOut`` helper.

    :param inputs: region input dictionary
    :param outputs: region output dictionary, filled in by the policy
    """
    policy = self.identityPolicy
    policy.compute(inputs, outputs)
    _debugOut(('%s: inputs=%s; outputs=%s' % (policy.getName(), inputs, outputs)))
'Return the base Spec for TestRegion.'
@classmethod def getSpec(cls):
spec = dict(description='TestRegion', singleNodeOnly=True, inputs=dict(bottomUpIn=dict(description='The input vector.', dataType='Real32', count=0, required=False, regionLevel=True, isDefaultInput=True, requireSplitterMap=False), topDownIn=dict(description='The top-down input signal, generated ...
def getParameter(self, parameterName, index=(-1)):
    """Get the value of a NodeSpec parameter.

    Ephemeral parameters are served from ``self.ephemeral``; everything
    else falls through to PyRegion's automatic parameter-get mechanism.

    :param parameterName: name of the parameter to fetch
    :param index: array index for array parameters (-1 for scalars)
    """
    inInstance = parameterName in self.__dict__
    inEphemeral = parameterName in self.ephemeral
    # A parameter must live in exactly one of the two namespaces.
    assert not (inInstance and inEphemeral)
    if inEphemeral:
        return self.ephemeral[parameterName]
    return super(PyRegion, self).getParameter(parameterName, index)
'Set the value of a Spec parameter. Most parameters are handled automatically by PyRegion\'s parameter set mechanism. The ones that need special treatment are explicitly handled here.'
def setParameter(self, parameterName, index, parameterValue):
assert (not ((parameterName in self.__dict__) and (parameterName in self.ephemeral))) if (parameterName in self.ephemeral): if (parameterName == 'logPathInput'): self.ephemeral.logPathInput = parameterValue if self.ephemeral._fpLogInput: self.ephemeral._fpLogInput...
def setIdentityPolicyInstance(self, identityPolicyObj):
    """Install the identity policy instance for this TestRegion.

    The instance MUST derive from RegionIdentityPolicyBase and MUST be
    set before running the network.

    :param identityPolicyObj: the policy instance to install
    :raises AssertionError: if a policy is already set or the instance
        is not a RegionIdentityPolicyBase
    """
    assert not self.identityPolicy
    assert isinstance(identityPolicyObj, RegionIdentityPolicyBase)
    self.identityPolicy = identityPolicyObj
def getIdentityPolicyInstance(self):
    """Return the identity policy installed via setIdentityPolicyInstance().

    :returns: the RegionIdentityPolicyBase-derived instance
    :raises AssertionError: if no identity policy has been set
    """
    assert self.identityPolicy
    return self.identityPolicy
def write(self, proto):
    """Serialize this region's persistent state into *proto*.

    Only the two break flags are persisted; the ephemerals and the
    identity policy are deliberately excluded from saved state.

    :param proto: a TestRegionProto instance to serialize into
    """
    proto.breakKomodo = self.breakKomodo
    proto.breakPdb = self.breakPdb
def read(self, proto):
    """Load persistent state from *proto* and rebuild ephemerals.

    The saved state does not include the identity policy; callers must
    re-install one via setIdentityPolicyInstance() after deserializing.

    :param proto: a TestRegionProto instance to load state from
    """
    self.breakKomodo = proto.breakKomodo
    self.breakPdb = proto.breakPdb
    self.__constructEphemeralInstanceVars()
def __getstate__(self):
    """Return a picklable copy of ``__dict__``.

    The 'ephemeral' member holds values that must not be persisted
    (file handles, log paths), so it is stripped from the copy.
    """
    picklable = dict(self.__dict__)
    del picklable['ephemeral']
    return picklable
def __setstate__(self, state):
    """Restore instance attributes from *state*, then rebuild ephemerals.

    :param state: dict produced by ``__getstate__`` (must not contain
        the 'ephemeral' member)
    """
    assert ('ephemeral' not in state)
    self.__dict__.update(state)
    self.__constructEphemeralInstanceVars()
'Initialize all ephemeral data members, and give the derived class the opportunity to do the same by invoking the virtual member _initEphemerals(), which is intended to be overridden. NOTE: this is used by both __init__ and __setstate__ code paths.'
def _initializeEphemeralMembers(self):
for attrName in self._getEphemeralMembersBase(): if (attrName != '_loaded'): if hasattr(self, attrName): if self._loaded: pass else: print self.__class__.__name__, ("contains base class member '%s'" % attrName) ...
def initialize(self):
    """Overrides :meth:`~nupic.bindings.regions.PyRegion.PyRegion.initialize`.

    Allocates the SP output and input buffers and (lazily) the spatial
    pooler instance itself.
    """
    realDType = GetNTAReal()
    self._spatialPoolerOutput = numpy.zeros(self.columnCount, dtype=realDType)
    self._spatialPoolerInput = numpy.zeros((1, self.inputWidth), dtype=realDType)
    self._allocateSpatialFDR(None)
'Allocate the spatial pooler instance.'
def _allocateSpatialFDR(self, rfInput):
if self._sfdr: return autoArgs = dict(((name, getattr(self, name)) for name in self._spatialArgNames)) if ((self.SpatialClass == CPPSpatialPooler) or (self.SpatialClass == PYSpatialPooler)): autoArgs['columnDimensions'] = [self.columnCount] autoArgs['inputDimensions'] = [self.inputWi...
'Run one iteration, profiling it if requested. :param inputs: (dict) mapping region input names to numpy.array values :param outputs: (dict) mapping region output names to numpy.arrays that should be populated with output values by this method'
def compute(self, inputs, outputs):
if (False and self.learningMode and (self._iterations > 0) and (self._iterations <= 10)): import hotshot if (self._iterations == 10): print '\n Collecting and sorting internal node profiling stats generated by hotshot...' stats = hotshot.stat...
'Run one iteration of SPRegion\'s compute'
def _compute(self, inputs, outputs):
if (self._sfdr is None): raise RuntimeError('Spatial pooler has not been initialized') if (not self.topDownMode): self._iterations += 1 buInputVector = inputs['bottomUpIn'] resetSignal = False if ('resetIn' in inputs): assert (len(inputs['resetI...
'Do one iteration of inference and/or learning and return the result Parameters: rfInput: Input vector. Shape is: (1, inputVectorLen). resetSignal: True if reset is asserted'
def _doBottomUpCompute(self, rfInput, resetSignal):
self._conditionalBreak() self._spatialPoolerInput = rfInput.reshape((-1)) assert (rfInput.shape[0] == 1) inputVector = numpy.array(rfInput[0]).astype('uint32') outputVector = numpy.zeros(self._sfdr.getNumColumns()).astype('uint32') self._sfdr.compute(inputVector, self.learningMode, outputVector)...
def _doTopDownInfer(self, topDownInput=None):
    """Perform one iteration of top-down inference.

    Top-down inference is not implemented by this region, so both
    components of the result are always None.

    :param topDownInput: top-down input (ignored)
    :returns: tuple (spatialTopDownOut, temporalTopDownOut), both None
    """
    spatialTopDownOut = None
    temporalTopDownOut = None
    return (spatialTopDownOut, temporalTopDownOut)
'Doesn\'t include the spatial, temporal and other parameters :returns: (dict) The base Spec for SPRegion.'
@classmethod def getBaseSpec(cls):
spec = dict(description=SPRegion.__doc__, singleNodeOnly=True, inputs=dict(bottomUpIn=dict(description='The input vector.', dataType='Real32', count=0, required=True, regionLevel=False, isDefaultInput=True, requireSplitterMap=False), resetIn=dict(description='A boolean flag that indicates wheth...
@classmethod
def getSpec(cls):
    """Overrides :meth:`~nupic.bindings.regions.PyRegion.PyRegion.getSpec`.

    Builds the full spec by merging the spatial and "other" parameter
    specs (from ``_getAdditionalSpecs``) into the base spec.
    """
    spec = cls.getBaseSpec()
    spatialSpec, otherSpec = _getAdditionalSpecs(spatialImp=getDefaultSPImp())
    spec['parameters'].update(spatialSpec)
    spec['parameters'].update(otherSpec)
    return spec
def getAlgorithmInstance(self):
    """Return the underlying spatial pooler algorithm instance.

    :returns: the :class:`~nupic.algorithms.spatial_pooler.SpatialPooler`
        held in ``_sfdr``
    """
    return self._sfdr
'Overrides :meth:`~nupic.bindings.regions.PyRegion.PyRegion.getParameter`. Most parameters are handled automatically by PyRegion\'s parameter get mechanism. The ones that need special treatment are explicitly handled here.'
def getParameter(self, parameterName, index=(-1)):
if (parameterName == 'activeOutputCount'): return self.columnCount elif (parameterName == 'spatialPoolerInput'): return list(self._spatialPoolerInput.reshape((-1))) elif (parameterName == 'spatialPoolerOutput'): return list(self._spatialPoolerOutput) elif (parameterName == 'spNum...
'Overrides :meth:`~nupic.bindings.regions.PyRegion.PyRegion.setParameter`. Set the value of a Spec parameter. Most parameters are handled automatically by PyRegion\'s parameter set mechanism. The ones that need special treatment are explicitly handled here.'
def setParameter(self, parameterName, index, parameterValue):
if (parameterName in self._spatialArgNames): setattr(self._sfdr, parameterName, parameterValue) elif (parameterName == 'logPathInput'): self.logPathInput = parameterValue if self._fpLogSPInput: self._fpLogSPInput.close() self._fpLogSPInput = None if parame...
@staticmethod
def getSchema():
    """Overrides :meth:`~nupic.bindings.regions.PyRegion.PyRegion.getSchema`.

    :returns: the capnp proto class used to (de)serialize SPRegion
    """
    return SPRegionProto
'Overrides :meth:`~nupic.bindings.regions.PyRegion.PyRegion.writeToProto`. Write state to proto object. :param proto: SPRegionProto capnproto object'
def writeToProto(self, proto):
proto.spatialImp = self.spatialImp proto.columnCount = self.columnCount proto.inputWidth = self.inputWidth proto.learningMode = (1 if self.learningMode else 0) proto.inferenceMode = (1 if self.inferenceMode else 0) proto.anomalyMode = (1 if self.anomalyMode else 0) proto.topDownMode = (1 if ...
'Overrides :meth:`~nupic.bindings.regions.PyRegion.PyRegion.readFromProto`. Read state from proto object. :param proto: SPRegionProto capnproto object'
@classmethod def readFromProto(cls, proto):
instance = cls(proto.columnCount, proto.inputWidth) instance.spatialImp = proto.spatialImp instance.learningMode = proto.learningMode instance.inferenceMode = proto.inferenceMode instance.anomalyMode = proto.anomalyMode instance.topDownMode = proto.topDownMode spatialImp = proto.spatialImp ...
def __getstate__(self):
    """Return a picklable version of ``__dict__``.

    Every "ephemeral" member (file handles, log paths, etc. -- anything
    reported by ``_getEphemeralMembersAll``) is stripped from the copy.
    """
    state = dict(self.__dict__)
    for name in self._getEphemeralMembersAll():
        # Missing members are fine -- not every ephemeral is always set.
        state.pop(name, None)
    return state
def __setstate__(self, state):
    """Restore from serialized state, then rebuild the ephemerals and the
    spatial pooler instance.

    :param state: dict produced by ``__getstate__``
    """
    self.__dict__.update(state)
    self._loaded = True
    # Backwards compatibility: older checkpoints predate the SpatialClass
    # attribute, so recover it from the deserialized pooler itself.
    if not hasattr(self, 'SpatialClass'):
        self.SpatialClass = self._sfdr.__class__
    self._initializeEphemeralMembers()
    self._allocateSpatialFDR(None)
'Initialize all ephemerals used by derived classes.'
def _initEphemerals(self):
if (hasattr(self, '_sfdr') and self._sfdr): self._spatialPoolerOutput = numpy.zeros(self.columnCount, dtype=GetNTAReal()) else: self._spatialPoolerOutput = None self._fpLogSPInput = None self._fpLogSP = None self._fpLogSPDense = None self.logPathInput = '' self.logPathOutput ...
def _getEphemeralMembers(self):
    """List the data members that must not (or cannot) be pickled."""
    return [
        '_spatialPoolerOutput',
        '_fpLogSP',
        '_fpLogSPDense',
        'logPathInput',
        'logPathOutput',
        'logPathOutputDense',
    ]
def _getEphemeralMembersBase(self):
    """List the base-class ephemeral members (never serialized)."""
    return [
        '_loaded',
        '_profileObj',
        '_iterations',
    ]
def _getEphemeralMembersAll(self):
    """Return the base-class ephemeral members concatenated with any
    additional ephemeral members (file handles, etc.).
    """
    members = list(self._getEphemeralMembersBase())
    members.extend(self._getEphemeralMembers())
    return members
def getParameterArrayCount(self, name, index):
    """Overrides
    :meth:`~nupic.bindings.regions.PyRegion.PyRegion.getParameterArrayCount`.

    :returns: length of the array-valued parameter *name*
    :raises Exception: if the parameter is not array-like
    """
    value = self.getParameter(name)
    if not hasattr(value, '__len__'):
        raise Exception(("Attempt to access parameter '%s' as an array but it is not an array" % name))
    return len(value)
def getParameterArray(self, name, index, a):
    """Overrides
    :meth:`~nupic.bindings.regions.PyRegion.PyRegion.getParameterArray`.

    Copies the array-valued parameter *name* into the caller-provided
    buffer *a* (in place, via slice assignment).

    :raises Exception: if the parameter is not array-like
    """
    value = self.getParameter(name)
    if not hasattr(value, '__len__'):
        raise Exception(("Attempt to access parameter '%s' as an array but it is not an array" % name))
    # Empty parameters leave the caller's buffer untouched.
    if len(value) > 0:
        a[:] = value[:]
'Verify the validity of the node spec object The type of each sub-object is verified and then the validity of each node spec item is verified by calling it invariant() method. It also makes sure that there is at most one default input and one default output.'
def invariant(self):
assert isinstance(self.description, str) assert isinstance(self.singleNodeOnly, bool) assert isinstance(self.inputs, dict) assert isinstance(self.outputs, dict) assert isinstance(self.parameters, dict) assert isinstance(self.commands, dict) hasDefaultInput = False for (k, v) in self.inpu...
'Convert the information of the node spec to a plain dict of basic types The description and singleNodeOnly attributes are placed directly in the result dicts. The inputs, outputs, parameters and commands dicts contain Spec item objects (InputSpec, OutputSpec, etc). Each such object is converted also to a plain dict us...
def toDict(self):
def items2dict(items): 'Convert a dict of node spec items to a plain dict\n\n Each node spec item object will be converted to a dict of its\n attributes. The entire items dict will ...
'Overrides :meth:`nupic.bindings.regions.PyRegion.PyRegion.getSpec`.'
@classmethod def getSpec(cls):
ns = dict(description=KNNClassifierRegion.__doc__, singleNodeOnly=True, inputs=dict(categoryIn=dict(description='Vector of zero or more category indices for this inputsample. -1 implies no category.', dataType='Real32', count=0, required=True, regionLevel=True, isDefaultInput=...
def _getEphemeralAttributes(self):
    """List the attribute names excluded from serialized state."""
    return [
        '_firstComputeCall',
        '_accuracy',
        '_protoScores',
        '_categoryDistances',
    ]
'Initialize attributes that are not saved with the checkpoint.'
def _initEphemerals(self):
self._firstComputeCall = True self._accuracy = None self._protoScores = None self._categoryDistances = None self._knn = knn_classifier.KNNClassifier(**self.knnParams) for x in ('_partitions', '_useAuxiliary', '_doSphering', '_scanInfo', '_protoScores'): if (not hasattr(self, x)): ...
'Set state from serialized state.'
def __setstate__(self, state):
if ('version' not in state): self.__dict__.update(state) elif (state['version'] == 1): if ('doSelfValidation' in state): state.pop('doSelfValidation') knnState = state['_knn_state'] del state['_knn_state'] self.__dict__.update(state) self._initEphemera...
def __getstate__(self):
    """Return the serializable state.

    The wrapped KNN classifier is stored via its own ``__getstate__``
    under the '_knn_state' key, and all ephemeral attributes are
    stripped out.
    """
    state = dict(self.__dict__)
    state['_knn_state'] = self._knn.__getstate__()
    del state['_knn']
    for name in self._getEphemeralAttributes():
        del state[name]
    return state
def getAlgorithmInstance(self):
    """Return the wrapped classifier.

    :returns: the :class:`~nupic.algorithms.knn_classifier.KNNClassifier`
        instance held in ``_knn``
    """
    return self._knn
'Overrides :meth:`nupic.bindings.regions.PyRegion.PyRegion.getParameter`.'
def getParameter(self, name, index=(-1)):
if (name == 'patternCount'): return self._knn._numPatterns elif (name == 'patternMatrix'): return self._getPatternMatrix() elif (name == 'k'): return self._knn.k elif (name == 'distanceNorm'): return self._knn.distanceNorm elif (name == 'distanceMethod'): retu...
'Overrides :meth:`nupic.bindings.regions.PyRegion.PyRegion.setParameter`.'
def setParameter(self, name, index, value):
if (name == 'learningMode'): self.learningMode = bool(int(value)) self._epoch = 0 elif (name == 'inferenceMode'): self._epoch = 0 if (int(value) and (not self.inferenceMode)): self._finishLearning() self.inferenceMode = bool(int(value)) elif (name == 'dist...
def reset(self):
    """Reset the confusion matrix back to a single zeroed cell."""
    self.confusion = numpy.zeros((1, 1))
def enableTap(self, tapPath):
    """Begin writing input/output tap files.

    :param tapPath: (string) base name of the tap files; '.in' and
        '.out' suffixes are appended for the two streams
    """
    inPath = tapPath + '.in'
    outPath = tapPath + '.out'
    self._tapFileIn = open(inPath, 'w')
    self._tapFileOut = open(outPath, 'w')
def disableTap(self):
    """Stop writing tap files, closing any handle that is still open."""
    for attr in ('_tapFileIn', '_tapFileOut'):
        handle = getattr(self, attr)
        if handle is not None:
            handle.close()
            setattr(self, attr, None)
def handleLogInput(self, inputs):
    """Write inputs to the input tap file, one vector per line.

    Elements of each vector are printed space-separated, then the line
    is terminated. No-op when tapping is disabled (tap file is None).

    :param inputs: (iter) input vectors to log
    """
    if (self._tapFileIn is not None):
        for input in inputs:
            for k in range(len(input)):
                # Trailing comma: Python 2 prints the value followed by a
                # softspace instead of a newline.
                print >>self._tapFileIn, input[k],
            # Bare print terminates the line for this input vector.
            print >>self._tapFileIn
def handleLogOutput(self, output):
    """Write one output vector to the output tap file.

    Elements are printed space-separated on a single line. No-op when
    tapping is disabled (tap file is None).

    :param output: (iter) output vector to log
    """
    if (self._tapFileOut is not None):
        for k in range(len(output)):
            # Trailing comma: Python 2 prints the value followed by a
            # softspace instead of a newline.
            print >>self._tapFileOut, output[k],
        # Bare print terminates the line.
        print >>self._tapFileOut
'Store a training sample and associated category label'
def _storeSample(self, inputVector, trueCatIndex, partition=0):
if (self._samples is None): self._samples = numpy.zeros((0, len(inputVector)), dtype=RealNumpyDType) assert (self._labels is None) self._labels = [] self._samples = numpy.concatenate((self._samples, numpy.atleast_2d(inputVector)), axis=0) self._labels += [trueCatIndex] if (self._...
'Process one input sample. This method is called by the runtime engine. .. note:: the number of input categories may vary, but the array size is fixed to the max number of categories allowed (by a lower region), so "unused" indices of the input category array are filled with -1s. TODO: confusion matrix does not support...
def compute(self, inputs, outputs):
if (self._useAuxiliary is None): self._useAuxiliary = False if self._firstComputeCall: self._firstComputeCall = False if self._useAuxiliary: if (self._justUseAuxiliary == True): print ' Warning: You have chosen to ignore the image ...
def getCategoryList(self):
    """Public API for returning the category list.

    This is a required API of the NearestNeighbor inspector.

    :returns: (list) one entry per stored prototype; each entry is the
        category number of that stored prototype.
    """
    categories = self._knn._categoryList
    return categories
def removeCategory(self, categoryToRemove):
    """Remove a category from the underlying KNN classifier.

    :param categoryToRemove: (string) label to remove
    :returns: whatever the wrapped classifier's ``removeCategory`` returns
    """
    knn = self._knn
    return knn.removeCategory(categoryToRemove)
def getLatestDistances(self):
    """Public API for returning the full scores (distance to each
    prototype) from the last inference call.

    This is a required API of the NearestNeighbor inspector.

    :returns: (list) one entry per stored prototype — the distance of the
        most recently inferred input from that prototype — or None if no
        scores have been computed yet.
    """
    scores = self._protoScores
    if scores is None:
        return None
    if not self.keepAllDistances:
        return scores
    # When every inference's scores are retained, the most recent set is
    # the last row written.
    return scores[self._protoScoreCount - 1, :]
def getAllDistances(self):
    """Like :meth:`getLatestDistances`, but returns every stored set of
    scores when more than one is available (``getLatestDistances`` always
    returns a single set).

    :returns: (list) all the prototype distances from all computes
        available, or None if no scores have been computed yet.
    """
    if self._protoScores is None:
        return None
    count = self._protoScoreCount
    return self._protoScores[:count, :]
'Does nothing. Kept here for API compatibility'
def _finishLearning(self):
if self._doSphering: self._finishSphering() self._knn.finishLearning() self._accuracy = None
'Compute normalization constants for each feature dimension based on the collected training samples. Then normalize our training samples using these constants (so that each input dimension has mean and variance of zero and one, respectively.) Then feed these "sphered" training samples into the underlying SVM model.'
def _finishSphering(self):
self._normOffset = (self._samples.mean(axis=0) * (-1.0)) self._samples += self._normOffset variance = self._samples.var(axis=0) variance[numpy.where((variance == 0.0))] = 1.0 self._normScale = (1.0 / numpy.sqrt(variance)) self._samples *= self._normScale for sampleIndex in range(len(self._la...
'Overrides :meth:`nupic.bindings.regions.PyRegion.PyRegion.getOutputElementCount`.'
def getOutputElementCount(self, name):
if (name == 'categoriesOut'): return self.maxCategoryCount elif (name == 'categoryProbabilitiesOut'): return self.maxCategoryCount elif (name == 'bestPrototypeIndices'): return (self._bestPrototypeIndexCount if self._bestPrototypeIndexCount else 0) else: raise Exception((...
def rewind(self):
    """Reset the sensor to the beginning of the data stream."""
    self._iterNum = 0
    source = self.dataSource
    if source is not None:
        source.rewind()
'Get the next record to encode. Includes getting a record from the `dataSource` and applying filters. If the filters request more data from the `dataSource` continue to get data from the `dataSource` until all filters are satisfied. This method is separate from :meth:`.RecordSensor.compute` so that we can use a standal...
def getNextRecord(self):
allFiltersHaveEnoughData = False while (not allFiltersHaveEnoughData): data = self.dataSource.getNextRecordDict() if (not data): raise StopIteration('Datasource has no more data') if ('_reset' not in data): data['_reset'] = 0 if ('_sequenceId' ...
'Apply pre-encoding filters. These filters may modify or add data. If a filter needs another record (e.g. a delta filter) it will request another record by returning False and the current record will be skipped (but will still be given to all filters). We have to be very careful about resets. A filter may add a reset, ...
def applyFilters(self, data):
if (self.verbosity > 0): print ('RecordSensor got data: %s' % data) allFiltersHaveEnoughData = True if (len(self.preEncodingFilters) > 0): originalReset = data['_reset'] actualReset = originalReset for f in self.preEncodingFilters: filterHasEnoughData = f...
def populateCategoriesOut(self, categories, output):
    """Populate the output array with the category indices.

    .. note:: Non-categories are represented with ``-1``.

    :param categories: (list) of category indices; ``[None]`` (or an empty
        list) means no category is available for this sample
    :param output: (numpy array) category output, will be overwritten
    """
    if not categories or categories[0] is None:
        # No categories for this sample: mark every slot as "no category".
        # (An empty ``categories`` list previously raised IndexError here.)
        output[:] = -1
    else:
        # Copy as many categories as fit, then pad the remainder with -1.
        for i, cat in enumerate(categories[:len(output)]):
            output[i] = cat
        output[len(categories):] = -1
'Get a record from the dataSource and encode it. Overrides :meth:`nupic.bindings.regions.PyRegion.PyRegion.compute`.'
def compute(self, inputs, outputs):
if (not self.topDownMode): data = self.getNextRecord() reset = data['_reset'] sequenceId = data['_sequenceId'] categories = data['_category'] self.encoder.encodeIntoArray(data, outputs['dataOut']) if ((self.predictedField is not None) and (self.predictedField != 'vect...
'Converts all of the non-numeric fields from spatialOutput and temporalOutput into their scalar equivalents and records them in the output dictionary. :param spatialOutput: The results of topDownCompute() for the spatial input. :param temporalOutput: The results of topDownCompute() for the temporal input. :param output...
def _convertNonNumericData(self, spatialOutput, temporalOutput, output):
encoders = self.encoder.getEncoderList() types = self.encoder.getDecoderOutputFieldTypes() for (i, (encoder, type)) in enumerate(zip(encoders, types)): spatialData = spatialOutput[i] temporalData = temporalOutput[i] if ((type != FieldMetaType.integer) and (type != FieldMetaType.float...
def getOutputValues(self, outputName):
    """Return the stored output values for the named output.

    .. note:: These are normal Python lists, rather than numpy arrays,
       to support lists with mixed scalars and strings (as in records
       with categorical variables).

    :param outputName: (string) name of the output to look up
    :returns: (dict) output values.
    """
    values = self._outputValues[outputName]
    return values
'Computes the width of dataOut. Overrides :meth:`nupic.bindings.regions.PyRegion.PyRegion.getOutputElementCount`.'
def getOutputElementCount(self, name):
if (name == 'resetOut'): print 'WARNING: getOutputElementCount should not have been called with resetOut' return 1 elif (name == 'sequenceIdOut'): print 'WARNING: getOutputElementCount should not have been called with sequenceIdOut' ...
def setParameter(self, parameterName, index, parameterValue):
    """Set the value of a Spec parameter.

    Most parameters are handled automatically by PyRegion's parameter set
    mechanism; the ones needing special treatment are handled here.

    :param parameterName: (string) name of the parameter to set
    :param index: unused, kept for API compatibility
    :param parameterValue: new value for the parameter
    :raises Exception: if the parameter name is not recognized
    """
    # Only these two parameters are settable through this path.
    if parameterName not in ('topDownMode', 'predictedField'):
        raise Exception('Unknown parameter: ' + parameterName)
    setattr(self, parameterName, parameterValue)
@staticmethod
def getSchema():
    """Overrides :meth:`nupic.bindings.regions.PyRegion.PyRegion.getSchema`.

    :returns: the capnp proto class used to (de)serialize this region.
    """
    return RecordSensorProto
def writeToProto(self, proto):
    """Overrides :meth:`nupic.bindings.regions.PyRegion.PyRegion.writeToProto`.

    Serializes the encoder(s) and scalar region state into ``proto``.
    """
    self.encoder.write(proto.encoder)
    disabled = self.disabledEncoder
    if disabled is not None:
        disabled.write(proto.disabledEncoder)
    proto.topDownMode = int(self.topDownMode)
    proto.verbosity = self.verbosity
    proto.numCategories = self.numCategories
'Overrides :meth:`nupic.bindings.regions.PyRegion.PyRegion.readFromProto`.'
@classmethod def readFromProto(cls, proto):
instance = cls() instance.encoder = MultiEncoder.read(proto.encoder) if (proto.disabledEncoder is not None): instance.disabledEncoder = MultiEncoder.read(proto.disabledEncoder) instance.topDownMode = bool(proto.topDownMode) instance.verbosity = proto.verbosity instance.numCategories = pr...
'Initialize all ephemeral data members, and give the derived class the opportunity to do the same by invoking the virtual member _initEphemerals(), which is intended to be overridden.'
def _initialize(self):
for attrName in self._getEphemeralMembersBase(): if (attrName != '_loaded'): if hasattr(self, attrName): if self._loaded: pass else: print self.__class__.__name__, ("contains base class member '%s'" % attrName) ...
'Overrides :meth:`~nupic.bindings.regions.PyRegion.initialize`.'
def initialize(self):
autoArgs = dict(((name, getattr(self, name)) for name in self._temporalArgNames)) if (self._tfdr is None): tpClass = _getTPClass(self.temporalImp) if (self.temporalImp in ['py', 'cpp', 'r', 'tm_py', 'tm_cpp', 'monitored_tm_py']): self._tfdr = tpClass(numberOfCols=self.columnCount, ce...
'Run one iteration of :class:`~nupic.regions.tm_region.TMRegion` compute, profiling it if requested. :param inputs: (dict) mapping region input names to numpy.array values :param outputs: (dict) mapping region output names to numpy.arrays that should be populated with output values by this method'
def compute(self, inputs, outputs):
if (False and self.learningMode and (self._iterations > 0) and (self._iterations <= 10)): import hotshot if (self._iterations == 10): print '\n Collecting and sorting internal node profiling stats generated by hotshot...' stats = hotshot.stat...