desc
stringlengths
3
26.7k
decl
stringlengths
11
7.89k
bodies
stringlengths
8
553k
'Metric for number of predicted => active cells per column for each sequence @return (Metric) metric'
def mmGetMetricSequencesPredictedActiveCellsPerColumn(self):
self._mmComputeTransitionTraces() numCellsPerColumn = [] for predictedActiveCells in self._mmData['predictedActiveCellsForSequence'].values(): cellsForColumn = self.mapCellsToColumns(predictedActiveCells) numCellsPerColumn += [len(x) for x in cellsForColumn.values()] return Metric(self, ...
'Metric for number of sequences each predicted => active cell appears in Note: This metric is flawed when it comes to high-order sequences. @return (Metric) metric'
def mmGetMetricSequencesPredictedActiveCellsShared(self):
self._mmComputeTransitionTraces() numSequencesForCell = defaultdict((lambda : 0)) for predictedActiveCells in self._mmData['predictedActiveCellsForSequence'].values(): for cell in predictedActiveCells: numSequencesForCell[cell] += 1 return Metric(self, '# sequences each pred...
'Pretty print the connections in the temporal memory. TODO: Use PrettyTable. @return (string) Pretty-printed text'
def mmPrettyPrintConnections(self):
text = '' text += 'Segments: (format => (#) [(source cell=permanence ...), ...]\n' text += '------------------------------------\n' columns = range(self.numberOfColumns()) for column in columns: cells = self.cellsForColumn(column) for cell...
'Pretty print the cell representations for sequences in the history. @param sortby (string) Column of table to sort by @return (string) Pretty-printed text'
def mmPrettyPrintSequenceCellRepresentations(self, sortby='Column'):
self._mmComputeTransitionTraces() table = PrettyTable(['Pattern', 'Column', 'predicted=>active cells']) for (sequenceLabel, predictedActiveCells) in self._mmData['predictedActiveCellsForSequence'].iteritems(): cellsForColumn = self.mapCellsToColumns(predictedActiveCells) for (column, cell...
'Computes the transition traces, if necessary. Transition traces are the following: predicted => active cells predicted => inactive cells predicted => active columns predicted => inactive columns unpredicted => active columns'
def _mmComputeTransitionTraces(self):
if (not self._mmTransitionTracesStale): return self._mmData['predictedActiveCellsForSequence'] = defaultdict(set) self._mmTraces['predictedActiveCells'] = IndicesTrace(self, 'predicted => active cells (correct)') self._mmTraces['predictedInactiveCells'] = IndicesTrace(self, 'predicte...
'Returns plot of the cell activity. @param title (string) an optional title for the figure @param showReset (bool) if true, the first set of cell activities after a reset will have a gray background @param resetShading (float) if showReset is true, this float specifies the intensity of the reset backgro...
def mmGetCellActivityPlot(self, title='', showReset=False, resetShading=0.25, activityType='activeCells'):
if (activityType == 'predictedActiveCells'): self._mmComputeTransitionTraces() cellTrace = copy.deepcopy(self._mmTraces[activityType].data) for i in xrange(len(cellTrace)): cellTrace[i] = self.getCellIndices(cellTrace[i]) return self.mmGetCellTracePlot(cellTrace, self.numberOfCells(), ac...
def __init__(self, monitor, title, data):
    """Compute summary statistics over a list of numbers.

    @param monitor (MonitorMixinBase) Monitor Mixin instance that generated
                                      this trace
    @param title   (string)           Title
    @param data    (list)             List of numbers to compute metric from
    """
    self.monitor = monitor
    self.title = title
    # Statistics start unset; _computeStats fills them in from the data.
    self.min = None
    self.max = None
    self.sum = None
    self.mean = None
    self.standardDeviation = None
    self._computeStats(data)
def __init__(self, monitor, title, show=True):
    """Set up a matplotlib figure for this plot.

    @param monitor (MonitorMixinBase) Monitor Mixin instance that generated
                                      this plot
    @param title   (string)           Plot title
    @param show    (bool)             Whether to display the figure immediately
    """
    self._monitor = monitor
    self._title = title
    self._show = show
    self._fig = self._initFigure()
    if self._show:
        # Interactive mode so later draw() calls refresh the open window.
        plt.ion()
        plt.show()
def addGraph(self, data, position=111, xlabel=None, ylabel=None):
    """Add a line graph to the plot's figure.

    @param data     See matplotlib.Axes.plot documentation.
    @param position A 3-digit number: first two digits define a 2D subplot
                    grid, the last the grid slot for this subplot.
    @param xlabel   Text for the x-axis.
    @param ylabel   Text for the y-axis.
    """
    axes = self._addBase(position, xlabel=xlabel, ylabel=ylabel)
    axes.plot(data)
    plt.draw()
def addHistogram(self, data, position=111, xlabel=None, ylabel=None, bins=None):
    """Add a histogram to the plot's figure.

    @param data     See matplotlib.Axes.hist documentation.
    @param position A 3-digit number: first two digits define a 2D subplot
                    grid, the last the grid slot for this subplot.
    @param xlabel   Text for the x-axis.
    @param ylabel   Text for the y-axis.
    @param bins     Bin specification forwarded to Axes.hist.
    """
    axes = self._addBase(position, xlabel=xlabel, ylabel=ylabel)
    axes.hist(data, bins=bins, color='green', alpha=0.8)
    plt.draw()
'Adds an image to the plot\'s figure. @param data a 2D array. See matplotlib.Axes.imshow documentation. @param position A 3-digit number. The first two digits define a 2D grid where subplots may be added. The final digit specifies the nth grid location for the added subplot @param xlabel text to be displayed on the x-a...
def add2DArray(self, data, position=111, xlabel=None, ylabel=None, cmap=None, aspect='auto', interpolation='nearest', name=None):
if (cmap is None): cmap = cm.Greys ax = self._addBase(position, xlabel=xlabel, ylabel=ylabel) ax.imshow(data, cmap=cmap, aspect=aspect, interpolation=interpolation) if self._show: plt.draw() if (name is not None): if (not os.path.exists('log')): os.mkdir('log') ...
def _addBase(self, position, xlabel=None, ylabel=None):
    """Add a subplot to the plot's figure at the given grid position.

    @param position A 3-digit number: first two digits define a 2D subplot
                    grid, the last the grid slot for this subplot.
    @param xlabel   Text for the x-axis.
    @param ylabel   Text for the y-axis.
    @return (matplotlib.Axes) the newly created axes
    """
    subplot = self._fig.add_subplot(position)
    subplot.set_xlabel(xlabel)
    subplot.set_ylabel(ylabel)
    return subplot
def __init__(self, *args, **kwargs):
    """Initialize the monitor mixin and clear its history.

    Note: If you set the kwarg "mmName", then pretty-printing of traces and
    metrics will include the name you specify as a tag before every title.
    """
    # pop() replaces the original get()-then-del pair: one lookup, same
    # result (None when the key is absent), and the key is removed before
    # forwarding kwargs to the superclass.
    self.mmName = kwargs.pop('mmName', None)
    super(MonitorMixinBase, self).__init__(*args, **kwargs)
    self._mmTraces = None
    self._mmData = None
    self.mmClearHistory()
def mmClearHistory(self):
    """Clear the stored history of traces and data."""
    self._mmTraces = dict()
    self._mmData = dict()
'Returns pretty-printed table of traces. @param traces (list) Traces to print in table @param breakOnResets (BoolsTrace) Trace of resets to break table on @return (string) Pretty-printed table of traces.'
@staticmethod def mmPrettyPrintTraces(traces, breakOnResets=None):
assert (len(traces) > 0), 'No traces found' table = PrettyTable((['#'] + [trace.prettyPrintTitle() for trace in traces])) for i in xrange(len(traces[0].data)): if (breakOnResets and breakOnResets.data[i]): table.add_row((['<reset>'] * (len(traces) + 1))) table.add_row(([i] ...
@staticmethod
def mmPrettyPrintMetrics(metrics, sigFigs=5):
    """Return a pretty-printed table of metrics.

    @param metrics (list) Metrics to print in the table
    @param sigFigs (int)  Number of significant figures to print
    @return (string) Pretty-printed table of metrics.
    """
    assert len(metrics) > 0, 'No metrics found'
    columns = ['Metric', 'mean', 'standard deviation', 'min', 'max', 'sum']
    table = PrettyTable(columns)
    for metric in metrics:
        row = [metric.prettyPrintTitle()] + metric.getStats()
        table.add_row(row)
    return table.get_string().encode('utf-8')
def mmGetDefaultTraces(self, verbosity=1):
    """Return the list of default traces. (To be overridden.)

    @param verbosity (int) Verbosity level
    @return (list) Default traces
    """
    return list()
def mmGetDefaultMetrics(self, verbosity=1):
    """Return the list of default metrics. (To be overridden.)

    @param verbosity (int) Verbosity level
    @return (list) Default metrics
    """
    return list()
'Returns plot of the cell activity. Note that if many timesteps of activities are input, matplotlib\'s image interpolation may omit activities (columns in the image). @param cellTrace (list) a temporally ordered list of sets of cell activities @param cellCount (int) number of cells in the space being rendere...
def mmGetCellTracePlot(self, cellTrace, cellCount, activityType, title='', showReset=False, resetShading=0.25):
plot = Plot(self, title) resetTrace = self.mmGetTraceResets().data data = numpy.zeros((cellCount, 1)) for i in xrange(len(cellTrace)): if (showReset and resetTrace[i]): activity = (numpy.ones((cellCount, 1)) * resetShading) else: activity = numpy.zeros((cellCount,...
'Translate parameters and initialize member variables specific to TemporalMemory'
def __init__(self, columnDimensions=(2048,), cellsPerColumn=32, activationThreshold=13, initialPermanence=0.21, connectedPermanence=0.5, minThreshold=10, maxNewSynapseCount=20, permanenceIncrement=0.1, permanenceDecrement=0.1, seed=42):
numberOfCols = 1 for n in columnDimensions: numberOfCols *= n super(TemporalMemoryShim, self).__init__(numberOfCols=numberOfCols, cellsPerColumn=cellsPerColumn, initialPerm=initialPermanence, connectedPerm=connectedPermanence, minThreshold=minThreshold, newSynapseCount=maxNewSynapseCount, permanence...
def compute(self, activeColumns, learn=True):
    """Feed one input record through the TM, inferring and (optionally)
    learning, then update member variables with the new state.

    @param activeColumns (set) Indices of active columns in `t`
    @param learn (bool) Whether learning is enabled
    """
    # Densify the active-column set into a binary bottom-up input vector.
    bottomUpInput = numpy.zeros(self.numberOfCols, dtype=dtype)
    bottomUpInput[list(activeColumns)] = 1
    super(TemporalMemoryShim, self).compute(bottomUpInput,
                                            enableLearn=learn,
                                            enableInference=True)
    self.predictiveCells = set(numpy.flatnonzero(self.getPredictedState()))
'Process one input sample. This method is called by outer loop code outside the nupic-engine. We use this instead of the nupic engine compute() because our inputs and outputs aren\'t fixed size vectors of reals. :param recordNum: Record number of this input pattern. Record numbers normally increase sequentially by 1 ea...
def compute(self, recordNum, patternNZ, classification, learn, infer):
if (self.verbosity >= 1): print ' learn:', learn print ' recordNum:', recordNum print (' patternNZ (%d):' % len(patternNZ)), patternNZ print ' classificationIn:', classification if (len(self._patternNZHistory) > 0): if (recordNum < self._pat...
'Return the inference value from one input sample. The actual learning happens in compute(). :param patternNZ: list of the active indices from the output below :param classification: dict of the classification information: bucketIdx: index of the encoder bucket actValue: actual value going into the encoder :return: ...
def infer(self, patternNZ, actValueList):
if ((self.steps[0] == 0) or (actValueList is None)): defaultValue = 0 else: defaultValue = actValueList[0] actValues = [(x if (x is not None) else defaultValue) for x in self._actualValues] retval = {'actualValues': actValues} for nSteps in self.steps: predictDist = self.infe...
def inferSingleStep(self, patternNZ, weightMatrix):
    """Perform inference for a single step.

    Given an SDR input and a weight matrix, return a predicted distribution.

    :param patternNZ: list of the active indices from the output below
    :param weightMatrix: numpy array of the weight matrix
    :return: numpy array of the predicted class label distribution
    """
    outputActivation = weightMatrix[patternNZ].sum(axis=0)
    # Subtract the max before exponentiating: softmax is shift-invariant,
    # and this prevents float overflow (inf/nan) for large activations.
    expOutputActivation = numpy.exp(outputActivation - outputActivation.max())
    predictDist = expOutputActivation / numpy.sum(expOutputActivation)
    return predictDist
'Calculate error signal :param bucketIdxList: list of encoder buckets :return: dict containing error. The key is the number of steps The value is a numpy array of error at the output layer'
def _calculateError(self, recordNum, bucketIdxList):
error = dict() targetDist = numpy.zeros((self._maxBucketIdx + 1)) numCategories = len(bucketIdxList) for bucketIdx in bucketIdxList: targetDist[bucketIdx] = (1.0 / numCategories) for (learnRecordNum, learnPatternNZ) in self._patternNZHistory: nSteps = (recordNum - learnRecordNum) ...
def __eq__(self, other):
    """Equality for unit testing.

    The flatIdx is not designed to be consistent after serialize /
    deserialize, and the synapses might not enumerate in the same order, so
    compare synapses sorted by creation ordinal instead.
    """
    byOrdinal = lambda syn: syn._ordinal
    if self.cell != other.cell:
        return False
    return (sorted(self._synapses, key=byOrdinal) ==
            sorted(other._synapses, key=byOrdinal))
def __eq__(self, other):
    """Equality for unit testing.

    Allows floating point differences (within EPSILON) for synapse
    permanence.
    """
    if self.segment.cell != other.segment.cell:
        return False
    if self.presynapticCell != other.presynapticCell:
        return False
    return abs(self.permanence - other.permanence) < EPSILON
def segmentsForCell(self, cell):
    """Return the segments that belong to a cell.

    :param cell: (int) Cell index
    :returns: (list) Segment objects representing segments on the given cell.
    """
    cellData = self._cells[cell]
    return cellData._segments
def synapsesForSegment(self, segment):
    """Return the synapses on a segment.

    :param segment: Segment object
    :returns: (set) Synapse objects on the given segment.
    """
    synapses = segment._synapses
    return synapses
def dataForSynapse(self, synapse):
    """Return the data for a synapse.

    .. note:: Exists to match the interface of the C++ Connections so tests
              and tools can inspect connections through a common interface.
              In this Python implementation the synapse object is its own
              data, so it is returned unchanged.

    :param synapse: (:class:`Synapse`)
    :returns: Synapse data
    """
    return synapse
def dataForSegment(self, segment):
    """Return the data for a segment.

    .. note:: Exists to match the interface of the C++ Connections so tests
              and tools can inspect connections through a common interface.
              In this Python implementation the segment object is its own
              data, so it is returned unchanged.

    :param segment: (:class:`Segment`)
    :returns: segment data
    """
    return segment
def getSegment(self, cell, idx):
    """Return the segment with index ``idx`` on the specified cell.

    :param cell: (int) cell index
    :param idx: (int) segment index on a cell
    :returns: (:class:`Segment`) Segment object with index idx on the cell
    """
    segments = self._cells[cell]._segments
    return segments[idx]
def segmentForFlatIdx(self, flatIdx):
    """Return the segment with the specified flatIdx.

    :param flatIdx: (int) The segment's flattened list index.
    :returns: (:class:`Segment`)
    """
    table = self._segmentForFlatIdx
    return table[flatIdx]
def segmentFlatListLength(self):
    """Return the length needed for a list holding a value per flatIdx.

    :returns: (int) Required list length
    """
    # The next unassigned flat index equals the list length required.
    return self._nextFlatIdx
def synapsesForPresynapticCell(self, presynapticCell):
    """Return the synapses whose presynaptic source is the given cell.

    :param presynapticCell: (int) Source cell index
    :returns: (set) :class:`Synapse` objects
    """
    mapping = self._synapsesForPresynapticCell
    return mapping[presynapticCell]
'Adds a new segment on a cell. :param cell: (int) Cell index :returns: (int) New segment index'
def createSegment(self, cell):
cellData = self._cells[cell] if (len(self._freeFlatIdxs) > 0): flatIdx = self._freeFlatIdxs.pop() else: flatIdx = self._nextFlatIdx self._segmentForFlatIdx.append(None) self._nextFlatIdx += 1 ordinal = self._nextSegmentOrdinal self._nextSegmentOrdinal += 1 segment...
'Destroys a segment. :param segment: (:class:`Segment`) representing the segment to be destroyed.'
def destroySegment(self, segment):
for synapse in segment._synapses: self._removeSynapseFromPresynapticMap(synapse) self._numSynapses -= len(segment._synapses) segments = self._cells[segment.cell]._segments i = segments.index(segment) del segments[i] self._freeFlatIdxs.append(segment.flatIdx) self._segmentForFlatIdx[s...
def createSynapse(self, segment, presynapticCell, permanence):
    """Create a new synapse on a segment.

    :param segment: (:class:`Segment`) Segment object for synapse to be
                    synapsed to.
    :param presynapticCell: (int) Source cell index.
    :param permanence: (float) Initial permanence of synapse.
    :returns: (:class:`Synapse`) created synapse
    """
    # (Removed unused local `idx = len(segment._synapses)` from the
    # original; the value was never read.)
    synapse = Synapse(segment, presynapticCell, permanence,
                      self._nextSynapseOrdinal)
    self._nextSynapseOrdinal += 1
    segment._synapses.add(synapse)
    self._synapsesForPresynapticCell[presynapticCell].add(synapse)
    self._numSynapses += 1
    return synapse
def destroySynapse(self, synapse):
    """Destroy a synapse, unregistering it from all bookkeeping structures.

    :param synapse: (:class:`Synapse`) synapse to destroy
    """
    self._numSynapses -= 1
    # Drop it from the presynaptic lookup map first, then from its segment.
    self._removeSynapseFromPresynapticMap(synapse)
    owner = synapse.segment
    owner._synapses.remove(synapse)
def updateSynapsePermanence(self, synapse, permanence):
    """Update the permanence of a synapse.

    :param synapse: (:class:`Synapse`) to be updated.
    :param permanence: (float) New permanence.
    """
    synapse.permanence = permanence
'Compute each segment\'s number of active synapses for a given input. In the returned lists, a segment\'s active synapse count is stored at index ``segment.flatIdx``. :param activePresynapticCells: (iter) Active cells. :param connectedPermanence: (float) Permanence threshold for a synapse to be considered connected :re...
def computeActivity(self, activePresynapticCells, connectedPermanence):
numActiveConnectedSynapsesForSegment = ([0] * self._nextFlatIdx) numActivePotentialSynapsesForSegment = ([0] * self._nextFlatIdx) threshold = (connectedPermanence - EPSILON) for cell in activePresynapticCells: for synapse in self._synapsesForPresynapticCell[cell]: flatIdx = synapse.s...
def numSegments(self, cell=None):
    """Return the number of segments.

    :param cell: (int) Optional cell index to count segments on one cell.
    :returns: (int) Number of segments on all cells if cell is not
              specified, or on the specified cell.
    """
    if cell is None:
        # Live segments = all flat indices ever assigned, minus recycled ones.
        return self._nextFlatIdx - len(self._freeFlatIdxs)
    return len(self._cells[cell]._segments)
def numSynapses(self, segment=None):
    """Return the number of synapses.

    :param segment: (:class:`Segment`) Optional segment to count synapses on.
    :returns: (int) Number of synapses on all segments if segment is not
              specified, or on the specified segment.
    """
    if segment is None:
        return self._numSynapses
    return len(segment._synapses)
def segmentPositionSortKey(self, segment):
    """Return a numeric sort key for a segment, usable with ``sorted()``.

    Orders primarily by cell, breaking ties by creation ordinal (scaled
    into [0, 1) so it never crosses a cell boundary).

    :param segment: (:class:`Segment`) within this :class:`Connections`.
    :returns: (float) A numeric key for sorting.
    """
    tieBreaker = segment._ordinal / float(self._nextSegmentOrdinal)
    return segment.cell + tieBreaker
'Writes serialized data to proto object. :param proto: (DynamicStructBuilder) Proto object'
def write(self, proto):
protoCells = proto.init('cells', self.numCells) for i in xrange(self.numCells): segments = self._cells[i]._segments protoSegments = protoCells[i].init('segments', len(segments)) for (j, segment) in enumerate(segments): synapses = segment._synapses protoSynapses = ...
'Reads deserialized data from proto object :param proto: (DynamicStructBuilder) Proto object :returns: (:class:`Connections`) instance'
@classmethod def read(cls, proto):
protoCells = proto.cells connections = cls(len(protoCells)) for (cellIdx, protoCell) in enumerate(protoCells): protoCell = protoCells[cellIdx] protoSegments = protoCell.segments connections._cells[cellIdx] = CellData() segments = connections._cells[cellIdx]._segments ...
'Equality operator for Connections instances. Checks if two instances are functionally identical :param other: (:class:`Connections`) Connections instance to compare to'
def __eq__(self, other):
for i in xrange(self.numCells): segments = self._cells[i]._segments otherSegments = other._cells[i]._segments if (len(segments) != len(otherSegments)): return False for j in xrange(len(segments)): segment = segments[j] otherSegment = otherSegments[...
def __ne__(self, other):
    """Non-equality operator for Connections instances.

    :param other: (:class:`Connections`) Connections instance to compare to
    """
    equal = self.__eq__(other)
    return not equal
def _getEphemeralMembers(self):
    """Return the member-variable names that need not be serialized.

    This base implementation has none; subclasses may override.
    """
    return list()
'Initialize all ephemeral members after being restored to a pickled state.'
def _initEphemerals(self):
self.segmentUpdates = {} self.resetStats() self._prevInfPatterns = [] self._prevLrnPatterns = [] stateShape = (self.numberOfCols, self.cellsPerColumn) self.lrnActiveState = {} self.lrnActiveState['t'] = numpy.zeros(stateShape, dtype='int8') self.lrnActiveState['t-1'] = numpy.zeros(stateS...
def __getstate__(self):
    """@internal Return serializable state.

    Returns a copy of ``__dict__`` with all "ephemeral" members stripped
    out — those that should not be stored in any persistent file (e.g. a
    NuPIC network XML file). The random generator is stored in a
    pickle-friendly form and the TM version is recorded for validation on
    restore.
    """
    state = dict(self.__dict__)
    for name in self._getEphemeralMembers():
        state.pop(name, None)
    state['_random'] = self._getRandomState()
    state['version'] = TM_VERSION
    return state
def __setstate__(self, state):
    """@internal Restore our state from a serialized state dict."""
    self._setRandomState(state['_random'])
    del state['_random']
    # Reject state saved by a different TM version.
    assert state.pop('version') == TM_VERSION
    self.__dict__.update(state)
'Populate serialization proto instance. :param proto: (BacktrackingTMProto) the proto instance to populate'
def write(self, proto):
proto.version = TM_VERSION self._random.write(proto.random) proto.numberOfCols = self.numberOfCols proto.cellsPerColumn = self.cellsPerColumn proto.initialPerm = float(self.initialPerm) proto.connectedPerm = float(self.connectedPerm) proto.minThreshold = self.minThreshold proto.newSynaps...
'Deserialize from proto instance. :param proto: (BacktrackingTMProto) the proto instance to read from'
@classmethod def read(cls, proto):
assert (proto.version == TM_VERSION) obj = object.__new__(cls) obj._random = Random() obj._random.read(proto.random) obj.numberOfCols = int(proto.numberOfCols) obj.cellsPerColumn = int(proto.cellsPerColumn) obj._numberOfCells = (obj.numberOfCols * obj.cellsPerColumn) obj.initialPerm = nu...
def __getattr__(self, name):
    """@internal Catch accesses to attributes that don't exist.

    Only called when normal attribute lookup fails; delegates to the
    superclass and re-raises with a TM-specific message if that also fails.
    """
    try:
        return super(BacktrackingTM, self).__getattr__(name)
    except AttributeError:
        raise AttributeError("'TM' object has no attribute '%s'" % name)
def saveToFile(self, filePath):
    """No-op stub; implemented in
    :meth:`nupic.algorithms.backtracking_tm_cpp.BacktrackingTMCPP.saveToFile`.
    """
    return None
def loadFromFile(self, filePath):
    """No-op stub; implemented in
    :meth:`nupic.algorithms.backtracking_tm_cpp.BacktrackingTMCPP.loadFromFile`.
    """
    return None
def _getRandomState(self):
    """@internal Return the random number generator state.

    Used during unit testing to generate repeatable results.
    """
    serialized = pickle.dumps(self._random)
    return serialized
def _setRandomState(self, state):
    """@internal Restore the random number generator state.

    Used during unit testing to generate repeatable results.
    """
    self._random = pickle.loads(state)
'Reset the state of all cells. This is normally used between sequences while training. All internal states are reset to 0.'
def reset(self):
if (self.verbosity >= 3): print '\n==== RESET =====' self.lrnActiveState['t-1'].fill(0) self.lrnActiveState['t'].fill(0) self.lrnPredictedState['t-1'].fill(0) self.lrnPredictedState['t'].fill(0) self.infActiveState['t-1'].fill(0) self.infActiveState['t'].fill(0) self.infPre...
'Reset the learning and inference stats. This will usually be called by user code at the start of each inference run (for a particular data set).'
def resetStats(self):
self._stats = dict() self._internalStats = dict() self._internalStats['nInfersSinceReset'] = 0 self._internalStats['nPredictions'] = 0 self._internalStats['curPredictionScore'] = 0 self._internalStats['curPredictionScore2'] = 0 self._internalStats['predictionScoreTotal2'] = 0 self._inter...
'Return the current learning and inference stats. This returns a dict containing all the learning and inference stats we have collected since the last :meth:`resetStats` call. If :class:`BacktrackingTM` ``collectStats`` parameter is False, then None is returned. :returns: (dict) The following keys are returned in the d...
def getStats(self):
if (not self.collectStats): return None self._stats['nPredictions'] = self._internalStats['nPredictions'] self._stats['curMissing'] = self._internalStats['curMissing'] self._stats['curExtra'] = self._internalStats['curExtra'] self._stats['totalMissing'] = self._internalStats['totalMissing'] ...
'Called at the end of learning and inference, this routine will update a number of stats in our _internalStats dictionary, including our computed prediction score. :param stats internal stats dictionary :param bottomUpNZ list of the active bottom-up inputs :param predictedState The columns we predict...
def _updateStatsInferEnd(self, stats, bottomUpNZ, predictedState, colConfidence):
if (not self.collectStats): return stats['nInfersSinceReset'] += 1 (numExtra2, numMissing2, confidences2) = self._checkPrediction(patternNZs=[bottomUpNZ], output=predictedState, colConfidence=colConfidence) (predictionScore, positivePredictionScore, negativePredictionScore) = confidences2[0] ...
'Print an integer array that is the same shape as activeState. :param aState: TODO: document'
def printState(self, aState):
def formatRow(var, i): s = '' for c in range(self.numberOfCols): if ((c > 0) and ((c % 10) == 0)): s += ' ' s += str(var[(c, i)]) s += ' ' return s for i in xrange(self.cellsPerColumn): print formatRow(aState, i)
'Print a floating point array that is the same shape as activeState. :param aState: TODO: document :param maxCols: TODO: document'
def printConfidence(self, aState, maxCols=20):
def formatFPRow(var, i): s = '' for c in range(min(maxCols, self.numberOfCols)): if ((c > 0) and ((c % 10) == 0)): s += ' ' s += (' %5.3f' % var[(c, i)]) s += ' ' return s for i in xrange(self.cellsPerColumn): print...
'Print up to maxCols number from a flat floating point array. :param aState: TODO: document :param maxCols: TODO: document'
def printColConfidence(self, aState, maxCols=20):
def formatFPRow(var): s = '' for c in range(min(maxCols, self.numberOfCols)): if ((c > 0) and ((c % 10) == 0)): s += ' ' s += (' %5.3f' % var[c]) s += ' ' return s print formatFPRow(aState)
'TODO: document :param printPrevious: :param printLearnState: :return:'
def printStates(self, printPrevious=True, printLearnState=True):
def formatRow(var, i): s = '' for c in range(self.numberOfCols): if ((c > 0) and ((c % 10) == 0)): s += ' ' s += str(var[(c, i)]) s += ' ' return s print '\nInference Active state' for i in xrange(self.cellsPerColumn): ...
'TODO: document :param y: :return:'
def printOutput(self, y):
print 'Output' for i in xrange(self.cellsPerColumn): for c in xrange(self.numberOfCols): print int(y[(c, i)]), print
'TODO: document :param x: :return:'
def printInput(self, x):
print 'Input' for c in xrange(self.numberOfCols): print int(x[c]), print
'Print the parameter settings for the TM.'
def printParameters(self):
print 'numberOfCols=', self.numberOfCols print 'cellsPerColumn=', self.cellsPerColumn print 'minThreshold=', self.minThreshold print 'newSynapseCount=', self.newSynapseCount print 'activationThreshold=', self.activationThreshold print print 'initialPerm=', self.initialPerm print 'connect...
'Print the list of ``[column, cellIdx]`` indices for each of the active cells in state. :param state: TODO: document :param andValues: TODO: document'
def printActiveIndices(self, state, andValues=False):
if (len(state.shape) == 2): (cols, cellIdxs) = state.nonzero() else: cols = state.nonzero()[0] cellIdxs = numpy.zeros(len(cols)) if (len(cols) == 0): print 'NONE' return prevCol = (-1) for (col, cellIdx) in zip(cols, cellIdxs): if (col != prevCol): ...
'Called at the end of inference to print out various diagnostic information based on the current verbosity level. :param output: TODO: document :param learn: TODO: document'
def printComputeEnd(self, output, learn=False):
if (self.verbosity >= 3): print '----- computeEnd summary: ' print 'learn:', learn print ('numBurstingCols: %s, ' % self.infActiveState['t'].min(axis=1).sum()), print ('curPredScore2: %s, ' % self._internalStats['curPredictionScore2']), print ('curFalsePo...
'TODO: document :return:'
def printSegmentUpdates(self):
print '=== SEGMENT UPDATES ===, Num = ', len(self.segmentUpdates) for (key, updateList) in self.segmentUpdates.iteritems(): (c, i) = (key[0], key[1]) print c, i, updateList
'TODO: document :param c: :param i: :param onlyActiveSegments: :return:'
def printCell(self, c, i, onlyActiveSegments=False):
if (len(self.cells[c][i]) > 0): print 'Column', c, 'Cell', i, ':', print len(self.cells[c][i]), 'segment(s)' for (j, s) in enumerate(self.cells[c][i]): isActive = self._isSegmentActive(s, self.infActiveState['t']) if ((not onlyActiveSegments) or isActive): ...
'TODO: document :param predictedOnly: :return:'
def printCells(self, predictedOnly=False):
if predictedOnly: print '--- PREDICTED CELLS ---' else: print '--- ALL CELLS ---' print 'Activation threshold=', self.activationThreshold, print 'min threshold=', self.minThreshold, print 'connected perm=', self.connectedPerm for c in xrange(self.number...
def getNumSegmentsInCell(self, c, i):
    """Return the number of segments in cell (c, i).

    :param c: (int) column index
    :param i: (int) cell index within column
    :returns: (int) the total number of segments in cell (c, i)
    """
    segments = self.cells[c][i]
    return len(segments)
def getNumSynapses(self):
    """Return the total number of synapses.

    :returns: (int) the total number of synapses
    """
    # getSegmentInfo() returns (numSegments, numSynapses, ...).
    return self.getSegmentInfo()[1]
def getNumSynapsesPerSegmentAvg(self):
    """Return the average number of synapses per segment.

    :returns: (float) the average number of synapses per segment
    """
    # max(1, ...) guards against division by zero when no segments exist.
    segmentCount = max(1, self.getNumSegments())
    return float(self.getNumSynapses()) / segmentCount
def getNumSegments(self):
    """Return the total number of segments.

    :returns: (int) the total number of segments
    """
    # getSegmentInfo() returns (numSegments, numSynapses, ...).
    return self.getSegmentInfo()[0]
def getNumCells(self):
    """Return the total number of cells.

    :returns: (int) the total number of cells
    """
    totalCells = self.numberOfCols * self.cellsPerColumn
    return totalCells
def getSegmentOnCell(self, c, i, segIdx):
    """Return a list describing segment ``segIdx`` on cell (c, i).

    :param c: (int) column index
    :param i: (int) cell index in column
    :param segIdx: (int) segment index to match
    :returns: (list) First element is the segment summary
        [segmentID, sequenceSegmentFlag, positiveActivations,
        totalActivations, lastActiveIteration, lastPosDutyCycle,
        lastPosDutyCycleIteration]; remaining elements are the synapses.
    """
    seg = self.cells[c][i][segIdx]
    summary = [seg.segID, seg.isSequenceSeg, seg.positiveActivations,
               seg.totalActivations, seg.lastActiveIteration,
               seg._lastPosDutyCycle, seg._lastPosDutyCycleIteration]
    retlist = [summary]
    retlist.extend(seg.syns)
    return retlist
def _addToSegmentUpdates(self, c, i, segUpdate):
    """Store a dated potential segment update.

    The "date" (iteration index) is used later to determine whether the
    update is too old and should be forgotten, controlled by parameter
    ``segUpdateValidDuration``.

    :param c: (int) column index
    :param i: (int) cell index within column
    :param segUpdate: segment update to store (ignored if None or empty)
    """
    if segUpdate is None or len(segUpdate.activeSynapses) == 0:
        return
    # setdefault() replaces the deprecated dict.has_key() check-then-branch:
    # a single lookup, identical behavior, and Python-3 compatible.
    key = (c, i)
    self.segmentUpdates.setdefault(key, []).append(
        (self.lrnIterationIdx, segUpdate))
def _removeSegmentUpdate(self, updateInfo):
    """Remove a segment update (when it expires or is processed).

    :param updateInfo: (tuple) (creationDate, SegmentUpdate)
    """
    _creationDate, segUpdate = updateInfo
    key = (segUpdate.columnIdx, segUpdate.cellIdx)
    self.segmentUpdates[key].remove(updateInfo)
'Computes output for both learning and inference. In both cases, the output is the boolean OR of ``activeState`` and ``predictedState`` at ``t``. Stores ``currentOutput`` for ``checkPrediction``. :returns: TODO: document'
def _computeOutput(self):
if (self.outputType == 'activeState1CellPerCol'): mostActiveCellPerCol = self.cellConfidence['t'].argmax(axis=1) self.currentOutput = numpy.zeros(self.infActiveState['t'].shape, dtype='float32') numCols = self.currentOutput.shape[0] self.currentOutput[(xrange(numCols), mostActiveCell...
def _getActiveState(self):
    """Return the current active state as a flat float32 array.

    Called by the node to obtain the sequence output of the TM.
    """
    flattened = self.infActiveState['t'].reshape(-1)
    return flattened.astype('float32')
def getPredictedState(self):
    """
    :returns: numpy array of predicted cells, representing the current
        predicted state. ``predictedCells[c][i]`` represents the state of
        the i'th cell in the c'th column.
    """
    predicted = self.infPredictedState
    return predicted['t']
'This function gives the future predictions for <nSteps> timesteps starting from the current TM state. The TM is returned to its original state at the end before returning. 1. We save the TM state. 2. Loop for nSteps a. Turn-on with lateral support from the current active cells b. Set the predicted cells as the next st...
def predict(self, nSteps):
pristineTPDynamicState = self._getTPDynamicState() assert (nSteps > 0) multiStepColumnPredictions = numpy.zeros((nSteps, self.numberOfCols), dtype='float32') step = 0 while True: multiStepColumnPredictions[step, :] = self.topDownCompute() if (step == (nSteps - 1)): break ...
'Any newly added dynamic states in the TM should be added to this list. Parameters: retval: The list of names of TM dynamic state variables.'
def _getTPDynamicStateVariableNames(self):
return ['infActiveState', 'infPredictedState', 'lrnActiveState', 'lrnPredictedState', 'cellConfidence', 'colConfidence']
'Parameters: retval: A dict with all the dynamic state variable names as keys and their values at this instant as values.'
def _getTPDynamicState(self):
tpDynamicState = dict() for variableName in self._getTPDynamicStateVariableNames(): tpDynamicState[variableName] = copy.deepcopy(self.__dict__[variableName]) return tpDynamicState
'Set all the dynamic state variables from the <tpDynamicState> dict. <tpDynamicState> dict has all the dynamic state variable names as keys and their values at this instant as values. We set the dynamic state variables in the tm object with these items.'
def _setTPDynamicState(self, tpDynamicState):
for variableName in self._getTPDynamicStateVariableNames(): self.__dict__[variableName] = tpDynamicState.pop(variableName)
'Update our moving average of learned sequence length.'
def _updateAvgLearnedSeqLength(self, prevSeqLength):
if (self.lrnIterationIdx < 100): alpha = 0.5 else: alpha = 0.1 self.avgLearnedSeqLength = (((1.0 - alpha) * self.avgLearnedSeqLength) + (alpha * prevSeqLength))
def getAvgLearnedSeqLength(self):
    """
    :returns: (float) moving average of learned sequence length, as
        maintained by ``_updateAvgLearnedSeqLength``
    """
    return self.avgLearnedSeqLength
'This "backtracks" our inference state, trying to see if we can lock onto the current set of inputs by assuming the sequence started up to N steps ago on start cells. This will adjust @ref infActiveState[\'t\'] if it does manage to lock on to a sequence that started earlier. It will also compute infPredictedState[\'t\'...
def _inferBacktrack(self, activeColumns):
numPrevPatterns = len(self._prevInfPatterns) if (numPrevPatterns <= 0): return currentTimeStepsOffset = (numPrevPatterns - 1) self.infActiveState['backup'][:, :] = self.infActiveState['t'][:, :] self.infPredictedState['backup'][:, :] = self.infPredictedState['t-1'][:, :] badPatterns = []...
'Update the inference active state from the last set of predictions and the current bottom-up. This looks at: - ``infPredictedState[\'t-1\']`` This modifies: - ``infActiveState[\'t\']`` :param activeColumns: (list) active bottom-ups :param useStartCells: (bool) If true, ignore previous predictions and simply turn on th...
def _inferPhase1(self, activeColumns, useStartCells):
self.infActiveState['t'].fill(0) numPredictedColumns = 0 if useStartCells: for c in activeColumns: self.infActiveState['t'][(c, 0)] = 1 else: for c in activeColumns: predictingCells = numpy.where((self.infPredictedState['t-1'][c] == 1))[0] numPredictin...
'Phase 2 for the inference state. The computes the predicted state, then checks to insure that the predicted state is not over-saturated, i.e. look too close like a burst. This indicates that there were so many separate paths learned from the current input columns to the predicted input columns that bursting on the cur...
def _inferPhase2(self):
self.infPredictedState['t'].fill(0) self.cellConfidence['t'].fill(0) self.colConfidence['t'].fill(0) for c in xrange(self.numberOfCols): for i in xrange(self.cellsPerColumn): for s in self.cells[c][i]: numActiveSyns = self._getSegmentActivityLevel(s, self.infActiveSta...
'Update the inference state. Called from :meth:`compute` on every iteration. :param activeColumns: (list) active column indices.'
def _updateInferenceState(self, activeColumns):
self.infActiveState['t-1'][:, :] = self.infActiveState['t'][:, :] self.infPredictedState['t-1'][:, :] = self.infPredictedState['t'][:, :] self.cellConfidence['t-1'][:, :] = self.cellConfidence['t'][:, :] self.colConfidence['t-1'][:] = self.colConfidence['t'][:] if (self.maxInfBacktrack > 0): ...
'A utility method called from learnBacktrack. This will backtrack starting from the given startOffset in our prevLrnPatterns queue. It returns True if the backtrack was successful and we managed to get predictions all the way up to the current time step. If readOnly, then no segments are updated or modified, otherwise,...
def _learnBacktrackFrom(self, startOffset, readOnly=True):
numPrevPatterns = len(self._prevLrnPatterns) currentTimeStepsOffset = (numPrevPatterns - 1) if (not readOnly): self.segmentUpdates = {} if (self.verbosity >= 3): if readOnly: print (('Trying to lock-on using startCell state from %d steps ago:' % ((n...
'This "backtracks" our learning state, trying to see if we can lock onto the current set of inputs by assuming the sequence started up to N steps ago on start cells. This will adjust @ref lrnActiveState[\'t\'] if it does manage to lock on to a sequence that started earlier. :returns: >0 if we managed to lock o...
def _learnBacktrack(self):
numPrevPatterns = (len(self._prevLrnPatterns) - 1) if (numPrevPatterns <= 0): if (self.verbosity >= 3): print 'lrnBacktrack: No available history to backtrack from' return False badPatterns = [] inSequence = False for startOffset in range(0, numPrevPatte...
'Compute the learning active state given the predicted state and the bottom-up input. :param activeColumns list of active bottom-ups :param readOnly True if being called from backtracking logic. This tells us not to increment any segment duty cycles or queue up any updates. :returns: True if the current input was ...
def _learnPhase1(self, activeColumns, readOnly=False):
self.lrnActiveState['t'].fill(0) numUnpredictedColumns = 0 for c in activeColumns: predictingCells = numpy.where((self.lrnPredictedState['t-1'][c] == 1))[0] numPredictedCells = len(predictingCells) assert (numPredictedCells <= 1) if (numPredictedCells == 1): i = p...
'Compute the predicted segments given the current set of active cells. :param readOnly True if being called from backtracking logic. This tells us not to increment any segment duty cycles or queue up any updates. This computes the lrnPredictedState[\'t\'] and queues up any segments that became active (and the lis...
def _learnPhase2(self, readOnly=False):
self.lrnPredictedState['t'].fill(0) for c in xrange(self.numberOfCols): (i, s, numActive) = self._getBestMatchingCell(c, self.lrnActiveState['t'], minThreshold=self.activationThreshold) if (i is None): continue self.lrnPredictedState['t'][(c, i)] = 1 if readOnly: ...