desc
stringlengths
3
26.7k
decl
stringlengths
11
7.89k
bodies
stringlengths
8
553k
'inputRecord - dict containing the input to the sensor Return a \'SensorInput\' object, which represents the \'parsed\' representation of the input record'
def _getSensorInputRecord(self, inputRecord):
sensor = self._getSensorRegion() dataRow = copy.deepcopy(sensor.getSelf().getOutputValues('sourceOut')) dataDict = copy.deepcopy(inputRecord) inputRecordEncodings = sensor.getSelf().getOutputValues('sourceEncodings') inputRecordCategory = int(sensor.getOutputData('categoryOut')[0]) resetOut = se...
'inputRecord - dict containing the input to the sensor Return a \'ClassifierInput\' object, which contains the mapped bucket index for input Record'
def _getClassifierInputRecord(self, inputRecord):
absoluteValue = None bucketIdx = None if ((self._predictedFieldName is not None) and (self._classifierInputEncoder is not None)): absoluteValue = inputRecord[self._predictedFieldName] bucketIdx = self._classifierInputEncoder.getBucketIndices(absoluteValue)[0] return ClassifierInput(dataR...
'Compute Anomaly score, if required'
def _anomalyCompute(self):
inferenceType = self.getInferenceType() inferences = {} sp = self._getSPRegion() score = None if (inferenceType == InferenceType.NontemporalAnomaly): score = sp.getOutputData('anomalyScore')[0] elif (inferenceType == InferenceType.TemporalAnomaly): tm = self._getTPRegion() ...
'Handle the CLA Classifier compute logic when implementing multi-step prediction. This is where the patternNZ is associated with one of the other fields from the dataset 0 to N steps in the future. This method is used by each type of network (encoder only, SP only, SP +TM) to handle the compute logic through the CLA Cl...
def _handleSDRClassifierMultiStep(self, patternNZ, inputTSRecordIdx, rawInput):
inferenceArgs = self.getInferenceArgs() predictedFieldName = inferenceArgs.get('predictedField', None) if (predictedFieldName is None): raise ValueError('No predicted field was enabled! Did you call enableInference()?') self._predictedFieldName = predictedFieldName cl...
'Remove entries with 0 likelihood or likelihood less than minLikelihoodThreshold, but don\'t leave an empty dict.'
@classmethod def _removeUnlikelyPredictions(cls, likelihoodsDict, minLikelihoodThreshold, maxPredictionsPerStep):
maxVal = (None, None) for (k, v) in likelihoodsDict.items(): if (len(likelihoodsDict) <= 1): break if ((maxVal[0] is None) or (v >= maxVal[1])): if ((maxVal[0] is not None) and (maxVal[1] < minLikelihoodThreshold)): del likelihoodsDict[maxVal[0]] ...
def getRuntimeStats(self):
    """Report runtime statistics for this model.

    Always includes the stat ``numRunCalls``; temporal-network stats are
    merged in when a TM is present.

    :return: (dict) runtime statistics
    """
    stats = {'numRunCalls': self.__numRunCalls}
    if self._hasTP:
        # NOTE(review): flattened source is ambiguous about whether the
        # temporal-stats key is also added when _hasTP is False; this follows
        # the reading where it is only added for temporal models — confirm.
        temporal = dict()
        for collector in self._netInfo.statsCollectors:
            temporal.update(collector.getStats())
        stats[InferenceType.getLabel(InferenceType.TemporalNextStep)] = temporal
    return stats
def _getLogger(self):
    """Return the logging.Logger created for this subclass instance.

    Protected accessor used by the Model base class; never returns None.

    :return: logging.Logger object
    """
    return self.__logger
def _getSPRegion(self):
    """Return the network's 'SP' region, or None when the network has none."""
    regions = self._netInfo.net.regions
    return regions.get('SP', None)
def _getTPRegion(self):
    """Return the network's 'TM' region, or None when the network has none."""
    regions = self._netInfo.net.regions
    return regions.get('TM', None)
def _getSensorRegion(self):
    """Return the network's 'sensor' region.

    Raises KeyError if the region is missing — a sensor is always expected.
    """
    regions = self._netInfo.net.regions
    return regions['sensor']
def _getClassifierRegion(self):
    """Return the network's 'Classifier' region, or None when absent."""
    net = self._netInfo.net
    if net is None:
        return None
    if 'Classifier' not in net.regions:
        return None
    return net.regions['Classifier']
def _getEncoder(self):
    """Return the sensor region's encoder for the given network."""
    sensorImpl = self._getSensorRegion().getSelf()
    return sensorImpl.encoder
def _getClassifierOnlyEncoder(self):
    """Return the sensor encoder whose output feeds only the classifier,
    not the rest of the network."""
    sensorImpl = self._getSensorRegion().getSelf()
    return sensorImpl.disabledEncoder
def _getDataSource(self):
    """Return the data source that was installed in the sensor region."""
    sensorImpl = self._getSensorRegion().getSelf()
    return sensorImpl.dataSource
'Create a CLA network and return it. description: HTMPredictionModel description dictionary (TODO: define schema) Returns: NetworkInfo instance;'
def __createHTMNetwork(self, sensorParams, spEnable, spParams, tmEnable, tmParams, clEnable, clParams, anomalyParams):
n = Network() n.addRegion('sensor', 'py.RecordSensor', json.dumps(dict(verbosity=sensorParams['verbosity']))) sensor = n.regions['sensor'].getSelf() enabledEncoders = copy.deepcopy(sensorParams['encoders']) for (name, params) in enabledEncoders.items(): if (params is not None): c...
def __getstate__(self):
    """Return a picklable snapshot of ``__dict__``.

    The CLA Network is stripped out (it has its own serialization path;
    see _serializeExtraData), along with ephemeral members that are
    rebuilt on load.
    """
    state = self.__dict__.copy()
    # Keep the stats collectors but drop the network itself.
    state['_netInfo'] = NetworkInfo(net=None,
                                    statsCollectors=self._netInfo.statsCollectors)
    ephemerals = (self.__manglePrivateMemberName('__restoringFromState'),
                  self.__manglePrivateMemberName('__logger'))
    for name in ephemerals:
        state.pop(name)
    return state
'Set the state of ourself from a serialized state. See also: _deSerializeExtraData'
def __setstate__(self, state):
self.__dict__.update(state) self.__restoringFromState = True self.__logger = initLogger(self) if (not hasattr(self, '_Model__inferenceType')): self.__restoringFromV1 = True self._hasSP = True if (self.__temporalNetInfo is not None): self._Model__inferenceType = Infere...
def write(self, proto):
    """Serialize this model into a capnp message.

    :param proto: capnp HTMPredictionModelProto message builder
    """
    super(HTMPredictionModel, self).writeBaseToProto(proto.modelBase)
    proto.numRunCalls = self.__numRunCalls
    proto.minLikelihoodThreshold = self._minLikelihoodThreshold
    proto.maxPredictionsPerStep = self._maxPredictionsPerStep
    # The network serializes itself into the nested message.
    self._netInfo.net.write(proto.network)
':param proto: capnp HTMPredictionModelProto message reader'
@classmethod def read(cls, proto):
network = Network.read(proto.network) spEnable = ('SP' in network.regions) tmEnable = ('TM' in network.regions) clEnable = ('Classifier' in network.regions) model = cls(spEnable=spEnable, tmEnable=tmEnable, clEnable=clEnable, network=network, baseProto=proto.modelBase) model.__numRunCalls = prot...
def _serializeExtraData(self, extraDataDir):
    """[virtual method override] Save large binary state outside of pickle.

    Called during serialization with an external directory path used to
    bypass pickle; the CLA Network is written there directly.

    extraDataDir: model's extra data directory path
    """
    makeDirectoryFromAbsolutePath(extraDataDir)
    networkDir = self.__getNetworkStateDirectory(extraDataDir=extraDataDir)
    self.__logger.debug('Serializing network...')
    self._netInfo.net.save(networkDir)
    self.__logger.debug('Finished serializing network')
'[virtual method override] This method is called during deserialization (after __setstate__) with an external directory path that can be used to bypass pickle for loading large binary states. extraDataDir: Model\'s extra data directory path'
def _deSerializeExtraData(self, extraDataDir):
assert self.__restoringFromState assert (self._netInfo.net is None), 'Network was already unpickled' stateDir = self.__getNetworkStateDirectory(extraDataDir=extraDataDir) self.__logger.debug('(%s) De-serializing network...', self) self._netInfo.net = Network(stateDir) self.__logge...
'Attaches an \'AnomalyClassifier\' region to the network. Will remove current \'AnomalyClassifier\' region if it exists. Parameters network - network to add the AnomalyClassifier region params - parameters to pass to the region spEnable - True if network has an SP region tmEnable - True if network has a TM region; Curr...
def _addAnomalyClassifierRegion(self, network, params, spEnable, tmEnable):
allParams = copy.deepcopy(params) knnParams = dict(k=1, distanceMethod='rawOverlap', distanceNorm=1, doBinarization=1, replaceDuplicates=0, maxStoredPatterns=1000) allParams.update(knnParams) if (allParams['trainRecords'] is None): allParams['trainRecords'] = DEFAULT_ANOMALY_TRAINRECORDS if ...
'extraDataDir: Model\'s extra data directory path Returns: Absolute directory path for saving CLA Network'
def __getNetworkStateDirectory(self, extraDataDir):
if self.__restoringFromV1: if (self.getInferenceType() == InferenceType.TemporalNextStep): leafName = ('temporal' + '-network.nta') else: leafName = ('nonTemporal' + '-network.nta') else: leafName = (InferenceType.getLabel(self.getInferenceType()) + '-network.nta'...
'Mangles the given mangled (private) member name; a mangled member name is one whose name begins with two or more underscores and ends with one or zero underscores. privateMemberName: The private member name (e.g., "__logger") skipCheck: Pass True to skip test for presence of the demangled member in our instance. Re...
def __manglePrivateMemberName(self, privateMemberName, skipCheck=False):
assert privateMemberName.startswith('__'), ("%r doesn't start with __" % privateMemberName) assert (not privateMemberName.startswith('___')), ('%r starts with ___' % privateMemberName) assert (not privateMemberName.endswith('__')), ('%r ends with more than one undersco...
def __init__(self, *args, **kwargs):
    """Initialize the interpreter with blacklisted AST nodes removed from
    the supported node set."""
    permitted = set(self.supported_nodes) - self.blacklisted_nodes
    self.supported_nodes = tuple(permitted)
    asteval.Interpreter.__init__(self, *args, **kwargs)
'Validates control dictionary for the experiment context'
def __validateExperimentControl(self, control):
taskList = control.get('tasks', None) if (taskList is not None): taskLabelsList = [] for task in taskList: validateOpfJsonValue(task, 'opfTaskSchema.json') validateOpfJsonValue(task['taskControl'], 'opfTaskControlSchema.json') taskLabel = task['taskLabel'] ...
def __validateNupicControl(self, control):
    """Validate a nupic-engine control dictionary against its JSON schema."""
    validateOpfJsonValue(control, 'nupicControlSchema.json')
'TODO: document :param stream:'
def normalizeStreamSource(self, stream):
source = stream['source'][len(FILE_SCHEME):] if os.path.isabs(source): sourcePath = source else: sourcePath = resource_filename('nupic.datafiles', source) if (not os.path.exists(sourcePath)): sourcePath = os.path.join(os.getcwd(), source) stream['source'] = (FILE_SCHE...
def normalizeStreamSources(self):
    """Normalize the source of every stream referenced by the control dict.

    Handles both single-task controls ('dataset' at top level) and
    multi-task controls ('tasks', each with its own 'dataset').
    """
    task = dict(self.__control)
    if 'dataset' in task:
        streams = list(task['dataset']['streams'])
    else:
        streams = [stream
                   for subtask in task['tasks']
                   for stream in subtask['dataset']['streams']]
    for stream in streams:
        self.normalizeStreamSource(stream)
'TODO: document'
def convertNupicEnvToOPF(self):
task = dict(self.__control) task.pop('environment') inferenceArgs = task.pop('inferenceArgs') task['taskLabel'] = 'DefaultTask' iterationCount = task.get('iterationCount', (-1)) iterationCountInferOnly = task.pop('iterationCountInferOnly', 0) if (iterationCountInferOnly == (-1)): ite...
def __init__(self, requestedActivities=None):
    """Initialize the periodic-activity manager.

    requestedActivities: a sequence of PeriodicActivityRequest elements;
        defaults to no activities.
    """
    # Fixed: the original declared `requestedActivities=[]`, a mutable
    # default argument shared across all calls. A None sentinel is
    # backward-compatible and avoids the shared-state hazard.
    self.__activities = []
    if requestedActivities is None:
        requestedActivities = []
    self.__appendActivities(requestedActivities)
def addActivities(self, periodicActivities):
    """Register additional activities.

    periodicActivities: a sequence of PeriodicActivityRequest elements
    """
    self.__appendActivities(periodicActivities)
'Activity tick handler; services all activities Returns: True if controlling iterator says it\'s okay to keep going; False to stop'
def tick(self):
for act in self.__activities: if (not act.iteratorHolder[0]): continue try: next(act.iteratorHolder[0]) except StopIteration: act.cb() if act.repeating: act.iteratorHolder[0] = iter(xrange((act.period - 1))) else: ...
def __appendActivities(self, periodicActivities):
    """Build and store an Activity entry for each request.

    periodicActivities: a sequence of PeriodicActivityRequest elements
    """
    for request in periodicActivities:
        # NOTE(review): `xrange` implies this module targets Python 2.
        activity = self.Activity(
            repeating=request.repeating,
            period=request.period,
            cb=request.cb,
            iteratorHolder=[iter(xrange(request.period - 1))])
        self.__activities.append(activity)
'Add the label labelName to each record with record ROWID in range from start to end, noninclusive of end. This will recalculate all points from end to the last record stored in the internal cache of this classifier.'
def addLabel(self, start, end, labelName):
if (len(self.saved_states) == 0): raise HTMPredictionModelInvalidRangeError("Invalid supplied range for 'addLabel'. Model has no saved records.") startID = self.saved_states[0].ROWID clippedStart = max(0, (start - startID)) clippedEnd = max(0, min(len(self.saved_states...
'Remove labels from each record with record ROWID in range from start to end, noninclusive of end. Removes all records if labelFilter is None, otherwise only removes the labels eqaul to labelFilter. This will recalculate all points from end to the last record stored in the internal cache of this classifier.'
def removeLabels(self, start=None, end=None, labelFilter=None):
if (len(self.saved_states) == 0): raise HTMPredictionModelInvalidRangeError("Invalid supplied range for 'removeLabels'. Model has no saved records.") startID = self.saved_states[0].ROWID clippedStart = (0 if (start is None) else max(0, (start - startID))) clippedEnd = ...
'This method will add the record to the KNN classifier.'
def _addRecordToKNN(self, record):
classifier = self.htm_prediction_model._getAnomalyClassifier() knn = classifier.getSelf()._knn prototype_idx = classifier.getSelf().getParameter('categoryRecencyList') category = self._labelListToCategoryNumber(record.anomalyLabel) if (record.ROWID in prototype_idx): knn.prototypeSetCategory...
'This method will remove the given records from the classifier. parameters recordsToDelete - list of records to delete from the classififier'
def _deleteRecordsFromKNN(self, recordsToDelete):
classifier = self.htm_prediction_model._getAnomalyClassifier() knn = classifier.getSelf()._knn prototype_idx = classifier.getSelf().getParameter('categoryRecencyList') idsToDelete = [r.ROWID for r in recordsToDelete if ((not r.setByUser) and (r.ROWID in prototype_idx))] nProtos = knn._numPatterns ...
'This method will remove any stored records within the range from start to end. Noninclusive of end. parameters start - integer representing the ROWID of the start of the deletion range, end - integer representing the ROWID of the end of the deletion range, if None, it will default to end.'
def _deleteRangeFromKNN(self, start=0, end=None):
classifier = self.htm_prediction_model._getAnomalyClassifier() knn = classifier.getSelf()._knn prototype_idx = numpy.array(classifier.getSelf().getParameter('categoryRecencyList')) if (end is None): end = (prototype_idx.max() + 1) idsIdxToDelete = numpy.logical_and((prototype_idx >= start), ...
'return the classified labeling of record'
def _recomputeRecordFromKNN(self, record):
inputs = {'categoryIn': [None], 'bottomUpIn': self._getStateAnomalyVector(record)} outputs = {'categoriesOut': numpy.zeros((1,)), 'bestPrototypeIndices': numpy.zeros((1,)), 'categoryProbabilitiesOut': numpy.zeros((1,))} classifier = self.htm_prediction_model._getAnomalyClassifier() knn = classifier.getS...
'Construct a _HTMClassificationRecord based on the current state of the htm_prediction_model of this classifier. ***This will look into the internals of the model and may depend on the SP, TM, and KNNClassifier***'
def _constructClassificationRecord(self):
model = self.htm_prediction_model sp = model._getSPRegion() tm = model._getTPRegion() tpImp = tm.getSelf()._tfdr activeColumns = sp.getOutputData('bottomUpOut').nonzero()[0] score = numpy.in1d(activeColumns, self._prevPredictedColumns).sum() score = ((self._activeColumnCount - score) / float...
def compute(self):
    """Run one iteration of this anomaly classifier and return the new record."""
    record = self._constructClassificationRecord()
    # Only auto-classify once past the configured warm-up period.
    if record.ROWID >= self._autoDetectWaitRecords:
        self._updateState(record)
    self.saved_states.append(record)
    # Bound the in-memory history to _history_length records.
    if len(self.saved_states) > self._history_length:
        self.saved_states.pop(0)
    return record
'Sets the autoDetectWaitRecords.'
def setAutoDetectWaitRecords(self, waitRecords):
if (not isinstance(waitRecords, int)): raise HTMPredictionModelInvalidArgument(("Invalid argument type '%s'. WaitRecord must be a number." % type(waitRecords))) if ((len(self.saved_states) > 0) and (waitRecords < self.saved_states[0].ROWID)): raise HTMPredictionModelInval...
def getAutoDetectWaitRecords(self):
    """Return the configured autoDetectWaitRecords value."""
    return self._autoDetectWaitRecords
'Sets the autoDetectThreshold. TODO: Ensure previously classified points outside of classifier are valid.'
def setAutoDetectThreshold(self, threshold):
if (not (isinstance(threshold, float) or isinstance(threshold, int))): raise HTMPredictionModelInvalidArgument(("Invalid argument type '%s'. threshold must be a number." % type(threshold))) self._autoDetectThreshold = threshold for state in self.saved_states: self._up...
def getAutoDetectThreshold(self):
    """Return the configured autoDetectThreshold value."""
    return self._autoDetectThreshold
def _labelToCategoryNumber(self, label):
    """Map a label to its unique power-of-two category number.

    The KNN classifier stores categories as numbers; each label gets its
    own bit so multiple labels can be combined on a single record.
    Unknown labels are registered on first use.
    """
    if label not in self.saved_categories:
        self.saved_categories.append(label)
    return 2 ** self.saved_categories.index(label)
def _labelListToCategoryNumber(self, labelList):
    """Combine a list of labels into a single bitmask category number.

    Lets a record carry several labels even though the KNN classifier
    stores only one number per record.
    """
    return sum(self._labelToCategoryNumber(label) for label in labelList)
def _categoryToLabelList(self, category):
    """Decode a bitmask category number back into its list of labels."""
    if category is None:
        return []
    labels = []
    bit = 0
    while category > 0:
        if category & 1:
            labels.append(self.saved_categories[bit])
        bit += 1
        category >>= 1
    return labels
def _getStateAnomalyVector(self, state):
    """Return a state's anomaly vector, converted from sparse to dense.

    (Original docstring typos: 'vertor'/'spare' -> vector/sparse.)
    """
    dense = numpy.zeros(self._anomalyVectorLength)
    dense[state.anomalyVector] = 1
    return dense
'new instance of MovingAverage, so method .next() can be used @param windowSize - length of sliding window @param existingHistoricalValues - construct the object with already some values in it.'
def __init__(self, windowSize, existingHistoricalValues=None):
if (not isinstance(windowSize, numbers.Integral)): raise TypeError('MovingAverage - windowSize must be integer type') if (windowSize <= 0): raise ValueError('MovingAverage - windowSize must be >0') self.windowSize = windowSize if (existingHistoricalValues...
'Routine for computing a moving average. @param slidingWindow a list of previous values to use in computation that will be modified and returned @param total the sum of the values in slidingWindow to be used in the calculation of the moving average @param newVal a new number compute the new windowed average @param wind...
@staticmethod def compute(slidingWindow, total, newVal, windowSize):
if (len(slidingWindow) == windowSize): total -= slidingWindow.pop(0) slidingWindow.append(newVal) total += newVal return ((float(total) / len(slidingWindow)), slidingWindow, total)
def next(self, newValue):
    """Feed one value into the moving average and return the updated mean."""
    average, self.slidingWindow, self.total = self.compute(
        self.slidingWindow, self.total, newValue, self.windowSize)
    return average
def getCurrentAvg(self):
    """Return the mean of the values currently in the sliding window.

    Raises ZeroDivisionError on an empty window (same as the original).
    """
    return float(self.total) / len(self.slidingWindow)
def __setstate__(self, state):
    """Restore instance state from a pickle, upgrading records pickled by
    older versions that lack the 'slidingWindow'/'total' attributes."""
    self.__dict__.update(state)
    if not hasattr(self, 'slidingWindow'):
        self.slidingWindow = []
    if not hasattr(self, 'total'):
        # Bug fix: the original did `self.slidingWindow = sum(self.slidingWindow)`,
        # clobbering the window list with a number (breaking next()/getCurrentAvg()).
        # The intent is to rebuild the running total from the window contents.
        self.total = sum(self.slidingWindow)
@classmethod
@abstractmethod
def getSchema(cls):
    """Get the Cap'n Proto schema for this class.

    .. warning:: Abstract method; per abc protocol, subclassing without
       overriding will fail.

    @returns Cap'n Proto schema
    """
    pass
@classmethod
@abstractmethod
def read(cls, proto):
    """Create a new instance initialized from a Cap'n Proto object.

    Note: abstract method; per abc protocol, subclassing without
    overriding will fail.

    :param proto: Cap'n Proto obj
    :return: instance initialized from proto
    """
    pass
@abstractmethod
def write(self, proto):
    """Write this instance into a Cap'n Proto object.

    .. warning:: Abstract method; per abc protocol, subclassing without
       overriding will fail.

    :param proto: Cap'n Proto obj
    """
    pass
@classmethod
def readFromFile(cls, f, packed=True):
    """Deserialize an instance from an open file.

    :param f: input file
    :param packed: if True, assume the content is packed
    :return: instance initialized from the proto obj
    """
    schema = cls.getSchema()
    reader = schema.read_packed if packed else schema.read
    proto = reader(f)
    return cls.read(proto)
def writeToFile(self, f, packed=True):
    """Serialize this instance into an open file.

    :param f: output file
    :param packed: if True, pack the contents
    """
    proto = self.getSchema().new_message()
    self.write(proto)
    writer = proto.write_packed if packed else proto.write
    writer(f)
@staticmethod
def connectionsFactory(*args, **kwargs):
    """Create the Connections instance used by this TemporalMemory.

    Subclasses may override to supply a different Connections
    implementation or to augment the returned instance.
    """
    return Connections(*args, **kwargs)
def compute(self, activeColumns, learn=True):
    """Perform one Temporal Memory time step.

    Activates cells for the given columns, then computes dendrite
    activity, so getPredictiveCells() reflects the next time step.

    :param activeColumns: (iter) active column indices, any order
    :param learn: (bool) whether to reinforce/punish/grow synapses
    """
    orderedColumns = sorted(activeColumns)
    self.activateCells(orderedColumns, learn)
    self.activateDendrites(learn)
'Calculate the active cells, using the current active columns and dendrite segments. Grow and reinforce synapses. :param activeColumns: (iter) A sorted list of active column indices. :param learn: (bool) If true, reinforce / punish / grow synapses. **Pseudocode:** for each column if column is active and has active dist...
def activateCells(self, activeColumns, learn=True):
prevActiveCells = self.activeCells prevWinnerCells = self.winnerCells self.activeCells = [] self.winnerCells = [] segToCol = (lambda segment: int((segment.cell / self.cellsPerColumn))) identity = (lambda x: x) for columnData in groupby2(activeColumns, identity, self.activeSegments, segToCol,...
'Calculate dendrite segment activity, using the current active cells. :param learn: (bool) If true, segment activations will be recorded. This information is used during segment cleanup. **Pseudocode:** for each distal dendrite segment with activity >= activationThreshold mark the segment as active for each distal dend...
def activateDendrites(self, learn=True):
(numActiveConnected, numActivePotential) = self.connections.computeActivity(self.activeCells, self.connectedPermanence) activeSegments = (self.connections.segmentForFlatIdx(i) for i in xrange(len(numActiveConnected)) if (numActiveConnected[i] >= self.activationThreshold)) matchingSegments = (self.connection...
def reset(self):
    """Signal the start of a new sequence.

    Clears all predictions and ensures synapses will not grow toward the
    currently active cells on the next time step.
    """
    self.activeCells, self.winnerCells = [], []
    self.activeSegments, self.matchingSegments = [], []
def activatePredictedColumn(self, column, columnActiveSegments,
                            columnMatchingSegments, prevActiveCells,
                            prevWinnerCells, learn):
    """Choose winner cells in a correctly predicted column and learn on the
    segments that predicted it.

    Delegates to the classmethod _activatePredictedColumn with this
    instance's parameters. `column` and `columnMatchingSegments` are
    accepted for interface symmetry but not used here (same as original).
    """
    return self._activatePredictedColumn(
        self.connections, self._random, columnActiveSegments,
        prevActiveCells, prevWinnerCells,
        self.numActivePotentialSynapsesForSegment, self.maxNewSynapseCount,
        self.initialPermanence, self.permanenceIncrement,
        self.permanenceDecrement, self.maxSynapsesPerSegment, learn)
'Activates all of the cells in an unpredicted active column, chooses a winner cell, and, if learning is turned on, learns on one segment, growing a new segment if necessary. :param column: (int) Index of bursting column. :param columnMatchingSegments: (iter) Matching segments in this column, or None if there aren\'t an...
def burstColumn(self, column, columnMatchingSegments, prevActiveCells, prevWinnerCells, learn):
start = (self.cellsPerColumn * column) cellsForColumn = xrange(start, (start + self.cellsPerColumn)) return self._burstColumn(self.connections, self._random, self.lastUsedIterationForSegment, column, columnMatchingSegments, prevActiveCells, prevWinnerCells, cellsForColumn, self.numActivePotentialSynapsesFor...
def punishPredictedColumn(self, column, columnActiveSegments,
                          columnMatchingSegments, prevActiveCells,
                          prevWinnerCells):
    """Punish segments that incorrectly predicted this column.

    Delegates to the classmethod _punishPredictedColumn; `column`,
    `columnActiveSegments` and `prevWinnerCells` are accepted for
    interface symmetry but not used here (same as original).
    """
    self._punishPredictedColumn(self.connections, columnMatchingSegments,
                                prevActiveCells,
                                self.predictedSegmentDecrement)
def createSegment(self, cell):
    """Create a segment on the given cell, with bookkeeping.

    Wraps the classmethod _createSegment with this instance's state;
    unit tests should call this rather than Connections.createSegment.
    """
    return self._createSegment(self.connections,
                               self.lastUsedIterationForSegment, cell,
                               self.iteration, self.maxSegmentsPerCell)
':param connections: (Object) Connections for the TM. Gets mutated. :param random: (Object) Random number generator. Gets mutated. :param columnActiveSegments: (iter) Active segments in this column. :param prevActiveCells: (list) Active cells in `t-1`. :param prevWinnerCells: (list) Winner cells in `t-1`. :param numAct...
@classmethod def _activatePredictedColumn(cls, connections, random, columnActiveSegments, prevActiveCells, prevWinnerCells, numActivePotentialSynapsesForSegment, maxNewSynapseCount, initialPermanence, permanenceIncrement, permanenceDecrement, maxSynapsesPerSegment, learn):
cellsToAdd = [] previousCell = None for segment in columnActiveSegments: if (segment.cell != previousCell): cellsToAdd.append(segment.cell) previousCell = segment.cell if learn: cls._adaptSegment(connections, segment, prevActiveCells, permanenceIncrement, ...
':param connections: (Object) Connections for the TM. Gets mutated. :param random: (Object) Random number generator. Gets mutated. :param lastUsedIterationForSegment: (list) Last used iteration for each segment, indexed by the segment\'s flatIdx. Gets mutated. :param column: (int) Index of bursting column. :param colum...
@classmethod def _burstColumn(cls, connections, random, lastUsedIterationForSegment, column, columnMatchingSegments, prevActiveCells, prevWinnerCells, cellsForColumn, numActivePotentialSynapsesForSegment, iteration, maxNewSynapseCount, initialPermanence, permanenceIncrement, permanenceDecrement, maxSegmentsPerCell, max...
if (columnMatchingSegments is not None): numActive = (lambda s: numActivePotentialSynapsesForSegment[s.flatIdx]) bestMatchingSegment = max(columnMatchingSegments, key=numActive) winnerCell = bestMatchingSegment.cell if learn: cls._adaptSegment(connections, bestMatchingSeg...
@classmethod
def _punishPredictedColumn(cls, connections, columnMatchingSegments,
                           prevActiveCells, predictedSegmentDecrement):
    """Weaken every matching segment of a wrongly predicted column.

    :param connections: (Object) Connections for the TM; gets mutated
    :param columnMatchingSegments: (iter) matching segments, or None
    :param prevActiveCells: (list) active cells in t-1
    :param predictedSegmentDecrement: (float) punishment amount
    """
    # Guard clauses replace the original combined condition.
    if predictedSegmentDecrement <= 0.0:
        return
    if columnMatchingSegments is None:
        return
    for segment in columnMatchingSegments:
        cls._adaptSegment(connections, segment, prevActiveCells,
                          -predictedSegmentDecrement, 0.0)
'Create a segment on the connections, enforcing the maxSegmentsPerCell parameter.'
@classmethod def _createSegment(cls, connections, lastUsedIterationForSegment, cell, iteration, maxSegmentsPerCell):
while (connections.numSegments(cell) >= maxSegmentsPerCell): leastRecentlyUsedSegment = min(connections.segmentsForCell(cell), key=(lambda segment: lastUsedIterationForSegment[segment.flatIdx])) connections.destroySegment(leastRecentlyUsedSegment) segment = connections.createSegment(cell) if...
'Destroy nDestroy synapses on the specified segment, but don\'t destroy synapses to the "excludeCells".'
@classmethod def _destroyMinPermanenceSynapses(cls, connections, random, segment, nDestroy, excludeCells):
destroyCandidates = sorted((synapse for synapse in connections.synapsesForSegment(segment) if (synapse.presynapticCell not in excludeCells)), key=(lambda s: s._ordinal)) for _ in xrange(nDestroy): if (len(destroyCandidates) == 0): break minSynapse = None minPermanence = float...
'Gets the cell with the smallest number of segments. Break ties randomly. :param random: (Object) Random number generator. Gets mutated. :param cells: (list) Indices of cells. :param connections: (Object) Connections instance for the TM. :returns: (int) Cell index.'
@classmethod def _leastUsedCell(cls, random, cells, connections):
leastUsedCells = [] minNumSegments = float('inf') for cell in cells: numSegments = connections.numSegments(cell) if (numSegments < minNumSegments): minNumSegments = numSegments leastUsedCells = [] if (numSegments == minNumSegments): leastUsedCells....
'Creates nDesiredNewSynapes synapses on the segment passed in if possible, choosing random cells from the previous winner cells that are not already on the segment. :param connections: (Object) Connections instance for the tm :param random: (Object) TM object used to generate random numbers :param se...
@classmethod def _growSynapses(cls, connections, random, segment, nDesiredNewSynapes, prevWinnerCells, initialPermanence, maxSynapsesPerSegment):
candidates = list(prevWinnerCells) for synapse in connections.synapsesForSegment(segment): i = binSearch(candidates, synapse.presynapticCell) if (i != (-1)): del candidates[i] nActual = min(nDesiredNewSynapes, len(candidates)) overrun = ((connections.numSynapses(segment) + nA...
'Updates synapses on segment. Strengthens active synapses; weakens inactive synapses. :param connections: (Object) Connections instance for the tm :param segment: (int) Segment to adapt :param prevActiveCells: (list) Active cells in `t-1` :param permanenceIncrement: (float) Amount to i...
@classmethod def _adaptSegment(cls, connections, segment, prevActiveCells, permanenceIncrement, permanenceDecrement):
synapsesToDestroy = [] for synapse in connections.synapsesForSegment(segment): permanence = synapse.permanence if (binSearch(prevActiveCells, synapse.presynapticCell) != (-1)): permanence += permanenceIncrement else: permanence -= permanenceDecrement perma...
def columnForCell(self, cell):
    """Return the index of the column a cell belongs to.

    :param cell: (int) cell index
    :returns: (int) column index
    """
    self._validateCell(cell)
    # Kept as int(cell / cpc) rather than // to preserve original semantics.
    column = int(cell / self.cellsPerColumn)
    return column
def cellsForColumn(self, column):
    """Return the indices of the cells that belong to a column.

    :param column: (int) column index
    :returns: cell indices (range, as in the original)
    """
    self._validateColumn(column)
    first = self.cellsPerColumn * column
    return range(first, first + self.cellsPerColumn)
def numberOfColumns(self):
    """Return the total number of columns in this layer (product of the
    column dimensions).

    :returns: (int) number of columns
    """
    return reduce(mul, self.columnDimensions, 1)
def numberOfCells(self):
    """Return the total number of cells in this layer.

    :returns: (int) number of cells
    """
    return self.numberOfColumns() * self.cellsPerColumn
def mapCellsToColumns(self, cells):
    """Group cells by the column they belong to.

    :param cells: (set) cells
    :returns: (dict) column -> set of those of `cells` in that column
    """
    byColumn = defaultdict(set)
    for cell in cells:
        byColumn[self.columnForCell(cell)].add(cell)
    return byColumn
def getActiveCells(self):
    """Return the indices of the currently active cells.

    :returns: (list) indices of active cells
    """
    return self.getCellIndices(self.activeCells)
def getPredictiveCells(self):
    """Return the indices of the predictive cells.

    Dedupes consecutive segments on the same cell (activeSegments is
    ordered by cell), yielding each predicted cell once.

    :returns: (list) indices of predictive cells
    """
    predictive = []
    lastCell = None
    for segment in self.activeSegments:
        if segment.cell != lastCell:
            predictive.append(segment.cell)
            lastCell = segment.cell
    return predictive
def getWinnerCells(self):
    """Return the indices of the winner cells.

    :returns: (list) indices of winner cells
    """
    return self.getCellIndices(self.winnerCells)
def getActiveSegments(self):
    """Return the active segments.

    :returns: (list) active segments
    """
    return self.activeSegments
def getMatchingSegments(self):
    """Return the matching segments.

    :returns: (list) matching segments
    """
    return self.matchingSegments
def getCellsPerColumn(self):
    """Return the number of cells per column.

    :returns: (int) cells per column
    """
    return self.cellsPerColumn
def getColumnDimensions(self):
    """Return the dimensions of the columns in the region.

    :returns: (tuple) column dimensions
    """
    return self.columnDimensions
def getActivationThreshold(self):
    """Return the activation threshold.

    :returns: (int) activation threshold
    """
    return self.activationThreshold
def setActivationThreshold(self, activationThreshold):
    """Set the activation threshold.

    :param activationThreshold: (int) new activation threshold
    """
    self.activationThreshold = activationThreshold
def getInitialPermanence(self):
    """Return the initial permanence for new synapses.

    :returns: (float) initial permanence
    """
    return self.initialPermanence
def setInitialPermanence(self, initialPermanence):
    """Set the initial permanence for new synapses.

    :param initialPermanence: (float) initial permanence
    """
    self.initialPermanence = initialPermanence
def getMinThreshold(self):
    """Return the min threshold.

    :returns: (int) min threshold
    """
    return self.minThreshold
def setMinThreshold(self, minThreshold):
    """Set the min threshold.

    :param minThreshold: (int) min threshold
    """
    self.minThreshold = minThreshold
def getMaxNewSynapseCount(self):
    """Return the max new synapse count.

    :returns: (int) max new synapse count
    """
    return self.maxNewSynapseCount
def setMaxNewSynapseCount(self, maxNewSynapseCount):
    """Set the max new synapse count.

    :param maxNewSynapseCount: (int) max new synapse count
    """
    self.maxNewSynapseCount = maxNewSynapseCount
def getPermanenceIncrement(self):
    """Return the permanence increment.

    :returns: (float) permanence increment
    """
    return self.permanenceIncrement
def setPermanenceIncrement(self, permanenceIncrement):
    """Set the permanence increment.

    :param permanenceIncrement: (float) permanence increment
    """
    self.permanenceIncrement = permanenceIncrement
def getPermanenceDecrement(self):
    """Return the permanence decrement.

    :returns: (float) permanence decrement
    """
    return self.permanenceDecrement