desc
stringlengths
3
26.7k
decl
stringlengths
11
7.89k
bodies
stringlengths
8
553k
@classmethod
def _readStdConfigFiles(cls):
  """Read in all standard configuration files.

  Loads the default configuration file first, then the user configuration
  file so user settings override defaults.
  """
  cls.readConfigFile(DEFAULT_CONFIG)
  cls.readConfigFile(USER_CONFIG)
@classmethod
def getCustomDict(cls):
  """Return a dict containing all custom configuration properties.

  Parameters:
  ----------------------------------------------------------------
  retval:  dict containing all custom configuration properties.
  """
  # Delegates to the wrapper that owns the persistent custom config store.
  return _CustomConfigurationFileWrapper.getCustomDict()
@classmethod
def setCustomProperty(cls, propertyName, value):
  """Set a single custom setting and persist it to the custom
  configuration store.

  Parameters:
  ----------------------------------------------------------------
  propertyName:  string containing the name of the property to set
  value:         value to set the property to
  """
  # Single-property convenience wrapper over the batch setter.
  cls.setCustomProperties({propertyName: value})
@classmethod
def setCustomProperties(cls, properties):
  """Set multiple custom properties and persist them to the custom
  configuration store.

  Parameters:
  ----------------------------------------------------------------
  properties:  a dict of property name/value pairs to set
  """
  _getLogger().info('Setting custom configuration properties=%r; caller=%r',
                    properties, traceback.format_stack())
  # Persist to the custom configuration file first, then update the
  # in-memory values.
  _CustomConfigurationFileWrapper.edit(properties)
  # .items() (rather than py2-only .iteritems()) iterates identically on
  # both Python 2 and 3.
  for (propertyName, value) in properties.items():
    cls.set(propertyName, value)
@classmethod
def clear(cls):
  """Clear all configuration properties from the in-memory cache, but do
  NOT alter the custom configuration file. Used in unit-testing.
  """
  # Clear the in-memory base-class cache; persistent=False leaves the
  # on-disk custom configuration file untouched.
  super(Configuration, cls).clear()
  _CustomConfigurationFileWrapper.clear(persistent=False)
@classmethod
def resetCustomConfig(cls):
  """Clear all custom configuration settings and delete the persistent
  custom configuration store.
  """
  _getLogger().info('Resetting all custom configuration properties; '
                    'caller=%r', traceback.format_stack())
  # persistent=True additionally removes the on-disk custom config file.
  super(Configuration, cls).clear()
  _CustomConfigurationFileWrapper.clear(persistent=True)
@classmethod
def loadCustomConfig(cls):
  """Load custom configuration settings from their persistent storage.

  DO NOT CALL THIS: it's typically not necessary to call this method
  directly - see NOTE below.

  NOTE: this method exists *solely* for the benefit of prepare_conf.py,
  which needs to load configuration files selectively.
  """
  cls.readConfigFile(_CustomConfigurationFileWrapper.customFileName)
@classmethod
def _readStdConfigFiles(cls):
  """Intercept the _readStdConfigFiles call from our base config class to
  read in base and custom configuration settings.
  """
  # Base settings first, then custom settings so custom values win.
  super(Configuration, cls)._readStdConfigFiles()
  cls.loadCustomConfig()
# NOTE(review): the three method bodies below are flattened one-line extracts
# truncated at "..." — the original formatting and trailing code are not
# visible here, so only comments are added; code is left byte-identical.
'If persistent is True, delete the temporary file Parameters: persistent: if True, custom configuration file is deleted'
@classmethod def clear(cls, persistent=False):
# Removes the on-disk custom config file when persistent=True, tolerating a
# missing file (ENOENT) but logging and re-raising any other OSError.
if persistent: try: os.unlink(cls.getPath()) except OSError as e: if (e.errno != errno.ENOENT): _getLogger().exception('Error %s while trying to remove dynamic configuration file: %s', e.errno, cls.getPath()) raise ...
'Returns a dict of all temporary values in custom configuration file'
@classmethod def getCustomDict(cls):
# Reads the custom config file (if present) via Configuration._readConfigFile
# and extracts each property's 'value' entry into a plain dict.
if (not os.path.exists(cls.getPath())): return dict() properties = Configuration._readConfigFile(os.path.basename(cls.getPath()), os.path.dirname(cls.getPath())) values = dict() for propName in properties: if ('value' in properties[propName]): values[propName] = properties[pr...
'Edits the XML configuration file with the parameters specified by properties Parameters: properties: dict of settings to be applied to the custom configuration store (key is property name, value is value)'
@classmethod def edit(cls, properties):
# Reads the existing custom XML config (tolerating a missing file), then —
# presumably — merges `properties` into it and rewrites it; the merge/write
# portion is elided at "..." — TODO confirm against the full source.
copyOfProperties = copy(properties) configFilePath = cls.getPath() try: with open(configFilePath, 'r') as fp: contents = fp.read() except IOError as e: if (e.errno != errno.ENOENT): _getLogger().exception('Error %s reading custom configuration store...
@classmethod
def _setPath(cls):
  """Set the path of the custom configuration file.

  The directory comes from the NTA_DYNAMIC_CONF_DIR environment variable;
  raises KeyError if that variable is not set.
  """
  cls._path = os.path.join(os.environ['NTA_DYNAMIC_CONF_DIR'],
                           cls.customFileName)
@classmethod
def getPath(cls):
  """Get the path of the custom configuration file.

  Computes and caches the path on first use via _setPath().
  """
  if cls._path is None:
    cls._setPath()
  return cls._path
def getState(self):
  """Return the current state of this particle. This is used for
  communicating our state into a model record entry so that it can be
  instantiated on another worker.
  """
  raise NotImplementedError

def setState(self, state):
  """Set the current state of this particle. This is the counterpart to
  getState.
  """
  raise NotImplementedError

def getPosition(self):
  """Return the current position (for int vars, the position rounded to
  the nearest int).

  Parameters:
  ----------------------------------------------------------------
  retval:  current position
  """
  raise NotImplementedError

def agitate(self):
  """Cause the variable to jiggle away from its current position by
  increasing its velocity by a multiplicative factor. Every time agitate()
  is called, the velocity increases, so repeated calls eventually move the
  variable to a new position.
  """
  raise NotImplementedError

def newPosition(self, globalBestPosition, rng):
  """Choose a new position based on results obtained so far from other
  particles and the passed-in globalBestPosition.

  Parameters:
  ----------------------------------------------------------------
  globalBestPosition:  global best position for this colony
  rng:                 instance of random.Random() used for generating
                       random numbers
  retval:              new position
  """
  raise NotImplementedError

def pushAwayFrom(self, otherVars, rng):
  """Choose a new position that is as far away as possible from all
  'otherVars', where 'otherVars' is a list of PermuteVariable instances.

  Parameters:
  ----------------------------------------------------------------
  otherVars:  list of other PermuteVariables to push away from
  rng:        instance of random.Random() used for generating random
              numbers
  """
  raise NotImplementedError

def resetVelocity(self, rng):
  """Reset the velocity to be some fraction of the total distance, used
  when starting a new swarm at a previous best position.

  Parameters:
  ----------------------------------------------------------------
  rng:  instance of random.Random() (full docstring truncated in extract)
  """
  raise NotImplementedError
# NOTE(review): flattened one-line extract, truncated at "..." — only comments
# are added; code is left byte-identical.
'Construct a variable that permutes over floating point values using the Particle Swarm Optimization (PSO) algorithm. See descriptions of PSO (i.e. http://en.wikipedia.org/wiki/Particle_swarm_optimization) for references to the inertia, cogRate, and socRate parameters. Parameters: min: min allowed value of pos...
def __init__(self, min, max, stepSize=None, inertia=None, cogRate=None, socRate=None):
# Starts at the midpoint of [min, max] with velocity = range/5; inertia (and,
# presumably, cogRate/socRate in the elided tail) fall back to
# Configuration-supplied defaults when not given — TODO confirm the tail.
super(PermuteFloat, self).__init__() self.min = min self.max = max self.stepSize = stepSize self._position = ((self.max + self.min) / 2.0) self._velocity = ((self.max - self.min) / 5.0) self._inertia = (float(Configuration.get('nupic.hypersearch.inertia')) if (inertia is None) else inertia) ...
def __repr__(self):
  """See comments in base class."""
  # Shows both the quantized position (getPosition()) and the raw internal
  # _position for debugging.
  return ('PermuteFloat(min=%f, max=%f, stepSize=%s) [position=%f(%f), '
          'velocity=%f, _bestPosition=%s, _bestResult=%s]'
          % (self.min, self.max, self.stepSize, self.getPosition(),
             self._position, self._velocity, self._bestPosition,
             self._bestResult))
def getState(self):
  """See comments in base class."""
  return dict(_position=self._position,
              position=self.getPosition(),
              velocity=self._velocity,
              bestPosition=self._bestPosition,
              bestResult=self._bestResult)
def setState(self, state):
  """See comments in base class."""
  # Note: restores the raw '_position' (not the quantized 'position').
  self._position = state['_position']
  self._velocity = state['velocity']
  self._bestPosition = state['bestPosition']
  self._bestResult = state['bestResult']
def getPosition(self):
  """See comments in base class.

  Returns the raw position when stepSize is None; otherwise the position
  rounded to the nearest stepSize increment from min, clipped to
  [min, max].
  """
  if self.stepSize is None:
    return self._position
  # Round to the closest step boundary relative to self.min.
  numSteps = (self._position - self.min) / self.stepSize
  numSteps = int(round(numSteps))
  position = self.min + (numSteps * self.stepSize)
  # Clip to the allowed range.
  position = max(self.min, position)
  position = min(self.max, position)
  return position
# NOTE(review): the three method bodies below are flattened one-line extracts
# truncated at "..." — only comments are added; code is left byte-identical.
'See comments in base class.'
def agitate(self):
# Amplifies velocity by 1.5/inertia, clamps it to +/- half the range, and
# flips direction at the max boundary; behavior at the min boundary is in the
# elided tail — TODO confirm.
self._velocity *= (1.5 / self._inertia) maxV = ((self.max - self.min) / 2) if (self._velocity > maxV): self._velocity = maxV elif (self._velocity < (- maxV)): self._velocity = (- maxV) if ((self._position == self.max) and (self._velocity > 0)): self._velocity *= (-1) if (...
'See comments in base class.'
def newPosition(self, globalBestPosition, rng):
# Standard PSO velocity update: inertia term plus cognitive term toward the
# particle's own best; the social term toward globalBestPosition is in the
# elided tail — TODO confirm.
lb = float(Configuration.get('nupic.hypersearch.randomLowerBound')) ub = float(Configuration.get('nupic.hypersearch.randomUpperBound')) self._velocity = ((self._velocity * self._inertia) + ((rng.uniform(lb, ub) * self._cogRate) * (self._bestPosition - self.getPosition()))) if (globalBestPosition is not ...
'See comments in base class.'
def pushAwayFrom(self, otherPositions, rng):
# Builds a grid of candidate positions across [min, max] and — presumably —
# weights them by distance from otherPositions to pick the farthest; the
# weighting/selection code is elided — TODO confirm.
if (self.max == self.min): return numPositions = (len(otherPositions) * 4) if (numPositions == 0): return stepSize = (float((self.max - self.min)) / numPositions) positions = numpy.arange(self.min, (self.max + stepSize), stepSize) numPositions = len(positions) weights = numpy...
def resetVelocity(self, rng):
  """See comments in base class.

  Resets the velocity magnitude to 1/5 of the total range, with a random
  sign chosen via rng.
  """
  maxVelocity = (self.max - self.min) / 5.0
  self._velocity = maxVelocity
  # Randomize direction.
  self._velocity *= rng.choice([1, -1])
def __repr__(self):
  """See comments in base class."""
  return ('PermuteInt(min=%d, max=%d, stepSize=%d) [position=%d(%f), '
          'velocity=%f, _bestPosition=%s, _bestResult=%s]'
          % (self.min, self.max, self.stepSize, self.getPosition(),
             self._position, self._velocity, self._bestPosition,
             self._bestResult))
def getPosition(self):
  """See comments in base class.

  Quantizes the float position from the base class to the nearest int.
  """
  position = super(PermuteInt, self).getPosition()
  position = int(round(position))
  return position
def __repr__(self):
  """See comments in base class."""
  return ('PermuteChoices(choices=%s) [position=%s]'
          % (self.choices, self.choices[self._positionIdx]))
def getState(self):
  """See comments in base class.

  Choice variables have no velocity, so velocity is always None.
  """
  return dict(_position=self.getPosition(),
              position=self.getPosition(),
              velocity=None,
              bestPosition=self.choices[self._bestPositionIdx],
              bestResult=self._bestResult)
def setState(self, state):
  """See comments in base class.

  Positions are stored as choice values; convert them back to indices.
  Raises ValueError if a stored value is not in self.choices.
  """
  self._positionIdx = self.choices.index(state['_position'])
  self._bestPositionIdx = self.choices.index(state['bestPosition'])
  self._bestResult = state['bestResult']
def setResultsPerChoice(self, resultsPerChoice):
  """Set up our resultsPerChoice history based on the passed-in
  resultsPerChoice.

  For example, if this variable has the choices ['a', 'b', 'c'],
  resultsPerChoice will have up to 3 elements, each a tuple of
  (choiceValue, errors) where errors is the list of errors received from
  models that used that choice. (Docstring truncated in extract.)

  Parameters:
  ----------------------------------------------------------------
  resultsPerChoice:  iterable of (choiceValue, values) pairs
  """
  # Use a fresh list per slot: the original `[[]] * n` aliased one shared
  # empty list across every choice, which is a latent mutation hazard.
  self._resultsPerChoice = [[] for _ in range(len(self.choices))]
  for (choiceValue, values) in resultsPerChoice:
    choiceIndex = self.choices.index(choiceValue)
    self._resultsPerChoice[choiceIndex] = list(values)
def getPosition(self):
  """See comments in base class.

  Returns the currently selected choice value (not its index).
  """
  return self.choices[self._positionIdx]
def agitate(self):
  """See comments in base class.

  Choice variables have no velocity to amplify, so agitate is a no-op.
  """
  pass
# NOTE(review): flattened one-line extract, truncated at "..." — only comments
# are added; code is left byte-identical.
'See comments in base class.'
def newPosition(self, globalBestPosition, rng):
# Computes the mean error per choice from self._resultsPerChoice; the choice
# selection based on those means is in the elided tail — TODO confirm.
numChoices = len(self.choices) meanScorePerChoice = [] overallSum = 0 numResults = 0 for i in range(numChoices): if (len(self._resultsPerChoice[i]) > 0): data = numpy.array(self._resultsPerChoice[i]) meanScorePerChoice.append(data.mean()) overallSum += dat...
def pushAwayFrom(self, otherPositions, rng):
  """See comments in base class.

  Moves to the choice occupied by the fewest of the otherPositions
  (ties resolve to the lowest index, per argmin). Also resets the best
  position to the new position. rng is accepted for interface
  compatibility but unused here.
  """
  # Count how many other particles occupy each choice index.
  positions = [self.choices.index(x) for x in otherPositions]
  positionCounts = [0] * len(self.choices)
  for pos in positions:
    positionCounts[pos] += 1
  # Pick the least-occupied choice.
  self._positionIdx = numpy.array(positionCounts).argmin()
  self._bestPositionIdx = self._positionIdx
def resetVelocity(self, rng):
  """See comments in base class.

  Choice variables have no velocity, so this is a no-op.
  """
  pass
def __repr__(self):
  """See comments in base class."""
  # Render the permuted encoder kwargs as "key=value, " pairs.
  suffix = ''
  for (key, value) in self.kwArgs.items():
    suffix += ('%s=%s, ' % (key, value))
  return ('PermuteEncoder(fieldName=%s, encoderClass=%s, name=%s, %s)'
          % (self.fieldName, self.encoderClass, self.name, suffix))
# NOTE(review): the five bodies below are flattened one-line extracts
# truncated at "..." — only comments are added; code is left byte-identical.
'Return a dict that can be used to construct this encoder. This dict can be passed directly to the addMultipleEncoders() method of the multi encoder. Parameters: encoderName: name of the encoder flattenedChosenValues: dict of the flattened permutation variables. Any variables within this dict whose key star...
def getDict(self, encoderName, flattenedChosenValues):
# Resolves each PermuteVariable kwarg to its chosen value from
# flattenedChosenValues (keyed "encoderName:arg"); handling of dotted
# encoderClass names is in the elided tail — TODO confirm.
encoder = dict(fieldname=self.fieldName, name=self.name) for (encoderArg, value) in self.kwArgs.iteritems(): if isinstance(value, PermuteVariable): value = flattenedChosenValues[('%s:%s' % (encoderName, encoderArg))] encoder[encoderArg] = value if ('.' in self.encoderClass): ...
'Run a bunch of iterations on a PermuteVar and collect which positions were visited. Verify that they were all valid.'
def _testValidPositions(self, varClass, minValue, maxValue, stepSize, iterations=100):
# Test helper: drives a PermuteVariable with a fixed-seed RNG; the iteration
# loop is elided at "...".
positions = set() cogRate = 2.0 socRate = 2.0 inertia = None gBestPosition = maxValue lBestPosition = minValue foundBestPosition = None foundBestResult = None rng = random.Random() rng.seed(42) var = varClass(min=minValue, max=maxValue, stepSize=stepSize, inertia=inertia, cog...
'Test that we can converge on the right answer.'
def _testConvergence(self, varClass, minValue, maxValue, targetValue, iterations=100):
# Test helper: checks convergence toward targetValue; loop body elided.
gBestPosition = targetValue lBestPosition = targetValue foundBestPosition = None foundBestResult = None rng = random.Random() rng.seed(42) var = varClass(min=minValue, max=maxValue) for _ in xrange(iterations): pos = var.getPosition() if (self.verbosity >= 1): ...
'Run unit tests on this module.'
def run(self):
# Exercises _testValidPositions/_testConvergence over PermuteFloat and
# PermuteInt; remaining cases elided.
self.verbosity = 0 self._testValidPositions(varClass=PermuteFloat, minValue=2.1, maxValue=5.1, stepSize=0.5) self._testValidPositions(varClass=PermuteInt, minValue=2, maxValue=11, stepSize=3) self._testValidPositions(varClass=PermuteInt, minValue=2, maxValue=11, stepSize=1) self._testConvergence(var...
'Create a particle. There are 3 fundamentally different methods of instantiating a particle: 1.) You can instantiate a new one from scratch, at generation index #0. This particle gets a new particleId. required: swarmId optional: newFarFrom must be None: evolveFromState, newFromClone 2.) You can instantiate one from sa...
def __init__(self, hsObj, resultsDB, flattenedPermuteVars, swarmId=None, newFarFrom=None, evolveFromState=None, newFromClone=None, newParticleId=False):
# Particle constructor supporting new/evolved/cloned instantiation; the
# nested _setupVars helper filters permuteVars to the swarm's encoders —
# most of the body is elided at "...".
self._hsObj = hsObj self.logger = hsObj.logger self._resultsDB = resultsDB self._rng = random.Random() self._rng.seed(42) def _setupVars(flattenedPermuteVars): allowedEncoderNames = self.swarmId.split('.') self.permuteVars = copy.deepcopy(flattenedPermuteVars) varNames = ...
def getState(self):
  """Get the particle state as a dict. This is enough information to
  instantiate this particle on another worker.
  """
  varStates = dict()
  # .items() iterates identically on Python 2 and 3 (py2-only .iteritems()
  # replaced for forward compatibility).
  for (varName, var) in self.permuteVars.items():
    varStates[varName] = var.getState()
  return dict(id=self.particleId,
              genIdx=self.genIdx,
              swarmId=self.swarmId,
              varStates=varStates)
# NOTE(review): the three bodies below are flattened one-line extracts
# truncated at "..." — only comments are added; code is left byte-identical.
'Init all of our variable positions, velocities, and optionally the best result and best position from the given particle. If newBest is true, we get the best result and position for this new generation from the resultsDB, This is used when evoloving a particle because the bestResult and position as stored in was the b...
def initStateFrom(self, particleId, particleState, newBest):
# When newBest, pulls best result/position for particleId from resultsDB;
# per-variable state restoration is elided at "...".
if newBest: (bestResult, bestPosition) = self._resultsDB.getParticleBest(particleId) else: bestResult = bestPosition = None varStates = particleState['varStates'] for varName in varStates.keys(): varState = copy.deepcopy(varStates[varName]) if newBest: varStat...
'Copy all encoder variables from particleState into this particle. Parameters: particleState: dict produced by a particle\'s getState() method'
def copyEncoderStatesFrom(self, particleState):
# Encoder variables are identified by ':' in the name; variables not present
# in this particle's permuteVars are skipped. Tail elided.
allowedToMove = True for varName in particleState['varStates']: if (':' in varName): if (varName not in self.permuteVars): continue state = copy.deepcopy(particleState['varStates'][varName]) state['_position'] = state['position'] state['bes...
'Copy specific variables from particleState into this particle. Parameters: particleState: dict produced by a particle\'s getState() method varNames: which variables to copy'
def copyVarStatesFrom(self, particleState, varNames):
# Same pattern as copyEncoderStatesFrom but filtered by an explicit varNames
# list. Tail elided.
allowedToMove = True for varName in particleState['varStates']: if (varName in varNames): if (varName not in self.permuteVars): continue state = copy.deepcopy(particleState['varStates'][varName]) state['_position'] = state['position'] state...
def getPosition(self):
  """Return the position of this particle. This returns a dict() of key
  value pairs where each key is the name of the flattened permutation
  variable and the value is its chosen value.

  Parameters:
  ----------------------------------------------------------------
  retval:  dict() of flattened permutation choices
  """
  result = dict()
  # .items() iterates identically on Python 2 and 3.
  for (varName, value) in self.permuteVars.items():
    result[varName] = value.getPosition()
  return result
@staticmethod
def getPositionFromState(pState):
  """Return the position of a particle given its state dict.

  Parameters:
  ----------------------------------------------------------------
  pState:  particle state dict (as produced by getState())
  retval:  dict() of particle position; keys are the variable names,
           values are their positions
  """
  result = dict()
  # .items() iterates identically on Python 2 and 3.
  for (varName, value) in pState['varStates'].items():
    result[varName] = value['position']
  return result
def agitate(self):
  """Agitate this particle so that it is likely to go to a new position.
  Every time agitate is called, the particle is jiggled an even greater
  amount.

  Parameters:
  ----------------------------------------------------------------
  retval:  None
  """
  # Agitate each variable, then recompute the particle's position.
  for (varName, var) in self.permuteVars.items():
    var.agitate()
  self.newPosition()
# NOTE(review): the three bodies below are flattened one-line extracts
# truncated at "..." — only comments are added; code is left byte-identical.
'Choose a new position based on results obtained so far from all other particles. Parameters: whichVars: If not None, only move these variables retval: new position'
def newPosition(self, whichVars=None):
# Looks up the swarm's best model for the relevant generation (current gen
# when speculative particles are enabled, previous gen otherwise) to get a
# global best position; per-variable movement is elided at "...".
globalBestPosition = None if self._hsObj._speculativeParticles: genIdx = self.genIdx else: genIdx = (self.genIdx - 1) if (genIdx >= 0): (bestModelId, _) = self._resultsDB.bestModelIdAndErrScore(self.swarmId, genIdx) if (bestModelId is not None): (particleState...
'TODO: Documentation'
def __init__(self, jobID, jobsDAO, logLevel=None):
# Model-chooser constructor: wires up job DAO and a namespaced logger; the
# trailing log call is elided at "...".
self._jobID = jobID self._cjDB = jobsDAO self._lastUpdateAttemptTime = 0 initLogging(verbose=True) self.logger = logging.getLogger('.'.join(['com.numenta', self.__class__.__module__, self.__class__.__name__])) if (logLevel is not None): self.logger.setLevel(logLevel) self.logger.info...
'Chooses the best model for a given job. Parameters forceUpdate: (True/False). If True, the update will ignore all the restrictions on the minimum time to update and the minimum number of records to update. This should typically only be set to true if the model has completed running'
def updateResultsForJob(self, forceUpdate=True):
# Rate-limits updates to _MIN_UPDATE_INTERVAL unless forceUpdate; the actual
# model-selection logic is elided at "...".
updateInterval = (time.time() - self._lastUpdateAttemptTime) if ((updateInterval < self._MIN_UPDATE_INTERVAL) and (not forceUpdate)): return self.logger.info(('Attempting model selection for jobID=%d: time=%f lastUpdate=%f' % (self._jobID, time.time(), self._lastUpdateAttemptTi...
def __init__(self, options):
  """
  Parameters:
  ----------------------------------------------------------------
  options:  NupicRunPermutations options dict
  retval:   nothing
  """
  self.__cjDAO = _clientJobsDB()
  self._options = options
  self.__searchJob = None
  # NOTE: "Metrcs" typo is preserved — the attribute name is part of the
  # class's existing state and is read elsewhere.
  self.__foundMetrcsKeySet = set()
  # Will be a list of worker Popen instances if workers are launched.
  self._workers = None
  return
def runNewSearch(self):
  """Start a new hypersearch job and monitor it to completion.

  Parameters:
  ----------------------------------------------------------------
  retval:  nothing
  """
  self.__searchJob = self.__startSearch()
  self.monitorSearchJob()
def pickupSearch(self):
  """Pick up the latest search from a saved jobID and monitor it to
  completion.

  Parameters:
  ----------------------------------------------------------------
  retval:  nothing
  """
  self.__searchJob = self.loadSavedHyperSearchJob(
      permWorkDir=self._options['permWorkDir'],
      outputLabel=self._options['outputLabel'])
  self.monitorSearchJob()
# NOTE(review): flattened one-line extract, truncated at "..." — only comments
# are added; code is left byte-identical.
'Parameters: retval: nothing'
def monitorSearchJob(self):
# Polls the running search job, tracking finished model IDs and progress
# since the last update; the monitoring loop is elided at "...".
assert (self.__searchJob is not None) jobID = self.__searchJob.getJobID() startTime = time.time() lastUpdateTime = datetime.now() expectedNumModels = self.__searchJob.getExpectedNumModels(searchMethod=self._options['searchMethod']) lastNumFinished = 0 finishedModelIDs = set() finishedMod...
def _launchWorkers(self, cmdLine, numWorkers):
  """Launch worker processes to execute the given command line.

  Parameters:
  ----------------------------------------------------------------
  cmdLine:     the command line for each worker
  numWorkers:  number of workers to launch
  """
  self._workers = []
  for i in range(numWorkers):
    # Capture each worker's output in its own temp files so failures can
    # be inspected later.
    stdout = tempfile.TemporaryFile()
    stderr = tempfile.TemporaryFile()
    # NOTE(review): shell=True with a string cmdLine — safe only because
    # cmdLine is built internally, not from untrusted input.
    p = subprocess.Popen(cmdLine, bufsize=1, env=os.environ, shell=True,
                         stdin=None, stdout=stdout, stderr=stderr)
    self._workers.append(p)
# NOTE(review): flattened one-line extract, truncated at "..." — only comments
# are added; code is left byte-identical. Body uses Python 2 print statements.
'Starts HyperSearch as a worker or runs it inline for the "dryRun" action Parameters: retval: the new _HyperSearchJob instance representing the HyperSearch job'
def __startSearch(self):
# Builds the search-job params; for "dryRun" it assembles an inline argv and
# prints a banner — the job-insertion path is elided at "...".
params = _ClientJobUtils.makeSearchJobParamsDict(options=self._options, forRunning=True) if (self._options['action'] == 'dryRun'): args = [sys.argv[0], ('--params=%s' % json.dumps(params))] print print '==================================================================' print 'RU...
def peekSearchJob(self):
  """Retrieve the runner's _HyperSearchJob instance.

  NOTE: only available after run().

  Parameters:
  ----------------------------------------------------------------
  retval:  _HyperSearchJob instance or None
  """
  assert self.__searchJob is not None
  return self.__searchJob
def getDiscoveredMetricsKeys(self):
  """Return a tuple of all metrics keys discovered while running
  HyperSearch.

  NOTE: this is an optimization so that our client may use this info for
  generating the report csv file without having to pre-scan all modelInfos.

  Parameters:
  ----------------------------------------------------------------
  retval:  tuple of metrics keys discovered while running HyperSearch
  """
  # "Metrcs" typo is preserved — it is the attribute's actual name.
  return tuple(self.__foundMetrcsKeySet)
# NOTE(review): the two bodies below are flattened extracts (Python 2 print
# statements); generateReport is truncated at "..." and printModels appears
# cut short (searchParams is computed but unused in the visible text). Only
# comments are added; code is left byte-identical.
'Prints a listing of experiments that would take place without actually executing them. Parameters: options: NupicRunPermutations options dict retval: nothing'
@classmethod def printModels(cls, options):
# Presumably enumerates permutations from searchParams after this point —
# TODO confirm against the full source.
print 'Generating experiment requests...' searchParams = _ClientJobUtils.makeSearchJobParamsDict(options=options)
'Prints all available results in the given HyperSearch job and emits model information to the permutations report csv. The job may be completed or still in progress. Parameters: options: NupicRunPermutations options dict replaceReport: True to replace existing report csv, if any; False to append to existing rep...
@classmethod def generateReport(cls, options, replaceReport, hyperSearchJob, metricsKeys):
# Loads the saved job if none was passed, then iterates model infos to find
# the best model and collect metrics/variables; the loop body is elided.
if (hyperSearchJob is None): hyperSearchJob = cls.loadSavedHyperSearchJob(permWorkDir=options['permWorkDir'], outputLabel=options['outputLabel']) modelIDs = hyperSearchJob.queryModelIDs() bestModel = None metricstmp = set() searchVar = set() for modelInfo in _iterModels(modelIDs): ...
@classmethod
def loadSavedHyperSearchJob(cls, permWorkDir, outputLabel):
  """Instantiate a _HyperSearchJob instance from info saved in file.

  Parameters:
  ----------------------------------------------------------------
  permWorkDir:  directory path for saved jobID file
  outputLabel:  label string for incorporating into file name for saved
                jobID
  retval:       _HyperSearchJob instance; raises exception if not found
  """
  jobID = cls.__loadHyperSearchJobID(permWorkDir=permWorkDir,
                                     outputLabel=outputLabel)
  searchJob = _HyperSearchJob(nupicJobID=jobID)
  return searchJob
# NOTE(review): flattened one-line extract, truncated at "..." — only comments
# are added; code is left byte-identical.
'Saves the given _HyperSearchJob instance\'s jobID to file Parameters: permWorkDir: Directory path for saved jobID file outputLabel: Label string for incorporating into file name for saved jobID hyperSearchJob: _HyperSearchJob instance retval: nothing'
@classmethod def __saveHyperSearchJobID(cls, permWorkDir, outputLabel, hyperSearchJob):
# Backs up any existing jobID file, then pickles {'hyperSearchJobID': jobID}
# to the computed path; the pickle.dump call is elided at "...".
jobID = hyperSearchJob.getJobID() filePath = cls.__getHyperSearchJobIDFilePath(permWorkDir=permWorkDir, outputLabel=outputLabel) if os.path.exists(filePath): _backupFile(filePath) d = dict(hyperSearchJobID=jobID) with open(filePath, 'wb') as jobIdPickleFile: pickle.dump(d, jobIdPickl...
@classmethod
def __loadHyperSearchJobID(cls, permWorkDir, outputLabel):
  """Load a saved jobID from file.

  Parameters:
  ----------------------------------------------------------------
  permWorkDir:  directory path for saved jobID file
  outputLabel:  label string for incorporating into file name for saved
                jobID
  retval:       HyperSearch jobID; raises exception if not found
  """
  filePath = cls.__getHyperSearchJobIDFilePath(permWorkDir=permWorkDir,
                                               outputLabel=outputLabel)
  jobID = None
  with open(filePath, 'r') as jobIdPickleFile:
    jobInfo = pickle.load(jobIdPickleFile)
    jobID = jobInfo['hyperSearchJobID']
  return jobID
@classmethod
def __getHyperSearchJobIDFilePath(cls, permWorkDir, outputLabel):
  """Return the filepath where the HyperSearch JobID is stored.

  Parameters:
  ----------------------------------------------------------------
  permWorkDir:  directory path for saved jobID file
  outputLabel:  label string for incorporating into file name for saved
                jobID
  retval:       filepath where to store HyperSearch JobID
  """
  basePath = permWorkDir
  filename = '%s_HyperSearchJobID.pkl' % (outputLabel,)
  filepath = os.path.join(basePath, filename)
  return filepath
# NOTE(review): the five bodies below are flattened one-line extracts
# truncated at "..." (emit uses Python 2 `print >>` syntax) — only comments
# are added; code is left byte-identical.
'Parameters: hyperSearchJob: _HyperSearchJob instance metricsKeys: sequence of report metrics key names to include in report outputDirAbsPath: Directory for creating report CSV file (absolute path) outputLabel: A string label to incorporate into report CSV file name replaceReport: True to replace existing report...
def __init__(self, hyperSearchJob, metricsKeys, searchVar, outputDirAbsPath, outputLabel, replaceReport):
# Report CSV writer constructor; trailing initialization elided at "...".
self.__searchJob = hyperSearchJob self.__searchJobID = hyperSearchJob.getJobID() self.__sortedMetricsKeys = sorted(metricsKeys) self.__outputDirAbsPath = os.path.abspath(outputDirAbsPath) self.__outputLabel = outputLabel self.__replaceReport = replaceReport self.__sortedVariableNames = searc...
'Emit model info to csv file Parameters: modelInfo: _NupicModelInfo instance retval: nothing'
def emit(self, modelInfo):
# Lazily opens/initializes the CSV on first emit, then writes one row of
# job/model/status fields; remaining columns elided at "...".
if (self.__csvFileObj is None): self.__openAndInitCSVFile(modelInfo) csv = self.__csvFileObj print >>csv, ('%s, ' % self.__searchJobID), print >>csv, ('%s, ' % modelInfo.getModelID()), print >>csv, ('%s, ' % modelInfo.statusAsString()), if modelInfo.isFinished(): print >...
'Close file and print report/backup csv file paths Parameters: retval: nothing'
def finalize(self):
# Closes the CSV (if open) and reports where the csv (and any backup) was
# written; tail elided at "...".
if (self.__csvFileObj is not None): self.__csvFileObj.close() self.__csvFileObj = None print ('Report csv saved in %s' % (self.__reportCSVPath,)) if self.__backupCSVPath: print ('Previous report csv file was backed up to %s' % (self.__b...
'- Backs up old report csv file; - opens the report csv file in append or overwrite mode (per self.__replaceReport); - emits column fields; - sets up self.__sortedVariableNames, self.__csvFileObj, self.__backupCSVPath, and self.__reportCSVPath Parameters: modelInfo: First _NupicModelInfo instance passed to emit() ...
def __openAndInitCSVFile(self, modelInfo):
# Computes report path, backs up any existing csv; open-mode handling and
# header emission elided at "...".
basePath = self.__outputDirAbsPath reportCSVName = ('%s_Report.csv' % (self.__outputLabel,)) reportCSVPath = self.__reportCSVPath = os.path.join(basePath, reportCSVName) backupCSVPath = None if os.path.exists(reportCSVPath): backupCSVPath = self.__backupCSVPath = _backupFile(reportCSVPath) ...
'_NupicJob constructor Parameters: retval: Nupic Client JobID of the job'
def __init__(self, nupicJobID):
# Validates the job exists and matches the requested ID; debug logging call
# elided at "...".
self.__nupicJobID = nupicJobID jobInfo = _clientJobsDB().jobInfo(nupicJobID) assert (jobInfo is not None), ('jobID=%s not found' % nupicJobID) assert (jobInfo.jobId == nupicJobID), ('%s != %s' % (jobInfo.jobId, nupicJobID)) _emit(Verbosity.DEBUG, ('_NupicJob: \n%s' % pprint.pformat(jo...
def __repr__(self):
  """
  Parameters:
  ----------------------------------------------------------------
  retval:  representation of this _NupicJob instance
  """
  return '%s(jobID=%s)' % (self.__class__.__name__, self.__nupicJobID)
def getJobStatus(self, workers):
  """
  Parameters:
  ----------------------------------------------------------------
  workers:  if this job was launched outside of the nupic job engine, then
            this is an array of subprocess Popen instances, one for each
            worker
  retval:   _NupicJob.JobStatus instance
  """
  jobInfo = self.JobStatus(self.__nupicJobID, workers)
  return jobInfo
def getJobID(self):
  """Semi-private method for retrieving the jobId.

  Parameters:
  ----------------------------------------------------------------
  retval:  Nupic Client JobID of this _NupicJob instance
  """
  return self.__nupicJobID

def getParams(self):
  """Semi-private method for retrieving the job-specific params.

  Parameters:
  ----------------------------------------------------------------
  retval:  job params dict corresponding to the JSON params value returned
           by ClientJobsDAO.jobInfo()
  """
  return self.__params
# NOTE(review): flattened one-line extract, truncated at "..." — only comments
# are added; code is left byte-identical.
'_NupicJob.JobStatus Constructor Parameters: nupicJobID: Nupic ClientJob ID workers: If this job was launched outside of the Nupic job engine, then this is an array of subprocess Popen instances, one for each worker retval: nothing'
def __init__(self, nupicJobID, workers):
# Fetches job info, validates the ID, and counts still-running workers by
# polling each Popen; the tail (what is done with runningCount) is elided.
jobInfo = _clientJobsDB().jobInfo(nupicJobID) assert (jobInfo.jobId == nupicJobID), ('%s != %s' % (jobInfo.jobId, nupicJobID)) if (workers is not None): runningCount = 0 for worker in workers: retCode = worker.poll() if (retCode is None): running...
def statusAsString(self):
  """
  Parameters:
  ----------------------------------------------------------------
  retval:  job status as a human-readable string
  """
  return self.__jobInfo.status
def isWaitingToStart(self):
  """
  Parameters:
  ----------------------------------------------------------------
  retval:  True if the job has not been started yet
  """
  waiting = (self.__jobInfo.status == self.__nupicJobStatus_NotStarted)
  return waiting

def isStarting(self):
  """
  Parameters:
  ----------------------------------------------------------------
  retval:  True if the job is starting
  """
  starting = (self.__jobInfo.status == self.__nupicJobStatus_Starting)
  return starting

def isRunning(self):
  """
  Parameters:
  ----------------------------------------------------------------
  retval:  True if the job is running
  """
  running = (self.__jobInfo.status == self.__nupicJobStatus_running)
  return running

def isFinished(self):
  """
  Parameters:
  ----------------------------------------------------------------
  retval:  True if the job has finished (either with success or failure)
  """
  done = (self.__jobInfo.status == self.__nupicJobStatus_completed)
  return done
def getCompletionReason(self):
  """Return the _JobCompletionReason.

  NOTE: it's an error to call this method if isFinished() would return
  False.

  Parameters:
  ----------------------------------------------------------------
  retval:  _JobCompletionReason instance
  """
  assert self.isFinished(), 'Too early to tell: %s' % self
  return _JobCompletionReason(self.__jobInfo.completionReason)

def getCompletionMsg(self):
  """Return the job completion message.

  NOTE: it's an error to call this method if isFinished() would return
  False.

  Parameters:
  ----------------------------------------------------------------
  retval:  completion message
  """
  assert self.isFinished(), 'Too early to tell: %s' % self
  return '%s' % self.__jobInfo.completionMsg

def getWorkerCompletionMsg(self):
  """Return the worker-generated completion message.

  NOTE: it's an error to call this method if isFinished() would return
  False.

  Parameters:
  ----------------------------------------------------------------
  retval:  completion message
  """
  assert self.isFinished(), 'Too early to tell: %s' % self
  return '%s' % self.__jobInfo.workerCompletionMsg

def getStartTime(self):
  """Return the job start time.

  NOTE: it's an error to call this method if isWaitingToStart() would
  return True.

  Parameters:
  ----------------------------------------------------------------
  retval:  job processing start time
  """
  assert not self.isWaitingToStart(), 'Too early to tell: %s' % self
  return '%s' % self.__jobInfo.startTime

def getEndTime(self):
  """Return the job end time.

  NOTE: it's an error to call this method if isFinished() would return
  False.

  Parameters:
  ----------------------------------------------------------------
  retval:  job processing end time
  """
  assert self.isFinished(), 'Too early to tell: %s' % self
  return '%s' % self.__jobInfo.endTime
def getWorkerState(self):
  """Return the worker state field.

  Parameters:
  ----------------------------------------------------------------
  retval:  worker state field as a dict, or None if not set
  """
  if self.__jobInfo.engWorkerState is not None:
    return json.loads(self.__jobInfo.engWorkerState)
  else:
    return None

def getResults(self):
  """Return the results field.

  Parameters:
  ----------------------------------------------------------------
  retval:  job results field as a dict, or None if not set
  """
  if self.__jobInfo.results is not None:
    return json.loads(self.__jobInfo.results)
  else:
    return None

def getModelMilestones(self):
  """Return the model milestones field.

  Parameters:
  ----------------------------------------------------------------
  retval:  model milestones as a dict, or None if not set
  """
  if self.__jobInfo.engModelMilestones is not None:
    return json.loads(self.__jobInfo.engModelMilestones)
  else:
    return None

def getEngStatus(self):
  """Return the engine status field - used for progress messages.

  Parameters:
  ----------------------------------------------------------------
  retval:  engine status field as string
  """
  return self.__jobInfo.engStatus
def __init__(self, reason):
  """
  Parameters:
  ----------------------------------------------------------------
  reason:  completion reason value from ClientJobsDAO.jobInfo()
  """
  self.__reason = reason
def __init__(self, nupicJobID):
  """
  Parameters:
  ----------------------------------------------------------------
  nupicJobID:  Nupic Client JobID of a HyperSearch job
  retval:      nothing
  """
  super(_HyperSearchJob, self).__init__(nupicJobID)
  # Cached lazily; None means "not yet computed".
  self.__expectedNumModels = None
def queryModelIDs(self):
  """Query the DB for model IDs of all currently instantiated models
  associated with this HyperSearch job.

  See also: _iterModels()

  Parameters:
  ----------------------------------------------------------------
  retval:  a sequence of Nupic modelIDs
  """
  jobID = self.getJobID()
  modelCounterPairs = _clientJobsDB().modelsGetUpdateCounters(jobID)
  # Each pair is (modelID, updateCounter); keep only the IDs.
  modelIDs = tuple(x[0] for x in modelCounterPairs)
  return modelIDs
def getExpectedNumModels(self, searchMethod):
  """Return the total number of expected models if known, -1 if it can't
  be determined.

  NOTE: this can take a LONG time to complete for HyperSearches with a
  huge number of possible permutations.

  Parameters:
  ----------------------------------------------------------------
  searchMethod:  "v2" is the only method currently supported
  retval:        the total number of expected models (cached value;
                 searchMethod is currently unused here)
  """
  return self.__expectedNumModels
# NOTE(review): flattened one-line extract, truncated at "..." — only comments
# are added; code is left byte-identical.
'Constructs a dictionary of HyperSearch parameters suitable for converting to json and passing as the params argument to ClientJobsDAO.jobInsert() Parameters: options: NupicRunPermutations options dict forRunning: True if the params are for running a Hypersearch job; False if params are for introspection onl...
@classmethod def makeSearchJobParamsDict(cls, options, forRunning=False):
# Only the "v2" search method is supported; dryRun defaults maxModels to 1.
# The construction of the remaining params dict is elided at "...".
if (options['searchMethod'] == 'v2'): hsVersion = 'v2' else: raise Exception(('Unsupported search method: %r' % options['searchMethod'])) maxModels = options['maxPermutations'] if ((options['action'] == 'dryRun') and (maxModels is None)): maxModels = 1 useTerminators...
@classmethod
def getOptimizationMetricInfo(cls, searchJobParams):
  """Retrieve the optimization key name and optimization function.

  Parameters:
  ----------------------------------------------------------------
  searchJobParams:  parameter for passing as the searchParams arg to the
                    Hypersearch constructor
  retval:           (optimizationMetricKey, maximize)
                    optimizationMetricKey: which report key to optimize for
                    maximize: True if we should try and maximize the
                    optimize key; False if we should minimize it
  """
  if searchJobParams['hsVersion'] == 'v2':
    search = HypersearchV2(searchParams=searchJobParams)
  else:
    raise RuntimeError('Unsupported hypersearch version "%s"'
                       % searchJobParams['hsVersion'])
  info = search.getOptimizationMetricInfo()
  return info