desc
stringlengths
3
26.7k
decl
stringlengths
11
7.89k
bodies
stringlengths
8
553k
def testScalarSpaceEncoder(self):
    """A 'delta' scalar space yields a DeltaEncoder; 'absolute' does not."""
    delta_encoder = ScalarSpaceEncoder(1, 1, 2, False, 2, 1, 1, None, 0, False,
                                       'delta', forced=True)
    self.assertTrue(isinstance(delta_encoder, DeltaEncoder))
    absolute_encoder = ScalarSpaceEncoder(1, 1, 2, False, 2, 1, 1, None, 0, False,
                                          'absolute', forced=True)
    self.assertFalse(isinstance(absolute_encoder, DeltaEncoder))
'assert unrelated areas don"t share bits (outside of chance collisions)'
def testEncodeUnrelatedAreas(self):
avgThreshold = 0.3 maxThreshold = 0.12 overlaps = overlapsForUnrelatedAreas(1499, 37, 5) self.assertLess(np.max(overlaps), maxThreshold) self.assertLess(np.average(overlaps), avgThreshold) maxThreshold = 0.12 overlaps = overlapsForUnrelatedAreas(1499, 37, 10) self.assertLess(np.max(overl...
'simple delta reconstruction test'
def testDeltaEncoder(self):
for i in range(5): encarr = self._dencoder.encodeIntoArray(i, np.zeros(100), learn=True) self._dencoder.setStateLock(True) for i in range(5, 7): encarr = self._dencoder.encodeIntoArray(i, np.zeros(100), learn=True) res = self._dencoder.topDownCompute(encarr) self.assertEqual(res[0].v...
'encoding verification test passed'
def testEncodingVerification(self):
feedIn = [1, 10, 4, 7, 9, 6, 3, 1] expectedOut = [0, 9, (-6), 3, 2, (-3), (-3), (-2)] self._dencoder.setStateLock(False) for i in range(len(feedIn)): aseencode = np.zeros(100) self._adaptscalar.encodeIntoArray(expectedOut[i], aseencode, learn=True) delencode = np.zeros(100) ...
'Check that locking the state works correctly'
def testLockingState(self):
feedIn = [1, 10, 9, 7, 9, 6, 3, 1] expectedOut = [0, 9, (-6), 3, 2, (-3), (-3), (-2)] for i in range(len(feedIn)): if (i == 3): self._dencoder.setStateLock(True) aseencode = np.zeros(100) self._adaptscalar.encodeIntoArray(expectedOut[i], aseencode, learn=True) del...
def testDateEncoder(self):
    """Creating date encoder instance: check field description and bits."""
    expected_description = [('season', 0), ('day of week', 12),
                            ('weekend', 19), ('time of day', 21)]
    self.assertSequenceEqual(self._e.getDescription(), expected_description)
    self.assertTrue(numpy.array_equal(self._expected, self._bits))
def testMissingValues(self):
    """Missing input must encode to an all-zero output."""
    encoded = self._e.encode(SENTINEL_VALUE_FOR_MISSING_DATA)
    self.assertEqual(sum(encoded), 0)
'decoding date'
def testDecoding(self):
decoded = self._e.decode(self._bits) (fieldsDict, _) = decoded self.assertEqual(len(fieldsDict), 4) (ranges, _) = fieldsDict['season'] self.assertEqual(len(ranges), 1) self.assertSequenceEqual(ranges[0], [305, 305]) (ranges, _) = fieldsDict['time of day'] self.assertEqual(len(range...
def testTopDownCompute(self):
    """Check topDownCompute recovers values close to the encoded ones."""
    top_down = self._e.topDownCompute(self._bits)
    actual = numpy.array([elem.value for elem in top_down])
    expected = numpy.array([320.25, 3.5, 0.167, 14.8])
    self.assertAlmostEqual((actual - expected).max(), 0, 4)
'Check bucket index support'
def testBucketIndexSupport(self):
bucketIndices = self._e.getBucketIndices(self._d) topDown = self._e.getBucketInfo(bucketIndices) topDownValues = numpy.array([elem.value for elem in topDown]) errs = (topDownValues - numpy.array([320.25, 3.5, 0.167, 14.8])) self.assertAlmostEqual(errs.max(), 0, 4) encodings = [] for x in top...
'look at holiday more carefully because of the smooth transition'
def testHoliday(self):
e = DateEncoder(holiday=5, forced=True) holiday = numpy.array([0, 0, 0, 0, 0, 1, 1, 1, 1, 1], dtype='uint8') notholiday = numpy.array([1, 1, 1, 1, 1, 0, 0, 0, 0, 0], dtype='uint8') holiday2 = numpy.array([0, 0, 0, 1, 1, 1, 1, 1, 0, 0], dtype='uint8') d = datetime.datetime(2010, 12, 25, 4, 55) se...
'Test weekend encoder'
def testWeekend(self):
e = DateEncoder(customDays=(21, ['sat', 'sun', 'fri']), forced=True) mon = DateEncoder(customDays=(21, 'Monday'), forced=True) e2 = DateEncoder(weekend=(21, 1), forced=True) d = datetime.datetime(1988, 5, 29, 20, 0) self.assertTrue(numpy.array_equal(e.encode(d), e2.encode(d))) for _ in range(300...
def testEncodeArray(self):
    """Send bitmap as an array of indices; decode must name this encoder."""
    encoder = self._encoder(self.n, name=self.name)
    bitmap = [2, 7, 15, 18, 23]
    output = encoder.encode(bitmap)
    self.assertEqual(output.sum(), len(bitmap))
    decoded = encoder.decode(output)
    self.assertIsInstance(decoded[0], dict)
    self.assertTrue(self.name in decoded[0])
def testEncodeArrayInvalidW(self):
    """Bitmaps whose size differs from the configured w must be rejected."""
    encoder = self._encoder(self.n, 3, name=self.name)
    with self.assertRaises(ValueError):
        encoder.encode([2])
    with self.assertRaises(ValueError):
        encoder.encode([2, 7, 15, 18, 23])
'Compare two bitmaps for closeness'
def testClosenessScores(self):
e = self._encoder(self.n, name=self.name) 'Identical => 1' bitmap1 = [2, 7, 15, 18, 23] bitmap2 = [2, 7, 15, 18, 23] out1 = e.encode(bitmap1) out2 = e.encode(bitmap2) c = e.closenessScores(out1, out2) self.assertEqual(c[0], 1.0) 'No overlap => 0' bitmap1 = [2, 7, 1...
'Verify that the values of buckets are as expected for given init params'
def testGetBucketValues(self):
le = LogEncoder(w=5, resolution=0.1, minval=1, maxval=10000, name='amount', forced=True) inc = 0.1 exp = 0 expected = [] while (exp <= 4.0001): val = (10 ** exp) expected.append(val) exp += inc expected = numpy.array(expected) actual = numpy.array(le.getBucketValues()...
'Verifies you can use radius to specify a log encoder'
def testInitWithRadius(self):
le = LogEncoder(w=1, radius=1, minval=1, maxval=10000, name='amount', forced=True) self.assertEqual(le.encoder.n, 5) value = 1.0 output = le.encode(value) expected = [1, 0, 0, 0, 0] expected = numpy.array(expected, dtype='uint8') self.assertTrue(numpy.array_equal(output, expected)) value...
def testInitWithN(self):
    """Verifies you can use n to specify a log encoder."""
    size = 100
    encoder = LogEncoder(n=size, forced=True)
    self.assertEqual(encoder.encoder.n, size)
'Verifies unusual instances of minval and maxval are handled properly'
def testMinvalMaxVal(self):
self.assertRaises(ValueError, LogEncoder, n=100, minval=0, maxval=(-100), forced=True) self.assertRaises(ValueError, LogEncoder, n=100, minval=0, maxval=1e-07, forced=True) le = LogEncoder(n=100, minval=42, maxval=1300000000000.0, forced=True) expectedRadius = 0.552141792732 expectedResolution = 0.1...
'configuration.Configuration relies on static methods which load files by name. Since we need to be able to run tests and potentially change the content of those files between tests without interfering with one another and with the system configuration, this setUp() function will allocate temporary files used only dur...
def setUp(self):
self.files = {} with tempfile.NamedTemporaryFile(prefix='nupic-default.xml-unittest-', delete=False) as outp: self.addCleanup(os.remove, outp.name) with open(resource_filename(__name__, 'conf/nupic-default.xml')) as inp: outp.write(inp.read()) self.files['nupic-default.xml'] ...
def mockSleepTime(self, mockTime, mockSleep):
    """Wire time.time/time.sleep mocks so each sleep(x) advances time() by x.

    mockTime: time.time mock
    mockSleep: time.sleep mock
    """
    class _TimeContainer(object):
        # Shared accumulator for the two fakes below.
        accumulatedTime = 0

    def fakeTime():
        return _TimeContainer.accumulatedTime

    def fakeSleep(duration):
        _TimeContainer.accumulatedTime += duration

    mockTime.side_effect = fakeTime
    mockSleep.side_effect = fakeSleep
'Test that when timeoutSec == 0, function is executed exactly once with no retries, and raises an exception on failure.'
@patch('time.sleep', autospec=True) @patch('time.time', autospec=True) def testRetryNoTimeForRetries(self, mockTime, mockSleep):
self.mockSleepTime(mockTime, mockSleep) retryDecorator = decorators.retry(timeoutSec=0, initialRetryDelaySec=0.2, maxRetryDelaySec=10) testFunction = Mock(side_effect=TestParentException('Test exception'), __name__='testFunction', autospec=True) with self.assertRaises(TestParentException): re...
'Test that delay times are correct.'
@patch('time.sleep', autospec=True) @patch('time.time', autospec=True) def testRetryWaitsInitialRetryDelaySec(self, mockTime, mockSleep):
self.mockSleepTime(mockTime, mockSleep) retryDecorator = decorators.retry(timeoutSec=30, initialRetryDelaySec=2, maxRetryDelaySec=10) testFunction = Mock(side_effect=TestParentException('Test exception'), __name__='testFunction', autospec=True) with self.assertRaises(TestParentException): ret...
'Test that retry is triggered if raised exception is in retryExceptions.'
@patch('time.sleep', autospec=True) @patch('time.time', autospec=True) def testRetryRetryExceptionIncluded(self, mockTime, mockSleep):
self.mockSleepTime(mockTime, mockSleep) retryDecorator = decorators.retry(timeoutSec=1, initialRetryDelaySec=1, maxRetryDelaySec=10, retryExceptions=(TestParentException,)) @retryDecorator def testFunction(): raise TestChildException('Test exception') with self.assertRaises(TestChildExcep...
'Test that retry is not triggered if raised exception is not in retryExceptions'
@patch('time.sleep', autospec=True) @patch('time.time', autospec=True) def testRetryRetryExceptionExcluded(self, mockTime, mockSleep):
self.mockSleepTime(mockTime, mockSleep) class TestExceptionA(Exception, ): pass class TestExceptionB(Exception, ): pass retryDecorator = decorators.retry(timeoutSec=1, initialRetryDelaySec=1, maxRetryDelaySec=10, retryExceptions=(TestExceptionA,)) @retryDecorator def testFunction...
'Test that if retryFilter is specified and exception is in retryExceptions, retries iff retryFilter returns true.'
@patch('time.sleep', autospec=True) @patch('time.time', autospec=True) def testRetryRetryFilter(self, mockTime, mockSleep):
self.mockSleepTime(mockTime, mockSleep) retryDecoratorTrueFilter = decorators.retry(timeoutSec=1, initialRetryDelaySec=1, maxRetryDelaySec=10, retryExceptions=(TestParentException,), retryFilter=(lambda _1, _2, _3: True)) @retryDecoratorTrueFilter def testFunctionTrue(): raise TestChildException...
'Test that docorated function receives only expected args and that it returns the expected value on success.'
@patch('time.sleep', autospec=True) @patch('time.time', autospec=True) def testReturnsExpectedWithExpectedArgs(self, mockTime, mockSleep):
self.mockSleepTime(mockTime, mockSleep) retryDecorator = decorators.retry(timeoutSec=30, initialRetryDelaySec=2, maxRetryDelaySec=10) testFunction = Mock(return_value=321, __name__='testFunction', autospec=True) returnValue = retryDecorator(testFunction)(1, 2, a=3, b=4) self.assertEqual(returnValue,...
'If the initial call succeeds, test that no retries are performed.'
@patch('time.sleep', autospec=True) @patch('time.time', autospec=True) def testNoRetryIfCallSucceeds(self, mockTime, mockSleep):
self.mockSleepTime(mockTime, mockSleep) retryDecorator = decorators.retry(timeoutSec=30, initialRetryDelaySec=2, maxRetryDelaySec=10) testFunction = Mock(__name__='testFunction', autospec=True) retryDecorator(testFunction)() testFunction.assert_called_once_with()
'If initial attempts fail but subsequent attempt succeeds, ensure that expected number of retries is performed and expected value is returned.'
@patch('time.sleep', autospec=True) @patch('time.time', autospec=True) def testFailsFirstSucceedsLater(self, mockTime, mockSleep):
self.mockSleepTime(mockTime, mockSleep) retryDecorator = decorators.retry(timeoutSec=30, initialRetryDelaySec=2, maxRetryDelaySec=10) testFunction = Mock(side_effect=[TestParentException('Test exception 1'), TestParentException('Test exception 2'), 321], __name__='testFunction', autospec=True) ...
'Test generic usage of serializable mixin class'
def testReadFromAndWriteToFile(self):
class Bar(object, ): pass class Foo(Bar, Serializable, ): def __init__(self, bar): self.bar = bar @classmethod def getSchema(cls): return serializable_test_capnp.Foo @classmethod def read(cls, proto): foo = object.__new__(cls) ...
'Get the predictions and prediction confidences for all examples.'
@classmethod def setUpClass(cls):
for example in cls.examples: predictionGenerator = _getPredictionsGenerator(cls.examplesDir, example) for prediction in predictionGenerator(MAX_PREDICTIONS): cls.oneStepPredictions[example].append(prediction[0]) cls.oneStepConfidences[example].append(prediction[1]) ...
def testExamplesDirExists(self):
    """The examples directory must exist on disk."""
    path = ExamplesTest.examplesDir
    message = 'Path to examples does not exist: %s' % path
    self.assertTrue(os.path.exists(path), message)
def testNumberOfOneStepPredictions(self):
    """All examples must output the same number of one-step predictions."""
    # assertEquals is a deprecated unittest alias (removed in Python 3.12);
    # use assertEqual.
    opfCount = len(ExamplesTest.oneStepPredictions['opf'])
    self.assertEqual(opfCount, len(ExamplesTest.oneStepPredictions['algo']))
    self.assertEqual(opfCount, len(ExamplesTest.oneStepPredictions['network']))
'Make sure one-step predictions are the same for OPF and Algo API.'
@unittest.expectedFailure def testOneStepPredictionsOpfVsAlgo(self):
for resultPair in zip(self.oneStepPredictions['opf'], self.oneStepPredictions['algo']): assert_approx_equal(err_msg="one-step 'opf' and 'algo' differ", *resultPair)
'Make sure one-step predictions are the same for OPF and Network API.'
@unittest.expectedFailure def testOneStepPredictionsOpfVsNetwork(self):
for resultPair in zip(self.oneStepPredictions['opf'], self.oneStepPredictions['network']): assert_approx_equal(err_msg="one-step 'opf' and 'network' differ", *resultPair)
'Make sure one-step predictions are the same for Algo and Network API.'
@unittest.expectedFailure def testOneStepPredictionsAlgoVsNetwork(self):
for resultPair in zip(self.oneStepPredictions['algo'], self.oneStepPredictions['network']): assert_approx_equal(err_msg="one-step 'algo' and 'network' differ", *resultPair)
'Make sure five-step predictions are the same for OPF and Network API.'
@unittest.expectedFailure def testFiveStepPredictionsOpfVsNetwork(self):
for resultPair in zip(self.fiveStepPredictions['opf'], self.fiveStepPredictions['network']): assert_approx_equal(err_msg="five-step 'opf' and 'network' differ", *resultPair)
'Make sure one-step confidences are the same for OPF and Algo API.'
@unittest.expectedFailure def testOneStepConfidencesOpfVsAlgo(self):
for resultPair in zip(self.oneStepConfidences['opf'], self.oneStepConfidences['algo']): assert_approx_equal(err_msg="one-step 'opf' and 'algo' differ", *resultPair)
'Make sure one-step confidences are the same for OPF and Network API.'
@unittest.expectedFailure def testOneStepConfidencesOpfVsNetwork(self):
for resultPair in zip(self.oneStepConfidences['opf'], self.oneStepConfidences['network']): assert_approx_equal(err_msg="one-step 'opf' and 'network' differ", *resultPair)
'Make sure one-step confidences are the same for Algo and Network API.'
@unittest.expectedFailure def testOneStepConfidencesAlgoVsNetwork(self):
for resultPair in zip(self.oneStepConfidences['algo'], self.oneStepConfidences['network']): assert_approx_equal(err_msg="one-step 'algo' and 'network' differ", *resultPair)
'Make sure five-step confidences are the same for OPF and Network API.'
@unittest.expectedFailure def testFiveStepConfidencesOpfVsNetwork(self):
for resultPair in zip(self.fiveStepConfidences['opf'], self.fiveStepConfidences['network']): assert_approx_equal(err_msg="five-step 'opf' and 'network' differ", *resultPair)
'Runs basic FileRecordStream tests.'
def testBasic(self):
filename = _getTempFileName() fields = [FieldMetaInfo('name', FieldMetaType.string, FieldMetaSpecial.none), FieldMetaInfo('timestamp', FieldMetaType.datetime, FieldMetaSpecial.timestamp), FieldMetaInfo('integer', FieldMetaType.integer, FieldMetaSpecial.none), FieldMetaInfo('real', FieldMetaType.float, FieldMeta...
'Runs FileRecordStream tests with multiple category fields.'
def testMultipleClasses(self):
filename = _getTempFileName() fields = [FieldMetaInfo('name', FieldMetaType.string, FieldMetaSpecial.none), FieldMetaInfo('timestamp', FieldMetaType.datetime, FieldMetaSpecial.timestamp), FieldMetaInfo('integer', FieldMetaType.integer, FieldMetaSpecial.none), FieldMetaInfo('real', FieldMetaType.float, FieldMeta...
'data looks like: should generate deltas "t" "s" "dt" "ds" t 10 X t+1s 20 1s 10 t+1d 50 86399 30 r t+1d+1s 60 X r+1d+3s 65 2s 5'
@unittest.skip('Disabled until we figure out why it is failing in internal tests') def testDeltaFilter(self):
r = RecordSensor() filename = resource_filename('nupic.datafiles', 'extra/qa/delta.csv') datasource = FileRecordStream(filename) r.dataSource = datasource n = 50 encoder = MultiEncoder({'blah': dict(fieldname='s', type='ScalarEncoder', n=n, w=11, minval=0, maxval=100)}) r.encoder = encoder ...
def getNextRecord(self, useCache=True):
    """[ABC method implementation]

    retval: a data row (a list or tuple) if available; None if no more
    records in the table (End of Stream - EOS); an empty sequence (list
    or tuple) when timing out while waiting for the next record.
    """
    # Stub implementation: this stream never yields records.
    pass
def getFieldNames(self):
    """[ABC method implementation] Return the stream's field names."""
    return self._fieldNames
def getFields(self):
    """[ABC method implementation] Return the stream's field metadata."""
    return self._fieldsMeta
'Test that the (internal) moving average maintains the averages correctly, even for null initial condition and when the number of values goes over windowSize. Pass in integers and floats.'
def testMovingAverage(self):
historicalValues = [] total = 0 windowSize = 3 (newAverage, historicalValues, total) = MovingAverage.compute(historicalValues, total, 3, windowSize) self.assertEqual(newAverage, 3.0) self.assertEqual(historicalValues, [3.0]) self.assertEqual(total, 3.0) (newAverage, historicalValues, tot...
'Test that the (internal) moving average maintains the averages correctly, even for null initial condition and when the number of values goes over windowSize. Pass in integers and floats. this is for the instantce method next()'
def testMovingAverageInstance(self):
ma = MovingAverage(windowSize=3) newAverage = ma.next(3) self.assertEqual(newAverage, 3.0) self.assertListEqual(ma.getSlidingWindow(), [3.0]) self.assertEqual(ma.total, 3.0) newAverage = ma.next(4) self.assertEqual(newAverage, 3.5) self.assertListEqual(ma.getSlidingWindow(), [3.0, 4.0]) ...
def testMovingAverageSlidingWindowInit(self):
    """slidingWindow is seeded from existingHistoricalValues, else empty."""
    seeded = MovingAverage(windowSize=3,
                           existingHistoricalValues=[3.0, 4.0, 5.0])
    self.assertListEqual(seeded.getSlidingWindow(), [3.0, 4.0, 5.0])
    unseeded = MovingAverage(windowSize=3)
    self.assertListEqual(unseeded.getSlidingWindow(), [])
def testSerialization(self):
    """A pickled-and-restored MovingAverage behaves like the original."""
    original = MovingAverage(windowSize=3)
    for value in (3, 4.5, 5):
        original.next(value)
    restored = pickle.loads(pickle.dumps(original))
    self.assertEqual(restored, original)
    self.assertEqual(original.next(6), restored.next(6))
def fromutc(self, dt):
    """See datetime.tzinfo.fromutc"""
    # Static zone: shift by the fixed UTC offset and attach this tzinfo.
    shifted = dt + self._utcoffset
    return shifted.replace(tzinfo=self)
def utcoffset(self, dt):
    """See datetime.tzinfo.utcoffset"""
    # Fixed offset for this zone; `dt` is ignored.
    return self._utcoffset
def dst(self, dt):
    """See datetime.tzinfo.dst"""
    # Static zone: no daylight-saving shift (module-level zero delta).
    return _notime
def tzname(self, dt):
    """See datetime.tzinfo.tzname"""
    # Fixed name for this zone; `dt` is ignored.
    return self._tzname
def localize(self, dt, is_dst=False):
    """Convert naive time to local time.

    Raises ValueError if `dt` already carries a tzinfo. `is_dst` is
    ignored for this static zone.
    """
    # `raise ValueError, msg` is Python 2-only syntax (SyntaxError on
    # Python 3); the call form behaves identically on both.
    if dt.tzinfo is not None:
        raise ValueError('Not naive datetime (tzinfo is already set)')
    return dt.replace(tzinfo=self)
def normalize(self, dt, is_dst=False):
    """Correct the timezone information on the given datetime.

    Raises ValueError if `dt` is naive. `is_dst` is ignored for this
    static zone.
    """
    # `raise ValueError, msg` is Python 2-only syntax (SyntaxError on
    # Python 3); the call form behaves identically on both.
    if dt.tzinfo is None:
        raise ValueError('Naive time - no tzinfo set')
    return dt.replace(tzinfo=self)
def fromutc(self, dt):
    """See datetime.tzinfo.fromutc"""
    # Find the last transition at or before `dt` and apply that
    # period's UTC offset and tzinfo member.
    naive = dt.replace(tzinfo=None)
    idx = max(0, bisect_right(self._utc_transition_times, naive) - 1)
    inf = self._transition_info[idx]
    return (naive + inf[0]).replace(tzinfo=self._tzinfos[inf])
'Correct the timezone information on the given datetime If date arithmetic crosses DST boundaries, the tzinfo is not magically adjusted. This method normalizes the tzinfo to the correct one. To test, first we need to do some setup >>> from pytz import timezone >>> utc = timezone(\'UTC\') >>> eastern = timezone(\'US/Eas...
def normalize(self, dt):
    """Correct the timezone information on the given datetime.

    Converts `dt` to UTC using its attached offset, then re-localizes
    it via ``self.fromutc`` so the tzinfo matches the actual UTC moment
    (e.g. after date arithmetic crossed a DST boundary). Raises
    ValueError if `dt` is naive.
    """
    # `raise ValueError, msg` is Python 2-only syntax (SyntaxError on
    # Python 3); the call form behaves identically on both.
    if dt.tzinfo is None:
        raise ValueError('Naive time - no tzinfo set')
    offset = dt.tzinfo._utcoffset
    utc_dt = dt.replace(tzinfo=None) - offset
    return self.fromutc(utc_dt)
'Convert naive time to local time. This method should be used to construct localtimes, rather than passing a tzinfo argument to a datetime constructor. is_dst is used to determine the correct timezone in the ambigous period at the end of daylight savings time. >>> from pytz import timezone >>> fmt = \'%Y-%m-%d %H:%M:%S...
def localize(self, dt, is_dst=False):
if (dt.tzinfo is not None): raise ValueError, 'Not naive datetime (tzinfo is already set)' possible_loc_dt = set() for tzinfo in self._tzinfos.values(): loc_dt = tzinfo.normalize(dt.replace(tzinfo=tzinfo)) if (loc_dt.replace(tzinfo=None) == dt): possible...
def utcoffset(self, dt):
    """See datetime.tzinfo.utcoffset"""
    # This tzinfo member carries one fixed offset; `dt` is ignored.
    return self._utcoffset
def dst(self, dt):
    """See datetime.tzinfo.dst"""
    # This tzinfo member carries one fixed DST delta; `dt` is ignored.
    return self._dst
def tzname(self, dt):
    """See datetime.tzinfo.tzname"""
    # This tzinfo member carries one fixed name; `dt` is ignored.
    return self._tzname
def localize(self, dt, is_dst=False):
    """Convert naive time to local time.

    Raises ValueError if `dt` already carries a tzinfo. `is_dst` is
    ignored for this zone.
    """
    # `raise ValueError, msg` is Python 2-only syntax (SyntaxError on
    # Python 3); the call form behaves identically on both.
    if dt.tzinfo is not None:
        raise ValueError('Not naive datetime (tzinfo is already set)')
    return dt.replace(tzinfo=self)
def normalize(self, dt, is_dst=False):
    """Correct the timezone information on the given datetime.

    Raises ValueError if `dt` is naive. `is_dst` is ignored for this
    zone.
    """
    # `raise ValueError, msg` is Python 2-only syntax (SyntaxError on
    # Python 3); the call form behaves identically on both.
    if dt.tzinfo is None:
        raise ValueError('Naive time - no tzinfo set')
    return dt.replace(tzinfo=self)
def localize(self, dt, is_dst=False):
    """Convert naive time to local time.

    Raises ValueError if `dt` already carries a tzinfo. `is_dst` is
    ignored for this zone.
    """
    # `raise ValueError, msg` is Python 2-only syntax (SyntaxError on
    # Python 3); the call form behaves identically on both.
    if dt.tzinfo is not None:
        raise ValueError('Not naive datetime (tzinfo is already set)')
    return dt.replace(tzinfo=self)
def normalize(self, dt, is_dst=False):
    """Correct the timezone information on the given datetime.

    Raises ValueError if `dt` is naive. `is_dst` is ignored for this
    zone.
    """
    # `raise ValueError, msg` is Python 2-only syntax (SyntaxError on
    # Python 3); the call form behaves identically on both.
    if dt.tzinfo is None:
        raise ValueError('Naive time - no tzinfo set')
    return dt.replace(tzinfo=self)
def __init__(self, entries=None):
    """Create working set from list of path entries (default=sys.path)"""
    self.entries = []
    self.entry_keys = {}
    self.by_key = {}
    self.callbacks = []
    if entries is None:
        entries = sys.path
    for entry in entries:
        self.add_entry(entry)
'Add a path item to ``.entries``, finding any distributions on it ``find_distributions(entry,False)`` is used to find distributions corresponding to the path entry, and they are added. `entry` is always appended to ``.entries``, even if it is already present. (This is because ``sys.path`` can contain the same value mo...
def add_entry(self, entry):
    """Add a path item to ``.entries``, registering its distributions.

    ``find_distributions(entry, True)`` locates distributions for the
    path entry and each one is added. `entry` is always appended to
    ``.entries``, even if already present (``sys.path`` may contain the
    same value more than once).
    """
    self.entry_keys.setdefault(entry, [])
    self.entries.append(entry)
    for dist in find_distributions(entry, True):
        self.add(dist, entry, False)
def __contains__(self, dist):
    """True if `dist` is the active distribution for its project"""
    active = self.by_key.get(dist.key)
    return active == dist
'Find a distribution matching requirement `req` If there is an active distribution for the requested project, this returns it as long as it meets the version requirement specified by `req`. But, if there is an active distribution for the project and it does *not* meet the `req` requirement, ``VersionConflict`` is rais...
def find(self, req):
    """Find a distribution matching requirement `req`.

    Returns the active distribution for ``req.key`` when it satisfies
    `req`, None when there is no active distribution, and raises
    ``VersionConflict`` when the active distribution does not satisfy
    `req`.
    """
    dist = self.by_key.get(req.key)
    if dist is None:
        return None
    if dist not in req:
        raise VersionConflict(dist, req)
    return dist
def iter_entry_points(self, group, name=None):
    """Yield entry point objects from `group` matching `name`.

    If `name` is None, yields all entry points in `group` from every
    distribution in the working set; otherwise only those matching both
    `group` and `name` (in distribution order).
    """
    for dist in self:
        entry_map = dist.get_entry_map(group)
        if name is None:
            for ep in entry_map.values():
                yield ep
        elif name in entry_map:
            yield entry_map[name]
def run_script(self, requires, script_name):
    """Locate distribution for `requires` and run `script_name` script"""
    # Run the script in the *caller's* global namespace, wiped clean
    # except for __name__, so it executes like a fresh __main__.
    ns = sys._getframe(1).f_globals
    name = ns['__name__']
    ns.clear()
    ns['__name__'] = name
    self.require(requires)[0].run_script(script_name, ns)
def __iter__(self):
    """Yield distributions for non-duplicate projects in the working set.

    Yield order follows the order in which the items' path entries were
    added to the working set.
    """
    seen = {}
    for item in self.entries:
        for key in self.entry_keys[item]:
            if key not in seen:
                seen[key] = 1
                yield self.by_key[key]
'Add `dist` to working set, associated with `entry` If `entry` is unspecified, it defaults to the ``.location`` of `dist`. On exit from this routine, `entry` is added to the end of the working set\'s ``.entries`` (if it wasn\'t already present). `dist` is only added to the working set if it\'s for a project that doesn\...
def add(self, dist, entry=None, insert=True):
if insert: dist.insert_on(self.entries, entry) if (entry is None): entry = dist.location keys = self.entry_keys.setdefault(entry, []) keys2 = self.entry_keys.setdefault(dist.location, []) if (dist.key in self.by_key): return self.by_key[dist.key] = dist if (dist.key n...
'List all distributions needed to (recursively) meet `requirements` `requirements` must be a sequence of ``Requirement`` objects. `env`, if supplied, should be an ``Environment`` instance. If not supplied, it defaults to all distributions available within any entry or distribution in the working set. `installer`, if...
def resolve(self, requirements, env=None, installer=None):
requirements = list(requirements)[::(-1)] processed = {} best = {} to_activate = [] while requirements: req = requirements.pop(0) if (req in processed): continue dist = best.get(req.key) if (dist is None): dist = self.by_key.get(req.key) ...
'Find all activatable distributions in `plugin_env` Example usage:: distributions, errors = working_set.find_plugins( Environment(plugin_dirlist) map(working_set.add, distributions) # add plugins+libs to sys.path print "Couldn\'t load", errors # display errors The `plugin_env` should be an ``Environment`` insta...
def find_plugins(self, plugin_env, full_env=None, installer=None, fallback=True):
plugin_projects = list(plugin_env) plugin_projects.sort() error_info = {} distributions = {} if (full_env is None): env = Environment(self.entries) env += plugin_env else: env = (full_env + plugin_env) shadow_set = self.__class__([]) map(shadow_set.add, self) ...
'Ensure that distributions matching `requirements` are activated `requirements` must be a string or a (possibly-nested) sequence thereof, specifying the distributions and versions required. The return value is a sequence of the distributions that needed to be activated to fulfill the requirements; all relevant distrib...
def require(self, *requirements):
    """Ensure distributions matching `requirements` are activated.

    `requirements` must be a string or (possibly-nested) sequence of
    requirement strings. Returns the distributions that needed to be
    activated to fulfill them.
    """
    needed = self.resolve(parse_requirements(requirements))
    for dist in needed:
        self.add(dist)
    return needed
def subscribe(self, callback):
    """Invoke `callback` for all distributions (including existing ones)"""
    if callback in self.callbacks:
        return
    self.callbacks.append(callback)
    for dist in self:
        callback(dist)
'Snapshot distributions available on a search path Any distributions found on `search_path` are added to the environment. `search_path` should be a sequence of ``sys.path`` items. If not supplied, ``sys.path`` is used. `platform` is an optional string specifying the name of the platform that platform-specific distribu...
def __init__(self, search_path=None, platform=get_supported_platform(), python=PY_MAJOR):
    """Snapshot distributions available on a search path.

    `search_path` should be a sequence of ``sys.path`` items (defaults
    to ``sys.path``); `platform` and `python` restrict which
    distributions this environment accepts.
    """
    self._distmap = {}
    self._cache = {}
    self.platform = platform
    self.python = python
    self.scan(search_path)
def can_add(self, dist):
    """Is distribution `dist` acceptable for this environment?

    The distribution must match the platform and python version
    requirements specified when this environment was created, or False
    is returned.
    """
    py_ok = (self.python is None or dist.py_version is None
             or dist.py_version == self.python)
    return py_ok and compatible_platforms(dist.platform, self.platform)
def remove(self, dist):
    """Remove `dist` from the environment"""
    self._distmap[dist.key].remove(dist)
def scan(self, search_path=None):
    """Scan `search_path` for distributions usable in this environment.

    `search_path` defaults to ``sys.path``. Each distribution found is
    offered to ``self.add``, which applies the platform/python filters.
    """
    if search_path is None:
        search_path = sys.path
    for item in search_path:
        for dist in find_distributions(item):
            self.add(dist)
'Return a newest-to-oldest list of distributions for `project_name`'
def __getitem__(self, project_name):
try: return self._cache[project_name] except KeyError: project_name = project_name.lower() if (project_name not in self._distmap): return [] if (project_name not in self._cache): dists = self._cache[project_name] = self._distmap[project_name] _sort_dists(d...
def add(self, dist):
    """Add `dist` if we ``can_add()`` it and it isn't already added"""
    if not (self.can_add(dist) and dist.has_version()):
        return
    dists = self._distmap.setdefault(dist.key, [])
    if dist not in dists:
        dists.append(dist)
        # Keep any cached, sorted listing for this key up to date.
        if dist.key in self._cache:
            _sort_dists(self._cache[dist.key])
'Find distribution best matching `req` and usable on `working_set` This calls the ``find(req)`` method of the `working_set` to see if a suitable distribution is already active. (This may raise ``VersionConflict`` if an unsuitable version of the project is already active in the specified `working_set`.) If a suitable ...
def best_match(self, req, working_set, installer=None):
    """Find distribution best matching `req` usable on `working_set`.

    Tries ``working_set.find(req)`` first (may raise VersionConflict),
    then the newest distribution in this environment that satisfies
    `req`, and finally falls back to ``self.obtain(req, installer)``.
    """
    dist = working_set.find(req)
    if dist is not None:
        return dist
    for candidate in self[req.key]:
        if candidate in req:
            return candidate
    return self.obtain(req, installer)
'Obtain a distribution matching `requirement` (e.g. via download) Obtain a distro that matches requirement (e.g. via download). In the base ``Environment`` class, this routine just returns ``installer(requirement)``, unless `installer` is None, in which case None is returned instead. This method is a hook that allows...
def obtain(self, requirement, installer=None):
    """Obtain a distribution matching `requirement` (e.g. via download).

    Base implementation: delegate to `installer` when one is given,
    otherwise return None. Subclasses may override to implement more
    complex acquisition.
    """
    if installer is None:
        return None
    return installer(requirement)
def __iter__(self):
    """Yield the unique project names of the available distributions"""
    for key in self._distmap.keys():
        # Only yield keys that still have at least one distribution.
        if self[key]:
            yield key
def __iadd__(self, other):
    """In-place addition of a distribution or environment"""
    if isinstance(other, Distribution):
        self.add(other)
    elif isinstance(other, Environment):
        for project in other:
            for dist in other[project]:
                self.add(dist)
    else:
        raise TypeError("Can't add %r to environment" % (other,))
    return self
def __add__(self, other):
    """Add an environment or distribution to an environment"""
    # Start from an unrestricted empty environment so both operands'
    # contents are accepted regardless of platform/python filters.
    new = self.__class__([], platform=None, python=None)
    for env in (self, other):
        new += env
    return new
def resource_exists(self, package_or_requirement, resource_name):
    """Does the named resource exist?"""
    provider = get_provider(package_or_requirement)
    return provider.has_resource(resource_name)
def resource_isdir(self, package_or_requirement, resource_name):
    """Is the named resource an existing directory?"""
    provider = get_provider(package_or_requirement)
    return provider.resource_isdir(resource_name)
def resource_filename(self, package_or_requirement, resource_name):
    """Return a true filesystem path for specified resource"""
    provider = get_provider(package_or_requirement)
    return provider.get_resource_filename(self, resource_name)
def resource_stream(self, package_or_requirement, resource_name):
    """Return a readable file-like object for specified resource"""
    provider = get_provider(package_or_requirement)
    return provider.get_resource_stream(self, resource_name)
def resource_string(self, package_or_requirement, resource_name):
    """Return specified resource as a string"""
    provider = get_provider(package_or_requirement)
    return provider.get_resource_string(self, resource_name)
def resource_listdir(self, package_or_requirement, resource_name):
    """List the contents of the named resource directory"""
    provider = get_provider(package_or_requirement)
    return provider.resource_listdir(resource_name)
'Give an error message for problems extracting file(s)'
def extraction_error(self):
old_exc = sys.exc_info()[1] cache_path = (self.extraction_path or get_default_cache()) err = ExtractionError(("Can't extract file(s) to egg cache\n\nThe following error occurred while trying to extract file(s) to the Python egg\ncache:\n\n %s\n\nThe ...
'Return absolute location in cache for `archive_name` and `names` The parent directory of the resulting path will be created if it does not already exist. `archive_name` should be the base filename of the enclosing egg (which may not be the name of the enclosing zipfile!), including its ".egg" extension. `names`, if ...
def get_cache_path(self, archive_name, names=()):
    """Return absolute location in cache for `archive_name` and `names`.

    The parent directory of the resulting path is created if it does
    not already exist. `archive_name` should be the base filename of
    the enclosing egg, including its ".egg" extension; `names` are
    path components within it.
    """
    extract_path = self.extraction_path or get_default_cache()
    target_path = os.path.join(extract_path, archive_name + '-tmp', *names)
    try:
        ensure_directory(target_path)
    except:
        # Translate any failure into a descriptive ExtractionError.
        self.extraction_error()
    self.cached_files[target_path] = 1
    return target_path
'Perform any platform-specific postprocessing of `tempname` This is where Mac header rewrites should be done; other platforms don\'t have anything special they should do. Resource providers should call this method ONLY after successfully extracting a compressed resource. They must NOT call it on resources that are alr...
def postprocess(self, tempname, filename):
    """Perform any platform-specific postprocessing of `tempname`.

    On POSIX, mark the extracted file readable/executable by all
    (mode |= 365 i.e. 0o555, masked by 4095 i.e. 0o7777). Other
    platforms need nothing special.
    """
    if os.name != 'posix':
        return
    mode = (os.stat(tempname).st_mode | 365) & 4095
    os.chmod(tempname, mode)
'Set the base path where resources will be extracted to, if needed. If you do not call this routine before any extractions take place, the path defaults to the return value of ``get_default_cache()``. (Which is based on the ``PYTHON_EGG_CACHE`` environment variable, with various platform-specific fallbacks. See that ...
def set_extraction_path(self, path):
    """Set the base path where resources will be extracted to, if needed.

    Must be called before any extraction has taken place; otherwise the
    path defaults to ``get_default_cache()``. Raises ValueError if
    files have already been extracted.
    """
    if self.cached_files:
        raise ValueError("Can't change extraction path, files already extracted")
    self.extraction_path = path