| code (string) | signature (string) | docstring (string) | loss_without_docstring (float64) | loss_with_docstring (float64) | factor (float64) |
|---|---|---|---|---|---|
self.log = 1
self.logFile = fileName
self._logPtr = open(fileName, "w")
if writeName:
self._namePtr = open(fileName + ".name", "w")
|
def setLog(self, fileName, writeName=False)
|
Opens a log file with name fileName.
| 3.753624
| 3.795694
| 0.988916
|
self._logPtr.close()
if self._namePtr:
self._namePtr.close()
self.log = 0
|
def closeLog(self)
|
Closes the log file.
| 7.413503
| 6.697528
| 1.106901
|
if self.log:
writeArray(self._logPtr, self.activation)
if self._namePtr:
self._namePtr.write(network.getWord(self.activation))
self._namePtr.write("\n")
|
def writeLog(self, network)
|
Writes to the log file.
| 8.52209
| 8.281719
| 1.029024
|
string = "Layer '%s': (Kind: %s, Size: %d, Active: %d, Frozen: %d)\n" % (
self.name, self.kind, self.size, self.active, self.frozen)
if (self.type == 'Output'):
string += toStringArray('Target ', self.target, self.displayWidth)
string += toStringArray('Activation', self.activation, self.displayWidth)
if (self.type != 'Input' and self._verbosity > 1):
string += toStringArray('Error ', self.error, self.displayWidth)
if (self._verbosity > 4 and self.type != 'Input'):
string += toStringArray('weight ', self.weight, self.displayWidth)
string += toStringArray('dweight ', self.dweight, self.displayWidth)
string += toStringArray('delta ', self.delta, self.displayWidth)
string += toStringArray('netinput ', self.netinput, self.displayWidth)
string += toStringArray('wed ', self.wed, self.displayWidth)
return string
|
def toString(self)
|
Returns a string representation of Layer instance.
| 3.153685
| 3.019557
| 1.04442
|
if self.displayWidth == 0: return
print("=============================")
print("Layer '%s': (Kind: %s, Size: %d, Active: %d, Frozen: %d)" % (
self.name, self.kind, self.size, self.active, self.frozen))
if (self.type == 'Output'):
displayArray('Target ', self.target, self.displayWidth)
displayArray('Activation', self.activation, self.displayWidth)
if (self.type != 'Input' and self._verbosity > 1):
displayArray('Error ', self.error, self.displayWidth)
if (self._verbosity > 4 and self.type != 'Input'):
print(" ", end=" "); displayArray('weight', self.weight)
print(" ", end=" "); displayArray('dweight', self.dweight)
print(" ", end=" "); displayArray('delta', self.delta)
print(" ", end=" "); displayArray('netinput', self.netinput)
print(" ", end=" "); displayArray('wed', self.wed)
|
def display(self)
|
Displays the Layer instance to the screen.
| 3.429385
| 3.351904
| 1.023116
|
#if self.verify and not self.activationSet == 0:
# raise LayerError, \
# ('Activation flag not reset. Activations may have been set multiple times without any intervening call to propagate().', self.activationSet)
Numeric.put(self.activation, Numeric.arange(len(self.activation)), value)
self.activationSet = 1
|
def setActivations(self, value)
|
Sets all activations to the value of the argument. Value should be in the range [0,1].
| 9.711465
| 9.060555
| 1.07184
|
array = Numeric.array(arr)
if not len(array) == self.size:
raise LayerError('Mismatched activation size and layer size in call to copyActivations()', \
(len(array), self.size))
if self.verify and not self.activationSet == 0:
if not reckless:
raise LayerError('Activation flag not reset before call to copyActivations()', \
self.activationSet)
self.activation = array
self.activationSet = 1
|
def copyActivations(self, arr, reckless = 0)
|
Copies activations from the argument array into
layer activations.
| 5.883564
| 5.634982
| 1.044114
|
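A minimal usage sketch of copyActivations() above, assuming pyrobot.brain.conx is importable and using its standard 'input' layer name; the second call shows the reckless flag suppressing the activation-flag check:
from pyrobot.brain.conx import Network
net = Network()
net.addLayers(2, 3, 1)
net["input"].copyActivations([0.0, 1.0])              # length must equal layer size
net["input"].copyActivations([1.0, 1.0], reckless=1)  # skip the double-set check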
# Removed this because both propagate and backprop (via compute_error) set targets
#if self.verify and not self.targetSet == 0:
# if not self.warningIssued:
# print 'Warning! Targets have already been set and no intervening backprop() was called.', \
# (self.name, self.targetSet)
# print "(Warning will not be issued again)"
# self.warningIssued = 1
if value > self.maxActivation or value < self.minActivation:
raise LayerError('Targets for this layer are out of the proper interval.', (self.name, value))
Numeric.put(self.target, Numeric.arange(len(self.target)), value)
self.targetSet = 1
|
def setTargets(self, value)
|
Sets all targets to the value of the argument. This value must be in the range [minActivation, maxActivation].
| 7.321119
| 7.184808
| 1.018972
|
array = Numeric.array(arr)
if not len(array) == self.size:
raise LayerError('Mismatched target size and layer size in call to copyTargets()', \
(len(array), self.size))
# Removed this because both propagate and backprop (via compute_error) set targets
#if self.verify and not self.targetSet == 0:
# if not self.warningIssued:
# print 'Warning! Targets have already been set and no intervening backprop() was called.', \
# (self.name, self.targetSet)
# print "(Warning will not be issued again)"
# self.warningIssued = 1
if Numeric.add.reduce(array < self.minTarget) or Numeric.add.reduce(array > self.maxTarget):
print(self.name, self.minTarget, self.maxTarget)
raise LayerError('Targets for this layer are out of range.', (self.name, array))
self.target = array
self.targetSet = 1
|
def copyTargets(self, arr)
|
Copies the targets of the argument array into the self.target attribute.
| 5.773539
| 5.536612
| 1.042793
|
self.randomize()
self.dweight = Numeric.zeros((self.fromLayer.size, \
self.toLayer.size), 'f')
self.wed = Numeric.zeros((self.fromLayer.size, \
self.toLayer.size), 'f')
self.wedLast = Numeric.zeros((self.fromLayer.size, \
self.toLayer.size), 'f')
|
def initialize(self)
|
Randomizes the weights and initializes self.dweight, self.wed, and self.wedLast to zero matrices.
| 3.541173
| 2.477596
| 1.429278
|
if toLayerSize <= 0 or fromLayerSize <= 0:
raise LayerError('changeSize() called with invalid layer size.', \
(fromLayerSize, toLayerSize))
dweight = Numeric.zeros((fromLayerSize, toLayerSize), 'f')
wed = Numeric.zeros((fromLayerSize, toLayerSize), 'f')
wedLast = Numeric.zeros((fromLayerSize, toLayerSize), 'f')
weight = randomArray((fromLayerSize, toLayerSize),
self.toLayer._maxRandom)
# copy from old to new, considering one is smaller
minFromLayerSize = min( fromLayerSize, self.fromLayer.size)
minToLayerSize = min( toLayerSize, self.toLayer.size)
for i in range(minFromLayerSize):
for j in range(minToLayerSize):
wed[i][j] = self.wed[i][j]
wedLast[i][j] = self.wedLast[i][j]
dweight[i][j] = self.dweight[i][j]
weight[i][j] = self.weight[i][j]
self.dweight = dweight
self.wed = wed
self.wedLast = wedLast
self.weight = weight
|
def changeSize(self, fromLayerSize, toLayerSize)
|
Changes the size of the connection depending on the size
change of either source or destination layer. Should only be
called through Network.changeLayerSize().
| 2.700445
| 2.675365
| 1.009374
|
if self.toLayer._verbosity > 4:
print("wed: from '" + self.fromLayer.name + "' to '" + self.toLayer.name +"'")
for j in range(self.toLayer.size):
print(self.toLayer.name, "[", j, "]", end=" ")
print('')
for i in range(self.fromLayer.size):
print(self.fromLayer.name, "[", i, "]", ": ", end=" ")
for j in range(self.toLayer.size):
print(self.wed[i][j], end=" ")
print('')
print('')
print("dweight: from '" + self.fromLayer.name + "' to '" + self.toLayer.name +"'")
for j in range(self.toLayer.size):
print(self.toLayer.name, "[", j, "]", end=" ")
print('')
for i in range(self.fromLayer.size):
print(self.fromLayer.name, "[", i, "]", ": ", end=" ")
for j in range(self.toLayer.size):
print(self.dweight[i][j], end=" ")
print('')
print('')
if self.toLayer._verbosity > 2:
print("Weights: from '" + self.fromLayer.name + "' to '" + self.toLayer.name +"'")
print(" ", end=" ")
for j in range(self.toLayer.size):
print(self.toLayer.name, "[", j, "]", end=" ")
print('')
for i in range(self.fromLayer.size):
print(self.fromLayer.name, "[", i, "]", ": ", end=" ")
for j in range(self.toLayer.size):
print(self.weight[i][j], end=" ")
print('')
print('')
|
def display(self)
|
Displays connection information to the screen.
| 1.741864
| 1.71866
| 1.013501
|
string = ""
if self.toLayer._verbosity > 4:
string += "wed: from '" + self.fromLayer.name + "' to '" + self.toLayer.name +"'\n"
string += " "
for j in range(self.toLayer.size):
string += " " + self.toLayer.name + "[" + str(j) + "]"
string += '\n'
for i in range(self.fromLayer.size):
string += self.fromLayer.name+ "["+ str(i)+ "]"+ ": "
for j in range(self.toLayer.size):
string += " " + str(self.wed[i][j])
string += '\n'
string += '\n'
string += "dweight: from '" + self.fromLayer.name + "' to '" + self.toLayer.name +"'\n"
string += " "
for j in range(self.toLayer.size):
string += " " + self.toLayer.name+ "["+ str(j)+ "]"
string += '\n'
for i in range(self.fromLayer.size):
string += self.fromLayer.name+ "["+ str(i)+ "]"+ ": "
for j in range(self.toLayer.size):
string += " " + str(self.dweight[i][j])
string += '\n'
string += '\n'
if self.toLayer._verbosity > 2:
string += "Weights: from '" + self.fromLayer.name + "' to '" + self.toLayer.name +"'\n"
string += " "
for j in range(self.toLayer.size):
string += " " + self.toLayer.name+ "["+ str(j)+ "]"
string += '\n'
for i in range(self.fromLayer.size):
string += self.fromLayer.name+ "["+ str(i)+ "]"+ ": "
for j in range(self.toLayer.size):
string += " " + str(self.weight[i][j])
string += '\n'
string += '\n'
return string
|
def toString(self)
|
Connection information as a string.
| 1.81451
| 1.811505
| 1.001659
|
# first clear the old cached values
self.cacheConnections = []
self.cacheLayers = []
if val:
for layer in self.layers:
if layer.active and not layer.frozen:
self.cacheLayers.append( layer )
for connection in self.connections:
if connection.active and not connection.frozen:
self.cacheConnections.append( connection )
|
def setCache(self, val = 1)
|
Turns the cache on (rebuilding it from the active, unfrozen layers and connections), or turns it off.
| 3.250793
| 3.301904
| 0.984521
|
next = {startLayer.name : startLayer}
visited = {}
while next != {}:
for item in list(next.items()):
# item[0] : name, item[1] : layer reference
# add layer to visited dict and del from next
visited[item[0]] = item[1]
del next[item[0]]
for connection in self.connections:
if connection.fromLayer.name == item[0]:
if connection.toLayer.name == endLayer.name:
return 1 # a path!
elif connection.toLayer.name in next:
pass # already in the list to be traversed
elif connection.toLayer.name in visited:
pass # already been there
else:
# add to next
next[connection.toLayer.name] = connection.toLayer
return 0 # didn't find it and ran out of places to go
|
def path(self, startLayer, endLayer)
|
Used in error checking with verifyArchitecture() and in prop_from().
| 3.58305
| 3.499677
| 1.023823
|
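path() above walks the connection graph breadth-first from startLayer; a short sketch of what it reports (assuming conx is importable):
from pyrobot.brain.conx import Network
net = Network()
net.addLayers(2, 3, 1)
print(net.path(net["input"], net["output"]))  # 1: input -> hidden -> output
print(net.path(net["output"], net["input"]))  # 0: connections only run forward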
for i in range(len(self.layers)):
if layer == self.layers[i]: # shallow cmp
return i
return -1 # not in list
|
def getLayerIndex(self, layer)
|
Given a reference to a layer, returns the index of that layer in
self.layers.
| 6.267767
| 5.848995
| 1.071597
|
layer._verbosity = verbosity
layer._maxRandom = self._maxRandom
layer.minTarget = 0.0
layer.maxTarget = 1.0
layer.minActivation = 0.0
layer.maxActivation = 1.0
if position == None:
self.layers.append(layer)
else:
self.layers.insert(position, layer)
self.layersByName[layer.name] = layer
|
def add(self, layer, verbosity = 0, position = None)
|
Adds a layer. Layer verbosity is optional (default 0).
| 2.953656
| 2.846718
| 1.037565
|
for c in self.connections:
if (c.fromLayer.name == fromName and
c.toLayer.name == toName):
return 1
return 0
|
def isConnected(self, fromName, toName)
|
Returns 1 if a connection runs from fromName to toName, else 0.
| 3.017599
| 2.358615
| 1.279394
|
fromName, toName, rest = names[0], names[1], names[2:]
self.connectAt(fromName, toName)
if len(rest) != 0:
self.connect(toName, *rest)
|
def connect(self, *names)
|
Connects a list of names, one to the next.
| 3.522999
| 3.000835
| 1.174006
|
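Since connect() above recurses pairwise, a chain of names builds a chain of connections; a sketch (assuming conx is importable):
from pyrobot.brain.conx import Network
net = Network()
net.addLayer("a", 2)
net.addLayer("b", 3)
net.addLayer("c", 1)
net.connect("a", "b", "c")  # equivalent to connectAt("a","b") then connectAt("b","c")
print(net.isConnected("a", "b"), net.isConnected("b", "c"))  # 1 1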
fromLayer = self.getLayer(fromName)
toLayer = self.getLayer(toName)
if self.getLayerIndex(fromLayer) >= self.getLayerIndex(toLayer):
raise NetworkError('Layers out of order.', (fromLayer.name, toLayer.name))
if (fromLayer.type == 'Output'):
fromLayer.type = 'Hidden'
fromLayer.patternReport = 0 # automatically turned off for hidden layers
if fromLayer.kind == 'Output':
fromLayer.kind = 'Hidden'
elif (fromLayer.type == 'Undefined'):
fromLayer.type = 'Input'
fromLayer.patternReport = 0 # automatically turned off for input layers
if fromLayer.kind == 'Undefined':
fromLayer.kind = 'Input'
if (toLayer.type == 'Input'):
raise NetworkError('Connections out of order', (fromLayer.name, toLayer.name))
elif (toLayer.type == 'Undefined'):
toLayer.type = 'Output'
toLayer.patternReport = 1 # automatically turned on for output layers
if toLayer.kind == 'Undefined':
toLayer.kind = 'Output'
if position == None:
self.connections.append(Connection(fromLayer, toLayer))
else:
self.connections.insert(position, Connection(fromLayer, toLayer))
|
def connectAt(self, fromName, toName, position = None)
|
Connects two layers by instantiating an instance of Connection
class. Allows a position number, indicating the ordering of
the connection.
| 2.281813
| 2.177569
| 1.047872
|
netType = "serial"
if "type" in kw:
netType = kw["type"]
self.addLayer('input', arg[0])
hiddens = []
if len(arg) > 3:
hcount = 0
for hidc in arg[1:-1]:
name = 'hidden%d' % hcount
self.addLayer(name, hidc)
hiddens.append(name)
hcount += 1
elif len(arg) == 3:
name = 'hidden'
self.addLayer(name, arg[1])
hiddens.append(name)
elif len(arg) == 2:
pass
else:
raise AttributeError("not enough layers! need >= 2")
self.addLayer('output', arg[-1])
lastName = "input"
for name in hiddens:
if netType == "parallel":
self.connect('input', name)
self.connect(name, 'output')
else: # serial
self.connect(lastName, name)
lastName = name
if netType == "serial" or lastName == "input":
self.connect(lastName, "output")
|
def addLayers(self, *arg, **kw)
|
Creates an N layer network with 'input', 'hidden1', 'hidden2',...
and 'output' layers. Keyword type indicates "parallel" or
"serial". If only one hidden layer, it is called "hidden".
| 2.801404
| 2.479959
| 1.129617
|
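A sketch of the two topologies addLayers() builds, under the naming convention in the docstring above (assuming conx is importable):
from pyrobot.brain.conx import Network
serial = Network()
serial.addLayers(2, 4, 3, 1)  # input -> hidden0 -> hidden1 -> output
parallel = Network()
parallel.addLayers(2, 4, 3, 1, type="parallel")  # input -> each hidden -> output
print(serial.isConnected("hidden0", "hidden1"))  # 1: hiddens chained in serial mode
print(parallel.isConnected("input", "hidden1"))  # 1: hiddens side by side in parallel mode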
# first, construct an array of all of the weights
# that won't be deleted:
gene = []
for layer in self.layers:
if layer.type != 'Input':
for i in range(layer.size):
if layer.name == layername and i == nodeNum:
pass # skip it
else:
gene.append(layer.weight[i])
for connection in self.connections:
for i in range(connection.fromLayer.size):
for j in range(connection.toLayer.size):
if ((connection.fromLayer.name == layername and i == nodeNum) or
(connection.toLayer.name == layername and j == nodeNum)):
pass # skip weights from/to nodeNum
else:
gene.append(connection.weight[i][j])
# now, change the size (removes rightmost node):
self.changeLayerSize(layername, self[layername].size - 1)
# and put the good weights where they go:
self.unArrayify(gene)
|
def deleteLayerNode(self, layername, nodeNum)
|
Removes a particular unit/node from a layer.
| 3.700994
| 3.764171
| 0.983216
|
self.changeLayerSize(layerName, self[layerName].size + 1)
if bias != None:
self[layerName].weight[-1] = bias
for name in list(weights.keys()):
for c in self.connections:
if c.fromLayer.name == name and c.toLayer.name == layerName:
for i in range(self[name].size):
self[name, layerName].weight[i][-1] = weights[name][i]
elif c.toLayer.name == name and c.fromLayer.name == layerName:
for j in range(self[name].size):
self[layerName, name].weight[-1][j] = weights[name][j]
|
def addLayerNode(self, layerName, bias = None, weights = {})
|
Adds a new node to the end of a layer, and puts in new weights.
Weights will be random, unless specified.
bias = the new node's bias weight
weights = dict of {connectedLayerName: [weights], ...}
Example:
>>> net = Network() # doctest: +ELLIPSIS
Conx using seed: ...
>>> net.addLayers(2, 5, 1)
>>> net.addLayerNode("hidden", bias = -0.12, weights = {"input": [1, 0], "output": [0]})
| 2.563605
| 2.70731
| 0.94692
|
# for all connections from/to this layer, change the matrix:
if self.sharedWeights:
raise AttributeError("shared weights broken")
for connection in self.connections:
if connection.fromLayer.name == layername:
connection.changeSize( newsize, connection.toLayer.size )
if connection.toLayer.name == layername:
connection.changeSize( connection.fromLayer.size, newsize )
# then, change the actual layer size:
self.getLayer(layername).changeSize(newsize)
|
def changeLayerSize(self, layername, newsize)
|
Changes layer size. Newsize must be greater than zero.
| 4.803629
| 4.841828
| 0.992111
|
print("Initializing '%s' weights..." % self.name, end=" ", file=sys.stderr)
if self.sharedWeights:
raise AttributeError("shared weights broken")
self.count = 0
for connection in self.connections:
connection.initialize()
for layer in self.layers:
layer.initialize()
|
def initialize(self)
|
Initializes network by calling Connection.initialize() and
Layer.initialize(). self.count is set to zero.
| 5.699436
| 4.259496
| 1.338054
|
for name in dict:
self.layersByName[name].copyActivations( dict[name] )
|
def putActivations(self, dict)
|
Puts a dict of name: activations into their respective layers.
| 8.777266
| 5.664796
| 1.549441
|
retval = {}
for name in nameList:
retval[name] = self.layersByName[name].getActivationsList()
return retval
|
def getActivationsDict(self, nameList)
|
Returns a dictionary of layer names that map to a list of activations.
| 3.988672
| 3.380132
| 1.180034
|
self.seed = value
random.seed(self.seed)
if self.verbosity >= 0:
print("Conx using seed:", self.seed)
|
def setSeed(self, value)
|
Sets the seed to value.
| 6.607796
| 6.217268
| 1.062813
|
for connection in self.connections:
if connection.fromLayer.name == lfrom and \
connection.toLayer.name == lto:
return connection
raise NetworkError('Connection was not found.', (lfrom, lto))
|
def getConnection(self, lfrom, lto)
|
Returns the connection instance connecting the specified (string)
layer names.
| 3.739953
| 3.616097
| 1.034251
|
self._verbosity = value
for layer in self.layers:
layer._verbosity = value
|
def setVerbosity(self, value)
|
Sets network self._verbosity and each layer._verbosity to value.
| 5.549171
| 2.277388
| 2.436638
|
self._maxRandom = value
for layer in self.layers:
layer._maxRandom = value
|
def setMaxRandom(self, value)
|
Sets the maxRandom Layer attribute for each layer to value. Specifies
the global range for randomly initialized values, [-max, max].
| 5.303014
| 4.363204
| 1.215394
|
for connection in self.connections:
if connection.fromLayer.name == fromName and \
connection.toLayer.name == toName:
return connection.weight
raise NetworkError('Connection was not found.', (fromName, toName))
|
def getWeights(self, fromName, toName)
|
Gets the weights of the connection between two layers (argument strings).
| 3.526367
| 3.164948
| 1.114195
|
for connection in self.connections:
if connection.fromLayer.name == fromName and \
connection.toLayer.name == toName:
connection.weight[fromPos][toPos] = value
return value
raise NetworkError('Connection was not found.', (fromName, toName))
|
def setWeight(self, fromName, fromPos, toName, toPos, value)
|
Sets the weight of the connection between two layers (argument strings).
| 3.903016
| 3.600243
| 1.084098
|
self.orderedInputs = value
if self.orderedInputs:
self.loadOrder = [0] * len(self.inputs)
for i in range(len(self.inputs)):
self.loadOrder[i] = i
|
def setOrderedInputs(self, value)
|
Sets self.orderedInputs to value. Specifies if inputs
should be ordered and if so orders the inputs.
| 2.523868
| 2.408971
| 1.047696
|
for l in arg:
if not type(l) == list and \
not type(l) == type(Numeric.array([0.0])) and \
not type(l) == tuple and \
not type(l) == dict:
return 0
if type(l) == dict:
for i in l:
if not type(i) == str and i not in list(self.layersByName.keys()):
return 0
else:
for i in l:
if not type(i) == float and not type(i) == int:
return 0
return 1
|
def verifyArguments(self, arg)
|
Verifies that arguments to setInputs and setTargets are appropriately formatted.
| 2.678951
| 2.594142
| 1.032693
|
if not self.verifyArguments(inputs) and not self.patterned:
raise NetworkError('setInputs() requires [[...],[...],...] or [{"layerName": [...]}, ...].', inputs)
self.inputs = inputs
self.loadOrder = [0] * len(self.inputs)
for i in range(len(self.inputs)):
self.loadOrder[i] = i
|
def setInputs(self, inputs)
|
Sets self.inputs to inputs. Load order is by default random. Use setOrderedInputs() to order inputs.
| 6.87915
| 5.950742
| 1.156016
|
if not self.verifyArguments(targets) and not self.patterned:
raise NetworkError('setTargets() requires [[...],[...],...] or [{"layerName": [...]}, ...].', targets)
self.targets = targets
|
def setTargets(self, targets)
|
Sets the targets.
| 18.639322
| 18.541338
| 1.005285
|
if data2 == None: # format #1
inputs = [x[0] for x in data1]
targets = [x[1] for x in data1]
else: # format 2
inputs = data1
targets = data2
self.setInputs(inputs)
self.setTargets(targets)
|
def setInputsAndTargets(self, data1, data2=None)
|
Network.setInputsAndTargets()
Sets the corpus of data for training. Can be in one of two formats:
Format 1: setInputsAndTargets([[input0, target0], [input1, target1]...])
Network.setInputsAndTargets([[[i00, i01, ...], [t00, t01, ...]],
[[i10, i11, ...], [t10, t11, ...]],
...])
Format 2: setInputsAndTargets([input0, input1, ...], [target0, target1, ...])
Network.setInputsAndTargets([[i00, i01, ...], [i10, i11, ...],...],
[[t00, t01, ...], [t10, t11, ...],...] )
| 2.52532
| 2.184158
| 1.156199
|
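The two corpus formats accepted above, sketched with XOR data (assuming conx is importable):
from pyrobot.brain.conx import Network
net = Network()
net.addLayers(2, 2, 1)
# Format 1: a single list of [input, target] pairs
net.setInputsAndTargets([[[0, 0], [0]], [[0, 1], [1]],
                         [[1, 0], [1]], [[1, 1], [0]]])
# Format 2: parallel lists of inputs and targets
net.setInputsAndTargets([[0, 0], [0, 1], [1, 0], [1, 1]],
                        [[0], [1], [1], [0]])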
flag = [0] * len(self.inputs)
self.loadOrder = [0] * len(self.inputs)
for i in range(len(self.inputs)):
pos = int(random.random() * len(self.inputs))
while (flag[pos] == 1):
pos = int(random.random() * len(self.inputs))
flag[pos] = 1
self.loadOrder[pos] = i
|
def randomizeOrder(self)
|
Randomizes self.loadOrder, the order in which inputs set with
self.setInputs() are presented.
| 2.274006
| 1.983799
| 1.146288
|
vector2 = self.replacePatterns(vec2)
length = min(len(vector1), len(vector2))
if self.verbosity > 4:
print("Copying Vector: ", vector2[start:start+length])
p = 0
for i in range(start, start + length):
vector1[p] = vector2[i]
p += 1
|
def copyVector(self, vector1, vec2, start)
|
Copies vec2 into vector1, replacing patterns if
necessary. Use self.copyActivations() or self.copyTargets()
instead.
| 3.383491
| 3.074591
| 1.100468
|
vector = self.replacePatterns(vec, layer.name)
if self.verbosity > 4:
print("Copying Activations: ", vector[start:start+layer.size])
layer.copyActivations(vector[start:start+layer.size])
|
def copyActivations(self, layer, vec, start = 0)
|
Copies activations in vec to the specified layer, replacing
patterns if necessary.
| 4.733831
| 3.835621
| 1.234176
|
set = {}
if type(self.inputs[pos]) == dict:
set.update(self.inputs[pos])
else:
set["input"] = self.inputs[pos]
if self.targets:
if type(self.targets[pos]) == dict:
set.update(self.targets[pos])
else:
set["output"] = self.targets[pos]
return set
|
def getDataCrossValidation(self, pos)
|
Returns the inputs/targets for pattern pos; if they are not
dicts, assumes that the layers are named input and output and
uses the lists in self.inputs and self.targets.
| 2.366688
| 2.029571
| 1.166103
|
if intype == "input":
vector = self.inputs
elif intype == "target":
vector = self.targets
else:
raise AttributeError("invalid map type '%s'" % intype)
return vector[pos][offset:offset+self[name].size]
|
def getDataMap(self, intype, pos, name, offset = 0)
|
Hook defined to look up a name, and get it from a vector.
Can be overloaded to get it from somewhere else.
| 4.042145
| 3.92911
| 1.028769
|
retval = {}
if pos >= len(self.inputs):
raise IndexError('getData() pattern beyond range.', pos)
if self.verbosity >= 1: print("Getting input", pos, "...")
if len(self.inputMap) == 0:
if type(self.inputs[pos]) == dict: # allow inputs to be a dict
retval.update(self.inputs[pos])
else:
retval[self.layers[0].name] = self.inputs[pos]
else: # mapInput set manually
for vals in self.inputMap:
(name, offset) = vals
retval[name] = self.getDataMap("input", pos, name, offset)
if self.verbosity > 1: print("Loading target", pos, "...")
if len(self.targets) == 0:
pass # ok, no targets
elif len(self.targetMap) == 0:
if type(self.targets[pos]) == dict: # allow targets to be a dict
retval.update(self.targets[pos])
else:
retval[self.layers[len(self.layers)-1].name] = self.targets[pos]
else: # set manually
for vals in self.targetMap:
(name, offset) = vals
retval[name] = self.getDataMap("target", pos, name, offset)
return retval
|
def getData(self, pos)
|
Returns dictionary with input and target given pos.
| 3.12065
| 2.975226
| 1.048878
|
if len(self.cacheLayers) != 0 or len(self.cacheConnections) != 0: return
# must have layers and connections
if len(self.layers) == 0:
raise NetworkError('No network layers.', \
self.layers)
if len(self.connections) == 0:
raise NetworkError('No network connections.', \
self.connections)
# layers should not have the same name
for x, y in [(x, y) for x in range(len(self.layers)) for y in range(len(self.layers))]:
if x == y:
pass # same layer so same name
else:
# different layers same name
if self.layers[x].name == self.layers[y].name:
raise NetworkError('Two layers have the same name.', (x,y))
# no multiple connections between layers
for x, y in [(x,y) for x in range(len(self.connections)) for y in range(len(self.connections))]:
if x == y:
pass # same connection
else:
# multiple connections between fromLayer and toLayer
if self.connections[x].fromLayer.name == self.connections[y].fromLayer.name and \
self.connections[x].toLayer.name == self.connections[y].toLayer.name:
raise NetworkError('Multiple connections between two layers.', \
(self.connections[x].fromLayer.name, \
self.connections[x].toLayer.name))
# layer type tests
for layer in self.layers:
# flags for layer type tests, reset for each layer
hiddenInput = 1
hiddenOutput = 1
outputLayerFlag = 1
inputLayerFlag = 1
# no undefined layers
if layer.type == 'Undefined':
raise NetworkError('There is an unconnected layer.', layer.name)
elif layer.type == 'Input':
for connection in self.connections:
# input layers must have outgoing connections
if connection.fromLayer.name == layer.name:
inputLayerFlag = 0
# input layers must have no incoming connections
if connection.toLayer.name == layer.name:
raise NetworkError('Layer has type \'Input\' and an incoming connection.', layer.name)
if inputLayerFlag:
raise NetworkError('Layer has type \'Input\' but no outgoing connections', layer.name)
elif layer.type == 'Output':
for connection in self.connections:
# output layers must have no outgoing connections
if connection.fromLayer.name == layer.name:
raise NetworkError('Layer has type \'Output\' and an outgoing connection.', layer.name)
# output layers must have an incoming connection
if connection.toLayer.name == layer.name:
outputLayerFlag = 0
if outputLayerFlag:
raise NetworkError('Layer has type \'Output\' and no incoming connections.', layer.name)
elif layer.type == 'Hidden':
for connection in self.connections:
# hidden layers must have incoming and outgoing connections.
if connection.toLayer.name == layer.name:
hiddenInput = 0
if connection.fromLayer.name == layer.name:
hiddenOutput = 0
if hiddenInput or hiddenOutput:
raise NetworkError('Layer has type \'Hidden\' but does not have both input and output connections.',\
layer.name)
else:
raise LayerError('Unknown layer encountered in verifyArchitecture().', layer.name)
# network should not have unconnected sub networks
# every input layer should have a path to every output layer
for inLayer in self.layers:
if inLayer.type == 'Input':
for outLayer in self.layers:
if outLayer.type == 'Output':
if not self.path(inLayer, outLayer):
raise NetworkError('Network contains disconnected sub networks.', \
(inLayer.name, outLayer.name))
# network should not have directed cycles
for layer in self.layers:
if self.path(layer, layer):
raise NetworkError('Network contains a cycle.', layer.name)
|
def verifyArchitecture(self)
|
Checks for orphaned layers or connections. Assures that the network
architecture is feed-forward (no cycles). Checks connectivity and naming.
| 2.270205
| 2.23267
| 1.016812
|
for layer in self.layers:
if (layer.verify and
layer.type == 'Input' and
layer.kind != 'Context' and
layer.active and
not layer.activationSet):
raise LayerError("Inputs are not set and verifyInputs() was called on layer '%s'." % layer.name)
else:
layer.resetActivationFlag()
|
def verifyInputs(self)
|
Used in propagate() to verify that the network input
activations have been set.
| 7.749248
| 5.918747
| 1.309272
|
for layer in self.layers:
if layer.verify and layer.type == 'Output' and layer.active and not layer.targetSet:
raise LayerError('Targets are not set and verifyTargets() was called.',\
(layer.name, layer.type))
else:
layer.resetTargetFlag()
|
def verifyTargets(self)
|
Used in backprop() to verify that the network targets have
been set.
| 9.301382
| 7.018334
| 1.325298
|
tss = 0.0
size = 0
for layer in self.layers:
if layer.type == 'Output':
tss += layer.TSSError()
size += layer.size
return math.sqrt( tss / size )
|
def RMSError(self)
|
Returns Root Mean Squared Error for all output layers in this network.
| 5.626673
| 3.625951
| 1.551779
|
if self.verbosity > 0:
print("Network.step() called with:", args)
# First, copy the values into either activations or targets:
retargs = self.preStep(**args)
if retargs: args = retargs # replace the args
# Propagate activation through network:
self.propagate(**args)
retargs = self.postPropagate(**args)
if retargs: args = retargs # replace the args
# Next, take care of any Auto-association, and copy
# activations to targets
for aa in self.association:
(inName, outName) = aa
inLayer = self.getLayer(inName)
if inLayer.type not in ['Input', "Hidden"]:
raise LayerError('Associated input layer not type \'Input\' or \'Hidden\'.', \
inLayer.type)
outLayer = self.getLayer(outName)
if not outLayer.type == 'Output':
raise LayerError('Associated output layer not type \'Output\'.', \
outLayer.type)
outLayer.copyTargets(inLayer.activation)
# Compute error, and back prop it:
retargs = self.preBackprop(**args)
if retargs: args = retargs # replace the args
(error, correct, total, pcorrect) = self.backprop(**args) # compute_error()
if self.verbosity > 2 or self.interactive:
self.display()
if self.interactive:
self.prompt()
retargs = self.postBackprop(**args)
if retargs: args = retargs # replace the args
# if learning is true, and need to update weights here:
if self.learning and not self.batch:
self.change_weights() # else change weights in sweep
retargs = self.postStep(**args)
if retargs: args = retargs # replace the args
self.reportPattern()
return (error, correct, total, pcorrect)
|
def step(self, **args)
|
Network.step()
Does a single step. Calls propagate(), backprop(), and
change_weights() if learning is set.
Format for parameters: <layer name> = <activation/target list>
| 4.354535
| 3.946701
| 1.103335
|
self.preSweep()
if self.loadOrder == []:
raise NetworkError('No loadOrder for the inputs. Make sure inputs are properly set.', self.loadOrder)
if len(self.targets) != 0 and len(self.targets) != len(self.inputs):
raise NetworkError("Number of inputs does not equal number of targets (inputs=%d, targets=%d)" % (len(self.targets), len(self.inputs)))
if self.verbosity >= 1: print("Epoch #", self.epoch, "Cycle...")
if not self.orderedInputs:
self.randomizeOrder()
tssError = 0.0; totalCorrect = 0; totalCount = 0; totalPCorrect = {}
cnt = 0
if self.saveResults:
self.results = [(0,0,0) for x in self.loadOrder]
for i in self.loadOrder:
if self.verbosity >= 1 or self.interactive:
print("-----------------------------------Pattern #", self.loadOrder[i] + 1)
datum = self.getData(i) # creates a dictionary of input/targets from self.inputs, self.targets
if cnt < len(self.loadOrder) - 1:
self.currentSweepCount = cnt
else:
self.currentSweepCount = None
self._sweeping = 1
(error, correct, total, pcorrect) = self.step( **datum )
self._sweeping = 0
if self.saveResults:
self.results[i] = (error, correct, total, pcorrect)
tssError += error
totalCorrect += correct
totalCount += total
sumMerge(totalPCorrect, pcorrect)
if self.sweepReportRate and (cnt + 1) % self.sweepReportRate == 0:
print(" Step # %6d | TSS Error: %.4f | Correct: %.4f" % \
(cnt + 1, tssError, totalCorrect * 1.0 / totalCount))
if self.crossValidationSampleRate and self.epoch % self.crossValidationSampleRate == 0:
self.saveNetworkForCrossValidation(self.crossValidationSampleFile)
cnt += 1
if self.learning and self.batch:
self.change_weights() # batch mode, otherwise change weights in step
self.postSweep()
return (tssError, totalCorrect, totalCount, totalPCorrect)
|
def sweep(self)
|
Runs through the entire dataset.
Returns TSS error, total correct, total count, pcorrect (a dict of layer data)
| 4.731277
| 4.202988
| 1.125694
|
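A sketch of a hand-rolled training loop built directly on sweep(); assumes conx is importable and that the caller advances self.epoch (as the library's own training loop normally would):
from pyrobot.brain.conx import Network
net = Network()
net.addLayers(2, 2, 1)
net.setInputsAndTargets([[0, 0], [0, 1], [1, 0], [1, 1]],
                        [[0], [1], [1], [0]])
for epoch in range(1000):
    tss, correct, count, pcorrect = net.sweep()
    net.epoch += 1
    if correct == count:  # every output unit within tolerance
        break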
# get learning value and then turn it off
oldLearning = self.learning
self.learning = 0
tssError = 0.0; totalCorrect = 0; totalCount = 0; totalPCorrect = {}
self._cv = True # in cross validation
if self.autoCrossValidation:
for i in range(len(self.inputs)):
set = self.getDataCrossValidation(i)
self._sweeping = 1
(error, correct, total, pcorrect) = self.step( **set )
self._sweeping = 0
if self.crossValidationReportLayers != []:
(error, correct, total, pcorrect) = self.getError( *self.crossValidationReportLayers )
tssError += error
totalCorrect += correct
totalCount += total
sumMerge(totalPCorrect, pcorrect)
else:
for set in self.crossValidationCorpus:
self._sweeping = 1
(error, correct, total, pcorrect) = self.step( **set )
self._sweeping = 0
if self.crossValidationReportLayers != []:
(error, correct, total, pcorrect) = self.getError( *self.crossValidationReportLayers )
tssError += error
totalCorrect += correct
totalCount += total
sumMerge(totalPCorrect, pcorrect)
self.learning = oldLearning
self._cv = False
return (tssError, totalCorrect, totalCount, totalPCorrect)
|
def sweepCrossValidation(self)
|
sweepCrossValidation() will go through each of the crossvalidation input/targets.
The crossValidationCorpus is a list of dictionaries of input/targets
referenced by layername.
Example: ({"input": [0.0, 0.1], "output": [1.0]}, {"input": [0.5, 0.9], "output": [0.0]})
| 3.463199
| 3.378297
| 1.025132
|
count = 0
if self[layerName].active:
count += 1 # 1 = bias
for connection in self.connections:
if connection.active and connection.fromLayer.active and connection.toLayer.name == layerName:
count += connection.fromLayer.size
return count
|
def numConnects(self, layerName)
|
Number of incoming weights, including bias. Assumes fully connected.
| 5.326702
| 4.749633
| 1.121497
|
if self.verbosity > 2: print("Partially propagating network:")
# find all the layers involved in the propagation
propagateLayers = []
# propagateLayers should not include startLayers (no loops)
for startLayer in startLayers:
for layer in self.layers:
if self.path(startLayer, layer):
propagateLayers.append(layer)
for layer in propagateLayers:
if layer.active:
layer.netinput = (layer.weight).copy()
for layer in propagateLayers:
if layer.active:
for connection in self.connections:
if connection.active and connection.toLayer.name == layer.name:
connection.toLayer.netinput = connection.toLayer.netinput + \
Numeric.matrixmultiply(connection.fromLayer.activation,\
connection.weight) # propagate!
if layer.type != 'Input':
layer.activation = self.activationFunction(layer.netinput)
for layer in propagateLayers:
if layer.log and layer.active:
layer.writeLog(self)
|
def prop_from(self, startLayers)
|
Start propagation from the layers in the list
startLayers. Make sure startLayers are initialized with the
desired activations. NO ERROR CHECKING.
| 4.876056
| 4.746204
| 1.027359
|
self.prePropagate(**args)
for key in args:
layer = self.getLayer(key)
if layer.kind == 'Input':
if self[key].verify and not self[key].activationSet == 0:
raise AttributeError("attempt to set activations on input layer '%s' without reset" % key)
self.copyActivations(layer, args[key])
elif layer.kind == 'Context':
self.copyActivations(layer, args[key])
elif layer.kind == 'Output' and len(args[key]) == layer.size: # in case you expect propagate to handle the outputs
self.copyTargets(layer, args[key])
self.verifyInputs() # better have inputs set
if self.verbosity > 2: print("Propagate Network '" + self.name + "':")
# initialize netinput:
for layer in self.layers:
if layer.type != 'Input' and layer.active:
layer.netinput = (layer.weight).copy()
# for each connection, in order:
for layer in self.layers:
if layer.active:
for connection in self.connections:
if (connection.toLayer.name == layer.name
and connection.fromLayer.active
and connection.active):
a = connection.fromLayer.activation
w = connection.weight
m = Numeric.matrixmultiply(a, w)
ni = m + connection.toLayer.netinput
connection.toLayer.netinput = ni
#connection.toLayer.netinput = \
# (connection.toLayer.netinput +
# Numeric.matrixmultiply(connection.fromLayer.activation,
# connection.weight)) # propagate!
if layer.type != 'Input':
layer.activation = self.activationFunction(layer.netinput)
for layer in self.layers:
if layer.log and layer.active:
layer.writeLog(self)
self.count += 1 # counts number of times propagate() is called
if len(args) != 0:
dict = {}
for layer in self.layers:
if layer.type == "Output":
dict[layer.name] = layer.activation.copy()
if len(dict) == 1:
return dict[list(dict.keys())[0]]
else:
return dict
|
def propagate(self, **args)
|
Propagates activation through the network. Optionally, takes input layer names
as keywords, and their associated activations. If input layer(s) are given, then
propagate() will return the output layer's activation. If there is more than
one output layer, then a dictionary is returned.
Examples:
>>> net = Network() # doctest: +ELLIPSIS
Conx using seed: ...
>>> net.addLayers(2, 5, 1)
>>> len(net.propagate(input = [0, .5]))
1
| 4.295707
| 4.230118
| 1.015505
|
for layerName in args:
self[layerName].activationSet = 0 # force it to be ok
self[layerName].copyActivations(args[layerName])
# init toLayer:
self[toLayer].netinput = (self[toLayer].weight).copy()
# for each connection, in order:
for connection in self.connections:
if connection.active and connection.toLayer.name == toLayer and connection.fromLayer.active:
connection.toLayer.netinput = connection.toLayer.netinput + \
Numeric.matrixmultiply(connection.fromLayer.activation,\
connection.weight) # propagate!
if self[toLayer].type != 'Input':
self[toLayer].activation = self.activationFunction(self[toLayer].netinput)
return self[toLayer].activation.copy()
|
def propagateTo(self, toLayer, **args)
|
Propagates activation to a layer. Optionally, takes input layer names
as keywords, and their associated activations. Returns the toLayer's activation.
Examples:
>>> net = Network() # doctest: +ELLIPSIS
Conx using seed: ...
>>> net.addLayers(2, 5, 1)
>>> len(net.propagateTo("output"))
1
>>> len(net.propagateTo("hidden"))
5
>>> len(net.propagateTo("hidden", input = [0, 0]))
5
| 5.793977
| 5.87167
| 0.986768
|
for layerName in args:
self[layerName].copyActivations(args[layerName])
# initialize netinput:
started = 0
for layer in self.layers:
if layer.name == startLayer:
started = 1
continue # don't set this one
if not started: continue
if layer.type != 'Input' and layer.active:
layer.netinput = (layer.weight).copy()
# for each connection, in order:
started = 0
for layer in self.layers:
if layer.name == startLayer:
started = 1
continue # don't get inputs into this one
if not started: continue
if layer.active:
for connection in self.connections:
if connection.active and connection.toLayer.name == layer.name and connection.fromLayer.active:
connection.toLayer.netinput = connection.toLayer.netinput + \
Numeric.matrixmultiply(connection.fromLayer.activation,\
connection.weight) # propagate!
if layer.type != 'Input':
layer.activation = self.activationFunction(layer.netinput)
for layer in self.layers:
if layer.log and layer.active:
layer.writeLog(self)
self.count += 1 # counts number of times propagate() is called
if len(args) != 0:
dict = {}
for layer in self.layers:
if layer.type == "Output":
dict[layer.name] = layer.activation.copy()
if len(dict) == 1:
return dict[list(dict.keys())[0]]
else:
return dict
|
def propagateFrom(self, startLayer, **args)
|
Propagates activation through the network. Optionally, takes input layer names
as keywords, and their associated activations. If input layer(s) are given, then
propagate() will return the output layer's activation. If there is more than
one output layer, then a dictionary is returned.
Examples:
>>> net = Network() # doctest: +ELLIPSIS
Conx using seed: ...
>>> net.addLayers(2, 5, 1)
>>> len(net.propagate(input = [1, .5]))
1
| 3.549366
| 3.485392
| 1.018355
|
def act(v):
if v < -15.0: return 0.0
elif v > 15.0: return 1.0
else: return 1.0 / (1.0 + Numeric.exp(-v))
return Numeric.array(list(map(act, x)), 'f')
|
def activationFunctionASIG(self, x)
|
Determine the activation of a node based on that node's net input.
| 3.343799
| 3.275915
| 1.020722
|
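The clipping at +/-15 above guards Numeric.exp against overflow; a standalone pure-Python sketch of the same logistic:
import math

def asig(v):
    # logistic activation, clipped exactly as in activationFunctionASIG above
    if v < -15.0:
        return 0.0
    elif v > 15.0:
        return 1.0
    return 1.0 / (1.0 + math.exp(-v))

print(asig(0.0))   # 0.5
print(asig(20.0))  # 1.0, clipped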
def act(v):
if v < -15.0: return 0.0
elif v > 15.0: return 1.0
else: return 1.0 / (1.0 + Numeric.exp(-v))
return (act(x) * (1.0 - act(x))) + self.sigmoid_prime_offset
|
def actDerivASIG(self, x)
|
Only works on scalars.
| 4.386049
| 4.255604
| 1.030653
|
self.activationFunction = self.activationFunctionTANH
self.ACTPRIME = self.ACTPRIMETANH
self.actDeriv = self.actDerivTANH
for layer in self:
layer.minTarget, layer.minActivation = -1.7159, -1.7159
layer.maxTarget, layer.maxActivation = 1.7159, 1.7159
|
def useTanhActivationFunction(self)
|
Change the network to use the hyperbolic tangent activation function for all layers.
Must be called after all layers have been added.
| 4.324133
| 4.088301
| 1.057685
|
self.activationFunction = self.activationFunctionFahlman
self.ACTPRIME = self.ACTPRIME_Fahlman
self.actDeriv = self.actDerivFahlman
for layer in self:
layer.minTarget, layer.minActivation = -0.5, -0.5
layer.maxTarget, layer.maxActivation = 0.5, 0.5
|
def useFahlmanActivationFunction(self)
|
Change the network to use Fahlman's default activation function for all layers.
Must be called after all layers have been added.
| 4.726653
| 4.406635
| 1.072622
|
retval = self.compute_error(**args)
if self.learning:
self.compute_wed()
return retval
|
def backprop(self, **args)
|
Computes error and wed for back propagation of error.
| 14.008018
| 5.650739
| 2.478971
|
#print "WEIGHT = ", w
shrinkFactor = self.mu / (1.0 + self.mu)
if self.splitEpsilon:
e /= float(n)
if self._quickprop:
nextStep = Numeric.zeros(len(dweightLast), 'f')
for i in range(len(dweightLast)):
s = wed[i]
d = dweightLast[i]
p = wedLast[i]
#print self.mu
#print "slopes[node] = %f QP w=%s d=%f s=%f p=%f eps=%f" % (s, w, d, s, p, e)
#print type(nextStep[i]), nextStep[i]
if (d > 0.0):
if (s > 0.0):
#print "CASE A1"
nextStep[i] = nextStep[i] + (e * s)
if (s >= (shrinkFactor * p)):
#print "CASE B1"
nextStep[i] = nextStep[i] + (self.mu * d)
else:
#print "CASE C1"
nextStep[i] = nextStep[i] + (d * s / (p - s))
elif (d < 0.0):
if (s < 0.0):
#print "CASE A2"
nextStep[i] = nextStep[i] + (e * s)
if (s <= (shrinkFactor * p)):
#print "CASE B2"
nextStep[i] = nextStep[i] + (self.mu * d)
else:
#print "CASE C2"
nextStep[i] = nextStep[i] + (d * s / (p - s))
else:
#print "CASE D"
nextStep[i] = nextStep[i] + (e * s + m * d)
## Last step was zero, so only use linear ##
newDweight = nextStep
#print "Next Step = ", nextStep[i]
else: # backprop
newDweight = e * wed + m * dweightLast # gradient descent
return newDweight
|
def deltaWeight(self, e, wed, m, dweightLast, wedLast, w, n)
|
e - learning rate
wed - weight error delta vector (slope)
m - momentum
dweightLast - previous dweight vector (slope)
wedLast - only used in quickprop; last weight error delta vector
w - weight vector
n - fan-in, number of connections coming in (counts bias, too)
| 3.102102
| 3.043586
| 1.019226
|
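For the ordinary backprop branch of deltaWeight() above, the update is plain gradient descent with momentum; a tiny worked sketch with assumed values:
e, m = 0.5, 0.9             # learning rate, momentum (assumed for illustration)
wed = [0.2, -0.1]           # current slopes
dweightLast = [0.05, 0.0]   # previous weight changes
newDweight = [e * s + m * d for s, d in zip(wed, dweightLast)]
print(newDweight)           # [0.145, -0.05]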
dw_count, dw_sum = 0, 0.0
if len(self.cacheLayers) != 0:
changeLayers = self.cacheLayers
else:
changeLayers = self.layers
for layer in changeLayers:
if layer.active and layer.type != 'Input':
if not layer.frozen:
if self._quickprop or self.splitEpsilon:
layer.dweight = self.deltaWeight(self.epsilon,
layer.wed,
self.momentum,
layer.dweight,
layer.wedLast,
layer.weight,
self.numConnects(layer.name))
else:
layer.dweight = self.epsilon * layer.wed + self.momentum * layer.dweight
layer.weight += layer.dweight
#print "layer.wed = ",layer.wed
#print "layer.weight = ",layer.weight," layer.dweight = ",layer.dweight
layer.wedLast = Numeric.array(layer.wed) # make copy
if self._quickprop:
layer.wed = layer.weight * self.decay # reset to last weight, with decay
else:
layer.wed = layer.wed * 0.0 # keep same numeric type, just zero it
dw_count += len(layer.dweight)
dw_sum += Numeric.add.reduce(abs(layer.dweight))
if len(self.cacheConnections) != 0:
changeConnections = self.cacheConnections
else:
changeConnections = self.connections
for connection in changeConnections:
if (connection.active
and connection.fromLayer.active
and connection.toLayer.active
and not connection.frozen):
toLayer = connection.toLayer
if self._quickprop or self.splitEpsilon:
# doing it one vector at a time, to match layer bias training (a quickprop abstraction)
for i in range(len(connection.dweight)):
Numeric.put(connection.dweight[i],
Numeric.arange(len(connection.dweight[i])),
self.deltaWeight(self.epsilon,
connection.wed[i],
self.momentum,
connection.dweight[i],
connection.wedLast[i],
connection.weight[i],
self.numConnects(connection.toLayer.name)))
else:
connection.dweight = self.epsilon * connection.wed + self.momentum * connection.dweight
connection.weight += connection.dweight
#print "connection.wed = ",connection.wed
#print "connection.weight = ",connection.weight," connection.dweight = ",connection.dweight
# reset values:
connection.wedLast = Numeric.array(connection.wed) # make copy
if self._quickprop:
connection.wed = connection.weight * self.decay
else:
connection.wed = connection.wed * 0.0 # keeps the same Numeric type, but makes it zero
# get some stats
dw_count += Numeric.multiply.reduce(connection.dweight.shape)
dw_sum += Numeric.add.reduce(Numeric.add.reduce(abs(connection.dweight)))
if self.verbosity >= 1:
print("WEIGHTS CHANGED")
if self.verbosity > 2:
self.display()
return (dw_count, dw_sum)
|
def change_weights(self)
|
Changes the weights according to the error values calculated
during backprop(). Learning must be set.
| 3.140409
| 3.113398
| 1.008676
|
def difference(v):
if not self.hyperbolicError:
#if -0.1 < v < 0.1: return 0.0
#else:
return v
else:
if v < -0.9999999: return -17.0
elif v > 0.9999999: return 17.0
else: return math.log( (1.0 + v) / (1.0 - v) )
#else: return Numeric.arctanh(v) # half that above
return list(map(difference, t - a))
|
def errorFunction(self, t, a)
|
Using a hyperbolic arctan on the error slightly exaggerates
the actual error non-linearly. Return t - a to just use the difference.
t - target vector
a - activation vector
| 4.570711
| 4.317435
| 1.058664
|
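A standalone sketch of the clipped atanh error above, showing how it exaggerates large misses relative to the plain difference:
import math

def hyperbolic_error(v):
    # clipped log-odds transform, as in errorFunction above
    if v < -0.9999999:
        return -17.0
    elif v > 0.9999999:
        return 17.0
    return math.log((1.0 + v) / (1.0 - v))

t, a = 0.9, 0.1
print(t - a, hyperbolic_error(t - a))  # 0.8 versus about 2.197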
retval = 0.0; correct = 0; totalCount = 0
for layer in self.layers:
if layer.active:
if layer.type == 'Output':
layer.error = self.errorFunction(layer.target, layer.activation)
totalCount += layer.size
retval += Numeric.add.reduce((layer.target - layer.activation) ** 2)
correct += Numeric.add.reduce(Numeric.fabs(layer.target - layer.activation) < self.tolerance)
elif (layer.type == 'Hidden'):
for i in range(layer.size): # do it this way so you don't break reference links
layer.error[i] = 0.0
return (retval, correct, totalCount)
|
def ce_init(self)
|
Initializes error computation. Calculates error for output
layers and initializes hidden layer error to zero.
| 4.667273
| 3.984853
| 1.171254
|
for key in args:
layer = self.getLayer(key)
if layer.kind == 'Output':
self.copyTargets(layer, args[key])
self.verifyTargets() # better have targets set
error, correct, total = self.ce_init()
pcorrect = {}
# go backwards through each proj but don't redo output errors!
if len(self.cacheConnections) != 0:
changeConnections = self.cacheConnections
else:
changeConnections = self.connections
for connect in reverse(changeConnections):
if connect.active and connect.toLayer.active and connect.fromLayer.active:
connect.toLayer.delta = (connect.toLayer.error *
(self.ACTPRIME(connect.toLayer.activation)))
connect.fromLayer.error = connect.fromLayer.error + \
Numeric.matrixmultiply(connect.weight, connect.toLayer.delta)
# now all errors are set on all layers!
pcorrect = self.getLayerErrors()
return (error, correct, total, pcorrect)
|
def compute_error(self, **args)
|
Computes error for all non-output layers backwards through all
projections.
| 7.813644
| 7.455156
| 1.048086
|
if len(self.cacheConnections) != 0:
changeConnections = self.cacheConnections
else:
changeConnections = self.connections
for connect in reverse(changeConnections):
if connect.active and connect.fromLayer.active and connect.toLayer.active:
connect.wed = connect.wed + Numeric.outerproduct(connect.fromLayer.activation,
connect.toLayer.delta)
if len(self.cacheLayers) != 0:
changeLayers = self.cacheLayers
else:
changeLayers = self.layers
for layer in changeLayers:
if layer.active:
layer.wed = layer.wed + layer.delta
|
def compute_wed(self)
|
Computes weight error derivative for all connections in
self.connections starting with the last connection.
| 4.173933
| 3.387254
| 1.232247
|
output = ""
for layer in reverse(self.layers):
output += layer.toString()
return output
|
def toString(self)
|
Returns the network layers as a string.
| 6.46934
| 4.140639
| 1.562401
|
print("Display network '" + self.name + "':")
size = list(range(len(self.layers)))
size.reverse()
for i in size:
if self.layers[i].active:
self.layers[i].display()
if self.patterned and self.layers[i].type != 'Hidden':
targetWord, diff = self.getWord( self.layers[i].target, returnDiff = 1)
if self.layers[i].kind == 'Output':
if targetWord == None:
print("Target Pattern = %s" % "No match")
else:
if diff == 0.0:
print("Target Pattern = '%s'; (exact)" % targetWord)
else:
print("Target Pattern = '%s'; Match difference = %f)" % (targetWord, diff))
actWord, diff = self.getWord( self.layers[i].activation, returnDiff = 1 )
if (self.layers[i].kind == 'Input' or self.layers[i].kind == 'Output'):
if actWord == None:
print("Matching Pattern = %s" % "No match")
else:
if diff == 0.0:
print("Matching Pattern = '%s'; (exact)" % actWord)
else:
print("Matching Pattern = '%s'; Match difference = %f" % (actWord, diff))
if self.verbosity >= 1:
weights = list(range(len(self.connections)))
weights.reverse()
for j in weights:
if self.connections[j].toLayer.name == self.layers[i].name:
self.connections[j].display()
|
def display(self)
|
Displays the network to the screen.
| 2.924464
| 2.851573
| 1.025561
|
gene = []
for layer in self.layers:
if layer.type != 'Input':
for i in range(layer.size):
gene.append( layer.weight[i] )
for connection in self.connections:
for i in range(connection.fromLayer.size):
for j in range(connection.toLayer.size):
gene.append( connection.weight[i][j] )
return gene
|
def arrayify(self)
|
Returns an array of node bias values and connection weights
for use in a GA.
| 2.786719
| 2.374938
| 1.173386
|
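A sketch of the arrayify()/unArrayify() round trip, e.g. for a GA individual (assuming conx is importable; the uniform nudge stands in for a real mutation operator):
from pyrobot.brain.conx import Network
net = Network()
net.addLayers(2, 3, 1)
gene = net.arrayify()               # flat list: biases first, then weights
mutated = [w + 0.01 for w in gene]  # hypothetical mutation
net.unArrayify(mutated)             # copies back; IndexError if lengths differ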
g = 0
# if gene is too small an IndexError will be thrown
for layer in self.layers:
if layer.type != 'Input':
for i in range(layer.size):
layer.weight[i] = float( gene[g])
g += 1
for connection in self.connections:
for i in range(connection.fromLayer.size):
for j in range(connection.toLayer.size):
connection.weight[i][j] = gene[g]
g += 1
# if gene is too long we may have a problem
if len(gene) > g:
raise IndexError('Argument to unArrayify is too long.', len(gene))
|
def unArrayify(self, gene)
|
Copies gene bias values and weights to network bias values and
weights.
| 3.827153
| 3.657962
| 1.046253
|
self.saveWeights(filename, mode, counter)
|
def saveWeightsToFile(self, filename, mode='pickle', counter=None)
|
Deprecated. Use saveWeights instead.
| 5.608176
| 3.431507
| 1.634319
|
# modes: pickle/conx, plain, tlearn
if "?" in filename: # replace ? pattern in filename with epoch number
import re
char = "?"
match = re.search(re.escape(char) + "+", filename)
if match:
num = self.epoch
if counter != None:
num = counter
elif self.totalEpoch != 0: # use a total epoch, if one:
num = self.totalEpoch
fstring = "%%0%dd" % len(match.group())
filename = filename[:match.start()] + \
fstring % num + \
filename[match.end():]
self.lastAutoSaveWeightsFilename = filename
if mode == 'pickle':
mylist = self.arrayify()
import pickle
fp = open(filename, "wb")  # binary mode for pickle
pickle.dump(mylist, fp)
fp.close()
elif mode in ['plain', 'conx']:
fp = open(filename, "w")
fp.write("# Biases\n")
for layer in self.layers:
if layer.type != 'Input':
fp.write("# Layer: " + layer.name + "\n")
for i in range(layer.size):
fp.write("%f " % layer.weight[i] )
fp.write("\n")
fp.write("# Weights\n")
for connection in self.connections:
fp.write("# from " + connection.fromLayer.name + " to " +
connection.toLayer.name + "\n")
for i in range(connection.fromLayer.size):
for j in range(connection.toLayer.size):
fp.write("%f " % connection.weight[i][j] )
fp.write("\n")
fp.close()
elif mode == 'tlearn':
fp = open(filename, "w")
fp.write("NETWORK CONFIGURED BY TLEARN\n")
fp.write("# weights after %d sweeps\n" % self.epoch)
fp.write("# WEIGHTS\n")
cnt = 1
for lto in self.layers:
if lto.type != 'Input':
for j in range(lto.size):
fp.write("# TO NODE %d\n" % cnt)
fp.write("%f\n" % lto.weight[j] )
for lfrom in self.layers:
try:
connection = self.getConnection(lfrom.name,lto.name)
for i in range(connection.fromLayer.size):
fp.write("%f\n" % connection.weight[i][j])
except NetworkError: # should return an exception here
for i in range(lfrom.size):
fp.write("%f\n" % 0.0)
cnt += 1
fp.close()
else:
raise ValueError('Unknown mode in saveWeights().', mode)
|
def saveWeights(self, filename, mode='pickle', counter=None)
|
Saves weights to file in pickle, plain, or tlearn mode.
| 2.955677
| 2.84632
| 1.03842
|
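A sketch of the '?' filename pattern handled above: a run of '?' is replaced by the zero-padded epoch (or counter) before writing (assuming conx is importable):
from pyrobot.brain.conx import Network
net = Network()
net.addLayers(2, 3, 1)
net.epoch = 42
net.saveWeights("weights-????.wts", mode="conx")  # writes weights-0042.wts
print(net.lastAutoSaveWeightsFilename)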
self.saveNetworkToFile(filename, makeWrapper, mode, counter)
|
def saveNetwork(self, filename, makeWrapper = 1, mode = "pickle", counter = None)
|
Saves network to file using pickle.
| 5.121316
| 3.813132
| 1.343073
|
if "?" in filename: # replace ? pattern in filename with epoch number
import re
char = "?"
match = re.search(re.escape(char) + "+", filename)
if match:
num = self.epoch
if counter != None:
num = counter
elif self.totalEpoch != 0: # use a total epoch, if one:
num = self.totalEpoch
fstring = "%%0%dd" % len(match.group())
filename = filename[:match.start()] + \
fstring % num + \
filename[match.end():]
self.lastAutoSaveNetworkFilename = filename
if mode == "pickle":
# dump network via pickle:
import pickle
basename = filename.split('.')[0]
filename += ".pickle"
fp = open(filename, 'wb')  # binary mode for pickle
pickle.dump(self, fp)
fp.close()
# make wrapper python file:
if makeWrapper:
fp = open(basename + ".py", "w")
fp.write("from pyrobot.brain.conx import *\n")
fp.write("import pickle\n")
fp.write("fp = open('%s', 'r')\n" % filename)
fp.write("network = pickle.load(fp)")
fp.close()
# give some help:
print("To load network:")
print(" %% python -i %s " % (basename + ".py"))
print(" >>> network.train() # for example")
print("--- OR ---")
print(" % python")
print(" >>> from pyrobot.brain.conx import *")
print(" >>> network = loadNetwork(%s)" % filename)
print(" >>> network.train() # for example")
elif mode in ["plain", "conx"]:
fp = open(filename, "w")
fp.write("network, %s\n" % (self.__class__.__name__))
for layer in self.layers:
fp.write("layer, %s, %s\n" % (layer.name, layer.size))
# biases:
for i in range(layer.size):
fp.write("%f " % layer.weight[i])
fp.write("\n")
for connection in self.connections:
fp.write("connection, %s, %s\n" %(connection.fromLayer.name, connection.toLayer.name))
# weights:
for i in range(connection.fromLayer.size):
for j in range(connection.toLayer.size):
fp.write("%f " % connection.weight[i][j])
fp.write("\n")
fp.close()
|
def saveNetworkToFile(self, filename, makeWrapper = 1, mode = "pickle", counter = None)
|
Deprecated.
| 2.975024
| 2.958533
| 1.005574
|
return self.loadVectorsFromFile(filename, cols, everyNrows,
delim, checkEven, patterned)
|
def loadVectors(self, filename, cols = None, everyNrows = 1,
delim = ' ', checkEven = 1, patterned = 0)
|
Loads a set of vectors from a file. Takes a filename, a list of the cols
you want (or None for all), an everyNrows sampling rate (or 1 for no
skipping), and a delimiter.
| 2.796693
| 3.672297
| 0.761565
|
fp = open(filename, "r")
line = fp.readline()
lineno = 0
lastLength = None
data = []
while line:
if lineno % everyNrows == 0:
if patterned:
linedata1 = [x for x in line.strip().split(delim)]
else:
linedata1 = [float(x) for x in line.strip().split(delim)]
else:
lineno += 1
line = fp.readline()
continue
if cols == None: # get em all
newdata = linedata1
else: # just get some cols
newdata = []
for i in cols:
newdata.append( linedata1[i] )
if lastLength == None or (not checkEven) or (checkEven and len(newdata) == lastLength):
data.append( newdata )
else:
raise ValueError("Data Format Error: line = %d, data = %s" % (lineno, newdata))
lastLength = len(newdata)
lineno += 1
line = fp.readline()
fp.close()
return data
|
def loadVectorsFromFile(self, filename, cols = None, everyNrows = 1,
delim = ' ', checkEven = 1, patterned = 0)
|
Deprecated.
| 2.553171
| 2.569296
| 0.993724
|
self.loadInputPatternsFromFile(filename, cols, everyNrows,
delim, checkEven)
|
def loadInputPatterns(self, filename, cols = None, everyNrows = 1,
delim = ' ', checkEven = 1)
|
Loads inputs as patterns from file.
| 3.261273
| 3.028318
| 1.076926
|
self.inputs = self.loadVectors(filename, cols, everyNrows, delim, checkEven, patterned = 1)
self.loadOrder = list(range(len(self.inputs)))
|
def loadInputPatternsFromFile(self, filename, cols = None, everyNrows = 1,
delim = ' ', checkEven = 1)
|
Deprecated.
| 3.447868
| 3.323411
| 1.037449
|
self.loadInputsFromFile(filename, cols, everyNrows,
delim, checkEven)
|
def loadInputs(self, filename, cols = None, everyNrows = 1,
delim = ' ', checkEven = 1)
|
Loads inputs from file. Patterning is lost.
| 3.478043
| 3.179086
| 1.094039
|
fp = open(filename, 'w')
for input in self.inputs:
vec = self.replacePatterns(input)
for item in vec:
fp.write("%f " % item)
fp.write("\n")
|
def saveInputsToFile(self, filename)
|
Deprecated.
| 4.011614
| 4.063489
| 0.987234
|
self.loadTargetsFromFile(filename, cols, everyNrows,
delim, checkEven)
|
def loadTargets(self, filename, cols = None, everyNrows = 1,
delim = ' ', checkEven = 1)
|
Loads targets from file.
| 3.432257
| 3.056506
| 1.122935
|
self.targets = self.loadVectors(filename, cols, everyNrows,
delim, checkEven)
|
def loadTargetsFromFile(self, filename, cols = None, everyNrows = 1,
delim = ' ', checkEven = 1)
|
Deprecated. Use loadTargets instead.
| 4.242649
| 3.891349
| 1.090277
|
self.loadTargetPatternsFromFile(filename, cols, everyNrows,
delim, checkEven)
|
def loadTargetPatterns(self, filename, cols = None, everyNrows = 1,
delim = ' ', checkEven = 1)
|
Loads targets as patterns from file.
| 5.495135
| 5.083567
| 1.080961
|
self.targets = self.loadVectors(filename, cols, everyNrows,
delim, checkEven, patterned=1)
|
def loadTargetPatternsFromFile(self, filename, cols = None, everyNrows = 1,
delim = ' ', checkEven = 1)
|
Deprecated.
| 7.078886
| 6.394175
| 1.107084
|
fp = open(filename, 'w')
for target in self.targets:
vec = self.replacePatterns(target)
for item in vec:
fp.write("%f " % item)
fp.write("\n")
|
def saveTargetsToFile(self, filename)
|
Deprecated.
| 4.131644
| 4.17554
| 0.989487
|
fp = open(filename, 'w')
for i in range(len(self.inputs)):
try:
vec = self.replacePatterns(self.inputs[i])
for item in vec:
fp.write("%f " % item)
except Exception: # input may be missing or not a plain vector; skip it
pass
try:
vec = self.replacePatterns(self.targets[i])
for item in vec:
fp.write("%f " % item)
except Exception: # there may be fewer targets than inputs; skip
pass
fp.write("\n")
fp.close()
|
def saveDataToFile(self, filename)
|
Deprecated.
| 2.426016
| 2.424046
| 1.000813
|
if ocnt == -1: # default: the last layer's size gives the target width
ocnt = int(self.layers[len(self.layers) - 1].size)
fp = open(filename, 'r')
line = fp.readline()
self.targets = []
self.inputs = []
while line:
data = list(map(float, line.split()))
cnt = len(data)
icnt = cnt - ocnt
self.inputs.append(self.patternVector(data[0:icnt]))
self.targets.append(self.patternVector(data[icnt:]))
line = fp.readline()
fp.close()
self.loadOrder = list(range(len(self.inputs)))
|
def loadDataFromFile(self, filename, ocnt = -1)
|
Deprecated.
| 2.522768
| 2.489645
| 1.013304
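The input/target split above is positional: the last `ocnt` columns of each line are the target, the rest are the input. A tiny sketch:

```python
line = "0.0 1.0 0.5 1.0"
ocnt = 1                             # one output (target) column
data = list(map(float, line.split()))
icnt = len(data) - ocnt
print(data[:icnt], data[icnt:])      # [0.0, 1.0, 0.5] [1.0]
```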
|
if (name, layer) in self.patterns:
return self.patterns[(name, layer)]
else:
return self.patterns[name]
|
def lookupPattern(self, name, layer)
|
See if there is a name/layer pattern combo, else return the name pattern.
| 3.00954
| 2.240391
| 1.34331
|
if not self.patterned: return vector
if type(vector) == str:
return self.replacePatterns(self.lookupPattern(vector, layer), layer)
elif type(vector) != list:
return vector
# should be a vector if we made it here
vec = []
for v in vector:
if type(v) == str:
retval = self.replacePatterns(self.lookupPattern(v, layer), layer)
if type(retval) == list:
vec.extend( retval )
else:
vec.append( retval )
else:
vec.append( v )
return vec
|
def replacePatterns(self, vector, layer = None)
|
Replaces patterned inputs or targets with activation vectors.
| 2.616416
| 2.523278
| 1.036912
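A standalone sketch of the replacement rule: pattern names expand to their stored vectors (spliced in flat), while plain numbers pass through unchanged:

```python
patterns = {"tom": [0, 0, 0, 1], "on": 0.5}   # hypothetical pattern table

def replace_patterns(vector, patterns):
    if isinstance(vector, str):
        return patterns[vector]
    vec = []
    for v in vector:
        if isinstance(v, str):
            retval = patterns[v]
            if isinstance(retval, list):
                vec.extend(retval)    # vector patterns are spliced in flat
            else:
                vec.append(retval)    # scalar patterns take one slot
        else:
            vec.append(v)
    return vec

print(replace_patterns(["tom", "on", 1.0], patterns))  # [0, 0, 0, 1, 0.5, 1.0]
```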
|
if not self.patterned: return vector
if type(vector) in [int, float]:
if self.getWord(vector) != '':
return self.getWord(vector)
else:
return vector
elif type(vector) == str:
return vector
elif type(vector) == list:
if self.getWord(vector) != '':
return self.getWord(vector)
# should be a list
vec = []
for v in vector:
if self.getWord(v) != '':
retval = self.getWord(v)
vec.append( retval )
else:
retval = self.patternVector(v)
vec.append( retval )
return vec
|
def patternVector(self, vector)
|
Replaces vector with patterns. Used for loading inputs or
targets from a file and still preserving patterns.
| 2.333917
| 2.307332
| 1.011522
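The inverse direction, as a sketch: a raw vector collapses back to its pattern name when an exact match exists (conx's version also matches scalars and recurses into sublists):

```python
patterns = {"tom": [0, 0, 0, 1]}

def pattern_name(vec, patterns):
    for word, pat in patterns.items():
        if pat == vec:
            return word   # exact match: keep the symbolic name
    return vec            # no match: keep the raw vector

print(pattern_name([0, 0, 0, 1], patterns))  # tom
```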
|
if word in self.patterns:
return self.patterns[word]
else:
raise ValueError('Unknown pattern in getPattern().', word)
|
def getPattern(self, word)
|
Returns the pattern with key word.
Example: net.getPattern("tom") => [0, 0, 0, 1]
| 4.651327
| 5.490072
| 0.847225
|
minDist = float('inf') # tracks the smallest distance seen so far
closest = None
for w in self.patterns:
# There may be some patterns that are scalars; we don't search
# those in this function:
if type(self.patterns[w]) in [int, float]: continue
if len(self.patterns[w]) == len(pattern):
dist = sum((a - b) ** 2 for (a, b) in zip(self.patterns[w], pattern))
if dist == 0.0:
if returnDiff:
return w, dist
else:
return w
if dist < minDist:
minDist = dist
closest = w
if returnDiff:
return closest, minDist
else:
return closest
|
def getWord(self, pattern, returnDiff = 0)
|
Returns the word associated with pattern.
Example: net.getWord([0, 0, 0, 1]) => "tom"
This method now returns the closest pattern based on distance.
| 3.18249
| 3.034282
| 1.048844
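A sketch of the closest-match rule: squared Euclidean distance over same-length patterns, smallest distance wins (the helper name is hypothetical):

```python
patterns = {"tom": [0, 0, 0, 1], "sue": [0, 0, 1, 0]}

def get_word(pattern, patterns):
    closest, minDist = None, float('inf')
    for w, p in patterns.items():
        if len(p) != len(pattern):
            continue
        dist = sum((a - b) ** 2 for a, b in zip(p, pattern))
        if dist < minDist:
            closest, minDist = w, dist
    return closest, minDist

print(get_word([0.1, 0.0, 0.1, 0.9], patterns))  # ('tom', ~0.03)
```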
|
if word in self.patterns:
raise NetworkError('Pattern key already in use. Call delPattern to free key.', word)
else:
self.patterns[word] = vector
|
def addPattern(self, word, vector)
|
Adds a pattern with key word.
Example: net.addPattern("tom", [0, 0, 0, 1])
| 8.442193
| 7.705558
| 1.095598
|
try:
if len(v1) != len(v2): return 0
for x, y in zip(v1, v2):
if abs( x - y ) > self.tolerance:
return 0
return 1
except TypeError:
# some patterns are scalars, not vectors
try:
if abs( v1 - v2 ) > self.tolerance:
return 0
else:
return 1
except TypeError:
return 0
|
def compare(self, v1, v2)
|
Compares two values. Returns 1 if all values are within
self.tolerance of each other.
| 2.45364
| 2.276907
| 1.07762
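The tolerance test reduces to a single predicate; a quick sketch (0.4 is an assumed tolerance, not necessarily your network's setting):

```python
tolerance = 0.4   # assumption; conx networks carry their own self.tolerance
v1, v2 = [0.9, 0.1], [1.0, 0.0]
print(all(abs(x - y) <= tolerance for x, y in zip(v1, v2)))  # True
```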
|
if listOfLayerNamePairs is None:
listOfLayerNamePairs = []
for c in self.connections:
listOfLayerNamePairs.append( [c.fromLayer.name, c.toLayer.name] )
if self.verbosity > 1:
print("sharing weights:", self.name, listOfLayerNamePairs)
# first, check to see if this will work:
count = 0
for (fromLayerName, toLayerName) in listOfLayerNamePairs:
for c1 in range(len(self.connections)):
if self.connections[c1].fromLayer.name == fromLayerName and \
self.connections[c1].toLayer.name == toLayerName:
for c2 in range(len(network.connections)):
if network.connections[c2].fromLayer.name == fromLayerName and \
network.connections[c2].toLayer.name == toLayerName:
if (self.connections[c1].fromLayer.size != network.connections[c2].fromLayer.size) or \
(self.connections[c1].toLayer.size != network.connections[c2].toLayer.size):
raise AttributeError("shareSomeWeights: layer sizes did not match")
count += 1
if count != len(listOfLayerNamePairs):
raise AttributeError("shareSomeWeights: layer names did not match")
# ok, now let's share!
self.sharedWeights = 1
network.sharedWeights = 1
for (fromLayerName, toLayerName) in listOfLayerNamePairs:
for c1 in range(len(self.connections)):
if self.connections[c1].fromLayer.name == fromLayerName and \
self.connections[c1].toLayer.name == toLayerName:
for c2 in range(len(network.connections)):
if network.connections[c2].fromLayer.name == fromLayerName and \
network.connections[c2].toLayer.name == toLayerName:
self.connections[c1].weight = network.connections[c2].weight
for (fromLayerName, toLayerName) in listOfLayerNamePairs:
for l1 in range(len(self.layers)):
if self.layers[l1].name == toLayerName:
for l2 in range(len(network.layers)):
if network.layers[l2].name == toLayerName:
self.layers[l1].weight = network.layers[l2].weight
|
def shareWeights(self, network, listOfLayerNamePairs = None)
|
Share weights with another network. Connection
is broken after a randomize or change of size. Layers must have the same
names and sizes for shared connections in both networks.
Example: net.shareWeights(otherNet, [["hidden", "output"]])
This example will take the weights between the hidden and output layers
of otherNet and share them with net. Also, the bias values of
otherNet["output"] will be shared with net["output"].
If no list is given, will share all weights.
| 1.669627
| 1.683343
| 0.991852
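Sharing works by aliasing the weight arrays, not copying them, so later training updates in either network are visible in both. A minimal illustration of the aliasing idea (plain lists standing in for the Numeric arrays):

```python
a = {"weight": [[0.1, 0.2], [0.3, 0.4]]}   # stand-in for one connection
b = {"weight": [[0.0, 0.0], [0.0, 0.0]]}
a["weight"] = b["weight"]   # the assignment shareWeights performs
b["weight"][0][0] = 9.9
print(a["weight"][0][0])    # 9.9 -- both networks see the update
```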
|
output = Network.propagate(self, *arg, **kw)
if self.interactive:
self.updateGraphics()
# IMPORTANT: convert results from numpy.floats to conventional floats
if type(output) == dict:
for layerName in output:
output[layerName] = [float(x) for x in output[layerName]]
return output
else:
return [float(x) for x in output]
|
def propagate(self, *arg, **kw)
|
Propagates activation through the network.
| 4.484244
| 4.344665
| 1.032127
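The float() conversion matters because numpy scalars are not plain Python floats, which can surprise pickling or comparisons downstream; a quick illustration (assumes numpy is installed):

```python
import numpy
x = numpy.float64(0.5)
print(type(x))         # <class 'numpy.float64'>
print(type(float(x)))  # <class 'float'>
```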
|
Network.loadWeights(self, filename, mode)
self.updateGraphics()
|
def loadWeightsFromFile(self, filename, mode='pickle')
|
Deprecated. Use loadWeights instead.
| 12.380614
| 9.175974
| 1.349243
|
for i in reversed(range(len(self.layers))): # display layers output-first
layer = self.layers[i]
if layer.active:
print('%s layer (size %d)' % (layer.name, layer.size))
tlabel, olabel = '', ''
if (layer.type == 'Output'):
if self.countWrong:
tlabel = ' (%s)' % self.classify(layer.target.tolist())
olabel = ' (%s)' % self.classify(layer.activation.tolist())
if olabel == tlabel:
self.numRight += 1
else:
olabel += ' *** WRONG ***'
self.numWrong += 1
if self.actDisplay is not None:
self.actDisplay.showWrong()
print('Target : %s%s' % (pretty(layer.target, max=15), tlabel))
print('Activation: %s%s' % (pretty(layer.activation, max=15), olabel))
if self.patterned and layer.type != 'Hidden':
targetWord, diff = self.getWord( layer.target, returnDiff = 1)
if layer.kind == 'Output':
if targetWord is None:
print("Target Pattern = %s" % "No match")
else:
if diff == 0.0:
print("Target Pattern = '%s'" % targetWord)
else:
print("Target Pattern = '%s'; difference = %f)" % (targetWord, diff))
actWord, diff = self.getWord( layer.activation, returnDiff = 1 )
if (layer.kind == 'Input' or layer.kind == 'Output'):
if actWord is None:
print("Matching Pattern = %s" % "No match")
else:
if diff == 0.0:
print("Matching Pattern = '%s'" % actWord)
else:
print("Matching Pattern = '%s'; difference = %f" % (actWord, diff))
print("------------------------------------")
|
def display(self)
|
Displays the network to the screen.
| 3.291332
| 3.281693
| 1.002937
|
if value == "ordered-continuous":
self.orderedInputs = 1
self.initContext = 0
elif value == "random-segmented":
self.orderedInputs = 0
self.initContext = 1
elif value == "random-continuous":
self.orderedInputs = 0
self.initContext = 0
elif value == "ordered-segmented":
self.orderedInputs = 1
self.initContext = 1
else:
raise AttributeError("invalid sequence type: '%s'" % value)
self.sequenceType = value
|
def setSequenceType(self, value)
|
You must set this! Valid values are 'ordered-continuous',
'ordered-segmented', 'random-continuous', and 'random-segmented'.
| 3.106692
| 3.008504
| 1.032637
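The four sequence types are just the cross product of two flags, as the branches above show; a sketch of the mapping:

```python
# orderedInputs: present training patterns in order (1) or shuffled (0)?
# initContext:   reset context-layer activations between sequences (1) or not (0)?
SEQUENCE_TYPES = {
    "ordered-continuous": (1, 0),
    "ordered-segmented":  (1, 1),
    "random-continuous":  (0, 0),
    "random-segmented":   (0, 1),
}
print(SEQUENCE_TYPES["ordered-segmented"])  # (1, 1)
```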
|