Hello everyone,
I am a beginner with nengo, but I have experience with ANN frameworks such as TensorFlow and Keras. I have 2 questions:
-
When I train the nengo network with batch input, how can I first reload the weights of the communication channel (the decoder), and then continue training from those weights?
-
I am not clear about the simulation time in the nengo simulator. In my case (MNIST) I found no difference whether I set the time to 1 or 100; the prediction accuracy is the same. What I want is to compute the loss during the simulation, and if the loss falls below a threshold, stop the simulation.
My MNIST-classification code is below; it is based on code someone wrote in nengo_extras.
If possible, please write a new function showing me how to do it.
I think this case is pretty general for beginners of nengo — just like when we start learning artificial neural networks: if we solve this MNIST problem, we can solve many other problems by simply changing the shape of the input and output.
Let me quickly explain how I train the network and use it for prediction. When I train the network, I connect the input ensemble to the output node and set `eval_points=train_data` (images) and `function=train_targets` (labels) on the connection. When I do prediction, I connect `input.neurons` to the output node and use the trained weights as the decoder, directly setting `transform=decoder`.
import nengo
import numpy as np
from keras.datasets import mnist
from keras.utils import np_utils
from vision import Gabor, Mask
from sklearn.metrics import accuracy_score
class Q_network:
    """Single-ensemble MNIST classifier in the NEF style.

    The "training" is done offline by the connection's least-squares solver:
    nengo fits decoders that map ensemble activities onto the target labels
    from the (eval_points, function) pairs at build time.  The learned
    decoders are saved to disk and reused at prediction time as a fixed
    neurons-to-output transform, so no simulation-time learning occurs —
    this is why running the simulator longer does not change the accuracy.
    """

    def __init__(self, input_shape, output_shape, nb_hidden, decoder):
        """
        :param input_shape: the input dimension without batch_size (e.g. 784)
        :param output_shape: the output dimension without batch_size (e.g. 10)
        :param nb_hidden: the number of neurons in the ensemble
        :param decoder: the path to save weights of the connection channel
        """
        self.input_shape = input_shape
        self.output_shape = output_shape
        self.nb_hidden = nb_hidden
        self.decoder = decoder

    def encoder_initialization(self, way="default"):
        """Build the encoder matrix for the ensemble.

        :param way: "random" for i.i.d. Gaussian encoders; anything else
            uses sparse Gabor-filter encoders masked onto a 28x28 image.
        :return: array of shape (nb_hidden, input_shape)
        """
        if way == "random":
            return np.random.normal(0, 1, size=(self.nb_hidden, self.input_shape))
        # NOTE(review): self.output_shape is used both as the RNG seed and as
        # the Gabor patch size.  For MNIST (output_shape == 10) that yields
        # 10x10 Gabor patches; presumably a small filter size (e.g. 11) was
        # intended rather than the number of classes — confirm against the
        # original nengo_extras example.
        rng = np.random.RandomState(self.output_shape)
        encoders = Gabor().generate(
            self.nb_hidden, (self.output_shape, self.output_shape), rng=rng
        )
        # Place each Gabor patch at a random location in the 28x28 frame and
        # flatten so encoders match the flattened-image input dimension.
        encoders = Mask((28, 28)).populate(encoders, rng=rng, flatten=True)
        return encoders

    def train_network(self, train_data, train_targets, simulation_time):
        """Solve for decoders mapping images to labels and save them to disk.

        :param train_data: eval points, shape (n_samples, input_shape)
        :param train_targets: target outputs, shape (n_samples, output_shape)
        :param simulation_time: seconds to run the simulator; the decoders
            are solved at build time, so this only needs to be long enough
            for the weights probe (sample_every=1.0) to record once.
        """
        encoders = self.encoder_initialization()
        # L2-regularized least squares; reg guards against overfitting noise.
        solver = nengo.solvers.LstsqL2(reg=0.01)
        model = nengo.Network(seed=3)
        with model:
            input_neuron = nengo.Ensemble(
                n_neurons=self.nb_hidden,
                dimensions=self.input_shape,
                neuron_type=nengo.LIFRate(),
                intercepts=nengo.dists.Choice([-0.5]),
                max_rates=nengo.dists.Choice([100]),
                eval_points=train_data,
                encoders=encoders,
            )
            output = nengo.Node(size_in=self.output_shape)
            # The solver fits decoders so that the ensemble's activities at
            # each eval point reproduce the corresponding training target.
            conn = nengo.Connection(
                input_neuron,
                output,
                synapse=None,
                eval_points=train_data,
                function=train_targets,
                solver=solver,
            )
            # Probe the solved decoders once per simulated second.
            conn_weights = nengo.Probe(conn, 'weights', sample_every=1.0)
        with nengo.Simulator(model) as sim:
            sim.run(simulation_time)
        # Save the last recorded decoders, transposed to
        # (nb_hidden, output_shape).  NOTE(review): np.save appends ".npy"
        # to the path unless it already ends with it — make sure
        # self.decoder matches what predict() passes to np.load.
        np.save(self.decoder, sim.data[conn_weights][-1].T)

    def predict(self, input):
        """Decode labels for `input` using the saved decoders.

        :param input: images, shape (n_samples, input_shape)
        :return: decoded outputs, shape (n_samples, output_shape)
        """
        encoders = self.encoder_initialization()
        try:
            decoder = np.load(self.decoder)
        except IOError:
            # No trained weights on disk yet: fall back to zeros so the
            # model still builds (predictions will be all-zero).
            decoder = np.zeros((self.nb_hidden, self.output_shape))
        model = nengo.Network(seed=3)
        with model:
            input_neuron = nengo.Ensemble(
                n_neurons=self.nb_hidden,
                dimensions=self.input_shape,
                neuron_type=nengo.LIFRate(),
                intercepts=nengo.dists.Choice([-0.5]),
                max_rates=nengo.dists.Choice([100]),
                encoders=encoders,
            )
            output = nengo.Node(size_in=self.output_shape)
            # Connect directly from the neurons with the trained decoders as
            # a fixed transform (shape (output_shape, nb_hidden) after .T).
            conn = nengo.Connection(
                input_neuron.neurons,
                output,
                synapse=None,
                transform=decoder.T,
            )
        with nengo.Simulator(model) as sim:
            # NOTE(review): some nengo versions require an explicit
            # `import nengo.utils.ensemble` for this attribute path to
            # resolve — verify against the installed nengo version.
            _, acts = nengo.utils.ensemble.tuning_curves(
                input_neuron, sim, inputs=input
            )
        # Activities x decoders: equivalent to running the network, but
        # computed in one batched matrix product.
        return np.dot(acts, sim.data[conn].weights.T)