Hello!
Changing the output synapse of a converted Keras model outside of a Simulator object changes the order of the weights when use_bias=False, which breaks load_params. Also, when use_bias=True, the behavior is different from changing the synapse inside a Simulator. Is changing the output synapse outside a Simulator or Converter.net context something you're not supposed to do?
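For clarity, this is roughly what I mean by the two variants (a minimal sketch reusing the converter and names from the full script below):

# "inside": set the probe's synapse while the Simulator context is open
with Simulator(converter.net, seed=seed, minibatch_size=noBatch) as sim:
    converter.outputs[converter.model.output].synapse = 0.007
    sim.load_params("model")

# "outside": set it after building the Converter but before opening the
# Simulator, which is what the full script below does
converter.outputs[converter.model.output].synapse = 0.007

Here is the full script: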
import nengo_loihi
from numpy import random as nprandom
from tensorflow import optimizers, losses, metrics, get_logger, random as tfrandom
from tensorflow.nn import softmax, relu
from tensorflow.keras import Model, regularizers
from tensorflow.keras.layers import Input, Conv1D, BatchNormalization, Dense, Flatten
from tensorflow.keras.callbacks import EarlyStopping
get_logger().setLevel('INFO')
from sklearn.model_selection import train_test_split
import mat73
from nengo_dl import Simulator, Converter
seed = 42
noBatch = 8
nprandom.seed(seed)
tfrandom.set_seed(seed)
matFile = mat73.loadmat("data.mat")
x_train, x_val, y_train, y_val = train_test_split(matFile['train'], matFile['labels'], test_size=0.2, random_state=seed)
# create the model
size = 2048
callback = EarlyStopping(monitor='accuracy', patience=3, min_delta=0.01, mode='auto')
inputLayer = Input(shape=(size, 1))
conv0 = Conv1D(512, kernel_size=2, strides=2, activation=relu, use_bias=False)(inputLayer)
batchLayer = BatchNormalization()(conv0)
conv1 = Conv1D(128, kernel_size=2, strides=2, activation=relu, use_bias=False)(batchLayer)
conv2 = Conv1D(4, kernel_size=2, strides=2, activation=relu, use_bias=False)(conv1)
flatLayer = Flatten()(conv2)
dense0 = Dense(8, kernel_regularizer=regularizers.L1(0.001), activation=relu, use_bias=False)(flatLayer)
outputLayer = Dense(2, activation=softmax, use_bias=False)(dense0)
model = Model(inputs=inputLayer, outputs=outputLayer)
converter = Converter(model)
nengo_output = converter.outputs[converter.model.output]
with Simulator(converter.net, seed=seed, minibatch_size=noBatch) as sim:
    sim.compile(
        optimizer=optimizers.Adam(0.001),
        loss=losses.SparseCategoricalCrossentropy(from_logits=True),
        metrics=[metrics.sparse_categorical_crossentropy],
    )
    sim.fit(
        {converter.inputs[converter.model.input]: x_train},
        {converter.outputs[converter.model.output]: y_train},
        validation_data=(
            {converter.inputs[converter.model.input]: x_val},
            {converter.outputs[converter.model.output]: y_val},
        ),
        epochs=50,
        verbose="auto",
    )
    # save the parameters to file
    sim.save_params("model")
    # print the weight shapes for comparison (listed at the end of the post)
    weights = sim.keras_model.get_weights()
    for weight in weights:
        print(weight.shape)
converter = Converter(
    model,
    swap_activations={relu: nengo_loihi.neurons.LoihiSpikingRectifiedLinear()},
    scale_firing_rates=100,
    synapse=0.02,
)
# change the output synapse outside of any Simulator / Converter.net context
converter.outputs[converter.model.output].synapse = 0.007
with Simulator(converter.net, seed=seed, minibatch_size=noBatch) as sim:
    sim.compile(
        optimizer=optimizers.Adam(0.001),
        loss=losses.SparseCategoricalCrossentropy(from_logits=True),
        metrics=[metrics.sparse_categorical_crossentropy],
    )
    # print the weight shapes for comparison
    weights = sim.keras_model.get_weights()
    for weight in weights:
        print(weight.shape)
    # load the previously trained parameters
    sim.load_params("model")
    sim.fit(
        {converter.inputs[converter.model.input]: x_train},
        {converter.outputs[converter.model.output]: y_train},
        validation_data=(
            {converter.inputs[converter.model.input]: x_val},
            {converter.outputs[converter.model.output]: y_val},
        ),
        epochs=50,
        verbose="auto",
    )
These are the shapes I got. Note that the (1024, 8) weight (the first Dense layer's kernel) changes position relative to the four (512,) arrays between the two runs:

First Simulator:
(2, 512, 128)
(2, 1, 512)
(2, 128, 4)
(2, 8)
(512,)
(512,)
(1024, 8)
(512,)
(512,)

Second Simulator (after changing the synapse):
(2, 512, 128)
(2, 1, 512)
(2, 128, 4)
(2, 8)
(1024, 8)
(512,)
(512,)
(512,)
(512,)
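To pin down which parameter moved, it also helps to print the variable names next to the shapes, a small sketch assuming sim.keras_model behaves like a regular Keras model here (the script above already calls its get_weights()):

# print name and shape together so the reordered weight can be identified
for var in sim.keras_model.weights:
    print(var.name, var.shape)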