Open ritabratamaiti opened 5 years ago
from sklearn.preprocessing import MinMaxScaler
from sklearn.metrics import mean_squared_error
import numpy
import matplotlib.pyplot as plt
import pandas
import math
# fix random seed for reproducibility
numpy.random.seed(7)
from keras.layers.core import Dense
from keras.layers.recurrent import LSTM
from keras.models import Sequential
from keras.optimizers import Adam, SGD
from keras.initializers import RandomNormal
from keras.models import Sequential
from keras.optimizers import Adam
from ntm import NeuralTuringMachine as NTM
import model_ntm
from ntm import controller_input_output_shape as controller_shape
# Load the classic airline-passengers series: keep only the count column
# (usecols=[1]); skipfooter drops the dataset's trailing footer rows.
dataframe = pandas.read_csv('international-airline-passengers.csv', usecols=[1], engine='python', skipfooter=3)
dataset = dataframe.values
# Keras works in float32; convert once up front.
dataset = dataset.astype('float32')
# Chronological 67/33 split — no shuffling, as required for a time series.
train_size = int(len(dataset) * 0.67)
test_size = len(dataset) - train_size
# NOTE(review): this split runs BEFORE the MinMaxScaler below is fitted, so
# `train`/`test` hold UNSCALED values while predictions are later passed
# through inverse_transform — confirm the intended scaling order.
train, test = dataset[0:train_size,:], dataset[train_size:len(dataset),:]
print(len(train), len(test))
def create_dataset(dataset, look_back=1):
    """Turn a 2-D series array into supervised-learning (window, target) pairs.

    Parameters
    ----------
    dataset : numpy.ndarray, shape (n, 1)
        The time series, one observation per row.
    look_back : int, optional
        Number of past steps used as the input window for each sample.

    Returns
    -------
    (X, y) : tuple of numpy.ndarray
        X has shape (n - look_back, look_back); y has shape (n - look_back,)
        where y[i] is the observation immediately following window X[i].
    """
    data_x, data_y = [], []
    # Stop at len - look_back so the final window still has a target.
    # The original used `len(dataset) - look_back - 1`, an off-by-one that
    # silently discarded the last usable sample.
    for i in range(len(dataset) - look_back):
        data_x.append(dataset[i:(i + look_back), 0])
        data_y.append(dataset[i + look_back, 0])
    return numpy.array(data_x), numpy.array(data_y)
# Use a single previous time step as the model input window.
look_back = 1
trainX, trainY = create_dataset(train, look_back)
testX, testY = create_dataset(test, look_back)
# reshape input to be [samples, time steps, features]
# NOTE(review): trainX hard-codes (n, 1, 1) while testX uses testX.shape[1];
# the two are equivalent only because look_back == 1 — unify before changing it.
trainX = numpy.reshape(trainX, (trainX.shape[0],1,1))
testX = numpy.reshape(testX, (testX.shape[0], 1, testX.shape[1]))
print(trainX.shape)
scaler = MinMaxScaler(feature_range=(0, 1))
# NOTE(review): this scales `dataset` AFTER train/test/trainX were already
# built from it, so the model trains on raw values while inverse_transform
# is applied later — fit/transform before the split to make scaling effective.
dataset = scaler.fit_transform(dataset)
# NTM I/O widths.
# NOTE(review): 94 matches the number of training samples, not the feature
# width (1) of trainX — this mismatch is consistent with the
# "expected shape (None, 94) but got (1, 1)" error reported below; verify.
output_dim = 94
input_dim = 94  # this is the actual input dim of the network, that includes two dims for flags
batch_size = 100
read_heads = 1
write_heads = 1
lr = 5e-4
# Gradient clipping, commonly needed to keep NTM training stable.
clipnorm = 10
# NOTE: named `sgd` but actually an Adam optimizer instance.
sgd = Adam(lr=lr, clipnorm=clipnorm)
# Derive the controller's I/O widths from the NTM geometry.
# NOTE(review): the (20, 128) passed here presumably encodes memory depth and
# slot count, but the NTM layer below is built with n_slots=50, m_depth=20 —
# confirm the two geometries agree.
controller_input_dim, controller_output_dim = controller_shape(input_dim, output_dim, 20, 128, 3, read_heads, write_heads)
print(controller_input_dim, controller_output_dim)
# Build the LSTM controller that the NTM layer will wrap. `stateful=True`
# requires a fixed batch size, hence batch_input_shape below.
controller = Sequential()
controller.add(LSTM(units=controller_output_dim,
kernel_initializer='random_normal',
bias_initializer='random_normal',
activation='linear',
stateful=True,
implementation=2, # best for gpu. other ones also might not work.
batch_input_shape=(batch_size, None, controller_input_dim)))
# Compiling forces Keras to build the controller's weights; the NTM layer
# drives the compiled model afterwards.
controller.compile(loss='binary_crossentropy', optimizer=sgd, metrics = ['binary_accuracy'], sample_weight_mode="temporal")
# Alternative construction via the model_ntm helper (kept for reference):
# model = model_ntm.gen_model(input_dim=input_dim, output_dim=output_dim, batch_size=batch_size,
#                             controller_model=controller, read_heads=read_heads, write_heads=write_heads,
#                             activation="sigmoid")
# Wrap the compiled LSTM controller in a Neural Turing Machine layer.
model = Sequential()
# NOTE(review): memory geometry here (n_slots=50, m_depth=20) should agree
# with the (20, 128) arguments passed to controller_shape above — verify.
ntm = NTM(output_dim, n_slots=50, m_depth=20, shift_range=3,
          controller_model=controller,
          return_sequences=True,
          input_shape=(None, input_dim),
          batch_size=100)
model.add(ntm)
# BUG FIX: the original called Adam(lr, clipnorm). Keras' Adam signature is
# Adam(lr, beta_1, beta_2, ...), so clipnorm=10 was silently bound to beta_1,
# corrupting the optimizer instead of enabling gradient clipping.
# Pass both arguments by keyword.
sgd = Adam(lr=lr, clipnorm=clipnorm)
model.compile(loss='binary_crossentropy', optimizer=sgd,
              metrics=['binary_accuracy'], sample_weight_mode="temporal")
# NOTE(review): trainX is (n, 1, 1) while the NTM expects feature width
# input_dim=94, and n is not a multiple of batch_size=100 — this matches
# the "expected shape (None, 94) but got (1, 1)" error reported below.
model.fit(trainX, trainY, epochs=1, batch_size=100, verbose=2)
# make predictions
trainPredict = model.predict(trainX)
testPredict = model.predict(testX)
# invert predictions
# NOTE(review): inverse_transform is applied here even though trainX/trainY
# were built from UNSCALED data (the scaler was fitted only after the split),
# so the "inverted" values are distorted — verify the scaling pipeline order.
trainPredict = scaler.inverse_transform(trainPredict)
# trainY/testY are 1-D, so they are wrapped in a list to give the 2-D shape
# inverse_transform expects.
trainY = scaler.inverse_transform([trainY])
testPredict = scaler.inverse_transform(testPredict)
testY = scaler.inverse_transform([testY])
# calculate root mean squared error
trainScore = math.sqrt(mean_squared_error(trainY[0], trainPredict[:,0]))
print('Train Score: %.2f RMSE' % (trainScore))
testScore = math.sqrt(mean_squared_error(testY[0], testPredict[:,0]))
print('Test Score: %.2f RMSE' % (testScore))
Accidentally closed the issue...
What should the input and output dimensions be for the test dataset? For example, I have a test dataset of dimensions (94, 1, 1). What should the output dimensions be? I am getting the error: `Error when checking input: expected neural_turing_machine_22_input to have shape (None, 94) but got array with shape (1, 1)`