"""Binary sentiment classification on the IMDB review dataset.

Each review (a list of word indices) is multi-hot encoded over the 1000
most frequent words, then classified by a small dense network with a
sigmoid output. Trains for 2 epochs and prints one actual/predicted pair.
"""
import numpy as np
from keras.models import Sequential
from keras.layers import Dense
from keras.datasets import imdb


def _multi_hot(sequences, dim=1000):
    """Encode each word-index sequence as a length-`dim` 0/1 vector."""
    out = np.zeros((len(sequences), dim))
    for i, seq in enumerate(sequences):
        out[i, seq] = 1  # mark every word index that appears in the review
    return out


# Keep only the 1000 most frequent words in the vocabulary.
(tr, tl), (te, tel) = imdb.load_data(num_words=1000)
trm = _multi_hot(tr)
tem = _multi_hot(te)

# Evaluation slice: first 10000 test reviews.
tex = tem[:10000]
tey = tel[:10000]
# Training slice: training reviews after the first 10000.
# NOTE(review): the first 10000 training examples are discarded — this looks
# like an intended train/validation split that was never completed; confirm.
tx = trm[10000:]
ty = tl[10000:]

# 3 hidden ReLU layers of 50 units, sigmoid output for binary sentiment.
n = Sequential()
n.add(Dense(50, activation="relu", input_shape=(1000,)))
n.add(Dense(50, activation="relu"))
n.add(Dense(50, activation="relu"))
n.add(Dense(1, activation="sigmoid"))
n.summary()

n.compile(optimizer="adam", loss="binary_crossentropy", metrics=["accuracy"])
n.fit(tx, ty, epochs=2)

yp = n.predict(tex)
print("Actual Label", "Predicted Label")
print(tey[0], yp[0])
# --- Sample run output (console log, kept for reference; commented so the file parses) ---
# Downloading data from https://storage.googleapis.com/tensorflow/tf-keras-datasets/imdb.npz
# 17464789/17464789 [==============================] - 0s 0us/step
# Model: "sequential_1"
# Layer (type)                Output Shape              Param #
# =================================================================
# dense_3 (Dense)             (None, 50)                50050
# dense_4 (Dense)             (None, 50)                2550
# dense_5 (Dense)             (None, 50)                2550
# dense_6 (Dense)             (None, 1)                 51
# =================================================================
# Total params: 55201 (215.63 KB)
# Trainable params: 55201 (215.63 KB)
# Non-trainable params: 0 (0.00 Byte)
# Epoch 1/2
# 469/469 [==============================] - 4s 5ms/step - loss: 0.4038 - accuracy: 0.8157
# Epoch 2/2
# 469/469 [==============================] - 2s 5ms/step - loss: 0.3081 - accuracy: 0.8669
# 313/313 [==============================] - 1s 3ms/step