# example.py (forked from imdeepmind/NeuralPy)

# Dependencies
from neuralpy.models import Sequential
from neuralpy.layers.linear import Dense
from neuralpy.layers.regularizers import Dropout
from neuralpy.layers.activation_functions import ReLU
from neuralpy.loss_functions import CrossEntropyLoss
from neuralpy.optimizer import Adam

import pandas as pd

# Model: a 784 -> 264 -> 10 fully connected network for the 10 MNIST digit classes
model = Sequential()
model.add(Dense(n_nodes=264, n_inputs=784))
model.add(ReLU())
model.add(Dropout())
model.add(Dense(n_nodes=10))

# Build the model, then attach the optimizer, loss function, and metrics
model.build()
model.compile(
    optimizer=Adam(),
    loss_function=CrossEntropyLoss(),
    metrics=["accuracy"],
)
print(model.summary())

# Reading data
# Each CSV row: column 0 is the digit label, columns 1-784 are the pixel values
train_data = pd.read_csv("./data/mnist_train.csv", header=None)
test_data = pd.read_csv("./data/mnist_test.csv", header=None)

# Shuffle the rows and convert the DataFrames to NumPy arrays
train_data = train_data.sample(frac=1).values
test_data = test_data.sample(frac=1).values

# Split labels from pixels and scale pixel values to [0, 1]
X = train_data[:, 1:] / 255.
y = train_data[:, 0]
X_test = test_data[:, 1:] / 255.  # scale the test set the same way as the training set
y_test = test_data[:, 0]

del train_data

# Hold out the last 20% of the shuffled training data for validation
n = len(X)
X_train = X[:int(n * 0.8)]
y_train = y[:int(n * 0.8)]
X_validation = X[int(n * 0.8):]
y_validation = y[int(n * 0.8):]

del X, y
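
# Optional sanity check (not part of the original script): confirm the expected
# shapes before training, 784 pixel features per sample and 1-D label vectors.
print("train:", X_train.shape, y_train.shape)
print("validation:", X_validation.shape, y_validation.shape)
print("test:", X_test.shape, y_test.shape)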

# Training and evaluation
model.fit(
    train_data=(X_train, y_train),
    validation_data=(X_validation, y_validation),
    epochs=10,
    batch_size=32,
)

ev = model.evaluate(test_data=(X_test, y_test), batch_size=32)
print(ev)
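
# Possible next step (a sketch, not part of the original example): get model
# outputs for the test images. This assumes the NeuralPy Sequential model
# exposes a predict() method taking predict_data and batch_size, mirroring
# evaluate() above; check the NeuralPy documentation for the exact API and
# the format of the returned predictions.
predictions = model.predict(predict_data=X_test, batch_size=32)
print(predictions[:5])  # per-class scores for the first five test images (assumed output format)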