Monitor_ML_runs_live.py — 58 lines (47 loc) · 1.48 KB
Example script: monitor Keras training runs live with Neptune.
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
import neptune
from tensorflow import keras
# Start an anonymous Neptune run in the public quickstart project;
# all metrics logged below stream to this run.
run = neptune.init_run(
    project="common/quickstarts",
    api_token=neptune.ANONYMOUS_API_TOKEN,
)

# Hyperparameters for the training run, grouped in one dict so the
# model, optimizer, and fit() call all read from a single source.
params = {
    "epoch_nr": 10,
    "batch_size": 256,
    "lr": 0.005,
    "momentum": 0.4,
    "use_nesterov": True,
    "unit_nr": 256,
    "dropout": 0.05,
}
# Load the MNIST digit dataset: train/test splits of 28x28 grayscale
# images with integer class labels 0-9.
(x_train, y_train), (x_test, y_test) = keras.datasets.mnist.load_data()
# A simple feed-forward classifier:
# flatten -> dense ReLU -> dropout -> softmax over the 10 digit classes.
model = keras.models.Sequential()
model.add(keras.layers.Flatten())
model.add(keras.layers.Dense(params["unit_nr"], activation=keras.activations.relu))
model.add(keras.layers.Dropout(params["dropout"]))
model.add(keras.layers.Dense(10, activation=keras.activations.softmax))

# SGD with momentum (optionally Nesterov), configured entirely from params.
optimizer = keras.optimizers.SGD(
    learning_rate=params["lr"],
    momentum=params["momentum"],
    nesterov=params["use_nesterov"],
)

# Labels are integers (not one-hot), hence the sparse cross-entropy loss.
model.compile(
    optimizer=optimizer,
    loss="sparse_categorical_crossentropy",
    metrics=["accuracy"],
)
# Keras callback that streams every reported training metric to Neptune.
class NeptuneLogger(keras.callbacks.Callback):
    """Forward Keras metrics to the Neptune run, per batch and per epoch."""

    def on_batch_end(self, batch, logs=None):
        # `logs` may be None on some Keras versions; treat that as empty.
        for name, value in (logs or {}).items():
            run[f"batch/{name}"].append(value)

    def on_epoch_end(self, epoch, logs=None):
        for name, value in (logs or {}).items():
            run[f"epoch/{name}"].append(value)
# Train the model; NeptuneLogger streams batch- and epoch-level metrics
# to the run as they are produced.
model.fit(
    x=x_train,
    y=y_train,
    epochs=params["epoch_nr"],
    batch_size=params["batch_size"],
    callbacks=[NeptuneLogger()],
)