WRN-28-10.py
from toolz import curry
import tensorflow as tf
from tensorflow.keras.metrics import CategoricalAccuracy, Mean, CategoricalCrossentropy
from hanser.distribute import setup_runtime, distribute_datasets
from hanser.datasets.classification.cifar import make_cifar100_dataset
from hanser.transform import random_crop, normalize, to_tensor
from hanser.train.optimizers import SGD
from hanser.models.cifar.preactresnet import ResNet
from hanser.train.cls import SuperLearner
from hanser.train.lr_schedule import CosineLR
from hanser.losses import CrossEntropy
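# Per-example preprocessing: random crop with 4-pixel padding and horizontal flip
# are applied only during training; tensor conversion, per-channel normalization
# and one-hot encoding of the 100 labels always run.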
@curry
def transform(image, label, training):
    if training:
        image = random_crop(image, (32, 32), (4, 4))
        image = tf.image.random_flip_left_right(image)
    image, label = to_tensor(image, label)
    image = normalize(image, [0.491, 0.482, 0.447], [0.247, 0.243, 0.262])
    label = tf.one_hot(label, 100)
    return image, label
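# CIFAR-100 input pipelines; make_cifar100_dataset also returns the step counts
# used by the learning-rate schedule and the fit loop below.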
batch_size = 128
eval_batch_size = 2048
ds_train, ds_test, steps_per_epoch, test_steps = make_cifar100_dataset(
    batch_size, eval_batch_size, transform)
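# Enable mixed-precision (fp16) training and distribute the datasets across the
# configured runtime (e.g. TPU or GPU replicas).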
setup_runtime(fp16=True)
ds_train, ds_test = distribute_datasets(ds_train, ds_test)
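# Pre-activation Wide ResNet (WRN-28-10, per the script name) for 32x32 CIFAR inputs.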
model = ResNet(depth=28, k=10, num_classes=100)
model.build((None, 32, 32, 3))
model.summary()
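# Cross-entropy loss (no label smoothing), cosine learning-rate decay from 0.1 to 0
# over 200 epochs, and SGD with Nesterov momentum and weight decay.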
criterion = CrossEntropy(label_smoothing=0)
base_lr = 0.1
epochs = 200
lr_schedule = CosineLR(base_lr, steps_per_epoch, epochs=epochs, min_lr=0)
optimizer = SGD(lr_schedule, momentum=0.9, weight_decay=5e-4, nesterov=True)
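# Metrics: mean loss and categorical accuracy during training; cross-entropy
# (computed from logits) and accuracy during evaluation.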
train_metrics = {
    'loss': Mean(),
    'acc': CategoricalAccuracy(),
}
eval_metrics = {
    'loss': CategoricalCrossentropy(from_logits=True),
    'acc': CategoricalAccuracy(),
}
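# SuperLearner bundles the model, loss, optimizer and metrics; fit runs the
# training loop and evaluates on the test set every epoch (val_freq=1).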
learner = SuperLearner(
    model, criterion, optimizer,
    train_metrics=train_metrics, eval_metrics=eval_metrics,
    work_dir="./drive/MyDrive/models/CIFAR100")
learner.fit(ds_train, epochs, ds_test, val_freq=1,
            steps_per_epoch=steps_per_epoch, val_steps=test_steps)