search_hyperparams.py
import os
import sys
from subprocess import check_call

import tensorflow as tf

from model.input_fn import train_input_fn, test_input_fn
from model.model_fn import model_fn
from model.utils import Params
from config import *

PYTHON = sys.executable
parent_dir = 'experiments/learning_rate'
data_dir = DATASET_DIR_PATH
def train(model_dir, params):
    """Train the model defined by model_fn and evaluate it on the test set.

    Args:
        model_dir: (string) directory where checkpoints and summaries are written
        params: (Params) hyperparameters of the model
    """
    train_dir = os.path.join(DATASET_DIR_PATH, "images_background")
    test_dir = os.path.join(DATASET_DIR_PATH, "images_evaluation")

    tf.reset_default_graph()
    tf.logging.set_verbosity(tf.logging.INFO)

    # Define the model
    tf.logging.info("Creating the model...")
    config = tf.estimator.RunConfig(tf_random_seed=230,
                                    model_dir=model_dir,
                                    save_summary_steps=params.save_summary_steps)
    estimator = tf.estimator.Estimator(model_fn, params=params, config=config)

    # Train the model
    tf.logging.info("Starting training for {} epoch(s).".format(params.num_epochs))
    estimator.train(lambda: train_input_fn(train_dir, params))

    # Evaluate the model on the test set
    tf.logging.info("Evaluation on test set.")
    res = estimator.evaluate(lambda: test_input_fn(test_dir, params))
    for key in res:
        print("{}: {}".format(key, res[key]))
def launch_training_job(parent_dir, data_dir, job_name, params):
    """Launch training of the model with a set of hyperparameters in parent_dir/job_name.

    Args:
        parent_dir: (string) directory containing config, weights and log
        data_dir: (string) directory containing the dataset
        job_name: (string) unique name of the job, used as the sub-directory name
        params: (Params) hyperparameters for this job
    """
    # Create a new folder in parent_dir with unique name "job_name"
    model_dir = os.path.join(parent_dir, job_name)
    if not os.path.exists(model_dir):
        os.makedirs(model_dir)

    # Write parameters in json file
    json_path = os.path.join(model_dir, 'params.json')
    params.save(json_path)

    # Train in-process with this config
    train(model_dir, params)

    # Alternative: launch training as a subprocess running train.py
    # cmd = "{python} train.py --model_dir {model_dir} --data_dir {data_dir}"
    # cmd = cmd.format(python=PYTHON, model_dir=model_dir, data_dir=data_dir)
    # print(cmd)
    # check_call(cmd, shell=True)
if __name__ == "__main__":
    # Load the "reference" parameters from parent_dir json file
    json_path = os.path.join(parent_dir, 'params.json')
    assert os.path.isfile(json_path), "No json configuration file found at {}".format(json_path)
    params = Params(json_path)

    # Perform hypersearch over one parameter
    learning_rates = [1e-4, 3e-4, 1e-3, 3e-3]
    for learning_rate in learning_rates:
        # Modify the relevant parameter in params
        params.learning_rate = learning_rate

        # Launch job (name has to be unique)
        job_name = "learning_rate_{}".format(learning_rate)
        launch_training_job(parent_dir, data_dir, job_name, params)
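
# ---------------------------------------------------------------------------
# Usage note (a sketch, not part of the original pipeline):
#
# The script asserts that a reference configuration exists at
# experiments/learning_rate/params.json. Judging only from the attributes read
# above (params.learning_rate, params.num_epochs, params.save_summary_steps),
# that file is a flat JSON dict; the values below are placeholders and the
# full key set depends on model.utils.Params, model_fn and the input functions:
#
#     {
#         "learning_rate": 0.001,
#         "num_epochs": 10,
#         "save_summary_steps": 50
#     }
#
# Each sweep iteration then creates experiments/learning_rate/learning_rate_<lr>/
# (e.g. learning_rate_0.0001), saves the modified params.json there, and the
# Estimator writes its checkpoints and summaries into that same directory.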