train_bazel.sh
#!/bin/bash
# Copyright 2020 The Magenta Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Echo commands as they run, and abort on the first error.
set -x
set -e
# Change this to the directory where you want to save experiment logs:
logdir=./logs
# Change this to the directory the data is loaded from:
data_dir=./data/
# Change this to your dataset class, which can be defined in lib_data.py:
dataset=stringquartet16thseparated
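
# Optional sanity check (an addition, not part of the original script):
# fail fast if the data directory is missing, rather than partway into a
# long training run.
if [ ! -d "$data_dir" ]; then
  echo "Error: data_dir '$data_dir' does not exist." >&2
  exit 1
fi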
# Data preprocessing.
crop_piece_len=64
separate_instruments=True
quantization_level=0.125 # 16th notes
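
# Note (added): with 16th-note quantization, crop_piece_len=64 time steps is
# 64 sixteenths = 16 quarter notes, i.e. 4 measures of 4/4 per training crop.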
# Hyperparameters.
maskout_method=orderless  # mask random variable subsets (orderless NADE-style training)
num_layers=64
num_filters=128
batch_size=10
use_sep_conv=True
architecture='dilated'
num_dilation_blocks=1
dilate_time_only=False
repeat_last_dilation_level=False
num_pointwise_splits=2
interleave_split_every_n_layers=2
num_epochs=120
learning_rate=0.03125
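
# Optional (added, not in the original script): echo the key settings so the
# run log records what this configuration used.
echo "dataset=$dataset layers=$num_layers filters=$num_filters" \
     "batch=$batch_size lr=$learning_rate epochs=$num_epochs"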
# Run command.
python coconet_train.py \
  --learning_rate="$learning_rate" \
  --num_epochs="$num_epochs" \
  --logdir="$logdir" \
  --log_process=True \
  --data_dir="$data_dir" \
  --dataset="$dataset" \
  --crop_piece_len="$crop_piece_len" \
  --separate_instruments="$separate_instruments" \
  --quantization_level="$quantization_level" \
  --maskout_method="$maskout_method" \
  --num_layers="$num_layers" \
  --num_filters="$num_filters" \
  --use_residual \
  --batch_size="$batch_size" \
  --use_sep_conv="$use_sep_conv" \
  --architecture="$architecture" \
  --num_dilation_blocks="$num_dilation_blocks" \
  --dilate_time_only="$dilate_time_only" \
  --repeat_last_dilation_level="$repeat_last_dilation_level" \
  --num_pointwise_splits="$num_pointwise_splits" \
  --interleave_split_every_n_layers="$interleave_split_every_n_layers" \
  --logtostderr
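
# Usage (added): run the script and, assuming coconet_train.py writes event
# files to $logdir, monitor training progress with TensorBoard:
#   bash train_bazel.sh
#   tensorboard --logdir="$logdir"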