Bug Fix: Parsing Argument --finetune always True (#676)
* Bug Fix: Parsing Argument --finetune always True

* Update finetune_cli.py, fix also log_samples & bnb_optimizer flags

* Update finetune_gradio.py. fix flags

* Update finetune_gradio.py. formatting

---------

Co-authored-by: Yushen CHEN <[email protected]>
hndrbrm and SWivid authored Dec 29, 2024
1 parent 20aa6a1 commit dc2d2d3
Showing 2 changed files with 8 additions and 9 deletions.
src/f5_tts/train/finetune_cli.py (8 changes: 3 additions & 5 deletions)
@@ -47,7 +47,7 @@ def parse_args():
     parser.add_argument("--num_warmup_updates", type=int, default=300, help="Warmup steps")
     parser.add_argument("--save_per_updates", type=int, default=10000, help="Save checkpoint every X steps")
     parser.add_argument("--last_per_steps", type=int, default=50000, help="Save last checkpoint every X steps")
-    parser.add_argument("--finetune", type=bool, default=True, help="Use Finetune")
+    parser.add_argument("--finetune", action="store_true", help="Use Finetune")
     parser.add_argument("--pretrain", type=str, default=None, help="the path to the checkpoint")
     parser.add_argument(
         "--tokenizer", type=str, default="pinyin", choices=["pinyin", "char", "custom"], help="Tokenizer type"
@@ -60,15 +60,13 @@
     )
     parser.add_argument(
         "--log_samples",
-        type=bool,
-        default=False,
+        action="store_true",
         help="Log inferenced samples per ckpt save steps",
     )
     parser.add_argument("--logger", type=str, default=None, choices=["wandb", "tensorboard"], help="logger")
     parser.add_argument(
         "--bnb_optimizer",
-        type=bool,
-        default=False,
+        action="store_true",
         help="Use 8-bit Adam optimizer from bitsandbytes",
     )
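The root cause of the bug: argparse applies the given `type` callable to the raw command-line string, and `bool("False")`, like any non-empty string, evaluates to True, so the flag could never be switched off (and with `default=True`, `--finetune` was always on). The `action="store_true"` form turns the option into a value-less switch instead. A minimal, self-contained sketch of the difference (the parser names here are illustrative, not from the repository):

```python
import argparse

# With type=bool, argparse calls bool() on the raw string, and every non-empty
# string (including "False") is truthy, so the flag effectively cannot be disabled.
buggy = argparse.ArgumentParser()
buggy.add_argument("--finetune", type=bool, default=True)
print(buggy.parse_args(["--finetune", "False"]).finetune)  # True, despite "False"

# store_true makes it a plain on/off switch: absent -> False, present -> True.
fixed = argparse.ArgumentParser()
fixed.add_argument("--finetune", action="store_true")
print(fixed.parse_args([]).finetune)              # False
print(fixed.parse_args(["--finetune"]).finetune)  # True
```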
src/f5_tts/train/finetune_gradio.py (9 changes: 5 additions & 4 deletions)
@@ -452,20 +452,21 @@ def start_training(
         f"--dataset_name {dataset_name}"
     )

-    cmd += f" --finetune {finetune}"
+    if finetune:
+        cmd += " --finetune"

     if file_checkpoint_train != "":
         cmd += f" --pretrain {file_checkpoint_train}"

     if tokenizer_file != "":
         cmd += f" --tokenizer_path {tokenizer_file}"

-    cmd += f" --tokenizer {tokenizer_type} "
+    cmd += f" --tokenizer {tokenizer_type}"

-    cmd += f" --log_samples True --logger {logger} "
+    cmd += f" --log_samples --logger {logger}"

     if ch_8bit_adam:
-        cmd += " --bnb_optimizer True "
+        cmd += " --bnb_optimizer"

     print("run command : \n" + cmd + "\n")
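The launcher changes in finetune_gradio.py follow from the CLI change: a store_true switch takes no value, so appending `True` after the flag (as the old f-string interpolation did) would leave a stray token that argparse rejects; the flag must instead be appended bare, and only when the option is enabled. A small illustrative check, assuming only the standard library:

```python
import argparse

parser = argparse.ArgumentParser()
parser.add_argument("--finetune", action="store_true")

# Bare switch: the form the updated start_training() command string now builds.
print(parser.parse_args(["--finetune"]).finetune)  # True

# The old "--finetune True" style would leave an extra positional token, and
# argparse exits with "error: unrecognized arguments: True".
# parser.parse_args(["--finetune", "True"])
```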
