From b29544ebb054583ca1855996e933b06fac8c8afa Mon Sep 17 00:00:00 2001
From: Faych Chen <90372299+neverbiasu@users.noreply.github.com>
Date: Sat, 22 Jun 2024 00:50:10 +0800
Subject: [PATCH] doc: revise the Load Models part of examples/ipynb (#397)

Co-authored-by: neverbiasu
---
 examples/ipynb/colab.ipynb   | 84 +++++++++++++++++++++++++++++-------
 examples/ipynb/example.ipynb | 65 +++++++++++++++++++++++++---
 2 files changed, 126 insertions(+), 23 deletions(-)

diff --git a/examples/ipynb/colab.ipynb b/examples/ipynb/colab.ipynb
index d351a4b5c..9d78a2a4a 100644
--- a/examples/ipynb/colab.ipynb
+++ b/examples/ipynb/colab.ipynb
@@ -2,12 +2,12 @@
  "cells": [
   {
    "cell_type": "markdown",
-   "source": [
-    "## Clone Repo"
-   ],
    "metadata": {
     "id": "xYJFXKP9xhQM"
-   }
+   },
+   "source": [
+    "## Clone Repo"
+   ]
   },
   {
    "cell_type": "code",
@@ -26,12 +26,12 @@
   },
   {
    "cell_type": "markdown",
-   "source": [
-    "## Import Libs"
-   ],
    "metadata": {
     "id": "zdzEFoknxqTH"
-   }
+   },
+   "source": [
+    "## Import Libs"
+   ]
   },
   {
    "cell_type": "code",
@@ -70,13 +70,65 @@
    },
    "outputs": [],
    "source": [
-    "chat = ChatTTS.Chat()\n",
-    "\n",
-    "# Use force_redownload=True if the weights updated.\n",
-    "chat.load_models(source = 'huggingface')\n",
-    "\n",
-    "# If you download the weights manually, set source='custom'.\n",
-    "# chat.load_models(source='custom', custom_path='YOUR CUSTOM PATH')"
+    "chat = ChatTTS.Chat()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Here are three choices for loading models:"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "#### 1. Load models from Hugging Face:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# use force_redownload=True if the weights have been updated.\n",
+    "chat.load_models(source='huggingface', force_redownload=True)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "#### 2. Load models from local directories 'asset' and 'config':"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "chat.load_models()\n",
+    "# chat.load_models(source='local') same as above"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "#### 3. Load models from a custom path:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# write the model path into custom_path\n",
+    "chat.load_models(source='custom', custom_path='YOUR CUSTOM PATH')"
    ]
   },
   {
@@ -352,4 +404,4 @@
  },
  "nbformat": 4,
  "nbformat_minor": 0
-}
\ No newline at end of file
+}
diff --git a/examples/ipynb/example.ipynb b/examples/ipynb/example.ipynb
index 4fc5a7be4..12ffb2b7b 100644
--- a/examples/ipynb/example.ipynb
+++ b/examples/ipynb/example.ipynb
@@ -51,14 +51,65 @@
    "source": [
     "os.chdir(root_dir)\n",
     "\n",
-    "chat = ChatTTS.Chat()\n",
+    "chat = ChatTTS.Chat()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Here are three choices for loading models:"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "#### 1. Load models from Hugging Face:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# use force_redownload=True if the weights have been updated.\n",
+    "chat.load_models(source='huggingface', force_redownload=True)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "#### 2. Load models from local directories 'asset' and 'config':"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
     "chat.load_models()\n",
-    "\n",
-    "# Use force_redownload=True if the weights updated.\n",
-    "# chat.load_models(force_redownload=True)\n",
-    "\n",
-    "# If you download the weights manually, set source='locals'.\n",
-    "# chat.load_models(source='local', local_path='YOUR LOCAL PATH')"
+    "# chat.load_models(source='local') same as above"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "#### 3. Load models from a custom path:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# write the model path into custom_path\n",
+    "chat.load_models(source='custom', custom_path='YOUR CUSTOM PATH')"
    ]
   },
   {
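
For reviewers who want to try the revised cells outside the notebooks, the three `load_models` options introduced by this patch boil down to the sketch below. The loading calls are taken verbatim from the new cells; the trailing `chat.infer` and `torchaudio.save` lines mirror the README-style ChatTTS usage of the time (24 kHz waveforms) and are an illustrative assumption, not part of this PR.

```python
# Sketch of the three load_models() options documented in this patch.
# The infer/save lines are illustrative only; 'YOUR CUSTOM PATH' is a placeholder.
import torch
import torchaudio
import ChatTTS

chat = ChatTTS.Chat()

# Option 1: fetch weights from Hugging Face (force_redownload refreshes stale weights).
chat.load_models(source='huggingface', force_redownload=True)

# Option 2: use the local 'asset' and 'config' directories (the default).
# chat.load_models()  # same as chat.load_models(source='local')

# Option 3: load manually downloaded weights from a custom path.
# chat.load_models(source='custom', custom_path='YOUR CUSTOM PATH')

# Smoke test: synthesize one sentence and save the 24 kHz waveform.
wavs = chat.infer(["Hello from the ChatTTS example notebooks."])
torchaudio.save("output.wav", torch.from_numpy(wavs[0]), 24000)
```

As the new cells note, calling `chat.load_models()` with no arguments is the same as `source='local'` and expects the `asset` and `config` directories to be present in the working tree.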