Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Fix Colab notebook rendering issues and handle dependencies #2646

Open
wants to merge 1 commit into
base: main
Choose a base branch
from
Open
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Jump to
Jump to file
Failed to load files.
Diff view
Diff view
150 changes: 67 additions & 83 deletions colab/demo.ipynb
Original file line number Diff line number Diff line change
@@ -1,15 +1,5 @@
{
"cells": [
{
"cell_type": "markdown",
"metadata": {
"id": "view-in-github",
"colab_type": "text"
},
"source": [
"<a href=\"https://colab.research.google.com/github/nerfstudio-project/nerfstudio/blob/alex%2Fcolab310/colab/demo.ipynb\" target=\"_parent\"><img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\"/></a>"
]
},
{
"cell_type": "markdown",
"metadata": {
Expand Down Expand Up @@ -65,8 +55,28 @@
"cell_type": "code",
"execution_count": null,
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/"
},
"id": "45HQzfPVGmwb",
"outputId": "4e9349f6-6857-4c15-a992-f30b5e45bd77"
},
"outputs": [],
"source": [
"%cd /content/\n",
"!pip install fastapi kaleido python-multipart uvicorn pyzmq"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"cellView": "form",
"colab": {
"base_uri": "https://localhost:8080/"
},
"id": "9oyLHl8QfYwP",
"cellView": "form"
"outputId": "66208631-7a4b-432a-d41a-f34227038ecc"
},
"outputs": [],
"source": [
Expand Down Expand Up @@ -116,7 +126,11 @@
"execution_count": null,
"metadata": {
"cellView": "form",
"id": "msVLprI4gRA4"
"colab": {
"base_uri": "https://localhost:8080/"
},
"id": "msVLprI4gRA4",
"outputId": "74f7be14-6c7d-440e-9d4f-a21f71cf98ec"
},
"outputs": [],
"source": [
Expand Down Expand Up @@ -194,46 +208,12 @@
"cellView": "form",
"colab": {
"base_uri": "https://localhost:8080/",
"height": 928
"height": 926
},
"id": "VoKDxqEcjmfC",
"outputId": "d2919aa4-96dd-4e50-829f-289e4208882b"
"outputId": "32775f39-6859-470d-f12d-ab0faa0a7958"
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"/content\n",
"\u001b[K\u001b[?25h/tools/node/bin/lt -> /tools/node/lib/node_modules/localtunnel/bin/lt.js\n",
"\u001b[K\u001b[?25h+ [email protected]\n",
"added 22 packages from 22 contributors in 2.07s\n",
"https://viewer.nerf.studio/?websocket_url=wss://cyan-facts-matter-34-91-1-218.loca.lt\n",
"You may need to click Refresh Page after you start training!\n"
]
},
{
"data": {
"text/html": [
"\n",
" <iframe\n",
" width=\"100%\"\n",
" height=\"800\"\n",
" src=\"https://viewer.nerf.studio/?websocket_url=wss://cyan-facts-matter-34-91-1-218.loca.lt\"\n",
" frameborder=\"0\"\n",
" allowfullscreen\n",
" ></iframe>\n",
" "
],
"text/plain": [
"<IPython.lib.display.IFrame at 0x7f1d0da6f950>"
]
},
"execution_count": 2,
"metadata": {},
"output_type": "execute_result"
}
],
"outputs": [],
"source": [
"#@markdown <h1>Set up and Start Viewer</h1>\n",
"\n",
Expand Down Expand Up @@ -269,60 +249,65 @@
"cell_type": "code",
"execution_count": null,
"metadata": {
"cellView": "form",
"id": "m_N8_cLfjoXD"
"colab": {
"base_uri": "https://localhost:8080/"
},
"id": "SpQDBT9yHpc7",
"outputId": "85f6e473-121c-4b89-86d3-8e64bc39c92a"
},
"outputs": [],
"source": [
"%cd /content/\n",
"!pip install --upgrade torchaudio\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/"
},
"id": "m_N8_cLfjoXD",
"outputId": "4ec64264-2f6a-426b-e553-57002425960c"
},
"outputs": [],
"source": [
"#@markdown <h1>Start Training</h1>\n",
"\n",
"%cd /content\n",
"if os.path.exists(f\"data/nerfstudio/{scene}/transforms.json\"):\n",
" !ns-train nerfacto --viewer.websocket-port 7007 nerfstudio-data --data data/nerfstudio/$scene --downscale-factor 4\n",
" !ns-train nerfacto nerfstudio-data --data data/nerfstudio/$scene\n",
"else:\n",
" from IPython.core.display import display, HTML\n",
" display(HTML('<h3 style=\"color:red\">Error: Data processing did not complete</h3>'))\n",
" display(HTML('<h3>Please re-run `Downloading and Processing Data`, or view the FAQ for more info.</h3>'))"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"id": "bRyqDTujLZHr"
},
"outputs": [],
"source": [
"!pip freeze > requirements.txt"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"cellView": "form",
"colab": {
"base_uri": "https://localhost:8080/"
"base_uri": "https://localhost:8080/",
"height": 245
},
"id": "WGt8ukG6Htg3",
"outputId": "fa946890-c7d8-4e46-a54e-7231bc5a2059"
"outputId": "9c2fe3c6-9011-42a0-fa2f-0c0d3fe8fa92"
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"\u001b[2;36m[19:48:48]\u001b[0m\u001b[2;36m \u001b[0mSkipping \u001b[1;36m0\u001b[0m files in dataset split train. \u001b]8;id=527413;file:///content/nerfstudio/nerfstudio/data/dataparsers/nerfstudio_dataparser.py\u001b\\\u001b[2mnerfstudio_dataparser.py\u001b[0m\u001b]8;;\u001b\\\u001b[2m:\u001b[0m\u001b]8;id=243595;file:///content/nerfstudio/nerfstudio/data/dataparsers/nerfstudio_dataparser.py#91\u001b\\\u001b[2m91\u001b[0m\u001b]8;;\u001b\\\n",
"\u001b[2;36m \u001b[0m\u001b[2;36m \u001b[0mSkipping \u001b[1;36m0\u001b[0m files in dataset split test. \u001b]8;id=109270;file:///content/nerfstudio/nerfstudio/data/dataparsers/nerfstudio_dataparser.py\u001b\\\u001b[2mnerfstudio_dataparser.py\u001b[0m\u001b]8;;\u001b\\\u001b[2m:\u001b[0m\u001b]8;id=464675;file:///content/nerfstudio/nerfstudio/data/dataparsers/nerfstudio_dataparser.py#91\u001b\\\u001b[2m91\u001b[0m\u001b]8;;\u001b\\\n",
"\u001b[2KLoading data batch \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[35m100%\u001b[0m \u001b[36m0:00:00\u001b[0m\n",
"\u001b[?25h/usr/local/lib/python3.7/site-packages/torch/utils/data/dataloader.py:566: UserWarning: This DataLoader will create 4 worker processes in total. Our suggested max number of worker in current system is 2, which is smaller than what this DataLoader is going to create. Please be aware that excessive worker creation might get DataLoader running slow or even freeze, lower the worker number to avoid potential slowness/freeze if necessary.\n",
" cpuset_checked))\n",
"\u001b[2KLoading data batch \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[35m100%\u001b[0m \u001b[36m0:00:00\u001b[0m\n",
"\u001b[?25h/usr/local/lib/python3.7/site-packages/torchvision/models/_utils.py:209: UserWarning: The parameter 'pretrained' is deprecated since 0.13 and will be removed in 0.15, please use 'weights' instead.\n",
" f\"The parameter '{pretrained_param}' is deprecated since 0.13 and will be removed in 0.15, \"\n",
"/usr/local/lib/python3.7/site-packages/torchvision/models/_utils.py:223: UserWarning: Arguments other than a weight enum or `None` for 'weights' are deprecated since 0.13 and will be removed in 0.15. The current behavior is equivalent to passing `weights=AlexNet_Weights.IMAGENET1K_V1`. You can also use `weights=AlexNet_Weights.DEFAULT` to get the most up-to-date weights.\n",
" warnings.warn(msg)\n",
"Loading latest checkpoint from load_dir\n",
"✅ Done loading checkpoint from \n",
"outputs/data-nerfstudio-poster/nerfacto/\u001b[1;36m2022\u001b[0m-\u001b[1;36m10\u001b[0m-29_192844/nerfstudio_models/step-\u001b[1;36m000014000.\u001b[0mckpt\n",
"\u001b[1;32mCreating trajectory video\u001b[0m\n",
"\u001b[2K🎥 Rendering 🎥 \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[35m100%\u001b[0m \u001b[31m0.14 fps\u001b[0m \u001b[33m11:47\u001b[0m\n",
"\u001b[2K\u001b[32m( ● )\u001b[0m \u001b[33mSaving video\u001b[0m\n",
"\u001b[1A\u001b[2K\u001b[92m────────────────────────────────────────────── \u001b[0m\u001b[32m 🎉 🎉 🎉 Success 🎉 🎉 🎉\u001b[0m\u001b[92m ──────────────────────────────────────────────\u001b[0m\n",
" \u001b[32mSaved video to renders/output.mp4\u001b[0m \n",
"\u001b[0m"
]
}
],
"outputs": [],
"source": [
"#@title # Render Video { vertical-output: true }\n",
"#@markdown <h3>Export the camera path from within the viewer, then run this cell.</h3>\n",
Expand Down Expand Up @@ -350,8 +335,7 @@
"metadata": {
"accelerator": "GPU",
"colab": {
"provenance": [],
"include_colab_link": true
"provenance": []
},
"gpuClass": "standard",
"kernelspec": {
Expand Down