From b5c49a791470b54c7e995e9e039a50940925c470 Mon Sep 17 00:00:00 2001
From: cl
Date: Sat, 15 Apr 2023 00:18:25 +0800
Subject: [PATCH 1/2] add windows run scripts

---
 .gitignore                      |  3 +++
 README.md                       |  2 ++
 data/colmap2nerf.py             |  2 +-
 scripts/train_360_v2_garden.ps1 |  1 +
 scripts/train_from_video.ps1    | 20 ++++++++++++++++++++
 scripts/train_nsvf_lego.ps1     |  1 +
 6 files changed, 28 insertions(+), 1 deletion(-)
 create mode 100644 scripts/train_360_v2_garden.ps1
 create mode 100644 scripts/train_from_video.ps1
 create mode 100644 scripts/train_nsvf_lego.ps1

diff --git a/.gitignore b/.gitignore
index d7f0944..f7c47e0 100644
--- a/.gitignore
+++ b/.gitignore
@@ -20,3 +20,6 @@ data/*.MOV
 data/*.mp4
 data/colmap_text
 data/transforms.json
+lightning_logs
+*.zip
+external
\ No newline at end of file
diff --git a/README.md b/README.md
index 492e4f1..c1344a8 100644
--- a/README.md
+++ b/README.md
@@ -58,6 +58,8 @@ Place your video in `data` folder and pass the video path to the script. There a
 ./scripts/train_from_video.sh -v {your_video_name} -s {scale} -f {video_fps}
 ```
 
+NeRF uses COLMAP to estimate the camera poses, so you need to download COLMAP from [here](https://github.com/colmap/colmap/releases), rename the directory to "colmap", and put it in the "external" directory.
+
 ## [Preview] Mobile Deployment
 
 Using [Taichi AOT](https://docs.taichi-lang.org/docs/tutorial), you can easily deploy a NeRF rendering pipeline on any mobile devices!
diff --git a/data/colmap2nerf.py b/data/colmap2nerf.py
index 064f7e1..1fa8940 100755
--- a/data/colmap2nerf.py
+++ b/data/colmap2nerf.py
@@ -97,7 +97,7 @@ def run_colmap(args):
 
     # On Windows, if FFmpeg isn't found, try automatically downloading it from the internet
    if os.name == "nt" and os.system(f"where {colmap_binary} >nul 2>nul") != 0:
-        colmap_glob = os.path.join(ROOT_DIR, "external", "colmap", "*", "COLMAP.bat")
+        colmap_glob = os.path.join(ROOT_DIR, "external", "colmap", "COLMAP.bat")
         candidates = glob(colmap_glob)
         if not candidates:
             print("COLMAP not found. Attempting to download COLMAP from the internet.")
diff --git a/scripts/train_360_v2_garden.ps1 b/scripts/train_360_v2_garden.ps1
new file mode 100644
index 0000000..ebe5196
--- /dev/null
+++ b/scripts/train_360_v2_garden.ps1
@@ -0,0 +1 @@
+python train.py --root_dir ./360_v2/garden --dataset_name colmap --exp_name garden --downsample 0.25 --no_save_test --num_epochs 20 --scale 16.0 --gui
\ No newline at end of file
diff --git a/scripts/train_from_video.ps1 b/scripts/train_from_video.ps1
new file mode 100644
index 0000000..108231b
--- /dev/null
+++ b/scripts/train_from_video.ps1
@@ -0,0 +1,20 @@
+# Put your video in the data/ folder and set VIDEO_FILE to its filename
+# SCALE: choose from 1, 4, 8, 16, 64; 16 is recommended for a real scene
+# VIDEO_FPS = 2 is suitable for a one-minute video
+set VIDEO_FILE 'video.mp4'
+set SCALE 16
+set VIDEO_FPS 2
+
+echo "video path $VIDEO_FILE"
+echo "scale $SCALE"
+echo "video fps $VIDEO_FPS"
+
+cd "data"
+
+python colmap2nerf.py --video_in $VIDEO_FILE --video_fps $VIDEO_FPS --run_colmap --aabb_scale $SCALE --images images
+
+Move-Item colmap_sparse sparse
+cd ..
+
+
+python train.py --root_dir data --dataset_name colmap --exp_name custom --downsample 0.25 --num_epochs 20 --scale $SCALE --gui
\ No newline at end of file
diff --git a/scripts/train_nsvf_lego.ps1 b/scripts/train_nsvf_lego.ps1
new file mode 100644
index 0000000..7c634e2
--- /dev/null
+++ b/scripts/train_nsvf_lego.ps1
@@ -0,0 +1 @@
+python train.py --root_dir "./Synthetic_NeRF/Lego" --exp_name Lego --perf --num_epochs 20 --batch_size 8192 --lr 1e-2 --no_save_test --gui --ckpt_path=./ckpts/nsvf/Lego/epoch=19-v2.ckpt --val_only
\ No newline at end of file

From ad578db25c27aeb6ca994d7bdd54b4c6f7c04e18 Mon Sep 17 00:00:00 2001
From: chunleili
Date: Tue, 18 Apr 2023 15:06:10 +0800
Subject: [PATCH 2/2] Update README.md

Add install guide for Windows users
---
 README.md | 18 +++++++++++++++++-
 1 file changed, 17 insertions(+), 1 deletion(-)

diff --git a/README.md b/README.md
index c1344a8..6e675ef 100644
--- a/README.md
+++ b/README.md
@@ -42,6 +42,11 @@ To reach the best performance, here are the steps to follow:
 
 3. Uncomment `--half2_opt` to enable half2 optimization in the script, then `./scripts/train_nsvf_lego.sh`. For now, half2 optimization is only supported on Linux with Graphics Card Architecture >Pascal.
 
+**For Windows users**
+```
+./scripts/train_nsvf_lego.ps1
+```
+
 ### 360_v2 dataset
 
 Download [360 v2 dataset](http://storage.googleapis.com/gresearch/refraw360/360_v2.zip) and unzip it. Please keep the folder name unchanged. The default `batch_size=8192` takes up to 18GB RAM on a RTX3090. Please adjust `batch_size` according to your hardware spec.
@@ -50,6 +55,12 @@ Download [360 v2 dataset](http://storage.googleapis.com/gresearch/refraw360/360_
 ./scripts/train_360_v2_garden.sh
 ```
 
+**For Windows users**
+```
+./scripts/train_360_v2_garden.ps1
+```
+
+
 ## Train with your own video
 
 Place your video in `data` folder and pass the video path to the script. There are several key parameters for producing a sound dataset for NeRF training. For a real scene, `scale` is recommended to set to 16. `video_fps` determines the number of images generated from the video, typically 150~200 images are sufficient. For a one minute video, 2 is a suitable number. Running this script will preprocess your video and start training a NeRF out of it:
@@ -58,7 +69,12 @@ Place your video in `data` folder and pass the video path to the script. There a
 ./scripts/train_from_video.sh -v {your_video_name} -s {scale} -f {video_fps}
 ```
 
-NeRF uses COLMAP to estimate the camera poses, so you need to download COLMAP from [here](https://github.com/colmap/colmap/releases), rename the directory to "colmap", and put it in the "external" directory.
+**For Windows users**
+```
+./scripts/train_from_video.ps1
+```
+You need COLMAP to extract the camera pose information. Download COLMAP from [here](https://github.com/colmap/colmap/releases), rename the directory to "colmap", and put it in the "external" directory under the project root.
+
 ## [Preview] Mobile Deployment
 
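For `scripts/train_from_video.ps1`, the layout that the README additions and the `colmap2nerf.py` change expect is `external/colmap/COLMAP.bat` under the project directory. Below is a minimal PowerShell sketch of that setup; the project path, the archive name, and the extracted folder name are placeholders for whichever COLMAP Windows release is actually downloaded.

```
# Minimal sketch (assumed names): unpack a COLMAP Windows release so that
# external/colmap/COLMAP.bat exists, the path colmap2nerf.py globs for.
cd path\to\project-root                                   # placeholder for the repo root
New-Item -ItemType Directory -Force external | Out-Null   # create external/ if missing
Expand-Archive .\COLMAP-windows.zip -DestinationPath .\external   # placeholder archive name
Rename-Item .\external\COLMAP-windows colmap              # rename the extracted folder to "colmap"
Get-ChildItem .\external\colmap\COLMAP.bat                # verify the launcher is in place
```

With that layout in place, `./scripts/train_from_video.ps1` can be run from the project root as described in the README.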