diff --git a/README.md b/README.md index b639ff7..4606d94 100644 --- a/README.md +++ b/README.md @@ -55,6 +55,20 @@ You can also use the Gradio to run the script with an interface. To do so, run t python app.py ``` +## Google Colab + +Google Colab users can use it by executing the following commands and accessing the generated Gradio Public URL. +(Note: this is probably only available with Colab Pro.) + +```bash +%cd /content +!git clone https://github.com/Flode-Labs/vid2densepose.git +%cd /content/vid2densepose +!pip install -r requirements.txt +!git clone https://github.com/facebookresearch/detectron2.git +!python app.py --share +``` + ## Integration with MagicAnimate For integration with MagicAnimate: diff --git a/app.py b/app.py index ec144e7..f053ab0 100644 --- a/app.py +++ b/app.py @@ -9,6 +9,7 @@ from densepose.vis.densepose_results import DensePoseResultsFineSegmentationVisualizer as Visualizer import tempfile import shutil +import argparse # Function to process video def process_video(input_video_path): @@ -46,7 +47,7 @@ def process_video(input_video_path): # Visualizer outputs black for background, but we want the 0 value of # the colormap, so we initialize the array with that value arr = cv2.applyColorMap(np.zeros((height, width), dtype=np.uint8), cmap) - out_frame = Visualizer(alpha=1, cmap=cmap).visualize(arr, results) + out_frame = Visualizer(alpha=1, cmap=cmap).visualize(arr, results) out.write(out_frame) # Release resources @@ -64,5 +65,9 @@ def process_video(input_video_path): title="Video 2 DensePose" ) +parser = argparse.ArgumentParser(description='Convert your videos to densepose and use it on MagicAnimate') +parser.add_argument('--share', action='store_true', default=False, help='Share the app on Gradio') +args = parser.parse_args() + # Run the app -iface.launch() +iface.launch(share=args.share)