zerchen committed on
Commit
327743d
·
1 Parent(s): 97c18ab

update env

Browse files
Files changed (2) hide show
  1. app.py +6 -5
  2. requirements.txt +1 -0
app.py CHANGED
@@ -1,10 +1,10 @@
 
1
  import os
2
  import sys
3
  os.environ["PYOPENGL_PLATFORM"] = "egl"
4
  os.environ["MESA_GL_VERSION_OVERRIDE"] = "4.1"
5
 
6
  import gradio as gr
7
- #import spaces
8
  import cv2
9
  import numpy as np
10
  import torch
@@ -28,6 +28,7 @@ from ultralytics import YOLO
28
  LIGHT_PURPLE=(0.25098039, 0.274117647, 0.65882353)
29
  STEEL_BLUE=(0.2745098, 0.5098039, 0.7058824)
30
 
 
31
  wilor_checkpoint_path = hf_hub_download(repo_id="zerchen/hort_models", filename="wilor_final.ckpt")
32
  hort_checkpoint_path = hf_hub_download(repo_id="zerchen/hort_models", filename="hort_final.pth.tar")
33
 
@@ -196,10 +197,10 @@ with gr.Blocks(title="HORT: Monocular Hand-held Objects Reconstruction with Tran
196
  with gr.Row():
197
  example_images = gr.Examples([
198
  ['/home/user/app/assets/test1.png'],
199
- ['./demo_img/app/assets/test2.png'],
200
- ['./demo_img/app/assets/test3.jpg'],
201
- ['./demo_img/app/assets/test4.jpeg'],
202
- ['./demo_img/app/assets/test5.jpeg']
203
  ],
204
  inputs=input_image)
205
 
 
1
+ import spaces
2
  import os
3
  import sys
4
  os.environ["PYOPENGL_PLATFORM"] = "egl"
5
  os.environ["MESA_GL_VERSION_OVERRIDE"] = "4.1"
6
 
7
  import gradio as gr
 
8
  import cv2
9
  import numpy as np
10
  import torch
 
28
  LIGHT_PURPLE=(0.25098039, 0.274117647, 0.65882353)
29
  STEEL_BLUE=(0.2745098, 0.5098039, 0.7058824)
30
 
31
+ os.system("cd /home/user/app/hort/models/tgs/models/snowflake/pointnet2_ops_lib && python setup.py install && cd /home/user/app")
32
  wilor_checkpoint_path = hf_hub_download(repo_id="zerchen/hort_models", filename="wilor_final.ckpt")
33
  hort_checkpoint_path = hf_hub_download(repo_id="zerchen/hort_models", filename="hort_final.pth.tar")
34
 
 
197
  with gr.Row():
198
  example_images = gr.Examples([
199
  ['/home/user/app/assets/test1.png'],
200
+ ['/home/user/app/assets/test2.png'],
201
+ ['/home/user/app/assets/test3.jpg'],
202
+ ['/home/user/app/assets/test4.jpeg'],
203
+ ['/home/user/app/assets/test5.jpeg']
204
  ],
205
  inputs=input_image)
206
 
requirements.txt CHANGED
@@ -44,5 +44,6 @@ rich
44
  webdataset
45
  ultralytics
46
  dill
 
47
  git+https://github.com/zerchen/lang-segment-anything.git
48
  typeguard
 
44
  webdataset
45
  ultralytics
46
  dill
47
+ git+https://github.com/facebookresearch/pytorch3d.git@stable
48
  git+https://github.com/zerchen/lang-segment-anything.git
49
  typeguard