Hugging Face Spaces — Running on Zero (ZeroGPU)
Commit: "fix bug" — Browse files
app.py CHANGED
@@ -234,7 +234,15 @@ with gr.Blocks(theme=theme, title="HORT: Monocular Hand-held Objects Reconstruction")
                 input_image = gr.Image(label="Input image", type="numpy")
                 threshold = gr.Slider(value=0.3, minimum=0.05, maximum=0.95, step=0.05, label='Detection Confidence Threshold')
                 submit = gr.Button("Submit", variant="primary")
-
+                example_images = gr.Examples([
+                    ['/home/user/app/assets/test1.png'],
+                    ['/home/user/app/assets/test2.png'],
+                    ['/home/user/app/assets/test3.jpg'],
+                    ['/home/user/app/assets/test4.jpeg'],
+                    ['/home/user/app/assets/test5.jpeg'],
+                    ['/home/user/app/assets/test6.jpg']
+                ],
+                inputs=input_image)

             with gr.Column():
                 reconstruction = gr.Image(label="Reconstructions", type="numpy")
@@ -243,15 +251,4 @@ with gr.Blocks(theme=theme, title="HORT: Monocular Hand-held Objects Reconstruction")

     submit.click(fn=render_reconstruction, inputs=[input_image, threshold], outputs=[reconstruction, hands_detected, output_meshes])

-    with gr.Row():
-        example_images = gr.Examples([
-            ['/home/user/app/assets/test1.png'],
-            ['/home/user/app/assets/test2.png'],
-            ['/home/user/app/assets/test3.jpg'],
-            ['/home/user/app/assets/test4.jpeg'],
-            ['/home/user/app/assets/test5.jpeg'],
-            ['/home/user/app/assets/test6.jpg']
-        ],
-        inputs=input_image)
-
 demo.launch(share=True)
|
Resulting app.py after the change (new lines 234–254; the gr.Examples block now sits directly under the Submit button, before the output column, and the old post-click `with gr.Row():` wrapper is gone):

234                 input_image = gr.Image(label="Input image", type="numpy")
235                 threshold = gr.Slider(value=0.3, minimum=0.05, maximum=0.95, step=0.05, label='Detection Confidence Threshold')
236                 submit = gr.Button("Submit", variant="primary")
237                 example_images = gr.Examples([
238                     ['/home/user/app/assets/test1.png'],
239                     ['/home/user/app/assets/test2.png'],
240                     ['/home/user/app/assets/test3.jpg'],
241                     ['/home/user/app/assets/test4.jpeg'],
242                     ['/home/user/app/assets/test5.jpeg'],
243                     ['/home/user/app/assets/test6.jpg']
244                 ],
245                 inputs=input_image)
246
247             with gr.Column():
248                 reconstruction = gr.Image(label="Reconstructions", type="numpy")
…                   (lines 249–250 not shown in the diff context)
251
252     submit.click(fn=render_reconstruction, inputs=[input_image, threshold], outputs=[reconstruction, hands_detected, output_meshes])
253
254 demo.launch(share=True)