joeWabbit committed
Commit 9bcecd2 · verified · 1 Parent(s): 4200d56

Update app.py

Files changed (1)
  1. app.py +34 -11
app.py CHANGED
@@ -3,20 +3,29 @@ from PIL import Image, ImageFilter
 import gradio as gr
 import torch
 import numpy as np
+
+# --- Depth-Based Blur using a Pipeline ---
+# Use the pipeline for depth estimation with the small model.
 depth_pipe = pipeline(task="depth-estimation", model="depth-anything/Depth-Anything-V2-Small-hf")
 
 def compute_depth_map_pipeline(image: Image.Image, scale_factor: float) -> np.ndarray:
     """
-    Computes a depth map using the HF pipeline.
+    Computes a depth map using the Hugging Face pipeline.
     The returned depth is inverted (so near=0 and far=1) and scaled.
     """
-    result = depth_pipe(image)[0]
+    result = depth_pipe(image)  # No [0] index; the pipeline returns a dictionary
     depth_map = np.array(result["depth"])
+    # Invert depth so that near becomes 0 and far becomes 1
     depth_map = 1.0 - depth_map
     depth_map *= scale_factor
     return depth_map
 
 def layered_blur(image: Image.Image, depth_map: np.ndarray, num_layers: int, max_blur: float) -> Image.Image:
+    """
+    Applies multiple levels of Gaussian blur based on depth.
+    The image is blurred with increasing radii and then composited
+    using a mask derived from the depth map divided into bins.
+    """
     blur_radii = np.linspace(0, max_blur, num_layers)
     blur_versions = [image.filter(ImageFilter.GaussianBlur(r)) for r in blur_radii]
     upper_bound = depth_map.max()
@@ -30,6 +39,11 @@ def layered_blur(image: Image.Image, depth_map: np.ndarray, num_layers: int, max
     return final_image
 
 def process_depth_blur_pipeline(uploaded_image, max_blur_value, scale_factor, num_layers):
+    """
+    Processes an uploaded image using depth-based blur.
+    The image is resized to 512x512, its depth is computed via the pipeline,
+    and a layered blur is applied.
+    """
     if not isinstance(uploaded_image, Image.Image):
         uploaded_image = Image.open(uploaded_image)
     image = uploaded_image.convert("RGB").resize((512, 512))
@@ -39,6 +53,10 @@ def process_depth_blur_pipeline(uploaded_image, max_blur_value, scale_factor, nu
 
 # --- Segmentation-Based Blur using BEN2 ---
 def load_segmentation_model():
+    """
+    Loads and caches the segmentation model from BEN2.
+    Ensure you have ben2 installed and accessible in your path.
+    """
     global seg_model, seg_device
     if "seg_model" not in globals():
         from ben2 import BEN_Base  # Import BEN2
@@ -48,7 +66,11 @@ def load_segmentation_model():
     return seg_model, seg_device
 
 def process_segmentation_blur(uploaded_image, seg_blur_radius: float):
-
+    """
+    Processes the image with segmentation-based blur.
+    The image is resized to 512x512. A Gaussian blur with the specified radius is applied,
+    then the segmentation mask is computed to composite the sharp foreground over the blurred background.
+    """
     if not isinstance(uploaded_image, Image.Image):
         uploaded_image = Image.open(uploaded_image)
     image = uploaded_image.convert("RGB").resize((512, 512))
@@ -65,24 +87,25 @@ def process_segmentation_blur(uploaded_image, seg_blur_radius: float):
 
 # --- Merged Gradio Interface ---
 with gr.Blocks() as demo:
-    gr.Markdown("# Lens Blur & Gaussian Blur")
+    gr.Markdown("# Depth-Based vs Segmentation-Based Blur")
     with gr.Tabs():
-        with gr.Tab("Lens Blur"):
+        with gr.Tab("Depth-Based Blur (Pipeline)"):
             depth_img = gr.Image(type="pil", label="Upload Image")
             depth_max_blur = gr.Slider(1.0, 5.0, value=3.0, step=0.1, label="Maximum Blur Radius")
             depth_scale = gr.Slider(0.1, 1.0, value=0.5, step=0.1, label="Depth Scale Factor")
             depth_layers = gr.Slider(2, 20, value=8, step=1, label="Number of Layers")
-            depth_out = gr.Image(label="Lens Blurred Image")
-            depth_button = gr.Button("Process Lens Blur")
+            depth_out = gr.Image(label="Depth-Based Blurred Image")
+            depth_button = gr.Button("Process Depth Blur")
             depth_button.click(process_depth_blur_pipeline,
                                inputs=[depth_img, depth_max_blur, depth_scale, depth_layers],
                                outputs=depth_out)
-        with gr.Tab("Guassian Blur"):
+        with gr.Tab("Segmentation-Based Blur (BEN2)"):
             seg_img = gr.Image(type="pil", label="Upload Image")
             seg_blur = gr.Slider(5, 30, value=15, step=1, label="Segmentation Blur Radius")
-            seg_out = gr.Image(label="Gaussian Blurred Image")
-            seg_button = gr.Button("Gaussian Blur")
+            seg_out = gr.Image(label="Segmentation-Based Blurred Image")
+            seg_button = gr.Button("Process Segmentation Blur")
             seg_button.click(process_segmentation_blur, inputs=[seg_img, seg_blur], outputs=seg_out)
 
 if __name__ == "__main__":
-    demo.launch()
+    # Optionally, set share=True to generate a public link.
+    demo.launch(share=True)
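
For anyone trying the new depth path outside the Space: the depth-estimation pipeline returns a dictionary whose "depth" entry is a PIL image (typically 8-bit, 0-255), so normalizing before the inversion keeps the near=0 / far=1 convention the docstring describes. The sketch below is illustrative only: inverted_depth and layered_blur_sketch are hypothetical helper names, the normalization and the equal-width depth bins are assumptions, and the compositing loop is one plausible reading of the docstring, not the Space's actual layered_blur body (which this diff does not show).

from PIL import Image, ImageFilter
from transformers import pipeline
import numpy as np

depth_pipe = pipeline(task="depth-estimation",
                      model="depth-anything/Depth-Anything-V2-Small-hf")

def inverted_depth(image: Image.Image, scale_factor: float = 0.5) -> np.ndarray:
    # The pipeline returns a dict; "depth" is a PIL image, "predicted_depth" a tensor.
    result = depth_pipe(image)
    depth = np.array(result["depth"], dtype=np.float32)
    if depth.max() > 0:
        depth /= depth.max()              # assumption: bring the 0-255 map into [0, 1]
    return (1.0 - depth) * scale_factor   # invert so near -> 0, far -> 1, then scale

def layered_blur_sketch(image: Image.Image, depth_map: np.ndarray,
                        num_layers: int = 8, max_blur: float = 3.0) -> Image.Image:
    # One plausible compositing scheme: split the depth range into equal-width bins and
    # paste a progressively blurrier copy of the image through each bin's binary mask.
    radii = np.linspace(0, max_blur, num_layers)
    blurred = [image.filter(ImageFilter.GaussianBlur(r)) for r in radii]
    edges = np.linspace(0, depth_map.max() + 1e-6, num_layers + 1)
    out = blurred[0].copy()               # nearest bin stays sharp (radius 0)
    for i in range(1, num_layers):
        in_bin = (depth_map >= edges[i]) & (depth_map < edges[i + 1])
        mask = Image.fromarray((in_bin * 255).astype(np.uint8), mode="L")
        out.paste(blurred[i], (0, 0), mask)
    return out

img = Image.open("example.jpg").convert("RGB").resize((512, 512))  # hypothetical input file
print(layered_blur_sketch(img, inverted_depth(img)).size)

The mask-and-paste approach keeps everything in PIL, which is why the app can stay free of any explicit per-pixel loops; only the bin boundaries are computed in NumPy.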