import gradio as gr
import numpy as np
from PIL import Image
from pathlib import Path
from sentence_transformers import SentenceTransformer
# Load CLIP from the local cache when the Space ships one, otherwise download from the Hub
cache_path = Path('/app/cache')
if cache_path.exists():
    print('Cache folder exists, loading from cache')
    model = SentenceTransformer('clip-ViT-B-32', cache_folder=str(cache_path))
else:
    print('Cache folder does not exist, loading from web')
    model = SentenceTransformer('clip-ViT-B-32')

def image_to_embedding(img: np.ndarray = None, txt: str = None) -> np.ndarray:
    # Embed the image when one is given, otherwise embed the text prompt
    if img is None and not txt:
        return np.array([])
    if img is not None:
        embedding = model.encode(sentences=[Image.fromarray(img)], batch_size=128)
    else:
        embedding = model.encode(sentences=[txt], batch_size=128)
    return embedding

# Note: no examples are passed to the Interface, so cache_examples has nothing to cache
demo = gr.Interface(fn=image_to_embedding, inputs=["image", "textbox"], outputs="textbox", cache_examples=True)
# Bind to all interfaces so the containerized Space exposes the server
demo.launch(server_name="0.0.0.0")
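
# --- Reference sketch (separate snippet, not part of app.py) ---
# A minimal standalone example of what this Space wraps: the same clip-ViT-B-32
# model embeds an image and a caption into one vector space, and the two
# embeddings can be compared with cosine similarity. 'example.jpg' and the
# caption text below are placeholder assumptions, not files shipped with the Space.
#
# from PIL import Image
# from sentence_transformers import SentenceTransformer, util
#
# clip = SentenceTransformer('clip-ViT-B-32')
# img_emb = clip.encode([Image.open('example.jpg')])  # image embedding, shape (1, 512)
# txt_emb = clip.encode(['a photo of a cat'])         # text embedding, shape (1, 512)
# print(util.cos_sim(img_emb, txt_emb))               # cosine similarity between the two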