test
app.py
CHANGED
@@ -3,14 +3,6 @@ import numpy as np
 import random
 from diffusers import DiffusionPipeline
 import torch
-# Use a pipeline as a high-level helper
-from transformers import pipeline
-
-messages = [
-    {"role": "user", "content": "Who are you?"},
-]
-pipe = pipeline("text-generation", model="lmms-lab/LLaVA-NeXT-Video-32B-Qwen")
-pipe(messages)
 
 device = "cuda" if torch.cuda.is_available() else "cpu"
 
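After this commit, app.py keeps only the random and diffusers imports and the device selection; the transformers text-generation example is gone. As a rough sketch of how the remaining pieces typically fit together, the snippet below loads a DiffusionPipeline onto the detected device. The checkpoint name, prompt, and output path are assumptions for illustration; the diff does not show which model the Space actually loads.

# Minimal sketch, assuming a standard text-to-image checkpoint (not taken from the diff).
import random

import torch
from diffusers import DiffusionPipeline

# Pick the GPU when one is available, otherwise fall back to CPU (as in app.py).
device = "cuda" if torch.cuda.is_available() else "cpu"

# Load a diffusion pipeline and move it to the selected device.
# "stabilityai/sdxl-turbo" is an assumed checkpoint for this example.
pipe = DiffusionPipeline.from_pretrained("stabilityai/sdxl-turbo")
pipe = pipe.to(device)

# Seed the generator randomly so repeated runs differ unless a seed is pinned.
seed = random.randint(0, 2**32 - 1)
generator = torch.Generator(device=device).manual_seed(seed)

# Generate one image from an example prompt and save it.
image = pipe("an astronaut riding a horse", generator=generator).images[0]
image.save("output.png")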