z-coder committed
Commit 980de81 · verified · 1 Parent(s): e939a56

Create app.py

Files changed (1)
  1. app.py +14 -0
app.py ADDED
@@ -0,0 +1,14 @@
+ import gradio as gr
+ from transformers import AutoProcessor, AutoModelForCausalLM
+ from PIL import Image
+ import torch
+
+ model = AutoModelForCausalLM.from_pretrained("llava-hf/llava-1.5-7b-hf", torch_dtype=torch.float16).to("cuda")
+ processor = AutoProcessor.from_pretrained("llava-hf/llava-1.5-7b-hf")
+
+ def chat(image, prompt):
+     inputs = processor(text=f"USER: <image>\n{prompt} ASSISTANT:", images=image, return_tensors="pt").to("cuda", torch.float16)
+     output = model.generate(**inputs, max_new_tokens=50)
+     return processor.tokenizer.decode(output[0], skip_special_tokens=True)
+
+ gr.Interface(fn=chat, inputs=["image", "text"], outputs="text").launch()