# Projecto_texto / app.py
from transformers import pipeline
import gradio as gr
# Load the instruction-tuned Mistral model as a text-generation pipeline
chatbot = pipeline("text-generation", model="mistralai/Mistral-7B-Instruct-v0.1", trust_remote_code=True)

def predict(user_input, history):
    # Gradio's "state" input starts out as None, so initialise the history here
    history = history or []
    # The pipeline echoes the prompt, so keep only the newly generated text
    generated = chatbot(user_input, max_new_tokens=200, do_sample=True)[0]["generated_text"]
    response = generated[len(user_input):].strip()
    history.append((user_input, response))
    return history, history
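
# --- Optional sketch (not part of the original app): Mistral-7B-Instruct-v0.1 is
# trained on an [INST] ... [/INST] chat format, so passing raw text can weaken its
# replies. One hedged approach is to fold the running history into the model's own
# chat template via the pipeline's tokenizer; "build_prompt" is a hypothetical helper
# name, and predict() above would pass its result to chatbot(...) instead of the raw
# user_input.
def build_prompt(user_input, history):
    # history is the list of (user, assistant) tuples kept in Gradio state
    messages = []
    for user_msg, bot_msg in history:
        messages.append({"role": "user", "content": user_msg})
        messages.append({"role": "assistant", "content": bot_msg})
    messages.append({"role": "user", "content": user_input})
    # apply_chat_template inserts the [INST] ... [/INST] markers the model expects
    return chatbot.tokenizer.apply_chat_template(
        messages, tokenize=False, add_generation_prompt=True
    )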
gr.Interface(
    fn=predict,
    inputs=["text", "state"],
    outputs=["chatbot", "state"],
    title="My Sexy Chatbot",
    description="Talk to a charming AI.",
    theme="finlaymacklon/boxy_violet",
).launch()