# prompt-enhancer / app.py
# Author: ayan4m1
# Commit 1e6822b: feat: add pytorch to deps and run pipeline directly via transformers
import gradio as gr
from transformers import pipeline

# Load a distilgpt2 model fine-tuned on MagicPrompt data to expand short prompts
# into more detailed Stable Diffusion prompts.
pipe = pipeline("text-generation", model="pszemraj/distilgpt2-magicprompt-SD")
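# Optional sanity check (a sketch; assumes the standard text-generation pipeline
# output format). Uncomment to confirm the model generates before launching the UI:
# print(pipe("a portrait of a wizard", max_new_tokens=32)[0]["generated_text"])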
def respond(
    message,
    _: list[tuple[str, str]],  # chat history, unused by this generator
    max_new_tokens: int,
    temperature: float,
    top_p: float,
    top_k: int,
):
    # The pipeline returns a list like [{"generated_text": "..."}]; yield just the
    # generated text so ChatInterface receives a plain string to display.
    result = pipe(
        message,
        max_new_tokens=max_new_tokens,
        do_sample=True,
        temperature=temperature,
        top_p=top_p,
        top_k=top_k,
    )
    yield result[0]["generated_text"]
"""
For information on how to customize the ChatInterface, peruse the gradio docs: https://www.gradio.app/docs/chatinterface
"""
demo = gr.ChatInterface(
    respond,
    additional_inputs=[
        gr.Slider(minimum=8, maximum=128, value=64, step=8, label="Max new tokens"),
        gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
        gr.Slider(
            minimum=0.1,
            maximum=1.0,
            value=0.95,
            step=0.05,
            label="Top-p",
        ),
        gr.Slider(
            minimum=10,
            maximum=100,
            value=30,
            step=5,
            label="Top-k",
        ),
    ],
)
if __name__ == "__main__":
    demo.launch()
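# Running locally (an assumption based on the standard Gradio workflow, not part of
# the original file):
#   pip install gradio transformers torch
#   python app.py
# Gradio serves the chat UI on http://127.0.0.1:7860 by default.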