ayan4m1 commited on
Commit
1e6822b
·
1 Parent(s): 4bba648

feat: add pytorch to deps and run pipeline directly via transformers

Browse files
Files changed (2) hide show
  1. app.py +4 -14
  2. requirements.txt +4 -1
app.py CHANGED
@@ -1,10 +1,7 @@
1
  import gradio as gr
2
- from huggingface_hub import InferenceClient
3
 
4
- """
5
- For more information on `huggingface_hub` Inference API support, please check the docs: https://huggingface.co/docs/huggingface_hub/v0.22.2/en/guides/inference
6
- """
7
- client = InferenceClient(model="pszemraj/distilgpt2-magicprompt-SD")
8
 
9
 
10
  def respond(
@@ -15,21 +12,14 @@ def respond(
15
  top_p: float,
16
  top_k: int
17
  ):
18
- response = ""
19
-
20
- for message in client.text_generation(
21
  message,
22
  max_new_tokens=max_new_tokens,
23
- stream=True,
24
  do_sample=True,
25
  temperature=temperature,
26
  top_p=top_p,
27
  top_k=top_k
28
- ):
29
- token = message.choices[0].delta.content
30
-
31
- response += token
32
- yield response
33
 
34
 
35
  """
 
1
  import gradio as gr
2
+ from transformers import pipeline
3
 
4
+ pipe = pipeline("text-generation", model="pszemraj/distilgpt2-magicprompt-SD")
 
 
 
5
 
6
 
7
  def respond(
 
12
  top_p: float,
13
  top_k: int
14
  ):
15
+ yield pipe(
 
 
16
  message,
17
  max_new_tokens=max_new_tokens,
 
18
  do_sample=True,
19
  temperature=temperature,
20
  top_p=top_p,
21
  top_k=top_k
22
+ )
 
 
 
 
23
 
24
 
25
  """
requirements.txt CHANGED
@@ -1 +1,4 @@
1
- huggingface_hub==0.25.2
 
 
 
 
1
+ huggingface_hub==0.25.2
2
+ torch==2.7.0
3
+ torchvision
4
+ transformers