change space for hw5 #2
app.py CHANGED
@@ -5,7 +5,7 @@ import numpy as np
 import torch
 
 # import spaces #[uncomment to use ZeroGPU]
-from diffusers import
+from diffusers import StableDiffusionPipeline
 
 device = "cuda" if torch.cuda.is_available() else "cpu"
 # model_repo_id = "stabilityai/sdxl-turbo" # Replace to the model you would like to use
@@ -50,7 +50,7 @@ def infer(
 
     generator = torch.Generator().manual_seed(seed)
 
-    pipe =
+    pipe = StableDiffusionPipeline.from_pretrained(
         model_id,
         torch_dtype=torch_dtype,
         requires_safety_checker=False,
@@ -66,6 +66,7 @@ def infer(
         width=width,
         height=height,
         generator=generator,
+        cross_attention_kwargs={"scale": lora_scale},
     ).images[0]
 
     return image, seed
@@ -198,6 +199,7 @@ with gr.Blocks(css=css) as demo:
             seed,
             guidance_scale,
             num_inference_steps,
+            lora_scale,
         ],
         outputs=[result, seed],
     )
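
In short, the commit switches the app to StableDiffusionPipeline and threads a lora_scale value from the Gradio UI into the denoising call via cross_attention_kwargs. The following is a minimal, self-contained sketch of how the changed pieces might fit together, not the actual app.py: the model_id value, the slider ranges, the simplified infer signature, and the commented-out load_lora_weights call are assumptions, since the rest of the file is not shown in this diff.

import torch
import gradio as gr
from diffusers import StableDiffusionPipeline

device = "cuda" if torch.cuda.is_available() else "cpu"
torch_dtype = torch.float16 if torch.cuda.is_available() else torch.float32

# Placeholder checkpoint; the real model_id is defined elsewhere in app.py.
model_id = "CompVis/stable-diffusion-v1-4"


def infer(prompt, seed, guidance_scale, num_inference_steps, lora_scale,
          width=512, height=512):
    generator = torch.Generator().manual_seed(seed)

    # Load the pipeline the way the commit does, including disabling the
    # safety-checker requirement. (The diff loads it inside infer; a real app
    # would usually load it once at startup.)
    pipe = StableDiffusionPipeline.from_pretrained(
        model_id,
        torch_dtype=torch_dtype,
        requires_safety_checker=False,
    ).to(device)

    # Assumption: LoRA weights are attached somewhere not shown in the diff,
    # e.g. pipe.load_lora_weights("some-user/some-lora")  # hypothetical repo

    image = pipe(
        prompt=prompt,
        guidance_scale=guidance_scale,
        num_inference_steps=num_inference_steps,
        width=width,
        height=height,
        generator=generator,
        # New in this commit: scale the LoRA layers at inference time.
        cross_attention_kwargs={"scale": lora_scale},
    ).images[0]

    return image, seed


with gr.Blocks() as demo:
    prompt = gr.Textbox(label="Prompt")
    seed = gr.Slider(label="Seed", minimum=0, maximum=2**32 - 1, step=1, value=0)
    guidance_scale = gr.Slider(label="Guidance scale", minimum=0.0, maximum=15.0, step=0.5, value=7.5)
    num_inference_steps = gr.Slider(label="Inference steps", minimum=1, maximum=50, step=1, value=25)
    # New control corresponding to the lora_scale input added in the diff.
    lora_scale = gr.Slider(label="LoRA scale", minimum=0.0, maximum=1.0, step=0.05, value=0.7)
    result = gr.Image(label="Result")
    run_button = gr.Button("Run")

    run_button.click(
        fn=infer,
        inputs=[prompt, seed, guidance_scale, num_inference_steps, lora_scale],
        outputs=[result, seed],
    )

if __name__ == "__main__":
    demo.launch()

Note that cross_attention_kwargs={"scale": lora_scale} only affects the output once LoRA weights have actually been loaded into the pipeline (for example via pipe.load_lora_weights), which this diff does not show.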