File size: 962 Bytes
d37b44c c10891c efeeb39 d37b44c |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 |
from diffusers import DiffusionPipeline
import os
class FluxPipeline(DiffusionPipeline):
    """Minimal Flux pipeline wrapper around :class:`DiffusionPipeline`.

    Holds the standard Flux component set (VAE, two text encoders with
    matching tokenizers, transformer, scheduler) as plain attributes and
    provides a small helper for loading LoRA weights from a local file.
    """

    def __init__(
        self,
        vae,
        text_encoder,
        text_encoder_2,
        tokenizer,
        tokenizer_2,
        transformer,
        scheduler,
        **kwargs
    ):
        """Store every component on the instance.

        Extra keyword arguments are attached verbatim as attributes.
        NOTE(review): components are set with plain attribute assignment
        rather than ``self.register_modules(...)`` — presumably intentional,
        but verify that ``save_pretrained``/``to()`` still behave as expected.
        """
        super().__init__()
        named_components = (
            ("vae", vae),
            ("text_encoder", text_encoder),
            ("text_encoder_2", text_encoder_2),
            ("tokenizer", tokenizer),
            ("tokenizer_2", tokenizer_2),
            ("transformer", transformer),
            ("scheduler", scheduler),
        )
        for attr_name, component in named_components:
            setattr(self, attr_name, component)
        # Any additional keyword arguments become attributes as well.
        for extra_name, extra_value in kwargs.items():
            setattr(self, extra_name, extra_value)

    def load_attn_procs(self, path: str):
        """Load LoRA weights from ``path``.

        Raises:
            FileNotFoundError: if ``path`` does not exist on the local
                filesystem. NOTE(review): this rejects remote/hub ids —
                confirm only local paths are ever passed here.
        """
        if os.path.exists(path):
            print(f"[FluxPipeline] Loading LoRA from {path}")
            # NOTE(review): load_lora_weights is assumed to be provided by a
            # diffusers loader mixin on the base class — confirm it resolves.
            self.load_lora_weights(path)
        else:
            raise FileNotFoundError(f"LoRA file not found: {path}")
|