Spaces:
Running
on
A100
Running
on
A100
update randomness
Browse files
hyimage/diffusion/pipelines/hunyuanimage_pipeline.py
CHANGED
@@ -672,6 +672,7 @@ class HunyuanImagePipeline:
         """
         if seed is not None:
             generator = torch.Generator(device='cpu').manual_seed(seed)
+            torch.manual_seed(seed)
         else:
             generator = None
 
hyimage/diffusion/pipelines/hunyuanimage_refiner_pipeline.py
CHANGED
@@ -121,6 +121,7 @@ class HunYuanImageRefinerPipeline(HunyuanImagePipeline):
 
         if seed is not None:
            generator = torch.Generator(device='cpu').manual_seed(seed)
+            torch.manual_seed(seed)
         else:
             generator = None
 
hyimage/models/reprompt/reprompt.py
CHANGED
@@ -89,7 +89,7 @@ class RePrompt:
         )
         if self.model.device != torch.device('meta'):
             tokenized_chat = tokenized_chat.to(self.model.device)
-        outputs = self.model.generate(tokenized_chat, max_new_tokens=2048
+        outputs = self.model.generate(tokenized_chat, max_new_tokens=2048)
         if self.enable_offloading:
             self.offload_hook.offload()
         output_res = self.tokenizer.decode(outputs[0])