
Commit 6b586b7

web server: update image_progress callback for diffusers data
1 parent (1f83920) · commit 6b586b7

2 files changed, +5 −2 lines

backend/invoke_ai_web_server.py

+4 −1
@@ -15,6 +15,7 @@
 from threading import Event
 
 from ldm.invoke.args import Args, APP_ID, APP_VERSION, calculate_init_img_hash
+from ldm.invoke.generator.diffusers_pipeline import PipelineIntermediateState
 from ldm.invoke.pngwriter import PngWriter, retrieve_metadata
 from ldm.invoke.prompt_parser import split_weighted_subprompts

@@ -602,7 +603,9 @@ def generate_images(
             self.socketio.emit("progressUpdate", progress.to_formatted_dict())
             eventlet.sleep(0)
 
-        def image_progress(sample, step):
+        def image_progress(progress_state: PipelineIntermediateState):
+            step = progress_state.step
+            sample = progress_state.latents
             if self.canceled.is_set():
                 raise CanceledException

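For context, a minimal sketch of the new callback contract (not the actual InvokeAI code): image_progress now receives a single progress object instead of separate (sample, step) arguments and unpacks the two fields this commit reads. FakeIntermediateState below is a stand-in I introduce; the real PipelineIntermediateState in diffusers_pipeline.py may carry additional fields, and the tensor shape in the example is arbitrary.

# Hedged sketch: FakeIntermediateState stands in for PipelineIntermediateState,
# modeling only the two fields the web server reads in this commit.
from dataclasses import dataclass

import torch


@dataclass
class FakeIntermediateState:
    step: int
    latents: torch.Tensor


def image_progress(progress_state: FakeIntermediateState) -> None:
    # Unpack the values the old (sample, step) signature received directly.
    step = progress_state.step
    sample = progress_state.latents
    print(f"step {step}: latents shape {tuple(sample.shape)}")


# The pipeline would invoke the callback once per denoising step.
image_progress(FakeIntermediateState(step=0, latents=torch.zeros(1, 4, 64, 64)))
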
ldm/invoke/generator/diffusers_pipeline.py

+1 −1
@@ -188,7 +188,7 @@ def image_from_embeddings(self, latents: torch.Tensor, num_inference_steps: int,
         result = None
         for result in self.generate_from_embeddings(
                 latents, text_embeddings, guidance_scale, run_id, **extra_step_kwargs):
-            if callback is not None:
+            if callback is not None and isinstance(result, PipelineIntermediateState):
                 callback(result)
         if result is None:
             raise AssertionError("why was that an empty generator?")

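The isinstance guard matters because generate_from_embeddings is a generator whose last yielded value is kept as result after the loop; filtering before invoking the callback keeps anything that is not a PipelineIntermediateState away from image_progress. The sketch below uses an invented toy generator (toy_generate, run, and the "final-image" value are illustration only, not the real pipeline) to show the same guard pattern.

# Illustrative only: a toy generator yielding progress objects plus a final value,
# mirroring the pattern guarded against in image_from_embeddings.
from dataclasses import dataclass
from typing import Callable, Iterator, Optional, Union


@dataclass
class PipelineIntermediateState:  # stand-in carrying only the fields used here
    step: int
    latents: list


def toy_generate(steps: int) -> Iterator[Union[PipelineIntermediateState, str]]:
    for step in range(steps):
        yield PipelineIntermediateState(step=step, latents=[0.0])
    yield "final-image"  # a non-progress value the callback should never see


def run(callback: Optional[Callable[[PipelineIntermediateState], None]]):
    result = None
    for result in toy_generate(steps=3):
        # Same guard as the patch: only forward genuine intermediate states.
        if callback is not None and isinstance(result, PipelineIntermediateState):
            callback(result)
    return result  # the last yielded value ("final-image" here)


run(lambda state: print(f"progress: step {state.step}"))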