Spaces:
Running on Zero
Running on Zero
Update app.py
Browse files
app.py
CHANGED
|
@@ -530,6 +530,33 @@ def prepare_lora_cache(
|
|
| 530 |
progress(1.0, desc="Done!")
|
| 531 |
return f"Built and cached LoRA state: {cache_path.name}"
|
| 532 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 533 |
|
| 534 |
# =============================================================================
|
| 535 |
# Helper Functions
|
|
@@ -722,6 +749,8 @@ def generate_video(
|
|
| 722 |
|
| 723 |
log_memory("before pipeline call")
|
| 724 |
|
|
|
|
|
|
|
| 725 |
video, audio = pipeline(
|
| 726 |
prompt=prompt,
|
| 727 |
negative_prompt=negative_prompt,
|
|
|
|
| 530 |
progress(1.0, desc="Done!")
|
| 531 |
return f"Built and cached LoRA state: {cache_path.name}"
|
| 532 |
|
| 533 |
+
# =============================================================================
# LoRA State Application (called BEFORE pipeline generation)
# =============================================================================


def apply_prepared_lora_state_to_pipeline():
    """
    Apply the prepared LoRA state from ``pipeline._cached_state`` to the
    preloaded transformer (module-global ``_transformer``).

    This should be called BEFORE pipeline generation, not during.

    Returns:
        bool: True if the cached state was loaded into the transformer,
        False if no cached state was available or the load failed.

    Side effects:
        Mutates the preloaded transformer's weights in place via
        ``load_state_dict(strict=False)`` and logs progress to stdout.
    """
    # Read once via getattr: avoids a second attribute lookup and an
    # AttributeError if the attribute was never set on the pipeline.
    cached_state = getattr(pipeline, "_cached_state", None)
    if cached_state is None:
        print("[LoRA] No prepared LoRA state available; skipping.")
        return False

    try:
        existing_transformer = _transformer  # The preloaded transformer from globals
        with torch.no_grad():
            # strict=False: the cached state may cover only a subset of the
            # transformer's parameters (LoRA-merged weights); report any
            # mismatches instead of raising.
            missing, unexpected = existing_transformer.load_state_dict(
                cached_state, strict=False
            )
        if missing:
            print(f"[LoRA] load_state_dict mismatch: missing={len(missing)} keys")
        if unexpected:
            print(f"[LoRA] load_state_dict mismatch: unexpected={len(unexpected)} keys")

        print("[LoRA] Prepared LoRA state applied to the pipeline.")
        return True
    except Exception as e:
        # Broad catch is deliberate: LoRA application is best-effort and must
        # not abort the subsequent generation call.
        print(f"[LoRA] Failed to apply LoRA state: {type(e).__name__}: {e}")
        return False
|
| 560 |
|
| 561 |
# =============================================================================
|
| 562 |
# Helper Functions
|
|
|
|
| 749 |
|
| 750 |
log_memory("before pipeline call")
|
| 751 |
|
| 752 |
+
apply_prepared_lora_state_to_pipeline()
|
| 753 |
+
|
| 754 |
video, audio = pipeline(
|
| 755 |
prompt=prompt,
|
| 756 |
negative_prompt=negative_prompt,
|