dagloop5 committed on
Commit 45e9a0f · verified · 1 Parent(s): d2437a0

Update app.py

Files changed (1)
  1. app.py +3 -1
app.py CHANGED
@@ -411,6 +411,8 @@ def prepare_distilled_default_state():
     gc.collect()
     print("Distilled default LoRA state prepared.")
 
+prepare_distilled_default_state()
+
 def _make_lora_key(pose_strength: float, general_strength: float, motion_strength: float, dreamlay_strength: float, mself_strength: float, dramatic_strength: float, fluid_strength: float, liquid_strength: float, demopose_strength: float) -> tuple[str, str]:
     rp = round(float(pose_strength), 2)
     rg = round(float(general_strength), 2)
@@ -584,13 +586,13 @@ _orig_gemma_embeddings_factory = ledger.gemma_embeddings_processor
 
 # Call the original factories once to create the cached instances we will serve by default.
 _transformer = _orig_transformer_factory()
-_transformer = _orig_transformer_factory()
 if DISTILLED_DEFAULT_STATE is not None:
     with torch.no_grad():
         missing, unexpected = _transformer.load_state_dict(DISTILLED_DEFAULT_STATE, strict=False)
     if missing or unexpected:
         print(f"[Distilled default] load_state_dict mismatch: missing={len(missing)}, unexpected={len(unexpected)}")
     print("[Distilled default] applied to transformer.")
+
 _video_encoder = _orig_video_encoder_factory()
 _video_decoder = _orig_video_decoder_factory()
 _audio_encoder = _orig_audio_encoder_factory()
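
For context on the pattern being changed: the commit calls prepare_distilled_default_state() once at import time, drops a duplicated _transformer = _orig_transformer_factory() line, and then merges the pre-built distilled state into the single cached transformer via load_state_dict(..., strict=False). Below is a minimal, self-contained sketch of that load pattern; TinyTransformer, apply_default_state, and DEFAULT_STATE are illustrative stand-ins and not names from app.py.

```python
import torch
import torch.nn as nn


# Stand-in module; in app.py the real object comes from _orig_transformer_factory().
class TinyTransformer(nn.Module):
    def __init__(self) -> None:
        super().__init__()
        self.proj = nn.Linear(8, 8)


def apply_default_state(model: nn.Module, state: dict | None) -> nn.Module:
    """Apply a pre-merged (e.g. LoRA-distilled) state dict onto a freshly built model.

    strict=False tolerates partial key overlap; missing/unexpected keys are
    reported instead of raising, mirroring the logging in the commit.
    """
    if state is None:
        return model
    with torch.no_grad():
        missing, unexpected = model.load_state_dict(state, strict=False)
    if missing or unexpected:
        print(f"[default state] mismatch: missing={len(missing)}, unexpected={len(unexpected)}")
    print("[default state] applied.")
    return model


if __name__ == "__main__":
    donor = TinyTransformer()           # pretend this already holds merged LoRA weights
    DEFAULT_STATE = donor.state_dict()  # analogue of DISTILLED_DEFAULT_STATE
    model = apply_default_state(TinyTransformer(), DEFAULT_STATE)
```

Building the model once and mutating its weights in place keeps a single cached instance to serve, rather than constructing the transformer twice as the removed duplicate line did.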