Text-to-Image
Diffusers
Safetensors

fix: typo in llm attribute name causes inference to fail

#1

This PR aims to fix the following error:

---------------------------------------------------------------------------
AttributeError                            Traceback (most recent call last)
Cell In[5], line 1
----> 1 result = pipe(
      2     prompt="a racoon holding a shiny red apple over its head",
      3     height=512, width=512,
      4     num_inference_steps=50,
      5     guidance_scale=4.0,
      6     seed=42,
      7 )

File ~/Models/Playground/venv/lib/python3.12/site-packages/torch/utils/_contextlib.py:124, in context_decorator.<locals>.decorate_context(*args, **kwargs)
    120 @functools.wraps(func)
    121 def decorate_context(*args, **kwargs):
    122     # pyrefly: ignore [bad-context-manager]
    123     with ctx_factory():
--> 124         return func(*args, **kwargs)

File ~/.cache/huggingface/modules/diffusers_modules/local/deepgen_pipeline.py:1336, in DeepGenPipeline.__call__(self, prompt, image, negative_prompt, height, width, num_inference_steps, guidance_scale, seed, num_images_per_prompt)
   1332     text_inputs = self.prepare_text2image_prompts(prompt + cfg_prompt)
   1334 hidden_states = self.connector_module.meta_queries[None].expand(
   1335     2 * b, self.num_queries, -1)
-> 1336 inputs = self.prepare_forward_input(query_embeds=hidden_states, **text_inputs)
   1337 output = self.llm(**inputs, return_dict=True, output_hidden_states=True)
   1339 # SCB: extract multi-layer hidden states

File ~/.cache/huggingface/modules/diffusers_modules/local/deepgen_pipeline.py:1188, in DeepGenPipeline.prepare_forward_input(self, query_embeds, input_ids, image_embeds, image_grid_thw, attention_mask, past_key_values)
   1186 input_ids = input_ids[:, :-l]
   1187 if image_embeds is None:
-> 1188     inputs_embeds = self.llm.get_input_embeddings()(input_ids)
   1189 else:
   1190     inputs_embeds = torch.zeros(
   1191         *input_ids.shape, self.llm.config.hidden_size,
   1192         device=self._gpu_device, dtype=self.transformer.dtype)

File ~/Models/Playground/venv/lib/python3.12/site-packages/diffusers/configuration_utils.py:144, in ConfigMixin.__getattr__(self, name)
    141     deprecate("direct config name access", "1.0.0", deprecation_message, standard_warn=False)
    142     return self._internal_dict[name]
--> 144 raise AttributeError(f"'{type(self).__name__}' object has no attribute '{name}'")

AttributeError: 'DeepGenPipeline' object has no attribute 'llm'
KevinZonda changed pull request status to closed
deepgen org

Could you try creating a new environment and aligning the versions as follows:

PyTorch 2.8.0
diffusers 0.35.2
transformers 4.56.1

Looking forward to your reply.

Sign up or log in to comment