# Standard library
import os
import json

# Third-party
from PIL import Image
import gradio as gr
def load_examples(examples_base_path=os.path.join("apps", "gradio_app",
                  "assets", "examples", "Stable-Diffusion-2.1-Openpose-ControlNet")):
    """Load example configurations and input images from the
    Stable-Diffusion-2.1-Openpose-ControlNet directory.

    Each example lives in its own sub-folder containing a ``config.json``
    plus the input/output image files the config references.

    Args:
        examples_base_path: Directory whose sub-folders each hold one example.

    Returns:
        list: One row per valid example, in the order expected by the Gradio
        examples table: [input_image, prompt, negative_prompt, output_image,
        num_steps, seed, width, height, guidance_scale,
        controlnet_conditioning_scale, use_random_seed (always False)].
    """
    examples = []

    # Guard: a missing base directory previously crashed with
    # FileNotFoundError from os.listdir; degrade to "no examples" instead.
    if not os.path.isdir(examples_base_path):
        print(f"Examples directory not found at {examples_base_path}")
        return examples

    # sorted() makes the example order deterministic across filesystems.
    for folder in sorted(os.listdir(examples_base_path)):
        folder_path = os.path.join(examples_base_path, folder)

        # Skip stray files (e.g. .DS_Store) that are not example folders.
        if not os.path.isdir(folder_path):
            continue

        config_path = os.path.join(folder_path, "config.json")
        if not os.path.exists(config_path):
            continue

        try:
            with open(config_path, 'r', encoding='utf-8') as f:
                config = json.load(f)

            # Required keys — a KeyError here is reported by the broad handler.
            input_filename = config["input_image"]
            output_filename = config["output_image"]

            # Optional keys fall back to the demo's defaults.
            prompt = config.get("prompt", "a man is doing yoga")
            negative_prompt = config.get("negative_prompt", "monochrome, lowres, bad anatomy, worst quality, low quality")
            num_steps = config.get("num_steps", 30)
            seed = config.get("seed", 42)
            width = config.get("width", 512)
            height = config.get("height", 512)
            guidance_scale = config.get("guidance_scale", 7.5)
            controlnet_conditioning_scale = config.get("controlnet_conditioning_scale", 1.0)

            input_image_path = os.path.join(folder_path, input_filename)
            output_image_path = os.path.join(folder_path, output_filename)

            # Verify BOTH images exist before opening; the original checked
            # only the input image and let Image.open fail on the output.
            if os.path.exists(input_image_path) and os.path.exists(output_image_path):
                input_image_data = Image.open(input_image_path)
                output_image_data = Image.open(output_image_path)

                examples.append([
                    input_image_data,
                    prompt,
                    negative_prompt,
                    output_image_data,
                    num_steps,
                    seed,
                    width,
                    height,
                    guidance_scale,
                    controlnet_conditioning_scale,
                    False,  # use_random_seed: examples always replay the stored seed
                ])
            else:
                print(f"Input or output image not found in {folder_path}")

        except json.JSONDecodeError as e:
            print(f"Error decoding JSON from {config_path}: {str(e)}")
        except Exception as e:
            # Best-effort loading: one broken example must not break the UI.
            print(f"Error processing example in {folder_path}: {str(e)}")

    return examples
| |
|
def select_example(evt: gr.SelectData, examples_data):
    """Handle selection of an example to populate Gradio inputs.

    Looks up the clicked row in ``examples_data`` and returns its fields,
    followed by a human-readable status message.
    """
    row_index = evt.index
    selected = examples_data[row_index]

    # Unpack the 11-field example row; a malformed row raises ValueError.
    (image_in, text_prompt, text_negative, image_out, steps, seed_value,
     img_width, img_height, cfg_scale, control_scale, random_seed_flag) = selected

    status_message = f"Loaded example {row_index + 1} with prompt: {text_prompt}"

    return (image_in, text_prompt, text_negative, image_out, steps, seed_value,
            img_width, img_height, cfg_scale, control_scale, random_seed_flag,
            status_message)