File size: 10,535 Bytes
c5292d8
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
58bc784
c5292d8
 
58bc784
 
 
 
 
 
 
 
c5292d8
248099e
 
 
c5292d8
 
 
248099e
 
 
c5292d8
58bc784
c5292d8
 
248099e
 
 
c5292d8
248099e
c5292d8
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
248099e
 
 
 
 
 
 
 
 
 
c5292d8
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
"""LLM-based code generator for creating web applications."""
import json
import re
from pathlib import Path
from typing import Any

import anthropic
import openai

from shared.config import settings
from shared.logger import setup_logger
from shared.models import Attachment, TaskRequest
from shared.utils import decode_data_uri

logger = setup_logger(__name__)


class CodeGenerator:
    """Generate code using LLM based on task requirements."""

    def __init__(self) -> None:
        """Initialize code generator with LLM client.

        Raises:
            ValueError: If the configured provider is not one of
                "anthropic", "openai", or "aipipe".
        """
        self.provider = settings.llm_provider
        self.model = settings.llm_model

        if self.provider == "anthropic":
            self.client = anthropic.Anthropic(api_key=settings.anthropic_api_key)
        elif self.provider == "openai":
            self.client = openai.OpenAI(api_key=settings.openai_api_key)
        elif self.provider == "aipipe":
            # AIPipe exposes an OpenAI-compatible API, so reuse the OpenAI
            # client pointed at the AIPipe base URL.
            self.client = openai.OpenAI(
                api_key=settings.aipipe_token,
                base_url=settings.aipipe_base_url,
            )
        else:
            raise ValueError(f"Unsupported LLM provider: {self.provider}")

        logger.info(f"Initialized CodeGenerator with {self.provider}/{self.model}")

    def generate_app(self, task: TaskRequest, output_dir: Path) -> dict[str, str]:
        """Generate application code based on task requirements.

        Args:
            task: Task request containing brief and requirements
            output_dir: Directory to save generated files

        Returns:
            Dictionary mapping filenames to their content
        """
        logger.info(f"Generating app for task {task.task}")

        # Decode attachments to disk first so their metadata can be
        # referenced in the generation prompt.
        attachment_info = self._prepare_attachments(task.attachments, output_dir)

        # Build prompt
        prompt = self._build_generation_prompt(task, attachment_info)

        # Generate code (falls back to a minimal template on LLM failure)
        generated_files = self._generate_with_llm(prompt)

        # Save files
        self._save_files(generated_files, output_dir)

        logger.info(f"Generated {len(generated_files)} files for task {task.task}")
        return generated_files

    def _prepare_attachments(
        self, attachments: list[Attachment], output_dir: Path
    ) -> list[dict[str, Any]]:
        """Decode and save attachments, return metadata.

        A failed attachment is recorded as ``{"name": ..., "error": ...}``
        rather than aborting the whole generation.

        Args:
            attachments: List of attachments with data URIs
            output_dir: Directory to save attachments

        Returns:
            List of attachment metadata
        """
        attachment_info: list[dict[str, Any]] = []

        for att in attachments:
            try:
                mime_type, content = decode_data_uri(att.url)
                file_path = output_dir / att.name
                file_path.write_bytes(content)

                attachment_info.append(
                    {
                        "name": att.name,
                        "mime_type": mime_type,
                        "size": len(content),
                        # Only text attachments get an inline preview;
                        # binary data would be unreadable noise in the prompt.
                        "preview": content[:200].decode("utf-8", errors="ignore")
                        if mime_type.startswith("text/")
                        else "[binary data]",
                    }
                )
                logger.debug(f"Saved attachment {att.name} ({mime_type}, {len(content)} bytes)")
            except Exception as e:
                logger.error(f"Failed to process attachment {att.name}: {e}")
                attachment_info.append({"name": att.name, "error": str(e)})

        return attachment_info

    def _build_generation_prompt(
        self, task: TaskRequest, attachment_info: list[dict[str, Any]]
    ) -> str:
        """Build prompt for LLM code generation.

        Args:
            task: Task request
            attachment_info: Attachment metadata

        Returns:
            Formatted prompt
        """
        attachments_section = ""
        if attachment_info:
            attachments_section = "\n\n**Attachments:**\n" + "\n".join(
                f"- {att['name']}: {att.get('mime_type', 'unknown')}"
                for att in attachment_info
            )

        checks_section = "\n\n**Requirements (will be tested):**\n" + "\n".join(
            f"- {check}" for check in task.checks
        )

        prompt = f"""You are an expert web developer. Create a complete, production-ready single-page web application based on the following requirements.

**Task:** {task.task}

**Brief:** {task.brief}{attachments_section}{checks_section}

**Instructions:**
1. Create a minimal, functional web application that meets ALL requirements
2. Use only vanilla HTML, CSS, and JavaScript (no build tools required)
3. Include all necessary CDN links for external libraries (Bootstrap, marked, highlight.js, etc.)
4. Ensure the app is self-contained in a single index.html file or minimal files
5. Follow best practices for code quality, accessibility, and user experience
6. Include helpful comments explaining key functionality
7. Make the UI clean and professional using Bootstrap 5 or similar

**Output Format:**
Provide the complete code for each file in JSON format:
```json
{{
  "index.html": "<!DOCTYPE html>...",
  "style.css": "/* optional styles */",
  "script.js": "// optional separate JS",
  "README.md": "# Project Title\\n\\n..."
}}
```

Generate ONLY the JSON output, no other text. Ensure all code is complete and functional.
"""
        return prompt

    def _generate_with_llm(self, prompt: str) -> dict[str, str]:
        """Call LLM API to generate code.

        Args:
            prompt: Generation prompt

        Returns:
            Dictionary of filename -> content; a minimal fallback
            template if the API call or JSON extraction fails.
        """
        logger.info(f"Calling {self.provider} API for code generation")

        try:
            if self.provider == "anthropic":
                response = self.client.messages.create(
                    model=self.model,
                    max_tokens=4096,
                    temperature=0.3,
                    messages=[{"role": "user", "content": prompt}],
                )
                content = response.content[0].text

            elif self.provider in ["openai", "aipipe"]:
                # Both OpenAI and AIPipe use the same API format
                response = self.client.chat.completions.create(
                    model=self.model,
                    messages=[{"role": "user", "content": prompt}],
                    temperature=0.3,
                    max_tokens=4096,
                )
                content = response.choices[0].message.content

            else:
                raise ValueError(f"Unsupported provider: {self.provider}")

            # Extract JSON from response
            return self._extract_json(content)

        except Exception as e:
            logger.error(f"LLM generation failed: {e}")
            # Fallback to minimal template
            return self._get_fallback_template()

    @staticmethod
    def _drop_null_values(result: dict[str, Any]) -> dict[str, Any]:
        """Drop entries whose value is None (LLMs occasionally emit nulls)."""
        return {k: v for k, v in result.items() if v is not None}

    def _extract_json(self, content: str) -> dict[str, str]:
        """Extract JSON from LLM response.

        Tries, in order: a ```json fenced block, a plain fenced block,
        the whole response, then any brace-delimited span. Matching is
        greedy so nested braces inside the object are captured.

        Args:
            content: LLM response text

        Returns:
            Parsed JSON dictionary with None values removed

        Raises:
            ValueError: If no JSON object can be found in the response.
            json.JSONDecodeError: If a located candidate is malformed
                (propagates to the caller's fallback handling).
        """
        for pattern in (r"```json\s*(\{.*\})\s*```", r"```\s*(\{.*\})\s*```"):
            json_match = re.search(pattern, content, re.DOTALL)
            if json_match:
                return self._drop_null_values(json.loads(json_match.group(1)))

        # No fenced block: try the whole content as JSON.
        try:
            return self._drop_null_values(json.loads(content))
        except json.JSONDecodeError:
            # Last resort: any brace-delimited span in the text.
            json_match = re.search(r"\{.*\}", content, re.DOTALL)
            if json_match:
                return self._drop_null_values(json.loads(json_match.group(0)))

        logger.error(f"Could not extract JSON from LLM response: {content[:200]}")
        raise ValueError("Could not extract JSON from LLM response")

    def _get_fallback_template(self) -> dict[str, str]:
        """Get fallback template when LLM generation fails.

        Returns:
            Basic HTML template
        """
        return {
            "index.html": """<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="UTF-8">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <title>Generated App</title>
    <link href="https://cdn.jsdelivr.net/npm/bootstrap@5.3.0/dist/css/bootstrap.min.css" rel="stylesheet">
</head>
<body>
    <div class="container mt-5">
        <h1>Application</h1>
        <p>This is a minimal fallback template.</p>
    </div>
    <script src="https://cdn.jsdelivr.net/npm/bootstrap@5.3.0/dist/js/bootstrap.bundle.min.js"></script>
</body>
</html>""",
            "README.md": """# Generated Application

This is an automatically generated web application.

## Setup

Simply open `index.html` in a web browser.

## License

MIT License
""",
        }

    def _save_files(self, files: dict[str, str], output_dir: Path) -> None:
        """Save generated files to output directory.

        Args:
            files: Dictionary of filename -> content
            output_dir: Directory to save files
        """
        output_dir.mkdir(parents=True, exist_ok=True)

        for filename, content in files.items():
            # Skip None content (defensive: _extract_json already drops it)
            if content is None:
                logger.warning(f"Skipping {filename} - content is None")
                continue

            # Coerce non-string content (e.g. a nested JSON object) to text
            if not isinstance(content, str):
                logger.warning(f"Converting {filename} content to string")
                content = str(content)

            file_path = output_dir / filename
            file_path.write_text(content, encoding="utf-8")
            logger.debug(f"Saved {filename} ({len(content)} bytes)")