# Hugging Face Space (status: Sleeping) — Dockerfile recovered from the Space page.
FROM python:3.10-slim

WORKDIR /app

# Install Python dependencies first so the Docker layer cache survives code edits.
COPY requirements.txt .
RUN pip install --no-cache-dir --upgrade pip && \
    pip install --no-cache-dir -r requirements.txt

# Pre-download models during build so runtime stays offline.
# NOTE(review): TRANSFORMERS_CACHE is deprecated in recent transformers in favor
# of HF_HOME, but it is kept here for older versions that still read it.
ENV TRANSFORMERS_CACHE=/app/.cache/huggingface/transformers
ENV HF_HOME=/app/.cache/huggingface
RUN python -c "\
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM, AutoModelForCausalLM; \
AutoTokenizer.from_pretrained('ArchCoder/fine-tuned-bart-large'); \
AutoModelForSeq2SeqLM.from_pretrained('ArchCoder/fine-tuned-bart-large'); \
AutoTokenizer.from_pretrained('Qwen/Qwen2.5-Coder-0.5B-Instruct'); \
AutoModelForCausalLM.from_pretrained('Qwen/Qwen2.5-Coder-0.5B-Instruct'); \
print('Models downloaded successfully')"

# Copy application code after the dependency/model layers so app edits
# do not invalidate the expensive model-download layer.
COPY . .
RUN mkdir -p /app/data/uploads /app/static/images

# Lock to offline at runtime — all models are already cached.
ENV HF_HUB_OFFLINE=1
ENV TRANSFORMERS_OFFLINE=1
ENV HF_HUB_DISABLE_TELEMETRY=1

# 7860 is the port Hugging Face Spaces expects the app to listen on.
EXPOSE 7860
CMD ["python", "app.py"]