# Hugging Face Space page metadata (scrape residue, kept as a comment):
# Space status: Sleeping
# File size: 997 Bytes
# syntax=docker/dockerfile:1

FROM python:3.10-slim

WORKDIR /app

# Install dependencies first so this layer stays cached until
# requirements.txt itself changes (source edits won't re-run pip).
COPY requirements.txt .
RUN pip install --no-cache-dir --upgrade pip && \
    pip install --no-cache-dir -r requirements.txt

# Pre-download models during build so runtime stays offline.
# HF_HOME is the supported cache root in current huggingface_hub;
# TRANSFORMERS_CACHE is kept for older transformers versions that still read it.
ENV HF_HOME=/app/.cache/huggingface \
    TRANSFORMERS_CACHE=/app/.cache/huggingface/transformers
RUN python -c "\
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM, AutoModelForCausalLM; \
AutoTokenizer.from_pretrained('ArchCoder/fine-tuned-bart-large'); \
AutoModelForSeq2SeqLM.from_pretrained('ArchCoder/fine-tuned-bart-large'); \
AutoTokenizer.from_pretrained('Qwen/Qwen2.5-Coder-0.5B-Instruct'); \
AutoModelForCausalLM.from_pretrained('Qwen/Qwen2.5-Coder-0.5B-Instruct'); \
print('Models downloaded successfully')"

# Application code changes most often — copy it last for best cache reuse.
COPY . .
RUN mkdir -p /app/data/uploads /app/static/images

# Run as non-root; uid 1000 matches the Hugging Face Spaces convention.
# chown covers the model cache and writable data/static dirs created above.
RUN useradd --create-home --uid 1000 appuser && \
    chown -R appuser:appuser /app
USER appuser

# Lock to offline at runtime — all models are already cached.
# PYTHONUNBUFFERED ensures logs reach the container runtime immediately.
ENV HF_HUB_OFFLINE=1 \
    TRANSFORMERS_OFFLINE=1 \
    HF_HUB_DISABLE_TELEMETRY=1 \
    PYTHONUNBUFFERED=1

# Documentation only: 7860 is the default Spaces app port.
EXPOSE 7860

# Exec form so python is PID 1 and receives SIGTERM from `docker stop`.
CMD ["python", "app.py"]