# CUDA 12.1 devel image (includes nvcc, needed to build quantization
# kernels from source later in this file).
FROM nvidia/cuda:12.1.1-cudnn8-devel-ubuntu22.04
LABEL maintainer="Hugging Face"

# Avoid interactive prompts (tzdata, keyboard-configuration, ...) during apt installs.
ARG DEBIAN_FRONTEND=noninteractive

# Use a *login* shell so every RUN step sources ~/.profile.
# Later steps rely on this to share the computed $VERSION torch spec
# across RUN layers (see the torch install section below).
SHELL ["sh", "-lc"]

# Torch version to pin; if set to an empty string the latest release is installed.
ARG PYTORCH='2.6.0'
# CUDA wheel index suffix for the PyTorch extra index URL.
ARG CUDA='cu121'

# NOTE(review): presumably disables transformers' kernels-hub kernel
# remapping so the locally-installed kernels are exercised — confirm.
ENV DISABLE_KERNEL_MAPPING=1
| |
|
# Install system dependencies.
# - `apt-get` instead of `apt` (apt's CLI is not stable for scripts).
# - update + install in ONE layer: a separate `RUN apt update` layer can be
#   cached and go stale, making later installs fail or fetch old packages.
# - drop /var/lib/apt/lists afterwards to keep the image small.
RUN apt-get update && \
    apt-get install -y git libsndfile1-dev tesseract-ocr espeak-ng python3 python3-pip ffmpeg && \
    rm -rf /var/lib/apt/lists/*
# Up-to-date pip, without keeping a wheel cache in the layer.
RUN python3 -m pip install --no-cache-dir --upgrade pip
| |
|
# Git reference (branch, tag or commit SHA) of transformers to check out.
ARG REF=main
RUN git clone https://github.com/huggingface/transformers && cd transformers && git checkout $REF
| |
|
# Build the torch version spec: 'torch==<PYTORCH>.*' when a version was
# given, plain 'torch' (latest) otherwise. The spec is exported from
# ~/.profile so that *subsequent* RUN layers see $VERSION — this only
# works because SHELL is a login shell ("sh -lc", see top of file).
RUN [ ${#PYTORCH} -gt 0 ] && VERSION='torch=='$PYTORCH'.*' || VERSION='torch'; echo "export VERSION='$VERSION'" >> ~/.profile
# Sanity echo: proves $VERSION survived into a fresh RUN layer.
RUN echo torch=$VERSION
# Install torch/vision/audio from the CUDA-specific wheel index.
RUN python3 -m pip install --no-cache-dir -U $VERSION torchvision torchaudio --extra-index-url https://download.pytorch.org/whl/$CUDA
| |
|
# Latest accelerate from main (dev setup tracks main, like transformers).
RUN python3 -m pip install --no-cache-dir git+https://github.com/huggingface/accelerate@main

# einops — required by several model implementations under test.
RUN python3 -m pip install --no-cache-dir einops

# bitsandbytes — 8-bit/4-bit quantization backend.
RUN python3 -m pip install --no-cache-dir bitsandbytes

# GPTQ support: lm_eval for evaluation, GPTQModel built from source.
# Normalized to the file's `python3 -m pip install --no-cache-dir`
# convention (the originals used bare `pip` and kept the wheel cache).
RUN python3 -m pip install --no-cache-dir lm_eval
RUN git clone https://github.com/ModelCloud/GPTQModel.git && \
    cd GPTQModel && \
    python3 -m pip install --no-cache-dir -v . --no-build-isolation

# Latest optimum from main.
RUN python3 -m pip install --no-cache-dir git+https://github.com/huggingface/optimum@main

# Latest peft from main.
RUN python3 -m pip install --no-cache-dir git+https://github.com/huggingface/peft@main

# AQLM quantization (pinned — newer releases unverified against this image).
RUN python3 -m pip install --no-cache-dir aqlm[gpu]==1.0.2

# VPTQ quantization.
RUN python3 -m pip install --no-cache-dir vptq
| |
|
| | |
| | |
| | |
| |
|
| | |
# HQQ quantization.
RUN python3 -m pip install --no-cache-dir hqq

# GGUF checkpoint-format support.
RUN python3 -m pip install --no-cache-dir gguf

# AutoAWQ with its prebuilt kernels extra.
RUN python3 -m pip install --no-cache-dir autoawq[kernels]

# Quanto backend (optimum-quanto).
RUN python3 -m pip install --no-cache-dir optimum-quanto

# EETQ is built from source; its kernels live in git submodules, so they
# must be fetched before installing. Normalized the bare `pip install .`
# to the file's `python3 -m pip install --no-cache-dir` convention.
RUN git clone https://github.com/NetEase-FuXi/EETQ.git && \
    cd EETQ/ && \
    git submodule update --init --recursive && \
    python3 -m pip install --no-cache-dir .
| |
|
| | |
| | |
| | |
| | |
| | |
| |
|
| | |
# compressed-tensors — loading compressed/quantized checkpoints.
RUN python3 -m pip install --no-cache-dir compressed-tensors

# AMD Quark quantization.
RUN python3 -m pip install --no-cache-dir amd-quark

# auto-round quantization; lower bound quoted so the specifier is not
# mangled by the shell.
RUN python3 -m pip install --no-cache-dir "auto-round>=0.5.0"

# Editable install of the transformers checkout (cloned earlier in this
# file) with the torch dev extras, pulling in the test dependencies.
RUN python3 -m pip install --no-cache-dir -e ./transformers[dev-torch]

# NOTE(review): `setup.py develop` is deprecated and looks redundant after
# the editable install above — presumably kept so Python resolves
# `transformers` to the local working tree; confirm before removing.
RUN cd transformers && python3 setup.py develop
| |
|