vllm (latest)
Published 2025-08-08 05:34:49 +00:00 by eric
Installation
docker pull git.ericxliu.me/eric/vllm:latest
sha256:1af98ce481e3eee77a9bc9e30483427bc169e75246281c94c346b005cece835e
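The entrypoint launches the vLLM OpenAI-compatible API server and the container listens on port 8000, so a minimal run command on a host with NVIDIA drivers and the NVIDIA Container Toolkit looks roughly like the sketch below. The model name is only a placeholder, and anything placed after the image name is forwarded as arguments to the server:
# sketch: serve a model on port 8000 (--ipc=host gives PyTorch enough shared memory)
# the model below is a placeholder; substitute whichever model you want to serve
docker run --rm --gpus all --ipc=host -p 8000:8000 \
  git.ericxliu.me/eric/vllm:latest \
  --model Qwen/Qwen2.5-1.5B-Instruct --host 0.0.0.0 --port 8000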
Image Layers
ARG RELEASE |
ARG LAUNCHPAD_BUILD_ARCH |
LABEL org.opencontainers.image.ref.name=ubuntu |
LABEL org.opencontainers.image.version=24.04 |
ADD file:b4619a63cd7829e1338ddaa4995ca17003002dd54b0dfd675a6f54a2b69151a6 in / |
CMD ["/bin/bash"] |
ENV NVARCH=x86_64 |
ENV NVIDIA_REQUIRE_CUDA=cuda>=12.9 brand=unknown,driver>=535,driver<536 brand=grid,driver>=535,driver<536 brand=tesla,driver>=535,driver<536 brand=nvidia,driver>=535,driver<536 brand=quadro,driver>=535,driver<536 brand=quadrortx,driver>=535,driver<536 brand=nvidiartx,driver>=535,driver<536 brand=vapps,driver>=535,driver<536 brand=vpc,driver>=535,driver<536 brand=vcs,driver>=535,driver<536 brand=vws,driver>=535,driver<536 brand=cloudgaming,driver>=535,driver<536 brand=unknown,driver>=550,driver<551 brand=grid,driver>=550,driver<551 brand=tesla,driver>=550,driver<551 brand=nvidia,driver>=550,driver<551 brand=quadro,driver>=550,driver<551 brand=quadrortx,driver>=550,driver<551 brand=nvidiartx,driver>=550,driver<551 brand=vapps,driver>=550,driver<551 brand=vpc,driver>=550,driver<551 brand=vcs,driver>=550,driver<551 brand=vws,driver>=550,driver<551 brand=cloudgaming,driver>=550,driver<551 brand=unknown,driver>=560,driver<561 brand=grid,driver>=560,driver<561 brand=tesla,driver>=560,driver<561 brand=nvidia,driver>=560,driver<561 brand=quadro,driver>=560,driver<561 brand=quadrortx,driver>=560,driver<561 brand=nvidiartx,driver>=560,driver<561 brand=vapps,driver>=560,driver<561 brand=vpc,driver>=560,driver<561 brand=vcs,driver>=560,driver<561 brand=vws,driver>=560,driver<561 brand=cloudgaming,driver>=560,driver<561 brand=unknown,driver>=565,driver<566 brand=grid,driver>=565,driver<566 brand=tesla,driver>=565,driver<566 brand=nvidia,driver>=565,driver<566 brand=quadro,driver>=565,driver<566 brand=quadrortx,driver>=565,driver<566 brand=nvidiartx,driver>=565,driver<566 brand=vapps,driver>=565,driver<566 brand=vpc,driver>=565,driver<566 brand=vcs,driver>=565,driver<566 brand=vws,driver>=565,driver<566 brand=cloudgaming,driver>=565,driver<566 brand=unknown,driver>=570,driver<571 brand=grid,driver>=570,driver<571 brand=tesla,driver>=570,driver<571 brand=nvidia,driver>=570,driver<571 brand=quadro,driver>=570,driver<571 brand=quadrortx,driver>=570,driver<571 brand=nvidiartx,driver>=570,driver<571 brand=vapps,driver>=570,driver<571 brand=vpc,driver>=570,driver<571 brand=vcs,driver>=570,driver<571 brand=vws,driver>=570,driver<571 brand=cloudgaming,driver>=570,driver<571 |
ENV NV_CUDA_CUDART_VERSION=12.9.79-1 |
ARG TARGETARCH |
LABEL maintainer=NVIDIA CORPORATION <cudatools@nvidia.com> |
RUN |1 TARGETARCH=amd64 /bin/sh -c apt-get update && apt-get install -y --no-install-recommends gnupg2 curl ca-certificates && curl -fsSL https://developer.download.nvidia.com/compute/cuda/repos/ubuntu2404/${NVARCH}/3bf863cc.pub | apt-key add - && echo "deb https://developer.download.nvidia.com/compute/cuda/repos/ubuntu2404/${NVARCH} /" > /etc/apt/sources.list.d/cuda.list && apt-get purge --autoremove -y curl && rm -rf /var/lib/apt/lists/* # buildkit |
ENV CUDA_VERSION=12.9.1 |
RUN |1 TARGETARCH=amd64 /bin/sh -c apt-get update && apt-get install -y --no-install-recommends cuda-cudart-12-9=${NV_CUDA_CUDART_VERSION} cuda-compat-12-9 && rm -rf /var/lib/apt/lists/* # buildkit |
RUN |1 TARGETARCH=amd64 /bin/sh -c echo "/usr/local/cuda/lib64" >> /etc/ld.so.conf.d/nvidia.conf # buildkit |
ENV PATH=/usr/local/cuda/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin |
ENV LD_LIBRARY_PATH=/usr/local/cuda/lib64 |
COPY NGC-DL-CONTAINER-LICENSE / # buildkit |
ENV NVIDIA_VISIBLE_DEVICES=all |
ENV NVIDIA_DRIVER_CAPABILITIES=compute,utility |
ENV NV_CUDA_LIB_VERSION=12.9.1-1 |
ENV NV_NVTX_VERSION=12.9.79-1 |
ENV NV_LIBNPP_VERSION=12.4.1.87-1 |
ENV NV_LIBNPP_PACKAGE=libnpp-12-9=12.4.1.87-1 |
ENV NV_LIBCUSPARSE_VERSION=12.5.10.65-1 |
ENV NV_LIBCUBLAS_PACKAGE_NAME=libcublas-12-9 |
ENV NV_LIBCUBLAS_VERSION=12.9.1.4-1 |
ENV NV_LIBCUBLAS_PACKAGE=libcublas-12-9=12.9.1.4-1 |
ENV NV_LIBNCCL_PACKAGE_NAME=libnccl2 |
ENV NV_LIBNCCL_PACKAGE_VERSION=2.27.3-1 |
ENV NCCL_VERSION=2.27.3-1 |
ENV NV_LIBNCCL_PACKAGE=libnccl2=2.27.3-1+cuda12.9 |
ARG TARGETARCH |
LABEL maintainer=NVIDIA CORPORATION <cudatools@nvidia.com> |
RUN |1 TARGETARCH=amd64 /bin/sh -c apt-get update && apt-get install -y --no-install-recommends cuda-libraries-12-9=${NV_CUDA_LIB_VERSION} ${NV_LIBNPP_PACKAGE} cuda-nvtx-12-9=${NV_NVTX_VERSION} libcusparse-12-9=${NV_LIBCUSPARSE_VERSION} ${NV_LIBCUBLAS_PACKAGE} ${NV_LIBNCCL_PACKAGE} && rm -rf /var/lib/apt/lists/* # buildkit |
RUN |1 TARGETARCH=amd64 /bin/sh -c apt-mark hold ${NV_LIBCUBLAS_PACKAGE_NAME} ${NV_LIBNCCL_PACKAGE_NAME} # buildkit |
COPY entrypoint.d/ /opt/nvidia/entrypoint.d/ # buildkit |
COPY nvidia_entrypoint.sh /opt/nvidia/ # buildkit |
ENV NVIDIA_PRODUCT_NAME=CUDA |
ENTRYPOINT ["/opt/nvidia/nvidia_entrypoint.sh"] |
ENV NV_CUDA_LIB_VERSION=12.9.1-1 |
ENV NV_CUDA_CUDART_DEV_VERSION=12.9.79-1 |
ENV NV_NVML_DEV_VERSION=12.9.79-1 |
ENV NV_LIBCUSPARSE_DEV_VERSION=12.5.10.65-1 |
ENV NV_LIBNPP_DEV_VERSION=12.4.1.87-1 |
ENV NV_LIBNPP_DEV_PACKAGE=libnpp-dev-12-9=12.4.1.87-1 |
ENV NV_LIBCUBLAS_DEV_VERSION=12.9.1.4-1 |
ENV NV_LIBCUBLAS_DEV_PACKAGE_NAME=libcublas-dev-12-9 |
ENV NV_LIBCUBLAS_DEV_PACKAGE=libcublas-dev-12-9=12.9.1.4-1 |
ENV NV_CUDA_NSIGHT_COMPUTE_VERSION=12.9.1-1 |
ENV NV_CUDA_NSIGHT_COMPUTE_DEV_PACKAGE=cuda-nsight-compute-12-9=12.9.1-1 |
ENV NV_NVPROF_VERSION=12.9.79-1 |
ENV NV_NVPROF_DEV_PACKAGE=cuda-nvprof-12-9=12.9.79-1 |
ENV NV_LIBNCCL_DEV_PACKAGE_NAME=libnccl-dev |
ENV NV_LIBNCCL_DEV_PACKAGE_VERSION=2.27.3-1 |
ENV NCCL_VERSION=2.27.3-1 |
ENV NV_LIBNCCL_DEV_PACKAGE=libnccl-dev=2.27.3-1+cuda12.9 |
ARG TARGETARCH |
LABEL maintainer=NVIDIA CORPORATION <cudatools@nvidia.com> |
RUN |1 TARGETARCH=amd64 /bin/sh -c apt-get update && apt-get install -y --no-install-recommends cuda-cudart-dev-12-9=${NV_CUDA_CUDART_DEV_VERSION} cuda-command-line-tools-12-9=${NV_CUDA_LIB_VERSION} cuda-minimal-build-12-9=${NV_CUDA_LIB_VERSION} cuda-libraries-dev-12-9=${NV_CUDA_LIB_VERSION} cuda-nvml-dev-12-9=${NV_NVML_DEV_VERSION} ${NV_NVPROF_DEV_PACKAGE} ${NV_LIBNPP_DEV_PACKAGE} libcusparse-dev-12-9=${NV_LIBCUSPARSE_DEV_VERSION} ${NV_LIBCUBLAS_DEV_PACKAGE} ${NV_LIBNCCL_DEV_PACKAGE} ${NV_CUDA_NSIGHT_COMPUTE_DEV_PACKAGE} && rm -rf /var/lib/apt/lists/* # buildkit |
RUN |1 TARGETARCH=amd64 /bin/sh -c apt-mark hold ${NV_LIBCUBLAS_DEV_PACKAGE_NAME} ${NV_LIBNCCL_DEV_PACKAGE_NAME} # buildkit |
ENV LIBRARY_PATH=/usr/local/cuda/lib64/stubs |
ENV DEBIAN_FRONTEND=noninteractive |
RUN /bin/sh -c apt-get update && apt-get install -y --no-install-recommends python3 python3-pip python3-venv python3-dev build-essential curl git ca-certificates && rm -rf /var/lib/apt/lists/* # buildkit |
RUN /bin/sh -c python3 -m pip install --no-cache-dir --break-system-packages uv # buildkit |
ARG USER=app |
ARG UID=1001 |
RUN |2 USER=app UID=1001 /bin/sh -c useradd -m -u ${UID} ${USER} # buildkit |
USER app |
WORKDIR /app |
RUN |2 USER=app UID=1001 /bin/sh -c uv venv --python 3.12 --seed && . .venv/bin/activate && uv pip install vllm==0.10.0 --torch-backend=auto # buildkit |
EXPOSE 8000/tcp |
ENV PATH=/app/.venv/bin:/usr/local/cuda/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin |
ENTRYPOINT ["python", "-m", "vllm.entrypoints.openai.api_server"] |
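Per the final layers, vLLM 0.10.0 is installed into a uv-managed virtualenv at /app/.venv (placed first on PATH), the image runs as the unprivileged app user, and the entrypoint is the OpenAI-compatible API server. Once a container is up, a quick smoke test of the standard endpoints might look like the following; the model id is an assumed example and must match whatever the server was started with:
# list the models the server is serving
curl http://localhost:8000/v1/models
# request a short completion (replace "model" with the served model name)
curl http://localhost:8000/v1/completions \
  -H "Content-Type: application/json" \
  -d '{"model": "Qwen/Qwen2.5-1.5B-Instruct", "prompt": "Hello", "max_tokens": 16}'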
Labels
Key | Value |
---|---|
maintainer | NVIDIA CORPORATION <cudatools@nvidia.com> |
org.opencontainers.image.ref.name | ubuntu |
org.opencontainers.image.version | 24.04 |
Details
Key | Value |
---|---|
Published | 2025-08-08 05:34:49 +00:00 |
Type | Container |
Downloads | 0 |
Format | OCI / Docker |
Platform | linux/amd64 |
Size | 9.9 GiB |
Versions (5)
Tag | Published |
---|---|
latest | 2025-08-08 |
2025-08-08-fc8b89 | 2025-08-08 |
2025-06-08-3123ba | 2025-06-08 |
2025-06-08-0e7123 | 2025-06-08 |
nightly | 2025-06-08 |
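To pin a deployment to a specific build instead of the moving latest tag, the dated tags above or the manifest digest from the Installation section can be used:
# pull a specific dated build
docker pull git.ericxliu.me/eric/vllm:2025-08-08-fc8b89
# or pull by the exact manifest digest
docker pull git.ericxliu.me/eric/vllm@sha256:1af98ce481e3eee77a9bc9e30483427bc169e75246281c94c346b005cece835e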