coder-image-cuda (latest)
Published 2025-09-19 06:42:36 +00:00 by eric
Installation
docker pull git.ericxliu.me/eric/coder-image-cuda:latest

Digest: sha256:2d20eed6b7c46a2429fa101485c578934e7402574b36c9fa0f28bc1cfd738f1f
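The layers below set NVIDIA_VISIBLE_DEVICES=all and NVIDIA_DRIVER_CAPABILITIES=compute,utility, so the image expects GPU access at runtime. A minimal sketch of starting an interactive shell from this image, assuming the host has the NVIDIA Container Toolkit installed (the --gpus flag requires it):

```
# Start an interactive shell as the "coder" user with all host GPUs exposed.
# Narrow the selection with e.g. --gpus '"device=0"' if needed.
docker run --rm -it --gpus all \
  git.ericxliu.me/eric/coder-image-cuda:latest \
  /bin/bash
```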
Image Layers

| ARG RELEASE |
| ARG LAUNCHPAD_BUILD_ARCH |
| LABEL org.opencontainers.image.ref.name=ubuntu |
| LABEL org.opencontainers.image.version=24.04 |
| ADD file:b4619a63cd7829e1338ddaa4995ca17003002dd54b0dfd675a6f54a2b69151a6 in / |
| CMD ["/bin/bash"] |
| ENV NVARCH=x86_64 |
| ENV NVIDIA_REQUIRE_CUDA=cuda>=12.9 brand=unknown,driver>=535,driver<536 brand=grid,driver>=535,driver<536 brand=tesla,driver>=535,driver<536 brand=nvidia,driver>=535,driver<536 brand=quadro,driver>=535,driver<536 brand=quadrortx,driver>=535,driver<536 brand=nvidiartx,driver>=535,driver<536 brand=vapps,driver>=535,driver<536 brand=vpc,driver>=535,driver<536 brand=vcs,driver>=535,driver<536 brand=vws,driver>=535,driver<536 brand=cloudgaming,driver>=535,driver<536 brand=unknown,driver>=550,driver<551 brand=grid,driver>=550,driver<551 brand=tesla,driver>=550,driver<551 brand=nvidia,driver>=550,driver<551 brand=quadro,driver>=550,driver<551 brand=quadrortx,driver>=550,driver<551 brand=nvidiartx,driver>=550,driver<551 brand=vapps,driver>=550,driver<551 brand=vpc,driver>=550,driver<551 brand=vcs,driver>=550,driver<551 brand=vws,driver>=550,driver<551 brand=cloudgaming,driver>=550,driver<551 brand=unknown,driver>=560,driver<561 brand=grid,driver>=560,driver<561 brand=tesla,driver>=560,driver<561 brand=nvidia,driver>=560,driver<561 brand=quadro,driver>=560,driver<561 brand=quadrortx,driver>=560,driver<561 brand=nvidiartx,driver>=560,driver<561 brand=vapps,driver>=560,driver<561 brand=vpc,driver>=560,driver<561 brand=vcs,driver>=560,driver<561 brand=vws,driver>=560,driver<561 brand=cloudgaming,driver>=560,driver<561 brand=unknown,driver>=565,driver<566 brand=grid,driver>=565,driver<566 brand=tesla,driver>=565,driver<566 brand=nvidia,driver>=565,driver<566 brand=quadro,driver>=565,driver<566 brand=quadrortx,driver>=565,driver<566 brand=nvidiartx,driver>=565,driver<566 brand=vapps,driver>=565,driver<566 brand=vpc,driver>=565,driver<566 brand=vcs,driver>=565,driver<566 brand=vws,driver>=565,driver<566 brand=cloudgaming,driver>=565,driver<566 brand=unknown,driver>=570,driver<571 brand=grid,driver>=570,driver<571 brand=tesla,driver>=570,driver<571 brand=nvidia,driver>=570,driver<571 brand=quadro,driver>=570,driver<571 brand=quadrortx,driver>=570,driver<571 brand=nvidiartx,driver>=570,driver<571 brand=vapps,driver>=570,driver<571 brand=vpc,driver>=570,driver<571 brand=vcs,driver>=570,driver<571 brand=vws,driver>=570,driver<571 brand=cloudgaming,driver>=570,driver<571 |
| ENV NV_CUDA_CUDART_VERSION=12.9.79-1 |
| ARG TARGETARCH |
| LABEL maintainer=NVIDIA CORPORATION <cudatools@nvidia.com> |
| RUN |1 TARGETARCH=amd64 /bin/sh -c apt-get update && apt-get install -y --no-install-recommends gnupg2 curl ca-certificates && curl -fsSL https://developer.download.nvidia.com/compute/cuda/repos/ubuntu2404/${NVARCH}/3bf863cc.pub | apt-key add - && echo "deb https://developer.download.nvidia.com/compute/cuda/repos/ubuntu2404/${NVARCH} /" > /etc/apt/sources.list.d/cuda.list && apt-get purge --autoremove -y curl && rm -rf /var/lib/apt/lists/* # buildkit |
| ENV CUDA_VERSION=12.9.1 |
| RUN |1 TARGETARCH=amd64 /bin/sh -c apt-get update && apt-get install -y --no-install-recommends cuda-cudart-12-9=${NV_CUDA_CUDART_VERSION} cuda-compat-12-9 && rm -rf /var/lib/apt/lists/* # buildkit |
| RUN |1 TARGETARCH=amd64 /bin/sh -c echo "/usr/local/cuda/lib64" >> /etc/ld.so.conf.d/nvidia.conf # buildkit |
| ENV PATH=/usr/local/cuda/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin |
| ENV LD_LIBRARY_PATH=/usr/local/cuda/lib64 |
| COPY NGC-DL-CONTAINER-LICENSE / # buildkit |
| ENV NVIDIA_VISIBLE_DEVICES=all |
| ENV NVIDIA_DRIVER_CAPABILITIES=compute,utility |
| ENV NV_CUDA_LIB_VERSION=12.9.1-1 |
| ENV NV_NVTX_VERSION=12.9.79-1 |
| ENV NV_LIBNPP_VERSION=12.4.1.87-1 |
| ENV NV_LIBNPP_PACKAGE=libnpp-12-9=12.4.1.87-1 |
| ENV NV_LIBCUSPARSE_VERSION=12.5.10.65-1 |
| ENV NV_LIBCUBLAS_PACKAGE_NAME=libcublas-12-9 |
| ENV NV_LIBCUBLAS_VERSION=12.9.1.4-1 |
| ENV NV_LIBCUBLAS_PACKAGE=libcublas-12-9=12.9.1.4-1 |
| ENV NV_LIBNCCL_PACKAGE_NAME=libnccl2 |
| ENV NV_LIBNCCL_PACKAGE_VERSION=2.27.3-1 |
| ENV NCCL_VERSION=2.27.3-1 |
| ENV NV_LIBNCCL_PACKAGE=libnccl2=2.27.3-1+cuda12.9 |
| ARG TARGETARCH |
| LABEL maintainer=NVIDIA CORPORATION <cudatools@nvidia.com> |
| RUN |1 TARGETARCH=amd64 /bin/sh -c apt-get update && apt-get install -y --no-install-recommends cuda-libraries-12-9=${NV_CUDA_LIB_VERSION} ${NV_LIBNPP_PACKAGE} cuda-nvtx-12-9=${NV_NVTX_VERSION} libcusparse-12-9=${NV_LIBCUSPARSE_VERSION} ${NV_LIBCUBLAS_PACKAGE} ${NV_LIBNCCL_PACKAGE} && rm -rf /var/lib/apt/lists/* # buildkit |
| RUN |1 TARGETARCH=amd64 /bin/sh -c apt-mark hold ${NV_LIBCUBLAS_PACKAGE_NAME} ${NV_LIBNCCL_PACKAGE_NAME} # buildkit |
| COPY entrypoint.d/ /opt/nvidia/entrypoint.d/ # buildkit |
| COPY nvidia_entrypoint.sh /opt/nvidia/ # buildkit |
| ENV NVIDIA_PRODUCT_NAME=CUDA |
| ENTRYPOINT ["/opt/nvidia/nvidia_entrypoint.sh"] |
| ENV NV_CUDA_LIB_VERSION=12.9.1-1 |
| ENV NV_CUDA_CUDART_DEV_VERSION=12.9.79-1 |
| ENV NV_NVML_DEV_VERSION=12.9.79-1 |
| ENV NV_LIBCUSPARSE_DEV_VERSION=12.5.10.65-1 |
| ENV NV_LIBNPP_DEV_VERSION=12.4.1.87-1 |
| ENV NV_LIBNPP_DEV_PACKAGE=libnpp-dev-12-9=12.4.1.87-1 |
| ENV NV_LIBCUBLAS_DEV_VERSION=12.9.1.4-1 |
| ENV NV_LIBCUBLAS_DEV_PACKAGE_NAME=libcublas-dev-12-9 |
| ENV NV_LIBCUBLAS_DEV_PACKAGE=libcublas-dev-12-9=12.9.1.4-1 |
| ENV NV_CUDA_NSIGHT_COMPUTE_VERSION=12.9.1-1 |
| ENV NV_CUDA_NSIGHT_COMPUTE_DEV_PACKAGE=cuda-nsight-compute-12-9=12.9.1-1 |
| ENV NV_NVPROF_VERSION=12.9.79-1 |
| ENV NV_NVPROF_DEV_PACKAGE=cuda-nvprof-12-9=12.9.79-1 |
| ENV NV_LIBNCCL_DEV_PACKAGE_NAME=libnccl-dev |
| ENV NV_LIBNCCL_DEV_PACKAGE_VERSION=2.27.3-1 |
| ENV NCCL_VERSION=2.27.3-1 |
| ENV NV_LIBNCCL_DEV_PACKAGE=libnccl-dev=2.27.3-1+cuda12.9 |
| ARG TARGETARCH |
| LABEL maintainer=NVIDIA CORPORATION <cudatools@nvidia.com> |
| RUN |1 TARGETARCH=amd64 /bin/sh -c apt-get update && apt-get install -y --no-install-recommends cuda-cudart-dev-12-9=${NV_CUDA_CUDART_DEV_VERSION} cuda-command-line-tools-12-9=${NV_CUDA_LIB_VERSION} cuda-minimal-build-12-9=${NV_CUDA_LIB_VERSION} cuda-libraries-dev-12-9=${NV_CUDA_LIB_VERSION} cuda-nvml-dev-12-9=${NV_NVML_DEV_VERSION} ${NV_NVPROF_DEV_PACKAGE} ${NV_LIBNPP_DEV_PACKAGE} libcusparse-dev-12-9=${NV_LIBCUSPARSE_DEV_VERSION} ${NV_LIBCUBLAS_DEV_PACKAGE} ${NV_LIBNCCL_DEV_PACKAGE} ${NV_CUDA_NSIGHT_COMPUTE_DEV_PACKAGE} && rm -rf /var/lib/apt/lists/* # buildkit |
| RUN |1 TARGETARCH=amd64 /bin/sh -c apt-mark hold ${NV_LIBCUBLAS_DEV_PACKAGE_NAME} ${NV_LIBNCCL_DEV_PACKAGE_NAME} # buildkit |
| ENV LIBRARY_PATH=/usr/local/cuda/lib64/stubs |
| ENV NV_CUDNN_VERSION=9.10.2.21-1 |
| ENV NV_CUDNN_PACKAGE_NAME=libcudnn9-cuda-12 |
| ENV NV_CUDNN_PACKAGE=libcudnn9-cuda-12=9.10.2.21-1 |
| ENV NV_CUDNN_PACKAGE_DEV=libcudnn9-dev-cuda-12=9.10.2.21-1 |
| ENV NV_CUDNN_PACKAGE_DEV_HEADERS=libcudnn9-headers-cuda-12=9.10.2.21-1 |
| ARG TARGETARCH |
| LABEL maintainer=NVIDIA CORPORATION <cudatools@nvidia.com> |
| LABEL com.nvidia.cudnn.version=9.10.2.21-1 |
| RUN |1 TARGETARCH=amd64 /bin/sh -c apt-get update && apt-get install -y --no-install-recommends ${NV_CUDNN_PACKAGE} ${NV_CUDNN_PACKAGE_DEV} ${NV_CUDNN_PACKAGE_DEV_HEADERS} && apt-mark hold ${NV_CUDNN_PACKAGE_NAME} && rm -rf /var/lib/apt/lists/* # buildkit |
| SHELL [/bin/bash -c] |
| ENV DEBIAN_FRONTEND=noninteractive |
| USER root |
| RUN /bin/bash -c apt-get update && apt-get install -y --no-install-recommends gnupg software-properties-common curl sudo ca-certificates # buildkit |
| RUN /bin/bash -c curl -fsSL https://download.docker.com/linux/ubuntu/gpg | gpg --dearmor -o /usr/share/keyrings/docker-archive-keyring.gpg && echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable" | tee /etc/apt/sources.list.d/docker.list > /dev/null # buildkit |
| RUN /bin/bash -c if id -u 1000 >/dev/null 2>&1; then existing_user=$(id -un 1000) && usermod -l coder -d /home/coder -m $existing_user && groupmod -n coder $existing_user; else useradd --create-home --shell /bin/bash --uid 1000 --user-group coder; fi && echo "coder ALL=(ALL) NOPASSWD:ALL" > /etc/sudoers.d/coder-nopasswd && chmod 0440 /etc/sudoers.d/coder-nopasswd # buildkit |
| RUN /bin/bash -c apt-get update && apt-get install --yes --no-install-recommends --no-install-suggests git-lfs locales zsh stow tmux neovim htop pipx cmake ffmpeg openssh-client texlive-full docker-ce docker-ce-cli containerd.io docker-buildx-plugin docker-compose-plugin # buildkit |
| RUN /bin/bash -c rm -rf /var/lib/apt/lists/* && locale-gen en_US.UTF-8 && update-locale LANG=en_US.UTF-8 # buildkit |
| ENV LANG=en_US.UTF-8 LANGUAGE=en_US:en LC_ALL=en_US.UTF-8 |
| USER coder |
| WORKDIR /home/coder |
| COPY init-coder-env.sh /usr/local/bin/init-coder-env # buildkit |
| RUN /bin/bash -c pipx ensurepath # buildkit |
| CMD ["/bin/bash"] |
Labels
| Key | Value |
|---|---|
| com.nvidia.cudnn.version | 9.10.2.21-1 |
| maintainer | NVIDIA CORPORATION <cudatools@nvidia.com> |
| org.opencontainers.image.ref.name | ubuntu |
| org.opencontainers.image.version | 24.04 |
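These labels can be read back from the pulled image with docker inspect, for example to check the pinned cuDNN version (a sketch using docker inspect's Go template syntax):

```
docker inspect --format '{{ index .Config.Labels "com.nvidia.cudnn.version" }}' \
  git.ericxliu.me/eric/coder-image-cuda:latest
# -> 9.10.2.21-1
```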
Details

| Field | Value |
|---|---|
| Published | 2025-09-19 06:42:36 +00:00 |
| Type | Container (OCI / Docker) |
| Platform | linux/amd64 |
| Size | 8.4 GiB |
| Downloads | 0 |

Versions (5)

| Version | Published |
|---|---|
| 2025-09-19-56114f | 2025-09-19 |
| latest | 2025-09-19 |
| 2025-09-19-a91c42 | 2025-09-19 |
| 2025-09-19-2fe39d | 2025-09-19 |
| 2025-09-19-448972 | 2025-09-19 |
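The latest tag moves as new builds are published; for reproducible workspaces it may be preferable to pin one of the dated tags above or the digest from the Installation section, for example:

```
# Pin a dated build tag ...
docker pull git.ericxliu.me/eric/coder-image-cuda:2025-09-19-56114f
# ... or pin the exact content digest of the current "latest".
docker pull git.ericxliu.me/eric/coder-image-cuda@sha256:2d20eed6b7c46a2429fa101485c578934e7402574b36c9fa0f28bc1cfd738f1f
```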