Namespace
vastai
Image / Tag
base-image:cuda-12.4.1-auto
Content Digest
sha256:ae4007c76e74197a785b86e22a48d97855aa3f7faada84f0572e4994c52b5562
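
For local inspection, the tag can be pulled by name or pinned to this exact manifest by digest. A minimal sketch, assuming the image is published on Docker Hub under the vastai namespace shown above (adjust the registry host if it lives elsewhere):

  docker pull vastai/base-image:cuda-12.4.1-auto
  # pin to the exact content digest listed above
  docker pull vastai/base-image@sha256:ae4007c76e74197a785b86e22a48d97855aa3f7faada84f0572e4994c52b5562
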
Details (linux/amd64 manifest)
Created

2026-01-01 02:05:09 UTC

Size

5.59 GB

Content Digest
Labels
  • com.nvidia.cudnn.version
    9.1.0.70-1
  • maintainer
    Vast.ai Inc <contact@vast.ai>
  • org.opencontainers.image.description
    Base image suitable for Vast.ai.
  • org.opencontainers.image.ref.name
    ubuntu
  • org.opencontainers.image.source
    https://github.com/vastai/
  • org.opencontainers.image.version
    22.04
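
These labels travel with the image and can be read back without starting a container; a quick check with the Docker CLI, assuming the same vastai/base-image name:

  docker inspect --format '{{ index .Config.Labels "com.nvidia.cudnn.version" }}' \
    vastai/base-image:cuda-12.4.1-auto    # prints 9.1.0.70-1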

Environment
CUDA_VERSION

12.4.1

DATA_DIRECTORY

/workspace

DEBIAN_FRONTEND

noninteractive

LD_LIBRARY_PATH

/usr/local/nvidia/lib:/usr/local/nvidia/lib64

LIBRARY_PATH

/usr/local/cuda/lib64/stubs

NCCL_VERSION

2.21.5-1

NVARCH

x86_64

NVIDIA_DRIVER_CAPABILITIES

all

NVIDIA_PRODUCT_NAME

CUDA

NVIDIA_REQUIRE_CUDA

cuda>=12.4 brand=tesla,driver>=470,driver<471 brand=unknown,driver>=470,driver<471 brand=nvidia,driver>=470,driver<471 brand=nvidiartx,driver>=470,driver<471 brand=geforce,driver>=470,driver<471 brand=geforcertx,driver>=470,driver<471 brand=quadro,driver>=470,driver<471 brand=quadrortx,driver>=470,driver<471 brand=titan,driver>=470,driver<471 brand=titanrtx,driver>=470,driver<471 brand=tesla,driver>=525,driver<526 brand=unknown,driver>=525,driver<526 brand=nvidia,driver>=525,driver<526 brand=nvidiartx,driver>=525,driver<526 brand=geforce,driver>=525,driver<526 brand=geforcertx,driver>=525,driver<526 brand=quadro,driver>=525,driver<526 brand=quadrortx,driver>=525,driver<526 brand=titan,driver>=525,driver<526 brand=titanrtx,driver>=525,driver<526 brand=tesla,driver>=535,driver<536 brand=unknown,driver>=535,driver<536 brand=nvidia,driver>=535,driver<536 brand=nvidiartx,driver>=535,driver<536 brand=geforce,driver>=535,driver<536 brand=geforcertx,driver>=535,driver<536 brand=quadro,driver>=535,driver<536 brand=quadrortx,driver>=535,driver<536 brand=titan,driver>=535,driver<536 brand=titanrtx,driver>=535,driver<536
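
NVIDIA_REQUIRE_CUDA is evaluated by the NVIDIA container runtime: clauses separated by spaces are alternatives, and comma-separated terms within a clause must all hold. The string above therefore accepts any driver stack that reports CUDA 12.4 or newer, or that falls inside one of the listed 470, 525, or 535 driver branches. A rough host-side check against the 535 clause (a sketch only; the real matching is done by the runtime, and nvidia-smi plus dpkg are assumed to be present):

  driver=$(nvidia-smi --query-gpu=driver_version --format=csv,noheader | head -n1)
  if dpkg --compare-versions "$driver" ge 535 && dpkg --compare-versions "$driver" lt 536; then
    echo "driver $driver satisfies the driver>=535,driver<536 clause"
  fi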

NVIDIA_VISIBLE_DEVICES

all

NV_CUDA_COMPAT_PACKAGE

cuda-compat-12-4

NV_CUDA_CUDART_DEV_VERSION

12.4.127-1

NV_CUDA_CUDART_VERSION

12.4.127-1

NV_CUDA_LIB_VERSION

12.4.1-1

NV_CUDA_NSIGHT_COMPUTE_DEV_PACKAGE

cuda-nsight-compute-12-4=12.4.1-1

NV_CUDA_NSIGHT_COMPUTE_VERSION

12.4.1-1

NV_CUDNN_PACKAGE

libcudnn9-cuda-12=9.1.0.70-1

NV_CUDNN_PACKAGE_DEV

libcudnn9-dev-cuda-12=9.1.0.70-1

NV_CUDNN_PACKAGE_NAME

libcudnn9-cuda-12

NV_CUDNN_VERSION

9.1.0.70-1

NV_LIBCUBLAS_DEV_PACKAGE

libcublas-dev-12-4=12.4.5.8-1

NV_LIBCUBLAS_DEV_PACKAGE_NAME

libcublas-dev-12-4

NV_LIBCUBLAS_DEV_VERSION

12.4.5.8-1

NV_LIBCUBLAS_PACKAGE

libcublas-12-4=12.4.5.8-1

NV_LIBCUBLAS_PACKAGE_NAME

libcublas-12-4

NV_LIBCUBLAS_VERSION

12.4.5.8-1

NV_LIBCUSPARSE_DEV_VERSION

12.3.1.170-1

NV_LIBCUSPARSE_VERSION

12.3.1.170-1

NV_LIBNCCL_DEV_PACKAGE

libnccl-dev=2.21.5-1+cuda12.4

NV_LIBNCCL_DEV_PACKAGE_NAME

libnccl-dev

NV_LIBNCCL_DEV_PACKAGE_VERSION

2.21.5-1

NV_LIBNCCL_PACKAGE

libnccl2=2.21.5-1+cuda12.4

NV_LIBNCCL_PACKAGE_NAME

libnccl2

NV_LIBNCCL_PACKAGE_VERSION

2.21.5-1

NV_LIBNPP_DEV_PACKAGE

libnpp-dev-12-4=12.2.5.30-1

NV_LIBNPP_DEV_VERSION

12.2.5.30-1

NV_LIBNPP_PACKAGE

libnpp-12-4=12.2.5.30-1

NV_LIBNPP_VERSION

12.2.5.30-1

NV_NVML_DEV_VERSION

12.4.127-1

NV_NVPROF_DEV_PACKAGE

cuda-nvprof-12-4=12.4.127-1

NV_NVPROF_VERSION

12.4.127-1

NV_NVTX_VERSION

12.4.127-1

PATH

/opt/instance-tools/bin:/usr/local/nvidia/bin:/usr/local/cuda/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin

PIP_BREAK_SYSTEM_PACKAGES

1

PYTHONUNBUFFERED

1

PYTHON_VERSION

3.10

UV_CACHE_DIR

/.uv/cache

UV_LINK_MODE

copy

UV_NO_CACHE

1

UV_PYTHON_BIN_DIR

/.uv/python_bin

UV_PYTHON_INSTALL_DIR

/.uv/python_install

WORKSPACE

/workspace
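
On Vast.ai the platform launches this image through its own entrypoint (/opt/instance-tools/bin/entrypoint.sh, see History below), but the toolchain and the /workspace data directory can be smoke-tested directly. A sketch, assuming a local NVIDIA driver and the same Docker Hub image name; the entrypoint is bypassed so that the instance tooling does not start:

  docker run --rm --gpus all \
    -v "$PWD/data:/workspace" \
    --entrypoint /bin/bash \
    vastai/base-image:cuda-12.4.1-auto \
    -c 'nvcc --version && python3 --version && echo "$DATA_DIRECTORY"'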


Layers

[#000] sha256:3c645031de2917ade93ec54b118d5d3e45de72ef580b8f419a8cdc41e01d042c - 0.49% (28.2 MB)

[#001] sha256:0d6448aff88945ea46a37cfe4330bdb0ada228268b80da6258a0fec63086f404 - 0.08% (4.41 MB)

[#002] sha256:0a7674e3e8fe69dcd7f1424fa29aa033b32c42269aab46cbe9818f8dd7154754 - 0.96% (54.9 MB)

[#003] sha256:b71b637b97c5efb435b9965058ad414f07afa99d320cf05e89f10441ec1becf4 - 0.0% (185 Bytes)

[#004] sha256:56dc8550293751a1604e97ac949cfae82ba20cb2a28e034737bafd7382559609 - 0.0% (6.72 KB)

[#005] sha256:ec6d5f6c9ed94d2ee2eeaf048d90242af638325f57696909f1737b3158d838cf - 22.91% (1.28 GB)

[#006] sha256:47b8539d532f561cac6d7fb8ee2f46c902b66e4a60b103d19701829742a0d11e - 0.0% (62.5 KB)

[#007] sha256:fd9cc1ad8dee47ca559003714d462f4eb79cb6315a2708927c240b84d022b55f - 0.0% (1.64 KB)

[#008] sha256:83525caeeb359731f869f1ee87a32acdfdd5efb8af4cab06d8f4fdcf1f317daa - 0.0% (1.49 KB)

[#009] sha256:8e79813a7b9d5784bb880ca2909887465549de5183411b24f6de72fab0802bcd - 44.12% (2.46 GB)

[#010] sha256:312a542960e3345001fc709156a5139ff8a1d8cc21a51a50f83e87ec2982f579 - 0.0% (86.8 KB)

[#011] sha256:ae033ce9621d2cceaef2769ead17429ae8b29f098fb0350bdd4e0f55a36996db - 11.17% (639 MB)

[#012] sha256:2a9ec1d7587837e5ae5902a9153e535b30057109754902161d2024b87a8ea6f6 - 0.0% (1.53 KB)

[#013] sha256:fff05a8ab693151fbb9175f80415b53fc842d12663594c0d01568191c9e4416b - 0.0% (15.3 KB)

[#014] sha256:4f4fb700ef54461cfa02571ae0db9a0dc1e0cdb5577484a6d75e68dc38e8acc1 - 0.0% (32 Bytes)

[#015] sha256:dc127ae2f3c895635f1d837fd08a3e0b4f5320a97ec2be906cb5172a5e1af592 - 2.94% (168 MB)

[#016] sha256:784879b07baeb0f6f566e1e82e476db346122d2369bfc1c9613c1ee6998cbecf - 6.97% (399 MB)

[#017] sha256:4f4fb700ef54461cfa02571ae0db9a0dc1e0cdb5577484a6d75e68dc38e8acc1 - 0.0% (32 Bytes)

[#018] sha256:ff1a250e1e634be3cce13fb4906ada5177ae4242fdc0e25dc31d1aa2ee0be3c2 - 0.0% (3.99 KB)

[#019] sha256:2b3afbec9075c20f721c00608cbc6ce3bb89dce5f99586fba4f0b1a3d41e4e5b - 0.38% (21.7 MB)

[#020] sha256:50e1fcec0609d86cabc55690c41637a74c7466ee1b535f8b8a9c88a527ad25f5 - 1.56% (89.1 MB)

[#021] sha256:1ad830965da3889429634a59adc659930cf229db2dd614180e2d9ed13e9378a3 - 0.0% (41.2 KB)

[#022] sha256:a68b6693a66bddb3ec0b90760e9704fe73c5425cb6d60d3758cf333176225894 - 0.24% (13.7 MB)

[#023] sha256:eb3e1b45a36d7a1bb51558a36b8a0cb3661a854f0d10c90f13722f215be753e4 - 1.43% (82 MB)

[#024] sha256:dc5045964d6c0355d287d5d8a93026b99f509aab8b88d8cd25bf0d41b310acc1 - 2.56% (146 MB)

[#025] sha256:ee5ab5b3d0de18ae0ee19cbb4103bd57e2733a39a745e61233b4f0f453088876 - 0.24% (13.6 MB)

[#026] sha256:2544eac01800d8aad5325aabac8a92cde1cb07df853de1ac8cda067c901420b6 - 3.63% (207 MB)

[#027] sha256:7acf053bb0120d52213d06e13381ca9a30f527b4e652fc13d2717dd0eefcf0bd - 0.0% (632 Bytes)

[#028] sha256:8f7d0a930f83f89fa1af2e2adf5b24b57ad6b3da7205d815e2c2645bef2fafe6 - 0.32% (18.4 MB)

[#029] sha256:16a1d158c46f26f3bf48c27abc57c43b0a32e5c932f4fe4c9c7281a952efd823 - 0.0% (117 Bytes)

[#030] sha256:0f899c068e83f5e62f2b7ef514c9293e191006f9b195f13dabad5e9aecd08387 - 0.0% (15.8 KB)

[#031] sha256:bfd83e6305f8f1eb88a19a58b4535488781c6d917f9eeefec14cf8fbe027c69e - 0.0% (15.7 KB)


History
2024-04-10 18:52:02 UTC

/bin/sh -c #(nop) ARG RELEASE

2024-04-10 18:52:02 UTC

/bin/sh -c #(nop) ARG LAUNCHPAD_BUILD_ARCH

2024-04-10 18:52:02 UTC

/bin/sh -c #(nop) LABEL org.opencontainers.image.ref.name=ubuntu

2024-04-10 18:52:02 UTC

/bin/sh -c #(nop) LABEL org.opencontainers.image.version=22.04

2024-04-10 18:52:04 UTC

/bin/sh -c #(nop) ADD file:3bd10da0673e2e72cb06a1f64a9df49a36341df39b0f762e3d1b38ee4de296fa in /

2024-04-10 18:52:04 UTC

/bin/sh -c #(nop) CMD ["/bin/bash"]

2024-04-22 23:42:16 UTC (buildkit.dockerfile.v0)

ENV NVARCH=x86_64

2024-04-22 23:42:16 UTC (buildkit.dockerfile.v0)

ENV NVIDIA_REQUIRE_CUDA=cuda>=12.4 brand=tesla,driver>=470,driver<471 brand=unknown,driver>=470,driver<471 brand=nvidia,driver>=470,driver<471 brand=nvidiartx,driver>=470,driver<471 brand=geforce,driver>=470,driver<471 brand=geforcertx,driver>=470,driver<471 brand=quadro,driver>=470,driver<471 brand=quadrortx,driver>=470,driver<471 brand=titan,driver>=470,driver<471 brand=titanrtx,driver>=470,driver<471 brand=tesla,driver>=525,driver<526 brand=unknown,driver>=525,driver<526 brand=nvidia,driver>=525,driver<526 brand=nvidiartx,driver>=525,driver<526 brand=geforce,driver>=525,driver<526 brand=geforcertx,driver>=525,driver<526 brand=quadro,driver>=525,driver<526 brand=quadrortx,driver>=525,driver<526 brand=titan,driver>=525,driver<526 brand=titanrtx,driver>=525,driver<526 brand=tesla,driver>=535,driver<536 brand=unknown,driver>=535,driver<536 brand=nvidia,driver>=535,driver<536 brand=nvidiartx,driver>=535,driver<536 brand=geforce,driver>=535,driver<536 brand=geforcertx,driver>=535,driver<536 brand=quadro,driver>=535,driver<536 brand=quadrortx,driver>=535,driver<536 brand=titan,driver>=535,driver<536 brand=titanrtx,driver>=535,driver<536

2024-04-22 23:42:16 UTC (buildkit.dockerfile.v0)

ENV NV_CUDA_CUDART_VERSION=12.4.127-1

2024-04-22 23:42:16 UTC (buildkit.dockerfile.v0)

ENV NV_CUDA_COMPAT_PACKAGE=cuda-compat-12-4

2024-04-22 23:42:16 UTC (buildkit.dockerfile.v0)

ARG TARGETARCH

2024-04-22 23:42:16 UTC (buildkit.dockerfile.v0)

LABEL maintainer=NVIDIA CORPORATION <cudatools@nvidia.com>

2024-04-22 23:42:16 UTC (buildkit.dockerfile.v0)

RUN |1 TARGETARCH=amd64 /bin/sh -c apt-get update && apt-get install -y --no-install-recommends gnupg2 curl ca-certificates && curl -fsSLO https://developer.download.nvidia.com/compute/cuda/repos/ubuntu2204/${NVARCH}/cuda-keyring_1.1-1_all.deb && dpkg -i cuda-keyring_1.1-1_all.deb && apt-get purge --autoremove -y curl && rm -rf /var/lib/apt/lists/* # buildkit

2024-04-22 23:42:16 UTC (buildkit.dockerfile.v0)

ENV CUDA_VERSION=12.4.1

2024-04-22 23:42:28 UTC (buildkit.dockerfile.v0)

RUN |1 TARGETARCH=amd64 /bin/sh -c apt-get update && apt-get install -y --no-install-recommends cuda-cudart-12-4=${NV_CUDA_CUDART_VERSION} ${NV_CUDA_COMPAT_PACKAGE} && rm -rf /var/lib/apt/lists/* # buildkit

2024-04-22 23:42:28 UTC (buildkit.dockerfile.v0)

RUN |1 TARGETARCH=amd64 /bin/sh -c echo "/usr/local/nvidia/lib" >> /etc/ld.so.conf.d/nvidia.conf && echo "/usr/local/nvidia/lib64" >> /etc/ld.so.conf.d/nvidia.conf # buildkit

2024-04-22 23:42:28 UTC (buildkit.dockerfile.v0)

ENV PATH=/usr/local/nvidia/bin:/usr/local/cuda/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin

2024-04-22 23:42:28 UTC (buildkit.dockerfile.v0)

ENV LD_LIBRARY_PATH=/usr/local/nvidia/lib:/usr/local/nvidia/lib64

2024-04-22 23:42:28 UTC (buildkit.dockerfile.v0)

COPY NGC-DL-CONTAINER-LICENSE / # buildkit

2024-04-22 23:42:28 UTC (buildkit.dockerfile.v0)

ENV NVIDIA_VISIBLE_DEVICES=all

2024-04-22 23:42:28 UTC (buildkit.dockerfile.v0)

ENV NVIDIA_DRIVER_CAPABILITIES=compute,utility

2024-04-22 23:46:26 UTC (buildkit.dockerfile.v0)

ENV NV_CUDA_LIB_VERSION=12.4.1-1

2024-04-22 23:46:26 UTC (buildkit.dockerfile.v0)

ENV NV_NVTX_VERSION=12.4.127-1

2024-04-22 23:46:26 UTC (buildkit.dockerfile.v0)

ENV NV_LIBNPP_VERSION=12.2.5.30-1

2024-04-22 23:46:26 UTC (buildkit.dockerfile.v0)

ENV NV_LIBNPP_PACKAGE=libnpp-12-4=12.2.5.30-1

2024-04-22 23:46:26 UTC (buildkit.dockerfile.v0)

ENV NV_LIBCUSPARSE_VERSION=12.3.1.170-1

2024-04-22 23:46:26 UTC (buildkit.dockerfile.v0)

ENV NV_LIBCUBLAS_PACKAGE_NAME=libcublas-12-4

2024-04-22 23:46:26 UTC (buildkit.dockerfile.v0)

ENV NV_LIBCUBLAS_VERSION=12.4.5.8-1

2024-04-22 23:46:26 UTC (buildkit.dockerfile.v0)

ENV NV_LIBCUBLAS_PACKAGE=libcublas-12-4=12.4.5.8-1

2024-04-22 23:46:26 UTC (buildkit.dockerfile.v0)

ENV NV_LIBNCCL_PACKAGE_NAME=libnccl2

2024-04-22 23:46:26 UTC (buildkit.dockerfile.v0)

ENV NV_LIBNCCL_PACKAGE_VERSION=2.21.5-1

2024-04-22 23:46:26 UTC (buildkit.dockerfile.v0)

ENV NCCL_VERSION=2.21.5-1

2024-04-22 23:46:26 UTC (buildkit.dockerfile.v0)

ENV NV_LIBNCCL_PACKAGE=libnccl2=2.21.5-1+cuda12.4

2024-04-22 23:46:26 UTC (buildkit.dockerfile.v0)

ARG TARGETARCH

2024-04-22 23:46:26 UTC (buildkit.dockerfile.v0)

LABEL maintainer=NVIDIA CORPORATION <cudatools@nvidia.com>

2024-04-22 23:46:26 UTC (buildkit.dockerfile.v0)

RUN |1 TARGETARCH=amd64 /bin/sh -c apt-get update && apt-get install -y --no-install-recommends cuda-libraries-12-4=${NV_CUDA_LIB_VERSION} ${NV_LIBNPP_PACKAGE} cuda-nvtx-12-4=${NV_NVTX_VERSION} libcusparse-12-4=${NV_LIBCUSPARSE_VERSION} ${NV_LIBCUBLAS_PACKAGE} ${NV_LIBNCCL_PACKAGE} && rm -rf /var/lib/apt/lists/* # buildkit

2024-04-22 23:46:26 UTC (buildkit.dockerfile.v0)

RUN |1 TARGETARCH=amd64 /bin/sh -c apt-mark hold ${NV_LIBCUBLAS_PACKAGE_NAME} ${NV_LIBNCCL_PACKAGE_NAME} # buildkit

2024-04-22 23:46:26 UTC (buildkit.dockerfile.v0)

COPY entrypoint.d/ /opt/nvidia/entrypoint.d/ # buildkit

2024-04-22 23:46:26 UTC (buildkit.dockerfile.v0)

COPY nvidia_entrypoint.sh /opt/nvidia/ # buildkit

2024-04-22 23:46:26 UTC (buildkit.dockerfile.v0)

ENV NVIDIA_PRODUCT_NAME=CUDA

2024-04-22 23:46:26 UTC (buildkit.dockerfile.v0)

ENTRYPOINT ["/opt/nvidia/nvidia_entrypoint.sh"]

2024-04-22 23:54:41 UTC (buildkit.dockerfile.v0)

ENV NV_CUDA_LIB_VERSION=12.4.1-1

2024-04-22 23:54:41 UTC (buildkit.dockerfile.v0)

ENV NV_CUDA_CUDART_DEV_VERSION=12.4.127-1

2024-04-22 23:54:41 UTC (buildkit.dockerfile.v0)

ENV NV_NVML_DEV_VERSION=12.4.127-1

2024-04-22 23:54:41 UTC (buildkit.dockerfile.v0)

ENV NV_LIBCUSPARSE_DEV_VERSION=12.3.1.170-1

2024-04-22 23:54:41 UTC (buildkit.dockerfile.v0)

ENV NV_LIBNPP_DEV_VERSION=12.2.5.30-1

2024-04-22 23:54:41 UTC (buildkit.dockerfile.v0)

ENV NV_LIBNPP_DEV_PACKAGE=libnpp-dev-12-4=12.2.5.30-1

2024-04-22 23:54:41 UTC (buildkit.dockerfile.v0)

ENV NV_LIBCUBLAS_DEV_VERSION=12.4.5.8-1

2024-04-22 23:54:41 UTC (buildkit.dockerfile.v0)

ENV NV_LIBCUBLAS_DEV_PACKAGE_NAME=libcublas-dev-12-4

2024-04-22 23:54:41 UTC (buildkit.dockerfile.v0)

ENV NV_LIBCUBLAS_DEV_PACKAGE=libcublas-dev-12-4=12.4.5.8-1

2024-04-22 23:54:41 UTC (buildkit.dockerfile.v0)

ENV NV_CUDA_NSIGHT_COMPUTE_VERSION=12.4.1-1

2024-04-22 23:54:41 UTC (buildkit.dockerfile.v0)

ENV NV_CUDA_NSIGHT_COMPUTE_DEV_PACKAGE=cuda-nsight-compute-12-4=12.4.1-1

2024-04-22 23:54:41 UTC (buildkit.dockerfile.v0)

ENV NV_NVPROF_VERSION=12.4.127-1

2024-04-22 23:54:41 UTC (buildkit.dockerfile.v0)

ENV NV_NVPROF_DEV_PACKAGE=cuda-nvprof-12-4=12.4.127-1

2024-04-22 23:54:41 UTC (buildkit.dockerfile.v0)

ENV NV_LIBNCCL_DEV_PACKAGE_NAME=libnccl-dev

2024-04-22 23:54:41 UTC (buildkit.dockerfile.v0)

ENV NV_LIBNCCL_DEV_PACKAGE_VERSION=2.21.5-1

2024-04-22 23:54:41 UTC (buildkit.dockerfile.v0)

ENV NCCL_VERSION=2.21.5-1

2024-04-22 23:54:41 UTC (buildkit.dockerfile.v0)

ENV NV_LIBNCCL_DEV_PACKAGE=libnccl-dev=2.21.5-1+cuda12.4

2024-04-22 23:54:41 UTC (buildkit.dockerfile.v0)

ARG TARGETARCH

2024-04-22 23:54:41 UTC (buildkit.dockerfile.v0)

LABEL maintainer=NVIDIA CORPORATION <cudatools@nvidia.com>

2024-04-22 23:54:41 UTC (buildkit.dockerfile.v0)

RUN |1 TARGETARCH=amd64 /bin/sh -c apt-get update && apt-get install -y --no-install-recommends cuda-cudart-dev-12-4=${NV_CUDA_CUDART_DEV_VERSION} cuda-command-line-tools-12-4=${NV_CUDA_LIB_VERSION} cuda-minimal-build-12-4=${NV_CUDA_LIB_VERSION} cuda-libraries-dev-12-4=${NV_CUDA_LIB_VERSION} cuda-nvml-dev-12-4=${NV_NVML_DEV_VERSION} ${NV_NVPROF_DEV_PACKAGE} ${NV_LIBNPP_DEV_PACKAGE} libcusparse-dev-12-4=${NV_LIBCUSPARSE_DEV_VERSION} ${NV_LIBCUBLAS_DEV_PACKAGE} ${NV_LIBNCCL_DEV_PACKAGE} ${NV_CUDA_NSIGHT_COMPUTE_DEV_PACKAGE} && rm -rf /var/lib/apt/lists/* # buildkit

2024-04-22 23:54:42 UTC (buildkit.dockerfile.v0)

RUN |1 TARGETARCH=amd64 /bin/sh -c apt-mark hold ${NV_LIBCUBLAS_DEV_PACKAGE_NAME} ${NV_LIBNCCL_DEV_PACKAGE_NAME} # buildkit

2024-04-22 23:54:42 UTC (buildkit.dockerfile.v0)

ENV LIBRARY_PATH=/usr/local/cuda/lib64/stubs

2024-04-23 00:11:02 UTC (buildkit.dockerfile.v0)

ENV NV_CUDNN_VERSION=9.1.0.70-1

2024-04-23 00:11:02 UTC (buildkit.dockerfile.v0)

ENV NV_CUDNN_PACKAGE_NAME=libcudnn9-cuda-12

2024-04-23 00:11:02 UTC (buildkit.dockerfile.v0)

ENV NV_CUDNN_PACKAGE=libcudnn9-cuda-12=9.1.0.70-1

2024-04-23 00:11:02 UTC (buildkit.dockerfile.v0)

ENV NV_CUDNN_PACKAGE_DEV=libcudnn9-dev-cuda-12=9.1.0.70-1

2024-04-23 00:11:02 UTC (buildkit.dockerfile.v0)

ARG TARGETARCH

2024-04-23 00:11:02 UTC (buildkit.dockerfile.v0)

LABEL maintainer=NVIDIA CORPORATION <cudatools@nvidia.com>

2024-04-23 00:11:02 UTC (buildkit.dockerfile.v0)

LABEL com.nvidia.cudnn.version=9.1.0.70-1

2024-04-23 00:11:02 UTC (buildkit.dockerfile.v0)

RUN |1 TARGETARCH=amd64 /bin/sh -c apt-get update && apt-get install -y --no-install-recommends ${NV_CUDNN_PACKAGE} ${NV_CUDNN_PACKAGE_DEV} && apt-mark hold ${NV_CUDNN_PACKAGE_NAME} && rm -rf /var/lib/apt/lists/* # buildkit

2026-01-01 01:14:21 UTC (buildkit.dockerfile.v0)

LABEL org.opencontainers.image.source=https://github.com/vastai/

2026-01-01 01:14:21 UTC (buildkit.dockerfile.v0)

LABEL org.opencontainers.image.description=Base image suitable for Vast.ai.

2026-01-01 01:14:21 UTC (buildkit.dockerfile.v0)

LABEL maintainer=Vast.ai Inc <contact@vast.ai>

2026-01-01 01:14:21 UTC (buildkit.dockerfile.v0)

SHELL [/bin/bash -c umask 002 && /bin/bash -c "$@" -]
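
The SHELL line above is the exec-form JSON array with its quoting stripped by the history view. Its effect, visible in the prefix of every later RUN record, is to run each shell-form step through an outer bash that first sets umask 002 and then hands the actual command to an inner bash, so files created during the build come out group-writable. A reconstruction of the expansion (the <cmd> placeholder stands for whatever a RUN line contains; this is an illustration, not a verbatim Dockerfile excerpt):

  # RUN <cmd> under this SHELL setting effectively executes:
  /bin/bash -c 'umask 002 && /bin/bash -c "$@"' - '<cmd>'
  # the outer bash sets the umask; "$@" (here: <cmd>) is then run by the inner bash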

2026-01-01 01:14:21 UTC (buildkit.dockerfile.v0)

RUN /bin/bash -c umask 002 && /bin/bash -c "$@" - sed -i '1i umask 002' /root/.bashrc # buildkit

2026-01-01 01:14:22 UTC (buildkit.dockerfile.v0)

COPY ./ROOT/ / # buildkit

2026-01-01 01:14:22 UTC (buildkit.dockerfile.v0)

ENV DATA_DIRECTORY=/workspace

2026-01-01 01:14:22 UTC (buildkit.dockerfile.v0)

ENV WORKSPACE=/workspace

2026-01-01 01:14:22 UTC (buildkit.dockerfile.v0)

ENV PIP_BREAK_SYSTEM_PACKAGES=1

2026-01-01 01:14:22 UTC (buildkit.dockerfile.v0)

ENV DEBIAN_FRONTEND=noninteractive

2026-01-01 01:14:22 UTC (buildkit.dockerfile.v0)

ENV PYTHONUNBUFFERED=1

2026-01-01 01:14:22 UTC (buildkit.dockerfile.v0)

ENV NVIDIA_DRIVER_CAPABILITIES=all

2026-01-01 01:14:22 UTC (buildkit.dockerfile.v0)

ARG BASE_IMAGE=nvidia/cuda:12.4.1-cudnn-devel-ubuntu22.04

2026-01-01 01:14:22 UTC (buildkit.dockerfile.v0)

RUN |1 BASE_IMAGE=nvidia/cuda:12.4.1-cudnn-devel-ubuntu22.04 /bin/bash -c umask 002 && /bin/bash -c "$@" - set -euo pipefail && if [[ "$BASE_IMAGE" == "nvidia/cuda:12.8"* ]]; then NCCL_VERSION=$(dpkg-query -W -f='${Version}' libnccl2 2>/dev/null | cut -d'-' -f1 || echo "0.0.0"); if dpkg --compare-versions "$NCCL_VERSION" lt "2.26.2"; then apt-get -y update; apt-get install -y --allow-change-held-packages libnccl2=2.26.2-1+cuda12.8 libnccl-dev=2.26.2-1+cuda12.8; fi; fi && apt-get clean && rm -rf /var/lib/apt/lists/* # buildkit
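
The step above swaps in a newer NCCL only when the build is based on a CUDA 12.8 image, so this CUDA 12.4.1 build keeps the stock 2.21.5 packages, which were put on hold with apt-mark earlier in the history. The installed version can be confirmed the same way the build script checks it:

  dpkg-query -W -f='${Version}\n' libnccl2    # expected: 2.21.5-1+cuda12.4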

2026-01-01 01:17:05 UTC (buildkit.dockerfile.v0)

RUN |1 BASE_IMAGE=nvidia/cuda:12.4.1-cudnn-devel-ubuntu22.04 /bin/bash -c umask 002 && /bin/bash -c "$@" - set -euo pipefail && if ! command -v unminimize >/dev/null 2>&1; then apt-get update; apt-get install -y --no-install-recommends unminimize; fi && printf "%s\n%s" y y | unminimize && apt-get clean && rm -rf /var/lib/apt/lists/* # buildkit

2026-01-01 01:17:05 UTC (buildkit.dockerfile.v0)

ARG TARGETARCH=amd64

2026-01-01 01:18:49 UTC (buildkit.dockerfile.v0)

RUN |2 BASE_IMAGE=nvidia/cuda:12.4.1-cudnn-devel-ubuntu22.04 TARGETARCH=amd64 /bin/bash -c umask 002 && /bin/bash -c "$@" - set -euo pipefail && ([ $TARGETARCH = "arm64" ] && echo "Skipping i386 architecture for ARM builds" || dpkg --add-architecture i386) && apt-get update && apt-get upgrade -y && apt-get install --no-install-recommends -y acl bc ca-certificates gpg-agent software-properties-common locales lsb-release curl wget sudo moreutils nano vim less jq git git-lfs man tzdata fonts-dejavu fonts-freefont-ttf fonts-ubuntu ffmpeg mesa-utils-extra htop iotop strace libtcmalloc-minimal4 lsof procps psmisc nvtop rdma-core libibverbs1 ibverbs-providers libibumad3 librdmacm1 infiniband-diags build-essential cmake ninja-build gdb libssl-dev python3-full python3-dev python3-pip netcat-traditional net-tools dnsutils iproute2 iputils-ping traceroute dos2unix rsync rclone zip unzip xz-utils zstd linux-tools-common cron rsyslog clinfo pocl-opencl-icd opencl-headers ocl-icd-libopencl1 ocl-icd-dev ocl-icd-opencl-dev vulkan-tools && mkdir -p /etc/OpenCL/vendors && echo "libnvidia-opencl.so.1" > /etc/OpenCL/vendors/nvidia.icd && apt-get clean && rm -rf /var/lib/apt/lists/* # buildkit

2026-01-01 01:18:49 UTC (buildkit.dockerfile.v0)

ARG TARGETARCH=amd64

2026-01-01 01:18:50 UTC (buildkit.dockerfile.v0)

RUN |2 BASE_IMAGE=nvidia/cuda:12.4.1-cudnn-devel-ubuntu22.04 TARGETARCH=amd64 /bin/bash -c umask 002 && /bin/bash -c "$@" - set -euo pipefail && if ! compgen -G "/etc/apt/sources.list.d/cuda*" > /dev/null && ! compgen -G "/etc/apt/sources.list.d/rocm*" > /dev/null; then UBUNTU_VERSION=$(. /etc/os-release && echo "$VERSION_ID" | tr -d '.') && if [[ "$TARGETARCH" = "amd64" ]]; then ARCH="x86_64"; elif [[ "$TARGETARCH" = "arm64" ]]; then ARCH="sbsa"; else echo "Unsupported TARGETARCH: ${TARGETARCH}. Cannot configure Nvidia CUDA repository." >&2; exit 1; fi && curl -fsSL https://developer.download.nvidia.com/compute/cuda/repos/ubuntu${UBUNTU_VERSION}/${ARCH}/3bf863cc.pub | gpg --dearmor --yes -o /usr/share/keyrings/nvidia-cuda.gpg && echo "deb [signed-by=/usr/share/keyrings/nvidia-cuda.gpg] https://developer.download.nvidia.com/compute/cuda/repos/ubuntu${UBUNTU_VERSION}/${ARCH} /" > /etc/apt/sources.list.d/cuda.list && apt-get update && apt-get clean && rm -rf /var/lib/apt/lists/*; fi # buildkit

2026-01-01 01:18:50 UTC (buildkit.dockerfile.v0)

RUN |2 BASE_IMAGE=nvidia/cuda:12.4.1-cudnn-devel-ubuntu22.04 TARGETARCH=amd64 /bin/bash -c umask 002 && /bin/bash -c "$@" - set -euo pipefail && useradd -ms /bin/bash user -u 1001 -g 0 && sed -i '1i umask 002' /home/user/.bashrc && echo "PATH=${PATH}" >> /home/user/.bashrc && echo "user ALL=(ALL) NOPASSWD:ALL" | tee /etc/sudoers.d/user && sudo chmod 0440 /etc/sudoers.d/user && mkdir -m 700 -p /run/user/1001 && chown 1001:0 /run/user/1001 && mkdir -p /run/dbus && mkdir -p /opt/workspace-internal/ # buildkit

2026-01-01 01:18:50 UTC (buildkit.dockerfile.v0)

ENV UV_CACHE_DIR=/.uv/cache

2026-01-01 01:18:50 UTC (buildkit.dockerfile.v0)

ENV UV_NO_CACHE=1

2026-01-01 01:18:50 UTC (buildkit.dockerfile.v0)

ENV UV_LINK_MODE=copy

2026-01-01 01:18:50 UTC (buildkit.dockerfile.v0)

ENV UV_PYTHON_BIN_DIR=/.uv/python_bin

2026-01-01 01:18:50 UTC (buildkit.dockerfile.v0)

ENV UV_PYTHON_INSTALL_DIR=/.uv/python_install

2026-01-01 01:18:53 UTC (buildkit.dockerfile.v0)

RUN |2 BASE_IMAGE=nvidia/cuda:12.4.1-cudnn-devel-ubuntu22.04 TARGETARCH=amd64 /bin/bash -c umask 002 && /bin/bash -c "$@" - set -euo pipefail && mkdir -p "${UV_CACHE_DIR}" "${UV_PYTHON_BIN_DIR}" "${UV_PYTHON_INSTALL_DIR}" && curl -LsSf https://astral.sh/uv/install.sh -o /tmp/uv-install.sh && chmod +x /tmp/uv-install.sh && UV_UNMANAGED_INSTALL=/usr/local/bin /tmp/uv-install.sh && rm -rf /tmp/* # buildkit

2026-01-01 01:19:00 UTC (buildkit.dockerfile.v0)

RUN |2 BASE_IMAGE=nvidia/cuda:12.4.1-cudnn-devel-ubuntu22.04 TARGETARCH=amd64 /bin/bash -c umask 002 && /bin/bash -c "$@" - set -euo pipefail && git clone https://github.com/nvm-sh/nvm.git /opt/nvm && (cd /opt/nvm/ && git checkout `git describe --abbrev=0 --tags --match "v[0-9]*" $(git rev-list --tags --max-count=1)`) && source /opt/nvm/nvm.sh && nvm install --lts # buildkit
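
Node.js comes from nvm rather than apt, so it is not on PATH until nvm is loaded. A usage sketch for a shell inside the container:

  source /opt/nvm/nvm.sh    # loads nvm and activates the default (LTS) Node.js installed at build time
  node --version
  npm --version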

2026-01-01 01:19:00 UTC (buildkit.dockerfile.v0)

COPY ./portal-aio /opt/portal-aio # buildkit

2026-01-01 01:19:01 UTC (buildkit.dockerfile.v0)

COPY /go/caddy /opt/portal-aio/caddy_manager/caddy # buildkit

2026-01-01 01:19:01 UTC (buildkit.dockerfile.v0)

ARG TARGETARCH=amd64

2026-01-01 01:19:09 UTC (buildkit.dockerfile.v0)

RUN |2 BASE_IMAGE=nvidia/cuda:12.4.1-cudnn-devel-ubuntu22.04 TARGETARCH=amd64 /bin/bash -c umask 002 && /bin/bash -c "$@" - chown -R 0:0 /opt/portal-aio && set -euo pipefail && uv venv --seed /opt/portal-aio/venv -p 3.11 && mkdir -m 770 -p /var/log/portal && chown 0:0 /var/log/portal/ && mkdir -p opt/instance-tools/bin/ && . /opt/portal-aio/venv/bin/activate && uv pip install -r /opt/portal-aio/requirements.txt && deactivate && wget -O /opt/portal-aio/tunnel_manager/cloudflared https://github.com/cloudflare/cloudflared/releases/latest/download/cloudflared-linux-${TARGETARCH} && chmod +x /opt/portal-aio/tunnel_manager/cloudflared && ln -s /opt/portal-aio/caddy_manager/caddy /opt/instance-tools/bin/caddy && ln -s /opt/portal-aio/tunnel_manager/cloudflared /opt/instance-tools/bin/cloudflared && apt-get clean && rm -rf /var/lib/apt/lists/* # buildkit

2026-01-01 01:19:52 UTC (buildkit.dockerfile.v0)

RUN |2 BASE_IMAGE=nvidia/cuda:12.4.1-cudnn-devel-ubuntu22.04 TARGETARCH=amd64 /bin/bash -c umask 002 && /bin/bash -c "$@" - set -euo pipefail && cd /opt && git clone https://github.com/vast-ai/vast-cli && wget -O /usr/local/share/ca-certificates/jvastai.crt https://console.vast.ai/static/jvastai_root.cer && update-ca-certificates && pip install --no-cache-dir --ignore-installed jupyter supervisor tensorboard magic-wormhole && mkdir -p /var/log/supervisor # buildkit
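
The Vast.ai CLI is cloned to /opt/vast-cli rather than installed as a package. A usage sketch, assuming the repository's vast.py entry point (an API key is needed for authenticated calls):

  python3 /opt/vast-cli/vast.py --help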

2026-01-01 01:19:52 UTC (buildkit.dockerfile.v0)

ARG TARGETARCH=amd64

2026-01-01 01:19:54 UTC (buildkit.dockerfile.v0)

RUN |2 BASE_IMAGE=nvidia/cuda:12.4.1-cudnn-devel-ubuntu22.04 TARGETARCH=amd64 /bin/bash -c umask 002 && /bin/bash -c "$@" - set -euo pipefail && SYNCTHING_VERSION="$(curl -fsSL "https://api.github.com/repos/syncthing/syncthing/releases/latest" | jq -r '.tag_name' | sed 's/[^0-9\.\-]*//g')" && SYNCTHING_URL="https://github.com/syncthing/syncthing/releases/download/v${SYNCTHING_VERSION}/syncthing-linux-${TARGETARCH}-v${SYNCTHING_VERSION}.tar.gz" && mkdir -p /opt/syncthing/config && mkdir -p /opt/syncthing/data && wget -O /opt/syncthing.tar.gz $SYNCTHING_URL && (cd /opt && tar -zxf syncthing.tar.gz -C /opt/syncthing/ --strip-components=1) && chown -R user:root /opt/syncthing && rm -f /opt/syncthing.tar.gz # buildkit

2026-01-01 01:19:54 UTC (buildkit.dockerfile.v0)

ARG BASE_IMAGE=nvidia/cuda:12.4.1-cudnn-devel-ubuntu22.04

2026-01-01 01:19:54 UTC (buildkit.dockerfile.v0)

ARG PYTHON_VERSION=3.10

2026-01-01 01:19:54 UTC (buildkit.dockerfile.v0)

ENV PYTHON_VERSION=3.10

2026-01-01 02:05:01 UTC (buildkit.dockerfile.v0)

RUN |3 BASE_IMAGE=nvidia/cuda:12.4.1-cudnn-devel-ubuntu22.04 TARGETARCH=amd64 PYTHON_VERSION=3.10 /bin/bash -c umask 002 && /bin/bash -c "$@" - set -euo pipefail && curl -L -o /tmp/miniforge3.sh "https://github.com/conda-forge/miniforge/releases/latest/download/Miniforge3-$(uname)-$(uname -m).sh" && bash /tmp/miniforge3.sh -b -p /opt/miniforge3 && /opt/miniforge3/bin/conda init && su -l user -c "/opt/miniforge3/bin/conda init" && mkdir -p /venv && /opt/miniforge3/bin/conda config --set auto_activate_base false && /opt/miniforge3/bin/conda config --set always_copy true && /opt/miniforge3/bin/conda config --set pip_interop_enabled true && /opt/miniforge3/bin/conda config --add envs_dirs /venv && /opt/miniforge3/bin/conda config --set env_prompt '({name}) ' && su -l user -c "/opt/miniforge3/bin/conda config --set auto_activate_base false" && su -l user -c "/opt/miniforge3/bin/conda config --set always_copy true" && su -l user -c "/opt/miniforge3/bin/conda config --set pip_interop_enabled true" && su -l user -c "/opt/miniforge3/bin/conda config --add envs_dirs /venv" && su -l user -c "/opt/miniforge3/bin/conda config --set env_prompt '({name}) '" && if [[ "$BASE_IMAGE" == *"nvidia"* ]]; then /opt/miniforge3/bin/conda config --add channels nvidia; su -l user -c "/opt/miniforge3/bin/conda config --add channels nvidia"; fi && /opt/miniforge3/bin/conda create -p /venv/main python="${PYTHON_VERSION}" -y && mkdir -p /venv/main/etc/conda/{activate.d,deactivate.d} && echo 'echo -e "\033[32mActivated conda/uv virtual environment at \033[36m$(realpath $CONDA_PREFIX)\033[0m"' > /venv/main/etc/conda/activate.d/environment.sh && /opt/miniforge3/bin/conda clean -ay && rm -rf /tmp/* # buildkit

2026-01-01 02:05:02 UTC (buildkit.dockerfile.v0)

RUN |3 BASE_IMAGE=nvidia/cuda:12.4.1-cudnn-devel-ubuntu22.04 TARGETARCH=amd64 PYTHON_VERSION=3.10 /bin/bash -c umask 002 && /bin/bash -c "$@" - cat <<'CONDA_ACTIVATION_SCRIPT' > /venv/main/bin/activate
if [[ "${BASH_SOURCE[0]}" == "${0}" ]]; then
    echo "This script must be sourced: source bin/activate"
    exit 1
fi

# Define deactivate function
deactivate() {
    # Deactivate conda environment
    if type conda &> /dev/null; then
        conda deactivate 2>/dev/null || true
    fi
    # Unset the deactivate function itself
    unset -f deactivate
    # Return success
    return 0
}

# Check if conda is properly initialized by testing for the conda shell function
# (not just the command existence)
if ! type conda &> /dev/null || ! declare -F conda &> /dev/null; then
    # Add condabin to PATH if not already there
    if [[ "$PATH" != *"/opt/miniforge3/condabin"* ]]; then
        export PATH="/opt/miniforge3/condabin:$PATH"
    fi
    # Source the conda shell script to load shell functions
    if [[ -f /opt/miniforge3/etc/profile.d/conda.sh ]]; then
        source /opt/miniforge3/etc/profile.d/conda.sh
    fi
fi

# Activate the conda environment
conda activate "$(realpath /venv/main)"
CONDA_ACTIVATION_SCRIPT # buildkit

2026-01-01 02:05:09 UTC (buildkit.dockerfile.v0)

RUN |3 BASE_IMAGE=nvidia/cuda:12.4.1-cudnn-devel-ubuntu22.04 TARGETARCH=amd64 PYTHON_VERSION=3.10 /bin/bash -c umask 002 && /bin/bash -c "$@" - set -euo pipefail && . /venv/main/bin/activate && uv pip install wheel huggingface-hub[cli] ipykernel ipywidgets && python -m ipykernel install --name="main" --display-name="Python3 (main venv)" && python -m ipykernel install --name="python3" --display-name="Python3 (ipykernel)" && deactivate && /usr/bin/pip install conda-pack ipykernel && /usr/bin/python3 -m ipykernel install --name="system-python" --display-name="Python3 (System)" && apt-get clean && rm -rf /var/lib/apt/lists/* # buildkit
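
The last two steps leave a conda-managed virtual environment at /venv/main with Jupyter kernels registered for it and for the system Python. Inside a running container it is used the usual way (sketch):

  source /venv/main/bin/activate    # activation script written in the step above
  python --version                  # Python 3.10 from /venv/main
  jupyter kernelspec list           # expected to include main, python3 and system-python
  deactivate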

2026-01-01 02:05:09 UTC (buildkit.dockerfile.v0)

ENV PATH=/opt/instance-tools/bin:/usr/local/nvidia/bin:/usr/local/cuda/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin

2026-01-01 02:05:09 UTC (buildkit.dockerfile.v0)

RUN |3 BASE_IMAGE=nvidia/cuda:12.4.1-cudnn-devel-ubuntu22.04 TARGETARCH=amd64 PYTHON_VERSION=3.10 /bin/bash -c umask 002 && /bin/bash -c "$@" - set -euo pipefail && env-hash > /.env_hash # buildkit

2026-01-01 02:05:09 UTC (buildkit.dockerfile.v0)

ENTRYPOINT ["/opt/instance-tools/bin/entrypoint.sh"]

2026-01-01 02:05:09 UTC (buildkit.dockerfile.v0)

CMD []


Details (linux/arm64 manifest)
Created

2026-01-01 02:11:18 UTC

Size

4.93 GB

Content Digest
Labels
  • com.nvidia.cudnn.version
    9.1.0.70-1
  • maintainer
    Vast.ai Inc <contact@vast.ai>
  • org.opencontainers.image.description
    Base image suitable for Vast.ai.
  • org.opencontainers.image.ref.name
    ubuntu
  • org.opencontainers.image.source
    https://github.com/vastai/
  • org.opencontainers.image.version
    22.04

Environment
CUDA_VERSION

12.4.1

DATA_DIRECTORY

/workspace

DEBIAN_FRONTEND

noninteractive

LD_LIBRARY_PATH

/usr/local/nvidia/lib:/usr/local/nvidia/lib64

LIBRARY_PATH

/usr/local/cuda/lib64/stubs

NCCL_VERSION

2.21.5-1

NVARCH

sbsa
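
As the NVARCH=sbsa value and the TARGETARCH=arm64 build arguments below indicate, this second manifest is the linux/arm64 build of the same tag. It can be requested explicitly even from an x86_64 host (same Docker Hub name assumption as above):

  docker pull --platform linux/arm64 vastai/base-image:cuda-12.4.1-auto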

NVIDIA_DRIVER_CAPABILITIES

all

NVIDIA_PRODUCT_NAME

CUDA

NVIDIA_REQUIRE_CUDA

cuda>=12.4 brand=tesla,driver>=470,driver<471 brand=unknown,driver>=470,driver<471 brand=nvidia,driver>=470,driver<471 brand=nvidiartx,driver>=470,driver<471 brand=geforce,driver>=470,driver<471 brand=geforcertx,driver>=470,driver<471 brand=quadro,driver>=470,driver<471 brand=quadrortx,driver>=470,driver<471 brand=titan,driver>=470,driver<471 brand=titanrtx,driver>=470,driver<471 brand=tesla,driver>=525,driver<526 brand=unknown,driver>=525,driver<526 brand=nvidia,driver>=525,driver<526 brand=nvidiartx,driver>=525,driver<526 brand=geforce,driver>=525,driver<526 brand=geforcertx,driver>=525,driver<526 brand=quadro,driver>=525,driver<526 brand=quadrortx,driver>=525,driver<526 brand=titan,driver>=525,driver<526 brand=titanrtx,driver>=525,driver<526 brand=tesla,driver>=535,driver<536 brand=unknown,driver>=535,driver<536 brand=nvidia,driver>=535,driver<536 brand=nvidiartx,driver>=535,driver<536 brand=geforce,driver>=535,driver<536 brand=geforcertx,driver>=535,driver<536 brand=quadro,driver>=535,driver<536 brand=quadrortx,driver>=535,driver<536 brand=titan,driver>=535,driver<536 brand=titanrtx,driver>=535,driver<536

NVIDIA_VISIBLE_DEVICES

all

NV_CUDA_CUDART_DEV_VERSION

12.4.127-1

NV_CUDA_CUDART_VERSION

12.4.127-1

NV_CUDA_LIB_VERSION

12.4.1-1

NV_CUDA_NSIGHT_COMPUTE_DEV_PACKAGE

cuda-nsight-compute-12-4=12.4.1-1

NV_CUDA_NSIGHT_COMPUTE_VERSION

12.4.1-1

NV_CUDNN_PACKAGE

libcudnn9-cuda-12=9.1.0.70-1

NV_CUDNN_PACKAGE_DEV

libcudnn9-dev-cuda-12=9.1.0.70-1

NV_CUDNN_PACKAGE_NAME

libcudnn9-cuda-12

NV_CUDNN_VERSION

9.1.0.70-1

NV_LIBCUBLAS_DEV_PACKAGE

libcublas-dev-12-4=12.4.5.8-1

NV_LIBCUBLAS_DEV_PACKAGE_NAME

libcublas-dev-12-4

NV_LIBCUBLAS_DEV_VERSION

12.4.5.8-1

NV_LIBCUBLAS_PACKAGE

libcublas-12-4=12.4.5.8-1

NV_LIBCUBLAS_PACKAGE_NAME

libcublas-12-4

NV_LIBCUBLAS_VERSION

12.4.5.8-1

NV_LIBCUSPARSE_DEV_VERSION

12.3.1.170-1

NV_LIBCUSPARSE_VERSION

12.3.1.170-1

NV_LIBNCCL_DEV_PACKAGE

libnccl-dev=2.21.5-1+cuda12.4

NV_LIBNCCL_DEV_PACKAGE_NAME

libnccl-dev

NV_LIBNCCL_DEV_PACKAGE_VERSION

2.21.5-1

NV_LIBNCCL_PACKAGE

libnccl2=2.21.5-1+cuda12.4

NV_LIBNCCL_PACKAGE_NAME

libnccl2

NV_LIBNCCL_PACKAGE_VERSION

2.21.5-1

NV_LIBNPP_DEV_PACKAGE

libnpp-dev-12-4=12.2.5.30-1

NV_LIBNPP_DEV_VERSION

12.2.5.30-1

NV_LIBNPP_PACKAGE

libnpp-12-4=12.2.5.30-1

NV_LIBNPP_VERSION

12.2.5.30-1

NV_NVML_DEV_VERSION

12.4.127-1

NV_NVTX_VERSION

12.4.127-1

PATH

/opt/instance-tools/bin:/usr/local/nvidia/bin:/usr/local/cuda/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin

PIP_BREAK_SYSTEM_PACKAGES

1

PYTHONUNBUFFERED

1

PYTHON_VERSION

3.10

UV_CACHE_DIR

/.uv/cache

UV_LINK_MODE

copy

UV_NO_CACHE

1

UV_PYTHON_BIN_DIR

/.uv/python_bin

UV_PYTHON_INSTALL_DIR

/.uv/python_install

WORKSPACE

/workspace


Layers

[#000] sha256:70104cd59e2a443b9d9a13a6bce3bbf1ae78261c4198a40bf69d6e0515abe06a - 0.52% (26.1 MB)

[#001] sha256:35e6dd55b641a91c7d2bf3bc31b81302d61a23df6473f9fad608c81f8852db6f - 0.09% (4.36 MB)

[#002] sha256:56c8cdb42d24e6e7cd545a41829891ad52c25e2ec883bc4be7d81b7804dfac52 - 0.01% (372 KB)

[#003] sha256:22748568967fe696328a740d644531af1504f15c53245a8de41ab57b24bfbb1a - 0.0% (187 Bytes)

[#004] sha256:56dc8550293751a1604e97ac949cfae82ba20cb2a28e034737bafd7382559609 - 0.0% (6.72 KB)

[#005] sha256:b97237f311660dcc393013cf58d7804c199fe0cb4d6f0265096ab8ddf706e7f6 - 25.94% (1.28 GB)

[#006] sha256:2e882515fad92f551268e3becd4df860ae4a0d015df30ea8abb6ce2234217f40 - 0.0% (62.3 KB)

[#007] sha256:e37bdbfc55edc529a8d47b715f30651f78a56c0b59122e238b2eba695df85f49 - 0.0% (1.64 KB)

[#008] sha256:4c60b5e7307e86fa82203b0c227d8001dce1fc4804fd49bbb2d42f6a5a805954 - 0.0% (1.49 KB)

[#009] sha256:8a791a9b45017619d9c8b0bac087727fe19db13c49d1782fa25f38bdf007b530 - 40.97% (2.02 GB)

[#010] sha256:59588f87dd82424f152239a2a3e72bb32880fceabd3c1f30c827214b19d951bb - 0.0% (86.2 KB)

[#011] sha256:e360b20280accb1303e5dbb366d8af5f9aaebe3de5047aeaf0d34eefb34aa12f - 12.63% (638 MB)

[#012] sha256:3b47bb63be7671a65918bd017d9dd68873e038ab010d3026e9468c59fedd3b11 - 0.0% (1.53 KB)

[#013] sha256:fff05a8ab693151fbb9175f80415b53fc842d12663594c0d01568191c9e4416b - 0.0% (15.3 KB)

[#014] sha256:4f4fb700ef54461cfa02571ae0db9a0dc1e0cdb5577484a6d75e68dc38e8acc1 - 0.0% (32 Bytes)

[#015] sha256:04ca62a5f58779050878ca462e17b7edb2cee06e6b731f5e88979bc116d6c04a - 2.19% (110 MB)

[#016] sha256:928e4615d753b544457a1383d092ac16438ad928f7ed8b7badb4c360243dbd05 - 6.97% (352 MB)

[#017] sha256:4f4fb700ef54461cfa02571ae0db9a0dc1e0cdb5577484a6d75e68dc38e8acc1 - 0.0% (32 Bytes)

[#018] sha256:f85f7b95fd45ef54c959501119a529af0c2d58216a9da5403d80cedab7f120b1 - 0.0% (4 KB)

[#019] sha256:3e85b05b7d14559d8eb8e49fef1738a0af6f38572935068536542369308ee807 - 0.41% (21 MB)

[#020] sha256:25a573d384ab6ea8be64eb72237422e2efb9a454d876d5ba84ccb14208980cdd - 1.74% (88.1 MB)

[#021] sha256:650218de74ed84c0435b2dcd5d1e9d19defd5e538c0d198dea6cc6c5123f99a0 - 0.0% (41.2 KB)

[#022] sha256:2870b4ed82fcd0ee61c28fef6b025dcdbc34164e22b143a522ca4a45f306f091 - 0.25% (12.7 MB)

[#023] sha256:527f184b0466c094c635de6d760139dcb8a39d1e8fe7801ce759ae5d09724875 - 1.57% (79.3 MB)

[#024] sha256:a7c0fe7a2a1692582931f1715b1b9022fabc02ad99dc6eedca7a3488fa93ee17 - 2.74% (138 MB)

[#025] sha256:427a6d6b1d1249e2837d59c8577013b126245cea71fb6946187117923782c09a - 0.25% (12.8 MB)

[#026] sha256:e8e6dfa61335ea609123dbdeb782c70360458a66a7d6640284c5365623bb0e80 - 3.32% (168 MB)

[#027] sha256:39fd4cd1128696aee0a916a146d36ff3b8f8b116a24de9806fc899626b8ecfd6 - 0.0% (632 Bytes)

[#028] sha256:f1c89d6c11e4314488b05b0994b8603eb6ba54a9c92f59b98a03c10c63ca1e53 - 0.4% (20.3 MB)

[#029] sha256:1ab94826020e26349680fe991ad44c7c1b8b2fbe353382ca11322b911ab5d3ba - 0.0% (120 Bytes)

[#030] sha256:0f899c068e83f5e62f2b7ef514c9293e191006f9b195f13dabad5e9aecd08387 - 0.0% (15.8 KB)

[#031] sha256:bfd83e6305f8f1eb88a19a58b4535488781c6d917f9eeefec14cf8fbe027c69e - 0.0% (15.7 KB)


History
2024-04-10 18:26:15 UTC

/bin/sh -c #(nop) ARG RELEASE

2024-04-10 18:26:15 UTC

/bin/sh -c #(nop) ARG LAUNCHPAD_BUILD_ARCH

2024-04-10 18:26:15 UTC

/bin/sh -c #(nop) LABEL org.opencontainers.image.ref.name=ubuntu

2024-04-10 18:26:15 UTC

/bin/sh -c #(nop) LABEL org.opencontainers.image.version=22.04

2024-04-10 18:26:17 UTC

/bin/sh -c #(nop) ADD file:5523c8e2dfa5286893a32b66bdb3395b76e282d86d79b7320a5855e8f55481e1 in /

2024-04-10 18:26:17 UTC

/bin/sh -c #(nop) CMD ["/bin/bash"]

2024-04-22 23:44:24 UTC (buildkit.dockerfile.v0)

ENV NVARCH=sbsa

2024-04-22 23:44:24 UTC (buildkit.dockerfile.v0)

ENV NVIDIA_REQUIRE_CUDA=cuda>=12.4 brand=tesla,driver>=470,driver<471 brand=unknown,driver>=470,driver<471 brand=nvidia,driver>=470,driver<471 brand=nvidiartx,driver>=470,driver<471 brand=geforce,driver>=470,driver<471 brand=geforcertx,driver>=470,driver<471 brand=quadro,driver>=470,driver<471 brand=quadrortx,driver>=470,driver<471 brand=titan,driver>=470,driver<471 brand=titanrtx,driver>=470,driver<471 brand=tesla,driver>=525,driver<526 brand=unknown,driver>=525,driver<526 brand=nvidia,driver>=525,driver<526 brand=nvidiartx,driver>=525,driver<526 brand=geforce,driver>=525,driver<526 brand=geforcertx,driver>=525,driver<526 brand=quadro,driver>=525,driver<526 brand=quadrortx,driver>=525,driver<526 brand=titan,driver>=525,driver<526 brand=titanrtx,driver>=525,driver<526 brand=tesla,driver>=535,driver<536 brand=unknown,driver>=535,driver<536 brand=nvidia,driver>=535,driver<536 brand=nvidiartx,driver>=535,driver<536 brand=geforce,driver>=535,driver<536 brand=geforcertx,driver>=535,driver<536 brand=quadro,driver>=535,driver<536 brand=quadrortx,driver>=535,driver<536 brand=titan,driver>=535,driver<536 brand=titanrtx,driver>=535,driver<536

2024-04-22 23:44:24 UTC (buildkit.dockerfile.v0)

ENV NV_CUDA_CUDART_VERSION=12.4.127-1

2024-04-22 23:44:24 UTC (buildkit.dockerfile.v0)

ARG TARGETARCH

2024-04-22 23:44:24 UTC (buildkit.dockerfile.v0)

LABEL maintainer=NVIDIA CORPORATION <cudatools@nvidia.com>

2024-04-22 23:44:24 UTC (buildkit.dockerfile.v0)

RUN |1 TARGETARCH=arm64 /bin/sh -c apt-get update && apt-get install -y --no-install-recommends gnupg2 curl ca-certificates && curl -fsSLO https://developer.download.nvidia.com/compute/cuda/repos/ubuntu2204/${NVARCH}/cuda-keyring_1.1-1_all.deb && dpkg -i cuda-keyring_1.1-1_all.deb && apt-get purge --autoremove -y curl && rm -rf /var/lib/apt/lists/* # buildkit

2024-04-22 23:44:24 UTC (buildkit.dockerfile.v0)

ENV CUDA_VERSION=12.4.1

2024-04-22 23:45:01 UTC (buildkit.dockerfile.v0)

RUN |1 TARGETARCH=arm64 /bin/sh -c apt-get update && apt-get install -y --no-install-recommends cuda-cudart-12-4=${NV_CUDA_CUDART_VERSION} ${NV_CUDA_COMPAT_PACKAGE} && rm -rf /var/lib/apt/lists/* # buildkit

2024-04-22 23:45:01 UTC (buildkit.dockerfile.v0)

RUN |1 TARGETARCH=arm64 /bin/sh -c echo "/usr/local/nvidia/lib" >> /etc/ld.so.conf.d/nvidia.conf && echo "/usr/local/nvidia/lib64" >> /etc/ld.so.conf.d/nvidia.conf # buildkit

2024-04-22 23:45:01 UTC (buildkit.dockerfile.v0)

ENV PATH=/usr/local/nvidia/bin:/usr/local/cuda/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin

2024-04-22 23:45:01 UTC (buildkit.dockerfile.v0)

ENV LD_LIBRARY_PATH=/usr/local/nvidia/lib:/usr/local/nvidia/lib64

2024-04-22 23:45:01 UTC (buildkit.dockerfile.v0)

COPY NGC-DL-CONTAINER-LICENSE / # buildkit

2024-04-22 23:45:01 UTC (buildkit.dockerfile.v0)

ENV NVIDIA_VISIBLE_DEVICES=all

2024-04-22 23:45:01 UTC (buildkit.dockerfile.v0)

ENV NVIDIA_DRIVER_CAPABILITIES=compute,utility

2024-04-22 23:49:34 UTC (buildkit.dockerfile.v0)

ENV NV_CUDA_LIB_VERSION=12.4.1-1

2024-04-22 23:49:34 UTC (buildkit.dockerfile.v0)

ENV NV_NVTX_VERSION=12.4.127-1

2024-04-22 23:49:34 UTC (buildkit.dockerfile.v0)

ENV NV_LIBNPP_VERSION=12.2.5.30-1

2024-04-22 23:49:34 UTC (buildkit.dockerfile.v0)

ENV NV_LIBNPP_PACKAGE=libnpp-12-4=12.2.5.30-1

2024-04-22 23:49:34 UTC (buildkit.dockerfile.v0)

ENV NV_LIBCUSPARSE_VERSION=12.3.1.170-1

2024-04-22 23:49:34 UTC (buildkit.dockerfile.v0)

ENV NV_LIBCUBLAS_PACKAGE_NAME=libcublas-12-4

2024-04-22 23:49:34 UTC (buildkit.dockerfile.v0)

ENV NV_LIBCUBLAS_VERSION=12.4.5.8-1

2024-04-22 23:49:34 UTC (buildkit.dockerfile.v0)

ENV NV_LIBCUBLAS_PACKAGE=libcublas-12-4=12.4.5.8-1

2024-04-22 23:49:34 UTC (buildkit.dockerfile.v0)

ENV NV_LIBNCCL_PACKAGE_NAME=libnccl2

2024-04-22 23:49:34 UTC (buildkit.dockerfile.v0)

ENV NV_LIBNCCL_PACKAGE_VERSION=2.21.5-1

2024-04-22 23:49:34 UTC (buildkit.dockerfile.v0)

ENV NCCL_VERSION=2.21.5-1

2024-04-22 23:49:34 UTC (buildkit.dockerfile.v0)

ENV NV_LIBNCCL_PACKAGE=libnccl2=2.21.5-1+cuda12.4

2024-04-22 23:49:34 UTC (buildkit.dockerfile.v0)

ARG TARGETARCH

2024-04-22 23:49:34 UTC (buildkit.dockerfile.v0)

LABEL maintainer=NVIDIA CORPORATION <cudatools@nvidia.com>

2024-04-22 23:49:34 UTC (buildkit.dockerfile.v0)

RUN |1 TARGETARCH=arm64 /bin/sh -c apt-get update && apt-get install -y --no-install-recommends cuda-libraries-12-4=${NV_CUDA_LIB_VERSION} ${NV_LIBNPP_PACKAGE} cuda-nvtx-12-4=${NV_NVTX_VERSION} libcusparse-12-4=${NV_LIBCUSPARSE_VERSION} ${NV_LIBCUBLAS_PACKAGE} ${NV_LIBNCCL_PACKAGE} && rm -rf /var/lib/apt/lists/* # buildkit

2024-04-22 23:49:35 UTC (buildkit.dockerfile.v0)

RUN |1 TARGETARCH=arm64 /bin/sh -c apt-mark hold ${NV_LIBCUBLAS_PACKAGE_NAME} ${NV_LIBNCCL_PACKAGE_NAME} # buildkit

2024-04-22 23:49:35 UTC (buildkit.dockerfile.v0)

COPY entrypoint.d/ /opt/nvidia/entrypoint.d/ # buildkit

2024-04-22 23:49:35 UTC (buildkit.dockerfile.v0)

COPY nvidia_entrypoint.sh /opt/nvidia/ # buildkit

2024-04-22 23:49:35 UTC (buildkit.dockerfile.v0)

ENV NVIDIA_PRODUCT_NAME=CUDA

2024-04-22 23:49:35 UTC (buildkit.dockerfile.v0)

ENTRYPOINT ["/opt/nvidia/nvidia_entrypoint.sh"]

2024-04-22 23:59:22 UTC (buildkit.dockerfile.v0)

ENV NV_CUDA_LIB_VERSION=12.4.1-1

2024-04-22 23:59:22 UTC (buildkit.dockerfile.v0)

ENV NV_CUDA_CUDART_DEV_VERSION=12.4.127-1

2024-04-22 23:59:22 UTC (buildkit.dockerfile.v0)

ENV NV_NVML_DEV_VERSION=12.4.127-1

2024-04-22 23:59:22 UTC (buildkit.dockerfile.v0)

ENV NV_LIBCUSPARSE_DEV_VERSION=12.3.1.170-1

2024-04-22 23:59:22 UTC (buildkit.dockerfile.v0)

ENV NV_LIBNPP_DEV_VERSION=12.2.5.30-1

2024-04-22 23:59:22 UTC (buildkit.dockerfile.v0)

ENV NV_LIBNPP_DEV_PACKAGE=libnpp-dev-12-4=12.2.5.30-1

2024-04-22 23:59:22 UTC (buildkit.dockerfile.v0)

ENV NV_LIBCUBLAS_DEV_PACKAGE_NAME=libcublas-dev-12-4

2024-04-22 23:59:22 UTC (buildkit.dockerfile.v0)

ENV NV_LIBCUBLAS_DEV_VERSION=12.4.5.8-1

2024-04-22 23:59:22 UTC (buildkit.dockerfile.v0)

ENV NV_LIBCUBLAS_DEV_PACKAGE=libcublas-dev-12-4=12.4.5.8-1

2024-04-22 23:59:22 UTC (buildkit.dockerfile.v0)

ENV NV_CUDA_NSIGHT_COMPUTE_VERSION=12.4.1-1

2024-04-22 23:59:22 UTC (buildkit.dockerfile.v0)

ENV NV_CUDA_NSIGHT_COMPUTE_DEV_PACKAGE=cuda-nsight-compute-12-4=12.4.1-1

2024-04-22 23:59:22 UTC (buildkit.dockerfile.v0)

ENV NV_LIBNCCL_DEV_PACKAGE_NAME=libnccl-dev

2024-04-22 23:59:22 UTC (buildkit.dockerfile.v0)

ENV NV_LIBNCCL_DEV_PACKAGE_VERSION=2.21.5-1

2024-04-22 23:59:22 UTC (buildkit.dockerfile.v0)

ENV NCCL_VERSION=2.21.5-1

2024-04-22 23:59:22 UTC (buildkit.dockerfile.v0)

ENV NV_LIBNCCL_DEV_PACKAGE=libnccl-dev=2.21.5-1+cuda12.4

2024-04-22 23:59:22 UTC (buildkit.dockerfile.v0)

ARG TARGETARCH

2024-04-22 23:59:22 UTC (buildkit.dockerfile.v0)

LABEL maintainer=NVIDIA CORPORATION <cudatools@nvidia.com>

2024-04-22 23:59:22 UTC (buildkit.dockerfile.v0)

RUN |1 TARGETARCH=arm64 /bin/sh -c apt-get update && apt-get install -y --no-install-recommends cuda-cudart-dev-12-4=${NV_CUDA_CUDART_DEV_VERSION} cuda-command-line-tools-12-4=${NV_CUDA_LIB_VERSION} cuda-minimal-build-12-4=${NV_CUDA_LIB_VERSION} cuda-libraries-dev-12-4=${NV_CUDA_LIB_VERSION} cuda-nvml-dev-12-4=${NV_NVML_DEV_VERSION} ${NV_NVPROF_DEV_PACKAGE} ${NV_LIBNPP_DEV_PACKAGE} libcusparse-dev-12-4=${NV_LIBCUSPARSE_DEV_VERSION} ${NV_LIBCUBLAS_DEV_PACKAGE} ${NV_LIBNCCL_DEV_PACKAGE} ${NV_CUDA_NSIGHT_COMPUTE_DEV_PACKAGE} && rm -rf /var/lib/apt/lists/* # buildkit

2024-04-22 23:59:23 UTC (buildkit.dockerfile.v0)

RUN |1 TARGETARCH=arm64 /bin/sh -c apt-mark hold ${NV_LIBCUBLAS_DEV_PACKAGE_NAME} ${NV_LIBNCCL_DEV_PACKAGE_NAME} # buildkit

2024-04-22 23:59:23 UTC (buildkit.dockerfile.v0)

ENV LIBRARY_PATH=/usr/local/cuda/lib64/stubs

2024-04-23 00:12:57 UTC (buildkit.dockerfile.v0)

ENV NV_CUDNN_VERSION=9.1.0.70-1

2024-04-23 00:12:57 UTC (buildkit.dockerfile.v0)

ENV NV_CUDNN_PACKAGE_NAME=libcudnn9-cuda-12

2024-04-23 00:12:57 UTC (buildkit.dockerfile.v0)

ENV NV_CUDNN_PACKAGE=libcudnn9-cuda-12=9.1.0.70-1

2024-04-23 00:12:57 UTC (buildkit.dockerfile.v0)

ENV NV_CUDNN_PACKAGE_DEV=libcudnn9-dev-cuda-12=9.1.0.70-1

2024-04-23 00:12:57 UTC (buildkit.dockerfile.v0)

ARG TARGETARCH

2024-04-23 00:12:57 UTC (buildkit.dockerfile.v0)

LABEL maintainer=NVIDIA CORPORATION <cudatools@nvidia.com>

2024-04-23 00:12:57 UTC (buildkit.dockerfile.v0)

LABEL com.nvidia.cudnn.version=9.1.0.70-1

2024-04-23 00:12:57 UTC (buildkit.dockerfile.v0)

RUN |1 TARGETARCH=arm64 /bin/sh -c apt-get update && apt-get install -y --no-install-recommends ${NV_CUDNN_PACKAGE} ${NV_CUDNN_PACKAGE_DEV} && apt-mark hold ${NV_CUDNN_PACKAGE_NAME} && rm -rf /var/lib/apt/lists/* # buildkit

2026-01-01 01:13:26 UTC (buildkit.dockerfile.v0)

LABEL org.opencontainers.image.source=https://github.com/vastai/

2026-01-01 01:13:26 UTC (buildkit.dockerfile.v0)

LABEL org.opencontainers.image.description=Base image suitable for Vast.ai.

2026-01-01 01:13:26 UTC (buildkit.dockerfile.v0)

LABEL maintainer=Vast.ai Inc <contact@vast.ai>

2026-01-01 01:13:26 UTC (buildkit.dockerfile.v0)

SHELL [/bin/bash -c umask 002 && /bin/bash -c "$@" -]

2026-01-01 01:13:26 UTC (buildkit.dockerfile.v0)

RUN /bin/bash -c umask 002 && /bin/bash -c "$@" - sed -i '1i umask 002' /root/.bashrc # buildkit

2026-01-01 01:13:26 UTC (buildkit.dockerfile.v0)

COPY ./ROOT/ / # buildkit

2026-01-01 01:13:26 UTC (buildkit.dockerfile.v0)

ENV DATA_DIRECTORY=/workspace

2026-01-01 01:13:26 UTC (buildkit.dockerfile.v0)

ENV WORKSPACE=/workspace

2026-01-01 01:13:26 UTC (buildkit.dockerfile.v0)

ENV PIP_BREAK_SYSTEM_PACKAGES=1

2026-01-01 01:13:26 UTC (buildkit.dockerfile.v0)

ENV DEBIAN_FRONTEND=noninteractive

2026-01-01 01:13:26 UTC (buildkit.dockerfile.v0)

ENV PYTHONUNBUFFERED=1

2026-01-01 01:13:26 UTC (buildkit.dockerfile.v0)

ENV NVIDIA_DRIVER_CAPABILITIES=all

2026-01-01 01:13:26 UTC (buildkit.dockerfile.v0)

ARG BASE_IMAGE=nvidia/cuda:12.4.1-cudnn-devel-ubuntu22.04

2026-01-01 01:13:27 UTC (buildkit.dockerfile.v0)

RUN |1 BASE_IMAGE=nvidia/cuda:12.4.1-cudnn-devel-ubuntu22.04 /bin/bash -c umask 002 && /bin/bash -c "$@" - set -euo pipefail && if [[ "$BASE_IMAGE" == "nvidia/cuda:12.8"* ]]; then NCCL_VERSION=$(dpkg-query -W -f='${Version}' libnccl2 2>/dev/null | cut -d'-' -f1 || echo "0.0.0"); if dpkg --compare-versions "$NCCL_VERSION" lt "2.26.2"; then apt-get -y update; apt-get install -y --allow-change-held-packages libnccl2=2.26.2-1+cuda12.8 libnccl-dev=2.26.2-1+cuda12.8; fi; fi && apt-get clean && rm -rf /var/lib/apt/lists/* # buildkit

2026-01-01 01:24:33 UTC (buildkit.dockerfile.v0)

RUN |1 BASE_IMAGE=nvidia/cuda:12.4.1-cudnn-devel-ubuntu22.04 /bin/bash -c umask 002 && /bin/bash -c "$@" - set -euo pipefail && if ! command -v unminimize >/dev/null 2>&1; then apt-get update; apt-get install -y --no-install-recommends unminimize; fi && printf "%s\n%s" y y | unminimize && apt-get clean && rm -rf /var/lib/apt/lists/* # buildkit

2026-01-01 01:24:33 UTC (buildkit.dockerfile.v0)

ARG TARGETARCH=arm64

2026-01-01 01:32:39 UTC (buildkit.dockerfile.v0)

RUN |2 BASE_IMAGE=nvidia/cuda:12.4.1-cudnn-devel-ubuntu22.04 TARGETARCH=arm64 /bin/bash -c umask 002 && /bin/bash -c "$@" - set -euo pipefail && ([ $TARGETARCH = "arm64" ] && echo "Skipping i386 architecture for ARM builds" || dpkg --add-architecture i386) && apt-get update && apt-get upgrade -y && apt-get install --no-install-recommends -y acl bc ca-certificates gpg-agent software-properties-common locales lsb-release curl wget sudo moreutils nano vim less jq git git-lfs man tzdata fonts-dejavu fonts-freefont-ttf fonts-ubuntu ffmpeg mesa-utils-extra htop iotop strace libtcmalloc-minimal4 lsof procps psmisc nvtop rdma-core libibverbs1 ibverbs-providers libibumad3 librdmacm1 infiniband-diags build-essential cmake ninja-build gdb libssl-dev python3-full python3-dev python3-pip netcat-traditional net-tools dnsutils iproute2 iputils-ping traceroute dos2unix rsync rclone zip unzip xz-utils zstd linux-tools-common cron rsyslog clinfo pocl-opencl-icd opencl-headers ocl-icd-libopencl1 ocl-icd-dev ocl-icd-opencl-dev vulkan-tools && mkdir -p /etc/OpenCL/vendors && echo "libnvidia-opencl.so.1" > /etc/OpenCL/vendors/nvidia.icd && apt-get clean && rm -rf /var/lib/apt/lists/* # buildkit

2026-01-01 01:32:39 UTC (buildkit.dockerfile.v0)

ARG TARGETARCH=arm64

2026-01-01 01:32:39 UTC (buildkit.dockerfile.v0)

RUN |2 BASE_IMAGE=nvidia/cuda:12.4.1-cudnn-devel-ubuntu22.04 TARGETARCH=arm64 /bin/bash -c umask 002 && /bin/bash -c "$@" - set -euo pipefail && if ! compgen -G "/etc/apt/sources.list.d/cuda*" > /dev/null && ! compgen -G "/etc/apt/sources.list.d/rocm*" > /dev/null; then UBUNTU_VERSION=$(. /etc/os-release && echo "$VERSION_ID" | tr -d '.') && if [[ "$TARGETARCH" = "amd64" ]]; then ARCH="x86_64"; elif [[ "$TARGETARCH" = "arm64" ]]; then ARCH="sbsa"; else echo "Unsupported TARGETARCH: ${TARGETARCH}. Cannot configure Nvidia CUDA repository." >&2; exit 1; fi && curl -fsSL https://developer.download.nvidia.com/compute/cuda/repos/ubuntu${UBUNTU_VERSION}/${ARCH}/3bf863cc.pub | gpg --dearmor --yes -o /usr/share/keyrings/nvidia-cuda.gpg && echo "deb [signed-by=/usr/share/keyrings/nvidia-cuda.gpg] https://developer.download.nvidia.com/compute/cuda/repos/ubuntu${UBUNTU_VERSION}/${ARCH} /" > /etc/apt/sources.list.d/cuda.list && apt-get update && apt-get clean && rm -rf /var/lib/apt/lists/*; fi # buildkit

2026-01-01 01:32:40 UTC (buildkit.dockerfile.v0)

RUN |2 BASE_IMAGE=nvidia/cuda:12.4.1-cudnn-devel-ubuntu22.04 TARGETARCH=arm64 /bin/bash -c umask 002 && /bin/bash -c "$@" - set -euo pipefail && useradd -ms /bin/bash user -u 1001 -g 0 && sed -i '1i umask 002' /home/user/.bashrc && echo "PATH=${PATH}" >> /home/user/.bashrc && echo "user ALL=(ALL) NOPASSWD:ALL" | tee /etc/sudoers.d/user && sudo chmod 0440 /etc/sudoers.d/user && mkdir -m 700 -p /run/user/1001 && chown 1001:0 /run/user/1001 && mkdir -p /run/dbus && mkdir -p /opt/workspace-internal/ # buildkit

2026-01-01 01:32:40 UTC (buildkit.dockerfile.v0)

ENV UV_CACHE_DIR=/.uv/cache

2026-01-01 01:32:40 UTC (buildkit.dockerfile.v0)

ENV UV_NO_CACHE=1

2026-01-01 01:32:40 UTC (buildkit.dockerfile.v0)

ENV UV_LINK_MODE=copy

2026-01-01 01:32:40 UTC (buildkit.dockerfile.v0)

ENV UV_PYTHON_BIN_DIR=/.uv/python_bin

2026-01-01 01:32:40 UTC (buildkit.dockerfile.v0)

ENV UV_PYTHON_INSTALL_DIR=/.uv/python_install

2026-01-01 01:32:49 UTC (buildkit.dockerfile.v0)

RUN |2 BASE_IMAGE=nvidia/cuda:12.4.1-cudnn-devel-ubuntu22.04 TARGETARCH=arm64 /bin/bash -c umask 002 && /bin/bash -c "$@" - set -euo pipefail && mkdir -p "${UV_CACHE_DIR}" "${UV_PYTHON_BIN_DIR}" "${UV_PYTHON_INSTALL_DIR}" && curl -LsSf https://astral.sh/uv/install.sh -o /tmp/uv-install.sh && chmod +x /tmp/uv-install.sh && UV_UNMANAGED_INSTALL=/usr/local/bin /tmp/uv-install.sh && rm -rf /tmp/* # buildkit

2026-01-01 01:33:23 UTC (buildkit.dockerfile.v0)

RUN |2 BASE_IMAGE=nvidia/cuda:12.4.1-cudnn-devel-ubuntu22.04 TARGETARCH=arm64 /bin/bash -c umask 002 && /bin/bash -c "$@" - set -euo pipefail && git clone https://github.com/nvm-sh/nvm.git /opt/nvm && (cd /opt/nvm/ && git checkout `git describe --abbrev=0 --tags --match "v[0-9]*" $(git rev-list --tags --max-count=1)`) && source /opt/nvm/nvm.sh && nvm install --lts # buildkit

2026-01-01 01:33:23 UTC (buildkit.dockerfile.v0)

COPY ./portal-aio /opt/portal-aio # buildkit

2026-01-01 01:33:24 UTC (buildkit.dockerfile.v0)

COPY /go/caddy /opt/portal-aio/caddy_manager/caddy # buildkit

2026-01-01 01:33:24 UTC (buildkit.dockerfile.v0)

ARG TARGETARCH=arm64

2026-01-01 01:34:01 UTC (buildkit.dockerfile.v0)

RUN |2 BASE_IMAGE=nvidia/cuda:12.4.1-cudnn-devel-ubuntu22.04 TARGETARCH=arm64 /bin/bash -c umask 002 && /bin/bash -c "$@" - chown -R 0:0 /opt/portal-aio && set -euo pipefail && uv venv --seed /opt/portal-aio/venv -p 3.11 && mkdir -m 770 -p /var/log/portal && chown 0:0 /var/log/portal/ && mkdir -p opt/instance-tools/bin/ && . /opt/portal-aio/venv/bin/activate && uv pip install -r /opt/portal-aio/requirements.txt && deactivate && wget -O /opt/portal-aio/tunnel_manager/cloudflared https://github.com/cloudflare/cloudflared/releases/latest/download/cloudflared-linux-${TARGETARCH} && chmod +x /opt/portal-aio/tunnel_manager/cloudflared && ln -s /opt/portal-aio/caddy_manager/caddy /opt/instance-tools/bin/caddy && ln -s /opt/portal-aio/tunnel_manager/cloudflared /opt/instance-tools/bin/cloudflared && apt-get clean && rm -rf /var/lib/apt/lists/* # buildkit

2026-01-01 01:40:17 UTC (buildkit.dockerfile.v0)

RUN |2 BASE_IMAGE=nvidia/cuda:12.4.1-cudnn-devel-ubuntu22.04 TARGETARCH=arm64 /bin/bash -c umask 002 && /bin/bash -c "$@" - set -euo pipefail && cd /opt && git clone https://github.com/vast-ai/vast-cli && wget -O /usr/local/share/ca-certificates/jvastai.crt https://console.vast.ai/static/jvastai_root.cer && update-ca-certificates && pip install --no-cache-dir --ignore-installed jupyter supervisor tensorboard magic-wormhole && mkdir -p /var/log/supervisor # buildkit

2026-01-01 01:40:17 UTC (buildkit.dockerfile.v0)

ARG TARGETARCH=arm64

2026-01-01 01:40:21 UTC (buildkit.dockerfile.v0)

RUN |2 BASE_IMAGE=nvidia/cuda:12.4.1-cudnn-devel-ubuntu22.04 TARGETARCH=arm64 /bin/bash -c umask 002 && /bin/bash -c "$@" - set -euo pipefail && SYNCTHING_VERSION="$(curl -fsSL "https://api.github.com/repos/syncthing/syncthing/releases/latest" | jq -r '.tag_name' | sed 's/[^0-9\.\-]*//g')" && SYNCTHING_URL="https://github.com/syncthing/syncthing/releases/download/v${SYNCTHING_VERSION}/syncthing-linux-${TARGETARCH}-v${SYNCTHING_VERSION}.tar.gz" && mkdir -p /opt/syncthing/config && mkdir -p /opt/syncthing/data && wget -O /opt/syncthing.tar.gz $SYNCTHING_URL && (cd /opt && tar -zxf syncthing.tar.gz -C /opt/syncthing/ --strip-components=1) && chown -R user:root /opt/syncthing && rm -f /opt/syncthing.tar.gz # buildkit

2026-01-01 01:40:21 UTC (buildkit.dockerfile.v0)

ARG BASE_IMAGE=nvidia/cuda:12.4.1-cudnn-devel-ubuntu22.04

2026-01-01 01:40:21 UTC (buildkit.dockerfile.v0)

ARG PYTHON_VERSION=3.10

2026-01-01 01:40:21 UTC (buildkit.dockerfile.v0)

ENV PYTHON_VERSION=3.10

2026-01-01 02:10:01 UTC (buildkit.dockerfile.v0)

RUN |3 BASE_IMAGE=nvidia/cuda:12.4.1-cudnn-devel-ubuntu22.04 TARGETARCH=arm64 PYTHON_VERSION=3.10 /bin/bash -c umask 002 && /bin/bash -c "$@" - set -euo pipefail && curl -L -o /tmp/miniforge3.sh "https://github.com/conda-forge/miniforge/releases/latest/download/Miniforge3-$(uname)-$(uname -m).sh" && bash /tmp/miniforge3.sh -b -p /opt/miniforge3 && /opt/miniforge3/bin/conda init && su -l user -c "/opt/miniforge3/bin/conda init" && mkdir -p /venv && /opt/miniforge3/bin/conda config --set auto_activate_base false && /opt/miniforge3/bin/conda config --set always_copy true && /opt/miniforge3/bin/conda config --set pip_interop_enabled true && /opt/miniforge3/bin/conda config --add envs_dirs /venv && /opt/miniforge3/bin/conda config --set env_prompt '({name}) ' && su -l user -c "/opt/miniforge3/bin/conda config --set auto_activate_base false" && su -l user -c "/opt/miniforge3/bin/conda config --set always_copy true" && su -l user -c "/opt/miniforge3/bin/conda config --set pip_interop_enabled true" && su -l user -c "/opt/miniforge3/bin/conda config --add envs_dirs /venv" && su -l user -c "/opt/miniforge3/bin/conda config --set env_prompt '({name}) '" && if [[ "$BASE_IMAGE" == *"nvidia"* ]]; then /opt/miniforge3/bin/conda config --add channels nvidia; su -l user -c "/opt/miniforge3/bin/conda config --add channels nvidia"; fi && /opt/miniforge3/bin/conda create -p /venv/main python="${PYTHON_VERSION}" -y && mkdir -p /venv/main/etc/conda/{activate.d,deactivate.d} && echo 'echo -e "\033[32mActivated conda/uv virtual environment at \033[36m$(realpath $CONDA_PREFIX)\033[0m"' > /venv/main/etc/conda/activate.d/environment.sh && /opt/miniforge3/bin/conda clean -ay && rm -rf /tmp/* # buildkit

2026-01-01 02:10:01 UTC (buildkit.dockerfile.v0)

RUN |3 BASE_IMAGE=nvidia/cuda:12.4.1-cudnn-devel-ubuntu22.04 TARGETARCH=arm64 PYTHON_VERSION=3.10 /bin/bash -c umask 002 && /bin/bash -c "$@" - cat <<'CONDA_ACTIVATION_SCRIPT' > /venv/main/bin/activate
if [[ "${BASH_SOURCE[0]}" == "${0}" ]]; then
    echo "This script must be sourced: source bin/activate"
    exit 1
fi

# Define deactivate function
deactivate() {
    # Deactivate conda environment
    if type conda &> /dev/null; then
        conda deactivate 2>/dev/null || true
    fi
    # Unset the deactivate function itself
    unset -f deactivate
    # Return success
    return 0
}

# Check if conda is properly initialized by testing for the conda shell function
# (not just the command existence)
if ! type conda &> /dev/null || ! declare -F conda &> /dev/null; then
    # Add condabin to PATH if not already there
    if [[ "$PATH" != *"/opt/miniforge3/condabin"* ]]; then
        export PATH="/opt/miniforge3/condabin:$PATH"
    fi
    # Source the conda shell script to load shell functions
    if [[ -f /opt/miniforge3/etc/profile.d/conda.sh ]]; then
        source /opt/miniforge3/etc/profile.d/conda.sh
    fi
fi

# Activate the conda environment
conda activate "$(realpath /venv/main)"
CONDA_ACTIVATION_SCRIPT # buildkit

2026-01-01 02:11:15 UTC (buildkit.dockerfile.v0)

RUN |3 BASE_IMAGE=nvidia/cuda:12.4.1-cudnn-devel-ubuntu22.04 TARGETARCH=arm64 PYTHON_VERSION=3.10 /bin/bash -c umask 002 && /bin/bash -c "$@" - set -euo pipefail && . /venv/main/bin/activate && uv pip install wheel huggingface-hub[cli] ipykernel ipywidgets && python -m ipykernel install --name="main" --display-name="Python3 (main venv)" && python -m ipykernel install --name="python3" --display-name="Python3 (ipykernel)" && deactivate && /usr/bin/pip install conda-pack ipykernel && /usr/bin/python3 -m ipykernel install --name="system-python" --display-name="Python3 (System)" && apt-get clean && rm -rf /var/lib/apt/lists/* # buildkit

2026-01-01 02:11:15 UTC (buildkit.dockerfile.v0)

ENV PATH=/opt/instance-tools/bin:/usr/local/nvidia/bin:/usr/local/cuda/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin

2026-01-01 02:11:18 UTC (buildkit.dockerfile.v0)

RUN |3 BASE_IMAGE=nvidia/cuda:12.4.1-cudnn-devel-ubuntu22.04 TARGETARCH=arm64 PYTHON_VERSION=3.10 /bin/bash -c umask 002 && /bin/bash -c "$@" - set -euo pipefail && env-hash > /.env_hash # buildkit

2026-01-01 02:11:18 UTC (buildkit.dockerfile.v0)

ENTRYPOINT ["/opt/instance-tools/bin/entrypoint.sh"]

2026-01-01 02:11:18 UTC (buildkit.dockerfile.v0)

CMD []

