tritonserver (25.01-py3-sdk)

Published 2025-06-04 16:37:16 +07:00 by tonkaew131 in tonkaew131/tritonserver

Installation

docker pull git.athichal.com/tonkaew131/tritonserver:25.01-py3-sdk
Digest: sha256:f2c7eafa25fabc49f14322515ad768a93e8b924d8e2ed1c121cee3d02d30a6ac
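
To pin this exact build rather than the movable tag, the image can also be pulled by digest; a minimal sketch, assuming the sha256 above is the manifest digest (the usual convention on OCI registries):

docker pull git.athichal.com/tonkaew131/tritonserver@sha256:f2c7eafa25fabc49f14322515ad768a93e8b924d8e2ed1c121cee3d02d30a6ac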

Image Layers

ARG RELEASE
ARG LAUNCHPAD_BUILD_ARCH
LABEL org.opencontainers.image.ref.name=ubuntu
LABEL org.opencontainers.image.version=24.04
ADD file:bcebbf0fddcba5b864d5d267b68dd23bcfb01275e6ec7bcab69bf8b56af14804 in /
CMD ["/bin/bash"]
RUN /bin/sh -c export DEBIAN_FRONTEND=noninteractive && apt-get update && apt-get install -y --no-install-recommends apt-utils build-essential ca-certificates curl libncurses6 libncursesw6 patch wget rsync unzip jq gnupg libtcmalloc-minimal4 && rm -rf /var/lib/apt/lists/* && echo "hsts=0" > /root/.wgetrc # buildkit
ARG CUDA_VERSION=12.8.0.038
ARG CUDA_DRIVER_VERSION=570.86.10
ARG JETPACK_HOST_MOUNTS=
ENV CUDA_VERSION=12.8.0.038 CUDA_DRIVER_VERSION=570.86.10 CUDA_CACHE_DISABLE=1 NVIDIA_REQUIRE_JETPACK_HOST_MOUNTS=
RUN |3 CUDA_VERSION=12.8.0.038 CUDA_DRIVER_VERSION=570.86.10 JETPACK_HOST_MOUNTS= /bin/sh -c if [ -n "${JETPACK_HOST_MOUNTS}" ]; then echo "/usr/lib/aarch64-linux-gnu/tegra" > /etc/ld.so.conf.d/nvidia-tegra.conf && echo "/usr/lib/aarch64-linux-gnu/tegra-egl" >> /etc/ld.so.conf.d/nvidia-tegra.conf; fi # buildkit
RUN |3 CUDA_VERSION=12.8.0.038 CUDA_DRIVER_VERSION=570.86.10 JETPACK_HOST_MOUNTS= /bin/sh -c /nvidia/build-scripts/installCUDA.sh # buildkit
RUN |3 CUDA_VERSION=12.8.0.038 CUDA_DRIVER_VERSION=570.86.10 JETPACK_HOST_MOUNTS= /bin/sh -c cp -vprd /nvidia/. / && patch -p0 < /etc/startup_scripts.patch && rm -f /etc/startup_scripts.patch # buildkit
ENV _CUDA_COMPAT_PATH=/usr/local/cuda/compat ENV=/etc/shinit_v2 BASH_ENV=/etc/bash.bashrc SHELL=/bin/bash NVIDIA_REQUIRE_CUDA=cuda>=9.0
LABEL com.nvidia.volumes.needed=nvidia_driver com.nvidia.cuda.version=9.0
ARG NCCL_VERSION=2.25.1
ARG CUBLAS_VERSION=12.8.3.14
ARG CUFFT_VERSION=11.3.3.41
ARG CURAND_VERSION=10.3.9.55
ARG CUSPARSE_VERSION=12.5.7.53
ARG CUSOLVER_VERSION=11.7.2.55
ARG CUTENSOR_VERSION=2.1.0.9
ARG NPP_VERSION=12.3.3.65
ARG NVJPEG_VERSION=12.3.5.57
ARG CUFILE_VERSION=1.13.0.11
ARG NVJITLINK_VERSION=12.8.61
ARG CUDNN_VERSION=9.7.0.66
ARG CUDNN_FRONTEND_VERSION=1.9.0
ARG TRT_VERSION=10.8.0.43
ARG TRTOSS_VERSION=
ARG NSIGHT_SYSTEMS_VERSION=2024.6.2.225
ARG NSIGHT_COMPUTE_VERSION=2025.1.0.14
ARG CUSPARSELT_VERSION=0.6.3.2
ENV NCCL_VERSION=2.25.1 CUBLAS_VERSION=12.8.3.14 CUFFT_VERSION=11.3.3.41 CURAND_VERSION=10.3.9.55 CUSPARSE_VERSION=12.5.7.53 CUSPARSELT_VERSION=0.6.3.2 CUSOLVER_VERSION=11.7.2.55 CUTENSOR_VERSION=2.1.0.9 NPP_VERSION=12.3.3.65 NVJPEG_VERSION=12.3.5.57 CUFILE_VERSION=1.13.0.11 NVJITLINK_VERSION=12.8.61 CUDNN_VERSION=9.7.0.66 CUDNN_FRONTEND_VERSION=1.9.0 TRT_VERSION=10.8.0.43 TRTOSS_VERSION= NSIGHT_SYSTEMS_VERSION=2024.6.2.225 NSIGHT_COMPUTE_VERSION=2025.1.0.14
RUN |21 CUDA_VERSION=12.8.0.038 CUDA_DRIVER_VERSION=570.86.10 JETPACK_HOST_MOUNTS= NCCL_VERSION=2.25.1 CUBLAS_VERSION=12.8.3.14 CUFFT_VERSION=11.3.3.41 CURAND_VERSION=10.3.9.55 CUSPARSE_VERSION=12.5.7.53 CUSOLVER_VERSION=11.7.2.55 CUTENSOR_VERSION=2.1.0.9 NPP_VERSION=12.3.3.65 NVJPEG_VERSION=12.3.5.57 CUFILE_VERSION=1.13.0.11 NVJITLINK_VERSION=12.8.61 CUDNN_VERSION=9.7.0.66 CUDNN_FRONTEND_VERSION=1.9.0 TRT_VERSION=10.8.0.43 TRTOSS_VERSION= NSIGHT_SYSTEMS_VERSION=2024.6.2.225 NSIGHT_COMPUTE_VERSION=2025.1.0.14 CUSPARSELT_VERSION=0.6.3.2 /bin/sh -c /nvidia/build-scripts/installLIBS.sh && /nvidia/build-scripts/installCUDNN.sh && /nvidia/build-scripts/installTRT.sh && /nvidia/build-scripts/installNSYS.sh && /nvidia/build-scripts/installNCU.sh && /nvidia/build-scripts/installCUTENSOR.sh && /nvidia/build-scripts/installCUSPARSELT.sh && if [ -z "${JETPACK_HOST_MOUNTS}" ]; then /nvidia/build-scripts/installNCCL.sh; fi; # buildkit
LABEL com.nvidia.nccl.version=2.25.1 com.nvidia.cublas.version=12.8.3.14 com.nvidia.cufft.version=11.3.3.41 com.nvidia.curand.version=10.3.9.55 com.nvidia.cusparse.version=12.5.7.53 com.nvidia.cusparselt.version=0.6.3.2 com.nvidia.cusolver.version=11.7.2.55 com.nvidia.cutensor.version=2.1.0.9 com.nvidia.npp.version=12.3.3.65 com.nvidia.nvjpeg.version=12.3.5.57 com.nvidia.cudnn.version=9.7.0.66 com.nvidia.tensorrt.version=10.8.0.43 com.nvidia.tensorrtoss.version= com.nvidia.nsightsystems.version=2024.6.2.225 com.nvidia.nsightcompute.version=2025.1.0.14
ARG DALI_VERSION=1.45.0
ARG DALI_BUILD=
ARG DALI_URL_SUFFIX=120
ARG POLYGRAPHY_VERSION=0.49.16
ARG TRANSFORMER_ENGINE_VERSION=1.14
ARG MODEL_OPT_VERSION=0.21.0
ENV DALI_VERSION=1.45.0 DALI_BUILD= DALI_URL_SUFFIX=120 POLYGRAPHY_VERSION=0.49.16 TRANSFORMER_ENGINE_VERSION=1.14 MODEL_OPT_VERSION=0.21.0
ADD docs.tgz / # buildkit
RUN |27 CUDA_VERSION=12.8.0.038 CUDA_DRIVER_VERSION=570.86.10 JETPACK_HOST_MOUNTS= NCCL_VERSION=2.25.1 CUBLAS_VERSION=12.8.3.14 CUFFT_VERSION=11.3.3.41 CURAND_VERSION=10.3.9.55 CUSPARSE_VERSION=12.5.7.53 CUSOLVER_VERSION=11.7.2.55 CUTENSOR_VERSION=2.1.0.9 NPP_VERSION=12.3.3.65 NVJPEG_VERSION=12.3.5.57 CUFILE_VERSION=1.13.0.11 NVJITLINK_VERSION=12.8.61 CUDNN_VERSION=9.7.0.66 CUDNN_FRONTEND_VERSION=1.9.0 TRT_VERSION=10.8.0.43 TRTOSS_VERSION= NSIGHT_SYSTEMS_VERSION=2024.6.2.225 NSIGHT_COMPUTE_VERSION=2025.1.0.14 CUSPARSELT_VERSION=0.6.3.2 DALI_VERSION=1.45.0 DALI_BUILD= DALI_URL_SUFFIX=120 POLYGRAPHY_VERSION=0.49.16 TRANSFORMER_ENGINE_VERSION=1.14 MODEL_OPT_VERSION=0.21.0 /bin/sh -c echo "/usr/local/nvidia/lib" >> /etc/ld.so.conf.d/nvidia.conf && echo "/usr/local/nvidia/lib64" >> /etc/ld.so.conf.d/nvidia.conf # buildkit
ARG _LIBPATH_SUFFIX=
ENV PATH=/usr/local/nvidia/bin:/usr/local/cuda/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin LD_LIBRARY_PATH=/usr/local/cuda/compat/lib:/usr/local/nvidia/lib:/usr/local/nvidia/lib64 NVIDIA_VISIBLE_DEVICES=all NVIDIA_DRIVER_CAPABILITIES=compute,utility,video
COPY entrypoint/ /opt/nvidia/ # buildkit
ENV NVIDIA_PRODUCT_NAME=CUDA
ENTRYPOINT ["/opt/nvidia/nvidia_entrypoint.sh"]
COPY NVIDIA_Deep_Learning_Container_License.pdf /workspace/ # buildkit
RUN /bin/sh -c export DEBIAN_FRONTEND=noninteractive && apt-get update && apt-get install -y --no-install-recommends build-essential git libglib2.0-0 less libhwloc15 libnl-route-3-200 libnl-3-dev libnl-route-3-dev libnuma-dev libnuma1 libpmi2-0-dev nano numactl openssh-client vim wget && rm -rf /var/lib/apt/lists/* # buildkit
ARG GDRCOPY_VERSION=2.4.1
ARG HPCX_VERSION=2.21
ARG RDMACORE_VERSION=39.0
ARG MOFED_VERSION=5.4-rdmacore39.0
ARG OPENUCX_VERSION=1.18.0
ARG OPENMPI_VERSION=4.1.7
ARG EFA_VERSION=1.34.0
ARG AWS_OFI_NCCL_VERSION=1.12.1
ENV GDRCOPY_VERSION=2.4.1 HPCX_VERSION=2.21 MOFED_VERSION=5.4-rdmacore39.0 OPENUCX_VERSION=1.18.0 OPENMPI_VERSION=4.1.7 RDMACORE_VERSION=39.0 EFA_VERSION=1.34.0 AWS_OFI_NCCL_VERSION=1.12.1
ARG TARGETARCH=amd64
RUN |9 GDRCOPY_VERSION=2.4.1 HPCX_VERSION=2.21 RDMACORE_VERSION=39.0 MOFED_VERSION=5.4-rdmacore39.0 OPENUCX_VERSION=1.18.0 OPENMPI_VERSION=4.1.7 EFA_VERSION=1.34.0 AWS_OFI_NCCL_VERSION=1.12.1 TARGETARCH=amd64 /bin/sh -c cd /nvidia && ( export DEBIAN_FRONTEND=noninteractive && apt-get update && apt-get install -y --no-install-recommends libibverbs1 libibverbs-dev librdmacm1 librdmacm-dev libibumad3 libibumad-dev ibverbs-utils ibverbs-providers && rm -rf /var/lib/apt/lists/* && rm $(dpkg-query -L libibverbs-dev librdmacm-dev libibumad-dev | grep "\(\.so\|\.a\)$") ) && ( cd opt/gdrcopy/ && dpkg -i libgdrapi_*.deb ) && ( cp -r opt/hpcx /opt/ && cp etc/ld.so.conf.d/hpcx.conf /etc/ld.so.conf.d/ && ln -sf /opt/hpcx/ompi /usr/local/mpi && ln -sf /opt/hpcx/ucx /usr/local/ucx && sed -i 's/^\(hwloc_base_binding_policy\) = core$/\1 = none/' /opt/hpcx/ompi/etc/openmpi-mca-params.conf && sed -i 's/^\(btl = self\)$/#\1/' /opt/hpcx/ompi/etc/openmpi-mca-params.conf ) && ( if [ ! -f /etc/ld.so.conf.d/nvidia-tegra.conf ]; then cd opt/amazon/efa/ && dpkg -i libfabric*.deb && rm /opt/amazon/efa/lib/libfabric.a && echo "/opt/amazon/efa/lib" > /etc/ld.so.conf.d/efa.conf; fi ) && ldconfig # buildkit
ENV OPAL_PREFIX=/opt/hpcx/ompi PATH=/usr/local/mpi/bin:/usr/local/nvidia/bin:/usr/local/cuda/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/ucx/bin:/opt/amazon/efa/bin
ENV OMPI_MCA_coll_hcoll_enable=0
ENV NCCL_NVLS_ENABLE=0
COPY cuda-*.patch /tmp # buildkit
RUN |9 GDRCOPY_VERSION=2.4.1 HPCX_VERSION=2.21 RDMACORE_VERSION=39.0 MOFED_VERSION=5.4-rdmacore39.0 OPENUCX_VERSION=1.18.0 OPENMPI_VERSION=4.1.7 EFA_VERSION=1.34.0 AWS_OFI_NCCL_VERSION=1.12.1 TARGETARCH=amd64 /bin/sh -c export DEVEL=1 BASE=0 && /nvidia/build-scripts/installNCU.sh && /nvidia/build-scripts/installCUDA.sh && /nvidia/build-scripts/installLIBS.sh && if [ ! -f /etc/ld.so.conf.d/nvidia-tegra.conf ]; then /nvidia/build-scripts/installNCCL.sh; fi && /nvidia/build-scripts/installCUDNN.sh && /nvidia/build-scripts/installCUTENSOR.sh && /nvidia/build-scripts/installTRT.sh && /nvidia/build-scripts/installNSYS.sh && /nvidia/build-scripts/installCUSPARSELT.sh && if [ -f "/tmp/cuda-${_CUDA_VERSION_MAJMIN}.patch" ]; then patch -p0 < /tmp/cuda-${_CUDA_VERSION_MAJMIN}.patch; fi && rm -f /tmp/cuda-*.patch # buildkit
ENV LIBRARY_PATH=/usr/local/cuda/lib64/stubs:
COPY /opt/amazon/aws-ofi-nccl /opt/amazon/aws-ofi-nccl # buildkit
RUN |9 GDRCOPY_VERSION=2.4.1 HPCX_VERSION=2.21 RDMACORE_VERSION=39.0 MOFED_VERSION=5.4-rdmacore39.0 OPENUCX_VERSION=1.18.0 OPENMPI_VERSION=4.1.7 EFA_VERSION=1.34.0 AWS_OFI_NCCL_VERSION=1.12.1 TARGETARCH=amd64 /bin/sh -c if [ ! -f /etc/ld.so.conf.d/nvidia-tegra.conf ]; then echo "/opt/amazon/aws-ofi-nccl/lib" > /etc/ld.so.conf.d/aws-ofi-nccl.conf && ldconfig; fi # buildkit
ENV DEBIAN_FRONTEND=noninteractive
ENV PIP_BREAK_SYSTEM_PACKAGES=1
ARG DCGM_VERSION=3.3.6
ARG TRITON_REPO_ORGANIZATION=https://github.com/triton-inference-server
ARG TRITON_CORE_REPO_TAG=r25.01
ARG TARGETPLATFORM=linux/amd64
ARG TRITON_ENABLE_GPU=ON
RUN |5 DCGM_VERSION=3.3.6 TRITON_REPO_ORGANIZATION=https://github.com/triton-inference-server TRITON_CORE_REPO_TAG=r25.01 TARGETPLATFORM=linux/amd64 TRITON_ENABLE_GPU=ON /bin/sh -c apt-get update && apt-get install -y --no-install-recommends software-properties-common curl git gperf libb64-dev libgoogle-perftools-dev libopencv-dev libopencv-core-dev libssl-dev libtool python3 python3-pip python3-dev python3-wheel python3-setuptools vim wget python3-pdfkit maven default-jdk && pip3 install "grpcio<1.68" "grpcio-tools<1.68" # buildkit
WORKDIR /workspace
COPY TRITON_VERSION . # buildkit
COPY NVIDIA_Deep_Learning_Container_License.pdf . # buildkit
COPY /workspace/client/ client/ # buildkit
COPY /workspace/perf_analyzer/ perf_analyzer/ # buildkit
COPY /workspace/install/ install/ # buildkit
RUN |5 DCGM_VERSION=3.3.6 TRITON_REPO_ORGANIZATION=https://github.com/triton-inference-server TRITON_CORE_REPO_TAG=r25.01 TARGETPLATFORM=linux/amd64 TRITON_ENABLE_GPU=ON /bin/sh -c cd install && export VERSION=`cat /workspace/TRITON_VERSION` && tar zcf /workspace/v$VERSION.clients.tar.gz * # buildkit
RUN |5 DCGM_VERSION=3.3.6 TRITON_REPO_ORGANIZATION=https://github.com/triton-inference-server TRITON_CORE_REPO_TAG=r25.01 TARGETPLATFORM=linux/amd64 TRITON_ENABLE_GPU=ON /bin/sh -c mkdir qa # buildkit
COPY qa/L0_sdk qa/L0_sdk # buildkit
COPY qa/L0_client_build_variants qa/L0_client_build_variants # buildkit
RUN |5 DCGM_VERSION=3.3.6 TRITON_REPO_ORGANIZATION=https://github.com/triton-inference-server TRITON_CORE_REPO_TAG=r25.01 TARGETPLATFORM=linux/amd64 TRITON_ENABLE_GPU=ON /bin/sh -c mkdir -p qa/python_client_unit_tests/ # buildkit
COPY /workspace/client/src/python/library/tests/* qa/python_client_unit_tests/ # buildkit
COPY qa/images/mug.jpg images/mug.jpg # buildkit
RUN |5 DCGM_VERSION=3.3.6 TRITON_REPO_ORGANIZATION=https://github.com/triton-inference-server TRITON_CORE_REPO_TAG=r25.01 TARGETPLATFORM=linux/amd64 TRITON_ENABLE_GPU=ON /bin/sh -c pip3 install --upgrade "numpy<2" pillow attrdict && find install/python/ -maxdepth 1 -type f -name "tritonclient-*linux*.whl" | xargs printf -- '%s[all]' | xargs pip3 install --upgrade # buildkit
RUN |5 DCGM_VERSION=3.3.6 TRITON_REPO_ORGANIZATION=https://github.com/triton-inference-server TRITON_CORE_REPO_TAG=r25.01 TARGETPLATFORM=linux/amd64 TRITON_ENABLE_GPU=ON /bin/sh -c pip3 install install/python/genai_perf-*.whl # buildkit
RUN |5 DCGM_VERSION=3.3.6 TRITON_REPO_ORGANIZATION=https://github.com/triton-inference-server TRITON_CORE_REPO_TAG=r25.01 TARGETPLATFORM=linux/amd64 TRITON_ENABLE_GPU=ON /bin/sh -c if [ "$TRITON_ENABLE_GPU" = "ON" ]; then [ "$(uname -m)" != "x86_64" ] && arch="sbsa" || arch="x86_64" && curl -o /tmp/cuda-keyring.deb https://developer.download.nvidia.com/compute/cuda/repos/ubuntu2404/$arch/cuda-keyring_1.1-1_all.deb && apt install /tmp/cuda-keyring.deb && rm /tmp/cuda-keyring.deb && apt-get update && apt-get install -y datacenter-gpu-manager=1:${DCGM_VERSION}; fi # buildkit
RUN |5 DCGM_VERSION=3.3.6 TRITON_REPO_ORGANIZATION=https://github.com/triton-inference-server TRITON_CORE_REPO_TAG=r25.01 TARGETPLATFORM=linux/amd64 TRITON_ENABLE_GPU=ON /bin/sh -c rm -f /usr/bin/python && ln -s /usr/bin/python3 /usr/bin/python # buildkit
ARG TRITON_MODEL_ANALYZER_REPO_TAG=r25.01
ARG TRITON_MODEL_ANALYZER_REPO=https://github.com/triton-inference-server/model_analyzer@r25.01
RUN |7 DCGM_VERSION=3.3.6 TRITON_REPO_ORGANIZATION=https://github.com/triton-inference-server TRITON_CORE_REPO_TAG=r25.01 TARGETPLATFORM=linux/amd64 TRITON_ENABLE_GPU=ON TRITON_MODEL_ANALYZER_REPO_TAG=r25.01 TRITON_MODEL_ANALYZER_REPO=https://github.com/triton-inference-server/model_analyzer@r25.01 /bin/sh -c pip3 install "git+${TRITON_MODEL_ANALYZER_REPO}" # buildkit
ENV NVIDIA_PRODUCT_NAME=Triton Server SDK
COPY docker/entrypoint.d/ /opt/nvidia/entrypoint.d/ # buildkit
RUN |7 DCGM_VERSION=3.3.6 TRITON_REPO_ORGANIZATION=https://github.com/triton-inference-server TRITON_CORE_REPO_TAG=r25.01 TARGETPLATFORM=linux/amd64 TRITON_ENABLE_GPU=ON TRITON_MODEL_ANALYZER_REPO_TAG=r25.01 TRITON_MODEL_ANALYZER_REPO=https://github.com/triton-inference-server/model_analyzer@r25.01 /bin/sh -c sed 's/Server/Server SDK/' /opt/nvidia/entrypoint.d/10-banner.txt | sed 's/^===/=======/' > /opt/nvidia/entrypoint.d/10-banner.new && mv /opt/nvidia/entrypoint.d/10-banner.new /opt/nvidia/entrypoint.d/10-banner.txt # buildkit
ARG NVIDIA_TRITON_SERVER_SDK_VERSION=25.01
ARG NVIDIA_BUILD_ID=136230215
ENV NVIDIA_TRITON_SERVER_SDK_VERSION=25.01
ENV NVIDIA_BUILD_ID=136230215
ENV PATH=/workspace/install/bin:/usr/local/mpi/bin:/usr/local/nvidia/bin:/usr/local/cuda/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/ucx/bin:/opt/amazon/efa/bin
ENV LD_LIBRARY_PATH=/workspace/install/lib:/usr/local/cuda/compat/lib:/usr/local/nvidia/lib:/usr/local/nvidia/lib64
ENV LD_LIBRARY_PATH=/opt/hpcx/ompi/lib:/workspace/install/lib:/usr/local/cuda/compat/lib:/usr/local/nvidia/lib:/usr/local/nvidia/lib64
ENV TCMALLOC_RELEASE_RATE=200
ARG MINICONDA3_VERSION=latest
ENV MINICONDA3_VERSION=latest
ARG GO_VERSION=1.24.3
ENV GO_VERSION=1.24.3
ARG SINGULARITY_VERSION=4.3.1
ENV SINGULARITY_VERSION=4.3.1
RUN |3 MINICONDA3_VERSION=latest GO_VERSION=1.24.3 SINGULARITY_VERSION=4.3.1 /bin/sh -c echo -e '\n\n\n' >> ~/.bashrc # buildkit
RUN |3 MINICONDA3_VERSION=latest GO_VERSION=1.24.3 SINGULARITY_VERSION=4.3.1 /bin/sh -c apt-get update # buildkit
RUN |3 MINICONDA3_VERSION=latest GO_VERSION=1.24.3 SINGULARITY_VERSION=4.3.1 /bin/sh -c apt-get install -y python3.12-venv # buildkit
RUN |3 MINICONDA3_VERSION=latest GO_VERSION=1.24.3 SINGULARITY_VERSION=4.3.1 /bin/sh -c wget https://go.dev/dl/go${GO_VERSION}.linux-amd64.tar.gz && rm -rf /usr/local/go && tar -C /usr/local -xzf go${GO_VERSION}.linux-amd64.tar.gz && rm go${GO_VERSION}.linux-amd64.tar.gz # buildkit
RUN |3 MINICONDA3_VERSION=latest GO_VERSION=1.24.3 SINGULARITY_VERSION=4.3.1 /bin/sh -c echo 'export PATH=/usr/local/go/bin:$PATH' >> ~/.bashrc # buildkit
ENV PATH=/usr/local/go/bin:/workspace/install/bin:/usr/local/mpi/bin:/usr/local/nvidia/bin:/usr/local/cuda/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/ucx/bin:/opt/amazon/efa/bin
RUN |3 MINICONDA3_VERSION=latest GO_VERSION=1.24.3 SINGULARITY_VERSION=4.3.1 /bin/sh -c apt-get update && apt-get install -y autoconf automake cryptsetup fuse2fs git fuse libfuse-dev libseccomp-dev libtool pkg-config runc squashfs-tools squashfs-tools-ng uidmap wget zlib1g-dev libsubid-dev # buildkit
RUN |3 MINICONDA3_VERSION=latest GO_VERSION=1.24.3 SINGULARITY_VERSION=4.3.1 /bin/sh -c wget https://github.com/sylabs/singularity/releases/download/v${SINGULARITY_VERSION}/singularity-ce-${SINGULARITY_VERSION}.tar.gz && tar -xzf singularity-ce-${SINGULARITY_VERSION}.tar.gz && cd singularity-ce-${SINGULARITY_VERSION} && ./mconfig && make -C builddir && make -C builddir install && cd .. && rm -rf singularity-ce-${SINGULARITY_VERSION} singularity-ce-${SINGULARITY_VERSION}.tar.gz # buildkit
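
The layers above install the Triton client stack (the tritonclient wheel with all extras, genai-perf, and the model analyzer) and put /workspace/install/bin, which holds perf_analyzer, on PATH. A minimal sketch of exercising perf_analyzer from this image against a Triton server on the host network; the model name densenet_onnx and the gRPC port 8001 are illustrative assumptions, not something this page specifies:

docker run -it --rm --net=host git.athichal.com/tonkaew131/tritonserver:25.01-py3-sdk \
  perf_analyzer -m densenet_onnx -u localhost:8001 -i grpc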

Labels

Key Value
com.nvidia.cublas.version 12.8.3.14
com.nvidia.cuda.version 9.0
com.nvidia.cudnn.version 9.7.0.66
com.nvidia.cufft.version 11.3.3.41
com.nvidia.curand.version 10.3.9.55
com.nvidia.cusolver.version 11.7.2.55
com.nvidia.cusparse.version 12.5.7.53
com.nvidia.cusparselt.version 0.6.3.2
com.nvidia.cutensor.version 2.1.0.9
com.nvidia.nccl.version 2.25.1
com.nvidia.npp.version 12.3.3.65
com.nvidia.nsightcompute.version 2025.1.0.14
com.nvidia.nsightsystems.version 2024.6.2.225
com.nvidia.nvjpeg.version 12.3.5.57
com.nvidia.tensorrt.version 10.8.0.43
com.nvidia.tensorrtoss.version
com.nvidia.volumes.needed nvidia_driver
org.opencontainers.image.ref.name ubuntu
org.opencontainers.image.version 24.04
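
Because the component versions are recorded as image labels, any entry in the table above can be queried locally after a pull; a sketch using standard Docker templating:

docker inspect --format '{{ index .Config.Labels "com.nvidia.tensorrt.version" }}' git.athichal.com/tonkaew131/tritonserver:25.01-py3-sdk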

Details

Type: Container
Published: 2025-06-04 16:37:16 +07:00
Format: OCI / Docker
Platform: linux/amd64
Size: 9.6 GiB

Versions (1)

25.01-py3-sdk (2025-06-04)
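
The build-time record under "Image Layers" can be cross-checked locally once the image is pulled; a sketch using plain Docker, nothing registry-specific:

docker history --no-trunc git.athichal.com/tonkaew131/tritonserver:25.01-py3-sdk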