
Commit 974d44a
Merge remote-tracking branch 'upstream/main'
2 parents: 965f563 + fb65049

File tree
42 files changed: +270 -407 lines changed


.dockerignore

Lines changed: 9 additions & 1 deletion
@@ -29,5 +29,13 @@ venv/
 *.log
 
 # OS-specific files
-.DS_Store
+**/.DS_Store
 Thumbs.db
+
+# Node
+**/node_modules/
+
+# Documentation
+*.md
+docs/
+examples/

base-images/cpu/c9s-python-3.12/Dockerfile.cpu

Lines changed: 1 addition & 1 deletion
@@ -60,7 +60,7 @@ COPY --from=buildscripts /mnt/usr/bin/ /usr/bin/
 
 RUN \
     --mount=from=buildscripts,source=/mnt,target=/mnt \
-    --mount=type=cache,sharing=locked,id=dnf-c9s,target=/var/cache/dnf \
+    --mount=type=cache,sharing=locked,id=notebooks-dnf,target=/var/cache/dnf \
     /bin/bash <<'EOF'
 set -Eeuxo pipefail
 /mnt/aipcc.sh

base-images/cpu/ubi9-python-3.12/Dockerfile.cpu

Lines changed: 1 addition & 1 deletion
@@ -12,7 +12,7 @@ USER 0
 
 RUN \
     --mount=from=buildscripts,source=/mnt,target=/mnt \
-    --mount=type=cache,sharing=locked,id=dnf-ubi9,target=/var/cache/dnf \
+    --mount=type=cache,sharing=locked,id=notebooks-dnf,target=/var/cache/dnf \
     /bin/bash <<'EOF'
 /mnt/aipcc.sh
 EOF
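
The change applied across all of these Dockerfiles follows one pattern: the per-image dnf cache ids (dnf-c9s, dnf-ubi9, ...) are unified into a single BuildKit cache mount named notebooks-dnf, dnf keeps its downloaded packages via --setopt=keepcache=1, and the trailing dnf clean all / rm -rf /var/cache/yum steps are dropped because the cache lives in the mount rather than in the image layer. A minimal sketch of the pattern, assuming a generic UBI9 base image (the package names below are taken from the devel-tools step later in this commit):

# syntax=docker/dockerfile:1
FROM registry.access.redhat.com/ubi9/ubi

# /var/cache/dnf is a BuildKit cache shared across builds via a common id;
# sharing=locked serializes concurrent builds that use the same cache.
# keepcache=1 leaves the downloaded RPMs in the mount, so no `dnf clean all`
# is needed and the image layer stays free of cached packages.
RUN --mount=type=cache,target=/var/cache/dnf,sharing=locked,id=notebooks-dnf \
    dnf install -y --setopt=keepcache=1 \
        make \
        findutils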

base-images/cuda/12.6/c9s-python-3.11/Dockerfile.cuda

Lines changed: 9 additions & 18 deletions
@@ -39,11 +39,9 @@ RUN NVIDIA_GPGKEY_SUM=d0664fbbdb8c32356d45de36c5984617217b2d0bef41b93ccecd326ba3
 ENV CUDA_VERSION=12.6.3
 
 # For libraries in the cuda-compat-* package: https://docs.nvidia.com/cuda/eula/index.html#attachment-a
-RUN dnf upgrade -y && dnf install -y \
+RUN --mount=type=cache,target=/var/cache/dnf,sharing=locked,id=notebooks-dnf dnf upgrade -y --setopt=keepcache=1 && dnf install -y --setopt=keepcache=1 \
     cuda-cudart-12-6-${NV_CUDA_CUDART_VERSION} \
-    cuda-compat-12-6 \
-    && dnf clean all \
-    && rm -rf /var/cache/yum/*
+    cuda-compat-12-6
 
 # nvidia-docker 1.0
 RUN echo "/usr/local/nvidia/lib" >> /etc/ld.so.conf.d/nvidia.conf && \
@@ -69,21 +67,17 @@ ENV NV_LIBNCCL_VERSION=2.23.4
 ENV NCCL_VERSION=2.23.4
 ENV NV_LIBNCCL_PACKAGE=${NV_LIBNCCL_PACKAGE_NAME}-${NV_LIBNCCL_PACKAGE_VERSION}+cuda12.6
 
-RUN dnf install -y \
+RUN --mount=type=cache,target=/var/cache/dnf,sharing=locked,id=notebooks-dnf dnf install -y --setopt=keepcache=1 \
     cuda-libraries-12-6-${NV_CUDA_LIB_VERSION} \
     cuda-nvtx-12-6-${NV_NVTX_VERSION} \
     ${NV_LIBNPP_PACKAGE} \
     libcublas-12-6-${NV_LIBCUBLAS_VERSION} \
-    ${NV_LIBNCCL_PACKAGE} \
-    && dnf clean all \
-    && rm -rf /var/cache/yum/*
+    ${NV_LIBNCCL_PACKAGE}
 
 # Install devel tools
-RUN dnf install -y \
+RUN --mount=type=cache,target=/var/cache/dnf,sharing=locked,id=notebooks-dnf dnf install -y --setopt=keepcache=1 \
     make \
-    findutils \
-    && dnf clean all \
-    && rm -rf /var/cache/yum/*
+    findutils
 
 # Install CUDA cudnn9 from:
 # https://gitlab.com/nvidia/container-images/cuda/-/blob/master/dist/12.6.3/ubi9/runtime/cudnn/Dockerfile
@@ -92,17 +86,14 @@ ENV NV_CUDNN_PACKAGE=libcudnn9-cuda-12-${NV_CUDNN_VERSION}
 
 LABEL com.nvidia.cudnn.version="${NV_CUDNN_VERSION}"
 
-RUN dnf install -y \
-    ${NV_CUDNN_PACKAGE} \
-    && dnf clean all \
-    && rm -rf /var/cache/yum/*
+RUN --mount=type=cache,target=/var/cache/dnf,sharing=locked,id=notebooks-dnf dnf install -y --setopt=keepcache=1 \
+    ${NV_CUDNN_PACKAGE}
 
 # Set this flag so that libraries can find the location of CUDA
 ENV XLA_FLAGS=--xla_gpu_cuda_data_dir=/usr/local/cuda
 
 # Install CUDA toolkit 12.6
-RUN dnf -y install cuda-toolkit-12-6 && \
-    dnf -y clean all --enablerepo="*"
+RUN --mount=type=cache,target=/var/cache/dnf,sharing=locked,id=notebooks-dnf dnf -y install --setopt=keepcache=1 cuda-toolkit-12-6
 
 # Restore user workspace
 USER 1001
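
Note that RUN --mount=type=cache is a BuildKit feature; the classic builder rejects the syntax. A hedged usage sketch for building one of these images locally (the image tag is a placeholder, and the build context is assumed to be the repository root -- the repo's own tooling supplies the real invocation):

# BuildKit is required for RUN --mount=type=cache; DOCKER_BUILDKIT=1 or buildx both enable it.
DOCKER_BUILDKIT=1 docker build \
    -f base-images/cuda/12.6/c9s-python-3.11/Dockerfile.cuda \
    -t notebooks-cuda-c9s-py311:dev \
    .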

base-images/cuda/12.6/c9s-python-3.12/Dockerfile.cuda

Lines changed: 9 additions & 18 deletions
@@ -39,11 +39,9 @@ RUN NVIDIA_GPGKEY_SUM=d0664fbbdb8c32356d45de36c5984617217b2d0bef41b93ccecd326ba3
 ENV CUDA_VERSION=12.6.3
 
 # For libraries in the cuda-compat-* package: https://docs.nvidia.com/cuda/eula/index.html#attachment-a
-RUN dnf upgrade -y && dnf install -y \
+RUN --mount=type=cache,target=/var/cache/dnf,sharing=locked,id=notebooks-dnf dnf upgrade -y --setopt=keepcache=1 && dnf install -y --setopt=keepcache=1 \
     cuda-cudart-12-6-${NV_CUDA_CUDART_VERSION} \
-    cuda-compat-12-6 \
-    && dnf clean all \
-    && rm -rf /var/cache/yum/*
+    cuda-compat-12-6
 
 # nvidia-docker 1.0
 RUN echo "/usr/local/nvidia/lib" >> /etc/ld.so.conf.d/nvidia.conf && \
@@ -69,21 +67,17 @@ ENV NV_LIBNCCL_VERSION=2.23.4
 ENV NCCL_VERSION=2.23.4
 ENV NV_LIBNCCL_PACKAGE=${NV_LIBNCCL_PACKAGE_NAME}-${NV_LIBNCCL_PACKAGE_VERSION}+cuda12.6
 
-RUN dnf install -y \
+RUN --mount=type=cache,target=/var/cache/dnf,sharing=locked,id=notebooks-dnf dnf install -y --setopt=keepcache=1 \
     cuda-libraries-12-6-${NV_CUDA_LIB_VERSION} \
     cuda-nvtx-12-6-${NV_NVTX_VERSION} \
     ${NV_LIBNPP_PACKAGE} \
     libcublas-12-6-${NV_LIBCUBLAS_VERSION} \
-    ${NV_LIBNCCL_PACKAGE} \
-    && dnf clean all \
-    && rm -rf /var/cache/yum/*
+    ${NV_LIBNCCL_PACKAGE}
 
 # Install devel tools
-RUN dnf install -y \
+RUN --mount=type=cache,target=/var/cache/dnf,sharing=locked,id=notebooks-dnf dnf install -y --setopt=keepcache=1 \
     make \
-    findutils \
-    && dnf clean all \
-    && rm -rf /var/cache/yum/*
+    findutils
 
 # Install CUDA cudnn9 from:
 # https://gitlab.com/nvidia/container-images/cuda/-/blob/master/dist/12.6.3/ubi9/runtime/cudnn/Dockerfile
@@ -92,17 +86,14 @@ ENV NV_CUDNN_PACKAGE=libcudnn9-cuda-12-${NV_CUDNN_VERSION}
 
 LABEL com.nvidia.cudnn.version="${NV_CUDNN_VERSION}"
 
-RUN dnf install -y \
-    ${NV_CUDNN_PACKAGE} \
-    && dnf clean all \
-    && rm -rf /var/cache/yum/*
+RUN --mount=type=cache,target=/var/cache/dnf,sharing=locked,id=notebooks-dnf dnf install -y --setopt=keepcache=1 \
+    ${NV_CUDNN_PACKAGE}
 
 # Set this flag so that libraries can find the location of CUDA
 ENV XLA_FLAGS=--xla_gpu_cuda_data_dir=/usr/local/cuda
 
 # Install CUDA toolkit 12.6
-RUN dnf -y install cuda-toolkit-12-6 && \
-    dnf -y clean all --enablerepo="*"
+RUN --mount=type=cache,target=/var/cache/dnf,sharing=locked,id=notebooks-dnf dnf -y install --setopt=keepcache=1 cuda-toolkit-12-6
 
 # Restore user workspace
 USER 1001

base-images/cuda/12.6/ubi9-python-3.12/Dockerfile.cuda

Lines changed: 8 additions & 16 deletions
@@ -39,11 +39,9 @@ RUN NVIDIA_GPGKEY_SUM=d0664fbbdb8c32356d45de36c5984617217b2d0bef41b93ccecd326ba3
 ENV CUDA_VERSION=12.6.3
 
 # For libraries in the cuda-compat-* package: https://docs.nvidia.com/cuda/eula/index.html#attachment-a
-RUN dnf upgrade -y && dnf install -y \
+RUN --mount=type=cache,target=/var/cache/dnf,sharing=locked,id=notebooks-dnf dnf upgrade -y --setopt=keepcache=1 && dnf install -y --setopt=keepcache=1 \
     cuda-cudart-12-6-${NV_CUDA_CUDART_VERSION} \
-    cuda-compat-12-6 \
-    && dnf clean all \
-    && rm -rf /var/cache/yum/*
+    cuda-compat-12-6
 
 # nvidia-docker 1.0
 RUN echo "/usr/local/nvidia/lib" >> /etc/ld.so.conf.d/nvidia.conf && \
@@ -69,21 +67,17 @@ ENV NV_LIBNCCL_VERSION=2.23.4
 ENV NCCL_VERSION=2.23.4
 ENV NV_LIBNCCL_PACKAGE=${NV_LIBNCCL_PACKAGE_NAME}-${NV_LIBNCCL_PACKAGE_VERSION}+cuda12.6
 
-RUN dnf install -y \
+RUN --mount=type=cache,target=/var/cache/dnf,sharing=locked,id=notebooks-dnf dnf install -y --setopt=keepcache=1 \
     cuda-libraries-12-6-${NV_CUDA_LIB_VERSION} \
     cuda-nvtx-12-6-${NV_NVTX_VERSION} \
     ${NV_LIBNPP_PACKAGE} \
     libcublas-12-6-${NV_LIBCUBLAS_VERSION} \
-    ${NV_LIBNCCL_PACKAGE} \
-    && dnf clean all \
-    && rm -rf /var/cache/yum/*
+    ${NV_LIBNCCL_PACKAGE}
 
 # Install devel tools
-RUN dnf install -y \
+RUN --mount=type=cache,target=/var/cache/dnf,sharing=locked,id=notebooks-dnf dnf install -y --setopt=keepcache=1 \
     make \
-    findutils \
-    && dnf clean all \
-    && rm -rf /var/cache/yum/*
+    findutils
 
 # Install CUDA cudnn9 from:
 # https://gitlab.com/nvidia/container-images/cuda/-/blob/master/dist/12.6.3/ubi9/runtime/cudnn/Dockerfile
@@ -92,10 +86,8 @@ ENV NV_CUDNN_PACKAGE=libcudnn9-cuda-12-${NV_CUDNN_VERSION}
 
 LABEL com.nvidia.cudnn.version="${NV_CUDNN_VERSION}"
 
-RUN dnf install -y \
-    ${NV_CUDNN_PACKAGE} \
-    && dnf clean all \
-    && rm -rf /var/cache/yum/*
+RUN --mount=type=cache,target=/var/cache/dnf,sharing=locked,id=notebooks-dnf dnf install -y --setopt=keepcache=1 \
+    ${NV_CUDNN_PACKAGE}
 
 # Set this flag so that libraries can find the location of CUDA
 ENV XLA_FLAGS=--xla_gpu_cuda_data_dir=/usr/local/cuda

base-images/cuda/12.8/c9s-python-3.12/Dockerfile.cuda

Lines changed: 9 additions & 18 deletions
@@ -39,11 +39,9 @@ RUN NVIDIA_GPGKEY_SUM=d0664fbbdb8c32356d45de36c5984617217b2d0bef41b93ccecd326ba3
 ENV CUDA_VERSION=12.8.1
 
 # For libraries in the cuda-compat-* package: https://docs.nvidia.com/cuda/eula/index.html#attachment-a
-RUN dnf upgrade -y && dnf install -y \
+RUN --mount=type=cache,target=/var/cache/dnf,sharing=locked,id=notebooks-dnf dnf upgrade -y --setopt=keepcache=1 && dnf install -y --setopt=keepcache=1 \
     cuda-cudart-12-8-${NV_CUDA_CUDART_VERSION} \
-    cuda-compat-12-8 \
-    && dnf clean all \
-    && rm -rf /var/cache/yum/*
+    cuda-compat-12-8
 
 # nvidia-docker 1.0
 RUN echo "/usr/local/nvidia/lib" >> /etc/ld.so.conf.d/nvidia.conf && \
@@ -69,21 +67,17 @@ ENV NV_LIBNCCL_VERSION=2.25.1
 ENV NCCL_VERSION=2.25.1
 ENV NV_LIBNCCL_PACKAGE=${NV_LIBNCCL_PACKAGE_NAME}-${NV_LIBNCCL_PACKAGE_VERSION}+cuda12.8
 
-RUN dnf install -y \
+RUN --mount=type=cache,target=/var/cache/dnf,sharing=locked,id=notebooks-dnf dnf install -y --setopt=keepcache=1 \
     cuda-libraries-12-8-${NV_CUDA_LIB_VERSION} \
     cuda-nvtx-12-8-${NV_NVTX_VERSION} \
     ${NV_LIBNPP_PACKAGE} \
     libcublas-12-8-${NV_LIBCUBLAS_VERSION} \
-    ${NV_LIBNCCL_PACKAGE} \
-    && dnf clean all \
-    && rm -rf /var/cache/yum/*
+    ${NV_LIBNCCL_PACKAGE}
 
 # Install devel tools
-RUN dnf install -y \
+RUN --mount=type=cache,target=/var/cache/dnf,sharing=locked,id=notebooks-dnf dnf install -y --setopt=keepcache=1 \
     make \
-    findutils \
-    && dnf clean all \
-    && rm -rf /var/cache/yum/*
+    findutils
 
 # Install CUDA cudnn9 from:
 # https://gitlab.com/nvidia/container-images/cuda/-/blob/master/dist/12.8.1/ubi9/runtime/cudnn/Dockerfile
@@ -92,17 +86,14 @@ ENV NV_CUDNN_PACKAGE libcudnn9-cuda-12-${NV_CUDNN_VERSION}
 
 LABEL com.nvidia.cudnn.version="${NV_CUDNN_VERSION}"
 
-RUN dnf install -y \
-    ${NV_CUDNN_PACKAGE} \
-    && dnf clean all \
-    && rm -rf /var/cache/yum/*
+RUN --mount=type=cache,target=/var/cache/dnf,sharing=locked,id=notebooks-dnf dnf install -y --setopt=keepcache=1 \
+    ${NV_CUDNN_PACKAGE}
 
 # Set this flag so that libraries can find the location of CUDA
 ENV XLA_FLAGS=--xla_gpu_cuda_data_dir=/usr/local/cuda
 
 # Install CUDA toolkit 12.8
-RUN dnf -y install cuda-toolkit-12-8 && \
-    dnf -y clean all --enablerepo="*"
+RUN --mount=type=cache,target=/var/cache/dnf,sharing=locked,id=notebooks-dnf dnf -y install --setopt=keepcache=1 cuda-toolkit-12-8
 
 # Restore user workspace
 USER 1001

base-images/cuda/12.8/ubi9-python-3.12/Dockerfile.cuda

Lines changed: 8 additions & 16 deletions
@@ -39,11 +39,9 @@ RUN NVIDIA_GPGKEY_SUM=d0664fbbdb8c32356d45de36c5984617217b2d0bef41b93ccecd326ba3
 ENV CUDA_VERSION=12.8.1
 
 # For libraries in the cuda-compat-* package: https://docs.nvidia.com/cuda/eula/index.html#attachment-a
-RUN dnf upgrade -y && dnf install -y \
+RUN --mount=type=cache,target=/var/cache/dnf,sharing=locked,id=notebooks-dnf dnf upgrade -y --setopt=keepcache=1 && dnf install -y --setopt=keepcache=1 \
     cuda-cudart-12-8-${NV_CUDA_CUDART_VERSION} \
-    cuda-compat-12-8 \
-    && dnf clean all \
-    && rm -rf /var/cache/yum/*
+    cuda-compat-12-8
 
 # nvidia-docker 1.0
 RUN echo "/usr/local/nvidia/lib" >> /etc/ld.so.conf.d/nvidia.conf && \
@@ -69,21 +67,17 @@ ENV NV_LIBNCCL_VERSION=2.25.1
 ENV NCCL_VERSION=2.25.1
 ENV NV_LIBNCCL_PACKAGE=${NV_LIBNCCL_PACKAGE_NAME}-${NV_LIBNCCL_PACKAGE_VERSION}+cuda12.8
 
-RUN dnf install -y \
+RUN --mount=type=cache,target=/var/cache/dnf,sharing=locked,id=notebooks-dnf dnf install -y --setopt=keepcache=1 \
     cuda-libraries-12-8-${NV_CUDA_LIB_VERSION} \
     cuda-nvtx-12-8-${NV_NVTX_VERSION} \
     ${NV_LIBNPP_PACKAGE} \
     libcublas-12-8-${NV_LIBCUBLAS_VERSION} \
-    ${NV_LIBNCCL_PACKAGE} \
-    && dnf clean all \
-    && rm -rf /var/cache/yum/*
+    ${NV_LIBNCCL_PACKAGE}
 
 # Install devel tools
-RUN dnf install -y \
+RUN --mount=type=cache,target=/var/cache/dnf,sharing=locked,id=notebooks-dnf dnf install -y --setopt=keepcache=1 \
     make \
-    findutils \
-    && dnf clean all \
-    && rm -rf /var/cache/yum/*
+    findutils
 
 # Install CUDA cudnn9 from:
 # https://gitlab.com/nvidia/container-images/cuda/-/blob/master/dist/12.8.1/ubi9/runtime/cudnn/Dockerfile
@@ -92,10 +86,8 @@ ENV NV_CUDNN_PACKAGE libcudnn9-cuda-12-${NV_CUDNN_VERSION}
 
 LABEL com.nvidia.cudnn.version="${NV_CUDNN_VERSION}"
 
-RUN dnf install -y \
-    ${NV_CUDNN_PACKAGE} \
-    && dnf clean all \
-    && rm -rf /var/cache/yum/*
+RUN --mount=type=cache,target=/var/cache/dnf,sharing=locked,id=notebooks-dnf dnf install -y --setopt=keepcache=1 \
+    ${NV_CUDNN_PACKAGE}
 
 # Set this flag so that libraries can find the location of CUDA
 ENV XLA_FLAGS=--xla_gpu_cuda_data_dir=/usr/local/cuda

base-images/rocm/6.2/c9s-python-3.12/Dockerfile.rocm

Lines changed: 3 additions & 4 deletions
@@ -22,7 +22,7 @@ ARG AMDGPU_VERSION=6.2.4
 # Note: Based on 6.2 above new package mivisionx is a pre-requistes, which bring in more dependent packages
 # so we are only installing meta packages of rocm
 # ref: https://rocm.docs.amd.com/projects/install-on-linux/en/develop/reference/package-manager-integration.html#packages-in-rocm-programming-models
-RUN echo "[ROCm]" > /etc/yum.repos.d/rocm.repo && \
+RUN --mount=type=cache,target=/var/cache/dnf,sharing=locked,id=notebooks-dnf echo "[ROCm]" > /etc/yum.repos.d/rocm.repo && \
     echo "name=ROCm" >> /etc/yum.repos.d/rocm.repo && \
     echo "baseurl=https://repo.radeon.com/rocm/rhel9/$ROCM_VERSION/main" >> /etc/yum.repos.d/rocm.repo && \
     echo "enabled=1" >> /etc/yum.repos.d/rocm.repo && \
@@ -32,16 +32,15 @@ RUN echo "[ROCm]" > /etc/yum.repos.d/rocm.repo && \
     echo "baseurl=https://repo.radeon.com/amdgpu/$AMDGPU_VERSION/rhel/9.4/main/x86_64" >> /etc/yum.repos.d/amdgpu.repo && \
     echo "enabled=1" >> /etc/yum.repos.d/amdgpu.repo && \
     echo "gpgcheck=0" >> /etc/yum.repos.d/amdgpu.repo && \
-    dnf install -y \
+    dnf install -y --setopt=keepcache=1 \
        rocm-core hip-runtime-amd \
        # system utilities
        rocm-smi-lib rocminfo \
        rocblas rocsolver rocfft rocrand rocsparse miopen-hip rccl \
        # HIP (Heterogeneous-compute Interface for Portability)
        hipblas hipblaslt hipfft hipsparse hiprand hipsolver \
        # suitesparse's /lib64/libspqr.so.2.0.9 needs libtbb.so.2
-       tbb && \
-    dnf clean all && rm -rf /var/cache/yum
+       tbb
 
 # https://rocm.docs.amd.com/projects/install-on-linux/en/latest/install/post-install.html#configure-rocm-shared-objects
 RUN tee --append /etc/ld.so.conf.d/rocm.conf <<EOF

base-images/rocm/6.2/ubi9-python-3.12/Dockerfile.rocm

Lines changed: 3 additions & 4 deletions
@@ -22,7 +22,7 @@ ARG AMDGPU_VERSION=6.2.4
 # Note: Based on 6.2 above new package mivisionx is a pre-requistes, which bring in more dependent packages
 # so we are only installing meta packages of rocm
 # ref: https://rocm.docs.amd.com/projects/install-on-linux/en/develop/reference/package-manager-integration.html#packages-in-rocm-programming-models
-RUN echo "[ROCm]" > /etc/yum.repos.d/rocm.repo && \
+RUN --mount=type=cache,target=/var/cache/dnf,sharing=locked,id=notebooks-dnf echo "[ROCm]" > /etc/yum.repos.d/rocm.repo && \
    echo "name=ROCm" >> /etc/yum.repos.d/rocm.repo && \
    echo "baseurl=https://repo.radeon.com/rocm/rhel9/$ROCM_VERSION/main" >> /etc/yum.repos.d/rocm.repo && \
    echo "enabled=1" >> /etc/yum.repos.d/rocm.repo && \
@@ -32,16 +32,15 @@ RUN echo "[ROCm]" > /etc/yum.repos.d/rocm.repo && \
    echo "baseurl=https://repo.radeon.com/amdgpu/$AMDGPU_VERSION/rhel/9.4/main/x86_64" >> /etc/yum.repos.d/amdgpu.repo && \
    echo "enabled=1" >> /etc/yum.repos.d/amdgpu.repo && \
    echo "gpgcheck=0" >> /etc/yum.repos.d/amdgpu.repo && \
-    dnf install -y \
+    dnf install -y --setopt=keepcache=1 \
        rocm-core hip-runtime-amd \
        # system utilities
        rocm-smi-lib rocminfo \
        rocblas rocsolver rocfft rocrand rocsparse miopen-hip rccl \
        # HIP (Heterogeneous-compute Interface for Portability)
        hipblas hipblaslt hipfft hipsparse hiprand hipsolver \
        # suitesparse's /lib64/libspqr.so.2.0.9 needs libtbb.so.2
-       tbb && \
-    dnf clean all && rm -rf /var/cache/yum
+       tbb
 
 # https://rocm.docs.amd.com/projects/install-on-linux/en/latest/install/post-install.html#configure-rocm-shared-objects
 RUN tee --append /etc/ld.so.conf.d/rocm.conf <<EOF
