@@ -11,7 +11,6 @@ ARG PRODUCT
 ARG ASYNC_PROFILER
 ARG JMX_EXPORTER
 ARG PROTOBUF
-ARG HDFS_UTILS
 ARG TARGETARCH
 ARG TARGETOS
 
@@ -59,19 +58,6 @@ RUN microdnf update && \
 
 WORKDIR /stackable
 
-# The Stackable HDFS utils contain an OPA authorizer, group mapper & topology provider.
-# The topology provider provides rack awareness functionality for HDFS by allowing users to specify Kubernetes
-# labels to build a rackID from.
-# Starting with hdfs-utils version 0.3.0 the topology provider is not a standalone jar anymore and included in hdfs-utils.
-
-RUN curl --fail -L "https://github.com/stackabletech/hdfs-utils/archive/refs/tags/v${HDFS_UTILS}.tar.gz" | tar -xzC . && \
-    cd hdfs-utils-${HDFS_UTILS} && \
-    mvn clean package -P hadoop-${PRODUCT} -DskipTests -Dmaven.javadoc.skip=true && \
-    mkdir -p /stackable/hadoop-${PRODUCT}/share/hadoop/common/lib && \
-    cp target/hdfs-utils-$HDFS_UTILS.jar /stackable/hadoop-${PRODUCT}/share/hadoop/common/lib/hdfs-utils-${HDFS_UTILS}.jar && \
-    rm -rf /stackable/hdfs-utils-main && \
-    cd -
-
 COPY hadoop/stackable/patches /stackable/patches
 
 # Hadoop Pipes requires libtirpc to build, whose headers are not packaged in RedHat UBI, so skip building this module
@@ -105,10 +91,30 @@ COPY shared/log4shell_scanner /bin/log4shell_scanner
 RUN /bin/log4shell_scanner s "/stackable/hadoop-${PRODUCT}"
 # ===
 
+FROM stackable/image/java-devel as hdfs-utils-builder
+
+ARG HDFS_UTILS
+ARG PRODUCT
+
+WORKDIR /stackable
+
+# The Stackable HDFS utils contain an OPA authorizer, group mapper & topology provider.
+# The topology provider provides rack awareness functionality for HDFS by allowing users to specify Kubernetes
+# labels to build a rackID from.
+# Starting with hdfs-utils version 0.3.0 the topology provider is not a standalone jar anymore and included in hdfs-utils.
+
+RUN curl --fail -L "https://github.com/stackabletech/hdfs-utils/archive/refs/tags/v${HDFS_UTILS}.tar.gz" | tar -xzC . && \
+    cd hdfs-utils-${HDFS_UTILS} && \
+    mvn clean package -P hadoop-${PRODUCT} -DskipTests -Dmaven.javadoc.skip=true && \
+    mkdir -p /stackable/hadoop-${PRODUCT}/share/hadoop/common/lib && \
+    cp target/hdfs-utils-$HDFS_UTILS.jar /stackable/hadoop-${PRODUCT}/share/hadoop/common/lib/hdfs-utils-${HDFS_UTILS}.jar && \
+    rm -rf /stackable/hdfs-utils-main
+
 FROM stackable/image/java-base as final
 
 ARG PRODUCT
 ARG RELEASE
+ARG HDFS_UTILS
 
 LABEL name="Apache Hadoop" \
     maintainer="info@stackable.tech" \
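
The hdfs-utils build now lives in its own hdfs-utils-builder stage, so it depends only on the java-devel base image and the two build args declared above. As a rough smoke test one could build just this stage in isolation; the Dockerfile path, build-arg values and tag below are illustrative assumptions, not values pinned by the Stackable build tooling:

# Build only the hdfs-utils-builder stage. Assumes the stackable/image/java-devel
# base image is already available locally and the build context is the repo root.
docker build \
  --target hdfs-utils-builder \
  --build-arg PRODUCT=3.3.6 \
  --build-arg HDFS_UTILS=0.3.0 \
  -f hadoop/Dockerfile \
  -t hdfs-utils-builder:smoke-test .
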
@@ -143,6 +149,7 @@ WORKDIR /stackable
 COPY --chown=stackable:stackable --from=builder /stackable/hadoop-${PRODUCT} /stackable/hadoop-${PRODUCT}/
 COPY --chown=stackable:stackable --from=builder /stackable/jmx /stackable/jmx/
 COPY --chown=stackable:stackable --from=builder /stackable/async-profiler /stackable/async-profiler/
+COPY --chown=stackable:stackable --from=hdfs-utils-builder /stackable/hadoop-${PRODUCT}/share/hadoop/common/lib/hdfs-utils-${HDFS_UTILS}.jar /stackable/hadoop-${PRODUCT}/share/hadoop/common/lib/hdfs-utils-${HDFS_UTILS}.jar
 RUN ln -s /stackable/hadoop-${PRODUCT} /stackable/hadoop
 
 COPY hadoop/stackable/fuse_dfs_wrapper /stackable/hadoop/bin
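
Because only the built jar is copied out of hdfs-utils-builder, the hdfs-utils sources and Maven build tree never reach the final image, and bumping HDFS_UTILS no longer invalidates the cached Hadoop build layers. A quick check that the jar actually lands on Hadoop's common classpath could look like this; the image reference and the HDFS_UTILS value are assumptions for illustration:

# List the common lib directory through the /stackable/hadoop symlink created in the final stage.
docker run --rm --entrypoint ls <hadoop-image>:<tag> \
  /stackable/hadoop/share/hadoop/common/lib/ | grep hdfs-utils
# Expected output for HDFS_UTILS=0.3.0: hdfs-utils-0.3.0.jar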