@@ -32,6 +32,9 @@ ENV SPARK_VERSION ${SPARK_VERSION}
ENV HADOOP_VERSION ${HADOOP_VERSION}
ENV SCALA_VERSION ${SCALA_VERSION}
+ # Fetch GPG keys that are missing from https://downloads.apache.org/spark/KEYS
+ COPY MISSING-GPG-KEYS.yml .
+
RUN groupadd --system --gid=${spark_uid} spark && \
useradd --system --uid=${spark_uid} --gid=spark spark
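The contents of MISSING-GPG-KEYS.yml are not shown in this diff; the parsing pipeline in the next hunk (grep for a "keys:" block, split fields on ":", strip quotes) suggests a layout roughly like the sketch below. The key names and fingerprints here are placeholders, not real Spark release-manager keys. Note also that deleting the file in the later RUN layer does not shrink the image, since the earlier COPY layer still carries it; it only keeps the final filesystem tidy.

```yaml
# MISSING-GPG-KEYS.yml -- hypothetical layout inferred from the parsing pipeline
description: GPG keys not yet published in https://downloads.apache.org/spark/KEYS
keys:
  key1: "0000000000000000000000000000000000000001"
  key2: "0000000000000000000000000000000000000002"
```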
@@ -58,16 +61,19 @@ RUN set -ex;\
curl --retry 3 --retry-all-errors -k ${SPARK_DIST_DOWNLOAD_URL}/${DIST}.tgz -o ${WORK_DIR}/spark.tgz; \
curl --retry 3 --retry-all-errors -k ${SPARK_DIST_DOWNLOAD_URL}/${DIST}.tgz.asc -o ${WORK_DIR}/spark.tgz.asc; \
curl --retry 3 --retry-all-errors -k https://downloads.apache.org/spark/KEYS -o ${WORK_DIR}/KEYS; \
+ MISSING_KEYS=$(grep -A300 "keys:" MISSING-GPG-KEYS.yml | awk -F: '{ print $2 }' | tr -d '"' | tr '\n' ' '); \
export GNUPGHOME="$(mktemp -d)" ; \
gpg --batch --import ${WORK_DIR}/KEYS; \
+ gpg --batch --keyserver hkps://keys.openpgp.org --recv-keys ${MISSING_KEYS} || true; \
+ gpg --batch --keyserver hkps://keyserver.ubuntu.com --recv-keys ${MISSING_KEYS} || true; \
gpg --batch --verify ${WORK_DIR}/spark.tgz.asc ${WORK_DIR}/spark.tgz; \
tar --strip-components=1 -zxvf ${WORK_DIR}/spark.tgz -C ${SPARK_HOME}/; \
chown -R spark:spark ${SPARK_HOME}/; \
mv ${SPARK_HOME}/kubernetes/dockerfiles/spark/decom.sh /opt/; \
mv ${SPARK_HOME}/kubernetes/tests ${SPARK_HOME}/; \
chmod a+x /opt/decom.sh; \
gpgconf --kill all; \
- rm -rf ${GNUPGHOME} ${WORK_DIR}; \
+ rm -rf ${GNUPGHOME} ${WORK_DIR} MISSING-GPG-KEYS.yml; \
rm -fr ${SPARK_HOME}/conf; rm -fr ${SPARK_HOME}/yarn; rm -fr ${SPARK_HOME}/kubernetes
COPY entrypoint.sh /opt/entrypoint.sh
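For reference, the download-and-verify flow this RUN step implements can be exercised outside the image build with a sketch like the one below. It assumes the hypothetical YAML layout above and that spark.tgz, spark.tgz.asc, and KEYS are already in the current directory; the "|| true" fallbacks mean an unreachable keyserver is tolerated, so a genuinely missing key only surfaces at the final --verify.

```sh
set -ex
# Collect space-separated key fingerprints from the (assumed) YAML layout above.
MISSING_KEYS=$(grep -A300 "keys:" MISSING-GPG-KEYS.yml | awk -F: '{ print $2 }' | tr -d '"' | tr '\n' ' ')
export GNUPGHOME="$(mktemp -d)"
# Import the published Spark signing keys first.
gpg --batch --import KEYS
# Then try two public keyservers for the missing ones; failures do not abort the run.
gpg --batch --keyserver hkps://keys.openpgp.org --recv-keys ${MISSING_KEYS} || true
gpg --batch --keyserver hkps://keyserver.ubuntu.com --recv-keys ${MISSING_KEYS} || true
# The hard gate: verification fails unless the signer's key was imported above.
gpg --batch --verify spark.tgz.asc spark.tgz
gpgconf --kill all
rm -rf "${GNUPGHOME}"
```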