10 changes: 5 additions & 5 deletions 3.4.0/scala2.12-java11-python3-r-ubuntu/Dockerfile
@@ -17,11 +17,11 @@
ARG BASE_IMAGE=spark:3.4.0-scala2.12-java11-ubuntu
FROM $BASE_IMAGE

-RUN set -ex && \
-apt-get update && \
-apt install -y python3 python3-pip && \
-apt install -y r-base r-base-dev && \
-rm -rf /var/cache/apt/* && \
+RUN set -ex; \
+apt-get update; \
+apt install -y python3 python3-pip; \
+apt install -y r-base r-base-dev; \
+rm -rf /var/cache/apt/*; \
rm -rf /var/lib/apt/lists/*

ENV R_HOME /usr/lib/R
8 changes: 4 additions & 4 deletions 3.4.0/scala2.12-java11-python3-ubuntu/Dockerfile
@@ -17,8 +17,8 @@
ARG BASE_IMAGE=spark:3.4.0-scala2.12-java11-ubuntu
FROM $BASE_IMAGE

-RUN set -ex && \
-apt-get update && \
-apt install -y python3 python3-pip && \
-rm -rf /var/cache/apt/* && \
+RUN set -ex; \
+apt-get update; \
+apt install -y python3 python3-pip; \
+rm -rf /var/cache/apt/*; \
rm -rf /var/lib/apt/lists/*
8 changes: 4 additions & 4 deletions 3.4.0/scala2.12-java11-r-ubuntu/Dockerfile
@@ -17,10 +17,10 @@
ARG BASE_IMAGE=spark:3.4.0-scala2.12-java11-ubuntu
FROM $BASE_IMAGE

-RUN set -ex && \
-apt-get update && \
-apt install -y r-base r-base-dev && \
-rm -rf /var/cache/apt/* && \
+RUN set -ex; \
+apt-get update; \
+apt install -y r-base r-base-dev; \
+rm -rf /var/cache/apt/*; \
rm -rf /var/lib/apt/lists/*

ENV R_HOME /usr/lib/R
32 changes: 16 additions & 16 deletions 3.4.0/scala2.12-java11-ubuntu/Dockerfile
@@ -21,22 +21,22 @@ ARG spark_uid=185
RUN groupadd --system --gid=${spark_uid} spark && \
useradd --system --uid=${spark_uid} --gid=spark spark

-RUN set -ex && \
-apt-get update && \
-ln -s /lib /lib64 && \
-apt install -y gnupg2 wget bash tini libc6 libpam-modules krb5-user libnss3 procps net-tools gosu && \
-mkdir -p /opt/spark && \
-mkdir /opt/spark/python && \
-mkdir -p /opt/spark/examples && \
-mkdir -p /opt/spark/work-dir && \
-chmod g+w /opt/spark/work-dir && \
-touch /opt/spark/RELEASE && \
-chown -R spark:spark /opt/spark && \
-rm /bin/sh && \
-ln -sv /bin/bash /bin/sh && \
-echo "auth required pam_wheel.so use_uid" >> /etc/pam.d/su && \
-chgrp root /etc/passwd && chmod ug+rw /etc/passwd && \
-rm -rf /var/cache/apt/* && \
+RUN set -ex; \
+apt-get update; \
+ln -s /lib /lib64; \
+apt install -y gnupg2 wget bash tini libc6 libpam-modules krb5-user libnss3 procps net-tools gosu; \
+mkdir -p /opt/spark; \
+mkdir /opt/spark/python; \
+mkdir -p /opt/spark/examples; \
+mkdir -p /opt/spark/work-dir; \
+chmod g+w /opt/spark/work-dir; \
+touch /opt/spark/RELEASE; \
+chown -R spark:spark /opt/spark; \
+rm /bin/sh; \
+ln -sv /bin/bash /bin/sh; \
+echo "auth required pam_wheel.so use_uid" >> /etc/pam.d/su; \
+chgrp root /etc/passwd && chmod ug+rw /etc/passwd; \
+rm -rf /var/cache/apt/*; \
rm -rf /var/lib/apt/lists/*

# Install Apache Spark
32 changes: 16 additions & 16 deletions Dockerfile.template
@@ -21,22 +21,22 @@ ARG spark_uid=185
RUN groupadd --system --gid=${spark_uid} spark && \
useradd --system --uid=${spark_uid} --gid=spark spark

-RUN set -ex && \
-apt-get update && \
-ln -s /lib /lib64 && \
-apt install -y gnupg2 wget bash tini libc6 libpam-modules krb5-user libnss3 procps net-tools gosu && \
-mkdir -p /opt/spark && \
-mkdir /opt/spark/python && \
-mkdir -p /opt/spark/examples && \
-mkdir -p /opt/spark/work-dir && \
-chmod g+w /opt/spark/work-dir && \
-touch /opt/spark/RELEASE && \
-chown -R spark:spark /opt/spark && \
-rm /bin/sh && \
-ln -sv /bin/bash /bin/sh && \
-echo "auth required pam_wheel.so use_uid" >> /etc/pam.d/su && \
-chgrp root /etc/passwd && chmod ug+rw /etc/passwd && \
-rm -rf /var/cache/apt/* && \
+RUN set -ex; \
+apt-get update; \
+ln -s /lib /lib64; \
+apt install -y gnupg2 wget bash tini libc6 libpam-modules krb5-user libnss3 procps net-tools gosu; \
+mkdir -p /opt/spark; \
+mkdir /opt/spark/python; \
+mkdir -p /opt/spark/examples; \
+mkdir -p /opt/spark/work-dir; \
+chmod g+w /opt/spark/work-dir; \
+touch /opt/spark/RELEASE; \
+chown -R spark:spark /opt/spark; \
+rm /bin/sh; \
+ln -sv /bin/bash /bin/sh; \
+echo "auth required pam_wheel.so use_uid" >> /etc/pam.d/su; \
+chgrp root /etc/passwd && chmod ug+rw /etc/passwd; \
+rm -rf /var/cache/apt/*; \
rm -rf /var/lib/apt/lists/*

# Install Apache Spark
10 changes: 5 additions & 5 deletions r-python.template
@@ -17,15 +17,15 @@
ARG BASE_IMAGE=spark:{{ SPARK_VERSION }}-scala{{ SCALA_VERSION }}-java{{ JAVA_VERSION }}-ubuntu
FROM $BASE_IMAGE

-RUN set -ex && \
-apt-get update && \
+RUN set -ex; \
+apt-get update; \
{%- if HAVE_PY %}
-apt install -y python3 python3-pip && \
+apt install -y python3 python3-pip; \
{%- endif %}
{%- if HAVE_R %}
-apt install -y r-base r-base-dev && \
+apt install -y r-base r-base-dev; \
{%- endif %}
-rm -rf /var/cache/apt/* && \
+rm -rf /var/cache/apt/*; \
rm -rf /var/lib/apt/lists/*
{%- if HAVE_R %}

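
Note on the semantics this change relies on: each RUN above begins with `set -ex`, and under `set -e` the shell exits as soon as any command in a `;`-separated list fails, so a broken step still aborts the layer exactly as it did with `&&` chaining, while `set -x` keeps tracing each command. A minimal sketch (not part of this PR; `false` is just a stand-in for any failing step), assuming the image's default shell honors `set -e` as both dash and bash do:

# With set -e, the shell exits right after `false`, so the echo never runs
# and the build step fails with the exit status of `false`.
RUN set -ex; \
    false; \
    echo "never reached"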