Skip to content

Commit 94476d9

Browse files
committed
Run backup job as user 1000
Switching the base image to podman allowed us to run the backup job as a regular user (UID 1000) without any privilege escalation. Signed-off-by: Ales Raszka <[email protected]>
1 parent 2e99b7e commit 94476d9

File tree

3 files changed

+30
-48
lines changed

3 files changed

+30
-48
lines changed

controllers/backupcronjob/backupcronjob_controller.go

Lines changed: 11 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -22,8 +22,6 @@ import (
2222
"sigs.k8s.io/controller-runtime/pkg/controller/controllerutil"
2323
"sigs.k8s.io/controller-runtime/pkg/reconcile"
2424

25-
"k8s.io/utils/ptr"
26-
2725
dw "github.com/devfile/api/v2/pkg/apis/workspaces/v1alpha2"
2826
controllerv1alpha1 "github.com/devfile/devworkspace-operator/apis/controller/v1alpha1"
2927
"github.com/devfile/devworkspace-operator/internal/images"
@@ -40,6 +38,7 @@ import (
4038
corev1 "k8s.io/api/core/v1"
4139
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
4240
"k8s.io/apimachinery/pkg/runtime"
41+
"k8s.io/utils/ptr"
4342
ctrl "sigs.k8s.io/controller-runtime"
4443
"sigs.k8s.io/controller-runtime/pkg/client"
4544
"sigs.k8s.io/controller-runtime/pkg/event"
@@ -253,7 +252,7 @@ func (r *BackupCronJobReconciler) executeBackupSync(ctx context.Context, dwOpera
253252
}
254253
dwOperatorConfig.Status.LastBackupTime = &metav1.Time{Time: metav1.Now().Time}
255254

256-
err = r.Status().Patch(ctx, dwOperatorConfig, origConfig)
255+
err = r.NonCachingClient.Status().Patch(ctx, dwOperatorConfig, origConfig)
257256
if err != nil {
258257
log.Error(err, "Failed to update DevWorkspaceOperatorConfig status with last backup time")
259258
// Not returning error as the backup jobs were created successfully
@@ -346,12 +345,14 @@ func (r *BackupCronJobReconciler) createBackupJob(
346345
},
347346
Spec: batchv1.JobSpec{
348347
Template: corev1.PodTemplateSpec{
348+
ObjectMeta: metav1.ObjectMeta{
349+
Annotations: map[string]string{
350+
"io.kubernetes.cri-o.Devices": "/dev/fuse",
351+
},
352+
},
349353
Spec: corev1.PodSpec{
350354
ServiceAccountName: JobRunnerSAName + "-" + workspace.Status.DevWorkspaceId,
351355
RestartPolicy: corev1.RestartPolicyNever,
352-
SecurityContext: &corev1.PodSecurityContext{
353-
FSGroup: ptr.To[int64](0),
354-
},
355356
Containers: []corev1.Container{
356357
{
357358
Name: "backup-workspace",
@@ -363,10 +364,8 @@ func (r *BackupCronJobReconciler) createBackupJob(
363364
Name: "BACKUP_SOURCE_PATH",
364365
Value: "/workspace/" + workspacePath,
365366
},
366-
{Name: "STORAGE_DRIVER", Value: "overlay"},
367-
{Name: "BUILDAH_ISOLATION", Value: "chroot"},
368367
{Name: "DEVWORKSPACE_BACKUP_REGISTRY", Value: backUpConfig.Registry.Path},
369-
{Name: "BUILDAH_PUSH_OPTIONS", Value: "--tls-verify=false"},
368+
{Name: "PODMAN_PUSH_OPTIONS", Value: "--tls-verify=false"},
370369
},
371370
Image: images.GetProjectBackupImage(),
372371
Args: []string{
@@ -384,8 +383,7 @@ func (r *BackupCronJobReconciler) createBackupJob(
384383
},
385384
},
386385
SecurityContext: &corev1.SecurityContext{
387-
RunAsUser: ptr.To[int64](0),
388-
AllowPrivilegeEscalation: ptr.To[bool](false),
386+
RunAsUser: ptr.To[int64](1000),
389387
},
390388
},
391389
},
@@ -424,12 +422,12 @@ func (r *BackupCronJobReconciler) createBackupJob(
424422
})
425423
job.Spec.Template.Spec.Containers[0].VolumeMounts = append(job.Spec.Template.Spec.Containers[0].VolumeMounts, corev1.VolumeMount{
426424
Name: "registry-auth-secret",
427-
MountPath: "/home/user/.docker",
425+
MountPath: "/home/podman/.docker",
428426
ReadOnly: true,
429427
})
430428
job.Spec.Template.Spec.Containers[0].Env = append(job.Spec.Template.Spec.Containers[0].Env, corev1.EnvVar{
431429
Name: "REGISTRY_AUTH_FILE",
432-
Value: "/home/user/.docker/.dockerconfigjson",
430+
Value: "/home/podman/.docker/.dockerconfigjson",
433431
})
434432

435433
}

project-backup/Containerfile

Lines changed: 9 additions & 25 deletions
Original file line numberDiff line numberDiff line change
@@ -1,29 +1,13 @@
1-
FROM registry.access.redhat.com/ubi9:9.5
1+
FROM quay.io/podman/stable:latest
22

3-
ARG USER_HOME_DIR="/home/user"
4-
ARG INSTALL_PACKAGES="shadow-utils bash jq podman buildah ca-certificates fuse-overlayfs"
3+
RUN set -e && \
4+
dnf update -y && \
5+
dnf clean all
56

6-
ENV HOME=${USER_HOME_DIR}
7-
ENV BUILDAH_ISOLATION=chroot
7+
COPY --chown=1000:1000 entrypoint.sh /
8+
COPY --chown=1000:1000 workspace-recovery.sh /
89

10+
RUN chmod +x /entrypoint.sh ; \
11+
chmod +x /workspace-recovery.sh
912

10-
COPY --chown=0:0 entrypoint.sh /
11-
COPY --chown=0:0 workspace-recovery.sh /
12-
13-
RUN set -e ; \
14-
dnf install -y ${INSTALL_PACKAGES} ; \
15-
dnf update -y ; \
16-
dnf clean all ; \
17-
mkdir -p ${USER_HOME_DIR} ; \
18-
mkdir -p ${USER_HOME_DIR}/.config/containers ; \
19-
(echo '[storage]';echo 'driver = "overlay"';echo 'graphroot = "/tmp/graphroot"';echo '[storage.options.overlay]';echo 'mount_program = "/usr/bin/fuse-overlayfs"') > ${USER_HOME_DIR}/.config/containers/storage.conf ; \
20-
chown -R 1000:1000 ${USER_HOME_DIR} ; \
21-
chmod +x /entrypoint.sh ; \
22-
chmod +x /workspace-recovery.sh ; \
23-
echo "user:x:1000:0:devspaces user:${USER_HOME_DIR}:/bin/bash" >> /etc/passwd ; \
24-
echo "user:x:1000:" >> /etc/group
25-
26-
27-
USER 1000
28-
WORKDIR ${USER_HOME_DIR}
29-
ENTRYPOINT ["/usr/libexec/podman/catatonit","--","/entrypoint.sh"]
13+
ENTRYPOINT ["/entrypoint.sh"]

project-backup/workspace-recovery.sh

Lines changed: 10 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -12,19 +12,19 @@ BACKUP_IMAGE="${DEVWORKSPACE_BACKUP_REGISTRY}/backup-${DEVWORKSPACE_NAMESPACE}-$
1212

1313
# --- Functions ---
1414
backup() {
15-
local new_image
16-
new_image=$(buildah from scratch)
1715

18-
echo "Backing up workspace from path: $BACKUP_SOURCE_PATH"
19-
ls -la "$BACKUP_SOURCE_PATH"
16+
cat <<EOF > /home/podman/Dockerfile.backup
17+
FROM scratch
18+
COPY "$BACKUP_SOURCE_PATH" /
19+
LABEL DEVWORKSPACE="$DEVWORKSPACE_NAME"
20+
LABEL NAMESPACE="$DEVWORKSPACE_NAMESPACE"
21+
EOF
22+
podman build \
23+
--file /home/podman/Dockerfile.backup \
24+
--tag "$BACKUP_IMAGE" /
2025

21-
buildah copy "$new_image" "$BACKUP_SOURCE_PATH" /
22-
buildah config --label DEVWORKSPACE="$DEVWORKSPACE_NAME" "$new_image"
23-
buildah config --label NAMESPACE="$DEVWORKSPACE_NAMESPACE" "$new_image"
24-
buildah commit "$new_image" "$BACKUP_IMAGE"
26+
podman push ${PODMAN_PUSH_OPTIONS:-} "$BACKUP_IMAGE"
2527

26-
buildah umount "$new_image"
27-
buildah push ${BUILDAH_PUSH_OPTIONS:-} "$BACKUP_IMAGE"
2828
}
2929

3030
restore() {

0 commit comments

Comments
 (0)