From ed1c9c690b28bf743c92d33f89c79ed505144b17 Mon Sep 17 00:00:00 2001 From: Tesshu Flower Date: Fri, 11 Oct 2024 15:17:02 -0400 Subject: [PATCH] update ensure_initialized in restic mover script Signed-off-by: Tesshu Flower --- controllers/mover/restic/logfilter.go | 5 +- controllers/mover/restic/logfilter_test.go | 35 ++- custom-scorecard-tests/config-downstream.yaml | 10 + custom-scorecard-tests/config.yaml | 10 + .../bases/patches/e2e-tests-stage1.yaml | 10 + mover-restic/entry.sh | 45 +++- .../roles/create_restic_secret/tasks/main.yml | 12 +- ...restic_manual_normal_restore_emptyrepo.yml | 250 ++++++++++++++++++ 8 files changed, 368 insertions(+), 9 deletions(-) create mode 100644 test-e2e/test_restic_manual_normal_restore_emptyrepo.yml diff --git a/controllers/mover/restic/logfilter.go b/controllers/mover/restic/logfilter.go index 109805173..cef511dc2 100644 --- a/controllers/mover/restic/logfilter.go +++ b/controllers/mover/restic/logfilter.go @@ -37,7 +37,10 @@ var resticRegex = regexp.MustCompile( `^\s*([aA]dded to the repository)|` + `^\s*([sS]uccessfully)|` + `(RESTORE_OPTIONS)|` + - `^\s*(Restic completed in)`) + `([iI]nitialize [dD]ir)|` + + `^\s*([fF]atal)|` + + `^\s*(ERROR)|` + + `^\s*([rR]estic completed in)`) // Filter restic log lines for a successful move job func LogLineFilterSuccess(line string) *string { diff --git a/controllers/mover/restic/logfilter_test.go b/controllers/mover/restic/logfilter_test.go index 92acdd20f..3d83c8c19 100644 --- a/controllers/mover/restic/logfilter_test.go +++ b/controllers/mover/restic/logfilter_test.go @@ -68,7 +68,8 @@ Restic completed in 18s === Done ===` // nolint:lll - expectedFilteredResticSourceLogSuccessful := `created restic repository f5bccd54c8 at s3:http://minio-api-minio.apps.app-aws-411ga-sno-net2-zp5jq.dev06.red-chesterfield.com/ttest-restic-new + expectedFilteredResticSourceLogSuccessful := `== Initialize Dir ======= +created restic repository f5bccd54c8 at 
s3:http://minio-api-minio.apps.app-aws-411ga-sno-net2-zp5jq.dev06.red-chesterfield.com/ttest-restic-new repository f5bccd54 opened (repository version 2) successfully, password is correct created new cache in /home/testuser/DEVFEDORA/volsync/RESTICTESTS/CACHE no parent snapshot found, will read all files @@ -118,7 +119,6 @@ Testing mandatory env variables 913a91c60431342abb402d7707f50a370c52a911e01abdf4160e5d41a77e5151 successfully removed 1 locks == Checking directory for content === -== Initialize Dir ======= ID Time Host Tags Paths ------------------------------------------------------------------------ 4e825939 2023-04-07 20:17:00 volsync /mover-syncthing @@ -248,7 +248,8 @@ Restic completed in 4s === Done ===` // nolint:lll - expectedFilteredResticDestlogSuccessful := `created restic repository 374c6313cb at s3:http://minio.minio.svc.cluster.local:9000/resticbucket1b + expectedFilteredResticDestlogSuccessful := `=== Initialize Dir === +created restic repository 374c6313cb at s3:http://minio.minio.svc.cluster.local:9000/resticbucket1b No eligible snapshots found === No data will be restored === Restic completed in 4s` @@ -262,4 +263,32 @@ Restic completed in 4s` Expect(filteredLines).To(Equal(expectedFilteredResticDestlogSuccessful)) }) }) + + Context("Restic mover logs - bad password", func() { + // Sample restore log for restic mover + // nolint:lll + resticMoverLog := `Starting container +VolSync restic container version: v0.11.0+f866a4ec-dirty +restore +restic 0.17.0 compiled with go1.22.8 on linux/amd64 +Testing mandatory env variables +=== Check for dir initialized === +Fatal: wrong password or no key found +ERROR: failure checking existence of repository +` + + // nolint:lll + expectedFilteredResticMoverLog := `Fatal: wrong password or no key found +ERROR: failure checking existence of repository` + + It("Should filter the logs from the replication source or dest", func() { + reader := strings.NewReader(resticMoverLog) + filteredLines, err := 
utils.FilterLogs(reader, restic.LogLineFilterSuccess) + Expect(err).NotTo(HaveOccurred()) + + logger.Info("Logs after filter", "filteredLines", filteredLines) + Expect(filteredLines).To(Equal(expectedFilteredResticMoverLog)) + }) + }) + }) diff --git a/custom-scorecard-tests/config-downstream.yaml b/custom-scorecard-tests/config-downstream.yaml index 7ccd4ea1f..1afc146ff 100644 --- a/custom-scorecard-tests/config-downstream.yaml +++ b/custom-scorecard-tests/config-downstream.yaml @@ -125,6 +125,16 @@ stages: storage: spec: mountPath: {} + - entrypoint: + - volsync-custom-scorecard-tests + - test_restic_manual_normal_restore_emptyrepo.yml + image: quay.io/backube/volsync-custom-scorecard-tests:latest + labels: + suite: volsync-e2e + test: test_restic_manual_normal_restore_emptyrepo.yml + storage: + spec: + mountPath: {} - entrypoint: - volsync-custom-scorecard-tests - test_restic_manual_priv.yml diff --git a/custom-scorecard-tests/config.yaml b/custom-scorecard-tests/config.yaml index 7ccd4ea1f..1afc146ff 100644 --- a/custom-scorecard-tests/config.yaml +++ b/custom-scorecard-tests/config.yaml @@ -125,6 +125,16 @@ stages: storage: spec: mountPath: {} + - entrypoint: + - volsync-custom-scorecard-tests + - test_restic_manual_normal_restore_emptyrepo.yml + image: quay.io/backube/volsync-custom-scorecard-tests:latest + labels: + suite: volsync-e2e + test: test_restic_manual_normal_restore_emptyrepo.yml + storage: + spec: + mountPath: {} - entrypoint: - volsync-custom-scorecard-tests - test_restic_manual_priv.yml diff --git a/custom-scorecard-tests/scorecard/bases/patches/e2e-tests-stage1.yaml b/custom-scorecard-tests/scorecard/bases/patches/e2e-tests-stage1.yaml index edafe07bd..34ff57e9a 100644 --- a/custom-scorecard-tests/scorecard/bases/patches/e2e-tests-stage1.yaml +++ b/custom-scorecard-tests/scorecard/bases/patches/e2e-tests-stage1.yaml @@ -111,6 +111,16 @@ storage: spec: mountPath: {} + - entrypoint: + - volsync-custom-scorecard-tests + - 
test_restic_manual_normal_restore_emptyrepo.yml + image: quay.io/backube/volsync-custom-scorecard-tests:latest + labels: + suite: volsync-e2e + test: test_restic_manual_normal_restore_emptyrepo.yml + storage: + spec: + mountPath: {} - entrypoint: - volsync-custom-scorecard-tests - test_restic_manual_priv.yml diff --git a/mover-restic/entry.sh b/mover-restic/entry.sh index 08e83964e..5c17bb867 100755 --- a/mover-restic/entry.sh +++ b/mover-restic/entry.sh @@ -90,9 +90,27 @@ function check_contents { # Ensure the repo has been initialized function ensure_initialized { echo "=== Check for dir initialized ===" - # Try a restic command and capture the rc & output + # check for restic config and capture rc + # See: https://restic.readthedocs.io/en/stable/075_scripting.html#check-if-a-repository-is-already-initialized + set +e # Don't exit on command failure + outfile=$(mktemp -q) - if ! "${RESTIC[@]}" snapshots 2>"$outfile"; then + "${RESTIC[@]}" cat config > /dev/null 2>"$outfile" + rc=$? + + set -e # Exit on command failure + + case $rc in + 0) + echo "dir is initialized" + ;; + 1) + # This can happen for some providers (e.g. minio) if the bucket does not exist + # Restic will return 10 if the bucket exists and no restic repo at the path exists, but will + # still return 1 if the bucket itself doesn't exist. 
+ # We can proceed with trying an init which will create the bucket (and path in the bucket if there is one) + # restic init should fail if somehow the repo already exists when init is run or if it's unable to + # create the bucket output=$(<"$outfile") # Match against error string for uninitialized repo # This string also appears when credentials are incorrect (in which case @@ -104,7 +122,28 @@ function ensure_initialized { cat "$outfile" error 3 "failure checking existence of repository" fi - fi + ;; + 10) + # rc = 10 Repository does not exist (since restic 0.17.0) + echo "=== Initialize Dir ===" + "${RESTIC[@]}" init + ;; + 11) + # rc = 11 Failed to lock repository (since restic 0.17.0) + cat "$outfile" + error 3 "failure locking repository" + ;; + 12) + # rc = 12 Wrong password (since restic 0.17.1) + cat "$outfile" + error 3 "failure connecting to repository, incorrect password" + ;; + *) + cat "$outfile" + error 3 "failure checking existence of repository" + ;; + esac + rm -f "$outfile" } diff --git a/test-e2e/roles/create_restic_secret/tasks/main.yml b/test-e2e/roles/create_restic_secret/tasks/main.yml index 41ec3a728..a725f36e3 100644 --- a/test-e2e/roles/create_restic_secret/tasks/main.yml +++ b/test-e2e/roles/create_restic_secret/tasks/main.yml @@ -13,15 +13,23 @@ - include_role: name: get_minio_credentials +- name: Get bucket name to use (default is 'restic-e2e') + ansible.builtin.set_fact: + bucket_name: "{{ bucket_name | default('restic-e2e') }}" + +- name: Get path name to use under the bucket (default is namespace name) + ansible.builtin.set_fact: + path_name: "{{ path_name | default(namespace) }}" + # Path in restic will include the namespace to avoid re-runs of tests interferring with each-other # And also to prevent multiple tests from using the same path (each test should use its own namespace) - name: Determine repo URL set_fact: - repo_url: "s3:http://minio.{{ minio_namespace }}.svc.cluster.local:9000/restic-e2e/{{ namespace }}" + repo_url: 
"s3:http://minio.{{ minio_namespace }}.svc.cluster.local:9000/{{ bucket_name }}/{{ path_name }}" - name: Set repo URL to use TLS set_fact: - repo_url: "s3:https://minio.{{ minio_namespace }}.svc.cluster.local:9000/restic-e2e/{{ namespace }}" + repo_url: "s3:https://minio.{{ minio_namespace }}.svc.cluster.local:9000/{{ bucket_name }}/{{ path_name }}" when: use_tls is defined and use_tls == true - name: Create restic secret diff --git a/test-e2e/test_restic_manual_normal_restore_emptyrepo.yml b/test-e2e/test_restic_manual_normal_restore_emptyrepo.yml new file mode 100644 index 000000000..4c4ec8ff4 --- /dev/null +++ b/test-e2e/test_restic_manual_normal_restore_emptyrepo.yml @@ -0,0 +1,250 @@ +--- +- hosts: localhost + tags: + - e2e + - restic + - unprivileged + - emptyrepo + vars: + restic_secret_name: restic-secret + tasks: + # + # Purpose of this test is to make sure restoring a restic replicationdestination from a + # repo that is un-initialized or does not exist will work. The replicationdestination (restore) + # should initialize the repo if it does not exist and then proceed with the empty restore rather + # than failing. 
This is to support gitops scenarios - see discussion + # related to PR: https://github.com/backube/volsync/pull/1190 + # + - include_role: + name: create_namespace + + - include_role: + name: gather_cluster_info + + # We're running everything as a normal user + - name: Define podSecurityContext + ansible.builtin.set_fact: + podSecurityContext: + fsGroup: 5678 + runAsGroup: 5678 + runAsNonRoot: true + runAsUser: 1234 + seccompProfile: + type: RuntimeDefault + when: not cluster_info.is_openshift + + # + # Test1: test with bucket that does not exist + # use new bucket_name that should be unique since ns name should be unique to this test + # + - include_role: + name: create_restic_secret + vars: + minio_namespace: minio + bucket_name: "test-e2e-newbucket-{{ namespace }}" + + # No source pvc or replicationsource for this test - restore from empty path in repo + + - name: Create dest PVC (restore volume) + kubernetes.core.k8s: + state: present + definition: + kind: PersistentVolumeClaim + apiVersion: v1 + metadata: + name: data-dest + namespace: "{{ namespace }}" + spec: + accessModes: + - ReadWriteOnce + resources: + requests: + storage: 1Gi + + - name: Restore data to destination (w/ mSC) + kubernetes.core.k8s: + state: present + definition: + apiVersion: volsync.backube/v1alpha1 + kind: ReplicationDestination + metadata: + name: restore + namespace: "{{ namespace }}" + spec: + trigger: + manual: restore-test1-1 + restic: + repository: "{{ restic_secret_name }}" + destinationPVC: data-dest + copyMethod: Direct + cacheCapacity: 1Gi + moverSecurityContext: "{{ podSecurityContext }}" + when: podSecurityContext is defined + + - name: Restore data to destination (w/o mSC) + kubernetes.core.k8s: + state: present + definition: + apiVersion: volsync.backube/v1alpha1 + kind: ReplicationDestination + metadata: + name: restore + namespace: "{{ namespace }}" + spec: + trigger: + manual: restore-test1-1 + restic: + repository: "{{ restic_secret_name }}" + destinationPVC: data-dest 
+ copyMethod: Direct + cacheCapacity: 1Gi + when: podSecurityContext is not defined + + # Should have created new repo for empty path and then restored no data + - name: Wait for restore to complete + kubernetes.core.k8s_info: + api_version: volsync.backube/v1alpha1 + kind: ReplicationDestination + name: restore + namespace: "{{ namespace }}" + register: res + until: > + res.resources | length > 0 and + res.resources[0].status.lastManualSync is defined and + res.resources[0].status.lastManualSync=="restore-test1-1" and + res.resources[0].status.latestMoverStatus is defined and + res.resources[0].status.latestMoverStatus.result == "Successful" and + res.resources[0].status.latestMoverStatus.logs is search("Initialize Dir.*") and + res.resources[0].status.latestMoverStatus.logs is search("created restic repository.*") and + res.resources[0].status.latestMoverStatus.logs is search("No data will be restored.*") and + res.resources[0].status.latestMoverStatus.logs is search("Restic completed in.*") + delay: 1 + retries: 300 + + # Now that the bucket/path has been created, run again and this time the repo should not be initialized + - name: Trigger another restore after bucket created and repo initialized + kubernetes.core.k8s: + state: patched + definition: + apiVersion: volsync.backube/v1alpha1 + kind: ReplicationDestination + metadata: + name: restore + namespace: "{{ namespace }}" + spec: + restic: + enableFileDeletion: true + trigger: + manual: restore-test1-2 + + # Second restore should also be successful, no init and no files restored + - name: Wait for restore to complete + kubernetes.core.k8s_info: + api_version: volsync.backube/v1alpha1 + kind: ReplicationDestination + name: restore + namespace: "{{ namespace }}" + register: res + until: > + res.resources | length > 0 and + res.resources[0].status.lastManualSync is defined and + res.resources[0].status.lastManualSync=="restore-test1-2" and + res.resources[0].status.latestMoverStatus is defined and + 
res.resources[0].status.latestMoverStatus.result == "Successful" and + res.resources[0].status.latestMoverStatus.logs is not search("Initialize Dir.*") and + res.resources[0].status.latestMoverStatus.logs is search("No data will be restored.*") and + res.resources[0].status.latestMoverStatus.logs is search("Restic completed in.*") + delay: 1 + retries: 300 + + # + # Test2: use existing bucket, but non-existent repo in the bucket + # - delete current restic secret and create a new one that will use a repo in the same + # bucket, but using a new path + # + - name: Remove the existing restic secret so we can create a new one with different repo path + kubernetes.core.k8s: + state: absent + api_version: v1 + kind: Secret + name: "{{ restic_secret_name }}" + namespace: "{{ namespace }}" + + - include_role: + name: create_restic_secret + vars: + minio_namespace: minio + bucket_name: "test-e2e-custombucket-{{ namespace }}" + path_name: "{{ namespace }}-test2" + + - name: Trigger another restore now that the restic secret uses a new path in the bucket + kubernetes.core.k8s: + state: patched + definition: + apiVersion: volsync.backube/v1alpha1 + kind: ReplicationDestination + metadata: + name: restore + namespace: "{{ namespace }}" + spec: + restic: + enableFileDeletion: true + trigger: + manual: restore-test2-1 + + # Should have created new repo for empty path and then restored no data + - name: Wait for restore to complete + kubernetes.core.k8s_info: + api_version: volsync.backube/v1alpha1 + kind: ReplicationDestination + name: restore + namespace: "{{ namespace }}" + register: res + until: > + res.resources | length > 0 and + res.resources[0].status.lastManualSync is defined and + res.resources[0].status.lastManualSync=="restore-test2-1" and + res.resources[0].status.latestMoverStatus is defined and + res.resources[0].status.latestMoverStatus.result == "Successful" and + res.resources[0].status.latestMoverStatus.logs is search("Initialize Dir.*") and + 
res.resources[0].status.latestMoverStatus.logs is search("created restic repository.*") and + res.resources[0].status.latestMoverStatus.logs is search("No data will be restored.*") and + res.resources[0].status.latestMoverStatus.logs is search("Restic completed in.*") + delay: 1 + retries: 300 + + # Now that the path in the bucket has been created, run again and this time the repo should not be initialized + - name: Trigger another restore after repo initialized + kubernetes.core.k8s: + state: patched + definition: + apiVersion: volsync.backube/v1alpha1 + kind: ReplicationDestination + metadata: + name: restore + namespace: "{{ namespace }}" + spec: + restic: + enableFileDeletion: true + trigger: + manual: restore-test2-2 + + # Second restore should also be successful, no init and no files restored + - name: Wait for restore to complete + kubernetes.core.k8s_info: + api_version: volsync.backube/v1alpha1 + kind: ReplicationDestination + name: restore + namespace: "{{ namespace }}" + register: res + until: > + res.resources | length > 0 and + res.resources[0].status.lastManualSync is defined and + res.resources[0].status.lastManualSync=="restore-test2-2" and + res.resources[0].status.latestMoverStatus is defined and + res.resources[0].status.latestMoverStatus.result == "Successful" and + res.resources[0].status.latestMoverStatus.logs is not search("Initialize Dir.*") and + res.resources[0].status.latestMoverStatus.logs is search("No data will be restored.*") and + res.resources[0].status.latestMoverStatus.logs is search("Restic completed in.*") + delay: 1 + retries: 300