From 5280a27bc58748be16daea40bac84d006e1b6de3 Mon Sep 17 00:00:00 2001
From: justicorn <167213144+justicorn@users.noreply.github.com>
Date: Wed, 15 May 2024 18:46:57 +0000
Subject: [PATCH 01/10] merge main

---
 bundles/uds-core-swf/uds-bundle.yaml | 15 +++++++++++++++
 packages/init/zarf.yaml              |  1 +
 2 files changed, 16 insertions(+)

diff --git a/bundles/uds-core-swf/uds-bundle.yaml b/bundles/uds-core-swf/uds-bundle.yaml
index 3c4cb7dd..a893258a 100644
--- a/bundles/uds-core-swf/uds-bundle.yaml
+++ b/bundles/uds-core-swf/uds-bundle.yaml
@@ -57,6 +57,21 @@ packages:
       velero:
         velero:
           values:
+            - path: initContainers
+              - name: velero-plugin-for-csi
+                image: velero/velero-plugin-for-csi:v0.7.1
+                imagePullPolicy: IfNotPresent
+                volumeMounts:
+                  - mountPath: /target
+                    name: plugins
+              - name: velero-plugin-for-aws
+                image: velero/velero-plugin-for-aws:v1.9.2
+                imagePullPolicy: IfNotPresent
+                volumeMounts:
+                  - mountPath: /target
+                    name: plugins
+            - path: configuration.features
+              value: EnableCSI
             - path: snapshotsEnabled
               value: true
             - path: configuration.volumeSnapshotLocation
diff --git a/packages/init/zarf.yaml b/packages/init/zarf.yaml
index dc67cef8..7c625ba8 100644
--- a/packages/init/zarf.yaml
+++ b/packages/init/zarf.yaml
@@ -131,6 +131,7 @@ components:
       - registry.k8s.io/sig-storage/csi-snapshotter:v7.0.2
       - registry.k8s.io/sig-storage/csi-resizer:v1.10.1
       - registry.k8s.io/sig-storage/livenessprobe:v2.12.0
+      - velero/velero-plugin-for-csi:v0.7.1
       - "###ZARF_PKG_TMPL_REGISTRY_IMAGE_DOMAIN######ZARF_PKG_TMPL_REGISTRY_IMAGE###:###ZARF_PKG_TMPL_REGISTRY_IMAGE_TAG###"
 
 # Creates the pod+git mutating webhook

From 3ccba98c99f1fd21734e2562f39deff078628a7d Mon Sep 17 00:00:00 2001
From: justicorn <167213144+justicorn@users.noreply.github.com>
Date: Fri, 17 May 2024 18:20:31 +0000
Subject: [PATCH 02/10] image fixes

---
 bundles/uds-core-swf/uds-bundle.yaml | 25 +++++++++++++------------
 packages/init/zarf.yaml              |  3 ++-
 2 files changed, 15 insertions(+), 13 deletions(-)

diff --git a/bundles/uds-core-swf/uds-bundle.yaml b/bundles/uds-core-swf/uds-bundle.yaml
index 2da4b872..de47aa15 100644
--- a/bundles/uds-core-swf/uds-bundle.yaml
+++ b/bundles/uds-core-swf/uds-bundle.yaml
@@ -58,18 +58,19 @@ packages:
         velero:
           values:
             - path: initContainers
-              - name: velero-plugin-for-csi
-                image: velero/velero-plugin-for-csi:v0.7.1
-                imagePullPolicy: IfNotPresent
-                volumeMounts:
-                  - mountPath: /target
-                    name: plugins
-              - name: velero-plugin-for-aws
-                image: velero/velero-plugin-for-aws:v1.9.2
-                imagePullPolicy: IfNotPresent
-                volumeMounts:
-                  - mountPath: /target
-                    name: plugins
+              value:
+                - name: velero-plugin-for-csi
+                  image: velero/velero-plugin-for-csi:v0.7.1
+                  imagePullPolicy: IfNotPresent
+                  volumeMounts:
+                    - mountPath: /target
+                      name: plugins
+                - name: velero-plugin-for-aws
+                  image: velero/velero-plugin-for-aws:v1.9.2
+                  imagePullPolicy: IfNotPresent
+                  volumeMounts:
+                    - mountPath: /target
+                      name: plugins
             - path: configuration.features
               value: EnableCSI
             - path: snapshotsEnabled
diff --git a/packages/init/zarf.yaml b/packages/init/zarf.yaml
index 7c625ba8..62a52882 100644
--- a/packages/init/zarf.yaml
+++ b/packages/init/zarf.yaml
@@ -71,6 +71,7 @@ components:
       - registry.k8s.io/sig-storage/csi-snapshotter:v7.0.2
       - registry.k8s.io/sig-storage/csi-resizer:v1.10.1
       - registry.k8s.io/sig-storage/livenessprobe:v2.12.0
+      - docker.io/velero/velero-plugin-for-csi:v0.7.1
 
   - name: namespaces
     required: true
@@ -131,7 +132,7 @@ components:
       - registry.k8s.io/sig-storage/csi-snapshotter:v7.0.2
       - registry.k8s.io/sig-storage/csi-resizer:v1.10.1
       - registry.k8s.io/sig-storage/livenessprobe:v2.12.0
-      - velero/velero-plugin-for-csi:v0.7.1
+      - docker.io/velero/velero-plugin-for-csi:v0.7.1
       - "###ZARF_PKG_TMPL_REGISTRY_IMAGE_DOMAIN######ZARF_PKG_TMPL_REGISTRY_IMAGE###:###ZARF_PKG_TMPL_REGISTRY_IMAGE_TAG###"
 
 # Creates the pod+git mutating webhook

From 7848873831ce24089d2d85f9456937c6ce80ce9f Mon Sep 17 00:00:00 2001
From: justicorn <167213144+justicorn@users.noreply.github.com>
Date: Fri, 17 May 2024 20:16:21 +0000
Subject: [PATCH 03/10] checkpoint, please squish after fixing

---
 packages/init/zarf.yaml | 32 ++++++++++++++++----------------
 1 file changed, 16 insertions(+), 16 deletions(-)

diff --git a/packages/init/zarf.yaml b/packages/init/zarf.yaml
index 62a52882..04bd3cc9 100644
--- a/packages/init/zarf.yaml
+++ b/packages/init/zarf.yaml
@@ -63,14 +63,14 @@ components:
     required: true
     description: Push nutanix images to the zarf registry
    images:
-      - registry.k8s.io/sig-storage/snapshot-controller:v7.0.2
-      - registry.k8s.io/sig-storage/snapshot-validation-webhook:v7.0.2
-      - quay.io/karbon/ntnx-csi:v2.6.8
-      - registry.k8s.io/sig-storage/csi-node-driver-registrar:v2.10.1
-      - registry.k8s.io/sig-storage/csi-provisioner:v4.0.1
-      - registry.k8s.io/sig-storage/csi-snapshotter:v7.0.2
-      - registry.k8s.io/sig-storage/csi-resizer:v1.10.1
-      - registry.k8s.io/sig-storage/livenessprobe:v2.12.0
+      - registry.k8s.io/sig-storage/snapshot-controller:v6.3.2
+      - registry.k8s.io/sig-storage/snapshot-validation-webhook:v6.3.2
+      - quay.io/karbon/ntnx-csi:v2.6.6
+      - registry.k8s.io/sig-storage/csi-node-driver-registrar:v2.9.1
+      - registry.k8s.io/sig-storage/csi-provisioner:v3.6.2
+      - registry.k8s.io/sig-storage/csi-snapshotter:v6.3.2
+      - registry.k8s.io/sig-storage/csi-resizer:v1.9.2
+      - registry.k8s.io/sig-storage/livenessprobe:v2.11.0
       - docker.io/velero/velero-plugin-for-csi:v0.7.1
 
   - name: namespaces
     required: true
@@ -124,14 +124,14 @@ components:
     required: true
     description: Push nutanix csi images to the zarf registry
    images:
-      - registry.k8s.io/sig-storage/snapshot-controller:v7.0.2
-      - registry.k8s.io/sig-storage/snapshot-validation-webhook:v7.0.2
-      - quay.io/karbon/ntnx-csi:v2.6.8
-      - registry.k8s.io/sig-storage/csi-node-driver-registrar:v2.10.1
-      - registry.k8s.io/sig-storage/csi-provisioner:v4.0.1
-      - registry.k8s.io/sig-storage/csi-snapshotter:v7.0.2
-      - registry.k8s.io/sig-storage/csi-resizer:v1.10.1
-      - registry.k8s.io/sig-storage/livenessprobe:v2.12.0
+      - registry.k8s.io/sig-storage/snapshot-controller:v6.3.2
+      - registry.k8s.io/sig-storage/snapshot-validation-webhook:v6.3.2
+      - quay.io/karbon/ntnx-csi:v2.6.6
+      - registry.k8s.io/sig-storage/csi-node-driver-registrar:v2.9.1
+      - registry.k8s.io/sig-storage/csi-provisioner:v3.6.2
+      - registry.k8s.io/sig-storage/csi-snapshotter:v6.3.2
+      - registry.k8s.io/sig-storage/csi-resizer:v1.9.2
+      - registry.k8s.io/sig-storage/livenessprobe:v2.11.0
       - docker.io/velero/velero-plugin-for-csi:v0.7.1
       - "###ZARF_PKG_TMPL_REGISTRY_IMAGE_DOMAIN######ZARF_PKG_TMPL_REGISTRY_IMAGE###:###ZARF_PKG_TMPL_REGISTRY_IMAGE_TAG###"
 

From f155bd944ac9ce503cc5006867907f0b27fba4f3 Mon Sep 17 00:00:00 2001
From: justicorn <167213144+justicorn@users.noreply.github.com>
Date: Fri, 17 May 2024 20:49:18 +0000
Subject: [PATCH 04/10] ironbank image

---
 bundles/uds-core-swf/uds-bundle.yaml | 4 ++--
 packages/init/zarf.yaml              | 4 ++--
 2 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/bundles/uds-core-swf/uds-bundle.yaml b/bundles/uds-core-swf/uds-bundle.yaml
index de47aa15..b26e6478 100644
--- a/bundles/uds-core-swf/uds-bundle.yaml
+++ b/bundles/uds-core-swf/uds-bundle.yaml
@@ -60,13 +60,13 @@ packages:
             - path: initContainers
               value:
                 - name: velero-plugin-for-csi
-                  image: velero/velero-plugin-for-csi:v0.7.1
+                  image: registry1.dso.mil/ironbank/opensource/velero/velero-plugin-for-csi:v0.7.1
                   imagePullPolicy: IfNotPresent
                   volumeMounts:
                     - mountPath: /target
                       name: plugins
                 - name: velero-plugin-for-aws
-                  image: velero/velero-plugin-for-aws:v1.9.2
+                  image: registry1.dso.mil/ironbank/opensource/velero/velero-plugin-for-aws:v1.9.2
                   imagePullPolicy: IfNotPresent
                   volumeMounts:
                     - mountPath: /target
diff --git a/packages/init/zarf.yaml b/packages/init/zarf.yaml
index 04bd3cc9..21b288d6 100644
--- a/packages/init/zarf.yaml
+++ b/packages/init/zarf.yaml
@@ -71,7 +71,7 @@ components:
       - registry.k8s.io/sig-storage/csi-snapshotter:v6.3.2
       - registry.k8s.io/sig-storage/csi-resizer:v1.9.2
       - registry.k8s.io/sig-storage/livenessprobe:v2.11.0
-      - docker.io/velero/velero-plugin-for-csi:v0.7.1
+      - registry1.dso.mil/ironbank/opensource/velero/velero-plugin-for-csi:v0.7.1
 
   - name: namespaces
     required: true
@@ -132,7 +132,7 @@ components:
       - registry.k8s.io/sig-storage/csi-snapshotter:v6.3.2
       - registry.k8s.io/sig-storage/csi-resizer:v1.9.2
       - registry.k8s.io/sig-storage/livenessprobe:v2.11.0
-      - docker.io/velero/velero-plugin-for-csi:v0.7.1
+      - registry1.dso.mil/ironbank/opensource/velero/velero-plugin-for-csi:v0.7.1
       - "###ZARF_PKG_TMPL_REGISTRY_IMAGE_DOMAIN######ZARF_PKG_TMPL_REGISTRY_IMAGE###:###ZARF_PKG_TMPL_REGISTRY_IMAGE_TAG###"
 
 # Creates the pod+git mutating webhook

From 2ec35efeef6229061cff1c434414eec95b5b2432 Mon Sep 17 00:00:00 2001
From: justicorn <167213144+justicorn@users.noreply.github.com>
Date: Mon, 20 May 2024 13:30:51 +0000
Subject: [PATCH 05/10] pushing to ask for eyes

---
 bundles/uds-core-swf/uds-bundle.yaml | 8 ++++----
 packages/init/zarf.yaml              | 2 ++
 2 files changed, 6 insertions(+), 4 deletions(-)

diff --git a/bundles/uds-core-swf/uds-bundle.yaml b/bundles/uds-core-swf/uds-bundle.yaml
index b26e6478..61dbdb55 100644
--- a/bundles/uds-core-swf/uds-bundle.yaml
+++ b/bundles/uds-core-swf/uds-bundle.yaml
@@ -59,14 +59,14 @@ packages:
           values:
             - path: initContainers
               value:
-                - name: velero-plugin-for-csi
-                  image: registry1.dso.mil/ironbank/opensource/velero/velero-plugin-for-csi:v0.7.1
+                - name: velero-plugin-for-aws
+                  image: registry1.dso.mil/ironbank/opensource/velero/velero-plugin-for-aws:v1.9.2
                   imagePullPolicy: IfNotPresent
                   volumeMounts:
                     - mountPath: /target
                       name: plugins
-                - name: velero-plugin-for-aws
-                  image: registry1.dso.mil/ironbank/opensource/velero/velero-plugin-for-aws:v1.9.2
+                - name: velero-plugin-for-csi
+                  image: registry1.dso.mil/ironbank/opensource/velero/velero-plugin-for-csi:v0.7.1
                   imagePullPolicy: IfNotPresent
                   volumeMounts:
                     - mountPath: /target
diff --git a/packages/init/zarf.yaml b/packages/init/zarf.yaml
index 21b288d6..508dcf0a 100644
--- a/packages/init/zarf.yaml
+++ b/packages/init/zarf.yaml
@@ -72,6 +72,7 @@ components:
       - registry.k8s.io/sig-storage/csi-resizer:v1.9.2
       - registry.k8s.io/sig-storage/livenessprobe:v2.11.0
       - registry1.dso.mil/ironbank/opensource/velero/velero-plugin-for-csi:v0.7.1
+      - registry1.dso.mil/ironbank/opensource/velero/velero-plugin-for-aws:v1.9.2
 
   - name: namespaces
     required: true
@@ -134,6 +134,7 @@ components:
       - registry.k8s.io/sig-storage/csi-resizer:v1.9.2
       - registry.k8s.io/sig-storage/livenessprobe:v2.11.0
       - registry1.dso.mil/ironbank/opensource/velero/velero-plugin-for-csi:v0.7.1
+      - registry1.dso.mil/ironbank/opensource/velero/velero-plugin-for-aws:v1.9.2
       - "###ZARF_PKG_TMPL_REGISTRY_IMAGE_DOMAIN######ZARF_PKG_TMPL_REGISTRY_IMAGE###:###ZARF_PKG_TMPL_REGISTRY_IMAGE_TAG###"
"###ZARF_PKG_TMPL_REGISTRY_IMAGE_DOMAIN######ZARF_PKG_TMPL_REGISTRY_IMAGE###:###ZARF_PKG_TMPL_REGISTRY_IMAGE_TAG###" # Creates the pod+git mutating webhook From e4f57818bfe5238ff154244e322d78b336a085c8 Mon Sep 17 00:00:00 2001 From: justicorn <167213144+justicorn@users.noreply.github.com> Date: Mon, 20 May 2024 20:48:36 +0000 Subject: [PATCH 06/10] downgrade csi plugin --- bundles/uds-core-swf/uds-bundle.yaml | 2 +- packages/init/zarf.yaml | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/bundles/uds-core-swf/uds-bundle.yaml b/bundles/uds-core-swf/uds-bundle.yaml index 61dbdb55..cfd70ebb 100644 --- a/bundles/uds-core-swf/uds-bundle.yaml +++ b/bundles/uds-core-swf/uds-bundle.yaml @@ -66,7 +66,7 @@ packages: - mountPath: /target name: plugins - name: velero-plugin-for-csi - image: registry1.dso.mil/ironbank/opensource/velero/velero-plugin-for-csi:v0.7.1 + image: registry1.dso.mil/ironbank/opensource/velero/velero-plugin-for-csi:v0.7.0 imagePullPolicy: IfNotPresent volumeMounts: - mountPath: /target diff --git a/packages/init/zarf.yaml b/packages/init/zarf.yaml index 508dcf0a..d5f26f91 100644 --- a/packages/init/zarf.yaml +++ b/packages/init/zarf.yaml @@ -71,7 +71,7 @@ components: - registry.k8s.io/sig-storage/csi-snapshotter:v6.3.2 - registry.k8s.io/sig-storage/csi-resizer:v1.9.2 - registry.k8s.io/sig-storage/livenessprobe:v2.11.0 - - registry1.dso.mil/ironbank/opensource/velero/velero-plugin-for-csi:v0.7.1 + - registry1.dso.mil/ironbank/opensource/velero/velero-plugin-for-csi:v0.7.0 - registry1.dso.mil/ironbank/opensource/velero/velero-plugin-for-aws:v1.9.2 - name: namespaces @@ -133,7 +133,7 @@ components: - registry.k8s.io/sig-storage/csi-snapshotter:v6.3.2 - registry.k8s.io/sig-storage/csi-resizer:v1.9.2 - registry.k8s.io/sig-storage/livenessprobe:v2.11.0 - - registry1.dso.mil/ironbank/opensource/velero/velero-plugin-for-csi:v0.7.1 + - registry1.dso.mil/ironbank/opensource/velero/velero-plugin-for-csi:v0.7.0 - registry1.dso.mil/ironbank/opensource/velero/velero-plugin-for-aws:v1.9.2 - "###ZARF_PKG_TMPL_REGISTRY_IMAGE_DOMAIN######ZARF_PKG_TMPL_REGISTRY_IMAGE###:###ZARF_PKG_TMPL_REGISTRY_IMAGE_TAG###" From f66e0d8e47947d095f56f434342f7b8cbd67a041 Mon Sep 17 00:00:00 2001 From: justicorn <167213144+justicorn@users.noreply.github.com> Date: Tue, 21 May 2024 17:20:57 +0000 Subject: [PATCH 07/10] readme --- README.md | 5 ----- docs/backup-restore.md | 46 +++++++++++++++++++++++++++++++++++++----- 2 files changed, 41 insertions(+), 10 deletions(-) diff --git a/README.md b/README.md index b6ffa19c..043c58f9 100644 --- a/README.md +++ b/README.md @@ -61,11 +61,6 @@ These are the default bucket names. Gitlab allows you to add a suffix in your `u * velero-backups -
-- Velero
-
-* velero-backups
-
 Gitlab
 
diff --git a/docs/backup-restore.md b/docs/backup-restore.md
index 9c35091d..01e27b79 100644
--- a/docs/backup-restore.md
+++ b/docs/backup-restore.md
@@ -1,15 +1,51 @@
 # Backup and Restore
 
 ## Velero
-This bundle has Velero configured to run automated backups and stores that data to the configured object storage bucket. To perform a restore you will want to get the name of the velero backup you want to use for your restore and perform a velero restore for the relevant namespace.
+This bundle has Velero configured to run automated backups and stores that data in the configured object storage bucket. The backup can be kicked off manually. Below is a start-to-finish process of taking a backup and restoring it, including restoring data on the Persistent Volume.
 
-Example command to start a velero restore for a namespace:
+- Kick off backup
 ```bash
-kubectl exec -it -n velero svc/velero-velero -- /bin/bash -c \
-  "velero restore create my-confluence-restore-$(date +%s) \
-  --from-backup velero-velero-uds-confluence-backup-20240129050033 --include-namespaces confluence --wait"
+$ kubectl exec -it -n velero svc/velero -- /bin/bash -c "velero backup create manual-nexus-velero-back
+up-$(date +%s) --include-namespaces nexus"
+
+Backup request "manual-nexus-velero-backup-1716311265" submitted successfully.
+Run `velero backup describe manual-nexus-velero-backup-1716311265` or `velero backup logs manual-nexus-velero-backup-1716311265` for more details.
 ```
+
+- Delete the PVC (Velero will not restore data if the backed-up PV/PVC still exist)
+```bash
+$ kubectl delete pvc nexus-nexus-repository-manager-data
+
+persistentvolumeclaim "nexus-nexus-repository-manager-data" deleted
+```
+
+- Remove the finalizers on the PVC, allowing it to be deleted
+  ```yaml
+  finalizers:
+  - kubernetes.io/pvc-protection
+  ```
+```bash
+$ kubectl edit pvc nexus-nexus-repository-manager-data 
+persistentvolumeclaim/nexus-nexus-repository-manager-data edited
+
+$ kubectl get pvc
+
+No resources found in nexus namespace.
+```
+
+- Run the restore
+```bash
+$ kubectl exec -it -n velero svc/velero -- /bin/bash -c "velero restore create velero-test-nexus-resto
+re-$(date +%s) --from-backup manual-nexus-velero-backup-1716311265 --include-namespaces nexus --wait"
+
+Restore request "velero-test-nexus-restore-1716311387" submitted successfully.
+Waiting for restore to complete. You may safely press ctrl-c to stop waiting - your restore will continue in the background.
+.........
+Restore completed with status: Completed. You may check for more information using the commands `velero restore describe velero-test-nexus-restore-1716311387` and `velero restore logs velero-test-nexus-restore-1716311387`.
+```
+
+At this point, the pods should restart with the new data. The pods can also be deleted and allowed to recreate. Data should be restored to the PV from the time of the backup.
 
 ## Gitlab
 Gitlab has its own utility to perform the backup and restore functionality. More details on how to use it are shown below.
 
From c3b1d9c84209c7755c733558e5ea752a9d9e4e1d Mon Sep 17 00:00:00 2001
From: justicorn <167213144+justicorn@users.noreply.github.com>
Date: Tue, 21 May 2024 17:27:06 +0000
Subject: [PATCH 08/10] fmt

---
 docs/backup-restore.md | 13 +++++++------
 1 file changed, 7 insertions(+), 6 deletions(-)

diff --git a/docs/backup-restore.md b/docs/backup-restore.md
index 01e27b79..ea324421 100644
--- a/docs/backup-restore.md
+++ b/docs/backup-restore.md
@@ -3,10 +3,10 @@
 ## Velero
 This bundle has Velero configured to run automated backups and stores that data in the configured object storage bucket. The backup can be kicked off manually. Below is a start-to-finish process of taking a backup and restoring it, including restoring data on the Persistent Volume.
 
-- Kick off backup
+- Manually kick off a backup
 ```bash
-$ kubectl exec -it -n velero svc/velero -- /bin/bash -c "velero backup create manual-nexus-velero-back
-up-$(date +%s) --include-namespaces nexus"
+$ kubectl exec -it -n velero svc/velero -- /bin/bash -c "velero backup create \
+  manual-nexus-velero-backup-$(date +%s) --include-namespaces nexus"
 
 Backup request "manual-nexus-velero-backup-1716311265" submitted successfully.
 Run `velero backup describe manual-nexus-velero-backup-1716311265` or `velero backup logs manual-nexus-velero-backup-1716311265` for more details.
@@ -25,7 +25,8 @@ persistentvolumeclaim "nexus-nexus-repository-manager-data" deleted
   - kubernetes.io/pvc-protection
   ```
 ```bash
-$ kubectl edit pvc nexus-nexus-repository-manager-data 
+$ kubectl edit pvc nexus-nexus-repository-manager-data
+
 persistentvolumeclaim/nexus-nexus-repository-manager-data edited
 
 $ kubectl get pvc
@@ -35,8 +36,8 @@ No resources found in nexus namespace.
 
 - Run the restore
 ```bash
-$ kubectl exec -it -n velero svc/velero -- /bin/bash -c "velero restore create velero-test-nexus-resto
-re-$(date +%s) --from-backup manual-nexus-velero-backup-1716311265 --include-namespaces nexus --wait"
+$ kubectl exec -it -n velero svc/velero -- /bin/bash -c "velero restore create velero-test-nexus-restore-$(date +%s) \
+  --from-backup manual-nexus-velero-backup-1716311265 --include-namespaces nexus --wait"
 
 Restore request "velero-test-nexus-restore-1716311387" submitted successfully.
 Waiting for restore to complete. You may safely press ctrl-c to stop waiting - your restore will continue in the background.

From 74ff0e7f212c454e4b2e2a840671f19ebe5202cf Mon Sep 17 00:00:00 2001
From: Opnauticus
Date: Wed, 22 May 2024 12:45:54 -0700
Subject: [PATCH 09/10] Update backup-restore.md

---
 docs/backup-restore.md | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/docs/backup-restore.md b/docs/backup-restore.md
index ea324421..a92a7cfd 100644
--- a/docs/backup-restore.md
+++ b/docs/backup-restore.md
@@ -47,6 +47,8 @@ Restore completed with status: Completed. You may check for more information usi
 
 At this point, the pods should restart with the new data. The pods can also be deleted and allowed to recreate. Data should be restored to the PV from the time of the backup.
 
+> NOTE - Unfortunately [according to the Nutanix docs](https://portal.nutanix.com/page/documents/solutions/details?targetId=NVD-2177-Cloud-Native-6-5-OpenShift:application-backup-and-disaster-recovery.html#:~:text=As%20an%20alternative%20solution), NFS shares that are dynamically provisioned by the Nutanix Files CSI provisioner don't currently support the Kubernetes CSI Snapshot API -- meaning RWX volumes WILL NOT BE BACKED UP BY THIS PROCESS until an alternative solution like restic is in place.
+
 ## Gitlab
 Gitlab has its own utility to perform the backup and restore functionality. More details on how to use it are shown below.
 

From 99bf69372fa8946c1c14065ab11b85c5f1134e55 Mon Sep 17 00:00:00 2001
From: justicorn <167213144+justicorn@users.noreply.github.com>
Date: Thu, 23 May 2024 11:21:04 +0000
Subject: [PATCH 10/10] flavor

---
 docs/backup-restore.md | 20 ++++++++++++++++++--
 1 file changed, 18 insertions(+), 2 deletions(-)

diff --git a/docs/backup-restore.md b/docs/backup-restore.md
index a92a7cfd..4723692b 100644
--- a/docs/backup-restore.md
+++ b/docs/backup-restore.md
@@ -1,7 +1,7 @@
 # Backup and Restore
 
 ## Velero
-This bundle has Velero configured to run automated backups and stores that data in the configured object storage bucket. The backup can be kicked off manually. Below is a start-to-finish process of taking a backup and restoring it, including restoring data on the Persistent Volume.
+This bundle has Velero configured to run automated backups and stores that data in the configured object storage bucket. The backup can be kicked off manually. Below is a start-to-finish process of taking a backup and restoring it, including restoring data on the Persistent Volume. If restoring from an automated backup, skip the first (manual) step; the rest of the process is the same.
 
 - Manually kick off a backup
 ```bash
@@ -47,7 +47,23 @@ Restore completed with status: Completed. You may check for more information usi
 
 At this point, the pods should restart with the new data. The pods can also be deleted and allowed to recreate. Data should be restored to the PV from the time of the backup.
 
-> NOTE - Unfortunately [according to the Nutanix docs](https://portal.nutanix.com/page/documents/solutions/details?targetId=NVD-2177-Cloud-Native-6-5-OpenShift:application-backup-and-disaster-recovery.html#:~:text=As%20an%20alternative%20solution), NFS shares that are dynamically provisioned by the Nutanix Files CSI provisioner don't currently support the Kubernetes CSI Snapshot API -- meaning RWX volumes WILL NOT BE BACKED UP BY THIS PROCESS until an alternative solution like restic is in place.
+> [!WARNING]
+> Unfortunately [according to the Nutanix docs](https://portal.nutanix.com/page/documents/solutions/details?targetId=NVD-2177-Cloud-Native-6-5-OpenShift:application-backup-and-disaster-recovery.html#:~:text=As%20an%20alternative%20solution), NFS shares that are dynamically provisioned by the Nutanix Files CSI provisioner don't currently support the Kubernetes CSI Snapshot API -- meaning RWX volumes WILL NOT BE BACKED UP BY THIS PROCESS until an alternative solution like restic is in place.
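+
+If file-system backup is adopted as that alternative, the upstream `vmware-tanzu/velero` Helm chart exposes it through chart values. The override below (which would sit with the other velero values in `bundles/uds-core-swf/uds-bundle.yaml`) is an untested sketch; `deployNodeAgent` and `configuration.defaultVolumesToFsBackup` are upstream chart values that have not been validated against this bundle:
+
+```yaml
+velero:
+  velero:
+    values:
+      # Sketch only: deploy Velero's node agent so RWX volumes can be backed
+      # up with file-system backup (restic/kopia) instead of CSI snapshots.
+      - path: deployNodeAgent
+        value: true
+      # Fall back to file-system backup for volumes that CSI cannot snapshot.
+      - path: configuration.defaultVolumesToFsBackup
+        value: true
+```
 
 ## Gitlab
 Gitlab has its own utility to perform the backup and restore functionality. More details on how to use it are shown below.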