diff --git a/collections/ansible_collections/demo/openshift/roles/snapshot/tasks/create.yml b/collections/ansible_collections/demo/openshift/roles/snapshot/tasks/create.yml
new file mode 100644
index 0000000..89cb419
--- /dev/null
+++ b/collections/ansible_collections/demo/openshift/roles/snapshot/tasks/create.yml
@@ -0,0 +1,49 @@
+---
+- name: Get state of VirtualMachine
+  redhat.openshift_virtualization.kubevirt_vm_info:
+    name: "{{ item }}"
+    namespace: "{{ vm_namespace }}"
+  register: state
+
+- name: Stop VirtualMachine
+  redhat.openshift_virtualization.kubevirt_vm:
+    name: "{{ item }}"
+    namespace: "{{ vm_namespace }}"
+    running: false
+    wait: true
+  when: state.resources.0.spec.running
+
+- name: Create a VirtualMachineSnapshot
+  kubernetes.core.k8s:
+    definition:
+      apiVersion: snapshot.kubevirt.io/v1alpha1
+      kind: VirtualMachineSnapshot
+      metadata:
+        generateName: "{{ item }}-{{ ansible_date_time.epoch }}"
+        namespace: "{{ vm_namespace }}"
+      spec:
+        source:
+          apiGroup: kubevirt.io
+          kind: VirtualMachine
+          name: "{{ item }}"
+    wait: true
+    wait_condition:
+      type: Ready
+  register: snapshot
+
+- name: Start VirtualMachine
+  redhat.openshift_virtualization.kubevirt_vm:
+    name: "{{ item }}"
+    namespace: "{{ vm_namespace }}"
+    running: true
+    wait: true
+  when: state.resources.0.spec.running
+
+- name: Export snapshot name
+  ansible.builtin.set_stats:
+    data:
+      restore_snapshot_name: "{{ snapshot.result.metadata.name }}"
+
+- name: Output snapshot name
+  ansible.builtin.debug:
+    msg: "Successfully created snapshot {{ snapshot.result.metadata.name }}"
diff --git a/collections/ansible_collections/demo/openshift/roles/snapshot/tasks/main.yml b/collections/ansible_collections/demo/openshift/roles/snapshot/tasks/main.yml
new file mode 100644
index 0000000..bc0eead
--- /dev/null
+++ b/collections/ansible_collections/demo/openshift/roles/snapshot/tasks/main.yml
@@ -0,0 +1,12 @@
+---
+# parameters
+# snapshot_operation: one of "create" or "restore"
+- name: Show hostnames we care about
+  ansible.builtin.debug:
+    msg: "About to {{ snapshot_operation }} snapshot(s) for the following hosts:
+{{ lookup('ansible.builtin.inventory_hostnames', snapshot_hosts) | split(',') | difference(['localhost']) }}"
+
+- name: Manage snapshots based on operation
+  ansible.builtin.include_tasks:
+    file: "{{ snapshot_operation }}.yml"
+  loop: "{{ lookup('ansible.builtin.inventory_hostnames', snapshot_hosts) | regex_replace(vm_namespace + '-', '') | split(',') | difference(['localhost']) }}"
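Reviewer note (not part of the diff): the loop expression in main.yml doubles as the hostname-to-VM-name mapping, so a worked example helps. Assuming the illustrative inventory hosts openshift-cnv-rhel9-vm1, openshift-cnv-rhel9-vm2, and localhost, with vm_namespace set to openshift-cnv, the pipeline evaluates step by step as:

    lookup('ansible.builtin.inventory_hostnames', snapshot_hosts)
        -> "openshift-cnv-rhel9-vm1,openshift-cnv-rhel9-vm2,localhost"
    ... | regex_replace('openshift-cnv-', '')
        -> "rhel9-vm1,rhel9-vm2,localhost"
    ... | split(',')
        -> ["rhel9-vm1", "rhel9-vm2", "localhost"]
    ... | difference(['localhost'])
        -> ["rhel9-vm1", "rhel9-vm2"]

Each loop item is therefore the bare VirtualMachine name with the namespace prefix stripped, which matches how create.yml and restore.yml pass "{{ item }}" alongside a separate vm_namespace.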
diff --git a/collections/ansible_collections/demo/openshift/roles/snapshot/tasks/restore.yml b/collections/ansible_collections/demo/openshift/roles/snapshot/tasks/restore.yml
new file mode 100644
index 0000000..dfe6cb0
--- /dev/null
+++ b/collections/ansible_collections/demo/openshift/roles/snapshot/tasks/restore.yml
@@ -0,0 +1,51 @@
+---
+- name: Get state of VirtualMachine
+  redhat.openshift_virtualization.kubevirt_vm_info:
+    name: "{{ item }}"
+    namespace: "{{ vm_namespace }}"
+  register: state
+
+- name: List snapshots
+  kubernetes.core.k8s_info:
+    api_version: snapshot.kubevirt.io/v1alpha1
+    kind: VirtualMachineSnapshot
+    namespace: "{{ vm_namespace }}"
+  register: snapshot
+
+- name: Set snapshot name for {{ item }}
+  ansible.builtin.set_fact:
+    latest_snapshot: "{{ snapshot.resources | selectattr('spec.source.name', 'equalto', item) | sort(attribute='metadata.creationTimestamp') | last }}"
+
+- name: Stop VirtualMachine
+  redhat.openshift_virtualization.kubevirt_vm:
+    name: "{{ item }}"
+    namespace: "{{ vm_namespace }}"
+    running: false
+    wait: true
+  when: state.resources.0.spec.running
+
+- name: Restore a VirtualMachineSnapshot
+  kubernetes.core.k8s:
+    definition:
+      apiVersion: snapshot.kubevirt.io/v1alpha1
+      kind: VirtualMachineRestore
+      metadata:
+        generateName: "{{ latest_snapshot.metadata.generateName }}"
+        namespace: "{{ vm_namespace }}"
+      spec:
+        target:
+          apiGroup: kubevirt.io
+          kind: VirtualMachine
+          name: "{{ item }}"
+        virtualMachineSnapshotName: "{{ latest_snapshot.metadata.name }}"
+    wait: true
+    wait_condition:
+      type: Ready
+
+- name: Start VirtualMachine
+  redhat.openshift_virtualization.kubevirt_vm:
+    name: "{{ item }}"
+    namespace: "{{ vm_namespace }}"
+    running: true
+    wait: true
+  when: state.resources.0.spec.running
diff --git a/openshift/cnv/provision.yml b/openshift/cnv/delete.yml
similarity index 76%
rename from openshift/cnv/provision.yml
rename to openshift/cnv/delete.yml
index 243a273..0a2b5f0 100644
--- a/openshift/cnv/provision.yml
+++ b/openshift/cnv/delete.yml
@@ -1,7 +1,12 @@
 ---
-- name: De-Provision OCP-CNV VM
+- name: De-Provision OCP-CNV VMs
   hosts: localhost
   tasks:
+    - name: Show VM(s) we are about to make {{ instance_state }}
+      ansible.builtin.debug:
+        msg: "Setting the following hosts to {{ instance_state }}
+{{ lookup('ansible.builtin.inventory_hostnames', vm_host_string) | split(',') | difference(['localhost']) }}"
+
     - name: Define resources
       kubernetes.core.k8s:
         wait: true
@@ -10,19 +15,19 @@
         apiVersion: kubevirt.io/v1
         kind: VirtualMachine
         metadata:
-          name: "{{ vm_name }}"
+          name: "{{ item }}"
           namespace: "{{ vm_namespace }}"
           labels:
-            app: "{{ vm_name }}"
+            app: "{{ item }}"
             os.template.kubevirt.io/fedora36: 'true'
-            vm.kubevirt.io/name: "{{ vm_name }}"
+            vm.kubevirt.io/name: "{{ item }}"
         spec:
           dataVolumeTemplates:
             - apiVersion: cdi.kubevirt.io/v1beta1
               kind: DataVolume
               metadata:
                 creationTimestamp: null
-                name: "{{ vm_name }}"
+                name: "{{ item }}"
               spec:
                 sourceRef:
                   kind: DataSource
@@ -41,7 +46,7 @@
               vm.kubevirt.io/workload: server
             creationTimestamp: null
             labels:
-              kubevirt.io/domain: "{{ vm_name }}"
+              kubevirt.io/domain: "{{ item }}"
               kubevirt.io/size: small
           spec:
             domain:
@@ -72,5 +77,6 @@
           terminationGracePeriodSeconds: 180
           volumes:
             - dataVolume:
-                name: "{{ vm_name }}"
+                name: "{{ item }}"
              name: rootdisk
+      loop: "{{ lookup('ansible.builtin.inventory_hostnames', vm_host_string) | regex_replace(vm_namespace + '-', '') | split(',') | difference(['localhost']) }}"
diff --git a/openshift/cnv/snapshot.yml b/openshift/cnv/snapshot.yml
new file mode 100644
index 0000000..fb5cc55
--- /dev/null
+++ b/openshift/cnv/snapshot.yml
@@ -0,0 +1,9 @@
+---
+- name: Manage CNV snapshots
+  hosts: localhost
+  tasks:
+    - name: Include snapshot role
+      ansible.builtin.include_role:
+        name: "demo.openshift.snapshot"
+      vars:
+        snapshot_hosts: "{{ _hosts }}"
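Reviewer note (not part of the diff): snapshot.yml deliberately leaves snapshot_operation and vm_namespace to arrive as extra vars from the job templates added in setup.yml below. A minimal sketch of driving the same role without the controller, with illustrative values for every variable:

    ---
    - name: Create CNV snapshots without the controller
      hosts: localhost
      tasks:
        - name: Include snapshot role
          ansible.builtin.include_role:
            name: demo.openshift.snapshot
          vars:
            snapshot_operation: create  # or "restore"
            snapshot_hosts: "openshift-cnv-rhel*"
            vm_namespace: openshift-cnv

Note that create.yml uses ansible_date_time, so fact gathering must stay enabled (it is by default).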
diff --git a/openshift/setup.yml b/openshift/setup.yml
index 3c152d7..6b385c6 100644
--- a/openshift/setup.yml
+++ b/openshift/setup.yml
@@ -25,6 +25,7 @@ controller_inventory_sources:
     source_path: openshift/inventory.kubevirt.yml
     credential: OpenShift Credential
     update_on_launch: false
+    overwrite: true
 
 controller_templates:
   - name: OpenShift / EDA / Install Controller
@@ -96,11 +97,67 @@ controller_templates:
     credentials:
       - "OpenShift Credential"
 
+  - name: OpenShift / CNV / Create VM Snapshots
+    job_type: run
+    inventory: "Demo Inventory"
+    project: "Ansible official demo project"
+    playbook: "openshift/cnv/snapshot.yml"
+    notification_templates_started: Telemetry
+    notification_templates_success: Telemetry
+    notification_templates_error: Telemetry
+    extra_vars:
+      snapshot_operation: create
+    survey_enabled: true
+    survey:
+      name: ''
+      description: ''
+      spec:
+        - question_name: Server Name or Pattern
+          type: text
+          variable: _hosts
+          default: "openshift-cnv-rhel*"
+          required: true
+        - question_name: VM NameSpace
+          type: text
+          variable: vm_namespace
+          default: openshift-cnv
+          required: true
+    credentials:
+      - "OpenShift Credential"
+
+  - name: OpenShift / CNV / Restore Latest VM Snapshots
+    job_type: run
+    inventory: "Demo Inventory"
+    project: "Ansible official demo project"
+    playbook: "openshift/cnv/snapshot.yml"
+    notification_templates_started: Telemetry
+    notification_templates_success: Telemetry
+    notification_templates_error: Telemetry
+    extra_vars:
+      snapshot_operation: restore
+    survey_enabled: true
+    survey:
+      name: ''
+      description: ''
+      spec:
+        - question_name: Server Name or Pattern
+          type: text
+          variable: _hosts
+          default: "openshift-cnv-rhel*"
+          required: true
+        - question_name: VM NameSpace
+          type: text
+          variable: vm_namespace
+          default: openshift-cnv
+          required: true
+    credentials:
+      - "OpenShift Credential"
+
   - name: OpenShift / CNV / Delete VM
     job_type: run
     inventory: "Demo Inventory"
     project: "Ansible official demo project"
-    playbook: "openshift/cnv/provision.yml"
+    playbook: "openshift/cnv/delete.yml"
     notification_templates_started: Telemetry
     notification_templates_success: Telemetry
     notification_templates_error: Telemetry
@@ -111,19 +168,20 @@ controller_templates:
       name: ''
       description: ''
      spec:
-        - question_name: VM name
+        - question_name: VM host string
           type: text
-          variable: vm_name
+          variable: vm_host_string
           required: true
         - question_name: VM NameSpace
           type: text
           variable: vm_namespace
           default: openshift-cnv
           required: true
+
     credentials:
       - "OpenShift Credential"
 
-  - name: OpenShift / CNV / Patching
+  - name: OpenShift / CNV / Patch
     job_type: check
     inventory: "Demo Inventory"
     project: "Ansible official demo project"
@@ -235,3 +293,48 @@ controller_workflows:
         unified_job_template: 'SUBMIT FEEDBACK'
         extra_data:
           feedback: Failed to create CNV instance
+
+  - name: OpenShift / CNV / Patch CNV Workflow
+    description: A workflow to patch CNV instances, taking a snapshot first and restoring it if patching fails.
+    organization: Default
+    notification_templates_started: Telemetry
+    notification_templates_success: Telemetry
+    notification_templates_error: Telemetry
+    survey_enabled: true
+    survey:
+      name: ''
+      description: ''
+      spec:
+        - question_name: Specify target hosts
+          type: text
+          variable: _hosts
+          required: true
+          default: "openshift-cnv-rhel*"
+    simplified_workflow_nodes:
+      - identifier: Project Sync
+        unified_job_template: Ansible official demo project
+        success_nodes:
+          - Patch Instance
+      # We need to do an inventory sync *after* creating snapshots, as turning VMs off and on changes their IPs
+      - identifier: Inventory Sync
+        unified_job_template: OpenShift CNV Inventory
+        success_nodes:
+          - Patch Instance
+      - identifier: Take Snapshot
+        unified_job_template: OpenShift / CNV / Create VM Snapshots
+        success_nodes:
+          - Project Sync
+          - Inventory Sync
+      - identifier: Patch Instance
+        unified_job_template: OpenShift / CNV / Patch
+        job_type: run
+        failure_nodes:
+          - Restore from Snapshot
+      - identifier: Restore from Snapshot
+        unified_job_template: OpenShift / CNV / Restore Latest VM Snapshots
+        failure_nodes:
+          - Ticket - Restore Failed
+      - identifier: Ticket - Restore Failed
+        unified_job_template: 'SUBMIT FEEDBACK'
+        extra_data:
+          feedback: OpenShift / CNV / Patch CNV Workflow | Failed to restore CNV VM from snapshot
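Reviewer note (not part of the diff): a quick way to inspect what the workflow leaves behind, reusing the same kubernetes.core.k8s_info call that restore.yml makes; the namespace is an illustrative value and cluster credentials are assumed to be configured:

    ---
    - name: Inspect CNV snapshot objects
      hosts: localhost
      tasks:
        - name: List VirtualMachineSnapshots in the demo namespace
          kubernetes.core.k8s_info:
            api_version: snapshot.kubevirt.io/v1alpha1
            kind: VirtualMachineSnapshot
            namespace: openshift-cnv
          register: snaps

        - name: Show snapshot names, oldest to newest
          ansible.builtin.debug:
            msg: "{{ snaps.resources | sort(attribute='metadata.creationTimestamp') | map(attribute='metadata.name') | list }}"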